Snap for 7570015 from caf4ae8e54e1573f313bfe81dea1f478c279851e to mainline-sdkext-release

Change-Id: I1b92f7255f4bd152657c9430b5d80bffb84eea4a
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..60f0ff1
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,98 @@
+// *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS.  PLEASE
+//     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
+//     DEPENDING ON IT IN YOUR PROJECT. ***
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+        "SPDX-license-identifier-MIT",
+        "SPDX-license-identifier-Unicode-DFS",
+        "legacy_by_exception_only", // by exception only
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+aidl_interface {
+    name: "av-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/InterpolatorConfig.aidl",
+        "aidl/android/media/InterpolatorType.aidl",
+        "aidl/android/media/MicrophoneInfoData.aidl",
+        "aidl/android/media/VolumeShaperConfiguration.aidl",
+        "aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl",
+        "aidl/android/media/VolumeShaperConfigurationType.aidl",
+        "aidl/android/media/VolumeShaperOperation.aidl",
+        "aidl/android/media/VolumeShaperOperationFlag.aidl",
+        "aidl/android/media/VolumeShaperState.aidl",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.bluetooth.updatable",
+                "com.android.media",
+                "com.android.media.swcodec",
+            ],
+        },
+    },
+}
+
+cc_library_headers {
+    name: "av-headers",
+    export_include_dirs: ["include"],
+    static_libs: [
+        "av-types-aidl-cpp",
+    ],
+    export_static_lib_headers: [
+        "av-types-aidl-cpp",
+    ],
+    header_libs: [
+        "libaudioclient_aidl_conversion_util",
+    ],
+    export_header_lib_headers: [
+        "libaudioclient_aidl_conversion_util",
+    ],
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth.updatable",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..aabda36
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,23 @@
+# *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS.  PLEASE
+#     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
+#     DEPENDING ON IT IN YOUR PROJECT. ***
+third_party {
+  # would be NOTICE save for Widevine Master License Agreement in:
+  #   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
+  #   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+  #   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
+  #   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
+  #   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+  # and patent disclaimers in:
+  #   media/codec2/components/aac/patent_disclaimer.txt
+  #   media/codec2/components/amr_nb_wb/patent_disclaimer.txt
+  #   media/codec2/components/mp3/patent_disclaimer.txt
+  #   media/codec2/components/mpeg4_h263/patent_disclaimer.txt
+  #   media/codecs/amrnb/patent_disclaimer.txt
+  #   media/codecs/amrwb/dec/patent_disclaimer.txt
+  #   media/codecs/amrwb/enc/patent_disclaimer.txt
+  #   media/codecs/m4v_h263/patent_disclaimer.txt
+  #   media/codecs/mp3dec/patent_disclaimer.txt
+  #   media/libstagefright/codecs/aacenc/patent_disclaimer.txt
+  license_type: BY_EXCEPTION_ONLY
+}
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
index 37d714c..490bbbf 100644
--- a/MainlineFiles.cfg
+++ b/MainlineFiles.cfg
@@ -1,10 +1,13 @@
-# 
+#
 # mainline files for frameworks/av
+# this list is used by tools/mainline_hook_*.sh to help separate
+# mainline changes from framework changes, which release at different paces.
+#
 #
 # ignore comment (#) lines and blank lines
 # rest are path prefixes starting at root of the project
 # (so OWNERS, not frameworks/av/OWNERS)
-# 
+#
 # path
 # INCLUDE path
 # EXCLUDE path
@@ -24,11 +27,5 @@
 media/codec2/components/
 media/codecs/
 media/extractors/
-media/libstagefright/codecs/amrnb/
-media/libstagefright/codecs/amrwb/
-media/libstagefright/codecs/amrwbenc/
-media/libstagefright/codecs/common/
-media/libstagefright/codecs/flac/
-media/libstagefright/codecs/m4v_h263/
-media/libstagefright/codecs/mp3dec/
-media/libstagefright/mpeg2ts
+media/libstagefright/mpeg2ts/
+media/libstagefright/flac/
diff --git a/OWNERS b/OWNERS
index 8f405e9..7f523a2 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,4 +1,10 @@
+chz@google.com
 elaurent@google.com
 etalvala@google.com
+hkuang@google.com
 lajos@google.com
 marcone@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ae920c0..716b550 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,13 @@
 [Hook Scripts]
 mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+
+hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
+
+[Builtin Hooks]
+clang_format = true
+
+[Builtin Hooks Options]
+# Only turn on clang-format check for the following subfolders.
+clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
+               media/libmediatranscoding/
+               services/mediatranscoding/
diff --git a/aidl/android/media/InterpolatorConfig.aidl b/aidl/android/media/InterpolatorConfig.aidl
new file mode 100644
index 0000000..49f90e8
--- /dev/null
+++ b/aidl/android/media/InterpolatorConfig.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorType;
+
+/**
+ * {@hide}
+ */
+parcelable InterpolatorConfig {
+    InterpolatorType type = InterpolatorType.CUBIC;
+    /** For cubic interpolation, the boundary conditions in slope. */
+    float firstSlope;
+    float lastSlope;
+    /** A flattened list of <x, y> pairs, monotonically increasing in x. */
+    float[] xy;
+}
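The xy field above flattens the curve's control points, so consecutive floats form one <x, y> pair. A minimal sketch of populating this parcelable through the cpp backend of av-types-aidl declared earlier in Android.bp; the generated header path and the makeLinearRamp helper are assumptions for illustration only:

    #include <android/media/InterpolatorConfig.h>  // cpp-backend generated header (assumed path)

    // Two control points (0,0) and (1,1), flattened as [x0, y0, x1, y1].
    android::media::InterpolatorConfig makeLinearRamp() {
        android::media::InterpolatorConfig cfg;
        cfg.type = android::media::InterpolatorType::LINEAR;
        cfg.firstSlope = 1.0f;  // boundary slopes are only consumed for cubic interpolation
        cfg.lastSlope = 1.0f;
        cfg.xy = {0.0f, 0.0f, 1.0f, 1.0f};
        return cfg;
    }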
diff --git a/aidl/android/media/InterpolatorType.aidl b/aidl/android/media/InterpolatorType.aidl
new file mode 100644
index 0000000..b722cad
--- /dev/null
+++ b/aidl/android/media/InterpolatorType.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * Polynomial spline interpolators.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum InterpolatorType {
+    /** Not continuous. */
+    STEP,
+    /** C0. */
+    LINEAR,
+    /** C1. */
+    CUBIC,
+    /** C1 (to provide locally monotonic curves). */
+    CUBIC_MONOTONIC,
+    // CUBIC_C2, // TODO - requires global computation / cache
+}
diff --git a/aidl/android/media/MicrophoneInfoData.aidl b/aidl/android/media/MicrophoneInfoData.aidl
new file mode 100644
index 0000000..747bfa5
--- /dev/null
+++ b/aidl/android/media/MicrophoneInfoData.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable MicrophoneInfoData {
+    @utf8InCpp String deviceId;
+    int portId;
+    int type;
+    @utf8InCpp String address;
+    int deviceLocation;
+    int deviceGroup;
+    int indexInTheGroup;
+    float[] geometricLocation;
+    float[] orientation;
+    float[] frequencies;
+    float[] frequencyResponses;
+    int[] channelMapping;
+    float sensitivity;
+    float maxSpl;
+    float minSpl;
+    int directionality;
+}
diff --git a/aidl/android/media/VolumeShaperConfiguration.aidl b/aidl/android/media/VolumeShaperConfiguration.aidl
new file mode 100644
index 0000000..d6e6505
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfiguration.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.InterpolatorConfig;
+import android.media.VolumeShaperConfigurationOptionFlag;
+import android.media.VolumeShaperConfigurationType;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperConfiguration {
+    VolumeShaperConfigurationType type = VolumeShaperConfigurationType.ID;
+    int id;
+    /** Bitmask, indexed by VolumeShaperConfigurationOptionFlag. */
+    int optionFlags;
+    double durationMs;
+    @nullable InterpolatorConfig interpolatorConfig; // null if type == ID
+}
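Per the comment above, optionFlags is a bitmask indexed by the VolumeShaperConfigurationOptionFlag ordinals (so CLOCK_TIME maps to bit 1). A small illustrative helper, assuming the cpp-backend enum generated from this interface; optionFlagBit is a hypothetical name:

    #include <android/media/VolumeShaperConfigurationOptionFlag.h>  // generated header (assumed path)

    // Convert a flag ordinal into its bit in VolumeShaperConfiguration.optionFlags,
    // e.g. CLOCK_TIME (ordinal 1) becomes 0x2.
    constexpr int32_t optionFlagBit(android::media::VolumeShaperConfigurationOptionFlag flag) {
        return 1 << static_cast<int32_t>(flag);
    }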
diff --git a/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
new file mode 100644
index 0000000..f583cee
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationOptionFlag {
+    VOLUME_IN_DBFS,
+    CLOCK_TIME,
+}
diff --git a/aidl/android/media/VolumeShaperConfigurationType.aidl b/aidl/android/media/VolumeShaperConfigurationType.aidl
new file mode 100644
index 0000000..aa6334e
--- /dev/null
+++ b/aidl/android/media/VolumeShaperConfigurationType.aidl
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperConfigurationType {
+    ID,
+    SCALE,
+}
diff --git a/aidl/android/media/VolumeShaperOperation.aidl b/aidl/android/media/VolumeShaperOperation.aidl
new file mode 100644
index 0000000..dd9a0e7
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperation.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperOperation {
+    /** Operations to do. Bitmask of VolumeShaperOperationFlag. */
+    int flags;
+    /** If >= 0 the id to remove in a replace operation. */
+    int replaceId;
+    /** Position in the curve to set if a valid number (not nan). */
+    float xOffset;
+}
diff --git a/aidl/android/media/VolumeShaperOperationFlag.aidl b/aidl/android/media/VolumeShaperOperationFlag.aidl
new file mode 100644
index 0000000..8fe5275
--- /dev/null
+++ b/aidl/android/media/VolumeShaperOperationFlag.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum VolumeShaperOperationFlag {
+    /** The absence of this flag indicates "play". */
+    REVERSE,
+    TERMINATE,
+    JOIN,
+    DELAY,
+    CREATE_IF_NECESSARY,
+}
diff --git a/aidl/android/media/VolumeShaperState.aidl b/aidl/android/media/VolumeShaperState.aidl
new file mode 100644
index 0000000..4085e2b
--- /dev/null
+++ b/aidl/android/media/VolumeShaperState.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable VolumeShaperState {
+    /** Linear volume in the range MIN_LINEAR_VOLUME to MAX_LINEAR_VOLUME. */
+    float volume;
+    /** Position on curve expressed from MIN_CURVE_TIME to MAX_CURVE_TIME. */
+    float xOffset;
+}
diff --git a/apex/Android.bp b/apex/Android.bp
index 2ba6267..b9abd12 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -12,10 +12,20 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 apex_defaults {
     name: "com.android.media-defaults",
     updatable: true,
-    java_libs: ["updatable-media"],
+    bootclasspath_fragments: ["com.android.media-bootclasspath-fragment"],
+    systemserverclasspath_fragments: ["com.android.media-systemserverclasspath-fragment"],
     multilib: {
         first: {
             // Extractor process runs only with the primary ABI.
@@ -32,15 +42,24 @@
                 "liboggextractor",
                 "libwavextractor",
             ],
+            // Transcoding service runs only with the primary ABI.
+            binaries: [
+                "mediatranscoding",
+            ],
         },
     },
     // JNI
-    native_shared_libs: ["libmediaparser-jni"],
+    native_shared_libs: [
+        "libmediaparser-jni",
+        "libmediaformatshaper",
+    ],
     compile_multilib: "both",
     prebuilts: [
-        "mediaextractor.policy",
         "code_coverage.policy",
+        "com.android.media-mediatranscoding.rc",
         "crash_dump.policy",
+        "mediaextractor.policy",
+        "media-linker-config",
     ],
     key: "com.android.media.key",
     certificate: ":com.android.media.certificate",
@@ -56,12 +75,65 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on per-device basis.
+    compressible: true,
 }
 
 apex {
     name: "com.android.media",
     manifest: "manifest.json",
     defaults: ["com.android.media-defaults"],
+    prebuilts: ["current_sdkinfo"],
+}
+
+linker_config {
+    name: "media-linker-config",
+    src: "linker.config.json",
+    installable: false,
+}
+
+// Encapsulate the contributions made by the com.android.media apex to the bootclasspath.
+bootclasspath_fragment {
+    name: "com.android.media-bootclasspath-fragment",
+    contents: ["updatable-media"],
+    apex_available: ["com.android.media"],
+
+    api: {
+        stub_libs: [
+            // Stubs for the APIs provided by updatable-media. This has to be
+            // specified explicitly because updatable-media is not a
+            // java_sdk_library.
+            "framework-media",
+        ],
+    },
+
+    // The bootclasspath_fragments that provide APIs on which this depends.
+    fragments: [
+        {
+            apex: "com.android.art",
+            module: "art-bootclasspath-fragment",
+        },
+    ],
+
+    // Additional stubs libraries that this fragment's contents use which are
+    // not provided by another bootclasspath_fragment.
+    additional_stubs: [
+        "android-non-updatable",
+    ],
+
+    // Additional hidden API flag files to override the defaults. This must only be
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+    },
+}
+
+// Encapsulate the contributions made by the com.android.media apex to the systemserverclasspath.
+systemserverclasspath_fragment {
+    name: "com.android.media-systemserverclasspath-fragment",
+    contents: ["service-media-s"],
+    apex_available: ["com.android.media"],
 }
 
 filegroup {
@@ -80,6 +152,12 @@
     binaries: [
         "mediaswcodec",
     ],
+    native_shared_libs: [
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
+        "libstagefright_foundation",
+    ],
     prebuilts: [
         "com.android.media.swcodec-mediaswcodec.rc",
         "com.android.media.swcodec-ld.config.txt",
@@ -88,7 +166,6 @@
         "crash_dump.policy",
         "mediaswcodec.xml",
     ],
-    use_vendor: true,
     key: "com.android.media.swcodec.key",
     certificate: ":com.android.media.swcodec.certificate",
 
@@ -103,6 +180,16 @@
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
     min_sdk_version: "29",
+    // Indicates that pre-installed version of this apex can be compressed.
+    // Whether it actually will be compressed is controlled on per-device basis.
+    compressible: true,
+}
+
+prebuilt_etc {
+    name: "com.android.media-mediatranscoding.rc",
+    src: "mediatranscoding.rc",
+    filename: "init.rc",
+    installable: false,
 }
 
 prebuilt_etc {
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index f036516..4b7c019 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -14,17 +14,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/apex/hiddenapi/OWNERS b/apex/hiddenapi/OWNERS
new file mode 100644
index 0000000..ac8a2b6
--- /dev/null
+++ b/apex/hiddenapi/OWNERS
@@ -0,0 +1,5 @@
+# soong-team@ as the hiddenapi files are tightly coupled with Soong
+file:platform/build/soong:/OWNERS
+
+# compat-team@ for changes to hiddenapi files
+file:tools/platform-compat:/OWNERS
diff --git a/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
new file mode 100644
index 0000000..32bbb10
--- /dev/null
+++ b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
@@ -0,0 +1,6 @@
+Landroid/media/MediaSession2$ControllerInfo;-><init>(Landroid/content/Context;IILjava/lang/String;Landroid/os/IInterface;)V
+Landroid/media/MediaSession2$ControllerInfo;->getPackageName()Ljava/lang/String;
+Landroid/media/MediaSession2$ControllerInfo;->getProvider()Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
+Landroid/media/MediaSession2$ControllerInfo;->getUid()I
+Landroid/media/MediaSession2$ControllerInfo;->isTrusted()Z
+Landroid/media/MediaSession2$ControllerInfo;->mProvider:Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
diff --git a/apex/linker.config.json b/apex/linker.config.json
new file mode 100644
index 0000000..67c076e
--- /dev/null
+++ b/apex/linker.config.json
@@ -0,0 +1,3 @@
+{
+    "visible": true
+}
diff --git a/apex/manifest.json b/apex/manifest.json
index f1f69f4..5d72031 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,10 @@
 {
   "name": "com.android.media",
-  "version": 309999900
+  "version": 319999900,
+  "requireNativeLibs": [
+    "libandroid.so",
+    "libbinder_ndk.so",
+    "libmediandk.so",
+    ":sphal"
+  ]
 }
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index e20d867..b0d962d 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 309999900,
+  "version": 319999900,
   "requireNativeLibs": [
     ":sphal"
   ]
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index d17481b..0c9b8c8 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -2,6 +2,5 @@
     class main
     user mediacodec
     group camera drmrpc mediadrm
-    override
     ioprio rt 4
     writepid /dev/cpuset/foreground/tasks
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
new file mode 100644
index 0000000..ae9f8ba
--- /dev/null
+++ b/apex/mediatranscoding.rc
@@ -0,0 +1,12 @@
+# The media.transcoding service is defined in the com.android.media apex, which goes
+# back to API 29, but we only want it started on API 31+ devices. So we declare it
+# as "disabled" and start it explicitly on boot.
+service media.transcoding /apex/com.android.media/bin/mediatranscoding
+    class main
+    user media
+    group media
+    ioprio rt 4
+    # Restrict to little cores only with system-background cpuset.
+    writepid /dev/cpuset/system-background/tasks
+    interface aidl media.transcoding
+    disabled
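Because the service is declared "disabled", init will not launch it along with the rest of "class main"; something privileged has to start it explicitly. One generic way to do that from native code is the ctl.start control property, sketched below; this only illustrates the mechanism, is not necessarily how the platform actually triggers this service, and the caller would need SELinux permission to control it.

    #include <android-base/properties.h>

    // Ask init to start the disabled media.transcoding service defined above.
    bool startMediaTranscoding() {
        // Returns true if init accepted the control message.
        return android::base::SetProperty("ctl.start", "media.transcoding");
    }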
diff --git a/apex/testing/Android.bp b/apex/testing/Android.bp
index a04ab3f..8b81090 100644
--- a/apex/testing/Android.bp
+++ b/apex/testing/Android.bp
@@ -12,12 +12,24 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 apex_test {
     name: "test_com.android.media",
     manifest: "test_manifest.json",
     file_contexts: ":com.android.media-file_contexts",
     defaults: ["com.android.media-defaults"],
-    prebuilts: ["sdkinfo_45"],
+    prebuilts: [
+        "sdkinfo_45",
+        "media-linker-config",
+    ],
     installable: false,
 }
 
diff --git a/camera/Android.bp b/camera/Android.bp
index fa36bb3..6878c20 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -12,6 +12,37 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_camera_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-MIT",
+        "SPDX-license-identifier-Unicode-DFS",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libcamera_client",
 
@@ -35,10 +66,10 @@
         "CameraParameters.cpp",
         "CaptureResult.cpp",
         "CameraParameters2.cpp",
+        "CameraSessionStats.cpp",
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
-        "ICameraRecordingProxyListener.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
@@ -50,6 +81,7 @@
     ],
 
     shared_libs: [
+        "libbase",
         "libcutils",
         "libutils",
         "liblog",
@@ -87,6 +119,8 @@
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
         "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
+        "aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
+        "aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
     ],
     path: "aidl",
 }
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 84d1d93..604dbb8 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -25,7 +25,6 @@
 #include <binder/IMemory.h>
 
 #include <Camera.h>
-#include <ICameraRecordingProxyListener.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICamera.h>
 
@@ -72,66 +71,10 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
-        int clientUid, int clientPid)
+        int clientUid, int clientPid, int targetSdkVersion)
 {
-    return CameraBaseT::connect(cameraId, clientPackageName, clientUid, clientPid);
-}
-
-status_t Camera::connectLegacy(int cameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        sp<Camera>& camera)
-{
-    ALOGV("%s: connect legacy camera device", __FUNCTION__);
-    sp<Camera> c = new Camera(cameraId);
-    sp<::android::hardware::ICameraClient> cl = c;
-    status_t status = NO_ERROR;
-    const sp<::android::hardware::ICameraService>& cs = CameraBaseT::getCameraService();
-
-    binder::Status ret;
-    if (cs != nullptr) {
-        ret = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
-                clientUid, /*out*/&(c->mCamera));
-    }
-    if (ret.isOk() && c->mCamera != nullptr) {
-        IInterface::asBinder(c->mCamera)->linkToDeath(c);
-        c->mStatus = NO_ERROR;
-        camera = c;
-    } else {
-        switch(ret.serviceSpecificErrorCode()) {
-            case hardware::ICameraService::ERROR_DISCONNECTED:
-                status = -ENODEV;
-                break;
-            case hardware::ICameraService::ERROR_CAMERA_IN_USE:
-                status = -EBUSY;
-                break;
-            case hardware::ICameraService::ERROR_INVALID_OPERATION:
-                status = -EINVAL;
-                break;
-            case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
-                status = -EUSERS;
-                break;
-            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
-                status = BAD_VALUE;
-                break;
-            case hardware::ICameraService::ERROR_DEPRECATED_HAL:
-                status = -EOPNOTSUPP;
-                break;
-            case hardware::ICameraService::ERROR_DISABLED:
-                status = -EACCES;
-                break;
-            case hardware::ICameraService::ERROR_PERMISSION_DENIED:
-                status = PERMISSION_DENIED;
-                break;
-            default:
-                status = -EINVAL;
-                ALOGW("An error occurred while connecting to camera %d: %s", cameraId,
-                        (cs != nullptr) ? "Service not available" : ret.toString8().string());
-                break;
-        }
-        c.clear();
-    }
-    return status;
+    return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
+            clientPid, targetSdkVersion);
 }
 
 status_t Camera::reconnect()
@@ -214,10 +157,6 @@
 void Camera::stopRecording()
 {
     ALOGV("stopRecording");
-    {
-        Mutex::Autolock _l(mLock);
-        mRecordingProxyListener.clear();
-    }
     sp <::android::hardware::ICamera> c = mCamera;
     if (c == 0) return;
     c->stopRecording();
@@ -325,12 +264,6 @@
     mListener = listener;
 }
 
-void Camera::setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener)
-{
-    Mutex::Autolock _l(mLock);
-    mRecordingProxyListener = listener;
-}
-
 void Camera::setPreviewCallbackFlags(int flag)
 {
     ALOGV("setPreviewCallbackFlags");
@@ -384,19 +317,6 @@
 // callback from camera service when timestamped frame is ready
 void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrame.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -413,19 +333,6 @@
 
 void Camera::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -444,19 +351,6 @@
         const std::vector<nsecs_t>& timestamps,
         const std::vector<native_handle_t*>& handles)
 {
-    // If recording proxy listener is registered, forward the frame and return.
-    // The other listener (mListener) is ignored because the receiver needs to
-    // call releaseRecordingFrameHandle.
-    sp<ICameraRecordingProxyListener> proxylistener;
-    {
-        Mutex::Autolock _l(mLock);
-        proxylistener = mRecordingProxyListener;
-    }
-    if (proxylistener != NULL) {
-        proxylistener->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-        return;
-    }
-
     sp<CameraListener> listener;
     {
         Mutex::Autolock _l(mLock);
@@ -476,10 +370,9 @@
     return new RecordingProxy(this);
 }
 
-status_t Camera::RecordingProxy::startRecording(const sp<ICameraRecordingProxyListener>& listener)
+status_t Camera::RecordingProxy::startRecording()
 {
     ALOGV("RecordingProxy::startRecording");
-    mCamera->setRecordingProxyListener(listener);
     mCamera->reconnect();
     return mCamera->startRecording();
 }
@@ -490,23 +383,6 @@
     mCamera->stopRecording();
 }
 
-void Camera::RecordingProxy::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("RecordingProxy::releaseRecordingFrame");
-    mCamera->releaseRecordingFrame(mem);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandle(native_handle_t* handle) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandle");
-    mCamera->releaseRecordingFrameHandle(handle);
-}
-
-void Camera::RecordingProxy::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("RecordingProxy::releaseRecordingFrameHandleBatch");
-    mCamera->releaseRecordingFrameHandleBatch(handles);
-}
-
 Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
 {
     mCamera = camera;
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index aecb70a..03439fd 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -29,6 +29,7 @@
 #include <binder/IMemory.h>
 
 #include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
 
 // needed to instantiate
 #include <camera/Camera.h>
@@ -124,9 +125,7 @@
 {
     Mutex::Autolock _l(gLock);
     if (gCameraService.get() == 0) {
-        char value[PROPERTY_VALUE_MAX];
-        property_get("config.disable_cameraservice", value, "0");
-        if (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0) {
+        if (CameraUtils::isCameraServiceDisabled()) {
             return gCameraService;
         }
 
@@ -153,7 +152,7 @@
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const String16& clientPackageName,
-                                               int clientUid, int clientPid)
+                                               int clientUid, int clientPid, int targetSdkVersion)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -164,7 +163,7 @@
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                                               clientPid, /*out*/ &c->mCamera);
+                                               clientPid, targetSdkVersion, /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 135384a..a4ae71b 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -22,6 +22,7 @@
 
 #include <binder/Parcel.h>
 #include <camera/CameraMetadata.h>
+#include <camera_metadata_hidden.h>
 
 namespace android {
 
@@ -169,6 +170,11 @@
     return entryCount() == 0;
 }
 
+size_t CameraMetadata::bufferSize() const {
+    return (mBuffer == NULL) ? 0 :
+            get_camera_metadata_size(mBuffer);
+}
+
 status_t CameraMetadata::sort() {
     if (mLocked) {
         ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
@@ -522,6 +528,8 @@
             mBuffer = allocate_camera_metadata(newEntryCount,
                     newDataCount);
             if (mBuffer == NULL) {
+                // Maintain old buffer to avoid potential memory leak.
+                mBuffer = oldBuffer;
                 ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
                 return NO_MEMORY;
             }
@@ -872,5 +880,8 @@
     return OK;
 }
 
+metadata_vendor_id_t CameraMetadata::getVendorId() {
+    return get_camera_metadata_vendor_id(mBuffer);
+}
 
 }; // namespace android
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 68969cf..e95c91c 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -20,6 +20,7 @@
 
 #include <string.h>
 #include <stdlib.h>
+#include <unistd.h>
 #include <camera/CameraParameters.h>
 #include <system/graphics.h>
 
diff --git a/camera/CameraParameters2.cpp b/camera/CameraParameters2.cpp
index c29233c..a1cf355 100644
--- a/camera/CameraParameters2.cpp
+++ b/camera/CameraParameters2.cpp
@@ -21,6 +21,7 @@
 
 #include <string.h>
 #include <stdlib.h>
+#include <unistd.h>
 #include <camera/CameraParameters2.h>
 
 namespace android {
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
new file mode 100644
index 0000000..28e037f
--- /dev/null
+++ b/camera/CameraSessionStats.cpp
@@ -0,0 +1,428 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "CameraSessionStats"
+#include <utils/Log.h>
+#include <utils/String16.h>
+
+#include <camera/CameraSessionStats.h>
+
+#include <binder/Parcel.h>
+
+namespace android {
+namespace hardware {
+
+status_t CameraStreamStats::readFromParcel(const android::Parcel* parcel) {
+    if (parcel == NULL) {
+        ALOGE("%s: Null parcel", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t err = OK;
+
+    int width = 0;
+    if ((err = parcel->readInt32(&width)) != OK) {
+        ALOGE("%s: Failed to read width from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int height = 0;
+    if ((err = parcel->readInt32(&height)) != OK) {
+        ALOGE("%s: Failed to read height from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int format = 0;
+    if ((err = parcel->readInt32(&format)) != OK) {
+        ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int dataSpace = 0;
+    if ((err = parcel->readInt32(&dataSpace)) != OK) {
+        ALOGE("%s: Failed to read dataSpace from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t usage = 0;
+    if ((err = parcel->readInt64(&usage)) != OK) {
+        ALOGE("%s: Failed to read usage from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t requestCount = 0;
+    if ((err = parcel->readInt64(&requestCount)) != OK) {
+        ALOGE("%s: Failed to read request count from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t errorCount = 0;
+    if ((err = parcel->readInt64(&errorCount)) != OK) {
+        ALOGE("%s: Failed to read error count from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int startLatencyMs = 0;
+    if ((err = parcel->readInt32(&startLatencyMs)) != OK) {
+        ALOGE("%s: Failed to read start latency from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int maxHalBuffers = 0;
+    if ((err = parcel->readInt32(&maxHalBuffers)) != OK) {
+        ALOGE("%s: Failed to read max Hal buffers from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int maxAppBuffers = 0;
+    if ((err = parcel->readInt32(&maxAppBuffers)) != OK) {
+        ALOGE("%s: Failed to read max app buffers from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int histogramType = HISTOGRAM_TYPE_UNKNOWN;
+    if ((err = parcel->readInt32(&histogramType)) != OK) {
+        ALOGE("%s: Failed to read histogram type from parcel", __FUNCTION__);
+        return err;
+    }
+
+    std::vector<float> histogramBins;
+    if ((err = parcel->readFloatVector(&histogramBins)) != OK) {
+        ALOGE("%s: Failed to read histogram bins from parcel", __FUNCTION__);
+        return err;
+    }
+
+    std::vector<int64_t> histogramCounts;
+    if ((err = parcel->readInt64Vector(&histogramCounts)) != OK) {
+        ALOGE("%s: Failed to read histogram counts from parcel", __FUNCTION__);
+        return err;
+    }
+
+    mWidth = width;
+    mHeight = height;
+    mFormat = format;
+    mDataSpace = dataSpace;
+    mUsage = usage;
+    mRequestCount = requestCount;
+    mErrorCount = errorCount;
+    mStartLatencyMs = startLatencyMs;
+    mMaxHalBuffers = maxHalBuffers;
+    mMaxAppBuffers = maxAppBuffers;
+    mHistogramType = histogramType;
+    mHistogramBins = std::move(histogramBins);
+    mHistogramCounts = std::move(histogramCounts);
+
+    return OK;
+}
+
+status_t CameraStreamStats::writeToParcel(android::Parcel* parcel) const {
+    if (parcel == NULL) {
+        ALOGE("%s: Null parcel", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t err = OK;
+
+    if ((err = parcel->writeInt32(mWidth)) != OK) {
+        ALOGE("%s: Failed to write stream width!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mHeight)) != OK) {
+        ALOGE("%s: Failed to write stream height!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mFormat)) != OK) {
+        ALOGE("%s: Failed to write stream format!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mDataSpace)) != OK) {
+        ALOGE("%s: Failed to write stream dataSpace!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mUsage)) != OK) {
+        ALOGE("%s: Failed to write stream usage!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mRequestCount)) != OK) {
+        ALOGE("%s: Failed to write stream request count!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mErrorCount)) != OK) {
+        ALOGE("%s: Failed to write stream error count!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mStartLatencyMs)) != OK) {
+        ALOGE("%s: Failed to write stream start latency!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mMaxHalBuffers)) != OK) {
+        ALOGE("%s: Failed to write max hal buffers", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mMaxAppBuffers)) != OK) {
+        ALOGE("%s: Failed to write max app buffers", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mHistogramType)) != OK) {
+        ALOGE("%s: Failed to write histogram type", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeFloatVector(mHistogramBins)) != OK) {
+        ALOGE("%s: Failed to write histogram bins!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64Vector(mHistogramCounts)) != OK) {
+        ALOGE("%s: Failed to write histogram counts!", __FUNCTION__);
+        return err;
+    }
+
+    return OK;
+}
+
+const int CameraSessionStats::CAMERA_STATE_OPEN = 0;
+const int CameraSessionStats::CAMERA_STATE_ACTIVE = 1;
+const int CameraSessionStats::CAMERA_STATE_IDLE = 2;
+const int CameraSessionStats::CAMERA_STATE_CLOSED = 3;
+
+const int CameraSessionStats::CAMERA_FACING_BACK = 0;
+const int CameraSessionStats::CAMERA_FACING_FRONT = 1;
+const int CameraSessionStats::CAMERA_FACING_EXTERNAL = 2;
+
+const int CameraSessionStats::CAMERA_API_LEVEL_1 = 1;
+const int CameraSessionStats::CAMERA_API_LEVEL_2 = 2;
+
+CameraSessionStats::CameraSessionStats() :
+        mFacing(CAMERA_FACING_BACK),
+        mNewCameraState(CAMERA_STATE_CLOSED),
+        mApiLevel(0),
+        mIsNdk(false),
+        mLatencyMs(-1),
+        mSessionType(0),
+        mInternalReconfigure(0),
+        mRequestCount(0),
+        mResultErrorCount(0),
+        mDeviceError(false) {}
+
+CameraSessionStats::CameraSessionStats(const String16& cameraId,
+        int facing, int newCameraState, const String16& clientName,
+        int apiLevel, bool isNdk, int32_t latencyMs) :
+                mCameraId(cameraId),
+                mFacing(facing),
+                mNewCameraState(newCameraState),
+                mClientName(clientName),
+                mApiLevel(apiLevel),
+                mIsNdk(isNdk),
+                mLatencyMs(latencyMs),
+                mSessionType(0),
+                mInternalReconfigure(0),
+                mRequestCount(0),
+                mResultErrorCount(0),
+                mDeviceError(0) {}
+
+status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
+    if (parcel == NULL) {
+        ALOGE("%s: Null parcel", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t err = OK;
+
+    String16 id;
+    if ((err = parcel->readString16(&id)) != OK) {
+        ALOGE("%s: Failed to read camera id!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    int facing = 0;
+    if ((err = parcel->readInt32(&facing)) != OK) {
+        ALOGE("%s: Failed to read camera facing from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int32_t newCameraState;
+    if ((err = parcel->readInt32(&newCameraState)) != OK) {
+        ALOGE("%s: Failed to read new camera state from parcel", __FUNCTION__);
+        return err;
+    }
+
+    String16 clientName;
+    if ((err = parcel->readString16(&clientName)) != OK) {
+        ALOGE("%s: Failed to read client name!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    int32_t apiLevel;
+    if ((err = parcel->readInt32(&apiLevel)) != OK) {
+        ALOGE("%s: Failed to read api level from parcel", __FUNCTION__);
+        return err;
+    }
+
+    bool isNdk;
+    if ((err = parcel->readBool(&isNdk)) != OK) {
+        ALOGE("%s: Failed to read isNdk flag from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int32_t latencyMs;
+    if ((err = parcel->readInt32(&latencyMs)) != OK) {
+        ALOGE("%s: Failed to read latencyMs from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int32_t sessionType;
+    if ((err = parcel->readInt32(&sessionType)) != OK) {
+        ALOGE("%s: Failed to read session type from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int32_t internalReconfigure;
+    if ((err = parcel->readInt32(&internalReconfigure)) != OK) {
+        ALOGE("%s: Failed to read internal reconfigure count from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t requestCount;
+    if ((err = parcel->readInt64(&requestCount)) != OK) {
+        ALOGE("%s: Failed to read request count from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t resultErrorCount;
+    if ((err = parcel->readInt64(&resultErrorCount)) != OK) {
+        ALOGE("%s: Failed to read result error count from parcel", __FUNCTION__);
+        return err;
+    }
+
+    bool deviceError;
+    if ((err = parcel->readBool(&deviceError)) != OK) {
+        ALOGE("%s: Failed to read device error flag from parcel", __FUNCTION__);
+        return err;
+    }
+
+    std::vector<CameraStreamStats> streamStats;
+    if ((err = parcel->readParcelableVector(&streamStats)) != OK) {
+        ALOGE("%s: Failed to read stream state from parcel", __FUNCTION__);
+        return err;
+    }
+
+    mCameraId = id;
+    mFacing = facing;
+    mNewCameraState = newCameraState;
+    mClientName = clientName;
+    mApiLevel = apiLevel;
+    mIsNdk = isNdk;
+    mLatencyMs = latencyMs;
+    mSessionType = sessionType;
+    mInternalReconfigure = internalReconfigure;
+    mRequestCount = requestCount;
+    mResultErrorCount = resultErrorCount;
+    mDeviceError = deviceError;
+    mStreamStats = std::move(streamStats);
+
+    return OK;
+}
+
+status_t CameraSessionStats::writeToParcel(android::Parcel* parcel) const {
+    if (parcel == NULL) {
+        ALOGE("%s: Null parcel", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t err = OK;
+
+    if ((err = parcel->writeString16(mCameraId)) != OK) {
+        ALOGE("%s: Failed to write camera id!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mFacing)) != OK) {
+        ALOGE("%s: Failed to write camera facing!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mNewCameraState)) != OK) {
+        ALOGE("%s: Failed to write new camera state!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeString16(mClientName)) != OK) {
+        ALOGE("%s: Failed to write client name!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mApiLevel)) != OK) {
+        ALOGE("%s: Failed to write api level!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeBool(mIsNdk)) != OK) {
+        ALOGE("%s: Failed to write isNdk flag!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mLatencyMs)) != OK) {
+        ALOGE("%s: Failed to write latency in Ms!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mSessionType)) != OK) {
+        ALOGE("%s: Failed to write session type!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mInternalReconfigure)) != OK) {
+        ALOGE("%s: Failed to write internal reconfigure count!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mRequestCount)) != OK) {
+        ALOGE("%s: Failed to write request count!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mResultErrorCount)) != OK) {
+        ALOGE("%s: Failed to write result error count!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeBool(mDeviceError)) != OK) {
+        ALOGE("%s: Failed to write device error flag!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeParcelableVector(mStreamStats)) != OK) {
+        ALOGE("%s: Failed to write stream states!", __FUNCTION__);
+        return err;
+    }
+
+    return OK;
+}
+
+} // namespace hardware
+} // namespace android
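The two methods above follow the usual Parcelable contract: writeToParcel() serializes every field in a fixed order and readFromParcel() consumes them in exactly the same order, so the two must stay in sync. A minimal round-trip sketch, test-style code rather than part of this change; roundTripStreamStats is a hypothetical helper:

    #include <binder/Parcel.h>
    #include <camera/CameraSessionStats.h>

    // Serialize a CameraStreamStats and read it back from the same Parcel.
    bool roundTripStreamStats(const android::hardware::CameraStreamStats& in,
                              android::hardware::CameraStreamStats* out) {
        android::Parcel parcel;
        if (in.writeToParcel(&parcel) != android::OK) return false;
        parcel.setDataPosition(0);  // rewind before reading back
        return out->readFromParcel(&parcel) == android::OK;
    }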
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 67fc116..af3c492 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -20,6 +20,7 @@
 #include <camera/CameraUtils.h>
 #include <media/hardware/HardwareAPI.h>
 
+#include <android-base/properties.h>
 #include <system/window.h>
 #include <system/graphics.h>
 
@@ -27,6 +28,8 @@
 
 namespace android {
 
+const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
+
 status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
                 /*out*/int32_t* transform) {
     ALOGV("%s", __FUNCTION__);
@@ -122,4 +125,8 @@
     return OK;
 }
 
+bool CameraUtils::isCameraServiceDisabled() {
+    return base::GetBoolProperty(kCameraServiceDisabledProperty, false);
+}
+
 } /* namespace android */
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index 755051c..be47898 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -89,7 +89,7 @@
 status_t PhysicalCaptureResultInfo::readFromParcel(const android::Parcel* parcel) {
     status_t res;
 
-    mPhysicalCameraId.remove(mPhysicalCameraId.size());
+    mPhysicalCameraId.setTo(u"");
     mPhysicalCameraMetadata.clear();
 
     if ((res = parcel->readString16(&mPhysicalCameraId)) != OK) {
diff --git a/camera/ICameraClient.cpp b/camera/ICameraClient.cpp
index c02c81b..bef2ea0 100644
--- a/camera/ICameraClient.cpp
+++ b/camera/ICameraClient.cpp
@@ -142,7 +142,8 @@
             camera_frame_metadata_t metadata;
             if (data.dataAvail() > 0) {
                 metadata.number_of_faces = data.readInt32();
-                if (metadata.number_of_faces <= 0 ||
+                // Zero faces is a valid case: it notifies clients that no faces are currently visible
+                if (metadata.number_of_faces < 0 ||
                         metadata.number_of_faces > (int32_t)(INT32_MAX / sizeof(camera_face_t))) {
                     ALOGE("%s: Too large face count: %d", __FUNCTION__, metadata.number_of_faces);
                     return BAD_VALUE;
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index bd6af75..97523a5 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "ICameraRecordingProxy"
 #include <camera/CameraUtils.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <binder/IMemory.h>
 #include <binder/Parcel.h>
 #include <media/hardware/HardwareAPI.h>
@@ -29,10 +28,7 @@
 
 enum {
     START_RECORDING = IBinder::FIRST_CALL_TRANSACTION,
-    STOP_RECORDING,
-    RELEASE_RECORDING_FRAME,
-    RELEASE_RECORDING_FRAME_HANDLE,
-    RELEASE_RECORDING_FRAME_HANDLE_BATCH,
+    STOP_RECORDING
 };
 
 
@@ -44,12 +40,11 @@
     {
     }
 
-    status_t startRecording(const sp<ICameraRecordingProxyListener>& listener)
+    status_t startRecording()
     {
         ALOGV("startRecording");
         Parcel data, reply;
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(listener));
         remote()->transact(START_RECORDING, data, &reply);
         return reply.readInt32();
     }
@@ -61,46 +56,6 @@
         data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
         remote()->transact(STOP_RECORDING, data, &reply);
     }
-
-    void releaseRecordingFrame(const sp<IMemory>& mem)
-    {
-        ALOGV("releaseRecordingFrame");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(mem));
-        remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
-    }
-
-    void releaseRecordingFrameHandle(native_handle_t *handle) {
-        ALOGV("releaseRecordingFrameHandle");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        data.writeNativeHandle(handle);
-
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-        ALOGV("releaseRecordingFrameHandleBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
-        uint32_t n = handles.size();
-        data.writeUint32(n);
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
-
-        // Close the native handle because camera received a dup copy.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
 };
 
 IMPLEMENT_META_INTERFACE(CameraRecordingProxy, "android.hardware.ICameraRecordingProxy");
@@ -114,9 +69,7 @@
         case START_RECORDING: {
             ALOGV("START_RECORDING");
             CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<ICameraRecordingProxyListener> listener =
-                interface_cast<ICameraRecordingProxyListener>(data.readStrongBinder());
-            reply->writeInt32(startRecording(listener));
+            reply->writeInt32(startRecording());
             return NO_ERROR;
         } break;
         case STOP_RECORDING: {
@@ -125,46 +78,6 @@
             stopRecording();
             return NO_ERROR;
         } break;
-        case RELEASE_RECORDING_FRAME: {
-            ALOGV("RELEASE_RECORDING_FRAME");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
-            releaseRecordingFrame(mem);
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-
-            // releaseRecordingFrameHandle will be responsble to close the native handle.
-            releaseRecordingFrameHandle(data.readNativeHandle());
-            return NO_ERROR;
-        } break;
-        case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
-            ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<native_handle_t*> handles;
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-
-            // releaseRecordingFrameHandleBatch will be responsble to close the native handle.
-            releaseRecordingFrameHandleBatch(handles);
-            return NO_ERROR;
-        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
@@ -173,4 +86,3 @@
 // ----------------------------------------------------------------------------
 
 }; // namespace android
-
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
deleted file mode 100644
index 66faf8f..0000000
--- a/camera/ICameraRecordingProxyListener.cpp
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ICameraRecordingProxyListener"
-#include <camera/CameraUtils.h>
-#include <camera/ICameraRecordingProxyListener.h>
-#include <binder/IMemory.h>
-#include <binder/Parcel.h>
-#include <media/hardware/HardwareAPI.h>
-#include <utils/Log.h>
-
-namespace android {
-
-enum {
-    DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
-    RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH
-};
-
-class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
-{
-public:
-    explicit BpCameraRecordingProxyListener(const sp<IBinder>& impl)
-        : BpInterface<ICameraRecordingProxyListener>(impl)
-    {
-    }
-
-    void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& imageData)
-    {
-        ALOGV("dataCallback");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeInt32(msgType);
-        data.writeStrongBinder(IInterface::asBinder(imageData));
-        remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
-        ALOGV("recordingFrameHandleCallbackTimestamp");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-        data.writeInt64(timestamp);
-        data.writeNativeHandle(handle);
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-
-    void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) {
-        ALOGV("recordingFrameHandleCallbackTimestampBatch");
-        Parcel data, reply;
-        data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
-
-        uint32_t n = timestamps.size();
-        if (n != handles.size()) {
-            ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
-                    __FUNCTION__, timestamps.size(), handles.size());
-            return;
-        }
-        data.writeUint32(n);
-        for (auto ts : timestamps) {
-            data.writeInt64(ts);
-        }
-        for (auto& handle : handles) {
-            data.writeNativeHandle(handle);
-        }
-        remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
-                IBinder::FLAG_ONEWAY);
-
-        // The native handle is dupped in ICameraClient so we need to free it here.
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-};
-
-IMPLEMENT_META_INTERFACE(CameraRecordingProxyListener, "android.hardware.ICameraRecordingProxyListener");
-
-// ----------------------------------------------------------------------
-
-status_t BnCameraRecordingProxyListener::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch(code) {
-        case DATA_CALLBACK_TIMESTAMP: {
-            ALOGV("DATA_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp = data.readInt64();
-            int32_t msgType = data.readInt32();
-            sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
-            dataCallbackTimestamp(timestamp, msgType, imageData);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            nsecs_t timestamp;
-            status_t res = data.readInt64(&timestamp);
-            if (res != OK) {
-                ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-
-            native_handle_t* handle = data.readNativeHandle();
-            if (handle == nullptr) {
-                ALOGE("%s: Received a null native handle", __FUNCTION__);
-                return BAD_VALUE;
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandle.
-            recordingFrameHandleCallbackTimestamp(timestamp, handle);
-            return NO_ERROR;
-        } break;
-        case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
-            ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
-            CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
-            uint32_t n = 0;
-            status_t res = data.readUint32(&n);
-            if (res != OK) {
-                ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
-                return BAD_VALUE;
-            }
-            std::vector<nsecs_t> timestamps;
-            std::vector<native_handle_t*> handles;
-            timestamps.reserve(n);
-            handles.reserve(n);
-            for (uint32_t i = 0; i < n; i++) {
-                nsecs_t t;
-                res = data.readInt64(&t);
-                if (res != OK) {
-                    ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
-                            __FUNCTION__, i, strerror(-res), res);
-                    return BAD_VALUE;
-                }
-                timestamps.push_back(t);
-            }
-            for (uint32_t i = 0; i < n; i++) {
-                native_handle_t* handle = data.readNativeHandle();
-                if (handle == nullptr) {
-                    ALOGE("%s: Received a null native handle at handles[%d]",
-                            __FUNCTION__, i);
-                    return BAD_VALUE;
-                }
-                handles.push_back(handle);
-            }
-            // The native handle will be freed in
-            // BpCameraRecordingProxy::releaseRecordingFrameHandleBatch.
-            recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/TEST_MAPPING b/camera/TEST_MAPPING
new file mode 100644
index 0000000..683e183
--- /dev/null
+++ b/camera/TEST_MAPPING
@@ -0,0 +1,11 @@
+{
+  "postsubmit": [
+    {
+      "name": "CtsCameraTestCases"
+    },
+    {
+      "name": "CtsCameraTestCases",
+      "keywords": ["primary-device"]
+    }
+  ]
+}
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index d713d2d..24fa912 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -660,6 +660,16 @@
     return sGlobalVendorTagDescriptorCache;
 }
 
+bool VendorTagDescriptorCache::isVendorCachePresent(metadata_vendor_id_t vendorId) {
+    Mutex::Autolock al(sLock);
+    if ((sGlobalVendorTagDescriptorCache.get() != nullptr) &&
+            (sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().find(vendorId) !=
+             sGlobalVendorTagDescriptorCache->getVendorIdsAndTagDescriptors().end())) {
+        return true;
+    }
+    return false;
+}
+
 extern "C" {
 
 int vendor_tag_descriptor_get_tag_count(const vendor_tag_ops_t* /*v*/) {
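Before the next file, a hedged, illustrative sketch (not part of this change) of how a caller might use the new VendorTagDescriptorCache::isVendorCachePresent() helper to avoid a failing vendor-tag lookup; the wrapper function name is an assumption.

    #include <camera/VendorTagDescriptor.h>

    // Returns true only when the global vendor tag cache exists and already contains
    // descriptors for this vendor id.
    static bool haveVendorTagsFor(metadata_vendor_id_t vendorId) {
        return android::VendorTagDescriptorCache::isVendorCachePresent(vendorId);
    }
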
diff --git a/camera/aidl/android/hardware/CameraSessionStats.aidl b/camera/aidl/android/hardware/CameraSessionStats.aidl
new file mode 100644
index 0000000..a8e6774
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraSessionStats.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/** @hide */
+parcelable CameraSessionStats cpp_header "camera/CameraSessionStats.h";
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ac7a35b..78a77d4 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -20,6 +20,8 @@
 import android.hardware.ICameraClient;
 import android.hardware.camera2.ICameraDeviceUser;
 import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.ICameraInjectionCallback;
+import android.hardware.camera2.ICameraInjectionSession;
 import android.hardware.camera2.params.VendorTagDescriptor;
 import android.hardware.camera2.params.VendorTagDescriptorCache;
 import android.hardware.camera2.utils.ConcurrentCameraIdCombination;
@@ -69,7 +71,7 @@
 
     /**
      * Default UID/PID values for non-privileged callers of
-     * connect(), connectDevice(), and connectLegacy()
+     * connect() and connectDevice()
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -80,7 +82,8 @@
     ICamera connect(ICameraClient client,
             int cameraId,
             String opPackageName,
-            int clientUid, int clientPid);
+            int clientUid, int clientPid,
+            int targetSdkVersion);
 
     /**
      * Open a camera device through the new camera API
@@ -90,21 +93,8 @@
             String cameraId,
             String opPackageName,
             @nullable String featureId,
-            int clientUid);
-
-    /**
-     * halVersion constant for connectLegacy
-     */
-    const int CAMERA_HAL_API_VERSION_UNSPECIFIED = -1;
-
-    /**
-     * Open a camera device in legacy mode, if supported by the camera module HAL.
-     */
-    ICamera connectLegacy(ICameraClient client,
-            int cameraId,
-            int halVersion,
-            String opPackageName,
-            int clientUid);
+            int clientUid, int oomScoreOffset,
+            int targetSdkVersion);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -126,13 +116,15 @@
       * corresponding camera ids.
       *
       * @param sessions the set of camera id and session configuration pairs to be queried.
+      * @param targetSdkVersion the target SDK level of the application calling this function.
       * @return true  - the set of concurrent camera id and stream combinations is supported.
       *         false - the set of concurrent camera id and stream combinations is not supported
       *                 OR the method was called with a set of camera ids not returned by
       *                 getConcurrentCameraIds().
       */
     boolean isConcurrentSessionConfigurationSupported(
-            in CameraIdAndSessionConfiguration[] sessions);
+            in CameraIdAndSessionConfiguration[] sessions,
+            int targetSdkVersion);
 
     /**
      * Remove listener for changes to camera device and flashlight state.
@@ -143,7 +135,7 @@
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
      */
-    CameraMetadataNative getCameraCharacteristics(String cameraId);
+    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -175,6 +167,9 @@
     boolean supportsCameraApi(String cameraId, int apiVersion);
     // Determines if a cameraId is a hidden physical camera of a logical multi-camera.
     boolean isHiddenPhysicalCamera(String cameraId);
+    // Inject the external camera to replace the internal camera session.
+    ICameraInjectionSession injectCamera(String packageName, String internalCamId,
+            String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
 
     void setTorchMode(String cameraId, boolean enabled, IBinder clientBinder);
 
@@ -188,6 +183,13 @@
     oneway void notifySystemEvent(int eventId, in int[] args);
 
     /**
+     * Notify the camera service of a display configuration change.
+     *
+     * Callers require the android.permission.CAMERA_SEND_SYSTEM_EVENTS permission.
+     */
+    oneway void notifyDisplayConfigurationChange();
+
+    /**
      * Notify the camera service of a device physical status change. May only be called from
      * a privileged process.
      *
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 7575948..bbb0289 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -16,11 +16,11 @@
 
 package android.hardware;
 
+import android.hardware.CameraSessionStats;
+
 /**
  * Binder interface for the camera service proxy running in system_server.
  *
- * Keep in sync with frameworks/av/include/camera/ICameraServiceProxy.h
- *
  * @hide
  */
 interface ICameraServiceProxy
@@ -30,30 +30,15 @@
      */
     oneway void pingForUserUpdate();
 
-    /**
-     * Values for notifyCameraState newCameraState
-     */
-    const int CAMERA_STATE_OPEN = 0;
-    const int CAMERA_STATE_ACTIVE = 1;
-    const int CAMERA_STATE_IDLE = 2;
-    const int CAMERA_STATE_CLOSED = 3;
-
-    /**
-     * Values for notifyCameraState facing
-     */
-    const int CAMERA_FACING_BACK = 0;
-    const int CAMERA_FACING_FRONT = 1;
-    const int CAMERA_FACING_EXTERNAL = 2;
-
-    /**
-     * Values for notifyCameraState api level
-     */
-     const int CAMERA_API_LEVEL_1 = 1;
-     const int CAMERA_API_LEVEL_2 = 2;
 
     /**
      * Update the status of a camera device.
      */
-    oneway void notifyCameraState(String cameraId, int facing, int newCameraState,
-            String clientName, int apiLevel);
+    oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
+
+    /**
+     * Reports whether the top activity needs a rotate and crop override.
+     */
+    boolean isRotateAndCropOverrideNeeded(String packageName, int sensorOrientation,
+            int lensFacing);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index b183ccc..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -83,9 +83,11 @@
      * @param operatingMode The kind of session to create; either NORMAL_MODE or
      *     CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
      * @param sessionParams Session wide camera parameters
+     * @param startTimeMs The timestamp of session creation start, measured by
+     *                    SystemClock.uptimeMillis.
      * @return a list of stream ids that can be used in offline mode via "switchToOffline"
      */
-    int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
+    int[] endConfigure(int operatingMode, in CameraMetadataNative sessionParams, long startTimeMs);
 
     /**
       * Check whether a particular session configuration has camera device
@@ -117,10 +119,11 @@
      * @param width Width of the input buffers
      * @param height Height of the input buffers
      * @param format Format of the input buffers. One of HAL_PIXEL_FORMAT_*.
+     * @param isMultiResolution Whether the input stream supports variable resolution images.
      *
      * @return new stream ID
      */
-    int createInputStream(int width, int height, int format);
+    int createInputStream(int width, int height, int format, boolean isMultiResolution);
 
     /**
      * Get the surface of the input stream.
diff --git a/camera/aidl/android/hardware/camera2/ICameraInjectionCallback.aidl b/camera/aidl/android/hardware/camera2/ICameraInjectionCallback.aidl
new file mode 100644
index 0000000..9791352
--- /dev/null
+++ b/camera/aidl/android/hardware/camera2/ICameraInjectionCallback.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.hardware.camera2.ICameraInjectionSession;
+
+/**
+ * Binder interface used to report error states from the injected external camera;
+ * the camera service can switch back to the internal camera when the binder signals process death.
+ *
+ * @hide
+ */
+interface ICameraInjectionCallback
+{
+    // Error codes for onInjectionError
+    // To indicate all invalid error codes
+    const int ERROR_INJECTION_INVALID_ERROR = -1;
+    // To indicate the camera injection session has encountered a fatal error, such as an
+    // injection initialization failure, a configuration failure, or an injection failure.
+    const int ERROR_INJECTION_SESSION = 0;
+    // To indicate the camera service has encountered a fatal error.
+    const int ERROR_INJECTION_SERVICE = 1;
+    // To indicate the injection camera does not support certain camera functions, such as an
+    // unsupported stream format, a missing capture/record function, or a missing multi-camera function.
+    // When this error occurs, processing stays in the injection state by default, and the app is
+    // notified so it can display an error message and a black screen.
+    const int ERROR_INJECTION_UNSUPPORTED = 2;
+
+    oneway void onInjectionError(int errorCode);
+}
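A hedged sketch (not part of this change) of consuming the error codes above from native code. It assumes the AIDL cpp backend generates the usual ICameraInjectionCallback class and header path, mirroring how ICameraService constants are used elsewhere in this diff.

    #define LOG_TAG "InjectionErrorExample"   // illustrative tag

    #include <android/hardware/camera2/ICameraInjectionCallback.h>   // assumed generated header path
    #include <utils/Log.h>

    using android::hardware::camera2::ICameraInjectionCallback;

    // Map an onInjectionError() code to a log line; the messages paraphrase the comments above.
    static void logInjectionError(int errorCode) {
        switch (errorCode) {
            case ICameraInjectionCallback::ERROR_INJECTION_SESSION:
                ALOGE("Injection session hit a fatal error (init, configure, or inject failure).");
                break;
            case ICameraInjectionCallback::ERROR_INJECTION_SERVICE:
                ALOGE("Camera service hit a fatal error during injection.");
                break;
            case ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED:
                ALOGE("Injection camera lacks a required capability; app should show an error screen.");
                break;
            default:
                ALOGE("Unexpected injection error code %d.", errorCode);
                break;
        }
    }
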
diff --git a/camera/aidl/android/hardware/camera2/ICameraInjectionSession.aidl b/camera/aidl/android/hardware/camera2/ICameraInjectionSession.aidl
new file mode 100644
index 0000000..c31c30b
--- /dev/null
+++ b/camera/aidl/android/hardware/camera2/ICameraInjectionSession.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+/** @hide */
+interface ICameraInjectionSession
+{
+    oneway void stopInjection();
+}
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 1843ec4..ebc09d7 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -94,12 +94,12 @@
     // Do not distinguish null arrays from 0-sized arrays.
     for (int32_t i = 0; i < size; ++i) {
         // Parcel.writeParcelableArray
-        size_t len;
-        const char16_t* className = parcel->readString16Inplace(&len);
+        std::optional<std::string> className;
+        parcel->readUtf8FromUtf16(&className);
         ALOGV("%s: Read surface class = %s", __FUNCTION__,
-              className != NULL ? String8(className).string() : "<null>");
+              className.value_or("<null>").c_str());
 
-        if (className == NULL) {
+        if (className == std::nullopt) {
             continue;
         }
 
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 4e9b27d..2bccd87 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -68,6 +68,14 @@
     return mPhysicalCameraId;
 }
 
+bool OutputConfiguration::isMultiResolution() const {
+    return mIsMultiResolution;
+}
+
+const std::vector<int32_t> &OutputConfiguration::getSensorPixelModesUsed() const {
+    return mSensorPixelModesUsed;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -75,7 +83,8 @@
         mWidth(0),
         mHeight(0),
         mIsDeferred(false),
-        mIsShared(false) {
+        mIsShared(false),
+        mIsMultiResolution(false) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -145,6 +154,17 @@
 
     parcel->readString16(&mPhysicalCameraId);
 
+    int isMultiResolution = 0;
+    if ((err = parcel->readInt32(&isMultiResolution)) != OK) {
+        ALOGE("%s: Failed to read surface isMultiResolution flag from parcel", __FUNCTION__);
+        return err;
+    }
+
+    std::vector<int32_t> sensorPixelModesUsed;
+    if ((err = parcel->readInt32Vector(&sensorPixelModesUsed)) != OK) {
+        ALOGE("%s: Failed to read sensor pixel mode(s) from parcel", __FUNCTION__);
+        return err;
+    }
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -152,6 +172,7 @@
     mHeight = height;
     mIsDeferred = isDeferred != 0;
     mIsShared = isShared != 0;
+    mIsMultiResolution = isMultiResolution != 0;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -159,9 +180,11 @@
         mGbps.push_back(surface.graphicBufferProducer);
     }
 
+    mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
+
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
-          " physicalCameraId = %s", __FUNCTION__, mRotation, mSurfaceSetID,
-          mSurfaceType, String8(mPhysicalCameraId).string());
+          " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
+          mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
 
     return err;
 }
@@ -175,6 +198,7 @@
     mIsDeferred = false;
     mIsShared = isShared;
     mPhysicalCameraId = physicalId;
+    mIsMultiResolution = false;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -183,7 +207,7 @@
     int width, int height, bool isShared)
   : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
-    mPhysicalCameraId(physicalCameraId) { }
+    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -224,24 +248,54 @@
     err = parcel->writeString16(mPhysicalCameraId);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mIsMultiResolution ? 1 : 0);
+    if (err != OK) return err;
+
+    err = parcel->writeParcelableVector(mSensorPixelModesUsed);
+    if (err != OK) return err;
+
     return OK;
 }
 
+template <typename T>
+static bool simpleVectorsEqual(T first, T second) {
+    if (first.size() != second.size()) {
+        return false;
+    }
+
+    for (size_t i = 0; i < first.size(); i++) {
+        if (first[i] != second[i]) {
+            return false;
+        }
+    }
+    return true;
+}
+
 bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
     const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
             other.getGraphicBufferProducers();
+    return simpleVectorsEqual(otherGbps, mGbps);
+}
 
-    if (mGbps.size() != otherGbps.size()) {
-        return false;
+bool OutputConfiguration::sensorPixelModesUsedEqual(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& othersensorPixelModesUsed = other.getSensorPixelModesUsed();
+    return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
+}
+
+bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
+    const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
+
+    if (mSensorPixelModesUsed.size() != spms.size()) {
+        return mSensorPixelModesUsed.size() < spms.size();
     }
 
-    for (size_t i = 0; i < mGbps.size(); i++) {
-        if (mGbps[i] != otherGbps[i]) {
-            return false;
+    for (size_t i = 0; i < spms.size(); i++) {
+        if (mSensorPixelModesUsed[i] != spms[i]) {
+            return mSensorPixelModesUsed[i] < spms[i];
         }
     }
 
-    return true;
+    return false;
 }
 
 bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
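Side note on the comparator added above (not part of this change): sensorPixelModesUsedLessThan() orders by size first and then element by element. Under that assumption, an equivalent standalone comparison could be written with the standard library, for illustration only.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Size-first, then lexicographic ordering of pixel-mode vectors, matching the diff's logic.
    static bool pixelModesLessThan(const std::vector<int32_t>& a, const std::vector<int32_t>& b) {
        if (a.size() != b.size()) {
            return a.size() < b.size();
        }
        return std::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end());
    }
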
diff --git a/camera/camera2/SessionConfiguration.cpp b/camera/camera2/SessionConfiguration.cpp
index a431a33..7cf6087 100644
--- a/camera/camera2/SessionConfiguration.cpp
+++ b/camera/camera2/SessionConfiguration.cpp
@@ -55,6 +55,12 @@
         return err;
     }
 
+    bool inputIsMultiResolution = false;
+    if ((err = parcel->readBool(&inputIsMultiResolution)) != OK) {
+        ALOGE("%s: Failed to read input multi-resolution flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     std::vector<OutputConfiguration> outputStreams;
     if ((err = parcel->readParcelableVector(&outputStreams)) != OK) {
         ALOGE("%s: Failed to read output configurations from parcel", __FUNCTION__);
@@ -65,6 +71,7 @@
     mInputWidth = inputWidth;
     mInputHeight = inputHeight;
     mInputFormat = inputFormat;
+    mInputIsMultiResolution = inputIsMultiResolution;
     for (auto& stream : outputStreams) {
         mOutputStreams.push_back(stream);
     }
@@ -90,6 +97,9 @@
     err = parcel->writeInt32(mInputFormat);
     if (err != OK) return err;
 
+    err = parcel->writeBool(mInputIsMultiResolution);
+    if (err != OK) return err;
+
     err = parcel->writeParcelableVector(mOutputStreams);
     if (err != OK) return err;
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index dc7f88a..8ca8920 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_camera_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
 cc_binary {
     name: "cameraserver",
 
@@ -33,11 +42,12 @@
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
-    compile_multilib: "32",
+    compile_multilib: "first",
     cflags: [
         "-Wall",
         "-Wextra",
@@ -48,6 +58,6 @@
     init_rc: ["cameraserver.rc"],
 
     vintf_fragments: [
-        "manifest_android.frameworks.cameraservice.service@2.1.xml",
+        "manifest_android.frameworks.cameraservice.service@2.2.xml",
     ],
 }
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml
deleted file mode 100644
index 5a15b35..0000000
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="framework">
-    <hal>
-        <name>android.frameworks.cameraservice.service</name>
-        <transport>hwbinder</transport>
-        <version>2.1</version>
-        <interface>
-            <name>ICameraService</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
new file mode 100644
index 0000000..eeafc91
--- /dev/null
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
@@ -0,0 +1,11 @@
+<manifest version="1.0" type="framework">
+    <hal>
+        <name>android.frameworks.cameraservice.service</name>
+        <transport>hwbinder</transport>
+        <version>2.2</version>
+        <interface>
+            <name>ICameraService</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 2cdb617..58ccd69 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -24,7 +24,6 @@
 #include <gui/IGraphicBufferProducer.h>
 #include <system/camera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/android/hardware/ICameraClient.h>
 #include <camera/CameraBase.h>
@@ -59,7 +58,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int,
+        int, const String16&, int, int, int,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -82,11 +81,7 @@
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
                                 const String16& clientPackageName,
-                                int clientUid, int clientPid);
-
-    static  status_t  connectLegacy(int cameraId, int halVersion,
-                                     const String16& clientPackageName,
-                                     int clientUid, sp<Camera>& camera);
+                                int clientUid, int clientPid, int targetSdkVersion);
 
             virtual     ~Camera();
 
@@ -154,7 +149,6 @@
             status_t    setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
             void        setListener(const sp<CameraListener>& listener);
-            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
 
             // Configure preview callbacks to app. Only one of the older
             // callbacks or the callback surface can be active at the same time;
@@ -187,12 +181,8 @@
         explicit RecordingProxy(const sp<Camera>& camera);
 
         // ICameraRecordingProxy interface
-        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
+        virtual status_t startRecording();
         virtual void stopRecording();
-        virtual void releaseRecordingFrame(const sp<IMemory>& mem);
-        virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-        virtual void releaseRecordingFrameHandleBatch(
-                const std::vector<native_handle_t*>& handles);
 
     private:
         sp<Camera>         mCamera;
@@ -203,8 +193,6 @@
                         Camera(const Camera&);
                         Camera& operator=(const Camera);
 
-    sp<ICameraRecordingProxyListener>  mRecordingProxyListener;
-
     friend class        CameraBase;
 };
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 499b0e6..e156994 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -113,7 +113,7 @@
 
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
-                                 int clientUid, int clientPid);
+                                 int clientUid, int clientPid, int targetSdkVersion);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
diff --git a/camera/include/camera/CameraMetadata.h b/camera/include/camera/CameraMetadata.h
index 9d1b5c7..c56ee6d 100644
--- a/camera/include/camera/CameraMetadata.h
+++ b/camera/include/camera/CameraMetadata.h
@@ -128,6 +128,11 @@
     bool isEmpty() const;
 
     /**
+     * Return the allocated camera metadata buffer size in bytes.
+     */
+    size_t bufferSize() const;
+
+    /**
      * Sort metadata buffer for faster find
      */
     status_t sort();
@@ -237,6 +242,11 @@
     static status_t getTagFromName(const char *name,
             const VendorTagDescriptor* vTags, uint32_t *tag);
 
+    /**
+     * Return the current vendor tag id associated with this metadata.
+     */
+    metadata_vendor_id_t getVendorId();
+
   private:
     camera_metadata_t *mBuffer;
     mutable bool       mLocked;
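A hypothetical snippet (not part of this change) exercising the two new CameraMetadata accessors declared above; the dump helper and log tag are assumptions.

    #define LOG_TAG "MetadataInfoExample"   // illustrative tag

    #include <inttypes.h>
    #include <camera/CameraMetadata.h>
    #include <utils/Log.h>

    // Log the allocated buffer size and the vendor tag id associated with this metadata.
    static void dumpMetadataInfo(android::CameraMetadata& meta) {
        ALOGI("metadata buffer: %zu bytes, vendor id: 0x%" PRIx64,
                meta.bufferSize(), static_cast<uint64_t>(meta.getVendorId()));
    }
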
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
new file mode 100644
index 0000000..c398aca
--- /dev/null
+++ b/camera/include/camera/CameraSessionStats.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
+#define ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
+
+#include <binder/Parcelable.h>
+
+namespace android {
+namespace hardware {
+
+/**
+ * Camera stream info and statistics
+ */
+class CameraStreamStats : public android::Parcelable {
+public:
+    enum HistogramType {
+        HISTOGRAM_TYPE_UNKNOWN = 0,
+        HISTOGRAM_TYPE_CAPTURE_LATENCY = 1,
+    };
+
+    int mWidth;
+    int mHeight;
+    int mFormat;
+    int mDataSpace;
+    int64_t mUsage;
+
+    // The number of requested buffers
+    int64_t mRequestCount;
+    // The number of buffer errors
+    int64_t mErrorCount;
+
+    // The capture latency of 1st request for this stream
+    int32_t mStartLatencyMs;
+
+    // Buffer count info
+    int mMaxHalBuffers;
+    int mMaxAppBuffers;
+
+    // Histogram type. So far only capture latency histogram is supported.
+    int mHistogramType;
+    // The boundary values separating adjacent histogram bins.
+    // A vector of {h1, h2, h3} represents bins of [0, h1), [h1, h2), [h2, h3),
+    // and [h3, infinity)
+    std::vector<float> mHistogramBins;
+    // The counts for all histogram bins.
+    // size(mHistogramBins) + 1 = size(mHistogramCounts)
+    std::vector<int64_t> mHistogramCounts;
+
+    CameraStreamStats() :
+            mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
+            mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
+            mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
+    CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
+            int maxHalBuffers, int maxAppBuffers)
+            : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
+              mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
+              mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
+              mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
+
+    virtual status_t readFromParcel(const android::Parcel* parcel) override;
+    virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
+/**
+ * Camera session statistics
+ *
+ * This includes session wide info and stream statistics.
+ */
+class CameraSessionStats : public android::Parcelable {
+public:
+    /**
+     * Values for notifyCameraState newCameraState
+     */
+    static const int CAMERA_STATE_OPEN;
+    static const int CAMERA_STATE_ACTIVE;
+    static const int CAMERA_STATE_IDLE;
+    static const int CAMERA_STATE_CLOSED;
+
+    /**
+     * Values for notifyCameraState facing
+     */
+    static const int CAMERA_FACING_BACK;
+    static const int CAMERA_FACING_FRONT;
+    static const int CAMERA_FACING_EXTERNAL;
+
+    /**
+     * Values for notifyCameraState api level
+     */
+    static const int CAMERA_API_LEVEL_1;
+    static const int CAMERA_API_LEVEL_2;
+
+    String16 mCameraId;
+    int mFacing;
+    int mNewCameraState;
+    String16 mClientName;
+    int mApiLevel;
+    bool mIsNdk;
+    // latency in ms for camera open, close, or session creation.
+    int mLatencyMs;
+
+    // Session info and statistics
+    int mSessionType;
+    int mInternalReconfigure;
+    // The number of capture requests
+    int64_t mRequestCount;
+    // The number of result errors
+    int64_t mResultErrorCount;
+    // Whether the device runs into an error state
+    bool mDeviceError;
+    std::vector<CameraStreamStats> mStreamStats;
+
+    // Constructors
+    CameraSessionStats();
+    CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
+            const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs);
+
+    virtual status_t readFromParcel(const android::Parcel* parcel) override;
+    virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
+}; // namespace hardware
+}; // namespace android
+
+#endif // ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
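To make the histogram invariant documented above concrete, here is a hypothetical way to populate a CameraStreamStats capture-latency histogram (not part of this change); all numeric values are illustrative.

    #include <camera/CameraSessionStats.h>

    using android::hardware::CameraStreamStats;

    // Build a latency histogram that keeps size(mHistogramBins) + 1 == size(mHistogramCounts).
    static CameraStreamStats makeLatencyStats() {
        CameraStreamStats stats(/*width*/ 1920, /*height*/ 1080, /*format*/ 0 /* illustrative */,
                /*dataSpace*/ 0, /*usage*/ 0, /*maxHalBuffers*/ 3, /*maxAppBuffers*/ 2);
        stats.mHistogramType = CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
        // Bins {10, 20, 40} ms define four ranges: [0,10), [10,20), [20,40), [40,inf).
        stats.mHistogramBins = {10.0f, 20.0f, 40.0f};
        stats.mHistogramCounts = {5, 12, 3, 1};  // one count per range: 3 bins -> 4 counts
        return stats;
    }
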
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index f596f80..a397ccd 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -47,6 +47,11 @@
          */
         static bool isNativeHandleMetadata(const sp<IMemory>& imageData);
 
+        /**
+         * Check if camera service is disabled on this device
+         */
+        static bool isCameraServiceDisabled();
+
     private:
         CameraUtils();
 };
diff --git a/camera/include/camera/ICameraRecordingProxy.h b/camera/include/camera/ICameraRecordingProxy.h
index 02af2f3..4306dc1 100644
--- a/camera/include/camera/ICameraRecordingProxy.h
+++ b/camera/include/camera/ICameraRecordingProxy.h
@@ -24,13 +24,11 @@
 
 namespace android {
 
-class ICameraRecordingProxyListener;
-class IMemory;
 class Parcel;
 
 /*
- * The purpose of ICameraRecordingProxy and ICameraRecordingProxyListener is to
- * allow applications using the camera during recording.
+ * The purpose of ICameraRecordingProxy is to
+ * allow applications to use the camera during recording with the old camera API.
  *
  * Camera service allows only one client at a time. Since camcorder application
  * needs to own the camera to do things like zoom, the media recorder cannot
@@ -42,35 +40,29 @@
  * ICameraRecordingProxy
  *   startRecording()
  *   stopRecording()
- *   releaseRecordingFrame()
  *
- * ICameraRecordingProxyListener
- *   dataCallbackTimestamp()
-
  * The camcorder app opens the camera and starts the preview. The app passes
  * ICamera and ICameraRecordingProxy to the media recorder by
  * MediaRecorder::setCamera(). The recorder uses ICamera to setup the camera in
  * MediaRecorder::start(). After setup, the recorder disconnects from camera
- * service. The recorder calls ICameraRecordingProxy::startRecording() and
- * passes a ICameraRecordingProxyListener to the app. The app connects back to
- * camera service and starts the recording. The app owns the camera and can do
- * things like zoom. The media recorder receives the video frames from the
- * listener and releases them by ICameraRecordingProxy::releaseRecordingFrame.
- * The recorder calls ICameraRecordingProxy::stopRecording() to stop the
- * recording.
+ * service. The recorder calls ICameraRecordingProxy::startRecording(). The
+ * app owns the camera and can do things like zoom. The media recorder receives
+ * the video frames via a buffer queue. The recorder calls
+ * ICameraRecordingProxy::stopRecording() to stop the recording.
  *
  * The call sequences are as follows:
  * 1. The app: Camera.unlock().
  * 2. The app: MediaRecorder.setCamera().
  * 3. Start recording
  *    (1) The app: MediaRecorder.start().
- *    (2) The recorder: ICamera.unlock() and ICamera.disconnect().
- *    (3) The recorder: ICameraRecordingProxy.startRecording().
- *    (4) The app: ICamera.reconnect().
- *    (5) The app: ICamera.startRecording().
+ *    (2) The recorder: ICamera.setVideoTarget(buffer queue).
+ *    (3) The recorder: ICamera.unlock() and ICamera.disconnect().
+ *    (4) The recorder: ICameraRecordingProxy.startRecording().
+ *    (5) The app: ICamera.reconnect().
+ *    (6) The app: ICamera.startRecording().
  * 4. During recording
- *    (1) The recorder: receive frames from ICameraRecordingProxyListener.dataCallbackTimestamp()
- *    (2) The recorder: release frames by ICameraRecordingProxy.releaseRecordingFrame().
+ *    (1) The recorder: receive frames via a buffer queue
+ *    (2) The recorder: release frames via a buffer queue
  * 5. Stop recording
  *    (1) The app: MediaRecorder.stop()
  *    (2) The recorder: ICameraRecordingProxy.stopRecording().
@@ -82,12 +74,8 @@
 public:
     DECLARE_META_INTERFACE(CameraRecordingProxy);
 
-    virtual status_t        startRecording(const sp<ICameraRecordingProxyListener>& listener) = 0;
+    virtual status_t        startRecording() = 0;
     virtual void            stopRecording() = 0;
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem) = 0;
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle) = 0;
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles) = 0;
 };
 
 // ----------------------------------------------------------------------------
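The call-sequence comments above describe a protocol; the sketch below (not part of this change) shows the recorder-side portion under the simplified interface, where frames flow through a buffer queue and only startRecording()/stopRecording() remain. The wrapper function is an assumption and error handling is minimal.

    #include <camera/ICameraRecordingProxy.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    // Steps 3(4) and 5(2) of the sequence above, as seen from the media recorder.
    static android::status_t runRecordingSession(
            const android::sp<android::ICameraRecordingProxy>& proxy) {
        android::status_t err = proxy->startRecording();
        if (err != android::OK) {
            return err;
        }
        // ... frames arrive and are released via the buffer queue set with ICamera::setVideoTarget() ...
        proxy->stopRecording();
        return android::OK;
    }
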
diff --git a/camera/include/camera/ICameraRecordingProxyListener.h b/camera/include/camera/ICameraRecordingProxyListener.h
deleted file mode 100644
index da03c56..0000000
--- a/camera/include/camera/ICameraRecordingProxyListener.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
-
-#include <vector>
-#include <binder/IInterface.h>
-#include <cutils/native_handle.h>
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-class Parcel;
-class IMemory;
-
-class ICameraRecordingProxyListener: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(CameraRecordingProxyListener);
-
-    virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType,
-                                       const sp<IMemory>& data) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-                                                       native_handle_t* handle) = 0;
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<nsecs_t>& timestamps,
-            const std::vector<native_handle_t*>& handles) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnCameraRecordingProxyListener: public BnInterface<ICameraRecordingProxyListener>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/camera/include/camera/VendorTagDescriptor.h b/camera/include/camera/VendorTagDescriptor.h
index b2fbf3a..b3440d5 100644
--- a/camera/include/camera/VendorTagDescriptor.h
+++ b/camera/include/camera/VendorTagDescriptor.h
@@ -249,6 +249,12 @@
      */
     static void clearGlobalVendorTagCache();
 
+    /**
+     * Return true if the given vendor id is present in the vendor tag cache,
+     * false otherwise.
+     */
+    static bool isVendorCachePresent(metadata_vendor_id_t vendorId);
+
 };
 
 } /* namespace android */
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 95c4f39..f80ed3a 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -47,6 +47,10 @@
     bool                       isDeferred() const;
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
+    bool                       isMultiResolution() const;
+
+    // Set of allowed sensor pixel modes: {MAX_RESOLUTION, DEFAULT_MODE}.
+    const std::vector<int32_t>&            getSensorPixelModesUsed() const;
     /**
      * Keep impl up-to-date with OutputConfiguration.java in frameworks/base
      */
@@ -83,7 +87,9 @@
                 mIsDeferred == other.mIsDeferred &&
                 mIsShared == other.mIsShared &&
                 gbpsEqual(other) &&
-                mPhysicalCameraId == other.mPhysicalCameraId );
+                mPhysicalCameraId == other.mPhysicalCameraId &&
+                mIsMultiResolution == other.mIsMultiResolution &&
+                sensorPixelModesUsedEqual(other));
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -114,13 +120,22 @@
         if (mPhysicalCameraId != other.mPhysicalCameraId) {
             return mPhysicalCameraId < other.mPhysicalCameraId;
         }
+        if (mIsMultiResolution != other.mIsMultiResolution) {
+            return mIsMultiResolution < other.mIsMultiResolution;
+        }
+        if (!sensorPixelModesUsedEqual(other)) {
+            return sensorPixelModesUsedLessThan(other);
+        }
         return gbpsLessThan(other);
     }
+
     bool operator > (const OutputConfiguration& other) const {
         return (*this != other && !(*this < other));
     }
 
     bool gbpsEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedEqual(const OutputConfiguration& other) const;
+    bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
     bool gbpsLessThan(const OutputConfiguration& other) const;
     void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
 private:
@@ -133,6 +148,8 @@
     bool                       mIsDeferred;
     bool                       mIsShared;
     String16                   mPhysicalCameraId;
+    bool                       mIsMultiResolution;
+    std::vector<int32_t>       mSensorPixelModesUsed;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/include/camera/camera2/SessionConfiguration.h b/camera/include/camera/camera2/SessionConfiguration.h
index 64288ed..29913f6 100644
--- a/camera/include/camera/camera2/SessionConfiguration.h
+++ b/camera/include/camera/camera2/SessionConfiguration.h
@@ -38,6 +38,7 @@
     int getInputHeight() const { return mInputHeight; }
     int getInputFormat() const { return mInputFormat; }
     int getOperatingMode() const { return mOperatingMode; }
+    bool inputIsMultiResolution() const { return mInputIsMultiResolution; }
 
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
@@ -61,7 +62,8 @@
                 mInputWidth == other.mInputWidth &&
                 mInputHeight == other.mInputHeight &&
                 mInputFormat == other.mInputFormat &&
-                mOperatingMode == other.mOperatingMode);
+                mOperatingMode == other.mOperatingMode &&
+                mInputIsMultiResolution == other.mInputIsMultiResolution);
     }
 
     bool operator != (const SessionConfiguration& other) const {
@@ -83,6 +85,10 @@
             return mInputFormat < other.mInputFormat;
         }
 
+        if (mInputIsMultiResolution != other.mInputIsMultiResolution) {
+            return mInputIsMultiResolution < other.mInputIsMultiResolution;
+        }
+
         if (mOperatingMode != other.mOperatingMode) {
             return mOperatingMode < other.mOperatingMode;
         }
@@ -104,6 +110,7 @@
 
     std::vector<OutputConfiguration> mOutputStreams;
     int                              mInputWidth, mInputHeight, mInputFormat, mOperatingMode;
+    bool                             mInputIsMultiResolution = false;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 7ba82c1..fef873b 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -16,6 +16,37 @@
 // to refer to headers in parent directories and the headers live in
 // frameworks/av/include.
 
+package {
+    default_applicable_licenses: ["frameworks_av_camera_ndk_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_camera_ndk_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-MIT",
+        "SPDX-license-identifier-Unicode-DFS",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 ndk_library {
     name: "libcamera2ndk",
     symbol_file: "libcamera2ndk.map.txt",
@@ -62,7 +93,7 @@
     ],
     cflags: [
         "-fvisibility=hidden",
-        "-DEXPORT=__attribute__ ((visibility (\"default\")))",
+        "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-Wall",
         "-Wextra",
         "-Werror",
@@ -121,9 +152,11 @@
         "libcamera_metadata",
         "libmediandk",
         "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
         "android.frameworks.cameraservice.common@2.0",
         "android.frameworks.cameraservice.service@2.0",
         "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index c15c5a5..dd652c7 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -186,6 +186,7 @@
         const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
+    nsecs_t startTimeNs = systemTime();
     sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
     Mutex::Autolock _l(mDeviceLock);
     camera_status_t ret = checkCameraClosedOrErrorLocked();
@@ -199,7 +200,7 @@
     }
 
     // Create new session
-    ret = configureStreamsLocked(outputs, sessionParameters);
+    ret = configureStreamsLocked(outputs, sessionParameters, startTimeNs);
     if (ret != ACAMERA_OK) {
         ALOGE("Fail to create new session. cannot configure streams");
         return ret;
@@ -450,7 +451,11 @@
     }
 
     // No new session, unconfigure now
-    camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
+    // Note: Session unconfiguration is not counted toward session latency,
+    // because a stream configuration with 0 streams never becomes active.
+    nsecs_t startTimeNs = systemTime();
+    camera_status_t ret = configureStreamsLocked(nullptr, nullptr, startTimeNs);
     if (ret != ACAMERA_OK) {
         ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
     }
@@ -609,7 +614,7 @@
 
 camera_status_t
 CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-        const ACaptureRequest* sessionParameters) {
+        const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -711,7 +716,8 @@
         params.append(sessionParameters->settings->getInternalData());
     }
     std::vector<int> offlineStreamIds;
-    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params, &offlineStreamIds);
+    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params,
+            ns2ms(startTimeNs), &offlineStreamIds);
     if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
         ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
                 remoteRet.toString8().string());
@@ -1361,31 +1367,11 @@
                 it->second.isSequenceCompleted = true;
             }
 
-            if (it->second.isSequenceCompleted && hasCallback) {
-                auto cbIt = mSequenceCallbackMap.find(sequenceId);
-                CallbackHolder cbh = cbIt->second;
-
-                // send seq complete callback
-                sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
-                msg->setPointer(kContextKey, cbh.mContext);
-                msg->setObject(kSessionSpKey, cbh.mSession);
-                msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
-                msg->setInt32(kSequenceIdKey, sequenceId);
-                msg->setInt64(kFrameNumberKey, lastFrameNumber);
-
-                // Clear the session sp before we send out the message
-                // This will guarantee the rare case where the message is processed
-                // before cbh goes out of scope and causing we call the session
-                // destructor while holding device lock
-                cbh.mSession.clear();
-                postSessionMsgAndCleanup(msg);
-            }
         }
 
         if (it->second.isSequenceCompleted && it->second.isInflightCompleted) {
-            if (mSequenceCallbackMap.find(sequenceId) != mSequenceCallbackMap.end()) {
-                mSequenceCallbackMap.erase(sequenceId);
-            }
+            sendCaptureSequenceCompletedLocked(sequenceId, lastFrameNumber);
+
             it = mSequenceLastFrameNumberMap.erase(it);
             ALOGV("%s: Remove holder for sequenceId %d", __FUNCTION__, sequenceId);
         } else {
@@ -1412,13 +1398,7 @@
                 lastCompletedRegularFrameNumber);
         if (lastFrameNumber <= lastCompletedRegularFrameNumber) {
             if (it->second.isSequenceCompleted) {
-                // Check if there is callback for this sequence
-                // This should not happen because we always register callback (with nullptr inside)
-                if (mSequenceCallbackMap.count(sequenceId) == 0) {
-                    ALOGW("No callback found for sequenceId %d", sequenceId);
-                } else {
-                    mSequenceCallbackMap.erase(sequenceId);
-                }
+                sendCaptureSequenceCompletedLocked(sequenceId, lastFrameNumber);
 
                 it = mSequenceLastFrameNumberMap.erase(it);
                 ALOGV("%s: Remove holder for sequenceId %d", __FUNCTION__, sequenceId);
@@ -1709,5 +1689,33 @@
     return ret;
 }
 
+void
+CameraDevice::sendCaptureSequenceCompletedLocked(int sequenceId, int64_t lastFrameNumber) {
+    auto cbIt = mSequenceCallbackMap.find(sequenceId);
+    if (cbIt != mSequenceCallbackMap.end()) {
+        CallbackHolder cbh = cbIt->second;
+        mSequenceCallbackMap.erase(cbIt);
+
+        // send seq complete callback
+        sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
+        msg->setPointer(kContextKey, cbh.mContext);
+        msg->setObject(kSessionSpKey, cbh.mSession);
+        msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
+        msg->setInt32(kSequenceIdKey, sequenceId);
+        msg->setInt64(kFrameNumberKey, lastFrameNumber);
+
+        // Clear the session sp before we send out the message
+        // This guards against the rare case where the message is processed
+        // before cbh goes out of scope, which would run the session destructor
+        // while the device lock is held.
+        cbh.mSession.clear();
+        postSessionMsgAndCleanup(msg);
+    } else {
+        // No callback was registered for this sequence. This should not happen
+        // because we always register a callback (with nullptr inside).
+        ALOGW("No callback found for sequenceId %d", sequenceId);
+    }
+}
+
 } // namespace acam
 } // namespace android
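
configureStreamsLocked() now receives the time at which session creation started and forwards it, in milliseconds, to endConfigure() so the service can attribute stream-configuration latency to the session. A small sketch of the timestamp handling, using the same utils/Timers.h helpers the file already relies on (the function and log tag here are illustrative, not part of the patch):

    #define LOG_TAG "ConfigLatencySketch"
    #include <inttypes.h>
    #include <utils/Log.h>
    #include <utils/Timers.h>   // systemTime(), ns2ms(), nsecs_t

    // Sketch: take a monotonic timestamp before stream configuration begins and
    // convert it to milliseconds, the unit endConfigure() expects downstream.
    static int64_t configurationStartTimeMs() {
        nsecs_t startTimeNs = systemTime();        // SYSTEM_TIME_MONOTONIC by default
        int64_t startTimeMs = ns2ms(startTimeNs);  // value handed to the camera service
        ALOGV("stream configuration started at %" PRId64 " ms", startTimeMs);
        return startTimeMs;
    }
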
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index d937865..344d964 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -167,7 +167,7 @@
     void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
 
     camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-           const ACaptureRequest* sessionParameters);
+           const ACaptureRequest* sessionParameters, nsecs_t startTimeNs);
 
     // Input message will be posted and cleared after this returns
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
@@ -354,6 +354,7 @@
     void checkRepeatingSequenceCompleteLocked(const int sequenceId, const int64_t lastFrameNumber);
     void checkAndFireSequenceCompleteLocked();
     void removeCompletedCallbackHolderLocked(int64_t lastCompletedRegularFrameNumber);
+    void sendCaptureSequenceCompletedLocked(int sequenceId, int64_t lastFrameNumber);
 
     // Misc variables
     int32_t mShadingMapSize[2];   // const after constructor
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index b58ebe2..95ef2b2 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -24,6 +24,7 @@
 #include <utils/Vector.h>
 #include <cutils/properties.h>
 #include <stdlib.h>
+#include <camera/CameraUtils.h>
 #include <camera/VendorTagDescriptor.h>
 
 using namespace android::acam;
@@ -70,12 +71,6 @@
     mCameraService.clear();
 }
 
-static bool isCameraServiceDisabled() {
-    char value[PROPERTY_VALUE_MAX];
-    property_get("config.disable_cameraservice", value, "0");
-    return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
-}
-
 sp<hardware::ICameraService> CameraManagerGlobal::getCameraService() {
     Mutex::Autolock _l(mLock);
     return getCameraServiceLocked();
@@ -83,7 +78,7 @@
 
 sp<hardware::ICameraService> CameraManagerGlobal::getCameraServiceLocked() {
     if (mCameraService.get() == nullptr) {
-        if (isCameraServiceDisabled()) {
+        if (CameraUtils::isCameraServiceDisabled()) {
             return mCameraService;
         }
 
@@ -694,7 +689,9 @@
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
     CameraMetadata rawMetadata;
-    binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr), &rawMetadata);
+    int targetSdkVersion = android_get_application_target_sdk_version();
+    binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
+            targetSdkVersion, &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -740,11 +737,13 @@
 
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
     sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
+    int targetSdkVersion = android_get_application_target_sdk_version();
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, String16(cameraId), String16(""), std::unique_ptr<String16>(),
-            hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
+            callbacks, String16(cameraId), String16(""), {},
+            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
+            targetSdkVersion, /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
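
The file-local isCameraServiceDisabled() helper removed above is replaced by a call to CameraUtils::isCameraServiceDisabled(), so the property check lives in one shared place. For reference, a sketch of the check the deleted helper performed (mirroring the removed lines; CameraUtils' own implementation is not part of this hunk):

    #include <cstring>
    #include <strings.h>            // strncasecmp
    #include <cutils/properties.h>  // property_get(), PROPERTY_VALUE_MAX

    // Sketch of the removed helper: the camera service counts as disabled when
    // config.disable_cameraservice is anything other than "0" or "false".
    static bool cameraServiceDisabledSketch() {
        char value[PROPERTY_VALUE_MAX];
        property_get("config.disable_cameraservice", value, /*default_value*/ "0");
        return strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0;
    }
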
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index bfa60d9..dab2fef 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -24,6 +24,28 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+// Enum value must match corresponding enum in ui/PublicFormat.h (which is not
+// available to VNDK)
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw
+     * depth measurements, opaque with 16-bit
+     * samples.
+     */
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device-specific 10-bit depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10-bit samples
+     * and a device-specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
 /**
  * ACameraMetadata Implementation
  */
@@ -290,6 +312,10 @@
             format = AIMAGE_FORMAT_DEPTH_POINT_CLOUD;
         } else if (format == HAL_PIXEL_FORMAT_Y16) {
             format = AIMAGE_FORMAT_DEPTH16;
+        } else if (format == HAL_PIXEL_FORMAT_RAW16) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH);
+        } else if (format == HAL_PIXEL_FORMAT_RAW10) {
+            format = static_cast<int32_t>(AIMAGE_FORMAT_RAW_DEPTH10);
         }
 
         filteredDepthStreamConfigs.push_back(format);
@@ -426,6 +452,7 @@
             camera_metadata_ro_entry_t entry;
             int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
             if (ret != 0) {
+                mData->unlock(rawMetadata);
                 ALOGE("%s: error reading metadata index %zu", __FUNCTION__, i);
                 return ACAMERA_ERROR_UNKNOWN;
             }
@@ -527,11 +554,13 @@
         case ACAMERA_LENS_OPTICAL_STABILIZATION_MODE:
         case ACAMERA_NOISE_REDUCTION_MODE:
         case ACAMERA_SCALER_CROP_REGION:
+        case ACAMERA_SCALER_ROTATE_AND_CROP:
         case ACAMERA_SENSOR_EXPOSURE_TIME:
         case ACAMERA_SENSOR_FRAME_DURATION:
         case ACAMERA_SENSOR_SENSITIVITY:
         case ACAMERA_SENSOR_TEST_PATTERN_DATA:
         case ACAMERA_SENSOR_TEST_PATTERN_MODE:
+        case ACAMERA_SENSOR_PIXEL_MODE:
         case ACAMERA_SHADING_MODE:
         case ACAMERA_STATISTICS_FACE_DETECT_MODE:
         case ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE:
@@ -582,6 +611,7 @@
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,
     ANDROID_SENSOR_PROFILE_TONE_CURVE,
     ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+    ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
     ANDROID_SHADING_STRENGTH,
     ANDROID_STATISTICS_HISTOGRAM_MODE,
     ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
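
The depth-stream filtering above now also maps RAW16 and RAW10 HAL pixel formats onto the file-local NDK-private depth formats declared at the top of this file. The translation, restated as a small sketch (the constants repeat the enum values from the patch; the helper name is illustrative):

    #include <cstdint>
    #include <system/graphics.h>  // HAL_PIXEL_FORMAT_RAW16 / HAL_PIXEL_FORMAT_RAW10

    // Values from the file-local AIMAGE_PRIVATE_FORMATS enum above; they must
    // stay in sync with ui/PublicFormat.h.
    constexpr int32_t kAImageFormatRawDepth   = 0x1002;
    constexpr int32_t kAImageFormatRawDepth10 = 0x1003;

    // Sketch of the new HAL -> NDK translation applied while filtering the
    // depth stream configurations.
    static int32_t toNdkDepthFormat(int32_t halFormat) {
        switch (halFormat) {
            case HAL_PIXEL_FORMAT_RAW16: return kAImageFormatRawDepth;
            case HAL_PIXEL_FORMAT_RAW10: return kAImageFormatRawDepth10;
            default:                     return halFormat;  // other formats unchanged
        }
    }
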
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 07176cf..2b7f040 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -45,8 +45,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraCaptureSession is an opaque type that manages frame captures of a camera device.
  *
@@ -63,6 +61,10 @@
  */
 typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
 
+/**
+ * Capture session state callbacks used in {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters}.
+ */
 typedef struct ACameraCaptureSession_stateCallbacks {
     /// optional application context.
     void*                               context;
@@ -248,6 +250,10 @@
         void* context, ACameraCaptureSession* session,
         ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
 
+/**
+ * ACameraCaptureSession_captureCallbacks structure used in
+ * {@link ACameraCaptureSession_capture} and {@link ACameraCaptureSession_setRepeatingRequest}.
+ */
 typedef struct ACameraCaptureSession_captureCallbacks {
     /// optional application context.
     void*                                               context;
@@ -415,7 +421,10 @@
  */
 void ACameraCaptureSession_close(ACameraCaptureSession* session);
 
-struct ACameraDevice;
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
 typedef struct ACameraDevice ACameraDevice;
 
 /**
@@ -593,10 +602,10 @@
 camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
         __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
+/**
+ * Opaque object for a capture session output; use {@link ACaptureSessionOutput_create} or
+ * {@link ACaptureSessionSharedOutput_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
@@ -610,9 +619,9 @@
  *
  * <p>Native windows that get removed must not be part of any active repeating or single/burst
  * request or have any pending results. Consider updating repeating requests via
- * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * {@link ACameraCaptureSession_setRepeatingRequest} and then wait for the last frame number
  * when the sequence completes
- * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.</p>
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.</p>
  *
  * <p>Native windows that get added must not be part of any other registered ACaptureSessionOutput
  * and must be compatible. Compatible windows must have matching format, rotation and
@@ -641,9 +650,7 @@
  */
 camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
         ACaptureSessionOutput* output) __INTRODUCED_IN(28);
-#endif /* __ANDROID_API__ >= 28 */
 
-#if __ANDROID_API__ >= 29
 /**
  * The definition of final capture result callback with logical multi-camera support.
  *
@@ -721,7 +728,15 @@
      * Same as ACameraCaptureSession_captureCallbacks
      */
     void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}.
+     */
     ACameraCaptureSession_captureCallback_start         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
     ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
 
     /**
@@ -759,10 +774,18 @@
     ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
 
     /**
-     * Same as ACameraCaptureSession_captureCallbacks
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
      */
     ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
     ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
     ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
 } ACameraCaptureSession_logicalCamera_captureCallbacks;
 
@@ -788,8 +811,6 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
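
With the state and capture callback structs now documented above, a hedged usage sketch of the path they feed into: create a session with state callbacks, then start a repeating request. The device, output container, and request are assumed to be set up elsewhere; error handling is trimmed.

    #include <camera/NdkCameraCaptureSession.h>
    #include <camera/NdkCameraDevice.h>

    // Minimal no-op state callbacks; a real app would track readiness here.
    static void onSessionClosed(void*, ACameraCaptureSession*) {}
    static void onSessionReady(void*,  ACameraCaptureSession*) {}
    static void onSessionActive(void*, ACameraCaptureSession*) {}

    // Sketch: start a repeating request on an already-configured output set.
    static camera_status_t startRepeating(ACameraDevice* device,
                                          ACaptureSessionOutputContainer* outputs,
                                          ACaptureRequest* request,
                                          ACameraCaptureSession** sessionOut) {
        ACameraCaptureSession_stateCallbacks stateCbs = {};
        stateCbs.onClosed = onSessionClosed;
        stateCbs.onReady  = onSessionReady;
        stateCbs.onActive = onSessionActive;

        camera_status_t ret =
                ACameraDevice_createCaptureSession(device, outputs, &stateCbs, sessionOut);
        if (ret != ACAMERA_OK) return ret;

        // Capture callbacks are optional; passing nullptr is allowed.
        return ACameraCaptureSession_setRepeatingRequest(*sessionOut, /*callbacks*/ nullptr,
                                                         /*numRequests*/ 1, &request,
                                                         /*captureSequenceId*/ nullptr);
    }
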
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 1537bde..7be4bd3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -44,8 +44,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraDevice is opaque type that provides access to a camera device.
  *
@@ -126,6 +124,10 @@
  */
 typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
 
+/**
+ * Applications' callbacks for camera device state changes; register them with
+ * {@link ACameraManager_openCamera}.
+ */
 typedef struct ACameraDevice_StateCallbacks {
     /// optional application context.
     void*                             context;
@@ -200,6 +202,10 @@
  */
 const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
+/**
+ * Capture request pre-defined template types, used in {@link ACameraDevice_createCaptureRequest}
+ * and {@link ACameraDevice_createCaptureRequest_withPhysicalIds}.
+ */
 typedef enum {
     /**
      * Create a request suitable for a camera preview window. Specifically, this
@@ -303,10 +309,12 @@
         const ACameraDevice* device, ACameraDevice_request_template templateId,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
-
+/**
+ * Opaque object for a CaptureSessionOutput container; use
+ * {@link ACaptureSessionOutputContainer_create} to create an instance.
+ */
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
 
-typedef struct ACaptureSessionOutput ACaptureSessionOutput;
 
 /**
  * Create a capture session output container.
@@ -687,10 +695,6 @@
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Create a shared ACaptureSessionOutput object.
  *
@@ -782,10 +786,6 @@
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Create a ACaptureSessionOutput object used for streaming from a physical
  * camera as part of a logical camera device.
@@ -854,7 +854,7 @@
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
 /**
- * Check whether a particular {@ACaptureSessionOutputContainer} is supported by
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
  * the camera device.
  *
  * <p>This method performs a runtime check of a given {@link
@@ -885,13 +885,12 @@
  *                                                         device.</li>
  *        <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
  *                                                        supported by the camera device.</li>
+ *        </ul>
  */
 camera_status_t ACameraDevice_isSessionConfigurationSupported(
         const ACameraDevice* device,
         const ACaptureSessionOutputContainer* sessionOutputContainer) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_DEVICE_H */
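
ACameraDevice_isSessionConfigurationSupported(), touched up above, takes the same output container that would later go to ACameraDevice_createCaptureSession(), so the query can be done before committing to a configuration. A hedged sketch (the ANativeWindow is assumed to come from an AImageReader or Surface created elsewhere):

    #include <camera/NdkCameraDevice.h>

    // Sketch: probe whether a single-output session configuration is supported.
    // Returns true only on an explicit ACAMERA_OK; ACAMERA_ERROR_UNSUPPORTED_OPERATION
    // means the query itself is unavailable and should be treated as "unknown".
    static bool sessionConfigLikelySupported(ACameraDevice* device, ANativeWindow* window) {
        ACaptureSessionOutputContainer* container = nullptr;
        ACaptureSessionOutput* output = nullptr;
        bool supported = false;

        if (ACaptureSessionOutputContainer_create(&container) == ACAMERA_OK &&
            ACaptureSessionOutput_create(window, &output) == ACAMERA_OK &&
            ACaptureSessionOutputContainer_add(container, output) == ACAMERA_OK) {
            supported = (ACameraDevice_isSessionConfigurationSupported(device, container)
                         == ACAMERA_OK);
        }

        if (output != nullptr)    ACaptureSessionOutput_free(output);
        if (container != nullptr) ACaptureSessionOutputContainer_free(container);
        return supported;
    }
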
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index fc618ee..26db7f2 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -40,9 +40,13 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
+/**
+ * Camera status enum types.
+ */
 typedef enum {
+    /**
+     * Camera operation has succeeded.
+     */
     ACAMERA_OK = 0,
 
     ACAMERA_ERROR_BASE                  = -10000,
@@ -138,8 +142,6 @@
     ACAMERA_ERROR_UNSUPPORTED_OPERATION = ACAMERA_ERROR_BASE - 14,
 } camera_status_t;
 
-#endif /* __ANDROID_API__ >= 24 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_ERROR_H */
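
Every entry point in these headers returns camera_status_t, so call sites generally reduce to a comparison against ACAMERA_OK. A trivial sketch of that convention (the helper and log tag are illustrative):

    #include <android/log.h>
    #include <camera/NdkCameraError.h>

    // Sketch: the usual "log and bail" check around NDK camera calls.
    // ACAMERA_OK (0) means success; anything else is an ACAMERA_ERROR_* code.
    static bool checkCameraStatus(camera_status_t status, const char* what) {
        if (status != ACAMERA_OK) {
            __android_log_print(ANDROID_LOG_ERROR, "NdkCameraSketch",
                                "%s failed: %d", what, status);
            return false;
        }
        return true;
    }
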
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 0a2ee57..729182e 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -44,8 +44,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraManager is opaque type that provides access to camera service.
  *
@@ -293,10 +291,6 @@
         ACameraDevice_StateCallbacks* callback,
         /*out*/ACameraDevice** device) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Definition of camera access permission change callback.
  *
@@ -332,7 +326,7 @@
  * @see ACameraManager_registerExtendedAvailabilityCallback
  */
 typedef struct ACameraManager_ExtendedAvailabilityListener {
-    ///
+    /// Called when a camera becomes available or unavailable
     ACameraManager_AvailabilityCallbacks availabilityCallbacks;
 
     /// Called when there is camera access permission change
@@ -419,8 +413,6 @@
         __INTRODUCED_IN(29);
 #endif
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_MANAGER_H */
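
The extended availability listener documented above embeds the plain ACameraManager_AvailabilityCallbacks struct and adds the access-priorities callback on top. A hedged sketch of registering the plain callbacks (the callback bodies are placeholders):

    #include <camera/NdkCameraManager.h>

    static void onCameraAvailable(void*, const char* /*cameraId*/)   { /* may be opened now */ }
    static void onCameraUnavailable(void*, const char* /*cameraId*/) { /* in use elsewhere  */ }

    // Sketch: register for availability updates. The callbacks struct must stay
    // alive until the matching ACameraManager_unregisterAvailabilityCallback call.
    static camera_status_t watchCameraAvailability(ACameraManager* manager,
                                                   ACameraManager_AvailabilityCallbacks* cbs) {
        cbs->context             = nullptr;
        cbs->onCameraAvailable   = onCameraAvailable;
        cbs->onCameraUnavailable = onCameraUnavailable;
        return ACameraManager_registerAvailabilityCallback(manager, cbs);
    }
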
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 072bb02..b331d50 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -36,13 +36,12 @@
 #ifndef _NDK_CAMERA_METADATA_H
 #define _NDK_CAMERA_METADATA_H
 
+#include <stdbool.h>
 #include <stdint.h>
 #include <sys/cdefs.h>
 
 #ifndef __ANDROID_VNDK__
-#if __ANDROID_API__ >= 30
 #include "jni.h"
-#endif  /* __ANDROID_API__ >= 30 */
 #endif  /* __ANDROID_VNDK__ */
 
 #include "NdkCameraError.h"
@@ -50,8 +49,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
 /**
  * ACameraMetadata is opaque type that provides access to read-only camera metadata like camera
  * characteristics (via {@link ACameraManager_getCameraCharacteristics}) or capture results (via
@@ -237,10 +234,6 @@
  */
 void ACameraMetadata_free(ACameraMetadata* metadata) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Helper function to check if a camera is logical multi-camera.
  *
@@ -259,17 +252,16 @@
         /*out*/size_t* numPhysicalCameras, /*out*/const char* const** physicalCameraIds)
         __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 #ifndef __ANDROID_VNDK__
-#if __ANDROID_API__ >= 30
 
 /**
  * Return a {@link ACameraMetadata} that references the same data as
- * {@link cameraMetadata}, which is an instance of
- * {@link android.hardware.camera2.CameraMetadata} (e.g., a
- * {@link android.hardware.camera2.CameraCharacteristics} or
- * {@link android.hardware.camera2.CaptureResult}).
+ * <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *     android.hardware.camera2.CameraMetadata</a> from Java API. (e.g., a
+ * <a href="/reference/android/hardware/camera2/CameraCharacteristics">
+ *     android.hardware.camera2.CameraCharacteristics</a>
+ * or <a href="/reference/android/hardware/camera2/CaptureResult">
+ *     android.hardware.camera2.CaptureResult</a>).
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
  * after application is done using it.</p>
@@ -279,17 +271,18 @@
  * the Java metadata is garbage collected.
  *
  * @param env the JNI environment.
- * @param cameraMetadata the source {@link android.hardware.camera2.CameraMetadata} from which the
+ * @param cameraMetadata the source <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *                       android.hardware.camera2.CameraMetadata</a> from which the
  *                       returned {@link ACameraMetadata} is a view.
  *
- * @return a valid ACameraMetadata pointer or NULL if {@link cameraMetadata} is null or not a valid
- *         instance of {@link android.hardware.camera2.CameraMetadata}.
+ * @return a valid ACameraMetadata pointer or NULL if cameraMetadata is null or not a valid
+ *         instance of <a href="/reference/android/hardware/camera2/CameraMetadata">
+ *         android.hardware.camera2.CameraMetadata</a>.
  *
  */
 ACameraMetadata* ACameraMetadata_fromCameraMetadata(JNIEnv* env, jobject cameraMetadata)
         __INTRODUCED_IN(30);
 
-#endif /* __ANDROID_API__ >= 30 */
 #endif  /* __ANDROID_VNDK__ */
 
 __END_DECLS
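
ACameraMetadata_fromCameraMetadata(), whose links are rewritten above, is the bridge from a Java CameraCharacteristics or CaptureResult object into an NDK-side view. A hedged JNI sketch (the Java class and native method name are hypothetical; the returned view must be freed and must not outlive the Java object):

    #include <jni.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Hypothetical native method taking an android.hardware.camera2.CameraCharacteristics.
    extern "C" JNIEXPORT jint JNICALL
    Java_com_example_CameraJni_sensorOrientation(JNIEnv* env, jclass /*clazz*/,
                                                 jobject characteristics) {
        ACameraMetadata* meta = ACameraMetadata_fromCameraMetadata(env, characteristics);
        if (meta == nullptr) return -1;  // not a valid CameraMetadata instance

        jint orientation = -1;
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(meta, ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK
                && entry.count > 0) {
            orientation = entry.data.i32[0];
        }
        ACameraMetadata_free(meta);  // frees the view, not the Java object
        return orientation;
    }
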
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1354fce..52cd4b4 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -40,7 +40,6 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
 
 typedef enum acamera_metadata_section {
     ACAMERA_COLOR_CORRECTION,
@@ -528,6 +527,13 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -537,7 +543,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AE_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 4,
@@ -719,6 +728,12 @@
      * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -728,7 +743,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AF_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 8,
@@ -816,7 +834,7 @@
      * </ul></p>
      *
      * <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
-     * <p>When set to the ON mode, the camera device's auto-white balance
+     * <p>When set to the AUTO mode, the camera device's auto-white balance
      * routine is enabled, overriding the application's selected
      * ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS and
      * ACAMERA_COLOR_CORRECTION_MODE. Note that when ACAMERA_CONTROL_AE_MODE
@@ -905,6 +923,12 @@
      * the scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
      * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
      * mode.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is <code>int[5 * area_count]</code>.
      * Every five elements represent a metering region of <code>(xmin, ymin, xmax, ymax, weight)</code>.
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
@@ -914,7 +938,10 @@
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_CONTROL_AWB_REGIONS =                               // int32[5*area_count]
             ACAMERA_CONTROL_START + 12,
@@ -1841,7 +1868,7 @@
      * <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
      * BURST_CAPTURE must still be met.</li>
      * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
-     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
+     * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
      * will have preview bokeh effect applied.</li>
      * </ul>
      * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
@@ -1957,7 +1984,20 @@
      * explicitly set ACAMERA_CONTROL_ZOOM_RATIO, its value defaults to 1.0.</p>
      * <p>One limitation of controlling zoom using zoomRatio is that the ACAMERA_SCALER_CROP_REGION
      * must only be used for letterboxing or pillarboxing of the sensor active array, and no
-     * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0.</p>
+     * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0. If
+     * ACAMERA_CONTROL_ZOOM_RATIO is not 1.0, and ACAMERA_SCALER_CROP_REGION is set to be
+     * windowboxing, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be
+     * the active array.</p>
+     * <p>In the capture request, if the application sets ACAMERA_CONTROL_ZOOM_RATIO to a
+     * value != 1.0, the ACAMERA_CONTROL_ZOOM_RATIO tag in the capture result reflects the
+     * effective zoom ratio achieved by the camera device, and the ACAMERA_SCALER_CROP_REGION
+     * adjusts for additional crops that are not zoom related. Otherwise, if the application
+     * sets ACAMERA_CONTROL_ZOOM_RATIO to 1.0, or does not set it at all, the
+     * ACAMERA_CONTROL_ZOOM_RATIO tag in the result metadata will also be 1.0.</p>
+     * <p>When the application requests a physical stream for a logical multi-camera, the
+     * ACAMERA_CONTROL_ZOOM_RATIO in the physical camera result metadata will be 1.0, and
+     * the ACAMERA_SCALER_CROP_REGION tag reflects the amount of zoom and crop done by the
+     * physical camera device.</p>
      *
      * @see ACAMERA_CONTROL_AE_REGIONS
      * @see ACAMERA_CONTROL_ZOOM_RATIO
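
The expanded ACAMERA_CONTROL_ZOOM_RATIO description above amounts to: express zoom through the ratio control, keep ACAMERA_SCALER_CROP_REGION for letterbox/pillarbox cropping only, and read the effective ratio back from the capture result. A hedged request-side sketch:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Sketch: ask for 2x zoom via the zoom-ratio control rather than by shrinking
    // ACAMERA_SCALER_CROP_REGION; the result's ACAMERA_CONTROL_ZOOM_RATIO reports
    // what the device actually applied.
    static camera_status_t requestTwoTimesZoom(ACaptureRequest* request) {
        const float zoomRatio = 2.0f;
        return ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO,
                                              /*count*/ 1, &zoomRatio);
    }
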
@@ -2799,6 +2839,51 @@
      */
     ACAMERA_LENS_DISTORTION =                                   // float[5]
             ACAMERA_LENS_START + 13,
+    /**
+     * <p>The correction coefficients to correct for this camera device's
+     * radial and tangential lens distortion for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_DISTORTION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_DISTORTION_MAXIMUM_RESOLUTION =                // float[5]
+            ACAMERA_LENS_START + 14,
+    /**
+     * <p>The parameters for this camera device's intrinsic
+     * calibration when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: float[5]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_LENS_INTRINSIC_CALIBRATION, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION =     // float[5]
+            ACAMERA_LENS_START + 15,
     ACAMERA_LENS_END,
 
     /**
@@ -3426,6 +3511,12 @@
      * coordinate system is post-zoom, meaning that the activeArraySize or
      * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom.  See
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
+     * <p>For camera devices with the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
+     * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
      * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
@@ -3434,7 +3525,10 @@
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
      * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_SCALER_CROP_REGION =                                // int32[4]
             ACAMERA_SCALER_START,
@@ -3492,8 +3586,8 @@
      * <p>Not all output formats may be supported in a configuration with
      * an input stream of a particular format. For more details, see
      * android.scaler.availableInputOutputFormatsMap.</p>
-     * <p>The following table describes the minimum required output stream
-     * configurations based on the hardware level
+     * <p>For applications targeting SDK version older than 31, the following table
+     * describes the minimum required output stream configurations based on the hardware level
      * (ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL):</p>
      * <p>Format         | Size                                         | Hardware Level | Notes
      * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
@@ -3505,6 +3599,28 @@
      * YUV_420_888    | all output sizes available for JPEG          | FULL           |
      * YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
      * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
+     * <p>For applications targeting SDK version 31 or newer, if the mobile device declares itself to be
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION.html#MEDIA_PERFORMANCE_CLASS">media performance class</a> S,
+     * the primary camera devices (first rear/front camera in the camera ID list) will not
+     * support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
+     * smaller than 1080p, the camera device will round up the JPEG image size to at least
+     * 1080p. The requirements for IMPLEMENTATION_DEFINED and YUV_420_888 stay the same.
+     * The new minimum required output stream configurations are illustrated by the table below:</p>
+     * <p>Format         | Size                                         | Hardware Level | Notes
+     * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+     * JPEG           | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE          | Any            |
+     * JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+     * YUV_420_888    | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE          | FULL           |
+     * YUV_420_888    | 1920x1080 (1080p)                            | FULL           | if 1080p &lt;= activeArraySize
+     * YUV_420_888    | 1280x720 (720p)                              | FULL           | if 720p &lt;= activeArraySize
+     * YUV_420_888    | 640x480 (480p)                               | FULL           | if 480p &lt;= activeArraySize
+     * YUV_420_888    | 320x240 (240p)                               | FULL           | if 240p &lt;= activeArraySize
+     * YUV_420_888    | all output sizes available for FULL hardware level, up to the maximum video size | LIMITED        |
+     * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
+     * <p>For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
+     * itself to be media performance class S, or if the camera device isn't a primary rear/front
+     * camera, the minimum required output stream configurations are the same as for applications
+     * targeting SDK version older than 31.</p>
      * <p>Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
      * mandatory stream configurations on a per-capability basis.</p>
      * <p>Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability for
@@ -3536,8 +3652,6 @@
      * set to either OFF or FAST.</p>
      * <p>When multiple streams are used in a request, the minimum frame
      * duration will be max(individual stream min durations).</p>
-     * <p>The minimum frame duration of a stream (of a particular format, size)
-     * is the same regardless of whether the stream is input or output.</p>
      * <p>See ACAMERA_SENSOR_FRAME_DURATION and
      * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
      * calculating the max frame rate.</p>
@@ -3651,7 +3765,9 @@
      * </ol>
      * </li>
      * <li>Setting ACAMERA_CONTROL_ZOOM_RATIO to values different than 1.0 and
-     * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time is undefined behavior.</li>
+     * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time is not supported. In this
+     * case, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be the active
+     * array.</li>
      * </ul>
      * <p>LEGACY capability devices will only support CENTER_ONLY cropping.</p>
      *
@@ -3736,6 +3852,278 @@
     ACAMERA_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP = 
                                                                 // int32
             ACAMERA_SCALER_START + 15,
+    /**
+     * <p>List of rotate-and-crop modes for ACAMERA_SCALER_ROTATE_AND_CROP that are supported by this camera device.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     *
+     * <p>Type: byte[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This entry lists the valid modes for ACAMERA_SCALER_ROTATE_AND_CROP for this camera device.</p>
+     * <p>Starting with API level 30, all devices will list at least <code>ROTATE_AND_CROP_NONE</code>.
+     * Devices with support for rotate-and-crop will additionally list at least
+     * <code>ROTATE_AND_CROP_AUTO</code> and <code>ROTATE_AND_CROP_90</code>.</p>
+     *
+     * @see ACAMERA_SCALER_ROTATE_AND_CROP
+     */
+    ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES =            // byte[n]
+            ACAMERA_SCALER_START + 16,
+    /**
+     * <p>Whether a rotation-and-crop operation is applied to processed
+     * outputs from the camera.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This control is primarily intended to help camera applications with no support for
+     * multi-window modes to work correctly on devices where multi-window scenarios are
+     * unavoidable, such as foldables or other devices with variable display geometry or more
+     * free-form window placement (such as laptops, which often place portrait-orientation apps
+     * in landscape with pillarboxing).</p>
+     * <p>If supported, the default value is <code>ROTATE_AND_CROP_AUTO</code>, which allows the camera API
+     * to enable backwards-compatibility support for applications that do not support resizing
+     * / multi-window modes, when the device is in fact in a multi-window mode (such as inset
+     * portrait on laptops, or on a foldable device in some fold states).  In addition,
+     * <code>ROTATE_AND_CROP_NONE</code> and <code>ROTATE_AND_CROP_90</code> will always be available if this control
+     * is supported by the device.  If not supported, devices at API level 30 or higher will always
+     * list only <code>ROTATE_AND_CROP_NONE</code>.</p>
+     * <p>When <code>CROP_AUTO</code> is in use, and the camera API activates backward-compatibility mode,
+     * several metadata fields will also be parsed differently to ensure that coordinates are
+     * correctly handled for features like drawing face detection boxes or passing in
+     * tap-to-focus coordinates.  The camera API will convert positions in the active array
+     * coordinate system to/from the cropped-and-rotated coordinate system to make the
+     * operation transparent for applications.  The following controls are affected:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>android.statistics.faces</li>
+     * </ul>
+     * <p>Capture results will contain the actual value selected by the API;
+     * <code>ROTATE_AND_CROP_AUTO</code> will never be seen in a capture result.</p>
+     * <p>Applications can also select their preferred cropping mode, either to opt out of the
+     * backwards-compatibility treatment, or to use the cropping feature themselves as needed.
+     * In this case, no coordinate translation will be done automatically, and all controls
+     * will continue to use the normal active array coordinates.</p>
+     * <p>Cropping and rotating is done after the application of digital zoom (via either
+     * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO), but before each individual
+     * output is further cropped and scaled. It only affects processed outputs such as
+     * YUV, PRIVATE, and JPEG.  It has no effect on RAW outputs.</p>
+     * <p>When <code>CROP_90</code> or <code>CROP_270</code> are selected, there is a significant loss to the field of
+     * view. For example, with a 4:3 aspect ratio output of 1600x1200, <code>CROP_90</code> will still
+     * produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the
+     * center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200.  Only
+     * 56.25% of the original FOV is still visible.  In general, for an aspect ratio of <code>w:h</code>,
+     * the crop and rotate operation leaves <code>(h/w)^2</code> of the field of view visible. For 16:9,
+     * this is ~31.6%.</p>
+     * <p>As a visual example, the figure below shows the effect of <code>ROTATE_AND_CROP_90</code> on the
+     * outputs for the following parameters:</p>
+     * <ul>
+     * <li>Sensor active array: <code>2000x1500</code></li>
+     * <li>Crop region: top-left: <code>(500, 375)</code>, size: <code>(1000, 750)</code> (4:3 aspect ratio)</li>
+     * <li>Output streams: YUV <code>640x480</code> and YUV <code>1280x720</code></li>
+     * <li><code>ROTATE_AND_CROP_90</code></li>
+     * </ul>
+     * <p><img alt="Effect of ROTATE_AND_CROP_90" src="../images/camera2/metadata/android.scaler.rotateAndCrop/crop-region-rotate-90-43-ratio.png" /></p>
+     * <p>With these settings, the regions of the active array covered by the output streams are:</p>
+     * <ul>
+     * <li>640x480 stream crop: top-left: <code>(219, 375)</code>, size: <code>(562, 750)</code></li>
+     * <li>1280x720 stream crop: top-left: <code>(289, 375)</code>, size: <code>(422, 750)</code></li>
+     * </ul>
+     * <p>Since the buffers are rotated, the buffers as seen by the application are:</p>
+     * <ul>
+     * <li>640x480 stream: top-left: <code>(781, 375)</code> on active array, size: <code>(640, 480)</code>, downscaled 1.17x from sensor pixels</li>
+     * <li>1280x720 stream: top-left: <code>(711, 375)</code> on active array, size: <code>(1280, 720)</code>, upscaled 1.71x from sensor pixels</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP =                            // byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)
+            ACAMERA_SCALER_START + 17,
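
The field-of-view arithmetic in the ROTATE_AND_CROP description above ((h/w)^2 of the FOV survives a 90°/270° rotate-and-crop of a w:h output) is easy to sanity-check; the short sketch below reproduces the 56.25% (4:3) and ~31.6% (16:9) figures quoted there.

    #include <cstdio>

    // Sketch: fraction of the original field of view still visible after
    // ROTATE_AND_CROP_90/270 on a w:h output, per the (h/w)^2 relation above.
    static double visibleFovFraction(double w, double h) {
        const double r = h / w;
        return r * r;
    }

    int main() {
        std::printf("4:3  -> %.2f%%\n", 100.0 * visibleFovFraction(4.0, 3.0));   // 56.25%
        std::printf("16:9 -> %.2f%%\n", 100.0 * visibleFovFraction(16.0, 9.0));  // ~31.64%
        return 0;
    }
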
+    /**
+     * <p>Default YUV/PRIVATE size to use for requesting secure image buffers.</p>
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This entry lists the default size supported in the secure camera mode. This entry is
+     * optional on devices that support the SECURE_IMAGE_DATA capability. This entry will be null
+     * if the camera device does not list the SECURE_IMAGE_DATA capability.</p>
+     * <p>When the key is present, only a PRIVATE/YUV output of the specified size is guaranteed
+     * to be supported by the camera HAL in the secure camera mode. Any other formats or
+     * resolutions might not be supported. Use
+     * {@link ACameraDevice_isSessionConfigurationSupported }
+     * API to query if a secure session configuration is supported if the device supports this
+     * API.</p>
+     * <p>If this key returns null on a device with SECURE_IMAGE_DATA capability, the application
+     * can assume all output sizes listed in the
+     * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+     * are supported.</p>
+     */
+    ACAMERA_SCALER_DEFAULT_SECURE_IMAGE_SIZE =                  // int32[2]
+            ACAMERA_SCALER_START + 18,
+    /**
+     * <p>The available multi-resolution stream configurations that this
+     * physical camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This list contains a subset of the parent logical camera's multi-resolution stream
+     * configurations which belong to this physical camera, and it advertises only the maximum
+     * supported resolutions for a particular format.</p>
+     * <p>If this camera device isn't a physical camera device constituting a logical camera,
+     * but a standalone <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * camera, this field represents the multi-resolution input/output stream configurations of
+     * default mode and max resolution modes. The sizes will be the maximum resolution of a
+     * particular format for default mode and max resolution mode.</p>
+     * <p>This field will only be advertised if the device is a physical camera of a
+     * logical multi-camera device or an ultra high resolution sensor camera. For a logical
+     * multi-camera, the camera API will derive the logical camera’s multi-resolution stream
+     * configurations from all physical cameras. For an ultra high resolution sensor camera, this
+     * is used directly as the camera’s multi-resolution stream configurations.</p>
+     */
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)
+            ACAMERA_SCALER_START + 19,
+    /**
+     * <p>The available stream configurations that this
+     * camera device supports (i.e. format, width, height, output/input stream) for a
+     * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>Not all output formats may be supported in a configuration with
+     * an input stream of a particular format. For more details, see
+     * android.scaler.availableInputOutputFormatsMapMaximumResolution.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t)
+            ACAMERA_SCALER_START + 20,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination when the camera device is sent a CaptureRequest with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>When multiple streams are used in a request (if supported, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>), the
+     * minimum frame duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
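+     * <p>For illustration, a minimal sketch of the frame rate bound implied by this key,
+     * assuming <code>streamADurationNs</code> and <code>streamBDurationNs</code> are the
+     * minimum frame durations (in nanoseconds) looked up here for two configured streams:</p>
+     * <pre><code>int64_t sessionMinDurationNs =
+     *         streamADurationNs &gt; streamBDurationNs ? streamADurationNs : streamBDurationNs;
+     * double maxFps = 1e9 / (double) sessionMinDurationNs;
+     * </code></pre>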
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 21,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination when CaptureRequests are submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a></p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_SCALER_START + 22,
+    /**
+     * <p>Whether the camera device supports multi-resolution input or output streams</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A logical multi-camera or an ultra high resolution camera may support multi-resolution
+     * input or output streams. With multi-resolution output streams, the camera device is able
+     * to output different resolution images depending on the current active physical camera or
+     * pixel mode. With multi-resolution input streams, the camera device can reprocess images
+     * of different resolutions from different physical cameras or sensor pixel modes.</p>
+     * <p>When set to TRUE:
+     * * For a logical multi-camera, the camera framework derives
+     * android.scaler.multiResolutionStreamConfigurationMap by combining the
+     * ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS from its physical
+     * cameras.
+     * * For an ultra-high resolution sensor camera, the camera framework directly copies
+     * the value of ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS to
+     * android.scaler.multiResolutionStreamConfigurationMap.</p>
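+     * <p>For illustration, a minimal sketch of checking this flag (the <code>chars</code>
+     * pointer is assumed to come from ACameraManager_getCameraCharacteristics):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * if (ACameraMetadata_getConstEntry(chars,
+     *         ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &amp;entry) == ACAMERA_OK &amp;&amp;
+     *         entry.data.u8[0] == ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE) {
+     *     // Multi-resolution input/output streams can be used with this camera.
+     * }
+     * </code></pre>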
+     *
+     * @see ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED =          // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
+            ACAMERA_SCALER_START + 24,
     ACAMERA_SCALER_END,
 
     /**
@@ -4522,6 +4910,67 @@
      */
     ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL =                        // int32
             ACAMERA_SENSOR_START + 29,
+    /**
+     * <p>Switches sensor pixel mode between maximum resolution mode and default mode.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_pixel_mode_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>This key controls whether the camera sensor operates in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode or not. By default, all camera devices operate in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * When operating in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
+     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability would typically perform pixel binning in order to improve low light
+     * performance, noise reduction etc. However, in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode (supported only
+     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+     * The stream configurations supported in
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * mode are also different from those of
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
+     * They can be queried through
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)</a>.
+     * Unless reported by both
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
+     * <code>android.scaler.streamConfigurationMap</code>
+     * must not be mixed in the same CaptureRequest. In other words, these outputs are
+     * exclusive to each other.
+     * This key does not need to be set for reprocess requests.</p>
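+     * <p>For illustration, a minimal sketch of opting a capture request into maximum
+     * resolution mode (the <code>request</code> pointer is assumed to come from
+     * ACameraDevice_createCaptureRequest; error handling omitted):</p>
+     * <pre><code>uint8_t pixelMode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
+     * ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, 1, &amp;pixelMode);
+     * </code></pre>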
+     */
+    ACAMERA_SENSOR_PIXEL_MODE =                                 // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
+            ACAMERA_SENSOR_START + 32,
+    /**
+     * <p>Whether <code>RAW</code> images requested have their bayer pattern as described by
+     * ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>This key will only be present in devices advertising the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability which also advertise the <code>REMOSAIC_REPROCESSING</code> capability. On all other
+     * devices, RAW targets will have a regular bayer pattern.</p>
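+     * <p>For illustration, a minimal sketch of checking this key in a capture result
+     * (the <code>result</code> pointer is assumed to come from a capture callback; the
+     * key may be absent on other devices):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * if (ACameraMetadata_getConstEntry(result,
+     *         ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED, &amp;entry) == ACAMERA_OK &amp;&amp;
+     *         entry.data.u8[0] == ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_TRUE) {
+     *     // RAW targets in this capture follow the binned bayer pattern.
+     * }
+     * </code></pre>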
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED =                    // byte (acamera_metadata_enum_android_sensor_raw_binning_factor_used_t)
+            ACAMERA_SENSOR_START + 33,
     ACAMERA_SENSOR_END,
 
     /**
@@ -4779,7 +5228,7 @@
      * rectangle, and cropping to the rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
      * <p>E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
      * dimensions in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE given the position of a pixel,
-     * (x', y'), in the raw pixel array with dimensions give in
+     * (x', y'), in the raw pixel array with dimensions given in
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE:</p>
      * <ol>
      * <li>Choose a pixel (x', y') within the active array region of the raw buffer given in
@@ -4823,6 +5272,120 @@
      */
     ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE =      // int32[4]
             ACAMERA_SENSOR_INFO_START + 10,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels after any geometric
+     * distortion correction has been applied, when the sensor runs in maximum resolution mode.</p>
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE
+     * is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * Refer to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE for details, with sensor array related keys
+     * replaced with their
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     * counterparts.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
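+     * <p>For illustration, a minimal sketch of reading this rectangle (the <code>chars</code>
+     * pointer is assumed to come from ACameraManager_getCameraCharacteristics; error
+     * handling omitted):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * ACameraMetadata_getConstEntry(chars,
+     *         ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, &amp;entry);
+     * int32_t left = entry.data.i32[0], top = entry.data.i32[1];
+     * int32_t width = entry.data.i32[2], height = entry.data.i32[3];
+     * </code></pre>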
+     *
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION =  // int32[4]
+            ACAMERA_SENSOR_INFO_START + 11,
+    /**
+     * <p>Dimensions of the full pixel array, possibly
+     * including black calibration pixels, when the sensor runs in maximum resolution mode.
+     * Analogous to ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE is
+     * set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The pixel count of the full pixel array of the image sensor, which covers
+     * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+     * the raw buffers produced by this sensor, when it runs in maximum resolution mode. That
+     * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION =   // int32[2]
+            ACAMERA_SENSOR_INFO_START + 12,
+    /**
+     * <p>The area of the image sensor which corresponds to active pixels prior to the
+     * application of any geometric distortion correction, when the sensor runs in maximum
+     * resolution mode. This key must be used for crop / metering regions only when
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+     * when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
+     * This key will only be present for devices which advertise the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
+     * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION = 
+                                                                // int32[4]
+            ACAMERA_SENSOR_INFO_START + 13,
+    /**
+     * <p>Dimensions of the group of pixels which are under the same color filter.
+     * This specifies the width and height (pair of integers) of the group of pixels which fall
+     * under the same color filter for ULTRA_HIGH_RESOLUTION sensors.</p>
+     *
+     * <p>Type: int32[2]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Sensors can have pixels grouped together under the same color filter in order
+     * to improve various aspects of imaging such as noise reduction, low light
+     * performance etc. These groups can be of various sizes such as 2x2 (quad bayer) or
+     * 3x3 (nona-bayer). This key specifies the width and height of the pixels grouped under
+     * the same color filter.</p>
+     * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
+     * will have a regular bayer pattern.</p>
+     * <p>This key will not be present for sensors which don't have the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability.</p>
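+     * <p>For illustration, a minimal sketch of reading the binning group dimensions
+     * (the <code>chars</code> pointer is assumed to come from
+     * ACameraManager_getCameraCharacteristics; the key may be absent):</p>
+     * <pre><code>ACameraMetadata_const_entry entry;
+     * if (ACameraMetadata_getConstEntry(chars,
+     *         ACAMERA_SENSOR_INFO_BINNING_FACTOR, &amp;entry) == ACAMERA_OK) {
+     *     int32_t binWidth  = entry.data.i32[0]; // e.g. 2 for quad bayer
+     *     int32_t binHeight = entry.data.i32[1];
+     * }
+     * </code></pre>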
+     */
+    ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
+            ACAMERA_SENSOR_INFO_START + 14,
     ACAMERA_SENSOR_INFO_END,
 
     /**
@@ -5223,7 +5786,7 @@
      * </ul></p>
      *
      * <p>Since optical image stabilization generally involves motion much faster than the duration
-     * of individualq image exposure, multiple OIS samples can be included for a single capture
+     * of individual image exposure, multiple OIS samples can be included for a single capture
      * result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating
      * at 30fps may have 6-7 OIS samples per capture result. This information can be combined
      * with the rolling shutter skew to account for lens motion during image exposure in
@@ -6028,6 +6591,162 @@
      */
     ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS =     // int64[4*n]
             ACAMERA_DEPTH_START + 8,
+    /**
+     * <p>The available depth dataspace stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 9,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for depth output formats when a CaptureRequest is submitted with
+     * ACAMERA_SENSOR_PIXEL_MODE set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 10,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, for configurations which
+     * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 11,
+    /**
+     * <p>The available dynamic depth dataspace stream
+     * configurations that this camera device supports (i.e. format, width, height,
+     * output/input stream) for CaptureRequests where ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t)
+            ACAMERA_DEPTH_START + 12,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for dynamic depth output streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 13,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for dynamic depth streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, for configurations
+     * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     */
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_DEPTH_START + 14,
     ACAMERA_DEPTH_END,
 
     /**
@@ -6248,6 +6967,71 @@
      */
     ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS =               // int64[4*n]
             ACAMERA_HEIC_START + 2,
+    /**
+     * <p>The available HEIC (ISO/IEC 23008-12) stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t)
+            ACAMERA_HEIC_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for HEIC output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for HEIC streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+     */
+    ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_HEIC_START + 5,
     ACAMERA_HEIC_END,
 
 } acamera_metadata_tag_t;
@@ -8124,13 +8908,27 @@
      * camera's crop region is set to maximum size, the FOV of the physical streams for the
      * ultrawide lens will be the same as the logical stream, by making the crop region
      * smaller than its active array size to compensate for the smaller focal length.</p>
-     * <p>Even if the underlying physical cameras have different RAW characteristics (such as
-     * size or CFA pattern), a logical camera can still advertise RAW capability. In this
-     * case, when the application configures a RAW stream, the camera device will make sure
-     * the active physical camera will remain active to ensure consistent RAW output
-     * behavior, and not switch to other physical cameras.</p>
+     * <p>There are two ways for the application to capture RAW images from a logical camera
+     * with RAW capability:</p>
+     * <ul>
+     * <li>Because the underlying physical cameras may have different RAW capabilities (such
+     * as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
+     * is configured, the camera device makes sure the default active physical camera remains
+     * active and does not switch to other physical cameras. (One exception is that, if the
+     * logical camera consists of identical image sensors and advertises multiple focalLength
+     * values due to different lenses, the camera device may generate RAW images from different
+     * physical cameras based on the focalLength set by the application.) This
+     * backward-compatible approach usually results in the loss of optical zoom to the
+     * telephoto or ultrawide lens.</li>
+     * <li>Alternatively, to take advantage of the full zoomRatio range of the logical camera,
+     * the application should use <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
+     * to capture RAW images from the currently active physical camera. Because different
+     * physical cameras may have different RAW characteristics, the application needs to use
+     * the characteristics and result metadata of the active physical camera for the
+     * relevant RAW metadata.</li>
+     * </ul>
      * <p>The capture request and result metadata tags required for backward compatible camera
-     * functionalities will be solely based on the logical camera capabiltity. On the other
+     * functionalities will be solely based on the logical camera capability. On the other
      * hand, the use of manual capture controls (sensor or post-processing) with a
      * logical camera may result in unexpected behavior when the HAL decides to switch
      * between physical cameras with different characteristics under the hood. For example,
@@ -8198,6 +8996,20 @@
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA             = 14,
 
+    /**
+     * <p>This camera device is capable of producing ultra high resolution images in
+     * addition to the image sizes described in
+     * <code>android.scaler.streamConfigurationMap</code>.
+     * It can operate in 'default' mode and 'max resolution' mode. It generally does this
+     * by binning pixels in 'default' mode and not binning them in 'max resolution' mode.
+     * <code>android.scaler.streamConfigurationMap</code> describes the streams supported in 'default'
+     * mode.
+     * The stream configurations supported in 'max resolution' mode are described by
+     * <code>android.scaler.streamConfigurationMapMaximumResolution</code>.</p>
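+     * <p>For illustration, a minimal sketch of checking for this capability (the
+     * <code>chars</code> pointer is assumed to come from
+     * ACameraManager_getCameraCharacteristics; error handling omitted):</p>
+     * <pre><code>ACameraMetadata_const_entry caps;
+     * ACameraMetadata_getConstEntry(chars, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &amp;caps);
+     * for (uint32_t i = 0; i &lt; caps.count; i++) {
+     *     if (caps.data.u8[i] ==
+     *             ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
+     *         // Maximum resolution mode is available for this camera.
+     *     }
+     * }
+     * </code></pre>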
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+                                                                      = 16,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
 
 
@@ -8298,6 +9110,79 @@
 
 } acamera_metadata_enum_android_scaler_available_recommended_stream_configurations_t;
 
+// ACAMERA_SCALER_ROTATE_AND_CROP
+typedef enum acamera_metadata_enum_acamera_scaler_rotate_and_crop {
+    /**
+     * <p>No rotate and crop is applied. Processed outputs are in the sensor orientation.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_NONE                              = 0,
+
+    /**
+     * <p>Processed images are rotated by 90 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_90                                = 1,
+
+    /**
+     * <p>Processed images are rotated by 180 degrees.  Since the aspect ratio does not
+     * change, no cropping is performed.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_180                               = 2,
+
+    /**
+     * <p>Processed images are rotated by 270 degrees clockwise, and then cropped
+     * to the original aspect ratio.</p>
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_270                               = 3,
+
+    /**
+     * <p>The camera API automatically selects the best concrete value for
+     * rotate-and-crop based on the application's support for resizability and the current
+     * multi-window mode.</p>
+     * <p>If the application does not support resizing but the display mode for its main
+     * Activity is not in a typical orientation, the camera API will set <code>ROTATE_AND_CROP_90</code>
+     * or some other supported rotation value, depending on device configuration,
+     * to ensure preview and captured images are correctly shown to the user. Otherwise,
+     * <code>ROTATE_AND_CROP_NONE</code> will be selected.</p>
+     * <p>When a value other than NONE is selected, several metadata fields will also be parsed
+     * differently to ensure that coordinates are correctly handled for features like drawing
+     * face detection boxes or passing in tap-to-focus coordinates.  The camera API will
+     * convert positions in the active array coordinate system to/from the cropped-and-rotated
+     * coordinate system to make the operation transparent for applications.</p>
+     * <p>No coordinate mapping will be done when the application selects a non-AUTO mode.</p>
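+     * <p>For illustration, a minimal sketch of leaving the choice to the camera API (the
+     * <code>request</code> pointer is assumed to come from ACameraDevice_createCaptureRequest):</p>
+     * <pre><code>uint8_t rotateAndCrop = ACAMERA_SCALER_ROTATE_AND_CROP_AUTO;
+     * ACaptureRequest_setEntry_u8(request, ACAMERA_SCALER_ROTATE_AND_CROP, 1, &amp;rotateAndCrop);
+     * </code></pre>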
+     */
+    ACAMERA_SCALER_ROTATE_AND_CROP_AUTO                              = 4,
+
+} acamera_metadata_enum_android_scaler_rotate_and_crop_t;
+
+// ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations {
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t;
+
+// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations_maximum_resolution {
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t;
+
+// ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED
+typedef enum acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported {
+    ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_FALSE           = 0,
+
+    ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE            = 1,
+
+} acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
@@ -8370,10 +9255,10 @@
      * respective color channel provided in
      * ACAMERA_SENSOR_TEST_PATTERN_DATA.</p>
      * <p>For example:</p>
-     * <pre><code>android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+     * <pre><code>ACAMERA_SENSOR_TEST_PATTERN_DATA = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
      * </code></pre>
      * <p>All green pixels are 100% green. All red/blue pixels are black.</p>
-     * <pre><code>android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+     * <pre><code>ACAMERA_SENSOR_TEST_PATTERN_DATA = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
      * </code></pre>
      * <p>All red pixels are 100% red. Only the odd green pixels
      * are 100% green. All blue pixels are 100% black.</p>
@@ -8456,6 +9341,42 @@
 
 } acamera_metadata_enum_android_sensor_test_pattern_mode_t;
 
+// ACAMERA_SENSOR_PIXEL_MODE
+typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
+    /**
+     * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
+     * supported unless a camera device advertises
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
+
+    /**
+     * <p>This sensor pixel mode is offered by devices with capability
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
+     * In this mode, sensors typically do not bin pixels and, as a result, can offer larger
+     * image sizes.</p>
+     */
+    ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION                     = 1,
+
+} acamera_metadata_enum_android_sensor_pixel_mode_t;
+
+// ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED
+typedef enum acamera_metadata_enum_acamera_sensor_raw_binning_factor_used {
+    /**
+     * <p>The <code>RAW</code> targets in this capture have ACAMERA_SENSOR_INFO_BINNING_FACTOR as the
+     * bayer pattern.</p>
+     *
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_TRUE                      = 0,
+
+    /**
+     * <p>The <code>RAW</code> targets have a regular bayer pattern in this capture.</p>
+     */
+    ACAMERA_SENSOR_RAW_BINNING_FACTOR_USED_FALSE                     = 1,
+
+} acamera_metadata_enum_android_sensor_raw_binning_factor_used_t;
+
 
 // ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement {
@@ -8940,6 +9861,26 @@
 
 } acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_t;
 
+// ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_depth_stream_configurations_maximum_resolution_t;
+
+// ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_depth_available_dynamic_depth_stream_configurations_maximum_resolution {
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_depth_available_dynamic_depth_stream_configurations_maximum_resolution_t;
+
 
 // ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
 typedef enum acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type {
@@ -8991,9 +9932,18 @@
 
 } acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
 
+// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations_maximum_resolution {
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
 
 
-#endif /* __ANDROID_API__ >= 24 */
+
 
 __END_DECLS
 
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index 99f67e9..0838fba 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -44,10 +44,12 @@
  */
 #ifdef __ANDROID_VNDK__
 #include <cutils/native_handle.h>
-typedef native_handle_t ACameraWindowType;
+typedef const native_handle_t ACameraWindowType;
 #else
 #include <android/native_window.h>
 typedef ANativeWindow ACameraWindowType;
 #endif
 
+/** @} */
+
 #endif //_NDK_CAMERA_WINDOW_TYPE_H
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index d3f8826..d83c5b3 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -44,12 +44,10 @@
 
 __BEGIN_DECLS
 
-#if __ANDROID_API__ >= 24
-
-// Container for output targets
+/** Container for output targets */
 typedef struct ACameraOutputTargets ACameraOutputTargets;
 
-// Container for a single output target
+/** Container for a single output target */
 typedef struct ACameraOutputTarget ACameraOutputTarget;
 
 /**
@@ -304,10 +302,6 @@
  */
 void ACaptureRequest_free(ACaptureRequest* request) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Associate an arbitrary user context pointer to the {@link ACaptureRequest}
  *
@@ -356,10 +350,6 @@
  */
 ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Get a metadata entry from input {@link ACaptureRequest} for
  * a physical camera backing a logical multi-camera device.
@@ -393,10 +383,10 @@
  * Set/change a camera capture control entry with unsigned 8 bits data type for
  * a physical camera backing a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_u8, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_u8, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -423,10 +413,10 @@
  * Set/change a camera capture control entry with signed 32 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i32, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i32, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -453,10 +443,10 @@
  * Set/change a camera capture control entry with float data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_float, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_float, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -483,10 +473,10 @@
  * Set/change a camera capture control entry with signed 64 bits data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_i64, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_i64, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -513,10 +503,10 @@
  * Set/change a camera capture control entry with double data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_double, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_double, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -543,10 +533,10 @@
  * Set/change a camera capture control entry with rational data type for
  * a physical camera of a logical multi-camera device.
  *
- * <p>Same as ACaptureRequest_setEntry_rational, except that if {@link tag} is contained
+ * <p>Same as ACaptureRequest_setEntry_rational, except that if tag is contained
  * in {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, this function
  * sets the entry for a particular physical sub-camera backing the logical multi-camera.
- * If {@link tag} is not contained in
+ * If tag is not contained in
  * {@link ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS}, the key will be ignored
  * by the camera device.</p>
  *
@@ -569,8 +559,6 @@
         ACaptureRequest* request, const char* physicalId, uint32_t tag,
         uint32_t count, const ACameraMetadata_rational* data) __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif /* _NDK_CAPTURE_REQUEST_H */
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index e1af8c1..5a1af79 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -18,7 +18,7 @@
 #include "utils.h"
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(native_handle_t* window, bool isShared = false,
+    explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
             const char* physicalCameraId = "") :
             mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index e511a3f..9f63099 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -180,6 +180,7 @@
         const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
+    nsecs_t startTimeNs = systemTime();
     sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
     Mutex::Autolock _l(mDeviceLock);
     camera_status_t ret = checkCameraClosedOrErrorLocked();
@@ -193,7 +194,7 @@
     }
 
     // Create new session
-    ret = configureStreamsLocked(outputs, sessionParameters);
+    ret = configureStreamsLocked(outputs, sessionParameters, startTimeNs);
     if (ret != ACAMERA_OK) {
         ALOGE("Fail to create new session. cannot configure streams");
         return ret;
@@ -355,7 +356,7 @@
     std::vector<int32_t> requestStreamIdxList;
     std::vector<int32_t> requestSurfaceIdxList;
     for (auto outputTarget : request->targets->mOutputs) {
-        native_handle_t* anw = outputTarget.mWindow;
+        const native_handle_t* anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
@@ -434,7 +435,7 @@
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        native_handle_t* anw = req->mSurfaceList[i];
+        const native_handle_t* anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(outputTarget);
     }
@@ -472,7 +473,11 @@
     }
 
     // No new session, unconfigure now
-    camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
+    // Note: The unconfiguration of a session isn't accounted for in session
+    // latency because a stream configuration with 0 streams never becomes
+    // active.
+    nsecs_t startTimeNs = systemTime();
+    camera_status_t ret = configureStreamsLocked(nullptr, nullptr, startTimeNs);
     if (ret != ACAMERA_OK) {
         ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
     }
@@ -598,7 +603,7 @@
 
 camera_status_t
 CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-        const ACaptureRequest* sessionParameters) {
+        const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -611,7 +616,7 @@
 
     std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> outputSet;
     for (auto outConfig : outputs->mOutputs) {
-        native_handle_t* anw = outConfig.mWindow;
+        const native_handle_t* anw = outConfig.mWindow;
         OutputConfigurationWrapper outConfigInsertW;
         OutputConfiguration &outConfigInsert = outConfigInsertW.mOutputConfiguration;
         outConfigInsert.rotation = utils::convertToHidl(outConfig.mRotation);
@@ -697,7 +702,8 @@
         utils::convertToHidl(params_metadata, &hidlParams);
         params.unlock(params_metadata);
     }
-    remoteRet = mRemote->endConfigure(StreamConfigurationMode::NORMAL_MODE, hidlParams);
+    remoteRet = mRemote->endConfigure_2_1(StreamConfigurationMode::NORMAL_MODE,
+                                          hidlParams, startTimeNs);
     CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "endConfigure()")
     return ACAMERA_OK;
 }
@@ -846,8 +852,7 @@
             for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
                 int32_t windowId = streamAndWindowId.windowId;
                 if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
-                    native_handle_t* anw =
-                        const_cast<native_handle_t *>(windowHandles[windowId].getNativeHandle());
+                    const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -1244,7 +1249,7 @@
                         return;
                     }
 
-                    native_handle_t* anw;
+                    const native_handle_t* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 7fc699e..0b6c7c8 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -26,7 +26,7 @@
 #include <utils/Mutex.h>
 #include <utils/List.h>
 #include <utils/Vector.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
+#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
 #include <android/frameworks/cameraservice/device/2.0/ICameraDeviceCallback.h>
 #include <android/frameworks/cameraservice/device/2.0/types.h>
 #include <fmq/MessageQueue.h>
@@ -44,7 +44,8 @@
 namespace acam {
 
 using ICameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
-using ICameraDeviceUser = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
+using ICameraDeviceUser_2_0 = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
+using ICameraDeviceUser = frameworks::cameraservice::device::V2_1::ICameraDeviceUser;
 using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
 using PhysicalCaptureResultInfo = frameworks::cameraservice::device::V2_0::PhysicalCaptureResultInfo;
 using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
@@ -201,7 +202,7 @@
     void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
 
     camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-           const ACaptureRequest* sessionParameters);
+           const ACaptureRequest* sessionParameters, nsecs_t startTimeNs);
 
     // Input message will be posted and cleared after this returns
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 5aa9c46..77c934a 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -764,15 +764,15 @@
     }
 
     sp<ICameraDeviceCallback> callbacks = device->getServiceCallback();
-    sp<ICameraDeviceUser> deviceRemote;
+    sp<ICameraDeviceUser_2_0> deviceRemote_2_0;
 
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     Status status = Status::NO_ERROR;
     auto serviceRet = cs->connectDevice(
-            callbacks, cameraId, [&status, &deviceRemote](auto s, auto &device) {
+            callbacks, cameraId, [&status, &deviceRemote_2_0](auto s, auto &device) {
                                      status = s;
-                                     deviceRemote = device;
+                                     deviceRemote_2_0 = device;
                                  });
 
     if (!serviceRet.isOk() || status != Status::NO_ERROR) {
@@ -780,11 +780,18 @@
         delete device;
         return utils::convertFromHidl(status);
     }
-    if (deviceRemote == nullptr) {
+    if (deviceRemote_2_0 == nullptr) {
         ALOGE("%s: connect camera device failed! remote device is null", __FUNCTION__);
         delete device;
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
+    auto castResult = ICameraDeviceUser::castFrom(deviceRemote_2_0);
+    if (!castResult.isOk()) {
+        ALOGE("%s: failed to cast remote device to version 2.1", __FUNCTION__);
+        delete device;
+        return ACAMERA_ERROR_CAMERA_DISCONNECTED;
+    }
+    sp<ICameraDeviceUser> deviceRemote = castResult;
     device->setRemoteDevice(deviceRemote);
     device->setDeviceMetadataQueues();
     *outDevice = device;
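
The hunk above upgrades the NDK vendor client from the 2.0 to the 2.1 ICameraDeviceUser interface by connecting at 2.0 and then attempting a castFrom(), treating a failed cast as a disconnect. As a rough illustration of that "upgrade the interface version, bail out gracefully" pattern (not the HIDL castFrom API itself), here is a minimal plain-C++ sketch; DeviceUserV2_0, DeviceUserV2_1, and connectAndUpgrade are hypothetical stand-ins.

```cpp
// Sketch of the version-upcast pattern; real HIDL code uses castFrom(), not dynamic_cast.
#include <cstdio>
#include <memory>

struct DeviceUserV2_0 {
    virtual ~DeviceUserV2_0() = default;
};

struct DeviceUserV2_1 : DeviceUserV2_0 {
    void newerCall() { std::printf("2.1 feature available\n"); }
};

bool connectAndUpgrade(const std::shared_ptr<DeviceUserV2_0>& remote_2_0) {
    if (remote_2_0 == nullptr) {
        return false;  // the connect call itself failed
    }
    // Attempt the version upcast; a 2.0-only service yields nullptr here.
    auto remote_2_1 = std::dynamic_pointer_cast<DeviceUserV2_1>(remote_2_0);
    if (remote_2_1 == nullptr) {
        std::printf("remote device does not implement 2.1\n");
        return false;
    }
    remote_2_1->newerCall();
    return true;
}

int main() {
    connectAndUpgrade(std::make_shared<DeviceUserV2_1>());  // succeeds
    connectAndUpgrade(std::make_shared<DeviceUserV2_0>());  // cast fails, handled
}
```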
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85da3e9..8359bb1 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -22,6 +22,7 @@
 #include <android-base/parseint.h>
 #include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
 #include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
+#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
 #include <android/frameworks/cameraservice/service/2.1/ICameraServiceListener.h>
 
 #include <CameraMetadata.h>
@@ -38,7 +39,7 @@
 namespace android {
 namespace acam {
 
-using ICameraService = frameworks::cameraservice::service::V2_1::ICameraService;
+using ICameraService = frameworks::cameraservice::service::V2_2::ICameraService;
 using CameraDeviceStatus = frameworks::cameraservice::service::V2_0::CameraDeviceStatus;
 using ICameraServiceListener = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
 using PhysicalCameraStatusAndId = frameworks::cameraservice::service::V2_1::PhysicalCameraStatusAndId;
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
index ed67615..5715d77 100644
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
@@ -17,7 +17,7 @@
 #include "utils.h"
 
 struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(native_handle_t* window) : mWindow(window) {};
+    explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
 
     bool operator == (const ACameraOutputTarget& other) const {
         return mWindow == other.mWindow;
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index f389f03..6f5820e 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -42,7 +42,7 @@
 // Utility class so that CaptureRequest can be stored by sp<>
 struct CaptureRequest : public RefBase {
   frameworks::cameraservice::device::V2_0::CaptureRequest mCaptureRequest;
-  std::vector<native_handle_t *> mSurfaceList;
+  std::vector<const native_handle_t *> mSurfaceList;
   //Physical camera settings metadata is stored here, since the capture request
   //might not contain it. That's since, fmq might have consumed it.
   hidl_vec<PhysicalCameraSettings> mPhysicalCameraSettings;
@@ -62,13 +62,13 @@
 // Utility class so the native_handle_t can be compared with  its contents instead
 // of just raw pointer comparisons.
 struct native_handle_ptr_wrapper {
-    native_handle_t *mWindow = nullptr;
+    const native_handle_t *mWindow = nullptr;
 
-    native_handle_ptr_wrapper(native_handle_t *nh) : mWindow(nh) { }
+    native_handle_ptr_wrapper(const native_handle_t *nh) : mWindow(nh) { }
 
     native_handle_ptr_wrapper() = default;
 
-    operator native_handle_t *() const { return mWindow; }
+    operator const native_handle_t *() const { return mWindow; }
 
     bool operator ==(const native_handle_ptr_wrapper other) const {
         return isWindowNativeHandleEqual(mWindow, other.mWindow);
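
The const-propagation above hinges on native_handle_ptr_wrapper keeping its implicit conversion and contents-based equality while now holding a const pointer. A minimal self-contained sketch of that wrapper idiom, with Handle and handlesEqual() as hypothetical stand-ins for native_handle_t and isWindowNativeHandleEqual():

```cpp
// Store a const pointer, convert back implicitly, compare by contents rather than by address.
#include <cassert>

struct Handle { int fd; };

inline bool handlesEqual(const Handle* a, const Handle* b) {
    if (a == nullptr || b == nullptr) return a == b;
    return a->fd == b->fd;
}

struct HandlePtrWrapper {
    const Handle* mWindow = nullptr;

    HandlePtrWrapper() = default;
    HandlePtrWrapper(const Handle* h) : mWindow(h) {}

    operator const Handle*() const { return mWindow; }

    bool operator==(const HandlePtrWrapper& other) const {
        return handlesEqual(mWindow, other.mWindow);
    }
};

int main() {
    Handle a{3}, b{3};
    assert(HandlePtrWrapper(&a) == HandlePtrWrapper(&b));  // equal contents, different addresses
    const Handle* raw = HandlePtrWrapper(&a);              // implicit conversion still works
    assert(raw == &a);
}
```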
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 938b5f5..ba14c5c 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -50,7 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
-using ConfiguredWindows = std::set<native_handle_t *>;
+using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
    public:
@@ -60,11 +60,11 @@
 
     struct PhysicalImgReaderInfo {
         const char* physicalCameraId;
-        native_handle_t* anw;
+        const native_handle_t* anw;
     };
 
     // Retaining the error code in case the caller needs to analyze it.
-    std::variant<int, ConfiguredWindows> initCamera(native_handle_t* imgReaderAnw,
+    std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings) {
         ConfiguredWindows configuredWindows;
@@ -257,7 +257,7 @@
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
-    native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
+    const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
     ACameraDevice* mDevice = nullptr;
@@ -396,7 +396,7 @@
         return 0;
     }
 
-    native_handle_t* getNativeWindow() { return mImgReaderAnw; }
+    const native_handle_t* getNativeWindow() { return mImgReaderAnw; }
 
     int getAcquiredImageCount() {
         std::lock_guard<std::mutex> lock(mMutex);
diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk
index e5c1631..7f8078e 100644
--- a/camera/tests/Android.mk
+++ b/camera/tests/Android.mk
@@ -42,6 +42,9 @@
 LOCAL_CFLAGS += -Wall -Wextra -Werror
 
 LOCAL_MODULE:= camera_client_test
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/../NOTICE
 LOCAL_MODULE_TAGS := tests
 
 include $(BUILD_NATIVE_TEST)
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 8ccded2..9f2f430 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -363,7 +363,8 @@
 
         // Check metadata binder call
         CameraMetadata metadata;
-        res = service->getCameraCharacteristics(cameraId, &metadata);
+        res = service->getCameraCharacteristics(cameraId,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -378,8 +379,8 @@
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
         res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
-                std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
-                /*out*/&device);
+                {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -421,8 +422,8 @@
         {
             SCOPED_TRACE("openNewDevice");
             binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
-                    std::unique_ptr<String16>(), hardware::ICameraService::USE_CALLING_UID,
-                    /*out*/&device);
+                    {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+                    /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
@@ -517,7 +518,7 @@
         CameraMetadata sessionParams;
         std::vector<int> offlineStreamIds;
         res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
-                &offlineStreamIds);
+                ns2ms(systemTime()), &offlineStreamIds);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(callbacks->hadError());
 
@@ -629,7 +630,7 @@
         res = device->deleteStream(streamId);
         EXPECT_TRUE(res.isOk()) << res;
         res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams,
-                &offlineStreamIds);
+                ns2ms(systemTime()), &offlineStreamIds);
         EXPECT_TRUE(res.isOk()) << res;
 
         sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 135d2a3..76dc38c 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -73,7 +73,8 @@
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
-        rc = mCameraService->getCameraCharacteristics(cameraIdStr, &metadata);
+        rc = mCameraService->getCameraCharacteristics(cameraIdStr,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 02c6e2a..efd9dae 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -181,7 +181,8 @@
         }
 
         CameraMetadata metadata;
-        rc = mCameraService->getCameraCharacteristics(cameraIdStr, &metadata);
+        rc = mCameraService->getCameraCharacteristics(cameraIdStr,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -207,7 +208,8 @@
 
         rc = mCameraService->connect(this, cameraId,
                 String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
-                hardware::ICameraService::USE_CALLING_PID, &cameraDevice);
+                hardware::ICameraService::USE_CALLING_PID,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/cmds/screenrecord/Android.bp b/cmds/screenrecord/Android.bp
index d7d905f..359a835 100644
--- a/cmds/screenrecord/Android.bp
+++ b/cmds/screenrecord/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_binary {
     name: "screenrecord",
 
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index f4fb626..e6e3473 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -57,7 +57,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <mediadrm/ICrypto.h>
-#include <ui/DisplayConfig.h>
+#include <ui/DisplayMode.h>
 #include <ui/DisplayState.h>
 
 #include "screenrecord.h"
@@ -68,7 +68,7 @@
 using android::ALooper;
 using android::AMessage;
 using android::AString;
-using android::DisplayConfig;
+using android::ui::DisplayMode;
 using android::FrameOutput;
 using android::IBinder;
 using android::IGraphicBufferProducer;
@@ -273,14 +273,11 @@
         SurfaceComposerClient::Transaction& t,
         const sp<IBinder>& dpy,
         const ui::DisplayState& displayState) {
-    const ui::Size& viewport = displayState.viewport;
-
-    // Set the region of the layer stack we're interested in, which in our
-    // case is "all of it".
-    Rect layerStackRect(viewport);
+    // Set the region of the layer stack we're interested in, which in our case is "all of it".
+    Rect layerStackRect(displayState.layerStackSpaceRect);
 
     // We need to preserve the aspect ratio of the display.
-    float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());
+    float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());
 
 
     // Set the way we map the output onto the display surface (which will
@@ -692,27 +689,28 @@
         return err;
     }
 
-    DisplayConfig displayConfig;
-    err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
+    DisplayMode displayMode;
+    err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
     if (err != NO_ERROR) {
         fprintf(stderr, "ERROR: unable to get display config\n");
         return err;
     }
 
-    const ui::Size& viewport = displayState.viewport;
+    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
     if (gVerbose) {
         printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
-                viewport.getWidth(), viewport.getHeight(), displayConfig.refreshRate,
-                toCString(displayState.orientation), displayState.layerStack);
+                layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
+                displayMode.refreshRate, toCString(displayState.orientation),
+                displayState.layerStack);
         fflush(stdout);
     }
 
     // Encoder can't take odd number as config
     if (gVideoWidth == 0) {
-        gVideoWidth = floorToEven(viewport.getWidth());
+        gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
     }
     if (gVideoHeight == 0) {
-        gVideoHeight = floorToEven(viewport.getHeight());
+        gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
     }
 
     // Configure and start the encoder.
@@ -720,7 +718,7 @@
     sp<FrameOutput> frameOutput;
     sp<IGraphicBufferProducer> encoderInputSurface;
     if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
-        err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
+        err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
 
         if (err != NO_ERROR && !gSizeSpecified) {
             // fallback is defined for landscape; swap if we're in portrait
@@ -733,7 +731,7 @@
                         gVideoWidth, gVideoHeight, newWidth, newHeight);
                 gVideoWidth = newWidth;
                 gVideoHeight = newHeight;
-                err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
+                err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
             }
         }
         if (err != NO_ERROR) return err;
@@ -1170,14 +1168,14 @@
             }
             break;
         case 'd':
-            gPhysicalDisplayId = atoll(optarg);
-            if (gPhysicalDisplayId == 0) {
+            gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
+            if (gPhysicalDisplayId.value == 0) {
                 fprintf(stderr, "Please specify a valid physical display id\n");
                 return 2;
             } else if (SurfaceComposerClient::
                     getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
-                fprintf(stderr, "Invalid physical display id: %"
-                        ANDROID_PHYSICAL_DISPLAY_ID_FORMAT "\n", gPhysicalDisplayId);
+                fprintf(stderr, "Invalid physical display id: %s\n",
+                        to_string(gPhysicalDisplayId).c_str());
                 return 2;
             }
             break;
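
The screenrecord changes above swap DisplayConfig for ui::DisplayMode and size the capture from layerStackSpaceRect, still rounding the default dimensions down to even values because video encoders generally reject odd widths and heights. A minimal sketch of that sizing step, assuming a floorToEven helper like the one screenrecord defines; the sample dimensions are made up:

```cpp
#include <cstdint>
#include <cstdio>

// Round down to the nearest even value by clearing the lowest bit.
static uint32_t floorToEven(uint32_t num) {
    return num & ~1u;
}

int main() {
    uint32_t displayWidth = 1081, displayHeight = 2339;  // hypothetical layer-stack size
    uint32_t videoWidth = floorToEven(displayWidth);     // 1080
    uint32_t videoHeight = floorToEven(displayHeight);   // 2338

    // Preserve the display aspect ratio (height over width, as in the hunk above).
    float displayAspect = videoHeight / static_cast<float>(videoWidth);
    std::printf("capture %ux%u, aspect %.3f\n", videoWidth, videoHeight, displayAspect);
}
```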
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 6470fb1..803c4a4 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -16,6 +16,9 @@
         libstagefright_foundation libjpeg libui libgui libcutils liblog \
         libhidlbase libdatasource libaudioclient \
         android.hardware.media.omx@1.0 \
+        framework-permission-aidl-cpp
+
+LOCAL_STATIC_LIBRARIES := framework-permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -28,6 +31,9 @@
 
 LOCAL_SYSTEM_EXT_MODULE:= true
 LOCAL_MODULE:= stagefright
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -45,7 +51,8 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libdatasource libaudioclient
+        libstagefright_foundation libdatasource libaudioclient \
+        framework-permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/camera/include \
@@ -58,6 +65,9 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= record
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -79,13 +89,17 @@
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
         frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware
+        frameworks/native/include/media/hardware \
+        framework-permission-aidl-cpp
 
 LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
 
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= recordvideo
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -104,7 +118,8 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient
+        libstagefright_foundation libaudioclient \
+        framework-permission-aidl-cpp
 
 LOCAL_C_INCLUDES:= \
         frameworks/av/media/libstagefright \
@@ -115,6 +130,9 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= audioloop
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -141,6 +159,9 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= stream
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -169,6 +190,9 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= codec
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
 
@@ -214,6 +238,9 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= mediafilter
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 LOCAL_SANITIZE := cfi
 
@@ -242,5 +269,8 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_MODULE:= muxer
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
 
 include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index eb76953..55427ca 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -134,15 +134,18 @@
     success = format->findInt32(kKeySampleRate, &mSampleRate);
     CHECK(success);
 
-    int32_t numChannels, channelMask;
+    int32_t numChannels;
     success = format->findInt32(kKeyChannelCount, &numChannels);
     CHECK(success);
 
-    if(!format->findInt32(kKeyChannelMask, &channelMask)) {
+    audio_channel_mask_t channelMask;
+    if (int32_t rawChannelMask; !format->findInt32(kKeyChannelMask, &rawChannelMask)) {
         // log only when there's a risk of ambiguity of channel mask selection
         ALOGI_IF(numChannels > 2,
                 "source format didn't specify channel mask, using (%d) channel order", numChannels);
         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+    } else {
+        channelMask = static_cast<audio_channel_mask_t>(rawChannelMask);
     }
 
     audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
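
The AudioPlayer change above scopes the raw kKeyChannelMask integer with a C++17 if-initializer and only casts it to audio_channel_mask_t when the key is present, otherwise falling back to the channel-order sentinel. A minimal sketch of that shape, with findInt32() and the mask constants as hypothetical stand-ins for the MetaData and audio_channel_mask_t APIs:

```cpp
#include <cstdint>
#include <cstdio>
#include <optional>

enum ChannelMask : uint32_t {
    CHANNEL_MASK_USE_CHANNEL_ORDER = 0,
    CHANNEL_MASK_STEREO = 0x3,
};

// Pretend metadata lookup: returns a stereo mask only when the key is present.
static std::optional<int32_t> findInt32(bool present) {
    return present ? std::optional<int32_t>(0x3) : std::nullopt;
}

static ChannelMask pickChannelMask(bool maskPresent, int numChannels) {
    ChannelMask channelMask;
    // The if-initializer keeps the raw integer scoped to this statement.
    if (auto raw = findInt32(maskPresent); !raw) {
        if (numChannels > 2) {
            std::printf("no channel mask, using %d-channel order\n", numChannels);
        }
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    } else {
        channelMask = static_cast<ChannelMask>(*raw);
    }
    return channelMask;
}

int main() {
    std::printf("%u\n", static_cast<unsigned>(pickChannelMask(true, 2)));   // 3 (stereo mask)
    std::printf("%u\n", static_cast<unsigned>(pickChannelMask(false, 6)));  // 0 (channel order)
}
```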
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index f4b8164..e000633 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -272,7 +272,7 @@
 status_t SimplePlayer::onPrepare() {
     CHECK_EQ(mState, UNPREPARED);
 
-    mExtractor = new NuMediaExtractor;
+    mExtractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
 
     status_t err = mExtractor->setDataSource(
             NULL /* httpService */, mPath.c_str());
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 84a6d6b..4b41ff8 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -24,6 +24,7 @@
 
 #include <utils/String16.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <media/mediarecorder.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -38,6 +39,8 @@
 
 using namespace android;
 
+using content::AttributionSourceState;
+
 static void usage(const char* name)
 {
     fprintf(stderr, "Usage: %s [-d du.ration] [-m] [-w] [-N name] [<output-file>]\n", name);
@@ -110,9 +113,10 @@
         audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
         attr.source = AUDIO_SOURCE_MIC;
 
+        // TODO b/182392769: use attribution source util
         source = new AudioSource(
                 &attr,
-                String16(),
+                AttributionSourceState(),
                 sampleRate,
                 channels);
     } else {
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index c26e0b9..beeab54 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -39,7 +39,7 @@
 #include <gui/ISurfaceComposer.h>
 #include <gui/SurfaceComposerClient.h>
 #include <gui/Surface.h>
-#include <ui/DisplayConfig.h>
+#include <ui/DisplayMode.h>
 
 static void usage(const char *me) {
     fprintf(stderr, "usage: %s [-a] use audio\n"
@@ -79,7 +79,7 @@
 
     static int64_t kTimeout = 500ll;
 
-    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+    sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
     if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor.\n");
         return 1;
@@ -414,10 +414,10 @@
         const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
         CHECK(display != nullptr);
 
-        DisplayConfig config;
-        CHECK_EQ(SurfaceComposerClient::getActiveDisplayConfig(display, &config), NO_ERROR);
+        ui::DisplayMode mode;
+        CHECK_EQ(SurfaceComposerClient::getActiveDisplayMode(display, &mode), NO_ERROR);
 
-        const ui::Size& resolution = config.resolution;
+        const ui::Size& resolution = mode.resolution;
         const ssize_t displayWidth = resolution.getWidth();
         const ssize_t displayHeight = resolution.getHeight();
 
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index b894545..67c68e6 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -34,7 +34,7 @@
 #include <media/stagefright/NuMediaExtractor.h>
 #include <media/stagefright/RenderScriptWrapper.h>
 #include <OMX_IVCommon.h>
-#include <ui/DisplayConfig.h>
+#include <ui/DisplayMode.h>
 
 #include "RenderScript.h"
 #include "ScriptC_argbtorgba.h"
@@ -319,7 +319,8 @@
 
     static int64_t kTimeout = 500ll;
 
-    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+    sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
+
     if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor.\n");
         return 1;
@@ -751,10 +752,10 @@
         const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
         CHECK(display != nullptr);
 
-        DisplayConfig config;
-        CHECK_EQ(SurfaceComposerClient::getActiveDisplayConfig(display, &config), NO_ERROR);
+        ui::DisplayMode mode;
+        CHECK_EQ(SurfaceComposerClient::getActiveDisplayMode(display, &mode), NO_ERROR);
 
-        const ui::Size& resolution = config.resolution;
+        const ui::Size& resolution = mode.resolution;
         const ssize_t displayWidth = resolution.getWidth();
         const ssize_t displayHeight = resolution.getHeight();
 
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 4a83a4a..bc7e41e 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -62,7 +62,7 @@
         int trimEndTimeMs,
         int rotationDegrees,
         MediaMuxer::OutputFormat container = MediaMuxer::OUTPUT_FORMAT_MPEG_4) {
-    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+    sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
     if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
         fprintf(stderr, "unable to instantiate extractor. %s\n", path);
         return 1;
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 37091c4..098c278 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -259,31 +259,6 @@
     printf("$\n");
 #endif
 
-#if 0
-    CameraSource *source = CameraSource::Create(
-            String16(argv[0], strlen(argv[0])));
-    source->start();
-
-    printf("source = %p\n", source);
-
-    for (int i = 0; i < 100; ++i) {
-        MediaBuffer *buffer;
-        status_t err = source->read(&buffer);
-        CHECK_EQ(err, (status_t)OK);
-
-        printf("got a frame, data=%p, size=%d\n",
-               buffer->data(), buffer->range_length());
-
-        buffer->release();
-        buffer = NULL;
-    }
-
-    err = source->stop();
-
-    delete source;
-    source = NULL;
-#endif
-
     if (err != OK && err != ERROR_END_OF_STREAM) {
         fprintf(stderr, "record failed: %d\n", err);
         return 1;
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 250d26b..40b2392 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -42,7 +42,7 @@
 #include <gui/Surface.h>
 
 #include <fcntl.h>
-#include <ui/DisplayConfig.h>
+#include <ui/DisplayMode.h>
 
 using namespace android;
 
@@ -321,10 +321,10 @@
     const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
     CHECK(display != nullptr);
 
-    DisplayConfig config;
-    CHECK_EQ(SurfaceComposerClient::getActiveDisplayConfig(display, &config), NO_ERROR);
+    ui::DisplayMode mode;
+    CHECK_EQ(SurfaceComposerClient::getActiveDisplayMode(display, &mode), NO_ERROR);
 
-    const ui::Size& resolution = config.resolution;
+    const ui::Size& resolution = mode.resolution;
     const ssize_t displayWidth = resolution.getWidth();
     const ssize_t displayHeight = resolution.getHeight();
 
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 2595e3e..3642898 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -1,5 +1,5 @@
 {
-  "presubmit": [
+  "presubmit-large": [
     // The following tests validate codec and drm path.
     {
       "name": "GtsMediaTestCases",
@@ -9,17 +9,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/drm/common/Android.bp b/drm/common/Android.bp
index 272684c..76ee22e 100644
--- a/drm/common/Android.bp
+++ b/drm/common/Android.bp
@@ -14,7 +14,24 @@
 // limitations under the License.
 //
 
-cc_library_static {
+package {
+    default_applicable_licenses: ["frameworks_av_drm_common_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_common_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libdrmframeworkcommon",
 
     srcs: [
@@ -35,7 +52,11 @@
 
     cflags: ["-Wall", "-Werror"],
 
-    shared_libs: ["libbinder"],
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libutils"
+    ],
 
     export_include_dirs: ["include"],
 }
diff --git a/drm/common/include/DrmEngineBase.h b/drm/common/include/DrmEngineBase.h
index 73f11a4..c0a5e3b 100644
--- a/drm/common/include/DrmEngineBase.h
+++ b/drm/common/include/DrmEngineBase.h
@@ -309,7 +309,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/common/include/IDrmEngine.h b/drm/common/include/IDrmEngine.h
index 1837a11..a545941 100644
--- a/drm/common/include/IDrmEngine.h
+++ b/drm/common/include/IDrmEngine.h
@@ -250,7 +250,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index b68e6c2..df3a6a2 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -14,6 +14,23 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: ["frameworks_av_drm_drmserver_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_drmserver_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_binary {
     name: "drmserver",
 
@@ -31,19 +48,18 @@
         "liblog",
         "libbinder",
         "libdl",
+        "libdrmframeworkcommon",
         "libselinux",
         "libstagefright_foundation",
     ],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     cflags: [
         "-Wall",
         "-Wextra",
         "-Werror",
     ],
 
-    compile_multilib: "32",
+    compile_multilib: "prefer32",
 
     init_rc: ["drmserver.rc"],
 }
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 9a32cc5..74e3223 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -99,13 +99,13 @@
         }
         default:
         {
-            ALOGW("Unrecognized message type: %zd", msg->what());
+            ALOGW("Unrecognized message type: %u", msg->what());
         }
     }
 }
 
 int64_t DrmManager::getMetricsFlushPeriodUs() {
-    return 1000 * 1000 * std::max(1ll, property_get_int64("drmmanager.metrics.period", 86400));
+    return 1000 * 1000 * std::max(1ll, (long long)property_get_int64("drmmanager.metrics.period", 86400));
 }
 
 void DrmManager::recordEngineMetrics(
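
The cast added above matters because std::max deduces a single argument type: on LP64 builds (plausibly relevant now that drmserver uses compile_multilib "prefer32" instead of "32"), int64_t is long while the 1ll literal is long long, so the mixed call fails to compile unless both sides are brought to the same type. A minimal sketch, with fakePropertyGetInt64 standing in for property_get_int64:

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

static int64_t fakePropertyGetInt64(const char* /*key*/, int64_t defaultValue) {
    return defaultValue;  // pretend the property is unset
}

int main() {
    int64_t periodSec = fakePropertyGetInt64("drmmanager.metrics.period", 86400);

    // std::max(1ll, periodSec) would not compile where int64_t is `long`:
    // the deduced types (long long vs. long) conflict. The explicit cast fixes it.
    long long clampedSec = std::max(1ll, (long long)periodSec);

    long long periodUs = 1000 * 1000 * clampedSec;
    std::printf("flush period: %lld us\n", periodUs);
}
```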
diff --git a/drm/libdrmframework/Android.bp b/drm/libdrmframework/Android.bp
index 940c17d..dbce5ea 100644
--- a/drm/libdrmframework/Android.bp
+++ b/drm/libdrmframework/Android.bp
@@ -14,6 +14,23 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: ["frameworks_av_drm_libdrmframework_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_libdrmframework_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libdrmframework",
 
@@ -29,13 +46,11 @@
         "liblog",
         "libbinder",
         "libdl",
+        "libdrmframeworkcommon",
     ],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     export_include_dirs: ["include"],
-    export_static_lib_headers: ["libdrmframeworkcommon"],
+    export_shared_lib_headers: ["libdrmframeworkcommon"],
 
     cflags: ["-Werror"],
 }
-
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index 3858675..8c8783b 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -230,7 +230,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @param[in] uniqueId Unique identifier for a session
      * @return status_t
diff --git a/drm/libdrmframework/plugins/common/util/Android.bp b/drm/libdrmframework/plugins/common/util/Android.bp
index 7372eb7..a47b4a1 100644
--- a/drm/libdrmframework/plugins/common/util/Android.bp
+++ b/drm/libdrmframework/plugins/common/util/Android.bp
@@ -14,6 +14,25 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_drm_libdrmframework_plugins_common_util_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_libdrmframework_plugins_common_util_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libdrmutility",
 
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index bb9d7ec..be2b546 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -14,6 +14,25 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_drm_libdrmframework_plugins_forward-lock_FwdLockEngine_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_libdrmframework_plugins_forward-lock_FwdLockEngine_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libfwdlockengine",
 
@@ -36,11 +55,11 @@
         "libcrypto",
         "libssl",
         "libdrmframework",
+        "libdrmframeworkcommon",
     ],
 
     static_libs: [
         "libdrmutility",
-        "libdrmframeworkcommon",
         "libfwdlock-common",
         "libfwdlock-converter",
         "libfwdlock-decoder",
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
index b62ddb9..eb5b0f6 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/include/FwdLockEngine.h
@@ -252,8 +252,7 @@
 
 /**
  * Removes all the rights information of each plug-in associated with
- * DRM framework. Will be used in master reset but does nothing for
- * Forward Lock Engine.
+ * DRM framework. Does nothing for Forward Lock Engine.
  *
  * @param uniqueId Unique identifier for a session
  * @return status_t
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/common/Android.bp b/drm/libdrmframework/plugins/forward-lock/internal-format/common/Android.bp
index 3be327a..2e4070b 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/common/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/common/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_drm_libdrmframework_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_drm_libdrmframework_license"],
+}
+
 cc_library_static {
     name: "libfwdlock-common",
 
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.bp b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.bp
index d4e04b8..51c778f 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_drm_libdrmframework_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_drm_libdrmframework_license"],
+}
+
 cc_library_static {
     name: "libfwdlock-converter",
 
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.bp b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.bp
index 0bf2737..6773fe1 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_drm_libdrmframework_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_drm_libdrmframework_license"],
+}
+
 cc_library_static {
     name: "libfwdlock-decoder",
 
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
index 8f95cd2..c1d5b3d 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/doc/FwdLock.html
@@ -488,7 +488,7 @@
 <p class=MsoBodyText><b>Note:</b> The key-encryption key must be unique to each
 device; this is what makes the files forward lock–protected. Ideally, it should
 be derived from secret hardware parameters, but at the very least it should be
-persistent from one master reset to the next.</p>
+persistent from one factory reset to the next.</p>
 
 <div style='margin-bottom:24.0pt;border:solid windowtext 1.0pt;padding:1.0pt 4.0pt 1.0pt 4.0pt;
 background:#F2F2F2'>
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 05b6440..6dffd49 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -14,17 +14,35 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_drm_libdrmframework_plugins_passthru_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_drm_libdrmframework_plugins_passthru_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libdrmpassthruplugin",
 
     srcs: ["src/DrmPassthruPlugIn.cpp"],
 
-    static_libs: ["libdrmframeworkcommon"],
-
     shared_libs: [
         "libutils",
         "liblog",
         "libdl",
+        "libdrmframeworkcommon",
     ],
 
     local_include_dirs: ["include"],
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 1700a95..0ffe626 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -2,6 +2,15 @@
 // libmediadrm
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libmediadrm_headers",
 
@@ -11,7 +20,7 @@
 
 }
 
-cc_library_shared {
+cc_library {
     name: "libmediadrm",
 
     srcs: [
@@ -51,6 +60,7 @@
         "android.hardware.drm@1.1",
         "android.hardware.drm@1.2",
         "android.hardware.drm@1.3",
+        "android.hardware.drm@1.4",
         "libhidlallocatorutils",
         "libhidlbase",
     ],
@@ -63,6 +73,7 @@
         "android.hardware.drm@1.0",
         "android.hardware.drm@1.1",
         "android.hardware.drm@1.2",
+        "android.hardware.drm@1.4",
     ],
 
     cflags: [
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index 18772e0..e0db1c4 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -28,6 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaErrors.h>
 #include <mediadrm/CryptoHal.h>
+#include <mediadrm/DrmUtils.h>
 
 using drm::V1_0::BufferType;
 using drm::V1_0::DestinationBuffer;
@@ -39,6 +40,7 @@
 using drm::V1_0::Status;
 using drm::V1_0::SubSample;
 
+using ::android::DrmUtils::toStatusT;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_memory;
@@ -53,42 +55,6 @@
 
 namespace android {
 
-static status_t toStatusT(Status status) {
-    switch (status) {
-    case Status::OK:
-        return OK;
-    case Status::ERROR_DRM_NO_LICENSE:
-        return ERROR_DRM_NO_LICENSE;
-    case Status::ERROR_DRM_LICENSE_EXPIRED:
-        return ERROR_DRM_LICENSE_EXPIRED;
-    case Status::ERROR_DRM_RESOURCE_BUSY:
-        return ERROR_DRM_RESOURCE_BUSY;
-    case Status::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
-        return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
-    case Status::ERROR_DRM_SESSION_NOT_OPENED:
-        return ERROR_DRM_SESSION_NOT_OPENED;
-    case Status::ERROR_DRM_CANNOT_HANDLE:
-        return ERROR_DRM_CANNOT_HANDLE;
-    case Status::ERROR_DRM_DECRYPT:
-        return ERROR_DRM_DECRYPT;
-    default:
-        return UNKNOWN_ERROR;
-    }
-}
-
-static status_t toStatusT_1_2(Status_V1_2 status) {
-    switch (status) {
-    case Status_V1_2::ERROR_DRM_SESSION_LOST_STATE:
-        return ERROR_DRM_SESSION_LOST_STATE;;
-    case Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE:
-        return ERROR_DRM_FRAME_TOO_LARGE;
-    case Status_V1_2::ERROR_DRM_INSUFFICIENT_SECURITY:
-        return ERROR_DRM_INSUFFICIENT_SECURITY;
-    default:
-        return toStatusT(static_cast<Status>(status));
-    }
-}
-
 static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t> &vector) {
     hidl_vec<uint8_t> vec;
     vec.setToExternal(const_cast<uint8_t *>(vector.array()), vector.size());
@@ -180,6 +146,9 @@
                 plugin = hPlugin;
             }
         );
+    if (!hResult.isOk()) {
+        mInitCheck = DEAD_OBJECT;
+    }
     return plugin;
 }
 
@@ -213,10 +182,8 @@
         }
     }
 
-    if (mPlugin == NULL) {
-        mInitCheck = ERROR_UNSUPPORTED;
-    } else {
-        mInitCheck = OK;
+    if (mInitCheck == NO_INIT) {
+        mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
     }
 
     return mInitCheck;
@@ -376,6 +343,7 @@
 
     Return<void> hResult;
 
+    mLock.unlock();
     if (mPluginV1_2 != NULL) {
         hResult = mPluginV1_2->decrypt_1_2(secure, toHidlArray16(keyId), toHidlArray16(iv),
                 hMode, hPattern, hSubSamples, hSource, offset, hDestination,
@@ -384,7 +352,7 @@
                         bytesWritten = hBytesWritten;
                         *errorDetailMsg = toString8(hDetailedError);
                     }
-                    err = toStatusT_1_2(status);
+                    err = toStatusT(status);
                 }
             );
     } else {
@@ -414,7 +382,8 @@
         return;
     }
 
-    mPlugin->notifyResolution(width, height);
+    auto hResult = mPlugin->notifyResolution(width, height);
+    ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
 }
 
 status_t CryptoHal::setMediaDrmSession(const Vector<uint8_t> &sessionId) {
@@ -424,7 +393,12 @@
         return mInitCheck;
     }
 
-    return toStatusT(mPlugin->setMediaDrmSession(toHidlVec(sessionId)));
+    auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
+    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
 }
 
+status_t CryptoHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
+    Mutex::Autolock autoLock(mLock);
+    return DrmUtils::GetLogMessages<drm::V1_4::ICryptoPlugin>(mPlugin, logs);
+}
 }  // namespace android
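
The switch statements removed above are replaced by the shared DrmUtils::toStatusT helpers, which keep one mapping per HAL status version and let the newer overload defer to the base one for values it does not extend. A minimal sketch of that overload pattern, using hypothetical enums and error constants rather than the real drm@1.x / status_t values:

```cpp
#include <cstdio>

using status_t = int;
constexpr status_t OK = 0;
constexpr status_t ERROR_DECRYPT = -100;
constexpr status_t ERROR_FRAME_TOO_LARGE = -101;
constexpr status_t UNKNOWN_ERROR = -1;

enum class StatusV1_0 { OK, ERROR_DECRYPT };
enum class StatusV1_2 { OK, ERROR_DECRYPT, ERROR_FRAME_TOO_LARGE };  // superset of V1_0

static status_t toStatusT(StatusV1_0 s) {
    switch (s) {
        case StatusV1_0::OK: return OK;
        case StatusV1_0::ERROR_DECRYPT: return ERROR_DECRYPT;
        default: return UNKNOWN_ERROR;
    }
}

static status_t toStatusT(StatusV1_2 s) {
    switch (s) {
        // Only the values added in 1.2 need new cases...
        case StatusV1_2::ERROR_FRAME_TOO_LARGE: return ERROR_FRAME_TOO_LARGE;
        // ...everything else falls through to the 1.0 mapping.
        default: return toStatusT(static_cast<StatusV1_0>(s));
    }
}

int main() {
    std::printf("%d\n", toStatusT(StatusV1_0::ERROR_DECRYPT));          // -100
    std::printf("%d\n", toStatusT(StatusV1_2::ERROR_FRAME_TOO_LARGE));  // -101
}
```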
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index f218041..40d1e0c 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -16,13 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "DrmHal"
-#include <iomanip>
-
-#include <utils/Log.h>
-
-#include <android/binder_manager.h>
 
 #include <aidl/android/media/BnResourceManagerClient.h>
+#include <android/binder_manager.h>
 #include <android/hardware/drm/1.2/types.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
@@ -40,7 +36,9 @@
 #include <mediadrm/DrmSessionManager.h>
 #include <mediadrm/IDrmMetricsConsumer.h>
 #include <mediadrm/DrmUtils.h>
+#include <utils/Log.h>
 
+#include <iomanip>
 #include <vector>
 
 using drm::V1_0::KeyedVector;
@@ -55,6 +53,7 @@
 using drm::V1_1::SecurityLevel;
 using drm::V1_2::KeySetId;
 using drm::V1_2::KeyStatusType;
+using ::android::DrmUtils::toStatusT;
 using ::android::hardware::drm::V1_1::DrmMetricGroup;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_string;
@@ -235,58 +234,6 @@
     return keySetIds;
 }
 
-static status_t toStatusT(Status status) {
-    switch (status) {
-    case Status::OK:
-        return OK;
-        break;
-    case Status::ERROR_DRM_NO_LICENSE:
-        return ERROR_DRM_NO_LICENSE;
-        break;
-    case Status::ERROR_DRM_LICENSE_EXPIRED:
-        return ERROR_DRM_LICENSE_EXPIRED;
-        break;
-    case Status::ERROR_DRM_SESSION_NOT_OPENED:
-        return ERROR_DRM_SESSION_NOT_OPENED;
-        break;
-    case Status::ERROR_DRM_CANNOT_HANDLE:
-        return ERROR_DRM_CANNOT_HANDLE;
-        break;
-    case Status::ERROR_DRM_INVALID_STATE:
-        return ERROR_DRM_INVALID_STATE;
-        break;
-    case Status::BAD_VALUE:
-        return BAD_VALUE;
-        break;
-    case Status::ERROR_DRM_NOT_PROVISIONED:
-        return ERROR_DRM_NOT_PROVISIONED;
-        break;
-    case Status::ERROR_DRM_RESOURCE_BUSY:
-        return ERROR_DRM_RESOURCE_BUSY;
-        break;
-    case Status::ERROR_DRM_DEVICE_REVOKED:
-        return ERROR_DRM_DEVICE_REVOKED;
-        break;
-    case Status::ERROR_DRM_UNKNOWN:
-    default:
-        return ERROR_DRM_UNKNOWN;
-        break;
-    }
-}
-
-static status_t toStatusT_1_2(Status_V1_2 status) {
-    switch (status) {
-    case Status_V1_2::ERROR_DRM_RESOURCE_CONTENTION:
-        return ERROR_DRM_RESOURCE_CONTENTION;
-    case Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE:
-        return ERROR_DRM_FRAME_TOO_LARGE;
-    case Status_V1_2::ERROR_DRM_INSUFFICIENT_SECURITY:
-        return ERROR_DRM_INSUFFICIENT_SECURITY;
-    default:
-        return toStatusT(static_cast<Status>(status));
-    }
-}
-
 Mutex DrmHal::mLock;
 
 struct DrmHal::DrmSessionClient : public aidl::android::media::BnResourceManagerClient {
@@ -370,8 +317,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportPluginMetrics();
-    reportFrameworkMetrics();
+    reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
@@ -387,18 +333,19 @@
     mPlugin.clear();
     mPluginV1_1.clear();
     mPluginV1_2.clear();
+    mPluginV1_4.clear();
 }
 
 std::vector<sp<IDrmFactory>> DrmHal::makeDrmFactories() {
-    std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
+    static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
     if (factories.size() == 0) {
         // must be in passthrough mode, load the default passthrough service
         auto passthrough = IDrmFactory::getService();
         if (passthrough != NULL) {
-            ALOGI("makeDrmFactories: using default passthrough drm instance");
+            DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
             factories.push_back(passthrough);
         } else {
-            ALOGE("Failed to find any drm factories");
+            DrmUtils::LOG2BE("Failed to find any drm factories");
         }
     }
     return factories;
@@ -414,7 +361,7 @@
     Return<void> hResult = factory->createPlugin(uuid, appPackageName.string(),
             [&](Status status, const sp<IDrmPlugin>& hPlugin) {
                 if (status != Status::OK) {
-                    ALOGE("Failed to make drm plugin");
+                    DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
                     return;
                 }
                 plugin = hPlugin;
@@ -422,7 +369,8 @@
         );
 
     if (!hResult.isOk()) {
-        ALOGE("createPlugin remote call failed");
+        DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
+                         hResult.description().c_str());
     }
 
     return plugin;
@@ -616,18 +564,21 @@
     Mutex::Autolock autoLock(mLock);
 
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+        auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
+        if (hResult.isOk() && hResult) {
             auto plugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
             if (plugin != NULL) {
                 mPlugin = plugin;
                 mPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mPlugin);
                 mPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mPlugin);
+                mPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mPlugin);
                 break;
             }
         }
     }
 
     if (mPlugin == NULL) {
+        DrmUtils::LOG2BE(uuid, "No supported hal instance found");
         mInitCheck = ERROR_UNSUPPORTED;
     } else {
         mInitCheck = OK;
@@ -642,6 +593,7 @@
             mPlugin.clear();
             mPluginV1_1.clear();
             mPluginV1_2.clear();
+            mPluginV1_4.clear();
         }
     }
 
@@ -822,7 +774,7 @@
                         defaultUrl = toString8(hDefaultUrl);
                         *keyRequestType = toKeyRequestType_1_1(hKeyRequestType);
                     }
-                    err = toStatusT_1_2(status);
+                    err = toStatusT(status);
                 });
     } else if (mPluginV1_1 != NULL) {
         hResult = mPluginV1_1->getKeyRequest_1_1(
@@ -936,7 +888,7 @@
     Return<void> hResult;
 
     if (mPluginV1_2 != NULL) {
-        Return<void> hResult = mPluginV1_2->getProvisionRequest_1_2(
+        hResult = mPluginV1_2->getProvisionRequest_1_2(
                 toHidlString(certType), toHidlString(certAuthority),
                 [&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
                         const hidl_string& hDefaultUrl) {
@@ -944,11 +896,11 @@
                         request = toVector(hRequest);
                         defaultUrl = toString8(hDefaultUrl);
                     }
-                    err = toStatusT_1_2(status);
+                    err = toStatusT(status);
                 }
             );
     } else {
-        Return<void> hResult = mPlugin->getProvisionRequest(
+        hResult = mPlugin->getProvisionRequest(
                 toHidlString(certType), toHidlString(certAuthority),
                 [&](Status status, const hidl_vec<uint8_t>& hRequest,
                         const hidl_string& hDefaultUrl) {
@@ -1116,7 +1068,7 @@
                         *connected = toHdcpLevel(hConnected);
                         *max = toHdcpLevel(hMax);
                     }
-                    err = toStatusT_1_2(status);
+                    err = toStatusT(status);
                 });
     } else if (mPluginV1_1 != NULL) {
         hResult = mPluginV1_1->getHdcpLevels(
@@ -1511,7 +1463,7 @@
     return hResult.isOk() ? err : DEAD_OBJECT;
 }
 
-void DrmHal::reportFrameworkMetrics() const
+std::string DrmHal::reportFrameworkMetrics(const std::string& pluginMetrics) const
 {
     mediametrics_handle_t item(mediametrics_create("mediadrm"));
     mediametrics_setUid(item, mMetrics.GetAppUid());
@@ -1540,21 +1492,26 @@
     if (!b64EncodedMetrics.empty()) {
         mediametrics_setCString(item, "serialized_metrics", b64EncodedMetrics.c_str());
     }
+    if (!pluginMetrics.empty()) {
+        mediametrics_setCString(item, "plugin_metrics", pluginMetrics.c_str());
+    }
     if (!mediametrics_selfRecord(item)) {
         ALOGE("Failed to self record framework metrics");
     }
     mediametrics_delete(item);
+    return serializedMetrics;
 }
 
-void DrmHal::reportPluginMetrics() const
+std::string DrmHal::reportPluginMetrics() const
 {
     Vector<uint8_t> metricsVector;
     String8 vendor;
     String8 description;
+    std::string metricsString;
     if (getPropertyStringInternal(String8("vendor"), vendor) == OK &&
             getPropertyStringInternal(String8("description"), description) == OK &&
             getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
-        std::string metricsString = toBase64StringNoPad(metricsVector.array(),
+        metricsString = toBase64StringNoPad(metricsVector.array(),
                                                         metricsVector.size());
         status_t res = android::reportDrmPluginMetrics(metricsString, vendor,
                                                        description, mMetrics.GetAppUid());
@@ -1562,6 +1519,55 @@
             ALOGE("Metrics were retrieved but could not be reported: %d", res);
         }
     }
+    return metricsString;
+}
+
+status_t DrmHal::requiresSecureDecoder(const char *mime, bool *required) const {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return false;
+    }
+    auto hResult = mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
+    if (!hResult.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (required) {
+        *required = hResult;
+    }
+    return OK;
+}
+
+status_t DrmHal::requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+                                       bool *required) const {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return false;
+    }
+    auto hLevel = toHidlSecurityLevel(securityLevel);
+    auto hResult = mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
+    if (!hResult.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (required) {
+        *required = hResult;
+    }
+    return OK;
+}
+
+status_t DrmHal::setPlaybackId(Vector<uint8_t> const &sessionId, const char *playbackId) {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+    auto err = mPluginV1_4->setPlaybackId(toHidlVec(sessionId), hidl_string(playbackId));
+    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+}
+
+status_t DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
+    Mutex::Autolock autoLock(mLock);
+    return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
 }
 
 }  // namespace android
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index b47b4ff..5f0b26e 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -37,8 +37,8 @@
 template <> std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
     static const char *type_names[] = {"USABLE", "EXPIRED",
                                        "OUTPUT_NOT_ALLOWED", "STATUS_PENDING",
-                                       "INTERNAL_ERROR"};
-    if (((size_t)type) > arraysize(type_names)) {
+                                       "INTERNAL_ERROR", "USABLE_IN_FUTURE"};
+    if (((size_t)type) >= arraysize(type_names)) {
         return "UNKNOWN_TYPE";
     }
     return type_names[(size_t)type];
@@ -48,7 +48,7 @@
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
-    if (((size_t)type) > arraysize(type_names)) {
+    if (((size_t)type) >= arraysize(type_names)) {
         return "UNKNOWN_TYPE";
     }
     return type_names[(size_t)type];
diff --git a/drm/libmediadrm/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
index 5292705..e31395d 100644
--- a/drm/libmediadrm/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -66,7 +66,7 @@
     std::vector<MediaResourceParcel> resources;
     MediaResourceParcel resource{
             Type::kDrmSession, SubType::kUnspecifiedSubType,
-            toStdVec<int8_t>(sessionId), value};
+            toStdVec<>(sessionId), value};
     resources.push_back(resource);
     return resources;
 }
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index d85fa61..0b117a3 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -27,6 +27,8 @@
 #include <android/hardware/drm/1.2/IDrmFactory.h>
 #include <android/hardware/drm/1.3/ICryptoFactory.h>
 #include <android/hardware/drm/1.3/IDrmFactory.h>
+#include <android/hardware/drm/1.4/ICryptoFactory.h>
+#include <android/hardware/drm/1.4/IDrmFactory.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/HidlSupport.h>
 
@@ -41,6 +43,9 @@
 #include <mediadrm/ICrypto.h>
 #include <mediadrm/IDrm.h>
 
+#include <map>
+#include <string>
+
 using HServiceManager = ::android::hidl::manager::V1_2::IServiceManager;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_string;
@@ -64,20 +69,30 @@
     return obj;
 }
 
-template <typename Hal, typename V>
-void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+template <typename Hal, typename V, typename M>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories, M& instances) {
     sp<HServiceManager> serviceManager = HServiceManager::getService();
     if (serviceManager == nullptr) {
-        ALOGE("Failed to get service manager");
-        exit(-1);
+        LOG2BE("Failed to get service manager");
+        return;
     }
 
     serviceManager->listManifestByInterface(Hal::descriptor, [&](const hidl_vec<hidl_string> &registered) {
         for (const auto &instance : registered) {
             auto factory = Hal::getService(instance);
             if (factory != nullptr) {
-                ALOGI("found %s %s", Hal::descriptor, instance.c_str());
-                if (!uuid || factory->isCryptoSchemeSupported(uuid)) {
+                instances[instance.c_str()] = Hal::descriptor;
+                if (!uuid) {
+                    factories.push_back(factory);
+                    continue;
+                }
+                auto supported = factory->isCryptoSchemeSupported(uuid);
+                if (!supported.isOk()) {
+                    LOG2BE(uuid, "isCryptoSchemeSupported txn failed: %s",
+                           supported.description().c_str());
+                    continue;
+                }
+                if (supported) {
                     factories.push_back(factory);
                 }
             }
@@ -85,6 +100,12 @@
     });
 }
 
+template <typename Hal, typename V>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+    std::map<std::string, std::string> instances;
+    MakeHidlFactories<Hal>(uuid, factories, instances);
+}
+
 hidl_vec<uint8_t> toHidlVec(const void *ptr, size_t size) {
     hidl_vec<uint8_t> vec(size);
     if (ptr != nullptr) {
@@ -103,28 +124,42 @@
 sp<::V1_0::IDrmPlugin> MakeDrmPlugin(const sp<::V1_0::IDrmFactory> &factory,
                                      const uint8_t uuid[16], const char *appPackageName) {
     sp<::V1_0::IDrmPlugin> plugin;
-    factory->createPlugin(toHidlArray16(uuid), hidl_string(appPackageName),
-                          [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
-                              if (status != ::V1_0::Status::OK) {
-                                  return;
-                              }
-                              plugin = hPlugin;
-                          });
-    return plugin;
+    auto err = factory->createPlugin(
+            toHidlArray16(uuid), hidl_string(appPackageName),
+            [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
+                if (status != ::V1_0::Status::OK) {
+                    LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
+                    return;
+                }
+                plugin = hPlugin;
+            });
+    if (err.isOk()) {
+        return plugin;
+    } else {
+        LOG2BE(uuid, "MakeDrmPlugin txn failed: %s", err.description().c_str());
+        return nullptr;
+    }
 }
 
 sp<::V1_0::ICryptoPlugin> MakeCryptoPlugin(const sp<::V1_0::ICryptoFactory> &factory,
                                            const uint8_t uuid[16], const void *initData,
                                            size_t initDataSize) {
     sp<::V1_0::ICryptoPlugin> plugin;
-    factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
-                          [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
-                              if (status != ::V1_0::Status::OK) {
-                                  return;
-                              }
-                              plugin = hPlugin;
-                          });
-    return plugin;
+    auto err = factory->createPlugin(
+            toHidlArray16(uuid), toHidlVec(initData, initDataSize),
+            [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
+                if (status != ::V1_0::Status::OK) {
+                    LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
+                    return;
+                }
+                plugin = hPlugin;
+            });
+    if (err.isOk()) {
+        return plugin;
+    } else {
+        LOG2BE(uuid, "MakeCryptoPlugin txn failed: %s", err.description().c_str());
+        return nullptr;
+    }
 }
 
 } // namespace
@@ -143,10 +178,15 @@
 
 std::vector<sp<::V1_0::IDrmFactory>> MakeDrmFactories(const uint8_t uuid[16]) {
     std::vector<sp<::V1_0::IDrmFactory>> drmFactories;
-    MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories);
-    MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories);
+    std::map<std::string, std::string> instances;
+    MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories, instances);
+    MakeHidlFactories<::V1_4::IDrmFactory>(uuid, drmFactories, instances);
+    for (auto const& entry : instances) {
+        LOG2BI("found instance=%s version=%s", entry.first.c_str(), entry.second.c_str());
+    }
     return drmFactories;
 }
 
@@ -165,6 +205,7 @@
     MakeHidlFactories<::V1_1::ICryptoFactory>(uuid, cryptoFactories);
     MakeHidlFactories<::V1_2::ICryptoFactory>(uuid, cryptoFactories);
     MakeHidlFactories<::V1_3::ICryptoFactory>(uuid, cryptoFactories);
+    MakeHidlFactories<::V1_4::ICryptoFactory>(uuid, cryptoFactories);
     return cryptoFactories;
 }
 
@@ -177,5 +218,160 @@
     return plugins;
 }
 
+status_t toStatusT_1_4(::V1_4::Status status) {
+    switch (status) {
+    case ::V1_4::Status::OK:
+        return OK;
+    case ::V1_4::Status::BAD_VALUE:
+        return BAD_VALUE;
+    case ::V1_4::Status::ERROR_DRM_CANNOT_HANDLE:
+        return ERROR_DRM_CANNOT_HANDLE;
+    case ::V1_4::Status::ERROR_DRM_DECRYPT:
+        return ERROR_DRM_DECRYPT;
+    case ::V1_4::Status::ERROR_DRM_DEVICE_REVOKED:
+        return ERROR_DRM_DEVICE_REVOKED;
+    case ::V1_4::Status::ERROR_DRM_FRAME_TOO_LARGE:
+        return ERROR_DRM_FRAME_TOO_LARGE;
+    case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
+        return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
+    case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_SECURITY:
+        return ERROR_DRM_INSUFFICIENT_SECURITY;
+    case ::V1_4::Status::ERROR_DRM_INVALID_STATE:
+        return ERROR_DRM_INVALID_STATE;
+    case ::V1_4::Status::ERROR_DRM_LICENSE_EXPIRED:
+        return ERROR_DRM_LICENSE_EXPIRED;
+    case ::V1_4::Status::ERROR_DRM_NO_LICENSE:
+        return ERROR_DRM_NO_LICENSE;
+    case ::V1_4::Status::ERROR_DRM_NOT_PROVISIONED:
+        return ERROR_DRM_NOT_PROVISIONED;
+    case ::V1_4::Status::ERROR_DRM_RESOURCE_BUSY:
+        return ERROR_DRM_RESOURCE_BUSY;
+    case ::V1_4::Status::ERROR_DRM_RESOURCE_CONTENTION:
+        return ERROR_DRM_RESOURCE_CONTENTION;
+    case ::V1_4::Status::ERROR_DRM_SESSION_LOST_STATE:
+        return ERROR_DRM_SESSION_LOST_STATE;
+    case ::V1_4::Status::ERROR_DRM_SESSION_NOT_OPENED:
+        return ERROR_DRM_SESSION_NOT_OPENED;
+
+    // New in S / drm@1.4:
+    case ::V1_4::Status::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
+        return ERROR_DRM_ZERO_SUBSAMPLES;
+    case ::V1_4::Status::CRYPTO_LIBRARY_ERROR:
+        return ERROR_DRM_CRYPTO_LIBRARY;
+    case ::V1_4::Status::GENERAL_OEM_ERROR:
+        return ERROR_DRM_GENERIC_OEM;
+    case ::V1_4::Status::GENERAL_PLUGIN_ERROR:
+        return ERROR_DRM_GENERIC_PLUGIN;
+    case ::V1_4::Status::INIT_DATA_INVALID:
+        return ERROR_DRM_INIT_DATA;
+    case ::V1_4::Status::KEY_NOT_LOADED:
+        return ERROR_DRM_KEY_NOT_LOADED;
+    case ::V1_4::Status::LICENSE_PARSE_ERROR:
+        return ERROR_DRM_LICENSE_PARSE;
+    case ::V1_4::Status::LICENSE_POLICY_ERROR:
+        return ERROR_DRM_LICENSE_POLICY;
+    case ::V1_4::Status::LICENSE_RELEASE_ERROR:
+        return ERROR_DRM_LICENSE_RELEASE;
+    case ::V1_4::Status::LICENSE_REQUEST_REJECTED:
+        return ERROR_DRM_LICENSE_REQUEST_REJECTED;
+    case ::V1_4::Status::LICENSE_RESTORE_ERROR:
+        return ERROR_DRM_LICENSE_RESTORE;
+    case ::V1_4::Status::LICENSE_STATE_ERROR:
+        return ERROR_DRM_LICENSE_STATE;
+    case ::V1_4::Status::MALFORMED_CERTIFICATE:
+        return ERROR_DRM_CERTIFICATE_MALFORMED;
+    case ::V1_4::Status::MEDIA_FRAMEWORK_ERROR:
+        return ERROR_DRM_MEDIA_FRAMEWORK;
+    case ::V1_4::Status::MISSING_CERTIFICATE:
+        return ERROR_DRM_CERTIFICATE_MISSING;
+    case ::V1_4::Status::PROVISIONING_CERTIFICATE_ERROR:
+        return ERROR_DRM_PROVISIONING_CERTIFICATE;
+    case ::V1_4::Status::PROVISIONING_CONFIGURATION_ERROR:
+        return ERROR_DRM_PROVISIONING_CONFIG;
+    case ::V1_4::Status::PROVISIONING_PARSE_ERROR:
+        return ERROR_DRM_PROVISIONING_PARSE;
+    case ::V1_4::Status::PROVISIONING_REQUEST_REJECTED:
+        return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
+    case ::V1_4::Status::RETRYABLE_PROVISIONING_ERROR:
+        return ERROR_DRM_PROVISIONING_RETRY;
+    case ::V1_4::Status::SECURE_STOP_RELEASE_ERROR:
+        return ERROR_DRM_SECURE_STOP_RELEASE;
+    case ::V1_4::Status::STORAGE_READ_FAILURE:
+        return ERROR_DRM_STORAGE_READ;
+    case ::V1_4::Status::STORAGE_WRITE_FAILURE:
+        return ERROR_DRM_STORAGE_WRITE;
+
+    case ::V1_4::Status::ERROR_DRM_UNKNOWN:
+    default:
+        return ERROR_DRM_UNKNOWN;
+    }
+    return ERROR_DRM_UNKNOWN;
+}
+
+namespace {
+char logPriorityToChar(::V1_4::LogPriority priority) {
+    char p = 'U';
+    switch (priority) {
+        case ::V1_4::LogPriority::VERBOSE:  p = 'V'; break;
+        case ::V1_4::LogPriority::DEBUG:    p = 'D'; break;
+        case ::V1_4::LogPriority::INFO:     p = 'I'; break;
+        case ::V1_4::LogPriority::WARN:     p = 'W'; break;
+        case ::V1_4::LogPriority::ERROR:    p = 'E'; break;
+        case ::V1_4::LogPriority::FATAL:    p = 'F'; break;
+        default: p = 'U'; break;
+    }
+    return p;
+}
+}  // namespace
+
+std::string GetExceptionMessage(status_t err, const char *msg,
+                                const Vector<::V1_4::LogMessage> &logs) {
+    std::string ruler("==============================");
+    std::string header("Beginning of DRM Plugin Log");
+    std::string footer("End of DRM Plugin Log");
+    String8 msg8;
+    if (msg) {
+        msg8 += msg;
+        msg8 += ": ";
+    }
+    auto errStr = StrCryptoError(err);
+    msg8 += errStr.c_str();
+    msg8 += String8::format("\n%s %s %s", ruler.c_str(), header.c_str(), ruler.c_str());
+
+    for (auto log : logs) {
+        time_t seconds = log.timeMs / 1000;
+        int ms = log.timeMs % 1000;
+        char buf[64] = {0};
+        std::string timeStr = "00-00 00:00:00";
+        if (strftime(buf, sizeof buf, "%m-%d %H:%M:%S", std::localtime(&seconds))) {
+            timeStr = buf;
+        }
+
+        char p = logPriorityToChar(log.priority);
+        msg8 += String8::format("\n  %s.%03d %c %s", timeStr.c_str(), ms, p, log.message.c_str());
+    }
+
+    msg8 += String8::format("\n%s %s %s", ruler.c_str(), footer.c_str(), ruler.c_str());
+    return msg8.c_str();
+}
+
+void LogBuffer::addLog(const ::V1_4::LogMessage &log) {
+    std::unique_lock<std::mutex> lock(mMutex);
+    mBuffer.push_back(log);
+    while (mBuffer.size() > MAX_CAPACITY) {
+        mBuffer.pop_front();
+    }
+}
+
+Vector<::V1_4::LogMessage> LogBuffer::getLogs() {
+    std::unique_lock<std::mutex> lock(mMutex);
+    Vector<::V1_4::LogMessage> logs;
+    for (auto log : mBuffer) {
+        logs.push_back(log);
+    }
+    return logs;
+}
+
+LogBuffer gLogBuf;
 }  // namespace DrmUtils
 }  // namespace android
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
new file mode 100644
index 0000000..7281066
--- /dev/null
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -0,0 +1,69 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "mediadrm_fuzzer",
+    srcs: [
+        "mediadrm_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libmediadrm",
+        "liblog",
+        "resourcemanager_aidl_interface-ndk_platform",
+    ],
+    header_libs: [
+        "libmedia_headers",
+        "libmediadrm_headers",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libutils",
+        "libbinder_ndk",
+        "libcutils",
+        "libdl",
+        "libmedia",
+        "libmediadrmmetrics_lite",
+        "libmediametrics#1",
+        "libmediautils",
+        "libstagefright_foundation",
+        "android.hardware.drm@1.0",
+        "android.hardware.drm@1.1",
+        "android.hardware.drm@1.2",
+        "android.hardware.drm@1.3",
+        "android.hardware.drm@1.4",
+        "libhidlallocatorutils",
+        "libhidlbase",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/drm/libmediadrm/fuzzer/README.md b/drm/libmediadrm/fuzzer/README.md
new file mode 100644
index 0000000..3c95cd7
--- /dev/null
+++ b/drm/libmediadrm/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libmediadrm
+
+## Plugin Design Considerations
+The fuzzer plugin for libmediadrm is designed based on an understanding of the
+library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libmediadrm supports the following parameters:
+1. Security Level (parameter name: `securityLevel`)
+2. Mime Type (parameter name: `mimeType`)
+3. Key Type (parameter name: `keyType`)
+4. Crypto Mode (parameter name: `cryptoMode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `securityLevel` | 0.`DrmPlugin::kSecurityLevelUnknown` 1.`DrmPlugin::kSecurityLevelMax` 2.`DrmPlugin::kSecurityLevelSwSecureCrypto` 3.`DrmPlugin::kSecurityLevelSwSecureDecode`  4.`DrmPlugin::kSecurityLevelHwSecureCrypto` 5.`DrmPlugin::kSecurityLevelHwSecureDecode` 6.`DrmPlugin::kSecurityLevelHwSecureAll`| Value obtained from FuzzedDataProvider in the range 0 to 6|
+| `mimeType` | 0.`video/mp4` 1.`video/mpeg` 2.`video/x-flv` 3.`video/mj2` 4.`video/3gp2` 5.`video/3gpp` 6.`video/3gpp2` 7.`audio/mp4` 8.`audio/mpeg` 9.`audio/aac` 10.`audio/3gp2` 11.`audio/3gpp` 12.`audio/3gpp2` 13.`video/unknown` 14.`audio/unknown`| Value obtained from FuzzedDataProvider in the range 0 to 14|
+| `keyType` | 0.`DrmPlugin::kKeyType_Offline` 1.`DrmPlugin::kKeyType_Streaming` 2.`DrmPlugin::kKeyType_Release` | Value obtained from FuzzedDataProvider in the range 0 to 2|
+| `cryptoMode` | 0.`CryptoPlugin::kMode_Unencrypted` 1.`CryptoPlugin::kMode_AES_CTR` 2.`CryptoPlugin::kMode_AES_WV` 3.`CryptoPlugin::kMode_AES_CBC` | Value obtained from FuzzedDataProvider in the range 0 to 3|
+
+This also ensures that the plugin is always deterministic for any given input.
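+
+For instance, the `securityLevel` parameter above is selected roughly as in the
+sketch below (a simplified excerpt of the fuzzer source later in this change;
+`fdp` and `sessionId` are illustrative local names):
+```
+FuzzedDataProvider fdp(data, size);
+// Either exercise error paths with an arbitrary value, or pick one of the
+// documented levels so that valid configurations are also covered.
+DrmPlugin::SecurityLevel level = fdp.ConsumeBool()
+        ? static_cast<DrmPlugin::SecurityLevel>(fdp.ConsumeIntegral<size_t>())
+        : kSecurityLevel[fdp.ConsumeIntegralInRange<size_t>(0, kNumSecurityLevel - 1)];
+Vector<uint8_t> sessionId;
+mDrm->openSession(level, sessionId);
+```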
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the drm module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
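+
+In practice the raw fuzzer buffer is copied verbatim into the byte-array
+arguments of the DRM APIs, for example (a sketch; variable names mirror the
+fuzzer source below):
+```
+// 'data' and 'size' are the unmodified bytes handed in by libFuzzer.
+Vector<uint8_t> initData;
+initData.appendArray(data, size);
+mDrm->getKeyRequest(mSessionId, initData, mimeType, keyType, optionalParameters,
+                    keyRequest, defaultUrl, &keyRequestType);
+```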
+
+## Build
+
+This describes the steps to build the mediadrm_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mediadrm_fuzzer
+```
+#### Steps to run
+Create a directory CORPUS_DIR
+```
+  $ adb shell mkdir CORPUS_DIR
+```
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/mediadrm_fuzzer/mediadrm_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
new file mode 100644
index 0000000..8df0477
--- /dev/null
+++ b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
@@ -0,0 +1,458 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <binder/MemoryDealer.h>
+#include <hidlmemory/FrameworkUtils.h>
+#include <mediadrm/CryptoHal.h>
+#include <mediadrm/DrmHal.h>
+#include <utils/String8.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+#define AES_BLOCK_SIZE 16
+#define UNUSED_PARAM __attribute__((unused))
+
+using namespace std;
+using namespace android;
+using android::hardware::fromHeap;
+using ::android::os::PersistableBundle;
+using drm::V1_0::BufferType;
+
+enum {
+    INVALID_UUID = 0,
+    PSSH_BOX_UUID,
+    CLEARKEY_UUID,
+};
+
+static const uint8_t kInvalidUUID[16] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80,
+                                         0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80};
+
+static const uint8_t kCommonPsshBoxUUID[16] = {0x10, 0x77, 0xEF, 0xEC, 0xC0, 0xB2, 0x4D, 0x02,
+                                               0xAC, 0xE3, 0x3C, 0x1E, 0x52, 0xE2, 0xFB, 0x4B};
+
+static const uint8_t kClearKeyUUID[16] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85, 0xB3, 0xC9,
+                                          0x78, 0x1A, 0xB0, 0x30, 0xAF, 0x78, 0xD3, 0x0E};
+
+static const uint32_t kUUID[] = {INVALID_UUID, PSSH_BOX_UUID, CLEARKEY_UUID};
+
+const DrmPlugin::SecurityLevel kSecurityLevel[] = {
+    DrmPlugin::kSecurityLevelUnknown,        DrmPlugin::kSecurityLevelMax,
+    DrmPlugin::kSecurityLevelSwSecureCrypto, DrmPlugin::kSecurityLevelSwSecureDecode,
+    DrmPlugin::kSecurityLevelHwSecureCrypto, DrmPlugin::kSecurityLevelHwSecureDecode,
+    DrmPlugin::kSecurityLevelHwSecureAll};
+
+const char *kMimeType[] = {"video/mp4",   "video/mpeg",    "video/x-flv",  "video/mj2",
+                           "video/3gp2",  "video/3gpp",    "video/3gpp2",  "audio/mp4",
+                           "audio/mpeg",  "audio/aac",     "audio/3gp2",   "audio/3gpp",
+                           "audio/3gpp2", "video/unknown", "audio/unknown"};
+
+const DrmPlugin::KeyType kKeyType[] = {DrmPlugin::kKeyType_Offline, DrmPlugin::kKeyType_Streaming,
+                                       DrmPlugin::kKeyType_Release};
+
+const CryptoPlugin::Mode kCryptoMode[] = {CryptoPlugin::kMode_Unencrypted,
+                                          CryptoPlugin::kMode_AES_CTR, CryptoPlugin::kMode_AES_WV,
+                                          CryptoPlugin::kMode_AES_CBC};
+
+const char *kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
+const char *kMacAlgorithm[] = {"HmacSHA256", ""};
+const char *kRSAAlgorithm[] = {"RSASSA-PSS-SHA1", ""};
+const size_t kNumSecurityLevel = size(kSecurityLevel);
+const size_t kNumMimeType = size(kMimeType);
+const size_t kNumKeyType = size(kKeyType);
+const size_t kNumCryptoMode = size(kCryptoMode);
+const size_t kNumUUID = size(kUUID);
+const size_t kMaxStringLength = 100;
+const size_t kMaxSubSamples = 10;
+const size_t kMaxNumBytes = 1000;
+
+struct DrmListener : virtual public IDrmClient {
+   public:
+    void sendEvent(DrmPlugin::EventType eventType UNUSED_PARAM,
+                   const hardware::hidl_vec<uint8_t> &sessionId UNUSED_PARAM,
+                   const hardware::hidl_vec<uint8_t> &data UNUSED_PARAM) override {}
+
+    void sendExpirationUpdate(const hardware::hidl_vec<uint8_t> &sessionId UNUSED_PARAM,
+                              int64_t expiryTimeInMS UNUSED_PARAM) override {}
+
+    void sendKeysChange(const hardware::hidl_vec<uint8_t> &sessionId UNUSED_PARAM,
+                        const std::vector<DrmKeyStatus> &keyStatusList UNUSED_PARAM,
+                        bool hasNewUsableKey UNUSED_PARAM) override {}
+
+    void sendSessionLostState(const hardware::hidl_vec<uint8_t> &) override {}
+    DrmListener() {}
+
+   private:
+    DISALLOW_EVIL_CONSTRUCTORS(DrmListener);
+};
+
+class DrmFuzzer {
+   public:
+    void process(const uint8_t *data, size_t size);
+
+   private:
+    void invokeDrm(const uint8_t *data, size_t size);
+    bool initDrm();
+    void invokeDrmCreatePlugin();
+    void invokeDrmOpenSession();
+    void invokeDrmSetListener();
+    void invokeDrmSetAlgorithmAPI();
+    void invokeDrmPropertyAPI();
+    void invokeDrmDecryptEncryptAPI(const uint8_t *data, size_t size);
+    void invokeDrmSecureStopAPI();
+    void invokeDrmOfflineLicenseAPI();
+    void invokeDrmCloseSession();
+    void invokeDrmDestroyPlugin();
+    void invokeCrypto(const uint8_t *data);
+    bool initCrypto();
+    void invokeCryptoCreatePlugin();
+    void invokeCryptoDecrypt(const uint8_t *data);
+    void invokeCryptoDestroyPlugin();
+    sp<DrmHal> mDrm = nullptr;
+    sp<CryptoHal> mCrypto = nullptr;
+    Vector<uint8_t> mSessionId = {};
+    FuzzedDataProvider *mFuzzedDataProvider = nullptr;
+};
+
+bool DrmFuzzer::initDrm() {
+    mDrm = new DrmHal();
+    if (!mDrm) {
+        return false;
+    }
+    return true;
+}
+
+void DrmFuzzer::invokeDrmCreatePlugin() {
+    mDrm->initCheck();
+    String8 packageName(mFuzzedDataProvider->ConsumeRandomLengthString(kMaxStringLength).c_str());
+    uint32_t uuidEnum = kUUID[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumUUID - 1)];
+    switch (uuidEnum) {
+        case INVALID_UUID:
+            mDrm->createPlugin(kInvalidUUID, packageName);
+            break;
+        case PSSH_BOX_UUID:
+            mDrm->createPlugin(kCommonPsshBoxUUID, packageName);
+            break;
+        case CLEARKEY_UUID:
+            mDrm->createPlugin(kClearKeyUUID, packageName);
+            break;
+        default:
+            break;
+    }
+}
+
+void DrmFuzzer::invokeDrmDestroyPlugin() { mDrm->destroyPlugin(); }
+
+void DrmFuzzer::invokeDrmOpenSession() {
+    DrmPlugin::SecurityLevel securityLevel;
+    bool shouldPassRandomSecurityLevel = mFuzzedDataProvider->ConsumeBool();
+    if (shouldPassRandomSecurityLevel) {
+        securityLevel =
+            static_cast<DrmPlugin::SecurityLevel>(mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        securityLevel = kSecurityLevel[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+            0, kNumSecurityLevel - 1)];
+    }
+    mDrm->openSession(securityLevel, mSessionId);
+}
+
+void DrmFuzzer::invokeDrmCloseSession() { mDrm->closeSession(mSessionId); }
+
+void DrmFuzzer::invokeDrmSetListener() {
+    sp<DrmListener> listener = new DrmListener();
+    mDrm->setListener(listener);
+}
+
+void DrmFuzzer::invokeDrmSetAlgorithmAPI() {
+    mDrm->setCipherAlgorithm(mSessionId,
+                             String8(kCipherAlgorithm[mFuzzedDataProvider->ConsumeBool()]));
+    mDrm->setMacAlgorithm(mSessionId, String8(kMacAlgorithm[mFuzzedDataProvider->ConsumeBool()]));
+}
+
+void DrmFuzzer::invokeDrmPropertyAPI() {
+    mDrm->setPropertyString(String8("property"), String8("value"));
+    String8 stringValue;
+    mDrm->getPropertyString(String8("property"), stringValue);
+    Vector<uint8_t> value = {};
+    mDrm->setPropertyByteArray(String8("property"), value);
+    Vector<uint8_t> byteValue;
+    mDrm->getPropertyByteArray(String8("property"), byteValue);
+}
+
+void DrmFuzzer::invokeDrmDecryptEncryptAPI(const uint8_t *data, size_t size) {
+    uint32_t openSessions = 0;
+    uint32_t maxSessions = 0;
+    mDrm->getNumberOfSessions(&openSessions, &maxSessions);
+
+    DrmPlugin::HdcpLevel connected;
+    DrmPlugin::HdcpLevel max;
+    mDrm->getHdcpLevels(&connected, &max);
+
+    DrmPlugin::SecurityLevel securityLevel;
+    mDrm->getSecurityLevel(mSessionId, &securityLevel);
+
+    // isCryptoSchemeSupported() shall fill isSupported
+    bool isSupported;
+    String8 mimeType(
+        kMimeType[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumMimeType - 1)]);
+    mDrm->isCryptoSchemeSupported(kClearKeyUUID, mimeType, securityLevel, &isSupported);
+
+    // getProvisionRequest() shall fill legacyRequest and legacyDefaultUrl
+    String8 certificateType(
+        mFuzzedDataProvider->ConsumeRandomLengthString(kMaxStringLength).c_str());
+    String8 certAuthority(mFuzzedDataProvider->ConsumeRandomLengthString(kMaxStringLength).c_str());
+    Vector<uint8_t> legacyRequest = {};
+    String8 legacyDefaultUrl;
+    mDrm->getProvisionRequest(certificateType, certAuthority, legacyRequest, legacyDefaultUrl);
+
+    // provideProvisionResponse() shall fill certificate and wrappedKey
+    Vector<uint8_t> provisionResponse = {};
+    Vector<uint8_t> certificate = {};
+    Vector<uint8_t> wrappedKey = {};
+    mDrm->provideProvisionResponse(provisionResponse, certificate, wrappedKey);
+
+    // getKeyRequest() shall fill keyRequest, defaultUrl and keyRequestType
+    Vector<uint8_t> initData = {};
+    initData.appendArray(data, size);
+    DrmPlugin::KeyType keyType;
+    bool shouldPassRandomKeyType = mFuzzedDataProvider->ConsumeBool();
+    if (shouldPassRandomKeyType) {
+        keyType = static_cast<DrmPlugin::KeyType>(mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        keyType = kKeyType[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumKeyType - 1)];
+    }
+    KeyedVector<String8, String8> mdOptionalParameters = {};
+    Vector<uint8_t> keyRequest = {};
+    String8 defaultUrl;
+    DrmPlugin::KeyRequestType keyRequestType;
+    mDrm->getKeyRequest(mSessionId, initData, mimeType, keyType, mdOptionalParameters, keyRequest,
+                        defaultUrl, &keyRequestType);
+
+    // provideKeyResponse() shall fill keySetId
+    Vector<uint8_t> keyResponse = {};
+    keyResponse.appendArray(data, size);
+    Vector<uint8_t> keySetId = {};
+    mDrm->provideKeyResponse(mSessionId, keyResponse, keySetId);
+
+    // restoreKeys
+    mDrm->restoreKeys(mSessionId, keySetId);
+
+    // queryKeyStatus() shall fill infoMap
+    KeyedVector<String8, String8> infoMap = {};
+    mDrm->queryKeyStatus(mSessionId, infoMap);
+
+    // decrypt() shall fill outputVec
+    Vector<uint8_t> keyIdVec = {};
+    keyIdVec.appendArray(data, size);
+
+    Vector<uint8_t> inputVec = {};
+    inputVec.appendArray(data, size);
+
+    Vector<uint8_t> ivVec = {};
+    ivVec.appendArray(data, size);
+
+    Vector<uint8_t> outputVec = {};
+    mDrm->decrypt(mSessionId, keyIdVec, inputVec, ivVec, outputVec);
+
+    // encrypt() shall fill outputVec
+    mDrm->encrypt(mSessionId, keyIdVec, inputVec, ivVec, outputVec);
+
+    // sign() shall fill signature
+    Vector<uint8_t> message = {};
+    message.appendArray(data, size);
+    Vector<uint8_t> signature = {};
+    mDrm->sign(mSessionId, keyIdVec, message, signature);
+
+    // verify() shall fill match
+    bool match;
+    mDrm->verify(mSessionId, keyIdVec, message, signature, match);
+
+    // signRSA() shall fill signature
+    mDrm->signRSA(mSessionId, String8(kRSAAlgorithm[mFuzzedDataProvider->ConsumeBool()]), message,
+                  wrappedKey, signature);
+
+    mDrm->removeKeys(mSessionId);
+}
+
+void DrmFuzzer::invokeDrmSecureStopAPI() {
+    // getSecureStops() shall fill secureStops
+    List<Vector<uint8_t>> secureStops = {};
+    mDrm->getSecureStops(secureStops);
+
+    // getSecureStopIds() shall fill secureStopIds
+    List<Vector<uint8_t>> secureStopIds = {};
+    mDrm->getSecureStopIds(secureStopIds);
+
+    // getSecureStop() shall fill secureStop
+    Vector<uint8_t> ssid = {};
+    Vector<uint8_t> secureStop = {};
+    mDrm->getSecureStop(ssid, secureStop);
+
+    mDrm->removeSecureStop(ssid);
+
+    mDrm->releaseSecureStops(ssid);
+
+    mDrm->removeAllSecureStops();
+}
+
+void DrmFuzzer::invokeDrmOfflineLicenseAPI() {
+    // getOfflineLicenseKeySetIds() shall fill keySetIds
+    List<Vector<uint8_t>> keySetIds = {};
+    mDrm->getOfflineLicenseKeySetIds(keySetIds);
+
+    // getOfflineLicenseState() shall fill state
+    Vector<uint8_t> const keySetIdOfflineLicense = {};
+    DrmPlugin::OfflineLicenseState state;
+    mDrm->getOfflineLicenseState(keySetIdOfflineLicense, &state);
+
+    mDrm->removeOfflineLicense(keySetIdOfflineLicense);
+}
+
+bool DrmFuzzer::initCrypto() {
+    mCrypto = new CryptoHal();
+    if (!mCrypto) {
+        return false;
+    }
+    return true;
+}
+
+void DrmFuzzer::invokeCryptoCreatePlugin() {
+    mCrypto->initCheck();
+
+    mCrypto->isCryptoSchemeSupported(kClearKeyUUID);
+    mCrypto->createPlugin(kClearKeyUUID, NULL, 0);
+}
+
+void DrmFuzzer::invokeCryptoDestroyPlugin() { mCrypto->destroyPlugin(); }
+
+void DrmFuzzer::invokeCryptoDecrypt(const uint8_t *data) {
+    mCrypto->requiresSecureDecoderComponent(
+        kMimeType[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumMimeType - 1)]);
+
+    uint32_t width = mFuzzedDataProvider->ConsumeIntegral<uint32_t>();
+    uint32_t height = mFuzzedDataProvider->ConsumeIntegral<uint32_t>();
+    mCrypto->notifyResolution(width, height);
+
+    mCrypto->setMediaDrmSession(mSessionId);
+
+    const CryptoPlugin::Pattern pattern = {0, 0};
+
+    size_t totalSize = 0;
+    size_t numSubSamples = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(1, kMaxSubSamples);
+
+    CryptoPlugin::SubSample subSamples[numSubSamples];
+
+    for (size_t i = 0; i < numSubSamples; ++i) {
+        uint32_t clearBytes =
+            mFuzzedDataProvider->ConsumeIntegralInRange<uint32_t>(1, kMaxNumBytes);
+        uint32_t encryptedBytes =
+            mFuzzedDataProvider->ConsumeIntegralInRange<uint32_t>(1, kMaxNumBytes);
+        subSamples[i].mNumBytesOfClearData = clearBytes;
+        subSamples[i].mNumBytesOfEncryptedData = encryptedBytes;
+        totalSize += subSamples[i].mNumBytesOfClearData;
+        totalSize += subSamples[i].mNumBytesOfEncryptedData;
+    }
+
+    size_t heapSize = totalSize * 2;
+    sp<MemoryDealer> dealer = new MemoryDealer(heapSize, "DrmFuzzerMemory");
+    if (!dealer) {
+        return;
+    }
+
+    sp<HidlMemory> heap = fromHeap(dealer->getMemoryHeap());
+    if (!heap) {
+        return;
+    }
+    int heapSeqNum = mCrypto->setHeap(heap);
+    if (heapSeqNum < 0) {
+        return;
+    }
+
+    const size_t segmentIndex = 0;
+    uint8_t keyId[AES_BLOCK_SIZE];
+    memcpy(keyId, data, AES_BLOCK_SIZE);
+
+    uint8_t iv[AES_BLOCK_SIZE];
+    memset(iv, 0, AES_BLOCK_SIZE);
+
+    const SharedBuffer sourceBuffer = {.bufferId = segmentIndex, .offset = 0, .size = totalSize};
+
+    const DestinationBuffer destBuffer = {
+        .type = BufferType::SHARED_MEMORY,
+        {.bufferId = segmentIndex, .offset = totalSize, .size = totalSize},
+        .secureMemory = nullptr};
+
+    const uint64_t offset = 0;
+    AString *errorDetailMsg = nullptr;
+    CryptoPlugin::Mode mode;
+    bool shouldPassRandomCryptoMode = mFuzzedDataProvider->ConsumeBool();
+    if (shouldPassRandomCryptoMode) {
+        mode = static_cast<CryptoPlugin::Mode>(mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        mode =
+            kCryptoMode[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumCryptoMode - 1)];
+    }
+    mCrypto->decrypt(keyId, iv, mode, pattern, sourceBuffer, offset, subSamples, numSubSamples,
+                     destBuffer, errorDetailMsg);
+
+    if (heapSeqNum >= 0) {
+        mCrypto->unsetHeap(heapSeqNum);
+    }
+    heap.clear();
+}
+
+void DrmFuzzer::invokeDrm(const uint8_t *data, size_t size) {
+    if (!initDrm()) {
+        return;
+    }
+    invokeDrmCreatePlugin();
+    invokeDrmOpenSession();
+    invokeDrmSetAlgorithmAPI();
+    invokeDrmSetListener();
+    invokeDrmPropertyAPI();
+    invokeDrmDecryptEncryptAPI(data, size);
+    invokeDrmSecureStopAPI();
+    invokeDrmOfflineLicenseAPI();
+    invokeDrmCloseSession();
+    invokeDrmDestroyPlugin();
+}
+
+void DrmFuzzer::invokeCrypto(const uint8_t *data) {
+    if (!initCrypto()) {
+        return;
+    }
+    invokeCryptoCreatePlugin();
+    invokeCryptoDecrypt(data);
+    invokeCryptoDestroyPlugin();
+}
+
+void DrmFuzzer::process(const uint8_t *data, size_t size) {
+    mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+    invokeDrm(data, size);
+    invokeCrypto(data);
+    delete mFuzzedDataProvider;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < AES_BLOCK_SIZE) {
+        return 0;
+    }
+    DrmFuzzer drmFuzzer;
+    drmFuzzer.process(data, size);
+    return 0;
+}
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHal.h b/drm/libmediadrm/include/mediadrm/CryptoHal.h
index c9fda67..5fd39e6 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHal.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHal.h
@@ -22,6 +22,7 @@
 #include <android/hardware/drm/1.0/ICryptoPlugin.h>
 #include <android/hardware/drm/1.1/ICryptoFactory.h>
 #include <android/hardware/drm/1.2/ICryptoPlugin.h>
+#include <android/hardware/drm/1.4/ICryptoPlugin.h>
 
 #include <mediadrm/ICrypto.h>
 #include <utils/KeyedVector.h>
@@ -71,6 +72,8 @@
     }
     virtual void unsetHeap(int32_t seqNum) { clearHeapBase(seqNum); }
 
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+
 private:
     mutable Mutex mLock;
 
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index 3b4639b..7eb1dec 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -25,7 +25,10 @@
 #include <android/hardware/drm/1.2/IDrmFactory.h>
 #include <android/hardware/drm/1.2/IDrmPlugin.h>
 #include <android/hardware/drm/1.2/IDrmPluginListener.h>
+#include <android/hardware/drm/1.4/IDrmPlugin.h>
+#include <android/hardware/drm/1.4/types.h>
 
+#include <media/drm/DrmAPI.h>
 #include <mediadrm/DrmMetrics.h>
 #include <mediadrm/DrmSessionManager.h>
 #include <mediadrm/IDrm.h>
@@ -176,6 +179,17 @@
 
     virtual status_t setListener(const sp<IDrmClient>& listener);
 
+    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
+
+    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool *required) const;
+
+    virtual status_t setPlaybackId(
+            Vector<uint8_t> const &sessionId,
+            const char *playbackId);
+
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+
     // Methods of IDrmPluginListener
     Return<void> sendEvent(EventType eventType,
             const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data);
@@ -202,6 +216,7 @@
     sp<IDrmPlugin> mPlugin;
     sp<drm::V1_1::IDrmPlugin> mPluginV1_1;
     sp<drm::V1_2::IDrmPlugin> mPluginV1_2;
+    sp<drm::V1_4::IDrmPlugin> mPluginV1_4;
     String8 mAppPackageName;
 
     // Mutable to allow modification within GetPropertyByteArray.
@@ -225,8 +240,8 @@
 
     void writeByteArray(Parcel &obj, const hidl_vec<uint8_t>& array);
 
-    void reportPluginMetrics() const;
-    void reportFrameworkMetrics() const;
+    std::string reportPluginMetrics() const;
+    std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
     status_t getPropertyStringInternal(String8 const &name, String8 &value) const;
     status_t getPropertyByteArrayInternal(String8 const &name,
                                           Vector<uint8_t> &value) const;
diff --git a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
index 9e43504..c56bf01 100644
--- a/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
+++ b/drm/libmediadrm/include/mediadrm/DrmSessionManager.h
@@ -62,7 +62,7 @@
     void removeSession(const Vector<uint8_t>& sessionId);
     bool reclaimSession(int callingPid);
 
-    // sanity check APIs
+    // inspection APIs
     size_t getSessionCount() const;
     bool containsSession(const Vector<uint8_t>& sessionId) const;
 
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index 0177c24..a88784d 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -24,6 +24,15 @@
 #define ANDROID_IDRM_H_
 
 namespace android {
+namespace hardware {
+namespace drm {
+namespace V1_4 {
+struct LogMessage;
+}  // namespace V1_4
+}  // namespace drm
+}  // namespace hardware
+
+namespace drm = ::android::hardware::drm;
 
 struct AString;
 
@@ -145,6 +154,17 @@
 
     virtual status_t setListener(const sp<IDrmClient>& listener) = 0;
 
+    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const = 0;
+
+    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool *required) const = 0;
+
+    virtual status_t setPlaybackId(
+            Vector<uint8_t> const &sessionId,
+            const char *playbackId) = 0;
+
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
+
 protected:
     IDrm() {}
 
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 20b3fe9..ec0b878 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -19,11 +19,30 @@
 
 #include <android/hardware/drm/1.0/ICryptoFactory.h>
 #include <android/hardware/drm/1.0/IDrmFactory.h>
+#include <android/hardware/drm/1.4/IDrmPlugin.h>
+#include <android/hardware/drm/1.4/types.h>
+#include <media/stagefright/MediaErrors.h>
 #include <utils/Errors.h>  // for status_t
+#include <utils/Log.h>
+#include <utils/String8.h>
 #include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+#include <algorithm>
+#include <chrono>
+#include <cstddef>
+#include <cstdint>
+#include <ctime>
+#include <deque>
+#include <endian.h>
+#include <iterator>
+#include <mutex>
+#include <string>
 #include <vector>
 
+
 using namespace ::android::hardware::drm;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
 
 namespace android {
 
@@ -32,6 +51,61 @@
 
 namespace DrmUtils {
 
+// Log APIs
+class LogBuffer {
+  public:
+    static const int MAX_CAPACITY = 100;
+    void addLog(const ::V1_4::LogMessage &log);
+    Vector<::V1_4::LogMessage> getLogs();
+
+  private:
+    std::deque<::V1_4::LogMessage> mBuffer;
+    std::mutex mMutex;
+};
+
+extern LogBuffer gLogBuf;
+
+static inline int formatBuffer(char *buf, size_t size, const char *msg) {
+    return snprintf(buf, size, "%s", msg);
+}
+
+template <typename First, typename... Args>
+static inline int formatBuffer(char *buf, size_t size, const char *fmt, First first, Args... args) {
+    return snprintf(buf, size, fmt, first, args...);
+}
+
+template <typename... Args>
+void LogToBuffer(android_LogPriority level, const char *fmt, Args... args) {
+    const int LOG_BUF_SIZE = 256;
+    char buf[LOG_BUF_SIZE];
+    int len = formatBuffer(buf, LOG_BUF_SIZE, fmt, args...);
+    if (len <= 0) {
+        return;
+    }
+    __android_log_write(level, LOG_TAG, buf);
+    if (level >= ANDROID_LOG_INFO) {
+        int64_t epochTimeMs =
+                std::chrono::system_clock::now().time_since_epoch() / std::chrono::milliseconds(1);
+        gLogBuf.addLog({epochTimeMs, static_cast<::V1_4::LogPriority>(level), buf});
+    }
+}
+
+template <typename... Args>
+void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
+    const uint64_t* uuid2 = reinterpret_cast<const uint64_t*>(uuid);
+    std::string uuidFmt("uuid=[%lx %lx] ");
+    uuidFmt += fmt;
+    LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+}
+
+#ifndef LOG2BE
+#define LOG2BE(...) LogToBuffer(ANDROID_LOG_ERROR, __VA_ARGS__)
+#define LOG2BW(...) LogToBuffer(ANDROID_LOG_WARN, __VA_ARGS__)
+#define LOG2BI(...) LogToBuffer(ANDROID_LOG_INFO, __VA_ARGS__)
+#define LOG2BD(...) LogToBuffer(ANDROID_LOG_DEBUG, __VA_ARGS__)
+#define LOG2BV(...) LogToBuffer(ANDROID_LOG_VERBOSE, __VA_ARGS__)
+#endif
+
 bool UseDrmService();
 
 sp<IDrm> MakeDrm(status_t *pstatus = nullptr);
@@ -91,8 +165,74 @@
 std::vector<sp<::V1_0::ICryptoPlugin>> MakeCryptoPlugins(const uint8_t uuid[16],
                                                          const void *initData, size_t initDataSize);
 
+status_t toStatusT_1_4(::V1_4::Status status);
+
+template<typename S>
+inline status_t toStatusT(S status) {
+    auto err = static_cast<::V1_4::Status>(status);
+    return toStatusT_1_4(err);
+}
+
+template<typename T>
+inline status_t toStatusT(const android::hardware::Return<T> &status) {
+    auto t = static_cast<T>(status);
+    auto err = static_cast<::V1_4::Status>(t);
+    return toStatusT_1_4(err);
+}
+
+template<typename T, typename U>
+status_t GetLogMessages(const sp<U> &obj, Vector<::V1_4::LogMessage> &logs) {
+    sp<T> plugin = T::castFrom(obj);
+    if (obj == NULL) {
+        LOG2BW("%s obj is null", U::descriptor);
+    } else if (plugin == NULL) {
+        LOG2BW("Cannot cast %s obj to %s plugin", U::descriptor, T::descriptor);
+    }
+
+    ::V1_4::Status err{};
+    std::vector<::V1_4::LogMessage> pluginLogs;
+    ::V1_4::IDrmPlugin::getLogMessages_cb cb = [&](
+            ::V1_4::Status status,
+            hidl_vec<::V1_4::LogMessage> hLogs) {
+        if (::V1_4::Status::OK != status) {
+            err = status;
+            return;
+        }
+        pluginLogs.assign(hLogs.data(), hLogs.data() + hLogs.size());
+    };
+
+    Return<void> hResult;
+    if (plugin != NULL) {
+        hResult = plugin->getLogMessages(cb);
+    }
+    if (!hResult.isOk()) {
+        LOG2BW("%s::getLogMessages remote call failed %s",
+               T::descriptor, hResult.description().c_str());
+    }
+
+    auto allLogs(gLogBuf.getLogs());
+    LOG2BD("framework logs size %zu; plugin logs size %zu",
+           allLogs.size(), pluginLogs.size());
+    std::copy(pluginLogs.begin(), pluginLogs.end(), std::back_inserter(allLogs));
+    std::sort(allLogs.begin(), allLogs.end(),
+              [](const ::V1_4::LogMessage &a, const ::V1_4::LogMessage &b) {
+                  return a.timeMs < b.timeMs;
+              });
+
+    logs.appendVector(allLogs);
+    return OK;
+}
+
+std::string GetExceptionMessage(status_t err, const char *msg,
+                                const Vector<::V1_4::LogMessage> &logs);
+
+template<typename T>
+std::string GetExceptionMessage(status_t err, const char *msg, const sp<T> &iface) {
+    Vector<::V1_4::LogMessage> logs;
+    iface->getLogMessages(logs);
+    return GetExceptionMessage(err, msg, logs);
+}
+
 } // namespace DrmUtils
-
 } // namespace android
-
 #endif // ANDROID_DRMUTILS_H
diff --git a/drm/libmediadrm/interface/mediadrm/ICrypto.h b/drm/libmediadrm/interface/mediadrm/ICrypto.h
index df980ae..2c4df60 100644
--- a/drm/libmediadrm/interface/mediadrm/ICrypto.h
+++ b/drm/libmediadrm/interface/mediadrm/ICrypto.h
@@ -32,6 +32,10 @@
 struct SharedBuffer;
 struct DestinationBuffer;
 }  // namespace V1_0
+
+namespace V1_4 {
+struct LogMessage;
+}  // namespace V1_4
 }  // namespace drm
 }  // namespace hardware
 }  // namespace android
@@ -83,6 +87,8 @@
     virtual int32_t setHeap(const sp<hardware::HidlMemory>& heap) = 0;
     virtual void unsetHeap(int32_t seqNum) = 0;
 
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
+
 protected:
     ICrypto() {}
 
diff --git a/drm/libmediadrm/protos/Android.bp b/drm/libmediadrm/protos/Android.bp
new file mode 100644
index 0000000..f8d237a
--- /dev/null
+++ b/drm/libmediadrm/protos/Android.bp
@@ -0,0 +1,47 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This is the version of the drm metrics protos built with the full protobuf
+// runtime for the host. It is used by the metrics_dump tool.
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_host_shared {
+    name: "libdrm_metrics_protos_full_host",
+    vendor_available: true,
+
+    include_dirs: ["external/protobuf/src"],
+
+    srcs: [
+        "metrics.proto",
+    ],
+
+    proto: {
+        export_proto_headers: true,
+        type: "full",
+    },
+
+    cflags: [
+        // Suppress unused parameter error. This error occurs
+        // when using the map type in a proto definition.
+        "-Wno-unused-parameter",
+    ],
+}
diff --git a/drm/libmediadrm/tests/Android.bp b/drm/libmediadrm/tests/Android.bp
index 6529387..9267e79 100644
--- a/drm/libmediadrm/tests/Android.bp
+++ b/drm/libmediadrm/tests/Android.bp
@@ -1,5 +1,14 @@
 // Build definitions for unit tests.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "CounterMetric_test",
     srcs: ["CounterMetric_test.cpp"],
diff --git a/drm/mediacas/plugins/clearkey/Android.bp b/drm/mediacas/plugins/clearkey/Android.bp
index 0113cb8..569cdbe 100644
--- a/drm/mediacas/plugins/clearkey/Android.bp
+++ b/drm/mediacas/plugins/clearkey/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libclearkeycasplugin",
 
diff --git a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
index cb69f91..466e571 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
+++ b/drm/mediacas/plugins/clearkey/ClearKeyFetcher.cpp
@@ -62,8 +62,8 @@
     }
     ALOGV("descriptor_size=%zu", container.descriptor_size());
 
-    // Sanity check to verify that the BroadcastEncryptor is sending a properly
-    // formed EcmContainer. If it contains two Ecms, the ids should have different
+    // Validate that the BroadcastEncryptor is sending a properly formed
+    // EcmContainer. If it contains two Ecms, the ids should have different
     // parity (one odd, one even). This does not necessarily affect decryption
     // but indicates a problem with Ecm generation.
     if (container.descriptor_size() == 2) {
diff --git a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
index a537e63..7c6d86c 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
+++ b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
@@ -22,7 +22,6 @@
 #include <openssl/aes.h>
 #include <utils/KeyedVector.h>
 #include <utils/Mutex.h>
-#include <utils/RefBase.h>
 
 namespace android {
 struct ABuffer;
@@ -30,7 +29,7 @@
 namespace clearkeycas {
 class KeyFetcher;
 
-class ClearKeyCasSession : public RefBase {
+class ClearKeyCasSession {
 public:
     explicit ClearKeyCasSession(CasPlugin *plugin);
 
diff --git a/drm/mediacas/plugins/clearkey/tests/Android.bp b/drm/mediacas/plugins/clearkey/tests/Android.bp
index 575863c..9ad8d1e 100644
--- a/drm/mediacas/plugins/clearkey/tests/Android.bp
+++ b/drm/mediacas/plugins/clearkey/tests/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "ClearKeyFetcherTest",
 
diff --git a/drm/mediacas/plugins/mock/Android.bp b/drm/mediacas/plugins/mock/Android.bp
index e8a3c6f..6f68c77 100644
--- a/drm/mediacas/plugins/mock/Android.bp
+++ b/drm/mediacas/plugins/mock/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libmockcasplugin",
 
diff --git a/drm/mediadrm/plugins/clearkey/common/Android.bp b/drm/mediadrm/plugins/clearkey/common/Android.bp
index 2c674e1..7ed8b88 100644
--- a/drm/mediadrm/plugins/clearkey/common/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/common/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libclearkeycommon",
     vendor: true,
@@ -35,4 +44,3 @@
         integer_overflow: true,
     },
 }
-
diff --git a/drm/mediadrm/plugins/clearkey/default/Android.bp b/drm/mediadrm/plugins/clearkey/default/Android.bp
index 9803d32..4b5389a 100644
--- a/drm/mediadrm/plugins/clearkey/default/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/default/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libdrmclearkeyplugin",
     vendor: true,
@@ -60,4 +69,3 @@
         integer_overflow: true,
     },
 }
-
diff --git a/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
index 53ffae4..a2d506d 100644
--- a/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/JsonWebKey.cpp
@@ -61,7 +61,7 @@
     // all the base64 encoded keys. Each key is also stored separately as
     // a JSON object in mJsonObjects[1..n] where n is the total
     // number of keys in the set.
-    if (!isJsonWebKeySet(mJsonObjects[0])) {
+    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
         return false;
     }
 
diff --git a/drm/mediadrm/plugins/clearkey/default/tests/Android.bp b/drm/mediadrm/plugins/clearkey/default/tests/Android.bp
index 4419865..b97e924 100644
--- a/drm/mediadrm/plugins/clearkey/default/tests/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/default/tests/Android.bp
@@ -17,6 +17,15 @@
 // Builds ClearKey Drm Tests
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "ClearKeyDrmUnitTest",
     vendor: true,
diff --git a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp b/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
index 0ac879c..e03a896 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
@@ -26,7 +26,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::SubSample;
@@ -79,7 +79,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index a194416..6c68532 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -14,6 +14,19 @@
 // limitations under the License.
 //
 
+// *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS.  PLEASE
+//     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
+//     DEPENDING ON IT IN YOUR PROJECT. ***
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   legacy_by_exception_only (by exception only)
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "clearkey_service_defaults",
     vendor: true,
@@ -37,13 +50,14 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
 
     shared_libs: [
         "android.hardware.drm@1.0",
         "android.hardware.drm@1.1",
         "android.hardware.drm@1.2",
         "android.hardware.drm@1.3",
+        "android.hardware.drm@1.4",
         "libbase",
         "libbinder",
         "libcrypto",
@@ -97,18 +111,18 @@
 }
 
 cc_binary {
-    name: "android.hardware.drm@1.3-service.clearkey",
+    name: "android.hardware.drm@1.4-service.clearkey",
     defaults: ["clearkey_service_defaults"],
     srcs: ["service.cpp"],
-    init_rc: ["android.hardware.drm@1.3-service.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.3-service.clearkey.xml"],
+    init_rc: ["android.hardware.drm@1.4-service.clearkey.rc"],
+    vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
 }
 
 cc_binary {
-    name: "android.hardware.drm@1.3-service-lazy.clearkey",
-    overrides: ["android.hardware.drm@1.3-service.clearkey"],
+    name: "android.hardware.drm@1.4-service-lazy.clearkey",
+    overrides: ["android.hardware.drm@1.4-service.clearkey"],
     defaults: ["clearkey_service_defaults"],
     srcs: ["serviceLazy.cpp"],
-    init_rc: ["android.hardware.drm@1.3-service-lazy.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.3-service.clearkey.xml"],
+    init_rc: ["android.hardware.drm@1.4-service-lazy.clearkey.rc"],
+    vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp b/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
index 657a42f..d81f875 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
@@ -21,7 +21,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 sp<Buffer> decodeBase64(const std::string &s) {
@@ -169,7 +169,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp b/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
index 75f8395..dcb76f4 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
@@ -21,7 +21,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 Buffer::Buffer(size_t capacity)
@@ -47,7 +47,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp b/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
index bfb0e05..4ab33d3 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
@@ -22,7 +22,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
 extern "C" {
@@ -38,7 +38,7 @@
 } // extern "C"
 
 }  // namespace clearkey
-}  // namespace V1_3
+}  // namespace V1_4
 }  // namespace drm
 }  // namespace hardware
 }  // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
index a6ed3bd..0bebc3b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
@@ -27,11 +27,11 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_2::clearkey::CryptoPlugin;
+using ::android::hardware::drm::V1_4::clearkey::CryptoPlugin;
 
 Return<bool> CryptoFactory::isCryptoSchemeSupported(
     const hidl_array<uint8_t, 16> &uuid)
@@ -63,7 +63,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_3
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index d278633..3a675f6 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -27,7 +27,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::BufferType;
@@ -37,6 +37,8 @@
     sp<IMemory> hidlMemory = mapMemory(base);
     ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
 
+    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+
     // allow mapMemory to return nullptr
     mSharedBufferMap[bufferId] = hidlMemory;
     return Void();
@@ -94,6 +96,7 @@
         return Void();
     }
 
+    std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
     if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
       _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
                "source decrypt buffer base not set");
@@ -142,12 +145,17 @@
 
     base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
 
-    if (destBuffer.offset + destBuffer.size > destBase->getSize()) {
+    totalSize = 0;
+    if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
+        totalSize > destBase->getSize()) {
+        android_errorWriteLog(0x534e4554, "176444622");
         _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
         return Void();
     }
-    destPtr = static_cast<void *>(base + destination.nonsecureMemory.offset);
+    destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
 
+    // release mSharedBufferLock
+    shared_buffer_lock.unlock();
 
     // Calculate the output buffer size and determine if any subsamples are
     // encrypted.
@@ -227,8 +235,23 @@
     return Status::OK;
 }
 
+Return<void> CryptoPlugin::getLogMessages(
+        getLogMessages_cb _hidl_cb) {
+    using std::chrono::duration_cast;
+    using std::chrono::milliseconds;
+    using std::chrono::system_clock;
+
+    auto timeMillis = duration_cast<milliseconds>(
+            system_clock::now().time_since_epoch()).count();
+
+    std::vector<LogMessage> logs = {
+            { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
+    _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
+    return Void();
+}
+
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
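
Editor's note: the decrypt() hunk above replaces a plain "destBuffer.offset + destBuffer.size > destBase->getSize()" comparison with an overflow-checked addition, so a wrapped sum can no longer slip past the bounds check (android_errorWriteLog records the associated security bug ID). Below is a minimal standalone sketch of that check, with illustrative names and assumed 64-bit offset/size fields; it is not the plugin's actual API.

    #include <cstddef>
    #include <cstdint>

    // Returns true only if [offset, offset + size) fits inside a base buffer
    // of baseSize bytes; the addition itself is checked for wraparound.
    bool destinationFits(uint64_t offset, uint64_t size, size_t baseSize) {
        uint64_t end = 0;
        if (__builtin_add_overflow(offset, size, &end)) {
            return false;  // offset + size wrapped around
        }
        return end <= baseSize;
    }
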
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp b/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
index 2415b6f..0385d8f 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
@@ -38,7 +38,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 bool DeviceFiles::StoreLicense(
@@ -246,7 +246,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
index 1ce8269..14cb5c1 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
@@ -31,13 +31,13 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::Status;
 using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::drm::V1_2::clearkey::DrmPlugin;
-using ::android::hardware::drm::V1_2::clearkey::SessionLibrary;
+using ::android::hardware::drm::V1_4::clearkey::DrmPlugin;
+using ::android::hardware::drm::V1_4::clearkey::SessionLibrary;
 using ::android::hardware::Void;
 
 Return<bool> DrmFactory::isCryptoSchemeSupported(
@@ -105,7 +105,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_3
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index a77759e..bc7c3f2 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "hidl_ClearKeyPlugin"
 #include <utils/Log.h>
 
+#include <chrono>
 #include <stdio.h>
 #include <inttypes.h>
 
@@ -58,7 +59,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 KeyRequestType toKeyRequestType_V1_0(KeyRequestType_V1_1 keyRequestType) {
@@ -220,6 +221,7 @@
         if (requestString.find(kOfflineLicense) != std::string::npos) {
             std::string emptyResponse;
             std::string keySetIdString(keySetId.begin(), keySetId.end());
+            Mutex::Autolock lock(mFileHandleLock);
             if (!mFileHandle.StoreLicense(keySetIdString,
                     DeviceFiles::kLicenseStateReleasing,
                     emptyResponse)) {
@@ -335,6 +337,7 @@
         }
         *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
                 reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
+        Mutex::Autolock lock(mFileHandleLock);
         if (mFileHandle.LicenseExists(*keySetId)) {
             // collision, regenerate
             ALOGV("Retry generating KeySetId");
@@ -392,6 +395,7 @@
     if (status == Status::OK) {
         if (isOfflineLicense) {
             if (isRelease) {
+                Mutex::Autolock lock(mFileHandleLock);
                 mFileHandle.DeleteLicense(keySetId);
                 mSessionLibrary->destroySession(session);
             } else {
@@ -400,6 +404,7 @@
                     return Void();
                 }
 
+                Mutex::Autolock lock(mFileHandleLock);
                 bool ok = mFileHandle.StoreLicense(
                         keySetId,
                         DeviceFiles::kLicenseStateActive,
@@ -454,6 +459,7 @@
         DeviceFiles::LicenseState licenseState;
         std::string offlineLicense;
         Status status = Status::OK;
+        Mutex::Autolock lock(mFileHandleLock);
         if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
                 &licenseState, &offlineLicense)) {
             ALOGE("Failed to restore offline license");
@@ -629,6 +635,48 @@
     return Void();
 }
 
+Return<void> DrmPlugin::getLogMessages(
+        getLogMessages_cb _hidl_cb) {
+    using std::chrono::duration_cast;
+    using std::chrono::milliseconds;
+    using std::chrono::system_clock;
+
+    auto timeMillis = duration_cast<milliseconds>(
+            system_clock::now().time_since_epoch()).count();
+
+    std::vector<LogMessage> logs = {
+            { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
+    _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
+    return Void();
+}
+
+Return<bool> DrmPlugin::requiresSecureDecoder(
+        const hidl_string& mime, SecurityLevel level) {
+    UNUSED(mime);
+    UNUSED(level);
+    return false;
+}
+
+Return<bool> DrmPlugin::requiresSecureDecoderDefault(const hidl_string& mime) {
+    UNUSED(mime);
+    // Clearkey only supports SW_SECURE_CRYPTO, so we always return false
+    // regardless of mime type.
+    return false;
+}
+
+Return<Status> DrmPlugin::setPlaybackId(
+    const hidl_vec<uint8_t>& sessionId,
+    const hidl_string& playbackId) {
+    if (sessionId.size() == 0) {
+        ALOGE("Invalid empty session id");
+        return Status::BAD_VALUE;
+    }
+
+    std::vector<uint8_t> sid = toVector(sessionId);
+    mPlaybackId[sid] = playbackId;
+    return Status::OK;
+}
+
 Return<Status> DrmPlugin::setSecurityLevel(const hidl_vec<uint8_t>& sessionId,
             SecurityLevel level) {
     if (sessionId.size() == 0) {
@@ -696,14 +744,32 @@
       "close_session", { closeSessionNotOpenedAttribute }, { closeSessionNotOpenedMetricValue }
     };
 
-    DrmMetricGroup metrics = { { openSessionMetric, closeSessionMetric,
-                                closeSessionNotOpenedMetric } };
+    // Set the setPlaybackId metric.
+    std::vector<DrmMetricGroup::Attribute> sids;
+    std::vector<DrmMetricGroup::Value> playbackIds;
+    for (const auto&[key, value] : mPlaybackId) {
+        std::string sid(key.begin(), key.end());
+        DrmMetricGroup::Attribute sessionIdAttribute = {
+            "sid", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, sid };
+        sids.push_back(sessionIdAttribute);
 
+        DrmMetricGroup::Value playbackIdMetricValue = {
+            "playbackId", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, value };
+        playbackIds.push_back(playbackIdMetricValue);
+    }
+    DrmMetricGroup::Metric setPlaybackIdMetric = {
+            "set_playback_id", { sids }, { playbackIds }};
+
+    DrmMetricGroup metrics = {
+            { openSessionMetric, closeSessionMetric,
+              closeSessionNotOpenedMetric, setPlaybackIdMetric }};
     _hidl_cb(Status::OK, hidl_vec<DrmMetricGroup>({metrics}));
     return Void();
 }
 
 Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
     std::vector<KeySetId> keySetIds;
     if (mMockError != Status_V1_2::OK) {
@@ -724,6 +790,7 @@
         return toStatus_1_0(mMockError);
     }
     std::string licenseName(keySetId.begin(), keySetId.end());
+    Mutex::Autolock lock(mFileHandleLock);
     if (mFileHandle.DeleteLicense(licenseName)) {
         return Status::OK;
     }
@@ -732,6 +799,8 @@
 
 Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
         getOfflineLicenseState_cb _hidl_cb) {
+    Mutex::Autolock lock(mFileHandleLock);
+
     std::string licenseName(keySetId.begin(), keySetId.end());
     DeviceFiles::LicenseState state;
     std::string license;
@@ -896,7 +965,7 @@
 }
 
 }  // namespace clearkey
-}  // namespace V1_2
+}  // namespace V1_4
 }  // namespace drm
 }  // namespace hardware
 }  // namespace android
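
Editor's note: the DrmPlugin.cpp hunks above wrap every use of mFileHandle in a scoped Mutex::Autolock so concurrent HIDL calls cannot race on the license store. A minimal sketch of the same RAII pattern follows, written with std::mutex/std::lock_guard and an illustrative stand-in class rather than the plugin's DeviceFiles.

    #include <map>
    #include <mutex>
    #include <string>

    class LicenseStore {
    public:
        bool store(const std::string& keySetId, const std::string& license) {
            std::lock_guard<std::mutex> lock(mLock);  // released at end of scope
            mLicenses[keySetId] = license;
            return true;
        }
        bool remove(const std::string& keySetId) {
            std::lock_guard<std::mutex> lock(mLock);
            return mLicenses.erase(keySetId) > 0;
        }
    private:
        std::mutex mLock;
        std::map<std::string, std::string> mLicenses;
    };
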
diff --git a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
index 8513434..eccc843 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
@@ -31,7 +31,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 namespace {
@@ -180,7 +180,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
index d93777d..45cc775 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
@@ -36,7 +36,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 JsonWebKey::JsonWebKey() {
@@ -65,7 +65,7 @@
     // all the base64 encoded keys. Each key is also stored separately as
     // a JSON object in mJsonObjects[1..n] where n is the total
     // number of keys in the set.
-    if (!isJsonWebKeySet(mJsonObjects[0])) {
+    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
         return false;
     }
 
@@ -271,7 +271,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
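
Editor's note: both JsonWebKey.cpp hunks (the default and hidl copies of the parser) add the same guard, checking that the parsed container is non-empty before indexing element 0. A minimal standalone sketch of that defensive pattern follows; the function and field names are illustrative, not the plugin's actual parser.

    #include <string>
    #include <vector>

    // Reject an empty key set up front instead of dereferencing element 0 of
    // an empty vector, which is undefined behavior.
    bool extractKeySet(const std::vector<std::string>& jsonObjects) {
        if (jsonObjects.empty() ||
                jsonObjects[0].find("\"keys\"") == std::string::npos) {
            return false;
        }
        // ... parse jsonObjects[1..n] as individual keys ...
        return true;
    }
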
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 2dcd00f..e61db3f 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -11,11 +11,11 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 std::string MemoryFileSystem::GetFileName(const std::string& path) {
-    size_t index = path.find_last_of("/");
+    size_t index = path.find_last_of('/');
     if (index != std::string::npos) {
         return path.substr(index+1);
     } else {
@@ -24,11 +24,13 @@
 }
 
 bool MemoryFileSystem::FileExists(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     return result != mMemoryFileSystem.end();
 }
 
 ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         return static_cast<ssize_t>(result->second.getFileSize());
@@ -40,6 +42,7 @@
 
 std::vector<std::string> MemoryFileSystem::ListFiles() const {
     std::vector<std::string> list;
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     for (const auto& filename : mMemoryFileSystem) {
         list.push_back(filename.first);
     }
@@ -48,6 +51,7 @@
 
 size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         std::string serializedHashFile = result->second.getContent();
@@ -61,6 +65,7 @@
 
 size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
     std::string key = GetFileName(path);
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(key);
@@ -70,6 +75,7 @@
 }
 
 bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(result);
@@ -81,12 +87,13 @@
 }
 
 bool MemoryFileSystem::RemoveAllFiles() {
+    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     mMemoryFileSystem.clear();
     return mMemoryFileSystem.empty();
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
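
Editor's note: MemoryFileSystem now takes mMemoryFileSystemLock in every accessor, including const ones such as FileExists() and GetFileSize(); that is why the header, further below in this patch, declares the mutex mutable. A minimal sketch of the same idea with illustrative names:

    #include <map>
    #include <mutex>
    #include <string>

    class InMemoryFiles {
    public:
        bool exists(const std::string& name) const {
            // Locking mutates the mutex, so it must be 'mutable' to be
            // usable from a const method.
            std::lock_guard<std::mutex> lock(mLock);
            return mFiles.count(name) > 0;
        }
        void write(const std::string& name, const std::string& content) {
            std::lock_guard<std::mutex> lock(mLock);
            mFiles[name] = content;
        }
    private:
        mutable std::mutex mLock;
        std::map<std::string, std::string> mFiles;
    };
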
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp b/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
index a9d7016..cf668d4 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
@@ -28,7 +28,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::KeyValue;
@@ -95,7 +95,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
index 99fb30f..88afcc4 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
@@ -24,7 +24,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::hidl_string;
@@ -86,7 +86,7 @@
 }
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
new file mode 100644
index 0000000..46aba88
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
@@ -0,0 +1,18 @@
+service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service-lazy.clearkey
+    interface android.hardware.drm@1.0::ICryptoFactory clearkey
+    interface android.hardware.drm@1.0::IDrmFactory clearkey
+    interface android.hardware.drm@1.1::ICryptoFactory clearkey
+    interface android.hardware.drm@1.1::IDrmFactory clearkey
+    interface android.hardware.drm@1.2::ICryptoFactory clearkey
+    interface android.hardware.drm@1.2::IDrmFactory clearkey
+    interface android.hardware.drm@1.3::ICryptoFactory clearkey
+    interface android.hardware.drm@1.3::IDrmFactory clearkey
+    interface android.hardware.drm@1.4::ICryptoFactory clearkey
+    interface android.hardware.drm@1.4::IDrmFactory clearkey
+    disabled
+    oneshot
+    class hal
+    user media
+    group media mediadrm
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
new file mode 100644
index 0000000..8186933
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
@@ -0,0 +1,16 @@
+service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service.clearkey
+    interface android.hardware.drm@1.0::ICryptoFactory clearkey
+    interface android.hardware.drm@1.0::IDrmFactory clearkey
+    interface android.hardware.drm@1.1::ICryptoFactory clearkey
+    interface android.hardware.drm@1.1::IDrmFactory clearkey
+    interface android.hardware.drm@1.2::ICryptoFactory clearkey
+    interface android.hardware.drm@1.2::IDrmFactory clearkey
+    interface android.hardware.drm@1.3::ICryptoFactory clearkey
+    interface android.hardware.drm@1.3::IDrmFactory clearkey
+    interface android.hardware.drm@1.4::ICryptoFactory clearkey
+    interface android.hardware.drm@1.4::IDrmFactory clearkey
+    class hal
+    user media
+    group media mediadrm
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h b/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
index 721f4c0..97794f7 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
@@ -22,7 +22,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::Status;
@@ -42,7 +42,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h b/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
index ec30cc1..2349f23 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
@@ -25,7 +25,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::sp;
@@ -38,7 +38,7 @@
 void encodeBase64Url(const void *data, size_t size, std::string *out);
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h b/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
index c497e37..66aaa73 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
@@ -25,7 +25,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::sp;
@@ -54,7 +54,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
index b83ce69..8e47c45 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
@@ -22,7 +22,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 static const std::string kVendorKey("vendor");
@@ -55,7 +55,7 @@
 static const uint8_t kMetricsData[] = { 0 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
index 03c434e..cd18029 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
@@ -24,7 +24,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::KeyValue;
@@ -48,7 +48,7 @@
   void operator=(const TypeName&) = delete;
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h b/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
index c1c188e..d4a8a17 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
@@ -17,17 +17,17 @@
 #ifndef CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
 #define CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
 
-#include <android/hardware/drm/1.3/ICryptoFactory.h>
-#include <android/hardware/drm/1.3/IDrmFactory.h>
+#include <android/hardware/drm/1.4/ICryptoFactory.h>
+#include <android/hardware/drm/1.4/IDrmFactory.h>
 
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
-using ::android::hardware::drm::V1_3::ICryptoFactory;
-using ::android::hardware::drm::V1_3::IDrmFactory;
+using ::android::hardware::drm::V1_4::ICryptoFactory;
+using ::android::hardware::drm::V1_4::IDrmFactory;
 
 extern "C" {
     IDrmFactory* createDrmFactory();
@@ -35,7 +35,7 @@
 }
 
 }  // namespace clearkey
-}  // namespace V1_3
+}  // namespace V1_4
 }  // namespace drm
 }  // namespace hardware
 }  // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
index cb4811b..e6b541f 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
@@ -18,17 +18,17 @@
 #define CLEARKEY_CRYPTO_FACTORY_H_
 
 #include <android/hardware/drm/1.0/ICryptoPlugin.h>
-#include <android/hardware/drm/1.3/ICryptoFactory.h>
+#include <android/hardware/drm/1.4/ICryptoFactory.h>
 
 #include "ClearKeyTypes.h"
 
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
-using ::android::hardware::drm::V1_3::ICryptoFactory;
+using ::android::hardware::drm::V1_4::ICryptoFactory;
 using ::android::hardware::drm::V1_0::ICryptoPlugin;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_string;
@@ -52,7 +52,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_3
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
index 8680f0c..b272a83 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
@@ -17,9 +17,11 @@
 #ifndef CLEARKEY_CRYPTO_PLUGIN_H_
 #define CLEARKEY_CRYPTO_PLUGIN_H_
 
-#include <android/hardware/drm/1.2/ICryptoPlugin.h>
+#include <android/hardware/drm/1.4/ICryptoPlugin.h>
 #include <android/hidl/memory/1.0/IMemory.h>
 
+#include <mutex>
+
 #include "ClearKeyTypes.h"
 #include "Session.h"
 #include "Utils.h"
@@ -32,7 +34,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 namespace drm = ::android::hardware::drm;
@@ -54,7 +56,7 @@
 
 typedef drm::V1_2::Status Status_V1_2;
 
-struct CryptoPlugin : public drm::V1_2::ICryptoPlugin {
+struct CryptoPlugin : public drm::V1_4::ICryptoPlugin {
     explicit CryptoPlugin(const hidl_vec<uint8_t>& sessionId) {
         mInitStatus = setMediaDrmSession(sessionId);
     }
@@ -93,7 +95,7 @@
             const SharedBuffer& source,
             uint64_t offset,
             const DestinationBuffer& destination,
-            decrypt_1_2_cb _hidl_cb);
+            decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // use unique_lock
 
     Return<void> setSharedBufferBase(const hidl_memory& base,
             uint32_t bufferId);
@@ -102,16 +104,19 @@
 
     Return<Status> getInitStatus() const { return mInitStatus; }
 
+    Return<void> getLogMessages(
+            getLogMessages_cb _hidl_cb);
 private:
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
 
-    std::map<uint32_t, sp<IMemory> > mSharedBufferMap;
+    std::mutex mSharedBufferLock;
+    std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
     sp<Session> mSession;
     Status mInitStatus;
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
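
Editor's note: in the CryptoPlugin.h hunk above, GUARDED_BY documents which mutex protects mSharedBufferMap, and the decrypt_1_2 override opts out of the static analysis because it releases the lock mid-function through a unique_lock (as the inline comment notes). A minimal sketch of that copy-then-unlock shape, with illustrative names and types:

    #include <cstdint>
    #include <map>
    #include <mutex>

    std::mutex bufferLock;              // stands in for mSharedBufferLock
    std::map<uint32_t, int> bufferMap;  // stands in for mSharedBufferMap

    int decryptLike(uint32_t bufferId) {
        std::unique_lock<std::mutex> lock(bufferLock);
        auto it = bufferMap.find(bufferId);
        if (it == bufferMap.end()) {
            return -1;      // unknown buffer id
        }
        int handle = it->second;
        lock.unlock();      // released early, before the slow work; static
                            // analysis cannot follow this, hence the annotation
        // ... long-running decrypt using 'handle', without holding the lock ...
        return handle;
    }
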
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h b/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
index 554ae59..6466ac3 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
@@ -20,9 +20,11 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
+using ::android::hardware::drm::V1_2::clearkey::OfflineFile;
+
 class DeviceFiles {
  public:
     typedef enum {
@@ -63,7 +65,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
index 63234cf..fea1ec8 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
@@ -17,15 +17,15 @@
 #ifndef CLEARKEY_DRM_FACTORY_H_
 #define CLEARKEY_DRM_FACTORY_H_
 
-#include <android/hardware/drm/1.2/IDrmPlugin.h>
-#include <android/hardware/drm/1.3/IDrmFactory.h>
+#include <android/hardware/drm/1.4/IDrmPlugin.h>
+#include <android/hardware/drm/1.4/IDrmFactory.h>
 
 #include "ClearKeyTypes.h"
 
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_3 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_1::SecurityLevel;
@@ -63,7 +63,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_3
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 076beb8..5d6e3da 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -17,7 +17,7 @@
 #ifndef CLEARKEY_DRM_PLUGIN_H_
 #define CLEARKEY_DRM_PLUGIN_H_
 
-#include <android/hardware/drm/1.2/IDrmPlugin.h>
+#include <android/hardware/drm/1.4/IDrmPlugin.h>
 #include <android/hardware/drm/1.2/IDrmPluginListener.h>
 
 #include <map>
@@ -32,7 +32,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 namespace drm = ::android::hardware::drm;
@@ -50,9 +50,12 @@
 using drm::V1_1::HdcpLevel;
 using drm::V1_1::SecureStopRelease;
 using drm::V1_1::SecurityLevel;
-using drm::V1_2::IDrmPlugin;
 using drm::V1_2::KeySetId;
 using drm::V1_2::OfflineLicenseState;
+using drm::V1_4::clearkey::DeviceFiles;
+using drm::V1_4::clearkey::Session;
+using drm::V1_4::clearkey::SessionLibrary;
+using drm::V1_4::IDrmPlugin;
 
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
@@ -199,6 +202,18 @@
     Return<Status> setPropertyByteArray(
             const hidl_string& name, const hidl_vec<uint8_t>& value) override;
 
+    Return<void> getLogMessages(
+        getLogMessages_cb _hidl_cb) override;
+
+    Return<Status> setPlaybackId(
+        const hidl_vec<uint8_t>& sessionId,
+        const hidl_string& playbackId) override;
+
+    Return<bool> requiresSecureDecoder(
+            const hidl_string& mime, SecurityLevel level) override;
+
+    Return<bool> requiresSecureDecoderDefault(const hidl_string& mime) override;
+
     Return<Status> setCipherAlgorithm(
             const hidl_vec<uint8_t>& sessionId, const hidl_string& algorithm) {
         if (sessionId.size() == 0 || algorithm.size() == 0) {
@@ -398,6 +413,7 @@
     std::map<std::string, std::string> mStringProperties;
     std::map<std::string, std::vector<uint8_t> > mByteArrayProperties;
     std::map<std::string, std::vector<uint8_t> > mReleaseKeysMap;
+    std::map<std::vector<uint8_t>, std::string> mPlaybackId;
     std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel;
     sp<IDrmPluginListener> mListener;
     sp<IDrmPluginListener_V1_2> mListenerV1_2;
@@ -416,14 +432,15 @@
         mMockError = Status_V1_2::OK;
     }
 
-    DeviceFiles mFileHandle;
+    DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
+    Mutex mFileHandleLock;
     Mutex mSecureStopLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h b/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
index 889f511..59338c9 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
@@ -24,7 +24,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::drm::V1_0::Status;
@@ -49,7 +49,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h b/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
index e57470c..40a2d74 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
@@ -23,7 +23,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 class JsonWebKey {
@@ -54,7 +54,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
index bcd9fd6..a90d818 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
@@ -5,7 +5,9 @@
 #ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
 #define CLEARKEY_MEMORY_FILE_SYSTEM_H_
 
+#include <android-base/thread_annotations.h>
 #include <map>
+#include <mutex>
 #include <string>
 
 #include "ClearKeyTypes.h"
@@ -13,7 +15,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 // Using android file system requires clearkey plugin to update
@@ -49,10 +51,12 @@
     size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
 
  private:
+    mutable std::mutex mMemoryFileSystemLock;
+
     // License file name is made up of a unique keySetId, therefore,
     // the filename can be used as the key to locate licenses in the
     // memory file system.
-    std::map<std::string, MemoryFile> mMemoryFileSystem;
+    std::map<std::string, MemoryFile> mMemoryFileSystem GUARDED_BY(mMemoryFileSystemLock);
 
     std::string GetFileName(const std::string& path);
 
@@ -60,7 +64,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h b/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
index a159e5a..05cb8c8 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
@@ -27,7 +27,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 namespace drm = ::android::hardware::drm;
@@ -73,7 +73,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
index 1e567b8..5e77438 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
@@ -26,7 +26,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::sp;
@@ -58,7 +58,7 @@
 };
 
 } // namespace clearkey
-} // namespace V1_2
+} // namespace V1_4
 } // namespace drm
 } // namespace hardware
 } // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h b/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
index b0f8607..22eeccd 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef CLEARKEY_ANDROID_HARDWARE_DRM_V1_1_TYPECONVERT
-#define CLEARKEY_ANDROID_HARDWARE_DRM_V1_1_TYPECONVERT
+#ifndef CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
+#define CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
 
 #include <vector>
 
@@ -24,7 +24,7 @@
 namespace android {
 namespace hardware {
 namespace drm {
-namespace V1_2 {
+namespace V1_4 {
 namespace clearkey {
 
 using ::android::hardware::hidl_array;
@@ -80,9 +80,9 @@
 }
 
 }  // namespace clearkey
-}  // namespace V1_2
+}  // namespace V1_4
 }  // namespace drm
 }  // namespace hardware
 }  // namespace android
 
-#endif // CLEARKEY_ANDROID_HARDWARE_DRM_V1_1_TYPECONVERT
+#endif // CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
new file mode 100644
index 0000000..31ddb5f
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<manifest version="1.0" type="device">
+    <hal format="hidl">
+        <name>android.hardware.drm</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.4::ICryptoFactory/clearkey</fqname>
+        <fqname>@1.4::IDrmFactory/clearkey</fqname>
+    </hal>
+</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/service.cpp b/drm/mediadrm/plugins/clearkey/hidl/service.cpp
index b62baae..d3d6905 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/service.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/service.cpp
@@ -25,10 +25,10 @@
 using ::android::hardware::joinRpcThreadpool;
 using ::android::sp;
 
-using android::hardware::drm::V1_3::ICryptoFactory;
-using android::hardware::drm::V1_3::IDrmFactory;
-using android::hardware::drm::V1_3::clearkey::CryptoFactory;
-using android::hardware::drm::V1_3::clearkey::DrmFactory;
+using android::hardware::drm::V1_4::ICryptoFactory;
+using android::hardware::drm::V1_4::IDrmFactory;
+using android::hardware::drm::V1_4::clearkey::CryptoFactory;
+using android::hardware::drm::V1_4::clearkey::DrmFactory;
 
 int main(int /* argc */, char** /* argv */) {
     sp<IDrmFactory> drmFactory = new DrmFactory;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp b/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
index b3e0179..358b5cc 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
@@ -25,10 +25,10 @@
 using ::android::hardware::joinRpcThreadpool;
 using ::android::sp;
 
-using android::hardware::drm::V1_3::ICryptoFactory;
-using android::hardware::drm::V1_3::IDrmFactory;
-using android::hardware::drm::V1_3::clearkey::CryptoFactory;
-using android::hardware::drm::V1_3::clearkey::DrmFactory;
+using android::hardware::drm::V1_4::ICryptoFactory;
+using android::hardware::drm::V1_4::IDrmFactory;
+using android::hardware::drm::V1_4::clearkey::CryptoFactory;
+using android::hardware::drm::V1_4::clearkey::DrmFactory;
 using android::hardware::LazyServiceRegistrar;
 
 int main(int /* argc */, char** /* argv */) {
diff --git a/drm/mediadrm/plugins/mock/Android.bp b/drm/mediadrm/plugins/mock/Android.bp
index dd2ad7b..20dfb4a 100644
--- a/drm/mediadrm/plugins/mock/Android.bp
+++ b/drm/mediadrm/plugins/mock/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libmockdrmcryptoplugin",
 
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 866edac..a38aa9b 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -318,7 +318,7 @@
 
     /**
      * Removes all the rights information of each plug-in associated with
-     * DRM framework. Will be used in master reset
+     * DRM framework.
      *
      * @return status_t
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 28e432e..74fa50d 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -8,17 +8,9 @@
         },
         {
           "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
-        }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
-        {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
         },
         {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 703cf77..71e7604 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -21,6 +21,7 @@
 #include <sstream>
 #include <unordered_map>
 
+#include <android/media/InterpolatorConfig.h>
 #include <binder/Parcel.h>
 #include <utils/RefBase.h>
 
@@ -39,17 +40,10 @@
 class Interpolator : public std::map<S, T> {
 public:
     // Polynomial spline interpolators
-    // Extend only at the end of enum, as this must match order in VolumeShapers.java.
-    enum InterpolatorType : int32_t {
-        INTERPOLATOR_TYPE_STEP,   // Not continuous
-        INTERPOLATOR_TYPE_LINEAR, // C0
-        INTERPOLATOR_TYPE_CUBIC,  // C1
-        INTERPOLATOR_TYPE_CUBIC_MONOTONIC, // C1 (to provide locally monotonic curves)
-        // INTERPOLATOR_TYPE_CUBIC_C2, // TODO - requires global computation / cache
-    };
+    using InterpolatorType = media::InterpolatorType;
 
     explicit Interpolator(
-            InterpolatorType interpolatorType = INTERPOLATOR_TYPE_LINEAR,
+            InterpolatorType interpolatorType = InterpolatorType::CUBIC,
             bool cache = true)
         : mCache(cache)
         , mFirstSlope(0)
@@ -82,13 +76,13 @@
 
         // now that we have two adjacent points:
         switch (mInterpolatorType) {
-        case INTERPOLATOR_TYPE_STEP:
+        case InterpolatorType::STEP:
             return high->first == x ? high->second : low->second;
-        case INTERPOLATOR_TYPE_LINEAR:
+        case InterpolatorType::LINEAR:
             return ((high->first - x) * low->second + (x - low->first) * high->second)
                     / (high->first - low->first);
-        case INTERPOLATOR_TYPE_CUBIC:
-        case INTERPOLATOR_TYPE_CUBIC_MONOTONIC:
+        case InterpolatorType::CUBIC:
+        case InterpolatorType::CUBIC_MONOTONIC:
         default: {
             // See https://en.wikipedia.org/wiki/Cubic_Hermite_spline
 
@@ -116,7 +110,7 @@
             // non catmullRom (finite difference) with regular cubic;
             // the choices here minimize computation.
             bool monotonic, catmullRom;
-            if (mInterpolatorType == INTERPOLATOR_TYPE_CUBIC_MONOTONIC) {
+            if (mInterpolatorType == InterpolatorType::CUBIC_MONOTONIC) {
                 monotonic = true;
                 catmullRom = false;
             } else {
@@ -202,11 +196,11 @@
 
     status_t setInterpolatorType(InterpolatorType interpolatorType) {
         switch (interpolatorType) {
-        case INTERPOLATOR_TYPE_STEP:   // Not continuous
-        case INTERPOLATOR_TYPE_LINEAR: // C0
-        case INTERPOLATOR_TYPE_CUBIC:  // C1
-        case INTERPOLATOR_TYPE_CUBIC_MONOTONIC: // C1 + other constraints
-        // case INTERPOLATOR_TYPE_CUBIC_C2:
+        case InterpolatorType::STEP:   // Not continuous
+        case InterpolatorType::LINEAR: // C0
+        case InterpolatorType::CUBIC:  // C1
+        case InterpolatorType::CUBIC_MONOTONIC: // C1 + other constraints
+        // case InterpolatorType::CUBIC_C2:
             mInterpolatorType = interpolatorType;
             return NO_ERROR;
         default:
@@ -235,49 +229,50 @@
         mMemo.clear();
     }
 
+    // TODO(ytai): remove this method once it is not used.
     status_t writeToParcel(Parcel *parcel) const {
-        if (parcel == nullptr) {
-            return BAD_VALUE;
-        }
-        status_t res = parcel->writeInt32(mInterpolatorType)
-                ?: parcel->writeFloat(mFirstSlope)
-                ?: parcel->writeFloat(mLastSlope)
-                ?: parcel->writeUint32((uint32_t)this->size()); // silent truncation
-        if (res != NO_ERROR) {
-            return res;
-        }
-        for (const auto &pt : *this) {
-            res = parcel->writeFloat(pt.first)
-                    ?: parcel->writeFloat(pt.second);
-            if (res != NO_ERROR) {
-                return res;
-            }
-        }
-        return NO_ERROR;
+        media::InterpolatorConfig config;
+        writeToConfig(&config);
+        return config.writeToParcel(parcel);
     }
 
+    void writeToConfig(media::InterpolatorConfig *config) const {
+        config->type = mInterpolatorType;
+        config->firstSlope = mFirstSlope;
+        config->lastSlope = mLastSlope;
+        for (const auto &pt : *this) {
+            config->xy.push_back(pt.first);
+            config->xy.push_back(pt.second);
+        }
+    }
+
+    // TODO(ytai): remove this method once it is not used.
     status_t readFromParcel(const Parcel &parcel) {
-        this->clear();
-        int32_t type;
-        uint32_t size;
-        status_t res = parcel.readInt32(&type)
-                        ?: parcel.readFloat(&mFirstSlope)
-                        ?: parcel.readFloat(&mLastSlope)
-                        ?: parcel.readUint32(&size)
-                        ?: setInterpolatorType((InterpolatorType)type);
+        media::InterpolatorConfig config;
+        status_t res = config.readFromParcel(&parcel);
         if (res != NO_ERROR) {
             return res;
         }
+        return readFromConfig(config);
+    }
+
+    status_t readFromConfig(const media::InterpolatorConfig &config) {
+        this->clear();
+        setInterpolatorType(config.type);
+        if ((config.xy.size() & 1) != 0) {
+            // xy size must be even.
+            return BAD_VALUE;
+        }
+        uint32_t size = config.xy.size() / 2;
+        mFirstSlope = config.firstSlope;
+        mLastSlope = config.lastSlope;
+
         // Note: We don't need to check size is within some bounds as
         // the Parcel read will fail if size is incorrectly specified too large.
         float lastx;
         for (uint32_t i = 0; i < size; ++i) {
-            float x, y;
-            res = parcel.readFloat(&x)
-                    ?: parcel.readFloat(&y);
-            if (res != NO_ERROR) {
-                return res;
-            }
+            float x = config.xy[i * 2];
+            float y = config.xy[i * 2 + 1];
             if ((i > 0 && !(x > lastx)) /* handle nan */
                     || y != y /* handle nan */) {
                 // This is a std::map object which imposes sorted order
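
Editor's note: readFromConfig() above consumes the flattened InterpolatorConfig point list, xy = {x0, y0, x1, y1, ...}, rejecting odd-length vectors, non-increasing x values, and NaN y values. A minimal standalone sketch of that decoding and validation, with an illustrative function name:

    #include <cmath>
    #include <cstddef>
    #include <map>
    #include <vector>

    // Unflatten {x0, y0, x1, y1, ...} into a sorted map, mirroring the checks
    // in readFromConfig(): even length, strictly increasing x, no NaN y.
    bool unflattenPoints(const std::vector<float>& xy, std::map<float, float>* out) {
        if ((xy.size() & 1) != 0) {
            return false;  // must hold an even number of floats
        }
        float lastX = 0;
        for (size_t i = 0; i + 1 < xy.size(); i += 2) {
            float x = xy[i];
            float y = xy[i + 1];
            if ((i > 0 && !(x > lastX)) /* also rejects NaN x */ || std::isnan(y)) {
                return false;
            }
            out->emplace(x, y);
            lastX = x;
        }
        return true;
    }
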
diff --git a/include/media/MicrophoneInfo.h b/include/media/MicrophoneInfo.h
index 2287aca..a5045b9 100644
--- a/include/media/MicrophoneInfo.h
+++ b/include/media/MicrophoneInfo.h
@@ -17,33 +17,24 @@
 #ifndef ANDROID_MICROPHONE_INFO_H
 #define ANDROID_MICROPHONE_INFO_H
 
+#include <android/media/MicrophoneInfoData.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
+#include <media/AidlConversionUtil.h>
 #include <system/audio.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
 
 namespace android {
 namespace media {
 
-#define RETURN_IF_FAILED(calledOnce)                                     \
-    {                                                                    \
-        status_t returnStatus = calledOnce;                              \
-        if (returnStatus) {                                              \
-            ALOGE("Failed at %s:%d (%s)", __FILE__, __LINE__, __func__); \
-            return returnStatus;                                         \
-         }                                                               \
-    }
-
 class MicrophoneInfo : public Parcelable {
 public:
     MicrophoneInfo() = default;
     MicrophoneInfo(const MicrophoneInfo& microphoneInfo) = default;
     MicrophoneInfo(audio_microphone_characteristic_t& characteristic) {
-        mDeviceId = String16(&characteristic.device_id[0]);
+        mDeviceId = std::string(&characteristic.device_id[0]);
         mPortId = characteristic.id;
         mType = characteristic.device;
-        mAddress = String16(&characteristic.address[0]);
+        mAddress = std::string(&characteristic.address[0]);
         mDeviceLocation = characteristic.location;
         mDeviceGroup = characteristic.group;
         mIndexInTheGroup = characteristic.index_in_the_group;
@@ -53,8 +44,8 @@
         mOrientation.push_back(characteristic.orientation.x);
         mOrientation.push_back(characteristic.orientation.y);
         mOrientation.push_back(characteristic.orientation.z);
-        Vector<float> frequencies;
-        Vector<float> responses;
+        std::vector<float> frequencies;
+        std::vector<float> responses;
         for (size_t i = 0; i < characteristic.num_frequency_responses; i++) {
             frequencies.push_back(characteristic.frequency_responses[0][i]);
             responses.push_back(characteristic.frequency_responses[1][i]);
@@ -73,76 +64,73 @@
     virtual ~MicrophoneInfo() = default;
 
     virtual status_t writeToParcel(Parcel* parcel) const {
-        RETURN_IF_FAILED(parcel->writeString16(mDeviceId));
-        RETURN_IF_FAILED(parcel->writeInt32(mPortId));
-        RETURN_IF_FAILED(parcel->writeUint32(mType));
-        RETURN_IF_FAILED(parcel->writeString16(mAddress));
-        RETURN_IF_FAILED(parcel->writeInt32(mDeviceLocation));
-        RETURN_IF_FAILED(parcel->writeInt32(mDeviceGroup));
-        RETURN_IF_FAILED(parcel->writeInt32(mIndexInTheGroup));
-        RETURN_IF_FAILED(writeFloatVector(parcel, mGeometricLocation));
-        RETURN_IF_FAILED(writeFloatVector(parcel, mOrientation));
+        MicrophoneInfoData parcelable;
+        return writeToParcelable(&parcelable)
+               ?: parcelable.writeToParcel(parcel);
+    }
+
+    virtual status_t writeToParcelable(MicrophoneInfoData* parcelable) const {
+        parcelable->deviceId = mDeviceId;
+        parcelable->portId = mPortId;
+        parcelable->type = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(mType));
+        parcelable->address = mAddress;
+        parcelable->deviceGroup = mDeviceGroup;
+        parcelable->indexInTheGroup = mIndexInTheGroup;
+        parcelable->geometricLocation = mGeometricLocation;
+        parcelable->orientation = mOrientation;
         if (mFrequencyResponses.size() != 2) {
             return BAD_VALUE;
         }
-        for (size_t i = 0; i < mFrequencyResponses.size(); i++) {
-            RETURN_IF_FAILED(parcel->writeInt32(mFrequencyResponses[i].size()));
-            RETURN_IF_FAILED(writeFloatVector(parcel, mFrequencyResponses[i]));
-        }
-        std::vector<int> channelMapping;
-        for (size_t i = 0; i < mChannelMapping.size(); ++i) {
-            channelMapping.push_back(mChannelMapping[i]);
-        }
-        RETURN_IF_FAILED(parcel->writeInt32Vector(channelMapping));
-        RETURN_IF_FAILED(parcel->writeFloat(mSensitivity));
-        RETURN_IF_FAILED(parcel->writeFloat(mMaxSpl));
-        RETURN_IF_FAILED(parcel->writeFloat(mMinSpl));
-        RETURN_IF_FAILED(parcel->writeInt32(mDirectionality));
+        parcelable->frequencies = mFrequencyResponses[0];
+        parcelable->frequencyResponses = mFrequencyResponses[1];
+        parcelable->channelMapping = mChannelMapping;
+        parcelable->sensitivity = mSensitivity;
+        parcelable->maxSpl = mMaxSpl;
+        parcelable->minSpl = mMinSpl;
+        parcelable->directionality = mDirectionality;
         return OK;
     }
 
     virtual status_t readFromParcel(const Parcel* parcel) {
-        RETURN_IF_FAILED(parcel->readString16(&mDeviceId));
-        RETURN_IF_FAILED(parcel->readInt32(&mPortId));
-        RETURN_IF_FAILED(parcel->readUint32(&mType));
-        RETURN_IF_FAILED(parcel->readString16(&mAddress));
-        RETURN_IF_FAILED(parcel->readInt32(&mDeviceLocation));
-        RETURN_IF_FAILED(parcel->readInt32(&mDeviceGroup));
-        RETURN_IF_FAILED(parcel->readInt32(&mIndexInTheGroup));
-        RETURN_IF_FAILED(readFloatVector(parcel, &mGeometricLocation, 3));
-        RETURN_IF_FAILED(readFloatVector(parcel, &mOrientation, 3));
-        int32_t frequenciesNum;
-        RETURN_IF_FAILED(parcel->readInt32(&frequenciesNum));
-        Vector<float> frequencies;
-        RETURN_IF_FAILED(readFloatVector(parcel, &frequencies, frequenciesNum));
-        int32_t responsesNum;
-        RETURN_IF_FAILED(parcel->readInt32(&responsesNum));
-        Vector<float> responses;
-        RETURN_IF_FAILED(readFloatVector(parcel, &responses, responsesNum));
-        if (frequencies.size() != responses.size()) {
+        MicrophoneInfoData data;
+        return data.readFromParcel(parcel)
+            ?: readFromParcelable(data);
+    }
+
+    virtual status_t readFromParcelable(const MicrophoneInfoData& parcelable) {
+        mDeviceId = parcelable.deviceId;
+        mPortId = parcelable.portId;
+        mType = VALUE_OR_RETURN_STATUS(convertReinterpret<uint32_t>(parcelable.type));
+        mAddress = parcelable.address;
+        mDeviceLocation = parcelable.deviceLocation;
+        mDeviceGroup = parcelable.deviceGroup;
+        mIndexInTheGroup = parcelable.indexInTheGroup;
+        if (parcelable.geometricLocation.size() != 3) {
             return BAD_VALUE;
         }
-        mFrequencyResponses.push_back(frequencies);
-        mFrequencyResponses.push_back(responses);
-        std::vector<int> channelMapping;
-        status_t result = parcel->readInt32Vector(&channelMapping);
-        if (result != OK) {
-            return result;
-        }
-        if (channelMapping.size() != AUDIO_CHANNEL_COUNT_MAX) {
+        mGeometricLocation = parcelable.geometricLocation;
+        if (parcelable.orientation.size() != 3) {
             return BAD_VALUE;
         }
-        for (size_t i = 0; i < channelMapping.size(); i++) {
-            mChannelMapping.push_back(channelMapping[i]);
+        mOrientation = parcelable.orientation;
+        if (parcelable.frequencies.size() != parcelable.frequencyResponses.size()) {
+            return BAD_VALUE;
         }
-        RETURN_IF_FAILED(parcel->readFloat(&mSensitivity));
-        RETURN_IF_FAILED(parcel->readFloat(&mMaxSpl));
-        RETURN_IF_FAILED(parcel->readFloat(&mMinSpl));
-        RETURN_IF_FAILED(parcel->readInt32(&mDirectionality));
+
+        mFrequencyResponses.push_back(parcelable.frequencies);
+        mFrequencyResponses.push_back(parcelable.frequencyResponses);
+        if (parcelable.channelMapping.size() != AUDIO_CHANNEL_COUNT_MAX) {
+            return BAD_VALUE;
+        }
+        mChannelMapping = parcelable.channelMapping;
+        mSensitivity = parcelable.sensitivity;
+        mMaxSpl = parcelable.maxSpl;
+        mMinSpl = parcelable.minSpl;
+        mDirectionality = parcelable.directionality;
         return OK;
     }
 
-    String16 getDeviceId() const {
+    std::string getDeviceId() const {
         return mDeviceId;
     }
 
@@ -154,7 +142,7 @@
         return mType;
     }
 
-    String16 getAddress() const {
+    std::string getAddress() const {
         return mAddress;
     }
 
@@ -170,19 +158,19 @@
         return mIndexInTheGroup;
     }
 
-    const Vector<float>& getGeometricLocation() const {
+    const std::vector<float>& getGeometricLocation() const {
         return mGeometricLocation;
     }
 
-    const Vector<float>& getOrientation() const {
+    const std::vector<float>& getOrientation() const {
         return mOrientation;
     }
 
-    const Vector<Vector<float>>& getFrequencyResponses() const {
+    const std::vector<std::vector<float>>& getFrequencyResponses() const {
         return mFrequencyResponses;
     }
 
-    const Vector<int>& getChannelMapping() const {
+    const std::vector<int>& getChannelMapping() const {
         return mChannelMapping;
     }
 
@@ -203,46 +191,38 @@
     }
 
 private:
-    status_t readFloatVector(
-            const Parcel* parcel, Vector<float> *vectorPtr, size_t defaultLength) {
-        std::unique_ptr<std::vector<float>> v;
-        status_t result = parcel->readFloatVector(&v);
-        if (result != OK) return result;
-        vectorPtr->clear();
-        if (v.get() != nullptr) {
-            for (const auto& iter : *v) {
-                vectorPtr->push_back(iter);
-            }
-        } else {
-            vectorPtr->resize(defaultLength);
-        }
-        return OK;
-    }
-    status_t writeFloatVector(Parcel* parcel, const Vector<float>& vector) const {
-        std::vector<float> v;
-        for (size_t i = 0; i < vector.size(); i++) {
-            v.push_back(vector[i]);
-        }
-        return parcel->writeFloatVector(v);
-    }
-
-    String16 mDeviceId;
+    std::string mDeviceId;
     int32_t mPortId;
     uint32_t mType;
-    String16 mAddress;
+    std::string mAddress;
     int32_t mDeviceLocation;
     int32_t mDeviceGroup;
     int32_t mIndexInTheGroup;
-    Vector<float> mGeometricLocation;
-    Vector<float> mOrientation;
-    Vector<Vector<float>> mFrequencyResponses;
-    Vector<int> mChannelMapping;
+    std::vector<float> mGeometricLocation;
+    std::vector<float> mOrientation;
+    std::vector<std::vector<float>> mFrequencyResponses;
+    std::vector<int> mChannelMapping;
     float mSensitivity;
     float mMaxSpl;
     float mMinSpl;
     int32_t mDirectionality;
 };
 
+// Conversion routines, according to AidlConversion.h conventions.
+inline ConversionResult<MicrophoneInfo>
+aidl2legacy_MicrophoneInfo(const media::MicrophoneInfoData& aidl) {
+    MicrophoneInfo legacy;
+    RETURN_IF_ERROR(legacy.readFromParcelable(aidl));
+    return legacy;
+}
+
+inline ConversionResult<media::MicrophoneInfoData>
+legacy2aidl_MicrophoneInfo(const MicrophoneInfo& legacy) {
+    media::MicrophoneInfoData aidl;
+    RETURN_IF_ERROR(legacy.writeToParcelable(&aidl));
+    return aidl;
+}
+
 } // namespace media
 } // namespace android
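For reference, the serialization above now reduces to a round trip through android::media::MicrophoneInfoData via writeToParcelable()/readFromParcelable(), with the aidl2legacy/legacy2aidl helpers wrapping the same calls. A minimal sketch of that flow, under the assumption that the header is reachable as <media/MicrophoneInfo.h> (include path not shown in this hunk):

    // Sketch only: exercises the methods declared above; the include path is
    // an assumption, not taken from this diff.
    #include <media/MicrophoneInfo.h>

    using android::OK;
    using android::status_t;
    using android::media::MicrophoneInfo;
    using android::media::MicrophoneInfoData;

    status_t roundTrip(const MicrophoneInfo& in, MicrophoneInfo* out) {
        MicrophoneInfoData parcelable;
        status_t status = in.writeToParcelable(&parcelable);  // legacy -> AIDL
        if (status != OK) return status;
        return out->readFromParcelable(parcelable);           // AIDL -> legacy
    }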
 
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index b3bf16d..61de987 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -22,6 +22,8 @@
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 
+#include <time.h>
+
 namespace android {
 
 class MmapStreamCallback;
@@ -103,6 +105,19 @@
     virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
 
     /**
+     * Get a recent count of the number of audio frames presented/received to/from an
+     * external observer.
+     *
+     * \param[out] position count of presented audio frames
+     * \param[out] timeNanos associated clock time
+     *
+     * \return OK if the external position is set correctly,
+     *         NO_INIT in case of initialization error,
+     *         INVALID_OPERATION if the interface is not implemented.
+     */
+    virtual status_t getExternalPosition(uint64_t* position, int64_t* timeNanos) = 0;
+
+    /**
      * Start a stream operating in mmap mode.
      * createMmapBuffer() must be called before calling start()
      *
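The new getExternalPosition() pairs a frame count with the clock time at which it was observed. A hedged sketch of how a client might consume it (not a call site from this change; assumes <media/MmapStreamInterface.h> and <utils/Errors.h> are included):

    // Sketch: querying the new external position API on an existing stream.
    android::status_t queryExternalPosition(android::MmapStreamInterface* stream,
                                            uint64_t* frames, int64_t* timeNanos) {
        const android::status_t res = stream->getExternalPosition(frames, timeNanos);
        if (res == android::INVALID_OPERATION) {
            // This stream does not implement the new API; a caller would fall
            // back to getMmapPosition() based timing instead.
        }
        return res;
    }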
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index fe519bb..5271e10 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -22,6 +22,11 @@
 #include <math.h>
 #include <sstream>
 
+#include <android/media/VolumeShaperConfiguration.h>
+#include <android/media/VolumeShaperConfigurationOptionFlag.h>
+#include <android/media/VolumeShaperOperation.h>
+#include <android/media/VolumeShaperOperationFlag.h>
+#include <android/media/VolumeShaperState.h>
 #include <binder/Parcel.h>
 #include <media/Interpolator.h>
 #include <utils/Mutex.h>
@@ -284,30 +289,41 @@
             clampVolume();
         }
 
-        // The parcel layout must match VolumeShaper.java
         status_t writeToParcel(Parcel *parcel) const override {
-            if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeInt32((int32_t)mType)
-                    ?: parcel->writeInt32(mId)
-                    ?: mType == TYPE_ID
-                        ? NO_ERROR
-                        : parcel->writeInt32((int32_t)mOptionFlags)
-                            ?: parcel->writeDouble(mDurationMs)
-                            ?: Interpolator<S, T>::writeToParcel(parcel);
+            VolumeShaperConfiguration parcelable;
+            writeToParcelable(&parcelable);
+            return parcelable.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            int32_t type, optionFlags;
-            return parcel->readInt32(&type)
-                    ?: setType((Type)type)
-                    ?: parcel->readInt32(&mId)
-                    ?: mType == TYPE_ID
-                        ? NO_ERROR
-                        : parcel->readInt32(&optionFlags)
-                            ?: setOptionFlags((OptionFlag)optionFlags)
-                            ?: parcel->readDouble(&mDurationMs)
-                            ?: Interpolator<S, T>::readFromParcel(*parcel)
-                            ?: checkCurve();
+        void writeToParcelable(VolumeShaperConfiguration *parcelable) const {
+            parcelable->id = getId();
+            parcelable->type = getTypeAsAidl();
+            parcelable->optionFlags = 0;
+            if (mType != TYPE_ID) {
+                parcelable->optionFlags = getOptionFlagsAsAidl();
+                parcelable->durationMs = getDurationMs();
+                parcelable->interpolatorConfig.emplace(); // create value in std::optional
+                Interpolator<S, T>::writeToConfig(&*parcelable->interpolatorConfig);
+            }
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperConfiguration data;
+            return data.readFromParcel(parcel)
+                   ?: readFromParcelable(data);
+        }
+
+        status_t readFromParcelable(const VolumeShaperConfiguration& parcelable) {
+            setId(parcelable.id);
+            return setTypeFromAidl(parcelable.type)
+                   ?: mType == TYPE_ID
+                      ? NO_ERROR
+                      : setOptionFlagsFromAidl(parcelable.optionFlags)
+                        ?: setDurationMs(parcelable.durationMs)
+                           ?: !parcelable.interpolatorConfig  // check std::optional for value
+                               ? BAD_VALUE // must be nonnull.
+                               : Interpolator<S, T>::readFromConfig(*parcelable.interpolatorConfig)
+                                   ?: checkCurve();
         }
 
         // Returns a string for debug printing.
@@ -329,6 +345,51 @@
         int32_t mId;             // A valid id is >= 0.
         OptionFlag mOptionFlags; // option flags for the configuration.
         double mDurationMs;      // duration, must be > 0; default is 1000 ms.
+
+        int32_t getOptionFlagsAsAidl() const {
+            int32_t result = 0;
+            if (getOptionFlags() & OPTION_FLAG_VOLUME_IN_DBFS) {
+                result |=
+                        1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS);
+            }
+            if (getOptionFlags() & OPTION_FLAG_CLOCK_TIME) {
+                result |= 1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME);
+            }
+            return result;
+        }
+
+        status_t setOptionFlagsFromAidl(int32_t aidl) {
+            std::underlying_type_t<OptionFlag> options = 0;
+            if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::VOLUME_IN_DBFS))) {
+                options |= OPTION_FLAG_VOLUME_IN_DBFS;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperConfigurationOptionFlag::CLOCK_TIME))) {
+                options |= OPTION_FLAG_CLOCK_TIME;
+            }
+            return setOptionFlags(static_cast<OptionFlag>(options));
+        }
+
+        status_t setTypeFromAidl(VolumeShaperConfigurationType aidl) {
+            switch (aidl) {
+                case VolumeShaperConfigurationType::ID:
+                    return setType(TYPE_ID);
+                case VolumeShaperConfigurationType::SCALE:
+                    return setType(TYPE_SCALE);
+                default:
+                    return BAD_VALUE;
+            }
+        }
+
+        VolumeShaperConfigurationType getTypeAsAidl() const {
+            switch (getType()) {
+                case TYPE_ID:
+                    return VolumeShaperConfigurationType::ID;
+                case TYPE_SCALE:
+                    return VolumeShaperConfigurationType::SCALE;
+                default:
+                    LOG_ALWAYS_FATAL("Shouldn't get here");
+            }
+        }
     }; // Configuration
 
     /* VolumeShaper::Operation expresses an operation to perform on the
@@ -420,19 +481,29 @@
             return NO_ERROR;
         }
 
-        status_t writeToParcel(Parcel *parcel) const override {
+        status_t writeToParcel(Parcel* parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeInt32((int32_t)mFlags)
-                    ?: parcel->writeInt32(mReplaceId)
-                    ?: parcel->writeFloat(mXOffset);
+            VolumeShaperOperation op;
+            writeToParcelable(&op);
+            return op.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            int32_t flags;
-            return parcel->readInt32(&flags)
-                    ?: parcel->readInt32(&mReplaceId)
-                    ?: parcel->readFloat(&mXOffset)
-                    ?: setFlags((Flag)flags);
+        void writeToParcelable(VolumeShaperOperation* op) const {
+            op->flags = getFlagsAsAidl();
+            op->replaceId = mReplaceId;
+            op->xOffset = mXOffset;
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperOperation op;
+            return op.readFromParcel(parcel)
+                   ?: readFromParcelable(op);
+        }
+
+        status_t readFromParcelable(const VolumeShaperOperation& op) {
+            mReplaceId = op.replaceId;
+            mXOffset = op.xOffset;
+            return setFlagsFromAidl(op.flags);
         }
 
         std::string toString() const {
@@ -445,6 +516,48 @@
         }
 
     private:
+        status_t setFlagsFromAidl(int32_t aidl) {
+            std::underlying_type_t<Flag> flags = 0;
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE))) {
+                flags |= FLAG_REVERSE;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE))) {
+                flags |= FLAG_TERMINATE;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN))) {
+                flags |= FLAG_JOIN;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY))) {
+                flags |= FLAG_DELAY;
+            }
+            if (aidl & (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY))) {
+                flags |= FLAG_CREATE_IF_NECESSARY;
+            }
+            return setFlags(static_cast<Flag>(flags));
+        }
+
+        int32_t getFlagsAsAidl() const {
+            int32_t aidl = 0;
+            std::underlying_type_t<Flag> flags = getFlags();
+            if (flags & FLAG_REVERSE) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::REVERSE));
+            }
+            if (flags & FLAG_TERMINATE) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::TERMINATE));
+            }
+            if (flags & FLAG_JOIN) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::JOIN));
+            }
+            if (flags & FLAG_DELAY) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::DELAY));
+            }
+            if (flags & FLAG_CREATE_IF_NECESSARY) {
+                aidl |= (1 << static_cast<int>(VolumeShaperOperationFlag::CREATE_IF_NECESSARY));
+            }
+            return aidl;
+        }
+
+    private:
         Flag mFlags;        // operation to do
         int32_t mReplaceId; // if >= 0 the id to remove in a replace operation.
         S mXOffset;         // position in the curve to set if a valid number (not nan)
@@ -483,15 +596,28 @@
             mXOffset = xOffset;
         }
 
-        status_t writeToParcel(Parcel *parcel) const override {
+        status_t writeToParcel(Parcel* parcel) const override {
             if (parcel == nullptr) return BAD_VALUE;
-            return parcel->writeFloat(mVolume)
-                    ?: parcel->writeFloat(mXOffset);
+            VolumeShaperState state;
+            writeToParcelable(&state);
+            return state.writeToParcel(parcel);
         }
 
-        status_t readFromParcel(const Parcel *parcel) override {
-            return parcel->readFloat(&mVolume)
-                     ?: parcel->readFloat(&mXOffset);
+        void writeToParcelable(VolumeShaperState* parcelable) const {
+            parcelable->volume = mVolume;
+            parcelable->xOffset = mXOffset;
+        }
+
+        status_t readFromParcel(const Parcel* parcel) override {
+            VolumeShaperState state;
+            return state.readFromParcel(parcel)
+                   ?: readFromParcelable(state);
+        }
+
+        status_t readFromParcelable(const VolumeShaperState& parcelable) {
+            mVolume = parcelable.volume;
+            mXOffset = parcelable.xOffset;
+            return OK;
         }
 
         std::string toString() const {
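The AIDL conversion helpers above all use the same encoding: each flag enum value names a bit index, and the parceled field is the OR of (1 << index) for every flag that is set. A small self-contained illustration of that encoding; the enum below is hypothetical and stands in for VolumeShaperOperationFlag:

    // Sketch of the bit-index encoding used by getFlagsAsAidl()/setFlagsFromAidl().
    #include <cstdint>
    #include <cassert>
    #include <initializer_list>

    enum class DemoFlag : int { REVERSE = 0, TERMINATE = 1, JOIN = 2, DELAY = 3, CREATE_IF_NECESSARY = 4 };

    int32_t toBitMask(std::initializer_list<DemoFlag> flags) {
        int32_t mask = 0;
        for (DemoFlag f : flags) mask |= 1 << static_cast<int>(f);
        return mask;
    }

    int main() {
        // REVERSE (bit 0) and JOIN (bit 2) set -> 0b101 == 5
        assert(toBitMask({DemoFlag::REVERSE, DemoFlag::JOIN}) == 5);
        return 0;
    }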
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 9cabd8b..200e92d 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -182,6 +182,7 @@
                 // This is set by AudioTrack.setBufferSizeInFrames().
                 // A write will not fill the buffer above this limit.
     volatile    uint32_t   mBufferSizeInFrames;  // effective size of the buffer
+    volatile    uint32_t   mStartThresholdInFrames; // min frames in buffer to start streaming
 
 public:
 
@@ -216,6 +217,8 @@
     };
 
     size_t frameCount() const { return mFrameCount; }
+    uint32_t getStartThresholdInFrames() const;
+    uint32_t setStartThresholdInFrames(uint32_t startThresholdInFrames);
 
 protected:
     // These refer to shared memory, and are virtual addresses with respect to the current process.
diff --git a/media/OWNERS b/media/OWNERS
index 1afc253..3e194f0 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -8,13 +8,14 @@
 hunga@google.com
 jiabin@google.com
 jmtrivi@google.com
-krocard@google.com
 lajos@google.com
 marcone@google.com
 mnaganov@google.com
+nchalko@google.com
 pawin@google.com
 philburk@google.com
 pmclean@google.com
+quxiangfang@google.com
 rachad@google.com
 rago@google.com
 robertshih@google.com
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a6dfb21..5bc7262 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,32 +1,65 @@
+// for frameworks/av/media
 {
-  "presubmit": [
-    {
-      "name": "GtsMediaTestCases",
-      "options" : [
+    "presubmit-large": [
+        // runs whenever we change something in this tree
         {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
         }
-      ]
-    },
-    {
-      "name": "GtsExoPlayerTestCases",
-      "options" : [
+    ],
+    "presubmit": [
         {
-          "include-annotation": "android.platform.test.annotations.SocPresubmit"
-        },
-        {
-          "include-filter": "com.google.android.exoplayer.gts.DashTest#testWidevine23FpsH264Fixed"
+            "name": "GtsMediaTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.Presubmit"
+                },
+                {
+                    "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+                },
+                {
+                    "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+                }
+            ]
         }
-      ]
-    }
-  ],
-  "imports": [
-    {
-      "path": "frameworks/av/drm/mediadrm/plugins"
-    }
-  ]
-}
+    ],
 
+    "imports": [
+        {
+            "path": "frameworks/av/drm/mediadrm/plugins"
+        }
+    ],
+
+    "platinum-postsubmit": [
+        // runs regularly, independent of changes in this tree.
+        // signals if changes elsewhere break media functionality
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "CtsMediaTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                }
+            ]
+        }
+    ]
+}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
new file mode 100644
index 0000000..be25ffb
--- /dev/null
+++ b/media/audioserver/Android.bp
@@ -0,0 +1,67 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_binary {
+    name: "audioserver",
+
+    srcs: [
+        "main_audioserver.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libmediametrics_headers",
+    ],
+
+    shared_libs: [
+        "libaaudioservice",
+        "libaudioflinger",
+        "libaudiopolicyservice",
+        "libaudioprocessing",
+        "libbinder",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libmedialogservice",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpowermanager",
+        "libutils",
+        "libvibrator",
+
+    ],
+
+    // TODO check if we still need all of these include directories
+    include_dirs: [
+        "external/sonic",
+        "frameworks/av/media/libaaudio/include",
+        "frameworks/av/media/libaaudio/src",
+        "frameworks/av/media/libaaudio/src/binding",
+        "frameworks/av/media/libmedia/include",
+        "frameworks/av/services/audioflinger",
+        "frameworks/av/services/audiopolicy",
+        "frameworks/av/services/audiopolicy/common/include",
+        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
+        "frameworks/av/services/audiopolicy/engine/interface",
+        "frameworks/av/services/audiopolicy/service",
+        "frameworks/av/services/medialog",
+
+        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
+        "frameworks/av/services/oboeservice",
+    ],
+
+    init_rc: ["audioserver.rc"],
+}
diff --git a/media/audioserver/Android.mk b/media/audioserver/Android.mk
deleted file mode 100644
index cf1c14c..0000000
--- a/media/audioserver/Android.mk
+++ /dev/null
@@ -1,51 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	main_audioserver.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
-	libaaudioservice \
-	libaudioflinger \
-	libaudiopolicyservice \
-	libaudioprocessing \
-	libbinder \
-	libcutils \
-	liblog \
-	libhidlbase \
-	libmedia \
-	libmedialogservice \
-	libmediautils \
-	libnbaio \
-	libnblog \
-	libutils \
-	libvibrator
-
-LOCAL_HEADER_LIBRARIES := \
-	libaudiohal_headers \
-	libmediametrics_headers \
-
-# TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-LOCAL_C_INCLUDES := \
-	frameworks/av/services/audioflinger \
-	frameworks/av/services/audiopolicy \
-	frameworks/av/services/audiopolicy/common/managerdefinitions/include \
-	frameworks/av/services/audiopolicy/common/include \
-	frameworks/av/services/audiopolicy/engine/interface \
-	frameworks/av/services/audiopolicy/service \
-	frameworks/av/services/medialog \
-	frameworks/av/services/oboeservice \
-	frameworks/av/media/libaaudio/include \
-	frameworks/av/media/libaaudio/src \
-	frameworks/av/media/libaaudio/src/binding \
-	frameworks/av/media/libmedia/include \
-	external/sonic \
-
-LOCAL_MODULE := audioserver
-
-LOCAL_INIT_RC := audioserver.rc
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index f05c2d2..c4a6601 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -6,8 +6,12 @@
     capabilities BLOCK_SUSPEND
     ioprio rt 4
     task_profiles ProcessCapacityHigh HighPerformance
-
-    onrestart setprop sys.audio.restart.hal 1
+    onrestart restart vendor.audio-hal
+    onrestart restart vendor.audio-hal-4-0-msd
+    onrestart restart audio_proxy_service
+    # Keep the original service names for backward compatibility
+    onrestart restart vendor.audio-hal-2-0
+    onrestart restart audio-hal-2-0
 
 on property:vts.native_server.on=1
     stop audioserver
@@ -17,6 +21,7 @@
 on property:init.svc.audioserver=stopped
     stop vendor.audio-hal
     stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal-2-0
     stop audio-hal-2-0
@@ -25,6 +30,7 @@
     # audioserver bringing it back into running state.
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
@@ -32,16 +38,24 @@
 on property:init.svc.audioserver=running
     start vendor.audio-hal
     start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
     # Keep the original service names for backward compatibility
     start vendor.audio-hal-2-0
     start audio-hal-2-0
 
 on property:sys.audio.restart.hal=1
-    restart vendor.audio-hal
-    restart vendor.audio-hal-4-0-msd
+    # See b/159966243. Avoid restart loop between audioserver and HAL.
     # Keep the original service names for backward compatibility
-    restart vendor.audio-hal-2-0
-    restart audio-hal-2-0
+    stop vendor.audio-hal
+    stop vendor.audio-hal-4-0-msd
+    stop audio_proxy_service
+    stop vendor.audio-hal-2-0
+    stop audio-hal-2-0
+    start vendor.audio-hal
+    start vendor.audio-hal-4-0-msd
+    start audio_proxy_service
+    start vendor.audio-hal-2-0
+    start audio-hal-2-0
     # reset the property
     setprop sys.audio.restart.hal 0
 
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index f9f4f31..8ee1efb 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -29,8 +29,8 @@
 #include <mediautils/LimitProcessMemory.h>
 #include <utils/Log.h>
 
-// from LOCAL_C_INCLUDES
-#include "aaudio/AAudioTesting.h"
+// from include_dirs
+#include "aaudio/AAudioTesting.h" // aaudio_policy_t, AAUDIO_PROP_MMAP_POLICY, AAUDIO_POLICY_*
 #include "AudioFlinger.h"
 #include "AudioPolicyService.h"
 #include "AAudioService.h"
@@ -49,7 +49,12 @@
 
     signal(SIGPIPE, SIG_IGN);
 
+#if 1
+    // FIXME See bug 165702394 and bug 168511485
+    const bool doLog = false;
+#else
     bool doLog = (bool) property_get_bool("ro.test_harness", 0);
+#endif
 
     pid_t childPid;
     // FIXME The advantage of making the process containing media.log service the parent process of
diff --git a/media/bufferpool/1.0/Android.bp b/media/bufferpool/1.0/Android.bp
index f817c76..16cf920 100644
--- a/media/bufferpool/1.0/Android.bp
+++ b/media/bufferpool/1.0/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libstagefright_bufferpool@1.0",
     vendor_available: true,
diff --git a/media/bufferpool/1.0/TEST_MAPPING b/media/bufferpool/1.0/TEST_MAPPING
new file mode 100644
index 0000000..a1e6a58
--- /dev/null
+++ b/media/bufferpool/1.0/TEST_MAPPING
@@ -0,0 +1,8 @@
+// mappings for frameworks/av/media/bufferpool/1.0
+{
+  "presubmit": [
+
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetSingleTest" },
+    { "name": "VtsVndkHidlBufferpoolV1_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/1.0/vts/Android.bp b/media/bufferpool/1.0/vts/Android.bp
index ee5a757..0977ba5 100644
--- a/media/bufferpool/1.0/vts/Android.bp
+++ b/media/bufferpool/1.0/vts/Android.bp
@@ -14,8 +14,18 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -34,6 +44,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV1_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
diff --git a/media/bufferpool/1.0/vts/multi.cpp b/media/bufferpool/1.0/vts/multi.cpp
index 1796819..d8cc285 100644
--- a/media/bufferpool/1.0/vts/multi.cpp
+++ b/media/bufferpool/1.0/vts/multi.cpp
@@ -215,7 +215,7 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-  setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+  android::hardware::details::setTrebleTestingOverride(true);
   ::testing::InitGoogleTest(&argc, argv);
   int status = RUN_ALL_TESTS();
   LOG(INFO) << "Test result = " << status;
diff --git a/media/bufferpool/2.0/Android.bp b/media/bufferpool/2.0/Android.bp
index 536f75e..0d1fe27 100644
--- a/media/bufferpool/2.0/Android.bp
+++ b/media/bufferpool/2.0/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libstagefright_bufferpool@2.0-default",
     srcs: [
@@ -49,4 +58,3 @@
         enabled: true,
     },
 }
-
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/bufferpool/2.0/BufferPoolClient.cpp
index 9308b81..cda23ff 100644
--- a/media/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/bufferpool/2.0/BufferPoolClient.cpp
@@ -29,7 +29,7 @@
 namespace V2_0 {
 namespace implementation {
 
-static constexpr int64_t kReceiveTimeoutUs = 1000000; // 100ms
+static constexpr int64_t kReceiveTimeoutUs = 2000000; // 2s
 static constexpr int kPostMaxRetry = 3;
 static constexpr int kCacheTtlUs = 1000000; // TODO: tune
 static constexpr size_t kMaxCachedBufferCount = 64;
diff --git a/media/bufferpool/2.0/TEST_MAPPING b/media/bufferpool/2.0/TEST_MAPPING
new file mode 100644
index 0000000..65dee2c
--- /dev/null
+++ b/media/bufferpool/2.0/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/bufferpool/2.0
+{
+  "presubmit": [
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetSingleTest"},
+    { "name": "VtsVndkHidlBufferpoolV2_0TargetMultiTest"}
+  ]
+}
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 8b44f61..803a813 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -14,8 +14,18 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetSingleTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -24,7 +34,7 @@
     static_libs: [
         "android.hardware.media.bufferpool@2.0",
         "libcutils",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
     ],
     shared_libs: [
         "libfmq",
@@ -34,6 +44,7 @@
 
 cc_test {
     name: "VtsVndkHidlBufferpoolV2_0TargetMultiTest",
+    test_suites: ["device-tests"],
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
         "allocator.cpp",
@@ -42,10 +53,30 @@
     static_libs: [
         "android.hardware.media.bufferpool@2.0",
         "libcutils",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
     ],
     shared_libs: [
         "libfmq",
     ],
     compile_multilib: "both",
 }
+
+cc_test {
+    name: "VtsVndkHidlBufferpoolV2_0TargetCondTest",
+    test_suites: ["device-tests"],
+    defaults: ["VtsHalTargetTestDefaults"],
+    srcs: [
+        "allocator.cpp",
+        "cond.cpp",
+    ],
+    static_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "libcutils",
+        "libstagefright_bufferpool@2.0.1",
+    ],
+    shared_libs: [
+        "libhidlbase",
+        "libfmq",
+    ],
+    compile_multilib: "both",
+}
diff --git a/media/bufferpool/2.0/tests/allocator.cpp b/media/bufferpool/2.0/tests/allocator.cpp
index 843f7ea..25b08ef 100644
--- a/media/bufferpool/2.0/tests/allocator.cpp
+++ b/media/bufferpool/2.0/tests/allocator.cpp
@@ -120,6 +120,24 @@
 
 }
 
+void IpcMutex::init() {
+  pthread_mutexattr_t mattr;
+  pthread_mutexattr_init(&mattr);
+  pthread_mutexattr_setpshared(&mattr, PTHREAD_PROCESS_SHARED);
+  pthread_mutex_init(&lock, &mattr);
+  pthread_mutexattr_destroy(&mattr);
+
+  pthread_condattr_t cattr;
+  pthread_condattr_init(&cattr);
+  pthread_condattr_setpshared(&cattr, PTHREAD_PROCESS_SHARED);
+  pthread_cond_init(&cond, &cattr);
+  pthread_condattr_destroy(&cattr);
+}
+
+IpcMutex *IpcMutex::Import(void *pMutex) {
+  return reinterpret_cast<IpcMutex *>(pMutex);
+}
+
 
 ResultStatus TestBufferPoolAllocator::allocate(
     const std::vector<uint8_t> &params,
@@ -201,9 +219,33 @@
   return false;
 }
 
+bool TestBufferPoolAllocator::MapMemoryForMutex(const native_handle_t *handle, void **mem) {
+  if (!HandleAshmem::isValid(handle)) {
+    return false;
+  }
+  const HandleAshmem *o = static_cast<const HandleAshmem*>(handle);
+  *mem = mmap(
+      NULL, o->size(), PROT_READ|PROT_WRITE, MAP_SHARED, o->ashmemFd(), 0);
+  if (*mem == MAP_FAILED || *mem == nullptr) {
+    return false;
+  }
+  return true;
+}
+
+bool TestBufferPoolAllocator::UnmapMemoryForMutex(void *mem) {
+  munmap(mem, sizeof(IpcMutex));
+  return true;
+}
+
 void getTestAllocatorParams(std::vector<uint8_t> *params) {
   constexpr static int kAllocationSize = 1024 * 10;
   Params ashmemParams(kAllocationSize);
 
   params->assign(ashmemParams.array, ashmemParams.array + sizeof(ashmemParams));
 }
+
+void getIpcMutexParams(std::vector<uint8_t> *params) {
+  Params ashmemParams(sizeof(IpcMutex));
+
+  params->assign(ashmemParams.array, ashmemParams.array + sizeof(ashmemParams));
+}
diff --git a/media/bufferpool/2.0/tests/allocator.h b/media/bufferpool/2.0/tests/allocator.h
index 5281dc3..862d1a5 100644
--- a/media/bufferpool/2.0/tests/allocator.h
+++ b/media/bufferpool/2.0/tests/allocator.h
@@ -17,6 +17,7 @@
 #ifndef VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
 #define VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
 
+#include <pthread.h>
 #include <bufferpool/BufferPoolTypes.h>
 
 using android::hardware::media::bufferpool::V2_0::ResultStatus;
@@ -25,6 +26,17 @@
 using android::hardware::media::bufferpool::V2_0::implementation::
     BufferPoolAllocator;
 
+struct IpcMutex {
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+  int counter = 0;
+  bool signalled = false;
+
+  void init();
+
+  static IpcMutex *Import(void *mem);
+};
+
 // buffer allocator for the tests
 class TestBufferPoolAllocator : public BufferPoolAllocator {
  public:
@@ -43,9 +55,14 @@
 
   static bool Verify(const native_handle_t *handle, const unsigned char val);
 
+  static bool MapMemoryForMutex(const native_handle_t *handle, void **mem);
+
+  static bool UnmapMemoryForMutex(void *mem);
 };
 
 // retrieve buffer allocator parameters
 void getTestAllocatorParams(std::vector<uint8_t> *params);
 
+void getIpcMutexParams(std::vector<uint8_t> *params);
+
 #endif  // VNDK_HIDL_BUFFERPOOL_V2_0_ALLOCATOR_H
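IpcMutex is designed to live inside a shared (ashmem) mapping: one process constructs and initializes it with process-shared pthread attributes, the other reinterprets the same mapping via Import(). The cond.cpp test below drives this through the bufferpool transport; a minimal sketch of the bare pattern, assuming "allocator.h" above is included and both processes see the same mapping:

    // Sketch: process-shared mutex/condvar placed in a shared mapping.
    #include <pthread.h>
    #include <new>

    void producerSide(void* mem, int value) {
        IpcMutex* m = new (mem) IpcMutex();   // placement-new into shared memory
        m->init();                            // PTHREAD_PROCESS_SHARED attributes
        pthread_mutex_lock(&m->lock);
        m->counter = value;
        m->signalled = true;
        pthread_cond_signal(&m->cond);
        pthread_mutex_unlock(&m->lock);
    }

    int consumerSide(void* mem) {
        IpcMutex* m = IpcMutex::Import(mem);  // reinterpret the mapping, no re-init
        pthread_mutex_lock(&m->lock);
        while (!m->signalled) {
            pthread_cond_wait(&m->cond, &m->lock);
        }
        const int value = m->counter;
        pthread_mutex_unlock(&m->lock);
        return value;
    }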
diff --git a/media/bufferpool/2.0/tests/cond.cpp b/media/bufferpool/2.0/tests/cond.cpp
new file mode 100644
index 0000000..21beea8
--- /dev/null
+++ b/media/bufferpool/2.0/tests/cond.cpp
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "buffferpool_unit_test"
+
+#include <gtest/gtest.h>
+
+#include <android-base/logging.h>
+#include <binder/ProcessState.h>
+#include <bufferpool/ClientManager.h>
+#include <errno.h>
+#include <hidl/HidlSupport.h>
+#include <hidl/HidlTransportSupport.h>
+#include <hidl/LegacySupport.h>
+#include <hidl/Status.h>
+#include <signal.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+#include <iostream>
+#include <memory>
+#include <vector>
+#include "allocator.h"
+
+using android::hardware::configureRpcThreadpool;
+using android::hardware::hidl_handle;
+using android::hardware::media::bufferpool::V2_0::IClientManager;
+using android::hardware::media::bufferpool::V2_0::ResultStatus;
+using android::hardware::media::bufferpool::V2_0::implementation::BufferId;
+using android::hardware::media::bufferpool::V2_0::implementation::ClientManager;
+using android::hardware::media::bufferpool::V2_0::implementation::ConnectionId;
+using android::hardware::media::bufferpool::V2_0::implementation::TransactionId;
+using android::hardware::media::bufferpool::BufferPoolData;
+
+namespace {
+
+// communication message types between processes.
+enum PipeCommand : int32_t {
+    INIT_OK = 0,
+    INIT_ERROR,
+    SEND,
+    RECEIVE_OK,
+    RECEIVE_ERROR,
+};
+
+// communication message between processes.
+union PipeMessage {
+    struct  {
+        int32_t command;
+        BufferId bufferId;
+        ConnectionId connectionId;
+        TransactionId transactionId;
+        int64_t  timestampUs;
+    } data;
+    char array[0];
+};
+
+constexpr int kSignalInt = 200;
+
+// media.bufferpool test setup
+class BufferpoolMultiTest : public ::testing::Test {
+ public:
+  virtual void SetUp() override {
+    ResultStatus status;
+    mReceiverPid = -1;
+    mConnectionValid = false;
+
+    ASSERT_TRUE(pipe(mCommandPipeFds) == 0);
+    ASSERT_TRUE(pipe(mResultPipeFds) == 0);
+
+    mReceiverPid = fork();
+    ASSERT_TRUE(mReceiverPid >= 0);
+
+    if (mReceiverPid == 0) {
+      doReceiver();
+      // Keep the child process out of gtest's flow: wait here until it is
+      // killed from TearDown().
+      pause();
+    }
+
+    mManager = ClientManager::getInstance();
+    ASSERT_NE(mManager, nullptr);
+
+    mAllocator = std::make_shared<TestBufferPoolAllocator>();
+    ASSERT_TRUE((bool)mAllocator);
+
+    status = mManager->create(mAllocator, &mConnectionId);
+    ASSERT_TRUE(status == ResultStatus::OK);
+    mConnectionValid = true;
+  }
+
+  virtual void TearDown() override {
+    if (mReceiverPid > 0) {
+      kill(mReceiverPid, SIGKILL);
+      int wstatus;
+      wait(&wstatus);
+    }
+
+    if (mConnectionValid) {
+      mManager->close(mConnectionId);
+    }
+  }
+
+ protected:
+  static void description(const std::string& description) {
+    RecordProperty("description", description);
+  }
+
+  android::sp<ClientManager> mManager;
+  std::shared_ptr<BufferPoolAllocator> mAllocator;
+  bool mConnectionValid;
+  ConnectionId mConnectionId;
+  pid_t mReceiverPid;
+  int mCommandPipeFds[2];
+  int mResultPipeFds[2];
+
+  bool sendMessage(int *pipes, const PipeMessage &message) {
+    int ret = write(pipes[1], message.array, sizeof(PipeMessage));
+    return ret == sizeof(PipeMessage);
+  }
+
+  bool receiveMessage(int *pipes, PipeMessage *message) {
+    int ret = read(pipes[0], message->array, sizeof(PipeMessage));
+    return ret == sizeof(PipeMessage);
+  }
+
+  void doReceiver() {
+    configureRpcThreadpool(1, false);
+    PipeMessage message;
+    mManager = ClientManager::getInstance();
+    if (!mManager) {
+      message.data.command = PipeCommand::INIT_ERROR;
+      sendMessage(mResultPipeFds, message);
+      return;
+    }
+    android::status_t status = mManager->registerAsService();
+    if (status != android::OK) {
+      message.data.command = PipeCommand::INIT_ERROR;
+      sendMessage(mResultPipeFds, message);
+      return;
+    }
+    message.data.command = PipeCommand::INIT_OK;
+    sendMessage(mResultPipeFds, message);
+
+    int val = 0;
+    receiveMessage(mCommandPipeFds, &message);
+    {
+      native_handle_t *rhandle = nullptr;
+      std::shared_ptr<BufferPoolData> rbuffer;
+      void *mem = nullptr;
+      IpcMutex *mutex = nullptr;
+      ResultStatus status = mManager->receive(
+          message.data.connectionId, message.data.transactionId,
+          message.data.bufferId, message.data.timestampUs, &rhandle, &rbuffer);
+      mManager->close(message.data.connectionId);
+      if (status != ResultStatus::OK) {
+          message.data.command = PipeCommand::RECEIVE_ERROR;
+          sendMessage(mResultPipeFds, message);
+          return;
+      }
+      if (!TestBufferPoolAllocator::MapMemoryForMutex(rhandle, &mem)) {
+          message.data.command = PipeCommand::RECEIVE_ERROR;
+          sendMessage(mResultPipeFds, message);
+          return;
+      }
+      mutex = IpcMutex::Import(mem);
+      pthread_mutex_lock(&(mutex->lock));
+      while (mutex->signalled != true) {
+          pthread_cond_wait(&(mutex->cond), &(mutex->lock));
+      }
+      val = mutex->counter;
+      pthread_mutex_unlock(&(mutex->lock));
+
+      (void)TestBufferPoolAllocator::UnmapMemoryForMutex(mem);
+      if (rhandle) {
+        native_handle_close(rhandle);
+        native_handle_delete(rhandle);
+      }
+    }
+    if (val == kSignalInt) {
+      message.data.command = PipeCommand::RECEIVE_OK;
+    } else {
+      message.data.command = PipeCommand::RECEIVE_ERROR;
+    }
+    sendMessage(mResultPipeFds, message);
+  }
+};
+
+// Buffer transfer test between processes.
+TEST_F(BufferpoolMultiTest, TransferBuffer) {
+  ResultStatus status;
+  PipeMessage message;
+
+  ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
+
+  android::sp<IClientManager> receiver = IClientManager::getService();
+  ConnectionId receiverId;
+  ASSERT_TRUE((bool)receiver);
+
+  status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+  ASSERT_TRUE(status == ResultStatus::OK);
+  {
+    native_handle_t *shandle = nullptr;
+    std::shared_ptr<BufferPoolData> sbuffer;
+    TransactionId transactionId;
+    int64_t postUs;
+    std::vector<uint8_t> vecParams;
+    void *mem = nullptr;
+    IpcMutex *mutex = nullptr;
+
+    getIpcMutexParams(&vecParams);
+    status = mManager->allocate(mConnectionId, vecParams, &shandle, &sbuffer);
+    ASSERT_TRUE(status == ResultStatus::OK);
+
+    ASSERT_TRUE(TestBufferPoolAllocator::MapMemoryForMutex(shandle, &mem));
+
+    mutex = new(mem) IpcMutex();
+    mutex->init();
+
+    status = mManager->postSend(receiverId, sbuffer, &transactionId, &postUs);
+    ASSERT_TRUE(status == ResultStatus::OK);
+
+    message.data.command = PipeCommand::SEND;
+    message.data.bufferId = sbuffer->mId;
+    message.data.connectionId = receiverId;
+    message.data.transactionId = transactionId;
+    message.data.timestampUs = postUs;
+    sendMessage(mCommandPipeFds, message);
+    for (int i=0; i < 200000000; ++i) {
+      // crude busy-wait so that the receiver is likely to reach
+      // pthread_cond_wait before pthread_cond_signal is issued below
+    }
+    pthread_mutex_lock(&(mutex->lock));
+    mutex->counter = kSignalInt;
+    mutex->signalled = true;
+    pthread_cond_signal(&(mutex->cond));
+    pthread_mutex_unlock(&(mutex->lock));
+    (void)TestBufferPoolAllocator::UnmapMemoryForMutex(mem);
+    if (shandle) {
+      native_handle_close(shandle);
+      native_handle_delete(shandle);
+    }
+  }
+  EXPECT_TRUE(receiveMessage(mResultPipeFds, &message));
+  EXPECT_TRUE(message.data.command == PipeCommand::RECEIVE_OK);
+}
+
+}  // anonymous namespace
+
+int main(int argc, char** argv) {
+  android::hardware::details::setTrebleTestingOverride(true);
+  ::testing::InitGoogleTest(&argc, argv);
+  int status = RUN_ALL_TESTS();
+  LOG(INFO) << "Test result = " << status;
+  return status;
+}
diff --git a/media/bufferpool/2.0/tests/multi.cpp b/media/bufferpool/2.0/tests/multi.cpp
index 68b6992..43b0a8c 100644
--- a/media/bufferpool/2.0/tests/multi.cpp
+++ b/media/bufferpool/2.0/tests/multi.cpp
@@ -161,11 +161,18 @@
           message.data.bufferId, message.data.timestampUs, &rhandle, &rbuffer);
       mManager->close(message.data.connectionId);
       if (status != ResultStatus::OK) {
-        if (!TestBufferPoolAllocator::Verify(rhandle, 0x77)) {
-          message.data.command = PipeCommand::RECEIVE_ERROR;
-          sendMessage(mResultPipeFds, message);
-          return;
-        }
+        message.data.command = PipeCommand::RECEIVE_ERROR;
+        sendMessage(mResultPipeFds, message);
+        return;
+      }
+      if (!TestBufferPoolAllocator::Verify(rhandle, 0x77)) {
+        message.data.command = PipeCommand::RECEIVE_ERROR;
+        sendMessage(mResultPipeFds, message);
+        return;
+      }
+      if (rhandle) {
+        native_handle_close(rhandle);
+        native_handle_delete(rhandle);
       }
     }
     message.data.command = PipeCommand::RECEIVE_OK;
@@ -198,6 +205,10 @@
     ASSERT_TRUE(status == ResultStatus::OK);
 
     ASSERT_TRUE(TestBufferPoolAllocator::Fill(shandle, 0x77));
+    if (shandle) {
+        native_handle_close(shandle);
+        native_handle_delete(shandle);
+    }
 
     status = mManager->postSend(receiverId, sbuffer, &transactionId, &postUs);
     ASSERT_TRUE(status == ResultStatus::OK);
@@ -210,12 +221,13 @@
     sendMessage(mCommandPipeFds, message);
   }
   EXPECT_TRUE(receiveMessage(mResultPipeFds, &message));
+  EXPECT_TRUE(message.data.command == PipeCommand::RECEIVE_OK);
 }
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-  setenv("TREBLE_TESTING_OVERRIDE", "true", true);
+  android::hardware::details::setTrebleTestingOverride(true);
   ::testing::InitGoogleTest(&argc, argv);
   int status = RUN_ALL_TESTS();
   LOG(INFO) << "Test result = " << status;
diff --git a/media/bufferpool/2.0/tests/single.cpp b/media/bufferpool/2.0/tests/single.cpp
index 777edcf..1e9027b 100644
--- a/media/bufferpool/2.0/tests/single.cpp
+++ b/media/bufferpool/2.0/tests/single.cpp
@@ -102,6 +102,10 @@
   for (int i = 0; i < kNumAllocationTest; ++i) {
     status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &buffer[i]);
     ASSERT_TRUE(status == ResultStatus::OK);
+    if (allocHandle) {
+      native_handle_close(allocHandle);
+      native_handle_delete(allocHandle);
+    }
   }
   for (int i = 0; i < kNumAllocationTest; ++i) {
     for (int j = i + 1; j < kNumAllocationTest; ++j) {
@@ -125,6 +129,10 @@
     status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &buffer);
     ASSERT_TRUE(status == ResultStatus::OK);
     bid[i] = buffer->mId;
+    if (allocHandle) {
+      native_handle_close(allocHandle);
+      native_handle_delete(allocHandle);
+    }
   }
   for (int i = 1; i < kNumRecycleTest; ++i) {
     ASSERT_TRUE(bid[i - 1] == bid[i]);
@@ -154,6 +162,15 @@
                              &recvHandle, &rbuffer);
   EXPECT_TRUE(status == ResultStatus::OK);
   ASSERT_TRUE(TestBufferPoolAllocator::Verify(recvHandle, 0x77));
+
+  if (allocHandle) {
+    native_handle_close(allocHandle);
+    native_handle_delete(allocHandle);
+  }
+  if (recvHandle) {
+    native_handle_close(recvHandle);
+    native_handle_delete(recvHandle);
+  }
 }
 
 }  // anonymous namespace
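The test fixes above repeatedly apply the same cleanup rule: any native_handle_t returned by allocate()/receive() must be closed (to release its fds) and then deleted (to free the handle) by the caller. A small RAII sketch of that pattern; ScopedNativeHandle is a hypothetical helper, not part of the bufferpool API:

    // Sketch: RAII wrapper expressing the close+delete pattern used in the tests.
    #include <cutils/native_handle.h>

    struct ScopedNativeHandle {
        native_handle_t* handle = nullptr;
        explicit ScopedNativeHandle(native_handle_t* h) : handle(h) {}
        ~ScopedNativeHandle() {
            if (handle) {
                native_handle_close(handle);   // close the contained fds
                native_handle_delete(handle);  // free the handle itself
            }
        }
        ScopedNativeHandle(const ScopedNativeHandle&) = delete;
        ScopedNativeHandle& operator=(const ScopedNativeHandle&) = delete;
    };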
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 8afa1a8..6ac4210 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -1,5 +1,12 @@
 {
   "presubmit": [
+    // TODO failing 4 of 13
+    // { "name": "codec2_core_param_test"},
+    // TODO(b/155516524)
+    // { "name": "codec2_vndk_interface_test"},
+    { "name": "codec2_vndk_test"}
+  ],
+  "presubmit-large": [
     {
       "name": "CtsMediaTestCases",
       "options": [
diff --git a/media/codec2/components/aac/Android.bp b/media/codec2/components/aac/Android.bp
index 9eca585..c547e85 100644
--- a/media/codec2/components/aac/Android.bp
+++ b/media/codec2/components/aac/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_aac_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_aac_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_aacdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -15,7 +34,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_aacenc",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 677f316..342d771 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -55,6 +55,8 @@
 namespace android {
 
 constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+constexpr size_t kDefaultOutputPortDelay = 2;
+constexpr size_t kMaxOutputPortDelay = 16;
 
 class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
@@ -73,7 +75,9 @@
 
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(2u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputPortDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputPortDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
@@ -263,6 +267,7 @@
       mAACDecoder(nullptr),
       mStreamInfo(nullptr),
       mSignalledError(false),
+      mOutputPortDelay(kDefaultOutputPortDelay),
       mOutputDelayRingBuffer(nullptr) {
 }
 
@@ -284,13 +289,14 @@
     mOutputDelayRingBufferFilled = 0;
     mBuffersInfo.clear();
 
-    // To make the codec behave the same before and after a reset, we need to invalidate the
-    // streaminfo struct. This does that:
-    mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
-
+    status_t status = UNKNOWN_ERROR;
+    if (mAACDecoder) {
+        aacDecoder_Close(mAACDecoder);
+        status = initDecoder();
+    }
     mSignalledError = false;
 
-    return C2_OK;
+    return status == OK ? C2_OK : C2_CORRUPTED;
 }
 
 void C2SoftAacDec::onReset() {
@@ -509,8 +515,8 @@
 
                 // TODO: error handling, proper usage, etc.
                 C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-                c2_status_t err = pool->fetchLinearBlock(
-                        numSamples * sizeof(int16_t), usage, &block);
+                size_t bufferSize = numSamples * sizeof(int16_t);
+                c2_status_t err = pool->fetchLinearBlock(bufferSize, usage, &block);
                 if (err != C2_OK) {
                     ALOGD("failed to fetch a linear block (%d)", err);
                     return std::bind(fillEmptyWork, _1, C2_NO_MEMORY);
@@ -524,7 +530,7 @@
                     mSignalledError = true;
                     return std::bind(fillEmptyWork, _1, C2_CORRUPTED);
                 }
-                return [buffer = createLinearBuffer(block)](
+                return [buffer = createLinearBuffer(block, 0, bufferSize)](
                         const std::unique_ptr<C2Work> &work) {
                     work->result = C2_OK;
                     C2FrameData &output = work->worklets.front()->output;
@@ -877,10 +883,14 @@
             work->worklets.front()->output.configUpdate.push_back(
                     C2Param::Copy(currentBoostFactor));
 
-            C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
-                    (C2Config::drc_compression_mode_t) compressMode);
-            work->worklets.front()->output.configUpdate.push_back(
-                    C2Param::Copy(currentCompressMode));
+            if (android_get_device_api_level() < __ANDROID_API_S__) {
+                // We used to report DRC compression mode in the output format
+                // in Q and R, but stopped doing that in S
+                C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
+                        (C2Config::drc_compression_mode_t) compressMode);
+                work->worklets.front()->output.configUpdate.push_back(
+                        C2Param::Copy(currentCompressMode));
+            }
 
             C2StreamDrcEncodedTargetLevelTuning::input currentEncodedTargetLevel(0u,
                     (C2FloatValue) (encTargetLevel*-0.25));
@@ -911,6 +921,29 @@
 
     int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
 
+    size_t numSamplesInOutput = mStreamInfo->frameSize * mStreamInfo->numChannels;
+    if (numSamplesInOutput > 0) {
+        size_t actualOutputPortDelay = (outputDelay + numSamplesInOutput - 1) / numSamplesInOutput;
+        if (actualOutputPortDelay > mOutputPortDelay) {
+            mOutputPortDelay = actualOutputPortDelay;
+            ALOGV("New Output port delay %zu ", mOutputPortDelay);
+
+            C2PortActualDelayTuning::output outputPortDelay(mOutputPortDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputPortDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputPortDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
     mBuffersInfo.push_back(std::move(inInfo));
     work->workletsProcessed = 0u;
     if (!eos && mOutputDelayCompensated < outputDelay) {
@@ -1061,11 +1094,13 @@
 
 }  // namespace android
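+// NOTE: canonical CFI jump-table entries keep the addresses of these exported factory
+// functions stable for indirect callers (e.g. the component loader) when CFI is enabled
+// for these libraries.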
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAacDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index 965c29e..986187c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -57,6 +57,7 @@
     size_t mInputBufferCount;
     size_t mOutputBufferCount;
     bool mSignalledError;
+    size_t mOutputPortDelay;
     struct Info {
         uint64_t frameIndex;
         size_t bufferSize;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 2e85915..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
         return UNKNOWN_ERROR;
     }
 
-    if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
-        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+    if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
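+        // Map the C2 SBR setting onto the encoder's boolean parameter: 0 = off, 1 = on.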
+        int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
             ALOGE("Failed to set AAC encoder parameters");
             return UNKNOWN_ERROR;
         }
@@ -692,11 +693,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAacEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/aac/DrcPresModeWrap.cpp b/media/codec2/components/aac/DrcPresModeWrap.cpp
index bee969b..7ce5c9d 100644
--- a/media/codec2/components/aac/DrcPresModeWrap.cpp
+++ b/media/codec2/components/aac/DrcPresModeWrap.cpp
@@ -161,7 +161,7 @@
     int newHeavy          = mDesHeavy;
 
     if (mDataUpdate) {
-        // sanity check
+        // Validation check
         if ((mDesTarget < MAX_TARGET_LEVEL) && (mDesTarget != -1)){
             mDesTarget = MAX_TARGET_LEVEL;  // limit target level to -10 dB or below
             newTarget = MAX_TARGET_LEVEL;
@@ -217,7 +217,7 @@
         }
         else { // handle other used encoder target levels
 
-            // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+            // Validation check: DRC presentation mode is only specified for max. 5.1 channels
             if (mStreamNrAACChan > 6) {
                 drcPresMode = 0;
             }
@@ -308,7 +308,7 @@
             } // switch()
         } // if (mEncoderTarget  == GPM_ENCODER_TARGET_LEVEL)
 
-        // sanity again
+        // Validate the derived values again
         if (newHeavy == 1) {
             newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
             newAttFactor = 127;
diff --git a/media/codec2/components/amr_nb_wb/Android.bp b/media/codec2/components/amr_nb_wb/Android.bp
index ce25bc9..1418ba5 100644
--- a/media/codec2/components/amr_nb_wb/Android.bp
+++ b/media/codec2/components/amr_nb_wb/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_amr_nb_wb_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_amr_nb_wb_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
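+// cc_library (rather than cc_library_shared) also provides a static variant of each codec library.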
+cc_library {
     name: "libcodec2_soft_amrnbdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -21,7 +40,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_amrwbdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -40,7 +59,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_amrnbenc",
     defaults: [
         "libcodec2_soft-defaults",
@@ -58,7 +77,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_amrwbenc",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index f7943be..e92d38d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -143,7 +143,7 @@
     if (!mIsWide) {
         Speech_Decode_Frame_reset(mAmrHandle);
     } else {
-        pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
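+        // Pass reset_all = 1 so the wideband decoder state is fully cleared.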
+        pvDecoder_AmrWb_Reset(mAmrHandle, 1 /* reset_all */);
     }
     mSignalledError = false;
     mSignalledOutputEos = false;
@@ -361,7 +361,13 @@
 
     work->worklets.front()->output.flags = work->input.flags;
     work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+    // We filled the output buffer up to (intptr_t)output - (intptr_t)wView.data() bytes;
+    // use calOutSize, which holds the expected output size, and log any mismatch.
+    ALOGD_IF(calOutSize != ((intptr_t)output - (intptr_t)wView.data()),
+            "Expected %zu output bytes, but filled %lld",
+             calOutSize, (long long)((intptr_t)output - (intptr_t)wView.data()));
+    work->worklets.front()->output.buffers.push_back(
+            createLinearBuffer(block, 0, calOutSize));
     work->worklets.front()->output.ordinal = work->input.ordinal;
     if (eos) {
         mSignalledOutputEos = true;
@@ -420,11 +426,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAMRDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index e2d8cb6..bb63e1f 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -337,11 +337,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAmrNbEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 84ae4b7..84728ae 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -411,11 +411,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAmrWbEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index 61dbd4c..cb9837f 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_av1dec_aom",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7046cb..c08cd59 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -506,30 +506,28 @@
 }
 
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
         size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    uint8_t* dstStart = dst;
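+    // Write each destination plane through its own pointer; the planes are no longer assumed to be contiguous.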
 
     for (size_t i = 0; i < height; ++i) {
-        memcpy(dst, srcY, width);
+        memcpy(dstY, srcY, width);
         srcY += srcYStride;
-        dst += dstYStride;
+        dstY += dstYStride;
     }
 
-    dst = dstStart + dstYStride * height;
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcV, width / 2);
+        memcpy(dstV, srcV, width / 2);
         srcV += srcVStride;
-        dst += dstUVStride;
+        dstV += dstUVStride;
     }
 
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcU, width / 2);
+        memcpy(dstU, srcU, width / 2);
         srcU += srcUStride;
-        dst += dstUVStride;
+        dstU += dstUVStride;
     }
 }
 
@@ -596,16 +594,12 @@
     return;
 }
 
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
-    uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstYStride * height;
-    size_t dstUVSize = dstUVStride * height / 2;
-    uint8_t *dstV = dstY + dstYSize;
-    uint8_t *dstU = dstV + dstUVSize;
+        size_t dstYStride, size_t dstUVStride,
+        size_t width, size_t height) {
 
     for (size_t y = 0; y < height; ++y) {
         for (size_t x = 0; x < width; ++x) {
@@ -696,7 +690,9 @@
           block->width(), block->height(), mWidth, mHeight,
           (int)*(int64_t*)img->user_priv);
 
-    uint8_t* dst = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
     size_t srcYStride = img->stride[AOM_PLANE_Y];
     size_t srcUStride = img->stride[AOM_PLANE_U];
     size_t srcVStride = img->stride[AOM_PLANE_V];
@@ -710,13 +706,14 @@
         const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
 
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-            convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+            convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2,
                                     dstYStride / sizeof(uint32_t),
                                     mWidth, mHeight);
         } else {
-            convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
+            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+                                    srcY, srcU, srcV,
+                                    srcYStride / 2, srcUStride / 2, srcVStride / 2,
                                     dstYStride, dstUVStride,
                                     mWidth, mHeight);
         }
@@ -725,7 +722,7 @@
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
         copyOutputBufferToYuvPlanarFrame(
-                dst, srcY, srcU, srcV,
+                dstY, dstU, dstV, srcY, srcU, srcV,
                 srcYStride, srcUStride, srcVStride,
                 dstYStride, dstUVStride,
                 mWidth, mHeight);
@@ -803,11 +800,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAomFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index 4021444..0be1bed 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -1,9 +1,19 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_avcdec",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_signed-defaults",
-   ],
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
 
     static_libs: ["libavcdec"],
 
@@ -15,12 +25,13 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_avcenc",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_signed-defaults",
-   ],
+        "libcodec2_soft_sanitize_cfi-defaults",
+    ],
 
     static_libs: ["libavcenc"],
 
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3afd670..e8287f9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftAvcDec.h"
-#include "ih264d.h"
 
 namespace android {
 
@@ -391,12 +390,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -411,8 +412,8 @@
 }
 
 status_t C2SoftAvcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -438,8 +439,8 @@
 }
 
 status_t C2SoftAvcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -458,22 +459,26 @@
 }
 
 status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ih264d_ctl_set_config_ip_t s_h264d_set_dyn_params_ip = {};
+    ih264d_ctl_set_config_op_t s_h264d_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_h264d_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_h264d_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ih264d_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ih264d_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     &s_h264d_set_dyn_params_ip,
+                                                     &s_h264d_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -481,8 +486,8 @@
 }
 
 void C2SoftAvcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -538,7 +543,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ih264d_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -567,14 +572,14 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
 
     return true;
 }
 
 bool C2SoftAvcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -622,8 +627,8 @@
 }
 
 status_t C2SoftAvcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -641,8 +646,8 @@
 }
 
 status_t C2SoftAvcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -671,8 +676,8 @@
 
 status_t C2SoftAvcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -837,8 +842,10 @@
             return;
         }
 
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
         {
             C2GraphicView wView = mOutBlock->map().get();
             if (wView.error()) {
@@ -846,7 +853,7 @@
                 work->result = wView.error();
                 return;
             }
-            if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+            if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                                inOffset + inPos, inSize - inPos, workIndex)) {
                 mSignalledError = true;
                 work->workletsProcessed = 1u;
@@ -862,26 +869,27 @@
             WORD32 delay;
             GETTIME(&mTimeStart, nullptr);
             TIME_DIFF(mTimeEnd, mTimeStart, delay);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+            (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
             WORD32 decodeTime;
             GETTIME(&mTimeEnd, nullptr);
             TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
             ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-                  s_decode_op.u4_num_bytes_consumed);
+                  ps_decode_op->u4_num_bytes_consumed);
         }
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
+                (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -890,16 +898,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -917,16 +925,16 @@
                 return;
             }
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                mStride = ALIGN32(s_decode_op.u4_pic_wd);
+                mStride = ALIGN32(ps_decode_op->u4_pic_wd);
                 setParams(mStride, IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -945,11 +953,11 @@
             }
         }
         (void)getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
     }
     if (eos) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -987,16 +995,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+        ih264d_video_decode_op_t s_h264d_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
@@ -1049,11 +1059,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAvcDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index bd84de0..5c07d29 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -25,8 +25,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ih264d.h"
 
 namespace android {
 
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index ab93ce3..d65ffa5 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -19,6 +19,8 @@
 #include <log/log.h>
 #include <utils/misc.h>
 
+#include <algorithm>
+
 #include <media/hardware/VideoAPI.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
@@ -26,6 +28,7 @@
 #include <media/stagefright/foundation/AUtils.h>
 
 #include <C2Debug.h>
+#include <Codec2Mapper.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
 #include <SimpleC2Interface.h>
@@ -121,6 +124,19 @@
                 .build());
 
         addParameter(
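+                // Per-picture-type (I/P/B) QP bounds; defaults to an empty list, in which case the
+                // DEFAULT_*_QP_MIN/MAX values are used.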
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
+
+        addParameter(
                 DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
                 .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
                 .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
@@ -198,6 +214,42 @@
                 .withFields({C2F(mSyncFramePeriod, value).any()})
                 .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
                 .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::input(
+                        0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(ColorAspectsSetter)
+                .build());
+
+        addParameter(
+                DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::output(
+                        0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mCodedColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mCodedColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mCodedColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mCodedColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(CodedColorAspectsSetter, mColorAspects)
+                .build());
     }
 
     static C2R InputDelaySetter(
@@ -220,6 +272,7 @@
         return res;
     }
 
+
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me) {
         (void)mayBlock;
@@ -336,6 +389,95 @@
         return C2R::Ok();
     }
 
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+        (void)me;
+
+        // TODO: refactor to share this algorithm with setQp()
+        int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
+        int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
+
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                iMax = layer.max;
+                iMin = layer.min;
+                ALOGV("iMin %d iMax %d", iMin, iMax);
+            } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+                pMax = layer.max;
+                pMin = layer.min;
+                ALOGV("pMin %d pMax %d", pMin, pMax);
+            } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                bMax = layer.max;
+                bMin = layer.min;
+                ALOGV("bMin %d bMax %d", bMin, bMax);
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+              iMin, iMax, pMin, pMax, bMin, bMax);
+
+        // ensure we have legal values
+        iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+        iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+        pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+        pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+        bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+        bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+        // put them back into the structure
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                me.set().m.values[i].max = iMax;
+                me.set().m.values[i].min = iMin;
+            }
+            if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+                me.set().m.values[i].max = pMax;
+                me.set().m.values[i].min = pMin;
+            }
+            if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                me.set().m.values[i].max = bMax;
+                me.set().m.values[i].min = bMin;
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(exit): i %d-%d p %d-%d b %d-%d",
+              iMin, iMax, pMin, pMax, bMin, bMax);
+
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+                me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+                me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+                me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+                me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+                                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
+        (void)mayBlock;
+        me.set().range = coded.v.range;
+        me.set().primaries = coded.v.primaries;
+        me.set().transfer = coded.v.transfer;
+        me.set().matrix = coded.v.matrix;
+        return C2R::Ok();
+    }
+
     IV_PROFILE_T getProfile_l() const {
         switch (mProfileLevel->profile) {
         case PROFILE_AVC_CONSTRAINED_BASELINE:  [[fallthrough]];
@@ -393,6 +535,11 @@
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
 
 private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -404,6 +551,9 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
 };
 
 #define ive_api_function  ih264e_api_function
@@ -664,20 +814,52 @@
     ive_ctl_set_qp_op_t s_qp_op;
     IV_STATUS_T status;
 
+    ALOGV("in setQp()");
+
+    // set the defaults
     s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
     s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
 
-    s_qp_ip.u4_i_qp = DEFAULT_I_QP;
-    s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+    // TODO: refactor to share this algorithm with PictureQuantizationSetter()
+    int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
+    int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
 
-    s_qp_ip.u4_p_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+    IntfImpl::Lock lock = mIntf->lock();
 
-    s_qp_ip.u4_b_qp = DEFAULT_P_QP;
-    s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
-    s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                    mIntf->getPictureQuantization_l();
+    for (size_t i = 0; i < qp->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = qp->m.values[i];
+
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+            iMax = layer.max;
+            iMin = layer.min;
+            ALOGV("iMin %d iMax %d", iMin, iMax);
+        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+            pMax = layer.max;
+            pMin = layer.min;
+            ALOGV("pMin %d pMax %d", pMin, pMax);
+        } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+            bMax = layer.max;
+            bMin = layer.min;
+            ALOGV("bMin %d bMax %d", bMin, bMax);
+        }
+    }
+
+    s_qp_ip.u4_i_qp_max = iMax;
+    s_qp_ip.u4_i_qp_min = iMin;
+    s_qp_ip.u4_p_qp_max = pMax;
+    s_qp_ip.u4_p_qp_min = pMin;
+    s_qp_ip.u4_b_qp_max = bMax;
+    s_qp_ip.u4_b_qp_min = bMin;
+
+    // ensure initial qp values are within our newly configured bounds...
+    s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
+    s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
+    s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
+
+    ALOGV("setQp(): i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+
 
     s_qp_ip.u4_timestamp_high = -1;
     s_qp_ip.u4_timestamp_low = -1;
@@ -907,6 +1089,55 @@
     return;
 }
 
+c2_status_t C2SoftAvcEnc::setVuiParams()
+{
+    ColorAspects sfAspects;
+    if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+        sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+        sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+        sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+        sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+    }
+    int32_t primaries, transfer, matrixCoeffs;
+    bool range;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+            &primaries,
+            &transfer,
+            &matrixCoeffs,
+            &range);
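+    // The converted values are the ISO colour description code points that the VUI fields below expect.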
+    ih264e_vui_ip_t s_vui_params_ip {};
+    ih264e_vui_op_t s_vui_params_op {};
+
+    s_vui_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+    s_vui_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_VUI_PARAMS;
+
+    s_vui_params_ip.u1_video_signal_type_present_flag = 1;
+    s_vui_params_ip.u1_colour_description_present_flag = 1;
+    s_vui_params_ip.u1_colour_primaries = primaries;
+    s_vui_params_ip.u1_transfer_characteristics = transfer;
+    s_vui_params_ip.u1_matrix_coefficients = matrixCoeffs;
+    s_vui_params_ip.u1_video_full_range_flag = range;
+
+    s_vui_params_ip.u4_size = sizeof(ih264e_vui_ip_t);
+    s_vui_params_op.u4_size = sizeof(ih264e_vui_op_t);
+
+    IV_STATUS_T status = ih264e_api_function(mCodecCtx, &s_vui_params_ip,
+                                             &s_vui_params_op);
+    if (status != IV_SUCCESS) {
+        ALOGE("Unable to set vui params = 0x%x\n",
+                s_vui_params_op.u4_error_code);
+        return C2_CORRUPTED;
+    }
+    return C2_OK;
+}
+
 c2_status_t C2SoftAvcEnc::initEncoder() {
     IV_STATUS_T status;
     WORD32 level;
@@ -926,6 +1157,7 @@
         mIInterval = mIntf->getSyncFramePeriod_l();
         mIDRInterval = mIntf->getSyncFramePeriod_l();
         gop = mIntf->getGop_l();
+        mColorAspects = mIntf->getCodedColorAspects_l();
     }
     if (gop && gop->flexCount() > 0) {
         uint32_t syncInterval = 1;
@@ -1009,29 +1241,31 @@
 
     /* Getting MemRecords Attributes */
     {
-        iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
-        iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+        ih264e_fill_mem_rec_ip_t s_ih264e_mem_rec_ip = {};
+        ih264e_fill_mem_rec_op_t s_ih264e_mem_rec_op = {};
+        iv_fill_mem_rec_ip_t *ps_fill_mem_rec_ip = &s_ih264e_mem_rec_ip.s_ive_ip;
+        iv_fill_mem_rec_op_t *ps_fill_mem_rec_op = &s_ih264e_mem_rec_op.s_ive_op;
 
-        s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
-        s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+        ps_fill_mem_rec_ip->u4_size = sizeof(ih264e_fill_mem_rec_ip_t);
+        ps_fill_mem_rec_op->u4_size = sizeof(ih264e_fill_mem_rec_op_t);
 
-        s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
-        s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
-        s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
-        s_fill_mem_rec_ip.u4_max_wd = width;
-        s_fill_mem_rec_ip.u4_max_ht = height;
-        s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
-        s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
-        s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_fill_mem_rec_ip->e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+        ps_fill_mem_rec_ip->ps_mem_rec = mMemRecords;
+        ps_fill_mem_rec_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_fill_mem_rec_ip->u4_max_wd = width;
+        ps_fill_mem_rec_ip->u4_max_ht = height;
+        ps_fill_mem_rec_ip->u4_max_level = mAVCEncLevel;
+        ps_fill_mem_rec_ip->e_color_format = DEFAULT_INP_COLOR_FORMAT;
+        ps_fill_mem_rec_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_fill_mem_rec_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_fill_mem_rec_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
 
-        status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+        status = ive_api_function(nullptr, &s_ih264e_mem_rec_ip, &s_ih264e_mem_rec_op);
 
         if (status != IV_SUCCESS) {
             ALOGE("Fill memory records failed = 0x%x\n",
-                    s_fill_mem_rec_op.u4_error_code);
+                    ps_fill_mem_rec_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1060,48 +1294,51 @@
 
     /* Codec Instance Creation */
     {
-        ive_init_ip_t s_init_ip;
-        ive_init_op_t s_init_op;
+        ih264e_init_ip_t s_enc_ip = {};
+        ih264e_init_op_t s_enc_op = {};
+
+        ive_init_ip_t *ps_init_ip = &s_enc_ip.s_ive_ip;
+        ive_init_op_t *ps_init_op = &s_enc_op.s_ive_op;
 
         mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
         mCodecCtx->u4_size = sizeof(iv_obj_t);
         mCodecCtx->pv_fxns = (void *)ive_api_function;
 
-        s_init_ip.u4_size = sizeof(ive_init_ip_t);
-        s_init_op.u4_size = sizeof(ive_init_op_t);
+        ps_init_ip->u4_size = sizeof(ih264e_init_ip_t);
+        ps_init_op->u4_size = sizeof(ih264e_init_op_t);
 
-        s_init_ip.e_cmd = IV_CMD_INIT;
-        s_init_ip.u4_num_mem_rec = mNumMemRecords;
-        s_init_ip.ps_mem_rec = mMemRecords;
-        s_init_ip.u4_max_wd = width;
-        s_init_ip.u4_max_ht = height;
-        s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
-        s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
-        s_init_ip.u4_max_level = mAVCEncLevel;
-        s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+        ps_init_ip->e_cmd = IV_CMD_INIT;
+        ps_init_ip->u4_num_mem_rec = mNumMemRecords;
+        ps_init_ip->ps_mem_rec = mMemRecords;
+        ps_init_ip->u4_max_wd = width;
+        ps_init_ip->u4_max_ht = height;
+        ps_init_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+        ps_init_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+        ps_init_ip->u4_max_level = mAVCEncLevel;
+        ps_init_ip->e_inp_color_fmt = mIvVideoColorFormat;
 
         if (mReconEnable || mPSNREnable) {
-            s_init_ip.u4_enable_recon = 1;
+            ps_init_ip->u4_enable_recon = 1;
         } else {
-            s_init_ip.u4_enable_recon = 0;
+            ps_init_ip->u4_enable_recon = 0;
         }
-        s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
-        s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
-        s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
-        s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
-        s_init_ip.u4_num_bframes = mBframes;
-        s_init_ip.e_content_type = IV_PROGRESSIVE;
-        s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
-        s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
-        s_init_ip.e_slice_mode = mSliceMode;
-        s_init_ip.u4_slice_param = mSliceParam;
-        s_init_ip.e_arch = mArch;
-        s_init_ip.e_soc = DEFAULT_SOC;
+        ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+        ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+        ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+        ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
+        ps_init_ip->u4_num_bframes = mBframes;
+        ps_init_ip->e_content_type = IV_PROGRESSIVE;
+        ps_init_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+        ps_init_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+        ps_init_ip->e_slice_mode = mSliceMode;
+        ps_init_ip->u4_slice_param = mSliceParam;
+        ps_init_ip->e_arch = mArch;
+        ps_init_ip->e_soc = DEFAULT_SOC;
 
-        status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+        status = ive_api_function(mCodecCtx, &s_enc_ip, &s_enc_op);
 
         if (status != IV_SUCCESS) {
-            ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+            ALOGE("Init encoder failed = 0x%x\n", ps_init_op->u4_error_code);
             return C2_CORRUPTED;
         }
     }
@@ -1145,6 +1382,9 @@
     /* Video control Set Profile params */
     setProfileParams();
 
+    /* Video control Set VUI params */
+    setVuiParams();
+
     /* Video control Set in Encode header mode */
     setEncMode(IVE_ENC_MODE_HEADER);
 
@@ -1328,13 +1568,13 @@
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
             ps_inp_raw_buf->apv_bufs[2] = vPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
-            ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+            ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
-            ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
+            ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1359,11 +1599,11 @@
             ps_inp_raw_buf->apv_bufs[0] = yPlane;
             ps_inp_raw_buf->apv_bufs[1] = uPlane;
 
-            ps_inp_raw_buf->au4_wd[0] = input->width();
-            ps_inp_raw_buf->au4_wd[1] = input->width();
+            ps_inp_raw_buf->au4_wd[0] = mSize->width;
+            ps_inp_raw_buf->au4_wd[1] = mSize->width;
 
-            ps_inp_raw_buf->au4_ht[0] = input->height();
-            ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+            ps_inp_raw_buf->au4_ht[0] = mSize->height;
+            ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
 
             ps_inp_raw_buf->au4_strd[0] = yStride;
             ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1429,15 +1669,17 @@
     }
     // while (!mSawOutputEOS && !outQueue.empty()) {
     c2_status_t error;
-    ive_video_encode_ip_t s_encode_ip;
-    ive_video_encode_op_t s_encode_op;
-    memset(&s_encode_op, 0, sizeof(s_encode_op));
+    ih264e_video_encode_ip_t s_video_encode_ip = {};
+    ih264e_video_encode_op_t s_video_encode_op = {};
+    ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+    ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+    memset(ps_encode_op, 0, sizeof(*ps_encode_op));
 
     if (!mSpsPpsHeaderReceived) {
         constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
         uint8_t header[kHeaderLength];
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
+                ps_encode_ip, ps_encode_op, nullptr, header, kHeaderLength, workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed: %d", error);
             mSignalledError = true;
@@ -1445,22 +1687,22 @@
             work->workletsProcessed = 1u;
             return;
         }
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, ps_encode_ip, ps_encode_op);
 
         if (IV_SUCCESS != status) {
             ALOGE("Encode header failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             work->workletsProcessed = 1u;
             return;
         } else {
             ALOGV("Bytes Generated in header %d\n",
-                    s_encode_op.s_out_buf.u4_bytes);
+                    ps_encode_op->s_out_buf.u4_bytes);
         }
 
         mSpsPpsHeaderReceived = true;
 
         std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+            C2StreamInitDataInfo::output::AllocUnique(ps_encode_op->s_out_buf.u4_bytes, 0u);
         if (!csd) {
             ALOGE("CSD allocation failed");
             mSignalledError = true;
@@ -1468,7 +1710,7 @@
             work->workletsProcessed = 1u;
             return;
         }
-        memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+        memcpy(csd->m.value, header, ps_encode_op->s_out_buf.u4_bytes);
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
 
         DUMP_TO_FILE(
@@ -1562,7 +1804,7 @@
         }
 
         error = setEncodeArgs(
-                &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
+                ps_encode_ip, ps_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
         if (error != C2_OK) {
             ALOGE("setEncodeArgs failed : %d", error);
             mSignalledError = true;
@@ -1579,17 +1821,17 @@
         /* Compute time elapsed between end of previous decode()
          * to start of current decode() */
         TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
-        status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
         if (IV_SUCCESS != status) {
-            if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+            if ((ps_encode_op->u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
                 // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
                 mOutBufferSize *= 2;
                 mOutBlock.reset();
                 continue;
             }
             ALOGE("Encode Frame failed = 0x%x\n",
-                    s_encode_op.u4_error_code);
+                    ps_encode_op->u4_error_code);
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
@@ -1599,7 +1841,7 @@
 
     // Hold input buffer reference
     if (inputBuffer) {
-        mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+        mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
     }
 
     GETTIME(&mTimeEnd, nullptr);
@@ -1607,9 +1849,9 @@
     TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
 
     ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
-            s_encode_op.s_out_buf.u4_bytes);
+            ps_encode_op->s_out_buf.u4_bytes);
 
-    void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+    void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
     /* If encoder frees up an input buffer, mark it as free */
     if (freed != nullptr) {
         if (mBuffers.count(freed) == 0u) {
@@ -1621,17 +1863,17 @@
         }
     }
 
-    if (s_encode_op.output_present) {
-        if (!s_encode_op.s_out_buf.u4_bytes) {
+    if (ps_encode_op->output_present) {
+        if (!ps_encode_op->s_out_buf.u4_bytes) {
             ALOGE("Error: Output present but bytes generated is zero");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
             work->workletsProcessed = 1u;
             return;
         }
-        uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                      s_encode_op.u4_timestamp_low;
-        finishWork(workId, work, &s_encode_op);
+        uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                      ps_encode_op->u4_timestamp_low;
+        finishWork(workId, work, ps_encode_op);
     }
     if (mSawInputEOS) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
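
The output-present path above reassembles the 64-bit Codec2 work index from the two 32-bit timestamp fields that were handed to the encoder with the input frame. A small sketch of that round trip; TimestampPair is a stand-in for the u4_timestamp_high/u4_timestamp_low fields, not a real libavc type.

    #include <cstdint>

    struct TimestampPair {  // stand-in for u4_timestamp_high / u4_timestamp_low
        uint32_t high;
        uint32_t low;
    };

    static TimestampPair splitWorkIndex(uint64_t workIndex) {
        return { static_cast<uint32_t>(workIndex >> 32),
                 static_cast<uint32_t>(workIndex & 0xFFFFFFFFu) };
    }

    static uint64_t joinWorkIndex(TimestampPair ts) {
        return (static_cast<uint64_t>(ts.high) << 32) | ts.low;
    }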
@@ -1671,9 +1913,11 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ive_video_encode_ip_t s_encode_ip;
-        ive_video_encode_op_t s_encode_op;
-        if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+        ih264e_video_encode_ip_t s_video_encode_ip = {};
+        ih264e_video_encode_op_t s_video_encode_op = {};
+        ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+        ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+        if (C2_OK != setEncodeArgs(ps_encode_ip, ps_encode_op, nullptr,
                                    wView.base(), wView.capacity(), 0)) {
             ALOGE("setEncodeArgs failed for drainInternal");
             mSignalledError = true;
@@ -1681,9 +1925,9 @@
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+        (void)ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
-        void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+        void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
         /* If encoder frees up an input buffer, mark it as free */
         if (freed != nullptr) {
             if (mBuffers.count(freed) == 0u) {
@@ -1695,10 +1939,10 @@
             }
         }
 
-        if (s_encode_op.output_present) {
-            uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
-                          s_encode_op.u4_timestamp_low;
-            finishWork(workId, work, &s_encode_op);
+        if (ps_encode_op->output_present) {
+            uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+                          ps_encode_op->u4_timestamp_low;
+            finishWork(workId, work, ps_encode_op);
         } else {
             if (work->workletsProcessed != 1u) {
                 work->worklets.front()->output.flags = work->input.flags;
@@ -1756,11 +2000,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftAvcEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
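
Throughout this file the patch replaces bare ive_video_encode_ip_t/op_t locals with zero-initialized ih264e_video_encode_ip_t/op_t wrappers, sets fields through pointers to the embedded s_ive_ip/s_ive_op members, and passes the wrapper to the API. A schematic sketch of that pattern, using stand-in struct names rather than the real libavc types:

    #include <cstdint>

    struct BaseIp {            // stand-in for ive_video_encode_ip_t
        uint32_t u4_size;
        uint32_t e_cmd;
    };

    struct ExtendedIp {        // stand-in for ih264e_video_encode_ip_t
        BaseIp s_ive_ip;       // base struct embedded first
        uint32_t extension;    // codec-specific fields follow
    };

    static void callApi(void* ip) { (void)ip; }  // stand-in for ive_api_function()

    static void encodeOneFrame() {
        ExtendedIp s_video_encode_ip = {};                   // zero-init the whole wrapper
        BaseIp* ps_encode_ip = &s_video_encode_ip.s_ive_ip;  // set fields via the base view
        ps_encode_ip->u4_size = sizeof(ExtendedIp);          // size covers the wrapper, not the base
        callApi(&s_video_encode_ip);                         // the API receives the wrapper
    }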
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 555055b..1fecd9e 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -24,8 +24,7 @@
 #include <SimpleC2Component.h>
 
 #include "ih264_typedefs.h"
-#include "iv2.h"
-#include "ive2.h"
+#include "ih264e.h"
 
 namespace android {
 
@@ -100,6 +99,13 @@
 #define STRLENGTH                   500
 #define DEFAULT_CONSTRAINED_INTRA   0
 
+/** QP limits as specified by H.264
+ *  (QP_MIN == 4 is a limitation of this software codec, not the H.264 standard)
+ **/
+#define CODEC_QP_MIN                4
+#define CODEC_QP_MAX                51
+
+
 #define MIN(a, b) ((a) < (b))? (a) : (b)
 #define MAX(a, b) ((a) > (b))? (a) : (b)
 #define ALIGN16(x) ((((x) + 15) >> 4) << 4)
@@ -192,6 +198,7 @@
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
 
     uint32_t mOutBufferSize;
     UWORD32 mHeaderGenerated;
@@ -225,6 +232,7 @@
     c2_status_t setProfileParams();
     c2_status_t setDeblockParams();
     c2_status_t setVbvParams();
+    c2_status_t setVuiParams();
     void logVersion();
     c2_status_t setEncodeArgs(
             ive_video_encode_ip_t *ps_encode_ip,
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index f10835f..160e250 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -1,6 +1,15 @@
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2_soft-defaults instead
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_common",
     defaults: ["libcodec2-impl-defaults"],
     vendor_available: true,
@@ -20,7 +29,7 @@
 
     shared_libs: [
         "libcutils", // for properties
-        "liblog",    // for ALOG
+        "liblog", // for ALOG
         "libsfplugin_ccodec_utils", // for ImageCopy
         "libstagefright_foundation", // for Mutexed
     ],
@@ -38,7 +47,7 @@
 
 filegroup {
     name: "codec2_soft_exports",
-    srcs: [ "exports.lds" ],
+    srcs: ["exports.lds"],
 }
 
 // public dependency for software codec implementation
@@ -91,12 +100,22 @@
         misc_undefined: [
             "signed-integer-overflow",
         ],
+    },
+}
+
+cc_defaults {
+    name: "libcodec2_soft_sanitize_cfi-defaults",
+
+    sanitize: {
         cfi: true,
+        config: {
+            cfi_assembly_support: true,
+        },
     },
 }
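
The new libcodec2_soft_sanitize_cfi-defaults block enables Clang CFI for the codecs that opt into it, and pairs with the __attribute__((cfi_canonical_jump_table)) annotations added to the CreateCodec2Factory/DestroyCodec2Factory entry points in this patch, which appear intended to keep these dlopen()/dlsym()-resolved symbols callable under cross-DSO CFI. A hedged sketch of such an annotated entry point; Factory and the function names are placeholders, not the Codec2 API:

    // Sketch only: Factory stands in for a concrete C2ComponentFactory subclass.
    struct Factory {
        virtual ~Factory() = default;
    };

    __attribute__((cfi_canonical_jump_table))
    extern "C" Factory* CreateExampleFactory() {
        return new Factory();
    }

    __attribute__((cfi_canonical_jump_table))
    extern "C" void DestroyExampleFactory(Factory* factory) {
        delete factory;
    }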
 
 // TEMP: used by cheets2 project - remove when no longer used
-cc_library_shared {
+cc_library {
     name: "libcodec2_simple_component",
     vendor_available: true,
 
@@ -131,4 +150,3 @@
 
     ldflags: ["-Wl,-Bsymbolic"],
 }
-
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index fb3fbd0..6c4b7d9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -110,17 +110,20 @@
         }
         case kWhatStop: {
             int32_t err = thiz->onStop();
+            thiz->mOutputBlockPool.reset();
             Reply(msg, &err);
             break;
         }
         case kWhatReset: {
             thiz->onReset();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
         }
         case kWhatRelease: {
             thiz->onRelease();
+            thiz->mOutputBlockPool.reset();
             mRunning = false;
             Reply(msg);
             break;
@@ -589,21 +592,11 @@
 }
 
 std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
-        const std::shared_ptr<C2LinearBlock> &block) {
-    return createLinearBuffer(block, block->offset(), block->size());
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
         const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
     return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
 }
 
 std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
-        const std::shared_ptr<C2GraphicBlock> &block) {
-    return createGraphicBuffer(block, C2Rect(block->width(), block->height()));
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
         const std::shared_ptr<C2GraphicBlock> &block, const C2Rect &crop) {
     return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence()));
 }
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 22d5714..e5e16d8 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -140,15 +140,9 @@
 
 
     std::shared_ptr<C2Buffer> createLinearBuffer(
-            const std::shared_ptr<C2LinearBlock> &block);
-
-    std::shared_ptr<C2Buffer> createLinearBuffer(
             const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
 
     std::shared_ptr<C2Buffer> createGraphicBuffer(
-            const std::shared_ptr<C2GraphicBlock> &block);
-
-    std::shared_ptr<C2Buffer> createGraphicBuffer(
             const std::shared_ptr<C2GraphicBlock> &block,
             const C2Rect &crop);
 
diff --git a/media/codec2/components/cmds/Android.bp b/media/codec2/components/cmds/Android.bp
index a081e28..d6ffd12 100644
--- a/media/codec2/components/cmds/Android.bp
+++ b/media/codec2/components/cmds/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_binary {
     name: "codec2play",
     defaults: ["libcodec2-impl-defaults"],
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index d6025de..a17b04e 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -138,7 +138,7 @@
 
 SimplePlayer::SimplePlayer()
     : mListener(new Listener(this)),
-      mProducerListener(new DummyProducerListener),
+      mProducerListener(new StubProducerListener),
       mLinearPoolId(C2BlockPool::PLATFORM_START),
       mComposerClient(new SurfaceComposerClient) {
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp
index 48cc51b..38dfce4 100644
--- a/media/codec2/components/flac/Android.bp
+++ b/media/codec2/components/flac/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_flac_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_flac_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_flacdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -14,7 +33,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_flacenc",
     defaults: [
         "libcodec2_soft-defaults",
@@ -23,11 +42,8 @@
 
     srcs: ["C2SoftFlacEnc.cpp"],
 
-    shared_libs: [
-        "libaudioutils",
-    ],
-
     static_libs: [
         "libFLAC",
+        "libaudioutils",
     ],
 }
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
index 4039b9b..49892a4 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.cpp
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -221,6 +221,11 @@
 
     uint8_t *input = const_cast<uint8_t *>(rView.data() + inOffset);
     if (codecConfig) {
+        if (mHasStreamInfo) {
+            ALOGV("Ignore Codec Config");
+            fillEmptyWork(work);
+            return;
+        }
         status_t decoderErr = mFLACDecoder->parseMetadata(input, inSize);
         if (decoderErr != OK && decoderErr != WOULD_BLOCK) {
             ALOGE("process: FLACDecoder parseMetaData returns error %d", decoderErr);
@@ -367,11 +372,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftFlacDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 408db7e..6fead3a 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -262,9 +262,10 @@
         work->result = C2_NO_MEMORY;
         return;
     }
-    C2WriteView wView = mOutputBlock->map().get();
-    if (wView.error()) {
-        ALOGE("write view map failed %d", wView.error());
+
+    err = mOutputBlock->map().get().error();
+    if (err) {
+        ALOGE("write view map failed %d", err);
         work->result = C2_CORRUPTED;
         return;
     }
@@ -481,11 +482,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftFlacEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/g711/Android.bp b/media/codec2/components/g711/Android.bp
index 3ede68c..45e85a9 100644
--- a/media/codec2/components/g711/Android.bp
+++ b/media/codec2/components/g711/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_g711_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_g711_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_g711alawdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -7,12 +26,14 @@
 
     srcs: ["C2SoftG711Dec.cpp"],
 
+    static_libs: ["codecs_g711dec"],
+
     cflags: [
         "-DALAW",
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_g711mlawdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -20,4 +41,6 @@
     ],
 
     srcs: ["C2SoftG711Dec.cpp"],
+
+    static_libs: ["codecs_g711dec"],
 }
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index 4ff0793..f952f22 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -22,7 +22,7 @@
 
 #include <C2PlatformSupport.h>
 #include <SimpleC2Interface.h>
-
+#include <g711Dec.h>
 #include "C2SoftG711Dec.h"
 
 namespace android {
@@ -199,7 +199,7 @@
 
     work->worklets.front()->output.flags = work->input.flags;
     work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+    work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
     work->worklets.front()->output.ordinal = work->input.ordinal;
 
     if (eos) {
@@ -224,53 +224,6 @@
     return C2_OK;
 }
 
-#ifdef ALAW
-void C2SoftG711Dec::DecodeALaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t ix = x ^ 0x55;
-        ix &= 0x7f;
-
-        int32_t iexp = ix >> 4;
-        int32_t mant = ix & 0x0f;
-
-        if (iexp > 0) {
-            mant += 16;
-        }
-
-        mant = (mant << 4) + 8;
-
-        if (iexp > 1) {
-            mant = mant << (iexp - 1);
-        }
-
-        *out++ = (x > 127) ? mant : -mant;
-    }
-}
-#else
-void C2SoftG711Dec::DecodeMLaw(
-        int16_t *out, const uint8_t *in, size_t inSize) {
-    while (inSize > 0) {
-        inSize--;
-        int32_t x = *in++;
-
-        int32_t mantissa = ~x;
-        int32_t exponent = (mantissa >> 4) & 7;
-        int32_t segment = exponent + 1;
-        mantissa &= 0x0f;
-
-        int32_t step = 4 << segment;
-
-        int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
-
-        *out++ = (x < 0x80) ? -abs : abs;
-    }
-}
-#endif
-
 class C2SoftG711DecFactory : public C2ComponentFactory {
 public:
     C2SoftG711DecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
@@ -306,11 +259,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftG711DecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/g711/C2SoftG711Dec.h b/media/codec2/components/g711/C2SoftG711Dec.h
index 23e8ffc..f93840b 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.h
+++ b/media/codec2/components/g711/C2SoftG711Dec.h
@@ -45,12 +45,6 @@
     std::shared_ptr<IntfImpl> mIntf;
     bool mSignalledOutputEos;
 
-#ifdef ALAW
-    void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
-#else
-    void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
-#endif
-
     C2_DO_NOT_COPY(C2SoftG711Dec);
 };
 
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index f374089..7692d37 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_av1dec_gav1",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 120ba7a..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -26,6 +26,11 @@
 #include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+namespace {
+
+constexpr uint8_t NEUTRAL_UV_VALUE = 128;
+
+}  // namespace
 
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -51,8 +56,8 @@
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
             .withFields({
-                C2F(mSize, width).inRange(2, 2048, 2),
-                C2F(mSize, height).inRange(2, 2048, 2),
+                C2F(mSize, width).inRange(2, 4096, 2),
+                C2F(mSize, height).inRange(2, 4096, 2),
             })
             .withSetter(SizeSetter)
             .build());
@@ -65,12 +70,14 @@
                                               C2Config::PROFILE_AV1_1}),
                                   C2F(mProfileLevel, level)
                                       .oneOf({
-                                          C2Config::LEVEL_AV1_2,
-                                          C2Config::LEVEL_AV1_2_1,
-                                          C2Config::LEVEL_AV1_2_2,
-                                          C2Config::LEVEL_AV1_3,
-                                          C2Config::LEVEL_AV1_3_1,
-                                          C2Config::LEVEL_AV1_3_2,
+                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
+                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
+                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
+                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
+                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
+                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
+                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
+                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                       })})
                      .withSetter(ProfileLevelSetter, mSize)
                      .build());
@@ -458,32 +465,40 @@
   }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(uint8_t *dst, const uint8_t *srcY,
-                                             const uint8_t *srcU,
-                                             const uint8_t *srcV, size_t srcYStride,
-                                             size_t srcUStride, size_t srcVStride,
-                                             size_t dstYStride, size_t dstUVStride,
-                                             uint32_t width, uint32_t height) {
-  uint8_t *const dstStart = dst;
+static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+                                        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+                                        size_t srcYStride, size_t srcUStride, size_t srcVStride,
+                                        size_t dstYStride, size_t dstUVStride,
+                                        uint32_t width, uint32_t height,
+                                        bool isMonochrome) {
 
   for (size_t i = 0; i < height; ++i) {
-    memcpy(dst, srcY, width);
+    memcpy(dstY, srcY, width);
     srcY += srcYStride;
-    dst += dstYStride;
+    dstY += dstYStride;
   }
 
-  dst = dstStart + dstYStride * height;
+  if (isMonochrome) {
+    // Fill with neutral U/V values.
+    for (size_t i = 0; i < height / 2; ++i) {
+      memset(dstV, NEUTRAL_UV_VALUE, width / 2);
+      memset(dstU, NEUTRAL_UV_VALUE, width / 2);
+      dstV += dstUVStride;
+      dstU += dstUVStride;
+    }
+    return;
+  }
+
   for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dst, srcV, width / 2);
+    memcpy(dstV, srcV, width / 2);
     srcV += srcVStride;
-    dst += dstUVStride;
+    dstV += dstUVStride;
   }
 
-  dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
   for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dst, srcU, width / 2);
+    memcpy(dstU, srcU, width / 2);
     srcU += srcUStride;
-    dst += dstUVStride;
+    dstU += dstUVStride;
   }
 }
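
For monochrome (4:0:0) output the rewritten copy path leaves the Y copy unchanged and fills both chroma planes with the neutral value 128 so a YUV420 consumer renders the frame as grayscale. A standalone sketch of that chroma fill:

    #include <cstdint>
    #include <cstring>

    constexpr uint8_t kNeutralUV = 128;  // same value as NEUTRAL_UV_VALUE above

    static void fillNeutralChroma(uint8_t* dstU, uint8_t* dstV, size_t dstUVStride,
                                  uint32_t width, uint32_t height) {
        // Chroma planes are subsampled by 2 in each dimension (rounded up).
        for (size_t i = 0; i < (height + 1) / 2; ++i) {
            std::memset(dstU, kNeutralUV, (width + 1) / 2);
            std::memset(dstV, kNeutralUV, (width + 1) / 2);
            dstU += dstUVStride;
            dstV += dstUVStride;
        }
    }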
 
@@ -555,15 +570,11 @@
 }
 
 static void convertYUV420Planar16ToYUV420Planar(
-    uint8_t *dst, const uint16_t *srcY, const uint16_t *srcU,
-    const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
-    size_t srcVStride, size_t dstYStride, size_t dstUVStride,
-    size_t width, size_t height) {
-  uint8_t *dstY = (uint8_t *)dst;
-  size_t dstYSize = dstYStride * height;
-  size_t dstUVSize = dstUVStride * height / 2;
-  uint8_t *dstV = dstY + dstYSize;
-  uint8_t *dstU = dstV + dstUVSize;
+    uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+    const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+    size_t srcYStride, size_t srcUStride, size_t srcVStride,
+    size_t dstYStride, size_t dstUVStride,
+    size_t width, size_t height, bool isMonochrome) {
 
   for (size_t y = 0; y < height; ++y) {
     for (size_t x = 0; x < width; ++x) {
@@ -574,6 +585,17 @@
     dstY += dstYStride;
   }
 
+  if (isMonochrome) {
+    // Fill with neutral U/V values.
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+      memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
+      memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
+      dstV += dstUVStride;
+      dstU += dstUVStride;
+    }
+    return;
+  }
+
   for (size_t y = 0; y < (height + 1) / 2; ++y) {
     for (size_t x = 0; x < (width + 1) / 2; ++x) {
       dstU[x] = (uint8_t)(srcU[x] >> 2);
@@ -629,8 +651,16 @@
     }
   }
 
-  // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
-  CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+  if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+    ALOGE("image_format %d not supported", buffer->image_format);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+  const bool isMonochrome =
+      buffer->image_format == libgav1::kImageFormatMonochrome400;
 
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
@@ -642,6 +672,13 @@
     if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
         defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
         defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+      if (buffer->image_format != libgav1::kImageFormatYuv420) {
+        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        mSignalledError = true;
+        work->result = C2_OMITTED;
+        work->workletsProcessed = 1u;
+        return false;
+      }
       format = HAL_PIXEL_FORMAT_RGBA_1010102;
     }
   }
@@ -667,10 +704,13 @@
   ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
         block->height(), mWidth, mHeight, (int)buffer->user_private_data);
 
-  uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
   size_t srcYStride = buffer->stride[0];
   size_t srcUStride = buffer->stride[1];
   size_t srcVStride = buffer->stride[2];
+
   C2PlanarLayout layout = wView.layout();
   size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
   size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
@@ -682,20 +722,21 @@
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
       convertYUV420Planar16ToY410(
-          (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
           srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
     } else {
-      convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                          srcUStride / 2, srcVStride / 2,
-                                          dstYStride, dstUVStride, mWidth, mHeight);
+      convertYUV420Planar16ToYUV420Planar(
+          dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+          srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
+          isMonochrome);
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    copyOutputBufferToYuvPlanarFrame(dst, srcY, srcU, srcV, srcYStride, srcUStride,
-                                     srcVStride, dstYStride, dstUVStride,
-                                     mWidth, mHeight);
+    copyOutputBufferToYV12Frame(
+        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+        dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
@@ -771,11 +812,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
   ALOGV("in %s", __func__);
   return new ::android::C2SoftGav1Factory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
   ALOGV("in %s", __func__);
   delete factory;
diff --git a/media/codec2/components/gsm/Android.bp b/media/codec2/components/gsm/Android.bp
index 9330c01..2d0976d 100644
--- a/media/codec2/components/gsm/Android.bp
+++ b/media/codec2/components/gsm/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_gsm_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_gsm_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_gsmdec",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp
index 287cfc6..977677d 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.cpp
+++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp
@@ -294,11 +294,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftGSMDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.h b/media/codec2/components/gsm/C2SoftGsmDec.h
index 2b209fe..edd273b 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.h
+++ b/media/codec2/components/gsm/C2SoftGsmDec.h
@@ -19,10 +19,7 @@
 
 #include <SimpleC2Component.h>
 
-
-extern "C" {
-    #include "gsm.h"
-}
+#include "gsm.h"
 
 namespace android {
 
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index 369bd78..d1388b9 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -1,8 +1,18 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_hevcdec",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
     srcs: ["C2SoftHevcDec.cpp"],
@@ -11,11 +21,12 @@
 
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_hevcenc",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_signed-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
     srcs: ["C2SoftHevcEnc.cpp"],
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 23104dc..6bcf3a2 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -26,7 +26,6 @@
 #include <SimpleC2Interface.h>
 
 #include "C2SoftHevcDec.h"
-#include "ihevcd_cxa.h"
 
 namespace android {
 
@@ -380,12 +379,14 @@
     }
 
     while (true) {
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
 
-        setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (0 == s_decode_op.u4_output_present) {
+        setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (0 == ps_decode_op->u4_output_present) {
             resetPlugin();
             break;
         }
@@ -400,8 +401,8 @@
 }
 
 status_t C2SoftHevcDec::createDecoder() {
-    ivdext_create_ip_t s_create_ip;
-    ivdext_create_op_t s_create_op;
+    ivdext_create_ip_t s_create_ip = {};
+    ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -427,8 +428,8 @@
 }
 
 status_t C2SoftHevcDec::setNumCores() {
-    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
-    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+    ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+    ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
 
     s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
     s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -447,22 +448,26 @@
 }
 
 status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
-    ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
-    ivd_ctl_set_config_op_t s_set_dyn_params_op;
+    ihevcd_cxa_ctl_set_config_ip_t s_hevcd_set_dyn_params_ip = {};
+    ihevcd_cxa_ctl_set_config_op_t s_hevcd_set_dyn_params_op = {};
+    ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+        &s_hevcd_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+    ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+        &s_hevcd_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
 
-    s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
-    s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
-    s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
-    s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
-    s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
-    s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
-    s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
-    s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+    ps_set_dyn_params_ip->u4_size = sizeof(ihevcd_cxa_ctl_set_config_ip_t);
+    ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+    ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+    ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+    ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+    ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+    ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+    ps_set_dyn_params_op->u4_size = sizeof(ihevcd_cxa_ctl_set_config_op_t);
     IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
-                                                     &s_set_dyn_params_ip,
-                                                     &s_set_dyn_params_op);
+                                                     ps_set_dyn_params_ip,
+                                                     ps_set_dyn_params_op);
     if (status != IV_SUCCESS) {
-        ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+        ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
         return UNKNOWN_ERROR;
     }
 
@@ -470,8 +475,8 @@
 }
 
 status_t C2SoftHevcDec::getVersion() {
-    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
-    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+    ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+    ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
     UWORD8 au1_buf[512];
 
     s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -529,7 +534,7 @@
         if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
     }
 
-    ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+    ps_decode_ip->u4_size = sizeof(ihevcd_cxa_video_decode_ip_t);
     ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
     if (inBuffer) {
         ps_decode_ip->u4_ts = tsMarker;
@@ -558,15 +563,15 @@
         ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
     }
     ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
-    ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+    ps_decode_op->u4_size = sizeof(ihevcd_cxa_video_decode_op_t);
     ps_decode_op->u4_output_present = 0;
 
     return true;
 }
 
 bool C2SoftHevcDec::getVuiParams() {
-    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
-    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+    ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+    ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
 
     s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
     s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -614,8 +619,8 @@
 }
 
 status_t C2SoftHevcDec::setFlushMode() {
-    ivd_ctl_flush_ip_t s_set_flush_ip;
-    ivd_ctl_flush_op_t s_set_flush_op;
+    ivd_ctl_flush_ip_t s_set_flush_ip = {};
+    ivd_ctl_flush_op_t s_set_flush_op = {};
 
     s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
     s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -633,8 +638,8 @@
 }
 
 status_t C2SoftHevcDec::resetDecoder() {
-    ivd_ctl_reset_ip_t s_reset_ip;
-    ivd_ctl_reset_op_t s_reset_op;
+    ivd_ctl_reset_ip_t s_reset_ip = {};
+    ivd_ctl_reset_op_t s_reset_op = {};
 
     s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
     s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -662,8 +667,8 @@
 
 status_t C2SoftHevcDec::deleteDecoder() {
     if (mDecHandle) {
-        ivdext_delete_ip_t s_delete_ip;
-        ivdext_delete_op_t s_delete_op;
+        ivdext_delete_ip_t s_delete_ip = {};
+        ivdext_delete_op_t s_delete_op = {};
 
         s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
         s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -835,9 +840,11 @@
             work->result = wView.error();
             return;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
                            inOffset + inPos, inSize - inPos, workIndex)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
@@ -852,26 +859,26 @@
         WORD32 delay;
         GETTIME(&mTimeStart, nullptr);
         TIME_DIFF(mTimeEnd, mTimeStart, delay);
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
         WORD32 decodeTime;
         GETTIME(&mTimeEnd, nullptr);
         TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
         ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
-              s_decode_op.u4_num_bytes_consumed);
-        if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+              ps_decode_op->u4_num_bytes_consumed);
+        if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
-                   (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+                   (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
-        } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+        } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGV("resolution changed");
             drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
             resetDecoder();
@@ -880,16 +887,16 @@
 
             /* Decode header and get new dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
-            (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
-            ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+            (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+            ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
             return;
         }
-        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
-            mOutputDelay = s_decode_op.i4_reorder_depth;
+        if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+            mOutputDelay = ps_decode_op->i4_reorder_depth;
             ALOGV("New Output delay %d ", mOutputDelay);
 
             C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -906,17 +913,16 @@
                 work->result = C2_CORRUPTED;
                 return;
             }
-            continue;
         }
-        if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+        if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN32(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
             }
-            if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
-                mWidth = s_decode_op.u4_pic_wd;
-                mHeight = s_decode_op.u4_pic_ht;
-                CHECK_EQ(0u, s_decode_op.u4_output_present);
+            if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
+                mWidth = ps_decode_op->u4_pic_wd;
+                mHeight = ps_decode_op->u4_pic_ht;
+                CHECK_EQ(0u, ps_decode_op->u4_output_present);
 
                 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -936,15 +942,15 @@
             }
         }
         (void) getVuiParams();
-        hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         }
-        if (0 == s_decode_op.u4_num_bytes_consumed) {
+        if (0 == ps_decode_op->u4_num_bytes_consumed) {
             ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
             break;
         }
-        inPos += s_decode_op.u4_num_bytes_consumed;
+        inPos += ps_decode_op->u4_num_bytes_consumed;
         if (hasPicture && (inSize - inPos)) {
             ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
                   (int)inSize - (int)inPos);
@@ -986,16 +992,18 @@
             ALOGE("graphic view map failed %d", wView.error());
             return C2_CORRUPTED;
         }
-        ivd_video_decode_ip_t s_decode_ip;
-        ivd_video_decode_op_t s_decode_op;
-        if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+        ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+        ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+        ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+        ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+        if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
             mSignalledError = true;
             work->workletsProcessed = 1u;
             return C2_CORRUPTED;
         }
-        (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        if (s_decode_op.u4_output_present) {
-            finishWork(s_decode_op.u4_ts, work);
+        (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+        if (ps_decode_op->u4_output_present) {
+            finishWork(ps_decode_op->u4_ts, work);
         } else {
             fillEmptyWork(work);
             break;
@@ -1048,11 +1056,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftHevcDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 600d7c1..b9b0a48 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -23,8 +23,7 @@
 #include <SimpleC2Component.h>
 
 #include "ihevc_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ihevcd_cxa.h"
 
 namespace android {
 
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index c2d2540..4bc1777 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -25,6 +25,7 @@
 #include <media/stagefright/foundation/AUtils.h>
 
 #include <C2Debug.h>
+#include <Codec2Mapper.h>
 #include <C2PlatformSupport.h>
 #include <Codec2BufferUtils.h>
 #include <SimpleC2Interface.h>
@@ -208,6 +209,42 @@
                 .withSetter(
                     Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
                 .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::input(
+                        0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(ColorAspectsSetter)
+                .build());
+
+        addParameter(
+                DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::output(
+                        0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mCodedColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mCodedColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mCodedColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mCodedColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(CodedColorAspectsSetter, mColorAspects)
+                .build());
     }
 
     static C2R InputDelaySetter(
@@ -402,6 +439,34 @@
     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const {
         return mGop;
     }
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+                me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+                me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+                me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+                me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+                                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
+        (void)mayBlock;
+        me.set().range = coded.v.range;
+        me.set().primaries = coded.v.primaries;
+        me.set().transfer = coded.v.transfer;
+        me.set().matrix = coded.v.matrix;
+        return C2R::Ok();
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
+        return mCodedColorAspects;
+    }
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -415,6 +480,8 @@
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
     std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
 };
 
 static size_t GetCPUCoreCount() {
@@ -533,6 +600,32 @@
             mBframes = maxBframes;
         }
     }
+    ColorAspects sfAspects;
+    if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+        sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+        sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+        sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+        sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+    }
+    int32_t primaries, transfer, matrixCoeffs;
+    bool range;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+            &primaries,
+            &transfer,
+            &matrixCoeffs,
+            &range);
+    mEncParams.s_out_strm_prms.i4_vui_enable = 1;
+    mEncParams.s_vui_sei_prms.u1_colour_description_present_flag = 1;
+    mEncParams.s_vui_sei_prms.u1_colour_primaries = primaries;
+    mEncParams.s_vui_sei_prms.u1_transfer_characteristics = transfer;
+    mEncParams.s_vui_sei_prms.u1_matrix_coefficients = matrixCoeffs;
+    mEncParams.s_vui_sei_prms.u1_video_full_range_flag = range;
     // update configuration
     mEncParams.s_src_prms.i4_width = mSize->width;
     mEncParams.s_src_prms.i4_height = mSize->height;
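
The block above maps the configured C2 color aspects to ISO code points for the encoder's VUI, falling back to "unspecified" for anything that cannot be mapped. A schematic sketch of that map-or-unspecified step; MapPrimaries and the numeric C2-side cases are hypothetical stand-ins for C2Mapper::map() and C2Color, not the real API:

    #include <cstdint>

    // ISO/IEC 23001-8 (H.273) colour_primaries code points used in the VUI.
    enum class IsoPrimaries : uint8_t { Bt709 = 1, Unspecified = 2, Bt2020 = 9 };

    // Hypothetical stand-in for C2Mapper::map(): returns false for values it
    // cannot translate.
    static bool MapPrimaries(int c2Primaries, IsoPrimaries* out) {
        switch (c2Primaries) {
            case 1: *out = IsoPrimaries::Bt709;  return true;  // e.g. PRIMARIES_BT709
            case 9: *out = IsoPrimaries::Bt2020; return true;  // e.g. PRIMARIES_BT2020
            default: return false;
        }
    }

    static uint8_t vuiColourPrimaries(int c2Primaries) {
        IsoPrimaries p;
        if (!MapPrimaries(c2Primaries, &p)) {
            p = IsoPrimaries::Unspecified;  // never emit a made-up value into the bitstream
        }
        return static_cast<uint8_t>(p);
    }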
@@ -629,6 +722,7 @@
         mQuality = mIntf->getQuality_l();
         mGop = mIntf->getGop_l();
         mRequestSync = mIntf->getRequestSync_l();
+        mColorAspects = mIntf->getCodedColorAspects_l();
     }
 
     c2_status_t status = initEncParams();
@@ -1078,11 +1172,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftHevcEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 5ea4602..9dbf682 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -89,6 +89,7 @@
     std::shared_ptr<C2StreamQualityTuning::output> mQuality;
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
     char mOutFile[200];
diff --git a/media/codec2/components/mp3/Android.bp b/media/codec2/components/mp3/Android.bp
index 66665ed..437d34f 100644
--- a/media/codec2/components/mp3/Android.bp
+++ b/media/codec2/components/mp3/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_mp3_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_mp3_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_mp3dec",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 5ba7e3d..30d7394 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMp3Dec"
+#include <inttypes.h>
 #include <log/log.h>
 
 #include <numeric>
@@ -485,10 +486,10 @@
         }
     }
 
-    uint64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+    int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
     mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
-    ALOGV("out buffer attr. offset %d size %d timestamp %u", outOffset, outSize - outOffset,
-          (uint32_t)(mAnchorTimeStamp + outTimeStamp));
+    ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+          outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
     decodedSizes.clear();
     work->worklets.front()->output.flags = work->input.flags;
     work->worklets.front()->output.buffers.clear();
@@ -539,11 +540,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftMp3DecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h
index 402bdc4..e2dfcf3 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.h
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.h
@@ -63,7 +63,7 @@
     bool mSignalledError;
     bool mSignalledOutputEos;
     bool mGaplessBytes;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
 
     status_t initDecoder();
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index 841f0a9..daa10ae 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_mpeg2dec",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 55dd475..b1cf388 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -30,6 +30,7 @@
 
 namespace android {
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+constexpr size_t kMaxDimension = 1920;
 constexpr char COMPONENT_NAME[] = "c2.android.mpeg2.decoder";
 
 class C2SoftMpeg2Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -64,8 +65,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(16, 1920, 4),
-                    C2F(mSize, height).inRange(16, 1088, 4),
+                    C2F(mSize, width).inRange(16, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(16, kMaxDimension, 2),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -91,8 +92,8 @@
                 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(MaxPictureSizeSetter, mSize)
                 .build());
@@ -204,8 +205,8 @@
                                     const C2P<C2StreamPictureSizeInfo::output> &size) {
         (void)mayBlock;
         // TODO: get max width/height from the size's field helpers vs. hardcoding
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), kMaxDimension);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), kMaxDimension);
         return C2R::Ok();
     }
 
@@ -1113,11 +1114,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftMpeg2DecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/mpeg4_h263/Android.bp b/media/codec2/components/mpeg4_h263/Android.bp
index 41e4f44..e19fa45 100644
--- a/media/codec2/components/mpeg4_h263/Android.bp
+++ b/media/codec2/components/mpeg4_h263/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_mpeg4_h263_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_mpeg4_h263_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_mpeg4dec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -15,7 +34,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_h263dec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -31,7 +50,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_mpeg4enc",
     defaults: [
         "libcodec2_soft-defaults",
@@ -49,7 +68,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_h263enc",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 61b286c..ddd312f 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -35,8 +35,10 @@
 namespace android {
 constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
 #ifdef MPEG4
+constexpr size_t kMaxDimension = 1920;
 constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.decoder";
 #else
+constexpr size_t kMaxDimension = 352;
 constexpr char COMPONENT_NAME[] = "c2.android.h263.decoder";
 #endif
 
@@ -75,13 +77,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 176, 144))
                 .withFields({
-#ifdef MPEG4
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
-#else
-                    C2F(mSize, width).inRange(2, 352, 2),
-                    C2F(mSize, height).inRange(2, 288, 2),
-#endif
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -130,19 +127,10 @@
 
         addParameter(
                 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
-#ifdef MPEG4
-                .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 1920, 1088))
-#else
                 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 352, 288))
-#endif
                 .withFields({
-#ifdef MPEG4
-                    C2F(mSize, width).inRange(2, 1920, 2),
-                    C2F(mSize, height).inRange(2, 1088, 2),
-#else
-                    C2F(mSize, width).inRange(2, 352, 2),
-                    C2F(mSize, height).inRange(2, 288, 2),
-#endif
+                    C2F(mSize, width).inRange(2, kMaxDimension, 2),
+                    C2F(mSize, height).inRange(2, kMaxDimension, 2),
                 })
                 .withSetter(MaxPictureSizeSetter, mSize)
                 .build());
@@ -200,13 +188,8 @@
                                     const C2P<C2StreamPictureSizeInfo::output> &size) {
         (void)mayBlock;
         // TODO: get max width/height from the size's field helpers vs. hardcoding
-#ifdef MPEG4
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u);
-#else
-        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 352u);
-        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 288u);
-#endif
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), kMaxDimension);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), kMaxDimension);
         return C2R::Ok();
     }
 
@@ -464,34 +447,34 @@
 /* TODO: can remove temporary copy after library supports writing to display
  * buffer Y, U and V plane pointers using stride info. */
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, uint8_t *src,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
         size_t dstYStride, size_t dstUVStride,
         size_t srcYStride, uint32_t width,
         uint32_t height) {
     size_t srcUVStride = srcYStride / 2;
     uint8_t *srcStart = src;
-    uint8_t *dstStart = dst;
+
     size_t vStride = align(height, 16);
     for (size_t i = 0; i < height; ++i) {
-         memcpy(dst, src, width);
+         memcpy(dstY, src, width);
          src += srcYStride;
-         dst += dstYStride;
+         dstY += dstYStride;
     }
+
     /* U buffer */
     src = srcStart + vStride * srcYStride;
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, src, width / 2);
+         memcpy(dstU, src, width / 2);
          src += srcUVStride;
-         dst += dstUVStride;
+         dstU += dstUVStride;
     }
+
     /* V buffer */
     src = srcStart + vStride * srcYStride * 5 / 4;
-    dst = dstStart + (dstYStride * height);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, src, width / 2);
+         memcpy(dstV, src, width / 2);
          src += srcUVStride;
-         dst += dstUVStride;
+         dstV += dstUVStride;
     }
 }
 
@@ -672,11 +655,14 @@
         }
 
         uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y];
+        uint8_t *outputBufferU = wView.data()[C2PlanarLayout::PLANE_U];
+        uint8_t *outputBufferV = wView.data()[C2PlanarLayout::PLANE_V];
+
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
         size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
         (void)copyOutputBufferToYuvPlanarFrame(
-                outputBufferY,
+                outputBufferY, outputBufferU, outputBufferV,
                 mOutputBuffer[mNumSamplesOutput & 1],
                 dstYStride, dstUVStride,
                 align(mWidth, 16), mWidth, mHeight);
@@ -744,11 +730,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftMpeg4DecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 54c8c47..3c87531 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -448,6 +448,20 @@
         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
+    // handle dynamic bitrate change
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+        lock.unlock();
+
+        if (bitrate != mBitrate) {
+            mBitrate = bitrate;
+            int layerBitrate[2] = {static_cast<int>(mBitrate->value), 0};
+            ALOGV("Calling PVUpdateBitRate %d", layerBitrate[0]);
+            PVUpdateBitRate(mHandle, layerBitrate);
+        }
+    }
+
     std::shared_ptr<const C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
@@ -652,11 +666,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftMpeg4EncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
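The dynamic-bitrate block added to C2SoftMpeg4Enc::process() above only reacts to the value currently stored in the component interface. A minimal, hypothetical client-side sketch of driving that path follows; it assumes an already-configured std::shared_ptr<C2ComponentInterface> (named intf here), and the helper name is illustrative only.

#include <memory>
#include <vector>

#include <C2Component.h>
#include <C2Config.h>

// Illustrative helper (not part of the patch above): push a new target bitrate
// to a running encoder. The encoder's process() loop then picks up the changed
// C2StreamBitrateInfo::output value and calls PVUpdateBitRate on the next frame.
c2_status_t UpdateEncoderBitrate(
        const std::shared_ptr<C2ComponentInterface> &intf, uint32_t bitrateBps) {
    C2StreamBitrateInfo::output bitrate(0u /* stream */, bitrateBps);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    return intf->config_vb({&bitrate}, C2_MAY_BLOCK, &failures);
}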
diff --git a/media/codec2/components/mpeg4_h263/TEST_MAPPING b/media/codec2/components/mpeg4_h263/TEST_MAPPING
new file mode 100644
index 0000000..93fba22
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/codec2/components/mpeg4_h263
+{
+  "presubmit": [
+    { "name": "C2SoftMpeg4DecTest" }
+  ]
+}
diff --git a/media/codec2/components/opus/Android.bp b/media/codec2/components/opus/Android.bp
index 0ed141b..bfba45c 100644
--- a/media/codec2/components/opus/Android.bp
+++ b/media/codec2/components/opus/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_opusdec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -9,7 +18,7 @@
 
     shared_libs: ["libopus"],
 }
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_opusenc",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
index b7c1556..d4987c0 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.cpp
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -473,11 +473,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftOpusDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 70d1965..370d33c 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -78,6 +78,19 @@
                 .build());
 
         addParameter(
+            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                .withDefault(new C2StreamBitrateModeTuning::output(
+                        0u, C2Config::BITRATE_VARIABLE))
+                .withFields({
+                    C2F(mBitrateMode, value).oneOf({
+                        C2Config::BITRATE_CONST,
+                        C2Config::BITRATE_VARIABLE})
+                })
+                .withSetter(
+                    Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                .build());
+
+        addParameter(
                 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                 .withDefault(new C2StreamBitrateInfo::output(0u, 128000))
                 .withFields({C2F(mBitrate, value).inRange(500, 512000)})
@@ -100,12 +113,14 @@
     uint32_t getSampleRate() const { return mSampleRate->value; }
     uint32_t getChannelCount() const { return mChannelCount->value; }
     uint32_t getBitrate() const { return mBitrate->value; }
+    uint32_t getBitrateMode() const { return mBitrateMode->value; }
     uint32_t getComplexity() const { return mComplexity->value; }
 
 private:
     std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
     std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
     std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
 };
@@ -135,6 +150,7 @@
     mSampleRate = mIntf->getSampleRate();
     mChannelCount = mIntf->getChannelCount();
     uint32_t bitrate = mIntf->getBitrate();
+    uint32_t bitrateMode = mIntf->getBitrateMode();
     int complexity = mIntf->getComplexity();
     mNumSamplesPerFrame = mSampleRate / (1000 / mFrameDurationMs);
     mNumPcmBytesPerInputFrame =
@@ -189,14 +205,24 @@
         return C2_BAD_VALUE;
     }
 
-    // Constrained VBR
-    if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1) != OPUS_OK)) {
-        ALOGE("failed to set vbr type");
-        return C2_BAD_VALUE;
-    }
-    if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1) !=
-            OPUS_OK)) {
-        ALOGE("failed to set vbr constraint");
+    if (bitrateMode == C2Config::BITRATE_VARIABLE) {
+        // Constrained VBR
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1)) != OPUS_OK) {
+            ALOGE("failed to set vbr type");
+            return C2_BAD_VALUE;
+        }
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1)) !=
+                OPUS_OK) {
+            ALOGE("failed to set vbr constraint");
+            return C2_BAD_VALUE;
+        }
+    } else if (bitrateMode == C2Config::BITRATE_CONST) {
+        if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(0)) != OPUS_OK) {
+            ALOGE("failed to set cbr type");
+            return C2_BAD_VALUE;
+        }
+    } else {
+        ALOGE("unknown bitrate mode");
         return C2_BAD_VALUE;
     }
 
@@ -626,11 +652,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftOpusEncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
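A hedged sketch of exercising the new bitrate-mode tuning added to the Opus encoder above: assuming an already-configured std::shared_ptr<C2ComponentInterface> named intf, a client could request constant bitrate before starting the component (the helper name is illustrative).

#include <memory>
#include <vector>

#include <C2Component.h>
#include <C2Config.h>

// Illustrative only: request CBR so that C2SoftOpusEnc::initEncoder() takes the
// OPUS_SET_VBR(0) branch above instead of the constrained-VBR default.
c2_status_t RequestConstantBitrate(const std::shared_ptr<C2ComponentInterface> &intf) {
    C2StreamBitrateModeTuning::output mode(0u /* stream */, C2Config::BITRATE_CONST);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    return intf->config_vb({&mode}, C2_MAY_BLOCK, &failures);
}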
diff --git a/media/codec2/components/raw/Android.bp b/media/codec2/components/raw/Android.bp
index dc944da..c0b7613 100644
--- a/media/codec2/components/raw/Android.bp
+++ b/media/codec2/components/raw/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_raw_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_raw_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_rawdec",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index 7b6f21a..a03d4e2 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -87,7 +87,9 @@
                 .withFields({C2F(mPcmEncodingInfo, value).oneOf({
                      C2Config::PCM_16,
                      C2Config::PCM_8,
-                     C2Config::PCM_FLOAT})
+                     C2Config::PCM_FLOAT,
+                     C2Config::PCM_24,
+                     C2Config::PCM_32})
                 })
                 .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
                 .build());
@@ -215,11 +217,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftRawDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
new file mode 100644
index 0000000..3c68eee
--- /dev/null
+++ b/media/codec2/components/tests/Android.bp
@@ -0,0 +1,68 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "C2SoftCodecTest-defaults",
+    gtest: true,
+    host_supported: false,
+    srcs: [
+        "C2SoftCodecTest.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libion",
+        "libfmq",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcodec2",
+        "libhidlbase",
+        "libdmabufheap",
+        "libcodec2_vndk",
+        "libnativewindow",
+        "libcodec2_soft_common",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_foundation",
+        "libstagefright_bufferpool@2.0.1",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+    ],
+
+    shared_libs: [
+        "libui",
+        "libdl",
+        "libhardware",
+        "libvndksupport",
+        "libprocessgroup",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
+    name: "C2SoftMpeg4DecTest",
+    defaults: ["C2SoftCodecTest-defaults"],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "libcodec2_soft_mpeg4dec",
+    ],
+
+    test_suites: [
+        "general-tests",
+    ],
+}
diff --git a/media/codec2/components/tests/C2SoftCodecTest.cpp b/media/codec2/components/tests/C2SoftCodecTest.cpp
new file mode 100644
index 0000000..84c2562
--- /dev/null
+++ b/media/codec2/components/tests/C2SoftCodecTest.cpp
@@ -0,0 +1,105 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <C2Config.h>
+#include <C2ComponentFactory.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+extern "C" ::C2ComponentFactory* CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory);
+
+class C2SoftCodecTest : public ::testing::Test {
+public:
+  void SetUp() override {
+    mFactory = CreateCodec2Factory();
+  }
+
+  void TearDown() override {
+    if (mFactory) {
+      DestroyCodec2Factory(mFactory);
+    }
+  }
+
+  c2_status_t createComponent(
+        std::shared_ptr<C2Component>* const comp) {
+    if (!mFactory) {
+      return C2_NO_INIT;
+    }
+    return mFactory->createComponent(
+        kPlaceholderId, comp, std::default_delete<C2Component>());
+  }
+
+  c2_status_t createInterface(
+      std::shared_ptr<C2ComponentInterface>* const intf) {
+    if (!mFactory) {
+      return C2_NO_INIT;
+    }
+    return mFactory->createInterface(
+        kPlaceholderId, intf, std::default_delete<C2ComponentInterface>());
+  }
+
+  ::C2ComponentFactory *getFactory() { return mFactory; }
+
+private:
+  static constexpr ::c2_node_id_t kPlaceholderId = 0;
+
+  ::C2ComponentFactory *mFactory;
+};
+
+TEST_F(C2SoftCodecTest, PictureSizeInfoTest) {
+  std::shared_ptr<C2ComponentInterface> interface;
+  c2_status_t status = createInterface(&interface);
+  ASSERT_EQ(status, C2_OK) << "Error in createInterface";
+  ASSERT_NE(interface, nullptr) << "interface is null";
+
+  std::unique_ptr<C2StreamPictureSizeInfo::output> param =
+      std::make_unique<C2StreamPictureSizeInfo::output>();
+  std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+      C2FieldSupportedValuesQuery::Current(
+          C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
+      C2FieldSupportedValuesQuery::Current(
+          C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
+  status = interface->querySupportedValues_vb(validValueInfos, C2_MAY_BLOCK);
+  ASSERT_EQ(status, C2_OK) << "Error in querySupportedValues_vb";
+  ASSERT_EQ(validValueInfos.size(), 2) << "querySupportedValues_vb didn't return 2 values";
+
+  ASSERT_EQ(validValueInfos[0].values.range.max.ref<uint32_t>(), 1920)
+      << "Incorrect maximum value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.max.ref<uint32_t>(), 1920)
+      << "Incorrect maximum value for height";
+  ASSERT_EQ(validValueInfos[0].values.range.min.ref<uint32_t>(), 2)
+      << "Incorrect minimum value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.min.ref<uint32_t>(), 2)
+      << "Incorrect minimum value for height";
+  ASSERT_EQ(validValueInfos[0].values.range.step.ref<uint32_t>(), 2)
+      << "Incorrect alignment value for width";
+  ASSERT_EQ(validValueInfos[1].values.range.step.ref<uint32_t>(), 2)
+      << "Incorrect alignment value for height";
+
+  return;
+}
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+  int status = RUN_ALL_TESTS();
+  ALOGV("Test result = %d\n", status);
+  return status;
+}
diff --git a/media/codec2/components/vorbis/Android.bp b/media/codec2/components/vorbis/Android.bp
index bc1c380..ccf6f20 100644
--- a/media/codec2/components/vorbis/Android.bp
+++ b/media/codec2/components/vorbis/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_vorbis_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_vorbis_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_vorbisdec",
     defaults: [
         "libcodec2_soft-defaults",
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index a8b5377..899fe9b 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -477,11 +477,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftVorbisDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp
index 34f5753..bb6a545 100644
--- a/media/codec2/components/vpx/Android.bp
+++ b/media/codec2/components/vpx/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codec2_components_vpx_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codec2_components_vpx_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libcodec2_soft_vp9dec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -14,7 +33,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_vp8dec",
     defaults: [
         "libcodec2_soft-defaults",
@@ -26,7 +45,7 @@
     shared_libs: ["libvpx"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_vp9enc",
     defaults: [
         "libcodec2_soft-defaults",
@@ -43,7 +62,7 @@
     cflags: ["-DVP9"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2_soft_vp8enc",
     defaults: [
         "libcodec2_soft-defaults",
@@ -57,4 +76,3 @@
 
     shared_libs: ["libvpx"],
 }
-
diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.cpp b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
index f18f5d0..049ec38 100644
--- a/media/codec2/components/vpx/C2SoftVp8Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp8Enc.cpp
@@ -101,11 +101,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftVp8EncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.cpp b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
index 740dbda..6401521 100644
--- a/media/codec2/components/vpx/C2SoftVp9Enc.cpp
+++ b/media/codec2/components/vpx/C2SoftVp9Enc.cpp
@@ -131,11 +131,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftVp9EncFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 3eef1e3..2953d90 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -631,31 +631,30 @@
 }
 
 static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
+        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
         size_t dstYStride, size_t dstUVStride,
         uint32_t width, uint32_t height) {
-    uint8_t *dstStart = dst;
 
     for (size_t i = 0; i < height; ++i) {
-         memcpy(dst, srcY, width);
+         memcpy(dstY, srcY, width);
          srcY += srcYStride;
-         dst += dstYStride;
+         dstY += dstYStride;
     }
 
-    dst = dstStart + dstYStride * height;
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcV, width / 2);
+         memcpy(dstV, srcV, width / 2);
          srcV += srcVStride;
-         dst += dstUVStride;
+         dstV += dstUVStride;
     }
 
-    dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
     for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dst, srcU, width / 2);
+         memcpy(dstU, srcU, width / 2);
          srcU += srcUStride;
-         dst += dstUVStride;
+         dstU += dstUVStride;
     }
+
 }
 
 static void convertYUV420Planar16ToY410(uint32_t *dst,
@@ -721,16 +720,12 @@
     return;
 }
 
-static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+static void convertYUV420Planar16ToYUV420Planar(
+        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
         const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
         size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
-
-    uint8_t *dstY = (uint8_t *)dst;
-    size_t dstYSize = dstYStride * height;
-    size_t dstUVSize = dstUVStride * height / 2;
-    uint8_t *dstV = dstY + dstYSize;
-    uint8_t *dstU = dstV + dstUVSize;
+        size_t dstYStride, size_t dstUVStride,
+        size_t width, size_t height) {
 
     for (size_t y = 0; y < height; ++y) {
         for (size_t x = 0; x < width; ++x) {
@@ -823,7 +818,10 @@
            block->width(), block->height(), mWidth, mHeight,
            ((c2_cntr64_t *)img->user_priv)->peekll());
 
-    uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
+
     size_t srcYStride = img->stride[VPX_PLANE_Y];
     size_t srcUStride = img->stride[VPX_PLANE_U];
     size_t srcVStride = img->stride[VPX_PLANE_V];
@@ -842,18 +840,18 @@
             constexpr size_t kHeight = 64;
             for (; i < mHeight; i += kHeight) {
                 queue->entries.push_back(
-                        [dst, srcY, srcU, srcV,
+                        [dstY, srcY, srcU, srcV,
                          srcYStride, srcUStride, srcVStride, dstYStride,
                          width = mWidth, height = std::min(mHeight - i, kHeight)] {
                             convertYUV420Planar16ToY410(
-                                    (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+                                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
                                     width, height);
                         });
                 srcY += srcYStride / 2 * kHeight;
                 srcU += srcUStride / 2 * (kHeight / 2);
                 srcV += srcVStride / 2 * (kHeight / 2);
-                dst += dstYStride * kHeight;
+                dstY += dstYStride * kHeight;
             }
             CHECK_EQ(0u, queue->numPending);
             queue->numPending = queue->entries.size();
@@ -862,8 +860,9 @@
                 queue.waitForCondition(queue->cond);
             }
         } else {
-            convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
-                                                srcUStride / 2, srcVStride / 2,
+            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
+                                                srcY, srcU, srcV,
+                                                srcYStride / 2, srcUStride / 2, srcVStride / 2,
                                                 dstYStride, dstUVStride,
                                                 mWidth, mHeight);
         }
@@ -871,8 +870,10 @@
         const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
+
         copyOutputBufferToYuvPlanarFrame(
-                dst, srcY, srcU, srcV,
+                dstY, dstU, dstV,
+                srcY, srcU, srcV,
                 srcYStride, srcUStride, srcVStride,
                 dstYStride, dstUVStride,
                 mWidth, mHeight);
@@ -947,11 +948,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftVpxFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7e9090f..7486d27 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -635,7 +635,8 @@
             }
             work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
             work->worklets.front()->output.buffers.clear();
-            std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
+            std::shared_ptr<C2Buffer> buffer =
+                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
             if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                 buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                         0u /* stream id */, C2Config::SYNC_FRAME));
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 5e34b8a..c98b802 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -345,6 +345,42 @@
                 .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
                 .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
                 .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::input(
+                        0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(ColorAspectsSetter)
+                .build());
+
+        addParameter(
+                DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                .withDefault(new C2StreamColorAspectsInfo::output(
+                        0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                .withFields({
+                    C2F(mCodedColorAspects, range).inRange(
+                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                    C2F(mCodedColorAspects, primaries).inRange(
+                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                    C2F(mCodedColorAspects, transfer).inRange(
+                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                    C2F(mCodedColorAspects, matrix).inRange(
+                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+                })
+                .withSetter(CodedColorAspectsSetter, mColorAspects)
+                .build());
     }
 
     static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
@@ -415,6 +451,31 @@
         double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
         return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
     }
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+                me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+                me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+                me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+                me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+                                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
+        (void)mayBlock;
+        me.set().range = coded.v.range;
+        me.set().primaries = coded.v.primaries;
+        me.set().transfer = coded.v.transfer;
+        me.set().matrix = coded.v.matrix;
+        return C2R::Ok();
+    }
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -427,6 +488,8 @@
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
     std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
     std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
 };
 
 }  // namespace android
diff --git a/media/codec2/components/xaac/Android.bp b/media/codec2/components/xaac/Android.bp
index 7795cc1..bb3efb5 100644
--- a/media/codec2/components/xaac/Android.bp
+++ b/media/codec2/components/xaac/Android.bp
@@ -1,8 +1,18 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libcodec2_soft_xaacdec",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_all-defaults",
+        "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
     srcs: ["C2SoftXaacDec.cpp"],
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 951d058..8bf4b72 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -361,9 +361,8 @@
     C2WriteView wView = block->map().get();
     int16_t* outBuffer = reinterpret_cast<int16_t*>(wView.data());
     memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos);
-    mOutputDrainBufferWritePos = 0;
 
-    auto fillWork = [buffer = createLinearBuffer(block)](
+    auto fillWork = [buffer = createLinearBuffer(block, 0, mOutputDrainBufferWritePos)](
         const std::unique_ptr<C2Work>& work) {
         uint32_t flags = 0;
         if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -376,6 +375,9 @@
         work->worklets.front()->output.ordinal = work->input.ordinal;
         work->workletsProcessed = 1u;
     };
+
+    mOutputDrainBufferWritePos = 0;
+
     if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) {
         fillWork(work);
     } else {
@@ -1600,11 +1602,13 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
     ALOGV("in %s", __func__);
     return new ::android::C2SoftXaacDecFactory();
 }
 
+__attribute__((cfi_canonical_jump_table))
 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
     ALOGV("in %s", __func__);
     delete factory;
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index ce1c9ac..64999b7 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libcodec2_headers",
     vendor_available: true,
@@ -5,7 +14,7 @@
     export_include_dirs: ["include"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libcodec2",
     vendor_available: true,
     min_sdk_version: "29",
@@ -21,6 +30,10 @@
         "-Werror",
     ],
 
+    header_abi_checker: {
+        check_all_apis: true,
+    },
+
     header_libs: [
         "libcodec2_headers",
         "libhardware_headers",
@@ -48,4 +61,3 @@
 
     ldflags: ["-Wl,-Bsymbolic"],
 }
-
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index 3d3587c..abe343b 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -642,7 +642,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newLinearAllocation(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -666,7 +667,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 1D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t priorLinearAllocation(
             const C2Handle *handle __unused,
@@ -699,7 +701,8 @@
      * \retval C2_REFUSED   no permission to complete the allocation
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+     *                      (unexpected)
      */
     virtual c2_status_t newGraphicAllocation(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -724,7 +727,8 @@
      * \retval C2_REFUSED   no permission to recreate the allocation
      * \retval C2_BAD_VALUE invalid handle (caller error)
      * \retval C2_OMITTED   this allocator does not support 2D allocations
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation
+     *                      (unexpected)
      */
     virtual c2_status_t priorGraphicAllocation(
             const C2Handle *handle __unused,
@@ -734,6 +738,22 @@
     }
 
     virtual ~C2Allocator() = default;
+
+    /**
+     * Returns true if the handle looks valid for this allocator.
+     *
+     * It does not actually validate that the handle represents a valid allocation (by this
+     * allocator), only that the handle could have been returned by this allocator. As such,
+     * multiple allocators may return true from checkHandle() for the same handle.
+     *
+     * This method MUST be "non-blocking", MUST NOT access kernel and/or device drivers, and
+     * MUST return within 1us.
+     *
+     * \param handle      the handle for an existing allocation (possibly from another
+     *                    allocator)
+     */
+    virtual bool checkHandle(const C2Handle *const handle) const = 0;
+
 protected:
     C2Allocator() = default;
 };
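Since checkHandle(), added in the hunk above, is only a cheap plausibility test, several allocators may accept the same handle. A minimal caller-side sketch (the helper name is hypothetical):

#include <memory>
#include <vector>

#include <C2Buffer.h>

// Hypothetical helper: pick the first allocator whose handles could have
// produced |handle|. Per the contract above the check is cheap and
// non-blocking, and a match does not prove the handle is a valid allocation.
std::shared_ptr<C2Allocator> FindCandidateAllocator(
        const std::vector<std::shared_ptr<C2Allocator>> &allocators,
        const C2Handle *const handle) {
    for (const std::shared_ptr<C2Allocator> &allocator : allocators) {
        if (allocator->checkHandle(handle)) {
            return allocator;
        }
    }
    return nullptr;
}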
@@ -878,6 +898,12 @@
      * Obtains a linear writeable block of given |capacity| and |usage|. If successful, the
      * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
+     * \note The returned buffer may have a larger capacity than requested. In this case the
+     * larger (returned) capacity may be fully used.
+     *
+     * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+     * that its capacity is equal to or larger than the requested capacity.
+     *
      * \param capacity the size of requested block.
      * \param usage    the memory usage info for the requested block. Returned blocks will be
      *                 optimized for this usage, but may be used with any usage. One exception:
@@ -892,7 +918,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support linear blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchLinearBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -905,6 +932,12 @@
      * Obtains a circular writeable block of given |capacity| and |usage|. If successful, the
      * block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
+     * \note The returned buffer may have a larger capacity than requested. In this case the
+     * larger (returned) capacity may be fully used.
+     *
+     * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+     * that its capacity is equal to or larger than the requested capacity.
+     *
      * \param capacity the size of requested circular block. (note: the size of the obtained
      *                 block could be slightly larger, e.g. to accommodate any system-required
      *                 alignment)
@@ -921,7 +954,8 @@
      * \retval C2_REFUSED   no permission to complete any required allocation
      * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
      * \retval C2_OMITTED   this pool does not support circular blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchCircularBlock(
             uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -934,6 +968,12 @@
      * Obtains a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
      * the block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
      *
+     * \note The returned buffer may have a larger capacity (width and height) than requested. In
+     * this case the larger (returned) capacity may be fully used.
+     *
+     * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+     * that its capacity is equal to or larger than the requested capacity (width and height).
+     *
      * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
      *               to accommodate any system-required alignment)
      * \param height the height of requested block (the obtained block could be slightly larger,
@@ -953,7 +993,8 @@
      * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
      *                      error)
      * \retval C2_OMITTED   this pool does not support 2D blocks
-     * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
      */
     virtual c2_status_t fetchGraphicBlock(
             uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -964,6 +1005,102 @@
     }
 
     virtual ~C2BlockPool() = default;
+
+    /**
+     * Blocking fetch for linear block. Obtains a linear writable block of given |capacity|
+     * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+     * |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned. The fence is signalled when the
+     * temporary restriction on fetch is lifted, e.g. when more memory becomes available
+     * because some memory or prior blocks were released.
+     *
+     * On any other failure, |block| is set to nullptr and |fence| is set to a null-fence.
+     *
+     * \note The returned buffer may have a larger capacity than requested. In this case the
+     * larger (returned) capacity may be fully used.
+     *
+     * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+     * that its capacity is equal to or larger than the requested capacity.
+     *
+     * \param capacity the size of requested block.
+     * \param usage    the memory usage info for the requested block. Returned blocks will be
+     *                 optimized for this usage, but may be used with any usage. One exception:
+     *                 protected blocks/buffers can only be used in a protected scenario.
+     * \param block    pointer to where the obtained block shall be stored on success. nullptr will
+     *                 be stored here on failure
+     * \param fence    pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+     * \retval C2_OMITTED   this pool does not support linear blocks or fences.
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchLinearBlock(
+            uint32_t capacity __unused, C2MemoryUsage usage __unused,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
+
+    /**
+     * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |width|,
+     * |height|, |format| and |usage|. If a block can be successfully obtained, the block is
+     * stored in |block|, |fence| is set to a null-fence and C2_OK is returned.
+     *
+     * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+     * is stored into |fence| and C2_BLOCKING is returned. The fence is signalled when the
+     * temporary restriction on fetch is lifted, e.g. when more memory becomes available
+     * because some memory or prior blocks were released.
+     *
+     * On any other failure, |block| is set to nullptr and |fence| is set to a null-fence.
+     *
+     * \note The returned buffer may have a larger capacity (width and height) than requested. In
+     * this case the larger (returned) capacity may be fully used.
+     *
+     * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+     * that its capacity is equal to or larger than the requested capacity (width and height).
+     *
+     * \param width  the width of requested block (the obtained block could be slightly larger, e.g.
+     *               to accommodate any system-required alignment)
+     * \param height the height of requested block (the obtained block could be slightly larger,
+     *               e.g. to accommodate any system-required alignment)
+     * \param format the pixel format of requested block. This could be a vendor specific format.
+     * \param usage  the memory usage info for the requested block. Returned blocks will be
+     *               optimized for this usage, but may be used with any usage. One exception:
+     *               protected blocks/buffers can only be used in a protected scenario.
+     * \param block  pointer to where the obtained block shall be stored on success. nullptr
+     *               will be stored here on failure
+     * \param fence  pointer to where the fence shall be stored on C2_BLOCKING error.
+     *
+     * \retval C2_OK        the operation was successful
+     * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+     * \retval C2_TIMED_OUT the operation timed out
+     * \retval C2_BLOCKING  the operation is blocked
+     * \retval C2_REFUSED   no permission to complete any required allocation
+     * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+     *                      error)
+     * \retval C2_OMITTED   this pool does not support 2D blocks
+     * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+     *                      (unexpected)
+     */
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+            C2MemoryUsage usage __unused,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) {
+        *block = nullptr;
+        (void) fence;
+        return C2_OMITTED;
+    }
 protected:
     C2BlockPool() = default;
 };
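A hedged sketch of how a caller might use the fence-returning fetchLinearBlock() overload documented above; pool, capacity and usage are assumed to come from elsewhere, and kFetchWaitNs is an illustrative timeout, not a platform constant.

#include <memory>

#include <C2Buffer.h>

// Illustrative only: retry a linear fetch while the pool reports a temporary
// C2_BLOCKING condition, waiting on the returned fence between attempts.
c2_status_t FetchLinearBlockBlocking(
        const std::shared_ptr<C2BlockPool> &pool, uint32_t capacity, C2MemoryUsage usage,
        std::shared_ptr<C2LinearBlock> *block) {
    constexpr c2_nsecs_t kFetchWaitNs = 100000000;  // hypothetical 100ms cap per wait
    C2Fence fence;
    c2_status_t err = pool->fetchLinearBlock(capacity, usage, block, &fence);
    while (err == C2_BLOCKING) {
        // The fence signals once the temporary restriction on fetch is lifted.
        c2_status_t waitErr = fence.wait(kFetchWaitNs);
        if (waitErr != C2_OK && waitErr != C2_TIMED_OUT) {
            return waitErr;
        }
        err = pool->fetchLinearBlock(capacity, usage, block, &fence);
    }
    return err;
}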
@@ -2156,9 +2293,12 @@
 };
 
 /**
- * An extension of C2Info objects that can contain arbitrary buffer data.
+ * A const metadata object that can contain arbitrary buffer data.
  *
- * \note This object is not describable and contains opaque data.
+ * This object is not an actual C2Info and is not attached to buffers (C2Buffer), but rather to
+ * frames (C2FrameData). It is not describable via C2ParamDescriptor.
+ *
+ * C2InfoBuffer is a const object that can be allocated on the stack and is copyable.
  */
 class C2InfoBuffer {
 public:
@@ -2167,14 +2307,65 @@
      *
      * \return the parameter index.
      */
-    const C2Param::Index index() const;
+    const C2Param::Index index() const { return mIndex; }
 
     /**
      * Gets the buffer's data.
      *
      * \return the buffer's data.
      */
-    const C2BufferData data() const;
+    const C2BufferData data() const { return mData; }
+
+    /// Returns a clone of this as a global info buffer.
+    C2InfoBuffer asGlobal() const {
+        C2Param::Index index = mIndex;
+        index.convertToGlobal();
+        return C2InfoBuffer(index, mData);
+    }
+
+    /// Returns a clone of this as a port info buffer.
+    C2InfoBuffer asPort(bool output) const {
+        C2Param::Index index = mIndex;
+        index.convertToPort(output);
+        return C2InfoBuffer(index, mData);
+    }
+
+    /// Returns a clone of this as a stream info buffer.
+    C2InfoBuffer asStream(bool output, unsigned stream) const {
+        C2Param::Index index = mIndex;
+        index.convertToStream(output, stream);
+        return C2InfoBuffer(index, mData);
+    }
+
+    /**
+     * Creates a global info buffer containing a single linear block.
+     *
+     * \param index the core parameter index of this info buffer.
+     * \param block the content of the info buffer.
+     *
+     * \return shared pointer to the created info buffer.
+     */
+    static C2InfoBuffer CreateLinearBuffer(C2Param::CoreIndex index, const C2ConstLinearBlock &block);
+
+    /**
+     * Creates a global info buffer containing a single graphic block.
+     *
+     * \param index the core parameter index of this info buffer.
+     * \param block the content of the info buffer.
+     *
+     * \return shared pointer to the created info buffer.
+     */
+    static C2InfoBuffer CreateGraphicBuffer(C2Param::CoreIndex index, const C2ConstGraphicBlock &block);
+
+protected:
+    // no public constructor
+    explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks);
+    explicit C2InfoBuffer(C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks);
+
+private:
+    C2Param::Index mIndex;
+    C2BufferData mData;
+    explicit C2InfoBuffer(C2Param::Index index, const C2BufferData &data);
 };
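+
+// Illustrative sketch. Example_MakeInputStreamInfoBuffer is a hypothetical helper: it wraps an
+// existing read-only linear block in an info buffer and targets it at input stream 0.
+// CreateLinearBuffer() yields a global info buffer; asStream() returns a re-targeted copy.
+// The choice of core index is up to the caller (e.g. an encrypted-buffer info index).
+inline C2InfoBuffer Example_MakeInputStreamInfoBuffer(
+        C2Param::CoreIndex coreIndex, const C2ConstLinearBlock &block) {
+    return C2InfoBuffer::CreateLinearBuffer(coreIndex, block)
+            .asStream(false /* output */, 0u /* stream */);
+}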
 
 /// @}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 29bccd5..2cc7ab7 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -75,6 +75,10 @@
     enum tiling_mode_t : uint32_t;          ///< tiling modes
 };
 
+struct C2PlatformConfig {
+    enum encoding_quality_level_t : uint32_t; ///< encoding quality level
+};
+
 namespace {
 
 enum C2ParamIndexKind : C2Param::type_index_t {
@@ -151,6 +155,7 @@
 
     /* protected content */
     kParamIndexSecureMode,
+    kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
 
     // deprecated
     kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -186,6 +191,8 @@
     kParamIndexPictureType,
     kParamIndexHdr10PlusMetadata,
 
+    kParamIndexPictureQuantization,
+
     /* ------------------------------------ video components ------------------------------------ */
 
     kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -221,6 +228,7 @@
     kParamIndexDrcEffectType, // drc, enum
     kParamIndexDrcOutputLoudness, // drc, float (dBFS)
     kParamIndexDrcAlbumMode, // drc, enum
+    kParamIndexAudioFrameSize, // int
 
     /* ============================== platform-defined parameters ============================== */
 
@@ -249,6 +257,19 @@
 
     // low latency mode
     kParamIndexLowLatencyMode, // bool
+
+    // tunneled codec
+    kParamIndexTunneledMode, // struct
+    kParamIndexTunnelHandle, // int32[]
+    kParamIndexTunnelSystemTime, // int64
+    kParamIndexTunnelHoldRender, // bool
+    kParamIndexTunnelStartRender, // bool
+
+    // dmabuf allocator
+    kParamIndexStoreDmaBufUsage,  // store, struct
+
+    // encoding quality requirements
+    kParamIndexEncodingQualityLevel, // encoders, enum
 };
 
 }
@@ -373,6 +394,7 @@
 
 namespace {
 
+// Codec bases are ordered by their date of introduction to the code base.
 enum : uint32_t {
     _C2_PL_MP2V_BASE = 0x1000,
     _C2_PL_AAC_BASE  = 0x2000,
@@ -383,12 +405,16 @@
     _C2_PL_VP9_BASE  = 0x7000,
     _C2_PL_DV_BASE   = 0x8000,
     _C2_PL_AV1_BASE  = 0x9000,
+    _C2_PL_VP8_BASE  = 0xA000,
+    _C2_PL_MPEGH_BASE = 0xB000,     // MPEG-H 3D Audio
 
     C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
 };
 
 }
 
+// Profiles and levels for each codec follow the order used in the corresponding standard
+// documents at the time of introduction, and are appended chronologically afterwards.
 enum C2Config::profile_t : uint32_t {
     PROFILE_UNUSED = 0,                         ///< profile is not used by this media type
 
@@ -536,6 +562,18 @@
     PROFILE_AV1_0 = _C2_PL_AV1_BASE,            ///< AV1 Profile 0 (4:2:0, 8 to 10 bit)
     PROFILE_AV1_1,                              ///< AV1 Profile 1 (8 to 10 bit)
     PROFILE_AV1_2,                              ///< AV1 Profile 2 (8 to 12 bit)
+
+    // VP8 profiles
+    PROFILE_VP8_0 = _C2_PL_VP8_BASE,            ///< VP8 Profile 0
+    PROFILE_VP8_1,                              ///< VP8 Profile 1
+    PROFILE_VP8_2,                              ///< VP8 Profile 2
+    PROFILE_VP8_3,                              ///< VP8 Profile 3
+
+    // MPEG-H 3D Audio profiles
+    PROFILE_MPEGH_MAIN = _C2_PL_MPEGH_BASE,     ///< MPEG-H Main
+    PROFILE_MPEGH_HIGH,                         ///< MPEG-H High
+    PROFILE_MPEGH_LC,                           ///< MPEG-H Low-complexity
+    PROFILE_MPEGH_BASELINE,                     ///< MPEG-H Baseline
 };
 
 enum C2Config::level_t : uint32_t {
@@ -678,6 +716,13 @@
     LEVEL_AV1_7_1,                              ///< AV1 Level 7.1
     LEVEL_AV1_7_2,                              ///< AV1 Level 7.2
     LEVEL_AV1_7_3,                              ///< AV1 Level 7.3
+
+    // MPEG-H 3D Audio levels
+    LEVEL_MPEGH_1 = _C2_PL_MPEGH_BASE,          ///< MPEG-H L1
+    LEVEL_MPEGH_2,                              ///< MPEG-H L2
+    LEVEL_MPEGH_3,                              ///< MPEG-H L3
+    LEVEL_MPEGH_4,                              ///< MPEG-H L4
+    LEVEL_MPEGH_5,                              ///< MPEG-H L5
 };
 
 struct C2ProfileLevelStruct {
@@ -1137,6 +1182,8 @@
 C2ENUM(C2Config::secure_mode_t, uint32_t,
     SM_UNPROTECTED,    ///< no content protection
     SM_READ_PROTECTED, ///< input and output buffers shall be protected from reading
+    /// both read protected and readable encrypted buffers are used
+    SM_READ_PROTECTED_WITH_ENCRYPTED,
 )
 
 typedef C2GlobalParam<C2Tuning, C2SimpleValueStruct<C2Config::secure_mode_t>, kParamIndexSecureMode>
@@ -1688,6 +1735,31 @@
 constexpr char C2_PARAMKEY_GOP[] = "coding.gop";
 
 /**
+ * Picture quantization.
+ *
+ * Specifies the minimum and maximum quantization parameter (QP) for each picture type.
+ */
+struct C2PictureQuantizationStruct {
+    C2PictureQuantizationStruct() : type_((C2Config::picture_type_t)0),
+                                         min(INT32_MIN), max(INT32_MAX) {}
+    C2PictureQuantizationStruct(C2Config::picture_type_t type, int32_t min_, int32_t max_)
+        : type_(type), min(min_), max(max_) { }
+
+    C2Config::picture_type_t type_;
+    int32_t min;      // INT32_MIN == 'no lower bound specified'
+    int32_t max;      // INT32_MAX == 'no upper bound specified'
+
+    DEFINE_AND_DESCRIBE_C2STRUCT(PictureQuantization)
+    C2FIELD(type_, "type")
+    C2FIELD(min, "min")
+    C2FIELD(max, "max")
+};
+
+typedef C2StreamParam<C2Tuning, C2SimpleArrayStruct<C2PictureQuantizationStruct>,
+        kParamIndexPictureQuantization> C2StreamPictureQuantizationTuning;
+constexpr char C2_PARAMKEY_PICTURE_QUANTIZATION[] = "coding.qp";
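+
+// Illustrative sketch. Example_MakePictureQpTuning is a hypothetical helper that builds QP
+// bounds for an encoder's output stream. The picture types, the 1..51 QP range and the
+// AllocUnique()/m.values flexible-parameter access pattern are example assumptions; the
+// caller is expected to pass the result to the component's config call.
+inline std::unique_ptr<C2StreamPictureQuantizationTuning::output> Example_MakePictureQpTuning() {
+    std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+            C2StreamPictureQuantizationTuning::output::AllocUnique(2 /* flexCount */, 0u /* stream */);
+    qp->m.values[0] = C2PictureQuantizationStruct(C2Config::I_FRAME, 1, 51);
+    qp->m.values[1] = C2PictureQuantizationStruct(C2Config::P_FRAME, 1, 51);
+    return qp;
+}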
+
+/**
  * Sync frame can be requested on demand by the client.
  *
  * If true, the next I frame shall be encoded as a sync frame. This config can be passed
@@ -1863,7 +1935,9 @@
 C2ENUM(C2Config::pcm_encoding_t, uint32_t,
     PCM_16,
     PCM_8,
-    PCM_FLOAT
+    PCM_FLOAT,
+    PCM_24,
+    PCM_32
 )
 
 typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2Config::pcm_encoding_t>, kParamIndexPcmEncoding>
@@ -1962,9 +2036,20 @@
 /**
  * DRC output loudness in dBFS. Retrieved during decoding
  */
- typedef C2StreamParam<C2Info, C2FloatValue, kParamIndexDrcOutputLoudness>
+typedef C2StreamParam<C2Info, C2FloatValue, kParamIndexDrcOutputLoudness>
         C2StreamDrcOutputLoudnessTuning;
- constexpr char C2_PARAMKEY_DRC_OUTPUT_LOUDNESS[] = "output.drc.output-loudness";
+constexpr char C2_PARAMKEY_DRC_OUTPUT_LOUDNESS[] = "output.drc.output-loudness";
+
+/**
+ * Audio frame size in samples.
+ *
+ * Audio encoders can expose this parameter to signal the desired audio frame
+ * size that corresponds to a single coded access unit.
+ * The default value is 0, meaning that the encoder accepts input buffers of any size.
+ */
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexAudioFrameSize>
+        C2StreamAudioFrameSizeInfo;
+constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
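+
+// Illustrative sketch: an encoder advertising a fixed frame size of 1024 samples per access
+// unit on its raw (input) port. The direction (input) and the value are example assumptions.
+inline C2StreamAudioFrameSizeInfo::input Example_AudioFrameSize() {
+    return C2StreamAudioFrameSizeInfo::input(0u /* stream */, 1024u /* samples */);
+}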
 
 /* --------------------------------------- AAC components --------------------------------------- */
 
@@ -2036,6 +2121,33 @@
         C2StoreIonUsageInfo;
 
 /**
+ * This structure describes the preferred DMA-Buf allocation parameters for a given memory usage.
+ */
+struct C2StoreDmaBufUsageStruct {
+    inline C2StoreDmaBufUsageStruct() { memset(this, 0, sizeof(*this)); }
+
+    inline C2StoreDmaBufUsageStruct(size_t flexCount, uint64_t usage_, uint32_t capacity_)
+        : usage(usage_), capacity(capacity_), allocFlags(0) {
+        memset(heapName, 0, flexCount);
+    }
+
+    uint64_t usage;                         ///< C2MemoryUsage
+    uint32_t capacity;                      ///< capacity
+    int32_t allocFlags;                     ///< ion allocation flags
+    char heapName[];                        ///< dmabuf heap name
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(StoreDmaBufUsage, heapName)
+    C2FIELD(usage, "usage")
+    C2FIELD(capacity, "capacity")
+    C2FIELD(allocFlags, "alloc-flags")
+    C2FIELD(heapName, "heap-name")
+};
+
+// store, private
+typedef C2GlobalParam<C2Info, C2StoreDmaBufUsageStruct, kParamIndexStoreDmaBufUsage>
+        C2StoreDmaBufUsageInfo;
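+
+// Illustrative sketch. Example_MakeDmaBufUsageInfo is a hypothetical helper describing the
+// preferred dmabuf heap for a given usage/capacity. The heap name "system" and the
+// AllocUnique()/m.heapName flexible-parameter pattern are assumptions for illustration.
+inline std::unique_ptr<C2StoreDmaBufUsageInfo> Example_MakeDmaBufUsageInfo(
+        uint64_t usage, uint32_t capacity) {
+    static const char kHeapName[] = "system";
+    std::unique_ptr<C2StoreDmaBufUsageInfo> info =
+            C2StoreDmaBufUsageInfo::AllocUnique(sizeof(kHeapName) /* flexCount */, usage, capacity);
+    strncpy(info->m.heapName, kHeapName, sizeof(kHeapName));
+    return info;
+}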
+
+/**
  * Flexible pixel format descriptors
  */
 struct C2FlexiblePixelFormatDescriptorStruct {
@@ -2182,6 +2294,123 @@
 typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct> C2PortTimestampGapTuning;
 constexpr char C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT[] = "input-surface.timestamp-adjustment";
 
+/* ===================================== TUNNELED CODEC ==================================== */
+
+/**
+ * Tunneled codec control.
+ */
+struct C2TunneledModeStruct {
+    /// mode
+    enum mode_t : uint32_t;
+    /// sync type
+    enum sync_type_t : uint32_t;
+
+    inline C2TunneledModeStruct() = default;
+
+    inline C2TunneledModeStruct(
+            size_t flexCount, mode_t mode_, sync_type_t type, std::vector<int32_t> id)
+        : mode(mode_), syncType(type) {
+        memcpy(&syncId, &id[0], c2_min(id.size(), flexCount) * FLEX_SIZE);
+    }
+
+    inline C2TunneledModeStruct(size_t flexCount, mode_t mode_, sync_type_t type, int32_t id)
+        : mode(mode_), syncType(type) {
+        if (flexCount >= 1) {
+            syncId[0] = id;
+        }
+    }
+
+    mode_t mode;          ///< tunneled mode
+    sync_type_t syncType; ///< type of sync used for tunneled mode
+    int32_t syncId[];     ///< sync id
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(TunneledMode, syncId)
+    C2FIELD(mode, "mode")
+    C2FIELD(syncType, "sync-type")
+    C2FIELD(syncId, "sync-id")
+
+};
+
+C2ENUM(C2TunneledModeStruct::mode_t, uint32_t,
+    NONE,
+    SIDEBAND,
+);
+
+
+C2ENUM(C2TunneledModeStruct::sync_type_t, uint32_t,
+    REALTIME,
+    AUDIO_HW_SYNC,
+    HW_AV_SYNC,
+);
+
+/**
+ * Configure tunneled mode
+ */
+typedef C2PortParam<C2Tuning, C2TunneledModeStruct, kParamIndexTunneledMode>
+        C2PortTunneledModeTuning;
+constexpr char C2_PARAMKEY_TUNNELED_RENDER[] = "output.tunneled-render";
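+
+// Illustrative sketch. Example_MakeTunneledModeTuning is a hypothetical helper requesting
+// sideband tunneled playback synchronized to a HW A/V sync id on the output port. It uses the
+// single-id constructor declared above; the sync type and the Struct member typedef (from
+// C2ParamDef.h) are assumptions for illustration.
+inline std::unique_ptr<C2PortTunneledModeTuning::output> Example_MakeTunneledModeTuning(
+        int32_t hwAVSyncId) {
+    return C2PortTunneledModeTuning::output::AllocUnique(
+            1 /* flexCount */,
+            C2PortTunneledModeTuning::Struct::SIDEBAND,
+            C2PortTunneledModeTuning::Struct::HW_AV_SYNC,
+            hwAVSyncId);
+}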
+
+/**
+ * Tunneled mode handle. The meaning of this depends on the
+ * tunneled mode. If the tunneled mode is SIDEBAND, this is the
+ * sideband handle.
+ */
+typedef C2PortParam<C2Tuning, C2Int32Array, kParamIndexTunnelHandle> C2PortTunnelHandleTuning;
+constexpr char C2_PARAMKEY_OUTPUT_TUNNEL_HANDLE[] = "output.tunnel-handle";
+
+/**
+ * The system time (CLOCK_MONOTONIC), in nanoseconds, at the tunnel endpoint.
+ * For decoders this is the render time of the output frame, and it corresponds
+ * to the media timestamp of the output frame.
+ */
+typedef C2PortParam<C2Info, C2SimpleValueStruct<int64_t>, kParamIndexTunnelSystemTime>
+        C2PortTunnelSystemTime;
+constexpr char C2_PARAMKEY_OUTPUT_RENDER_TIME[] = "output.render-time";
+
+
+/**
+ * Tunneled mode video peek signaling flag.
+ *
+ * When a video frame is pushed to the decoder with this parameter set to true,
+ * the decoder must decode the frame, signal partial completion, and hold on the
+ * frame until C2StreamTunnelStartRender is set to true (which resets this
+ * flag). Flush will also result in the frames being returned back to the
+ * client (but not rendered).
+ */
+typedef C2StreamParam<C2Info, C2EasyBoolValue, kParamIndexTunnelHoldRender>
+        C2StreamTunnelHoldRender;
+constexpr char C2_PARAMKEY_TUNNEL_HOLD_RENDER[] = "output.tunnel-hold-render";
+
+/**
+ * Tunneled mode video peek signaling flag.
+ *
+ * Upon receiving this flag, the decoder shall set C2StreamTunnelHoldRender to
+ * false, which shall cause any frames held for rendering to be immediately
+ * displayed, regardless of their timestamps.
+*/
+typedef C2StreamParam<C2Info, C2EasyBoolValue, kParamIndexTunnelStartRender>
+        C2StreamTunnelStartRender;
+constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
+
+/**
+ * Encoding quality level signaling.
+ *
+ * Signals the 'minimum encoding quality' introduced in Android 12/S. It indicates
+ * whether the underlying codec is expected to take extra steps to ensure quality meets the
+ * appropriate minimum. A value of NONE indicates that the codec is not to apply
+ * any minimum quality bar requirements. Other values indicate that the codec is to apply
+ * a minimum quality bar, with the exact quality bar being decided by the parameter value.
+ */
+typedef C2GlobalParam<C2Setting,
+        C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::encoding_quality_level_t>>,
+        kParamIndexEncodingQualityLevel> C2EncodingQualityLevel;
+constexpr char C2_PARAMKEY_ENCODING_QUALITY_LEVEL[] = "algo.encoding-quality-level";
+
+C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
+    NONE = 0,
+    S_HANDHELD = 1              // corresponds to VMAF=70
+);
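+
+// Illustrative sketch: opting an encoder into the platform minimum-quality bar. Constructing
+// the setting directly from the enum value assumes the usual simple-value parameter
+// constructor; passing it to the component's config call is up to the caller.
+inline C2EncodingQualityLevel Example_MinimumQualityLevel() {
+    return C2EncodingQualityLevel(C2PlatformConfig::S_HANDHELD);
+}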
+
 /// @}
 
 #endif  // C2CONFIG_H_
diff --git a/media/codec2/core/include/C2Enum.h b/media/codec2/core/include/C2Enum.h
index b0fad8f..da1f43b 100644
--- a/media/codec2/core/include/C2Enum.h
+++ b/media/codec2/core/include/C2Enum.h
@@ -54,7 +54,7 @@
 /// \note this will contain any initialization, which we will remove when converting to lower-case
 #define _C2_GET_ENUM_NAME(x, y) #x
 /// mapper to get value of enum
-#define _C2_GET_ENUM_VALUE(x, type) (_C2EnumConst<type>)x
+#define _C2_GET_ENUM_VALUE(x, type_) (_C2EnumConst<typename std::underlying_type<type_>::type>)type_::x
 
 /// \endcond
 
@@ -106,7 +106,7 @@
 template<> \
 C2FieldDescriptor::NamedValuesType C2FieldDescriptor::namedValuesFor(const name &r __unused) { \
     return _C2EnumUtils::sanitizeEnumValues( \
-            std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, type, __VA_ARGS__) }, \
+            std::vector<C2Value::Primitive> { _C2_MAP(_C2_GET_ENUM_VALUE, name, __VA_ARGS__) }, \
             { _C2_MAP(_C2_GET_ENUM_NAME, type, __VA_ARGS__) }, \
             prefix); \
 }
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index 51d417a..e938f96 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -317,7 +317,8 @@
         DEFINE_FIELD_BASED_COMPARISON_OPERATORS(Index, mIndex)
 
     private:
-        friend struct C2Param;           // for setStream, MakeStreamId, isValid
+        friend class C2InfoBuffer;       // for convertTo*
+        friend struct C2Param;           // for setStream, MakeStreamId, isValid, convertTo*
         friend struct _C2ParamInspector; // for testing
 
         /**
@@ -508,6 +509,14 @@
         return _mIndex.setPort(output);
     }
 
+    /// sets the size of this parameter.
+    inline void setSize(size_t size) {
+        if (size < sizeof(C2Param)) {
+            size = 0;
+        }
+        _mSize = c2_min(size, _mSize);
+    }
+
 public:
     /// invalidate this parameter. There is no recovery from this call; e.g. parameter
     /// cannot be 'corrected' to be valid.
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 0a33283..d578820 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -97,6 +97,9 @@
         PARAM_TYPE = CoreIndex | TypeFlags
     };
 
+    // the underlying param struct type
+    typedef S Struct;
+
 protected:
     enum : uint32_t {
         FLEX_SIZE = 0,
@@ -270,6 +273,11 @@
         } \
         return 0; \
     } \
+    inline void setFlexCount(size_t count) { \
+        if (count < flexCount()) { \
+            this->setSize(sizeof(_Type) + _Type::FLEX_SIZE * count); \
+        } \
+    } \
 
 /// Mark flexible member variable and make structure flexible.
 #define FLEX(cls, m) \
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 6923f3e..794402f 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -145,10 +145,35 @@
          */
         FLAG_INCOMPLETE = (1 << 3),
         /**
+         * This frame has been corrected due to a bitstream error. This is a hint, and in most cases
+         * can be ignored. This flag can be set by components on their output to signal the clients
+         * that errors may be present but the frame should be used nonetheless. It can also be set
+         * by clients to signal that the input frame has been corrected, but nonetheless should be
+         * processed.
+         */
+        FLAG_CORRECTED = (1 << 4),
+        /**
+         * This frame is corrupt due to a bitstream error. This is similar to FLAG_CORRECTED,
+         * with the exception that this is a hint that downstream components should not process this
+         * frame.
+         * <p>
+         * If set on the input by the client, the input is likely non-processable and should be
+         * handled similarly to a detected uncorrectable bitstream error. For components that operate
+         * on whole access units, this flag can be propagated to the output. Other components should
+         * aim to detect access unit boundaries to determine if any part of the input frame can be
+         * processed.
+         * <p>
+         * If set by the component, this signals to the client that the output is non-usable,
+         * possibly including its metadata; however, the component
+         * will try to recover on successive input frames.
+         */
+        FLAG_CORRUPT = (1 << 5),
+
+        /**
          * This frame contains only codec-specific configuration data, and no actual access unit.
          *
-         * \deprecated pass codec configuration with using the \todo codec-specific configuration
-         * info together with the access unit.
+         * \deprecated pass codec configuration using the C2InitData info parameter together
+         *             with the access unit.
          */
         FLAG_CODEC_CONFIG  = (1u << 31),
     };
@@ -161,7 +186,7 @@
     //< for initial work item, these may also come from the parser - if provided
     //< for output buffers, these are the responses to requestedInfos
     std::vector<std::unique_ptr<C2Param>>      configUpdate;
-    std::vector<std::shared_ptr<C2InfoBuffer>> infoBuffers;
+    std::vector<C2InfoBuffer> infoBuffers;
 };
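+
+// Illustrative sketch. Example_ShouldRenderOutput is a hypothetical client-side helper showing
+// how the new flags are meant to be interpreted: a corrupt frame is dropped, a corrected one
+// is still usable.
+inline bool Example_ShouldRenderOutput(const C2FrameData &output) {
+    if (output.flags & C2FrameData::FLAG_CORRUPT) {
+        return false;  // hint that this frame should not be processed/rendered
+    }
+    // FLAG_CORRECTED only hints that bitstream errors were concealed; the frame is usable.
+    return true;
+}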
 
 struct C2Worklet {
diff --git a/media/codec2/faultinjection/Android.bp b/media/codec2/faultinjection/Android.bp
index a0ad3ce..c04ecbe 100644
--- a/media/codec2/faultinjection/Android.bp
+++ b/media/codec2/faultinjection/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libcodec2_component_wrapper",
     vendor_available: true,
@@ -26,4 +35,3 @@
 
     ldflags: ["-Wl,-Bsymbolic"],
 }
-
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
new file mode 100644
index 0000000..bd1fac6
--- /dev/null
+++ b/media/codec2/fuzzer/Android.bp
@@ -0,0 +1,342 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "C2Fuzzer-defaults",
+
+    srcs: [
+        "C2Fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libion",
+        "libfmq",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcodec2",
+        "libhidlbase",
+        "libdmabufheap",
+        "libcodec2_vndk",
+        "libnativewindow",
+        "libcodec2_soft_common",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_foundation",
+        "libstagefright_bufferpool@2.0.1",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+    ],
+
+    shared_libs: [
+        "libui",
+        "libdl",
+        "libbinder",
+        "libhardware",
+        "libvndksupport",
+        "libprocessgroup",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerAvcDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.avc.decoder\"",
+    ],
+
+    static_libs: [
+        "libavcdec",
+        "libcodec2_soft_avcdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerHevcDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.hevc.decoder\"",
+    ],
+
+    static_libs: [
+        "libhevcdec",
+        "libcodec2_soft_hevcdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerMpeg2Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.mpeg2.decoder\"",
+    ],
+
+    static_libs: [
+        "libmpeg2dec",
+        "libcodec2_soft_mpeg2dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerMpeg4Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.mpeg4.decoder\"",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "libcodec2_soft_mpeg4dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerH263Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.h263.decoder\"",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "libcodec2_soft_h263dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerVp8Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.vp8.decoder\"",
+    ],
+
+    static_libs: [
+        "libvpx",
+        "libcodec2_soft_vp8dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerVp9Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.vp9.decoder\"",
+    ],
+
+    static_libs: [
+        "libvpx",
+        "libcodec2_soft_vp9dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerAacDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.aac.decoder\"",
+    ],
+
+    static_libs: [
+        "libFraunhoferAAC",
+        "libcodec2_soft_aacdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerAmrnbDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.amrnb.decoder\"",
+    ],
+
+    static_libs: [
+        "libstagefright_amrnbdec",
+        "libstagefright_amrwbdec",
+        "libstagefright_amrnb_common",
+        "libcodec2_soft_amrnbdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerAmrwbDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.amrwb.decoder\"",
+    ],
+
+    static_libs: [
+        "libstagefright_amrnbdec",
+        "libstagefright_amrwbdec",
+        "libstagefright_amrnb_common",
+        "libcodec2_soft_amrwbdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerFlacDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.flac.decoder\"",
+    ],
+
+    static_libs: [
+        "libFLAC",
+        "libstagefright_flacdec",
+        "libcodec2_soft_flacdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerG711AlawDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.g711.alaw.decoder\"",
+    ],
+
+    static_libs: [
+        "codecs_g711dec",
+        "libcodec2_soft_g711alawdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerG711MlawDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.g711.mlaw.decoder\"",
+    ],
+
+    static_libs: [
+        "codecs_g711dec",
+        "libcodec2_soft_g711mlawdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerGsmDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.gsm.decoder\"",
+    ],
+
+    static_libs: [
+        "libgsm",
+        "libcodec2_soft_gsmdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerMp3Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.mp3.decoder\"",
+    ],
+
+    static_libs: [
+        "libstagefright_mp3dec",
+        "libcodec2_soft_mp3dec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerOpusDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.opus.decoder\"",
+    ],
+
+    static_libs: [
+        "libopus",
+        "libcodec2_soft_opusdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerRawDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.raw.decoder\"",
+    ],
+
+    static_libs: [
+        "libcodec2_soft_rawdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerVorbisDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.vorbis.decoder\"",
+    ],
+
+    static_libs: [
+        "libvorbisidec",
+        "libcodec2_soft_vorbisdec",
+    ],
+}
+
+cc_fuzz {
+    name: "C2FuzzerXaacDec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.xaac.decoder\"",
+    ],
+
+    static_libs: [
+        "libxaacdec",
+        "libcodec2_soft_xaacdec",
+    ],
+}
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
new file mode 100644
index 0000000..51e1013
--- /dev/null
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -0,0 +1,317 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <stdio.h>
+
+#include <C2Fuzzer.h>
+
+using namespace android;
+
+class LinearBuffer : public C2Buffer {
+ public:
+  explicit LinearBuffer(const std::shared_ptr<C2LinearBlock>& block)
+      : C2Buffer({block->share(block->offset(), block->size(), ::C2Fence())}) {}
+
+  explicit LinearBuffer(const std::shared_ptr<C2LinearBlock>& block, size_t size)
+      : C2Buffer({block->share(block->offset(), size, ::C2Fence())}) {}
+};
+
+/**
+ * Handle Callback functions onWorkDone_nb(), onTripped_nb(), onError_nb() for C2 Components
+ */
+struct CodecListener : public C2Component::Listener {
+ public:
+  CodecListener(const std::function<void(std::weak_ptr<C2Component> comp,
+                                         std::list<std::unique_ptr<C2Work>>& workItems)>
+                    fn = nullptr)
+      : callBack(fn) {}
+  virtual void onWorkDone_nb(const std::weak_ptr<C2Component> comp,
+                             std::list<std::unique_ptr<C2Work>> workItems) {
+    if (callBack) {
+      callBack(comp, workItems);
+    }
+  }
+
+  virtual void onTripped_nb(const std::weak_ptr<C2Component> comp,
+                            const std::vector<std::shared_ptr<C2SettingResult>> settingResults) {
+    (void)comp;
+    (void)settingResults;
+  }
+
+  virtual void onError_nb(const std::weak_ptr<C2Component> comp, uint32_t errorCode) {
+    (void)comp;
+    (void)errorCode;
+  }
+
+  std::function<void(std::weak_ptr<C2Component> comp,
+                     std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
+};
+
+/**
+ * Buffer source implementations to identify a frame and its size
+ */
+bool Codec2Fuzzer::BufferSource::searchForMarker() {
+  while (true) {
+    if (isMarker()) {
+      return true;
+    }
+    --mReadIndex;
+    if (mReadIndex > mSize) {
+      break;
+    }
+  }
+  return false;
+}
+
+void Codec2Fuzzer::BufferSource::parse() {
+  bool isFrameAvailable = true;
+  size_t bytesRemaining = mSize;
+  while (isFrameAvailable) {
+    isFrameAvailable = searchForMarker();
+    if (isFrameAvailable) {
+      size_t location = mReadIndex + kMarkerSize;
+      bool isCSD = isCSDMarker(location);
+      location += kMarkerSuffixSize;
+      uint8_t* framePtr = const_cast<uint8_t*>(&mData[location]);
+      size_t frameSize = bytesRemaining - location;
+      uint32_t flags = 0;
+      if (mFrameList.empty()) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+      } else if (isCSD) {
+        flags |= C2FrameData::FLAG_CODEC_CONFIG;
+      }
+      mFrameList.emplace_back(std::make_tuple(framePtr, frameSize, flags));
+      bytesRemaining -= (frameSize + kMarkerSize + kMarkerSuffixSize);
+      --mReadIndex;
+    }
+  }
+  if (mFrameList.empty()) {
+    /**
+     * Scenario where input data does not contain the custom frame markers.
+     * Hence feed the entire data as a single frame.
+     */
+    mFrameList.emplace_back(
+        std::make_tuple(const_cast<uint8_t*>(mData), 0, C2FrameData::FLAG_END_OF_STREAM));
+    mFrameList.emplace_back(
+        std::make_tuple(const_cast<uint8_t*>(mData), mSize, C2FrameData::FLAG_CODEC_CONFIG));
+  }
+}
+
+FrameData Codec2Fuzzer::BufferSource::getFrame() {
+  FrameData frame = mFrameList.back();
+  mFrameList.pop_back();
+  return frame;
+}
+
+void Codec2Fuzzer::handleWorkDone(std::weak_ptr<C2Component> comp,
+                                  std::list<std::unique_ptr<C2Work>>& workItems) {
+  (void)comp;
+  for (std::unique_ptr<C2Work>& work : workItems) {
+    if (!work->worklets.empty()) {
+      if (work->worklets.front()->output.flags != C2FrameData::FLAG_INCOMPLETE) {
+        mEos = (work->worklets.front()->output.flags & C2FrameData::FLAG_END_OF_STREAM) != 0;
+        work->input.buffers.clear();
+        work->worklets.clear();
+        {
+          std::unique_lock<std::mutex> lock(mQueueLock);
+          mWorkQueue.push_back(std::move(work));
+          mQueueCondition.notify_all();
+        }
+        if (mEos) {
+          {
+            std::lock_guard<std::mutex> waitForDecodeComplete(mDecodeCompleteMutex);
+          }
+          mConditionalVariable.notify_one();
+        }
+      }
+    }
+  }
+}
+
+bool Codec2Fuzzer::initDecoder() {
+  std::vector<std::tuple<C2String, C2ComponentFactory::CreateCodec2FactoryFunc,
+        C2ComponentFactory::DestroyCodec2FactoryFunc>> codec2FactoryFunc;
+
+  codec2FactoryFunc.emplace_back(
+      std::make_tuple(C2COMPONENTNAME, &CreateCodec2Factory, &DestroyCodec2Factory));
+
+  std::shared_ptr<C2ComponentStore> componentStore = GetTestComponentStore(codec2FactoryFunc);
+  if (!componentStore) {
+    return false;
+  }
+
+  std::shared_ptr<C2AllocatorStore> allocatorStore = GetCodec2PlatformAllocatorStore();
+  if (!allocatorStore) {
+    return false;
+  }
+
+  c2_status_t status =
+      allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator);
+  if (status != C2_OK) {
+    return false;
+  }
+
+  mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, ++mBlockPoolId);
+  if (!mLinearPool) {
+    return false;
+  }
+
+  for (int32_t i = 0; i < kNumberOfC2WorkItems; ++i) {
+    mWorkQueue.emplace_back(new C2Work);
+  }
+
+  status = componentStore->createComponent(C2COMPONENTNAME, &mComponent);
+  if (status != C2_OK) {
+    return false;
+  }
+
+  status = componentStore->createInterface(C2COMPONENTNAME, &mInterface);
+  if (status != C2_OK) {
+    return false;
+  }
+
+  C2ComponentKindSetting kind;
+  C2ComponentDomainSetting domain;
+  status = mInterface->query_vb({&kind, &domain}, {}, C2_MAY_BLOCK, nullptr);
+  if (status != C2_OK) {
+    return false;
+  }
+
+  std::vector<C2Param*> configParams;
+  if (domain.value == DOMAIN_VIDEO) {
+    C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
+    configParams.push_back(&inputSize);
+  } else if (domain.value == DOMAIN_AUDIO) {
+    C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
+    C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
+    configParams.push_back(&sampleRateInfo);
+    configParams.push_back(&channelCountInfo);
+  }
+
+  mListener.reset(new CodecListener(
+      [this](std::weak_ptr<C2Component> comp, std::list<std::unique_ptr<C2Work>>& workItems) {
+        handleWorkDone(comp, workItems);
+      }));
+  if (!mListener) {
+    return false;
+  }
+
+  status = mComponent->setListener_vb(mListener, C2_DONT_BLOCK);
+  if (status != C2_OK) {
+    return false;
+  }
+
+  std::vector<std::unique_ptr<C2SettingResult>> failures;
+  componentStore->config_sm(configParams, &failures);
+  if (failures.size() != 0) {
+    return false;
+  }
+
+  status = mComponent->start();
+  if (status != C2_OK) {
+    return false;
+  }
+
+  return true;
+}
+
+void Codec2Fuzzer::deInitDecoder() {
+  mComponent->stop();
+  mComponent->reset();
+  mComponent->release();
+  mComponent = nullptr;
+}
+
+void Codec2Fuzzer::decodeFrames(const uint8_t* data, size_t size) {
+  mBufferSource = new BufferSource(data, size);
+  if (!mBufferSource) {
+    return;
+  }
+  mBufferSource->parse();
+  c2_status_t status = C2_OK;
+  size_t numFrames = 0;
+  while (!mBufferSource->isEos()) {
+    uint8_t* frame = nullptr;
+    size_t frameSize = 0;
+    FrameData frameData = mBufferSource->getFrame();
+    frame = std::get<0>(frameData);
+    frameSize = std::get<1>(frameData);
+
+    std::unique_ptr<C2Work> work;
+    {
+      std::unique_lock<std::mutex> lock(mQueueLock);
+      if (mWorkQueue.empty()) mQueueCondition.wait_for(lock, kC2FuzzerTimeOut);
+      if (!mWorkQueue.empty()) {
+        work.swap(mWorkQueue.front());
+        mWorkQueue.pop_front();
+      } else {
+        return;
+      }
+    }
+
+    work->input.flags = (C2FrameData::flags_t)std::get<2>(frameData);
+    work->input.ordinal.timestamp = 0;
+    work->input.ordinal.frameIndex = ++numFrames;
+    work->input.buffers.clear();
+    int32_t alignedSize = C2FUZZER_ALIGN(frameSize, PAGE_SIZE);
+
+    std::shared_ptr<C2LinearBlock> block;
+    status = mLinearPool->fetchLinearBlock(
+        alignedSize, {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+    if (status != C2_OK || block == nullptr) {
+      return;
+    }
+
+    C2WriteView view = block->map().get();
+    if (view.error() != C2_OK) {
+      return;
+    }
+    memcpy(view.base(), frame, frameSize);
+    work->input.buffers.emplace_back(new LinearBuffer(block, frameSize));
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+
+    std::list<std::unique_ptr<C2Work>> items;
+    items.push_back(std::move(work));
+    status = mComponent->queue_nb(&items);
+    if (status != C2_OK) {
+      return;
+    }
+  }
+  std::unique_lock<std::mutex> waitForDecodeComplete(mDecodeCompleteMutex);
+  mConditionalVariable.wait_for(waitForDecodeComplete, kC2FuzzerTimeOut, [this] { return mEos; });
+  std::list<std::unique_ptr<C2Work>> c2flushedWorks;
+  mComponent->flush_sm(C2Component::FLUSH_COMPONENT, &c2flushedWorks);
+  delete mBufferSource;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if (size < 1) {
+    return 0;
+  }
+  Codec2Fuzzer* codec = new Codec2Fuzzer();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder()) {
+    codec->decodeFrames(data, size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
new file mode 100644
index 0000000..d5ac81a
--- /dev/null
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -0,0 +1,125 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#ifndef __C2FUZZER_H__
+#define __C2FUZZER_H__
+
+#include <C2AllocatorIon.h>
+#include <C2Buffer.h>
+#include <C2BufferPriv.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2PlatformSupport.h>
+
+using namespace std::chrono_literals;
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory);
+
+namespace android {
+
+#define C2FUZZER_ALIGN(_sz, _align) (((_sz) + ((_align)-1)) & ~((_align)-1))
+
+constexpr std::chrono::milliseconds kC2FuzzerTimeOut = 5000ms;
+constexpr int32_t kNumberOfC2WorkItems = 8;
+constexpr uint32_t kWidthOfVideo = 3840;
+constexpr uint32_t kHeightOfVideo = 2160;
+constexpr uint32_t kSamplingRateOfAudio = 48000;
+constexpr uint32_t kChannelsOfAudio = 8;
+
+typedef std::tuple<uint8_t*, size_t, uint32_t> FrameData;
+
+class Codec2Fuzzer {
+ public:
+  Codec2Fuzzer() = default;
+  ~Codec2Fuzzer() { deInitDecoder(); }
+  bool initDecoder();
+  void deInitDecoder();
+  void decodeFrames(const uint8_t* data, size_t size);
+
+  void handleWorkDone(std::weak_ptr<C2Component> comp,
+                      std::list<std::unique_ptr<C2Work>>& workItems);
+
+ private:
+  class BufferSource {
+   public:
+    BufferSource(const uint8_t* data, size_t size) : mData(data), mSize(size) {
+      mReadIndex = (size <= kMarkerSize) ? 0 : (size - kMarkerSize);
+    }
+    ~BufferSource() {
+      mData = nullptr;
+      mSize = 0;
+      mReadIndex = 0;
+      mFrameList.clear();
+    }
+    bool isEos() { return mFrameList.empty(); }
+    void parse();
+    FrameData getFrame();
+
+   private:
+    bool isMarker() {
+      if ((kMarkerSize < mSize) && (mReadIndex < mSize - kMarkerSize)) {
+        return (memcmp(&mData[mReadIndex], kMarker, kMarkerSize) == 0);
+      } else {
+        return false;
+      }
+    }
+
+    bool isCSDMarker(size_t position) {
+      if ((kMarkerSuffixSize < mSize) && (position < mSize - kMarkerSuffixSize)) {
+        return (memcmp(&mData[position], kCsdMarkerSuffix, kMarkerSuffixSize) == 0);
+      } else {
+        return false;
+      }
+    }
+
+    bool searchForMarker();
+
+    const uint8_t* mData = nullptr;
+    size_t mSize = 0;
+    size_t mReadIndex = 0;
+    std::vector<FrameData> mFrameList;
+    static constexpr uint8_t kMarker[] = "_MARK";
+    static constexpr uint8_t kCsdMarkerSuffix[] = "_H_";
+    static constexpr uint8_t kFrameMarkerSuffix[] = "_F_";
+    // All markers should be 5 bytes long (the length of '_MARK')
+    static constexpr size_t kMarkerSize = (sizeof(kMarker) - 1);
+    // All marker types should be 3 bytes long ('_H_', '_F_')
+    static constexpr size_t kMarkerSuffixSize = 3;
+  };
+
+  BufferSource* mBufferSource;
+  bool mEos = false;
+  C2BlockPool::local_id_t mBlockPoolId;
+
+  std::shared_ptr<C2BlockPool> mLinearPool;
+  std::shared_ptr<C2Allocator> mLinearAllocator;
+  std::shared_ptr<C2Component::Listener> mListener;
+  std::shared_ptr<C2Component> mComponent;
+  std::shared_ptr<C2ComponentInterface> mInterface;
+  std::mutex mQueueLock;
+  std::condition_variable mQueueCondition;
+  std::list<std::unique_ptr<C2Work>> mWorkQueue;
+  std::mutex mDecodeCompleteMutex;
+  std::condition_variable mConditionalVariable;
+};
+
+}  // namespace android
+
+#endif  // __C2FUZZER_H__
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 3b73350..122aacd 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -1,12 +1,20 @@
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-client-defaults instead
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libcodec2_hidl_client@1.0",
 
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
@@ -80,6 +88,7 @@
         "libbase",
         "libcodec2",
         "libcodec2_vndk",
+        "libcodec2_hidl_plugin_stub",
         "libcutils",
         "libhidlbase",
         "liblog",
@@ -93,9 +102,17 @@
         vendor: {
             exclude_shared_libs: [
                 "libstagefright_bufferqueue_helper_novndk",
+                "libcodec2_hidl_plugin_stub",
             ],
             shared_libs: [
                 "libstagefright_bufferqueue_helper",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin",
+                "libcodec2_hidl_plugin_stub",
             ],
         },
     },
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hidl/1.0/utils/Component.cpp
index 8a84601..082c5e3 100644
--- a/media/codec2/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hidl/1.0/utils/Component.cpp
@@ -22,6 +22,10 @@
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.0/InputBufferManager.h>
 
+#ifndef __ANDROID_APEX__
+#include <FilterWrapper.h>
+#endif
+
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
@@ -390,10 +394,17 @@
         uint32_t allocatorId,
         createBlockPool_cb _hidl_cb) {
     std::shared_ptr<C2BlockPool> blockPool;
+#ifdef __ANDROID_APEX__
     c2_status_t status = CreateCodec2BlockPool(
             static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
             mComponent,
             &blockPool);
+#else
+    c2_status_t status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#endif
     if (status != C2_OK) {
         blockPool = nullptr;
     }
diff --git a/media/codec2/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
index 9b9d449..1c0d5b0 100644
--- a/media/codec2/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
@@ -35,6 +35,14 @@
 #include <ostream>
 #include <sstream>
 
+#ifndef __ANDROID_APEX__
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <dlfcn.h>
+#include <C2Config.h>
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+#endif
+
 namespace android {
 namespace hardware {
 namespace media {
@@ -176,6 +184,16 @@
     return mParameterCache;
 }
 
+#ifndef __ANDROID_APEX__
+// static
+std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
+    constexpr const char kPluginPath[] = "libc2filterplugin.so";
+    static std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+            std::make_unique<DefaultFilterPlugin>(kPluginPath));
+    return wrapper;
+}
+#endif
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -189,6 +207,9 @@
             mStore->createComponent(name, &c2component));
 
     if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
         onInterfaceLoaded(c2component->intf());
         component = new Component(c2component, listener, this, pool);
         if (!component) {
@@ -214,8 +235,12 @@
         createInterface_cb _hidl_cb) {
     std::shared_ptr<C2ComponentInterface> c2interface;
     c2_status_t res = mStore->createInterface(name, &c2interface);
+
     sp<IComponentInterface> interface;
     if (res == C2_OK) {
+#ifndef __ANDROID_APEX__
+        c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
+#endif
         onInterfaceLoaded(c2interface);
         interface = new ComponentInterface(c2interface, mParameterCache);
     }
@@ -458,7 +483,6 @@
     return Void();
 }
 
-
 }  // namespace utils
 }  // namespace V1_0
 }  // namespace c2
diff --git a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
index 5ec88ec..7c2e014 100644
--- a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
+++ b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
@@ -201,6 +201,8 @@
         c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
         mAllocatorMutex.unlock();
         if (err != OK) {
+            native_handle_close(handle);
+            native_handle_delete(handle);
             return UNKNOWN_ERROR;
         }
         std::shared_ptr<C2GraphicBlock> block =
diff --git a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
deleted file mode 100644
index c4a72ef..0000000
--- a/media/codec2/hidl/1.0/utils/OutputBufferQueue.cpp
+++ /dev/null
@@ -1,335 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Codec2-OutputBufferQueue"
-#include <android-base/logging.h>
-
-#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
-
-#include <C2AllocatorGralloc.h>
-#include <C2BlockInternal.h>
-#include <C2Buffer.h>
-#include <C2PlatformSupport.h>
-
-#include <iomanip>
-
-namespace android {
-namespace hardware {
-namespace media {
-namespace c2 {
-namespace V1_0 {
-namespace utils {
-
-using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
-        V2_0::IGraphicBufferProducer;
-using B2HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
-        V2_0::utils::B2HGraphicBufferProducer;
-
-namespace /* unnamed */ {
-
-// Create a GraphicBuffer object from a graphic block.
-sp<GraphicBuffer> createGraphicBuffer(const C2ConstGraphicBlock& block) {
-    uint32_t width;
-    uint32_t height;
-    uint32_t format;
-    uint64_t usage;
-    uint32_t stride;
-    uint32_t generation;
-    uint64_t bqId;
-    int32_t bqSlot;
-    _UnwrapNativeCodec2GrallocMetadata(
-            block.handle(), &width, &height, &format, &usage,
-            &stride, &generation, &bqId, reinterpret_cast<uint32_t*>(&bqSlot));
-    native_handle_t *grallocHandle =
-            UnwrapNativeCodec2GrallocHandle(block.handle());
-    sp<GraphicBuffer> graphicBuffer =
-            new GraphicBuffer(grallocHandle,
-                              GraphicBuffer::CLONE_HANDLE,
-                              width, height, format,
-                              1, usage, stride);
-    native_handle_delete(grallocHandle);
-    return graphicBuffer;
-}
-
-template <typename BlockProcessor>
-void forEachBlock(C2FrameData& frameData,
-                  BlockProcessor process) {
-    for (const std::shared_ptr<C2Buffer>& buffer : frameData.buffers) {
-        if (buffer) {
-            for (const C2ConstGraphicBlock& block :
-                    buffer->data().graphicBlocks()) {
-                process(block);
-            }
-        }
-    }
-}
-
-template <typename BlockProcessor>
-void forEachBlock(const std::list<std::unique_ptr<C2Work>>& workList,
-                  BlockProcessor process) {
-    for (const std::unique_ptr<C2Work>& work : workList) {
-        if (!work) {
-            continue;
-        }
-        for (const std::unique_ptr<C2Worklet>& worklet : work->worklets) {
-            if (worklet) {
-                forEachBlock(worklet->output, process);
-            }
-        }
-    }
-}
-
-sp<HGraphicBufferProducer> getHgbp(const sp<IGraphicBufferProducer>& igbp) {
-    sp<HGraphicBufferProducer> hgbp =
-            igbp->getHalInterface<HGraphicBufferProducer>();
-    return hgbp ? hgbp :
-            new B2HGraphicBufferProducer(igbp);
-}
-
-status_t attachToBufferQueue(const C2ConstGraphicBlock& block,
-                             const sp<IGraphicBufferProducer>& igbp,
-                             uint32_t generation,
-                             int32_t* bqSlot) {
-    if (!igbp) {
-        LOG(WARNING) << "attachToBufferQueue -- null producer.";
-        return NO_INIT;
-    }
-
-    sp<GraphicBuffer> graphicBuffer = createGraphicBuffer(block);
-    graphicBuffer->setGenerationNumber(generation);
-
-    LOG(VERBOSE) << "attachToBufferQueue -- attaching buffer:"
-            << " block dimension " << block.width() << "x"
-                                   << block.height()
-            << ", graphicBuffer dimension " << graphicBuffer->getWidth() << "x"
-                                           << graphicBuffer->getHeight()
-            << std::hex << std::setfill('0')
-            << ", format 0x" << std::setw(8) << graphicBuffer->getPixelFormat()
-            << ", usage 0x" << std::setw(16) << graphicBuffer->getUsage()
-            << std::dec << std::setfill(' ')
-            << ", stride " << graphicBuffer->getStride()
-            << ", generation " << graphicBuffer->getGenerationNumber();
-
-    status_t result = igbp->attachBuffer(bqSlot, graphicBuffer);
-    if (result != OK) {
-        LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
-                        "status = " << result << ".";
-        return result;
-    }
-    LOG(VERBOSE) << "attachToBufferQueue -- attachBuffer returned slot #"
-                 << *bqSlot << ".";
-    return OK;
-}
-
-bool getBufferQueueAssignment(const C2ConstGraphicBlock& block,
-                              uint32_t* generation,
-                              uint64_t* bqId,
-                              int32_t* bqSlot) {
-    return _C2BlockFactory::GetBufferQueueData(
-            _C2BlockFactory::GetGraphicBlockPoolData(block),
-            generation, bqId, bqSlot);
-}
-
-} // unnamed namespace
-
-OutputBufferQueue::OutputBufferQueue()
-      : mGeneration{0}, mBqId{0} {
-}
-
-OutputBufferQueue::~OutputBufferQueue() {
-}
-
-bool OutputBufferQueue::configure(const sp<IGraphicBufferProducer>& igbp,
-                                  uint32_t generation,
-                                  uint64_t bqId) {
-    size_t tryNum = 0;
-    size_t success = 0;
-    sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
-    std::weak_ptr<_C2BlockPoolData>
-            poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
-    {
-        std::scoped_lock<std::mutex> l(mMutex);
-        if (generation == mGeneration) {
-            return false;
-        }
-        mIgbp = igbp;
-        mGeneration = generation;
-        mBqId = bqId;
-        mOwner = std::make_shared<int>(0);
-        for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
-            if (mBqId == 0 || !mBuffers[i]) {
-                continue;
-            }
-            std::shared_ptr<_C2BlockPoolData> data = mPoolDatas[i].lock();
-            if (!data ||
-                !_C2BlockFactory::BeginAttachBlockToBufferQueue(data)) {
-                continue;
-            }
-            ++tryNum;
-            int bqSlot;
-            mBuffers[i]->setGenerationNumber(generation);
-            status_t result = igbp->attachBuffer(&bqSlot, mBuffers[i]);
-            if (result != OK) {
-                continue;
-            }
-            bool attach =
-                    _C2BlockFactory::EndAttachBlockToBufferQueue(
-                            data, mOwner, getHgbp(mIgbp),
-                            generation, bqId, bqSlot);
-            if (!attach) {
-                igbp->cancelBuffer(bqSlot, Fence::NO_FENCE);
-                continue;
-            }
-            buffers[bqSlot] = mBuffers[i];
-            poolDatas[bqSlot] = data;
-            ++success;
-        }
-        for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
-            mBuffers[i] = buffers[i];
-            mPoolDatas[i] = poolDatas[i];
-        }
-    }
-    ALOGD("remote graphic buffer migration %zu/%zu", success, tryNum);
-    return true;
-}
-
-bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
-    std::shared_ptr<_C2BlockPoolData> data =
-            _C2BlockFactory::GetGraphicBlockPoolData(block);
-    if (!data) {
-        return false;
-    }
-    std::scoped_lock<std::mutex> l(mMutex);
-
-    if (!mIgbp) {
-        return false;
-    }
-
-    uint32_t oldGeneration;
-    uint64_t oldId;
-    int32_t oldSlot;
-    // If the block is not bufferqueue-based, do nothing.
-    if (!_C2BlockFactory::GetBufferQueueData(
-            data, &oldGeneration, &oldId, &oldSlot) || (oldId == 0)) {
-        return false;
-    }
-    // If the block's bqId is the same as the desired bqId, just hold.
-    if ((oldId == mBqId) && (oldGeneration == mGeneration)) {
-        LOG(VERBOSE) << "holdBufferQueueBlock -- import without attaching:"
-                     << " bqId " << oldId
-                     << ", bqSlot " << oldSlot
-                     << ", generation " << mGeneration
-                     << ".";
-        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp));
-        mPoolDatas[oldSlot] = data;
-        mBuffers[oldSlot] = createGraphicBuffer(block);
-        mBuffers[oldSlot]->setGenerationNumber(mGeneration);
-        return true;
-    }
-    int32_t d = (int32_t) mGeneration - (int32_t) oldGeneration;
-    LOG(WARNING) << "receiving stale buffer: generation "
-                 << mGeneration << " , diff " << d  << " : slot "
-                 << oldSlot;
-    return false;
-}
-
-status_t OutputBufferQueue::outputBuffer(
-        const C2ConstGraphicBlock& block,
-        const BnGraphicBufferProducer::QueueBufferInput& input,
-        BnGraphicBufferProducer::QueueBufferOutput* output) {
-    uint32_t generation;
-    uint64_t bqId;
-    int32_t bqSlot;
-    bool display = displayBufferQueueBlock(block);
-    if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
-        bqId == 0) {
-        // Block not from bufferqueue -- it must be attached before queuing.
-
-        mMutex.lock();
-        sp<IGraphicBufferProducer> outputIgbp = mIgbp;
-        uint32_t outputGeneration = mGeneration;
-        mMutex.unlock();
-
-        status_t status = attachToBufferQueue(
-                block, outputIgbp, outputGeneration, &bqSlot);
-        if (status != OK) {
-            LOG(WARNING) << "outputBuffer -- attaching failed.";
-            return INVALID_OPERATION;
-        }
-
-        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                     input, output);
-        if (status != OK) {
-            LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
-                       "on non-bufferqueue-based block. "
-                       "Error = " << status << ".";
-            return status;
-        }
-        return OK;
-    }
-
-    mMutex.lock();
-    sp<IGraphicBufferProducer> outputIgbp = mIgbp;
-    uint32_t outputGeneration = mGeneration;
-    uint64_t outputBqId = mBqId;
-    mMutex.unlock();
-
-    if (!outputIgbp) {
-        LOG(VERBOSE) << "outputBuffer -- output surface is null.";
-        return NO_INIT;
-    }
-
-    if (!display) {
-        LOG(WARNING) << "outputBuffer -- cannot display "
-                     "bufferqueue-based block to the bufferqueue.";
-        return UNKNOWN_ERROR;
-    }
-    if (bqId != outputBqId || generation != outputGeneration) {
-        int32_t diff = (int32_t) outputGeneration - (int32_t) generation;
-        LOG(WARNING) << "outputBuffer -- buffers from old generation to "
-                     << outputGeneration << " , diff: " << diff
-                     << " , slot: " << bqSlot;
-        return DEAD_OBJECT;
-    }
-
-    status_t status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
-                                          input, output);
-    if (status != OK) {
-        LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
-                   "on bufferqueue-based block. "
-                   "Error = " << status << ".";
-        return status;
-    }
-    return OK;
-}
-
-void OutputBufferQueue::holdBufferQueueBlocks(
-        const std::list<std::unique_ptr<C2Work>>& workList) {
-    forEachBlock(workList,
-                 std::bind(&OutputBufferQueue::registerBuffer,
-                           this, std::placeholders::_1));
-}
-
-}  // namespace utils
-}  // namespace V1_0
-}  // namespace c2
-}  // namespace media
-}  // namespace hardware
-}  // namespace android
-
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index fe7d048..27e2a05 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -37,6 +37,8 @@
 #include <vector>
 
 namespace android {
+class FilterWrapper;
+
 namespace hardware {
 namespace media {
 namespace c2 {
@@ -74,6 +76,8 @@
      */
     std::shared_ptr<ParameterCache> getParameterCache() const;
 
+    static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
deleted file mode 100644
index 80368f7..0000000
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/OutputBufferQueue.h
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
-
-#include <gui/IGraphicBufferProducer.h>
-#include <codec2/hidl/1.0/types.h>
-#include <C2Work.h>
-
-struct C2_HIDE _C2BlockPoolData;
-
-namespace android {
-namespace hardware {
-namespace media {
-namespace c2 {
-namespace V1_0 {
-namespace utils {
-
-// BufferQueue-Based Block Operations
-// ==================================
-
-// Manage BufferQueue and graphic blocks for both component and codec.
-// Manage graphic blocks ownership consistently during surface change.
-struct OutputBufferQueue {
-
-    OutputBufferQueue();
-
-    ~OutputBufferQueue();
-
-    // Configure a new surface to render graphic blocks.
-    // Graphic blocks from older surface will be migrated to new surface.
-    bool configure(const sp<IGraphicBufferProducer>& igbp,
-                   uint32_t generation,
-                   uint64_t bqId);
-
-    // Render a graphic block to current surface.
-    status_t outputBuffer(
-            const C2ConstGraphicBlock& block,
-            const BnGraphicBufferProducer::QueueBufferInput& input,
-            BnGraphicBufferProducer::QueueBufferOutput* output);
-
-    // Call holdBufferQueueBlock() on output blocks in the given workList.
-    // The OutputBufferQueue will take the ownership of output blocks.
-    //
-    // Note: This function should be called after WorkBundle has been received
-    // from another process.
-    void holdBufferQueueBlocks(
-            const std::list<std::unique_ptr<C2Work>>& workList);
-
-private:
-
-    std::mutex mMutex;
-    sp<IGraphicBufferProducer> mIgbp;
-    uint32_t mGeneration;
-    uint64_t mBqId;
-    std::shared_ptr<int> mOwner;
-    // To migrate existing buffers
-    sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
-    std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
-
-    bool registerBuffer(const C2ConstGraphicBlock& block);
-};
-
-}  // namespace utils
-}  // namespace V1_0
-}  // namespace c2
-}  // namespace media
-}  // namespace hardware
-}  // namespace android
-
-#endif  // CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index c73cb52..72f7c43 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -895,13 +895,12 @@
         BufferPoolSender* bufferPoolSender,
         std::list<BaseBlock>* baseBlocks,
         std::map<const void*, uint32_t>* baseBlockIndices) {
-    // TODO: C2InfoBuffer is not implemented.
-    (void)d;
-    (void)s;
-    (void)bufferPoolSender;
-    (void)baseBlocks;
-    (void)baseBlockIndices;
-    LOG(INFO) << "InfoBuffer not implemented.";
+    d->index = static_cast<ParamIndex>(s.index());
+    Buffer& dBuffer = d->buffer;
+    if (!objcpy(&dBuffer, s.data(), bufferPoolSender, baseBlocks, baseBlockIndices)) {
+        LOG(ERROR) << "Invalid C2InfoBuffer::data";
+        return false;
+    }
     return true;
 }
 
@@ -943,14 +942,9 @@
 
     d->infoBuffers.resize(s.infoBuffers.size());
     i = 0;
-    for (const std::shared_ptr<C2InfoBuffer>& sInfoBuffer : s.infoBuffers) {
+    for (const C2InfoBuffer& sInfoBuffer : s.infoBuffers) {
         InfoBuffer& dInfoBuffer = d->infoBuffers[i++];
-        if (!sInfoBuffer) {
-            LOG(ERROR) << "Null C2FrameData::infoBuffers["
-                       << i - 1 << "].";
-            return false;
-        }
-        if (!objcpy(&dInfoBuffer, *sInfoBuffer,
+        if (!objcpy(&dInfoBuffer, sInfoBuffer,
                 bufferPoolSender, baseBlocks, baseBlockIndices)) {
             LOG(ERROR) << "Invalid C2FrameData::infoBuffers["
                        << i - 1 << "].";
@@ -1341,6 +1335,68 @@
     return true;
 }
 
+// InfoBuffer -> C2InfoBuffer
+bool objcpy(std::vector<C2InfoBuffer> *d, const InfoBuffer& s,
+        const std::vector<C2BaseBlock>& baseBlocks) {
+
+    // Currently, a non-null C2InfoBuffer must contain exactly 1 block.
+    if (s.buffer.blocks.size() == 0) {
+        return true;
+    } else if (s.buffer.blocks.size() != 1) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer "
+                      "Currently, a C2InfoBuffer must contain exactly 1 block.";
+        return false;
+    }
+
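+    // Look up the referenced base block; the block's index must be within range.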
+    const Block &sBlock = s.buffer.blocks[0];
+    if (sBlock.index >= baseBlocks.size()) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].index: "
+                      "Array index out of range.";
+        return false;
+    }
+    const C2BaseBlock &baseBlock = baseBlocks[sBlock.index];
+
+    // Parse meta.
+    std::vector<C2Param*> sBlockMeta;
+    if (!parseParamsBlob(&sBlockMeta, sBlock.meta)) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].meta.";
+        return false;
+    }
+
+    // Copy fence.
+    C2Fence dFence;
+    if (!objcpy(&dFence, sBlock.fence)) {
+        LOG(ERROR) << "Invalid InfoBuffer::Buffer::blocks[0].fence.";
+        return false;
+    }
+
+    // Construct a block.
+    switch (baseBlock.type) {
+    case C2BaseBlock::LINEAR:
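+        // The block meta must be a single C2Hidl_RangeInfo carrying the offset and length to share.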
+        if (sBlockMeta.size() == 1 && sBlockMeta[0] != nullptr &&
+            sBlockMeta[0]->size() == sizeof(C2Hidl_RangeInfo)) {
+            C2Hidl_RangeInfo *rangeInfo =
+                    reinterpret_cast<C2Hidl_RangeInfo*>(sBlockMeta[0]);
+            d->emplace_back(C2InfoBuffer::CreateLinearBuffer(
+                    s.index,
+                    baseBlock.linear->share(
+                            rangeInfo->offset, rangeInfo->length, dFence)));
+            return true;
+        }
+        LOG(ERROR) << "Invalid Meta for C2BaseBlock::Linear InfoBuffer.";
+        break;
+    case C2BaseBlock::GRAPHIC:
+        // Graphic base blocks are not used for InfoBuffer yet.
+        LOG(ERROR) << "Unused C2BaseBlock::type for InfoBuffer.";
+        break;
+    default:
+        LOG(ERROR) << "Invalid C2BaseBlock::type for InfoBuffer.";
+        break;
+    }
+
+    return false;
+}
+
 // FrameData -> C2FrameData
 bool objcpy(C2FrameData* d, const FrameData& s,
         const std::vector<C2BaseBlock>& baseBlocks) {
@@ -1375,8 +1431,18 @@
         }
     }
 
-    // TODO: Implement this once C2InfoBuffer has constructors.
     d->infoBuffers.clear();
+    if (s.infoBuffers.size() == 0) {
+        // InfoBuffer is optional
+        return true;
+    }
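+    // Convert each HIDL InfoBuffer back into a C2InfoBuffer.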
+    d->infoBuffers.reserve(s.infoBuffers.size());
+    for (const InfoBuffer &sInfoBuffer: s.infoBuffers) {
+        if (!objcpy(&(d->infoBuffers), sInfoBuffer, baseBlocks)) {
+            LOG(ERROR) << "Invalid Framedata::infoBuffers.";
+            return false;
+        }
+    }
     return true;
 }
 
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/Android.bp b/media/codec2/hidl/1.0/vts/functional/Android.bp
index 5ea4825..0ae133c 100644
--- a/media/codec2/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 filegroup {
     name: "media_c2_v1_audio_decode_res",
     path: "res",
@@ -108,4 +117,4 @@
     srcs: [
         "res/bbb_352x288_420p_30fps_32frames.yuv",
     ],
-}
\ No newline at end of file
+}
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hidl/1.0/vts/functional/audio/Android.bp
index 014cbe9..624aad2 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsHalMediaC2V1_0TargetAudioDecTest",
     stem: "vts_media_c2_v1_0_audio_dec_test",
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 3a47ae9..abd8b2d 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -23,24 +23,47 @@
 #include <stdio.h>
 #include <algorithm>
 
-#include <C2AllocatorIon.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <codec2/hidl/client.h>
 
-using android::C2AllocatorIon;
-
 #include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> gDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+struct CompToFiles {
+    std::string mime;
+    std::string inputFile;
+    std::string infoFile;
+};
 
-// Resource directory
-static std::string sResourceDir = "";
+std::vector<CompToFiles> gCompToFiles = {
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+        {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+         "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+        {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+         "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+        {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+         "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+        {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+         "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+        {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+         "bbb_vorbis_stereo_128kbps_48000hz.info"},
+        {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+        {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+        {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+        {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+        {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+        {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -76,33 +99,26 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"xaac", xaac},          {"mp3", mp3}, {"amrnb", amrnb},
-                {"amrwb", amrwb},        {"aac", aac}, {"vorbis", vorbis},
-                {"opus", opus},          {"pcm", pcm}, {"g711.alaw", g711alaw},
-                {"g711.mlaw", g711mlaw}, {"gsm", gsm}, {"raw", raw},
-                {"flac", flac},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
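+        // Query the component for its input media type instead of inferring it from the component name.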
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
+
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
         mWorkResult = C2_OK;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
+
+        bool valid = getFileNames(mStreamIndex);
+        if (!valid) {
+            GTEST_SKIP() << "No test file for  mime " << mMime << " index: " << mStreamIndex;
+        }
+        ALOGV("mStreamIndex : %zu", mStreamIndex);
+        ALOGV("mInputFile : %s", mInputFile.c_str());
+        ALOGV("mInfoFile : %s", mInfoFile.c_str());
+
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
@@ -119,6 +135,8 @@
 
     virtual void validateTimestampList(int32_t* bitStreamInfo);
 
+    bool getFileNames(size_t streamIndex = 0);
+
     struct outputMetaData {
         uint64_t timestampUs;
         uint32_t rangeLength;
@@ -158,29 +176,12 @@
         }
     }
 
-    enum standardComp {
-        xaac,
-        mp3,
-        amrnb,
-        amrwb,
-        aac,
-        vorbis,
-        opus,
-        pcm,
-        g711alaw,
-        g711mlaw,
-        gsm,
-        raw,
-        flac,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mDisableTest;
     bool mTimestampDevTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint64_t mTimestampUs;
@@ -201,23 +202,27 @@
     std::shared_ptr<android::Codec2Client::Listener> mListener;
     std::shared_ptr<android::Codec2Client::Component> mComponent;
 
+    std::string mInputFile;
+    std::string mInfoFile;
+    size_t mStreamIndex = 0;
+
   protected:
     static void description(const std::string& description) {
         RecordProperty("description", description);
     }
 };
 
-class Codec2AudioDecHidlTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = 0;
     }
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -244,13 +249,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -271,7 +269,7 @@
 // parsing the header of elementary stream. Client needs to collect this
 // information and reconfigure
 void getInputChannelInfo(const std::shared_ptr<android::Codec2Client::Component>& component,
-                         Codec2AudioDecHidlTest::standardComp compName, int32_t* bitStreamInfo) {
+                         std::string mime, int32_t* bitStreamInfo) {
     // query nSampleRate and nChannels
     std::initializer_list<C2Param::Index> indices{
             C2StreamSampleRateInfo::output::PARAM_TYPE,
@@ -288,91 +286,33 @@
             C2Param* param = inParams[i].get();
             bitStreamInfo[i] = *(int32_t*)((uint8_t*)param + offset);
         }
-        switch (compName) {
-            case Codec2AudioDecHidlTest::amrnb: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::amrwb: {
-                ASSERT_EQ(bitStreamInfo[0], 16000);
-                ASSERT_EQ(bitStreamInfo[1], 1);
-                break;
-            }
-            case Codec2AudioDecHidlTest::gsm: {
-                ASSERT_EQ(bitStreamInfo[0], 8000);
-                break;
-            }
-            default:
-                break;
+        if (mime.find("3gpp") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("amr-wb") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 16000);
+            ASSERT_EQ(bitStreamInfo[1], 1);
+        } else if (mime.find("gsm") != std::string::npos) {
+            ASSERT_EQ(bitStreamInfo[0], 8000);
         }
     }
 }
 
-// number of elementary streams per component
-#define STREAM_COUNT 2
-
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 0) {
-    struct CompToURL {
-        Codec2AudioDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
+bool Codec2AudioDecHidlTestBase::getFileNames(size_t streamIndex) {
+    int streamCount = 0;
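+    // Select the streamIndex-th entry in gCompToFiles whose mime matches mMime.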
 
-    static const CompToURL kCompToURL[] = {
-            {Codec2AudioDecHidlTest::standardComp::xaac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::mp3,
-             {"bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.mp3"},
-             {"bbb_mp3_stereo_192kbps_48000hz.info",
-              "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::aac,
-             {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
-             {"bbb_aac_stereo_128kbps_48000hz.info",
-              "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrnb,
-             {"sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.amrnb"},
-             {"sine_amrnb_1ch_12kbps_8000hz.info",
-              "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::amrwb,
-             {"bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb"},
-             {"bbb_amrwb_1ch_14kbps_16000hz.info",
-              "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"}},
-            {Codec2AudioDecHidlTest::standardComp::vorbis,
-             {"bbb_vorbis_stereo_128kbps_48000hz.vorbis", ""},
-             {"bbb_vorbis_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::opus,
-             {"bbb_opus_stereo_128kbps_48000hz.opus", ""},
-             {"bbb_opus_stereo_128kbps_48000hz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711alaw,
-             {"bbb_g711alaw_1ch_8khz.raw", ""},
-             {"bbb_g711alaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::g711mlaw,
-             {"bbb_g711mulaw_1ch_8khz.raw", ""},
-             {"bbb_g711mulaw_1ch_8khz.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::gsm,
-             {"bbb_gsm_1ch_8khz_13kbps.raw", ""},
-             {"bbb_gsm_1ch_8khz_13kbps.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::raw,
-             {"bbb_raw_1ch_8khz_s32le.raw", ""},
-             {"bbb_raw_1ch_8khz_s32le.info", ""}},
-            {Codec2AudioDecHidlTest::standardComp::flac,
-             {"bbb_flac_stereo_680kbps_48000hz.flac", ""},
-             {"bbb_flac_stereo_680kbps_48000hz.info", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            return;
+    for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+        if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+                mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+                return true;
+            }
+            streamCount++;
         }
     }
+    return false;
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -461,7 +401,7 @@
 void Codec2AudioDecHidlTestBase::validateTimestampList(int32_t* bitStreamInfo) {
     uint32_t samplesReceived = 0;
     // Update SampleRate and ChannelCount
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     int32_t nSampleRate = bitStreamInfo[0];
     int32_t nChannels = bitStreamInfo[1];
     std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
@@ -486,7 +426,7 @@
 TEST_P(Codec2AudioDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -495,18 +435,17 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ASSERT_EQ(mComponent->start(), C2_OK);
     int32_t bitStreamInfo[2] = {0};
-    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+    ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     setupConfigParam(mComponent, bitStreamInfo);
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecDecodeTest
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = std::get<2>(GetParam());
     }
 };
 
@@ -514,42 +453,30 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    ;
-    bool signalEOS = !std::get<3>(GetParam()).compare("true");
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info, streamIndex);
-    if (!strcmp(mURL, sResourceDir.c_str())) {
-        ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
-        return;
-    }
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     // Reset total no of frames received
     mFramesReceived = 0;
     mTimestampUs = 0;
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
     ASSERT_EQ(mComponent->start(), C2_OK);
-    ALOGV("mURL : %s", mURL);
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -586,29 +513,23 @@
     description("Test Request for thumbnail");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
     ASSERT_EQ(mComponent->start(), C2_OK);
-    ALOGV("mURL : %s", mURL);
 
     // request EOS for thumbnail
     // signal EOS flag with last frame
@@ -621,7 +542,7 @@
 
     } while (!(flags & SYNC_FRAME));
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -678,22 +599,17 @@
 TEST_P(Codec2AudioDecHidlTest, FlushTest) {
     description("Tests Flush calls");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -708,9 +624,8 @@
             verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
     ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
 
-    ALOGV("mURL : %s", mURL);
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     // Decode 30 frames and flush.
     uint32_t numFramesFlushed = FLUSH_INTERVAL;
@@ -763,15 +678,10 @@
     description("Decode with multiple empty input frames");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
     std::ifstream eleStream, eleInfo;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    eleInfo.open(info);
-    ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+    eleInfo.open(mInfoFile);
+    ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
     android::Vector<FrameInfo> Info;
     int bytesCount = 0;
     uint32_t frameId = 0;
@@ -798,19 +708,18 @@
     }
     eleInfo.close();
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
         return;
     }
     ASSERT_EQ(mComponent->start(), C2_OK);
-    ALOGV("mURL : %s", mURL);
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -833,12 +742,12 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-class Codec2AudioDecCsdInputTests
-    : public Codec2AudioDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = 0;
     }
 };
 
@@ -848,27 +757,17 @@
     description("Tests codecs for flush at different states");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-    if (!strcmp(mURL, sResourceDir.c_str())) {
-        ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL  %s ", sResourceDir.c_str(), mURL);
-        return;
-    }
-    ALOGV("mURL : %s", mURL);
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
 
     int32_t bitStreamInfo[2] = {0};
-    if (mCompName == raw) {
+    if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
     } else {
-        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+        ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
     if (!setupConfigParam(mComponent, bitStreamInfo)) {
         std::cout << "[   WARN   ] Test Skipped \n";
@@ -877,11 +776,11 @@
 
     ASSERT_EQ(mComponent->start(), C2_OK);
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
 
     bool signalEOS = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -936,45 +835,37 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(gTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
-                         testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         testing::ValuesIn(gDecodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
-                         testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         testing::ValuesIn(gCsdFlushTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-    kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
-    for (auto params : kTestParameters) {
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+    parseArgs(argc, argv);
+    gTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
+    for (auto params : gTestParameters) {
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
 
-        kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
-        kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+        gCsdFlushTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
+        gCsdFlushTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index e3a4f68..d77b943 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -24,22 +24,17 @@
 #include <algorithm>
 #include <fstream>
 
-#include <C2AllocatorIon.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <codec2/hidl/client.h>
 
-using android::C2AllocatorIon;
-
 #include "media_c2_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
 
-// Resource directory
-static std::string sResourceDir = "";
+static std::vector<EncodeTestParameters> gEncodeTestParameters;
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -47,6 +42,8 @@
         : C2Buffer({block->share(block->offset(), block->size(), ::C2Fence())}) {}
 };
 
+constexpr uint32_t kMaxSamplesPerFrame = 256;
+
 namespace {
 
 class Codec2AudioEncHidlTestBase : public ::testing::Test {
@@ -75,31 +72,29 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
-        const StringToName kStringToName[] = {
-                {"aac", aac}, {"flac", flac}, {"opus", opus}, {"amrnb", amrnb}, {"amrwb", amrwb},
-        };
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
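+        // Query the component for its output media type instead of inferring it from the component name.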
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
         mEos = false;
         mCsd = false;
         mFramesReceived = 0;
         mWorkResult = C2_OK;
         mOutputSize = 0u;
-        if (mCompName == unknown_comp) mDisableTest = true;
-        if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
         getInputMaxBufSize();
+
+        c2_status_t status = getChannelCount(&mNumChannels);
+        ASSERT_EQ(status, C2_OK) << "Unable to get supported channel count";
+
+        status = getSampleRate(&mSampleRate);
+        ASSERT_EQ(status, C2_OK) << "Unable to get supported sample rate";
+
+        status = getSamplesPerFrame(mNumChannels, &mSamplesPerFrame);
+        ASSERT_EQ(status, C2_OK) << "Unable to get supported number of samples per frame";
+
+        getFile(mNumChannels, mSampleRate);
     }
 
     virtual void TearDown() override {
@@ -113,6 +108,12 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    c2_status_t getChannelCount(int32_t* nChannels);
+    c2_status_t getSampleRate(int32_t* nSampleRate);
+    c2_status_t getSamplesPerFrame(int32_t nChannels, int32_t* samplesPerFrame);
+
+    void getFile(int32_t channelCount, int32_t sampleRate);
+
     // callback function to process onWorkDone received by Listener
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
         for (std::unique_ptr<C2Work>& work : workItems) {
@@ -133,21 +134,13 @@
             }
         }
     }
-    enum standardComp {
-        aac,
-        flac,
-        opus,
-        amrnb,
-        amrwb,
-        unknown_comp,
-    };
 
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
     bool mCsd;
     bool mDisableTest;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     uint32_t mFramesReceived;
@@ -167,6 +160,12 @@
     std::shared_ptr<android::Codec2Client::Listener> mListener;
     std::shared_ptr<android::Codec2Client::Component> mComponent;
 
+    int32_t mNumChannels;
+    int32_t mSampleRate;
+    int32_t mSamplesPerFrame;
+
+    std::string mInputFile;
+
   protected:
     static void description(const std::string& description) {
         RecordProperty("description", description);
@@ -192,9 +191,8 @@
     }
 };
 
-class Codec2AudioEncHidlTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -202,7 +200,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2AudioEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -229,13 +227,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2AudioEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -252,61 +243,86 @@
     return false;
 }
 
-// Get config params for a component
-bool getConfigParams(Codec2AudioEncHidlTest::standardComp compName, int32_t* nChannels,
-                     int32_t* nSampleRate, int32_t* samplesPerFrame) {
-    switch (compName) {
-        case Codec2AudioEncHidlTest::aac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1024;
-            break;
-        case Codec2AudioEncHidlTest::flac:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 1152;
-            break;
-        case Codec2AudioEncHidlTest::opus:
-            *nChannels = 2;
-            *nSampleRate = 48000;
-            *samplesPerFrame = 960;
-            break;
-        case Codec2AudioEncHidlTest::amrnb:
-            *nChannels = 1;
-            *nSampleRate = 8000;
-            *samplesPerFrame = 160;
-            break;
-        case Codec2AudioEncHidlTest::amrwb:
-            *nChannels = 1;
-            *nSampleRate = 16000;
-            *samplesPerFrame = 160;
-            break;
-        default:
-            return false;
+c2_status_t Codec2AudioEncHidlTestBase::getChannelCount(int32_t* nChannels) {
+    std::unique_ptr<C2StreamChannelCountInfo::input> channelCount =
+            std::make_unique<C2StreamChannelCountInfo::input>();
+    std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+            C2FieldSupportedValuesQuery::Current(
+                    C2ParamField(channelCount.get(), &C2StreamChannelCountInfo::value))};
+    c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
+    if (c2err != C2_OK || validValueInfos.size() != 1u) {
+        ALOGE("querySupportedValues_vb failed for channelCount");
+        return c2err;
     }
-    return true;
+
+    // Set the default channel count.
+    *nChannels = 1;
+    const auto& c2FSV = validValueInfos[0].values;
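+    // Pick a channel count (1 or 2) that falls within the component's supported values.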
+    switch (c2FSV.type) {
+        case C2FieldSupportedValues::type_t::RANGE: {
+            const auto& range = c2FSV.range;
+            uint32_t rmax = (uint32_t)(range.max).ref<uint32_t>();
+            if (rmax >= 2) {
+                *nChannels = 2;
+            } else {
+                *nChannels = 1;
+            }
+            break;
+        }
+        case C2FieldSupportedValues::type_t::VALUES: {
+            for (const C2Value::Primitive& prim : c2FSV.values) {
+                if ((uint32_t)prim.ref<uint32_t>() == 2) {
+                    *nChannels = 2;
+                } else if ((uint32_t)prim.ref<uint32_t>() == 1) {
+                    *nChannels = 1;
+                }
+            }
+            break;
+        }
+        default:
+            break;
+    }
+    return C2_OK;
+}
+c2_status_t Codec2AudioEncHidlTestBase::getSampleRate(int32_t* nSampleRate) {
+    // Use the component's default sample rate.
+    std::vector<std::unique_ptr<C2Param>> queried;
+    c2_status_t c2err = mComponent->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
+                                          C2_DONT_BLOCK, &queried);
+    if (c2err != C2_OK || queried.size() == 0) return c2err;
+
+    size_t offset = sizeof(C2Param);
+    C2Param* param = queried[0].get();
+    *nSampleRate = *(int32_t*)((uint8_t*)param + offset);
+
+    return C2_OK;
+}
+
+c2_status_t Codec2AudioEncHidlTestBase::getSamplesPerFrame(int32_t nChannels,
+                                                           int32_t* samplesPerFrame) {
+    std::vector<std::unique_ptr<C2Param>> queried;
+    c2_status_t c2err = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+                                          C2_DONT_BLOCK, &queried);
+    if (c2err != C2_OK || queried.size() == 0) return c2err;
+
+    size_t offset = sizeof(C2Param);
+    C2Param* param = queried[0].get();
+    uint32_t maxInputSize = *(uint32_t*)((uint8_t*)param + offset);
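+    // maxInputSize is in bytes; divide by bytes per sample frame (nChannels * 2 for 16-bit PCM).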
+    *samplesPerFrame = std::min((maxInputSize / (nChannels * 2)), kMaxSamplesPerFrame);
+
+    return C2_OK;
 }
 
 // LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioEncHidlTest::standardComp comp, char* mURL) {
-    struct CompToURL {
-        Codec2AudioEncHidlTest::standardComp comp;
-        const char* mURL;
-    };
-    static const CompToURL kCompToURL[] = {
-            {Codec2AudioEncHidlTest::standardComp::aac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrnb, "bbb_raw_1ch_8khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::amrwb, "bbb_raw_1ch_16khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::flac, "bbb_raw_2ch_48khz_s16le.raw"},
-            {Codec2AudioEncHidlTest::standardComp::opus, "bbb_raw_2ch_48khz_s16le.raw"},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL);
-            return;
-        }
+void Codec2AudioEncHidlTestBase::getFile(int32_t channelCount, int32_t sampleRate) {
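+    // Choose a raw PCM clip matching the requested channel count and sample rate.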
+    std::string rawInput = "bbb_raw_1ch_8khz_s16le.raw";
+    if (channelCount == 1 && sampleRate == 16000) {
+        rawInput = "bbb_raw_1ch_16khz_s16le.raw";
+    } else if (channelCount == 2) {
+        rawInput = "bbb_raw_2ch_48khz_s16le.raw";
     }
+
+    mInputFile = sResourceDir + rawInput;
 }
 
 void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -320,9 +336,17 @@
 
     uint32_t frameID = 0;
     uint32_t maxRetry = 0;
-    int bytesCount = samplesPerFrame * nChannels * 2;
+    uint32_t bytesCount = samplesPerFrame * nChannels * 2;
     int32_t timestampIncr = (int)(((float)samplesPerFrame / nSampleRate) * 1000000);
     uint64_t timestamp = 0;
+
+    // Determine how many bytes remain in the input stream.
+    int32_t currPos = eleStream.tellg();
+    eleStream.seekg(0, eleStream.end);
+    uint32_t remainingBytes = (uint32_t)eleStream.tellg() - currPos;
+    eleStream.seekg(currPos, eleStream.beg);
+
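+    // Cap nFrames so the loop never reads past the end of the input stream.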
+    nFrames = std::min(nFrames, remainingBytes / bytesCount);
     while (1) {
         if (nFrames == 0) break;
         uint32_t flags = 0;
@@ -356,7 +380,12 @@
         char* data = (char*)malloc(bytesCount);
         ASSERT_NE(data, nullptr);
         eleStream.read(data, bytesCount);
-        ASSERT_EQ(eleStream.gcount(), bytesCount);
+        // If we have reached the end of the input stream, signal EOS.
+        if (eleStream.gcount() < bytesCount) {
+            bytesCount = eleStream.gcount();
+            if (signalEOS) flags |= C2FrameData::FLAG_END_OF_STREAM;
+        }
+
         std::shared_ptr<C2LinearBlock> block;
         ASSERT_EQ(C2_OK,
                   linearPool->fetchLinearBlock(
@@ -395,14 +424,12 @@
 TEST_P(Codec2AudioEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid audio component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2AudioEncEncodeTest
-    : public Codec2AudioEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -412,40 +439,26 @@
 TEST_P(Codec2AudioEncEncodeTest, EncodeTest) {
     ALOGV("EncodeTest");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
-    char mURL[512];
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<2>(GetParam());
     // Ratio w.r.t to mInputMaxBufSize
-    int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+    int32_t inputMaxBufRatio = std::get<3>(GetParam());
+    mSamplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (mNumChannels * 2));
 
-    int32_t nChannels;
-    int32_t nSampleRate;
-    int32_t samplesPerFrame;
+    ALOGV("signalEOS %d mInputMaxBufSize %d mSamplesPerFrame %d", signalEOS, mInputMaxBufSize,
+          mSamplesPerFrame);
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
+    ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+            << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+            << mSampleRate;
 
-    samplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (nChannels * 2));
-    ALOGV("signalEOS %d mInputMaxBufSize %d samplesPerFrame %d", signalEOS, mInputMaxBufSize,
-          samplesPerFrame);
-
-    if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
     ASSERT_EQ(mComponent->start(), C2_OK);
     std::ifstream eleStream;
     uint32_t numFrames = 16;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
-    ALOGV("mURL : %s", mURL);
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(
             mComponent, mQueueLock, mQueueCondition, mWorkQueue, mFlushedIndices, mLinearPool,
-            eleStream, numFrames, samplesPerFrame, nChannels, nSampleRate, false, signalEOS));
+            eleStream, numFrames, mSamplesPerFrame, mNumChannels, mSampleRate, false, signalEOS));
 
     // If EOS is not sent, sending empty input with EOS flag
     if (!signalEOS) {
@@ -464,11 +477,9 @@
         ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
         ASSERT_TRUE(false);
     }
-    if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
-        if (!mCsd) {
-            ALOGE("CSD buffer missing");
-            ASSERT_TRUE(false);
-        }
+    if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
+        (mMime.find("mp4a-latm") != std::string::npos)) {
+        ASSERT_TRUE(mCsd) << "CSD buffer missing";
     }
     ASSERT_EQ(mEos, true);
     ASSERT_EQ(mComponent->stop(), C2_OK);
@@ -520,31 +531,18 @@
     description("Test Request for flush");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-
     mFlushedIndices.clear();
-    int32_t nChannels;
-    int32_t nSampleRate;
-    int32_t samplesPerFrame;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
+    ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+            << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+            << mSampleRate;
 
-    if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
     ASSERT_EQ(mComponent->start(), C2_OK);
 
     std::ifstream eleStream;
     uint32_t numFramesFlushed = 30;
     uint32_t numFrames = 128;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     // flush
     std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -553,10 +551,9 @@
     ASSERT_NO_FATAL_FAILURE(
             verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
     ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
-    ALOGV("mURL : %s", mURL);
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, numFramesFlushed,
-                                          samplesPerFrame, nChannels, nSampleRate));
+                                          mSamplesPerFrame, mNumChannels, mSampleRate));
     err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
     ASSERT_EQ(err, C2_OK);
     waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
@@ -566,8 +563,8 @@
     ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream,
-                                          numFrames - numFramesFlushed, samplesPerFrame, nChannels,
-                                          nSampleRate, true));
+                                          numFrames - numFramesFlushed, mSamplesPerFrame,
+                                          mNumChannels, mSampleRate, true));
     eleStream.close();
     err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
     ASSERT_EQ(err, C2_OK);
@@ -585,34 +582,20 @@
     description("Encodes input file for different channel count");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-
-    std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
-    ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
-    ALOGV("mURL : %s", mURL);
-
-    int32_t nSampleRate;
-    int32_t samplesPerFrame;
-    int32_t nChannels;
     int32_t numFrames = 16;
     int32_t maxChannelCount = 8;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
+    std::ifstream eleStream;
+    eleStream.open(mInputFile, std::ifstream::binary);
+    ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
 
     uint64_t prevOutputSize = 0u;
     uint32_t prevChannelCount = 0u;
 
     // Loop through channel counts up to the maximum supported by the encoder
-    for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
-        ALOGV("Configuring %u encoder for channel count = %d", mCompName, nChannels);
-        if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+    for (int32_t nChannels = 1; nChannels < maxChannelCount; nChannels++) {
+        ALOGV("Configuring encoder %s  for channel count = %d", mComponentName.c_str(), nChannels);
+        if (!setupConfigParam(mComponent, nChannels, mSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
         }
@@ -632,8 +615,11 @@
         }
 
         // Check whether the input stream has enough data to encode for the higher channel count
-        int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
-        if (eleStream.gcount() < bytesCount) {
+        struct stat buf;
+        stat(mInputFile.c_str(), &buf);
+        size_t fileSize = buf.st_size;
+        int32_t bytesCount = (mSamplesPerFrame * nChannels * 2) * numFrames;
+        if (fileSize < bytesCount) {
             std::cout << "[   WARN   ] Test Skipped for ChannelCount " << nChannels
                       << " because of insufficient input data\n";
             continue;
@@ -643,7 +629,7 @@
 
         ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                               mFlushedIndices, mLinearPool, eleStream, numFrames,
-                                              samplesPerFrame, nChannels, nSampleRate));
+                                              mSamplesPerFrame, nChannels, mSampleRate));
 
         // mDisableTest will be set if buffer was not fetched properly.
         // This may happen when the config params are not proper but config succeeded
@@ -657,9 +643,6 @@
         // blocking call to ensure the application waits till all the inputs are consumed
         waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
 
-        // Validate output size based on chosen ChannelCount
-        EXPECT_GE(mOutputSize, prevOutputSize);
-
         prevChannelCount = nChannels;
         prevOutputSize = mOutputSize;
 
@@ -668,11 +651,14 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
-        ASSERT_EQ(mComponent->stop(), C2_OK);
+        // TODO(b/147348711) Use reset instead of stop when using the same instance of codec.
+        ASSERT_EQ(mComponent->reset(), C2_OK);
         mFramesReceived = 0;
         mOutputSize = 0;
         mEos = false;
@@ -685,25 +671,11 @@
     description("Encodes input file for different SampleRate");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL);
-
-    std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
-    ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
-    ALOGV("mURL : %s", mURL);
-
-    int32_t nSampleRate;
-    int32_t samplesPerFrame;
-    int32_t nChannels;
     int32_t numFrames = 16;
 
-    if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
-        std::cout << "Failed to get the config params for " << mCompName << " component\n";
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
+    std::ifstream eleStream;
+    eleStream.open(mInputFile, std::ifstream::binary);
+    ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
 
     int32_t sampleRateValues[] = {1000, 8000, 16000, 24000, 48000, 96000, 192000};
 
@@ -711,8 +683,8 @@
     uint32_t prevSampleRate = 0u;
 
     for (int32_t nSampleRate : sampleRateValues) {
-        ALOGV("Configuring %u encoder for SampleRate = %d", mCompName, nSampleRate);
-        if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+        ALOGV("Configuring encoder %s  for SampleRate = %d", mComponentName.c_str(), nSampleRate);
+        if (!setupConfigParam(mComponent, mNumChannels, nSampleRate)) {
             std::cout << "[   WARN   ] Test Skipped \n";
             return;
         }
@@ -733,8 +705,11 @@
         }
 
         // Check whether the input stream has enough data to encode for the higher SampleRate
-        int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
-        if (eleStream.gcount() < bytesCount) {
+        struct stat buf;
+        stat(mInputFile.c_str(), &buf);
+        size_t fileSize = buf.st_size;
+        int32_t bytesCount = (mSamplesPerFrame * mNumChannels * 2) * numFrames;
+        if (fileSize < bytesCount) {
             std::cout << "[   WARN   ] Test Skipped for SampleRate " << nSampleRate
                       << " because of insufficient input data\n";
             continue;
@@ -744,7 +719,7 @@
 
         ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                               mFlushedIndices, mLinearPool, eleStream, numFrames,
-                                              samplesPerFrame, nChannels, nSampleRate));
+                                              mSamplesPerFrame, mNumChannels, nSampleRate));
 
         // mDisableTest will be set if buffer was not fetched properly.
         // This may happen when the config params are not proper but config succeeded
@@ -758,12 +733,6 @@
         // blocking call to ensure the application waits till all the inputs are consumed
         waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
 
-        // Validate output size based on chosen samplerate
-        if (prevSampleRate >= nSampleRate) {
-            EXPECT_LE(mOutputSize, prevOutputSize);
-        } else {
-            EXPECT_GT(mOutputSize, prevOutputSize);
-        }
         prevSampleRate = nSampleRate;
         prevOutputSize = mOutputSize;
 
@@ -772,11 +741,14 @@
             ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
             ASSERT_TRUE(false);
         }
-        if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+        if ((mMime.find("flac") != std::string::npos) ||
+            (mMime.find("opus") != std::string::npos) ||
+            (mMime.find("mp4a-latm") != std::string::npos)) {
             ASSERT_TRUE(mCsd) << "CSD buffer missing";
         }
         ASSERT_TRUE(mEos);
-        ASSERT_EQ(mComponent->stop(), C2_OK);
+        // TODO(b/147348711) Use reset instead of stop when using the same instance of codec.
+        ASSERT_EQ(mComponent->reset(), C2_OK);
         mFramesReceived = 0;
         mOutputSize = 0;
         mEos = false;
@@ -785,37 +757,29 @@
     }
 }
 
-INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(gTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS and inputMaxBufRatio
 // inputMaxBufRatio is a ratio w.r.t. mInputMaxBufSize
 INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
-                         testing::ValuesIn(kEncodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         testing::ValuesIn(gEncodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-    kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
-    for (auto params : kTestParameters) {
-        kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
-        kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
-        kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
-        kEncodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+    parseArgs(argc, argv);
+    gTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
+    for (auto params : gTestParameters) {
+        gEncodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
+        gEncodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
+        gEncodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
+        gEncodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
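
The encoder test changes above clamp the requested frame count to the bytes remaining in the input clip (see the encodeNFrames hunk) and signal EOS on a short read. A minimal standalone sketch of that length computation, assuming an already-open std::ifstream; clampFrameCount and bytesPerFrame are illustrative names, not part of the patch:

    #include <algorithm>
    #include <cstdint>
    #include <fstream>

    // Clamp the number of frames to what remains in the stream from the current position.
    // bytesPerFrame stands in for samplesPerFrame * nChannels * 2 in the encoder test.
    uint32_t clampFrameCount(std::ifstream& stream, uint32_t nFrames, uint32_t bytesPerFrame) {
        std::streampos currPos = stream.tellg();        // remember the current read position
        stream.seekg(0, std::ifstream::end);            // seek to the end to learn the size
        uint32_t remainingBytes = static_cast<uint32_t>(stream.tellg() - currPos);
        stream.seekg(currPos);                          // restore the read position
        return std::min(nFrames, remainingBytes / bytesPerFrame);
    }
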
diff --git a/media/codec2/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hidl/1.0/vts/functional/common/Android.bp
index f9ec5ae..4106be8 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/common/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "VtsHalMediaC2V1_0CommonUtil",
     defaults: [
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 0251ec2..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -22,6 +22,48 @@
 
 #include <android/hardware/media/c2/1.0/IComponentStore.h>
 
+std::string sResourceDir = "";
+
+std::string sComponentNamePrefix = "";
+
+static constexpr struct option kArgOptions[] = {
+        {"res", required_argument, 0, 'P'},
+        {"prefix", required_argument, 0, 'p'},
+        {"help", required_argument, 0, 'h'},
+        {nullptr, 0, nullptr, 0},
+};
+
+void printUsage(char* me) {
+    std::cerr << "VTS tests to test codec2 components \n";
+    std::cerr << "Usage: " << me << " [options] \n";
+    std::cerr << "\t -P,  --res:    Mandatory path to a folder that contains test resources \n";
+    std::cerr << "\t -p,  --prefix: Optional prefix to select component/s to be tested \n";
+    std::cerr << "\t                    All codecs are tested by default \n";
+    std::cerr << "\t                    Eg: c2.android - test codecs starting with c2.android \n";
+    std::cerr << "\t                    Eg: c2.android.aac.decoder - test a specific codec \n";
+    std::cerr << "\t -h,  --help:   Print usage \n";
+}
+
+void parseArgs(int argc, char** argv) {
+    int arg;
+    int option_index;
+    while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
+        switch (arg) {
+            case 'P':
+                sResourceDir = optarg;
+                break;
+            case 'p':
+                sComponentNamePrefix = optarg;
+                break;
+            case 'h':
+                printUsage(argv[0]);
+                break;
+            default:
+                break;
+        }
+    }
+}
+
 // Test the codecs for NullBuffer, Empty Input Buffer with(out) flags set
 void testInputBuffer(const std::shared_ptr<android::Codec2Client::Component>& component,
                      std::mutex& queueLock, std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -92,8 +134,7 @@
         for (size_t i = 0; i < updates.size(); ++i) {
             C2Param* param = updates[i].get();
             if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
-                C2StreamInitDataInfo::output* csdBuffer =
-                        (C2StreamInitDataInfo::output*)(param);
+                C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
                 size_t csdSize = csdBuffer->flexCount();
                 if (csdSize > 0) csd = true;
             } else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -118,8 +159,7 @@
             typedef std::unique_lock<std::mutex> ULock;
             ULock l(queueLock);
             workQueue.push_back(std::move(work));
-            if (!flushedIndices.empty() &&
-                (frameIndexIt != flushedIndices.end())) {
+            if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
                 flushedIndices.erase(frameIndexIt);
             }
             queueCondition.notify_all();
@@ -136,15 +176,15 @@
 }
 
 // Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
     return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
 }
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind) {
-    static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind) {
+    static std::vector<TestParameters> parameters;
 
     auto instances = android::Codec2Client::GetServiceNames();
     for (std::string instance : instances) {
@@ -157,11 +197,18 @@
                 (traits.domain != domain || traits.kind != kind)) {
                 continue;
             }
-
+            if (traits.name.rfind(sComponentNamePrefix, 0) != 0) {
+                ALOGD("Skipping tests for %s. Prefix specified is %s", traits.name.c_str(),
+                      sComponentNamePrefix.c_str());
+                continue;
+            }
             parameters.push_back(std::make_tuple(instance, traits.name));
         }
     }
 
+    if (parameters.empty()) {
+        ALOGE("No test parameters added. Verify component prefix passed to the test");
+    }
     return parameters;
 }
 
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 50e3ac5..2222aaf 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,14 @@
 
 using namespace ::std::chrono;
 
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> gTestParameters;
+
+// Resource directory
+extern std::string sResourceDir;
+
+// Component name prefix
+extern std::string sComponentNamePrefix;
 
 struct FrameInfo {
     int bytesCount;
@@ -48,6 +55,18 @@
     int64_t timestamp;
 };
 
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+        const testing::TestParamInfo<std::tuple<T...>>& info) {
+    std::stringstream ss;
+    std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+    ss << info.index;
+    std::string param_string = ss.str();
+    auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+    std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+    return param_string;
+}
+
 /*
  * Handle Callback functions onWorkDone(), onTripped(),
  * onError(), onDeath(), onFramesRendered()
@@ -105,13 +124,15 @@
     std::function<void(std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
 };
 
+void parseArgs(int argc, char** argv);
+
 // Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
 
 // Return all test parameters, a list of tuple of <instance, component> with matching domain and
 // kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
-        C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+                                                     C2Component::kind_t kind);
 
 /*
  * common functions declarations
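
PrintInstanceTupleNameToString above streams every tuple element, appends the parameter index, and replaces anything non-alphanumeric with '_' so gtest accepts the generated test name. A rough standalone illustration with assumed values (the real values come from getTestParameters()):

    #include <algorithm>
    #include <cctype>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <tuple>

    int main() {
        // Assumed sample parameter: <instance, component, signalEOS, inputMaxBufRatio>.
        auto param = std::make_tuple(std::string("default"),
                                     std::string("c2.android.aac.encoder"), true, 2);
        std::stringstream ss;
        std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, param);
        ss << 5;  // stands in for info.index
        std::string name = ss.str();
        std::replace_if(name.begin(), name.end(),
                        [](char c) { return !std::isalnum(c); }, '_');
        std::cout << name << '\n';  // prints: default_c2_android_aac_encoder_1_2_5
        return 0;
    }
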
diff --git a/media/codec2/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hidl/1.0/vts/functional/component/Android.bp
index 7ec64ee..cc019da 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/component/Android.bp
@@ -14,9 +14,17 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsHalMediaC2V1_0TargetComponentTest",
     defaults: ["VtsHalMediaC2V1_0Defaults"],
     srcs: ["VtsHalMediaC2V1_0TargetComponentTest.cpp"],
 }
-
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 6122225..ffec897 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
     }
 
 namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> gInputTestParameters;
 
 // google.codec2 Component test setup
 class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
     }
 };
 
-class Codec2ComponentHidlTest
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+                                public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
     ASSERT_EQ(err, C2_OK);
 }
 
-class Codec2ComponentInputTests
-    : public Codec2ComponentHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+                                  public ::testing::WithParamInterface<InputTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
 TEST_P(Codec2ComponentInputTests, InputBufferTest) {
     description("Tests for different inputs");
 
-    uint32_t flags = std::stoul(std::get<2>(GetParam()));
-    bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+    uint32_t flags = std::get<2>(GetParam());
+    bool isNullBuffer = std::get<3>(GetParam());
     if (isNullBuffer)
         ALOGD("Testing for null input buffer with flag : %u", flags);
     else
@@ -349,32 +345,29 @@
     ASSERT_EQ(mComponent->reset(), C2_OK);
 }
 
-INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(gTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
-                        testing::ValuesIn(kInputTestParameters),
-                        android::hardware::PrintInstanceTupleNameToString<>);
+                        testing::ValuesIn(gInputTestParameters), PrintInstanceTupleNameToString<>);
 }  // anonymous namespace
 
 // TODO: Add test for Invalid work,
 // TODO: Add test for Invalid states
 int main(int argc, char** argv) {
-    kTestParameters = getTestParameters();
-    for (auto params : kTestParameters) {
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
-        kInputTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params),
-                                std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+    parseArgs(argc, argv);
+    gTestParameters = getTestParameters();
+    for (auto params : gTestParameters) {
+        gInputTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        gInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, true));
+        gInputTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        gInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_CODEC_CONFIG, false));
+        gInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+                                                       C2FrameData::FLAG_END_OF_STREAM, false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hidl/1.0/vts/functional/master/Android.bp
index 53e323e..40f5201 100644
--- a/media/codec2/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/master/Android.bp
@@ -14,9 +14,17 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsHalMediaC2V1_0TargetMasterTest",
     defaults: ["VtsHalMediaC2V1_0Defaults"],
     srcs: ["VtsHalMediaC2V1_0TargetMasterTest.cpp"],
 }
-
diff --git a/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index fb1c291..47ceed5 100644
--- a/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -73,9 +73,10 @@
             ASSERT_NE(listener, nullptr);
 
             // Create component from all known services
-            component =
-                    mClient->CreateComponentByName(listTraits[i].name.c_str(), listener, &mClient);
-            ASSERT_NE(component, nullptr)
+            const c2_status_t status =
+                    android::Codec2Client::CreateComponentByName(
+                            listTraits[i].name.c_str(), listener, &component, &mClient);
+            ASSERT_EQ(status, C2_OK)
                     << "Create component failed for " << listTraits[i].name.c_str();
         }
     }
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
index c7b0c12..f211ecf 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "VtsHalMediaC2V1_0TargetVideoDecTest",
     stem: "vts_media_c2_v1_0_video_dec_test",
@@ -40,4 +49,3 @@
     data: [":media_c2_v1_video_encode_res"],
     test_config: "VtsHalMediaC2V1_0TargetVideoEncTest.xml",
 }
-
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 12ed725..c331d0b 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -24,7 +24,6 @@
 
 #include <openssl/md5.h>
 
-#include <C2AllocatorIon.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h>
@@ -35,18 +34,47 @@
 #include <gui/IProducerListener.h>
 #include <system/window.h>
 
-using android::C2AllocatorIon;
-
 #include "media_c2_hidl_test_common.h"
 #include "media_c2_video_hidl_test_common.h"
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> gDecodeTestParameters;
 
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+struct CompToFiles {
+    std::string mime;
+    std::string inputFile;
+    std::string infoFile;
+    std::string chksumFile;
+};
+std::vector<CompToFiles> gCompToFiles = {
+        {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+         "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+        {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+         "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+        {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+         "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+        {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+         "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+        {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+         ""},
+        {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+        {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+        {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+         ""},
+        {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+        {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+         "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+        {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+         "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+        {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+         "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+        {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+        {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+};
 
 class LinearBuffer : public C2Buffer {
   public:
@@ -64,8 +92,8 @@
     // google.codec2 Video test setup
     virtual void SetUp() override {
         getParams();
+
         mDisableTest = false;
-        ALOGV("Codec2VideoDecHidlTest SetUp");
         mClient = android::Codec2Client::CreateFromService(
                 mInstanceName.c_str(),
                 !bool(android::Codec2Client::CreateFromService("default", true)));
@@ -85,26 +113,11 @@
         mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
         ASSERT_NE(mLinearPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg2", mpeg2}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},     {"av1", av1},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
         mEos = false;
         mFramesReceived = 0;
         mTimestampUs = 0u;
@@ -114,14 +127,23 @@
         mMd5Offset = 0;
         mMd5Enable = false;
         mRefMd5 = nullptr;
-        if (mCompName == unknown_comp) mDisableTest = true;
 
         C2SecureModeTuning secureModeTuning{};
         mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
-        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
             mDisableTest = true;
         }
 
+        bool valid = getFileNames(mStreamIndex);
+        if (!valid) {
+            GTEST_SKIP() << "No test file for  mime " << mMime << " index: " << mStreamIndex;
+        }
+        ALOGV("mStreamIndex : %zu", mStreamIndex);
+        ALOGV("mInputFile : %s", mInputFile.c_str());
+        ALOGV("mInfoFile : %s", mInfoFile.c_str());
+        ALOGV("mChksumFile : %s", mChksumFile.c_str());
+
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
@@ -136,6 +158,8 @@
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
 
+    bool getFileNames(size_t streamIndex = 0);
+
     /* Calculate the CKSUM for the data in inbuf */
     void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
                         uint32_t u4_height, uint8_t* pu1_cksum_p) {
@@ -220,8 +244,7 @@
                 if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
                     if (mReorderDepth < 0) {
                         C2PortReorderBufferDepthTuning::output reorderBufferDepth;
-                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
-                                          nullptr);
+                        mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
                         mReorderDepth = reorderBufferDepth.value;
                         if (mReorderDepth > 0) {
                             // TODO: Add validation for reordered output
@@ -267,18 +290,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg2,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        av1,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
 
@@ -291,7 +303,6 @@
     char* mRefMd5;
     std::list<uint64_t> mTimestampUslist;
     std::list<uint64_t> mFlushedIndices;
-    standardComp mCompName;
 
     int32_t mWorkResult;
     int32_t mReorderDepth;
@@ -308,23 +319,28 @@
     std::shared_ptr<android::Codec2Client::Listener> mListener;
     std::shared_ptr<android::Codec2Client::Component> mComponent;
 
+    std::string mInputFile;
+    std::string mInfoFile;
+    std::string mChksumFile;
+    size_t mStreamIndex = 0;
+
   protected:
     static void description(const std::string& description) {
         RecordProperty("description", description);
     }
 };
 
-class Codec2VideoDecHidlTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = 0;
     }
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoDecHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate its a C2 Component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -351,83 +367,29 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoDecHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
 // number of elementary streams per component
 #define STREAM_COUNT 3
-// LookUpTable of clips, metadata and chksum for component testing
-void GetURLChksmForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                             char* chksum, size_t streamIndex = 1) {
-    struct CompToURL {
-        Codec2VideoDecHidlTest::standardComp comp;
-        const char mURL[STREAM_COUNT][512];
-        const char info[STREAM_COUNT][512];
-        const char chksum[STREAM_COUNT][512];
-    };
-    ASSERT_TRUE(streamIndex < STREAM_COUNT);
+// number of elementary streams required for adaptive testing
+#define ADAPTIVE_STREAM_COUNT 2
+// Lookup table of clips, metadata and checksum files for component testing
+bool Codec2VideoDecHidlTestBase::getFileNames(size_t streamIndex) {
+    int streamCount = 0;
 
-    static const CompToURL kCompToURL[] = {
-            {Codec2VideoDecHidlTest::standardComp::avc,
-             {"bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_640x360_768kbps_30fps.h264", ""},
-             {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""},
-             {"bbb_avc_176x144_300kbps_60fps_chksum.md5",
-              "bbb_avc_640x360_768kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::hevc,
-             {"bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", ""},
-             {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""},
-             {"bbb_hevc_176x144_176kbps_60fps_chksum.md5",
-              "bbb_hevc_640x360_1600kbps_30fps_chksum.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg2,
-             {"bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.m2v", ""},
-             {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::h263,
-             {"", "bbb_h263_352x288_300kbps_12fps.h263", ""},
-             {"", "bbb_h263_352x288_300kbps_12fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::mpeg4,
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.m4v", ""},
-             {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
-             {"", "", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp8,
-             {"bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", ""},
-             {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""},
-             {"", "bbb_vp8_640x360_2mbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::vp9,
-             {"bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9"},
-             {"bbb_vp9_176x144_285kbps_60fps.info", "bbb_vp9_640x360_1600kbps_30fps.info",
-              "bbb_vp9_704x480_280kbps_24fps_altref_2.info"},
-             {"", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5", ""}},
-            {Codec2VideoDecHidlTest::standardComp::av1,
-             {"bbb_av1_640_360.av1", "bbb_av1_176_144.av1", ""},
-             {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""},
-             {"bbb_av1_640_360_chksum.md5", "bbb_av1_176_144_chksm.md5", ""}},
-    };
-
-    for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
-        if (kCompToURL[i].comp == comp) {
-            strcat(mURL, kCompToURL[i].mURL[streamIndex]);
-            strcat(info, kCompToURL[i].info[streamIndex]);
-            strcat(chksum, kCompToURL[i].chksum[streamIndex]);
-            return;
+    for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+        if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+            if (streamCount == streamIndex) {
+                mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+                mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+                mChksumFile = sResourceDir + gCompToFiles[i].chksumFile;
+                return true;
+            }
+            streamCount++;
         }
     }
-}
-
-void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
-                        size_t streamIndex = 1) {
-    char chksum[512];
-    strcpy(chksum, sResourceDir.c_str());
-    GetURLChksmForComponent(comp, mURL, info, chksum, streamIndex);
+    return false;
 }
 
 void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -517,7 +479,7 @@
 TEST_P(Codec2VideoDecHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
@@ -573,13 +535,12 @@
     return false;
 }
 
-class Codec2VideoDecDecodeTest
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+                                 public ::testing::WithParamInterface<DecodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = std::get<2>(GetParam());
     }
 };
 
@@ -588,24 +549,13 @@
     description("Decodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<3>(GetParam());
     mTimestampDevTest = true;
 
-    char mURL[512], info[512], chksum[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    strcpy(chksum, sResourceDir.c_str());
-
-    GetURLChksmForComponent(mCompName, mURL, info, chksum, streamIndex);
-    if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
-        ALOGV("Skipping Test, Stream not available");
-        return;
-    }
     mMd5Enable = true;
-    if (!strcmp(chksum, sResourceDir.c_str())) mMd5Enable = false;
+    if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
 
     uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
     if (!configPixelFormat(format)) {
@@ -616,23 +566,22 @@
     mFlushedIndices.clear();
     mTimestampUslist.clear();
 
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     ASSERT_EQ(mComponent->start(), C2_OK);
     // Reset total no of frames received
     mFramesReceived = 0;
     mTimestampUs = 0;
-    ALOGV("mURL : %s", mURL);
+
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
 
     size_t refChksmSize = 0;
     std::ifstream refChksum;
     if (mMd5Enable) {
-        ALOGV("chksum file name: %s", chksum);
-        refChksum.open(chksum, std::ifstream::binary | std::ifstream::ate);
+        refChksum.open(mChksumFile, std::ifstream::binary | std::ifstream::ate);
         ASSERT_EQ(refChksum.is_open(), true);
         refChksmSize = refChksum.tellg();
         refChksum.seekg(0, std::ifstream::beg);
@@ -688,9 +637,11 @@
 TEST_P(Codec2VideoDecHidlTest, AdaptiveDecodeTest) {
     description("Adaptive Decode Test");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
-    if (!(mCompName == avc || mCompName == hevc || mCompName == vp8 || mCompName == vp9 ||
-          mCompName == mpeg2))
+    if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
+          strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+          strcasestr(mMime.c_str(), "mpeg2"))) {
         return;
+    }
 
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
@@ -699,20 +650,17 @@
     uint32_t timestampOffset = 0;
     uint32_t offset = 0;
     android::Vector<FrameInfo> Info;
-    for (uint32_t i = 0; i < STREAM_COUNT * 2; i++) {
-        char mURL[512], info[512];
+    for (uint32_t i = 0; i < ADAPTIVE_STREAM_COUNT * 2; i++) {
         std::ifstream eleStream, eleInfo;
 
-        strcpy(mURL, sResourceDir.c_str());
-        strcpy(info, sResourceDir.c_str());
-        GetURLForComponent(mCompName, mURL, info, i % STREAM_COUNT);
-        if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
+        bool valid = getFileNames(i % ADAPTIVE_STREAM_COUNT);
+        if (!valid) {
             ALOGV("Stream not available, skipping this index");
             continue;
         }
 
-        eleInfo.open(info);
-        ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+        eleInfo.open(mInfoFile);
+        ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
         int bytesCount = 0;
         uint32_t flags = 0;
         uint32_t timestamp = 0;
@@ -734,13 +682,12 @@
             }
             if (timestampMax < timestamp) timestampMax = timestamp;
         }
-        timestampOffset = timestampMax;
+        timestampOffset = timestampMax + 33333;
         eleInfo.close();
 
         // Reset Total frames before second decode loop
         // mFramesReceived = 0;
-        ALOGV("mURL : %s", mURL);
-        eleStream.open(mURL, std::ifstream::binary);
+        eleStream.open(mInputFile, std::ifstream::binary);
         ASSERT_EQ(eleStream.is_open(), true);
         ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                               mFlushedIndices, mLinearPool, eleStream, &Info,
@@ -796,15 +743,9 @@
     description("Test Request for thumbnail");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
-
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     uint32_t flags = 0;
     for (size_t i = 0; i < MAX_ITERATIONS; i++) {
@@ -821,7 +762,7 @@
         } while (!(flags & SYNC_FRAME));
 
         std::ifstream eleStream;
-        eleStream.open(mURL, std::ifstream::binary);
+        eleStream.open(mInputFile, std::ifstream::binary);
         ASSERT_EQ(eleStream.is_open(), true);
         ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                               mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -883,19 +824,12 @@
 
     ASSERT_EQ(mComponent->start(), C2_OK);
 
-    char mURL[512], info[512];
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
     mFlushedIndices.clear();
 
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
-    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
-
-    ALOGV("mURL : %s", mURL);
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+    ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
 
     // flush
     std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -906,7 +840,7 @@
     ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
 
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     // Decode 30 frames and flush. Here 30 is chosen to ensure there is a key
     // frame after this so that the below section can be covered for all
@@ -959,15 +893,10 @@
     description("Decode with multiple empty input frames");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
     std::ifstream eleStream, eleInfo;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    eleInfo.open(info);
-    ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+    eleInfo.open(mInfoFile);
+    ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
     android::Vector<FrameInfo> Info;
     int bytesCount = 0;
     uint32_t frameId = 0;
@@ -995,8 +924,7 @@
     eleInfo.close();
 
     ASSERT_EQ(mComponent->start(), C2_OK);
-    ALOGV("mURL : %s", mURL);
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -1017,12 +945,12 @@
     }
 }
 
-class Codec2VideoDecCsdInputTests
-    : public Codec2VideoDecHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+                                    public ::testing::WithParamInterface<CsdFlushTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
+        mStreamIndex = 0;
     }
 };
 
@@ -1032,27 +960,20 @@
     description("Tests codecs for flush at different states");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512], info[512];
-
     android::Vector<FrameInfo> Info;
 
-    strcpy(mURL, sResourceDir.c_str());
-    strcpy(info, sResourceDir.c_str());
-    GetURLForComponent(mCompName, mURL, info);
-
-    int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+    int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
     ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
 
     ASSERT_EQ(mComponent->start(), C2_OK);
 
-    ALOGV("mURL : %s", mURL);
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
     bool flushedDecoder = false;
     bool signalEOS = false;
     bool keyFrame = false;
-    bool flushCsd = !std::get<2>(GetParam()).compare("true");
+    bool flushCsd = std::get<2>(GetParam());
 
     ALOGV("sending %d csd data ", numCsds);
     int framesToDecode = numCsds;
@@ -1121,50 +1042,42 @@
     ASSERT_EQ(mComponent->stop(), C2_OK);
 }
 
-INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(gTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // DecodeTest with StreamIndex and EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
-                         testing::ValuesIn(kDecodeTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         testing::ValuesIn(gDecodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
-                         testing::ValuesIn(kCsdFlushTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+                         testing::ValuesIn(gCsdFlushTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 }  // anonymous namespace
 
 // TODO : Video specific configuration Test
 int main(int argc, char** argv) {
-    kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
-    for (auto params : kTestParameters) {
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
-        kDecodeTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+    parseArgs(argc, argv);
+    gTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
+    for (auto params : gTestParameters) {
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
+        gDecodeTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
 
-        kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
-        kCsdFlushTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+        gCsdFlushTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), true));
+        gCsdFlushTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), false));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index ecaf3a8..6a00edd 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -23,15 +23,12 @@
 #include <stdio.h>
 #include <fstream>
 
-#include <C2AllocatorIon.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <codec2/hidl/client.h>
 
-using android::C2AllocatorIon;
-
 #include "media_c2_hidl_test_common.h"
 #include "media_c2_video_hidl_test_common.h"
 
@@ -41,13 +38,11 @@
         : C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
 };
 
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
-        kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
-        kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> gEncodeTestParameters;
 
-// Resource directory
-static std::string sResourceDir = "";
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> gEncodeResolutionTestParameters;
 
 namespace {
 
@@ -78,26 +73,13 @@
         mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
         ASSERT_NE(mGraphicPool, nullptr);
 
-        mCompName = unknown_comp;
-        struct StringToName {
-            const char* Name;
-            standardComp CompName;
-        };
+        std::vector<std::unique_ptr<C2Param>> queried;
+        mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+                          &queried);
+        ASSERT_GT(queried.size(), 0);
 
-        const StringToName kStringToName[] = {
-                {"h263", h263}, {"avc", avc}, {"mpeg4", mpeg4},
-                {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},
-        };
-
-        const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
-        // Find the component type
-        for (size_t i = 0; i < kNumStringToName; ++i) {
-            if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
-                mCompName = kStringToName[i].CompName;
-                break;
-            }
-        }
+        mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
+        std::cout << "mime : " << mMime << "\n";
         mEos = false;
         mCsd = false;
         mConfigBPictures = false;
@@ -106,7 +88,26 @@
         mTimestampUs = 0u;
         mOutputSize = 0u;
         mTimestampDevTest = false;
-        if (mCompName == unknown_comp) mDisableTest = true;
+        mWidth = ENC_DEFAULT_FRAME_WIDTH;
+        mHeight = ENC_DEFAULT_FRAME_HEIGHT;
+        mMaxWidth = 0;
+        mMaxHeight = 0;
+        mMinWidth = INT32_MAX;
+        mMinHeight = INT32_MAX;
+
+        ASSERT_EQ(getMaxMinResolutionSupported(), C2_OK);
+        mWidth = std::max(std::min(mWidth, mMaxWidth), mMinWidth);
+        mHeight = std::max(std::min(mHeight, mMaxHeight), mMinHeight);
+        ALOGV("mWidth %d mHeight %d", mWidth, mHeight);
+
+        C2SecureModeTuning secureModeTuning{};
+        mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
+        if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+            secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
+            mDisableTest = true;
+        }
+
+        getFile();
         if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
@@ -120,8 +121,9 @@
 
     // Get the test parameters from GetParam call.
     virtual void getParams() {}
-
+    void getFile();
     bool setupConfigParam(int32_t nWidth, int32_t nHeight, int32_t nBFrame = 0);
+    c2_status_t getMaxMinResolutionSupported();
 
     // callback function to process onWorkDone received by Listener
     void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -180,16 +182,7 @@
         }
     }
 
-    enum standardComp {
-        h263,
-        avc,
-        mpeg4,
-        hevc,
-        vp8,
-        vp9,
-        unknown_comp,
-    };
-
+    std::string mMime;
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
@@ -197,11 +190,16 @@
     bool mDisableTest;
     bool mConfigBPictures;
     bool mTimestampDevTest;
-    standardComp mCompName;
     uint32_t mFramesReceived;
     uint32_t mFailedWorkReceived;
     uint64_t mTimestampUs;
     uint64_t mOutputSize;
+    int32_t mWidth;
+    int32_t mHeight;
+    int32_t mMaxWidth;
+    int32_t mMaxHeight;
+    int32_t mMinWidth;
+    int32_t mMinHeight;
 
     std::list<uint64_t> mTimestampUslist;
     std::list<uint64_t> mFlushedIndices;
@@ -218,15 +216,16 @@
     std::shared_ptr<android::Codec2Client::Listener> mListener;
     std::shared_ptr<android::Codec2Client::Component> mComponent;
 
+    std::string mInputFile;
+
   protected:
     static void description(const std::string& description) {
         RecordProperty("description", description);
     }
 };
 
-class Codec2VideoEncHidlTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+                               public ::testing::WithParamInterface<TestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -234,7 +233,7 @@
 };
 
 void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
-                       Codec2VideoEncHidlTest::standardComp compName, bool& disableTest) {
+                       bool& disableTest) {
     // Validate that it's a C2 component
     if (component->getName().find("c2") == std::string::npos) {
         ALOGE("Not a c2 component");
@@ -261,13 +260,6 @@
             return;
         }
     }
-
-    // Validates component name
-    if (compName == Codec2VideoEncHidlTest::unknown_comp) {
-        ALOGE("Component InValid");
-        disableTest = true;
-        return;
-    }
     ALOGV("Component Valid");
 }
 
@@ -295,9 +287,39 @@
     return true;
 }
 
-// LookUpTable of clips for component testing
-void GetURLForComponent(char* URL) {
-    strcat(URL, "bbb_352x288_420p_30fps_32frames.yuv");
+void Codec2VideoEncHidlTestBase::getFile() {
+    mInputFile = sResourceDir + "bbb_352x288_420p_30fps_32frames.yuv";
+}
+
+void fillByteBuffer(char* inputBuffer, char* mInputData, uint32_t nWidth, int32_t nHeight) {
+    int width, height, tileWidth, tileHeight;
+    int offset = 0, frmOffset = 0;
+    int numOfPlanes = 3;
+    for (int plane = 0; plane < numOfPlanes; plane++) {
+        if (plane == 0) {
+            width = nWidth;
+            height = nHeight;
+            tileWidth = ENC_DEFAULT_FRAME_WIDTH;
+            tileHeight = ENC_DEFAULT_FRAME_HEIGHT;
+        } else {
+            width = nWidth / 2;
+            tileWidth = ENC_DEFAULT_FRAME_WIDTH / 2;
+            height = nHeight / 2;
+            tileHeight = ENC_DEFAULT_FRAME_HEIGHT / 2;
+        }
+        for (int k = 0; k < height; k += tileHeight) {
+            int rowsToCopy = std::min(height - k, tileHeight);
+            for (int j = 0; j < rowsToCopy; j++) {
+                for (int i = 0; i < width; i += tileWidth) {
+                    int colsToCopy = std::min(width - i, tileWidth);
+                    memcpy(inputBuffer + (offset + (k + j) * width + i),
+                           mInputData + (frmOffset + j * tileWidth), colsToCopy);
+                }
+            }
+        }
+        offset += width * height;
+        frmOffset += tileWidth * tileHeight;
+    }
 }
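A standalone sketch (illustration only, not part of the patch) of the plane and tile arithmetic that fillByteBuffer() above relies on, shown for two of the resolutions exercised by Codec2VideoEncResolutionTest; the snippet below is hypothetical helper code, independent of the test harness.

// Illustration: YUV420 plane sizes and how many 352x288 source tiles
// (ENC_DEFAULT_FRAME_WIDTH x ENC_DEFAULT_FRAME_HEIGHT) cover each target.
#include <cstdio>

int main() {
    const int kTileW = 352, kTileH = 288;
    const int kTargets[][2] = {{52, 18}, {1400, 442}};  // from the resolution test list
    for (const auto& t : kTargets) {
        int w = t[0], h = t[1];
        int lumaBytes = w * h;                    // Y plane
        int chromaBytes = (w / 2) * (h / 2);      // each of U and V
        int tilesX = (w + kTileW - 1) / kTileW;   // tiles needed across the luma plane
        int tilesY = (h + kTileH - 1) / kTileH;   // tiles needed down the luma plane
        std::printf("%dx%d: Y=%d bytes, U=V=%d bytes, %dx%d tiles\n",
                    w, h, lumaBytes, chromaBytes, tilesX, tilesY);
    }
    return 0;
}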
 
 void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -343,12 +365,22 @@
             ULock l(queueLock);
             flushedIndices.emplace_back(frameID);
         }
-        char* data = (char*)malloc(bytesCount);
-        ASSERT_NE(data, nullptr);
-        memset(data, 0, bytesCount);
-        if (eleStream.is_open()) {
-            eleStream.read(data, bytesCount);
-            ASSERT_EQ(eleStream.gcount(), bytesCount);
+        std::vector<uint8_t> buffer(bytesCount);
+        char* data = (char*)buffer.data();
+        if (nWidth != ENC_DEFAULT_FRAME_WIDTH || nHeight != ENC_DEFAULT_FRAME_HEIGHT) {
+            int defaultBytesCount = ENC_DEFAULT_FRAME_HEIGHT * ENC_DEFAULT_FRAME_WIDTH * 3 >> 1;
+            std::vector<uint8_t> srcBuffer(defaultBytesCount);
+            char* srcData = (char*)srcBuffer.data();
+            if (eleStream.is_open()) {
+                eleStream.read(srcData, defaultBytesCount);
+                ASSERT_EQ(eleStream.gcount(), defaultBytesCount);
+            }
+            fillByteBuffer(data, srcData, nWidth, nHeight);
+        } else {
+            if (eleStream.is_open()) {
+                eleStream.read(data, bytesCount);
+                ASSERT_EQ(eleStream.gcount(), bytesCount);
+            }
         }
         std::shared_ptr<C2GraphicBlock> block;
         err = graphicPool->fetchGraphicBlock(nWidth, nHeight, HAL_PIXEL_FORMAT_YV12,
@@ -381,7 +413,6 @@
         work->input.buffers.emplace_back(new GraphicBuffer(block));
         work->worklets.clear();
         work->worklets.emplace_back(new C2Worklet);
-        free(data);
 
         std::list<std::unique_ptr<C2Work>> items;
         items.push_back(std::move(work));
@@ -398,39 +429,77 @@
 TEST_P(Codec2VideoEncHidlTest, validateCompName) {
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     ALOGV("Checks if the given component is a valid video component");
-    validateComponent(mComponent, mCompName, mDisableTest);
+    validateComponent(mComponent, mDisableTest);
     ASSERT_EQ(mDisableTest, false);
 }
 
-class Codec2VideoEncEncodeTest
-    : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+                                 public ::testing::WithParamInterface<EncodeTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
     }
 };
 
+c2_status_t Codec2VideoEncHidlTestBase::getMaxMinResolutionSupported() {
+    std::unique_ptr<C2StreamPictureSizeInfo::input> param =
+            std::make_unique<C2StreamPictureSizeInfo::input>();
+    std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+            C2FieldSupportedValuesQuery::Current(
+                    C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
+            C2FieldSupportedValuesQuery::Current(
+                    C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
+    c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_MAY_BLOCK);
+    if (c2err != C2_OK || validValueInfos.size() != 2u) {
+        ALOGE("querySupportedValues_vb failed for pictureSize");
+        return c2err;
+    }
+
+    const auto& c2FSVWidth = validValueInfos[0].values;
+    const auto& c2FSVHeight = validValueInfos[1].values;
+    switch (c2FSVWidth.type) {
+        case C2FieldSupportedValues::type_t::RANGE: {
+            const auto& widthRange = c2FSVWidth.range;
+            const auto& heightRange = c2FSVHeight.range;
+            mMaxWidth = (uint32_t)(widthRange.max).ref<uint32_t>();
+            mMaxHeight = (uint32_t)(heightRange.max).ref<uint32_t>();
+            mMinWidth = (uint32_t)(widthRange.min).ref<uint32_t>();
+            mMinHeight = (uint32_t)(heightRange.min).ref<uint32_t>();
+            break;
+        }
+        case C2FieldSupportedValues::type_t::VALUES: {
+            int32_t curr = 0;
+            for (const C2Value::Primitive& prim : c2FSVWidth.values) {
+                curr = (uint32_t)prim.ref<uint32_t>();
+                mMaxWidth = std::max(curr, mMaxWidth);
+                mMinWidth = std::min(curr, mMinWidth);
+            }
+            for (const C2Value::Primitive& prim : c2FSVHeight.values) {
+                curr = (uint32_t)prim.ref<uint32_t>();
+                mMaxHeight = std::max(curr, mMaxHeight);
+                mMinHeight = std::min(curr, mMinHeight);
+            }
+            break;
+        }
+        default:
+            ALOGE("Non supported data");
+            return C2_BAD_VALUE;
+    }
+    return C2_OK;
+}
+
 TEST_P(Codec2VideoEncEncodeTest, EncodeTest) {
     description("Encodes input file");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-    int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
-    int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    bool signalEOS = !std::get<2>(GetParam()).compare("true");
+    bool signalEOS = std::get<2>(GetParam());
     // Send an empty frame to receive CSD data from encoder.
-    bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
-    mConfigBPictures = !std::get<4>(GetParam()).compare("true");
-
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mURL);
+    bool sendEmptyFirstFrame = std::get<3>(GetParam());
+    mConfigBPictures = std::get<4>(GetParam());
 
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
-    ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
-    ALOGV("mURL : %s", mURL);
+    eleStream.open(mInputFile, std::ifstream::binary);
+    ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
 
     mTimestampUs = 0;
     mTimestampDevTest = true;
@@ -446,10 +515,6 @@
         inputFrames--;
     }
 
-    if (!setupConfigParam(nWidth, nHeight, mConfigBPictures ? 1 : 0)) {
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
-    }
     std::vector<std::unique_ptr<C2Param>> inParams;
     c2_status_t c2_status = mComponent->query({}, {C2StreamGopTuning::output::PARAM_TYPE},
                                               C2_DONT_BLOCK, &inParams);
@@ -469,6 +534,9 @@
             mConfigBPictures = false;
         }
     }
+    if (!setupConfigParam(mWidth, mHeight, mConfigBPictures ? 1 : 0)) {
+        ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
+    }
 
     ASSERT_EQ(mComponent->start(), C2_OK);
 
@@ -478,7 +546,7 @@
     }
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mGraphicPool, eleStream, mDisableTest,
-                                          inputFrames, ENC_NUM_FRAMES, nWidth, nHeight, false,
+                                          inputFrames, ENC_NUM_FRAMES, mWidth, mHeight, false,
                                           signalEOS));
     // mDisableTest will be set if buffer was not fetched properly.
     // This may happen when resolution is not proper but config succeeded
@@ -510,9 +578,9 @@
         ASSERT_TRUE(false);
     }
 
-    if (mCompName == vp8 || mCompName == h263) {
+    if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
         ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
-    } else if (mCompName != vp9) {
+    } else if (mMime.find("vp9") == std::string::npos) {
         ASSERT_TRUE(mCsd) << "CSD Buffer not received";
     }
 
@@ -568,15 +636,8 @@
     description("Test Request for flush");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-    int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
-    int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mURL);
-
-    if (!setupConfigParam(nWidth, nHeight)) {
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
+    if (!setupConfigParam(mWidth, mHeight)) {
+        ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
     }
     ASSERT_EQ(mComponent->start(), C2_OK);
 
@@ -585,9 +646,9 @@
     std::ifstream eleStream;
     uint32_t numFramesFlushed = 10;
     uint32_t numFrames = ENC_NUM_FRAMES;
-    eleStream.open(mURL, std::ifstream::binary);
+    eleStream.open(mInputFile, std::ifstream::binary);
     ASSERT_EQ(eleStream.is_open(), true);
-    ALOGV("mURL : %s", mURL);
+
     // flush
     std::list<std::unique_ptr<C2Work>> flushedWork;
     c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
@@ -598,7 +659,7 @@
 
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mGraphicPool, eleStream, mDisableTest, 0,
-                                          numFramesFlushed, nWidth, nHeight, false, false));
+                                          numFramesFlushed, mWidth, mHeight, false, false));
     // mDisableTest will be set if buffer was not fetched properly.
     // This may happen when resolution is not proper but config succeeded
     // In these cases, we skip encoding the input stream
@@ -618,8 +679,8 @@
     ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
     ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                           mFlushedIndices, mGraphicPool, eleStream, mDisableTest,
-                                          numFramesFlushed, numFrames - numFramesFlushed, nWidth,
-                                          nHeight, true));
+                                          numFramesFlushed, numFrames - numFramesFlushed, mWidth,
+                                          mHeight, true));
     eleStream.close();
     // mDisableTest will be set if buffer was not fetched properly.
     // This may happen when resolution is not proper but config succeeded
@@ -690,8 +751,7 @@
 
 class Codec2VideoEncResolutionTest
     : public Codec2VideoEncHidlTestBase,
-      public ::testing::WithParamInterface<
-              std::tuple<std::string, std::string, std::string, std::string>> {
+      public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
     void getParams() {
         mInstanceName = std::get<0>(GetParam());
         mComponentName = std::get<1>(GetParam());
@@ -703,8 +763,8 @@
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
     std::ifstream eleStream;
-    int32_t nWidth = std::stoi(std::get<2>(GetParam()));
-    int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+    int32_t nWidth = std::get<2>(GetParam());
+    int32_t nHeight = std::get<3>(GetParam());
     ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
     mEos = false;
 
@@ -735,37 +795,30 @@
     ASSERT_EQ(mComponent->reset(), C2_OK);
 }
 
-INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
-                         android::hardware::PrintInstanceTupleNameToString<>);
+INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(gTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
-                         ::testing::ValuesIn(kEncodeResolutionTestParameters));
+                         ::testing::ValuesIn(gEncodeResolutionTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 // EncodeTest with EOS / No EOS
 INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
-                         ::testing::ValuesIn(kEncodeTestParameters));
+                         ::testing::ValuesIn(gEncodeTestParameters),
+                         PrintInstanceTupleNameToString<>);
 
 TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
     description("Encodes input file for different bitrates");
     if (mDisableTest) GTEST_SKIP() << "Test is disabled";
 
-    char mURL[512];
-
-    strcpy(mURL, sResourceDir.c_str());
-    GetURLForComponent(mURL);
-
     std::ifstream eleStream;
-    eleStream.open(mURL, std::ifstream::binary);
-    ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
-    ALOGV("mURL : %s", mURL);
+    eleStream.open(mInputFile, std::ifstream::binary);
+    ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
 
     mFlushedIndices.clear();
 
-    int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
-    int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
-    if (!setupConfigParam(nWidth, nHeight)) {
-        std::cout << "[   WARN   ] Test Skipped \n";
-        return;
+    if (!setupConfigParam(mWidth, mHeight)) {
+        ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
     }
     ASSERT_EQ(mComponent->start(), C2_OK);
 
@@ -786,8 +839,8 @@
 
         ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
                                               mFlushedIndices, mGraphicPool, eleStream,
-                                              mDisableTest, inputFrameId, ENC_NUM_FRAMES, nWidth,
-                                              nHeight, false, false));
+                                              mDisableTest, inputFrameId, ENC_NUM_FRAMES, mWidth,
+                                              mHeight, false, false));
         // mDisableTest will be set if buffer was not fetched properly.
         // This may happen when resolution is not proper but config succeeded
         // In these cases, we skip encoding the input stream
@@ -834,38 +887,26 @@
 }  // anonymous namespace
 
 int main(int argc, char** argv) {
-    kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
-    for (auto params : kTestParameters) {
-        constexpr char const* kBoolString[] = { "false", "true" };
+    parseArgs(argc, argv);
+    gTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
+    for (auto params : gTestParameters) {
         for (size_t i = 0; i < 1 << 3; ++i) {
-            kEncodeTestParameters.push_back(std::make_tuple(
-                    std::get<0>(params), std::get<1>(params),
-                    kBoolString[i & 1],
-                    kBoolString[(i >> 1) & 1],
-                    kBoolString[(i >> 2) & 1]));
+            gEncodeTestParameters.push_back(std::make_tuple(
+                    std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
         }
 
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
-        kEncodeResolutionTestParameters.push_back(
-                std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
-    }
-
-    // Set the resource directory based on command line args.
-    // Test will fail to set up if the argument is not set.
-    for (int i = 1; i < argc; i++) {
-        if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
-            sResourceDir = argv[i + 1];
-            break;
-        }
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
+        gEncodeResolutionTestParameters.push_back(
+                std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
     }
 
     ::testing::InitGoogleTest(&argc, argv);
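For reference, a standalone sketch (not part of the patch) of the 3-bit enumeration that main() above uses to build gEncodeTestParameters: each bit of the counter toggles one boolean flag, so all eight combinations are generated.

// Illustration: enumerating every combination of three test flags.
#include <cstdio>

int main() {
    for (int i = 0; i < (1 << 3); ++i) {
        bool signalEOS = i & 1;                   // tuple index 2
        bool sendEmptyFirstFrame = (i >> 1) & 1;  // tuple index 3
        bool configBPictures = (i >> 2) & 1;      // tuple index 4
        std::printf("eos=%d emptyFirstFrame=%d bPictures=%d\n",
                    signalEOS, sendEmptyFirstFrame, configBPictures);
    }
    return 0;
}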
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 386f6e2..0eeedb6 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -1,12 +1,20 @@
 // DO NOT DEPEND ON THIS DIRECTLY
 // use libcodec2-hidl-client-defaults instead
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libcodec2_hidl_client@1.1",
 
     defaults: ["hidl_defaults"],
 
     srcs: [
-        "OutputBufferQueue.cpp",
         "types.cpp",
     ],
 
@@ -44,6 +52,12 @@
         "libstagefright_bufferpool@2.0.1",
         "libui",
     ],
+
+    // Device does not boot when global ThinLTO is enabled for this library.
+    // http://b/170595429
+    lto: {
+        never: true,
+    },
 }
 
 
@@ -85,6 +99,7 @@
         "libbase",
         "libcodec2",
         "libcodec2_hidl@1.0",
+        "libcodec2_hidl_plugin_stub",
         "libcodec2_vndk",
         "libcutils",
         "libhidlbase",
@@ -99,9 +114,17 @@
         vendor: {
             exclude_shared_libs: [
                 "libstagefright_bufferqueue_helper_novndk",
+                "libcodec2_hidl_plugin_stub",
             ],
             shared_libs: [
                 "libstagefright_bufferqueue_helper",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+                "libcodec2_hidl_plugin",
             ],
         },
     },
@@ -152,14 +175,3 @@
     ],
 }
 
-// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
-cc_defaults {
-    name: "libcodec2-hidl-defaults",
-    defaults: ["libcodec2-hidl-defaults@1.1"],
-}
-
-// Alias to the latest "defaults" for Codec 2.0 HAL client
-cc_defaults {
-    name: "libcodec2-hidl-client-defaults",
-    defaults: ["libcodec2-hidl-client-defaults@1.1"],
-}
diff --git a/media/codec2/hidl/1.1/utils/Component.cpp b/media/codec2/hidl/1.1/utils/Component.cpp
index ed281e6..1d7d3d8 100644
--- a/media/codec2/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hidl/1.1/utils/Component.cpp
@@ -22,6 +22,10 @@
 #include <codec2/hidl/1.1/ComponentStore.h>
 #include <codec2/hidl/1.1/InputBufferManager.h>
 
+#ifndef __ANDROID_APEX__
+#include <FilterWrapper.h>
+#endif
+
 #include <hidl/HidlBinderSupport.h>
 #include <utils/Timers.h>
 
@@ -390,10 +394,17 @@
         uint32_t allocatorId,
         createBlockPool_cb _hidl_cb) {
     std::shared_ptr<C2BlockPool> blockPool;
+#ifdef __ANDROID_APEX__
     c2_status_t status = CreateCodec2BlockPool(
             static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
             mComponent,
             &blockPool);
+#else
+    c2_status_t status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#endif
     if (status != C2_OK) {
         blockPool = nullptr;
     }
diff --git a/media/codec2/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
index 225cd09..d47abdd 100644
--- a/media/codec2/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.1/utils/ComponentStore.cpp
@@ -35,6 +35,14 @@
 #include <ostream>
 #include <sstream>
 
+#ifndef __ANDROID_APEX__
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <dlfcn.h>
+#include <C2Config.h>
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+#endif
+
 namespace android {
 namespace hardware {
 namespace media {
@@ -176,6 +184,16 @@
     return mParameterCache;
 }
 
+#ifndef __ANDROID_APEX__
+// static
+std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
+    constexpr const char kPluginPath[] = "libc2filterplugin.so";
+    static std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+            std::make_unique<DefaultFilterPlugin>(kPluginPath));
+    return wrapper;
+}
+#endif
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponentStore
 Return<void> ComponentStore::createComponent(
         const hidl_string& name,
@@ -189,6 +207,9 @@
             mStore->createComponent(name, &c2component));
 
     if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
         onInterfaceLoaded(c2component->intf());
         component = new Component(c2component, listener, this, pool);
         if (!component) {
@@ -216,6 +237,9 @@
     c2_status_t res = mStore->createInterface(name, &c2interface);
     sp<IComponentInterface> interface;
     if (res == C2_OK) {
+#ifndef __ANDROID_APEX__
+        c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
+#endif
         onInterfaceLoaded(c2interface);
         interface = new ComponentInterface(c2interface, mParameterCache);
     }
@@ -342,6 +366,9 @@
             mStore->createComponent(name, &c2component));
 
     if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
         onInterfaceLoaded(c2component->intf());
         component = new Component(c2component, listener, this, pool);
         if (!component) {
diff --git a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp b/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
deleted file mode 100644
index 65756e8..0000000
--- a/media/codec2/hidl/1.1/utils/OutputBufferQueue.cpp
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index 1f04391..f6daee7 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -38,6 +38,8 @@
 #include <vector>
 
 namespace android {
+class FilterWrapper;
+
 namespace hardware {
 namespace media {
 namespace c2 {
@@ -75,6 +77,8 @@
      */
     std::shared_ptr<ParameterCache> getParameterCache() const;
 
+    static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+
     // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
     virtual Return<void> createComponent(
             const hidl_string& name,
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
deleted file mode 100644
index f77852d..0000000
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/OutputBufferQueue.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-#define CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
-
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
-#include <codec2/hidl/1.1/types.h>
-
-namespace android {
-namespace hardware {
-namespace media {
-namespace c2 {
-namespace V1_1 {
-namespace utils {
-
-using ::android::hardware::media::c2::V1_0::utils::OutputBufferQueue;
-
-} // namespace utils
-} // namespace V1_1
-} // namespace c2
-} // namespace media
-} // namespace hardware
-} // namespace android
-
-#endif // CODEC2_HIDL_V1_1_UTILS_OUTPUT_BUFFER_QUEUE
diff --git a/media/codec2/hidl/1.2/utils/Android.bp b/media/codec2/hidl/1.2/utils/Android.bp
new file mode 100644
index 0000000..e4e4ad5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Android.bp
@@ -0,0 +1,206 @@
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-client-defaults instead
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_hidl_client@1.2",
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_vndk",
+        "libcutils",
+        "libgui",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libgui",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+
+    // Device does not boot when global ThinLTO is enabled for this library.
+    // http://b/170595429
+    lto: {
+        never: true,
+    },
+}
+
+
+// DO NOT DEPEND ON THIS DIRECTLY
+// use libcodec2-hidl-defaults instead
+cc_library {
+    name: "libcodec2_hidl@1.2",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+        "test_com.android.media.swcodec",
+    ],
+
+    defaults: ["hidl_defaults"],
+
+    srcs: [
+        "Component.cpp",
+        "ComponentInterface.cpp",
+        "ComponentStore.cpp",
+        "Configurable.cpp",
+        "InputBufferManager.cpp",
+        "InputSurface.cpp",
+        "InputSurfaceConnection.cpp",
+        "types.cpp",
+    ],
+
+    header_libs: [
+        "libbinder_headers",
+        "libsystem_headers",
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+        "android.hardware.graphics.common@1.0",
+        "android.hardware.media@1.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "android.hardware.media.omx@1.0",
+        "libbase",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl_plugin_stub",
+        "libcodec2_vndk",
+        "libcutils",
+        "libhidlbase",
+        "liblog",
+        "libstagefright_bufferpool@2.0.1",
+        "libstagefright_bufferqueue_helper_novndk",
+        "libui",
+        "libutils",
+    ],
+
+    target: {
+        vendor: {
+            exclude_shared_libs: [
+                "libstagefright_bufferqueue_helper_novndk",
+                "libcodec2_hidl_plugin_stub",
+            ],
+            shared_libs: [
+                "libstagefright_bufferqueue_helper",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+        apex: {
+            exclude_shared_libs: [
+                "libcodec2_hidl_plugin_stub",
+                "libcodec2_hidl_plugin",
+            ],
+        },
+    },
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    export_shared_lib_headers: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_vndk",
+        "libhidlbase",
+        "libstagefright_bufferpool@2.0.1",
+        "libui",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl@1.0",
+        "libcodec2_hidl@1.1",
+        "libcodec2_hidl@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// public dependency for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults@1.2",
+    defaults: ["libcodec2-impl-defaults"],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
+        "libcodec2_hidl_client@1.0",
+        "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
+        "libcodec2_vndk",
+        "libhidlbase",
+    ],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL service implementations
+cc_defaults {
+    name: "libcodec2-hidl-defaults",
+    defaults: ["libcodec2-hidl-defaults@1.2"],
+}
+
+// Alias to the latest "defaults" for Codec 2.0 HAL client
+cc_defaults {
+    name: "libcodec2-hidl-client-defaults",
+    defaults: ["libcodec2-hidl-client-defaults@1.2"],
+}
+
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
new file mode 100644
index 0000000..8924e6d
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -0,0 +1,535 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-Component@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputBufferManager.h>
+
+#ifndef __ANDROID_APEX__
+#include <FilterWrapper.h>
+#endif
+
+#include <hidl/HidlBinderSupport.h>
+#include <utils/Timers.h>
+
+#include <C2BqBufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <chrono>
+#include <thread>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+
+// ComponentListener wrapper
+struct Component::Listener : public C2Component::Listener {
+
+    Listener(const sp<Component>& component) :
+        mComponent(component),
+        mListener(component->mListener) {
+    }
+
+    virtual void onError_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            uint32_t errorCode) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            Return<void> transStatus = listener->onError(Status::OK, errorCode);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onError_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onTripped_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+            ) override {
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            hidl_vec<SettingResult> settingResults(c2settingResult.size());
+            size_t ix = 0;
+            for (const std::shared_ptr<C2SettingResult> &c2result :
+                    c2settingResult) {
+                if (c2result) {
+                    if (!objcpy(&settingResults[ix++], *c2result)) {
+                        break;
+                    }
+                }
+            }
+            settingResults.resize(ix);
+            Return<void> transStatus = listener->onTripped(settingResults);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onTripped_nb -- "
+                           << "transaction failed.";
+            }
+        }
+    }
+
+    virtual void onWorkDone_nb(
+            std::weak_ptr<C2Component> /* c2component */,
+            std::list<std::unique_ptr<C2Work>> c2workItems) override {
+        for (const std::unique_ptr<C2Work>& work : c2workItems) {
+            if (work) {
+                if (work->worklets.empty()
+                        || !work->worklets.back()
+                        || (work->worklets.back()->output.flags &
+                            C2FrameData::FLAG_INCOMPLETE) == 0) {
+                    InputBufferManager::
+                            unregisterFrameData(mListener, work->input);
+                }
+            }
+        }
+
+        sp<IComponentListener> listener = mListener.promote();
+        if (listener) {
+            WorkBundle workBundle;
+
+            sp<Component> strongComponent = mComponent.promote();
+            beginTransferBufferQueueBlocks(c2workItems, true);
+            if (!objcpy(&workBundle, c2workItems, strongComponent ?
+                    &strongComponent->mBufferPoolSender : nullptr)) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "received corrupted work items.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            Return<void> transStatus = listener->onWorkDone(workBundle);
+            if (!transStatus.isOk()) {
+                LOG(ERROR) << "Component::Listener::onWorkDone_nb -- "
+                           << "transaction failed.";
+                endTransferBufferQueueBlocks(c2workItems, false, true);
+                return;
+            }
+            endTransferBufferQueueBlocks(c2workItems, true, true);
+        }
+    }
+
+protected:
+    wp<Component> mComponent;
+    wp<IComponentListener> mListener;
+};
+
+// Component::Sink
+struct Component::Sink : public IInputSink {
+    std::shared_ptr<Component> mComponent;
+    sp<IConfigurable> mConfigurable;
+
+    virtual Return<Status> queue(const WorkBundle& workBundle) override {
+        return mComponent->queue(workBundle);
+    }
+
+    virtual Return<sp<IConfigurable>> getConfigurable() override {
+        return mConfigurable;
+    }
+
+    Sink(const std::shared_ptr<Component>& component);
+    virtual ~Sink() override;
+
+    // Process-wide map: Component::Sink -> C2Component.
+    static std::mutex sSink2ComponentMutex;
+    static std::map<IInputSink*, std::weak_ptr<C2Component>> sSink2Component;
+
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+};
+
+std::mutex
+        Component::Sink::sSink2ComponentMutex{};
+std::map<IInputSink*, std::weak_ptr<C2Component>>
+        Component::Sink::sSink2Component{};
+
+Component::Sink::Sink(const std::shared_ptr<Component>& component)
+        : mComponent{component},
+          mConfigurable{[&component]() -> sp<IConfigurable> {
+              Return<sp<IComponentInterface>> ret1 = component->getInterface();
+              if (!ret1.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- component's transaction failed.";
+                  return nullptr;
+              }
+              Return<sp<IConfigurable>> ret2 =
+                      static_cast<sp<IComponentInterface>>(ret1)->
+                      getConfigurable();
+              if (!ret2.isOk()) {
+                  LOG(ERROR) << "Sink::Sink -- interface's transaction failed.";
+                  return nullptr;
+              }
+              return static_cast<sp<IConfigurable>>(ret2);
+          }()} {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.emplace(this, component->mComponent);
+}
+
+Component::Sink::~Sink() {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    sSink2Component.erase(this);
+}
+
+std::shared_ptr<C2Component> Component::Sink::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    std::lock_guard<std::mutex> lock(sSink2ComponentMutex);
+    auto i = sSink2Component.find(sink.get());
+    if (i == sSink2Component.end()) {
+        return nullptr;
+    }
+    return i->second.lock();
+}
+
+// Component
+Component::Component(
+        const std::shared_ptr<C2Component>& component,
+        const sp<IComponentListener>& listener,
+        const sp<ComponentStore>& store,
+        const sp<::android::hardware::media::bufferpool::V2_0::
+        IClientManager>& clientPoolManager)
+      : mComponent{component},
+        mInterface{new ComponentInterface(component->intf(),
+                                          store->getParameterCache())},
+        mListener{listener},
+        mStore{store},
+        mBufferPoolSender{clientPoolManager} {
+    // Retrieve supported parameters from store
+    // TODO: We could cache this per component/interface type
+    mInit = mInterface->status();
+}
+
+c2_status_t Component::status() const {
+    return mInit;
+}
+
+// Methods from ::android::hardware::media::c2::V1_1::IComponent
+Return<Status> Component::queue(const WorkBundle& workBundle) {
+    std::list<std::unique_ptr<C2Work>> c2works;
+
+    if (!objcpy(&c2works, workBundle)) {
+        return Status::CORRUPTED;
+    }
+
+    // Register input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2works) {
+        if (work) {
+            InputBufferManager::
+                    registerFrameData(mListener, work->input);
+        }
+    }
+
+    return static_cast<Status>(mComponent->queue_nb(&c2works));
+}
+
+Return<void> Component::flush(flush_cb _hidl_cb) {
+    std::list<std::unique_ptr<C2Work>> c2flushedWorks;
+    c2_status_t c2res = mComponent->flush_sm(
+            C2Component::FLUSH_COMPONENT,
+            &c2flushedWorks);
+
+    // Unregister input buffers.
+    for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
+        if (work) {
+            if (work->worklets.empty()
+                    || !work->worklets.back()
+                    || (work->worklets.back()->output.flags &
+                        C2FrameData::FLAG_INCOMPLETE) == 0) {
+                InputBufferManager::
+                        unregisterFrameData(mListener, work->input);
+            }
+        }
+    }
+
+    WorkBundle flushedWorkBundle;
+    Status res = static_cast<Status>(c2res);
+    beginTransferBufferQueueBlocks(c2flushedWorks, true);
+    if (c2res == C2_OK) {
+        if (!objcpy(&flushedWorkBundle, c2flushedWorks, &mBufferPoolSender)) {
+            res = Status::CORRUPTED;
+        }
+    }
+    _hidl_cb(res, flushedWorkBundle);
+    endTransferBufferQueueBlocks(c2flushedWorks, true, true);
+    return Void();
+}
+
+Return<Status> Component::drain(bool withEos) {
+    return static_cast<Status>(mComponent->drain_nb(withEos ?
+            C2Component::DRAIN_COMPONENT_WITH_EOS :
+            C2Component::DRAIN_COMPONENT_NO_EOS));
+}
+
+Return<Status> Component::setOutputSurface(
+        uint64_t blockPoolId,
+        const sp<HGraphicBufferProducer2>& surface) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface);
+        }
+    }
+    return Status::OK;
+}
+
+Return<void> Component::connectToInputSurface(
+        const sp<IInputSurface>& inputSurface,
+        connectToInputSurface_cb _hidl_cb) {
+    Status status;
+    sp<IInputSurfaceConnection> connection;
+    auto transStatus = inputSurface->connect(
+            asInputSink(),
+            [&status, &connection](
+                    Status s, const sp<IInputSurfaceConnection>& c) {
+                status = s;
+                connection = c;
+            }
+        );
+    _hidl_cb(status, connection);
+    return Void();
+}
+
+Return<void> Component::connectToOmxInputSurface(
+        const sp<HGraphicBufferProducer1>& producer,
+        const sp<::android::hardware::media::omx::V1_0::
+        IGraphicBufferSource>& source,
+        connectToOmxInputSurface_cb _hidl_cb) {
+    (void)producer;
+    (void)source;
+    (void)_hidl_cb;
+    return Void();
+}
+
+Return<Status> Component::disconnectFromInputSurface() {
+    // TODO implement
+    return Status::OK;
+}
+
+namespace /* unnamed */ {
+
+struct BlockPoolIntf : public ConfigurableC2Intf {
+    BlockPoolIntf(const std::shared_ptr<C2BlockPool>& pool)
+          : ConfigurableC2Intf{
+                "C2BlockPool:" +
+                    (pool ? std::to_string(pool->getLocalId()) : "null"),
+                0},
+            mPool{pool} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*>& params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures
+            ) override {
+        (void)params;
+        (void)mayBlock;
+        (void)failures;
+        return C2_OK;
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index>& indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const params
+            ) const override {
+        (void)indices;
+        (void)mayBlock;
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override {
+        (void)params;
+        return C2_OK;
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override {
+        (void)fields;
+        (void)mayBlock;
+        return C2_OK;
+    }
+
+protected:
+    std::shared_ptr<C2BlockPool> mPool;
+};
+
+} // unnamed namespace
+
+Return<void> Component::createBlockPool(
+        uint32_t allocatorId,
+        createBlockPool_cb _hidl_cb) {
+    std::shared_ptr<C2BlockPool> blockPool;
+#ifdef __ANDROID_APEX__
+    c2_status_t status = CreateCodec2BlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#else
+    c2_status_t status = ComponentStore::GetFilterWrapper()->createBlockPool(
+            static_cast<C2PlatformAllocatorStore::id_t>(allocatorId),
+            mComponent,
+            &blockPool);
+#endif
+    if (status != C2_OK) {
+        blockPool = nullptr;
+    }
+    if (blockPool) {
+        mBlockPoolsMutex.lock();
+        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+        mBlockPoolsMutex.unlock();
+    } else if (status == C2_OK) {
+        status = C2_CORRUPTED;
+    }
+
+    _hidl_cb(static_cast<Status>(status),
+            blockPool ? blockPool->getLocalId() : 0,
+            new CachedConfigurable(
+            std::make_unique<BlockPoolIntf>(blockPool)));
+    return Void();
+}
+
+Return<Status> Component::destroyBlockPool(uint64_t blockPoolId) {
+    std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+    return mBlockPools.erase(blockPoolId) == 1 ?
+            Status::OK : Status::CORRUPTED;
+}
+
+Return<Status> Component::start() {
+    return static_cast<Status>(mComponent->start());
+}
+
+Return<Status> Component::stop() {
+    InputBufferManager::unregisterFrameData(mListener);
+    return static_cast<Status>(mComponent->stop());
+}
+
+Return<Status> Component::reset() {
+    Status status = static_cast<Status>(mComponent->reset());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<Status> Component::release() {
+    Status status = static_cast<Status>(mComponent->release());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mBlockPools.clear();
+    }
+    InputBufferManager::unregisterFrameData(mListener);
+    return status;
+}
+
+Return<sp<IComponentInterface>> Component::getInterface() {
+    return sp<IComponentInterface>(mInterface);
+}
+
+Return<sp<IInputSink>> Component::asInputSink() {
+    std::lock_guard<std::mutex> lock(mSinkMutex);
+    if (!mSink) {
+        mSink = new Sink(shared_from_this());
+    }
+    return {mSink};
+}
+
+Return<void> Component::configureVideoTunnel(
+        uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) {
+    (void)avSyncHwId;
+    _hidl_cb(Status::OMITTED, hidl_handle{});
+    return Void();
+}
+
+Return<Status> Component::setOutputSurfaceWithSyncObj(
+        uint64_t blockPoolId, const sp<HGraphicBufferProducer2>& surface,
+        const SurfaceSyncObj& syncObject) {
+    std::shared_ptr<C2BlockPool> pool;
+    GetCodec2BlockPool(blockPoolId, mComponent, &pool);
+    if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+        C2BufferQueueBlockPool::OnRenderCallback cb =
+            [this](uint64_t producer, int32_t slot, int64_t nsecs) {
+                // TODO: batch this
+                hidl_vec<IComponentListener::RenderedFrame> rendered;
+                rendered.resize(1);
+                rendered[0] = { producer, slot, nsecs };
+                (void)mListener->onFramesRendered(rendered).isOk();
+        };
+        if (bqPool) {
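+            // Clone the sync memory handle so the block pool keeps its own
+            // copy that remains valid after this HIDL call returns.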
+            const native_handle_t *h = syncObject.syncMemory;
+            native_handle_t *syncMemory = h ? native_handle_clone(h) : nullptr;
+            uint64_t bqId = syncObject.bqId;
+            uint32_t generationId = syncObject.generationId;
+            uint64_t consumerUsage = syncObject.consumerUsage;
+
+            bqPool->setRenderCallback(cb);
+            bqPool->configureProducer(surface, syncMemory, bqId,
+                                      generationId, consumerUsage);
+        }
+    }
+    return Status::OK;
+}
+
+std::shared_ptr<C2Component> Component::findLocalComponent(
+        const sp<IInputSink>& sink) {
+    return Component::Sink::findLocalComponent(sink);
+}
+
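+// Attaches the HIDL listener bridge to the underlying C2Component without
+// blocking; a failure is recorded in mInit and reported through status().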
+void Component::initListener(const sp<Component>& self) {
+    std::shared_ptr<C2Component::Listener> c2listener =
+            std::make_shared<Listener>(self);
+    c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
+    if (res != C2_OK) {
+        mInit = res;
+    }
+}
+
+Component::~Component() {
+    InputBufferManager::unregisterFrameData(mListener);
+    mStore->reportComponentDeath(this);
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
diff --git a/media/codec2/hidl/1.2/utils/ComponentInterface.cpp b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
new file mode 100644
index 0000000..30fe4d6
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/ComponentInterface.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/ComponentInterface.h>
diff --git a/media/codec2/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
new file mode 100644
index 0000000..9fac5d5
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/ComponentStore.cpp
@@ -0,0 +1,562 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-ComponentStore@1.2"
+#include <android-base/logging.h>
+
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android-base/file.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <utils/Errors.h>
+
+#include <C2PlatformSupport.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <chrono>
+#include <ctime>
+#include <iomanip>
+#include <ostream>
+#include <sstream>
+
+#ifndef __ANDROID_APEX__
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <dlfcn.h>
+#include <C2Config.h>
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+#endif
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using namespace ::android;
+using ::android::GraphicBufferSource;
+using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
+
+namespace /* unnamed */ {
+
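+// ConfigurableC2Intf implementation that forwards parameter configuration and
+// queries to the underlying C2ComponentStore.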
+struct StoreIntf : public ConfigurableC2Intf {
+    StoreIntf(const std::shared_ptr<C2ComponentStore>& store)
+          : ConfigurableC2Intf{store ? store->getName() : "", 0},
+            mStore{store} {
+    }
+
+    virtual c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>> *const failures
+            ) override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && params.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->config_sm(params, failures);
+    }
+
+    virtual c2_status_t query(
+            const std::vector<C2Param::Index> &indices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>> *const params) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && indices.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->query_sm({}, indices, params);
+    }
+
+    virtual c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> *const params
+            ) const override {
+        return mStore->querySupportedParams_nb(params);
+    }
+
+    virtual c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery> &fields,
+            c2_blocking_t mayBlock) const override {
+        // Assume all params are blocking
+        // TODO: Filter for supported params
+        if (mayBlock == C2_DONT_BLOCK && fields.size() != 0) {
+            return C2_BLOCKING;
+        }
+        return mStore->querySupportedValues_sm(fields);
+    }
+
+protected:
+    std::shared_ptr<C2ComponentStore> mStore;
+};
+
+} // unnamed namespace
+
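+// ParameterCache that forwards validation to the owning ComponentStore.
+// onStoreDestroyed() clears the back-pointer so that later validate() calls
+// return C2_NO_INIT instead of touching a destroyed store.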
+struct ComponentStore::StoreParameterCache : public ParameterCache {
+    std::mutex mStoreMutex;
+    ComponentStore* mStore;
+
+    StoreParameterCache(ComponentStore* store): mStore{store} {
+    }
+
+    virtual c2_status_t validate(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params
+            ) override {
+        std::scoped_lock _lock(mStoreMutex);
+        return mStore ? mStore->validateSupportedParams(params) : C2_NO_INIT;
+    }
+
+    void onStoreDestroyed() {
+        std::scoped_lock _lock(mStoreMutex);
+        mStore = nullptr;
+    }
+};
+
+ComponentStore::ComponentStore(const std::shared_ptr<C2ComponentStore>& store)
+      : mConfigurable{new CachedConfigurable(std::make_unique<StoreIntf>(store))},
+        mParameterCache{std::make_shared<StoreParameterCache>(this)},
+        mStore{store} {
+
+    std::shared_ptr<C2ComponentStore> platformStore = android::GetCodec2PlatformComponentStore();
+    SetPreferredCodec2ComponentStore(store);
+
+    // Retrieve struct descriptors
+    mParamReflector = mStore->getParamReflector();
+
+    // Retrieve supported parameters from store
+    using namespace std::placeholders;
+    mInit = mConfigurable->init(mParameterCache);
+}
+
+ComponentStore::~ComponentStore() {
+    mParameterCache->onStoreDestroyed();
+}
+
+c2_status_t ComponentStore::status() const {
+    return mInit;
+}
+
+c2_status_t ComponentStore::validateSupportedParams(
+        const std::vector<std::shared_ptr<C2ParamDescriptor>>& params) {
+    c2_status_t res = C2_OK;
+
+    for (const std::shared_ptr<C2ParamDescriptor> &desc : params) {
+        if (!desc) {
+            // All descriptors should be valid
+            res = res ? res : C2_BAD_VALUE;
+            continue;
+        }
+        C2Param::CoreIndex coreIndex = desc->index().coreIndex();
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        auto it = mStructDescriptors.find(coreIndex);
+        if (it == mStructDescriptors.end()) {
+            std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+            if (!structDesc) {
+                // All supported params must be described
+                res = C2_BAD_INDEX;
+            }
+            mStructDescriptors.insert({ coreIndex, structDesc });
+        }
+    }
+    return res;
+}
+
+std::shared_ptr<ParameterCache> ComponentStore::getParameterCache() const {
+    return mParameterCache;
+}
+
+#ifndef __ANDROID_APEX__
+// static
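+// Lazily creates a process-wide FilterWrapper backed by the default filter
+// plugin library ("libc2filterplugin.so"); subsequent calls return the same
+// instance.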
+std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
+    constexpr const char kPluginPath[] = "libc2filterplugin.so";
+    static std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+            std::make_unique<DefaultFilterPlugin>(kPluginPath));
+    return wrapper;
+}
+#endif
+
+// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
+Return<void> ComponentStore::createComponent(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+Return<void> ComponentStore::createInterface(
+        const hidl_string& name,
+        createInterface_cb _hidl_cb) {
+    std::shared_ptr<C2ComponentInterface> c2interface;
+    c2_status_t res = mStore->createInterface(name, &c2interface);
+    sp<IComponentInterface> interface;
+    if (res == C2_OK) {
+#ifndef __ANDROID_APEX__
+        c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
+#endif
+        onInterfaceLoaded(c2interface);
+        interface = new ComponentInterface(c2interface, mParameterCache);
+    }
+    _hidl_cb(static_cast<Status>(res), interface);
+    return Void();
+}
+
+Return<void> ComponentStore::listComponents(listComponents_cb _hidl_cb) {
+    std::vector<std::shared_ptr<const C2Component::Traits>> c2traits =
+            mStore->listComponents();
+    hidl_vec<IComponentStore::ComponentTraits> traits(c2traits.size());
+    size_t ix = 0;
+    for (const std::shared_ptr<const C2Component::Traits> &c2trait : c2traits) {
+        if (c2trait) {
+            if (objcpy(&traits[ix], *c2trait)) {
+                ++ix;
+            } else {
+                break;
+            }
+        }
+    }
+    traits.resize(ix);
+    _hidl_cb(Status::OK, traits);
+    return Void();
+}
+
+Return<void> ComponentStore::createInputSurface(createInputSurface_cb _hidl_cb) {
+    sp<GraphicBufferSource> source = new GraphicBufferSource();
+    if (source->initCheck() != OK) {
+        _hidl_cb(Status::CORRUPTED, nullptr);
+        return Void();
+    }
+    using namespace std::placeholders;
+    sp<InputSurface> inputSurface = new InputSurface(
+            mParameterCache,
+            std::make_shared<C2ReflectorHelper>(),
+            source->getHGraphicBufferProducer(),
+            source);
+    _hidl_cb(inputSurface ? Status::OK : Status::NO_MEMORY,
+             inputSurface);
+    return Void();
+}
+
+void ComponentStore::onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf) {
+    // Invalidate the cache of unsupported struct descriptors when a new
+    // interface is loaded, since the new interface may expose new descriptors.
+    std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+    if (!mLoadedInterfaces.count(intf->getName())) {
+        mUnsupportedStructDescriptors.clear();
+        mLoadedInterfaces.emplace(intf->getName());
+    }
+}
+
+Return<void> ComponentStore::getStructDescriptors(
+        const hidl_vec<uint32_t>& indices,
+        getStructDescriptors_cb _hidl_cb) {
+    hidl_vec<StructDescriptor> descriptors(indices.size());
+    size_t dstIx = 0;
+    Status res = Status::OK;
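+    // For each requested index, serve from the descriptor cache when
+    // possible; otherwise ask the local param reflector and remember the
+    // result (or record the index as unsupported).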
+    for (size_t srcIx = 0; srcIx < indices.size(); ++srcIx) {
+        std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
+        const C2Param::CoreIndex coreIndex = C2Param::CoreIndex(indices[srcIx]).coreIndex();
+        const auto item = mStructDescriptors.find(coreIndex);
+        if (item == mStructDescriptors.end()) {
+            // Not in the cache and not known to be unsupported; query the
+            // local reflector.
+            if (!mUnsupportedStructDescriptors.count(coreIndex)) {
+                std::shared_ptr<C2StructDescriptor> structDesc =
+                    mParamReflector->describe(coreIndex);
+                if (!structDesc) {
+                    mUnsupportedStructDescriptors.emplace(coreIndex);
+                } else {
+                    mStructDescriptors.insert({ coreIndex, structDesc });
+                    if (objcpy(&descriptors[dstIx], *structDesc)) {
+                        ++dstIx;
+                        continue;
+                    }
+                    res = Status::CORRUPTED;
+                    break;
+                }
+            }
+            res = Status::NOT_FOUND;
+        } else if (item->second) {
+            if (objcpy(&descriptors[dstIx], *item->second)) {
+                ++dstIx;
+                continue;
+            }
+            res = Status::CORRUPTED;
+            break;
+        } else {
+            res = Status::NO_MEMORY;
+            break;
+        }
+    }
+    descriptors.resize(dstIx);
+    _hidl_cb(res, descriptors);
+    return Void();
+}
+
+Return<sp<IClientManager>> ComponentStore::getPoolClientManager() {
+    return ClientManager::getInstance();
+}
+
+Return<Status> ComponentStore::copyBuffer(const Buffer& src, const Buffer& dst) {
+    // TODO implement
+    (void)src;
+    (void)dst;
+    return Status::OMITTED;
+}
+
+Return<sp<IConfigurable>> ComponentStore::getConfigurable() {
+    return mConfigurable;
+}
+
+// Methods from ::android::hardware::media::c2::V1_1::IComponentStore
+Return<void> ComponentStore::createComponent_1_1(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_1_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Methods from ::android::hardware::media::c2::V1_2::IComponentStore
+Return<void> ComponentStore::createComponent_1_2(
+        const hidl_string& name,
+        const sp<IComponentListener>& listener,
+        const sp<IClientManager>& pool,
+        createComponent_1_2_cb _hidl_cb) {
+
+    sp<Component> component;
+    std::shared_ptr<C2Component> c2component;
+    Status status = static_cast<Status>(
+            mStore->createComponent(name, &c2component));
+
+    if (status == Status::OK) {
+#ifndef __ANDROID_APEX__
+        c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
+#endif
+        onInterfaceLoaded(c2component->intf());
+        component = new Component(c2component, listener, this, pool);
+        if (!component) {
+            status = Status::CORRUPTED;
+        } else {
+            reportComponentBirth(component.get());
+            if (component->status() != C2_OK) {
+                status = static_cast<Status>(component->status());
+            } else {
+                component->initListener(component);
+                if (component->status() != C2_OK) {
+                    status = static_cast<Status>(component->status());
+                }
+            }
+        }
+    }
+    _hidl_cb(status, component);
+    return Void();
+}
+
+// Called from createComponent() after a successful creation of `component`.
+void ComponentStore::reportComponentBirth(Component* component) {
+    ComponentStatus componentStatus;
+    componentStatus.c2Component = component->mComponent;
+    componentStatus.birthTime = std::chrono::system_clock::now();
+
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.emplace(component, componentStatus);
+}
+
+// Called from within the destructor of `component`. No virtual function calls
+// are made on `component` here.
+void ComponentStore::reportComponentDeath(Component* component) {
+    std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+    mComponentRoster.erase(component);
+}
+
+// Dumps component traits.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        const std::shared_ptr<const C2Component::Traits>& comp) {
+
+    constexpr const char indent[] = "    ";
+
+    out << indent << "name: " << comp->name << std::endl;
+    out << indent << "domain: " << comp->domain << std::endl;
+    out << indent << "kind: " << comp->kind << std::endl;
+    out << indent << "rank: " << comp->rank << std::endl;
+    out << indent << "mediaType: " << comp->mediaType << std::endl;
+    out << indent << "aliases:";
+    for (const auto& alias : comp->aliases) {
+        out << ' ' << alias;
+    }
+    out << std::endl;
+
+    return out;
+}
+
+// Dumps component status.
+std::ostream& ComponentStore::dump(
+        std::ostream& out,
+        ComponentStatus& compStatus) {
+
+    constexpr const char indent[] = "    ";
+
+    // Print birth time.
+    std::chrono::milliseconds ms =
+            std::chrono::duration_cast<std::chrono::milliseconds>(
+                compStatus.birthTime.time_since_epoch());
+    std::time_t birthTime = std::chrono::system_clock::to_time_t(
+            compStatus.birthTime);
+    std::tm tm = *std::localtime(&birthTime);
+    out << indent << "Creation time: "
+        << std::put_time(&tm, "%Y-%m-%d %H:%M:%S")
+        << '.' << std::setfill('0') << std::setw(3) << ms.count() % 1000
+        << std::endl;
+
+    // Print name and id.
+    std::shared_ptr<C2ComponentInterface> intf = compStatus.c2Component->intf();
+    if (!intf) {
+        out << indent << "Unknown component -- null interface" << std::endl;
+        return out;
+    }
+    out << indent << "Name: " << intf->getName() << std::endl;
+    out << indent << "Id: " << intf->getId() << std::endl;
+
+    return out;
+}
+
+// Dumps information when lshal is called.
+Return<void> ComponentStore::debug(
+        const hidl_handle& handle,
+        const hidl_vec<hidl_string>& /* args */) {
+    LOG(INFO) << "debug -- dumping...";
+    const native_handle_t *h = handle.getNativeHandle();
+    if (!h || h->numFds != 1) {
+        LOG(ERROR) << "debug -- dumping failed -- "
+                "invalid file descriptor to dump to";
+        return Void();
+    }
+    std::ostringstream out;
+
+    { // Populate "out".
+
+        constexpr const char indent[] = "  ";
+
+        // Show name.
+        out << "Beginning of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl << std::endl;
+
+        // Retrieve the list of supported components.
+        std::vector<std::shared_ptr<const C2Component::Traits>> traitsList =
+                mStore->listComponents();
+
+        // Dump the traits of supported components.
+        out << indent << "Supported components:" << std::endl << std::endl;
+        if (traitsList.size() == 0) {
+            out << indent << indent << "NONE" << std::endl << std::endl;
+        } else {
+            for (const auto& traits : traitsList) {
+                dump(out, traits) << std::endl;
+            }
+        }
+
+        // Dump active components.
+        {
+            out << indent << "Active components:" << std::endl << std::endl;
+            std::lock_guard<std::mutex> lock(mComponentRosterMutex);
+            if (mComponentRoster.size() == 0) {
+                out << indent << indent << "NONE" << std::endl << std::endl;
+            } else {
+                for (auto& pair : mComponentRoster) {
+                    dump(out, pair.second) << std::endl;
+                }
+            }
+        }
+
+        out << "End of dump -- C2ComponentStore: "
+                << mStore->getName() << std::endl;
+    }
+
+    if (!android::base::WriteStringToFd(out.str(), h->data[0])) {
+        PLOG(WARNING) << "debug -- dumping failed -- write()";
+    } else {
+        LOG(INFO) << "debug -- dumping succeeded";
+    }
+    return Void();
+}
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
diff --git a/media/codec2/hidl/1.2/utils/Configurable.cpp b/media/codec2/hidl/1.2/utils/Configurable.cpp
new file mode 100644
index 0000000..243870e
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/Configurable.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/Configurable.h>
diff --git a/media/codec2/hidl/1.2/utils/InputBufferManager.cpp b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
new file mode 100644
index 0000000..1120075
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/InputBufferManager.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/InputBufferManager.h>
diff --git a/media/codec2/hidl/1.2/utils/InputSurface.cpp b/media/codec2/hidl/1.2/utils/InputSurface.cpp
new file mode 100644
index 0000000..7c4d28b
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/InputSurface.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/InputSurface.h>
diff --git a/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
new file mode 100644
index 0000000..1bd58c2
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/InputSurfaceConnection.h>
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
new file mode 100644
index 0000000..7937664
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Component.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_2::IComponent;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+
+namespace utils {
+
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::IBinder;
+using ::android::sp;
+using ::android::wp;
+
+struct ComponentStore;
+
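+// HIDL IComponent (V1_2) implementation wrapping a C2Component and bridging
+// its callbacks to the registered IComponentListener.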
+struct Component : public IComponent,
+                   public std::enable_shared_from_this<Component> {
+    Component(
+            const std::shared_ptr<C2Component>&,
+            const sp<IComponentListener>& listener,
+            const sp<ComponentStore>& store,
+            const sp<::android::hardware::media::bufferpool::V2_0::
+                IClientManager>& clientPoolManager);
+    c2_status_t status() const;
+
+    typedef ::android::hardware::graphics::bufferqueue::V1_0::
+            IGraphicBufferProducer HGraphicBufferProducer1;
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer2;
+
+    // Methods from IComponent follow.
+    virtual Return<Status> queue(const WorkBundle& workBundle) override;
+    virtual Return<void> flush(flush_cb _hidl_cb) override;
+    virtual Return<Status> drain(bool withEos) override;
+    virtual Return<Status> setOutputSurface(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface) override;
+    virtual Return<void> connectToInputSurface(
+            const sp<IInputSurface>& inputSurface,
+            connectToInputSurface_cb _hidl_cb) override;
+    virtual Return<void> connectToOmxInputSurface(
+            const sp<HGraphicBufferProducer1>& producer,
+            const sp<::android::hardware::media::omx::V1_0::
+            IGraphicBufferSource>& source,
+            connectToOmxInputSurface_cb _hidl_cb) override;
+    virtual Return<Status> disconnectFromInputSurface() override;
+    virtual Return<void> createBlockPool(
+            uint32_t allocatorId,
+            createBlockPool_cb _hidl_cb) override;
+    virtual Return<Status> destroyBlockPool(uint64_t blockPoolId) override;
+    virtual Return<Status> start() override;
+    virtual Return<Status> stop() override;
+    virtual Return<Status> reset() override;
+    virtual Return<Status> release() override;
+    virtual Return<sp<IComponentInterface>> getInterface() override;
+    virtual Return<sp<IInputSink>> asInputSink() override;
+    virtual Return<void> configureVideoTunnel(
+            uint32_t avSyncHwId, configureVideoTunnel_cb _hidl_cb) override;
+    virtual Return<Status> setOutputSurfaceWithSyncObj(
+            uint64_t blockPoolId,
+            const sp<HGraphicBufferProducer2>& surface,
+            const SurfaceSyncObj& syncObject) override;
+
+
+    // Returns a C2Component associated to the given sink if the sink is indeed
+    // a local component. Returns nullptr otherwise.
+    //
+    // This function is used by InputSurface::connect().
+    static std::shared_ptr<C2Component> findLocalComponent(
+            const sp<IInputSink>& sink);
+
+protected:
+    c2_status_t mInit;
+    std::shared_ptr<C2Component> mComponent;
+    sp<ComponentInterface> mInterface;
+    sp<IComponentListener> mListener;
+    sp<ComponentStore> mStore;
+    ::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
+            mBufferPoolSender;
+
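+    // Lazily created IInputSink facade returned by asInputSink().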
+    struct Sink;
+    std::mutex mSinkMutex;
+    sp<Sink> mSink;
+
+    std::mutex mBlockPoolsMutex;
+    // This map keeps C2BlockPool objects that are created by createBlockPool()
+    // alive. These C2BlockPool objects can be deleted by calling
+    // destroyBlockPool(), reset() or release(), or by destroying the component.
+    std::map<uint64_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+
+    void initListener(const sp<Component>& self);
+
+    virtual ~Component() override;
+
+    friend struct ComponentStore;
+
+    struct Listener;
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
new file mode 100644
index 0000000..09d9f93
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
+
+#include <codec2/hidl/1.0/ComponentInterface.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::ComponentInterface;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_INTERFACE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
new file mode 100644
index 0000000..e95a651
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+#define CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
+
+#include <codec2/hidl/1.2/Component.h>
+#include <codec2/hidl/1.2/ComponentInterface.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <hidl/Status.h>
+
+#include <C2Component.h>
+#include <C2Param.h>
+#include <C2.h>
+
+#include <chrono>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <vector>
+
+namespace android {
+class FilterWrapper;
+
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::bufferpool::V2_0::IClientManager;
+
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::sp;
+
+struct ComponentStore : public IComponentStore {
+    ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
+    virtual ~ComponentStore();
+
+    /**
+     * Returns the status of the construction of this object.
+     */
+    c2_status_t status() const;
+
+    /**
+     * This function is called by CachedConfigurable::init() to validate
+     * supported parameters.
+     */
+    c2_status_t validateSupportedParams(
+            const std::vector<std::shared_ptr<C2ParamDescriptor>>& params);
+
+    /**
+     * Returns the store's ParameterCache. This is used for validation by
+     * Configurable::init().
+     */
+    std::shared_ptr<ParameterCache> getParameterCache() const;
+
+    static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+
+    // Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
+    virtual Return<void> createComponent(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_cb _hidl_cb) override;
+    virtual Return<void> createInterface(
+            const hidl_string& name,
+            createInterface_cb _hidl_cb) override;
+    virtual Return<void> listComponents(listComponents_cb _hidl_cb) override;
+    virtual Return<void> createInputSurface(
+            createInputSurface_cb _hidl_cb) override;
+    virtual Return<void> getStructDescriptors(
+            const hidl_vec<uint32_t>& indices,
+            getStructDescriptors_cb _hidl_cb) override;
+    virtual Return<sp<IClientManager>> getPoolClientManager() override;
+    virtual Return<Status> copyBuffer(
+            const Buffer& src,
+            const Buffer& dst) override;
+    virtual Return<sp<IConfigurable>> getConfigurable() override;
+
+    // Methods from ::android::hardware::media::c2::V1_1::IComponentStore.
+    virtual Return<void> createComponent_1_1(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_1_cb _hidl_cb) override;
+
+    // Methods from ::android::hardware::media::c2::V1_2::IComponentStore.
+    virtual Return<void> createComponent_1_2(
+            const hidl_string& name,
+            const sp<IComponentListener>& listener,
+            const sp<IClientManager>& pool,
+            createComponent_1_2_cb _hidl_cb) override;
+
+    /**
+     * Dumps information when lshal is called.
+     */
+    virtual Return<void> debug(
+            const hidl_handle& handle,
+            const hidl_vec<hidl_string>& args) override;
+
+protected:
+    sp<CachedConfigurable> mConfigurable;
+    struct StoreParameterCache;
+    std::shared_ptr<StoreParameterCache> mParameterCache;
+
+    // Does bookkeeping for an interface that has been loaded.
+    void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
+
+    c2_status_t mInit;
+    std::shared_ptr<C2ComponentStore> mStore;
+    std::shared_ptr<C2ParamReflector> mParamReflector;
+
+    std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
+    std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
+    std::set<C2String> mLoadedInterfaces;
+    mutable std::mutex mStructDescriptorsMutex;
+
+    // ComponentStore keeps track of live Components.
+
+    struct ComponentStatus {
+        std::shared_ptr<C2Component> c2Component;
+        std::chrono::system_clock::time_point birthTime;
+    };
+
+    mutable std::mutex mComponentRosterMutex;
+    std::map<Component*, ComponentStatus> mComponentRoster;
+
+    // Called whenever Component is created.
+    void reportComponentBirth(Component* component);
+    // Called only from the destructor of Component.
+    void reportComponentDeath(Component* component);
+
+    friend Component;
+
+    // Helper functions for dumping.
+
+    std::ostream& dump(
+            std::ostream& out,
+            const std::shared_ptr<const C2Component::Traits>& comp);
+
+    std::ostream& dump(
+            std::ostream& out,
+            ComponentStatus& compStatus);
+
+};
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_COMPONENT_STORE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
new file mode 100644
index 0000000..2efad31
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+#define CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
+
+#include <codec2/hidl/1.0/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::ConfigurableC2Intf;
+using ::android::hardware::media::c2::V1_0::utils::ParameterCache;
+using ::android::hardware::media::c2::V1_0::utils::CachedConfigurable;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_CONFIGURABLE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
new file mode 100644
index 0000000..e4a5db4
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
+
+#include <codec2/hidl/1.0/InputBufferManager.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::InputBufferManager;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_BUFFER_MANAGER_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
new file mode 100644
index 0000000..3fae86b
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
+
+#include <codec2/hidl/1.0/InputSurface.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::InputSurface;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
new file mode 100644
index 0000000..13a8a61
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
+#define CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
+
+#include <codec2/hidl/1.0/InputSurfaceConnection.h>
+#include <codec2/hidl/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::InputSurfaceConnection;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_INPUT_SURFACE_CONNECTION_H
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
new file mode 100644
index 0000000..d3180b0
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_2_UTILS_TYPES_H
+#define CODEC2_HIDL_V1_2_UTILS_TYPES_H
+
+#include <android/hardware/media/c2/1.2/IComponent.h>
+#include <android/hardware/media/c2/1.0/IComponentInterface.h>
+#include <android/hardware/media/c2/1.0/IComponentListener.h>
+#include <android/hardware/media/c2/1.2/IComponentStore.h>
+#include <android/hardware/media/c2/1.0/IConfigurable.h>
+#include <android/hardware/media/c2/1.0/IInputSink.h>
+#include <android/hardware/media/c2/1.0/IInputSurface.h>
+#include <android/hardware/media/c2/1.0/IInputSurfaceConnection.h>
+
+#include <codec2/hidl/1.0/types.h>
+#include <android/hardware/media/c2/1.2/types.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+namespace V1_2 {
+
+using ::android::hardware::media::c2::V1_0::BaseBlock;
+using ::android::hardware::media::c2::V1_0::Block;
+using ::android::hardware::media::c2::V1_0::Buffer;
+using ::android::hardware::media::c2::V1_0::FieldDescriptor;
+using ::android::hardware::media::c2::V1_0::FieldId;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValues;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQuery;
+using ::android::hardware::media::c2::V1_0::FieldSupportedValuesQueryResult;
+using ::android::hardware::media::c2::V1_0::FrameData;
+using ::android::hardware::media::c2::V1_0::InfoBuffer;
+using ::android::hardware::media::c2::V1_0::ParamDescriptor;
+using ::android::hardware::media::c2::V1_0::ParamField;
+using ::android::hardware::media::c2::V1_0::ParamFieldValues;
+using ::android::hardware::media::c2::V1_0::ParamIndex;
+using ::android::hardware::media::c2::V1_0::Params;
+using ::android::hardware::media::c2::V1_0::PrimitiveValue;
+using ::android::hardware::media::c2::V1_0::SettingResult;
+using ::android::hardware::media::c2::V1_0::Status;
+using ::android::hardware::media::c2::V1_0::StructDescriptor;
+using ::android::hardware::media::c2::V1_0::ValueRange;
+using ::android::hardware::media::c2::V1_0::Work;
+using ::android::hardware::media::c2::V1_0::WorkBundle;
+using ::android::hardware::media::c2::V1_0::WorkOrdinal;
+using ::android::hardware::media::c2::V1_0::Worklet;
+
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
+
+using ::android::hardware::media::c2::V1_0::IComponentInterface;
+using ::android::hardware::media::c2::V1_0::IComponentListener;
+using ::android::hardware::media::c2::V1_0::IConfigurable;
+using ::android::hardware::media::c2::V1_0::IInputSink;
+using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::android::hardware::media::c2::V1_0::IInputSurfaceConnection;
+
+namespace utils {
+
+using ::android::hardware::media::c2::V1_0::utils::toC2Status;
+
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Range;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RangeInfo;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_Rect;
+using ::android::hardware::media::c2::V1_0::utils::C2Hidl_RectInfo;
+
+using ::android::hardware::media::c2::V1_0::utils::objcpy;
+using ::android::hardware::media::c2::V1_0::utils::parseParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::createParamsBlob;
+using ::android::hardware::media::c2::V1_0::utils::copyParamsFromBlob;
+using ::android::hardware::media::c2::V1_0::utils::updateParamsFromBlob;
+
+using ::android::hardware::media::c2::V1_0::utils::BufferPoolSender;
+using ::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender;
+
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::beginTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlock;
+using ::android::hardware::media::c2::V1_0::utils::endTransferBufferQueueBlocks;
+using ::android::hardware::media::c2::V1_0::utils::displayBufferQueueBlock;
+
+using ::android::hardware::media::c2::V1_0::utils::operator<<;
+
+} // namespace utils
+} // namespace V1_2
+} // namespace c2
+} // namespace media
+} // namespace hardware
+} // namespace android
+
+#endif // CODEC2_HIDL_V1_2_UTILS_TYPES_H
diff --git a/media/codec2/hidl/1.2/utils/types.cpp b/media/codec2/hidl/1.2/utils/types.cpp
new file mode 100644
index 0000000..9e0a08b
--- /dev/null
+++ b/media/codec2/hidl/1.2/utils/types.cpp
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <codec2/hidl/1.2/types.h>
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hidl/client/Android.bp
index 3c37990..0e52813 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hidl/client/Android.bp
@@ -1,8 +1,22 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libcodec2_client",
 
     srcs: [
         "client.cpp",
+        "output.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
     ],
 
     shared_libs: [
@@ -10,11 +24,13 @@
         "android.hardware.media.bufferpool@2.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libbase",
         "libbinder",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
         "libcutils",
         "libgui",
@@ -32,11 +48,12 @@
     export_shared_lib_headers: [
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
+        "android.hardware.media.c2@1.2",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
+        "libcodec2_hidl_client@1.2",
         "libcodec2_vndk",
     ],
 
 }
-
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 7e4352d..42b3c43 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -21,6 +21,7 @@
 #include <codec2/hidl/client.h>
 #include <C2Debug.h>
 #include <C2BufferPriv.h>
+#include <C2Config.h> // for C2StreamUsageTuning
 #include <C2PlatformSupport.h>
 
 #include <android/hardware/media/bufferpool/2.0/IClientManager.h>
@@ -33,15 +34,19 @@
 
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
-#include <codec2/hidl/1.0/OutputBufferQueue.h>
 #include <codec2/hidl/1.0/types.h>
-#include <codec2/hidl/1.1/OutputBufferQueue.h>
 #include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/types.h>
+#include <codec2/hidl/output.h>
 
 #include <cutils/native_handle.h>
 #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
+#include <hardware/gralloc.h> // for GRALLOC_USAGE_*
 #include <hidl/HidlSupport.h>
+#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
+#include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
+
 
 #include <deque>
 #include <iterator>
@@ -73,12 +78,17 @@
         V2_0::utils::B2HGraphicBufferProducer;
 using H2BGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
         V2_0::utils::H2BGraphicBufferProducer;
+using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
 namespace /* unnamed */ {
 
 // c2_status_t value that corresponds to hwbinder transaction failure.
 constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
 
+// By default, prepare buffers so that they can be displayed on any of the common surfaces
+constexpr uint64_t kDefaultConsumerUsage =
+    (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
+
 // Searches for a name in GetServiceNames() and returns the index found. If the
 // name is not found, the returned index will be equal to
 // GetServiceNames().size().
@@ -592,9 +602,9 @@
 
 // Codec2Client::Component::OutputBufferQueue
 struct Codec2Client::Component::OutputBufferQueue :
-        hardware::media::c2::V1_1::utils::OutputBufferQueue {
+        hardware::media::c2::OutputBufferQueue {
     OutputBufferQueue()
-          : hardware::media::c2::V1_1::utils::OutputBufferQueue() {
+          : hardware::media::c2::OutputBufferQueue() {
     }
 };
 
@@ -612,6 +622,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mServiceIndex{serviceIndex} {
     Return<sp<IClientManager>> transResult = base->getPoolClientManager();
     if (!transResult.isOk()) {
@@ -633,6 +644,10 @@
     return mBase1_1;
 }
 
+sp<Codec2Client::Base1_2> const& Codec2Client::getBase1_2() const {
+    return mBase1_2;
+}
+
 std::string const& Codec2Client::getServiceName() const {
     return GetServiceNames()[mServiceIndex];
 }
@@ -645,8 +660,9 @@
     c2_status_t status;
     sp<Component::HidlListener> hidlListener = new Component::HidlListener{};
     hidlListener->base = listener;
-    Return<void> transStatus = mBase1_1 ?
-        mBase1_1->createComponent_1_1(
+    Return<void> transStatus;
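+    // Prefer the newest IComponentStore version available, falling back from 1.2
+    // to 1.1 and finally to 1.0.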
+    if (mBase1_2) {
+        transStatus = mBase1_2->createComponent_1_2(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -659,8 +675,25 @@
                 }
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
-            }) :
-        mBase1_0->createComponent(
+            });
+    }
+    else if (mBase1_1) {
+        transStatus = mBase1_1->createComponent_1_1(
+            name,
+            hidlListener,
+            ClientManager::getInstance(),
+            [&status, component, hidlListener](
+                    Status s,
+                    const sp<IComponent>& c) {
+                status = static_cast<c2_status_t>(s);
+                if (status != C2_OK) {
+                    return;
+                }
+                *component = std::make_shared<Codec2Client::Component>(c);
+                hidlListener->component = *component;
+            });
+    } else if (mBase1_0) { // ver1_0
+        transStatus = mBase1_0->createComponent(
             name,
             hidlListener,
             ClientManager::getInstance(),
@@ -674,6 +707,9 @@
                 *component = std::make_shared<Codec2Client::Component>(c);
                 hidlListener->component = *component;
             });
+    } else {
+        status = C2_CORRUPTED;
+    }
     if (!transStatus.isOk()) {
         LOG(ERROR) << "createComponent(" << name.c_str()
                    << ") -- transaction failed.";
@@ -843,6 +879,11 @@
                             return;
                         }
                     });
+            if (!transStatus.isOk()) {
+                LOG(DEBUG) << "SimpleParamReflector -- transaction failed: "
+                           << transStatus.description();
+                descriptor.reset();
+            }
             return descriptor;
         }
 
@@ -1003,6 +1044,8 @@
                 std::scoped_lock lock{key2IndexMutex};
                 key2Index[key] = index; // update last known client index
                 return C2_OK;
+            } else if (status == C2_NO_MEMORY) {
+                return C2_NO_MEMORY;
             } else if (status == C2_TRANSACTION_FAILED) {
                 LOG(WARNING) << "\"" << key << "\" failed for service \""
                              << client->getName()
@@ -1023,24 +1066,23 @@
     return status; // return the last status from a valid client
 }
 
-std::shared_ptr<Codec2Client::Component>
-        Codec2Client::CreateComponentByName(
+c2_status_t Codec2Client::CreateComponentByName(
         const char* componentName,
         const std::shared_ptr<Listener>& listener,
+        std::shared_ptr<Component>* component,
         std::shared_ptr<Codec2Client>* owner,
         size_t numberOfAttempts) {
     std::string key{"create:"};
     key.append(componentName);
-    std::shared_ptr<Component> component;
     c2_status_t status = ForAllServices(
             key,
             numberOfAttempts,
-            [owner, &component, componentName, &listener](
+            [owner, component, componentName, &listener](
                     const std::shared_ptr<Codec2Client> &client)
                         -> c2_status_t {
                 c2_status_t status = client->createComponent(componentName,
                                                              listener,
-                                                             &component);
+                                                             component);
                 if (status == C2_OK) {
                     if (owner) {
                         *owner = client;
@@ -1059,7 +1101,7 @@
                    << "\" from all known services. "
                       "Last returned status = " << status << ".";
     }
-    return component;
+    return status;
 }
 
 std::shared_ptr<Codec2Client::Interface>
@@ -1187,6 +1229,7 @@
         },
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
+        mBase1_2{Base1_2::castFrom(base)},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1209,6 +1252,30 @@
         },
         mBase1_0{base},
         mBase1_1{base},
+        mBase1_2{Base1_2::castFrom(base)},
+        mBufferPoolSender{std::make_unique<BufferPoolSender>()},
+        mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
+}
+
+Codec2Client::Component::Component(const sp<Base1_2>& base)
+      : Configurable{
+            [base]() -> sp<IConfigurable> {
+                Return<sp<IComponentInterface>> transResult1 =
+                        base->getInterface();
+                if (!transResult1.isOk()) {
+                    return nullptr;
+                }
+                Return<sp<IConfigurable>> transResult2 =
+                        static_cast<sp<IComponentInterface>>(transResult1)->
+                        getConfigurable();
+                return transResult2.isOk() ?
+                        static_cast<sp<IConfigurable>>(transResult2) :
+                        nullptr;
+            }()
+        },
+        mBase1_0{base},
+        mBase1_1{base},
+        mBase1_2{base},
         mBufferPoolSender{std::make_unique<BufferPoolSender>()},
         mOutputBufferQueue{std::make_unique<OutputBufferQueue>()} {
 }
@@ -1423,7 +1490,8 @@
 c2_status_t Codec2Client::Component::setOutputSurface(
         C2BlockPool::local_id_t blockPoolId,
         const sp<IGraphicBufferProducer>& surface,
-        uint32_t generation) {
+        uint32_t generation,
+        int maxDequeueCount) {
     uint64_t bqId = 0;
     sp<IGraphicBufferProducer> nullIgbp;
     sp<HGraphicBufferProducer2> nullHgbp;
@@ -1434,21 +1502,65 @@
         igbp = new B2HGraphicBufferProducer2(surface);
     }
 
+    std::shared_ptr<SurfaceSyncObj> syncObj;
+
     if (!surface) {
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, maxDequeueCount, nullptr);
     } else if (surface->getUniqueId(&bqId) != OK) {
         LOG(ERROR) << "setOutputSurface -- "
                    "cannot obtain bufferqueue id.";
         bqId = 0;
-        mOutputBufferQueue->configure(nullIgbp, generation, 0);
+        mOutputBufferQueue->configure(nullIgbp, generation, 0, maxDequeueCount, nullptr);
     } else {
-        mOutputBufferQueue->configure(surface, generation, bqId);
+        mOutputBufferQueue->configure(surface, generation, bqId, maxDequeueCount, mBase1_2 ?
+                                      &syncObj : nullptr);
     }
-    ALOGD("generation remote change %u", generation);
 
-    Return<Status> transStatus = mBase1_0->setOutputSurface(
-            static_cast<uint64_t>(blockPoolId),
-            bqId == 0 ? nullHgbp : igbp);
+    // set consumer bits
+    // TODO: should this get incorporated into setOutputSurface method so that consumer bits
+    // can be set atomically?
+    uint64_t consumerUsage = kDefaultConsumerUsage;
+    {
+        if (surface) {
+            int usage = 0;
+            status_t err = surface->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+            if (err != NO_ERROR) {
+                ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
+                        err, asString(err));
+            } else {
+                // Note: we are adding the default usage because components must support
+                // producing output frames that can be displayed on all output surfaces.
+
+                // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
+                // is meaningful in a tunneled scenario; on one hand output buffers exist, but
+                // they do not exist inside of C2 scope. Any buffer usage shall be communicated
+                // through the sideband channel.
+
+                // do an unsigned conversion as bit-31 may be 1
+                consumerUsage = (uint32_t)usage | kDefaultConsumerUsage;
+            }
+        }
+
+        C2StreamUsageTuning::output outputUsage{
+                0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
+        if (err != C2_OK) {
+            ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
+                    err, asString(err));
+        }
+    }
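+    // For example, if the surface reports only GRALLOC_USAGE_HW_VIDEO_ENCODER, the
+    // configured value becomes (GRALLOC_USAGE_HW_VIDEO_ENCODER | kDefaultConsumerUsage),
+    // i.e. the default texture/composer bits are always added to whatever the surface
+    // reports.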
+    ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
+            generation, (long long)consumerUsage, syncObj ? " sync" : "");
+
+    Return<Status> transStatus = syncObj ?
+            mBase1_2->setOutputSurfaceWithSyncObj(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp, *syncObj) :
+            mBase1_0->setOutputSurface(
+                    static_cast<uint64_t>(blockPoolId),
+                    bqId == 0 ? nullHgbp : igbp);
+
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
@@ -1458,6 +1570,7 @@
     if (status != C2_OK) {
         LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
     }
+    ALOGD("Surface configure completed");
     return status;
 }
 
@@ -1468,6 +1581,11 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
+        int maxDequeueCount) {
+    mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
+}
+
 c2_status_t Codec2Client::Component::connectToInputSurface(
         const std::shared_ptr<InputSurface>& inputSurface,
         std::shared_ptr<InputSurfaceConnection>* connection) {
@@ -1620,4 +1738,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index ffd194a..347e58a 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -78,6 +78,11 @@
 struct IComponentStore;
 }  // namespace android::hardware::media::c2::V1_1
 
+namespace android::hardware::media::c2::V1_2 {
+struct IComponent;
+struct IComponentStore;
+}  // namespace android::hardware::media::c2::V1_2
+
 namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
 }  // namespace android::hardware::media::bufferpool::V2_0
@@ -137,6 +142,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponentStore Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponentStore Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
     typedef Base1_0 Base;
 
     struct Listener;
@@ -156,6 +162,7 @@
     sp<Base> const& getBase() const;
     sp<Base1_0> const& getBase1_0() const;
     sp<Base1_1> const& getBase1_1() const;
+    sp<Base1_2> const& getBase1_2() const;
 
     std::string const& getServiceName() const;
 
@@ -200,9 +207,10 @@
     // Try to create a component with a given name from all known
     // IComponentStore services. numberOfAttempts determines the number of times
     // to retry the HIDL call if the transaction fails.
-    static std::shared_ptr<Component> CreateComponentByName(
+    static c2_status_t CreateComponentByName(
             char const* componentName,
             std::shared_ptr<Listener> const& listener,
+            std::shared_ptr<Component>* component,
             std::shared_ptr<Codec2Client>* owner = nullptr,
             size_t numberOfAttempts = 10);
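+    //
+    // Illustrative call sequence (the component name below is only an example):
+    //   std::shared_ptr<Codec2Client::Component> component;
+    //   std::shared_ptr<Codec2Client> owner;
+    //   c2_status_t status = Codec2Client::CreateComponentByName(
+    //           "c2.android.avc.decoder", listener, &component, &owner);
+    //   if (status == C2_OK) { /* component and owner are valid here */ }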
 
@@ -227,6 +235,7 @@
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     // Finds the first store where the predicate returns C2_OK and returns the
     // last predicate result. The predicate will be tried on all stores. The
@@ -317,6 +326,7 @@
 
     typedef ::android::hardware::media::c2::V1_0::IComponent Base1_0;
     typedef ::android::hardware::media::c2::V1_1::IComponent Base1_1;
+    typedef ::android::hardware::media::c2::V1_2::IComponent Base1_2;
     typedef Base1_0 Base;
 
     c2_status_t createBlockPool(
@@ -374,7 +384,8 @@
     c2_status_t setOutputSurface(
             C2BlockPool::local_id_t blockPoolId,
             const sp<IGraphicBufferProducer>& surface,
-            uint32_t generation);
+            uint32_t generation,
+            int maxDequeueBufferCount);
 
     // Extract a slot number from of the block, then call
     // IGraphicBufferProducer::queueBuffer().
@@ -397,6 +408,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Set max dequeue count for output surface.
+    void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
+
     // Connect to a given InputSurface.
     c2_status_t connectToInputSurface(
             const std::shared_ptr<InputSurface>& inputSurface,
@@ -412,12 +426,14 @@
     // base cannot be null.
     Component(const sp<Base>& base);
     Component(const sp<Base1_1>& base);
+    Component(const sp<Base1_2>& base);
 
     ~Component();
 
 protected:
     sp<Base1_0> mBase1_0;
     sp<Base1_1> mBase1_1;
+    sp<Base1_2> mBase1_2;
 
     struct BufferPoolSender;
     std::unique_ptr<BufferPoolSender> mBufferPoolSender;
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
new file mode 100644
index 0000000..877148a
--- /dev/null
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
+#define CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
+
+#include <gui/IGraphicBufferProducer.h>
+#include <codec2/hidl/1.0/types.h>
+#include <codec2/hidl/1.2/types.h>
+#include <C2Work.h>
+
+struct C2_HIDE _C2BlockPoolData;
+class C2SurfaceSyncMemory;
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+
+
+// BufferQueue-Based Block Operations
+// ==================================
+
+// Manages the BufferQueue and graphic blocks for both the component and the codec.
+// Keeps graphic block ownership consistent across surface changes.
+struct OutputBufferQueue {
+
+    OutputBufferQueue();
+
+    ~OutputBufferQueue();
+
+    // Configure a new surface to render graphic blocks.
+    // Graphic blocks from the old surface will be migrated to the new surface.
+    bool configure(const sp<IGraphicBufferProducer>& igbp,
+                   uint32_t generation,
+                   uint64_t bqId,
+                   int maxDequeueBufferCount,
+                   std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
+
+    // Render a graphic block to current surface.
+    status_t outputBuffer(
+            const C2ConstGraphicBlock& block,
+            const BnGraphicBufferProducer::QueueBufferInput& input,
+            BnGraphicBufferProducer::QueueBufferOutput* output);
+
+    // Call holdBufferQueueBlock() on output blocks in the given workList.
+    // The OutputBufferQueue will take the ownership of output blocks.
+    //
+    // Note: This function should be called after WorkBundle has been received
+    // from another process.
+    void holdBufferQueueBlocks(
+            const std::list<std::unique_ptr<C2Work>>& workList);
+
+    // Update the maximum number of buffers that may be dequeued from the BQ. If this
+    // count is shared via shared memory between the HAL and the framework, update the
+    // shared value and synchronize.
+    void updateMaxDequeueBufferCount(int maxDequeueBufferCount);
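+
+    // Typical usage sketch: configure() is called whenever the client sets or
+    // replaces the output surface, holdBufferQueueBlocks() is called on each work
+    // list received from the remote component, and outputBuffer() queues a finished
+    // block to the surface. Roughly:
+    //   std::shared_ptr<V1_2::SurfaceSyncObj> syncObj;
+    //   queue.configure(igbp, generation, bqId, maxDequeueBufferCount, &syncObj);
+    //   queue.holdBufferQueueBlocks(workList);
+    //   queue.outputBuffer(block, input, &output);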
+
+private:
+
+    std::mutex mMutex;
+    sp<IGraphicBufferProducer> mIgbp;
+    uint32_t mGeneration;
+    uint64_t mBqId;
+    int32_t mMaxDequeueBufferCount;
+    std::shared_ptr<int> mOwner;
+    // To migrate existing buffers
+    sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
+    std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+
+    bool registerBuffer(const C2ConstGraphicBlock& block);
+};
+
+}  // namespace c2
+}  // namespace media
+}  // namespace hardware
+}  // namespace android
+
+#endif  // CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
new file mode 100644
index 0000000..8cd4934
--- /dev/null
+++ b/media/codec2/hidl/client/output.cpp
@@ -0,0 +1,480 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-OutputBufferQueue"
+#include <android-base/logging.h>
+
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
+#include <codec2/hidl/output.h>
+#include <cutils/ashmem.h>
+#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <sys/mman.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Buffer.h>
+#include <C2PlatformSupport.h>
+#include <C2SurfaceSyncObj.h>
+
+#include <iomanip>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace c2 {
+
+using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
+        V2_0::IGraphicBufferProducer;
+using B2HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
+        V2_0::utils::B2HGraphicBufferProducer;
+
+namespace /* unnamed */ {
+
+// Create a GraphicBuffer object from a graphic block.
+sp<GraphicBuffer> createGraphicBuffer(const C2ConstGraphicBlock& block) {
+    uint32_t width;
+    uint32_t height;
+    uint32_t format;
+    uint64_t usage;
+    uint32_t stride;
+    uint32_t generation;
+    uint64_t bqId;
+    int32_t bqSlot;
+    _UnwrapNativeCodec2GrallocMetadata(
+            block.handle(), &width, &height, &format, &usage,
+            &stride, &generation, &bqId, reinterpret_cast<uint32_t*>(&bqSlot));
+    native_handle_t *grallocHandle =
+            UnwrapNativeCodec2GrallocHandle(block.handle());
+    sp<GraphicBuffer> graphicBuffer =
+            new GraphicBuffer(grallocHandle,
+                              GraphicBuffer::CLONE_HANDLE,
+                              width, height, format,
+                              1, usage, stride);
+    native_handle_delete(grallocHandle);
+    return graphicBuffer;
+}
+
+template <typename BlockProcessor>
+void forEachBlock(C2FrameData& frameData,
+                  BlockProcessor process) {
+    for (const std::shared_ptr<C2Buffer>& buffer : frameData.buffers) {
+        if (buffer) {
+            for (const C2ConstGraphicBlock& block :
+                    buffer->data().graphicBlocks()) {
+                process(block);
+            }
+        }
+    }
+}
+
+template <typename BlockProcessor>
+void forEachBlock(const std::list<std::unique_ptr<C2Work>>& workList,
+                  BlockProcessor process) {
+    for (const std::unique_ptr<C2Work>& work : workList) {
+        if (!work) {
+            continue;
+        }
+        for (const std::unique_ptr<C2Worklet>& worklet : work->worklets) {
+            if (worklet) {
+                forEachBlock(worklet->output, process);
+            }
+        }
+    }
+}
+
+sp<HGraphicBufferProducer> getHgbp(const sp<IGraphicBufferProducer>& igbp) {
+    sp<HGraphicBufferProducer> hgbp =
+            igbp->getHalInterface<HGraphicBufferProducer>();
+    return hgbp ? hgbp :
+            new B2HGraphicBufferProducer(igbp);
+}
+
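+// Attach the graphic block's buffer to a slot of the given producer. When surface
+// sync memory is in use, the dequeue is accounted for under the shared
+// C2SyncVariables lock so that the HAL and the framework agree on the number of
+// dequeued buffers.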
+status_t attachToBufferQueue(const C2ConstGraphicBlock& block,
+                             const sp<IGraphicBufferProducer>& igbp,
+                             uint32_t generation,
+                             int32_t* bqSlot,
+                             std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
+    if (!igbp) {
+        LOG(WARNING) << "attachToBufferQueue -- null producer.";
+        return NO_INIT;
+    }
+
+    sp<GraphicBuffer> graphicBuffer = createGraphicBuffer(block);
+    graphicBuffer->setGenerationNumber(generation);
+
+    LOG(VERBOSE) << "attachToBufferQueue -- attaching buffer:"
+            << " block dimension " << block.width() << "x"
+                                   << block.height()
+            << ", graphicBuffer dimension " << graphicBuffer->getWidth() << "x"
+                                           << graphicBuffer->getHeight()
+            << std::hex << std::setfill('0')
+            << ", format 0x" << std::setw(8) << graphicBuffer->getPixelFormat()
+            << ", usage 0x" << std::setw(16) << graphicBuffer->getUsage()
+            << std::dec << std::setfill(' ')
+            << ", stride " << graphicBuffer->getStride()
+            << ", generation " << graphicBuffer->getGenerationNumber();
+
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t result = OK;
+    if (syncVar) {
+        syncVar->lock();
+        if (!syncVar->isDequeueableLocked() ||
+            syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_SWITCHING) {
+            syncVar->unlock();
+            LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
+                            "status = " << INVALID_OPERATION << ".";
+            return INVALID_OPERATION;
+        }
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+        if (result == OK) {
+            syncVar->notifyDequeuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        result = igbp->attachBuffer(bqSlot, graphicBuffer);
+    }
+    if (result != OK) {
+        LOG(WARNING) << "attachToBufferQueue -- attachBuffer failed: "
+                        "status = " << result << ".";
+        return result;
+    }
+    LOG(VERBOSE) << "attachToBufferQueue -- attachBuffer returned slot #"
+                 << *bqSlot << ".";
+    return OK;
+}
+
+bool getBufferQueueAssignment(const C2ConstGraphicBlock& block,
+                              uint32_t* generation,
+                              uint64_t* bqId,
+                              int32_t* bqSlot) {
+    return _C2BlockFactory::GetBufferQueueData(
+            _C2BlockFactory::GetGraphicBlockPoolData(block),
+            generation, bqId, bqSlot);
+}
+
+} // unnamed namespace
+
+OutputBufferQueue::OutputBufferQueue()
+      : mGeneration{0}, mBqId{0} {
+}
+
+OutputBufferQueue::~OutputBufferQueue() {
+}
+
+bool OutputBufferQueue::configure(const sp<IGraphicBufferProducer>& igbp,
+                                  uint32_t generation,
+                                  uint64_t bqId,
+                                  int maxDequeueBufferCount,
+                                  std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
+    uint64_t consumerUsage = 0;
+    if (igbp->getConsumerUsage(&consumerUsage) != OK) {
+        ALOGW("failed to get consumer usage");
+    }
+
+    // TODO: Abstract the creation process into the C2SurfaceSyncMemory class.
+    // Use C2LinearBlock instead of ashmem.
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    if (syncObj && igbp) {
+        bool mapped = false;
+        int memFd = ashmem_create_region("C2SurfaceMem", sizeof(C2SyncVariables));
+        size_t memSize = memFd < 0 ? 0 : ashmem_get_size_region(memFd);
+        if (memSize > 0) {
+            syncMem = C2SurfaceSyncMemory::Create(memFd, memSize);
+            if (syncMem) {
+                mapped = true;
+                *syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
+                (*syncObj)->syncMemory = syncMem->handle();
+                (*syncObj)->bqId = bqId;
+                (*syncObj)->generationId = generation;
+                (*syncObj)->consumerUsage = consumerUsage;
+                ALOGD("C2SurfaceSyncMemory created %zu(%zu)", sizeof(C2SyncVariables), memSize);
+            }
+        }
+        if (!mapped) {
+            if (memFd >= 0) {
+                ::close(memFd);
+            }
+            ALOGW("SurfaceSyncObj creation failure");
+        }
+    }
+
+    size_t tryNum = 0;
+    size_t success = 0;
+    sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::weak_ptr<_C2BlockPoolData>
+            poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    {
+        std::scoped_lock<std::mutex> l(mMutex);
+        if (generation == mGeneration) {
+            // case of old BlockPool destruction
+            C2SyncVariables *var = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncObj && var) {
+                *syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
+                (*syncObj)->bqId = bqId;
+                (*syncObj)->syncMemory = mSyncMem->handle();
+                (*syncObj)->generationId = generation;
+                (*syncObj)->consumerUsage = consumerUsage;
+                mMaxDequeueBufferCount = maxDequeueBufferCount;
+                var->lock();
+                var->setSyncStatusLocked(C2SyncVariables::STATUS_INIT);
+                var->setInitialDequeueCountLocked(mMaxDequeueBufferCount, 0);
+                var->unlock();
+            }
+            return false;
+        }
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+        C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (oldSync) {
+            oldSync->lock();
+            oldSync->setSyncStatusLocked(C2SyncVariables::STATUS_SWITCHING);
+            oldSync->unlock();
+        }
+        mSyncMem.reset();
+        if (syncMem) {
+            mSyncMem = syncMem;
+        }
+        C2SyncVariables *newSync = mSyncMem ? mSyncMem->mem() : nullptr;
+
+        mIgbp = igbp;
+        mGeneration = generation;
+        mBqId = bqId;
+        mOwner = std::make_shared<int>(0);
+        mMaxDequeueBufferCount = maxDequeueBufferCount;
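+        // Migrate buffers registered against the previous surface: recreate them
+        // with the new consumer usage if needed, bump their generation number, and
+        // attach them to the new producer so that in-flight blocks remain usable.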
+        for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
+            if (mBqId == 0 || !mBuffers[i]) {
+                continue;
+            }
+            std::shared_ptr<_C2BlockPoolData> data = mPoolDatas[i].lock();
+            if (!data ||
+                !_C2BlockFactory::BeginAttachBlockToBufferQueue(data)) {
+                continue;
+            }
+            ++tryNum;
+            int bqSlot;
+
+            // Update buffer's generation and usage.
+            if ((mBuffers[i]->getUsage() & consumerUsage) != consumerUsage) {
+                mBuffers[i] = new GraphicBuffer(
+                    mBuffers[i]->handle, GraphicBuffer::CLONE_HANDLE,
+                    mBuffers[i]->width, mBuffers[i]->height,
+                    mBuffers[i]->format, mBuffers[i]->layerCount,
+                    mBuffers[i]->getUsage() | consumerUsage,
+                    mBuffers[i]->stride);
+                if (mBuffers[i]->initCheck() != OK) {
+                    ALOGW("%s() failed to update usage, original usage=%" PRIx64
+                          ", consumer usage=%" PRIx64,
+                          __func__, mBuffers[i]->getUsage(), consumerUsage);
+                    continue;
+                }
+            }
+            mBuffers[i]->setGenerationNumber(generation);
+
+            status_t result = igbp->attachBuffer(&bqSlot, mBuffers[i]);
+            if (result != OK) {
+                continue;
+            }
+            bool attach =
+                    _C2BlockFactory::EndAttachBlockToBufferQueue(
+                            data, mOwner, getHgbp(mIgbp), mSyncMem,
+                            generation, bqId, bqSlot);
+            if (!attach) {
+                igbp->cancelBuffer(bqSlot, Fence::NO_FENCE);
+                continue;
+            }
+            buffers[bqSlot] = mBuffers[i];
+            poolDatas[bqSlot] = data;
+            ++success;
+        }
+        for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
+            mBuffers[i] = buffers[i];
+            mPoolDatas[i] = poolDatas[i];
+        }
+        if (newSync) {
+            newSync->lock();
+            newSync->setInitialDequeueCountLocked(mMaxDequeueBufferCount, success);
+            newSync->unlock();
+        }
+    }
+    ALOGD("remote graphic buffer migration %zu/%zu",
+          success, tryNum);
+    return true;
+}
+
+bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
+    std::shared_ptr<_C2BlockPoolData> data =
+            _C2BlockFactory::GetGraphicBlockPoolData(block);
+    if (!data) {
+        return false;
+    }
+    std::scoped_lock<std::mutex> l(mMutex);
+
+    if (!mIgbp) {
+        return false;
+    }
+
+    uint32_t oldGeneration;
+    uint64_t oldId;
+    int32_t oldSlot;
+    // If the block is not bufferqueue-based, do nothing.
+    if (!_C2BlockFactory::GetBufferQueueData(
+            data, &oldGeneration, &oldId, &oldSlot) || (oldId == 0)) {
+        return false;
+    }
+    // If the block's bqId is the same as the desired bqId, just hold.
+    if ((oldId == mBqId) && (oldGeneration == mGeneration)) {
+        LOG(VERBOSE) << "holdBufferQueueBlock -- import without attaching:"
+                     << " bqId " << oldId
+                     << ", bqSlot " << oldSlot
+                     << ", generation " << mGeneration
+                     << ".";
+        _C2BlockFactory::HoldBlockFromBufferQueue(data, mOwner, getHgbp(mIgbp), mSyncMem);
+        mPoolDatas[oldSlot] = data;
+        mBuffers[oldSlot] = createGraphicBuffer(block);
+        mBuffers[oldSlot]->setGenerationNumber(mGeneration);
+        return true;
+    }
+    int32_t d = (int32_t) mGeneration - (int32_t) oldGeneration;
+    LOG(WARNING) << "receiving stale buffer: generation "
+                 << mGeneration << " , diff " << d  << " : slot "
+                 << oldSlot;
+    return false;
+}
+
+status_t OutputBufferQueue::outputBuffer(
+        const C2ConstGraphicBlock& block,
+        const BnGraphicBufferProducer::QueueBufferInput& input,
+        BnGraphicBufferProducer::QueueBufferOutput* output) {
+    uint32_t generation;
+    uint64_t bqId;
+    int32_t bqSlot;
+    bool display = V1_0::utils::displayBufferQueueBlock(block);
+    if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
+        bqId == 0) {
+        // Block not from bufferqueue -- it must be attached before queuing.
+
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+        mMutex.lock();
+        sp<IGraphicBufferProducer> outputIgbp = mIgbp;
+        uint32_t outputGeneration = mGeneration;
+        syncMem = mSyncMem;
+        mMutex.unlock();
+
+        status_t status = attachToBufferQueue(
+                block, outputIgbp, outputGeneration, &bqSlot, syncMem);
+
+        if (status != OK) {
+            LOG(WARNING) << "outputBuffer -- attaching failed.";
+            return INVALID_OPERATION;
+        }
+
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+            if (status == OK) {
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                         input, output);
+        }
+        if (status != OK) {
+            LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
+                       "on non-bufferqueue-based block. "
+                       "Error = " << status << ".";
+            return status;
+        }
+        return OK;
+    }
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    mMutex.lock();
+    sp<IGraphicBufferProducer> outputIgbp = mIgbp;
+    uint32_t outputGeneration = mGeneration;
+    uint64_t outputBqId = mBqId;
+    syncMem = mSyncMem;
+    mMutex.unlock();
+
+    if (!outputIgbp) {
+        LOG(VERBOSE) << "outputBuffer -- output surface is null.";
+        return NO_INIT;
+    }
+
+    if (!display) {
+        LOG(WARNING) << "outputBuffer -- cannot display "
+                     "bufferqueue-based block to the bufferqueue.";
+        return UNKNOWN_ERROR;
+    }
+    if (bqId != outputBqId || generation != outputGeneration) {
+        int32_t diff = (int32_t) outputGeneration - (int32_t) generation;
+        LOG(WARNING) << "outputBuffer -- buffer from an old generation; current generation "
+                     << outputGeneration << ", diff: " << diff
+                     << ", slot: " << bqSlot;
+        return DEAD_OBJECT;
+    }
+
+    auto syncVar = syncMem ? syncMem->mem() : nullptr;
+    status_t status = OK;
+    if (syncVar) {
+        syncVar->lock();
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+        if (status == OK) {
+            syncVar->notifyQueuedLocked();
+        }
+        syncVar->unlock();
+    } else {
+        status = outputIgbp->queueBuffer(static_cast<int>(bqSlot),
+                                                  input, output);
+    }
+
+    if (status != OK) {
+        LOG(ERROR) << "outputBuffer -- queueBuffer() failed "
+                   "on bufferqueue-based block. "
+                   "Error = " << status << ".";
+        return status;
+    }
+    return OK;
+}
+
+void OutputBufferQueue::holdBufferQueueBlocks(
+        const std::list<std::unique_ptr<C2Work>>& workList) {
+    forEachBlock(workList,
+                 std::bind(&OutputBufferQueue::registerBuffer,
+                           this, std::placeholders::_1));
+}
+
+void OutputBufferQueue::updateMaxDequeueBufferCount(int maxDequeueBufferCount) {
+    mMutex.lock();
+    mMaxDequeueBufferCount = maxDequeueBufferCount;
+    auto syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->updateMaxDequeueCountLocked(maxDequeueBufferCount);
+        syncVar->unlock();
+    }
+    mMutex.unlock();
+    ALOGD("set max dequeue count %d from update", maxDequeueBufferCount);
+}
+
+}  // namespace c2
+}  // namespace media
+}  // namespace hardware
+}  // namespace android
+
diff --git a/media/codec2/hidl/plugin/Android.bp b/media/codec2/hidl/plugin/Android.bp
new file mode 100644
index 0000000..873bb02
--- /dev/null
+++ b/media/codec2/hidl/plugin/Android.bp
@@ -0,0 +1,75 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_headers {
+    name: "libcodec2_hidl_plugin_headers",
+    vendor_available: true,
+    export_include_dirs: [
+        "include",
+    ],
+}
+
+cc_library {
+    name: "libcodec2_hidl_plugin_stub",
+
+    srcs: [
+        "DefaultFilterPlugin.cpp",
+        "FilterWrapperStub.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcodec2",
+        "libcodec2_vndk",
+        "liblog",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "internal",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+}
+
+cc_library {
+    name: "libcodec2_hidl_plugin",
+    vendor: true,
+
+    srcs: [
+        "DefaultFilterPlugin.cpp",
+        "FilterWrapper.cpp",
+    ],
+
+    header_libs: [
+        "libcodec2_internal", // private
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcodec2",
+        "libcodec2_vndk",
+        "liblog",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "internal",
+    ],
+}
diff --git a/media/codec2/hidl/plugin/DefaultFilterPlugin.cpp b/media/codec2/hidl/plugin/DefaultFilterPlugin.cpp
new file mode 100644
index 0000000..b26e74b
--- /dev/null
+++ b/media/codec2/hidl/plugin/DefaultFilterPlugin.cpp
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-DefaultFilterPlugin"
+#include <android-base/logging.h>
+
+#include <set>
+
+#include <dlfcn.h>
+
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2ParamInternal.h>
+
+#include <codec2/hidl/plugin/FilterPlugin.h>
+
+#include <DefaultFilterPlugin.h>
+#include <FilterWrapper.h>
+
+namespace android {
+
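+// Sketch of the entry points a filter plugin library is expected to export,
+// inferred from the dlsym() lookups below (the authoritative declarations live in
+// codec2/hidl/plugin/FilterPlugin.h):
+//   extern "C" int32_t GetFilterPluginVersion();   // must return FilterPlugin_V1::VERSION
+//   extern "C" void *CreateFilterPlugin();         // result is cast to FilterPlugin_V1 *
+//   extern "C" void DestroyFilterPlugin(void *plugin);
+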
+DefaultFilterPlugin::DefaultFilterPlugin(const char *pluginPath)
+    : mInit(NO_INIT),
+      mHandle(nullptr),
+      mDestroyPlugin(nullptr),
+      mPlugin(nullptr) {
+    mHandle = dlopen(pluginPath, RTLD_NOW | RTLD_NODELETE);
+    if (!mHandle) {
+        LOG(DEBUG) << "FilterPlugin: no plugin detected";
+        return;
+    }
+    GetFilterPluginVersionFunc getVersion =
+        (GetFilterPluginVersionFunc)dlsym(mHandle, "GetFilterPluginVersion");
+    if (!getVersion) {
+        LOG(WARNING) << "FilterPlugin: GetFilterPluginVersion undefined";
+        return;
+    }
+    int32_t version = getVersion();
+    if (version != FilterPlugin_V1::VERSION) {
+        LOG(WARNING) << "FilterPlugin: unrecognized version (" << version << ")";
+        return;
+    }
+    CreateFilterPluginFunc createPlugin =
+        (CreateFilterPluginFunc)dlsym(mHandle, "CreateFilterPlugin");
+    if (!createPlugin) {
+        LOG(WARNING) << "FilterPlugin: CreateFilterPlugin undefined";
+        return;
+    }
+    mDestroyPlugin =
+        (DestroyFilterPluginFunc)dlsym(mHandle, "DestroyFilterPlugin");
+    if (!mDestroyPlugin) {
+        LOG(WARNING) << "FilterPlugin: DestroyFilterPlugin undefined";
+        return;
+    }
+    mPlugin = (FilterPlugin_V1 *)createPlugin();
+    if (!mPlugin) {
+        LOG(WARNING) << "FilterPlugin: CreateFilterPlugin returned nullptr";
+        return;
+    }
+    mStore = mPlugin->getComponentStore();
+    if (!mStore) {
+        LOG(WARNING) << "FilterPlugin: FilterPlugin_V1::getComponentStore returned nullptr";
+        return;
+    }
+    mInit = OK;
+}
+
+DefaultFilterPlugin::~DefaultFilterPlugin() {
+    if (mHandle) {
+        if (mDestroyPlugin && mPlugin) {
+            mDestroyPlugin(mPlugin);
+            mPlugin = nullptr;
+        }
+        dlclose(mHandle);
+        mHandle = nullptr;
+        mDestroyPlugin = nullptr;
+    }
+}
+
+bool DefaultFilterPlugin::describe(C2String name, FilterWrapper::Descriptor *desc) {
+    if (mInit != OK) {
+        return false;
+    }
+    return mPlugin->describe(name, desc);
+}
+
+bool DefaultFilterPlugin::isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) {
+    if (mInit != OK) {
+        return false;
+    }
+    return mPlugin->isFilteringEnabled(intf);
+}
+
+c2_status_t DefaultFilterPlugin::queryParamsForPreviousComponent(
+        const std::shared_ptr<C2ComponentInterface> &intf,
+        std::vector<std::unique_ptr<C2Param>> *params) {
+    if (mInit != OK) {
+        return C2_NO_INIT;
+    }
+    return mPlugin->queryParamsForPreviousComponent(intf, params);
+}
+
+}  // namespace android
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
new file mode 100644
index 0000000..70c63f2
--- /dev/null
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -0,0 +1,982 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-FilterWrapper"
+#include <android-base/logging.h>
+
+#include <set>
+
+#include <dlfcn.h>
+
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2ParamInternal.h>
+
+#include <codec2/hidl/plugin/FilterPlugin.h>
+
+#include <FilterWrapper.h>
+
+namespace android {
+
+namespace {
+
+// Parameter types that the last filter in the chain should consume.
+static constexpr uint32_t kTypesForLastFilter[] = {
+    // In case we have an output surface, we want to use the block pool
+    // backed by the output surface for the output buffer going to the client.
+    C2PortBlockPoolsTuning::output::PARAM_TYPE,
+};
+
+class WrappedDecoderInterface : public C2ComponentInterface {
+public:
+    WrappedDecoderInterface(
+            std::shared_ptr<C2ComponentInterface> intf,
+            std::vector<FilterWrapper::Component> &&filters,
+            std::weak_ptr<FilterWrapper> filterWrapper)
+        : mIntf(intf), mFilterWrapper(filterWrapper) {
+        takeFilters(std::move(filters));
+    }
+
+    ~WrappedDecoderInterface() override = default;
+
+    void takeFilters(std::vector<FilterWrapper::Component> &&filters) {
+        std::unique_lock lock(mMutex);
+        std::vector<std::unique_ptr<C2Param>> lastFilterParams;
+        if (!mFilters.empty()) {
+            std::vector<C2Param::Index> indices;
+            std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
+            c2_status_t err = mFilters.back().intf->querySupportedParams_nb(&paramDescs);
+            if (err != C2_OK) {
+                LOG(WARNING) << "WrappedDecoderInterface: " << mFilters.back().traits.name
+                        << " returned error for querySupportedParams_nb; err=" << err;
+                paramDescs.clear();
+            }
+            for (const std::shared_ptr<C2ParamDescriptor> &paramDesc : paramDescs) {
+                C2Param::Index index = paramDesc->index();
+                if (std::count(
+                            std::begin(kTypesForLastFilter),
+                            std::end(kTypesForLastFilter),
+                            index.type()) != 0) {
+                    if (index.forStream()) {
+                        // querySupportedParams does not return per-stream params.
+                        // We only support stream-0 for now.
+                        index = index.withStream(0u);
+                    }
+                    indices.push_back(index);
+                }
+            }
+            if (!indices.empty()) {
+                mFilters.back().intf->query_vb({}, indices, C2_MAY_BLOCK, &lastFilterParams);
+            }
+        }
+
+        // TODO: documentation
+        mFilters = std::move(filters);
+        mTypeToIndexForQuery.clear();
+        mTypeToIndexForConfig.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            if (i == 0) {
+                transferParams_l(mIntf, mFilters[0].intf, C2_MAY_BLOCK);
+            } else {
+                transferParams_l(mFilters[i - 1].intf, mFilters[i].intf, C2_MAY_BLOCK);
+            }
+            for (C2Param::Type type : mFilters[i].desc.controlParams) {
+                mTypeToIndexForQuery[type.type()] = i;
+                mTypeToIndexForConfig[type.type() & ~C2Param::CoreIndex::IS_REQUEST_FLAG] = i;
+            }
+            for (C2Param::Type type : mFilters[i].desc.affectedParams) {
+                mTypeToIndexForQuery[type.type()] = i;
+            }
+        }
+        for (size_t i = mFilters.size(); i > 0; --i) {
+            if (i == 1) {
+                backPropagateParams_l(mIntf, mFilters[0].intf, C2_MAY_BLOCK);
+            } else {
+                backPropagateParams_l(mFilters[i - 2].intf, mFilters[i - 1].intf, C2_MAY_BLOCK);
+            }
+        }
+        if (!mFilters.empty()) {
+            for (uint32_t type : kTypesForLastFilter) {
+                mTypeToIndexForQuery[type] = mFilters.size() - 1;
+                mTypeToIndexForConfig[type & ~C2Param::CoreIndex::IS_REQUEST_FLAG] =
+                    mFilters.size() - 1;
+            }
+            if (!lastFilterParams.empty()) {
+                std::vector<C2Param *> paramPtrs(lastFilterParams.size());
+                std::transform(
+                        lastFilterParams.begin(),
+                        lastFilterParams.end(),
+                        paramPtrs.begin(),
+                        [](const std::unique_ptr<C2Param> &param) {
+                            return param.get();
+                        });
+                std::vector<std::unique_ptr<C2SettingResult>> failures;
+                mFilters.back().intf->config_vb(paramPtrs, C2_MAY_BLOCK, &failures);
+            }
+        }
+    }
+
+    C2String getName() const override { return mIntf->getName(); }
+
+    c2_node_id_t getId() const override { return mIntf->getId(); }
+
+    c2_status_t query_vb(
+            const std::vector<C2Param *> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
+        std::unique_lock lock(mMutex);
+        std::list<C2Param *> stackParamsList(stackParams.size());
+        std::copy_n(stackParams.begin(), stackParams.size(), stackParamsList.begin());
+        heapParams->clear();
+        c2_status_t result = C2_OK;
+        // TODO: loop optimization
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            // Filter stack params according to mTypeToIndexForQuery
+            std::vector<C2Param *> stackParamsForFilter;
+            for (auto it = stackParamsList.begin(); it != stackParamsList.end(); ) {
+                C2Param *param = *it;
+                uint32_t type = param->type().type();
+                auto it2 = mTypeToIndexForQuery.find(type);
+                if (it2 == mTypeToIndexForQuery.end() || it2->second != i) {
+                    ++it;
+                    continue;
+                }
+                stackParamsForFilter.push_back(param);
+                it = stackParamsList.erase(it);
+            }
+            // Filter heap params according to mTypeToIndexForQuery
+            std::vector<C2Param::Index> heapParamIndicesForFilter;
+            for (size_t j = 0; j < heapParamIndices.size(); ++j) {
+                uint32_t type = heapParamIndices[j].type();
+                auto it = mTypeToIndexForQuery.find(type);
+                if (it == mTypeToIndexForQuery.end() || it->second != i) {
+                    continue;
+                }
+                heapParamIndicesForFilter.push_back(heapParamIndices[j]);
+            }
+            std::vector<std::unique_ptr<C2Param>> heapParamsForFilter;
+            const std::shared_ptr<C2ComponentInterface> &filter = mFilters[i].intf;
+            c2_status_t err = filter->query_vb(
+                    stackParamsForFilter, heapParamIndicesForFilter, mayBlock,
+                    &heapParamsForFilter);
+            if (err != C2_OK && err != C2_BAD_INDEX) {
+                LOG(WARNING) << "WrappedDecoderInterface: " << filter->getName()
+                        << " returned error for query_vb; err=" << err;
+                result = err;
+                continue;
+            }
+            heapParams->insert(
+                    heapParams->end(),
+                    std::make_move_iterator(heapParamsForFilter.begin()),
+                    std::make_move_iterator(heapParamsForFilter.end()));
+        }
+
+        std::vector<C2Param *> stackParamsForIntf(stackParamsList.size());
+        std::copy_n(stackParamsList.begin(), stackParamsList.size(), stackParamsForIntf.begin());
+
+        // Gather heap params that did not get queried from the filter interfaces above.
+        // These need to be queried from the decoder interface.
+        std::vector<C2Param::Index> heapParamIndicesForIntf;
+        for (size_t j = 0; j < heapParamIndices.size(); ++j) {
+            uint32_t type = heapParamIndices[j].type();
+            if (mTypeToIndexForQuery.find(type) != mTypeToIndexForQuery.end()) {
+                continue;
+            }
+            heapParamIndicesForIntf.push_back(heapParamIndices[j]);
+        }
+
+        std::vector<std::unique_ptr<C2Param>> heapParamsForIntf;
+        c2_status_t err = mIntf->query_vb(
+                stackParamsForIntf, heapParamIndicesForIntf, mayBlock, &heapParamsForIntf);
+        if (err != C2_OK) {
+            LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
+                    << "WrappedDecoderInterface: " << mIntf->getName()
+                    << " returned error for query_vb; err=" << err;
+            result = err;
+        }
+
+        // TODO: params needs to preserve the order
+        heapParams->insert(
+                heapParams->end(),
+                std::make_move_iterator(heapParamsForIntf.begin()),
+                std::make_move_iterator(heapParamsForIntf.end()));
+
+        return result;
+    }
+
+    c2_status_t config_vb(
+            const std::vector<C2Param *> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
+        std::unique_lock lock(mMutex);
+        c2_status_t result = C2_OK;
+        std::vector<C2Param *> paramsForIntf;
+        for (C2Param* param : params) {
+            auto it = mTypeToIndexForConfig.find(param->type().type());
+            if (it != mTypeToIndexForConfig.end()) {
+                continue;
+            }
+            paramsForIntf.push_back(param);
+        }
+        c2_status_t err = mIntf->config_vb(paramsForIntf, mayBlock, failures);
+        if (err != C2_OK) {
+            LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
+                    << "WrappedDecoderInterface: " << mIntf->getName()
+                    << " returned error for config_vb; err=" << err;
+            result = err;
+        }
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            if (i == 0) {
+                transferParams_l(mIntf, mFilters[0].intf, mayBlock);
+            } else {
+                transferParams_l(mFilters[i - 1].intf, mFilters[i].intf, mayBlock);
+            }
+            const std::shared_ptr<C2ComponentInterface> &filter = mFilters[i].intf;
+            std::vector<std::unique_ptr<C2SettingResult>> filterFailures;
+            std::vector<C2Param *> paramsForFilter;
+            for (C2Param* param : params) {
+                auto it = mTypeToIndexForConfig.find(param->type().type());
+                if (it != mTypeToIndexForConfig.end() && it->second != i) {
+                    continue;
+                }
+                paramsForFilter.push_back(param);
+            }
+            c2_status_t err = filter->config_vb(paramsForFilter, mayBlock, &filterFailures);
+            if (err != C2_OK) {
+                LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
+                        << "WrappedDecoderInterface: " << filter->getName()
+                        << " returned error for config_vb; err=" << err;
+                result = err;
+            }
+        }
+        for (size_t i = mFilters.size(); i > 0; --i) {
+            if (i == 1) {
+                backPropagateParams_l(mIntf, mFilters[0].intf, mayBlock);
+            } else {
+                backPropagateParams_l(mFilters[i - 2].intf, mFilters[i - 1].intf, mayBlock);
+            }
+        }
+
+        return result;
+    }
+
+    c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+    c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+
+    c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
+        std::unique_lock lock(mMutex);
+        c2_status_t result = mIntf->querySupportedParams_nb(params);
+        if (result != C2_OK) {
+            LOG(WARNING) << "WrappedDecoderInterface: " << mIntf->getName()
+                    << " returned error for querySupportedParams_nb; err=" << result;
+            return result;
+        }
+        // TODO: optimization idea --- pre-compute at takeFilter().
+        for (const FilterWrapper::Component &filter : mFilters) {
+            std::vector<std::shared_ptr<C2ParamDescriptor>> filterParams;
+            c2_status_t err = filter.intf->querySupportedParams_nb(&filterParams);
+            if (err != C2_OK) {
+                LOG(WARNING) << "WrappedDecoderInterface: " << filter.intf->getName()
+                        << " returned error for querySupportedParams_nb; err=" << err;
+                result = err;
+                continue;
+            }
+            for (const std::shared_ptr<C2ParamDescriptor> &paramDesc : filterParams) {
+                if (std::count(
+                        filter.desc.controlParams.begin(),
+                        filter.desc.controlParams.end(),
+                        paramDesc->index().type()) == 0) {
+                    continue;
+                }
+                params->push_back(paramDesc);
+            }
+        }
+        return result;
+    }
+
+    c2_status_t querySupportedValues_vb(
+            std::vector<C2FieldSupportedValuesQuery> &fields,
+            c2_blocking_t mayBlock) const override {
+        std::unique_lock lock(mMutex);
+        c2_status_t result = mIntf->querySupportedValues_vb(fields, mayBlock);
+        if (result != C2_OK && result != C2_BAD_INDEX) {
+            LOG(WARNING) << "WrappedDecoderInterface: " << mIntf->getName()
+                    << " returned error for querySupportedParams_nb; err=" << result;
+            return result;
+        }
+        for (const FilterWrapper::Component &filter : mFilters) {
+            std::vector<C2FieldSupportedValuesQuery> filterFields;
+            std::vector<size_t> indices;
+            for (size_t i = 0; i < fields.size(); ++i) {
+                const C2FieldSupportedValuesQuery &field = fields[i];
+                uint32_t type = C2Param::Index(_C2ParamInspector::GetIndex(field.field())).type();
+                if (std::count(
+                        filter.desc.controlParams.begin(),
+                        filter.desc.controlParams.end(),
+                        type) == 0) {
+                    continue;
+                }
+                filterFields.push_back(field);
+                indices.push_back(i);
+            }
+            c2_status_t err = filter.intf->querySupportedValues_vb(filterFields, mayBlock);
+            if (err != C2_OK && err != C2_BAD_INDEX) {
+                LOG(WARNING) << "WrappedDecoderInterface: " << filter.intf->getName()
+                        << " returned error for querySupportedParams_nb; err=" << result;
+                result = err;
+                continue;
+            }
+            for (size_t i = 0; i < filterFields.size(); ++i) {
+                fields[indices[i]] = filterFields[i];
+            }
+        }
+        return result;
+    }
+
+private:
+    mutable std::mutex mMutex;
+    std::shared_ptr<C2ComponentInterface> mIntf;
+    std::vector<FilterWrapper::Component> mFilters;
+    std::weak_ptr<FilterWrapper> mFilterWrapper;
+    std::map<uint32_t, size_t> mTypeToIndexForQuery;
+    std::map<uint32_t, size_t> mTypeToIndexForConfig;
+
+    c2_status_t transferParams_l(
+            const std::shared_ptr<C2ComponentInterface> &curr,
+            const std::shared_ptr<C2ComponentInterface> &next,
+            c2_blocking_t mayBlock) {
+        // NOTE: this implementation is preliminary --- it could change once
+        // we define what parameters need to be propagated in component chaining.
+        std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
+        c2_status_t err = next->querySupportedParams_nb(&paramDescs);
+        if (err != C2_OK) {
+            LOG(DEBUG) << "WrappedDecoderInterface: " << next->getName()
+                    << " returned error for querySupportedParams_nb; err=" << err;
+            return err;
+        }
+        // Find supported input params from the next interface and flip direction
+        // so they become output params.
+        std::vector<C2Param::Index> indices;
+        for (const std::shared_ptr<C2ParamDescriptor> &paramDesc : paramDescs) {
+            C2Param::Index index = paramDesc->index();
+            if (!index.forInput() || paramDesc->isReadOnly()) {
+                continue;
+            }
+            if (index.forStream()) {
+                uint32_t stream = index.stream();
+                index = index.withPort(true /* output */).withStream(stream);
+            } else {
+                index = index.withPort(true /* output */);
+            }
+            indices.push_back(index);
+        }
+        // Query those output params from the current interface
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        err = curr->query_vb({}, indices, mayBlock, &heapParams);
+        if (err != C2_OK && err != C2_BAD_INDEX) {
+            LOG(DEBUG) << "WrappedDecoderInterface: " << curr->getName()
+                    << " returned error for query_vb; err=" << err;
+            return err;
+        }
+        // Flip the direction of the queried params, so they become input parameters.
+        // Configure the next interface with the params.
+        std::vector<C2Param *> configParams;
+        for (size_t i = 0; i < heapParams.size(); ++i) {
+            if (!heapParams[i]) {
+                continue;
+            }
+            if (heapParams[i]->forStream()) {
+                heapParams[i] = C2Param::CopyAsStream(
+                        *heapParams[i], false /* output */, heapParams[i]->stream());
+            } else {
+                heapParams[i] = C2Param::CopyAsPort(*heapParams[i], false /* output */);
+            }
+            configParams.push_back(heapParams[i].get());
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        err = next->config_vb(configParams, mayBlock, &failures);
+        if (err != C2_OK && err != C2_BAD_INDEX) {
+            LOG(DEBUG) << "WrappedDecoderInterface: " << next->getName()
+                    << " returned error for config_vb; err=" << err;
+            return err;
+        }
+        return C2_OK;
+    }
+
+    c2_status_t backPropagateParams_l(
+            const std::shared_ptr<C2ComponentInterface> &curr,
+            const std::shared_ptr<C2ComponentInterface> &next,
+            c2_blocking_t mayBlock) {
+        // NOTE: this implementation is preliminary --- it could change once
+        // we define what parameters need to be propagated in component chaining.
+        std::shared_ptr<FilterWrapper> filterWrapper = mFilterWrapper.lock();
+        if (!filterWrapper) {
+            LOG(DEBUG) << "WrappedDecoderInterface: FilterWrapper not found";
+            return C2_OK;
+        }
+        std::vector<std::unique_ptr<C2Param>> params;
+        c2_status_t err = filterWrapper->queryParamsForPreviousComponent(next, &params);
+        if (err != C2_OK) {
+            LOG(DEBUG) << "WrappedDecoderInterface: FilterWrapper returned error for "
+                << "queryParamsForPreviousComponent; intf=" << next->getName() << " err=" << err;
+            return C2_OK;
+        }
+        std::vector<C2Param *> configParams;
+        for (size_t i = 0; i < params.size(); ++i) {
+            if (!params[i]) {
+                continue;
+            }
+            configParams.push_back(params[i].get());
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        err = curr->config_vb(configParams, mayBlock, &failures);
+        if (err != C2_OK && err != C2_BAD_INDEX) {
+            LOG(DEBUG) << "WrappedDecoderInterface: " << curr->getName()
+                    << " returned error for config_vb; err=" << err;
+            return err;
+        }
+        return C2_OK;
+    }
+};
+
+class WrappedDecoder : public C2Component, public std::enable_shared_from_this<WrappedDecoder> {
+public:
+    WrappedDecoder(
+            std::shared_ptr<C2Component> comp,
+            std::vector<FilterWrapper::Component> &&filters,
+            std::weak_ptr<FilterWrapper> filterWrapper)
+        : mComp(comp), mFilters(std::move(filters)), mFilterWrapper(filterWrapper) {
+        std::vector<FilterWrapper::Component> filtersDup(mFilters);
+        mIntf = std::make_shared<WrappedDecoderInterface>(
+                comp->intf(), std::move(filtersDup), filterWrapper);
+    }
+
+    ~WrappedDecoder() override = default;
+
+    std::shared_ptr<C2ComponentInterface> intf() override { return mIntf; }
+
+    c2_status_t setListener_vb(
+            const std::shared_ptr<Listener> &listener, c2_blocking_t mayBlock) override {
+        if (listener) {
+            setListenerInternal(mFilters, listener, mayBlock);
+        } else {
+            mComp->setListener_vb(nullptr, mayBlock);
+            for (FilterWrapper::Component &filter : mFilters) {
+                filter.comp->setListener_vb(nullptr, mayBlock);
+            }
+        }
+        mListener = listener;
+        return C2_OK;
+    }
+
+    c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override {
+        return mComp->queue_nb(items);
+    }
+
+    c2_status_t announce_nb(const std::vector<C2WorkOutline> &) override {
+        return C2_OMITTED;
+    }
+
+    c2_status_t flush_sm(
+            flush_mode_t mode, std::list<std::unique_ptr<C2Work>>* const flushedWork) override {
+        c2_status_t result = mComp->flush_sm(mode, flushedWork);
+        std::list<std::unique_ptr<C2Work>> filterFlushedWork;
+        for (const FilterWrapper::Component &filter : mRunningFilters) {
+            c2_status_t err = filter.comp->flush_sm(mode, &filterFlushedWork);
+            if (err != C2_OK) {
+                result = err;
+            }
+            flushedWork->splice(flushedWork->end(), filterFlushedWork);
+        }
+        return result;
+    }
+
+    c2_status_t drain_nb(drain_mode_t mode) override {
+        // TODO: simplify using comp->drain_nb(mode)
+        switch (mode) {
+        case DRAIN_COMPONENT_WITH_EOS: {
+            std::unique_ptr<C2Work> eosWork{new C2Work};
+            eosWork->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+            eosWork->worklets.push_back(std::make_unique<C2Worklet>());
+            std::list<std::unique_ptr<C2Work>> items;
+            items.push_back(std::move(eosWork));
+            mComp->queue_nb(&items);
+            return C2_OK;
+        }
+        case DRAIN_COMPONENT_NO_EOS:
+        case DRAIN_CHAIN:
+        default:
+            return C2_BAD_VALUE;
+        }
+    }
+
+    c2_status_t start() override {
+        std::vector<FilterWrapper::Component> filters;
+        if (std::shared_ptr<FilterWrapper> filterWrapper = mFilterWrapper.lock()) {
+            // Let's check if we have filters that we can skip
+            for (FilterWrapper::Component &filter : mFilters) {
+                if (!filterWrapper->isFilteringEnabled(filter.intf)) {
+                    LOG(VERBOSE) << "filtering disabled for " << filter.traits.name;
+                    continue;
+                }
+                LOG(VERBOSE) << "filtering enabled for " << filter.traits.name;
+                filters.push_back(filter);
+            }
+            if (filters.size() < mFilters.size()) {
+                LOG(VERBOSE) << (mFilters.size() - filters.size()) << " filter(s) skipped";
+                setListenerInternal(filters, mListener, C2_MAY_BLOCK);
+                std::vector filtersCopy(filters);
+                mIntf->takeFilters(std::move(filtersCopy));
+            }
+        }
+
+        c2_status_t err = mComp->start();
+        if (err != C2_OK) {
+            return err;
+        }
+        for (FilterWrapper::Component &filter : filters) {
+            c2_status_t err = filter.comp->start();
+            if (err != C2_OK) {
+                // Previous components are already started successfully;
+                // we ended up in an incoherent state.
+                return C2_CORRUPTED;
+            }
+        }
+        mRunningFilters = std::move(filters);
+        return C2_OK;
+    }
+
+    c2_status_t stop() override {
+        c2_status_t err = mComp->stop();
+        if (err != C2_OK) {
+            return err;
+        }
+        for (const FilterWrapper::Component &filter : mRunningFilters) {
+            c2_status_t err = filter.comp->stop();
+            if (err != C2_OK) {
+                // Previous components are already stopped successfully;
+                // we ended up in an incoherent state.
+                return C2_CORRUPTED;
+            }
+        }
+        mRunningFilters.clear();
+        return C2_OK;
+    }
+
+    c2_status_t reset() override {
+        c2_status_t result = mComp->reset();
+        if (result != C2_OK) {
+            result = C2_CORRUPTED;
+        }
+        for (const FilterWrapper::Component &filter : mFilters) {
+            c2_status_t err = filter.comp->reset();
+            if (err != C2_OK) {
+                // Previous components are already reset successfully;
+                // we ended up in an incoherent state.
+                result = C2_CORRUPTED;
+                // continue for the rest of the chain
+            }
+        }
+        mRunningFilters.clear();
+        return result;
+    }
+
+    c2_status_t release() override {
+        c2_status_t result = mComp->release();
+        if (result != C2_OK) {
+            result = C2_CORRUPTED;
+        }
+        for (const FilterWrapper::Component &filter : mFilters) {
+            c2_status_t err = filter.comp->release();
+            if (err != C2_OK) {
+                // Previous components are already released successfully;
+                // we ended up in an incoherent state.
+                result = C2_CORRUPTED;
+                // continue for the rest of the chain
+            }
+        }
+        mRunningFilters.clear();
+        return result;
+    }
+
+private:
+    class PassingListener : public Listener {
+    public:
+        PassingListener(
+                std::shared_ptr<C2Component> wrappedComponent,
+                const std::shared_ptr<Listener> &wrappedComponentListener,
+                std::shared_ptr<C2Component> nextComponent)
+            : mWrappedComponent(wrappedComponent),
+              mWrappedComponentListener(wrappedComponentListener),
+              mNextComponent(nextComponent) {
+        }
+
+        void onWorkDone_nb(
+                std::weak_ptr<C2Component>,
+                std::list<std::unique_ptr<C2Work>> workItems) override {
+            std::shared_ptr<C2Component> nextComponent = mNextComponent.lock();
+            std::list<std::unique_ptr<C2Work>> failedWorkItems;
+            if (!nextComponent) {
+                for (std::unique_ptr<C2Work> &work : workItems) {
+                    // Next component unexpectedly released while the work is
+                    // in-flight. Report C2_CORRUPTED to the client.
+                    work->result = C2_CORRUPTED;
+                    failedWorkItems.push_back(std::move(work));
+                }
+                workItems.clear();
+            } else {
+                for (auto it = workItems.begin(); it != workItems.end(); ) {
+                    const std::unique_ptr<C2Work> &work = *it;
+                    if (work->result != C2_OK
+                            || work->worklets.size() != 1) {
+                        failedWorkItems.push_back(std::move(*it));
+                        it = workItems.erase(it);
+                        continue;
+                    }
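+                    // Hand this work to the next component in the chain: the
+                    // worklet output becomes the next component's input, and
+                    // the client-visible custom ordinal is preserved.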
+                    C2FrameData &output = work->worklets.front()->output;
+                    c2_cntr64_t customOrdinal = work->input.ordinal.customOrdinal;
+                    work->input = std::move(output);
+                    work->input.ordinal.customOrdinal = customOrdinal;
+                    output.flags = C2FrameData::flags_t(0);
+                    output.buffers.clear();
+                    output.configUpdate.clear();
+                    output.infoBuffers.clear();
+                    ++it;
+                }
+            }
+            if (!failedWorkItems.empty()) {
+                for (const std::unique_ptr<C2Work> &work : failedWorkItems) {
+                    LOG(VERBOSE) << "work #" << work->input.ordinal.frameIndex.peek()
+                            << " failed: err=" << work->result
+                            << " worklets.size()=" << work->worklets.size();
+                }
+                if (std::shared_ptr<Listener> wrappedComponentListener =
+                        mWrappedComponentListener.lock()) {
+                    wrappedComponentListener->onWorkDone_nb(
+                            mWrappedComponent, std::move(failedWorkItems));
+                }
+            }
+            if (!workItems.empty()) {
+                nextComponent->queue_nb(&workItems);
+            }
+        }
+
+        void onTripped_nb(
+                std::weak_ptr<C2Component>,
+                std::vector<std::shared_ptr<C2SettingResult>>) override {
+            // Trip not supported
+        }
+
+        void onError_nb(std::weak_ptr<C2Component>, uint32_t errorCode) override {
+            if (std::shared_ptr<Listener> wrappedComponentListener =
+                    mWrappedComponentListener.lock()) {
+                wrappedComponentListener->onError_nb(mWrappedComponent, errorCode);
+            }
+        }
+
+    private:
+        std::weak_ptr<C2Component> mWrappedComponent;
+        std::weak_ptr<Listener> mWrappedComponentListener;
+        std::weak_ptr<C2Component> mNextComponent;
+    };
+
+    class LastListener : public Listener {
+    public:
+        LastListener(
+                std::shared_ptr<C2Component> wrappedComponent,
+                const std::shared_ptr<Listener> &wrappedComponentListener)
+            : mWrappedComponent(wrappedComponent),
+              mWrappedComponentListener(wrappedComponentListener) {
+        }
+
+        void onWorkDone_nb(
+                std::weak_ptr<C2Component>,
+                std::list<std::unique_ptr<C2Work>> workItems) override {
+            if (mWrappedComponent.expired()) {
+                return;
+            }
+            if (std::shared_ptr<Listener> wrappedComponentListener =
+                    mWrappedComponentListener.lock()) {
+                wrappedComponentListener->onWorkDone_nb(
+                        mWrappedComponent, std::move(workItems));
+            }
+        }
+
+        void onTripped_nb(
+                std::weak_ptr<C2Component>,
+                std::vector<std::shared_ptr<C2SettingResult>>) override {
+            // Trip not supported
+        }
+
+        void onError_nb(std::weak_ptr<C2Component>, uint32_t errorCode) override {
+            if (mWrappedComponent.expired()) {
+                return;
+            }
+            if (std::shared_ptr<Listener> wrappedComponentListener =
+                    mWrappedComponentListener.lock()) {
+                wrappedComponentListener->onError_nb(mWrappedComponent, errorCode);
+            }
+        }
+
+    private:
+        std::weak_ptr<C2Component> mWrappedComponent;
+        std::weak_ptr<Listener> mWrappedComponentListener;
+    };
+
+    std::shared_ptr<C2Component> mComp;
+    std::shared_ptr<WrappedDecoderInterface> mIntf;
+    std::vector<FilterWrapper::Component> mFilters;
+    std::vector<FilterWrapper::Component> mRunningFilters;
+    std::weak_ptr<FilterWrapper> mFilterWrapper;
+    std::shared_ptr<Listener> mListener;
+#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+    base::ScopedLogSeverity mScopedLogSeverity{base::VERBOSE};
+#endif
+
+    c2_status_t setListenerInternal(
+            const std::vector<FilterWrapper::Component> &filters,
+            const std::shared_ptr<Listener> &listener,
+            c2_blocking_t mayBlock) {
+        if (filters.empty()) {
+            return mComp->setListener_vb(listener, mayBlock);
+        }
+        std::shared_ptr passingListener = std::make_shared<PassingListener>(
+                shared_from_this(),
+                listener,
+                filters.front().comp);
+        mComp->setListener_vb(passingListener, mayBlock);
+        for (size_t i = 0; i < filters.size() - 1; ++i) {
+            filters[i].comp->setListener_vb(
+                    std::make_shared<PassingListener>(
+                            shared_from_this(),
+                            listener,
+                            filters[i + 1].comp),
+                    mayBlock);
+        }
+        filters.back().comp->setListener_vb(
+                std::make_shared<LastListener>(shared_from_this(), listener), mayBlock);
+        return C2_OK;
+    }
+};
+
+}  // anonymous namespace
+
+FilterWrapper::FilterWrapper(std::unique_ptr<Plugin> &&plugin)
+    : mInit(NO_INIT),
+      mPlugin(std::move(plugin)) {
+    if (mPlugin->status() != OK) {
+        LOG(ERROR) << "plugin not OK: " << mPlugin->status();
+        mPlugin.reset();
+        return;
+    }
+    mStore = mPlugin->getStore();
+    if (!mStore) {
+        LOG(ERROR) << "no store";
+        mPlugin.reset();
+        return;
+    }
+    std::vector<std::shared_ptr<const C2Component::Traits>> traits =
+        mStore->listComponents();
+    std::sort(
+            traits.begin(),
+            traits.end(),
+            [](std::shared_ptr<const C2Component::Traits> &a,
+                    std::shared_ptr<const C2Component::Traits> &b) {
+                return a->rank < b->rank;
+            });
+    for (size_t i = 0; i < traits.size(); ++i) {
+        const std::shared_ptr<const C2Component::Traits> &trait = traits[i];
+        if (trait->domain == C2Component::DOMAIN_OTHER
+                || trait->domain == C2Component::DOMAIN_AUDIO
+                || trait->kind != C2Component::KIND_OTHER) {
+            LOG(DEBUG) << trait->name << " is ignored because of domain/kind: "
+                << trait->domain << "/" << trait->kind;
+            continue;
+        }
+        Descriptor desc;
+        if (!mPlugin->describe(trait->name, &desc)) {
+            LOG(DEBUG) << trait->name << " is ignored because describe() failed";
+            continue;
+        }
+        mComponents.push_back({nullptr, nullptr, *trait, desc});
+    }
+    if (mComponents.empty()) {
+        LOG(DEBUG) << "FilterWrapper: no filter component found";
+        mPlugin.reset();
+        return;
+    }
+    mInit = OK;
+}
+
+FilterWrapper::~FilterWrapper() {
+}
+
+std::vector<FilterWrapper::Component> FilterWrapper::createFilters() {
+    std::vector<FilterWrapper::Component> filters;
+    for (const FilterWrapper::Component &filter : mComponents) {
+        std::shared_ptr<C2Component> comp;
+        if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
+            return {};
+        }
+        filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
+    }
+    return filters;
+}
+
+C2Component::Traits FilterWrapper::getTraits(
+        const std::shared_ptr<C2ComponentInterface> &intf) {
+    {
+        std::unique_lock lock(mCacheMutex);
+        if (mCachedTraits.count(intf->getName())) {
+            return mCachedTraits.at(intf->getName());
+        }
+    }
+    C2ComponentDomainSetting domain;
+    C2ComponentKindSetting kind;
+    c2_status_t err = intf->query_vb({&domain, &kind}, {}, C2_MAY_BLOCK, nullptr);
+    C2Component::Traits traits = {
+        "query failed",  // name
+        C2Component::DOMAIN_OTHER,
+        C2Component::KIND_OTHER,
+        0,   // rank, unused
+        "",  // media type, unused
+        "",  // owner, unused
+        {},  // aliases, unused
+    };
+    if (err == C2_OK) {
+        traits = {
+            intf->getName(),
+            domain.value,
+            kind.value,
+            0,   // rank, unused
+            "",  // media type, unused
+            "",  // owner, unused
+            {},  // aliases, unused
+        };
+        std::unique_lock lock(mCacheMutex);
+        mCachedTraits[traits.name] = traits;
+    }
+    return traits;
+}
+
+std::shared_ptr<C2ComponentInterface> FilterWrapper::maybeWrapInterface(
+        const std::shared_ptr<C2ComponentInterface> intf) {
+    if (mInit != OK) {
+        LOG(VERBOSE) << "maybeWrapInterface: Wrapper not initialized: "
+                << intf->getName() << " is not wrapped.";
+        return intf;
+    }
+    C2Component::Traits traits = getTraits(intf);
+    if (traits.name != intf->getName()) {
+        LOG(INFO) << "maybeWrapInterface: Querying traits from " << intf->getName()
+                << " failed; not wrapping the interface";
+        return intf;
+    }
+    if ((traits.domain != C2Component::DOMAIN_VIDEO && traits.domain != C2Component::DOMAIN_IMAGE)
+            || traits.kind != C2Component::KIND_DECODER) {
+        LOG(VERBOSE) << "maybeWrapInterface: " << traits.name
+                << " is not video/image decoder; not wrapping the interface";
+        return intf;
+    }
+    return std::make_shared<WrappedDecoderInterface>(
+            intf, createFilters(), weak_from_this());
+}
+
+std::shared_ptr<C2Component> FilterWrapper::maybeWrapComponent(
+        const std::shared_ptr<C2Component> comp) {
+    if (mInit != OK) {
+        LOG(VERBOSE) << "maybeWrapComponent: Wrapper not initialized: "
+                << comp->intf()->getName() << " is not wrapped.";
+        return comp;
+    }
+    C2Component::Traits traits = getTraits(comp->intf());
+    if (traits.name != comp->intf()->getName()) {
+        LOG(INFO) << "maybeWrapComponent: Querying traits from " << comp->intf()->getName()
+                << " failed; not wrapping the component";
+        return comp;
+    }
+    if ((traits.domain != C2Component::DOMAIN_VIDEO && traits.domain != C2Component::DOMAIN_IMAGE)
+            || traits.kind != C2Component::KIND_DECODER) {
+        LOG(VERBOSE) << "maybeWrapComponent: " << traits.name
+                << " is not video/image decoder; not wrapping the component";
+        return comp;
+    }
+    std::vector<Component> filters = createFilters();
+    std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
+            comp, std::vector(filters), weak_from_this());
+    {
+        std::unique_lock lock(mWrappedComponentsMutex);
+        std::vector<std::weak_ptr<const C2Component>> &components =
+            mWrappedComponents.emplace_back();
+        components.push_back(wrapped);
+        components.push_back(comp);
+        for (const Component &filter : filters) {
+            components.push_back(filter.comp);
+        }
+    }
+    return wrapped;
+}
+
+bool FilterWrapper::isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) {
+    if (mInit != OK) {
+        LOG(WARNING) << "isFilteringEnabled: Wrapper not initialized: ";
+        return false;
+    }
+    return mPlugin->isFilteringEnabled(intf);
+}
+
+c2_status_t FilterWrapper::createBlockPool(
+        C2PlatformAllocatorStore::id_t allocatorId,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    std::unique_lock lock(mWrappedComponentsMutex);
+    for (auto it = mWrappedComponents.begin(); it != mWrappedComponents.end(); ) {
+        std::shared_ptr<const C2Component> comp = it->front().lock();
+        if (!comp) {
+            it = mWrappedComponents.erase(it);
+            continue;
+        }
+        if (component == comp) {
+            std::vector<std::shared_ptr<const C2Component>> components(it->size());
+            std::transform(
+                    it->begin(), it->end(), components.begin(),
+                    [](const std::weak_ptr<const C2Component> &el) {
+                        return el.lock();
+                    });
+            if (C2_OK == CreateCodec2BlockPool(allocatorId, components, pool)) {
+                return C2_OK;
+            }
+        }
+        ++it;
+    }
+    return CreateCodec2BlockPool(allocatorId, component, pool);
+}
+
+c2_status_t FilterWrapper::queryParamsForPreviousComponent(
+        const std::shared_ptr<C2ComponentInterface> &intf,
+        std::vector<std::unique_ptr<C2Param>> *params) {
+    if (mInit != OK) {
+        LOG(WARNING) << "queryParamsForPreviousComponent: Wrapper not initialized: ";
+        return C2_NO_INIT;
+    }
+    return mPlugin->queryParamsForPreviousComponent(intf, params);
+}
+
+}  // namespace android
diff --git a/media/codec2/hidl/plugin/FilterWrapperStub.cpp b/media/codec2/hidl/plugin/FilterWrapperStub.cpp
new file mode 100644
index 0000000..01ca596
--- /dev/null
+++ b/media/codec2/hidl/plugin/FilterWrapperStub.cpp
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-FilterWrapperStub"
+
+#include <FilterWrapper.h>
+
+namespace android {
+
+FilterWrapper::FilterWrapper(std::unique_ptr<Plugin> &&) {
+}
+
+FilterWrapper::~FilterWrapper() {
+}
+
+std::shared_ptr<C2ComponentInterface> FilterWrapper::maybeWrapInterface(
+        const std::shared_ptr<C2ComponentInterface> intf) {
+    return intf;
+}
+
+std::shared_ptr<C2Component> FilterWrapper::maybeWrapComponent(
+        const std::shared_ptr<C2Component> comp) {
+    return comp;
+}
+
+bool FilterWrapper::isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &) {
+    return false;
+}
+
+c2_status_t FilterWrapper::createBlockPool(
+        C2PlatformAllocatorStore::id_t allocatorId,
+        std::shared_ptr<const C2Component> component,
+        std::shared_ptr<C2BlockPool> *pool) {
+    return CreateCodec2BlockPool(allocatorId, component, pool);
+}
+
+}  // namespace android
diff --git a/media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h b/media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h
new file mode 100644
index 0000000..a1ac624
--- /dev/null
+++ b/media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_PLUGIN_FILTER_PLUGIN_H
+
+#define CODEC2_HIDL_PLUGIN_FILTER_PLUGIN_H
+
+#include <memory>
+
+#include <C2Component.h>
+
+namespace android {
+
+class FilterPlugin_V1 {
+public:
+    static constexpr int32_t VERSION = 1;
+
+    virtual ~FilterPlugin_V1() = default;
+
+    /**
+     * Returns a C2ComponentStore object with which clients can create
+     * filter components / interfaces.
+     */
+    virtual std::shared_ptr<C2ComponentStore> getComponentStore() = 0;
+    struct Descriptor {
+        // Parameters that client sets for filter control.
+        std::initializer_list<C2Param::Type> controlParams;
+        // Parameters that the component changes after filtering.
+        std::initializer_list<C2Param::Type> affectedParams;
+    };
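+    // Illustrative sketch only (the parameter names below are assumptions,
+    // not defined by this header): a tone-mapping filter might expose
+    //   Descriptor desc = {
+    //       { C2StreamColorAspectsRequestInfo::output::PARAM_TYPE },  // controlParams
+    //       { C2StreamColorAspectsInfo::output::PARAM_TYPE },         // affectedParams
+    //   };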
+
+    /**
+     * Describe a filter component.
+     *
+     * @param name[in]  filter's name
+     * @param desc[out] pointer to filter descriptor to be populated
+     * @return  true if |name| is in the store and |desc| is populated;
+     *          false if |name| is not recognized
+     */
+    virtual bool describe(C2String name, Descriptor *desc) = 0;
+
+    /**
+     * Returns true if the component will actually apply filtering given the
+     * current configuration; false if it will be a no-op.
+    virtual bool isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) = 0;
+
+    /**
+     * Query the parameters that |intf| wants applied to the previous
+     * component in the chain. For example, an image/video filter may require
+     * a specific usage or pixel format from the previous component.
+     */
+    virtual c2_status_t queryParamsForPreviousComponent(
+            const std::shared_ptr<C2ComponentInterface> &intf,
+            std::vector<std::unique_ptr<C2Param>> *params) = 0;
+};
+
+}  // namespace android
+
+extern "C" {
+
+typedef int32_t (*GetFilterPluginVersionFunc)();
+int32_t GetFilterPluginVersion();
+
+typedef void* (*CreateFilterPluginFunc)();
+void *CreateFilterPlugin();
+
+typedef void (*DestroyFilterPluginFunc)(void *);
+void DestroyFilterPlugin(void *plugin);
+
+}  // extern "C"
+
+#endif  // CODEC2_HIDL_PLUGIN_FILTER_PLUGIN_H
diff --git a/media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h b/media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h
new file mode 100644
index 0000000..0aab39f
--- /dev/null
+++ b/media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_PLUGIN_DEFAULT_FILTER_PLUGIN_H
+
+#define CODEC2_HIDL_PLUGIN_DEFAULT_FILTER_PLUGIN_H
+
+#include <codec2/hidl/plugin/FilterPlugin.h>
+
+#include <FilterWrapper.h>
+
+namespace android {
+
+class DefaultFilterPlugin : public FilterWrapper::Plugin {
+public:
+    explicit DefaultFilterPlugin(const char *pluginPath);
+
+    ~DefaultFilterPlugin();
+
+    status_t status() const override { return mInit; }
+
+    std::shared_ptr<C2ComponentStore> getStore() override { return mStore; }
+    bool describe(C2String name, FilterWrapper::Descriptor *desc) override;
+    bool isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) override;
+    c2_status_t queryParamsForPreviousComponent(
+            const std::shared_ptr<C2ComponentInterface> &intf,
+            std::vector<std::unique_ptr<C2Param>> *params) override;
+
+private:
+    status_t mInit;
+    void *mHandle;
+    DestroyFilterPluginFunc mDestroyPlugin;
+    FilterPlugin_V1 *mPlugin;
+    std::shared_ptr<C2ComponentStore> mStore;
+};
+
+}  // namespace android
+
+#endif  // CODEC2_HIDL_PLUGIN_DEFAULT_FILTER_PLUGIN_H
diff --git a/media/codec2/hidl/plugin/internal/FilterWrapper.h b/media/codec2/hidl/plugin/internal/FilterWrapper.h
new file mode 100644
index 0000000..cf2cc30
--- /dev/null
+++ b/media/codec2/hidl/plugin/internal/FilterWrapper.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_HIDL_PLUGIN_FILTER_WRAPPER_H
+
+#define CODEC2_HIDL_PLUGIN_FILTER_WRAPPER_H
+
+#include <map>
+#include <memory>
+#include <mutex>
+
+#include <C2Component.h>
+#include <C2PlatformSupport.h>
+
+#include <codec2/hidl/plugin/FilterPlugin.h>
+#include <utils/Errors.h>
+
+namespace android {
+
+// TODO: documentation
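+//
+// Minimal usage sketch (kPluginPath and the decoder objects below are
+// illustrative; DefaultFilterPlugin is one possible Plugin implementation):
+//
+//   std::shared_ptr<FilterWrapper> wrapper = FilterWrapper::Create(
+//           std::make_unique<DefaultFilterPlugin>(kPluginPath));
+//   std::shared_ptr<C2Component> wrapped = wrapper->maybeWrapComponent(decoder);
+//   std::shared_ptr<C2ComponentInterface> intf =
+//           wrapper->maybeWrapInterface(decoder->intf());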
+class FilterWrapper : public std::enable_shared_from_this<FilterWrapper> {
+public:
+    using Descriptor = FilterPlugin_V1::Descriptor;
+
+    class Plugin {
+    public:
+        Plugin() = default;
+        virtual ~Plugin() = default;
+        virtual status_t status() const = 0;
+        virtual std::shared_ptr<C2ComponentStore> getStore() = 0;
+        virtual bool describe(C2String name, Descriptor *desc) = 0;
+        virtual bool isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) = 0;
+        virtual c2_status_t queryParamsForPreviousComponent(
+                const std::shared_ptr<C2ComponentInterface> &intf,
+                std::vector<std::unique_ptr<C2Param>> *params) = 0;
+        C2_DO_NOT_COPY(Plugin);
+    };
+
+    struct Component {
+        const std::shared_ptr<C2Component> comp;
+        const std::shared_ptr<C2ComponentInterface> intf;
+        const C2Component::Traits traits;
+        const Descriptor desc;
+    };
+
+private:
+    explicit FilterWrapper(std::unique_ptr<Plugin> &&plugin);
+public:
+    static std::shared_ptr<FilterWrapper> Create(std::unique_ptr<Plugin> &&plugin) {
+        return std::shared_ptr<FilterWrapper>(new FilterWrapper(std::move(plugin)));
+    }
+    ~FilterWrapper();
+
+    /**
+     * Returns the wrapped interface, or |intf| if wrapping is not possible or not needed.
+     */
+    std::shared_ptr<C2ComponentInterface> maybeWrapInterface(
+            const std::shared_ptr<C2ComponentInterface> intf);
+
+    /**
+     * Returns the wrapped component, or |comp| if wrapping is not possible or not needed.
+     */
+    std::shared_ptr<C2Component> maybeWrapComponent(
+            const std::shared_ptr<C2Component> comp);
+
+    /**
+     * Returns true iff the filtering will apply to the buffers in the current configuration.
+     */
+    bool isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf);
+
+    /**
+     * Create a C2BlockPool object with |allocatorId| for |component|.
+     */
+    c2_status_t createBlockPool(
+            C2PlatformAllocatorStore::id_t allocatorId,
+            std::shared_ptr<const C2Component> component,
+            std::shared_ptr<C2BlockPool> *pool);
+
+    /**
+     * Query parameters that |intf| wants from the previous component.
+     */
+    c2_status_t queryParamsForPreviousComponent(
+            const std::shared_ptr<C2ComponentInterface> &intf,
+            std::vector<std::unique_ptr<C2Param>> *params);
+
+private:
+    status_t mInit;
+    std::unique_ptr<Plugin> mPlugin;
+    std::shared_ptr<C2ComponentStore> mStore;
+    std::list<FilterWrapper::Component> mComponents;
+
+    std::mutex mCacheMutex;
+    std::map<std::string, C2Component::Traits> mCachedTraits;
+
+    std::mutex mWrappedComponentsMutex;
+    std::list<std::vector<std::weak_ptr<const C2Component>>> mWrappedComponents;
+
+    std::vector<FilterWrapper::Component> createFilters();
+    C2Component::Traits getTraits(const std::shared_ptr<C2ComponentInterface> &intf);
+
+    C2_DO_NOT_COPY(FilterWrapper);
+};
+
+}  // namespace android
+
+#endif  // CODEC2_HIDL_PLUGIN_FILTER_WRAPPER_H
diff --git a/media/codec2/hidl/plugin/samples/Android.bp b/media/codec2/hidl/plugin/samples/Android.bp
new file mode 100644
index 0000000..32b760d
--- /dev/null
+++ b/media/codec2/hidl/plugin/samples/Android.bp
@@ -0,0 +1,58 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "sample-codec2-hidl-plugin-defaults",
+
+    srcs: [
+        "SampleFilterPlugin.cpp",
+    ],
+
+    defaults: [
+        "libcodec2-impl-defaults",
+    ],
+
+    header_libs: [
+        "libcodec2_hidl_plugin_headers",
+        "libgui_headers",
+    ],
+
+    shared_libs: [
+        "libEGL",
+        "libGLESv1_CM",
+        "libGLESv2",
+        "libGLESv3",
+        "libbase",
+        "libcodec2",
+        "libcutils",
+        "libprocessgroup",
+        "libsfplugin_ccodec_utils",
+        "libsync",
+        "libui",
+        "libutils",
+    ],
+
+    static_libs: [
+        "librenderfright",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
+
+cc_library {
+    name: "sample-codec2-hidl-plugin",
+    vendor: true,
+
+    defaults: [
+        "sample-codec2-hidl-plugin-defaults",
+    ],
+}
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
new file mode 100644
index 0000000..b942be7
--- /dev/null
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -0,0 +1,997 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SampleFilterPlugin"
+#include <android-base/logging.h>
+
+#include <chrono>
+#include <thread>
+
+#include <codec2/hidl/plugin/FilterPlugin.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2Config.h>
+#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <renderengine/RenderEngine.h>
+#include <system/window.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/RefBase.h>
+
+typedef C2StreamParam<C2Info, C2ColorAspectsStruct,
+                kParamIndexColorAspects | C2Param::CoreIndex::IS_REQUEST_FLAG>
+        C2StreamColorAspectsRequestInfo;
+
+namespace android {
+
+using namespace std::literals::chrono_literals;
+
+class SampleToneMappingFilter
+    : public C2Component, public std::enable_shared_from_this<SampleToneMappingFilter> {
+public:
+    class Interface : public C2ComponentInterface {
+    public:
+        static const std::string NAME;
+        static const FilterPlugin_V1::Descriptor DESCRIPTOR;
+
+        explicit Interface(c2_node_id_t id)
+            : mId(id),
+              mReflector(std::make_shared<C2ReflectorHelper>()),
+              mHelper(mReflector) {
+        }
+        ~Interface() override = default;
+        C2String getName() const override { return NAME; }
+        c2_node_id_t getId() const override { return mId; }
+
+        c2_status_t query_vb(
+                const std::vector<C2Param*> &stackParams,
+                const std::vector<C2Param::Index> &heapParamIndices,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
+            return mHelper.query(stackParams, heapParamIndices, mayBlock, heapParams);
+        }
+        c2_status_t config_vb(
+                const std::vector<C2Param*> &params,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
+            return mHelper.config(params, mayBlock, failures);
+        }
+        c2_status_t querySupportedParams_nb(
+                std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
+            return mHelper.querySupportedParams(params);
+        }
+        c2_status_t querySupportedValues_vb(
+                std::vector<C2FieldSupportedValuesQuery> &fields,
+                c2_blocking_t mayBlock) const override {
+            return mHelper.querySupportedValues(fields, mayBlock);
+        }
+        c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+        c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; }
+
+        uint32_t getDataSpace() {
+            Helper::Lock lock = mHelper.lock();
+            uint32_t dataspace = HAL_DATASPACE_UNKNOWN;
+            C2Mapper::map(
+                    mHelper.mInputColorAspectInfo->range,
+                    mHelper.mInputColorAspectInfo->primaries,
+                    mHelper.mInputColorAspectInfo->matrix,
+                    mHelper.mInputColorAspectInfo->transfer,
+                    &dataspace);
+            return dataspace;
+        }
+        std::shared_ptr<C2StreamHdrStaticInfo::input> getHdrStaticMetadata() {
+            Helper::Lock lock = mHelper.lock();
+            return mHelper.mInputHdrStaticInfo;
+        }
+        C2BlockPool::local_id_t getPoolId() {
+            Helper::Lock lock = mHelper.lock();
+            return mHelper.mOutputPoolIds->m.values[0];
+        }
+
+        static bool IsFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) {
+            C2StreamColorAspectsRequestInfo::output info(0u);
+            std::vector<std::unique_ptr<C2Param>> heapParams;
+            c2_status_t err = intf->query_vb({&info}, {}, C2_MAY_BLOCK, &heapParams);
+            if (err != C2_OK && err != C2_BAD_INDEX) {
+                LOG(WARNING) << "SampleToneMappingFilter::Interface::IsFilteringEnabled: "
+                        << "query failed for " << intf->getName();
+                return false;
+            }
+            return info && info.transfer == C2Color::TRANSFER_170M;
+        }
+
+        static c2_status_t QueryParamsForPreviousComponent(
+                [[maybe_unused]] const std::shared_ptr<C2ComponentInterface> &intf,
+                std::vector<std::unique_ptr<C2Param>> *params) {
+            params->emplace_back(new C2StreamUsageTuning::output(
+                    0u, C2AndroidMemoryUsage::HW_TEXTURE_READ));
+            params->emplace_back(new C2StreamPixelFormatInfo::output(
+                    0u, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
+            return C2_OK;
+        }
+    private:
+        const c2_node_id_t mId;
+        std::shared_ptr<C2ReflectorHelper> mReflector;
+        struct Helper : public C2InterfaceHelper {
+            explicit Helper(std::shared_ptr<C2ReflectorHelper> reflector)
+                : C2InterfaceHelper(reflector) {
+                setDerivedInstance(this);
+
+                addParameter(
+                        DefineParam(mApiFeatures, C2_PARAMKEY_API_FEATURES)
+                        .withConstValue(new C2ApiFeaturesSetting(C2Config::api_feature_t(
+                                API_REFLECTION |
+                                API_VALUES |
+                                API_CURRENT_VALUES |
+                                API_DEPENDENCY |
+                                API_SAME_INPUT_BUFFER)))
+                        .build());
+
+                mName = C2ComponentNameSetting::AllocShared(NAME.size() + 1);
+                strncpy(mName->m.value, NAME.c_str(), NAME.size() + 1);
+                addParameter(
+                        DefineParam(mName, C2_PARAMKEY_COMPONENT_NAME)
+                        .withConstValue(mName)
+                        .build());
+
+                addParameter(
+                        DefineParam(mKind, C2_PARAMKEY_COMPONENT_KIND)
+                        .withConstValue(new C2ComponentKindSetting(C2Component::KIND_OTHER))
+                        .build());
+
+                addParameter(
+                        DefineParam(mDomain, C2_PARAMKEY_COMPONENT_DOMAIN)
+                        .withConstValue(new C2ComponentDomainSetting(C2Component::DOMAIN_VIDEO))
+                        .build());
+
+                addParameter(
+                        DefineParam(mInputStreamCount, C2_PARAMKEY_INPUT_STREAM_COUNT)
+                        .withConstValue(new C2PortStreamCountTuning::input(1))
+                        .build());
+
+                addParameter(
+                        DefineParam(mOutputStreamCount, C2_PARAMKEY_OUTPUT_STREAM_COUNT)
+                        .withConstValue(new C2PortStreamCountTuning::output(1))
+                        .build());
+
+                addParameter(
+                        DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+                        .withConstValue(new C2StreamBufferTypeSetting::input(
+                                0u, C2BufferData::GRAPHIC))
+                        .build());
+
+                static const std::string kRawMediaType = "video/raw";
+                mInputMediaType = C2PortMediaTypeSetting::input::AllocShared(
+                        kRawMediaType.size() + 1);
+                strncpy(mInputMediaType->m.value, kRawMediaType.c_str(), kRawMediaType.size() + 1);
+                addParameter(
+                        DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+                        .withConstValue(mInputMediaType)
+                        .build());
+
+                addParameter(
+                        DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+                        .withConstValue(new C2StreamBufferTypeSetting::output(
+                                0u, C2BufferData::GRAPHIC))
+                        .build());
+
+                mOutputMediaType = C2PortMediaTypeSetting::output::AllocShared(
+                        kRawMediaType.size() + 1);
+                strncpy(mOutputMediaType->m.value, kRawMediaType.c_str(), kRawMediaType.size() + 1);
+                addParameter(
+                        DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+                        .withConstValue(mOutputMediaType)
+                        .build());
+
+                addParameter(
+                        DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
+                        .withConstValue(new C2PortActualDelayTuning::input(0u))
+                        .build());
+
+                addParameter(
+                        DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+                        .withConstValue(new C2PortActualDelayTuning::output(0u))
+                        .build());
+
+                addParameter(
+                        DefineParam(mActualPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY)
+                        .withConstValue(new C2ActualPipelineDelayTuning(0u))
+                        .build());
+
+                C2BlockPool::local_id_t outputPoolIds[1] = { C2BlockPool::BASIC_GRAPHIC };
+                addParameter(
+                        DefineParam(mOutputPoolIds, C2_PARAMKEY_OUTPUT_BLOCK_POOLS)
+                        .withDefault(C2PortBlockPoolsTuning::output::AllocShared(outputPoolIds))
+                        .withFields({ C2F(mOutputPoolIds, m.values[0]).any(),
+                                      C2F(mOutputPoolIds, m.values).inRange(0, 1) })
+                        .withSetter(OutputBlockPoolSetter)
+                        .build());
+
+                addParameter(
+                        DefineParam(mInputHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::input(0u))
+                        .withFields({
+                            C2F(mInputHdrStaticInfo, mastering.red.x).any(),
+                        })
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+                addParameter(
+                        DefineParam(mOutputHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withConstValue(new C2StreamHdrStaticInfo::output(0u))
+                        .build());
+
+                addParameter(
+                        DefineParam(mInputColorAspectInfo, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::input(0u))
+                        .withFields({
+                            C2F(mInputColorAspectInfo, range).any(),
+                            C2F(mInputColorAspectInfo, primaries).any(),
+                            C2F(mInputColorAspectInfo, transfer).any(),
+                            C2F(mInputColorAspectInfo, matrix).any(),
+                        })
+                        .withSetter(InputColorAspectsSetter)
+                        .build());
+
+                addParameter(
+                        DefineParam(
+                            mColorAspectRequestInfo,
+                            (std::string(C2_PARAMKEY_COLOR_ASPECTS) + ".request").c_str())
+                        .withDefault(new C2StreamColorAspectsRequestInfo::output(0u))
+                        .withFields({
+                            C2F(mColorAspectRequestInfo, range).any(),
+                            C2F(mColorAspectRequestInfo, primaries).any(),
+                            C2F(mColorAspectRequestInfo, transfer).oneOf({
+                                C2Color::TRANSFER_UNSPECIFIED,
+                                C2Color::TRANSFER_170M,
+                            }),
+                            C2F(mColorAspectRequestInfo, matrix).any(),
+                        })
+                        .withSetter(ColorAspectsRequestSetter)
+                        .build());
+
+                addParameter(
+                        DefineParam(mOutputColorAspectInfo, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(0u))
+                        .withFields({
+                            C2F(mOutputColorAspectInfo, range).any(),
+                            C2F(mOutputColorAspectInfo, primaries).any(),
+                            C2F(mOutputColorAspectInfo, transfer).any(),
+                            C2F(mOutputColorAspectInfo, matrix).any(),
+                        })
+                        .withSetter(OutputColorAspectsSetter,
+                                    mInputColorAspectInfo,
+                                    mColorAspectRequestInfo)
+                        .build());
+            }
+
+            static C2R OutputBlockPoolSetter(
+                    bool mayBlock,
+                    C2P<C2PortBlockPoolsTuning::output> &me) {
+                (void)mayBlock, (void)me;
+                return C2R::Ok();
+            }
+
+            static C2R HdrStaticInfoSetter(
+                    bool mayBlock,
+                    C2P<C2StreamHdrStaticInfo::input> &me) {
+                (void)mayBlock, (void)me;
+                return C2R::Ok();
+            }
+
+            static C2R InputColorAspectsSetter(
+                    bool mayBlock,
+                    C2P<C2StreamColorAspectsInfo::input> &me) {
+                (void)mayBlock, (void)me;
+                return C2R::Ok();
+            }
+
+            static C2R OutputColorAspectsSetter(
+                    bool mayBlock,
+                    C2P<C2StreamColorAspectsInfo::output> &me,
+                    const C2P<C2StreamColorAspectsInfo::input> &inputColor,
+                    const C2P<C2StreamColorAspectsRequestInfo::output> &request) {
+                (void)mayBlock;
+                me.set().range = inputColor.v.range;
+                me.set().primaries = inputColor.v.primaries;
+                me.set().transfer = inputColor.v.transfer;
+                if (request.v.transfer == C2Color::TRANSFER_170M) {
+                    me.set().transfer = C2Color::TRANSFER_170M;
+                }
+                me.set().matrix = inputColor.v.matrix;
+                return C2R::Ok();
+            }
+
+            static C2R ColorAspectsRequestSetter(
+                    bool mayBlock,
+                    C2P<C2StreamColorAspectsRequestInfo::output> &me) {
+                (void)mayBlock;
+                if (me.v.range != C2Color::RANGE_UNSPECIFIED) {
+                    me.set().range = C2Color::RANGE_UNSPECIFIED;
+                }
+                if (me.v.primaries != C2Color::PRIMARIES_UNSPECIFIED) {
+                    me.set().primaries = C2Color::PRIMARIES_UNSPECIFIED;
+                }
+                if (me.v.transfer != C2Color::TRANSFER_170M) {
+                    me.set().transfer = C2Color::TRANSFER_UNSPECIFIED;
+                }
+                if (me.v.matrix != C2Color::MATRIX_UNSPECIFIED) {
+                    me.set().matrix = C2Color::MATRIX_UNSPECIFIED;
+                }
+                return C2R::Ok();
+            }
+
+            std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
+
+            std::shared_ptr<C2ComponentNameSetting> mName;
+            std::shared_ptr<C2ComponentAliasesSetting> mAliases;
+            std::shared_ptr<C2ComponentKindSetting> mKind;
+            std::shared_ptr<C2ComponentDomainSetting> mDomain;
+
+            std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+            std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+            std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+            std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+
+            std::shared_ptr<C2PortActualDelayTuning::input> mActualInputDelay;
+            std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelay;
+            std::shared_ptr<C2ActualPipelineDelayTuning> mActualPipelineDelay;
+
+            std::shared_ptr<C2PortStreamCountTuning::input> mInputStreamCount;
+            std::shared_ptr<C2PortStreamCountTuning::output> mOutputStreamCount;
+
+            std::shared_ptr<C2PortBlockPoolsTuning::output> mOutputPoolIds;
+
+            std::shared_ptr<C2StreamHdrStaticInfo::input> mInputHdrStaticInfo;
+            std::shared_ptr<C2StreamHdrStaticInfo::output> mOutputHdrStaticInfo;
+            std::shared_ptr<C2StreamColorAspectsInfo::input> mInputColorAspectInfo;
+            std::shared_ptr<C2StreamColorAspectsInfo::output> mOutputColorAspectInfo;
+            std::shared_ptr<C2StreamColorAspectsRequestInfo::output> mColorAspectRequestInfo;
+        } mHelper;
+    };
+
+    explicit SampleToneMappingFilter(c2_node_id_t id)
+        : mIntf(std::make_shared<Interface>(id)) {
+    }
+    ~SampleToneMappingFilter() override {
+        if (mProcessingThread.joinable()) {
+            mProcessingThread.join();
+        }
+    }
+
+    c2_status_t setListener_vb(
+            const std::shared_ptr<Listener> &listener, c2_blocking_t mayBlock) override {
+        std::chrono::steady_clock::time_point deadline = std::chrono::steady_clock::now() + 5ms;
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState == RELEASED) {
+                return C2_BAD_STATE;
+            }
+            if (mState == RUNNING && listener) {
+                return C2_BAD_STATE;
+            }
+            if (mState != STOPPED) {
+                return C2_BAD_STATE;
+            }
+        }
+        std::unique_lock lock(mListenerMutex, std::try_to_lock);
+        if (lock) {
+            mListener = listener;
+            return C2_OK;
+        }
+        if (mayBlock == C2_DONT_BLOCK) {
+            return C2_BLOCKING;
+        }
+        lock.try_lock_until(deadline);
+        if (!lock) {
+            return C2_TIMED_OUT;
+        }
+        mListener = listener;
+        return C2_OK;
+    }
+
+    c2_status_t queue_nb(std::list<std::unique_ptr<C2Work>>* const items) override {
+        if (!items) {
+            return C2_BAD_VALUE;
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState != RUNNING) {
+                return C2_BAD_STATE;
+            }
+        }
+        std::unique_lock lock(mQueueMutex);
+        mQueue.splice(mQueue.end(), *items);
+        return C2_OK;
+    }
+
+    c2_status_t announce_nb(const std::vector<C2WorkOutline> &) override { return C2_OMITTED; }
+
+    c2_status_t flush_sm(
+            flush_mode_t mode,
+            std::list<std::unique_ptr<C2Work>>* const flushedWork) override {
+        if (!flushedWork) {
+            return C2_BAD_VALUE;
+        }
+        if (mode == FLUSH_CHAIN) {
+            return C2_BAD_VALUE;
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState != RUNNING) {
+                return C2_BAD_STATE;
+            }
+        }
+        {
+            std::unique_lock lock(mQueueMutex);
+            mQueue.swap(*flushedWork);
+        }
+        // NOTE: this component does not have internal state to flush.
+        return C2_OK;
+    }
+
+    c2_status_t drain_nb(drain_mode_t mode) override {
+        if (mode == DRAIN_CHAIN) {
+            return C2_BAD_VALUE;
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState != RUNNING) {
+                return C2_BAD_STATE;
+            }
+        }
+        // NOTE: this component does not wait for work items before processing.
+        return C2_OK;
+    }
+
+    c2_status_t start() override {
+        //std::chrono::steady_clock::time_point deadline = std::chrono::steady_clock::now() + 500ms;
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState == STARTING) {
+                return C2_DUPLICATE;
+            }
+            if (mState != STOPPED) {
+                return C2_BAD_STATE;
+            }
+            mState = STARTING;
+        }
+        {
+            std::unique_lock lock(mProcessingMutex);
+            if (!mProcessingThread.joinable()) {
+                mProcessingThread = std::thread([this]() {
+                    processLoop(shared_from_this());
+                });
+            }
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            mState = RUNNING;
+        }
+        return C2_OK;
+    }
+
+    c2_status_t stop() override {
+        //std::chrono::steady_clock::time_point deadline = std::chrono::steady_clock::now() + 500ms;
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState == STOPPING) {
+                return C2_DUPLICATE;
+            }
+            if (mState != RUNNING) {
+                return C2_BAD_STATE;
+            }
+            mState = STOPPING;
+        }
+        {
+            std::unique_lock lock(mQueueMutex);
+            mQueueCondition.notify_all();
+        }
+        {
+            std::unique_lock lock(mProcessingMutex);
+            if (mProcessingThread.joinable()) {
+                mProcessingThread.join();
+            }
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            mState = STOPPED;
+        }
+        return C2_OK;
+    }
+
+    c2_status_t reset() override {
+        //std::chrono::steady_clock::time_point deadline = std::chrono::steady_clock::now() + 500ms;
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState == RESETTING) {
+                return C2_DUPLICATE;
+            }
+            if (mState == RELEASED) {
+                return C2_BAD_STATE;
+            }
+            mState = RESETTING;
+        }
+        {
+            std::unique_lock lock(mQueueMutex);
+            mQueueCondition.notify_all();
+        }
+        {
+            std::unique_lock lock(mProcessingMutex);
+            if (mProcessingThread.joinable()) {
+                mProcessingThread.join();
+            }
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            mState = STOPPED;
+        }
+        return C2_OK;
+    }
+
+    c2_status_t release() override {
+        //std::chrono::steady_clock::time_point deadline = std::chrono::steady_clock::now() + 500ms;
+        {
+            std::unique_lock lock(mStateMutex);
+            if (mState == RELEASED || mState == RELEASING) {
+                return C2_DUPLICATE;
+            }
+            // TODO: return C2_BAD_STATE if not stopped
+            mState = RELEASING;
+        }
+        {
+            std::unique_lock lock(mQueueMutex);
+            mQueueCondition.notify_all();
+        }
+        {
+            std::unique_lock lock(mProcessingMutex);
+            if (mProcessingThread.joinable()) {
+                mProcessingThread.join();
+            }
+        }
+        {
+            std::unique_lock lock(mStateMutex);
+            mState = RELEASED;
+        }
+        return C2_OK;
+    }
+
+    std::shared_ptr<C2ComponentInterface> intf() override {
+        return mIntf;
+    }
+
+private:
+    void processLoop(std::shared_ptr<SampleToneMappingFilter> thiz) {
+        constexpr float kDefaultMaxLuminance = 500.0;
+        constexpr float kDefaultMaxMasteringLuminance = 1000.0;
+        constexpr float kDefaultMaxContentLuminance = 1000.0;
+        constexpr uint32_t kDstUsage =
+                GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+                GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
+
+        int32_t workCount = 0;
+        std::unique_ptr<renderengine::RenderEngine> renderEngine = renderengine::RenderEngine::create(
+                renderengine::RenderEngineCreationArgs::Builder()
+                    .setPixelFormat(static_cast<int>(ui::PixelFormat::RGBA_8888))
+                    .setImageCacheSize(2 /*maxFrameBufferAcquiredBuffers*/)
+                    .setUseColorManagerment(true)
+                    .setEnableProtectedContext(false)
+                    .setPrecacheToneMapperShaderOnly(true)
+                    .setContextPriority(renderengine::RenderEngine::ContextPriority::LOW)
+                    .build());
+        if (!renderEngine) {
+            std::unique_lock lock(mListenerMutex);
+            mListener->onError_nb(thiz, C2_CORRUPTED);
+            return;
+        }
+        uint32_t textureName = 0;
+        renderEngine->genTextures(1, &textureName);
+
+        while (true) {
+            // Before doing anything, verify the state
+            {
+                std::unique_lock lock(mStateMutex);
+                if (mState != RUNNING) {
+                    break;
+                }
+            }
+            // Extract one work item
+            std::unique_ptr<C2Work> work;
+            {
+                std::unique_lock lock(mQueueMutex);
+                if (mQueue.empty()) {
+                    mQueueCondition.wait_for(lock, 1s);
+                }
+                if (mQueue.empty()) {
+                    continue;
+                }
+                mQueue.front().swap(work);
+                mQueue.pop_front();
+                ++workCount;
+            }
+            LOG(VERBOSE) << "work #" << workCount << ": flags=" << work->input.flags
+                    << " timestamp=" << work->input.ordinal.timestamp.peek();
+
+            std::vector<C2Param *> configUpdate;
+            for (const std::unique_ptr<C2Param> &param : work->input.configUpdate) {
+                configUpdate.push_back(param.get());
+            }
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            mIntf->config_vb(configUpdate, C2_MAY_BLOCK, &failures);
+
+            std::shared_ptr<C2StreamHdrStaticInfo::input> hdrStaticInfo =
+                mIntf->getHdrStaticMetadata();
+            uint32_t dataspace = mIntf->getDataSpace();
+
+            std::shared_ptr<C2Buffer> buffer;
+            if (!work->input.buffers.empty()) {
+                buffer = work->input.buffers.front();
+            }
+            std::shared_ptr<C2Buffer> outC2Buffer;
+            status_t err = OK;
+            if (buffer) {
+                if (buffer->hasInfo(C2StreamHdrStaticInfo::output::PARAM_TYPE)) {
+                    std::shared_ptr<const C2Info> info =
+                        buffer->getInfo(C2StreamHdrStaticInfo::output::PARAM_TYPE);
+                    std::unique_ptr<C2Param> flipped = C2Param::CopyAsStream(
+                            *info, false /* output */, info->stream());
+                    hdrStaticInfo.reset(static_cast<C2StreamHdrStaticInfo::input *>(
+                            flipped.release()));
+                }
+                const C2Handle *c2Handle =
+                    buffer->data().graphicBlocks().front().handle();
+                uint32_t width, height, format, stride, igbp_slot, generation;
+                uint64_t usage, igbp_id;
+                _UnwrapNativeCodec2GrallocMetadata(
+                        c2Handle, &width, &height, &format, &usage, &stride, &generation,
+                        &igbp_id, &igbp_slot);
+                native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(c2Handle);
+                sp<GraphicBuffer> srcBuffer = new GraphicBuffer(
+                        grallocHandle, GraphicBuffer::CLONE_HANDLE,
+                        width, height, format, 1, usage, stride);
+
+                std::shared_ptr<C2GraphicBlock> dstBlock;
+                C2BlockPool::local_id_t poolId = mIntf->getPoolId();
+                std::shared_ptr<C2BlockPool> pool;
+                GetCodec2BlockPool(poolId, thiz, &pool);
+                pool->fetchGraphicBlock(
+                        width, height, HAL_PIXEL_FORMAT_RGBA_8888, C2AndroidMemoryUsage::FromGrallocUsage(kDstUsage),
+                        &dstBlock);
+                outC2Buffer = C2Buffer::CreateGraphicBuffer(
+                        dstBlock->share(C2Rect(width, height), C2Fence()));
+                c2Handle = dstBlock->handle();
+                _UnwrapNativeCodec2GrallocMetadata(
+                        c2Handle, &width, &height, &format, &usage, &stride, &generation,
+                        &igbp_id, &igbp_slot);
+                grallocHandle = UnwrapNativeCodec2GrallocHandle(c2Handle);
+                sp<GraphicBuffer> dstBuffer = new GraphicBuffer(
+                        grallocHandle, GraphicBuffer::CLONE_HANDLE,
+                        width, height, format, 1, usage, stride);
+
+                Rect sourceCrop(0, 0, width, height);
+
+                renderengine::DisplaySettings clientCompositionDisplay;
+                std::vector<const renderengine::LayerSettings*> clientCompositionLayers;
+
+                clientCompositionDisplay.physicalDisplay = sourceCrop;
+                clientCompositionDisplay.clip = sourceCrop;
+
+                clientCompositionDisplay.outputDataspace = ui::Dataspace::V0_SRGB;
+                clientCompositionDisplay.maxLuminance = kDefaultMaxLuminance;
+                clientCompositionDisplay.clearRegion = Region::INVALID_REGION;
+                renderengine::LayerSettings layerSettings;
+                layerSettings.geometry.boundaries = sourceCrop.toFloatRect();
+                layerSettings.alpha = 1.0f;
+
+                layerSettings.sourceDataspace = static_cast<ui::Dataspace>(dataspace);
+
+                // from BufferLayer
+                layerSettings.source.buffer.buffer = srcBuffer;
+                layerSettings.source.buffer.isOpaque = true;
+                // TODO: fence
+                layerSettings.source.buffer.fence = Fence::NO_FENCE;
+                layerSettings.source.buffer.textureName = textureName;
+                layerSettings.source.buffer.usePremultipliedAlpha = false;
+                layerSettings.source.buffer.isY410BT2020 =
+                    (layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
+                     layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG) &&
+                    format == HAL_PIXEL_FORMAT_RGBA_1010102;
+                layerSettings.source.buffer.maxMasteringLuminance =
+                    (hdrStaticInfo && *hdrStaticInfo &&
+                     hdrStaticInfo->mastering.maxLuminance > 0 &&
+                     hdrStaticInfo->mastering.minLuminance > 0)
+                        ? hdrStaticInfo->mastering.maxLuminance : kDefaultMaxMasteringLuminance;
+                layerSettings.source.buffer.maxContentLuminance =
+                    (hdrStaticInfo && *hdrStaticInfo && hdrStaticInfo->maxCll > 0)
+                        ? hdrStaticInfo->maxCll : kDefaultMaxContentLuminance;
+
+                // Set filtering to false since the capture itself doesn't involve
+                // any scaling; the metadata retriever JNI scales the bitmap if the
+                // display size differs from the decoded size. If that scaling ever
+                // needs to be handled on the server side, consider enabling this
+                // based on display size vs. decoded size.
+                layerSettings.source.buffer.useTextureFiltering = false;
+                layerSettings.source.buffer.textureTransform = mat4();
+                clientCompositionLayers.push_back(&layerSettings);
+
+                // Use an empty fence for the buffer fence, since we just created the buffer so
+                // there is no need for synchronization with the GPU.
+                base::unique_fd bufferFence;
+                base::unique_fd drawFence;
+                renderEngine->useProtectedContext(false);
+                err = renderEngine->drawLayers(
+                        clientCompositionDisplay, clientCompositionLayers, dstBuffer.get(),
+                        /*useFramebufferCache=*/false, std::move(bufferFence), &drawFence);
+
+                sp<Fence> fence = new Fence(std::move(drawFence));
+
+                // Waiting for the fence and sending the result back could be moved
+                // to a separate thread to improve efficiency, but it is kept here
+                // for simplicity.
+                if (err != OK) {
+                    LOG(ERROR) << "drawLayers returned err " << err;
+                } else {
+                    err = fence->wait(500);
+                    if (err != OK) {
+                        LOG(WARNING) << "wait for fence returned err " << err;
+                    }
+                }
+                renderEngine->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+            }
+
+            work->worklets.front()->output.ordinal = work->input.ordinal;
+            work->worklets.front()->output.flags = work->input.flags;
+            if (err == OK) {
+                work->workletsProcessed = 1;
+                if (outC2Buffer) {
+                    work->worklets.front()->output.buffers.push_back(outC2Buffer);
+                }
+                work->result = C2_OK;
+            } else {
+                work->result = C2_CORRUPTED;
+            }
+            std::list<std::unique_ptr<C2Work>> items;
+            items.push_back(std::move(work));
+
+            std::unique_lock lock(mListenerMutex);
+            mListener->onWorkDone_nb(thiz, std::move(items));
+            LOG(VERBOSE) << "sent work #" << workCount;
+        }
+    }
+
+    mutable std::timed_mutex mListenerMutex;
+    std::shared_ptr<Listener> mListener;
+
+    mutable std::mutex mQueueMutex;
+    mutable std::condition_variable mQueueCondition;
+    std::list<std::unique_ptr<C2Work>> mQueue;
+
+    const std::shared_ptr<Interface> mIntf;
+
+    mutable std::mutex mStateMutex;
+    enum State {
+        STOPPED,
+        RUNNING,
+        RELEASED,
+        STARTING,   // STOPPED -> RUNNING
+        STOPPING,   // RUNNING -> STOPPED
+        RESETTING,  // <<ANY>> -> STOPPED
+        RELEASING,  // STOPPED -> RELEASED
+    } mState;
+
+    mutable std::mutex mProcessingMutex;
+    std::thread mProcessingThread;
+
+};
+
+// static
+const std::string SampleToneMappingFilter::Interface::NAME = "c2.sample.tone-mapper";
+// static
+const FilterPlugin_V1::Descriptor SampleToneMappingFilter::Interface::DESCRIPTOR = {
+    // controlParams
+    { C2StreamColorAspectsRequestInfo::output::PARAM_TYPE },
+    // affectedParams
+    {
+        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+    },
+};
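Editorial aside, not part of the patch: the sketch below shows how a host might drive the lifecycle implemented above (setListener_vb, start, queue_nb, stop). The NoopListener class and runFilterOnce helper are hypothetical names introduced only for illustration.

// Illustrative sketch only -- not part of the patch.
#include <C2Component.h>
#include <C2Work.h>

namespace android {

struct NoopListener : public C2Component::Listener {
    void onWorkDone_nb(std::weak_ptr<C2Component>,
                       std::list<std::unique_ptr<C2Work>>) override {}
    void onTripped_nb(std::weak_ptr<C2Component>,
                      std::vector<std::shared_ptr<C2SettingResult>>) override {}
    void onError_nb(std::weak_ptr<C2Component>, uint32_t) override {}
};

c2_status_t runFilterOnce(const std::shared_ptr<C2Component> &filter,
                          std::list<std::unique_ptr<C2Work>> *workItems) {
    // The listener must be set while the component is STOPPED.
    c2_status_t err = filter->setListener_vb(
            std::make_shared<NoopListener>(), C2_MAY_BLOCK);
    if (err != C2_OK) return err;
    err = filter->start();              // STOPPED -> RUNNING; spawns processLoop()
    if (err != C2_OK) return err;
    err = filter->queue_nb(workItems);  // picked up by processLoop()
    filter->stop();                     // RUNNING -> STOPPED; joins the thread
    return err;
}

}  // namespace android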
+
+class SampleC2ComponentStore : public C2ComponentStore {
+public:
+    SampleC2ComponentStore()
+        : mReflector(std::make_shared<C2ReflectorHelper>()),
+          mIntf(mReflector),
+          mFactories(CreateFactories()) {
+    }
+    ~SampleC2ComponentStore() = default;
+
+    C2String getName() const override { return "android.sample.filter-plugin-store"; }
+    c2_status_t createComponent(
+            C2String name, std::shared_ptr<C2Component>* const component) override {
+        if (mFactories.count(name) == 0) {
+            return C2_BAD_VALUE;
+        }
+        return mFactories.at(name)->createComponent(++mNodeId, component);
+    }
+    c2_status_t createInterface(
+            C2String name, std::shared_ptr<C2ComponentInterface>* const interface) override {
+        if (mFactories.count(name) == 0) {
+            return C2_BAD_VALUE;
+        }
+        return mFactories.at(name)->createInterface(++mNodeId, interface);
+    }
+    std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() override {
+        std::vector<std::shared_ptr<const C2Component::Traits>> ret;
+        for (const auto &[name, factory] : mFactories) {
+            ret.push_back(factory->getTraits());
+        }
+        return ret;
+    }
+    c2_status_t copyBuffer(
+            std::shared_ptr<C2GraphicBuffer>, std::shared_ptr<C2GraphicBuffer>) override {
+        return C2_OMITTED;
+    }
+    c2_status_t query_sm(
+            const std::vector<C2Param*> &stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override {
+        return mIntf.query(stackParams, heapParamIndices, C2_MAY_BLOCK, heapParams);
+    }
+    c2_status_t config_sm(
+            const std::vector<C2Param*> &params,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override {
+        return mIntf.config(params, C2_MAY_BLOCK, failures);
+    }
+    std::shared_ptr<C2ParamReflector> getParamReflector() const override {
+        return mReflector;
+    }
+    c2_status_t querySupportedParams_nb(
+            std::vector<std::shared_ptr<C2ParamDescriptor>> * const params) const override {
+        return mIntf.querySupportedParams(params);
+    }
+    c2_status_t querySupportedValues_sm(
+            std::vector<C2FieldSupportedValuesQuery> &fields) const override {
+        return mIntf.querySupportedValues(fields, C2_MAY_BLOCK);
+    }
+
+private:
+    class ComponentFactory {
+    public:
+        virtual ~ComponentFactory() = default;
+
+        const std::shared_ptr<const C2Component::Traits> &getTraits() { return mTraits; }
+
+        virtual c2_status_t createComponent(
+                c2_node_id_t id,
+                std::shared_ptr<C2Component>* const component) const = 0;
+        virtual c2_status_t createInterface(
+                c2_node_id_t id,
+                std::shared_ptr<C2ComponentInterface>* const interface) const = 0;
+    protected:
+        ComponentFactory(const std::shared_ptr<const C2Component::Traits> &traits)
+            : mTraits(traits) {
+        }
+    private:
+        const std::shared_ptr<const C2Component::Traits> mTraits;
+    };
+
+    template <class T>
+    struct ComponentFactoryImpl : public ComponentFactory {
+    public:
+        ComponentFactoryImpl(const std::shared_ptr<const C2Component::Traits> &traits)
+            : ComponentFactory(traits) {
+        }
+        ~ComponentFactoryImpl() override = default;
+        c2_status_t createComponent(
+                c2_node_id_t id,
+                std::shared_ptr<C2Component>* const component) const override {
+            *component = std::make_shared<T>(id);
+            return C2_OK;
+        }
+        c2_status_t createInterface(
+                c2_node_id_t id,
+                std::shared_ptr<C2ComponentInterface>* const interface) const override {
+            *interface = std::make_shared<typename T::Interface>(id);
+            return C2_OK;
+        }
+    };
+
+    template <class T>
+    static void AddFactory(std::map<C2String, std::unique_ptr<ComponentFactory>> *factories) {
+        std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0)};
+        std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
+        CHECK(C2InterfaceUtils::FillTraitsFromInterface(traits.get(), intf))
+                << "Failed to fill traits from interface";
+        factories->emplace(traits->name, new ComponentFactoryImpl<T>(traits));
+    }
+
+    static std::map<C2String, std::unique_ptr<ComponentFactory>> CreateFactories() {
+        std::map<C2String, std::unique_ptr<ComponentFactory>> factories;
+        AddFactory<SampleToneMappingFilter>(&factories);
+        return factories;
+    }
+
+
+    std::shared_ptr<C2ReflectorHelper> mReflector;
+    struct Interface : public C2InterfaceHelper {
+        explicit Interface(std::shared_ptr<C2ReflectorHelper> reflector)
+            : C2InterfaceHelper(reflector) {
+        }
+    } mIntf;
+
+    const std::map<C2String, std::unique_ptr<ComponentFactory>> mFactories;
+
+    std::atomic_int32_t mNodeId{0};
+};
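Editorial aside, not part of the patch: the store above follows the standard C2ComponentStore contract, so it can be exercised as sketched below. The function name is hypothetical and the code assumes it lives in the same translation unit as the classes above.

// Illustrative sketch only -- not part of the patch.
std::shared_ptr<C2Component> createSampleToneMapper() {
    std::shared_ptr<C2ComponentStore> store =
            std::make_shared<SampleC2ComponentStore>();
    for (const std::shared_ptr<const C2Component::Traits> &traits :
            store->listComponents()) {
        LOG(VERBOSE) << "store exposes " << traits->name;
    }
    std::shared_ptr<C2Component> component;
    // "c2.sample.tone-mapper" is SampleToneMappingFilter::Interface::NAME.
    if (store->createComponent("c2.sample.tone-mapper", &component) != C2_OK) {
        return nullptr;
    }
    return component;
}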
+
+class SampleFilterPlugin : public FilterPlugin_V1 {
+public:
+    SampleFilterPlugin() : mStore(new SampleC2ComponentStore) {}
+    ~SampleFilterPlugin() override = default;
+
+    std::shared_ptr<C2ComponentStore> getComponentStore() override {
+        return mStore;
+    }
+
+    bool describe(C2String name, Descriptor *desc) override {
+        if (name == SampleToneMappingFilter::Interface::NAME) {
+            *desc = SampleToneMappingFilter::Interface::DESCRIPTOR;
+            return true;
+        }
+        return false;
+    }
+
+    bool isFilteringEnabled(const std::shared_ptr<C2ComponentInterface> &intf) override {
+        if (intf->getName() == SampleToneMappingFilter::Interface::NAME) {
+            return SampleToneMappingFilter::Interface::IsFilteringEnabled(intf);
+        }
+        return false;
+    }
+
+    c2_status_t queryParamsForPreviousComponent(
+            const std::shared_ptr<C2ComponentInterface> &intf,
+            std::vector<std::unique_ptr<C2Param>> *params) override {
+        if (intf->getName() == SampleToneMappingFilter::Interface::NAME) {
+            return SampleToneMappingFilter::Interface::QueryParamsForPreviousComponent(
+                    intf, params);
+        }
+        return C2_BAD_VALUE;
+    }
+
+private:
+    std::shared_ptr<C2ComponentStore> mStore;
+};
+
+}  // namespace android
+
+extern "C" {
+
+int32_t GetFilterPluginVersion() {
+    return ::android::SampleFilterPlugin::VERSION;
+}
+
+void *CreateFilterPlugin() {
+    return new ::android::SampleFilterPlugin;
+}
+
+void DestroyFilterPlugin(void *plugin) {
+    delete (::android::SampleFilterPlugin *)plugin;
+}
+
+}  // extern "C"
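Editorial aside, not part of the patch: how a host process consumes these C entry points is outside this change; a plausible loading sequence, sketched here with a hypothetical library name, uses the standard dynamic loader.

// Illustrative sketch only -- not part of the patch; the library name is hypothetical.
#include <dlfcn.h>
#include <cstdint>

static void *loadSampleFilterPlugin() {
    void *handle = dlopen("libsamplefilterplugin.so", RTLD_NOW | RTLD_LOCAL);
    if (handle == nullptr) {
        return nullptr;
    }
    auto getVersion =
            reinterpret_cast<int32_t (*)()>(dlsym(handle, "GetFilterPluginVersion"));
    auto create = reinterpret_cast<void *(*)()>(dlsym(handle, "CreateFilterPlugin"));
    if (getVersion == nullptr || create == nullptr) {
        dlclose(handle);
        return nullptr;
    }
    // Version gating is up to the host; the plugin above reports FilterPlugin_V1::VERSION.
    (void)getVersion();
    return create();  // released later via "DestroyFilterPlugin"
}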
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index a16b106..bb9f51f 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -29,12 +29,21 @@
 // The seccomp_policy file name and its content can be modified, but note that
 // vendor.cpp also needs to be updated because it needs the absolute path to the
 // seccomp policy file on the device.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_binary {
-    name: "android.hardware.media.c2@1.1-default-service",
+    name: "android.hardware.media.c2@1.2-default-service",
     vendor: true,
     relative_install_path: "hw",
 
-    init_rc: ["android.hardware.media.c2@1.1-default-service.rc"],
+    init_rc: ["android.hardware.media.c2@1.2-default-service.rc"],
 
     defaults: ["libcodec2-hidl-defaults"],
     srcs: [
@@ -46,12 +55,15 @@
         "libavservices_minijail_vendor",
         "libbinder",
     ],
-    required: ["android.hardware.media.c2@1.1-default-seccomp_policy"],
+    required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
 
     // The content in manifest_media_c2_V1_1_default.xml can be included
     // directly in the main device manifest.xml file or via vintf_fragments.
     // (Remove the line below if the entry is already in the main manifest.)
     vintf_fragments: ["manifest_media_c2_V1_1_default.xml"],
+
+    // Remove this line to enable this module.
+    enabled: false,
 }
 
 // seccomp policy file.
@@ -61,27 +73,26 @@
 // Files in the "seccomp_policy" subdirectory are only provided as examples.
 // They may not work on some devices and/or architectures without modification.
 prebuilt_etc {
-    name: "android.hardware.media.c2@1.1-default-seccomp_policy",
+    name: "android.hardware.media.c2@1.2-default-seccomp_policy",
     vendor: true,
     sub_dir: "seccomp_policy",
 
     // If a specific architecture is targeted, multiple choices are not needed.
     arch: {
         arm: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy",
         },
         arm64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy",
         },
         x86: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy",
         },
         x86_64: {
-            src: "seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy",
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy",
         },
     },
 
     // This may be removed.
     required: ["crash_dump.policy"],
 }
-
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
deleted file mode 100644
index 44f2d8e..0000000
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.1-default-service.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service android-hardware-media-c2-hal-1-1 /vendor/bin/hw/android.hardware.media.c2@1.1-default-service
-    class hal
-    user mediacodec
-    group camera mediadrm drmrpc
-    ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
-
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
new file mode 100644
index 0000000..03f6e3d
--- /dev/null
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -0,0 +1,7 @@
+service android-hardware-media-c2-hal-1-2 /vendor/bin/hw/android.hardware.media.c2@1.2-default-service
+    class hal
+    user mediacodec
+    group camera mediadrm drmrpc
+    ioprio rt 4
+    writepid /dev/cpuset/foreground/tasks
+
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
new file mode 100644
index 0000000..a5e8d87
--- /dev/null
+++ b/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
@@ -0,0 +1,11 @@
+<manifest version="1.0" type="device">
+    <hal>
+        <name>android.hardware.media.c2</name>
+        <transport>hwbinder</transport>
+        <version>1.2</version>
+        <interface>
+            <name>IComponentStore</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
deleted file mode 100644
index 4faf8b2..0000000
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm64.policy
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-futex: 1
-# ioctl calls are filtered via the selinux policy.
-ioctl: 1
-sched_yield: 1
-close: 1
-dup: 1
-ppoll: 1
-mprotect: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
-mmap: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
-getuid: 1
-getrlimit: 1
-fstat: 1
-newfstatat: 1
-fstatfs: 1
-memfd_create: 1
-ftruncate: 1
-
-# mremap: Ensure |flags| are (MREMAP_MAYMOVE | MREMAP_FIXED) TODO: Once minijail
-# parser support for '<' is in this needs to be modified to also prevent
-# |old_address| and |new_address| from touching the exception vector page, which
-# on ARM is statically loaded at 0xffff 0000. See
-# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
-# for more details.
-mremap: arg3 == 3
-munmap: 1
-prctl: 1
-writev: 1
-sigaltstack: 1
-clone: 1
-exit: 1
-lseek: 1
-rt_sigprocmask: 1
-openat: 1
-write: 1
-nanosleep: 1
-setpriority: 1
-set_tid_address: 1
-getdents64: 1
-readlinkat: 1
-read: 1
-pread64: 1
-gettimeofday: 1
-faccessat: 1
-exit_group: 1
-restart_syscall: 1
-rt_sigreturn: 1
-getrandom: 1
-madvise: 1
-
-# crash dump policy additions
-clock_gettime: 1
-getpid: 1
-gettid: 1
-pipe2: 1
-recvmsg: 1
-process_vm_readv: 1
-tgkill: 1
-rt_sigaction: 1
-rt_tgsigqueueinfo: 1
-#mprotect: arg2 in 0x1|0x2
-munmap: 1
-#mmap: arg2 in 0x1|0x2
-geteuid: 1
-getgid: 1
-getegid: 1
-getgroups: 1
-
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
deleted file mode 100644
index d9c4045..0000000
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86.policy
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-read: 1
-mprotect: 1
-prctl: 1
-openat: 1
-open: 1
-getuid32: 1
-getuid: 1
-getrlimit: 1
-writev: 1
-ioctl: 1
-close: 1
-mmap2: 1
-mmap: 1
-fstat64: 1
-fstat: 1
-stat64: 1
-statfs64: 1
-madvise: 1
-fstatat64: 1
-newfstatat: 1
-futex: 1
-munmap: 1
-faccessat: 1
-_llseek: 1
-lseek: 1
-clone: 1
-sigaltstack: 1
-setpriority: 1
-restart_syscall: 1
-exit: 1
-exit_group: 1
-rt_sigreturn: 1
-ugetrlimit: 1
-readlink: 1
-readlinkat: 1
-_llseek: 1
-fstatfs64: 1
-fstatfs: 1
-pread64: 1
-mremap: 1
-dup: 1
-set_tid_address: 1
-write: 1
-nanosleep: 1
-sched_setscheduler: 1
-uname: 1
-memfd_create: 1
-ftruncate: 1
-ftruncate64: 1
-
-# Required by AddressSanitizer
-gettid: 1
-sched_yield: 1
-getpid: 1
-gettid: 1
-
-@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
deleted file mode 100644
index d9c4045..0000000
--- a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-x86_64.policy
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-read: 1
-mprotect: 1
-prctl: 1
-openat: 1
-open: 1
-getuid32: 1
-getuid: 1
-getrlimit: 1
-writev: 1
-ioctl: 1
-close: 1
-mmap2: 1
-mmap: 1
-fstat64: 1
-fstat: 1
-stat64: 1
-statfs64: 1
-madvise: 1
-fstatat64: 1
-newfstatat: 1
-futex: 1
-munmap: 1
-faccessat: 1
-_llseek: 1
-lseek: 1
-clone: 1
-sigaltstack: 1
-setpriority: 1
-restart_syscall: 1
-exit: 1
-exit_group: 1
-rt_sigreturn: 1
-ugetrlimit: 1
-readlink: 1
-readlinkat: 1
-_llseek: 1
-fstatfs64: 1
-fstatfs: 1
-pread64: 1
-mremap: 1
-dup: 1
-set_tid_address: 1
-write: 1
-nanosleep: 1
-sched_setscheduler: 1
-uname: 1
-memfd_create: 1
-ftruncate: 1
-ftruncate64: 1
-
-# Required by AddressSanitizer
-gettid: 1
-sched_yield: 1
-getpid: 1
-gettid: 1
-
-@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.1-default-arm.policy
rename to media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
new file mode 100644
index 0000000..5d0284f
--- /dev/null
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
@@ -0,0 +1,81 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+futex: 1
+# ioctl calls are filtered via the selinux policy.
+ioctl: 1
+sched_yield: 1
+close: 1
+dup: 1
+ppoll: 1
+mprotect: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+mmap: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+getuid: 1
+getrlimit: 1
+fstat: 1
+newfstatat: 1
+fstatfs: 1
+memfd_create: 1
+ftruncate: 1
+
+# mremap: Ensure |flags| are MREMAP_MAYMOVE or (MREMAP_MAYMOVE | MREMAP_FIXED).
+# TODO: Once minijail parser support for '<' is in, this needs to be modified to
+# also prevent |old_address| and |new_address| from touching the exception vector
+# page, which on ARM is statically loaded at 0xffff 0000. See
+# http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0211h/Babfeega.html
+# for more details.
+mremap: arg3 == 3 || arg3 == MREMAP_MAYMOVE
+munmap: 1
+prctl: 1
+writev: 1
+sigaltstack: 1
+clone: 1
+exit: 1
+lseek: 1
+rt_sigprocmask: 1
+openat: 1
+write: 1
+nanosleep: 1
+setpriority: 1
+set_tid_address: 1
+getdents64: 1
+readlinkat: 1
+read: 1
+pread64: 1
+gettimeofday: 1
+faccessat: 1
+exit_group: 1
+restart_syscall: 1
+rt_sigreturn: 1
+getrandom: 1
+madvise: 1
+
+# crash dump policy additions
+clock_gettime: 1
+getpid: 1
+gettid: 1
+pipe2: 1
+recvmsg: 1
+process_vm_readv: 1
+tgkill: 1
+rt_sigaction: 1
+rt_tgsigqueueinfo: 1
+#mprotect: arg2 in 0x1|0x2
+munmap: 1
+#mmap: arg2 in 0x1|0x2
+geteuid: 1
+getgid: 1
+getegid: 1
+getgroups: 1
+
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
new file mode 100644
index 0000000..056c690
--- /dev/null
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
@@ -0,0 +1,71 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+read: 1
+mprotect: 1
+prctl: 1
+openat: 1
+open: 1
+getuid32: 1
+getuid: 1
+getrlimit: 1
+writev: 1
+ioctl: 1
+close: 1
+mmap2: 1
+mmap: 1
+fstat64: 1
+fstat: 1
+stat64: 1
+statfs64: 1
+madvise: 1
+fstatat64: 1
+newfstatat: 1
+futex: 1
+munmap: 1
+faccessat: 1
+_llseek: 1
+lseek: 1
+clone: 1
+sigaltstack: 1
+setpriority: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+ugetrlimit: 1
+readlink: 1
+readlinkat: 1
+_llseek: 1
+fstatfs64: 1
+fstatfs: 1
+pread64: 1
+mremap: 1
+dup: 1
+set_tid_address: 1
+write: 1
+nanosleep: 1
+sched_setscheduler: 1
+uname: 1
+memfd_create: 1
+ftruncate: 1
+ftruncate64: 1
+
+# Required by AddressSanitizer
+gettid: 1
+sched_yield: 1
+getpid: 1
+gettid: 1
+
+@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
new file mode 100644
index 0000000..056c690
--- /dev/null
+++ b/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
@@ -0,0 +1,71 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+read: 1
+mprotect: 1
+prctl: 1
+openat: 1
+open: 1
+getuid32: 1
+getuid: 1
+getrlimit: 1
+writev: 1
+ioctl: 1
+close: 1
+mmap2: 1
+mmap: 1
+fstat64: 1
+fstat: 1
+stat64: 1
+statfs64: 1
+madvise: 1
+fstatat64: 1
+newfstatat: 1
+futex: 1
+munmap: 1
+faccessat: 1
+_llseek: 1
+lseek: 1
+clone: 1
+sigaltstack: 1
+setpriority: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+ugetrlimit: 1
+readlink: 1
+readlinkat: 1
+_llseek: 1
+fstatfs64: 1
+fstatfs: 1
+pread64: 1
+mremap: 1
+dup: 1
+set_tid_address: 1
+write: 1
+nanosleep: 1
+sched_setscheduler: 1
+uname: 1
+memfd_create: 1
+ftruncate: 1
+ftruncate64: 1
+
+# Required by AddressSanitizer
+gettid: 1
+sched_yield: 1
+getpid: 1
+gettid: 1
+
+@include /system/etc/seccomp_policy/crash_dump.x86.policy
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 81bffeb..0d0684d 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -15,11 +15,11 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "android.hardware.media.c2@1.1-service"
+#define LOG_TAG "android.hardware.media.c2@1.2-service"
 
 #include <android-base/logging.h>
 #include <binder/ProcessState.h>
-#include <codec2/hidl/1.1/ComponentStore.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
 #include <hidl/HidlTransportSupport.h>
 #include <minijail.h>
 
@@ -31,13 +31,13 @@
 // "android.hardware.media.c2@1.1-default-seccomp_policy" in Android.bp.
 static constexpr char kBaseSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-default-seccomp-policy";
+        "android.hardware.media.c2@1.2-default-seccomp-policy";
 
 // Additional seccomp permissions can be added in this file.
 // This file does not exist by default.
 static constexpr char kExtSeccompPolicyPath[] =
         "/vendor/etc/seccomp_policy/"
-        "android.hardware.media.c2@1.1-extended-seccomp-policy";
+        "android.hardware.media.c2@1.2-extended-seccomp-policy";
 
 class StoreImpl : public C2ComponentStore {
 public:
@@ -122,6 +122,18 @@
                 })
                 .withSetter(SetIonUsage)
                 .build());
+
+            addParameter(
+                DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+                .withDefault(new C2StoreDmaBufUsageInfo())
+                .withFields({
+                    C2F(mDmaBufUsageInfo, usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+                    C2F(mDmaBufUsageInfo, capacity).inRange(0, UINT32_MAX, 1024),
+                    C2F(mDmaBufUsageInfo, heapName).any(),
+                    C2F(mDmaBufUsageInfo, allocFlags).flags({}),
+                })
+                .withSetter(SetDmaBufUsage)
+                .build());
         }
 
         virtual ~Interface() = default;
@@ -135,7 +147,16 @@
             return C2R::Ok();
         }
 
+        static C2R SetDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+            // Vendor's TODO: put appropriate mapping logic
+            strncpy(me.set().m.heapName, "system", me.v.flexCount());
+            me.set().m.allocFlags = 0;
+            return C2R::Ok();
+        }
+
+
         std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+        std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
     };
     std::shared_ptr<C2ReflectorHelper> mReflectorHelper;
     Interface mInterface;
@@ -143,7 +164,7 @@
 
 int main(int /* argc */, char** /* argv */) {
     using namespace ::android;
-    LOG(DEBUG) << "android.hardware.media.c2@1.1-service starting...";
+    LOG(DEBUG) << "android.hardware.media.c2@1.2-service starting...";
 
     // Set up minijail to limit system calls.
     signal(SIGPIPE, SIG_IGN);
@@ -159,7 +180,7 @@
 
     // Create IComponentStore service.
     {
-        using namespace ::android::hardware::media::c2::V1_1;
+        using namespace ::android::hardware::media::c2::V1_2;
         sp<IComponentStore> store;
 
         // TODO: Replace this with
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 94034b5..2bc748f 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
@@ -11,6 +20,7 @@
         "CCodecConfig.cpp",
         "Codec2Buffer.cpp",
         "Codec2InfoBuilder.cpp",
+        "FrameReassembler.cpp",
         "PipelineWatcher.cpp",
         "ReflectedParamUpdater.cpp",
     ],
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index c7588e9..c049187 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -25,6 +25,7 @@
 #include <C2AllocatorGralloc.h>
 #include <C2BlockInternal.h>
 #include <C2Component.h>
+#include <C2Config.h>
 #include <C2PlatformSupport.h>
 
 #include <OMX_Component.h>
@@ -32,18 +33,22 @@
 #include <OMX_IndexExt.h>
 
 #include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Thread.h>
 
+#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
 
 namespace android {
 
 namespace {
 
+constexpr OMX_U32 kPortIndexInput = 0;
+
 class Buffer2D : public C2Buffer {
 public:
     explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
@@ -68,6 +73,27 @@
         jobs->cond.broadcast();
     }
 
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard;
+        int32_t transfer;
+        int32_t range;
+        ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
+    void setPriority(int priority) {
+        androidSetThreadPriority(getTid(), priority);
+    }
+
 protected:
     bool threadLoop() override {
         constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
@@ -99,6 +125,9 @@
                     uniqueFds.push_back(std::move(queue.workList.front().fd1));
                     queue.workList.pop_front();
                 }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
 
                 jobs.unlock();
                 for (int fenceFd : fenceFds) {
@@ -116,6 +145,7 @@
                 queued = true;
             }
             if (queued) {
+                jobs->configUpdate.clear();
                 return true;
             }
             if (i == 0) {
@@ -158,6 +188,7 @@
         std::map<std::weak_ptr<Codec2Client::Component>,
                  Queue,
                  std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
         Condition cond;
     };
     Mutexed<Jobs> mJobs;
@@ -169,6 +200,9 @@
       mQueueThread(new QueueThread) {
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = HAL_DATASPACE_UNKNOWN;
 }
 
 status_t C2OMXNode::freeNode() {
@@ -200,11 +234,35 @@
                 return BAD_VALUE;
             }
             OMX_PARAM_PORTDEFINITIONTYPE *pDef = (OMX_PARAM_PORTDEFINITIONTYPE *)params;
-            // TODO: read these from intf()
+            if (pDef->nPortIndex != kPortIndexInput) {
+                break;
+            }
+
             pDef->nBufferCountActual = 16;
+
+            // WORKAROUND: having more slots improves performance while consuming
+            // more memory. This is a temporary workaround to reduce memory usage
+            // in the larger-than-4K scenario.
+            if (mWidth * mHeight > 4096 * 2340) {
+                std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+                C2PortActualDelayTuning::input inputDelay(0);
+                C2ActualPipelineDelayTuning pipelineDelay(0);
+                c2_status_t c2err = C2_NOT_FOUND;
+                if (comp) {
+                    c2err = comp->query(
+                            {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+                }
+                if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+                    pDef->nBufferCountActual = 4;
+                    pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
+                    pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+                }
+            }
+
             pDef->eDomain = OMX_PortDomainVideo;
             pDef->format.video.nFrameWidth = mWidth;
             pDef->format.video.nFrameHeight = mHeight;
+            pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
             err = OK;
             break;
         }
@@ -365,6 +423,8 @@
         if (err != OK) {
             (void)fd0.release();
             (void)fd1.release();
+            native_handle_close(handle);
+            native_handle_delete(handle);
             return UNKNOWN_ERROR;
         }
         block = _C2BlockFactory::CreateGraphicBlock(alloc);
@@ -432,8 +492,11 @@
     android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
     uint32_t pixelFormat = msg.u.event_data.data3;
 
-    // TODO: set dataspace on component to see if it impacts color aspects
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    mQueueThread->setDataspace(dataSpace);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = dataSpace;
     return OK;
 }
 
@@ -466,4 +529,12 @@
     (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
 }
 
+android_dataspace C2OMXNode::getDataspace() {
+    return *mDataspace.lock();
+}
+
+void C2OMXNode::setPriority(int priority) {
+    mQueueThread->setPriority(priority);
+}
+
 }  // namespace android
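Editorial aside, not part of the patch: setDataspace() above converts a dataspace into Codec2 color aspects through ColorUtils and C2Mapper. The standalone table below is only a rough illustration of that idea for two common legacy dataspaces; it is an assumption for clarity, not the mapping the framework actually uses.

// Standalone illustration only -- not the framework's mapping.
#include <C2Config.h>
#include <system/graphics.h>

struct IllustrativeColorAspects {
    C2Color::range_t range;
    C2Color::primaries_t primaries;
    C2Color::transfer_t transfer;
    C2Color::matrix_t matrix;
};

static IllustrativeColorAspects mapLegacyDataspace(android_dataspace ds) {
    switch (ds) {
    case HAL_DATASPACE_BT709:      // BT.709 primaries/matrix, SMPTE 170M transfer
        return { C2Color::RANGE_LIMITED, C2Color::PRIMARIES_BT709,
                 C2Color::TRANSFER_170M, C2Color::MATRIX_BT709 };
    case HAL_DATASPACE_BT601_625:  // PAL BT.601
        return { C2Color::RANGE_LIMITED, C2Color::PRIMARIES_BT601_625,
                 C2Color::TRANSFER_170M, C2Color::MATRIX_BT601 };
    default:
        return { C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED };
    }
}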
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 1717c96..6669318 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,16 @@
      */
     void onInputBufferDone(c2_cntr64_t index);
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    android_dataspace getDataspace();
+
+    /**
+     * Sets priority of the queue thread.
+     */
+    void setPriority(int priority);
+
 private:
     std::weak_ptr<Codec2Client::Component> mComp;
     sp<IOMXBufferSource> mBufferSource;
@@ -101,6 +111,7 @@
     uint32_t mWidth;
     uint32_t mHeight;
     uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 1972d3f..16398a4 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -38,12 +38,14 @@
 #include <media/omx/1.0/WOmxNode.h>
 #include <media/openmax/OMX_Core.h>
 #include <media/openmax/OMX_IndexExt.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
 #include <media/stagefright/CCodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
 #include "CCodecBufferChannel.h"
@@ -210,8 +212,6 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        // NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
-        // communicate that directly to the component.
         mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
         return OK;
@@ -246,8 +246,19 @@
         if (source == nullptr) {
             return NO_INIT;
         }
-        constexpr size_t kNumSlots = 16;
-        for (size_t i = 0; i < kNumSlots; ++i) {
+
+        size_t numSlots = 16;
+        constexpr OMX_U32 kPortIndexInput = 0;
+
+        OMX_PARAM_PORTDEFINITIONTYPE param;
+        param.nPortIndex = kPortIndexInput;
+        status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+                                           &param, sizeof(param));
+        if (err == OK) {
+            numSlots = param.nBufferCountActual;
+        }
+
+        for (size_t i = 0; i < numSlots; ++i) {
             source->onInputBufferAdded(i);
         }
 
@@ -387,6 +398,14 @@
 
         // consumer usage is queried earlier.
 
+        // priority
+        if (mConfig.mPriority != config.mPriority) {
+            if (config.mPriority != INT_MAX) {
+                mNode->setPriority(config.mPriority);
+            }
+            mConfig.mPriority = config.mPriority;
+        }
+
         if (status.str().empty()) {
             ALOGD("ISConfig not changed");
         } else {
@@ -399,6 +418,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -471,6 +494,72 @@
     }
 };
 
+void RevertOutputFormatIfNeeded(
+        const sp<AMessage> &oldFormat, sp<AMessage> &currentFormat) {
+    // We used to not report changes to these keys to the client.
+    const static std::set<std::string> sIgnoredKeys({
+            KEY_BIT_RATE,
+            KEY_FRAME_RATE,
+            KEY_MAX_BIT_RATE,
+            KEY_MAX_WIDTH,
+            KEY_MAX_HEIGHT,
+            "csd-0",
+            "csd-1",
+            "csd-2",
+    });
+    if (currentFormat == oldFormat) {
+        return;
+    }
+    sp<AMessage> diff = currentFormat->changesFrom(oldFormat);
+    AMessage::Type type;
+    for (size_t i = diff->countEntries(); i > 0; --i) {
+        if (sIgnoredKeys.count(diff->getEntryNameAt(i - 1, &type)) > 0) {
+            diff->removeEntryAt(i - 1);
+        }
+    }
+    if (diff->countEntries() == 0) {
+        currentFormat = oldFormat;
+    }
+}
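
RevertOutputFormatIfNeeded() above suppresses output-format-changed notifications when the only keys that moved are ones the client was historically never told about. A standalone sketch of the same idea over plain std::map key/value pairs (an illustration of the logic, not the AMessage API; the key strings approximate the MediaFormat constants):

#include <map>
#include <set>
#include <string>

using Format = std::map<std::string, std::string>;

// Keys whose changes were historically not reported to the client.
static const std::set<std::string> kIgnoredKeys = {
    "bitrate", "frame-rate", "max-bitrate", "max-width", "max-height",
    "csd-0", "csd-1", "csd-2",
};

// Returns the format that should be reported: if the only differences from
// oldFormat are in ignored keys, keep reporting oldFormat unchanged.
static Format revertIfOnlyIgnoredKeysChanged(const Format &oldFormat, const Format &current) {
    for (const auto &[key, value] : current) {
        auto it = oldFormat.find(key);
        bool changed = (it == oldFormat.end()) || (it->second != value);
        if (changed && kIgnoredKeys.count(key) == 0) {
            return current;  // a reportable key changed; keep the new format
        }
    }
    for (const auto &entry : oldFormat) {
        // Removed keys count as changes too.
        if (current.count(entry.first) == 0 && kIgnoredKeys.count(entry.first) == 0) {
            return current;
        }
    }
    return oldFormat;  // only ignored keys differ; suppress the change
}
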
+
+void AmendOutputFormatWithCodecSpecificData(
+        const uint8_t *data, size_t size, const std::string &mediaType,
+        const sp<AMessage> &outputFormat) {
+    if (mediaType == MIMETYPE_VIDEO_AVC) {
+        // Codec specific data should be SPS and PPS in a single buffer,
+        // each prefixed by a startcode (0x00 0x00 0x00 0x01).
+        // We separate the two and put them into the output format
+        // under the keys "csd-0" and "csd-1".
+
+        unsigned csdIndex = 0;
+
+        const uint8_t *nalStart;
+        size_t nalSize;
+        while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+            sp<ABuffer> csd = new ABuffer(nalSize + 4);
+            memcpy(csd->data(), "\x00\x00\x00\x01", 4);
+            memcpy(csd->data() + 4, nalStart, nalSize);
+
+            outputFormat->setBuffer(
+                    AStringPrintf("csd-%u", csdIndex).c_str(), csd);
+
+            ++csdIndex;
+        }
+
+        if (csdIndex != 2) {
+            ALOGW("Expected two NAL units from AVC codec config, but %u found",
+                    csdIndex);
+        }
+    } else {
+        // For everything else we just stash the codec specific data into
+        // the output format as a single piece of csd under "csd-0".
+        sp<ABuffer> csd = new ABuffer(size);
+        memcpy(csd->data(), data, size);
+        csd->setRange(0, size);
+        outputFormat->setBuffer("csd-0", csd);
+    }
+}
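
The AVC branch above relies on getNextNALUnit() to split the codec-config blob into SPS and PPS before storing them as csd-0 and csd-1. A simplified, self-contained sketch of that Annex-B start-code scan in plain C++, with no stagefright dependencies:

#include <cstddef>
#include <cstdint>
#include <vector>

// One NAL unit: payload bytes without the start code.
using NalUnit = std::vector<uint8_t>;

// Splits an Annex-B byte stream (NAL units prefixed by 0x000001 or 0x00000001
// start codes) into individual NAL payloads. For AVC codec-config data this is
// expected to yield exactly two units, SPS then PPS.
static std::vector<NalUnit> splitAnnexB(const uint8_t *data, size_t size) {
    std::vector<size_t> starts;  // offset of the first payload byte of each NAL
    for (size_t i = 0; i + 3 <= size; ++i) {
        if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
            starts.push_back(i + 3);
            i += 2;  // skip the rest of the start code
        }
    }
    std::vector<NalUnit> units;
    for (size_t n = 0; n < starts.size(); ++n) {
        size_t begin = starts[n];
        // A unit ends where the next start code begins, or at the buffer end.
        size_t end = (n + 1 < starts.size()) ? starts[n + 1] - 3 : size;
        // A 4-byte start code leaves one extra zero on the previous unit;
        // drop it (SPS/PPS payloads do not end in a zero byte).
        if (end > begin && n + 1 < starts.size() && data[end - 1] == 0) {
            --end;
        }
        units.emplace_back(data + begin, data + end);
    }
    return units;
}
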
+
 }  // namespace
 
 // CCodec::ClientListener
@@ -502,9 +591,26 @@
     virtual void onError(
             const std::weak_ptr<Codec2Client::Component>& component,
             uint32_t errorCode) override {
-        // TODO
-        (void)component;
-        (void)errorCode;
+        {
+            // Component is only used for reporting as we use a separate listener for each instance
+            std::shared_ptr<Codec2Client::Component> comp = component.lock();
+            if (!comp) {
+                ALOGD("Component died with error: 0x%x", errorCode);
+            } else {
+                ALOGD("Component \"%s\" returned error: 0x%x", comp->getName().c_str(), errorCode);
+            }
+        }
+
+        // Report to MediaCodec
+        // Note: for now we do not propagate the error code to MediaCodec
+        // except for C2_NO_MEMORY, as we would need to translate to a MediaCodec error.
+        sp<CCodec> codec(mCodec.promote());
+        if (!codec || !codec->mCallback) {
+            return;
+        }
+        codec->mCallback->onError(
+                errorCode == C2_NO_MEMORY ? NO_MEMORY : UNKNOWN_ERROR,
+                ACTION_CODE_FATAL);
     }
 
     virtual void onDeath(
@@ -567,6 +673,10 @@
         mCodec->mCallback->onOutputBuffersChanged();
     }
 
+    void onFirstTunnelFrameReady() override {
+        mCodec->mCallback->onFirstTunnelFrameReady();
+    }
+
 private:
     CCodec *mCodec;
 };
@@ -634,17 +744,18 @@
                 std::make_shared<Codec2ClientInterfaceWrapper>(client));
     }
 
-    std::shared_ptr<Codec2Client::Component> comp =
-            Codec2Client::CreateComponentByName(
+    std::shared_ptr<Codec2Client::Component> comp;
+    c2_status_t status = Codec2Client::CreateComponentByName(
             componentName.c_str(),
             mClientListener,
+            &comp,
             &client);
-    if (!comp) {
-        ALOGE("Failed Create component: %s", componentName.c_str());
+    if (status != C2_OK) {
+        ALOGE("Failed to create component: %s, error=%d", componentName.c_str(), status);
         Mutexed<State>::Locked state(mState);
         state->set(RELEASED);
         state.unlock();
-        mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        mCallback->onError((status == C2_NO_MEMORY ? NO_MEMORY : UNKNOWN_ERROR), ACTION_CODE_FATAL);
         state.lock();
         return;
     }
@@ -739,10 +850,30 @@
             mChannel->setMetaMode(CCodecBufferChannel::MODE_ANW);
         }
 
+        status_t err = OK;
         sp<RefBase> obj;
         sp<Surface> surface;
         if (msg->findObject("native-window", &obj)) {
             surface = static_cast<Surface *>(obj.get());
+            // setup tunneled playback
+            if (surface != nullptr) {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                if ((config->mDomain & Config::IS_DECODER)
+                        && (config->mDomain & Config::IS_VIDEO)) {
+                    int32_t tunneled;
+                    if (msg->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                        ALOGI("Configuring TUNNELED video playback.");
+
+                        err = configureTunneledVideoPlayback(comp, &config->mSidebandHandle, msg);
+                        if (err != OK) {
+                            ALOGE("configureTunneledVideoPlayback failed!");
+                            return err;
+                        }
+                        config->mTunneled = true;
+                    }
+                }
+            }
             setSurface(surface);
         }
 
@@ -773,12 +904,14 @@
                 return BAD_VALUE;
             }
         }
+        int32_t width = 0;
+        int32_t height = 0;
         if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)) {
-            if (!msg->findInt32(KEY_WIDTH, &i32)) {
+            if (!msg->findInt32(KEY_WIDTH, &width)) {
                 ALOGD("width is missing, which is required for image/video components.");
                 return BAD_VALUE;
             }
-            if (!msg->findInt32(KEY_HEIGHT, &i32)) {
+            if (!msg->findInt32(KEY_HEIGHT, &height)) {
                 ALOGD("height is missing, which is required for image/video components.");
                 return BAD_VALUE;
             }
@@ -862,28 +995,122 @@
                 }
             }
             config->mISConfig->mUsage = 0;
+            config->mISConfig->mPriority = INT_MAX;
         }
 
         /*
          * Handle desired color format.
          */
+        int32_t defaultColorFormat = COLOR_FormatYUV420Flexible;
         if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))) {
-            int32_t format = -1;
+            int32_t format = 0;
+            // Query vendor format for Flexible YUV
+            std::vector<std::unique_ptr<C2Param>> heapParams;
+            C2StoreFlexiblePixelFormatDescriptorsInfo *pixelFormatInfo = nullptr;
+            if (mClient->query(
+                        {},
+                        {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
+                        C2_MAY_BLOCK,
+                        &heapParams) == C2_OK
+                    && heapParams.size() == 1u) {
+                pixelFormatInfo = C2StoreFlexiblePixelFormatDescriptorsInfo::From(
+                        heapParams[0].get());
+            } else {
+                pixelFormatInfo = nullptr;
+            }
+            std::optional<uint32_t> flexPixelFormat{};
+            std::optional<uint32_t> flexPlanarPixelFormat{};
+            std::optional<uint32_t> flexSemiPlanarPixelFormat{};
+            if (pixelFormatInfo && *pixelFormatInfo) {
+                for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
+                    const C2FlexiblePixelFormatDescriptorStruct &desc =
+                        pixelFormatInfo->m.values[i];
+                    if (desc.bitDepth != 8
+                            || desc.subsampling != C2Color::YUV_420
+                            // TODO(b/180076105): some devices report a wrong layout
+                            // || desc.layout == C2Color::INTERLEAVED_PACKED
+                            // || desc.layout == C2Color::INTERLEAVED_ALIGNED
+                            || desc.layout == C2Color::UNKNOWN_LAYOUT) {
+                        continue;
+                    }
+                    if (!flexPixelFormat) {
+                        flexPixelFormat = desc.pixelFormat;
+                    }
+                    if (desc.layout == C2Color::PLANAR_PACKED && !flexPlanarPixelFormat) {
+                        flexPlanarPixelFormat = desc.pixelFormat;
+                    }
+                    if (desc.layout == C2Color::SEMIPLANAR_PACKED && !flexSemiPlanarPixelFormat) {
+                        flexSemiPlanarPixelFormat = desc.pixelFormat;
+                    }
+                }
+            }
             if (!msg->findInt32(KEY_COLOR_FORMAT, &format)) {
-                /*
-                 * Also handle default color format (encoders require color format, so this is only
-                 * needed for decoders.
-                 */
+                // Also handle default color format (encoders require color format, so this is only
+                // needed for decoders).
                 if (!(config->mDomain & Config::IS_ENCODER)) {
-                    format = (surface == nullptr) ? COLOR_FormatYUV420Planar : COLOR_FormatSurface;
+                    if (surface == nullptr) {
+                        const char *prefix = "";
+                        if (flexSemiPlanarPixelFormat) {
+                            format = COLOR_FormatYUV420SemiPlanar;
+                            prefix = "semi-";
+                        } else {
+                            format = COLOR_FormatYUV420Planar;
+                        }
+                        ALOGD("Client requested ByteBuffer mode decoder w/o color format set: "
+                                "using default %splanar color format", prefix);
+                    } else {
+                        format = COLOR_FormatSurface;
+                    }
+                    defaultColorFormat = format;
+                }
+            } else {
+                if ((config->mDomain & Config::IS_ENCODER) || !surface) {
+                    switch (format) {
+                        case COLOR_FormatYUV420Flexible:
+                            format = flexPixelFormat.value_or(COLOR_FormatYUV420Planar);
+                            break;
+                        case COLOR_FormatYUV420Planar:
+                        case COLOR_FormatYUV420PackedPlanar:
+                            format = flexPlanarPixelFormat.value_or(
+                                    flexPixelFormat.value_or(format));
+                            break;
+                        case COLOR_FormatYUV420SemiPlanar:
+                        case COLOR_FormatYUV420PackedSemiPlanar:
+                            format = flexSemiPlanarPixelFormat.value_or(
+                                    flexPixelFormat.value_or(format));
+                            break;
+                        default:
+                            // No-op
+                            break;
+                    }
                 }
             }
 
-            if (format >= 0) {
+            if (format != 0) {
                 msg->setInt32("android._color-format", format);
             }
         }
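
The mapping above lets a client keep requesting the classic SDK YUV420 constants while the store advertises vendor-specific flexible formats: it prefers a layout-matched flexible format, then any flexible format, and finally leaves the request untouched (the packed variants follow the same chain). A compact standalone illustration of that fallback order; the enum values are placeholders, not the MediaCodecInfo constants:

#include <cstdint>
#include <optional>

// Illustrative placeholders for the SDK color-format constants.
enum : int32_t {
    kYuv420Flexible = 1,
    kYuv420Planar = 2,
    kYuv420SemiPlanar = 3,
};

// Flexible 8-bit 4:2:0 formats advertised by the component store, if any.
struct FlexibleFormats {
    std::optional<int32_t> any;         // any advertised flexible format
    std::optional<int32_t> planar;      // planar-packed layout
    std::optional<int32_t> semiPlanar;  // semi-planar-packed layout
};

// Prefer the layout-specific vendor format, then any flexible format,
// then leave the request unchanged.
static int32_t resolveColorFormat(int32_t requested, const FlexibleFormats &flex) {
    switch (requested) {
        case kYuv420Flexible:
            return flex.any.value_or(kYuv420Planar);
        case kYuv420Planar:
            return flex.planar.value_or(flex.any.value_or(requested));
        case kYuv420SemiPlanar:
            return flex.semiPlanar.value_or(flex.any.value_or(requested));
        default:
            return requested;  // other formats pass through untouched
    }
}
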
 
+        /*
+         * Handle dataspace
+         */
+        int32_t usingRecorder;
+        if (msg->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
+            android_dataspace dataSpace = HAL_DATASPACE_BT709;
+            int32_t width, height;
+            if (msg->findInt32("width", &width)
+                    && msg->findInt32("height", &height)) {
+                ColorAspects aspects;
+                getColorAspectsFromFormat(msg, aspects);
+                setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
+                // TODO: read dataspace / color aspect from the component
+                setColorAspectsIntoFormat(aspects, const_cast<sp<AMessage> &>(msg));
+                dataSpace = getDataSpaceForColorAspects(aspects, true /* mayexpand */);
+            }
+            msg->setInt32("android._dataspace", (int32_t)dataSpace);
+            ALOGD("setting dataspace to %x", dataSpace);
+        }
+
         int32_t subscribeToAllVendorParams;
         if (msg->findInt32("x-*", &subscribeToAllVendorParams) && subscribeToAllVendorParams) {
             if (config->subscribeToAllVendorParams(comp, C2_MAY_BLOCK) != OK) {
@@ -900,7 +1127,7 @@
             sdkParams = msg->dup();
             sdkParams->removeEntryAt(sdkParams->findEntryByName(PARAMETER_KEY_VIDEO_BITRATE));
         }
-        status_t err = config->getConfigUpdateFromSdkParams(
+        err = config->getConfigUpdateFromSdkParams(
                 comp, sdkParams, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
         if (err != OK) {
             ALOGW("failed to convert configuration to c2 params");
@@ -921,6 +1148,55 @@
             configUpdate.push_back(std::move(gop));
         }
 
+        if ((config->mDomain & Config::IS_ENCODER)
+                && (config->mDomain & Config::IS_VIDEO)) {
+            // we may not use all 3 of these entries
+            std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+                C2StreamPictureQuantizationTuning::output::AllocUnique(3 /* flexCount */,
+                                                                       0u /* stream */);
+
+            int ix = 0;
+
+            int32_t iMax = INT32_MAX;
+            int32_t iMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MAX, &iMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MIN, &iMin);
+            if (iMax != INT32_MAX || iMin != INT32_MIN) {
+                qp->m.values[ix++] = {I_FRAME, iMin, iMax};
+            }
+
+            int32_t pMax = INT32_MAX;
+            int32_t pMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MAX, &pMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MIN, &pMin);
+            if (pMax != INT32_MAX || pMin != INT32_MIN) {
+                qp->m.values[ix++] = {P_FRAME, pMin, pMax};
+            }
+
+            int32_t bMax = INT32_MAX;
+            int32_t bMin = INT32_MIN;
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MAX, &bMax);
+            (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MIN, &bMin);
+            if (bMax != INT32_MAX || bMin != INT32_MIN) {
+                qp->m.values[ix++] = {B_FRAME, bMin, bMax};
+            }
+
+            // adjust to reflect actual use.
+            qp->setFlexCount(ix);
+
+            configUpdate.push_back(std::move(qp));
+        }
+
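
The QP block above emits an entry for a frame type only when the client actually set a bound for it, using INT32_MIN/INT32_MAX as "unbounded" sentinels and then shrinking the flexible count to the number of entries filled. A minimal sketch of that pattern with plain structs (illustrative names, not the Codec2 parameter types):

#include <cstdint>
#include <optional>
#include <vector>

enum FrameType { I_FRAME, P_FRAME, B_FRAME };

struct QpRange {
    FrameType type;
    int32_t min;
    int32_t max;
};

// Builds a compact list of QP bounds: a frame type is included only if the
// caller supplied at least one bound for it; the missing bound stays at the
// sentinel, meaning "unbounded on that side".
static std::vector<QpRange> buildQpRanges(
        std::optional<int32_t> iMin, std::optional<int32_t> iMax,
        std::optional<int32_t> pMin, std::optional<int32_t> pMax,
        std::optional<int32_t> bMin, std::optional<int32_t> bMax) {
    std::vector<QpRange> ranges;
    auto add = [&ranges](FrameType type,
                         std::optional<int32_t> min, std::optional<int32_t> max) {
        if (min || max) {
            ranges.push_back({type, min.value_or(INT32_MIN), max.value_or(INT32_MAX)});
        }
    };
    add(I_FRAME, iMin, iMax);
    add(P_FRAME, pMin, pMax);
    add(B_FRAME, bMin, bMax);
    return ranges;  // size() plays the role of setFlexCount(ix)
}
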
+        int32_t background = 0;
+        if ((config->mDomain & Config::IS_VIDEO)
+                && msg->findInt32("android._background-mode", &background)
+                && background) {
+            androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
+            if (config->mISConfig) {
+                config->mISConfig->mPriority = ANDROID_PRIORITY_BACKGROUND;
+            }
+        }
+
         err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
         if (err != OK) {
             ALOGW("failed to configure c2 params");
@@ -932,7 +1208,10 @@
         C2StreamMaxBufferSizeInfo::input maxInputSize(0u, 0u);
         C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
 
+        C2Param::Index colorAspectsRequestIndex =
+            C2StreamColorAspectsInfo::output::PARAM_TYPE | C2Param::CoreIndex::IS_REQUEST_FLAG;
         std::initializer_list<C2Param::Index> indices {
+            colorAspectsRequestIndex.withStream(0u),
         };
         c2_status_t c2err = comp->query(
                 { &usage, &maxInputSize, &prepend },
@@ -943,11 +1222,6 @@
             ALOGE("Failed to query component interface: %d", c2err);
             return UNKNOWN_ERROR;
         }
-        if (params.size() != indices.size()) {
-            ALOGE("Component returns wrong number of params: expected %zu actual %zu",
-                    indices.size(), params.size());
-            return UNKNOWN_ERROR;
-        }
         if (usage) {
             if (usage.value & C2MemoryUsage::CPU_READ) {
                 config->mInputFormat->setInt32("using-sw-read-often", true);
@@ -956,6 +1230,7 @@
                 C2AndroidMemoryUsage androidUsage(C2MemoryUsage(usage.value));
                 config->mISConfig->mUsage = androidUsage.asGrallocUsage();
             }
+            config->mInputFormat->setInt64("android._C2MemoryUsage", usage.value);
         }
 
         // NOTE: we don't blindly use client specified input size if specified as clients
@@ -1012,13 +1287,14 @@
         int32_t clientPrepend;
         if ((config->mDomain & Config::IS_VIDEO)
                 && (config->mDomain & Config::IS_ENCODER)
-                && msg->findInt32(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES, &clientPrepend)
+                && msg->findInt32(KEY_PREPEND_HEADER_TO_SYNC_FRAMES, &clientPrepend)
                 && clientPrepend
                 && (!prepend || prepend.value != PREPEND_HEADER_TO_ALL_SYNC)) {
-            ALOGE("Failed to set KEY_PREPEND_HEADERS_TO_SYNC_FRAMES");
+            ALOGE("Failed to set KEY_PREPEND_HEADER_TO_SYNC_FRAMES");
             return BAD_VALUE;
         }
 
+        int32_t componentColorFormat = 0;
         if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))) {
             // propagate HDR static info to output format for both encoders and decoders
             // if component supports this info, we will update from component, but only the raw port,
@@ -1031,12 +1307,16 @@
 
             // Set desired color format from configuration parameter
             int32_t format;
-            if (msg->findInt32("android._color-format", &format)) {
-                if (config->mDomain & Config::IS_ENCODER) {
-                    config->mInputFormat->setInt32(KEY_COLOR_FORMAT, format);
-                } else {
-                    config->mOutputFormat->setInt32(KEY_COLOR_FORMAT, format);
+            if (!msg->findInt32(KEY_COLOR_FORMAT, &format)) {
+                format = defaultColorFormat;
+            }
+            if (config->mDomain & Config::IS_ENCODER) {
+                config->mInputFormat->setInt32(KEY_COLOR_FORMAT, format);
+                if (msg->findInt32("android._color-format", &componentColorFormat)) {
+                    config->mInputFormat->setInt32("android._color-format", componentColorFormat);
                 }
+            } else {
+                config->mOutputFormat->setInt32(KEY_COLOR_FORMAT, format);
             }
         }
 
@@ -1064,8 +1344,86 @@
             }
         }
 
-        ALOGD("setup formats input: %s and output: %s",
-                config->mInputFormat->debugString().c_str(),
+        std::unique_ptr<C2Param> colorTransferRequestParam;
+        for (std::unique_ptr<C2Param> &param : params) {
+            if (param->index() == colorAspectsRequestIndex.withStream(0u)) {
+                ALOGI("found color transfer request param");
+                colorTransferRequestParam = std::move(param);
+            }
+        }
+        int32_t colorTransferRequest = 0;
+        if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
+                && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
+            colorTransferRequest = 0;
+        }
+
+        if (colorTransferRequest != 0) {
+            if (colorTransferRequestParam && *colorTransferRequestParam) {
+                C2StreamColorAspectsInfo::output *info =
+                    static_cast<C2StreamColorAspectsInfo::output *>(
+                            colorTransferRequestParam.get());
+                if (!C2Mapper::map(info->transfer, &colorTransferRequest)) {
+                    colorTransferRequest = 0;
+                }
+            } else {
+                colorTransferRequest = 0;
+            }
+            config->mInputFormat->setInt32("color-transfer-request", colorTransferRequest);
+        }
+
+        if (componentColorFormat != 0 && componentColorFormat != COLOR_FormatSurface) {
+            // Need to get stride/vstride
+            uint32_t pixelFormat = PIXEL_FORMAT_UNKNOWN;
+            if (C2Mapper::mapPixelFormatFrameworkToCodec(componentColorFormat, &pixelFormat)) {
+                // TODO: retrieve these values without allocating a buffer.
+                //       Currently allocating a buffer is necessary to retrieve the layout.
+                int64_t blockUsage =
+                    usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
+                std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
+                        width, height, pixelFormat, blockUsage, {comp->getName()});
+                sp<GraphicBlockBuffer> buffer;
+                if (block) {
+                    buffer = GraphicBlockBuffer::Allocate(
+                            config->mInputFormat,
+                            block,
+                            [](size_t size) -> sp<ABuffer> { return new ABuffer(size); });
+                } else {
+                    ALOGD("Failed to allocate a graphic block "
+                            "(width=%d height=%d pixelFormat=%u usage=%llx)",
+                            width, height, pixelFormat, (long long)blockUsage);
+                    // This means that byte buffer mode is not supported in this configuration
+                    // anyway. Skip setting stride/vstride to input format.
+                }
+                if (buffer) {
+                    sp<ABuffer> imageData = buffer->getImageData();
+                    MediaImage2 *img = nullptr;
+                    if (imageData && imageData->data()
+                            && imageData->size() >= sizeof(MediaImage2)) {
+                        img = (MediaImage2*)imageData->data();
+                    }
+                    if (img && img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+                        int32_t stride = img->mPlane[0].mRowInc;
+                        config->mInputFormat->setInt32(KEY_STRIDE, stride);
+                        if (img->mNumPlanes > 1 && stride > 0) {
+                            int64_t offsetDelta =
+                                (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
+                            if (offsetDelta % stride == 0) {
+                                int32_t vstride = int32_t(offsetDelta / stride);
+                                config->mInputFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+                            } else {
+                                ALOGD("Cannot report accurate slice height: "
+                                        "offsetDelta = %lld stride = %d",
+                                        (long long)offsetDelta, stride);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
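
The slice-height derivation above recovers the vertical stride from the buffer layout: the byte distance between the first two plane offsets must be an exact number of rows, and dividing it by the row increment gives the rows allocated per plane. A standalone arithmetic sketch of that check (plain integers, not MediaImage2; the extra non-positive checks are additions for the sketch):

#include <cstdint>
#include <optional>

// offsetDelta = plane1.offset - plane0.offset must be an exact multiple of the
// row stride, otherwise the slice height cannot be reported accurately
// (e.g. when the planes are not laid out contiguously).
static std::optional<int32_t> sliceHeightFromPlanes(
        int64_t plane0Offset, int64_t plane1Offset, int32_t stride) {
    if (stride <= 0) {
        return std::nullopt;
    }
    int64_t offsetDelta = plane1Offset - plane0Offset;
    if (offsetDelta <= 0 || offsetDelta % stride != 0) {
        return std::nullopt;
    }
    return static_cast<int32_t>(offsetDelta / stride);
}

// Example: a 1280x720 buffer with 64-row alignment might report stride = 1280
// and plane offsets 0 and 1280 * 768, giving a slice height of 768.
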
+        ALOGD("setup formats input: %s",
+                config->mInputFormat->debugString().c_str());
+        ALOGD("setup formats output: %s",
                 config->mOutputFormat->debugString().c_str());
         return OK;
     };
@@ -1076,6 +1434,8 @@
     Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
     const std::unique_ptr<Config> &config = *configLocked;
 
+    config->queryConfiguration(comp);
+
     mCallback->onComponentConfigured(config->mInputFormat, config->mOutputFormat);
 }
 
@@ -1144,13 +1504,11 @@
     status_t err;
     sp<IGraphicBufferProducer> bufferProducer;
 
-    sp<AMessage> inputFormat;
     sp<AMessage> outputFormat;
     uint64_t usage = 0;
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
-        inputFormat = config->mInputFormat;
         outputFormat = config->mOutputFormat;
         usage = config->mISConfig ? config->mISConfig->mUsage : 0;
     }
@@ -1186,6 +1544,14 @@
         return;
     }
 
+    // Formats can change after setupInputSurface
+    sp<AMessage> inputFormat;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        inputFormat = config->mInputFormat;
+        outputFormat = config->mOutputFormat;
+    }
     mCallback->onInputSurfaceCreated(
             inputFormat,
             outputFormat,
@@ -1235,13 +1601,11 @@
 }
 
 void CCodec::setInputSurface(const sp<PersistentSurface> &surface) {
-    sp<AMessage> inputFormat;
     sp<AMessage> outputFormat;
     uint64_t usage = 0;
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
-        inputFormat = config->mInputFormat;
         outputFormat = config->mOutputFormat;
         usage = config->mISConfig ? config->mISConfig->mUsage : 0;
     }
@@ -1273,6 +1637,14 @@
         mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
         return;
     }
+    // Formats can change after setupInputSurface
+    sp<AMessage> inputFormat;
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        inputFormat = config->mInputFormat;
+        outputFormat = config->mOutputFormat;
+    }
     mCallback->onInputSurfaceAccepted(inputFormat, outputFormat);
 }
 
@@ -1324,6 +1696,7 @@
         outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
+            config->mInputSurfaceDataspace = config->mInputSurface->getDataspace();
         }
         buffersBoundToCodec = config->mBuffersBoundToCodec;
     }
@@ -1331,8 +1704,6 @@
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
-    // We're not starting after flush.
-    (void)mSentConfigAfterResume.test_and_set();
     err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -1413,6 +1784,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
     {
@@ -1462,6 +1834,7 @@
         if (config->mInputSurface) {
             config->mInputSurface->disconnect();
             config->mInputSurface = nullptr;
+            config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
 
@@ -1499,6 +1872,21 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mTunneled && config->mSidebandHandle != nullptr) {
+            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+            status_t err = native_window_set_sideband_stream(
+                    nativeWindow.get(),
+                    const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+            if (err != OK) {
+                ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                        nativeWindow.get(), config->mSidebandHandle->handle(), err);
+                return err;
+            }
+        }
+    }
     return mChannel->setSurface(surface);
 }
 
@@ -1580,11 +1968,12 @@
         return;
     }
 
-    mSentConfigAfterResume.clear();
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
+        sp<AMessage> outputFormat = config->mOutputFormat;
         config->queryConfiguration(comp);
+        RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
     }
 
     (void)mChannel->start(nullptr, nullptr, [&]{
@@ -1630,6 +2019,12 @@
         params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
     }
 
+    int32_t syncId = 0;
+    if (params->findInt32("audio-hw-sync", &syncId)
+            || params->findInt32("hw-av-sync-id", &syncId)) {
+        configureTunneledVideoPlayback(comp, nullptr, params);
+    }
+
     Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
     const std::unique_ptr<Config> &config = *configLocked;
 
@@ -1672,7 +2067,9 @@
                     || comp->getName().find("c2.android.") == 0)) {
         mChannel->setParameters(configUpdate);
     } else {
+        sp<AMessage> outputFormat = config->mOutputFormat;
         (void)config->setParameters(comp, configUpdate, C2_MAY_BLOCK);
+        RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
     }
 }
 
@@ -1699,6 +2096,39 @@
     config->setParameters(comp, params, C2_MAY_BLOCK);
 }
 
+status_t CCodec::querySupportedParameters(std::vector<std::string> *names) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->querySupportedParameters(names);
+}
+
+status_t CCodec::describeParameter(
+        const std::string &name, CodecParameterDescriptor *desc) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->describe(name, desc);
+}
+
+status_t CCodec::subscribeToParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->subscribeToVendorConfigUpdate(comp, names);
+}
+
+status_t CCodec::unsubscribeFromParameters(const std::vector<std::string> &names) {
+    std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+    if (!comp) {
+        return INVALID_OPERATION;
+    }
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    return config->unsubscribeFromVendorConfigUpdate(comp, names);
+}
+
 void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
     if (!workItems.empty()) {
         Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -1795,78 +2225,99 @@
             }
 
             // handle configuration changes in work done
-            Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-            const std::unique_ptr<Config> &config = *configLocked;
-            bool changed = !mSentConfigAfterResume.test_and_set();
-            Config::Watcher<C2StreamInitDataInfo::output> initData =
-                config->watch<C2StreamInitDataInfo::output>();
-            if (!work->worklets.empty()
-                    && (work->worklets.front()->output.flags
-                            & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
+            std::shared_ptr<const C2StreamInitDataInfo::output> initData;
+            sp<AMessage> outputFormat = nullptr;
+            {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                Config::Watcher<C2StreamInitDataInfo::output> initDataWatcher =
+                    config->watch<C2StreamInitDataInfo::output>();
+                if (!work->worklets.empty()
+                        && (work->worklets.front()->output.flags
+                                & C2FrameData::FLAG_DISCARD_FRAME) == 0) {
 
-                // copy buffer info to config
-                std::vector<std::unique_ptr<C2Param>> updates;
-                for (const std::unique_ptr<C2Param> &param
-                        : work->worklets.front()->output.configUpdate) {
-                    updates.push_back(C2Param::Copy(*param));
-                }
-                unsigned stream = 0;
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    for (const std::shared_ptr<const C2Info> &info : buf->info()) {
-                        // move all info into output-stream #0 domain
-                        updates.emplace_back(C2Param::CopyAsStream(*info, true /* output */, stream));
+                    // copy buffer info to config
+                    std::vector<std::unique_ptr<C2Param>> updates;
+                    for (const std::unique_ptr<C2Param> &param
+                            : work->worklets.front()->output.configUpdate) {
+                        updates.push_back(C2Param::Copy(*param));
                     }
-                    for (const C2ConstGraphicBlock &block : buf->data().graphicBlocks()) {
-                        // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
-                        //      block.crop().left, block.crop().top,
-                        //      block.crop().width, block.crop().height,
-                        //      block.width(), block.height());
-                        updates.emplace_back(new C2StreamCropRectInfo::output(stream, block.crop()));
-                        updates.emplace_back(new C2StreamPictureSizeInfo::output(
-                                stream, block.crop().width, block.crop().height));
-                        break; // for now only do the first block
+                    unsigned stream = 0;
+                    std::vector<std::shared_ptr<C2Buffer>> &outputBuffers =
+                        work->worklets.front()->output.buffers;
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        for (const std::shared_ptr<const C2Info> &info : buf->info()) {
+                            // move all info into output-stream #0 domain
+                            updates.emplace_back(
+                                    C2Param::CopyAsStream(*info, true /* output */, stream));
+                        }
+
+                        const std::vector<C2ConstGraphicBlock> blocks = buf->data().graphicBlocks();
+                        // for now only do the first block
+                        if (!blocks.empty()) {
+                            // ALOGV("got output buffer with crop %u,%u+%u,%u and size %u,%u",
+                            //      block.crop().left, block.crop().top,
+                            //      block.crop().width, block.crop().height,
+                            //      block.width(), block.height());
+                            const C2ConstGraphicBlock &block = blocks[0];
+                            updates.emplace_back(new C2StreamCropRectInfo::output(
+                                    stream, block.crop()));
+                            updates.emplace_back(new C2StreamPictureSizeInfo::output(
+                                    stream, block.crop().width, block.crop().height));
+                        }
+                        ++stream;
                     }
-                    ++stream;
-                }
 
-                if (config->updateConfiguration(updates, config->mOutputDomain)) {
-                    changed = true;
-                }
+                    sp<AMessage> oldFormat = config->mOutputFormat;
+                    config->updateConfiguration(updates, config->mOutputDomain);
+                    RevertOutputFormatIfNeeded(oldFormat, config->mOutputFormat);
 
-                // copy standard infos to graphic buffers if not already present (otherwise, we
-                // may overwrite the actual intermediate value with a final value)
-                stream = 0;
-                const static std::vector<C2Param::Index> stdGfxInfos = {
-                    C2StreamRotationInfo::output::PARAM_TYPE,
-                    C2StreamColorAspectsInfo::output::PARAM_TYPE,
-                    C2StreamDataSpaceInfo::output::PARAM_TYPE,
-                    C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                    C2StreamHdr10PlusInfo::output::PARAM_TYPE,
-                    C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
-                    C2StreamSurfaceScalingInfo::output::PARAM_TYPE
-                };
-                for (const std::shared_ptr<C2Buffer> &buf : work->worklets.front()->output.buffers) {
-                    if (buf->data().graphicBlocks().size()) {
-                        for (C2Param::Index ix : stdGfxInfos) {
-                            if (!buf->hasInfo(ix)) {
-                                const C2Param *param =
-                                    config->getConfigParameterValue(ix.withStream(stream));
-                                if (param) {
-                                    std::shared_ptr<C2Param> info(C2Param::Copy(*param));
-                                    buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                    // copy standard infos to graphic buffers if not already present (otherwise, we
+                    // may overwrite the actual intermediate value with a final value)
+                    stream = 0;
+                    const static C2Param::Index stdGfxInfos[] = {
+                        C2StreamRotationInfo::output::PARAM_TYPE,
+                        C2StreamColorAspectsInfo::output::PARAM_TYPE,
+                        C2StreamDataSpaceInfo::output::PARAM_TYPE,
+                        C2StreamHdrStaticInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
+                        C2StreamSurfaceScalingInfo::output::PARAM_TYPE
+                    };
+                    for (const std::shared_ptr<C2Buffer> &buf : outputBuffers) {
+                        if (buf->data().graphicBlocks().size()) {
+                            for (C2Param::Index ix : stdGfxInfos) {
+                                if (!buf->hasInfo(ix)) {
+                                    const C2Param *param =
+                                        config->getConfigParameterValue(ix.withStream(stream));
+                                    if (param) {
+                                        std::shared_ptr<C2Param> info(C2Param::Copy(*param));
+                                        buf->setInfo(std::static_pointer_cast<C2Info>(info));
+                                    }
                                 }
                             }
                         }
+                        ++stream;
                     }
-                    ++stream;
                 }
-            }
-            if (config->mInputSurface) {
-                config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                if (config->mInputSurface) {
+                    if (work->worklets.empty()
+                           || !work->worklets.back()
+                           || (work->worklets.back()->output.flags
+                                  & C2FrameData::FLAG_INCOMPLETE) == 0) {
+                        config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+                    }
+                }
+                if (initDataWatcher.hasChanged()) {
+                    initData = initDataWatcher.update();
+                    AmendOutputFormatWithCodecSpecificData(
+                            initData->m.value, initData->flexCount(), config->mCodingMediaType,
+                            config->mOutputFormat);
+                }
+                outputFormat = config->mOutputFormat;
             }
             mChannel->onWorkDone(
-                    std::move(work), changed ? config->mOutputFormat->dup() : nullptr,
-                    initData.hasChanged() ? initData.update().get() : nullptr);
+                    std::move(work), outputFormat, initData ? initData.get() : nullptr);
             break;
         }
         case kWhatWatch: {
@@ -1890,6 +2341,55 @@
     deadline->set(now + (timeout * mult), name);
 }
 
+status_t CCodec::configureTunneledVideoPlayback(
+        std::shared_ptr<Codec2Client::Component> comp,
+        sp<NativeHandle> *sidebandHandle,
+        const sp<AMessage> &msg) {
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+
+    std::unique_ptr<C2PortTunneledModeTuning::output> tunneledPlayback =
+        C2PortTunneledModeTuning::output::AllocUnique(
+            1,
+            C2PortTunneledModeTuning::Struct::SIDEBAND,
+            C2PortTunneledModeTuning::Struct::REALTIME,
+            0);
+    // TODO: use KEY_AUDIO_HW_SYNC, KEY_HARDWARE_AV_SYNC_ID when they are in MediaCodecConstants.h
+    if (msg->findInt32("audio-hw-sync", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::AUDIO_HW_SYNC;
+    } else if (msg->findInt32("hw-av-sync-id", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::HW_AV_SYNC;
+    } else {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::REALTIME;
+        tunneledPlayback->setFlexCount(0);
+    }
+    c2_status_t c2err = comp->config({ tunneledPlayback.get() }, C2_MAY_BLOCK, &failures);
+    if (c2err != C2_OK) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (sidebandHandle == nullptr) {
+        return OK;
+    }
+
+    std::vector<std::unique_ptr<C2Param>> params;
+    c2err = comp->query({}, {C2PortTunnelHandleTuning::output::PARAM_TYPE}, C2_DONT_BLOCK, &params);
+    if (c2err == C2_OK && params.size() == 1u) {
+        C2PortTunnelHandleTuning::output *videoTunnelSideband =
+            C2PortTunnelHandleTuning::output::From(params[0].get());
+        // Currently, Codec2 only supports the non-fd case for sideband native_handle.
+        native_handle_t *handle = native_handle_create(0, videoTunnelSideband->flexCount());
+        *sidebandHandle = NativeHandle::create(handle, true /* ownsHandle */);
+        if (handle != nullptr && videoTunnelSideband->flexCount()) {
+            memcpy(handle->data, videoTunnelSideband->m.values,
+                    sizeof(int32_t) * videoTunnelSideband->flexCount());
+            return OK;
+        } else {
+            return NO_MEMORY;
+        }
+    }
+    return UNKNOWN_ERROR;
+}
+
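
configureTunneledVideoPlayback() above selects the tunnel sync type from whichever sync key the client supplied, in a fixed priority order, and carries a sync id only when one is present. A small sketch of just that selection step; the enum and struct are simplified stand-ins, not C2PortTunneledModeTuning:

#include <cstdint>
#include <optional>
#include <vector>

enum class SyncType { REALTIME, AUDIO_HW_SYNC, HW_AV_SYNC };

struct TunnelConfig {
    SyncType syncType;
    std::vector<int32_t> syncIds;  // empty for REALTIME, one entry otherwise
};

// "audio-hw-sync" wins over "hw-av-sync-id"; with neither present the codec
// falls back to realtime rendering with no sync id, mirroring the
// setFlexCount(0) case in the function above.
static TunnelConfig chooseTunnelSync(std::optional<int32_t> audioHwSync,
                                     std::optional<int32_t> hwAvSyncId) {
    if (audioHwSync) {
        return {SyncType::AUDIO_HW_SYNC, {*audioHwSync}};
    }
    if (hwAvSyncId) {
        return {SyncType::HW_AV_SYNC, {*hwAvSyncId}};
    }
    return {SyncType::REALTIME, {}};
}
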
 void CCodec::initiateReleaseIfStuck() {
     std::string name;
     bool pendingDeadline = false;
@@ -1902,7 +2402,44 @@
             pendingDeadline = true;
         }
     }
-    if (name.empty()) {
+    bool tunneled = false;
+    bool isMediaTypeKnown = false;
+    {
+        static const std::set<std::string> kKnownMediaTypes{
+            MIMETYPE_VIDEO_VP8,
+            MIMETYPE_VIDEO_VP9,
+            MIMETYPE_VIDEO_AV1,
+            MIMETYPE_VIDEO_AVC,
+            MIMETYPE_VIDEO_HEVC,
+            MIMETYPE_VIDEO_MPEG4,
+            MIMETYPE_VIDEO_H263,
+            MIMETYPE_VIDEO_MPEG2,
+            MIMETYPE_VIDEO_RAW,
+            MIMETYPE_VIDEO_DOLBY_VISION,
+
+            MIMETYPE_AUDIO_AMR_NB,
+            MIMETYPE_AUDIO_AMR_WB,
+            MIMETYPE_AUDIO_MPEG,
+            MIMETYPE_AUDIO_AAC,
+            MIMETYPE_AUDIO_QCELP,
+            MIMETYPE_AUDIO_VORBIS,
+            MIMETYPE_AUDIO_OPUS,
+            MIMETYPE_AUDIO_G711_ALAW,
+            MIMETYPE_AUDIO_G711_MLAW,
+            MIMETYPE_AUDIO_RAW,
+            MIMETYPE_AUDIO_FLAC,
+            MIMETYPE_AUDIO_MSGSM,
+            MIMETYPE_AUDIO_AC3,
+            MIMETYPE_AUDIO_EAC3,
+
+            MIMETYPE_IMAGE_ANDROID_HEIC,
+        };
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        tunneled = config->mTunneled;
+        isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
+    }
+    if (!tunneled && isMediaTypeKnown && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
@@ -1922,7 +2459,18 @@
         return;
     }
 
-    ALOGW("previous call to %s exceeded timeout", name.c_str());
+    C2String compName;
+    {
+        Mutexed<State>::Locked state(mState);
+        if (!state->comp) {
+            ALOGD("previous call to %s exceeded timeout "
+                  "and the component is already released", name.c_str());
+            return;
+        }
+        compName = state->comp->getName();
+    }
+    ALOGW("[%s] previous call to %s exceeded timeout", compName.c_str(), name.c_str());
+
     initiateRelease(false);
     mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
 }
@@ -1994,7 +2542,7 @@
             }
             if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
                 mInputAllocators.reset(
-                        C2PortAllocatorsTuning::input::From(params[0].get()));
+                        C2PortAllocatorsTuning::input::From(param));
             }
         }
         mInitStatus = OK;
@@ -2144,7 +2692,11 @@
             *maxUsage = 0;
             continue;
         }
-        *minUsage |= supported.values[0].u64;
+        if (supported.values.size() > 1) {
+            *minUsage |= supported.values[1].u64;
+        } else {
+            *minUsage |= supported.values[0].u64;
+        }
         int64_t currentMaxUsage = 0;
         for (const C2Value::Primitive &flags : supported.values) {
             currentMaxUsage |= flags.u64;
@@ -2168,15 +2720,17 @@
             return OK;
         }
     }
-    uint64_t minUsage = usage.expected;
-    uint64_t maxUsage = ~0ull;
     std::set<C2Allocator::id_t> allocators;
     GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
     if (allocators.empty()) {
         *isCompatible = false;
         return OK;
     }
+
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
     CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    minUsage |= usage.expected;
     *isCompatible = ((maxUsage & minUsage) == minUsage);
     return OK;
 }
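
The compatibility test above reduces to bitmask algebra: the combination works only if every usage bit the client requires (minUsage, now OR'd with usage.expected) is contained in the union of flags the components can satisfy (maxUsage). A self-contained sketch of that check:

#include <cstdint>

// minUsage: bits the client requires (client-expected usage OR'd with the
//           components' minimum supported flags).
// maxUsage: union of every usage flag any of the components can satisfy.
// The combination is workable only when all required bits are available.
static bool isUsageCompatible(uint64_t minUsage, uint64_t maxUsage) {
    return (maxUsage & minUsage) == minUsage;
}

// Example: requiring CPU_READ | CPU_WRITE is compatible with a component set
// offering CPU_READ | CPU_WRITE | HW_TEXTURE, but not with one offering
// CPU_READ alone (the CPU_WRITE bit is missing).
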
@@ -2203,14 +2757,16 @@
 // static
 std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
         size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
-    uint64_t minUsage = usage.expected;
-    uint64_t maxUsage = ~0ull;
     std::set<C2Allocator::id_t> allocators;
     GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
     if (allocators.empty()) {
         allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
     }
+
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = ~0ull;
     CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+    minUsage |= usage.expected;
     if ((maxUsage & minUsage) != minUsage) {
         allocators.clear();
         allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
@@ -2291,4 +2847,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 0626c8d..9e9bdfc 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -30,6 +30,7 @@
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android-base/properties.h>
 #include <android-base/stringprintf.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
@@ -143,7 +144,8 @@
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
       mMetaMode(MODE_NONE),
-      mInputMetEos(false) {
+      mInputMetEos(false),
+      mSendEncryptedInfoBuffer(false) {
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
     {
         Mutexed<Input>::Locked input(mInput);
@@ -159,6 +161,10 @@
         output->outputDelay = 0u;
         output->numSlots = kSmoothnessFactor;
     }
+    {
+        Mutexed<BlockPools>::Locked pools(mBlockPools);
+        pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
+    }
 }
 
 CCodecBufferChannel::~CCodecBufferChannel() {
@@ -188,7 +194,10 @@
     return mInputSurface->signalEndOfInputStream();
 }
 
-status_t CCodecBufferChannel::queueInputBufferInternal(sp<MediaCodecBuffer> buffer) {
+status_t CCodecBufferChannel::queueInputBufferInternal(
+        sp<MediaCodecBuffer> buffer,
+        std::shared_ptr<C2LinearBlock> encryptedBlock,
+        size_t blockSize) {
     int64_t timeUs;
     CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
@@ -200,6 +209,7 @@
     int32_t flags = 0;
     int32_t tmp = 0;
     bool eos = false;
+    bool tunnelFirstFrame = false;
     if (buffer->meta()->findInt32("eos", &tmp) && tmp) {
         eos = true;
         mInputMetEos = true;
@@ -208,7 +218,11 @@
     if (buffer->meta()->findInt32("csd", &tmp) && tmp) {
         flags |= C2FrameData::FLAG_CODEC_CONFIG;
     }
+    if (buffer->meta()->findInt32("tunnel-first-frame", &tmp) && tmp) {
+        tunnelFirstFrame = true;
+    }
     ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
+    std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
     work->input.ordinal.timestamp = timeUs;
     work->input.ordinal.frameIndex = mFrameIndex++;
@@ -218,9 +232,8 @@
     work->input.ordinal.customOrdinal = timeUs;
     work->input.buffers.clear();
 
-    uint64_t queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
-    std::vector<std::shared_ptr<C2Buffer>> queuedBuffers;
     sp<Codec2Buffer> copy;
+    bool usesFrameReassembler = false;
 
     if (buffer->size() > 0u) {
         Mutexed<Input>::Locked input(mInput);
@@ -245,30 +258,55 @@
                       "buffer starvation on component.", mName);
             }
         }
-        work->input.buffers.push_back(c2buffer);
-        queuedBuffers.push_back(c2buffer);
+        if (input->frameReassembler) {
+            usesFrameReassembler = true;
+            input->frameReassembler.process(buffer, &items);
+        } else {
+            int32_t cvo = 0;
+            if (buffer->meta()->findInt32("cvo", &cvo)) {
+                int32_t rotation = cvo % 360;
+                // change rotation to counter-clockwise.
+                rotation = ((rotation <= 0) ? 0 : 360) - rotation;
+
+                Mutexed<OutputSurface>::Locked output(mOutputSurface);
+                uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
+                output->rotation[frameIndex] = rotation;
+            }
+            work->input.buffers.push_back(c2buffer);
+            if (encryptedBlock) {
+                work->input.infoBuffers.emplace_back(C2InfoBuffer::CreateLinearBuffer(
+                        kParamIndexEncryptedBuffer,
+                        encryptedBlock->share(0, blockSize, C2Fence())));
+            }
+        }
     } else if (eos) {
         flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
-    work->input.flags = (C2FrameData::flags_t)flags;
-    // TODO: fill info's
+    if (usesFrameReassembler) {
+        if (!items.empty()) {
+            items.front()->input.configUpdate = std::move(mParamsToBeSet);
+            mFrameIndex = (items.back()->input.ordinal.frameIndex + 1).peek();
+        }
+    } else {
+        work->input.flags = (C2FrameData::flags_t)flags;
+        // TODO: fill info's
 
-    work->input.configUpdate = std::move(mParamsToBeSet);
-    work->worklets.clear();
-    work->worklets.emplace_back(new C2Worklet);
+        work->input.configUpdate = std::move(mParamsToBeSet);
+        if (tunnelFirstFrame) {
+            C2StreamTunnelHoldRender::input tunnelHoldRender{
+                0u /* stream */,
+                C2_TRUE /* value */
+            };
+            work->input.configUpdate.push_back(C2Param::Copy(tunnelHoldRender));
+        }
+        work->worklets.clear();
+        work->worklets.emplace_back(new C2Worklet);
 
-    std::list<std::unique_ptr<C2Work>> items;
-    items.push_back(std::move(work));
-    mPipelineWatcher.lock()->onWorkQueued(
-            queuedFrameIndex,
-            std::move(queuedBuffers),
-            PipelineWatcher::Clock::now());
-    c2_status_t err = mComponent->queue(&items);
-    if (err != C2_OK) {
-        mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+        items.push_back(std::move(work));
+
+        eos = eos && buffer->size() > 0u;
     }
-
-    if (err == C2_OK && eos && buffer->size() > 0u) {
+    if (eos) {
         work.reset(new C2Work);
         work->input.ordinal.timestamp = timeUs;
         work->input.ordinal.frameIndex = mFrameIndex++;
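
The cvo handling added in this hunk converts the clockwise orientation attached to camera input buffers into the counter-clockwise rotation the output path expects. A tiny standalone version of that conversion with the boundary cases spelled out:

#include <cassert>
#include <cstdint>

// cvo is a clockwise rotation in degrees (a multiple of 90, possibly >= 360).
// Returns the equivalent counter-clockwise rotation in [0, 360).
static int32_t cvoToCounterClockwise(int32_t cvo) {
    int32_t rotation = cvo % 360;                   // 0, 90, 180 or 270
    return ((rotation <= 0) ? 0 : 360) - rotation;  // 0->0, 90->270, 180->180, 270->90
}

int main() {
    assert(cvoToCounterClockwise(0) == 0);
    assert(cvoToCounterClockwise(90) == 270);
    assert(cvoToCounterClockwise(180) == 180);
    assert(cvoToCounterClockwise(270) == 90);
    assert(cvoToCounterClockwise(450) == 270);  // 450 % 360 == 90
    return 0;
}
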
@@ -277,23 +315,28 @@
         work->input.buffers.clear();
         work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
         work->worklets.emplace_back(new C2Worklet);
-
-        queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
-        queuedBuffers.clear();
-
-        items.clear();
         items.push_back(std::move(work));
-
-        mPipelineWatcher.lock()->onWorkQueued(
-                queuedFrameIndex,
-                std::move(queuedBuffers),
-                PipelineWatcher::Clock::now());
-        err = mComponent->queue(&items);
-        if (err != C2_OK) {
-            mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
-        }
     }
-    if (err == C2_OK) {
+    c2_status_t err = C2_OK;
+    if (!items.empty()) {
+        {
+            Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+            PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
+            for (const std::unique_ptr<C2Work> &work : items) {
+                watcher->onWorkQueued(
+                        work->input.ordinal.frameIndex.peeku(),
+                        std::vector(work->input.buffers),
+                        now);
+            }
+        }
+        err = mComponent->queue(&items);
+    }
+    if (err != C2_OK) {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (const std::unique_ptr<C2Work> &work : items) {
+            watcher->onWorkDone(work->input.ordinal.frameIndex.peeku());
+        }
+    } else {
         Mutexed<Input>::Locked input(mInput);
         bool released = false;
         if (buffer) {
@@ -514,6 +557,40 @@
     }
     sp<EncryptedLinearBlockBuffer> encryptedBuffer((EncryptedLinearBlockBuffer *)buffer.get());
 
+    std::shared_ptr<C2LinearBlock> block;
+    size_t allocSize = buffer->size();
+    size_t bufferSize = 0;
+    c2_status_t blockRes = C2_OK;
+    bool copied = false;
+    if (mSendEncryptedInfoBuffer) {
+        static const C2MemoryUsage kDefaultReadWriteUsage{
+            C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+        constexpr int kAllocGranule0 = 1024 * 64;
+        constexpr int kAllocGranule1 = 1024 * 1024;
+        std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+        // round up encrypted sizes to limit fragmentation and encourage buffer reuse
+        if (allocSize <= kAllocGranule1) {
+            bufferSize = align(allocSize, kAllocGranule0);
+        } else {
+            bufferSize = align(allocSize, kAllocGranule1);
+        }
+        blockRes = pool->fetchLinearBlock(
+                bufferSize, kDefaultReadWriteUsage, &block);
+
+        if (blockRes == C2_OK) {
+            C2WriteView view = block->map().get();
+            if (view.error() == C2_OK && view.size() == bufferSize) {
+                copied = true;
+                // TODO: only copy clear sections
+                memcpy(view.data(), buffer->data(), allocSize);
+            }
+        }
+    }
+
+    if (!copied) {
+        block.reset();
+    }
+
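
The rounding above snaps encrypted input sizes to one of two granules so the pool sees only a few distinct block sizes: payloads up to 1 MiB round up to 64 KiB multiples, larger ones to 1 MiB multiples. A standalone sketch of that size policy (alignUp() is a local round-up helper standing in for whatever alignment utility the plugin uses):

#include <cassert>
#include <cstddef>

// Round size up to the next multiple of granule (granule must be > 0).
static size_t alignUp(size_t size, size_t granule) {
    return ((size + granule - 1) / granule) * granule;
}

// Two-tier rounding: limits fragmentation and encourages the pool to reuse
// previously allocated blocks instead of creating a new size class per frame.
static size_t roundEncryptedBufferSize(size_t size) {
    constexpr size_t kGranuleSmall = 64 * 1024;    // 64 KiB
    constexpr size_t kGranuleLarge = 1024 * 1024;  // 1 MiB
    return (size <= kGranuleLarge) ? alignUp(size, kGranuleSmall)
                                   : alignUp(size, kGranuleLarge);
}

int main() {
    assert(roundEncryptedBufferSize(1) == 64 * 1024);
    assert(roundEncryptedBufferSize(700 * 1024) == 704 * 1024);  // 11 * 64 KiB
    assert(roundEncryptedBufferSize(3 * 1024 * 1024 + 1) == 4 * 1024 * 1024);
    return 0;
}
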
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (numSubSamples == 1
@@ -605,7 +682,8 @@
     }
 
     buffer->setRange(codecDataOffset, result - codecDataOffset);
-    return queueInputBufferInternal(buffer);
+
+    return queueInputBufferInternal(buffer, block, bufferSize);
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailable() {
@@ -618,25 +696,26 @@
 }
 
 void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
-    if (mInputMetEos || mPipelineWatcher.lock()->pipelineFull()) {
+    if (mInputMetEos) {
         return;
     }
     {
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers ||
                 output->buffers->hasPending() ||
-                output->buffers->numClientBuffers() >= output->numSlots) {
+                output->buffers->numActiveSlots() >= output->numSlots) {
             return;
         }
     }
-    size_t numInputSlots = mInput.lock()->numSlots;
-    for (size_t i = 0; i < numInputSlots; ++i) {
+    size_t numActiveSlots = 0;
+    while (!mPipelineWatcher.lock()->pipelineFull()) {
         sp<MediaCodecBuffer> inBuffer;
         size_t index;
         {
             Mutexed<Input>::Locked input(mInput);
-            if (input->buffers->numClientBuffers() >= input->numSlots) {
-                return;
+            numActiveSlots = input->buffers->numActiveSlots();
+            if (numActiveSlots >= input->numSlots) {
+                break;
             }
             if (!input->buffers->requestNewBuffer(&index, &inBuffer)) {
                 ALOGV("[%s] no new buffer available", mName);
@@ -646,6 +725,7 @@
         ALOGV("[%s] new input index = %zu [%p]", mName, index, inBuffer.get());
         mCallback->onInputBufferAvailable(index, inBuffer);
     }
+    ALOGV("[%s] # active slots after feedInputBufferIfAvailable = %zu", mName, numActiveSlots);
 }
 
 status_t CCodecBufferChannel::renderOutputBuffer(
@@ -693,6 +773,22 @@
                 c2Buffer->getInfo(C2StreamRotationInfo::output::PARAM_TYPE));
     bool flip = rotation && (rotation->flip & 1);
     uint32_t quarters = ((rotation ? rotation->value : 0) / 90) & 3;
+
+    {
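+        // A per-frame rotation stashed under this frame index, if any, overrides the
+        // stream-level rotation info.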
+        Mutexed<OutputSurface>::Locked output(mOutputSurface);
+        if (output->surface == nullptr) {
+            ALOGI("[%s] cannot render buffer without surface", mName);
+            return OK;
+        }
+        int64_t frameIndex = 0;
+        if (buffer->meta()->findInt64("frameIndex", &frameIndex)
+                && output->rotation.count(frameIndex) != 0) {
+            auto it = output->rotation.find(frameIndex);
+            quarters = (it->second / 90) & 3;
+            output->rotation.erase(it);
+        }
+    }
+
     uint32_t transform = 0;
     switch (quarters) {
         case 0: // no rotation
@@ -736,14 +832,6 @@
         hdr10PlusInfo.reset();
     }
 
-    {
-        Mutexed<OutputSurface>::Locked output(mOutputSurface);
-        if (output->surface == nullptr) {
-            ALOGI("[%s] cannot render buffer without surface", mName);
-            return OK;
-        }
-    }
-
     std::vector<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
     if (blocks.size() != 1u) {
         ALOGD("[%s] expected 1 graphic block, but got %zu", mName, blocks.size());
@@ -814,9 +902,17 @@
     status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
+        if (result == NO_INIT) {
+            mCCodecCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+        }
         return result;
     }
-    ALOGV("[%s] queue buffer successful", mName);
+
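+    // Log queueBuffer success at debug level when the debug.stagefright.fps property is set.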
+    if (android::base::GetBoolProperty("debug.stagefright.fps", false)) {
+        ALOGD("[%s] queue buffer successful", mName);
+    } else {
+        ALOGV("[%s] queue buffer successful", mName);
+    }
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
@@ -877,27 +973,31 @@
         bool buffersBoundToCodec) {
     C2StreamBufferTypeSetting::input iStreamFormat(0u);
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
+    C2ComponentKindSetting kind;
     C2PortReorderBufferDepthTuning::output reorderDepth;
     C2PortReorderKeySetting::output reorderKey;
     C2PortActualDelayTuning::input inputDelay(0);
     C2PortActualDelayTuning::output outputDelay(0);
     C2ActualPipelineDelayTuning pipelineDelay(0);
+    C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
 
     c2_status_t err = mComponent->query(
             {
                 &iStreamFormat,
                 &oStreamFormat,
+                &kind,
                 &reorderDepth,
                 &reorderKey,
                 &inputDelay,
                 &pipelineDelay,
                 &outputDelay,
+                &secureMode,
             },
             {},
             C2_DONT_BLOCK,
             nullptr);
     if (err == C2_BAD_INDEX) {
-        if (!iStreamFormat || !oStreamFormat) {
+        if (!iStreamFormat || !oStreamFormat || !kind) {
             return UNKNOWN_ERROR;
         }
     } else if (err != C2_OK) {
@@ -914,18 +1014,26 @@
     // TODO: get this from input format
     bool secure = mComponent->getName().find(".secure") != std::string::npos;
 
+    // secure mode is a static parameter (shall not change in the executing state)
+    mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
+
     std::shared_ptr<C2AllocatorStore> allocatorStore = GetCodec2PlatformAllocatorStore();
     int poolMask = GetCodec2PoolMask();
     C2PlatformAllocatorStore::id_t preferredLinearId = GetPreferredLinearAllocatorId(poolMask);
 
     if (inputFormat != nullptr) {
         bool graphic = (iStreamFormat.value == C2BufferData::GRAPHIC);
+        bool audioEncoder = !graphic && (kind.value == C2Component::KIND_ENCODER);
         C2Config::api_feature_t apiFeatures = C2Config::api_feature_t(
                 API_REFLECTION |
                 API_VALUES |
                 API_CURRENT_VALUES |
                 API_DEPENDENCY |
                 API_SAME_INPUT_BUFFER);
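+        // For audio encoders, also query the frame size, sample rate, channel count and PCM
+        // encoding so that input can be reassembled into encoder-sized frames (see below).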
+        C2StreamAudioFrameSizeInfo::input encoderFrameSize(0u);
+        C2StreamSampleRateInfo::input sampleRate(0u);
+        C2StreamChannelCountInfo::input channelCount(0u);
+        C2StreamPcmEncodingInfo::input pcmEncoding(0u);
         std::shared_ptr<C2BlockPool> pool;
         {
             Mutexed<BlockPools>::Locked pools(mBlockPools);
@@ -938,7 +1046,19 @@
             // from component, create the input block pool with given ID. Otherwise, use default IDs.
             std::vector<std::unique_ptr<C2Param>> params;
             C2ApiFeaturesSetting featuresSetting{apiFeatures};
-            err = mComponent->query({ &featuresSetting },
+            std::vector<C2Param *> stackParams({&featuresSetting});
+            if (audioEncoder) {
+                stackParams.push_back(&encoderFrameSize);
+                stackParams.push_back(&sampleRate);
+                stackParams.push_back(&channelCount);
+                stackParams.push_back(&pcmEncoding);
+            } else {
+                encoderFrameSize.invalidate();
+                sampleRate.invalidate();
+                channelCount.invalidate();
+                pcmEncoding.invalidate();
+            }
+            err = mComponent->query(stackParams,
                                     { C2PortAllocatorsTuning::input::PARAM_TYPE },
                                     C2_DONT_BLOCK,
                                     &params);
@@ -996,10 +1116,21 @@
         input->numSlots = numInputSlots;
         input->extraBuffers.flush();
         input->numExtraSlots = 0u;
+        if (audioEncoder && encoderFrameSize && sampleRate && channelCount) {
+            input->frameReassembler.init(
+                    pool,
+                    {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                    encoderFrameSize.value,
+                    sampleRate.value,
+                    channelCount.value,
+                    pcmEncoding ? pcmEncoding.value : C2Config::PCM_16);
+        }
         bool conforming = (apiFeatures & API_SAME_INPUT_BUFFER);
         // For encrypted content, framework decrypts source buffer (ashmem) into
         // C2Buffers. Thus non-conforming codecs can process these.
-        if (!buffersBoundToCodec && (hasCryptoOrDescrambler() || conforming)) {
+        if (!buffersBoundToCodec
+                && !input->frameReassembler
+                && (hasCryptoOrDescrambler() || conforming)) {
             input->buffers.reset(new SlotInputBuffers(mName));
         } else if (graphic) {
             if (mInputSurface) {
@@ -1057,13 +1188,11 @@
     if (outputFormat != nullptr) {
         sp<IGraphicBufferProducer> outputSurface;
         uint32_t outputGeneration;
+        int maxDequeueCount = 0;
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
-            output->maxDequeueBuffers = numOutputSlots +
+            maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
                     reorderDepth.value + kRenderingDepth;
-            if (!secure) {
-                output->maxDequeueBuffers += numInputSlots;
-            }
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
             if (outputSurface) {
@@ -1074,10 +1203,13 @@
 
         bool graphic = (oStreamFormat.value == C2BufferData::GRAPHIC);
         C2BlockPool::local_id_t outputPoolId_;
+        C2BlockPool::local_id_t prevOutputPoolId;
 
         {
             Mutexed<BlockPools>::Locked pools(mBlockPools);
 
+            prevOutputPoolId = pools->outputPoolId;
+
             // set default allocator ID.
             pools->outputAllocatorId = (graphic) ? C2PlatformAllocatorStore::GRALLOC
                                                  : preferredLinearId;
@@ -1171,6 +1303,15 @@
             outputPoolId_ = pools->outputPoolId;
         }
 
+        if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
+                && prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
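+            // Release the output block pool left over from the previous configuration.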
+            c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+            if (err != C2_OK) {
+                ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
+                        (unsigned long long) prevOutputPoolId, asString(err), err);
+            }
+        }
+
         Mutexed<Output>::Locked output(mOutput);
         output->outputDelay = outputDelayValue;
         output->numSlots = numOutputSlots;
@@ -1198,7 +1339,17 @@
             mComponent->setOutputSurface(
                     outputPoolId_,
                     outputSurface,
-                    outputGeneration);
+                    outputGeneration,
+                    maxDequeueCount);
+        } else {
+            // configure CPU read consumer usage
+            C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+            // do not print error message for now as most components may not yet
+            // support this setting
+            ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
+                  mName, (long long)outputUsage.value);
         }
 
         if (oStreamFormat.value == C2BufferData::LINEAR) {
@@ -1235,7 +1386,7 @@
     // about buffers from the previous generation do not interfere with the
     // newly initialized pipeline capacity.
 
-    {
+    if (inputFormat || outputFormat) {
         Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
         watcher->inputDelay(inputDelayValue)
                 .pipelineDelay(pipelineDelayValue)
@@ -1298,54 +1449,30 @@
                 return a.capacity < b.capacity;
             });
 
-    {
-        Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
-        if (!configs->empty()) {
-            while (!configs->empty()) {
-                sp<ABuffer> config = configs->front();
-                configs->pop_front();
-                // Find the smallest input buffer that can fit the config.
-                auto i = std::find_if(
-                        clientInputBuffers.begin(),
-                        clientInputBuffers.end(),
-                        [cfgSize = config->size()](const ClientInputBuffer& b) {
-                            return b.capacity >= cfgSize;
-                        });
-                if (i == clientInputBuffers.end()) {
-                    ALOGW("[%s] no input buffer large enough for the config "
-                          "(%zu bytes)",
-                          mName, config->size());
-                    return NO_MEMORY;
-                }
-                sp<MediaCodecBuffer> buffer = i->buffer;
-                memcpy(buffer->base(), config->data(), config->size());
-                buffer->setRange(0, config->size());
-                buffer->meta()->clear();
-                buffer->meta()->setInt64("timeUs", 0);
-                buffer->meta()->setInt32("csd", 1);
-                if (queueInputBufferInternal(buffer) != OK) {
-                    ALOGW("[%s] Error while queueing a flushed config",
-                          mName);
-                    return UNKNOWN_ERROR;
-                }
-                clientInputBuffers.erase(i);
-            }
-        } else if (oStreamFormat.value == C2BufferData::LINEAR &&
-                   (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
-            sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
-            // WORKAROUND: Some apps expect CSD available without queueing
-            //             any input. Queue an empty buffer to get the CSD.
-            buffer->setRange(0, 0);
-            buffer->meta()->clear();
-            buffer->meta()->setInt64("timeUs", 0);
-            if (queueInputBufferInternal(buffer) != OK) {
-                ALOGW("[%s] Error while queueing an empty buffer to get CSD",
-                      mName);
-                return UNKNOWN_ERROR;
-            }
-            clientInputBuffers.pop_front();
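+    // Re-queue any codec config works that were stashed at flush time.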
+    std::list<std::unique_ptr<C2Work>> flushedConfigs;
+    mFlushedConfigs.lock()->swap(flushedConfigs);
+    if (!flushedConfigs.empty()) {
+        err = mComponent->queue(&flushedConfigs);
+        if (err != C2_OK) {
+            ALOGW("[%s] Error while queueing a flushed config", mName);
+            return UNKNOWN_ERROR;
         }
     }
+    if (oStreamFormat.value == C2BufferData::LINEAR &&
+            (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
+        sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+        // WORKAROUND: Some apps expect CSD available without queueing
+        //             any input. Queue an empty buffer to get the CSD.
+        buffer->setRange(0, 0);
+        buffer->meta()->clear();
+        buffer->meta()->setInt64("timeUs", 0);
+        if (queueInputBufferInternal(buffer) != OK) {
+            ALOGW("[%s] Error while queueing an empty buffer to get CSD",
+                  mName);
+            return UNKNOWN_ERROR;
+        }
+        clientInputBuffers.pop_front();
+    }
 
     for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
         mCallback->onInputBufferAvailable(
@@ -1359,14 +1486,14 @@
 void CCodecBufferChannel::stop() {
     mSync.stop();
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
-    mPipelineWatcher.lock()->flush();
 }
 
 void CCodecBufferChannel::reset() {
     stop();
+    if (mInputSurface != nullptr) {
+        mInputSurface.reset();
+    }
+    mPipelineWatcher.lock()->flush();
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -1394,27 +1521,39 @@
 
 void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
     ALOGV("[%s] flush", mName);
-    {
-        Mutexed<std::list<sp<ABuffer>>>::Locked configs(mFlushedConfigs);
-        for (const std::unique_ptr<C2Work> &work : flushedWork) {
-            if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
-                continue;
-            }
-            if (work->input.buffers.empty()
-                    || work->input.buffers.front()->data().linearBlocks().empty()) {
-                ALOGD("[%s] no linear codec config data found", mName);
-                continue;
-            }
-            C2ReadView view =
-                    work->input.buffers.front()->data().linearBlocks().front().map().get();
-            if (view.error() != C2_OK) {
-                ALOGD("[%s] failed to map flushed codec config data: %d", mName, view.error());
-                continue;
-            }
-            configs->push_back(ABuffer::CreateAsCopy(view.data(), view.capacity()));
-            ALOGV("[%s] stashed flushed codec config data (size=%u)", mName, view.capacity());
+    std::vector<uint64_t> indices;
+    std::list<std::unique_ptr<C2Work>> configs;
+    for (const std::unique_ptr<C2Work> &work : flushedWork) {
+        indices.push_back(work->input.ordinal.frameIndex.peeku());
+        if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
+            continue;
         }
+        if (work->input.buffers.empty()
+                || work->input.buffers.front() == nullptr
+                || work->input.buffers.front()->data().linearBlocks().empty()) {
+            ALOGD("[%s] no linear codec config data found", mName);
+            continue;
+        }
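+        // Keep a copy of the codec config work so it can be re-queued later;
+        // FLAG_DROP_FRAME marks its output to be dropped rather than reported.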
+        std::unique_ptr<C2Work> copy(new C2Work);
+        copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
+        copy->input.ordinal = work->input.ordinal;
+        copy->input.ordinal.frameIndex = mFrameIndex++;
+        copy->input.buffers.insert(
+                copy->input.buffers.begin(),
+                work->input.buffers.begin(),
+                work->input.buffers.end());
+        for (const std::unique_ptr<C2Param> &param : work->input.configUpdate) {
+            copy->input.configUpdate.push_back(C2Param::Copy(*param));
+        }
+        copy->input.infoBuffers.insert(
+                copy->input.infoBuffers.begin(),
+                work->input.infoBuffers.begin(),
+                work->input.infoBuffers.end());
+        copy->worklets.emplace_back(new C2Worklet);
+        configs.push_back(std::move(copy));
+        ALOGV("[%s] stashed flushed codec config data", mName);
     }
+    mFlushedConfigs.lock()->swap(configs);
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers->flush();
@@ -1427,7 +1566,12 @@
             output->buffers->flushStash();
         }
     }
-    mPipelineWatcher.lock()->flush();
+    {
+        Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+        for (uint64_t index : indices) {
+            watcher->onWorkDone(index);
+        }
+    }
 }
 
 void CCodecBufferChannel::onWorkDone(
@@ -1523,7 +1667,9 @@
         }
     }
 
-    std::optional<uint32_t> newInputDelay, newPipelineDelay;
+    std::optional<uint32_t> newInputDelay, newPipelineDelay, newOutputDelay, newReorderDepth;
+    std::optional<C2Config::ordinal_key_t> newReorderKey;
+    bool needMaxDequeueBufferCountUpdate = false;
     while (!worklet->output.configUpdate.empty()) {
         std::unique_ptr<C2Param> param;
         worklet->output.configUpdate.back().swap(param);
@@ -1532,24 +1678,10 @@
             case C2PortReorderBufferDepthTuning::CORE_INDEX: {
                 C2PortReorderBufferDepthTuning::output reorderDepth;
                 if (reorderDepth.updateFrom(*param)) {
-                    bool secure = mComponent->getName().find(".secure") !=
-                                  std::string::npos;
-                    mOutput.lock()->buffers->setReorderDepth(
-                            reorderDepth.value);
                     ALOGV("[%s] onWorkDone: updated reorder depth to %u",
                           mName, reorderDepth.value);
-                    size_t numOutputSlots = mOutput.lock()->numSlots;
-                    size_t numInputSlots = mInput.lock()->numSlots;
-                    Mutexed<OutputSurface>::Locked output(mOutputSurface);
-                    output->maxDequeueBuffers = numOutputSlots +
-                            reorderDepth.value + kRenderingDepth;
-                    if (!secure) {
-                        output->maxDequeueBuffers += numInputSlots;
-                    }
-                    if (output->surface) {
-                        output->surface->setMaxDequeuedBufferCount(
-                                output->maxDequeueBuffers);
-                    }
+                    newReorderDepth = reorderDepth.value;
+                    needMaxDequeueBufferCountUpdate = true;
                 } else {
                     ALOGD("[%s] onWorkDone: failed to read reorder depth",
                           mName);
@@ -1559,7 +1691,7 @@
             case C2PortReorderKeySetting::CORE_INDEX: {
                 C2PortReorderKeySetting::output reorderKey;
                 if (reorderKey.updateFrom(*param)) {
-                    mOutput.lock()->buffers->setReorderKey(reorderKey.value);
+                    newReorderKey = reorderKey.value;
                     ALOGV("[%s] onWorkDone: updated reorder key to %u",
                           mName, reorderKey.value);
                 } else {
@@ -1593,53 +1725,34 @@
                     if (outputDelay.updateFrom(*param)) {
                         ALOGV("[%s] onWorkDone: updating output delay %u",
                               mName, outputDelay.value);
-                        bool secure = mComponent->getName().find(".secure") !=
-                                      std::string::npos;
-                        (void)mPipelineWatcher.lock()->outputDelay(
-                                outputDelay.value);
+                        (void)mPipelineWatcher.lock()->outputDelay(outputDelay.value);
+                        newOutputDelay = outputDelay.value;
+                        needMaxDequeueBufferCountUpdate = true;
 
-                        bool outputBuffersChanged = false;
-                        size_t numOutputSlots = 0;
-                        size_t numInputSlots = mInput.lock()->numSlots;
-                        {
-                            Mutexed<Output>::Locked output(mOutput);
-                            if (!output->buffers) {
-                                return false;
-                            }
-                            output->outputDelay = outputDelay.value;
-                            numOutputSlots = outputDelay.value +
-                                             kSmoothnessFactor;
-                            if (output->numSlots < numOutputSlots) {
-                                output->numSlots = numOutputSlots;
-                                if (output->buffers->isArrayMode()) {
-                                    OutputBuffersArray *array =
-                                        (OutputBuffersArray *)output->buffers.get();
-                                    ALOGV("[%s] onWorkDone: growing output buffer array to %zu",
-                                          mName, numOutputSlots);
-                                    array->grow(numOutputSlots);
-                                    outputBuffersChanged = true;
-                                }
-                            }
-                            numOutputSlots = output->numSlots;
-                        }
-
-                        if (outputBuffersChanged) {
-                            mCCodecCallback->onOutputBuffersChanged();
-                        }
-
-                        uint32_t depth = mOutput.lock()->buffers->getReorderDepth();
-                        Mutexed<OutputSurface>::Locked output(mOutputSurface);
-                        output->maxDequeueBuffers = numOutputSlots + depth + kRenderingDepth;
-                        if (!secure) {
-                            output->maxDequeueBuffers += numInputSlots;
-                        }
-                        if (output->surface) {
-                            output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
-                        }
                     }
                 }
                 break;
             }
+            case C2PortTunnelSystemTime::CORE_INDEX: {
+                C2PortTunnelSystemTime::output frameRenderTime;
+                if (frameRenderTime.updateFrom(*param)) {
+                    ALOGV("[%s] onWorkDone: frame rendered (sys:%lld ns, media:%lld us)",
+                          mName, (long long)frameRenderTime.value,
+                          (long long)worklet->output.ordinal.timestamp.peekll());
+                    mCCodecCallback->onOutputFramesRendered(
+                            worklet->output.ordinal.timestamp.peek(), frameRenderTime.value);
+                }
+                break;
+            }
+            case C2StreamTunnelHoldRender::CORE_INDEX: {
+                C2StreamTunnelHoldRender::output firstTunnelFrameHoldRender;
+                if (!(worklet->output.flags & C2FrameData::FLAG_INCOMPLETE)) break;
+                if (!firstTunnelFrameHoldRender.updateFrom(*param)) break;
+                if (firstTunnelFrameHoldRender.value != C2_TRUE) break;
+                ALOGV("[%s] onWorkDone: first tunnel frame ready", mName);
+                mCCodecCallback->onFirstTunnelFrameReady();
+                break;
+            }
             default:
                 ALOGV("[%s] onWorkDone: unrecognized config update (%08X)",
                       mName, param->index());
@@ -1664,6 +1777,56 @@
             input->numSlots = newNumSlots;
         }
     }
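+    // Apply the collected reorder/output-delay updates in one pass and, if needed,
+    // recompute the maximum dequeued buffer count for the output surface.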
+    size_t numOutputSlots = 0;
+    uint32_t reorderDepth = 0;
+    bool outputBuffersChanged = false;
+    if (newReorderKey || newReorderDepth || needMaxDequeueBufferCountUpdate) {
+        Mutexed<Output>::Locked output(mOutput);
+        if (!output->buffers) {
+            return false;
+        }
+        numOutputSlots = output->numSlots;
+        if (newReorderKey) {
+            output->buffers->setReorderKey(newReorderKey.value());
+        }
+        if (newReorderDepth) {
+            output->buffers->setReorderDepth(newReorderDepth.value());
+        }
+        reorderDepth = output->buffers->getReorderDepth();
+        if (newOutputDelay) {
+            output->outputDelay = newOutputDelay.value();
+            numOutputSlots = newOutputDelay.value() + kSmoothnessFactor;
+            if (output->numSlots < numOutputSlots) {
+                output->numSlots = numOutputSlots;
+                if (output->buffers->isArrayMode()) {
+                    OutputBuffersArray *array =
+                        (OutputBuffersArray *)output->buffers.get();
+                    ALOGV("[%s] onWorkDone: growing output buffer array to %zu",
+                          mName, numOutputSlots);
+                    array->grow(numOutputSlots);
+                    outputBuffersChanged = true;
+                }
+            }
+        }
+        numOutputSlots = output->numSlots;
+    }
+    if (outputBuffersChanged) {
+        mCCodecCallback->onOutputBuffersChanged();
+    }
+    if (needMaxDequeueBufferCountUpdate) {
+        int maxDequeueCount = 0;
+        {
+            Mutexed<OutputSurface>::Locked output(mOutputSurface);
+            maxDequeueCount = output->maxDequeueBuffers =
+                    numOutputSlots + reorderDepth + kRenderingDepth;
+            if (output->surface) {
+                output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
+            }
+        }
+        if (maxDequeueCount > 0) {
+            mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+        }
+    }
 
     int32_t flags = 0;
     if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -1718,7 +1881,13 @@
         }
     }
 
-    if (notifyClient && !buffer && !flags) {
+    bool drop = false;
+    if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+        ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+        drop = true;
+    }
+
+    if (notifyClient && !buffer && !flags && !(drop && outputFormat)) {
         ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
               mName, work->input.ordinal.frameIndex.peekull());
         notifyClient = false;
@@ -1745,7 +1914,7 @@
             return false;
         }
         output->buffers->pushToStash(
-                buffer,
+                drop ? nullptr : buffer,
                 notifyClient,
                 timestamp.peek(),
                 flags,
@@ -1809,10 +1978,11 @@
                 & ((1 << 10) - 1));
 
     sp<IGraphicBufferProducer> producer;
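+    // Read the current max dequeue count once so the new surface and the component
+    // are configured with the same value.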
+    int maxDequeueCount = mOutputSurface.lock()->maxDequeueBuffers;
     if (newSurface) {
         newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
         newSurface->setDequeueTimeout(kDequeueTimeoutNs);
-        newSurface->setMaxDequeuedBufferCount(mOutputSurface.lock()->maxDequeueBuffers);
+        newSurface->setMaxDequeuedBufferCount(maxDequeueCount);
         producer = newSurface->getIGraphicBufferProducer();
         producer->setGenerationNumber(generation);
     } else {
@@ -1832,7 +2002,8 @@
         if (mComponent->setOutputSurface(
                 outputPoolId,
                 producer,
-                generation) != C2_OK) {
+                generation,
+                maxDequeueCount) != C2_OK) {
             ALOGI("[%s] setSurface: component setOutputSurface failed", mName);
             return INVALID_OPERATION;
         }
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 046c5c3..5a2aca2 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -31,6 +31,7 @@
 #include <media/stagefright/CodecBase.h>
 
 #include "CCodecBuffers.h"
+#include "FrameReassembler.h"
 #include "InputSurfaceWrapper.h"
 #include "PipelineWatcher.h"
 
@@ -44,6 +45,7 @@
     virtual void onError(status_t err, enum ActionCode actionCode) = 0;
     virtual void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) = 0;
     virtual void onOutputBuffersChanged() = 0;
+    virtual void onFirstTunnelFrameReady() = 0;
 };
 
 /**
@@ -238,7 +240,9 @@
 
     void feedInputBufferIfAvailable();
     void feedInputBufferIfAvailableInternal();
-    status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer);
+    status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer,
+                                      std::shared_ptr<C2LinearBlock> encryptedBlock = nullptr,
+                                      size_t blockSize = 0);
     bool handleWork(
             std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
             const C2StreamInitDataInfo::output *initData);
@@ -269,6 +273,8 @@
         size_t numExtraSlots;
         uint32_t inputDelay;
         uint32_t pipelineDelay;
+
+        FrameReassembler frameReassembler;
     };
     Mutexed<Input> mInput;
     struct Output {
@@ -277,7 +283,7 @@
         uint32_t outputDelay;
     };
     Mutexed<Output> mOutput;
-    Mutexed<std::list<sp<ABuffer>>> mFlushedConfigs;
+    Mutexed<std::list<std::unique_ptr<C2Work>>> mFlushedConfigs;
 
     std::atomic_uint64_t mFrameIndex;
     std::atomic_uint64_t mFirstValidFrameIndex;
@@ -288,6 +294,7 @@
         sp<Surface> surface;
         uint32_t generation;
         int maxDequeueBuffers;
+        std::map<uint64_t, int> rotation;
     };
     Mutexed<OutputSurface> mOutputSurface;
 
@@ -315,6 +322,7 @@
     inline bool hasCryptoOrDescrambler() {
         return mCrypto != nullptr || mDescrambler != nullptr;
     }
+    std::atomic_bool mSendEncryptedInfoBuffer;
 };
 
 // Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index e58a1e4..29cc564 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -27,6 +27,7 @@
 #include <mediadrm/ICrypto.h>
 
 #include "CCodecBuffers.h"
+#include "Codec2Mapper.h"
 
 namespace android {
 
@@ -76,29 +77,39 @@
 void CCodecBuffers::handleImageData(const sp<Codec2Buffer> &buffer) {
     sp<ABuffer> imageDataCandidate = buffer->getImageData();
     if (imageDataCandidate == nullptr) {
+        if (mFormatWithImageData) {
+            // We previously sent the format with image data, so use the same format.
+            buffer->setFormat(mFormatWithImageData);
+        }
         return;
     }
-    sp<ABuffer> imageData;
-    if (!mFormat->findBuffer("image-data", &imageData)
-            || imageDataCandidate->size() != imageData->size()
-            || memcmp(imageDataCandidate->data(), imageData->data(), imageData->size()) != 0) {
+    if (!mLastImageData
+            || imageDataCandidate->size() != mLastImageData->size()
+            || memcmp(imageDataCandidate->data(),
+                      mLastImageData->data(),
+                      mLastImageData->size()) != 0) {
         ALOGD("[%s] updating image-data", mName);
-        sp<AMessage> newFormat = dupFormat();
-        newFormat->setBuffer("image-data", imageDataCandidate);
+        mFormatWithImageData = dupFormat();
+        mLastImageData = imageDataCandidate;
+        mFormatWithImageData->setBuffer("image-data", imageDataCandidate);
         MediaImage2 *img = (MediaImage2*)imageDataCandidate->data();
         if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
             int32_t stride = img->mPlane[0].mRowInc;
-            newFormat->setInt32(KEY_STRIDE, stride);
+            mFormatWithImageData->setInt32(KEY_STRIDE, stride);
             ALOGD("[%s] updating stride = %d", mName, stride);
             if (img->mNumPlanes > 1 && stride > 0) {
-                int32_t vstride = (img->mPlane[1].mOffset - img->mPlane[0].mOffset) / stride;
-                newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+                int64_t offsetDelta =
+                    (int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
+                int32_t vstride = int32_t(offsetDelta / stride);
+                mFormatWithImageData->setInt32(KEY_SLICE_HEIGHT, vstride);
                 ALOGD("[%s] updating vstride = %d", mName, vstride);
+                buffer->setRange(
+                        img->mPlane[0].mOffset,
+                        buffer->size() - img->mPlane[0].mOffset);
             }
         }
-        setFormat(newFormat);
-        buffer->setFormat(newFormat);
     }
+    buffer->setFormat(mFormatWithImageData);
 }
 
 // InputBuffers
@@ -156,8 +167,7 @@
     setSkipCutBuffer(delay, padding);
 }
 
-void OutputBuffers::updateSkipCutBuffer(
-        const sp<AMessage> &format, bool notify) {
+void OutputBuffers::updateSkipCutBuffer(const sp<AMessage> &format) {
     AString mediaType;
     if (format->findString(KEY_MIME, &mediaType)
             && mediaType == MIMETYPE_AUDIO_RAW) {
@@ -168,9 +178,6 @@
             updateSkipCutBuffer(sampleRate, channelCount);
         }
     }
-    if (notify) {
-        mUnreportedFormat = nullptr;
-    }
 }
 
 void OutputBuffers::submit(const sp<MediaCodecBuffer> &buffer) {
@@ -194,7 +201,6 @@
     mReorderStash.clear();
     mDepth = 0;
     mKey = C2Config::ORDINAL;
-    mUnreportedFormat = nullptr;
 }
 
 void OutputBuffers::flushStash() {
@@ -270,27 +276,15 @@
     *c2Buffer = entry.buffer;
     sp<AMessage> outputFormat = entry.format;
 
-    // The output format can be processed without a registered slot.
-    if (outputFormat) {
-        ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
-                mName, outputFormat->debugString().c_str());
-        updateSkipCutBuffer(outputFormat, entry.notify);
-    }
-
-    if (entry.notify) {
-        if (outputFormat) {
-            setFormat(outputFormat);
-        } else if (mUnreportedFormat) {
-            outputFormat = mUnreportedFormat;
-            setFormat(outputFormat);
-        }
-        mUnreportedFormat = nullptr;
-    } else {
-        if (outputFormat) {
-            mUnreportedFormat = outputFormat;
-        } else if (!mUnreportedFormat) {
-            mUnreportedFormat = mFormat;
-        }
+    if (entry.notify && mFormat != outputFormat) {
+        updateSkipCutBuffer(outputFormat);
+        // Trigger image data processing to the new format
+        mLastImageData.clear();
+        ALOGV("[%s] popFromStashAndRegister: output format reference changed: %p -> %p",
+                mName, mFormat.get(), outputFormat.get());
+        ALOGD("[%s] popFromStashAndRegister: at %lldus, output format changed to %s",
+                mName, (long long)entry.timestamp, outputFormat->debugString().c_str());
+        setFormat(outputFormat);
     }
 
     // Flushing mReorderStash because no other buffers should come after output
@@ -317,6 +311,7 @@
     // Append information from the front stash entry to outBuffer.
     (*outBuffer)->meta()->setInt64("timeUs", entry.timestamp);
     (*outBuffer)->meta()->setInt32("flags", entry.flags);
+    (*outBuffer)->meta()->setInt64("frameIndex", entry.ordinal.frameIndex.peekll());
     ALOGV("[%s] popFromStashAndRegister: "
           "out buffer index = %zu [%p] => %p + %zu (%lld)",
           mName, *index, outBuffer->get(),
@@ -487,11 +482,12 @@
     mBuffers.clear();
 }
 
-size_t FlexBuffersImpl::numClientBuffers() const {
+size_t FlexBuffersImpl::numActiveSlots() const {
     return std::count_if(
             mBuffers.begin(), mBuffers.end(),
             [](const Entry &entry) {
-                return (entry.clientBuffer != nullptr);
+                return (entry.clientBuffer != nullptr
+                        || !entry.compBuffer.expired());
             });
 }
 
@@ -637,11 +633,11 @@
     }
 }
 
-size_t BuffersArrayImpl::numClientBuffers() const {
+size_t BuffersArrayImpl::numActiveSlots() const {
     return std::count_if(
             mBuffers.begin(), mBuffers.end(),
             [](const Entry &entry) {
-                return entry.ownedByClient;
+                return entry.ownedByClient || !entry.compBuffer.expired();
             });
 }
 
@@ -691,8 +687,8 @@
     mImpl.flush();
 }
 
-size_t InputBuffersArray::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t InputBuffersArray::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> InputBuffersArray::createNewBuffer() {
@@ -729,8 +725,8 @@
     return nullptr;
 }
 
-size_t SlotInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t SlotInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> SlotInputBuffers::createNewBuffer() {
@@ -781,8 +777,8 @@
     return std::move(array);
 }
 
-size_t LinearInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t LinearInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 // static
@@ -795,8 +791,9 @@
         capacity = kMaxLinearBufferSize;
     }
 
-    // TODO: read usage from intf
-    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    int64_t usageValue = 0;
+    (void)format->findInt64("android._C2MemoryUsage", &usageValue);
+    C2MemoryUsage usage{usageValue | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE};
     std::shared_ptr<C2LinearBlock> block;
 
     c2_status_t err = pool->fetchLinearBlock(capacity, usage, &block);
@@ -958,8 +955,8 @@
     return std::move(array);
 }
 
-size_t GraphicMetadataInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t GraphicMetadataInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> GraphicMetadataInputBuffers::createNewBuffer() {
@@ -1007,31 +1004,46 @@
     // track of the flushed work.
 }
 
+static uint32_t extractPixelFormat(const sp<AMessage> &format) {
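+    // Map the framework color format stored in "android._color-format" to a Codec2
+    // pixel format; return PIXEL_FORMAT_UNKNOWN when the key is absent or unmapped.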
+    int32_t frameworkColorFormat = 0;
+    if (!format->findInt32("android._color-format", &frameworkColorFormat)) {
+        return PIXEL_FORMAT_UNKNOWN;
+    }
+    uint32_t pixelFormat = PIXEL_FORMAT_UNKNOWN;
+    if (C2Mapper::mapPixelFormatFrameworkToCodec(frameworkColorFormat, &pixelFormat)) {
+        return pixelFormat;
+    }
+    return PIXEL_FORMAT_UNKNOWN;
+}
+
 std::unique_ptr<InputBuffers> GraphicInputBuffers::toArrayMode(size_t size) {
     std::unique_ptr<InputBuffersArray> array(
             new InputBuffersArray(mComponentName.c_str(), "2D-BB-Input[N]"));
     array->setPool(mPool);
     array->setFormat(mFormat);
+    uint32_t pixelFormat = extractPixelFormat(mFormat);
     array->initialize(
             mImpl,
             size,
-            [pool = mPool, format = mFormat, lbp = mLocalBufferPool]() -> sp<Codec2Buffer> {
+            [pool = mPool, format = mFormat, lbp = mLocalBufferPool, pixelFormat]()
+                    -> sp<Codec2Buffer> {
                 C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
                 return AllocateGraphicBuffer(
-                        pool, format, HAL_PIXEL_FORMAT_YV12, usage, lbp);
+                        pool, format, pixelFormat, usage, lbp);
             });
     return std::move(array);
 }
 
-size_t GraphicInputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t GraphicInputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 sp<Codec2Buffer> GraphicInputBuffers::createNewBuffer() {
-    // TODO: read usage from intf
-    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    int64_t usageValue = 0;
+    (void)mFormat->findInt64("android._C2MemoryUsage", &usageValue);
+    C2MemoryUsage usage{usageValue | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE};
     return AllocateGraphicBuffer(
-            mPool, mFormat, HAL_PIXEL_FORMAT_YV12, usage, mLocalBufferPool);
+            mPool, mFormat, extractPixelFormat(mFormat), usage, mLocalBufferPool);
 }
 
 // OutputBuffersArray
@@ -1113,8 +1125,8 @@
     mImpl.getArray(array);
 }
 
-size_t OutputBuffersArray::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t OutputBuffersArray::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 void OutputBuffersArray::realloc(const std::shared_ptr<C2Buffer> &c2buffer) {
@@ -1167,7 +1179,6 @@
 void OutputBuffersArray::transferFrom(OutputBuffers* source) {
     mFormat = source->mFormat;
     mSkipCutBuffer = source->mSkipCutBuffer;
-    mUnreportedFormat = source->mUnreportedFormat;
     mPending = std::move(source->mPending);
     mReorderStash = std::move(source->mReorderStash);
     mDepth = source->mDepth;
@@ -1224,8 +1235,8 @@
     return array;
 }
 
-size_t FlexOutputBuffers::numClientBuffers() const {
-    return mImpl.numClientBuffers();
+size_t FlexOutputBuffers::numActiveSlots() const {
+    return mImpl.numActiveSlots();
 }
 
 // LinearOutputBuffers
@@ -1290,17 +1301,7 @@
 
 sp<Codec2Buffer> RawGraphicOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
     if (buffer == nullptr) {
-        sp<Codec2Buffer> c2buffer = ConstGraphicBlockBuffer::AllocateEmpty(
-                mFormat,
-                [lbp = mLocalBufferPool](size_t capacity) {
-                    return lbp->newBuffer(capacity);
-                });
-        if (c2buffer == nullptr) {
-            ALOGD("[%s] ConstGraphicBlockBuffer::AllocateEmpty failed", mName);
-            return nullptr;
-        }
-        c2buffer->setRange(0, 0);
-        return c2buffer;
+        return new Codec2Buffer(mFormat, new ABuffer(nullptr, 0));
     } else {
         return ConstGraphicBlockBuffer::Allocate(
                 mFormat,
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 0d4fa81..995d3a4 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -33,8 +33,8 @@
 class SkipCutBuffer;
 
 constexpr size_t kLinearBufferSize = 1048576;
-// This can fit 4K RGBA frame, and most likely client won't need more than this.
-constexpr size_t kMaxLinearBufferSize = 4096 * 2304 * 4;
+// This can fit an 8K (7680x4320) frame at 2 bytes per pixel.
+constexpr size_t kMaxLinearBufferSize = 7680 * 4320 * 2;
 
 /**
  * Base class for representation of buffers at one port.
@@ -72,7 +72,7 @@
     /**
-     * Return number of buffers the client owns.
+     * Return the number of slots that are currently in use by the client or the component.
      */
-    virtual size_t numClientBuffers() const = 0;
+    virtual size_t numActiveSlots() const = 0;
 
     /**
      * Examine image data from the buffer and update the format if necessary.
@@ -86,6 +86,9 @@
     // Format to be used for creating MediaCodec-facing buffers.
     sp<AMessage> mFormat;
 
+    sp<ABuffer> mLastImageData;
+    sp<AMessage> mFormatWithImageData;
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(CCodecBuffers);
 };
@@ -215,10 +218,8 @@
 
     /**
      * Update SkipCutBuffer from format. The @p format must not be null.
-     * @p notify determines whether the format comes with a buffer that should
-     * be reported to the client or not.
      */
-    void updateSkipCutBuffer(const sp<AMessage> &format, bool notify = true);
+    void updateSkipCutBuffer(const sp<AMessage> &format);
 
     /**
      * Output Stash
@@ -392,9 +393,6 @@
 
     // Output stash
 
-    // Output format that has not been made available to the client.
-    sp<AMessage> mUnreportedFormat;
-
     // Struct for an entry in the output stash (mPending and mReorderStash)
     struct StashEntry {
         inline StashEntry()
@@ -584,7 +582,7 @@
-     * Return the number of buffers that are sent to the client but not released
-     * yet.
+     * Return the number of slots whose buffer is either held by the client or
+     * still referenced by the component.
      */
-    size_t numClientBuffers() const;
+    size_t numActiveSlots() const;
 
     /**
      * Return the number of buffers that are sent to the component but not
@@ -705,7 +703,7 @@
      * Return the number of buffers that are sent to the client but not released
      * yet.
      */
-    size_t numClientBuffers() const;
+    size_t numActiveSlots() const;
 
     /**
      * Return the size of the array.
@@ -765,7 +763,7 @@
 
     void flush() override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -796,7 +794,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() final;
@@ -826,7 +824,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -894,7 +892,7 @@
 
     std::unique_ptr<InputBuffers> toArrayMode(size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -924,7 +922,7 @@
     std::unique_ptr<InputBuffers> toArrayMode(
             size_t size) final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
 protected:
     sp<Codec2Buffer> createNewBuffer() override;
@@ -965,7 +963,7 @@
         array->clear();
     }
 
-    size_t numClientBuffers() const final {
+    size_t numActiveSlots() const final {
         return 0u;
     }
 
@@ -1019,7 +1017,7 @@
 
     void getArray(Vector<sp<MediaCodecBuffer>> *array) const final;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
     /**
      * Reallocate the array, filled with buffers with the same size as given
@@ -1073,7 +1071,7 @@
 
     std::unique_ptr<OutputBuffersArray> toArrayMode(size_t size) override;
 
-    size_t numClientBuffers() const final;
+    size_t numActiveSlots() const final;
 
     /**
      * Return an appropriate Codec2Buffer object for the type of buffers.
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 96f86e8..c275187 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -18,11 +18,13 @@
 #define LOG_TAG "CCodecConfig"
 #include <cutils/properties.h>
 #include <log/log.h>
+#include <utils/NativeHandle.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
 
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
 #include "CCodecConfig.h"
@@ -289,8 +291,8 @@
     std::vector<std::string> getPathsForDomain(
             Domain any, Domain all = Domain::ALL) const {
         std::vector<std::string> res;
-        for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mConfigMappers) {
-            for (const ConfigMapper &cm : el.second) {
+        for (const auto &[key, mappers] : mConfigMappers) {
+            for (const ConfigMapper &cm : mappers) {
                 ALOGV("filtering %s %x %x %x %x", cm.path().c_str(), cm.domain(), any,
                         (cm.domain() & any), (cm.domain() & any & all));
                 if ((cm.domain() & any) && ((cm.domain() & any & all) == (any & all))) {
@@ -321,7 +323,8 @@
 CCodecConfig::CCodecConfig()
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
-      mUsingSurface(false) { }
+      mUsingSurface(false),
+      mTunneled(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
@@ -359,7 +362,10 @@
         .limitTo(D::OUTPUT & D::READ));
 
     add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
-        .limitTo(D::ENCODER & D::OUTPUT));
+        .limitTo(D::ENCODER & D::CODED));
+    // Some audio decoders require bitrate information to be set
+    add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::CODED));
     // we also need to put the bitrate in the max bitrate field
     add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
         .limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -415,19 +421,38 @@
     add(ConfigMapper("color-matrix",        C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "matrix")
         .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::CODED & (D::CONFIG | D::PARAM)));
 
+    // read back default for decoders. This is needed in case the component does not support
+    // color aspects. In that case, these values get copied to color-* keys.
+    // TRICKY: We read these values at raw port, since that's where we want to read these.
+    add(ConfigMapper("default-color-range",     C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "range")
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
+        .withC2Mappers<C2Color::range_t>());
+    add(ConfigMapper("default-color-transfer",  C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "transfer")
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
+        .withC2Mappers<C2Color::transfer_t>());
+    add(ConfigMapper("default-color-primaries", C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "primaries")
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
+    add(ConfigMapper("default-color-matrix",    C2_PARAMKEY_DEFAULT_COLOR_ASPECTS,   "matrix")
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
+
     // read back final for decoder output (also, configure final aspects as well. This should be
     // overwritten based on coded/default values if component supports color aspects, but is used
     // as final values if component does not support aspects at all)
     add(ConfigMapper(KEY_COLOR_RANGE,       C2_PARAMKEY_COLOR_ASPECTS,   "range")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW)
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
         .withC2Mappers<C2Color::range_t>());
     add(ConfigMapper(KEY_COLOR_TRANSFER,    C2_PARAMKEY_COLOR_ASPECTS,   "transfer")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW)
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ)
         .withC2Mappers<C2Color::transfer_t>());
     add(ConfigMapper("color-primaries",     C2_PARAMKEY_COLOR_ASPECTS,   "primaries")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW));
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
     add(ConfigMapper("color-matrix",        C2_PARAMKEY_COLOR_ASPECTS,   "matrix")
-        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW));
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::READ));
+
+    // configure transfer request
+    add(ConfigMapper("color-transfer-request", C2_PARAMKEY_COLOR_ASPECTS, "transfer")
+        .limitTo((D::VIDEO | D::IMAGE) & D::DECODER  & D::RAW & D::CONFIG)
+        .withC2Mappers<C2Color::transfer_t>());
 
     // configure source aspects for encoders and read them back on the coded(!) port.
     // This is to ensure muxing the desired aspects into the container.
@@ -488,7 +513,7 @@
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
-    add(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    add(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                      C2_PARAMKEY_PREPEND_HEADER_MODE, "value")
         .limitTo(D::ENCODER & D::VIDEO)
         .withMappers([](C2Value v) -> C2Value {
@@ -512,7 +537,7 @@
             return C2Value();
         }));
     // remove when codecs switch to PARAMKEY
-    deprecated(ConfigMapper(KEY_PREPEND_HEADERS_TO_SYNC_FRAMES,
+    deprecated(ConfigMapper(KEY_PREPEND_HEADER_TO_SYNC_FRAMES,
                             "coding.add-csd-to-sync-frames", "value")
                .limitTo(D::ENCODER & D::VIDEO));
     // convert to timestamp base
@@ -708,6 +733,17 @@
             return C2Value();
         }));
 
+    add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+        .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+        .withMapper([mapper](C2Value v) -> C2Value {
+            C2Config::profile_t c2 = PROFILE_UNUSED;
+            int32_t sdk;
+            if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                return c2;
+            }
+            return PROFILE_UNUSED;
+        }));
+
     // convert to dBFS and add default
     add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
         .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -765,21 +801,13 @@
 
     // convert to compression type and add default
     add(ConfigMapper(KEY_AAC_DRC_HEAVY_COMPRESSION, C2_PARAMKEY_DRC_COMPRESSION_MODE, "value")
-        .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
-        .withMappers([](C2Value v) -> C2Value {
+        .limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM))
+        .withMapper([](C2Value v) -> C2Value {
             int32_t value;
             if (!v.get(&value) || value < 0) {
                 value = property_get_int32(PROP_DRC_OVERRIDE_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
             }
             return value == 1 ? C2Config::DRC_COMPRESSION_HEAVY : C2Config::DRC_COMPRESSION_LIGHT;
-        },[](C2Value v) -> C2Value {
-            int32_t value;
-            if (v.get(&value)) {
-              return value;
-            }
-            else {
-              return C2Value();
-            }
         }));
 
     // convert to dBFS and add default
@@ -881,6 +909,8 @@
             }
         }));
 
+    add(ConfigMapper("android._encoding-quality-level", C2_PARAMKEY_ENCODING_QUALITY_LEVEL, "value")
+        .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
     add(ConfigMapper(KEY_QUALITY, C2_PARAMKEY_QUALITY, "value")
         .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
     add(ConfigMapper(KEY_FLAC_COMPRESSION_LEVEL, C2_PARAMKEY_COMPLEXITY, "value")
@@ -910,6 +940,14 @@
             return value == 0 ? C2_FALSE : C2_TRUE;
         }));
 
+    add(ConfigMapper("android._trigger-tunnel-peek", C2_PARAMKEY_TUNNEL_START_RENDER, "value")
+        .limitTo(D::PARAM & D::VIDEO & D::DECODER)
+        .withMapper([](C2Value v) -> C2Value {
+            int32_t value = 0;
+            (void)v.get(&value);
+            return value == 0 ? C2_FALSE : C2_TRUE;
+        }));
+
     /* still to do
     constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
 
@@ -1009,11 +1047,14 @@
                     new C2StreamPixelAspectRatioInfo::output(0u, 1u, 1u),
                     C2_PARAMKEY_PIXEL_ASPECT_RATIO);
             addLocalParam(new C2StreamRotationInfo::output(0u, 0), C2_PARAMKEY_ROTATION);
-            addLocalParam(new C2StreamColorAspectsInfo::output(0u), C2_PARAMKEY_COLOR_ASPECTS);
+            addLocalParam(
+                    new C2StreamColorAspectsTuning::output(0u),
+                    C2_PARAMKEY_DEFAULT_COLOR_ASPECTS);
             addLocalParam<C2StreamDataSpaceInfo::output>(C2_PARAMKEY_DATA_SPACE);
             addLocalParam<C2StreamHdrStaticInfo::output>(C2_PARAMKEY_HDR_STATIC_INFO);
-            addLocalParam(new C2StreamSurfaceScalingInfo::output(0u, VIDEO_SCALING_MODE_SCALE_TO_FIT),
-                          C2_PARAMKEY_SURFACE_SCALING_MODE);
+            addLocalParam(
+                    new C2StreamSurfaceScalingInfo::output(0u, VIDEO_SCALING_MODE_SCALE_TO_FIT),
+                    C2_PARAMKEY_SURFACE_SCALING_MODE);
         } else {
             addLocalParam(new C2StreamColorAspectsInfo::input(0u), C2_PARAMKEY_COLOR_ASPECTS);
         }
@@ -1045,7 +1086,7 @@
             std::vector<std::string> keys;
             mParamUpdater->getKeysForParamIndex(desc->index(), &keys);
             for (const std::string &key : keys) {
-                mVendorParamIndices.insert_or_assign(key, desc->index());
+                mVendorParams.insert_or_assign(key, desc);
             }
         }
     }
@@ -1112,6 +1153,12 @@
             insertion.first->second = std::move(p);
         }
     }
+    if (mInputSurface
+            && (domain & mOutputDomain)
+            && mInputSurfaceDataspace != mInputSurface->getDataspace()) {
+        changed = true;
+        mInputSurfaceDataspace = mInputSurface->getDataspace();
+    }
 
     ALOGV("updated configuration has %zu params (%s)", mCurrentConfig.size(),
             changed ? "CHANGED" : "no change");
@@ -1180,8 +1227,8 @@
         const ReflectedParamUpdater::Dict &reflected,
         Domain portDomain) const {
     sp<AMessage> msg = new AMessage;
-    for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mStandardParams->getKeys()) {
-        for (const ConfigMapper &cm : el.second) {
+    for (const auto &[key, mappers] : mStandardParams->getKeys()) {
+        for (const ConfigMapper &cm : mappers) {
             if ((cm.domain() & portDomain) == 0 // input-output-coded-raw
                 || (cm.domain() & mDomain) != mDomain // component domain + kind (these must match)
                 || (cm.domain() & IS_READ) == 0) {
@@ -1205,26 +1252,26 @@
                 ALOGD("unexpected untyped query value for key: %s", cm.path().c_str());
                 continue;
             }
-            msg->setItem(el.first.c_str(), item);
+            msg->setItem(key.c_str(), item);
         }
     }
 
     bool input = (portDomain & Domain::IS_INPUT);
     std::vector<std::string> vendorKeys;
-    for (const std::pair<std::string, ReflectedParamUpdater::Value> &entry : reflected) {
-        auto it = mVendorParamIndices.find(entry.first);
-        if (it == mVendorParamIndices.end()) {
+    for (const auto &[key, value] : reflected) {
+        auto it = mVendorParams.find(key);
+        if (it == mVendorParams.end()) {
             continue;
         }
-        if (mSubscribedIndices.count(it->second) == 0) {
+        C2Param::Index index = it->second->index();
+        if (mSubscribedIndices.count(index) == 0) {
             continue;
         }
         // For vendor parameters, we only care about direction
-        if ((input && !it->second.forInput())
-                || (!input && !it->second.forOutput())) {
+        if ((input && !index.forInput())
+                || (!input && !index.forOutput())) {
             continue;
         }
-        const ReflectedParamUpdater::Value &value = entry.second;
         C2Value c2Value;
         sp<ABuffer> bufValue;
         AString strValue;
@@ -1236,10 +1283,10 @@
         } else if (value.find(&strValue)) {
             item.set(strValue);
         } else {
-            ALOGD("unexpected untyped query value for key: %s", entry.first.c_str());
+            ALOGD("unexpected untyped query value for key: %s", key.c_str());
             continue;
         }
-        msg->setItem(entry.first.c_str(), item);
+        msg->setItem(key.c_str(), item);
     }
 
     { // convert from Codec 2.0 rect to MediaFormat rect and add crop rect if not present
@@ -1299,9 +1346,46 @@
         }
     }
 
+    // Remove KEY_AAC_SBR_MODE from the SDK message if it is outside the supported range,
+    // as the SDK has no way to signal a default SBR mode based on profile and instead
+    // requires the key to be absent from the format to signal that.
+    int sbrMode;
+    if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+        msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+    }
+
     { // convert color info
+        // move default color to color aspect if not read from the component
+        int32_t tmp;
+        int32_t range;
+        if (msg->findInt32("default-color-range", &range)) {
+            if (!msg->findInt32(KEY_COLOR_RANGE, &tmp)) {
+                msg->setInt32(KEY_COLOR_RANGE, range);
+            }
+            msg->removeEntryAt(msg->findEntryByName("default-color-range"));
+        }
+        int32_t transfer;
+        if (msg->findInt32("default-color-transfer", &transfer)) {
+            if (!msg->findInt32(KEY_COLOR_TRANSFER, &tmp)) {
+                msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+            }
+            msg->removeEntryAt(msg->findEntryByName("default-color-transfer"));
+        }
         C2Color::primaries_t primaries;
+        if (msg->findInt32("default-color-primaries", (int32_t*)&primaries)) {
+            if (!msg->findInt32("color-primaries", &tmp)) {
+                msg->setInt32("color-primaries", primaries);
+            }
+            msg->removeEntryAt(msg->findEntryByName("default-color-primaries"));
+        }
         C2Color::matrix_t matrix;
+        if (msg->findInt32("default-color-matrix", (int32_t*)&matrix)) {
+            if (!msg->findInt32("color-matrix", &tmp)) {
+                msg->setInt32("color-matrix", matrix);
+            }
+            msg->removeEntryAt(msg->findEntryByName("default-color-matrix"));
+        }
+
         if (msg->findInt32("color-primaries", (int32_t*)&primaries)
                 && msg->findInt32("color-matrix", (int32_t*)&matrix)) {
             int32_t standard;
@@ -1314,7 +1398,6 @@
             msg->removeEntryAt(msg->findEntryByName("color-matrix"));
         }
 
-
         // calculate dataspace for raw graphic buffers if not specified by component, or if
         // using surface with unspecified aspects (as those must be defaulted which may change
         // the dataspace)
@@ -1352,6 +1435,23 @@
             }
         }
 
+        if (mInputSurface) {
+            android_dataspace dataspace = mInputSurface->getDataspace();
+            ColorUtils::convertDataSpaceToV0(dataspace);
+            int32_t standard;
+            ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+            if (range != 0) {
+                msg->setInt32(KEY_COLOR_RANGE, range);
+            }
+            if (standard != 0) {
+                msg->setInt32(KEY_COLOR_STANDARD, standard);
+            }
+            if (transfer != 0) {
+                msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+            }
+            msg->setInt32("android._dataspace", dataspace);
+        }
+
         // HDR static info
 
         C2HdrStaticMetadataStruct hdr;
@@ -1393,22 +1493,22 @@
                 meta.sType1.mMinDisplayLuminance = hdr.mastering.minLuminance / 0.0001 + 0.5;
                 meta.sType1.mMaxContentLightLevel = hdr.maxCll + 0.5;
                 meta.sType1.mMaxFrameAverageLightLevel = hdr.maxFall + 0.5;
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.red.x"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.red.y"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.green.x"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.green.y"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.blue.x"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.blue.y"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.white.x"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.white.y"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.max-luminance"));
-                msg->removeEntryAt(msg->findEntryByName("smpte2086.min-luminance"));
-                msg->removeEntryAt(msg->findEntryByName("cta861.max-cll"));
-                msg->removeEntryAt(msg->findEntryByName("cta861.max-fall"));
                 msg->setBuffer(KEY_HDR_STATIC_INFO, ABuffer::CreateAsCopy(&meta, sizeof(meta)));
             } else {
                 ALOGD("found invalid HDR static metadata %s", msg->debugString(8).c_str());
             }
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.red.x"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.red.y"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.green.x"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.green.y"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.blue.x"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.blue.y"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.white.x"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.white.y"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.max-luminance"));
+            msg->removeEntryAt(msg->findEntryByName("smpte2086.min-luminance"));
+            msg->removeEntryAt(msg->findEntryByName("cta861.max-cll"));
+            msg->removeEntryAt(msg->findEntryByName("cta861.max-fall"));
         }
     }
 
@@ -1643,8 +1743,8 @@
             }
         }
     }
-    ALOGV("filtered %s to %s", params->debugString(4).c_str(),
-            filtered.debugString(4).c_str());
+    ALOGV("filter src msg %s", params->debugString(4).c_str());
+    ALOGV("filter dst params %s", filtered.debugString(4).c_str());
     return filtered;
 }
 
@@ -1769,8 +1869,81 @@
 status_t CCodecConfig::subscribeToAllVendorParams(
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         c2_blocking_t blocking) {
-    for (const std::pair<std::string, C2Param::Index> &entry : mVendorParamIndices) {
-        mSubscribedIndices.insert(entry.second);
+    for (const auto &[path, desc] : mVendorParams) {
+        mSubscribedIndices.insert(desc->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    names->clear();
+    // TODO: expand to standard params
+    for (const auto &[key, desc] : mVendorParams) {
+        names->push_back(key);
+    }
+    return OK;
+}
+
+status_t CCodecConfig::describe(const std::string &name, CodecParameterDescriptor *desc) {
+    if (!desc) {
+        return BAD_VALUE;
+    }
+    // TODO: expand to standard params
+    desc->name = name;
+    switch (mParamUpdater->getTypeForKey(name)) {
+        case C2FieldDescriptor::INT32:
+        case C2FieldDescriptor::UINT32:
+        case C2FieldDescriptor::CNTR32:
+            desc->type = AMessage::kTypeInt32;
+            return OK;
+        case C2FieldDescriptor::INT64:
+        case C2FieldDescriptor::UINT64:
+        case C2FieldDescriptor::CNTR64:
+            desc->type = AMessage::kTypeInt64;
+            return OK;
+        case C2FieldDescriptor::FLOAT:
+            desc->type = AMessage::kTypeFloat;
+            return OK;
+        case C2FieldDescriptor::STRING:
+            desc->type = AMessage::kTypeString;
+            return OK;
+        case C2FieldDescriptor::BLOB:
+            desc->type = AMessage::kTypeBuffer;
+            return OK;
+        default:
+            return NAME_NOT_FOUND;
+    }
+}
+
+status_t CCodecConfig::subscribeToVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.insert(it->second->index());
+    }
+    return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::unsubscribeFromVendorConfigUpdate(
+        const std::shared_ptr<Codec2Client::Configurable> &configurable,
+        const std::vector<std::string> &names,
+        c2_blocking_t blocking) {
+    for (const std::string &name : names) {
+        auto it = mVendorParams.find(name);
+        if (it == mVendorParams.end()) {
+            ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+            continue;
+        }
+        mSubscribedIndices.erase(it->second->index());
     }
     return subscribeToConfigUpdate(configurable, {}, blocking);
 }
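A minimal caller-side sketch (not part of the patch) of the vendor-parameter introspection API added above -- querySupportedParameters(), describe(), and subscribeToVendorConfigUpdate(). It assumes a CCodecConfig that has already been initialized and configured elsewhere; the function name and logging are illustrative only.

// Illustrative sketch: enumerate, describe, and subscribe to vendor parameters
// using the new CCodecConfig API above. |config| and |configurable| are assumed
// to be set up by the usual CCodec initialization path.
static void listAndSubscribeVendorParams(
        CCodecConfig &config,
        const std::shared_ptr<Codec2Client::Configurable> &configurable) {
    std::vector<std::string> names;
    if (config.querySupportedParameters(&names) != OK) {
        return;  // introspection not available
    }
    for (const std::string &name : names) {
        CodecParameterDescriptor desc;
        if (config.describe(name, &desc) == OK) {
            ALOGV("vendor parameter %s has AMessage type %d", name.c_str(), (int)desc.type);
        }
    }
    // Subscribing makes subsequent config updates include these parameters.
    (void)config.subscribeToVendorConfigUpdate(configurable, names);
}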
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 2895746..417b773 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -35,6 +35,8 @@
 namespace android {
 
 struct AMessage;
+struct CodecParameterDescriptor;
+class NativeHandle;
 struct StandardParams;
 
 /**
@@ -123,6 +125,7 @@
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
+    android_dataspace mInputSurfaceDataspace;
 
     /// the current configuration. Updated after configure() and based on configUpdate in
     /// onWorkDone
@@ -136,11 +139,15 @@
     /// For now support a validation function.
     std::map<C2Param::Index, LocalParamValidator> mLocalParams;
 
-    /// Vendor field name -> index map.
-    std::map<std::string, C2Param::Index> mVendorParamIndices;
+    /// Vendor field name -> desc map.
+    std::map<std::string, std::shared_ptr<C2ParamDescriptor>> mVendorParams;
 
     std::set<std::string> mLastConfig;
 
+    /// Tunneled codecs
+    bool mTunneled;
+    sp<NativeHandle> mSidebandHandle;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
@@ -321,6 +328,41 @@
         return Watcher<T>(index, this);
     }
 
+    /**
+     * Queries supported parameters and puts their keys into |names|.
+     * TODO: currently this method queries vendor parameter keys only.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |names| is nullptr.
+     */
+    status_t querySupportedParameters(std::vector<std::string> *names);
+
+    /**
+     * Describes the parameter named |name|, filling the information into |desc|.
+     * TODO: currently this method works only for vendor parameters.
+     *
+     * \return OK if successful.
+     *         BAD_VALUE if |desc| is nullptr.
+     *         NAME_NOT_FOUND if |name| is not a recognized parameter name.
+     */
+    status_t describe(const std::string &name, CodecParameterDescriptor *desc);
+
+    /**
+     * Find corresponding indices for |names| and subscribe to them.
+     */
+    status_t subscribeToVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
+    /**
+     * Find corresponding indices for |names| and unsubscribe from them.
+     */
+    status_t unsubscribeFromVendorConfigUpdate(
+            const std::shared_ptr<Codec2Client::Configurable> &configurable,
+            const std::vector<std::string> &names,
+            c2_blocking_t blocking = C2_DONT_BLOCK);
+
 private:
 
     /// initializes the standard MediaCodec to Codec 2.0 params mapping
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 25e7da9..691bab1 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "Codec2Buffer"
 #include <utils/Log.h>
 
+#include <android-base/properties.h>
 #include <android/hardware/cas/native/1.0/types.h>
 #include <android/hardware/drm/1.0/types.h>
 #include <hidlmemory/FrameworkUtils.h>
@@ -211,21 +212,24 @@
      * Creates a C2GraphicView <=> MediaImage converter
      *
      * \param view C2GraphicView object
-     * \param colorFormat desired SDK color format for the MediaImage (if this is a flexible format,
-     *        an attempt is made to simply represent the graphic view as a flexible SDK format
-     *        without a memcpy)
+     * \param format buffer format
      * \param copy whether the converter is used for copy or not
      */
     GraphicView2MediaImageConverter(
-            const C2GraphicView &view, int32_t colorFormat, bool copy)
+            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
         : mInitCheck(NO_INIT),
           mView(view),
           mWidth(view.width()),
           mHeight(view.height()),
-          mColorFormat(colorFormat),
           mAllocatedDepth(0),
           mBackBufferSize(0),
           mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
+            mClientColorFormat = COLOR_FormatYUV420Flexible;
+        }
+        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
+            mComponentColorFormat = COLOR_FormatYUV420Flexible;
+        }
         if (view.error() != C2_OK) {
             ALOGD("Converter: view.error() = %d", view.error());
             mInitCheck = BAD_VALUE;
@@ -246,68 +250,57 @@
         uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
         uint32_t vStride = align(view.crop().height, 2);
 
+        bool tryWrapping = !copy;
+
         switch (layout.type) {
-            case C2PlanarLayout::TYPE_YUV:
+            case C2PlanarLayout::TYPE_YUV: {
                 mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
                 if (layout.numPlanes != 3) {
                     ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
                     mInitCheck = BAD_VALUE;
                     return;
                 }
-                if (layout.planes[0].channel != C2PlaneInfo::CHANNEL_Y
-                        || layout.planes[1].channel != C2PlaneInfo::CHANNEL_CB
-                        || layout.planes[2].channel != C2PlaneInfo::CHANNEL_CR
-                        || layout.planes[0].colSampling != 1
-                        || layout.planes[0].rowSampling != 1
-                        || layout.planes[1].colSampling != 2
-                        || layout.planes[1].rowSampling != 2
-                        || layout.planes[2].colSampling != 2
-                        || layout.planes[2].rowSampling != 2) {
-                    ALOGD("Converter: not YUV420 for YUV layout");
+                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
+                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
+                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
+                    ALOGD("Converter: not YUV layout");
                     mInitCheck = BAD_VALUE;
                     return;
                 }
-                switch (mColorFormat) {
-                    case COLOR_FormatYUV420Flexible:
-                        if (!copy) {
-                            // try to map directly. check if the planes are near one another
-                            const uint8_t *minPtr = mView.data()[0];
-                            const uint8_t *maxPtr = mView.data()[0];
-                            int32_t planeSize = 0;
-                            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                                const C2PlaneInfo &plane = layout.planes[i];
-                                ssize_t minOffset = plane.minOffset(mWidth, mHeight);
-                                ssize_t maxOffset = plane.maxOffset(mWidth, mHeight);
-                                if (minPtr > mView.data()[i] + minOffset) {
-                                    minPtr = mView.data()[i] + minOffset;
-                                }
-                                if (maxPtr < mView.data()[i] + maxOffset) {
-                                    maxPtr = mView.data()[i] + maxOffset;
-                                }
-                                planeSize += std::abs(plane.rowInc) * align(mHeight, 64)
-                                        / plane.rowSampling / plane.colSampling
-                                        * divUp(mAllocatedDepth, 8u);
-                            }
-
-                            if ((maxPtr - minPtr + 1) <= planeSize) {
-                                // FIXME: this is risky as reading/writing data out of bound results
-                                //        in an undefined behavior, but gralloc does assume a
-                                //        contiguous mapping
-                                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                                    const C2PlaneInfo &plane = layout.planes[i];
-                                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
-                                    mediaImage->mPlane[i].mColInc = plane.colInc;
-                                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
-                                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
-                                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
-                                }
-                                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
-                                                       maxPtr - minPtr + 1);
-                                break;
-                            }
+                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
+                if (yuv420888) {
+                    for (uint32_t i = 0; i < 3; ++i) {
+                        const C2PlaneInfo &plane = layout.planes[i];
+                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
+                            yuv420888 = false;
+                            break;
                         }
-                        [[fallthrough]];
-
+                    }
+                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
+                }
+                int32_t copyFormat = mClientColorFormat;
+                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
+                    if (uPlane.colInc == 2 && vPlane.colInc == 2
+                            && yPlane.rowInc == uPlane.rowInc) {
+                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
+                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
+                            && yPlane.rowInc == uPlane.rowInc * 2) {
+                        copyFormat = COLOR_FormatYUV420PackedPlanar;
+                    }
+                }
+                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
+                        "v:{colInc=%d rowInc=%d}",
+                        mClientColorFormat,
+                        yPlane.colInc, yPlane.rowInc,
+                        uPlane.colInc, uPlane.rowInc,
+                        vPlane.colInc, vPlane.rowInc);
+                switch (copyFormat) {
+                    case COLOR_FormatYUV420Flexible:
                     case COLOR_FormatYUV420Planar:
                     case COLOR_FormatYUV420PackedPlanar:
                         mediaImage->mPlane[mediaImage->Y].mOffset = 0;
@@ -327,6 +320,13 @@
                         mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
                         mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
+                                    && yPlane.rowInc == uPlane.rowInc * 2
+                                    && view.data()[0] < view.data()[1]
+                                    && view.data()[1] < view.data()[2];
+                        }
                         break;
 
                     case COLOR_FormatYUV420SemiPlanar:
@@ -348,64 +348,165 @@
                         mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
+                                    && yPlane.rowInc == uPlane.rowInc
+                                    && view.data()[0] < view.data()[1]
+                                    && view.data()[1] < view.data()[2];
+                        }
                         break;
 
-                    default:
-                        ALOGD("Converter: incompactible color format (%d) for YUV layout", mColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
+                    case COLOR_FormatYUVP010:
+                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
+                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride * 2;
+                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
+                        mediaImage->mPlane[mediaImage->U].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 2 + 2;
+                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
+                        mediaImage->mPlane[mediaImage->V].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+                        if (tryWrapping) {
+                            tryWrapping = yPlane.allocatedDepth == 16
+                                    && uPlane.allocatedDepth == 16
+                                    && vPlane.allocatedDepth == 16
+                                    && yPlane.bitDepth == 10
+                                    && uPlane.bitDepth == 10
+                                    && vPlane.bitDepth == 10
+                                    && yPlane.rightShift == 6
+                                    && uPlane.rightShift == 6
+                                    && vPlane.rightShift == 6
+                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
+                                    && yPlane.colInc == 2
+                                    && uPlane.colInc == 4
+                                    && vPlane.colInc == 4
+                                    && yPlane.rowInc == uPlane.rowInc
+                                    && yPlane.rowInc == vPlane.rowInc;
+                        }
+                        break;
+
+                    default: {
+                        // default to a fully planar format; this is overridden below if wrapping succeeds
+                        // TODO: keep interleaved format
+                        int32_t colInc = divUp(mAllocatedDepth, 8u);
+                        int32_t rowInc = stride * colInc / yPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
+                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
+                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
+                        int32_t offset = rowInc * vStride / yPlane.rowSampling;
+
+                        rowInc = stride * colInc / uPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
+                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
+                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
+                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
+                        offset += rowInc * vStride / uPlane.rowSampling;
+
+                        rowInc = stride * colInc / vPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
+                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
+                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
+                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
+                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
+                        break;
+                    }
                 }
                 break;
+            }
+
             case C2PlanarLayout::TYPE_YUVA:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUVA;
-                // We don't have an SDK YUVA format
-                ALOGD("Converter: incompactible color format (%d) for YUVA layout", mColorFormat);
-                mInitCheck = BAD_VALUE;
+                ALOGD("Converter: unrecognized color format "
+                        "(client %d component %d) for YUVA layout",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
                 return;
             case C2PlanarLayout::TYPE_RGB:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
-                switch (mColorFormat) {
-                    // TODO media image
-                    case COLOR_FormatRGBFlexible:
-                    case COLOR_Format24bitBGR888:
-                    case COLOR_Format24bitRGB888:
-                        break;
-                    default:
-                        ALOGD("Converter: incompactible color format (%d) for RGB layout", mColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
+                ALOGD("Converter: unrecognized color format "
+                        "(client %d component %d) for RGB layout",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
+                // TODO: support MediaImage layout
+                return;
             case C2PlanarLayout::TYPE_RGBA:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
-                switch (mColorFormat) {
-                    // TODO media image
-                    case COLOR_FormatRGBAFlexible:
-                    case COLOR_Format32bitABGR8888:
-                    case COLOR_Format32bitARGB8888:
-                    case COLOR_Format32bitBGRA8888:
-                        break;
-                    default:
-                        ALOGD("Incompactible color format (%d) for RGBA layout", mColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 4) {
-                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
+                ALOGD("Converter: unrecognized color format "
+                        "(client %d component %d) for RGBA layout",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
+                // TODO: support MediaImage layout
+                return;
             default:
                 mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
-                ALOGD("Unknown layout");
-                mInitCheck = BAD_VALUE;
-                return;
+                if (layout.numPlanes == 1) {
+                    const C2PlaneInfo &plane = layout.planes[0];
+                    if (plane.colInc < 0 || plane.rowInc < 0) {
+                        // Copy-only if we have negative colInc/rowInc
+                        tryWrapping = false;
+                    }
+                    mediaImage->mPlane[0].mOffset = 0;
+                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
+                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
+                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
+                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
+                } else {
+                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = NO_INIT;
+                    return;
+                }
+                break;
+        }
+        if (tryWrapping) {
+            // try to map directly. check if the planes are near one another
+            const uint8_t *minPtr = mView.data()[0];
+            const uint8_t *maxPtr = mView.data()[0];
+            int32_t planeSize = 0;
+            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+                const C2PlaneInfo &plane = layout.planes[i];
+                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+                ssize_t minOffset = plane.minOffset(
+                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
+                ssize_t maxOffset = plane.maxOffset(
+                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
+                if (minPtr > mView.data()[i] + minOffset) {
+                    minPtr = mView.data()[i] + minOffset;
+                }
+                if (maxPtr < mView.data()[i] + maxOffset) {
+                    maxPtr = mView.data()[i] + maxOffset;
+                }
+                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+                        * align(mHeight, 64) / plane.rowSampling;
+            }
+
+            if ((maxPtr - minPtr + 1) <= planeSize) {
+                // FIXME: this is risky as reading/writing data out of bounds results
+                //        in undefined behavior, but gralloc does assume a
+                //        contiguous mapping
+                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+                    const C2PlaneInfo &plane = layout.planes[i];
+                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
+                    mediaImage->mPlane[i].mColInc = plane.colInc;
+                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
+                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
+                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
+                }
+                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
+                                       maxPtr - minPtr + 1);
+                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
+            }
         }
         mediaImage->mNumPlanes = layout.numPlanes;
         mediaImage->mWidth = view.crop().width;
@@ -428,12 +529,12 @@
                 return;
             }
             if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
-                ALOGV("different allocatedDepth/bitDepth per plane unsupported");
+                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
                 mInitCheck = BAD_VALUE;
                 return;
             }
             bufferSize += stride * vStride
-                    / plane.rowSampling / plane.colSampling;
+                    / plane.rowSampling / plane.colSampling * divUp(mAllocatedDepth, 8u);
         }
 
         mBackBufferSize = bufferSize;
@@ -488,7 +589,8 @@
     const C2GraphicView mView;
     uint32_t mWidth;
     uint32_t mHeight;
-    int32_t mColorFormat;  ///< SDK color format for MediaImage
+    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
+    int32_t mComponentColorFormat;  ///< SDK color format from component
     sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
     uint32_t mAllocatedDepth;
     uint32_t mBackBufferSize;
@@ -517,10 +619,7 @@
         return nullptr;
     }
 
-    int32_t colorFormat = COLOR_FormatYUV420Flexible;
-    (void)format->findInt32("color-format", &colorFormat);
-
-    GraphicView2MediaImageConverter converter(view, colorFormat, false /* copy */);
+    GraphicView2MediaImageConverter converter(view, format, false /* copy */);
     if (converter.initCheck() != OK) {
         ALOGD("Converter init failed: %d", converter.initCheck());
         return nullptr;
@@ -578,7 +677,26 @@
 }
 
 std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
-#ifndef __LP64__
+#ifdef __LP64__
+    static std::once_flag s_checkOnce;
+    static bool s_is64bitOk {true};
+    std::call_once(s_checkOnce, [&](){
+        const std::string abi32list =
+                ::android::base::GetProperty("ro.product.cpu.abilist32", "");
+        if (!abi32list.empty()) {
+            int32_t inputSurfaceSetting =
+                    ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
+            s_is64bitOk = inputSurfaceSetting != 0;
+        }
+    });
+
+    if (!s_is64bitOk) {
+        ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object"\
+              "when debug.stagefright.c2inputsurface is set to 0");
+        return nullptr;
+    }
+#endif
+
     VideoNativeMetadata *meta = (VideoNativeMetadata *)base();
     ANativeWindowBuffer *buffer = (ANativeWindowBuffer *)meta->pBuffer;
     if (buffer == nullptr) {
@@ -598,6 +716,8 @@
     c2_status_t err = mAlloc->priorGraphicAllocation(handle, &alloc);
     if (err != C2_OK) {
         ALOGD("Failed to wrap VideoNativeMetadata into C2GraphicAllocation");
+        native_handle_close(handle);
+        native_handle_delete(handle);
         return nullptr;
     }
     std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);
@@ -611,10 +731,6 @@
     }
     return C2Buffer::CreateGraphicBuffer(
             block->share(C2Rect(buffer->width, buffer->height), C2Fence()));
-#else
-    ALOGE("GraphicMetadataBuffer does not work on 64-bit arch");
-    return nullptr;
-#endif
 }
 
 // ConstGraphicBlockBuffer
@@ -634,10 +750,7 @@
             buffer->data().graphicBlocks()[0].map().get()));
     std::unique_ptr<const C2GraphicView> holder;
 
-    int32_t colorFormat = COLOR_FormatYUV420Flexible;
-    (void)format->findInt32("color-format", &colorFormat);
-
-    GraphicView2MediaImageConverter converter(*view, colorFormat, false /* copy */);
+    GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
     if (converter.initCheck() != OK) {
         ALOGD("Converter init failed: %d", converter.initCheck());
         return nullptr;
@@ -729,12 +842,11 @@
         return false;
     }
 
-    int32_t colorFormat = COLOR_FormatYUV420Flexible;
-    // FIXME: format() is not const, but we cannot change it, so do a const cast here
-    const_cast<ConstGraphicBlockBuffer *>(this)->format()->findInt32("color-format", &colorFormat);
-
     GraphicView2MediaImageConverter converter(
-            buffer->data().graphicBlocks()[0].map().get(), colorFormat, true /* copy */);
+            buffer->data().graphicBlocks()[0].map().get(),
+            // FIXME: format() is not const, but we cannot change it, so do a const cast here
+            const_cast<ConstGraphicBlockBuffer *>(this)->format(),
+            true /* copy */);
     if (converter.initCheck() != OK) {
         ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
         return false;
@@ -752,11 +864,9 @@
         setRange(0, 0);
         return true;
     }
-    int32_t colorFormat = COLOR_FormatYUV420Flexible;
-    format()->findInt32("color-format", &colorFormat);
 
     GraphicView2MediaImageConverter converter(
-            buffer->data().graphicBlocks()[0].map().get(), colorFormat, true /* copy */);
+            buffer->data().graphicBlocks()[0].map().get(), format(), true /* copy */);
     if (converter.initCheck() != OK) {
         ALOGD("ConstGraphicBlockBuffer::copy: converter init failed: %d", converter.initCheck());
         return false;
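A standalone restatement (not part of the patch) of the copy-format decision GraphicView2MediaImageConverter makes above for flexible YUV 4:2:0: when the client asked for COLOR_FormatYUV420Flexible, the plane increments determine whether a packed semi-planar or packed planar copy format is reported. PlaneDesc is a hypothetical stand-in for the relevant C2PlaneInfo fields.

// Illustrative sketch of the copy-format choice; only colInc/rowInc matter here.
struct PlaneDesc {
    int32_t colInc;  // bytes between adjacent samples in a row
    int32_t rowInc;  // bytes between adjacent rows
};

static int32_t chooseCopyFormat(
        int32_t clientColorFormat, bool isYuv420888,
        const PlaneDesc &y, const PlaneDesc &u, const PlaneDesc &v) {
    if (!isYuv420888 || clientColorFormat != COLOR_FormatYUV420Flexible) {
        return clientColorFormat;  // honor an explicitly requested format
    }
    if (u.colInc == 2 && v.colInc == 2 && y.rowInc == u.rowInc) {
        // Interleaved chroma with the same row stride as luma: NV12-style layout.
        return COLOR_FormatYUV420PackedSemiPlanar;
    }
    if (u.colInc == 1 && v.colInc == 1 && y.rowInc == u.rowInc * 2) {
        // Separate chroma planes at half the luma row stride: I420-style layout.
        return COLOR_FormatYUV420PackedPlanar;
    }
    return clientColorFormat;
}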
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index b112249..7c4bfb6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -67,7 +67,8 @@
             s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
 }
 
-void addSupportedProfileLevels(
+// returns true if component advertised supported profile level(s)
+bool addSupportedProfileLevels(
         std::shared_ptr<Codec2Client::Interface> intf,
         MediaCodecInfo::CapabilitiesWriter *caps,
         const Traits& trait, const std::string &mediaType) {
@@ -87,12 +88,12 @@
     c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
     ALOGV("query supported profiles -> %s | %s", asString(err), asString(profileQuery[0].status));
     if (err != C2_OK || profileQuery[0].status != C2_OK) {
-        return;
+        return false;
     }
 
     // we only handle enumerated values
     if (profileQuery[0].values.type != C2FieldSupportedValues::VALUES) {
-        return;
+        return false;
     }
 
     // determine if codec supports HDR
@@ -103,11 +104,16 @@
     c2_status_t err1 = intf->querySupportedParams(&paramDescs);
     if (err1 == C2_OK) {
         for (const std::shared_ptr<C2ParamDescriptor> &desc : paramDescs) {
-            switch ((uint32_t)desc->index()) {
-            case C2StreamHdr10PlusInfo::output::PARAM_TYPE:
+            C2Param::Type type = desc->index();
+            // only consider supported parameters on raw ports
+            if (!(encoder ? type.forInput() : type.forOutput())) {
+                continue;
+            }
+            switch (type.coreIndex()) {
+            case C2StreamHdr10PlusInfo::CORE_INDEX:
                 supportsHdr10Plus = true;
                 break;
-            case C2StreamHdrStaticInfo::output::PARAM_TYPE:
+            case C2StreamHdrStaticInfo::CORE_INDEX:
                 supportsHdr = true;
                 break;
             default:
@@ -120,6 +126,8 @@
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
 
+    bool added = false;
+
     for (C2Value::Primitive profile : profileQuery[0].values.values) {
         pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
         std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -160,6 +168,7 @@
         } else if (!mapper) {
             caps->addProfileLevel(pl.profile, pl.level);
         }
+        added = true;
 
         // for H.263 also advertise the second highest level if the
         // codec supports level 45, as level 45 only covers level 10
@@ -183,6 +192,7 @@
             }
         }
     }
+    return added;
 }
 
 void addSupportedColorFormats(
@@ -333,6 +343,59 @@
     // parse default XML files
     parser.parseXmlFilesInSearchDirs();
 
+    // The mainline media modules may optionally include codec shaping information,
+    // selected by the vendor partition SDK level and the brand/product/device
+    // information (the device-specific overrides are expected to be absent almost always).
+    //
+    {
+        // get build info so we know what file to search
+        // ro.vendor.build.fingerprint
+        std::string fingerprint = base::GetProperty("ro.vendor.build.fingerprint",
+                                               "brand/product/device:");
+        ALOGV("property_get for ro.vendor.build.fingerprint == '%s'", fingerprint.c_str());
+
+        // ro.vendor.build.version.sdk
+        std::string sdk = base::GetProperty("ro.vendor.build.version.sdk", "0");
+        ALOGV("property_get for ro.vendor.build.version.sdk == '%s'", sdk.c_str());
+
+        std::string brand;
+        std::string product;
+        std::string device;
+        size_t pos1;
+        pos1 = fingerprint.find('/');
+        if (pos1 != std::string::npos) {
+            brand = fingerprint.substr(0, pos1);
+            size_t pos2 = fingerprint.find('/', pos1+1);
+            if (pos2 != std::string::npos) {
+                product = fingerprint.substr(pos1+1, pos2 - pos1 - 1);
+                size_t pos3 = fingerprint.find('/', pos2+1);
+                if (pos3 != std::string::npos) {
+                    device = fingerprint.substr(pos2+1, pos3 - pos2 - 1);
+                    size_t pos4 = device.find(':');
+                    if (pos4 != std::string::npos) {
+                        device.resize(pos4);
+                    }
+                }
+            }
+        }
+
+        ALOGV("parsed: sdk '%s' brand '%s' product '%s' device '%s'",
+            sdk.c_str(), brand.c_str(), product.c_str(), device.c_str());
+
+        std::string base = "/apex/com.android.media/etc/formatshaper";
+
+        // looking in these directories within the apex
+        const std::vector<std::string> modulePathnames = {
+            base + "/" + sdk + "/" + brand + "/" + product + "/" + device,
+            base + "/" + sdk + "/" + brand + "/" + product,
+            base + "/" + sdk + "/" + brand,
+            base + "/" + sdk,
+            base
+        };
+
+        parser.parseXmlFilesInSearchDirs( { "media_codecs_shaping.xml" }, modulePathnames);
+    }
+
     if (parser.getParsingStatus() != OK) {
         ALOGD("XML parser no good");
         return OK;
@@ -546,7 +609,15 @@
                     }
                 }
 
-                addSupportedProfileLevels(intf, caps.get(), trait, mediaType);
+                if (!addSupportedProfileLevels(intf, caps.get(), trait, mediaType)) {
+                    // TODO(b/193279646) This will get fixed in C2InterfaceHelper
+                    // Some components may not advertise supported values if they use a const
+                    // param for profile/level (they support only one profile). For now cover
+                    // only VP8 here until it is fixed.
+                    if (mediaType == MIMETYPE_VIDEO_VP8) {
+                        caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
+                    }
+                }
                 addSupportedColorFormats(intf, caps.get(), trait, mediaType);
             }
         }
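A standalone sketch (not part of the patch) of the ro.vendor.build.fingerprint parsing used above to build the media_codecs_shaping.xml search paths. A fingerprint has the shape "brand/product/device:release/...", so the device component is terminated by ':'.

// Illustrative sketch mirroring the parsing logic in the hunk above.
struct VendorDeviceId {
    std::string brand;
    std::string product;
    std::string device;
};

static VendorDeviceId parseVendorFingerprint(const std::string &fingerprint) {
    VendorDeviceId id;
    size_t pos1 = fingerprint.find('/');
    if (pos1 == std::string::npos) return id;
    id.brand = fingerprint.substr(0, pos1);
    size_t pos2 = fingerprint.find('/', pos1 + 1);
    if (pos2 == std::string::npos) return id;
    id.product = fingerprint.substr(pos1 + 1, pos2 - pos1 - 1);
    size_t pos3 = fingerprint.find('/', pos2 + 1);
    if (pos3 == std::string::npos) return id;
    id.device = fingerprint.substr(pos2 + 1, pos3 - pos2 - 1);
    size_t colon = id.device.find(':');
    if (colon != std::string::npos) {
        id.device.resize(colon);  // drop the ":release..." tail
    }
    return id;
}

// e.g. parseVendorFingerprint("google/raven/raven:12/SD1A/7741037:user/release-keys")
// yields {brand="google", product="raven", device="raven"} (example values only).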
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
new file mode 100644
index 0000000..af054c7
--- /dev/null
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameReassembler"
+
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "FrameReassembler.h"
+
+namespace android {
+
+static constexpr uint64_t kToleranceUs = 1000;  // 1ms
+
+FrameReassembler::FrameReassembler()
+    : mUsage{0, 0},
+      mSampleRate(0u),
+      mChannelCount(0u),
+      mEncoding(C2Config::PCM_16),
+      mCurrentOrdinal({0, 0, 0}) {
+}
+
+void FrameReassembler::init(
+        const std::shared_ptr<C2BlockPool> &pool,
+        C2MemoryUsage usage,
+        uint32_t frameSize,
+        uint32_t sampleRate,
+        uint32_t channelCount,
+        C2Config::pcm_encoding_t encoding) {
+    mBlockPool = pool;
+    mUsage = usage;
+    mFrameSize = frameSize;
+    mSampleRate = sampleRate;
+    mChannelCount = channelCount;
+    mEncoding = encoding;
+}
+
+void FrameReassembler::updateFrameSize(uint32_t frameSize) {
+    finishCurrentBlock(&mPendingWork);
+    mFrameSize = frameSize;
+}
+
+void FrameReassembler::updateSampleRate(uint32_t sampleRate) {
+    finishCurrentBlock(&mPendingWork);
+    mSampleRate = sampleRate;
+}
+
+void FrameReassembler::updateChannelCount(uint32_t channelCount) {
+    finishCurrentBlock(&mPendingWork);
+    mChannelCount = channelCount;
+}
+
+void FrameReassembler::updatePcmEncoding(C2Config::pcm_encoding_t encoding) {
+    finishCurrentBlock(&mPendingWork);
+    mEncoding = encoding;
+}
+
+void FrameReassembler::reset() {
+    flush();
+    mCurrentOrdinal = {0, 0, 0};
+    mBlockPool.reset();
+    mFrameSize.reset();
+    mSampleRate = 0u;
+    mChannelCount = 0u;
+    mEncoding = C2Config::PCM_16;
+}
+
+FrameReassembler::operator bool() const {
+    return mFrameSize.has_value();
+}
+
+c2_status_t FrameReassembler::process(
+        const sp<MediaCodecBuffer> &buffer,
+        std::list<std::unique_ptr<C2Work>> *items) {
+    int64_t timeUs;
+    if (buffer->size() == 0u
+            || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+        return C2_BAD_VALUE;
+    }
+
+    items->splice(items->end(), mPendingWork);
+
+    // Fill mCurrentBlock
+    if (mCurrentBlock) {
+        // First check the timestamp
+        c2_cntr64_t endTimestampUs = mCurrentOrdinal.timestamp;
+        endTimestampUs += bytesToSamples(mWriteView->size()) * 1000000 / mSampleRate;
+        if (timeUs < endTimestampUs.peek()) {
+            uint64_t diffUs = (endTimestampUs - timeUs).peeku();
+            if (diffUs > kToleranceUs) {
+                // The timestamp is going back in time by a large amount.
+                // TODO: b/145702136
+                ALOGW("timestamp going back in time! from %lld to %lld",
+                        endTimestampUs.peekll(), (long long)timeUs);
+            }
+        } else {  // timeUs >= endTimestampUs.peek()
+            uint64_t diffUs = (timeUs - endTimestampUs).peeku();
+            if (diffUs > kToleranceUs) {
+                // The timestamp is going forward; add silence as necessary.
+                size_t gapSamples = usToSamples(diffUs);
+                size_t remainingSamples =
+                    (mWriteView->capacity() - mWriteView->size())
+                    / mChannelCount / bytesPerSample();
+                if (gapSamples < remainingSamples) {
+                    size_t gapBytes = gapSamples * mChannelCount * bytesPerSample();
+                    memset(mWriteView->base() + mWriteView->size(), 0u, gapBytes);
+                    mWriteView->setSize(mWriteView->size() + gapBytes);
+                } else {
+                    finishCurrentBlock(items);
+                }
+            }
+        }
+    }
+
+    if (mCurrentBlock) {
+        // Append the data at the end of the current block
+        size_t copySize = std::min(
+                buffer->size(),
+                size_t(mWriteView->capacity() - mWriteView->size()));
+        memcpy(mWriteView->base() + mWriteView->size(), buffer->data(), copySize);
+        buffer->setRange(buffer->offset() + copySize, buffer->size() - copySize);
+        mWriteView->setSize(mWriteView->size() + copySize);
+        if (mWriteView->size() == mWriteView->capacity()) {
+            finishCurrentBlock(items);
+        }
+        timeUs += bytesToSamples(copySize) * 1000000 / mSampleRate;
+    }
+
+    if (buffer->size() > 0) {
+        mCurrentOrdinal.timestamp = timeUs;
+        mCurrentOrdinal.customOrdinal = timeUs;
+    }
+
+    size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
+    while (buffer->size() > 0) {
+        LOG_ALWAYS_FATAL_IF(
+                mCurrentBlock,
+                "There's remaining data but the pending block is not filled & finished");
+        std::unique_ptr<C2Work> work(new C2Work);
+        c2_status_t err = mBlockPool->fetchLinearBlock(frameSizeBytes, mUsage, &mCurrentBlock);
+        if (err != C2_OK) {
+            return err;
+        }
+        size_t copySize = std::min(buffer->size(), frameSizeBytes);
+        mWriteView = mCurrentBlock->map().get();
+        if (mWriteView->error() != C2_OK) {
+            return mWriteView->error();
+        }
+        ALOGV("buffer={offset=%zu size=%zu} copySize=%zu",
+                buffer->offset(), buffer->size(), copySize);
+        memcpy(mWriteView->base(), buffer->data(), copySize);
+        mWriteView->setOffset(0u);
+        mWriteView->setSize(copySize);
+        buffer->setRange(buffer->offset() + copySize, buffer->size() - copySize);
+        if (copySize == frameSizeBytes) {
+            finishCurrentBlock(items);
+        }
+    }
+
+    int32_t eos = 0;
+    if (buffer->meta()->findInt32("eos", &eos) && eos) {
+        finishCurrentBlock(items);
+    }
+
+    return C2_OK;
+}
+
+void FrameReassembler::flush() {
+    mPendingWork.clear();
+    mWriteView.reset();
+    mCurrentBlock.reset();
+}
+
+uint64_t FrameReassembler::bytesToSamples(size_t numBytes) const {
+    return numBytes / mChannelCount / bytesPerSample();
+}
+
+size_t FrameReassembler::usToSamples(uint64_t us) const {
+    return (us * mChannelCount * mSampleRate / 1000000);
+}
+
+uint32_t FrameReassembler::bytesPerSample() const {
+    return (mEncoding == C2Config::PCM_8) ? 1
+         : (mEncoding == C2Config::PCM_16) ? 2
+         : (mEncoding == C2Config::PCM_FLOAT) ? 4 : 0;
+}
+
+void FrameReassembler::finishCurrentBlock(std::list<std::unique_ptr<C2Work>> *items) {
+    if (!mCurrentBlock) {
+        // No-op
+        return;
+    }
+    if (mWriteView->size() < mWriteView->capacity()) {
+        memset(mWriteView->base() + mWriteView->size(), 0u,
+                mWriteView->capacity() - mWriteView->size());
+        mWriteView->setSize(mWriteView->capacity());
+    }
+    std::unique_ptr<C2Work> work{std::make_unique<C2Work>()};
+    work->input.ordinal = mCurrentOrdinal;
+    work->input.buffers.push_back(C2Buffer::CreateLinearBuffer(
+            mCurrentBlock->share(0, mCurrentBlock->capacity(), C2Fence())));
+    work->worklets.clear();
+    work->worklets.emplace_back(new C2Worklet);
+    items->push_back(std::move(work));
+
+    ++mCurrentOrdinal.frameIndex;
+    mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+    mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
+    mCurrentBlock.reset();
+    mWriteView.reset();
+}
+
+}  // namespace android
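The byte/sample/time arithmetic used by FrameReassembler (frameSizeBytes in process(), bytesToSamples(), and the per-frame timestamp advance in finishCurrentBlock()) worked through for an assumed configuration of 48 kHz stereo PCM_16 with a 1024-sample frame size; the concrete numbers are examples, not values from the patch.

// Illustrative sketch: the unit conversions of FrameReassembler on example values.
#include <cstdint>
#include <cstdio>

int main() {
    const uint32_t sampleRate = 48000;
    const uint32_t channelCount = 2;
    const uint32_t bytesPerSample = 2;   // C2Config::PCM_16
    const uint32_t frameSize = 1024;     // samples per channel per frame

    // One full frame occupies frameSize * channelCount * bytesPerSample bytes.
    const size_t frameSizeBytes = frameSize * channelCount * bytesPerSample;        // 4096

    // bytesToSamples(): bytes -> samples per channel.
    const uint64_t samples = frameSizeBytes / channelCount / bytesPerSample;        // 1024

    // Each finished frame advances the ordinal timestamp by frameSize / sampleRate seconds.
    const uint64_t frameDurationUs = uint64_t(frameSize) * 1000000 / sampleRate;    // 21333

    std::printf("frame = %zu bytes, %llu samples, %llu us\n",
            frameSizeBytes, (unsigned long long)samples, (unsigned long long)frameDurationUs);
    return 0;
}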
diff --git a/media/codec2/sfplugin/FrameReassembler.h b/media/codec2/sfplugin/FrameReassembler.h
new file mode 100644
index 0000000..17ac06d
--- /dev/null
+++ b/media/codec2/sfplugin/FrameReassembler.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_REASSEMBLER_H_
+#define FRAME_REASSEMBLER_H_
+
+#include <set>
+#include <memory>
+
+#include <media/MediaCodecBuffer.h>
+
+#include <C2Config.h>
+#include <C2Work.h>
+
+namespace android {
+
+class FrameReassembler {
+public:
+    FrameReassembler();
+
+    void init(
+            const std::shared_ptr<C2BlockPool> &pool,
+            C2MemoryUsage usage,
+            uint32_t frameSize,
+            uint32_t sampleRate,
+            uint32_t channelCount,
+            C2Config::pcm_encoding_t encoding);
+    void updateFrameSize(uint32_t frameSize);
+    void updateSampleRate(uint32_t sampleRate);
+    void updateChannelCount(uint32_t channelCount);
+    void updatePcmEncoding(C2Config::pcm_encoding_t encoding);
+    void reset();
+    void flush();
+
+    explicit operator bool() const;
+
+    c2_status_t process(
+            const sp<MediaCodecBuffer> &buffer,
+            std::list<std::unique_ptr<C2Work>> *items);
+
+private:
+    std::shared_ptr<C2BlockPool> mBlockPool;
+    C2MemoryUsage mUsage;
+    std::optional<uint32_t> mFrameSize;
+    uint32_t mSampleRate;
+    uint32_t mChannelCount;
+    C2Config::pcm_encoding_t mEncoding;
+    std::list<std::unique_ptr<C2Work>> mPendingWork;
+    C2WorkOrdinalStruct mCurrentOrdinal;
+    std::shared_ptr<C2LinearBlock> mCurrentBlock;
+    std::optional<C2WriteView> mWriteView;
+
+    uint64_t bytesToSamples(size_t numBytes) const;
+    size_t usToSamples(uint64_t us) const;
+    uint32_t bytesPerSample() const;
+
+    void finishCurrentBlock(std::list<std::unique_ptr<C2Work>> *items);
+};
+
+}  // namespace android
+
+#endif  // FRAME_REASSEMBLER_H_
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index bb35763..44ba78a 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -61,24 +61,25 @@
     /// Input Surface configuration
     struct Config {
         // IN PARAMS (GBS)
-        float mMinFps; // minimum fps (repeat frame to achieve this)
-        float mMaxFps; // max fps (via frame drop)
-        float mCaptureFps; // capture fps
-        float mCodedFps;   // coded fps
-        bool mSuspended; // suspended
-        int64_t mTimeOffsetUs; // time offset (input => codec)
-        int64_t mSuspendAtUs; // suspend/resume time
-        int64_t mStartAtUs; // start time
-        bool mStopped; // stopped
-        int64_t mStopAtUs; // stop time
+        float mMinFps = 0.0; // minimum fps (repeat frame to achieve this)
+        float mMaxFps = 0.0; // max fps (via frame drop)
+        float mCaptureFps = 0.0; // capture fps
+        float mCodedFps = 0.0;   // coded fps
+        bool mSuspended = false; // suspended
+        int64_t mTimeOffsetUs = 0; // time offset (input => codec)
+        int64_t mSuspendAtUs = 0; // suspend/resume time
+        int64_t mStartAtUs = 0; // start time
+        bool mStopped = false; // stopped
+        int64_t mStopAtUs = 0; // stop time
 
         // OUT PARAMS (GBS)
-        int64_t mInputDelayUs; // delay between encoder input and surface input
+        int64_t mInputDelayUs = 0; // delay between encoder input and surface input
 
         // IN PARAMS (CODEC WRAPPER)
-        float mFixedAdjustedFps; // fixed fps via PTS manipulation
-        float mMinAdjustedFps; // minimum fps via PTS manipulation
-        uint64_t mUsage; // consumer usage
+        float mFixedAdjustedFps = 0.0; // fixed fps via PTS manipulation
+        float mMinAdjustedFps = 0.0; // minimum fps via PTS manipulation
+        uint64_t mUsage = 0; // consumer usage
+        int mPriority = INT_MAX; // priority of queue thread (if any); INT_MAX for no-op
     };
 
     /**
@@ -106,6 +107,11 @@
      */
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
+    /**
+     * Returns dataspace information from GraphicBufferSource.
+     */
+    virtual android_dataspace getDataspace() { return mDataSpace; }
+
 protected:
     android_dataspace mDataSpace;
 };
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
 }
 
 void PipelineWatcher::flush() {
+    ALOGV("flush");
     mFramesInPipeline.clear();
 }
 
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.cpp b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
index f39051b..d14b9b0 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.cpp
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
@@ -288,6 +288,20 @@
     }
 }
 
+C2FieldDescriptor::type_t ReflectedParamUpdater::getTypeForKey(
+        const std::string &key) const {
+    auto it = mMap.find(key);
+    if (it == mMap.end()) {
+        return C2FieldDescriptor::type_t(~0);
+    }
+
+    if (it->second.fieldDesc) {
+        return it->second.fieldDesc->type();
+    }
+    // whole param is exposed as a blob
+    return C2FieldDescriptor::BLOB;
+}
+
 void ReflectedParamUpdater::updateParamsFromMessage(
         const Dict &params,
         std::vector<std::unique_ptr<C2Param>> *vec /* nonnull */) const {
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.h b/media/codec2/sfplugin/ReflectedParamUpdater.h
index 752c7e4..6dcf2a3 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.h
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.h
@@ -176,6 +176,14 @@
             std::vector<std::string> *keys /* nonnull */) const;
 
     /**
+     * Get field type for the given name
+     *
+     * \param name[in]  field name
+     * \return type of the field, or type_t(~0) if not found.
+     */
+    C2FieldDescriptor::type_t getTypeForKey(const std::string &name) const;
+
+    /**
      * Update C2Param objects from field name and value in AMessage object.
      *
      * \param params[in]    Dict object with field name to value pairs.
diff --git a/media/codec2/sfplugin/TEST_MAPPING b/media/codec2/sfplugin/TEST_MAPPING
new file mode 100644
index 0000000..045e5b5
--- /dev/null
+++ b/media/codec2/sfplugin/TEST_MAPPING
@@ -0,0 +1,12 @@
+// mappings for frameworks/av/media/codec2/sfplugin
+{
+  "presubmit": [
+    // failing 1 of 11
+    // TODO(b/156167471)
+    // { "name": "ccodec_unit_test" },
+
+    // failing 4 of 17, around max-input-size defaults & overrides
+    // TODO(b/156167471)
+    //{ "name": "mc_sanity_test"}
+  ]
+}
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ecb2506..ec18128 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -65,6 +65,12 @@
     virtual void signalEndOfInputStream() override;
     virtual void signalRequestIDRFrame() override;
 
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t describeParameter(
+            const std::string &name, CodecParameterDescriptor *desc) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
+
     void initiateReleaseIfStuck();
     void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
     void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
@@ -126,6 +132,11 @@
             const std::chrono::milliseconds &timeout,
             const char *name);
 
+    status_t configureTunneledVideoPlayback(
+            const std::shared_ptr<Codec2Client::Component> comp,
+            sp<NativeHandle> *sidebandHandle,
+            const sp<AMessage> &msg);
+
     enum {
         kWhatAllocate,
         kWhatConfigure,
@@ -193,7 +204,6 @@
 
     Mutexed<std::unique_ptr<CCodecConfig>> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
-    std::atomic_flag mSentConfigAfterResume;
 
     friend class CCodecCallbackImpl;
 
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 8d1a9c3..92f3754 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -1,9 +1,20 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "ccodec_unit_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "CCodecBuffers_test.cpp",
         "CCodecConfig_test.cpp",
+        "FrameReassembler_test.cpp",
         "ReflectedParamUpdater_test.cpp",
     ],
 
@@ -43,6 +54,7 @@
 
 cc_test {
     name: "mc_sanity_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "MediaCodec_sanity_test.cpp",
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 5bee605..41e4fff 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -18,22 +18,32 @@
 
 #include <gtest/gtest.h>
 
+#include <codec2/hidl/client.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
+#include <C2BlockInternal.h>
 #include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
 
 namespace android {
 
+static std::shared_ptr<RawGraphicOutputBuffers> GetRawGraphicOutputBuffers(
+        int32_t width, int32_t height) {
+    std::shared_ptr<RawGraphicOutputBuffers> buffers =
+        std::make_shared<RawGraphicOutputBuffers>("test");
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_WIDTH, width);
+    format->setInt32(KEY_HEIGHT, height);
+    buffers->setFormat(format);
+    return buffers;
+}
+
 TEST(RawGraphicOutputBuffersTest, ChangeNumSlots) {
     constexpr int32_t kWidth = 3840;
     constexpr int32_t kHeight = 2160;
 
     std::shared_ptr<RawGraphicOutputBuffers> buffers =
-        std::make_shared<RawGraphicOutputBuffers>("test");
-    sp<AMessage> format{new AMessage};
-    format->setInt32("width", kWidth);
-    format->setInt32("height", kHeight);
-    buffers->setFormat(format);
+        GetRawGraphicOutputBuffers(kWidth, kHeight);
 
     std::shared_ptr<C2BlockPool> pool;
     ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool));
@@ -96,4 +106,759 @@
     }
 }
 
+TEST(RawGraphicOutputBuffersTest, WrapNullBuffer) {
+    constexpr int32_t kWidth = 320;
+    constexpr int32_t kHeight = 240;
+
+    std::shared_ptr<RawGraphicOutputBuffers> buffers =
+        GetRawGraphicOutputBuffers(kWidth, kHeight);
+
+    sp<Codec2Buffer> buffer = buffers->wrap(nullptr);
+    ASSERT_EQ(nullptr, buffer->base());
+    ASSERT_EQ(0, buffer->size());
+    ASSERT_EQ(0, buffer->offset());
+}
+
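+// Verifies that registerBuffer() publishes correct image-data layout and pixel contents
+// for flexible YUV formats, including any vendor-reported flexible pixel formats.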
+TEST(RawGraphicOutputBuffersTest, FlexYuvColorFormat) {
+    constexpr int32_t kWidth = 320;
+    constexpr int32_t kHeight = 240;
+
+    std::vector<uint32_t> flexPixelFormats({HAL_PIXEL_FORMAT_YCbCr_420_888});
+    std::shared_ptr<Codec2Client> client = Codec2Client::CreateFromService("default");
+    if (client) {
+        // Query vendor format for Flexible YUV
+        std::vector<std::unique_ptr<C2Param>> heapParams;
+        C2StoreFlexiblePixelFormatDescriptorsInfo *pixelFormatInfo = nullptr;
+        if (client->query(
+                    {},
+                    {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
+                    C2_MAY_BLOCK,
+                    &heapParams) == C2_OK
+                && heapParams.size() == 1u) {
+            pixelFormatInfo = C2StoreFlexiblePixelFormatDescriptorsInfo::From(
+                    heapParams[0].get());
+        } else {
+            pixelFormatInfo = nullptr;
+        }
+        if (pixelFormatInfo && *pixelFormatInfo) {
+            for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
+                const C2FlexiblePixelFormatDescriptorStruct &desc =
+                    pixelFormatInfo->m.values[i];
+                if (desc.bitDepth != 8
+                        || desc.subsampling != C2Color::YUV_420
+                        // TODO(b/180076105): some devices report wrong layouts
+                        // || desc.layout == C2Color::INTERLEAVED_PACKED
+                        // || desc.layout == C2Color::INTERLEAVED_ALIGNED
+                        || desc.layout == C2Color::UNKNOWN_LAYOUT) {
+                    continue;
+                }
+                flexPixelFormats.push_back(desc.pixelFormat);
+            }
+        }
+    }
+
+    for (uint32_t pixelFormat : flexPixelFormats) {
+        std::shared_ptr<RawGraphicOutputBuffers> buffers =
+            std::make_shared<RawGraphicOutputBuffers>(
+                    AStringPrintf("test pixel format 0x%x", pixelFormat).c_str());
+
+        sp<AMessage> format{new AMessage};
+        format->setInt32(KEY_WIDTH, kWidth);
+        format->setInt32(KEY_HEIGHT, kHeight);
+        format->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
+        int32_t fwkPixelFormat = 0;
+        if (C2Mapper::mapPixelFormatCodecToFramework(pixelFormat, &fwkPixelFormat)) {
+            format->setInt32("android._color-format", fwkPixelFormat);
+        }
+        buffers->setFormat(format);
+
+        std::shared_ptr<C2BlockPool> pool;
+        ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool));
+
+        std::shared_ptr<C2GraphicBlock> block;
+        ASSERT_EQ(OK, pool->fetchGraphicBlock(
+                kWidth, kHeight, pixelFormat,
+                C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block));
+
+        {
+            C2GraphicView view = block->map().get();
+            C2PlanarLayout layout = view.layout();
+
+            // Verify the block is in YUV420 format
+            ASSERT_EQ(C2PlanarLayout::TYPE_YUV, layout.type);
+            ASSERT_EQ(3u, layout.numPlanes);
+            const C2PlaneInfo& yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+            const C2PlaneInfo& uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+            const C2PlaneInfo& vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+
+            // Y plane
+            ASSERT_EQ(1u, yPlane.colSampling);
+            ASSERT_EQ(1u, yPlane.rowSampling);
+            ASSERT_EQ(8u, yPlane.allocatedDepth);
+            ASSERT_EQ(8u, yPlane.bitDepth);
+            ASSERT_EQ(0u, yPlane.rightShift);
+
+            // U plane
+            ASSERT_EQ(2u, uPlane.colSampling);
+            ASSERT_EQ(2u, uPlane.rowSampling);
+            ASSERT_EQ(8u, uPlane.allocatedDepth);
+            ASSERT_EQ(8u, uPlane.bitDepth);
+            ASSERT_EQ(0u, uPlane.rightShift);
+
+            // V plane
+            ASSERT_EQ(2u, vPlane.colSampling);
+            ASSERT_EQ(2u, vPlane.rowSampling);
+            ASSERT_EQ(8u, vPlane.allocatedDepth);
+            ASSERT_EQ(8u, vPlane.bitDepth);
+            ASSERT_EQ(0u, vPlane.rightShift);
+
+            uint8_t *yRowPtr = view.data()[C2PlanarLayout::PLANE_Y];
+            uint8_t *uRowPtr = view.data()[C2PlanarLayout::PLANE_U];
+            uint8_t *vRowPtr = view.data()[C2PlanarLayout::PLANE_V];
+            for (int32_t row = 0; row < kHeight; ++row) {
+                uint8_t *yPtr = yRowPtr;
+                uint8_t *uPtr = uRowPtr;
+                uint8_t *vPtr = vRowPtr;
+                for (int32_t col = 0; col < kWidth; ++col) {
+                    *yPtr = ((row + col) & 0xFF);
+                    yPtr += yPlane.colInc;
+
+                    if (row < kHeight / 2 && col < kWidth / 2) {
+                        *uPtr = ((row + col + 1) & 0xFF);
+                        *vPtr = ((row + col + 2) & 0xFF);
+                        uPtr += uPlane.colInc;
+                        vPtr += vPlane.colInc;
+                    }
+                }
+                yRowPtr += yPlane.rowInc;
+                if (row < kHeight / 2) {
+                    uRowPtr += uPlane.rowInc;
+                    vRowPtr += vPlane.rowInc;
+                }
+            }
+        }
+
+        std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateGraphicBuffer(block->share(
+                block->crop(), C2Fence{}));
+        size_t index;
+        sp<MediaCodecBuffer> clientBuffer;
+        ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+        ASSERT_NE(nullptr, clientBuffer);
+        sp<ABuffer> imageData;
+        ASSERT_TRUE(clientBuffer->format()->findBuffer("image-data", &imageData));
+        MediaImage2 *img = (MediaImage2 *)imageData->data();
+        ASSERT_EQ(MediaImage2::MEDIA_IMAGE_TYPE_YUV, img->mType);
+        ASSERT_EQ(3u, img->mNumPlanes);
+        ASSERT_EQ(kWidth, img->mWidth);
+        ASSERT_EQ(kHeight, img->mHeight);
+        ASSERT_EQ(8u, img->mBitDepth);
+        ASSERT_EQ(8u, img->mBitDepthAllocated);
+        const MediaImage2::PlaneInfo &yPlane = img->mPlane[MediaImage2::Y];
+        const MediaImage2::PlaneInfo &uPlane = img->mPlane[MediaImage2::U];
+        const MediaImage2::PlaneInfo &vPlane = img->mPlane[MediaImage2::V];
+        ASSERT_EQ(1u, yPlane.mHorizSubsampling);
+        ASSERT_EQ(1u, yPlane.mVertSubsampling);
+        ASSERT_EQ(2u, uPlane.mHorizSubsampling);
+        ASSERT_EQ(2u, uPlane.mVertSubsampling);
+        ASSERT_EQ(2u, vPlane.mHorizSubsampling);
+        ASSERT_EQ(2u, vPlane.mVertSubsampling);
+
+        uint8_t *yRowPtr = clientBuffer->data() + yPlane.mOffset;
+        uint8_t *uRowPtr = clientBuffer->data() + uPlane.mOffset;
+        uint8_t *vRowPtr = clientBuffer->data() + vPlane.mOffset;
+        for (int32_t row = 0; row < kHeight; ++row) {
+            uint8_t *yPtr = yRowPtr;
+            uint8_t *uPtr = uRowPtr;
+            uint8_t *vPtr = vRowPtr;
+            for (int32_t col = 0; col < kWidth; ++col) {
+                ASSERT_EQ((row + col) & 0xFF, *yPtr);
+                yPtr += yPlane.mColInc;
+                if (row < kHeight / 2 && col < kWidth / 2) {
+                    ASSERT_EQ((row + col + 1) & 0xFF, *uPtr);
+                    ASSERT_EQ((row + col + 2) & 0xFF, *vPtr);
+                    uPtr += uPlane.mColInc;
+                    vPtr += vPlane.mColInc;
+                }
+            }
+            yRowPtr += yPlane.mRowInc;
+            if (row < kHeight / 2) {
+                uRowPtr += uPlane.mRowInc;
+                vRowPtr += vPlane.mRowInc;
+            }
+        }
+    }
+}
+
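+// Same verification for the 10-bit P010 format; the test is skipped if the platform
+// block pool cannot allocate a P010 graphic block.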
+TEST(RawGraphicOutputBuffersTest, P010ColorFormat) {
+    constexpr int32_t kWidth = 320;
+    constexpr int32_t kHeight = 240;
+
+    std::shared_ptr<RawGraphicOutputBuffers> buffers =
+        std::make_shared<RawGraphicOutputBuffers>("test P010");
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_WIDTH, kWidth);
+    format->setInt32(KEY_HEIGHT, kHeight);
+    format->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUVP010);
+    int32_t fwkPixelFormat = 0;
+    if (C2Mapper::mapPixelFormatCodecToFramework(HAL_PIXEL_FORMAT_YCBCR_P010, &fwkPixelFormat)) {
+        format->setInt32("android._color-format", fwkPixelFormat);
+    }
+    buffers->setFormat(format);
+
+    std::shared_ptr<C2BlockPool> pool;
+    ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool));
+
+    std::shared_ptr<C2GraphicBlock> block;
+    c2_status_t err = pool->fetchGraphicBlock(
+            kWidth, kHeight, HAL_PIXEL_FORMAT_YCBCR_P010,
+            C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+    if (err != C2_OK) {
+        GTEST_SKIP();
+    }
+
+    {
+        C2GraphicView view = block->map().get();
+        C2PlanarLayout layout = view.layout();
+
+        // Verify the block is in P010 layout (4:2:0, 10-bit samples in 16-bit words)
+        ASSERT_EQ(C2PlanarLayout::TYPE_YUV, layout.type);
+        ASSERT_EQ(3u, layout.numPlanes);
+        const C2PlaneInfo& yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+        const C2PlaneInfo& uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+        const C2PlaneInfo& vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+
+        // Y plane
+        ASSERT_EQ(1u, yPlane.colSampling);
+        ASSERT_EQ(1u, yPlane.rowSampling);
+        ASSERT_EQ(16u, yPlane.allocatedDepth);
+        ASSERT_EQ(10u, yPlane.bitDepth);
+        ASSERT_EQ(6u, yPlane.rightShift);
+
+        // U plane
+        ASSERT_EQ(2u, uPlane.colSampling);
+        ASSERT_EQ(2u, uPlane.rowSampling);
+        ASSERT_EQ(16u, uPlane.allocatedDepth);
+        ASSERT_EQ(10u, uPlane.bitDepth);
+        ASSERT_EQ(6u, uPlane.rightShift);
+
+        // V plane
+        ASSERT_EQ(2u, vPlane.colSampling);
+        ASSERT_EQ(2u, vPlane.rowSampling);
+        ASSERT_EQ(16u, vPlane.allocatedDepth);
+        ASSERT_EQ(10u, vPlane.bitDepth);
+        ASSERT_EQ(6u, vPlane.rightShift);
+
+        uint8_t *yRowPtr = view.data()[C2PlanarLayout::PLANE_Y];
+        uint8_t *uRowPtr = view.data()[C2PlanarLayout::PLANE_U];
+        uint8_t *vRowPtr = view.data()[C2PlanarLayout::PLANE_V];
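+        // Write 10-bit samples as 16-bit little-endian words: the low 2 bits go into the
+        // top of byte 0 and the high 8 bits into byte 1, matching rightShift == 6 above.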
+        for (int32_t row = 0; row < kHeight; ++row) {
+            uint8_t *yPtr = yRowPtr;
+            uint8_t *uPtr = uRowPtr;
+            uint8_t *vPtr = vRowPtr;
+            for (int32_t col = 0; col < kWidth; ++col) {
+                yPtr[0] = ((row + col) & 0x3) << 6;
+                yPtr[1] = ((row + col) & 0x3FC) >> 2;
+                yPtr += yPlane.colInc;
+
+                if (row < kHeight / 2 && col < kWidth / 2) {
+                    uPtr[0] = ((row + col + 1) & 0x3) << 6;
+                    uPtr[1] = ((row + col + 1) & 0x3FC) >> 2;
+                    vPtr[0] = ((row + col + 2) & 0x3) << 6;
+                    vPtr[1] = ((row + col + 2) & 0x3FC) >> 2;
+                    uPtr += uPlane.colInc;
+                    vPtr += vPlane.colInc;
+                }
+            }
+            yRowPtr += yPlane.rowInc;
+            if (row < kHeight / 2) {
+                uRowPtr += uPlane.rowInc;
+                vRowPtr += vPlane.rowInc;
+            }
+        }
+    }
+
+    std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateGraphicBuffer(block->share(
+            block->crop(), C2Fence{}));
+    size_t index;
+    sp<MediaCodecBuffer> clientBuffer;
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    ASSERT_NE(nullptr, clientBuffer);
+    sp<ABuffer> imageData;
+    ASSERT_TRUE(clientBuffer->format()->findBuffer("image-data", &imageData));
+    MediaImage2 *img = (MediaImage2 *)imageData->data();
+    ASSERT_EQ(MediaImage2::MEDIA_IMAGE_TYPE_YUV, img->mType);
+    ASSERT_EQ(3u, img->mNumPlanes);
+    ASSERT_EQ(kWidth, img->mWidth);
+    ASSERT_EQ(kHeight, img->mHeight);
+    ASSERT_EQ(10u, img->mBitDepth);
+    ASSERT_EQ(16u, img->mBitDepthAllocated);
+    const MediaImage2::PlaneInfo &yPlane = img->mPlane[MediaImage2::Y];
+    const MediaImage2::PlaneInfo &uPlane = img->mPlane[MediaImage2::U];
+    const MediaImage2::PlaneInfo &vPlane = img->mPlane[MediaImage2::V];
+    ASSERT_EQ(1u, yPlane.mHorizSubsampling);
+    ASSERT_EQ(1u, yPlane.mVertSubsampling);
+    ASSERT_EQ(2u, uPlane.mHorizSubsampling);
+    ASSERT_EQ(2u, uPlane.mVertSubsampling);
+    ASSERT_EQ(2u, vPlane.mHorizSubsampling);
+    ASSERT_EQ(2u, vPlane.mVertSubsampling);
+
+    uint8_t *yRowPtr = clientBuffer->data() + yPlane.mOffset;
+    uint8_t *uRowPtr = clientBuffer->data() + uPlane.mOffset;
+    uint8_t *vRowPtr = clientBuffer->data() + vPlane.mOffset;
+    for (int32_t row = 0; row < kHeight; ++row) {
+        uint8_t *yPtr = yRowPtr;
+        uint8_t *uPtr = uRowPtr;
+        uint8_t *vPtr = vRowPtr;
+        for (int32_t col = 0; col < kWidth; ++col) {
+            ASSERT_EQ(((row + col) & 0x3) << 6, yPtr[0]);
+            ASSERT_EQ(((row + col) & 0x3FC) >> 2, yPtr[1]);
+            yPtr += yPlane.mColInc;
+            if (row < kHeight / 2 && col < kWidth / 2) {
+                ASSERT_EQ(((row + col + 1) & 0x3) << 6, uPtr[0]);
+                ASSERT_EQ(((row + col + 1) & 0x3FC) >> 2, uPtr[1]);
+                ASSERT_EQ(((row + col + 2) & 0x3) << 6, vPtr[0]);
+                ASSERT_EQ(((row + col + 2) & 0x3FC) >> 2, vPtr[1]);
+                uPtr += uPlane.mColInc;
+                vPtr += vPlane.mColInc;
+            }
+        }
+        yRowPtr += yPlane.mRowInc;
+        if (row < kHeight / 2) {
+            uRowPtr += uPlane.mRowInc;
+            vRowPtr += vPlane.mRowInc;
+        }
+    }
+}
+
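+// Minimal in-memory C2GraphicAllocation: map() returns pointers into a plain byte vector
+// using the caller-supplied layout and plane offsets, so the layout tests below can build
+// graphic blocks with arbitrary plane arrangements.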
+class TestGraphicAllocation : public C2GraphicAllocation {
+public:
+    TestGraphicAllocation(
+            uint32_t width,
+            uint32_t height,
+            const C2PlanarLayout &layout,
+            size_t capacity,
+            std::vector<size_t> offsets)
+        : C2GraphicAllocation(width, height),
+          mLayout(layout),
+          mMemory(capacity, 0xAA),
+          mOffsets(offsets) {
+    }
+
+    c2_status_t map(
+            C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+            C2PlanarLayout *layout, uint8_t **addr) override {
+        (void)rect;
+        (void)usage;
+        (void)fence;
+        *layout = mLayout;
+        for (size_t i = 0; i < mLayout.numPlanes; ++i) {
+            addr[i] = mMemory.data() + mOffsets[i];
+        }
+        return C2_OK;
+    }
+
+    c2_status_t unmap(uint8_t **, C2Rect, C2Fence *) override { return C2_OK; }
+
+    C2Allocator::id_t getAllocatorId() const override { return -1; }
+
+    const C2Handle *handle() const override { return nullptr; }
+
+    bool equals(const std::shared_ptr<const C2GraphicAllocation> &other) const override {
+        return other.get() == this;
+    }
+
+private:
+    C2PlanarLayout mLayout;
+    std::vector<uint8_t> mMemory;
+    std::vector<uint8_t *> mAddr;
+    std::vector<size_t> mOffsets;
+};
+
+class LayoutTest : public ::testing::TestWithParam<std::tuple<bool, std::string, bool, int32_t>> {
+private:
+    static C2PlanarLayout YUVPlanarLayout(int32_t stride) {
+        C2PlanarLayout layout = {
+            C2PlanarLayout::TYPE_YUV,
+            3,  /* numPlanes */
+            3,  /* rootPlanes */
+            {},  /* planes --- to be filled below */
+        };
+        layout.planes[C2PlanarLayout::PLANE_Y] = {
+            C2PlaneInfo::CHANNEL_Y,
+            1,  /* colInc */
+            stride,  /* rowInc */
+            1,  /* colSampling */
+            1,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_Y,  /* rootIx */
+            0,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_U] = {
+            C2PlaneInfo::CHANNEL_CB,
+            1,  /* colInc */
+            stride / 2,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_U,  /* rootIx */
+            0,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_V] = {
+            C2PlaneInfo::CHANNEL_CR,
+            1,  /* colInc */
+            stride / 2,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_V,  /* rootIx */
+            0,  /* offset */
+        };
+        return layout;
+    }
+
+    static C2PlanarLayout YUVSemiPlanarLayout(int32_t stride) {
+        C2PlanarLayout layout = {
+            C2PlanarLayout::TYPE_YUV,
+            3,  /* numPlanes */
+            2,  /* rootPlanes */
+            {},  /* planes --- to be filled below */
+        };
+        layout.planes[C2PlanarLayout::PLANE_Y] = {
+            C2PlaneInfo::CHANNEL_Y,
+            1,  /* colInc */
+            stride,  /* rowInc */
+            1,  /* colSampling */
+            1,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_Y,  /* rootIx */
+            0,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_U] = {
+            C2PlaneInfo::CHANNEL_CB,
+            2,  /* colInc */
+            stride,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_U,  /* rootIx */
+            0,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_V] = {
+            C2PlaneInfo::CHANNEL_CR,
+            2,  /* colInc */
+            stride,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_U,  /* rootIx */
+            1,  /* offset */
+        };
+        return layout;
+    }
+
+    static C2PlanarLayout YVUSemiPlanarLayout(int32_t stride) {
+        C2PlanarLayout layout = {
+            C2PlanarLayout::TYPE_YUV,
+            3,  /* numPlanes */
+            2,  /* rootPlanes */
+            {},  /* planes --- to be filled below */
+        };
+        layout.planes[C2PlanarLayout::PLANE_Y] = {
+            C2PlaneInfo::CHANNEL_Y,
+            1,  /* colInc */
+            stride,  /* rowInc */
+            1,  /* colSampling */
+            1,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_Y,  /* rootIx */
+            0,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_U] = {
+            C2PlaneInfo::CHANNEL_CB,
+            2,  /* colInc */
+            stride,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_V,  /* rootIx */
+            1,  /* offset */
+        };
+        layout.planes[C2PlanarLayout::PLANE_V] = {
+            C2PlaneInfo::CHANNEL_CR,
+            2,  /* colInc */
+            stride,  /* rowInc */
+            2,  /* colSampling */
+            2,  /* rowSampling */
+            8,  /* allocatedDepth */
+            8,  /* bitDepth */
+            0,  /* rightShift */
+            C2PlaneInfo::NATIVE,
+            C2PlanarLayout::PLANE_V,  /* rootIx */
+            0,  /* offset */
+        };
+        return layout;
+    }
+
+    static std::shared_ptr<C2GraphicBlock> CreateGraphicBlock(
+            uint32_t width,
+            uint32_t height,
+            const C2PlanarLayout &layout,
+            size_t capacity,
+            std::vector<size_t> offsets) {
+        std::shared_ptr<C2GraphicAllocation> alloc = std::make_shared<TestGraphicAllocation>(
+                width,
+                height,
+                layout,
+                capacity,
+                offsets);
+
+        return _C2BlockFactory::CreateGraphicBlock(alloc);
+    }
+
+    static constexpr uint8_t GetPixelValue(uint8_t value, uint32_t row, uint32_t col) {
+        return (uint32_t(value) * row + col) & 0xFF;
+    }
+
+    static void FillPlane(C2GraphicView &view, size_t index, uint8_t value) {
+        C2PlanarLayout layout = view.layout();
+
+        uint8_t *rowPtr = view.data()[index];
+        C2PlaneInfo plane = layout.planes[index];
+        for (uint32_t row = 0; row < view.height() / plane.rowSampling; ++row) {
+            uint8_t *colPtr = rowPtr;
+            for (uint32_t col = 0; col < view.width() / plane.colSampling; ++col) {
+                *colPtr = GetPixelValue(value, row, col);
+                colPtr += plane.colInc;
+            }
+            rowPtr += plane.rowInc;
+        }
+    }
+
+    static void FillBlock(const std::shared_ptr<C2GraphicBlock> &block) {
+        C2GraphicView view = block->map().get();
+
+        FillPlane(view, C2PlanarLayout::PLANE_Y, 'Y');
+        FillPlane(view, C2PlanarLayout::PLANE_U, 'U');
+        FillPlane(view, C2PlanarLayout::PLANE_V, 'V');
+    }
+
+    static bool VerifyPlane(
+            const MediaImage2 *mediaImage,
+            const uint8_t *base,
+            uint32_t index,
+            uint8_t value,
+            std::string *errorMsg) {
+        *errorMsg = "";
+        MediaImage2::PlaneInfo plane = mediaImage->mPlane[index];
+        const uint8_t *rowPtr = base + plane.mOffset;
+        for (uint32_t row = 0; row < mediaImage->mHeight / plane.mVertSubsampling; ++row) {
+            const uint8_t *colPtr = rowPtr;
+            for (uint32_t col = 0; col < mediaImage->mWidth / plane.mHorizSubsampling; ++col) {
+                if (GetPixelValue(value, row, col) != *colPtr) {
+                    *errorMsg = AStringPrintf("row=%u col=%u expected=%02x actual=%02x",
+                            row, col, GetPixelValue(value, row, col), *colPtr).c_str();
+                    return false;
+                }
+                colPtr += plane.mColInc;
+            }
+            rowPtr += plane.mRowInc;
+        }
+        return true;
+    }
+
+public:
+    static constexpr int32_t kWidth = 320;
+    static constexpr int32_t kHeight = 240;
+    static constexpr int32_t kGapLength = kWidth * kHeight * 10;
+
+    static std::shared_ptr<C2Buffer> CreateAndFillBufferFromParam(const ParamType &param) {
+        bool contiguous = std::get<0>(param);
+        std::string planeOrderStr = std::get<1>(param);
+        bool planar = std::get<2>(param);
+        int32_t stride = std::get<3>(param);
+
+        C2PlanarLayout::plane_index_t planeOrder[3];
+        C2PlanarLayout layout;
+
+        if (planeOrderStr.size() != 3) {
+            return nullptr;
+        }
+        for (size_t i = 0; i < 3; ++i) {
+            C2PlanarLayout::plane_index_t planeIndex;
+            switch (planeOrderStr[i]) {
+                case 'Y': planeIndex = C2PlanarLayout::PLANE_Y; break;
+                case 'U': planeIndex = C2PlanarLayout::PLANE_U; break;
+                case 'V': planeIndex = C2PlanarLayout::PLANE_V; break;
+                default:  return nullptr;
+            }
+            planeOrder[i] = planeIndex;
+        }
+
+        if (planar) {
+            layout = YUVPlanarLayout(stride);
+        } else {  // semi-planar
+            for (size_t i = 0; i < 3; ++i) {
+                if (planeOrder[i] == C2PlanarLayout::PLANE_U) {
+                    layout = YUVSemiPlanarLayout(stride);
+                    break;
+                }
+                if (planeOrder[i] == C2PlanarLayout::PLANE_V) {
+                    layout = YVUSemiPlanarLayout(stride);
+                    break;
+                }
+            }
+        }
+        size_t yPlaneSize = stride * kHeight;
+        size_t uvPlaneSize = stride * kHeight / 4;
+        size_t capacity = yPlaneSize + uvPlaneSize * 2;
+        std::vector<size_t> offsets(3);
+
+        if (!contiguous) {
+            if (planar) {
+                capacity += kGapLength * 2;
+            } else {  // semi-planar
+                capacity += kGapLength;
+            }
+        }
+
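+        // Lay the planes out in the requested order: each plane starts where the previous
+        // one ends (plus kGapLength when non-contiguous); in the semi-planar case the two
+        // chroma planes are interleaved one byte apart.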
+        offsets[planeOrder[0]] = 0;
+        size_t planeSize = (planeOrder[0] == C2PlanarLayout::PLANE_Y) ? yPlaneSize : uvPlaneSize;
+        for (size_t i = 1; i < 3; ++i) {
+            offsets[planeOrder[i]] = offsets[planeOrder[i - 1]] + planeSize;
+            if (!contiguous) {
+                offsets[planeOrder[i]] += kGapLength;
+            }
+            planeSize = (planeOrder[i] == C2PlanarLayout::PLANE_Y) ? yPlaneSize : uvPlaneSize;
+            if (!planar  // semi-planar
+                    && planeOrder[i - 1] != C2PlanarLayout::PLANE_Y
+                    && planeOrder[i] != C2PlanarLayout::PLANE_Y) {
+                offsets[planeOrder[i]] = offsets[planeOrder[i - 1]] + 1;
+                planeSize = uvPlaneSize * 2 - 1;
+            }
+        }
+
+        std::shared_ptr<C2GraphicBlock> block = CreateGraphicBlock(
+                kWidth,
+                kHeight,
+                layout,
+                capacity,
+                offsets);
+        FillBlock(block);
+        return C2Buffer::CreateGraphicBuffer(
+                block->share(block->crop(), C2Fence()));
+    }
+
+    static bool VerifyClientBuffer(
+            const sp<MediaCodecBuffer> &buffer, std::string *errorMsg) {
+        *errorMsg = "";
+        sp<ABuffer> imageData;
+        if (!buffer->format()->findBuffer("image-data", &imageData)) {
+            *errorMsg = "Missing image data";
+            return false;
+        }
+        MediaImage2 *mediaImage = (MediaImage2 *)imageData->data();
+        if (mediaImage->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+            *errorMsg = AStringPrintf("Unexpected type: %d", mediaImage->mType).c_str();
+            return false;
+        }
+        std::string planeErrorMsg;
+        if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::Y, 'Y', &planeErrorMsg)) {
+            *errorMsg = "Y plane does not match: " + planeErrorMsg;
+            return false;
+        }
+        if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::U, 'U', &planeErrorMsg)) {
+            *errorMsg = "U plane does not match: " + planeErrorMsg;
+            return false;
+        }
+        if (!VerifyPlane(mediaImage, buffer->base(), MediaImage2::V, 'V', &planeErrorMsg)) {
+            *errorMsg = "V plane does not match: " + planeErrorMsg;
+            return false;
+        }
+
+        int32_t width, height, stride;
+        buffer->format()->findInt32(KEY_WIDTH, &width);
+        buffer->format()->findInt32(KEY_HEIGHT, &height);
+        buffer->format()->findInt32(KEY_STRIDE, &stride);
+
+        MediaImage2 legacyYLayout = {
+            MediaImage2::MEDIA_IMAGE_TYPE_Y,
+            1,  // mNumPlanes
+            uint32_t(width),
+            uint32_t(height),
+            8,
+            8,
+            {},  // mPlane
+        };
+        legacyYLayout.mPlane[MediaImage2::Y] = {
+            0,  // mOffset
+            1,  // mColInc
+            stride,  // mRowInc
+            1,  // mHorizSubsampling
+            1,  // mVertSubsampling
+        };
+        if (!VerifyPlane(&legacyYLayout, buffer->data(), MediaImage2::Y, 'Y', &planeErrorMsg)) {
+            *errorMsg = "Y plane by legacy layout does not match: " + planeErrorMsg;
+            return false;
+        }
+        return true;
+    }
+
+};
+
+TEST_P(LayoutTest, VerifyLayout) {
+    std::shared_ptr<RawGraphicOutputBuffers> buffers =
+        GetRawGraphicOutputBuffers(kWidth, kHeight);
+
+    std::shared_ptr<C2Buffer> c2Buffer = CreateAndFillBufferFromParam(GetParam());
+    ASSERT_NE(nullptr, c2Buffer);
+    sp<MediaCodecBuffer> clientBuffer;
+    size_t index;
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    ASSERT_NE(nullptr, clientBuffer);
+    std::string errorMsg;
+    ASSERT_TRUE(VerifyClientBuffer(clientBuffer, &errorMsg)) << errorMsg;
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        RawGraphicOutputBuffersTest,
+        LayoutTest,
+        ::testing::Combine(
+            ::testing::Bool(),  /* contiguous */
+            ::testing::Values("YUV", "YVU", "UVY", "VUY"),
+            ::testing::Bool(),  /* planar */
+            ::testing::Values(320, 512)),
+        [](const ::testing::TestParamInfo<LayoutTest::ParamType> &info) {
+            std::string contiguous = std::get<0>(info.param) ? "Contiguous" : "Noncontiguous";
+            std::string planar = std::get<2>(info.param) ? "Planar" : "SemiPlanar";
+            return contiguous
+                    + std::get<1>(info.param)
+                    + planar
+                    + std::to_string(std::get<3>(info.param));
+        });
+
 } // namespace android
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index c9caa01..7c660dc 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -208,6 +208,24 @@
                         .withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
                         .build());
 
+                if (isEncoder) {
+                    addParameter(
+                            DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::input(0u))
+                            .withFields({C2F(mInputBitrate, value).any()})
+                            .withSetter(Setter<C2StreamBitrateInfo::input>)
+                            .build());
+
+                    addParameter(
+                            DefineParam(mOutputBitrate, C2_PARAMKEY_BITRATE)
+                            .withDefault(new C2StreamBitrateInfo::output(0u))
+                            .withFields({C2F(mOutputBitrate, value).any()})
+                            .calculatedAs(
+                                Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
+                                mInputBitrate)
+                            .build());
+                }
+
                 // TODO: more SDK params
             }
         private:
@@ -221,11 +239,19 @@
             std::shared_ptr<C2StreamVendorInt64Info::output> mInt64Output;
             std::shared_ptr<C2PortVendorStringInfo::input> mStringInput;
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
+            std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
+            std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
                 return C2R::Ok();
             }
+
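+            // Setter that copies the dependency's value into this parameter (used here to
+            // mirror the input bitrate into the output bitrate).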
+            template<typename ME, typename DEP>
+            static C2R Copy(bool, C2P<ME> &me, const C2P<DEP> &dep) {
+                me.set().value = dep.v.value;
+                return C2R::Ok();
+            }
         };
 
         Impl mImpl;
@@ -457,4 +483,97 @@
             << "mInputFormat = " << mConfig.mInputFormat->debugString().c_str();
 }
 
+TEST_F(CCodecConfigTest, DataspaceUpdate) {
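+    // The dataspace set on the input surface should override the SDK-configured color
+    // aspects in the output format, and the override should persist across an unrelated
+    // bitrate update.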
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_AVC);
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+    class InputSurfaceStub : public InputSurfaceWrapper {
+    public:
+        ~InputSurfaceStub() override = default;
+        status_t connect(const std::shared_ptr<Codec2Client::Component> &) override {
+            return OK;
+        }
+        void disconnect() override {}
+        status_t start() override { return OK; }
+        status_t signalEndOfInputStream() override { return OK; }
+        status_t configure(Config &) override { return OK; }
+    };
+    mConfig.mInputSurface = std::make_shared<InputSurfaceStub>();
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_COLOR_RANGE, COLOR_RANGE_LIMITED);
+    format->setInt32(KEY_COLOR_STANDARD, COLOR_STANDARD_BT709);
+    format->setInt32(KEY_COLOR_TRANSFER, COLOR_TRANSFER_SDR_VIDEO);
+    format->setInt32(KEY_BIT_RATE, 100);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_TRUE(mConfig.updateConfiguration(configUpdate, D::ALL));
+
+    int32_t range{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_LIMITED, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t standard{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT709, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    int32_t transfer{0};
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_SDR_VIDEO, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    mConfig.mInputSurface->setDataSpace(HAL_DATASPACE_BT2020_PQ);
+
+    // Dataspace from input surface should override the configured setting
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    // Simulate bitrate update
+    format = new AMessage;
+    format->setInt32(KEY_BIT_RATE, 200);
+    configUpdate.clear();
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+    ASSERT_EQ(OK, mConfig.setParameters(mConfigurable, configUpdate, C2_MAY_BLOCK));
+
+    // Color information should remain the same
+    mConfig.updateFormats(D::ALL);
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_RANGE_FULL, range)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+    ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+    EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+            << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
new file mode 100644
index 0000000..6738ee7
--- /dev/null
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FrameReassembler.h"
+
+#include <gtest/gtest.h>
+
+#include <C2PlatformSupport.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+static size_t BytesPerSample(C2Config::pcm_encoding_t encoding) {
+    return encoding == PCM_8 ? 1
+         : encoding == PCM_16 ? 2
+         : encoding == PCM_FLOAT ? 4 : 0;
+}
+
+static uint64_t Diff(c2_cntr64_t a, c2_cntr64_t b) {
+    return std::abs((a - b).peek());
+}
+
+class FrameReassemblerTest : public ::testing::Test {
+public:
+    static const C2MemoryUsage kUsage;
+    static constexpr uint64_t kTimestampToleranceUs = 100;
+
+    FrameReassemblerTest() {
+        mInitStatus = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &mPool);
+    }
+
+    status_t initStatus() const { return mInitStatus; }
+
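+    // Feeds |count| equally sized input buffers through the reassembler and verifies
+    // that each emitted work item carries exactly one encoder frame with the expected
+    // timestamp and byte pattern, and that the total output matches expectedOutputSize.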
+    void testPushSameSize(
+            size_t encoderFrameSize,
+            size_t sampleRate,
+            size_t channelCount,
+            C2Config::pcm_encoding_t encoding,
+            size_t inputFrameSizeInBytes,
+            size_t count,
+            size_t expectedOutputSize) {
+        FrameReassembler frameReassembler;
+        frameReassembler.init(
+                mPool,
+                kUsage,
+                encoderFrameSize,
+                sampleRate,
+                channelCount,
+                encoding);
+
+        ASSERT_TRUE(frameReassembler) << "FrameReassembler init failed";
+
+        size_t inputIndex = 0, outputIndex = 0;
+        size_t expectCount = 0;
+        for (size_t i = 0; i < count; ++i) {
+            sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
+                    new AMessage, new ABuffer(inputFrameSizeInBytes));
+            buffer->setRange(0, inputFrameSizeInBytes);
+            buffer->meta()->setInt64(
+                    "timeUs",
+                    inputIndex * 1000000 / sampleRate / channelCount / BytesPerSample(encoding));
+            if (i == count - 1) {
+                buffer->meta()->setInt32("eos", 1);
+            }
+            for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+                buffer->base()[j] = (inputIndex & 0xFF);
+            }
+            std::list<std::unique_ptr<C2Work>> items;
+            ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
+            while (!items.empty()) {
+                std::unique_ptr<C2Work> work = std::move(*items.begin());
+                items.erase(items.begin());
+                // Verify timestamp
+                uint64_t expectedTimeUs =
+                    outputIndex * 1000000 / sampleRate / channelCount / BytesPerSample(encoding);
+                EXPECT_GE(
+                        kTimestampToleranceUs,
+                        Diff(expectedTimeUs, work->input.ordinal.timestamp))
+                    << "expected timestamp: " << expectedTimeUs
+                    << " actual timestamp: " << work->input.ordinal.timestamp.peeku()
+                    << " output index: " << outputIndex;
+
+                // Verify buffer
+                ASSERT_EQ(1u, work->input.buffers.size());
+                std::shared_ptr<C2Buffer> buffer = work->input.buffers.front();
+                ASSERT_EQ(C2BufferData::LINEAR, buffer->data().type());
+                ASSERT_EQ(1u, buffer->data().linearBlocks().size());
+                C2ReadView view = buffer->data().linearBlocks().front().map().get();
+                ASSERT_EQ(C2_OK, view.error());
+                ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
+                for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
+                    ASSERT_TRUE(outputIndex < inputIndex
+                             || inputIndex == inputFrameSizeInBytes * count);
+                    uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
+                    if (expectCount < 10) {
+                        ++expectCount;
+                        EXPECT_EQ(expected, view.data()[j]) << "output index = " << outputIndex;
+                    }
+                }
+            }
+        }
+
+        ASSERT_EQ(inputFrameSizeInBytes * count, inputIndex);
+        size_t encoderFrameSizeInBytes =
+            encoderFrameSize * channelCount * BytesPerSample(encoding);
+        ASSERT_EQ(0, outputIndex % encoderFrameSizeInBytes)
+            << "output size must be multiple of frame size: output size = " << outputIndex
+            << " frame size = " << encoderFrameSizeInBytes;
+        ASSERT_EQ(expectedOutputSize, outputIndex)
+            << "output size must be smallest multiple of frame size, "
+            << "equal to or larger than input size. output size = " << outputIndex
+            << " input size = " << inputIndex << " frame size = " << encoderFrameSizeInBytes;
+    }
+
+private:
+    status_t mInitStatus;
+    std::shared_ptr<C2BlockPool> mPool;
+};
+
+const C2MemoryUsage FrameReassemblerTest::kUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+// Push frames with exactly the same size as the encoder requested.
+TEST_F(FrameReassemblerTest, PushExactFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
+            10 /* count */,
+            10240 /* expected output size = 10 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+            10 /* count */,
+            20480 /* expected output size = 10 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+            10 /* count */,
+            40960 /* expected output size = 10 * 4096 bytes/frame */);
+}
+
+// Push frames at half the size the encoder requested.
+TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes per sample */,
+            10 /* count */,
+            5120 /* expected output size = 5 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            10240 /* expected output size = 5 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            20480 /* expected output size = 5 * 4096 bytes/frame */);
+}
+
+// Push frames at twice the size the encoder requested.
+TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes per sample */,
+            10 /* count */,
+            20480 /* expected output size = 20 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            40960 /* expected output size = 20 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            81920 /* expected output size = 20 * 4096 bytes/frame */);
+}
+
+// Push frames a little larger (+5 samples) than the requested size.
+TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes per sample */,
+            10 /* count */,
+            11264 /* expected output size = 11 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            22528 /* expected output size = 11 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            45056 /* expected output size = 11 * 4096 bytes/frame */);
+}
+
+// Push frames slightly smaller (-5 samples) than the requested size.
+TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 byte per sample */,
+            10 /* count */,
+            10240 /* expected output size = 10 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
+            10 /* count */,
+            20480 /* expected output size = 10 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
+            10 /* count */,
+            40960 /* expected output size = 10 * 4096 bytes/frame */);
+}
+
+// Push the input one byte at a time.
+TEST_F(FrameReassemblerTest, PushSingleByte) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            100352 /* expected output size = 98 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            100352 /* expected output size = 49 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            1 /* input frame size in bytes */,
+            100000 /* count */,
+            102400 /* expected output size = 25 * 4096 bytes/frame */);
+}
+
+// Push one big chunk.
+TEST_F(FrameReassemblerTest, PushBigChunk) {
+    ASSERT_EQ(OK, initStatus());
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_8,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            100352 /* expected output size = 98 * 1024 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_16,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            100352 /* expected output size = 49 * 2048 bytes/frame */);
+    testPushSameSize(
+            1024 /* frame size in samples */,
+            48000 /* sample rate */,
+            1 /* channel count */,
+            PCM_FLOAT,
+            100000 /* input frame size in bytes */,
+            1 /* count */,
+            102400 /* expected output size = 25 * 4096 bytes/frame */);
+}
+
+} // namespace android
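
For reference, the expected output sizes in the tests above all follow one rule: the
reassembler emits only whole encoder frames and pads a trailing partial frame out to a
full frame at end-of-stream. A minimal sketch of that arithmetic (the helper name is
hypothetical and not part of the test):

    #include <cstddef>

    static size_t ExpectedOutputBytes(size_t frameSizeInSamples, size_t channelCount,
                                      size_t bytesPerSample, size_t inputSizeInBytes,
                                      size_t count) {
        const size_t frameBytes = frameSizeInSamples * channelCount * bytesPerSample;
        const size_t totalInput = inputSizeInBytes * count;
        // Round up: a trailing partial frame is padded to a full frame.
        const size_t frames = (totalInput + frameBytes - 1) / frameBytes;
        return frames * frameBytes;
    }
    // e.g. PushSingleByte / PCM_16: frameBytes = 2048, totalInput = 100000 -> 49 frames -> 100352.
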
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 6287221..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libsfplugin_ccodec_utils",
     vendor_available: true,
     min_sdk_version: "29",
@@ -24,11 +33,13 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libnativewindow",
         "libstagefright_foundation",
         "libutils",
     ],
 
     static_libs: [
+        "libarect",
         "libyuv_static",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bf2a07e..5f87c66 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
 #include <list>
 #include <mutex>
 
+#include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/foundation/AUtils.h>
 
@@ -121,32 +122,69 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = view.data()[0];
-        const uint8_t* src_u = view.data()[1];
-        const uint8_t* src_v = view.data()[2];
-        int32_t src_stride_y = view.layout().planes[0].rowInc;
-        int32_t src_stride_u = view.layout().planes[1].rowInc;
-        int32_t src_stride_v = view.layout().planes[2].rowInc;
-        uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
-        uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
-        uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
-        int32_t dst_stride_y = img->mPlane[0].mRowInc;
-        int32_t dst_stride_u = img->mPlane[1].mRowInc;
-        int32_t dst_stride_v = img->mPlane[2].mRowInc;
-        if (IsNV12(view) && IsI420(img)) {
+    const uint8_t* src_y = view.data()[0];
+    const uint8_t* src_u = view.data()[1];
+    const uint8_t* src_v = view.data()[2];
+    int32_t src_stride_y = view.layout().planes[0].rowInc;
+    int32_t src_stride_u = view.layout().planes[1].rowInc;
+    int32_t src_stride_v = view.layout().planes[2].rowInc;
+    uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
+    uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
+    uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
+    int32_t dst_stride_y = img->mPlane[0].mRowInc;
+    int32_t dst_stride_u = img->mPlane[1].mRowInc;
+    int32_t dst_stride_v = img->mPlane[2].mRowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+
+    if (IsNV12(view)) {
+        if (IsNV12(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(view)) {
+        if (IsNV12(img)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(img)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(view)) {
+        if (IsNV12(img)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.crop().width,
-                                    view.crop().height)) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(img)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(img)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<true>(view, img, imgBase);
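
One detail worth noting in the branches above: both the NV12-to-NV21 and NV21-to-NV12 paths
call libyuv::NV21ToNV12, because swapping the interleaved chroma byte order is the same
operation in either direction; only which plane pointers are passed as source and destination
differs. A standalone illustration (2x2 frame, values invented for the example):

    #include <cstdint>
    #include <libyuv.h>  // umbrella header; NV21ToNV12 copies Y and swaps the chroma byte order

    void SwapChromaOrderExample() {
        uint8_t y[4]  = {0, 1, 2, 3};  // 2x2 luma plane
        uint8_t vu[2] = {200, 100};    // NV21 chroma: V then U
        uint8_t outY[4];
        uint8_t outUV[2];              // NV12 chroma: U then V
        libyuv::NV21ToNV12(y, 2, vu, 2, outY, 2, outUV, 2, 2 /* width */, 2 /* height */);
        // outUV is now {100, 200}; passing the planes the other way round performs
        // the NV12-to-NV21 conversion, exactly as ImageCopy does above.
    }
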
@@ -156,32 +194,68 @@
     if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
-    if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
-        // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
-        const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
-        const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
-        const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
-        int32_t src_stride_y = img->mPlane[0].mRowInc;
-        int32_t src_stride_u = img->mPlane[1].mRowInc;
-        int32_t src_stride_v = img->mPlane[2].mRowInc;
-        uint8_t* dst_y = view.data()[0];
-        uint8_t* dst_u = view.data()[1];
-        uint8_t* dst_v = view.data()[2];
-        int32_t dst_stride_y = view.layout().planes[0].rowInc;
-        int32_t dst_stride_u = view.layout().planes[1].rowInc;
-        int32_t dst_stride_v = view.layout().planes[2].rowInc;
-        if (IsNV12(img) && IsI420(view)) {
+    const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
+    const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
+    const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
+    int32_t src_stride_y = img->mPlane[0].mRowInc;
+    int32_t src_stride_u = img->mPlane[1].mRowInc;
+    int32_t src_stride_v = img->mPlane[2].mRowInc;
+    uint8_t* dst_y = view.data()[0];
+    uint8_t* dst_u = view.data()[1];
+    uint8_t* dst_v = view.data()[2];
+    int32_t dst_stride_y = view.layout().planes[0].rowInc;
+    int32_t dst_stride_u = view.layout().planes[1].rowInc;
+    int32_t dst_stride_v = view.layout().planes[2].rowInc;
+    int width = view.crop().width;
+    int height = view.crop().height;
+    if (IsNV12(img)) {
+        if (IsNV12(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+            return OK;
+        } else if (IsNV21(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
-                                    dst_u, dst_stride_u, dst_v, dst_stride_v, view.width(),
-                                    view.height())) {
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
-        } else {
+        }
+    } else if (IsNV21(img)) {
+        if (IsNV12(view)) {
+            if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+                return OK;
+            }
+        } else if (IsNV21(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+            return OK;
+        } else if (IsI420(view)) {
+            if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+                                    dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        }
+    } else if (IsI420(img)) {
+        if (IsNV12(view)) {
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
-                                    dst_y, dst_stride_y, dst_u, dst_stride_u, view.width(),
-                                    view.height())) {
+                                    dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
+        } else if (IsNV21(view)) {
+            if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+                                    dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+                return OK;
+            }
+        } else if (IsI420(view)) {
+            libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+            libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+            libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+            return OK;
         }
     }
     return _ImageCopy<false>(view, img, imgBase);
@@ -225,6 +299,20 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsNV21(const C2GraphicView &view) {
+    if (!IsYUV420(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 2
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_U].offset == 1
+            && layout.planes[layout.PLANE_V].colInc == 2
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+            && layout.planes[layout.PLANE_V].offset == 0);
+}
+
 bool IsI420(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -258,7 +346,16 @@
     }
     return (img->mPlane[1].mColInc == 2
             && img->mPlane[2].mColInc == 2
-            && (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
+            && (img->mPlane[2].mOffset == img->mPlane[1].mOffset + 1));
+}
+
+bool IsNV21(const MediaImage2 *img) {
+    if (!IsYUV420(img)) {
+        return false;
+    }
+    return (img->mPlane[1].mColInc == 2
+            && img->mPlane[2].mColInc == 2
+            && (img->mPlane[1].mOffset == img->mPlane[2].mOffset + 1));
 }
 
 bool IsI420(const MediaImage2 *img) {
@@ -270,6 +367,76 @@
             && img->mPlane[2].mOffset > img->mPlane[1].mOffset);
 }
 
+FlexLayout GetYuv420FlexibleLayout() {
+    static FlexLayout sLayout = []{
+        AHardwareBuffer_Desc desc = {
+            16,  // width
+            16,  // height
+            1,   // layers
+            AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+            0,   // stride
+            0,   // rfu0
+            0,   // rfu1
+        };
+        AHardwareBuffer *buffer = nullptr;
+        int ret = AHardwareBuffer_allocate(&desc, &buffer);
+        if (ret != 0) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        class AutoCloser {
+        public:
+            AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+            ~AutoCloser() {
+                if (mLocked) {
+                    AHardwareBuffer_unlock(mBuffer, nullptr);
+                }
+                AHardwareBuffer_release(mBuffer);
+            }
+
+            void setLocked() { mLocked = true; }
+
+        private:
+            AHardwareBuffer *mBuffer;
+            bool mLocked;
+        } autoCloser(buffer);
+        AHardwareBuffer_Planes planes;
+        ret = AHardwareBuffer_lockPlanes(
+                buffer,
+                AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                -1,       // fence
+                nullptr,  // rect
+                &planes);
+        if (ret != 0) {
+            AHardwareBuffer_release(buffer);
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        autoCloser.setLocked();
+        if (planes.planeCount != 3) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[0].pixelStride != 1) {
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+            return FLEX_LAYOUT_PLANAR;
+        }
+        if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+            ssize_t uvDist =
+                static_cast<uint8_t *>(planes.planes[2].data) -
+                static_cast<uint8_t *>(planes.planes[1].data);
+            if (uvDist == 1) {
+                return FLEX_LAYOUT_SEMIPLANAR_UV;
+            } else if (uvDist == -1) {
+                return FLEX_LAYOUT_SEMIPLANAR_VU;
+            }
+            return FLEX_LAYOUT_UNKNOWN;
+        }
+        return FLEX_LAYOUT_UNKNOWN;
+    }();
+    return sLayout;
+}
+
 MediaImage2 CreateYUV420PlanarMediaImage2(
         uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
     return MediaImage2 {
@@ -340,9 +507,21 @@
     };
 }
 
+// Matrix coefficients for converting RGB to planar YUV data.
+// Each sub-array is the 3x3 coefficient matrix applied to (R, G, B) for that range.
+static const int16_t bt601Matrix[2][3][3] = {
+    { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+    { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } },  /* RANGE_LIMITED */
+};
+
+static const int16_t bt709Matrix[2][3][3] = {
+    { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
+};
+
 status_t ConvertRGBToPlanarYUV(
         uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
-        const C2GraphicView &src) {
+        const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
     CHECK(dstY != nullptr);
     CHECK((src.width() & 1) == 0);
     CHECK((src.height() & 1) == 0);
@@ -360,28 +539,38 @@
     const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
     const uint8_t *pBlue  = src.data()[C2PlanarLayout::PLANE_B];
 
-#define CLIP3(x,y,z) (((z) < (x)) ? (x) : (((z) > (y)) ? (y) : (z)))
+    // default to limited range if unspecified or unrecognized
+    if (colorRange != C2Color::RANGE_FULL && colorRange != C2Color::RANGE_LIMITED) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t (*weights)[3] =
+        (colorMatrix == C2Color::MATRIX_BT709) ?
+            bt709Matrix[colorRange - 1] : bt601Matrix[colorRange - 1];
+    uint8_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 16;
+    uint8_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 255 : 235;
+    uint8_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 255 : 240;
+
+#define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
     for (size_t y = 0; y < src.height(); ++y) {
         for (size_t x = 0; x < src.width(); ++x) {
-            uint8_t red = *pRed;
-            uint8_t green = *pGreen;
-            uint8_t blue = *pBlue;
+            uint8_t r = *pRed;
+            uint8_t g = *pGreen;
+            uint8_t b = *pBlue;
 
-            // using ITU-R BT.601 conversion matrix
-            unsigned luma =
-                CLIP3(0, (((red * 66 + green * 129 + blue * 25) >> 8) + 16), 255);
+            unsigned luma = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2]) >> 8) +
+                             zeroLvl;
 
-            dstY[x] = luma;
+            dstY[x] = CLIP3(zeroLvl, luma, maxLvlLuma);
 
             if ((x & 1) == 0 && (y & 1) == 0) {
-                unsigned U =
-                    CLIP3(0, (((-red * 38 - green * 74 + blue * 112) >> 8) + 128), 255);
+                unsigned U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2]) >> 8) +
+                              128;
 
-                unsigned V =
-                    CLIP3(0, (((red * 112 - green * 94 - blue * 18) >> 8) + 128), 255);
+                unsigned V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2]) >> 8) +
+                              128;
 
-                dstU[x >> 1] = U;
-                dstV[x >> 1] = V;
+                dstU[x >> 1] = CLIP3(zeroLvl, U, maxLvlChroma);
+                dstV[x >> 1] = CLIP3(zeroLvl, V, maxLvlChroma);
             }
             pRed   += layout.planes[C2PlanarLayout::PLANE_R].colInc;
             pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
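
As a quick sanity check on the limited-range BT.601 row above ({66, 129, 25}, zero level 16,
luma ceiling 235): a fully saturated white pixel lands exactly at the ceiling with neutral
chroma. A minimal standalone calculation using only the constants quoted from this hunk:

    #include <cstdio>

    int main() {
        const int r = 255, g = 255, b = 255;                       // white
        const int y = ((r * 66 + g * 129 + b * 25) >> 8) + 16;     // 219 + 16 = 235
        const int u = ((r * -38 + g * -74 + b * 112) >> 8) + 128;  // 0 + 128 = 128
        std::printf("Y=%d U=V=%d\n", y, u);                        // prints Y=235 U=V=128
        return 0;
    }
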
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..9fa642d 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -18,6 +18,7 @@
 #define CODEC2_BUFFER_UTILS_H_
 
 #include <C2Buffer.h>
+#include <C2Config.h>
 #include <C2ParamDef.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -39,7 +40,8 @@
  */
 status_t ConvertRGBToPlanarYUV(
         uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
-        const C2GraphicView &src);
+        const C2GraphicView &src, C2Color::matrix_t colorMatrix = C2Color::MATRIX_BT601,
+        C2Color::range_t colorRange = C2Color::RANGE_LIMITED);
 
 /**
  * Returns a planar YUV 420 8-bit media image descriptor.
@@ -96,6 +98,11 @@
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a I420 layout.
  */
 bool IsI420(const C2GraphicView &view);
@@ -111,10 +118,26 @@
 bool IsNV12(const MediaImage2 *img);
 
 /**
+ * Returns true iff a MediaImage2 has a NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
  * Returns true iff a MediaImage2 has a I420 layout.
  */
 bool IsI420(const MediaImage2 *img);
 
+enum FlexLayout {
+    FLEX_LAYOUT_UNKNOWN,
+    FLEX_LAYOUT_PLANAR,
+    FLEX_LAYOUT_SEMIPLANAR_UV,
+    FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns layout of YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
 /**
  * A raw memory block to use for internal buffers.
  *
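
A minimal usage sketch for GetYuv420FlexibleLayout() declared above; the cases are the
FlexLayout values from this header, while the surrounding caller is hypothetical:

    switch (GetYuv420FlexibleLayout()) {
        case FLEX_LAYOUT_PLANAR:        /* three separate planes, treat like I420 */ break;
        case FLEX_LAYOUT_SEMIPLANAR_UV: /* interleaved chroma, U first: NV12-like */ break;
        case FLEX_LAYOUT_SEMIPLANAR_VU: /* interleaved chroma, V first: NV21-like */ break;
        case FLEX_LAYOUT_UNKNOWN:
        default:                        /* fall back to a generic per-plane copy  */ break;
    }
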
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 903db6c..4d939fa 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -92,6 +92,7 @@
 
 ALookup<C2Config::bitrate_mode_t, int32_t> sBitrateModes = {
     { C2Config::BITRATE_CONST,      BITRATE_MODE_CBR },
+    { C2Config::BITRATE_CONST_SKIP_ALLOWED, BITRATE_MODE_CBR_FD },
     { C2Config::BITRATE_VARIABLE,   BITRATE_MODE_VBR },
     { C2Config::BITRATE_IGNORE,     BITRATE_MODE_CQ },
 };
@@ -311,6 +312,8 @@
     { C2Config::PCM_8, kAudioEncodingPcm8bit },
     { C2Config::PCM_16, kAudioEncodingPcm16bit },
     { C2Config::PCM_FLOAT, kAudioEncodingPcmFloat },
+    { C2Config::PCM_24, kAudioEncodingPcm24bitPacked },
+    { C2Config::PCM_32, kAudioEncodingPcm32bit },
 };
 
 ALookup<C2Config::level_t, int32_t> sVp9Levels = {
@@ -958,17 +961,16 @@
             *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
             return true;
         case COLOR_FormatYUV420Flexible:
-            *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
-            return true;
         case COLOR_FormatYUV420Planar:
         case COLOR_FormatYUV420SemiPlanar:
         case COLOR_FormatYUV420PackedPlanar:
         case COLOR_FormatYUV420PackedSemiPlanar:
-            *c2Value = HAL_PIXEL_FORMAT_YV12;
+            *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
             return true;
         default:
-            // TODO: support some sort of passthrough
-            return false;
+            // Passthrough
+            *c2Value = uint32_t(frameworkValue);
+            return true;
     }
 }
 
@@ -979,11 +981,16 @@
         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
             *frameworkValue = COLOR_FormatSurface;
             return true;
-        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCBCR_422_SP:
+        case HAL_PIXEL_FORMAT_YCRCB_420_SP:
+        case HAL_PIXEL_FORMAT_YCBCR_422_I:
         case HAL_PIXEL_FORMAT_YCBCR_420_888:
+        case HAL_PIXEL_FORMAT_YV12:
             *frameworkValue = COLOR_FormatYUV420Flexible;
             return true;
         default:
-            return false;
+            // Passthrough
+            *frameworkValue = int32_t(c2Value);
+            return true;
     }
 }
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index fce6e21..b858fa5 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -1,5 +1,15 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "codec2_core_param_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2Param_test.cpp",
@@ -28,6 +38,7 @@
 
 cc_test {
     name: "codec2_vndk_test",
+    test_suites: ["device-tests"],
 
     srcs: [
         "C2_test.cpp",
diff --git a/media/codec2/tests/C2Param_test.cpp b/media/codec2/tests/C2Param_test.cpp
index 564d4d2..bb8130c 100644
--- a/media/codec2/tests/C2Param_test.cpp
+++ b/media/codec2/tests/C2Param_test.cpp
@@ -96,7 +96,7 @@
     const static std::vector<C2FieldDescriptor> _FIELD_LIST;
     static const std::vector<C2FieldDescriptor> FieldList();  // <= needed for C2FieldDescriptor
     const static FD::type_t TYPE = (FD::type_t)(CORE_INDEX | FD::STRUCT_FLAG);
-};
+} C2_PACK;
 
 DEFINE_NO_NAMED_VALUES_FOR(C2SizeStruct)
 
@@ -111,11 +111,13 @@
 
 struct C2TestStruct_A {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -124,21 +126,21 @@
     static const std::vector<C2FieldDescriptor> FieldList();
     // enum : uint32_t { CORE_INDEX = kParamIndexTest };
     // typedef C2TestStruct_A _type;
-} __attribute__((packed));
+} __attribute__((aligned(4)));
 
 const std::vector<C2FieldDescriptor> C2TestStruct_A::FieldList() {
     return _FIELD_LIST;
 }
 const std::vector<C2FieldDescriptor> C2TestStruct_A::_FIELD_LIST =
     { { FD::INT32,    1, "s32",   0, 4 },
-      { FD::INT64,    2, "s64",   4, 8 },
-      { FD::UINT32,   1, "u32",  20, 4 },
-      { FD::UINT64,   1, "u64",  24, 8 },
-      { FD::FLOAT,    1, "fp",   32, 4 },
-      { C2SizeStruct::TYPE, 3, "size", 36, 8 },
-      { FD::BLOB,   100, "blob", 60, 1 },
-      { FD::STRING, 100, "str", 160, 1 },
-      { FD::BLOB,   100, "y-n", 260, 1 } };
+      { FD::INT64,    2, "s64",   8, 8 },
+      { FD::UINT32,   1, "u32",  24, 4 },
+      { FD::UINT64,   1, "u64",  32, 8 },
+      { FD::FLOAT,    1, "fp",   40, 4 },
+      { C2SizeStruct::TYPE, 3, "size", 44, 8 },
+      { FD::BLOB,   100, "blob", 68, 1 },
+      { FD::STRING, 100, "str", 168, 1 },
+      { FD::BLOB,   100, "y-n", 268, 1 } };
 
 TEST_P(C2ParamTest_ParamFieldList, VerifyStruct) {
     std::vector<C2FieldDescriptor> fields = GetParam(), expected = C2TestStruct_A::_FIELD_LIST;
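
The revised offsets above (8, 24, 32, 40, 44, 68, 168, 268) are simply the natural-alignment
layout of the same members once the struct is aligned(4) rather than packed: int64_t and
uint64_t require 8-byte alignment, so 4 bytes of padding follow signed32 and unsigned32[1],
and sz starts at 44 because C2SizeStruct is 8 bytes but only 4-byte aligned (C2_PACK). A
standalone sketch of that reasoning, assuming a typical 64-bit ABI (the struct name is
illustrative only):

    #include <cstddef>
    #include <cstdint>

    struct NaturallyAligned {       // same leading members as C2TestStruct_A
        int32_t  signed32;          // offset 0
        int64_t  signed64[2];       // offset 8  (4 bytes of padding inserted before it)
        uint32_t unsigned32[1];     // offset 24
        uint64_t unsigned64;        // offset 32 (4 bytes of padding inserted before it)
        float    fp32;              // offset 40
    };

    static_assert(offsetof(NaturallyAligned, signed64) == 8, "int64_t is 8-byte aligned");
    static_assert(offsetof(NaturallyAligned, unsigned64) == 32, "uint64_t is 8-byte aligned");
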
@@ -198,11 +200,13 @@
 
 struct C2TestAStruct {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -229,11 +233,13 @@
 
 struct C2TestBStruct {
     int32_t signed32;
+    // 4-byte padding
     int64_t signed64[2];
     uint32_t unsigned32[1];
+    // 4-byte padding
     uint64_t unsigned64;
     float fp32;
-    C2SizeStruct sz[3];
+    C2SizeStruct sz[3]; // 8-byte structure, but 4-byte aligned
     uint8_t blob[100];
     char string[100];
     bool yesNo[100];
@@ -286,7 +292,7 @@
         if (fields.size() > 1) {
             EXPECT_EQ(2u, fields.size());
             EXPECT_EQ(C2FieldDescriptor(FD::INT32, 1, "s32", 0, 4), fields[0]);
-            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", 4, this->FLEX_SIZE),
+            EXPECT_EQ(C2FieldDescriptor(this->FlexType, 0, "flex", alignof(TypeParam) /* offset */, this->FLEX_SIZE),
                       fields[1]);
         } else {
             EXPECT_EQ(1u, fields.size());
@@ -392,6 +398,7 @@
 
 struct C2TestStruct_FlexEndS64 {
     int32_t signed32;
+    // 4-byte padding
     int64_t mSigned64Flex[];
 
     const static std::vector<C2FieldDescriptor> _FIELD_LIST;
@@ -406,7 +413,7 @@
 }
 const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndS64::_FIELD_LIST = {
     { FD::INT32, 1, "s32", 0, 4 },
-    { FD::INT64, 0, "flex", 4, 8 },
+    { FD::INT64, 0, "flex", 8, 8 },
 };
 
 struct C2TestFlexS64Struct {
@@ -419,6 +426,7 @@
 
 struct C2TestFlexEndS64Struct {
     int32_t signed32;
+    // 4-byte padding
     int64_t mFlexSigned64[];
     C2TestFlexEndS64Struct() {}
 
@@ -468,7 +476,7 @@
     // enum : uint32_t { CORE_INDEX = C2TestStruct_FlexEndSize, FLEX_SIZE = 8 };
     // typedef C2TestStruct_FlexEndSize _type;
     // typedef C2SizeStruct FlexType;
-};
+} __attribute__((aligned(4)));
 
 const std::vector<C2FieldDescriptor> C2TestStruct_FlexEndSize::FieldList() {
     return _FIELD_LIST;
@@ -539,14 +547,14 @@
 TEST_F(C2ParamTest, FieldId) {
     // pointer constructor
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestStruct_A*)0)->signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&((C2TestStruct_A*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&((C2TestStruct_A*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestStruct_A*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestStruct_A*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestStruct_A*)0)->sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestStruct_A*)0)->blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestStruct_A*)0)->string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestStruct_A*)0)->yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&((C2TestFlexEndSizeStruct*)0)->mFlexSize));
@@ -556,14 +564,14 @@
 
     // member pointer constructor
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestStruct_A*)0, &C2TestStruct_A::yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId((C2TestFlexEndSizeStruct*)0, &C2TestFlexEndSizeStruct::mFlexSize));
@@ -573,14 +581,14 @@
 
     // member pointer sans type pointer
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestStruct_A::signed32));
-    EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestStruct_A::signed64));
-    EXPECT_EQ(_C2FieldId(20, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
-    EXPECT_EQ(_C2FieldId(24, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
-    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&C2TestStruct_A::fp32));
-    EXPECT_EQ(_C2FieldId(36, 8), _C2FieldId(&C2TestStruct_A::sz));
-    EXPECT_EQ(_C2FieldId(60, 1), _C2FieldId(&C2TestStruct_A::blob));
-    EXPECT_EQ(_C2FieldId(160, 1), _C2FieldId(&C2TestStruct_A::string));
-    EXPECT_EQ(_C2FieldId(260, 1), _C2FieldId(&C2TestStruct_A::yesNo));
+    EXPECT_EQ(_C2FieldId(8, 8), _C2FieldId(&C2TestStruct_A::signed64));
+    EXPECT_EQ(_C2FieldId(24, 4), _C2FieldId(&C2TestStruct_A::unsigned32));
+    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&C2TestStruct_A::unsigned64));
+    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&C2TestStruct_A::fp32));
+    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&C2TestStruct_A::sz));
+    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&C2TestStruct_A::blob));
+    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&C2TestStruct_A::string));
+    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&C2TestStruct_A::yesNo));
 
     EXPECT_EQ(_C2FieldId(0, 4), _C2FieldId(&C2TestFlexEndSizeStruct::signed32));
     EXPECT_EQ(_C2FieldId(4, 8), _C2FieldId(&C2TestFlexEndSizeStruct::mFlexSize));
@@ -594,14 +602,14 @@
 
     // pointer constructor in C2Param
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestAInfo*)0)->signed32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
+    EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId(&((C2TestAInfo*)0)->signed64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId(&((C2TestAInfo*)0)->unsigned32));
+    EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId(&((C2TestAInfo*)0)->unsigned64));
+    EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId(&((C2TestAInfo*)0)->fp32));
+    EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId(&((C2TestAInfo*)0)->sz));
+    EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId(&((C2TestAInfo*)0)->blob));
+    EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId(&((C2TestAInfo*)0)->string));
+    EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId(&((C2TestAInfo*)0)->yesNo));
 
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.signed32));
     EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId(&((C2TestFlexEndSizeInfo*)0)->m.mFlexSize));
@@ -611,14 +619,14 @@
 
     // member pointer in C2Param
     EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed32));
-    EXPECT_EQ(_C2FieldId(12, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
-    EXPECT_EQ(_C2FieldId(28, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
-    EXPECT_EQ(_C2FieldId(32, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
-    EXPECT_EQ(_C2FieldId(40, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
-    EXPECT_EQ(_C2FieldId(44, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
-    EXPECT_EQ(_C2FieldId(68, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
-    EXPECT_EQ(_C2FieldId(168, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
-    EXPECT_EQ(_C2FieldId(268, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
+    EXPECT_EQ(_C2FieldId(16, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::signed64));
+    EXPECT_EQ(_C2FieldId(32, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned32));
+    EXPECT_EQ(_C2FieldId(40, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::unsigned64));
+    EXPECT_EQ(_C2FieldId(48, 4), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::fp32));
+    EXPECT_EQ(_C2FieldId(52, 8), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::sz));
+    EXPECT_EQ(_C2FieldId(76, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::blob));
+    EXPECT_EQ(_C2FieldId(176, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::string));
+    EXPECT_EQ(_C2FieldId(276, 1), _C2FieldId((C2TestAInfo*)0, &C2TestAInfo::yesNo));
 
     // NOTE: cannot use a member pointer for flex params due to introduction of 'm'
     // EXPECT_EQ(_C2FieldId(8, 4), _C2FieldId(&C2TestFlexEndSizeInfo::m.signed32));
@@ -2328,6 +2336,17 @@
         static_assert(std::is_same<decltype(blobValue->m.value), uint8_t[]>::value, "should be uint8_t[]");
         EXPECT_EQ(0, memcmp(blobValue->m.value, "ABCD\0", 6));
         EXPECT_EQ(6u, blobValue->flexCount());
+        blobValue->setFlexCount(7u); // increasing the count does not change it
+        EXPECT_EQ(6u, blobValue->flexCount());
+        blobValue->setFlexCount(2u); // decreasing the count updates it to the new value
+        EXPECT_EQ(2u, blobValue->flexCount());
+        blobValue->setFlexCount(0u); // can decrease to 0 and blob remains valid
+        EXPECT_EQ(0u, blobValue->flexCount());
+        EXPECT_TRUE(*blobValue);
+        blobValue->invalidate(); // flex params can be invalidated => results in 0 size
+        EXPECT_FALSE(*blobValue);
+        EXPECT_EQ(0u, blobValue->size());
+
         std::vector<C2FieldDescriptor> fields = blobValue->FieldList();
         EXPECT_EQ(1u, fields.size());
         EXPECT_EQ(FD::BLOB, fields.cbegin()->type());
diff --git a/media/codec2/tests/C2UtilTest.cpp b/media/codec2/tests/C2UtilTest.cpp
index 59cd313..2d66df1 100644
--- a/media/codec2/tests/C2UtilTest.cpp
+++ b/media/codec2/tests/C2UtilTest.cpp
@@ -78,7 +78,7 @@
       { "value2", Enum3Value2 },
       { "value4", Enum3Value4 },
       { "invalid", Invalid } });
-    Enum3 e3;
+    Enum3 e3(Invalid);
     C2FieldDescriptor::namedValuesFor(e3);
 
     // upper case
diff --git a/media/codec2/tests/vndk/C2BufferTest.cpp b/media/codec2/tests/vndk/C2BufferTest.cpp
index 780994a..0cfb465 100644
--- a/media/codec2/tests/vndk/C2BufferTest.cpp
+++ b/media/codec2/tests/vndk/C2BufferTest.cpp
@@ -16,11 +16,12 @@
 
 #include <gtest/gtest.h>
 
-#include <C2AllocatorIon.h>
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
 #include <C2BufferPriv.h>
+#include <C2Config.h>
 #include <C2ParamDef.h>
+#include <C2PlatformSupport.h>
 
 #include <system/graphics.h>
 
@@ -233,10 +234,10 @@
 public:
     C2BufferTest()
         : mBlockPoolId(C2BlockPool::PLATFORM_START),
-          mLinearAllocator(std::make_shared<C2AllocatorIon>('i')),
           mSize(0u),
           mAddr(nullptr),
           mGraphicAllocator(std::make_shared<C2AllocatorGralloc>('g')) {
+        getLinearAllocator(&mLinearAllocator);
     }
 
     ~C2BufferTest() = default;
@@ -329,6 +330,11 @@
     }
 
 private:
+    void getLinearAllocator(std::shared_ptr<C2Allocator>* mLinearAllocator) {
+        std::shared_ptr<C2AllocatorStore> store = android::GetCodec2PlatformAllocatorStore();
+        ASSERT_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, mLinearAllocator), C2_OK);
+    }
+
     C2BlockPool::local_id_t mBlockPoolId;
     std::shared_ptr<C2Allocator> mLinearAllocator;
     std::shared_ptr<C2LinearAllocation> mLinearAllocation;
@@ -765,4 +771,54 @@
     }
 }
 
+TEST_F(C2BufferTest, InfoBufferTest) {
+    constexpr size_t kCapacity = 524288u;
+
+    // allocate a linear block
+    std::shared_ptr<C2BlockPool> linearPool(makeLinearBlockPool());
+    std::shared_ptr<C2LinearBlock> linearBlock;
+    ASSERT_EQ(C2_OK, linearPool->fetchLinearBlock(
+            kCapacity,
+            { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE },
+            &linearBlock));
+
+    C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+            kParamIndexNumber1, linearBlock->share(1024, kCapacity / 2, C2Fence()));
+    std::shared_ptr<C2InfoBuffer> spInfo(new C2InfoBuffer(info));
+    ASSERT_EQ(kParamIndexNumber1, spInfo->index().coreIndex());
+    ASSERT_TRUE(spInfo->index().isGlobal());
+    ASSERT_EQ(C2Param::INFO, spInfo->index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, spInfo->data().type());
+    ASSERT_EQ(1024, spInfo->data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, spInfo->data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), spInfo->data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), spInfo->data().linearBlocks()[0].getAllocatorId());
+
+    C2InfoBuffer streamInfo = info.asStream(false /* output */,  1u);
+    ASSERT_EQ(kParamIndexNumber1, streamInfo.index().coreIndex());
+    ASSERT_TRUE(streamInfo.index().forStream());
+    ASSERT_TRUE(streamInfo.index().forInput());
+    ASSERT_EQ(1u, streamInfo.index().stream());
+    ASSERT_EQ(C2Param::INFO, streamInfo.index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, streamInfo.data().type());
+    ASSERT_EQ(1024, streamInfo.data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, streamInfo.data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), streamInfo.data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), streamInfo.data().linearBlocks()[0].getAllocatorId());
+
+    C2InfoBuffer portInfo = streamInfo.asPort(true /* output */);
+    ASSERT_EQ(kParamIndexNumber1, portInfo.index().coreIndex());
+    ASSERT_TRUE(portInfo.index().forPort());
+    ASSERT_TRUE(portInfo.index().forOutput());
+    ASSERT_EQ(C2Param::INFO, portInfo.index().kind());
+    ASSERT_EQ(C2BufferData::LINEAR, portInfo.data().type());
+    ASSERT_EQ(1024, portInfo.data().linearBlocks()[0].offset());
+    ASSERT_EQ(kCapacity / 2, portInfo.data().linearBlocks()[0].size());
+    // handles must actually be identical after sharing into an info buffer
+    ASSERT_EQ(linearBlock->handle(), portInfo.data().linearBlocks()[0].handle());
+    ASSERT_EQ(linearPool->getAllocatorId(), portInfo.data().linearBlocks()[0].getAllocatorId());
+}
+
 } // namespace android
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 6f7acce..be81c84 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libcodec2_internal",
 
@@ -13,7 +22,7 @@
 
 // !!!DO NOT DEPEND ON THIS SHARED LIBRARY DIRECTLY!!!
 // use libcodec2-impl-defaults instead
-cc_library_shared {
+cc_library {
     name: "libcodec2_vndk",
     vendor_available: true,
     min_sdk_version: "29",
@@ -26,9 +35,12 @@
         "C2AllocatorGralloc.cpp",
         "C2Buffer.cpp",
         "C2Config.cpp",
+        "C2DmaBufAllocator.cpp",
+        "C2Fence.cpp",
         "C2PlatformStorePluginLoader.cpp",
         "C2Store.cpp",
         "platform/C2BqBuffer.cpp",
+        "platform/C2SurfaceSyncObj.cpp",
         "types.cpp",
         "util/C2Debug.cpp",
         "util/C2InterfaceHelper.cpp",
@@ -64,6 +76,7 @@
         "libhardware",
         "libhidlbase",
         "libion",
+        "libdmabufheap",
         "libfmq",
         "liblog",
         "libnativewindow",
@@ -110,4 +123,3 @@
 
     // TODO: separate internal headers so they can be exposed here
 }
-
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 50c9e59..8cfa1d7 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -17,6 +17,8 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "C2AllocatorBlob"
 
+#include <set>
+
 #include <C2AllocatorBlob.h>
 #include <C2PlatformSupport.h>
 
@@ -67,6 +69,10 @@
 private:
     const std::shared_ptr<C2GraphicAllocation> mGraphicAllocation;
     const C2Allocator::id_t mAllocatorId;
+
+    std::mutex mMapLock;
+    std::multiset<std::pair<size_t, size_t>> mMappedOffsetSize;
+    uint8_t *mMappedAddr;
 };
 
 C2AllocationBlob::C2AllocationBlob(
@@ -74,20 +80,74 @@
         C2Allocator::id_t allocatorId)
       : C2LinearAllocation(capacity),
         mGraphicAllocation(std::move(graphicAllocation)),
-        mAllocatorId(allocatorId) {}
+        mAllocatorId(allocatorId),
+        mMappedAddr(nullptr) {}
 
-C2AllocationBlob::~C2AllocationBlob() {}
+C2AllocationBlob::~C2AllocationBlob() {
+    if (mMappedAddr) {
+        C2Rect rect(capacity(), kLinearBufferHeight);
+        mGraphicAllocation->unmap(&mMappedAddr, rect, nullptr);
+    }
+}
 
 c2_status_t C2AllocationBlob::map(size_t offset, size_t size, C2MemoryUsage usage,
                                   C2Fence* fence, void** addr /* nonnull */) {
+    *addr = nullptr;
+    if (size > capacity() || offset > capacity() || offset > capacity() - size) {
+        ALOGV("C2AllocationBlob: map: bad offset / size: offset=%zu size=%zu capacity=%u",
+                offset, size, capacity());
+        return C2_BAD_VALUE;
+    }
+    std::unique_lock<std::mutex> lock(mMapLock);
+    if (mMappedAddr) {
+        *addr = mMappedAddr + offset;
+        mMappedOffsetSize.insert({offset, size});
+        ALOGV("C2AllocationBlob: mapped from existing mapping: offset=%zu size=%zu capacity=%u",
+                offset, size, capacity());
+        return C2_OK;
+    }
     C2PlanarLayout layout;
-    C2Rect rect = C2Rect(size, kLinearBufferHeight).at(offset, 0u);
-    return mGraphicAllocation->map(rect, usage, fence, &layout, reinterpret_cast<uint8_t**>(addr));
+    C2Rect rect = C2Rect(capacity(), kLinearBufferHeight);
+    c2_status_t err = mGraphicAllocation->map(rect, usage, fence, &layout, &mMappedAddr);
+    if (err != C2_OK) {
+        ALOGV("C2AllocationBlob: map failed: offset=%zu size=%zu capacity=%u err=%d",
+                offset, size, capacity(), err);
+        mMappedAddr = nullptr;
+        return err;
+    }
+    *addr = mMappedAddr + offset;
+    mMappedOffsetSize.insert({offset, size});
+    ALOGV("C2AllocationBlob: new map succeeded: offset=%zu size=%zu capacity=%u",
+            offset, size, capacity());
+    return C2_OK;
 }
 
 c2_status_t C2AllocationBlob::unmap(void* addr, size_t size, C2Fence* fenceFd) {
-    C2Rect rect(size, kLinearBufferHeight);
-    return mGraphicAllocation->unmap(reinterpret_cast<uint8_t**>(&addr), rect, fenceFd);
+    std::unique_lock<std::mutex> lock(mMapLock);
+    uint8_t *u8Addr = static_cast<uint8_t *>(addr);
+    if (u8Addr < mMappedAddr || mMappedAddr + capacity() < u8Addr + size) {
+        ALOGV("C2AllocationBlob: unmap: Bad addr / size: addr=%p size=%zu capacity=%u",
+                addr, size, capacity());
+        return C2_BAD_VALUE;
+    }
+    auto it = mMappedOffsetSize.find(std::make_pair(u8Addr - mMappedAddr, size));
+    if (it == mMappedOffsetSize.end()) {
+        ALOGV("C2AllocationBlob: unrecognized map: addr=%p size=%zu capacity=%u",
+                addr, size, capacity());
+        return C2_BAD_VALUE;
+    }
+    mMappedOffsetSize.erase(it);
+    if (!mMappedOffsetSize.empty()) {
+        ALOGV("C2AllocationBlob: still maintain mapping: addr=%p size=%zu capacity=%u",
+                addr, size, capacity());
+        return C2_OK;
+    }
+    C2Rect rect(capacity(), kLinearBufferHeight);
+    c2_status_t err = mGraphicAllocation->unmap(&mMappedAddr, rect, fenceFd);
+    ALOGV("C2AllocationBlob: last unmap: addr=%p size=%zu capacity=%u err=%d",
+            addr, size, capacity(), err);
+    mMappedAddr = nullptr;
+    return err;
 }
 
 /* ====================================== BLOB ALLOCATOR ====================================== */
@@ -118,6 +178,8 @@
         return C2_CORRUPTED;
     }
 
+    // Note: the BLOB allocator does not support padding as this functionality is expected
+    // to be provided by the gralloc implementation.
     std::shared_ptr<C2GraphicAllocation> graphicAllocation;
     c2_status_t status = mC2AllocatorGralloc->newGraphicAllocation(
             capacity, kLinearBufferHeight, kLinearBufferFormat, usage, &graphicAllocation);
@@ -175,12 +237,12 @@
 }
 
 // static
-bool C2AllocatorBlob::isValid(const C2Handle* const o) {
+bool C2AllocatorBlob::CheckHandle(const C2Handle* const o) {
     size_t capacity;
     // Distinguish C2Handle purely allocated by C2AllocatorGralloc, or one allocated through
     // C2AllocatorBlob, by checking the handle's height is 1, and its format is
     // PixelFormat::BLOB by GetCapacityFromHandle().
-    return C2AllocatorGralloc::isValid(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
+    return C2AllocatorGralloc::CheckHandle(o) && GetCapacityFromHandle(o, &capacity) == C2_OK;
 }
 
 }  // namespace android
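
A brief usage sketch of the reworked map bookkeeping above: the BLOB allocation is locked
once in full, later map() calls reuse that mapping and record their (offset, size) pair, and
the underlying gralloc unmap happens only when the last record is released. The snippet is
illustrative only; 'blob' stands for a C2LinearAllocation backed by C2AllocationBlob.

    C2MemoryUsage usage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
    void *a = nullptr, *b = nullptr;
    blob->map(0, 1024, usage, nullptr /* fence */, &a);     // first map locks the gralloc buffer
    blob->map(4096, 1024, usage, nullptr /* fence */, &b);  // reuses the existing mapping
    blob->unmap(a, 1024, nullptr);                          // one (offset, size) record remains
    blob->unmap(b, 1024, nullptr);                          // last record released: gralloc unmap
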
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index e1e1377..6a7f19c 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -25,6 +25,7 @@
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
+#include <ui/Rect.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
@@ -41,7 +42,9 @@
          * Usage mask that is passed through from gralloc to Codec 2.0 usage.
          */
         PASSTHROUGH_USAGE_MASK =
-            ~(GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK | GRALLOC_USAGE_PROTECTED)
+            ~static_cast<uint64_t>(GRALLOC_USAGE_SW_READ_MASK |
+                                   GRALLOC_USAGE_SW_WRITE_MASK |
+                                   GRALLOC_USAGE_PROTECTED)
     };
 
     // verify that passthrough mask is within the platform mask
@@ -103,7 +106,7 @@
     const static uint32_t MAGIC = '\xc2gr\x00';
 
     static
-    const ExtraData* getExtraData(const C2Handle *const handle) {
+    const ExtraData* GetExtraData(const C2Handle *const handle) {
         if (handle == nullptr
                 || native_handle_is_invalid(handle)
                 || handle->numInts < NUM_INTS) {
@@ -114,23 +117,23 @@
     }
 
     static
-    ExtraData *getExtraData(C2Handle *const handle) {
-        return const_cast<ExtraData *>(getExtraData(const_cast<const C2Handle *const>(handle)));
+    ExtraData *GetExtraData(C2Handle *const handle) {
+        return const_cast<ExtraData *>(GetExtraData(const_cast<const C2Handle *const>(handle)));
     }
 
 public:
     void getIgbpData(uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) const {
-        const ExtraData *ed = getExtraData(this);
+        const ExtraData *ed = GetExtraData(this);
         *generation = ed->generation;
         *igbp_id = unsigned(ed->igbp_id_lo) | uint64_t(unsigned(ed->igbp_id_hi)) << 32;
         *igbp_slot = ed->igbp_slot;
     }
 
-    static bool isValid(const C2Handle *const o) {
+    static bool IsValid(const C2Handle *const o) {
         if (o == nullptr) { // null handle is always valid
             return true;
         }
-        const ExtraData *xd = getExtraData(o);
+        const ExtraData *xd = GetExtraData(o);
         // we cannot validate width/height/format/usage without accessing gralloc driver
         return xd != nullptr && xd->magic == MAGIC;
     }
@@ -152,7 +155,7 @@
         native_handle_t *res = native_handle_create(handle->numFds, handle->numInts + NUM_INTS);
         if (res != nullptr) {
             memcpy(&res->data, &handle->data, sizeof(int) * (handle->numFds + handle->numInts));
-            *getExtraData(res) = xd;
+            *GetExtraData(res) = xd;
         }
         return reinterpret_cast<C2HandleGralloc *>(res);
     }
@@ -180,10 +183,10 @@
     static bool MigrateNativeHandle(
             native_handle_t *handle,
             uint32_t generation, uint64_t igbp_id, uint32_t igbp_slot) {
-        if (handle == nullptr || !isValid(handle)) {
+        if (handle == nullptr || !IsValid(handle)) {
             return false;
         }
-        ExtraData *ed = getExtraData(handle);
+        ExtraData *ed = GetExtraData(handle);
         if (!ed) return false;
         ed->generation = generation;
         ed->igbp_id_lo = uint32_t(igbp_id & 0xFFFFFFFF);
@@ -195,7 +198,7 @@
 
     static native_handle_t* UnwrapNativeHandle(
             const C2Handle *const handle) {
-        const ExtraData *xd = getExtraData(handle);
+        const ExtraData *xd = GetExtraData(handle);
         if (xd == nullptr || xd->magic != MAGIC) {
             return nullptr;
         }
@@ -211,7 +214,7 @@
             uint32_t *width, uint32_t *height, uint32_t *format,
             uint64_t *usage, uint32_t *stride,
             uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
-        const ExtraData *xd = getExtraData(handle);
+        const ExtraData *xd = GetExtraData(handle);
         if (xd == nullptr) {
             return nullptr;
         }
@@ -253,7 +256,7 @@
     virtual ~C2AllocationGralloc() override;
 
     virtual c2_status_t map(
-            C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+            C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
             C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) override;
     virtual c2_status_t unmap(
             uint8_t **addr /* nonnull */, C2Rect rect, C2Fence *fence /* nullable */) override;
@@ -336,8 +339,12 @@
 }
 
 c2_status_t C2AllocationGralloc::map(
-        C2Rect rect, C2MemoryUsage usage, C2Fence *fence,
+        C2Rect c2Rect, C2MemoryUsage usage, C2Fence *fence,
         C2PlanarLayout *layout /* nonnull */, uint8_t **addr /* nonnull */) {
+    const Rect rect{(int32_t)c2Rect.left, (int32_t)c2Rect.top,
+                    (int32_t)(c2Rect.left + c2Rect.width) /* right */,
+                    (int32_t)(c2Rect.top + c2Rect.height) /* bottom */};
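+    // e.g. a c2Rect of {left = 0, top = 0, width = 1920, height = 1080} becomes
+    // Rect{0, 0, 1920, 1080}: Rect stores exclusive right/bottom edges rather than a
+    // width/height pair.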
+
     uint64_t grallocUsage = static_cast<C2AndroidMemoryUsage>(usage).asGrallocUsage();
     ALOGV("mapping buffer with usage %#llx => %#llx",
           (long long)usage.expected, (long long)grallocUsage);
@@ -386,10 +393,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t *>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(RGBA_1010102)");
                 return C2_CORRUPTED;
@@ -464,10 +468,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(RGBA_8888)");
                 return C2_CORRUPTED;
@@ -524,10 +525,7 @@
             void *pointer = nullptr;
             // TODO: fence
             status_t err = GraphicBufferMapper::get().lock(
-                                const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                                { (int32_t)rect.left, (int32_t)rect.top,
-                                  (int32_t)rect.width, (int32_t)rect.height },
-                                &pointer);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &pointer);
             if (err) {
                 ALOGE("failed transaction: lock(BLOB)");
                 return C2_CORRUPTED;
@@ -536,20 +534,33 @@
             break;
         }
 
+        case static_cast<uint32_t>(PixelFormat4::YCBCR_422_SP):
+            // fall-through
+        case static_cast<uint32_t>(PixelFormat4::YCRCB_420_SP):
+            // fall-through
+        case static_cast<uint32_t>(PixelFormat4::YCBCR_422_I):
+            // fall-through
         case static_cast<uint32_t>(PixelFormat4::YCBCR_420_888):
             // fall-through
-        case static_cast<uint32_t>(PixelFormat4::YV12):
-            // fall-through
-        default: {
+        case static_cast<uint32_t>(PixelFormat4::YV12): {
             android_ycbcr ycbcrLayout;
 
             status_t err = GraphicBufferMapper::get().lockYCbCr(
-                        const_cast<native_handle_t*>(mBuffer), grallocUsage,
-                        { (int32_t)rect.left, (int32_t)rect.top,
-                          (int32_t)rect.width, (int32_t)rect.height },
-                        &ycbcrLayout);
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
             if (err) {
-                ALOGE("failed transaction: lockYCbCr");
+                ALOGE("failed transaction: lockYCbCr (err=%d)", err);
+                return C2_CORRUPTED;
+            }
+            if (!ycbcrLayout.y || !ycbcrLayout.cb || !ycbcrLayout.cr
+                    || ycbcrLayout.ystride == 0
+                    || ycbcrLayout.cstride == 0
+                    || ycbcrLayout.chroma_step == 0) {
+                ALOGE("invalid layout: lockYCbCr (y=%s cb=%s cr=%s "
+                        "ystride=%zu cstride=%zu chroma_step=%zu)",
+                        ycbcrLayout.y ? "(non-null)" : "(null)",
+                        ycbcrLayout.cb ? "(non-null)" : "(null)",
+                        ycbcrLayout.cr ? "(non-null)" : "(null)",
+                        ycbcrLayout.ystride, ycbcrLayout.cstride, ycbcrLayout.chroma_step);
                 return C2_CORRUPTED;
             }
 
@@ -611,6 +622,171 @@
             }
             break;
         }
+
+        case static_cast<uint32_t>(PixelFormat4::YCBCR_P010): {
+            void *pointer = nullptr;
+            status_t err = GraphicBufferMapper::get().lock(
+                    const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
+            if (err) {
+                ALOGE("failed transaction: lock(YCBCR_P010)");
+                return C2_CORRUPTED;
+            }
+            addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)pointer;
+            addr[C2PlanarLayout::PLANE_U] = (uint8_t *)pointer + mStride * 2 * rect.height();
+            addr[C2PlanarLayout::PLANE_V] = addr[C2PlanarLayout::PLANE_U] + 2;
+            layout->type = C2PlanarLayout::TYPE_YUV;
+            layout->numPlanes = 3;
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_Y] = {
+                C2PlaneInfo::CHANNEL_Y,         // channel
+                2,                              // colInc
+                static_cast<int32_t>(2 * mStride), // rowInc
+                1,                              // mColSampling
+                1,                              // mRowSampling
+                16,                             // allocatedDepth
+                10,                             // bitDepth
+                6,                              // rightShift
+                C2PlaneInfo::LITTLE_END,        // endianness
+                C2PlanarLayout::PLANE_Y,        // rootIx
+                0,                              // offset
+            };
+            layout->planes[C2PlanarLayout::PLANE_U] = {
+                C2PlaneInfo::CHANNEL_CB,        // channel
+                4,                              // colInc
+                static_cast<int32_t>(2 * mStride), // rowInc
+                2,                              // mColSampling
+                2,                              // mRowSampling
+                16,                             // allocatedDepth
+                10,                             // bitDepth
+                6,                              // rightShift
+                C2PlaneInfo::LITTLE_END,        // endianness
+                C2PlanarLayout::PLANE_U,        // rootIx
+                0,                              // offset
+            };
+            layout->planes[C2PlanarLayout::PLANE_V] = {
+                C2PlaneInfo::CHANNEL_CR,        // channel
+                4,                              // colInc
+                static_cast<int32_t>(2 * mStride), // rowInc
+                2,                              // mColSampling
+                2,                              // mRowSampling
+                16,                             // allocatedDepth
+                10,                             // bitDepth
+                6,                              // rightShift
+                C2PlaneInfo::LITTLE_END,        // endianness
+                C2PlanarLayout::PLANE_U,        // rootIx
+                2,                              // offset
+            };
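+            // Reader-side sketch (assuming the C2PlaneInfo semantics above: 16-bit
+            // little-endian samples with the 10 significant bits left-justified):
+            //   uint16_t raw;
+            //   memcpy(&raw, addr[C2PlanarLayout::PLANE_Y] + row * 2 * mStride + col * 2, 2);
+            //   uint16_t y = raw >> 6;  // rightShift == 6 yields the 10-bit value (0..1023)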
+            break;
+        }
+
+        default: {
+            // Unknown pixel format; first try to lock it as a flexible YUV buffer.
+            android_ycbcr ycbcrLayout;
+
+            status_t err = GraphicBufferMapper::get().lockYCbCr(
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
+            if (err == OK && ycbcrLayout.y && ycbcrLayout.cb && ycbcrLayout.cr
+                    && ycbcrLayout.ystride > 0
+                    && ycbcrLayout.cstride > 0
+                    && ycbcrLayout.chroma_step > 0) {
+                addr[C2PlanarLayout::PLANE_Y] = (uint8_t *)ycbcrLayout.y;
+                addr[C2PlanarLayout::PLANE_U] = (uint8_t *)ycbcrLayout.cb;
+                addr[C2PlanarLayout::PLANE_V] = (uint8_t *)ycbcrLayout.cr;
+                layout->type = C2PlanarLayout::TYPE_YUV;
+                layout->numPlanes = 3;
+                layout->rootPlanes = 3;
+                layout->planes[C2PlanarLayout::PLANE_Y] = {
+                    C2PlaneInfo::CHANNEL_Y,         // channel
+                    1,                              // colInc
+                    (int32_t)ycbcrLayout.ystride,   // rowInc
+                    1,                              // mColSampling
+                    1,                              // mRowSampling
+                    8,                              // allocatedDepth
+                    8,                              // bitDepth
+                    0,                              // rightShift
+                    C2PlaneInfo::NATIVE,            // endianness
+                    C2PlanarLayout::PLANE_Y,        // rootIx
+                    0,                              // offset
+                };
+                layout->planes[C2PlanarLayout::PLANE_U] = {
+                    C2PlaneInfo::CHANNEL_CB,          // channel
+                    (int32_t)ycbcrLayout.chroma_step, // colInc
+                    (int32_t)ycbcrLayout.cstride,     // rowInc
+                    2,                                // mColSampling
+                    2,                                // mRowSampling
+                    8,                                // allocatedDepth
+                    8,                                // bitDepth
+                    0,                                // rightShift
+                    C2PlaneInfo::NATIVE,              // endianness
+                    C2PlanarLayout::PLANE_U,          // rootIx
+                    0,                                // offset
+                };
+                layout->planes[C2PlanarLayout::PLANE_V] = {
+                    C2PlaneInfo::CHANNEL_CR,          // channel
+                    (int32_t)ycbcrLayout.chroma_step, // colInc
+                    (int32_t)ycbcrLayout.cstride,     // rowInc
+                    2,                                // mColSampling
+                    2,                                // mRowSampling
+                    8,                                // allocatedDepth
+                    8,                                // bitDepth
+                    0,                                // rightShift
+                    C2PlaneInfo::NATIVE,              // endianness
+                    C2PlanarLayout::PLANE_V,          // rootIx
+                    0,                                // offset
+                };
+                // handle interleaved formats
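+                // e.g. for an NV12-style buffer cr == cb + 1 with chroma_step == 2, so
+                // uvOffset == 1 and PLANE_V becomes an offset view rooted at PLANE_U;
+                // for NV21 the offset is negative and PLANE_U is rooted at PLANE_V.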
+                intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
+                if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
+                    layout->rootPlanes = 2;
+                    layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
+                    layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
+                } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
+                    layout->rootPlanes = 2;
+                    layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
+                    layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
+                }
+                break;
+            }
+
+            // We really don't know what this is; lock the buffer and pass it through ---
+            // the client may know how to interpret it.
+
+            // unlock previous allocation if it was successful
+            if (err == OK) {
+                err = GraphicBufferMapper::get().unlock(mBuffer);
+                if (err) {
+                    ALOGE("failed transaction: unlock");
+                    return C2_CORRUPTED;
+                }
+            }
+
+            void *pointer = nullptr;
+            err = GraphicBufferMapper::get().lock(
+                    const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
+            if (err) {
+                ALOGE("failed transaction: lock(??? %x)", mFormat);
+                return C2_CORRUPTED;
+            }
+            addr[0] = (uint8_t *)pointer;
+            layout->type = C2PlanarLayout::TYPE_UNKNOWN;
+            layout->numPlanes = 1;
+            layout->rootPlanes = 1;
+            layout->planes[0] = {
+                // TODO: CHANNEL_UNKNOWN?
+                C2PlaneInfo::channel_t(0xFF),   // channel
+                1,                              // colInc
+                int32_t(mStride),               // rowInc
+                1,                              // mColSampling
+                1,                              // mRowSampling
+                8,                              // allocatedDepth
+                8,                              // bitDepth
+                0,                              // rightShift
+                C2PlaneInfo::NATIVE,            // endianness
+                0,                              // rootIx
+                0,                              // offset
+            };
+            break;
+        }
     }
     mLocked = true;
 
@@ -784,8 +960,9 @@
     return mImpl->status();
 }
 
-bool C2AllocatorGralloc::isValid(const C2Handle* const o) {
-    return C2HandleGralloc::isValid(o);
+// static
+bool C2AllocatorGralloc::CheckHandle(const C2Handle* const o) {
+    return C2HandleGralloc::IsValid(o);
 }
 
 } // namespace android
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 6d27a02..77b265a 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -30,10 +30,15 @@
 #include <C2ErrnoUtils.h>
 #include <C2HandleIonInternal.h>
 
+#include <android-base/properties.h>
+
 namespace android {
 
 namespace {
     constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
 }
 
 /* size_t <=> int(lo), int(hi) conversions */
@@ -73,7 +78,7 @@
 };
 
 // static
-bool C2HandleIon::isValid(const C2Handle * const o) {
+bool C2HandleIon::IsValid(const C2Handle * const o) {
     if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
         return false;
     }
@@ -376,14 +381,34 @@
         unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
     int bufferFd = -1;
     ion_user_handle_t buffer = -1;
-    size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+    // NOTE: read the property directly here because this code also has to run on
+    // Android Q, while the corresponding sysprop API was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - size) {
+        ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx", size, sPadding);
+        // use ImplV2 as there is no allocation anyway
+        return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+    }
+
+    size_t allocSize = size + sPadding;
+    if (align) {
+        if (align - 1 > SIZE_MAX - allocSize) {
+            ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx and alignment %#zx",
+                  size, sPadding, align);
+            // use ImplV2 as there is no allocation anyway
+            return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+        }
+        allocSize += align - 1;
+        allocSize &= ~(align - 1);
+    }
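+    // e.g. with size = 0x100000, sPadding = 0x8000 and align = 0x1000:
+    //   allocSize = (0x108000 + 0xFFF) & ~0xFFF = 0x108000, of which the final 0x8000
+    //   bytes of padding are not reported back to the client.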
     int ret;
 
     if (ion_is_legacy(ionFd)) {
-        ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+        ret = ion_alloc(ionFd, allocSize, align, heapMask, flags, &buffer);
         ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; buffer = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+              ionFd, allocSize, align, heapMask, flags, ret, buffer);
         if (ret == 0) {
             // get buffer fd for native handle constructor
             ret = ion_share(ionFd, buffer, &bufferFd);
@@ -392,15 +417,16 @@
                 buffer = -1;
             }
         }
-        return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
-
+        // the padding is not usable so deduct it from the advertised capacity
+        return new Impl(ionFd, allocSize - sPadding, bufferFd, buffer, id, ret);
     } else {
-        ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+        ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
         ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
               "returned (%d) ; bufferFd = %d",
-              ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+              ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
 
-        return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+        // the padding is not usable so deduct it from the advertised capacity
+        return new ImplV2(ionFd, allocSize - sPadding, bufferFd, id, ret);
     }
 }
 
@@ -579,7 +605,7 @@
         return mInit;
     }
 
-    if (!C2HandleIon::isValid(handle)) {
+    if (!C2HandleIon::IsValid(handle)) {
         return C2_BAD_VALUE;
     }
 
@@ -596,9 +622,8 @@
     return ret;
 }
 
-bool C2AllocatorIon::isValid(const C2Handle* const o) {
-    return C2HandleIon::isValid(o);
+bool C2AllocatorIon::CheckHandle(const C2Handle* const o) {
+    return C2HandleIon::IsValid(o);
 }
 
 } // namespace android
-
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 0b08f31..143355f 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -106,6 +106,7 @@
 class BufferDataBuddy : public C2BufferData {
     using C2BufferData::C2BufferData;
     friend class ::C2Buffer;
+    friend class ::C2InfoBuffer;
 };
 
 }  // namespace
@@ -396,26 +397,18 @@
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
         const C2Handle *handle) {
     // TODO: get proper allocator? and mutex?
-    static std::unique_ptr<C2Allocator> sAllocator = []{
-        std::unique_ptr<C2Allocator> allocator;
-        if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
-                android::C2PlatformAllocatorStore::BLOB) {
-            allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
-        } else {
-            allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
-        }
+    static std::shared_ptr<C2Allocator> sAllocator = []{
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+        allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
         return allocator;
     }();
 
     if (sAllocator == nullptr)
         return nullptr;
 
-    bool isValidHandle = false;
-    if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
-        isValidHandle = C2AllocatorBlob::isValid(handle);
-    } else {
-        isValidHandle = C2AllocatorIon::isValid(handle);
-    }
+    bool isValidHandle = sAllocator->checkHandle(handle);
 
     std::shared_ptr<C2LinearAllocation> alloc;
     if (isValidHandle) {
@@ -431,26 +424,18 @@
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
         const C2Handle *cHandle, const std::shared_ptr<BufferPoolData> &data) {
     // TODO: get proper allocator? and mutex?
-    static std::unique_ptr<C2Allocator> sAllocator = []{
-        std::unique_ptr<C2Allocator> allocator;
-        if (android::GetPreferredLinearAllocatorId(android::GetCodec2PoolMask()) ==
-                android::C2PlatformAllocatorStore::BLOB) {
-            allocator = std::make_unique<C2AllocatorBlob>(android::C2PlatformAllocatorStore::BLOB);
-        } else {
-            allocator = std::make_unique<C2AllocatorIon>(android::C2PlatformAllocatorStore::ION);
-        }
+    static std::shared_ptr<C2Allocator> sAllocator = []{
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2AllocatorStore> allocatorStore = android::GetCodec2PlatformAllocatorStore();
+        allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
         return allocator;
     }();
 
     if (sAllocator == nullptr)
         return nullptr;
 
-    bool isValidHandle = false;
-    if (sAllocator->getId() == android::C2PlatformAllocatorStore::BLOB) {
-        isValidHandle = C2AllocatorBlob::isValid(cHandle);
-    } else {
-        isValidHandle = C2AllocatorIon::isValid(cHandle);
-    }
+    bool isValidHandle = sAllocator->checkHandle(cHandle);
 
     std::shared_ptr<C2LinearAllocation> alloc;
     if (isValidHandle) {
@@ -1148,7 +1133,7 @@
     static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
 
     std::shared_ptr<C2GraphicAllocation> alloc;
-    if (C2AllocatorGralloc::isValid(cHandle)) {
+    if (sAllocator->isValid(cHandle)) {
         c2_status_t err = sAllocator->priorGraphicAllocation(cHandle, &alloc);
         const std::shared_ptr<C2PooledBlockPoolData> poolData =
                 std::make_shared<C2PooledBlockPoolData>(data);
@@ -1185,6 +1170,7 @@
     type_t mType;
     std::vector<C2ConstLinearBlock> mLinearBlocks;
     std::vector<C2ConstGraphicBlock> mGraphicBlocks;
+    friend class C2InfoBuffer;
 };
 
 C2BufferData::C2BufferData(const std::vector<C2ConstLinearBlock> &blocks) : mImpl(new Impl(blocks)) {}
@@ -1200,6 +1186,35 @@
     return mImpl->graphicBlocks();
 }
 
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const std::vector<C2ConstLinearBlock> &blocks)
+    : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const std::vector<C2ConstGraphicBlock> &blocks)
+    : mIndex(index), mData(BufferDataBuddy(blocks)) {
+}
+
+C2InfoBuffer::C2InfoBuffer(
+    C2Param::Index index, const C2BufferData &data)
+    : mIndex(index), mData(data) {
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateLinearBuffer(
+        C2Param::CoreIndex index, const C2ConstLinearBlock &block) {
+    return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+                        { block });
+}
+
+// static
+C2InfoBuffer C2InfoBuffer::CreateGraphicBuffer(
+        C2Param::CoreIndex index, const C2ConstGraphicBlock &block) {
+    return C2InfoBuffer(index.coreIndex() | C2Param::Index::KIND_INFO | C2Param::Index::DIR_GLOBAL,
+                        { block });
+}
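+
+// A hedged usage sketch of the factories above (coreIndex and block are caller-side
+// placeholders):
+//     C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(coreIndex, block);
+//     // the stored parameter index now carries KIND_INFO | DIR_GLOBAL on top of the
+//     // core index, marking the payload as global info attached to a work item.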
+
 class C2Buffer::Impl {
 public:
     Impl(C2Buffer *thiz, const std::vector<C2ConstLinearBlock> &blocks)
@@ -1330,4 +1345,3 @@
 std::shared_ptr<C2Buffer> C2Buffer::CreateGraphicBuffer(const C2ConstGraphicBlock &block) {
     return std::shared_ptr<C2Buffer>(new C2Buffer({ block }));
 }
-
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index 34680a7..e9223fb 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -142,6 +142,14 @@
         { "av1-0", C2Config::PROFILE_AV1_0 },
         { "av1-1", C2Config::PROFILE_AV1_1 },
         { "av1-2", C2Config::PROFILE_AV1_2 },
+        { "vp8-0", C2Config::PROFILE_VP8_0 },
+        { "vp8-1", C2Config::PROFILE_VP8_1 },
+        { "vp8-2", C2Config::PROFILE_VP8_2 },
+        { "vp8-3", C2Config::PROFILE_VP8_3 },
+        { "mpegh-main", C2Config::PROFILE_MPEGH_MAIN },
+        { "mpegh-high", C2Config::PROFILE_MPEGH_HIGH },
+        { "mpegh-lc", C2Config::PROFILE_MPEGH_LC },
+        { "mpegh-baseline", C2Config::PROFILE_MPEGH_BASELINE },
 }))
 
 DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2Config::level_t, ({
@@ -248,6 +256,11 @@
         { "av1-7.1", C2Config::LEVEL_AV1_7_1 },
         { "av1-7.2", C2Config::LEVEL_AV1_7_2 },
         { "av1-7.3", C2Config::LEVEL_AV1_7_3 },
+        { "mpegh-1", C2Config::LEVEL_MPEGH_1 },
+        { "mpegh-2", C2Config::LEVEL_MPEGH_2 },
+        { "mpegh-3", C2Config::LEVEL_MPEGH_3 },
+        { "mpegh-4", C2Config::LEVEL_MPEGH_4 },
+        { "mpegh-5", C2Config::LEVEL_MPEGH_5 },
 }))
 
 DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2BufferData::type_t, ({
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
new file mode 100644
index 0000000..1aa3d69
--- /dev/null
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -0,0 +1,440 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2DmaBufAllocator"
+
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <C2Debug.h>
+#include <C2DmaBufAllocator.h>
+#include <C2ErrnoUtils.h>
+
+#include <linux/ion.h>
+#include <sys/mman.h>
+#include <unistd.h>  // getpagesize, size_t, close, dup
+#include <utils/Log.h>
+
+#include <list>
+
+#include <android-base/properties.h>
+
+namespace android {
+
+namespace {
+    constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+    // max padding after ion/dmabuf allocations in bytes
+    constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
+}
+
+/* =========================== BUFFER HANDLE =========================== */
+/**
+ * Buffer handle
+ *
+ * Stores dmabuf fd & metadata
+ *
+ * This handle does not capture mapped fds, as updating them would require a
+ * global mutex.
+ */
+
+struct C2HandleBuf : public C2Handle {
+    C2HandleBuf(int bufferFd, size_t size)
+        : C2Handle(cHeader),
+          mFds{bufferFd},
+          mInts{int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+    static bool IsValid(const C2Handle* const o);
+
+    int bufferFd() const { return mFds.mBuffer; }
+    size_t size() const {
+        return size_t(unsigned(mInts.mSizeLo)) | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+    }
+
+   protected:
+    struct {
+        int mBuffer;  // dmabuf fd
+    } mFds;
+    struct {
+        int mSizeLo;  // low 32-bits of size
+        int mSizeHi;  // high 32-bits of size
+        int mMagic;
+    } mInts;
+
+   private:
+    typedef C2HandleBuf _type;
+    enum {
+        kMagic = '\xc2io\x00',
+        numFds = sizeof(mFds) / sizeof(int),
+        numInts = sizeof(mInts) / sizeof(int),
+        version = sizeof(C2Handle)
+    };
+    // constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+    const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleBuf::cHeader = {
+        C2HandleBuf::version, C2HandleBuf::numFds, C2HandleBuf::numInts, {}};
+
+// static
+bool C2HandleBuf::IsValid(const C2Handle* const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+    const C2HandleBuf* other = static_cast<const C2HandleBuf*>(o);
+    return other->mInts.mMagic == kMagic;
+}
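+
+// A hedged sketch (illustrative only, not referenced elsewhere) of how the 64-bit size
+// round-trips through the two 32-bit ints stored in the handle:
+[[maybe_unused]] static void ExampleHandleBufRoundTrip(int bufferFd) {
+    const size_t original = static_cast<size_t>((uint64_t(0x1) << 32) | 0x2345);
+    C2HandleBuf handle(bufferFd, original);
+    // On 64-bit builds handle.size() == original; IsValid() checks the native_handle
+    // header against cHeader and the trailing magic int.
+    (void)handle.size();
+    (void)C2HandleBuf::IsValid(&handle);
+}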
+
+/* =========================== DMABUF ALLOCATION =========================== */
+class C2DmaBufAllocation : public C2LinearAllocation {
+   public:
+    /* Interface methods */
+    virtual c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+                            void** addr /* nonnull */) override;
+    virtual c2_status_t unmap(void* addr, size_t size, C2Fence* fenceFd) override;
+    virtual ~C2DmaBufAllocation() override;
+    virtual const C2Handle* handle() const override;
+    virtual id_t getAllocatorId() const override;
+    virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
+
+    // internal methods
+
+    /**
+      * Constructs an allocation by making a new dmabuf allocation.
+      *
+      * @param alloc     allocator
+      * @param allocSize size used for the allocator
+      * @param capacity  capacity advertised to the client
+      * @param heap_name name of the dmabuf heap (device)
+      * @param flags     flags
+      * @param id        allocator id
+      */
+    C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+                       C2String heap_name, unsigned flags, C2Allocator::id_t id);
+
+    /**
+      * Constructs an allocation by wrapping an existing allocation.
+      *
+      * @param size    capacity advertised to the client
+      * @param shareFd dmabuf fd of the wrapped allocation
+      * @param id      allocator id
+      */
+    C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
+
+    c2_status_t status() const;
+
+   protected:
+    virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+                                    int prot, int flags, void** base, void** addr) {
+        c2_status_t err = C2_OK;
+        *base = mmap(nullptr, mapSize, prot, flags, mHandle.bufferFd(), mapOffset);
+        ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+              "returned (%d)",
+              mapSize, prot, flags, mHandle.bufferFd(), mapOffset, errno);
+        if (*base == MAP_FAILED) {
+            *base = *addr = nullptr;
+            err = c2_map_errno<EINVAL>(errno);
+        } else {
+            *addr = (uint8_t*)*base + alignmentBytes;
+        }
+        return err;
+    }
+
+    C2Allocator::id_t mId;
+    C2HandleBuf mHandle;
+    c2_status_t mInit;
+    struct Mapping {
+        void* addr;
+        size_t alignmentBytes;
+        size_t size;
+    };
+    std::list<Mapping> mMappings;
+
+    // TODO: we could make this encapsulate shared_ptr and copiable
+    C2_DO_NOT_COPY(C2DmaBufAllocation);
+};
+
+c2_status_t C2DmaBufAllocation::map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+                                    void** addr) {
+    (void)fence;  // TODO: wait for fence
+    *addr = nullptr;
+    if (!mMappings.empty()) {
+        ALOGV("multiple map");
+        // TODO: technically we should return DUPLICATE here, but our block views
+        // don't actually unmap, so we end up remapping the buffer multiple times.
+        //
+        // return C2_DUPLICATE;
+    }
+    if (size == 0) {
+        return C2_BAD_VALUE;
+    }
+
+    int prot = PROT_NONE;
+    int flags = MAP_SHARED;
+    if (usage.expected & C2MemoryUsage::CPU_READ) {
+        prot |= PROT_READ;
+    }
+    if (usage.expected & C2MemoryUsage::CPU_WRITE) {
+        prot |= PROT_WRITE;
+    }
+
+    size_t alignmentBytes = offset % PAGE_SIZE;
+    size_t mapOffset = offset - alignmentBytes;
+    size_t mapSize = size + alignmentBytes;
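+    // e.g. offset = 5000 and size = 100 with a 4096-byte page:
+    //   alignmentBytes = 904, mapOffset = 4096, mapSize = 1004, and the address handed
+    //   back to the caller is base + 904 inside the mapping.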
+    Mapping map = {nullptr, alignmentBytes, mapSize};
+
+    c2_status_t err =
+            mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
+    if (map.addr) {
+        mMappings.push_back(map);
+    }
+    return err;
+}
+
+c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
+    if (mMappings.empty()) {
+        ALOGD("tried to unmap unmapped buffer");
+        return C2_NOT_FOUND;
+    }
+    for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+        if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
+            size + it->alignmentBytes != it->size) {
+            continue;
+        }
+        int err = munmap(it->addr, it->size);
+        if (err != 0) {
+            ALOGD("munmap failed");
+            return c2_map_errno<EINVAL>(errno);
+        }
+        if (fence) {
+            *fence = C2Fence();  // not using fences
+        }
+        (void)mMappings.erase(it);
+        ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+        return C2_OK;
+    }
+    ALOGD("unmap failed to find specified map");
+    return C2_BAD_VALUE;
+}
+
+c2_status_t C2DmaBufAllocation::status() const {
+    return mInit;
+}
+
+C2Allocator::id_t C2DmaBufAllocation::getAllocatorId() const {
+    return mId;
+}
+
+bool C2DmaBufAllocation::equals(const std::shared_ptr<C2LinearAllocation>& other) const {
+    if (!other || other->getAllocatorId() != getAllocatorId()) {
+        return false;
+    }
+    // get user handle to compare objects
+    std::shared_ptr<C2DmaBufAllocation> otherAsBuf =
+            std::static_pointer_cast<C2DmaBufAllocation>(other);
+    return mHandle.bufferFd() == otherAsBuf->mHandle.bufferFd();
+}
+
+const C2Handle* C2DmaBufAllocation::handle() const {
+    return &mHandle;
+}
+
+C2DmaBufAllocation::~C2DmaBufAllocation() {
+    if (!mMappings.empty()) {
+        ALOGD("Dangling mappings!");
+        for (const Mapping& map : mMappings) {
+            int err = munmap(map.addr, map.size);
+            if (err) ALOGD("munmap failed");
+        }
+    }
+    if (mInit == C2_OK) {
+        native_handle_close(&mHandle);
+    }
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+                                       C2String heap_name, unsigned flags, C2Allocator::id_t id)
+    : C2LinearAllocation(capacity), mHandle(-1, 0) {
+    int bufferFd = -1;
+    int ret = 0;
+
+    bufferFd = alloc.Alloc(heap_name, allocSize, flags);
+    if (bufferFd < 0) {
+        ret = bufferFd;
+    }
+
+    // this may be a non-working handle if bufferFd is negative
+    mHandle = C2HandleBuf(bufferFd, capacity);
+    mId = id;
+    mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id)
+    : C2LinearAllocation(size), mHandle(-1, 0) {
+    mHandle = C2HandleBuf(shareFd, size);
+    mId = id;
+    mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(0));
+}
+
+/* =========================== DMABUF ALLOCATOR =========================== */
+C2DmaBufAllocator::C2DmaBufAllocator(id_t id) : mInit(C2_OK) {
+    C2MemoryUsage minUsage = {0, 0};
+    C2MemoryUsage maxUsage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+    Traits traits = {"android.allocator.dmabuf", id, LINEAR, minUsage, maxUsage};
+    mTraits = std::make_shared<Traits>(traits);
+}
+
+C2Allocator::id_t C2DmaBufAllocator::getId() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits->id;
+}
+
+C2String C2DmaBufAllocator::getName() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits->name;
+}
+
+std::shared_ptr<const C2Allocator::Traits> C2DmaBufAllocator::getTraits() const {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    return mTraits;
+}
+
+void C2DmaBufAllocator::setUsageMapper(const UsageMapperFn& mapper __unused, uint64_t minUsage,
+                                       uint64_t maxUsage, uint64_t blockSize) {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    mUsageMapperCache.clear();
+    mUsageMapperLru.clear();
+    mUsageMapper = mapper;
+    Traits traits = {mTraits->name, mTraits->id, LINEAR, C2MemoryUsage(minUsage),
+                     C2MemoryUsage(maxUsage)};
+    mTraits = std::make_shared<Traits>(traits);
+    mBlockSize = blockSize;
+}
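+
+// A minimal sketch (illustrative only; the heap names mirror the defaults chosen in
+// mapUsage() below, minus the system_uncached_supported() check) of installing a
+// custom usage mapper:
+[[maybe_unused]] static void ExampleInstallUsageMapper(
+        const std::shared_ptr<C2DmaBufAllocator>& allocator) {
+    allocator->setUsageMapper(
+            [](C2MemoryUsage usage, size_t capacity, C2String* heapName,
+               unsigned* flags) -> c2_status_t {
+                (void)capacity;
+                *heapName = (usage.expected &
+                                (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE))
+                        ? "system" : "system-uncached";
+                *flags = 0;
+                return C2_OK;
+            },
+            0 /* minUsage */,
+            C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected,
+            getpagesize() /* blockSize */);
+}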
+
+std::size_t C2DmaBufAllocator::MapperKeyHash::operator()(const MapperKey& k) const {
+    return std::hash<uint64_t>{}(k.first) ^ std::hash<size_t>{}(k.second);
+}
+
+c2_status_t C2DmaBufAllocator::mapUsage(C2MemoryUsage usage, size_t capacity, C2String* heap_name,
+                                        unsigned* flags) {
+    std::lock_guard<std::mutex> lock(mUsageMapperLock);
+    c2_status_t res = C2_OK;
+    // align capacity
+    capacity = (capacity + mBlockSize - 1) & ~(mBlockSize - 1);
+    MapperKey key = std::make_pair(usage.expected, capacity);
+    auto entry = mUsageMapperCache.find(key);
+    if (entry == mUsageMapperCache.end()) {
+        if (mUsageMapper) {
+            res = mUsageMapper(usage, capacity, heap_name, flags);
+        } else {
+            if (C2DmaBufAllocator::system_uncached_supported() &&
+                !(usage.expected & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE)))
+                *heap_name = "system-uncached";
+            else
+                *heap_name = "system";
+            *flags = 0;
+            res = C2_NO_INIT;
+        }
+        // add usage to cache
+        MapperValue value = std::make_tuple(*heap_name, *flags, res);
+        mUsageMapperLru.emplace_front(key, value);
+        mUsageMapperCache.emplace(std::make_pair(key, mUsageMapperLru.begin()));
+        if (mUsageMapperCache.size() > USAGE_LRU_CACHE_SIZE) {
+            // remove LRU entry
+            MapperKey lruKey = mUsageMapperLru.back().first;  // LRU entry is at the back
+            mUsageMapperCache.erase(lruKey);
+            mUsageMapperLru.pop_back();
+        }
+    } else {
+        // move entry to MRU
+        mUsageMapperLru.splice(mUsageMapperLru.begin(), mUsageMapperLru, entry->second);
+        const MapperValue& value = entry->second->second;
+        std::tie(*heap_name, *flags, res) = value;
+    }
+    return res;
+}
+
+c2_status_t C2DmaBufAllocator::newLinearAllocation(
+        uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation>* allocation) {
+    if (allocation == nullptr) {
+        return C2_BAD_VALUE;
+    }
+
+    allocation->reset();
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    C2String heap_name;
+    unsigned flags = 0;
+    c2_status_t ret = mapUsage(usage, capacity, &heap_name, &flags);
+    if (ret && ret != C2_NO_INIT) {
+        return ret;
+    }
+
+    // TODO: should we pad before mapping usage?
+
+    // NOTE: read the property directly here because this code also has to run on
+    // Android Q, while the corresponding sysprop API was only introduced in Android S.
+    static size_t sPadding =
+        base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+    if (sPadding > SIZE_MAX - capacity) {
+        // size would overflow
+        ALOGD("dmabuf_alloc: size #%x cannot accommodate padding #%zx", capacity, sPadding);
+        return C2_NO_MEMORY;
+    }
+
+    size_t allocSize = (size_t)capacity + sPadding;
+    // TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
+    std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
+            mBufferAllocator, allocSize, allocSize - sPadding, heap_name, flags, getId());
+    ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+    }
+    return ret;
+}
+
+c2_status_t C2DmaBufAllocator::priorLinearAllocation(
+        const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) {
+    *allocation = nullptr;
+    if (mInit != C2_OK) {
+        return mInit;
+    }
+
+    if (!C2HandleBuf::IsValid(handle)) {
+        return C2_BAD_VALUE;
+    }
+
+    // TODO: get capacity and validate it
+    const C2HandleBuf* h = static_cast<const C2HandleBuf*>(handle);
+    std::shared_ptr<C2DmaBufAllocation> alloc =
+            std::make_shared<C2DmaBufAllocation>(h->size(), h->bufferFd(), getId());
+    c2_status_t ret = alloc->status();
+    if (ret == C2_OK) {
+        *allocation = alloc;
+        native_handle_delete(
+                const_cast<native_handle_t*>(reinterpret_cast<const native_handle_t*>(handle)));
+    }
+    return ret;
+}
+
+// static
+bool C2DmaBufAllocator::CheckHandle(const C2Handle* const o) {
+    return C2HandleBuf::IsValid(o);
+}
+
+}  // namespace android
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
new file mode 100644
index 0000000..9c5183e
--- /dev/null
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2FenceFactory"
+#include <utils/Log.h>
+
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
+
+class C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
+
+    virtual bool valid() const = 0;
+
+    virtual bool ready() const = 0;
+
+    virtual int fd() const = 0;
+
+    virtual bool isHW() const = 0;
+
+    virtual ~Impl() = default;
+
+    Impl() = default;
+};
+
+c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
+    if (mImpl) {
+        return mImpl->wait(timeoutNs);
+    }
+    // null fence is always signalled.
+    return C2_OK;
+}
+
+bool C2Fence::valid() const {
+    if (mImpl) {
+        return mImpl->valid();
+    }
+    // null fence is always valid.
+    return true;
+}
+
+bool C2Fence::ready() const {
+    if (mImpl) {
+        return mImpl->ready();
+    }
+    // null fence is always signalled.
+    return true;
+}
+
+int C2Fence::fd() const {
+    if (mImpl) {
+        return mImpl->fd();
+    }
+    // null fence does not have fd.
+    return -1;
+}
+
+bool C2Fence::isHW() const {
+    if (mImpl) {
+        return mImpl->isHW();
+    }
+    return false;
+}
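+
+// A hedged usage sketch of the null-fence semantics above (the helper name is
+// illustrative): a default-constructed C2Fence behaves as an already-signalled fence.
+[[maybe_unused]] static void ExampleNullFence() {
+    C2Fence fence;                        // no Impl attached
+    (void)fence.valid();                  // true
+    (void)fence.ready();                  // true: treated as already signalled
+    (void)fence.fd();                     // -1: no backing file descriptor
+    (void)fence.wait(0 /* timeoutNs */);  // returns C2_OK immediately
+}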
+
+/**
+ * Fence implementation for C2BufferQueueBlockPool based block allocation.
+ * The implementation supports all C2Fence interface except fd().
+ */
+class _C2FenceFactory::SurfaceFenceImpl: public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        if (mPtr) {
+            return mPtr->waitForChange(mWaitId, timeoutNs);
+        }
+        return C2_OK;
+    }
+
+    virtual bool valid() const {
+        return mPtr;
+    }
+
+    virtual bool ready() const {
+        uint32_t status;
+        if (mPtr) {
+            mPtr->lock();
+            status = mPtr->getWaitIdLocked();
+            mPtr->unlock();
+
+            return status != mWaitId;
+        }
+        return true;
+    }
+
+    virtual int fd() const {
+        // does not support fd, since this is shared mem and futex based
+        return -1;
+    }
+
+    virtual bool isHW() const {
+        return false;
+    }
+
+    virtual ~SurfaceFenceImpl() {};
+
+    SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
+            mSyncMem(syncMem),
+            mPtr(syncMem ? syncMem->mem() : nullptr),
+            mWaitId(syncMem ? waitId : 0) {}
+private:
+    const std::shared_ptr<const C2SurfaceSyncMemory> mSyncMem; // keeps the sync memory alive
+    C2SyncVariables *const mPtr;
+    const uint32_t mWaitId;
+};
+
+C2Fence::C2Fence(std::shared_ptr<Impl> impl) : mImpl(impl) {}
+
+C2Fence _C2FenceFactory::CreateSurfaceFence(
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+        uint32_t waitId) {
+    if (syncMem) {
+        C2Fence::Impl *p
+                = new _C2FenceFactory::SurfaceFenceImpl(syncMem, waitId);
+        if (p->valid()) {
+            return C2Fence(std::shared_ptr<C2Fence::Impl>(p));
+        } else {
+            delete p;
+        }
+    }
+    return C2Fence();
+}
diff --git a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
index 4c330e5..bee028a 100644
--- a/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
+++ b/media/codec2/vndk/C2PlatformStorePluginLoader.cpp
@@ -33,7 +33,8 @@
 }  // unnamed
 
 C2PlatformStorePluginLoader::C2PlatformStorePluginLoader(const char *libPath)
-    : mCreateBlockPool(nullptr) {
+    : mCreateBlockPool(nullptr),
+      mCreateAllocator(nullptr) {
     mLibHandle = dlopen(libPath, RTLD_NOW | RTLD_NODELETE);
     if (mLibHandle == nullptr) {
         ALOGD("Failed to load library: %s (%s)", libPath, dlerror());
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d16527e..c07c09e 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -21,6 +21,7 @@
 #include <C2AllocatorBlob.h>
 #include <C2AllocatorGralloc.h>
 #include <C2AllocatorIon.h>
+#include <C2DmaBufAllocator.h>
 #include <C2BufferPriv.h>
 #include <C2BqBufferPriv.h>
 #include <C2Component.h>
@@ -82,6 +83,7 @@
 
     /// returns a shared-singleton ion allocator
     std::shared_ptr<C2Allocator> fetchIonAllocator();
+    std::shared_ptr<C2Allocator> fetchDmaBufAllocator();
 
     /// returns a shared-singleton gralloc allocator
     std::shared_ptr<C2Allocator> fetchGrallocAllocator();
@@ -99,6 +101,34 @@
 C2PlatformAllocatorStoreImpl::C2PlatformAllocatorStoreImpl() {
 }
 
+static bool using_ion(void) {
+    static int cached_result = []()->int {
+        struct stat buffer;
+        int ret = (stat("/dev/ion", &buffer) == 0);
+
+        if (property_get_int32("debug.c2.use_dmabufheaps", 0)) {
+            /*
+             * Double check that the system heap is present so we
+             * can gracefully fall back to ION if we cannot satisfy
+             * the override
+             */
+            ret = (stat("/dev/dma_heap/system", &buffer) != 0);
+            if (ret)
+                ALOGE("debug.c2.use_dmabufheaps set, but no system heap. Ignoring override!");
+            else
+                ALOGD("debug.c2.use_dmabufheaps set, forcing DMABUF Heaps");
+        }
+
+        if (ret)
+            ALOGD("Using ION\n");
+        else
+            ALOGD("Using DMABUF Heaps\n");
+        return ret;
+    }();
+
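+    // cached_result == 1 selects ION. When debug.c2.use_dmabufheaps is set, only the
+    // presence of /dev/dma_heap/system decides; otherwise the presence of /dev/ion does.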
+    return (cached_result == 1);
+}
+
 c2_status_t C2PlatformAllocatorStoreImpl::fetchAllocator(
         id_t id, std::shared_ptr<C2Allocator> *const allocator) {
     allocator->reset();
@@ -107,8 +137,11 @@
     }
     switch (id) {
     // TODO: should we implement a generic registry for all, and use that?
-    case C2PlatformAllocatorStore::ION:
-        *allocator = fetchIonAllocator();
+    case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
+        if (using_ion())
+            *allocator = fetchIonAllocator();
+        else
+            *allocator = fetchDmaBufAllocator();
         break;
 
     case C2PlatformAllocatorStore::GRALLOC:
@@ -142,7 +175,9 @@
 namespace {
 
 std::mutex gIonAllocatorMutex;
+std::mutex gDmaBufAllocatorMutex;
 std::weak_ptr<C2AllocatorIon> gIonAllocator;
+std::weak_ptr<C2DmaBufAllocator> gDmaBufAllocator;
 
 void UseComponentStoreForIonAllocator(
         const std::shared_ptr<C2AllocatorIon> allocator,
@@ -197,6 +232,65 @@
     allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
 }
 
+void UseComponentStoreForDmaBufAllocator(const std::shared_ptr<C2DmaBufAllocator> allocator,
+                                         std::shared_ptr<C2ComponentStore> store) {
+    C2DmaBufAllocator::UsageMapperFn mapper;
+    const size_t maxHeapNameLen = 128;
+    uint64_t minUsage = 0;
+    uint64_t maxUsage = C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected;
+    size_t blockSize = getpagesize();
+
+    // query min and max usage as well as block size via supported values
+    std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+    usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen);
+
+    std::vector<C2FieldSupportedValuesQuery> query = {
+            C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(*usageInfo, usageInfo->m.usage)),
+            C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(*usageInfo, usageInfo->m.capacity)),
+    };
+    c2_status_t res = store->querySupportedValues_sm(query);
+    if (res == C2_OK) {
+        if (query[0].status == C2_OK) {
+            const C2FieldSupportedValues& fsv = query[0].values;
+            if (fsv.type == C2FieldSupportedValues::FLAGS && !fsv.values.empty()) {
+                minUsage = fsv.values[0].u64;
+                maxUsage = 0;
+                for (C2Value::Primitive v : fsv.values) {
+                    maxUsage |= v.u64;
+                }
+            }
+        }
+        if (query[1].status == C2_OK) {
+            const C2FieldSupportedValues& fsv = query[1].values;
+            if (fsv.type == C2FieldSupportedValues::RANGE && fsv.range.step.u32 > 0) {
+                blockSize = fsv.range.step.u32;
+            }
+        }
+
+        mapper = [store](C2MemoryUsage usage, size_t capacity, C2String* heapName,
+                         unsigned* flags) -> c2_status_t {
+            if (capacity > UINT32_MAX) {
+                return C2_BAD_VALUE;
+            }
+
+            std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+            usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen, usage.expected, capacity);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;  // TODO: remove
+
+            c2_status_t res = store->config_sm({&*usageInfo}, &failures);
+            if (res == C2_OK) {
+                *heapName = C2String(usageInfo->m.heapName);
+                *flags = usageInfo->m.allocFlags;
+            }
+
+            return res;
+        };
+    }
+
+    allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
+}
+
 }
 
 void C2PlatformAllocatorStoreImpl::setComponentStore(std::shared_ptr<C2ComponentStore> store) {
@@ -233,6 +327,22 @@
     return allocator;
 }
 
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchDmaBufAllocator() {
+    std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+    std::shared_ptr<C2DmaBufAllocator> allocator = gDmaBufAllocator.lock();
+    if (allocator == nullptr) {
+        std::shared_ptr<C2ComponentStore> componentStore;
+        {
+            std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
+            componentStore = _mComponentStore;
+        }
+        allocator = std::make_shared<C2DmaBufAllocator>(C2PlatformAllocatorStore::DMABUFHEAP);
+        UseComponentStoreForDmaBufAllocator(allocator, componentStore);
+        gDmaBufAllocator = allocator;
+    }
+    return allocator;
+}
+
 std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchBlobAllocator() {
     static std::mutex mutex;
     static std::weak_ptr<C2Allocator> blobAllocator;
@@ -335,7 +445,7 @@
 
     c2_status_t _createBlockPool(
             C2PlatformAllocatorStore::id_t allocatorId,
-            std::shared_ptr<const C2Component> component,
+            std::vector<std::shared_ptr<const C2Component>> components,
             C2BlockPool::local_id_t poolId,
             std::shared_ptr<C2BlockPool> *pool) {
         std::shared_ptr<C2AllocatorStore> allocatorStore =
@@ -347,7 +457,7 @@
             allocatorId = GetPreferredLinearAllocatorId(GetCodec2PoolMask());
         }
         switch(allocatorId) {
-            case C2PlatformAllocatorStore::ION:
+            case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
                 res = allocatorStore->fetchAllocator(
                         C2PlatformAllocatorStore::ION, &allocator);
                 if (res == C2_OK) {
@@ -356,7 +466,9 @@
                                     allocator, poolId);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
-                    mComponents[poolId] = component;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
                 }
                 break;
             case C2PlatformAllocatorStore::BLOB:
@@ -368,7 +480,9 @@
                                     allocator, poolId);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
-                    mComponents[poolId] = component;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
                 }
                 break;
             case C2PlatformAllocatorStore::GRALLOC:
@@ -380,7 +494,9 @@
                         std::make_shared<C2PooledBlockPool>(allocator, poolId);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
-                    mComponents[poolId] = component;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
                 }
                 break;
             case C2PlatformAllocatorStore::BUFFERQUEUE:
@@ -392,7 +508,9 @@
                                     allocator, poolId);
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
-                    mComponents[poolId] = component;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
                 }
                 break;
             default:
@@ -403,7 +521,9 @@
                 if (res == C2_OK) {
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
-                    mComponents[poolId] = component;
+                    mComponents[poolId].insert(
+                           mComponents[poolId].end(),
+                           components.begin(), components.end());
                 }
                 break;
         }
@@ -412,9 +532,9 @@
 
     c2_status_t createBlockPool(
             C2PlatformAllocatorStore::id_t allocatorId,
-            std::shared_ptr<const C2Component> component,
+            std::vector<std::shared_ptr<const C2Component>> components,
             std::shared_ptr<C2BlockPool> *pool) {
-        return _createBlockPool(allocatorId, component, mBlockPoolSeqId++, pool);
+        return _createBlockPool(allocatorId, components, mBlockPoolSeqId++, pool);
     }
 
     bool getBlockPool(
@@ -430,8 +550,13 @@
                 mBlockPools.erase(it);
                 mComponents.erase(blockPoolId);
             } else {
-                auto found = mComponents.find(blockPoolId);
-                if (component == found->second.lock()) {
+                auto found = std::find_if(
+                        mComponents[blockPoolId].begin(),
+                        mComponents[blockPoolId].end(),
+                        [component](const std::weak_ptr<const C2Component> &ptr) {
+                            return component == ptr.lock();
+                        });
+                if (found != mComponents[blockPoolId].end()) {
                     *pool = ptr;
                     return true;
                 }
@@ -444,7 +569,7 @@
     C2BlockPool::local_id_t mBlockPoolSeqId;
 
     std::map<C2BlockPool::local_id_t, std::weak_ptr<C2BlockPool>> mBlockPools;
-    std::map<C2BlockPool::local_id_t, std::weak_ptr<const C2Component>> mComponents;
+    std::map<C2BlockPool::local_id_t, std::vector<std::weak_ptr<const C2Component>>> mComponents;
 };
 
 static std::unique_ptr<_C2BlockPoolCache> sBlockPoolCache =
@@ -484,7 +609,7 @@
     // TODO: remove this. this is temporary
     case C2BlockPool::PLATFORM_START:
         res = sBlockPoolCache->_createBlockPool(
-                C2PlatformAllocatorStore::BUFFERQUEUE, component, id, pool);
+                C2PlatformAllocatorStore::BUFFERQUEUE, {component}, id, pool);
         break;
     default:
         break;
@@ -494,12 +619,22 @@
 
 c2_status_t CreateCodec2BlockPool(
         C2PlatformAllocatorStore::id_t allocatorId,
+        const std::vector<std::shared_ptr<const C2Component>> &components,
+        std::shared_ptr<C2BlockPool> *pool) {
+    pool->reset();
+
+    std::lock_guard<std::mutex> lock(sBlockPoolCacheMutex);
+    return sBlockPoolCache->createBlockPool(allocatorId, components, pool);
+}
+
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorStore::id_t allocatorId,
         std::shared_ptr<const C2Component> component,
         std::shared_ptr<C2BlockPool> *pool) {
     pool->reset();
 
     std::lock_guard<std::mutex> lock(sBlockPoolCacheMutex);
-    return sBlockPoolCache->createBlockPool(allocatorId, component, pool);
+    return sBlockPoolCache->createBlockPool(allocatorId, {component}, pool);
 }
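+
+// A hedged usage sketch of the vector overload above (the component handles and the
+// allocator id are placeholders):
+//     std::shared_ptr<C2BlockPool> pool;
+//     c2_status_t res = CreateCodec2BlockPool(
+//             C2PlatformAllocatorStore::ION, {decoder, postProcessor}, &pool);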
 
 class C2PlatformComponentStore : public C2ComponentStore {
@@ -526,6 +661,12 @@
             std::vector<std::unique_ptr<C2SettingResult>> *const failures) override;
     C2PlatformComponentStore();
 
+    // For testing only
+    C2PlatformComponentStore(
+            std::vector<std::tuple<C2String,
+                                   C2ComponentFactory::CreateCodec2FactoryFunc,
+                                   C2ComponentFactory::DestroyCodec2FactoryFunc>>);
+
     virtual ~C2PlatformComponentStore() override = default;
 
 private:
@@ -566,6 +707,24 @@
         }
 
         /**
+         * Creates an uninitialized component module.
+         * NOTE: For testing only
+         *
+         * \param createFactory[in]   the component factory creation function.
+         * \param destroyFactory[in]  the component factory destruction function.
+         *
+         * \note Only used by ComponentLoader.
+         */
+        ComponentModule(
+                C2ComponentFactory::CreateCodec2FactoryFunc createFactory,
+                C2ComponentFactory::DestroyCodec2FactoryFunc destroyFactory)
+            : mInit(C2_NO_INIT),
+              mLibHandle(nullptr),
+              createFactory(createFactory),
+              destroyFactory(destroyFactory),
+              mComponentFactory(nullptr) {
+        }
+
+        /**
          * Initializes a component module with a given library path. Must be called exactly once.
          *
          * \note Only used by ComponentLoader.
@@ -621,7 +780,13 @@
             std::lock_guard<std::mutex> lock(mMutex);
             std::shared_ptr<ComponentModule> localModule = mModule.lock();
             if (localModule == nullptr) {
-                localModule = std::make_shared<ComponentModule>();
+                if (mCreateFactory) {
+                    // For testing only
+                    localModule = std::make_shared<ComponentModule>(mCreateFactory,
+                                                                    mDestroyFactory);
+                } else {
+                    localModule = std::make_shared<ComponentModule>();
+                }
                 res = localModule->init(mLibPath);
                 if (res == C2_OK) {
                     mModule = localModule;
@@ -637,14 +802,27 @@
         ComponentLoader(std::string libPath)
             : mLibPath(libPath) {}
 
+        // For testing only
+        ComponentLoader(std::tuple<C2String,
+                          C2ComponentFactory::CreateCodec2FactoryFunc,
+                          C2ComponentFactory::DestroyCodec2FactoryFunc> func)
+            : mLibPath(std::get<0>(func)),
+              mCreateFactory(std::get<1>(func)),
+              mDestroyFactory(std::get<2>(func)) {}
+
     private:
         std::mutex mMutex; ///< mutex guarding the module
         std::weak_ptr<ComponentModule> mModule; ///< weak reference to the loaded module
         std::string mLibPath; ///< library path
+
+        // For testing only
+        C2ComponentFactory::CreateCodec2FactoryFunc mCreateFactory = nullptr;
+        C2ComponentFactory::DestroyCodec2FactoryFunc mDestroyFactory = nullptr;
     };
 
     struct Interface : public C2InterfaceHelper {
         std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+        std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
 
         Interface(std::shared_ptr<C2ReflectorHelper> reflector)
             : C2InterfaceHelper(reflector) {
@@ -680,7 +858,19 @@
                     me.set().minAlignment = 0;
 #endif
                     return C2R::Ok();
-                }
+                };
+
+                static C2R setDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+                    long long usage = (long long)me.get().m.usage;
+                    if (C2DmaBufAllocator::system_uncached_supported() &&
+                        !(usage & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE))) {
+                        strncpy(me.set().m.heapName, "system-uncached", me.v.flexCount());
+                    } else {
+                        strncpy(me.set().m.heapName, "system", me.v.flexCount());
+                    }
+                    me.set().m.allocFlags = 0;
+                    return C2R::Ok();
+                };
             };
 
             addParameter(
@@ -695,6 +885,18 @@
                 })
                 .withSetter(Setter::setIonUsage)
                 .build());
+
+            addParameter(
+                DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+                .withDefault(C2StoreDmaBufUsageInfo::AllocShared(0))
+                .withFields({
+                    C2F(mDmaBufUsageInfo, m.usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+                    C2F(mDmaBufUsageInfo, m.capacity).inRange(0, UINT32_MAX, 1024),
+                    C2F(mDmaBufUsageInfo, m.allocFlags).flags({}),
+                    C2F(mDmaBufUsageInfo, m.heapName).any(),
+                })
+                .withSetter(Setter::setDmaBufUsage)
+                .build());
         }
     };
 
@@ -731,25 +933,33 @@
 
     std::shared_ptr<C2ReflectorHelper> mReflector;
     Interface mInterface;
+
+    // For testing only
+    std::vector<std::tuple<C2String,
+                          C2ComponentFactory::CreateCodec2FactoryFunc,
+                          C2ComponentFactory::DestroyCodec2FactoryFunc>> mCodec2FactoryFuncs;
 };
 
 c2_status_t C2PlatformComponentStore::ComponentModule::init(
         std::string libPath) {
     ALOGV("in %s", __func__);
     ALOGV("loading dll");
-    mLibHandle = dlopen(libPath.c_str(), RTLD_NOW|RTLD_NODELETE);
-    LOG_ALWAYS_FATAL_IF(mLibHandle == nullptr,
-            "could not dlopen %s: %s", libPath.c_str(), dlerror());
 
-    createFactory =
-        (C2ComponentFactory::CreateCodec2FactoryFunc)dlsym(mLibHandle, "CreateCodec2Factory");
-    LOG_ALWAYS_FATAL_IF(createFactory == nullptr,
-            "createFactory is null in %s", libPath.c_str());
+    if (!createFactory) {
+        mLibHandle = dlopen(libPath.c_str(), RTLD_NOW|RTLD_NODELETE);
+        LOG_ALWAYS_FATAL_IF(mLibHandle == nullptr,
+                "could not dlopen %s: %s", libPath.c_str(), dlerror());
 
-    destroyFactory =
-        (C2ComponentFactory::DestroyCodec2FactoryFunc)dlsym(mLibHandle, "DestroyCodec2Factory");
-    LOG_ALWAYS_FATAL_IF(destroyFactory == nullptr,
-            "destroyFactory is null in %s", libPath.c_str());
+        createFactory =
+            (C2ComponentFactory::CreateCodec2FactoryFunc)dlsym(mLibHandle, "CreateCodec2Factory");
+        LOG_ALWAYS_FATAL_IF(createFactory == nullptr,
+                "createFactory is null in %s", libPath.c_str());
+
+        destroyFactory =
+            (C2ComponentFactory::DestroyCodec2FactoryFunc)dlsym(mLibHandle, "DestroyCodec2Factory");
+        LOG_ALWAYS_FATAL_IF(destroyFactory == nullptr,
+                "destroyFactory is null in %s", libPath.c_str());
+    }
 
     mComponentFactory = createFactory();
     if (mComponentFactory == nullptr) {
@@ -772,58 +982,10 @@
 
     std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
     if (traits) {
-        traits->name = intf->getName();
-
-        C2ComponentKindSetting kind;
-        C2ComponentDomainSetting domain;
-        res = intf->query_vb({ &kind, &domain }, {}, C2_MAY_BLOCK, nullptr);
-        bool fixDomain = res != C2_OK;
-        if (res == C2_OK) {
-            traits->kind = kind.value;
-            traits->domain = domain.value;
-        } else {
-            // TODO: remove this fall-back
-            ALOGD("failed to query interface for kind and domain: %d", res);
-
-            traits->kind =
-                (traits->name.find("encoder") != std::string::npos) ? C2Component::KIND_ENCODER :
-                (traits->name.find("decoder") != std::string::npos) ? C2Component::KIND_DECODER :
-                C2Component::KIND_OTHER;
-        }
-
-        uint32_t mediaTypeIndex =
-                traits->kind == C2Component::KIND_ENCODER ? C2PortMediaTypeSetting::output::PARAM_TYPE
-                : C2PortMediaTypeSetting::input::PARAM_TYPE;
-        std::vector<std::unique_ptr<C2Param>> params;
-        res = intf->query_vb({}, { mediaTypeIndex }, C2_MAY_BLOCK, &params);
-        if (res != C2_OK) {
-            ALOGD("failed to query interface: %d", res);
+        if (!C2InterfaceUtils::FillTraitsFromInterface(traits.get(), intf)) {
+            ALOGD("Failed to fill traits from interface");
             return mInit;
         }
-        if (params.size() != 1u) {
-            ALOGD("failed to query interface: unexpected vector size: %zu", params.size());
-            return mInit;
-        }
-        C2PortMediaTypeSetting *mediaTypeConfig = C2PortMediaTypeSetting::From(params[0].get());
-        if (mediaTypeConfig == nullptr) {
-            ALOGD("failed to query media type");
-            return mInit;
-        }
-        traits->mediaType =
-            std::string(mediaTypeConfig->m.value,
-                        strnlen(mediaTypeConfig->m.value, mediaTypeConfig->flexCount()));
-
-        if (fixDomain) {
-            if (strncmp(traits->mediaType.c_str(), "audio/", 6) == 0) {
-                traits->domain = C2Component::DOMAIN_AUDIO;
-            } else if (strncmp(traits->mediaType.c_str(), "video/", 6) == 0) {
-                traits->domain = C2Component::DOMAIN_VIDEO;
-            } else if (strncmp(traits->mediaType.c_str(), "image/", 6) == 0) {
-                traits->domain = C2Component::DOMAIN_IMAGE;
-            } else {
-                traits->domain = C2Component::DOMAIN_OTHER;
-            }
-        }
 
         // TODO: get this properly from the store during emplace
         switch (traits->domain) {
@@ -833,26 +995,6 @@
         default:
             traits->rank = 512;
         }
-
-        params.clear();
-        res = intf->query_vb({}, { C2ComponentAliasesSetting::PARAM_TYPE }, C2_MAY_BLOCK, &params);
-        if (res == C2_OK && params.size() == 1u) {
-            C2ComponentAliasesSetting *aliasesSetting =
-                C2ComponentAliasesSetting::From(params[0].get());
-            if (aliasesSetting) {
-                // Split aliases on ','
-                // This looks simpler in plain C and even std::string would still make a copy.
-                char *aliases = ::strndup(aliasesSetting->m.value, aliasesSetting->flexCount());
-                ALOGD("'%s' has aliases: '%s'", intf->getName().c_str(), aliases);
-
-                for (char *tok, *ptr, *str = aliases; (tok = ::strtok_r(str, ",", &ptr));
-                        str = nullptr) {
-                    traits->aliases.push_back(tok);
-                    ALOGD("adding alias: '%s'", tok);
-                }
-                free(aliases);
-            }
-        }
     }
     mTraits = traits;
 
@@ -950,6 +1092,22 @@
     emplace("libcodec2_soft_vp8enc.so");
     emplace("libcodec2_soft_vp9dec.so");
     emplace("libcodec2_soft_vp9enc.so");
+
+}
+
+// For testing only
+C2PlatformComponentStore::C2PlatformComponentStore(
+    std::vector<std::tuple<C2String,
+                C2ComponentFactory::CreateCodec2FactoryFunc,
+                C2ComponentFactory::DestroyCodec2FactoryFunc>> funcs)
+    : mVisited(false),
+      mReflector(std::make_shared<C2ReflectorHelper>()),
+      mInterface(mReflector),
+      mCodec2FactoryFuncs(funcs) {
+
+    for (auto const& func : mCodec2FactoryFuncs) {
+        mComponents.emplace(std::get<0>(func), func);
+    }
 }
 
 c2_status_t C2PlatformComponentStore::copyBuffer(
@@ -1069,4 +1227,11 @@
     return store;
 }
 
+// For testing only
+std::shared_ptr<C2ComponentStore> GetTestComponentStore(
+        std::vector<std::tuple<C2String,
+        C2ComponentFactory::CreateCodec2FactoryFunc,
+        C2ComponentFactory::DestroyCodec2FactoryFunc>> funcs) {
+    return std::shared_ptr<C2ComponentStore>(new C2PlatformComponentStore(funcs));
+}
 } // namespace android
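
For orientation only (not part of the patch), a unit test might assemble a store from in-process factory functions roughly as follows; the component name and the MyTestCreateFactory/MyTestDestroyFactory symbols are hypothetical stand-ins for a test component's exported factory functions:

    #include <C2PlatformSupport.h>
    #include <memory>
    #include <tuple>
    #include <vector>

    // Hypothetical factories matching C2ComponentFactory's
    // CreateCodec2FactoryFunc / DestroyCodec2FactoryFunc signatures.
    extern "C" ::C2ComponentFactory *MyTestCreateFactory();
    extern "C" void MyTestDestroyFactory(::C2ComponentFactory *factory);

    std::shared_ptr<C2ComponentStore> MakeTestStore() {
        std::vector<std::tuple<C2String,
                               C2ComponentFactory::CreateCodec2FactoryFunc,
                               C2ComponentFactory::DestroyCodec2FactoryFunc>> funcs;
        funcs.emplace_back("c2.test.mock.decoder",
                           &MyTestCreateFactory, &MyTestDestroyFactory);
        return android::GetTestComponentStore(funcs);
    }
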
diff --git a/media/codec2/vndk/include/C2AllocatorBlob.h b/media/codec2/vndk/include/C2AllocatorBlob.h
index 89ce949..fc67af7 100644
--- a/media/codec2/vndk/include/C2AllocatorBlob.h
+++ b/media/codec2/vndk/include/C2AllocatorBlob.h
@@ -44,7 +44,12 @@
 
     virtual ~C2AllocatorBlob() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
 private:
     std::shared_ptr<const Traits> mTraits;
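
A minimal, hedged sketch of the intended call pattern (editorial): handles are validated through the virtual checkHandle() rather than the deprecated static isValid(); this assumes the C2Allocator base class declares the virtual that the override above implements:

    #include <C2Buffer.h>
    #include <memory>

    // Hedged sketch: polymorphic handle validation; works the same way for the
    // blob, gralloc, ion and dmabuf allocators, each of which overrides checkHandle().
    bool IsHandleRecognized(const std::shared_ptr<C2Allocator> &allocator,
                            const C2Handle *handle) {
        return allocator && allocator->checkHandle(handle);
    }
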
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index ee7524e..1da3e14 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -37,7 +37,8 @@
  * Wrap the gralloc handle and metadata into Codec2 handle recognized by
  * C2AllocatorGralloc.
  *
- * @return a new NON-OWNING C2Handle that must be deleted using native_handle_delete.
+ * @return a new NON-OWNING C2Handle that must be closed and deleted using native_handle_close and
+ * native_handle_delete.
  */
 C2Handle *WrapNativeCodec2GrallocHandle(
         const native_handle_t *const handle,
@@ -84,7 +85,12 @@
 
     virtual ~C2AllocatorGralloc() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
 private:
     class Impl;
diff --git a/media/codec2/vndk/include/C2AllocatorIon.h b/media/codec2/vndk/include/C2AllocatorIon.h
index 1b2051f..6a49b7d 100644
--- a/media/codec2/vndk/include/C2AllocatorIon.h
+++ b/media/codec2/vndk/include/C2AllocatorIon.h
@@ -57,7 +57,12 @@
 
     virtual ~C2AllocatorIon() override;
 
-    static bool isValid(const C2Handle* const o);
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    // deprecated
+    static bool isValid(const C2Handle* const o) { return CheckHandle(o); }
 
     /**
      * Updates the usage mapper for subsequent new allocations, as well as the supported
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index e1a8138..b2636e9 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -20,9 +20,14 @@
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 
 #include <C2Buffer.h>
+#include <C2BlockInternal.h>
 
 #include <functional>
 
+namespace android {
+class GraphicBuffer;
+}  // namespace android
+
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
     C2BufferQueueBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
@@ -44,6 +49,14 @@
             C2MemoryUsage usage,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) override;
 
+    virtual c2_status_t fetchGraphicBlock(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence /* nonnull */) override;
+
     typedef std::function<void(uint64_t producer, int32_t slot, int64_t nsecs)> OnRenderCallback;
 
     /**
@@ -67,6 +80,27 @@
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
+    /**
+     * Configures an IGBP from which blocks are created. A newly created block is
+     * dequeued from the configured IGBP; the unique id of the IGBP and the slot number
+     * of the block are passed via native_handle. Managing the IGBP is the caller's
+     * responsibility. When no IGBP is configured, blocks are created via the allocator.
+     * Since zero is never used as an IGBP unique id, the unique id bundled in the
+     * native_handle is zero whenever no IGBP is configured or the producer is nullptr.
+     *
+     * \param producer      the IGBP, which will be used to fetch blocks
+     * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
+     * \param bqId          Id of IGBP
+     * \param generationId  Generation Id for rendering output
+     * \param consumerUsage consumer usage flag of the IGBP
+     */
+    virtual void configureProducer(
+            const android::sp<HGraphicBufferProducer> &producer,
+            native_handle_t *syncMemory,
+            uint64_t bqId,
+            uint32_t generationId,
+            uint64_t consumerUsage);
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
@@ -77,4 +111,72 @@
     friend struct C2BufferQueueBlockPoolData;
 };
 
+class C2SurfaceSyncMemory;
+
+struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
+public:
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer;
+
+    // Create a remote BlockPoolData.
+    C2BufferQueueBlockPoolData(
+            uint32_t generation, uint64_t bqId, int32_t bqSlot,
+            const std::shared_ptr<int> &owner,
+            const android::sp<HGraphicBufferProducer>& producer);
+
+    // Create a local BlockPoolData.
+    C2BufferQueueBlockPoolData(
+            uint32_t generation, uint64_t bqId, int32_t bqSlot,
+            const android::sp<HGraphicBufferProducer>& producer,
+            std::shared_ptr<C2SurfaceSyncMemory>, int noUse);
+
+    virtual ~C2BufferQueueBlockPoolData() override;
+
+    virtual type_t getType() const override;
+
+    int migrate(const android::sp<HGraphicBufferProducer>& producer,
+                uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
+                android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+                std::shared_ptr<C2SurfaceSyncMemory> syncMem);
+
+private:
+    friend struct _C2BlockFactory;
+
+    // Methods delegated from _C2BlockFactory.
+    void getBufferQueueData(uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) const;
+    bool holdBlockFromBufferQueue(const std::shared_ptr<int>& owner,
+                                  const android::sp<HGraphicBufferProducer>& igbp,
+                                  std::shared_ptr<C2SurfaceSyncMemory> syncMem);
+    bool beginTransferBlockToClient();
+    bool endTransferBlockToClient(bool transfer);
+    bool beginAttachBlockToBufferQueue();
+    bool endAttachBlockToBufferQueue(const std::shared_ptr<int>& owner,
+                                     const android::sp<HGraphicBufferProducer>& igbp,
+                                     std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+                                     uint32_t generation, uint64_t bqId, int32_t bqSlot);
+    bool displayBlockToBufferQueue();
+
+    const bool mLocal;
+    bool mHeld;
+
+    // Data of the corresponding buffer.
+    uint32_t mGeneration;
+    uint64_t mBqId;
+    int32_t mBqSlot;
+
+    // Data of the current IGBP, updated at migrate(). If the values are
+    // mismatched, then the corresponding buffer will not be cancelled back to
+    // IGBP at the destructor.
+    uint32_t mCurrentGeneration;
+    uint64_t mCurrentBqId;
+
+    bool mTransfer; // local transfer to remote
+    bool mAttach; // attach on remote
+    bool mDisplay; // display on remote
+    std::weak_ptr<int> mOwner;
+    android::sp<HGraphicBufferProducer> mIgbp;
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+    mutable std::mutex mLock;
+};
+
 #endif // STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
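
To sketch how the surface-sync-aware additions above might be driven (editorial illustration, not part of the change): the producer, sync handle, ids and usage are assumed to come from the surface plumbing that owns the output IGBP.

    #include <C2BqBufferPriv.h>

    using HProducer = ::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer;

    // Hedged sketch: configure the pool with a producer plus its sync memory
    // handle, then fetch with the fence-aware overload. On C2_BLOCKING the
    // caller may wait on *fence instead of polling.
    c2_status_t ConfigureAndFetch(
            const std::shared_ptr<C2BufferQueueBlockPool> &pool,
            const android::sp<HProducer> &producer,
            native_handle_t *syncHandle, uint64_t bqId, uint32_t generation,
            uint64_t consumerUsage, uint32_t width, uint32_t height, uint32_t format,
            C2MemoryUsage usage, std::shared_ptr<C2GraphicBlock> *block, C2Fence *fence) {
        pool->configureProducer(producer, syncHandle, bqId, generation, consumerUsage);
        return pool->fetchGraphicBlock(width, height, format, usage, block, fence);
    }
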
diff --git a/media/codec2/vndk/include/C2DmaBufAllocator.h b/media/codec2/vndk/include/C2DmaBufAllocator.h
new file mode 100644
index 0000000..d84c8c6
--- /dev/null
+++ b/media/codec2/vndk/include/C2DmaBufAllocator.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <sys/stat.h>  // stat
+
+#include <functional>
+#include <list>
+#include <mutex>
+#include <tuple>
+#include <unordered_map>
+
+namespace android {
+
+class C2DmaBufAllocator : public C2Allocator {
+   public:
+    virtual c2_status_t newLinearAllocation(
+            uint32_t capacity, C2MemoryUsage usage,
+            std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+    virtual c2_status_t priorLinearAllocation(
+            const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+    C2DmaBufAllocator(id_t id);
+
+    virtual c2_status_t status() const { return mInit; }
+
+    virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+    static bool CheckHandle(const C2Handle* const o);
+
+    virtual id_t getId() const override;
+
+    virtual C2String getName() const override;
+
+    virtual std::shared_ptr<const Traits> getTraits() const override;
+
+    // Usage mapper function used by the allocator
+    //   (usage, capacity) => (heapName, flags)
+    //
+    // capacity is aligned to the default block-size (defaults to page size) to
+    // reduce caching overhead
+    typedef std::function<c2_status_t(C2MemoryUsage, size_t,
+                                      /* => */ C2String*, unsigned*)>
+            UsageMapperFn;
+
+    /**
+     * Updates the usage mapper for subsequent new allocations, as well as the
+     * supported minimum and maximum usage masks and default block-size to use
+     * for the mapper.
+     *
+     * \param mapper          This method is called to map Codec 2.0 buffer usage
+     *                        to a dmabuf heap name and the flags required by
+     *                        the DMA-BUF heap device
+     *
+     * \param minUsage        Minimum buffer usage required for supported
+     *                        allocations (defaults to 0)
+     *
+     * \param maxUsage        Maximum buffer usage supported by the dmabuf allocator
+     *                        (defaults to SW_READ | SW_WRITE)
+     *
+     * \param blockSize       Alignment used prior to calling |mapper| for the
+     *                        buffer capacity. This also helps reduce the size of
+     *                        the cache required for mapper results.
+     *                        (defaults to the page size)
+     */
+    void setUsageMapper(const UsageMapperFn& mapper, uint64_t minUsage, uint64_t maxUsage,
+                        uint64_t blockSize);
+
+    static bool system_uncached_supported(void) {
+        static int cached_result = -1;
+
+        if (cached_result == -1) {
+            struct stat buffer;
+            cached_result = (stat("/dev/dma_heap/system-uncached", &buffer) == 0);
+        }
+        return (cached_result == 1);
+    };
+
+   private:
+    c2_status_t mInit;
+    BufferAllocator mBufferAllocator;
+
+    c2_status_t mapUsage(C2MemoryUsage usage, size_t size,
+                         /* => */ C2String* heap_name, unsigned* flags);
+
+    // this locks mTraits, mBlockSize, mUsageMapper, mUsageMapperLru and
+    // mUsageMapperCache
+    mutable std::mutex mUsageMapperLock;
+    std::shared_ptr<const Traits> mTraits;
+    size_t mBlockSize;
+    UsageMapperFn mUsageMapper;
+    typedef std::pair<uint64_t, size_t> MapperKey;
+    struct MapperKeyHash {
+        std::size_t operator()(const MapperKey&) const;
+    };
+    typedef std::tuple<C2String, unsigned, c2_status_t> MapperValue;
+    typedef std::pair<MapperKey, MapperValue> MapperKeyValue;
+    typedef std::list<MapperKeyValue>::iterator MapperKeyValuePointer;
+    std::list<MapperKeyValue> mUsageMapperLru;
+    std::unordered_map<MapperKey, MapperKeyValuePointer, MapperKeyHash> mUsageMapperCache;
+};
+}  // namespace android
+
+#endif  // STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
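
A hedged sketch of a UsageMapperFn that mirrors the store policy added earlier in this patch (CPU-visible buffers map to the "system" heap, everything else to "system-uncached" when that heap exists); the zero flag value and the 4096-byte block size are assumptions:

    #include <C2DmaBufAllocator.h>

    // Hedged sketch of a usage mapper: (usage, capacity) => (heapName, flags).
    android::C2DmaBufAllocator::UsageMapperFn mapper =
            [](C2MemoryUsage usage, size_t /* capacity */,
               C2String *heapName, unsigned *flags) -> c2_status_t {
        if (android::C2DmaBufAllocator::system_uncached_supported() &&
                !(usage.expected & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE))) {
            *heapName = "system-uncached";
        } else {
            *heapName = "system";
        }
        *flags = 0;  // assumed: no extra heap flags are needed
        return C2_OK;
    };
    // e.g. allocator->setUsageMapper(mapper, 0,
    //          C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE, 4096);
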
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
new file mode 100644
index 0000000..d4bed26
--- /dev/null
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+#define STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
+
+
+#include <C2Buffer.h>
+
+class C2SurfaceSyncMemory;
+
+/**
+ * C2Fence implementation factory
+ */
+struct _C2FenceFactory {
+
+    class SurfaceFenceImpl;
+
+    /**
+     * Creates a C2Fence for a BufferQueue-based blockpool.
+     *
+     * \param syncMem           Shared memory object for synchronization between processes.
+     * \param waitId            wait id used to track status changes for the C2Fence.
+     */
+    static C2Fence CreateSurfaceFence(
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+            uint32_t waitId);
+};
+
+
+#endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
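
As an editorial sketch of how a fence might be minted from the shared sync memory (syncMem is assumed to be a valid C2SurfaceSyncMemory created or imported elsewhere):

    #include <C2FenceFactory.h>
    #include <C2SurfaceSyncObj.h>

    // Hedged sketch: capture the current wait id under the sync lock and wrap
    // it into a C2Fence that signals on the next state change.
    C2Fence MakeDequeueFence(const std::shared_ptr<C2SurfaceSyncMemory> &syncMem) {
        C2SyncVariables *sync = syncMem->mem();
        sync->lock();
        uint32_t waitId = sync->getWaitIdLocked();
        sync->unlock();
        return _C2FenceFactory::CreateSurfaceFence(syncMem, waitId);
    }
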
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index a14e0d3..dc82e82 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -47,6 +47,17 @@
          */
         ION = PLATFORM_START,
 
+        /**
+         * ID of the DMA-Buf Heap (ion replacement) backed platform allocator.
+         *
+         * C2Handle consists of:
+         *   fd  shared dmabuf buffer handle
+         *   int size (lo 32 bits)
+         *   int size (hi 32 bits)
+         *   int magic '\xc2io\x00'
+         */
+        DMABUFHEAP = ION,
+
         /**
          * ID of the gralloc backed platform allocator.
          *
@@ -127,12 +138,38 @@
         std::shared_ptr<C2BlockPool> *pool);
 
 /**
+ * Creates a block pool.
+ * \param allocatorId  the allocator ID which is used to allocate blocks
+ * \param components    the components using the block pool
+ * \param pool          pointer to where the created block pool shall be stored on success.
+ *                      nullptr will be stored here on failure
+ *
+ * \retval C2_OK        the operation was successful
+ * \retval C2_BAD_VALUE one of the components is null
+ * \retval C2_NOT_FOUND if the allocator does not exist
+ * \retval C2_NO_MEMORY not enough memory to create a block pool
+ */
+c2_status_t CreateCodec2BlockPool(
+        C2PlatformAllocatorStore::id_t allocatorId,
+        const std::vector<std::shared_ptr<const C2Component>> &components,
+        std::shared_ptr<C2BlockPool> *pool);
+
+/**
  * Returns the platform component store.
  * \retval nullptr if the platform component store could not be obtained
  */
 std::shared_ptr<C2ComponentStore> GetCodec2PlatformComponentStore();
 
 /**
+ * Returns a component store assembled from the given component factory functions.
+ * NOTE: For testing only
+ * \retval nullptr if the test component store could not be obtained
+ */
+std::shared_ptr<C2ComponentStore> GetTestComponentStore(
+        std::vector<std::tuple<C2String, C2ComponentFactory::CreateCodec2FactoryFunc,
+        C2ComponentFactory::DestroyCodec2FactoryFunc>>);
+
+/**
  * Sets the preferred component store in this process for the sole purpose of accessing its
  * interface. If this is not called, the default IComponentStore HAL (if exists) is the preferred
  * store for this purpose. If the default IComponentStore HAL is not present, the platform
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
new file mode 100644
index 0000000..ac87fe4
--- /dev/null
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+#define STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
+
+#include <cutils/native_handle.h>
+#include <memory>
+#include <atomic>
+
+#include <C2Buffer.h>
+
+/**
+ * Futex-based lock/wait implementation for sharing output buffer allocation
+ * information between Framework and HAL.
+ */
+struct C2SyncVariables {
+    enum SyncStatus : uint32_t {
+           STATUS_INIT = 0,         // When surface configuration starts.
+           STATUS_ACTIVE = 1,       // When surface configuration finishes.
+                                    // STATUS_INIT -> STATUS_ACTIVE
+           STATUS_SWITCHING = 2,    // When the surface is replaced by a new surface
+                                    // during surface configuration.
+                                    // STATUS_ACTIVE -> STATUS_SWITCHING
+    };
+
+    /**
+     * Lock the memory region
+     */
+    int lock();
+
+    /**
+     * Unlock the memory region
+     */
+    int unlock();
+
+    /**
+     * Set initial dequeued buffer count.
+     *
+     * \param maxDequeueCount           Initial value of # of max dequeued buffer count
+     * \param curDequeueCount           Initial value of # of current dequeued buffer count
+     */
+    void setInitialDequeueCountLocked(int32_t maxDequeueCount, int32_t curDequeueCount);
+
+    /**
+     * Get a waitId which will be used to implement fence.
+     */
+    uint32_t getWaitIdLocked();
+
+    /**
+     * Return whether the upcoming dequeue operation is not blocked.
+     * If it is blocked and waitId is non-null, waitId is returned to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool isDequeueableLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify a buffer is queued. Return whether the upcoming dequeue operation
+     * is not blocked. If it is blocked and waitId is non-null, waitId is returned
+     * to be used for waiting.
+     *
+     * \retval false    dequeue operation is blocked now.
+     * \retval true     dequeue operation is possible.
+     */
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+
+    /**
+     * Notify a buffer is dequeued.
+     */
+    void notifyDequeuedLocked();
+
+    /**
+     * Set sync status.
+     */
+    void setSyncStatusLocked(SyncStatus status);
+
+    /**
+     * Get sync status.
+     */
+    C2SyncVariables::SyncStatus getSyncStatusLocked();
+
+    /**
+     * Update current max dequeue count.
+     */
+    void updateMaxDequeueCountLocked(int32_t maxDequeueCount);
+
+    /**
+     * Wait until status is no longer equal to waitId, or until timeout.
+     *
+     * \param waitId            internal status for waiting until it is changed.
+     * \param timeoutNs         nanoseconds until timeout.
+     *
+     * \retval C2_TIMEDOUT      no change happened before the timeout.
+     * \retval C2_BAD_VALUE     invalid wait request.
+     * \retval C2_OK            change was signalled.
+     */
+    c2_status_t waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs);
+
+    C2SyncVariables() {}
+
+private:
+    /**
+     * Signal one waiter to wake up.
+     */
+    int signal();
+
+    /**
+     * Signal all waiters to wake up.
+     */
+    int broadcast();
+
+    /**
+     * Wait for a signal or broadcast.
+     */
+    int wait();
+
+    std::atomic<uint32_t> mLock;
+    std::atomic<uint32_t> mCond;
+    int32_t mMaxDequeueCount;
+    int32_t mCurDequeueCount;
+    SyncStatus mStatus;
+};
+
+/**
+ * Shared memory in order to synchronize information for Surface(IGBP)
+ * based output buffer allocation.
+ */
+class C2SurfaceSyncMemory {
+public:
+    /**
+     * Shared memory handle in order to synchronize information for
+     * Surface based output buffer allocation.
+     */
+    struct HandleSyncMem : public native_handle_t {
+        HandleSyncMem(int fd, size_t size) :
+            native_handle_t(cHeader),
+            mFds{fd},
+            mInts{int(size & 0xFFFFFFFF),
+                int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+        /** Returns a file descriptor of the shared memory
+         * \return a file descriptor representing the shared memory
+         */
+        int memFd() const {return mFds.mMem;}
+
+        /** Returns the size of the shared memory */
+        size_t size() const {
+            return size_t(unsigned(mInts.mSizeLo))
+                    | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+        }
+
+        /** Check whether the native handle is in the form of HandleSyncMem
+         *
+         * \return whether the native handle is compatible
+         */
+        static bool isValid(const native_handle_t * const o);
+
+    protected:
+        struct {
+            int mMem;
+        } mFds;
+        struct {
+            int mSizeLo;
+            int mSizeHi;
+            int mMagic;
+        } mInts;
+    private:
+        enum {
+            kMagic = 'ssm\x00',
+            numFds = sizeof(mFds) / sizeof(int),
+            numInts = sizeof(mInts) / sizeof(int),
+            version = sizeof(native_handle_t)
+        };
+        const static native_handle_t cHeader;
+    };
+
+    /**
+     * Imports a shared memory object from a native handle (the shared memory must already exist).
+     * This is usually used after native_handle_t is passed via RPC.
+     *
+     * \param handle        handle representing shared memory for output buffer allocation.
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Import(native_handle_t *handle);
+
+    /**
+     * Creates a shared memory object for synchronization of output buffer allocation.
+     * Shared memory creation should be done explicitly.
+     *
+     * \param fd            file descriptor to shared memory
+     * \param size          size of the shared memory
+     */
+    static std::shared_ptr<C2SurfaceSyncMemory> Create(int fd, size_t size);
+
+    /**
+     * Returns a handle representing the shared memory for synchronization of
+     * output buffer allocation.
+     */
+    native_handle_t *handle();
+
+    /**
+     * Returns synchronization object which will provide synchronization primitives.
+     *
+     * \return a ptr to synchronization primitive class
+     */
+    C2SyncVariables *mem();
+
+    ~C2SurfaceSyncMemory();
+
+private:
+    bool mInit;
+    HandleSyncMem *mHandle;
+    C2SyncVariables *mMem;
+
+    C2SurfaceSyncMemory();
+};
+
+#endif // STAGEFRIGHT_CODEC2_SURFACE_SYNC_OBJ_H_
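
For readers new to this protocol, a hedged sketch of the dequeue accounting a producer-side pool performs with C2SyncVariables (editorial; sync is assumed to point into memory obtained via C2SurfaceSyncMemory::mem()):

    #include <C2SurfaceSyncObj.h>

    // Hedged sketch: reserve one dequeue slot if possible; otherwise report the
    // waitId the caller can turn into a fence and wait on.
    bool TryAccountDequeue(C2SyncVariables *sync, uint32_t *waitId) {
        sync->lock();
        if (!sync->isDequeueableLocked(waitId)) {
            sync->unlock();
            return false;  // blocked: wait on a fence built from *waitId
        }
        sync->notifyDequeuedLocked();  // count this dequeue against the max
        sync->unlock();
        return true;
    }
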
diff --git a/media/codec2/vndk/include/util/C2InterfaceUtils.h b/media/codec2/vndk/include/util/C2InterfaceUtils.h
index e9037e5..13bdac3 100644
--- a/media/codec2/vndk/include/util/C2InterfaceUtils.h
+++ b/media/codec2/vndk/include/util/C2InterfaceUtils.h
@@ -17,6 +17,7 @@
 #ifndef C2UTILS_INTERFACE_UTILS_H_
 #define C2UTILS_INTERFACE_UTILS_H_
 
+#include <C2Component.h>
 #include <C2Param.h>
 #include <C2Work.h>
 
@@ -1130,6 +1131,19 @@
 
 };
 
+/**
+ * Utility class for C2ComponentInterface
+ */
+struct C2InterfaceUtils {
+    /**
+     * Create traits from C2ComponentInterface. Note that rank cannot be queried from interfaces,
+     * so it is left untouched.
+     */
+    static bool FillTraitsFromInterface(
+            C2Component::Traits *traits,
+            const std::shared_ptr<C2ComponentInterface> &intf);
+};
+
 #include <util/C2Debug-interface.h>
 
 #endif  // C2UTILS_INTERFACE_UTILS_H_
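
A brief, hedged usage sketch of the new helper, mirroring how C2Store.cpp calls it earlier in this patch; intf is assumed to be a valid component interface:

    #include <util/C2InterfaceUtils.h>

    // Hedged sketch: query name/kind/domain/mediaType from the interface.
    // Rank cannot be queried here, so the caller assigns it separately.
    bool QueryTraits(const std::shared_ptr<C2ComponentInterface> &intf,
                     C2Component::Traits *traits) {
        return C2InterfaceUtils::FillTraitsFromInterface(traits, intf);
    }
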
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index 4ae946a..c510fca 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -52,6 +52,8 @@
 
 struct C2BufferQueueBlockPoolData;
 
+class C2SurfaceSyncMemory;
+
 /**
  * Internal only interface for creating blocks by block pool/buffer passing implementations.
  *
@@ -279,6 +281,8 @@
      *                 anymore.
      * \param igbp     \c IGraphicBufferProducer instance to be assigned to the
      *                 block. This is not needed when the block is local.
+     * \param syncMem  Memory block which will support synchronization
+     *                 between Framework and HAL.
      *
      * \return The previous held status.
      */
@@ -287,7 +291,8 @@
             const std::shared_ptr<_C2BlockPoolData>& poolData,
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
-                                V2_0::IGraphicBufferProducer>& igbp = nullptr);
+                                V2_0::IGraphicBufferProducer>& igbp = nullptr,
+            std::shared_ptr<C2SurfaceSyncMemory> syncMem = nullptr);
 
     /**
      * Prepare a block to be transferred to other process. This blocks
@@ -358,6 +363,7 @@
             const std::shared_ptr<int>& owner,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
                                 V2_0::IGraphicBufferProducer>& igbp,
+            std::shared_ptr<C2SurfaceSyncMemory>,
             uint32_t generation,
             uint64_t bqId,
             int32_t bqSlot);
diff --git a/media/codec2/vndk/internal/C2HandleIonInternal.h b/media/codec2/vndk/internal/C2HandleIonInternal.h
index c0e1d83..c67698c 100644
--- a/media/codec2/vndk/internal/C2HandleIonInternal.h
+++ b/media/codec2/vndk/internal/C2HandleIonInternal.h
@@ -28,7 +28,10 @@
           mFds{ bufferFd },
           mInts{ int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic } { }
 
-    static bool isValid(const C2Handle * const o);
+    static bool IsValid(const C2Handle * const o);
+
+    // deprecated
+    static bool isValid(const C2Handle * const o) { return IsValid(o); }
 
     int bufferFd() const { return mFds.mBuffer; }
     size_t size() const {
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 62936f6..169de0c 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -29,6 +29,8 @@
 #include <C2AllocatorGralloc.h>
 #include <C2BqBufferPriv.h>
 #include <C2BlockInternal.h>
+#include <C2FenceFactory.h>
+#include <C2SurfaceSyncObj.h>
 
 #include <list>
 #include <map>
@@ -54,59 +56,13 @@
 using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::V2_0
         ::IGraphicBufferProducer;
 
-struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
-
-    bool held;
-    bool local;
-    uint32_t generation;
-    uint64_t bqId;
-    int32_t bqSlot;
-    bool transfer; // local transfer to remote
-    bool attach; // attach on remote
-    bool display; // display on remote;
-    std::weak_ptr<int> owner;
-    sp<HGraphicBufferProducer> igbp;
-    std::shared_ptr<C2BufferQueueBlockPool::Impl> localPool;
-    mutable std::mutex lock;
-
-    virtual type_t getType() const override {
-        return TYPE_BUFFERQUEUE;
-    }
-
-    // Create a remote BlockPoolData.
-    C2BufferQueueBlockPoolData(
-            uint32_t generation, uint64_t bqId, int32_t bqSlot,
-            const std::shared_ptr<int> &owner,
-            const sp<HGraphicBufferProducer>& producer);
-
-    // Create a local BlockPoolData.
-    C2BufferQueueBlockPoolData(
-            uint32_t generation, uint64_t bqId, int32_t bqSlot,
-            const std::shared_ptr<C2BufferQueueBlockPool::Impl>& pool);
-
-    virtual ~C2BufferQueueBlockPoolData() override;
-
-    int migrate(const sp<HGraphicBufferProducer>& producer,
-                uint32_t toGeneration, uint64_t toBqId,
-                sp<GraphicBuffer> *buffers, uint32_t oldGeneration);
-};
-
 bool _C2BlockFactory::GetBufferQueueData(
         const std::shared_ptr<const _C2BlockPoolData>& data,
         uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) {
     if (data && data->getType() == _C2BlockPoolData::TYPE_BUFFERQUEUE) {
-        if (generation) {
-            const std::shared_ptr<const C2BufferQueueBlockPoolData> poolData =
-                    std::static_pointer_cast<const C2BufferQueueBlockPoolData>(data);
-            std::scoped_lock<std::mutex> lock(poolData->lock);
-            *generation = poolData->generation;
-            if (bqId) {
-                *bqId = poolData->bqId;
-            }
-            if (bqSlot) {
-                *bqSlot = poolData->bqSlot;
-            }
-        }
+        const std::shared_ptr<const C2BufferQueueBlockPoolData> poolData =
+                std::static_pointer_cast<const C2BufferQueueBlockPoolData>(data);
+        poolData->getBufferQueueData(generation, bqId, bqSlot);
         return true;
     }
     return false;
@@ -115,29 +71,18 @@
 bool _C2BlockFactory::HoldBlockFromBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
-        const sp<HGraphicBufferProducer>& igbp) {
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (!poolData->local) {
-        poolData->owner = owner;
-        poolData->igbp = igbp;
-    }
-    if (poolData->held) {
-        poolData->held = true;
-        return false;
-    }
-    poolData->held = true;
-    return true;
+    return poolData->holdBlockFromBufferQueue(owner, igbp, syncMem);
 }
 
 bool _C2BlockFactory::BeginTransferBlockToClient(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    poolData->transfer = true;
-    return true;
+    return poolData->beginTransferBlockToClient();
 }
 
 bool _C2BlockFactory::EndTransferBlockToClient(
@@ -145,28 +90,14 @@
         bool transfer) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    poolData->transfer = false;
-    if (transfer) {
-        poolData->held = false;
-    }
-    return true;
+    return poolData->endTransferBlockToClient(transfer);
 }
 
 bool _C2BlockFactory::BeginAttachBlockToBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || poolData->display ||
-        poolData->attach || !poolData->held) {
-        return false;
-    }
-    if (poolData->bqId == 0) {
-        return false;
-    }
-    poolData->attach = true;
-    return true;
+    return poolData->beginAttachBlockToBufferQueue();
 }
 
 // if display was tried during attach, buffer should be retired ASAP.
@@ -174,47 +105,20 @@
         const std::shared_ptr<_C2BlockPoolData>& data,
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
         uint32_t generation,
         uint64_t bqId,
         int32_t bqSlot) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || !poolData->attach ) {
-        return false;
-    }
-    if (poolData->display) {
-        poolData->attach = false;
-        poolData->held = false;
-        return false;
-    }
-    poolData->attach = false;
-    poolData->held = true;
-    poolData->owner = owner;
-    poolData->igbp = igbp;
-    poolData->generation = generation;
-    poolData->bqId = bqId;
-    poolData->bqSlot = bqSlot;
-    return true;
+    return poolData->endAttachBlockToBufferQueue(owner, igbp, syncMem, generation, bqId, bqSlot);
 }
 
 bool _C2BlockFactory::DisplayBlockToBufferQueue(
         const std::shared_ptr<_C2BlockPoolData>& data) {
     const std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
             std::static_pointer_cast<C2BufferQueueBlockPoolData>(data);
-    std::scoped_lock<std::mutex> lock(poolData->lock);
-    if (poolData->local || poolData->display || !poolData->held) {
-        return false;
-    }
-    if (poolData->bqId == 0) {
-        return false;
-    }
-    poolData->display = true;
-    if (poolData->attach) {
-        return false;
-    }
-    poolData->held = false;
-    return true;
+    return poolData->displayBlockToBufferQueue();
 }
 
 std::shared_ptr<C2GraphicBlock> _C2BlockFactory::CreateGraphicBlock(
@@ -223,7 +127,7 @@
     static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
 
     std::shared_ptr<C2GraphicAllocation> alloc;
-    if (C2AllocatorGralloc::isValid(handle)) {
+    if (C2AllocatorGralloc::CheckHandle(handle)) {
         uint32_t width;
         uint32_t height;
         uint32_t format;
@@ -267,8 +171,8 @@
     return stamp;
 }
 
-bool getGenerationNumber(const sp<HGraphicBufferProducer> &producer,
-                         uint32_t *generation) {
+bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
+                                 uint32_t *generation, uint64_t *usage) {
     status_t status{};
     int slot{};
     bool bufferNeedsReallocation{};
@@ -302,7 +206,7 @@
     // instead of a new allocation.
     transResult = producer->requestBuffer(
             slot,
-            [&status, &slotBuffer, &generation](
+            [&status, &slotBuffer, &generation, &usage](
                     HStatus hStatus,
                     HBuffer const& hBuffer,
                     uint32_t generationNumber){
@@ -310,6 +214,7 @@
                         h2b(hBuffer, &slotBuffer) &&
                         slotBuffer) {
                     *generation = generationNumber;
+                    *usage = slotBuffer->getUsage();
                     slotBuffer->setGenerationNumber(generationNumber);
                 } else {
                     status = android::BAD_VALUE;
@@ -330,12 +235,58 @@
 class C2BufferQueueBlockPool::Impl
         : public std::enable_shared_from_this<C2BufferQueueBlockPool::Impl> {
 private:
+    c2_status_t dequeueBuffer(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            C2AndroidMemoryUsage androidUsage,
+            int *slot, bool *needsRealloc, sp<Fence> *fence) {
+        status_t status{};
+        using Input = HGraphicBufferProducer::DequeueBufferInput;
+        using Output = HGraphicBufferProducer::DequeueBufferOutput;
+        Return<void> transResult = mProducer->dequeueBuffer(
+                Input{
+                    width,
+                    height,
+                    format,
+                    androidUsage.asGrallocUsage()},
+                [&status, slot, needsRealloc,
+                 fence](HStatus hStatus,
+                         int32_t hSlot,
+                         Output const& hOutput) {
+                    *slot = static_cast<int>(hSlot);
+                    if (!h2b(hStatus, &status) ||
+                            !h2b(hOutput.fence, fence)) {
+                        status = ::android::BAD_VALUE;
+                    } else {
+                        *needsRealloc =
+                                hOutput.bufferNeedsReallocation;
+                    }
+                });
+        if (!transResult.isOk() || status != android::OK) {
+            if (transResult.isOk()) {
+                ++mDqFailure;
+                if (status == android::INVALID_OPERATION ||
+                    status == android::TIMED_OUT ||
+                    status == android::WOULD_BLOCK) {
+                    // Dequeue buffer is blocked temporarily. Retrying is
+                    // required.
+                    return C2_BLOCKING;
+                }
+            }
+            ALOGD("cannot dequeue buffer %d", status);
+            return C2_BAD_VALUE;
+        }
+        return C2_OK;
+    }
+
     c2_status_t fetchFromIgbp_l(
             uint32_t width,
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *c2Fence) {
         // We have an IGBP now.
         C2AndroidMemoryUsage androidUsage = usage;
         status_t status{};
@@ -344,41 +295,42 @@
         sp<Fence> fence = new Fence();
         ALOGV("tries to dequeue buffer");
 
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
         { // Call dequeueBuffer().
-            using Input = HGraphicBufferProducer::DequeueBufferInput;
-            using Output = HGraphicBufferProducer::DequeueBufferOutput;
-            Return<void> transResult = mProducer->dequeueBuffer(
-                    Input{
-                        width,
-                        height,
-                        format,
-                        androidUsage.asGrallocUsage()},
-                    [&status, &slot, &bufferNeedsReallocation,
-                     &fence](HStatus hStatus,
-                             int32_t hSlot,
-                             Output const& hOutput) {
-                        slot = static_cast<int>(hSlot);
-                        if (!h2b(hStatus, &status) ||
-                                !h2b(hOutput.fence, &fence)) {
-                            status = ::android::BAD_VALUE;
-                        } else {
-                            bufferNeedsReallocation =
-                                    hOutput.bufferNeedsReallocation;
-                        }
-                    });
-            if (!transResult.isOk() || status != android::OK) {
-                if (transResult.isOk()) {
-                    ++mDqFailure;
-                    if (status == android::INVALID_OPERATION ||
-                        status == android::TIMED_OUT ||
-                        status == android::WOULD_BLOCK) {
-                        // Dequeue buffer is blocked temporarily. Retrying is
-                        // required.
-                        return C2_BLOCKING;
+            c2_status_t c2Status;
+            if (syncVar) {
+                uint32_t waitId;
+                syncVar->lock();
+                if (!syncVar->isDequeueableLocked(&waitId)) {
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
                     }
+                    return C2_BLOCKING;
                 }
-                ALOGD("cannot dequeue buffer %d", status);
-                return C2_BAD_VALUE;
+                if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_ACTIVE) {
+                    waitId = syncVar->getWaitIdLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                    return C2_BLOCKING;
+                }
+                syncVar->notifyDequeuedLocked();
+                syncVar->unlock();
+                c2Status = dequeueBuffer(width, height, format, androidUsage,
+                              &slot, &bufferNeedsReallocation, &fence);
+                if (c2Status != C2_OK) {
+                    syncVar->lock();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                }
+            } else {
+                c2Status = dequeueBuffer(width, height, format, usage,
+                              &slot, &bufferNeedsReallocation, &fence);
+            }
+            if (c2Status != C2_OK) {
+                return c2Status;
             }
             mDqFailure = 0;
             mLastDqTs = getTimestampNow();
@@ -389,18 +341,41 @@
             return C2_BAD_VALUE;
         }
         ALOGV("dequeued a buffer successfully");
+        bool dequeueable = false;
+        uint32_t waitId;
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    dequeueable = syncVar->notifyQueuedLocked(&waitId);
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = dequeueable ? C2Fence() :
+                                _C2FenceFactory::CreateSurfaceFence(mSyncMem, waitId);
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BLOCKING;
             }
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             } else if (mRenderCallback) {
                 nsecs_t signalTime = fence->getSignalTime();
@@ -440,7 +415,17 @@
                 return C2_BAD_VALUE;
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
-                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                if (syncVar) {
+                    syncVar->lock();
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->notifyQueuedLocked();
+                    syncVar->unlock();
+                    if (c2Fence) {
+                        *c2Fence = C2Fence();
+                    }
+                } else {
+                    (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                }
                 return C2_BAD_VALUE;
             }
             if (mGeneration == 0) {
@@ -463,20 +448,36 @@
                 std::shared_ptr<C2GraphicAllocation> alloc;
                 c2_status_t err = mAllocator->priorGraphicAllocation(c2Handle, &alloc);
                 if (err != C2_OK) {
+                    native_handle_close(c2Handle);
+                    native_handle_delete(c2Handle);
                     return err;
                 }
                 std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                         std::make_shared<C2BufferQueueBlockPoolData>(
                                 slotBuffer->getGenerationNumber(),
                                 mProducerId, slot,
-                                shared_from_this());
+                                mProducer, mSyncMem, 0);
                 mPoolDatas[slot] = poolData;
                 *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
                 return C2_OK;
             }
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
-            (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            if (syncVar) {
+                syncVar->lock();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->notifyQueuedLocked();
+                syncVar->unlock();
+                if (c2Fence) {
+                    *c2Fence = C2Fence();
+                }
+            } else {
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+            }
+            return C2_BAD_VALUE;
+        }
+        if (c2Fence) {
+            *c2Fence = C2Fence();
         }
         return C2_BAD_VALUE;
     }
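
The cancel paths in the hunk above all follow one pattern: when a dequeued slot has to be handed back to the producer, the buffer is cancelled while the shared sync lock is held and any waiter tracked by the C2SurfaceSyncMemory is woken. A minimal sketch of that pattern, reusing only calls that appear in this hunk (the helper name cancelAndNotify is hypothetical and not part of the patch):

    // Hypothetical helper, illustrating the cancel-and-notify pattern above.
    static void cancelAndNotify(const sp<HGraphicBufferProducer> &producer,
                                int32_t slot,
                                const hidl_handle &fence,
                                const std::shared_ptr<C2SurfaceSyncMemory> &syncMem) {
        C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
        if (syncVar) {
            syncVar->lock();
            (void)producer->cancelBuffer(slot, fence).isOk();
            syncVar->notifyQueuedLocked();  // one more slot becomes available to waiters
            syncVar->unlock();
        } else {
            (void)producer->cancelBuffer(slot, fence).isOk();
        }
    }
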
@@ -506,7 +507,8 @@
             uint32_t height,
             uint32_t format,
             C2MemoryUsage usage,
-            std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+            std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+            C2Fence *fence) {
         block->reset();
         if (mInit != C2_OK) {
             return mInit;
@@ -537,17 +539,19 @@
             }
             std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                     std::make_shared<C2BufferQueueBlockPoolData>(
-                            0, (uint64_t)0, ~0, shared_from_this());
+                            0, (uint64_t)0, ~0, nullptr, nullptr, 0);
             *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
             ALOGV("allocated a buffer successfully");
 
             return C2_OK;
         }
-        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block);
+        c2_status_t status = fetchFromIgbp_l(width, height, format, usage, block, fence);
         if (status == C2_BLOCKING) {
             lock.unlock();
-            // in order not to drain cpu from component's spinning
-            ::usleep(kMaxIgbpRetryDelayUs);
+            if (!fence) {
+                // back off so the component's retry loop does not spin the CPU
+                ::usleep(kMaxIgbpRetryDelayUs);
+            }
         }
         return status;
     }
@@ -557,10 +561,12 @@
         mRenderCallback = renderCallback;
     }
 
+    /* This overload is kept for compatibility with requests from old HALs. */
     void configureProducer(const sp<HGraphicBufferProducer> &producer) {
         uint64_t producerId = 0;
         uint32_t generation = 0;
-        bool haveGeneration = false;
+        uint64_t usage = 0;
+        bool bqInformation = false;
         if (producer) {
             Return<uint64_t> transResult = producer->getUniqueId();
             if (!transResult.isOk()) {
@@ -568,14 +574,32 @@
                 return;
             }
             producerId = static_cast<uint64_t>(transResult);
-            // TODO: provide gneration number from parameter.
-            haveGeneration = getGenerationNumber(producer, &generation);
-            if (!haveGeneration) {
+            bqInformation = getGenerationNumberAndUsage(producer, &generation, &usage);
+            if (!bqInformation) {
                 ALOGW("get generationNumber failed %llu",
                       (unsigned long long)producerId);
             }
         }
+        configureProducer(producer, nullptr, producerId, generation, usage, bqInformation);
+    }
+
+    void configureProducer(const sp<HGraphicBufferProducer> &producer,
+                           native_handle_t *syncHandle,
+                           uint64_t producerId,
+                           uint32_t generation,
+                           uint64_t usage,
+                           bool bqInformation) {
+        std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
+        if (syncHandle) {
+            if (!producer) {
+                native_handle_close(syncHandle);
+                native_handle_delete(syncHandle);
+            } else {
+                c2SyncMem = C2SurfaceSyncMemory::Import(syncHandle);
+            }
+        }
         int migrated = 0;
+        std::shared_ptr<C2SurfaceSyncMemory> oldMem;
         // poolDatas dtor should not be called during lock is held.
         std::shared_ptr<C2BufferQueueBlockPoolData>
                 poolDatas[NUM_BUFFER_SLOTS];
@@ -595,22 +619,30 @@
             if (producer) {
                 mProducer = producer;
                 mProducerId = producerId;
-                mGeneration = haveGeneration ? generation : 0;
+                mGeneration = bqInformation ? generation : 0;
             } else {
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
                 ALOGW("invalid producer producer(%d), generation(%d)",
-                      (bool)producer, haveGeneration);
+                      (bool)producer, bqInformation);
             }
-            if (mProducer && haveGeneration) { // migrate buffers
+            oldMem = mSyncMem; // prevent destruction while the lock is held.
+            mSyncMem = c2SyncMem;
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                syncVar->setSyncStatusLocked(C2SyncVariables::STATUS_ACTIVE);
+                syncVar->unlock();
+            }
+            if (mProducer && bqInformation) { // migrate buffers
                 for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                     std::shared_ptr<C2BufferQueueBlockPoolData> data =
                             mPoolDatas[i].lock();
                     if (data) {
                         int slot = data->migrate(
-                                mProducer, generation,
-                                producerId, mBuffers, oldGeneration);
+                                mProducer, generation, usage,
+                                producerId, mBuffers[i], oldGeneration, mSyncMem);
                         if (slot >= 0) {
                             buffers[slot] = mBuffers[i];
                             poolDatas[slot] = data;
@@ -624,7 +656,7 @@
                 mPoolDatas[i] = poolDatas[i];
             }
         }
-        if (producer && haveGeneration) {
+        if (producer && bqInformation) {
             ALOGD("local generation change %u , "
                   "bqId: %llu migrated buffers # %d",
                   generation, (unsigned long long)producerId, migrated);
@@ -634,17 +666,6 @@
 private:
     friend struct C2BufferQueueBlockPoolData;
 
-    void cancel(uint32_t generation, uint64_t igbp_id, int32_t igbp_slot) {
-        bool cancelled = false;
-        {
-        std::scoped_lock<std::mutex> lock(mMutex);
-        if (generation == mGeneration && igbp_id == mProducerId && mProducer) {
-            (void)mProducer->cancelBuffer(igbp_slot, hidl_handle{}).isOk();
-            cancelled = true;
-        }
-        }
-    }
-
     c2_status_t mInit;
     uint64_t mProducerId;
     uint32_t mGeneration;
@@ -662,71 +683,123 @@
 
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
+
+    std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
         const std::shared_ptr<int>& owner,
         const sp<HGraphicBufferProducer>& producer) :
-        held(producer && bqId != 0), local(false),
-        generation(generation), bqId(bqId), bqSlot(bqSlot),
-        transfer(false), attach(false), display(false),
-        owner(owner), igbp(producer),
-        localPool() {
+        mLocal(false), mHeld(producer && bqId != 0),
+        mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
+        mCurrentGeneration(generation), mCurrentBqId(bqId),
+        mTransfer(false), mAttach(false), mDisplay(false),
+        mOwner(owner), mIgbp(producer) {
 }
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
-        const std::shared_ptr<C2BufferQueueBlockPool::Impl>& pool) :
-        held(true), local(true),
-        generation(generation), bqId(bqId), bqSlot(bqSlot),
-        transfer(false), attach(false), display(false),
-        igbp(pool ? pool->mProducer : nullptr),
-        localPool(pool) {
+        const android::sp<HGraphicBufferProducer>& producer,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem, int noUse) :
+        mLocal(true), mHeld(true),
+        mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
+        mCurrentGeneration(generation), mCurrentBqId(bqId),
+        mTransfer(false), mAttach(false), mDisplay(false),
+        mIgbp(producer), mSyncMem(syncMem) {
+            (void)noUse;
 }
 
 C2BufferQueueBlockPoolData::~C2BufferQueueBlockPoolData() {
-    if (!held || bqId == 0) {
+    if (!mHeld || mBqId == 0 || !mIgbp) {
         return;
     }
-    if (local) {
-        if (localPool) {
-            localPool->cancel(generation, bqId, bqSlot);
+
+    if (mLocal) {
+        if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId) {
+            C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+            if (syncVar) {
+                syncVar->lock();
+                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
+                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                    syncVar->notifyQueuedLocked();
+                }
+                syncVar->unlock();
+            } else {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+            }
         }
-    } else if (igbp && !owner.expired()) {
-        igbp->cancelBuffer(bqSlot, hidl_handle{}).isOk();
+    } else if (!mOwner.expired()) {
+        C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        } else {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
+        }
     }
 }
+
+C2BufferQueueBlockPoolData::type_t C2BufferQueueBlockPoolData::getType() const {
+    return TYPE_BUFFERQUEUE;
+}
+
 int C2BufferQueueBlockPoolData::migrate(
         const sp<HGraphicBufferProducer>& producer,
-        uint32_t toGeneration, uint64_t toBqId,
-        sp<GraphicBuffer> *buffers, uint32_t oldGeneration) {
-    std::scoped_lock<std::mutex> l(lock);
-    if (!held || bqId == 0) {
+        uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
+        sp<GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
+    std::scoped_lock<std::mutex> l(mLock);
+
+    mCurrentBqId = toBqId;
+    mCurrentGeneration = toGeneration;
+
+    if (!mHeld || mBqId == 0) {
         ALOGV("buffer is not owned");
         return -1;
     }
-    if (!local || !localPool) {
+    if (!mLocal) {
         ALOGV("pool is not local");
         return -1;
     }
-    if (bqSlot < 0 || bqSlot >= NUM_BUFFER_SLOTS || !buffers[bqSlot]) {
+    if (mBqSlot < 0 || mBqSlot >= NUM_BUFFER_SLOTS) {
         ALOGV("slot is not in effect");
         return -1;
     }
-    if (toGeneration == generation && bqId == toBqId) {
+    if (!graphicBuffer) {
+        ALOGV("buffer is null");
+        return -1;
+    }
+    if (toGeneration == mGeneration && mBqId == toBqId) {
         ALOGV("cannot migrate to same bufferqueue");
         return -1;
     }
-    if (oldGeneration != generation) {
+    if (oldGeneration != mGeneration) {
         ALOGV("cannot migrate stale buffer");
+        return -1;
     }
-    if (transfer) {
+    if (mTransfer) {
         // either transferred or detached.
         ALOGV("buffer is in transfer");
         return -1;
     }
-    sp<GraphicBuffer> const& graphicBuffer = buffers[bqSlot];
+
+    if (toUsage != graphicBuffer->getUsage()) {
+        sp<GraphicBuffer> newBuffer = new GraphicBuffer(
+            graphicBuffer->handle, GraphicBuffer::CLONE_HANDLE,
+            graphicBuffer->width, graphicBuffer->height, graphicBuffer->format,
+            graphicBuffer->layerCount, toUsage | graphicBuffer->getUsage(), graphicBuffer->stride);
+        if (newBuffer->initCheck() == android::NO_ERROR) {
+            graphicBuffer = std::move(newBuffer);
+        } else {
+            ALOGW("%s() failed to update usage, original usage=%" PRIx64 ", toUsage=%" PRIx64,
+                  __func__, graphicBuffer->getUsage(), toUsage);
+        }
+    }
     graphicBuffer->setGenerationNumber(toGeneration);
 
     HBuffer hBuffer{};
@@ -755,13 +828,124 @@
         return -1;
     }
     ALOGV("local migration from gen %u : %u slot %d : %d",
-          generation, toGeneration, bqSlot, slot);
-    generation = toGeneration;
-    bqId = toBqId;
-    bqSlot = slot;
+          mGeneration, toGeneration, mBqSlot, slot);
+    mIgbp = producer;
+    mGeneration = toGeneration;
+    mBqId = toBqId;
+    mBqSlot = slot;
+    mSyncMem = syncMem;
+
+    C2SyncVariables *syncVar = syncMem ? syncMem->mem() : nullptr;
+    if (syncVar) {
+        syncVar->lock();
+        syncVar->notifyDequeuedLocked();
+        syncVar->unlock();
+    }
     return slot;
 }
 
+void C2BufferQueueBlockPoolData::getBufferQueueData(
+        uint32_t* generation, uint64_t* bqId, int32_t* bqSlot) const {
+    if (generation) {
+        std::scoped_lock<std::mutex> lock(mLock);
+        *generation = mGeneration;
+        if (bqId) {
+            *bqId = mBqId;
+        }
+        if (bqSlot) {
+            *bqSlot = mBqSlot;
+        }
+    }
+}
+
+bool C2BufferQueueBlockPoolData::holdBlockFromBufferQueue(
+        const std::shared_ptr<int>& owner,
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (!mLocal) {
+        mOwner = owner;
+        mIgbp = igbp;
+        mSyncMem = syncMem;
+    }
+    if (mHeld) {
+        return false;
+    }
+    mHeld = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::beginTransferBlockToClient() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    mTransfer = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::endTransferBlockToClient(bool transfer) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    mTransfer = false;
+    if (transfer) {
+        mHeld = false;
+    }
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::beginAttachBlockToBufferQueue() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || mDisplay ||
+        mAttach || !mHeld) {
+        return false;
+    }
+    if (mBqId == 0) {
+        return false;
+    }
+    mAttach = true;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::endAttachBlockToBufferQueue(
+        const std::shared_ptr<int>& owner,
+        const sp<HGraphicBufferProducer>& igbp,
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem,
+        uint32_t generation,
+        uint64_t bqId,
+        int32_t bqSlot) {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || !mAttach) {
+        return false;
+    }
+    if (mDisplay) {
+        mAttach = false;
+        mHeld = false;
+        return false;
+    }
+    mAttach = false;
+    mHeld = true;
+    mOwner = owner;
+    mIgbp = igbp;
+    mSyncMem = syncMem;
+    mGeneration = generation;
+    mBqId = bqId;
+    mBqSlot = bqSlot;
+    return true;
+}
+
+bool C2BufferQueueBlockPoolData::displayBlockToBufferQueue() {
+    std::scoped_lock<std::mutex> lock(mLock);
+    if (mLocal || mDisplay || !mHeld) {
+        return false;
+    }
+    if (mBqId == 0) {
+        return false;
+    }
+    mDisplay = true;
+    if (mAttach) {
+        return false;
+    }
+    mHeld = false;
+    return true;
+}
+
 C2BufferQueueBlockPool::C2BufferQueueBlockPool(
         const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId)
         : mAllocator(allocator), mLocalId(localId), mImpl(new Impl(allocator)) {}
@@ -775,7 +959,20 @@
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
     if (mImpl) {
-        return mImpl->fetchGraphicBlock(width, height, format, usage, block);
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
+    }
+    return C2_CORRUPTED;
+}
+
+c2_status_t C2BufferQueueBlockPool::fetchGraphicBlock(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        C2MemoryUsage usage,
+        std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+        C2Fence *fence /* nonnull */) {
+    if (mImpl) {
+        return mImpl->fetchGraphicBlock(width, height, format, usage, block, fence);
     }
     return C2_CORRUPTED;
 }
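
With the new fence-returning overload above, a caller no longer has to rely on the fixed usleep() back-off: when C2_BLOCKING is returned together with a surface fence, it can wait on that fence and retry once a slot is queued back. A caller-side sketch under that assumption (the helper, the timeout value, and the retry policy are illustrative and not part of the patch; it assumes C2Fence::wait(timeoutNs) from the Codec2 core headers):

    // Illustrative retry loop around the fence-returning fetchGraphicBlock().
    c2_status_t fetchWithFenceWait(const std::shared_ptr<C2BufferQueueBlockPool> &pool,
                                   uint32_t width, uint32_t height, uint32_t format,
                                   C2MemoryUsage usage,
                                   std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
        c2_status_t err;
        do {
            C2Fence fence;
            err = pool->fetchGraphicBlock(width, height, format, usage, block, &fence);
            if (err == C2_BLOCKING) {
                // Block on the fence until the consumer frees a slot,
                // instead of sleeping for a fixed interval.
                (void)fence.wait(4000000 /* 4ms, arbitrary */);
            }
        } while (err == C2_BLOCKING);
        return err;
    }
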
@@ -786,6 +983,18 @@
     }
 }
 
+void C2BufferQueueBlockPool::configureProducer(
+        const sp<HGraphicBufferProducer> &producer,
+        native_handle_t *syncMemory,
+        uint64_t bqId,
+        uint32_t generationId,
+        uint64_t consumerUsage) {
+    if (mImpl) {
+        mImpl->configureProducer(
+               producer, syncMemory, bqId, generationId, consumerUsage, true);
+    }
+}
+
 void C2BufferQueueBlockPool::setRenderCallback(const OnRenderCallback &renderCallback) {
     if (mImpl) {
         mImpl->setRenderCallback(renderCallback);
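
One subtle step in the block migration handled by this file is the usage update: when the new consumer usage differs from what the buffer was originally allocated with, migrate() re-wraps the same buffer handle into a new GraphicBuffer carrying the merged usage bits before attaching it to the new queue. A condensed sketch of just that step, mirroring the CLONE_HANDLE constructor call used in migrate() (the helper name is hypothetical):

    // Illustrative: re-wrap an existing buffer handle with additional usage bits.
    static sp<GraphicBuffer> withMergedUsage(const sp<GraphicBuffer> &buffer, uint64_t extraUsage) {
        if ((buffer->getUsage() & extraUsage) == extraUsage) {
            return buffer;  // already covers the requested usage
        }
        sp<GraphicBuffer> updated = new GraphicBuffer(
                buffer->handle, GraphicBuffer::CLONE_HANDLE,
                buffer->width, buffer->height, buffer->format,
                buffer->layerCount, buffer->getUsage() | extraUsage, buffer->stride);
        // Fall back to the original buffer if the re-import fails.
        return updated->initCheck() == android::NO_ERROR ? updated : buffer;
    }
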
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
new file mode 100644
index 0000000..e55bdc0
--- /dev/null
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -0,0 +1,263 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SurfaceSyncObj"
+#include <limits.h>
+#include <linux/futex.h>
+#include <sys/mman.h>
+#include <sys/syscall.h>
+#include <sys/time.h>
+#include <utils/Log.h>
+
+#include <chrono>
+#include <C2SurfaceSyncObj.h>
+
+const native_handle_t C2SurfaceSyncMemory::HandleSyncMem::cHeader = {
+    C2SurfaceSyncMemory::HandleSyncMem::version,
+    C2SurfaceSyncMemory::HandleSyncMem::numFds,
+    C2SurfaceSyncMemory::HandleSyncMem::numInts,
+    {}
+};
+
+bool C2SurfaceSyncMemory::HandleSyncMem::isValid(const native_handle_t * const o) {
+    if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+        return false;
+    }
+
+    const HandleSyncMem *other = static_cast<const HandleSyncMem*>(o);
+    return other->mInts.mMagic == kMagic;
+}
+
+C2SurfaceSyncMemory::C2SurfaceSyncMemory()
+    : mInit(false), mHandle(nullptr), mMem(nullptr) {}
+
+C2SurfaceSyncMemory::~C2SurfaceSyncMemory() {
+    if (mInit) {
+        if (mMem) {
+            munmap(static_cast<void *>(mMem), mHandle->size());
+        }
+        if (mHandle) {
+            native_handle_close(mHandle);
+            native_handle_delete(mHandle);
+        }
+    }
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Import(
+        native_handle_t *handle) {
+    if (!HandleSyncMem::isValid(handle)) {
+        return nullptr;
+    }
+
+    HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
+
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = o;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+std::shared_ptr<C2SurfaceSyncMemory> C2SurfaceSyncMemory::Create(int fd, size_t size) {
+    if (fd < 0 || size == 0) {
+        return nullptr;
+    }
+    HandleSyncMem *handle = new HandleSyncMem(fd, size);
+
+    void *ptr = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+    if (ptr == MAP_FAILED) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+        return nullptr;
+    }
+    memset(ptr, 0, size);
+
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem(new C2SurfaceSyncMemory);
+    syncMem->mInit = true;
+    syncMem->mHandle = handle;
+    syncMem->mMem = static_cast<C2SyncVariables*>(ptr);
+    return syncMem;
+}
+
+native_handle_t *C2SurfaceSyncMemory::handle() {
+    return !mInit ? nullptr : mHandle;
+}
+
+C2SyncVariables *C2SurfaceSyncMemory::mem() {
+    return !mInit ? nullptr : mMem;
+}
+
+namespace {
+    constexpr int kSpinNumForLock = 100;
+    constexpr int kSpinNumForUnlock = 200;
+
+    enum : uint32_t {
+        FUTEX_UNLOCKED = 0,
+        FUTEX_LOCKED_UNCONTENDED = 1,  // user-space locking
+        FUTEX_LOCKED_CONTENDED = 2,    // futex locking
+    };
+}
+
+int C2SyncVariables::lock() {
+    uint32_t old;
+    for (int i = 0; i < kSpinNumForLock; i++) {
+        old = 0;
+        if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
+            return 0;
+        }
+        sched_yield();
+    }
+
+    if (old == FUTEX_LOCKED_UNCONTENDED)
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+
+    while (old) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+    }
+    return 0;
+}
+
+int C2SyncVariables::unlock() {
+    if (mLock.exchange(FUTEX_UNLOCKED) == FUTEX_LOCKED_UNCONTENDED) return 0;
+
+    for (int i = 0; i < kSpinNumForUnlock; i++) {
+        if (mLock.load()) {
+            uint32_t old = FUTEX_LOCKED_UNCONTENDED;
+            mLock.compare_exchange_strong(old, FUTEX_LOCKED_CONTENDED);
+            if (old) {
+                return 0;
+            }
+        }
+        sched_yield();
+    }
+
+    (void) syscall(__NR_futex, &mLock, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+void C2SyncVariables::setInitialDequeueCountLocked(
+        int32_t maxDequeueCount, int32_t curDequeueCount) {
+    mMaxDequeueCount = maxDequeueCount;
+    mCurDequeueCount = curDequeueCount;
+}
+
+uint32_t C2SyncVariables::getWaitIdLocked() {
+    return mCond.load();
+}
+
+bool C2SyncVariables::isDequeueableLocked(uint32_t *waitId) {
+    if (mMaxDequeueCount <= mCurDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        return false;
+    }
+    return true;
+}
+
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+    // Note: thundering herds may occur; signalling is edge-triggered.
+    // One waiter is guaranteed to dequeue, but the others may have to wait again.
+    // Keep futex syscalls (traps) to a minimum for the main use case (a single waiter).
+    if (mMaxDequeueCount == mCurDequeueCount--) {
+        broadcast();
+        return true;
+    }
+
+    if (mCurDequeueCount >= mMaxDequeueCount) {
+        if (waitId) {
+            *waitId = getWaitIdLocked();
+        }
+        ALOGV("dequeue blocked %d/%d", mCurDequeueCount, mMaxDequeueCount);
+        return false;
+    }
+    return true;
+}
+
+void C2SyncVariables::notifyDequeuedLocked() {
+    mCurDequeueCount++;
+    ALOGV("dequeue successful %d/%d", mCurDequeueCount, mMaxDequeueCount);
+}
+
+void C2SyncVariables::setSyncStatusLocked(SyncStatus status) {
+    mStatus = status;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+C2SyncVariables::SyncStatus C2SyncVariables::getSyncStatusLocked() {
+    return mStatus;
+}
+
+void C2SyncVariables::updateMaxDequeueCountLocked(int32_t maxDequeueCount) {
+    mMaxDequeueCount = maxDequeueCount;
+    if (mStatus == STATUS_ACTIVE) {
+        broadcast();
+    }
+}
+
+c2_status_t C2SyncVariables::waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs) {
+    if (timeoutNs < 0) {
+        timeoutNs = 0;
+    }
+    struct timespec tv;
+    tv.tv_sec = timeoutNs / 1000000000;
+    tv.tv_nsec = timeoutNs % 1000000000;
+
+    int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
+    if (ret == 0 || ret == EAGAIN) {
+        return C2_OK;
+    }
+    if (ret == EINTR || ret == ETIMEDOUT) {
+        return C2_TIMED_OUT;
+    }
+    return C2_BAD_VALUE;
+}
+
+int C2SyncVariables::signal() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAKE, 1, NULL, NULL, 0);
+    return 0;
+}
+
+int C2SyncVariables::broadcast() {
+    mCond++;
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_REQUEUE, 1, (void *)INT_MAX, &mLock, 0);
+    return 0;
+}
+
+int C2SyncVariables::wait() {
+    uint32_t old = mCond.load();
+    unlock();
+
+    (void) syscall(__NR_futex, &mCond, FUTEX_WAIT, old, NULL, NULL, 0);
+    while (mLock.exchange(FUTEX_LOCKED_CONTENDED)) {
+        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+    }
+    return 0;
+}
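
C2SyncVariables::lock()/unlock() above implement the classic three-state futex mutex (unlocked, locked-uncontended, locked-contended): a bounded spin with compare-exchange first, and FUTEX_WAIT/FUTEX_WAKE only once contention is observed. A self-contained sketch of the same idea for a process-local mutex, simplified by dropping the spin phase (Linux-only and illustrative; the real code additionally places the word in cross-process shared memory):

    // Minimal three-state futex mutex (sketch, not part of the patch).
    #include <atomic>
    #include <cstdint>
    #include <linux/futex.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    class FutexMutex {
    public:
        void lock() {
            uint32_t expected = 0;
            if (mState.compare_exchange_strong(expected, 1)) {
                return;  // uncontended fast path: 0 -> 1
            }
            // Mark the lock contended and sleep until woken; re-check after every wakeup.
            while (mState.exchange(2) != 0) {
                syscall(SYS_futex, &mState, FUTEX_WAIT, 2, nullptr, nullptr, 0);
            }
        }
        void unlock() {
            if (mState.exchange(0) == 2) {
                // There may be sleepers; wake one of them.
                syscall(SYS_futex, &mState, FUTEX_WAKE, 1, nullptr, nullptr, 0);
            }
        }
    private:
        std::atomic<uint32_t> mState{0};  // 0 = unlocked, 1 = locked, 2 = locked + waiters
    };
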
diff --git a/media/codec2/vndk/util/C2InterfaceHelper.cpp b/media/codec2/vndk/util/C2InterfaceHelper.cpp
index 9eb52d2..19d2295 100644
--- a/media/codec2/vndk/util/C2InterfaceHelper.cpp
+++ b/media/codec2/vndk/util/C2InterfaceHelper.cpp
@@ -645,6 +645,7 @@
                     lateReadParams.emplace_back(p);
                     std::unique_ptr<C2Param> request(C2Param::CopyAsRequest(*p));
                     p = request.get();
+                    paramIx = p->index();
                     paramRequests.emplace_back(std::move(request));
                 }
             }
diff --git a/media/codec2/vndk/util/C2InterfaceUtils.cpp b/media/codec2/vndk/util/C2InterfaceUtils.cpp
index 0c1729b..b5bc691 100644
--- a/media/codec2/vndk/util/C2InterfaceUtils.cpp
+++ b/media/codec2/vndk/util/C2InterfaceUtils.cpp
@@ -21,6 +21,7 @@
 
 #define C2_LOG_VERBOSE
 
+#include <C2Config.h>
 #include <C2Debug.h>
 #include <C2Param.h>
 #include <C2ParamDef.h>
@@ -30,6 +31,7 @@
 #include <cmath>
 #include <limits>
 #include <map>
+#include <sstream>
 #include <type_traits>
 
 #include <android-base/stringprintf.h>
@@ -1304,3 +1306,81 @@
     return std::vector<Info>(location.begin(), location.end());
 }
 
+//static
+bool C2InterfaceUtils::FillTraitsFromInterface(
+        C2Component::Traits *traits,
+        const std::shared_ptr<C2ComponentInterface> &intf) {
+    if (!traits) {
+        return false;
+    }
+    traits->name = intf->getName();
+
+    C2ComponentKindSetting kind;
+    C2ComponentDomainSetting domain;
+    c2_status_t res = intf->query_vb({ &kind, &domain }, {}, C2_MAY_BLOCK, nullptr);
+    bool fixDomain = res != C2_OK;
+    if (res == C2_OK) {
+        traits->kind = kind.value;
+        traits->domain = domain.value;
+    } else {
+        // TODO: remove this fall-back
+        C2_LOG(DEBUG) << "failed to query interface for kind and domain: " << res;
+
+        traits->kind =
+            (traits->name.find("encoder") != std::string::npos) ? C2Component::KIND_ENCODER :
+            (traits->name.find("decoder") != std::string::npos) ? C2Component::KIND_DECODER :
+            C2Component::KIND_OTHER;
+    }
+
+    uint32_t mediaTypeIndex = traits->kind == C2Component::KIND_ENCODER
+            ? C2PortMediaTypeSetting::output::PARAM_TYPE
+            : C2PortMediaTypeSetting::input::PARAM_TYPE;
+    std::vector<std::unique_ptr<C2Param>> params;
+    res = intf->query_vb({}, { mediaTypeIndex }, C2_MAY_BLOCK, &params);
+    if (res != C2_OK) {
+        C2_LOG(DEBUG) << "failed to query interface: " << res;
+        return false;
+    }
+    if (params.size() != 1u) {
+        C2_LOG(DEBUG) << "failed to query interface: unexpected vector size: " << params.size();
+        return false;
+    }
+    C2PortMediaTypeSetting *mediaTypeConfig = C2PortMediaTypeSetting::From(params[0].get());
+    if (mediaTypeConfig == nullptr) {
+        C2_LOG(DEBUG) << "failed to query media type";
+        return false;
+    }
+    traits->mediaType =
+        std::string(mediaTypeConfig->m.value,
+                    strnlen(mediaTypeConfig->m.value, mediaTypeConfig->flexCount()));
+
+    if (fixDomain) {
+        if (strncmp(traits->mediaType.c_str(), "audio/", 6) == 0) {
+            traits->domain = C2Component::DOMAIN_AUDIO;
+        } else if (strncmp(traits->mediaType.c_str(), "video/", 6) == 0) {
+            traits->domain = C2Component::DOMAIN_VIDEO;
+        } else if (strncmp(traits->mediaType.c_str(), "image/", 6) == 0) {
+            traits->domain = C2Component::DOMAIN_IMAGE;
+        } else {
+            traits->domain = C2Component::DOMAIN_OTHER;
+        }
+    }
+
+    params.clear();
+    res = intf->query_vb({}, { C2ComponentAliasesSetting::PARAM_TYPE }, C2_MAY_BLOCK, &params);
+    if (res == C2_OK && params.size() == 1u) {
+        C2ComponentAliasesSetting *aliasesSetting =
+            C2ComponentAliasesSetting::From(params[0].get());
+        if (aliasesSetting) {
+            std::istringstream iss(
+                    std::string(aliasesSetting->m.value, aliasesSetting->flexCount()));
+            C2_LOG(DEBUG) << intf->getName() << " has aliases: " << iss.str();
+
+            for (std::string tok; std::getline(iss, tok, ','); ) {
+                traits->aliases.push_back(tok);
+                C2_LOG(DEBUG) << "adding alias: " << tok;
+            }
+        }
+    }
+    return true;
+}
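
The alias handling above receives C2ComponentAliasesSetting as one comma-separated flexible string and splits it with std::istringstream and std::getline. A standalone sketch of that split (the function name and the sample aliases are illustrative only):

    // Illustrative: split a comma-separated alias list the same way as above.
    #include <sstream>
    #include <string>
    #include <vector>

    static std::vector<std::string> splitAliases(const std::string &csv) {
        std::vector<std::string> out;
        std::istringstream iss(csv);
        for (std::string tok; std::getline(iss, tok, ','); ) {
            out.push_back(tok);
        }
        return out;
    }
    // splitAliases("c2.android.avc.decoder,OMX.google.h264.decoder")
    //   yields {"c2.android.avc.decoder", "OMX.google.h264.decoder"}
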
diff --git a/media/codecs/amrnb/TEST_MAPPING b/media/codecs/amrnb/TEST_MAPPING
new file mode 100644
index 0000000..343d08a
--- /dev/null
+++ b/media/codecs/amrnb/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/libstagefright/codecs/amrnb
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrnbDecoderTest"},
+    { "name": "AmrnbEncoderTest"}
+  ]
+}
diff --git a/media/codecs/amrnb/common/Android.bp b/media/codecs/amrnb/common/Android.bp
new file mode 100644
index 0000000..bae65f3
--- /dev/null
+++ b/media/codecs/amrnb/common/Android.bp
@@ -0,0 +1,110 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_common_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrnb_common_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
+    name: "libstagefright_amrnb_common",
+    vendor_available: true,
+    host_supported: true,
+    min_sdk_version: "29",
+
+    srcs: [
+        "src/add.cpp",
+        "src/az_lsp.cpp",
+        "src/bitno_tab.cpp",
+        "src/bitreorder_tab.cpp",
+        "src/bits2prm.cpp",
+        "src/c2_9pf_tab.cpp",
+        "src/copy.cpp",
+        "src/div_32.cpp",
+        "src/div_s.cpp",
+        "src/extract_h.cpp",
+        "src/extract_l.cpp",
+        "src/gains_tbl.cpp",
+        "src/gc_pred.cpp",
+        "src/gmed_n.cpp",
+        "src/grid_tbl.cpp",
+        "src/gray_tbl.cpp",
+        "src/int_lpc.cpp",
+        "src/inv_sqrt.cpp",
+        "src/inv_sqrt_tbl.cpp",
+        "src/l_abs.cpp",
+        "src/l_deposit_h.cpp",
+        "src/l_deposit_l.cpp",
+        "src/l_shr_r.cpp",
+        "src/log2.cpp",
+        "src/log2_norm.cpp",
+        "src/log2_tbl.cpp",
+        "src/lsfwt.cpp",
+        "src/lsp.cpp",
+        "src/lsp_az.cpp",
+        "src/lsp_lsf.cpp",
+        "src/lsp_lsf_tbl.cpp",
+        "src/lsp_tab.cpp",
+        "src/mult_r.cpp",
+        "src/norm_l.cpp",
+        "src/norm_s.cpp",
+        "src/ph_disp_tab.cpp",
+        "src/pow2.cpp",
+        "src/pow2_tbl.cpp",
+        "src/pred_lt.cpp",
+        "src/q_plsf.cpp",
+        "src/q_plsf_3.cpp",
+        "src/q_plsf_3_tbl.cpp",
+        "src/q_plsf_5.cpp",
+        "src/q_plsf_5_tbl.cpp",
+        "src/qua_gain_tbl.cpp",
+        "src/reorder.cpp",
+        "src/residu.cpp",
+        "src/round.cpp",
+        "src/set_zero.cpp",
+        "src/shr.cpp",
+        "src/shr_r.cpp",
+        "src/sqrt_l.cpp",
+        "src/sqrt_l_tbl.cpp",
+        "src/sub.cpp",
+        "src/syn_filt.cpp",
+        "src/vad1.cpp",
+        "src/weight_a.cpp",
+        "src/window_tab.cpp",
+    ],
+
+    export_include_dirs: ["include"],
+
+    cflags: [
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+        "-DOSCL_IMPORT_REF=",
+        "-DOSCL_EXPORT_REF=",
+
+        "-Werror",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    //addressing b/25409744
+    //sanitize: {
+    //    misc_undefined: [
+    //        "signed-integer-overflow",
+    //        "unsigned-integer-overflow",
+    //    ],
+    //},
+}
diff --git a/media/libstagefright/codecs/amrnb/common/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/common/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/common/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/common/NOTICE b/media/codecs/amrnb/common/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/NOTICE
rename to media/codecs/amrnb/common/NOTICE
diff --git a/media/libstagefright/codecs/amrnb/common/include/abs_s.h b/media/codecs/amrnb/common/include/abs_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/abs_s.h
rename to media/codecs/amrnb/common/include/abs_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/add.h b/media/codecs/amrnb/common/include/add.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/add.h
rename to media/codecs/amrnb/common/include/add.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/az_lsp.h b/media/codecs/amrnb/common/include/az_lsp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/az_lsp.h
rename to media/codecs/amrnb/common/include/az_lsp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op.h b/media/codecs/amrnb/common/include/basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op.h
rename to media/codecs/amrnb/common/include/basic_op.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h b/media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
rename to media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_arm_v5.h b/media/codecs/amrnb/common/include/basic_op_arm_v5.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_arm_v5.h
rename to media/codecs/amrnb/common/include/basic_op_arm_v5.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/codecs/amrnb/common/include/basic_op_c_equivalent.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h
rename to media/codecs/amrnb/common/include/basic_op_c_equivalent.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/basicop_malloc.h b/media/codecs/amrnb/common/include/basicop_malloc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/basicop_malloc.h
rename to media/codecs/amrnb/common/include/basicop_malloc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bitno_tab.h b/media/codecs/amrnb/common/include/bitno_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bitno_tab.h
rename to media/codecs/amrnb/common/include/bitno_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bitreorder_tab.h b/media/codecs/amrnb/common/include/bitreorder_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bitreorder_tab.h
rename to media/codecs/amrnb/common/include/bitreorder_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/bits2prm.h b/media/codecs/amrnb/common/include/bits2prm.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/bits2prm.h
rename to media/codecs/amrnb/common/include/bits2prm.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/cnst.h b/media/codecs/amrnb/common/include/cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/cnst.h
rename to media/codecs/amrnb/common/include/cnst.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/cnst_vad.h b/media/codecs/amrnb/common/include/cnst_vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/cnst_vad.h
rename to media/codecs/amrnb/common/include/cnst_vad.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/copy.h b/media/codecs/amrnb/common/include/copy.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/copy.h
rename to media/codecs/amrnb/common/include/copy.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_gain_c.h b/media/codecs/amrnb/common/include/d_gain_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_gain_c.h
rename to media/codecs/amrnb/common/include/d_gain_c.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_gain_p.h b/media/codecs/amrnb/common/include/d_gain_p.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_gain_p.h
rename to media/codecs/amrnb/common/include/d_gain_p.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/d_plsf.h b/media/codecs/amrnb/common/include/d_plsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/d_plsf.h
rename to media/codecs/amrnb/common/include/d_plsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/div_32.h b/media/codecs/amrnb/common/include/div_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/div_32.h
rename to media/codecs/amrnb/common/include/div_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/div_s.h b/media/codecs/amrnb/common/include/div_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/div_s.h
rename to media/codecs/amrnb/common/include/div_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/dtx_common_def.h b/media/codecs/amrnb/common/include/dtx_common_def.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/dtx_common_def.h
rename to media/codecs/amrnb/common/include/dtx_common_def.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/extract_h.h b/media/codecs/amrnb/common/include/extract_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/extract_h.h
rename to media/codecs/amrnb/common/include/extract_h.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/extract_l.h b/media/codecs/amrnb/common/include/extract_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/extract_l.h
rename to media/codecs/amrnb/common/include/extract_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/frame.h b/media/codecs/amrnb/common/include/frame.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/frame.h
rename to media/codecs/amrnb/common/include/frame.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/frame_type_3gpp.h b/media/codecs/amrnb/common/include/frame_type_3gpp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/frame_type_3gpp.h
rename to media/codecs/amrnb/common/include/frame_type_3gpp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gc_pred.h b/media/codecs/amrnb/common/include/gc_pred.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gc_pred.h
rename to media/codecs/amrnb/common/include/gc_pred.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gmed_n.h b/media/codecs/amrnb/common/include/gmed_n.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gmed_n.h
rename to media/codecs/amrnb/common/include/gmed_n.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/gsm_amr_typedefs.h b/media/codecs/amrnb/common/include/gsm_amr_typedefs.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/gsm_amr_typedefs.h
rename to media/codecs/amrnb/common/include/gsm_amr_typedefs.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/int_lpc.h b/media/codecs/amrnb/common/include/int_lpc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/int_lpc.h
rename to media/codecs/amrnb/common/include/int_lpc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/int_lsf.h b/media/codecs/amrnb/common/include/int_lsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/int_lsf.h
rename to media/codecs/amrnb/common/include/int_lsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h b/media/codecs/amrnb/common/include/inv_sqrt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/inv_sqrt.h
rename to media/codecs/amrnb/common/include/inv_sqrt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_abs.h b/media/codecs/amrnb/common/include/l_abs.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_abs.h
rename to media/codecs/amrnb/common/include/l_abs.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_add.h b/media/codecs/amrnb/common/include/l_add.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_add.h
rename to media/codecs/amrnb/common/include/l_add.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_add_c.h b/media/codecs/amrnb/common/include/l_add_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_add_c.h
rename to media/codecs/amrnb/common/include/l_add_c.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_comp.h b/media/codecs/amrnb/common/include/l_comp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_comp.h
rename to media/codecs/amrnb/common/include/l_comp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_deposit_h.h b/media/codecs/amrnb/common/include/l_deposit_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_deposit_h.h
rename to media/codecs/amrnb/common/include/l_deposit_h.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_deposit_l.h b/media/codecs/amrnb/common/include/l_deposit_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_deposit_l.h
rename to media/codecs/amrnb/common/include/l_deposit_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_extract.h b/media/codecs/amrnb/common/include/l_extract.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_extract.h
rename to media/codecs/amrnb/common/include/l_extract.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_mac.h b/media/codecs/amrnb/common/include/l_mac.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_mac.h
rename to media/codecs/amrnb/common/include/l_mac.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_msu.h b/media/codecs/amrnb/common/include/l_msu.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_msu.h
rename to media/codecs/amrnb/common/include/l_msu.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_mult.h b/media/codecs/amrnb/common/include/l_mult.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_mult.h
rename to media/codecs/amrnb/common/include/l_mult.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_negate.h b/media/codecs/amrnb/common/include/l_negate.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_negate.h
rename to media/codecs/amrnb/common/include/l_negate.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shl.h b/media/codecs/amrnb/common/include/l_shl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shl.h
rename to media/codecs/amrnb/common/include/l_shl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shr.h b/media/codecs/amrnb/common/include/l_shr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shr.h
rename to media/codecs/amrnb/common/include/l_shr.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_shr_r.h b/media/codecs/amrnb/common/include/l_shr_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_shr_r.h
rename to media/codecs/amrnb/common/include/l_shr_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/l_sub.h b/media/codecs/amrnb/common/include/l_sub.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/l_sub.h
rename to media/codecs/amrnb/common/include/l_sub.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/log2.h b/media/codecs/amrnb/common/include/log2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/log2.h
rename to media/codecs/amrnb/common/include/log2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/log2_norm.h b/media/codecs/amrnb/common/include/log2_norm.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/log2_norm.h
rename to media/codecs/amrnb/common/include/log2_norm.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsfwt.h b/media/codecs/amrnb/common/include/lsfwt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsfwt.h
rename to media/codecs/amrnb/common/include/lsfwt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp.h b/media/codecs/amrnb/common/include/lsp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp.h
rename to media/codecs/amrnb/common/include/lsp.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_az.h b/media/codecs/amrnb/common/include/lsp_az.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_az.h
rename to media/codecs/amrnb/common/include/lsp_az.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_lsf.h b/media/codecs/amrnb/common/include/lsp_lsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_lsf.h
rename to media/codecs/amrnb/common/include/lsp_lsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/lsp_tab.h b/media/codecs/amrnb/common/include/lsp_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/lsp_tab.h
rename to media/codecs/amrnb/common/include/lsp_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mac_32.h b/media/codecs/amrnb/common/include/mac_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mac_32.h
rename to media/codecs/amrnb/common/include/mac_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mode.h b/media/codecs/amrnb/common/include/mode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mode.h
rename to media/codecs/amrnb/common/include/mode.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mpy_32.h b/media/codecs/amrnb/common/include/mpy_32.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mpy_32.h
rename to media/codecs/amrnb/common/include/mpy_32.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mpy_32_16.h b/media/codecs/amrnb/common/include/mpy_32_16.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mpy_32_16.h
rename to media/codecs/amrnb/common/include/mpy_32_16.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mult.h b/media/codecs/amrnb/common/include/mult.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mult.h
rename to media/codecs/amrnb/common/include/mult.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/mult_r.h b/media/codecs/amrnb/common/include/mult_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/mult_r.h
rename to media/codecs/amrnb/common/include/mult_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/n_proc.h b/media/codecs/amrnb/common/include/n_proc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/n_proc.h
rename to media/codecs/amrnb/common/include/n_proc.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/negate.h b/media/codecs/amrnb/common/include/negate.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/negate.h
rename to media/codecs/amrnb/common/include/negate.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/norm_l.h b/media/codecs/amrnb/common/include/norm_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/norm_l.h
rename to media/codecs/amrnb/common/include/norm_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/norm_s.h b/media/codecs/amrnb/common/include/norm_s.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/norm_s.h
rename to media/codecs/amrnb/common/include/norm_s.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/oper_32b.h b/media/codecs/amrnb/common/include/oper_32b.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/oper_32b.h
rename to media/codecs/amrnb/common/include/oper_32b.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/p_ol_wgh.h b/media/codecs/amrnb/common/include/p_ol_wgh.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/p_ol_wgh.h
rename to media/codecs/amrnb/common/include/p_ol_wgh.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/pow2.h b/media/codecs/amrnb/common/include/pow2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/pow2.h
rename to media/codecs/amrnb/common/include/pow2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/pred_lt.h b/media/codecs/amrnb/common/include/pred_lt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/pred_lt.h
rename to media/codecs/amrnb/common/include/pred_lt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf.h b/media/codecs/amrnb/common/include/q_plsf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf.h
rename to media/codecs/amrnb/common/include/q_plsf.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf_3_tbl.h b/media/codecs/amrnb/common/include/q_plsf_3_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf_3_tbl.h
rename to media/codecs/amrnb/common/include/q_plsf_3_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/q_plsf_5_tbl.h b/media/codecs/amrnb/common/include/q_plsf_5_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/q_plsf_5_tbl.h
rename to media/codecs/amrnb/common/include/q_plsf_5_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qgain475_tab.h b/media/codecs/amrnb/common/include/qgain475_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qgain475_tab.h
rename to media/codecs/amrnb/common/include/qgain475_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qua_gain.h b/media/codecs/amrnb/common/include/qua_gain.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qua_gain.h
rename to media/codecs/amrnb/common/include/qua_gain.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/qua_gain_tbl.h b/media/codecs/amrnb/common/include/qua_gain_tbl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/qua_gain_tbl.h
rename to media/codecs/amrnb/common/include/qua_gain_tbl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/reorder.h b/media/codecs/amrnb/common/include/reorder.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/reorder.h
rename to media/codecs/amrnb/common/include/reorder.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/residu.h b/media/codecs/amrnb/common/include/residu.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/residu.h
rename to media/codecs/amrnb/common/include/residu.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/reverse_bits.h b/media/codecs/amrnb/common/include/reverse_bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/reverse_bits.h
rename to media/codecs/amrnb/common/include/reverse_bits.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/round.h b/media/codecs/amrnb/common/include/round.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/round.h
rename to media/codecs/amrnb/common/include/round.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/set_zero.h b/media/codecs/amrnb/common/include/set_zero.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/set_zero.h
rename to media/codecs/amrnb/common/include/set_zero.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shl.h b/media/codecs/amrnb/common/include/shl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shl.h
rename to media/codecs/amrnb/common/include/shl.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shr.h b/media/codecs/amrnb/common/include/shr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shr.h
rename to media/codecs/amrnb/common/include/shr.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/shr_r.h b/media/codecs/amrnb/common/include/shr_r.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/shr_r.h
rename to media/codecs/amrnb/common/include/shr_r.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/sqrt_l.h b/media/codecs/amrnb/common/include/sqrt_l.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/sqrt_l.h
rename to media/codecs/amrnb/common/include/sqrt_l.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/sub.h b/media/codecs/amrnb/common/include/sub.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/sub.h
rename to media/codecs/amrnb/common/include/sub.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/syn_filt.h b/media/codecs/amrnb/common/include/syn_filt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/syn_filt.h
rename to media/codecs/amrnb/common/include/syn_filt.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/typedef.h b/media/codecs/amrnb/common/include/typedef.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/typedef.h
rename to media/codecs/amrnb/common/include/typedef.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad.h b/media/codecs/amrnb/common/include/vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad.h
rename to media/codecs/amrnb/common/include/vad.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad1.h b/media/codecs/amrnb/common/include/vad1.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad1.h
rename to media/codecs/amrnb/common/include/vad1.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/vad2.h b/media/codecs/amrnb/common/include/vad2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/vad2.h
rename to media/codecs/amrnb/common/include/vad2.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/weight_a.h b/media/codecs/amrnb/common/include/weight_a.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/weight_a.h
rename to media/codecs/amrnb/common/include/weight_a.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/window_tab.h b/media/codecs/amrnb/common/include/window_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/window_tab.h
rename to media/codecs/amrnb/common/include/window_tab.h
diff --git a/media/libstagefright/codecs/amrnb/common/include/wmf_to_ets.h b/media/codecs/amrnb/common/include/wmf_to_ets.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/include/wmf_to_ets.h
rename to media/codecs/amrnb/common/include/wmf_to_ets.h
diff --git a/media/libstagefright/codecs/amrnb/common/src/add.cpp b/media/codecs/amrnb/common/src/add.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/add.cpp
rename to media/codecs/amrnb/common/src/add.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp b/media/codecs/amrnb/common/src/az_lsp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
rename to media/codecs/amrnb/common/src/az_lsp.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp b/media/codecs/amrnb/common/src/bitno_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bitno_tab.cpp
rename to media/codecs/amrnb/common/src/bitno_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp b/media/codecs/amrnb/common/src/bitreorder_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bitreorder_tab.cpp
rename to media/codecs/amrnb/common/src/bitreorder_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/bits2prm.cpp b/media/codecs/amrnb/common/src/bits2prm.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/bits2prm.cpp
rename to media/codecs/amrnb/common/src/bits2prm.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp b/media/codecs/amrnb/common/src/c2_9pf_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/c2_9pf_tab.cpp
rename to media/codecs/amrnb/common/src/c2_9pf_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/copy.cpp b/media/codecs/amrnb/common/src/copy.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/copy.cpp
rename to media/codecs/amrnb/common/src/copy.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/div_32.cpp b/media/codecs/amrnb/common/src/div_32.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/div_32.cpp
rename to media/codecs/amrnb/common/src/div_32.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/div_s.cpp b/media/codecs/amrnb/common/src/div_s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/div_s.cpp
rename to media/codecs/amrnb/common/src/div_s.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/extract_h.cpp b/media/codecs/amrnb/common/src/extract_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/extract_h.cpp
rename to media/codecs/amrnb/common/src/extract_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/extract_l.cpp b/media/codecs/amrnb/common/src/extract_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/extract_l.cpp
rename to media/codecs/amrnb/common/src/extract_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp b/media/codecs/amrnb/common/src/gains_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gains_tbl.cpp
rename to media/codecs/amrnb/common/src/gains_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp b/media/codecs/amrnb/common/src/gc_pred.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp
rename to media/codecs/amrnb/common/src/gc_pred.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp b/media/codecs/amrnb/common/src/gmed_n.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp
rename to media/codecs/amrnb/common/src/gmed_n.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp b/media/codecs/amrnb/common/src/gray_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/gray_tbl.cpp
rename to media/codecs/amrnb/common/src/gray_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp b/media/codecs/amrnb/common/src/grid_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/grid_tbl.cpp
rename to media/codecs/amrnb/common/src/grid_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/int_lpc.cpp b/media/codecs/amrnb/common/src/int_lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/int_lpc.cpp
rename to media/codecs/amrnb/common/src/int_lpc.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt.cpp b/media/codecs/amrnb/common/src/inv_sqrt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/inv_sqrt.cpp
rename to media/codecs/amrnb/common/src/inv_sqrt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp b/media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
rename to media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_abs.cpp b/media/codecs/amrnb/common/src/l_abs.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_abs.cpp
rename to media/codecs/amrnb/common/src/l_abs.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_deposit_h.cpp b/media/codecs/amrnb/common/src/l_deposit_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_deposit_h.cpp
rename to media/codecs/amrnb/common/src/l_deposit_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_deposit_l.cpp b/media/codecs/amrnb/common/src/l_deposit_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_deposit_l.cpp
rename to media/codecs/amrnb/common/src/l_deposit_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/l_shr_r.cpp b/media/codecs/amrnb/common/src/l_shr_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/l_shr_r.cpp
rename to media/codecs/amrnb/common/src/l_shr_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2.cpp b/media/codecs/amrnb/common/src/log2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2.cpp
rename to media/codecs/amrnb/common/src/log2.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2_norm.cpp b/media/codecs/amrnb/common/src/log2_norm.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2_norm.cpp
rename to media/codecs/amrnb/common/src/log2_norm.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp b/media/codecs/amrnb/common/src/log2_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/log2_tbl.cpp
rename to media/codecs/amrnb/common/src/log2_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsfwt.cpp b/media/codecs/amrnb/common/src/lsfwt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsfwt.cpp
rename to media/codecs/amrnb/common/src/lsfwt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp.cpp b/media/codecs/amrnb/common/src/lsp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp.cpp
rename to media/codecs/amrnb/common/src/lsp.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp b/media/codecs/amrnb/common/src/lsp_az.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp
rename to media/codecs/amrnb/common/src/lsp_az.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf.cpp b/media/codecs/amrnb/common/src/lsp_lsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_lsf.cpp
rename to media/codecs/amrnb/common/src/lsp_lsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp b/media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
rename to media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp b/media/codecs/amrnb/common/src/lsp_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/lsp_tab.cpp
rename to media/codecs/amrnb/common/src/lsp_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp b/media/codecs/amrnb/common/src/mult_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/mult_r.cpp
rename to media/codecs/amrnb/common/src/mult_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/negate.cpp b/media/codecs/amrnb/common/src/negate.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/negate.cpp
rename to media/codecs/amrnb/common/src/negate.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp b/media/codecs/amrnb/common/src/norm_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/norm_l.cpp
rename to media/codecs/amrnb/common/src/norm_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp b/media/codecs/amrnb/common/src/norm_s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/norm_s.cpp
rename to media/codecs/amrnb/common/src/norm_s.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp b/media/codecs/amrnb/common/src/ph_disp_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/ph_disp_tab.cpp
rename to media/codecs/amrnb/common/src/ph_disp_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pow2.cpp b/media/codecs/amrnb/common/src/pow2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pow2.cpp
rename to media/codecs/amrnb/common/src/pow2.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp b/media/codecs/amrnb/common/src/pow2_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pow2_tbl.cpp
rename to media/codecs/amrnb/common/src/pow2_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp b/media/codecs/amrnb/common/src/pred_lt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp
rename to media/codecs/amrnb/common/src/pred_lt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf.cpp b/media/codecs/amrnb/common/src/q_plsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf.cpp
rename to media/codecs/amrnb/common/src/q_plsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp b/media/codecs/amrnb/common/src/q_plsf_3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp
rename to media/codecs/amrnb/common/src/q_plsf_3.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3_tbl.cpp b/media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
rename to media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5.cpp b/media/codecs/amrnb/common/src/q_plsf_5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_5.cpp
rename to media/codecs/amrnb/common/src/q_plsf_5.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp b/media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
rename to media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp b/media/codecs/amrnb/common/src/qua_gain_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/qua_gain_tbl.cpp
rename to media/codecs/amrnb/common/src/qua_gain_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/reorder.cpp b/media/codecs/amrnb/common/src/reorder.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/reorder.cpp
rename to media/codecs/amrnb/common/src/reorder.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/residu.cpp b/media/codecs/amrnb/common/src/residu.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/residu.cpp
rename to media/codecs/amrnb/common/src/residu.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/round.cpp b/media/codecs/amrnb/common/src/round.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/round.cpp
rename to media/codecs/amrnb/common/src/round.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/set_zero.cpp b/media/codecs/amrnb/common/src/set_zero.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/set_zero.cpp
rename to media/codecs/amrnb/common/src/set_zero.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/shr.cpp b/media/codecs/amrnb/common/src/shr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/shr.cpp
rename to media/codecs/amrnb/common/src/shr.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/shr_r.cpp b/media/codecs/amrnb/common/src/shr_r.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/shr_r.cpp
rename to media/codecs/amrnb/common/src/shr_r.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sqrt_l.cpp b/media/codecs/amrnb/common/src/sqrt_l.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sqrt_l.cpp
rename to media/codecs/amrnb/common/src/sqrt_l.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp b/media/codecs/amrnb/common/src/sqrt_l_tbl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sqrt_l_tbl.cpp
rename to media/codecs/amrnb/common/src/sqrt_l_tbl.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/sub.cpp b/media/codecs/amrnb/common/src/sub.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/sub.cpp
rename to media/codecs/amrnb/common/src/sub.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/syn_filt.cpp b/media/codecs/amrnb/common/src/syn_filt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/syn_filt.cpp
rename to media/codecs/amrnb/common/src/syn_filt.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/vad1.cpp b/media/codecs/amrnb/common/src/vad1.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/vad1.cpp
rename to media/codecs/amrnb/common/src/vad1.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp b/media/codecs/amrnb/common/src/weight_a.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/weight_a.cpp
rename to media/codecs/amrnb/common/src/weight_a.cpp
diff --git a/media/libstagefright/codecs/amrnb/common/src/window_tab.cpp b/media/codecs/amrnb/common/src/window_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/common/src/window_tab.cpp
rename to media/codecs/amrnb/common/src/window_tab.cpp
diff --git a/media/codecs/amrnb/dec/Android.bp b/media/codecs/amrnb/dec/Android.bp
new file mode 100644
index 0000000..1083b82
--- /dev/null
+++ b/media/codecs/amrnb/dec/Android.bp
@@ -0,0 +1,138 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrnb_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_amrnbdec",
+    vendor_available: true,
+    host_supported: true,
+    min_sdk_version: "29",
+
+    srcs: [
+        "src/a_refl.cpp",
+        "src/agc.cpp",
+        "src/amrdecode.cpp",
+        "src/b_cn_cod.cpp",
+        "src/bgnscd.cpp",
+        "src/c_g_aver.cpp",
+        "src/d1035pf.cpp",
+        "src/d2_11pf.cpp",
+        "src/d2_9pf.cpp",
+        "src/d3_14pf.cpp",
+        "src/d4_17pf.cpp",
+        "src/d8_31pf.cpp",
+        "src/d_gain_c.cpp",
+        "src/d_gain_p.cpp",
+        "src/d_plsf.cpp",
+        "src/d_plsf_3.cpp",
+        "src/d_plsf_5.cpp",
+        "src/dec_amr.cpp",
+        "src/dec_gain.cpp",
+        "src/dec_input_format_tab.cpp",
+        "src/dec_lag3.cpp",
+        "src/dec_lag6.cpp",
+        "src/dtx_dec.cpp",
+        "src/ec_gains.cpp",
+        "src/ex_ctrl.cpp",
+        "src/if2_to_ets.cpp",
+        "src/int_lsf.cpp",
+        "src/lsp_avg.cpp",
+        "src/ph_disp.cpp",
+        "src/post_pro.cpp",
+        "src/preemph.cpp",
+        "src/pstfilt.cpp",
+        "src/qgain475_tab.cpp",
+        "src/sp_dec.cpp",
+        "src/wmf_to_ets.cpp",
+    ],
+
+    export_include_dirs: ["src"],
+
+    cflags: [
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+        "-DOSCL_IMPORT_REF=",
+
+        "-Werror",
+    ],
+
+    //sanitize: {
+    //    misc_undefined: [
+    //        "signed-integer-overflow",
+    //    ],
+    //},
+
+    shared_libs: [
+        "libstagefright_amrnb_common",
+        "liblog",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+//###############################################################################
+cc_test {
+    name: "libstagefright_amrnbdec_test",
+    gtest: false,
+    host_supported: true,
+
+    srcs: ["test/amrnbdec_test.cpp"],
+
+    cflags: ["-Wall", "-Werror"],
+
+    local_include_dirs: ["src"],
+
+    static_libs: [
+        "libstagefright_amrnbdec",
+        "libsndfile",
+    ],
+
+    shared_libs: [
+        "libstagefright_amrnb_common",
+        "libaudioutils",
+        "liblog",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    //sanitize: {
+    //    misc_undefined: [
+    //        "signed-integer-overflow",
+    //    ],
+    //},
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/dec/NOTICE b/media/codecs/amrnb/dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/NOTICE
rename to media/codecs/amrnb/dec/NOTICE
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp b/media/codecs/amrnb/dec/src/a_refl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
rename to media/codecs/amrnb/dec/src/a_refl.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.h b/media/codecs/amrnb/dec/src/a_refl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/a_refl.h
rename to media/codecs/amrnb/dec/src/a_refl.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/agc.cpp b/media/codecs/amrnb/dec/src/agc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/agc.cpp
rename to media/codecs/amrnb/dec/src/agc.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/agc.h b/media/codecs/amrnb/dec/src/agc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/agc.h
rename to media/codecs/amrnb/dec/src/agc.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/amrdecode.cpp b/media/codecs/amrnb/dec/src/amrdecode.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/amrdecode.cpp
rename to media/codecs/amrnb/dec/src/amrdecode.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/amrdecode.h b/media/codecs/amrnb/dec/src/amrdecode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/amrdecode.h
rename to media/codecs/amrnb/dec/src/amrdecode.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.cpp b/media/codecs/amrnb/dec/src/b_cn_cod.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.cpp
rename to media/codecs/amrnb/dec/src/b_cn_cod.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.h b/media/codecs/amrnb/dec/src/b_cn_cod.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/b_cn_cod.h
rename to media/codecs/amrnb/dec/src/b_cn_cod.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/bgnscd.cpp b/media/codecs/amrnb/dec/src/bgnscd.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/bgnscd.cpp
rename to media/codecs/amrnb/dec/src/bgnscd.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/bgnscd.h b/media/codecs/amrnb/dec/src/bgnscd.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/bgnscd.h
rename to media/codecs/amrnb/dec/src/bgnscd.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/c_g_aver.cpp b/media/codecs/amrnb/dec/src/c_g_aver.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/c_g_aver.cpp
rename to media/codecs/amrnb/dec/src/c_g_aver.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/c_g_aver.h b/media/codecs/amrnb/dec/src/c_g_aver.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/c_g_aver.h
rename to media/codecs/amrnb/dec/src/c_g_aver.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp b/media/codecs/amrnb/dec/src/d1035pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp
rename to media/codecs/amrnb/dec/src/d1035pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.h b/media/codecs/amrnb/dec/src/d1035pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d1035pf.h
rename to media/codecs/amrnb/dec/src/d1035pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_11pf.cpp b/media/codecs/amrnb/dec/src/d2_11pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_11pf.cpp
rename to media/codecs/amrnb/dec/src/d2_11pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_11pf.h b/media/codecs/amrnb/dec/src/d2_11pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_11pf.h
rename to media/codecs/amrnb/dec/src/d2_11pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_9pf.cpp b/media/codecs/amrnb/dec/src/d2_9pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_9pf.cpp
rename to media/codecs/amrnb/dec/src/d2_9pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d2_9pf.h b/media/codecs/amrnb/dec/src/d2_9pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d2_9pf.h
rename to media/codecs/amrnb/dec/src/d2_9pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d3_14pf.cpp b/media/codecs/amrnb/dec/src/d3_14pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d3_14pf.cpp
rename to media/codecs/amrnb/dec/src/d3_14pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d3_14pf.h b/media/codecs/amrnb/dec/src/d3_14pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d3_14pf.h
rename to media/codecs/amrnb/dec/src/d3_14pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d4_17pf.cpp b/media/codecs/amrnb/dec/src/d4_17pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d4_17pf.cpp
rename to media/codecs/amrnb/dec/src/d4_17pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d4_17pf.h b/media/codecs/amrnb/dec/src/d4_17pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d4_17pf.h
rename to media/codecs/amrnb/dec/src/d4_17pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d8_31pf.cpp b/media/codecs/amrnb/dec/src/d8_31pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d8_31pf.cpp
rename to media/codecs/amrnb/dec/src/d8_31pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d8_31pf.h b/media/codecs/amrnb/dec/src/d8_31pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d8_31pf.h
rename to media/codecs/amrnb/dec/src/d8_31pf.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_gain_c.cpp b/media/codecs/amrnb/dec/src/d_gain_c.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_gain_c.cpp
rename to media/codecs/amrnb/dec/src/d_gain_c.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_gain_p.cpp b/media/codecs/amrnb/dec/src/d_gain_p.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_gain_p.cpp
rename to media/codecs/amrnb/dec/src/d_gain_p.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf.cpp b/media/codecs/amrnb/dec/src/d_plsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf.cpp
rename to media/codecs/amrnb/dec/src/d_plsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_3.cpp b/media/codecs/amrnb/dec/src/d_plsf_3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf_3.cpp
rename to media/codecs/amrnb/dec/src/d_plsf_3.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp b/media/codecs/amrnb/dec/src/d_plsf_5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp
rename to media/codecs/amrnb/dec/src/d_plsf_5.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_amr.cpp b/media/codecs/amrnb/dec/src/dec_amr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_amr.cpp
rename to media/codecs/amrnb/dec/src/dec_amr.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_amr.h b/media/codecs/amrnb/dec/src/dec_amr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_amr.h
rename to media/codecs/amrnb/dec/src/dec_amr.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_gain.cpp b/media/codecs/amrnb/dec/src/dec_gain.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_gain.cpp
rename to media/codecs/amrnb/dec/src/dec_gain.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_gain.h b/media/codecs/amrnb/dec/src/dec_gain.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_gain.h
rename to media/codecs/amrnb/dec/src/dec_gain.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp b/media/codecs/amrnb/dec/src/dec_input_format_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_input_format_tab.cpp
rename to media/codecs/amrnb/dec/src/dec_input_format_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag3.cpp b/media/codecs/amrnb/dec/src/dec_lag3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag3.cpp
rename to media/codecs/amrnb/dec/src/dec_lag3.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag3.h b/media/codecs/amrnb/dec/src/dec_lag3.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag3.h
rename to media/codecs/amrnb/dec/src/dec_lag3.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag6.cpp b/media/codecs/amrnb/dec/src/dec_lag6.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag6.cpp
rename to media/codecs/amrnb/dec/src/dec_lag6.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dec_lag6.h b/media/codecs/amrnb/dec/src/dec_lag6.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dec_lag6.h
rename to media/codecs/amrnb/dec/src/dec_lag6.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dtx_dec.cpp b/media/codecs/amrnb/dec/src/dtx_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dtx_dec.cpp
rename to media/codecs/amrnb/dec/src/dtx_dec.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/dtx_dec.h b/media/codecs/amrnb/dec/src/dtx_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/dtx_dec.h
rename to media/codecs/amrnb/dec/src/dtx_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ec_gains.cpp b/media/codecs/amrnb/dec/src/ec_gains.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ec_gains.cpp
rename to media/codecs/amrnb/dec/src/ec_gains.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ec_gains.h b/media/codecs/amrnb/dec/src/ec_gains.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ec_gains.h
rename to media/codecs/amrnb/dec/src/ec_gains.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.cpp b/media/codecs/amrnb/dec/src/ex_ctrl.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.cpp
rename to media/codecs/amrnb/dec/src/ex_ctrl.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.h b/media/codecs/amrnb/dec/src/ex_ctrl.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ex_ctrl.h
rename to media/codecs/amrnb/dec/src/ex_ctrl.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/gsmamr_dec.h b/media/codecs/amrnb/dec/src/gsmamr_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/gsmamr_dec.h
rename to media/codecs/amrnb/dec/src/gsmamr_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.cpp b/media/codecs/amrnb/dec/src/if2_to_ets.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.cpp
rename to media/codecs/amrnb/dec/src/if2_to_ets.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.h b/media/codecs/amrnb/dec/src/if2_to_ets.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/if2_to_ets.h
rename to media/codecs/amrnb/dec/src/if2_to_ets.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp b/media/codecs/amrnb/dec/src/int_lsf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp
rename to media/codecs/amrnb/dec/src/int_lsf.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/lsp_avg.cpp b/media/codecs/amrnb/dec/src/lsp_avg.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/lsp_avg.cpp
rename to media/codecs/amrnb/dec/src/lsp_avg.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/lsp_avg.h b/media/codecs/amrnb/dec/src/lsp_avg.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/lsp_avg.h
rename to media/codecs/amrnb/dec/src/lsp_avg.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp b/media/codecs/amrnb/dec/src/ph_disp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp
rename to media/codecs/amrnb/dec/src/ph_disp.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.h b/media/codecs/amrnb/dec/src/ph_disp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/ph_disp.h
rename to media/codecs/amrnb/dec/src/ph_disp.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/post_pro.cpp b/media/codecs/amrnb/dec/src/post_pro.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/post_pro.cpp
rename to media/codecs/amrnb/dec/src/post_pro.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/post_pro.h b/media/codecs/amrnb/dec/src/post_pro.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/post_pro.h
rename to media/codecs/amrnb/dec/src/post_pro.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/preemph.cpp b/media/codecs/amrnb/dec/src/preemph.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/preemph.cpp
rename to media/codecs/amrnb/dec/src/preemph.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/preemph.h b/media/codecs/amrnb/dec/src/preemph.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/preemph.h
rename to media/codecs/amrnb/dec/src/preemph.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp b/media/codecs/amrnb/dec/src/pstfilt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp
rename to media/codecs/amrnb/dec/src/pstfilt.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.h b/media/codecs/amrnb/dec/src/pstfilt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/pstfilt.h
rename to media/codecs/amrnb/dec/src/pstfilt.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp b/media/codecs/amrnb/dec/src/qgain475_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/qgain475_tab.cpp
rename to media/codecs/amrnb/dec/src/qgain475_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/sp_dec.cpp b/media/codecs/amrnb/dec/src/sp_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/sp_dec.cpp
rename to media/codecs/amrnb/dec/src/sp_dec.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/src/sp_dec.h b/media/codecs/amrnb/dec/src/sp_dec.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/sp_dec.h
rename to media/codecs/amrnb/dec/src/sp_dec.h
diff --git a/media/libstagefright/codecs/amrnb/dec/src/wmf_to_ets.cpp b/media/codecs/amrnb/dec/src/wmf_to_ets.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/src/wmf_to_ets.cpp
rename to media/codecs/amrnb/dec/src/wmf_to_ets.cpp
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h b/media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
rename to media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
rename to media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
diff --git a/media/codecs/amrnb/dec/test/Android.bp b/media/codecs/amrnb/dec/test/Android.bp
new file mode 100644
index 0000000..74258e0
--- /dev/null
+++ b/media/codecs/amrnb/dec/test/Android.bp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_dec_license",
+    ],
+}
+
+cc_test {
+    name: "AmrnbDecoderTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs: [
+        "AmrnbDecoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_amrnb_common",
+        "libstagefright_amrnbdec",
+        "libaudioutils",
+        "libsndfile",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/codecs/amrnb/dec/test/AndroidTest.xml b/media/codecs/amrnb/dec/test/AndroidTest.xml
new file mode 100644
index 0000000..1a9e678
--- /dev/null
+++ b/media/codecs/amrnb/dec/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Amr-nb Decoder unit test">
+    <option name="test-suite-tag" value="AmrnbDecoderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="AmrnbDecoderTest->/data/local/tmp/AmrnbDecoderTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip?unzip=true"
+            value="/data/local/tmp/AmrnbDecoderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="AmrnbDecoderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/AmrnbDecoderTestRes/" />
+    </test>
+</configuration>
diff --git a/media/codecs/amrnb/dec/test/README.md b/media/codecs/amrnb/dec/test/README.md
new file mode 100644
index 0000000..e9073e4
--- /dev/null
+++ b/media/codecs/amrnb/dec/test/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### AMR-NB Decoder:
+The AMR-NB Decoder Test Suite validates the AMR-NB decoder available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m AmrnbDecoderTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/amrnb/dec/test/AmrnbDecoderTest.zip). Download and unzip it, then push the extracted files to the device for testing.
+
+```
+adb push AmrnbDecoderTestRes/. /data/local/tmp/
+```
+
+usage: AmrnbDecoderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTestRes/
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest AmrnbDecoderTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp b/media/codecs/amrnb/dec/test/amrnbdec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp
rename to media/codecs/amrnb/dec/test/amrnbdec_test.cpp
diff --git a/media/codecs/amrnb/enc/Android.bp b/media/codecs/amrnb/enc/Android.bp
new file mode 100644
index 0000000..9e947e9
--- /dev/null
+++ b/media/codecs/amrnb/enc/Android.bp
@@ -0,0 +1,145 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_enc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrnb_enc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_amrnbenc",
+    vendor_available: true,
+    min_sdk_version: "29",
+
+    srcs: [
+        "src/amrencode.cpp",
+        "src/autocorr.cpp",
+        "src/c1035pf.cpp",
+        "src/c2_11pf.cpp",
+        "src/c2_9pf.cpp",
+        "src/c3_14pf.cpp",
+        "src/c4_17pf.cpp",
+        "src/c8_31pf.cpp",
+        "src/calc_cor.cpp",
+        "src/calc_en.cpp",
+        "src/cbsearch.cpp",
+        "src/cl_ltp.cpp",
+        "src/cod_amr.cpp",
+        "src/convolve.cpp",
+        "src/cor_h.cpp",
+        "src/cor_h_x.cpp",
+        "src/cor_h_x2.cpp",
+        "src/corrwght_tab.cpp",
+        "src/dtx_enc.cpp",
+        "src/enc_lag3.cpp",
+        "src/enc_lag6.cpp",
+        "src/enc_output_format_tab.cpp",
+        "src/ets_to_if2.cpp",
+        "src/ets_to_wmf.cpp",
+        "src/g_adapt.cpp",
+        "src/g_code.cpp",
+        "src/g_pitch.cpp",
+        "src/gain_q.cpp",
+        "src/hp_max.cpp",
+        "src/inter_36.cpp",
+        "src/inter_36_tab.cpp",
+        "src/l_comp.cpp",
+        "src/l_extract.cpp",
+        "src/l_negate.cpp",
+        "src/lag_wind.cpp",
+        "src/lag_wind_tab.cpp",
+        "src/levinson.cpp",
+        "src/lpc.cpp",
+        "src/ol_ltp.cpp",
+        "src/p_ol_wgh.cpp",
+        "src/pitch_fr.cpp",
+        "src/pitch_ol.cpp",
+        "src/pre_big.cpp",
+        "src/pre_proc.cpp",
+        "src/prm2bits.cpp",
+        "src/q_gain_c.cpp",
+        "src/q_gain_p.cpp",
+        "src/qgain475.cpp",
+        "src/qgain795.cpp",
+        "src/qua_gain.cpp",
+        "src/s10_8pf.cpp",
+        "src/set_sign.cpp",
+        "src/sid_sync.cpp",
+        "src/sp_enc.cpp",
+        "src/spreproc.cpp",
+        "src/spstproc.cpp",
+        "src/ton_stab.cpp",
+    ],
+
+    header_libs: ["libstagefright_headers"],
+    export_include_dirs: ["src"],
+
+    cflags: [
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+        "-Werror",
+    ],
+
+    //addressing b/25409744
+    //sanitize: {
+    //    misc_undefined: [
+    //        "signed-integer-overflow",
+    //    ],
+    //},
+
+    shared_libs: ["libstagefright_amrnb_common"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+//###############################################################################
+
+cc_test {
+    name: "libstagefright_amrnbenc_test",
+    gtest: false,
+
+    srcs: ["test/amrnb_enc_test.cpp"],
+
+    cflags: ["-Wall", "-Werror"],
+
+    local_include_dirs: ["src"],
+
+    static_libs: ["libstagefright_amrnbenc"],
+
+    shared_libs: ["libstagefright_amrnb_common"],
+
+    //addressing b/25409744
+    //sanitize: {
+    //    misc_undefined: [
+    //        "signed-integer-overflow",
+    //    ],
+    //},
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2 b/media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
rename to media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/enc/NOTICE b/media/codecs/amrnb/enc/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/NOTICE
rename to media/codecs/amrnb/enc/NOTICE
diff --git a/media/codecs/amrnb/enc/fuzzer/Android.bp b/media/codecs/amrnb/enc/fuzzer/Android.bp
new file mode 100644
index 0000000..2c041b7
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/Android.bp
@@ -0,0 +1,52 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_enc_license",
+    ],
+}
+
+cc_fuzz {
+    name: "amrnb_enc_fuzzer",
+    host_supported: true,
+
+    srcs: [
+        "amrnb_enc_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_amrnbenc",
+        "libstagefright_amrnb_common",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/codecs/amrnb/enc/fuzzer/README.md b/media/codecs/amrnb/enc/fuzzer/README.md
new file mode 100644
index 0000000..239b4a8
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrnbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR-NB is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AMR-NB supports the following parameters:
+1. Output Format (parameter name: `outputFormat`)
+2. Mode (parameter name: `mode`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|-------------------|
+| `outputFormat` | 0. `AMR_TX_WMF` 1. `AMR_TX_IF2` 2. `AMR_TX_ETS` | Bits 0, 1 and 2 of 1st byte of data. |
+| `mode` | 0. `MR475` 1. `MR515` 2. `MR59` 3. `MR67` 4. `MR74` 5. `MR795` 6. `MR102` 7. `MR122` 8. `MRDTX` | Bits 3, 4, 5 and 6 of 1st byte of data. |
+
+This also ensures that the plugin is always deterministic for any given input.
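+
+A minimal sketch of that byte-to-parameter mapping, mirroring the arithmetic in
+`amrnb_enc_fuzzer.cpp` from this change (the helper functions below are illustrative
+only, not part of the encoder API):
+```
+#include <cstdint>
+
+// How the first byte of fuzzer input selects the encoder configuration,
+// using the same arithmetic as Codec::encodeFrames() in amrnb_enc_fuzzer.cpp.
+int modeIndex(uint8_t startByte)   { return (startByte >> 3) % 9; }  // index into kModes (MR475 .. MRDTX)
+int formatIndex(uint8_t startByte) { return startByte % 3; }         // index into kOutputFormat (WMF / IF2 / ETS)
+```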
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation is successful, the input is advanced by the frame size.
+If the encode operation is unsuccessful, the input is still advanced by the frame size so
+that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
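+
+A condensed view of that feed loop, simplified from `Codec::encodeFrames()` in
+`amrnb_enc_fuzzer.cpp` (state setup and the surrounding class are omitted here):
+```
+// Each iteration copies up to one frame of samples and always advances,
+// so short or malformed inputs cannot stall the fuzzer.
+while (size > 0) {
+    Word16 inputBuf[kNumInputSamples] = {};
+    uint8_t outputBuf[kOutputBufferSize] = {};
+    size_t chunk = std::min(size, sizeof(inputBuf));
+    memcpy(inputBuf, data, chunk);
+    AMREncode(mEncState, mSidState, mode, inputBuf, outputBuf, &frameType, outputFormat);
+    data += chunk;   // advance regardless of the encode result
+    size -= chunk;
+}
+```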
+
+## Build
+
+This describes the steps to build the amrnb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amrnb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some PCM files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/amrnb_enc_fuzzer/amrnb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp b/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
new file mode 100644
index 0000000..2fcbf24
--- /dev/null
+++ b/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
@@ -0,0 +1,105 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "gsmamr_enc.h"
+
+// Constants for AMR-NB
+const int32_t kNumInputSamples = L_FRAME;  // 160 samples
+const int32_t kOutputBufferSize = 2 * kNumInputSamples * sizeof(Word16);
+const Mode kModes[9] = {MR475, /* 4.75 kbps */
+                        MR515, /* 5.15 kbps */
+                        MR59,  /* 5.90 kbps */
+                        MR67,  /* 6.70 kbps */
+                        MR74,  /* 7.40 kbps */
+                        MR795, /* 7.95 kbps */
+                        MR102, /* 10.2 kbps */
+                        MR122, /* 12.2 kbps */
+                        MRDTX, /* DTX       */};
+const Word16 kOutputFormat[3] = {AMR_TX_WMF, AMR_TX_IF2, AMR_TX_ETS};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    Word16 initEncoder(const uint8_t *data);
+    void deInitEncoder();
+    void encodeFrames(const uint8_t *data, size_t size);
+
+   private:
+    void *mEncState = nullptr;
+    void *mSidState = nullptr;
+};
+
+Word16 Codec::initEncoder(const uint8_t *data) {
+    return AMREncodeInit(&mEncState, &mSidState, (*data >> 1) & 0x01 /* dtx_enable flag */);
+}
+
+void Codec::deInitEncoder() {
+    if (mEncState) {
+        AMREncodeExit(&mEncState, &mSidState);
+        mEncState = nullptr;
+        mSidState = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    AMREncodeReset(mEncState, mSidState);
+    uint8_t startByte = *data;
+    int modeIndex = ((startByte >> 3) % 9);
+    int outputFormatIndex = (startByte % 3);
+    Mode mode = kModes[modeIndex];
+    Word16 outputFormat = kOutputFormat[outputFormatIndex];
+
+    // Consume startByte
+    data++;
+    size--;
+
+    while (size > 0) {
+        Frame_Type_3GPP frameType = (Frame_Type_3GPP)mode;
+
+        Word16 inputBuf[kNumInputSamples] = {};
+        int32_t minSize = std::min(size, sizeof(inputBuf));
+
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        memcpy(inputBuf, data, minSize);
+
+        AMREncode(mEncState, mSidState, mode, inputBuf, outputBuf, &frameType, outputFormat);
+
+        data += minSize;
+        size -= minSize;
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data) == 0) {
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/amrencode.cpp b/media/codecs/amrnb/enc/src/amrencode.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/amrencode.cpp
rename to media/codecs/amrnb/enc/src/amrencode.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/amrencode.h b/media/codecs/amrnb/enc/src/amrencode.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/amrencode.h
rename to media/codecs/amrnb/enc/src/amrencode.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp b/media/codecs/amrnb/enc/src/autocorr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp
rename to media/codecs/amrnb/enc/src/autocorr.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/autocorr.h b/media/codecs/amrnb/enc/src/autocorr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/autocorr.h
rename to media/codecs/amrnb/enc/src/autocorr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c1035pf.cpp b/media/codecs/amrnb/enc/src/c1035pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c1035pf.cpp
rename to media/codecs/amrnb/enc/src/c1035pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c1035pf.h b/media/codecs/amrnb/enc/src/c1035pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c1035pf.h
rename to media/codecs/amrnb/enc/src/c1035pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_11pf.cpp b/media/codecs/amrnb/enc/src/c2_11pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_11pf.cpp
rename to media/codecs/amrnb/enc/src/c2_11pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_11pf.h b/media/codecs/amrnb/enc/src/c2_11pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_11pf.h
rename to media/codecs/amrnb/enc/src/c2_11pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp b/media/codecs/amrnb/enc/src/c2_9pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp
rename to media/codecs/amrnb/enc/src/c2_9pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.h b/media/codecs/amrnb/enc/src/c2_9pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c2_9pf.h
rename to media/codecs/amrnb/enc/src/c2_9pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c3_14pf.cpp b/media/codecs/amrnb/enc/src/c3_14pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c3_14pf.cpp
rename to media/codecs/amrnb/enc/src/c3_14pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c3_14pf.h b/media/codecs/amrnb/enc/src/c3_14pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c3_14pf.h
rename to media/codecs/amrnb/enc/src/c3_14pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c4_17pf.cpp b/media/codecs/amrnb/enc/src/c4_17pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c4_17pf.cpp
rename to media/codecs/amrnb/enc/src/c4_17pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c4_17pf.h b/media/codecs/amrnb/enc/src/c4_17pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c4_17pf.h
rename to media/codecs/amrnb/enc/src/c4_17pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c8_31pf.cpp b/media/codecs/amrnb/enc/src/c8_31pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c8_31pf.cpp
rename to media/codecs/amrnb/enc/src/c8_31pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c8_31pf.h b/media/codecs/amrnb/enc/src/c8_31pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/c8_31pf.h
rename to media/codecs/amrnb/enc/src/c8_31pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_cor.cpp b/media/codecs/amrnb/enc/src/calc_cor.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_cor.cpp
rename to media/codecs/amrnb/enc/src/calc_cor.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_cor.h b/media/codecs/amrnb/enc/src/calc_cor.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_cor.h
rename to media/codecs/amrnb/enc/src/calc_cor.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_en.cpp b/media/codecs/amrnb/enc/src/calc_en.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_en.cpp
rename to media/codecs/amrnb/enc/src/calc_en.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/calc_en.h b/media/codecs/amrnb/enc/src/calc_en.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/calc_en.h
rename to media/codecs/amrnb/enc/src/calc_en.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cbsearch.cpp b/media/codecs/amrnb/enc/src/cbsearch.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cbsearch.cpp
rename to media/codecs/amrnb/enc/src/cbsearch.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cbsearch.h b/media/codecs/amrnb/enc/src/cbsearch.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cbsearch.h
rename to media/codecs/amrnb/enc/src/cbsearch.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp b/media/codecs/amrnb/enc/src/cl_ltp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp
rename to media/codecs/amrnb/enc/src/cl_ltp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.h b/media/codecs/amrnb/enc/src/cl_ltp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cl_ltp.h
rename to media/codecs/amrnb/enc/src/cl_ltp.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cod_amr.cpp b/media/codecs/amrnb/enc/src/cod_amr.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cod_amr.cpp
rename to media/codecs/amrnb/enc/src/cod_amr.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cod_amr.h b/media/codecs/amrnb/enc/src/cod_amr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cod_amr.h
rename to media/codecs/amrnb/enc/src/cod_amr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp b/media/codecs/amrnb/enc/src/convolve.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/convolve.cpp
rename to media/codecs/amrnb/enc/src/convolve.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/convolve.h b/media/codecs/amrnb/enc/src/convolve.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/convolve.h
rename to media/codecs/amrnb/enc/src/convolve.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp b/media/codecs/amrnb/enc/src/cor_h.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp
rename to media/codecs/amrnb/enc/src/cor_h.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h.h b/media/codecs/amrnb/enc/src/cor_h.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h.h
rename to media/codecs/amrnb/enc/src/cor_h.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp b/media/codecs/amrnb/enc/src/cor_h_x.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp
rename to media/codecs/amrnb/enc/src/cor_h_x.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.h b/media/codecs/amrnb/enc/src/cor_h_x.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x.h
rename to media/codecs/amrnb/enc/src/cor_h_x.h
diff --git a/media/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/codecs/amrnb/enc/src/cor_h_x2.cpp
new file mode 100644
index 0000000..e32eb4a
--- /dev/null
+++ b/media/codecs/amrnb/enc/src/cor_h_x2.cpp
@@ -0,0 +1,282 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+    3GPP TS 26.073
+    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
+    Available from http://www.3gpp.org
+
+(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Pathname: ./audio/gsm-amr/c/src/cor_h_x2.c
+
+     Date: 11/07/2001
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+ Description: Created a separate file for cor_h_x2 function.
+
+ Description: Fixed typecasting issue with TI C compiler and defined one
+              local variable per line. Updated copyright year.
+
+ Description: Added #define for log2(32) = 5.
+
+ Description: Added call to round() and L_shl() functions in the last FOR
+              loop to make code bit-exact.
+
+ Description: Added pOverflow as a variable that's passed in for the EPOC
+              modifications.
+
+ Description: Changed round function name to pv_round to avoid conflict with
+              round function in C standard library.
+
+ Description: Using intrinsics from fxp_arithmetic.h .
+
+ Description: Replacing fxp_arithmetic.h with basic_op.h.
+
+ Description:
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "typedef.h"
+#include "cnst.h"
+#include "cor_h_x.h"
+#include "cor_h_x2.h" // BX
+#include "basic_op.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+#define LOG2_OF_32  5
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: cor_h_x2
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    h = vector containing the impulse response of the weighted synthesis
+        filter; vector contents are of type Word16; vector length is
+        2 * L_SUBFR
+    x = target signal vector; vector contents are of type Word16; vector
+        length is L_SUBFR
+    dn = vector containing the correlation between the target and the
+         impulse response; vector contents are of type Word16; vector
+         length is L_CODE
+    sf = scaling factor of type Word16 ; 2 when mode is MR122, 1 for all
+         other modes
+    nb_track = number of ACB tracks (Word16)
+    step = step size between pulses in one track (Word16)
+    pOverflow = pointer to overflow (Flag)
+
+ Outputs:
+    dn contents are the newly calculated correlation values
+    pOverflow = 1 if the math functions called by cor_h_x2 result in overflow
+    else zero.
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This function computes the correlation between the target signal (x) and the
+ impulse response (h).
+
+ The correlation is given by: d[n] = sum_{i=n}^{L-1} x[i] h[i-n],
+ where: n=0,...,L-1
+
+ d[n] is normalized such that the sum of 5 maxima of d[n] corresponding to
+ each position track does not saturate.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ cor_h.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+The original ETSI reference code uses a global flag Overflow. However, in the
+actual implementation a pointer to the overflow flag is passed in.
+
+void cor_h_x2 (
+    Word16 h[],    // (i): impulse response of weighted synthesis filter
+    Word16 x[],    // (i): target
+    Word16 dn[],   // (o): correlation between target and h[]
+    Word16 sf,     // (i): scaling factor: 2 for 12.2, 1 for others
+    Word16 nb_track,// (i): the number of ACB tracks
+    Word16 step    // (i): step size from one pulse position to the next
+                           in one track
+)
+{
+    Word16 i, j, k;
+    Word32 s, y32[L_CODE], max, tot;
+
+    // first keep the result on 32 bits and find absolute maximum
+
+    tot = 5;
+
+    for (k = 0; k < nb_track; k++)
+    {
+        max = 0;
+        for (i = k; i < L_CODE; i += step)
+        {
+            s = 0;
+            for (j = i; j < L_CODE; j++)
+                s = L_mac (s, x[j], h[j - i]);
+
+            y32[i] = s;
+
+            s = L_abs (s);
+            if (L_sub (s, max) > (Word32) 0L)
+                max = s;
+        }
+        tot = L_add (tot, L_shr (max, 1));
+    }
+
+    j = sub (norm_l (tot), sf);
+
+    for (i = 0; i < L_CODE; i++)
+    {
+        dn[i] = pv_round (L_shl (y32[i], j));
+    }
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+void cor_h_x2(
+    Word16 h[],    /* (i): impulse response of weighted synthesis filter */
+    Word16 x[],    /* (i): target                                        */
+    Word16 dn[],   /* (o): correlation between target and h[]            */
+    Word16 sf,     /* (i): scaling factor: 2 for 12.2, 1 for others      */
+    Word16 nb_track,/* (i): the number of ACB tracks                     */
+    Word16 step,   /* (i): step size from one pulse position to the next
+                           in one track                                  */
+    Flag *pOverflow
+)
+{
+    Word16 i;
+    Word16 j;
+    Word16 k;
+    Word32 s;
+    Word32 y32[L_CODE]{};
+    Word32 max;
+    Word32 tot;
+
+
+    /* first keep the result on 32 bits and find absolute maximum */
+    tot = LOG2_OF_32;
+    for (k = 0; k < nb_track; k++)
+    {
+        max = 0;
+        for (i = k; i < L_CODE; i += step)
+        {
+            s = 0;
+
+            for (j = i; j < L_CODE; j++)
+            {
+                s = amrnb_fxp_mac_16_by_16bb((Word32)x[j], (Word32)h[j-i], s);
+            }
+
+            s = s << 1;
+            y32[i] = s;
+            s = L_abs(s);
+
+            if (s > max)
+            {
+                max = s;
+            }
+        }
+        tot = (tot + (max >> 1));
+    }
+
+    j = sub(norm_l(tot), sf, pOverflow);
+
+    for (i = 0; i < L_CODE; i++)
+    {
+        dn[i] = pv_round(L_shl(y32[i], j, pOverflow), pOverflow);
+    }
+
+    return;
+}
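+
+/* Illustrative call sketch (editorial addition, not part of the reference
+ * code; NB_TRACK and STEP are placeholders for the caller's track count and
+ * track step):
+ *
+ *     Word16 h[2 * L_SUBFR];   // impulse response of weighted synthesis filter
+ *     Word16 x[L_SUBFR];       // target signal
+ *     Word16 dn[L_CODE];       // receives the scaled correlations
+ *     Flag   overflow = 0;
+ *
+ *     cor_h_x2(h, x, dn, 1, NB_TRACK, STEP, &overflow);  // sf = 1 for non-MR122
+ *
+ * On return dn[] holds d[n] scaled so that the per-track maxima do not
+ * saturate, and overflow is set if sub(), L_shl() or pv_round() saturate.
+ */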
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.h b/media/codecs/amrnb/enc/src/cor_h_x2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.h
rename to media/codecs/amrnb/enc/src/cor_h_x2.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp b/media/codecs/amrnb/enc/src/corrwght_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/corrwght_tab.cpp
rename to media/codecs/amrnb/enc/src/corrwght_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp b/media/codecs/amrnb/enc/src/dtx_enc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp
rename to media/codecs/amrnb/enc/src/dtx_enc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.h b/media/codecs/amrnb/enc/src/dtx_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/dtx_enc.h
rename to media/codecs/amrnb/enc/src/dtx_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag3.cpp b/media/codecs/amrnb/enc/src/enc_lag3.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag3.cpp
rename to media/codecs/amrnb/enc/src/enc_lag3.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag3.h b/media/codecs/amrnb/enc/src/enc_lag3.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag3.h
rename to media/codecs/amrnb/enc/src/enc_lag3.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag6.cpp b/media/codecs/amrnb/enc/src/enc_lag6.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag6.cpp
rename to media/codecs/amrnb/enc/src/enc_lag6.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_lag6.h b/media/codecs/amrnb/enc/src/enc_lag6.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_lag6.h
rename to media/codecs/amrnb/enc/src/enc_lag6.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp b/media/codecs/amrnb/enc/src/enc_output_format_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/enc_output_format_tab.cpp
rename to media/codecs/amrnb/enc/src/enc_output_format_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.cpp b/media/codecs/amrnb/enc/src/ets_to_if2.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.cpp
rename to media/codecs/amrnb/enc/src/ets_to_if2.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.h b/media/codecs/amrnb/enc/src/ets_to_if2.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_if2.h
rename to media/codecs/amrnb/enc/src/ets_to_if2.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.cpp b/media/codecs/amrnb/enc/src/ets_to_wmf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.cpp
rename to media/codecs/amrnb/enc/src/ets_to_wmf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.h b/media/codecs/amrnb/enc/src/ets_to_wmf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ets_to_wmf.h
rename to media/codecs/amrnb/enc/src/ets_to_wmf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_adapt.cpp b/media/codecs/amrnb/enc/src/g_adapt.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_adapt.cpp
rename to media/codecs/amrnb/enc/src/g_adapt.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_adapt.h b/media/codecs/amrnb/enc/src/g_adapt.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_adapt.h
rename to media/codecs/amrnb/enc/src/g_adapt.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_code.cpp b/media/codecs/amrnb/enc/src/g_code.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_code.cpp
rename to media/codecs/amrnb/enc/src/g_code.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_code.h b/media/codecs/amrnb/enc/src/g_code.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_code.h
rename to media/codecs/amrnb/enc/src/g_code.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_pitch.cpp b/media/codecs/amrnb/enc/src/g_pitch.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_pitch.cpp
rename to media/codecs/amrnb/enc/src/g_pitch.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/g_pitch.h b/media/codecs/amrnb/enc/src/g_pitch.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/g_pitch.h
rename to media/codecs/amrnb/enc/src/g_pitch.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gain_q.cpp b/media/codecs/amrnb/enc/src/gain_q.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gain_q.cpp
rename to media/codecs/amrnb/enc/src/gain_q.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gain_q.h b/media/codecs/amrnb/enc/src/gain_q.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gain_q.h
rename to media/codecs/amrnb/enc/src/gain_q.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/gsmamr_enc.h b/media/codecs/amrnb/enc/src/gsmamr_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/gsmamr_enc.h
rename to media/codecs/amrnb/enc/src/gsmamr_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/hp_max.cpp b/media/codecs/amrnb/enc/src/hp_max.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/hp_max.cpp
rename to media/codecs/amrnb/enc/src/hp_max.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/hp_max.h b/media/codecs/amrnb/enc/src/hp_max.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/hp_max.h
rename to media/codecs/amrnb/enc/src/hp_max.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36.cpp b/media/codecs/amrnb/enc/src/inter_36.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36.cpp
rename to media/codecs/amrnb/enc/src/inter_36.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36.h b/media/codecs/amrnb/enc/src/inter_36.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36.h
rename to media/codecs/amrnb/enc/src/inter_36.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp b/media/codecs/amrnb/enc/src/inter_36_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.cpp
rename to media/codecs/amrnb/enc/src/inter_36_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.h b/media/codecs/amrnb/enc/src/inter_36_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/inter_36_tab.h
rename to media/codecs/amrnb/enc/src/inter_36_tab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_comp.cpp b/media/codecs/amrnb/enc/src/l_comp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_comp.cpp
rename to media/codecs/amrnb/enc/src/l_comp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_extract.cpp b/media/codecs/amrnb/enc/src/l_extract.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_extract.cpp
rename to media/codecs/amrnb/enc/src/l_extract.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/l_negate.cpp b/media/codecs/amrnb/enc/src/l_negate.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/l_negate.cpp
rename to media/codecs/amrnb/enc/src/l_negate.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind.cpp b/media/codecs/amrnb/enc/src/lag_wind.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind.cpp
rename to media/codecs/amrnb/enc/src/lag_wind.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind.h b/media/codecs/amrnb/enc/src/lag_wind.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind.h
rename to media/codecs/amrnb/enc/src/lag_wind.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp b/media/codecs/amrnb/enc/src/lag_wind_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.cpp
rename to media/codecs/amrnb/enc/src/lag_wind_tab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.h b/media/codecs/amrnb/enc/src/lag_wind_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lag_wind_tab.h
rename to media/codecs/amrnb/enc/src/lag_wind_tab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp b/media/codecs/amrnb/enc/src/levinson.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/levinson.cpp
rename to media/codecs/amrnb/enc/src/levinson.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/levinson.h b/media/codecs/amrnb/enc/src/levinson.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/levinson.h
rename to media/codecs/amrnb/enc/src/levinson.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lpc.cpp b/media/codecs/amrnb/enc/src/lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lpc.cpp
rename to media/codecs/amrnb/enc/src/lpc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/lpc.h b/media/codecs/amrnb/enc/src/lpc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/lpc.h
rename to media/codecs/amrnb/enc/src/lpc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ol_ltp.cpp b/media/codecs/amrnb/enc/src/ol_ltp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ol_ltp.cpp
rename to media/codecs/amrnb/enc/src/ol_ltp.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ol_ltp.h b/media/codecs/amrnb/enc/src/ol_ltp.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ol_ltp.h
rename to media/codecs/amrnb/enc/src/ol_ltp.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/p_ol_wgh.cpp b/media/codecs/amrnb/enc/src/p_ol_wgh.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/p_ol_wgh.cpp
rename to media/codecs/amrnb/enc/src/p_ol_wgh.cpp
diff --git a/media/codecs/amrnb/enc/src/pitch_fr.cpp b/media/codecs/amrnb/enc/src/pitch_fr.cpp
new file mode 100644
index 0000000..584f79b
--- /dev/null
+++ b/media/codecs/amrnb/enc/src/pitch_fr.cpp
@@ -0,0 +1,1633 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+    3GPP TS 26.073
+    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
+    Available from http://www.3gpp.org
+
+(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Pathname: ./audio/gsm-amr/c/src/pitch_fr.c
+ Functions:
+
+
+     Date: 02/04/2002
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+ Description: Added pOverflow as a passed-in value to searchFrac and made
+              other simple syntax fixes to the code. Removed the include of
+              stdio.h.
+
+ Description: *lag-- decrements the pointer.  (*lag)-- decrements what is
+ pointed to.  The latter is what the coder intended, but the former is
+ the coding instruction that was used.
+
+ Description: A common problem -- a comparison != 0 was inadvertently replaced
+ by a comparison == 0.
+
+
+ Description:  For Norm_Corr() and getRange()
+              1. Eliminated unused include files.
+              2. Replaced array addressing by pointers
+              3. Eliminated math operations that unnecessarily checked for
+                 saturation, in some cases by shifting before adding and
+                 in other cases by evaluating the operands
+              4. Unrolled loops to speed up processing, used decrement loops
+              5. Replaced extract_l() call with equivalent code
+              6. Modified scaling threshold and group all shifts (avoiding
+                 successive shifts)
+
+ Description:  Replaced OSCL mem type functions and eliminated include
+               files that now are chosen by OSCL definitions
+
+ Description:  Replaced "int" and/or "char" with OSCL defined types.
+
+ Description: Removed compiler warnings.
+
+ Description:
+------------------------------------------------------------------------------
+ MODULE DESCRIPTION
+
+      File             : pitch_fr.c
+      Purpose          : Find the pitch period with 1/3 or 1/6 subsample
+                       : resolution (closed loop).
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include <stdlib.h>
+
+#include "pitch_fr.h"
+#include "oper_32b.h"
+#include "cnst.h"
+#include "enc_lag3.h"
+#include "enc_lag6.h"
+#include "inter_36.h"
+#include "inv_sqrt.h"
+#include "convolve.h"
+
+#include "basic_op.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL VARIABLE DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*
+ * mode dependent parameters used in Pitch_fr()
+ * Note: order of MRxx in 'enum Mode' is important!
+ */
+static const struct
+{
+    Word16 max_frac_lag;     /* lag up to which fractional lags are used    */
+    Word16 flag3;            /* enable 1/3 instead of 1/6 fract. resolution */
+    Word16 first_frac;       /* first fractional to check                   */
+    Word16 last_frac;        /* last fractional to check                    */
+    Word16 delta_int_low;    /* integer lag below TO to start search from   */
+    Word16 delta_int_range;  /* integer range around T0                     */
+    Word16 delta_frc_low;    /* fractional below T0                         */
+    Word16 delta_frc_range;  /* fractional range around T0                  */
+    Word16 pit_min;          /* minimum pitch                               */
+} mode_dep_parm[N_MODES] =
+{
+    /* MR475 */  { 84,  1, -2,  2,  5, 10,  5,  9, PIT_MIN },
+    /* MR515 */  { 84,  1, -2,  2,  5, 10,  5,  9, PIT_MIN },
+    /* MR59  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
+    /* MR67  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
+    /* MR74  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
+    /* MR795 */  { 84,  1, -2,  2,  3,  6, 10, 19, PIT_MIN },
+    /* MR102 */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
+    /* MR122 */  { 94,  0, -3,  3,  3,  6,  5,  9, PIT_MIN_MR122 }
+};
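+
+/* Illustrative note (editorial addition): the table above is indexed directly
+ * by the 'enum Mode' value, e.g.
+ *
+ *     Word16 flag3   = mode_dep_parm[mode].flag3;
+ *     Word16 pit_min = mode_dep_parm[mode].pit_min;
+ *
+ * so MR122 searches fractional lags up to 94 with 1/6 resolution (flag3 == 0,
+ * fractions -3..3), while all other modes use 1/3 resolution (flag3 == 1,
+ * fractions -2..2).
+ */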
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: Norm_Corr
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    exc[] = pointer to buffer of type Word16
+    xn[]  = pointer to buffer of type Word16
+    h[]   = pointer to buffer of type Word16
+    L_subfr = length of sub frame (Word16)
+    t_min  = the minimum table value of type Word16
+    t_max = the maximum table value of type Word16
+    corr_norm[] = pointer to buffer of type Word16
+
+ Outputs:
+    corr_norm[] contents are the normalized correlation values
+    pOverflow = 1 if the math functions called result in overflow else zero.
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+  FUNCTION:   Norm_Corr()
+
+  PURPOSE: Find the normalized correlation between the target vector
+           and the filtered past excitation.
+
+  DESCRIPTION:
+     The normalized correlation is given by the correlation between the
+     target and filtered past excitation divided by the square root of
+     the energy of filtered excitation.
+                   corr[k] = <x[], y_k[]>/sqrt(y_k[],y_k[])
+     where x[] is the target vector and y_k[] is the filtered past
+     excitation at delay k.
+
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+static void Norm_Corr (Word16 exc[], Word16 xn[], Word16 h[], Word16 L_subfr,
+                       Word16 t_min, Word16 t_max, Word16 corr_norm[])
+{
+    Word16 i, j, k;
+    Word16 corr_h, corr_l, norm_h, norm_l;
+    Word32 s;
+
+    // Usually dynamic allocation of (L_subfr)
+    Word16 excf[L_SUBFR];
+    Word16 scaling, h_fac, *s_excf, scaled_excf[L_SUBFR];
+
+    k = -t_min;
+
+    // compute the filtered excitation for the first delay t_min
+
+    Convolve (&exc[k], h, excf, L_subfr);
+
+    // scale "excf[]" to avoid overflow
+
+    for (j = 0; j < L_subfr; j++) {
+        scaled_excf[j] = shr (excf[j], 2);
+    }
+
+    // Compute 1/sqrt(energy of excf[])
+
+    s = 0;
+    for (j = 0; j < L_subfr; j++) {
+        s = L_mac (s, excf[j], excf[j]);
+    }
+    if (L_sub (s, 67108864L) <= 0) {            // if (s <= 2^26)
+        s_excf = excf;
+        h_fac = 15 - 12;
+        scaling = 0;
+    }
+    else {
+        // "excf[]" is divided by 2
+        s_excf = scaled_excf;
+        h_fac = 15 - 12 - 2;
+        scaling = 2;
+    }
+
+    // loop for every possible period
+
+    for (i = t_min; i <= t_max; i++) {
+        // Compute 1/sqrt(energy of excf[])
+
+        s = 0;
+        for (j = 0; j < L_subfr; j++) {
+            s = L_mac (s, s_excf[j], s_excf[j]);
+        }
+
+        s = Inv_sqrt (s);
+        L_Extract (s, &norm_h, &norm_l);
+
+        // Compute correlation between xn[] and excf[]
+
+        s = 0;
+        for (j = 0; j < L_subfr; j++) {
+            s = L_mac (s, xn[j], s_excf[j]);
+        }
+        L_Extract (s, &corr_h, &corr_l);
+
+        // Normalize correlation = correlation * (1/sqrt(energy))
+
+        s = Mpy_32 (corr_h, corr_l, norm_h, norm_l);
+
+        corr_norm[i] = extract_h (L_shl (s, 16));
+
+            // modify the filtered excitation excf[] for the next iteration
+
+        if (sub (i, t_max) != 0) {
+            k--;
+            for (j = L_subfr - 1; j > 0; j--) {
+                s = L_mult (exc[k], h[j]);
+                s = L_shl (s, h_fac);
+                s_excf[j] = add (extract_h (s), s_excf[j - 1]);
+            }
+            s_excf[0] = shr (exc[k], scaling);
+        }
+    }
+    return;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+static void Norm_Corr(Word16 exc[],
+                      Word16 xn[],
+                      Word16 h[],
+                      Word16 L_subfr,
+                      Word16 t_min,
+                      Word16 t_max,
+                      Word16 corr_norm[],
+                      Flag *pOverflow)
+{
+    Word16 i;
+    Word16 j;
+    Word16 k;
+    Word16 corr_h;
+    Word16 corr_l;
+    Word16 norm_h;
+    Word16 norm_l;
+    Word32 s;
+    Word32 s2;
+    Word16 excf[L_SUBFR];
+    Word16 scaling;
+    Word16 h_fac;
+    Word16 *s_excf;
+    Word16 scaled_excf[L_SUBFR];
+    Word16 *p_s_excf;
+    Word16 *p_excf;
+    Word16  temp;
+    Word16 *p_x;
+    Word16 *p_h;
+
+    k = -t_min;
+
+    /* compute the filtered excitation for the first delay t_min */
+
+    Convolve(&exc[k], h, excf, L_subfr);
+
+    /* scale "excf[]" to avoid overflow */
+    s = 0;
+    p_s_excf = scaled_excf;
+    p_excf   = excf;
+
+    for (j = (L_subfr >> 1); j != 0; j--)
+    {
+        temp = *(p_excf++);
+        *(p_s_excf++) = temp >> 2;
+        s += (Word32) temp * temp;
+        temp = *(p_excf++);
+        *(p_s_excf++) = temp >> 2;
+        s += (Word32) temp * temp;
+    }
+
+
+    if (s <= (67108864L >> 1))
+    {
+        s_excf = excf;
+        h_fac = 12;
+        scaling = 0;
+    }
+    else
+    {
+        /* "excf[]" is divided by 2 */
+        s_excf = scaled_excf;
+        h_fac = 14;
+        scaling = 2;
+    }
+
+    /* loop for every possible period */
+
+    for (i = t_min; i <= t_max; i++)
+    {
+        /* Compute 1/sqrt(energy of excf[]) */
+
+        s   = s2 = 0;
+        p_x      = xn;
+        p_s_excf = s_excf;
+        j        = L_subfr >> 1;
+
+        while (j--)
+        {
+            s  += (Word32) * (p_x++) * *(p_s_excf);
+            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
+            p_s_excf++;
+            s  += (Word32) * (p_x++) * *(p_s_excf);
+            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
+            p_s_excf++;
+        }
+
+        s2     = s2 << 1;
+        s2     = Inv_sqrt(s2, pOverflow);
+        norm_h = (Word16)(s2 >> 16);
+        norm_l = (Word16)((s2 >> 1) - (norm_h << 15));
+        corr_h = (Word16)(s >> 15);
+        corr_l = (Word16)((s) - (corr_h << 15));
+
+        /* Normalize correlation = correlation * (1/sqrt(energy)) */
+
+        s = Mpy_32(corr_h, corr_l, norm_h, norm_l, pOverflow);
+
+        corr_norm[i] = (Word16) s ;
+
+        /* modify the filtered excitation excf[] for the next iteration */
+        if (i != t_max)
+        {
+            k--;
+            temp = exc[k];
+            p_s_excf = &s_excf[L_subfr - 1];
+            p_h = &h[L_subfr - 1];
+
+            p_excf = &s_excf[L_subfr - 2];
+            for (j = (L_subfr - 1) >> 1; j != 0; j--)
+            {
+                s = ((Word32) temp * *(p_h--)) >> h_fac;
+                *(p_s_excf--) = (Word16) s  + *(p_excf--);
+                s = ((Word32) temp * *(p_h--)) >> h_fac;
+                *(p_s_excf--) = (Word16) s  + *(p_excf--);
+            }
+
+            s = ((Word32) temp * *(p_h)) >> h_fac;
+            *(p_s_excf--) = (Word16) s  + *(p_excf);
+
+            *(p_s_excf) = temp >> scaling;
+        }
+
+    }
+    return;
+}
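+
+/* Summary sketch (editorial addition, not part of the reference code): for
+ * each candidate lag i in [t_min, t_max] the loop above effectively computes
+ *
+ *     corr_norm[i] = <xn, excf_i> / sqrt(<excf_i, excf_i>)
+ *
+ * in fixed point, where excf_i is the past excitation at delay i filtered by
+ * h[]. The caller is expected to pass a corr_norm[] buffer that can be
+ * indexed by the absolute lags t_min..t_max (typically by offsetting a local
+ * buffer).
+ */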
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: searchFrac
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    lag = pointer to integer pitch of type Word16
+    frac = pointer to starting point of search fractional pitch of type Word16
+    last_frac = endpoint of search  of type Word16
+    corr[] = pointer to normalized correlation of type Word16
+    flag3 = subsample resolution (3: =1 / 6: =0) of type Word16
+
+ Outputs:
+    None
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+   FUNCTION:   searchFrac()
+
+   PURPOSE: Find fractional pitch
+
+   DESCRIPTION:
+      The function interpolates the normalized correlation at the
+      fractional positions around lag T0. The position at which the
+      interpolation function reaches its maximum is the fractional pitch.
+      Starting point of the search is frac, end point is last_frac.
+      frac is overwritten with the fractional pitch.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+static void searchFrac (
+    Word16 *lag,       // i/o : integer pitch
+    Word16 *frac,      // i/o : start point of search -
+                               fractional pitch
+    Word16 last_frac,  // i   : endpoint of search
+    Word16 corr[],     // i   : normalized correlation
+    Word16 flag3       // i   : subsample resolution
+                                (3: =1 / 6: =0)
+)
+{
+    Word16 i;
+    Word16 max;
+    Word16 corr_int;
+
+    // Test the fractions around T0 and choose the one which maximizes
+    // the interpolated normalized correlation.
+
+    max = Interpol_3or6 (&corr[*lag], *frac, flag3); // function result
+
+    for (i = add (*frac, 1); i <= last_frac; i++) {
+        corr_int = Interpol_3or6 (&corr[*lag], i, flag3);
+        if (sub (corr_int, max) > 0) {
+            max = corr_int;
+            *frac = i;
+        }
+    }
+
+    if (flag3 == 0) {
+        // Limit the fraction value in the interval [-2,-1,0,1,2,3]
+
+        if (sub (*frac, -3) == 0) {
+            *frac = 3;
+            *lag = sub (*lag, 1);
+        }
+    }
+    else {
+        // limit the fraction value between -1 and 1
+
+        if (sub (*frac, -2) == 0) {
+            *frac = 1;
+            *lag = sub (*lag, 1);
+        }
+        if (sub (*frac, 2) == 0) {
+            *frac = -1;
+            *lag = add (*lag, 1);
+        }
+    }
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+static void searchFrac(
+    Word16 *lag,       /* i/o : integer pitch           */
+    Word16 *frac,      /* i/o : start point of search -
+                                fractional pitch        */
+    Word16 last_frac,  /* i   : endpoint of search      */
+    Word16 corr[],     /* i   : normalized correlation  */
+    Word16 flag3,      /* i   : subsample resolution
+                                (3: =1 / 6: =0)         */
+    Flag   *pOverflow,
+    enum Mode mode
+)
+{
+    Word16 i;
+    Word16 max;
+    Word16 corr_int;
+    Word16 minPitch;
+
+    /* Test the fractions around T0 and choose the one which maximizes   */
+    /* the interpolated normalized correlation.                          */
+
+    max = Interpol_3or6(&corr[*lag], *frac, flag3, pOverflow);
+    /* function result */
+
+    for (i = *frac + 1; i <= last_frac; i++)
+    {
+        corr_int = Interpol_3or6(&corr[*lag], i, flag3, pOverflow);
+        if (corr_int > max)
+        {
+            max = corr_int;
+            *frac = i;
+        }
+    }
+
+    minPitch = (mode == MR122) ? PIT_MIN_MR122 : PIT_MIN;
+    if (flag3 == 0)
+    {
+        /* Limit the fraction value in the interval [-2,-1,0,1,2,3] */
+
+        if (*frac == -3)
+        {
+            if (*lag > minPitch)
+            {
+                *frac = 3;
+                (*lag)--;
+            }
+            else
+            {
+                *frac = -2;
+            }
+        }
+    }
+    else
+    {
+        /* limit the fraction value between -1 and 1 */
+
+        if (*frac == -2)
+        {
+            if (*lag > minPitch)
+            {
+                *frac = 1;
+                (*lag)--;
+            }
+            else
+            {
+                *frac = -1;
+            }
+        }
+        else if (*frac == 2)
+        {
+            if (*lag < PIT_MAX)
+            {
+                *frac = -1;
+                (*lag)++;
+            }
+            else
+            {
+                *frac = 1;
+            }
+        }
+    }
+}
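+
+/* Worked example (editorial addition, values are hypothetical): with
+ * flag3 == 1 (1/3 resolution) the loop above evaluates Interpol_3or6() at the
+ * fractions *frac, *frac + 1, ..., last_frac around corr[*lag]. If the search
+ * starts at frac = -2 with last_frac = 2 and the maximum stays at frac = -2,
+ * the final clamp maps that to frac = 1 and decrements *lag by one, provided
+ * *lag is still above the mode's minimum pitch.
+ */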
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: getRange
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    T0 = integer pitch of type Word16
+    delta_low = search start offset of type Word16
+    delta_range = search range of type Word16
+    pitmin = minimum pitch of type Word16
+    pitmax = maximum pitch of type Word16
+    t0_min = pointer to search range minimum of type Word16
+    t0_max = pointer to search range maximum of type Word16
+    pOverflow = pointer to overflow flag (Flag); unused by this implementation
+
+ Outputs:
+    t0_min and t0_max contents are set to the clamped search range
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+   FUNCTION:   getRange()
+
+   PURPOSE: Sets range around open-loop pitch or integer pitch of last subframe
+
+   DESCRIPTION:
+      Takes integer pitch T0 and calculates a range around it with
+        t0_min = T0-delta_low  and t0_max = (T0-delta_low) + delta_range
+      t0_min and t0_max are bounded by pitmin and pitmax
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+static void getRange (
+    Word16 T0,           // i : integer pitch
+    Word16 delta_low,    // i : search start offset
+    Word16 delta_range,  // i : search range
+    Word16 pitmin,       // i : minimum pitch
+    Word16 pitmax,       // i : maximum pitch
+    Word16 *t0_min,      // o : search range minimum
+    Word16 *t0_max)      // o : search range maximum
+{
+    *t0_min = sub(T0, delta_low);
+    if (sub(*t0_min, pitmin) < 0) {
+        *t0_min = pitmin;
+    }
+    *t0_max = add(*t0_min, delta_range);
+    if (sub(*t0_max, pitmax) > 0) {
+        *t0_max = pitmax;
+        *t0_min = sub(*t0_max, delta_range);
+    }
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+static void getRange(
+    Word16 T0,           /* i : integer pitch          */
+    Word16 delta_low,    /* i : search start offset    */
+    Word16 delta_range,  /* i : search range           */
+    Word16 pitmin,       /* i : minimum pitch          */
+    Word16 pitmax,       /* i : maximum pitch          */
+    Word16 *t0_min,      /* o : search range minimum   */
+    Word16 *t0_max,      /* o : search range maximum   */
+    Flag   *pOverflow)
+{
+
+    Word16 temp;
+    OSCL_UNUSED_ARG(pOverflow);
+
+    temp = *t0_min;
+    temp = T0 - delta_low;
+    if (temp < pitmin)
+    {
+        temp = pitmin;
+    }
+    *t0_min = temp;
+
+    temp +=  delta_range;
+    if (temp > pitmax)
+    {
+        temp = pitmax;
+        *t0_min = pitmax - delta_range;
+    }
+    *t0_max = temp;
+
+}
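+
+/* Worked example (editorial addition, values are hypothetical): with T0 = 40,
+ * delta_low = 5, delta_range = 10, pitmin = 20 and pitmax = 143, the function
+ * returns t0_min = 35 and t0_max = 45. With T0 = 140 instead, t0_max clamps
+ * to 143 and t0_min is pulled back to 133, so the range keeps its full width
+ * of delta_range samples.
+ */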
+
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: Pitch_fr_init
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    state = pointer to a pointer of structure type Pitch_fr_State.
+
+ Outputs:
+    None
+
+ Returns:
+    Returns a zero if successful and -1 if not successful.
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+  Function:   Pitch_fr_init
+  Purpose:    Allocates state memory and initializes state memory
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+int Pitch_fr_init (Pitch_frState **state)
+{
+    Pitch_frState* s;
+
+    if (state == (Pitch_frState **) NULL){
+        // fprintf(stderr, "Pitch_fr_init: invalid parameter\n");
+        return -1;
+    }
+    *state = NULL;
+
+    // allocate memory
+    if ((s= (Pitch_frState *) malloc(sizeof(Pitch_frState))) == NULL){
+        // fprintf(stderr, "Pitch_fr_init: can not malloc state structure\n");
+        return -1;
+    }
+
+    Pitch_fr_reset(s);
+    *state = s;
+
+    return 0;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+Word16 Pitch_fr_init(Pitch_frState **state)
+{
+    Pitch_frState* s;
+
+    if (state == (Pitch_frState **) NULL)
+    {
+        /* fprintf(stderr, "Pitch_fr_init: invalid parameter\n"); */
+        return -1;
+    }
+    *state = NULL;
+
+    /* allocate memory */
+    if ((s = (Pitch_frState *) malloc(sizeof(Pitch_frState))) == NULL)
+    {
+        /* fprintf(stderr, "Pitch_fr_init: can not malloc state structure\n"); */
+        return -1;
+    }
+
+    Pitch_fr_reset(s);
+    *state = s;
+
+    return 0;
+}
+
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: Pitch_fr_reset
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    state = pointer to a pointer of structure type Pitch_fr_State.
+
+ Outputs:
+    None
+
+ Returns:
+    Returns a zero if successful and -1 if not successful.
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+  Function:   Pitch_fr_reset
+  Purpose:    Initializes state memory to zero
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+int Pitch_fr_reset (Pitch_frState *state)
+{
+
+    if (state == (Pitch_frState *) NULL){
+        // fprintf(stderr, "Pitch_fr_reset: invalid parameter\n");
+        return -1;
+    }
+
+    state->T0_prev_subframe = 0;
+
+    return 0;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+Word16 Pitch_fr_reset(Pitch_frState *state)
+{
+
+    if (state == (Pitch_frState *) NULL)
+    {
+        /* fprintf(stderr, "Pitch_fr_reset: invalid parameter\n"); */
+        return -1;
+    }
+
+    state->T0_prev_subframe = 0;
+
+    return 0;
+}
+
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: Pitch_fr_exit
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    state = pointer to a pointer of structure type Pitch_fr_State.
+
+ Outputs:
+    None
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+  Function:   Pitch_fr_exit
+  Purpose:    The memory for state is freed.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+void Pitch_fr_exit (Pitch_frState **state)
+{
+    if (state == NULL || *state == NULL)
+        return;
+
+    // deallocate memory
+    free(*state);
+    *state = NULL;
+
+    return;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+void Pitch_fr_exit(Pitch_frState **state)
+{
+    if (state == NULL || *state == NULL)
+        return;
+
+    /* deallocate memory */
+    free(*state);
+    *state = NULL;
+
+    return;
+}
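+
+/*
+ Usage sketch (illustration only, not part of the 3GPP reference code): a
+ caller owns a Pitch_frState pointer, resets it at the start of an encoding
+ session and releases it when done.  Allocation is assumed to go through the
+ encoder's corresponding init routine defined elsewhere in this file.
+
+    Pitch_frState *pitchSt = ...;        // obtained from the init path
+    if (Pitch_fr_reset(pitchSt) != 0)
+    {
+        // invalid state pointer
+    }
+    // ... per-subframe calls to Pitch_fr() ...
+    Pitch_fr_exit(&pitchSt);             // frees the state, sets pitchSt to NULL
+*/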
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: Pitch_fr
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    st = pointer to state structure of type Pitch_frState
+    mode = codec mode of type enum Mode
+    T_op[] = pointer to open loop pitch lags of type Word16
+    exc[] = pointer to excitation buffer of type Word16
+    xn[] = pointer to target vector of type Word16
+    h[] = pointer to impulse response of synthesis and weighting filters
+          of type Word16
+    L_subfr = length of subframe of type Word16
+    i_subfr = subframe offset of type Word16
+
+ Outputs:
+    pit_frac = pointer to pitch period (fractional) of type Word16
+    resu3 = pointer to subsample resolution of type Word16
+    ana_index = pointer to index of encoding of type Word16
+
+ Returns:
+    None
+
+ Global Variables Used:
+    None
+
+ Local Variables Needed:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+   FUNCTION:   Pitch_fr()
+
+   PURPOSE: Find the pitch period with 1/3 or 1/6 subsample resolution
+            (closed loop).
+
+   DESCRIPTION:
+         - find the normalized correlation between the target and filtered
+           past excitation in the search range.
+         - select the delay with maximum normalized correlation.
+         - interpolate the normalized correlation at fractions -3/6 to 3/6
+           with step 1/6 around the chosen delay.
+         - The fraction which gives the maximum interpolated value is chosen.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+Word16 Pitch_fr (        // o   : pitch period (integer)
+    Pitch_frState *st,   // i/o : State struct
+    enum Mode mode,      // i   : codec mode
+    Word16 T_op[],       // i   : open loop pitch lags
+    Word16 exc[],        // i   : excitation buffer                      Q0
+    Word16 xn[],         // i   : target vector                          Q0
+    Word16 h[],          // i   : impulse response of synthesis and
+                                  weighting filters                     Q12
+    Word16 L_subfr,      // i   : Length of subframe
+    Word16 i_subfr,      // i   : subframe offset
+    Word16 *pit_frac,    // o   : pitch period (fractional)
+    Word16 *resu3,       // o   : subsample resolution 1/3 (=1) or 1/6 (=0)
+    Word16 *ana_index    // o   : index of encoding
+)
+{
+    Word16 i;
+    Word16 t_min, t_max;
+    Word16 t0_min, t0_max;
+    Word16 max, lag, frac;
+    Word16 tmp_lag;
+    Word16 *corr;
+    Word16 corr_v[40];    // Total length = t0_max-t0_min+1+2*L_INTER_SRCH
+
+    Word16 max_frac_lag;
+    Word16 flag3, flag4;
+    Word16 last_frac;
+    Word16 delta_int_low, delta_int_range;
+    Word16 delta_frc_low, delta_frc_range;
+    Word16 pit_min;
+    Word16 frame_offset;
+    Word16 delta_search;
+
+    //-----------------------------------------------------------------------
+     //                      set mode specific variables
+     //----------------------------------------------------------------------
+
+    max_frac_lag    = mode_dep_parm[mode].max_frac_lag;
+    flag3           = mode_dep_parm[mode].flag3;
+    frac            = mode_dep_parm[mode].first_frac;
+    last_frac       = mode_dep_parm[mode].last_frac;
+    delta_int_low   = mode_dep_parm[mode].delta_int_low;
+    delta_int_range = mode_dep_parm[mode].delta_int_range;
+
+    delta_frc_low   = mode_dep_parm[mode].delta_frc_low;
+    delta_frc_range = mode_dep_parm[mode].delta_frc_range;
+    pit_min         = mode_dep_parm[mode].pit_min;
+
+    //-----------------------------------------------------------------------
+    //                 decide upon full or differential search
+    //-----------------------------------------------------------------------
+
+    delta_search = 1;
+
+    if ((i_subfr == 0) || (sub(i_subfr,L_FRAME_BY2) == 0)) {
+
+        // Subframe 1 and 3
+
+        if (((sub((Word16)mode, (Word16)MR475) != 0) && (sub((Word16)mode,
+            (Word16)MR515) != 0)) ||
+            (sub(i_subfr,L_FRAME_BY2) != 0)) {
+
+            // set t0_min, t0_max for full search
+            // this is *not* done for mode MR475, MR515 in subframe 3
+
+            delta_search = 0; // no differential search
+
+            // calculate index into T_op which contains the open-loop
+            // pitch estimations for the 2 big subframes
+
+            frame_offset = 1;
+            if (i_subfr == 0)
+                frame_offset = 0;
+
+            // get T_op from the corresponding half frame and
+            // set t0_min, t0_max
+
+            getRange (T_op[frame_offset], delta_int_low, delta_int_range,
+                      pit_min, PIT_MAX, &t0_min, &t0_max);
+        }
+        else {
+
+            // mode MR475, MR515 and 3rd subframe: delta search as well
+            getRange (st->T0_prev_subframe, delta_frc_low, delta_frc_range,
+                      pit_min, PIT_MAX, &t0_min, &t0_max);
+        }
+    }
+    else {
+
+        // for Subframe 2 and 4
+        // get range around T0 of previous subframe for delta search
+
+        getRange (st->T0_prev_subframe, delta_frc_low, delta_frc_range,
+                  pit_min, PIT_MAX, &t0_min, &t0_max);
+    }
+
+    //-----------------------------------------------------------------------
+     //          Find interval to compute normalized correlation
+     //----------------------------------------------------------------------
+
+    t_min = sub (t0_min, L_INTER_SRCH);
+    t_max = add (t0_max, L_INTER_SRCH);
+
+    corr = &corr_v[-t_min];
+
+    //-----------------------------------------------------------------------
+     // Compute normalized correlation between target and filtered excitation
+     //----------------------------------------------------------------------
+
+    Norm_Corr (exc, xn, h, L_subfr, t_min, t_max, corr);
+
+    //-----------------------------------------------------------------------
+     //                         Find integer pitch
+     //----------------------------------------------------------------------
+
+    max = corr[t0_min];
+    lag = t0_min;
+
+    for (i = t0_min + 1; i <= t0_max; i++) {
+        if (sub (corr[i], max) >= 0) {
+            max = corr[i];
+            lag = i;
+        }
+    }
+
+    //-----------------------------------------------------------------------
+     //                      Find fractional pitch
+     //----------------------------------------------------------------------
+    if ((delta_search == 0) && (sub (lag, max_frac_lag) > 0)) {
+
+        // full search and integer pitch greater than max_frac_lag
+        // fractional search is not needed, set fractional to zero
+
+        frac = 0;
+    }
+    else {
+
+        // if differential search AND mode MR475 OR MR515 OR MR59 OR MR67
+        // then search fractional with 4 bits resolution
+
+       if ((delta_search != 0) &&
+           ((sub ((Word16)mode, (Word16)MR475) == 0) ||
+            (sub ((Word16)mode, (Word16)MR515) == 0) ||
+            (sub ((Word16)mode, (Word16)MR59) == 0) ||
+            (sub ((Word16)mode, (Word16)MR67) == 0))) {
+
+          // modify frac or last_frac according to position of last
+          // integer pitch: either search around integer pitch,
+          // or only on left or right side
+
+          tmp_lag = st->T0_prev_subframe;
+          if ( sub( sub(tmp_lag, t0_min), 5) > 0)
+             tmp_lag = add (t0_min, 5);
+          if ( sub( sub(t0_max, tmp_lag), 4) > 0)
+               tmp_lag = sub (t0_max, 4);
+
+          if ((sub (lag, tmp_lag) == 0) ||
+              (sub (lag, sub(tmp_lag, 1)) == 0)) {
+
+             // normal search in fractions around T0
+
+             searchFrac (&lag, &frac, last_frac, corr, flag3);
+
+          }
+          else if (sub (lag, sub (tmp_lag, 2)) == 0) {
+             // limit search around T0 to the right side
+             frac = 0;
+             searchFrac (&lag, &frac, last_frac, corr, flag3);
+          }
+          else if (sub (lag, add(tmp_lag, 1)) == 0) {
+             // limit search around T0 to the left side
+             last_frac = 0;
+             searchFrac (&lag, &frac, last_frac, corr, flag3);
+          }
+          else {
+             // no fractional search
+             frac = 0;
+            }
+       }
+       else
+          // test the fractions around T0
+          searchFrac (&lag, &frac, last_frac, corr, flag3);
+    }
+
+    //-----------------------------------------------------------------------
+     //                           encode pitch
+     //-----------------------------------------------------------------------
+
+    if (flag3 != 0) {
+       // flag4 indicates encoding with 4 bit resolution;
+       // this is needed for mode MR475, MR515 and MR59
+
+       flag4 = 0;
+       if ( (sub ((Word16)mode, (Word16)MR475) == 0) ||
+            (sub ((Word16)mode, (Word16)MR515) == 0) ||
+            (sub ((Word16)mode, (Word16)MR59) == 0) ||
+            (sub ((Word16)mode, (Word16)MR67) == 0) ) {
+          flag4 = 1;
+       }
+
+       // encode with 1/3 subsample resolution
+
+       *ana_index = Enc_lag3(lag, frac, st->T0_prev_subframe,
+                             t0_min, t0_max, delta_search, flag4);
+       // function result
+
+    }
+    else
+    {
+       // encode with 1/6 subsample resolution
+
+       *ana_index = Enc_lag6(lag, frac, t0_min, delta_search);
+       // function result
+    }
+
+     //-----------------------------------------------------------------------
+     //                          update state variables
+     //-----------------------------------------------------------------------
+
+    st->T0_prev_subframe = lag;
+
+     //-----------------------------------------------------------------------
+     //                      update output variables
+     //-----------------------------------------------------------------------
+
+    *resu3    = flag3;
+
+    *pit_frac = frac;
+
+    return (lag);
+}
+
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+Word16 Pitch_fr(         /* o   : pitch period (integer)                    */
+    Pitch_frState *st,   /* i/o : State struct                              */
+    enum Mode mode,      /* i   : codec mode                                */
+    Word16 T_op[],       /* i   : open loop pitch lags                      */
+    Word16 exc[],        /* i   : excitation buffer                      Q0 */
+    Word16 xn[],         /* i   : target vector                          Q0 */
+    Word16 h[],          /* i   : impulse response of synthesis and
+                                  weighting filters                     Q12 */
+    Word16 L_subfr,      /* i   : Length of subframe                        */
+    Word16 i_subfr,      /* i   : subframe offset                           */
+    Word16 *pit_frac,    /* o   : pitch period (fractional)                 */
+    Word16 *resu3,       /* o   : subsample resolution 1/3 (=1) or 1/6 (=0) */
+    Word16 *ana_index,   /* o   : index of encoding                         */
+    Flag   *pOverflow
+)
+{
+    Word16 i;
+    Word16 t_min;
+    Word16 t_max;
+    Word16 t0_min = 0;
+    Word16 t0_max;
+    Word16 max;
+    Word16 lag;
+    Word16 frac;
+    Word16 tmp_lag;
+    Word16 *corr;
+    Word16 corr_v[40];    /* Total length = t0_max-t0_min+1+2*L_INTER_SRCH */
+
+    Word16 max_frac_lag;
+    Word16 flag3;
+    Word16 flag4;
+    Word16 last_frac;
+    Word16 delta_int_low;
+    Word16 delta_int_range;
+    Word16 delta_frc_low;
+    Word16 delta_frc_range;
+    Word16 pit_min;
+    Word16 frame_offset;
+    Word16 delta_search;
+
+    /*-----------------------------------------------------------------------*
+     *                      set mode specific variables                      *
+     *-----------------------------------------------------------------------*/
+
+    max_frac_lag    = mode_dep_parm[mode].max_frac_lag;
+    flag3           = mode_dep_parm[mode].flag3;
+    frac            = mode_dep_parm[mode].first_frac;
+    last_frac       = mode_dep_parm[mode].last_frac;
+    delta_int_low   = mode_dep_parm[mode].delta_int_low;
+    delta_int_range = mode_dep_parm[mode].delta_int_range;
+
+    delta_frc_low   = mode_dep_parm[mode].delta_frc_low;
+    delta_frc_range = mode_dep_parm[mode].delta_frc_range;
+    pit_min         = mode_dep_parm[mode].pit_min;
+
+    /*-----------------------------------------------------------------------*
+     *                 decide upon full or differential search               *
+     *-----------------------------------------------------------------------*/
+
+    delta_search = 1;
+
+    if ((i_subfr == 0) || (i_subfr == L_FRAME_BY2))
+    {
+
+        /* Subframe 1 and 3 */
+
+        if (((mode != MR475) && (mode != MR515)) || (i_subfr != L_FRAME_BY2))
+        {
+
+            /* set t0_min, t0_max for full search */
+            /* this is *not* done for mode MR475, MR515 in subframe 3 */
+
+            delta_search = 0; /* no differential search */
+
+            /* calculate index into T_op which contains the open-loop */
+            /* pitch estimations for the 2 big subframes */
+
+            frame_offset = 1;
+            if (i_subfr == 0)
+                frame_offset = 0;
+
+            /* get T_op from the corresponding half frame and */
+            /* set t0_min, t0_max */
+
+            getRange(T_op[frame_offset], delta_int_low, delta_int_range,
+                     pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
+        }
+        else
+        {
+
+            /* mode MR475, MR515 and 3rd subframe: delta search as well */
+            getRange(st->T0_prev_subframe, delta_frc_low, delta_frc_range,
+                     pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
+        }
+    }
+    else
+    {
+
+        /* for Subframe 2 and 4 */
+        /* get range around T0 of previous subframe for delta search */
+
+        getRange(st->T0_prev_subframe, delta_frc_low, delta_frc_range,
+                 pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
+    }
+
+    /*-----------------------------------------------------------------------*
+     *           Find interval to compute normalized correlation             *
+     *-----------------------------------------------------------------------*/
+
+    t_min = sub(t0_min, L_INTER_SRCH, pOverflow);
+    t_max = add(t0_max, L_INTER_SRCH, pOverflow);
+
+    corr = &corr_v[-t_min];
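+    /* Note: corr is offset so that corr[t], for t in [t_min, t_max], addresses
+       corr_v[t - t_min]; the first searched lag therefore lands on corr_v[0]. */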
+
+    /*-----------------------------------------------------------------------*
+     * Compute normalized correlation between target and filtered excitation *
+     *-----------------------------------------------------------------------*/
+
+    Norm_Corr(exc, xn, h, L_subfr, t_min, t_max, corr, pOverflow);
+
+    /*-----------------------------------------------------------------------*
+     *                           Find integer pitch                          *
+     *-----------------------------------------------------------------------*/
+
+    max = corr[t0_min];
+    lag = t0_min;
+
+    for (i = t0_min + 1; i <= t0_max; i++)
+    {
+        if (corr[i] >= max)
+        {
+            max = corr[i];
+            lag = i;
+        }
+    }
+
+    /*-----------------------------------------------------------------------*
+     *                        Find fractional pitch                          *
+     *-----------------------------------------------------------------------*/
+    if ((delta_search == 0) && (lag > max_frac_lag))
+    {
+
+        /* full search and integer pitch greater than max_frac_lag */
+        /* fractional search is not needed, set fractional to zero */
+
+        frac = 0;
+    }
+    else
+    {
+
+        /* if differential search AND mode MR475 OR MR515 OR MR59 OR MR67   */
+        /* then search fractional with 4 bits resolution           */
+
+        if ((delta_search != 0) &&
+                ((mode == MR475) || (mode == MR515) ||
+                 (mode == MR59) || (mode == MR67)))
+        {
+
+            /* modify frac or last_frac according to position of last */
+            /* integer pitch: either search around integer pitch, */
+            /* or only on left or right side */
+
+            tmp_lag = st->T0_prev_subframe;
+            if (sub(sub(tmp_lag, t0_min, pOverflow), 5, pOverflow) > 0)
+                tmp_lag = add(t0_min, 5, pOverflow);
+            if (sub(sub(t0_max, tmp_lag, pOverflow), 4, pOverflow) > 0)
+                tmp_lag = sub(t0_max, 4, pOverflow);
+
+            if ((lag == tmp_lag) || (lag == (tmp_lag - 1)))
+            {
+
+                /* normal search in fractions around T0 */
+
+                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow, mode);
+
+            }
+            else if (lag == (tmp_lag - 2))
+            {
+                /* limit search around T0 to the right side */
+                frac = 0;
+                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow, mode);
+            }
+            else if (lag == (tmp_lag + 1))
+            {
+                /* limit search around T0 to the left side */
+                last_frac = 0;
+                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow, mode);
+            }
+            else
+            {
+                /* no fractional search */
+                frac = 0;
+            }
+        }
+        else
+            /* test the fractions around T0 */
+            searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow, mode);
+    }
+
+    /*-----------------------------------------------------------------------*
+     *                           encode pitch                                *
+     *-----------------------------------------------------------------------*/
+
+    if (flag3 != 0)
+    {
+        /* flag4 indicates encoding with 4 bit resolution;         */
+        /* this is needed for mode MR475, MR515 and MR59           */
+
+        flag4 = 0;
+        if ((mode == MR475) || (mode == MR515) ||
+                (mode == MR59) || (mode == MR67))
+        {
+            flag4 = 1;
+        }
+
+        /* encode with 1/3 subsample resolution */
+
+        *ana_index = Enc_lag3(lag, frac, st->T0_prev_subframe,
+                              t0_min, t0_max, delta_search, flag4, pOverflow);
+        /* function result */
+
+    }
+    else
+    {
+        /* encode with 1/6 subsample resolution */
+
+        *ana_index = Enc_lag6(lag, frac, t0_min, delta_search, pOverflow);
+        /* function result */
+    }
+
+    /*-----------------------------------------------------------------------*
+     *                          update state variables                       *
+     *-----------------------------------------------------------------------*/
+
+    st->T0_prev_subframe = lag;
+
+    /*-----------------------------------------------------------------------*
+     *                      update output variables                          *
+     *-----------------------------------------------------------------------*/
+
+    *resu3    = flag3;
+
+    *pit_frac = frac;
+
+    return (lag);
+}
+
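+/*
+ Output interpretation sketch (an assumption about the usual AMR convention,
+ not code from the reference implementation): 'frac' counts subsample steps
+ around the returned integer lag, with *resu3 selecting the step size, so a
+ caller could recover the effective pitch period as
+
+    double pitch_period(Word16 lag, Word16 frac, Word16 resu3)
+    {
+        // resu3 == 1 -> 1/3 subsample steps, resu3 == 0 -> 1/6 subsample steps
+        return (double)lag + (double)frac / ((resu3 != 0) ? 3.0 : 6.0);
+    }
+
+ The encoder itself never needs this value; it transmits the 'ana_index'
+ produced by Enc_lag3()/Enc_lag6() instead.
+*/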
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.h b/media/codecs/amrnb/enc/src/pitch_fr.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_fr.h
rename to media/codecs/amrnb/enc/src/pitch_fr.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp b/media/codecs/amrnb/enc/src/pitch_ol.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp
rename to media/codecs/amrnb/enc/src/pitch_ol.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.h b/media/codecs/amrnb/enc/src/pitch_ol.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pitch_ol.h
rename to media/codecs/amrnb/enc/src/pitch_ol.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_big.cpp b/media/codecs/amrnb/enc/src/pre_big.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_big.cpp
rename to media/codecs/amrnb/enc/src/pre_big.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_big.h b/media/codecs/amrnb/enc/src/pre_big.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_big.h
rename to media/codecs/amrnb/enc/src/pre_big.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp b/media/codecs/amrnb/enc/src/pre_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp
rename to media/codecs/amrnb/enc/src/pre_proc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.h b/media/codecs/amrnb/enc/src/pre_proc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/pre_proc.h
rename to media/codecs/amrnb/enc/src/pre_proc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/prm2bits.cpp b/media/codecs/amrnb/enc/src/prm2bits.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/prm2bits.cpp
rename to media/codecs/amrnb/enc/src/prm2bits.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/prm2bits.h b/media/codecs/amrnb/enc/src/prm2bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/prm2bits.h
rename to media/codecs/amrnb/enc/src/prm2bits.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_c.cpp b/media/codecs/amrnb/enc/src/q_gain_c.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_c.cpp
rename to media/codecs/amrnb/enc/src/q_gain_c.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_c.h b/media/codecs/amrnb/enc/src/q_gain_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_c.h
rename to media/codecs/amrnb/enc/src/q_gain_c.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_p.cpp b/media/codecs/amrnb/enc/src/q_gain_p.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_p.cpp
rename to media/codecs/amrnb/enc/src/q_gain_p.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/q_gain_p.h b/media/codecs/amrnb/enc/src/q_gain_p.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/q_gain_p.h
rename to media/codecs/amrnb/enc/src/q_gain_p.h
diff --git a/media/codecs/amrnb/enc/src/qgain475.cpp b/media/codecs/amrnb/enc/src/qgain475.cpp
new file mode 100644
index 0000000..08a5c15
--- /dev/null
+++ b/media/codecs/amrnb/enc/src/qgain475.cpp
@@ -0,0 +1,1445 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+    3GPP TS 26.073
+    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
+    Available from http://www.3gpp.org
+
+(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Pathname: ./audio/gsm-amr/c/src/qgain475.c
+ Functions: MR475_quant_store_results
+           MR475_update_unq_pred
+           MR475_gain_quant
+
+------------------------------------------------------------------------------
+ MODULE DESCRIPTION
+
+ These modules handle the quantization of pitch and codebook gains for MR475.
+
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "qgain475.h"
+#include "typedef.h"
+#include "basic_op.h"
+#include "mode.h"
+#include "cnst.h"
+#include "pow2.h"
+#include "log2.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+#define MR475_VQ_SIZE 256
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL VARIABLE DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/* The table contains the following data:
+ *
+ *    g_pitch(0)        (Q14) // for sub-
+ *    g_fac(0)          (Q12) // frame 0 and 2
+ *    g_pitch(1)        (Q14) // for sub-
+ *    g_fac(1)          (Q12) // frame 1 and 3
+ *
+ */
+static const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
+{
+    /*g_pit(0), g_fac(0),      g_pit(1), g_fac(1) */
+    812,          128,           542,      140,
+    2873,         1135,          2266,     3402,
+    2067,          563,         12677,      647,
+    4132,         1798,          5601,     5285,
+    7689,          374,          3735,      441,
+    10912,         2638,         11807,     2494,
+    20490,          797,          5218,      675,
+    6724,         8354,          5282,     1696,
+    1488,          428,          5882,      452,
+    5332,         4072,          3583,     1268,
+    2469,          901,         15894,     1005,
+    14982,         3271,         10331,     4858,
+    3635,         2021,          2596,      835,
+    12360,         4892,         12206,     1704,
+    13432,         1604,          9118,     2341,
+    3968,         1538,          5479,     9936,
+    3795,          417,          1359,      414,
+    3640,         1569,          7995,     3541,
+    11405,          645,          8552,      635,
+    4056,         1377,         16608,     6124,
+    11420,          700,          2007,      607,
+    12415,         1578,         11119,     4654,
+    13680,         1708,         11990,     1229,
+    7996,         7297,         13231,     5715,
+    2428,         1159,          2073,     1941,
+    6218,         6121,          3546,     1804,
+    8925,         1802,          8679,     1580,
+    13935,         3576,         13313,     6237,
+    6142,         1130,          5994,     1734,
+    14141,         4662,         11271,     3321,
+    12226,         1551,         13931,     3015,
+    5081,        10464,          9444,     6706,
+    1689,          683,          1436,     1306,
+    7212,         3933,          4082,     2713,
+    7793,          704,         15070,      802,
+    6299,         5212,          4337,     5357,
+    6676,          541,          6062,      626,
+    13651,         3700,         11498,     2408,
+    16156,          716,         12177,      751,
+    8065,        11489,          6314,     2256,
+    4466,          496,          7293,      523,
+    10213,         3833,          8394,     3037,
+    8403,          966,         14228,     1880,
+    8703,         5409,         16395,     4863,
+    7420,         1979,          6089,     1230,
+    9371,         4398,         14558,     3363,
+    13559,         2873,         13163,     1465,
+    5534,         1678,         13138,    14771,
+    7338,          600,          1318,      548,
+    4252,         3539,         10044,     2364,
+    10587,          622,         13088,      669,
+    14126,         3526,          5039,     9784,
+    15338,          619,          3115,      590,
+    16442,         3013,         15542,     4168,
+    15537,         1611,         15405,     1228,
+    16023,         9299,          7534,     4976,
+    1990,         1213,         11447,     1157,
+    12512,         5519,          9475,     2644,
+    7716,         2034,         13280,     2239,
+    16011,         5093,          8066,     6761,
+    10083,         1413,          5002,     2347,
+    12523,         5975,         15126,     2899,
+    18264,         2289,         15827,     2527,
+    16265,        10254,         14651,    11319,
+    1797,          337,          3115,      397,
+    3510,         2928,          4592,     2670,
+    7519,          628,         11415,      656,
+    5946,         2435,          6544,     7367,
+    8238,          829,          4000,      863,
+    10032,         2492,         16057,     3551,
+    18204,         1054,          6103,     1454,
+    5884,         7900,         18752,     3468,
+    1864,          544,          9198,      683,
+    11623,         4160,          4594,     1644,
+    3158,         1157,         15953,     2560,
+    12349,         3733,         17420,     5260,
+    6106,         2004,          2917,     1742,
+    16467,         5257,         16787,     1680,
+    17205,         1759,          4773,     3231,
+    7386,         6035,         14342,    10012,
+    4035,          442,          4194,      458,
+    9214,         2242,          7427,     4217,
+    12860,          801,         11186,      825,
+    12648,         2084,         12956,     6554,
+    9505,          996,          6629,      985,
+    10537,         2502,         15289,     5006,
+    12602,         2055,         15484,     1653,
+    16194,         6921,         14231,     5790,
+    2626,          828,          5615,     1686,
+    13663,         5778,          3668,     1554,
+    11313,         2633,          9770,     1459,
+    14003,         4733,         15897,     6291,
+    6278,         1870,          7910,     2285,
+    16978,         4571,         16576,     3849,
+    15248,         2311,         16023,     3244,
+    14459,        17808,         11847,     2763,
+    1981,         1407,          1400,      876,
+    4335,         3547,          4391,     4210,
+    5405,          680,         17461,      781,
+    6501,         5118,          8091,     7677,
+    7355,          794,          8333,     1182,
+    15041,         3160,         14928,     3039,
+    20421,          880,         14545,      852,
+    12337,        14708,          6904,     1920,
+    4225,          933,          8218,     1087,
+    10659,         4084,         10082,     4533,
+    2735,          840,         20657,     1081,
+    16711,         5966,         15873,     4578,
+    10871,         2574,          3773,     1166,
+    14519,         4044,         20699,     2627,
+    15219,         2734,         15274,     2186,
+    6257,         3226,         13125,    19480,
+    7196,          930,          2462,     1618,
+    4515,         3092,         13852,     4277,
+    10460,          833,         17339,      810,
+    16891,         2289,         15546,     8217,
+    13603,         1684,          3197,     1834,
+    15948,         2820,         15812,     5327,
+    17006,         2438,         16788,     1326,
+    15671,         8156,         11726,     8556,
+    3762,         2053,          9563,     1317,
+    13561,         6790,         12227,     1936,
+    8180,         3550,         13287,     1778,
+    16299,         6599,         16291,     7758,
+    8521,         2551,          7225,     2645,
+    18269,         7489,         16885,     2248,
+    17882,         2884,         17265,     3328,
+    9417,        20162,         11042,     8320,
+    1286,          620,          1431,      583,
+    5993,         2289,          3978,     3626,
+    5144,          752,         13409,      830,
+    5553,         2860,         11764,     5908,
+    10737,          560,          5446,      564,
+    13321,         3008,         11946,     3683,
+    19887,          798,          9825,      728,
+    13663,         8748,          7391,     3053,
+    2515,          778,          6050,      833,
+    6469,         5074,          8305,     2463,
+    6141,         1865,         15308,     1262,
+    14408,         4547,         13663,     4515,
+    3137,         2983,          2479,     1259,
+    15088,         4647,         15382,     2607,
+    14492,         2392,         12462,     2537,
+    7539,         2949,         12909,    12060,
+    5468,          684,          3141,      722,
+    5081,         1274,         12732,     4200,
+    15302,          681,          7819,      592,
+    6534,         2021,         16478,     8737,
+    13364,          882,          5397,      899,
+    14656,         2178,         14741,     4227,
+    14270,         1298,         13929,     2029,
+    15477,         7482,         15815,     4572,
+    2521,         2013,          5062,     1804,
+    5159,         6582,          7130,     3597,
+    10920,         1611,         11729,     1708,
+    16903,         3455,         16268,     6640,
+    9306,         1007,          9369,     2106,
+    19182,         5037,         12441,     4269,
+    15919,         1332,         15357,     3512,
+    11898,        14141,         16101,     6854,
+    2010,          737,          3779,      861,
+    11454,         2880,          3564,     3540,
+    9057,         1241,         12391,      896,
+    8546,         4629,         11561,     5776,
+    8129,          589,          8218,      588,
+    18728,         3755,         12973,     3149,
+    15729,          758,         16634,      754,
+    15222,        11138,         15871,     2208,
+    4673,          610,         10218,      678,
+    15257,         4146,          5729,     3327,
+    8377,         1670,         19862,     2321,
+    15450,         5511,         14054,     5481,
+    5728,         2888,          7580,     1346,
+    14384,         5325,         16236,     3950,
+    15118,         3744,         15306,     1435,
+    14597,         4070,         12301,    15696,
+    7617,         1699,          2170,      884,
+    4459,         4567,         18094,     3306,
+    12742,          815,         14926,      907,
+    15016,         4281,         15518,     8368,
+    17994,         1087,          2358,      865,
+    16281,         3787,         15679,     4596,
+    16356,         1534,         16584,     2210,
+    16833,         9697,         15929,     4513,
+    3277,         1085,          9643,     2187,
+    11973,         6068,          9199,     4462,
+    8955,         1629,         10289,     3062,
+    16481,         5155,         15466,     7066,
+    13678,         2543,          5273,     2277,
+    16746,         6213,         16655,     3408,
+    20304,         3363,         18688,     1985,
+    14172,        12867,         15154,    15703,
+    4473,         1020,          1681,      886,
+    4311,         4301,          8952,     3657,
+    5893,         1147,         11647,     1452,
+    15886,         2227,          4582,     6644,
+    6929,         1205,          6220,      799,
+    12415,         3409,         15968,     3877,
+    19859,         2109,          9689,     2141,
+    14742,         8830,         14480,     2599,
+    1817,         1238,          7771,      813,
+    19079,         4410,          5554,     2064,
+    3687,         2844,         17435,     2256,
+    16697,         4486,         16199,     5388,
+    8028,         2763,          3405,     2119,
+    17426,         5477,         13698,     2786,
+    19879,         2720,          9098,     3880,
+    18172,         4833,         17336,    12207,
+    5116,          996,          4935,      988,
+    9888,         3081,          6014,     5371,
+    15881,         1667,          8405,     1183,
+    15087,         2366,         19777,     7002,
+    11963,         1562,          7279,     1128,
+    16859,         1532,         15762,     5381,
+    14708,         2065,         20105,     2155,
+    17158,         8245,         17911,     6318,
+    5467,         1504,          4100,     2574,
+    17421,         6810,          5673,     2888,
+    16636,         3382,          8975,     1831,
+    20159,         4737,         19550,     7294,
+    6658,         2781,         11472,     3321,
+    19397,         5054,         18878,     4722,
+    16439,         2373,         20430,     4386,
+    11353,        26526,         11593,     3068,
+    2866,         1566,          5108,     1070,
+    9614,         4915,          4939,     3536,
+    7541,          878,         20717,      851,
+    6938,         4395,         16799,     7733,
+    10137,         1019,          9845,      964,
+    15494,         3955,         15459,     3430,
+    18863,          982,         20120,      963,
+    16876,        12887,         14334,     4200,
+    6599,         1220,          9222,      814,
+    16942,         5134,          5661,     4898,
+    5488,         1798,         20258,     3962,
+    17005,         6178,         17929,     5929,
+    9365,         3420,          7474,     1971,
+    19537,         5177,         19003,     3006,
+    16454,         3788,         16070,     2367,
+    8664,         2743,          9445,    26358,
+    10856,         1287,          3555,     1009,
+    5606,         3622,         19453,     5512,
+    12453,          797,         20634,      911,
+    15427,         3066,         17037,    10275,
+    18883,         2633,          3913,     1268,
+    19519,         3371,         18052,     5230,
+    19291,         1678,         19508,     3172,
+    18072,        10754,         16625,     6845,
+    3134,         2298,         10869,     2437,
+    15580,         6913,         12597,     3381,
+    11116,         3297,         16762,     2424,
+    18853,         6715,         17171,     9887,
+    12743,         2605,          8937,     3140,
+    19033,         7764,         18347,     3880,
+    20475,         3682,         19602,     3380,
+    13044,        19373,         10526,    23124
+};
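+
+/*
+ Indexing sketch (illustration only): each of the MR475_VQ_SIZE entries
+ occupies four consecutive Word16 values in the order
+ g_pit(0), g_fac(0), g_pit(1), g_fac(1), so a quantizer index i in
+ [0, MR475_VQ_SIZE) selects the row
+
+    const Word16 *entry = &table_gain_MR475[i << 2];   // i * 4
+    // entry[0], entry[2]: pitch gains (Q14); entry[1], entry[3]: gain factors (Q12)
+*/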
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_quant_store_results
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    pred_st = pointer to structure of type gc_predState
+    p = pointer to selected quantizer table entry (const Word16)
+    gcode0 = predicted CB gain (Word16)
+    exp_gcode0 = exponent of predicted CB gain (Word16)
+    gain_pit = pointer to Pitch gain (Word16)
+    gain_cod = pointer to Code gain (Word16)
+
+ Outputs:
+    pred_st points to the updated structure of type gc_predState
+    gain_pit points to Pitch gain
+    gain_cod points to Code gain
+    pOverflow points to overflow indicator (Flag)
+
+ Returns:
+    None.
+
+ Global Variables Used:
+    None.
+
+ Local Variables Needed:
+    None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This function calculates the final fixed codebook gain and the predictor
+ update values, and updates the gain predictor.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+static void MR475_quant_store_results(
+
+    gc_predState *pred_st, // i/o: gain predictor state struct
+    const Word16 *p,       // i  : pointer to selected quantizer table entry
+    Word16 gcode0,         // i  : predicted CB gain,     Q(14 - exp_gcode0)
+    Word16 exp_gcode0,     // i  : exponent of predicted CB gain,        Q0
+    Word16 *gain_pit,      // o  : Pitch gain,                           Q14
+    Word16 *gain_cod       // o  : Code gain,                            Q1
+)
+{
+
+    Word16 g_code, exp, frac, tmp;
+    Word32 L_tmp;
+
+    Word16 qua_ener_MR122; // o  : quantized energy error, MR122 version Q10
+    Word16 qua_ener;       // o  : quantized energy error,               Q10
+
+    // Read the quantized gains
+    *gain_pit = *p++;
+    g_code = *p++;
+
+    //------------------------------------------------------------------*
+     *  calculate final fixed codebook gain:                            *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~                            *
+     *                                                                  *
+     *   gc = gc0 * g                                                   *
+     *------------------------------------------------------------------
+
+    L_tmp = L_mult(g_code, gcode0);
+    L_tmp = L_shr(L_tmp, sub(10, exp_gcode0));
+    *gain_cod = extract_h(L_tmp);
+
+    //------------------------------------------------------------------*
+     *  calculate predictor update values and update gain predictor:    *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~    *
+     *                                                                  *
+     *   qua_ener       = log2(g)                                       *
+     *   qua_ener_MR122 = 20*log10(g)                                   *
+     *------------------------------------------------------------------
+
+    Log2 (L_deposit_l (g_code), &exp, &frac); // Log2(x Q12) = log2(x) + 12
+    exp = sub(exp, 12);
+
+    tmp = shr_r (frac, 5);
+    qua_ener_MR122 = add (tmp, shl (exp, 10));
+
+    L_tmp = Mpy_32_16(exp, frac, 24660); // 24660 Q12 ~= 6.0206 = 20*log10(2)
+    qua_ener = pv_round (L_shl (L_tmp, 13)); // Q12 * Q0 = Q13 -> Q10
+
+    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+static void MR475_quant_store_results(
+    gc_predState *pred_st, /* i/o: gain predictor state struct               */
+    const Word16 *p,       /* i  : pointer to selected quantizer table entry */
+    Word16 gcode0,         /* i  : predicted CB gain,     Q(14 - exp_gcode0) */
+    Word16 exp_gcode0,     /* i  : exponent of predicted CB gain,        Q0  */
+    Word16 *gain_pit,      /* o  : Pitch gain,                           Q14 */
+    Word16 *gain_cod,      /* o  : Code gain,                            Q1  */
+    Flag   *pOverflow      /* o  : overflow indicator                        */
+)
+{
+    Word16 g_code;
+    Word16 exp;
+    Word16 frac;
+    Word16 tmp;
+    Word32 L_tmp;
+
+    Word16 qua_ener_MR122; /* o  : quantized energy error, MR122 version Q10 */
+    Word16 qua_ener;       /* o  : quantized energy error,               Q10 */
+
+
+    /* Read the quantized gains */
+    *gain_pit = *p++;
+    g_code = *p++;
+
+    /*------------------------------------------------------------------*
+     *  calculate final fixed codebook gain:                            *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~                            *
+     *                                                                  *
+     *   gc = gc0 * g                                                   *
+     *------------------------------------------------------------------*/
+
+    L_tmp = ((Word32) g_code * gcode0) << 1;
+    tmp   = 10 - exp_gcode0;
+    L_tmp = L_shr(L_tmp, tmp, pOverflow);
+    *gain_cod = (Word16)(L_tmp >> 16);
+
+    /*------------------------------------------------------------------*
+     *  calculate predictor update values and update gain predictor:    *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~    *
+     *                                                                  *
+     *   qua_ener       = log2(g)                                       *
+     *   qua_ener_MR122 = 20*log10(g)                                   *
+     *------------------------------------------------------------------*/
+
+    /* Log2(x Q12) = log2(x) + 12 */
+    Log2((Word32) g_code, &exp, &frac, pOverflow);
+    exp -= 12;
+
+    tmp = shr_r(frac, 5, pOverflow);
+    qua_ener_MR122 = exp << 10;
+    qua_ener_MR122 = tmp + qua_ener_MR122;
+
+    /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
+    L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
+    L_tmp = L_tmp << 13;
+
+    /* Q12 * Q0 = Q13 -> Q10 */
+    qua_ener = (Word16)((L_tmp + (Word32) 0x00008000L) >> 16);
+
+    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+
+    return;
+}
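+
+/*
+ Q-format check for the codebook gain computed above (reading aid only, not
+ part of the reference code): g_code is Q12 and gcode0 is Q(14 - exp_gcode0),
+ so
+
+    (g_code * gcode0) << 1         ->  Q(12 + (14 - exp_gcode0) + 1) = Q(27 - exp_gcode0)
+    L_shr by (10 - exp_gcode0)     ->  Q17
+    take the high word (>> 16)     ->  Q1
+
+ which matches the documented Q1 format of *gain_cod.
+*/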
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_update_unq_pred
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    pred_st = pointer to structure of type gc_predState
+    exp_gcode0 = predicted CB gain (exponent MSW) (Word16)
+    frac_gcode0 = predicted CB gain (exponent LSW) (Word16)
+    cod_gain_exp = optimum codebook gain (exponent)(Word16)
+    cod_gain_frac = optimum codebook gain (fraction) (Word16)
+
+ Outputs:
+    pred_st points to the updated structure of type gc_predState
+    pOverflow points to overflow indicator (Flag)
+
+ Returns:
+    None.
+
+ Global Variables Used:
+    None.
+
+ Local Variables Needed:
+    None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This module uses the optimum codebook gain and updates the "unquantized"
+ gain predictor with the (bounded) prediction error.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+void
+MR475_update_unq_pred(
+    gc_predState *pred_st, // i/o: gain predictor state struct
+    Word16 exp_gcode0,     // i  : predicted CB gain (exponent MSW),  Q0
+    Word16 frac_gcode0,    // i  : predicted CB gain (exponent LSW),  Q15
+    Word16 cod_gain_exp,   // i  : optimum codebook gain (exponent),  Q0
+    Word16 cod_gain_frac   // i  : optimum codebook gain (fraction),  Q15
+)
+{
+    Word16 tmp, exp, frac;
+    Word16 qua_ener, qua_ener_MR122;
+    Word32 L_tmp;
+
+    // calculate prediction error factor (given optimum CB gain gcu):
+    //   predErrFact = gcu / gcode0
+    //   (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
+    //    -> limit qua_ener*)
+    //
+    // calculate prediction error (log):
+    //
+    //   qua_ener_MR122 = log2(predErrFact)
+    //   qua_ener       = 20*log10(predErrFact)
+
+    if (cod_gain_frac <= 0)
+    {
+        // if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT
+        // -> set qua_ener(_MR122) directly
+        qua_ener = MIN_QUA_ENER;
+        qua_ener_MR122 = MIN_QUA_ENER_MR122;
+    }
+    else
+    {
+        // convert gcode0 from DPF to standard fraction/exponent format
+        // with normalized frac, i.e. 16384 <= frac <= 32767
+        // Note: exponent correction (exp=exp-14) is done after div_s
+        frac_gcode0 = extract_l (Pow2 (14, frac_gcode0));
+
+        // make sure cod_gain_frac < frac_gcode0  for div_s
+        if (sub(cod_gain_frac, frac_gcode0) >= 0)
+        {
+            cod_gain_frac = shr (cod_gain_frac, 1);
+            cod_gain_exp = add (cod_gain_exp, 1);
+        }
+
+        // predErrFact
+        //   = gcu / gcode0
+        //   = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
+        //   = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
+        //   = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
+
+        frac = div_s (cod_gain_frac, frac_gcode0);
+        tmp = sub (sub (cod_gain_exp, exp_gcode0), 1);
+
+        Log2 (L_deposit_l (frac), &exp, &frac);
+        exp = add (exp, tmp);
+
+        // calculate prediction error (log2, Q10)
+        qua_ener_MR122 = shr_r (frac, 5);
+        qua_ener_MR122 = add (qua_ener_MR122, shl (exp, 10));
+
+        if (sub(qua_ener_MR122, MIN_QUA_ENER_MR122) < 0)
+        {
+            qua_ener = MIN_QUA_ENER;
+            qua_ener_MR122 = MIN_QUA_ENER_MR122;
+        }
+        else if (sub(qua_ener_MR122, MAX_QUA_ENER_MR122) > 0)
+        {
+            qua_ener = MAX_QUA_ENER;
+            qua_ener_MR122 = MAX_QUA_ENER_MR122;
+        }
+        else
+        {
+            // calculate prediction error (20*log10, Q10)
+            L_tmp = Mpy_32_16(exp, frac, 24660);
+            // 24660 Q12 ~= 6.0206 = 20*log10(2)
+            qua_ener = pv_round (L_shl (L_tmp, 13));
+            // Q12 * Q0 = Q13 -> Q26 -> Q10
+        }
+    }
+
+    // update MA predictor memory
+    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+void MR475_update_unq_pred(
+    gc_predState *pred_st, /* i/o: gain predictor state struct            */
+    Word16 exp_gcode0,     /* i  : predicted CB gain (exponent MSW),  Q0  */
+    Word16 frac_gcode0,    /* i  : predicted CB gain (exponent LSW),  Q15 */
+    Word16 cod_gain_exp,   /* i  : optimum codebook gain (exponent),  Q0  */
+    Word16 cod_gain_frac,  /* i  : optimum codebook gain (fraction),  Q15 */
+    Flag   *pOverflow      /* o  : overflow indicator                     */
+)
+{
+    Word16 tmp;
+    Word16 exp;
+    Word16 frac;
+    Word16 qua_ener;
+    Word16 qua_ener_MR122;
+    Word32 L_tmp;
+
+    /* calculate prediction error factor (given optimum CB gain gcu):
+     *
+     *   predErrFact = gcu / gcode0
+     *   (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
+     *    -> limit qua_ener*)
+     *
+     * calculate prediction error (log):
+     *
+     *   qua_ener_MR122 = log2(predErrFact)
+     *   qua_ener       = 20*log10(predErrFact)
+     *
+     */
+
+    if (cod_gain_frac <= 0)
+    {
+        /* if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT */
+        /* -> set qua_ener(_MR122) directly                   */
+        qua_ener = MIN_QUA_ENER;
+        qua_ener_MR122 = MIN_QUA_ENER_MR122;
+    }
+    else
+    {
+        /* convert gcode0 from DPF to standard fraction/exponent format */
+        /* with normalized frac, i.e. 16384 <= frac <= 32767            */
+        /* Note: exponent correction (exp=exp-14) is done after div_s   */
+        frac_gcode0 = (Word16)(Pow2(14, frac_gcode0, pOverflow));
+
+        /* make sure cod_gain_frac < frac_gcode0  for div_s */
+        if (cod_gain_frac >= frac_gcode0)
+        {
+            cod_gain_frac >>= 1;
+            cod_gain_exp += 1;
+        }
+
+        /*
+          predErrFact
+             = gcu / gcode0
+             = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
+             = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
+             = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
+        */
+        frac = div_s(cod_gain_frac, frac_gcode0);
+        tmp = cod_gain_exp - exp_gcode0;
+        tmp -= 1;
+
+        Log2((Word32) frac, &exp, &frac, pOverflow);
+        exp += tmp;
+
+        /* calculate prediction error (log2, Q10) */
+        qua_ener_MR122 = shr_r(frac, 5, pOverflow);
+        tmp = exp << 10;
+        qua_ener_MR122 += tmp;
+
+        if (qua_ener_MR122 > MAX_QUA_ENER_MR122)
+        {
+            qua_ener = MAX_QUA_ENER;
+            qua_ener_MR122 = MAX_QUA_ENER_MR122;
+        }
+        else
+        {
+            /* calculate prediction error (20*log10, Q10) */
+            L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
+            /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
+            L_tmp =  L_shl(L_tmp, 13, pOverflow);
+            qua_ener = pv_round(L_tmp, pOverflow);
+
+            /* Q12 * Q0 = Q13 -> Q26 -> Q10     */
+        }
+    }
+
+    /* update MA predictor memory */
+    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+
+
+    return;
+}
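+
+/*
+ Floating-point equivalent of the update above (illustration only):
+
+    predErrFact    = gcu / gcode0;               // limited to the allowed
+                                                 // prediction error range
+    qua_ener_MR122 = log2(predErrFact);          // Q10 in the fixed-point code
+    qua_ener       = 20.0 * log10(predErrFact);  // Q10 in the fixed-point code
+    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
+*/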
+
+/****************************************************************************/
+
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: MR475_gain_quant
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    pred_st = pointer to structure of type gc_predState
+    sf0_exp_gcode0 = predicted CB gain (exponent) (Word16)
+    sf0_frac_gcode0 = predicted CB gain (fraction) (Word16)
+    sf0_exp_coeff = energy coeff. (exponent part) (Word16)
+    sf0_frac_coeff = energy coeff. (fraction part) (Word16)
+    sf0_exp_target_en = exponent of target energy (Word16)
+    sf0_frac_target_en = fraction of target energy (Word16)
+    sf1_code_nosharp = innovative codebook vector  (Word16)
+    sf1_exp_gcode0 = predicted CB gain (exponent) (Word16)
+    sf1_frac_gcode0 = predicted CB gain (fraction) (Word16)
+    sf1_exp_coeff = energy coeff. (exponent part) (Word16)
+    sf1_frac_coeff = energy coeff. (fraction part) (Word16)
+    sf1_exp_target_en = exponent of target energy (Word16)
+    sf1_frac_target_en = fraction of target energy (Word16)
+    gp_limit = pitch gain limit (Word16)
+    sf0_gain_pit = pointer to Pitch gain (Word16)
+    sf0_gain_cod = pointer to Code gain (Word16)
+    sf1_gain_pit = pointer to Pitch gain (Word16)
+    sf1_gain_cod = pointer to Code gain (Word16)
+
+ Outputs:
+    pred_st points to the updated structure of type gc_predState
+    sf0_gain_pit points to Pitch gain
+    sf0_gain_cod points to Code gain
+    sf1_gain_pit points to Pitch gain
+    sf1_gain_cod points to Code gain
+
+ Returns:
+    index = index of quantization
+
+ Global Variables Used:
+    None.
+
+ Local Variables Needed:
+    None.
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This module provides quantization of pitch and codebook gains for two
+ subframes using the predicted codebook gain.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+ None.
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+Word16
+MR475_gain_quant(              // o  : index of quantization.
+    gc_predState *pred_st,     // i/o: gain predictor state struct
+
+                               // data from subframe 0 (or 2)
+    Word16 sf0_exp_gcode0,     // i  : predicted CB gain (exponent),      Q0
+    Word16 sf0_frac_gcode0,    // i  : predicted CB gain (fraction),      Q15
+    Word16 sf0_exp_coeff[],    // i  : energy coeff. (5), exponent part,  Q0
+    Word16 sf0_frac_coeff[],   // i  : energy coeff. (5), fraction part,  Q15
+                               //      (frac_coeff and exp_coeff computed in
+                               //       calc_filt_energies())
+    Word16 sf0_exp_target_en,  // i  : exponent of target energy,         Q0
+    Word16 sf0_frac_target_en, // i  : fraction of target energy,         Q15
+
+                               // data from subframe 1 (or 3)
+    Word16 sf1_code_nosharp[], // i  : innovative codebook vector (L_SUBFR)
+                               //      (without pitch sharpening)
+    Word16 sf1_exp_gcode0,     // i  : predicted CB gain (exponent),      Q0
+    Word16 sf1_frac_gcode0,    // i  : predicted CB gain (fraction),      Q15
+    Word16 sf1_exp_coeff[],    // i  : energy coeff. (5), exponent part,  Q0
+    Word16 sf1_frac_coeff[],   // i  : energy coeff. (5), fraction part,  Q15
+                               //      (frac_coeff and exp_coeff computed in
+                               //       calc_filt_energies())
+    Word16 sf1_exp_target_en,  // i  : exponent of target energy,         Q0
+    Word16 sf1_frac_target_en, // i  : fraction of target energy,         Q15
+
+    Word16 gp_limit,           // i  : pitch gain limit
+
+    Word16 *sf0_gain_pit,      // o  : Pitch gain,                        Q14
+    Word16 *sf0_gain_cod,      // o  : Code gain,                         Q1
+
+    Word16 *sf1_gain_pit,      // o  : Pitch gain,                        Q14
+    Word16 *sf1_gain_cod       // o  : Code gain,                         Q1
+)
+{
+    const Word16 *p;
+    Word16 i, index = 0;
+    Word16 tmp;
+    Word16 exp;
+    Word16 sf0_gcode0, sf1_gcode0;
+    Word16 g_pitch, g2_pitch, g_code, g2_code, g_pit_cod;
+    Word16 coeff[10], coeff_lo[10], exp_max[10];  // 0..4: sf0; 5..9: sf1
+    Word32 L_tmp, dist_min;
+
+     *-------------------------------------------------------------------*
+     *  predicted codebook gain                                          *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~                                          *
+     *  gc0     = 2^exp_gcode0 + 2^frac_gcode0                           *
+     *                                                                   *
+     *  gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0)      *
+     *-------------------------------------------------------------------*
+
+    sf0_gcode0 = extract_l(Pow2(14, sf0_frac_gcode0));
+    sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
+
+     * For each subframe, the error energy (sum) to be minimized consists
+     * of five terms, t[0..4].
+     *
+     *                      t[0] =    gp^2  * <y1 y1>
+     *                      t[1] = -2*gp    * <xn y1>
+     *                      t[2] =    gc^2  * <y2 y2>
+     *                      t[3] = -2*gc    * <xn y2>
+     *                      t[4] =  2*gp*gc * <y1 y2>
+     *
+
+    // sf 0
+    // determine the scaling exponent for g_code: ec = ec0 - 11
+    exp = sub(sf0_exp_gcode0, 11);
+
+    // calculate exp_max[i] = s[i]-1
+    exp_max[0] = sub(sf0_exp_coeff[0], 13);
+    exp_max[1] = sub(sf0_exp_coeff[1], 14);
+    exp_max[2] = add(sf0_exp_coeff[2], add(15, shl(exp, 1)));
+    exp_max[3] = add(sf0_exp_coeff[3], exp);
+    exp_max[4] = add(sf0_exp_coeff[4], add(1, exp));
+
+    // sf 1
+    // determine the scaling exponent for g_code: ec = ec0 - 11
+    exp = sub(sf1_exp_gcode0, 11);
+
+    // calculate exp_max[i] = s[i]-1
+    exp_max[5] = sub(sf1_exp_coeff[0], 13);
+    exp_max[6] = sub(sf1_exp_coeff[1], 14);
+    exp_max[7] = add(sf1_exp_coeff[2], add(15, shl(exp, 1)));
+    exp_max[8] = add(sf1_exp_coeff[3], exp);
+    exp_max[9] = add(sf1_exp_coeff[4], add(1, exp));
+
+     *-------------------------------------------------------------------*
+     *  Gain search equalisation:                                        *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~                                        *
+     *  The MSE for the two subframes is weighted differently if there   *
+     *  is a big difference in the corresponding target energies         *
+     *-------------------------------------------------------------------*
+
+    // make the target energy exponents the same by de-normalizing the
+    // fraction of the smaller one. This is necessary to be able to compare
+    // them
+
+    exp = sf0_exp_target_en - sf1_exp_target_en;
+    if (exp > 0)
+    {
+        sf1_frac_target_en = shr (sf1_frac_target_en, exp);
+    }
+    else
+    {
+        sf0_frac_target_en = shl (sf0_frac_target_en, exp);
+    }
+
+    // assume no change of exponents
+    exp = 0;
+
+    // test for target energy difference; set exp to +1 or -1 to scale
+    // up/down coefficients for sf 1
+
+    tmp = shr_r (sf1_frac_target_en, 1);   // tmp = ceil(0.5*en(sf1))
+    if (sub (tmp, sf0_frac_target_en) > 0) // tmp > en(sf0)?
+    {
+        // target_energy(sf1) > 2*target_energy(sf0)
+        //   -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
+        exp = 1;
+    }
+    else
+    {
+        tmp = shr (add (sf0_frac_target_en, 3), 2); // tmp=ceil(0.25*en(sf0))
+        if (sub (tmp, sf1_frac_target_en) > 0)      // tmp > en(sf1)?
+        {
+            // target_energy(sf1) < 0.25*target_energy(sf0)
+            //   -> scale down MSE(sf0) by 0.5 by subtracting 1 from
+            //      coefficients 0..4
+            exp = -1;
+        }
+    }
+
+    for (i = 0; i < 5; i++)
+    {
+        exp_max[i] = add (exp_max[i], exp);
+    }
+
+     *-------------------------------------------------------------------*
+     *  Find maximum exponent:                                           *
+     *  ~~~~~~~~~~~~~~~~~~~~~~                                           *
+     *                                                                   *
+     *  For the sum operation, all terms must have the same scaling;     *
+     *  that scaling should be low enough to prevent overflow. There-    *
+     *  fore, the maximum scale is determined and all coefficients are   *
+     *  re-scaled:                                                       *
+     *                                                                   *
+     *    exp = max(exp_max[i]) + 1;                                     *
+     *    e = exp_max[i]-exp;         e <= 0!                            *
+     *    c[i] = c[i]*2^e                                                *
+     *-------------------------------------------------------------------*
+
+    exp = exp_max[0];
+    for (i = 1; i < 10; i++)
+    {
+        if (sub(exp_max[i], exp) > 0)
+        {
+            exp = exp_max[i];
+        }
+    }
+    exp = add(exp, 1);      // To avoid overflow
+
+    p = &sf0_frac_coeff[0];
+    for (i = 0; i < 5; i++) {
+        tmp = sub(exp, exp_max[i]);
+        L_tmp = L_deposit_h(*p++);
+        L_tmp = L_shr(L_tmp, tmp);
+        L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
+    }
+    p = &sf1_frac_coeff[0];
+    for (; i < 10; i++) {
+        tmp = sub(exp, exp_max[i]);
+        L_tmp = L_deposit_h(*p++);
+        L_tmp = L_shr(L_tmp, tmp);
+        L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
+    }
+
+     *-------------------------------------------------------------------*
+     *  Codebook search:                                                 *
+     *  ~~~~~~~~~~~~~~~~                                                 *
+     *                                                                   *
+     *  For each pair (g_pitch, g_fac) in the table calculate the        *
+     *  terms t[0..4] and sum them up; the result is the mean squared    *
+     *  error for the quantized gains from the table. The index for the  *
+     *  minimum MSE is stored and finally used to retrieve the quantized *
+     *  gains                                                            *
+     *-------------------------------------------------------------------
+
+    // start with "infinite" MSE
+    dist_min = MAX_32;
+
+    p = &table_gain_MR475[0];
+
+    for (i = 0; i < MR475_VQ_SIZE; i++)
+    {
+        // subframe 0 (and 2) calculations
+        g_pitch = *p++;
+        g_code = *p++;
+
+        g_code = mult(g_code, sf0_gcode0);
+        g2_pitch = mult(g_pitch, g_pitch);
+        g2_code = mult(g_code, g_code);
+        g_pit_cod = mult(g_code, g_pitch);
+
+        L_tmp = Mpy_32_16(       coeff[0], coeff_lo[0], g2_pitch);
+        L_tmp = Mac_32_16(L_tmp, coeff[1], coeff_lo[1], g_pitch);
+        L_tmp = Mac_32_16(L_tmp, coeff[2], coeff_lo[2], g2_code);
+        L_tmp = Mac_32_16(L_tmp, coeff[3], coeff_lo[3], g_code);
+        L_tmp = Mac_32_16(L_tmp, coeff[4], coeff_lo[4], g_pit_cod);
+
+        tmp = sub (g_pitch, gp_limit);
+
+        // subframe 1 (and 3) calculations
+        g_pitch = *p++;
+        g_code = *p++;
+
+        if (tmp <= 0 && sub(g_pitch, gp_limit) <= 0)
+        {
+            g_code = mult(g_code, sf1_gcode0);
+            g2_pitch = mult(g_pitch, g_pitch);
+            g2_code = mult(g_code, g_code);
+            g_pit_cod = mult(g_code, g_pitch);
+
+            L_tmp = Mac_32_16(L_tmp, coeff[5], coeff_lo[5], g2_pitch);
+            L_tmp = Mac_32_16(L_tmp, coeff[6], coeff_lo[6], g_pitch);
+            L_tmp = Mac_32_16(L_tmp, coeff[7], coeff_lo[7], g2_code);
+            L_tmp = Mac_32_16(L_tmp, coeff[8], coeff_lo[8], g_code);
+            L_tmp = Mac_32_16(L_tmp, coeff[9], coeff_lo[9], g_pit_cod);
+
+            // store table index if MSE for this index is lower
+            // than the minimum MSE seen so far
+            if (L_sub(L_tmp, dist_min) < (Word32) 0)
+            {
+                dist_min = L_tmp;
+                index = i;
+            }
+        }
+    }
+
+     *------------------------------------------------------------------*
+     *  read quantized gains and update MA predictor memories           *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~           *
+     *------------------------------------------------------------------*
+
+    // for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
+    // as those calculated from the "real" predictor using quantized gains
+    tmp = shl(index, 2);
+    MR475_quant_store_results(pred_st,
+                              &table_gain_MR475[tmp],
+                              sf0_gcode0,
+                              sf0_exp_gcode0,
+                              sf0_gain_pit,
+                              sf0_gain_cod);
+
+    // calculate new predicted gain for subframe 1 (this time using
+    // the real, quantized gains)
+    gc_pred(pred_st, MR475, sf1_code_nosharp,
+            &sf1_exp_gcode0, &sf1_frac_gcode0,
+            &sf0_exp_gcode0, &sf0_gcode0); // last two args are unused
+    sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
+
+    tmp = add (tmp, 2);
+    MR475_quant_store_results(pred_st,
+                              &table_gain_MR475[tmp],
+                              sf1_gcode0,
+                              sf1_exp_gcode0,
+                              sf1_gain_pit,
+                              sf1_gain_cod);
+
+    return index;
+}
+
+------------------------------------------------------------------------------
+ RESOURCES USED [optional]
+
+ When the code is written for a specific target processor, the
+ resources used should be documented below.
+
+ HEAP MEMORY USED: x bytes
+
+ STACK MEMORY USED: x bytes
+
+ CLOCK CYCLES: (cycle count equation for this function) + (variable
+                used to represent cycle count for each subroutine
+                called)
+     where: (cycle count variable) = cycle count for [subroutine
+                                     name]
+
+------------------------------------------------------------------------------
+ CAUTION [optional]
+ [State any special notes, constraints or cautions for users of this function]
+
+------------------------------------------------------------------------------
+*/
+
+Word16 MR475_gain_quant(       /* o  : index of quantization.                 */
+    gc_predState *pred_st,     /* i/o: gain predictor state struct            */
+
+    /* data from subframe 0 (or 2) */
+    Word16 sf0_exp_gcode0,     /* i  : predicted CB gain (exponent),      Q0  */
+    Word16 sf0_frac_gcode0,    /* i  : predicted CB gain (fraction),      Q15 */
+    Word16 sf0_exp_coeff[],    /* i  : energy coeff. (5), exponent part,  Q0  */
+    Word16 sf0_frac_coeff[],   /* i  : energy coeff. (5), fraction part,  Q15 */
+    /*      (frac_coeff and exp_coeff computed in  */
+    /*       calc_filt_energies())                 */
+    Word16 sf0_exp_target_en,  /* i  : exponent of target energy,         Q0  */
+    Word16 sf0_frac_target_en, /* i  : fraction of target energy,         Q15 */
+
+    /* data from subframe 1 (or 3) */
+    Word16 sf1_code_nosharp[], /* i  : innovative codebook vector (L_SUBFR)   */
+    /*      (without pitch sharpening)             */
+    Word16 sf1_exp_gcode0,     /* i  : predicted CB gain (exponent),      Q0  */
+    Word16 sf1_frac_gcode0,    /* i  : predicted CB gain (fraction),      Q15 */
+    Word16 sf1_exp_coeff[],    /* i  : energy coeff. (5), exponent part,  Q0  */
+    Word16 sf1_frac_coeff[],   /* i  : energy coeff. (5), fraction part,  Q15 */
+    /*      (frac_coeff and exp_coeff computed in  */
+    /*       calc_filt_energies())                 */
+    Word16 sf1_exp_target_en,  /* i  : exponent of target energy,         Q0  */
+    Word16 sf1_frac_target_en, /* i  : fraction of target energy,         Q15 */
+
+    Word16 gp_limit,           /* i  : pitch gain limit                       */
+
+    Word16 *sf0_gain_pit,      /* o  : Pitch gain,                        Q14 */
+    Word16 *sf0_gain_cod,      /* o  : Code gain,                         Q1  */
+
+    Word16 *sf1_gain_pit,      /* o  : Pitch gain,                        Q14 */
+    Word16 *sf1_gain_cod,      /* o  : Code gain,                         Q1  */
+    Flag   *pOverflow          /* o  : overflow indicator                     */
+)
+{
+    const Word16 *p;
+    Word16 i;
+    Word16 index = 0;
+    Word16 tmp;
+    Word16 exp;
+    Word16 sf0_gcode0;
+    Word16 sf1_gcode0;
+    Word16 g_pitch;
+    Word16 g2_pitch;
+    Word16 g_code;
+    Word16 g2_code;
+    Word16 g_pit_cod;
+    Word16 coeff[10];
+    Word16 coeff_lo[10];
+    Word16 exp_max[10];  /* 0..4: sf0; 5..9: sf1 */
+    Word32 L_tmp;
+    Word32 dist_min;
+
+    /*-------------------------------------------------------------------*
+     *  predicted codebook gain                                          *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~                                          *
+     *  gc0     = 2^exp_gcode0 + 2^frac_gcode0                           *
+     *                                                                   *
+     *  gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0)      *
+     *-------------------------------------------------------------------*/
+
+    sf0_gcode0 = (Word16)(Pow2(14, sf0_frac_gcode0, pOverflow));
+    sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
+
+    /*
+     * For each subframe, the error energy (sum) to be minimized consists
+     * of five terms, t[0..4].
+     *
+     *                      t[0] =    gp^2  * <y1 y1>
+     *                      t[1] = -2*gp    * <xn y1>
+     *                      t[2] =    gc^2  * <y2 y2>
+     *                      t[3] = -2*gc    * <xn y2>
+     *                      t[4] =  2*gp*gc * <y1 y2>
+     *
+     */
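+    /* For reference: with xn the target, y1 the filtered adaptive-codebook
+     * vector and y2 the filtered fixed-codebook vector (the vectors in the
+     * inner products above), t[0]+t[1]+t[2]+t[3]+t[4] equals
+     * ||xn - gp*y1 - gc*y2||^2 - <xn xn>, i.e. the subframe error energy up
+     * to the constant target-energy term, which is why the search below
+     * minimizes this sum.
+     */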
+
+    /* sf 0 */
+    /* determine the scaling exponent for g_code: ec = ec0 - 11 */
+    exp = sf0_exp_gcode0 - 11;
+
+    /* calculate exp_max[i] = s[i]-1 */
+    exp_max[0] = (sf0_exp_coeff[0] - 13);
+    exp_max[1] = (sf0_exp_coeff[1] - 14);
+    exp_max[2] = (sf0_exp_coeff[2] + (15 + (exp << 1)));
+    exp_max[3] = (sf0_exp_coeff[3] + exp);
+    exp_max[4] = (sf0_exp_coeff[4] + (1 + exp));
+
+    /* sf 1 */
+    /* determine the scaling exponent for g_code: ec = ec0 - 11 */
+    exp = sf1_exp_gcode0 - 11;
+
+    /* calculate exp_max[i] = s[i]-1 */
+    exp_max[5] = (sf1_exp_coeff[0] - 13);
+    exp_max[6] = (sf1_exp_coeff[1] - 14);
+    exp_max[7] = (sf1_exp_coeff[2] + (15 + (exp << 1)));
+    exp_max[8] = (sf1_exp_coeff[3] + exp);
+    exp_max[9] = (sf1_exp_coeff[4] + (1 + exp));
+
+    /*-------------------------------------------------------------------*
+     *  Gain search equalisation:                                        *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~                                        *
+     *  The MSE for the two subframes is weighted differently if there   *
+     *  is a big difference in the corresponding target energies         *
+     *-------------------------------------------------------------------*/
+
+    /* make the target energy exponents the same by de-normalizing the
+       fraction of the smaller one. This is necessary to be able to compare
+       them
+     */
+    exp = sf0_exp_target_en - sf1_exp_target_en;
+    if (exp > 0)
+    {
+        sf1_frac_target_en >>= exp;
+    }
+    else
+    {
+        sf0_frac_target_en >>= (-exp);
+    }
+
+    /* assume no change of exponents */
+    exp = 0;
+
+    /* test for target energy difference; set exp to +1 or -1 to scale
+     * up/down coefficients for sf 1
+     */
+    tmp = shr_r(sf1_frac_target_en, 1, pOverflow);  /* tmp = ceil(0.5*en(sf1)) */
+
+    if (tmp > sf0_frac_target_en)          /* tmp > en(sf0)? */
+    {
+        /*
+         * target_energy(sf1) > 2*target_energy(sf0)
+         *   -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
+         */
+        exp = 1;
+    }
+    else
+    {
+        tmp = ((sf0_frac_target_en + 3) >> 2); /* tmp=ceil(0.25*en(sf0)) */
+
+        if (tmp > sf1_frac_target_en)      /* tmp > en(sf1)? */
+        {
+            /*
+             * target_energy(sf1) < 0.25*target_energy(sf0)
+             *   -> scale down MSE(sf0) by 0.5 by subtracting 1 from
+             *      coefficients 0..4
+             */
+            exp = -1;
+        }
+    }
+
+    for (i = 0; i < 5; i++)
+    {
+        exp_max[i] += exp;
+    }
+
+    /*-------------------------------------------------------------------*
+     *  Find maximum exponent:                                           *
+     *  ~~~~~~~~~~~~~~~~~~~~~~                                           *
+     *                                                                   *
+     *  For the sum operation, all terms must have the same scaling;     *
+     *  that scaling should be low enough to prevent overflow. There-    *
+     *  fore, the maximum scale is determined and all coefficients are   *
+     *  re-scaled:                                                       *
+     *                                                                   *
+     *    exp = max(exp_max[i]) + 1;                                     *
+     *    e = exp_max[i]-exp;         e <= 0!                            *
+     *    c[i] = c[i]*2^e                                                *
+     *-------------------------------------------------------------------*/
+
+    exp = exp_max[0];
+    for (i = 9; i > 0; i--)
+    {
+        if (exp_max[i] > exp)
+        {
+            exp = exp_max[i];
+        }
+    }
+    exp++;      /* To avoid overflow */
+
+    p = &sf0_frac_coeff[0];
+    for (i = 0; i < 5; i++)
+    {
+        tmp = (exp - exp_max[i]);
+        L_tmp = ((Word32)(*p++) << 16);
+        L_tmp = L_shr(L_tmp, tmp, pOverflow);
+        coeff[i] = (Word16)(L_tmp >> 16);
+        coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
+    }
+    p = &sf1_frac_coeff[0];
+    for (; i < 10; i++)
+    {
+        tmp = exp - exp_max[i];
+        L_tmp = ((Word32)(*p++) << 16);
+        L_tmp = L_shr(L_tmp, tmp, pOverflow);
+        coeff[i] = (Word16)(L_tmp >> 16);
+        coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
+    }
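+    /* the two loops above split each re-scaled 32-bit coefficient into the
+       hi/lo double-precision format expected by Mpy_32_16(): coeff[i] holds
+       the upper 16 bits and coeff_lo[i] the next 15 bits (the same split
+       L_Extract() performs in the pseudo-code) */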
+
+
+    /*-------------------------------------------------------------------*
+     *  Codebook search:                                                 *
+     *  ~~~~~~~~~~~~~~~~                                                 *
+     *                                                                   *
+     *  For each pair (g_pitch, g_fac) in the table calculate the        *
+     *  terms t[0..4] and sum them up; the result is the mean squared    *
+     *  error for the quantized gains from the table. The index for the  *
+     *  minimum MSE is stored and finally used to retrieve the quantized *
+     *  gains                                                            *
+     *-------------------------------------------------------------------*/
+
+    /* start with "infinite" MSE */
+    dist_min = MAX_32;
+
+    p = &table_gain_MR475[0];
+
+    for (i = 0; i < MR475_VQ_SIZE; i++)
+    {
+        /* subframe 0 (and 2) calculations */
+        g_pitch = *p++;
+        g_code = *p++;
+
+        /* scale the table code gain by the predicted gain and form the
+           squared and cross gain products (Q15 multiplies) */
+        g_code    = (Word16)(((Word32) g_code * sf0_gcode0) >> 15);
+        g2_pitch  = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
+        g2_code   = (Word16)(((Word32) g_code * g_code) >> 15);
+        g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
+
+
+        L_tmp = Mpy_32_16(coeff[0], coeff_lo[0], g2_pitch, pOverflow) +
+                Mpy_32_16(coeff[1], coeff_lo[1], g_pitch, pOverflow) +
+                Mpy_32_16(coeff[2], coeff_lo[2], g2_code, pOverflow) +
+                Mpy_32_16(coeff[3], coeff_lo[3], g_code, pOverflow) +
+                Mpy_32_16(coeff[4], coeff_lo[4], g_pit_cod, pOverflow);
+
+        tmp = (g_pitch - gp_limit);
+
+        /* subframe 1 (and 3) calculations */
+        g_pitch = *p++;
+        g_code = *p++;
+
+        if ((tmp <= 0) && (g_pitch <= gp_limit))
+        {
+            g_code = (Word16)(((Word32) g_code * sf1_gcode0) >> 15);
+            g2_pitch  = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
+            g2_code   = (Word16)(((Word32) g_code * g_code) >> 15);
+            g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
+
+            L_tmp += (Mpy_32_16(coeff[5], coeff_lo[5], g2_pitch, pOverflow) +
+                      Mpy_32_16(coeff[6], coeff_lo[6], g_pitch, pOverflow) +
+                      Mpy_32_16(coeff[7], coeff_lo[7], g2_code, pOverflow) +
+                      Mpy_32_16(coeff[8], coeff_lo[8], g_code, pOverflow) +
+                      Mpy_32_16(coeff[9], coeff_lo[9], g_pit_cod, pOverflow));
+
+            /* store table index if MSE for this index is lower
+               than the minimum MSE seen so far */
+            if (L_tmp < dist_min)
+            {
+                dist_min = L_tmp;
+                index = i;
+            }
+        }
+    }
+
+    /*------------------------------------------------------------------*
+     *  read quantized gains and update MA predictor memories           *
+     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~           *
+     *------------------------------------------------------------------*/
+
+    /* for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
+       as those calculated from the "real" predictor using quantized gains */
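+    /* each table_gain_MR475 entry holds four gains, read as four *p++ in the
+       search loop above (sf0 pitch, sf0 code, sf1 pitch, sf1 code), so
+       index*4 addresses the sf0 pair and index*4 + 2 the sf1 pair */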
+    tmp = index << 2;
+    MR475_quant_store_results(pred_st,
+                              &table_gain_MR475[tmp],
+                              sf0_gcode0,
+                              sf0_exp_gcode0,
+                              sf0_gain_pit,
+                              sf0_gain_cod,
+                              pOverflow);
+
+    /* calculate new predicted gain for subframe 1 (this time using
+       the real, quantized gains)                                   */
+    gc_pred(pred_st, MR475, sf1_code_nosharp,
+            &sf1_exp_gcode0, &sf1_frac_gcode0,
+            &sf0_exp_gcode0, &sf0_gcode0, /* unused args */
+            pOverflow);
+
+    sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
+
+    tmp += 2;
+    MR475_quant_store_results(
+        pred_st,
+        &table_gain_MR475[tmp],
+        sf1_gcode0,
+        sf1_exp_gcode0,
+        sf1_gain_pit,
+        sf1_gain_cod,
+        pOverflow);
+
+    return(index);
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain475.h b/media/codecs/amrnb/enc/src/qgain475.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain475.h
rename to media/codecs/amrnb/enc/src/qgain475.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain795.cpp b/media/codecs/amrnb/enc/src/qgain795.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain795.cpp
rename to media/codecs/amrnb/enc/src/qgain795.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain795.h b/media/codecs/amrnb/enc/src/qgain795.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qgain795.h
rename to media/codecs/amrnb/enc/src/qgain795.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qua_gain.cpp b/media/codecs/amrnb/enc/src/qua_gain.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/qua_gain.cpp
rename to media/codecs/amrnb/enc/src/qua_gain.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/s10_8pf.cpp b/media/codecs/amrnb/enc/src/s10_8pf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/s10_8pf.cpp
rename to media/codecs/amrnb/enc/src/s10_8pf.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/s10_8pf.h b/media/codecs/amrnb/enc/src/s10_8pf.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/s10_8pf.h
rename to media/codecs/amrnb/enc/src/s10_8pf.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp b/media/codecs/amrnb/enc/src/set_sign.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
rename to media/codecs/amrnb/enc/src/set_sign.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.h b/media/codecs/amrnb/enc/src/set_sign.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/set_sign.h
rename to media/codecs/amrnb/enc/src/set_sign.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sid_sync.cpp b/media/codecs/amrnb/enc/src/sid_sync.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sid_sync.cpp
rename to media/codecs/amrnb/enc/src/sid_sync.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sid_sync.h b/media/codecs/amrnb/enc/src/sid_sync.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sid_sync.h
rename to media/codecs/amrnb/enc/src/sid_sync.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sp_enc.cpp b/media/codecs/amrnb/enc/src/sp_enc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sp_enc.cpp
rename to media/codecs/amrnb/enc/src/sp_enc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/sp_enc.h b/media/codecs/amrnb/enc/src/sp_enc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/sp_enc.h
rename to media/codecs/amrnb/enc/src/sp_enc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spreproc.cpp b/media/codecs/amrnb/enc/src/spreproc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spreproc.cpp
rename to media/codecs/amrnb/enc/src/spreproc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spreproc.h b/media/codecs/amrnb/enc/src/spreproc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spreproc.h
rename to media/codecs/amrnb/enc/src/spreproc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spstproc.cpp b/media/codecs/amrnb/enc/src/spstproc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spstproc.cpp
rename to media/codecs/amrnb/enc/src/spstproc.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/spstproc.h b/media/codecs/amrnb/enc/src/spstproc.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/spstproc.h
rename to media/codecs/amrnb/enc/src/spstproc.h
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp b/media/codecs/amrnb/enc/src/ton_stab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ton_stab.cpp
rename to media/codecs/amrnb/enc/src/ton_stab.cpp
diff --git a/media/libstagefright/codecs/amrnb/enc/src/ton_stab.h b/media/codecs/amrnb/enc/src/ton_stab.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/src/ton_stab.h
rename to media/codecs/amrnb/enc/src/ton_stab.h
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h b/media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
rename to media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp b/media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
rename to media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
diff --git a/media/codecs/amrnb/enc/test/Android.bp b/media/codecs/amrnb/enc/test/Android.bp
new file mode 100644
index 0000000..7e393e3
--- /dev/null
+++ b/media/codecs/amrnb/enc/test/Android.bp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrnb_enc_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrnb_enc_license",
+    ],
+}
+
+cc_test {
+    name: "AmrnbEncoderTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs: [
+        "AmrnbEncoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_amrnb_common",
+        "libstagefright_amrnbenc",
+        "libaudioutils",
+        "libsndfile",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
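+
+// The test above can typically be run on a connected device by module name,
+// e.g.:  atest AmrnbEncoderTest
+// (see README.md in this directory for details)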
diff --git a/media/libstagefright/codecs/amrnb/enc/test/AndroidTest.xml b/media/codecs/amrnb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/AndroidTest.xml
rename to media/codecs/amrnb/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrnb/enc/test/README.md b/media/codecs/amrnb/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/README.md
rename to media/codecs/amrnb/enc/test/README.md
diff --git a/media/libstagefright/codecs/amrnb/enc/test/amrnb_enc_test.cpp b/media/codecs/amrnb/enc/test/amrnb_enc_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrnb/enc/test/amrnb_enc_test.cpp
rename to media/codecs/amrnb/enc/test/amrnb_enc_test.cpp
diff --git a/media/codecs/amrnb/fuzzer/Android.bp b/media/codecs/amrnb/fuzzer/Android.bp
new file mode 100644
index 0000000..833a7ba
--- /dev/null
+++ b/media/codecs/amrnb/fuzzer/Android.bp
@@ -0,0 +1,52 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "amrnb_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "amrnb_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_amrnbdec",
+        "libstagefright_amrnb_common",
+        "liblog",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
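+
+// The target above builds a standard libFuzzer binary, which accepts an
+// optional corpus directory on the command line, e.g.:
+//   amrnb_dec_fuzzer <corpus_dir>
+// (see README.md in this directory for details)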
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/README.md b/media/codecs/amrnb/fuzzer/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrnb/fuzzer/README.md
rename to media/codecs/amrnb/fuzzer/README.md
diff --git a/media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp b/media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
new file mode 100644
index 0000000..c7a7378
--- /dev/null
+++ b/media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
@@ -0,0 +1,96 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <algorithm>
+#include "gsmamr_dec.h"
+
+// Constants for AMR-NB
+constexpr int32_t kSamplesPerFrame = L_FRAME;
+constexpr int32_t kBitsPerSample = 16;
+constexpr int32_t kOutputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
+const bitstream_format kBitStreamFormats[2] = {MIME_IETF, IF2};
+const int32_t kLocalWmfDecBytesPerFrame[16] = {12, 13, 15, 17, 19, 20, 26, 31,
+                                               5,  6,  5,  5,  0,  0,  0,  0};
+const int32_t kLocalIf2DecBytesPerFrame[16] = {13, 14, 16, 18, 19, 21, 26, 31,
+                                               13, 14, 16, 18, 19, 21, 26, 31};
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() { deInitDecoder(); }
+  int16_t initDecoder();
+  void deInitDecoder();
+  void decodeFrames(const uint8_t *data, size_t size);
+
+ private:
+  void *mAmrHandle = nullptr;
+};
+
+int16_t Codec::initDecoder() { return GSMInitDecode(&mAmrHandle, (Word8 *)"AMRNBDecoder"); }
+
+void Codec::deInitDecoder() { GSMDecodeFrameExit(&mAmrHandle); }
+
+void Codec::decodeFrames(const uint8_t *data, size_t size) {
+  while (size > 0) {
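+    // The leading byte of each chunk steers the decode below: bit 0 selects
+    // the bitstream format (MIME/IETF vs IF2) and bits 3..6 select the 3GPP
+    // frame type, which in turn selects the per-frame byte count from the
+    // tables above.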
+    uint8_t mode = *data;
+    bool bit = mode & 0x01;
+    bitstream_format bitstreamFormat = kBitStreamFormats[bit];
+    int32_t frameSize = 0;
+    /* Find frame type */
+    Frame_Type_3GPP frameType = static_cast<Frame_Type_3GPP>((mode >> 3) & 0x0f);
+    ++data;
+    --size;
+    if (bit) {
+      frameSize = kLocalIf2DecBytesPerFrame[frameType];
+    } else {
+      frameSize = kLocalWmfDecBytesPerFrame[frameType];
+    }
+    int16_t outputBuf[kOutputBufferSize];
+    uint8_t *inputBuf = new uint8_t[frameSize];
+    if (!inputBuf) {
+      return;
+    }
+    int32_t minSize = std::min((int32_t)size, frameSize);
+    memcpy(inputBuf, data, minSize);
+    AMRDecode(mAmrHandle, frameType, inputBuf, outputBuf, bitstreamFormat);
+    /* AMRDecode() decodes minSize bytes when decoding succeeds and returns -1
+     * when it fails. Even if no bytes are decoded, advance by minSize so that
+     * the fuzzer proceeds to feed the next data. */
+    data += minSize;
+    size -= minSize;
+    delete[] inputBuf;
+  }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 2) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder() == 0) {
+    codec->decodeFrames(data, size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/libstagefright/codecs/amrnb/patent_disclaimer.txt b/media/codecs/amrnb/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/amrnb/patent_disclaimer.txt
rename to media/codecs/amrnb/patent_disclaimer.txt
diff --git a/media/codecs/amrwb/dec/Android.bp b/media/codecs/amrwb/dec/Android.bp
new file mode 100644
index 0000000..228ea80
--- /dev/null
+++ b/media/codecs/amrwb/dec/Android.bp
@@ -0,0 +1,136 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrwb_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_amrwbdec",
+    vendor_available: true,
+    host_supported: true,
+    min_sdk_version: "29",
+
+    srcs: [
+        "src/agc2_amr_wb.cpp",
+        "src/band_pass_6k_7k.cpp",
+        "src/dec_acelp_2p_in_64.cpp",
+        "src/dec_acelp_4p_in_64.cpp",
+        "src/dec_alg_codebook.cpp",
+        "src/dec_gain2_amr_wb.cpp",
+        "src/deemphasis_32.cpp",
+        "src/dtx_decoder_amr_wb.cpp",
+        "src/get_amr_wb_bits.cpp",
+        "src/highpass_400hz_at_12k8.cpp",
+        "src/highpass_50hz_at_12k8.cpp",
+        "src/homing_amr_wb_dec.cpp",
+        "src/interpolate_isp.cpp",
+        "src/isf_extrapolation.cpp",
+        "src/isp_az.cpp",
+        "src/isp_isf.cpp",
+        "src/lagconceal.cpp",
+        "src/low_pass_filt_7k.cpp",
+        "src/median5.cpp",
+        "src/mime_io.cpp",
+        "src/noise_gen_amrwb.cpp",
+        "src/normalize_amr_wb.cpp",
+        "src/oversamp_12k8_to_16k.cpp",
+        "src/phase_dispersion.cpp",
+        "src/pit_shrp.cpp",
+        "src/pred_lt4.cpp",
+        "src/preemph_amrwb_dec.cpp",
+        "src/pvamrwb_math_op.cpp",
+        "src/pvamrwbdecoder.cpp",
+        "src/q_gain2_tab.cpp",
+        "src/qisf_ns.cpp",
+        "src/qisf_ns_tab.cpp",
+        "src/qpisf_2s.cpp",
+        "src/qpisf_2s_tab.cpp",
+        "src/scale_signal.cpp",
+        "src/synthesis_amr_wb.cpp",
+        "src/voice_factor.cpp",
+        "src/wb_syn_filt.cpp",
+        "src/weight_amrwb_lpc.cpp",
+    ],
+
+    export_include_dirs: [
+        "src",
+        "include",
+    ],
+
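+    // The OSCL_* defines below stub out the PacketVideo OSCL macros that the
+    // imported sources still reference, so they build without the original
+    // OSCL headers.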
+    cflags: [
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+        "-DOSCL_IMPORT_REF=",
+
+        "-Werror",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+    },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+//###############################################################################
+cc_test {
+    name: "libstagefright_amrwbdec_test",
+    gtest: false,
+    host_supported: true,
+
+    srcs: ["test/amrwbdec_test.cpp"],
+
+    cflags: ["-Wall", "-Werror"],
+
+    static_libs: [
+        "libstagefright_amrwbdec",
+        "libsndfile",
+    ],
+
+    local_include_dirs: ["src"],
+
+    shared_libs: ["libaudioutils"],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+    },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
copy to media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/dec/NOTICE b/media/codecs/amrwb/dec/NOTICE
similarity index 100%
copy from media/libstagefright/codecs/amrnb/dec/NOTICE
copy to media/codecs/amrwb/dec/NOTICE
diff --git a/media/codecs/amrwb/dec/TEST_MAPPING b/media/codecs/amrwb/dec/TEST_MAPPING
new file mode 100644
index 0000000..0278d26
--- /dev/null
+++ b/media/codecs/amrwb/dec/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/codecs/amrwb/dec
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrwbDecoderTest" }
+  ]
+}
diff --git a/media/codecs/amrwb/dec/fuzzer/Android.bp b/media/codecs/amrwb/dec/fuzzer/Android.bp
new file mode 100644
index 0000000..16f08fa
--- /dev/null
+++ b/media/codecs/amrwb/dec/fuzzer/Android.bp
@@ -0,0 +1,52 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrwb_dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_dec_license",
+    ],
+}
+
+cc_fuzz {
+    name: "amrwb_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "amrwb_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_amrwbdec",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/README.md b/media/codecs/amrwb/dec/fuzzer/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwb/fuzzer/README.md
rename to media/codecs/amrwb/dec/fuzzer/README.md
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/amrwb_dec_fuzzer.cpp b/media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/fuzzer/amrwb_dec_fuzzer.cpp
rename to media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
diff --git a/media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h b/media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/include/pvamrwbdecoder_api.h
rename to media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
diff --git a/media/libstagefright/codecs/mp3dec/patent_disclaimer.txt b/media/codecs/amrwb/dec/patent_disclaimer.txt
similarity index 100%
copy from media/libstagefright/codecs/mp3dec/patent_disclaimer.txt
copy to media/codecs/amrwb/dec/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/amrwb/src/agc2_amr_wb.cpp b/media/codecs/amrwb/dec/src/agc2_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/agc2_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/agc2_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/band_pass_6k_7k.cpp b/media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/band_pass_6k_7k.cpp
rename to media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_acelp_2p_in_64.cpp b/media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_acelp_2p_in_64.cpp
rename to media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_acelp_4p_in_64.cpp b/media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_acelp_4p_in_64.cpp
rename to media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_alg_codebook.cpp b/media/codecs/amrwb/dec/src/dec_alg_codebook.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_alg_codebook.cpp
rename to media/codecs/amrwb/dec/src/dec_alg_codebook.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dec_gain2_amr_wb.cpp b/media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dec_gain2_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp b/media/codecs/amrwb/dec/src/deemphasis_32.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/deemphasis_32.cpp
rename to media/codecs/amrwb/dec/src/deemphasis_32.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/dtx.h b/media/codecs/amrwb/dec/src/dtx.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dtx.h
rename to media/codecs/amrwb/dec/src/dtx.h
diff --git a/media/libstagefright/codecs/amrwb/src/dtx_decoder_amr_wb.cpp b/media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/dtx_decoder_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/e_pv_amrwbdec.h b/media/codecs/amrwb/dec/src/e_pv_amrwbdec.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/e_pv_amrwbdec.h
rename to media/codecs/amrwb/dec/src/e_pv_amrwbdec.h
diff --git a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp b/media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.cpp
rename to media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.h b/media/codecs/amrwb/dec/src/get_amr_wb_bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/get_amr_wb_bits.h
rename to media/codecs/amrwb/dec/src/get_amr_wb_bits.h
diff --git a/media/libstagefright/codecs/amrwb/src/highpass_400hz_at_12k8.cpp b/media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/highpass_400hz_at_12k8.cpp
rename to media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/highpass_50hz_at_12k8.cpp b/media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/highpass_50hz_at_12k8.cpp
rename to media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp b/media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/homing_amr_wb_dec.cpp
rename to media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/interpolate_isp.cpp b/media/codecs/amrwb/dec/src/interpolate_isp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/interpolate_isp.cpp
rename to media/codecs/amrwb/dec/src/interpolate_isp.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isf_extrapolation.cpp b/media/codecs/amrwb/dec/src/isf_extrapolation.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isf_extrapolation.cpp
rename to media/codecs/amrwb/dec/src/isf_extrapolation.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isp_az.cpp b/media/codecs/amrwb/dec/src/isp_az.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isp_az.cpp
rename to media/codecs/amrwb/dec/src/isp_az.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/isp_isf.cpp b/media/codecs/amrwb/dec/src/isp_isf.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/isp_isf.cpp
rename to media/codecs/amrwb/dec/src/isp_isf.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/lagconceal.cpp b/media/codecs/amrwb/dec/src/lagconceal.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/lagconceal.cpp
rename to media/codecs/amrwb/dec/src/lagconceal.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/low_pass_filt_7k.cpp b/media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/low_pass_filt_7k.cpp
rename to media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/median5.cpp b/media/codecs/amrwb/dec/src/median5.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/median5.cpp
rename to media/codecs/amrwb/dec/src/median5.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/mime_io.cpp b/media/codecs/amrwb/dec/src/mime_io.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/mime_io.cpp
rename to media/codecs/amrwb/dec/src/mime_io.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/mime_io.h b/media/codecs/amrwb/dec/src/mime_io.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/mime_io.h
rename to media/codecs/amrwb/dec/src/mime_io.h
diff --git a/media/libstagefright/codecs/amrwb/src/noise_gen_amrwb.cpp b/media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/noise_gen_amrwb.cpp
rename to media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/normalize_amr_wb.cpp b/media/codecs/amrwb/dec/src/normalize_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/normalize_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/normalize_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/normalize_amr_wb.h b/media/codecs/amrwb/dec/src/normalize_amr_wb.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/normalize_amr_wb.h
rename to media/codecs/amrwb/dec/src/normalize_amr_wb.h
diff --git a/media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp b/media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/oversamp_12k8_to_16k.cpp
rename to media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp b/media/codecs/amrwb/dec/src/phase_dispersion.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/phase_dispersion.cpp
rename to media/codecs/amrwb/dec/src/phase_dispersion.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pit_shrp.cpp b/media/codecs/amrwb/dec/src/pit_shrp.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pit_shrp.cpp
rename to media/codecs/amrwb/dec/src/pit_shrp.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pred_lt4.cpp b/media/codecs/amrwb/dec/src/pred_lt4.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pred_lt4.cpp
rename to media/codecs/amrwb/dec/src/pred_lt4.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/preemph_amrwb_dec.cpp b/media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/preemph_amrwb_dec.cpp
rename to media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pv_amr_wb_type_defs.h b/media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pv_amr_wb_type_defs.h
rename to media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp b/media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp
rename to media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.h b/media/codecs/amrwb/dec/src/pvamrwb_math_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.h
rename to media/codecs/amrwb/dec/src/pvamrwb_math_op.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp b/media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_acelp.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_acelp.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_armv5.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_armv5.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_gcc_armv5.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_gcc_armv5.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_cnst.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_cnst.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_mem_funcs.h b/media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_mem_funcs.h
rename to media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
diff --git a/media/libstagefright/codecs/amrwb/src/q_gain2_tab.cpp b/media/codecs/amrwb/dec/src/q_gain2_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/q_gain2_tab.cpp
rename to media/codecs/amrwb/dec/src/q_gain2_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/q_pulse.h b/media/codecs/amrwb/dec/src/q_pulse.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/q_pulse.h
rename to media/codecs/amrwb/dec/src/q_pulse.h
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns.cpp b/media/codecs/amrwb/dec/src/qisf_ns.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns.cpp
rename to media/codecs/amrwb/dec/src/qisf_ns.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns.h b/media/codecs/amrwb/dec/src/qisf_ns.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns.h
rename to media/codecs/amrwb/dec/src/qisf_ns.h
diff --git a/media/libstagefright/codecs/amrwb/src/qisf_ns_tab.cpp b/media/codecs/amrwb/dec/src/qisf_ns_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qisf_ns_tab.cpp
rename to media/codecs/amrwb/dec/src/qisf_ns_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s.cpp b/media/codecs/amrwb/dec/src/qpisf_2s.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s.cpp
rename to media/codecs/amrwb/dec/src/qpisf_2s.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s.h b/media/codecs/amrwb/dec/src/qpisf_2s.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s.h
rename to media/codecs/amrwb/dec/src/qpisf_2s.h
diff --git a/media/libstagefright/codecs/amrwb/src/qpisf_2s_tab.cpp b/media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/qpisf_2s_tab.cpp
rename to media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/scale_signal.cpp b/media/codecs/amrwb/dec/src/scale_signal.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/scale_signal.cpp
rename to media/codecs/amrwb/dec/src/scale_signal.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.cpp b/media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.cpp
rename to media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
diff --git a/media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.h b/media/codecs/amrwb/dec/src/synthesis_amr_wb.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/synthesis_amr_wb.h
rename to media/codecs/amrwb/dec/src/synthesis_amr_wb.h
diff --git a/media/libstagefright/codecs/amrwb/src/voice_factor.cpp b/media/codecs/amrwb/dec/src/voice_factor.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/voice_factor.cpp
rename to media/codecs/amrwb/dec/src/voice_factor.cpp
diff --git a/media/codecs/amrwb/dec/src/wb_syn_filt.cpp b/media/codecs/amrwb/dec/src/wb_syn_filt.cpp
new file mode 100644
index 0000000..f12828d
--- /dev/null
+++ b/media/codecs/amrwb/dec/src/wb_syn_filt.cpp
@@ -0,0 +1,311 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/****************************************************************************************
+Portions of this file are derived from the following 3GPP standard:
+
+    3GPP TS 26.173
+    ANSI-C code for the Adaptive Multi-Rate - Wideband (AMR-WB) speech codec
+    Available from http://www.3gpp.org
+
+(C) 2007, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
+Permission to distribute, modify and use this file under the standard license
+terms listed above has been obtained from the copyright holder.
+****************************************************************************************/
+/*
+------------------------------------------------------------------------------
+
+
+
+ Filename: wb_syn_filt.cpp
+
+     Date: 05/08/2004
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+wb_syn_filt
+
+     int16 a[],               (i) Q12 : a[m+1] prediction coefficients
+     int16 m,                 (i)     : order of LP filter
+     int16 x[],               (i)     : input signal
+     int16 y[],               (o)     : output signal
+     int16 lg,                (i)     : size of filtering
+     int16 mem[],             (i/o)   : memory associated with this filtering.
+     int16 update,            (i)     : 0=no update, 1=update of memory.
+     int16 y_buf[]
+
+Syn_filt_32
+
+     int16 a[],              (i) Q12 : a[m+1] prediction coefficients
+     int16 m,                (i)     : order of LP filter
+     int16 exc[],            (i) Qnew: excitation (exc[i] >> Qnew)
+     int16 Qnew,             (i)     : exc scaling = 0(min) to 8(max)
+     int16 sig_hi[],         (o) /16 : synthesis high
+     int16 sig_lo[],         (o) /16 : synthesis low
+     int16 lg                (i)     : size of filtering
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+    Do the synthesis filtering 1/A(z); 16-bit and 32-bit versions.
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
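+ Informal sketch (the exact fixed-point scaling is handled in the code below):
+ with a[] in Q12, both routines realize the LPC synthesis filter 1/A(z), i.e.
+
+     y[n] = x[n] - sum_{k=1..m} (a[k]/4096) * y[n-k],     n = 0 ... lg-1
+
+ where x[] is the filter input (the scaled excitation exc[] in Syn_filt_32).
+ wb_syn_filt keeps y[] in 16-bit precision, taking past samples from mem[],
+ and computes four output samples per loop iteration; Syn_filt_32 keeps the
+ synthesis in double precision, split into sig_hi (bits 16..31) and
+ sig_lo (bits 4..15), and computes two output samples per iteration.
+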
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+
+#include "pv_amr_wb_type_defs.h"
+#include "pvamrwbdecoder_mem_funcs.h"
+#include "pvamrwbdecoder_basic_op.h"
+#include "pvamrwb_math_op.h"
+#include "pvamrwbdecoder_cnst.h"
+#include "pvamrwbdecoder_acelp.h"
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+void wb_syn_filt(
+    int16 a[],       /* (i) Q12 : a[m+1] prediction coefficients           */
+    int16 m,         /* (i)     : order of LP filter                       */
+    int16 x[],       /* (i)     : input signal                             */
+    int16 y[],       /* (o)     : output signal                            */
+    int16 lg,        /* (i)     : size of filtering                        */
+    int16 mem[],     /* (i/o)   : memory associated with this filtering.   */
+    int16 update,    /* (i)     : 0=no update, 1=update of memory.         */
+    int16 y_buf[]
+)
+{
+
+    int16 i, j;
+    int32 L_tmp1;
+    int32 L_tmp2;
+    int32 L_tmp3;
+    int32 L_tmp4;
+    int16 *yy;
+
+    /* copy initial filter states into synthesis buffer */
+    pv_memcpy(y_buf, mem, m*sizeof(*yy));
+
+    yy = &y_buf[m];
+
+    /* Do the filtering. */
+
+    for (i = 0; i < lg >> 2; i++)
+    {
+        L_tmp1 = -((int32)x[(i<<2)] << 11);
+        L_tmp2 = -((int32)x[(i<<2)+1] << 11);
+        L_tmp3 = -((int32)x[(i<<2)+2] << 11);
+        L_tmp4 = -((int32)x[(i<<2)+3] << 11);
+
+        /* a[] uses Q12 and abs(a) =< 1 */
+
+        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -3], a[3], L_tmp1);
+        L_tmp2  = fxp_mac_16by16(yy[(i<<2) -2], a[3], L_tmp2);
+        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -2], a[2], L_tmp1);
+        L_tmp2  = fxp_mac_16by16(yy[(i<<2) -1], a[2], L_tmp2);
+        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -1], a[1], L_tmp1);
+
+        for (j = 4; j < m; j += 2)
+        {
+            L_tmp1  = fxp_mac_16by16(yy[(i<<2)-1  - j], a[j+1], L_tmp1);
+            L_tmp2  = fxp_mac_16by16(yy[(i<<2)    - j], a[j+1], L_tmp2);
+            L_tmp1  = fxp_mac_16by16(yy[(i<<2)    - j], a[j  ], L_tmp1);
+            L_tmp2  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j  ], L_tmp2);
+            L_tmp3  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j+1], L_tmp3);
+            L_tmp4  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j+1], L_tmp4);
+            L_tmp3  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j  ], L_tmp3);
+            L_tmp4  = fxp_mac_16by16(yy[(i<<2)+3  - j], a[j  ], L_tmp4);
+        }
+
+        L_tmp1  = fxp_mac_16by16(yy[(i<<2)    - j], a[j], L_tmp1);
+        L_tmp2  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j], L_tmp2);
+        L_tmp3  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j], L_tmp3);
+        L_tmp4  = fxp_mac_16by16(yy[(i<<2)+3  - j], a[j], L_tmp4);
+
+        L_tmp1 = shl_int32(L_tmp1, 4);
+        L_tmp1 = L_tmp1 == INT32_MIN ? INT32_MIN + 1 : L_tmp1;
+
+        y[(i<<2)] = yy[(i<<2)] = amr_wb_round(-L_tmp1);
+
+        L_tmp2  = fxp_mac_16by16(yy[(i<<2)], a[1], L_tmp2);
+
+        L_tmp2 = shl_int32(L_tmp2, 4);
+        L_tmp2 = L_tmp2 == INT32_MIN ? INT32_MIN + 1 : L_tmp2;
+
+        y[(i<<2)+1] = yy[(i<<2)+1] = amr_wb_round(-L_tmp2);
+
+        L_tmp3  = fxp_mac_16by16(yy[(i<<2) - 1], a[3], L_tmp3);
+        L_tmp4  = fxp_mac_16by16(yy[(i<<2)], a[3], L_tmp4);
+        L_tmp3  = fxp_mac_16by16(yy[(i<<2)], a[2], L_tmp3);
+        L_tmp4  = fxp_mac_16by16(yy[(i<<2) + 1], a[2], L_tmp4);
+        L_tmp3  = fxp_mac_16by16(yy[(i<<2) + 1], a[1], L_tmp3);
+
+        L_tmp3 = shl_int32(L_tmp3, 4);
+        L_tmp3 = L_tmp3 == INT32_MIN ? INT32_MIN + 1 : L_tmp3;
+
+        y[(i<<2)+2] = yy[(i<<2)+2] = amr_wb_round(-L_tmp3);
+
+        L_tmp4  = fxp_mac_16by16(yy[(i<<2)+2], a[1], L_tmp4);
+
+        L_tmp4 = shl_int32(L_tmp4, 4);
+        L_tmp4 = L_tmp4 == INT32_MIN ? INT32_MIN + 1 : L_tmp4;
+
+        y[(i<<2)+3] = yy[(i<<2)+3] = amr_wb_round(-L_tmp4);
+    }
+
+
+    /* Update memory if required */
+
+    if (update)
+    {
+        pv_memcpy(mem, &y[lg - m], m*sizeof(*y));
+    }
+
+    return;
+}
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+void Syn_filt_32(
+    int16 a[],              /* (i) Q12 : a[m+1] prediction coefficients */
+    int16 m,                /* (i)     : order of LP filter             */
+    int16 exc[],            /* (i) Qnew: excitation (exc[i] >> Qnew)    */
+    int16 Qnew,             /* (i)     : exc scaling = 0(min) to 8(max) */
+    int16 sig_hi[],         /* (o) /16 : synthesis high                 */
+    int16 sig_lo[],         /* (o) /16 : synthesis low                  */
+    int16 lg                /* (i)     : size of filtering              */
+)
+{
+    int16 i, k, a0;
+    int32 L_tmp1;
+    int32 L_tmp2;
+    int32 L_tmp3;
+    int32 L_tmp4;
+
+    a0 = 9 - Qnew;        /* input / 16 and >>Qnew */
+
+    /* Do the filtering. */
+
+    for (i = 0; i < lg >> 1; i++)
+    {
+
+        L_tmp3 = 0;
+        L_tmp4 = 0;
+
+        L_tmp1 = fxp_mul_16by16(sig_lo[(i<<1) - 1], a[1]);
+        L_tmp2 = fxp_mul_16by16(sig_hi[(i<<1) - 1], a[1]);
+
+        for (k = 2; k < m; k += 2)
+        {
+
+            L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)-1 - k], a[k+1], L_tmp1);
+            L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)-1 - k], a[k+1], L_tmp2);
+            L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k  ], L_tmp1);
+            L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k  ], L_tmp2);
+            L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k+1], L_tmp3);
+            L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k+1], L_tmp4);
+            L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k  ], L_tmp3);
+            L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k  ], L_tmp4);
+        }
+
+        L_tmp1 = -fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k], L_tmp1);
+        L_tmp3 =  fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k], L_tmp3);
+        L_tmp2 =  fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k], L_tmp2);
+        L_tmp4 =  fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k], L_tmp4);
+
+
+
+        L_tmp1 >>= 11;      /* -4 : sig_lo[i] << 4 */
+
+        int64 sig_tmp;
+        sig_tmp = (int64)L_tmp1 + (int32)(exc[(i<<1)] << a0);
+        L_tmp1 = (int32)(sig_tmp - (L_tmp2 << 1));
+
+        /* sig_hi = bit16 to bit31 of synthesis */
+        L_tmp1 = shl_int32(L_tmp1, 3);           /* ai in Q12 */
+
+        sig_hi[(i<<1)] = (int16)(L_tmp1 >> 16);
+
+        L_tmp4 = fxp_mac_16by16((int16)(L_tmp1 >> 16), a[1], L_tmp4);
+
+        /* sig_lo = bit4 to bit15 of synthesis */
+        /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
+        sig_lo[(i<<1)] = (int16)((L_tmp1 >> 4) - ((L_tmp1 >> 16) << 12));
+
+        L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)], a[1], L_tmp3);
+        L_tmp3 = -L_tmp3 >> 11;
+
+        sig_tmp = (int64)L_tmp3 + (int32)(exc[(i<<1)+1] << a0);
+        L_tmp3 = (int32)(sig_tmp - (L_tmp4 << 1));
+        /* sig_hi = bit16 to bit31 of synthesis */
+        L_tmp3 = shl_int32(L_tmp3, 3);           /* ai in Q12 */
+        sig_hi[(i<<1)+1] = (int16)(L_tmp3 >> 16);
+
+        /* sig_lo = bit4 to bit15 of synthesis */
+        /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
+        sig_lo[(i<<1)+1] = (int16)((L_tmp3 >> 4) - (sig_hi[(i<<1)+1] << 12));
+    }
+
+}
+
+
diff --git a/media/libstagefright/codecs/amrwb/src/weight_amrwb_lpc.cpp b/media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/src/weight_amrwb_lpc.cpp
rename to media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h b/media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AmrwbDecTestEnvironment.h
rename to media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp b/media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AmrwbDecoderTest.cpp
rename to media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
diff --git a/media/codecs/amrwb/dec/test/Android.bp b/media/codecs/amrwb/dec/test/Android.bp
new file mode 100644
index 0000000..7d0c964
--- /dev/null
+++ b/media/codecs/amrwb/dec/test/Android.bp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrwb_dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_dec_license",
+    ],
+}
+
+cc_test {
+    name: "AmrwbDecoderTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "AmrwbDecoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_amrwbdec",
+        "libsndfile",
+        "libaudioutils",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/codecs/amrwb/test/AndroidTest.xml b/media/codecs/amrwb/dec/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/AndroidTest.xml
rename to media/codecs/amrwb/dec/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrwb/test/README.md b/media/codecs/amrwb/dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/README.md
rename to media/codecs/amrwb/dec/test/README.md
diff --git a/media/libstagefright/codecs/amrwb/test/amrwbdec_test.cpp b/media/codecs/amrwb/dec/test/amrwbdec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwb/test/amrwbdec_test.cpp
rename to media/codecs/amrwb/dec/test/amrwbdec_test.cpp
diff --git a/media/codecs/amrwb/enc/Android.bp b/media/codecs/amrwb/enc/Android.bp
new file mode 100644
index 0000000..cc72eb7
--- /dev/null
+++ b/media/codecs/amrwb/enc/Android.bp
@@ -0,0 +1,166 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_enc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrwb_enc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_amrwbenc",
+    vendor_available: true,
+    min_sdk_version: "29",
+
+    srcs: [
+        "src/autocorr.c",
+        "src/az_isp.c",
+        "src/bits.c",
+        "src/c2t64fx.c",
+        "src/c4t64fx.c",
+        "src/convolve.c",
+        "src/cor_h_x.c",
+        "src/decim54.c",
+        "src/deemph.c",
+        "src/dtx.c",
+        "src/g_pitch.c",
+        "src/gpclip.c",
+        "src/homing.c",
+        "src/hp400.c",
+        "src/hp50.c",
+        "src/hp6k.c",
+        "src/hp_wsp.c",
+        "src/int_lpc.c",
+        "src/isp_az.c",
+        "src/isp_isf.c",
+        "src/lag_wind.c",
+        "src/levinson.c",
+        "src/log2.c",
+        "src/lp_dec2.c",
+        "src/math_op.c",
+        "src/oper_32b.c",
+        "src/p_med_ol.c",
+        "src/pit_shrp.c",
+        "src/pitch_f4.c",
+        "src/pred_lt4.c",
+        "src/preemph.c",
+        "src/q_gain2.c",
+        "src/q_pulse.c",
+        "src/qisf_ns.c",
+        "src/qpisf_2s.c",
+        "src/random.c",
+        "src/residu.c",
+        "src/scale.c",
+        "src/stream.c",
+        "src/syn_filt.c",
+        "src/updt_tar.c",
+        "src/util.c",
+        "src/voAMRWBEnc.c",
+        "src/voicefac.c",
+        "src/wb_vad.c",
+        "src/weight_a.c",
+        "src/mem_align.c",
+    ],
+
+    arch: {
+        arm: {
+            srcs: [
+                "src/asm/ARMV5E/convolve_opt.s",
+                "src/asm/ARMV5E/cor_h_vec_opt.s",
+                "src/asm/ARMV5E/Deemph_32_opt.s",
+                "src/asm/ARMV5E/Dot_p_opt.s",
+                "src/asm/ARMV5E/Filt_6k_7k_opt.s",
+                "src/asm/ARMV5E/Norm_Corr_opt.s",
+                "src/asm/ARMV5E/pred_lt4_1_opt.s",
+                "src/asm/ARMV5E/residu_asm_opt.s",
+                "src/asm/ARMV5E/scale_sig_opt.s",
+                "src/asm/ARMV5E/Syn_filt_32_opt.s",
+                "src/asm/ARMV5E/syn_filt_opt.s",
+            ],
+
+            cflags: [
+                "-DARM",
+                "-DASM_OPT",
+            ],
+            local_include_dirs: ["src/asm/ARMV5E"],
+
+            instruction_set: "arm",
+
+            neon: {
+                exclude_srcs: [
+                    "src/asm/ARMV5E/convolve_opt.s",
+                    "src/asm/ARMV5E/cor_h_vec_opt.s",
+                    "src/asm/ARMV5E/Deemph_32_opt.s",
+                    "src/asm/ARMV5E/Dot_p_opt.s",
+                    "src/asm/ARMV5E/Filt_6k_7k_opt.s",
+                    "src/asm/ARMV5E/Norm_Corr_opt.s",
+                    "src/asm/ARMV5E/pred_lt4_1_opt.s",
+                    "src/asm/ARMV5E/residu_asm_opt.s",
+                    "src/asm/ARMV5E/scale_sig_opt.s",
+                    "src/asm/ARMV5E/Syn_filt_32_opt.s",
+                    "src/asm/ARMV5E/syn_filt_opt.s",
+                ],
+
+                srcs: [
+                    "src/asm/ARMV7/convolve_neon.s",
+                    "src/asm/ARMV7/cor_h_vec_neon.s",
+                    "src/asm/ARMV7/Deemph_32_neon.s",
+                    "src/asm/ARMV7/Dot_p_neon.s",
+                    "src/asm/ARMV7/Filt_6k_7k_neon.s",
+                    "src/asm/ARMV7/Norm_Corr_neon.s",
+                    "src/asm/ARMV7/pred_lt4_1_neon.s",
+                    "src/asm/ARMV7/residu_asm_neon.s",
+                    "src/asm/ARMV7/scale_sig_neon.s",
+                    "src/asm/ARMV7/Syn_filt_32_neon.s",
+                    "src/asm/ARMV7/syn_filt_neon.s",
+                ],
+
+                // don't actually generate neon instructions, see bug 26932980
+                cflags: [
+                    "-DARMV7",
+                    "-mfpu=vfpv3",
+                ],
+                local_include_dirs: [
+                    "src/asm/ARMV5E",
+                    "src/asm/ARMV7",
+                ],
+            },
+
+        },
+    },
+
+    include_dirs: [
+        "frameworks/av/include",
+        "frameworks/av/media/libstagefright/include",
+    ],
+
+    local_include_dirs: ["src"],
+    export_include_dirs: ["inc"],
+
+    shared_libs: [
+        "libstagefright_enc_common",
+        "liblog",
+    ],
+
+    cflags: ["-Werror"],
+    sanitize: {
+        cfi: true,
+    },
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
copy from media/libstagefright/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
copy to media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrnb/enc/NOTICE b/media/codecs/amrwb/enc/NOTICE
similarity index 100%
copy from media/libstagefright/codecs/amrnb/enc/NOTICE
copy to media/codecs/amrwb/enc/NOTICE
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c b/media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/AMRWB_E_SAMPLE.c
rename to media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
diff --git a/media/codecs/amrwb/enc/SampleCode/Android.bp b/media/codecs/amrwb/enc/SampleCode/Android.bp
new file mode 100644
index 0000000..1448364
--- /dev/null
+++ b/media/codecs/amrwb/enc/SampleCode/Android.bp
@@ -0,0 +1,47 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_enc_SampleCode_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_amrwb_enc_SampleCode_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_test {
+    name: "AMRWBEncTest",
+    gtest: false,
+
+    srcs: ["AMRWB_E_SAMPLE.c"],
+
+    cflags: ["-Wall", "-Werror"],
+
+    arch: {
+        arm: {
+            instruction_set: "arm",
+        },
+    },
+
+    shared_libs: [
+        "libdl",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_amrwbenc",
+        "libstagefright_enc_common",
+    ],
+
+    sanitize: {
+        cfi: true,
+    },
+}
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/MODULE_LICENSE_APACHE2 b/media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/MODULE_LICENSE_APACHE2
rename to media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/NOTICE b/media/codecs/amrwb/enc/SampleCode/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/SampleCode/NOTICE
rename to media/codecs/amrwb/enc/SampleCode/NOTICE
diff --git a/media/codecs/amrwb/enc/TEST_MAPPING b/media/codecs/amrwb/enc/TEST_MAPPING
new file mode 100644
index 0000000..045e8b3
--- /dev/null
+++ b/media/codecs/amrwb/enc/TEST_MAPPING
@@ -0,0 +1,10 @@
+// mappings for frameworks/av/media/codecs/amrwb/enc
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "AmrwbEncoderTest"}
+
+  ]
+}
diff --git a/media/libstagefright/codecs/amrwbenc/doc/voAMRWBEncoderSDK.pdf b/media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/doc/voAMRWBEncoderSDK.pdf
rename to media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
Binary files differ
diff --git a/media/codecs/amrwb/enc/fuzzer/Android.bp b/media/codecs/amrwb/enc/fuzzer/Android.bp
new file mode 100644
index 0000000..f74fa4f
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/Android.bp
@@ -0,0 +1,52 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrwb_enc_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_enc_license",
+    ],
+}
+
+cc_fuzz {
+    name: "amrwb_enc_fuzzer",
+    host_supported: true,
+
+    srcs: [
+        "amrwb_enc_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_amrwbenc",
+        "libstagefright_enc_common",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/codecs/amrwb/enc/fuzzer/README.md b/media/codecs/amrwb/enc/fuzzer/README.md
new file mode 100644
index 0000000..447fbfa
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/README.md
@@ -0,0 +1,60 @@
+# Fuzzer for libstagefright_amrwbenc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for the AMR-WB encoder is designed based on an understanding of
+the codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AMR-WB supports the following parameters:
+1. Frame Type (parameter name: `frameType`)
+2. Mode (parameter name: `mode`)
+
+| Parameter   | Valid Values | Configured Value |
+|-------------|--------------|------------------|
+| `frameType` | 0. `VOAMRWB_DEFAULT` 1. `VOAMRWB_ITU` 2. `VOAMRWB_RFC3267` | 1st byte of data, modulo 3. |
+| `mode`      | 0. `VOAMRWB_MD66` 1. `VOAMRWB_MD885` 2. `VOAMRWB_MD1265` 3. `VOAMRWB_MD1425` 4. `VOAMRWB_MD1585` 5. `VOAMRWB_MD1825` 6. `VOAMRWB_MD1985` 7. `VOAMRWB_MD2305` 8. `VOAMRWB_MD2385` 9. `VOAMRWB_N_MODES` | Bits 4-7 of the 1st byte of data, modulo 10. |
+
+This also ensures that the plugin is always deterministic for any given input.
+
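+For reference, this is how the first input byte is mapped to the configuration
+in `amrwb_enc_fuzzer.cpp` (added in this change); `kFrameTypes` and `kModes`
+are the lookup tables defined there:
+
+```
+uint8_t startByte = *data;  // first byte of the fuzzer input
+VOAMRWBFRAMETYPE frameType = kFrameTypes[startByte % 3];  // DEFAULT / ITU / RFC3267
+int32_t mode = kModes[(startByte >> 4) % 10];             // bitrate mode (last entry is invalid)
+```
+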
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation is successful, the input is advanced by the frame size.
+If the encode operation is unsuccessful, the input is still advanced by the frame
+size so that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the amrwb_enc_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amrwb_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some pcm files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/amrwb_enc_fuzzer/amrwb_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp b/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
new file mode 100644
index 0000000..4773a1f
--- /dev/null
+++ b/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
@@ -0,0 +1,142 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+#include "cmnMemory.h"
+#include "voAMRWB.h"
+#include "cnst.h"
+
+typedef int(VO_API *VOGETAUDIOENCAPI)(VO_AUDIO_CODECAPI *pEncHandle);
+const int32_t kInputBufferSize = L_FRAME16k * sizeof(int16_t) * 2;
+const int32_t kOutputBufferSize = 2 * kInputBufferSize;
+const int32_t kModes[] = {VOAMRWB_MD66 /* 6.60kbps */,    VOAMRWB_MD885 /* 8.85kbps */,
+                          VOAMRWB_MD1265 /* 12.65kbps */, VOAMRWB_MD1425 /* 14.25kbps */,
+                          VOAMRWB_MD1585 /* 15.85kbps */, VOAMRWB_MD1825 /* 18.25kbps */,
+                          VOAMRWB_MD1985 /* 19.85kbps */, VOAMRWB_MD2305 /* 23.05kbps */,
+                          VOAMRWB_MD2385 /* 23.85kbps */, VOAMRWB_N_MODES /* Invalid Mode */};
+const VOAMRWBFRAMETYPE kFrameTypes[] = {VOAMRWB_DEFAULT, VOAMRWB_ITU, VOAMRWB_RFC3267};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    bool initEncoder(const uint8_t *data);
+    void deInitEncoder();
+    void encodeFrames(const uint8_t *data, size_t size);
+
+   private:
+    VO_AUDIO_CODECAPI *mApiHandle = nullptr;
+    VO_MEM_OPERATOR *mMemOperator = nullptr;
+    VO_HANDLE mEncoderHandle = nullptr;
+};
+
+bool Codec::initEncoder(const uint8_t *data) {
+    uint8_t startByte = *data;
+    int32_t mode = kModes[(startByte >> 4) % 10];
+    VOAMRWBFRAMETYPE frameType = kFrameTypes[startByte % 3];
+    mMemOperator = new VO_MEM_OPERATOR;
+    if (!mMemOperator) {
+        return false;
+    }
+
+    mMemOperator->Alloc = cmnMemAlloc;
+    mMemOperator->Copy = cmnMemCopy;
+    mMemOperator->Free = cmnMemFree;
+    mMemOperator->Set = cmnMemSet;
+    mMemOperator->Check = cmnMemCheck;
+
+    VO_CODEC_INIT_USERDATA userData;
+    memset(&userData, 0, sizeof(userData));
+    userData.memflag = VO_IMF_USERMEMOPERATOR;
+    userData.memData = (VO_PTR)mMemOperator;
+
+    mApiHandle = new VO_AUDIO_CODECAPI;
+    if (!mApiHandle) {
+        return false;
+    }
+    if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) {
+        // Failed to get api handle
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->Init(&mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) {
+        // Failed to init AMRWB encoder
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &frameType)) {
+        // Failed to set AMRWB encoder frame type
+        return false;
+    }
+    if (VO_ERR_NONE != mApiHandle->SetParam(mEncoderHandle, VO_PID_AMRWB_MODE, &mode)) {
+        // Failed to set AMRWB encoder mode
+        return false;
+    }
+    return true;
+}
+
+void Codec::deInitEncoder() {
+    if (mEncoderHandle) {
+        mApiHandle->Uninit(mEncoderHandle);
+        mEncoderHandle = nullptr;
+    }
+    if (mApiHandle) {
+        delete mApiHandle;
+        mApiHandle = nullptr;
+    }
+    if (mMemOperator) {
+        delete mMemOperator;
+        mMemOperator = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    do {
+        int32_t minSize = std::min((int32_t)size, kInputBufferSize);
+        uint8_t outputBuf[kOutputBufferSize] = {};
+        VO_CODECBUFFER inData;
+        VO_CODECBUFFER outData;
+        VO_AUDIO_OUTPUTINFO outFormat;
+        inData.Buffer = (unsigned char *)data;
+        inData.Length = minSize;
+        outData.Buffer = outputBuf;
+        mApiHandle->SetInputData(mEncoderHandle, &inData);
+        mApiHandle->GetOutputData(mEncoderHandle, &outData, &outFormat);
+        data += minSize;
+        size -= minSize;
+    } while (size > 0);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data)) {
+        // Consume first byte
+        ++data;
+        --size;
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/amrwbenc/inc/acelp.h b/media/codecs/amrwb/enc/inc/acelp.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/acelp.h
rename to media/codecs/amrwb/enc/inc/acelp.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/basic_op.h b/media/codecs/amrwb/enc/inc/basic_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/basic_op.h
rename to media/codecs/amrwb/enc/inc/basic_op.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/bits.h b/media/codecs/amrwb/enc/inc/bits.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/bits.h
rename to media/codecs/amrwb/enc/inc/bits.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/cnst.h b/media/codecs/amrwb/enc/inc/cnst.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/cnst.h
rename to media/codecs/amrwb/enc/inc/cnst.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/cod_main.h b/media/codecs/amrwb/enc/inc/cod_main.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/cod_main.h
rename to media/codecs/amrwb/enc/inc/cod_main.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/dtx.h b/media/codecs/amrwb/enc/inc/dtx.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/dtx.h
rename to media/codecs/amrwb/enc/inc/dtx.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/grid100.tab b/media/codecs/amrwb/enc/inc/grid100.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/grid100.tab
rename to media/codecs/amrwb/enc/inc/grid100.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/ham_wind.tab b/media/codecs/amrwb/enc/inc/ham_wind.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/ham_wind.tab
rename to media/codecs/amrwb/enc/inc/ham_wind.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/homing.tab b/media/codecs/amrwb/enc/inc/homing.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/homing.tab
rename to media/codecs/amrwb/enc/inc/homing.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab b/media/codecs/amrwb/enc/inc/isp_isf.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/isp_isf.tab
rename to media/codecs/amrwb/enc/inc/isp_isf.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/lag_wind.tab b/media/codecs/amrwb/enc/inc/lag_wind.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/lag_wind.tab
rename to media/codecs/amrwb/enc/inc/lag_wind.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/log2.h b/media/codecs/amrwb/enc/inc/log2.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/log2.h
rename to media/codecs/amrwb/enc/inc/log2.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/log2_tab.h b/media/codecs/amrwb/enc/inc/log2_tab.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/log2_tab.h
rename to media/codecs/amrwb/enc/inc/log2_tab.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/main.h b/media/codecs/amrwb/enc/inc/main.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/main.h
rename to media/codecs/amrwb/enc/inc/main.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/math_op.h b/media/codecs/amrwb/enc/inc/math_op.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/math_op.h
rename to media/codecs/amrwb/enc/inc/math_op.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/mem_align.h b/media/codecs/amrwb/enc/inc/mem_align.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/mem_align.h
rename to media/codecs/amrwb/enc/inc/mem_align.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/mime_io.tab b/media/codecs/amrwb/enc/inc/mime_io.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/mime_io.tab
rename to media/codecs/amrwb/enc/inc/mime_io.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/oper_32b.h b/media/codecs/amrwb/enc/inc/oper_32b.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/oper_32b.h
rename to media/codecs/amrwb/enc/inc/oper_32b.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/p_med_o.h b/media/codecs/amrwb/enc/inc/p_med_o.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/p_med_o.h
rename to media/codecs/amrwb/enc/inc/p_med_o.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/p_med_ol.tab b/media/codecs/amrwb/enc/inc/p_med_ol.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/p_med_ol.tab
rename to media/codecs/amrwb/enc/inc/p_med_ol.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/q_gain2.tab b/media/codecs/amrwb/enc/inc/q_gain2.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/q_gain2.tab
rename to media/codecs/amrwb/enc/inc/q_gain2.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/q_pulse.h b/media/codecs/amrwb/enc/inc/q_pulse.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/q_pulse.h
rename to media/codecs/amrwb/enc/inc/q_pulse.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/qisf_ns.tab b/media/codecs/amrwb/enc/inc/qisf_ns.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/qisf_ns.tab
rename to media/codecs/amrwb/enc/inc/qisf_ns.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/qpisf_2s.tab b/media/codecs/amrwb/enc/inc/qpisf_2s.tab
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/qpisf_2s.tab
rename to media/codecs/amrwb/enc/inc/qpisf_2s.tab
diff --git a/media/libstagefright/codecs/amrwbenc/inc/stream.h b/media/codecs/amrwb/enc/inc/stream.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/stream.h
rename to media/codecs/amrwb/enc/inc/stream.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/typedef.h b/media/codecs/amrwb/enc/inc/typedef.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/typedef.h
rename to media/codecs/amrwb/enc/inc/typedef.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/typedefs.h b/media/codecs/amrwb/enc/inc/typedefs.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/typedefs.h
rename to media/codecs/amrwb/enc/inc/typedefs.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/wb_vad.h b/media/codecs/amrwb/enc/inc/wb_vad.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/wb_vad.h
rename to media/codecs/amrwb/enc/inc/wb_vad.h
diff --git a/media/libstagefright/codecs/amrwbenc/inc/wb_vad_c.h b/media/codecs/amrwb/enc/inc/wb_vad_c.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/inc/wb_vad_c.h
rename to media/codecs/amrwb/enc/inc/wb_vad_c.h
diff --git a/media/libstagefright/codecs/amrwbenc/patent_disclaimer.txt b/media/codecs/amrwb/enc/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/patent_disclaimer.txt
rename to media/codecs/amrwb/enc/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Deemph_32_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Deemph_32_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Dot_p_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Dot_p_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Norm_Corr_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Norm_Corr_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Syn_filt_32_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Syn_filt_32_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/convolve_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/convolve_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/cor_h_vec_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/cor_h_vec_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/residu_asm_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/residu_asm_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/scale_sig_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/scale_sig_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/syn_filt_opt.s b/media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/syn_filt_opt.s
rename to media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Deemph_32_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Deemph_32_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Dot_p_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Dot_p_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Norm_Corr_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Norm_Corr_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Syn_filt_32_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Syn_filt_32_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/convolve_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/convolve_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/cor_h_vec_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/cor_h_vec_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/residu_asm_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/residu_asm_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/scale_sig_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/scale_sig_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/syn_filt_neon.s b/media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/syn_filt_neon.s
rename to media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/codecs/amrwb/enc/src/autocorr.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/autocorr.c
rename to media/codecs/amrwb/enc/src/autocorr.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/az_isp.c b/media/codecs/amrwb/enc/src/az_isp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/az_isp.c
rename to media/codecs/amrwb/enc/src/az_isp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/bits.c b/media/codecs/amrwb/enc/src/bits.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/bits.c
rename to media/codecs/amrwb/enc/src/bits.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/c2t64fx.c b/media/codecs/amrwb/enc/src/c2t64fx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/c2t64fx.c
rename to media/codecs/amrwb/enc/src/c2t64fx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/codecs/amrwb/enc/src/c4t64fx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
rename to media/codecs/amrwb/enc/src/c4t64fx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/codecs/amrwb/enc/src/convolve.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/convolve.c
rename to media/codecs/amrwb/enc/src/convolve.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/cor_h_x.c b/media/codecs/amrwb/enc/src/cor_h_x.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/cor_h_x.c
rename to media/codecs/amrwb/enc/src/cor_h_x.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/decim54.c b/media/codecs/amrwb/enc/src/decim54.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/decim54.c
rename to media/codecs/amrwb/enc/src/decim54.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/deemph.c b/media/codecs/amrwb/enc/src/deemph.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/deemph.c
rename to media/codecs/amrwb/enc/src/deemph.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/dtx.c b/media/codecs/amrwb/enc/src/dtx.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/dtx.c
rename to media/codecs/amrwb/enc/src/dtx.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/g_pitch.c b/media/codecs/amrwb/enc/src/g_pitch.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/g_pitch.c
rename to media/codecs/amrwb/enc/src/g_pitch.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/gpclip.c b/media/codecs/amrwb/enc/src/gpclip.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/gpclip.c
rename to media/codecs/amrwb/enc/src/gpclip.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/homing.c b/media/codecs/amrwb/enc/src/homing.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/homing.c
rename to media/codecs/amrwb/enc/src/homing.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp400.c b/media/codecs/amrwb/enc/src/hp400.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp400.c
rename to media/codecs/amrwb/enc/src/hp400.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp50.c b/media/codecs/amrwb/enc/src/hp50.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp50.c
rename to media/codecs/amrwb/enc/src/hp50.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp6k.c b/media/codecs/amrwb/enc/src/hp6k.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp6k.c
rename to media/codecs/amrwb/enc/src/hp6k.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/hp_wsp.c b/media/codecs/amrwb/enc/src/hp_wsp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/hp_wsp.c
rename to media/codecs/amrwb/enc/src/hp_wsp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/int_lpc.c b/media/codecs/amrwb/enc/src/int_lpc.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/int_lpc.c
rename to media/codecs/amrwb/enc/src/int_lpc.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/isp_az.c b/media/codecs/amrwb/enc/src/isp_az.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/isp_az.c
rename to media/codecs/amrwb/enc/src/isp_az.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/isp_isf.c b/media/codecs/amrwb/enc/src/isp_isf.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/isp_isf.c
rename to media/codecs/amrwb/enc/src/isp_isf.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/lag_wind.c b/media/codecs/amrwb/enc/src/lag_wind.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/lag_wind.c
rename to media/codecs/amrwb/enc/src/lag_wind.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/levinson.c b/media/codecs/amrwb/enc/src/levinson.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/levinson.c
rename to media/codecs/amrwb/enc/src/levinson.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/log2.c b/media/codecs/amrwb/enc/src/log2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/log2.c
rename to media/codecs/amrwb/enc/src/log2.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/lp_dec2.c b/media/codecs/amrwb/enc/src/lp_dec2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/lp_dec2.c
rename to media/codecs/amrwb/enc/src/lp_dec2.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/math_op.c b/media/codecs/amrwb/enc/src/math_op.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/math_op.c
rename to media/codecs/amrwb/enc/src/math_op.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/mem_align.c b/media/codecs/amrwb/enc/src/mem_align.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/mem_align.c
rename to media/codecs/amrwb/enc/src/mem_align.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/oper_32b.c b/media/codecs/amrwb/enc/src/oper_32b.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/oper_32b.c
rename to media/codecs/amrwb/enc/src/oper_32b.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/p_med_ol.c b/media/codecs/amrwb/enc/src/p_med_ol.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/p_med_ol.c
rename to media/codecs/amrwb/enc/src/p_med_ol.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pit_shrp.c b/media/codecs/amrwb/enc/src/pit_shrp.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pit_shrp.c
rename to media/codecs/amrwb/enc/src/pit_shrp.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/codecs/amrwb/enc/src/pitch_f4.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
rename to media/codecs/amrwb/enc/src/pitch_f4.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/pred_lt4.c b/media/codecs/amrwb/enc/src/pred_lt4.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/pred_lt4.c
rename to media/codecs/amrwb/enc/src/pred_lt4.c
diff --git a/media/codecs/amrwb/enc/src/preemph.c b/media/codecs/amrwb/enc/src/preemph.c
new file mode 100644
index 0000000..a43841a
--- /dev/null
+++ b/media/codecs/amrwb/enc/src/preemph.c
@@ -0,0 +1,101 @@
+/*
+ ** Copyright 2003-2010, VisualOn, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ **     http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License.
+ */
+
+/***********************************************************************
+*      File: preemph.c                                                *
+*                                                                     *
+*      Description: Preemphasis: filtering through 1 - g z^-1         *
+*              Preemph2 --> signal is multiplied by 2             *
+*                                                                     *
+************************************************************************/
+
+#include "typedef.h"
+#include "basic_op.h"
+#include <stdint.h>
+
+void Preemph(
+        Word16 x[],                           /* (i/o)   : input signal overwritten by the output */
+        Word16 mu,                            /* (i) Q15 : preemphasis coefficient                */
+        Word16 lg,                            /* (i)     : length of filtering                    */
+        Word16 * mem                          /* (i/o)   : memory (x[-1])                         */
+        )
+{
+    Word16 temp;
+    Word32 i, L_tmp;
+
+    temp = x[lg - 1];
+
+    for (i = lg - 1; i > 0; i--)
+    {
+        L_tmp = L_deposit_h(x[i]);
+        L_tmp -= (x[i - 1] * mu)<<1;
+        x[i] = (L_tmp + 0x8000)>>16;
+    }
+
+    L_tmp = L_deposit_h(x[0]);
+    L_tmp -= ((*mem) * mu)<<1;
+    x[0] = (L_tmp + 0x8000)>>16;
+
+    *mem = temp;
+
+    return;
+}
+
+
+void Preemph2(
+        Word16 x[],                           /* (i/o)   : input signal overwritten by the output */
+        Word16 mu,                            /* (i) Q15 : preemphasis coefficient                */
+        Word16 lg,                            /* (i)     : length of filtering                    */
+        Word16 * mem                          /* (i/o)   : memory (x[-1])                         */
+         )
+{
+    Word16 temp;
+    Word32 i, L_tmp;
+
+    temp = x[lg - 1];
+
+    for (i = (Word16) (lg - 1); i > 0; i--)
+    {
+        L_tmp = L_deposit_h(x[i]);
+        L_tmp -= (x[i - 1] * mu)<<1; // only called with mu == 22282, so this won't overflow
+        if (L_tmp > INT32_MAX / 2) {
+            L_tmp = INT32_MAX / 2;
+        }
+        L_tmp = (L_tmp << 1);
+        if (L_tmp > INT32_MAX - 0x8000) {
+            L_tmp = INT32_MAX - 0x8000;
+        }
+        x[i] = (L_tmp + 0x8000)>>16;
+    }
+
+    L_tmp = L_deposit_h(x[0]);
+    L_tmp -= ((*mem) * mu)<<1;
+    if (L_tmp > INT32_MAX / 2) {
+        L_tmp = INT32_MAX / 2;
+    }
+    L_tmp = (L_tmp << 1);
+    if (L_tmp > INT32_MAX - 0x8000) {
+        L_tmp = INT32_MAX - 0x8000;
+    }
+    x[0] = (L_tmp + 0x8000)>>16;
+
+    *mem = temp;
+
+    return;
+}
+
+
+
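For readers skimming this hunk, Preemph() above applies the preemphasis filter y[n] = x[n] - (mu / 2^15) * x[n-1] in Q15 fixed point, walking the buffer backwards so each x[n-1] it reads is still an unfiltered input sample; Preemph2() additionally doubles the result and saturates before rounding. A minimal floating-point sketch of the same recurrence follows; the names and demo values are illustrative only and not part of the codec.

```
// Minimal floating-point sketch of the preemphasis recurrence implemented in
// Q15 above; the in-place backward walk keeps x[i-1] unfiltered when it is read.
// Illustrative only: names and the demo input are not part of the codec.
#include <cstdio>
#include <vector>

static void preemph_float(std::vector<float> &x, float mu, float &mem) {
    const float last = x.back();            // saved as the new filter memory
    for (size_t i = x.size() - 1; i > 0; --i) {
        x[i] -= mu * x[i - 1];
    }
    x[0] -= mu * mem;                       // first sample uses the previous block's memory
    mem = last;
}

int main() {
    std::vector<float> x = {0.1f, 0.2f, 0.3f, 0.4f};
    float mem = 0.0f;
    preemph_float(x, 22282.0f / 32768.0f, mem);  // 22282 is the Q15 mu noted in the comments above
    for (float v : x) std::printf("%f\n", v);
    return 0;
}
```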
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_gain2.c b/media/codecs/amrwb/enc/src/q_gain2.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/q_gain2.c
rename to media/codecs/amrwb/enc/src/q_gain2.c
diff --git a/media/codecs/amrwb/enc/src/q_pulse.c b/media/codecs/amrwb/enc/src/q_pulse.c
new file mode 100644
index 0000000..657b6fe
--- /dev/null
+++ b/media/codecs/amrwb/enc/src/q_pulse.c
@@ -0,0 +1,401 @@
+/*
+ ** Copyright 2003-2010, VisualOn, Inc.
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ **     http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License.
+ */
+
+/***********************************************************************
+*      File: q_pulse.c                                                 *
+*                                                                      *
+*      Description: Coding and decoding of algebraic codebook          *
+*                                                                      *
+************************************************************************/
+
+#include <stdio.h>
+#include "typedef.h"
+#include "basic_op.h"
+#include "q_pulse.h"
+
+#define NB_POS 16                          /* pos in track, mask for sign bit */
+#define UNUSED_VAR __attribute__((unused))
+
+Word32 quant_1p_N1(                        /* (o) return N+1 bits             */
+        Word16 pos,                        /* (i) position of the pulse       */
+        Word16 N)                          /* (i) number of bits for position */
+{
+    Word16 mask;
+    Word32 index;
+
+    mask = (1 << N) - 1;              /* mask = ((1<<N)-1); */
+    /*-------------------------------------------------------*
+     * Quantization of 1 pulse with N+1 bits:                *
+     *-------------------------------------------------------*/
+    index = L_deposit_l((Word16) (pos & mask));
+    if ((pos & NB_POS) != 0)
+    {
+        index = vo_L_add(index, L_deposit_l(1 << N));   /* index += 1 << N; */
+    }
+    return (index);
+}
+
+
+Word32 quant_2p_2N1(                       /* (o) return (2*N)+1 bits         */
+        Word16 pos1,                          /* (i) position of the pulse 1     */
+        Word16 pos2,                          /* (i) position of the pulse 2     */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 mask, tmp;
+    Word32 index;
+    mask = (1 << N) - 1;              /* mask = ((1<<N)-1); */
+    /*-------------------------------------------------------*
+     * Quantization of 2 pulses with 2*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos2 ^ pos1) & NB_POS) == 0)
+    {
+        /* sign of 1st pulse == sign of 2nd pulse */
+        if(pos1 <= pos2)          /* ((pos1 - pos2) <= 0) */
+        {
+            /* index = ((pos1 & mask) << N) + (pos2 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
+        } else
+        {
+            /* ((pos2 & mask) << N) + (pos1 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
+        }
+        if ((pos1 & NB_POS) != 0)
+        {
+            tmp = (N << 1);
+            index = vo_L_add(index, (1L << tmp));       /* index += 1 << (2*N); */
+        }
+    } else
+    {
+        /* sign of 1st pulse != sign of 2nd pulse */
+        if (vo_sub((Word16) (pos1 & mask), (Word16) (pos2 & mask)) <= 0)
+        {
+            /* index = ((pos2 & mask) << N) + (pos1 & mask); */
+            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
+            if ((pos2 & NB_POS) != 0)
+            {
+                tmp = (N << 1);           /* index += 1 << (2*N); */
+                index = vo_L_add(index, (1L << tmp));
+            }
+        } else
+        {
+            /* index = ((pos1 & mask) << N) + (pos2 & mask);     */
+            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
+            if ((pos1 & NB_POS) != 0)
+            {
+                tmp = (N << 1);
+                index = vo_L_add(index, (1 << tmp));    /* index += 1 << (2*N); */
+            }
+        }
+    }
+    return (index);
+}
+
+
+Word32 quant_3p_3N1(                       /* (o) return (3*N)+1 bits         */
+        Word16 pos1,                          /* (i) position of the pulse 1     */
+        Word16 pos2,                          /* (i) position of the pulse 2     */
+        Word16 pos3,                          /* (i) position of the pulse 3     */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 nb_pos;
+    Word32 index;
+
+    nb_pos =(1 <<(N - 1));            /* nb_pos = (1<<(N-1)); */
+    /*-------------------------------------------------------*
+     * Quantization of 3 pulses with 3*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos1 ^ pos2) & nb_pos) == 0)
+    {
+        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_1p_N1(pos3, N) << (2*N); */
+        index = vo_L_add(index, (quant_1p_N1(pos3, N)<<(N << 1)));
+
+    } else if (((pos1 ^ pos3) & nb_pos) == 0)
+    {
+        index = quant_2p_2N1(pos1, pos3, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos3, (N-1)); */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (quant_1p_N1(pos2, N) << (N << 1)));
+        /* index += quant_1p_N1(pos2, N) <<
+         * (2*N); */
+    } else
+    {
+        index = quant_2p_2N1(pos2, pos3, (N - 1));    /* index = quant_2p_2N1(pos2, pos3, (N-1)); */
+        /* index += (pos2 & nb_pos) << N;            */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
+        /* index += quant_1p_N1(pos1, N) << (2*N);   */
+        index = vo_L_add(index, (quant_1p_N1(pos1, N) << (N << 1)));
+    }
+    return (index);
+}
+
+
+Word32 quant_4p_4N1(                       /* (o) return (4*N)+1 bits         */
+        Word16 pos1,                          /* (i) position of the pulse 1     */
+        Word16 pos2,                          /* (i) position of the pulse 2     */
+        Word16 pos3,                          /* (i) position of the pulse 3     */
+        Word16 pos4,                          /* (i) position of the pulse 4     */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 nb_pos;
+    Word32 index;
+
+    nb_pos = 1 << (N - 1);            /* nb_pos = (1<<(N-1));  */
+    /*-------------------------------------------------------*
+     * Quantization of 4 pulses with 4*N+1 bits:             *
+     *-------------------------------------------------------*/
+    if (((pos1 ^ pos2) & nb_pos) == 0)
+    {
+        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
+        /* index += (pos1 & nb_pos) << N;    */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos3, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos3, pos4, N) << (N << 1)));
+    } else if (((pos1 ^ pos3) & nb_pos) == 0)
+    {
+        index = quant_2p_2N1(pos1, pos3, (N - 1));
+        /* index += (pos1 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos2, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos2, pos4, N) << (N << 1)));
+    } else
+    {
+        index = quant_2p_2N1(pos2, pos3, (N - 1));
+        /* index += (pos2 & nb_pos) << N; */
+        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
+        /* index += quant_2p_2N1(pos1, pos4, N) << (2*N); */
+        index = vo_L_add(index, (quant_2p_2N1(pos1, pos4, N) << (N << 1)));
+    }
+    return (index);
+}
+
+
+Word32 quant_4p_4N(                        /* (o) return 4*N bits             */
+        Word16 pos[],                         /* (i) position of the pulse 1..4  */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 nb_pos, mask UNUSED_VAR, n_1, tmp;
+    Word16 posA[4], posB[4];
+    Word32 i, j, k, index;
+
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+    mask = vo_sub((1 << N), 1);              /* mask = ((1<<N)-1); */
+
+    i = 0;
+    j = 0;
+    for (k = 0; k < 4; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            tmp = vo_sub((N << 2), 3);           /* index = 1 << ((4*N)-3); */
+            index = (1L << tmp);
+            /* index += quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1); */
+            index = vo_L_add(index, quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1));
+            break;
+        case 1:
+            /* index = quant_1p_N1(posA[0], n_1) << ((3*n_1)+1); */
+            tmp = add1((Word16)((vo_L_mult(3, n_1) >> 1)), 1);
+            index = L_shl(quant_1p_N1(posA[0], n_1), tmp);
+            /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
+            index = vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
+            break;
+        case 2:
+            tmp = ((n_1 << 1) + 1);         /* index = quant_2p_2N1(posA[0], posA[1], n_1) << ((2*n_1)+1); */
+            index = L_shl(quant_2p_2N1(posA[0], posA[1], n_1), tmp);
+            /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));
+            break;
+        case 3:
+            /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << N; */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), N);
+            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));        /* index += quant_1p_N1(posB[0], n_1); */
+            break;
+        case 4:
+            index = quant_4p_4N1(posA[0], posA[1], posA[2], posA[3], n_1);
+            break;
+        default:
+            index = 0;
+            fprintf(stderr, "Error in function quant_4p_4N\n");
+    }
+    tmp = ((N << 2) - 2);               /* index += (i & 3) << ((4*N)-2); */
+    index = vo_L_add(index, L_shl((L_deposit_l(i) & (3L)), tmp));
+
+    return (index);
+}
+
+
+
+Word32 quant_5p_5N(                        /* (o) return 5*N bits             */
+        Word16 pos[],                         /* (i) position of the pulse 1..5  */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 nb_pos, n_1, tmp;
+    Word16 posA[5], posB[5];
+    Word32 i, j, k, index, tmp2;
+
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+
+    i = 0;
+    j = 0;
+    for (k = 0; k < 5; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* ((5*N)-1)) */
+            index = L_shl(1L, tmp);   /* index = 1 << ((5*N)-1); */
+            tmp = add1((N << 1), 1);  /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);*/
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posB[3], posB[4], N));        /* index += quant_2p_2N1(posB[3], posB[4], N); */
+            break;
+        case 1:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* index = 1 << ((5*N)-1); */
+            index = L_shl(1L, tmp);
+            tmp = add1((N << 1), 1);   /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) <<((2*N)+1);  */
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posB[3], posA[0], N));        /* index += quant_2p_2N1(posB[3], posA[0], N); */
+            break;
+        case 2:
+            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* ((5*N)-1)) */
+            index = L_shl(1L, tmp);            /* index = 1 << ((5*N)-1); */
+            tmp = add1((N << 1), 1);           /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);  */
+            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
+            index = vo_L_add(index, tmp2);
+            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], N));        /* index += quant_2p_2N1(posA[0], posA[1], N); */
+            break;
+        case 3:
+            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], N));        /* index += quant_2p_2N1(posB[0], posB[1], N); */
+            break;
+        case 4:
+            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posA[3], posB[0], N));        /* index += quant_2p_2N1(posA[3], posB[0], N); */
+            break;
+        case 5:
+            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
+            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
+            index = vo_L_add(index, quant_2p_2N1(posA[3], posA[4], N));        /* index += quant_2p_2N1(posA[3], posA[4], N); */
+            break;
+        default:
+            index = 0;
+            fprintf(stderr, "Error in function quant_5p_5N\n");
+    }
+
+    return (index);
+}
+
+
+Word32 quant_6p_6N_2(                      /* (o) return (6*N)-2 bits         */
+        Word16 pos[],                         /* (i) position of the pulse 1..6  */
+        Word16 N)                             /* (i) number of bits for position */
+{
+    Word16 nb_pos, n_1;
+    Word16 posA[6], posB[6];
+    Word32 i, j, k, index;
+
+    /* !!  N and n_1 are constants, so they do not need to go through the Basic Operators */
+    n_1 = (Word16) (N - 1);
+    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
+
+    i = 0;
+    j = 0;
+    for (k = 0; k < 6; k++)
+    {
+        if ((pos[k] & nb_pos) == 0)
+        {
+            posA[i++] = pos[k];
+        } else
+        {
+            posB[j++] = pos[k];
+        }
+    }
+
+    switch (i)
+    {
+        case 0:
+            index = (1 << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
+            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posB[5], n_1));        /* index += quant_1p_N1(posB[5], n_1); */
+            break;
+        case 1:
+            index = (1L << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
+            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posA[0], n_1));        /* index += quant_1p_N1(posA[0], n_1); */
+            break;
+        case 2:
+            index = (1L << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
+            /* index += quant_4p_4N(posB, n_1) << ((2*n_1)+1); */
+            index = vo_L_add(index, (quant_4p_4N(posB, n_1) << (Word16) (2 * n_1 + 1)));
+            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], n_1));      /* index += quant_2p_2N1(posA[0], posA[1], n_1); */
+            break;
+        case 3:
+            index = (quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << (Word16) (3 * n_1 + 1));
+                                              /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((3*n_1)+1); */
+            index = vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
+                                             /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
+            break;
+        case 4:
+            i = 2;
+            index = (quant_4p_4N(posA, n_1) << (Word16) (2 * n_1 + 1));  /* index = quant_4p_4N(posA, n_1) << ((2*n_1)+1); */
+            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));      /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
+            break;
+        case 5:
+            i = 1;
+            index = (quant_5p_5N(posA, n_1) << N);       /* index = quant_5p_5N(posA, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));        /* index += quant_1p_N1(posB[0], n_1); */
+            break;
+        case 6:
+            i = 0;
+            index = (quant_5p_5N(posA, n_1) << N);       /* index = quant_5p_5N(posA, n_1) << N; */
+            index = vo_L_add(index, quant_1p_N1(posA[5], n_1));        /* index += quant_1p_N1(posA[5], n_1); */
+            break;
+        default:
+            index = 0;
+            fprintf(stderr, "Error in function quant_6p_6N_2\n");
+    }
+    index = vo_L_add(index, ((L_deposit_l(i) & 3L) << (Word16) (6 * N - 4)));   /* index += (i & 3) << ((6*N)-4); */
+
+    return (index);
+}
+
+
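The quant_*p_* routines above pack algebraic-codebook pulse positions and their track signs into a single index. The simplest case, quant_1p_N1(), keeps the position in the low N bits and moves the sign flag (carried in bit 4 of pos, since NB_POS == 16) into bit N. A small standalone sketch of that bit layout follows; names are illustrative and not part of the codec.

```
// Sketch of the 1-pulse index layout used by quant_1p_N1(): the low N bits hold
// the position within the track, and bit N holds the sign flag taken from bit 4
// (NB_POS == 16) of the input position. Illustrative only.
#include <cstdint>
#include <cstdio>

static uint32_t pack_1p(uint16_t pos, int N) {
    uint32_t index = pos & ((1u << N) - 1);  // position bits
    if (pos & 16u) {
        index |= (1u << N);                  // sign flag lands just above them
    }
    return index;
}

int main() {
    // Position 5 with the sign bit set, N = 4 position bits -> 0b10101 = 21.
    std::printf("index = %u\n", pack_1p(5u | 16u, 4));
    return 0;
}
```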
diff --git a/media/libstagefright/codecs/amrwbenc/src/qisf_ns.c b/media/codecs/amrwb/enc/src/qisf_ns.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/qisf_ns.c
rename to media/codecs/amrwb/enc/src/qisf_ns.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/qpisf_2s.c b/media/codecs/amrwb/enc/src/qpisf_2s.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/qpisf_2s.c
rename to media/codecs/amrwb/enc/src/qpisf_2s.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/random.c b/media/codecs/amrwb/enc/src/random.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/random.c
rename to media/codecs/amrwb/enc/src/random.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/residu.c b/media/codecs/amrwb/enc/src/residu.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/residu.c
rename to media/codecs/amrwb/enc/src/residu.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/scale.c b/media/codecs/amrwb/enc/src/scale.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/scale.c
rename to media/codecs/amrwb/enc/src/scale.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/stream.c b/media/codecs/amrwb/enc/src/stream.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/stream.c
rename to media/codecs/amrwb/enc/src/stream.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/codecs/amrwb/enc/src/syn_filt.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/syn_filt.c
rename to media/codecs/amrwb/enc/src/syn_filt.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/updt_tar.c b/media/codecs/amrwb/enc/src/updt_tar.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/updt_tar.c
rename to media/codecs/amrwb/enc/src/updt_tar.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/util.c b/media/codecs/amrwb/enc/src/util.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/util.c
rename to media/codecs/amrwb/enc/src/util.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/codecs/amrwb/enc/src/voAMRWBEnc.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
rename to media/codecs/amrwb/enc/src/voAMRWBEnc.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/voicefac.c b/media/codecs/amrwb/enc/src/voicefac.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/voicefac.c
rename to media/codecs/amrwb/enc/src/voicefac.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/wb_vad.c b/media/codecs/amrwb/enc/src/wb_vad.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/wb_vad.c
rename to media/codecs/amrwb/enc/src/wb_vad.c
diff --git a/media/libstagefright/codecs/amrwbenc/src/weight_a.c b/media/codecs/amrwb/enc/src/weight_a.c
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/src/weight_a.c
rename to media/codecs/amrwb/enc/src/weight_a.c
diff --git a/media/libstagefright/codecs/amrwbenc/test/AmrwbEncTestEnvironment.h b/media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AmrwbEncTestEnvironment.h
rename to media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
diff --git a/media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.cpp b/media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AmrwbEncoderTest.cpp
rename to media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
diff --git a/media/codecs/amrwb/enc/test/Android.bp b/media/codecs/amrwb/enc/test/Android.bp
new file mode 100644
index 0000000..942f6c9
--- /dev/null
+++ b/media/codecs/amrwb/enc/test/Android.bp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_amrwb_enc_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_amrwb_enc_license",
+    ],
+}
+
+cc_test {
+    name: "AmrwbEncoderTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "AmrwbEncoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_enc_common",
+        "libstagefright_amrwbenc",
+        "libaudioutils",
+        "libsndfile",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/codecs/amrwbenc/test/AndroidTest.xml b/media/codecs/amrwb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/AndroidTest.xml
rename to media/codecs/amrwb/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/amrwbenc/test/README.md b/media/codecs/amrwb/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/amrwbenc/test/README.md
rename to media/codecs/amrwb/enc/test/README.md
diff --git a/media/codecs/g711/decoder/Android.bp b/media/codecs/g711/decoder/Android.bp
new file mode 100644
index 0000000..0cd9740
--- /dev/null
+++ b/media/codecs/g711/decoder/Android.bp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_static {
+    name: "codecs_g711dec",
+    vendor_available: true,
+    host_supported: true,
+
+    srcs: [
+        "g711DecAlaw.cpp",
+        "g711DecMlaw.cpp",
+    ],
+
+    export_include_dirs: ["."],
+
+    cflags: ["-Werror"],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+            "unsigned-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+        "test_com.android.media.swcodec",
+    ],
+
+    min_sdk_version: "29",
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/codecs/g711/decoder/g711Dec.h b/media/codecs/g711/decoder/g711Dec.h
new file mode 100644
index 0000000..ca357a5
--- /dev/null
+++ b/media/codecs/g711/decoder/g711Dec.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef G711_DEC_H_
+#define G711_DEC_H_
+
+/**
+ * @file g711Dec.h
+ * @brief g711 Decoder API: DecodeALaw and DecodeMLaw
+ */
+
+/** Decodes input bytes of size inSize according to ALAW
+ *
+ * @param [out] out <tt>int16_t*</tt>: output buffer to be filled with decoded samples.
+ * @param [in] in <tt>const uint8_t*</tt>: input buffer containing bytes to be decoded.
+ * @param [in] inSize <tt>size_t</tt>: size of the input buffer.
+ */
+void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);
+
+/** Decodes input bytes of size inSize according to MLAW
+ *
+ * @param [out] out <tt>int16_t*</tt>: output buffer to be filled with decoded samples.
+ * @param [in] in <tt>const uint8_t*</tt>: input buffer containing bytes to be decoded.
+ * @param [in] inSize <tt>size_t</tt>: size of the input buffer.
+ */
+void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize);
+
+#endif  // G711_DEC_H_
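A hedged usage sketch for the two functions declared in g711Dec.h above: each encoded byte expands to one 16-bit PCM sample, so the output buffer must hold at least inSize samples. The input bytes below are arbitrary placeholders.

```
// Hedged usage sketch for the G.711 decoder API declared above; the encoded
// bytes are arbitrary placeholders. <cstdint> is included first because
// g711Dec.h uses int16_t/size_t without pulling in the standard headers itself.
#include <cstdint>
#include <cstddef>
#include <cstdio>
#include <vector>
#include "g711Dec.h"

int main() {
    const uint8_t encoded[] = {0x55, 0xD5, 0x2A, 0xAA};  // placeholder A-law bytes
    std::vector<int16_t> pcm(sizeof(encoded));           // one sample per input byte
    DecodeALaw(pcm.data(), encoded, sizeof(encoded));    // DecodeMLaw has the same shape
    for (int16_t s : pcm) std::printf("%d\n", s);
    return 0;
}
```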
diff --git a/media/codecs/g711/decoder/g711DecAlaw.cpp b/media/codecs/g711/decoder/g711DecAlaw.cpp
new file mode 100644
index 0000000..e41a7b4
--- /dev/null
+++ b/media/codecs/g711/decoder/g711DecAlaw.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize) {
+  if (out != nullptr && in != nullptr) {
+    while (inSize > 0) {
+      inSize--;
+      int32_t x = *in++;
+
+      int32_t ix = x ^ 0x55;
+      ix &= 0x7f;
+
+      int32_t iexp = ix >> 4;
+      int32_t mant = ix & 0x0f;
+
+      if (iexp > 0) {
+        mant += 16;
+      }
+
+      mant = (mant << 4) + 8;
+
+      if (iexp > 1) {
+        mant = mant << (iexp - 1);
+      }
+
+      *out++ = (x > 127) ? mant : -mant;
+    }
+  }
+}
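As a worked instance of the A-law expansion above, take the single byte 0x55: ix = 0x55 ^ 0x55 = 0, so iexp = 0 and mant = (0 << 4) + 8 = 8, and because the byte is not greater than 127 the decoded sample is -8. A tiny check of that value, assuming the translation unit is linked against g711DecAlaw.cpp:

```
// Worked check of DecodeALaw() for the byte 0x55, which expands to -8 per the
// arithmetic in g711DecAlaw.cpp. Assumes linkage against that file.
#include <cassert>
#include <cstddef>
#include <cstdint>

void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize);  // declared in g711Dec.h

int main() {
    const uint8_t byte = 0x55;
    int16_t sample = 0;
    DecodeALaw(&sample, &byte, 1);
    assert(sample == -8);
    return 0;
}
```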
diff --git a/media/codecs/g711/decoder/g711DecMlaw.cpp b/media/codecs/g711/decoder/g711DecMlaw.cpp
new file mode 100644
index 0000000..bb2caea
--- /dev/null
+++ b/media/codecs/g711/decoder/g711DecMlaw.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize) {
+  if (out != nullptr && in != nullptr) {
+    while (inSize > 0) {
+      inSize--;
+      int32_t x = *in++;
+
+      int32_t mantissa = ~x;
+      int32_t exponent = (mantissa >> 4) & 7;
+      int32_t segment = exponent + 1;
+      mantissa &= 0x0f;
+
+      int32_t step = 4 << segment;
+
+      int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33;
+
+      *out++ = (x < 0x80) ? -abs : abs;
+    }
+  }
+}
diff --git a/media/codecs/g711/fuzzer/Android.bp b/media/codecs/g711/fuzzer/Android.bp
new file mode 100644
index 0000000..376cce7
--- /dev/null
+++ b/media/codecs/g711/fuzzer/Android.bp
@@ -0,0 +1,65 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "g711alaw_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "g711_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "codecs_g711dec",
+    ],
+    cflags: [
+        "-DALAW",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "g711mlaw_dec_fuzzer",
+    host_supported: true,
+    srcs: [
+        "g711_dec_fuzzer.cpp",
+    ],
+    static_libs: [
+        "codecs_g711dec",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/codecs/g711/fuzzer/README.md b/media/codecs/g711/fuzzer/README.md
new file mode 100644
index 0000000..0c1c36b
--- /dev/null
+++ b/media/codecs/g711/fuzzer/README.md
@@ -0,0 +1,49 @@
+# Fuzzer for libstagefright_g711dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for G711 is designed with an understanding of the codec
+and tries to achieve the following:
+
+##### Maximize code coverage
+G711 supports two types of decoding:
+1. DecodeALaw
+2. DecodeMLaw
+
+These two decoder APIs are fuzzed separately by g711alaw_dec_fuzzer and
+g711mlaw_dec_fuzzer, respectively.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec as expected by the decoder API.
+
+## Build
+
+This describes steps to build the g711alaw_dec_fuzzer and g711mlaw_dec_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzers:
+```
+  $ mm -j$(nproc) g711alaw_dec_fuzzer
+  $ mm -j$(nproc) g711mlaw_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some g711 files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/g711alaw_dec_fuzzer/g711alaw_dec_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/g711mlaw_dec_fuzzer/g711mlaw_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/g711alaw_dec_fuzzer/g711alaw_dec_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/g711mlaw_dec_fuzzer/g711mlaw_dec_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp b/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
new file mode 100644
index 0000000..adfbcf5
--- /dev/null
+++ b/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
@@ -0,0 +1,58 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include "g711Dec.h"
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() = default;
+  void decodeFrames(const uint8_t *data, size_t size);
+};
+
+void Codec::decodeFrames(const uint8_t *data, size_t size) {
+  size_t outputBufferSize = sizeof(int16_t) * size;
+  int16_t *out = new int16_t[outputBufferSize];
+  if (!out) {
+    return;
+  }
+#ifdef ALAW
+  DecodeALaw(out, data, size);
+#else
+  DecodeMLaw(out, data, size);
+#endif
+  delete[] out;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 1) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  codec->decodeFrames(data, size);
+  delete codec;
+  return 0;
+}
diff --git a/media/codecs/m4v_h263/TEST_MAPPING b/media/codecs/m4v_h263/TEST_MAPPING
new file mode 100644
index 0000000..ba3ff1c
--- /dev/null
+++ b/media/codecs/m4v_h263/TEST_MAPPING
@@ -0,0 +1,18 @@
+// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+
+    // The decoder test complains about an unexpected newline in its config file,
+    // which looks like the AndroidTest.xml file that we put in there. The encoder
+    // test does not hit this, and the only substantive difference between the
+    // decode and encode AndroidTest.xml files is that the encode one does not
+    // end with a newline. Strange.
+    { "name": "Mpeg4H263DecoderTest"},
+    { "name": "Mpeg4H263EncoderTest"}
+
+  ]
+}
diff --git a/media/codecs/m4v_h263/dec/Android.bp b/media/codecs/m4v_h263/dec/Android.bp
new file mode 100644
index 0000000..038d0e1
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/Android.bp
@@ -0,0 +1,76 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_m4v_h263_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_m4v_h263_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_m4vh263dec",
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    shared_libs: ["liblog"],
+
+    srcs: [
+        "src/bitstream.cpp",
+        "src/block_idct.cpp",
+        "src/cal_dc_scaler.cpp",
+        "src/combined_decode.cpp",
+        "src/conceal.cpp",
+        "src/datapart_decode.cpp",
+        "src/dcac_prediction.cpp",
+        "src/dec_pred_intra_dc.cpp",
+        "src/get_pred_adv_b_add.cpp",
+        "src/get_pred_outside.cpp",
+        "src/idct.cpp",
+        "src/idct_vca.cpp",
+        "src/mb_motion_comp.cpp",
+        "src/mb_utils.cpp",
+        "src/packet_util.cpp",
+        "src/post_filter.cpp",
+        "src/pvdec_api.cpp",
+        "src/scaling_tab.cpp",
+        "src/vlc_decode.cpp",
+        "src/vlc_dequant.cpp",
+        "src/vlc_tab.cpp",
+        "src/vop.cpp",
+        "src/zigzag_tab.cpp",
+    ],
+
+    local_include_dirs: ["src"],
+    export_include_dirs: ["include"],
+
+    cflags: [
+        "-Werror",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2 b/media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
rename to media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/m4v_h263/dec/NOTICE b/media/codecs/m4v_h263/dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/NOTICE
rename to media/codecs/m4v_h263/dec/NOTICE
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h b/media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
rename to media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
diff --git a/media/codecs/m4v_h263/dec/include/mp4dec_api.h b/media/codecs/m4v_h263/dec/include/mp4dec_api.h
new file mode 100644
index 0000000..06aee07
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/include/mp4dec_api.h
@@ -0,0 +1,181 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#ifndef _MP4DEC_API_H_
+#define _MP4DEC_API_H_
+
+#include "m4vh263_decoder_pv_types.h"
+
+// #define PV_TOLERATE_VOL_ERRORS
+#define PV_MEMORY_POOL
+
+#ifndef _PV_TYPES_
+#define _PV_TYPES_
+
+typedef uint Bool;
+
+#define PV_CODEC_INIT  0
+#define PV_CODEC_STOP  1
+#endif
+
+#define PV_TRUE  1
+#define PV_FALSE 0
+
+#ifndef OSCL_IMPORT_REF
+#define OSCL_IMPORT_REF /* empty */
+#endif
+#ifndef OSCL_EXPORT_REF
+#define OSCL_EXPORT_REF /* empty */
+#endif
+
+
+#define PV_NO_POST_PROC 0
+#define PV_DEBLOCK 1
+#define PV_DERING  2
+
+
+
+#include "visual_header.h" // struct VolInfo is defined
+
+
+/**@name Structure and Data Types
+ * These type definitions specify the input / output from the PVMessage
+ * library.
+ */
+
+/*@{*/
+/* The application has to allocate space for this structure */
+typedef struct tagOutputFrame
+{
+    uint8       *data;          /* pointer to output YUV buffer */
+    uint32      timeStamp;      /* time stamp */
+} OutputFrame;
+
+typedef struct tagApplicationData
+{
+    int layer;          /* current video layer */
+    void *object;       /* some optional data field */
+} applicationData;
+
+/* Application controls, this structure shall be allocated */
+/*    and initialized in the application.                 */
+typedef struct tagvideoDecControls
+{
+    /* The following function pointer is copied to BitstreamDecVideo structure  */
+    /*    upon initialization and never used again. */
+    int (*readBitstreamData)(uint8 *buf, int nbytes_required, void *appData);
+    applicationData appData;
+
+    uint8 *outputFrame;
+    void *videoDecoderData;     /* this is an internal pointer that is only used */
+    /* in the decoder library.   */
+#ifdef PV_MEMORY_POOL
+    int32 size;
+#endif
+    int nLayers;
+    /* pointers to VOL data for frame-based decoding. */
+    uint8 *volbuf[2];           /* maximum of 2 layers for now */
+    int32 volbuf_size[2];
+
+} VideoDecControls;
+
+typedef enum
+{
+    H263_MODE = 0, MPEG4_MODE, UNKNOWN_MODE
+} MP4DecodingMode;
+
+typedef enum
+{
+    MP4_I_FRAME, MP4_P_FRAME, MP4_B_FRAME, MP4_BAD_FRAME
+} MP4FrameType;
+
+typedef struct tagVopHeaderInfo
+{
+    int     currLayer;
+    uint32  timestamp;
+    MP4FrameType    frameType;
+    int     refSelCode;
+    int16       quantizer;
+} VopHeaderInfo;
+
+/*--------------------------------------------------------------------------*
+ * VideoRefCopyInfo:
+ * OMAP DSP specific typedef structure, to support the user (ARM) copying
+ * of a Reference Frame into the Video Decoder.
+ *--------------------------------------------------------------------------*/
+typedef struct tagVideoRefCopyInfoPtr
+{
+    uint8   *yChan;             /* The Y component frame the user can copy a new reference to */
+    uint8   *uChan;             /* The U component frame the user can copy a new reference to */
+    uint8   *vChan;             /* The V component frame the user can copy a new reference to */
+    uint8   *currentVop;        /* The Vop for video the user can copy a new reference to */
+} VideoRefCopyInfoPtr;
+
+typedef struct tagVideoRefCopyInfoData
+{
+    int16   width;              /* Width */
+    int16   height;             /* Height */
+    int16   realWidth;          /* Non-padded width, not necessarily a multiple of 16.  */
+    int16   realHeight;         /* Non-padded height, not necessarily a multiple of 16. */
+} VideoRefCopyInfoData;
+
+typedef struct tagVideoRefCopyInfo
+{
+    VideoRefCopyInfoData data;
+    VideoRefCopyInfoPtr ptrs;
+} VideoRefCopyInfo;
+
+/*@}*/
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+
+    OSCL_IMPORT_REF Bool    PVInitVideoDecoder(VideoDecControls *decCtrl, uint8 *volbuf[], int32 *volbuf_size, int nLayers, int width, int height, MP4DecodingMode mode);
+    Bool    PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, int nLayers);
+    OSCL_IMPORT_REF Bool    PVCleanUpVideoDecoder(VideoDecControls *decCtrl);
+    Bool    PVResetVideoDecoder(VideoDecControls *decCtrl);
+    OSCL_IMPORT_REF void    PVSetReferenceYUV(VideoDecControls *decCtrl, uint8 *refYUV);
+    Bool    PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp);
+    Bool    PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp);
+    OSCL_IMPORT_REF Bool    PVDecodeVideoFrame(VideoDecControls *decCtrl, uint8 *bitstream[], uint32 *timestamp, int32 *buffer_size, uint use_ext_timestamp[], uint8* currYUV);
+    Bool    PVDecodeVopHeader(VideoDecControls *decCtrl, uint8 *buffer[], uint32 timestamp[], int32 buffer_size[], VopHeaderInfo *header_info, uint use_ext_timestamp[], uint8 *currYUV);
+    Bool    PVDecodeVopBody(VideoDecControls *decCtrl, int32 buffer_size[]);
+    void    PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV);
+    OSCL_IMPORT_REF void    PVGetVideoDimensions(VideoDecControls *decCtrl, int32 *display_width, int32 *display_height);
+    OSCL_IMPORT_REF void    PVGetBufferDimensions(VideoDecControls *decCtrl, int32 *buf_width, int32 *buf_height);
+    OSCL_IMPORT_REF void    PVSetPostProcType(VideoDecControls *decCtrl, int mode);
+    uint32  PVGetVideoTimeStamp(VideoDecControls *decoderControl);
+    int     PVGetDecBitrate(VideoDecControls *decCtrl);
+    int     PVGetDecFramerate(VideoDecControls *decCtrl);
+    uint8   *PVGetDecOutputFrame(VideoDecControls *decCtrl);
+    int     PVGetLayerID(VideoDecControls *decCtrl);
+    int32   PVGetDecMemoryUsage(VideoDecControls *decCtrl);
+    OSCL_IMPORT_REF MP4DecodingMode PVGetDecBitstreamMode(VideoDecControls *decCtrl);
+    Bool    PVExtractVolHeader(uint8 *video_buffer, uint8 *vol_header, int32 *vol_header_size);
+    int32   PVLocateFrameHeader(uint8 *video_buffer, int32 vop_size);
+    int32   PVLocateH263FrameHeader(uint8 *video_buffer, int32 vop_size);
+    Bool    PVGetVolInfo(VideoDecControls *decCtrl, VolInfo *pVolInfo); // BX 6/24/04
+    Bool    IsIntraFrame(VideoDecControls *decoderControl);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* _MP4DEC_API_H_ */
+
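A rough, hedged outline of how the decoder API declared in mp4dec_api.h is typically driven for a single H.263 frame. Buffer sizing, the PV_MEMORY_POOL reference handling, and error paths are deliberately omitted, so treat this as an illustration of the call order rather than working integration code.

```
// Rough call-order sketch for the API above (H.263, single layer). Buffer
// management and PV_MEMORY_POOL details are omitted; illustrative only.
#include "mp4dec_api.h"

bool decodeOneH263Frame(uint8 *frame, int32 frameSize, uint32 timestamp,
                        uint8 *yuvOut, int width, int height) {
    VideoDecControls ctrl = {};
    uint8 *volbuf[1] = {nullptr};        // H263_MODE carries no VOL header
    int32 volbufSize[1] = {0};
    uint useExtTimestamp[1] = {0};

    if (PVInitVideoDecoder(&ctrl, volbuf, volbufSize, 1, width, height,
                           H263_MODE) != PV_TRUE) {
        return false;
    }

    uint8 *bitstream[1] = {frame};
    Bool ok = PVDecodeVideoFrame(&ctrl, bitstream, &timestamp, &frameSize,
                                 useExtTimestamp, yuvOut);

    PVCleanUpVideoDecoder(&ctrl);
    return ok == PV_TRUE;
}
```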
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/visual_header.h b/media/codecs/m4v_h263/dec/include/visual_header.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/include/visual_header.h
rename to media/codecs/m4v_h263/dec/include/visual_header.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.cpp b/media/codecs/m4v_h263/dec/src/bitstream.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/bitstream.cpp
rename to media/codecs/m4v_h263/dec/src/bitstream.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/bitstream.h b/media/codecs/m4v_h263/dec/src/bitstream.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/bitstream.h
rename to media/codecs/m4v_h263/dec/src/bitstream.h
diff --git a/media/codecs/m4v_h263/dec/src/block_idct.cpp b/media/codecs/m4v_h263/dec/src/block_idct.cpp
new file mode 100644
index 0000000..bc708e2
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/block_idct.cpp
@@ -0,0 +1,914 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    [input_variable_name] = [description of the input to module, its type
+                 definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+    [local_store_name] = [description of the local store, its type
+                  definition, and length (when applicable)]
+    [local_buffer_name] = [description of the local buffer, its type
+                   definition, and length (when applicable)]
+    [local_ptr_name] = [description of the local pointer, its type
+                definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+    [global_store_name] = [description of the global store, its type
+                   definition, and length (when applicable)]
+    [global_buffer_name] = [description of the global buffer, its type
+                definition, and length (when applicable)]
+    [global_ptr_name] = [description of the global pointer, its type
+                 definition, and length (when applicable)]
+
+ Outputs:
+    [return_variable_name] = [description of data/pointer returned
+                  by module, its type definition, and length
+                  (when applicable)]
+
+ Pointers and Buffers Modified:
+    [variable_bfr_ptr] points to the [describe where the
+      variable_bfr_ptr points to, its type definition, and length
+      (when applicable)]
+    [variable_bfr] contents are [describe the new contents of
+      variable_bfr]
+
+ Local Stores Modified:
+    [local_store_name] = [describe new contents, its type
+                  definition, and length (when applicable)]
+
+ Global Stores Modified:
+    [global_store_name] = [describe new contents, its type
+                   definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
+------------------------------------------------------------------------------
+ RESOURCES USED
+   When the code is written for a specific target processor, the
+     resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+          stack usage for each subroutine called]
+
+     where: [stack usage variable] = stack usage for [subroutine
+         name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+           used to represent cycle count for each subroutine
+           called]
+
+     where: [cycle count variable] = cycle count for [subroutine
+        name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "mp4dec_lib.h"
+#include "idct.h"
+#include "motion_comp.h"
+
+#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+/* private prototypes */
+static void idctrow(int16 *blk, uint8 *pred, uint8 *dst, int width);
+static void idctrow_intra(int16 *blk, PIXEL *, int width);
+static void idctcol(int16 *blk);
+
+#ifdef FAST_IDCT
+// mapping from nz_coefs to functions to be used
+
+
+// ARM4 does not allow global data that is not constant, hence
+// an array of function pointers cannot be considered an array of constants
+// (actual addresses are only known when the dll is loaded).
+// So instead of arrays of function pointers, we'll store here
+// arrays of rows or columns and then call the idct function
+// corresponding to the row/column number:
+
+
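+// The tables below drive the reduced-complexity dispatch: for a block with at
+// most 10 nonzero coefficients, idctcolVCA[nz_coefs-1][c] selects a shortened
+// column IDCT for each of columns 0..3 and idctrowVCA/idctrowVCA_intra select
+// the matching row pass; idctcolVCA2/idctrowVCA2 are indexed by the upper
+// nibble of the coefficient bitmaps on the denser path below.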
+static void (*const idctcolVCA[10][4])(int16*) =
+{
+    {&idctcol1, &idctcol0, &idctcol0, &idctcol0},
+    {&idctcol1, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol2, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol1, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol0, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol1, &idctcol0},
+    {&idctcol3, &idctcol2, &idctcol1, &idctcol1},
+    {&idctcol3, &idctcol2, &idctcol2, &idctcol1},
+    {&idctcol3, &idctcol3, &idctcol2, &idctcol1},
+    {&idctcol4, &idctcol3, &idctcol2, &idctcol1}
+};
+
+
+static void (*const idctrowVCA[10])(int16*, uint8*, uint8*, int) =
+{
+    &idctrow1,
+    &idctrow2,
+    &idctrow2,
+    &idctrow2,
+    &idctrow2,
+    &idctrow3,
+    &idctrow4,
+    &idctrow4,
+    &idctrow4,
+    &idctrow4
+};
+
+
+static void (*const idctcolVCA2[16])(int16*) =
+{
+    &idctcol0, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol2, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol1, &idctcol4, &idctcol3, &idctcol4,
+    &idctcol2, &idctcol4, &idctcol3, &idctcol4
+};
+
+static void (*const idctrowVCA2[8])(int16*, uint8*, uint8*, int) =
+{
+    &idctrow1, &idctrow4, &idctrow3, &idctrow4,
+    &idctrow2, &idctrow4, &idctrow3, &idctrow4
+};
+
+static void (*const idctrowVCA_intra[10])(int16*, PIXEL *, int) =
+{
+    &idctrow1_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow2_intra,
+    &idctrow3_intra,
+    &idctrow4_intra,
+    &idctrow4_intra,
+    &idctrow4_intra,
+    &idctrow4_intra
+};
+
+static void (*const idctrowVCA2_intra[8])(int16*, PIXEL *, int) =
+{
+    &idctrow1_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra,
+    &idctrow2_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra
+};
+#endif
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
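+/* MBlockIDCT: run the intra inverse transform for all six 8x8 blocks of the
+   current macroblock: four luma blocks into yChan plus one block each into
+   uChan and vChan (the chroma planes use half the luma width). */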
+void MBlockIDCT(VideoDecData *video)
+{
+    Vop *currVop = video->currVop;
+    MacroBlock *mblock = video->mblock;
+    PIXEL *c_comp;
+    PIXEL *cu_comp;
+    PIXEL *cv_comp;
+    int x_pos = video->mbnum_col;
+    int y_pos = video->mbnum_row;
+    int width, width_uv;
+    int32 offset;
+    width = video->width;
+    width_uv = width >> 1;
+    offset = (int32)(y_pos << 4) * width + (x_pos << 4);
+
+    c_comp  = currVop->yChan + offset;
+    cu_comp = currVop->uChan + (offset >> 2) + (x_pos << 2);
+    cv_comp = currVop->vChan + (offset >> 2) + (x_pos << 2);
+
+    BlockIDCT_intra(mblock, c_comp, 0, width);
+    BlockIDCT_intra(mblock, c_comp + 8, 1, width);
+    BlockIDCT_intra(mblock, c_comp + (width << 3), 2, width);
+    BlockIDCT_intra(mblock, c_comp + (width << 3) + 8, 3, width);
+    BlockIDCT_intra(mblock, cu_comp, 4, width_uv);
+    BlockIDCT_intra(mblock, cv_comp, 5, width_uv);
+}
+
+
+void BlockIDCT_intra(
+    MacroBlock *mblock, PIXEL *c_comp, int comp, int width)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int16 *coeff_in = mblock->block[comp];
+#ifdef INTEGER_IDCT
+#ifdef FAST_IDCT  /* VCA IDCT using nzcoefs and bitmaps*/
+    int i, bmapr;
+    int nz_coefs = mblock->no_coeff[comp];
+    uint8 *bitmapcol = mblock->bitmapcol[comp];
+    uint8 bitmaprow = mblock->bitmaprow[comp];
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    if (nz_coefs <= 10)
+    {
+        bmapr = (nz_coefs - 1);
+
+        (*(idctcolVCA[bmapr]))(coeff_in);
+        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
+        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
+        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
+
+        (*idctrowVCA_intra[nz_coefs-1])(coeff_in, c_comp, width);
+    }
+    else
+    {
+        i = 8;
+        while (i--)
+        {
+            bmapr = (int)bitmapcol[i];
+            if (bmapr)
+            {
+                if ((bmapr&0xf) == 0)         /*  07/18/01 */
+                {
+                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
+                }
+                else
+                {
+                    idctcol(coeff_in + i);
+                }
+            }
+        }
+        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
+        {
+            bitmaprow >>= 4;
+            (*(idctrowVCA2_intra[(int)bitmaprow]))(coeff_in, c_comp, width);
+        }
+        else
+        {
+            idctrow_intra(coeff_in, c_comp, width);
+        }
+    }
+#else
+    void idct_intra(int *block, uint8 *comp, int width);
+    idct_intra(coeff_in, c_comp, width);
+#endif
+#else
+    void idctref_intra(int *block, uint8 *comp, int width);
+    idctref_intra(coeff_in, c_comp, width);
+#endif
+
+
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+/*  08/04/05, no residue, just copy from pred to output */
+void Copy_Blk_to_Vop(uint8 *dst, uint8 *pred, int width)
+{
+    /* copy 4 bytes at a time */
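+    /* pred has a fixed pitch of 16 (hence the +12 skips between rows),
+       while dst advances by the caller-supplied width */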
+    width -= 4;
+    *((uint32*)dst) = *((uint32*)pred);
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
+    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
+
+    return ;
+}
+
+/*  08/04/05 compute IDCT and add prediction at the end  */
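+/* Inter reconstruction helper: the dequantized residual in coeff_in is inverse
+   transformed and added to the 16-pitch prediction block, and the clipped
+   8-bit result is written to dst. */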
+void BlockIDCT(
+    uint8 *dst,  /* destination */
+    uint8 *pred, /* prediction block, pitch 16 */
+    int16   *coeff_in,  /* DCT data, size 64 */
+    int width, /* width of dst */
+    int nz_coefs,
+    uint8 *bitmapcol,
+    uint8 bitmaprow
+)
+{
+#ifdef INTEGER_IDCT
+#ifdef FAST_IDCT  /* VCA IDCT using nzcoefs and bitmaps*/
+    int i, bmapr;
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    if (nz_coefs <= 10)
+    {
+        bmapr = (nz_coefs - 1);
+        (*(idctcolVCA[bmapr]))(coeff_in);
+        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
+        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
+        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
+
+        (*idctrowVCA[nz_coefs-1])(coeff_in, pred, dst, width);
+        return ;
+    }
+    else
+    {
+        i = 8;
+
+        while (i--)
+        {
+            bmapr = (int)bitmapcol[i];
+            if (bmapr)
+            {
+                if ((bmapr&0xf) == 0)         /*  07/18/01 */
+                {
+                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
+                }
+                else
+                {
+                    idctcol(coeff_in + i);
+                }
+            }
+        }
+        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
+        {
+            (*(idctrowVCA2[bitmaprow>>4]))(coeff_in, pred, dst, width);
+        }
+        else
+        {
+            idctrow(coeff_in, pred, dst, width);
+        }
+        return ;
+    }
+#else // FAST_IDCT
+    void idct(int *block, uint8 *pred, uint8 *dst, int width);
+    idct(coeff_in, pred, dst, width);
+    return;
+#endif // FAST_IDCT
+#else // INTEGER_IDCT
+    void idctref(int *block, uint8 *pred, uint8 *dst, int width);
+    idctref(coeff_in, pred, dst, width);
+    return;
+#endif // INTEGER_IDCT
+
+}
+/*----------------------------------------------------------------------------
+;  End Function: block_idct
+----------------------------------------------------------------------------*/
+
+
+/****************************************************************************/
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: idctrow
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS FOR idctrow
+
+ Inputs:
+    [input_variable_name] = [description of the input to module, its type
+                 definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+    [local_store_name] = [description of the local store, its type
+                  definition, and length (when applicable)]
+    [local_buffer_name] = [description of the local buffer, its type
+                   definition, and length (when applicable)]
+    [local_ptr_name] = [description of the local pointer, its type
+                definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+    [global_store_name] = [description of the global store, its type
+                   definition, and length (when applicable)]
+    [global_buffer_name] = [description of the global buffer, its type
+                definition, and length (when applicable)]
+    [global_ptr_name] = [description of the global pointer, its type
+                 definition, and length (when applicable)]
+
+ Outputs:
+    [return_variable_name] = [description of data/pointer returned
+                  by module, its type definition, and length
+                  (when applicable)]
+
+ Pointers and Buffers Modified:
+    [variable_bfr_ptr] points to the [describe where the
+      variable_bfr_ptr points to, its type definition, and length
+      (when applicable)]
+    [variable_bfr] contents are [describe the new contents of
+      variable_bfr]
+
+ Local Stores Modified:
+    [local_store_name] = [describe new contents, its type
+                  definition, and length (when applicable)]
+
+ Global Stores Modified:
+    [global_store_name] = [describe new contents, its type
+                   definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION FOR idctrow
+
+------------------------------------------------------------------------------
+ REQUIREMENTS FOR idctrow
+
+------------------------------------------------------------------------------
+ REFERENCES FOR idctrow
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE FOR idctrow
+
+------------------------------------------------------------------------------
+ RESOURCES USED FOR idctrow
+   When the code is written for a specific target processor, the
+     resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+          stack usage for each subroutine called]
+
+     where: [stack usage variable] = stack usage for [subroutine
+         name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+           used to represent cycle count for each subroutine
+           called]
+
+     where: [cycle count variable] = cycle count for [subroutine
+        name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; Function Code FOR idctrow
+----------------------------------------------------------------------------*/
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow(
+    int16 *blk, uint8 *pred, uint8 *dst, int width
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* row (horizontal) IDCT
+    *
+    *           7                       pi         1
+    * dst[k] = sum c[l] * src[l] * cos( -- * ( k + - ) * l )
+    *          l=0                      8          2
+    *
+    * where: c[0]    = 128
+    *        c[1..7] = 128*sqrt(2) */
+
+    /* preset the offset, such that we can take advantage of the pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        x1 = (int32)blk[12] << 8;
+        blk[12] = 0;
+        x2 = blk[14];
+        blk[14] = 0;
+        x3 = blk[10];
+        blk[10] = 0;
+        x4 = blk[9];
+        blk[9] = 0;
+        x5 = blk[15];
+        blk[15] = 0;
+        x6 = blk[13];
+        blk[13] = 0;
+        x7 = blk[11];
+        blk[11] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;
+        blk[0] = 0;   /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x8 = W7 * (x4 + x5) + 4;
+        x4 = (x8 + (W1 - W7) * x4) >> 3;
+        x5 = (x8 - (W1 + W7) * x5) >> 3;
+        x8 = W3 * (x6 + x7) + 4;
+        x6 = (x8 - (W3 - W5) * x6) >> 3;
+        x7 = (x8 - (W3 + W5) * x7) >> 3;
+
+        /* second stage */
+        x8 = x0 + x1;
+        x0 -= x1;
+        x1 = W6 * (x3 + x2) + 4;
+        x2 = (x1 - (W2 + W6) * x2) >> 3;
+        x3 = (x1 + (W2 - W6) * x3) >> 3;
+        x1 = x4 + x6;
+        x4 -= x6;
+        x6 = x5 + x7;
+        x5 -= x7;
+
+        /* third stage */
+        x7 = x8 + x3;
+        x8 -= x3;
+        x3 = x0 + x2;
+        x0 -= x2;
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x4 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage */
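+        /* add the IDCT output to the prediction bytes packed in pred_word,
+           clip each sum to 0..255, and repack four results per 32-bit store */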
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+
+        res = (x7 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x3 + x2) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 + x4) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x8 + x6) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+
+        res = (x8 - x6) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 - x4) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x3 - x2) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x7 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow_intra(
+    int16 *blk, PIXEL *comp, int width
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* row (horizontal) IDCT
+    *
+    *           7                       pi         1
+    * dst[k] = sum c[l] * src[l] * cos( -- * ( k + - ) * l )
+    *          l=0                      8          2
+    *
+    * where: c[0]    = 128
+    *        c[1..7] = 128*sqrt(2) */
+    while (i--)
+    {
+        x1 = (int32)blk[4] << 8;
+        blk[4] = 0;
+        x2 = blk[6];
+        blk[6] = 0;
+        x3 = blk[2];
+        blk[2] = 0;
+        x4 = blk[1];
+        blk[1] = 0;
+        x5 = blk[7];
+        blk[7] = 0;
+        x6 = blk[5];
+        blk[5] = 0;
+        x7 = blk[3];
+        blk[3] = 0;
+#ifndef FAST_IDCT
+        /* shortcut */  /* covered by idctrow1  01/9/2001 */
+        if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
+        {
+            blk[0] = blk[1] = blk[2] = blk[3] = blk[4] = blk[5] = blk[6] = blk[7] = (blk[0] + 32) >> 6;
+            return;
+        }
+#endif
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;  /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x8 = W7 * (x4 + x5) + 4;
+        x4 = (x8 + (W1 - W7) * x4) >> 3;
+        x5 = (x8 - (W1 + W7) * x5) >> 3;
+        x8 = W3 * (x6 + x7) + 4;
+        x6 = (x8 - (W3 - W5) * x6) >> 3;
+        x7 = (x8 - (W3 + W5) * x7) >> 3;
+
+        /* second stage */
+        x8 = x0 + x1;
+        x0 -= x1;
+        x1 = W6 * (x3 + x2) + 4;
+        x2 = (x1 - (W2 + W6) * x2) >> 3;
+        x3 = (x1 + (W2 - W6) * x3) >> 3;
+        x1 = x4 + x6;
+        x4 -= x6;
+        x6 = x5 + x7;
+        x5 -= x7;
+
+        /* third stage */
+        x7 = x8 + x3;
+        x8 -= x3;
+        x3 = x0 + x2;
+        x0 -= x2;
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x4 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage */
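+        /* intra path: no prediction to add; clip each result to 0..255 and
+           pack four 8-bit samples per 32-bit store */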
+        word = ((x7 + x1) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x3 + x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x0 + x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x8 + x6) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp)) = word;
+
+        word = ((x8 - x6) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x0 - x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x3 - x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x7 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+        comp += offset;
+
+        blk += B_SIZE;
+    }
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+
+/*----------------------------------------------------------------------------
+; End Function: idctrow
+----------------------------------------------------------------------------*/
+
+
+/****************************************************************************/
+
+/*
+------------------------------------------------------------------------------
+ FUNCTION NAME: idctcol
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS FOR idctcol
+
+ Inputs:
+    [input_variable_name] = [description of the input to module, its type
+                 definition, and length (when applicable)]
+
+ Local Stores/Buffers/Pointers Needed:
+    [local_store_name] = [description of the local store, its type
+                  definition, and length (when applicable)]
+    [local_buffer_name] = [description of the local buffer, its type
+                   definition, and length (when applicable)]
+    [local_ptr_name] = [description of the local pointer, its type
+                definition, and length (when applicable)]
+
+ Global Stores/Buffers/Pointers Needed:
+    [global_store_name] = [description of the global store, its type
+                   definition, and length (when applicable)]
+    [global_buffer_name] = [description of the global buffer, its type
+                definition, and length (when applicable)]
+    [global_ptr_name] = [description of the global pointer, its type
+                 definition, and length (when applicable)]
+
+ Outputs:
+    [return_variable_name] = [description of data/pointer returned
+                  by module, its type definition, and length
+                  (when applicable)]
+
+ Pointers and Buffers Modified:
+    [variable_bfr_ptr] points to the [describe where the
+      variable_bfr_ptr points to, its type definition, and length
+      (when applicable)]
+    [variable_bfr] contents are [describe the new contents of
+      variable_bfr]
+
+ Local Stores Modified:
+    [local_store_name] = [describe new contents, its type
+                  definition, and length (when applicable)]
+
+ Global Stores Modified:
+    [global_store_name] = [describe new contents, its type
+                   definition, and length (when applicable)]
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION FOR idctcol
+
+------------------------------------------------------------------------------
+ REQUIREMENTS FOR idctcol
+
+------------------------------------------------------------------------------
+ REFERENCES FOR idctcol
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE FOR idctcol
+
+------------------------------------------------------------------------------
+ RESOURCES USED FOR idctcol
+   When the code is written for a specific target processor, the
+     resources used should be documented below.
+
+ STACK USAGE: [stack count for this module] + [variable to represent
+          stack usage for each subroutine called]
+
+     where: [stack usage variable] = stack usage for [subroutine
+         name] (see [filename].ext)
+
+ DATA MEMORY USED: x words
+
+ PROGRAM MEMORY USED: x words
+
+ CLOCK CYCLES: [cycle count equation for this module] + [variable
+           used to represent cycle count for each subroutine
+           called]
+
+     where: [cycle count variable] = cycle count for [subroutine
+        name] (see [filename].ext)
+
+------------------------------------------------------------------------------
+*/
+
+/*----------------------------------------------------------------------------
+; Function Code FOR idctcol
+----------------------------------------------------------------------------*/
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol(
+    int16 *blk
+)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* column (vertical) IDCT
+    *
+    *             7                         pi         1
+    * dst[8*k] = sum c[l] * src[8*l] * cos( -- * ( k + - ) * l )
+    *            l=0                        8          2
+    *
+    * where: c[0]    = 1/1024
+    *        c[1..7] = (1/1024)*sqrt(2) */
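+    /* one vertical pass over a single column, done in place with a stride of 8;
+       a subsequent row pass (idctrow*) completes the 2-D transform */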
+    x1 = (int32)blk[32] << 11;
+    x2 = blk[48];
+    x3 = blk[16];
+    x4 = blk[8];
+    x5 = blk[56];
+    x6 = blk[40];
+    x7 = blk[24];
+#ifndef FAST_IDCT
+    /* shortcut */        /* covered by idctcolumn1  01/9/2001 */
+    if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
+    {
+        blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56]
+                                              = blk[0] << 3;
+        return;
+    }
+#endif
+
+    x0 = ((int32)blk[0] << 11) + 128;
+
+    /* first stage */
+    x8 = W7 * (x4 + x5);
+    x4 = x8 + (W1 - W7) * x4;
+    x5 = x8 - (W1 + W7) * x5;
+    x8 = W3 * (x6 + x7);
+    x6 = x8 - (W3 - W5) * x6;
+    x7 = x8 - (W3 + W5) * x7;
+
+    /* second stage */
+    x8 = x0 + x1;
+    x0 -= x1;
+    x1 = W6 * (x3 + x2);
+    x2 = x1 - (W2 + W6) * x2;
+    x3 = x1 + (W2 - W6) * x3;
+    x1 = x4 + x6;
+    x4 -= x6;
+    x6 = x5 + x7;
+    x5 -= x7;
+
+    /* third stage */
+    x7 = x8 + x3;
+    x8 -= x3;
+    x3 = x0 + x2;
+    x0 -= x2;
+    x2 = (181 * (x4 + x5) + 128) >> 8;
+    x4 = (181 * (x4 - x5) + 128) >> 8;
+
+    /* fourth stage */
+    blk[0]    = (x7 + x1) >> 8;
+    blk[8] = (x3 + x2) >> 8;
+    blk[16] = (x0 + x4) >> 8;
+    blk[24] = (x8 + x6) >> 8;
+    blk[32] = (x8 - x6) >> 8;
+    blk[40] = (x0 - x4) >> 8;
+    blk[48] = (x3 - x2) >> 8;
+    blk[56] = (x7 - x1) >> 8;
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+    return;
+}
+/*----------------------------------------------------------------------------
+;  End Function: idctcol
+----------------------------------------------------------------------------*/
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp b/media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
rename to media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
diff --git a/media/codecs/m4v_h263/dec/src/combined_decode.cpp b/media/codecs/m4v_h263/dec/src/combined_decode.cpp
new file mode 100644
index 0000000..72cbe83
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/combined_decode.cpp
@@ -0,0 +1,744 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "mp4dec_lib.h" /* video decoder function prototypes */
+#include "vlc_decode.h"
+#include "bitstream.h"
+#include "scaling.h"
+#include "mbtype_mode.h"
+
+#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
+/* ======================================================================== */
+/*  Function : DecodeFrameCombinedMode()                                    */
+/*  Purpose  : Decode a frame of MPEG4 bitstream in combined mode.          */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Modified :                                                              */
+/*                                                                          */
+/*      03/30/2000 : Cleaned up and optimized the code.             */
+/*      03/31/2000 : Added proper handling of MB stuffing.          */
+/*      04/13/2000 : Rewrote this combined mode path completely     */
+/*                           so that it handles "Combined Mode With Error   */
+/*                           Resilience."  Now the code resembles the       */
+/*                           pseudo-code in the MPEG-4 standard better.     */
+/*      10/13/2000 : Add fast VLC+dequant                           */
+/*      04/13/2001 : fix MB_stuffing                               */
+/*      08/07/2001 : remove MBzero                                  */
+/* ======================================================================== */
+PV_STATUS DecodeFrameCombinedMode(VideoDecData *video)
+{
+    PV_STATUS status;
+    int mbnum;
+    Vop *currVop = video->currVop;
+    BitstreamDecVideo *stream = video->bitstream;
+    int shortVideoHeader = video->shortVideoHeader;
+    int16 QP, *QPMB = video->QPMB;
+    uint8 *Mode = video->headerInfo.Mode;
+    int nTotalMB = video->nTotalMB;
+    int nMBPerRow = video->nMBPerRow;
+    int slice_counter;
+    uint32 tmpvar, long_zero_bits;
+    uint code;
+    int valid_stuffing;
+    int resync_marker_length;
+    int stuffing_length;
+
+    /* add this for error resilience, 05/18/2000 */
+    int32 startPacket;
+    int mb_start;
+    /* copy and pad to prev_Vop for INTER coding */
+    switch (currVop->predictionType)
+    {
+        case I_VOP :
+//      oscl_memset(Mode, MODE_INTRA, sizeof(uint8)*nTotalMB);
+            resync_marker_length = 17;
+            stuffing_length = 9;
+            break;
+        case P_VOP :
+            oscl_memset(video->motX, 0, sizeof(MOT)*4*nTotalMB);
+            oscl_memset(video->motY, 0, sizeof(MOT)*4*nTotalMB);
+//      oscl_memset(Mode, MODE_INTER, sizeof(uint8)*nTotalMB);
+            resync_marker_length = 16 + currVop->fcodeForward;
+            stuffing_length = 10;
+            break;
+        default :
+            mp4dec_log("DecodeFrameCombinedMode(): Vop type not supported.\n");
+            return PV_FAIL;
+    }
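+    /* resync_marker_length: 17 bits for I-VOPs, 16 + fcode_forward for P-VOPs;
+       stuffing_length is the size of the macroblock stuffing code skipped below */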
+#ifdef PV_ANNEX_IJKT_SUPPORT
+    if (video->shortVideoHeader)
+    {
+        if (video->advanced_INTRA)
+        {
+            if (video->modified_quant)
+            {
+                video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexIT;
+                video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader_AnnexT;
+            }
+            else
+            {
+                video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexI;
+                video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader;
+            }
+        }
+        else
+        {
+            if (video->modified_quant)
+            {
+                video->vlcDecCoeffInter = video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexT;
+            }
+            else
+            {
+                video->vlcDecCoeffInter = video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader;
+            }
+        }
+    }
+
+#endif
+
+    /** Initialize sliceNo ***/
+    mbnum = slice_counter = 0;
+//  oscl_memset(video->sliceNo, 0, sizeof(uint8)*nTotalMB);
+    QP = video->currVop->quantizer;
+
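+    /* outer loop: one iteration per video packet (or GOB/slice for short header);
+       inner loop: one macroblock per iteration until a resync point or end of VOP */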
+    do
+    {
+        /* This section is equivalent to motion_shape_texture() */
+        /*    in the MPEG-4 standard.     04/13/2000          */
+        mb_start = mbnum;
+        video->usePrevQP = 0;             /*  04/27/01 */
+        startPacket = getPointer(stream);
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+        if (video->modified_quant)
+        {
+            video->QP_CHR = MQ_chroma_QP_table[QP];
+        }
+        else
+        {
+            video->QP_CHR = QP;     /* ANNEX_T */
+        }
+#endif
+        /* remove any stuffing bits */
+        BitstreamShowBits16(stream, stuffing_length, &code);
+        while (code == 1)
+        {
+            PV_BitstreamFlushBits(stream, stuffing_length);
+            BitstreamShowBits16(stream, stuffing_length, &code);
+        }
+
+        do
+        {
+            /* we need video->mbnum in lower level functions */
+            video->mbnum = mbnum;
+            video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);
+            video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+            /* assign slice number for each macroblocks */
+            video->sliceNo[mbnum] = (uint8) slice_counter;
+
+            /* decode COD, MCBPC, ACpred_flag, CBPY and DQUANT */
+            /* We have to discard stuffed MB header */
+            status = GetMBheader(video, &QP);
+
+            if (status != PV_SUCCESS)
+            {
+                VideoDecoderErrorDetected(video);
+                video->mbnum = mb_start;
+                movePointerTo(stream, (startPacket & -8));
+                break;
+            }
+
+            /* Store the QP value for later use in AC prediction */
+            QPMB[mbnum] = QP;
+
+            if (Mode[mbnum] != MODE_SKIPPED)
+            {
+                /* decode the DCT coefficients for the MB */
+                status = GetMBData(video);
+                if (status != PV_SUCCESS)
+                {
+                    VideoDecoderErrorDetected(video);
+                    video->mbnum = mb_start;
+                    movePointerTo(stream, (startPacket & -8));
+                    break;
+                }
+            }
+            else /* MODE_SKIPPED */
+            {
+                SkippedMBMotionComp(video); /*  08/04/05 */
+            }
+            // Motion compensation and put video->mblock->pred_block
+            mbnum++;
+
+            /* remove any stuffing bits */
+            BitstreamShowBits16(stream, stuffing_length, &code);
+            while (code == 1)
+            {
+                PV_BitstreamFlushBits(stream, stuffing_length);
+                BitstreamShowBits16(stream, stuffing_length, &code);
+            }
+
+            /* have we reached the end of the video packet or vop? */
+            if (shortVideoHeader)
+            {
+#ifdef PV_ANNEX_IJKT_SUPPORT
+                if (!video->slice_structure)
+                {
+#endif
+                    if (mbnum >= (int)(video->mbnum_row + 1)*video->nMBinGOB)   /*  10/11/01 */
+                    {
+                        if (mbnum >= nTotalMB) return PV_SUCCESS;
+                        status = BitstreamShowBits32(stream, GOB_RESYNC_MARKER_LENGTH, &tmpvar);
+
+                        if (tmpvar == GOB_RESYNC_MARKER)
+                        {
+                            break;
+                        }
+                        else
+                        {
+                            status = PV_BitstreamShowBitsByteAlign(stream, GOB_RESYNC_MARKER_LENGTH, &tmpvar);
+                            if (tmpvar == GOB_RESYNC_MARKER) break;
+                        }
+                    }
+#ifdef PV_ANNEX_IJKT_SUPPORT
+                }
+                else
+                {
+
+                    if (mbnum >= nTotalMB)  /* in case no valid stuffing  06/23/01 */
+                    {
+                        valid_stuffing = validStuffing_h263(stream);
+                        if (valid_stuffing == 0)
+                        {
+                            VideoDecoderErrorDetected(video);
+                            ConcealPacket(video, mb_start, nTotalMB, slice_counter);
+                        }
+                        return PV_SUCCESS;
+                    }
+                    /* ANNEX_K */
+                    PV_BitstreamShowBitsByteAlignNoForceStuffing(stream, 17, &tmpvar);
+                    if (tmpvar == RESYNC_MARKER)
+                    {
+                        valid_stuffing = validStuffing_h263(stream);
+                        if (valid_stuffing)
+                            break; /*  06/21/01 */
+                    }
+
+                }
+#endif
+            }
+            else
+            {
+                if (mbnum >= nTotalMB)  /* in case no valid stuffing  06/23/01 */
+                {
+                    /*  11/01/2002 if we are at the end of the frame and there is some garbage data
+                    left (i.e. no next startcode), break if the stuffing is valid */
+                    valid_stuffing = validStuffing(stream);
+                    if (valid_stuffing == 0)
+                    {
+                        /* end 11/01/2002 */
+                        VideoDecoderErrorDetected(video);
+                        ConcealPacket(video, mb_start, nTotalMB, slice_counter);
+                    }
+                    PV_BitstreamByteAlign(stream);
+                    return PV_SUCCESS;
+                }
+
+                status = PV_BitstreamShowBitsByteAlign(stream, 23, &tmpvar); /* this call is valid for f_code < 8 */
+                long_zero_bits = !tmpvar;
+
+                if ((tmpvar >> (23 - resync_marker_length)) == RESYNC_MARKER || long_zero_bits)
+                {
+                    valid_stuffing = validStuffing(stream);
+                    if (valid_stuffing)
+                        break; /*  06/21/01 */
+                }
+
+            }
+        }
+        while (TRUE);
+
+        if (shortVideoHeader)
+        { /* We need to check newgob to refresh quantizer */
+#ifdef PV_ANNEX_IJKT_SUPPORT
+            if (!video->slice_structure)
+            {
+#endif
+                while ((status = PV_GobHeader(video)) == PV_FAIL)
+                {
+                    if ((status = quickSearchGOBHeader(stream)) != PV_SUCCESS)
+                    {
+                        break;
+                    }
+                }
+
+                mbnum = currVop->gobNumber * video->nMBinGOB;
+#ifdef PV_ANNEX_IJKT_SUPPORT
+            }
+            else
+            {
+                while ((status = PV_H263SliceHeader(video, &mbnum)) == PV_FAIL)
+                {
+                    if ((status = quickSearchH263SliceHeader(stream)) != PV_SUCCESS)
+                    {
+                        break;
+                    }
+                }
+            }
+
+#endif
+        }
+        else
+        {
+            while ((status = PV_ReadVideoPacketHeader(video, &mbnum)) == PV_FAIL)
+            {
+                if ((status = quickSearchVideoPacketHeader(stream, resync_marker_length)) != PV_SUCCESS)
+                {
+                    break;
+                }
+            }
+        }
+
+        if (status == PV_END_OF_VOP)
+        {
+            mbnum = nTotalMB;
+        }
+
+        if (mbnum > video->mbnum + 1)
+        {
+            ConcealPacket(video, video->mbnum, mbnum, slice_counter);
+        }
+        QP = video->currVop->quantizer;
+        slice_counter++;
+        if (mbnum >= nTotalMB) break;
+
+    }
+    while (TRUE);
+    return PV_SUCCESS;
+}
+
+
+/* ============================================================================ */
+/*  Function : GetMBHeader()                                                    */
+/*  Purpose  : Decode MB header, not_coded, mcbpc, ac_pred_flag, cbpy, dquant.  */
+/*  In/out   :                                                                  */
+/*  Return   :                                                                  */
+/*  Modified :                                                                  */
+/*                                                                              */
+/*      3/29/00 : Changed the returned value and optimized the code.    */
+/*      4/01/01 : new ACDC prediction structure                         */
+/* ============================================================================ */
+PV_STATUS GetMBheader(VideoDecData *video, int16 *QP)
+{
+    BitstreamDecVideo *stream = video->bitstream;
+    int mbnum = video->mbnum;
+    uint8 *Mode = video->headerInfo.Mode;
+    int x_pos = video->mbnum_col;
+    typeDCStore *DC = video->predDC + mbnum;
+    typeDCACStore *DCAC_row = video->predDCAC_row + x_pos;
+    typeDCACStore *DCAC_col = video->predDCAC_col;
+    const static int16  DQ_tab[4] = { -1, -2, 1, 2};
+
+    int CBPY, CBPC;
+    int MBtype, VopType;
+    int MCBPC;
+    uint DQUANT;
+    int comp;
+    Bool mb_coded;
+
+    VopType = video->currVop->predictionType;
+    mb_coded = ((VopType == I_VOP) ? TRUE : !BitstreamRead1Bits_INLINE(stream));
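+    /* the COD bit is only present in P-VOPs; a set bit marks the macroblock as not coded */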
+
+    if (!mb_coded)
+    {
+        /* skipped macroblock */
+        Mode[mbnum] = MODE_SKIPPED;
+        //oscl_memset(DCAC_row, 0, sizeof(typeDCACStore));   /*  SKIPPED_ACDC */
+        //oscl_memset(DCAC_col, 0, sizeof(typeDCACStore));
+        ZERO_OUT_64BYTES(DCAC_row);
+        ZERO_OUT_64BYTES(DCAC_col); /*  08/12/05 */
+
+        for (comp = 0; comp < 6; comp++)
+        {
+            (*DC)[comp] = mid_gray;
+        }
+    }
+    else
+    {
+        /* coded macroblock */
+        if (VopType == I_VOP)
+        {
+            MCBPC = PV_VlcDecMCBPC_com_intra(stream);
+        }
+        else
+        {
+#ifdef PV_ANNEX_IJKT_SUPPORT
+            if (!video->deblocking)
+            {
+                MCBPC = PV_VlcDecMCBPC_com_inter(stream);
+            }
+            else
+            {
+                MCBPC = PV_VlcDecMCBPC_com_inter_H263(stream);
+            }
+#else
+            MCBPC = PV_VlcDecMCBPC_com_inter(stream);
+#endif
+        }
+
+        if (VLC_ERROR_DETECTED(MCBPC))
+        {
+            return PV_FAIL;
+        }
+
+        Mode[mbnum] = (uint8)(MBtype = MBtype_mode[MCBPC & 7]);
+        CBPC = (MCBPC >> 4) & 3;
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+        if (MBtype & INTRA_MASK)
+        {
+            if (!video->shortVideoHeader)
+            {
+                video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits(stream);
+            }
+            else
+            {
+                if (video->advanced_INTRA)
+                {
+                    if (!BitstreamRead1Bits(stream))
+                    {
+                        video->acPredFlag[mbnum] = 0;
+                    }
+                    else
+                    {
+                        video->acPredFlag[mbnum] = 1;
+                        if (BitstreamRead1Bits(stream))
+                        {
+                            video->mblock->direction = 0;
+                        }
+                        else
+                        {
+                            video->mblock->direction = 1;
+                        }
+                    }
+                }
+                else
+                {
+                    video->acPredFlag[mbnum] = 0;
+                }
+            }
+        }
+#else
+        if ((MBtype & INTRA_MASK) && !video->shortVideoHeader)
+        {
+            video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits_INLINE(stream);
+        }
+        else
+        {
+            video->acPredFlag[mbnum] = 0;
+        }
+#endif
+        CBPY = PV_VlcDecCBPY(stream, MBtype & INTRA_MASK); /* INTRA || INTRA_Q */
+        if (CBPY < 0)
+        {
+            return PV_FAIL;
+        }
+
+        // GW 04/23/99
+        video->headerInfo.CBP[mbnum] = (uint8)(CBPY << 2 | (CBPC & 3));
+#ifdef PV_ANNEX_IJKT_SUPPORT
+        if (MBtype & Q_MASK)
+        {
+            if (!video->modified_quant)
+            {
+                DQUANT = BitstreamReadBits16(stream, 2);
+                *QP += DQ_tab[DQUANT];
+
+                if (*QP < 1) *QP = 1;
+                else if (*QP > 31) *QP = 31;
+                video->QP_CHR = *QP;  /* ANNEX_T */
+            }
+            else
+            {
+                if (BitstreamRead1Bits(stream))
+                {
+                    if (BitstreamRead1Bits(stream))
+                    {
+                        *QP += DQ_tab_Annex_T_11[*QP];
+                    }
+                    else
+                    {
+                        *QP += DQ_tab_Annex_T_10[*QP];
+                    }
+                    if (*QP < 1) *QP = 1;
+                    else if (*QP > 31) *QP = 31;
+                }
+                else
+                {
+                    *QP = (int16)BitstreamReadBits16(stream, 5);
+                }
+                video->QP_CHR =  MQ_chroma_QP_table[*QP];
+            }
+        }
+#else
+        if (MBtype & Q_MASK)
+        {
+            DQUANT = BitstreamReadBits16(stream, 2);
+            *QP += DQ_tab[DQUANT];
+
+            if (*QP < 1) *QP = 1;
+            else if (*QP > 31) *QP = 31;
+        }
+#endif
+    }
+    return PV_SUCCESS;
+}
+
+
+
+
+
+/***********************************************************CommentBegin******
+*       3/10/00  : initial modification to the
+*                new PV-Decoder Lib format.
+*       4/2/2000 : Cleanup and error-handling modification.  This
+*                   function has been divided into several sub-functions for
+*                   better coding style and maintenance reasons.  I also
+*                   greatly shrunk the code size here.
+*       9/18/2000 : VlcDecode+Dequant optimization *
+*       4/01/2001 : new ACDC prediction structure
+*       3/29/2002 : removed GetIntraMB and GetInterMB
+***********************************************************CommentEnd********/
+PV_STATUS GetMBData(VideoDecData *video)
+{
+    BitstreamDecVideo *stream = video->bitstream;
+    int mbnum = video->mbnum;
+    MacroBlock *mblock = video->mblock;
+    int16 *dataBlock;
+    PIXEL *c_comp;
+    uint mode = video->headerInfo.Mode[mbnum];
+    uint CBP = video->headerInfo.CBP[mbnum];
+    typeDCStore *DC = video->predDC + mbnum;
+    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
+    int16 QP = video->QPMB[mbnum];
+    int16 QP_tmp = QP;
+    int width = video->width;
+    int  comp;
+    int  switched;
+    int ncoeffs[6] = {0, 0, 0, 0, 0, 0};
+    int *no_coeff = mblock->no_coeff;
+    int16 DC_coeff;
+    PV_STATUS status;
+
+    int y_pos = video->mbnum_row;
+    int x_pos = video->mbnum_col;
+    int32 offset = (int32)(y_pos << 4) * width + (x_pos << 4);
+
+    /* Decode each 8-by-8 blocks. comp 0 ~ 3 are luminance blocks, 4 ~ 5 */
+    /*  are chrominance blocks.   04/03/2000.                          */
+
+    /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
+
+    if (mode & INTRA_MASK) /* MODE_INTRA || MODE_INTRA_Q */
+    {
+        switched = 0;
+        if (intra_dc_vlc_thr)
+        {
+            if (video->usePrevQP)
+                QP_tmp = video->QPMB[mbnum-1];   /* running QP  04/26/01 */
+
+            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
+        }
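+        /* when switched is set, the intra DC coefficient is coded with the AC
+           coefficients (AC VLC) rather than as a separately predicted DC value */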
+
+        mblock->DCScalarLum = cal_dc_scaler(QP, LUMINANCE_DC_TYPE);   /*  3/01/01 */
+        mblock->DCScalarChr = cal_dc_scaler(QP, CHROMINANCE_DC_TYPE);
+
+        for (comp = 0; comp < 6; comp++)
+        {
+            dataBlock = mblock->block[comp];    /* 10/20/2000 */
+
+            if (video->shortVideoHeader)
+            {
+#ifdef PV_ANNEX_IJKT_SUPPORT
+                if (!video->advanced_INTRA)
+                {
+#endif
+                    DC_coeff = (int16) BitstreamReadBits16_INLINE(stream, 8);
+
+                    if ((DC_coeff & 0x7f) == 0) /* 128 & 0  */
+                    {
+                        /* currently we will only signal FAIL for 128. We will ignore the 0 case  */
+                        if (DC_coeff == 128)
+                        {
+                            return PV_FAIL;
+                        }
+                        else
+                        {
+                            VideoDecoderErrorDetected(video);
+                        }
+                    }
+                    if (DC_coeff == 255)
+                    {
+                        DC_coeff = 128;
+                    }
+                    dataBlock[0] = (int16) DC_coeff;
+#ifdef PV_ANNEX_IJKT_SUPPORT
+                }
+#endif
+                ncoeffs[comp] = VlcDequantH263IntraBlock_SH(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+
+            }
+            else
+            {
+                if (switched == 0)
+                {
+                    status = PV_DecodePredictedIntraDC(comp, stream, &DC_coeff);
+                    if (status != PV_SUCCESS) return PV_FAIL;
+
+                    dataBlock[0] = (int16) DC_coeff;
+                }
+                ncoeffs[comp] = VlcDequantH263IntraBlock(video, comp,
+                                switched, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+            }
+
+            if (VLC_ERROR_DETECTED(ncoeffs[comp]))
+            {
+                if (switched)
+                    return PV_FAIL;
+                else
+                {
+                    ncoeffs[comp] = 1;
+                    oscl_memset((dataBlock + 1), 0, sizeof(int16)*63);
+                }
+            }
+            no_coeff[comp] = ncoeffs[comp];
+
+        }
+        MBlockIDCT(video);
+    }
+    else      /* INTER modes */
+    {   /*  moved it here Aug 15, 2005 */
+        /* decode the motion vector (if there are any) */
+        status = PV_GetMBvectors(video, mode);
+        if (status != PV_SUCCESS)
+        {
+            return status;
+        }
+
+
+        MBMotionComp(video, CBP);
+        c_comp  = video->currVop->yChan + offset;
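+        /* for every block flagged in CBP: decode and dequantize the residual,
+           then add its inverse transform onto the motion-compensated prediction
+           already stored in mblock->pred_block */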
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+        for (comp = 0; comp < 4; comp++)
+        {
+            (*DC)[comp] = mid_gray;
+            if (CBP & (1 << (5 - comp)))
+            {
+                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+                if (VLC_ERROR_DETECTED(ncoeffs[comp])) return PV_FAIL;
+
+                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
+                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
+
+            }
+        }
+
+        video->QPMB[mbnum] = video->QP_CHR;     /* ANNEX_T */
+
+
+
+        (*DC)[4] = mid_gray;
+        if (CBP & 2)
+        {
+            ncoeffs[4] = VlcDequantH263InterBlock(video, 4, mblock->bitmapcol[4], &mblock->bitmaprow[4]);
+            if (VLC_ERROR_DETECTED(ncoeffs[4])) return PV_FAIL;
+
+            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
+                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
+
+        }
+        (*DC)[5] = mid_gray;
+        if (CBP & 1)
+        {
+            ncoeffs[5] = VlcDequantH263InterBlock(video, 5, mblock->bitmapcol[5], &mblock->bitmaprow[5]);
+            if (VLC_ERROR_DETECTED(ncoeffs[5])) return PV_FAIL;
+
+            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
+                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
+
+        }
+        video->QPMB[mbnum] = QP;  /* restore the QP values  ANNEX_T*/
+#else
+        for (comp = 0; comp < 4; comp++)
+        {
+            (*DC)[comp] = mid_gray;
+            if (CBP & (1 << (5 - comp)))
+            {
+                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+                if (VLC_ERROR_DETECTED(ncoeffs[comp])) return PV_FAIL;
+
+                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
+                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
+
+            }
+        }
+
+        (*DC)[4] = mid_gray;
+        if (CBP & 2)
+        {
+            ncoeffs[4] = VlcDequantH263InterBlock(video, 4, mblock->bitmapcol[4], &mblock->bitmaprow[4]);
+            if (VLC_ERROR_DETECTED(ncoeffs[4])) return PV_FAIL;
+
+            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
+                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
+
+        }
+        else
+        {
+            /* no IDCT for all zeros blocks  03/28/2002 */
+            /*              BlockIDCT();                */
+        }
+        (*DC)[5] = mid_gray;
+        if (CBP & 1)
+        {
+            ncoeffs[5] = VlcDequantH263InterBlock(video, 5, mblock->bitmapcol[5], &mblock->bitmaprow[5]);
+            if (VLC_ERROR_DETECTED(ncoeffs[5])) return PV_FAIL;
+
+            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
+                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
+
+        }
+        else
+        {
+            /* no IDCT for all zeros blocks  03/28/2002 */
+            /*              BlockIDCT();                */
+#endif  // PV_ANNEX_IJKT_SUPPORT
+
+
+
+
+
+
+    }
+
+    video->usePrevQP = 1;          /* should be set after decoding the first coded MB  04/27/01 */
+    return PV_SUCCESS;
+}
+
+
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp b/media/codecs/m4v_h263/dec/src/conceal.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp
rename to media/codecs/m4v_h263/dec/src/conceal.cpp
diff --git a/media/codecs/m4v_h263/dec/src/datapart_decode.cpp b/media/codecs/m4v_h263/dec/src/datapart_decode.cpp
new file mode 100644
index 0000000..6071f40
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/datapart_decode.cpp
@@ -0,0 +1,758 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "mp4dec_lib.h"
+#include "vlc_decode.h"
+#include "bitstream.h"
+#include "scaling.h"
+#include "mbtype_mode.h"
+#include "idct.h"
+
+#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
+/* ======================================================================== */
+/*  Function : DecodeFrameDataPartMode()                                    */
+/*  Purpose  : Decode a frame of MPEG4 bitstream in datapartitioning mode.  */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Modified :                                                              */
+/*                                                                          */
+/*      04/25/2000 : Rewrite the data partitioning path completely  */
+/*                           according to the pseudo-code in the MPEG-4     */
+/*                           standard.                                      */
+/*  Modified : 09/18/2000 add fast VlcDecode+Dequant                    */
+/*             04/17/2001 cleanup                                       */
+/* ======================================================================== */
+PV_STATUS DecodeFrameDataPartMode(VideoDecData *video)
+{
+    PV_STATUS status;
+    Vop *currVop = video->currVop;
+    BitstreamDecVideo *stream = video->bitstream;
+
+    int nMBPerRow = video->nMBPerRow;
+
+    int vopType = currVop->predictionType;
+    int mbnum;
+    int nTotalMB = video->nTotalMB;
+    int slice_counter;
+    int resync_marker_length;
+
+    /* copy and pad to prev_Vop for INTER coding */
+    switch (vopType)
+    {
+        case I_VOP :
+//      oscl_memset(Mode, MODE_INTRA, sizeof(uint8)*nTotalMB);
+            resync_marker_length = 17;
+            break;
+        case P_VOP :
+            oscl_memset(video->motX, 0, sizeof(MOT)*4*nTotalMB);
+            oscl_memset(video->motY, 0, sizeof(MOT)*4*nTotalMB);
+//      oscl_memset(Mode, MODE_INTER, sizeof(uint8)*nTotalMB);
+            resync_marker_length = 16 + currVop->fcodeForward;
+            break;
+        default :
+            mp4dec_log("DecodeFrameDataPartMode(): Vop type not supported.\n");
+            return PV_FAIL;
+    }
+
+    /** Initialize sliceNo ***/
+    mbnum = slice_counter = 0;
+//  oscl_memset(video->sliceNo, 0, sizeof(uint8)*nTotalMB);
+
+    do
+    {
+        /* This section is equivalent to motion_shape_texture() */
+        /* in the MPEG-4 standard.            04/13/2000      */
+        video->mbnum = mbnum;
+        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+
+        switch (vopType)
+        {
+            case I_VOP :
+                status = DecodeDataPart_I_VideoPacket(video, slice_counter);
+                break;
+
+            case P_VOP :
+                status = DecodeDataPart_P_VideoPacket(video, slice_counter);
+                break;
+
+            default :
+                mp4dec_log("DecodeFrameDataPartMode(): Vop type not supported.\n");
+                return PV_FAIL;
+        }
+
+        while ((status = PV_ReadVideoPacketHeader(video, &mbnum)) == PV_FAIL)
+        {
+            if ((status = quickSearchVideoPacketHeader(stream, resync_marker_length)) != PV_SUCCESS)
+            {
+                break;
+            }
+        }
+
+        if (status == PV_END_OF_VOP)
+        {
+            mbnum = nTotalMB;
+        }
+
+        if (mbnum > video->mbnum + 1)
+        {
+            ConcealPacket(video, video->mbnum, mbnum, slice_counter);
+        }
+        slice_counter++;
+        if (mbnum >= nTotalMB)
+        {
+            break;
+        }
+
+
+    }
+    while (TRUE);
+
+    return PV_SUCCESS;
+}
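+
+// The resync-marker length selected at the top of DecodeFrameDataPartMode()
+// depends only on the VOP type and, for P-VOPs, on fcode_forward.  A minimal
+// standalone restatement of that rule (illustrative sketch; name invented
+// here, not called by the decoder):
+//
+//     static int ResyncMarkerLengthSketch(int vopType, int fcodeForward)
+//     {
+//         return (vopType == I_VOP) ? 17 : 16 + fcodeForward;
+//     }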
+
+
+/* ======================================================================== */
+/*  Function : DecodeDataPart_I_VideoPacket()                               */
+/*  Date     : 04/25/2000                                                   */
+/*  Purpose  : Decode Data Partitioned Mode Video Packet in I-VOP           */
+/*  In/out   :                                                              */
+/*  Return   : PV_SUCCESS if successful, PV_FAIL if failed.                 */
+/*  Modified : 09/18/2000 add fast VlcDecode+Dequant                    */
+/*             04/01/2001 fixed MB_stuffing, removed unnecessary code   */
+/* ======================================================================== */
+PV_STATUS DecodeDataPart_I_VideoPacket(VideoDecData *video, int slice_counter)
+{
+    PV_STATUS status;
+    uint8 *Mode = video->headerInfo.Mode;
+    BitstreamDecVideo *stream = video->bitstream;
+    int  nTotalMB = video->nTotalMB;
+    int  mbnum, mb_start, mb_end;
+    int16 QP, *QPMB = video->QPMB;
+    int  MBtype, MCBPC, CBPY;
+    uint32 tmpvar;
+    uint code;
+    int nMBPerRow = video->nMBPerRow;
+    Bool valid_stuffing;
+    int32 startSecondPart, startFirstPart = getPointer(stream);
+
+    /* decode the first partition */
+    QP = video->currVop->quantizer;
+    mb_start = mbnum = video->mbnum;
+    video->usePrevQP = 0;         /*  04/27/01 */
+
+
+    BitstreamShowBits16(stream, 9, &code);
+    while (code == 1)
+    {
+        PV_BitstreamFlushBits(stream, 9);
+        BitstreamShowBits16(stream, 9, &code);
+    }
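+    /* A 9-bit look-ahead value of 1 is the MCBPC stuffing code for I-VOPs;
+       stuffed macroblock headers carry no data, so they are flushed here
+       before real header decoding starts (the P-VOP path below uses the
+       corresponding 10-bit stuffing code). */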
+
+    do
+    {
+        /* decode COD, MCBPC, ACpred_flag, CBPY and DQUANT */
+        MCBPC = PV_VlcDecMCBPC_com_intra(stream);
+
+        if (!VLC_ERROR_DETECTED(MCBPC))
+        {
+            Mode[mbnum] = (uint8)(MBtype = MBtype_mode[MCBPC & 7]);
+            video->headerInfo.CBP[mbnum] = (uint8)((MCBPC >> 4) & 3);
+            status = GetMBheaderDataPart_DQUANT_DC(video, &QP);
+            video->usePrevQP = 1;        /* set it after the first coded MB      04/27/01 */
+        }
+        else
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            video->mbnum = mb_start;
+            movePointerTo(stream, startFirstPart);
+            return PV_FAIL;
+        }
+
+        video->sliceNo[mbnum] = (uint8) slice_counter;
+        QPMB[mbnum] = QP;
+        video->mbnum = ++mbnum;
+
+        BitstreamShowBits16(stream, 9, &code);
+        while (code == 1)
+        {
+            PV_BitstreamFlushBits(stream, 9);
+            BitstreamShowBits16(stream, 9, &code);
+        }
+        /* have we reached the end of the video packet or vop? */
+        status = BitstreamShowBits32(stream, DC_MARKER_LENGTH, &tmpvar);
+
+    }
+    while (tmpvar != DC_MARKER && video->mbnum < nTotalMB);
+
+    if (tmpvar == DC_MARKER)
+    {
+        PV_BitstreamFlushBits(stream, DC_MARKER_LENGTH);
+    }
+    else
+    {
+        status = quickSearchDCM(stream);
+        if (status == PV_SUCCESS)
+        {
+            /* The only way to end up here is in the last packet, and there is
+               stuffing at the end of the first partition */
+            PV_BitstreamFlushBits(stream, DC_MARKER_LENGTH);
+        }
+        else
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            movePointerTo(stream, startFirstPart);
+            video->mbnum = mb_start;
+            /* concealment will be taken care of in the upper layer */
+            return PV_FAIL;
+        }
+    }
+
+    /* decode the second partition */
+    startSecondPart = getPointer(stream);
+
+    mb_end = video->mbnum;
+
+    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
+    {
+        MBtype = Mode[mbnum];
+        /* No skipped mode in I-packets  3/1/2001    */
+        video->mbnum = mbnum;
+
+        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+        /* there is always acdcpred in DataPart mode  04/10/01 */
+        video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits(stream);
+
+        CBPY = PV_VlcDecCBPY(stream, MBtype & INTRA_MASK); /* MODE_INTRA || MODE_INTRA_Q */
+        if (CBPY < 0)
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            movePointerTo(stream, startSecondPart); /*  */
+            /* Conceal packet,  05/15/2000 */
+            ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
+            return PV_FAIL;
+        }
+
+        video->headerInfo.CBP[mbnum] |= (uint8)(CBPY << 2);
+    }
+
+    video->usePrevQP = 0;
+
+    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
+    {
+        video->mbnum = mbnum;
+
+        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);  /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+        /* No skipped mode in I-packets  3/1/2001    */
+        /* decode the DCT coefficients for the MB */
+        status = GetMBData_DataPart(video);
+        if (status != PV_SUCCESS)
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            movePointerTo(stream, startSecondPart); /*  */
+            /* Conceal packet,  05/15/2000 */
+            ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
+            return status;
+        }
+        video->usePrevQP = 1;           /*  04/27/01 should be set after decoding first MB */
+    }
+
+    valid_stuffing = validStuffing(stream);
+    if (!valid_stuffing)
+    {
+        VideoDecoderErrorDetected(video);
+        movePointerTo(stream, startSecondPart);
+        ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
+        return PV_FAIL;
+    }
+    return PV_SUCCESS;
+}
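+
+// For reference, the I-VOP data-partitioned packet layout handled by the
+// three loops above (illustrative summary, mirroring the code rather than
+// quoting the standard):
+//
+//     [ per-MB: MCBPC, DQUANT, intra DC ]  DC_MARKER
+//     [ per-MB: ac_pred_flag, CBPY ]
+//     [ per-MB: AC texture data (plus DC when the intra-DC VLC is switched) ]
+//     stuffing bits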
+
+
+/* ======================================================================== */
+/*  Function : DecodeDataPart_P_VideoPacket()                               */
+/*  Date     : 04/25/2000                                                   */
+/*  Purpose  : Decode Data Partitioned Mode Video Packet in P-VOP           */
+/*  In/out   :                                                              */
+/*  Return   : PV_SUCCESS if successful, PV_FAIL if failed.                 */
+/*  Modified :   09/18/2000,  fast VlcDecode+Dequant                        */
+/*              04/13/2001,  fixed MB_stuffing, new ACDC pred structure,  */
+/*                              cleanup                                     */
+/*              08/07/2001,  remove MBzero                              */
+/* ======================================================================== */
+PV_STATUS DecodeDataPart_P_VideoPacket(VideoDecData *video, int slice_counter)
+{
+    PV_STATUS status;
+    uint8 *Mode = video->headerInfo.Mode;
+    BitstreamDecVideo *stream = video->bitstream;
+    int nTotalMB = video->nTotalMB;
+    int mbnum, mb_start, mb_end;
+    int16 QP, *QPMB = video->QPMB;
+    int MBtype, CBPY;
+    Bool valid_stuffing;
+    int intra_MB;
+    uint32 tmpvar;
+    uint code;
+    int32  startFirstPart, startSecondPart;
+    int nMBPerRow = video->nMBPerRow;
+    uint8 *pbyte;
+    /* decode the first partition */
+    startFirstPart = getPointer(stream);
+    mb_start = video->mbnum;
+    video->usePrevQP = 0;            /*  04/27/01 */
+
+    BitstreamShowBits16(stream, 10, &code);
+    while (code == 1)
+    {
+        PV_BitstreamFlushBits(stream, 10);
+        BitstreamShowBits16(stream, 10, &code);
+    }
+
+    do
+    {
+        /* decode COD, MCBPC, ACpred_flag, CBPY and DQUANT */
+        /* We have to discard stuffed MB headers */
+
+        status = GetMBheaderDataPart_P(video);
+
+        if (status != PV_SUCCESS)
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            movePointerTo(stream, startFirstPart);
+            video->mbnum = mb_start;
+            return PV_FAIL;
+        }
+
+        /* we must update slice_counter before motion vector decoding.   */
+        video->sliceNo[video->mbnum] = (uint8) slice_counter;
+
+        if (Mode[video->mbnum] & INTER_MASK) /* INTER || INTER_Q || INTER_4V */
+        {
+            /* decode the motion vector (if there are any) */
+            status = PV_GetMBvectors(video, Mode[video->mbnum]);
+            if (status != PV_SUCCESS)
+            {
+                /* Report the error to the application.   06/20/2000 */
+                VideoDecoderErrorDetected(video);
+                movePointerTo(stream, startFirstPart);
+                video->mbnum = mb_start;
+                return PV_FAIL;
+            }
+        }
+        video->mbnum++;
+
+        video->mbnum_row = PV_GET_ROW(video->mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = video->mbnum - video->mbnum_row * nMBPerRow;
+
+        BitstreamShowBits16(stream, 10, &code);
+        while (code == 1)
+        {
+            PV_BitstreamFlushBits(stream, 10);
+            BitstreamShowBits16(stream, 10, &code);
+        }
+        /* have we reached the end of the video packet or vop? */
+        status = BitstreamShowBits32(stream, MOTION_MARKER_COMB_LENGTH, &tmpvar);
+        /*      if (status != PV_SUCCESS && status != PV_END_OF_BUFFER) return status;  */
+    }
+    while (tmpvar != MOTION_MARKER_COMB && video->mbnum < nTotalMB);
+
+    if (tmpvar == MOTION_MARKER_COMB)
+    {
+        PV_BitstreamFlushBits(stream, MOTION_MARKER_COMB_LENGTH);
+    }
+    else
+    {
+        status = quickSearchMotionMarker(stream);
+        if (status == PV_SUCCESS)
+        {
+            /* The only way to end up here is in the last packet, and there is
+               stuffing at the end of the first partition */
+            PV_BitstreamFlushBits(stream, MOTION_MARKER_COMB_LENGTH);
+        }
+        else
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            movePointerTo(stream, startFirstPart);
+            video->mbnum = mb_start;
+            /* concealment will be taken care of in the upper layer  */
+            return PV_FAIL;
+        }
+    }
+
+    /* decode the second partition */
+    startSecondPart = getPointer(stream);
+    QP = video->currVop->quantizer;
+
+    mb_end = video->mbnum;
+
+    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
+    {
+        MBtype = Mode[mbnum];
+
+        if (MBtype == MODE_SKIPPED)
+        {
+            QPMB[mbnum] = QP; /*  03/01/01 */
+            continue;
+        }
+        intra_MB = (MBtype & INTRA_MASK); /* (MBtype == MODE_INTRA || MBtype == MODE_INTRA_Q) */
+        video->mbnum = mbnum;
+        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+
+        /* there is always acdcprediction in DataPart mode    04/10/01 */
+        if (intra_MB)
+        {
+            video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits_INLINE(stream);
+        }
+
+        CBPY = PV_VlcDecCBPY(stream, intra_MB);
+        if (CBPY < 0)
+        {
+            /* Report the error to the application.   06/20/2000 */
+            VideoDecoderErrorDetected(video);
+            /* Conceal second partition,  5/15/2000 */
+            movePointerTo(stream, startSecondPart);
+            ConcealTexture_P(video, mb_start, mb_end, slice_counter);
+            return PV_FAIL;
+        }
+
+        video->headerInfo.CBP[mbnum] |= (uint8)(CBPY << 2);
+        if (intra_MB || MBtype == MODE_INTER_Q)                     /*  04/26/01 */
+        {
+            status = GetMBheaderDataPart_DQUANT_DC(video, &QP);
+            if (status != PV_SUCCESS) return status;
+        }
+        video->usePrevQP = 1;        /*  04/27/01 */
+        QPMB[mbnum] = QP;
+    }
+
+    video->usePrevQP = 0;  /*  04/27/01 */
+
+    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
+    {
+        video->mbnum = mbnum;
+        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);  /*  This is needed if mbnum is read from the packet header */
+        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
+
+
+        if (Mode[mbnum] != MODE_SKIPPED)
+        {
+            /* decode the DCT coefficients for the MB */
+            status = GetMBData_DataPart(video);
+            if (status != PV_SUCCESS)
+            {
+                /* Report the error to the application.   06/20/2000 */
+                VideoDecoderErrorDetected(video);
+
+                /* Conceal second partition,  5/15/2000 */
+                movePointerTo(stream, startSecondPart);
+                ConcealTexture_P(video, mb_start, mb_end, slice_counter);
+                return status;
+            }
+            video->usePrevQP = 1;  /*  04/27/01 */
+        }
+        else
+        {   // SKIPPED
+
+            /* Motion compensation and put it to video->mblock->pred_block */
+            SkippedMBMotionComp(video);
+
+            //oscl_memset(video->predDCAC_row + video->mbnum_col, 0, sizeof(typeDCACStore)); /*  SKIPPED_ACDC */
+            //oscl_memset(video->predDCAC_col, 0, sizeof(typeDCACStore));
+            /*  08/08/2005 */
+            pbyte = (uint8*)(video->predDCAC_row + video->mbnum_col);
+            ZERO_OUT_64BYTES(pbyte);
+            pbyte = (uint8*)(video->predDCAC_col);
+            ZERO_OUT_64BYTES(pbyte);
+
+        }
+    }
+
+    valid_stuffing = validStuffing(stream);   /*  */
+    if (!valid_stuffing)
+    {
+        VideoDecoderErrorDetected(video);
+        movePointerTo(stream, startSecondPart); /*  */
+        ConcealTexture_P(video, mb_start, mb_end, slice_counter);
+
+        return PV_FAIL;
+    }
+    return PV_SUCCESS;
+}
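+
+// Corresponding P-VOP packet layout for the loops above (illustrative
+// summary):
+//
+//     [ per-MB: not_coded flag, MCBPC, motion vectors ]  MOTION_MARKER_COMB
+//     [ per-MB: ac_pred_flag (intra only), CBPY, DQUANT ]
+//     [ per-MB: block texture data ]  stuffing bits
+//
+// Skipped macroblocks (MODE_SKIPPED) carry no texture data; they are handled
+// by SkippedMBMotionComp() in the last loop instead.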
+
+
+/* ======================================================================== */
+/*  Function : GetMBheaderDataPart_DQUANT_DC()                              */
+/*  Date     : 04/26/2000                                                   */
+/*  Purpose  : Decode DQUANT and DC in Data Partitioned Mode for both       */
+/*             I-VOP and P-VOP.                                             */
+/*  In/out   :                                                              */
+/*  Return   : PV_SUCCESS if successful, PV_FAIL if failed.                 */
+/*  Modified : 02/13/2001 new ACDC prediction structure,        */
+/*                                       cleanup                            */
+/* ======================================================================== */
+PV_STATUS GetMBheaderDataPart_DQUANT_DC(VideoDecData *video, int16 *QP)
+{
+    PV_STATUS status = PV_SUCCESS;
+    BitstreamDecVideo *stream = video->bitstream;
+    int mbnum = video->mbnum;
+    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
+    uint8 *Mode = video->headerInfo.Mode;
+    int  MBtype = Mode[mbnum];
+    typeDCStore *DC = video->predDC + mbnum;
+    int  comp;
+    Bool switched;
+    uint  DQUANT;
+    int16 QP_tmp;
+
+    const static int  DQ_tab[4] = { -1, -2, 1, 2};
+
+    if (MBtype & Q_MASK)             /* INTRA_Q || INTER_Q */
+    {
+        DQUANT = BitstreamReadBits16(stream, 2);
+        *QP += DQ_tab[DQUANT];
+
+        if (*QP < 1) *QP = 1;
+        else if (*QP > 31) *QP = 31;
+    }
+    if (MBtype & INTRA_MASK)  /* INTRA || INTRA_Q */ /* no switch, code DC separately */
+    {
+        QP_tmp = *QP;                      /* running QP  04/26/01*/
+        switched = 0;
+        if (intra_dc_vlc_thr)                 /*  04/27/01 */
+        {
+            if (video->usePrevQP)
+                QP_tmp = video->QPMB[mbnum-1];
+            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
+        }
+        if (!switched)
+        {
+            for (comp = 0; comp < 6; comp++)
+            {
+                status = PV_DecodePredictedIntraDC(comp, stream, (*DC + comp));   /*  03/01/01 */
+                if (status != PV_SUCCESS) return PV_FAIL;
+            }
+        }
+        else
+        {
+            for (comp = 0; comp < 6; comp++)
+            {
+                (*DC)[comp] = 0;   /*  04/26/01 needed for switched case*/
+            }
+        }
+    }
+    return status;
+}
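+
+// Worked example for the logic above (numbers illustrative): with a running
+// QP of 10 and the two DQUANT bits decoding to 2, the new QP is
+// 10 + DQ_tab[2] = 11, then clamped to [1, 31].  For intra macroblocks the DC
+// values are read here with the predicted-DC VLC unless the "switched"
+// condition holds (intra_dc_vlc_thr == 7, or the running QP is at least
+// intra_dc_vlc_thr * 2 + 11); in the switched case DC is decoded later,
+// together with the AC coefficients.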
+
+
+/***********************************************************CommentBegin******
+*       04/25/2000 : Initial modification to the new PV Lib format.
+*       04/17/2001 : new ACDC pred structure
+***********************************************************CommentEnd********/
+PV_STATUS GetMBheaderDataPart_P(VideoDecData *video)
+{
+    BitstreamDecVideo *stream = video->bitstream;
+    int mbnum = video->mbnum;
+    uint8 *Mode = video->headerInfo.Mode;
+    typeDCStore *DC = video->predDC + mbnum;
+    uint no_dct_flag;
+    int comp;
+    int MCBPC;
+
+    no_dct_flag = BitstreamRead1Bits_INLINE(stream);
+
+    if (no_dct_flag)
+    {
+        /* skipped macroblock */
+        Mode[mbnum] = MODE_SKIPPED;
+
+        for (comp = 0; comp < 6; comp++)
+        {
+            (*DC)[comp] = mid_gray;
+            /*  ACDC REMOVE AC coefs are set in DecodeDataPart_P */
+        }
+    }
+    else
+    {
+        /* coded macroblock */
+        MCBPC = PV_VlcDecMCBPC_com_inter(stream);
+
+        if (VLC_ERROR_DETECTED(MCBPC))
+        {
+            return PV_FAIL;
+        }
+
+        Mode[mbnum] = (uint8)MBtype_mode[MCBPC & 7];
+        video->headerInfo.CBP[mbnum] = (uint8)((MCBPC >> 4) & 3);
+    }
+
+    return PV_SUCCESS;
+}
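+
+// The single bit read above is the not_coded (COD) flag: 1 marks a skipped
+// macroblock, so the mode is forced to MODE_SKIPPED and the stored DC values
+// are preset to mid_gray; 0 marks a coded macroblock, whose MCBPC code gives
+// the MB type (low three bits index MBtype_mode) and the two chroma CBP bits.
+// Explanatory restatement of the code above, not new logic.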
+
+
+/***********************************************************CommentBegin******
+*       04/17/01  new ACDC pred structure, reorganized code, cleanup
+***********************************************************CommentEnd********/
+PV_STATUS GetMBData_DataPart(VideoDecData *video)
+{
+    int mbnum = video->mbnum;
+    int16 *dataBlock;
+    MacroBlock *mblock = video->mblock;
+    int QP = video->QPMB[mbnum];
+    int32 offset;
+    PIXEL *c_comp;
+    int width = video->width;
+    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
+    uint CBP = video->headerInfo.CBP[mbnum];
+    uint8 mode = video->headerInfo.Mode[mbnum];
+    int x_pos = video->mbnum_col;
+    typeDCStore *DC = video->predDC + mbnum;
+    int  ncoeffs[6], *no_coeff = mblock->no_coeff;
+    int  comp;
+    Bool  switched;
+    int QP_tmp = QP;
+
+    int y_pos = video->mbnum_row;
+
+
+
+
+    /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
+
+    if (mode & INTRA_MASK) /* MODE_INTRA || mode == MODE_INTRA_Q */
+    {
+        switched = 0;
+        if (intra_dc_vlc_thr)
+        {
+            if (video->usePrevQP)
+                QP_tmp = video->QPMB[mbnum-1];   /* running QP  04/26/01 */
+
+            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
+        }
+
+        mblock->DCScalarLum = cal_dc_scaler(QP, LUMINANCE_DC_TYPE);     /*   ACDC 03/01/01 */
+        mblock->DCScalarChr = cal_dc_scaler(QP, CHROMINANCE_DC_TYPE);
+
+        for (comp = 0; comp < 6; comp++)
+        {
+            dataBlock = mblock->block[comp];    /*, 10/20/2000 */
+
+            dataBlock[0] = (*DC)[comp];
+
+            ncoeffs[comp] = VlcDequantH263IntraBlock(video, comp,
+                            switched, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+
+            if (VLC_ERROR_DETECTED(ncoeffs[comp]))         /*  */
+            {
+                if (switched)
+                    return PV_FAIL;
+                else
+                {
+                    ncoeffs[comp] = 1;
+                    oscl_memset((dataBlock + 1), 0, sizeof(int16)*63);
+                }
+            }
+            no_coeff[comp] = ncoeffs[comp];
+            /*  modified to new semaphore for post-proc */
+            // Future work: can be combined into the dequant function
+            // @todo Deblocking Semaphore for INTRA block
+        }
+        MBlockIDCT(video);
+    }
+    else /* MODE INTER*/
+    {
+
+
+
+
+        MBMotionComp(video, CBP);
+        offset = (int32)(y_pos << 4) * width + (x_pos << 4);
+        c_comp  = video->currVop->yChan + offset;
+
+
+        for (comp = 0; comp < 4; comp++)
+        {
+            (*DC)[comp] = mid_gray;
+
+            if (CBP & (1 << (5 - comp)))
+            {
+                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp,
+                                mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
+                if (VLC_ERROR_DETECTED(ncoeffs[comp]))
+                    return PV_FAIL;
+
+
+                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
+                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
+
+            }
+            else
+            {
+                ncoeffs[comp] = 0;
+            }
+
+            /*  @todo Deblocking Semaphore for INTRA block, for inter just test for ringing  */
+        }
+
+        (*DC)[4] = mid_gray;
+        if (CBP & 2)
+        {
+            ncoeffs[4] = VlcDequantH263InterBlock(video, 4,
+                                                  mblock->bitmapcol[4], &mblock->bitmaprow[4]);
+            if (VLC_ERROR_DETECTED(ncoeffs[4]))
+                return PV_FAIL;
+
+            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
+                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
+
+        }
+        else
+        {
+            ncoeffs[4] = 0;
+        }
+        (*DC)[5] = mid_gray;
+        if (CBP & 1)
+        {
+            ncoeffs[5] = VlcDequantH263InterBlock(video, 5,
+                                                  mblock->bitmapcol[5], &mblock->bitmaprow[5]);
+            if (VLC_ERROR_DETECTED(ncoeffs[5]))
+                return PV_FAIL;
+
+            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
+                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
+
+        }
+        else
+        {
+            ncoeffs[5] = 0;
+        }
+
+
+
+
+        /* Motion compensation and put it to video->mblock->pred_block */
+    }
+    return PV_SUCCESS;
+}
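+
+// The pred_block offsets used by the BlockIDCT() calls above suggest the
+// prediction buffer layout (inferred here, not quoted from a header): a
+// 16x16 luminance area with a stride of 16, so the four 8x8 luma blocks
+// start at 0, 8, 128 and 136 ((comp & 2) * 64 + 8 * (comp & 1)), followed by
+// the 8x8 Cb and Cr blocks starting at offsets 256 and 264.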
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/dcac_prediction.cpp b/media/codecs/m4v_h263/dec/src/dcac_prediction.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/dcac_prediction.cpp
rename to media/codecs/m4v_h263/dec/src/dcac_prediction.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp b/media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
rename to media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp b/media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
rename to media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/get_pred_outside.cpp b/media/codecs/m4v_h263/dec/src/get_pred_outside.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/get_pred_outside.cpp
rename to media/codecs/m4v_h263/dec/src/get_pred_outside.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct.cpp b/media/codecs/m4v_h263/dec/src/idct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/idct.cpp
rename to media/codecs/m4v_h263/dec/src/idct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct.h b/media/codecs/m4v_h263/dec/src/idct.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/idct.h
rename to media/codecs/m4v_h263/dec/src/idct.h
diff --git a/media/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/codecs/m4v_h263/dec/src/idct_vca.cpp
new file mode 100644
index 0000000..dbaf5d1
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/idct_vca.cpp
@@ -0,0 +1,670 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "mp4def.h"
+#include "idct.h"
+#include "motion_comp.h"
+
+#ifdef FAST_IDCT
+
+/****************************************************************
+*       vca_idct.c : created 6/1/99 for several options
+*                     of hard-coded reduced IDCT functions (using nz_coefs)
+******************************************************************/
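+
+// The idctrow{0..4}/idctcol{0..4} pairs below appear to be specializations
+// for blocks whose nonzero coefficients are confined to the first few
+// rows/columns: each variant reads only the coefficients it needs (idctrow1
+// touches just the DC term, idctrow4 also reads blk[9], blk[10] and blk[11])
+// and clears them as it goes, while CLIP_RESULT()/ADD_AND_CLIPx() keep the
+// reconstructed samples in the 8-bit pixel range.  Which variant runs for a
+// given block is chosen elsewhere from the nz_coefs/bitmap information; this
+// note is explanatory only.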
+
+/*****************************************************/
+//pretested version
+void idctrow0(int16 *, uint8 *, uint8 *, int)
+{
+    return ;
+}
+void idctcol0(int16 *)
+{
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    /* shortcut */
+    int tmp;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        tmp = (*(blk += 8) + 32) >> 6;
+        *blk = 0;
+
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = tmp + (pred_word & 0xFF);
+        CLIP_RESULT(res);
+        res2 = tmp + ((pred_word >> 8) & 0xFF);
+        CLIP_RESULT(res2);
+        dst_word = (res2 << 8) | res;
+        res = tmp + ((pred_word >> 16) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 16);
+        res = tmp + ((pred_word >> 24) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = tmp + (pred_word & 0xFF);
+        CLIP_RESULT(res);
+        res2 = tmp + ((pred_word >> 8) & 0xFF);
+        CLIP_RESULT(res2);
+        dst_word = (res2 << 8) | res;
+        res = tmp + ((pred_word >> 16) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 16);
+        res = tmp + ((pred_word >> 24) & 0xFF);
+        CLIP_RESULT(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return;
+}
+
+void idctcol1(int16 *blk)
+{ /* shortcut */
+    blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56] =
+                                              blk[0] << 3;
+    return;
+}
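+
+// Worked example for the DC-only shortcut: idctcol1 replicates the scaled DC
+// term (blk[0] << 3) down the column, and idctrow1 then adds the rounded
+// constant ((value + 32) >> 6) to every predicted pixel, four bytes at a
+// time.  If the scaled DC entry is 160, each output sample is its prediction
+// byte plus (160 + 32) >> 6 = 3, kept in the 8-bit range by CLIP_RESULT().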
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x4, x5;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        /* shortcut */
+        x4 = blk[9];
+        blk[9] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;
+        *blk = 0;  /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x5 = (W7 * x4 + 4) >> 3;
+        x4 = (W1 * x4 + 4) >> 3;
+
+        /* third stage */
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x1 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage */
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x4) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 + x2) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 + x5) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x0 - x5) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x0 - x1) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x0 - x2) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x4) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol2(int16 *blk)
+{
+    int32 x0, x1, x3, x5, x7;//, x8;
+
+    x1 = blk[8];
+    x0 = ((int32)blk[0] << 11) + 128;
+    /* both upper and lower*/
+
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+
+    x3 = x7;
+    x5 = (181 * (x1 - x7) + 128) >> 8;
+    x7 = (181 * (x1 + x7) + 128) >> 8;
+
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x0 + x7) >> 8;
+    blk[16] = (x0 + x5) >> 8;
+    blk[24] = (x0 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x0 - x7) >> 8;
+    blk[40] = (x0 - x5) >> 8;
+    blk[32] = (x0 - x3) >> 8;
+
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        x2 = blk[10];
+        blk[10] = 0;
+        x1 = blk[9];
+        blk[9] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;
+        *blk = 0;   /* for proper rounding in the fourth stage */
+        /* both upper and lower*/
+        /* both x2orx6 and x0orx4 */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x3 = x7;
+        x5 = (181 * (x1 - x7) + 128) >> 8;
+        x7 = (181 * (x1 + x7) + 128) >> 8;
+
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x4 + x7) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x6 + x5) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x2 + x3) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x2 - x3) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x6 - x5) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x4 - x7) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol3(int16 *blk)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+
+    x2 = blk[16];
+    x1 = blk[8];
+    x0 = ((int32)blk[0] << 11) + 128;
+
+    x4 = x0;
+    x6 = W6 * x2;
+    x2 = W2 * x2;
+    x8 = x0 - x2;
+    x0 += x2;
+    x2 = x8;
+    x8 = x4 - x6;
+    x4 += x6;
+    x6 = x8;
+
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+    x3 = x7;
+    x5 = (181 * (x1 - x7) + 128) >> 8;
+    x7 = (181 * (x1 + x7) + 128) >> 8;
+
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x4 + x7) >> 8;
+    blk[16] = (x6 + x5) >> 8;
+    blk[24] = (x2 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x4 - x7) >> 8;
+    blk[40] = (x6 - x5) >> 8;
+    blk[32] = (x2 - x3) >> 8;
+
+    return;
+}
+
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    int i = 8;
+    uint32 pred_word, dst_word;
+    int res, res2;
+
+    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
+    width -= 4;
+    dst -= width;
+    pred -= 12;
+    blk -= 8;
+
+    while (i--)
+    {
+        x2 = blk[10];
+        blk[10] = 0;
+        x1 = blk[9];
+        blk[9] = 0;
+        x3 = blk[11];
+        blk[11] = 0;
+        x0 = ((*(blk += 8)) << 8) + 8192;
+        *blk = 0;    /* for proper rounding in the fourth stage */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x5 = (W3 * x3 + 4) >> 3;
+        x3 = (- W5 * x3 + 4) >> 3;
+        x8 = x1 - x5;
+        x1 += x5;
+        x5 = x8;
+        x8 = x7 - x3;
+        x3 += x7;
+        x7 = (181 * (x5 + x8) + 128) >> 8;
+        x5 = (181 * (x5 - x8) + 128) >> 8;
+
+        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
+        res = (x0 + x1) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x4 + x7) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x6 + x5) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x2 + x3) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
+
+        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
+        res = (x2 - x3) >> 14;
+        ADD_AND_CLIP1(res);
+        res2 = (x6 - x5) >> 14;
+        ADD_AND_CLIP2(res2);
+        dst_word = (res2 << 8) | res;
+        res = (x4 - x7) >> 14;
+        ADD_AND_CLIP3(res);
+        dst_word |= (res << 16);
+        res = (x0 - x1) >> 14;
+        ADD_AND_CLIP4(res);
+        dst_word |= (res << 24);
+        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
+    }
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctcol4(int16 *blk)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
+    x2 = blk[16];
+    x1 = blk[8];
+    x3 = blk[24];
+    x0 = ((int32)blk[0] << 11) + 128;
+
+    x4 = x0;
+    x6 = W6 * x2;
+    x2 = W2 * x2;
+    x8 = x0 - x2;
+    x0 += x2;
+    x2 = x8;
+    x8 = x4 - x6;
+    x4 += x6;
+    x6 = x8;
+
+    x7 = W7 * x1;
+    x1 = W1 * x1;
+    x5 = W3 * x3;
+    x3 = -W5 * x3;
+    x8 = x1 - x5;
+    x1 += x5;
+    x5 = x8;
+    x8 = x7 - x3;
+    x3 += x7;
+    x7 = (181 * (x5 + x8) + 128) >> 8;
+    x5 = (181 * (x5 - x8) + 128) >> 8;
+
+
+    blk[0] = (x0 + x1) >> 8;
+    blk[8] = (x4 + x7) >> 8;
+    blk[16] = (x6 + x5) >> 8;
+    blk[24] = (x2 + x3) >> 8;
+    blk[56] = (x0 - x1) >> 8;
+    blk[48] = (x4 - x7) >> 8;
+    blk[40] = (x6 - x5) >> 8;
+    blk[32] = (x2 - x3) >> 8;
+
+    return ;
+}
+
+void idctrow0_intra(int16 *, PIXEL *, int)
+{
+    return ;
+}
+
+void idctrow1_intra(int16 *blk, PIXEL *comp, int width)
+{
+    /* shortcut */
+    int32 tmp;
+    int i = 8;
+    int offset = width;
+    uint32 word;
+
+    comp -= offset;
+    while (i--)
+    {
+        tmp = ((blk[0] + 32) >> 6);
+        blk[0] = 0;
+        CLIP_RESULT(tmp)
+
+        word = (tmp << 8) | tmp;
+        word = (word << 16) | word;
+
+        *((uint32*)(comp += offset)) = word;
+        *((uint32*)(comp + 4)) = word;
+
+
+
+
+        blk += B_SIZE;
+    }
+    return;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x4, x5, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+    while (i--)
+    {
+        /* shortcut */
+        x4 = blk[1];
+        blk[1] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;   /* for proper rounding in the fourth stage */
+
+        /* first stage */
+        x5 = (W7 * x4 + 4) >> 3;
+        x4 = (W1 * x4 + 4) >> 3;
+
+        /* third stage */
+        x2 = (181 * (x4 + x5) + 128) >> 8;
+        x1 = (181 * (x4 - x5) + 128) >> 8;
+
+        /* fourth stage */
+        word = ((x0 + x4) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x0 + x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+        temp = ((x0 + x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+        temp = ((x0 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        word = ((x0 - x5) >> 14);
+        CLIP_RESULT(word)
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+        temp = ((x0 - x2) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+        temp = ((x0 - x4) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+
+    while (i--)
+    {
+        x2 = blk[2];
+        blk[2] = 0;
+        x1 = blk[1];
+        blk[1] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;/* for proper rounding in the fourth stage */
+        /* both upper and lower*/
+        /* both x2orx6 and x0orx4 */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x3 = x7;
+        x5 = (181 * (x1 - x7) + 128) >> 8;
+        x7 = (181 * (x1 + x7) + 128) >> 8;
+
+        word = ((x0 + x1) >> 14);
+        CLIP_RESULT(word)
+        temp = ((x4 + x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+
+        temp = ((x6 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x2 + x3) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        word = ((x2 - x3) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x6 - x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x4 - x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+    return ;
+}
+
+__attribute__((no_sanitize("signed-integer-overflow")))
+void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
+{
+    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
+    int i = 8;
+    int offset = width;
+    int32 word;
+
+    comp -= offset;
+
+    while (i--)
+    {
+        x2 = blk[2];
+        blk[2] = 0;
+        x1 = blk[1];
+        blk[1] = 0;
+        x3 = blk[3];
+        blk[3] = 0;
+        x0 = ((int32)blk[0] << 8) + 8192;
+        blk[0] = 0;/* for proper rounding in the fourth stage */
+
+        x4 = x0;
+        x6 = (W6 * x2 + 4) >> 3;
+        x2 = (W2 * x2 + 4) >> 3;
+        x8 = x0 - x2;
+        x0 += x2;
+        x2 = x8;
+        x8 = x4 - x6;
+        x4 += x6;
+        x6 = x8;
+
+        x7 = (W7 * x1 + 4) >> 3;
+        x1 = (W1 * x1 + 4) >> 3;
+        x5 = (W3 * x3 + 4) >> 3;
+        x3 = (- W5 * x3 + 4) >> 3;
+        x8 = x1 - x5;
+        x1 += x5;
+        x5 = x8;
+        x8 = x7 - x3;
+        x3 += x7;
+        x7 = (181 * (x5 + x8) + 128) >> 8;
+        x5 = (181 * (x5 - x8) + 128) >> 8;
+
+        word = ((x0 + x1) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x4 + x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+
+        temp = ((x6 + x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x2 + x3) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp += offset)) = word;
+
+        word = ((x2 - x3) >> 14);
+        CLIP_RESULT(word)
+
+        temp = ((x6 - x5) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 8);
+
+        temp = ((x4 - x7) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 16);
+
+        temp = ((x0 - x1) >> 14);
+        CLIP_RESULT(temp)
+        word = word | (temp << 24);
+        *((int32*)(comp + 4)) = word;
+
+        blk += B_SIZE;
+    }
+
+    return ;
+}
+
+#endif
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/max_level.h b/media/codecs/m4v_h263/dec/src/max_level.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/max_level.h
rename to media/codecs/m4v_h263/dec/src/max_level.h
diff --git a/media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp b/media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
new file mode 100644
index 0000000..79760f5
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
@@ -0,0 +1,565 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+
+#define LOG_TAG "m4v_h263"
+#include <log/log.h>
+
+/*
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+ Inputs:
+    video = pointer to structure of type VideoDecData
+
+ Local Stores/Buffers/Pointers Needed:
+    roundtab16 = rounding table
+
+ Global Stores/Buffers/Pointers Needed:
+    None
+
+ Outputs:
+    None
+
+ Pointers and Buffers Modified:
+    video->currVop->yChan contents are the newly calculated luminance
+      data
+    video->currVop->uChan contents are the newly calculated chrominance
+      b data
+    video->currVop->vChan contents are the newly calculated chrominance
+      r data
+    video->pstprcTypCur contents are the updated semaphore propagation
+      values
+
+ Local Stores Modified:
+    None
+
+ Global Stores Modified:
+    None
+
+------------------------------------------------------------------------------
+ FUNCTION DESCRIPTION
+
+ This function performs high level motion compensation on the luminance and
+ chrominance data. It sets up all the parameters required by the functions
+ that perform luminance and chrominance prediction and it initializes the
+ pointer to the post processing semaphores of a given block. It also checks
+ the motion compensation mode in order to determine which luminance or
+ chrominance prediction functions to call and determines how the post
+ processing semaphores are updated.
+
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "mp4dec_lib.h"
+#include "motion_comp.h"
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+/* 09/29/2000 bring this from mp4def.h */
+// const static int roundtab4[] = {0,1,1,1};
+// const static int roundtab8[] = {0,0,1,1,1,1,1,2};
+/*** 10/30 for TPS */
+// const static int roundtab12[] = {0,0,0,1,1,1,1,1,1,1,2,2};
+/* 10/30 for TPS ***/
+const static int roundtab16[] = {0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2};
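+
+/* roundtab16 implements the rounding used further down when the four 8x8
+   luminance motion vectors are combined into one chrominance vector.  Worked
+   example: if the four x components sum to 20 half-pel units, then
+   dx = PV_SIGN(20) * (roundtab16[20 & 0xF] + ((20 >> 4) << 1))
+      = 1 * (roundtab16[4] + 2) = 3. */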
+
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+/** modified 3 August 2005 to do prediction and put the results in
+video->mblock->pred_block, without adding the residue */
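+
+/* Explanatory note, restating the CBP tests below: for each 8x8 block k
+   (0..3 luma, 4 = Cb, 5 = Cr) bit (5 - k) of CBP decides where the
+   prediction is written.  If the block has coded residue, e.g. (CBP >> 5) & 1
+   for luma block 0, the prediction goes into video->mblock->pred_block so the
+   caller's IDCT can add the residue; otherwise it is written directly into
+   the current frame. */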
+
+void  MBMotionComp(
+    VideoDecData *video,
+    int CBP
+)
+{
+
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    /* Previous Video Object Plane */
+    Vop *prev = video->prevVop;
+
+    /* Current Macroblock (MB) in the VOP */
+    int mbnum = video->mbnum;
+
+    /* Number of MB per data row */
+    int MB_in_width = video->nMBPerRow;
+    int ypos, xpos;
+    PIXEL *c_comp, *c_prev;
+    PIXEL *cu_comp, *cu_prev;
+    PIXEL *cv_comp, *cv_prev;
+    int height, width, pred_width;
+    int imv, mvwidth;
+    int32 offset;
+    uint8 mode;
+    uint8 *pred_block, *pred;
+
+    /* Motion vector (dx,dy) in half-pel resolution */
+    int dx, dy;
+
+    MOT px[4], py[4];
+    int xpred, ypred;
+    int xsum;
+    int round1;
+    /*----------------------------------------------------------------------------
+    ; Function body here
+    ----------------------------------------------------------------------------*/
+    /* Set rounding type */
+    /* change from array to single 09/29/2000 */
+    round1 = (int)(1 - video->currVop->roundingType);
+
+    /* width of luminance data in pixels (x axis) */
+    width = video->width;
+
+    /* height of luminance data in pixels (y axis) */
+    height = video->height;
+
+    /* number of blocks per row */
+    mvwidth = MB_in_width << 1;
+
+    /* starting y position in current MB; origin of MB */
+    ypos = video->mbnum_row << 4 ;
+    /* starting x position in current MB; origin of MB */
+    xpos = video->mbnum_col << 4 ;
+
+    /* offset to (x,y) position in current luminance MB */
+    /* in pixel resolution                              */
+    /* ypos*width -> row, +x -> column */
+    offset = (int32)ypos * width + xpos;
+
+    /* get mode for current MB */
+    mode = video->headerInfo.Mode[mbnum];
+
+    /* block index */
+    /* imv = (xpos/8) + ((ypos/8) * mvwidth) */
+    imv = (offset >> 6) - (xpos >> 6) + (xpos >> 3);
+    if (mode & INTER_1VMASK)
+    {
+        dx = px[0] = px[1] = px[2] = px[3] = video->motX[imv];
+        dy = py[0] = py[1] = py[2] = py[3] = video->motY[imv];
+        if ((dx & 3) == 0)
+        {
+            dx = dx >> 1;
+        }
+        else
+        {
+            /* x component of MV is or'ed for rounding (?) */
+            dx = (dx >> 1) | 1;
+        }
+
+        /* y component of motion vector; divide by 2 to    */
+        /* convert to full-pel resolution.                  */
+        if ((dy & 3) == 0)
+        {
+            dy = dy >> 1;
+        }
+        else
+        {
+            /* y component of MV is or'ed for rounding (?) */
+            dy = (dy >> 1) | 1;
+        }
+    }
+    else
+    {
+        px[0] = video->motX[imv];
+        px[1] = video->motX[imv+1];
+        px[2] = video->motX[imv+mvwidth];
+        px[3] = video->motX[imv+mvwidth+1];
+        xsum = px[0] + px[1] + px[2] + px[3];
+        dx = PV_SIGN(xsum) * (roundtab16[(PV_ABS(xsum)) & 0xF] +
+                              (((PV_ABS(xsum)) >> 4) << 1));
+        py[0] = video->motY[imv];
+        py[1] = video->motY[imv+1];
+        py[2] = video->motY[imv+mvwidth];
+        py[3] = video->motY[imv+mvwidth+1];
+        xsum = py[0] + py[1] + py[2] + py[3];
+        dy = PV_SIGN(xsum) * (roundtab16[(PV_ABS(xsum)) & 0xF] +
+                              (((PV_ABS(xsum)) >> 4) << 1));
+    }
+
+    /* Pointer to previous luminance frame */
+    c_prev  = prev->yChan;
+    if (!c_prev) {
+        ALOGE("b/35269635");
+        android_errorWriteLog(0x534e4554, "35269635");
+        return;
+    }
+
+    pred_block = video->mblock->pred_block;
+
+    /* some blocks have no residue or INTER4V */
+    /*if (mode == MODE_INTER4V)   05/08/15 */
+    /* Motion Compensation for an 8x8 block within a MB */
+    /* (4 MV per MB) */
+
+
+
+    /* Call function that performs luminance prediction */
+    /*      luminance_pred_mode_inter4v(xpos, ypos, px, py, c_prev,
+                    video->mblock->pred_block, width, height,
+                    round1, mvwidth, &xsum, &ysum);*/
+    c_comp = video->currVop->yChan + offset;
+
+
+    xpred = (int)((xpos << 1) + px[0]);
+    ypred = (int)((ypos << 1) + py[0]);
+
+    if ((CBP >> 5)&1)
+    {
+        pred = pred_block;
+        pred_width = 16;
+    }
+    else
+    {
+        pred = c_comp;
+        pred_width = width;
+    }
+
+    /* check whether the MV points outside the frame */
+    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
+            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
+    {   /*****************************/
+        /* (x,y) is inside the frame */
+        /*****************************/
+        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+    }
+    else
+    {   /******************************/
+        /* (x,y) is outside the frame */
+        /******************************/
+        GetPredOutside(xpred, ypred, c_prev,
+                       pred, width, height, round1, pred_width);
+    }
+
+
+    /* Compute prediction values over current luminance MB */
+    /* (blocks 1); add motion vector prior to input;       */
+    /* add 8 to x_pos to advance to next block         */
+    xpred = (int)(((xpos + B_SIZE) << 1) + px[1]);
+    ypred = (int)((ypos << 1) + py[1]);
+
+    if ((CBP >> 4)&1)
+    {
+        pred = pred_block + 8;
+        pred_width = 16;
+    }
+    else
+    {
+        pred = c_comp + 8;
+        pred_width = width;
+    }
+
+    /* check whether the MV points outside the frame */
+    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
+            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
+    {   /*****************************/
+        /* (x,y) is inside the frame */
+        /*****************************/
+        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+    }
+    else
+    {   /******************************/
+        /* (x,y) is outside the frame */
+        /******************************/
+        GetPredOutside(xpred, ypred, c_prev,
+                       pred, width, height, round1, pred_width);
+    }
+
+
+
+    /* Compute prediction values over current luminance MB */
+    /* (block 2); add motion vector prior to input         */
+    /* add 8 to y_pos to advance to block on next row      */
+    xpred = (int)((xpos << 1) + px[2]);
+    ypred = (int)(((ypos + B_SIZE) << 1) + py[2]);
+
+    if ((CBP >> 3)&1)
+    {
+        pred = pred_block + 128;
+        pred_width = 16;
+    }
+    else
+    {
+        pred = c_comp + (width << 3);
+        pred_width = width;
+    }
+
+    /* check whether the MV points outside the frame */
+    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
+            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
+    {   /*****************************/
+        /* (x,y) is inside the frame */
+        /*****************************/
+        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+    }
+    else
+    {   /******************************/
+        /* (x,y) is outside the frame */
+        /******************************/
+        GetPredOutside(xpred, ypred, c_prev,
+                       pred, width, height, round1, pred_width);
+    }
+
+
+
+    /* Compute prediction values over current luminance MB */
+    /* (block 3); add motion vector prior to input;        */
+    /* add 8 to x_pos and y_pos to advance to next block   */
+    /* on next row                         */
+    xpred = (int)(((xpos + B_SIZE) << 1) + px[3]);
+    ypred = (int)(((ypos + B_SIZE) << 1) + py[3]);
+
+    if ((CBP >> 2)&1)
+    {
+        pred = pred_block + 136;
+        pred_width = 16;
+    }
+    else
+    {
+        pred = c_comp + (width << 3) + 8;
+        pred_width = width;
+    }
+
+    /* check whether the MV points outside the frame */
+    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
+            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
+    {   /*****************************/
+        /* (x,y) is inside the frame */
+        /*****************************/
+        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+    }
+    else
+    {   /******************************/
+        /* (x,y) is outside the frame */
+        /******************************/
+        GetPredOutside(xpred, ypred, c_prev,
+                       pred, width, height, round1, pred_width);
+    }
+    /* Call function to set de-blocking and de-ringing */
+    /*   semaphores for luminance                      */
+
+
+
+    /* xpred and ypred calculation for Chrominance is */
+    /* in full-pel resolution.                        */
+
+    /* Chrominance */
+    /* width of chrominance data in pixels (x axis) */
+    width >>= 1;
+
+    /* height of chrominance data in pixels (y axis) */
+    height >>= 1;
+
+    /* Pointer to previous chrominance b frame */
+    cu_prev = prev->uChan;
+
+    /* Pointer to previous chrominance r frame */
+    cv_prev = prev->vChan;
+
+    /* x position in prediction data offset by motion vector */
+    /* xpred calculation for Chrominance is in full-pel      */
+    /* resolution.                                           */
+    xpred = xpos + dx;
+
+    /* y position in prediction data offset by motion vector */
+    /* ypred calculation for Chrominance is in full-pel      */
+    /* resolution.                                           */
+    ypred = ypos + dy;
+
+    cu_comp = video->currVop->uChan + (offset >> 2) + (xpos >> 2);
+    cv_comp = video->currVop->vChan + (offset >> 2) + (xpos >> 2);
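+    /* (offset >> 2) + (xpos >> 2) equals (ypos >> 1)*width + (xpos >> 1)  */
+    /* with width already halved above, i.e. the macroblock's offset in    */
+    /* the chrominance planes; the shifts are exact because xpos and ypos  */
+    /* are multiples of 16.                                                */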
+
+    /* Call function that performs chrominance prediction */
+    /*      chrominance_pred(xpred, ypred, cu_prev, cv_prev,
+            pred_block, width_uv, height_uv,
+            round1);*/
+    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) && ypred >= 0 &&
+            ypred <= ((height << 1) - (2*B_SIZE)))
+    {
+        /*****************************/
+        /* (x,y) is inside the frame */
+        /*****************************/
+        if ((CBP >> 1)&1)
+        {
+            pred = pred_block + 256;
+            pred_width = 16;
+        }
+        else
+        {
+            pred = cu_comp;
+            pred_width = width;
+        }
+
+        /* Compute prediction for Chrominance b (block[4]) */
+        GetPredAdvBTable[ypred&1][xpred&1](cu_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+
+        if (CBP&1)
+        {
+            pred = pred_block + 264;
+            pred_width = 16;
+        }
+        else
+        {
+            pred = cv_comp;
+            pred_width = width;
+        }
+        /* Compute prediction for Chrominance r (block[5]) */
+        GetPredAdvBTable[ypred&1][xpred&1](cv_prev + (xpred >> 1) + ((ypred >> 1)*width),
+                                           pred, width, (pred_width << 1) | round1);
+
+        return ;
+    }
+    else
+    {
+        /******************************/
+        /* (x,y) is outside the frame */
+        /******************************/
+        if ((CBP >> 1)&1)
+        {
+            pred = pred_block + 256;
+            pred_width = 16;
+        }
+        else
+        {
+            pred = cu_comp;
+            pred_width = width;
+        }
+
+        /* Compute prediction for Chrominance b (block[4]) */
+        GetPredOutside(xpred, ypred,    cu_prev,
+                       pred, width, height, round1, pred_width);
+
+        if (CBP&1)
+        {
+            pred = pred_block + 264;
+            pred_width = 16;
+        }
+        else
+        {
+            pred = cv_comp;
+            pred_width = width;
+        }
+
+        /* Compute prediction for Chrominance r (block[5]) */
+        GetPredOutside(xpred, ypred,    cv_prev,
+                       pred, width, height, round1, pred_width);
+
+        return ;
+    }
+
+}
+
+/*** special function for skipped macroblock,  Aug 15, 2005 */
+void  SkippedMBMotionComp(
+    VideoDecData *video
+)
+{
+    Vop *prev = video->prevVop;
+    Vop *comp;
+    int ypos, xpos;
+    PIXEL *c_comp, *c_prev;
+    PIXEL *cu_comp, *cu_prev;
+    PIXEL *cv_comp, *cv_prev;
+    int width, width_uv;
+    int32 offset;
+
+    width = video->width;
+    width_uv  = width >> 1;
+    ypos = video->mbnum_row << 4 ;
+    xpos = video->mbnum_col << 4 ;
+    offset = (int32)ypos * width + xpos;
+
+
+    /* zero motion compensation for previous frame */
+    /*mby*width + mbx;*/
+    c_prev  = prev->yChan;
+    if (!c_prev) {
+        ALOGE("b/35269635");
+        android_errorWriteLog(0x534e4554, "35269635");
+        return;
+    }
+    c_prev += offset;
+
+    /*by*width_uv + bx;*/
+    cu_prev = prev->uChan + (offset >> 2) + (xpos >> 2);
+    /*by*width_uv + bx;*/
+    cv_prev = prev->vChan + (offset >> 2) + (xpos >> 2);
+
+    comp = video->currVop;
+
+    c_comp  = comp->yChan + offset;
+    cu_comp = comp->uChan + (offset >> 2) + (xpos >> 2);
+    cv_comp = comp->vChan + (offset >> 2) + (xpos >> 2);
+
+
+    /* Copy previous reconstructed frame into the current frame */
+    PutSKIPPED_MB(c_comp,  c_prev, width);
+    PutSKIPPED_B(cu_comp, cu_prev, width_uv);
+    PutSKIPPED_B(cv_comp, cv_prev, width_uv);
+
+    /*  10/24/2000 post_processing semaphore generation */
+    /*----------------------------------------------------------------------------
+    ; Return nothing or data or data pointer
+    ----------------------------------------------------------------------------*/
+
+    return;
+}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mb_utils.cpp b/media/codecs/m4v_h263/dec/src/mb_utils.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mb_utils.cpp
rename to media/codecs/m4v_h263/dec/src/mb_utils.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mbtype_mode.h b/media/codecs/m4v_h263/dec/src/mbtype_mode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mbtype_mode.h
rename to media/codecs/m4v_h263/dec/src/mbtype_mode.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/motion_comp.h b/media/codecs/m4v_h263/dec/src/motion_comp.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/motion_comp.h
rename to media/codecs/m4v_h263/dec/src/motion_comp.h
diff --git a/media/codecs/m4v_h263/dec/src/mp4dec_lib.h b/media/codecs/m4v_h263/dec/src/mp4dec_lib.h
new file mode 100644
index 0000000..ce6f9c3
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/mp4dec_lib.h
@@ -0,0 +1,303 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#ifndef _MP4DECLIB_H_
+#define _MP4DECLIB_H_
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+#include "mp4def.h" /* typedef */
+#include "mp4lib_int.h" /* main video structure */
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL VARIABLES REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; SIMPLE TYPEDEF'S
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; ENUMERATED TYPEDEF'S
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; STRUCTURES TYPEDEF'S
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; GLOBAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+    /* defined in pvdec_api.c, these functions are not supposed to be   */
+    /* exposed to programmers outside PacketVideo.  08/15/2000.    */
+    uint VideoDecoderErrorDetected(VideoDecData *video);
+
+#ifdef ENABLE_LOG
+    void m4vdec_dprintf(char *format, ...);
+#define mp4dec_log(message) m4vdec_dprintf(message)
+#else
+#define mp4dec_log(message)
+#endif
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in frame_buffer.c */
+    PV_STATUS FillFrameBufferNew(BitstreamDecVideo *stream);
+    PV_STATUS FillFrameBuffer(BitstreamDecVideo *stream, int short_header);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in dc_ac_pred.c */
+    int cal_dc_scaler(int QP, int type);
+    PV_STATUS PV_DecodePredictedIntraDC(int compnum, BitstreamDecVideo *stream,
+                                        int16 *IntraDC_delta);
+
+    void    doDCACPrediction(VideoDecData *video, int comp, int16 *q_block,
+                             int *direction);
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+    void    doDCACPrediction_I(VideoDecData *video, int comp, int16 *q_block);
+#endif
+    /*--------------------------------------------------------------------------*/
+    /* defined in block_idct.c */
+    void MBlockIDCTAdd(VideoDecData *video, int nz_coefs[]);
+
+    void BlockIDCT(uint8 *dst, uint8 *pred, int16 *blk, int width, int nzcoefs,
+                   uint8 *bitmapcol, uint8 bitmaprow);
+
+    void MBlockIDCT(VideoDecData *video);
+    void BlockIDCT_intra(MacroBlock *mblock, PIXEL *c_comp, int comp, int width_offset);
+    /*--------------------------------------------------------------------------*/
+    /* defined in combined_decode.c */
+    PV_STATUS DecodeFrameCombinedMode(VideoDecData *video);
+    PV_STATUS GetMBheader(VideoDecData *video, int16 *QP);
+    PV_STATUS GetMBData(VideoDecData *video);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in datapart_decode.c */
+    PV_STATUS DecodeFrameDataPartMode(VideoDecData *video);
+    PV_STATUS GetMBheaderDataPart_DQUANT_DC(VideoDecData *video, int16 *QP);
+    PV_STATUS GetMBheaderDataPart_P(VideoDecData *video);
+    PV_STATUS DecodeDataPart_I_VideoPacket(VideoDecData *video, int slice_counter);
+    PV_STATUS DecodeDataPart_P_VideoPacket(VideoDecData *video, int slice_counter);
+    PV_STATUS GetMBData_DataPart(VideoDecData *video);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in packet_util.c */
+    PV_STATUS PV_ReadVideoPacketHeader(VideoDecData *video, int *next_MB);
+    PV_STATUS RecoverPacketError(BitstreamDecVideo *stream, int marker_length, int32 *nextVop);
+    PV_STATUS RecoverGOBError(BitstreamDecVideo *stream, int marker_length, int32 *vopPos);
+    PV_STATUS PV_GobHeader(VideoDecData *video);
+#ifdef PV_ANNEX_IJKT_SUPPORT
+    PV_STATUS PV_H263SliceHeader(VideoDecData *videoInt, int *next_MB);
+#endif
+    /*--------------------------------------------------------------------------*/
+    /* defined in motion_comp.c */
+    void MBMotionComp(VideoDecData *video, int CBP);
+    void  SkippedMBMotionComp(VideoDecData *video);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in chrominance_pred.c */
+    void chrominance_pred(
+        int xpred,          /* i */
+        int ypred,          /* i */
+        uint8 *cu_prev,     /* i */
+        uint8 *cv_prev,     /* i */
+        uint8 *pred_block,  /* i */
+        int width_uv,       /* i */
+        int height_uv,      /* i */
+        int round1
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in luminance_pred_mode_inter.c */
+    void luminance_pred_mode_inter(
+        int xpred,          /* i */
+        int ypred,          /* i */
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,  /* i */
+        int width,          /* i */
+        int height,         /* i */
+        int round1
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in luminance_pred_mode_inter4v.c */
+    void luminance_pred_mode_inter4v(
+        int xpos,           /* i */
+        int ypos,           /* i */
+        MOT *px,            /* i */
+        MOT *py,            /* i */
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,  /* i */
+        int width,          /* i */
+        int height,         /* i */
+        int round1,         /* i */
+        int mvwidth,            /* i */
+        int *xsum_ptr,          /* i/o */
+        int *ysum_ptr           /* i/o */
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in pp_semaphore_chroma_inter.c */
+    /*--------------------------------------------------------------------------*/
+    /* defined in get_pred_adv_mb_add.c */
+    int GetPredAdvancedMB(
+        int xpos,
+        int ypos,
+        uint8 *c_prev,
+        uint8 *pred_block,
+        int width,
+        int rnd1
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in get_pred_adv_b_add.c */
+    int GetPredAdvancedBy0x0(
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,      /* i */
+        int width,      /* i */
+        int pred_width_rnd /* i */
+    );
+
+    int GetPredAdvancedBy0x1(
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,      /* i */
+        int width,      /* i */
+        int pred_width_rnd /* i */
+    );
+
+    int GetPredAdvancedBy1x0(
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,      /* i */
+        int width,      /* i */
+        int pred_width_rnd /* i */
+    );
+
+    int GetPredAdvancedBy1x1(
+        uint8 *c_prev,      /* i */
+        uint8 *pred_block,      /* i */
+        int width,      /* i */
+        int pred_width_rnd /* i */
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in get_pred_outside.c */
+    int GetPredOutside(
+        int xpos,
+        int ypos,
+        uint8 *c_prev,
+        uint8 *pred_block,
+        int width,
+        int height,
+        int rnd1,
+        int pred_width
+    );
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in find_pmvsErrRes.c */
+    void mv_prediction(VideoDecData *video, int block, MOT *mvx, MOT *mvy);
+
+    /*--------------------------------------------------------------------------*/
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in mb_utils.c */
+    void Copy_MB_into_Vop(uint8 *comp, int yChan[][NCOEFF_BLOCK], int width);
+    void Copy_B_into_Vop(uint8 *comp, int cChan[], int width);
+    void PutSKIPPED_MB(uint8 *comp, uint8 *c_prev, int width);
+    void PutSKIPPED_B(uint8 *comp, uint8 *c_prev, int width);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in vop.c */
+    PV_STATUS DecodeGOVHeader(BitstreamDecVideo *stream, uint32 *time_base);
+    PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer);
+    PV_STATUS DecodeVOPHeader(VideoDecData *video, Vop *currVop, Bool use_ext_timestamp);
+    PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop);
+    PV_STATUS PV_DecodeVop(VideoDecData *video);
+    uint32 CalcVopDisplayTime(Vol *currVol, Vop *currVop, int shortVideoHeader);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in post_proc.c */
+#ifdef PV_ANNEX_IJKT_SUPPORT
+    void H263_Deblock(uint8 *rec,   int width, int height, int16 *QP_store, uint8 *mode, int chr, int T);
+#endif
+    int  PostProcSemaphore(int16 *q_block);
+    void PostFilter(VideoDecData *video, int filer_type, uint8 *output);
+    void FindMaxMin(uint8 *ptr, int *min, int *max, int incr);
+    void DeringAdaptiveSmoothMMX(uint8 *img, int incr, int thres, int mxdf);
+    void AdaptiveSmooth_NoMMX(uint8 *Rec_Y, int v0, int h0, int v_blk, int h_blk,
+                              int thr, int width, int max_diff);
+    void Deringing_Luma(uint8 *Rec_Y, int width, int height, int16 *QP_store,
+                        int Combined, uint8 *pp_mod);
+    void Deringing_Chroma(uint8 *Rec_C, int width, int height, int16 *QP_store,
+                          int Combined, uint8 *pp_mod);
+    void CombinedHorzVertFilter(uint8 *rec, int width, int height, int16 *QP_store,
+                                int chr, uint8 *pp_mod);
+    void CombinedHorzVertFilter_NoSoftDeblocking(uint8 *rec, int width, int height, int16 *QP_store,
+            int chr, uint8 *pp_mod);
+    void CombinedHorzVertRingFilter(uint8 *rec, int width, int height,
+                                    int16 *QP_store, int chr, uint8 *pp_mod);
+
+    /*--------------------------------------------------------------------------*/
+    /* defined in conceal.c */
+    void ConcealTexture_I(VideoDecData *video, int32 startFirstPartition, int mb_start, int mb_stop,
+                          int slice_counter);
+    void ConcealTexture_P(VideoDecData *video, int mb_start, int mb_stop,
+                          int slice_counter);
+    void ConcealPacket(VideoDecData *video, int mb_start, int mb_stop,
+                       int slice_counter);
+    void CopyVopMB(Vop *curr, uint8 *prev, int mbnum, int width, int height);
+
+    /* defined in vlc_dequant.c,  09/18/2000 */
+#ifdef PV_SUPPORT_MAIN_PROFILE
+    int VlcDequantMpegIntraBlock(void *video, int comp, int switched,
+                                 uint8 *bitmapcol, uint8 *bitmaprow);
+    int VlcDequantMpegInterBlock(void *video, int comp,
+                                 uint8 *bitmapcol, uint8 *bitmaprow);
+#endif
+    int VlcDequantH263IntraBlock(VideoDecData *video, int comp, int switched,
+                                 uint8 *bitmapcol, uint8 *bitmaprow);
+    int VlcDequantH263IntraBlock_SH(VideoDecData *video, int comp,
+                                    uint8 *bitmapcol, uint8 *bitmaprow);
+    int VlcDequantH263InterBlock(VideoDecData *video, int comp,
+                                 uint8 *bitmapcol, uint8 *bitmaprow);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+/*----------------------------------------------------------------------------
+; END
+----------------------------------------------------------------------------*/
+#endif
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4def.h b/media/codecs/m4v_h263/dec/src/mp4def.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mp4def.h
rename to media/codecs/m4v_h263/dec/src/mp4def.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4lib_int.h b/media/codecs/m4v_h263/dec/src/mp4lib_int.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/mp4lib_int.h
rename to media/codecs/m4v_h263/dec/src/mp4lib_int.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/packet_util.cpp b/media/codecs/m4v_h263/dec/src/packet_util.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/packet_util.cpp
rename to media/codecs/m4v_h263/dec/src/packet_util.cpp
diff --git a/media/codecs/m4v_h263/dec/src/post_filter.cpp b/media/codecs/m4v_h263/dec/src/post_filter.cpp
new file mode 100644
index 0000000..37a03a0
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/post_filter.cpp
@@ -0,0 +1,439 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+
+#include    "mp4dec_lib.h"
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+#include    "motion_comp.h"
+#include "mbtype_mode.h"
+const static int STRENGTH_tab[] = {0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12};
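+/* Deblocking filter STRENGTH as a function of the macroblock quantizer */
+/* (indexed by QP), per H.263 Annex J.                                  */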
+#endif
+
+
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+void H263_Deblock(uint8 *rec,
+                  int width,
+                  int height,
+                  int16 *QP_store,
+                  uint8 *mode,
+                  int chr, int annex_T)
+{
+    /*----------------------------------------------------------------------------
+    ; Define all local variables
+    ----------------------------------------------------------------------------*/
+    int i, j, k;
+    uint8 *rec_y;
+    int tmpvar;
+    int mbnum, strength, A_D, d1_2, d1, d2, A, B, C, D, b_size;
+    int d, offset, nMBPerRow, nMBPerCol, width2 = (width << 1);
+    /* MAKE SURE I-VOP INTRA MACROBLOCKS ARE SET TO NON-SKIPPED MODE*/
+    mbnum = 0;
+
+    if (chr)
+    {
+        nMBPerRow = width >> 3;
+        nMBPerCol = height >> 3;
+        b_size = 8;
+    }
+    else
+    {
+        nMBPerRow = width >> 4;
+        nMBPerCol = height >> 4;
+        b_size = 16;
+    }
+
+
+    /********************************* VERTICAL FILTERING ****************************/
+    /* vertical filtering of mid sections; no need to check neighboring QPs, etc. */
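+    /* For each edge position, A and B are the two samples on one side of  */
+    /* the block boundary and C and D the two on the other.  d1 is d/8     */
+    /* passed through the Annex J up-down ramp, so corrections larger than */
+    /* twice the STRENGTH are suppressed; B and C move toward each other   */
+    /* by d1 (clipped to 0..255), while A and D receive a secondary        */
+    /* correction d2 = (A-D)/4 limited to +/- d1/2.                        */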
+    if (!chr)
+    {
+        rec_y = rec + (width << 3);
+        for (i = 0; i < (height >> 4); i++)
+        {
+            for (j = 0; j < (width >> 4); j++)
+            {
+                if (mode[mbnum] != MODE_SKIPPED)
+                {
+                    k = 16;
+                    strength = STRENGTH_tab[QP_store[mbnum]];
+                    while (k--)
+                    {
+                        A =  *(rec_y - width2);
+                        D = *(rec_y + width);
+                        A_D = A - D;
+                        C = *rec_y;
+                        B = *(rec_y - width);
+                        d = (((C - B) << 2) + A_D);
+
+                        if (d < 0)
+                        {
+                            d1 = -(-d >> 3);
+                            if (d1 < -(strength << 1))
+                            {
+                                d1 = 0;
+                            }
+                            else if (d1 < -strength)
+                            {
+                                d1 = -d1 - (strength << 1);
+                            }
+                            d1_2 = -d1 >> 1;
+                        }
+                        else
+                        {
+                            d1 = d >> 3;
+                            if (d1 > (strength << 1))
+                            {
+                                d1 = 0;
+                            }
+                            else if (d1 > strength)
+                            {
+                                d1 = (strength << 1) - d1;
+                            }
+                            d1_2 = d1 >> 1;
+                        }
+
+                        if (A_D < 0)
+                        {
+                            d2 = -(-A_D >> 2);
+                            if (d2 < -d1_2)
+                            {
+                                d2 = -d1_2;
+                            }
+                        }
+                        else
+                        {
+                            d2 = A_D >> 2;
+                            if (d2 > d1_2)
+                            {
+                                d2 = d1_2;
+                            }
+                        }
+
+                        *(rec_y - width2) = A - d2;
+                        tmpvar = B + d1;
+                        CLIP_RESULT(tmpvar)
+                        *(rec_y - width) = tmpvar;
+                        tmpvar = C - d1;
+                        CLIP_RESULT(tmpvar)
+                        *rec_y = tmpvar;
+                        *(rec_y + width) = D + d2;
+                        rec_y++;
+                    }
+                }
+                else
+                {
+                    rec_y += b_size;
+                }
+                mbnum++;
+            }
+            rec_y += (15 * width);
+
+        }
+    }
+
+    /* VERTICAL boundary blocks */
+
+
+    rec_y = rec + width * b_size;
+
+    mbnum = nMBPerRow;
+    for (i = 0; i < nMBPerCol - 1; i++)
+    {
+        for (j = 0; j < nMBPerRow; j++)
+        {
+            if (mode[mbnum] != MODE_SKIPPED || mode[mbnum - nMBPerRow] != MODE_SKIPPED)
+            {
+                k = b_size;
+                if (mode[mbnum] != MODE_SKIPPED)
+                {
+                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum]] : QP_store[mbnum])];
+                }
+                else
+                {
+                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum - nMBPerRow]] : QP_store[mbnum - nMBPerRow])];
+                }
+
+                while (k--)
+                {
+                    A =  *(rec_y - width2);
+                    D =  *(rec_y + width);
+                    A_D = A - D;
+                    C = *rec_y;
+                    B = *(rec_y - width);
+                    d = (((C - B) << 2) + A_D);
+
+                    if (d < 0)
+                    {
+                        d1 = -(-d >> 3);
+                        if (d1 < -(strength << 1))
+                        {
+                            d1 = 0;
+                        }
+                        else if (d1 < -strength)
+                        {
+                            d1 = -d1 - (strength << 1);
+                        }
+                        d1_2 = -d1 >> 1;
+                    }
+                    else
+                    {
+                        d1 = d >> 3;
+                        if (d1 > (strength << 1))
+                        {
+                            d1 = 0;
+                        }
+                        else if (d1 > strength)
+                        {
+                            d1 = (strength << 1) - d1;
+                        }
+                        d1_2 = d1 >> 1;
+                    }
+
+                    if (A_D < 0)
+                    {
+                        d2 = -(-A_D >> 2);
+                        if (d2 < -d1_2)
+                        {
+                            d2 = -d1_2;
+                        }
+                    }
+                    else
+                    {
+                        d2 = A_D >> 2;
+                        if (d2 > d1_2)
+                        {
+                            d2 = d1_2;
+                        }
+                    }
+
+                    *(rec_y - width2) = A - d2;
+                    tmpvar = B + d1;
+                    CLIP_RESULT(tmpvar)
+                    *(rec_y - width) = tmpvar;
+                    tmpvar = C - d1;
+                    CLIP_RESULT(tmpvar)
+                    *rec_y = tmpvar;
+                    *(rec_y + width) = D + d2;
+                    rec_y++;
+                }
+            }
+            else
+            {
+                rec_y += b_size;
+            }
+            mbnum++;
+        }
+        rec_y += ((b_size - 1) * width);
+
+    }
+
+
+    /***************************HORIZONTAL FILTERING ********************************************/
+    mbnum = 0;
+    /* HORIZONTAL INNER */
+    if (!chr)
+    {
+        rec_y = rec + 8;
+        offset = width * b_size - b_size;
+
+        for (i = 0; i < nMBPerCol; i++)
+        {
+            for (j = 0; j < nMBPerRow; j++)
+            {
+                if (mode[mbnum] != MODE_SKIPPED)
+                {
+                    k = 16;
+                    strength = STRENGTH_tab[QP_store[mbnum]];
+                    while (k--)
+                    {
+                        A =  *(rec_y - 2);
+                        D =  *(rec_y + 1);
+                        A_D = A - D;
+                        C = *rec_y;
+                        B = *(rec_y - 1);
+                        d = (((C - B) << 2) + A_D);
+
+                        if (d < 0)
+                        {
+                            d1 = -(-d >> 3);
+                            if (d1 < -(strength << 1))
+                            {
+                                d1 = 0;
+                            }
+                            else if (d1 < -strength)
+                            {
+                                d1 = -d1 - (strength << 1);
+                            }
+                            d1_2 = -d1 >> 1;
+                        }
+                        else
+                        {
+                            d1 = d >> 3;
+                            if (d1 > (strength << 1))
+                            {
+                                d1 = 0;
+                            }
+                            else if (d1 > strength)
+                            {
+                                d1 = (strength << 1) - d1;
+                            }
+                            d1_2 = d1 >> 1;
+                        }
+
+                        if (A_D < 0)
+                        {
+                            d2 = -(-A_D >> 2);
+                            if (d2 < -d1_2)
+                            {
+                                d2 = -d1_2;
+                            }
+                        }
+                        else
+                        {
+                            d2 = A_D >> 2;
+                            if (d2 > d1_2)
+                            {
+                                d2 = d1_2;
+                            }
+                        }
+
+                        *(rec_y - 2) = A - d2;
+                        tmpvar = B + d1;
+                        CLIP_RESULT(tmpvar)
+                        *(rec_y - 1) = tmpvar;
+                        tmpvar = C - d1;
+                        CLIP_RESULT(tmpvar)
+                        *rec_y = tmpvar;
+                        *(rec_y + 1) = D + d2;
+                        rec_y += width;
+                    }
+                    rec_y -= offset;
+                }
+                else
+                {
+                    rec_y += b_size;
+                }
+                mbnum++;
+            }
+            rec_y += (15 * width);
+
+        }
+    }
+
+
+
+    /* HORIZONTAL EDGE */
+    rec_y = rec + b_size;
+    offset = width * b_size - b_size;
+    mbnum = 1;
+    for (i = 0; i < nMBPerCol; i++)
+    {
+        for (j = 0; j < nMBPerRow - 1; j++)
+        {
+            if (mode[mbnum] != MODE_SKIPPED || mode[mbnum-1] != MODE_SKIPPED)
+            {
+                k = b_size;
+                if (mode[mbnum] != MODE_SKIPPED)
+                {
+                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum]] : QP_store[mbnum])];
+                }
+                else
+                {
+                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum - 1]] : QP_store[mbnum - 1])];
+                }
+
+                while (k--)
+                {
+                    A =  *(rec_y - 2);
+                    D =  *(rec_y + 1);
+                    A_D = A - D;
+                    C = *rec_y;
+                    B = *(rec_y - 1);
+                    d = (((C - B) << 2) + A_D);
+
+                    if (d < 0)
+                    {
+                        d1 = -(-d >> 3);
+                        if (d1 < -(strength << 1))
+                        {
+                            d1 = 0;
+                        }
+                        else if (d1 < -strength)
+                        {
+                            d1 = -d1 - (strength << 1);
+                        }
+                        d1_2 = -d1 >> 1;
+                    }
+                    else
+                    {
+                        d1 = d >> 3;
+                        if (d1 > (strength << 1))
+                        {
+                            d1 = 0;
+                        }
+                        else if (d1 > strength)
+                        {
+                            d1 = (strength << 1) - d1;
+                        }
+                        d1_2 = d1 >> 1;
+                    }
+
+                    if (A_D < 0)
+                    {
+                        d2 = -(-A_D >> 2);
+                        if (d2 < -d1_2)
+                        {
+                            d2 = -d1_2;
+                        }
+                    }
+                    else
+                    {
+                        d2 = A_D >> 2;
+                        if (d2 > d1_2)
+                        {
+                            d2 = d1_2;
+                        }
+                    }
+
+                    *(rec_y - 2) = A - d2;
+                    tmpvar = B + d1;
+                    CLIP_RESULT(tmpvar)
+                    *(rec_y - 1) = tmpvar;
+                    tmpvar = C - d1;
+                    CLIP_RESULT(tmpvar)
+                    *rec_y = tmpvar;
+                    *(rec_y + 1) = D + d2;
+                    rec_y += width;
+                }
+                rec_y -= offset;
+            }
+            else
+            {
+                rec_y += b_size;
+            }
+            mbnum++;
+        }
+        rec_y += ((width * (b_size - 1)) + b_size);
+        mbnum++;
+    }
+
+    return;
+}
+#endif
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc.h b/media/codecs/m4v_h263/dec/src/post_proc.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/post_proc.h
rename to media/codecs/m4v_h263/dec/src/post_proc.h
diff --git a/media/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/codecs/m4v_h263/dec/src/pvdec_api.cpp
new file mode 100644
index 0000000..b0828e4
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/src/pvdec_api.cpp
@@ -0,0 +1,1687 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#define LOG_TAG "pvdec_api"
+#include <log/log.h>
+#include "mp4dec_lib.h"
+#include "vlc_decode.h"
+#include "bitstream.h"
+
+#ifndef INT32_MAX
+#define INT32_MAX 0x7fffffff
+#endif
+
+#ifndef SIZE_MAX
+#define SIZE_MAX ((size_t) -1)
+#endif
+
+#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
+
+#ifdef DEC_INTERNAL_MEMORY_OPT
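+/* With DEC_INTERNAL_MEMORY_OPT the decoder draws its working state from   */
+/* the statically allocated, QCIF-sized buffers below instead of           */
+/* allocating from the heap with oscl_malloc.                              */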
+#define QCIF_MBS 99
+#define QCIF_BS (4*QCIF_MBS)
+#define QCIF_MB_ROWS 11
+extern uint8                IMEM_sliceNo[QCIF_MBS];
+extern uint8                IMEM_acPredFlag[QCIF_MBS];
+extern uint8                IMEM_headerInfo_Mode[QCIF_MBS];
+extern uint8                IMEM_headerInfo_CBP[QCIF_MBS];
+extern int                  IMEM_headerInfo_QPMB[QCIF_MBS];
+extern MacroBlock           IMEM_mblock;
+extern MOT                  IMEM_motX[QCIF_BS];
+extern MOT                  IMEM_motY[QCIF_BS];
+extern BitstreamDecVideo    IMEM_BitstreamDecVideo[4];
+extern typeDCStore          IMEM_predDC[QCIF_MBS];
+extern typeDCACStore        IMEM_predDCAC_col[QCIF_MB_ROWS+1];
+
+extern VideoDecData         IMEM_VideoDecData[1];
+extern Vop                  IMEM_currVop[1];
+extern Vop                  IMEM_prevVop[1];
+extern PIXEL                IMEM_currVop_yChan[QCIF_MBS*128*3];
+extern PIXEL                IMEM_prevVop_yChan[QCIF_MBS*128*3];
+extern uint8                IMEM_pstprcTypCur[6*QCIF_MBS];
+extern uint8                IMEM_pstprcTypPrv[6*QCIF_MBS];
+
+
+extern Vop                  IMEM_vopHEADER[2];
+extern Vol                  IMEM_VOL[2];
+extern Vop                  IMEM_vopHeader[2][1];
+extern Vol                  IMEM_vol[2][1];
+
+#endif
+
+/* ======================================================================== */
+/*  Function : PVInitVideoDecoder()                                         */
+/*  Date     : 04/11/2000, 08/29/2000                                       */
+/*  Purpose  : Initialization of the MPEG-4 video decoder library.          */
+/*             The return type is Bool instead of PV_STATUS because         */
+/*             we don't want to expose PV_STATUS to (outside) programmers   */
+/*             that use our decoder library SDK.                            */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if successful, PV_FALSE if failed.                  */
+/*  Modified :                                                              */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVInitVideoDecoder(VideoDecControls *decCtrl, uint8 *volbuf[],
+                                        int32 *volbuf_size, int nLayers, int width, int height, MP4DecodingMode mode)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    Bool status = PV_TRUE;
+    int idx;
+    BitstreamDecVideo *stream;
+
+
+    oscl_memset(decCtrl, 0, sizeof(VideoDecControls)); /* fix a size bug.   03/28/2001 */
+    decCtrl->nLayers = nLayers;
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        decCtrl->volbuf[idx] = volbuf[idx];
+        decCtrl->volbuf_size[idx] = volbuf_size[idx];
+    }
+
+    /* memory allocation & initialization */
+#ifdef DEC_INTERNAL_MEMORY_OPT
+    video = IMEM_VideoDecData;
+#else
+    video = (VideoDecData *) oscl_malloc(sizeof(VideoDecData));
+#endif
+    if (video != NULL)
+    {
+        oscl_memset(video, 0, sizeof(VideoDecData));
+        video->memoryUsage = sizeof(VideoDecData);
+        video->numberOfLayers = nLayers;
+#ifdef DEC_INTERNAL_MEMORY_OPT
+        video->vol = (Vol **) IMEM_VOL;
+#else
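+        /* Guard the nLayers * sizeof(Vol *) product against size_t        */
+        /* overflow before it is handed to oscl_malloc below.              */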
+        if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
+            status = PV_FALSE;
+            oscl_free(video);
+            goto fail;
+        }
+
+        video->vol = (Vol **) oscl_malloc(nLayers * sizeof(Vol *));
+#endif
+        if (video->vol == NULL) status = PV_FALSE;
+        video->memoryUsage += nLayers * sizeof(Vol *);
+
+        /* be sure not to leak any previous state */
+        PVCleanUpVideoDecoder(decCtrl);
+        /* we need to setup this pointer for the application to */
+        /*    pass it around.                                   */
+        decCtrl->videoDecoderData = (void *) video;
+        video->videoDecControls = decCtrl;  /* yes, we have a cyclic */
+        /* reference here :)    */
+
+        /* Allocating Vop space, this has to change when we add */
+        /*    spatial scalability to the decoder                */
+#ifdef DEC_INTERNAL_MEMORY_OPT
+        video->currVop = IMEM_currVop;
+        if (video->currVop == NULL) status = PV_FALSE;
+        else oscl_memset(video->currVop, 0, sizeof(Vop));
+        video->prevVop = IMEM_prevVop;
+        if (video->prevVop == NULL) status = PV_FALSE;
+        else oscl_memset(video->prevVop, 0, sizeof(Vop));
+        video->memoryUsage += (sizeof(Vop) * 2);
+        video->vopHeader = (Vop **) IMEM_vopHEADER;
+#else
+
+        video->currVop = (Vop *) oscl_malloc(sizeof(Vop));
+        if (video->currVop == NULL) status = PV_FALSE;
+        else oscl_memset(video->currVop, 0, sizeof(Vop));
+        video->prevVop = (Vop *) oscl_malloc(sizeof(Vop));
+        if (video->prevVop == NULL) status = PV_FALSE;
+        else oscl_memset(video->prevVop, 0, sizeof(Vop));
+        video->memoryUsage += (sizeof(Vop) * 2);
+
+        if ((size_t)nLayers > SIZE_MAX / sizeof(Vop *)) {
+            status = PV_FALSE;
+            goto fail;
+        }
+
+        video->vopHeader = (Vop **) oscl_malloc(sizeof(Vop *) * nLayers);
+#endif
+        if (video->vopHeader == NULL) status = PV_FALSE;
+        else oscl_memset(video->vopHeader, 0, sizeof(Vop *)*nLayers);
+        video->memoryUsage += (sizeof(Vop *) * nLayers);
+
+        video->initialized = PV_FALSE;
+        /* Decode the header to get all information to allocate data */
+        if (status == PV_TRUE)
+        {
+            /* initialize decoded frame counter.   04/24/2001 */
+            video->frame_idx = -1;
+
+
+            for (idx = 0; idx < nLayers; idx++)
+            {
+
+#ifdef DEC_INTERNAL_MEMORY_OPT
+                video->vopHeader[idx] = IMEM_vopHeader[idx];
+#else
+                video->vopHeader[idx] = (Vop *) oscl_malloc(sizeof(Vop));
+#endif
+                if (video->vopHeader[idx] == NULL)
+                {
+                    status = PV_FALSE;
+                    break;
+                }
+                else
+                {
+                    oscl_memset(video->vopHeader[idx], 0, sizeof(Vop));
+                    video->vopHeader[idx]->timeStamp = 0;
+                    video->memoryUsage += (sizeof(Vop));
+                }
+#ifdef DEC_INTERNAL_MEMORY_OPT
+                video->vol[idx] = IMEM_vol[idx];
+                video->memoryUsage += sizeof(Vol);
+                if (video->vol[idx] == NULL) status = PV_FALSE;
+                else oscl_memset(video->vol[idx], 0, sizeof(Vol));
+                stream = IMEM_BitstreamDecVideo;
+#else
+                video->vol[idx] = (Vol *) oscl_malloc(sizeof(Vol));
+                if (video->vol[idx] == NULL)
+                {
+                    status = PV_FALSE;
+                    break;
+                }
+                else
+                {
+                    video->memoryUsage += sizeof(Vol);
+                    oscl_memset(video->vol[idx], 0, sizeof(Vol));
+                }
+
+                stream = (BitstreamDecVideo *) oscl_malloc(sizeof(BitstreamDecVideo));
+#endif
+                video->memoryUsage += sizeof(BitstreamDecVideo);
+                if (stream == NULL)
+                {
+                    status = PV_FALSE;
+                    break;
+                }
+                else
+                {
+                    int32 buffer_size;
+                    oscl_memset(stream, 0, sizeof(BitstreamDecVideo));
+                    if ((buffer_size = BitstreamOpen(stream, idx)) < 0)
+                    {
+                        mp4dec_log("InitVideoDecoder(): Can't allocate bitstream buffer.\n");
+                        status = PV_FALSE;
+                        break;
+                    }
+                    video->memoryUsage += buffer_size;
+                    video->vol[idx]->bitstream = stream;
+                    video->vol[idx]->volID = idx;
+                    video->vol[idx]->timeInc_offset = 0;  /*  11/12/01 */
+                    video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader;
+                    video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader;
+                    if (mode == MPEG4_MODE)
+                    {
+                        /* Set up VOL header bitstream for frame-based decoding.  08/30/2000 */
+                        BitstreamReset(stream, decCtrl->volbuf[idx], decCtrl->volbuf_size[idx]);
+
+                        switch (DecodeVOLHeader(video, idx))
+                        {
+                            case PV_SUCCESS :
+                                if (status == PV_TRUE)
+                                    status = PV_TRUE;   /* preserve any earlier failure: a bad first layer must still return PV_FALSE even if this layer is good */
+                                else
+                                    status = PV_FALSE;
+                                break;
+#ifdef PV_TOLERATE_VOL_ERRORS
+                            case PV_BAD_VOLHEADER:
+                                status = PV_TRUE;
+                                break;
+#endif
+                            default :
+                                status = PV_FALSE;
+                                break;
+                        }
+
+                    }
+                    else
+                    {
+                        video->shortVideoHeader = PV_TRUE;
+                    }
+
+                    if (video->shortVideoHeader == PV_TRUE)
+                    {
+                        mode = H263_MODE;
+                        /* Set max width and height.  In H.263 mode, we use    */
+                        /*  volbuf_size[0] to pass in width and volbuf_size[1] */
+                        /*  to pass in height.                    04/23/2001 */
+                        video->prevVop->temporalRef = 0; /*  11/12/01 */
+                        /* Compute some convenience variables:   04/23/2001 */
+                        video->vol[idx]->quantType = 0;
+                        video->vol[idx]->quantPrecision = 5;
+                        video->vol[idx]->errorResDisable = 1;
+                        video->vol[idx]->dataPartitioning = 0;
+                        video->vol[idx]->useReverseVLC = 0;
+                        video->intra_acdcPredDisable = 1;
+                        video->vol[idx]->scalability = 0;
+
+                        video->displayWidth = width;
+                        video->displayHeight = height;
+                        video->width = (width + 15) & -16;
+                        video->height = (height + 15) & -16;
+                        video->size = (int32)video->width * video->height;
+
+#ifdef PV_ANNEX_IJKT_SUPPORT
+                        video->modified_quant = 0;
+                        video->advanced_INTRA = 0;
+                        video->deblocking = 0;
+                        video->slice_structure = 0;
+#endif
+                    }
+
+                }
+            }
+
+        }
+        if (status != PV_FALSE)
+        {
+            status = PVAllocVideoData(decCtrl, width, height, nLayers);
+            video->initialized = PV_TRUE;
+        }
+    }
+    else
+    {
+        status = PV_FALSE;
+    }
+
+fail:
+    if (status == PV_FALSE) PVCleanUpVideoDecoder(decCtrl);
+
+    return status;
+}
+
+Bool PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, int nLayers)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    Bool status = PV_TRUE;
+    int nTotalMB;
+    int nMBPerRow;
+    int32 size;
+
+    if (video->shortVideoHeader == PV_TRUE)
+    {
+        video->displayWidth = width;
+        video->displayHeight = height;
+        video->width = (width + 15) & -16;
+        video->height = (height + 15) & -16;
+
+        video->nMBPerRow =
+            video->nMBinGOB  = video->width / MB_SIZE;
+        video->nMBPerCol =
+            video->nGOBinVop = video->height / MB_SIZE;
+        video->nTotalMB =
+            video->nMBPerRow * video->nMBPerCol;
+    }
+
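+    /* Reject dimensions whose luminance plane byte count would overflow   */
+    /* a 32-bit signed size; the product feeds the allocations below.      */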
+    if (((uint64_t)video->width * video->height) > (uint64_t)INT32_MAX / sizeof(PIXEL)) {
+        return PV_FALSE;
+    }
+
+    size = (int32)sizeof(PIXEL) * video->width * video->height;
+#ifdef PV_MEMORY_POOL
+    decCtrl->size = size;
+#else
+#ifdef DEC_INTERNAL_MEMORY_OPT
+    video->currVop->yChan = IMEM_currVop_yChan; /* Allocate memory for all VOP OKA 3/2/1*/
+    if (video->currVop->yChan == NULL) status = PV_FALSE;
+    else {
+        video->currVop->uChan = video->currVop->yChan + size;
+        video->currVop->vChan = video->currVop->uChan + (size >> 2);
+    }
+
+    video->prevVop->yChan = IMEM_prevVop_yChan; /* Allocate memory for all VOP OKA 3/2/1*/
+    if (video->prevVop->yChan == NULL) status = PV_FALSE;
+    else {
+        video->prevVop->uChan = video->prevVop->yChan + size;
+        video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
+    }
+#else
+    if (size > INT32_MAX / 3) {
+        return PV_FALSE;
+    }
+    video->currVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
+    if (video->currVop->yChan == NULL) status = PV_FALSE;
+    else {
+        video->currVop->uChan = video->currVop->yChan + size;
+        video->currVop->vChan = video->currVop->uChan + (size >> 2);
+    }
+    video->prevVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
+    if (video->prevVop->yChan == NULL) status = PV_FALSE;
+    else {
+        video->prevVop->uChan = video->prevVop->yChan + size;
+        video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
+    }
+#endif
+    video->memoryUsage += (size * 3);
+#endif   // PV_MEMORY_POOL
+    /* Note that baseVop, enhcVop is only used to hold enhancement */
+    /*    layer header information.                  05/04/2000  */
+    if (nLayers > 1)
+    {
+        video->prevEnhcVop = (Vop *) oscl_malloc(sizeof(Vop));
+        video->memoryUsage += (sizeof(Vop));
+        if (video->prevEnhcVop == NULL)
+        {
+            status = PV_FALSE;
+        }
+        else
+        {
+            oscl_memset(video->prevEnhcVop, 0, sizeof(Vop));
+#ifndef PV_MEMORY_POOL
+            if (size > INT32_MAX / 3) {
+                return PV_FALSE;
+            }
+
+            video->prevEnhcVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
+            if (video->prevEnhcVop->yChan == NULL) status = PV_FALSE;
+            else {
+                video->prevEnhcVop->uChan = video->prevEnhcVop->yChan + size;
+                video->prevEnhcVop->vChan = video->prevEnhcVop->uChan + (size >> 2);
+            }
+            video->memoryUsage += (3 * size / 2);
+#endif
+        }
+    }
+
+    /* Allocating space for slices, AC prediction flag, and */
+    /*    AC/DC prediction storage */
+    nTotalMB = video->nTotalMB;
+    nMBPerRow = video->nMBPerRow;
+
+#ifdef DEC_INTERNAL_MEMORY_OPT
+    video->sliceNo = (uint8 *)(IMEM_sliceNo);
+    if (video->sliceNo == NULL) status = PV_FALSE;
+    video->memoryUsage += nTotalMB;
+    video->acPredFlag = (uint8 *)(IMEM_acPredFlag);
+    if (video->acPredFlag == NULL) status = PV_FALSE;
+    video->memoryUsage += (nTotalMB);
+    video->predDC = (typeDCStore *)(IMEM_predDC);
+    if (video->predDC == NULL) status = PV_FALSE;
+    video->memoryUsage += (nTotalMB * sizeof(typeDCStore));
+    video->predDCAC_col = (typeDCACStore *)(IMEM_predDCAC_col);
+    if (video->predDCAC_col == NULL) status = PV_FALSE;
+    video->memoryUsage += ((nMBPerRow + 1) * sizeof(typeDCACStore));
+    video->predDCAC_row = video->predDCAC_col + 1;
+    video->headerInfo.Mode = (uint8 *)(IMEM_headerInfo_Mode);
+    if (video->headerInfo.Mode == NULL) status = PV_FALSE;
+    video->memoryUsage += nTotalMB;
+    video->headerInfo.CBP = (uint8 *)(IMEM_headerInfo_CBP);
+    if (video->headerInfo.CBP == NULL) status = PV_FALSE;
+    video->memoryUsage += nTotalMB;
+    video->QPMB = (int *)(IMEM_headerInfo_QPMB);
+    if (video->QPMB == NULL) status = PV_FALSE;
+    video->memoryUsage += (nTotalMB * sizeof(int));
+    video->mblock = &IMEM_mblock;
+    if (video->mblock == NULL) status = PV_FALSE;
+    oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
+
+    video->memoryUsage += sizeof(MacroBlock);
+    video->motX = (MOT *)(IMEM_motX);
+    if (video->motX == NULL) status = PV_FALSE;
+    video->motY = (MOT *)(IMEM_motY);
+    if (video->motY == NULL) status = PV_FALSE;
+    video->memoryUsage += (sizeof(MOT) * 8 * nTotalMB);
+#else
+    video->sliceNo = (uint8 *) oscl_malloc(nTotalMB);
+    if (video->sliceNo == NULL) status = PV_FALSE;
+    else oscl_memset(video->sliceNo, 0, nTotalMB);
+    video->memoryUsage += nTotalMB;
+
+    video->acPredFlag = (uint8 *) oscl_malloc(nTotalMB * sizeof(uint8));
+    if (video->acPredFlag == NULL) status = PV_FALSE;
+    else oscl_memset(video->acPredFlag, 0, nTotalMB * sizeof(uint8));
+    video->memoryUsage += (nTotalMB);
+
+    if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
+        return PV_FALSE;
+    }
+    video->predDC = (typeDCStore *) oscl_malloc(nTotalMB * sizeof(typeDCStore));
+    if (video->predDC == NULL) status = PV_FALSE;
+    else oscl_memset(video->predDC, 0, nTotalMB * sizeof(typeDCStore));
+    video->memoryUsage += (nTotalMB * sizeof(typeDCStore));
+
+    if (nMBPerRow > INT32_MAX - 1
+            || (size_t)(nMBPerRow + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
+        return PV_FALSE;
+    }
+    video->predDCAC_col = (typeDCACStore *) oscl_malloc((nMBPerRow + 1) * sizeof(typeDCACStore));
+    if (video->predDCAC_col == NULL) status = PV_FALSE;
+    else oscl_memset(video->predDCAC_col, 0, (nMBPerRow + 1) * sizeof(typeDCACStore));
+    video->memoryUsage += ((nMBPerRow + 1) * sizeof(typeDCACStore));
+
+    /* element zero will be used for storing vertical (col) AC coefficients */
+    /*  the rest will be used for storing horizontal (row) AC coefficients  */
+    video->predDCAC_row = video->predDCAC_col + 1;        /*  ACDC */
+
+    /* Allocating HeaderInfo structure & Quantizer array */
+    video->headerInfo.Mode = (uint8 *) oscl_malloc(nTotalMB);
+    if (video->headerInfo.Mode == NULL) status = PV_FALSE;
+    else oscl_memset(video->headerInfo.Mode, 0, nTotalMB);
+    video->memoryUsage += nTotalMB;
+    video->headerInfo.CBP = (uint8 *) oscl_malloc(nTotalMB);
+    if (video->headerInfo.CBP == NULL) status = PV_FALSE;
+    else oscl_memset (video->headerInfo.CBP, 0, nTotalMB);
+    video->memoryUsage += nTotalMB;
+
+    if ((size_t)nTotalMB > SIZE_MAX / sizeof(int16)) {
+        return PV_FALSE;
+    }
+    video->QPMB = (int16 *) oscl_malloc(nTotalMB * sizeof(int16));
+    if (video->QPMB == NULL) status = PV_FALSE;
+    else memset(video->QPMB, 0x0, nTotalMB * sizeof(int16));
+    video->memoryUsage += (nTotalMB * sizeof(int16));
+
+    /* Allocating macroblock space */
+    video->mblock = (MacroBlock *) oscl_malloc(sizeof(MacroBlock));
+    if (video->mblock == NULL)
+    {
+        status = PV_FALSE;
+    }
+    else
+    {
+        oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
+
+        video->memoryUsage += sizeof(MacroBlock);
+    }
+    /* Allocating motion vector space */
+    if ((size_t)nTotalMB > SIZE_MAX / (sizeof(MOT) * 4)) {
+        return PV_FALSE;
+    }
+    video->motX = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
+    if (video->motX == NULL) status = PV_FALSE;
+    else memset(video->motX, 0, sizeof(MOT) * 4 * nTotalMB);
+    video->motY = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
+    if (video->motY == NULL) status = PV_FALSE;
+    else memset(video->motY, 0, sizeof(MOT) * 4 * nTotalMB);
+    video->memoryUsage += (sizeof(MOT) * 8 * nTotalMB);
+#endif
+
+
+    /* initialize the decoder library */
+    video->prevVop->predictionType = I_VOP;
+    video->prevVop->timeStamp = 0;
+#ifndef PV_MEMORY_POOL
+    oscl_memset(video->prevVop->yChan, 16, sizeof(uint8)*size);     /*  10/31/01 */
+    oscl_memset(video->prevVop->uChan, 128, sizeof(uint8)*size / 2);
+
+    oscl_memset(video->currVop->yChan, 0, sizeof(uint8)*size*3 / 2);
+    if (nLayers > 1)
+    {
+        oscl_memset(video->prevEnhcVop->yChan, 0, sizeof(uint8)*size*3 / 2);
+        video->prevEnhcVop->timeStamp = 0;
+    }
+    video->concealFrame = video->prevVop->yChan;               /*  07/07/2001 */
+    decCtrl->outputFrame = video->prevVop->yChan;              /*  06/19/2002 */
+#endif
+
+    /* always start from base layer */
+    video->currLayer = 0;
+    return status;
+}
+
+/* ======================================================================== */
+/*  Function : PVResetVideoDecoder()                                        */
+/*  Date     : 01/14/2002                                                   */
+/*  Purpose  : Reset video timestamps                                       */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVResetVideoDecoder(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    int idx;
+
+    for (idx = 0; idx < decCtrl->nLayers; idx++)
+    {
+        video->vopHeader[idx]->timeStamp = 0;
+    }
+    video->prevVop->timeStamp = 0;
+    if (decCtrl->nLayers > 1)
+        video->prevEnhcVop->timeStamp = 0;
+
+    oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
+
+    return PV_TRUE;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVCleanUpVideoDecoder()                                      */
+/*  Date     : 04/11/2000, 08/29/2000                                       */
+/*  Purpose  : Cleanup of the MPEG-4 video decoder library.                 */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVCleanUpVideoDecoder(VideoDecControls *decCtrl)
+{
+    int idx;
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+#ifdef DEC_INTERNAL_MEMORY_OPT
+    if (video)
+    {
+
+        video->acPredFlag       = NULL;
+        video->sliceNo          = NULL;
+        video->motX             = NULL;
+        video->motY             = NULL;
+        video->mblock           = NULL;
+        video->QPMB             = NULL;
+        video->predDC           = NULL;
+        video->predDCAC_row     = NULL;
+        video->predDCAC_col     = NULL;
+        video->headerInfo.Mode  = NULL;
+        video->headerInfo.CBP   = NULL;
+        if (video->numberOfLayers > 1)
+        {
+            if (video->prevEnhcVop)
+            {
+                video->prevEnhcVop->uChan = NULL;
+                video->prevEnhcVop->vChan = NULL;
+                if (video->prevEnhcVop->yChan) oscl_free(video->prevEnhcVop->yChan);
+                oscl_free(video->prevEnhcVop);
+            }
+        }
+        if (video->currVop)
+        {
+            video->currVop->uChan = NULL;
+            video->currVop->vChan = NULL;
+            if (video->currVop->yChan)
+                video->currVop->yChan = NULL;
+            video->currVop = NULL;
+        }
+        if (video->prevVop)
+        {
+            video->prevVop->uChan = NULL;
+            video->prevVop->vChan = NULL;
+            if (video->prevVop->yChan)
+                video->prevVop->yChan = NULL;
+            video->prevVop = NULL;
+        }
+
+        if (video->vol)
+        {
+            for (idx = 0; idx < video->numberOfLayers; idx++)
+            {
+                if (video->vol[idx])
+                {
+                    BitstreamClose(video->vol[idx]->bitstream);
+                    video->vol[idx]->bitstream = NULL;
+                    video->vol[idx] = NULL;
+                }
+                video->vopHeader[idx] = NULL;
+
+            }
+            video->vol = NULL;
+            video->vopHeader = NULL;
+        }
+
+        video = NULL;
+        decCtrl->videoDecoderData = NULL;
+    }
+
+#else
+
+    if (video)
+    {
+        if (video->predDC) oscl_free(video->predDC);
+        video->predDCAC_row = NULL;
+        if (video->predDCAC_col) oscl_free(video->predDCAC_col);
+        if (video->motX) oscl_free(video->motX);
+        if (video->motY) oscl_free(video->motY);
+        if (video->mblock) oscl_free(video->mblock);
+        if (video->QPMB) oscl_free(video->QPMB);
+        if (video->headerInfo.Mode) oscl_free(video->headerInfo.Mode);
+        if (video->headerInfo.CBP) oscl_free(video->headerInfo.CBP);
+        if (video->sliceNo) oscl_free(video->sliceNo);
+        if (video->acPredFlag) oscl_free(video->acPredFlag);
+
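+        /* uChan and vChan point into the single allocation addressed by      */
+        /*   yChan, so only yChan is freed; the chroma pointers are cleared.  */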
+        if (video->numberOfLayers > 1)
+        {
+            if (video->prevEnhcVop)
+            {
+                video->prevEnhcVop->uChan = NULL;
+                video->prevEnhcVop->vChan = NULL;
+                if (video->prevEnhcVop->yChan) oscl_free(video->prevEnhcVop->yChan);
+                oscl_free(video->prevEnhcVop);
+            }
+        }
+        if (video->currVop)
+        {
+
+#ifndef PV_MEMORY_POOL
+            video->currVop->uChan = NULL;
+            video->currVop->vChan = NULL;
+            if (video->currVop->yChan)
+                oscl_free(video->currVop->yChan);
+#endif
+            oscl_free(video->currVop);
+        }
+        if (video->prevVop)
+        {
+#ifndef PV_MEMORY_POOL
+            video->prevVop->uChan = NULL;
+            video->prevVop->vChan = NULL;
+            if (video->prevVop->yChan)
+                oscl_free(video->prevVop->yChan);
+#endif
+            oscl_free(video->prevVop);
+        }
+
+        if (video->vol)
+        {
+            for (idx = 0; idx < video->numberOfLayers; idx++)
+            {
+                if (video->vol[idx])
+                {
+                    if (video->vol[idx]->bitstream)
+                    {
+                        BitstreamClose(video->vol[idx]->bitstream);
+                        oscl_free(video->vol[idx]->bitstream);
+                    }
+                    oscl_free(video->vol[idx]);
+                }
+
+            }
+            oscl_free(video->vol);
+        }
+
+        for (idx = 0; idx < video->numberOfLayers; idx++)
+        {
+            if (video->vopHeader[idx]) oscl_free(video->vopHeader[idx]);
+        }
+
+        if (video->vopHeader) oscl_free(video->vopHeader);
+
+        oscl_free(video);
+        decCtrl->videoDecoderData = NULL;
+    }
+#endif
+    return PV_TRUE;
+}
+/* ======================================================================== */
+/*  Function : PVGetVideoDimensions()                                       */
+/*  Date     : 040505                                                       */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : the display_width and display_height of                      */
+/*          the frame in the current layer.                                 */
+/*  Note     : This is not a macro or inline function because we do         */
+/*              not want to expose our internal data structure.             */
+/*  Modified :                                                              */
+/* ======================================================================== */
+OSCL_EXPORT_REF void PVGetVideoDimensions(VideoDecControls *decCtrl, int32 *display_width, int32 *display_height)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    *display_width = video->displayWidth;
+    *display_height = video->displayHeight;
+}
+
+OSCL_EXPORT_REF void PVGetBufferDimensions(VideoDecControls *decCtrl, int32 *width, int32 *height) {
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    *width = video->width;
+    *height = video->height;
+}
+
+/* ======================================================================== */
+/*  Function : PVGetVideoTimeStamp()                                        */
+/*  Date     : 04/27/2000, 08/29/2000                                       */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : current time stamp in milliseconds.                          */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+uint32 PVGetVideoTimeStamp(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    return video->currTimestamp;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVSetPostProcType()                                          */
+/*  Date     : 07/07/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : Set post-processing filter type.                             */
+/*  Note     :                                                              */
+/*  Modified : . 08/29/2000 changes the name for consistency.               */
+/* ======================================================================== */
+OSCL_EXPORT_REF void PVSetPostProcType(VideoDecControls *decCtrl, int mode)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    if (mode != 0) {
+        ALOGE("Post processing filters are not supported");
+    }
+    video->postFilterType = 0;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVGetDecBitrate()                                            */
+/*  Date     : 08/23/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns the average bits per second.           */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int PVGetDecBitrate(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    int     idx;
+    int32   sum = 0;
+
+    for (idx = 0; idx < BITRATE_AVERAGE_WINDOW; idx++)
+    {
+        sum += video->nBitsPerVop[idx];
+    }
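+    /* frameRate holds frames per 10 seconds (see PVGetDecFramerate), so the */
+    /*   average bitrate is the total bits in the window times frameRate,    */
+    /*   divided by (10 * BITRATE_AVERAGE_WINDOW).                           */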
+    sum = (sum * video->frameRate) / (10 * BITRATE_AVERAGE_WINDOW);
+    return (int) sum;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVGetDecFramerate()                                          */
+/*  Date     : 08/23/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns the average frames per 10 seconds.     */
+/*  Note     : The fps can be calculated as PVGetDecFramerate()/10          */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int PVGetDecFramerate(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+
+    return video->frameRate;
+}
+
+/* ======================================================================== */
+/*  Function : PVGetOutputFrame()                                           */
+/*  Date     : 05/07/2001                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns the pointer to the output frame        */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+uint8 *PVGetDecOutputFrame(VideoDecControls *decCtrl)
+{
+    return decCtrl->outputFrame;
+}
+
+/* ======================================================================== */
+/*  Function : PVGetLayerID()                                               */
+/*  Date     : 07/09/2001                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns decoded frame layer id (BASE/ENHANCE)  */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int PVGetLayerID(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    return video->currLayer;
+}
+/* ======================================================================== */
+/*  Function : PVGetDecMemoryUsage()                                        */
+/*  Date     : 08/23/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns the amount of memory used.             */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int32 PVGetDecMemoryUsage(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    return video->memoryUsage;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVGetDecBitstreamMode()                                      */
+/*  Date     : 08/23/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function returns the decoding mode of the base-layer    */
+/*              bitstream.                                                  */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+OSCL_EXPORT_REF MP4DecodingMode PVGetDecBitstreamMode(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    if (video->shortVideoHeader)
+    {
+        return H263_MODE;
+    }
+    else
+    {
+        return MPEG4_MODE;
+    }
+}
+
+
+/* ======================================================================== */
+/*  Function : PVExtractVolHeader()                                         */
+/*  Date     : 08/29/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : Extract vol header of the bitstream from buffer[].           */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVExtractVolHeader(uint8 *video_buffer, uint8 *vol_header, int32 *vol_header_size)
+{
+    int idx = -1;
+    uint8 start_code_prefix[] = { 0x00, 0x00, 0x01 };
+    uint8 h263_prefix[] = { 0x00, 0x00, 0x80 };
+
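+    /* A 0x000080 prefix marks a short-header (H.263) stream, for which the  */
+    /*   first 32 bytes are returned. Otherwise the buffer must start with   */
+    /*   an MPEG-4 VOS or video-object start code, and everything before the */
+    /*   first GOV (0xB3) or VOP (0xB6) start code is copied out.            */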
+    if (oscl_memcmp(h263_prefix, video_buffer, 3) == 0) /* we have short header stream */
+    {
+        oscl_memcpy(vol_header, video_buffer, 32);
+        *vol_header_size = 32;
+        return TRUE;
+    }
+    else
+    {
+        if (oscl_memcmp(start_code_prefix, video_buffer, 3) ||
+                (video_buffer[3] != 0xb0 && video_buffer[3] >= 0x20)) return FALSE;
+
+        do
+        {
+            idx++;
+            while (oscl_memcmp(start_code_prefix, video_buffer + idx, 3))
+            {
+                idx++;
+                if (idx + 3 >= *vol_header_size) goto quit;
+            }
+        }
+        while (video_buffer[idx+3] != 0xb3 && video_buffer[idx+3] != 0xb6);
+
+        oscl_memcpy(vol_header, video_buffer, idx);
+        *vol_header_size = idx;
+        return TRUE;
+    }
+
+quit:
+    oscl_memcpy(vol_header, video_buffer, *vol_header_size);
+    return FALSE;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVLocateFrameHeader()                                        */
+/*  Date     : 04/8/2005                                                    */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : Return the offset to the first SC in the buffer              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int32 PVLocateFrameHeader(uint8 *ptr, int32 size)
+{
+    int count = 0;
+    int32 i = size;
+
+    if (size < 1)
+    {
+        return 0;
+    }
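+    /* Scan for the MPEG-4 start-code prefix 0x000001: 'count' tracks        */
+    /*   consecutive zero bytes, and a 0x01 byte after two or more zeros     */
+    /*   completes the prefix; the returned offset points at its first zero. */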
+    while (i--)
+    {
+        if ((count > 1) && (*ptr == 0x01))
+        {
+            i += 2;
+            break;
+        }
+
+        if (*ptr++)
+            count = 0;
+        else
+            count++;
+    }
+    return (size - (i + 1));
+}
+
+
+/* ======================================================================== */
+/*  Function : PVLocateH263FrameHeader()                                    */
+/*  Date     : 04/8/2005                                                    */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : Return the offset to the first SC in the buffer              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+int32 PVLocateH263FrameHeader(uint8 *ptr, int32 size)
+{
+    int count = 0;
+    int32 i = size;
+
+    if (size < 1)
+    {
+        return 0;
+    }
+
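+    /* Scan for the H.263 picture start code: after two or more zero bytes,  */
+    /*   a byte whose top six bits are 100000 ((*ptr & 0xFC) == 0x80)        */
+    /*   completes the PSC; the returned offset points at its first zero.    */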
+    while (i--)
+    {
+        if ((count > 1) && ((*ptr & 0xFC) == 0x80))
+        {
+            i += 2;
+            break;
+        }
+
+        if (*ptr++)
+            count = 0;
+        else
+            count++;
+    }
+    return (size - (i + 1));
+}
+
+
+/* ======================================================================== */
+/*  Function : PVDecodeVideoFrame()                                         */
+/*  Date     : 08/29/2000                                                   */
+/*  Purpose  : Decode one video frame and return a YUV-12 image.            */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified : 04/17/2001 removed PV_EOS, PV_END_OF_BUFFER              */
+/*           : 08/22/2002 break up into 2 functions PVDecodeVopHeader and */
+/*                          PVDecodeVopBody                                 */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVDecodeVideoFrame(VideoDecControls *decCtrl, uint8 *buffer[],
+                                        uint32 timestamp[], int32 buffer_size[], uint use_ext_timestamp[], uint8 *currYUV)
+{
+    VopHeaderInfo header_info;
+
+    if (PVDecodeVopHeader(decCtrl, buffer, timestamp, buffer_size, &header_info,
+                          use_ext_timestamp, currYUV) != PV_TRUE)
+        return PV_FALSE;
+
+    if (PVDecodeVopBody(decCtrl, buffer_size) != PV_TRUE)
+    {
+        return PV_FALSE;
+    }
+
+    return PV_TRUE;
+}
+
+/* ======================================================================== */
+/*  Function : PVDecodeVopHeader()                                          */
+/*  Date     : 08/22/2002                                                   */
+/*  Purpose  : Determine target layer and decode vop header, modified from  */
+/*              original PVDecodeVideoFrame.                                */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVDecodeVopHeader(VideoDecControls *decCtrl, uint8 *buffer[],
+                       uint32 timestamp[], int32 buffer_size[], VopHeaderInfo *header_info, uint use_ext_timestamp [], uint8 *currYUV)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    Vol *currVol;
+    Vop *currVop = video->currVop;
+    Vop **vopHeader = video->vopHeader;
+    BitstreamDecVideo *stream;
+
+    int target_layer;
+
+#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
+    PV_STATUS status = PV_FAIL;
+    int idx;
+    int32 display_time;
+
+    /* decide which frame to decode next */
+    if (decCtrl->nLayers > 1)
+    {
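+        /* Examine each layer's pending VOP header (decoding it from the      */
+        /*   bitstream if needed) and pick the layer whose next VOP has the   */
+        /*   earliest display time as the decode target.                      */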
+        display_time = target_layer = -1;
+        for (idx = 0; idx < decCtrl->nLayers; idx++)
+        {
+            /* do we have data for this layer? */
+            if (buffer_size[idx] <= 0)
+            {
+                timestamp[idx] = -1;
+                continue;
+            }
+
+            /* did the application provide a timestamp for this vop? */
+            if (timestamp[idx] < 0)
+            {
+                if (vopHeader[idx]->timeStamp < 0)
+                {
+                    /* decode the timestamp in the bitstream */
+                    video->currLayer = idx;
+                    stream = video->vol[idx]->bitstream;
+                    BitstreamReset(stream, buffer[idx], buffer_size[idx]);
+
+                    while ((status = DecodeVOPHeader(video, vopHeader[idx], FALSE)) != PV_SUCCESS)
+                    {
+                        /* Try to find a VOP header in the buffer.   08/30/2000. */
+                        if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
+                        {
+                            /* if we don't have data for enhancement layer, */
+                            /*    don't just stop.   09/07/2000.          */
+                            buffer_size[idx] = 0;
+                            break;
+                        }
+                    }
+                    if (status == PV_SUCCESS)
+                    {
+                        vopHeader[idx]->timeStamp =
+                            timestamp[idx] = CalcVopDisplayTime(video->vol[idx], vopHeader[idx], video->shortVideoHeader);
+                        if (idx == 0) vopHeader[idx]->refSelectCode = 1;
+                    }
+                }
+                else
+                {
+                    /* We've decoded this vop header in the previous run already. */
+                    timestamp[idx] = vopHeader[idx]->timeStamp;
+                }
+            }
+
+            /* Use timestamps to select the next VOP to be decoded */
+            if (timestamp[idx] >= 0 && (display_time < 0 || display_time > timestamp[idx]))
+            {
+                display_time = timestamp[idx];
+                target_layer = idx;
+            }
+            else if (display_time == timestamp[idx])
+            {
+                /* we have to handle either SNR or spatial scalability here. */
+            }
+        }
+        if (target_layer < 0) return PV_FALSE;
+
+        /* set up for decoding the target layer */
+        video->currLayer = target_layer;
+        currVol = video->vol[target_layer];
+        video->bitstream = stream = currVol->bitstream;
+
+        /* We need to decode the vop header if external timestamp   */
+        /*    is provided.    10/04/2000                            */
+        if (vopHeader[target_layer]->timeStamp < 0)
+        {
+            stream = video->vol[target_layer]->bitstream;
+            BitstreamReset(stream, buffer[target_layer], buffer_size[target_layer]);
+
+            while (DecodeVOPHeader(video, vopHeader[target_layer], TRUE) != PV_SUCCESS)
+            {
+                /* Try to find a VOP header in the buffer.   08/30/2000. */
+                if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
+                {
+                    /* if we don't have data for enhancement layer, */
+                    /*    don't just stop.   09/07/2000.          */
+                    buffer_size[target_layer] = 0;
+                    break;
+                }
+            }
+            video->vol[target_layer]->timeInc_offset = vopHeader[target_layer]->timeInc;
+            video->vol[target_layer]->moduloTimeBase = timestamp[target_layer];
+            vopHeader[target_layer]->timeStamp = timestamp[target_layer];
+            if (target_layer == 0) vopHeader[target_layer]->refSelectCode = 1;
+        }
+    }
+    else /* base layer only decoding */
+    {
+#endif
+        video->currLayer = target_layer = 0;
+        currVol = video->vol[0];
+        video->bitstream = stream = currVol->bitstream;
+        if (buffer_size[0] <= 0) return PV_FALSE;
+        BitstreamReset(stream, buffer[0], buffer_size[0]);
+
+        if (video->shortVideoHeader)
+        {
+            while (DecodeShortHeader(video, vopHeader[0]) != PV_SUCCESS)
+            {
+                if (PVSearchNextH263Frame(stream) != PV_SUCCESS)
+                {
+                    /* There is no vop header in the buffer,    */
+                    /*   clean bitstream buffer.     2/5/2001   */
+                    buffer_size[0] = 0;
+                    if (video->initialized == PV_FALSE)
+                    {
+                        video->displayWidth = video->width = 0;
+                        video->displayHeight = video->height = 0;
+                    }
+                    return PV_FALSE;
+                }
+            }
+
+            if (use_ext_timestamp[0])
+            {
+                /* MTB for H263 is absolute TR */
+                /* following line is equivalent to  round((timestamp[0]*30)/1001);   11/13/2001 */
+                video->vol[0]->moduloTimeBase = 30 * ((timestamp[0] + 17) / 1001) + (30 * ((timestamp[0] + 17) % 1001) / 1001);
+                vopHeader[0]->timeStamp = timestamp[0];
+            }
+            else
+                vopHeader[0]->timeStamp = CalcVopDisplayTime(currVol, vopHeader[0], video->shortVideoHeader);
+        }
+        else
+        {
+            while (DecodeVOPHeader(video, vopHeader[0], FALSE) != PV_SUCCESS)
+            {
+                /* Try to find a VOP header in the buffer.   08/30/2000. */
+                if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
+                {
+                    /* There is no vop header in the buffer,    */
+                    /*   clean bitstream buffer.     2/5/2001   */
+                    buffer_size[0] = 0;
+                    return PV_FALSE;
+                }
+            }
+
+            if (use_ext_timestamp[0])
+            {
+                video->vol[0]->timeInc_offset = vopHeader[0]->timeInc;
+                video->vol[0]->moduloTimeBase = timestamp[0];  /*  11/12/2001 */
+                vopHeader[0]->timeStamp = timestamp[0];
+            }
+            else
+            {
+                vopHeader[0]->timeStamp = CalcVopDisplayTime(currVol, vopHeader[0], video->shortVideoHeader);
+            }
+        }
+
+        /* set up some base-layer only parameters */
+        vopHeader[0]->refSelectCode = 1;
+#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
+    }
+#endif
+    timestamp[target_layer] = video->currTimestamp = vopHeader[target_layer]->timeStamp;
+#ifdef PV_MEMORY_POOL
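+    /* In memory-pool mode the caller supplies the output buffer; carve it   */
+    /*   into Y, U and V planes here. Otherwise reuse currVop's planes.      */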
+    vopHeader[target_layer]->yChan = (PIXEL *)currYUV;
+    vopHeader[target_layer]->uChan = (PIXEL *)currYUV + decCtrl->size;
+    vopHeader[target_layer]->vChan = (PIXEL *)(vopHeader[target_layer]->uChan) + (decCtrl->size >> 2);
+#else
+    vopHeader[target_layer]->yChan = currVop->yChan;
+    vopHeader[target_layer]->uChan = currVop->uChan;
+    vopHeader[target_layer]->vChan = currVop->vChan;
+#endif
+    oscl_memcpy(currVop, vopHeader[target_layer], sizeof(Vop));
+
+#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
+    vopHeader[target_layer]->timeStamp = -1;
+#endif
+    /* put header info into the structure */
+    header_info->currLayer = target_layer;
+    header_info->timestamp = video->currTimestamp;
+    header_info->frameType = (MP4FrameType)currVop->predictionType;
+    header_info->refSelCode = vopHeader[target_layer]->refSelectCode;
+    header_info->quantizer = currVop->quantizer;
+    /***************************************/
+
+    return PV_TRUE;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVDecodeVopBody()                                            */
+/*  Date     : 08/22/2002                                                   */
+/*  Purpose  : Decode vop body after the header is decoded, modified from   */
+/*              original PVDecodeVideoFrame.                                */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVDecodeVopBody(VideoDecControls *decCtrl, int32 buffer_size[])
+{
+    PV_STATUS status = PV_FAIL;
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    int target_layer = video->currLayer;
+    Vol *currVol = video->vol[target_layer];
+    Vop *currVop = video->currVop;
+    Vop *prevVop = video->prevVop;
+    Vop *tempVopPtr;
+    int bytes_consumed = 0; /* Record how many bytes we used in the buffer.   04/24/2001 */
+
+    int idx;
+
+    if (currVop->vopCoded == 0)                  /*  07/03/2001 */
+    {
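+        /* vopCoded == 0: this VOP carries no macroblock data. Consume the    */
+        /*   header bytes from the buffer and, in memory-pool mode, copy the  */
+        /*   previous reference frame into the output so the frame repeats.   */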
+        PV_BitstreamByteAlign(currVol->bitstream);
+        /* We should always clear up bitstream buffer.   10/10/2000 */
+        bytes_consumed = (getPointer(currVol->bitstream) + 7) >> 3;
+
+        if (bytes_consumed > currVol->bitstream->data_end_pos)
+        {
+            bytes_consumed = currVol->bitstream->data_end_pos;
+        }
+
+        if (bytes_consumed < buffer_size[target_layer])
+        {
+            /* If we only consume part of the bits in the buffer, take those */
+            /*  out.     04/24/2001 */
+            /*          oscl_memcpy(buffer[target_layer], buffer[target_layer]+bytes_consumed,
+                            (buffer_size[target_layer]-=bytes_consumed)); */
+            buffer_size[target_layer] -= bytes_consumed;
+        }
+        else
+        {
+            buffer_size[target_layer] = 0;
+        }
+#ifdef PV_MEMORY_POOL
+
+        if (target_layer)
+        {
+            if (video->prevEnhcVop->timeStamp > video->prevVop->timeStamp)
+            {
+                video->prevVop = video->prevEnhcVop;
+            }
+        }
+
+        if (!video->prevVop->yChan) {
+            ALOGE("b/35269635");
+            android_errorWriteLog(0x534e4554, "35269635");
+            return PV_FALSE;
+        }
+        oscl_memcpy(currVop->yChan, video->prevVop->yChan, (decCtrl->size*3) / 2);
+
+        video->prevVop = prevVop;
+
+        video->concealFrame = currVop->yChan;       /*  07/07/2001 */
+
+        video->vop_coding_type = currVop->predictionType; /*  07/09/01 */
+
+        decCtrl->outputFrame = currVop->yChan;
+
+        /* Swap VOP pointers.  No enhc. frame oscl_memcpy() anymore!   04/24/2001 */
+        if (target_layer)
+        {
+            tempVopPtr = video->prevEnhcVop;
+            video->prevEnhcVop = video->currVop;
+            video->currVop = tempVopPtr;
+        }
+        else
+        {
+            tempVopPtr = video->prevVop;
+            video->prevVop = video->currVop;
+            video->currVop = tempVopPtr;
+        }
+#else
+        if (target_layer)       /* this is necessary to avoid flashback problems   06/21/2002*/
+        {
+            video->prevEnhcVop->timeStamp = currVop->timeStamp;
+        }
+        else
+        {
+            video->prevVop->timeStamp = currVop->timeStamp;
+        }
+#endif
+        video->vop_coding_type = currVop->predictionType; /*  07/09/01 */
+        /* the following is necessary to avoid displaying a not-coded I-VOP at the beginning of a session
+        or after random positioning  07/03/02*/
+        if (currVop->predictionType == I_VOP)
+        {
+            video->vop_coding_type = P_VOP;
+        }
+
+
+        return PV_TRUE;
+    }
+    /* ======================================================= */
+    /*  Decode vop body (if there is no error in the header!)  */
+    /* ======================================================= */
+
+    /* first, we need to select a reference frame */
+    if (decCtrl->nLayers > 1)
+    {
+        if (currVop->predictionType == I_VOP)
+        {
+            /* do nothing here */
+        }
+        else if (currVop->predictionType == P_VOP)
+        {
+            switch (currVop->refSelectCode)
+            {
+                case 0 : /* most recently decoded enhancement vop */
+                    /* Setup video->prevVop before we call PV_DecodeVop().   04/24/2001 */
+                    if (video->prevEnhcVop->timeStamp >= video->prevVop->timeStamp)
+                        video->prevVop = video->prevEnhcVop;
+                    break;
+
+                case 1 : /* most recently displayed base-layer vop */
+                    if (target_layer)
+                    {
+                        if (video->prevEnhcVop->timeStamp > video->prevVop->timeStamp)
+                            video->prevVop = video->prevEnhcVop;
+                    }
+                    break;
+
+                case 2 : /* next base-layer vop in display order */
+                    break;
+
+                case 3 : /* temporally coincident base-layer vop (no MV's) */
+                    break;
+            }
+        }
+        else /* we have a B-Vop */
+        {
+            mp4dec_log("DecodeVideoFrame(): B-VOP not supported.\n");
+        }
+    }
+
+    /* This is for the calculation of the frame rate and bitrate. */
+    idx = ++video->frame_idx % BITRATE_AVERAGE_WINDOW;
+
+    /* Calculate bitrate for this layer.   08/23/2000 */
+    status = PV_DecodeVop(video);
+    video->nBitsPerVop[idx] = getPointer(currVol->bitstream);
+    video->prevTimestamp[idx] = currVop->timeStamp;
+
+    /* restore video->prevVop after PV_DecodeVop().   04/24/2001 */
+//  if (currVop->refSelectCode == 0) video->prevVop = prevVop;
+    video->prevVop = prevVop;
+
+    /* Estimate the frame rate.   08/23/2000 */
+    video->duration = video->prevTimestamp[idx];
+    video->duration -= video->prevTimestamp[(++idx)%BITRATE_AVERAGE_WINDOW];
+    if (video->duration > 0)
+    { /* Only update framerate when the timestamp is right */
+        video->frameRate = (int)(FRAMERATE_SCALE) / video->duration;
+    }
+
+    /* We should always clear up bitstream buffer.   10/10/2000 */
+    bytes_consumed = (getPointer(currVol->bitstream) + 7) >> 3; /*  11/4/03 */
+
+    if (bytes_consumed > currVol->bitstream->data_end_pos)
+    {
+        bytes_consumed = currVol->bitstream->data_end_pos;
+    }
+
+    if (bytes_consumed < buffer_size[target_layer])
+    {
+        /* If we only consume part of the bits in the buffer, take those */
+        /*  out.     04/24/2001 */
+        /*      oscl_memcpy(buffer[target_layer], buffer[target_layer]+bytes_consumed,
+                    (buffer_size[target_layer]-=bytes_consumed)); */
+        buffer_size[target_layer] -= bytes_consumed;
+    }
+    else
+    {
+        buffer_size[target_layer] = 0;
+    }
+    switch (status)
+    {
+        case PV_FAIL :
+            return PV_FALSE;        /* this will take care of concealment if we lose whole frame  */
+
+        case PV_END_OF_VOP :
+            /* we may want to differentiate PV_END_OF_VOP and PV_SUCCESS */
+            /*    in the future.     05/10/2000                      */
+
+        case PV_SUCCESS :
+            /* Nothing is wrong :). */
+
+
+            video->concealFrame = video->currVop->yChan;       /*  07/07/2001 */
+
+            video->vop_coding_type = video->currVop->predictionType; /*  07/09/01 */
+
+            decCtrl->outputFrame = video->currVop->yChan;
+
+            /* Swap VOP pointers.  No enhc. frame oscl_memcpy() anymore!   04/24/2001 */
+            if (target_layer)
+            {
+                tempVopPtr = video->prevEnhcVop;
+                video->prevEnhcVop = video->currVop;
+                video->currVop = tempVopPtr;
+            }
+            else
+            {
+                tempVopPtr = video->prevVop;
+                video->prevVop = video->currVop;
+                video->currVop = tempVopPtr;
+            }
+            break;
+
+        default :
+            /* This will never happen */
+            break;
+    }
+
+    return PV_TRUE;
+}
+
+#ifdef PV_MEMORY_POOL
+OSCL_EXPORT_REF void PVSetReferenceYUV(VideoDecControls *decCtrl, uint8 *YUV)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
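+    /* Lay out the caller-provided buffer as packed YUV 4:2:0 and clear it   */
+    /*   to a black frame (Y = 16, U = V = 128) so concealment starts from   */
+    /*   a valid reference.                                                  */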
+    video->prevVop->yChan = (PIXEL *)YUV;
+    video->prevVop->uChan = (PIXEL *)YUV + video->size;
+    video->prevVop->vChan = (PIXEL *)video->prevVop->uChan + (decCtrl->size >> 2);
+    oscl_memset(video->prevVop->yChan, 16, sizeof(uint8)*decCtrl->size);     /*  10/31/01 */
+    oscl_memset(video->prevVop->uChan, 128, sizeof(uint8)*decCtrl->size / 2);
+    video->concealFrame = video->prevVop->yChan;               /*  07/07/2001 */
+    decCtrl->outputFrame = video->prevVop->yChan;              /*  06/19/2002 */
+}
+#endif
+
+
+/* ======================================================================== */
+/*  Function : VideoDecoderErrorDetected()                                  */
+/*  Date     : 06/20/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : This function will be called every time an error in the      */
+/*              bitstream is detected.                                      */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+uint VideoDecoderErrorDetected(VideoDecData *)
+{
+    /* This is only used for trapping bitstream errors for debugging */
+    return 0;
+}
+
+#ifdef ENABLE_LOG
+#include <stdio.h>
+#include <stdarg.h>
+/* ======================================================================== */
+/*  Function : m4vdec_dprintf()                                             */
+/*  Date     : 08/15/2000                                                   */
+/*  Purpose  : This is a function that logs messages in the mpeg4 video     */
+/*             decoder.  We can call the standard PacketVideo PVMessage     */
+/*             from inside this function if necessary.                      */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     : To turn on the logging, ENABLE_LOG must be defined           */
+/*              when compiling this file (only this file).                  */
+/*  Modified :                                                              */
+/* ======================================================================== */
+void m4vdec_dprintf(char *format, ...)
+{
+    FILE *log_fp;
+    va_list args;
+    va_start(args, format);
+
+    /* open the log file */
+    log_fp = fopen("\\mp4dec_log.txt", "a+");
+    if (log_fp == NULL) return;
+    /* output the message */
+    vfprintf(log_fp, format, args);
+    fclose(log_fp);
+
+    va_end(args);
+}
+#endif
+
+
+/* ======================================================================== */
+/*  Function : IsIntraFrame()                                               */
+/*  Date     : 05/29/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if the most recently decoded frame is an Intra frame. */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool IsIntraFrame(VideoDecControls *decCtrl)
+{
+    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
+    return (video->vop_coding_type == I_VOP);
+}
+
+/* ======================================================================== */
+/*  Function : PVDecPostProcess()                                           */
+/*  Date     : 01/09/2002                                                   */
+/*  Purpose  : PostProcess one video frame and return a YUV-12 image.       */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+void PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV)
+{
+    /* Post-processing is not supported in this decoder, so this is a no-op; */
+    /*   decCtrl->outputFrame is left unchanged.                             */
+    (void)decCtrl;
+    (void)outputYUV;
+    return;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV,  */
+/*                              int32 timestamp)                            */
+/*  Date     : 07/22/2003                                                   */
+/*  Purpose  : Get YUV reference frame from external source.                */
+/*  In/out   : YUV 4:2:0 frame containing the new reference frame, in the   */
+/*             same dimensions as the original (i.e., it does not have to   */
+/*             be a multiple of 16).                                         */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    Vop *prevVop = video->prevVop;
+    int width = video->width;
+    uint8 *dstPtr, *orgPtr, *dstPtr2, *orgPtr2;
+    int32 size = (int32)width * video->height;
+
+
+    /* set new parameters */
+    prevVop->timeStamp = timestamp;
+    prevVop->predictionType = I_VOP;
+
+    dstPtr = prevVop->yChan;
+    orgPtr = refYUV;
+    oscl_memcpy(dstPtr, orgPtr, size);
+    dstPtr = prevVop->uChan;
+    dstPtr2 = prevVop->vChan;
+    orgPtr = refYUV + size;
+    orgPtr2 = orgPtr + (size >> 2);
+    oscl_memcpy(dstPtr, orgPtr, (size >> 2));
+    oscl_memcpy(dstPtr2, orgPtr2, (size >> 2));
+
+    video->concealFrame = video->prevVop->yChan;
+    video->vop_coding_type = I_VOP;
+    decCtrl->outputFrame = video->prevVop->yChan;
+
+    return PV_TRUE;
+}
+
+/* ======================================================================== */
+/*  Function : PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV,   */
+/*                              int32 timestamp)                            */
+/*  Date     : 07/23/2003                                                   */
+/*  Purpose  : Get YUV enhancement reference frame from external source.    */
+/*  In/out   : YUV 4:2:0 frame containing the new reference frame, in the   */
+/*             same dimensions as the original (i.e., it does not have to   */
+/*             be a multiple of 16).                                         */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified :                                                              */
+/* ======================================================================== */
+Bool PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp)
+{
+    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
+    Vop *prevEnhcVop = video->prevEnhcVop;
+    uint8 *dstPtr, *orgPtr, *dstPtr2, *orgPtr2;
+    int32 size = (int32) video->width * video->height;
+
+    if (video->numberOfLayers <= 1)
+        return PV_FALSE;
+
+
+    /* set new parameters */
+    prevEnhcVop->timeStamp = timestamp;
+    prevEnhcVop->predictionType = I_VOP;
+
+    dstPtr = prevEnhcVop->yChan;
+    orgPtr = refYUV;
+    oscl_memcpy(dstPtr, orgPtr, size);
+    dstPtr = prevEnhcVop->uChan;
+    dstPtr2 = prevEnhcVop->vChan;
+    orgPtr = refYUV + size;
+    orgPtr2 = orgPtr + (size >> 2);
+    oscl_memcpy(dstPtr, orgPtr, (size >> 2));
+    oscl_memcpy(dstPtr2, orgPtr2, (size >> 2));
+    video->concealFrame = video->prevEnhcVop->yChan;
+    video->vop_coding_type = I_VOP;
+    decCtrl->outputFrame = video->prevEnhcVop->yChan;
+
+    return PV_TRUE;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVGetVolInfo()                                               */
+/*  Date     : 08/06/2003                                                   */
+/*  Purpose  : Get the vol info(only base-layer).                           */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Note     :                                                              */
+/*  Modified : 06/24/2004                                                   */
+/* ======================================================================== */
+Bool PVGetVolInfo(VideoDecControls *decCtrl, VolInfo *pVolInfo)
+{
+    Vol *currVol;
+
+    if (pVolInfo == NULL || decCtrl == NULL || decCtrl->videoDecoderData == NULL ||
+            ((VideoDecData *)decCtrl->videoDecoderData)->vol[0] == NULL) return PV_FALSE;
+
+    currVol = ((VideoDecData *)(decCtrl->videoDecoderData))->vol[0];
+
+    // get the VOL info
+    pVolInfo->shortVideoHeader = (int32)((VideoDecData *)(decCtrl->videoDecoderData))->shortVideoHeader;
+    pVolInfo->dataPartitioning = (int32)currVol->dataPartitioning;
+    pVolInfo->errorResDisable  = (int32)currVol->errorResDisable;
+    pVolInfo->useReverseVLC    = (int32)currVol->useReverseVLC;
+    pVolInfo->scalability      = (int32)currVol->scalability;
+    pVolInfo->nbitsTimeIncRes  = (int32)currVol->nbitsTimeIncRes;
+    pVolInfo->profile_level_id = (int32)currVol->profile_level_id;
+
+    return PV_TRUE;
+}
+
+
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/scaling.h b/media/codecs/m4v_h263/dec/src/scaling.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/scaling.h
rename to media/codecs/m4v_h263/dec/src/scaling.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/scaling_tab.cpp b/media/codecs/m4v_h263/dec/src/scaling_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/scaling_tab.cpp
rename to media/codecs/m4v_h263/dec/src/scaling_tab.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_dec_tab.h b/media/codecs/m4v_h263/dec/src/vlc_dec_tab.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_dec_tab.h
rename to media/codecs/m4v_h263/dec/src/vlc_dec_tab.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.cpp b/media/codecs/m4v_h263/dec/src/vlc_decode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_decode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.h b/media/codecs/m4v_h263/dec/src/vlc_decode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_decode.h
rename to media/codecs/m4v_h263/dec/src/vlc_decode.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_dequant.cpp b/media/codecs/m4v_h263/dec/src/vlc_dequant.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_dequant.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_dequant.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vlc_tab.cpp b/media/codecs/m4v_h263/dec/src/vlc_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vlc_tab.cpp
rename to media/codecs/m4v_h263/dec/src/vlc_tab.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
rename to media/codecs/m4v_h263/dec/src/vop.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/zigzag.h b/media/codecs/m4v_h263/dec/src/zigzag.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/zigzag.h
rename to media/codecs/m4v_h263/dec/src/zigzag.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/zigzag_tab.cpp b/media/codecs/m4v_h263/dec/src/zigzag_tab.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/src/zigzag_tab.cpp
rename to media/codecs/m4v_h263/dec/src/zigzag_tab.cpp
diff --git a/media/codecs/m4v_h263/dec/test/Android.bp b/media/codecs/m4v_h263/dec/test/Android.bp
new file mode 100644
index 0000000..d8de569
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/test/Android.bp
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_m4v_h263_dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_m4v_h263_dec_license",
+    ],
+}
+
+cc_test {
+    name: "Mpeg4H263DecoderTest",
+    gtest: true,
+
+    test_suites: [
+        "device-tests",
+        "mts",
+    ],
+
+    // Support multilib variants (using different suffix per sub-architecture), which is needed on
+    // build targets with secondary architectures, as the MTS test suite packaging logic flattens
+    // all test artifacts into a single `testcases` directory.
+    compile_multilib: "both",
+    multilib: {
+        lib32: {
+            suffix: "32",
+        },
+        lib64: {
+            suffix: "64",
+        },
+    },
+
+    // this unit test also runs within the mainline tests (MTS),
+    // so it must be compatible back to Android Q/10 (sdk 29)
+    min_sdk_version: "29",
+
+    srcs: [
+        "Mpeg4H263DecoderTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
new file mode 100755
index 0000000..8bb4d1c
--- /dev/null
+++ b/media/codecs/m4v_h263/dec/test/AndroidTest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Mpeg4H263 Decoder unit tests">
+    <option name="test-suite-tag" value="Mpeg4H263DecoderTest" />
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
+        <option name="cleanup" value="true" />
+        <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
+        <option name="append-bitness" value="true" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
+            value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="Mpeg4H263DecoderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263DecoderTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp b/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
rename to media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h b/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
rename to media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/README.md b/media/codecs/m4v_h263/dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/dec/test/README.md
rename to media/codecs/m4v_h263/dec/test/README.md
diff --git a/media/codecs/m4v_h263/enc/Android.bp b/media/codecs/m4v_h263/enc/Android.bp
new file mode 100644
index 0000000..e303404
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/Android.bp
@@ -0,0 +1,107 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_m4v_h263_enc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_m4v_h263_enc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_m4vh263enc",
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    srcs: [
+        "src/bitstream_io.cpp",
+        "src/combined_encode.cpp", "src/datapart_encode.cpp",
+        "src/dct.cpp",
+        "src/findhalfpel.cpp",
+        "src/fastcodemb.cpp",
+        "src/fastidct.cpp",
+        "src/fastquant.cpp",
+        "src/me_utils.cpp",
+        "src/mp4enc_api.cpp",
+        "src/rate_control.cpp",
+        "src/motion_est.cpp",
+        "src/motion_comp.cpp",
+        "src/sad.cpp",
+        "src/sad_halfpel.cpp",
+        "src/vlc_encode.cpp",
+        "src/vop.cpp",
+    ],
+
+    cflags: [
+        "-DBX_RC",
+        "-Werror",
+    ],
+
+    local_include_dirs: ["src"],
+    export_include_dirs: ["include"],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+//###############################################################################
+
+cc_test {
+    name: "libstagefright_m4vh263enc_test",
+    gtest: false,
+
+    srcs: ["test/m4v_h263_enc_test.cpp"],
+
+    local_include_dirs: ["src"],
+
+    cflags: [
+        "-DBX_RC",
+        "-Wall",
+        "-Werror",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    static_libs: ["libstagefright_m4vh263enc"],
+}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2 b/media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
rename to media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/m4v_h263/enc/NOTICE b/media/codecs/m4v_h263/enc/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/NOTICE
rename to media/codecs/m4v_h263/enc/NOTICE
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/cvei.h b/media/codecs/m4v_h263/enc/include/cvei.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/include/cvei.h
rename to media/codecs/m4v_h263/enc/include/cvei.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h b/media/codecs/m4v_h263/enc/include/mp4enc_api.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
rename to media/codecs/m4v_h263/enc/include/mp4enc_api.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.cpp b/media/codecs/m4v_h263/enc/src/bitstream_io.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.cpp
rename to media/codecs/m4v_h263/enc/src/bitstream_io.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.h b/media/codecs/m4v_h263/enc/src/bitstream_io.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/bitstream_io.h
rename to media/codecs/m4v_h263/enc/src/bitstream_io.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/combined_encode.cpp b/media/codecs/m4v_h263/enc/src/combined_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/combined_encode.cpp
rename to media/codecs/m4v_h263/enc/src/combined_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/datapart_encode.cpp b/media/codecs/m4v_h263/enc/src/datapart_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/datapart_encode.cpp
rename to media/codecs/m4v_h263/enc/src/datapart_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp b/media/codecs/m4v_h263/enc/src/dct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp
rename to media/codecs/m4v_h263/enc/src/dct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct.h b/media/codecs/m4v_h263/enc/src/dct.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct.h
rename to media/codecs/m4v_h263/enc/src/dct.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct_inline.h b/media/codecs/m4v_h263/enc/src/dct_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/dct_inline.h
rename to media/codecs/m4v_h263/enc/src/dct_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.cpp b/media/codecs/m4v_h263/enc/src/fastcodemb.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.cpp
rename to media/codecs/m4v_h263/enc/src/fastcodemb.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.h b/media/codecs/m4v_h263/enc/src/fastcodemb.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastcodemb.h
rename to media/codecs/m4v_h263/enc/src/fastcodemb.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastidct.cpp b/media/codecs/m4v_h263/enc/src/fastidct.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastidct.cpp
rename to media/codecs/m4v_h263/enc/src/fastidct.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastquant.cpp b/media/codecs/m4v_h263/enc/src/fastquant.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastquant.cpp
rename to media/codecs/m4v_h263/enc/src/fastquant.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/fastquant_inline.h b/media/codecs/m4v_h263/enc/src/fastquant_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/fastquant_inline.h
rename to media/codecs/m4v_h263/enc/src/fastquant_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/findhalfpel.cpp b/media/codecs/m4v_h263/enc/src/findhalfpel.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/findhalfpel.cpp
rename to media/codecs/m4v_h263/enc/src/findhalfpel.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/m4venc_oscl.h b/media/codecs/m4v_h263/enc/src/m4venc_oscl.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/m4venc_oscl.h
rename to media/codecs/m4v_h263/enc/src/m4venc_oscl.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/me_utils.cpp b/media/codecs/m4v_h263/enc/src/me_utils.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/me_utils.cpp
rename to media/codecs/m4v_h263/enc/src/me_utils.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp b/media/codecs/m4v_h263/enc/src/motion_comp.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp
rename to media/codecs/m4v_h263/enc/src/motion_comp.cpp
diff --git a/media/codecs/m4v_h263/enc/src/motion_est.cpp b/media/codecs/m4v_h263/enc/src/motion_est.cpp
new file mode 100644
index 0000000..9deb023
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/src/motion_est.cpp
@@ -0,0 +1,1741 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+#include "mp4def.h"
+#include "mp4enc_lib.h"
+#include "mp4lib_int.h"
+#include "m4venc_oscl.h"
+
+//#define PRINT_MV
+#define MIN_GOP 1   /* minimum size of GOP,  1/23/01, needs to be tested */
+
+#define CANDIDATE_DISTANCE  0 /* minimum distance between candidates for them to count as distinct */
+/* shouldn't be more than 3 */
+
+#define ZERO_MV_PREF    0 /* 0: bias (0,0)MV before full-pel search, lowest complexity*/
+/* 1: bias (0,0)MV after full-pel search, before half-pel, highest comp */
+/* 2: bias (0,0)MV after half-pel, high comp, better PSNR */
+
+#define RASTER_REFRESH  /* instead of random INTRA refresh, do raster scan,  2/26/01 */
+
+#ifdef RASTER_REFRESH
+#define TARGET_REFRESH_PER_REGION 4 /* no. of MBs per frame to be INTRA refreshed */
+#else
+#define TARGET_REFRESH_PER_REGION 1 /* no. of MBs per region to be INTRA refreshed */
+#endif
+
+#define ALL_CAND_EQUAL  10  /*  any number greater than 5 will work */
+
+#define NumPixelMB  256     /*  number of pixels used in SAD calculation */
+
+#define DEF_8X8_WIN 3   /* search region for 8x8 MVs around the 16x16 MV */
+#define MB_Nb  256
+
+#define PREF_NULL_VEC 129   /* for zero vector bias */
+#define PREF_16_VEC 129     /* 1MV bias versus 4MVs*/
+#define PREF_INTRA  512     /* bias for INTRA coding */
+
+const static Int tab_exclude[9][9] =  // [last_loc][curr_loc]
+{
+    {0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {0, 0, 0, 0, 1, 1, 1, 0, 0},
+    {0, 0, 0, 0, 1, 1, 1, 1, 1},
+    {0, 0, 0, 0, 0, 0, 1, 1, 1},
+    {0, 1, 1, 0, 0, 0, 1, 1, 1},
+    {0, 1, 1, 0, 0, 0, 0, 0, 1},
+    {0, 1, 1, 1, 1, 0, 0, 0, 1},
+    {0, 0, 1, 1, 1, 0, 0, 0, 0},
+    {0, 0, 1, 1, 1, 1, 1, 0, 0}
+}; // marks neighbor positions already evaluated in the previous step, so they are not recomputed
+
+const static Int refine_next[8][2] =    /* [curr_k][increment] */
+{
+    {0, 0}, {2, 0}, {1, 1}, {0, 2}, { -1, 1}, { -2, 0}, { -1, -1}, {0, -2}
+};
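+
+/* Reading of the two tables above, as used by the local refinement loop in
+   MBMotionSearch (summary derived from that loop): position 0 is the current
+   center, and the 8 neighbor positions are numbered so that 2,4,6,8 are the
+   side offsets (0,-1),(+1,0),(0,+1),(-1,0) checked first, while 1,3,5,7 are
+   the diagonal offsets (-1,-1),(+1,-1),(+1,+1),(-1,+1) checked only when a
+   side position updates the current best.  tab_exclude[last_loc][curr_loc]
+   flags positions whose SAD was already computed in the previous step, and
+   refine_next[k] is the (x,y) increment from position k to the next position
+   in that checking order. */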
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+    void MBMotionSearch(VideoEncData *video, UChar *cur, UChar *best_cand[],
+    Int i0, Int j0, Int type_pred, Int fullsearch, Int *hp_guess);
+
+    Int  fullsearch(VideoEncData *video, Vol *currVol, UChar *ref, UChar *cur,
+                    Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh);
+    Int fullsearchBlk(VideoEncData *video, Vol *currVol, UChar *cent, UChar *cur,
+                      Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh, Int range);
+    void CandidateSelection(Int *mvx, Int *mvy, Int *num_can, Int imb, Int jmb,
+                            VideoEncData *video, Int type_pred);
+    void RasterIntraUpdate(UChar *intraArray, UChar *Mode, Int totalMB, Int numRefresh);
+    void ResetIntraUpdate(UChar *intraArray, Int totalMB);
+    void ResetIntraUpdateRegion(UChar *intraArray, Int start_i, Int rwidth,
+                                Int start_j, Int rheight, Int mbwidth, Int mbheight);
+
+    void MoveNeighborSAD(Int dn[], Int new_loc);
+    Int FindMin(Int dn[]);
+    void PrepareCurMB(VideoEncData *video, UChar *cur);
+
+#ifdef __cplusplus
+}
+#endif
+
+/***************************************/
+/*  2/28/01, for HYPOTHESIS TESTING */
+#ifdef HTFM     /* defined in mp4def.h */
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+    void CalcThreshold(double pf, double exp_lamda[], Int nrmlz_th[]);
+    void    HTFMPrepareCurMB(VideoEncData *video, HTFM_Stat *htfm_stat, UChar *cur);
+#ifdef __cplusplus
+}
+#endif
+
+
+#define HTFM_Pf  0.25   /* 3/2/1, probability of false alarm, can be varied from 0 to 0.5 */
+/***************************************/
+#endif
+
+#ifdef _SAD_STAT
+ULong num_MB = 0;
+ULong num_HP_MB = 0;
+ULong num_Blk = 0;
+ULong num_HP_Blk = 0;
+ULong num_cand = 0;
+ULong num_better_hp = 0;
+ULong i_dist_from_guess = 0;
+ULong j_dist_from_guess = 0;
+ULong num_hp_not_zero = 0;
+#endif
+
+
+
+/*==================================================================
+    Function:   MotionEstimation
+    Date:       10/3/2000
+    Purpose:    Go through all macroblocks for motion search and
+                perform scene change detection.
+====================================================================*/
+
+void MotionEstimation(VideoEncData *video)
+{
+    UChar use_4mv = video->encParams->MV8x8_Enabled;
+    Vol *currVol = video->vol[video->currLayer];
+    Vop *currVop = video->currVop;
+    VideoEncFrameIO *currFrame = video->input;
+    Int i, j, comp;
+    Int mbwidth = currVol->nMBPerRow;
+    Int mbheight = currVol->nMBPerCol;
+    Int totalMB = currVol->nTotalMB;
+    Int width = currFrame->pitch;
+    UChar *mode_mb, *Mode = video->headerInfo.Mode;
+    MOT *mot_mb, **mot = video->mot;
+    UChar *intraArray = video->intraArray;
+    Int FS_en = video->encParams->FullSearch_Enabled;
+    void (*ComputeMBSum)(UChar *, Int, MOT *) = video->functionPointer->ComputeMBSum;
+    void (*ChooseMode)(UChar*, UChar*, Int, Int) = video->functionPointer->ChooseMode;
+
+    Int numIntra, start_i, numLoop, incr_i;
+    Int mbnum, offset;
+    UChar *cur, *best_cand[5];
+    Int sad8 = 0, sad16 = 0;
+    Int totalSAD = 0;   /* average SAD for rate control */
+    Int skip_halfpel_4mv;
+    Int f_code_p, f_code_n, max_mag = 0, min_mag = 0;
+    Int type_pred;
+    Int xh[5] = {0, 0, 0, 0, 0};
+    Int yh[5] = {0, 0, 0, 0, 0}; /* half-pel */
+    UChar hp_mem4MV[17*17*4];
+
+#ifdef HTFM
+    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
+    Int collect = 0;
+    HTFM_Stat htfm_stat;
+    double newvar[16];
+    double exp_lamda[15];
+    /*********************************/
+#endif
+    Int hp_guess = 0;
+#ifdef PRINT_MV
+    FILE *fp_debug;
+#endif
+
+//  FILE *fstat;
+//  static int frame_num = 0;
+
+    offset = 0;
+
+    if (video->currVop->predictionType == I_VOP)
+    {   /* compute the SAV */
+        mbnum = 0;
+        cur = currFrame->yChan;
+
+        for (j = 0; j < mbheight; j++)
+        {
+            for (i = 0; i < mbwidth; i++)
+            {
+                video->mbnum = mbnum;
+                mot_mb = mot[mbnum];
+
+                (*ComputeMBSum)(cur + (i << 4), width, mot_mb);
+
+                totalSAD += mot_mb[0].sad;
+
+                mbnum++;
+            }
+            cur += (width << 4);
+        }
+
+        video->sumMAD = (float)totalSAD / (float)NumPixelMB;
+
+        ResetIntraUpdate(intraArray, totalMB);
+
+        return  ;
+    }
+
+    /* 09/20/05 */
+    if (video->prevBaseVop->padded == 0 && !video->encParams->H263_Enabled)
+    {
+        PaddingEdge(video->prevBaseVop);
+        video->prevBaseVop->padded = 1;
+    }
+
+    /* Random INTRA update */
+    /*  suggestion: do this in CodeMB instead */
+    /*  2/21/2001 */
+    //if(video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2)
+    if (video->currLayer == 0 && video->encParams->Refresh)
+    {
+        RasterIntraUpdate(intraArray, Mode, totalMB, video->encParams->Refresh);
+    }
+
+    video->sad_extra_info = NULL;
+
+#ifdef HTFM
+    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
+    InitHTFM(video, &htfm_stat, newvar, &collect);
+    /*********************************/
+#endif
+
+    if ((video->encParams->SceneChange_Det == 1) /*&& video->currLayer==0 */
+            && ((video->encParams->LayerFrameRate[0] < 5.0) || (video->numVopsInGOP > MIN_GOP)))
+        /* do not try to detect a new scene if low frame rate and too close to previous I-frame */
+    {
+        incr_i = 2;
+        numLoop = 2;
+        start_i = 1;
+        type_pred = 0; /* for initial candidate selection */
+    }
+    else
+    {
+        incr_i = 1;
+        numLoop = 1;
+        start_i = 0;
+        type_pred = 2;
+    }
+
+    /* First pass: loop through half of the macroblocks */
+    /* to determine scene change. */
+    /* Second pass: process the rest of the macroblocks. */
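+    /* Illustrative numbers, assuming QCIF (99 MBs): the first pass visits a
+       checkerboard half of the MBs (about 50), and the scene-change test at
+       the end of the first loop fires once more than 0.30 * (totalMB / 2.0)
+       of them, i.e. roughly 15 MBs, choose INTRA. */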
+    numIntra = 0;
+    while (numLoop--)
+    {
+        for (j = 0; j < mbheight; j++)
+        {
+            if (incr_i > 1)
+                start_i = (start_i == 0 ? 1 : 0) ; /* toggle 0 and 1 */
+
+            offset = width * (j << 4) + (start_i << 4);
+
+            mbnum = j * mbwidth + start_i;
+
+            for (i = start_i; i < mbwidth; i += incr_i)
+            {
+                video->mbnum = mbnum;
+                mot_mb = mot[mbnum];
+                mode_mb = Mode + mbnum;
+
+                cur = currFrame->yChan + offset;
+
+
+                if (*mode_mb != MODE_INTRA)
+                {
+#if defined(HTFM)
+                    HTFMPrepareCurMB(video, &htfm_stat, cur);
+#else
+                    PrepareCurMB(video, cur);
+#endif
+                    /************************************************************/
+                    /******** full-pel 1MV and 4MVs search **********************/
+
+#ifdef _SAD_STAT
+                    num_MB++;
+#endif
+                    MBMotionSearch(video, cur, best_cand, i << 4, j << 4, type_pred,
+                                   FS_en, &hp_guess);
+
+#ifdef PRINT_MV
+                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                    fprintf(fp_debug, "#%d (%d,%d,%d) : ", mbnum, mot_mb[0].x, mot_mb[0].y, mot_mb[0].sad);
+                    fprintf(fp_debug, "(%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : ==>\n",
+                            mot_mb[1].x, mot_mb[1].y, mot_mb[1].sad,
+                            mot_mb[2].x, mot_mb[2].y, mot_mb[2].sad,
+                            mot_mb[3].x, mot_mb[3].y, mot_mb[3].sad,
+                            mot_mb[4].x, mot_mb[4].y, mot_mb[4].sad);
+                    fclose(fp_debug);
+#endif
+                    sad16 = mot_mb[0].sad;
+#ifdef NO_INTER4V
+                    sad8 = sad16;
+#else
+                    sad8 = mot_mb[1].sad + mot_mb[2].sad + mot_mb[3].sad + mot_mb[4].sad;
+#endif
+
+                    /* choose between INTRA or INTER */
+                    (*ChooseMode)(mode_mb, cur, width, ((sad8 < sad16) ? sad8 : sad16));
+                }
+                else    /* INTRA update, use for prediction 3/23/01 */
+                {
+                    mot_mb[0].x = mot_mb[0].y = 0;
+                }
+
+                if (*mode_mb == MODE_INTRA)
+                {
+                    numIntra++ ;
+
+                    /* compute SAV for rate control and fast DCT, 11/28/00 */
+                    (*ComputeMBSum)(cur, width, mot_mb);
+
+                    /* leave mot_mb[0] as it is for fast motion search */
+                    /* set the 4 MVs to zeros */
+                    for (comp = 1; comp <= 4; comp++)
+                    {
+                        mot_mb[comp].x = 0;
+                        mot_mb[comp].y = 0;
+                    }
+#ifdef PRINT_MV
+                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                    fprintf(fp_debug, "\n");
+                    fclose(fp_debug);
+#endif
+                }
+                else /* *mode_mb = MODE_INTER;*/
+                {
+                    if (video->encParams->HalfPel_Enabled)
+                    {
+#ifdef _SAD_STAT
+                        num_HP_MB++;
+#endif
+                        /* find half-pel resolution motion vector */
+                        FindHalfPelMB(video, cur, mot_mb, best_cand[0],
+                                      i << 4, j << 4, xh, yh, hp_guess);
+#ifdef PRINT_MV
+                        fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                        fprintf(fp_debug, "(%d,%d), %d\n", mot_mb[0].x, mot_mb[0].y, mot_mb[0].sad);
+                        fclose(fp_debug);
+#endif
+                        skip_halfpel_4mv = ((sad16 - mot_mb[0].sad) <= (MB_Nb >> 1) + 1);
+                        sad16 = mot_mb[0].sad;
+
+#ifndef NO_INTER4V
+                        if (use_4mv && !skip_halfpel_4mv)
+                        {
+                            /* Also decide 1MV or 4MV !!!!!!!!*/
+                            sad8 = FindHalfPelBlk(video, cur, mot_mb, sad16,
+                                                  best_cand, mode_mb, i << 4, j << 4, xh, yh, hp_mem4MV);
+
+#ifdef PRINT_MV
+                            fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                            fprintf(fp_debug, " (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) \n",
+                                    mot_mb[1].x, mot_mb[1].y, mot_mb[1].sad,
+                                    mot_mb[2].x, mot_mb[2].y, mot_mb[2].sad,
+                                    mot_mb[3].x, mot_mb[3].y, mot_mb[3].sad,
+                                    mot_mb[4].x, mot_mb[4].y, mot_mb[4].sad);
+                            fclose(fp_debug);
+#endif
+                        }
+#endif /* NO_INTER4V */
+                    }
+                    else    /* HalfPel_Enabled ==0  */
+                    {
+#ifndef NO_INTER4V
+                        //if(sad16 < sad8-PREF_16_VEC)
+                        if (sad16 - PREF_16_VEC > sad8)
+                        {
+                            *mode_mb = MODE_INTER4V;
+                        }
+#endif
+                    }
+#if (ZERO_MV_PREF==2)   /* use mot_mb[7].sad as d0 computed in MBMotionSearch*/
+                    /******************************************************/
+                    if (mot_mb[7].sad - PREF_NULL_VEC < sad16 && mot_mb[7].sad - PREF_NULL_VEC < sad8)
+                    {
+                        mot_mb[0].sad = mot_mb[7].sad - PREF_NULL_VEC;
+                        mot_mb[0].x = mot_mb[0].y = 0;
+                        *mode_mb = MODE_INTER;
+                    }
+                    /******************************************************/
+#endif
+                    if (*mode_mb == MODE_INTER)
+                    {
+                        if (mot_mb[0].x == 0 && mot_mb[0].y == 0)   /* use zero vector */
+                            mot_mb[0].sad += PREF_NULL_VEC; /* add back the bias */
+
+                        mot_mb[1].sad = mot_mb[2].sad = mot_mb[3].sad = mot_mb[4].sad = (mot_mb[0].sad + 2) >> 2;
+                        mot_mb[1].x = mot_mb[2].x = mot_mb[3].x = mot_mb[4].x = mot_mb[0].x;
+                        mot_mb[1].y = mot_mb[2].y = mot_mb[3].y = mot_mb[4].y = mot_mb[0].y;
+
+                    }
+                }
+
+                /* find maximum magnitude */
+                /* compute average SAD for rate control, 11/28/00 */
+                if (*mode_mb == MODE_INTER)
+                {
+#ifdef PRINT_MV
+                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                    fprintf(fp_debug, "%d MODE_INTER\n", mbnum);
+                    fclose(fp_debug);
+#endif
+                    totalSAD += mot_mb[0].sad;
+                    if (mot_mb[0].x > max_mag)
+                        max_mag = mot_mb[0].x;
+                    if (mot_mb[0].y > max_mag)
+                        max_mag = mot_mb[0].y;
+                    if (mot_mb[0].x < min_mag)
+                        min_mag = mot_mb[0].x;
+                    if (mot_mb[0].y < min_mag)
+                        min_mag = mot_mb[0].y;
+                }
+                else if (*mode_mb == MODE_INTER4V)
+                {
+#ifdef PRINT_MV
+                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                    fprintf(fp_debug, "%d MODE_INTER4V\n", mbnum);
+                    fclose(fp_debug);
+#endif
+                    totalSAD += sad8;
+                    for (comp = 1; comp <= 4; comp++)
+                    {
+                        if (mot_mb[comp].x > max_mag)
+                            max_mag = mot_mb[comp].x;
+                        if (mot_mb[comp].y > max_mag)
+                            max_mag = mot_mb[comp].y;
+                        if (mot_mb[comp].x < min_mag)
+                            min_mag = mot_mb[comp].x;
+                        if (mot_mb[comp].y < min_mag)
+                            min_mag = mot_mb[comp].y;
+                    }
+                }
+                else    /* MODE_INTRA */
+                {
+#ifdef PRINT_MV
+                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
+                    fprintf(fp_debug, "%d MODE_INTRA\n", mbnum);
+                    fclose(fp_debug);
+#endif
+                    totalSAD += mot_mb[0].sad;
+                }
+                mbnum += incr_i;
+                offset += (incr_i << 4);
+
+            }
+        }
+
+        if (incr_i > 1 && numLoop) /* scene change on and first loop */
+        {
+            //if(numIntra > ((totalMB>>3)<<1) + (totalMB>>3)) /* 75% of 50%MBs */
+            if (numIntra > (0.30*(totalMB / 2.0))) /* 30% of the visited half, i.e. ~15% of all MBs */
+            {
+                /******** scene change detected *******************/
+                currVop->predictionType = I_VOP;
+                M4VENC_MEMSET(Mode, MODE_INTRA, sizeof(UChar)*totalMB); /* set this for MB level coding*/
+                currVop->quantizer = video->encParams->InitQuantIvop[video->currLayer];
+
+                /* compute the SAV for rate control & fast DCT */
+                totalSAD = 0;
+                offset = 0;
+                mbnum = 0;
+                cur = currFrame->yChan;
+
+                for (j = 0; j < mbheight; j++)
+                {
+                    for (i = 0; i < mbwidth; i++)
+                    {
+                        video->mbnum = mbnum;
+                        mot_mb = mot[mbnum];
+
+
+                        (*ComputeMBSum)(cur + (i << 4), width, mot_mb);
+                        totalSAD += mot_mb[0].sad;
+
+                        mbnum++;
+                    }
+                    cur += (width << 4);
+                }
+
+                video->sumMAD = (float)totalSAD / (float)NumPixelMB;
+                ResetIntraUpdate(intraArray, totalMB);
+                /* video->numVopsInGOP=0; 3/13/01 move it to vop.c*/
+
+                return ;
+            }
+        }
+        /******** no scene change, continue motion search **********************/
+        start_i = 0;
+        type_pred++; /* second pass */
+    }
+
+    video->sumMAD = (float)totalSAD / (float)NumPixelMB;    /* avg SAD */
+
+    /* find f_code , 10/27/2000 */
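+    /* The loops below pick the smallest f >= 1 with (mag >> (4 + f)) == 0,
+       i.e. the smallest f such that mag < 2^(4+f) half-pel units.  Worked
+       example with a hypothetical max_mag = 40: 40 >> 5 = 1, so f becomes 2;
+       40 >> 6 = 0, stop; f_code_p = 2.  The negative side is handled the same
+       way using |min_mag| - 1. */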
+    f_code_p = 1;
+    while ((max_mag >> (4 + f_code_p)) > 0)
+        f_code_p++;
+
+    f_code_n = 1;
+    min_mag *= -1;
+    while ((min_mag - 1) >> (4 + f_code_n) > 0)
+        f_code_n++;
+
+    currVop->fcodeForward = (f_code_p > f_code_n ? f_code_p : f_code_n);
+
+#ifdef HTFM
+    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
+    if (collect)
+    {
+        collect = 0;
+        UpdateHTFM(video, newvar, exp_lamda, &htfm_stat);
+    }
+    /*********************************/
+#endif
+
+    return ;
+}
+
+
+#ifdef HTFM
+void InitHTFM(VideoEncData *video, HTFM_Stat *htfm_stat, double *newvar, Int *collect)
+{
+    Int i;
+    Int lx = video->currVop->width; //  width, no padding (pitch below includes padding)
+    Int lx2 = lx << 1;
+    Int lx3 = lx2 + lx;
+    Int rx = video->currVop->pitch;
+    Int rx2 = rx << 1;
+    Int rx3 = rx2 + rx;
+
+    Int *offset, *offset2;
+
+    /* 4/11/01, collect data every 30 frames, doesn't have to be base layer */
+    if (((Int)video->numVopsInGOP) % 30 == 1)
+    {
+
+        *collect = 1;
+
+        htfm_stat->countbreak = 0;
+        htfm_stat->abs_dif_mad_avg = 0;
+
+        for (i = 0; i < 16; i++)
+        {
+            newvar[i] = 0.0;
+        }
+//      video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING_HTFM_Collect;
+        video->functionPointer->SAD_Macroblock = &SAD_MB_HTFM_Collect;
+        video->functionPointer->SAD_MB_HalfPel[0] = NULL;
+        video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HP_HTFM_Collectxh;
+        video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HP_HTFM_Collectyh;
+        video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HP_HTFM_Collectxhyh;
+        video->sad_extra_info = (void*)(htfm_stat);
+        offset = htfm_stat->offsetArray;
+        offset2 = htfm_stat->offsetRef;
+    }
+    else
+    {
+//      video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING_HTFM;
+        video->functionPointer->SAD_Macroblock = &SAD_MB_HTFM;
+        video->functionPointer->SAD_MB_HalfPel[0] = NULL;
+        video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HP_HTFMxh;
+        video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HP_HTFMyh;
+        video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HP_HTFMxhyh;
+        video->sad_extra_info = (void*)(video->nrmlz_th);
+        offset = video->nrmlz_th + 16;
+        offset2 = video->nrmlz_th + 32;
+    }
+
+    offset[0] = 0;
+    offset[1] = lx2 + 2;
+    offset[2] = 2;
+    offset[3] = lx2;
+    offset[4] = lx + 1;
+    offset[5] = lx3 + 3;
+    offset[6] = lx + 3;
+    offset[7] = lx3 + 1;
+    offset[8] = lx;
+    offset[9] = lx3 + 2;
+    offset[10] = lx3 ;
+    offset[11] = lx + 2 ;
+    offset[12] = 1;
+    offset[13] = lx2 + 3;
+    offset[14] = lx2 + 1;
+    offset[15] = 3;
+
+    offset2[0] = 0;
+    offset2[1] = rx2 + 2;
+    offset2[2] = 2;
+    offset2[3] = rx2;
+    offset2[4] = rx + 1;
+    offset2[5] = rx3 + 3;
+    offset2[6] = rx + 3;
+    offset2[7] = rx3 + 1;
+    offset2[8] = rx;
+    offset2[9] = rx3 + 2;
+    offset2[10] = rx3 ;
+    offset2[11] = rx + 2 ;
+    offset2[12] = 1;
+    offset2[13] = rx2 + 3;
+    offset2[14] = rx2 + 1;
+    offset2[15] = 3;
+
+    return ;
+}
+
+void UpdateHTFM(VideoEncData *video, double *newvar, double *exp_lamda, HTFM_Stat *htfm_stat)
+{
+    if (htfm_stat->countbreak == 0)
+        htfm_stat->countbreak = 1;
+
+    newvar[0] = (double)(htfm_stat->abs_dif_mad_avg) / (htfm_stat->countbreak * 16.);
+
+    if (newvar[0] < 0.001)
+    {
+        newvar[0] = 0.001; /* to prevent floating-point overflow in the division below */
+    }
+    exp_lamda[0] =  1 / (newvar[0] * 1.4142136);
+    exp_lamda[1] = exp_lamda[0] * 1.5825;
+    exp_lamda[2] = exp_lamda[0] * 2.1750;
+    exp_lamda[3] = exp_lamda[0] * 3.5065;
+    exp_lamda[4] = exp_lamda[0] * 3.1436;
+    exp_lamda[5] = exp_lamda[0] * 3.5315;
+    exp_lamda[6] = exp_lamda[0] * 3.7449;
+    exp_lamda[7] = exp_lamda[0] * 4.5854;
+    exp_lamda[8] = exp_lamda[0] * 4.6191;
+    exp_lamda[9] = exp_lamda[0] * 5.4041;
+    exp_lamda[10] = exp_lamda[0] * 6.5974;
+    exp_lamda[11] = exp_lamda[0] * 10.5341;
+    exp_lamda[12] = exp_lamda[0] * 10.0719;
+    exp_lamda[13] = exp_lamda[0] * 12.0516;
+    exp_lamda[14] = exp_lamda[0] * 15.4552;
+
+    CalcThreshold(HTFM_Pf, exp_lamda, video->nrmlz_th);
+    return ;
+}
+
+
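+/* CalcThreshold converts the false-alarm probability pf into the partial-SAD
+   thresholds used by HTFM.  The formula below is the quantile of a zero-mean
+   Laplacian model with rate lamda (a reading consistent with the code, not
+   spelled out in the original comments): for pf < 0.5, t = ln(2*pf)/lamda
+   (negative); otherwise t = -ln(2*(1-pf))/lamda.  Each t is then scaled by the
+   number of pixels accumulated so far, 16*(i+1).  Hypothetical example: with
+   pf = 0.25 and lamda = 1.0, t = ln(0.5) ~ -0.69, t*16 ~ -11.1, which the
+   "+ 0.5" and integer cast turn into nrmlz_th[0] = -10. */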
+void CalcThreshold(double pf, double exp_lamda[], Int nrmlz_th[])
+{
+    Int i;
+    double temp[15];
+    //  printf("\nLamda: ");
+
+    /* parametric PREMODELling */
+    for (i = 0; i < 15; i++)
+    {
+        //    printf("%g ",exp_lamda[i]);
+        if (pf < 0.5)
+            temp[i] = 1 / exp_lamda[i] * M4VENC_LOG(2 * pf);
+        else
+            temp[i] = -1 / exp_lamda[i] * M4VENC_LOG(2 * (1 - pf));
+    }
+
+    nrmlz_th[15] = 0;
+    for (i = 0; i < 15; i++)        /* scale up by the number of pixels accumulated */
+        nrmlz_th[i] = (Int)(temp[i] * ((i + 1) << 4) + 0.5);
+
+    return ;
+}
+
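+/* HTFMPrepareCurMB repacks the current 16x16 luma MB into video->currYMB in
+   the HTFM testing order: for each of the 16 offsets (each a distinct (dx,dy)
+   in [0,3]x[0,3]) it gathers the 4x4 grid of pixels spaced 4 apart both
+   horizontally and vertically, so the MB is split into 16 interleaved subsets
+   that the partial-SAD routines can test one at a time (summary inferred from
+   the copy loop below). */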
+void    HTFMPrepareCurMB(VideoEncData *video, HTFM_Stat *htfm_stat, UChar *cur)
+{
+    void* tmp = (void*)(video->currYMB);
+    ULong *htfmMB = (ULong*)tmp;
+    UChar *ptr, byte;
+    Int *offset;
+    Int i;
+    ULong word;
+    Int width = video->currVop->width;
+
+    if (((Int)video->numVopsInGOP) % 30 == 1)
+    {
+        offset = htfm_stat->offsetArray;
+    }
+    else
+    {
+        offset = video->nrmlz_th + 16;
+    }
+
+    for (i = 0; i < 16; i++)
+    {
+        ptr = cur + offset[i];
+        word = ptr[0];
+        byte = ptr[4];
+        word |= (byte << 8);
+        byte = ptr[8];
+        word |= (byte << 16);
+        byte = ptr[12];
+        word |= (byte << 24);
+        *htfmMB++ = word;
+
+        word = *(ptr += (width << 2));
+        byte = ptr[4];
+        word |= (byte << 8);
+        byte = ptr[8];
+        word |= (byte << 16);
+        byte = ptr[12];
+        word |= (byte << 24);
+        *htfmMB++ = word;
+
+        word = *(ptr += (width << 2));
+        byte = ptr[4];
+        word |= (byte << 8);
+        byte = ptr[8];
+        word |= (byte << 16);
+        byte = ptr[12];
+        word |= (byte << 24);
+        *htfmMB++ = word;
+
+        word = *(ptr += (width << 2));
+        byte = ptr[4];
+        word |= (byte << 8);
+        byte = ptr[8];
+        word |= (byte << 16);
+        byte = ptr[12];
+        word |= (byte << 24);
+        *htfmMB++ = word;
+    }
+
+    return ;
+}
+
+
+#endif
+
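+/* PrepareCurMB copies the current 16x16 luma MB into the small contiguous
+   video->currYMB buffer (16 rows of 4 ULong words) so that the repeated SAD
+   evaluations read a cache-friendly block instead of the full-width frame.
+   The ULong loads assume the usual 4-byte row alignment (width is a multiple
+   of 16, see the note in MBMotionSearch). */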
+void    PrepareCurMB(VideoEncData *video, UChar *cur)
+{
+    void* tmp = (void*)(video->currYMB);
+    ULong *currYMB = (ULong*)tmp;
+    Int i;
+    Int width = video->currVop->width;
+
+    cur -= width;
+
+    for (i = 0; i < 16; i++)
+    {
+        *currYMB++ = *((ULong*)(cur += width));
+        *currYMB++ = *((ULong*)(cur + 4));
+        *currYMB++ = *((ULong*)(cur + 8));
+        *currYMB++ = *((ULong*)(cur + 12));
+    }
+
+    return ;
+}
+
+
+/*==================================================================
+    Function:   MBMotionSearch
+    Date:       09/06/2000
+    Purpose:    Perform motion estimation for a macroblock.
+                Find the 1MV and 4MVs in half-pel resolution.
+                Uses the ST1 algorithm provided by Chalidabhongse and Kuo,
+                CSVT March'98.
+
+==================================================================*/
+
+void MBMotionSearch(VideoEncData *video, UChar *cur, UChar *best_cand[],
+                    Int i0, Int j0, Int type_pred, Int FS_en, Int *hp_guess)
+{
+    Vol *currVol = video->vol[video->currLayer];
+    UChar *ref, *cand, *ncand = NULL, *cur8;
+    void *extra_info = video->sad_extra_info;
+    Int mbnum = video->mbnum;
+    Int width = video->currVop->width; /* 6/12/01, must be multiple of 16 */
+    Int height = video->currVop->height;
+    MOT **mot = video->mot;
+    UChar use_4mv = video->encParams->MV8x8_Enabled;
+    UChar h263_mode = video->encParams->H263_Enabled;
+    Int(*SAD_Macroblock)(UChar*, UChar*, Int, void*) = video->functionPointer->SAD_Macroblock;
+    Int(*SAD_Block)(UChar*, UChar*, Int, Int, void*) = video->functionPointer->SAD_Block;
+    VideoEncParams *encParams = video->encParams;
+    Int range = encParams->SearchRange;
+
+    Int lx = video->currVop->pitch; /* padding */
+    Int comp;
+    Int i, j, imin, jmin, ilow, ihigh, jlow, jhigh, iorg, jorg;
+    Int d, dmin, dn[9];
+#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
+    Int d0;
+#endif
+    Int k;
+    Int mvx[5], mvy[5], imin0, jmin0;
+    Int num_can, center_again;
+    Int last_loc, new_loc = 0;
+    Int step, max_step = range >> 1;
+    Int next;
+
+    ref = video->forwardRefVop->yChan; /* origin of actual frame */
+
+    cur = video->currYMB; /* use smaller memory space for current MB */
+
+    /*  find limit of the search (adjusting search range)*/
+
+    if (!h263_mode)
+    {
+        ilow = i0 - range;
+        if (ilow < -15)
+            ilow = -15;
+        ihigh = i0 + range - 1;
+        if (ihigh > width - 1)
+            ihigh = width - 1;
+        jlow = j0 - range;
+        if (jlow < -15)
+            jlow = -15;
+        jhigh = j0 + range - 1;
+        if (jhigh > height - 1)
+            jhigh = height - 1;
+    }
+    else
+    {
+        ilow = i0 - range;
+        if (ilow < 0)
+            ilow = 0;
+        ihigh = i0 + range - 1;
+        if (ihigh > width - 16)
+            ihigh = width - 16;
+        jlow = j0 - range;
+        if (jlow < 0)
+            jlow = 0;
+        jhigh = j0 + range - 1;
+        if (jhigh > height - 16)
+            jhigh = height - 16;
+    }
+
+    imin = i0;
+    jmin = j0; /* needed for fullsearch */
+    ncand = ref + imin + jmin * lx;
+
+    /* for first row of MB, fullsearch can be used */
+    if (FS_en)
+    {
+        *hp_guess = 0; /* no guess for fast half-pel */
+
+        dmin =  fullsearch(video, currVol, ref, cur, &imin, &jmin, ilow, ihigh, jlow, jhigh);
+
+        ncand = ref + imin + jmin * lx;
+
+        mot[mbnum][0].sad = dmin;
+        mot[mbnum][0].x = (imin - i0) << 1;
+        mot[mbnum][0].y = (jmin - j0) << 1;
+        imin0 = imin << 1;  /* 16x16 MV in half-pel resolution */
+        jmin0 = jmin << 1;
+        best_cand[0] = ncand;
+    }
+    else
+    {   /* 4/7/01, modified this test so that fullsearch on the top row is only used up to MB (0,3) */
+        /*            up to 30% complexity saving with comparable quality */
+        if (video->forwardRefVop->predictionType == I_VOP && j0 == 0 && i0 <= 64 && type_pred != 1)
+        {
+            *hp_guess = 0; /* no guess for fast half-pel */
+            dmin =  fullsearch(video, currVol, ref, cur, &imin, &jmin, ilow, ihigh, jlow, jhigh);
+            ncand = ref + imin + jmin * lx;
+        }
+        else
+        {
+            /************** initialize candidate **************************/
+            /* find initial motion vector */
+            CandidateSelection(mvx, mvy, &num_can, i0 >> 4, j0 >> 4, video, type_pred);
+
+            dmin = 65535;
+
+            /* check if all are equal */
+            if (num_can == ALL_CAND_EQUAL)
+            {
+                i = i0 + mvx[0];
+                j = j0 + mvy[0];
+
+                if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+                {
+                    cand = ref + i + j * lx;
+
+                    d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
+
+                    if (d < dmin)
+                    {
+                        dmin = d;
+                        imin = i;
+                        jmin = j;
+                        ncand = cand;
+                    }
+                }
+            }
+            else
+            {
+                /************** evaluate unique candidates **********************/
+                for (k = 0; k < num_can; k++)
+                {
+                    i = i0 + mvx[k];
+                    j = j0 + mvy[k];
+
+                    if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+                    {
+                        cand = ref + i + j * lx;
+                        d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
+
+                        if (d < dmin)
+                        {
+                            dmin = d;
+                            imin = i;
+                            jmin = j;
+                            ncand = cand;
+                        }
+                        else if ((d == dmin) && PV_ABS(mvx[k]) + PV_ABS(mvy[k]) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
+                        {
+                            dmin = d;
+                            imin = i;
+                            jmin = j;
+                            ncand = cand;
+                        }
+                    }
+                }
+            }
+            if (num_can == 0 || dmin == 65535) /* no candidate selected */
+            {
+                ncand = ref + i0 + j0 * lx; /* use (0,0) MV as initial value */
+                mot[mbnum][7].sad = dmin = (*SAD_Macroblock)(ncand, cur, (65535 << 16) | lx, extra_info);
+#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
+                d0 = dmin;
+#endif
+                imin = i0;
+                jmin = j0;
+            }
+
+#if (ZERO_MV_PREF==0)  /*  COMPUTE ZERO VECTOR FIRST !!!!!*/
+            dmin -= PREF_NULL_VEC;
+#endif
+
+            /******************* local refinement ***************************/
+            center_again = 0;
+            last_loc = new_loc = 0;
+            //          ncand = ref + jmin*lx + imin;  /* center of the search */
+            step = 0;
+            dn[0] = dmin;
+            while (!center_again && step <= max_step)
+            {
+
+                MoveNeighborSAD(dn, last_loc);
+
+                center_again = 1;
+                i = imin;
+                j = jmin - 1;
+                cand = ref + i + j * lx;
+
+                /*  starting from [0,-1] */
+                /* spiral check one step at a time*/
+                for (k = 2; k <= 8; k += 2)
+                {
+                    if (!tab_exclude[last_loc][k]) /* exclude last step computation */
+                    {       /* not already computed */
+                        if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+                        {
+                            d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
+                            dn[k] = d; /* keep it for half pel use */
+
+                            if (d < dmin)
+                            {
+                                ncand = cand;
+                                dmin = d;
+                                imin = i;
+                                jmin = j;
+                                center_again = 0;
+                                new_loc = k;
+                            }
+                            else if ((d == dmin) && PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
+                            {
+                                ncand = cand;
+                                imin = i;
+                                jmin = j;
+                                center_again = 0;
+                                new_loc = k;
+                            }
+                        }
+                    }
+                    if (k == 8)  /* end side search*/
+                    {
+                        if (!center_again)
+                        {
+                            k = -1; /* start diagonal search */
+                            cand -= lx;
+                            j--;
+                        }
+                    }
+                    else
+                    {
+                        next = refine_next[k][0];
+                        i += next;
+                        cand += next;
+                        next = refine_next[k][1];
+                        j += next;
+                        cand += lx * next;
+                    }
+                }
+                last_loc = new_loc;
+                step ++;
+            }
+            if (!center_again)
+                MoveNeighborSAD(dn, last_loc);
+
+            *hp_guess = FindMin(dn);
+
+        }
+
+#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
+        if (d0 - PREF_NULL_VEC < dmin)
+        {
+            ncand = ref + i0 + j0 * lx;
+            dmin = d0;
+            imin = i0;
+            jmin = j0;
+        }
+#endif
+        mot[mbnum][0].sad = dmin;
+        mot[mbnum][0].x = (imin - i0) << 1;
+        mot[mbnum][0].y = (jmin - j0) << 1;
+        imin0 = imin << 1;  /* 16x16 MV in half-pel resolution */
+        jmin0 = jmin << 1;
+        best_cand[0] = ncand;
+    }
+    /* imin and jmin are the best 1MV */
+#ifndef NO_INTER4V
+    /*******************  Find 4 motion vectors ****************************/
+    if (use_4mv && !h263_mode)
+    {
+#ifdef _SAD_STAT
+        num_Blk += 4;
+#endif
+        /* starting from the best 1MV */
+        //offset = imin + jmin*lx;
+        iorg = i0;
+        jorg = j0;
+
+        for (comp = 0; comp < 4; comp++)
+        {
+            i0 = iorg + ((comp & 1) << 3);
+            j0 = jorg + ((comp & 2) << 2);
+
+            imin = (imin0 >> 1) + ((comp & 1) << 3);    /* starting point from 16x16 MV */
+            jmin = (jmin0 >> 1) + ((comp & 2) << 2);
+            ncand = ref + imin + jmin * lx;
+
+            cur8 = cur + ((comp & 1) << 3) + (((comp & 2) << 2) << 4) ; /* 11/30/05, smaller cache */
+
+            /*  find limit of the search (adjusting search range)*/
+            ilow = i0 - range;
+            ihigh = i0 + range - 1 ;/* 4/9/01 */
+            if (ilow < -15)
+                ilow = -15;
+            if (ihigh > width - 1)
+                ihigh = width - 1;
+            jlow = j0 - range;
+            jhigh = j0 + range - 1 ;/* 4/9/01 */
+            if (jlow < -15)
+                jlow = -15;
+            if (jhigh > height - 1)
+                jhigh = height - 1;
+
+            SAD_Block = video->functionPointer->SAD_Block;
+
+            if (FS_en)  /* fullsearch enable, center around 16x16 MV */
+            {
+                dmin =  fullsearchBlk(video, currVol, ncand, cur8, &imin, &jmin, ilow, ihigh, jlow, jhigh, range);
+                ncand = ref + imin + jmin * lx;
+
+                mot[mbnum][comp+1].sad = dmin;
+                mot[mbnum][comp+1].x = (imin - i0) << 1;
+                mot[mbnum][comp+1].y = (jmin - j0) << 1;
+                best_cand[comp+1] = ncand;
+            }
+            else    /* no fullsearch, do local search */
+            {
+                /* starting point from 16x16 */
+                dmin = (*SAD_Block)(ncand, cur8, 65536, lx, extra_info);
+
+                /******************* local refinement ***************************/
+                center_again = 0;
+                last_loc = 0;
+
+                while (!center_again)
+                {
+                    center_again = 1;
+                    i = imin;
+                    j = jmin - 1;
+                    cand = ref + i + j * lx;
+
+                    /*  starting from [0,-1] */
+                    /* spiral check one step at a time*/
+                    for (k = 2; k <= 8; k += 2)
+                    {
+                        if (!tab_exclude[last_loc][k]) /* exclude last step computation */
+                        {       /* not already computed */
+                            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+                            {
+                                d = (*SAD_Block)(cand, cur8, dmin, lx, extra_info);
+
+                                if (d < dmin)
+                                {
+                                    ncand = cand;
+                                    dmin = d;
+                                    imin = i;
+                                    jmin = j;
+                                    center_again = 0;
+                                    new_loc = k;
+                                }
+                                else if ((d == dmin) &&
+                                         PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
+                                {
+                                    ncand = cand;
+                                    imin = i;
+                                    jmin = j;
+                                    center_again = 0;
+                                    new_loc = k;
+                                }
+                            }
+                        }
+                        if (k == 8)  /* end side search*/
+                        {
+                            if (!center_again)
+                            {
+                                k = -1; /* start diagonal search */
+                                if (j <= height - 1 && j > 0)   cand -= lx;
+                                j--;
+                            }
+                        }
+                        else
+                        {
+                            next = refine_next[k][0];
+                            cand += next;
+                            i += next;
+                            next = refine_next[k][1];
+                            cand += lx * next;
+                            j += next;
+                        }
+                    }
+                    last_loc = new_loc;
+                }
+                mot[mbnum][comp+1].sad = dmin;
+                mot[mbnum][comp+1].x = (imin - i0) << 1;
+                mot[mbnum][comp+1].y = (jmin - j0) << 1;
+                best_cand[comp+1] = ncand;
+            }
+            /********************************************/
+        }
+    }
+    else
+#endif  /* NO_INTER4V */
+    {
+        mot[mbnum][1].sad = mot[mbnum][2].sad = mot[mbnum][3].sad = mot[mbnum][4].sad = (dmin + 2) >> 2;
+        mot[mbnum][1].x = mot[mbnum][2].x = mot[mbnum][3].x = mot[mbnum][4].x = mot[mbnum][0].x;
+        mot[mbnum][1].y = mot[mbnum][2].y = mot[mbnum][3].y = mot[mbnum][4].y = mot[mbnum][0].y;
+        best_cand[1] = best_cand[2] = best_cand[3] = best_cand[4] = ncand;
+
+    }
+    return ;
+}
+
+
+/*===============================================================================
+    Function:   fullsearch
+    Date:       09/16/2000
+    Purpose:    Perform full-search motion estimation over the search region
+                in a spiral-outward manner.
+    Input/Output:   VideoEncData, current Vol, previous Vop, pointer to the left corner of
+                the current VOP, current coord (also output), boundaries.
+===============================================================================*/
+
+Int fullsearch(VideoEncData *video, Vol *currVol, UChar *prev, UChar *cur,
+               Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh)
+{
+    Int range = video->encParams->SearchRange;
+    UChar *cand;
+    Int i, j, k, l;
+    Int d, dmin;
+    Int i0 = *imin; /* current position */
+    Int j0 = *jmin;
+    Int(*SAD_Macroblock)(UChar*, UChar*, Int, void*) = video->functionPointer->SAD_Macroblock;
+    void *extra_info = video->sad_extra_info;
+//  UChar h263_mode = video->encParams->H263_Enabled;
+    Int lx = video->currVop->pitch; /* with padding */
+
+    Int offset = i0 + j0 * lx;
+
+    OSCL_UNUSED_ARG(currVol);
+
+    cand = prev + offset;
+
+    dmin  = (*SAD_Macroblock)(cand, cur, (65535 << 16) | lx, (void*)extra_info) - PREF_NULL_VEC;
+
+    /* perform spiral search */
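+    /* The loop below walks square rings of radius k around (i0,j0): starting
+       at (-k,-k) it takes 2k steps right, 2k steps down, 2k steps left and
+       2k steps up, visiting the 8k positions on the ring.  Ring for k = 1,
+       as offsets relative to (i0,j0):
+       (-1,-1) (0,-1) (1,-1) (1,0) (1,1) (0,1) (-1,1) (-1,0). */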
+    for (k = 1; k <= range; k++)
+    {
+
+        i = i0 - k;
+        j = j0 - k;
+
+        cand = prev + i + j * lx;
+
+        for (l = 0; l < 8*k; l++)
+        {
+            /* no need for boundary checking again */
+            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+            {
+                d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, (void*)extra_info);
+
+                if (d < dmin)
+                {
+                    dmin = d;
+                    *imin = i;
+                    *jmin = j;
+                }
+                else if ((d == dmin) && PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - *imin) + PV_ABS(j0 - *jmin))
+                {
+                    dmin = d;
+                    *imin = i;
+                    *jmin = j;
+                }
+            }
+
+            if (l < (k << 1))
+            {
+                i++;
+                cand++;
+            }
+            else if (l < (k << 2))
+            {
+                j++;
+                cand += lx;
+            }
+            else if (l < ((k << 2) + (k << 1)))
+            {
+                i--;
+                cand--;
+            }
+            else
+            {
+                j--;
+                cand -= lx;
+            }
+        }
+    }
+
+    return dmin;
+}
+
+#ifndef NO_INTER4V
+/*===============================================================================
+    Function:   fullsearchBlk
+    Date:       01/9/2001
+    Purpose:    Perform full-search motion estimation of an 8x8 block over the
+                search region in a spiral-outward manner, centered at the 16x16 MV.
+    Input/Output:   VideoEncData, MB coordinate, pointer to the initial MV on the
+                reference, pointer to the coordinates of the current block, search range.
+===============================================================================*/
+Int fullsearchBlk(VideoEncData *video, Vol *currVol, UChar *cent, UChar *cur,
+                  Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh, Int range)
+{
+    UChar *cand, *ref;
+    Int i, j, k, l, istart, jstart;
+    Int d, dmin;
+    Int lx = video->currVop->pitch; /* with padding */
+    Int(*SAD_Block)(UChar*, UChar*, Int, Int, void*) = video->functionPointer->SAD_Block;
+    void *extra_info = video->sad_extra_info;
+
+    OSCL_UNUSED_ARG(currVol);
+
+    /* starting point centered at 16x16 MV */
+    ref = cent;
+    istart = *imin;
+    jstart = *jmin;
+
+    dmin = (*SAD_Block)(ref, cur, 65536, lx, (void*)extra_info);
+
+    cand = ref;
+    /* perform spiral search */
+    for (k = 1; k <= range; k++)
+    {
+
+        i = istart - k;
+        j = jstart - k;
+        cand -= (lx + 1);  /* candidate region */
+
+        for (l = 0; l < 8*k; l++)
+        {
+            /* no need for boundary checking again */
+            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
+            {
+                d = (*SAD_Block)(cand, cur, dmin, lx, (void*)extra_info);
+
+                if (d < dmin)
+                {
+                    dmin = d;
+                    *imin = i;
+                    *jmin = j;
+                }
+                else if ((d == dmin) &&
+                         PV_ABS(istart - i) + PV_ABS(jstart - j) < PV_ABS(istart - *imin) + PV_ABS(jstart - *jmin))
+                {
+                    dmin = d;
+                    *imin = i;
+                    *jmin = j;
+                }
+            }
+
+            if (l < (k << 1))
+            {
+                i++;
+                cand++;
+            }
+            else if (l < (k << 2))
+            {
+                j++;
+                cand += lx;
+            }
+            else if (l < ((k << 2) + (k << 1)))
+            {
+                i--;
+                cand--;
+            }
+            else
+            {
+                j--;
+                cand -= lx;
+            }
+        }
+    }
+
+    return dmin;
+}
+#endif /* NO_INTER4V */
+
+/*===============================================================================
+    Function:   CandidateSelection
+    Date:       09/16/2000
+    Purpose:    Fill up the list of candidates using the spatio-temporal correlation
+                among neighboring blocks.
+    Input/Output:   type_pred = 0: first pass, 1: second pass, or no SCD
+    Modified:    09/23/01, get rid of redundant candidates before passing back.
+===============================================================================*/
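+/* Summary of the scheme below: when the reference is a P-VOP, up to five
+   spatio-temporal candidates are collected (the co-located MB of the previous
+   frame plus selected neighbors); otherwise only spatial neighbors of the
+   current frame are used.  Stored MVs are shifted right by one, presumably to
+   convert half-pel MVs into integer-pel search positions. */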
+
+void CandidateSelection(Int *mvx, Int *mvy, Int *num_can, Int imb, Int jmb,
+                        VideoEncData *video, Int type_pred)
+{
+    MOT **mot = video->mot;
+    MOT *pmot;
+    Int mbnum = video->mbnum;
+    Vol *currVol = video->vol[video->currLayer];
+    Int mbwidth = currVol->nMBPerRow;
+    Int mbheight = currVol->nMBPerCol;
+    Int i, j, same, num1;
+
+    *num_can = 0;
+
+    if (video->forwardRefVop->predictionType == P_VOP)
+    {
+        /* Spatio-Temporal Candidate (five candidates) */
+        if (type_pred == 0) /* first pass */
+        {
+            pmot = &mot[mbnum][0]; /* same coordinate previous frame */
+            mvx[(*num_can)] = (pmot->x) >> 1;
+            mvy[(*num_can)++] = (pmot->y) >> 1;
+            if (imb >= (mbwidth >> 1) && imb > 0)  /*left neighbor previous frame */
+            {
+                pmot = &mot[mbnum-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            else if (imb + 1 < mbwidth)   /*right neighbor previous frame */
+            {
+                pmot = &mot[mbnum+1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+
+            if (jmb < mbheight - 1)  /*bottom neighbor previous frame */
+            {
+                pmot = &mot[mbnum+mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            else if (jmb > 0)   /*upper neighbor previous frame */
+            {
+                pmot = &mot[mbnum-mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+
+            if (imb > 0 && jmb > 0)  /* upper-left neighbor current frame*/
+            {
+                pmot = &mot[mbnum-mbwidth-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb > 0 && imb < mbwidth - 1)  /* upper-right neighbor current frame */
+            {
+                pmot = &mot[mbnum-mbwidth+1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+        }
+        else    /* second pass */
+            /* original ST1 algorithm */
+        {
+            pmot = &mot[mbnum][0]; /* same coordinate previous frame */
+            mvx[(*num_can)] = (pmot->x) >> 1;
+            mvy[(*num_can)++] = (pmot->y) >> 1;
+
+            if (imb > 0)  /*left neighbor current frame */
+            {
+                pmot = &mot[mbnum-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb > 0)  /*upper neighbor current frame */
+            {
+                pmot = &mot[mbnum-mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (imb < mbwidth - 1)  /*right neighbor previous frame */
+            {
+                pmot = &mot[mbnum+1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb < mbheight - 1)  /*bottom neighbor previous frame */
+            {
+                pmot = &mot[mbnum+mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+        }
+    }
+    else  /* only Spatial Candidate (four candidates)*/
+    {
+        if (type_pred == 0) /*first pass*/
+        {
+            if (imb > 1)  /* neighbor two blocks away to the left */
+            {
+                pmot = &mot[mbnum-2][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (imb > 0 && jmb > 0)  /* upper-left neighbor */
+            {
+                pmot = &mot[mbnum-mbwidth-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb > 0 && imb < mbwidth - 1)  /* upper-right neighbor */
+            {
+                pmot = &mot[mbnum-mbwidth+1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+        }
+//#ifdef SCENE_CHANGE_DETECTION
+        /* second pass (ST2 algorithm)*/
+        else if (type_pred == 1) /* 4/7/01 */
+        {
+            if (imb > 0)  /*left neighbor current frame */
+            {
+                pmot = &mot[mbnum-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb > 0)  /*upper neighbor current frame */
+            {
+                pmot = &mot[mbnum-mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (imb < mbwidth - 1)  /*right neighbor current frame */
+            {
+                pmot = &mot[mbnum+1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+            if (jmb < mbheight - 1)  /*bottom neighbor current frame */
+            {
+                pmot = &mot[mbnum+mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+            }
+        }
+//#else
+        else /* original ST1 algorithm */
+        {
+            if (imb > 0)  /*left neighbor current frame */
+            {
+                pmot = &mot[mbnum-1][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+
+                if (jmb > 0)  /*upper-left neighbor current frame */
+                {
+                    pmot = &mot[mbnum-mbwidth-1][0];
+                    mvx[(*num_can)] = (pmot->x) >> 1;
+                    mvy[(*num_can)++] = (pmot->y) >> 1;
+                }
+
+            }
+            if (jmb > 0)  /*upper neighbor current frame */
+            {
+                pmot = &mot[mbnum-mbwidth][0];
+                mvx[(*num_can)] = (pmot->x) >> 1;
+                mvy[(*num_can)++] = (pmot->y) >> 1;
+
+                if (imb < mbwidth - 1)  /*upper-right neighbor current frame */
+                {
+                    pmot = &mot[mbnum-mbwidth+1][0];
+                    mvx[(*num_can)] = (pmot->x) >> 1;
+                    mvy[(*num_can)++] = (pmot->y) >> 1;
+                }
+            }
+        }
+//#endif
+    }
+
+    /* 3/23/01, remove redundant candidates (possible k-means) */
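+    /* The loop below keeps a candidate only if its L1 distance to every
+       already-kept candidate is at least CANDIDATE_DISTANCE (or, when
+       CANDIDATE_DISTANCE is 0, only if it is not an exact duplicate). */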
+    num1 = *num_can;
+    *num_can = 1;
+    for (i = 1; i < num1; i++)
+    {
+        same = 0;
+        j = 0;
+        while (!same && j < *num_can)
+        {
+#if (CANDIDATE_DISTANCE==0)
+            if (mvx[i] == mvx[j] && mvy[i] == mvy[j])
+#else
+            // modified k-means, 3/24/01; the distance threshold shouldn't be greater than 3
+            if (PV_ABS(mvx[i] - mvx[j]) + PV_ABS(mvy[i] - mvy[j]) < CANDIDATE_DISTANCE)
+#endif
+                same = 1;
+            j++;
+        }
+        if (!same)
+        {
+            mvx[*num_can] = mvx[i];
+            mvy[*num_can] = mvy[i];
+            (*num_can)++;
+        }
+    }
+
+#ifdef _SAD_STAT
+    num_cand += (*num_can);
+#endif
+
+    if (num1 == 5 && *num_can == 1)
+        *num_can = ALL_CAND_EQUAL; /* all are equal */
+
+    return ;
+}
+
+/*===========================================================================
+    Function:   RasterIntraUpdate
+    Date:       2/26/01
+    Purpose:    Assign INTRA-update macroblocks in raster-scan order.
+                numRefresh macroblocks are updated per call (the count is programmable).
+===========================================================================*/
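+/* intraArray[] records which MBs have already been refreshed in the current
+   cycle; Mode[] receives MODE_INTRA for the (up to) numRefresh MBs selected
+   in this call. */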
+void RasterIntraUpdate(UChar *intraArray, UChar *Mode, Int totalMB, Int numRefresh)
+{
+    Int indx, i;
+
+    /* find the first MB that has not been refreshed yet */
+    indx = 0;
+    while (indx < totalMB && intraArray[indx] == 1)
+        indx++;
+
+    /* mark the next numRefresh MBs for intra refresh */
+    for (i = 0; i < numRefresh && indx < totalMB; i++)
+    {
+        Mode[indx] = MODE_INTRA;
+        intraArray[indx++] = 1;
+    }
+
+    /* if we reached the end of the frame, reset the flags and wrap around */
+    if (indx >= totalMB - 1)
+    {
+        ResetIntraUpdate(intraArray, totalMB);
+        indx = 0;
+        while (i < numRefresh && indx < totalMB)
+        {
+            intraArray[indx] = 1;
+            Mode[indx++] = MODE_INTRA;
+            i++;
+        }
+    }
+
+    return ;
+}
+
+/*===========================================================================
+    Function:   ResetIntraUpdate
+    Date:       11/28/00
+    Purpose:    Reset the intra-update flags to all zeros
+===========================================================================*/
+
+void ResetIntraUpdate(UChar *intraArray, Int totalMB)
+{
+    M4VENC_MEMSET(intraArray, 0, sizeof(UChar)*totalMB);
+    return ;
+}
+
+/*===========================================================================
+    Function:   ResetIntraUpdateRegion
+    Date:       12/1/00
+    Purpose:    Reset the intra-update flags within one region to all zeros
+===========================================================================*/
+void ResetIntraUpdateRegion(UChar *intraArray, Int start_i, Int rwidth,
+                            Int start_j, Int rheight, Int mbwidth, Int mbheight)
+{
+    Int indx, j;
+
+    if (start_i + rwidth >= mbwidth)
+        rwidth = mbwidth - start_i;
+    if (start_j + rheight >= mbheight)
+        rheight = mbheight - start_j;
+
+    for (j = start_j; j < start_j + rheight; j++)
+    {
+        indx = j * mbwidth;
+        M4VENC_MEMSET(intraArray + indx + start_i, 0, sizeof(UChar)*rwidth);
+    }
+
+    return ;
+}
+
+/*************************************************************
+    Function:   MoveNeighborSAD
+    Date:       3/27/01
+    Purpose:    Shift the stored neighboring SAD values when the search center moves
+*************************************************************/
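+/* dn[0] holds the SAD at the current search center and dn[1..8] hold its eight
+   neighbors.  When the center moves to position new_loc, SADs that remain
+   inside the new 3x3 neighborhood are carried over and all others are reset
+   to 65536, i.e. "not yet computed". */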
+
+void MoveNeighborSAD(Int dn[], Int new_loc)
+{
+    Int tmp[9];
+    tmp[0] = dn[0];
+    tmp[1] = dn[1];
+    tmp[2] = dn[2];
+    tmp[3] = dn[3];
+    tmp[4] = dn[4];
+    tmp[5] = dn[5];
+    tmp[6] = dn[6];
+    tmp[7] = dn[7];
+    tmp[8] = dn[8];
+    dn[0] = dn[1] = dn[2] = dn[3] = dn[4] = dn[5] = dn[6] = dn[7] = dn[8] = 65536;
+
+    switch (new_loc)
+    {
+        case 0:
+            break;
+        case 1:
+            dn[4] = tmp[2];
+            dn[5] = tmp[0];
+            dn[6] = tmp[8];
+            break;
+        case 2:
+            dn[4] = tmp[3];
+            dn[5] = tmp[4];
+            dn[6] = tmp[0];
+            dn[7] = tmp[8];
+            dn[8] = tmp[1];
+            break;
+        case 3:
+            dn[6] = tmp[4];
+            dn[7] = tmp[0];
+            dn[8] = tmp[2];
+            break;
+        case 4:
+            dn[1] = tmp[2];
+            dn[2] = tmp[3];
+            dn[6] = tmp[5];
+            dn[7] = tmp[6];
+            dn[8] = tmp[0];
+            break;
+        case 5:
+            dn[1] = tmp[0];
+            dn[2] = tmp[4];
+            dn[8] = tmp[6];
+            break;
+        case 6:
+            dn[1] = tmp[8];
+            dn[2] = tmp[0];
+            dn[3] = tmp[4];
+            dn[4] = tmp[5];
+            dn[8] = tmp[7];
+            break;
+        case 7:
+            dn[2] = tmp[8];
+            dn[3] = tmp[0];
+            dn[4] = tmp[6];
+            break;
+        case 8:
+            dn[2] = tmp[1];
+            dn[3] = tmp[2];
+            dn[4] = tmp[0];
+            dn[5] = tmp[6];
+            dn[6] = tmp[7];
+            break;
+    }
+    dn[0] = tmp[new_loc];
+
+    return ;
+}
+
+/* 3/28/01, return the index (1..8) of the smallest neighboring SAD in dn[] */
+
+Int FindMin(Int dn[])
+{
+    Int min, i;
+    Int dmin;
+
+    dmin = dn[1];
+    min = 1;
+    for (i = 2; i < 9; i++)
+    {
+        if (dn[i] < dmin)
+        {
+            dmin = dn[i];
+            min = i;
+        }
+    }
+
+    return min;
+}
+
+
+
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h b/media/codecs/m4v_h263/enc/src/mp4def.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
rename to media/codecs/m4v_h263/enc/src/mp4def.h
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
new file mode 100644
index 0000000..234faef
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -0,0 +1,3305 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+
+#include "mp4enc_lib.h"
+#include "bitstream_io.h"
+#include "rate_control.h"
+#include "m4venc_oscl.h"
+
+#ifndef INT32_MAX
+#define INT32_MAX 0x7fffffff
+#endif
+
+#ifndef SIZE_MAX
+#define SIZE_MAX ((size_t) -1)
+#endif
+
+/* Inverse normal zigzag */
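+/* zigzag_i[k] gives the raster-scan index (row*8 + col) of the k-th
+   coefficient in zigzag scan order. */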
+const static Int zigzag_i[NCOEFF_BLOCK] =
+{
+    0, 1, 8, 16, 9, 2, 3, 10,
+    17, 24, 32, 25, 18, 11, 4, 5,
+    12, 19, 26, 33, 40, 48, 41, 34,
+    27, 20, 13, 6, 7, 14, 21, 28,
+    35, 42, 49, 56, 57, 50, 43, 36,
+    29, 22, 15, 23, 30, 37, 44, 51,
+    58, 59, 52, 45, 38, 31, 39, 46,
+    53, 60, 61, 54, 47, 55, 62, 63
+};
+
+/* INTRA */
+const static Int mpeg_iqmat_def[NCOEFF_BLOCK] =
+    {  8, 17, 18, 19, 21, 23, 25, 27,
+       17, 18, 19, 21, 23, 25, 27, 28,
+       20, 21, 22, 23, 24, 26, 28, 30,
+       21, 22, 23, 24, 26, 28, 30, 32,
+       22, 23, 24, 26, 28, 30, 32, 35,
+       23, 24, 26, 28, 30, 32, 35, 38,
+       25, 26, 28, 30, 32, 35, 38, 41,
+       27, 28, 30, 32, 35, 38, 41, 45
+    };
+
+/* INTER */
+const static Int mpeg_nqmat_def[64]  =
+    { 16, 17, 18, 19, 20, 21, 22, 23,
+      17, 18, 19, 20, 21, 22, 23, 24,
+      18, 19, 20, 21, 22, 23, 24, 25,
+      19, 20, 21, 22, 23, 24, 26, 27,
+      20, 21, 22, 23, 25, 26, 27, 28,
+      21, 22, 23, 24, 26, 27, 28, 30,
+      22, 23, 24, 26, 27, 28, 30, 31,
+      23, 24, 25, 27, 28, 30, 31, 33
+    };
+
+/* Profiles and levels */
+/* Simple profile (level 0-3) and Core profile (level 1-2) */
+/* {SPL0, SPL1, SPL2, SPL3, CPL1, CPL2, CPL2, CPL2}                            */
+/* SPL0: Simple Profile@Level0, CPL1: Core Profile@Level1;                     */
+/* the last two entries are redundant for easy table manipulation.             */
+const static Int profile_level_code[8] =
+{
+    0x08, 0x01, 0x02, 0x03, 0x21, 0x22, 0x22, 0x22
+};
+
+const static Int profile_level_max_bitrate[8] =
+{
+    64000, 64000, 128000, 384000, 384000, 2000000, 2000000, 2000000
+};
+
+const static Int profile_level_max_packet_size[8] =
+{
+    2048, 2048, 4096, 8192, 4096, 8192, 8192, 8192
+};
+
+const static Int profile_level_max_mbsPerSec[8] =
+{
+    1485, 1485, 5940, 11880, 5940, 23760, 23760, 23760
+};
+
+const static Int profile_level_max_VBV_size[8] =
+{
+    163840, 163840, 655360, 655360, 262144, 1310720, 1310720, 1310720
+};
+
+
+/* Simple scalable profile (level 0-2) and Core scalable profile (level 1-3) */
+/* {SSPL0, SSPL1, SSPL2, SSPL2, CSPL1, CSPL2, CSPL3, CSPL3}                    */
+/* SSPL0: Simple Scalable Profile@Level0, CSPL1: Core Scalable Profile@Level1; */
+/* the fourth entry is redundant for easy table manipulation.                  */
+
+const static Int scalable_profile_level_code[8] =
+{
+    0x10, 0x11, 0x12, 0x12, 0xA1, 0xA2, 0xA3, 0xA3
+};
+
+const static Int scalable_profile_level_max_bitrate[8] =
+{
+    128000, 128000, 256000, 256000, 768000, 1500000, 4000000, 4000000
+};
+
+/* in bits */
+const static Int scalable_profile_level_max_packet_size[8] =
+{
+    2048, 2048, 4096, 4096, 4096, 4096, 16384, 16384
+};
+
+const static Int scalable_profile_level_max_mbsPerSec[8] =
+{
+    1485, 7425, 23760, 23760, 14850, 29700, 120960, 120960
+};
+
+const static Int scalable_profile_level_max_VBV_size[8] =
+{
+    163840, 655360, 655360, 655360, 1048576, 1310720, 1310720, 1310720
+};
+
+
+/* H263 profile 0 @ level 10-70 */
+const static Int   h263Level[8] = {0, 10, 20, 30, 40, 50, 60, 70};
+const static float rBR_bound[8] = {0, 1, 2, 6, 32, 64, 128, 256};
+const static float max_h263_framerate[2] = {(float)30000 / (float)2002,
+        (float)30000 / (float)1001
+                                           };
+const static Int   max_h263_width[2]  = {176, 352};
+const static Int   max_h263_height[2] = {144, 288};
+
+/* 6/2/2001, newly added functions to make PVEncodeVop more readable. */
+Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime);
+void DetermineVopType(VideoEncData *video, Int currLayer);
+Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status);
+Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized);
+
+#ifdef PRINT_RC_INFO
+extern FILE *facct;
+extern int tiTotalNumBitsGenerated;
+extern int iStuffBits;
+#endif
+
+#ifdef PRINT_EC
+extern FILE *fec;
+#endif
+
+
+/* ======================================================================== */
+/*  Function : PVGetDefaultEncOption()                                      */
+/*  Date     : 12/12/2005                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if successful, PV_FALSE if failed.                   */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetDefaultEncOption(VideoEncOptions *encOption, Int encUseCase)
+{
+    VideoEncOptions defaultUseCase = {H263_MODE, profile_level_max_packet_size[SIMPLE_PROFILE_LEVEL0] >> 3,
+                                      SIMPLE_PROFILE_LEVEL0, PV_OFF, 0, 1, 1000, 33, {144, 144}, {176, 176}, {15, 30}, {64000, 128000},
+                                      {10, 10}, {12, 12}, {0, 0}, CBR_1, 0.0, PV_OFF, -1, 0, PV_OFF, 16, PV_OFF, 0, PV_ON
+                                     };
+
+    OSCL_UNUSED_ARG(encUseCase); // unused for now. Later we can add more default settings and use this
+    // argument to select the right one.
+    /* in the future we can create more meaningful use-cases */
+    if (encOption == NULL)
+    {
+        return PV_FALSE;
+    }
+
+    M4VENC_MEMCPY(encOption, &defaultUseCase, sizeof(VideoEncOptions));
+
+    return PV_TRUE;
+}
+
+/* ======================================================================== */
+/*  Function : PVInitVideoEncoder()                                         */
+/*  Date     : 08/22/2000                                                   */
+/*  Purpose  : Initialization of MP4 Encoder and VO bitstream               */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if successful, PV_FALSE if failed.                   */
+/*  Modified :  5/21/01, allocate only yChan and assign uChan & vChan   */
+/*              12/12/05, add encoding option as input argument         */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool    PVInitVideoEncoder(VideoEncControls *encoderControl, VideoEncOptions *encOption)
+{
+
+    Bool        status = PV_TRUE;
+    Int         nLayers, idx, i, j;
+    Int         max = 0, max_width = 0, max_height = 0, pitch, offset;
+    Int         size = 0, nTotalMB = 0;
+    VideoEncData *video;
+    Vol         *pVol;
+    VideoEncParams  *pEncParams;
+    Int         temp_w, temp_h, mbsPerSec;
+
+    /******************************************/
+    /*      this part used to be PVSetEncode() */
+    Int profile_table_index, *profile_level_table;
+    Int profile_level = encOption->profile_level;
+    Int PacketSize = encOption->packetSize << 3;
+    Int timeInc, timeIncRes;
+    float profile_max_framerate;
+    VideoEncParams *encParams;
+
+    if (encoderControl->videoEncoderData) /* this has been called */
+    {
+        if (encoderControl->videoEncoderInit) /* check if PVInitVideoEncoder() has been called  */
+        {
+            PVCleanUpVideoEncoder(encoderControl);
+            encoderControl->videoEncoderInit = 0;
+        }
+
+        M4VENC_FREE(encoderControl->videoEncoderData);
+        encoderControl->videoEncoderData = NULL;
+    }
+    encoderControl->videoEncoderInit = 0;   /* reset this value */
+
+    video = (VideoEncData *)M4VENC_MALLOC(sizeof(VideoEncData)); /* allocate memory for encData */
+
+    if (video == NULL)
+        return PV_FALSE;
+
+    M4VENC_MEMSET(video, 0, sizeof(VideoEncData));
+
+    encoderControl->videoEncoderData = (void *) video;         /* set up pointer in VideoEncData structure */
+
+    video->encParams = (VideoEncParams *)M4VENC_MALLOC(sizeof(VideoEncParams));
+    if (video->encParams == NULL)
+        goto CLEAN_UP;
+
+    M4VENC_MEMSET(video->encParams, 0, sizeof(VideoEncParams));
+
+    encParams = video->encParams;
+    encParams->nLayers = encOption->numLayers;
+
+    /* Check whether the input packet size is valid. (Note: this check is done before any memory allocation in order to avoid memory leaks.) */
+    if ((Int)profile_level < (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0))  /* non-scalable profile */
+    {
+        profile_level_table = (Int *)profile_level_max_packet_size;
+        profile_table_index = (Int)profile_level;
+        if (encParams->nLayers != 1)
+        {
+            goto CLEAN_UP;
+        }
+
+        encParams->LayerMaxMbsPerSec[0] = profile_level_max_mbsPerSec[profile_table_index];
+
+    }
+    else   /* scalable profile */
+    {
+        profile_level_table = (Int *)scalable_profile_level_max_packet_size;
+        profile_table_index = (Int)profile_level - (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0);
+        if (encParams->nLayers < 2)
+        {
+            goto CLEAN_UP;
+        }
+        for (i = 0; i < encParams->nLayers; i++)
+        {
+            encParams->LayerMaxMbsPerSec[i] = scalable_profile_level_max_mbsPerSec[profile_table_index];
+        }
+
+    }
+
+    /* cannot have zero size packet with these modes */
+    if (PacketSize == 0)
+    {
+        if (encOption->encMode == DATA_PARTITIONING_MODE)
+        {
+            goto CLEAN_UP;
+        }
+        if (encOption->encMode == COMBINE_MODE_WITH_ERR_RES)
+        {
+            encOption->encMode = COMBINE_MODE_NO_ERR_RES;
+        }
+    }
+
+    if (encOption->gobHeaderInterval == 0)
+    {
+        if (encOption->encMode == H263_MODE_WITH_ERR_RES)
+        {
+            encOption->encMode = H263_MODE;
+        }
+
+        if (encOption->encMode == SHORT_HEADER_WITH_ERR_RES)
+        {
+            encOption->encMode = SHORT_HEADER;
+        }
+    }
+
+    if (PacketSize > profile_level_table[profile_table_index])
+        goto CLEAN_UP;
+
+    /* Initial Defaults for all Modes */
+
+    encParams->SequenceStartCode = 1;
+    encParams->GOV_Enabled = 0;
+    encParams->RoundingType = 0;
+    encParams->IntraDCVlcThr = PV_MAX(PV_MIN(encOption->intraDCVlcTh, 7), 0);
+    encParams->ACDCPrediction = ((encOption->useACPred == PV_ON) ? TRUE : FALSE);
+    encParams->RC_Type = encOption->rcType;
+    encParams->Refresh = encOption->numIntraMB;
+    encParams->ResyncMarkerDisable = 0; /* Enable Resync Marker */
+
+    for (i = 0; i < encOption->numLayers; i++)
+    {
+#ifdef NO_MPEG_QUANT
+        encParams->QuantType[i] = 0;
+#else
+        encParams->QuantType[i] = encOption->quantType[i];      /* H263 */
+#endif
+        if (encOption->pQuant[i] >= 1 && encOption->pQuant[i] <= 31)
+        {
+            encParams->InitQuantPvop[i] = encOption->pQuant[i];
+        }
+        else
+        {
+            goto CLEAN_UP;
+        }
+        if (encOption->iQuant[i] >= 1 && encOption->iQuant[i] <= 31)
+        {
+            encParams->InitQuantIvop[i] = encOption->iQuant[i];
+        }
+        else
+        {
+            goto CLEAN_UP;
+        }
+    }
+
+    encParams->HalfPel_Enabled = 1;
+    encParams->SearchRange = encOption->searchRange; /* 4/16/2001 */
+    encParams->FullSearch_Enabled = 0;
+#ifdef NO_INTER4V
+    encParams->MV8x8_Enabled = 0;
+#else
+    encParams->MV8x8_Enabled = 0;// comment out for now!! encOption->mv8x8Enable;
+#endif
+    encParams->H263_Enabled = 0;
+    encParams->GOB_Header_Interval = 0; // needs to be reset to 0
+    encParams->IntraPeriod = encOption->intraPeriod;    /* Intra update period (from encOption) */
+    encParams->SceneChange_Det = encOption->sceneDetect;
+    encParams->FineFrameSkip_Enabled = 0;
+    encParams->NoFrameSkip_Enabled = encOption->noFrameSkipped;
+    encParams->NoPreSkip_Enabled = encOption->noFrameSkipped;
+    encParams->GetVolHeader[0] = 0;
+    encParams->GetVolHeader[1] = 0;
+    encParams->ResyncPacketsize = encOption->packetSize << 3;
+    encParams->LayerMaxBitRate[0] = 0;
+    encParams->LayerMaxBitRate[1] = 0;
+    encParams->LayerMaxFrameRate[0] = (float)0.0;
+    encParams->LayerMaxFrameRate[1] = (float)0.0;
+    encParams->VBV_delay = encOption->vbvDelay;  /* VBV buffer delay in seconds */
+
+    switch (encOption->encMode)
+    {
+
+        case SHORT_HEADER:
+        case SHORT_HEADER_WITH_ERR_RES:
+
+            /* From Table 6-26 */
+            encParams->nLayers = 1;
+            encParams->QuantType[0] = 0;    /*H263 */
+            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+            encParams->DataPartitioning = 0; /* Combined Mode */
+            encParams->ReversibleVLC = 0;   /* Disable RVLC */
+            encParams->RoundingType = 0;
+            encParams->IntraDCVlcThr = 7;   /* use_intra_dc_vlc = 0 */
+            encParams->MV8x8_Enabled = 0;
+
+            encParams->GOB_Header_Interval = encOption->gobHeaderInterval;
+            encParams->H263_Enabled = 2;
+            encParams->GOV_Enabled = 0;
+            encParams->TimeIncrementRes = 30000;        /* timeIncrementRes for H263 */
+            break;
+
+        case H263_MODE:
+        case H263_MODE_WITH_ERR_RES:
+
+            /* From Table 6-26 */
+            encParams->nLayers = 1;
+            encParams->QuantType[0] = 0;    /*H263 */
+            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+            encParams->DataPartitioning = 0; /* Combined Mode */
+            encParams->ReversibleVLC = 0;   /* Disable RVLC */
+            encParams->RoundingType = 0;
+            encParams->IntraDCVlcThr = 7;   /* use_intra_dc_vlc = 0 */
+            encParams->MV8x8_Enabled = 0;
+
+            encParams->H263_Enabled = 1;
+            encParams->GOV_Enabled = 0;
+            encParams->TimeIncrementRes = 30000;        /* timeIncrementRes for H263 */
+
+            break;
+#ifndef H263_ONLY
+        case DATA_PARTITIONING_MODE:
+
+            encParams->DataPartitioning = 1;        /* Base Layer Data Partitioning */
+            encParams->ResyncMarkerDisable = 0; /* Resync Marker */
+#ifdef NO_RVLC
+            encParams->ReversibleVLC = 0;
+#else
+            encParams->ReversibleVLC = (encOption->rvlcEnable == PV_ON); /* RVLC when Data Partitioning */
+#endif
+            encParams->ResyncPacketsize = PacketSize;
+            break;
+
+        case COMBINE_MODE_WITH_ERR_RES:
+
+            encParams->DataPartitioning = 0;        /* Combined Mode */
+            encParams->ResyncMarkerDisable = 0; /* Resync Marker */
+            encParams->ReversibleVLC = 0;           /* No RVLC */
+            encParams->ResyncPacketsize = PacketSize;
+            break;
+
+        case COMBINE_MODE_NO_ERR_RES:
+
+            encParams->DataPartitioning = 0;        /* Combined Mode */
+            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
+            encParams->ReversibleVLC = 0;           /* No RVLC */
+            break;
+#endif
+        default:
+            goto CLEAN_UP;
+    }
+    /* Set the constraints (maximum values) according to the input profile and level */
+    /* Note that profile_table_index is already figured out above */
+
+    /* base layer */
+    encParams->profile_table_index    = profile_table_index; /* Used to limit the profile and level in SetProfile_BufferSize() */
+
+    /* check timeIncRes */
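+    /* timeIncRes must lie in [1, 65536] and tickPerSrc must be a nonzero value
+       smaller than timeIncRes; the source frame rate then works out to
+       timeIncRes / tickPerSrc frames per second (see the assignment below). */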
+    timeIncRes = encOption->timeIncRes;
+    timeInc = encOption->tickPerSrc;
+
+    if ((timeIncRes >= 1) && (timeIncRes <= 65536) && (timeInc < timeIncRes) && (timeInc != 0))
+    {
+        if (!encParams->H263_Enabled)
+        {
+            encParams->TimeIncrementRes = timeIncRes;
+        }
+        else
+        {
+            encParams->TimeIncrementRes = 30000;
+//          video->FrameRate = 30000/(float)1001; /* fix it to 29.97 fps */
+        }
+        video->FrameRate = timeIncRes / ((float)timeInc);
+    }
+    else
+    {
+        goto CLEAN_UP;
+    }
+
+    /* check frame dimension */
+    if (encParams->H263_Enabled)
+    {
+        switch (encOption->encWidth[0])
+        {
+            case 128:
+                if (encOption->encHeight[0] != 96) /* source_format = 1 */
+                    goto CLEAN_UP;
+                break;
+            case 176:
+                if (encOption->encHeight[0] != 144) /* source_format = 2 */
+                    goto CLEAN_UP;
+                break;
+            case 352:
+                if (encOption->encHeight[0] != 288) /* source_format = 3 */
+                    goto CLEAN_UP;
+                break;
+
+            case 704:
+                if (encOption->encHeight[0] != 576) /* source_format = 4 */
+                    goto CLEAN_UP;
+                break;
+            case 1408:
+                if (encOption->encHeight[0] != 1152) /* source_format = 5 */
+                    goto CLEAN_UP;
+                break;
+
+            default:
+                goto CLEAN_UP;
+        }
+    }
+    for (i = 0; i < encParams->nLayers; i++)
+    {
+        if (encOption->encHeight[i] == 0 || encOption->encWidth[i] == 0 ||
+                encOption->encHeight[i] % 16 != 0 || encOption->encWidth[i] % 16 != 0)
+            goto CLEAN_UP;
+        encParams->LayerHeight[i] = encOption->encHeight[i];
+        encParams->LayerWidth[i] = encOption->encWidth[i];
+    }
+
+    /* check frame rate */
+    for (i = 0; i < encParams->nLayers; i++)
+    {
+        encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
+    }
+
+    if (encParams->nLayers > 1)
+    {
+        if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
+                encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+            goto CLEAN_UP;
+    }
+    /* set max frame rate */
+    for (i = 0; i < encParams->nLayers; i++)
+    {
+
+        /* Make sure the maximum framerate is consistent with the given profile and level */
+        nTotalMB = ((encParams->LayerWidth[i] + 15) / 16) * ((encParams->LayerHeight[i] + 15) / 16);
+
+        if (nTotalMB > 0)
+            profile_max_framerate = (float)encParams->LayerMaxMbsPerSec[i] / (float)nTotalMB;
+
+        else
+            profile_max_framerate = (float)30.0;
+
+        encParams->LayerMaxFrameRate[i] = PV_MIN(profile_max_framerate, encParams->LayerFrameRate[i]);
+    }
+
+    /* check bit rate */
+    for (i = 0; i < encParams->nLayers; i++)
+    {
+        encParams->LayerBitRate[i] = encOption->bitRate[i];
+    }
+    if (encParams->nLayers > 1)
+    {
+        if (encOption->bitRate[0] == encOption->bitRate[1] ||
+                encOption->bitRate[0] == 0 || encOption->bitRate[1] == 0) /* 7/31/03 */
+            goto CLEAN_UP;
+    }
+    /* check rate control and vbv delay*/
+    encParams->RC_Type = encOption->rcType;
+
+    if (encOption->vbvDelay == 0.0) /* set to default */
+    {
+        switch (encOption->rcType)
+        {
+            case CBR_1:
+            case CBR_2:
+                encParams->VBV_delay = (float)2.0; /* default 2sec VBV buffer size */
+                break;
+
+            case CBR_LOWDELAY:
+                encParams->VBV_delay = (float)0.5; /* default 0.5sec VBV buffer size */
+                break;
+
+            case VBR_1:
+            case VBR_2:
+                encParams->VBV_delay = (float)10.0; /* default 10sec VBV buffer size */
+                break;
+            default:
+                break;
+        }
+    }
+    else /* force this value */
+    {
+        encParams->VBV_delay = encOption->vbvDelay;
+    }
+
+    /* check search range */
+    if (encParams->H263_Enabled && encOption->searchRange > 16)
+    {
+        encParams->SearchRange = 16; /* 4/16/2001 */
+    }
+
+    /*****************************************/
+    /* checking for conflict between options */
+    /*****************************************/
+
+    if (video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2 || video->encParams->RC_Type == CBR_LOWDELAY)  /* if CBR */
+    {
+#ifdef _PRINT_STAT
+        if (video->encParams->NoFrameSkip_Enabled == PV_ON ||
+                video->encParams->NoPreSkip_Enabled == PV_ON) /* don't allow frame skip*/
+            printf("WARNING!!!! CBR with NoFrameSkip\n");
+#endif
+    }
+    else if (video->encParams->RC_Type == CONSTANT_Q)   /* constant_Q */
+    {
+        video->encParams->NoFrameSkip_Enabled = PV_ON;  /* no frame skip */
+        video->encParams->NoPreSkip_Enabled = PV_ON;    /* no frame skip */
+#ifdef _PRINT_STAT
+        printf("Turn on NoFrameSkip\n");
+#endif
+    }
+
+    if (video->encParams->NoFrameSkip_Enabled == PV_ON) /* if no frame skip */
+    {
+        video->encParams->FineFrameSkip_Enabled = PV_OFF;
+#ifdef _PRINT_STAT
+        printf("NoFrameSkip !!! may violate VBV_BUFFER constraint.\n");
+        printf("Turn off FineFrameSkip\n");
+#endif
+    }
+
+    /******************************************/
+    /******************************************/
+
+    nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
+
+    /* Find the maximum width*height for memory allocation of the VOPs */
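+    /* Dimensions are rounded up to multiples of 16 (one macroblock), and
+       nTotalMB is the macroblock count, i.e. (max_width * max_height) / 256. */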
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        temp_w = video->encParams->LayerWidth[idx];
+        temp_h = video->encParams->LayerHeight[idx];
+
+        if ((temp_w*temp_h) > max)
+        {
+            max = temp_w * temp_h;
+            max_width = ((temp_w + 15) >> 4) << 4;
+            max_height = ((temp_h + 15) >> 4) << 4;
+            if (((uint64_t)max_width * max_height) > (uint64_t)INT32_MAX
+                    || temp_w > INT32_MAX - 15 || temp_h > INT32_MAX - 15) {
+                goto CLEAN_UP;
+            }
+            nTotalMB = ((max_width * max_height) >> 8);
+        }
+
+        /* Check if the video size and frame rate (MBsPerSec) are valid */
+        mbsPerSec = (Int)(nTotalMB * video->encParams->LayerFrameRate[idx]);
+        if (mbsPerSec > video->encParams->LayerMaxMbsPerSec[idx]) status = PV_FALSE;
+    }
+
+    /****************************************************/
+    /* Set Profile and Video Buffer Size for each layer */
+    /****************************************************/
+    if (video->encParams->RC_Type == CBR_LOWDELAY) video->encParams->VBV_delay = 0.5; /* For CBR_LOWDELAY, we set 0.5sec buffer */
+    status = SetProfile_BufferSize(video, video->encParams->VBV_delay, 1);
+    if (status != PV_TRUE)
+        goto CLEAN_UP;
+
+    /****************************************/
+    /* memory allocation and initialization */
+    /****************************************/
+
+    if (video == NULL) goto CLEAN_UP;
+
+    /* cyclic reference for passing through both structures */
+    video->videoEncControls = encoderControl;
+
+    //video->currLayer = 0; /* Set current Layer to 0 */
+    //video->currFrameNo = 0; /* Set current frame Number to 0 */
+    video->nextModTime = 0;
+    video->nextEncIVop = 0; /* Sets up very first frame to be I-VOP! */
+    video->numVopsInGOP = 0; /* counter for Vops in Gop, 2/8/01 */
+
+    //video->frameRate = video->encParams->LayerFrameRate[0]; /* Set current layer frame rate */
+
+    video->QPMB = (UChar *) M4VENC_MALLOC(nTotalMB * sizeof(UChar)); /* Memory for MB quantizers */
+    if (video->QPMB == NULL) goto CLEAN_UP;
+
+
+    video->headerInfo.Mode = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for MB Modes */
+    if (video->headerInfo.Mode == NULL) goto CLEAN_UP;
+    video->headerInfo.CBP = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB);   /* Memory for CBP (Y and C) of each MB */
+    if (video->headerInfo.CBP == NULL) goto CLEAN_UP;
+
+    /* Allocating motion vector space and interpolation memory*/
+
+    if ((size_t)nTotalMB > SIZE_MAX / sizeof(MOT *)) {
+        goto CLEAN_UP;
+    }
+    video->mot = (MOT **)M4VENC_MALLOC(sizeof(MOT *) * nTotalMB);
+    if (video->mot == NULL) goto CLEAN_UP;
+
+    for (idx = 0; idx < nTotalMB; idx++)
+    {
+        video->mot[idx] = (MOT *)M4VENC_MALLOC(sizeof(MOT) * 8);
+        if (video->mot[idx] == NULL)
+        {
+            goto CLEAN_UP;
+        }
+    }
+
+    video->intraArray = (UChar *)M4VENC_MALLOC(sizeof(UChar) * nTotalMB);
+    if (video->intraArray == NULL) goto CLEAN_UP;
+
+    video->sliceNo = (UChar *) M4VENC_MALLOC(nTotalMB); /* Memory for Slice Numbers */
+    if (video->sliceNo == NULL) goto CLEAN_UP;
+    /* Allocating space for predDCAC[][8][16]. Note that I intentionally     */
+    /*    increased the dimension of predDCAC from [][6][15] to [][8][16]    */
+    /*    so that compilers can generate faster code for indexing the        */
+    /*    data inside (by using << instead of *).         04/14/2000. */
+    /* 5/29/01, use the decoder lib ACDC prediction memory scheme.  */
+    if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
+        goto CLEAN_UP;
+    }
+    video->predDC = (typeDCStore *) M4VENC_MALLOC(nTotalMB * sizeof(typeDCStore));
+    if (video->predDC == NULL) goto CLEAN_UP;
+
+    if (!video->encParams->H263_Enabled)
+    {
+        if ((size_t)((max_width >> 4) + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
+            goto CLEAN_UP;
+        }
+        video->predDCAC_col = (typeDCACStore *) M4VENC_MALLOC(((max_width >> 4) + 1) * sizeof(typeDCACStore));
+        if (video->predDCAC_col == NULL) goto CLEAN_UP;
+
+        /* element zero will be used for storing vertical (col) AC coefficients */
+        /*  the rest will be used for storing horizontal (row) AC coefficients  */
+        video->predDCAC_row = video->predDCAC_col + 1;        /*  ACDC */
+
+        if ((size_t)nTotalMB > SIZE_MAX / sizeof(Int)) {
+            goto CLEAN_UP;
+        }
+        video->acPredFlag = (Int *) M4VENC_MALLOC(nTotalMB * sizeof(Int)); /* Memory for acPredFlag */
+        if (video->acPredFlag == NULL) goto CLEAN_UP;
+    }
+
+    video->outputMB = (MacroBlock *) M4VENC_MALLOC(sizeof(MacroBlock)); /* Allocating macroblock space */
+    if (video->outputMB == NULL) goto CLEAN_UP;
+    M4VENC_MEMSET(video->outputMB->block[0], 0, (sizeof(Short) << 6)*6);
+
+    M4VENC_MEMSET(video->dataBlock, 0, sizeof(Short) << 7);
+    /* Allocate (2*packetsize) working bitstreams */
+
+    video->bitstream1 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 1*/
+    if (video->bitstream1 == NULL) goto CLEAN_UP;
+    video->bitstream2 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 2*/
+    if (video->bitstream2 == NULL) goto CLEAN_UP;
+    video->bitstream3 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 3*/
+    if (video->bitstream3 == NULL) goto CLEAN_UP;
+
+    /* allocate overrun buffer */
+    // this buffer is used when user's buffer is too small to hold one frame.
+    // It is not needed for slice-based encoding.
+    if (nLayers == 1)
+    {
+        video->oBSize = encParams->BufferSize[0] >> 3;
+    }
+    else
+    {
+        video->oBSize = PV_MAX((encParams->BufferSize[0] >> 3), (encParams->BufferSize[1] >> 3));
+    }
+
+    if (video->oBSize > DEFAULT_OVERRUN_BUFFER_SIZE || encParams->RC_Type == CONSTANT_Q) // set limit
+    {
+        video->oBSize = DEFAULT_OVERRUN_BUFFER_SIZE;
+    }
+    video->overrunBuffer = (UChar*) M4VENC_MALLOC(sizeof(UChar) * video->oBSize);
+    if (video->overrunBuffer == NULL) goto CLEAN_UP;
+
+
+    video->currVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Current VOP */
+    if (video->currVop == NULL) goto CLEAN_UP;
+
+    /* add padding, 09/19/05 */
+    if (video->encParams->H263_Enabled) /* make it conditional  11/28/05 */
+    {
+        pitch = max_width;
+        offset = 0;
+    }
+    else
+    {
+        pitch = max_width + 32;
+        offset = (pitch << 4) + 16;
+        max_height += 32;
+    }
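+    /* In the non-H.263 case the luma plane gets a 16-pixel border on every
+       side: pitch = width + 32, 16 extra rows above and below (max_height += 32),
+       and offset points 16 rows down and 16 pixels in, i.e. at the origin of
+       the visible frame inside the padded buffer. */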
+    if (((uint64_t)pitch * max_height) > (uint64_t)INT32_MAX) {
+        goto CLEAN_UP;
+    }
+    size = pitch * max_height;
+
+    if (size > INT32_MAX - (size >> 1)
+            || (size_t)(size + (size >> 1)) > SIZE_MAX / sizeof(PIXEL)) {
+        goto CLEAN_UP;
+    }
+    video->currVop->allChan = video->currVop->yChan = (PIXEL *)M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for currVop Y */
+    if (video->currVop->yChan == NULL) goto CLEAN_UP;
+    video->currVop->uChan = video->currVop->yChan + size;/* Memory for currVop U */
+    video->currVop->vChan = video->currVop->uChan + (size >> 2);/* Memory for currVop V */
+
+    /* shift for the offset */
+    if (offset)
+    {
+        video->currVop->yChan += offset; /* offset to the origin.*/
+        video->currVop->uChan += (offset >> 2) + 4;
+        video->currVop->vChan += (offset >> 2) + 4;
+    }
+
+    video->forwardRefVop = video->currVop;      /*  Initialize forwardRefVop */
+    video->backwardRefVop = video->currVop;     /*  Initialize backwardRefVop */
+
+    video->prevBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));         /* Memory for Previous Base Vop */
+    if (video->prevBaseVop == NULL) goto CLEAN_UP;
+    video->prevBaseVop->allChan = video->prevBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for prevBaseVop Y */
+    if (video->prevBaseVop->yChan == NULL) goto CLEAN_UP;
+    video->prevBaseVop->uChan = video->prevBaseVop->yChan + size; /* Memory for prevBaseVop U */
+    video->prevBaseVop->vChan = video->prevBaseVop->uChan + (size >> 2); /* Memory for prevBaseVop V */
+
+    if (offset)
+    {
+        video->prevBaseVop->yChan += offset; /* offset to the origin.*/
+        video->prevBaseVop->uChan += (offset >> 2) + 4;
+        video->prevBaseVop->vChan += (offset >> 2) + 4;
+    }
+
+
+    if (0) /* If B Frames */
+    {
+        video->nextBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));         /* Memory for Next Base Vop */
+        if (video->nextBaseVop == NULL) goto CLEAN_UP;
+        video->nextBaseVop->allChan = video->nextBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for nextBaseVop Y */
+        if (video->nextBaseVop->yChan == NULL) goto CLEAN_UP;
+        video->nextBaseVop->uChan = video->nextBaseVop->yChan + size; /* Memory for nextBaseVop U */
+        video->nextBaseVop->vChan = video->nextBaseVop->uChan + (size >> 2); /* Memory for nextBaseVop V */
+
+        if (offset)
+        {
+            video->nextBaseVop->yChan += offset; /* offset to the origin.*/
+            video->nextBaseVop->uChan += (offset >> 2) + 4;
+            video->nextBaseVop->vChan += (offset >> 2) + 4;
+        }
+    }
+
+    if (nLayers > 1)   /* If enhancement layers */
+    {
+        video->prevEnhanceVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));      /* Memory for Previous Enhancement Vop */
+        if (video->prevEnhanceVop == NULL) goto CLEAN_UP;
+        video->prevEnhanceVop->allChan = video->prevEnhanceVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for Previous Enhancement Y */
+        if (video->prevEnhanceVop->yChan == NULL) goto CLEAN_UP;
+        video->prevEnhanceVop->uChan = video->prevEnhanceVop->yChan + size; /* Memory for Previous Enhancement U */
+        video->prevEnhanceVop->vChan = video->prevEnhanceVop->uChan + (size >> 2); /* Memory for Previous Enhancement V */
+
+        if (offset)
+        {
+            video->prevEnhanceVop->yChan += offset; /* offset to the origin.*/
+            video->prevEnhanceVop->uChan += (offset >> 2) + 4;
+            video->prevEnhanceVop->vChan += (offset >> 2) + 4;
+        }
+    }
+
+    video->numberOfLayers = nLayers; /* Number of Layers */
+    video->sumMAD = 0;
+
+
+    /* 04/09/01, for VOPs used in multipass processing */
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        video->pMP[idx] = (MultiPass *)M4VENC_MALLOC(sizeof(MultiPass));
+        if (video->pMP[idx] == NULL)    goto CLEAN_UP;
+        M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
+
+        video->pMP[idx]->encoded_frames = -1; /* forget about the very first I frame */
+
+
+        /* RDInfo **pRDSamples */
+        video->pMP[idx]->pRDSamples = (RDInfo **)M4VENC_MALLOC(30 * sizeof(RDInfo *));
+        if (video->pMP[idx]->pRDSamples == NULL)    goto CLEAN_UP;
+        for (i = 0; i < 30; i++)
+        {
+            video->pMP[idx]->pRDSamples[i] = (RDInfo *)M4VENC_MALLOC(32 * sizeof(RDInfo));
+            if (video->pMP[idx]->pRDSamples[i] == NULL) goto CLEAN_UP;
+            for (j = 0; j < 32; j++)    M4VENC_MEMSET(&(video->pMP[idx]->pRDSamples[i][j]), 0, sizeof(RDInfo));
+        }
+        video->pMP[idx]->frameRange = (Int)(video->encParams->LayerFrameRate[idx] * 1.0); /* 1.0s time frame*/
+        video->pMP[idx]->frameRange = PV_MAX(video->pMP[idx]->frameRange, 5);
+        video->pMP[idx]->frameRange = PV_MIN(video->pMP[idx]->frameRange, 30);
+
+        video->pMP[idx]->framePos = -1;
+
+    }
+    /* /// End /////////////////////////////////////// */
+
+
+    if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
+        goto CLEAN_UP;
+    }
+    video->vol = (Vol **)M4VENC_MALLOC(nLayers * sizeof(Vol *)); /* Memory for VOL pointers */
+
+    /* Memory allocation and Initialization of Vols and writing of headers */
+    if (video->vol == NULL) goto CLEAN_UP;
+
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        video->volInitialize[idx] = 1;
+        video->refTick[idx] = 0;
+        video->relLayerCodeTime[idx] = 1000;
+        video->vol[idx] = (Vol *)M4VENC_MALLOC(sizeof(Vol));
+        if (video->vol[idx] == NULL)  goto CLEAN_UP;
+
+        pVol = video->vol[idx];
+        pEncParams = video->encParams;
+
+        M4VENC_MEMSET(video->vol[idx], 0, sizeof(Vol));
+        /* Initialize some VOL parameters */
+        pVol->volID = idx;  /* Set VOL ID */
+        pVol->shortVideoHeader = pEncParams->H263_Enabled; /*Short Header */
+        pVol->GOVStart = pEncParams->GOV_Enabled; /* GOV Header */
+        pVol->timeIncrementResolution = video->encParams->TimeIncrementRes;
+        pVol->nbitsTimeIncRes = 1;
+        while (pVol->timeIncrementResolution > (1 << pVol->nbitsTimeIncRes))
+        {
+            pVol->nbitsTimeIncRes++;
+        }
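+        /* nbitsTimeIncRes is now the smallest n (starting from 1) with
+           (1 << n) >= timeIncrementResolution, presumably the field width
+           used when coding the VOP time increment. */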
+
+        /* timing stuff */
+        pVol->timeIncrement = 0;
+        pVol->moduloTimeBase = 0;
+        pVol->fixedVopRate = 0; /* No fixed VOP rate */
+        pVol->stream = (BitstreamEncVideo *)M4VENC_MALLOC(sizeof(BitstreamEncVideo)); /* allocate BitstreamEncVideo Instance */
+        if (pVol->stream == NULL)  goto CLEAN_UP;
+
+        pVol->width = pEncParams->LayerWidth[idx];      /* Layer Width */
+        pVol->height = pEncParams->LayerHeight[idx];    /* Layer Height */
+        //  pVol->intra_acdcPredDisable = pEncParams->ACDCPrediction; /* ACDC Prediction */
+        pVol->ResyncMarkerDisable = pEncParams->ResyncMarkerDisable; /* Resync Marker Mode */
+        pVol->dataPartitioning = pEncParams->DataPartitioning; /* Data Partitioning */
+        pVol->useReverseVLC = pEncParams->ReversibleVLC; /* RVLC */
+        if (idx > 0) /* Scalability layers */
+        {
+            pVol->ResyncMarkerDisable = 1;
+            pVol->dataPartitioning = 0;
+            pVol->useReverseVLC = 0; /*  No RVLC */
+        }
+        pVol->quantType = pEncParams->QuantType[idx];           /* Quantizer Type */
+
+        /* no need to init Quant Matrices */
+
+        pVol->scalability = 0;  /* Vol Scalability */
+        if (idx > 0)
+            pVol->scalability = 1; /* Multiple layers => Scalability */
+
+        /* Initialize Vol to Temporal scalability.  It can change during encoding */
+        pVol->scalType = 1;
+        /* Initialize reference Vol ID to the base layer = 0 */
+        pVol->refVolID = 0;
+        /* Initialize layer resolution to same as the reference */
+        pVol->refSampDir = 0;
+        pVol->horSamp_m = 1;
+        pVol->horSamp_n = 1;
+        pVol->verSamp_m = 1;
+        pVol->verSamp_n = 1;
+        pVol->enhancementType = 0; /* We always enhance the entire region */
+
+        pVol->nMBPerRow = (pVol->width + 15) / 16;
+        pVol->nMBPerCol = (pVol->height + 15) / 16;
+        pVol->nTotalMB = pVol->nMBPerRow * pVol->nMBPerCol;
+
+        if (pVol->nTotalMB >= 1)
+            pVol->nBitsForMBID = 1;
+        if (pVol->nTotalMB >= 3)
+            pVol->nBitsForMBID = 2;
+        if (pVol->nTotalMB >= 5)
+            pVol->nBitsForMBID = 3;
+        if (pVol->nTotalMB >= 9)
+            pVol->nBitsForMBID = 4;
+        if (pVol->nTotalMB >= 17)
+            pVol->nBitsForMBID = 5;
+        if (pVol->nTotalMB >= 33)
+            pVol->nBitsForMBID = 6;
+        if (pVol->nTotalMB >= 65)
+            pVol->nBitsForMBID = 7;
+        if (pVol->nTotalMB >= 129)
+            pVol->nBitsForMBID = 8;
+        if (pVol->nTotalMB >= 257)
+            pVol->nBitsForMBID = 9;
+        if (pVol->nTotalMB >= 513)
+            pVol->nBitsForMBID = 10;
+        if (pVol->nTotalMB >= 1025)
+            pVol->nBitsForMBID = 11;
+        if (pVol->nTotalMB >= 2049)
+            pVol->nBitsForMBID = 12;
+        if (pVol->nTotalMB >= 4097)
+            pVol->nBitsForMBID = 13;
+        if (pVol->nTotalMB >= 8193)
+            pVol->nBitsForMBID = 14;
+        if (pVol->nTotalMB >= 16385)
+            pVol->nBitsForMBID = 15;
+        if (pVol->nTotalMB >= 32769)
+            pVol->nBitsForMBID = 16;
+        if (pVol->nTotalMB >= 65537)
+            pVol->nBitsForMBID = 17;
+        if (pVol->nTotalMB >= 131073)
+            pVol->nBitsForMBID = 18;
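+        /* The cascade above amounts to nBitsForMBID = ceil(log2(nTotalMB)),
+           with a minimum of 1, i.e. enough bits to address any MB in the VOP. */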
+
+        if (pVol->shortVideoHeader)
+        {
+            switch (pVol->width)
+            {
+                case 128:
+                    if (pVol->height == 96)  /* source_format = 1 */
+                    {
+                        pVol->nGOBinVop = 6;
+                        pVol->nMBinGOB = 8;
+                    }
+                    else
+                        status = PV_FALSE;
+                    break;
+
+                case 176:
+                    if (pVol->height == 144)  /* source_format = 2 */
+                    {
+                        pVol->nGOBinVop = 9;
+                        pVol->nMBinGOB = 11;
+                    }
+                    else
+                        status = PV_FALSE;
+                    break;
+                case 352:
+                    if (pVol->height == 288)  /* source_format = 3 */
+                    {
+                        pVol->nGOBinVop = 18;
+                        pVol->nMBinGOB = 22;
+                    }
+                    else
+                        status = PV_FALSE;
+                    break;
+
+                case 704:
+                    if (pVol->height == 576)  /* source_format = 4 */
+                    {
+                        pVol->nGOBinVop = 18;
+                        pVol->nMBinGOB = 88;
+                    }
+                    else
+                        status = PV_FALSE;
+                    break;
+                case 1408:
+                    if (pVol->height == 1152)  /* source_format = 5 */
+                    {
+                        pVol->nGOBinVop = 18;
+                        pVol->nMBinGOB = 352;
+                    }
+                    else
+                        status = PV_FALSE;
+                    break;
+
+                default:
+                    status = PV_FALSE;
+                    break;
+            }
+        }
+    }
+
+    /***************************************************/
+    /* allocate and initialize rate control parameters */
+    /***************************************************/
+
+    /* BEGIN INITIALIZATION OF ANNEX L RATE CONTROL */
+    if (video->encParams->RC_Type != CONSTANT_Q)
+    {
+        for (idx = 0; idx < nLayers; idx++) /* 12/25/00 */
+        {
+            video->rc[idx] =
+                (rateControl *)M4VENC_MALLOC(sizeof(rateControl));
+
+            if (video->rc[idx] == NULL) goto CLEAN_UP;
+
+            M4VENC_MEMSET(video->rc[idx], 0, sizeof(rateControl));
+        }
+        if (PV_SUCCESS != RC_Initialize(video))
+        {
+            goto CLEAN_UP;
+        }
+        /* initialization for 2-pass rate control */
+    }
+    /* END INITIALIZATION OF ANNEX L RATE CONTROL */
+
+    /********** assign platform dependent functions ***********************/
+    /* 1/23/01 */
+    /* This must be done at run-time, not at compile time */
+    video->functionPointer = (FuncPtr*) M4VENC_MALLOC(sizeof(FuncPtr));
+    if (video->functionPointer == NULL) goto CLEAN_UP;
+
+    video->functionPointer->ComputeMBSum = &ComputeMBSum_C;
+    video->functionPointer->SAD_MB_HalfPel[0] = NULL;
+    video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HalfPel_Cxh;
+    video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HalfPel_Cyh;
+    video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HalfPel_Cxhyh;
+
+#ifndef NO_INTER4V
+    video->functionPointer->SAD_Blk_HalfPel = &SAD_Blk_HalfPel_C;
+    video->functionPointer->SAD_Block = &SAD_Block_C;
+#endif
+    video->functionPointer->SAD_Macroblock = &SAD_Macroblock_C;
+    video->functionPointer->ChooseMode = &ChooseMode_C;
+    video->functionPointer->GetHalfPelMBRegion = &GetHalfPelMBRegion_C;
+//  video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING; /* 4/21/01 */
+
+
+    encoderControl->videoEncoderInit = 1;  /* init done! */
+
+    return PV_TRUE;
+
+CLEAN_UP:
+    PVCleanUpVideoEncoder(encoderControl);
+
+    return PV_FALSE;
+}
+
+
+/* ======================================================================== */
+/*  Function : PVCleanUpVideoEncoder()                                      */
+/*  Date     : 08/22/2000                                                   */
+/*  Purpose  : Deallocates allocated memory from InitVideoEncoder()         */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if successful, PV_FALSE if failed.                   */
+/*  Modified : 5/21/01, free only yChan in Vop                          */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool    PVCleanUpVideoEncoder(VideoEncControls *encoderControl)
+{
+    Int idx, i;
+    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+    int nTotalMB;
+    int max_width, offset;
+
+#ifdef PRINT_RC_INFO
+    if (facct != NULL)
+    {
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "TOTAL NUM BITS GENERATED %d\n", tiTotalNumBitsGenerated);
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "TOTAL NUMBER OF FRAMES CODED %d\n",
+                video->encParams->rc[0]->totalFrameNumber);
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "Average BitRate %d\n",
+                (tiTotalNumBitsGenerated / (90 / 30)));
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "TOTAL NUMBER OF STUFF BITS %d\n", (iStuffBits + 10740));
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "TOTAL NUMBER OF BITS TO NETWORK %d\n", (35800*90 / 30));;
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "SUM OF STUFF BITS AND GENERATED BITS %d\n",
+                (tiTotalNumBitsGenerated + iStuffBits + 10740));
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fprintf(facct, "UNACCOUNTED DIFFERENCE %d\n",
+                ((35800*90 / 30) - (tiTotalNumBitsGenerated + iStuffBits + 10740)));
+        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
+        fclose(facct);
+    }
+#endif
+
+#ifdef PRINT_EC
+    fclose(fec);
+#endif
+
+    if (video != NULL)
+    {
+
+        if (video->QPMB) M4VENC_FREE(video->QPMB);
+        if (video->headerInfo.Mode) M4VENC_FREE(video->headerInfo.Mode);
+        if (video->headerInfo.CBP) M4VENC_FREE(video->headerInfo.CBP);
+
+
+        if (video->mot)
+        {
+            nTotalMB = video->vol[0]->nTotalMB;
+            for (idx = 1; idx < video->currLayer; idx++)
+                if (video->vol[idx]->nTotalMB > nTotalMB)
+                    nTotalMB = video->vol[idx]->nTotalMB;
+            for (idx = 0; idx < nTotalMB; idx++)
+            {
+                if (video->mot[idx])
+                    M4VENC_FREE(video->mot[idx]);
+            }
+            M4VENC_FREE(video->mot);
+        }
+
+        if (video->intraArray) M4VENC_FREE(video->intraArray);
+
+        if (video->sliceNo) M4VENC_FREE(video->sliceNo);
+        if (video->acPredFlag) M4VENC_FREE(video->acPredFlag);
+//      if(video->predDCAC)M4VENC_FREE(video->predDCAC);
+        if (video->predDC) M4VENC_FREE(video->predDC);
+        video->predDCAC_row = NULL;
+        if (video->predDCAC_col) M4VENC_FREE(video->predDCAC_col);
+        if (video->outputMB) M4VENC_FREE(video->outputMB);
+
+        if (video->bitstream1) BitstreamCloseEnc(video->bitstream1);
+        if (video->bitstream2) BitstreamCloseEnc(video->bitstream2);
+        if (video->bitstream3) BitstreamCloseEnc(video->bitstream3);
+
+        if (video->overrunBuffer) M4VENC_FREE(video->overrunBuffer);
+
+        max_width = video->encParams->LayerWidth[0];
+        max_width = (((max_width + 15) >> 4) << 4); /* 09/19/05 */
+        if (video->encParams->H263_Enabled)
+        {
+            offset = 0;
+        }
+        else
+        {
+            offset = ((max_width + 32) << 4) + 16;
+        }
+
+        if (video->currVop)
+        {
+            if (video->currVop->allChan)
+            {
+                M4VENC_FREE(video->currVop->allChan);
+            }
+            M4VENC_FREE(video->currVop);
+        }
+
+        if (video->nextBaseVop)
+        {
+            if (video->nextBaseVop->allChan)
+            {
+                M4VENC_FREE(video->nextBaseVop->allChan);
+            }
+            M4VENC_FREE(video->nextBaseVop);
+        }
+
+        if (video->prevBaseVop)
+        {
+            if (video->prevBaseVop->allChan)
+            {
+                M4VENC_FREE(video->prevBaseVop->allChan);
+            }
+            M4VENC_FREE(video->prevBaseVop);
+        }
+        if (video->prevEnhanceVop)
+        {
+            if (video->prevEnhanceVop->allChan)
+            {
+                M4VENC_FREE(video->prevEnhanceVop->allChan);
+            }
+            M4VENC_FREE(video->prevEnhanceVop);
+        }
+
+        /* 04/09/01, for Vops used in multipass processing */
+        for (idx = 0; idx < video->encParams->nLayers; idx++)
+        {
+            if (video->pMP[idx])
+            {
+                if (video->pMP[idx]->pRDSamples)
+                {
+                    for (i = 0; i < 30; i++)
+                    {
+                        if (video->pMP[idx]->pRDSamples[i])
+                            M4VENC_FREE(video->pMP[idx]->pRDSamples[i]);
+                    }
+                    M4VENC_FREE(video->pMP[idx]->pRDSamples);
+                }
+
+                M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
+                M4VENC_FREE(video->pMP[idx]);
+            }
+        }
+        /* //  End /////////////////////////////////////// */
+
+        if (video->vol)
+        {
+            for (idx = 0; idx < video->encParams->nLayers; idx++)
+            {
+                if (video->vol[idx])
+                {
+                    if (video->vol[idx]->stream)
+                        M4VENC_FREE(video->vol[idx]->stream);
+                    M4VENC_FREE(video->vol[idx]);
+                }
+            }
+            M4VENC_FREE(video->vol);
+        }
+
+        /***************************************************/
+        /* stop rate control parameters */
+        /***************************************************/
+
+        /* ANNEX L RATE CONTROL */
+        if (video->encParams->RC_Type != CONSTANT_Q)
+        {
+            RC_Cleanup(video->rc, video->encParams->nLayers);
+
+            for (idx = 0; idx < video->encParams->nLayers; idx++)
+            {
+                if (video->rc[idx])
+                    M4VENC_FREE(video->rc[idx]);
+            }
+        }
+
+        if (video->functionPointer) M4VENC_FREE(video->functionPointer);
+
+        /* If application has called PVCleanUpVideoEncoder then we deallocate */
+        /* If PVInitVideoEncoder calls it, then we DO NOT deallocate */
+        if (video->encParams)
+        {
+            M4VENC_FREE(video->encParams);
+        }
+
+        M4VENC_FREE(video);
+        encoderControl->videoEncoderData = NULL; /* video */
+    }
+
+    encoderControl->videoEncoderInit = 0;
+
+    return PV_TRUE;
+}
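+
+/* Illustrative teardown sketch (added for documentation, compiled out; the
+   variable name `ctrl` is a hypothetical caller-side control block that was
+   previously set up by PVInitVideoEncoder()). */
+#if 0
+static void example_shutdown(VideoEncControls *ctrl)
+{
+    /* Releases every encoder-internal allocation and clears
+       videoEncoderData; the VideoEncControls block itself is caller-owned. */
+    (void)PVCleanUpVideoEncoder(ctrl);
+}
+#endif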
+
+/* ======================================================================== */
+/*  Function : PVGetVolHeader()                                             */
+/*  Date     : 7/17/2001,                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetVolHeader(VideoEncControls *encCtrl, UChar *volHeader, Int *size, Int layer)
+{
+    VideoEncData    *encData;
+    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+
+    encData->currLayer = layer; /* Set Layer */
+    /*pv_status = */
+    EncodeVOS_Start(encCtrl); /* Encode VOL Header */
+
+    encData->encParams->GetVolHeader[layer] = 1; /* Set usage flag: Needed to support old method*/
+
+    /* Copy bitstream to buffer and set the size */
+
+    if (*size > encData->bitstream1->byteCount)
+    {
+        *size = encData->bitstream1->byteCount;
+        M4VENC_MEMCPY(volHeader, encData->bitstream1->bitstreamBuffer, *size);
+    }
+    else
+        return PV_FALSE;
+
+    /* Reset bitstream1 buffer parameters */
+    BitstreamEncReset(encData->bitstream1);
+
+    return PV_TRUE;
+}
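+
+/* Illustrative usage sketch (added for documentation, compiled out): *size
+   is passed in as the capacity of the caller's buffer and comes back as the
+   number of header bytes copied; the call fails unless the buffer is
+   strictly larger than the encoded header.  `ctrl`, `hdrBuf` and `capacity`
+   are hypothetical caller-side names. */
+#if 0
+static Int example_fetch_vol_header(VideoEncControls *ctrl, UChar *hdrBuf, Int capacity)
+{
+    Int size = capacity;                        /* in: buffer size in bytes */
+    if (PVGetVolHeader(ctrl, hdrBuf, &size, 0) != PV_TRUE)  /* layer 0 */
+        return -1;                              /* buffer too small or encoder not set up */
+    return size;                                /* out: header length in bytes */
+}
+#endif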
+
+/* ======================================================================== */
+/*  Function : PVGetOverrunBuffer()                                         */
+/*  Purpose  : Get the overrun buffer                                       */
+/*  In/out   :                                                              */
+/*  Return   : Pointer to overrun buffer.                                   */
+/*  Modified :                                                              */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF UChar* PVGetOverrunBuffer(VideoEncControls *encCtrl)
+{
+    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+    Int currLayer = video->currLayer;
+    Vol *currVol = video->vol[currLayer];
+
+    if (currVol->stream->bitstreamBuffer != video->overrunBuffer) // not used
+    {
+        return NULL;
+    }
+
+    return video->overrunBuffer;
+}
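+
+/* Illustrative usage sketch (added for documentation, compiled out): after an
+   encode call, a NULL return means the output stayed in the buffer supplied
+   by the caller; a non-NULL return is the encoder's internal overrun buffer,
+   which then holds the encoded data instead.  Names are hypothetical. */
+#if 0
+static UChar *example_pick_output(VideoEncControls *ctrl, UChar *callerBuf)
+{
+    UChar *overrun = PVGetOverrunBuffer(ctrl);
+    return (overrun != NULL) ? overrun : callerBuf;
+}
+#endif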
+
+
+
+
+/* ======================================================================== */
+/*  Function : EncodeVideoFrame()                                           */
+/*  Date     : 08/22/2000                                                   */
+/*  Purpose  : Encode video frame and return bitstream                      */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*  02.14.2001                                      */
+/*              Finishing new timestamp 32-bit input                        */
+/*              Applications need to take care of wrap-around               */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeVideoFrame(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, VideoEncFrameIO *vid_out,
+                                        ULong *nextModTime, UChar *bstream, Int *size, Int *nLayer)
+{
+    Bool status = PV_TRUE;
+    PV_STATUS pv_status;
+    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+    VideoEncParams *encParams = video->encParams;
+    Vol *currVol;
+    Vop *tempForwRefVop = NULL;
+    Int tempRefSelCode = 0;
+    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
+    Int width_16, height_16;
+    Int width, height;
+    Vop *temp;
+    Int encodeVop = 0;
+    void  PaddingEdge(Vop *padVop);
+    Int currLayer = -1;
+    //Int nLayers = encParams->nLayers;
+
+    ULong modTime = vid_in->timestamp;
+
+#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
+    Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
+    static Int rand_idx = 0;
+#endif
+
+    /*******************************************************/
+    /* Determine Next Vop to encode, if any, and nLayer    */
+    /*******************************************************/
+    //i = nLayers-1;
+
+    if (video->volInitialize[0]) /* first vol to code */
+    {
+        video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
+    }
+
+    encodeVop = DetermineCodingLayer(video, nLayer, modTime);
+    currLayer = *nLayer;
+    if ((currLayer < 0) || (currLayer > encParams->nLayers - 1))
+        return PV_FALSE;
+
+    /******************************************/
+    /* If post-skipping still effective --- return */
+    /******************************************/
+
+    if (!encodeVop) /* skip enh layer, no base layer coded --- return */
+    {
+#ifdef _PRINT_STAT
+        printf("No frame coded. Continue to next frame.");
+#endif
+        /* expected next code time, convert back to millisec */
+        *nextModTime = video->nextModTime;
+
+#ifdef ALLOW_VOP_NOT_CODED
+        if (video->vol[0]->shortVideoHeader) /* Short Video Header = 1 */
+        {
+            *size = 0;
+            *nLayer = -1;
+        }
+        else
+        {
+            *nLayer = 0;
+            EncodeVopNotCoded(video, bstream, size, modTime);
+            *size = video->vol[0]->stream->byteCount;
+        }
+#else
+        *size = 0;
+        *nLayer = -1;
+#endif
+        return status;
+    }
+
+
+//ENCODE_VOP_AGAIN:  /* 12/30/00 */
+
+    /**************************************************************/
+    /* Initialize Vol stream structure with application bitstream */
+    /**************************************************************/
+
+    currVol = video->vol[currLayer];
+    currVol->stream->bitstreamBuffer = bstream;
+    currVol->stream->bufferSize = *size;
+    BitstreamEncReset(currVol->stream);
+    BitstreamSetOverrunBuffer(currVol->stream, video->overrunBuffer, video->oBSize, video);
+
+    /***********************************************************/
+    /* Encode VOS and VOL Headers on first call for each layer */
+    /***********************************************************/
+
+    if (video->volInitialize[currLayer])
+    {
+        video->currVop->timeInc = 0;
+        video->prevBaseVop->timeInc = 0;
+        if (!video->encParams->GetVolHeader[currLayer])
+            pv_status = EncodeVOS_Start(encCtrl);
+    }
+
+    /***************************************************/
+    /* Copy Input Video Frame to Internal Video Buffer */
+    /***************************************************/
+    /* Determine Width and Height of Vop Layer */
+
+    width = encParams->LayerWidth[currLayer];   /* Get input width */
+    height = encParams->LayerHeight[currLayer]; /* Get input height */
+    /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
+
+    width_16 = ((width + 15) / 16) * 16;            /* Round up to nearest multiple of 16 */
+    height_16 = ((height + 15) / 16) * 16;          /* Round up to nearest multiple of 16 */
+
+    video->input = vid_in;  /* point to the frame input */
+
+    /*//  End ////////////////////////////// */
+
+
+    /**************************************/
+    /* Determine VOP Type                 */
+    /* 6/2/2001, separate function      */
+    /**************************************/
+    DetermineVopType(video, currLayer);
+
+    /****************************/
+    /*    Initialize VOP        */
+    /****************************/
+    video->currVop->volID = currVol->volID;
+    video->currVop->width = width_16;
+    video->currVop->height = height_16;
+    if (video->encParams->H263_Enabled) /*  11/28/05 */
+    {
+        video->currVop->pitch = width_16;
+    }
+    else
+    {
+        video->currVop->pitch = width_16 + 32;
+    }
+    video->currVop->timeInc = currVol->timeIncrement;
+    video->currVop->vopCoded = 1;
+    video->currVop->roundingType = 0;
+    video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
+
+    if (currLayer == 0
+#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
+            || random_val[rand_idx] || video->volInitialize[currLayer]
+#endif
+       )
+    {
+        tempForwRefVop = video->forwardRefVop; /* keep initial state */
+        if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
+
+        video->forwardRefVop = video->prevBaseVop;
+        video->forwardRefVop->refSelectCode = 1;
+    }
+#ifdef RANDOM_REFSELCODE
+    else
+    {
+        tempForwRefVop = video->forwardRefVop; /* keep initial state */
+        if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
+
+        video->forwardRefVop = video->prevEnhanceVop;
+        video->forwardRefVop->refSelectCode = 0;
+    }
+    rand_idx++;
+    rand_idx %= 30;
+#endif
+
+    video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
+    video->currVop->gobNumber = 0;
+    video->currVop->gobFrameID = video->currVop->predictionType;
+    video->currVop->temporalRef = (modTime * 30 / 1001) % 256;
+
+    video->currVop->temporalInterval = 0;
+
+    if (video->currVop->predictionType == I_VOP)
+        video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
+    else
+        video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
+
+
+    /****************/
+    /* Encode Vop */
+    /****************/
+    video->slice_coding = 0;
+
+    pv_status = EncodeVop(video);
+#ifdef _PRINT_STAT
+    if (video->currVop->predictionType == I_VOP)
+        printf(" I-VOP ");
+    else
+        printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
+#endif
+
+    /************************************/
+    /* Update Skip Next Frame           */
+    /************************************/
+    *nLayer = UpdateSkipNextFrame(video, nextModTime, size, pv_status);
+    if (*nLayer == -1) /* skip current frame */
+    {
+        /* make sure that pointers are restored to the previous state */
+        if (currLayer == 0)
+        {
+            video->forwardRefVop = tempForwRefVop; /* For P-Vop base only */
+            if (video->forwardRefVop != NULL) video->forwardRefVop->refSelectCode = tempRefSelCode;
+        }
+
+        return status;
+    }
+
+    /* If I-VOP was encoded, reset IntraPeriod */
+    if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
+        video->nextEncIVop = encParams->IntraPeriod;
+
+    /* Set HintTrack Information */
+    if (currLayer != -1)
+    {
+        if (currVol->prevModuloTimeBase)
+            video->hintTrackInfo.MTB = 1;
+        else
+            video->hintTrackInfo.MTB = 0;
+        video->hintTrackInfo.LayerID = (UChar)currVol->volID;
+        video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
+        video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
+    }
+
+    /************************************************/
+    /* Determine nLayer and timeInc for next encode */
+    /* 12/27/00 always go by the highest layer*/
+    /************************************************/
+
+    /**********************************************************/
+    /* Copy Reconstructed Buffer to Output Video Frame Buffer */
+    /**********************************************************/
+    vid_out->yChan = video->currVop->yChan;
+    vid_out->uChan = video->currVop->uChan;
+    vid_out->vChan = video->currVop->vChan;
+    if (video->encParams->H263_Enabled)
+    {
+        vid_out->height = video->currVop->height; /* padded height */
+        vid_out->pitch = video->currVop->width; /* padded width */
+    }
+    else
+    {
+        vid_out->height = video->currVop->height + 32; /* padded height */
+        vid_out->pitch = video->currVop->width + 32; /* padded width */
+    }
+    //video_out->timestamp = video->modTime;
+    vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
+
+    /*// End /////////////////////// */
+
+    /***********************************/
+    /* Update Output bstream byte count */
+    /***********************************/
+
+    *size = currVol->stream->byteCount;
+
+    /****************************************/
+    /* Swap Vop Pointers for Base Layer     */
+    /****************************************/
+    if (currLayer == 0)
+    {
+        temp = video->prevBaseVop;
+        video->prevBaseVop = video->currVop;
+        video->prevBaseVop->padded = 0; /* not padded */
+        video->currVop  = temp;
+        video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
+        video->forwardRefVop->refSelectCode = 1;
+    }
+    else
+    {
+        temp = video->prevEnhanceVop;
+        video->prevEnhanceVop = video->currVop;
+        video->prevEnhanceVop->padded = 0; /* not padded */
+        video->currVop = temp;
+        video->forwardRefVop = video->prevEnhanceVop;
+        video->forwardRefVop->refSelectCode = 0;
+    }
+
+    /****************************************/
+    /* Reset the initialize flag at the end.*/
+    /****************************************/
+    if (video->volInitialize[currLayer])
+        video->volInitialize[currLayer] = 0;
+
+    return status;
+}
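+
+/* Illustrative frame-encoding sketch (added for documentation, compiled out):
+   `in`/`out` are caller-side VideoEncFrameIO structures (input planes and
+   timestamp filled in by the caller), `bitBuf`/`bufSize` the output buffer
+   and its capacity; all names are hypothetical.  On return, `size` holds the
+   bytes produced, `layer` the coded layer (-1 if the frame was skipped), and
+   `nextTime` the earliest timestamp the encoder expects next. */
+#if 0
+static void example_encode_one_frame(VideoEncControls *ctrl,
+                                     VideoEncFrameIO *in, VideoEncFrameIO *out,
+                                     UChar *bitBuf, Int bufSize)
+{
+    ULong nextTime = 0;
+    Int   size     = bufSize;   /* in: capacity, out: bytes produced */
+    Int   layer    = 0;
+
+    if (PVEncodeVideoFrame(ctrl, in, out, &nextTime, bitBuf, &size, &layer) == PV_TRUE
+            && layer >= 0)
+    {
+        /* `size` bytes of layer `layer` are now in bitBuf (or in the overrun
+           buffer, see PVGetOverrunBuffer() above). */
+    }
+}
+#endif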
+
+#ifndef NO_SLICE_ENCODE
+/* ======================================================================== */
+/*  Function : PVEncodeFrameSet()                                           */
+/*  Date     : 04/18/2000                                                   */
+/*  Purpose  : Enter a video frame and perform front-end time check plus ME */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeFrameSet(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, ULong *nextModTime, Int *nLayer)
+{
+    Bool status = PV_TRUE;
+    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+    VideoEncParams *encParams = video->encParams;
+    Vol *currVol;
+    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
+    Int width_16, height_16;
+    Int width, height;
+    Int encodeVop = 0;
+    void  PaddingEdge(Vop *padVop);
+    Int currLayer = -1;
+    //Int nLayers = encParams->nLayers;
+
+    ULong   modTime = vid_in->timestamp;
+
+#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
+    Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
+    static Int rand_idx = 0;
+#endif
+    /*******************************************************/
+    /* Determine Next Vop to encode, if any, and nLayer    */
+    /*******************************************************/
+
+    video->modTime = modTime;
+
+    //i = nLayers-1;
+
+    if (video->volInitialize[0]) /* first vol to code */
+    {
+        video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
+    }
+
+
+    encodeVop = DetermineCodingLayer(video, nLayer, modTime);
+
+    currLayer = *nLayer;
+
+    /******************************************/
+    /* If post-skipping still effective --- return */
+    /******************************************/
+
+    if (!encodeVop) /* skip enh layer, no base layer coded --- return */
+    {
+#ifdef _PRINT_STAT
+        printf("No frame coded. Continue to next frame.");
+#endif
+        *nLayer = -1;
+
+        /* expected next code time, convert back to millisec */
+        *nextModTime = video->nextModTime;
+        return status;
+    }
+
+    /**************************************************************/
+    /* Initialize Vol stream structure with application bitstream */
+    /**************************************************************/
+
+    currVol = video->vol[currLayer];
+    currVol->stream->bufferSize = 0;
+    BitstreamEncReset(currVol->stream);
+
+    /***********************************************************/
+    /* Encode VOS and VOL Headers on first call for each layer */
+    /***********************************************************/
+
+    if (video->volInitialize[currLayer])
+    {
+        video->currVop->timeInc = 0;
+        video->prevBaseVop->timeInc = 0;
+    }
+
+    /***************************************************/
+    /* Copy Input Video Frame to Internal Video Buffer */
+    /***************************************************/
+    /* Determine Width and Height of Vop Layer */
+
+    width = encParams->LayerWidth[currLayer];   /* Get input width */
+    height = encParams->LayerHeight[currLayer]; /* Get input height */
+    /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
+
+    width_16 = ((width + 15) / 16) * 16;            /* Round up to nearest multiple of 16 */
+    height_16 = ((height + 15) / 16) * 16;          /* Round up to nearest multiple of 16 */
+
+    video->input = vid_in;  /* point to the frame input */
+
+    /*//  End ////////////////////////////// */
+
+
+    /**************************************/
+    /* Determine VOP Type                 */
+    /* 6/2/2001, separate function      */
+    /**************************************/
+    DetermineVopType(video, currLayer);
+
+    /****************************/
+    /*    Initialize VOP        */
+    /****************************/
+    video->currVop->volID = currVol->volID;
+    video->currVop->width = width_16;
+    video->currVop->height = height_16;
+    if (video->encParams->H263_Enabled) /*  11/28/05 */
+    {
+        video->currVop->pitch = width_16;
+    }
+    else
+    {
+        video->currVop->pitch = width_16 + 32;
+    }
+    video->currVop->timeInc = currVol->timeIncrement;
+    video->currVop->vopCoded = 1;
+    video->currVop->roundingType = 0;
+    video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
+
+    if (currLayer == 0
+#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
+            || random_val[rand_idx] || video->volInitialize[currLayer]
+#endif
+       )
+    {
+        video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
+        if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
+
+        video->forwardRefVop = video->prevBaseVop;
+        video->forwardRefVop->refSelectCode = 1;
+    }
+#ifdef RANDOM_REFSELCODE
+    else
+    {
+        video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
+        if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
+
+        video->forwardRefVop = video->prevEnhanceVop;
+        video->forwardRefVop->refSelectCode = 0;
+    }
+    rand_idx++;
+    rand_idx %= 30;
+#endif
+
+    video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
+    video->currVop->gobNumber = 0;
+    video->currVop->gobFrameID = video->currVop->predictionType;
+    video->currVop->temporalRef = ((modTime) * 30 / 1001) % 256;
+
+    video->currVop->temporalInterval = 0;
+
+    if (video->currVop->predictionType == I_VOP)
+        video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
+    else
+        video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
+
+    /****************/
+    /* Encode Vop   */
+    /****************/
+    video->slice_coding = 1;
+
+    /*pv_status =*/
+    EncodeVop(video);
+
+#ifdef _PRINT_STAT
+    if (video->currVop->predictionType == I_VOP)
+        printf(" I-VOP ");
+    else
+        printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
+#endif
+
+    /* Set HintTrack Information */
+    if (currVol->prevModuloTimeBase)
+        video->hintTrackInfo.MTB = 1;
+    else
+        video->hintTrackInfo.MTB = 0;
+
+    video->hintTrackInfo.LayerID = (UChar)currVol->volID;
+    video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
+    video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
+
+    return status;
+}
+#endif /* NO_SLICE_ENCODE */
+
+#ifndef NO_SLICE_ENCODE
+/* ======================================================================== */
+/*  Function : PVEncodeSlice()                                              */
+/*  Date     : 04/18/2002                                                   */
+/*  Purpose  : Encode one packet and return bitstream                       */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVEncodeSlice(VideoEncControls *encCtrl, UChar *bstream, Int *size,
+                                   Int *endofFrame, VideoEncFrameIO *vid_out, ULong *nextModTime)
+{
+    PV_STATUS pv_status;
+    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
+    VideoEncParams *encParams = video->encParams;
+    Vol *currVol;
+    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
+    Vop *temp;
+    void  PaddingEdge(Vop *padVop);
+    Int currLayer = video->currLayer;
+    Int pre_skip;
+    Int pre_size;
+    /**************************************************************/
+    /* Initialize Vol stream structure with application bitstream */
+    /**************************************************************/
+
+    currVol = video->vol[currLayer];
+    currVol->stream->bitstreamBuffer = bstream;
+    pre_size = currVol->stream->byteCount;
+    currVol->stream->bufferSize = pre_size + (*size);
+
+    /***********************************************************/
+    /* Encode VOS and VOL Headers on first call for each layer */
+    /***********************************************************/
+
+    if (video->volInitialize[currLayer])
+    {
+        if (!video->encParams->GetVolHeader[currLayer])
+            pv_status = EncodeVOS_Start(encCtrl);
+    }
+
+    /****************/
+    /* Encode Slice */
+    /****************/
+    pv_status = EncodeSlice(video);
+
+    *endofFrame = 0;
+
+    if (video->mbnum >= currVol->nTotalMB && !video->end_of_buf)
+    {
+        *endofFrame = 1;
+
+        /************************************/
+        /* Update Skip Next Frame           */
+        /************************************/
+        pre_skip = UpdateSkipNextFrame(video, nextModTime, size, pv_status); /* modified such that no pre-skipped */
+
+        if (pre_skip == -1) /* error */
+        {
+            *endofFrame = -1;
+            /* make sure that pointers are restored to the previous state */
+            if (currLayer == 0)
+            {
+                video->forwardRefVop = video->tempForwRefVop; /* For P-Vop base only */
+                video->forwardRefVop->refSelectCode = video->tempRefSelCode;
+            }
+
+            return pv_status;
+        }
+
+        /* If I-VOP was encoded, reset IntraPeriod */
+        if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
+            video->nextEncIVop = encParams->IntraPeriod;
+
+        /**********************************************************/
+        /* Copy Reconstructed Buffer to Output Video Frame Buffer */
+        /**********************************************************/
+        vid_out->yChan = video->currVop->yChan;
+        vid_out->uChan = video->currVop->uChan;
+        vid_out->vChan = video->currVop->vChan;
+        if (video->encParams->H263_Enabled)
+        {
+            vid_out->height = video->currVop->height; /* padded height */
+            vid_out->pitch = video->currVop->width; /* padded width */
+        }
+        else
+        {
+            vid_out->height = video->currVop->height + 32; /* padded height */
+            vid_out->pitch = video->currVop->width + 32; /* padded width */
+        }
+        //vid_out->timestamp = video->modTime;
+        vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
+
+        /*// End /////////////////////// */
+
+        /****************************************/
+        /* Swap Vop Pointers for Base Layer     */
+        /****************************************/
+
+        if (currLayer == 0)
+        {
+            temp = video->prevBaseVop;
+            video->prevBaseVop = video->currVop;
+            video->prevBaseVop->padded = 0; /* not padded */
+            video->currVop = temp;
+            video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
+            video->forwardRefVop->refSelectCode = 1;
+        }
+        else
+        {
+            temp = video->prevEnhanceVop;
+            video->prevEnhanceVop = video->currVop;
+            video->prevEnhanceVop->padded = 0; /* not padded */
+            video->currVop = temp;
+            video->forwardRefVop = video->prevEnhanceVop;
+            video->forwardRefVop->refSelectCode = 0;
+        }
+    }
+
+    /***********************************/
+        /* Update Output bstream byte count */
+    /***********************************/
+
+    *size = currVol->stream->byteCount - pre_size;
+
+    /****************************************/
+    /* Reset the initialize flag at the end.*/
+    /****************************************/
+    if (video->volInitialize[currLayer])
+        video->volInitialize[currLayer] = 0;
+
+    return pv_status;
+}
+#endif /* NO_SLICE_ENCODE */
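+
+/* Illustrative slice-mode sketch (added for documentation, compiled out; only
+   meaningful when NO_SLICE_ENCODE is not defined): the caller first submits
+   the frame with PVEncodeFrameSet() and then drains it packet by packet with
+   PVEncodeSlice() until endOfFrame becomes non-zero.  Local names are
+   hypothetical. */
+#if 0
+static void example_encode_by_slices(VideoEncControls *ctrl,
+                                     VideoEncFrameIO *in, VideoEncFrameIO *out,
+                                     UChar *pktBuf, Int pktSize)
+{
+    ULong nextTime   = 0;
+    Int   layer      = 0;
+    Int   endOfFrame = 0;
+
+    if (PVEncodeFrameSet(ctrl, in, &nextTime, &layer) != PV_TRUE || layer < 0)
+        return;                         /* frame skipped or error */
+
+    do
+    {
+        Int size = pktSize;             /* in: packet capacity, out: bytes written */
+        (void)PVEncodeSlice(ctrl, pktBuf, &size, &endOfFrame, out, &nextTime);
+        /* consume `size` bytes from pktBuf here */
+    }
+    while (endOfFrame == 0);            /* 1 = frame finished, -1 = error */
+}
+#endif
+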
+
+
+/* ======================================================================== */
+/*  Function : PVGetH263ProfileLevelID()                                    */
+/*  Date     : 02/05/2003                                                   */
+/*  Purpose  : Get H.263 Profile ID and level ID for profile 0              */
+/*  In/out   : Profile ID=0, levelID is what we want                        */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*  Note     : h263Level[8], rBR_bound[8], max_h263_framerate[2]            */
+/*             max_h263_width[2], max_h263_height[2] are global             */
+/*                                                                          */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVGetH263ProfileLevelID(VideoEncControls *encCtrl, Int *profileID, Int *levelID)
+{
+    VideoEncData *encData;
+    Int width, height;
+    float bitrate_r, framerate;
+
+
+    /* For this version, we only support H.263 profile 0 */
+    *profileID = 0;
+
+    *levelID = 0;
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    if (!encData->encParams->H263_Enabled) return PV_FALSE;
+
+
+    /* get image width, height, bitrate and framerate */
+    width     = encData->encParams->LayerWidth[0];
+    height    = encData->encParams->LayerHeight[0];
+    bitrate_r = (float)(encData->encParams->LayerBitRate[0]) / (float)64000.0;
+    framerate = encData->encParams->LayerFrameRate[0];
+    if (!width || !height || !(bitrate_r > 0 && framerate > 0)) return PV_FALSE;
+
+    /* This is the most frequent case : level 10 */
+    if (bitrate_r <= rBR_bound[1] && framerate <= max_h263_framerate[0] &&
+            (width <= max_h263_width[0] && height <= max_h263_height[0]))
+    {
+        *levelID = h263Level[1];
+        return PV_TRUE;
+    }
+    else if (bitrate_r > rBR_bound[4] ||
+             (width > max_h263_width[1] || height > max_h263_height[1]) ||
+             framerate > max_h263_framerate[1])    /* check the highest level 70 */
+    {
+        *levelID = h263Level[7];
+        return PV_TRUE;
+    }
+    else   /* search level 20, 30, 40 */
+    {
+
+        /* pick out level 20 */
+        if (bitrate_r <= rBR_bound[2] &&
+                ((width <= max_h263_width[0] && height <= max_h263_height[0] && framerate <= max_h263_framerate[1]) ||
+                 (width <= max_h263_width[1] && height <= max_h263_height[1] && framerate <= max_h263_framerate[0])))
+        {
+            *levelID = h263Level[2];
+            return PV_TRUE;
+        }
+        else   /* width, height and framerate are ok, now choose level 30 or 40 */
+        {
+            *levelID = (bitrate_r <= rBR_bound[3] ? h263Level[3] : h263Level[4]);
+            return PV_TRUE;
+        }
+    }
+}
+
+/* ======================================================================== */
+/*  Function : PVGetMPEG4ProfileLevelID()                                   */
+/*  Date     : 26/06/2008                                                   */
+/*  Purpose  : Get MPEG4 Level after initialized                            */
+/*  In/out   : profile_level according to interface                         */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool PVGetMPEG4ProfileLevelID(VideoEncControls *encCtrl, Int *profile_level, Int nLayer)
+{
+    VideoEncData* video;
+    Int i;
+
+    video = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (nLayer == 0)
+    {
+        for (i = 0; i < 8; i++)
+        {
+            if (video->encParams->ProfileLevel[0] == profile_level_code[i])
+            {
+                break;
+            }
+        }
+        *profile_level = i;
+    }
+    else
+    {
+        for (i = 0; i < 8; i++)
+        {
+            if (video->encParams->ProfileLevel[0] == scalable_profile_level_code[i])
+            {
+                break;
+            }
+        }
+        *profile_level = i + SIMPLE_SCALABLE_PROFILE_LEVEL0;
+    }
+
+    return PV_TRUE;
+}
+
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVUpdateEncFrameRate                                         */
+/*  Date     : 04/08/2002                                                   */
+/*  Purpose  : Update target frame rates of the encoded base and enhance    */
+/*             layer(if any) while encoding operation is ongoing            */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateEncFrameRate(VideoEncControls *encCtrl, float *frameRate)
+{
+    VideoEncData    *encData;
+    Int i;// nTotalMB, mbPerSec;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    /* Update the framerates for all the layers */
+    for (i = 0; i < encData->encParams->nLayers; i++)
+    {
+
+        /* New check: encoding framerate should be consistent with the given profile and level */
+        //nTotalMB = (((encData->encParams->LayerWidth[i]+15)/16)*16)*(((encData->encParams->LayerHeight[i]+15)/16)*16)/(16*16);
+        //mbPerSec = (Int)(nTotalMB * frameRate[i]);
+        //if(mbPerSec > encData->encParams->LayerMaxMbsPerSec[i]) return PV_FALSE;
+        if (frameRate[i] > encData->encParams->LayerMaxFrameRate[i]) return PV_FALSE; /* set by users or profile */
+
+        encData->encParams->LayerFrameRate[i] = frameRate[i];
+    }
+
+    return RC_UpdateBXRCParams((void*) encData);
+
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVUpdateBitRate                                              */
+/*  Date     : 04/08/2002                                                   */
+/*  Purpose  : Update target bit rates of the encoded base and enhance      */
+/*             layer(if any) while encoding operation is ongoing            */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateBitRate(VideoEncControls *encCtrl, Int *bitRate)
+{
+    VideoEncData    *encData;
+    Int i;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    /* Update the bitrates for all the layers */
+    for (i = 0; i < encData->encParams->nLayers; i++)
+    {
+        if (bitRate[i] > encData->encParams->LayerMaxBitRate[i]) /* set by users or profile */
+        {
+            return PV_FALSE;
+        }
+        encData->encParams->LayerBitRate[i] = bitRate[i];
+    }
+
+    return RC_UpdateBXRCParams((void*) encData);
+
+}
+#endif
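+
+/* Illustrative runtime-update sketch (added for documentation, compiled out;
+   both calls exist only when LIMITED_API is not defined): each update is
+   validated against the per-layer maxima set by the user or the profile and
+   rejected with PV_FALSE before the rate control is touched.  The two-entry
+   arrays assume a two-layer encode; single-layer callers pass one entry. */
+#if 0
+static Bool example_update_rates(VideoEncControls *ctrl)
+{
+    float newFrameRate[2] = {15.0f, 30.0f};     /* frames per second, per layer */
+    Int   newBitRate[2]   = {64000, 128000};    /* bits per second, per layer */
+
+    if (PVUpdateEncFrameRate(ctrl, newFrameRate) != PV_TRUE)
+        return PV_FALSE;
+    return PVUpdateBitRate(ctrl, newBitRate);
+}
+#endif
+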
+#ifndef LIMITED_API
+/* ============================================================================ */
+/*  Function : PVUpdateVBVDelay()                                                   */
+/*  Date     : 4/23/2004                                                        */
+/*  Purpose  : Update VBV buffer size(in delay)                                 */
+/*  In/out   :                                                                  */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                        */
+/*  Modified :                                                                  */
+/*                                                                              */
+/* ============================================================================ */
+
+Bool PVUpdateVBVDelay(VideoEncControls *encCtrl, float delay)
+{
+
+    VideoEncData    *encData;
+    Int total_bitrate, max_buffer_size;
+    int index;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    /* Check whether the input delay is valid based on the given profile */
+    total_bitrate   = (encData->encParams->nLayers == 1 ? encData->encParams->LayerBitRate[0] :
+                       encData->encParams->LayerBitRate[1]);
+    index = encData->encParams->profile_table_index;
+    max_buffer_size = (encData->encParams->nLayers == 1 ? profile_level_max_VBV_size[index] :
+                       scalable_profile_level_max_VBV_size[index]);
+
+    if (total_bitrate*delay > (float)max_buffer_size)
+        return PV_FALSE;
+
+    encData->encParams->VBV_delay = delay;
+    return PV_TRUE;
+
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVUpdateIFrameInterval()                                         */
+/*  Date     : 04/10/2002                                                   */
+/*  Purpose  : updates the INTRA frame refresh interval while encoding      */
+/*             is ongoing                                                   */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVUpdateIFrameInterval(VideoEncControls *encCtrl, Int aIFramePeriod)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    encData->encParams->IntraPeriod = aIFramePeriod;
+    return PV_TRUE;
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVUpdateNumIntraMBRefresh()                                  */
+/*  Date     : 08/05/2003                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+OSCL_EXPORT_REF Bool    PVUpdateNumIntraMBRefresh(VideoEncControls *encCtrl, Int numMB)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+
+    encData->encParams->Refresh = numMB;
+
+    return PV_TRUE;
+}
+#endif
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVIFrameRequest()                                            */
+/*  Date     : 04/10/2002                                                   */
+/*  Purpose  : encodes the next base frame as an I-Vop                      */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVIFrameRequest(VideoEncControls *encCtrl)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    encData->nextEncIVop = 1;
+    return PV_TRUE;
+}
+#endif
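+
+/* Illustrative intra-control sketch (added for documentation, compiled out;
+   these calls exist only when LIMITED_API is not defined): a caller can
+   change the periodic I-frame interval, request a number of intra-refreshed
+   macroblocks, or force the next base-layer frame to be coded as an I-Vop.
+   The numeric values are examples only. */
+#if 0
+static void example_intra_controls(VideoEncControls *ctrl)
+{
+    (void)PVUpdateIFrameInterval(ctrl, 30);     /* new INTRA refresh interval */
+    (void)PVUpdateNumIntraMBRefresh(ctrl, 5);   /* intra-refresh 5 MBs */
+    (void)PVIFrameRequest(ctrl);                /* next base frame becomes an I-Vop */
+}
+#endif
+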
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVGetEncMemoryUsage()                                        */
+/*  Date     : 10/17/2000                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Int PVGetEncMemoryUsage(VideoEncControls *encCtrl)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+    return encData->encParams->MemoryUsage;
+}
+#endif
+
+/* ======================================================================== */
+/*  Function : PVGetHintTrack()                                             */
+/*  Date     : 1/17/2001,                                                   */
+/*  Purpose  :                                                              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetHintTrack(VideoEncControls *encCtrl, MP4HintTrack *info)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+    info->MTB = encData->hintTrackInfo.MTB;
+    info->LayerID = encData->hintTrackInfo.LayerID;
+    info->CodeType = encData->hintTrackInfo.CodeType;
+    info->RefSelCode = encData->hintTrackInfo.RefSelCode;
+
+    return PV_TRUE;
+}
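+
+/* Illustrative hint-track sketch (added for documentation, compiled out):
+   after each encoded frame the four fields describe the just-coded Vop and
+   can be copied into an MP4 hint track by the caller. */
+#if 0
+static void example_read_hint_track(VideoEncControls *ctrl)
+{
+    MP4HintTrack hint;
+    if (PVGetHintTrack(ctrl, &hint) == PV_TRUE)
+    {
+        /* hint.LayerID    : volID of the coded layer
+           hint.CodeType   : prediction type of the Vop (I or P)
+           hint.RefSelCode : reference selection code
+           hint.MTB        : modulo time base flag */
+    }
+}
+#endif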
+
+/* ======================================================================== */
+/*  Function : PVGetMaxVideoFrameSize()                                     */
+/*  Date     : 7/17/2001,                                                   */
+/*  Purpose  : Function merely returns the maximum buffer size              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetMaxVideoFrameSize(VideoEncControls *encCtrl, Int *maxVideoFrameSize)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+
+
+    *maxVideoFrameSize = encData->encParams->BufferSize[0];
+
+    if (encData->encParams->nLayers == 2)
+        if (*maxVideoFrameSize < encData->encParams->BufferSize[1])
+            *maxVideoFrameSize = encData->encParams->BufferSize[1];
+    *maxVideoFrameSize >>= 3;   /* Convert to Bytes */
+
+    if (*maxVideoFrameSize <= 4000)
+        *maxVideoFrameSize = 4000;
+
+    return PV_TRUE;
+}
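+
+/* Illustrative buffer-sizing sketch (added for documentation, compiled out):
+   PVGetMaxVideoFrameSize() reports the worst-case frame size in bytes (never
+   less than 4000), which is a convenient capacity for the caller's output
+   bitstream buffer.  M4VENC_MALLOC is used here only because this file
+   already relies on it; a real caller would use its own allocator. */
+#if 0
+static UChar *example_alloc_output_buffer(VideoEncControls *ctrl, Int *outSize)
+{
+    if (PVGetMaxVideoFrameSize(ctrl, outSize) != PV_TRUE)
+        return NULL;
+    return (UChar *)M4VENC_MALLOC(*outSize);
+}
+#endif
+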
+#ifndef LIMITED_API
+/* ======================================================================== */
+/*  Function : PVGetVBVSize()                                               */
+/*  Date     : 4/15/2002                                                    */
+/*  Purpose  : Function merely returns the maximum buffer size              */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+OSCL_EXPORT_REF Bool PVGetVBVSize(VideoEncControls *encCtrl, Int *VBVSize)
+{
+    VideoEncData    *encData;
+
+    encData = (VideoEncData *)encCtrl->videoEncoderData;
+
+    if (encData == NULL)
+        return PV_FALSE;
+    if (encData->encParams == NULL)
+        return PV_FALSE;
+
+    *VBVSize = encData->encParams->BufferSize[0];
+    if (encData->encParams->nLayers == 2)
+        *VBVSize += encData->encParams->BufferSize[1];
+
+    return PV_TRUE;
+
+}
+#endif
+/* ======================================================================== */
+/*  Function : EncodeVOS_Start()                                            */
+/*  Date     : 08/22/2000                                                   */
+/*  Purpose  : Encodes the VOS, VO, and VOL or Short Headers                */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+PV_STATUS EncodeVOS_Start(VideoEncControls *encoderControl)
+{
+
+    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+    Vol         *currVol = video->vol[video->currLayer];
+    PV_STATUS status = PV_SUCCESS;
+    //int profile_level=0x01;
+    BitstreamEncVideo *stream = video->bitstream1;
+    int i, j;
+
+    /********************************/
+    /* Check for short_video_header */
+    /********************************/
+    if (currVol->shortVideoHeader == 1)
+        return status;
+    else
+    {
+        /* M4V headers (the short-header case returned above) */
+
+        /**************************/
+        /* VisualObjectSequence ()*/
+        /**************************/
+        status = BitstreamPutGT16Bits(stream, 32, SESSION_START_CODE);
+        /*  Determine profile_level */
+        status = BitstreamPutBits(stream, 8, video->encParams->ProfileLevel[video->currLayer]);
+
+        /******************/
+        /* VisualObject() */
+        /******************/
+
+        status = BitstreamPutGT16Bits(stream, 32, VISUAL_OBJECT_START_CODE);
+        status = BitstreamPut1Bits(stream, 0x00); /* visual object identifier */
+        status = BitstreamPutBits(stream, 4, 0x01); /* visual object Type == "video ID" */
+        status = BitstreamPut1Bits(stream, 0x00); /* no video signal type */
+
+        /*temp   = */
+        BitstreamMpeg4ByteAlignStuffing(stream);
+
+
+        status = BitstreamPutGT16Bits(stream, 27, VO_START_CODE);/* byte align: should be 2 bits */
+        status = BitstreamPutBits(stream, 5, 0x00);/*  Video ID = 0  */
+
+
+
+        /**********************/
+        /* VideoObjectLayer() */
+        /**********************/
+        if (currVol->shortVideoHeader == 0)
+        { /* M4V  else Short Video Header */
+            status = BitstreamPutGT16Bits(stream, VOL_START_CODE_LENGTH, VOL_START_CODE);
+            status = BitstreamPutBits(stream, 4, currVol->volID);/*  video_object_layer_id */
+            status = BitstreamPut1Bits(stream, 0x00);/*  Random Access = 0  */
+
+            if (video->currLayer == 0)
+                status = BitstreamPutBits(stream, 8, 0x01);/* Video Object Type Indication = 1  ... Simple Object Type */
+            else
+                status = BitstreamPutBits(stream, 8, 0x02);/* Video Object Type Indication = 2  ... Simple Scalable Object Type */
+
+            status = BitstreamPut1Bits(stream, 0x00);/*  is_object_layer_identifier = 0 */
+
+
+            status = BitstreamPutBits(stream, 4, 0x01); /* aspect_ratio_info = 1 ... 1:1(Square) */
+            status = BitstreamPut1Bits(stream, 0x00);/* vol_control_parameters = 0 */
+            status = BitstreamPutBits(stream, 2, 0x00);/* video_object_layer_shape = 00 ... rectangular */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 16, currVol->timeIncrementResolution);/* vop_time_increment_resolution */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPut1Bits(stream, currVol->fixedVopRate);/* fixed_vop_rate = 0 */
+
+            /* For Rectangular VO layer shape */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 13, currVol->width);/* video_object_layer_width */
+            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
+            status = BitstreamPutGT8Bits(stream, 13, currVol->height);/* video_object_layer_height */
+            status = BitstreamPut1Bits(stream, 0x01);/*marker bit */
+
+            status = BitstreamPut1Bits(stream, 0x00);/*interlaced = 0 */
+            status = BitstreamPut1Bits(stream, 0x01);/* obmc_disable = 1 */
+            status = BitstreamPut1Bits(stream, 0x00);/* sprite_enable = 0 */
+            status = BitstreamPut1Bits(stream, 0x00);/* not_8_bit = 0 */
+            status = BitstreamPut1Bits(stream, currVol->quantType);/*   quant_type */
+
+            if (currVol->quantType)
+            {
+                status = BitstreamPut1Bits(stream, currVol->loadIntraQuantMat); /* Intra quant matrix */
+                if (currVol->loadIntraQuantMat)
+                {
+                    for (j = 63; j >= 1; j--)
+                        if (currVol->iqmat[*(zigzag_i+j)] != currVol->iqmat[*(zigzag_i+j-1)])
+                            break;
+                    if ((j == 1) && (currVol->iqmat[*(zigzag_i+j)] == currVol->iqmat[*(zigzag_i+j-1)]))
+                        j = 0;
+                    for (i = 0; i < j + 1; i++)
+                        BitstreamPutBits(stream, 8, currVol->iqmat[*(zigzag_i+i)]);
+                    if (j < 63)
+                        BitstreamPutBits(stream, 8, 0);
+                }
+                else
+                {
+                    for (j = 0; j < 64; j++)
+                        currVol->iqmat[j] = mpeg_iqmat_def[j];
+
+                }
+                status = BitstreamPut1Bits(stream, currVol->loadNonIntraQuantMat); /* Non-Intra quant matrix */
+                if (currVol->loadNonIntraQuantMat)
+                {
+                    for (j = 63; j >= 1; j--)
+                        if (currVol->niqmat[*(zigzag_i+j)] != currVol->niqmat[*(zigzag_i+j-1)])
+                            break;
+                    if ((j == 1) && (currVol->niqmat[*(zigzag_i+j)] == currVol->niqmat[*(zigzag_i+j-1)]))
+                        j = 0;
+                    for (i = 0; i < j + 1; i++)
+                        BitstreamPutBits(stream, 8, currVol->niqmat[*(zigzag_i+i)]);
+                    if (j < 63)
+                        BitstreamPutBits(stream, 8, 0);
+                }
+                else
+                {
+                    for (j = 0; j < 64; j++)
+                        currVol->niqmat[j] = mpeg_nqmat_def[j];
+                }
+            }
+
+            status = BitstreamPut1Bits(stream, 0x01);   /* complexity_estimation_disable = 1 */
+            status = BitstreamPut1Bits(stream, currVol->ResyncMarkerDisable);/* Resync_marker_disable */
+            status = BitstreamPut1Bits(stream, currVol->dataPartitioning);/* Data partitioned */
+
+            if (currVol->dataPartitioning)
+                status = BitstreamPut1Bits(stream, currVol->useReverseVLC); /* Reversible_vlc */
+
+
+            if (currVol->scalability) /* Scalability*/
+            {
+
+                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 1 */
+                status = BitstreamPut1Bits(stream, currVol->scalType);/* hierarchy_type ... Spatial = 0 and Temporal = 1 */
+                status = BitstreamPutBits(stream, 4, currVol->refVolID);/* ref_layer_id  */
+                status = BitstreamPut1Bits(stream, currVol->refSampDir);/* ref_layer_sampling_direc*/
+                status = BitstreamPutBits(stream, 5, currVol->horSamp_n);/*hor_sampling_factor_n*/
+                status = BitstreamPutBits(stream, 5, currVol->horSamp_m);/*hor_sampling_factor_m*/
+                status = BitstreamPutBits(stream, 5, currVol->verSamp_n);/*vert_sampling_factor_n*/
+                status = BitstreamPutBits(stream, 5, currVol->verSamp_m);/*vert_sampling_factor_m*/
+                status = BitstreamPut1Bits(stream, currVol->enhancementType);/* enhancement_type*/
+            }
+            else /* No Scalability */
+                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 0 */
+
+            /*temp = */
+            BitstreamMpeg4ByteAlignStuffing(stream); /* Byte align Headers for VOP */
+        }
+    }
+
+    return status;
+}
+
+/* ======================================================================== */
+/*  Function : VOS_End()                                                    */
+/*  Date     : 08/22/2000                                                   */
+/*  Purpose  : Visual Object Sequence End                                   */
+/*  In/out   :                                                              */
+/*  Return   : PV_TRUE if successful, PV_FALSE if failed.                   */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+PV_STATUS VOS_End(VideoEncControls *encoderControl)
+{
+    PV_STATUS status = PV_SUCCESS;
+    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
+    Vol         *currVol = video->vol[video->currLayer];
+    BitstreamEncVideo *stream = currVol->stream;
+
+
+    /* session_end_code is 32 bits: BitstreamPutBits takes (stream, length, value) and handles at most 16 bits */
+    status = BitstreamPutGT16Bits(stream, 32, SESSION_END_CODE);
+
+    return status;
+}
+
+/* ======================================================================== */
+/*  Function : DetermineCodingLayer                                         */
+/*  Date     : 06/02/2001                                                   */
+/*  Purpose  : Find layer to code based on current mod time, assuming that
+               it's time to encode enhanced layer.                          */
+/*  In/out   :                                                              */
+/*  Return   : Number of layer to code.                                     */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime)
+{
+    Vol **vol = video->vol;
+    VideoEncParams *encParams = video->encParams;
+    Int numLayers = encParams->nLayers;
+    UInt modTimeRef = video->modTimeRef;
+    float *LayerFrameRate = encParams->LayerFrameRate;
+    UInt frameNum[4], frameTick;
+    ULong frameModTime, nextFrmModTime;
+#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
+    float frameInterval;
+    Int delta;
+#endif
+    float srcFrameInterval;
+    Int frameInc;
+    Int i, extra_skip;
+    Int encodeVop = 0;
+
+    i = numLayers - 1;
+
+    if (modTime - video->nextModTime > ((ULong)(-1)) >> 1) /* next time wrapped around */
+        return 0; /* not time to code it yet */
+
+    video->relLayerCodeTime[i] -= 1000;
+    video->nextEncIVop--;  /* number of Vops in highest layer resolution. */
+    video->numVopsInGOP++;
+
+    /* from this point frameModTime and nextFrmModTime are internal */
+
+    frameNum[i] = (UInt)((modTime - modTimeRef) * LayerFrameRate[i] + 500) / 1000;
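+    /* Example (illustrative): with LayerFrameRate[i] = 15 fps and a frame arriving
+       1000 ms after modTimeRef, frameNum[i] = (1000*15 + 500)/1000 = 15. */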
+    if (video->volInitialize[i])
+    {
+        video->prevFrameNum[i] = frameNum[i] - 1;
+    }
+    else if (frameNum[i] <= video->prevFrameNum[i])
+    {
+        return 0; /* do not encode this frame */
+    }
+
+    /**** this part computes expected next frame *******/
+    frameModTime = (ULong)(((frameNum[i] * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
+    nextFrmModTime = (ULong)((((frameNum[i] + 1) * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
+
+    srcFrameInterval = 1000 / video->FrameRate;
+
+    video->nextModTime = nextFrmModTime - (ULong)(srcFrameInterval / 2.) - 1; /* between current and next frame */
+
+#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
+    frameInterval = 1000 / LayerFrameRate[i]; /* next rec. time */
+    delta = (Int)(frameInterval / 4); /* empirical number */
+    if (video->nextModTime - modTime  < (ULong)delta) /* need to move nextModTime further. */
+    {
+        video->nextModTime += ((delta - video->nextModTime + modTime)); /* empirical formula  */
+    }
+#endif
+    /****************************************************/
+
+    /* map frame no.to tick from modTimeRef */
+    /*frameTick = (frameNum[i]*vol[i]->timeIncrementResolution) ;
+    frameTick = (UInt)((frameTick + (encParams->LayerFrameRate[i]/2))/encParams->LayerFrameRate[i]);*/
+    /*  11/16/01, change frameTick to be the closest tick from the actual modTime */
+    /*  12/12/02, add (double) to prevent large number wrap-around */
+    frameTick = (Int)(((double)(modTime - modTimeRef) * vol[i]->timeIncrementResolution + 500) / 1000);
+
+    /* find timeIncrement to be put in the bitstream */
+    /* refTick is second boundary reference. */
+    vol[i]->timeIncrement = frameTick - video->refTick[i];
+
+
+    vol[i]->moduloTimeBase = 0;
+    while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
+    {
+        vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
+        vol[i]->moduloTimeBase++;
+        /* do not update refTick and modTimeRef yet, do it after encoding!! */
+    }
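+    /* Example (illustrative): with timeIncrementResolution = 30 and
+       timeIncrement = 65 on entry, the loop leaves timeIncrement = 5 and
+       moduloTimeBase = 2, i.e. two whole seconds plus 5 ticks. */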
+
+    if (video->relLayerCodeTime[i] <= 0)    /* no skipping */
+    {
+        encodeVop = 1;
+        video->currLayer = *nLayer = i;
+        video->relLayerCodeTime[i] += 1000;
+
+        /* takes care of more dropped frame than expected */
+        extra_skip = -1;
+        frameInc = (frameNum[i] - video->prevFrameNum[i]);
+        extra_skip += frameInc;
+
+        if (extra_skip > 0)
+        {   /* update rc->Nr, rc->B, (rc->Rr)*/
+            video->nextEncIVop -= extra_skip;
+            video->numVopsInGOP += extra_skip;
+            if (encParams->RC_Type != CONSTANT_Q)
+            {
+                RC_UpdateBuffer(video, i, extra_skip);
+            }
+        }
+
+    }
+    /* update frame no. */
+    video->prevFrameNum[i] = frameNum[i];
+
+    /* go through all lower layer */
+    for (i = (numLayers - 2); i >= 0; i--)
+    {
+
+        video->relLayerCodeTime[i] -= 1000;
+
+        /* find timeIncrement to be put in the bitstream */
+        vol[i]->timeIncrement = frameTick - video->refTick[i];
+
+        if (video->relLayerCodeTime[i] <= 0) /* time to encode base */
+        {
+            /* 12/27/00 */
+            encodeVop = 1;
+            video->currLayer = *nLayer = i;
+            video->relLayerCodeTime[i] +=
+                (Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]);
+
+            vol[i]->moduloTimeBase = 0;
+            while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
+            {
+                vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
+                vol[i]->moduloTimeBase++;
+                /* do not update refTick and modTimeRef yet, do it after encoding!! */
+            }
+
+            /* takes care of more dropped frame than expected */
+            frameNum[i] = (UInt)((frameModTime - modTimeRef) * encParams->LayerFrameRate[i] + 500) / 1000;
+            if (video->volInitialize[i])
+                video->prevFrameNum[i] = frameNum[i] - 1;
+
+            extra_skip = -1;
+            frameInc = (frameNum[i] - video->prevFrameNum[i]);
+            extra_skip += frameInc;
+
+            if (extra_skip > 0)
+            {   /* update rc->Nr, rc->B, (rc->Rr)*/
+                if (encParams->RC_Type != CONSTANT_Q)
+                {
+                    RC_UpdateBuffer(video, i, extra_skip);
+                }
+            }
+            /* update frame no. */
+            video->prevFrameNum[i] = frameNum[i];
+        }
+    }
+
+#ifdef _PRINT_STAT
+    if (encodeVop)
+        printf(" TI: %d ", vol[*nLayer]->timeIncrement);
+#endif
+
+    return encodeVop;
+}
+
+/* ======================================================================== */
+/*  Function : DetermineVopType                                             */
+/*  Date     : 06/02/2001                                                   */
+/*  Purpose  : Decide whether the current VOP is coded as I_VOP or P_VOP.   */
+/*  In/out   :                                                              */
+/*  Return   : void .                                                       */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+void DetermineVopType(VideoEncData *video, Int currLayer)
+{
+    VideoEncParams *encParams = video->encParams;
+//  Vol *currVol = video->vol[currLayer];
+
+    if (encParams->IntraPeriod == 0) /* I-VOPs only */
+    {
+        if (video->currLayer > 0)
+            video->currVop->predictionType = P_VOP;
+        else
+        {
+            video->currVop->predictionType = I_VOP;
+            if (video->numVopsInGOP >= 132)
+                video->numVopsInGOP = 0;
+        }
+    }
+    else if (encParams->IntraPeriod == -1)  /* IPPPPP... */
+    {
+
+        /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
+        if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
+            video->currVop->predictionType = P_VOP;
+
+        if (video->currLayer == 0)
+        {
+            if (/*video->numVopsInGOP>=132 || */video->volInitialize[currLayer])
+            {
+                video->currVop->predictionType = I_VOP;
+                video->numVopsInGOP = 0; /* force INTRA update every 132 base frames*/
+                video->nextEncIVop = 1;
+            }
+            else if (video->nextEncIVop == 0 || video->currVop->predictionType == I_VOP)
+            {
+                video->numVopsInGOP = 0;
+                video->nextEncIVop = 1;
+            }
+        }
+    }
+    else   /* IntraPeriod>0 : IPPPPPIPPPPPI... */
+    {
+
+        /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
+        if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
+            video->currVop->predictionType = P_VOP;
+
+        if (currLayer == 0)
+        {
+            if (video->nextEncIVop <= 0 || video->currVop->predictionType == I_VOP)
+            {
+                video->nextEncIVop = encParams->IntraPeriod;
+                video->currVop->predictionType = I_VOP;
+                video->numVopsInGOP = 0;
+            }
+        }
+    }
+
+    return ;
+}
+
+/* ======================================================================== */
+/*  Function : UpdateSkipNextFrame                                          */
+/*  Date     : 06/02/2001                                                   */
+/*  Purpose  : From rate control frame skipping decision, update timing
+                related parameters.                                         */
+/*  In/out   :                                                              */
+/*  Return   : Current coded layer.                                         */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status)
+{
+    Int currLayer = video->currLayer;
+    Int nLayer = currLayer;
+    VideoEncParams *encParams = video->encParams;
+    Int numLayers = encParams->nLayers;
+    Vol *currVol = video->vol[currLayer];
+    Vol **vol = video->vol;
+    Int num_skip, extra_skip;
+    Int i;
+    UInt newRefTick, deltaModTime;
+    UInt temp;
+
+    if (encParams->RC_Type != CONSTANT_Q)
+    {
+        if (video->volInitialize[0] && currLayer == 0)  /* always encode the first frame */
+        {
+            RC_ResetSkipNextFrame(video, currLayer);
+            //return currLayer;  09/15/05
+        }
+        else
+        {
+            if (RC_GetSkipNextFrame(video, currLayer) < 0 || status == PV_END_OF_BUF)   /* Skip Current Frame */
+            {
+
+#ifdef _PRINT_STAT
+                printf("Skip current frame");
+#endif
+                currVol->moduloTimeBase = currVol->prevModuloTimeBase;
+
+                /*********************/
+                /* prepare to return */
+                /*********************/
+                *size = 0;  /* Set Bitstream buffer to zero */
+
+                /* Determine nLayer and modTime for next encode */
+
+                *modTime = video->nextModTime;
+                nLayer = -1;
+
+                return nLayer; /* return immediately without updating RefTick & modTimeRef */
+                /* If I-VOP was attempted, then ensure next base is I-VOP */
+                /*if((encParams->IntraPeriod>0) && (video->currVop->predictionType == I_VOP))
+                video->nextEncIVop = 0; commented out by 06/05/01 */
+
+            }
+            else if ((num_skip = RC_GetSkipNextFrame(video, currLayer)) > 0)
+            {
+
+#ifdef _PRINT_STAT
+                printf("Skip next %d frames", num_skip);
+#endif
+                /* to keep the Nr of enh layer the same */
+                /* adjust relLayerCodeTime only, do not adjust layerCodeTime[numLayers-1] */
+                extra_skip = 0;
+                for (i = 0; i < currLayer; i++)
+                {
+                    if (video->relLayerCodeTime[i] <= 1000)
+                    {
+                        extra_skip = 1;
+                        break;
+                    }
+                }
+
+                for (i = currLayer; i < numLayers; i++)
+                {
+                    video->relLayerCodeTime[i] += (num_skip + extra_skip) *
+                                                  ((Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]));
+                }
+            }
+        }/* first frame */
+    }
+    /*****  current frame is encoded, now update refTick ******/
+
+    video->refTick[currLayer] += vol[currLayer]->prevModuloTimeBase * vol[currLayer]->timeIncrementResolution;
+
+    /* Reset layerCodeTime every I-VOP to prevent overflow */
+    if (currLayer == 0)
+    {
+        /*  12/12/02, fix for weird target frame rate of 9.99 fps or 3.33 fps */
+        if (((encParams->IntraPeriod != 0) /*&& (video->currVop->predictionType==I_VOP)*/) ||
+                ((encParams->IntraPeriod == 0) && (video->numVopsInGOP == 0)))
+        {
+            newRefTick = video->refTick[0];
+
+            for (i = 1; i < numLayers; i++)
+            {
+                if (video->refTick[i] < newRefTick)
+                    newRefTick = video->refTick[i];
+            }
+
+            /* check to make sure that the update is integer multiple of frame number */
+            /* how many msec elapsed from last modTimeRef */
+            deltaModTime = (newRefTick / vol[0]->timeIncrementResolution) * 1000;
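+            /* Example (illustrative): newRefTick = 60 with timeIncrementResolution = 30
+               corresponds to deltaModTime = 2 * 1000 = 2000 ms since modTimeRef. */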
+
+            for (i = numLayers - 1; i >= 0; i--)
+            {
+                temp = (UInt)(deltaModTime * encParams->LayerFrameRate[i]); /* 12/12/02 */
+                if (temp % 1000)
+                    newRefTick = 0;
+
+            }
+            if (newRefTick > 0)
+            {
+                video->modTimeRef += deltaModTime;
+                for (i = numLayers - 1; i >= 0; i--)
+                {
+                    video->prevFrameNum[i] -= (UInt)(deltaModTime * encParams->LayerFrameRate[i]) / 1000;
+                    video->refTick[i] -= newRefTick;
+                }
+            }
+        }
+    }
+
+    *modTime =  video->nextModTime;
+
+    return nLayer;
+}
+
+
+#ifndef ORIGINAL_VERSION
+
+/* ======================================================================== */
+/*  Function : SetProfile_BufferSize                                        */
+/*  Date     : 04/08/2002                                                   */
+/*  Purpose  : Set profile and video buffer size, copied from Jim's code    */
+/*             in PVInitVideoEncoder(.), since we have different places     */
+/*             to reset profile and video buffer size                       */
+/*  In/out   :                                                              */
+/*  Return   :                                                              */
+/*  Modified :                                                              */
+/*                                                                          */
+/* ======================================================================== */
+
+Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized)
+{
+    Int i, j, start, end;
+//  Int BaseMBsPerSec = 0, EnhMBsPerSec = 0;
+    Int nTotalMB = 0;
+    Int idx, temp_w, temp_h, max = 0, max_width, max_height;
+
+    Int nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
+
+    Int total_bitrate = 0, base_bitrate;
+    Int total_packet_size = 0, base_packet_size;
+    Int total_MBsPerSec = 0, base_MBsPerSec;
+    Int total_VBV_size = 0, base_VBV_size, enhance_VBV_size = 0;
+    float total_framerate, base_framerate;
+    float upper_bound_ratio;
+    Int bFound = 0;
+    Int k = 0, width16, height16, index;
+    Int lowest_level;
+
+#define MIN_BUFF    16000 /* 16k minimum buffer size */
+#define BUFF_CONST  2.0    /* 2000ms */
+#define UPPER_BOUND_RATIO 8.54 /* upper_bound = 1.4*(1.1+bound/10)*bitrate/framerate */
+
+#define QCIF_WIDTH  176
+#define QCIF_HEIGHT 144
+
+    index = video->encParams->profile_table_index;
+
+    /* Calculate "nTotalMB" */
+    /* Find the maximum width*height for memory allocation of the VOPs */
+    for (idx = 0; idx < nLayers; idx++)
+    {
+        temp_w = video->encParams->LayerWidth[idx];
+        temp_h = video->encParams->LayerHeight[idx];
+
+        if ((temp_w*temp_h) > max)
+        {
+            max = temp_w * temp_h;
+            max_width = temp_w;
+            max_height = temp_h;
+            nTotalMB = ((max_width + 15) >> 4) * ((max_height + 15) >> 4);
+        }
+    }
+    upper_bound_ratio = (video->encParams->RC_Type == CBR_LOWDELAY ? (float)5.0 : (float)UPPER_BOUND_RATIO);
+
+
+    /* Get the basic information: bitrate, packet_size, MBs/s and VBV_size */
+    base_bitrate        = video->encParams->LayerBitRate[0];
+    if (video->encParams->LayerMaxBitRate[0] != 0) /* video->encParams->LayerMaxBitRate[0] == 0 means it has not been set */
+    {
+        base_bitrate    = PV_MAX(base_bitrate, video->encParams->LayerMaxBitRate[0]);
+    }
+    else /* if the max is not set, set it to the specified profile/level */
+    {
+        video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[index];
+    }
+
+    base_framerate      = video->encParams->LayerFrameRate[0];
+    if (video->encParams->LayerMaxFrameRate[0] != 0)
+    {
+        base_framerate  = PV_MAX(base_framerate, video->encParams->LayerMaxFrameRate[0]);
+    }
+    else /* if the max is not set, set it to the specified profile/level */
+    {
+        video->encParams->LayerMaxFrameRate[0] = (float)profile_level_max_mbsPerSec[index] / nTotalMB;
+    }
+
+    base_packet_size    = video->encParams->ResyncPacketsize;
+    base_MBsPerSec      = (Int)(base_framerate * nTotalMB);
+    base_VBV_size       = PV_MAX((Int)(base_bitrate * delay),
+                                 (Int)(upper_bound_ratio * base_bitrate / base_framerate));
+    base_VBV_size       = PV_MAX(base_VBV_size, MIN_BUFF);
+
+    /* if the buffer is larger than maximum buffer size, we'll clip it */
+    if (base_VBV_size > profile_level_max_VBV_size[5])
+        base_VBV_size = profile_level_max_VBV_size[5];
+
+
+    /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
+    if (nLayers == 1 && base_VBV_size > profile_level_max_VBV_size[index])
+        return FALSE;
+
+
+    if (nLayers == 2)
+    {
+        total_bitrate       = video->encParams->LayerBitRate[1];
+        if (video->encParams->LayerMaxBitRate[1] != 0)
+        {
+            total_bitrate   = PV_MIN(total_bitrate, video->encParams->LayerMaxBitRate[1]);
+        }
+        else /* if the max is not set, set it to the specified profile/level */
+        {
+            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[index];
+        }
+
+        total_framerate     = video->encParams->LayerFrameRate[1];
+        if (video->encParams->LayerMaxFrameRate[1] != 0)
+        {
+            total_framerate     = PV_MIN(total_framerate, video->encParams->LayerMaxFrameRate[1]);
+        }
+        else /* if the max is not set, set it to the specified profile/level */
+        {
+            video->encParams->LayerMaxFrameRate[1] = (float)scalable_profile_level_max_mbsPerSec[index] / nTotalMB;
+        }
+
+        total_packet_size   = video->encParams->ResyncPacketsize;
+        total_MBsPerSec     = (Int)(total_framerate * nTotalMB);
+
+        enhance_VBV_size    = PV_MAX((Int)((total_bitrate - base_bitrate) * delay),
+                                     (Int)(upper_bound_ratio * (total_bitrate - base_bitrate) / (total_framerate - base_framerate)));
+        enhance_VBV_size    = PV_MAX(enhance_VBV_size, MIN_BUFF);
+
+        total_VBV_size      = base_VBV_size + enhance_VBV_size;
+
+        /* if the buffer is larger than maximum buffer size, we'll clip it */
+        if (total_VBV_size > scalable_profile_level_max_VBV_size[6])
+        {
+            total_VBV_size = scalable_profile_level_max_VBV_size[6];
+            enhance_VBV_size = total_VBV_size - base_VBV_size;
+        }
+
+        /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
+        if (total_VBV_size > scalable_profile_level_max_VBV_size[index])
+            return FALSE;
+    }
+
+
+    if (!bInitialized) /* already initialized earlier: profile@level is known, just refresh the buffer sizes */
+    {
+        video->encParams->BufferSize[0] = base_VBV_size;
+        if (nLayers > 1)
+            video->encParams->BufferSize[1] = enhance_VBV_size;
+
+        return PV_TRUE;
+    }
+
+
+    /* Profile @ level determination */
+    if (nLayers == 1)
+    {
+        /* BASE ONLY : Simple Profile(SP) Or Core Profile(CP) */
+        if (base_bitrate     > profile_level_max_bitrate[index]     ||
+                base_packet_size > profile_level_max_packet_size[index] ||
+                base_MBsPerSec   > profile_level_max_mbsPerSec[index]   ||
+                base_VBV_size    > profile_level_max_VBV_size[index])
+
+            return PV_FALSE; /* Beyond the bound of Core Profile @ Level2 */
+
+        /* For H263/Short header, determine k*16384 */
+        width16  = ((video->encParams->LayerWidth[0] + 15) >> 4) << 4;
+        height16 = ((video->encParams->LayerHeight[0] + 15) >> 4) << 4;
+        if (video->encParams->H263_Enabled)
+        {
+            k = 4;
+            if (width16  == 2*QCIF_WIDTH && height16 == 2*QCIF_HEIGHT)  /* CIF */
+                k = 16;
+
+            else if (width16  == 4*QCIF_WIDTH && height16 == 4*QCIF_HEIGHT)  /* 4CIF */
+                k = 32;
+
+            else if (width16  == 8*QCIF_WIDTH && height16 == 8*QCIF_HEIGHT)  /* 16CIF */
+                k = 64;
+
+            video->encParams->maxFrameSize  = k * 16384;
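+            /* Example (illustrative): CIF input (352x288) gives k = 16, so
+               maxFrameSize = 16 * 16384 = 262144. */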
+
+            /* Make sure the buffer size is limited to the top profile and level: the Core profile and level 2 */
+            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[5]*1001.0 / 30000.0))
+                base_VBV_size = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[5] * 1001.0 / 30000.0);
+
+            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[index]*1001.0 / 30000.0))
+                return PV_FALSE;
+        }
+
+        /* Search the appropriate profile@level index */
+        if (!video->encParams->H263_Enabled &&
+                (video->encParams->IntraDCVlcThr != 0 || video->encParams->SearchRange > 16))
+        {
+            lowest_level = 1; /* cannot allow SPL0 */
+        }
+        else
+        {
+            lowest_level = 0; /* SPL0 */
+        }
+
+        for (i = lowest_level; i <= index; i++)
+        {
+            if (i != 4 && /* skip Core Profile@Level1 because the parameters in it are smaller than those in Simple Profile@Level3 */
+                    base_bitrate     <= profile_level_max_bitrate[i]     &&
+                    base_packet_size <= profile_level_max_packet_size[i] &&
+                    base_MBsPerSec   <= profile_level_max_mbsPerSec[i]   &&
+                    base_VBV_size    <= (video->encParams->H263_Enabled ? (Int)(k*16384 + 4*(float)profile_level_max_bitrate[i]*1001.0 / 30000.0) :
+                                         profile_level_max_VBV_size[i]))
+                break;
+        }
+        if (i > index) return PV_FALSE; /* Nothing found!! */
+
+        /* Found out the actual profile @ level : index "i" */
+        if (i == 0)
+        {
+            /* For Simple Profile @ Level 0, we need to do one more check: image size <= QCIF */
+            if (width16 > QCIF_WIDTH || height16 > QCIF_HEIGHT)
+                i = 1; /* image size > QCIF, then set SP level1 */
+        }
+
+        video->encParams->ProfileLevel[0] = profile_level_code[i];
+        video->encParams->BufferSize[0]   = base_VBV_size;
+
+        if (video->encParams->LayerMaxBitRate[0] == 0)
+            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[i];
+
+        if (video->encParams->LayerMaxFrameRate[0] == 0)
+            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[i] / nTotalMB);
+
+        /* For H263/Short header, one special constraint for VBV buffer size */
+        if (video->encParams->H263_Enabled)
+            video->encParams->BufferSize[0] = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[i] * 1001.0 / 30000.0);
+
+    }
+    else
+    {
+        /* SCALABLE MODE: Simple Scalable Profile(SSP) Or Core Scalable Profile(CSP) */
+
+        if (total_bitrate       > scalable_profile_level_max_bitrate[index]     ||
+                total_packet_size   > scalable_profile_level_max_packet_size[index] ||
+                total_MBsPerSec     > scalable_profile_level_max_mbsPerSec[index]   ||
+                total_VBV_size      > scalable_profile_level_max_VBV_size[index])
+
+            return PV_FALSE; /* Beyond given profile and level */
+
+        /* One-time check: Simple Scalable Profile or Core Scalable Profile */
+        if (total_bitrate       <= scalable_profile_level_max_bitrate[2]        &&
+                total_packet_size   <= scalable_profile_level_max_packet_size[2]    &&
+                total_MBsPerSec     <= scalable_profile_level_max_mbsPerSec[2]      &&
+                total_VBV_size      <= scalable_profile_level_max_VBV_size[2])
+
+        {
+            start = 0;
+            end = index;
+        }
+
+        else
+        {
+            start = 4;
+            end = index;
+        }
+
+
+        /* Search the scalable profile */
+        for (i = start; i <= end; i++)
+        {
+            if (total_bitrate       <= scalable_profile_level_max_bitrate[i]     &&
+                    total_packet_size   <= scalable_profile_level_max_packet_size[i] &&
+                    total_MBsPerSec     <= scalable_profile_level_max_mbsPerSec[i]   &&
+                    total_VBV_size      <= scalable_profile_level_max_VBV_size[i])
+
+                break;
+        }
+        if (i > end) return PV_FALSE;
+
+        /* Search the base profile */
+        if (i == 0)
+        {
+            j = 0;
+            bFound = 1;
+        }
+        else        bFound = 0;
+
+        for (j = start; !bFound && j <= i; j++)
+        {
+            if (base_bitrate        <= profile_level_max_bitrate[j]      &&
+                    base_packet_size    <= profile_level_max_packet_size[j]  &&
+                    base_MBsPerSec      <= profile_level_max_mbsPerSec[j]    &&
+                    base_VBV_size       <= profile_level_max_VBV_size[j])
+
+            {
+                bFound = 1;
+                break;
+            }
+        }
+
+        if (!bFound) // && start == 4)
+            return PV_FALSE; /* mis-match in the profiles between base layer and enhancement layer */
+
+        /* j for base layer, i for enhancement layer */
+        video->encParams->ProfileLevel[0] = profile_level_code[j];
+        video->encParams->ProfileLevel[1] = scalable_profile_level_code[i];
+        video->encParams->BufferSize[0]   = base_VBV_size;
+        video->encParams->BufferSize[1]   = enhance_VBV_size;
+
+        if (video->encParams->LayerMaxBitRate[0] == 0)
+            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[j];
+
+        if (video->encParams->LayerMaxBitRate[1] == 0)
+            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[i];
+
+        if (video->encParams->LayerMaxFrameRate[0] == 0)
+            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[j] / nTotalMB);
+
+        if (video->encParams->LayerMaxFrameRate[1] == 0)
+            video->encParams->LayerMaxFrameRate[1] = PV_MIN(30, (float)scalable_profile_level_max_mbsPerSec[i] / nTotalMB);
+
+
+    } /* end of: if(nLayers == 1) */
+
+
+    if (!video->encParams->H263_Enabled && (video->encParams->ProfileLevel[0] == 0x08)) /* SPL0 restriction*/
+    {
+        /* PV only allow frame-based rate control, no QP change from one MB to another
+        if(video->encParams->ACDCPrediction == TRUE && MB-based rate control)
+         return PV_FALSE */
+    }
+
+    return PV_TRUE;
+}
+
+#endif /* #ifndef ORIGINAL_VERSION */
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_lib.h b/media/codecs/m4v_h263/enc/src/mp4enc_lib.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_lib.h
rename to media/codecs/m4v_h263/enc/src/mp4enc_lib.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4lib_int.h b/media/codecs/m4v_h263/enc/src/mp4lib_int.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/mp4lib_int.h
rename to media/codecs/m4v_h263/enc/src/mp4lib_int.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp b/media/codecs/m4v_h263/enc/src/rate_control.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp
rename to media/codecs/m4v_h263/enc/src/rate_control.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.h b/media/codecs/m4v_h263/enc/src/rate_control.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/rate_control.h
rename to media/codecs/m4v_h263/enc/src/rate_control.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad.cpp b/media/codecs/m4v_h263/enc/src/sad.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad.cpp
rename to media/codecs/m4v_h263/enc/src/sad.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel.cpp b/media/codecs/m4v_h263/enc/src/sad_halfpel.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel.cpp
rename to media/codecs/m4v_h263/enc/src/sad_halfpel.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel_inline.h b/media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
rename to media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_inline.h b/media/codecs/m4v_h263/enc/src/sad_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_inline.h
rename to media/codecs/m4v_h263/enc/src/sad_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/sad_mb_offset.h b/media/codecs/m4v_h263/enc/src/sad_mb_offset.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/sad_mb_offset.h
rename to media/codecs/m4v_h263/enc/src/sad_mb_offset.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_enc_tab.h b/media/codecs/m4v_h263/enc/src/vlc_enc_tab.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_enc_tab.h
rename to media/codecs/m4v_h263/enc/src/vlc_enc_tab.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp b/media/codecs/m4v_h263/enc/src/vlc_encode.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp
rename to media/codecs/m4v_h263/enc/src/vlc_encode.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.h b/media/codecs/m4v_h263/enc/src/vlc_encode.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.h
rename to media/codecs/m4v_h263/enc/src/vlc_encode.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode_inline.h b/media/codecs/m4v_h263/enc/src/vlc_encode_inline.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode_inline.h
rename to media/codecs/m4v_h263/enc/src/vlc_encode_inline.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vop.cpp b/media/codecs/m4v_h263/enc/src/vop.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/src/vop.cpp
rename to media/codecs/m4v_h263/enc/src/vop.cpp
diff --git a/media/codecs/m4v_h263/enc/test/Android.bp b/media/codecs/m4v_h263/enc/test/Android.bp
new file mode 100644
index 0000000..2b5e49c
--- /dev/null
+++ b/media/codecs/m4v_h263/enc/test/Android.bp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_m4v_h263_enc_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: [
+        "frameworks_av_media_codecs_m4v_h263_enc_license",
+    ],
+}
+
+cc_test {
+    name: "Mpeg4H263EncoderTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs : [ "Mpeg4H263EncoderTest.cpp" ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263enc",
+    ],
+
+    cflags: [
+        "-DOSCL_IMPORT_REF=",
+        "-Wall",
+        "-Werror",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+            "unsigned-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/AndroidTest.xml b/media/codecs/m4v_h263/enc/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/AndroidTest.xml
rename to media/codecs/m4v_h263/enc/test/AndroidTest.xml
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp b/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
rename to media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h b/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
rename to media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/README.md b/media/codecs/m4v_h263/enc/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/README.md
rename to media/codecs/m4v_h263/enc/test/README.md
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp b/media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
rename to media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
diff --git a/media/codecs/m4v_h263/fuzzer/Android.bp b/media/codecs/m4v_h263/fuzzer/Android.bp
new file mode 100644
index 0000000..a052c11
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/Android.bp
@@ -0,0 +1,120 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "mpeg4_h263_dec_fuzz_defaults",
+
+    host_supported: true,
+
+    srcs: [
+        "mpeg4_h263_dec_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263dec",
+        "liblog",
+    ],
+
+    cflags: [
+        "-DOSCL_IMPORT_REF=",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "mpeg4_dec_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_dec_fuzz_defaults",
+    ],
+
+    cflags: [
+        "-DMPEG4",
+    ],
+}
+
+cc_fuzz {
+    name: "h263_dec_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_dec_fuzz_defaults",
+    ],
+}
+
+cc_defaults {
+    name: "mpeg4_h263_enc_fuzz_defaults",
+
+    host_supported: true,
+
+    srcs: ["mpeg4_h263_enc_fuzzer.cpp"],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_m4vh263enc",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "mpeg4_enc_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_enc_fuzz_defaults",
+    ],
+
+    cflags: ["-DMPEG4"],
+}
+
+cc_fuzz {
+    name: "h263_enc_fuzzer",
+
+    defaults: [
+        "mpeg4_h263_enc_fuzz_defaults",
+    ],
+}
diff --git a/media/codecs/m4v_h263/fuzzer/README.md b/media/codecs/m4v_h263/fuzzer/README.md
new file mode 100644
index 0000000..ad4ff97
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/README.md
@@ -0,0 +1,158 @@
+# Fuzzer for libstagefright_m4vh263dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+Dict files (dictionary files) are created for MPEG4 and H263 to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that the decoder does not reject any input file at the first check.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+ * If the decode operation was successful, the input is advanced by the number of bytes consumed
+   in the decode call.
+ * If the decode operation was unsuccessful, the input is advanced by 1 byte so that the fuzzer
+   can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge, malformed, etc.)
+and doesn't `exit()` on any input, thereby increasing the chance of identifying vulnerabilities.
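+
+A minimal sketch of this advance logic is shown below; `tryDecodeOneFrame` is a hypothetical
+stand-in for the PVDecodeVopHeader/PVDecodeVopBody calls, not the exact fuzzer code:
+```cpp
+#include <stdint.h>
+#include <stddef.h>
+
+// Hypothetical helper: returns the number of bytes consumed on success, or 0 on failure.
+static int tryDecodeOneFrame(const uint8_t * /*frame*/, size_t /*available*/) {
+    return 0;  // stub; the real fuzzer drives the PV decoder here
+}
+
+static void feedAll(const uint8_t *data, size_t size) {
+    size_t offset = 0;
+    size_t numDecodeCalls = 0;
+    while (offset < size && numDecodeCalls < 100) {       // capped so the fuzzer does not time out
+        int consumed = tryDecodeOneFrame(data + offset, size - offset);
+        offset += (consumed > 0) ? (size_t)consumed : 1;  // consumed bytes on success, 1 byte otherwise
+        ++numDecodeCalls;
+    }
+}
+```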
+
+##### Other considerations
+ * Two fuzzer binaries, mpeg4_dec_fuzzer and h263_dec_fuzzer, are generated based on the presence
+   of the 'MPEG4' compile flag.
+ * The number of decode calls is kept to a maximum of 100 so that the fuzzer does not time out.
+
+## Build
+
+This describes the steps to build the mpeg4_dec_fuzzer and h263_dec_fuzzer binaries.
+
+### Android
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg4_dec_fuzzer
+  $ mm -j$(nproc) h263_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR, copy some MPEG4 or H263 files into it, and push the directory
+to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_dec_fuzzer/mpeg4_dec_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_dec_fuzzer/h263_dec_fuzzer CORPUS_DIR
+```
+
+# Fuzzer for libstagefright_m4vh263enc encoder
+
+## Plugin Design Considerations
+The fuzzer plugin for MPEG4/H263 is designed based on the understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+MPEG4/H263 supports the following parameters:
+1. Frame Width (parameter name: `encWidth`)
+2. Frame Height (parameter name: `encHeight`)
+3. Rate control mode (parameter name: `rcType`)
+4. Number of bytes per packet (parameter name: `packetSize`)
+5. QP for I-VOP (parameter name: `iQuant`)
+6. QP for P-VOP (parameter name: `pQuant`)
+7. Enable RVLC mode (parameter name: `rvlcEnable`)
+8. Quantization mode (parameter name: `quantType`)
+9. Disable frame skipping (parameter name: `noFrameSkipped`)
+10. Enable scene change detection (parameter name: `sceneDetect`)
+11. Number of intra MBs in P-frame (parameter name: `numIntraMB`)
+12. Search range of ME (parameter name: `searchRange`)
+13. Enable 8x8 ME and MC (parameter name: `mv8x8Enable`)
+14. Enable AC prediction (parameter name: `useACPred`)
+15. Threshold for intra DC VLC (parameter name: `intraDCVlcTh`)
+16. Encoding Mode (parameter name: `encMode`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `rcType` | 0. `CONSTANT_Q` 1. `CBR_1` 2. `VBR_1` 3. `CBR_2` 4. `VBR_2` 5. `CBR_LOWDELAY` | All the bits of 6th byte of data modulo 6 |
+| `packetSize` | In the range `0 to 255` | All the bits of 7th byte of data |
+| `iQuant` | In the range `1 to 31` | All the bits of 8th byte of data |
+| `pQuant` | In the range `1 to 31` | All the bits of 9th byte of data |
+| `rvlcEnable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 10th byte of data |
+| `quantType` | 0. `0` 1. `1` | bit 0 of 11th byte of data |
+| `noFrameSkipped` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 12th byte of data |
+| `sceneDetect` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 13th byte of data |
+| `numIntraMB` | In the range `0 to 7` | bit 0, 1 and 2 of 14th byte of data |
+| `searchRange` | In the range `0 to 31` | bit 0, 1, 2, 3 and 4 of 15th byte of data |
+| `mv8x8Enable` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 16th byte of data |
+| `useACPred` | 0. `PV_OFF` 1. `PV_ON` | bit 0 of 17th byte of data |
+| `intraDCVlcTh` | In the range `0 to 7` | bit 0, 1 and 2 of 18th byte of data |
+
+The following parameters apply only to mpeg4_enc_fuzzer:
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | In the range `0 to 10239` | All the bits of 1st and 2nd byte of data |
+| `encHeight` | In the range `0 to 10239` | All the bits of 3rd and 4th byte of data |
+| `encMode` | 0. `H263_MODE` 1. `H263_MODE_WITH_ERR_RES` 2. `DATA_PARTITIONING_MODE` 3. `COMBINE_MODE_NO_ERR_RES` 4. `COMBINE_MODE_WITH_ERR_RES` | All the bits of 19th byte of data modulo 5 |
+
+The following parameters apply only to h263_enc_fuzzer:
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `encWidth` | 0. `128` 1. `176` 2. `352` 3. `704` 4. `1408` | All the bits of 1st byte of data modulo 5 |
+| `encHeight` | 0. `96` 1. `144` 2. `288` 3. `576` 4. `1152` | All the bits of 3rd byte of data modulo 5 |
+| `encMode` | 0. `SHORT_HEADER` 1. `SHORT_HEADER_WITH_ERR_RES` | All the bits of 19th byte of data modulo 2 |
+
+This also ensures that the plugin is always deterministic for any given input.
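+
+As an illustration of the byte-to-parameter mapping above, a minimal sketch (indices follow
+the tables; `deriveConfig` is illustrative, not the exact fuzzer code):
+```cpp
+#include <stdint.h>
+#include <stddef.h>
+
+struct SketchConfig {
+    int rcType;      // index into the rate-control list (6th byte modulo 6)
+    int packetSize;  // bytes per packet (7th byte, 0 to 255)
+    int numIntraMB;  // intra MBs per P-frame (bits 0-2 of 14th byte)
+};
+
+static SketchConfig deriveConfig(const uint8_t *data, size_t size) {
+    SketchConfig cfg = {};
+    if (size < 19) return cfg;         // need at least the 19 configuration bytes
+    cfg.rcType = data[5] % 6;          // six rate-control modes are listed above
+    cfg.packetSize = data[6];
+    cfg.numIntraMB = data[13] & 0x07;  // three low bits give the range 0 to 7
+    return cfg;
+}
+```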
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+If the encode operation was successful, the input is advanced by the frame size.
+If the encode operation was un-successful, the input is still advanced by frame size so
+that the fuzzer can proceed to feed the next frame.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
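+
+A minimal sketch of this per-frame advance, assuming YUV 4:2:0 input as used in the test
+corpus (`encodeOneFrame` is a hypothetical stand-in, not the exact fuzzer code):
+```cpp
+#include <stdint.h>
+#include <stddef.h>
+
+// Hypothetical helper: the return value is ignored because the offset advances by a
+// full frame whether or not the encode call succeeds.
+static bool encodeOneFrame(const uint8_t * /*frame*/, size_t /*frameSize*/) { return true; }
+
+static void feedAllFrames(const uint8_t *data, size_t size, size_t width, size_t height) {
+    const size_t frameSize = width * height * 3 / 2;  // YUV 4:2:0 is 1.5 bytes per pixel
+    for (size_t offset = 0; frameSize > 0 && offset + frameSize <= size; offset += frameSize) {
+        encodeOneFrame(data + offset, frameSize);     // advance by one frame regardless of result
+    }
+}
+```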
+
+## Build
+
+This describes the steps to build the mpeg4_enc_fuzzer and h263_enc_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg4_enc_fuzzer
+  $ mm -j$(nproc) h263_enc_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR, copy some YUV files into it, and push the directory
+to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mpeg4_enc_fuzzer/mpeg4_enc_fuzzer CORPUS_DIR
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/h263_enc_fuzzer/h263_enc_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict b/media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
new file mode 100644
index 0000000..591d37e
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x80\x02"
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict b/media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
new file mode 100644
index 0000000..76241a6
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x01\xB0"
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
new file mode 100644
index 0000000..912c821
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
@@ -0,0 +1,205 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "mp4dec_api.h"
+#define MPEG4_MAX_WIDTH 1920
+#define MPEG4_MAX_HEIGHT 1080
+#define H263_MAX_WIDTH 352
+#define H263_MAX_HEIGHT 288
+#define DEFAULT_WIDTH 352
+#define DEFAULT_HEIGHT 288
+
+constexpr size_t kMaxNumDecodeCalls = 100;
+constexpr uint8_t kNumOutputBuffers = 2;
+constexpr int kLayer = 1;
+
+struct tagvideoDecControls;
+
+/* == ceil(num / den) * den. T must be integer type, alignment must be positive power of 2 */
+template <class T, class U>
+inline static const T align(const T &num, const U &den) {
+  return (num + (T)(den - 1)) & (T) ~(den - 1);
+}
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() { deInitDecoder(); }
+  bool initDecoder();
+  bool allocOutputBuffer(size_t outputBufferSize);
+  void freeOutputBuffer();
+  void handleResolutionChange();
+  void decodeFrames(const uint8_t *data, size_t size);
+  void deInitDecoder();
+
+ private:
+  tagvideoDecControls *mDecHandle = nullptr;
+  uint8_t *mOutputBuffer[kNumOutputBuffers];
+  bool mInitialized = false;
+  bool mFramesConfigured = false;
+#ifdef MPEG4
+  MP4DecodingMode mInputMode = MPEG4_MODE;
+  size_t mMaxWidth = MPEG4_MAX_WIDTH;
+  size_t mMaxHeight = MPEG4_MAX_HEIGHT;
+#else
+  MP4DecodingMode mInputMode = H263_MODE;
+  size_t mMaxWidth = H263_MAX_WIDTH;
+  size_t mMaxHeight = H263_MAX_HEIGHT;
+#endif
+  uint32_t mNumSamplesOutput = 0;
+  uint32_t mWidth = DEFAULT_WIDTH;
+  uint32_t mHeight = DEFAULT_HEIGHT;
+};
+
+bool Codec::initDecoder() {
+  mDecHandle = new tagvideoDecControls;
+  if (!mDecHandle) {
+    return false;
+  }
+  memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+  return true;
+}
+
+bool Codec::allocOutputBuffer(size_t outputBufferSize) {
+  for (uint8_t i = 0; i < kNumOutputBuffers; ++i) {
+    if (!mOutputBuffer[i]) {
+      mOutputBuffer[i] = static_cast<uint8_t *>(malloc(outputBufferSize));
+      if (!mOutputBuffer[i]) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+void Codec::freeOutputBuffer() {
+  for (uint8_t i = 0; i < kNumOutputBuffers; ++i) {
+    if (mOutputBuffer[i]) {
+      free(mOutputBuffer[i]);
+      mOutputBuffer[i] = nullptr;
+    }
+  }
+}
+
+void Codec::handleResolutionChange() {
+  int32_t dispWidth, dispHeight;
+  PVGetVideoDimensions(mDecHandle, &dispWidth, &dispHeight);
+
+  int32_t bufWidth, bufHeight;
+  PVGetBufferDimensions(mDecHandle, &bufWidth, &bufHeight);
+
+  if (dispWidth != mWidth || dispHeight != mHeight) {
+    mWidth = dispWidth;
+    mHeight = dispHeight;
+  }
+}
+
+void Codec::decodeFrames(const uint8_t *data, size_t size) {
+  size_t outputBufferSize = align(mMaxWidth, 16) * align(mMaxHeight, 16) * 3 / 2;
+  uint8_t *start_code = const_cast<uint8_t *>(data);
+  static const uint8_t volInfo[] = {0x00, 0x00, 0x01, 0xB0};
+  bool volHeader = memcmp(start_code, volInfo, 4) == 0;
+  if (volHeader) {
+    PVCleanUpVideoDecoder(mDecHandle);
+    mInitialized = false;
+  }
+
+  if (!mInitialized) {
+    uint8_t *volData[1]{};
+    int32_t volSize = 0;
+
+    if (volHeader) { /* removed some codec config part */
+      volData[0] = const_cast<uint8_t *>(data);
+      volSize = size;
+    }
+
+    if (!PVInitVideoDecoder(mDecHandle, volData, &volSize, kLayer, mMaxWidth, mMaxHeight,
+                            mInputMode)) {
+      return;
+    }
+    mInitialized = true;
+    MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+    if (mInputMode != actualMode) {
+      return;
+    }
+
+    PVSetPostProcType(mDecHandle, 0);
+  }
+  size_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+  if (outputBufferSize < yFrameSize * 3 / 2) {
+    return;
+  }
+  if (!allocOutputBuffer(outputBufferSize)) {
+    return;
+  }
+  size_t numDecodeCalls = 0;
+  while ((size > 0) && (numDecodeCalls < kMaxNumDecodeCalls)) {
+    if (!mFramesConfigured) {
+      PVSetReferenceYUV(mDecHandle, mOutputBuffer[1]);
+      mFramesConfigured = true;
+    }
+
+    // Need to check if header contains new info, e.g., width/height, etc.
+    VopHeaderInfo header_info;
+    uint32_t useExtTimestamp = (numDecodeCalls == 0);
+    int32_t tempSize = (int32_t)size;
+    uint8_t *bitstreamTmp = const_cast<uint8_t *>(data);
+    uint32_t timestamp = 0;
+    if (PVDecodeVopHeader(mDecHandle, &bitstreamTmp, &timestamp, &tempSize, &header_info,
+                          &useExtTimestamp, mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
+      return;
+    }
+
+    handleResolutionChange();
+
+    PVDecodeVopBody(mDecHandle, &tempSize);
+    uint32_t bytesConsumed = 1;
+    if (size > tempSize) {
+      bytesConsumed = size - tempSize;
+    }
+    data += bytesConsumed;
+    size -= bytesConsumed;
+    ++mNumSamplesOutput;
+    ++numDecodeCalls;
+  }
+  freeOutputBuffer();
+}
+
+void Codec::deInitDecoder() {
+  PVCleanUpVideoDecoder(mDecHandle);
+  delete mDecHandle;
+  mDecHandle = nullptr;
+  mInitialized = false;
+  freeOutputBuffer();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 4) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder()) {
+    codec->decodeFrames(data, size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
new file mode 100644
index 0000000..423325d
--- /dev/null
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <algorithm>
+#include "mp4enc_api.h"
+
+constexpr int8_t kIDRFrameRefreshIntervalInSec = 1;
+constexpr MP4RateControlType krcType[] = {CONSTANT_Q, CBR_1, VBR_1, CBR_2, VBR_2, CBR_LOWDELAY};
+#ifdef MPEG4
+constexpr MP4EncodingMode kEncodingMode[] = {SHORT_HEADER, SHORT_HEADER_WITH_ERR_RES,
+                                             DATA_PARTITIONING_MODE, COMBINE_MODE_NO_ERR_RES,
+                                             COMBINE_MODE_WITH_ERR_RES};
+constexpr size_t kMaxWidth = 10240;
+constexpr size_t kMaxHeight = 10240;
+#else
+constexpr MP4EncodingMode kEncodingMode[] = {H263_MODE, H263_MODE_WITH_ERR_RES};
+constexpr int kWidth[] = {128, 176, 352, 704, 1408};
+constexpr int kHeight[] = {96, 144, 288, 576, 1152};
+constexpr size_t kWidthNum = std::size(kWidth);
+constexpr size_t kHeightNum = std::size(kHeight);
+#endif
+
+constexpr size_t krcTypeNum = std::size(krcType);
+constexpr size_t kEncodingModeNum = std::size(kEncodingMode);
+constexpr size_t kMaxQP = 51;
+
+enum {
+    IDX_WD_BYTE_1,
+    IDX_WD_BYTE_2,
+    IDX_HT_BYTE_1,
+    IDX_HT_BYTE_2,
+    IDX_FRAME_RATE,
+    IDX_RC_TYPE,
+    IDX_PACKET_SIZE,
+    IDX_I_FRAME_QP,
+    IDX_P_FRAME_QP,
+    IDX_ENABLE_RVLC,
+    IDX_QUANT_TYPE,
+    IDX_NO_FRAME_SKIPPED_FLAG,
+    IDX_ENABLE_SCENE_DETECT,
+    IDX_NUM_INTRA_MB,
+    IDX_SEARCH_RANGE,
+    IDX_ENABLE_MV_8x8,
+    IDX_USE_AC_PRED,
+    IDX_INTRA_DC_VLC_THRESHOLD,
+    IDX_ENC_MODE,
+    IDX_LAST
+};
+
+class Codec {
+   public:
+    Codec() = default;
+    ~Codec() { deInitEncoder(); }
+    bool initEncoder(const uint8_t *data);
+    void encodeFrames(const uint8_t *data, size_t size);
+    void deInitEncoder();
+
+   private:
+    int32_t mFrameWidth = 352;
+    int32_t mFrameHeight = 288;
+    float mFrameRate = 25.0f;
+    VideoEncOptions *mEncodeHandle = nullptr;
+    VideoEncControls *mEncodeControl = nullptr;
+};
+
+bool Codec::initEncoder(const uint8_t *data) {
+    mEncodeHandle = new VideoEncOptions;
+    if (!mEncodeHandle) {
+        return false;
+    }
+    memset(mEncodeHandle, 0, sizeof(VideoEncOptions));
+    mEncodeControl = new VideoEncControls;
+    if (!mEncodeControl) {
+        return false;
+    }
+    memset(mEncodeControl, 0, sizeof(VideoEncControls));
+    PVGetDefaultEncOption(mEncodeHandle, 0);
+
+#ifdef MPEG4
+    mFrameWidth = ((data[IDX_WD_BYTE_1] << 8) | data[IDX_WD_BYTE_2]) % kMaxWidth;
+    mFrameHeight = ((data[IDX_HT_BYTE_1] << 8) | data[IDX_HT_BYTE_2]) % kMaxHeight;
+#else
+    mFrameWidth = kWidth[data[IDX_WD_BYTE_1] % kWidthNum];
+    mFrameHeight = kHeight[data[IDX_HT_BYTE_1] % kHeightNum];
+#endif
+    mFrameRate = data[IDX_FRAME_RATE];
+    mEncodeHandle->rcType = krcType[data[IDX_RC_TYPE] % krcTypeNum];
+    mEncodeHandle->profile_level = CORE_PROFILE_LEVEL2;
+    mEncodeHandle->packetSize = data[IDX_PACKET_SIZE];
+    mEncodeHandle->iQuant[0] = (data[IDX_I_FRAME_QP] % kMaxQP) + 1;
+    mEncodeHandle->pQuant[0] = (data[IDX_P_FRAME_QP] % kMaxQP) + 1;
+    mEncodeHandle->rvlcEnable = (data[IDX_ENABLE_RVLC] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->quantType[0] = (data[IDX_QUANT_TYPE] & 0x01) ? 0 : 1;
+    mEncodeHandle->noFrameSkipped = (data[IDX_NO_FRAME_SKIPPED_FLAG] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->sceneDetect = (data[IDX_ENABLE_SCENE_DETECT] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->numIntraMB = data[IDX_NUM_INTRA_MB] & 0x07;
+    mEncodeHandle->searchRange = data[IDX_SEARCH_RANGE] & 0x1F;
+    mEncodeHandle->mv8x8Enable = (data[IDX_ENABLE_MV_8x8] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->useACPred = (data[IDX_USE_AC_PRED] & 0x01) ? PV_OFF : PV_ON;
+    mEncodeHandle->intraDCVlcTh = data[IDX_INTRA_DC_VLC_THRESHOLD] & 0x07;
+    mEncodeHandle->encMode = kEncodingMode[data[IDX_ENC_MODE] % kEncodingModeNum];
+    mEncodeHandle->encWidth[0] = mFrameWidth;
+    mEncodeHandle->encHeight[0] = mFrameHeight;
+    mEncodeHandle->encFrameRate[0] = mFrameRate;
+    mEncodeHandle->tickPerSrc = mEncodeHandle->timeIncRes / mFrameRate;
+    mEncodeHandle->intraPeriod = (kIDRFrameRefreshIntervalInSec * mFrameRate);
+    if (!PVInitVideoEncoder(mEncodeControl, mEncodeHandle)) {
+        return false;
+    }
+    return true;
+}
+
+void Codec::deInitEncoder() {
+    if (mEncodeControl) {
+        PVCleanUpVideoEncoder(mEncodeControl);
+        delete mEncodeControl;
+        mEncodeControl = nullptr;
+    }
+    if (mEncodeHandle) {
+        delete mEncodeHandle;
+        mEncodeHandle = nullptr;
+    }
+}
+
+void Codec::encodeFrames(const uint8_t *data, size_t size) {
+    size_t inputBufferSize = (mFrameWidth * mFrameHeight * 3) / 2;
+    size_t outputBufferSize = inputBufferSize * 2;
+    uint8_t *outputBuffer = new uint8_t[outputBufferSize];
+    uint8_t *inputBuffer = new uint8_t[inputBufferSize];
+
+    // Get VOL header.
+    int32_t sizeOutputBuffer = outputBufferSize;
+    PVGetVolHeader(mEncodeControl, outputBuffer, &sizeOutputBuffer, 0);
+
+    size_t numFrame = 0;
+    while (size > 0) {
+        size_t bytesConsumed = std::min(size, inputBufferSize);
+        memcpy(inputBuffer, data, bytesConsumed);
+        if (bytesConsumed < inputBufferSize) {
+            memset(inputBuffer + bytesConsumed, data[0], inputBufferSize - bytesConsumed);
+        }
+        VideoEncFrameIO videoIn{}, videoOut{};
+        videoIn.height = mFrameHeight;
+        videoIn.pitch = mFrameWidth;
+        videoIn.timestamp = (numFrame * 1000) / mFrameRate;
+        videoIn.yChan = inputBuffer;
+        videoIn.uChan = videoIn.yChan + videoIn.height * videoIn.pitch;
+        videoIn.vChan = videoIn.uChan + ((videoIn.height * videoIn.pitch) >> 2);
+        uint32_t modTimeMs = 0;
+        int32_t dataLength = outputBufferSize;
+        int32_t nLayer = 0;
+        PVEncodeVideoFrame(mEncodeControl, &videoIn, &videoOut, &modTimeMs, outputBuffer,
+                           &dataLength, &nLayer);
+        MP4HintTrack hintTrack;
+        PVGetHintTrack(mEncodeControl, &hintTrack);
+        PVGetOverrunBuffer(mEncodeControl);
+        ++numFrame;
+        data += bytesConsumed;
+        size -= bytesConsumed;
+    }
+    delete[] inputBuffer;
+    delete[] outputBuffer;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < IDX_LAST) {
+        return 0;
+    }
+    Codec *codec = new Codec();
+    if (!codec) {
+        return 0;
+    }
+    if (codec->initEncoder(data)) {
+        data += IDX_LAST;
+        size -= IDX_LAST;
+        codec->encodeFrames(data, size);
+    }
+    delete codec;
+    return 0;
+}
diff --git a/media/libstagefright/codecs/m4v_h263/patent_disclaimer.txt b/media/codecs/m4v_h263/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/m4v_h263/patent_disclaimer.txt
rename to media/codecs/m4v_h263/patent_disclaimer.txt
diff --git a/media/codecs/mp3dec/Android.bp b/media/codecs/mp3dec/Android.bp
new file mode 100644
index 0000000..015b8b6
--- /dev/null
+++ b/media/codecs/mp3dec/Android.bp
@@ -0,0 +1,158 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_codecs_mp3dec_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_codecs_mp3dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_headers {
+    name: "libstagefright_mp3dec_headers",
+    vendor_available: true,
+    min_sdk_version: "29",
+    host_supported: true,
+    export_include_dirs: [
+        "include",
+        "src",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_mp3dec",
+    vendor_available: true,
+    min_sdk_version: "29",
+
+    host_supported: true,
+    srcs: [
+        "src/pvmp3_normalize.cpp",
+        "src/pvmp3_alias_reduction.cpp",
+        "src/pvmp3_crc.cpp",
+        "src/pvmp3_decode_header.cpp",
+        "src/pvmp3_decode_huff_cw.cpp",
+        "src/pvmp3_getbits.cpp",
+        "src/pvmp3_dequantize_sample.cpp",
+        "src/pvmp3_framedecoder.cpp",
+        "src/pvmp3_get_main_data_size.cpp",
+        "src/pvmp3_get_side_info.cpp",
+        "src/pvmp3_get_scale_factors.cpp",
+        "src/pvmp3_mpeg2_get_scale_data.cpp",
+        "src/pvmp3_mpeg2_get_scale_factors.cpp",
+        "src/pvmp3_mpeg2_stereo_proc.cpp",
+        "src/pvmp3_huffman_decoding.cpp",
+        "src/pvmp3_huffman_parsing.cpp",
+        "src/pvmp3_tables.cpp",
+        "src/pvmp3_imdct_synth.cpp",
+        "src/pvmp3_mdct_6.cpp",
+        "src/pvmp3_dct_6.cpp",
+        "src/pvmp3_poly_phase_synthesis.cpp",
+        "src/pvmp3_equalizer.cpp",
+        "src/pvmp3_seek_synch.cpp",
+        "src/pvmp3_stereo_proc.cpp",
+        "src/pvmp3_reorder.cpp",
+
+        "src/pvmp3_polyphase_filter_window.cpp",
+        "src/pvmp3_mdct_18.cpp",
+        "src/pvmp3_dct_9.cpp",
+        "src/pvmp3_dct_16.cpp",
+    ],
+
+    arch: {
+        arm: {
+            exclude_srcs: [
+                "src/pvmp3_polyphase_filter_window.cpp",
+                "src/pvmp3_mdct_18.cpp",
+                "src/pvmp3_dct_9.cpp",
+                "src/pvmp3_dct_16.cpp",
+            ],
+            srcs: [
+                "src/asm/pvmp3_polyphase_filter_window_gcc.s",
+                "src/asm/pvmp3_mdct_18_gcc.s",
+                "src/asm/pvmp3_dct_9_gcc.s",
+                "src/asm/pvmp3_dct_16_gcc.s",
+            ],
+
+            instruction_set: "arm",
+        },
+    },
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    include_dirs: ["frameworks/av/media/libstagefright/include"],
+
+    header_libs: ["libstagefright_mp3dec_headers"],
+    export_header_lib_headers: ["libstagefright_mp3dec_headers"],
+
+    cflags: [
+        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
+        "-Werror",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+//###############################################################################
+cc_test {
+    name: "libstagefright_mp3dec_test",
+    gtest: false,
+
+    srcs: [
+        "test/mp3dec_test.cpp",
+        "test/mp3reader.cpp",
+    ],
+
+    cflags: ["-Wall", "-Werror"],
+
+    local_include_dirs: [
+        "src",
+        "include",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    static_libs: [
+        "libstagefright_mp3dec",
+        "libsndfile",
+    ],
+
+    shared_libs: ["libaudioutils"],
+}
diff --git a/media/libstagefright/codecs/mp3dec/MODULE_LICENSE_APACHE2 b/media/codecs/mp3dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/MODULE_LICENSE_APACHE2
rename to media/codecs/mp3dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/mp3dec/NOTICE b/media/codecs/mp3dec/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/NOTICE
rename to media/codecs/mp3dec/NOTICE
diff --git a/media/codecs/mp3dec/TEST_MAPPING b/media/codecs/mp3dec/TEST_MAPPING
new file mode 100644
index 0000000..4ef4317
--- /dev/null
+++ b/media/codecs/mp3dec/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/codecs/mp3dec
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "Mp3DecoderTest"}
+  ]
+}
diff --git a/media/codecs/mp3dec/fuzzer/Android.bp b/media/codecs/mp3dec/fuzzer/Android.bp
new file mode 100644
index 0000000..514a8a8
--- /dev/null
+++ b/media/codecs/mp3dec/fuzzer/Android.bp
@@ -0,0 +1,48 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_mp3dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_codecs_mp3dec_license"],
+}
+
+cc_fuzz {
+    name: "mp3_dec_fuzzer",
+    host_supported: true,
+
+    static_libs: [
+        "libstagefright_mp3dec",
+    ],
+
+    srcs: [
+        "mp3_dec_fuzzer.cpp",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/codecs/mp3dec/fuzzer/README.md b/media/codecs/mp3dec/fuzzer/README.md
new file mode 100644
index 0000000..09dd5c3
--- /dev/null
+++ b/media/codecs/mp3dec/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_mp3dec decoder
+
+## Plugin Design Considerations
+The fuzzer plugin for the mp3 decoder is designed based on an understanding of the
+codec and tries to achieve the following:
+
+##### Maximize code coverage
+
+This fuzzer makes use of the following config parameters:
+1. Equalizer type (parameter name: `equalizerType`)
+2. CRC check (parameter name: `crcEnabled`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `equalizerType` | 0. `flat ` 1. `bass_boost ` 2. `rock ` 3. `pop ` 4. `jazz ` 5. `classical ` 6. `talk ` 7. `flat_ ` | Bits 0, 1 and 2 of first byte of input stream |
+| `crcEnabled` | 0. `false ` 1. `true `| Bit 0 of second byte of input stream |
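+
+A minimal sketch (not part of the fuzzer itself) of how these two bytes could be
+mapped onto the decoder configuration, mirroring the start of
+`Codec::decodeFrames()` in `mp3_dec_fuzzer.cpp`; the table and function names here
+are illustrative:
+
+```cpp
+#include <stdint.h>
+#include <pvmp3decoder_api.h>
+
+// Local table of the eight e_equalization values, as used by the fuzzer.
+static const e_equalization kEqTable[] = {flat, bass_boost, rock, pop,
+                                          jazz, classical,  talk, flat_};
+
+// Map the first two fuzzed bytes onto the decoder configuration.
+static void applyFuzzConfig(tPVMP3DecoderExternal *config, const uint8_t *data) {
+  config->equalizerType = kEqTable[data[0] & 0x7];  // bits 0, 1 and 2 of byte 0
+  config->crcEnabled = data[1] & 0x1;               // bit 0 of byte 1
+}
+```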
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the codec using a loop.
+ * If the decode operation was successful, the input is advanced by the number
+   of bytes used by the decoder.
+ * If the decode operation was unsuccessful, the input is advanced by 1 byte
+   until it reaches a valid frame or the end of the stream.
+
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
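+
+A minimal sketch of that loop (assuming the `mConfig`/`mDecoderBuf` members set up
+in `Codec::initDecoder()` and the fuzzer's `kMaxFrameSamples`/`kMaxChannels`
+constants); the actual implementation is `Codec::decodeFrames()` in
+`mp3_dec_fuzzer.cpp`:
+
+```cpp
+int16_t pcm[kMaxFrameSamples * kMaxChannels];
+while (size > 0) {
+  mConfig->pInputBuffer = data;
+  mConfig->inputBufferCurrentLength = size;
+  mConfig->inputBufferUsedLength = 0;
+  mConfig->pOutputBuffer = pcm;
+  mConfig->outputFrameSize = sizeof(pcm) / sizeof(pcm[0]);
+  if (pvmp3_framedecoder(mConfig, mDecoderBuf) != NO_DECODING_ERROR) {
+    ++data;  // decode failed: skip one byte and retry at the next position
+    --size;
+  } else {
+    // decode succeeded: advance by the number of bytes the decoder consumed
+    size_t used = std::min((int32_t)size, mConfig->inputBufferUsedLength);
+    data += used;
+    size -= used;
+  }
+}
+```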
+
+## Build
+
+This describes the steps to build the mp3_dec_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp3_dec_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mp3 files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp3_dec_fuzzer/mp3_dec_fuzzer CORPUS_DIR
+```
+To run on host
+```
+  $ $ANDROID_HOST_OUT/fuzz/x86_64/mp3_dec_fuzzer/mp3_dec_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp b/media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
new file mode 100644
index 0000000..847c8c4
--- /dev/null
+++ b/media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
@@ -0,0 +1,237 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <stdlib.h>
+#include <algorithm>
+
+#include <pvmp3decoder_api.h>
+
+constexpr int kMaxFrameSamples = 4608;
+constexpr int kMaxChannels = 2;
+constexpr e_equalization kEqualizerTypes[] = {flat, bass_boost, rock, pop,
+                                              jazz, classical,  talk, flat_};
+
+static bool parseMp3Header(uint32_t header, size_t *frame_size,
+                           uint32_t *out_sampling_rate = nullptr, uint32_t *out_channels = nullptr,
+                           uint32_t *out_bitrate = nullptr, uint32_t *out_num_samples = nullptr) {
+  *frame_size = 0;
+  if (out_sampling_rate) *out_sampling_rate = 0;
+  if (out_channels) *out_channels = 0;
+  if (out_bitrate) *out_bitrate = 0;
+  if (out_num_samples) *out_num_samples = 0;
+
+  if ((header & 0xffe00000) != 0xffe00000) {
+    return false;
+  }
+  unsigned version = (header >> 19) & 3;
+  if (version == 0x01) {
+    return false;
+  }
+  unsigned layer = (header >> 17) & 3;
+  if (layer == 0x00) {
+    return false;
+  }
+  unsigned bitrate_index = (header >> 12) & 0x0f;
+  if (bitrate_index == 0 || bitrate_index == 0x0f) {
+    return false;
+  }
+  unsigned sampling_rate_index = (header >> 10) & 3;
+  if (sampling_rate_index == 3) {
+    return false;
+  }
+  static const int kSamplingRateV1[] = {44100, 48000, 32000};
+  int sampling_rate = kSamplingRateV1[sampling_rate_index];
+  if (version == 2 /* V2 */) {
+    sampling_rate /= 2;
+  } else if (version == 0 /* V2.5 */) {
+    sampling_rate /= 4;
+  }
+
+  unsigned padding = (header >> 9) & 1;
+
+  if (layer == 3) {  // layer I
+    static const int kBitrateV1[] = {32,  64,  96,  128, 160, 192, 224,
+                                     256, 288, 320, 352, 384, 416, 448};
+    static const int kBitrateV2[] = {32,  48,  56,  64,  80,  96,  112,
+                                     128, 144, 160, 176, 192, 224, 256};
+
+    int bitrate =
+        (version == 3 /* V1 */) ? kBitrateV1[bitrate_index - 1] : kBitrateV2[bitrate_index - 1];
+
+    if (out_bitrate) {
+      *out_bitrate = bitrate;
+    }
+    *frame_size = (12000 * bitrate / sampling_rate + padding) * 4;
+    if (out_num_samples) {
+      *out_num_samples = 384;
+    }
+  } else {  // layer II or III
+    static const int kBitrateV1L2[] = {32,  48,  56,  64,  80,  96,  112,
+                                       128, 160, 192, 224, 256, 320, 384};
+    static const int kBitrateV1L3[] = {32,  40,  48,  56,  64,  80,  96,
+                                       112, 128, 160, 192, 224, 256, 320};
+    static const int kBitrateV2[] = {8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160};
+    int bitrate;
+    if (version == 3 /* V1 */) {
+      bitrate =
+          (layer == 2 /* L2 */) ? kBitrateV1L2[bitrate_index - 1] : kBitrateV1L3[bitrate_index - 1];
+
+      if (out_num_samples) {
+        *out_num_samples = 1152;
+      }
+    } else {  // V2 (or 2.5)
+      bitrate = kBitrateV2[bitrate_index - 1];
+      if (out_num_samples) {
+        *out_num_samples = (layer == 1 /* L3 */) ? 576 : 1152;
+      }
+    }
+
+    if (out_bitrate) {
+      *out_bitrate = bitrate;
+    }
+
+    if (version == 3 /* V1 */) {
+      *frame_size = 144000 * bitrate / sampling_rate + padding;
+    } else {  // V2 or V2.5
+      size_t tmp = (layer == 1 /* L3 */) ? 72000 : 144000;
+      *frame_size = tmp * bitrate / sampling_rate + padding;
+    }
+  }
+
+  if (out_sampling_rate) {
+    *out_sampling_rate = sampling_rate;
+  }
+
+  if (out_channels) {
+    int channel_mode = (header >> 6) & 3;
+    *out_channels = (channel_mode == 3) ? 1 : 2;
+  }
+
+  return true;
+}
+
+static uint32_t U32_AT(const uint8_t *ptr) {
+  return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3];
+}
+
+static bool checkHeader(uint8 *header, size_t inSize) {
+  size_t frameSize;
+  size_t totalInSize = 0;
+  bool isValidBuffer = false;
+
+  while (totalInSize + 4 < inSize) {
+    isValidBuffer = true;
+    uint32_t val = U32_AT(header + totalInSize);
+    if (!parseMp3Header(val, &frameSize, nullptr, nullptr, nullptr, nullptr)) {
+      return false;
+    }
+    totalInSize += frameSize;
+  }
+
+  return (isValidBuffer);
+}
+
+class Codec {
+ public:
+  Codec() = default;
+  ~Codec() { deInitDecoder(); }
+
+  bool initDecoder();
+  void decodeFrames(uint8_t *data, size_t size);
+  void deInitDecoder();
+
+ private:
+  tPVMP3DecoderExternal *mConfig = nullptr;
+  void *mDecoderBuf = nullptr;
+};
+
+bool Codec::initDecoder() {
+  mConfig = new tPVMP3DecoderExternal{};
+  if (!mConfig) {
+    return false;
+  }
+  size_t decoderBufSize = pvmp3_decoderMemRequirements();
+  mDecoderBuf = malloc(decoderBufSize);
+  if (!mDecoderBuf) {
+    return false;
+  }
+  memset(mDecoderBuf, 0x0, decoderBufSize);
+  pvmp3_InitDecoder(mConfig, mDecoderBuf);
+  return true;
+}
+
+void Codec::decodeFrames(uint8_t *data, size_t size) {
+  uint8_t equalizerTypeValue = (data[0] & 0x7);
+  mConfig->equalizerType = kEqualizerTypes[equalizerTypeValue];
+  mConfig->crcEnabled = data[1] & 0x1;
+
+  while (size > 0) {
+    bool status = checkHeader(data, size);
+    if (!status) {
+      size--;
+      data++;
+      continue;
+    }
+    size_t outBufSize = kMaxFrameSamples * kMaxChannels;
+    size_t usedBytes = 0;
+    int16_t outputBuf[outBufSize];
+    mConfig->inputBufferCurrentLength = size;
+    mConfig->inputBufferUsedLength = 0;
+    mConfig->inputBufferMaxLength = 0;
+    mConfig->pInputBuffer = data;
+    mConfig->pOutputBuffer = outputBuf;
+    mConfig->outputFrameSize = outBufSize / sizeof(int16_t);
+
+    ERROR_CODE decoderErr;
+    decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf);
+    if (decoderErr != NO_DECODING_ERROR) {
+      size--;
+      data++;
+    } else {
+      usedBytes = std::min((int32_t)size, mConfig->inputBufferUsedLength);
+      size -= usedBytes;
+      data += usedBytes;
+    }
+  }
+}
+
+void Codec::deInitDecoder() {
+  if (mDecoderBuf) {
+    free(mDecoderBuf);
+    mDecoderBuf = nullptr;
+  }
+  delete mConfig;
+  mConfig = nullptr;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < 4) {
+    return 0;
+  }
+  Codec *codec = new Codec();
+  if (!codec) {
+    return 0;
+  }
+  if (codec->initDecoder()) {
+    codec->decodeFrames(const_cast<uint8_t *>(data), size);
+  }
+  delete codec;
+  return 0;
+}
diff --git a/media/libstagefright/codecs/mp3dec/include/mp3_decoder_selection.h b/media/codecs/mp3dec/include/mp3_decoder_selection.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/mp3_decoder_selection.h
rename to media/codecs/mp3dec/include/mp3_decoder_selection.h
diff --git a/media/libstagefright/codecs/mp3dec/include/pvmp3_audio_type_defs.h b/media/codecs/mp3dec/include/pvmp3_audio_type_defs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/pvmp3_audio_type_defs.h
rename to media/codecs/mp3dec/include/pvmp3_audio_type_defs.h
diff --git a/media/libstagefright/codecs/mp3dec/include/pvmp3decoder_api.h b/media/codecs/mp3dec/include/pvmp3decoder_api.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/include/pvmp3decoder_api.h
rename to media/codecs/mp3dec/include/pvmp3decoder_api.h
diff --git a/media/libstagefright/codecs/mp3dec/patent_disclaimer.txt b/media/codecs/mp3dec/patent_disclaimer.txt
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/patent_disclaimer.txt
rename to media/codecs/mp3dec/patent_disclaimer.txt
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s b/media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
rename to media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
diff --git a/media/libstagefright/codecs/mp3dec/src/mp3_mem_funcs.h b/media/codecs/mp3dec/src/mp3_mem_funcs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/mp3_mem_funcs.h
rename to media/codecs/mp3dec/src/mp3_mem_funcs.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3_huffman.h b/media/codecs/mp3dec/src/pv_mp3_huffman.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3_huffman.h
rename to media/codecs/mp3dec/src/pv_mp3_huffman.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h b/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
rename to media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.cpp b/media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
rename to media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.h b/media/codecs/mp3dec/src/pvmp3_alias_reduction.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_alias_reduction.h
rename to media/codecs/mp3dec/src/pvmp3_alias_reduction.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_crc.cpp b/media/codecs/mp3dec/src/pvmp3_crc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_crc.cpp
rename to media/codecs/mp3dec/src/pvmp3_crc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_crc.h b/media/codecs/mp3dec/src/pvmp3_crc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_crc.h
rename to media/codecs/mp3dec/src/pvmp3_crc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.cpp b/media/codecs/mp3dec/src/pvmp3_dct_16.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.cpp
rename to media/codecs/mp3dec/src/pvmp3_dct_16.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.h b/media/codecs/mp3dec/src/pvmp3_dct_16.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_16.h
rename to media/codecs/mp3dec/src/pvmp3_dct_16.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_6.cpp b/media/codecs/mp3dec/src/pvmp3_dct_6.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_6.cpp
rename to media/codecs/mp3dec/src/pvmp3_dct_6.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dct_9.cpp b/media/codecs/mp3dec/src/pvmp3_dct_9.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dct_9.cpp
rename to media/codecs/mp3dec/src/pvmp3_dct_9.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dec_defs.h b/media/codecs/mp3dec/src/pvmp3_dec_defs.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dec_defs.h
rename to media/codecs/mp3dec/src/pvmp3_dec_defs.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.cpp b/media/codecs/mp3dec/src/pvmp3_decode_header.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.cpp
rename to media/codecs/mp3dec/src/pvmp3_decode_header.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.h b/media/codecs/mp3dec/src/pvmp3_decode_header.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_header.h
rename to media/codecs/mp3dec/src/pvmp3_decode_header.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp b/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
rename to media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.h b/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
rename to media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp b/media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
rename to media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.h b/media/codecs/mp3dec/src/pvmp3_dequantize_sample.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_dequantize_sample.h
rename to media/codecs/mp3dec/src/pvmp3_dequantize_sample.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.cpp b/media/codecs/mp3dec/src/pvmp3_equalizer.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.cpp
rename to media/codecs/mp3dec/src/pvmp3_equalizer.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.h b/media/codecs/mp3dec/src/pvmp3_equalizer.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_equalizer.h
rename to media/codecs/mp3dec/src/pvmp3_equalizer.h
diff --git a/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp
new file mode 100644
index 0000000..5cf1ed3
--- /dev/null
+++ b/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -0,0 +1,822 @@
+/* ------------------------------------------------------------------
+ * Copyright (C) 1998-2009 PacketVideo
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ * -------------------------------------------------------------------
+ */
+/*
+------------------------------------------------------------------------------
+
+   PacketVideo Corp.
+   MP3 Decoder Library
+
+   Filename: pvmp3_framedecoder.cpp
+
+   Functions:
+    pvmp3_framedecoder
+    pvmp3_InitDecoder
+    pvmp3_resetDecoder
+
+    Date: 09/21/2007
+
+------------------------------------------------------------------------------
+ REVISION HISTORY
+
+
+ Description:
+
+------------------------------------------------------------------------------
+ INPUT AND OUTPUT DEFINITIONS
+
+Input
+    pExt = pointer to the external interface structure. See the file
+           pvmp3decoder_api.h for a description of each field.
+           Data type of pointer to a tPVMP3DecoderExternal
+           structure.
+
+    pMem = void pointer to hide the internal implementation of the library
+           It is cast back to a tmp3dec_file structure. This structure
+           contains information that needs to persist between calls to
+           this function, or is too big to be placed on the stack, even
+           though the data is only needed during execution of this function
+           Data type void pointer, internally pointer to a tmp3dec_file
+           structure.
+
+
+ Outputs:
+     status = ERROR condition.  see structure  ERROR_CODE
+
+ Pointers and Buffers Modified:
+    pMem contents are modified.
+    pExt: (more detail in the file pvmp3decoder_api.h)
+    inputBufferUsedLength - number of array elements used up by the stream.
+    samplingRate - sampling rate in samples per sec
+    bitRate - bit rate in bits per second, varies frame to frame.
+
+
+
+------------------------------------------------------------------------------
+ FUNCTIONS DESCRIPTION
+
+    pvmp3_framedecoder
+        frame decoder library driver
+    pvmp3_InitDecoder
+        Decoder Initialization
+    pvmp3_resetDecoder
+        Reset Decoder
+
+------------------------------------------------------------------------------
+ REQUIREMENTS
+
+
+------------------------------------------------------------------------------
+ REFERENCES
+
+ [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
+     ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
+
+------------------------------------------------------------------------------
+ PSEUDO-CODE
+
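+ Illustrative usage sketch (a minimal sketch, assuming the caller provides the
+ mp3Data/pcmOut buffers and their sizes; structure fields and entry points are
+ those declared in pvmp3decoder_api.h):
+
+    tPVMP3DecoderExternal config = {};
+    void *mem = malloc(pvmp3_decoderMemRequirements());
+    pvmp3_InitDecoder(&config, mem);
+
+    config.pInputBuffer             = mp3Data;        // compressed mp3 frame(s)
+    config.inputBufferCurrentLength = mp3DataLength;  // in bytes
+    config.inputBufferUsedLength    = 0;
+    config.pOutputBuffer            = pcmOut;         // int16 PCM output buffer
+    config.outputFrameSize          = pcmOutCapacity; // capacity in samples
+
+    ERROR_CODE err = pvmp3_framedecoder(&config, mem);
+    // On NO_DECODING_ERROR, inputBufferUsedLength reports the bytes consumed
+    // and outputFrameSize is updated to the number of samples produced.
+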
+------------------------------------------------------------------------------
+*/
+
+
+/*----------------------------------------------------------------------------
+; INCLUDES
+----------------------------------------------------------------------------*/
+
+
+#include "pvmp3_framedecoder.h"
+#include "pvmp3_dec_defs.h"
+#include "pvmp3_poly_phase_synthesis.h"
+#include "pvmp3_tables.h"
+#include "pvmp3_imdct_synth.h"
+#include "pvmp3_alias_reduction.h"
+#include "pvmp3_reorder.h"
+#include "pvmp3_dequantize_sample.h"
+#include "pvmp3_stereo_proc.h"
+#include "pvmp3_mpeg2_stereo_proc.h"
+#include "pvmp3_get_side_info.h"
+#include "pvmp3_get_scale_factors.h"
+#include "pvmp3_mpeg2_get_scale_factors.h"
+#include "pvmp3_decode_header.h"
+#include "pvmp3_get_main_data_size.h"
+#include "s_tmp3dec_file.h"
+#include "pvmp3_getbits.h"
+#include "mp3_mem_funcs.h"
+
+
+/*----------------------------------------------------------------------------
+; MACROS
+; Define module specific macros here
+----------------------------------------------------------------------------*/
+
+
+/*----------------------------------------------------------------------------
+; DEFINES
+; Include all pre-processor statements here. Include conditional
+; compile variables also.
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL FUNCTION DEFINITIONS
+; Function Prototype declaration
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; LOCAL STORE/BUFFER/POINTER DEFINITIONS
+; Variable declaration - defined here and used outside this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL FUNCTION REFERENCES
+; Declare functions defined elsewhere and referenced in this module
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
+; Declare variables used in this module but defined elsewhere
+----------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
+                              void              *pMem)
+{
+
+    ERROR_CODE        errorCode  = NO_DECODING_ERROR;
+
+    int32   crc_error_count = 0;
+    uint32  sent_crc = 0;
+    uint32  computed_crc = 0;
+
+    tmp3dec_chan   *pChVars[CHAN];
+    tmp3dec_file   *pVars = (tmp3dec_file *)pMem;
+
+    mp3Header info_data;
+    mp3Header *info = &info_data;
+
+    pVars->inputStream.pBuffer  = pExt->pInputBuffer;
+
+
+    pVars->inputStream.usedBits  = pExt->inputBufferUsedLength << 3;
+    pVars->inputStream.inputBufferCurrentLength  = pExt->inputBufferCurrentLength;
+
+
+    errorCode = pvmp3_decode_header(&pVars->inputStream,
+                                    info,
+                                    &computed_crc);
+
+    if (errorCode != NO_DECODING_ERROR)
+    {
+        pExt->outputFrameSize = 0;
+        return errorCode;
+    }
+
+    pVars->num_channels = (info->mode == MPG_MD_MONO) ? 1 : 2;
+    pExt->num_channels = pVars->num_channels;
+
+    int32 outputFrameSize = (info->version_x == MPEG_1) ?
+                            2 * SUBBANDS_NUMBER * FILTERBANK_BANDS :
+                            SUBBANDS_NUMBER * FILTERBANK_BANDS;
+
+    outputFrameSize = (info->mode == MPG_MD_MONO) ? outputFrameSize : outputFrameSize << 1;
+
+
+    /*
+     *  Check if output buffer has enough room to hold output PCM
+     */
+    if (pExt->outputFrameSize >= outputFrameSize)
+    {
+        pExt->outputFrameSize = outputFrameSize;
+    }
+    else
+    {
+        pExt->outputFrameSize = 0;
+        return OUTPUT_BUFFER_TOO_SMALL;
+    }
+
+
+    pChVars[ LEFT] = &pVars->perChan[ LEFT];
+    pChVars[RIGHT] = &pVars->perChan[RIGHT];
+
+
+
+
+    if (info->error_protection)
+    {
+        if (!bitsAvailable(&pVars->inputStream, 16))
+        {
+            return SIDE_INFO_ERROR;
+        }
+
+        /*
+         *  Get crc content
+         */
+        sent_crc = getUpTo17bits(&pVars->inputStream, 16);
+    }
+
+
+    if (info->layer_description == 3)
+    {
+        int32 gr;
+        int32 ch;
+        uint32 main_data_end;
+        int32 bytes_to_discard;
+        int16 *ptrOutBuffer = pExt->pOutputBuffer;
+
+        /*
+         * Side information must be extracted from the bitstream and stored for use
+         * during the decoding of the associated frame
+         */
+
+        errorCode = pvmp3_get_side_info(&pVars->inputStream,
+                                        &pVars->sideInfo,
+                                        info,
+                                        &computed_crc);
+
+        if (errorCode != NO_DECODING_ERROR)
+        {
+            pExt->outputFrameSize = 0;
+            return errorCode;
+        }
+
+        /*
+         *  If a CRC was sent, check that it matches the one computed while parsing the data.
+         *  The check is skipped when CRC checking is disabled (pExt->crcEnabled == false).
+         */
+        if (info->error_protection)
+        {
+            if ((computed_crc != sent_crc) && pExt->crcEnabled)
+            {
+                crc_error_count++;
+            }
+        }
+
+        /*
+         * Main data (scalefactors, Huffman-coded data, etc.) is not necessarily located
+         * adjacent to the side info. The beginning of the main data is located using the
+         * "main_data_begin" field of the current frame. The length does not include the
+         * header and side info.
+         * "main_data_begin" points to the first bit of main data of a frame. It is a negative
+         * offset in bytes from the first byte of the sync word.
+         * main_data_begin = 0  <===> main data starts right after the side info.
+         */
+
+        int32 temp = pvmp3_get_main_data_size(info, pVars);
+
+
+        /*
+         *  Check if available data holds a full frame, if not flag an error
+         */
+
+        if ((uint32)pVars->predicted_frame_size > pVars->inputStream.inputBufferCurrentLength)
+        {
+            pExt->outputFrameSize = 0;
+            return NO_ENOUGH_MAIN_DATA_ERROR;
+        }
+
+        /*
+         *  Fill in internal circular buffer
+         */
+        fillMainDataBuf(pVars, temp);
+
+
+        main_data_end = pVars->mainDataStream.usedBits >> 3; /* in bytes */
+        if ((main_data_end << 3) < pVars->mainDataStream.usedBits)
+        {
+            main_data_end++;
+            pVars->mainDataStream.usedBits = main_data_end << 3;
+        }
+
+
+        // force signed computation; buffer sizes and offsets are all going to be
+        // well within the constraints of 32-bit signed math.
+        bytes_to_discard = pVars->frame_start
+                           - ((int32)pVars->sideInfo.main_data_begin)
+                           - ((int32)main_data_end);
+
+
+        if (main_data_end > BUFSIZE)   /* check overflow on the buffer */
+        {
+            pVars->frame_start -= BUFSIZE;
+
+            pVars->mainDataStream.usedBits -= (BUFSIZE << 3);
+        }
+
+        pVars->frame_start += temp;
+
+
+        if (bytes_to_discard < 0 || crc_error_count)
+        {
+            /*
+             *  Not enough data to decode, so we should avoid reading this
+             *  data (i.e., skip the side info and scale data).
+             *  Main data could be located in the previous frame, so an unaccounted-for
+             *  frame can cause incorrect processing.
+             *  Just run the polyphase filter to "clean" the history buffer.
+             */
+            errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
+
+            /*
+             *  Clear the input to these filters
+             */
+
+            pv_memset((void*)pChVars[RIGHT]->work_buf_int32,
+                      0,
+                      SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[RIGHT]->work_buf_int32[0]));
+
+            pv_memset((void*)pChVars[LEFT]->work_buf_int32,
+                      0,
+                      SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[LEFT]->work_buf_int32[0]));
+
+            /*  clear circular buffers, to avoid any glitch */
+            pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
+                      0,
+                      480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
+            pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
+                      0,
+                      480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
+
+            pChVars[ LEFT]->used_freq_lines = 575;
+            pChVars[RIGHT]->used_freq_lines = 575;
+
+        }
+        else
+        {
+            pVars->mainDataStream.usedBits += (bytes_to_discard << 3);
+        }
+
+        /*
+         *  if (fr_ps->header->version_x == MPEG_1), use 2 granules, otherwise just 1
+         */
+        for (gr = 0; gr < (1 + !(info->version_x)); gr++)
+        {
+            if (errorCode != NO_ENOUGH_MAIN_DATA_ERROR)
+            {
+                for (ch = 0; ch < pVars->num_channels; ch++)
+                {
+                    int32 part2_start = pVars->mainDataStream.usedBits;
+
+                    if (info->version_x == MPEG_1)
+                    {
+
+                        pvmp3_get_scale_factors(&pVars->scaleFactors[ch],
+                                                &pVars->sideInfo,
+                                                gr,
+                                                ch,
+                                                &pVars->mainDataStream);
+                    }
+                    else
+                    {
+                        int32 * tmp = pVars->Scratch_mem;
+                        pvmp3_mpeg2_get_scale_factors(&pVars->scaleFactors[ch],
+                                                      &pVars->sideInfo,
+                                                      gr,
+                                                      ch,
+                                                      info,
+                                                      (uint32 *)tmp,
+                                                      &pVars->mainDataStream);
+                    }
+
+                    pChVars[ch]->used_freq_lines = pvmp3_huffman_parsing(pChVars[ch]->work_buf_int32,
+                                                   &pVars->sideInfo.ch[ch].gran[gr],
+                                                   pVars,
+                                                   part2_start,
+                                                   info);
+
+
+                    pvmp3_dequantize_sample(pChVars[ch]->work_buf_int32,
+                                            &pVars->scaleFactors[ch],
+                                            &pVars->sideInfo.ch[ch].gran[gr],
+                                            pChVars[ch]->used_freq_lines,
+                                            info);
+
+
+
+
+                }   /* for (ch=0; ch<stereo; ch++)  */
+
+                if (pVars->num_channels == 2)
+                {
+
+                    int32 used_freq_lines = (pChVars[ LEFT]->used_freq_lines  >
+                                             pChVars[RIGHT]->used_freq_lines) ?
+                                            pChVars[ LEFT]->used_freq_lines  :
+                                            pChVars[RIGHT]->used_freq_lines;
+
+                    pChVars[ LEFT]->used_freq_lines = used_freq_lines;
+                    pChVars[RIGHT]->used_freq_lines = used_freq_lines;
+
+                    if (info->version_x == MPEG_1)
+                    {
+                        pvmp3_stereo_proc(pChVars[ LEFT]->work_buf_int32,
+                                          pChVars[RIGHT]->work_buf_int32,
+                                          &pVars->scaleFactors[RIGHT],
+                                          &pVars->sideInfo.ch[LEFT].gran[gr],
+                                          used_freq_lines,
+                                          info);
+                    }
+                    else
+                    {
+                        int32 * tmp = pVars->Scratch_mem;
+                        pvmp3_mpeg2_stereo_proc(pChVars[ LEFT]->work_buf_int32,
+                                                pChVars[RIGHT]->work_buf_int32,
+                                                &pVars->scaleFactors[RIGHT],
+                                                &pVars->sideInfo.ch[ LEFT].gran[gr],
+                                                &pVars->sideInfo.ch[RIGHT].gran[gr],
+                                                (uint32 *)tmp,
+                                                used_freq_lines,
+                                                info);
+                    }
+                }
+
+            } /* if ( errorCode != NO_ENOUGH_MAIN_DATA_ERROR) */
+
+            for (ch = 0; ch < pVars->num_channels; ch++)
+            {
+
+                pvmp3_reorder(pChVars[ch]->work_buf_int32,
+                              &pVars->sideInfo.ch[ch].gran[gr],
+                              &pChVars[ ch]->used_freq_lines,
+                              info,
+                              pVars->Scratch_mem);
+
+                pvmp3_alias_reduction(pChVars[ch]->work_buf_int32,
+                                      &pVars->sideInfo.ch[ch].gran[gr],
+                                      &pChVars[ ch]->used_freq_lines,
+                                      info);
+
+
+                /*
+                 *   IMDCT
+                 */
+                /* set mixedBlocksLongBlocks:
+                 * in case of mixed blocks, the # of bands with long
+                 * blocks (2 or 4), else 0
+                 */
+                uint16 mixedBlocksLongBlocks = 0; /*  0 = long or short, 2=mixed, 4=mixed 2.5@8000 */
+                if (pVars->sideInfo.ch[ch].gran[gr].mixed_block_flag &&
+                        pVars->sideInfo.ch[ch].gran[gr].window_switching_flag)
+                {
+                    if ((info->version_x == MPEG_2_5) && (info->sampling_frequency == 2))
+                    {
+                        mixedBlocksLongBlocks = 4; /* mpeg2.5 @ 8 KHz */
+                    }
+                    else
+                    {
+                        mixedBlocksLongBlocks = 2;
+                    }
+                }
+
+                pvmp3_imdct_synth(pChVars[ch]->work_buf_int32,
+                                  pChVars[ch]->overlap,
+                                  pVars->sideInfo.ch[ch].gran[gr].block_type,
+                                  mixedBlocksLongBlocks,
+                                  pChVars[ ch]->used_freq_lines,
+                                  pVars->Scratch_mem);
+
+
+                /*
+                 *   Polyphase synthesis
+                 */
+
+                pvmp3_poly_phase_synthesis(pChVars[ch],
+                                           pVars->num_channels,
+                                           pExt->equalizerType,
+                                           &ptrOutBuffer[ch]);
+
+
+            }/* end ch loop */
+
+            ptrOutBuffer += pVars->num_channels * SUBBANDS_NUMBER * FILTERBANK_BANDS;
+        }  /*   for (gr=0;gr<Max_gr;gr++)  */
+
+        /* skip ancillary data */
+        if (info->bitrate_index > 0)
+        { /* if not free-format */
+
+            int32 ancillary_data_lenght = pVars->predicted_frame_size << 3;
+
+            ancillary_data_lenght  -= pVars->inputStream.usedBits;
+
+            /* skip ancillary data */
+            if (ancillary_data_lenght > 0)
+            {
+                pVars->inputStream.usedBits += ancillary_data_lenght;
+            }
+
+        }
+
+        /*
+         *  This overrides a possible NO_ENOUGH_MAIN_DATA_ERROR
+         */
+        errorCode = NO_DECODING_ERROR;
+
+    }
+    else
+    {
+        /*
+         * The info in the header indicates an unsupported layer; more data
+         * will not fix this, so this is a bad frame.
+         */
+
+        pExt->outputFrameSize = 0;
+        return UNSUPPORTED_LAYER;
+    }
+
+    pExt->inputBufferUsedLength = pVars->inputStream.usedBits >> 3;
+    pExt->totalNumberOfBitsUsed += pVars->inputStream.usedBits;
+    pExt->version = info->version_x;
+    pExt->samplingRate = mp3_s_freq[info->version_x][info->sampling_frequency];
+    pExt->bitRate = mp3_bitrate[pExt->version][info->bitrate_index];
+
+
+    /*
+     *  Always verify buffer overrun condition
+     */
+
+    if (pExt->inputBufferUsedLength > pExt->inputBufferCurrentLength)
+    {
+        pExt->outputFrameSize = 0;
+        errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
+    }
+
+    return errorCode;
+
+}
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+__inline void fillDataBuf(tmp3Bits *pMainData,
+                          uint32 val)       /* val to write into the buffer */
+{
+    pMainData->pBuffer[module(pMainData->offset++, BUFSIZE)] = (uint8)val;
+}
+
+
+void fillMainDataBuf(void  *pMem, int32 temp)
+{
+    tmp3dec_file   *pVars = (tmp3dec_file *)pMem;
+
+
+    int32 offset = (pVars->inputStream.usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
+
+    /*
+     *  Check if input circular buffer boundaries need to be enforced
+     */
+    if ((offset + temp) < BUFSIZE)
+    {
+        uint8 * ptr = pVars->inputStream.pBuffer + offset;
+
+        offset = pVars->mainDataStream.offset;
+
+        /*
+         *  Check if main data circular buffer boundaries need to be enforced
+         */
+        if ((offset + temp) < BUFSIZE)
+        {
+            pv_memcpy((pVars->mainDataStream.pBuffer + offset), ptr, temp*sizeof(uint8));
+            pVars->mainDataStream.offset += temp;
+        }
+        else
+        {
+            for (int32 nBytes = temp; nBytes != 0; nBytes--)  /* read main data. */
+            {
+                int32 tmp = *(ptr++);
+                fillDataBuf(&pVars->mainDataStream, tmp);
+            }
+
+            /* adjust circular buffer counter */
+            pVars->mainDataStream.offset = module(pVars->mainDataStream.offset, BUFSIZE);
+        }
+    }
+    else
+    {
+        for (int32 nBytes = temp; nBytes != 0; nBytes--)  /* read main data. */
+        {
+            fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++  , BUFSIZE)));
+        }
+    }
+
+
+    pVars->inputStream.usedBits += (temp) << INBUF_ARRAY_INDEX_SHIFT;
+}
+
+
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+uint32 pvmp3_decoderMemRequirements(void)
+{
+    uint32 size;
+
+    size = (uint32) sizeof(tmp3dec_file);
+    return (size);
+}
+
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+#include "pvmp3_decode_huff_cw.h"
+
+void pvmp3_InitDecoder(tPVMP3DecoderExternal *pExt,
+                       void  *pMem)
+{
+
+    tmp3dec_file      *pVars;
+    huffcodetab       *pHuff;
+
+    pVars = (tmp3dec_file *)pMem;
+    memset(pVars, 0, sizeof(*pVars));
+
+    pExt->totalNumberOfBitsUsed = 0;
+    pExt->inputBufferCurrentLength = 0;
+    pExt->inputBufferUsedLength    = 0;
+
+    pVars->inputStream.pBuffer = pExt->pInputBuffer;
+
+    /*
+     *  Initialize huffman decoding table
+     */
+
+    pHuff = pVars->ht;
+    pHuff[0].linbits = 0;
+    pHuff[0].pdec_huff_tab = pvmp3_decode_huff_cw_tab0;
+    pHuff[1].linbits = 0;
+    pHuff[1].pdec_huff_tab = pvmp3_decode_huff_cw_tab1;
+    pHuff[2].linbits = 0;
+    pHuff[2].pdec_huff_tab = pvmp3_decode_huff_cw_tab2;
+    pHuff[3].linbits = 0;
+    pHuff[3].pdec_huff_tab = pvmp3_decode_huff_cw_tab3;
+    pHuff[4].linbits = 0;
+    pHuff[4].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 4 is not used */
+    pHuff[5].linbits = 4;
+    pHuff[5].pdec_huff_tab = pvmp3_decode_huff_cw_tab5;
+    pHuff[6].linbits = 0;
+    pHuff[6].pdec_huff_tab = pvmp3_decode_huff_cw_tab6;
+    pHuff[7].linbits = 0;
+    pHuff[7].pdec_huff_tab = pvmp3_decode_huff_cw_tab7;
+    pHuff[8].linbits = 0;
+    pHuff[8].pdec_huff_tab = pvmp3_decode_huff_cw_tab8;
+    pHuff[9].linbits = 0;
+    pHuff[9].pdec_huff_tab = pvmp3_decode_huff_cw_tab9;
+    pHuff[10].linbits = 0;
+    pHuff[10].pdec_huff_tab = pvmp3_decode_huff_cw_tab10;
+    pHuff[11].linbits = 0;
+    pHuff[11].pdec_huff_tab = pvmp3_decode_huff_cw_tab11;
+    pHuff[12].linbits = 0;
+    pHuff[12].pdec_huff_tab = pvmp3_decode_huff_cw_tab12;
+    pHuff[13].linbits = 0;
+    pHuff[13].pdec_huff_tab = pvmp3_decode_huff_cw_tab13;
+    pHuff[14].linbits = 0;
+    pHuff[14].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 14 is not used */
+    pHuff[15].linbits = 0;
+    pHuff[15].pdec_huff_tab = pvmp3_decode_huff_cw_tab15;
+    pHuff[16].linbits = 1;
+    pHuff[16].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[17].linbits = 2;
+    pHuff[17].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[18].linbits = 3;
+    pHuff[18].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[19].linbits = 4;
+    pHuff[19].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[20].linbits = 6;
+    pHuff[20].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[21].linbits = 8;
+    pHuff[21].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[22].linbits = 10;
+    pHuff[22].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[23].linbits = 13;
+    pHuff[23].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
+    pHuff[24].linbits = 4;
+    pHuff[24].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[25].linbits = 5;
+    pHuff[25].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[26].linbits = 6;
+    pHuff[26].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[27].linbits = 7;
+    pHuff[27].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[28].linbits = 8;
+    pHuff[28].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[29].linbits = 9;
+    pHuff[29].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[30].linbits = 11;
+    pHuff[30].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[31].linbits = 13;
+    pHuff[31].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
+    pHuff[32].linbits = 0;
+    pHuff[32].pdec_huff_tab = pvmp3_decode_huff_cw_tab32;
+    pHuff[33].linbits = 0;
+    pHuff[33].pdec_huff_tab = pvmp3_decode_huff_cw_tab33;
+
+    /*
+     *  Initialize polysynthesis circular buffer mechanism
+     */
+    /* clear buffers */
+
+    pvmp3_resetDecoder(pMem);
+
+}
+
+
+/*----------------------------------------------------------------------------
+; FUNCTION CODE
+----------------------------------------------------------------------------*/
+
+
+void pvmp3_resetDecoder(void  *pMem)
+{
+
+    tmp3dec_file      *pVars;
+    tmp3dec_chan      *pChVars[CHAN];
+
+    pVars = (tmp3dec_file *)pMem;
+    pChVars[ LEFT] = &pVars->perChan[ LEFT];
+    pChVars[RIGHT] = &pVars->perChan[RIGHT];
+
+    pVars->frame_start = 0;
+
+    pVars->mainDataStream.offset = 0;
+
+    pVars->mainDataStream.pBuffer =  pVars->mainDataBuffer;
+    pVars->mainDataStream.usedBits = 0;
+
+
+    pVars->inputStream.usedBits = 0; // in bits
+
+
+    pChVars[ LEFT]->used_freq_lines = 575;
+    pChVars[RIGHT]->used_freq_lines = 575;
+
+
+    /*
+     *  Initialize polysynthesis circular buffer mechanism
+     */
+
+    pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
+              0,
+              480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
+    pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
+              0,
+              480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
+
+
+    pv_memset((void*)pChVars[ LEFT]->overlap,
+              0,
+              SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ LEFT]->overlap[0]));
+
+
+    pv_memset((void*)pChVars[ RIGHT]->overlap,
+              0,
+              SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ RIGHT]->overlap[0]));
+
+
+
+
+
+    /*
+     *  Clear all the structures
+     */
+
+
+    pv_memset((void*)&pVars->scaleFactors[RIGHT],
+              0,
+              sizeof(mp3ScaleFactors));
+
+    pv_memset((void*)&pVars->scaleFactors[LEFT],
+              0,
+              sizeof(mp3ScaleFactors));
+
+    pv_memset((void*)&pVars->sideInfo,
+              0,
+              sizeof(mp3SideInfo));
+
+
+}
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.h b/media/codecs/mp3dec/src/pvmp3_framedecoder.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.h
rename to media/codecs/mp3dec/src/pvmp3_framedecoder.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp b/media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
rename to media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.h b/media/codecs/mp3dec/src/pvmp3_get_main_data_size.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_main_data_size.h
rename to media/codecs/mp3dec/src/pvmp3_get_main_data_size.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp b/media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
rename to media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.h b/media/codecs/mp3dec/src/pvmp3_get_scale_factors.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_scale_factors.h
rename to media/codecs/mp3dec/src/pvmp3_get_scale_factors.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp b/media/codecs/mp3dec/src/pvmp3_get_side_info.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.cpp
rename to media/codecs/mp3dec/src/pvmp3_get_side_info.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.h b/media/codecs/mp3dec/src/pvmp3_get_side_info.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_get_side_info.h
rename to media/codecs/mp3dec/src/pvmp3_get_side_info.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp b/media/codecs/mp3dec/src/pvmp3_getbits.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.cpp
rename to media/codecs/mp3dec/src/pvmp3_getbits.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h b/media/codecs/mp3dec/src/pvmp3_getbits.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_getbits.h
rename to media/codecs/mp3dec/src/pvmp3_getbits.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp b/media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
rename to media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp b/media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
rename to media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.cpp b/media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
rename to media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.h b/media/codecs/mp3dec/src/pvmp3_imdct_synth.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_imdct_synth.h
rename to media/codecs/mp3dec/src/pvmp3_imdct_synth.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.cpp b/media/codecs/mp3dec/src/pvmp3_mdct_18.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.cpp
rename to media/codecs/mp3dec/src/pvmp3_mdct_18.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.h b/media/codecs/mp3dec/src/pvmp3_mdct_18.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_18.h
rename to media/codecs/mp3dec/src/pvmp3_mdct_18.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.cpp b/media/codecs/mp3dec/src/pvmp3_mdct_6.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.cpp
rename to media/codecs/mp3dec/src/pvmp3_mdct_6.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.h b/media/codecs/mp3dec/src/pvmp3_mdct_6.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mdct_6.h
rename to media/codecs/mp3dec/src/pvmp3_mdct_6.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h b/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
rename to media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.cpp b/media/codecs/mp3dec/src/pvmp3_normalize.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.cpp
rename to media/codecs/mp3dec/src/pvmp3_normalize.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.h b/media/codecs/mp3dec/src/pvmp3_normalize.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_normalize.h
rename to media/codecs/mp3dec/src/pvmp3_normalize.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp b/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
rename to media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h b/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
rename to media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp b/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
rename to media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h b/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
rename to media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.cpp b/media/codecs/mp3dec/src/pvmp3_reorder.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.cpp
rename to media/codecs/mp3dec/src/pvmp3_reorder.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.h b/media/codecs/mp3dec/src/pvmp3_reorder.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_reorder.h
rename to media/codecs/mp3dec/src/pvmp3_reorder.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.cpp b/media/codecs/mp3dec/src/pvmp3_seek_synch.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.cpp
rename to media/codecs/mp3dec/src/pvmp3_seek_synch.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.h b/media/codecs/mp3dec/src/pvmp3_seek_synch.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_seek_synch.h
rename to media/codecs/mp3dec/src/pvmp3_seek_synch.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
rename to media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.h b/media/codecs/mp3dec/src/pvmp3_stereo_proc.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_stereo_proc.h
rename to media/codecs/mp3dec/src/pvmp3_stereo_proc.h
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_tables.cpp b/media/codecs/mp3dec/src/pvmp3_tables.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_tables.cpp
rename to media/codecs/mp3dec/src/pvmp3_tables.cpp
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_tables.h b/media/codecs/mp3dec/src/pvmp3_tables.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/pvmp3_tables.h
rename to media/codecs/mp3dec/src/pvmp3_tables.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_huffcodetab.h b/media/codecs/mp3dec/src/s_huffcodetab.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_huffcodetab.h
rename to media/codecs/mp3dec/src/s_huffcodetab.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_mp3bits.h b/media/codecs/mp3dec/src/s_mp3bits.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_mp3bits.h
rename to media/codecs/mp3dec/src/s_mp3bits.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_tmp3dec_chan.h b/media/codecs/mp3dec/src/s_tmp3dec_chan.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_tmp3dec_chan.h
rename to media/codecs/mp3dec/src/s_tmp3dec_chan.h
diff --git a/media/libstagefright/codecs/mp3dec/src/s_tmp3dec_file.h b/media/codecs/mp3dec/src/s_tmp3dec_file.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/src/s_tmp3dec_file.h
rename to media/codecs/mp3dec/src/s_tmp3dec_file.h
diff --git a/media/codecs/mp3dec/test/Android.bp b/media/codecs/mp3dec/test/Android.bp
new file mode 100644
index 0000000..f10b6ae
--- /dev/null
+++ b/media/codecs/mp3dec/test/Android.bp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_codecs_mp3dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    default_applicable_licenses: ["frameworks_av_media_codecs_mp3dec_license"],
+}
+
+cc_test {
+    name: "Mp3DecoderTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs: [
+        "mp3reader.cpp",
+        "Mp3DecoderTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_mp3dec",
+        "libsndfile",
+        "libaudioutils",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/codecs/mp3dec/test/AndroidTest.xml b/media/codecs/mp3dec/test/AndroidTest.xml
new file mode 100644
index 0000000..29952eb
--- /dev/null
+++ b/media/codecs/mp3dec/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Mp3 Decoder unit test">
+    <option name="test-suite-tag" value="Mp3DecoderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.2.zip?unzip=true"
+            value="/data/local/tmp/Mp3DecoderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="Mp3DecoderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/Mp3DecoderTestRes/" />
+    </test>
+</configuration>
diff --git a/media/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/codecs/mp3dec/test/Mp3DecoderTest.cpp
new file mode 100644
index 0000000..91326a8
--- /dev/null
+++ b/media/codecs/mp3dec/test/Mp3DecoderTest.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Mp3DecoderTest"
+
+#include <utils/Log.h>
+
+#include <audio_utils/sndfile.h>
+#include <stdio.h>
+
+#include "mp3reader.h"
+#include "pvmp3decoder_api.h"
+
+#include "Mp3DecoderTestEnvironment.h"
+
+#define OUTPUT_FILE "/data/local/tmp/mp3Decode.out"
+
+constexpr int32_t kInputBufferSize = 1024 * 10;
+constexpr int32_t kOutputBufferSize = 4608 * 2;
+constexpr int32_t kMaxCount = 10;
+constexpr int32_t kNumFrameReset = 150;
+
+static Mp3DecoderTestEnvironment *gEnv = nullptr;
+
+class Mp3DecoderTest : public ::testing::TestWithParam<string> {
+  public:
+    Mp3DecoderTest() : mConfig(nullptr) {}
+
+    ~Mp3DecoderTest() {
+        if (mConfig) {
+            delete mConfig;
+            mConfig = nullptr;
+        }
+    }
+
+    virtual void SetUp() override {
+        mConfig = new tPVMP3DecoderExternal{};
+        ASSERT_NE(mConfig, nullptr) << "Failed to initialize config. No Memory available";
+        mConfig->equalizerType = flat;
+        mConfig->crcEnabled = false;
+    }
+
+    tPVMP3DecoderExternal *mConfig;
+    Mp3Reader mMp3Reader;
+
+    ERROR_CODE DecodeFrames(void *decoderbuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
+                            int32_t frameCount = INT32_MAX);
+    SNDFILE *openOutputFile(SF_INFO *sfInfo);
+};
+
+ERROR_CODE Mp3DecoderTest::DecodeFrames(void *decoderBuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
+                                        int32_t frameCount) {
+    uint8_t inputBuf[kInputBufferSize];
+    int16_t outputBuf[kOutputBufferSize];
+    uint32_t bytesRead;
+    ERROR_CODE decoderErr = NO_DECODING_ERROR;  // default if no frame gets decoded
+    while (frameCount > 0) {
+        bool success = mMp3Reader.getFrame(inputBuf, &bytesRead);
+        if (!success) {
+            break;
+        }
+        mConfig->inputBufferCurrentLength = bytesRead;
+        mConfig->inputBufferMaxLength = 0;
+        mConfig->inputBufferUsedLength = 0;
+        mConfig->pInputBuffer = inputBuf;
+        mConfig->pOutputBuffer = outputBuf;
+        mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
+        decoderErr = pvmp3_framedecoder(mConfig, decoderBuf);
+        if (decoderErr != NO_DECODING_ERROR) break;
+        sf_writef_short(outFileHandle, outputBuf, mConfig->outputFrameSize / sfInfo.channels);
+        frameCount--;
+    }
+    return decoderErr;
+}
+
+SNDFILE *Mp3DecoderTest::openOutputFile(SF_INFO *sfInfo) {
+    memset(sfInfo, 0, sizeof(SF_INFO));
+    sfInfo->channels = mMp3Reader.getNumChannels();
+    sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+    sfInfo->samplerate = mMp3Reader.getSampleRate();
+    SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
+    return outFileHandle;
+}
+
+TEST_F(Mp3DecoderTest, MultiCreateMp3DecoderTest) {
+    size_t memRequirements = pvmp3_decoderMemRequirements();
+    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+    for (int count = 0; count < kMaxCount; count++) {
+        pvmp3_InitDecoder(mConfig, decoderBuf);
+        ALOGV("Decoder created successfully");
+    }
+    if (decoderBuf) {
+        free(decoderBuf);
+        decoderBuf = nullptr;
+    }
+}
+
+TEST_P(Mp3DecoderTest, DecodeTest) {
+    size_t memRequirements = pvmp3_decoderMemRequirements();
+    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+
+    pvmp3_InitDecoder(mConfig, decoderBuf);
+    ALOGV("Decoder created successfully");
+    string inputFile = gEnv->getRes() + GetParam();
+    bool status = mMp3Reader.init(inputFile.c_str());
+    ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    ERROR_CODE decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+    mMp3Reader.close();
+    sf_close(outFileHandle);
+    if (decoderBuf) {
+        free(decoderBuf);
+        decoderBuf = nullptr;
+    }
+}
+
+TEST_P(Mp3DecoderTest, ResetDecoderTest) {
+    size_t memRequirements = pvmp3_decoderMemRequirements();
+    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
+    void *decoderBuf = malloc(memRequirements);
+    ASSERT_NE(decoderBuf, nullptr)
+            << "Failed to allocate decoder memory of size " << memRequirements;
+
+    pvmp3_InitDecoder(mConfig, decoderBuf);
+    ALOGV("Decoder created successfully.");
+    string inputFile = gEnv->getRes() + GetParam();
+    bool status = mMp3Reader.init(inputFile.c_str());
+    ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
+
+    // Open the output file.
+    SF_INFO sfInfo;
+    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
+    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
+
+    ERROR_CODE decoderErr;
+    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo, kNumFrameReset);
+    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+    pvmp3_resetDecoder(decoderBuf);
+    // Decode the same file.
+    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
+    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
+    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
+
+    mMp3Reader.close();
+    sf_close(outFileHandle);
+    if (decoderBuf) {
+        free(decoderBuf);
+        decoderBuf = nullptr;
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(Mp3DecoderTestAll, Mp3DecoderTest,
+                         ::testing::Values(("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3"),
+                                           ("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3"),
+                                           ("bug_136053885.mp3"),
+                                           ("bbb_2ch_44kHz_lame_crc.mp3"),
+                                           ("bbb_mp3_stereo_192kbps_48000hz.mp3")));
+
+int main(int argc, char **argv) {
+    gEnv = new Mp3DecoderTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h b/media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
rename to media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
diff --git a/media/libstagefright/codecs/mp3dec/test/README.md b/media/codecs/mp3dec/test/README.md
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/README.md
rename to media/codecs/mp3dec/test/README.md
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3dec_test.cpp b/media/codecs/mp3dec/test/mp3dec_test.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3dec_test.cpp
rename to media/codecs/mp3dec/test/mp3dec_test.cpp
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3reader.cpp b/media/codecs/mp3dec/test/mp3reader.cpp
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3reader.cpp
rename to media/codecs/mp3dec/test/mp3reader.cpp
diff --git a/media/libstagefright/codecs/mp3dec/test/mp3reader.h b/media/codecs/mp3dec/test/mp3reader.h
similarity index 100%
rename from media/libstagefright/codecs/mp3dec/test/mp3reader.h
rename to media/codecs/mp3dec/test/mp3reader.h
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
index 7c4e62f..7513cb1 100644
--- a/media/extractors/Android.bp
+++ b/media/extractors/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "extractor-defaults",
 
@@ -21,7 +30,6 @@
 
     shared_libs: [
         "liblog",
-        "libmediandk#29",
     ],
 
     // extractors are supposed to work on Q(29)
@@ -39,6 +47,21 @@
 
     version_script: "exports.lds",
 
+    target: {
+        android: {
+            shared_libs: [
+                "libmediandk#29",
+            ],
+        },
+        host: {
+            static_libs: [
+                "libutils",
+                "libmediandk_format",
+                "libmedia_ndkformatpriv",
+            ],
+        },
+    },
+
     sanitize: {
         cfi: true,
         misc_undefined: [
@@ -46,4 +69,4 @@
             "signed-integer-overflow",
         ],
     },
-}
\ No newline at end of file
+}
diff --git a/media/extractors/TEST_MAPPING b/media/extractors/TEST_MAPPING
index abefb0f..4984b8f 100644
--- a/media/extractors/TEST_MAPPING
+++ b/media/extractors/TEST_MAPPING
@@ -1,5 +1,6 @@
 {
   "presubmit": [
+
     // TODO(b/153661591) enable test once the bug is fixed
     // This tests the extractor path
     // {
@@ -13,5 +14,14 @@
     //      }
     //    ]
     //  }
+  ],
+
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "ExtractorUnitTest" }
   ]
+
+
 }
diff --git a/media/extractors/aac/AACExtractor.cpp b/media/extractors/aac/AACExtractor.cpp
index 8f60f6b..2fc4584 100644
--- a/media/extractors/aac/AACExtractor.cpp
+++ b/media/extractors/aac/AACExtractor.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "AACExtractor"
 #include <utils/Log.h>
 
+#include <inttypes.h>
+
 #include "AACExtractor.h"
 #include <media/MediaExtractorPluginApi.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -277,7 +279,22 @@
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
         if (mFrameDurationUs > 0) {
-            int64_t seekFrame = seekTimeUs / mFrameDurationUs;
+            int64_t seekFrame = 0;
+            switch(mode & 0x7) {
+                case ReadOptions::SEEK_NEXT_SYNC:
+                    // "at or after"
+                    seekFrame = (seekTimeUs + mFrameDurationUs - 1) / mFrameDurationUs;
+                    break;
+                case ReadOptions::SEEK_CLOSEST_SYNC:
+                case ReadOptions::SEEK_CLOSEST:
+                    seekFrame = (seekTimeUs + mFrameDurationUs/2) / mFrameDurationUs;
+                    break;
+                case ReadOptions::SEEK_PREVIOUS_SYNC:
+                default:
+                    // 'at or before'
+                    seekFrame = seekTimeUs / mFrameDurationUs;
+                    break;
+            }
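+            // Worked example (hypothetical values): with mFrameDurationUs = 20000 and
+            // seekTimeUs = 50000, SEEK_NEXT_SYNC rounds up to frame 3,
+            // SEEK_CLOSEST(_SYNC) rounds to the nearest frame (3), and
+            // SEEK_PREVIOUS_SYNC truncates down to frame 2.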
             if (seekFrame < 0 || seekFrame >= (int64_t)mOffsetVector.size()) {
                 android_errorWriteLog(0x534e4554, "70239507");
                 return AMEDIA_ERROR_MALFORMED;
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 60d3ae1..7bf3a13 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_aac_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_aac_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libaacextractor",
     defaults: ["extractor-defaults"],
@@ -10,4 +27,11 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/amr/AMRExtractor.cpp b/media/extractors/amr/AMRExtractor.cpp
index 26431a4..e26ff0a 100644
--- a/media/extractors/amr/AMRExtractor.cpp
+++ b/media/extractors/amr/AMRExtractor.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "AMRExtractor"
 #include <utils/Log.h>
 
+#include <inttypes.h>
+
 #include "AMRExtractor.h"
 
 #include <media/stagefright/foundation/ADebug.h>
@@ -283,8 +285,22 @@
     ReadOptions::SeekMode mode;
     if (mOffsetTableLength > 0 && options && options->getSeekTo(&seekTimeUs, &mode)) {
         size_t size;
-        int64_t seekFrame = seekTimeUs / 20000LL;  // 20ms per frame.
-        mCurrentTimeUs = seekFrame * 20000LL;
+        const int64_t frameDurationUs = 20000LL;  // 20ms per frame.
+        int64_t seekFrame = 0;
+        switch(mode & 0x7) {
+            case ReadOptions::SEEK_NEXT_SYNC:
+                seekFrame = (seekTimeUs + frameDurationUs - 1) / frameDurationUs;
+                break;
+            case ReadOptions::SEEK_CLOSEST_SYNC:
+            case ReadOptions::SEEK_CLOSEST:
+                seekFrame = (seekTimeUs + frameDurationUs/2) / frameDurationUs;
+                break;
+            case ReadOptions::SEEK_PREVIOUS_SYNC:
+            default:
+                seekFrame = seekTimeUs / frameDurationUs;
+                break;
+        }
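+        // Worked example: for a hypothetical seekTimeUs of 30000, the modes above
+        // pick frame 2 (next sync), frame 2 (closest) and frame 1 (previous sync).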
+        mCurrentTimeUs = seekFrame * frameDurationUs;
 
         size_t index = seekFrame < 0 ? 0 : seekFrame / 50;
         if (index >= mOffsetTableLength) {
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 49c9567..712360d 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_amr_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_amr_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libamrextractor",
     defaults: ["extractor-defaults"],
@@ -8,4 +25,10 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    }
 }
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 826c1a0..9a2a76b 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -1,6 +1,25 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_extractors_flac_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_flac_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libflacextractor",
-    defaults: ["extractor-defaults"],
+    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
 
     srcs: ["FLACExtractor.cpp"],
 
@@ -21,4 +40,12 @@
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
new file mode 100644
index 0000000..0e54b58
--- /dev/null
+++ b/media/extractors/fuzzers/Android.bp
@@ -0,0 +1,346 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "extractor-fuzzerbase-defaults",
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    static_libs: [
+        "liblog",
+        "libstagefright_foundation_colorutils_ndk",
+        "libstagefright_foundation",
+        "libmediandk_format",
+        "libmedia_ndkformatpriv",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "libbinder",
+        "libbase",
+        "libcutils",
+    ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzerbase-defaults"],
+
+    static_libs: [
+        "libextractorfuzzerbase",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_defaults {
+    name: "mpeg2-extractor-fuzzer-defaults",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mpeg2",
+        "frameworks/av/media/libstagefright",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation_without_imemory",
+        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2extractor",
+        "libstagefright_esds",
+        "libmpeg2extractor",
+        "libmedia_helper",
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libcrypto",
+        "libhidlmemory",
+        "libhidlbase",
+    ],
+}
+
+cc_library_static {
+    name: "libextractorfuzzerbase",
+    defaults: ["extractor-fuzzerbase-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "ExtractorFuzzerBase.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "mp4_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mp4_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mp4",
+    ],
+
+    header_libs: [
+        "libaudioclient_headers",
+    ],
+
+    static_libs: [
+        "libstagefright_id3",
+        "libstagefright_esds",
+        "libmp4extractor",
+    ],
+
+    dictionary: "mp4_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "wav_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "wav_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/wav",
+    ],
+
+    static_libs: [
+        "libfifo",
+        "libwavextractor",
+    ],
+
+    shared_libs: [
+        "libbinder_ndk",
+    ],
+}
+
+cc_fuzz {
+    name: "amr_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "amr_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/amr",
+    ],
+
+    static_libs: [
+        "libamrextractor",
+    ],
+
+    dictionary: "amr_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mkv_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mkv_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mkv",
+    ],
+
+    static_libs: [
+        "libwebm",
+        "libstagefright_flacdec",
+        "libstagefright_metadatautils",
+        "libmkvextractor",
+        "libFLAC",
+    ],
+
+    dictionary: "mkv_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "ogg_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "ogg_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/ogg",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libvorbisidec",
+        "liboggextractor",
+    ],
+
+    dictionary: "ogg_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mpeg2ps_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
+
+    srcs: [
+        "mpeg2_extractor_fuzzer.cpp",
+    ],
+
+    cflags: [
+        "-DMPEG2PS",
+    ],
+
+    dictionary: "mpeg2ps_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mpeg2ts_extractor_fuzzer",
+    defaults: ["mpeg2-extractor-fuzzer-defaults"],
+
+    srcs: [
+        "mpeg2_extractor_fuzzer.cpp",
+    ],
+
+    dictionary: "mpeg2ts_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "mp3_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "mp3_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/mp3",
+    ],
+
+    static_libs: [
+        "libfifo",
+        "libmp3extractor",
+        "libstagefright_id3",
+    ],
+}
+
+cc_fuzz {
+    name: "aac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "aac_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/aac",
+    ],
+
+    static_libs: [
+        "libaacextractor",
+        "libstagefright_metadatautils",
+    ],
+}
+
+cc_fuzz {
+    name: "flac_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+    host_supported: true,
+
+    srcs: [
+        "flac_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/flac",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libFLAC",
+        "libflacextractor",
+    ],
+
+    shared_libs: [
+        "libbinder_ndk",
+    ],
+
+    dictionary: "flac_extractor_fuzzer.dict",
+}
+
+cc_fuzz {
+    name: "midi_extractor_fuzzer",
+    defaults: ["extractor-fuzzer-defaults"],
+
+    srcs: [
+        "midi_extractor_fuzzer.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/midi",
+    ],
+
+    static_libs: [
+        "libsonivox",
+        "libmedia_midiiowrapper",
+        "libmidiextractor",
+        "libwatchdog",
+    ],
+
+    dictionary: "midi_extractor_fuzzer.dict",
+
+    host_supported: true,
+}
diff --git a/media/extractors/fuzzers/ExtractorFuzzerBase.cpp b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
new file mode 100644
index 0000000..1be8466
--- /dev/null
+++ b/media/extractors/fuzzers/ExtractorFuzzerBase.cpp
@@ -0,0 +1,201 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExtractorFuzzerBase"
+#include <utils/Log.h>
+
+#include "ExtractorFuzzerBase.h"
+
+using namespace android;
+
+bool ExtractorFuzzerBase::setDataSource(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return false;
+  }
+  mBufferSource = new BufferSource(data, size);
+  mDataSource = reinterpret_cast<DataSource*>(mBufferSource.get());
+  if (!mDataSource) {
+    return false;
+  }
+  return true;
+}
+
+void ExtractorFuzzerBase::getExtractorDef() {
+  float confidence;
+  void* meta = nullptr;
+  FreeMetaFunc freeMeta = nullptr;
+
+  ExtractorDef extractorDef = GETEXTRACTORDEF();
+  if (extractorDef.def_version == EXTRACTORDEF_VERSION_NDK_V1) {
+    extractorDef.u.v2.sniff(mDataSource->wrap(), &confidence, &meta, &freeMeta);
+  } else if (extractorDef.def_version == EXTRACTORDEF_VERSION_NDK_V2) {
+    extractorDef.u.v3.sniff(mDataSource->wrap(), &confidence, &meta, &freeMeta);
+  }
+
+  if (meta != nullptr && freeMeta != nullptr) {
+    freeMeta(meta);
+  }
+}
+
+void ExtractorFuzzerBase::extractTracks() {
+  MediaBufferGroup* bufferGroup = new MediaBufferGroup();
+  if (!bufferGroup) {
+    return;
+  }
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
+    if (!track) {
+      continue;
+    }
+    extractTrack(track, bufferGroup);
+    delete track;
+  }
+  delete bufferGroup;
+}
+
+void ExtractorFuzzerBase::extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup) {
+  CMediaTrack* cTrack = wrap(track);
+  if (!cTrack) {
+    return;
+  }
+
+  media_status_t status = cTrack->start(track, bufferGroup->wrap());
+  if (status != AMEDIA_OK) {
+    free(cTrack);
+    return;
+  }
+
+  do {
+    MediaBufferHelper* buffer = nullptr;
+    status = track->read(&buffer);
+    if (buffer) {
+      buffer->release();
+    }
+  } while (status == AMEDIA_OK);
+
+  cTrack->stop(track);
+  free(cTrack);
+}
+
+void ExtractorFuzzerBase::getTracksMetadata() {
+  AMediaFormat* format = AMediaFormat_new();
+  uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
+
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    mExtractor->getTrackMetaData(format, trackIndex, flags);
+  }
+
+  AMediaFormat_delete(format);
+}
+
+void ExtractorFuzzerBase::getMetadata() {
+  AMediaFormat* format = AMediaFormat_new();
+  mExtractor->getMetaData(format);
+  AMediaFormat_delete(format);
+}
+
+void ExtractorFuzzerBase::setDataSourceFlags(uint32_t flags) {
+  mBufferSource->setFlags(flags);
+}
+
+void ExtractorFuzzerBase::seekAndExtractTracks() {
+  MediaBufferGroup* bufferGroup = new MediaBufferGroup();
+  if (!bufferGroup) {
+    return;
+  }
+  size_t trackCount = mExtractor->countTracks();
+  for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+    MediaTrackHelper* track = mExtractor->getTrack(trackIndex);
+    if (!track) {
+      continue;
+    }
+
+    AMediaFormat* trackMetaData = AMediaFormat_new();
+    int64_t trackDuration = 0;
+    uint32_t flags = MediaExtractorPluginHelper::kIncludeExtensiveMetaData;
+    mExtractor->getTrackMetaData(trackMetaData, trackIndex, flags);
+    AMediaFormat_getInt64(trackMetaData, AMEDIAFORMAT_KEY_DURATION, &trackDuration);
+
+    seekAndExtractTrack(track, bufferGroup, trackDuration);
+    AMediaFormat_delete(trackMetaData);
+    delete track;
+  }
+  delete bufferGroup;
+}
+
+void ExtractorFuzzerBase::seekAndExtractTrack(MediaTrackHelper* track,
+                                              MediaBufferGroup* bufferGroup,
+                                              int64_t trackDuration) {
+  CMediaTrack* cTrack = wrap(track);
+  if (!cTrack) {
+    return;
+  }
+
+  media_status_t status = cTrack->start(track, bufferGroup->wrap());
+  if (status != AMEDIA_OK) {
+    free(cTrack);
+    return;
+  }
+
+  int32_t seekCount = 0;
+  std::vector<int64_t> seekToTimeStamp;
+  while (seekCount <= kFuzzerMaxSeekPointsCount) {
+    /* This ensures kFuzzerMaxSeekPointsCount seek points are within the clipDuration and 1 seek
+     * point is outside of the clipDuration.
+     */
+    int64_t timeStamp = (seekCount * trackDuration) / (kFuzzerMaxSeekPointsCount - 1);
+    seekToTimeStamp.push_back(timeStamp);
+    seekCount++;
+  }
+
+  std::vector<uint32_t> seekOptions;
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_CLOSEST_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_NEXT_SYNC);
+  seekOptions.push_back(CMediaTrackReadOptions::SEEK | CMediaTrackReadOptions::SEEK_FRAME_INDEX);
+
+  for (uint32_t seekOption : seekOptions) {
+    for (int64_t seekPts : seekToTimeStamp) {
+      MediaTrackHelper::ReadOptions* options =
+          new MediaTrackHelper::ReadOptions(seekOption, seekPts);
+      MediaBufferHelper* buffer = nullptr;
+      track->read(&buffer, options);
+      if (buffer) {
+        buffer->release();
+      }
+      delete options;
+    }
+  }
+
+  cTrack->stop(track);
+  free(cTrack);
+}
+
+void ExtractorFuzzerBase::processData(const uint8_t* data, size_t size) {
+  if (setDataSource(data, size)) {
+    if (createExtractor()) {
+      getExtractorDef();
+      getMetadata();
+      extractTracks();
+      getTracksMetadata();
+      seekAndExtractTracks();
+    }
+  }
+}
diff --git a/media/extractors/fuzzers/README.md b/media/extractors/fuzzers/README.md
new file mode 100644
index 0000000..fb1d52f
--- /dev/null
+++ b/media/extractors/fuzzers/README.md
@@ -0,0 +1,362 @@
+# Fuzzer for extractors
+
+## Table of contents
++ [libextractorfuzzerbase](#ExtractorFuzzerBase)
++ [libmp4extractor](#mp4ExtractorFuzzer)
++ [libwavextractor](#wavExtractorFuzzer)
++ [libamrextractor](#amrExtractorFuzzer)
++ [libmkvextractor](#mkvExtractorFuzzer)
++ [liboggextractor](#oggExtractorFuzzer)
++ [libmpeg2extractor](#mpeg2ExtractorFuzzer)
++ [libmp3extractor](#mp3ExtractorFuzzer)
++ [libaacextractor](#aacExtractorFuzzer)
++ [libflacextractor](#flacExtractor)
++ [libmidiextractor](#midiExtractorFuzzer)
+
+# <a name="ExtractorFuzzerBase"></a> Fuzzer for libextractorfuzzerbase
+All the extractors share a common set of operations - creating a data source,
+extracting all of the tracks, and so on. These common APIs have been abstracted
+into a base class called `ExtractorFuzzerBase` so that code is reused between
+fuzzer plugins.
+
+Additionally, `ExtractorFuzzerBase` provides a memory-backed data source,
+`BufferSource`, since the fuzzing engine feeds data through memory buffers and
+using standard data source objects such as FileSource or HTTPSource is not feasible.
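+
+A minimal plugin sketch is shown below. It relies only on names visible in this change
+(`ExtractorFuzzerBase`, `mDataSource`, `mExtractor`, `processData`); the concrete
+extractor class, its constructor, the `DataSourceHelper` wrapper and the assumption
+that `createExtractor` is a virtual hook are illustrative, not the exact source of any
+fuzzer added here.
+```
+#include "ExtractorFuzzerBase.h"
+#include "MP4Extractor.h"  // hypothetical include for a concrete extractor
+
+using namespace android;
+
+class SketchExtractorFuzzer : public ExtractorFuzzerBase {
+ public:
+  bool createExtractor() override {
+    // The base class already owns mDataSource; the plugin only wires up one extractor.
+    mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+    return (mExtractor != nullptr);
+  }
+};
+
+// Standard libFuzzer entry point: each input buffer goes through the base class
+// helper, which sniffs the data, extracts the tracks, reads metadata and seeks.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  SketchExtractorFuzzer fuzzerPlugin;
+  fuzzerPlugin.processData(data, size);
+  return 0;
+}
+```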
+
+# <a name="mp4ExtractorFuzzer"></a> Fuzzer for libmp4extractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MP4 extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MP4 extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for MP4 to ensure that the required MP4
+atoms are present in every input file that goes to the fuzzer.
+This ensures that a larger portion of the code gets covered, since a range of MP4
+atoms will be present in the input data.
+
+
+## Build
+
+This describes steps to build mp4_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp4_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MP4 files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp4_extractor_fuzzer/mp4_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="wavExtractorFuzzer"></a> Fuzzer for libwavextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for WAV extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the WAV extractor class.
+
+
+## Build
+
+This describes steps to build wav_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) wav_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some wav files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/wav_extractor_fuzzer/wav_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="amrExtractorFuzzer"></a> Fuzzer for libamrextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for AMR extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the AMR extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for AMR to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that a larger portion of the code gets covered.
+
+
+## Build
+
+This describes steps to build amr_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) amr_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some AMR files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/amr_extractor_fuzzer/amr_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mkvExtractorFuzzer"></a> Fuzzer for libmkvextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for MKV extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the MKV extractor class.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for MKV to ensure that the required element
+IDs are present in every input file that goes to the fuzzer.
+This ensures that a larger portion of the code gets covered.
+
+
+## Build
+
+This describes steps to build mkv_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mkv_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mkv files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mkv_extractor_fuzzer/mkv_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="oggExtractorFuzzer"></a> Fuzzer for liboggextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for OGG extractor uses the `ExtractorFuzzerBase` class and
+implements only the `createExtractor` to create the OGG extractor object.
+
+##### Maximize code coverage
+A dict file (dictionary file) is created for OGG to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This ensures that a larger portion of the code gets covered.
+
+
+## Build
+
+This describes steps to build ogg_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) ogg_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some ogg files to that folder.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ogg_extractor_fuzzer/ogg_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mpeg2ExtractorFuzzer"></a> Fuzzer for libmpeg2extractor
+
+## Plugin Design Considerations
+The fuzzer plugins for MPEG2-PS and MPEG2-TS extractor use the `ExtractorFuzzerBase` class and
+implement only the `createExtractor` to create the MPEG2-PS or MPEG2-TS extractor
+object respectively.
+
+##### Maximize code coverage
+Dict files (dictionary files) are created for MPEG2-PS and MPEG2-TS to ensure that
+the required start bytes are present in every input file that goes to the fuzzer.
+This ensures that a larger portion of the code gets covered.
+
+##### Other considerations
+Two fuzzer binaries, mpeg2ps_extractor_fuzzer and mpeg2ts_extractor_fuzzer, are
+generated from the same source, based on the presence of the `MPEG2PS` flag, as
+shown in the excerpt below.
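+
+The excerpt is taken from `mpeg2_extractor_fuzzer.cpp`, which is added later in
+this change; the `MPEG2PS` define selects which extractor the shared source builds.
+```
+#ifdef MPEG2PS
+#include "MPEG2PSExtractor.h"
+#else
+#include "MPEG2TSExtractor.h"
+#endif
+
+bool MPEG2Extractor::createExtractor() {
+#ifdef MPEG2PS
+  mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#else
+  mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#endif
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+```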
+
+
+## Build
+
+This describes the steps to build the mpeg2ps_extractor_fuzzer and mpeg2ts_extractor_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mpeg2ps_extractor_fuzzer
+  $ mm -j$(nproc) mpeg2ts_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MPEG2 files into that folder.
+Push this directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mpeg2ps_extractor_fuzzer/mpeg2ps_extractor_fuzzer CORPUS_DIR
+  $ adb shell /data/fuzz/arm64/mpeg2ts_extractor_fuzzer/mpeg2ts_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="mp3ExtractorFuzzer"></a> Fuzzer for libmp3extractor
+
+## Plugin Design Considerations
+The fuzzer plugin for the MP3 extractor uses the `ExtractorFuzzerBase` class and
+implements only `createExtractor()` to create the MP3 extractor object.
+
+
+## Build
+
+This describes the steps to build the mp3_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mp3_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some mp3 files into that folder.
+Push this directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mp3_extractor_fuzzer/mp3_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="aacExtractorFuzzer"></a> Fuzzer for libaacextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for the AAC extractor uses the `ExtractorFuzzerBase` class and
+implements only `createExtractor()` to create the AAC extractor object.
+
+
+## Build
+
+This describes the steps to build the aac_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) aac_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some aac files into that folder.
+Push this directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/aac_extractor_fuzzer/aac_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="flacExtractor"></a> Fuzzer for libflacextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for the FLAC extractor uses the `ExtractorFuzzerBase` class and
+implements only `createExtractor()` to create the FLAC extractor object.
+
+##### Maximize code coverage
+A dictionary (dict) file is created for FLAC to ensure that the required start
+bytes are present in every input file that goes to the fuzzer.
+This helps the fuzzer cover a larger portion of the code.
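+
+For reference, the FLAC dictionary added in this change contains the start code
+("fLaC" in ASCII):
+```
+kw1="\x66\x4C\x61\x43"
+```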
+
+
+## Build
+
+This describes the steps to build the flac_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) flac_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some flac files into that folder.
+Push this directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/flac_extractor_fuzzer/flac_extractor_fuzzer CORPUS_DIR
+```
+
+# <a name="midiExtractorFuzzer"></a> Fuzzer for libmidiextractor
+
+## Plugin Design Considerations
+The fuzzer plugin for the MIDI extractor uses the `ExtractorFuzzerBase` class and
+implements only `createExtractor()` to create the MIDI extractor object.
+
+##### Maximize code coverage
+A dictionary (dict) file is created for MIDI to ensure that the required MIDI
+headers are present in every input file that goes to the fuzzer.
+This helps the fuzzer cover a larger portion of the code, since a range of MIDI
+headers will be present in the input data.
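+
+For reference, the MIDI dictionary added in this change contains the MIDI chunk
+markers:
+```
+kw1="MThd"
+kw2="MTrk"
+```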
+
+
+## Build
+
+This describes the steps to build the midi_extractor_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) midi_extractor_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some MIDI files into that folder.
+Push this directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/midi_extractor_fuzzer/midi_extractor_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/extractors/fuzzers/aac_extractor_fuzzer.cpp b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
new file mode 100644
index 0000000..98a6cc9
--- /dev/null
+++ b/media/extractors/fuzzers/aac_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "AACExtractor.h"
+
+#include "ExtractorFuzzerBase.h"
+
+using namespace android;
+
+class AacExtractor : public ExtractorFuzzerBase {
+ public:
+  AacExtractor() = default;
+  ~AacExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool AacExtractor::createExtractor() {
+  mExtractor = new AACExtractor(new DataSourceHelper(mDataSource->wrap()), 0);
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  AacExtractor* extractor = new AacExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.cpp b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
new file mode 100644
index 0000000..6c9e1a5
--- /dev/null
+++ b/media/extractors/fuzzers/amr_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "AMRExtractor.h"
+
+using namespace android;
+
+class AmrExtractor : public ExtractorFuzzerBase {
+ public:
+  AmrExtractor() = default;
+  ~AmrExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool AmrExtractor::createExtractor() {
+  mExtractor = new AMRExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  AmrExtractor* extractor = new AmrExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.dict b/media/extractors/fuzzers/amr_extractor_fuzzer.dict
new file mode 100644
index 0000000..bc5726c
--- /dev/null
+++ b/media/extractors/fuzzers/amr_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code
+kw1="#!AMR"
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.cpp b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
new file mode 100644
index 0000000..8734d45
--- /dev/null
+++ b/media/extractors/fuzzers/flac_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "FLACExtractor.h"
+
+using namespace android;
+
+class FlacExtractor : public ExtractorFuzzerBase {
+ public:
+  FlacExtractor() = default;
+  ~FlacExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool FlacExtractor::createExtractor() {
+  mExtractor = new FLACExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  FlacExtractor* extractor = new FlacExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.dict b/media/extractors/fuzzers/flac_extractor_fuzzer.dict
new file mode 100644
index 0000000..53ad44f
--- /dev/null
+++ b/media/extractors/fuzzers/flac_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# Start code (bytes 0-3)
+# The below 4 bytes correspond to "fLaC" in ASCII
+kw1="\x66\x4C\x61\x43"
diff --git a/media/extractors/fuzzers/include/ExtractorFuzzerBase.h b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
new file mode 100644
index 0000000..6a2a1c1
--- /dev/null
+++ b/media/extractors/fuzzers/include/ExtractorFuzzerBase.h
@@ -0,0 +1,139 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#ifndef __EXTRACTOR_FUZZER_BASE_H__
+#define __EXTRACTOR_FUZZER_BASE_H__
+
+#include <media/DataSource.h>
+#include <media/MediaExtractorPluginHelper.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <vector>
+
+extern "C" {
+android::ExtractorDef GETEXTRACTORDEF();
+}
+
+constexpr int32_t kFuzzerMaxSeekPointsCount = 5;
+
+namespace android {
+
+class ExtractorFuzzerBase {
+ public:
+  ExtractorFuzzerBase() = default;
+  virtual ~ExtractorFuzzerBase() {
+    if (mExtractor) {
+      delete mExtractor;
+      mExtractor = nullptr;
+    }
+    if (mBufferSource) {
+      mBufferSource.clear();
+      mBufferSource = nullptr;
+    }
+  }
+
+  /** Function to create the media extractor component.
+    * To be implemented by the derived class.
+    */
+  virtual bool createExtractor() = 0;
+
+  /** Parent class functions to be reused by derived class.
+    * These are common for all media extractor components.
+    */
+  bool setDataSource(const uint8_t* data, size_t size);
+
+  void getExtractorDef();
+
+  void extractTracks();
+
+  void getMetadata();
+
+  void getTracksMetadata();
+
+  void setDataSourceFlags(uint32_t flags);
+
+  void seekAndExtractTracks();
+
+  void processData(const uint8_t* data, size_t size);
+
+ protected:
+  class BufferSource : public DataSource {
+   public:
+    BufferSource(const uint8_t* data, size_t length) : mData(data), mLength(length) {}
+    virtual ~BufferSource() { mData = nullptr; }
+
+    void setFlags(uint32_t flags) { mFlags = flags; }
+
+    uint32_t flags() { return mFlags; }
+
+    status_t initCheck() const { return mData != nullptr ? OK : NO_INIT; }
+
+    ssize_t readAt(off64_t offset, void* data, size_t size) {
+      if (!mData) {
+        return NO_INIT;
+      }
+
+      Mutex::Autolock autoLock(mLock);
+      if ((offset >= static_cast<off64_t>(mLength)) || (offset < 0)) {
+        return 0;  // read beyond bounds.
+      }
+      size_t numAvailable = mLength - static_cast<size_t>(offset);
+      if (size > numAvailable) {
+        size = numAvailable;
+      }
+      return readAt_l(offset, data, size);
+    }
+
+    status_t getSize(off64_t* size) {
+      if (!mData) {
+        return NO_INIT;
+      }
+
+      Mutex::Autolock autoLock(mLock);
+      *size = static_cast<off64_t>(mLength);
+      return OK;
+    }
+
+   protected:
+    ssize_t readAt_l(off64_t offset, void* data, size_t size) {
+      void* result = memcpy(data, mData + offset, size);
+      return result != nullptr ? size : 0;
+    }
+
+    const uint8_t* mData = nullptr;
+    size_t mLength = 0;
+    Mutex mLock;
+    uint32_t mFlags = 0;
+
+   private:
+    DISALLOW_EVIL_CONSTRUCTORS(BufferSource);
+  };
+
+  sp<BufferSource> mBufferSource;
+  DataSource* mDataSource = nullptr;
+  MediaExtractorPluginHelper* mExtractor = nullptr;
+
+  virtual void extractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup);
+  virtual void seekAndExtractTrack(MediaTrackHelper* track, MediaBufferGroup* bufferGroup,
+                                   int64_t trackDuration);
+};
+
+}  // namespace android
+
+#endif  // __EXTRACTOR_FUZZER_BASE_H__
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.cpp b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
new file mode 100644
index 0000000..e02a12b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MidiExtractor.h"
+
+using namespace android;
+
+class MIDIExtractor : public ExtractorFuzzerBase {
+ public:
+  MIDIExtractor() = default;
+  ~MIDIExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool MIDIExtractor::createExtractor() {
+  mExtractor = new MidiExtractor(mDataSource->wrap());
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MIDIExtractor* extractor = new MIDIExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.dict b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
new file mode 100644
index 0000000..5b6bb8b
--- /dev/null
+++ b/media/extractors/fuzzers/midi_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# MIDI Chunks
+kw1="MThd"
+kw2="MTrk"
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
new file mode 100644
index 0000000..eceb93f
--- /dev/null
+++ b/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MatroskaExtractor.h"
+
+using namespace android;
+
+class MKVExtractor : public ExtractorFuzzerBase {
+ public:
+  MKVExtractor() = default;
+  ~MKVExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool MKVExtractor::createExtractor() {
+  mExtractor = new MatroskaExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MKVExtractor* extractor = new MKVExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.dict b/media/extractors/fuzzers/mkv_extractor_fuzzer.dict
new file mode 100644
index 0000000..b3815dc
--- /dev/null
+++ b/media/extractors/fuzzers/mkv_extractor_fuzzer.dict
@@ -0,0 +1,244 @@
+# Elements ID's
+kw1="\x42\x86"
+kw2="\x42\xF7"
+kw3="\x42\xF2"
+kw4="\x42\xF3"
+kw5="\x42\x87"
+kw6="\x42\x85"
+kw7="\x18\x53\x80\x67"
+kw8="\x11\x4D\x9B\x74"
+kw9="\x4D\xBB"
+kw10="\x53\xAB"
+kw11="\x53\xAC"
+kw12="\x15\x49\xA9\x66"
+kw13="\x73\xA4"
+kw14="\x73\x84"
+kw15="\x3C\xB9\x23"
+kw16="\x3C\x83\xAB"
+kw17="\x3C\xB9\x23"
+kw18="\x3E\x83\xBB"
+kw19="\x44\x44"
+kw20="\x69\x24"
+kw21="\x69\xFC"
+kw22="\x69\xBF"
+kw23="\x69\xA5"
+kw24="\x2A\xD7\xB1"
+kw25="\x44\x89"
+kw26="\x44\x61"
+kw27="\x7B\xA9"
+kw28="\x4D\x80"
+kw29="\x57\x41"
+kw30="\x1F\x43\xB6\x75"
+kw31="\xE7"
+kw32="\x58\x54"
+kw33="\x58\xD7"
+kw34="\xA7"
+kw35="\xAB"
+kw36="\xA3"
+kw37="\xA0"
+kw38="\xA1"
+kw39="\xA2"
+kw40="\x75\xA1"
+kw41="\x2A\xD7\xB1"
+kw42="\xA6"
+kw43="\xEE"
+kw44="\xA5"
+kw45="\x9A"
+kw46="\xFA"
+kw47="\xFB"
+kw48="\xFD"
+kw49="\xA4"
+kw50="\x75\xA2"
+kw51="\x8E"
+kw52="\xE8"
+kw53="\xCC"
+kw54="\xCD"
+kw55="\xCB"
+kw56="\xCE"
+kw57="\xCF"
+kw58="\xC8"
+kw59="\xC9"
+kw60="\xCA"
+kw61="\xAF"
+kw62="\x16\x54\xAE\x6B"
+kw63="\xAE"
+kw64="\xD7"
+kw65="\x73\xC5"
+kw66="\x83"
+kw67="\xB9"
+kw68="\x88"
+kw69="\x55\xAA"
+kw70="\x9C"
+kw71="\x6D\xE7"
+kw72="\x6D\xF8"
+kw73="\x23\xE3\x83"
+kw74="\x23\x4E\x7A"
+kw75="\x23\x31\x4F"
+kw76="\x53\x7F"
+kw77="\x55\xEE"
+kw78="\x53\x6E"
+kw79="\x22\xB5\x9C"
+kw80="\x22\xB5\x9D"
+kw81="\x86"
+kw82="\x63\xA2"
+kw83="\x25\x86\x88"
+kw84="\x26\xB2\x40"
+kw85="\xAA"
+kw86="\x6F\xAB"
+kw87="\x56\xAA"
+kw88="\x56\xBB"
+kw89="\x66\x24"
+kw90="\x66\xFC"
+kw91="\x66\xBF"
+kw92="\xE0"
+kw93="\x9A"
+kw94="\x9D"
+kw95="\x53\xB8"
+kw96="\x53\xC0"
+kw97="\x53\xB9"
+kw98="\xB0"
+kw99="\xBA"
+kw100="\x54\xAA"
+kw101="\x54\xBB"
+kw102="\x54\xCC"
+kw103="\x54\xDD"
+kw104="\x54\xB0"
+kw105="\x54\xBA"
+kw106="\x54\xB2"
+kw107="\x54\xB3"
+kw108="\x2E\xB5\x24"
+kw109="\x2F\xB5\x23"
+kw110="\x23\x83\xE3"
+kw111="\x55\xB0"
+kw112="\x55\xB1"
+kw113="\x55\xB2"
+kw114="\x55\xB3"
+kw115="\x55\xB4"
+kw116="\x55\xB5"
+kw117="\x55\xB6"
+kw118="\x55\xB7"
+kw119="\x55\xB8"
+kw120="\x55\xB9"
+kw121="\x55\xBA"
+kw122="\x55\xBB"
+kw123="\x55\xBC"
+kw124="\x55\xBD"
+kw125="\x55\xD0"
+kw126="\x55\xD1"
+kw127="\x55\xD2"
+kw128="\x55\xD3"
+kw129="\x55\xD4"
+kw130="\x55\xD5"
+kw131="\x55\xD6"
+kw132="\x55\xD7"
+kw133="\x55\xD8"
+kw134="\x55\xD9"
+kw135="\x55\xDA"
+kw136="\x76\x70"
+kw137="\x76\x71"
+kw138="\x76\x72"
+kw139="\x76\x73"
+kw140="\x76\x74"
+kw141="\x76\x75"
+kw142="\xE1"
+kw143="\xB5"
+kw144="\x78\xB5"
+kw145="\x9F"
+kw146="\x7D\x7B"
+kw147="\x62\x64"
+kw148="\xE2"
+kw149="\xE3"
+kw150="\xE4"
+kw151="\xE5"
+kw152="\xE6"
+kw153="\xE9"
+kw154="\xED"
+kw155="\xC0"
+kw156="\xC1"
+kw157="\xC6"
+kw158="\xC7"
+kw159="\xC4"
+kw160="\x6D\x80"
+kw161="\x62\x40"
+kw162="\x50\x31"
+kw163="\x50\x32"
+kw164="\x50\x33"
+kw165="\x50\x34"
+kw166="\x50\x35"
+kw167="\x42\x54"
+kw168="\x42\x55"
+kw169="\x47\xE1"
+kw170="\x47\xE2"
+kw171="\x47\xE7"
+kw172="\x47\xE8"
+kw173="\x47\xE3"
+kw174="\x47\xE4"
+kw175="\x47\xE5"
+kw176="\x47\xE6"
+kw177="\x1C\x53\xBB\x6B"
+kw178="\xBB"
+kw179="\xB3"
+kw180="\xB7"
+kw181="\xF7"
+kw182="\xF1"
+kw183="\xF0"
+kw184="\xB2"
+kw185="\x53\x78"
+kw186="\xEA"
+kw187="\xDB"
+kw188="\x96"
+kw189="\x97"
+kw190="\x53\x5F"
+kw191="\xEB"
+kw192="\x19\x41\xA4\x69"
+kw193="\x46\x7E"
+kw194="\x46\x6E"
+kw195="\x46\x60"
+kw196="\x46\x5C"
+kw197="\x46\xAE"
+kw198="\x46\x75"
+kw199="\x46\x61"
+kw200="\x46\x62"
+kw201="\x10\x43\xA7\x70"
+kw202="\x45\xB9"
+kw203="\x45\xBC"
+kw204="\x45\xBD"
+kw205="\x45\xDB"
+kw206="\x45\xDD"
+kw207="\xB6"
+kw208="\x73\xC4"
+kw209="\x56\x54"
+kw210="\x91"
+kw211="\x92"
+kw212="\x98"
+kw213="\x45\x98"
+kw214="\x6E\x67"
+kw215="\x6E\xBC"
+kw216="\x63\xC3"
+kw217="\x8F"
+kw218="\x89"
+kw219="\x80"
+kw220="\x85"
+kw221="\x43\x7C"
+kw222="\x43\x7D"
+kw223="\x43\x7E"
+kw224="\x69\x44"
+kw225="\x69\x55"
+kw226="\x45\x0D"
+kw227="\x69\x11"
+kw228="\x69\x22"
+kw229="\x69\x33"
+kw230="\x12\x54\xC3\x67"
+kw231="\x73\x73"
+kw232="\x63\xC0"
+kw233="\x68\xCA"
+kw234="\x63\xCA"
+kw235="\x63\xC5"
+kw236="\x63\xC9"
+kw237="\x67\xC8"
+kw238="\x45\xA3"
+kw239="\x44\x7A"
+kw240="\x44\x7B"
+kw241="\x44\x84"
+kw242="\x44\x87"
+kw243="\x44\x85"
diff --git a/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
new file mode 100644
index 0000000..9a47c18
--- /dev/null
+++ b/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MP3Extractor.h"
+
+using namespace android;
+
+class Mp3Extractor : public ExtractorFuzzerBase {
+ public:
+  Mp3Extractor() = default;
+  ~Mp3Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool Mp3Extractor::createExtractor() {
+  mExtractor = new MP3Extractor(new DataSourceHelper(mDataSource->wrap()), nullptr);
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  Mp3Extractor* extractor = new Mp3Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
new file mode 100644
index 0000000..3903519
--- /dev/null
+++ b/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
@@ -0,0 +1,56 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "MPEG4Extractor.h"
+#include "SampleTable.h"
+
+using namespace android;
+
+class MP4Extractor : public ExtractorFuzzerBase {
+ public:
+  MP4Extractor() = default;
+  ~MP4Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool MP4Extractor::createExtractor() {
+  mExtractor = new MPEG4Extractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  setDataSourceFlags(DataSourceBase::kWantsPrefetching | DataSourceBase::kIsCachingDataSource);
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MP4Extractor* extractor = new MP4Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/extractors/fuzzers/mp4_extractor_fuzzer.dict
new file mode 100644
index 0000000..3683649
--- /dev/null
+++ b/media/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -0,0 +1,248 @@
+# MP4 Atoms/Boxes
+kw1="ftyp"
+kw2="free"
+kw3="mdat"
+kw4="moov"
+kw5="mvhd"
+kw6="trak"
+kw7="tkhd"
+kw8="edts"
+kw9="elst"
+kw10="mdia"
+kw11="mdhd"
+kw12="hdlr"
+kw13="minf"
+kw14="vmhd"
+kw15="dinf"
+kw16="dref"
+kw17="url "
+kw18="stbl"
+kw19="stsd"
+kw20="avc1"
+kw21="avcC"
+kw22="stts"
+kw23="stss"
+kw24="ctts"
+kw25="stsc"
+kw26="stsz"
+kw27="stco"
+kw28="mp4a"
+kw29="esds"
+kw30="udta"
+kw31="meta"
+kw32="ilst"
+kw33="samr"
+kw34="sawb"
+kw35="ec-3"
+kw36="mp4v"
+kw37="s263"
+kw38="h263"
+kw39="H263"
+kw40="avc1"
+kw41="hvc1"
+kw42="hev1"
+kw43="ac-4"
+kw44="Opus"
+kw45="twos"
+kw46="sowt"
+kw47="alac"
+kw48="fLaC"
+kw49="av01"
+kw50=".mp3"
+kw51="keys"
+kw52="cprt"
+kw53="covr"
+kw54="mvex"
+kw55="moof"
+kw56="traf"
+kw57="mfra"
+kw58="sinf"
+kw59="schi"
+kw60="wave"
+kw61="schm"
+kw62="cbc1"
+kw63="cbcs"
+kw64="cenc"
+kw65="cens"
+kw66="frma"
+kw67="tenc"
+kw68="tref"
+kw69="thmb"
+kw70="pssh"
+kw71="mett"
+kw72="enca"
+kw73="encv"
+kw74="co64"
+kw75="stz2"
+kw76="\xA9xyz"
+kw77="btrt"
+kw78="hvcC"
+kw79="av1C"
+kw80="d263"
+kw81="iloc"
+kw82="iinf"
+kw83="iprp"
+kw84="pitm"
+kw85="idat"
+kw86="iref"
+kw87="ipro"
+kw88="mean"
+kw89="name"
+kw90="data"
+kw91="mehd"
+kw92="text"
+kw93="sbtl"
+kw94="trex"
+kw95="tx3g"
+kw96="colr"
+kw97="titl"
+kw98="perf"
+kw99="auth"
+kw100="gnre"
+kw101="albm"
+kw102="yrrc"
+kw103="ID32"
+kw104="----"
+kw105="sidx"
+kw106="ac-3"
+kw107="qt  "
+kw108="mif1"
+kw109="heic"
+kw110="dac4"
+kw111="dec3"
+kw112="dac3"
+kw113="\xA9alb"
+kw114="\xA9ART"
+kw115="aART"
+kw116="\xA9day"
+kw117="\xA9nam"
+kw118="\xA9wrt"
+kw119="\xA9gen"
+kw120="cpil"
+kw121="trkn"
+kw122="disk"
+kw123="nclx"
+kw124="nclc"
+kw125="tfhd"
+kw126="trun"
+kw127="saiz"
+kw128="saio"
+kw129="senc"
+kw130="isom"
+kw131="iso2"
+kw132="3gp4"
+kw133="mp41"
+kw134="mp42"
+kw135="dash"
+kw136="nvr1"
+kw137="MSNV"
+kw138="wmf "
+kw139="3g2a"
+kw140="3g2b"
+kw141="msf1"
+kw142="hevc"
+kw143="pdin"
+kw144="trgr"
+kw145="smhd"
+kw146="hmhd"
+kw147="nmhd"
+kw148="cslg"
+kw149="stsh"
+kw150="padb"
+kw151="stdp"
+kw152="sdtp"
+kw153="sbgp"
+kw154="sgpd"
+kw155="subs"
+kw156="leva"
+kw157="mfhd"
+kw158="tfdt"
+kw159="tfra"
+kw160="mfro"
+kw161="skip"
+kw162="tsel"
+kw163="strk"
+kw164="stri"
+kw165="strd"
+kw166="xml "
+kw167="bxml"
+kw168="fiin"
+kw169="paen"
+kw170="fire"
+kw171="fpar"
+kw172="fecr"
+kw173="segr"
+kw174="gitn"
+kw175="meco"
+kw176="mere"
+kw177="styp"
+kw178="ssix"
+kw179="prft"
+kw180="hint"
+kw181="cdsc"
+kw182="hind"
+kw183="vdep"
+kw184="vplx"
+kw185="msrc"
+kw186="urn "
+kw187="enct"
+kw188="encs"
+kw189="rinf"
+kw190="srpp"
+kw191="stsg"
+kw192="stvi"
+kw193="tims"
+kw194="tsro"
+kw195="snro"
+kw196="rtp "
+kw197="srtp"
+kw198="rtpo"
+kw199="hnti"
+kw200="sdp "
+kw201="trpy"
+kw202="nump"
+kw203="tpyl"
+kw204="totl"
+kw205="npck"
+kw206="tpay"
+kw207="maxr"
+kw208="dmed"
+kw209="dimm"
+kw210="drep"
+kw211="tmin"
+kw212="tmax"
+kw213="pmax"
+kw214="dmax"
+kw215="payt"
+kw216="fdp "
+kw217="fdsa"
+kw218="fdpa"
+kw219="extr"
+kw220="feci"
+kw221="rm2t"
+kw222="sm2t"
+kw223="tPAT"
+kw224="tPMT"
+kw225="tOD "
+kw226="tsti"
+kw227="istm"
+kw228="pm2t"
+kw229="rrtp"
+kw230="rssr"
+kw231="rscr"
+kw232="rsrp"
+kw233="rssr"
+kw234="ccid"
+kw235="sroc"
+kw236="prtp"
+kw237="roll"
+kw238="rash"
+kw239="alst"
+kw240="rap "
+kw241="tele"
+kw242="mp71"
+kw243="iso3"
+kw244="iso4"
+kw245="iso5"
+kw246="resv"
+kw247="iso6"
diff --git a/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
new file mode 100644
index 0000000..240ef66
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
@@ -0,0 +1,62 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#ifdef MPEG2PS
+#include "MPEG2PSExtractor.h"
+#else
+#include "MPEG2TSExtractor.h"
+#endif
+
+using namespace android;
+
+class MPEG2Extractor : public ExtractorFuzzerBase {
+ public:
+  MPEG2Extractor() = default;
+  ~MPEG2Extractor() = default;
+
+  bool createExtractor();
+};
+
+bool MPEG2Extractor::createExtractor() {
+#ifdef MPEG2PS
+  mExtractor = new MPEG2PSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#else
+  mExtractor = new MPEG2TSExtractor(new DataSourceHelper(mDataSource->wrap()));
+#endif
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  MPEG2Extractor* extractor = new MPEG2Extractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict b/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
new file mode 100644
index 0000000..69d390a
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start code (bytes 0-3)
+kw1="\x00\x00\x01\xBA"
diff --git a/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict b/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
new file mode 100644
index 0000000..006a1eb
--- /dev/null
+++ b/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
@@ -0,0 +1,2 @@
+# Start byte
+kw1="\x47"
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
new file mode 100644
index 0000000..bd2fcc5
--- /dev/null
+++ b/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "OggExtractor.h"
+
+using namespace android;
+
+class OGGExtractor : public ExtractorFuzzerBase {
+ public:
+  OGGExtractor() = default;
+  ~OGGExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool OGGExtractor::createExtractor() {
+  mExtractor = new OggExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  OGGExtractor* extractor = new OGGExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.dict b/media/extractors/fuzzers/ogg_extractor_fuzzer.dict
new file mode 100644
index 0000000..df2fc38
--- /dev/null
+++ b/media/extractors/fuzzers/ogg_extractor_fuzzer.dict
@@ -0,0 +1,3 @@
+# Start code (bytes 0-3)
+# The below 4 bytes correspond to "OggS" in ASCII
+kw1="\x4F\x67\x67\x53"
diff --git a/media/extractors/fuzzers/wav_extractor_fuzzer.cpp b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
new file mode 100644
index 0000000..cb11ebd
--- /dev/null
+++ b/media/extractors/fuzzers/wav_extractor_fuzzer.cpp
@@ -0,0 +1,54 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "ExtractorFuzzerBase.h"
+
+#include "WAVExtractor.h"
+
+using namespace android;
+
+class wavExtractor : public ExtractorFuzzerBase {
+ public:
+  wavExtractor() = default;
+  ~wavExtractor() = default;
+
+  bool createExtractor();
+};
+
+bool wavExtractor::createExtractor() {
+  mExtractor = new WAVExtractor(new DataSourceHelper(mDataSource->wrap()));
+  if (!mExtractor) {
+    return false;
+  }
+  mExtractor->name();
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if ((!data) || (size == 0)) {
+    return 0;
+  }
+  wavExtractor* extractor = new wavExtractor();
+  if (extractor) {
+    extractor->processData(data, size);
+    delete extractor;
+  }
+  return 0;
+}
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index b8255fc..08a6fa0 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_extractors_midi_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_midi_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libmidiextractor",
     defaults: ["extractor-defaults"],
@@ -5,7 +24,7 @@
     srcs: ["MidiExtractor.cpp"],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -18,4 +37,12 @@
     shared_libs: [
         "libbase",
     ],
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 7ad8cc1..54c5b27 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_mkv_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_mkv_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libmkvextractor",
     defaults: ["extractor-defaults"],
@@ -15,10 +32,19 @@
     ],
 
     static_libs: [
+        "libstagefright_foundation_colorutils_ndk",   // for mainline-safe ColorUtils
         "libstagefright_foundation",
         "libstagefright_metadatautils",
         "libwebm",
         "libutils",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
 }
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index 27bd357..443e26c 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -1700,17 +1700,17 @@
         return ERROR_MALFORMED;
     }
 
-    size_t header_start = 0;
-    size_t header_lenth = 0;
+    long header_start = 0;
+    long header_length = 0;
     for (header_start = 0; header_start < frame.len - 4; header_start++) {
         if (ntohl(0x000001b3) == *(uint32_t*)((uint8_t*)tmpData.get() + header_start)) {
             break;
         }
     }
     bool isComplete_csd = false;
-    for (header_lenth = 0; header_lenth < frame.len - 4 - header_start; header_lenth++) {
+    for (header_length = 0; header_length < frame.len - 4 - header_start; header_length++) {
         if (ntohl(0x000001b8) == *(uint32_t*)((uint8_t*)tmpData.get()
-                                + header_start + header_lenth)) {
+                                + header_start + header_length)) {
             isComplete_csd = true;
             break;
         }
@@ -1720,7 +1720,7 @@
         return ERROR_MALFORMED;
     }
     addESDSFromCodecPrivate(trackInfo->mMeta, false,
-                              (uint8_t*)(tmpData.get()) + header_start, header_lenth);
+                            (uint8_t*)(tmpData.get()) + header_start, header_length);
 
     return OK;
 
@@ -1888,13 +1888,12 @@
 
     for(size_t i = 0; i < track->GetContentEncodingCount(); i++) {
         const mkvparser::ContentEncoding *encoding = track->GetContentEncodingByIndex(i);
-        for(size_t j = 0; j < encoding->GetEncryptionCount(); j++) {
+        if (encoding->GetEncryptionCount() > 0) {
             const mkvparser::ContentEncoding::ContentEncryption *encryption;
-            encryption = encoding->GetEncryptionByIndex(j);
+            encryption = encoding->GetEncryptionByIndex(0);
             AMediaFormat_setBuffer(trackInfo->mMeta,
                     AMEDIAFORMAT_KEY_CRYPTO_KEY, encryption->key_id, encryption->key_id_len);
             trackInfo->mEncrypted = true;
-            break;
         }
 
         for(size_t j = 0; j < encoding->GetCompressionCount(); j++) {
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 102ac81..75b9b7b 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libmp3extractor",
     defaults: ["extractor-defaults"],
@@ -13,4 +22,11 @@
         "libstagefright_foundation",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp3/MP3Extractor.cpp b/media/extractors/mp3/MP3Extractor.cpp
index 5165822..248a39c 100644
--- a/media/extractors/mp3/MP3Extractor.cpp
+++ b/media/extractors/mp3/MP3Extractor.cpp
@@ -425,8 +425,7 @@
     if (mInitCheck != OK || index != 0) {
         return AMEDIA_ERROR_UNKNOWN;
     }
-    AMediaFormat_copy(meta, mMeta);
-    return AMEDIA_OK;
+    return AMediaFormat_copy(meta, mMeta);
 }
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -505,7 +504,14 @@
             }
 
             mCurrentTimeUs = seekTimeUs;
-            mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000;
+            int64_t seekTimeUsTimesBitrate;
+            if (__builtin_mul_overflow(seekTimeUs, bitrate, &seekTimeUsTimesBitrate)) {
+              return AMEDIA_ERROR_UNSUPPORTED;
+            }
+            if (__builtin_add_overflow(
+                    mFirstFramePos, seekTimeUsTimesBitrate / 8000000, &mCurrentPos)) {
+                return AMEDIA_ERROR_UNSUPPORTED;
+            }
             seekCBR = true;
         } else {
             mCurrentTimeUs = actualSeekTimeUs;
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index e48e1b7..7fa6bfd 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_mp4_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_mp4_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libmp4extractor",
     defaults: ["extractor-defaults"],
@@ -16,4 +33,12 @@
         "libstagefright_id3",
         "libutils",
     ],
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/extractors/mp4/ItemTable.cpp
index 0773387..444664c 100644
--- a/media/extractors/mp4/ItemTable.cpp
+++ b/media/extractors/mp4/ItemTable.cpp
@@ -76,16 +76,19 @@
     size_t size;
     sp<ABuffer> hvcc;
     sp<ABuffer> icc;
+    sp<ABuffer> av1c;
 
     Vector<uint32_t> thumbnails;
     Vector<uint32_t> dimgRefs;
-    Vector<uint32_t> cdscRefs;
+    Vector<uint32_t> exifRefs;
+    Vector<uint32_t> xmpRefs;
     size_t nextTileIndex;
 };
 
-struct ExifItem {
+struct ExternalMetaItem {
     off64_t offset;
     size_t size;
+    bool isExif;
 };
 
 /////////////////////////////////////////////////////////////////////
@@ -481,7 +484,7 @@
 
     void apply(
             KeyedVector<uint32_t, ImageItem> &itemIdToItemMap,
-            KeyedVector<uint32_t, ExifItem> &itemIdToExifMap) const;
+            KeyedVector<uint32_t, ExternalMetaItem> &itemIdToMetaMap) const;
 
 private:
     uint32_t mItemId;
@@ -493,7 +496,7 @@
 
 void ItemReference::apply(
         KeyedVector<uint32_t, ImageItem> &itemIdToItemMap,
-        KeyedVector<uint32_t, ExifItem> &itemIdToExifMap) const {
+        KeyedVector<uint32_t, ExternalMetaItem> &itemIdToMetaMap) const {
     ALOGV("attach reference type 0x%x to item id %d)", type(), mItemId);
 
     switch(type()) {
@@ -546,24 +549,24 @@
                 continue;
             }
             ALOGV("Image item id %d uses thumbnail item id %d", mRefs[i], mItemId);
-            ImageItem &masterImage = itemIdToItemMap.editValueAt(itemIndex);
-            if (!masterImage.thumbnails.empty()) {
+            ImageItem &imageItem = itemIdToItemMap.editValueAt(itemIndex);
+            if (!imageItem.thumbnails.empty()) {
                 ALOGW("already has thumbnails!");
             }
-            masterImage.thumbnails.push_back(mItemId);
+            imageItem.thumbnails.push_back(mItemId);
         }
         break;
     }
     case FOURCC("cdsc"): {
-        ssize_t itemIndex = itemIdToExifMap.indexOfKey(mItemId);
+        ssize_t metaIndex = itemIdToMetaMap.indexOfKey(mItemId);
 
-        // ignore non-exif block items
-        if (itemIndex < 0) {
+        // ignore non-meta items
+        if (metaIndex < 0) {
             return;
         }
 
         for (size_t i = 0; i < mRefs.size(); i++) {
-            itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
+            ssize_t itemIndex = itemIdToItemMap.indexOfKey(mRefs[i]);
 
             // ignore non-image items
             if (itemIndex < 0) {
@@ -571,7 +574,11 @@
             }
             ALOGV("Image item id %d uses metadata item id %d", mRefs[i], mItemId);
             ImageItem &image = itemIdToItemMap.editValueAt(itemIndex);
-            image.cdscRefs.push_back(mItemId);
+            if (itemIdToMetaMap[metaIndex].isExif) {
+                image.exifRefs.push_back(mItemId);
+            } else {
+                image.xmpRefs.push_back(mItemId);
+            }
         }
         break;
     }
@@ -764,6 +771,39 @@
     return OK;
 }
 
+struct Av1cBox : public Box, public ItemProperty {
+    Av1cBox(DataSourceHelper *source) :
+        Box(source, FOURCC("av1C")) {}
+
+    status_t parse(off64_t offset, size_t size) override;
+
+    void attachTo(ImageItem &image) const override {
+        image.av1c = mAv1c;
+    }
+
+private:
+    sp<ABuffer> mAv1c;
+};
+
+status_t Av1cBox::parse(off64_t offset, size_t size) {
+    ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
+
+    mAv1c = new ABuffer(size);
+
+    if (mAv1c->data() == NULL) {
+        ALOGE("b/28471206");
+        return NO_MEMORY;
+    }
+
+    if (source()->readAt(offset, mAv1c->data(), size) < (ssize_t)size) {
+        return ERROR_IO;
+    }
+
+    ALOGV("property av1C");
+
+    return OK;
+}
+
 struct IrotBox : public Box, public ItemProperty {
     IrotBox(DataSourceHelper *source) :
         Box(source, FOURCC("irot")), mAngle(0) {}
@@ -929,7 +969,7 @@
 
 status_t IpcoBox::parse(off64_t offset, size_t size) {
     ALOGV("%s: offset %lld, size %zu", __FUNCTION__, (long long)offset, size);
-    // push dummy as the index is 1-based
+    // push a placeholder as the index is 1-based
     mItemProperties->push_back(new ItemProperty());
     return parseChunks(offset, size);
 }
@@ -957,6 +997,11 @@
             itemProperty = new ColrBox(source());
             break;
         }
+        case FOURCC("av1C"):
+        {
+            itemProperty = new Av1cBox(source());
+            break;
+        }
         default:
         {
             // push dummy to maintain correct item property index
@@ -1026,7 +1071,21 @@
 struct ItemInfo {
     uint32_t itemId;
     uint32_t itemType;
+    String8 contentType;
     bool hidden;
+
+    bool isXmp() const {
+        return itemType == FOURCC("mime") && contentType == String8("application/rdf+xml");
+    }
+    bool isExif() const {
+        return itemType == FOURCC("Exif");
+    }
+    bool isGrid() const {
+        return itemType == FOURCC("grid");
+    }
+    bool isSample() const {
+        return itemType == FOURCC("av01") || itemType == FOURCC("hvc1");
+    }
 };
 
 struct InfeBox : public FullBox {
@@ -1116,6 +1175,7 @@
             if (!parseNullTerminatedString(&offset, &size, &content_type)) {
                 return ERROR_MALFORMED;
             }
+            itemInfo->contentType = content_type;
 
             // content_encoding is optional; can be omitted if would be empty
             if (size > 0) {
@@ -1136,18 +1196,18 @@
 
 struct IinfBox : public FullBox {
     IinfBox(DataSourceHelper *source, Vector<ItemInfo> *itemInfos) :
-        FullBox(source, FOURCC("iinf")), mItemInfos(itemInfos) {}
+        FullBox(source, FOURCC("iinf")), mItemInfos(itemInfos), mNeedIref(false) {}
 
     status_t parse(off64_t offset, size_t size);
 
-    bool hasFourCC(uint32_t type) { return mFourCCSeen.count(type) > 0; }
+    bool needIrefBox() { return mNeedIref; }
 
 protected:
     status_t onChunkData(uint32_t type, off64_t offset, size_t size) override;
 
 private:
     Vector<ItemInfo> *mItemInfos;
-    std::unordered_set<uint32_t> mFourCCSeen;
+    bool mNeedIref;
 };
 
 status_t IinfBox::parse(off64_t offset, size_t size) {
@@ -1194,7 +1254,7 @@
     status_t err = infeBox.parse(offset, size, &itemInfo);
     if (err == OK) {
         mItemInfos->push_back(itemInfo);
-        mFourCCSeen.insert(itemInfo.itemType);
+        mNeedIref |= (itemInfo.isExif() || itemInfo.isXmp() || itemInfo.isGrid());
     }
     // InfeBox parse returns ERROR_UNSUPPORTED if the box if an unsupported
     // version. Ignore this error as it's not fatal.
@@ -1203,8 +1263,9 @@
 
 //////////////////////////////////////////////////////////////////
 
-ItemTable::ItemTable(DataSourceHelper *source)
+ItemTable::ItemTable(DataSourceHelper *source, bool isHeif)
     : mDataSource(source),
+      mIsHeif(isHeif),
       mPrimaryItemId(0),
       mIdatOffset(0),
       mIdatSize(0),
@@ -1283,7 +1344,7 @@
         return err;
     }
 
-    if (iinfBox.hasFourCC(FOURCC("grid")) || iinfBox.hasFourCC(FOURCC("Exif"))) {
+    if (iinfBox.needIrefBox()) {
         mRequiredBoxes.insert('iref');
     }
 
@@ -1359,11 +1420,9 @@
 
         // Only handle 3 types of items, all others are ignored:
         //   'grid': derived image from tiles
-        //   'hvc1': coded image (or tile)
-        //   'Exif': EXIF metadata
-        if (info.itemType != FOURCC("grid") &&
-            info.itemType != FOURCC("hvc1") &&
-            info.itemType != FOURCC("Exif")) {
+        //   'hvc1' or 'av01': coded image (or tile)
+        //   'Exif' or XMP: metadata
+        if (!info.isGrid() && !info.isSample() && !info.isExif() && !info.isXmp()) {
             continue;
         }
 
@@ -1386,15 +1445,18 @@
             return ERROR_MALFORMED;
         }
 
-        if (info.itemType == FOURCC("Exif")) {
-            // Only add if the Exif data is non-empty. The first 4 bytes contain
+        if (info.isExif() || info.isXmp()) {
+            // Only add if the meta is non-empty. For Exif, the first 4 bytes contain
             // the offset to TIFF header, which the Exif parser doesn't use.
-            if (size > 4) {
-                ExifItem exifItem = {
+            ALOGV("adding meta to mItemIdToMetaMap: isExif %d, offset %lld, size %lld",
+                    info.isExif(), (long long)offset, (long long)size);
+            if ((info.isExif() && size > 4) || (info.isXmp() && size > 0)) {
+                ExternalMetaItem metaItem = {
+                        .isExif = info.isExif(),
                         .offset = offset,
                         .size = size,
                 };
-                mItemIdToExifMap.add(info.itemId, exifItem);
+                mItemIdToMetaMap.add(info.itemId, metaItem);
             }
             continue;
         }
@@ -1429,7 +1491,7 @@
     }
 
     for (size_t i = 0; i < mItemReferences.size(); i++) {
-        mItemReferences[i]->apply(mItemIdToItemMap, mItemIdToExifMap);
+        mItemReferences[i]->apply(mItemIdToItemMap, mItemIdToMetaMap);
     }
 
     bool foundPrimary = false;
@@ -1509,7 +1571,9 @@
     }
 
     AMediaFormat *meta = AMediaFormat_new();
-    AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    AMediaFormat_setString(
+        meta, AMEDIAFORMAT_KEY_MIME,
+        mIsHeif ? MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC : MEDIA_MIMETYPE_IMAGE_AVIF);
 
     if (image->itemId == mPrimaryItemId) {
         AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_DEFAULT, 1);
@@ -1539,15 +1603,24 @@
         ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(image->thumbnails[0]);
         if (thumbItemIndex >= 0) {
             const ImageItem &thumbnail = mItemIdToItemMap[thumbItemIndex];
-
-            if (thumbnail.hvcc != NULL) {
+            if (thumbnail.hvcc != NULL || thumbnail.av1c != NULL) {
                 AMediaFormat_setInt32(meta,
                         AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH, thumbnail.width);
                 AMediaFormat_setInt32(meta,
                         AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT, thumbnail.height);
-                AMediaFormat_setBuffer(meta,
-                        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
-                        thumbnail.hvcc->data(), thumbnail.hvcc->size());
+                if (thumbnail.hvcc != NULL) {
+                    AMediaFormat_setBuffer(meta,
+                            AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+                            thumbnail.hvcc->data(), thumbnail.hvcc->size());
+                } else {
+                    // We use a hard-coded string here instead of
+                    // AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C. The key is available only from SDK 31.
+                    // The mp4 extractor is part of mainline and builds against SDK 29 as of
+                    // writing. This hard-coded string can be replaced with the named constant once
+                    // the mp4 extractor is built against SDK >= 31.
+                    AMediaFormat_setBuffer(meta,
+                            "thumbnail-csd-av1c", thumbnail.av1c->data(), thumbnail.av1c->size());
+                }
                 ALOGV("image[%u]: thumbnail: size %dx%d, item index %zd",
                         imageIndex, thumbnail.width, thumbnail.height, thumbItemIndex);
             } else {
@@ -1574,12 +1647,21 @@
                 AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, image->width * image->height * 3 / 2);
     }
 
-    if (image->hvcc == NULL) {
-        ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
-        return NULL;
+    if (mIsHeif) {
+        if (image->hvcc == NULL) {
+            ALOGE("%s: hvcc is missing for image[%u]!", __FUNCTION__, imageIndex);
+            return NULL;
+        }
+        AMediaFormat_setBuffer(meta,
+                AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
+    } else {
+        if (image->av1c == NULL) {
+            ALOGE("%s: av1c is missing for image[%u]!", __FUNCTION__, imageIndex);
+            return NULL;
+        }
+        AMediaFormat_setBuffer(meta,
+                AMEDIAFORMAT_KEY_CSD_0, image->av1c->data(), image->av1c->size());
     }
-    AMediaFormat_setBuffer(meta,
-            AMEDIAFORMAT_KEY_CSD_HEVC, image->hvcc->data(), image->hvcc->size());
 
     if (image->icc != NULL) {
         AMediaFormat_setBuffer(meta,
@@ -1614,17 +1696,17 @@
         return BAD_VALUE;
     }
 
-    uint32_t masterItemIndex = mDisplayables[imageIndex];
+    uint32_t imageItemIndex = mDisplayables[imageIndex];
 
-    const ImageItem &masterImage = mItemIdToItemMap[masterItemIndex];
-    if (masterImage.thumbnails.empty()) {
-        *itemIndex = masterItemIndex;
+    const ImageItem &imageItem = mItemIdToItemMap[imageItemIndex];
+    if (imageItem.thumbnails.empty()) {
+        *itemIndex = imageItemIndex;
         return OK;
     }
 
-    ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(masterImage.thumbnails[0]);
+    ssize_t thumbItemIndex = mItemIdToItemMap.indexOfKey(imageItem.thumbnails[0]);
     if (thumbItemIndex < 0) {
-        // Do not return the master image in this case, fail it so that the
+        // Do not return the image item in this case, fail it so that the
         // thumbnail extraction code knows we really don't have it.
         return INVALID_OPERATION;
     }
@@ -1686,11 +1768,11 @@
     }
 
     const ImageItem &image = mItemIdToItemMap[itemIndex];
-    if (image.cdscRefs.size() == 0) {
+    if (image.exifRefs.size() == 0) {
         return NAME_NOT_FOUND;
     }
 
-    ssize_t exifIndex = mItemIdToExifMap.indexOfKey(image.cdscRefs[0]);
+    ssize_t exifIndex = mItemIdToMetaMap.indexOfKey(image.exifRefs[0]);
     if (exifIndex < 0) {
         return NAME_NOT_FOUND;
     }
@@ -1698,7 +1780,7 @@
     // skip the first 4-byte of the offset to TIFF header
     uint32_t tiffOffset;
     if (!mDataSource->readAt(
-            mItemIdToExifMap[exifIndex].offset, &tiffOffset, 4)) {
+            mItemIdToMetaMap[exifIndex].offset, &tiffOffset, 4)) {
         return ERROR_IO;
     }
 
@@ -1711,16 +1793,43 @@
     // exif data. The size of the item should be > 4 for a non-empty exif (this
     // was already checked when the item was added). Also check that the tiff
     // header offset is valid.
-    if (mItemIdToExifMap[exifIndex].size <= 4 ||
-            tiffOffset > mItemIdToExifMap[exifIndex].size - 4) {
+    if (mItemIdToMetaMap[exifIndex].size <= 4 ||
+            tiffOffset > mItemIdToMetaMap[exifIndex].size - 4) {
         return ERROR_MALFORMED;
     }
 
     // Offset of 'Exif\0\0' relative to the beginning of 'Exif' item
     // (first 4-byte is the tiff header offset)
     uint32_t exifOffset = 4 + tiffOffset - 6;
-    *offset = mItemIdToExifMap[exifIndex].offset + exifOffset;
-    *size = mItemIdToExifMap[exifIndex].size - exifOffset;
+    *offset = mItemIdToMetaMap[exifIndex].offset + exifOffset;
+    *size = mItemIdToMetaMap[exifIndex].size - exifOffset;
+    return OK;
+}
+
+status_t ItemTable::getXmpOffsetAndSize(off64_t *offset, size_t *size) {
+    if (!mImageItemsValid) {
+        return INVALID_OPERATION;
+    }
+
+    ssize_t itemIndex = mItemIdToItemMap.indexOfKey(mPrimaryItemId);
+
+    // this should not happen, something's seriously wrong.
+    if (itemIndex < 0) {
+        return INVALID_OPERATION;
+    }
+
+    const ImageItem &image = mItemIdToItemMap[itemIndex];
+    if (image.xmpRefs.size() == 0) {
+        return NAME_NOT_FOUND;
+    }
+
+    ssize_t xmpIndex = mItemIdToMetaMap.indexOfKey(image.xmpRefs[0]);
+    if (xmpIndex < 0) {
+        return NAME_NOT_FOUND;
+    }
+
+    *offset = mItemIdToMetaMap[xmpIndex].offset;
+    *size = mItemIdToMetaMap[xmpIndex].size;
     return OK;
 }
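
Note: the arithmetic above follows the Exif item layout spelled out in the comments, i.e. a 4-byte TIFF-header offset field followed by the Exif payload, with the 6-byte 'Exif\0\0' identifier sitting immediately before the TIFF header. A minimal standalone sketch of the same computation (illustrative only, not extractor code):

#include <cstdint>
#include <cstdio>

// Sketch: locate the 'Exif\0\0' identifier inside an Exif item, assuming the
// layout described above: [4-byte tiff header offset][Exif payload].
int main() {
    uint32_t tiffOffset = 6;  // value read from the first 4 bytes of the item
    // 'Exif\0\0' occupies the 6 bytes immediately preceding the TIFF header,
    // so it starts (4 + tiffOffset - 6) bytes into the item.
    uint32_t exifOffset = 4 + tiffOffset - 6;
    printf("'Exif' identifier starts at byte %u of the item\n", exifOffset);  // prints 4
    return 0;
}
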
 
diff --git a/media/extractors/mp4/ItemTable.h b/media/extractors/mp4/ItemTable.h
index be81b59..62826b6 100644
--- a/media/extractors/mp4/ItemTable.h
+++ b/media/extractors/mp4/ItemTable.h
@@ -34,7 +34,7 @@
 
 struct AssociationEntry;
 struct ImageItem;
-struct ExifItem;
+struct ExternalMetaItem;
 struct ItemLoc;
 struct ItemInfo;
 struct ItemProperty;
@@ -42,12 +42,12 @@
 
 /*
  * ItemTable keeps track of all image items (including coded images, grids and
- * tiles) inside a HEIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
+ * tiles) inside a HEIF/AVIF still image (ISO/IEC FDIS 23008-12.2:2017(E)).
  */
 
 class ItemTable : public RefBase {
 public:
-    explicit ItemTable(DataSourceHelper *source);
+    ItemTable(DataSourceHelper *source, bool isHeif);
 
     status_t parse(uint32_t type, off64_t offset, size_t size);
 
@@ -59,12 +59,15 @@
     status_t getImageOffsetAndSize(
             uint32_t *itemIndex, off64_t *offset, size_t *size);
     status_t getExifOffsetAndSize(off64_t *offset, size_t *size);
+    status_t getXmpOffsetAndSize(off64_t *offset, size_t *size);
 
 protected:
     ~ItemTable();
 
 private:
     DataSourceHelper *mDataSource;
+    // If this is true, then this item table is for a HEIF image. Otherwise it is for an AVIF image.
+    bool mIsHeif;
 
     KeyedVector<uint32_t, ItemLoc> mItemLocs;
     Vector<ItemInfo> mItemInfos;
@@ -82,7 +85,7 @@
     bool mImageItemsValid;
     uint32_t mCurrentItemIndex;
     KeyedVector<uint32_t, ImageItem> mItemIdToItemMap;
-    KeyedVector<uint32_t, ExifItem> mItemIdToExifMap;
+    KeyedVector<uint32_t, ExternalMetaItem> mItemIdToMetaMap;
     Vector<uint32_t> mDisplayables;
 
     status_t parseIlocBox(off64_t offset, size_t size);
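
Note: with the constructor change above, callers now have to say which container they are parsing. A hedged usage sketch, mirroring the MPEG4Extractor.cpp call site later in this change (not buildable outside frameworks/av; 'dataSource' is a placeholder):

// 'dataSource' stands in for the DataSourceHelper the extractor already owns.
sp<ItemTable> itemTable = new ItemTable(dataSource, /* isHeif */ true);   // HEIF container
// For an AVIF container, pass false instead:
// sp<ItemTable> itemTable = new ItemTable(dataSource, /* isHeif */ false);

off64_t xmpOffset;
size_t xmpSize;
if (itemTable->getXmpOffsetAndSize(&xmpOffset, &xmpSize) == OK) {
    // The raw XMP packet occupies [xmpOffset, xmpOffset + xmpSize) in the file.
}
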
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
old mode 100755
new mode 100644
index a976a2b..fbcd554
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -62,6 +62,16 @@
 
 #define ALAC_SPECIFIC_INFO_SIZE (36)
 
+// TODO: Remove the defines once mainline media is built against NDK >= 31.
+// The mp4 extractor is part of mainline and builds against NDK 29 as of
+// writing. These keys are available only from NDK 31:
+#define AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION \
+  "mpegh-profile-level-indication"
+#define AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT \
+  "mpegh-reference-channel-layout"
+#define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
+  "mpegh-compatible-sets"
+
 namespace android {
 
 enum {
@@ -139,6 +149,7 @@
     bool mIsHEVC;
     bool mIsDolbyVision;
     bool mIsAC4;
+    bool mIsMpegH = false;
     bool mIsPcm;
     size_t mNALLengthSize;
 
@@ -149,6 +160,7 @@
     uint8_t *mSrcBuffer;
 
     bool mIsHeif;
+    bool mIsAvif;
     bool mIsAudio;
     bool mIsUsac = false;
     sp<ItemTable> mItemTable;
@@ -202,8 +214,8 @@
         uint32_t duration;
         int32_t compositionOffset;
         uint8_t iv[16];
-        Vector<size_t> clearsizes;
-        Vector<size_t> encryptedsizes;
+        Vector<uint32_t> clearsizes;
+        Vector<uint32_t> encryptedsizes;
     };
     Vector<Sample> mCurrentSamples;
     std::map<off64_t, uint32_t> mDrmOffsets;
@@ -372,9 +384,15 @@
             return MEDIA_MIMETYPE_AUDIO_FLAC;
         case FOURCC("av01"):
             return MEDIA_MIMETYPE_VIDEO_AV1;
+        case FOURCC("vp09"):
+            return MEDIA_MIMETYPE_VIDEO_VP9;
         case FOURCC(".mp3"):
         case 0x6D730055: // "ms U" mp3 audio
             return MEDIA_MIMETYPE_AUDIO_MPEG;
+        case FOURCC("mha1"):
+            return MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
+        case FOURCC("mhm1"):
+            return MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
         default:
             ALOGW("Unknown fourcc: %c%c%c%c",
                    (fourcc >> 24) & 0xff,
@@ -412,6 +430,7 @@
       mIsHeif(false),
       mHasMoovBox(false),
       mPreferHeif(mime != NULL && !strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_HEIF)),
+      mIsAvif(false),
       mFirstTrack(NULL),
       mLastTrack(NULL) {
     ALOGV("mime=%s, mPreferHeif=%d", mime, mPreferHeif);
@@ -636,8 +655,7 @@
         }
     }
 
-    AMediaFormat_copy(meta, track->meta);
-    return AMEDIA_OK;
+    return AMediaFormat_copy(meta, track->meta);
 }
 
 status_t MPEG4Extractor::readMetaData() {
@@ -668,7 +686,7 @@
         }
     }
 
-    if (mIsHeif && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
+    if ((mIsAvif || mIsHeif) && (mItemTable != NULL) && (mItemTable->countImages() > 0)) {
         off64_t exifOffset;
         size_t exifSize;
         if (mItemTable->getExifOffsetAndSize(&exifOffset, &exifSize) == OK) {
@@ -677,6 +695,19 @@
             AMediaFormat_setInt64(mFileMetaData,
                     AMEDIAFORMAT_KEY_EXIF_SIZE, (int64_t)exifSize);
         }
+        off64_t xmpOffset;
+        size_t xmpSize;
+        if (mItemTable->getXmpOffsetAndSize(&xmpOffset, &xmpSize) == OK) {
+            // TODO(chz): b/175717339
+            // Use a hard-coded string here instead of named keys. The keys are available
+            // only on API 31+. The mp4 extractor is part of mainline and has min_sdk_version
+            // of 29. This hard-coded string can be replaced with the named constant once
+            // the mp4 extractor is built against API 31+.
+            AMediaFormat_setInt64(mFileMetaData,
+                    "xmp-offset" /*AMEDIAFORMAT_KEY_XMP_OFFSET*/, (int64_t)xmpOffset);
+            AMediaFormat_setInt64(mFileMetaData,
+                    "xmp-size" /*AMEDIAFORMAT_KEY_XMP_SIZE*/, (int64_t)xmpSize);
+        }
         for (uint32_t imageIndex = 0;
                 imageIndex < mItemTable->countImages(); imageIndex++) {
             AMediaFormat *meta = mItemTable->getImageMeta(imageIndex);
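
Note: because the XMP location is written under literal key strings, a reader built against an older NDK can fetch it the same way. A minimal sketch (hypothetical helper; AMediaFormat_getInt64 is the standard NDK getter):

#include <media/NdkMediaFormat.h>

// Sketch: read back the XMP location written above. On SDK >= 31 the named
// constants AMEDIAFORMAT_KEY_XMP_OFFSET / AMEDIAFORMAT_KEY_XMP_SIZE are
// expected to map to the same strings (see the TODO above).
static bool getXmpRange(AMediaFormat *fileMeta, int64_t *offset, int64_t *size) {
    return AMediaFormat_getInt64(fileMeta, "xmp-offset", offset)
            && AMediaFormat_getInt64(fileMeta, "xmp-size", size);
}
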
@@ -694,7 +725,7 @@
             }
             mInitCheck = OK;
 
-            ALOGV("adding HEIF image track %u", imageIndex);
+            ALOGV("adding %s image track %u", mIsHeif ? "HEIF" : "AVIF", imageIndex);
             Track *track = new Track;
             if (mLastTrack != NULL) {
                 mLastTrack->next = track;
@@ -720,6 +751,10 @@
                 MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) != NULL) {
             AMediaFormat_setString(mFileMetaData,
                     AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_CONTAINER_HEIF);
+        } else if (findTrackByMimePrefix(
+                MEDIA_MIMETYPE_IMAGE_AVIF) != NULL) {
+            AMediaFormat_setString(mFileMetaData,
+                    AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_IMAGE_AVIF);
         } else {
             AMediaFormat_setString(mFileMetaData,
                     AMEDIAFORMAT_KEY_MIME, "application/octet-stream");
@@ -1091,7 +1126,9 @@
                     void *data;
                     size_t size;
 
-                    if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+                    if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
+                                               &data, &size)
+                        && size >= 5) {
                         const uint8_t *ptr = (const uint8_t *)data;
                         const uint8_t profile = ptr[2] >> 1;
                         const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
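
Note: the size check added above exists because the record is indexed at ptr[2] and ptr[4], so at least five bytes must be present before those reads. The same bound as a standalone sketch:

#include <cstddef>
#include <cstdint>

// Sketch of the bound enforced above: reading ptr[2] and ptr[4] needs size >= 5.
static bool hasDolbyVisionIds(const uint8_t *ptr, size_t size) {
    if (size < 5) {
        return false;  // ptr[4] would be past the end of the buffer
    }
    const uint8_t profile = ptr[2] >> 1;
    const uint8_t blCompatibilityId = ptr[4] >> 4;
    (void)profile;
    (void)blCompatibilityId;
    return true;
}
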
@@ -1128,8 +1165,12 @@
                             mLastTrack->next = track_b;
                             track_b->next = NULL;
 
-                            auto id = track_b->meta->mFormat->findEntryByName(AMEDIAFORMAT_KEY_CSD_2);
-                            track_b->meta->mFormat->removeEntryAt(id);
+                            // we want to remove the csd-2 key from the metadata, but
+                            // don't have an AMediaFormat_* function to do so. Settle
+                            // for replacing this csd-2 with an empty csd-2.
+                            uint8_t emptybuffer[8] = {};
+                            AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
+                                                   emptybuffer, 0);
 
                             if (4 == profile || 7 == profile || 8 == profile ) {
                                 AMediaFormat_setString(track_b->meta,
@@ -1149,7 +1190,7 @@
             } else if (chunk_type == FOURCC("moov")) {
                 mInitCheck = OK;
 
-                return UNKNOWN_ERROR;  // Return a dummy error.
+                return UNKNOWN_ERROR;  // Return a generic error.
             }
             break;
         }
@@ -1758,6 +1799,8 @@
         case FOURCC("fLaC"):
         case FOURCC(".mp3"):
         case 0x6D730055: // "ms U" mp3 audio
+        case FOURCC("mha1"):
+        case FOURCC("mhm1"):
         {
             if (mIsQT && depth >= 1 && mPath[depth - 1] == FOURCC("wave")) {
 
@@ -1787,7 +1830,7 @@
                 return ERROR_IO;
             }
 
-            uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
+            // we can get data_ref_index value from U16_AT(&buffer[6])
             uint16_t version = U16_AT(&buffer[8]);
             uint32_t num_channels = U16_AT(&buffer[16]);
 
@@ -1957,7 +2000,94 @@
             }
             break;
         }
+        case FOURCC("mhaC"):
+        {
+            // See ISO_IEC_23008-3:2019 MHADecoderConfigurationRecord
+            constexpr uint32_t mhac_header_size = 4 /* size */ + 4 /* boxtype 'mhaC' */
+                    + 1 /* configurationVersion */ + 1 /* mpegh3daProfileLevelIndication */
+                    + 1 /* referenceChannelLayout */ + 2 /* mpegh3daConfigLength */;
+            uint8_t mhac_header[mhac_header_size];
+            off64_t data_offset = *offset;
 
+            if (chunk_size < sizeof(mhac_header)) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(data_offset, mhac_header, sizeof(mhac_header))
+                    < (ssize_t)sizeof(mhac_header)) {
+                return ERROR_IO;
+            }
+
+            // get mpegh3daProfileLevelIndication
+            const uint32_t mpegh3daProfileLevelIndication = mhac_header[9];
+            AMediaFormat_setInt32(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                    mpegh3daProfileLevelIndication);
+
+            // get referenceChannelLayout
+            const uint32_t referenceChannelLayout = mhac_header[10];
+            AMediaFormat_setInt32(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                    referenceChannelLayout);
+
+            // get mpegh3daConfigLength
+            const uint32_t mhac_config_size = U16_AT(&mhac_header[11]);
+            if (chunk_size != sizeof(mhac_header) + mhac_config_size) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += sizeof(mhac_header);
+            uint8_t mhac_config[mhac_config_size];
+            if (mDataSource->readAt(data_offset, mhac_config, sizeof(mhac_config))
+                    < (ssize_t)sizeof(mhac_config)) {
+                return ERROR_IO;
+            }
+
+            AMediaFormat_setBuffer(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_CSD_0, mhac_config, sizeof(mhac_config));
+            data_offset += sizeof(mhac_config);
+            *offset = data_offset;
+            break;
+        }
+        case FOURCC("mhaP"):
+        {
+            // FDAmd_2 of ISO_IEC_23008-3:2019 MHAProfileAndLevelCompatibilitySetBox
+            constexpr uint32_t mhap_header_size = 4 /* size */ + 4 /* boxtype 'mhaP' */
+                    + 1 /* numCompatibleSets */;
+
+            uint8_t mhap_header[mhap_header_size];
+            off64_t data_offset = *offset;
+
+            if (chunk_size < (ssize_t)mhap_header_size) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(data_offset, mhap_header, sizeof(mhap_header))
+                    < (ssize_t)sizeof(mhap_header)) {
+                return ERROR_IO;
+            }
+
+            // mhap_compatible_sets_size = numCompatibleSets * sizeof(uint8_t)
+            const uint32_t mhap_compatible_sets_size = mhap_header[8];
+            if (chunk_size != sizeof(mhap_header) + mhap_compatible_sets_size) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += sizeof(mhap_header);
+            uint8_t mhap_compatible_sets[mhap_compatible_sets_size];
+            if (mDataSource->readAt(
+                    data_offset, mhap_compatible_sets, sizeof(mhap_compatible_sets))
+                            < (ssize_t)sizeof(mhap_compatible_sets)) {
+                return ERROR_IO;
+            }
+
+            AMediaFormat_setBuffer(mLastTrack->meta,
+                    AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+                    mhap_compatible_sets, sizeof(mhap_compatible_sets));
+            data_offset += sizeof(mhap_compatible_sets);
+            *offset = data_offset;
+            break;
+        }
         case FOURCC("mp4v"):
         case FOURCC("encv"):
         case FOURCC("s263"):
@@ -1972,6 +2102,7 @@
         case FOURCC("dvh1"):
         case FOURCC("dav1"):
         case FOURCC("av01"):
+        case FOURCC("vp09"):
         {
             uint8_t buffer[78];
             if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -1984,7 +2115,7 @@
                 return ERROR_IO;
             }
 
-            uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
+            // we can get data_ref_index value from U16_AT(&buffer[6])
             uint16_t width = U16_AT(&buffer[6 + 18]);
             uint16_t height = U16_AT(&buffer[6 + 20]);
 
@@ -2324,7 +2455,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_ESDS, &buffer[4], chunk_data_size - 4);
 
             if (mPath.size() >= 2
@@ -2406,7 +2537,7 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_AVC, buffer.get(), chunk_data_size);
 
             break;
@@ -2428,12 +2559,14 @@
             if (mLastTrack == NULL)
                 return ERROR_MALFORMED;
 
-            AMediaFormat_setBuffer(mLastTrack->meta, 
+            AMediaFormat_setBuffer(mLastTrack->meta,
                     AMEDIAFORMAT_KEY_CSD_HEVC, buffer.get(), chunk_data_size);
 
             *offset += chunk_size;
             break;
         }
+
+        case FOURCC("vpcC"):
         case FOURCC("av1C"):
         {
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
@@ -2460,7 +2593,9 @@
         case FOURCC("dvcC"):
         case FOURCC("dvvC"): {
 
-            CHECK_EQ(chunk_data_size, 24);
+            if (chunk_data_size != 24) {
+                return ERROR_MALFORMED;
+            }
 
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
 
@@ -2571,9 +2706,9 @@
         case FOURCC("iref"):
         case FOURCC("ipro"):
         {
-            if (mIsHeif) {
+            if (mIsHeif || mIsAvif) {
                 if (mItemTable == NULL) {
-                    mItemTable = new ItemTable(mDataSource);
+                    mItemTable = new ItemTable(mDataSource, mIsHeif);
                 }
                 status_t err = mItemTable->parse(
                         chunk_type, data_offset, chunk_data_size);
@@ -2875,6 +3010,21 @@
             break;
         }
 
+        case FOURCC("pasp"):
+        {
+            *offset += chunk_size;
+            // this must be in a VisualSampleEntry box under the Sample Description Box ('stsd')
+            // ignore otherwise
+            if (depth >= 2 && mPath[depth - 2] == FOURCC("stsd")) {
+                status_t err = parsePaspBox(data_offset, chunk_data_size);
+                if (err != OK) {
+                    return err;
+                }
+            }
+
+            break;
+        }
+
         case FOURCC("titl"):
         case FOURCC("perf"):
         case FOURCC("auth"):
@@ -2999,14 +3149,20 @@
                     mIsHeif = true;
                     brandSet.erase(FOURCC("mif1"));
                     brandSet.erase(FOURCC("heic"));
+                } else if (brandSet.count(FOURCC("avif")) > 0 ||
+                       brandSet.count(FOURCC("avis")) > 0) {
+                    ALOGV("identified AVIF image");
+                    mIsAvif = true;
+                    brandSet.erase(FOURCC("avif"));
+                    brandSet.erase(FOURCC("avis"));
                 }
 
                 if (!brandSet.empty()) {
                     // This means that the file should have moov box.
                     // It could be any iso files (mp4, heifs, etc.)
                     mHasMoovBox = true;
-                    if (mIsHeif) {
-                        ALOGV("identified HEIF image with other tracks");
+                    if (mIsHeif || mIsAvif) {
+                        ALOGV("identified %s image with other tracks", mIsHeif ? "HEIF" : "AVIF");
                     }
                 }
             }
@@ -3407,7 +3563,7 @@
     }
 
     // skip
-    unsigned bsmod __unused = br.getBits(3);
+    br.skipBits(3); // bsmod
 
     unsigned acmod = br.getBits(3);
     unsigned lfeon = br.getBits(1);
@@ -3718,19 +3874,18 @@
         return ERROR_IO;
     }
 
-    uint64_t ctime __unused, mtime __unused, duration __unused;
     int32_t id;
 
     if (version == 1) {
-        ctime = U64_AT(&buffer[4]);
-        mtime = U64_AT(&buffer[12]);
+        // we can get ctime value from U64_AT(&buffer[4])
+        // we can get mtime value from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        duration = U64_AT(&buffer[28]);
+        // we can get duration value from U64_AT(&buffer[28])
     } else if (version == 0) {
-        ctime = U32_AT(&buffer[4]);
-        mtime = U32_AT(&buffer[8]);
+        // we can get ctime value from U32_AT(&buffer[4])
+        // we can get mtime value from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        duration = U32_AT(&buffer[20]);
+        // we can get duration value from U32_AT(&buffer[20])
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -3978,13 +4133,13 @@
                 // custom genre string
                 buffer[size] = '\0';
 
-                AMediaFormat_setString(mFileMetaData, 
+                AMediaFormat_setString(mFileMetaData,
                         metadataKey, (const char *)buffer + 8);
             }
         } else {
             buffer[size] = '\0';
 
-            AMediaFormat_setString(mFileMetaData, 
+            AMediaFormat_setString(mFileMetaData,
                     metadataKey, (const char *)buffer + 8);
         }
     }
@@ -4048,6 +4203,26 @@
     return OK;
 }
 
+status_t MPEG4Extractor::parsePaspBox(off64_t offset, size_t size) {
+    if (size < 8 || size == SIZE_MAX || mLastTrack == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t data[2]; // hSpacing, vSpacing
+    if (mDataSource->readAt(offset, data, 8) < 8) {
+        return ERROR_IO;
+    }
+    uint32_t hSpacing = ntohl(data[0]);
+    uint32_t vSpacing = ntohl(data[1]);
+
+    if (hSpacing != 0 && vSpacing != 0) {
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_WIDTH, hSpacing);
+        AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_SAR_HEIGHT, vSpacing);
+    }
+
+    return OK;
+}
+
 status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
     if (size < 4 || size == SIZE_MAX) {
         return ERROR_MALFORMED;
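
Note: the 'pasp' payload parsed above is two big-endian 32-bit integers, hSpacing and vSpacing, which follow the standard PixelAspectRatioBox semantics of a pixel (sample) aspect ratio. A consumer that wants a display width can scale the coded width by that ratio; a hedged sketch (helper name is illustrative, not part of this change):

#include <cstdint>

// Sketch: apply the SAR stored as AMEDIAFORMAT_KEY_SAR_WIDTH / _HEIGHT above.
// Example: 720x480 content with SAR 32:27 yields a display width of 853.
static int32_t applySampleAspectRatio(int32_t codedWidth, int32_t sarWidth, int32_t sarHeight) {
    if (sarWidth <= 0 || sarHeight <= 0) {
        return codedWidth;  // no usable SAR: leave the width unchanged
    }
    return static_cast<int32_t>((static_cast<int64_t>(codedWidth) * sarWidth) / sarHeight);
}
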
@@ -4325,7 +4500,8 @@
         if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
             return NULL;
         }
-   } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+   } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+           || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         void *data;
         size_t size;
         if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
@@ -4334,7 +4510,22 @@
 
         const uint8_t *ptr = (const uint8_t *)data;
 
-        if (size < 5 || ptr[0] != 0x81) {  // configurationVersion == 1
+        if (size < 4 || ptr[0] != 0x81) {  // configurationVersion == 1
+            return NULL;
+        }
+        if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+            itemTable = mItemTable;
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
+        void *data;
+        size_t size;
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+            return NULL;
+        }
+
+        const uint8_t *ptr = (const uint8_t *)data;
+
+        if (size < 5 || ptr[0] != 0x01) {  // configurationVersion == 1
             return NULL;
         }
     }
@@ -4391,6 +4582,10 @@
         if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
             return ERROR_MALFORMED;
         }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+            return ERROR_MALFORMED;
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
             || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
@@ -4485,6 +4680,9 @@
 
     if (objectTypeIndication == 0x6B || objectTypeIndication == 0x69) {
         // mp3 audio
+        if (mLastTrack == NULL)
+            return ERROR_MALFORMED;
+
         AMediaFormat_setString(mLastTrack->meta,AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_MPEG);
         return OK;
     }
@@ -4575,6 +4773,10 @@
         if (offset >= csd_size || csd[offset] != 0x01) {
             return ERROR_MALFORMED;
         }
+
+        if (mLastTrack == NULL) {
+            return ERROR_MALFORMED;
+        }
         // formerly kKeyVorbisInfo
         AMediaFormat_setBuffer(mLastTrack->meta,
                 AMEDIAFORMAT_KEY_CSD_0, &csd[offset], len1);
@@ -4638,18 +4840,17 @@
     if (objectType == AOT_SBR || objectType == AOT_PS) {//SBR specific config per 14496-3 tbl 1.13
         if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
         uint32_t extFreqIndex = br.getBits(4);
-        int32_t extSampleRate __unused;
         if (extFreqIndex == 15) {
             if (csd_size < 8) {
                 return ERROR_MALFORMED;
             }
             if (br.numBitsLeft() < 24) return ERROR_MALFORMED;
-            extSampleRate = br.getBits(24);
+            br.skipBits(24); // extSampleRate
         } else {
             if (extFreqIndex == 13 || extFreqIndex == 14) {
                 return ERROR_MALFORMED;
             }
-            extSampleRate = kSamplingRate[extFreqIndex];
+            //extSampleRate = kSamplingRate[extFreqIndex];
         }
         //TODO: save the extension sampling rate value in meta data =>
         //      AMediaFormat_setInt32(mLastTrack->meta, kKeyExtSampleRate, extSampleRate);
@@ -4692,13 +4893,13 @@
                 objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||
                 objectType == AOT_ER_BSAC) {
             if (br.numBitsLeft() < 2) return ERROR_MALFORMED;
-            const int32_t frameLengthFlag __unused = br.getBits(1);
+            br.skipBits(1); // frameLengthFlag
 
             const int32_t dependsOnCoreCoder = br.getBits(1);
 
             if (dependsOnCoreCoder ) {
                 if (br.numBitsLeft() < 14) return ERROR_MALFORMED;
-                const int32_t coreCoderDelay __unused = br.getBits(14);
+                br.skipBits(14); // coreCoderDelay
             }
 
             int32_t extensionFlag = -1;
@@ -4730,64 +4931,64 @@
                 if (br.numBitsLeft() < 32) {
                     return ERROR_MALFORMED;
                 }
-                const int32_t ElementInstanceTag __unused = br.getBits(4);
-                const int32_t Profile __unused = br.getBits(2);
-                const int32_t SamplingFrequencyIndex __unused = br.getBits(4);
+                br.skipBits(4); // ElementInstanceTag
+                br.skipBits(2); // Profile
+                br.skipBits(4); // SamplingFrequencyIndex
                 const int32_t NumFrontChannelElements = br.getBits(4);
                 const int32_t NumSideChannelElements = br.getBits(4);
                 const int32_t NumBackChannelElements = br.getBits(4);
                 const int32_t NumLfeChannelElements = br.getBits(2);
-                const int32_t NumAssocDataElements __unused = br.getBits(3);
-                const int32_t NumValidCcElements __unused = br.getBits(4);
+                br.skipBits(3); // NumAssocDataElements
+                br.skipBits(4); // NumValidCcElements
 
                 const int32_t MonoMixdownPresent = br.getBits(1);
 
                 if (MonoMixdownPresent != 0) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t MonoMixdownElementNumber __unused = br.getBits(4);
+                    br.skipBits(4); // MonoMixdownElementNumber
                 }
 
                 if (br.numBitsLeft() < 1) return ERROR_MALFORMED;
                 const int32_t StereoMixdownPresent = br.getBits(1);
                 if (StereoMixdownPresent != 0) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t StereoMixdownElementNumber __unused = br.getBits(4);
+                    br.skipBits(4); // StereoMixdownElementNumber
                 }
 
                 if (br.numBitsLeft() < 1) return ERROR_MALFORMED;
                 const int32_t MatrixMixdownIndexPresent = br.getBits(1);
                 if (MatrixMixdownIndexPresent != 0) {
                     if (br.numBitsLeft() < 3) return ERROR_MALFORMED;
-                    const int32_t MatrixMixdownIndex __unused = br.getBits(2);
-                    const int32_t PseudoSurroundEnable __unused = br.getBits(1);
+                    br.skipBits(2); // MatrixMixdownIndex
+                    br.skipBits(1); // PseudoSurroundEnable
                 }
 
                 int i;
                 for (i=0; i < NumFrontChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t FrontElementIsCpe = br.getBits(1);
-                    const int32_t FrontElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // FrontElementTagSelect
                     channelsNum += FrontElementIsCpe ? 2 : 1;
                 }
 
                 for (i=0; i < NumSideChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t SideElementIsCpe = br.getBits(1);
-                    const int32_t SideElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // SideElementTagSelect
                     channelsNum += SideElementIsCpe ? 2 : 1;
                 }
 
                 for (i=0; i < NumBackChannelElements; i++) {
                     if (br.numBitsLeft() < 5) return ERROR_MALFORMED;
                     const int32_t BackElementIsCpe = br.getBits(1);
-                    const int32_t BackElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // BackElementTagSelect
                     channelsNum += BackElementIsCpe ? 2 : 1;
                 }
                 channelsEffectiveNum = channelsNum;
 
                 for (i=0; i < NumLfeChannelElements; i++) {
                     if (br.numBitsLeft() < 4) return ERROR_MALFORMED;
-                    const int32_t LfeElementTagSelect __unused = br.getBits(4);
+                    br.skipBits(4); // LfeElementTagSelect
                     channelsNum += 1;
                 }
                 ALOGV("mpeg4 audio channelsNum = %d", channelsNum);
@@ -4883,7 +5084,6 @@
       mStarted(false),
       mBuffer(NULL),
       mSrcBuffer(NULL),
-      mIsHeif(itemTable != NULL),
       mItemTable(itemTable),
       mElstShiftStartTicks(elstShiftStartTicks),
       mElstInitialEmptyEditTicks(elstInitialEmptyEditTicks) {
@@ -4913,11 +5113,15 @@
     bool success = AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime);
     CHECK(success);
 
+    mIsMpegH = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1);
     mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
     mIsAC4 = !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4);
     mIsDolbyVision = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
+    mIsHeif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC) && mItemTable != NULL;
+    mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF) && mItemTable != NULL;
 
     if (mIsAVC) {
         void *data;
@@ -5766,7 +5970,7 @@
             return -EINVAL;
         }
 
-        // apply some sanity (vs strict legality) checks
+        // apply some quick (vs strict legality) checks
         //
         static constexpr uint32_t kMaxTrunSampleCount = 10000;
         if (sampleCount > kMaxTrunSampleCount) {
@@ -5912,7 +6116,7 @@
 
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
         ALOGV("seekTimeUs:%" PRId64, seekTimeUs);
-        if (mIsHeif) {
+        if (mIsHeif || mIsAvif) {
             CHECK(mSampleTable == NULL);
             CHECK(mItemTable != NULL);
             int32_t imageIndex;
@@ -5982,10 +6186,11 @@
             }
 
             uint32_t syncSampleIndex = sampleIndex;
-            // assume every non-USAC audio sample is a sync sample. This works around
+            // assume every non-USAC/non-MPEGH audio sample is a sync sample.
+            // This works around
             // seek issues with files that were incorrectly written with an
             // empty or single-sample stss block for the audio track
-            if (err == OK && (!mIsAudio || mIsUsac)) {
+            if (err == OK && (!mIsAudio || mIsUsac || mIsMpegH)) {
                 err = mSampleTable->findSyncSampleNear(
                         sampleIndex, &syncSampleIndex, findFlags);
             }
@@ -6057,7 +6262,7 @@
         newBuffer = true;
 
         status_t err;
-        if (!mIsHeif) {
+        if (!mIsHeif && !mIsAvif) {
             err = mSampleTable->getMetaDataForSample(mCurrentSampleIndex, &offset, &size,
                                                     (uint64_t*)&cts, &isSyncSample, &stts);
             if(err == OK) {
@@ -6104,9 +6309,13 @@
         if (newBuffer) {
             if (mIsPcm) {
                 // The twos' PCM block reader assumes that all samples has the same size.
-
-                uint32_t samplesToRead = mSampleTable->getLastSampleIndexInChunk()
-                                                      - mCurrentSampleIndex + 1;
+                uint32_t lastSampleIndexInChunk = mSampleTable->getLastSampleIndexInChunk();
+                if (lastSampleIndexInChunk < mCurrentSampleIndex) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
+                uint32_t samplesToRead = lastSampleIndexInChunk - mCurrentSampleIndex + 1;
                 if (samplesToRead > kMaxPcmFrameSize) {
                     samplesToRead = kMaxPcmFrameSize;
                 }
@@ -6115,13 +6324,17 @@
                       samplesToRead, size, mCurrentSampleIndex,
                       mSampleTable->getLastSampleIndexInChunk());
 
-               size_t totalSize = samplesToRead * size;
+                size_t totalSize = samplesToRead * size;
+                if (mBuffer->size() < totalSize) {
+                    mBuffer->release();
+                    mBuffer = nullptr;
+                    return AMEDIA_ERROR_UNKNOWN;
+                }
                 uint8_t* buf = (uint8_t *)mBuffer->data();
                 ssize_t bytesRead = mDataSource->readAt(offset, buf, totalSize);
                 if (bytesRead < (ssize_t)totalSize) {
                     mBuffer->release();
                     mBuffer = NULL;
-
                     return AMEDIA_ERROR_IO;
                 }
 
@@ -6175,7 +6388,19 @@
                 if (isSyncSample) {
                     AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
                 }
- 
+
+                AMediaFormat_setInt64(
+                        meta, "sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET*/,
+                        offset);
+
+                if (mSampleTable != nullptr &&
+                        mCurrentSampleIndex == mSampleTable->getLastSampleIndexInChunk()) {
+                    AMediaFormat_setInt64(
+                    meta,
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    mSampleTable->getLastSampleIndexInChunk());
+                }
+
                 ++mCurrentSampleIndex;
             }
         }
@@ -6325,6 +6550,17 @@
             AMediaFormat_setInt32(meta, AMEDIAFORMAT_KEY_IS_SYNC_FRAME, 1);
         }
 
+        AMediaFormat_setInt64(
+                meta, "sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET*/, offset);
+
+        if (mSampleTable != nullptr &&
+                mCurrentSampleIndex == mSampleTable->getLastSampleIndexInChunk()) {
+            AMediaFormat_setInt64(
+                    meta,
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    mSampleTable->getLastSampleIndexInChunk());
+        }
+
         ++mCurrentSampleIndex;
 
         *out = mBuffer;
@@ -6485,9 +6721,9 @@
     if (smpl->encryptedsizes.size()) {
         // store clear/encrypted lengths in metadata
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
-                smpl->clearsizes.array(), smpl->clearsizes.size() * 4);
+                smpl->clearsizes.array(), smpl->clearsizes.size() * sizeof(uint32_t));
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
-                smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4);
+                smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * sizeof(uint32_t));
         AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE, mDefaultIVSize);
         AMediaFormat_setInt32(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_MODE, mCryptoMode);
         AMediaFormat_setBuffer(bufmeta, AMEDIAFORMAT_KEY_CRYPTO_KEY, mCryptoKey, 16);
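
Note: with the vectors declared as Vector<uint32_t> (see the Sample struct change earlier in this file), size() * sizeof(uint32_t) describes exactly the bytes the packed buffers contain. With the previous Vector<size_t>, each element occupies 8 bytes on 64-bit builds while only 4 bytes per element were copied, so the packed size arrays could come out malformed; that mismatch is presumably what this change removes. A small standalone illustration of the byte counts:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    std::vector<size_t> wide = {100, 200, 300};      // 8-byte elements on 64-bit builds
    std::vector<uint32_t> narrow = {100, 200, 300};  // 4-byte elements everywhere
    printf("size_t array: %zu bytes, uint32_t array: %zu bytes, bytes copied at *4: %zu\n",
           wide.size() * sizeof(size_t),
           narrow.size() * sizeof(uint32_t),
           wide.size() * 4);
    return 0;
}
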
@@ -6696,7 +6932,8 @@
         || !memcmp(header, "ftypM4A ", 8) || !memcmp(header, "ftypf4v ", 8)
         || !memcmp(header, "ftypkddi", 8) || !memcmp(header, "ftypM4VP", 8)
         || !memcmp(header, "ftypmif1", 8) || !memcmp(header, "ftypheic", 8)
-        || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)) {
+        || !memcmp(header, "ftypmsf1", 8) || !memcmp(header, "ftyphevc", 8)
+        || !memcmp(header, "ftypavif", 8) || !memcmp(header, "ftypavis", 8)) {
         *confidence = 0.4;
 
         return true;
@@ -6713,6 +6950,7 @@
         FOURCC("hvc1"),
         FOURCC("hev1"),
         FOURCC("av01"),
+        FOURCC("vp09"),
         FOURCC("3gp4"),
         FOURCC("mp41"),
         FOURCC("mp42"),
@@ -6731,6 +6969,8 @@
         FOURCC("heic"),  // HEIF image
         FOURCC("msf1"),  // HEIF image sequence
         FOURCC("hevc"),  // HEIF image sequence
+        FOURCC("avif"),  // AVIF image
+        FOURCC("avis"),  // AVIF image sequence
     };
 
     for (size_t i = 0;
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 1e49d50..542a3e6 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -144,6 +144,7 @@
     bool mIsHeif;
     bool mHasMoovBox;
     bool mPreferHeif;
+    bool mIsAvif;
 
     Track *mFirstTrack, *mLastTrack;
 
@@ -160,6 +161,7 @@
     status_t parseChunk(off64_t *offset, int depth);
     status_t parseITunesMetaData(off64_t offset, size_t size);
     status_t parseColorInfo(off64_t offset, size_t size);
+    status_t parsePaspBox(off64_t offset, size_t size);
     status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);
     void parseID3v2MetaData(off64_t offset, uint64_t size);
     status_t parseQTMetaKey(off64_t data_offset, size_t data_size);
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index bc8632c..7e6247b 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -1,6 +1,35 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_extractors_mpeg2_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_mpeg2_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libmpeg2extractor",
 
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+        android: {
+            shared_libs: ["libvndksupport#29"],
+        },
+    },
+
     defaults: ["extractor-defaults"],
 
     srcs: [
@@ -12,14 +41,13 @@
     shared_libs: [
         "libbase",
         "libcgrouprc#29",
-        "libvndksupport#29",
     ],
 
     header_libs: [
         "libaudioclient_headers",
         "libbase_headers",
         "libstagefright_headers",
-        "libmedia_headers",
+        "libmedia_datasource_headers",
     ],
 
     static_libs: [
@@ -37,7 +65,7 @@
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
         "libstagefright_mpeg2extractor",
-        "libstagefright_mpeg2support",
+        "libstagefright_mpeg2support_nocrypto",
         "libutils",
     ],
 
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index 9e093eb..2e68809 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -268,6 +268,9 @@
 media_status_t MPEG2TSExtractor::getTrackMetaData(
         AMediaFormat *meta,
         size_t index, uint32_t /* flags */) {
+    if (meta == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
     sp<MetaData> implMeta = index < mSourceImpls.size()
         ? mSourceImpls.editItemAt(index)->getFormat() : NULL;
     if (implMeta == NULL) {
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index 7aed683..d7540c4 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_ogg_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_ogg_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "liboggextractor",
 
@@ -20,4 +37,11 @@
         "libvorbisidec",
     ],
 
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index b3afe2f..5d97d9a 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -14,9 +14,19 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "ExtractorUnitTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: ["ExtractorUnitTest.cpp"],
 
@@ -41,6 +51,7 @@
         "libstagefright_esds",
         "libstagefright_mpeg2support",
         "libstagefright_mpeg2extractor",
+        "libstagefright_foundation_colorutils_ndk",
         "libstagefright_foundation",
         "libstagefright_metadatautils",
 
diff --git a/media/extractors/tests/AndroidTest.xml b/media/extractors/tests/AndroidTest.xml
index 6bb2c8a..fc8152c 100644
--- a/media/extractors/tests/AndroidTest.xml
+++ b/media/extractors/tests/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="ExtractorUnitTest->/data/local/tmp/ExtractorUnitTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip?unzip=true"
             value="/data/local/tmp/ExtractorUnitTestRes/" />
     </target_preparer>
 
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
index 518166e..84ec1f2 100644
--- a/media/extractors/tests/ExtractorUnitTest.cpp
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -18,10 +18,14 @@
 #define LOG_TAG "ExtractorUnitTest"
 #include <utils/Log.h>
 
+#include <inttypes.h>
+
 #include <datasource/FileSource.h>
 #include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/OpusHeader.h>
 
 #include "aac/AACExtractor.h"
 #include "amr/AMRExtractor.h"
@@ -43,11 +47,76 @@
 #define OUTPUT_DUMP_FILE "/data/local/tmp/extractorOutput"
 
 constexpr int32_t kMaxCount = 10;
-constexpr int32_t kOpusSeekPreRollUs = 80000;  // 80 ms;
+constexpr int32_t kAudioDefaultSampleDuration = 20000;                       // 20ms
+constexpr int32_t kRandomSeekToleranceUs = 2 * kAudioDefaultSampleDuration;  // 40 ms;
+constexpr int32_t kRandomSeed = 700;
+constexpr int32_t kUndefined = -1;
+
+enum inputID {
+    // audio streams
+    AAC_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    GSM_1,
+    MIDI_1,
+    MP3_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    HEVC_1,
+    HEVC_2,
+    MPEG2_PS_1,
+    MPEG2_TS_1,
+    MPEG4_1,
+    VP9_1,
+    UNKNOWN_ID,
+};
+
+// LookUpTable of clips and metadata for component testing
+static const struct InputData {
+    inputID inpId;
+    string mime;
+    string inputFile;
+    int32_t firstParam;
+    int32_t secondParam;
+    int32_t profile;
+    int32_t frameRate;
+} kInputData[] = {
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "test_mono_44100Hz_aac.aac", 44100, 1, AACObjectLC,
+         kUndefined},
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "bbb_mono_8kHz_amrnb.amr", 8000, 1, kUndefined,
+         kUndefined},
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_mono_16kHz_amrwb.amr", 16000, 1, kUndefined,
+         kUndefined},
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_RAW, "bbb_stereo_48kHz_flac.flac", 48000, 2, kUndefined,
+         kUndefined},
+        {GSM_1, MEDIA_MIMETYPE_AUDIO_MSGSM, "test_mono_8kHz_gsm.wav", 8000, 1, kUndefined,
+         kUndefined},
+        {MIDI_1, MEDIA_MIMETYPE_AUDIO_RAW, "midi_a.mid", 22050, 2, kUndefined, kUndefined},
+        {MP3_1, MEDIA_MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_mp3.mp3", 48000, 2, kUndefined,
+         kUndefined},
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "test_stereo_48kHz_opus.opus", 48000, 2, kUndefined,
+         kUndefined},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_stereo_48kHz_vorbis.ogg", 48000, 2, kUndefined,
+         kUndefined},
+
+        // Test (b/151677264) for MP4 extractor
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "crowd_508x240_25fps_hevc.mp4", 508, 240,
+         HEVCProfileMain, 25},
+        {HEVC_2, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "test3.heic", 820, 460, kUndefined, kUndefined},
+        {MPEG2_PS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "swirl_144x136_mpeg2.mpg", 144, 136,
+         MPEG2ProfileMain, 12},
+        {MPEG2_TS_1, MEDIA_MIMETYPE_VIDEO_MPEG2, "bbb_cif_768kbps_30fps_mpeg2.ts", 352, 288,
+         MPEG2ProfileMain, 30},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_cif_768kbps_30fps_mpeg4.mkv", 352, 288,
+         MPEG4ProfileSimple, 30},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_340x280_30fps_vp9.webm", 340, 280, VP9Profile0, 30},
+};
 
 static ExtractorUnitTestEnvironment *gEnv = nullptr;
 
-class ExtractorUnitTest : public ::testing::TestWithParam<pair<string, string>> {
+class ExtractorUnitTest {
   public:
     ExtractorUnitTest() : mInputFp(nullptr), mDataSource(nullptr), mExtractor(nullptr) {}
 
@@ -66,16 +135,29 @@
         }
     }
 
-    virtual void SetUp() override {
+    void setupExtractor(string writerFormat) {
         mExtractorName = unknown_comp;
         mDisableTest = false;
 
         static const std::map<std::string, standardExtractors> mapExtractor = {
-                {"aac", AAC},     {"amr", AMR},         {"mp3", MP3},        {"ogg", OGG},
-                {"wav", WAV},     {"mkv", MKV},         {"flac", FLAC},      {"midi", MIDI},
-                {"mpeg4", MPEG4}, {"mpeg2ts", MPEG2TS}, {"mpeg2ps", MPEG2PS}};
+                {"aac", AAC},
+                {"amr", AMR},
+                {"flac", FLAC},
+                {"mid", MIDI},
+                {"midi", MIDI},
+                {"mkv", MKV},
+                {"mp3", MP3},
+                {"mp4", MPEG4},
+                {"mpeg2ps", MPEG2PS},
+                {"mpeg2ts", MPEG2TS},
+                {"mpeg4", MPEG4},
+                {"mpg", MPEG2PS},
+                {"ogg", OGG},
+                {"opus", OGG},
+                {"ts", MPEG2TS},
+                {"wav", WAV},
+                {"webm", MKV}};
         // Find the component type
-        string writerFormat = GetParam().first;
         if (mapExtractor.find(writerFormat) != mapExtractor.end()) {
             mExtractorName = mapExtractor.at(writerFormat);
         }
@@ -112,6 +194,39 @@
     MediaExtractorPluginHelper *mExtractor;
 };
 
+class ExtractorFunctionalityTest
+    : public ExtractorUnitTest,
+      public ::testing::TestWithParam<tuple<string /* container */, string /* InputFile */,
+                                            int32_t /* numTracks */, bool /* seekSupported */>> {
+  public:
+    virtual void SetUp() override {
+        tuple<string, string, int32_t, bool> params = GetParam();
+        mContainer = get<0>(params);
+        mNumTracks = get<2>(params);
+        setupExtractor(mContainer);
+    }
+    string mContainer;
+    int32_t mNumTracks;
+};
+
+class ConfigParamTest : public ExtractorUnitTest,
+                        public ::testing::TestWithParam<pair<string, inputID>> {
+  public:
+    virtual void SetUp() override { setupExtractor(GetParam().first); }
+
+    struct configFormat {
+        string mime;
+        int32_t width;
+        int32_t height;
+        int32_t sampleRate;
+        int32_t channelCount;
+        int32_t profile;
+        int32_t frameRate;
+    };
+
+    void getFileProperties(inputID inputId, string &inputFile, configFormat &configParam);
+};
+
 int32_t ExtractorUnitTest::setDataSource(string inputFileName) {
     mInputFp = fopen(inputFileName.c_str(), "rb");
     if (!mInputFp) {
@@ -168,6 +283,75 @@
     return 0;
 }
 
+void ConfigParamTest::getFileProperties(inputID inputId, string &inputFile,
+                                        configFormat &configParam) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t inputIdx = 0;
+    for (; inputIdx < inputDataSize; inputIdx++) {
+        if (inputId == kInputData[inputIdx].inpId) {
+            break;
+        }
+    }
+    if (inputIdx == inputDataSize) {
+        return;
+    }
+    inputFile += kInputData[inputIdx].inputFile;
+    configParam.mime = kInputData[inputIdx].mime;
+    size_t found = configParam.mime.find("audio/");
+    // Check if 'audio/' is present at the beginning of the mime type
+    if (found == 0) {
+        configParam.sampleRate = kInputData[inputIdx].firstParam;
+        configParam.channelCount = kInputData[inputIdx].secondParam;
+    } else {
+        configParam.width = kInputData[inputIdx].firstParam;
+        configParam.height = kInputData[inputIdx].secondParam;
+    }
+    configParam.profile = kInputData[inputIdx].profile;
+    configParam.frameRate = kInputData[inputIdx].frameRate;
+    return;
+}
+
+void randomSeekTest(MediaTrackHelper *track, int64_t clipDuration) {
+    int32_t status = 0;
+    int32_t seekCount = 0;
+    bool hasTimestamp = false;
+    vector<int64_t> seekToTimeStamp;
+    string seekPtsString;
+
+    srand(kRandomSeed);
+    while (seekCount < kMaxCount) {
+        int64_t timeStamp = ((double)rand() / RAND_MAX) * clipDuration;
+        seekToTimeStamp.push_back(timeStamp);
+        seekPtsString.append(to_string(timeStamp));
+        seekPtsString.append(", ");
+        seekCount++;
+    }
+
+    for (int64_t seekPts : seekToTimeStamp) {
+        MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                CMediaTrackReadOptions::SEEK_CLOSEST | CMediaTrackReadOptions::SEEK, seekPts);
+        ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+        MediaBufferHelper *buffer = nullptr;
+        status = track->read(&buffer, options);
+        if (buffer) {
+            AMediaFormat *metaData = buffer->meta_data();
+            int64_t timeStamp = 0;
+            hasTimestamp = AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+            ASSERT_TRUE(hasTimestamp) << "Extractor didn't set timestamp for the given sample";
+
+            buffer->release();
+            EXPECT_LE(abs(timeStamp - seekPts), kRandomSeekToleranceUs)
+                    << "Seek unsuccessful. Expected timestamp range ["
+                    << seekPts - kRandomSeekToleranceUs << ", " << seekPts + kRandomSeekToleranceUs
+                    << "] "
+                    << "received " << timeStamp << ", list of input seek timestamps ["
+                    << seekPtsString << "]";
+        }
+        delete options;
+    }
+}
+
 void getSeekablePoints(vector<int64_t> &seekablePoints, MediaTrackHelper *track) {
     int32_t status = 0;
     if (!seekablePoints.empty()) {
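
Note: the EXPECT_LE in randomSeekTest() above allows the returned sample to differ from the requested position by at most kRandomSeekToleranceUs = 2 x 20 ms = 40 ms; for example, a seek to 123456 us passes for any timestamp in [83456, 163456] us. The same bound as a tiny standalone check:

#include <cstdint>

// Sketch of the tolerance check used by randomSeekTest() (values in microseconds).
static bool seekWithinTolerance(int64_t requestedUs, int64_t returnedUs) {
    constexpr int64_t kToleranceUs = 2 * 20000;  // 2 x default audio sample duration
    int64_t diff = returnedUs - requestedUs;
    if (diff < 0) {
        diff = -diff;
    }
    return diff <= kToleranceUs;
}
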
@@ -190,20 +374,21 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, CreateExtractorTest) {
+TEST_P(ExtractorFunctionalityTest, CreateExtractorTest) {
     if (mDisableTest) return;
 
     ALOGV("Checks if a valid extractor is created for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
-    ASSERT_EQ(setDataSource(inputFileName), 0)
-            << "SetDataSource failed for" << GetParam().first << "extractor";
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
-    ASSERT_EQ(createExtractor(), 0)
-            << "Extractor creation failed for" << GetParam().first << "extractor";
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
-    // A valid extractor instace should return success for following calls
-    ASSERT_GT(mExtractor->countTracks(), 0);
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
 
     AMediaFormat *format = AMediaFormat_new();
     ASSERT_NE(format, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -212,20 +397,21 @@
     AMediaFormat_delete(format);
 }
 
-TEST_P(ExtractorUnitTest, ExtractorTest) {
+TEST_P(ExtractorFunctionalityTest, ExtractorTest) {
     if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor for a given input file", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Validates %s Extractor for a given input file", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
 
     for (int32_t idx = 0; idx < numTracks; idx++) {
         MediaTrackHelper *track = mExtractor->getTrack(idx);
@@ -262,20 +448,21 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, MetaDataComparisonTest) {
+TEST_P(ExtractorFunctionalityTest, MetaDataComparisonTest) {
     if (mDisableTest) return;
 
     ALOGV("Validates Extractor's meta data for a given input file");
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
 
     AMediaFormat *extractorFormat = AMediaFormat_new();
     ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
@@ -318,7 +505,7 @@
                                               &trackSampleRate));
             ASSERT_EQ(exChannelCount, trackChannelCount) << "ChannelCount not as expected";
             ASSERT_EQ(exSampleRate, trackSampleRate) << "SampleRate not as expected";
-        } else {
+        } else if (!strncmp(extractorMime, "video/", 6)) {
             int32_t exWidth, exHeight;
             int32_t trackWidth, trackHeight;
             ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_WIDTH, &exWidth));
@@ -327,6 +514,8 @@
             ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
             ASSERT_EQ(exWidth, trackWidth) << "Width not as expected";
             ASSERT_EQ(exHeight, trackHeight) << "Height not as expected";
+        } else {
+            ALOGV("non a/v track");
         }
         status = cTrack->stop(track);
         ASSERT_EQ(OK, status) << "Failed to stop the track";
@@ -337,20 +526,21 @@
     AMediaFormat_delete(extractorFormat);
 }
 
-TEST_P(ExtractorUnitTest, MultipleStartStopTest) {
+TEST_P(ExtractorFunctionalityTest, MultipleStartStopTest) {
     if (mDisableTest) return;
 
-    ALOGV("Test %s extractor for multiple start and stop calls", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    ALOGV("Test %s extractor for multiple start and stop calls", mContainer.c_str());
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
 
     // start/stop the tracks multiple times
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -379,27 +569,29 @@
     }
 }
 
-TEST_P(ExtractorUnitTest, SeekTest) {
-    // Both Flac and Wav extractor can give samples from any pts and mark the given sample as
-    // sync frame. So, this seek test is not applicable to FLAC and WAV extractors
-    if (mDisableTest || mExtractorName == FLAC || mExtractorName == WAV) return;
+TEST_P(ExtractorFunctionalityTest, SeekTest) {
+    if (mDisableTest) return;
 
-    ALOGV("Validates %s Extractor behaviour for different seek modes", GetParam().first.c_str());
-    string inputFileName = gEnv->getRes() + GetParam().second;
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+    ALOGV("Validates %s Extractor behaviour for different seek modes filename %s",
+          mContainer.c_str(), inputFileName.c_str());
 
     int32_t status = setDataSource(inputFileName);
-    ASSERT_EQ(status, 0) << "SetDataSource failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
 
     status = createExtractor();
-    ASSERT_EQ(status, 0) << "Extractor creation failed for" << GetParam().first << "extractor";
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
 
     int32_t numTracks = mExtractor->countTracks();
-    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
 
     uint32_t seekFlag = mExtractor->flags();
-    if (!(seekFlag & MediaExtractorPluginHelper::CAN_SEEK)) {
-        cout << "[   WARN   ] Test Skipped. " << GetParam().first
-             << " Extractor doesn't support seek\n";
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << " extractor is expected to support seek";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
         return;
     }
 
@@ -415,19 +607,73 @@
         MediaBufferGroup *bufferGroup = new MediaBufferGroup();
         status = cTrack->start(track, bufferGroup->wrap());
         ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
-        getSeekablePoints(seekablePoints, track);
-        ASSERT_GT(seekablePoints.size(), 0)
-                << "Failed to get seekable points for " << GetParam().first << " extractor";
+
+        // For the FLAC, WAV and MIDI extractors, every sample is a seek point, so we cannot
+        // build a list of all seekable points for them. If we pass a seekToTimeStamp that lies
+        // between two seek points, the extractor may return the timestamp of the next sample
+        // as the seeked-to position. That timestamp may or may not be part of the seekable
+        // point vector, which would make the test fail. So we validate these extractors with
+        // the random seek test instead.
+        if (mExtractorName == FLAC || mExtractorName == WAV || mExtractorName == MIDI) {
+            AMediaFormat *trackMeta = AMediaFormat_new();
+            ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+            status = mExtractor->getTrackMetaData(trackMeta, idx, 1);
+            ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+            int64_t clipDuration = 0;
+            AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+            ASSERT_GT(clipDuration, 0) << "Invalid clip duration ";
+            randomSeekTest(track, clipDuration);
+            AMediaFormat_delete(trackMeta);
+            continue;
+        }
 
         AMediaFormat *trackFormat = AMediaFormat_new();
         ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
         status = track->getFormat(trackFormat);
         ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
 
-        bool isOpus = false;
         const char *mime;
-        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
-        if (!strcmp(mime, "audio/opus")) isOpus = true;
+        ASSERT_TRUE(AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime))
+                << "Failed to get mime";
+
+        // Image formats are not expected to be seekable
+        if (!strncmp(mime, "image/", 6)) continue;
+
+        // Request the seekable points for the remaining extractors; these are used to validate
+        // seek accuracy. Depending on the seek mode, we expect the extractor to return the
+        // corresponding sync frame. The random seek test is not suitable for these extractors
+        // because they are not expected to seek to arbitrary samples. MP4, for instance, can
+        // seek to the next/previous sync frame but not to samples between two sync frames.
+        getSeekablePoints(seekablePoints, track);
+        ASSERT_GT(seekablePoints.size(), 0)
+                << "Failed to get seekable points for " << mContainer << " extractor";
+
+        bool isOpus = false;
+        int64_t opusSeekPreRollUs = 0;
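+        // For Opus, the seek pre-roll is read from the "csd-2" buffer when the extractor
+        // provides it directly; otherwise it is parsed out of the "csd-0" OpusHead blob via
+        // GetOpusHeaderBuffers().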
+        if (!strcmp(mime, "audio/opus")) {
+            isOpus = true;
+            void *seekPreRollBuf = nullptr;
+            size_t size = 0;
+            if (!AMediaFormat_getBuffer(trackFormat, "csd-2", &seekPreRollBuf, &size)) {
+                size_t opusHeadSize = 0;
+                size_t codecDelayBufSize = 0;
+                size_t seekPreRollBufSize = 0;
+                void *csdBuffer = nullptr;
+                void *opusHeadBuf = nullptr;
+                void *codecDelayBuf = nullptr;
+                AMediaFormat_getBuffer(trackFormat, "csd-0", &csdBuffer, &size);
+                ASSERT_NE(csdBuffer, nullptr);
+
+                GetOpusHeaderBuffers((uint8_t *)csdBuffer, size, &opusHeadBuf, &opusHeadSize,
+                                     &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                     &seekPreRollBufSize);
+            }
+            ASSERT_NE(seekPreRollBuf, nullptr)
+                    << "Invalid track format. SeekPreRoll info missing for Opus file";
+            opusSeekPreRollUs = *((int64_t *)seekPreRollBuf);
+        }
         AMediaFormat_delete(trackFormat);
 
         int32_t seekIdx = 0;
@@ -439,7 +685,8 @@
                 if (seekIdx >= seekablePointsSize) seekIdx = seekablePointsSize - 1;
 
                 int64_t seekToTimeStamp = seekablePoints[seekIdx];
-                if (seekablePointsSize > 1) {
+                if (seekIdx > 1) {
+                    // pick a time just earlier than this seek point
                     int64_t prevTimeStamp = seekablePoints[seekIdx - 1];
                     seekToTimeStamp = seekToTimeStamp - ((seekToTimeStamp - prevTimeStamp) >> 3);
                 }
@@ -448,7 +695,7 @@
                 // extractor is calculated based on (seekPts - seekPreRollUs).
                 // So we add the preRoll value to the timeStamp we want to seek to.
                 if (isOpus) {
-                    seekToTimeStamp += kOpusSeekPreRollUs;
+                    seekToTimeStamp += opusSeekPreRollUs;
                 }
 
                 MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
@@ -470,11 +717,7 @@
                     // CMediaTrackReadOptions::SEEK is 8. Using mask 0111b to get true modes
                     switch (mode & 0x7) {
                         case CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC:
-                            if (seekablePointsSize == 1) {
-                                EXPECT_EQ(timeStamp, seekablePoints[seekIdx]);
-                            } else {
-                                EXPECT_EQ(timeStamp, seekablePoints[seekIdx - 1]);
-                            }
+                            EXPECT_EQ(timeStamp, seekablePoints[seekIdx > 0 ? (seekIdx - 1) : 0]);
                             break;
                         case CMediaTrackReadOptions::SEEK_NEXT_SYNC:
                         case CMediaTrackReadOptions::SEEK_CLOSEST_SYNC:
@@ -496,24 +739,523 @@
     seekablePoints.clear();
 }
 
-// TODO: (b/145332185)
-// Add MIDI inputs
-INSTANTIATE_TEST_SUITE_P(ExtractorUnitTestAll, ExtractorUnitTest,
-                         ::testing::Values(make_pair("aac", "loudsoftaac.aac"),
-                                           make_pair("amr", "testamr.amr"),
-                                           make_pair("amr", "amrwb.wav"),
-                                           make_pair("ogg", "john_cage.ogg"),
-                                           make_pair("wav", "monotestgsm.wav"),
-                                           make_pair("mpeg2ts", "segment000001.ts"),
-                                           make_pair("flac", "sinesweepflac.flac"),
-                                           make_pair("ogg", "testopus.opus"),
-                                           make_pair("mkv", "sinesweepvorbis.mkv"),
-                                           make_pair("mpeg4", "sinesweepoggmp4.mp4"),
-                                           make_pair("mp3", "sinesweepmp3lame.mp3"),
-                                           make_pair("mkv", "swirl_144x136_vp9.webm"),
-                                           make_pair("mkv", "swirl_144x136_vp8.webm"),
-                                           make_pair("mpeg2ps", "swirl_144x136_mpeg2.mpg"),
-                                           make_pair("mpeg4", "swirl_132x130_mpeg4.mp4")));
+// Tests the extractors for seeks outside the valid range (0, clipDuration)
+TEST_P(ExtractorFunctionalityTest, MonkeySeekTest) {
+    if (mDisableTest) return;
+    // TODO(b/155630778): Enable test for wav extractors
+    if (mExtractorName == WAV) return;
+
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+    ALOGV("Validates %s Extractor behaviour for invalid seek points, filename %s",
+          mContainer.c_str(), inputFileName.c_str());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
+
+    uint32_t seekFlag = mExtractor->flags();
+    bool seekSupported = get<3>(GetParam());
+    bool seekable = seekFlag & MediaExtractorPluginHelper::CAN_SEEK;
+    if (!seekable) {
+        ASSERT_FALSE(seekSupported) << mContainer << " extractor is expected to support seek";
+        cout << "[   WARN   ] Test Skipped. " << mContainer << " Extractor doesn't support seek\n";
+        return;
+    }
+
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << idx;
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << idx;
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        AMediaFormat *trackMeta = AMediaFormat_new();
+        ASSERT_NE(trackMeta, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                trackMeta, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get trackMetaData";
+
+        const char *mime;
+        ASSERT_TRUE(AMediaFormat_getString(trackMeta, AMEDIAFORMAT_KEY_MIME, &mime))
+                << "Failed to get mime";
+
+        int64_t clipDuration = 0;
+        AMediaFormat_getInt64(trackMeta, AMEDIAFORMAT_KEY_DURATION, &clipDuration);
+        // Image formats are not expected to have duration information
+        ASSERT_TRUE(clipDuration > 0 || !strncmp(mime, "image/", 6)) << "Invalid clip duration ";
+        AMediaFormat_delete(trackMeta);
+
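+        // Mix of invalid (negative, past the end) and valid (mid-clip) seek positions; the
+        // extractor must handle each of them gracefully, returning either a buffer or EOS.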
+        int64_t seekToTimeStampUs[] = {-clipDuration, clipDuration / 2, clipDuration,
+                                       clipDuration * 2};
+        for (int32_t mode = CMediaTrackReadOptions::SEEK_PREVIOUS_SYNC;
+             mode <= CMediaTrackReadOptions::SEEK_CLOSEST; mode++) {
+            for (int64_t seekTimeUs : seekToTimeStampUs) {
+                MediaTrackHelper::ReadOptions *options = new MediaTrackHelper::ReadOptions(
+                        mode | CMediaTrackReadOptions::SEEK, seekTimeUs);
+                ASSERT_NE(options, nullptr) << "Cannot create read option";
+
+                MediaBufferHelper *buffer = nullptr;
+                status = track->read(&buffer, options);
+                if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                    delete options;
+                    continue;
+                }
+                if (buffer) {
+                    AMediaFormat *metaData = buffer->meta_data();
+                    int64_t timeStamp;
+                    AMediaFormat_getInt64(metaData, AMEDIAFORMAT_KEY_TIME_US, &timeStamp);
+                    ALOGV("Seeked to timestamp : %lld, requested : %lld", (long long)timeStamp,
+                          (long long)seekTimeUs);
+                    buffer->release();
+                }
+                delete options;
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+        delete bufferGroup;
+        delete track;
+    }
+}
+
+// Tests the extractors' handling of invalid track indices and a null media format
+TEST_P(ExtractorFunctionalityTest, SanityTest) {
+    if (mDisableTest) return;
+    // TODO(b/155626946): Enable test for MPEG2 TS/PS extractors
+    if (mExtractorName == MPEG2TS || mExtractorName == MPEG2PS) return;
+
+    string inputFileName = gEnv->getRes() + get<1>(GetParam());
+    ALOGV("Validates %s Extractor behaviour for invalid tracks - file %s",
+          mContainer.c_str(), inputFileName.c_str());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for" << mContainer << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for" << mContainer << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_EQ(numTracks, mNumTracks)
+            << "Extractor reported wrong number of track for the given clip";
+
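+    // Query track indices just outside the valid range [0, numTracks) and expect failures.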
+    int32_t trackIdx[] = {-1, numTracks};
+    for (int32_t idx : trackIdx) {
+        MediaTrackHelper *track = mExtractor->getTrack(idx);
+        ASSERT_EQ(track, nullptr) << "Extractor returned a track for invalid index " << idx << "\n";
+
+        AMediaFormat *extractorFormat = AMediaFormat_new();
+        ASSERT_NE(extractorFormat, nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = mExtractor->getTrackMetaData(
+                extractorFormat, idx, MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+        ASSERT_NE(OK, status) << "getTrackMetaData should return error for invalid index " << idx;
+        AMediaFormat_delete(extractorFormat);
+    }
+
+    // Validate Extractor's getTrackMetaData for null format
+    AMediaFormat *mediaFormat = nullptr;
+    status = mExtractor->getTrackMetaData(mediaFormat, 0,
+                                          MediaExtractorPluginHelper::kIncludeExtensiveMetaData);
+    ASSERT_NE(OK, status) << "getTrackMetaData should return error for null Media format";
+}
+
+// This test validates the config params for a given input file.
+// Only single-track files are used here, since the focus of this test is to validate the
+// file properties reported by the extractor, not its multi-track behavior.
+TEST_P(ConfigParamTest, ConfigParamValidation) {
+    if (mDisableTest) return;
+
+    const int trackNumber = 0;
+
+    string container = GetParam().first;
+    string inputFileName = gEnv->getRes();
+    inputID inputFileId = GetParam().second;
+    configFormat configParam;
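+    // getFileProperties() is expected to append the clip name for inputFileId to
+    // inputFileName and fill configParam with the reference values for that clip.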
+    getFileProperties(inputFileId, inputFileName, configParam);
+
+    ALOGV("Validates %s Extractor for input's file properties, file %s",
+          container.c_str(), inputFileName.c_str());
+
+    int32_t status = setDataSource(inputFileName);
+    ASSERT_EQ(status, 0) << "SetDataSource failed for " << container << "extractor";
+
+    status = createExtractor();
+    ASSERT_EQ(status, 0) << "Extractor creation failed for " << container << "extractor";
+
+    int32_t numTracks = mExtractor->countTracks();
+    ASSERT_GT(numTracks, 0) << "Extractor didn't find any track for the given clip";
+
+    MediaTrackHelper *track = mExtractor->getTrack(trackNumber);
+    ASSERT_NE(track, nullptr) << "Failed to get track for index 0";
+
+    AMediaFormat *trackFormat = AMediaFormat_new();
+    ASSERT_NE(trackFormat, nullptr) << "AMediaFormat_new returned null format";
+
+    status = track->getFormat(trackFormat);
+    ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+    const char *trackMime;
+    bool valueFound = AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &trackMime);
+    ASSERT_TRUE(valueFound) << "Mime type not set by extractor";
+    ASSERT_STREQ(configParam.mime.c_str(), trackMime) << "Invalid track format";
+
+    if (!strncmp(trackMime, "audio/", 6)) {
+        int32_t trackSampleRate, trackChannelCount;
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &trackChannelCount));
+        ASSERT_TRUE(
+                AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, &trackSampleRate));
+        ASSERT_EQ(configParam.sampleRate, trackSampleRate) << "SampleRate not as expected";
+        ASSERT_EQ(configParam.channelCount, trackChannelCount) << "ChannelCount not as expected";
+    } else if (!strncmp(trackMime, "video/", 6)) {
+        int32_t trackWidth, trackHeight;
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_WIDTH, &trackWidth));
+        ASSERT_TRUE(AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_HEIGHT, &trackHeight));
+        ASSERT_EQ(configParam.width, trackWidth) << "Width not as expected";
+        ASSERT_EQ(configParam.height, trackHeight) << "Height not as expected";
+
+        if (configParam.frameRate != kUndefined) {
+            int32_t frameRate;
+            ASSERT_TRUE(
+                    AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_FRAME_RATE, &frameRate));
+            ASSERT_EQ(configParam.frameRate, frameRate) << "frameRate not as expected";
+        }
+    }
+    // validate the profile for the input clip
+    int32_t profile;
+    if (configParam.profile != kUndefined) {
+        if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_PROFILE, &profile)) {
+            ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
+        } else if (mExtractorName == AAC &&
+                   AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_AAC_PROFILE, &profile)) {
+            ASSERT_EQ(configParam.profile, profile) << "profile not as expected";
+        } else {
+            ASSERT_TRUE(false) << "profile not returned in extractor";
+        }
+    }
+
+    delete track;
+    AMediaFormat_delete(trackFormat);
+}
+
+class ExtractorComparison
+    : public ExtractorUnitTest,
+      public ::testing::TestWithParam<pair<string /* InputFile0 */, string /* InputFile1 */>> {
+  public:
+    ~ExtractorComparison() {
+        for (int8_t *extractorOp : mExtractorOutput) {
+            if (extractorOp != nullptr) {
+                free(extractorOp);
+            }
+        }
+    }
+
+    int8_t *mExtractorOutput[2]{};
+    size_t mExtractorOutputSize[2]{};
+};
+
+size_t allocateOutputBuffers(string inputFileName, AMediaFormat *extractorFormat) {
+    size_t bufferSize = 0u;
+    // Allocate the buffer as sampleRate * channelCount * clipDuration, since some
+    // extractors (flac, midi and wav) decode the file and advertise the mime type as raw.
+    const char *mime;
+    AMediaFormat_getString(extractorFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+    if (!strcmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        int64_t clipDurationUs = -1;
+        int32_t channelCount = -1;
+        int32_t sampleRate = -1;
+        int32_t bitsPerSample = -1;
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                   &channelCount) || channelCount <= 0) {
+            ALOGE("Invalid channelCount for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate) ||
+            sampleRate <= 0) {
+            ALOGE("Invalid sampleRate for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt64(extractorFormat, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs) ||
+            clipDurationUs <= 0) {
+            ALOGE("Invalid clip duration for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        if (!AMediaFormat_getInt32(extractorFormat, AMEDIAFORMAT_KEY_PCM_ENCODING,
+                                   &bitsPerSample) || bitsPerSample <= 0) {
+            ALOGE("Invalid bits per sample for input file : %s", inputFileName.c_str());
+            return 0;
+        }
+        bufferSize = bitsPerSample * channelCount * sampleRate * (clipDurationUs / 1000000 + 1);
+    } else {
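+        // For compressed tracks the extracted samples are a subset of the file contents, so
+        // the file size serves as an upper bound for the output buffer.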
+        struct stat buf;
+        int32_t status = stat(inputFileName.c_str(), &buf);
+        if (status != 0) {
+            ALOGE("Unable to get file properties for: %s", inputFileName.c_str());
+            return 0;
+        }
+        bufferSize = buf.st_size;
+    }
+    return bufferSize;
+}
+
+// Compare output of two extractors for identical content
+TEST_P(ExtractorComparison, ExtractorComparisonTest) {
+    vector<string> inputFileNames = {GetParam().first, GetParam().second};
+    size_t extractedOutputSize[2]{};
+    AMediaFormat *extractorFormat[2]{};
+    int32_t status = OK;
+
+    for (int32_t idx = 0; idx < inputFileNames.size(); idx++) {
+        string containerFormat = inputFileNames[idx].substr(inputFileNames[idx].find(".") + 1);
+        setupExtractor(containerFormat);
+        if (mDisableTest) {
+            ALOGV("Unknown extractor %s. Skipping the test", containerFormat.c_str());
+            return;
+        }
+
+        ALOGV("Validates %s Extractor for %s", containerFormat.c_str(),
+              inputFileNames[idx].c_str());
+        string inputFileName = gEnv->getRes() + inputFileNames[idx];
+
+        status = setDataSource(inputFileName);
+        ASSERT_EQ(status, 0) << "SetDataSource failed for" << containerFormat << "extractor";
+
+        status = createExtractor();
+        ASSERT_EQ(status, 0) << "Extractor creation failed for " << containerFormat << " extractor";
+
+        int32_t numTracks = mExtractor->countTracks();
+        ASSERT_EQ(numTracks, 1) << "This test expects inputs with one track only";
+
+        int32_t trackIdx = 0;
+        MediaTrackHelper *track = mExtractor->getTrack(trackIdx);
+        ASSERT_NE(track, nullptr) << "Failed to get track for index " << trackIdx;
+
+        extractorFormat[idx] = AMediaFormat_new();
+        ASSERT_NE(extractorFormat[idx], nullptr) << "AMediaFormat_new returned null AMediaformat";
+
+        status = track->getFormat(extractorFormat[idx]);
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to get track meta data";
+
+        CMediaTrack *cTrack = wrap(track);
+        ASSERT_NE(cTrack, nullptr) << "Failed to get track wrapper for index " << trackIdx;
+
+        mExtractorOutputSize[idx] = allocateOutputBuffers(inputFileName, extractorFormat[idx]);
+        ASSERT_GT(mExtractorOutputSize[idx], 0u) << "Invalid size for output buffers";
+
+        mExtractorOutput[idx] = (int8_t *)calloc(1, mExtractorOutputSize[idx]);
+        ASSERT_NE(mExtractorOutput[idx], nullptr)
+                << "Unable to allocate memory for writing extractor's output";
+
+        MediaBufferGroup *bufferGroup = new MediaBufferGroup();
+        status = cTrack->start(track, bufferGroup->wrap());
+        ASSERT_EQ(OK, (media_status_t)status) << "Failed to start the track";
+
+        int32_t offset = 0;
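+        // Drain the track completely, appending every sample so that the raw byte streams
+        // produced by the two extractors can be compared afterwards.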
+        while (status != AMEDIA_ERROR_END_OF_STREAM) {
+            MediaBufferHelper *buffer = nullptr;
+            status = track->read(&buffer);
+            ALOGV("track->read Status = %d buffer %p", status, buffer);
+            if (buffer) {
+                ASSERT_LE(offset + buffer->range_length(), mExtractorOutputSize[idx])
+                        << "Memory overflow. Extracted output is larger than the allocated buffer";
+
+                memcpy(mExtractorOutput[idx] + offset, buffer->data(), buffer->range_length());
+                extractedOutputSize[idx] += buffer->range_length();
+                offset += buffer->range_length();
+                buffer->release();
+            }
+        }
+        status = cTrack->stop(track);
+        ASSERT_EQ(OK, status) << "Failed to stop the track";
+
+        fclose(mInputFp);
+        delete bufferGroup;
+        delete track;
+        mDataSource.clear();
+        delete mExtractor;
+        mInputFp = nullptr;
+        mExtractor = nullptr;
+    }
+
+    // Compare the meta data from both the extractors
+    const char *mime[2];
+    AMediaFormat_getString(extractorFormat[0], AMEDIAFORMAT_KEY_MIME, &mime[0]);
+    AMediaFormat_getString(extractorFormat[1], AMEDIAFORMAT_KEY_MIME, &mime[1]);
+    ASSERT_STREQ(mime[0], mime[1]) << "Mismatch between extractors' mime formats";
+
+    if (!strncmp(mime[0], "audio/", 6)) {
+        int32_t channelCount0, channelCount1;
+        int32_t sampleRate0, sampleRate1;
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &channelCount0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                          &sampleRate0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                          &channelCount1));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                          &sampleRate1));
+        ASSERT_EQ(channelCount0, channelCount1) << "Mismatch between extractors' channelCount";
+        ASSERT_EQ(sampleRate0, sampleRate1) << "Mismatch between extractors' sampleRate";
+    } else if (!strncmp(mime[0], "video/", 6)) {
+        int32_t width0, height0;
+        int32_t width1, height1;
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_WIDTH, &width0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[0], AMEDIAFORMAT_KEY_HEIGHT, &height0));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_WIDTH, &width1));
+        ASSERT_TRUE(AMediaFormat_getInt32(extractorFormat[1], AMEDIAFORMAT_KEY_HEIGHT, &height1));
+        ASSERT_EQ(width0, width1) << "Mismatch between extractors' width";
+        ASSERT_EQ(height0, height1) << "Mismatch between extractors' height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime type " << mime[0];
+    }
+
+    for (AMediaFormat *exFormat : extractorFormat) {
+        AMediaFormat_delete(exFormat);
+    }
+
+    // Compare the extracted outputs of both extractor
+    ASSERT_EQ(extractedOutputSize[0], extractedOutputSize[1])
+            << "Extractor's output size doesn't match between " << inputFileNames[0] << "and "
+            << inputFileNames[1] << " extractors";
+    status = memcmp(mExtractorOutput[0], mExtractorOutput[1], extractedOutputSize[0]);
+    ASSERT_EQ(status, 0) << "Extracted content mismatch between " << inputFileNames[0] << "and "
+                         << inputFileNames[1] << " extractors";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        ExtractorComparisonAll, ExtractorComparison,
+        ::testing::Values(make_pair("swirl_144x136_vp9.mp4", "swirl_144x136_vp9.webm"),
+                          make_pair("video_480x360_mp4_vp9_333kbps_25fps.mp4",
+                                    "video_480x360_webm_vp9_333kbps_25fps.webm"),
+                          make_pair("video_1280x720_av1_hdr_static_3mbps.mp4",
+                                    "video_1280x720_av1_hdr_static_3mbps.webm"),
+                          make_pair("swirl_132x130_mpeg4.3gp", "swirl_132x130_mpeg4.mkv"),
+                          make_pair("swirl_144x136_avc.mkv", "swirl_144x136_avc.mp4"),
+                          make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mkv"),
+                          make_pair("crowd_508x240_25fps_hevc.mp4","crowd_508x240_25fps_hevc.mkv"),
+                          make_pair("bbb_cif_768kbps_30fps_mpeg2.mp4",
+                                    "bbb_cif_768kbps_30fps_mpeg2.ts"),
+
+                          make_pair("loudsoftaac.aac", "loudsoftaac.mkv"),
+                          make_pair("sinesweepflacmkv.mkv", "sinesweepflacmp4.mp4"),
+                          make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mkv"),
+                          make_pair("sinesweepoggmp4.mp4", "sinesweepogg.ogg"),
+                          make_pair("sinesweepvorbis.mp4", "sinesweepvorbis.ogg"),
+                          make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.ogg"),
+                          make_pair("testopus.mkv", "testopus.mp4"),
+                          make_pair("testopus.mp4", "testopus.opus"),
+
+                          make_pair("loudsoftaac.aac", "loudsoftaac.aac"),
+                          make_pair("testamr.amr", "testamr.amr"),
+                          make_pair("sinesweepflac.flac", "sinesweepflac.flac"),
+                          make_pair("midi_a.mid", "midi_a.mid"),
+                          make_pair("sinesweepvorbis.mkv", "sinesweepvorbis.mkv"),
+                          make_pair("sinesweepmp3lame.mp3", "sinesweepmp3lame.mp3"),
+                          make_pair("sinesweepoggmp4.mp4", "sinesweepoggmp4.mp4"),
+                          make_pair("testopus.opus", "testopus.opus"),
+                          make_pair("john_cage.ogg", "john_cage.ogg"),
+                          make_pair("monotestgsm.wav", "monotestgsm.wav"),
+
+                          make_pair("swirl_144x136_mpeg2.mpg", "swirl_144x136_mpeg2.mpg"),
+                          make_pair("swirl_132x130_mpeg4.mp4", "swirl_132x130_mpeg4.mp4"),
+                          make_pair("swirl_144x136_vp9.webm", "swirl_144x136_vp9.webm"),
+                          make_pair("swirl_144x136_vp8.webm", "swirl_144x136_vp8.webm")));
+
+INSTANTIATE_TEST_SUITE_P(ConfigParamTestAll, ConfigParamTest,
+                         ::testing::Values(make_pair("aac", AAC_1),
+                                           make_pair("amr", AMR_NB_1),
+                                           make_pair("amr", AMR_WB_1),
+                                           make_pair("flac", FLAC_1),
+                                           make_pair("wav", GSM_1),
+                                           make_pair("midi", MIDI_1),
+                                           make_pair("mp3", MP3_1),
+                                           make_pair("ogg", OPUS_1),
+                                           make_pair("ogg", VORBIS_1),
+
+                                           make_pair("mpeg4", HEVC_1),
+                                           make_pair("mpeg4", HEVC_2),
+                                           make_pair("mpeg2ps", MPEG2_PS_1),
+                                           make_pair("mpeg2ts", MPEG2_TS_1),
+                                           make_pair("mkv", MPEG4_1),
+                                           make_pair("mkv", VP9_1)));
+
+// Validate extractors for container format, input file, no. of tracks and supports seek flag
+INSTANTIATE_TEST_SUITE_P(
+        ExtractorUnitTestAll, ExtractorFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", "loudsoftaac.aac", 1, true),
+                make_tuple("amr", "testamr.amr", 1, true),
+                make_tuple("amr", "amrwb.wav", 1, true),
+                make_tuple("flac", "sinesweepflac.flac", 1, true),
+                make_tuple("midi", "midi_a.mid", 1, true),
+                make_tuple("mkv", "sinesweepvorbis.mkv", 1, true),
+                make_tuple("mkv", "sinesweepmp3lame.mkv", 1, true),
+                make_tuple("mkv", "loudsoftaac.mkv", 1, true),
+                make_tuple("mp3", "sinesweepmp3lame.mp3", 1, true),
+                make_tuple("mp3", "id3test10.mp3", 1, true),
+                make_tuple("mpeg2ts", "segment000001.ts", 2, false),
+                make_tuple("mpeg2ts", "testac3ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testac4ts.ts", 1, false),
+                make_tuple("mpeg2ts", "testeac3ts.ts", 1, false),
+                make_tuple("mpeg4", "audio_aac_mono_70kbs_44100hz.mp4", 2, true),
+                make_tuple("mpeg4", "multi0_ac4.mp4", 1, true),
+                make_tuple("mpeg4", "noise_6ch_44khz_aot5_dr_sbr_sig2_mp4.m4a", 1, true),
+                make_tuple("mpeg4", "sinesweepalac.mov", 1, true),
+                make_tuple("mpeg4", "sinesweepflacmp4.mp4", 1, true),
+                make_tuple("mpeg4", "sinesweepm4a.m4a", 1, true),
+                make_tuple("mpeg4", "sinesweepoggmp4.mp4", 1, true),
+                make_tuple("mpeg4", "sinesweepopusmp4.mp4", 1, true),
+                make_tuple("mpeg4", "testac3mp4.mp4", 1, true),
+                make_tuple("mpeg4", "testeac3mp4.mp4", 1, true),
+                make_tuple("ogg", "john_cage.ogg", 1, true),
+                make_tuple("ogg", "testopus.opus", 1, true),
+                make_tuple("ogg", "sinesweepoggalbumart.ogg", 1, true),
+                make_tuple("wav", "loudsoftwav.wav", 1, true),
+                make_tuple("wav", "monotestgsm.wav", 1, true),
+                make_tuple("wav", "noise_5ch_44khz_aot2_wave.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_alaw.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_f32le.wav", 1, true),
+                make_tuple("wav", "sine1khzm40db_mulaw.wav", 1, true),
+
+                make_tuple("mkv", "swirl_144x136_avc.mkv", 1, true),
+                make_tuple("mkv", "withoutcues.mkv", 2, true),
+                make_tuple("mkv", "swirl_144x136_vp9.webm", 1, true),
+                make_tuple("mkv", "swirl_144x136_vp8.webm", 1, true),
+                make_tuple("mpeg2ps", "swirl_144x136_mpeg2.mpg", 1, false),
+                make_tuple("mpeg2ps", "programstream.mpeg", 2, false),
+                make_tuple("mpeg4", "color_176x144_bt601_525_lr_sdr_h264.mp4", 1, true),
+                make_tuple("mpeg4", "heifwriter_input.heic", 4, false),
+                make_tuple("mpeg4", "psshtest.mp4", 1, true),
+                make_tuple("mpeg4", "swirl_132x130_mpeg4.mp4", 1, true),
+                make_tuple("mpeg4", "testvideo.3gp", 4, true),
+                make_tuple("mpeg4", "testvideo_with_2_timedtext_tracks.3gp", 4, true),
+                make_tuple("mpeg4",
+                           "video_176x144_3gp_h263_300kbps_25fps_aac_stereo_128kbps_11025hz_"
+                           "metadata_gyro_compliant.3gp",
+                           3, true),
+                make_tuple(
+                        "mpeg4",
+                        "video_1920x1080_mp4_mpeg2_12000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+                        2, true),
+                make_tuple("mpeg4",
+                           "video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 2,
+                           true),
+                make_tuple(
+                        "mpeg4",
+                        "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4",
+                        2, true)));
 
 int main(int argc, char **argv) {
     gEnv = new ExtractorUnitTestEnvironment();
diff --git a/media/extractors/tests/README.md b/media/extractors/tests/README.md
index 69538b6..cff09ca 100644
--- a/media/extractors/tests/README.md
+++ b/media/extractors/tests/README.md
@@ -22,7 +22,7 @@
 adb push ${OUT}/data/nativetest/ExtractorUnitTest/ExtractorUnitTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor-1.4.zip). Download, unzip and push these files to the device for testing.
 
 ```
 adb push extractor /data/local/tmp/
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index 5d38a81..cc5e1c7 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -1,7 +1,24 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_extractors_wav_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_extractors_wav_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libwavextractor",
 
-    defaults: ["extractor-defaults"],
+    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
 
     srcs: ["WAVExtractor.cpp"],
 
@@ -19,4 +36,11 @@
         "libfifo",
         "libstagefright_foundation",
     ],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/extractors/wav/WAVExtractor.cpp b/media/extractors/wav/WAVExtractor.cpp
index 901b29d..9e94587 100644
--- a/media/extractors/wav/WAVExtractor.cpp
+++ b/media/extractors/wav/WAVExtractor.cpp
@@ -440,19 +440,22 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
-        int64_t pos = 0;
-
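+        // Convert the requested seek time to a sample index with overflow checks; any
+        // multiplication overflow below is reported as AMEDIA_ERROR_MALFORMED.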
+        int64_t pos;
+        int64_t sampleNumber;
+        bool overflowed = __builtin_mul_overflow(seekTimeUs, mSampleRate, &sampleNumber);
+        sampleNumber /= 1000000;
         if (mWaveFormat == WAVE_FORMAT_MSGSM) {
             // 65 bytes decode to 320 8kHz samples
-            int64_t samplenumber = (seekTimeUs * mSampleRate) / 1000000;
-            int64_t framenumber = samplenumber / 320;
-            pos = framenumber * 65;
+            pos = sampleNumber / 320 * 65;
         } else {
-            pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
+            int64_t bytesPerFrame;
+            overflowed |= __builtin_mul_overflow(mNumChannels, mBitsPerSample >> 3, &bytesPerFrame);
+            overflowed |= __builtin_mul_overflow(bytesPerFrame, sampleNumber, &pos);
         }
-        if (pos > (off64_t)mSize) {
-            pos = mSize;
+        if (overflowed) {
+            return AMEDIA_ERROR_MALFORMED;
         }
+        pos = std::clamp(pos, (int64_t) 0, (int64_t) mSize);
         mCurrentPos = pos + mOffset;
     }
 
diff --git a/media/img_utils/Android.bp b/media/img_utils/Android.bp
index 64530e1..237cd2b 100644
--- a/media/img_utils/Android.bp
+++ b/media/img_utils/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libimg_utils",
 
diff --git a/media/janitors/README b/media/janitors/README
new file mode 100644
index 0000000..9db8e0e
--- /dev/null
+++ b/media/janitors/README
@@ -0,0 +1,4 @@
+A collection of OWNERS files that we reference from other projects,
+such as the software codecs in directories like external/libavc.
+This is to simplify our owner/approver management across the multiple
+projects related to media.
diff --git a/media/janitors/codec_OWNERS b/media/janitors/codec_OWNERS
new file mode 100644
index 0000000..e201399
--- /dev/null
+++ b/media/janitors/codec_OWNERS
@@ -0,0 +1,5 @@
+# gerrit owner/approvers for the actual software codec libraries
+# differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
+essick@google.com
+lajos@google.com
+marcone@google.com
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 140052f..add28e0 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 ndk_headers {
     name: "libAAudio_headers",
     from: "include",
@@ -32,5 +41,6 @@
 cc_library_headers {
     name: "libaaudio_headers",
     export_include_dirs: ["include"],
+    export_shared_lib_headers: ["aaudio-aidl-cpp"],
+    shared_libs: ["aaudio-aidl-cpp"],
 }
-
diff --git a/media/libaaudio/Doxyfile.orig b/media/libaaudio/Doxyfile.orig
deleted file mode 100644
index 137facb..0000000
--- a/media/libaaudio/Doxyfile.orig
+++ /dev/null
@@ -1,2303 +0,0 @@
-# Doxyfile 1.8.6
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all text
-# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
-# for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING      = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME           = "My Project"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER         =
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF          =
-
-# With the PROJECT_LOGO tag one can specify an logo or icon that is included in
-# the documentation. The maximum height of the logo should not exceed 55 pixels
-# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo
-# to the output directory.
-
-PROJECT_LOGO           =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY       =
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise causes
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS         = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE        = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC      = YES
-
-# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF           = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity):The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF       =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC    = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB  = NO
-
-# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path
-# before files name in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES        = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH        =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH    =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful is your file systems doesn't
-# support long names like on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES            = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF      = NO
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF           = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that rational rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS           = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
-# new page for each member. If set to NO, the documentation of a member will be
-# part of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES  = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE               = 4
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines.
-
-ALIASES                =
-
-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST              =
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C  = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA   = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN   = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL   = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
-# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
-# (default is Fortran), use: inc=Fortran f=C.
-#
-# Note: For files without an extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING      =
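-
-# Illustrative sketch only (not part of the original configuration): the
-# mapping described in the comment above would be written as
-# EXTENSION_MAPPING = inc=Fortran f=C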
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibility issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT       = YES
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by putting a % sign in front of the word
-# or globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT       = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match function declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also makes the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT    = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT        = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT            = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT   = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES, then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC   = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING            = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS  = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT   = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE      = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL            = NO
-
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE        = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE        = NO
-
-# If the EXTRACT_STATIC tag is set to YES all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC         = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES  = YES
-
-# This flag is only useful for Objective-C code. When set to YES local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS  = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespaces
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES   = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS     = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO these classes will be included in the various overviews. This option has
-# no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES     = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS  = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS      = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS          = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES       = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES       = NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES     = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC  = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES   = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO            = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS       = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS        = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES       = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME     = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING  = NO
-
-# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the
-# todo list. This list is created by putting \todo commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST      = YES
-
-# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the
-# test list. This list is created by putting \test commands in the
-# documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST      = YES
-
-# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST       = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
-
-ENABLED_SECTIONS       =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES  = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES the list
-# will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES        = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES             = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES        = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command <command> <input-file>, where <command> is the value of the
-# FILE_VERSION_FILTER tag, and <input-file> is the name of an input file provided
-# by doxygen. Whatever the program writes to standard output is used as the file
-# version. For an example see the documentation.
-
-FILE_VERSION_FILTER    =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE            =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. Do not use file names with spaces, bibtex cannot handle them. See
-# also \cite for info how to create references.
-
-CITE_BIB_FILES         =
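-
-# Illustrative sketch only (not part of the original configuration; the file
-# name is hypothetical):
-# CITE_BIB_FILES = references.bib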
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET                  = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS               = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED   = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR      = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO doxygen will only warn about wrong or incomplete parameter
-# documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC       = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT            = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE           =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces.
-# Note: If this tag is empty the current directory is searched.
-
-INPUT                  =
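-
-# Illustrative sketch only (not part of the original configuration), using the
-# example names from the comment above:
-# INPUT = myfile.cpp /usr/src/myproject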
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING         = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank the
-# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
-# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
-# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
-# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
-# *.qsf, *.as and *.js.
-
-FILE_PATTERNS          =
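-
-# Illustrative sketch only (not part of the original configuration): restrict
-# the input to C/C++ sources and headers, using patterns from the list above:
-# FILE_PATTERNS = *.c *.cc *.cxx *.cpp *.h *.hpp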
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE              = NO
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE                =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS       = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
-
-EXCLUDE_PATTERNS       =
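-
-# Illustrative sketch only (not part of the original configuration): exclude
-# all test directories, as in the note above:
-# EXCLUDE_PATTERNS = */test/*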
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS        =
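-
-# Illustrative sketch only (not part of the original configuration), using the
-# example symbols mentioned above:
-# EXCLUDE_SYMBOLS = ANamespace AClass ANamespace::*Test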
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH           =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS       =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE      = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH             =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-
-INPUT_FILTER           =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-
-FILTER_PATTERNS        =
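-
-# Illustrative sketch only (not part of the original configuration;
-# my_cpp_filter is the placeholder filter name used in the comment above):
-# FILTER_PATTERNS = *.cpp=my_cpp_filter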
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER ) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES    = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERNS (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on, for instance, GitHub
-# and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE =
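-
-# Illustrative sketch only (not part of the original configuration; assumes a
-# README.md is part of the INPUT, as in the GitHub scenario described above):
-# USE_MDFILE_AS_MAINPAGE = README.md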
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER         = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES         = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS    = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION    = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS        = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS              = NO
-
-# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS       = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX     = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX    = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX          =
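-
-# Illustrative sketch only (not part of the original configuration; the prefix
-# is hypothetical): index classes named MyLibFoo, MyLibBar, ... under F, B, ...
-# IGNORE_PREFIX = MyLib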
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML          = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT            = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION    = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML, the header file must include any scripts and style sheets
-# that doxygen needs, which depend on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER            =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER            =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET        =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
-# defined cascading style sheet that is included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefore more robust against future updates.
-# Doxygen will copy the style sheet file to the output directory. For an example
-# see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET  =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES       =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the stylesheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# is purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE    = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT    = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA  = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP         = YES
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS  = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries to 1 will produce a fully collapsed tree by default. 0 is a special
-# value representing an infinite number of entries and will result in a fully
-# expanded tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET        = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME        = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID       = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME  = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP      = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE               =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler ( hhc.exe). If non-empty
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION           =
-
-# The GENERATE_CHI flag controls whether a separate .chi index file is
-# generated ( YES) or whether it is included in the master .chm file ( NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI           = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING     =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated (
-# YES) or a normal table of contents ( NO) in the .chm file.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC             = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND             = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP           = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE               =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE          = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER     = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME   =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS  =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS  =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION           =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated, together with the HTML files, they form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files need
-# to be copied into the plugins directory of Eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP   = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID         = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX          = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW      = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE   = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH         = 250
-
-# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW    = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE       = 10
-
-# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory for the changes to take effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT    = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client-side JavaScript for the rendering
-# instead of using prerendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want the formulas to look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX            = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT         = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS     =
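-
-# Illustrative sketch only (not part of the original configuration), using the
-# extensions named in the example above:
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols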
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE       =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the JavaScript-based search engine can be slow; in that case
-# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
-# search using the keyboard; to jump to the search box use <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE           = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
-# are two flavours of web server based searching depending on the
-# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for
-# searching and an index file used by the script. When EXTERNAL_SEARCH is
-# enabled the indexing and searching needs to be provided by external tools. See
-# the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH    = NO
-
-# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH        = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer ( doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHENGINE_URL       =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE        = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID     =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id to
-# a relative location where the documentation can be found. The format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX         = YES
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT           = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when enabling USE_PDFLATEX this option is only used for generating
-# bitmaps for formulas in the HTML output, but not in the Makefile that is
-# written to the output directory.
-# The default file is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME         = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME     = makeindex
-
-# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX          = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE             = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. To get the times font for
-# instance you can specify
-# EXTRA_PACKAGES=times
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES         =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will
-# replace them by respectively the title of the page, the current date and time,
-# only the current date, the version number of doxygen, the project name (see
-# PROJECT_NAME), or the project number (see PROJECT_NUMBER).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER           =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER           =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES      =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS         = YES
-
-# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get a
-# higher quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX           = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE        = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES     = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE      = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE        = plain
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF           = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT             = rtf
-
-# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF            = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS         = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's config
-# file, i.e. a series of assignments. You only have to provide replacements;
-# missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE    =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's config file. A template extensions file can be generated
-# using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE    =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN           = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT             = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION          = .3
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS              = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML           = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT             = xml
-
-# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_SCHEMA             =
-
-# The XML_DTD tag can be used to specify a XML DTD, which can be used by a
-# validating XML parser to check the syntax of the XML files.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_DTD                =
-
-# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING     = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK       = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT         = docbook
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen
-# Definitions (see http://autogen.sf.net) file that captures the structure of
-# the code including all documentation. Note that this feature is still
-# experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF   = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD       = NO
-
-# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX          = NO
-
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY         = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING   = YES
-
-# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names
-# in the source code. If set to NO only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION        = NO
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF     = NO
-
-# If the SEARCH_INCLUDES tag is set to YES the includes files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES        = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH           =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS  =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED             =
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED      =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all references to function-like macros that are alone on a line, have an
-# all uppercase name, and do not end with a semicolon. Such function macros are
-# typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS   = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have a unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES               =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE       =
-
-# If the ALLEXTERNALS tag is set to YES all external class will be listed in the
-# class index. If set to NO only the inherited external classes will be listed.
-# The default value is: NO.
-
-ALLEXTERNALS           = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in
-# the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS        = YES
-
-# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES         = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of 'which perl').
-# The default file (with absolute path) is: /usr/bin/perl.
-
-PERL_PATH              = /usr/bin/perl
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS         = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see:
-# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH            =
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH               =
-
-# If set to YES, the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS   = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO.
-# The default value is: NO.
-
-HAVE_DOT               = NO
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS        = 0
-
-# When you want a differently looking font in the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME           = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE           = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH           =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH            = YES
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH    = YES
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS           = YES
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK               = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS   = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS     = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH          = YES
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH      = YES
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH             = NO
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH           = NO
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
-# graphical hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY    = YES
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH        = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot.
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif and svg.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT       = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG        = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH               =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS           =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS           =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS           =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES, doxygen will not show the graph at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES    = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lay
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH    = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT        = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# this, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS      = YES
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND        = YES
-
-# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP            = YES
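
The deleted Doxyfile above leaves Graphviz-based graph generation at its defaults (HAVE_DOT = NO). For reference only, the dot-related tags it documents combine roughly as follows when a project does want clickable SVG diagrams; this fragment is illustrative and is not a configuration present anywhere in this change:

# Illustrative Doxyfile fragment (not part of this tree): enabling dot-based
# SVG graphs per the comments above.
HAVE_DOT               = YES
DOT_IMAGE_FORMAT       = svg
INTERACTIVE_SVG        = YES
HTML_FILE_EXTENSION    = .xhtml   # per the notes above, needed to show SVG in IE 9+
DOT_GRAPH_MAX_NODES    = 50       # truncated graphs are marked with a red box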
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index 49bd5ee..e2c1878 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaaudio_example_utils",
     export_include_dirs: ["utils"],
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index d8c5843..72adfd7 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "input_monitor",
     gtest: false,
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index 5b7d956..b18aeec 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -1,12 +1,23 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "aaudio_loopback",
     gtest: false,
     srcs: ["src/loopback.cpp"],
     cflags: ["-Wall", "-Werror"],
     static_libs: ["libsndfile"],
+    include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
     shared_libs: [
         "libaaudio",
         "libaudioutils",
+        "liblog"
         ],
     header_libs: ["libaaudio_example_utils"],
 }
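
This hunk points the loopback test at the shared analyzer sources under external/oboe (apparently replacing the local copies deleted below) and links liblog, since the shared code logs through the ALOG* macros rather than plain printf. A minimal sketch of why that becomes a link-time dependency; the function and tag names here are illustrative, not taken from the test:

// Illustrative only: calling the ALOG* macros from <log/log.h> pulls in
// __android_log_print, which is provided by liblog.
#define LOG_TAG "aaudio_loopback_example"
#include <log/log.h>

static void reportGlitchCount(int glitchCount) {
    ALOGD("glitch count = %d", glitchCount);  // expands to a __android_log_print call
}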
diff --git a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
deleted file mode 100644
index 04435d1..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/GlitchAnalyzer.h
+++ /dev/null
@@ -1,445 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_GLITCH_ANALYZER_H
-#define ANALYZER_GLITCH_ANALYZER_H
-
-#include <algorithm>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-
-#include "LatencyAnalyzer.h"
-#include "PseudoRandom.h"
-
-/**
- * Output a steady sine wave and analyze the return signal.
- *
- * Use a cosine transform to measure the predicted magnitude and relative phase of the
- * looped back sine wave. Then generate a predicted signal and compare with the actual signal.
- */
-class GlitchAnalyzer : public LoopbackProcessor {
-public:
-
-    int32_t getState() const {
-        return mState;
-    }
-
-    double getPeakAmplitude() const {
-        return mPeakFollower.getLevel();
-    }
-
-    double getTolerance() {
-        return mTolerance;
-    }
-
-    void setTolerance(double tolerance) {
-        mTolerance = tolerance;
-        mScaledTolerance = mMagnitude * mTolerance;
-    }
-
-    void setMagnitude(double magnitude) {
-        mMagnitude = magnitude;
-        mScaledTolerance = mMagnitude * mTolerance;
-    }
-
-    int32_t getGlitchCount() const {
-        return mGlitchCount;
-    }
-
-    int32_t getStateFrameCount(int state) const {
-        return mStateFrameCounters[state];
-    }
-
-    double getSignalToNoiseDB() {
-        static const double threshold = 1.0e-14;
-        if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
-            return 0.0;
-        } else {
-            double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
-            double signalToNoiseDB = 10.0 * log10(signalToNoise);
-            if (signalToNoiseDB < MIN_SNR_DB) {
-                ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
-                     MIN_SNR_DB);
-                setResult(ERROR_VOLUME_TOO_LOW);
-            }
-            return signalToNoiseDB;
-        }
-    }
-
-    std::string analyze() override {
-        std::stringstream report;
-        report << "GlitchAnalyzer ------------------\n";
-        report << LOOPBACK_RESULT_TAG "peak.amplitude     = " << std::setw(8)
-               << getPeakAmplitude() << "\n";
-        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
-               << mMagnitude << "\n";
-        report << LOOPBACK_RESULT_TAG "rms.noise          = " << std::setw(8)
-               << mMeanSquareNoise << "\n";
-        report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
-               << getSignalToNoiseDB() << "\n";
-        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
-               << mFramesAccumulated << "\n";
-        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
-               << mSinePeriod << "\n";
-        report << LOOPBACK_RESULT_TAG "test.state         = " << std::setw(8)
-               << mState << "\n";
-        report << LOOPBACK_RESULT_TAG "frame.count        = " << std::setw(8)
-               << mFrameCounter << "\n";
-        // Did we ever get a lock?
-        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
-        if (!gotLock) {
-            report << "ERROR - failed to lock on reference sine tone.\n";
-            setResult(ERROR_NO_LOCK);
-        } else {
-            // Only print if meaningful.
-            report << LOOPBACK_RESULT_TAG "glitch.count       = " << std::setw(8)
-                   << mGlitchCount << "\n";
-            report << LOOPBACK_RESULT_TAG "max.glitch         = " << std::setw(8)
-                   << mMaxGlitchDelta << "\n";
-            if (mGlitchCount > 0) {
-                report << "ERROR - number of glitches > 0\n";
-                setResult(ERROR_GLITCHES);
-            }
-        }
-        return report.str();
-    }
-
-    void printStatus() override {
-        ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
-    }
-    /**
-     * Calculate the magnitude of the component of the input signal
-     * that matches the analysis frequency.
-     * Also calculate the phase that we can use to create a
-     * signal that matches that component.
-     * The phase will be between -PI and +PI.
-     */
-    double calculateMagnitude(double *phasePtr = nullptr) {
-        if (mFramesAccumulated == 0) {
-            return 0.0;
-        }
-        double sinMean = mSinAccumulator / mFramesAccumulated;
-        double cosMean = mCosAccumulator / mFramesAccumulated;
-        double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
-        if (phasePtr != nullptr) {
-            double phase = M_PI_2 - atan2(sinMean, cosMean);
-            *phasePtr = phase;
-        }
-        return magnitude;
-    }
-
-    /**
-     * @param frameData contains microphone data with sine signal feedback
-     * @param channelCount
-     */
-    result_code processInputFrame(float *frameData, int /* channelCount */) override {
-        result_code result = RESULT_OK;
-
-        float sample = frameData[0];
-        float peak = mPeakFollower.process(sample);
-
-        // Force a periodic glitch to test the detector!
-        if (mForceGlitchDuration > 0) {
-            if (mForceGlitchCounter == 0) {
-                ALOGE("%s: force a glitch!!", __func__);
-                mForceGlitchCounter = getSampleRate();
-            } else if (mForceGlitchCounter <= mForceGlitchDuration) {
-                // Force an abrupt offset.
-                sample += (sample > 0.0) ? -0.5f : 0.5f;
-            }
-            --mForceGlitchCounter;
-        }
-
-        mStateFrameCounters[mState]++; // count how many frames we are in each state
-
-        switch (mState) {
-            case STATE_IDLE:
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mState = STATE_IMMUNE;
-                    mDownCounter = IMMUNE_FRAME_COUNT;
-                    mInputPhase = 0.0; // prevent spike at start
-                    mOutputPhase = 0.0;
-                }
-                break;
-
-            case STATE_IMMUNE:
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mState = STATE_WAITING_FOR_SIGNAL;
-                }
-                break;
-
-            case STATE_WAITING_FOR_SIGNAL:
-                if (peak > mThreshold) {
-                    mState = STATE_WAITING_FOR_LOCK;
-                    //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
-                    resetAccumulator();
-                }
-                break;
-
-            case STATE_WAITING_FOR_LOCK:
-                mSinAccumulator += sample * sinf(mInputPhase);
-                mCosAccumulator += sample * cosf(mInputPhase);
-                mFramesAccumulated++;
-                // Must be a multiple of the period or the calculation will not be accurate.
-                if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
-                    double phaseOffset = 0.0;
-                    setMagnitude(calculateMagnitude(&phaseOffset));
-//                    ALOGD("%s() mag = %f, offset = %f, prev = %f",
-//                            __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
-                    if (mMagnitude > mThreshold) {
-                        if (abs(phaseOffset) < kMaxPhaseError) {
-                            mState = STATE_LOCKED;
-//                            ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
-                        }
-                        // Adjust mInputPhase to match measured phase
-                        mInputPhase += phaseOffset;
-                    }
-                    resetAccumulator();
-                }
-                incrementInputPhase();
-                break;
-
-            case STATE_LOCKED: {
-                // Predict next sine value
-                double predicted = sinf(mInputPhase) * mMagnitude;
-                double diff = predicted - sample;
-                double absDiff = fabs(diff);
-                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
-                if (absDiff > mScaledTolerance) {
-                    result = ERROR_GLITCHES;
-                    onGlitchStart();
-//                    LOGI("diff glitch detected, absDiff = %g", absDiff);
-                } else {
-                    mSumSquareSignal += predicted * predicted;
-                    mSumSquareNoise += diff * diff;
-                    // Track incoming signal and slowly adjust magnitude to account
-                    // for drift in the DRC or AGC.
-                    mSinAccumulator += sample * sinf(mInputPhase);
-                    mCosAccumulator += sample * cosf(mInputPhase);
-                    mFramesAccumulated++;
-                    // Must be a multiple of the period or the calculation will not be accurate.
-                    if (mFramesAccumulated == mSinePeriod) {
-                        const double coefficient = 0.1;
-                        double phaseOffset = 0.0;
-                        double magnitude = calculateMagnitude(&phaseOffset);
-                        // One pole averaging filter.
-                        setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
-
-                        mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
-                        mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
-                        resetAccumulator();
-
-                        if (abs(phaseOffset) > kMaxPhaseError) {
-                            result = ERROR_GLITCHES;
-                            onGlitchStart();
-                            ALOGD("phase glitch detected, phaseOffset = %g", phaseOffset);
-                        } else if (mMagnitude < mThreshold) {
-                            result = ERROR_GLITCHES;
-                            onGlitchStart();
-                            ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
-                        }
-                    }
-                }
-                incrementInputPhase();
-            } break;
-
-            case STATE_GLITCHING: {
-                // Predict next sine value
-                mGlitchLength++;
-                double predicted = sinf(mInputPhase) * mMagnitude;
-                double diff = predicted - sample;
-                double absDiff = fabs(diff);
-                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
-                if (absDiff < mScaledTolerance) { // close enough?
-                    // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
-                    // We don't want to just consider a zero crossing the end of a glitch.
-                    if (mNonGlitchCount++ > mSinePeriod) {
-                        onGlitchEnd();
-                    }
-                } else {
-                    mNonGlitchCount = 0;
-                    if (mGlitchLength > (4 * mSinePeriod)) {
-                        relock();
-                    }
-                }
-                incrementInputPhase();
-            } break;
-
-            case NUM_STATES: // not a real state
-                break;
-        }
-
-        mFrameCounter++;
-
-        return result;
-    }
-
-    // advance and wrap phase
-    void incrementInputPhase() {
-        mInputPhase += mPhaseIncrement;
-        if (mInputPhase > M_PI) {
-            mInputPhase -= (2.0 * M_PI);
-        }
-    }
-
-    // advance and wrap phase
-    void incrementOutputPhase() {
-        mOutputPhase += mPhaseIncrement;
-        if (mOutputPhase > M_PI) {
-            mOutputPhase -= (2.0 * M_PI);
-        }
-    }
-
-    /**
-     * @param frameData upon return, contains the reference sine wave
-     * @param channelCount
-     */
-    result_code processOutputFrame(float *frameData, int channelCount) override {
-        float output = 0.0f;
-        // Output sine wave so we can measure it.
-        if (mState != STATE_IDLE) {
-            float sinOut = sinf(mOutputPhase);
-            incrementOutputPhase();
-            output = (sinOut * mOutputAmplitude)
-                     + (mWhiteNoise.nextRandomDouble() * kNoiseAmplitude);
-            // ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut,  mPhaseIncrement);
-        }
-        frameData[0] = output;
-        for (int i = 1; i < channelCount; i++) {
-            frameData[i] = 0.0f;
-        }
-        return RESULT_OK;
-    }
-
-    void onGlitchStart() {
-        mGlitchCount++;
-//        ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
-        mState = STATE_GLITCHING;
-        mGlitchLength = 1;
-        mNonGlitchCount = 0;
-    }
-
-    void onGlitchEnd() {
-//        ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
-        mState = STATE_LOCKED;
-        resetAccumulator();
-    }
-
-    // reset the sine wave detector
-    void resetAccumulator() {
-        mFramesAccumulated = 0;
-        mSinAccumulator = 0.0;
-        mCosAccumulator = 0.0;
-        mSumSquareSignal = 0.0;
-        mSumSquareNoise = 0.0;
-    }
-
-    void relock() {
-//        ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
-        mState = STATE_WAITING_FOR_LOCK;
-        resetAccumulator();
-    }
-
-    void reset() override {
-        LoopbackProcessor::reset();
-        mState = STATE_IDLE;
-        mDownCounter = IDLE_FRAME_COUNT;
-        resetAccumulator();
-    }
-
-    void prepareToTest() override {
-        LoopbackProcessor::prepareToTest();
-        mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
-        mOutputPhase = 0.0f;
-        mInverseSinePeriod = 1.0 / mSinePeriod;
-        mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
-        mGlitchCount = 0;
-        mMaxGlitchDelta = 0.0;
-        for (int i = 0; i < NUM_STATES; i++) {
-            mStateFrameCounters[i] = 0;
-        }
-    }
-
-private:
-
-    // These must match the values in GlitchActivity.java
-    enum sine_state_t {
-        STATE_IDLE,               // beginning
-        STATE_IMMUNE,             // ignoring input, waiting for HW to settle
-        STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
-        STATE_WAITING_FOR_LOCK,   // trying to lock onto the phase of the sine
-        STATE_LOCKED,             // locked on the sine wave, looking for glitches
-        STATE_GLITCHING,           // locked on the sine wave but glitching
-        NUM_STATES
-    };
-
-    enum constants {
-        // Arbitrary durations, assuming 48000 Hz
-        IDLE_FRAME_COUNT = 48 * 100,
-        IMMUNE_FRAME_COUNT = 48 * 100,
-        PERIODS_NEEDED_FOR_LOCK = 8,
-        MIN_SNR_DB = 65
-    };
-
-    static constexpr float kNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
-    static constexpr int kTargetGlitchFrequency = 607;
-    static constexpr double kMaxPhaseError = M_PI * 0.05;
-
-    float   mTolerance = 0.10; // scaled from 0.0 to 1.0
-    double  mThreshold = 0.005;
-    int     mSinePeriod = 1; // this will be set before use
-    double  mInverseSinePeriod = 1.0;
-
-    int32_t mStateFrameCounters[NUM_STATES];
-
-    double  mPhaseIncrement = 0.0;
-    double  mInputPhase = 0.0;
-    double  mOutputPhase = 0.0;
-    double  mMagnitude = 0.0;
-    int32_t mFramesAccumulated = 0;
-    double  mSinAccumulator = 0.0;
-    double  mCosAccumulator = 0.0;
-    double  mMaxGlitchDelta = 0.0;
-    int32_t mGlitchCount = 0;
-    int32_t mNonGlitchCount = 0;
-    int32_t mGlitchLength = 0;
-    // This is used for processing every frame so we cache it here.
-    double  mScaledTolerance = 0.0;
-    int     mDownCounter = IDLE_FRAME_COUNT;
-    int32_t mFrameCounter = 0;
-    double  mOutputAmplitude = 0.75;
-
-    int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
-    int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero
-
-    // measure background noise continuously as a deviation from the expected signal
-    double  mSumSquareSignal = 0.0;
-    double  mSumSquareNoise = 0.0;
-    double  mMeanSquareSignal = 0.0;
-    double  mMeanSquareNoise = 0.0;
-
-    PeakDetector  mPeakFollower;
-
-    PseudoRandom  mWhiteNoise;
-
-    sine_state_t  mState = STATE_IDLE;
-};
-
-
-#endif //ANALYZER_GLITCH_ANALYZER_H
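
The deleted GlitchAnalyzer (now maintained in the shared Oboe sources referenced by the loopback Android.bp change above) locks onto the looped-back tone with a single-frequency sine fit: it accumulates the product of the input with reference sin/cos oscillators over a whole number of periods, then derives magnitude and phase exactly as calculateMagnitude() did. A self-contained sketch of that technique, with made-up names and a synthetic input, purely for illustration:

#include <cmath>
#include <cstdio>
#include <vector>

struct SineFit { double magnitude; double phase; };

// Correlate the signal against sin/cos at the reference frequency over a whole
// number of periods, then recover magnitude and relative phase (same math as
// the deleted calculateMagnitude()).
static SineFit measureSine(const std::vector<float>& x, double phaseIncrement) {
    double sinAccum = 0.0, cosAccum = 0.0, phase = 0.0;
    for (float sample : x) {
        sinAccum += sample * std::sin(phase);
        cosAccum += sample * std::cos(phase);
        phase += phaseIncrement;                 // advance the reference oscillator
    }
    double sinMean = sinAccum / x.size();
    double cosMean = cosAccum / x.size();
    double magnitude = 2.0 * std::sqrt(sinMean * sinMean + cosMean * cosMean);
    double offset = M_PI_2 - std::atan2(sinMean, cosMean);  // input phase relative to reference
    return { magnitude, offset };
}

int main() {
    // 8 periods of a unit-amplitude tone near 607 Hz at 48 kHz, mirroring the
    // analyzer's kTargetGlitchFrequency and PERIODS_NEEDED_FOR_LOCK.
    const int sinePeriod = 48000 / 607;
    const double phaseIncrement = 2.0 * M_PI / sinePeriod;
    std::vector<float> x;
    for (int i = 0; i < 8 * sinePeriod; i++) {
        x.push_back(static_cast<float>(std::sin(i * phaseIncrement)));
    }
    SineFit fit = measureSine(x, phaseIncrement);
    printf("magnitude ~ %.3f, phase ~ %.3f rad\n", fit.magnitude, fit.phase);  // ~1.0, ~0.0
    return 0;
}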
diff --git a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h b/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
deleted file mode 100644
index e506791..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/LatencyAnalyzer.h
+++ /dev/null
@@ -1,606 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Tools for measuring latency and for detecting glitches.
- * These classes are pure math and can be used with any audio system.
- */
-
-#ifndef ANALYZER_LATENCY_ANALYZER_H
-#define ANALYZER_LATENCY_ANALYZER_H
-
-#include <algorithm>
-#include <assert.h>
-#include <cctype>
-#include <iomanip>
-#include <iostream>
-#include <math.h>
-#include <memory>
-#include <sstream>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <vector>
-
-#include "PeakDetector.h"
-#include "PseudoRandom.h"
-#include "RandomPulseGenerator.h"
-
-// This is used when the code is in Oboe.
-#ifndef ALOGD
-#define ALOGD printf
-#define ALOGE printf
-#define ALOGW printf
-#endif
-
-#define LOOPBACK_RESULT_TAG  "RESULT: "
-
-static constexpr int32_t kDefaultSampleRate = 48000;
-static constexpr int32_t kMillisPerSecond   = 1000;
-static constexpr int32_t kMaxLatencyMillis  = 700;  // arbitrary and generous
-static constexpr double  kMinimumConfidence = 0.2;
-
-struct LatencyReport {
-    int32_t latencyInFrames = 0;
-    double confidence = 0.0;
-
-    void reset() {
-        latencyInFrames = 0;
-        confidence = 0.0;
-    }
-};
-
-// Calculate a normalized cross correlation.
-static double calculateNormalizedCorrelation(const float *a,
-                                             const float *b,
-                                             int windowSize) {
-    double correlation = 0.0;
-    double sumProducts = 0.0;
-    double sumSquares = 0.0;
-
-    // Correlate a against b.
-    for (int i = 0; i < windowSize; i++) {
-        float s1 = a[i];
-        float s2 = b[i];
-        // Use a normalized cross-correlation.
-        sumProducts += s1 * s2;
-        sumSquares += ((s1 * s1) + (s2 * s2));
-    }
-
-    if (sumSquares >= 1.0e-9) {
-        correlation = 2.0 * sumProducts / sumSquares;
-    }
-    return correlation;
-}
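
// Illustrative sketch (not part of the deleted header): the expression above is
//     r = 2 * sum(a[i]*b[i]) / sum(a[i]^2 + b[i]^2)
// By Cauchy-Schwarz and AM-GM, |sum(a*b)| <= (sum(a^2) + sum(b^2)) / 2, so r
// always lies in [-1, 1] and reaches 1 only when a and b are identical, which
// is presumably why the peak value can be reported directly as a confidence
// figure. A quick self-check using the function above:
static void checkNormalizedCorrelation() {
    float a[4]    = {0.0f, 0.5f, 1.0f, 0.5f};
    float zero[4] = {0.0f, 0.0f, 0.0f, 0.0f};
    printf("identical: %f\n", calculateNormalizedCorrelation(a, a, 4));    // 1.0
    printf("silence:   %f\n", calculateNormalizedCorrelation(a, zero, 4)); // 0.0
}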
-
-static double calculateRootMeanSquare(float *data, int32_t numSamples) {
-    double sum = 0.0;
-    for (int32_t i = 0; i < numSamples; i++) {
-        float sample = data[i];
-        sum += sample * sample;
-    }
-    return sqrt(sum / numSamples);
-}
-
-/**
- * Monophonic recording with processing.
- */
-class AudioRecording
-{
-public:
-
-    void allocate(int maxFrames) {
-        mData = std::make_unique<float[]>(maxFrames);
-        mMaxFrames = maxFrames;
-    }
-
-    // Write SHORT data from the first channel.
-    int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
-        }
-        return numFrames;
-    }
-
-    // Write FLOAT data from the first channel.
-    int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
-        // stop at end of buffer
-        if ((mFrameCounter + numFrames) > mMaxFrames) {
-            numFrames = mMaxFrames - mFrameCounter;
-        }
-        for (int i = 0; i < numFrames; i++) {
-            mData[mFrameCounter++] = inputData[i * inputChannelCount];
-        }
-        return numFrames;
-    }
-
-    // Write a single FLOAT sample.
-    int32_t write(float sample) {
-        // stop at end of buffer
-        if (mFrameCounter < mMaxFrames) {
-            mData[mFrameCounter++] = sample;
-            return 1;
-        }
-        return 0;
-    }
-
-    void clear() {
-        mFrameCounter = 0;
-    }
-    int32_t size() const {
-        return mFrameCounter;
-    }
-
-    bool isFull() const {
-        return mFrameCounter >= mMaxFrames;
-    }
-
-    float *getData() const {
-        return mData.get();
-    }
-
-    void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() const {
-        return mSampleRate;
-    }
-
-    /**
-     * Square the samples so they are all positive and so the peaks are emphasized.
-     */
-    void square() {
-        float *x = mData.get();
-        for (int i = 0; i < mFrameCounter; i++) {
-            x[i] *= x[i];
-        }
-    }
-
-    /**
-     * Amplify a signal so that the peak matches the specified target.
-     *
-     * @param target final max value
-     * @return gain applied to signal
-     */
-    float normalize(float target) {
-        float maxValue = 1.0e-9f;
-        for (int i = 0; i < mFrameCounter; i++) {
-            maxValue = std::max(maxValue, abs(mData[i]));
-        }
-        float gain = target / maxValue;
-        for (int i = 0; i < mFrameCounter; i++) {
-            mData[i] *= gain;
-        }
-        return gain;
-    }
-
-private:
-    std::unique_ptr<float[]> mData;
-    int32_t       mFrameCounter = 0;
-    int32_t       mMaxFrames = 0;
-    int32_t       mSampleRate = kDefaultSampleRate; // common default
-};
-
-static int measureLatencyFromPulse(AudioRecording &recorded,
-                                   AudioRecording &pulse,
-                                   LatencyReport *report) {
-
-    report->latencyInFrames = 0;
-    report->confidence = 0.0;
-
-    int numCorrelations = recorded.size() - pulse.size();
-    if (numCorrelations < 10) {
-        ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
-        return -1;
-    }
-    std::unique_ptr<float[]> correlations = std::make_unique<float[]>(numCorrelations);
-
-    // Correlate pulse against the recorded data.
-    for (int i = 0; i < numCorrelations; i++) {
-        float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
-                                                                   &pulse.getData()[0],
-                                                                   pulse.size());
-        correlations[i] = correlation;
-    }
-
-    // Find highest peak in correlation array.
-    float peakCorrelation = 0.0;
-    int peakIndex = -1;
-    for (int i = 0; i < numCorrelations; i++) {
-        float value = abs(correlations[i]);
-        if (value > peakCorrelation) {
-            peakCorrelation = value;
-            peakIndex = i;
-        }
-    }
-    if (peakIndex < 0) {
-        ALOGE("%s() no signal for correlation\n", __func__);
-        return -2;
-    }
-
-    report->latencyInFrames = peakIndex;
-    report->confidence = peakCorrelation;
-
-    return 0;
-}
-
-// ====================================================================================
-class LoopbackProcessor {
-public:
-    virtual ~LoopbackProcessor() = default;
-
-    enum result_code {
-        RESULT_OK = 0,
-        ERROR_NOISY = -99,
-        ERROR_VOLUME_TOO_LOW,
-        ERROR_VOLUME_TOO_HIGH,
-        ERROR_CONFIDENCE,
-        ERROR_INVALID_STATE,
-        ERROR_GLITCHES,
-        ERROR_NO_LOCK
-    };
-
-    virtual void prepareToTest() {
-        reset();
-    }
-
-    virtual void reset() {
-        mResult = 0;
-        mResetCount++;
-    }
-
-    virtual result_code processInputFrame(float *frameData, int channelCount) = 0;
-    virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;
-
-    void process(float *inputData, int inputChannelCount, int numInputFrames,
-                 float *outputData, int outputChannelCount, int numOutputFrames) {
-        int numBoth = std::min(numInputFrames, numOutputFrames);
-        // Process one frame at a time.
-        for (int i = 0; i < numBoth; i++) {
-            processInputFrame(inputData, inputChannelCount);
-            inputData += inputChannelCount;
-            processOutputFrame(outputData, outputChannelCount);
-            outputData += outputChannelCount;
-        }
-        // If there is more input than output.
-        for (int i = numBoth; i < numInputFrames; i++) {
-            processInputFrame(inputData, inputChannelCount);
-            inputData += inputChannelCount;
-        }
-        // If there is more output than input.
-        for (int i = numBoth; i < numOutputFrames; i++) {
-            processOutputFrame(outputData, outputChannelCount);
-            outputData += outputChannelCount;
-        }
-    }
-
-    virtual std::string analyze() = 0;
-
-    virtual void printStatus() {};
-
-    int32_t getResult() {
-        return mResult;
-    }
-
-    void setResult(int32_t result) {
-        mResult = result;
-    }
-
-    virtual bool isDone() {
-        return false;
-    }
-
-    virtual int save(const char *fileName) {
-        (void) fileName;
-        return -1;
-    }
-
-    virtual int load(const char *fileName) {
-        (void) fileName;
-        return -1;
-    }
-
-    virtual void setSampleRate(int32_t sampleRate) {
-        mSampleRate = sampleRate;
-    }
-
-    int32_t getSampleRate() const {
-        return mSampleRate;
-    }
-
-    int32_t getResetCount() const {
-        return mResetCount;
-    }
-
-    /** Called when not enough input frames could be read after synchronization.
-     */
-    virtual void onInsufficientRead() {
-        reset();
-    }
-
-protected:
-    int32_t   mResetCount = 0;
-
-private:
-    int32_t mSampleRate = kDefaultSampleRate;
-    int32_t mResult = 0;
-};
-
-class LatencyAnalyzer : public LoopbackProcessor {
-public:
-
-    LatencyAnalyzer() : LoopbackProcessor() {}
-    virtual ~LatencyAnalyzer() = default;
-
-    virtual int32_t getProgress() const = 0;
-
-    virtual int getState() = 0;
-
-    // @return latency in frames
-    virtual int32_t getMeasuredLatency() = 0;
-
-    virtual double getMeasuredConfidence() = 0;
-
-    virtual double getBackgroundRMS() = 0;
-
-    virtual double getSignalRMS() = 0;
-
-};
-
-// ====================================================================================
-/**
- * Measure latency from loopback stream data.
- * Use an encoded bit train as the sound source because it
- * has an unambiguous correlation value.
- * Uses a state machine to cycle through various stages.
- *
- */
-class PulseLatencyAnalyzer : public LatencyAnalyzer {
-public:
-
-    PulseLatencyAnalyzer() : LatencyAnalyzer() {
-        int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
-        int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
-                / (kFramesPerEncodedBit * kMillisPerSecond);
-        int32_t  pulseLength = numPulseBits * kFramesPerEncodedBit;
-        mFramesToRecord = pulseLength + maxLatencyFrames;
-        mAudioRecording.allocate(mFramesToRecord);
-        mAudioRecording.setSampleRate(getSampleRate());
-        generateRandomPulse(pulseLength);
-    }
-
-    void generateRandomPulse(int32_t pulseLength) {
-        mPulse.allocate(pulseLength);
-        RandomPulseGenerator pulser(kFramesPerEncodedBit);
-        for (int i = 0; i < pulseLength; i++) {
-            mPulse.write(pulser.nextFloat());
-        }
-    }
-
-    int getState() override {
-        return mState;
-    }
-
-    void setSampleRate(int32_t sampleRate) override {
-        LoopbackProcessor::setSampleRate(sampleRate);
-        mAudioRecording.setSampleRate(sampleRate);
-    }
-
-    void reset() override {
-        LoopbackProcessor::reset();
-        mDownCounter = getSampleRate() / 2;
-        mLoopCounter = 0;
-
-        mPulseCursor = 0;
-        mBackgroundSumSquare = 0.0f;
-        mBackgroundSumCount = 0;
-        mBackgroundRMS = 0.0f;
-        mSignalRMS = 0.0f;
-
-        mState = STATE_MEASURE_BACKGROUND;
-        mAudioRecording.clear();
-        mLatencyReport.reset();
-    }
-
-    bool hasEnoughData() {
-        return mAudioRecording.isFull();
-    }
-
-    bool isDone() override {
-        return mState == STATE_DONE;
-    }
-
-    int32_t getProgress() const override {
-        return mAudioRecording.size();
-    }
-
-    std::string analyze() override {
-        std::stringstream report;
-        report << "PulseLatencyAnalyzer ---------------\n";
-        report << LOOPBACK_RESULT_TAG "test.state             = "
-                << std::setw(8) << mState << "\n";
-        report << LOOPBACK_RESULT_TAG "test.state.name        = "
-                << convertStateToText(mState) << "\n";
-        report << LOOPBACK_RESULT_TAG "background.rms         = "
-                << std::setw(8) << mBackgroundRMS << "\n";
-
-        int32_t newResult = RESULT_OK;
-        if (mState != STATE_GOT_DATA) {
-            report << "WARNING - Bad state. Check volume on device.\n";
-            // setResult(ERROR_INVALID_STATE);
-        } else {
-            float gain = mAudioRecording.normalize(1.0f);
-            measureLatencyFromPulse(mAudioRecording,
-                                    mPulse,
-                                    &mLatencyReport);
-
-            if (mLatencyReport.confidence < kMinimumConfidence) {
-                report << "   ERROR - confidence too low!";
-                newResult = ERROR_CONFIDENCE;
-            } else {
-                mSignalRMS = calculateRootMeanSquare(
-                        &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
-                                / gain;
-            }
-            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
-                                   / getSampleRate();
-            report << LOOPBACK_RESULT_TAG "latency.frames         = " << std::setw(8)
-                   << mLatencyReport.latencyInFrames << "\n";
-            report << LOOPBACK_RESULT_TAG "latency.msec           = " << std::setw(8)
-                   << latencyMillis << "\n";
-            report << LOOPBACK_RESULT_TAG "latency.confidence     = " << std::setw(8)
-                   << mLatencyReport.confidence << "\n";
-        }
-        mState = STATE_DONE;
-        if (getResult() == RESULT_OK) {
-            setResult(newResult);
-        }
-
-        return report.str();
-    }
-
-    int32_t getMeasuredLatency() override {
-        return mLatencyReport.latencyInFrames;
-    }
-
-    double getMeasuredConfidence() override {
-        return mLatencyReport.confidence;
-    }
-
-    double getBackgroundRMS() override {
-        return mBackgroundRMS;
-    }
-
-    double getSignalRMS() override {
-        return mSignalRMS;
-    }
-
-    void printStatus() override {
-        ALOGD("st = %d", mState);
-    }
-
-    result_code processInputFrame(float *frameData, int channelCount) override {
-        echo_state nextState = mState;
-        mLoopCounter++;
-
-        switch (mState) {
-            case STATE_MEASURE_BACKGROUND:
-                // Measure background RMS on channel 0
-                mBackgroundSumSquare += frameData[0] * frameData[0];
-                mBackgroundSumCount++;
-                mDownCounter--;
-                if (mDownCounter <= 0) {
-                    mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
-                    nextState = STATE_IN_PULSE;
-                    mPulseCursor = 0;
-                }
-                break;
-
-            case STATE_IN_PULSE:
-                // Record input until the mAudioRecording is full.
-                mAudioRecording.write(frameData, channelCount, 1);
-                if (hasEnoughData()) {
-                    nextState = STATE_GOT_DATA;
-                }
-                break;
-
-            case STATE_GOT_DATA:
-            case STATE_DONE:
-            default:
-                break;
-        }
-
-        mState = nextState;
-        return RESULT_OK;
-    }
-
-    result_code processOutputFrame(float *frameData, int channelCount) override {
-        switch (mState) {
-            case STATE_IN_PULSE:
-                if (mPulseCursor < mPulse.size()) {
-                    float pulseSample = mPulse.getData()[mPulseCursor++];
-                    for (int i = 0; i < channelCount; i++) {
-                        frameData[i] = pulseSample;
-                    }
-                } else {
-                    for (int i = 0; i < channelCount; i++) {
-                        frameData[i] = 0;
-                    }
-                }
-                break;
-
-            case STATE_MEASURE_BACKGROUND:
-            case STATE_GOT_DATA:
-            case STATE_DONE:
-            default:
-                for (int i = 0; i < channelCount; i++) {
-                    frameData[i] = 0.0f; // silence
-                }
-                break;
-        }
-
-        return RESULT_OK;
-    }
-
-private:
-
-    enum echo_state {
-        STATE_MEASURE_BACKGROUND,
-        STATE_IN_PULSE,
-        STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
-        STATE_DONE,
-    };
-
-    const char *convertStateToText(echo_state state) {
-        switch (state) {
-            case STATE_MEASURE_BACKGROUND:
-                return "INIT";
-            case STATE_IN_PULSE:
-                return "PULSE";
-            case STATE_GOT_DATA:
-                return "GOT_DATA";
-            case STATE_DONE:
-                return "DONE";
-        }
-        return "UNKNOWN";
-    }
-
-    int32_t         mDownCounter = 500;
-    int32_t         mLoopCounter = 0;
-    echo_state      mState = STATE_MEASURE_BACKGROUND;
-
-    static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
-    static constexpr int32_t kPulseLengthMillis = 500;
-
-    AudioRecording     mPulse;
-    int32_t            mPulseCursor = 0;
-
-    double             mBackgroundSumSquare = 0.0;
-    int32_t            mBackgroundSumCount = 0;
-    double             mBackgroundRMS = 0.0;
-    double             mSignalRMS = 0.0;
-    int32_t            mFramesToRecord = 0;
-
-    AudioRecording     mAudioRecording; // contains only the input after starting the pulse
-    LatencyReport      mLatencyReport;
-};
-
-#endif // ANALYZER_LATENCY_ANALYZER_H
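
For context on the file removed above: the latency estimate it implemented slides the known pulse across the recording, computes a normalized cross-correlation at each offset, and reports the offset of the strongest peak as the latency in frames. A minimal standalone sketch of that idea (the test signal and constants below are illustrative, not taken from the patch):

// Hedged sketch: estimate latency by sliding a known pulse over a recording
// and picking the offset with the strongest normalized cross-correlation.
#include <cmath>
#include <cstdio>
#include <vector>

static double normalizedCorrelation(const float *a, const float *b, int n) {
    double sumProducts = 0.0;
    double sumSquares = 0.0;
    for (int i = 0; i < n; i++) {
        sumProducts += a[i] * b[i];
        sumSquares += a[i] * a[i] + b[i] * b[i];
    }
    return (sumSquares >= 1.0e-9) ? 2.0 * sumProducts / sumSquares : 0.0;
}

int main() {
    const int kTrueLatency = 37;                    // simulated delay in frames
    std::vector<float> pulse(64);
    for (size_t i = 0; i < pulse.size(); i++) {     // arbitrary bipolar test pulse
        pulse[i] = (i % 8 < 4) ? 1.0f : -1.0f;
    }
    std::vector<float> recording(512, 0.0f);
    for (size_t i = 0; i < pulse.size(); i++) {
        recording[kTrueLatency + i] = 0.5f * pulse[i];  // attenuated echo
    }

    int peakIndex = -1;
    double peakCorrelation = 0.0;
    const int numOffsets = (int)(recording.size() - pulse.size());
    for (int i = 0; i < numOffsets; i++) {
        double c = std::fabs(normalizedCorrelation(&recording[i], pulse.data(),
                                                   (int) pulse.size()));
        if (c > peakCorrelation) {
            peakCorrelation = c;
            peakIndex = i;
        }
    }
    printf("latency = %d frames, confidence = %.3f\n", peakIndex, peakCorrelation);
    return 0;
}

With the simulated 37-frame echo above, the peak lands at offset 37 with a confidence of about 0.8, which is why the analyzer compared its peak against kMinimumConfidence before trusting a result.
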
diff --git a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
deleted file mode 100644
index 0a4bd5b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/ManchesterEncoder.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_MANCHESTER_ENCODER_H
-#define ANALYZER_MANCHESTER_ENCODER_H
-
-#include <cstdint>
-
-/**
- * Encode bytes using Manchester Coding scheme.
- *
- * Manchester Code is self-clocking.
- * There is a transition in the middle of every bit.
- * Zero is high then low.
- * One is low then high.
- *
- * This avoids having long DC sections that would droop when
- * passed through analog circuits with AC coupling.
- *
- * IEEE 802.3 compatible.
- */
-
-class ManchesterEncoder {
-public:
-    ManchesterEncoder(int samplesPerPulse)
-            : mSamplesPerPulse(samplesPerPulse)
-            , mSamplesPerPulseHalf(samplesPerPulse / 2)
-            , mCursor(samplesPerPulse) {
-    }
-
-    virtual ~ManchesterEncoder() = default;
-
-    /**
-     * This will be called when the next byte is needed.
-     * @return
-     */
-    virtual uint8_t onNextByte() = 0;
-
-    /**
-     * Generate the next floating point sample.
-     * @return
-     */
-    virtual float nextFloat() {
-        advanceSample();
-        if (mCurrentBit) {
-            return (mCursor < mSamplesPerPulseHalf) ? -1.0f : 1.0f; // one
-        } else {
-            return (mCursor < mSamplesPerPulseHalf) ? 1.0f : -1.0f; // zero
-        }
-    }
-
-protected:
-    /**
-     * This will be called when a new bit is ready to be encoded.
-     * It can be used to prepare the encoded samples.
-     * @param current
-     */
-    virtual void onNextBit(bool /* current */) {};
-
-    void advanceSample() {
-        // Are we ready for a new bit?
-        if (++mCursor >= mSamplesPerPulse) {
-            mCursor = 0;
-            if (mBitsLeft == 0) {
-                mCurrentByte = onNextByte();
-                mBitsLeft = 8;
-            }
-            --mBitsLeft;
-            mCurrentBit = (mCurrentByte >> mBitsLeft) & 1;
-            onNextBit(mCurrentBit);
-        }
-    }
-
-    bool getCurrentBit() {
-        return mCurrentBit;
-    }
-
-    const int mSamplesPerPulse;
-    const int mSamplesPerPulseHalf;
-    int       mCursor;
-    int       mBitsLeft = 0;
-    uint8_t   mCurrentByte = 0;
-    bool      mCurrentBit = false;
-};
-#endif //ANALYZER_MANCHESTER_ENCODER_H
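
As a quick illustration of the Manchester scheme described in the removed header (zero = high then low, one = low then high, MSB first), a small sketch assuming an arbitrary 4 samples per bit:

// Hedged sketch: plain (un-rounded) Manchester coding of one byte, MSB first,
// with an arbitrary 4 samples per bit. Polarity matches the encoder above:
// zero = high then low, one = low then high.
#include <cstdint>
#include <cstdio>

int main() {
    const uint8_t byte = 0xB2;        // arbitrary example byte: 1011 0010
    const int samplesPerBit = 4;
    for (int bit = 7; bit >= 0; bit--) {
        const bool isOne = (byte >> bit) & 1;
        for (int s = 0; s < samplesPerBit; s++) {
            const bool firstHalf = s < samplesPerBit / 2;
            const float sample = isOne ? (firstHalf ? -1.0f : 1.0f)   // one: low then high
                                       : (firstHalf ? 1.0f : -1.0f);  // zero: high then low
            printf("%+.0f ", sample);
        }
    }
    printf("\n");
    return 0;
}

Because every bit contains a mid-bit transition, the decoder can recover the clock from the signal itself, and long runs of identical bits never produce a flat DC stretch.
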
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h b/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
deleted file mode 100644
index 4b3b4e7..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PeakDetector.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_PEAK_DETECTOR_H
-#define ANALYZER_PEAK_DETECTOR_H
-
-#include <math.h>
-
-/**
- * Measure a peak envelope by rising with the peaks,
- * and decaying exponentially after each peak.
- * The absolute value of the input signal is used.
- */
-class PeakDetector {
-public:
-
-    void reset() {
-        mLevel = 0.0;
-    }
-
-    double process(double input) {
-        mLevel *= mDecay; // exponential decay
-        input = fabs(input);
-        // never fall below the input signal
-        if (input > mLevel) {
-            mLevel = input;
-        }
-        return mLevel;
-    }
-
-    double getLevel() const {
-        return mLevel;
-    }
-
-    double getDecay() const {
-        return mDecay;
-    }
-
-    /**
-     * Multiply the level by this amount on every iteration.
-     * This provides an exponential decay curve.
- * A value just under 1.0 is best, for example 0.99.
-     * @param decay scale level for each input
-     */
-    void setDecay(double decay) {
-        mDecay = decay;
-    }
-
-private:
-    static constexpr double kDefaultDecay = 0.99f;
-
-    double mLevel = 0.0;
-    double mDecay = kDefaultDecay;
-};
-#endif //ANALYZER_PEAK_DETECTOR_H
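
The peak follower removed above rises instantly to each new absolute peak and then decays exponentially between peaks. A short sketch of that behaviour, using the header's default decay of 0.99 and an arbitrary input sequence:

// Hedged sketch of the peak-follower behaviour described above: rise
// instantly to |input|, then decay exponentially between peaks.
#include <algorithm>
#include <cmath>
#include <cstdio>

int main() {
    double level = 0.0;
    const double decay = 0.99;  // the header's default decay per sample
    const double input[] = {0.0, 0.8, 0.1, 0.0, 0.0, -0.9, 0.0, 0.0};
    for (double x : input) {
        level *= decay;                         // exponential decay
        level = std::max(level, std::fabs(x));  // never fall below the input
        printf("in=%+.2f  level=%.4f\n", x, level);
    }
    return 0;
}
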
diff --git a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h b/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
deleted file mode 100644
index 1c4938c..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/PseudoRandom.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#ifndef ANALYZER_PSEUDORANDOM_H
-#define ANALYZER_PSEUDORANDOM_H
-
-#include <cctype>
-
-class PseudoRandom {
-public:
-    PseudoRandom(int64_t seed = 99887766)
-            :    mSeed(seed)
-    {}
-
-    /**
-     * Returns the next random double from -1.0 to 1.0
-     *
-     * @return value from -1.0 to 1.0
-     */
-    double nextRandomDouble() {
-        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
-    }
-
-    /** Calculate a random 32-bit number using the linear-congruential method
-     * with known real-time performance.
-     */
-    int32_t nextRandomInteger() {
-#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
-        int64_t prod;
-        // Use values for 64-bit sequence from MMIX by Donald Knuth.
-        __builtin_mul_overflow(mSeed, (int64_t)6364136223846793005, &prod);
-        __builtin_add_overflow(prod, (int64_t)1442695040888963407, &mSeed);
-#else
-        mSeed = (mSeed * (int64_t)6364136223846793005) + (int64_t)1442695040888963407;
-#endif
-        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
-    }
-
-private:
-    int64_t mSeed;
-};
-
-#endif //ANALYZER_PSEUDORANDOM_H
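
A brief usage sketch of the generator removed above: the same MMIX constants from Knuth, written with unsigned arithmetic so the wrap-around is well defined, and scaled to roughly [-1.0, 1.0) the way nextRandomDouble() did. The seed and sample count are arbitrary.

// Hedged usage sketch of the linear-congruential generator above (MMIX
// constants from Knuth), using unsigned arithmetic for the wrap-around.
#include <cstdint>
#include <cstdio>

int main() {
    int64_t seed = 99887766;
    for (int i = 0; i < 5; i++) {
        seed = (int64_t)((uint64_t) seed * 6364136223846793005ULL
                         + 1442695040888963407ULL);
        int32_t high = (int32_t)(seed >> 32);     // higher bits have the longer period
        double noise = high * (0.5 / (1 << 30));  // scale to roughly [-1.0, 1.0)
        printf("%8.5f\n", noise);
    }
    return 0;
}
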
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h b/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
deleted file mode 100644
index 030050b..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RandomPulseGenerator.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
-#define ANALYZER_RANDOM_PULSE_GENERATOR_H
-
-#include <stdlib.h>
-#include "RoundedManchesterEncoder.h"
-
-/**
- * Encode random ones and zeros using Manchester Code per IEEE 802.3.
- */
-class RandomPulseGenerator : public RoundedManchesterEncoder {
-public:
-    RandomPulseGenerator(int samplesPerPulse)
-    : RoundedManchesterEncoder(samplesPerPulse) {
-    }
-
-    virtual ~RandomPulseGenerator() = default;
-
-    /**
-     * This will be called when the next byte is needed.
-     * @return random byte
-     */
-    uint8_t onNextByte() override {
-        return static_cast<uint8_t>(rand());
-    }
-};
-
-#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H
diff --git a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h b/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
deleted file mode 100644
index f2eba84..0000000
--- a/media/libaaudio/examples/loopback/src/analyzer/RoundedManchesterEncoder.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
-
-#include <math.h>
-#include <memory.h>
-#include <stdlib.h>
-#include "ManchesterEncoder.h"
-
-/**
- * Encode bytes using Manchester Code.
- * Round the edges using a half cosine to reduce ringing caused by a hard edge.
- */
-
-class RoundedManchesterEncoder : public ManchesterEncoder {
-public:
-    RoundedManchesterEncoder(int samplesPerPulse)
-            : ManchesterEncoder(samplesPerPulse) {
-        int rampSize = samplesPerPulse / 4;
-        mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
-        mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);
-
-        int sampleIndex = 0;
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            float phase = (rampIndex + 1) * M_PI / rampSize;
-            float sample = -cosf(phase);
-            mZeroAfterZero[sampleIndex] = sample;
-            mZeroAfterOne[sampleIndex] = 1.0f;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            mZeroAfterZero[sampleIndex] = 1.0f;
-            mZeroAfterOne[sampleIndex] = 1.0f;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            float phase = (rampIndex + 1) * M_PI / rampSize;
-            float sample = cosf(phase);
-            mZeroAfterZero[sampleIndex] = sample;
-            mZeroAfterOne[sampleIndex] = sample;
-            sampleIndex++;
-        }
-        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
-            mZeroAfterZero[sampleIndex] = -1.0f;
-            mZeroAfterOne[sampleIndex] = -1.0f;
-            sampleIndex++;
-        }
-    }
-
-    void onNextBit(bool current) override {
-        // Do we need to use the rounded edge?
-        mCurrentSamples = (current ^ mPreviousBit)
-                          ? mZeroAfterOne.get()
-                          : mZeroAfterZero.get();
-        mPreviousBit = current;
-    }
-
-    float nextFloat() override {
-        advanceSample();
-        float output = mCurrentSamples[mCursor];
-        if (getCurrentBit()) output = -output;
-        return output;
-    }
-
-private:
-
-    bool mPreviousBit = false;
-    float *mCurrentSamples = nullptr;
-    std::unique_ptr<float[]> mZeroAfterZero;
-    std::unique_ptr<float[]> mZeroAfterOne;
-};
-
-#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
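
As a side note on the rounded edge in the removed encoder, a tiny sketch that prints the rising half-cosine ramp it builds; samplesPerPulse = 16 is an arbitrary choice here (the loopback analyzer used 8):

// Hedged sketch: the rising half-cosine edge built by the encoder above,
// for an arbitrary samplesPerPulse of 16 (rampSize = 4).
#include <math.h>
#include <stdio.h>

int main() {
    const int samplesPerPulse = 16;
    const int rampSize = samplesPerPulse / 4;
    for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
        float phase = (rampIndex + 1) * M_PI / rampSize;
        printf("%+.3f\n", -cosf(phase));  // rises smoothly from about -0.7 to +1.0
    }
    return 0;
}

The smooth edge limits the bandwidth of each transition, which reduces ringing after the pulse passes through the device's anti-aliasing and AC-coupling filters.
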
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 0d2ec70..6fff568 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -36,8 +36,12 @@
 #include "AAudioSimpleRecorder.h"
 #include "AAudioExampleUtils.h"
 
+// Get logging macros from OboeTester
+#include "android_debug.h"
+// Get signal analyzers from OboeTester
 #include "analyzer/GlitchAnalyzer.h"
 #include "analyzer/LatencyAnalyzer.h"
+
 #include "../../utils/AAudioExampleUtils.h"
 
 // V0.4.00 = rectify and low-pass filter the echos, auto-correlate entire echo
@@ -45,8 +49,9 @@
 //           fix -n option to set output buffer for -tm
 //           plot first glitch
 // V0.4.02 = allow -n0 for minimal buffer size
-// V0.5.00 = use latency analyzer from OboeTester, uses random noise for latency
-#define APP_VERSION             "0.5.00"
+// V0.5.00 = use latency analyzer copied from OboeTester, uses random noise for latency
+// V0.5.01 = use latency analyzer directly from OboeTester in external/oboe
+#define APP_VERSION             "0.5.01"
 
 // Tag for machine readable results as property = value pairs
 #define RESULT_TAG              "RESULT: "
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index 4bba436..e670642 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -421,7 +421,9 @@
         printf("      -f{0|1|2} set format\n");
         printf("          0 = UNSPECIFIED\n");
         printf("          1 = PCM_I16\n");
-        printf("          2 = FLOAT\n");
+        printf("          2 = PCM_FLOAT\n");
+        printf("          3 = PCM_I24_PACKED\n");
+        printf("          4 = PCM_I32\n");
         printf("      -i{inputPreset} eg. 5 for AAUDIO_INPUT_PRESET_CAMCORDER\n");
         printf("      -m{0|1|2|3} set MMAP policy\n");
         printf("          0 = _UNSPECIFIED, use aaudio.mmap_policy system property, default\n");
diff --git a/media/libaaudio/examples/utils/AAudioExampleUtils.h b/media/libaaudio/examples/utils/AAudioExampleUtils.h
index 46b8895..5819dfd 100644
--- a/media/libaaudio/examples/utils/AAudioExampleUtils.h
+++ b/media/libaaudio/examples/utils/AAudioExampleUtils.h
@@ -32,6 +32,7 @@
 #define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
 #define NANOS_PER_SECOND      (NANOS_PER_MILLISECOND * 1000)
 
+// Use template functions to avoid warnings about unused static functions.
 template <class T = aaudio_sharing_mode_t>
 const char *getSharingModeText(aaudio_sharing_mode_t mode) {
     const char *text = "unknown";
@@ -48,6 +49,7 @@
     return text;
 }
 
+template <class T = aaudio_performance_mode_t>
 const char *getPerformanceModeText(aaudio_performance_mode_t mode) {
     const char *text = "unknown";
     switch (mode) {
@@ -66,6 +68,7 @@
     return text;
 }
 
+template <class T = aaudio_direction_t>
 const char *getDirectionText(aaudio_direction_t direction) {
     const char *text = "unknown";
     switch (direction) {
@@ -81,6 +84,29 @@
     return text;
 }
 
+template <class T = aaudio_direction_t>
+constexpr int32_t getBytesPerSample(aaudio_format_t format) {
+    switch (format) {
+        case AAUDIO_FORMAT_PCM_I16:
+            return 2;
+        case AAUDIO_FORMAT_PCM_FLOAT:
+            return 4;
+        case AAUDIO_FORMAT_PCM_I24_PACKED:
+            return 3;
+        case AAUDIO_FORMAT_PCM_I32:
+            return 4;
+        default:
+            return -1;
+    }
+}
+
+// Return true if the CPU is natively little-endian.
+inline bool isNativeLittleEndian() {
+    // If the first byte of the word in memory is 1 then the CPU is little-endian.
+    constexpr union { unsigned u; unsigned char c[sizeof(unsigned)]; } one = {1};
+    return one.c[0] != 0;
+}
+
 template <class T = int64_t>
 void convertNanosecondsToTimespec(int64_t nanoseconds, struct timespec *time) {
     time->tv_sec = nanoseconds / NANOS_PER_SECOND;
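
The endianness helper added above reads the first byte of a union initialized to 1. A small sketch that mirrors that trick and cross-checks it against memcpy-based byte inspection, the strictly portable alternative (names below are local to the sketch):

// Hedged sketch: the union-based endianness probe used above, cross-checked
// against memcpy-based byte inspection (the strictly portable variant).
#include <cstdint>
#include <cstdio>
#include <cstring>

static bool isLittleEndianUnion() {
    union { unsigned u; unsigned char c[sizeof(unsigned)]; } one = {1};
    return one.c[0] != 0;  // first byte is 1 only on a little-endian CPU
}

int main() {
    const uint32_t probe = 0x01020304;
    unsigned char bytes[sizeof(probe)];
    std::memcpy(bytes, &probe, sizeof(probe));
    const bool littleByMemcpy = (bytes[0] == 0x04);  // least significant byte stored first
    printf("union probe: %s, memcpy probe: %s\n",
           isLittleEndianUnion() ? "little-endian" : "big-endian",
           littleByMemcpy ? "little-endian" : "big-endian");
    return 0;
}
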
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index fd1fc45..7daac20 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -359,22 +359,38 @@
 
     int32_t samplesPerFrame = AAudioStream_getChannelCount(stream);
 
-
-    int numActiveOscilators = (samplesPerFrame > MAX_CHANNELS) ? MAX_CHANNELS : samplesPerFrame;
+    int numActiveOscillators = std::min(samplesPerFrame, MAX_CHANNELS);
     switch (AAudioStream_getFormat(stream)) {
         case AAUDIO_FORMAT_PCM_I16: {
             int16_t *audioBuffer = (int16_t *) audioData;
-            for (int i = 0; i < numActiveOscilators; ++i) {
-                sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
-                                                    numFrames);
+            for (int i = 0; i < numActiveOscillators; ++i) {
+                sineData->sineOscillators[i].render(&audioBuffer[i],
+                                                    samplesPerFrame, numFrames);
             }
         }
             break;
         case AAUDIO_FORMAT_PCM_FLOAT: {
             float *audioBuffer = (float *) audioData;
-            for (int i = 0; i < numActiveOscilators; ++i) {
-                sineData->sineOscillators[i].render(&audioBuffer[i], samplesPerFrame,
-                                                    numFrames);
+            for (int i = 0; i < numActiveOscillators; ++i) {
+                sineData->sineOscillators[i].render(&audioBuffer[i],
+                                                    samplesPerFrame, numFrames);
+            }
+        }
+            break;
+        case AAUDIO_FORMAT_PCM_I24_PACKED: {
+            uint8_t *audioBuffer = (uint8_t *) audioData;
+            for (int i = 0; i < numActiveOscillators; ++i) {
+                static const int bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+                sineData->sineOscillators[i].render24(&audioBuffer[i * bytesPerSample],
+                                                      samplesPerFrame, numFrames);
+            }
+        }
+            break;
+        case AAUDIO_FORMAT_PCM_I32: {
+            int32_t *audioBuffer = (int32_t *) audioData;
+            for (int i = 0; i < numActiveOscillators; ++i) {
+                sineData->sineOscillators[i].render(&audioBuffer[i],
+                                                    samplesPerFrame, numFrames);
             }
         }
             break;
diff --git a/media/libaaudio/examples/utils/SineGenerator.h b/media/libaaudio/examples/utils/SineGenerator.h
index 9e6d46d..66a08fd 100644
--- a/media/libaaudio/examples/utils/SineGenerator.h
+++ b/media/libaaudio/examples/utils/SineGenerator.h
@@ -41,20 +41,54 @@
         }
     }
 
+    float next() {
+        float value = sinf(mPhase) * mAmplitude;
+        advancePhase();
+        return value;
+    }
+
     void render(int16_t *buffer, int32_t channelStride, int32_t numFrames) {
         int sampleIndex = 0;
         for (int i = 0; i < numFrames; i++) {
-            buffer[sampleIndex] = (int16_t) (INT16_MAX * sin(mPhase) * mAmplitude);
+            buffer[sampleIndex] = (int16_t) (INT16_MAX * next());
             sampleIndex += channelStride;
-            advancePhase();
         }
     }
+
     void render(float *buffer, int32_t channelStride, int32_t numFrames) {
         int sampleIndex = 0;
         for (int i = 0; i < numFrames; i++) {
-            buffer[sampleIndex] = sin(mPhase) * mAmplitude;
+            buffer[sampleIndex] = next();
             sampleIndex += channelStride;
-            advancePhase();
+        }
+    }
+
+    void render(int32_t *buffer, int32_t channelStride, int32_t numFrames) {
+        int sampleIndex = 0;
+        for (int i = 0; i < numFrames; i++) {
+            buffer[sampleIndex] = (int32_t) (INT32_MAX * next());
+            sampleIndex += channelStride;
+        }
+    }
+
+    void render24(uint8_t *buffer, int32_t channelStride, int32_t numFrames) {
+        int sampleIndex = 0;
+        constexpr int32_t INT24_MAX = (1 << 23) - 1;
+        constexpr int bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+        const bool isLittleEndian = isNativeLittleEndian();
+        for (int i = 0; i < numFrames; i++) {
+            int32_t sample = (int32_t) (INT24_MAX * next());
+            uint32_t usample = (uint32_t) sample;
+            if (isLittleEndian) {
+                buffer[sampleIndex] = usample; // little end first
+                buffer[sampleIndex + 1] = usample >> 8;
+                buffer[sampleIndex + 2] = usample >> 16;
+            } else {
+                buffer[sampleIndex] = usample >> 16; // big end first
+                buffer[sampleIndex + 1] = usample >> 8;
+                buffer[sampleIndex + 2] = usample;
+            }
+            sampleIndex += channelStride * bytesPerSample;
         }
     }
 
@@ -100,4 +134,3 @@
 };
 
 #endif /* SINE_GENERATOR_H */
-
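
The new render24() packs each sample into three little-endian bytes. A short sketch of that packing and the matching sign-extending unpack, using an arbitrary sample value (not part of the patch):

// Hedged sketch: pack a signed 24-bit sample into 3 little-endian bytes,
// as render24() above does, then sign-extend it back to verify the round trip.
#include <cstdint>
#include <cstdio>

int main() {
    const int32_t sample = -1234567;           // arbitrary value that fits in 24 bits
    const uint32_t u = (uint32_t) sample;
    const uint8_t bytes[3] = { (uint8_t) u,             // little end first
                               (uint8_t)(u >> 8),
                               (uint8_t)(u >> 16) };

    // Unpack: reassemble the 3 bytes and sign-extend from bit 23.
    int32_t restored = (int32_t)((uint32_t) bytes[0]
                               | ((uint32_t) bytes[1] << 8)
                               | ((uint32_t) bytes[2] << 16));
    if (restored & 0x800000) {
        restored |= ~0xFFFFFF;                 // propagate the sign into the top byte
    }
    printf("original = %d, restored = %d\n", sample, restored);
    return 0;
}
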
diff --git a/media/libaaudio/examples/utils/dummy.cpp b/media/libaaudio/examples/utils/dummy.cpp
deleted file mode 100644
index 8ef7e36..0000000
--- a/media/libaaudio/examples/utils/dummy.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-/**
- * Dummy file needed to get Android Studio to scan this folder.
- */
-
-int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/examples/utils/unused.cpp b/media/libaaudio/examples/utils/unused.cpp
new file mode 100644
index 0000000..9a5205e
--- /dev/null
+++ b/media/libaaudio/examples/utils/unused.cpp
@@ -0,0 +1,5 @@
+/**
+ * Unused file required to get Android Studio to scan this folder.
+ */
+
+int g_DoNotUseThisVariable = 0;
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index aa25e67..1c7e0f1 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "write_sine",
     srcs: ["src/write_sine.cpp"],
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 8e33a31..33d07f0 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -47,9 +47,11 @@
     int32_t  framesToPlay = 0;
     int32_t  framesLeft = 0;
     int32_t  xRunCount = 0;
-    int      numActiveOscilators = 0;
+    int      numActiveOscillators = 0;
     float   *floatData = nullptr;
     int16_t *shortData = nullptr;
+    int32_t *int32Data = nullptr;
+    uint8_t *byteData = nullptr;
 
     int      testFd = -1;
 
@@ -57,7 +59,7 @@
     // in a buffer if we hang or crash.
     setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
 
-    printf("%s - Play a sine wave using AAudio V0.1.3\n", argv[0]);
+    printf("%s - Play a sine wave using AAudio V0.1.4\n", argv[0]);
 
     if (argParser.parseArgs(argc, argv)) {
         return EXIT_FAILURE;
@@ -91,13 +93,23 @@
     printf("Buffer: framesPerWrite = %d\n",framesPerWrite);
 
     // Allocate a buffer for the audio data.
-    if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
-        floatData = new float[framesPerWrite * actualChannelCount];
-    } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
-        shortData = new int16_t[framesPerWrite * actualChannelCount];
-    } else {
-        printf("ERROR Unsupported data format!\n");
-        goto finish;
+    switch (actualDataFormat) {
+        case AAUDIO_FORMAT_PCM_FLOAT:
+            floatData = new float[framesPerWrite * actualChannelCount];
+            break;
+        case AAUDIO_FORMAT_PCM_I16:
+            shortData = new int16_t[framesPerWrite * actualChannelCount];
+            break;
+        case AAUDIO_FORMAT_PCM_I24_PACKED:
+            byteData = new uint8_t[framesPerWrite * actualChannelCount
+                                   * getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED)];
+            break;
+        case AAUDIO_FORMAT_PCM_I32:
+            int32Data = new int32_t[framesPerWrite * actualChannelCount];
+            break;
+        default:
+            printf("ERROR Unsupported data format!\n");
+            goto finish;
     }
 
     testFd = open("/data/aaudio_temp.raw", O_CREAT | O_RDWR, S_IRWXU);
@@ -117,29 +129,56 @@
     // Play for a while.
     framesToPlay = actualSampleRate * argParser.getDurationSeconds();
     framesLeft = framesToPlay;
-    numActiveOscilators = (actualChannelCount > MAX_CHANNELS) ? MAX_CHANNELS : actualChannelCount;
+    numActiveOscillators = (actualChannelCount > MAX_CHANNELS) ? MAX_CHANNELS : actualChannelCount;
     while (framesLeft > 0) {
         // Render as FLOAT or PCM
-        if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
-            for (int i = 0; i < numActiveOscilators; ++i) {
-                myData.sineOscillators[i].render(&floatData[i], actualChannelCount,
-                                                  framesPerWrite);
-            }
-        } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
-            for (int i = 0; i < numActiveOscilators; ++i) {
-                myData.sineOscillators[i].render(&shortData[i], actualChannelCount,
-                                                  framesPerWrite);
-            }
+        switch (actualDataFormat) {
+            case AAUDIO_FORMAT_PCM_FLOAT:
+                for (int i = 0; i < numActiveOscillators; ++i) {
+                    myData.sineOscillators[i].render(&floatData[i], actualChannelCount,
+                                                     framesPerWrite);
+                }
+                break;
+            case AAUDIO_FORMAT_PCM_I16:
+                for (int i = 0; i < numActiveOscillators; ++i) {
+                    myData.sineOscillators[i].render(&shortData[i], actualChannelCount,
+                                                     framesPerWrite);
+                }
+                break;
+            case AAUDIO_FORMAT_PCM_I32:
+                for (int i = 0; i < numActiveOscillators; ++i) {
+                    myData.sineOscillators[i].render(&int32Data[i], actualChannelCount,
+                                                     framesPerWrite);
+                }
+                break;
+            case AAUDIO_FORMAT_PCM_I24_PACKED:
+                for (int i = 0; i < numActiveOscillators; ++i) {
+                    static const int
+                        bytesPerSample = getBytesPerSample(AAUDIO_FORMAT_PCM_I24_PACKED);
+                    myData.sineOscillators[i].render24(&byteData[i * bytesPerSample],
+                                                       actualChannelCount,
+                                                       framesPerWrite);
+                }
+                break;
         }
 
         // Write audio data to the stream.
         int64_t timeoutNanos = 1000 * NANOS_PER_MILLISECOND;
         int32_t minFrames = (framesToPlay < framesPerWrite) ? framesToPlay : framesPerWrite;
         int32_t actual = 0;
-        if (actualDataFormat == AAUDIO_FORMAT_PCM_FLOAT) {
-            actual = AAudioStream_write(aaudioStream, floatData, minFrames, timeoutNanos);
-        } else if (actualDataFormat == AAUDIO_FORMAT_PCM_I16) {
-            actual = AAudioStream_write(aaudioStream, shortData, minFrames, timeoutNanos);
+        switch (actualDataFormat) {
+            case AAUDIO_FORMAT_PCM_FLOAT:
+                actual = AAudioStream_write(aaudioStream, floatData, minFrames, timeoutNanos);
+                break;
+            case AAUDIO_FORMAT_PCM_I16:
+                actual = AAudioStream_write(aaudioStream, shortData, minFrames, timeoutNanos);
+                break;
+            case AAUDIO_FORMAT_PCM_I32:
+                actual = AAudioStream_write(aaudioStream, int32Data, minFrames, timeoutNanos);
+                break;
+            case AAUDIO_FORMAT_PCM_I24_PACKED:
+                actual = AAudioStream_write(aaudioStream, byteData, minFrames, timeoutNanos);
+                break;
         }
         if (actual < 0) {
             fprintf(stderr, "ERROR - AAudioStream_write() returned %d\n", actual);
@@ -196,6 +235,8 @@
 
     delete[] floatData;
     delete[] shortData;
+    delete[] int32Data;
+    delete[] byteData;
     printf("exiting - AAudio result = %d = %s\n", result, AAudio_convertResultToText(result));
     return (result != AAUDIO_OK) ? EXIT_FAILURE : EXIT_SUCCESS;
 }
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index ca60233..cdc987b 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -31,7 +31,7 @@
 #include "AAudioSimplePlayer.h"
 #include "AAudioArgsParser.h"
 
-#define APP_VERSION  "0.1.7"
+#define APP_VERSION  "0.1.8"
 
 constexpr int32_t kDefaultHangTimeMSec = 10;
 
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a47f189..4b08295 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -29,6 +29,8 @@
 #ifndef AAUDIO_AAUDIO_H
 #define AAUDIO_AAUDIO_H
 
+#include <stdbool.h>
+#include <stdint.h>
 #include <time.h>
 
 #ifdef __cplusplus
@@ -63,7 +65,7 @@
 
     /**
      * This format uses the int16_t data type.
-     * The maximum range of the data is -32768 to 32767.
+     * The maximum range of the data is -32768 (0x8000) to 32767 (0x7FFF).
      */
     AAUDIO_FORMAT_PCM_I16,
 
@@ -72,10 +74,37 @@
      * The nominal range of the data is [-1.0f, 1.0f).
      * Values outside that range may be clipped.
      *
-     * See also 'floatData' at
-     * https://developer.android.com/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)
+     * See also the float data in
+     * <a href="/reference/android/media/AudioTrack#write(float[],%20int,%20int,%20int)">
+     *   write(float[], int, int, int)</a>.
      */
-    AAUDIO_FORMAT_PCM_FLOAT
+    AAUDIO_FORMAT_PCM_FLOAT,
+
+    /**
+     * This format uses 24-bit samples packed into 3 bytes.
+     * The bytes are in little-endian order, so the least significant byte
+     * comes first in the byte array.
+     *
+     * The maximum range of the data is -8388608 (0x800000)
+     * to 8388607 (0x7FFFFF).
+     *
+     * Note that the lower precision bits may be ignored by the device.
+     *
+     * Available since API level 31.
+     */
+    AAUDIO_FORMAT_PCM_I24_PACKED,
+
+    /**
+     * This format uses 32-bit samples stored in an int32_t data type.
+     * The maximum range of the data is -2147483648 (0x80000000)
+     * to 2147483647 (0x7FFFFFFF).
+     *
+     * Note that the lower precision bits may be ignored by the device.
+     *
+     * Available since API level 31.
+     */
+    AAUDIO_FORMAT_PCM_I32
+
 };
 typedef int32_t aaudio_format_t;
 
@@ -170,21 +199,69 @@
 };
 typedef int32_t  aaudio_result_t;
 
+/**
+ * AAudio Stream states, for details, refer to
+ * <a href="/ndk/guides/audio/aaudio/aaudio#using-streams">Using an Audio Stream</a>
+ */
 enum
 {
+
+    /**
+     * The stream is created but not initialized yet.
+     */
     AAUDIO_STREAM_STATE_UNINITIALIZED = 0,
+    /**
+     * The stream is in an unrecognized state.
+     */
     AAUDIO_STREAM_STATE_UNKNOWN,
+
+    /**
+     * The stream is open and ready to use.
+     */
     AAUDIO_STREAM_STATE_OPEN,
+    /**
+     * The stream is just starting up.
+     */
     AAUDIO_STREAM_STATE_STARTING,
+    /**
+     * The stream has started.
+     */
     AAUDIO_STREAM_STATE_STARTED,
+    /**
+     * The stream is pausing.
+     */
     AAUDIO_STREAM_STATE_PAUSING,
+    /**
+     * The stream has paused, could be restarted or flushed.
+     */
     AAUDIO_STREAM_STATE_PAUSED,
+    /**
+     * The stream is being flushed.
+     */
     AAUDIO_STREAM_STATE_FLUSHING,
+    /**
+     * The stream is flushed, ready to be restarted.
+     */
     AAUDIO_STREAM_STATE_FLUSHED,
+    /**
+     * The stream is stopping.
+     */
     AAUDIO_STREAM_STATE_STOPPING,
+    /**
+     * The stream has been stopped.
+     */
     AAUDIO_STREAM_STATE_STOPPED,
+    /**
+     * The stream is closing.
+     */
     AAUDIO_STREAM_STATE_CLOSING,
+    /**
+     * The stream has been closed.
+     */
     AAUDIO_STREAM_STATE_CLOSED,
+    /**
+     * The stream is disconnected from the audio device.
+     */
     AAUDIO_STREAM_STATE_DISCONNECTED
 };
 typedef int32_t aaudio_stream_state_t;
@@ -234,7 +311,8 @@
  * This information is used by certain platforms or routing policies
  * to make more refined volume or routing decisions.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -335,7 +413,8 @@
  * an audio book application) this information might be used by the audio framework to
  * enforce audio focus.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 28.
@@ -415,7 +494,8 @@
 /**
  * Specifying if audio may or may not be captured by other apps or the system.
  *
- * Note that these match the equivalent values in {@link android.media.AudioAttributes}
+ * Note that these match the equivalent values in
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>
  * in the Android Java API.
  *
  * Added in API level 29.
@@ -427,10 +507,11 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On {@link android.os.Build.VERSION_CODES#Q}, this means only {@link #AAUDIO_USAGE_MEDIA}
-     * and {@link #AAUDIO_USAGE_GAME} may be captured.
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES.Q</a>,
+     * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_ALL}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
+     * ALLOW_CAPTURE_BY_ALL</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_ALL = 1,
     /**
@@ -438,8 +519,9 @@
      *
      * System apps can capture for many purposes like accessibility, user guidance...
      * but have strong restriction. See
-     * {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_SYSTEM} for what the system apps
-     * can do with the capture audio.
+     * <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_SYSTEM">
+     * ALLOW_CAPTURE_BY_SYSTEM</a>
+     * for what the system apps can do with the captured audio.
      */
     AAUDIO_ALLOW_CAPTURE_BY_SYSTEM = 2,
     /**
@@ -447,7 +529,8 @@
      *
      * It is encouraged to use {@link #AAUDIO_ALLOW_CAPTURE_BY_SYSTEM} instead of this value as system apps
      * provide significant and useful features for the user (eg. accessibility).
-     * See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_NONE}.
+     * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_NONE">
+     * ALLOW_CAPTURE_BY_NONE</a>.
      */
     AAUDIO_ALLOW_CAPTURE_BY_NONE = 3,
 };
@@ -555,6 +638,38 @@
                                                 int32_t deviceId) __INTRODUCED_IN(26);
 
 /**
+ * Declare the name of the package creating the stream.
+ *
+ * This is usually {@code Context#getPackageName()}.
+ *
+ * The default, if you do not call this function, is a random package in the calling UID.
+ * The vast majority of apps have only one package per calling UID. If the package
+ * name does not match the calling UID, then requests will be rejected.
+ *
+ * Available since API level 31.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param packageName packageName of the calling app.
+ */
+AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
+                                                   const char * packageName) __INTRODUCED_IN(31);
+
+/**
+ * Declare the attribution tag of the context creating the stream.
+ *
+ * This is usually {@code Context#getAttributionTag()}.
+ *
+ * The default, if you do not call this function, is null.
+ *
+ * Available since API level 31.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param attributionTag attributionTag of the calling context.
+ */
+AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* builder,
+        const char * attributionTag) __INTRODUCED_IN(31);
+
+/**
  * Request a sample rate in Hertz.
  *
  * The default, if you do not call this function, is {@link #AAUDIO_UNSPECIFIED}.
@@ -687,7 +802,7 @@
         aaudio_performance_mode_t mode) __INTRODUCED_IN(26);
 
 /**
- * Set the intended use case for the stream.
+ * Set the intended use case for the output stream.
  *
  * The AAudio system will use this information to optimize the
  * behavior of the stream.
@@ -704,7 +819,7 @@
         aaudio_usage_t usage) __INTRODUCED_IN(28);
 
 /**
- * Set the type of audio data that the stream will carry.
+ * Set the type of audio data that the output stream will carry.
  *
  * The AAudio system will use this information to optimize the
  * behavior of the stream.
@@ -746,7 +861,9 @@
  * The default is {@link #AAUDIO_ALLOW_CAPTURE_BY_ALL}.
  *
  * Note that an application can also set its global policy, in which case the most restrictive
- * policy is always applied. See {@link android.media.AudioAttributes#setAllowedCapturePolicy(int)}
+ * policy is always applied. See
+ * <a href="/reference/android/media/AudioManager#setAllowedCapturePolicy(int)">
+ * setAllowedCapturePolicy(int)</a>
  *
  * Available since API level 29.
  *
@@ -894,8 +1011,9 @@
  * It will stop being called after AAudioStream_requestPause() or
  * AAudioStream_requestStop() is called.
  *
- * This callback function will be called on a real-time thread owned by AAudio. See
- * {@link #AAudioStream_dataCallback} for more information.
+ * This callback function will be called on a real-time thread owned by AAudio.
+ * Low latency streams may have callback threads with a higher priority than normal streams.
+ * See {@link #AAudioStream_dataCallback} for more information.
  *
  * Note that the AAudio callbacks will never be called simultaneously from multiple threads.
  *
@@ -1022,7 +1140,6 @@
 // Stream Control
 // ============================================================
 
-#if __ANDROID_API__ >= 30
 /**
  * Free the audio resources associated with a stream created by
  * AAudioStreamBuilder_openStream().
@@ -1035,11 +1152,17 @@
  * but still allow queries to the stream to occur from other threads. This often
  * happens if you are monitoring stream progress from a UI thread.
  *
+ * NOTE: This function is only fully implemented for MMAP streams,
+ * which are low latency streams supported by some devices.
+ * On other "legacy" streams, some audio resources will still be in use
+ * and some callbacks may still be in progress after this call.
+ *
+ * Available since API level 30.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
 AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* stream) __INTRODUCED_IN(30);
-#endif // __ANDROID_API__
 
 /**
  * Delete the internal data structures associated with the stream created
@@ -1047,6 +1170,8 @@
  *
  * If AAudioStream_release() has not been called then it will be called automatically.
  *
+ * Available since API level 26.
+ *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
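
Tying the AAudio.h additions together, a hedged usage sketch of the new builder calls and formats documented above; the package name, attribution tag, and the choice of 24-bit output are placeholders, and error handling is trimmed. The device may still grant a different format than the one requested, so real code should check AAudioStream_getFormat() after opening.

// Hedged sketch: open an output stream using the attribution and format
// additions documented in this header. Error handling is trimmed for brevity;
// the package name and attribution tag values are placeholders.
#include <aaudio/AAudio.h>
#include <stdio.h>

int main() {
    AAudioStreamBuilder *builder = nullptr;
    if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return 1;

    // New in API level 31 (declared above): identify the calling package and context.
    AAudioStreamBuilder_setPackageName(builder, "com.example.myapp");
    AAudioStreamBuilder_setAttributionTag(builder, "exampleTag");
    // One of the new sample formats added by this patch.
    AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I24_PACKED);

    AAudioStream *stream = nullptr;
    aaudio_result_t result = AAudioStreamBuilder_openStream(builder, &stream);
    AAudioStreamBuilder_delete(builder);
    if (result != AAUDIO_OK) return 1;

    printf("granted format = %d\n", AAudioStream_getFormat(stream));
    AAudioStream_requestStart(stream);
    // ... deliver audio with AAudioStream_write() or a data callback ...
    AAudioStream_requestStop(stream);
    AAudioStream_release(stream);  // frees audio resources; queries still allowed
    AAudioStream_close(stream);
    return 0;
}
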
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 717f31a..33a5c7f 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libaaudio",
 
@@ -21,6 +30,7 @@
     ],
 
     cflags: [
+        "-Wthread-safety",
         "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
@@ -42,6 +52,7 @@
         "libcutils",
         "libutils",
         "libbinder",
+        "libpermission",
     ],
 
     sanitize: {
@@ -75,6 +86,10 @@
     ],
     export_header_lib_headers: ["libaaudio_headers"],
 
+    export_shared_lib_headers: [
+        "framework-permission-aidl-cpp",
+    ],
+
     shared_libs: [
         "libaudioclient",
         "libaudioutils",
@@ -85,6 +100,9 @@
         "libcutils",
         "libutils",
         "libbinder",
+        "framework-permission-aidl-cpp",
+        "aaudio-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     cflags: [
@@ -114,11 +132,10 @@
         "client/AudioStreamInternalPlay.cpp",
         "client/IsochronousClockModel.cpp",
         "binding/AudioEndpointParcelable.cpp",
+        "binding/AAudioBinderAdapter.cpp",
         "binding/AAudioBinderClient.cpp",
         "binding/AAudioStreamRequest.cpp",
         "binding/AAudioStreamConfiguration.cpp",
-        "binding/IAAudioClient.cpp",
-        "binding/IAAudioService.cpp",
         "binding/RingBufferParcelable.cpp",
         "binding/SharedMemoryParcelable.cpp",
         "binding/SharedRegionParcelable.cpp",
@@ -129,12 +146,45 @@
         "flowgraph/SinkFloat.cpp",
         "flowgraph/SinkI16.cpp",
         "flowgraph/SinkI24.cpp",
+        "flowgraph/SinkI32.cpp",
         "flowgraph/SourceFloat.cpp",
         "flowgraph/SourceI16.cpp",
         "flowgraph/SourceI24.cpp",
+        "flowgraph/SourceI32.cpp",
     ],
     sanitize: {
         integer_overflow: true,
         misc_undefined: ["bounds"],
     },
 }
+
+aidl_interface {
+    name: "aaudio-aidl",
+    unstable: true,
+    local_include_dir: "binding/aidl",
+    srcs: [
+        "binding/aidl/aaudio/Endpoint.aidl",
+        "binding/aidl/aaudio/RingBuffer.aidl",
+        "binding/aidl/aaudio/SharedRegion.aidl",
+        "binding/aidl/aaudio/StreamParameters.aidl",
+        "binding/aidl/aaudio/StreamRequest.aidl",
+        "binding/aidl/aaudio/IAAudioClient.aidl",
+        "binding/aidl/aaudio/IAAudioService.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "shared-file-region-aidl",
+        "framework-permission-aidl"
+    ],
+    backend:
+    {
+        cpp: {
+            enabled: true,
+        },
+        java: {
+            // TODO: need to have audio_common-aidl available in Java to enable
+            //       this.
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
new file mode 100644
index 0000000..6e3a1c8
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binding/AAudioBinderAdapter.h>
+#include <media/AidlConversionUtil.h>
+#include <utility/AAudioUtilities.h>
+
+namespace aaudio {
+
+using android::aidl_utils::statusTFromBinderStatus;
+using android::binder::Status;
+
+AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate)
+        : mDelegate(delegate) {}
+
+void AAudioBinderAdapter::registerClient(const android::sp<IAAudioClient>& client) {
+    mDelegate->registerClient(client);
+}
+
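+// Each method below simply forwards the call to the generated AIDL proxy. When the binder
+// transaction itself fails, the binder Status is converted into an aaudio_result_t so
+// callers of AAudioServiceInterface only ever see AAudio error codes.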
+aaudio_handle_t AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
+                                                AAudioStreamConfiguration& config) {
+    aaudio_handle_t result;
+    StreamParameters params;
+    Status status = mDelegate->openStream(request.parcelable(),
+                                          &params,
+                                          &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    config = params;
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::closeStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->closeStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::getStreamDescription(aaudio_handle_t streamHandle,
+                                                          AudioEndpointParcelable& endpointOut) {
+    aaudio_result_t result;
+    Endpoint endpoint;
+    Status status = mDelegate->getStreamDescription(streamHandle,
+                                                    &endpoint,
+                                                    &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    endpointOut = std::move(endpoint);
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::startStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->startStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::pauseStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->pauseStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::stopStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->stopStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::flushStream(aaudio_handle_t streamHandle) {
+    aaudio_result_t result;
+    Status status = mDelegate->flushStream(streamHandle, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::registerAudioThread(aaudio_handle_t streamHandle,
+                                                         pid_t clientThreadId,
+                                                         int64_t periodNanoseconds) {
+    aaudio_result_t result;
+    Status status = mDelegate->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(aaudio_handle_t streamHandle,
+                                                           pid_t clientThreadId) {
+    aaudio_result_t result;
+    Status status = mDelegate->unregisterAudioThread(streamHandle, clientThreadId, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    return result;
+}
+
+}  // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
new file mode 100644
index 0000000..5e9ab57
--- /dev/null
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aaudio/IAAudioService.h>
+#include <binding/AAudioServiceInterface.h>
+
+namespace aaudio {
+
+/**
+ * An adapter that takes in an underlying IAAudioService and exposes an
+ * AAudioServiceInterface.
+ *
+ * This class is abstract: the client is expected to inherit from this class and implement those
+ * methods from AAudioServiceInterface that don't have counterparts in IAAudioService.
+ */
+class AAudioBinderAdapter : public AAudioServiceInterface {
+public:
+    explicit AAudioBinderAdapter(IAAudioService* delegate);
+
+    void registerClient(const android::sp<IAAudioClient>& client) override;
+
+    aaudio_handle_t openStream(const AAudioStreamRequest& request,
+                               AAudioStreamConfiguration& configuration) override;
+
+    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+                                         AudioEndpointParcelable& endpoint) override;
+
+    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+                                        pid_t clientThreadId,
+                                        int64_t periodNanoseconds) override;
+
+    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+                                          pid_t clientThreadId) override;
+
+private:
+    IAAudioService* const mDelegate;
+};
+
+}  // namespace aaudio
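
The concrete subclass used by the client is the nested Adapter class in AAudioBinderClient.h later in this patch. Purely as a hedged sketch of the pattern described in the class comment, a subclass (MyAdapter is a made-up name) only has to supply the AAudioServiceInterface methods that have no IAAudioService counterpart, such as startClient() and stopClient(), which the patch's own Adapter overrides:

    class MyAdapter : public aaudio::AAudioBinderAdapter {
    public:
        using aaudio::AAudioBinderAdapter::AAudioBinderAdapter;  // takes an IAAudioService*

        aaudio_result_t startClient(aaudio_handle_t /*streamHandle*/,
                                    const android::AudioClient& /*client*/,
                                    const audio_attributes_t* /*attr*/,
                                    audio_port_handle_t* /*clientHandle*/) override {
            return AAUDIO_ERROR_UNAVAILABLE;  // not meaningful on the client side
        }

        aaudio_result_t stopClient(aaudio_handle_t /*streamHandle*/,
                                   audio_port_handle_t /*clientHandle*/) override {
            return AAUDIO_ERROR_UNAVAILABLE;
        }
    };
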
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 7b0d31f..fa5a2da 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -19,35 +19,30 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
-#include <binder/IInterface.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <utils/Mutex.h>
 #include <utils/RefBase.h>
 #include <utils/Singleton.h>
-#include <media/AudioSystem.h>
-
 #include <aaudio/AAudio.h>
 
 #include "AudioEndpointParcelable.h"
-#include "binding/AAudioBinderClient.h"
-//#include "binding/AAudioStreamRequest.h"
-//#include "binding/AAudioStreamConfiguration.h"
-//#include "binding/IAAudioService.h"
-//#include "binding/AAudioServiceMessage.h"
 
-//#include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderClient.h"
+
+#define AAUDIO_SERVICE_NAME  "media.aaudio"
 
 using android::String16;
 using android::IServiceManager;
 using android::defaultServiceManager;
 using android::interface_cast;
 using android::IInterface;
-using android::IAAudioService;
 using android::Mutex;
 using android::ProcessState;
 using android::sp;
+using android::status_t;
 using android::wp;
+using android::binder::Status;
 
 using namespace aaudio;
 
@@ -67,20 +62,18 @@
 AAudioBinderClient::~AAudioBinderClient() {
     ALOGV("%s - destroying %p", __func__, this);
     Mutex::Autolock _l(mServiceLock);
-    if (mAAudioService != 0) {
-        IInterface::asBinder(mAAudioService)->unlinkToDeath(mAAudioClient);
-    }
 }
 
 // TODO Share code with other service clients.
 // Helper function to get access to the "AAudioService" service.
 // This code was modeled after frameworks/av/media/libaudioclient/AudioSystem.cpp
-const sp<IAAudioService> AAudioBinderClient::getAAudioService() {
+std::shared_ptr<AAudioServiceInterface> AAudioBinderClient::getAAudioService() {
+    std::shared_ptr<AAudioServiceInterface> result;
     sp<IAAudioService> aaudioService;
     bool needToRegister = false;
     {
         Mutex::Autolock _l(mServiceLock);
-        if (mAAudioService.get() == nullptr) {
+        if (mAdapter == nullptr) {
             sp<IBinder> binder;
             sp<IServiceManager> sm = defaultServiceManager();
             // Try several times to get the service.
@@ -99,7 +92,8 @@
                 if (status != NO_ERROR) {
                     ALOGE("%s() - linkToDeath() returned %d", __func__, status);
                 }
-                mAAudioService = interface_cast<IAAudioService>(binder);
+                aaudioService = interface_cast<IAAudioService>(binder);
+                mAdapter.reset(new Adapter(aaudioService, mAAudioClient));
                 needToRegister = true;
                 // Make sure callbacks can be received by mAAudioClient
                 ProcessState::self()->startThreadPool();
@@ -107,18 +101,18 @@
                 ALOGE("AAudioBinderClient could not connect to %s", AAUDIO_SERVICE_NAME);
             }
         }
-        aaudioService = mAAudioService;
+        result = mAdapter;
     }
     // Do this outside the mutex lock.
     if (needToRegister && aaudioService.get() != nullptr) { // new client?
         aaudioService->registerClient(mAAudioClient);
     }
-    return aaudioService;
+    return result;
 }
 
 void AAudioBinderClient::dropAAudioService() {
     Mutex::Autolock _l(mServiceLock);
-    mAAudioService.clear(); // force a reconnect
+    mAdapter.reset();
 }
 
 /**
@@ -127,13 +121,13 @@
 * @return handle to the stream or a negative error
 */
 aaudio_handle_t AAudioBinderClient::openStream(const AAudioStreamRequest &request,
-                                               AAudioStreamConfiguration &configurationOutput) {
+                                               AAudioStreamConfiguration &configuration) {
     aaudio_handle_t stream;
     for (int i = 0; i < 2; i++) {
-        const sp<IAAudioService> &service = getAAudioService();
+        std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
         if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-        stream = service->openStream(request, configurationOutput);
+        stream = service->openStream(request, configuration);
 
         if (stream == AAUDIO_ERROR_NO_SERVICE) {
             ALOGE("openStream lost connection to AAudioService.");
@@ -146,8 +140,9 @@
 }
 
 aaudio_result_t AAudioBinderClient::closeStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->closeStream(streamHandle);
 }
 
@@ -155,33 +150,38 @@
 * used to communicate with the underlying HAL or Service.
 */
 aaudio_result_t AAudioBinderClient::getStreamDescription(aaudio_handle_t streamHandle,
-                                                         AudioEndpointParcelable &parcelable) {
-    const sp<IAAudioService> service = getAAudioService();
+                                                         AudioEndpointParcelable& endpointOut) {
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->getStreamDescription(streamHandle, parcelable);
+
+    return service->getStreamDescription(streamHandle, endpointOut);
 }
 
 aaudio_result_t AAudioBinderClient::startStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->startStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::pauseStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->pauseStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::stopStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->stopStream(streamHandle);
 }
 
 aaudio_result_t AAudioBinderClient::flushStream(aaudio_handle_t streamHandle) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
     return service->flushStream(streamHandle);
 }
 
@@ -191,17 +191,16 @@
 aaudio_result_t AAudioBinderClient::registerAudioThread(aaudio_handle_t streamHandle,
                                                         pid_t clientThreadId,
                                                         int64_t periodNanoseconds) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->registerAudioThread(streamHandle,
-                                        clientThreadId,
-                                        periodNanoseconds);
+
+    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
 }
 
 aaudio_result_t AAudioBinderClient::unregisterAudioThread(aaudio_handle_t streamHandle,
                                                           pid_t clientThreadId) {
-    const sp<IAAudioService> service = getAAudioService();
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
-    return service->unregisterAudioThread(streamHandle,
-                                          clientThreadId);
+
+    return service->unregisterAudioThread(streamHandle, clientThreadId);
 }
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index e8c91fc..6a7b639 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -21,12 +21,15 @@
 #include <utils/Singleton.h>
 
 #include <aaudio/AAudio.h>
-#include "AAudioServiceDefinitions.h"
+#include <binder/IInterface.h>
+
+#include "aaudio/BnAAudioClient.h"
+#include "aaudio/IAAudioService.h"
 #include "AAudioServiceInterface.h"
+#include "binding/AAudioBinderAdapter.h"
 #include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioStreamConfiguration.h"
 #include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioService.h"
+#include "core/AAudioStreamParameters.h"
 
 /**
  * Implements the AAudioServiceInterface by talking to the service through Binder.
@@ -44,11 +47,7 @@
 
     virtual ~AAudioBinderClient();
 
-    const android::sp<android::IAAudioService> getAAudioService();
-
-    void dropAAudioService();
-
-    void registerClient(const android::sp<android::IAAudioClient>& client __unused) override {}
+    void registerClient(const android::sp<IAAudioClient>& client __unused) override {}
 
     /**
      * @param request info needed to create the stream
@@ -64,7 +63,7 @@
     * used to communicate with the underlying HAL or Service.
     */
     aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
-                                                 AudioEndpointParcelable &parcelable) override;
+                                         AudioEndpointParcelable &endpointOut) override;
 
     /**
      * Start the flow of data.
@@ -115,8 +114,7 @@
         ALOGW("onStreamChange called!");
     }
 
-    class AAudioClient : public android::IBinder::DeathRecipient , public android::BnAAudioClient
-    {
+    class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
     public:
         AAudioClient(android::wp<AAudioBinderClient> aaudioBinderClient)
                 : mBinderClient(aaudioBinderClient) {
@@ -132,21 +130,66 @@
         }
 
         // implement BnAAudioClient
-        void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {
+        android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+            static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
             android::sp<AAudioBinderClient> client = mBinderClient.promote();
             if (client.get() != nullptr) {
                 client->onStreamChange(handle, opcode, value);
             }
+            return android::binder::Status::ok();
         }
     private:
         android::wp<AAudioBinderClient> mBinderClient;
     };
 
-private:
+    // This adapter is used to convert the binder interface (delegate) to the
+    // AAudioServiceInterface conventions (translating between data types and their
+    // respective parcelables, and translating error codes and calling conventions).
+    // The adapter also owns the underlying service object and is responsible for
+    // unlinking its death listener when destroyed.
+    class Adapter : public AAudioBinderAdapter {
+    public:
+        Adapter(const android::sp<IAAudioService>& delegate,
+                const android::sp<AAudioClient>& aaudioClient)
+                : AAudioBinderAdapter(delegate.get()),
+                  mDelegate(delegate),
+                  mAAudioClient(aaudioClient) {}
 
-    android::Mutex                  mServiceLock;
-    android::sp<android::IAAudioService>  mAAudioService;
-    android::sp<AAudioClient>       mAAudioClient;
+        virtual ~Adapter() {
+            if (mDelegate != nullptr) {
+                android::IInterface::asBinder(mDelegate)->unlinkToDeath(mAAudioClient);
+            }
+        }
+
+        // This should never be called (the call is rejected at the AAudioBinderClient level).
+        aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+                                    const android::AudioClient& client __unused,
+                                    const audio_attributes_t* attr __unused,
+                                    audio_port_handle_t* clientHandle __unused) override {
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+            return AAUDIO_ERROR_UNAVAILABLE;
+        }
+
+        // This should never be called (the call is rejected at the AAudioBinderClient level).
+        aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+                                   audio_port_handle_t clientHandle __unused) override {
+            LOG_ALWAYS_FATAL("Shouldn't get here");
+            return AAUDIO_ERROR_UNAVAILABLE;
+        }
+
+    private:
+        android::sp<IAAudioService> mDelegate;
+        android::sp<AAudioClient> mAAudioClient;
+    };
+
+private:
+    android::Mutex                          mServiceLock;
+    std::shared_ptr<AAudioServiceInterface> mAdapter;
+    android::sp<AAudioClient>               mAAudioClient;
+
+    std::shared_ptr<AAudioServiceInterface> getAAudioService();
+
+    void dropAAudioService();
 
 };
 
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index 9c28cc7..5d11512 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -20,11 +20,11 @@
 #include <utils/StrongPointer.h>
 #include <media/AudioClient.h>
 
+#include "aaudio/IAAudioClient.h"
 #include "binding/AAudioServiceDefinitions.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
 #include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
 
 /**
  * This has the same methods as IAAudioService but without the Binder features.
@@ -40,7 +40,7 @@
     AAudioServiceInterface() {};
     virtual ~AAudioServiceInterface() = default;
 
-    virtual void registerClient(const android::sp<android::IAAudioClient>& client) = 0;
+    virtual void registerClient(const android::sp<IAAudioClient>& client) = 0;
 
     /**
      * @param request info needed to create the stream
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index b785f88..2d501ef 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -23,101 +23,66 @@
 #include <sys/mman.h>
 #include <aaudio/AAudio.h>
 
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-
 #include "binding/AAudioStreamConfiguration.h"
 
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 using namespace aaudio;
 
-AAudioStreamConfiguration::AAudioStreamConfiguration() {}
-AAudioStreamConfiguration::~AAudioStreamConfiguration() {}
+using android::media::audio::common::AudioFormat;
 
-status_t AAudioStreamConfiguration::writeToParcel(Parcel* parcel) const {
-    status_t status;
-
-    status = parcel->writeInt32(getDeviceId());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSampleRate());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSamplesPerFrame());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getSharingMode());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getFormat());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32((int32_t) getDirection());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getBufferCapacity());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getUsage());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getContentType());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getInputPreset());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32((int32_t) getAllowedCapturePolicy());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(getSessionId());
-    if (status != NO_ERROR) goto error;
-    status = parcel->writeInt32(isPrivacySensitive() ? 1 : 0);
-    if (status != NO_ERROR) goto error;
-    return NO_ERROR;
-error:
-    ALOGE("%s(): write failed = %d", __func__, status);
-    return status;
+AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
+    setSamplesPerFrame(parcelable.samplesPerFrame);
+    setSampleRate(parcelable.sampleRate);
+    setDeviceId(parcelable.deviceId);
+    static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
+    setSharingMode(parcelable.sharingMode);
+    static_assert(sizeof(audio_format_t) == sizeof(parcelable.audioFormat));
+    setFormat(static_cast<audio_format_t>(parcelable.audioFormat));
+    static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
+    setDirection(parcelable.direction);
+    static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
+    setUsage(parcelable.usage);
+    static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
+    setContentType(parcelable.contentType);
+    static_assert(sizeof(aaudio_input_preset_t) == sizeof(parcelable.inputPreset));
+    setInputPreset(parcelable.inputPreset);
+    setBufferCapacity(parcelable.bufferCapacity);
+    static_assert(
+            sizeof(aaudio_allowed_capture_policy_t) == sizeof(parcelable.allowedCapturePolicy));
+    setAllowedCapturePolicy(parcelable.allowedCapturePolicy);
+    static_assert(sizeof(aaudio_session_id_t) == sizeof(parcelable.sessionId));
+    setSessionId(parcelable.sessionId);
+    setPrivacySensitive(parcelable.isPrivacySensitive);
 }
 
-status_t AAudioStreamConfiguration::readFromParcel(const Parcel* parcel) {
-    int32_t value;
-    status_t status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setDeviceId(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSampleRate(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSamplesPerFrame(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSharingMode((aaudio_sharing_mode_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setFormat((audio_format_t) value);
+AAudioStreamConfiguration&
+AAudioStreamConfiguration::operator=(const StreamParameters& parcelable) {
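+    // Destroy and re-construct in place so assignment reuses the converting
+    // constructor above rather than repeating the field-by-field conversion.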
+    this->~AAudioStreamConfiguration();
+    new (this) AAudioStreamConfiguration(parcelable);
+    return *this;
+}
 
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setDirection((aaudio_direction_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setBufferCapacity(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setUsage((aaudio_usage_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setContentType((aaudio_content_type_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setInputPreset((aaudio_input_preset_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setAllowedCapturePolicy((aaudio_allowed_capture_policy_t) value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setSessionId(value);
-    status = parcel->readInt32(&value);
-    if (status != NO_ERROR) goto error;
-    setPrivacySensitive(value == 1);
-    return NO_ERROR;
-error:
-    ALOGE("%s(): read failed = %d", __func__, status);
-    return status;
+StreamParameters AAudioStreamConfiguration::parcelable() const {
+    StreamParameters result;
+    result.samplesPerFrame = getSamplesPerFrame();
+    result.sampleRate = getSampleRate();
+    result.deviceId = getDeviceId();
+    static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
+    result.sharingMode = getSharingMode();
+    static_assert(sizeof(audio_format_t) == sizeof(result.audioFormat));
+    result.audioFormat = static_cast<AudioFormat>(getFormat());
+    static_assert(sizeof(aaudio_direction_t) == sizeof(result.direction));
+    result.direction = getDirection();
+    static_assert(sizeof(audio_usage_t) == sizeof(result.usage));
+    result.usage = getUsage();
+    static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
+    result.contentType = getContentType();
+    static_assert(sizeof(aaudio_input_preset_t) == sizeof(result.inputPreset));
+    result.inputPreset = getInputPreset();
+    result.bufferCapacity = getBufferCapacity();
+    static_assert(sizeof(aaudio_allowed_capture_policy_t) == sizeof(result.allowedCapturePolicy));
+    result.allowedCapturePolicy = getAllowedCapturePolicy();
+    static_assert(sizeof(aaudio_session_id_t) == sizeof(result.sessionId));
+    result.sessionId = getSessionId();
+    result.isPrivacySensitive = isPrivacySensitive();
+    return result;
 }
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.h b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
index b324896..f428eb0 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.h
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.h
@@ -20,24 +20,24 @@
 #include <stdint.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/StreamParameters.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include "core/AAudioStreamParameters.h"
 
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 namespace aaudio {
 
-class AAudioStreamConfiguration : public AAudioStreamParameters, public Parcelable {
+// This is a holder for AAudioStreamParameters, which allows conversion to/from its parcelable
+// representation, StreamParameters.
+class AAudioStreamConfiguration : public AAudioStreamParameters {
 public:
-    AAudioStreamConfiguration();
-    virtual ~AAudioStreamConfiguration();
+    AAudioStreamConfiguration() = default;
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
+    explicit AAudioStreamConfiguration(const StreamParameters& parcelable);
 
-    virtual status_t readFromParcel(const Parcel* parcel) override;
+    AAudioStreamConfiguration& operator=(const StreamParameters& parcelable);
+
+    StreamParameters parcelable() const;
 };
 
 } /* namespace aaudio */
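
The header now reduces to two conversions, which might be exercised roughly as follows (a hedged sketch; it assumes the generated aaudio/StreamParameters.h from the aaudio-aidl interface above is on the include path):

    #include "binding/AAudioStreamConfiguration.h"

    aaudio::AAudioStreamConfiguration config;
    config.setSampleRate(48000);
    config.setSamplesPerFrame(2);

    aaudio::StreamParameters wire = config.parcelable();  // native -> AIDL parcelable
    aaudio::AAudioStreamConfiguration received;
    received = wire;                                       // AIDL parcelable -> native
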
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index c30c5b9..8d90034 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -21,67 +21,28 @@
 #include <stdint.h>
 
 #include <sys/mman.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
 
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioStreamConfiguration.h"
 #include "binding/AAudioStreamRequest.h"
 
-using android::NO_ERROR;
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
-
 using namespace aaudio;
 
-AAudioStreamRequest::AAudioStreamRequest()
-    : mConfiguration()
-    {}
-
-AAudioStreamRequest::~AAudioStreamRequest() {}
-
-status_t AAudioStreamRequest::writeToParcel(Parcel* parcel) const {
-    status_t status = parcel->writeInt32((int32_t) mUserId);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeBool(mSharingModeMatchRequired);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeBool(mInService);
-    if (status != NO_ERROR) goto error;
-
-    status = mConfiguration.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("writeToParcel(): write failed = %d", status);
-    return status;
+AAudioStreamRequest::AAudioStreamRequest(const StreamRequest& parcelable) :
+        mConfiguration(std::move(parcelable.params)),
+        mAttributionSource(parcelable.attributionSource),
+        mSharingModeMatchRequired(parcelable.sharingModeMatchRequired),
+        mInService(parcelable.inService) {
 }
 
-status_t AAudioStreamRequest::readFromParcel(const Parcel* parcel) {
-    int32_t temp;
-    status_t status = parcel->readInt32(&temp);
-    if (status != NO_ERROR) goto error;
-    mUserId = (uid_t) temp;
-
-    status = parcel->readBool(&mSharingModeMatchRequired);
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->readBool(&mInService);
-    if (status != NO_ERROR) goto error;
-
-    status = mConfiguration.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("readFromParcel(): read failed = %d", status);
-    return status;
+StreamRequest AAudioStreamRequest::parcelable() const {
+    StreamRequest result;
+    result.params = std::move(mConfiguration).parcelable();
+    result.attributionSource = mAttributionSource;
+    result.sharingModeMatchRequired = mSharingModeMatchRequired;
+    result.inService = mInService;
+    return result;
 }
 
 aaudio_result_t AAudioStreamRequest::validate() const {
@@ -89,8 +50,7 @@
 }
 
 void AAudioStreamRequest::dump() const {
-    ALOGD("mUserId    = %d", mUserId);
-    ALOGD("mProcessId = %d", mProcessId);
+    ALOGD("mAttributionSource  = %s", mAttributionSource.toString().c_str());
     ALOGD("mSharingModeMatchRequired = %d", mSharingModeMatchRequired);
     ALOGD("mInService = %d", mInService);
     mConfiguration.dump();
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.h b/media/libaaudio/src/binding/AAudioStreamRequest.h
index 492f69d..cc43a48 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.h
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.h
@@ -20,36 +20,28 @@
 #include <stdint.h>
 
 #include <aaudio/AAudio.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <aaudio/StreamRequest.h>
 
 #include "binding/AAudioStreamConfiguration.h"
-
-using android::status_t;
-using android::Parcel;
-using android::Parcelable;
+#include <android/content/AttributionSourceState.h>
 
 namespace aaudio {
 
-class AAudioStreamRequest : public Parcelable {
+using android::content::AttributionSourceState;
+
+class AAudioStreamRequest {
 public:
-    AAudioStreamRequest();
-    virtual ~AAudioStreamRequest();
+    AAudioStreamRequest() = default;
 
-    uid_t getUserId() const {
-        return mUserId;
+    // Construct based on a parcelable representation.
+    explicit AAudioStreamRequest(const StreamRequest& parcelable);
+
+    const AttributionSourceState &getAttributionSource() const {
+        return mAttributionSource;
     }
 
-    void setUserId(uid_t userId) {
-        mUserId = userId;
-    }
-
-    pid_t getProcessId() const {
-        return mProcessId;
-    }
-
-    void setProcessId(pid_t processId) {
-        mProcessId = processId;
+    void setAttributionSource(const AttributionSourceState &attributionSource) {
+        mAttributionSource = attributionSource;
     }
 
     bool isSharingModeMatchRequired() const {
@@ -76,18 +68,16 @@
         mInService = inService;
     }
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t validate() const;
 
     void dump() const;
 
-protected:
+    // Extract a parcelable representation of this object.
+    StreamRequest parcelable() const;
+
+private:
     AAudioStreamConfiguration  mConfiguration;
-    uid_t                      mUserId = (uid_t) -1;
-    pid_t                      mProcessId = (pid_t) -1;
+    AttributionSourceState mAttributionSource;
     bool                       mSharingModeMatchRequired = false;
+    bool                       mInService = false; // Stream opened by AAudioService
 };
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index 61d7d27..aa4ac27 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -29,22 +29,43 @@
 #include "binding/AudioEndpointParcelable.h"
 
 using android::base::unique_fd;
+using android::media::SharedFileRegion;
 using android::NO_ERROR;
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 using namespace aaudio;
 
-/**
- * Container for information about the message queues plus
- * general stream information needed by AAudio clients.
- * It contains no addresses, just sizes, offsets and file descriptors for
- * shared memory that can be passed through Binder.
- */
-AudioEndpointParcelable::AudioEndpointParcelable() {}
+AudioEndpointParcelable::AudioEndpointParcelable(Endpoint&& parcelable)
+        : mUpMessageQueueParcelable(std::move(parcelable.upMessageQueueParcelable)),
+          mDownMessageQueueParcelable(std::move(parcelable.downMessageQueueParcelable)),
+          mUpDataQueueParcelable(std::move(parcelable.upDataQueueParcelable)),
+          mDownDataQueueParcelable(std::move(parcelable.downDataQueueParcelable)),
+          mNumSharedMemories(parcelable.sharedMemories.size()) {
+    for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
+        // Re-construct.
+        mSharedMemories[i].~SharedMemoryParcelable();
+        new(&mSharedMemories[i]) SharedMemoryParcelable(std::move(parcelable.sharedMemories[i]));
+    }
+}
 
-AudioEndpointParcelable::~AudioEndpointParcelable() {}
+AudioEndpointParcelable& AudioEndpointParcelable::operator=(Endpoint&& parcelable) {
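+    // Same destroy-and-reconstruct idiom as above; the Endpoint&& constructor moves the
+    // unique FDs held by the shared memory parcelables instead of duplicating them.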
+    this->~AudioEndpointParcelable();
+    new(this) AudioEndpointParcelable(std::move(parcelable));
+    return *this;
+}
+
+Endpoint AudioEndpointParcelable::parcelable()&& {
+    Endpoint result;
+    result.upMessageQueueParcelable = std::move(mUpMessageQueueParcelable).parcelable();
+    result.downMessageQueueParcelable = std::move(mDownMessageQueueParcelable).parcelable();
+    result.upDataQueueParcelable = std::move(mUpDataQueueParcelable).parcelable();
+    result.downDataQueueParcelable = std::move(mDownDataQueueParcelable).parcelable();
+    result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
+    for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
+        result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+    }
+    return result;
+}
 
 /**
  * Add the file descriptor to the table.
@@ -60,60 +81,6 @@
     return index;
 }
 
-/**
- * The read and write must be symmetric.
- */
-status_t AudioEndpointParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mNumSharedMemories);
-    if (status != NO_ERROR) goto error;
-
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        status = mSharedMemories[i].writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    status = mUpMessageQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownMessageQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mUpDataQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownDataQueueParcelable.writeToParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return NO_ERROR;
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t AudioEndpointParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mNumSharedMemories);
-    if (status != NO_ERROR) goto error;
-
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        mSharedMemories[i].readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    status = mUpMessageQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownMessageQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mUpDataQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-    status = mDownDataQueueParcelable.readFromParcel(parcel);
-    if (status != NO_ERROR) goto error;
-
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
     aaudio_result_t result = mUpMessageQueueParcelable.resolve(mSharedMemories,
                                                            &descriptor->upMessageQueueDescriptor);
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index e4f8b9e..5237a1a 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -20,16 +20,13 @@
 #include <stdint.h>
 
 //#include <sys/mman.h>
+#include <aaudio/Endpoint.h>
 #include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
 
 #include "binding/AAudioServiceDefinitions.h"
 #include "binding/RingBufferParcelable.h"
 
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 namespace aaudio {
 
@@ -39,10 +36,15 @@
  * It contains no addresses, just sizes, offsets and file descriptors for
  * shared memory that can be passed through Binder.
  */
-class AudioEndpointParcelable : public Parcelable {
+class AudioEndpointParcelable {
 public:
-    AudioEndpointParcelable();
-    virtual ~AudioEndpointParcelable();
+    AudioEndpointParcelable() = default;
+
+    // Ctor/assignment from a parcelable representation.
+    // Since the parcelable object owns unique FDs (for shared memory blocks), move semantics are
+    // provided to avoid the need to dupe.
+    AudioEndpointParcelable(Endpoint&& parcelable);
+    AudioEndpointParcelable& operator=(Endpoint&& parcelable);
 
     /**
      * Add the file descriptor to the table.
@@ -50,16 +52,17 @@
      */
     int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(EndpointDescriptor *descriptor);
 
     aaudio_result_t close();
 
     void dump();
 
+    // Extract a parcelable representation of this object.
+    // Since our shared memory objects own a unique FD, move semantics are provided to avoid the
+    // need to dupe.
+    Endpoint parcelable()&&;
+
 public: // TODO add getters
     // Set capacityInFrames to zero if Queue is unused.
     RingBufferParcelable    mUpMessageQueueParcelable;   // server to client
diff --git a/media/libaaudio/src/binding/IAAudioClient.cpp b/media/libaaudio/src/binding/IAAudioClient.cpp
deleted file mode 100644
index c69c4e8..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.cpp
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioBinderClient.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/IAAudioClient.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Service to talk to an AAudio Client.
- *
- * The order of parameters in the Parcels must match with code in AAudioClient.cpp.
- */
-class BpAAudioClient : public BpInterface<IAAudioClient>
-{
-public:
-    explicit BpAAudioClient(const sp<IBinder>& impl)
-        : BpInterface<IAAudioClient>(impl)
-    {
-    }
-
-    void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) override {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAAudioClient::getInterfaceDescriptor());
-        data.writeInt32(handle);
-        data.writeInt32(opcode);
-        data.writeInt32(value);
-        remote()->transact(ON_STREAM_CHANGE, data,  &reply, IBinder::FLAG_ONEWAY);
-    }
-
-};
-
-// Implement an interface to the service.
-IMPLEMENT_META_INTERFACE(AAudioClient, "IAAudioClient");
-
-// The order of parameters in the Parcels must match with code in BpAAudioClient
-
-status_t BnAAudioClient::onTransact(uint32_t code, const Parcel& data,
-                                        Parcel* reply, uint32_t flags) {
-    aaudio_handle_t streamHandle;
-    int32_t opcode = 0;
-    int32_t value = 0;
-    ALOGV("BnAAudioClient::onTransact(%u) %u", code, flags);
-
-    switch(code) {
-        case ON_STREAM_CHANGE: {
-            CHECK_INTERFACE(IAAudioClient, data, reply);
-            data.readInt32(&streamHandle);
-            data.readInt32(&opcode);
-            data.readInt32(&value);
-            onStreamChange(streamHandle, opcode, value);
-            ALOGD("BnAAudioClient onStreamChange(%x, %d, %d)", streamHandle, opcode, value);
-            return NO_ERROR;
-        } break;
-
-        default:
-            // ALOGW("BnAAudioClient::onTransact not handled %u", code);
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} /* namespace android */
diff --git a/media/libaaudio/src/binding/IAAudioClient.h b/media/libaaudio/src/binding/IAAudioClient.h
deleted file mode 100644
index f21fd93..0000000
--- a/media/libaaudio/src/binding/IAAudioClient.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-#define ANDROID_AAUDIO_IAAUDIO_CLIENT_H
-
-#include <stdint.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-
-namespace android {
-
-
-// Interface (our AIDL) - client methods called by service
-class IAAudioClient : public IInterface {
-public:
-
-    DECLARE_META_INTERFACE(AAudioClient);
-
-    virtual void onStreamChange(aaudio::aaudio_handle_t handle, int32_t opcode, int32_t value) = 0;
-
-};
-
-class BnAAudioClient : public BnInterface<IAAudioClient> {
-public:
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-                                Parcel* reply, uint32_t flags = 0);
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/IAAudioService.cpp b/media/libaaudio/src/binding/IAAudioService.cpp
deleted file mode 100644
index e017b3a..0000000
--- a/media/libaaudio/src/binding/IAAudioService.cpp
+++ /dev/null
@@ -1,424 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "AAudio"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <aaudio/AAudio.h>
-#include <binder/IPCThreadState.h>
-
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
-#include "utility/AAudioUtilities.h"
-
-namespace android {
-
-using aaudio::aaudio_handle_t;
-
-/**
- * This is used by the AAudio Client to talk to the AAudio Service.
- *
- * The order of parameters in the Parcels must match with code in AAudioService.cpp.
- */
-class BpAAudioService : public BpInterface<IAAudioService>
-{
-public:
-    explicit BpAAudioService(const sp<IBinder>& impl)
-        : BpInterface<IAAudioService>(impl)
-    {
-    }
-
-    void registerClient(const sp<IAAudioClient>& client) override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(client));
-        remote()->transact(REGISTER_CLIENT, data, &reply);
-    }
-
-    aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                               aaudio::AAudioStreamConfiguration &configurationOutput) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        // request.dump();
-        request.writeToParcel(&data);
-        status_t err = remote()->transact(OPEN_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client openStream transact failed %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_handle_t stream;
-        err = reply.readInt32(&stream);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(OPEN_STREAM) readInt %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        } else if (stream < 0) {
-            return stream;
-        }
-        err = configurationOutput.readFromParcel(&reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client openStream readFromParcel failed %d", err);
-            closeStream(stream);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        return stream;
-    }
-
-    virtual aaudio_result_t closeStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(CLOSE_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client closeStream transact failed %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
-                                               aaudio::AudioEndpointParcelable &parcelable)   {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(GET_STREAM_DESCRIPTION, data, &reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) returns %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t result;
-        err = reply.readInt32(&result);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) readInt %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        } else if (result != AAUDIO_OK) {
-            ALOGE("BpAAudioService::client GET_STREAM_DESCRIPTION passed result %d", result);
-            return result;
-        }
-        err = parcelable.readFromParcel(&reply);
-        if (err != NO_ERROR) {
-            ALOGE("BpAAudioService::client transact(GET_STREAM_DESCRIPTION) read endpoint %d", err);
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        return result;
-    }
-
-    // TODO should we wait for a reply?
-    virtual aaudio_result_t startStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(START_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(PAUSE_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t stopStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(STOP_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t flushStream(aaudio_handle_t streamHandle) override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        status_t err = remote()->transact(FLUSH_STREAM, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
-                                                pid_t clientThreadId,
-                                                int64_t periodNanoseconds)
-    override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        data.writeInt32((int32_t) clientThreadId);
-        data.writeInt64(periodNanoseconds);
-        status_t err = remote()->transact(REGISTER_AUDIO_THREAD, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-    virtual aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
-                                                  pid_t clientThreadId)
-    override {
-        Parcel data, reply;
-        // send command
-        data.writeInterfaceToken(IAAudioService::getInterfaceDescriptor());
-        data.writeInt32(streamHandle);
-        data.writeInt32((int32_t) clientThreadId);
-        status_t err = remote()->transact(UNREGISTER_AUDIO_THREAD, data, &reply);
-        if (err != NO_ERROR) {
-            return AAudioConvert_androidToAAudioResult(err);
-        }
-        // parse reply
-        aaudio_result_t res;
-        reply.readInt32(&res);
-        return res;
-    }
-
-};
-
-// Implement an interface to the service.
-// This is here so that you don't have to link with libaaudio static library.
-IMPLEMENT_META_INTERFACE(AAudioService, "IAAudioService");
-
-// The order of parameters in the Parcels must match with code in BpAAudioService
-
-status_t BnAAudioService::onTransact(uint32_t code, const Parcel& data,
-                                        Parcel* reply, uint32_t flags) {
-    aaudio_handle_t streamHandle = 0;
-    aaudio::AAudioStreamRequest request;
-    aaudio::AAudioStreamConfiguration configuration;
-    pid_t tid = 0;
-    int64_t nanoseconds = 0;
-    aaudio_result_t result = AAUDIO_OK;
-    status_t status = NO_ERROR;
-    ALOGV("BnAAudioService::onTransact(%i) %i", code, flags);
-
-    switch(code) {
-        case REGISTER_CLIENT: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            sp<IAAudioClient> client = interface_cast<IAAudioClient>(
-                    data.readStrongBinder());
-            // readStrongBinder() can return null
-            if (client.get() == nullptr) {
-                ALOGE("BnAAudioService::%s(REGISTER_CLIENT) client is NULL!", __func__);
-                android_errorWriteLog(0x534e4554, "116230453");
-                return DEAD_OBJECT;
-            } else {
-                registerClient(client);
-                return NO_ERROR;
-            }
-        } break;
-
-        case OPEN_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            request.readFromParcel(&data);
-            result = request.validate();
-            if (result != AAUDIO_OK) {
-                streamHandle = result;
-            } else {
-                //ALOGD("BnAAudioService::client openStream request dump --------------------");
-                //request.dump();
-                // Override the uid and pid from the client in case they are incorrect.
-                request.setUserId(IPCThreadState::self()->getCallingUid());
-                request.setProcessId(IPCThreadState::self()->getCallingPid());
-                streamHandle = openStream(request, configuration);
-                //ALOGD("BnAAudioService::onTransact OPEN_STREAM server handle = 0x%08X",
-                //        streamHandle);
-            }
-            reply->writeInt32(streamHandle);
-            configuration.writeToParcel(reply);
-            return NO_ERROR;
-        } break;
-
-        case CLOSE_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(CLOSE_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = closeStream(streamHandle);
-            //ALOGD("BnAAudioService::onTransact CLOSE_STREAM 0x%08X, result = %d",
-            //      streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case GET_STREAM_DESCRIPTION: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(GET_STREAM_DESCRIPTION) streamHandle failed!", __func__);
-                return status;
-            }
-            aaudio::AudioEndpointParcelable parcelable;
-            result = getStreamDescription(streamHandle, parcelable);
-            if (result != AAUDIO_OK) {
-                return AAudioConvert_aaudioToAndroidStatus(result);
-            }
-            status = reply->writeInt32(result);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            return parcelable.writeToParcel(reply);
-        } break;
-
-        case START_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(START_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = startStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact START_STREAM 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case PAUSE_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(PAUSE_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = pauseStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact PAUSE_STREAM 0x%08X, result = %d",
-                  streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case STOP_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(STOP_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = stopStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact STOP_STREAM 0x%08X, result = %d",
-                  streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case FLUSH_STREAM: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(FLUSH_STREAM) streamHandle failed!", __func__);
-                return status;
-            }
-            result = flushStream(streamHandle);
-            ALOGV("BnAAudioService::onTransact FLUSH_STREAM 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case REGISTER_AUDIO_THREAD: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
-                return status;
-            }
-            status = data.readInt32(&tid);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) tid failed!", __func__);
-                return status;
-            }
-            status = data.readInt64(&nanoseconds);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) nanoseconds failed!", __func__);
-                return status;
-            }
-            result = registerAudioThread(streamHandle, tid, nanoseconds);
-            ALOGV("BnAAudioService::%s(REGISTER_AUDIO_THREAD) 0x%08X, result = %d",
-                    __func__, streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        case UNREGISTER_AUDIO_THREAD: {
-            CHECK_INTERFACE(IAAudioService, data, reply);
-            status = data.readInt32(&streamHandle);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
-                return status;
-            }
-            status = data.readInt32(&tid);
-            if (status != NO_ERROR) {
-                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) tid failed!", __func__);
-                return status;
-            }
-            result = unregisterAudioThread(streamHandle, tid);
-            ALOGV("BnAAudioService::onTransact UNREGISTER_AUDIO_THREAD 0x%08X, result = %d",
-                    streamHandle, result);
-            reply->writeInt32(result);
-            return NO_ERROR;
-        } break;
-
-        default:
-            // ALOGW("BnAAudioService::onTransact not handled %u", code);
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} /* namespace android */
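The file removed above hand-rolls the client-side Binder proxy: every method builds a Parcel, calls remote()->transact(), and parses the reply by hand. For orientation, a minimal sketch of that manual pattern follows; IExample, its transaction code, and doSomething() are hypothetical names used only to illustrate the boilerplate that the new AIDL files generate automatically.

#include <binder/IInterface.h>
#include <binder/Parcel.h>

namespace android {

// Hypothetical interface, sketched only to show the manual proxy pattern.
class IExample : public IInterface {
public:
    DECLARE_META_INTERFACE(Example);
    enum { DO_SOMETHING = IBinder::FIRST_CALL_TRANSACTION };
    virtual int32_t doSomething(int32_t input) = 0;
};

class BpExample : public BpInterface<IExample> {
public:
    explicit BpExample(const sp<IBinder>& impl) : BpInterface<IExample>(impl) {}

    int32_t doSomething(int32_t input) override {
        Parcel data, reply;
        // Each method repeats this marshalling/transact/unmarshalling by hand.
        data.writeInterfaceToken(IExample::getInterfaceDescriptor());
        data.writeInt32(input);
        if (remote()->transact(DO_SOMETHING, data, &reply) != NO_ERROR) return -1;
        int32_t result = -1;
        reply.readInt32(&result);
        return result;
    }
};

IMPLEMENT_META_INTERFACE(Example, "IExample");

} // namespace android

The matching BnExample::onTransact() switch, like the BnAAudioService one deleted above, is the other half of the boilerplate that now disappears.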
diff --git a/media/libaaudio/src/binding/IAAudioService.h b/media/libaaudio/src/binding/IAAudioService.h
deleted file mode 100644
index 6bdb826..0000000
--- a/media/libaaudio/src/binding/IAAudioService.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-#define ANDROID_AAUDIO_IAAUDIO_SERVICE_H
-
-#include <stdint.h>
-#include <utils/RefBase.h>
-#include <binder/TextOutput.h>
-#include <binder/IInterface.h>
-
-#include <aaudio/AAudio.h>
-
-#include "binding/AAudioCommon.h"
-#include "binding/AAudioServiceDefinitions.h"
-#include "binding/AAudioStreamConfiguration.h"
-#include "binding/AAudioStreamRequest.h"
-#include "binding/AudioEndpointParcelable.h"
-#include "binding/IAAudioClient.h"
-
-namespace android {
-
-#define AAUDIO_SERVICE_NAME  "media.aaudio"
-
-// Interface (our AIDL) - service methods called by client
-class IAAudioService : public IInterface {
-public:
-
-    DECLARE_META_INTERFACE(AAudioService);
-
-    // Register an object to receive audio input/output change and track notifications.
-    // For a given calling pid, AAudio service disregards any registrations after the first.
-    // Thus the IAAudioClient must be a singleton per process.
-    virtual void registerClient(const sp<IAAudioClient>& client) = 0;
-
-    /**
-     * @param request info needed to create the stream
-     * @param configuration contains information about the created stream
-     * @return handle to the stream or a negative error
-     */
-    virtual aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                                     aaudio::AAudioStreamConfiguration &configurationOutput) = 0;
-
-    virtual aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /* Get an immutable description of the in-memory queues
-    * used to communicate with the underlying HAL or Service.
-    */
-    virtual aaudio_result_t getStreamDescription(aaudio::aaudio_handle_t streamHandle,
-                                               aaudio::AudioEndpointParcelable &parcelable) = 0;
-
-    /**
-     * Start the flow of data.
-     * This is asynchronous. When complete, the service will send a STARTED event.
-     */
-    virtual aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Stop the flow of data such that start() can resume without loss of data.
-     * This is asynchronous. When complete, the service will send a PAUSED event.
-     */
-    virtual aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Stop the flow of data such that the data currently in the buffer is played.
-     * This is asynchronous. When complete, the service will send a STOPPED event.
-     */
-    virtual aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     *  Discard any data held by the underlying HAL or Service.
-     * This is asynchronous. When complete, the service will send a FLUSHED event.
-     */
-    virtual aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) = 0;
-
-    /**
-     * Manage the specified thread as a low latency audio thread.
-     */
-    virtual aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                              pid_t clientThreadId,
-                                              int64_t periodNanoseconds) = 0;
-
-    virtual aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                pid_t clientThreadId) = 0;
-};
-
-class BnAAudioService : public BnInterface<IAAudioService> {
-public:
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-                                Parcel* reply, uint32_t flags = 0);
-
-};
-
-} /* namespace android */
-
-#endif //ANDROID_AAUDIO_IAAUDIO_SERVICE_H
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 4996b3f..a4b3cec 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -29,8 +29,29 @@
 
 using namespace aaudio;
 
-RingBufferParcelable::RingBufferParcelable() {}
-RingBufferParcelable::~RingBufferParcelable() {}
+RingBufferParcelable::RingBufferParcelable(const RingBuffer& parcelable)
+        : mReadCounterParcelable(std::move(parcelable.readCounterParcelable)),
+          mWriteCounterParcelable(std::move(parcelable.writeCounterParcelable)),
+          mDataParcelable(std::move(parcelable.dataParcelable)),
+          mBytesPerFrame(parcelable.bytesPerFrame),
+          mFramesPerBurst(parcelable.framesPerBurst),
+          mCapacityInFrames(parcelable.capacityInFrames),
+          mFlags(static_cast<RingbufferFlags>(parcelable.flags)) {
+    static_assert(sizeof(mFlags) == sizeof(parcelable.flags));
+}
+
+RingBuffer RingBufferParcelable::parcelable() const {
+    RingBuffer result;
+    result.readCounterParcelable = std::move(mReadCounterParcelable).parcelable();
+    result.writeCounterParcelable = std::move(mWriteCounterParcelable).parcelable();
+    result.dataParcelable = std::move(mDataParcelable).parcelable();
+    result.bytesPerFrame = mBytesPerFrame;
+    result.framesPerBurst = mFramesPerBurst;
+    result.capacityInFrames = mCapacityInFrames;
+    static_assert(sizeof(mFlags) == sizeof(result.flags));
+    result.flags = static_cast<int32_t>(mFlags);
+    return result;
+}
 
 // TODO This assumes that all three use the same SharedMemoryParcelable
 void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
@@ -76,58 +97,6 @@
     mCapacityInFrames = capacityInFrames;
 }
 
-/**
- * The read and write must be symmetric.
- */
-status_t RingBufferParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mCapacityInFrames);
-    if (status != NO_ERROR) goto error;
-    if (mCapacityInFrames > 0) {
-        status = parcel->writeInt32(mBytesPerFrame);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mFramesPerBurst);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mFlags);
-        if (status != NO_ERROR) goto error;
-        status = mReadCounterParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mWriteCounterParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mDataParcelable.writeToParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    return NO_ERROR;
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t RingBufferParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mCapacityInFrames);
-    if (status != NO_ERROR) goto error;
-    if (mCapacityInFrames > 0) {
-        status = parcel->readInt32(&mBytesPerFrame);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32(&mFramesPerBurst);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32((int32_t *)&mFlags);
-        if (status != NO_ERROR) goto error;
-        status = mReadCounterParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mWriteCounterParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-        status = mDataParcelable.readFromParcel(parcel);
-        if (status != NO_ERROR) goto error;
-    }
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t RingBufferParcelable::resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor) {
     aaudio_result_t result;
 
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 1dbcf07..2508cea 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -19,6 +19,7 @@
 
 #include <stdint.h>
 
+#include <aaudio/RingBuffer.h>
 #include <binder/Parcelable.h>
 
 #include "binding/AAudioServiceDefinitions.h"
@@ -26,10 +27,12 @@
 
 namespace aaudio {
 
-class RingBufferParcelable : public Parcelable {
+class RingBufferParcelable  {
 public:
-    RingBufferParcelable();
-    virtual ~RingBufferParcelable();
+    RingBufferParcelable() = default;
+
+    // Construct based on a parcelable representation.
+    explicit RingBufferParcelable(const RingBuffer& parcelable);
 
     // TODO This assumes that all three use the same SharedMemoryParcelable
     void setupMemory(int32_t sharedMemoryIndex,
@@ -57,21 +60,14 @@
 
     bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
 
-    /**
-     * The read and write must be symmetric.
-     */
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
 
     void dump();
 
+    // Extract a parcelable representation of this object.
+    RingBuffer parcelable() const;
+
 private:
-
-    aaudio_result_t validate() const;
-
     SharedRegionParcelable  mReadCounterParcelable;
     SharedRegionParcelable  mWriteCounterParcelable;
     SharedRegionParcelable  mDataParcelable;
@@ -79,6 +75,8 @@
     int32_t                 mFramesPerBurst = 0;    // for ISOCHRONOUS queues
     int32_t                 mCapacityInFrames = 0;  // zero if unused
     RingbufferFlags         mFlags = RingbufferFlags::NONE;
+
+    aaudio_result_t validate() const;
 };
 
 } /* namespace aaudio */
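RingBufferParcelable now converts to and from the stable AIDL RingBuffer type through the explicit constructor and parcelable() accessor declared above, instead of reading and writing Parcels directly. A minimal usage sketch; the wrapper variable and its setup are illustrative only:

#include "binding/RingBufferParcelable.h"

aaudio::RingBufferParcelable wrapper;
// ... populate via setupMemory() as elsewhere in this class ...

aaudio::RingBuffer aidl = wrapper.parcelable();   // extract the AIDL representation
aaudio::RingBufferParcelable restored(aidl);      // rebuild the wrapper from it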
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index b6e8472..eef238f 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <inttypes.h>
 #include <stdint.h>
 #include <stdio.h>
 
@@ -33,61 +34,37 @@
 using android::base::unique_fd;
 using android::NO_ERROR;
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
+using android::media::SharedFileRegion;
 
 using namespace aaudio;
 
-SharedMemoryParcelable::SharedMemoryParcelable() {}
-SharedMemoryParcelable::~SharedMemoryParcelable() {};
+SharedMemoryParcelable::SharedMemoryParcelable(SharedFileRegion&& parcelable) {
+    mFd = parcelable.fd.release();
+    mSizeInBytes = parcelable.size;
+    mOffsetInBytes = parcelable.offset;
+}
+
+SharedFileRegion SharedMemoryParcelable::parcelable() && {
+    SharedFileRegion result;
+    result.fd.reset(std::move(mFd));
+    result.size = mSizeInBytes;
+    result.offset = mOffsetInBytes;
+    return result;
+}
+
+SharedMemoryParcelable SharedMemoryParcelable::dup() const {
+    SharedMemoryParcelable result;
+    result.setup(mFd, static_cast<int32_t>(mSizeInBytes));
+    return result;
+}
 
 void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
-    mFd.reset(dup(fd.get())); // store a duplicate fd
+    constexpr int minFd = 3; // skip over stdin, stdout and stderr
+    mFd.reset(fcntl(fd.get(), F_DUPFD_CLOEXEC, minFd)); // store a duplicate FD
     ALOGV("setup(fd = %d -> %d, size = %d) this = %p\n", fd.get(), mFd.get(), sizeInBytes, this);
     mSizeInBytes = sizeInBytes;
 }
 
-status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) return status;
-
-    status = parcel->writeInt32(mSizeInBytes);
-    if (status != NO_ERROR) return status;
-    if (mSizeInBytes > 0) {
-        ALOGV("writeToParcel() mFd = %d, this = %p\n", mFd.get(), this);
-        status = parcel->writeUniqueFileDescriptor(mFd);
-        ALOGE_IF(status != NO_ERROR, "SharedMemoryParcelable writeDupFileDescriptor failed : %d",
-                 status);
-    }
-    return status;
-}
-
-status_t SharedMemoryParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-
-    if (mSizeInBytes > 0) {
-        // The Parcel owns the file descriptor and will close it later.
-        unique_fd mmapFd;
-        status = parcel->readUniqueFileDescriptor(&mmapFd);
-        if (status != NO_ERROR) {
-            ALOGE("readFromParcel() readUniqueFileDescriptor() failed : %d", status);
-            goto error;
-        }
-
-        // Resolve the memory now while we still have the FD from the Parcel.
-        // Closing the FD will not affect the shared memory once mmap() has been called.
-        aaudio_result_t result = resolveSharedMemory(mmapFd);
-        status = AAudioConvert_aaudioToAndroidStatus(result);
-        if (status != NO_ERROR) goto error;
-    }
-
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    return status;
-}
-
 aaudio_result_t SharedMemoryParcelable::close() {
     if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
         int err = munmap(mResolvedAddress, mSizeInBytes);
@@ -104,7 +81,7 @@
     mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
                                         MAP_SHARED, fd.get(), 0);
     if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
-        ALOGE("mmap() failed for fd = %d, nBytes = %d, errno = %s",
+        ALOGE("mmap() failed for fd = %d, nBytes = %" PRId64 ", errno = %s",
               fd.get(), mSizeInBytes, strerror(errno));
         return AAUDIO_ERROR_INTERNAL;
     }
@@ -118,7 +95,7 @@
         return AAUDIO_ERROR_OUT_OF_RANGE;
     } else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
         ALOGE("out of range, offsetInBytes = %d, "
-                      "sizeInBytes = %d, mSizeInBytes = %d",
+                      "sizeInBytes = %d, mSizeInBytes = %" PRId64,
               offsetInBytes, sizeInBytes, mSizeInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -148,7 +125,11 @@
 
 aaudio_result_t SharedMemoryParcelable::validate() const {
     if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE_BYTES) {
-        ALOGE("invalid mSizeInBytes = %d", mSizeInBytes);
+        ALOGE("invalid mSizeInBytes = %" PRId64, mSizeInBytes);
+        return AAUDIO_ERROR_OUT_OF_RANGE;
+    }
+    if (mOffsetInBytes != 0) {
+        ALOGE("invalid mOffsetInBytes = %" PRId64, mOffsetInBytes);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
     return AAUDIO_OK;
@@ -156,5 +137,5 @@
 
 void SharedMemoryParcelable::dump() {
     ALOGD("mFd = %d", mFd.get());
-    ALOGD("mSizeInBytes = %d", mSizeInBytes);
+    ALOGD("mSizeInBytes = %" PRId64, mSizeInBytes);
 }
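setup() above now duplicates the incoming descriptor with fcntl(F_DUPFD_CLOEXEC, 3) instead of dup(). The sketch below is generic POSIX usage, not code from this change; duplicateFdSafely() is an illustrative helper that shows why the flags matter.

#include <fcntl.h>
#include <unistd.h>

int duplicateFdSafely(int fd) {
    // dup() returns the lowest free descriptor and leaves it inheritable across
    // exec(); if fd 0-2 happens to be closed, the copy can land on a standard stream.
    // F_DUPFD_CLOEXEC requests a descriptor of at least 3 and sets close-on-exec
    // atomically, so the duplicate never leaks into child processes.
    return fcntl(fd, F_DUPFD_CLOEXEC, 3);
}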
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 4ec38c5..1f2c335 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -21,12 +21,11 @@
 #include <sys/mman.h>
 
 #include <android-base/unique_fd.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <android/media/SharedFileRegion.h>
 
 namespace aaudio {
 
-// Arbitrary limits for sanity checks. TODO remove after debugging.
+// Arbitrary limits for range checks.
 #define MAX_SHARED_MEMORIES (32)
 #define MAX_MMAP_OFFSET_BYTES (32 * 1024 * 8)
 #define MAX_MMAP_SIZE_BYTES (32 * 1024 * 8)
@@ -36,10 +35,14 @@
  * It may be divided into several regions.
  * The memory can be shared using Binder or simply shared between threads.
  */
-class SharedMemoryParcelable : public android::Parcelable {
+class SharedMemoryParcelable {
 public:
-    SharedMemoryParcelable();
-    virtual ~SharedMemoryParcelable();
+    SharedMemoryParcelable() = default;
+
+    // Ctor from a parcelable representation.
+    // Since the parcelable object owns a unique FD, move semantics are provided to avoid the need
+    // to dupe.
+    explicit SharedMemoryParcelable(android::media::SharedFileRegion&& parcelable);
 
     /**
      * Make a dup() of the fd and store it for later use.
@@ -49,10 +52,6 @@
      */
     void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
-    virtual android::status_t writeToParcel(android::Parcel* parcel) const override;
-
-    virtual android::status_t readFromParcel(const android::Parcel* parcel) override;
-
     // mmap() shared memory
     aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
 
@@ -63,20 +62,23 @@
 
     void dump();
 
-protected:
+    // Extract a parcelable representation of this object.
+    // Since we own a unique FD, move semantics are provided to avoid the need to dupe.
+    android::media::SharedFileRegion parcelable() &&;
 
-#define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
-
-    aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
-
-    android::base::unique_fd   mFd;
-    int32_t                    mSizeInBytes = 0;
-    uint8_t                   *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+    // Copy this instance. Duplicates the underlying FD.
+    SharedMemoryParcelable dup() const;
 
 private:
+#define MMAP_UNRESOLVED_ADDRESS    reinterpret_cast<uint8_t*>(MAP_FAILED)
 
+    android::base::unique_fd   mFd;
+    int64_t                    mSizeInBytes = 0;
+    int64_t                    mOffsetInBytes = 0;
+    uint8_t                   *mResolvedAddress = MMAP_UNRESOLVED_ADDRESS;
+
+    aaudio_result_t resolveSharedMemory(const android::base::unique_fd& fd);
     aaudio_result_t validate() const;
-
 };
 
 } /* namespace aaudio */
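Because SharedMemoryParcelable owns a unique_fd, parcelable() above is rvalue-ref-qualified: the caller must hand the object over with std::move(), which transfers the descriptor instead of duplicating it, while dup() is the explicit copy. A hedged usage sketch; shm and region are illustrative names:

#include <utility>
#include "binding/SharedMemoryParcelable.h"

aaudio::SharedMemoryParcelable shm;
// shm.setup(fd, sizeInBytes);   // populate as elsewhere in this class

aaudio::SharedMemoryParcelable copy = shm.dup();   // explicit FD duplication

// The '&&' qualifier forces the caller to spell out the transfer of ownership;
// after this call shm no longer owns the descriptor.
android::media::SharedFileRegion region = std::move(shm).parcelable();

// Moving the parcelable back in reconstructs a wrapper without another dup.
aaudio::SharedMemoryParcelable restored(std::move(region));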
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index c776116..56b99c0 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -36,8 +36,18 @@
 
 using namespace aaudio;
 
-SharedRegionParcelable::SharedRegionParcelable() {}
-SharedRegionParcelable::~SharedRegionParcelable() {}
+SharedRegionParcelable::SharedRegionParcelable(const SharedRegion& parcelable)
+        : mSharedMemoryIndex(parcelable.sharedMemoryIndex),
+          mOffsetInBytes(parcelable.offsetInBytes),
+          mSizeInBytes(parcelable.sizeInBytes) {}
+
+SharedRegion SharedRegionParcelable::parcelable() const {
+    SharedRegion result;
+    result.sharedMemoryIndex = mSharedMemoryIndex;
+    result.offsetInBytes = mOffsetInBytes;
+    result.sizeInBytes = mSizeInBytes;
+    return result;
+}
 
 void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
                                    int32_t offsetInBytes,
@@ -47,41 +57,6 @@
     mSizeInBytes = sizeInBytes;
 }
 
-status_t SharedRegionParcelable::writeToParcel(Parcel* parcel) const {
-    status_t status = AAudioConvert_aaudioToAndroidStatus(validate());
-    if (status != NO_ERROR) goto error;
-
-    status = parcel->writeInt32(mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-    if (mSizeInBytes > 0) {
-        status = parcel->writeInt32(mSharedMemoryIndex);
-        if (status != NO_ERROR) goto error;
-        status = parcel->writeInt32(mOffsetInBytes);
-        if (status != NO_ERROR) goto error;
-    }
-    return NO_ERROR;
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
-status_t SharedRegionParcelable::readFromParcel(const Parcel* parcel) {
-    status_t status = parcel->readInt32(&mSizeInBytes);
-    if (status != NO_ERROR) goto error;
-    if (mSizeInBytes > 0) {
-        status = parcel->readInt32(&mSharedMemoryIndex);
-        if (status != NO_ERROR) goto error;
-        status = parcel->readInt32(&mOffsetInBytes);
-        if (status != NO_ERROR) goto error;
-    }
-    return AAudioConvert_aaudioToAndroidStatus(validate());
-
-error:
-    ALOGE("%s returning %d", __func__, status);
-    return status;
-}
-
 aaudio_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
                                               void **regionAddressPtr) {
     if (mSizeInBytes == 0) {
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.h b/media/libaaudio/src/binding/SharedRegionParcelable.h
index 0cd8c04..c15fc30 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.h
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.h
@@ -20,41 +20,39 @@
 #include <stdint.h>
 
 #include <sys/mman.h>
-#include <binder/Parcelable.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/SharedRegion.h>
 
 #include "binding/SharedMemoryParcelable.h"
 
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 namespace aaudio {
 
-class SharedRegionParcelable : public Parcelable {
+class SharedRegionParcelable {
 public:
-    SharedRegionParcelable();
-    virtual ~SharedRegionParcelable();
+    SharedRegionParcelable() = default;
+
+    // Construct based on a parcelable representation.
+    explicit SharedRegionParcelable(const SharedRegion& parcelable);
 
     void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
 
-    virtual status_t writeToParcel(Parcel* parcel) const override;
-
-    virtual status_t readFromParcel(const Parcel* parcel) override;
-
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
 
     bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
 
     void dump();
 
-protected:
+    // Extract a parcelable representation of this object.
+    SharedRegion parcelable() const;
+
+private:
     int32_t mSharedMemoryIndex = -1;
     int32_t mOffsetInBytes     = 0;
     int32_t mSizeInBytes       = 0;
 
-private:
     aaudio_result_t validate() const;
 };
 
diff --git a/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
new file mode 100644
index 0000000..3600b6a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/Endpoint.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.RingBuffer;
+import android.media.SharedFileRegion;
+
+parcelable Endpoint {
+    // Set capacityInFrames to zero if a queue is unused.
+    RingBuffer upMessageQueueParcelable;   // server to client
+    RingBuffer downMessageQueueParcelable; // to server
+    RingBuffer upDataQueueParcelable;      // eg. record, could share same queue
+    RingBuffer downDataQueueParcelable;    // eg. playback
+    SharedFileRegion[] sharedMemories;
+}
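With the C++ backend, Endpoint becomes an ordinary generated value type with public fields: RingBuffer members and a std::vector of SharedFileRegion for the array. The shape below is an assumption about the generated code, shown only for orientation; makeEndpoint() and its parameters are illustrative.

aaudio::Endpoint makeEndpoint(const aaudio::RingBufferParcelable& upQueue,
                              const aaudio::RingBufferParcelable& downQueue,
                              aaudio::SharedMemoryParcelable sharedMemory) {
    aaudio::Endpoint endpoint;
    endpoint.upMessageQueueParcelable = upQueue.parcelable();
    endpoint.downMessageQueueParcelable = downQueue.parcelable();
    // The AIDL array is assumed to map to std::vector in the cpp backend.
    endpoint.sharedMemories.push_back(std::move(sharedMemory).parcelable());
    return endpoint;
}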
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
new file mode 100644
index 0000000..a010dbc
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioClient.aidl
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+interface IAAudioClient {
+    oneway void onStreamChange(int handle, int opcode, int value);
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
new file mode 100644
index 0000000..44d2211
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.Endpoint;
+import aaudio.IAAudioClient;
+import aaudio.StreamParameters;
+import aaudio.StreamRequest;
+
+interface IAAudioService {
+    /**
+     * Register an object to receive audio input/output change and track notifications.
+     * For a given calling pid, AAudio service disregards any registrations after the first.
+     * Thus the IAAudioClient must be a singleton per process.
+     */
+    void registerClient(IAAudioClient client);
+
+    /**
+     * @param request info needed to create the stream
+     * @param paramsOut contains information about the created stream
+     * @return handle to the stream or a negative error
+     */
+    int openStream(in StreamRequest request,
+                   out StreamParameters paramsOut);
+
+    int closeStream(int streamHandle);
+
+    /**
+     * Get an immutable description of the in-memory queues
+     * used to communicate with the underlying HAL or Service.
+     */
+    int getStreamDescription(int streamHandle, out Endpoint endpoint);
+
+    /**
+     * Start the flow of data.
+     * This is asynchronous. When complete, the service will send a STARTED event.
+     */
+    int startStream(int streamHandle);
+
+    /**
+     * Stop the flow of data such that start() can resume without loss of data.
+     * This is asynchronous. When complete, the service will send a PAUSED event.
+     */
+    int pauseStream(int streamHandle);
+
+    /**
+     * Stop the flow of data such that the data currently in the buffer is played.
+     * This is asynchronous. When complete, the service will send a STOPPED event.
+     */
+    int stopStream(int streamHandle);
+
+    /**
+     * Discard any data held by the underlying HAL or Service.
+     * This is asynchronous. When complete, the service will send a FLUSHED event.
+     */
+    int flushStream(int streamHandle);
+
+    /**
+     * Manage the specified thread as a low latency audio thread.
+     */
+    int registerAudioThread(int streamHandle,
+                            int clientThreadId,
+                            long periodNanoseconds);
+
+    int unregisterAudioThread(int streamHandle,
+                              int clientThreadId);
+}
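These methods replace the hand-written IAAudioService proxy. On the C++ side each one is expected to surface with a binder::Status return and the declared int result as a trailing out-parameter; the sketch below assumes that generated shape and the <aaudio/IAAudioService.h> header path, and reuses the "media.aaudio" service name from the removed IAAudioService.h. openStreamSketch() is an illustrative helper, not part of this change.

#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <aaudio/IAAudioService.h>   // generated from the AIDL above (assumed path)

// Hedged sketch of a raw client call against the generated bindings.
int32_t openStreamSketch(const aaudio::StreamRequest& request,
                         aaudio::StreamParameters* paramsOut) {
    android::sp<android::IBinder> binder =
            android::defaultServiceManager()->getService(android::String16("media.aaudio"));
    android::sp<aaudio::IAAudioService> service =
            android::interface_cast<aaudio::IAAudioService>(binder);
    if (service == nullptr) return -1;

    int32_t streamHandle = -1;
    android::binder::Status status = service->openStream(request, paramsOut, &streamHandle);
    return status.isOk() ? streamHandle : -1;
}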
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
new file mode 100644
index 0000000..a58b33a
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.SharedRegion;
+
+parcelable RingBuffer {
+    SharedRegion        readCounterParcelable;
+    SharedRegion        writeCounterParcelable;
+    SharedRegion        dataParcelable;
+    int                 bytesPerFrame;     // index is in frames
+    int                 framesPerBurst;    // for ISOCHRONOUS queues
+    int                 capacityInFrames;  // zero if unused
+    int /* RingbufferFlags */ flags;  // = RingbufferFlags::NONE;
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
new file mode 100644
index 0000000..26153e8
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/SharedRegion.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+parcelable SharedRegion {
+    int sharedMemoryIndex;
+    int offsetInBytes;
+    int sizeInBytes;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
new file mode 100644
index 0000000..b7c4f70
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import android.media.audio.common.AudioFormat;
+
+parcelable StreamParameters {
+    int                                       samplesPerFrame;  //      = AAUDIO_UNSPECIFIED;
+    int                                       sampleRate;  //           = AAUDIO_UNSPECIFIED;
+    int                                       deviceId;  //             = AAUDIO_UNSPECIFIED;
+    int /* aaudio_sharing_mode_t */           sharingMode;  //          = AAUDIO_SHARING_MODE_SHARED;
+    AudioFormat                               audioFormat;  //          = AUDIO_FORMAT_DEFAULT;
+    int /* aaudio_direction_t */              direction;  //            = AAUDIO_DIRECTION_OUTPUT;
+    int /* aaudio_usage_t */                  usage;  //                = AAUDIO_UNSPECIFIED;
+    int /* aaudio_content_type_t */           contentType;  //          = AAUDIO_UNSPECIFIED;
+    int /* aaudio_input_preset_t */           inputPreset;  //          = AAUDIO_UNSPECIFIED;
+    int                                       bufferCapacity;  //       = AAUDIO_UNSPECIFIED;
+    int /* aaudio_allowed_capture_policy_t */ allowedCapturePolicy;  // = AAUDIO_UNSPECIFIED;
+    int /* aaudio_session_id_t */             sessionId;  //            = AAUDIO_SESSION_ID_NONE;
+    boolean                                   isPrivacySensitive;  //   = false;
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
new file mode 100644
index 0000000..53787a0
--- /dev/null
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamRequest.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aaudio;
+
+import aaudio.StreamParameters;
+import android.content.AttributionSourceState;
+
+parcelable StreamRequest {
+    StreamParameters       params;
+    AttributionSourceState attributionSource;
+    boolean                sharingModeMatchRequired; // = false;
+    boolean                inService; // = false; // Stream opened by the AAudio service
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 8f2c488..61b50f3 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -26,9 +26,11 @@
 #include <flowgraph/SinkFloat.h>
 #include <flowgraph/SinkI16.h>
 #include <flowgraph/SinkI24.h>
+#include <flowgraph/SinkI32.h>
 #include <flowgraph/SourceFloat.h>
 #include <flowgraph/SourceI16.h>
 #include <flowgraph/SourceI24.h>
+#include <flowgraph/SourceI32.h>
 
 using namespace flowgraph;
 
@@ -38,7 +40,8 @@
                           int32_t sinkChannelCount) {
     AudioFloatOutputPort *lastOutput = nullptr;
 
-    ALOGV("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d",
+    // TODO change back to ALOGD
+    ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d",
           __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount);
 
     switch (sourceFormat) {
@@ -51,7 +54,10 @@
         case AUDIO_FORMAT_PCM_24_BIT_PACKED:
             mSource = std::make_unique<SourceI24>(sourceChannelCount);
             break;
-        default: // TODO add I32
+        case AUDIO_FORMAT_PCM_32_BIT:
+            mSource = std::make_unique<SourceI32>(sourceChannelCount);
+            break;
+        default:
             ALOGE("%s() Unsupported source format = %d", __func__, sourceFormat);
             return AAUDIO_ERROR_UNIMPLEMENTED;
     }
@@ -90,7 +96,10 @@
         case AUDIO_FORMAT_PCM_24_BIT_PACKED:
             mSink = std::make_unique<SinkI24>(sinkChannelCount);
             break;
-        default: // TODO add I32
+        case AUDIO_FORMAT_PCM_32_BIT:
+            mSink = std::make_unique<SinkI32>(sinkChannelCount);
+            break;
+        default:
             ALOGE("%s() Unsupported sink format = %d", __func__, sinkFormat);
             return AAUDIO_ERROR_UNIMPLEMENTED;
     }
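SourceI32 and SinkI32 extend the flowgraph to 32-bit integer PCM. Conceptually the conversion to and from the internal float domain is a scale by 2^31; the arithmetic below is illustrative only and is not the flowgraph implementation.

#include <algorithm>
#include <cstdint>

// Full-scale value for 32-bit integer PCM: 2^31.
constexpr double kInt32FullScale = 2147483648.0;

float i32ToFloat(int32_t sample) {
    return static_cast<float>(sample / kInt32FullScale);   // [-2^31, 2^31) -> [-1.0, 1.0)
}

int32_t floatToI32(float sample) {
    // Clamp in double so the positive full-scale value stays representable.
    double clipped = std::min(1.0, std::max(-1.0, static_cast<double>(sample)));
    return static_cast<int32_t>(clipped * (kInt32FullScale - 1.0));
}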
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index 06f66d3..ebc9f2b 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -137,7 +137,7 @@
         return AAUDIO_ERROR_INTERNAL;
     }
 
-    mUpCommandQueue = std::make_unique<FifoBuffer>(
+    mUpCommandQueue = std::make_unique<FifoBufferIndirect>(
             descriptor->bytesPerFrame,
             descriptor->capacityInFrames,
             descriptor->readCounterAddress,
@@ -166,7 +166,11 @@
                                   ? &mDataWriteCounter
                                   : descriptor->writeCounterAddress;
 
-    mDataQueue = std::make_unique<FifoBuffer>(
+    // Clear buffer to avoid an initial glitch on some devices.
+    size_t bufferSizeBytes = descriptor->capacityInFrames * descriptor->bytesPerFrame;
+    memset(descriptor->dataAddress, 0, bufferSizeBytes);
+
+    mDataQueue = std::make_unique<FifoBufferIndirect>(
             descriptor->bytesPerFrame,
             descriptor->capacityInFrames,
             readCounterAddress,
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 484d917..4c8d60f 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -93,8 +93,8 @@
     void dump() const;
 
 private:
-    std::unique_ptr<android::FifoBuffer> mUpCommandQueue;
-    std::unique_ptr<android::FifoBuffer> mDataQueue;
+    std::unique_ptr<android::FifoBufferIndirect> mUpCommandQueue;
+    std::unique_ptr<android::FifoBufferIndirect> mDataQueue;
     bool                    mFreeRunning;
     android::fifo_counter_t mDataReadCounter; // only used if free-running
     android::fifo_counter_t mDataWriteCounter; // only used if free-running
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 4520823..cf2abe8 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -28,18 +28,17 @@
 #include <cutils/properties.h>
 
 #include <media/MediaMetricsItem.h>
-#include <utils/String16.h>
 #include <utils/Trace.h>
 
 #include "AudioEndpointParcelable.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
-#include "binding/IAAudioService.h"
 #include "binding/AAudioServiceMessage.h"
 #include "core/AudioGlobal.h"
 #include "core/AudioStreamBuilder.h"
 #include "fifo/FifoBuffer.h"
 #include "utility/AudioClock.h"
+#include <media/AidlConversion.h>
 
 #include "AudioStreamInternal.h"
 
@@ -50,9 +49,9 @@
 // This is needed to make sense of the logs more easily.
 #define LOG_TAG (mInService ? "AudioStreamInternal_Service" : "AudioStreamInternal_Client")
 
-using android::String16;
 using android::Mutex;
 using android::WrappingBuffer;
+using android::content::AttributionSourceState;
 
 using namespace aaudio;
 
@@ -76,6 +75,7 @@
 }
 
 AudioStreamInternal::~AudioStreamInternal() {
+    ALOGD("%s() %p called", __func__, this);
 }
 
 aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
@@ -100,16 +100,24 @@
     const int32_t burstMinMicros = AAudioProperty_getHardwareBurstMinMicros();
     int32_t burstMicros = 0;
 
+    const audio_format_t requestedFormat = getFormat();
     // We have to do volume scaling. So we prefer FLOAT format.
-    if (getFormat() == AUDIO_FORMAT_DEFAULT) {
+    if (requestedFormat == AUDIO_FORMAT_DEFAULT) {
         setFormat(AUDIO_FORMAT_PCM_FLOAT);
     }
-    // Request FLOAT for the shared mixer.
+    // Request FLOAT for the shared mixer or the device.
     request.getConfiguration().setFormat(AUDIO_FORMAT_PCM_FLOAT);
 
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.packageName = builder.getOpPackageName();
+    attributionSource.attributionTag = builder.getAttributionTag();
+    attributionSource.token = sp<android::BBinder>::make();
+
     // Build the request to send to the server.
-    request.setUserId(getuid());
-    request.setProcessId(getpid());
+    request.setAttributionSource(attributionSource);
     request.setSharingModeMatchRequired(isSharingModeMatchRequired());
     request.setInService(isInService());
 
@@ -150,6 +158,14 @@
     mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM)
             + std::to_string(mServiceStreamHandle);
 
+    android::mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
+                 AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+            .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+                 AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
+                 android::toString(requestedFormat).c_str()).record();
+
     result = configurationOutput.validate();
     if (result != AAUDIO_OK) {
         goto error;
@@ -211,10 +227,10 @@
         result = AAUDIO_ERROR_OUT_OF_RANGE;
         goto error;
     }
-    mFramesPerBurst = framesPerBurst; // only save good value
+    setFramesPerBurst(framesPerBurst); // only save good value
 
     mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
-    if (mBufferCapacityInFrames < mFramesPerBurst
+    if (mBufferCapacityInFrames < getFramesPerBurst()
             || mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
         result = AAUDIO_ERROR_OUT_OF_RANGE;
@@ -239,7 +255,7 @@
 
         }
         if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
-            mCallbackFrames = mFramesPerBurst;
+            mCallbackFrames = getFramesPerBurst();
         }
 
         const int32_t callbackBufferSize = mCallbackFrames * getBytesPerFrame();
@@ -271,21 +287,21 @@
     return result;
 
 error:
-    releaseCloseFinal();
+    safeReleaseClose();
     return result;
 }
 
 // This must be called under mStreamLock.
 aaudio_result_t AudioStreamInternal::release_l() {
     aaudio_result_t result = AAUDIO_OK;
-    ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
+    ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
     if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
         aaudio_stream_state_t currentState = getState();
         // Don't release a stream while it is running. Stop it first.
         // If DISCONNECTED then we should still try to stop in case the
         // error callback is still running.
         if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
-            requestStop();
+            requestStop_l();
         }
 
         logReleaseBufferState();
@@ -331,7 +347,7 @@
  * The processing code will then save the current offset
  * between client and server and apply that to any position given to the app.
  */
-aaudio_result_t AudioStreamInternal::requestStart()
+aaudio_result_t AudioStreamInternal::requestStart_l()
 {
     int64_t startTime;
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
@@ -353,6 +369,8 @@
     // Clear any stale timestamps from the previous run.
     drainTimestampsFromService();
 
+    prepareBuffersForStart(); // tell subclasses to get ready
+
     aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
@@ -372,7 +390,7 @@
                               * AAUDIO_NANOS_PER_SECOND
                               / getSampleRate();
         mCallbackEnabled.store(true);
-        result = createThread(periodNanos, aaudio_callback_thread_proc, this);
+        result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
     }
     if (result != AAUDIO_OK) {
         setState(originalState);
@@ -398,33 +416,36 @@
 }
 
 // This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::stopCallback()
+aaudio_result_t AudioStreamInternal::stopCallback_l()
 {
     if (isDataCallbackSet()
             && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
         mCallbackEnabled.store(false);
-        aaudio_result_t result = joinThread(NULL); // may temporarily unlock mStreamLock
+        aaudio_result_t result = joinThread_l(NULL); // may temporarily unlock mStreamLock
         if (result == AAUDIO_ERROR_INVALID_HANDLE) {
             ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
             result = AAUDIO_OK;
         }
         return result;
     } else {
+        ALOGD("%s() skipped, isDataCallbackSet() = %d, isActive() = %d, getState()  = %d", __func__,
+            isDataCallbackSet(), isActive(), getState());
         return AAUDIO_OK;
     }
 }
 
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::requestStop() {
-    aaudio_result_t result = stopCallback();
+aaudio_result_t AudioStreamInternal::requestStop_l() {
+    aaudio_result_t result = stopCallback_l();
     if (result != AAUDIO_OK) {
+        ALOGW("%s() stop callback returned %d, returning early", __func__, result);
         return result;
     }
     // The stream may have been unlocked temporarily to let a callback finish
     // and the callback may have stopped the stream.
     // Check to make sure the stream still needs to be stopped.
-    // See also AudioStream::safeStop().
+    // See also AudioStream::safeStop_l().
     if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+        ALOGD("%s() returning early, not active or disconnected", __func__);
         return AAUDIO_OK;
     }
 
@@ -755,9 +776,9 @@
 
 aaudio_result_t AudioStreamInternal::setBufferSize(int32_t requestedFrames) {
     int32_t adjustedFrames = requestedFrames;
-    const int32_t maximumSize = getBufferCapacity() - mFramesPerBurst;
+    const int32_t maximumSize = getBufferCapacity() - getFramesPerBurst();
     // Minimum size should be a multiple number of bursts.
-    const int32_t minimumSize = 1 * mFramesPerBurst;
+    const int32_t minimumSize = 1 * getFramesPerBurst();
 
     // Clip to minimum size so that rounding up will work better.
     adjustedFrames = std::max(minimumSize, adjustedFrames);
@@ -767,9 +788,9 @@
         adjustedFrames = maximumSize;
     } else {
         // Round to the next highest burst size.
-        int32_t numBursts = (adjustedFrames + mFramesPerBurst - 1) / mFramesPerBurst;
-        adjustedFrames = numBursts * mFramesPerBurst;
-        // Clip just in case maximumSize is not a multiple of mFramesPerBurst.
+        int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
+        adjustedFrames = numBursts * getFramesPerBurst();
+        // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
         adjustedFrames = std::min(maximumSize, adjustedFrames);
     }
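
As a standalone illustration of the clip-and-round logic above, here is a minimal sketch with hypothetical values (192-frame bursts, 1920-frame capacity); the helper name roundUpToBursts is not part of the AAudio sources:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Mirror of the setBufferSize() arithmetic: clip to [1 burst, capacity - 1 burst],
    // then round up to a whole number of bursts.
    int32_t roundUpToBursts(int32_t requestedFrames,
                            int32_t framesPerBurst,
                            int32_t capacityInFrames) {
        const int32_t maximumSize = capacityInFrames - framesPerBurst;
        const int32_t minimumSize = 1 * framesPerBurst;
        int32_t adjusted = std::max(minimumSize, requestedFrames);
        if (adjusted > maximumSize) {
            adjusted = maximumSize;
        } else {
            const int32_t numBursts = (adjusted + framesPerBurst - 1) / framesPerBurst;
            adjusted = std::min(maximumSize, numBursts * framesPerBurst);
        }
        return adjusted;
    }

    int main() {
        printf("%d\n", roundUpToBursts(300, 192, 1920)); // prints 384 (2 bursts)
        return 0;
    }
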
 
@@ -804,15 +825,6 @@
     return mBufferCapacityInFrames;
 }
 
-int32_t AudioStreamInternal::getFramesPerBurst() const {
-    return mFramesPerBurst;
-}
-
-// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
-    return AudioStream::joinThread(returnArg, calculateReasonableTimeout(getFramesPerBurst()));
-}
-
 bool AudioStreamInternal::isClockModelInControl() const {
     return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
 }
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 61591b3..fbe4c13 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -20,7 +20,6 @@
 #include <stdint.h>
 #include <aaudio/AAudio.h>
 
-#include "binding/IAAudioService.h"
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceInterface.h"
 #include "client/IsochronousClockModel.h"
@@ -29,7 +28,6 @@
 #include "utility/AudioClock.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -46,10 +44,6 @@
     AudioStreamInternal(AAudioServiceInterface  &serviceInterface, bool inService);
     virtual ~AudioStreamInternal();
 
-    aaudio_result_t requestStart() override;
-
-    aaudio_result_t requestStop() override;
-
     aaudio_result_t getTimestamp(clockid_t clockId,
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
@@ -58,16 +52,12 @@
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
-    aaudio_result_t release_l() override;
-
     aaudio_result_t setBufferSize(int32_t requestedFrames) override;
 
     int32_t getBufferSize() const override;
 
     int32_t getBufferCapacity() const override;
 
-    int32_t getFramesPerBurst() const override;
-
     int32_t getXRunCount() const override {
         return mXRunCount;
     }
@@ -76,12 +66,9 @@
 
     aaudio_result_t unregisterThread() override;
 
-    aaudio_result_t joinThread(void** returnArg);
-
     // Called internally from 'C'
     virtual void *callbackLoop() = 0;
 
-
     bool isMMap() override {
         return true;
     }
@@ -100,6 +87,10 @@
     }
 
 protected:
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+    aaudio_result_t release_l() REQUIRES(mStreamLock) override;
 
     aaudio_result_t processData(void *buffer,
                          int32_t numFrames,
@@ -121,9 +112,11 @@
 
     aaudio_result_t processCommands();
 
-    aaudio_result_t stopCallback();
+    aaudio_result_t stopCallback_l();
 
-    virtual void advanceClientToMatchServerPosition() = 0;
+    virtual void prepareBuffersForStart() {}
+
+    virtual void advanceClientToMatchServerPosition(int32_t serverMargin = 0) = 0;
 
     virtual void onFlushFromServer() {}
 
@@ -159,7 +152,6 @@
 
     aaudio_handle_t          mServiceStreamHandle; // opaque handle returned from service
 
-    int32_t                  mFramesPerBurst = MIN_FRAMES_PER_BURST; // frames per HAL transfer
     int32_t                  mXRunCount = 0;      // how many underrun events?
 
     // Offset from underlying frame position.
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 9fa2e40..2da5406 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -14,14 +14,13 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
-                          : "AudioStreamInternalCapture_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #include <algorithm>
-#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
 #include <aaudio/AAudio.h>
+#include <media/MediaMetricsItem.h>
 
 #include "client/AudioStreamInternalCapture.h"
 #include "utility/AudioClock.h"
@@ -29,6 +28,14 @@
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 #include <utils/Trace.h>
 
+// We do this after the #includes because, if a header used ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both client and server processes.
+// This is needed to make sense of the logs more easily.
+#define LOG_TAG (mInService ? "AudioStreamInternalCapture_Service" \
+                          : "AudioStreamInternalCapture_Client")
+
 using android::WrappingBuffer;
 
 using namespace aaudio;
@@ -41,9 +48,9 @@
 
 AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
 
-void AudioStreamInternalCapture::advanceClientToMatchServerPosition() {
+void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
-    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
+    int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
     int64_t offset = readCounter - writeCounter;
@@ -143,7 +150,7 @@
                 // Calculate frame position based off of the readCounter because
                 // the writeCounter might have just advanced in the background,
                 // causing us to sleep until a later burst.
-                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + mFramesPerBurst;
+                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
                 wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
             }
                 break;
@@ -183,26 +190,10 @@
 
         const audio_format_t sourceFormat = getDeviceFormat();
         const audio_format_t destinationFormat = getFormat();
-        // TODO factor this out into a utility function
-        if (sourceFormat == destinationFormat) {
-            memcpy(destination, wrappingBuffer.data[partIndex], numBytes);
-        } else if (sourceFormat == AUDIO_FORMAT_PCM_16_BIT
-                   && destinationFormat == AUDIO_FORMAT_PCM_FLOAT) {
-            memcpy_to_float_from_i16(
-                    (float *) destination,
-                    (const int16_t *) wrappingBuffer.data[partIndex],
-                    numSamples);
-        } else if (sourceFormat == AUDIO_FORMAT_PCM_FLOAT
-                   && destinationFormat == AUDIO_FORMAT_PCM_16_BIT) {
-            memcpy_to_i16_from_float(
-                    (int16_t *) destination,
-                    (const float *) wrappingBuffer.data[partIndex],
-                    numSamples);
-        } else {
-            ALOGE("%s() - Format conversion not supported! audio_format_t source = %u, dest = %u",
-                __func__, sourceFormat, destinationFormat);
-            return AAUDIO_ERROR_INVALID_FORMAT;
-        }
+
+        memcpy_by_audio_format(destination, destinationFormat,
+                wrappingBuffer.data[partIndex], sourceFormat, numSamples);
+
         destination += numBytes;
         framesLeft -= framesToProcess;
     }
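
The loop above now delegates all sample conversion to memcpy_by_audio_format(). A minimal usage sketch for one supported format pair (the helper name is illustrative only):

    #include <cstddef>
    #include <cstdint>
    #include <audio_utils/format.h>
    #include <system/audio.h>

    // Convert 16-bit PCM to float with the same single call the capture path uses.
    void convertI16ToFloat(float *dst, const int16_t *src, size_t numSamples) {
        memcpy_by_audio_format(dst, AUDIO_FORMAT_PCM_FLOAT,
                               src, AUDIO_FORMAT_PCM_16_BIT, numSamples);
    }
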
@@ -262,7 +253,7 @@
 
         if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
-            result = systemStopFromCallback();
+            result = systemStopInternal();
             break;
         }
     }
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 6436a53..251a7f2 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -23,7 +23,6 @@
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -46,7 +45,7 @@
     }
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void advanceClientToMatchServerPosition(int32_t serverOffset = 0) override;
 
 /**
  * Low level data processing that will not block. It will just read or write as much as it can.
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 1303daf..71bde90 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -14,18 +14,26 @@
  * limitations under the License.
  */
 
-#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
-                          : "AudioStreamInternalPlay_Client")
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
 #include "client/AudioStreamInternalPlay.h"
 #include "utility/AudioClock.h"
 
+// We do this after the #includes because, if a header used ALOG,
+// it would fail on the reference to mInService.
+#undef LOG_TAG
+// This file is used in both client and server processes.
+// This is needed to make sense of the logs more easily.
+#define LOG_TAG (mInService ? "AudioStreamInternalPlay_Service" \
+                            : "AudioStreamInternalPlay_Client")
+
+using android::status_t;
 using android::WrappingBuffer;
 
 using namespace aaudio;
@@ -49,7 +57,7 @@
                              getDeviceChannelCount());
 
         if (result != AAUDIO_OK) {
-            releaseCloseFinal();
+            safeReleaseClose();
         }
         // Sample rate is constrained to common values by now and should not overflow.
         int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
@@ -59,9 +67,9 @@
 }
 
 // This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternalPlay::requestPause()
+aaudio_result_t AudioStreamInternalPlay::requestPause_l()
 {
-    aaudio_result_t result = stopCallback();
+    aaudio_result_t result = stopCallback_l();
     if (result != AAUDIO_OK) {
         return result;
     }
@@ -76,7 +84,7 @@
     return mServiceInterface.pauseStream(mServiceStreamHandle);
 }
 
-aaudio_result_t AudioStreamInternalPlay::requestFlush() {
+aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
     if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -86,8 +94,13 @@
     return mServiceInterface.flushStream(mServiceStreamHandle);
 }
 
-void AudioStreamInternalPlay::advanceClientToMatchServerPosition() {
-    int64_t readCounter = mAudioEndpoint->getDataReadCounter();
+void AudioStreamInternalPlay::prepareBuffersForStart() {
+    // Prevent stale data from being played.
+    mAudioEndpoint->eraseDataMemory();
+}
+
+void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
+    int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
 
     // Bump offset so caller does not see the retrograde motion in getFramesRead().
@@ -145,7 +158,9 @@
     if (mNeedCatchUp.isRequested()) {
         // Catch an MMAP pointer that is already advancing.
         // This will avoid initial underruns caused by a slow cold start.
-        advanceClientToMatchServerPosition();
+        // We add a one burst margin in case the DSP advances before we can write the data.
+        // This can help prevent the beginning of the stream from being skipped.
+        advanceClientToMatchServerPosition(getFramesPerBurst());
         mNeedCatchUp.acknowledge();
     }
 
@@ -287,7 +302,7 @@
             }
         } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
-            result = systemStopFromCallback();
+            result = systemStopInternal();
             break;
         }
     }
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 2e93157..03c957d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -25,7 +25,6 @@
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
-using android::IAAudioService;
 
 namespace aaudio {
 
@@ -36,9 +35,9 @@
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
-    aaudio_result_t requestPause() override;
+    aaudio_result_t requestPause_l() override;
 
-    aaudio_result_t requestFlush() override;
+    aaudio_result_t requestFlush_l() override;
 
     bool isFlushSupported() const override {
         // Only implement FLUSH for OUTPUT streams.
@@ -65,7 +64,9 @@
 
 protected:
 
-    void advanceClientToMatchServerPosition() override;
+    void prepareBuffersForStart() override;
+
+    void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
 
     void onFlushFromServer() override;
 
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8965875..d103aca 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -87,6 +87,30 @@
     streamBuilder->setDeviceId(deviceId);
 }
 
+AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
+                                                   const char* packageName)
+{
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    std::optional<std::string> optionalPackageName;
+    if (packageName != nullptr) {
+      optionalPackageName = std::string(packageName);
+    }
+    // Only system apps can read the op package name. For regular apps, the
+    // regular package name is a sufficient replacement.
+    streamBuilder->setOpPackageName(optionalPackageName);
+}
+
+AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* builder,
+                                                      const char* attributionTag)
+{
+    AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+    std::optional<std::string> optionalAttrTag;
+    if (attributionTag != nullptr) {
+      optionalAttrTag = std::string(attributionTag);
+    }
+    streamBuilder->setAttributionTag(optionalAttrTag);
+}
+
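
A usage sketch for the two new setters; the package name and attribution tag are placeholders, and the declarations are assumed to be visible to the caller (they are system APIs rather than part of the public NDK header):

    #include <aaudio/AAudio.h>

    aaudio_result_t openAttributedStream(AAudioStream **streamOut) {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;

        // Placeholder identifiers; passing nullptr clears the value.
        AAudioStreamBuilder_setPackageName(builder, "com.example.app");
        AAudioStreamBuilder_setAttributionTag(builder, "exampleTag");

        result = AAudioStreamBuilder_openStream(builder, streamOut);
        AAudioStreamBuilder_delete(builder);
        return result;
    }
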
 AAUDIO_API void AAudioStreamBuilder_setSampleRate(AAudioStreamBuilder* builder,
                                               int32_t sampleRate)
 {
@@ -209,7 +233,6 @@
     AudioStreamBuilder *streamBuilder = COMMON_GET_FROM_BUILDER_OR_RETURN(streamPtr);
     aaudio_result_t result = streamBuilder->build(&audioStream);
     if (result == AAUDIO_OK) {
-        audioStream->registerPlayerBase();
         *streamPtr = (AAudioStream*) audioStream;
         id = audioStream->getId();
     } else {
@@ -255,17 +278,16 @@
     if (audioStream != nullptr) {
         aaudio_stream_id_t id = audioStream->getId();
         ALOGD("%s(s#%u) called ---------------", __func__, id);
-        result = audioStream->safeRelease();
-        // safeRelease will only fail if called illegally, for example, from a callback.
+        result = audioStream->safeReleaseClose();
+        // safeReleaseClose will only fail if called illegally, for example, from a callback.
         // That would result in deleting an active stream, which would cause a crash.
         if (result != AAUDIO_OK) {
             ALOGW("%s(s#%u) failed. Close it from another thread.",
                   __func__, id);
         } else {
             audioStream->unregisterPlayerBase();
-             // Mark CLOSED to keep destructors from asserting.
-            audioStream->closeFinal();
-            delete audioStream;
+            // Allow the stream to be deleted.
+            AudioStreamBuilder::stopUsingStream(audioStream);
         }
         ALOGD("%s(s#%u) returned %d ---------", __func__, id, result);
     }
@@ -349,7 +371,8 @@
 
     // Don't allow writes when playing with a callback.
     if (audioStream->isDataCallbackActive()) {
-        ALOGD("Cannot write to a callback stream when running.");
+        // A developer requested this warning because it would have saved lots of debugging.
+        ALOGW("%s() - Cannot write to a callback stream when running.", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 5f45261..acfac24 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -25,8 +25,7 @@
 
 // TODO These defines should be moved to a central place in audio.
 #define SAMPLES_PER_FRAME_MIN        1
-// TODO Remove 8 channel limitation.
-#define SAMPLES_PER_FRAME_MAX        FCC_8
+#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
 #define SAMPLE_RATE_HZ_MIN           8000
 // HDMI supports up to 32 channels at 1536000 Hz.
 #define SAMPLE_RATE_HZ_MAX           1600000
@@ -48,13 +47,17 @@
     mInputPreset          = other.mInputPreset;
     mAllowedCapturePolicy = other.mAllowedCapturePolicy;
     mIsPrivacySensitive   = other.mIsPrivacySensitive;
+    mOpPackageName        = other.mOpPackageName;
+    mAttributionTag       = other.mAttributionTag;
 }
 
 static aaudio_result_t isFormatValid(audio_format_t format) {
     switch (format) {
         case AUDIO_FORMAT_DEFAULT:
         case AUDIO_FORMAT_PCM_16_BIT:
+        case AUDIO_FORMAT_PCM_32_BIT:
         case AUDIO_FORMAT_PCM_FLOAT:
+        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
             break; // valid
         default:
             ALOGD("audioFormat not valid, audio_format_t = 0x%08x", format);
@@ -201,4 +204,8 @@
     ALOGD("mInputPreset          = %6d", mInputPreset);
     ALOGD("mAllowedCapturePolicy = %6d", mAllowedCapturePolicy);
     ALOGD("mIsPrivacySensitive   = %s", mIsPrivacySensitive ? "true" : "false");
+    ALOGD("mOpPackageName        = %s", !mOpPackageName.has_value() ?
+        "(null)" : mOpPackageName.value().c_str());
+    ALOGD("mAttributionTag       = %s", !mAttributionTag.has_value() ?
+        "(null)" : mAttributionTag.value().c_str());
 }
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 3e65b37..5737052 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -136,6 +136,23 @@
         mIsPrivacySensitive = privacySensitive;
     }
 
+    const std::optional<std::string> getOpPackageName() const {
+        return mOpPackageName;
+    }
+
+    // TODO b/182392769: reexamine if Identity can be used
+    void setOpPackageName(const std::optional<std::string> opPackageName) {
+        mOpPackageName = opPackageName;
+    }
+
+    const std::optional<std::string> getAttributionTag() const {
+        return mAttributionTag;
+    }
+
+    void setAttributionTag(const std::optional<std::string> attributionTag) {
+        mAttributionTag = attributionTag;
+    }
+
     /**
      * @return bytes per frame of getFormat()
      */
@@ -167,6 +184,8 @@
     aaudio_allowed_capture_policy_t mAllowedCapturePolicy = AAUDIO_UNSPECIFIED;
     aaudio_session_id_t             mSessionId            = AAUDIO_SESSION_ID_NONE;
     bool                            mIsPrivacySensitive   = false;
+    std::optional<std::string>      mOpPackageName        = {};
+    std::optional<std::string>      mAttributionTag       = {};
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 7f5d8d5..0e5b8be 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -80,6 +80,8 @@
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_INVALID);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I16);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_FLOAT);
+        AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I24_PACKED);
+        AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I32);
     }
     return "Unrecognized";
 }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index f5c75ca..09d9535 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -39,15 +39,23 @@
 }
 
 AudioStream::AudioStream()
-        : mPlayerBase(new MyPlayerBase(this))
+        : mPlayerBase(new MyPlayerBase())
         , mStreamId(AAudio_getNextStreamId())
         {
-    // mThread is a pthread_t of unknown size so we need memset.
-    memset(&mThread, 0, sizeof(mThread));
     setPeriodNanoseconds(0);
 }
 
 AudioStream::~AudioStream() {
+    // Please preserve these logs because there have been several bugs related to
+    // AudioStream deletion and late callbacks.
+    ALOGD("%s(s#%u) mPlayerBase strongCount = %d",
+            __func__, getId(), mPlayerBase->getStrongCount());
+
+    ALOGE_IF(pthread_equal(pthread_self(), mThread),
+            "%s() destructor running in callback", __func__);
+
+    ALOGE_IF(mHasThread, "%s() callback thread never join()ed", __func__);
+
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
@@ -55,8 +63,6 @@
                           || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
                         "~AudioStream() - still in use, state = %s",
                         AudioGlobal_convertStreamStateToText(getState()));
-
-    mPlayerBase->clearParentReference(); // remove reference to this AudioStream
 }
 
 aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -105,14 +111,23 @@
     return AAUDIO_OK;
 }
 
-void AudioStream::logOpen() {
+void AudioStream::logOpenActual() {
     if (mMetricsId.size() > 0) {
-        android::mediametrics::LogItem(mMetricsId)
-                .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
-                     AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
-                .set(AMEDIAMETRICS_PROP_SHARINGMODE,
-                     AudioGlobal_convertSharingModeToText(getSharingMode()))
-                .record();
+        android::mediametrics::LogItem item(mMetricsId);
+        item.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_OPEN)
+            .set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL,
+                AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+            .set(AMEDIAMETRICS_PROP_SHARINGMODEACTUAL,
+                AudioGlobal_convertSharingModeToText(getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, getBufferCapacity())
+            .set(AMEDIAMETRICS_PROP_BURSTFRAMES, getFramesPerBurst())
+            .set(AMEDIAMETRICS_PROP_DIRECTION,
+                AudioGlobal_convertDirectionToText(getDirection()));
+
+        if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
+            item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
+        }
+        item.record();
     }
 }
 
@@ -127,13 +142,13 @@
 }
 
 aaudio_result_t AudioStream::systemStart() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
-
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
+
     switch (getState()) {
         // Is this a good time to start?
         case AAUDIO_STREAM_STATE_OPEN:
@@ -162,16 +177,15 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_result_t result = requestStart();
+    aaudio_result_t result = requestStart_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->start();
+        (void) mPlayerBase->startWithStatus(getDeviceId());
     }
     return result;
 }
 
 aaudio_result_t AudioStream::systemPause() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
 
     if (!isPauseSupported()) {
         return AAUDIO_ERROR_UNIMPLEMENTED;
@@ -182,6 +196,7 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
     switch (getState()) {
         // Proceed with pausing.
         case AAUDIO_STREAM_STATE_STARTING:
@@ -212,10 +227,10 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_result_t result = requestPause();
+    aaudio_result_t result = requestPause_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->pause();
+        (void) mPlayerBase->pauseWithStatus();
     }
     return result;
 }
@@ -226,46 +241,40 @@
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("stream cannot be flushed from a callback!");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    std::lock_guard<std::mutex> lock(mStreamLock);
     aaudio_result_t result = AAudio_isFlushAllowed(getState());
     if (result != AAUDIO_OK) {
         return result;
     }
 
-    return requestFlush();
+    return requestFlush_l();
 }
 
-aaudio_result_t AudioStream::systemStopFromCallback() {
+aaudio_result_t AudioStream::systemStopInternal() {
     std::lock_guard<std::mutex> lock(mStreamLock);
-    aaudio_result_t result = safeStop();
+    aaudio_result_t result = safeStop_l();
     if (result == AAUDIO_OK) {
         // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->stop();
+        (void) mPlayerBase->stopWithStatus();
     }
     return result;
 }
 
 aaudio_result_t AudioStream::systemStopFromApp() {
-    std::lock_guard<std::mutex> lock(mStreamLock);
+    // This check can and should be done outside the lock.
     if (collidesWithCallback()) {
         ALOGE("stream cannot be stopped by calling from a callback!");
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    aaudio_result_t result = safeStop();
-    if (result == AAUDIO_OK) {
-        // We only call this for logging in "dumpsys audio". So ignore return code.
-        (void) mPlayerBase->stop();
-    }
-    return result;
+    return systemStopInternal();
 }
 
-// This must be called under mStreamLock.
-aaudio_result_t AudioStream::safeStop() {
+aaudio_result_t AudioStream::safeStop_l() {
 
     switch (getState()) {
         // Proceed with stopping.
@@ -297,26 +306,61 @@
             return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    return requestStop();
+    return requestStop_l();
 }
 
 aaudio_result_t AudioStream::safeRelease() {
-    // This get temporarily unlocked in the release() when joining callback threads.
-    std::lock_guard<std::mutex> lock(mStreamLock);
     if (collidesWithCallback()) {
         ALOGE("%s cannot be called from a callback!", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    if (getState() == AAUDIO_STREAM_STATE_CLOSING) {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    if (getState() == AAUDIO_STREAM_STATE_CLOSING) { // already released?
         return AAUDIO_OK;
     }
     return release_l();
 }
 
+aaudio_result_t AudioStream::safeReleaseClose() {
+    if (collidesWithCallback()) {
+        ALOGE("%s cannot be called from a callback!", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    return safeReleaseCloseInternal();
+}
+
+aaudio_result_t AudioStream::safeReleaseCloseInternal() {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    releaseCloseFinal_l();
+    return AAUDIO_OK;
+}
+
+void AudioStream::close_l() {
+    // Releasing the stream will set the state to CLOSING.
+    assert(getState() == AAUDIO_STREAM_STATE_CLOSING);
+    // setState() prevents a transition from CLOSING to any state other than CLOSED.
+    // State is checked by destructor.
+    setState(AAUDIO_STREAM_STATE_CLOSED);
+
+    if (!mMetricsId.empty()) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_FRAMESTRANSFERRED,
+                        getDirection() == AAUDIO_DIRECTION_INPUT ? getFramesWritten()
+                                                                 : getFramesRead())
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM)
+                .record();
+    }
+}
+
 void AudioStream::setState(aaudio_stream_state_t state) {
     ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
+    if (state == mState) {
+        return; // no change
+    }
     // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED && mState != state) {
+    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
                 .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
@@ -324,18 +368,18 @@
     }
     // CLOSED is a final state
     if (mState == AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
-        ALOGE("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
     // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
     } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
                && !(state == AAUDIO_STREAM_STATE_CLOSING
                    || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGE("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
+        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
 
     } else {
         mState = state;
@@ -387,22 +431,29 @@
     return procResult;
 }
 
-// This is the entry point for the new thread created by createThread().
+
+// This is the entry point for the new thread created by createThread_l().
 // It converts the 'C' function call to a C++ method call.
 static void* AudioStream_internalThreadProc(void* threadArg) {
     AudioStream *audioStream = (AudioStream *) threadArg;
-    return audioStream->wrapUserThread();
+    // Prevent the stream from being deleted while being used.
+    // This is just for extra safety. It is probably not needed because
+    // this callback should be joined before the stream is closed.
+    android::sp<AudioStream> protectedStream(audioStream);
+    // Balance the incStrong() in createThread_l().
+    protectedStream->decStrong(nullptr);
+    return protectedStream->wrapUserThread();
 }
 
 // This is not exposed in the API.
 // But it is still used internally to implement callbacks for MMAP mode.
-aaudio_result_t AudioStream::createThread(int64_t periodNanoseconds,
-                                     aaudio_audio_thread_proc_t threadProc,
-                                     void* threadArg)
+aaudio_result_t AudioStream::createThread_l(int64_t periodNanoseconds,
+                                            aaudio_audio_thread_proc_t threadProc,
+                                            void* threadArg)
 {
     if (mHasThread) {
-        ALOGE("createThread() - mHasThread already true");
-        return AAUDIO_ERROR_INVALID_STATE;
+        ALOGD("%s() - previous thread was not joined, join now to be safe", __func__);
+        joinThread_l(nullptr);
     }
     if (threadProc == nullptr) {
         return AAUDIO_ERROR_NULL;
@@ -411,10 +462,16 @@
     mThreadProc = threadProc;
     mThreadArg = threadArg;
     setPeriodNanoseconds(periodNanoseconds);
+    mHasThread = true;
+    // Prevent this object from getting deleted before the thread has a chance to create
+    // its strong pointer. Assume the thread will call decStrong().
+    this->incStrong(nullptr);
     int err = pthread_create(&mThread, nullptr, AudioStream_internalThreadProc, this);
     if (err != 0) {
         android::status_t status = -errno;
-        ALOGE("createThread() - pthread_create() failed, %d", status);
+        ALOGE("%s() - pthread_create() failed, %d", __func__, status);
+        this->decStrong(nullptr); // Because the thread won't do it.
+        mHasThread = false;
         return AAudioConvert_androidToAAudioResult(status);
     } else {
         // TODO Use AAudioThread or maybe AndroidThread
@@ -429,41 +486,42 @@
         err = pthread_setname_np(mThread, name);
         ALOGW_IF((err != 0), "Could not set name of AAudio thread. err = %d", err);
 
-        mHasThread = true;
         return AAUDIO_OK;
     }
 }
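
The incStrong()/decStrong() pairing above keeps the stream alive until the new thread has wrapped it in its own sp<>. A minimal sketch of the same pattern with an assumed Worker class (not the actual AudioStream code):

    #include <pthread.h>
    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    class Worker : public android::RefBase {
    public:
        static void *threadProc(void *arg) {
            Worker *raw = static_cast<Worker *>(arg);
            android::sp<Worker> self(raw); // keep the object alive while the thread runs
            self->decStrong(nullptr);      // balance the incStrong() in startThread()
            self->run();
            return nullptr;
        }

        void startThread() {
            incStrong(nullptr);            // the thread may outlive the caller's sp<>
            pthread_t thread;
            if (pthread_create(&thread, nullptr, threadProc, this) != 0) {
                decStrong(nullptr);        // thread never started, undo the extra reference
            } else {
                pthread_detach(thread);    // illustration only; AudioStream joins instead
            }
        }

        void run() { /* callback work */ }
    };

A caller would hold the object in an sp<> (for example sp<Worker> w = new Worker(); w->startThread();); even if that sp<> is dropped immediately, the extra reference keeps the object alive until the thread's own sp<> goes away.
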
 
+aaudio_result_t AudioStream::joinThread(void** returnArg) {
+    // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+    std::lock_guard<std::mutex> lock(mStreamLock);
+    return joinThread_l(returnArg);
+}
+
 // This must be called under mStreamLock.
-aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds __unused)
-{
+aaudio_result_t AudioStream::joinThread_l(void** returnArg) {
     if (!mHasThread) {
-        ALOGE("joinThread() - but has no thread");
+        ALOGD("joinThread() - but has no thread or already join()ed");
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result = AAUDIO_OK;
     // If the callback is stopping the stream because the app passed back STOP
     // then we don't need to join(). The thread is already about to exit.
-    if (pthread_self() != mThread) {
+    if (!pthread_equal(pthread_self(), mThread)) {
         // Called from an app thread. Not the callback.
         // Unlock because the callback may be trying to stop the stream but is blocked.
         mStreamLock.unlock();
-#if 0
-        // TODO implement equivalent of pthread_timedjoin_np()
-        struct timespec abstime;
-        int err = pthread_timedjoin_np(mThread, returnArg, &abstime);
-#else
         int err = pthread_join(mThread, returnArg);
-#endif
         mStreamLock.lock();
         if (err) {
             ALOGE("%s() pthread_join() returns err = %d", __func__, err);
             result = AAudioConvert_androidToAAudioResult(-err);
+        } else {
+            ALOGD("%s() pthread_join succeeded", __func__);
+            // Prevent joining a second time, which has undefined behavior.
+            mHasThread = false;
         }
+    } else {
+        ALOGD("%s() pthread_join() called on itself!", __func__);
     }
-    // This must be set false so that the callback thread can be created
-    // when the stream is restarted.
-    mHasThread = false;
     return (result != AAUDIO_OK) ? result : mThreadRegistrationResult;
 }
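
joinThread_l() drops mStreamLock around pthread_join() so a callback that is blocked on the same lock can finish, and it uses pthread_equal() to avoid joining itself. A reduced sketch of that pattern with assumed names:

    #include <mutex>
    #include <pthread.h>

    class StreamLike {
    public:
        // Caller already holds mLock, mirroring the _l naming convention.
        int joinCallbackThreadLocked() {
            if (!mHasThread) return 0;
            if (pthread_equal(pthread_self(), mThread)) {
                return 0;                 // never join the current thread
            }
            mLock.unlock();               // let a blocked callback make progress
            void *ret = nullptr;
            int err = pthread_join(mThread, &ret);
            mLock.lock();
            if (err == 0) {
                mHasThread = false;       // joining twice is undefined behavior
            }
            return err;
        }

        std::mutex mLock;
        pthread_t  mThread{};
        bool       mHasThread = false;
    };
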
 
@@ -520,11 +578,18 @@
 }
 
 #if AAUDIO_USE_VOLUME_SHAPER
-android::media::VolumeShaper::Status AudioStream::applyVolumeShaper(
-        const android::media::VolumeShaper::Configuration& configuration __unused,
-        const android::media::VolumeShaper::Operation& operation __unused) {
-    ALOGW("applyVolumeShaper() is not supported");
-    return android::media::VolumeShaper::Status::ok();
+::android::binder::Status AudioStream::MyPlayerBase::applyVolumeShaper(
+        const ::android::media::VolumeShaper::Configuration& configuration,
+        const ::android::media::VolumeShaper::Operation& operation) {
+    android::sp<AudioStream> audioStream;
+    {
+        std::lock_guard<std::mutex> lock(mParentLock);
+        audioStream = mParent.promote();
+    }
+    if (audioStream) {
+        return audioStream->applyVolumeShaper(configuration, operation);
+    }
+    return android::NO_ERROR;
 }
 #endif
 
@@ -534,26 +599,37 @@
     doSetVolume(); // apply this change
 }
 
-AudioStream::MyPlayerBase::MyPlayerBase(AudioStream *parent) : mParent(parent) {
-}
-
-AudioStream::MyPlayerBase::~MyPlayerBase() {
-}
-
-void AudioStream::MyPlayerBase::registerWithAudioManager() {
+void AudioStream::MyPlayerBase::registerWithAudioManager(const android::sp<AudioStream>& parent) {
+    std::lock_guard<std::mutex> lock(mParentLock);
+    mParent = parent;
     if (!mRegistered) {
-        init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(mParent->getUsage()));
+        init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(parent->getUsage()),
+            (audio_session_t)parent->getSessionId());
         mRegistered = true;
     }
 }
 
 void AudioStream::MyPlayerBase::unregisterWithAudioManager() {
+    std::lock_guard<std::mutex> lock(mParentLock);
     if (mRegistered) {
         baseDestroy();
         mRegistered = false;
     }
 }
 
+android::status_t AudioStream::MyPlayerBase::playerSetVolume() {
+    android::sp<AudioStream> audioStream;
+    {
+        std::lock_guard<std::mutex> lock(mParentLock);
+        audioStream = mParent.promote();
+    }
+    if (audioStream) {
+        // No pan is applied; only the left volume from the IPlayer interface is used.
+        audioStream->setDuckAndMuteVolume(mVolumeMultiplierL  /* mPanMultiplierL */);
+    }
+    return android::NO_ERROR;
+}
+
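
MyPlayerBase now holds only a weak reference to its stream and promotes it under a lock before use. A generic sketch of that wp<> to sp<> promotion, with assumed Parent/Child types:

    #include <mutex>
    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    struct Parent : public android::RefBase {
        void doWork() {}
    };

    struct Child {
        void setParent(const android::sp<Parent> &parent) {
            std::lock_guard<std::mutex> lock(mLock);
            mParent = parent;               // store only a weak reference
        }
        void callParent() {
            android::sp<Parent> parent;
            {
                std::lock_guard<std::mutex> lock(mLock);
                parent = mParent.promote(); // null once the parent is deleted
            }
            if (parent != nullptr) {
                parent->doWork();
            }
        }
        std::mutex mLock;
        android::wp<Parent> mParent;
    };
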
 void AudioStream::MyPlayerBase::destroy() {
     unregisterWithAudioManager();
 }
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index fb71c36..9835c8c 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -20,13 +20,17 @@
 #include <atomic>
 #include <mutex>
 #include <stdint.h>
-#include <aaudio/AAudio.h>
+
+#include <android-base/thread_annotations.h>
 #include <binder/IServiceManager.h>
 #include <binder/Status.h>
 #include <utils/StrongPointer.h>
 
-#include "media/VolumeShaper.h"
-#include "media/PlayerBase.h"
+#include <aaudio/AAudio.h>
+#include <media/AudioSystem.h>
+#include <media/PlayerBase.h>
+#include <media/VolumeShaper.h>
+
 #include "utility/AAudioUtilities.h"
 #include "utility/MonotonicCounter.h"
 
@@ -45,7 +49,8 @@
 /**
  * AAudio audio stream.
  */
-class AudioStream {
+// By extending AudioDeviceCallback, we also inherit from RefBase.
+class AudioStream : public android::AudioSystem::AudioDeviceCallback {
 public:
 
     AudioStream();
@@ -54,11 +59,6 @@
 
 protected:
 
-    /* Asynchronous requests.
-     * Use waitForStateChange() to wait for completion.
-     */
-    virtual aaudio_result_t requestStart() = 0;
-
     /**
      * Check the state to see if Pause is currently legal.
      *
@@ -77,18 +77,22 @@
         return false;
     }
 
-    virtual aaudio_result_t requestPause()
-    {
+    /* Asynchronous requests.
+     * Use waitForStateChange() to wait for completion.
+     */
+    virtual aaudio_result_t requestStart_l() REQUIRES(mStreamLock) = 0;
+
+    virtual aaudio_result_t requestPause_l() REQUIRES(mStreamLock) {
         // Only implement this for OUTPUT streams.
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    virtual aaudio_result_t requestFlush() {
+    virtual aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) {
         // Only implement this for OUTPUT streams.
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    virtual aaudio_result_t requestStop() = 0;
+    virtual aaudio_result_t requestStop_l() REQUIRES(mStreamLock) = 0;
 
 public:
     virtual aaudio_result_t getTimestamp(clockid_t clockId,
@@ -114,36 +118,37 @@
     virtual aaudio_result_t open(const AudioStreamBuilder& builder);
 
     // log to MediaMetrics
-    virtual void logOpen();
+    virtual void logOpenActual();
     void logReleaseBufferState();
 
+    /* Note about naming for "release" and "close" related methods.
+     *
+     * These names are intended to match the public AAudio API.
+     * The original AAudio API had an AAudioStream_close() function that
+     * released the hardware and deleted the stream. That made it difficult
+     * because apps want to release the HW ASAP but are not in a rush to delete
+     * the stream object. So in R we added an AAudioStream_release() function
+     * that just released the hardware.
+     * The AAudioStream_close() method releases if needed and then closes.
+     */
+
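
A short usage sketch of the split described above; the call ordering is the point, and the surrounding helper is hypothetical:

    #include <aaudio/AAudio.h>

    void shutDownStream(AAudioStream *stream) {
        AAudioStream_requestStop(stream);
        // Give the hardware back as soon as possible (AAudioStream_release(), added in R).
        AAudioStream_release(stream);
        // ... the app may still query final counters on the released stream ...
        // close() releases if needed and then deletes the stream object.
        AAudioStream_close(stream);
    }
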
+protected:
     /**
      * Free any hardware or system resources from the open() call.
      * It is safe to call release_l() multiple times.
      */
-    virtual aaudio_result_t release_l() {
+    virtual aaudio_result_t release_l() REQUIRES(mStreamLock) {
         setState(AAUDIO_STREAM_STATE_CLOSING);
         return AAUDIO_OK;
     }
 
-    aaudio_result_t closeFinal() {
-        // State is checked by destructor.
-        setState(AAUDIO_STREAM_STATE_CLOSED);
-        return AAUDIO_OK;
-    }
-
     /**
-     * Release then close the stream.
-     * @return AAUDIO_OK or negative error.
+     * Free any resources not already freed by release_l().
+     * Assumes that release_l() has already been called.
      */
-    aaudio_result_t releaseCloseFinal() {
-        aaudio_result_t result = release_l(); // TODO review locking
-        if (result == AAUDIO_OK) {
-          result = closeFinal();
-        }
-        return result;
-    }
+    virtual void close_l() REQUIRES(mStreamLock);
 
+public:
     // This is only used to identify a stream in the logs without
     // revealing any pointers.
     aaudio_stream_id_t getId() {
@@ -152,11 +157,15 @@
 
     virtual aaudio_result_t setBufferSize(int32_t requestedFrames) = 0;
 
-    virtual aaudio_result_t createThread(int64_t periodNanoseconds,
-                                       aaudio_audio_thread_proc_t threadProc,
-                                       void *threadArg);
+    aaudio_result_t createThread(int64_t periodNanoseconds,
+                                 aaudio_audio_thread_proc_t threadProc,
+                                 void *threadArg)
+                                 EXCLUDES(mStreamLock) {
+        std::lock_guard<std::mutex> lock(mStreamLock);
+        return createThread_l(periodNanoseconds, threadProc, threadArg);
+    }
 
-    aaudio_result_t joinThread(void **returnArg, int64_t timeoutNanoseconds);
+    aaudio_result_t joinThread(void **returnArg);
 
     virtual aaudio_result_t registerThread() {
         return AAUDIO_OK;
@@ -183,11 +192,11 @@
     }
 
     virtual int32_t getBufferCapacity() const {
-        return AAUDIO_ERROR_UNIMPLEMENTED;
+        return mBufferCapacity;
     }
 
     virtual int32_t getFramesPerBurst() const {
-        return AAUDIO_ERROR_UNIMPLEMENTED;
+        return mFramesPerBurst;
     }
 
     virtual int32_t getXRunCount() const {
@@ -328,6 +337,10 @@
      */
     bool collidesWithCallback() const;
 
+    // Implement AudioDeviceCallback
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+            audio_port_handle_t deviceId) override {};
+
     // ============== I/O ===========================
     // A Stream will only implement read() or write() depending on its direction.
     virtual aaudio_result_t write(const void *buffer __unused,
@@ -366,7 +379,7 @@
      */
     void registerPlayerBase() {
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
-            mPlayerBase->registerWithAudioManager();
+            mPlayerBase->registerWithAudioManager(this);
         }
     }
 
@@ -393,23 +406,37 @@
     /**
      * This is called internally when an app callback returns AAUDIO_CALLBACK_RESULT_STOP.
      */
-    aaudio_result_t systemStopFromCallback();
+    aaudio_result_t systemStopInternal();
 
+    /**
+     * Safely RELEASE a stream after taking mStreamLock and checking
+     * to make sure we are not being called from a callback.
+     * @return AAUDIO_OK or a negative error
+     */
     aaudio_result_t safeRelease();
 
+    /**
+     * Safely RELEASE and CLOSE a stream after taking mStreamLock and checking
+     * to make sure we are not being called from a callback.
+     * @return AAUDIO_OK or a negative error
+     */
+    aaudio_result_t safeReleaseClose();
+
+    aaudio_result_t safeReleaseCloseInternal();
+
 protected:
 
     // PlayerBase allows the system to control the stream volume.
     class MyPlayerBase : public android::PlayerBase {
     public:
-        explicit MyPlayerBase(AudioStream *parent);
+        MyPlayerBase() {}
 
-        virtual ~MyPlayerBase();
+        virtual ~MyPlayerBase() = default;
 
         /**
          * Register for volume changes and remote control.
          */
-        void registerWithAudioManager();
+        void registerWithAudioManager(const android::sp<AudioStream>& parent);
 
         /**
          * UnRegister.
@@ -421,8 +448,6 @@
          */
         void destroy() override;
 
-        void clearParentReference() { mParent = nullptr; }
-
         // Just a stub. The ability to start audio through PlayerBase is being deprecated.
         android::status_t playerStart() override {
             return android::NO_ERROR;
@@ -438,28 +463,27 @@
             return android::NO_ERROR;
         }
 
-        android::status_t playerSetVolume() override {
-            // No pan and only left volume is taken into account from IPLayer interface
-            mParent->setDuckAndMuteVolume(mVolumeMultiplierL  /* * mPanMultiplierL */);
-            return android::NO_ERROR;
-        }
+        android::status_t playerSetVolume() override;
 
 #if AAUDIO_USE_VOLUME_SHAPER
-        ::android::binder::Status applyVolumeShaper(
-                const ::android::media::VolumeShaper::Configuration& configuration,
-                const ::android::media::VolumeShaper::Operation& operation) {
-            return mParent->applyVolumeShaper(configuration, operation);
-        }
+        ::android::binder::Status applyVolumeShaper();
 #endif
 
         aaudio_result_t getResult() {
             return mResult;
         }
 
+        // Returns the playerIId if registered, -1 otherwise.
+        int32_t getPlayerIId() const {
+            return mPIId;
+        }
+
     private:
-        AudioStream          *mParent;
-        aaudio_result_t       mResult = AAUDIO_OK;
-        bool                  mRegistered = false;
+        // Use a weak pointer so the AudioStream can be deleted.
+        std::mutex               mParentLock;
+        android::wp<AudioStream> mParent GUARDED_BY(mParentLock);
+        aaudio_result_t          mResult = AAUDIO_OK;
+        bool                     mRegistered = false;
     };
 
     /**
@@ -470,30 +494,32 @@
         mSampleRate = sampleRate;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setSamplesPerFrame(int32_t samplesPerFrame) {
         mSamplesPerFrame = samplesPerFrame;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
+    void setFramesPerBurst(int32_t framesPerBurst) {
+        mFramesPerBurst = framesPerBurst;
+    }
+
+    // This should not be called after the open() call.
+    void setBufferCapacity(int32_t bufferCapacity) {
+        mBufferCapacity = bufferCapacity;
+    }
+
+    // This should not be called after the open() call.
     void setSharingMode(aaudio_sharing_mode_t sharingMode) {
         mSharingMode = sharingMode;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setFormat(audio_format_t format) {
         mFormat = format;
     }
 
-    /**
-     * This should not be called after the open() call.
-     */
+    // This should not be called after the open() call.
     void setDeviceFormat(audio_format_t format) {
         mDeviceFormat = format;
     }
@@ -508,10 +534,18 @@
         mDeviceId = deviceId;
     }
 
+    // This should not be called after the open() call.
     void setSessionId(int32_t sessionId) {
         mSessionId = sessionId;
     }
 
+    aaudio_result_t createThread_l(int64_t periodNanoseconds,
+                                           aaudio_audio_thread_proc_t threadProc,
+                                           void *threadArg)
+                                           REQUIRES(mStreamLock);
+
+    aaudio_result_t joinThread_l(void **returnArg) REQUIRES(mStreamLock);
+
     std::atomic<bool>    mCallbackEnabled{false};
 
     float                mDuckAndMuteVolume = 1.0f;
@@ -578,11 +612,22 @@
 
     std::string mMetricsId; // set once during open()
 
+    std::mutex                 mStreamLock;
+
 private:
 
-    aaudio_result_t safeStop();
+    aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
 
-    std::mutex                 mStreamLock;
+    /**
+     * Release then close the stream.
+     */
+    void releaseCloseFinal_l() REQUIRES(mStreamLock) {
+        if (getState() != AAUDIO_STREAM_STATE_CLOSING) { // not already released?
+            // Ignore result and keep closing.
+            (void) release_l();
+        }
+        close_l();
+    }
 
     const android::sp<MyPlayerBase>   mPlayerBase;
 
@@ -595,6 +640,8 @@
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
+    int32_t                     mFramesPerBurst = 0;
+    int32_t                     mBufferCapacity = 0;
 
     aaudio_usage_t              mUsage           = AAUDIO_UNSPECIFIED;
     aaudio_content_type_t       mContentType     = AAUDIO_UNSPECIFIED;
@@ -620,8 +667,9 @@
     std::atomic<pid_t>          mErrorCallbackThread{CALLBACK_THREAD_NONE};
 
     // background thread ----------------------------------
-    bool                        mHasThread = false;
-    pthread_t                   mThread; // initialized in constructor
+    // Use mHasThread to prevent joining twice, which has undefined behavior.
+    bool                        mHasThread GUARDED_BY(mStreamLock) = false;
+    pthread_t                   mThread  GUARDED_BY(mStreamLock) = {};
 
     // These are set by the application thread and then read by the audio pthread.
     std::atomic<int64_t>        mPeriodNanoseconds; // for tuning SCHED_FIFO threads
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 60dad84..e015592 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -43,8 +43,7 @@
 // on the edge of being ridiculous.
 // TODO These defines should be moved to a central place in audio.
 #define SAMPLES_PER_FRAME_MIN        1
-// TODO Remove 8 channel limitation.
-#define SAMPLES_PER_FRAME_MAX        FCC_8
+#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
 #define SAMPLE_RATE_HZ_MIN           8000
 // HDMI supports up to 32 channels at 1536000 Hz.
 #define SAMPLE_RATE_HZ_MAX           1600000
@@ -63,27 +62,26 @@
 static aaudio_result_t builder_createStream(aaudio_direction_t direction,
                                          aaudio_sharing_mode_t sharingMode,
                                          bool tryMMap,
-                                         AudioStream **audioStreamPtr) {
-    *audioStreamPtr = nullptr;
+                                         android::sp<AudioStream> &stream) {
     aaudio_result_t result = AAUDIO_OK;
 
     switch (direction) {
 
         case AAUDIO_DIRECTION_INPUT:
             if (tryMMap) {
-                *audioStreamPtr = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
+                stream = new AudioStreamInternalCapture(AAudioBinderClient::getInstance(),
                                                                  false);
             } else {
-                *audioStreamPtr = new AudioStreamRecord();
+                stream = new AudioStreamRecord();
             }
             break;
 
         case AAUDIO_DIRECTION_OUTPUT:
             if (tryMMap) {
-                *audioStreamPtr = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
+                stream = new AudioStreamInternalPlay(AAudioBinderClient::getInstance(),
                                                               false);
             } else {
-                *audioStreamPtr = new AudioStreamTrack();
+                stream = new AudioStreamTrack();
             }
             break;
 
@@ -98,7 +96,7 @@
 // Fall back to Legacy path if MMAP not available.
 // Exact behavior is controlled by MMapPolicy.
 aaudio_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
-    AudioStream *audioStream = nullptr;
+
     if (streamPtr == nullptr) {
         ALOGE("%s() streamPtr is null", __func__);
         return AAUDIO_ERROR_NULL;
@@ -171,41 +169,49 @@
         setPrivacySensitive(true);
     }
 
-    result = builder_createStream(getDirection(), sharingMode, allowMMap, &audioStream);
+    android::sp<AudioStream> audioStream;
+    result = builder_createStream(getDirection(), sharingMode, allowMMap, audioStream);
     if (result == AAUDIO_OK) {
         // Open the stream using the parameters from the builder.
         result = audioStream->open(*this);
-        if (result == AAUDIO_OK) {
-            *streamPtr = audioStream;
-        } else {
+        if (result != AAUDIO_OK) {
             bool isMMap = audioStream->isMMap();
-            delete audioStream;
-            audioStream = nullptr;
-
             if (isMMap && allowLegacy) {
                 ALOGV("%s() MMAP stream did not open so try Legacy path", __func__);
                 // If MMAP stream failed to open then TRY using a legacy stream.
                 result = builder_createStream(getDirection(), sharingMode,
-                                              false, &audioStream);
+                                              false, audioStream);
                 if (result == AAUDIO_OK) {
                     result = audioStream->open(*this);
-                    if (result == AAUDIO_OK) {
-                        *streamPtr = audioStream;
-                    } else {
-                        delete audioStream;
-                        audioStream = nullptr;
-                    }
                 }
             }
         }
-        if (audioStream != nullptr) {
-            audioStream->logOpen();
-        }
+        if (result == AAUDIO_OK) {
+            audioStream->registerPlayerBase();
+            audioStream->logOpenActual();
+            *streamPtr = startUsingStream(audioStream);
+        } // else audioStream will go out of scope and be deleted
     }
 
     return result;
 }
 
+AudioStream *AudioStreamBuilder::startUsingStream(android::sp<AudioStream> &audioStream) {
+    // Increment the smart pointer so it will not get deleted when
+    // we pass it to the C caller and it goes out of scope.
+    // The C code cannot hold a smart pointer so we increment the reference
+    // count to indicate that the C app owns a reference.
+    audioStream->incStrong(nullptr);
+    return audioStream.get();
+}
+
+void AudioStreamBuilder::stopUsingStream(AudioStream *stream) {
+    // Undo the effect of startUsingStream()
+    android::sp<AudioStream> spAudioStream(stream);
+    ALOGV("%s() strongCount = %d", __func__, spAudioStream->getStrongCount());
+    spAudioStream->decStrong(nullptr);
+}
+
 aaudio_result_t AudioStreamBuilder::validate() const {
 
     // Check for values that are ridiculously out of range to prevent math overflow exploits.
@@ -275,4 +281,8 @@
     ALOGI("usage  = %6d, contentType = %d, inputPreset = %d, allowedCapturePolicy = %d",
           getUsage(), getContentType(), getInputPreset(), getAllowedCapturePolicy());
     ALOGI("privacy sensitive = %s", isPrivacySensitive() ? "true" : "false");
+    ALOGI("opPackageName = %s", !getOpPackageName().has_value() ?
+        "(null)" : getOpPackageName().value().c_str());
+    ALOGI("attributionTag = %s", !getAttributionTag().has_value() ?
+        "(null)" : getAttributionTag().value().c_str());
 }
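
The startUsingStream()/stopUsingStream() pair above hands a raw pointer across the C API boundary while keeping the object alive with an extra strong reference. A minimal, self-contained sketch of that hand-off, using a simplified stand-in for android::RefBase and sp<> rather than the real AAudio classes:

    // Sketch only: a simplified RefBase-style counter showing the
    // incStrong()/decStrong() pairing used when handing a raw pointer to C code.
    #include <atomic>
    #include <cstdio>

    struct RefCounted {
        std::atomic<int> strong{0};
        void incStrong() { strong.fetch_add(1); }
        void decStrong() { if (strong.fetch_sub(1) == 1) delete this; }
        virtual ~RefCounted() { std::puts("stream object deleted"); }
    };

    struct FakeStream : RefCounted {};

    // Like startUsingStream(): the C caller now owns one strong reference.
    FakeStream *startUsing(FakeStream *s) { s->incStrong(); return s; }

    // Like stopUsingStream(): drop the reference the C caller held.
    void stopUsing(FakeStream *s) { s->decStrong(); }

    int main() {
        FakeStream *raw = startUsing(new FakeStream());  // handed out to "C" code
        // ... the C app uses the stream via raw ...
        stopUsing(raw);  // last reference dropped, object deleted here
        return 0;
    }
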
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index d5fb80d..9f93341 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -108,9 +108,16 @@
 
     virtual aaudio_result_t validate() const override;
 
+
     void logParameters() const;
 
+    // Drop the reference handed out to the C caller so the stream can be deleted.
+    static void stopUsingStream(AudioStream *stream);
+
 private:
+    // Extract a raw pointer that we can pass to a 'C' app.
+    static AudioStream *startUsingStream(android::sp<AudioStream> &spAudioStream);
+
     bool                       mSharingModeMatchRequired = false; // must match sharing mode requested
     aaudio_performance_mode_t  mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
 
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index f5113f2..5c11882 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -31,40 +31,37 @@
 #include "FifoBuffer.h"
 
 using android::FifoBuffer;
+using android::FifoBufferAllocated;
+using android::FifoBufferIndirect;
 using android::fifo_frames_t;
 
-FifoBuffer::FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
-        : mBytesPerFrame(bytesPerFrame)
+FifoBuffer::FifoBuffer(int32_t bytesPerFrame)
+        : mBytesPerFrame(bytesPerFrame) {}
+
+FifoBufferAllocated::FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
+        : FifoBuffer(bytesPerFrame)
 {
     mFifo = std::make_unique<FifoController>(capacityInFrames, capacityInFrames);
     // allocate buffer
     int32_t bytesPerBuffer = bytesPerFrame * capacityInFrames;
-    mStorage = new uint8_t[bytesPerBuffer];
-    mStorageOwned = true;
+    mInternalStorage = std::make_unique<uint8_t[]>(bytesPerBuffer);
     ALOGV("%s() capacityInFrames = %d, bytesPerFrame = %d",
           __func__, capacityInFrames, bytesPerFrame);
 }
 
-FifoBuffer::FifoBuffer( int32_t   bytesPerFrame,
+FifoBufferIndirect::FifoBufferIndirect( int32_t   bytesPerFrame,
                         fifo_frames_t   capacityInFrames,
-                        fifo_counter_t *  readIndexAddress,
-                        fifo_counter_t *  writeIndexAddress,
+                        fifo_counter_t *readIndexAddress,
+                        fifo_counter_t *writeIndexAddress,
                         void *  dataStorageAddress
                         )
-        : mBytesPerFrame(bytesPerFrame)
-        , mStorage(static_cast<uint8_t *>(dataStorageAddress))
+        : FifoBuffer(bytesPerFrame)
+        , mExternalStorage(static_cast<uint8_t *>(dataStorageAddress))
 {
     mFifo = std::make_unique<FifoControllerIndirect>(capacityInFrames,
                                        capacityInFrames,
                                        readIndexAddress,
                                        writeIndexAddress);
-    mStorageOwned = false;
-}
-
-FifoBuffer::~FifoBuffer() {
-    if (mStorageOwned) {
-        delete[] mStorage;
-    }
 }
 
 int32_t FifoBuffer::convertFramesToBytes(fifo_frames_t frames) {
@@ -76,15 +73,16 @@
                                     int32_t startIndex) {
     wrappingBuffer->data[1] = nullptr;
     wrappingBuffer->numFrames[1] = 0;
+    uint8_t *storage = getStorage();
     if (framesAvailable > 0) {
         fifo_frames_t capacity = mFifo->getCapacity();
-        uint8_t *source = &mStorage[convertFramesToBytes(startIndex)];
+        uint8_t *source = &storage[convertFramesToBytes(startIndex)];
         // Does the available data cross the end of the FIFO?
         if ((startIndex + framesAvailable) > capacity) {
             wrappingBuffer->data[0] = source;
             fifo_frames_t firstFrames = capacity - startIndex;
             wrappingBuffer->numFrames[0] = firstFrames;
-            wrappingBuffer->data[1] = &mStorage[0];
+            wrappingBuffer->data[1] = &storage[0];
             wrappingBuffer->numFrames[1] = framesAvailable - firstFrames;
         } else {
             wrappingBuffer->data[0] = source;
@@ -191,6 +189,6 @@
 void FifoBuffer::eraseMemory() {
     int32_t numBytes = convertFramesToBytes(getBufferCapacityInFrames());
     if (numBytes > 0) {
-        memset(mStorage, 0, (size_t) numBytes);
+        memset(getStorage(), 0, (size_t) numBytes);
     }
 }
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 0d188c4..37548f0 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -38,15 +38,9 @@
 
 class FifoBuffer {
 public:
-    FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+    FifoBuffer(int32_t bytesPerFrame);
 
-    FifoBuffer(int32_t bytesPerFrame,
-               fifo_frames_t capacityInFrames,
-               fifo_counter_t *readCounterAddress,
-               fifo_counter_t *writeCounterAddress,
-               void *dataStorageAddress);
-
-    ~FifoBuffer();
+    virtual ~FifoBuffer() = default;
 
     int32_t convertFramesToBytes(fifo_frames_t frames);
 
@@ -121,19 +115,53 @@
      */
     void eraseMemory();
 
-private:
+protected:
+
+    virtual uint8_t *getStorage() const = 0;
 
     void fillWrappingBuffer(WrappingBuffer *wrappingBuffer,
                             int32_t framesAvailable, int32_t startIndex);
 
     const int32_t             mBytesPerFrame;
-    // We do not use a std::unique_ptr for mStorage because it is often a pointer to
-    // memory shared between processes and cannot be deleted trivially.
-    uint8_t                  *mStorage = nullptr;
-    bool                      mStorageOwned = false; // did this object allocate the storage?
     std::unique_ptr<FifoControllerBase> mFifo{};
 };
 
+// Define two subclasses to handle the two ways that storage is allocated.
+
+// Allocate storage internally.
+class FifoBufferAllocated : public FifoBuffer {
+public:
+    FifoBufferAllocated(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+
+private:
+
+    uint8_t *getStorage() const override {
+        return mInternalStorage.get();
+    };
+
+    std::unique_ptr<uint8_t[]> mInternalStorage;
+};
+
+// Allocate storage externally and pass it in.
+class FifoBufferIndirect : public FifoBuffer {
+public:
+    // We use raw pointers because the memory may be
+    // in the middle of an allocated block and cannot be deleted directly.
+    FifoBufferIndirect(int32_t bytesPerFrame,
+                       fifo_frames_t capacityInFrames,
+                       fifo_counter_t* readCounterAddress,
+                       fifo_counter_t* writeCounterAddress,
+                       void* dataStorageAddress);
+
+private:
+
+    uint8_t *getStorage() const override {
+        return mExternalStorage;
+    };
+
+    uint8_t *mExternalStorage = nullptr;
+};
+
 }  // android
 
 #endif //FIFO_FIFO_BUFFER_H
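
The two subclasses above separate the storage models: FifoBufferAllocated owns a heap buffer, while FifoBufferIndirect wraps counters and data that live in externally managed (typically shared) memory. A hedged sketch of how a caller might choose between them; the SharedRegion layout is illustrative, not an AAudio type:

    // Sketch: pick a FifoBuffer subclass based on where the storage lives.
    #include <cstdint>
    #include <memory>
    #include "FifoBuffer.h"   // android::FifoBufferAllocated / FifoBufferIndirect

    struct SharedRegion {                      // hypothetical shared-memory layout
        android::fifo_counter_t readCounter;
        android::fifo_counter_t writeCounter;
        uint8_t data[1024];                    // must cover frames * bytesPerFrame
    };

    std::unique_ptr<android::FifoBuffer> makeFifo(int32_t bytesPerFrame,
                                                  android::fifo_frames_t frames,
                                                  SharedRegion *shared) {
        if (shared == nullptr) {
            // Local use: let the FIFO allocate and own its storage.
            return std::make_unique<android::FifoBufferAllocated>(bytesPerFrame, frames);
        }
        // Shared memory: wrap counters and data that this object must never delete.
        return std::make_unique<android::FifoBufferIndirect>(
                bytesPerFrame, frames,
                &shared->readCounter, &shared->writeCounter, shared->data);
    }
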
diff --git a/media/libaaudio/src/fifo/FifoControllerIndirect.h b/media/libaaudio/src/fifo/FifoControllerIndirect.h
index 5832d9c..ec48e57 100644
--- a/media/libaaudio/src/fifo/FifoControllerIndirect.h
+++ b/media/libaaudio/src/fifo/FifoControllerIndirect.h
@@ -27,7 +27,7 @@
 /**
  * A FifoControllerBase with counters external to the class.
  *
- * The actual copunters may be stored in separate regions of shared memory
+ * The actual counters may be stored in separate regions of shared memory
  * with different access rights.
  */
 class FifoControllerIndirect : public FifoControllerBase {
diff --git a/media/libaaudio/src/flowgraph/AudioProcessorBase.h b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
index eda46ae..972932f 100644
--- a/media/libaaudio/src/flowgraph/AudioProcessorBase.h
+++ b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
@@ -267,7 +267,7 @@
     AudioFloatInputPort input;
 
     /**
-     * Dummy processor. The work happens in the read() method.
+     * Do nothing. The work happens in the read() method.
      *
      * @param framePosition index of first frame to be processed
      * @param numFrames
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
new file mode 100644
index 0000000..b750410
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_UTILITIES_H
+#define FLOWGRAPH_UTILITIES_H
+
+#include <unistd.h>
+
+using namespace flowgraph;
+
+class FlowgraphUtilities {
+public:
+// This was copied from audio_utils/primitives.h
+/**
+ * Convert a single-precision floating point value to a Q0.31 integer value.
+ * Rounds to nearest, ties away from 0.
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to -2147483648 and 2147483647,
+ * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
+ * depending on hardware and future implementation of this function.
+ */
+static int32_t clamp32FromFloat(float f)
+{
+    static const float scale = (float)(1UL << 31);
+    static const float limpos = 1.;
+    static const float limneg = -1.;
+
+    if (f <= limneg) {
+        return -0x80000000; /* or 0x80000000 */
+    } else if (f >= limpos) {
+        return 0x7fffffff;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+};
+
+#endif // FLOWGRAPH_UTILITIES_H
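
A few spot checks of clamp32FromFloat() above, assuming the flowgraph headers are on the include path; the expected values follow from the Q0.31 scale factor and the clamp at the edges of [-1.0, 1.0):

    // Sketch: expected results of FlowgraphUtilities::clamp32FromFloat().
    #include <cassert>
    #include <cstdint>
    #include "AudioProcessorBase.h"   // declares the flowgraph namespace used below
    #include "FlowgraphUtilities.h"

    int main() {
        assert(FlowgraphUtilities::clamp32FromFloat(0.0f)  == 0);
        assert(FlowgraphUtilities::clamp32FromFloat(0.5f)  == (1 << 30));   // 0.5 * 2^31
        assert(FlowgraphUtilities::clamp32FromFloat(1.0f)  == INT32_MAX);   // clamped high
        assert(FlowgraphUtilities::clamp32FromFloat(-1.0f) == INT32_MIN);   // exactly representable
        assert(FlowgraphUtilities::clamp32FromFloat(-2.0f) == INT32_MIN);   // clamped low
        return 0;
    }
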
diff --git a/media/libaaudio/src/flowgraph/SinkI24.cpp b/media/libaaudio/src/flowgraph/SinkI24.cpp
index 6592828..0cb077d 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI24.cpp
@@ -15,7 +15,7 @@
  */
 
 #include <algorithm>
-#include <unistd.h>
+#include <stdint.h>
 
 #ifdef __ANDROID__
 #include <audio_utils/primitives.h>
@@ -26,7 +26,6 @@
 
 using namespace flowgraph;
 
-
 SinkI24::SinkI24(int32_t channelCount)
         : AudioSink(channelCount) {}
 
diff --git a/media/libaaudio/src/flowgraph/SinkI32.cpp b/media/libaaudio/src/flowgraph/SinkI32.cpp
new file mode 100644
index 0000000..eab863d
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SinkI32.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef __ANDROID__
+#include <audio_utils/primitives.h>
+#endif
+
+#include "AudioProcessorBase.h"
+#include "FlowgraphUtilities.h"
+#include "SinkI32.h"
+
+using namespace flowgraph;
+
+SinkI32::SinkI32(int32_t channelCount)
+        : AudioSink(channelCount) {}
+
+int32_t SinkI32::read(void *data, int32_t numFrames) {
+    int32_t *intData = (int32_t *) data;
+    const int32_t channelCount = input.getSamplesPerFrame();
+
+    int32_t framesLeft = numFrames;
+    while (framesLeft > 0) {
+        // Run the graph and pull data through the input port.
+        int32_t framesRead = pull(framesLeft);
+        if (framesRead <= 0) {
+            break;
+        }
+        const float *signal = input.getBlock();
+        int32_t numSamples = framesRead * channelCount;
+#ifdef __ANDROID__
+        memcpy_to_i32_from_float(intData, signal, numSamples);
+        intData += numSamples;
+        signal += numSamples;
+#else
+        for (int i = 0; i < numSamples; i++) {
+            *intData++ = FlowgraphUtilities::clamp32FromFloat(*signal++);
+        }
+#endif
+        framesLeft -= framesRead;
+    }
+    return numFrames - framesLeft;
+}
diff --git a/media/libaaudio/src/flowgraph/SinkI32.h b/media/libaaudio/src/flowgraph/SinkI32.h
new file mode 100644
index 0000000..09d23b7
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SinkI32.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_SINK_I32_H
+#define FLOWGRAPH_SINK_I32_H
+
+#include <stdint.h>
+
+#include "AudioProcessorBase.h"
+
+namespace flowgraph {
+
+class SinkI32 : public AudioSink {
+public:
+    explicit SinkI32(int32_t channelCount);
+    ~SinkI32() override = default;
+
+    int32_t read(void *data, int32_t numFrames) override;
+};
+
+} /* namespace flowgraph */
+
+#endif //FLOWGRAPH_SINK_I32_H
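
SinkI32 is the output end of a float flowgraph; pulling from it runs the upstream nodes and converts to 32-bit PCM. A hedged sketch of a float to I32 conversion through the graph, assuming the SourceFloat node and the output.connect()/setData() helpers declared in AudioProcessorBase.h:

    // Sketch: convert float frames to int32 frames via the flowgraph.
    #include <cstdint>
    #include "SourceFloat.h"
    #include "SinkI32.h"

    using namespace flowgraph;

    void convertFloatToI32(const float *in, int32_t *out,
                           int32_t numFrames, int32_t channelCount) {
        SourceFloat source(channelCount);     // feeds float data into the graph
        SinkI32 sink(channelCount);           // pulls data out as int32 samples
        source.output.connect(&sink.input);   // wire source -> sink

        source.setData(in, numFrames);        // hand the input block to the source
        sink.read(out, numFrames);            // runs the graph and converts
    }
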
diff --git a/media/libaaudio/src/flowgraph/SourceI24.cpp b/media/libaaudio/src/flowgraph/SourceI24.cpp
index f319880..097954e 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI24.cpp
@@ -15,7 +15,7 @@
  */
 
 #include <algorithm>
-#include <unistd.h>
+#include <stdint.h>
 
 #ifdef __ANDROID__
 #include <audio_utils/primitives.h>
diff --git a/media/libaaudio/src/flowgraph/SourceI24.h b/media/libaaudio/src/flowgraph/SourceI24.h
index 39f14da..2ed6f18 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.h
+++ b/media/libaaudio/src/flowgraph/SourceI24.h
@@ -17,8 +17,7 @@
 #ifndef FLOWGRAPH_SOURCE_I24_H
 #define FLOWGRAPH_SOURCE_I24_H
 
-#include <unistd.h>
-#include <sys/types.h>
+#include <stdint.h>
 
 #include "AudioProcessorBase.h"
 
diff --git a/media/libaaudio/src/flowgraph/SourceI32.cpp b/media/libaaudio/src/flowgraph/SourceI32.cpp
new file mode 100644
index 0000000..e8177ad
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SourceI32.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <unistd.h>
+
+#ifdef __ANDROID__
+#include <audio_utils/primitives.h>
+#endif
+
+#include "AudioProcessorBase.h"
+#include "SourceI32.h"
+
+using namespace flowgraph;
+
+SourceI32::SourceI32(int32_t channelCount)
+        : AudioSource(channelCount) {
+}
+
+int32_t SourceI32::onProcess(int64_t framePosition, int32_t numFrames) {
+    float *floatData = output.getBlock();
+    int32_t channelCount = output.getSamplesPerFrame();
+
+    int32_t framesLeft = mSizeInFrames - mFrameIndex;
+    int32_t framesToProcess = std::min(numFrames, framesLeft);
+    int32_t numSamples = framesToProcess * channelCount;
+
+    const int32_t *intBase = static_cast<const int32_t *>(mData);
+    const int32_t *intData = &intBase[mFrameIndex * channelCount];
+
+#ifdef __ANDROID__
+    memcpy_to_float_from_i32(floatData, intData, numSamples);
+#else
+    for (int i = 0; i < numSamples; i++) {
+        *floatData++ = *intData++ * kScale;
+    }
+#endif
+
+    mFrameIndex += framesToProcess;
+    return framesToProcess;
+}
diff --git a/media/libaaudio/src/flowgraph/SourceI32.h b/media/libaaudio/src/flowgraph/SourceI32.h
new file mode 100644
index 0000000..e50f9be
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SourceI32.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_SOURCE_I32_H
+#define FLOWGRAPH_SOURCE_I32_H
+
+#include <stdint.h>
+
+#include "AudioProcessorBase.h"
+
+namespace flowgraph {
+
+class SourceI32 : public AudioSource {
+public:
+    explicit SourceI32(int32_t channelCount);
+    ~SourceI32() override = default;
+
+    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+
+private:
+    static constexpr float kScale = 1.0 / (1UL << 31);
+};
+
+} /* namespace flowgraph */
+
+#endif //FLOWGRAPH_SOURCE_I32_H
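
The kScale constant above is 2^-31, so int32 samples land exactly in [-1.0, 1.0); two exact spot checks of that scaling (standalone arithmetic, not the AAudio code):

    // Sketch: the int32-to-float scaling used by SourceI32's fallback path.
    #include <cassert>
    #include <cstdint>

    int main() {
        constexpr float kScale = 1.0f / (1UL << 31);
        assert(INT32_MIN * kScale == -1.0f);      // full-scale negative
        assert(0x40000000 * kScale == 0.5f);      // 2^30 * 2^-31
        return 0;
    }
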
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index c062882..e96e134 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -34,8 +34,7 @@
 using namespace aaudio;
 
 AudioStreamLegacy::AudioStreamLegacy()
-        : AudioStream()
-        , mDeviceCallback(new StreamDeviceCallback(this)) {
+        : AudioStream() {
 }
 
 AudioStreamLegacy::~AudioStreamLegacy() {
@@ -95,10 +94,15 @@
             AudioTrack::Buffer *audioBuffer = static_cast<AudioTrack::Buffer *>(info);
             if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
                 ALOGW("processCallbackCommon() data, stream disconnected");
+                // This will kill the stream and prevent it from being restarted.
+                // That is OK because the stream is disconnected.
                 audioBuffer->size = SIZE_STOP_CALLBACKS;
             } else if (!mCallbackEnabled.load()) {
-                ALOGW("processCallbackCommon() no data because callback disabled");
-                audioBuffer->size = SIZE_STOP_CALLBACKS;
+                ALOGW("processCallbackCommon() no data because callback disabled, set size=0");
+                // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
+                // prevent it from being restarted. This can occur because of a race condition
+                // caused by Legacy callbacks running after the track is "stopped".
+                audioBuffer->size = 0;
             } else {
                 if (audioBuffer->frameCount == 0) {
                     ALOGW("processCallbackCommon() data, frameCount is zero");
@@ -125,7 +129,7 @@
                               __func__, callbackResult);
                     }
                     audioBuffer->size = 0;
-                    systemStopFromCallback();
+                    systemStopInternal();
                     // Disable the callback just in case the system keeps trying to call us.
                     mCallbackEnabled.store(false);
                 }
@@ -163,7 +167,11 @@
 }
 
 void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // There is no need to disconnect if already in these states.
+    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            && getState() != AAUDIO_STREAM_STATE_CLOSING
+            && getState() != AAUDIO_STREAM_STATE_CLOSED
+            ) {
         setState(AAUDIO_STREAM_STATE_DISCONNECTED);
         if (errorCallbackEnabled) {
             maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
@@ -205,24 +213,30 @@
     return AAudioConvert_androidToAAudioResult(status);
 }
 
-void AudioStreamLegacy::onAudioDeviceUpdate(audio_port_handle_t deviceId)
-{
+void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
+            audio_port_handle_t deviceId) {
     // Device routing is a common source of errors and DISCONNECTS.
-    // Please leave this log in place.
-    ALOGD("%s() devId %d => %d", __func__, (int) getDeviceId(), (int)deviceId);
-    if (getDeviceId() != AAUDIO_UNSPECIFIED && getDeviceId() != deviceId &&
-            getState() != AAUDIO_STREAM_STATE_DISCONNECTED) {
+    // Please leave this log in place. If there is a bug then this might
+    // get called after the stream has been deleted so log before we
+    // touch the stream object.
+    ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
+    if (getDeviceId() != AAUDIO_UNSPECIFIED
+            && getDeviceId() != deviceId
+            && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            ) {
         // Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
         // If we have a data callback and the stream is active, then ask the data callback
         // to DISCONNECT and call the error callback.
         if (isDataCallbackActive()) {
-            ALOGD("onAudioDeviceUpdate() request DISCONNECT in data callback due to device change");
+            ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
+                  __func__, (int) getDeviceId(), (int) deviceId);
             // If the stream is stopped before the data callback has a chance to handle the
-            // request then the requestStop() and requestPause() methods will handle it after
+            // request then the requestStop_l() and requestPause() methods will handle it after
             // the callback has stopped.
             mRequestDisconnect.request();
         } else {
-            ALOGD("onAudioDeviceUpdate() DISCONNECT the stream now");
+            ALOGD("%s() DISCONNECT the stream now, device %d => %d",
+                  __func__, (int) getDeviceId(), (int) deviceId);
             forceDisconnect();
         }
     }
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 9c24b2b..88ef270 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -87,29 +87,13 @@
 
 protected:
 
-    class StreamDeviceCallback : public android::AudioSystem::AudioDeviceCallback
-    {
-    public:
-
-        StreamDeviceCallback(AudioStreamLegacy *parent) : mParent(parent) {}
-        virtual ~StreamDeviceCallback() {}
-
-        virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo __unused,
-                                         audio_port_handle_t deviceId) {
-            if (mParent != nullptr) {
-                mParent->onAudioDeviceUpdate(deviceId);
-            }
-        }
-
-        AudioStreamLegacy *mParent;
-    };
-
     aaudio_result_t getBestTimestamp(clockid_t clockId,
                                      int64_t *framePosition,
                                      int64_t *timeNanoseconds,
                                      android::ExtendedTimestamp *extendedTimestamp);
 
-    void onAudioDeviceUpdate(audio_port_handle_t deviceId);
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+            audio_port_handle_t deviceId) override;
 
     /*
      * Check to see whether a callback thread has requested a disconnect.
@@ -128,6 +112,18 @@
         return mFramesRead.increment(frames);
     }
 
+    /**
+     * Get the framesPerBurst from the underlying API.
+     * @return framesPerBurst
+     */
+    virtual int32_t getFramesPerBurstFromDevice() const = 0;
+
+    /**
+     * Get the bufferCapacity from the underlying API.
+     * @return bufferCapacity in frames
+     */
+    virtual int32_t getBufferCapacityFromDevice() const = 0;
+
     // This is used for exact matching by MediaMetrics. So do not change it.
     // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_AAUDIO
     static constexpr char     kCallerName[] = "aaudio";
@@ -140,7 +136,6 @@
     int32_t                    mBlockAdapterBytesPerFrame = 0;
     aaudio_wrapping_frames_t   mPositionWhenStarting = 0;
     int32_t                    mCallbackBufferSize = 0;
-    const android::sp<StreamDeviceCallback>   mDeviceCallback;
 
     AtomicRequestor            mRequestDisconnect;
 
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index b0dc59e..dc66742 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -22,14 +22,18 @@
 
 #include <aaudio/AAudio.h>
 #include <audio_utils/primitives.h>
+#include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
 #include <utils/String16.h>
 
+#include "core/AudioGlobal.h"
 #include "legacy/AudioStreamLegacy.h"
 #include "legacy/AudioStreamRecord.h"
 #include "utility/AudioClock.h"
 #include "utility/FixedBlockWriter.h"
 
+using android::content::AttributionSourceState;
+
 using namespace android;
 using namespace aaudio;
 
@@ -89,8 +93,9 @@
             break;
     }
 
+    const audio_format_t requestedFormat = getFormat();
     // Preserve behavior of API 26
-    if (getFormat() == AUDIO_FORMAT_DEFAULT) {
+    if (requestedFormat == AUDIO_FORMAT_DEFAULT) {
         setFormat(AUDIO_FORMAT_PCM_FLOAT);
     }
 
@@ -118,6 +123,7 @@
         setDeviceFormat(getFormat());
     }
 
+    // To avoid glitching, let AudioFlinger pick the optimal burst size.
     uint32_t notificationFrames = 0;
 
     // Setup the callback if there is one.
@@ -128,7 +134,6 @@
         streamTransferType = AudioRecord::transfer_type::TRANSFER_CALLBACK;
         callback = getLegacyCallback();
         callbackData = this;
-        notificationFrames = builder.getFramesPerDataCallback();
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
@@ -152,13 +157,21 @@
             .tags = ""
     };
 
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.packageName = builder.getOpPackageName();
+    attributionSource.attributionTag = builder.getAttributionTag();
+    attributionSource.token = sp<BBinder>::make();
+
     // ----------- open the AudioRecord ---------------------
     // Might retry, but never more than once.
     for (int i = 0; i < 2; i ++) {
         const audio_format_t requestedInternalFormat = getDeviceFormat();
 
         mAudioRecord = new AudioRecord(
-                mOpPackageName // const String16& opPackageName TODO does not compile
+                attributionSource
         );
         mAudioRecord->set(
                 AUDIO_SOURCE_DEFAULT, // ignored because we pass attributes below
@@ -185,7 +198,7 @@
         // Did we get a valid track?
         status_t status = mAudioRecord->initCheck();
         if (status != OK) {
-            releaseCloseFinal();
+            safeReleaseClose();
             ALOGE("open(), initCheck() returned %d", status);
             return AAudioConvert_androidToAAudioResult(status);
         }
@@ -207,15 +220,18 @@
 
     mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD)
             + std::to_string(mAudioRecord->getPortId());
+    android::mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
+                 AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+            .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+                 AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(requestedFormat).c_str()).record();
 
     // Get the actual values from the AudioRecord.
     setSamplesPerFrame(mAudioRecord->channelCount());
-
-    int32_t actualSampleRate = mAudioRecord->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
-    setSampleRate(actualSampleRate);
+    setSampleRate(mAudioRecord->getSampleRate());
+    setBufferCapacity(getBufferCapacityFromDevice());
+    setFramesPerBurst(getFramesPerBurstFromDevice());
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -282,7 +298,7 @@
             : (aaudio_session_id_t) mAudioRecord->getSessionId();
     setSessionId(actualSessionId);
 
-    mAudioRecord->addAudioDeviceCallback(mDeviceCallback);
+    mAudioRecord->addAudioDeviceCallback(this);
 
     return AAUDIO_OK;
 }
@@ -291,16 +307,32 @@
     // TODO add close() or release() to AudioFlinger's AudioRecord API.
     //  Then call it from here
     if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
-        mAudioRecord->removeAudioDeviceCallback(mDeviceCallback);
+        mAudioRecord->removeAudioDeviceCallback(this);
         logReleaseBufferState();
-        mAudioRecord.clear();
-        mFixedBlockWriter.close();
+        // Data callbacks may still be running!
         return AudioStream::release_l();
     } else {
         return AAUDIO_OK; // already released
     }
 }
 
+void AudioStreamRecord::close_l() {
+    // The callbacks are normally joined in the AudioRecord destructor.
+    // But if another object has a reference to the AudioRecord then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP
+    mStreamLock.unlock();
+    mAudioRecord->stopAndJoinCallbacks();
+    mStreamLock.lock();
+
+    mAudioRecord.clear();
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
+    AudioStream::close_l();
+}
+
 const void * AudioStreamRecord::maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
     if (mFormatConversionBufferFloat.get() != nullptr) {
         LOG_ALWAYS_FATAL_IF(numFrames > mFormatConversionBufferSizeInFrames,
@@ -336,7 +368,7 @@
     return;
 }
 
-aaudio_result_t AudioStreamRecord::requestStart()
+aaudio_result_t AudioStreamRecord::requestStart_l()
 {
     if (mAudioRecord.get() == nullptr) {
         return AAUDIO_ERROR_INVALID_STATE;
@@ -360,7 +392,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamRecord::requestStop() {
+aaudio_result_t AudioStreamRecord::requestStop_l() {
     if (mAudioRecord.get() == nullptr) {
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -483,7 +515,7 @@
     return getBufferCapacity(); // TODO implement in AudioRecord?
 }
 
-int32_t AudioStreamRecord::getBufferCapacity() const
+int32_t AudioStreamRecord::getBufferCapacityFromDevice() const
 {
     return static_cast<int32_t>(mAudioRecord->frameCount());
 }
@@ -493,8 +525,7 @@
     return 0; // TODO implement when AudioRecord supports it
 }
 
-int32_t AudioStreamRecord::getFramesPerBurst() const
-{
+int32_t AudioStreamRecord::getFramesPerBurstFromDevice() const {
     return static_cast<int32_t>(mAudioRecord->getNotificationPeriodInFrames());
 }
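
The close_l() change above drops mStreamLock around stopAndJoinCallbacks() so a data callback that is blocked waiting for the same lock can finish and be joined. A minimal, self-contained sketch of that unlock-around-join pattern using plain std::thread, not the AudioRecord API:

    // Sketch: release the lock around the join to avoid deadlocking with a
    // callback thread that needs the same lock before it can return.
    #include <mutex>
    #include <thread>

    std::mutex gStreamLock;
    std::thread gCallbackThread;

    void callbackBody() {
        std::lock_guard<std::mutex> lock(gStreamLock);  // callback takes the stream lock
        // ... deliver audio data ...
    }

    void closeStream() {
        std::unique_lock<std::mutex> lock(gStreamLock);
        // Joining while holding the lock could deadlock if the callback is
        // blocked on the lock above, so release it just for the join.
        lock.unlock();
        if (gCallbackThread.joinable()) {
            gCallbackThread.join();
        }
        lock.lock();
        // Now no callback can still be running; safe to free stream resources.
    }

    int main() {
        gCallbackThread = std::thread(callbackBody);
        closeStream();
        return 0;
    }
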
 
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index c5944c7..692651d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -25,6 +25,7 @@
 #include "AAudioLegacy.h"
 #include "legacy/AudioStreamLegacy.h"
 #include "utility/FixedBlockWriter.h"
+#include <android/content/AttributionSourceState.h>
 
 namespace aaudio {
 
@@ -39,9 +40,7 @@
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
     aaudio_result_t release_l() override;
-
-    aaudio_result_t requestStart() override;
-    aaudio_result_t requestStop() override;
+    void close_l() override;
 
     virtual aaudio_result_t getTimestamp(clockid_t clockId,
                                          int64_t *framePosition,
@@ -55,14 +54,10 @@
 
     int32_t getBufferSize() const override;
 
-    int32_t getBufferCapacity() const override;
-
     int32_t getXRunCount() const override;
 
     int64_t getFramesWritten() override;
 
-    int32_t getFramesPerBurst() const override;
-
     aaudio_result_t updateStateMachine() override;
 
     aaudio_direction_t getDirection() const override {
@@ -78,13 +73,21 @@
 
     const void * maybeConvertDeviceData(const void *audioData, int32_t numFrames) override;
 
+protected:
+
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+
+    int32_t getFramesPerBurstFromDevice() const override;
+    int32_t getBufferCapacityFromDevice() const override;
+
 private:
     android::sp<android::AudioRecord> mAudioRecord;
     // adapts between variable sized blocks and fixed size blocks
     FixedBlockWriter                 mFixedBlockWriter;
 
     // TODO add 64-bit position reporting to AudioRecord and use it.
-    android::String16                mOpPackageName;
+    android::content::AttributionSourceState mAttributionSource;
 
     // Only one type of conversion buffer is used.
     std::unique_ptr<float[]>         mFormatConversionBufferFloat;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 4869480..1d412c0 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -23,14 +23,18 @@
 
 #include <aaudio/AAudio.h>
 #include <system/audio.h>
-#include "utility/AudioClock.h"
+
+#include "core/AudioGlobal.h"
 #include "legacy/AudioStreamLegacy.h"
 #include "legacy/AudioStreamTrack.h"
+#include "utility/AudioClock.h"
 #include "utility/FixedBlockReader.h"
 
 using namespace android;
 using namespace aaudio;
 
+using android::content::AttributionSourceState;
+
 // Arbitrary and somewhat generous number of bursts.
 #define DEFAULT_BURSTS_PER_BUFFER_CAPACITY     8
 
@@ -96,6 +100,7 @@
 
     size_t frameCount = (size_t)builder.getBufferCapacity();
 
+    // To avoid glitching, let AudioFlinger pick the optimal burst size.
     int32_t notificationFrames = 0;
 
     const audio_format_t format = (getFormat() == AUDIO_FORMAT_DEFAULT)
@@ -118,8 +123,6 @@
             // Take advantage of a special trick that allows us to create a buffer
             // that is some multiple of the burst size.
             notificationFrames = 0 - DEFAULT_BURSTS_PER_BUFFER_CAPACITY;
-        } else {
-            notificationFrames = builder.getFramesPerDataCallback();
         }
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
@@ -148,6 +151,7 @@
     };
 
     mAudioTrack = new AudioTrack();
+    // TODO b/182392769: use attribution source util
     mAudioTrack->set(
             AUDIO_STREAM_DEFAULT,  // ignored because we pass attributes below
             getSampleRate(),
@@ -163,8 +167,7 @@
             sessionId,
             streamTransferType,
             NULL,    // DEFAULT audio_offload_info_t
-            AUDIO_UID_INVALID, // DEFAULT uid
-            -1,      // DEFAULT pid
+            AttributionSourceState(), // DEFAULT uid and pid
             &attributes,
             // WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
             // headphones a few times.
@@ -179,13 +182,19 @@
     // Did we get a valid track?
     status_t status = mAudioTrack->initCheck();
     if (status != NO_ERROR) {
-        releaseCloseFinal();
+        safeReleaseClose();
         ALOGE("open(), initCheck() returned %d", status);
         return AAudioConvert_androidToAAudioResult(status);
     }
 
     mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK)
             + std::to_string(mAudioTrack->getPortId());
+    android::mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
+                 AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+            .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+                 AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
+            .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(getFormat()).c_str()).record();
 
     doSetVolume();
 
@@ -193,12 +202,9 @@
     setSamplesPerFrame(mAudioTrack->channelCount());
     setFormat(mAudioTrack->format());
     setDeviceFormat(mAudioTrack->format());
-
-    int32_t actualSampleRate = mAudioTrack->getSampleRate();
-    ALOGW_IF(actualSampleRate != getSampleRate(),
-             "open() sampleRate changed from %d to %d",
-             getSampleRate(), actualSampleRate);
-    setSampleRate(actualSampleRate);
+    setSampleRate(mAudioTrack->getSampleRate());
+    setBufferCapacity(getBufferCapacityFromDevice());
+    setFramesPerBurst(getFramesPerBurstFromDevice());
 
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
@@ -221,10 +227,7 @@
             : (aaudio_session_id_t) mAudioTrack->getSessionId();
     setSessionId(actualSessionId);
 
-    mInitialBufferCapacity = getBufferCapacity();
-    mInitialFramesPerBurst = getFramesPerBurst();
-
-    mAudioTrack->addAudioDeviceCallback(mDeviceCallback);
+    mAudioTrack->addAudioDeviceCallback(this);
 
     // Update performance mode based on the actual stream flags.
     // For example, if the sample rate is not allowed then you won't get a FAST track.
@@ -240,11 +243,11 @@
 
     setSharingMode(AAUDIO_SHARING_MODE_SHARED); // EXCLUSIVE mode not supported in legacy
 
-    // Log warning if we did not get what we asked for.
-    ALOGW_IF(actualFlags != flags,
+    // Log if we did not get what we asked for.
+    ALOGD_IF(actualFlags != flags,
              "open() flags changed from 0x%08X to 0x%08X",
              flags, actualFlags);
-    ALOGW_IF(actualPerformanceMode != perfMode,
+    ALOGD_IF(actualPerformanceMode != perfMode,
              "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
@@ -253,19 +256,32 @@
 
 aaudio_result_t AudioStreamTrack::release_l() {
     if (getState() != AAUDIO_STREAM_STATE_CLOSING) {
-        mAudioTrack->removeAudioDeviceCallback(mDeviceCallback);
+        status_t err = mAudioTrack->removeAudioDeviceCallback(this);
+        ALOGE_IF(err, "%s() removeAudioDeviceCallback returned %d", __func__, err);
         logReleaseBufferState();
-        // TODO Investigate why clear() causes a hang in test_various.cpp
-        // if I call close() from a data callback.
-        // But the same thing in AudioRecord is OK!
-        // mAudioTrack.clear();
-        mFixedBlockReader.close();
+        // Data callbacks may still be running!
         return AudioStream::release_l();
     } else {
         return AAUDIO_OK; // already released
     }
 }
 
+void AudioStreamTrack::close_l() {
+    // The callbacks are normally joined in the AudioTrack destructor.
+    // But if another object has a reference to the AudioTrack then
+    // it will not get deleted here.
+    // So we should join callbacks explicitly before returning.
+    // Unlock around the join to avoid deadlocks if the callback tries to lock.
+    // This can happen if the callback returns AAUDIO_CALLBACK_RESULT_STOP
+    mStreamLock.unlock();
+    mAudioTrack->stopAndJoinCallbacks();
+    mStreamLock.lock();
+    mAudioTrack.clear();
+    // Do not close mFixedBlockReader. It has a unique_ptr to its buffer
+    // so it will clean up by itself.
+    AudioStream::close_l();
+}
+
 void AudioStreamTrack::processCallback(int event, void *info) {
 
     switch (event) {
@@ -281,8 +297,8 @@
                     || mAudioTrack->format() != getFormat()
                     || mAudioTrack->getSampleRate() != getSampleRate()
                     || mAudioTrack->getRoutedDeviceId() != getDeviceId()
-                    || getBufferCapacity() != mInitialBufferCapacity
-                    || getFramesPerBurst() != mInitialFramesPerBurst) {
+                    || getBufferCapacityFromDevice() != getBufferCapacity()
+                    || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
                 processCallbackCommon(AAUDIO_CALLBACK_OPERATION_DISCONNECTED, info);
             }
             break;
@@ -293,7 +309,7 @@
     return;
 }
 
-aaudio_result_t AudioStreamTrack::requestStart() {
+aaudio_result_t AudioStreamTrack::requestStart_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("requestStart() no AudioTrack");
         return AAUDIO_ERROR_INVALID_STATE;
@@ -320,7 +336,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::requestPause() {
+aaudio_result_t AudioStreamTrack::requestPause_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -336,7 +352,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamTrack::requestFlush() {
+aaudio_result_t AudioStreamTrack::requestFlush_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -350,7 +366,7 @@
     return AAUDIO_OK;
 }
 
-aaudio_result_t AudioStreamTrack::requestStop() {
+aaudio_result_t AudioStreamTrack::requestStop_l() {
     if (mAudioTrack.get() == nullptr) {
         ALOGE("%s() no AudioTrack", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
@@ -471,7 +487,7 @@
     return static_cast<int32_t>(mAudioTrack->getBufferSizeInFrames());
 }
 
-int32_t AudioStreamTrack::getBufferCapacity() const
+int32_t AudioStreamTrack::getBufferCapacityFromDevice() const
 {
     return static_cast<int32_t>(mAudioTrack->frameCount());
 }
@@ -481,8 +497,7 @@
     return static_cast<int32_t>(mAudioTrack->getUnderrunCount());
 }
 
-int32_t AudioStreamTrack::getFramesPerBurst() const
-{
+int32_t AudioStreamTrack::getFramesPerBurstFromDevice() const {
     return static_cast<int32_t>(mAudioTrack->getNotificationPeriodInFrames());
 }
 
@@ -559,7 +574,7 @@
         if (status < 0) { // a non-negative value is the volume shaper id.
             ALOGE("applyVolumeShaper() failed with status %d", status);
         }
-        return binder::Status::fromStatusT(status);
+        return aidl_utils::binderStatusFromStatusT(status);
     } else {
         ALOGD("applyVolumeShaper()"
                       " no AudioTrack for volume control from IPlayer");
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 93a1ff4..f604871 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -42,12 +42,15 @@
 
     aaudio_result_t open(const AudioStreamBuilder & builder) override;
     aaudio_result_t release_l() override;
+    void close_l() override;
 
-    aaudio_result_t requestStart() override;
-    aaudio_result_t requestPause() override;
-    aaudio_result_t requestFlush() override;
-    aaudio_result_t requestStop() override;
+protected:
+    aaudio_result_t requestStart_l() REQUIRES(mStreamLock)  override;
+    aaudio_result_t requestPause_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) override;
+    aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
 
+public:
     bool isFlushSupported() const override {
         // Only implement FLUSH for OUTPUT streams.
         return true;
@@ -68,8 +71,6 @@
 
     aaudio_result_t setBufferSize(int32_t requestedFrames) override;
     int32_t getBufferSize() const override;
-    int32_t getBufferCapacity() const override;
-    int32_t getFramesPerBurst()const  override;
     int32_t getXRunCount() const override;
 
     int64_t getFramesRead() override;
@@ -95,6 +96,11 @@
             const android::media::VolumeShaper::Operation& operation) override;
 #endif
 
+protected:
+
+    int32_t getFramesPerBurstFromDevice() const override;
+    int32_t getBufferCapacityFromDevice() const override;
+
 private:
 
     android::sp<android::AudioTrack> mAudioTrack;
@@ -104,10 +110,6 @@
 
     // TODO add 64-bit position reporting to AudioTrack and use it.
     aaudio_wrapping_frames_t         mPositionWhenPausing = 0;
-
-    // initial AudioTrack frame count and notification period
-    int32_t mInitialBufferCapacity = 0;
-    int32_t mInitialFramesPerBurst = 0;
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index 2e00aa5..1dd44d1 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -23,6 +23,8 @@
     AAudioStreamBuilder_setAllowedCapturePolicy; # introduced=29
     AAudioStreamBuilder_setSessionId;   # introduced=28
     AAudioStreamBuilder_setPrivacySensitive;   # introduced=30
+    AAudioStreamBuilder_setPackageName;   # introduced=31
+    AAudioStreamBuilder_setAttributionTag;   # introduced=31
     AAudioStreamBuilder_openStream;
     AAudioStreamBuilder_delete;
     AAudioStream_close;
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 9007b10..d795725 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,7 +27,7 @@
 #include "core/AudioGlobal.h"
 #include <aaudio/AAudioTesting.h>
 #include <math.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
 #include <assert.h>
 
 #include "utility/AAudioUtilities.h"
@@ -134,6 +134,12 @@
     case AAUDIO_FORMAT_PCM_FLOAT:
         androidFormat = AUDIO_FORMAT_PCM_FLOAT;
         break;
+    case AAUDIO_FORMAT_PCM_I24_PACKED:
+        androidFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+        break;
+    case AAUDIO_FORMAT_PCM_I32:
+        androidFormat = AUDIO_FORMAT_PCM_32_BIT;
+        break;
     default:
         androidFormat = AUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, aaudioFormat);
@@ -154,6 +160,12 @@
     case AUDIO_FORMAT_PCM_FLOAT:
         aaudioFormat = AAUDIO_FORMAT_PCM_FLOAT;
         break;
+    case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        aaudioFormat = AAUDIO_FORMAT_PCM_I24_PACKED;
+        break;
+    case AUDIO_FORMAT_PCM_32_BIT:
+        aaudioFormat = AAUDIO_FORMAT_PCM_I32;
+        break;
     default:
         aaudioFormat = AAUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, androidFormat);
@@ -231,7 +243,8 @@
         case AAUDIO_ALLOW_CAPTURE_BY_SYSTEM:
             return AUDIO_FLAG_NO_MEDIA_PROJECTION;
         case AAUDIO_ALLOW_CAPTURE_BY_NONE:
-            return AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+            return static_cast<audio_flags_mask_t>(
+                    AUDIO_FLAG_NO_MEDIA_PROJECTION | AUDIO_FLAG_NO_SYSTEM_CAPTURE);
         default:
             ALOGE("%s() 0x%08X unrecognized", __func__, policy);
             return AUDIO_FLAG_NONE; //
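
The two format switches above are intended to be inverses for every format they both list, including the new I24_PACKED and I32 entries. A quick round-trip check under that assumption, using the converter names declared in AAudioUtilities.h:

    // Sketch: round-trip the supported sample formats through both converters.
    // Assumes AAudioConvert_aaudioToAndroidDataFormat() and
    // AAudioConvert_androidToAAudioDataFormat() as declared in AAudioUtilities.h.
    #include <cassert>
    #include <aaudio/AAudio.h>
    #include "utility/AAudioUtilities.h"

    int main() {
        const aaudio_format_t formats[] = {
            AAUDIO_FORMAT_PCM_I16,
            AAUDIO_FORMAT_PCM_FLOAT,
            AAUDIO_FORMAT_PCM_I24_PACKED,
            AAUDIO_FORMAT_PCM_I32,
        };
        for (aaudio_format_t format : formats) {
            audio_format_t androidFormat = AAudioConvert_aaudioToAndroidDataFormat(format);
            assert(AAudioConvert_androidToAAudioDataFormat(androidFormat) == format);
        }
        return 0;
    }
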
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d2e4805..82eb77d 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -21,6 +21,7 @@
 #include <functional>
 #include <stdint.h>
 #include <sys/types.h>
+#include <unistd.h>
 
 #include <utils/Errors.h>
 #include <system/audio.h>
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 8935d57..98e9727 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libaaudio_tests_defaults",
     cflags: [
@@ -11,10 +20,12 @@
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_marshalling.cpp"],
     shared_libs: [
+        "aaudio-aidl-cpp",
         "libaaudio_internal",
         "libbinder",
         "libcutils",
         "libutils",
+        "shared-file-region-aidl-cpp",
     ],
 }
 
@@ -198,9 +209,9 @@
 }
 
 cc_test {
-    name: "test_stop_hang",
+    name: "test_callback_race",
     defaults: ["libaaudio_tests_defaults"],
-    srcs: ["test_stop_hang.cpp"],
+    srcs: ["test_callback_race.cpp"],
     shared_libs: [
         "libaaudio",
         "libbinder",
@@ -239,3 +250,16 @@
         "libutils",
     ],
 }
+
+
+cc_test {
+    name: "test_disconnect_race",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_disconnect_race.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
diff --git a/media/libaaudio/tests/test_aaudio_monkey.cpp b/media/libaaudio/tests/test_aaudio_monkey.cpp
index be54835..cc29678 100644
--- a/media/libaaudio/tests/test_aaudio_monkey.cpp
+++ b/media/libaaudio/tests/test_aaudio_monkey.cpp
@@ -46,11 +46,10 @@
         int32_t numFrames);
 
 void AAudioMonkeyErrorCallbackProc(
-        AAudioStream *stream __unused,
-        void *userData __unused,
-        aaudio_result_t error) {
-    printf("Error Callback, error: %d\n",(int)error);
-}
+        AAudioStream * /* stream */,
+        void *userData,
+        aaudio_result_t error);
+
 
 // This function is not thread safe. Only use this from a single thread.
 double nextRandomDouble() {
@@ -99,6 +98,10 @@
         aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
         aaudio_result_t result = AAudioStream_waitForStateChange(getStream(),
             AAUDIO_STREAM_STATE_UNKNOWN, &state, 0);
+        if (result == AAUDIO_ERROR_DISCONNECTED) {
+            printf("WARNING - AAudioStream_waitForStateChange returned DISCONNECTED\n");
+            return true; // OK
+        }
         if (result != AAUDIO_OK) {
             printf("ERROR - AAudioStream_waitForStateChange returned %d\n", result);
             return false;
@@ -114,7 +117,7 @@
                (unsigned long long) framesRead,
                xRuns);
 
-        if (framesWritten < framesRead) {
+        if (state != AAUDIO_STREAM_STATE_STARTING && framesWritten < framesRead) {
             printf("WARNING - UNDERFLOW - diff = %d !!!!!!!!!!!!\n",
                    (int) (framesWritten - framesRead));
         }
@@ -132,8 +135,23 @@
             return -1;
         }
 
+        // update and query stream state
+        aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+        state = AAudioStream_getState(getStream());
+        if (state < 0) {
+            printf("ERROR - AAudioStream_getState returned %d\n", state);
+            return state;
+        }
+
+        if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+            printf("#%d, Closing disconnected stream.\n", getIndex());
+            result = close();
+            return result;
+        }
+
         double dice = nextRandomDouble();
         // Select an action based on a weighted probability.
+        printf("    "); // indent action
         if (dice < PROB_START) {
             printf("start\n");
             result = AAudioStream_requestStart(getStream());
@@ -200,6 +218,10 @@
         return AAUDIO_CALLBACK_RESULT_CONTINUE;
     }
 
+    int getIndex() const {
+        return mIndex;
+    }
+
 private:
     const AAudioArgsParser  *mArgParser;
     const int                mIndex;
@@ -223,6 +245,13 @@
     return monkey->renderAudio(stream, audioData, numFrames);
 }
 
+void AAudioMonkeyErrorCallbackProc(
+        AAudioStream * /* stream */,
+        void *userData,
+        aaudio_result_t error) {
+    AAudioMonkey *monkey = (AAudioMonkey *) userData;
+    printf("#%d, Error Callback, error: %d\n", monkey->getIndex(), (int)error);
+}
 
 static void usage() {
     AAudioArgsParser::usage();
diff --git a/media/libaaudio/tests/test_atomic_fifo.cpp b/media/libaaudio/tests/test_atomic_fifo.cpp
index 130ef43..4dbb219 100644
--- a/media/libaaudio/tests/test_atomic_fifo.cpp
+++ b/media/libaaudio/tests/test_atomic_fifo.cpp
@@ -26,6 +26,7 @@
 using android::fifo_counter_t;
 using android::FifoController;
 using android::FifoBuffer;
+using android::FifoBufferIndirect;
 using android::WrappingBuffer;
 
 TEST(test_fifo_controller, fifo_indices) {
@@ -325,7 +326,7 @@
         verifyStorageIntegrity();
     }
 
-    FifoBuffer     mFifoBuffer;
+    FifoBufferIndirect     mFifoBuffer;
     fifo_frames_t  mNextWriteIndex = 0;
     fifo_frames_t  mNextVerifyIndex = 0;
     fifo_frames_t  mThreshold;
diff --git a/media/libaaudio/tests/test_callback_race.cpp b/media/libaaudio/tests/test_callback_race.cpp
new file mode 100644
index 0000000..843d5d7
--- /dev/null
+++ b/media/libaaudio/tests/test_callback_race.cpp
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test whether the callback is joined before the close finishes.
+ *
+ * Start a stream with a callback.
+ * The callback just sleeps for a long time.
+ * While the callback is sleeping, close() the stream from the main thread.
+ * Then check to make sure the callback was joined before the close() returns.
+ *
+ * This can hang if there are deadlocks. So make sure you get a PASSED result.
+ */
+
+#include <atomic>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <gtest/gtest.h>
+
+#include <aaudio/AAudio.h>
+
+// Sleep long enough that the foreground has a chance to call close.
+static constexpr int kCallbackSleepMicros = 600 * 1000;
+
+class AudioEngine {
+public:
+
+    // Check for a crash or late callback if we close without stopping.
+    void checkCloseJoins(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode,
+                             aaudio_data_callback_result_t callbackResult) {
+
+        // Make printf print immediately so that debug info is not stuck
+        // in a buffer if we hang or crash.
+        setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+        mCallbackResult = callbackResult;
+        startStreamForStall(direction, perfMode);
+        // When the callback starts it will go to sleep.
+        waitForCallbackToStart();
+
+        printf("call AAudioStream_close()\n");
+        ASSERT_FALSE(mCallbackFinished); // Still sleeping?
+        aaudio_result_t result = AAudioStream_close(mStream); // May hang here!
+        ASSERT_TRUE(mCallbackFinished);
+        ASSERT_EQ(AAUDIO_OK, result);
+        printf("AAudioStream_close() returned %d\n", result);
+
+        ASSERT_EQ(AAUDIO_OK, mError.load());
+        // Did calling stop() from callback fail? It should have.
+        ASSERT_NE(AAUDIO_OK, mStopResult.load());
+    }
+
+private:
+    void startStreamForStall(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode) {
+        AAudioStreamBuilder* builder = nullptr;
+        aaudio_result_t result = AAUDIO_OK;
+
+        // Use an AAudioStreamBuilder to contain requested parameters.
+        result = AAudio_createStreamBuilder(&builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Request stream properties.
+        AAudioStreamBuilder_setDirection(builder, direction);
+        AAudioStreamBuilder_setPerformanceMode(builder, perfMode);
+        AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+        AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+        // Create an AAudioStream using the Builder.
+        result = AAudioStreamBuilder_openStream(builder, &mStream);
+        AAudioStreamBuilder_delete(builder);
+        ASSERT_EQ(AAUDIO_OK, result);
+
+        // Check to see what kind of stream we actually got.
+        int32_t deviceId = AAudioStream_getDeviceId(mStream);
+        aaudio_performance_mode_t
+            actualPerfMode = AAudioStream_getPerformanceMode(mStream);
+        printf("-------- opened: deviceId = %3d, perfMode = %d\n",
+               deviceId,
+               actualPerfMode);
+
+        // Start stream.
+        result = AAudioStream_requestStart(mStream);
+        ASSERT_EQ(AAUDIO_OK, result);
+    }
+
+    void waitForCallbackToStart() {
+        // Wait for callback to say it has been called.
+        int countDownMillis = 2000;
+        constexpr int countDownPeriodMillis = 50;
+        while (!mCallbackStarted && countDownMillis > 0) {
+            printf("Waiting for callback to start, %d\n", countDownMillis);
+            usleep(countDownPeriodMillis * 1000);
+            countDownMillis -= countDownPeriodMillis;
+        }
+        ASSERT_LT(0, countDownMillis);
+        ASSERT_TRUE(mCallbackStarted);
+    }
+
+    // Data callback: note that it started, sleep for a while, try to stop the
+    // stream from inside the callback, then note that it finished.
+    static aaudio_data_callback_result_t s_myDataCallbackProc(
+            AAudioStream *stream,
+            void *userData,
+            void * /*audioData */,
+            int32_t /* numFrames */
+    ) {
+        AudioEngine* engine = (AudioEngine*) userData;
+        engine->mCallbackStarted = true;
+        usleep(kCallbackSleepMicros);
+        // It is illegal to call stop() from the data callback. It should
+        // return an error and not hang.
+        engine->mStopResult = AAudioStream_requestStop(stream);
+        engine->mCallbackFinished = true;
+        return engine->mCallbackResult;
+    }
+
+    static void s_myErrorCallbackProc(
+                AAudioStream * /* stream */,
+                void *userData,
+                aaudio_result_t error) {
+        AudioEngine *engine = (AudioEngine *)userData;
+        engine->mError = error;
+    }
+
+    AAudioStream* mStream = nullptr;
+
+    std::atomic<aaudio_result_t> mError{AAUDIO_OK}; // written by error callback
+    std::atomic<bool> mCallbackStarted{false};   // written by data callback
+    std::atomic<bool> mCallbackFinished{false};  // written by data callback
+    std::atomic<aaudio_data_callback_result_t> mCallbackResult{AAUDIO_CALLBACK_RESULT_CONTINUE};
+    std::atomic<aaudio_result_t> mStopResult{AAUDIO_OK};
+};
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_CONTINUE.
+
+TEST(test_close_timing, aaudio_close_joins_input_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_CONTINUE);
+}
+
+/*********************************************************************/
+// Tell the callback to return AAUDIO_CALLBACK_RESULT_STOP.
+
+TEST(test_close_timing, aaudio_close_joins_input_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_lowlat_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_output_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_OUTPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
+
+TEST(test_close_timing, aaudio_close_joins_input_none_stop) {
+    AudioEngine engine;
+    engine.checkCloseJoins(AAUDIO_DIRECTION_INPUT,
+        AAUDIO_PERFORMANCE_MODE_NONE,
+        AAUDIO_CALLBACK_RESULT_STOP);
+}
diff --git a/media/libaaudio/tests/test_disconnect_race.cpp b/media/libaaudio/tests/test_disconnect_race.cpp
new file mode 100644
index 0000000..6dbe165
--- /dev/null
+++ b/media/libaaudio/tests/test_disconnect_race.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test whether an error callback is joined before the close finishes.
+ *
+ * Start a stream with data and error callbacks.
+ * Trigger a disconnect, for example by plugging or unplugging headphones.
+ * The error callback just sleeps for a long time.
+ * While the error callback is sleeping, close() the stream from the main thread.
+ * Then check to make sure the callback was joined before the close() returns.
+ *
+ * This can hang if there are deadlocks. So make sure you get a PASSED result.
+ */
+
+#include <atomic>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <aaudio/AAudio.h>
+
+// Sleep long enough that the foreground has a chance to call close.
+static constexpr int kCallbackSleepMillis = 1000;
+static constexpr int kPollSleepMillis     =  100;
+
+static int sErrorCount = 0;
+
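+// This file builds as a standalone binary (see main() below) rather than a
+// gtest, so it uses its own minimal assert macros that log the failure,
+// count it, and abort the current check by returning false.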
+#define MY_ASSERT_TRUE(statement) \
+    if (!(statement)) { \
+        printf("ERROR line:%d - " #statement "\n", __LINE__); \
+        sErrorCount++; \
+        return false; \
+    }
+
+#define MY_ASSERT_EQ(aa,bb) MY_ASSERT_TRUE(((aa) == (bb)))
+#define MY_ASSERT_NE(aa,bb) MY_ASSERT_TRUE(((aa) != (bb)))
+
+class AudioEngine {
+public:
+
+    // Check for a crash or late callback if we close without stopping.
+    bool checkCloseJoins(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode,
+                             bool callStopFromCallback) {
+        mCallStopFromCallback = callStopFromCallback;
+
+        if (!startStreamForStall(direction, perfMode)) return false;
+
+        printf("--------------------------------------------------------\n");
+        printf("%s() - direction = %d, perfMode = %d, callStop = %d\n",
+            __func__, direction, perfMode, callStopFromCallback);
+
+        // When the callback starts it will go to sleep.
+        if (!waitForCallbackToStart()) return false;
+
+        printf("call AAudioStream_close()\n");
+        MY_ASSERT_TRUE(!mCallbackFinished); // Still sleeping?
+        aaudio_result_t result = AAudioStream_close(mStream); // May hang here!
+        if (mCallbackStarted) {
+            MY_ASSERT_TRUE(mCallbackFinished);
+        }
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+        printf("AAudioStream_close() returned %d\n", result);
+
+        MY_ASSERT_EQ(AAUDIO_ERROR_DISCONNECTED, mError.load());
+        if (mCallStopFromCallback) {
+            // Did calling stop() from callback fail? It should have.
+            MY_ASSERT_NE(AAUDIO_OK, mStopResult.load());
+        }
+
+        return true;
+    }
+
+private:
+    bool startStreamForStall(aaudio_direction_t direction,
+                             aaudio_performance_mode_t perfMode) {
+        AAudioStreamBuilder* builder = nullptr;
+        aaudio_result_t result = AAUDIO_OK;
+
+        // Use an AAudioStreamBuilder to contain requested parameters.
+        result = AAudio_createStreamBuilder(&builder);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        // Request stream properties.
+        AAudioStreamBuilder_setDirection(builder, direction);
+        AAudioStreamBuilder_setPerformanceMode(builder, perfMode);
+        AAudioStreamBuilder_setDataCallback(builder, s_myDataCallbackProc, this);
+        AAudioStreamBuilder_setErrorCallback(builder, s_myErrorCallbackProc, this);
+
+        // Create an AAudioStream using the Builder.
+        result = AAudioStreamBuilder_openStream(builder, &mStream);
+        AAudioStreamBuilder_delete(builder);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        // Check to see what kind of stream we actually got.
+        int32_t deviceId = AAudioStream_getDeviceId(mStream);
+        aaudio_performance_mode_t
+            actualPerfMode = AAudioStream_getPerformanceMode(mStream);
+        printf("-------- opened: deviceId = %3d, perfMode = %d\n",
+               deviceId,
+               actualPerfMode);
+
+        // Start stream.
+        result = AAudioStream_requestStart(mStream);
+        MY_ASSERT_EQ(AAUDIO_OK, result);
+
+        return true;
+    }
+
+    bool waitForCallbackToStart() {
+        // Wait for callback to say it has been called.
+        int countDown = 10 * 1000 / kPollSleepMillis;
+        while (!mCallbackStarted && countDown > 0) {
+            if ((countDown % 5) == 0) {
+                printf("===== Please PLUG or UNPLUG headphones! ======= %d\n", countDown);
+            }
+            usleep(kPollSleepMillis * 1000);
+            countDown--;
+        }
+        MY_ASSERT_TRUE(countDown > 0);
+        MY_ASSERT_TRUE(mCallbackStarted);
+        return true;
+    }
+
+    // Data callback that does nothing; the interesting work happens in the
+    // error callback below.
+    static aaudio_data_callback_result_t s_myDataCallbackProc(
+            AAudioStream * /* stream */,
+            void * /* userData */,
+            void * /* audioData */,
+            int32_t /* numFrames */
+    ) {
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+
+    static void s_myErrorCallbackProc(
+                AAudioStream * stream,
+                void *userData,
+                aaudio_result_t error) {
+        AudioEngine *engine = (AudioEngine *)userData;
+        engine->mError = error;
+        engine->mCallbackStarted = true;
+        usleep(kCallbackSleepMillis * 1000);
+        // It is illegal to call stop() from this callback. It should
+        // return an error and not hang.
+        if (engine->mCallStopFromCallback) {
+            engine->mStopResult = AAudioStream_requestStop(stream);
+        }
+        engine->mCallbackFinished = true;
+    }
+
+    AAudioStream* mStream = nullptr;
+
+    std::atomic<aaudio_result_t> mError{AAUDIO_OK}; // written by error callback
+    std::atomic<bool> mCallStopFromCallback{false};
+    std::atomic<bool> mCallbackStarted{false};   // written by error callback
+    std::atomic<bool> mCallbackFinished{false};  // written by error callback
+    std::atomic<aaudio_result_t> mStopResult{AAUDIO_OK};
+};
+
+int main(int, char **) {
+    // Parameters to test.
+    static aaudio_direction_t directions[] = {AAUDIO_DIRECTION_OUTPUT,
+                                            AAUDIO_DIRECTION_INPUT};
+    static aaudio_performance_mode_t perfModes[] =
+        {AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAUDIO_PERFORMANCE_MODE_NONE};
+    static bool callStops[] = { false, true };
+
+    // Make printf print immediately so that debug info is not stuck
+    // in a buffer if we hang or crash.
+    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+    printf("Test Disconnect Race V1.0\n");
+    printf("\n");
+
+    for (auto callStop : callStops) {
+        for (auto direction : directions) {
+            for (auto perfMode : perfModes) {
+                AudioEngine engine;
+                engine.checkCloseJoins(direction, perfMode, callStop);
+            }
+        }
+    }
+
+    printf("Error Count = %d, %s\n", sErrorCount,
+           ((sErrorCount == 0) ? "PASS" : "FAIL"));
+}
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index c51fbce..49213dc 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -33,6 +33,29 @@
 using namespace android;
 using namespace aaudio;
 
+template<typename T>
+T copy(const T& object) {
+    return T(object);
+}
+
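+// SharedMemoryParcelable manages a file descriptor, so it is duplicated via
+// dup() rather than copy-constructed.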
+template<>
+SharedMemoryParcelable copy<SharedMemoryParcelable>(const SharedMemoryParcelable& object) {
+    return object.dup();
+}
+
+template<typename T>
+void writeToParcel(const T& object, Parcel* parcel) {
+    copy(object).parcelable().writeToParcel(parcel);
+}
+
+template<typename T>
+T readFromParcel(const Parcel& parcel) {
+    using ParcelType = std::decay_t<decltype(std::declval<T>().parcelable())>;
+    ParcelType parcelable;
+    parcelable.readFromParcel(&parcel);
+    return T(std::move(parcelable));
+}
+
 // Test adding one value.
 TEST(test_marshalling, aaudio_one_read_write) {
     Parcel parcel;
@@ -48,7 +71,6 @@
 // Test SharedMemoryParcel.
 TEST(test_marshalling, aaudio_shared_memory) {
     SharedMemoryParcelable sharedMemoryA;
-    SharedMemoryParcelable sharedMemoryB;
     const size_t memSizeBytes = 840;
     unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
@@ -63,10 +85,10 @@
 
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    sharedMemoryA.writeToParcel(&parcel);
+    writeToParcel(sharedMemoryA, &parcel);
 
     parcel.setDataPosition(pos);
-    sharedMemoryB.readFromParcel(&parcel);
+    SharedMemoryParcelable sharedMemoryB = readFromParcel<SharedMemoryParcelable>(parcel);
     EXPECT_EQ(sharedMemoryA.getSizeInBytes(), sharedMemoryB.getSizeInBytes());
 
     // should see same value at two different addresses
@@ -81,7 +103,6 @@
 TEST(test_marshalling, aaudio_shared_region) {
     SharedMemoryParcelable sharedMemories[2];
     SharedRegionParcelable sharedRegionA;
-    SharedRegionParcelable sharedRegionB;
     const size_t memSizeBytes = 840;
     unique_fd fd(ashmem_create_region("TestMarshalling", memSizeBytes));
     ASSERT_LE(0, fd);
@@ -97,10 +118,10 @@
 
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    sharedRegionA.writeToParcel(&parcel);
+    writeToParcel(sharedRegionA, &parcel);
 
     parcel.setDataPosition(pos);
-    sharedRegionB.readFromParcel(&parcel);
+    SharedRegionParcelable sharedRegionB = readFromParcel<SharedRegionParcelable>(parcel);
 
     // should see same value
     void *region2;
@@ -113,7 +134,6 @@
 TEST(test_marshalling, aaudio_ring_buffer_parcelable) {
     SharedMemoryParcelable sharedMemories[2];
     RingBufferParcelable ringBufferA;
-    RingBufferParcelable ringBufferB;
 
     const size_t bytesPerFrame = 8;
     const size_t framesPerBurst = 32;
@@ -147,11 +167,11 @@
     // write A to parcel
     Parcel parcel;
     size_t pos = parcel.dataPosition();
-    ringBufferA.writeToParcel(&parcel);
+    writeToParcel(ringBufferA, &parcel);
 
     // read B from parcel
     parcel.setDataPosition(pos);
-    ringBufferB.readFromParcel(&parcel);
+    RingBufferParcelable ringBufferB = readFromParcel<RingBufferParcelable>(parcel);
 
     RingBufferDescriptor descriptorB;
     EXPECT_EQ(AAUDIO_OK, ringBufferB.resolve(sharedMemories, &descriptorB));
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
deleted file mode 100644
index 2397b6c..0000000
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Return stop from the callback
- * and then close the stream immediately.
- */
-
-#include <atomic>
-#include <mutex>
-#include <stdio.h>
-#include <thread>
-#include <unistd.h>
-
-#include <aaudio/AAudio.h>
-
-#define DURATION_SECONDS   5
-
-struct AudioEngine {
-    AAudioStreamBuilder *builder = nullptr;
-    AAudioStream        *stream = nullptr;
-    std::thread         *thread = nullptr;
-
-    std::atomic<bool>   started{false};
-    std::mutex          doneLock; // Use a mutex so we can sleep on it while join()ing.
-    std::atomic<bool>   done{false};
-
-    aaudio_result_t join() {
-        aaudio_result_t result = AAUDIO_ERROR_INVALID_STATE;
-        if (stream != nullptr) {
-            while (true) {
-                {
-                    // Will block if the thread is running.
-                    // This mutex is used to close() immediately after the callback returns
-                    // and before the requestStop() is called.
-                    std::lock_guard<std::mutex> lock(doneLock);
-                    if (done) break;
-                }
-                printf("join() got mutex but stream not done!");
-                usleep(10 * 1000); // sleep then check again
-            }
-            result = AAudioStream_close(stream);
-            stream = nullptr;
-        }
-        return result;
-    }
-};
-
-// Callback function that fills the audio output buffer.
-static aaudio_data_callback_result_t s_myDataCallbackProc(
-        AAudioStream *stream,
-        void *userData,
-        void *audioData,
-        int32_t numFrames
-) {
-    (void) stream;
-    (void) audioData;
-    (void) numFrames;
-    AudioEngine *engine = (struct AudioEngine *)userData;
-    std::lock_guard<std::mutex> lock(engine->doneLock);
-    engine->started = true;
-    usleep(DURATION_SECONDS * 1000 * 1000); // Mimic SynthMark procedure.
-    engine->done = true;
-    return AAUDIO_CALLBACK_RESULT_STOP;
-}
-
-static void s_myErrorCallbackProc(
-    AAudioStream *stream __unused,
-    void *userData __unused,
-    aaudio_result_t error) {
-    printf("%s() - error = %d\n", __func__, error);
-}
-
-static aaudio_result_t s_OpenAudioStream(struct AudioEngine *engine) {
-    // Use an AAudioStreamBuilder to contain requested parameters.
-    aaudio_result_t result = AAudio_createStreamBuilder(&engine->builder);
-    if (result != AAUDIO_OK) {
-        printf("AAudio_createStreamBuilder returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    // Request stream properties.
-    AAudioStreamBuilder_setPerformanceMode(engine->builder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
-    AAudioStreamBuilder_setDataCallback(engine->builder, s_myDataCallbackProc, engine);
-    AAudioStreamBuilder_setErrorCallback(engine->builder, s_myErrorCallbackProc, engine);
-
-    // Create an AAudioStream using the Builder.
-    result = AAudioStreamBuilder_openStream(engine->builder, &engine->stream);
-    if (result != AAUDIO_OK) {
-        printf("AAudioStreamBuilder_openStream returned %s",
-               AAudio_convertResultToText(result));
-        return result;
-    }
-
-    return result;
-}
-
-int main(int argc, char **argv) {
-    (void) argc;
-    (void) argv;
-    struct AudioEngine engine;
-    aaudio_result_t result = AAUDIO_OK;
-    int errorCount = 0;
-
-    // Make printf print immediately so that debug info is not stuck
-    // in a buffer if we hang or crash.
-    setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
-
-    printf("Test Return Stop Hang V1.0\n");
-
-    result = s_OpenAudioStream(&engine);
-    if (result != AAUDIO_OK) {
-        printf("s_OpenAudioStream returned %s\n",
-               AAudio_convertResultToText(result));
-        errorCount++;
-    }
-
-    // Check to see what kind of stream we actually got.
-    int32_t deviceId = AAudioStream_getDeviceId(engine.stream);
-    aaudio_performance_mode_t actualPerfMode = AAudioStream_getPerformanceMode(engine.stream);
-    printf("-------- opened: deviceId = %3d, perfMode = %d\n", deviceId, actualPerfMode);
-
-    // Start stream.
-    result = AAudioStream_requestStart(engine.stream);
-    printf("AAudioStream_requestStart() returned %d >>>>>>>>>>>>>>>>>>>>>>\n", result);
-    if (result != AAUDIO_OK) {
-        errorCount++;
-    } else {
-        int counter = 0;
-        while (!engine.started) {
-            printf("Waiting for stream to start, %d\n", counter++);
-            usleep(5 * 1000);
-        }
-        printf("You should see more messages %d seconds after this. If not then the test failed!\n",
-               DURATION_SECONDS);
-        result = engine.join(); // This might hang!
-        AAudioStreamBuilder_delete(engine.builder);
-        engine.builder = nullptr;
-    }
-
-    printf("aaudio result = %d = %s\n", result, AAudio_convertResultToText(result));
-    printf("test %s\n", errorCount ? "FAILED" : "PASSED");
-
-    return errorCount ? EXIT_FAILURE : EXIT_SUCCESS;
-}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index a20c799..cbf863f 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -33,6 +33,11 @@
         void *audioData,
         int32_t numFrames
 ) {
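+    // Input streams have no output buffer to verify, so skip the zero check.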
+    aaudio_direction_t direction = AAudioStream_getDirection(stream);
+    if (direction == AAUDIO_DIRECTION_INPUT) {
+        return AAUDIO_CALLBACK_RESULT_CONTINUE;
+    }
+    // Check to make sure the buffer is initialized to all zeros.
     int channels = AAudioStream_getChannelCount(stream);
     int numSamples = channels * numFrames;
     bool allZeros = true;
@@ -48,7 +53,8 @@
 constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
 
 void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
-        aaudio_sharing_mode_t sharingMode) {
+        aaudio_sharing_mode_t sharingMode,
+        aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
     AAudioStreamBuilder* aaudioBuilder = nullptr;
     AAudioStream* aaudioStream = nullptr;
 
@@ -61,6 +67,7 @@
                                         nullptr);
     AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
     AAudioStreamBuilder_setSharingMode(aaudioBuilder, sharingMode);
+    AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
     AAudioStreamBuilder_setFormat(aaudioBuilder, AAUDIO_FORMAT_PCM_FLOAT);
 
     // Create an AAudioStream using the Builder.
@@ -88,14 +95,28 @@
     // We should NOT be able to start or change a stream after it has been released.
     EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStart(aaudioStream));
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
-    EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestPause(aaudioStream));
+    // Pause is only implemented for OUTPUT.
+    if (direction == AAUDIO_DIRECTION_OUTPUT) {
+        EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE,
+                  AAudioStream_requestPause(aaudioStream));
+    }
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
     EXPECT_EQ(AAUDIO_ERROR_INVALID_STATE, AAudioStream_requestStop(aaudioStream));
     EXPECT_EQ(AAUDIO_STREAM_STATE_CLOSING, AAudioStream_getState(aaudioStream));
 
     // Does this crash?
-    EXPECT_LT(0, AAudioStream_getFramesRead(aaudioStream));
-    EXPECT_LT(0, AAudioStream_getFramesWritten(aaudioStream));
+    EXPECT_GT(AAudioStream_getFramesRead(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getFramesWritten(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getFramesPerBurst(aaudioStream), 0);
+    EXPECT_GE(AAudioStream_getXRunCount(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getBufferCapacityInFrames(aaudioStream), 0);
+    EXPECT_GT(AAudioStream_getBufferSizeInFrames(aaudioStream), 0);
+
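+    // Getting a timestamp on a released stream should fail cleanly, not crash.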
+    int64_t timestampFrames = 0;
+    int64_t timestampNanos = 0;
+    aaudio_result_t result = AAudioStream_getTimestamp(aaudioStream, CLOCK_MONOTONIC,
+            &timestampFrames, &timestampNanos);
+    EXPECT_TRUE(result == AAUDIO_ERROR_INVALID_STATE || result == AAUDIO_ERROR_UNIMPLEMENTED);
 
     // Verify Closing State. Does this crash?
     aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
@@ -107,20 +128,42 @@
     EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
 }
 
-TEST(test_various, aaudio_release_close_none) {
+TEST(test_various, aaudio_release_close_none_output) {
     checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
-            AAUDIO_SHARING_MODE_SHARED);
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_OUTPUT);
     // No EXCLUSIVE streams with MODE_NONE.
 }
 
-TEST(test_various, aaudio_release_close_low_shared) {
-    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
-            AAUDIO_SHARING_MODE_SHARED);
+TEST(test_various, aaudio_release_close_none_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_NONE,
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_INPUT);
+    // No EXCLUSIVE streams with MODE_NONE.
 }
 
-TEST(test_various, aaudio_release_close_low_exclusive) {
+TEST(test_various, aaudio_release_close_low_shared_output) {
     checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
-            AAUDIO_SHARING_MODE_EXCLUSIVE);
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_OUTPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_shared_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_SHARED,
+            AAUDIO_DIRECTION_INPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive_output) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_EXCLUSIVE,
+            AAUDIO_DIRECTION_OUTPUT);
+}
+
+TEST(test_various, aaudio_release_close_low_exclusive_input) {
+    checkReleaseThenClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+            AAUDIO_SHARING_MODE_EXCLUSIVE,
+            AAUDIO_DIRECTION_INPUT);
 }
 
 enum FunctionToCall {
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
new file mode 100644
index 0000000..321e7f9
--- /dev/null
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -0,0 +1,2350 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlConversion"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "media/AidlConversion.h"
+
+#include <media/ShmemCompat.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities
+
+namespace android {
+
+using base::unexpected;
+
+namespace {
+
+enum class Direction {
+    INPUT, OUTPUT
+};
+
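+// Determine the direction of data flow for a port from its role and type.
+// For a DEVICE port, a SOURCE feeds audio into the framework (input);
+// for a MIX port, a SOURCE is an output mix (output). SINK is the reverse.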
+ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
+    switch (type) {
+        case media::AudioPortType::NONE:
+        case media::AudioPortType::SESSION:
+            break;  // must be listed  -Werror,-Wswitch
+        case media::AudioPortType::DEVICE:
+            switch (role) {
+                case media::AudioPortRole::NONE:
+                     break;  // must be listed  -Werror,-Wswitch
+                case media::AudioPortRole::SOURCE:
+                    return Direction::INPUT;
+                case media::AudioPortRole::SINK:
+                    return Direction::OUTPUT;
+            }
+            break;
+        case media::AudioPortType::MIX:
+            switch (role) {
+                case media::AudioPortRole::NONE:
+                     break;  // must be listed  -Werror,-Wswitch
+                case media::AudioPortRole::SOURCE:
+                    return Direction::OUTPUT;
+                case media::AudioPortRole::SINK:
+                    return Direction::INPUT;
+            }
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+        case AUDIO_PORT_TYPE_SESSION:
+            break;  // must be listed  -Werror,-Wswitch
+        case AUDIO_PORT_TYPE_DEVICE:
+            switch (role) {
+                case AUDIO_PORT_ROLE_NONE:
+                     break;  // must be listed  -Werror,-Wswitch
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return Direction::INPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return Direction::OUTPUT;
+            }
+            break;
+        case AUDIO_PORT_TYPE_MIX:
+            switch (role) {
+                case AUDIO_PORT_ROLE_NONE:
+                     break;  // must be listed  -Werror,-Wswitch
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return Direction::OUTPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return Direction::INPUT;
+            }
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+}  // namespace
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
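+// Copy an AIDL string into a fixed-size legacy char buffer, rejecting strings
+// that would not fit together with a null terminator.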
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
+    if (aidl.size() > maxSize - 1) {
+        return BAD_VALUE;
+    }
+    aidl.copy(dest, aidl.size());
+    dest[aidl.size()] = '\0';
+    return OK;
+}
+
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize) {
+    if (legacy == nullptr) {
+        return unexpected(BAD_VALUE);
+    }
+    if (strnlen(legacy, maxSize) == maxSize) {
+        // No null-terminator.
+        return unexpected(BAD_VALUE);
+    }
+    return std::string(legacy);
+}
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_module_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_io_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_port_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_patch_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl) {
+    return convertReinterpret<audio_unique_id_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl) {
+    return convertReinterpret<audio_hw_sync_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl) {
+    return convertReinterpret<pid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl) {
+    return convertReinterpret<uid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl) {
+    return String16(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy) {
+    return std::string(String8(legacy).c_str());
+}
+
+// TODO b/182392769: create an optional -> optional util
+ConversionResult<std::optional<String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl) {
+    if (!aidl.has_value()) {
+        return std::nullopt;
+    }
+    ConversionResult<String16> conversion =
+        VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.value()));
+    return conversion.value();
+}
+
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy) {
+  if (!legacy.has_value()) {
+    return std::nullopt;
+  }
+  ConversionResult<std::string> conversion =
+      VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.value()));
+  return conversion.value();
+}
+
+ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl) {
+    return String8(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy) {
+    return std::string(legacy.c_str());
+}
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
+        media::AudioPortConfigType aidl) {
+    switch (aidl) {
+        case media::AudioPortConfigType::SAMPLE_RATE:
+            return AUDIO_PORT_CONFIG_SAMPLE_RATE;
+        case media::AudioPortConfigType::CHANNEL_MASK:
+            return AUDIO_PORT_CONFIG_CHANNEL_MASK;
+        case media::AudioPortConfigType::FORMAT:
+            return AUDIO_PORT_CONFIG_FORMAT;
+        case media::AudioPortConfigType::GAIN:
+            return AUDIO_PORT_CONFIG_GAIN;
+        case media::AudioPortConfigType::FLAGS:
+            return AUDIO_PORT_CONFIG_FLAGS;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
+        int32_t legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_CONFIG_SAMPLE_RATE:
+            return media::AudioPortConfigType::SAMPLE_RATE;
+        case AUDIO_PORT_CONFIG_CHANNEL_MASK:
+            return media::AudioPortConfigType::CHANNEL_MASK;
+        case AUDIO_PORT_CONFIG_FORMAT:
+            return media::AudioPortConfigType::FORMAT;
+        case AUDIO_PORT_CONFIG_GAIN:
+            return media::AudioPortConfigType::GAIN;
+        case AUDIO_PORT_CONFIG_FLAGS:
+            return media::AudioPortConfigType::FLAGS;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl) {
+    return convertBitmask<unsigned int, int32_t, int, media::AudioPortConfigType>(
+            aidl, aidl2legacy_AudioPortConfigType_int32_t,
+            // AudioPortConfigType enum is index-based.
+            indexToEnum_index<media::AudioPortConfigType>,
+            // AUDIO_PORT_CONFIG_* flags are mask-based.
+            enumToMask_bitmask<unsigned int, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy) {
+    return convertBitmask<int32_t, unsigned int, media::AudioPortConfigType, int>(
+            legacy, legacy2aidl_int32_t_AudioPortConfigType,
+            // AUDIO_PORT_CONFIG_* flags are mask-based.
+            indexToEnum_bitmask<unsigned>,
+            // AudioPortConfigType enum is index-based.
+            enumToMask_index<int32_t, media::AudioPortConfigType>);
+}
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl) {
+    // TODO(ytai): should we convert bit-by-bit?
+    // One problem here is that the representation is both opaque and is different based on the
+    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+    return convertReinterpret<audio_channel_mask_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy) {
+    // TODO(ytai): should we convert bit-by-bit?
+    // One problem here is that the representation is both opaque and is different based on the
+    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+        media::AudioIoConfigEvent aidl) {
+    switch (aidl) {
+        case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+            return AUDIO_OUTPUT_REGISTERED;
+        case media::AudioIoConfigEvent::OUTPUT_OPENED:
+            return AUDIO_OUTPUT_OPENED;
+        case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+            return AUDIO_OUTPUT_CLOSED;
+        case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+            return AUDIO_OUTPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::INPUT_REGISTERED:
+            return AUDIO_INPUT_REGISTERED;
+        case media::AudioIoConfigEvent::INPUT_OPENED:
+            return AUDIO_INPUT_OPENED;
+        case media::AudioIoConfigEvent::INPUT_CLOSED:
+            return AUDIO_INPUT_CLOSED;
+        case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+            return AUDIO_INPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::CLIENT_STARTED:
+            return AUDIO_CLIENT_STARTED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+        audio_io_config_event legacy) {
+    switch (legacy) {
+        case AUDIO_OUTPUT_REGISTERED:
+            return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+        case AUDIO_OUTPUT_OPENED:
+            return media::AudioIoConfigEvent::OUTPUT_OPENED;
+        case AUDIO_OUTPUT_CLOSED:
+            return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+        case AUDIO_OUTPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+        case AUDIO_INPUT_REGISTERED:
+            return media::AudioIoConfigEvent::INPUT_REGISTERED;
+        case AUDIO_INPUT_OPENED:
+            return media::AudioIoConfigEvent::INPUT_OPENED;
+        case AUDIO_INPUT_CLOSED:
+            return media::AudioIoConfigEvent::INPUT_CLOSED;
+        case AUDIO_INPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+        case AUDIO_CLIENT_STARTED:
+            return media::AudioIoConfigEvent::CLIENT_STARTED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+        media::AudioPortRole aidl) {
+    switch (aidl) {
+        case media::AudioPortRole::NONE:
+            return AUDIO_PORT_ROLE_NONE;
+        case media::AudioPortRole::SOURCE:
+            return AUDIO_PORT_ROLE_SOURCE;
+        case media::AudioPortRole::SINK:
+            return AUDIO_PORT_ROLE_SINK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+        audio_port_role_t legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_ROLE_NONE:
+            return media::AudioPortRole::NONE;
+        case AUDIO_PORT_ROLE_SOURCE:
+            return media::AudioPortRole::SOURCE;
+        case AUDIO_PORT_ROLE_SINK:
+            return media::AudioPortRole::SINK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+        media::AudioPortType aidl) {
+    switch (aidl) {
+        case media::AudioPortType::NONE:
+            return AUDIO_PORT_TYPE_NONE;
+        case media::AudioPortType::DEVICE:
+            return AUDIO_PORT_TYPE_DEVICE;
+        case media::AudioPortType::MIX:
+            return AUDIO_PORT_TYPE_MIX;
+        case media::AudioPortType::SESSION:
+            return AUDIO_PORT_TYPE_SESSION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+        audio_port_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_PORT_TYPE_NONE:
+            return media::AudioPortType::NONE;
+        case AUDIO_PORT_TYPE_DEVICE:
+            return media::AudioPortType::DEVICE;
+        case AUDIO_PORT_TYPE_MIX:
+            return media::AudioPortType::MIX;
+        case AUDIO_PORT_TYPE_SESSION:
+            return media::AudioPortType::SESSION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+        media::audio::common::AudioFormat aidl) {
+    // This relies on AudioFormat being kept in sync with audio_format_t.
+    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+    return static_cast<audio_format_t>(aidl);
+}
+
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+        audio_format_t legacy) {
+    // This relies on AudioFormat being kept in sync with audio_format_t.
+    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+    return static_cast<media::audio::common::AudioFormat>(legacy);
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl) {
+    switch (aidl) {
+        case media::AudioGainMode::JOINT:
+            return AUDIO_GAIN_MODE_JOINT;
+        case media::AudioGainMode::CHANNELS:
+            return AUDIO_GAIN_MODE_CHANNELS;
+        case media::AudioGainMode::RAMP:
+            return AUDIO_GAIN_MODE_RAMP;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_GAIN_MODE_JOINT:
+            return media::AudioGainMode::JOINT;
+        case AUDIO_GAIN_MODE_CHANNELS:
+            return media::AudioGainMode::CHANNELS;
+        case AUDIO_GAIN_MODE_RAMP:
+            return media::AudioGainMode::RAMP;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl) {
+    return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, media::AudioGainMode>(
+            aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
+            // AudioGainMode is index-based.
+            indexToEnum_index<media::AudioGainMode>,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
+    return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, audio_gain_mode_t>(
+            legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            indexToEnum_bitmask<audio_gain_mode_t>,
+            // AudioGainMode is index-based.
+            enumToMask_index<int32_t, media::AudioGainMode>);
+}
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl) {
+    // TODO(ytai): bitfield?
+    return convertReinterpret<audio_devices_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy) {
+    // TODO(ytai): bitfield?
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type) {
+    audio_gain_config legacy;
+    legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
+    legacy.channel_mask =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+    const bool isJoint = bitmaskIsSet(aidl.mode, media::AudioGainMode::JOINT);
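+    // JOINT mode carries a single gain value; otherwise there is one value per channel.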
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < numValues; ++i) {
+        legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
+    }
+    legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.rampDurationMs));
+    return legacy;
+}
+
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type) {
+    media::AudioGainConfig aidl;
+    aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
+    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
+    aidl.channelMask =
+            VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+    const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    aidl.values.resize(numValues);
+    for (size_t i = 0; i < numValues; ++i) {
+        aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
+    }
+    aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
+    return aidl;
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+        media::AudioInputFlags aidl) {
+    switch (aidl) {
+        case media::AudioInputFlags::FAST:
+            return AUDIO_INPUT_FLAG_FAST;
+        case media::AudioInputFlags::HW_HOTWORD:
+            return AUDIO_INPUT_FLAG_HW_HOTWORD;
+        case media::AudioInputFlags::RAW:
+            return AUDIO_INPUT_FLAG_RAW;
+        case media::AudioInputFlags::SYNC:
+            return AUDIO_INPUT_FLAG_SYNC;
+        case media::AudioInputFlags::MMAP_NOIRQ:
+            return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+        case media::AudioInputFlags::VOIP_TX:
+            return AUDIO_INPUT_FLAG_VOIP_TX;
+        case media::AudioInputFlags::HW_AV_SYNC:
+            return AUDIO_INPUT_FLAG_HW_AV_SYNC;
+        case media::AudioInputFlags::DIRECT:
+            return AUDIO_INPUT_FLAG_DIRECT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+        audio_input_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_INPUT_FLAG_NONE:
+            break; // shouldn't get here. must be listed  -Werror,-Wswitch
+        case AUDIO_INPUT_FLAG_FAST:
+            return media::AudioInputFlags::FAST;
+        case AUDIO_INPUT_FLAG_HW_HOTWORD:
+            return media::AudioInputFlags::HW_HOTWORD;
+        case AUDIO_INPUT_FLAG_RAW:
+            return media::AudioInputFlags::RAW;
+        case AUDIO_INPUT_FLAG_SYNC:
+            return media::AudioInputFlags::SYNC;
+        case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
+            return media::AudioInputFlags::MMAP_NOIRQ;
+        case AUDIO_INPUT_FLAG_VOIP_TX:
+            return media::AudioInputFlags::VOIP_TX;
+        case AUDIO_INPUT_FLAG_HW_AV_SYNC:
+            return media::AudioInputFlags::HW_AV_SYNC;
+        case AUDIO_INPUT_FLAG_DIRECT:
+            return media::AudioInputFlags::DIRECT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+        media::AudioOutputFlags aidl) {
+    switch (aidl) {
+        case media::AudioOutputFlags::DIRECT:
+            return AUDIO_OUTPUT_FLAG_DIRECT;
+        case media::AudioOutputFlags::PRIMARY:
+            return AUDIO_OUTPUT_FLAG_PRIMARY;
+        case media::AudioOutputFlags::FAST:
+            return AUDIO_OUTPUT_FLAG_FAST;
+        case media::AudioOutputFlags::DEEP_BUFFER:
+            return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+        case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+            return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+        case media::AudioOutputFlags::NON_BLOCKING:
+            return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
+        case media::AudioOutputFlags::HW_AV_SYNC:
+            return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+        case media::AudioOutputFlags::TTS:
+            return AUDIO_OUTPUT_FLAG_TTS;
+        case media::AudioOutputFlags::RAW:
+            return AUDIO_OUTPUT_FLAG_RAW;
+        case media::AudioOutputFlags::SYNC:
+            return AUDIO_OUTPUT_FLAG_SYNC;
+        case media::AudioOutputFlags::IEC958_NONAUDIO:
+            return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+        case media::AudioOutputFlags::DIRECT_PCM:
+            return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
+        case media::AudioOutputFlags::MMAP_NOIRQ:
+            return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+        case media::AudioOutputFlags::VOIP_RX:
+            return AUDIO_OUTPUT_FLAG_VOIP_RX;
+        case media::AudioOutputFlags::INCALL_MUSIC:
+            return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
+        case media::AudioOutputFlags::GAPLESS_OFFLOAD:
+            return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+        audio_output_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_OUTPUT_FLAG_NONE:
+            break; // shouldn't get here. must be listed  -Werror,-Wswitch
+        case AUDIO_OUTPUT_FLAG_DIRECT:
+            return media::AudioOutputFlags::DIRECT;
+        case AUDIO_OUTPUT_FLAG_PRIMARY:
+            return media::AudioOutputFlags::PRIMARY;
+        case AUDIO_OUTPUT_FLAG_FAST:
+            return media::AudioOutputFlags::FAST;
+        case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
+            return media::AudioOutputFlags::DEEP_BUFFER;
+        case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
+            return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
+            return media::AudioOutputFlags::NON_BLOCKING;
+        case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
+            return media::AudioOutputFlags::HW_AV_SYNC;
+        case AUDIO_OUTPUT_FLAG_TTS:
+            return media::AudioOutputFlags::TTS;
+        case AUDIO_OUTPUT_FLAG_RAW:
+            return media::AudioOutputFlags::RAW;
+        case AUDIO_OUTPUT_FLAG_SYNC:
+            return media::AudioOutputFlags::SYNC;
+        case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
+            return media::AudioOutputFlags::IEC958_NONAUDIO;
+        case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
+            return media::AudioOutputFlags::DIRECT_PCM;
+        case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
+            return media::AudioOutputFlags::MMAP_NOIRQ;
+        case AUDIO_OUTPUT_FLAG_VOIP_RX:
+            return media::AudioOutputFlags::VOIP_RX;
+        case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
+            return media::AudioOutputFlags::INCALL_MUSIC;
+        case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
+            return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+                    aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
+                    indexToEnum_index<media::AudioInputFlags>,
+                    enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
+    return static_cast<audio_input_flags_t>(converted);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+            legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
+            indexToEnum_bitmask<audio_input_flags_t>,
+            enumToMask_index<int32_t, media::AudioInputFlags>);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl) {
+    return convertBitmask<audio_output_flags_t,
+            int32_t,
+            audio_output_flags_t,
+            media::AudioOutputFlags>(
+            aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
+            indexToEnum_index<media::AudioOutputFlags>,
+            enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+            legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
+            indexToEnum_bitmask<audio_output_flags_t>,
+            enumToMask_index<int32_t, media::AudioOutputFlags>);
+}
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+    audio_io_flags legacy;
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT:
+            legacy.input = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_input_flags_t_mask(
+                            VALUE_OR_RETURN(UNION_GET(aidl, input))));
+            break;
+
+        case Direction::OUTPUT:
+            legacy.output = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_output_flags_t_mask(
+                            VALUE_OR_RETURN(UNION_GET(aidl, output))));
+            break;
+    }
+
+    return legacy;
+}
+
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
+    media::AudioIoFlags aidl;
+
+    Direction dir = VALUE_OR_RETURN(direction(role, type));
+    switch (dir) {
+        case Direction::INPUT:
+            UNION_SET(aidl, input,
+                      VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(
+                              legacy.input)));
+            break;
+        case Direction::OUTPUT:
+            UNION_SET(aidl, output,
+                      VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(
+                              legacy.output)));
+            break;
+    }
+    return aidl;
+}
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+        const media::AudioPortConfigDeviceExt& aidl) {
+    audio_port_config_device_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.address, legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigDeviceExt>
+legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+        const audio_port_config_device_ext& legacy) {
+    media::AudioPortConfigDeviceExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
+    aidl.address = VALUE_OR_RETURN(
+            legacy2aidl_string(legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return aidl;
+}
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        media::AudioStreamType aidl) {
+    switch (aidl) {
+        case media::AudioStreamType::DEFAULT:
+            return AUDIO_STREAM_DEFAULT;
+        case media::AudioStreamType::VOICE_CALL:
+            return AUDIO_STREAM_VOICE_CALL;
+        case media::AudioStreamType::SYSTEM:
+            return AUDIO_STREAM_SYSTEM;
+        case media::AudioStreamType::RING:
+            return AUDIO_STREAM_RING;
+        case media::AudioStreamType::MUSIC:
+            return AUDIO_STREAM_MUSIC;
+        case media::AudioStreamType::ALARM:
+            return AUDIO_STREAM_ALARM;
+        case media::AudioStreamType::NOTIFICATION:
+            return AUDIO_STREAM_NOTIFICATION;
+        case media::AudioStreamType::BLUETOOTH_SCO:
+            return AUDIO_STREAM_BLUETOOTH_SCO;
+        case media::AudioStreamType::ENFORCED_AUDIBLE:
+            return AUDIO_STREAM_ENFORCED_AUDIBLE;
+        case media::AudioStreamType::DTMF:
+            return AUDIO_STREAM_DTMF;
+        case media::AudioStreamType::TTS:
+            return AUDIO_STREAM_TTS;
+        case media::AudioStreamType::ACCESSIBILITY:
+            return AUDIO_STREAM_ACCESSIBILITY;
+        case media::AudioStreamType::ASSISTANT:
+            return AUDIO_STREAM_ASSISTANT;
+        case media::AudioStreamType::REROUTING:
+            return AUDIO_STREAM_REROUTING;
+        case media::AudioStreamType::PATCH:
+            return AUDIO_STREAM_PATCH;
+        case media::AudioStreamType::CALL_ASSISTANT:
+            return AUDIO_STREAM_CALL_ASSISTANT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_STREAM_DEFAULT:
+            return media::AudioStreamType::DEFAULT;
+        case AUDIO_STREAM_VOICE_CALL:
+            return media::AudioStreamType::VOICE_CALL;
+        case AUDIO_STREAM_SYSTEM:
+            return media::AudioStreamType::SYSTEM;
+        case AUDIO_STREAM_RING:
+            return media::AudioStreamType::RING;
+        case AUDIO_STREAM_MUSIC:
+            return media::AudioStreamType::MUSIC;
+        case AUDIO_STREAM_ALARM:
+            return media::AudioStreamType::ALARM;
+        case AUDIO_STREAM_NOTIFICATION:
+            return media::AudioStreamType::NOTIFICATION;
+        case AUDIO_STREAM_BLUETOOTH_SCO:
+            return media::AudioStreamType::BLUETOOTH_SCO;
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+            return media::AudioStreamType::ENFORCED_AUDIBLE;
+        case AUDIO_STREAM_DTMF:
+            return media::AudioStreamType::DTMF;
+        case AUDIO_STREAM_TTS:
+            return media::AudioStreamType::TTS;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            return media::AudioStreamType::ACCESSIBILITY;
+        case AUDIO_STREAM_ASSISTANT:
+            return media::AudioStreamType::ASSISTANT;
+        case AUDIO_STREAM_REROUTING:
+            return media::AudioStreamType::REROUTING;
+        case AUDIO_STREAM_PATCH:
+            return media::AudioStreamType::PATCH;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            return media::AudioStreamType::CALL_ASSISTANT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+        media::AudioSourceType aidl) {
+    switch (aidl) {
+        case media::AudioSourceType::INVALID:
+            // This value does not have an enum
+            return AUDIO_SOURCE_INVALID;
+        case media::AudioSourceType::DEFAULT:
+            return AUDIO_SOURCE_DEFAULT;
+        case media::AudioSourceType::MIC:
+            return AUDIO_SOURCE_MIC;
+        case media::AudioSourceType::VOICE_UPLINK:
+            return AUDIO_SOURCE_VOICE_UPLINK;
+        case media::AudioSourceType::VOICE_DOWNLINK:
+            return AUDIO_SOURCE_VOICE_DOWNLINK;
+        case media::AudioSourceType::VOICE_CALL:
+            return AUDIO_SOURCE_VOICE_CALL;
+        case media::AudioSourceType::CAMCORDER:
+            return AUDIO_SOURCE_CAMCORDER;
+        case media::AudioSourceType::VOICE_RECOGNITION:
+            return AUDIO_SOURCE_VOICE_RECOGNITION;
+        case media::AudioSourceType::VOICE_COMMUNICATION:
+            return AUDIO_SOURCE_VOICE_COMMUNICATION;
+        case media::AudioSourceType::REMOTE_SUBMIX:
+            return AUDIO_SOURCE_REMOTE_SUBMIX;
+        case media::AudioSourceType::UNPROCESSED:
+            return AUDIO_SOURCE_UNPROCESSED;
+        case media::AudioSourceType::VOICE_PERFORMANCE:
+            return AUDIO_SOURCE_VOICE_PERFORMANCE;
+        case media::AudioSourceType::ECHO_REFERENCE:
+            return AUDIO_SOURCE_ECHO_REFERENCE;
+        case media::AudioSourceType::FM_TUNER:
+            return AUDIO_SOURCE_FM_TUNER;
+        case media::AudioSourceType::HOTWORD:
+            return AUDIO_SOURCE_HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+        audio_source_t legacy) {
+    switch (legacy) {
+        case AUDIO_SOURCE_INVALID:
+            return media::AudioSourceType::INVALID;
+        case AUDIO_SOURCE_DEFAULT:
+            return media::AudioSourceType::DEFAULT;
+        case AUDIO_SOURCE_MIC:
+            return media::AudioSourceType::MIC;
+        case AUDIO_SOURCE_VOICE_UPLINK:
+            return media::AudioSourceType::VOICE_UPLINK;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            return media::AudioSourceType::VOICE_DOWNLINK;
+        case AUDIO_SOURCE_VOICE_CALL:
+            return media::AudioSourceType::VOICE_CALL;
+        case AUDIO_SOURCE_CAMCORDER:
+            return media::AudioSourceType::CAMCORDER;
+        case AUDIO_SOURCE_VOICE_RECOGNITION:
+            return media::AudioSourceType::VOICE_RECOGNITION;
+        case AUDIO_SOURCE_VOICE_COMMUNICATION:
+            return media::AudioSourceType::VOICE_COMMUNICATION;
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return media::AudioSourceType::REMOTE_SUBMIX;
+        case AUDIO_SOURCE_UNPROCESSED:
+            return media::AudioSourceType::UNPROCESSED;
+        case AUDIO_SOURCE_VOICE_PERFORMANCE:
+            return media::AudioSourceType::VOICE_PERFORMANCE;
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+            return media::AudioSourceType::ECHO_REFERENCE;
+        case AUDIO_SOURCE_FM_TUNER:
+            return media::AudioSourceType::FM_TUNER;
+        case AUDIO_SOURCE_HOTWORD:
+            return media::AudioSourceType::HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl) {
+    return convertReinterpret<audio_session_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+
+ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortConfigMixExtUseCase(
+        const media::AudioPortConfigMixExtUseCase& aidl, media::AudioPortRole role) {
+    audio_port_config_mix_ext_usecase legacy;
+
+    switch (role) {
+        case media::AudioPortRole::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+            return legacy;
+
+        case media::AudioPortRole::SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, stream))));
+            return legacy;
+
+        case media::AudioPortRole::SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(
+                    VALUE_OR_RETURN(UNION_GET(aidl, source))));
+            return legacy;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<media::AudioPortConfigMixExtUseCase> legacy2aidl_AudioPortConfigMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExtUseCase aidl;
+
+    switch (role) {
+        case AUDIO_PORT_ROLE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_ROLE_SOURCE:
+            // This is not a bug. A SOURCE role corresponds to the stream field.
+            UNION_SET(aidl, stream, VALUE_OR_RETURN(
+                    legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
+            return aidl;
+        case AUDIO_PORT_ROLE_SINK:
+            // This is not a bug. A SINK role corresponds to the source field.
+            UNION_SET(aidl, source,
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source)));
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role) {
+    audio_port_config_mix_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExtUseCase(aidl.usecase, role));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role) {
+    media::AudioPortConfigMixExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl.usecase = VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExtUseCase(legacy.usecase, role));
+    return aidl;
+}
+
+ConversionResult<audio_port_config_session_ext>
+aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+        const media::AudioPortConfigSessionExt& aidl) {
+    audio_port_config_session_ext legacy;
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfigSessionExt>
+legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+        const audio_port_config_session_ext& legacy) {
+    media::AudioPortConfigSessionExt aidl;
+    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortConfigExt(
+        const media::AudioPortConfigExt& aidl, media::AudioPortType type,
+        media::AudioPortRole role) {
+    audio_port_config_ext legacy;
+    switch (type) {
+        case media::AudioPortType::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+            return legacy;
+        case media::AudioPortType::DEVICE:
+            legacy.device = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, device))));
+            return legacy;
+        case media::AudioPortType::MIX:
+            legacy.mix = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigMixExt(VALUE_OR_RETURN(UNION_GET(aidl, mix)), role));
+            return legacy;
+        case media::AudioPortType::SESSION:
+            legacy.session = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+            return legacy;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<media::AudioPortConfigExt> legacy2aidl_AudioPortConfigExt(
+        const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role) {
+    media::AudioPortConfigExt aidl;
+
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_TYPE_DEVICE:
+            UNION_SET(aidl, device,
+                      VALUE_OR_RETURN(
+                        legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+                          legacy.device)));
+            return aidl;
+        case AUDIO_PORT_TYPE_MIX:
+            UNION_SET(aidl, mix,
+                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExt(legacy.mix, role)));
+            return aidl;
+        case AUDIO_PORT_TYPE_SESSION:
+            UNION_SET(aidl, session,
+                      VALUE_OR_RETURN(
+                        legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+                          legacy.session)));
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::AudioPortConfig& aidl) {
+    audio_port_config legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
+    legacy.config_mask = VALUE_OR_RETURN(aidl2legacy_int32_t_config_mask(aidl.configMask));
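+    // Only fields whose bit is set in configMask are populated below; the corresponding legacy
+    // fields are left untouched otherwise.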
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::SAMPLE_RATE)) {
+        legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sampleRate));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::CHANNEL_MASK)) {
+        legacy.channel_mask =
+                VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FORMAT)) {
+        legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::GAIN)) {
+        legacy.gain = VALUE_OR_RETURN(
+                aidl2legacy_AudioGainConfig_audio_gain_config(aidl.gain, aidl.role, aidl.type));
+    }
+    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FLAGS)) {
+        legacy.flags = VALUE_OR_RETURN(
+                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags, aidl.role, aidl.type));
+    }
+    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigExt(aidl.ext, aidl.type, aidl.role));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy) {
+    media::AudioPortConfig aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    aidl.configMask = VALUE_OR_RETURN(legacy2aidl_config_mask_int32_t(legacy.config_mask));
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+        aidl.channelMask =
+                VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        aidl.gain = VALUE_OR_RETURN(legacy2aidl_audio_gain_config_AudioGainConfig(
+                legacy.gain, legacy.role, legacy.type));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+        aidl.flags = VALUE_OR_RETURN(
+                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
+    }
+    aidl.ext =
+            VALUE_OR_RETURN(legacy2aidl_AudioPortConfigExt(legacy.ext, legacy.type, legacy.role));
+    return aidl;
+}
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+        const media::AudioPatch& aidl) {
+    struct audio_patch legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
+    legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
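+    // The legacy struct stores sinks and sources in fixed-size arrays, so reject patches that
+    // would not fit.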
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sinks; ++i) {
+        legacy.sinks[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+    }
+    legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < legacy.num_sources; ++i) {
+        legacy.sources[i] =
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const struct audio_patch& legacy) {
+    media::AudioPatch aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
+
+    if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (unsigned int i = 0; i < legacy.num_sinks; ++i) {
+        aidl.sinks.push_back(
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[i])));
+    }
+    if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    for (unsigned int i = 0; i < legacy.num_sources; ++i) {
+        aidl.sources.push_back(
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[i])));
+    }
+    return aidl;
+}
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+        const media::AudioIoDescriptor& aidl) {
+    sp<AudioIoDescriptor> legacy(new AudioIoDescriptor());
+    legacy->mIoHandle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
+    legacy->mPatch = VALUE_OR_RETURN(aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+    legacy->mSamplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
+    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy->mChannelMask =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy->mFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy->mFrameCountHAL = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
+    legacy->mLatency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
+    legacy->mPortId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+        const sp<AudioIoDescriptor>& legacy) {
+    media::AudioIoDescriptor aidl;
+    aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->mIoHandle));
+    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->mPatch));
+    aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mSamplingRate));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy->mFormat));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy->mChannelMask));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCount));
+    aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCountHAL));
+    aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mLatency));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy->mPortId));
+    return aidl;
+}
+
+ConversionResult<AudioClient> aidl2legacy_AudioClient_AudioClient(
+        const media::AudioClient& aidl) {
+    AudioClient legacy;
+    legacy.clientTid = VALUE_OR_RETURN(aidl2legacy_int32_t_pid_t(aidl.clientTid));
+    legacy.attributionSource = aidl.attributionSource;
+    return legacy;
+}
+
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
+        const AudioClient& legacy) {
+    media::AudioClient aidl;
+    aidl.clientTid = VALUE_OR_RETURN(legacy2aidl_pid_t_int32_t(legacy.clientTid));
+    aidl.attributionSource = legacy.attributionSource;
+    return aidl;
+}
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl) {
+    switch (aidl) {
+        case media::AudioContentType::UNKNOWN:
+            return AUDIO_CONTENT_TYPE_UNKNOWN;
+        case media::AudioContentType::SPEECH:
+            return AUDIO_CONTENT_TYPE_SPEECH;
+        case media::AudioContentType::MUSIC:
+            return AUDIO_CONTENT_TYPE_MUSIC;
+        case media::AudioContentType::MOVIE:
+            return AUDIO_CONTENT_TYPE_MOVIE;
+        case media::AudioContentType::SONIFICATION:
+            return AUDIO_CONTENT_TYPE_SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_CONTENT_TYPE_UNKNOWN:
+            return media::AudioContentType::UNKNOWN;
+        case AUDIO_CONTENT_TYPE_SPEECH:
+            return media::AudioContentType::SPEECH;
+        case AUDIO_CONTENT_TYPE_MUSIC:
+            return media::AudioContentType::MUSIC;
+        case AUDIO_CONTENT_TYPE_MOVIE:
+            return media::AudioContentType::MOVIE;
+        case AUDIO_CONTENT_TYPE_SONIFICATION:
+            return media::AudioContentType::SONIFICATION;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl) {
+    switch (aidl) {
+        case media::AudioUsage::UNKNOWN:
+            return AUDIO_USAGE_UNKNOWN;
+        case media::AudioUsage::MEDIA:
+            return AUDIO_USAGE_MEDIA;
+        case media::AudioUsage::VOICE_COMMUNICATION:
+            return AUDIO_USAGE_VOICE_COMMUNICATION;
+        case media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
+            return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+        case media::AudioUsage::ALARM:
+            return AUDIO_USAGE_ALARM;
+        case media::AudioUsage::NOTIFICATION:
+            return AUDIO_USAGE_NOTIFICATION;
+        case media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
+            return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
+        case media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
+        case media::AudioUsage::NOTIFICATION_EVENT:
+            return AUDIO_USAGE_NOTIFICATION_EVENT;
+        case media::AudioUsage::ASSISTANCE_ACCESSIBILITY:
+            return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+        case media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
+            return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
+        case media::AudioUsage::ASSISTANCE_SONIFICATION:
+            return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+        case media::AudioUsage::GAME:
+            return AUDIO_USAGE_GAME;
+        case media::AudioUsage::VIRTUAL_SOURCE:
+            return AUDIO_USAGE_VIRTUAL_SOURCE;
+        case media::AudioUsage::ASSISTANT:
+            return AUDIO_USAGE_ASSISTANT;
+        case media::AudioUsage::CALL_ASSISTANT:
+            return AUDIO_USAGE_CALL_ASSISTANT;
+        case media::AudioUsage::EMERGENCY:
+            return AUDIO_USAGE_EMERGENCY;
+        case media::AudioUsage::SAFETY:
+            return AUDIO_USAGE_SAFETY;
+        case media::AudioUsage::VEHICLE_STATUS:
+            return AUDIO_USAGE_VEHICLE_STATUS;
+        case media::AudioUsage::ANNOUNCEMENT:
+            return AUDIO_USAGE_ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
+    switch (legacy) {
+        case AUDIO_USAGE_UNKNOWN:
+            return media::AudioUsage::UNKNOWN;
+        case AUDIO_USAGE_MEDIA:
+            return media::AudioUsage::MEDIA;
+        case AUDIO_USAGE_VOICE_COMMUNICATION:
+            return media::AudioUsage::VOICE_COMMUNICATION;
+        case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+            return media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
+        case AUDIO_USAGE_ALARM:
+            return media::AudioUsage::ALARM;
+        case AUDIO_USAGE_NOTIFICATION:
+            return media::AudioUsage::NOTIFICATION;
+        case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+            return media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+            return media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED;
+        case AUDIO_USAGE_NOTIFICATION_EVENT:
+            return media::AudioUsage::NOTIFICATION_EVENT;
+        case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+            return media::AudioUsage::ASSISTANCE_ACCESSIBILITY;
+        case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+            return media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
+        case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+            return media::AudioUsage::ASSISTANCE_SONIFICATION;
+        case AUDIO_USAGE_GAME:
+            return media::AudioUsage::GAME;
+        case AUDIO_USAGE_VIRTUAL_SOURCE:
+            return media::AudioUsage::VIRTUAL_SOURCE;
+        case AUDIO_USAGE_ASSISTANT:
+            return media::AudioUsage::ASSISTANT;
+        case AUDIO_USAGE_CALL_ASSISTANT:
+            return media::AudioUsage::CALL_ASSISTANT;
+        case AUDIO_USAGE_EMERGENCY:
+            return media::AudioUsage::EMERGENCY;
+        case AUDIO_USAGE_SAFETY:
+            return media::AudioUsage::SAFETY;
+        case AUDIO_USAGE_VEHICLE_STATUS:
+            return media::AudioUsage::VEHICLE_STATUS;
+        case AUDIO_USAGE_ANNOUNCEMENT:
+            return media::AudioUsage::ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl) {
+    switch (aidl) {
+        case media::AudioFlag::AUDIBILITY_ENFORCED:
+            return AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        case media::AudioFlag::SECURE:
+            return AUDIO_FLAG_SECURE;
+        case media::AudioFlag::SCO:
+            return AUDIO_FLAG_SCO;
+        case media::AudioFlag::BEACON:
+            return AUDIO_FLAG_BEACON;
+        case media::AudioFlag::HW_AV_SYNC:
+            return AUDIO_FLAG_HW_AV_SYNC;
+        case media::AudioFlag::HW_HOTWORD:
+            return AUDIO_FLAG_HW_HOTWORD;
+        case media::AudioFlag::BYPASS_INTERRUPTION_POLICY:
+            return AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
+        case media::AudioFlag::BYPASS_MUTE:
+            return AUDIO_FLAG_BYPASS_MUTE;
+        case media::AudioFlag::LOW_LATENCY:
+            return AUDIO_FLAG_LOW_LATENCY;
+        case media::AudioFlag::DEEP_BUFFER:
+            return AUDIO_FLAG_DEEP_BUFFER;
+        case media::AudioFlag::NO_MEDIA_PROJECTION:
+            return AUDIO_FLAG_NO_MEDIA_PROJECTION;
+        case media::AudioFlag::MUTE_HAPTIC:
+            return AUDIO_FLAG_MUTE_HAPTIC;
+        case media::AudioFlag::NO_SYSTEM_CAPTURE:
+            return AUDIO_FLAG_NO_SYSTEM_CAPTURE;
+        case media::AudioFlag::CAPTURE_PRIVATE:
+            return AUDIO_FLAG_CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy) {
+    switch (legacy) {
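+        // AUDIO_FLAG_NONE denotes the absence of flags rather than a flag bit, so it has no
+        // single-flag AIDL counterpart.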
+        case AUDIO_FLAG_NONE:
+            return unexpected(BAD_VALUE);
+        case AUDIO_FLAG_AUDIBILITY_ENFORCED:
+            return media::AudioFlag::AUDIBILITY_ENFORCED;
+        case AUDIO_FLAG_SECURE:
+            return media::AudioFlag::SECURE;
+        case AUDIO_FLAG_SCO:
+            return media::AudioFlag::SCO;
+        case AUDIO_FLAG_BEACON:
+            return media::AudioFlag::BEACON;
+        case AUDIO_FLAG_HW_AV_SYNC:
+            return media::AudioFlag::HW_AV_SYNC;
+        case AUDIO_FLAG_HW_HOTWORD:
+            return media::AudioFlag::HW_HOTWORD;
+        case AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY:
+            return media::AudioFlag::BYPASS_INTERRUPTION_POLICY;
+        case AUDIO_FLAG_BYPASS_MUTE:
+            return media::AudioFlag::BYPASS_MUTE;
+        case AUDIO_FLAG_LOW_LATENCY:
+            return media::AudioFlag::LOW_LATENCY;
+        case AUDIO_FLAG_DEEP_BUFFER:
+            return media::AudioFlag::DEEP_BUFFER;
+        case AUDIO_FLAG_NO_MEDIA_PROJECTION:
+            return media::AudioFlag::NO_MEDIA_PROJECTION;
+        case AUDIO_FLAG_MUTE_HAPTIC:
+            return media::AudioFlag::MUTE_HAPTIC;
+        case AUDIO_FLAG_NO_SYSTEM_CAPTURE:
+            return media::AudioFlag::NO_SYSTEM_CAPTURE;
+        case AUDIO_FLAG_CAPTURE_PRIVATE:
+            return media::AudioFlag::CAPTURE_PRIVATE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl) {
+    return convertBitmask<audio_flags_mask_t, int32_t, audio_flags_mask_t, media::AudioFlag>(
+            aidl, aidl2legacy_AudioFlag_audio_flags_mask_t, indexToEnum_index<media::AudioFlag>,
+            enumToMask_bitmask<audio_flags_mask_t, audio_flags_mask_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy) {
+    return convertBitmask<int32_t, audio_flags_mask_t, media::AudioFlag, audio_flags_mask_t>(
+            legacy, legacy2aidl_audio_flags_mask_t_AudioFlag,
+            indexToEnum_bitmask<audio_flags_mask_t>,
+            enumToMask_index<int32_t, media::AudioFlag>);
+}
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl) {
+    audio_attributes_t legacy;
+    legacy.content_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.tags, legacy.tags, sizeof(legacy.tags)));
+    return legacy;
+}
+
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy) {
+    media::AudioAttributesInternal aidl;
+    aidl.contentType = VALUE_OR_RETURN(
+            legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl) {
+    switch (aidl) {
+        case media::AudioEncapsulationMode::NONE:
+            return AUDIO_ENCAPSULATION_MODE_NONE;
+        case media::AudioEncapsulationMode::ELEMENTARY_STREAM:
+            return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
+        case media::AudioEncapsulationMode::HANDLE:
+            return AUDIO_ENCAPSULATION_MODE_HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_MODE_NONE:
+            return media::AudioEncapsulationMode::NONE;
+        case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
+            return media::AudioEncapsulationMode::ELEMENTARY_STREAM;
+        case AUDIO_ENCAPSULATION_MODE_HANDLE:
+            return media::AudioEncapsulationMode::HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl) {
+    audio_offload_info_t legacy;
+    legacy.version = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.version));
+    legacy.size = sizeof(audio_offload_info_t);
+    audio_config_base_t config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+    legacy.sample_rate = config.sample_rate;
+    legacy.channel_mask = config.channel_mask;
+    legacy.format = config.format;
+    legacy.stream_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRate));
+    legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
+    legacy.has_video = aidl.hasVideo;
+    legacy.is_streaming = aidl.isStreaming;
+    legacy.bit_width = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitWidth));
+    legacy.offload_buffer_size = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.offloadBufferSize));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.encapsulation_mode = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(aidl.encapsulationMode));
+    legacy.content_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.contentId));
+    legacy.sync_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.syncId));
+    return legacy;
+}
+
+ConversionResult<media::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
+    media::AudioOffloadInfo aidl;
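+    // The legacy struct is versioned; its `size` field tells how much of it is actually valid,
+    // so check it before reading each group of fields.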
+    // Version 0.1 fields.
+    if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.version = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.version));
+    aidl.config.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.config.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.config.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    aidl.streamType = VALUE_OR_RETURN(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
+    aidl.bitRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
+    aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
+    aidl.hasVideo = legacy.has_video;
+    aidl.isStreaming = legacy.is_streaming;
+    aidl.bitWidth = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_width));
+    aidl.offloadBufferSize = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.offload_buffer_size));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+
+    // Version 0.2 fields.
+    if (legacy.version >= AUDIO_OFFLOAD_INFO_VERSION_0_2) {
+        if (legacy.size <
+            offsetof(audio_offload_info_t, sync_id) + sizeof(audio_offload_info_t::sync_id)) {
+            return unexpected(BAD_VALUE);
+        }
+        aidl.encapsulationMode = VALUE_OR_RETURN(
+                legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(
+                        legacy.encapsulation_mode));
+        aidl.contentId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.content_id));
+        aidl.syncId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.sync_id));
+    }
+    return aidl;
+}
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl) {
+    audio_config_t legacy;
+    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy.offload_info = VALUE_OR_RETURN(
+            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
+    legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
+    return legacy;
+}
+
+ConversionResult<media::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy) {
+    media::AudioConfig aidl;
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    aidl.offloadInfo = VALUE_OR_RETURN(
+            legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
+    return aidl;
+}
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl) {
+    audio_config_base_t legacy;
+    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    return legacy;
+}
+
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy) {
+    media::AudioConfigBase aidl;
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl) {
+    sp<IMemory> legacy;
+    if (!convertSharedFileRegionToIMemory(aidl, &legacy)) {
+        return unexpected(BAD_VALUE);
+    }
+    return legacy;
+}
+
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+    media::SharedFileRegion aidl;
+    if (!convertIMemoryToSharedFileRegion(legacy, &aidl)) {
+        return unexpected(BAD_VALUE);
+    }
+    return aidl;
+}
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl) {
+    sp<IMemory> legacy;
+    if (!convertNullableSharedFileRegionToIMemory(aidl, &legacy)) {
+        return unexpected(BAD_VALUE);
+    }
+    return legacy;
+}
+
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy) {
+    std::optional<media::SharedFileRegion> aidl;
+    if (!convertNullableIMemoryToSharedFileRegion(legacy, &aidl)) {
+        return unexpected(BAD_VALUE);
+    }
+    return aidl;
+}
+
+ConversionResult<AudioTimestamp>
+aidl2legacy_AudioTimestampInternal_AudioTimestamp(const media::AudioTimestampInternal& aidl) {
+    AudioTimestamp legacy;
+    legacy.mPosition = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.position));
+    legacy.mTime.tv_sec = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sec));
+    legacy.mTime.tv_nsec = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.nsec));
+    return legacy;
+}
+
+ConversionResult<media::AudioTimestampInternal>
+legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy) {
+    media::AudioTimestampInternal aidl;
+    aidl.position = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.mPosition));
+    aidl.sec = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.mTime.tv_sec));
+    aidl.nsec = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.mTime.tv_nsec));
+    return aidl;
+}
+
+ConversionResult<audio_uuid_t>
+aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl) {
+    audio_uuid_t legacy;
+    legacy.timeLow = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.timeLow));
+    legacy.timeMid = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeMid));
+    legacy.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeHiAndVersion));
+    legacy.clockSeq = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.clockSeq));
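+    // The node field is a fixed-size array on the legacy side; the AIDL vector must match its
+    // length exactly.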
+    if (aidl.node.size() != std::size(legacy.node)) {
+        return unexpected(BAD_VALUE);
+    }
+    std::copy(aidl.node.begin(), aidl.node.end(), legacy.node);
+    return legacy;
+}
+
+ConversionResult<media::AudioUuid>
+legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy) {
+    media::AudioUuid aidl;
+    aidl.timeLow = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.timeLow));
+    aidl.timeMid = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeMid));
+    aidl.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeHiAndVersion));
+    aidl.clockSeq = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.clockSeq));
+    std::copy(legacy.node, legacy.node + std::size(legacy.node), std::back_inserter(aidl.node));
+    return aidl;
+}
+
+ConversionResult<effect_descriptor_t>
+aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl) {
+    effect_descriptor_t legacy;
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.type));
+    legacy.uuid = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.uuid));
+    legacy.apiVersion = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.apiVersion));
+    legacy.flags = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.flags));
+    legacy.cpuLoad = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.cpuLoad));
+    legacy.memoryUsage = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.memoryUsage));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
+    RETURN_IF_ERROR(
+            aidl2legacy_string(aidl.implementor, legacy.implementor, sizeof(legacy.implementor)));
+    return legacy;
+}
+
+ConversionResult<media::EffectDescriptor>
+legacy2aidl_effect_descriptor_t_EffectDescriptor(const effect_descriptor_t& legacy) {
+    media::EffectDescriptor aidl;
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.type));
+    aidl.uuid = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.uuid));
+    aidl.apiVersion = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.apiVersion));
+    aidl.flags = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.flags));
+    aidl.cpuLoad = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.cpuLoad));
+    aidl.memoryUsage = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.memoryUsage));
+    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+    aidl.implementor = VALUE_OR_RETURN(
+            legacy2aidl_string(legacy.implementor, sizeof(legacy.implementor)));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+        media::AudioEncapsulationMetadataType aidl) {
+    switch (aidl) {
+        case media::AudioEncapsulationMetadataType::NONE:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_NONE;
+        case media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER;
+        case media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+        audio_encapsulation_metadata_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_NONE:
+            return media::AudioEncapsulationMetadataType::NONE;
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER:
+            return media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR:
+            return media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl) {
+    return convertBitmask<uint32_t,
+            int32_t,
+            audio_encapsulation_mode_t,
+            media::AudioEncapsulationMode>(
+            aidl, aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t,
+            indexToEnum_index<media::AudioEncapsulationMode>,
+            enumToMask_index<uint32_t, audio_encapsulation_mode_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy) {
+    return convertBitmask<int32_t,
+            uint32_t,
+            media::AudioEncapsulationMode,
+            audio_encapsulation_mode_t>(
+            legacy, legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode,
+            indexToEnum_index<audio_encapsulation_mode_t>,
+            enumToMask_index<int32_t, media::AudioEncapsulationMode>);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl) {
+    return convertBitmask<uint32_t,
+            int32_t,
+            audio_encapsulation_metadata_type_t,
+            media::AudioEncapsulationMetadataType>(
+            aidl, aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t,
+            indexToEnum_index<media::AudioEncapsulationMetadataType>,
+            enumToMask_index<uint32_t, audio_encapsulation_metadata_type_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy) {
+    return convertBitmask<int32_t,
+            uint32_t,
+            media::AudioEncapsulationMetadataType,
+            audio_encapsulation_metadata_type_t>(
+            legacy, legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType,
+            indexToEnum_index<audio_encapsulation_metadata_type_t>,
+            enumToMask_index<int32_t, media::AudioEncapsulationMetadataType>);
+}
+
+ConversionResult<audio_mix_latency_class_t>
+aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
+        media::AudioMixLatencyClass aidl) {
+    switch (aidl) {
+        case media::AudioMixLatencyClass::LOW:
+            return AUDIO_LATENCY_LOW;
+        case media::AudioMixLatencyClass::NORMAL:
+            return AUDIO_LATENCY_NORMAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMixLatencyClass>
+legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
+        audio_mix_latency_class_t legacy) {
+    switch (legacy) {
+        case AUDIO_LATENCY_LOW:
+            return media::AudioMixLatencyClass::LOW;
+        case AUDIO_LATENCY_NORMAL:
+            return media::AudioMixLatencyClass::NORMAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl) {
+    audio_port_device_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.device.type));
+    RETURN_IF_ERROR(
+            aidl2legacy_string(aidl.device.address, legacy.address, sizeof(legacy.address)));
+    legacy.encapsulation_modes = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMode_mask(aidl.encapsulationModes));
+    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(aidl.encapsulationMetadataTypes));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy) {
+    media::AudioPortDeviceExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.device.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
+    aidl.device.address = VALUE_OR_RETURN(
+            legacy2aidl_string(legacy.address, sizeof(legacy.address)));
+    aidl.encapsulationModes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
+    aidl.encapsulationMetadataTypes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
+    return aidl;
+}
+
+ConversionResult<audio_port_mix_ext>
+aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl) {
+    audio_port_mix_ext legacy;
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    legacy.latency_class = VALUE_OR_RETURN(
+            aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(aidl.latencyClass));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy) {
+    media::AudioPortMixExt aidl;
+    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl.latencyClass = VALUE_OR_RETURN(
+            legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(legacy.latency_class));
+    return aidl;
+}
+
+ConversionResult<audio_port_session_ext>
+aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl) {
+    audio_port_session_ext legacy;
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    return legacy;
+}
+
+ConversionResult<media::AudioPortSessionExt>
+legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy) {
+    media::AudioPortSessionExt aidl;
+    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_v7_ext = decltype(audio_port_v7::ext);
+
+ConversionResult<audio_port_v7_ext> aidl2legacy_AudioPortExt(
+        const media::AudioPortExt& aidl, media::AudioPortType type) {
+    audio_port_v7_ext legacy;
+    switch (type) {
+        case media::AudioPortType::NONE:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
+            return legacy;
+        case media::AudioPortType::DEVICE:
+            legacy.device = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, device))));
+            return legacy;
+        case media::AudioPortType::MIX:
+            legacy.mix = VALUE_OR_RETURN(
+                    aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, mix))));
+            return legacy;
+        case media::AudioPortType::SESSION:
+            legacy.session = VALUE_OR_RETURN(aidl2legacy_AudioPortSessionExt_audio_port_session_ext(
+                    VALUE_OR_RETURN(UNION_GET(aidl, session))));
+            return legacy;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<media::AudioPortExt> legacy2aidl_AudioPortExt(
+        const audio_port_v7_ext& legacy, audio_port_type_t type) {
+    media::AudioPortExt aidl;
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_TYPE_DEVICE:
+            UNION_SET(aidl, device,
+                      VALUE_OR_RETURN(
+                              legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy.device)));
+            return aidl;
+        case AUDIO_PORT_TYPE_MIX:
+            UNION_SET(aidl, mix,
+                      VALUE_OR_RETURN(legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy.mix)));
+            return aidl;
+        case AUDIO_PORT_TYPE_SESSION:
+            UNION_SET(aidl, session,
+                      VALUE_OR_RETURN(legacy2aidl_audio_port_session_ext_AudioPortSessionExt(
+                              legacy.session)));
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch makes a missing case above a compile-time error
+}
+
+ConversionResult<audio_profile>
+aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl) {
+    audio_profile legacy;
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+
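+    // The legacy struct stores these as fixed-size arrays plus a count, so reject AIDL vectors
+    // that would overflow them before converting element-wise.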
+    if (aidl.samplingRates.size() > std::size(legacy.sample_rates)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.samplingRates.begin(), aidl.samplingRates.end(), legacy.sample_rates,
+                         convertIntegral<int32_t, unsigned int>));
+    legacy.num_sample_rates = aidl.samplingRates.size();
+
+    if (aidl.channelMasks.size() > std::size(legacy.channel_masks)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
+                         aidl2legacy_int32_t_audio_channel_mask_t));
+    legacy.num_channel_masks = aidl.channelMasks.size();
+
+    legacy.encapsulation_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(aidl.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<media::AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy) {
+    media::AudioProfile aidl;
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+
+    if (legacy.num_sample_rates > std::size(legacy.sample_rates)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.sample_rates, legacy.sample_rates + legacy.num_sample_rates,
+                         std::back_inserter(aidl.samplingRates),
+                         convertIntegral<unsigned int, int32_t>));
+
+    if (legacy.num_channel_masks > std::size(legacy.channel_masks)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
+                         std::back_inserter(aidl.channelMasks),
+                         legacy2aidl_audio_channel_mask_t_int32_t));
+
+    aidl.encapsulationType = VALUE_OR_RETURN(
+            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
+    return aidl;
+}
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl) {
+    audio_gain legacy;
+    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.min_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.minValue));
+    legacy.max_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.maxValue));
+    legacy.default_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.defaultValue));
+    legacy.step_value = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.stepValue));
+    legacy.min_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.minRampMs));
+    legacy.max_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.maxRampMs));
+    return legacy;
+}
+
+ConversionResult<media::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy) {
+    media::AudioGain aidl;
+    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+    aidl.minValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_value));
+    aidl.maxValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_value));
+    aidl.defaultValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.default_value));
+    aidl.stepValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.step_value));
+    aidl.minRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_ramp_ms));
+    aidl.maxRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_ramp_ms));
+    return aidl;
+}
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
+    audio_port_v7 legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
+
+    if (aidl.profiles.size() > std::size(legacy.audio_profiles)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(convertRange(aidl.profiles.begin(), aidl.profiles.end(), legacy.audio_profiles,
+                                 aidl2legacy_AudioProfile_audio_profile));
+    legacy.num_audio_profiles = aidl.profiles.size();
+
+    if (aidl.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.extraAudioDescriptors.begin(), aidl.extraAudioDescriptors.end(),
+                         legacy.extra_audio_descriptors,
+                         aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
+    legacy.num_extra_audio_descriptors = aidl.extraAudioDescriptors.size();
+
+    if (aidl.gains.size() > std::size(legacy.gains)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(convertRange(aidl.gains.begin(), aidl.gains.end(), legacy.gains,
+                                 aidl2legacy_AudioGain_audio_gain));
+    legacy.num_gains = aidl.gains.size();
+
+    legacy.active_config = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortConfig_audio_port_config(aidl.activeConfig));
+    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortExt(aidl.ext, aidl.type));
+    return legacy;
+}
+
+ConversionResult<media::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
+    media::AudioPort aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+
+    if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
+                         std::back_inserter(aidl.profiles),
+                         legacy2aidl_audio_profile_AudioProfile));
+
+    if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.extra_audio_descriptors,
+                    legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
+                    std::back_inserter(aidl.extraAudioDescriptors),
+                    legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
+
+    if (legacy.num_gains > std::size(legacy.gains)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.gains, legacy.gains + legacy.num_gains,
+                         std::back_inserter(aidl.gains),
+                         legacy2aidl_audio_gain_AudioGain));
+
+    aidl.activeConfig = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_config_AudioPortConfig(legacy.active_config));
+    aidl.ext = VALUE_OR_RETURN(legacy2aidl_AudioPortExt(legacy.ext, legacy.type));
+    return aidl;
+}
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl) {
+    switch (aidl) {
+        case media::AudioMode::INVALID:
+            return AUDIO_MODE_INVALID;
+        case media::AudioMode::CURRENT:
+            return AUDIO_MODE_CURRENT;
+        case media::AudioMode::NORMAL:
+            return AUDIO_MODE_NORMAL;
+        case media::AudioMode::RINGTONE:
+            return AUDIO_MODE_RINGTONE;
+        case media::AudioMode::IN_CALL:
+            return AUDIO_MODE_IN_CALL;
+        case media::AudioMode::IN_COMMUNICATION:
+            return AUDIO_MODE_IN_COMMUNICATION;
+        case media::AudioMode::CALL_SCREEN:
+            return AUDIO_MODE_CALL_SCREEN;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_MODE_INVALID:
+            return media::AudioMode::INVALID;
+        case AUDIO_MODE_CURRENT:
+            return media::AudioMode::CURRENT;
+        case AUDIO_MODE_NORMAL:
+            return media::AudioMode::NORMAL;
+        case AUDIO_MODE_RINGTONE:
+            return media::AudioMode::RINGTONE;
+        case AUDIO_MODE_IN_CALL:
+            return media::AudioMode::IN_CALL;
+        case AUDIO_MODE_IN_COMMUNICATION:
+            return media::AudioMode::IN_COMMUNICATION;
+        case AUDIO_MODE_CALL_SCREEN:
+            return media::AudioMode::CALL_SCREEN;
+        case AUDIO_MODE_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_unique_id_use_t>
+aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl) {
+    switch (aidl) {
+        case media::AudioUniqueIdUse::UNSPECIFIED:
+            return AUDIO_UNIQUE_ID_USE_UNSPECIFIED;
+        case media::AudioUniqueIdUse::SESSION:
+            return AUDIO_UNIQUE_ID_USE_SESSION;
+        case media::AudioUniqueIdUse::MODULE:
+            return AUDIO_UNIQUE_ID_USE_MODULE;
+        case media::AudioUniqueIdUse::EFFECT:
+            return AUDIO_UNIQUE_ID_USE_EFFECT;
+        case media::AudioUniqueIdUse::PATCH:
+            return AUDIO_UNIQUE_ID_USE_PATCH;
+        case media::AudioUniqueIdUse::OUTPUT:
+            return AUDIO_UNIQUE_ID_USE_OUTPUT;
+        case media::AudioUniqueIdUse::INPUT:
+            return AUDIO_UNIQUE_ID_USE_INPUT;
+        case media::AudioUniqueIdUse::CLIENT:
+            return AUDIO_UNIQUE_ID_USE_CLIENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioUniqueIdUse>
+legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(audio_unique_id_use_t legacy) {
+    switch (legacy) {
+        case AUDIO_UNIQUE_ID_USE_UNSPECIFIED:
+            return media::AudioUniqueIdUse::UNSPECIFIED;
+        case AUDIO_UNIQUE_ID_USE_SESSION:
+            return media::AudioUniqueIdUse::SESSION;
+        case AUDIO_UNIQUE_ID_USE_MODULE:
+            return media::AudioUniqueIdUse::MODULE;
+        case AUDIO_UNIQUE_ID_USE_EFFECT:
+            return media::AudioUniqueIdUse::EFFECT;
+        case AUDIO_UNIQUE_ID_USE_PATCH:
+            return media::AudioUniqueIdUse::PATCH;
+        case AUDIO_UNIQUE_ID_USE_OUTPUT:
+            return media::AudioUniqueIdUse::OUTPUT;
+        case AUDIO_UNIQUE_ID_USE_INPUT:
+            return media::AudioUniqueIdUse::INPUT;
+        case AUDIO_UNIQUE_ID_USE_CLIENT:
+            return media::AudioUniqueIdUse::CLIENT;
+        case AUDIO_UNIQUE_ID_USE_MAX:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<volume_group_t>
+aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
+    return convertReinterpret<volume_group_t>(aidl);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<product_strategy_t>
+aidl2legacy_int32_t_product_strategy_t(int32_t aidl) {
+    return convertReinterpret<product_strategy_t>(aidl);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_product_strategy_t_int32_t(product_strategy_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::AudioDualMonoMode aidl) {
+    switch (aidl) {
+        case media::AudioDualMonoMode::OFF:
+            return AUDIO_DUAL_MONO_MODE_OFF;
+        case media::AudioDualMonoMode::LR:
+            return AUDIO_DUAL_MONO_MODE_LR;
+        case media::AudioDualMonoMode::LL:
+            return AUDIO_DUAL_MONO_MODE_LL;
+        case media::AudioDualMonoMode::RR:
+            return AUDIO_DUAL_MONO_MODE_RR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_DUAL_MONO_MODE_OFF:
+            return media::AudioDualMonoMode::OFF;
+        case AUDIO_DUAL_MONO_MODE_LR:
+            return media::AudioDualMonoMode::LR;
+        case AUDIO_DUAL_MONO_MODE_LL:
+            return media::AudioDualMonoMode::LL;
+        case AUDIO_DUAL_MONO_MODE_RR:
+            return media::AudioDualMonoMode::RR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(int32_t aidl) {
+    return convertReinterpret<audio_timestretch_fallback_mode_t>(aidl);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(audio_timestretch_fallback_mode_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(int32_t aidl) {
+    return convertReinterpret<audio_timestretch_stretch_mode_t>(aidl);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(audio_timestretch_stretch_mode_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(const media::AudioPlaybackRate& aidl) {
+    audio_playback_rate_t legacy;
+    legacy.mSpeed = aidl.speed;
+    legacy.mPitch = aidl.pitch;
+    legacy.mFallbackMode = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(aidl.fallbackMode));
+    legacy.mStretchMode = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(aidl.stretchMode));
+    return legacy;
+}
+
+ConversionResult<media::AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy) {
+    media::AudioPlaybackRate aidl;
+    aidl.speed = legacy.mSpeed;
+    aidl.pitch = legacy.mPitch;
+    aidl.fallbackMode = VALUE_OR_RETURN(
+            legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(legacy.mFallbackMode));
+    aidl.stretchMode = VALUE_OR_RETURN(
+            legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(legacy.mStretchMode));
+    return aidl;
+}
+
+ConversionResult<audio_standard_t>
+aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl) {
+    switch (aidl) {
+        case media::AudioStandard::NONE:
+            return AUDIO_STANDARD_NONE;
+        case media::AudioStandard::EDID:
+            return AUDIO_STANDARD_EDID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioStandard>
+legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy) {
+    switch (legacy) {
+        case AUDIO_STANDARD_NONE:
+            return media::AudioStandard::NONE;
+        case AUDIO_STANDARD_EDID:
+            return media::AudioStandard::EDID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::ExtraAudioDescriptor& aidl) {
+    audio_extra_audio_descriptor legacy;
+    legacy.standard = VALUE_OR_RETURN(aidl2legacy_AudioStandard_audio_standard_t(aidl.standard));
+    if (aidl.audioDescriptor.size() > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    legacy.descriptor_length = aidl.audioDescriptor.size();
+    std::copy(aidl.audioDescriptor.begin(), aidl.audioDescriptor.end(),
+              std::begin(legacy.descriptor));
+    legacy.encapsulation_type =
+            VALUE_OR_RETURN(aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+                    aidl.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<media::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy) {
+    media::ExtraAudioDescriptor aidl;
+    aidl.standard = VALUE_OR_RETURN(legacy2aidl_audio_standard_t_AudioStandard(legacy.standard));
+    if (legacy.descriptor_length > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.audioDescriptor.resize(legacy.descriptor_length);
+    std::copy(legacy.descriptor, legacy.descriptor + legacy.descriptor_length,
+              aidl.audioDescriptor.begin());
+    aidl.encapsulationType =
+            VALUE_OR_RETURN(legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::AudioEncapsulationType& aidl) {
+    switch (aidl) {
+        case media::AudioEncapsulationType::NONE:
+            return AUDIO_ENCAPSULATION_TYPE_NONE;
+        case media::AudioEncapsulationType::IEC61937:
+            return AUDIO_ENCAPSULATION_TYPE_IEC61937;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_TYPE_NONE:
+            return media::AudioEncapsulationType::NONE;
+        case AUDIO_ENCAPSULATION_TYPE_IEC61937:
+            return media::AudioEncapsulationType::IEC61937;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<TrackSecondaryOutputInfoPair>
+aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair(
+        const media::TrackSecondaryOutputInfo& aidl) {
+    TrackSecondaryOutputInfoPair trackSecondaryOutputInfoPair;
+    trackSecondaryOutputInfoPair.first =
+            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    trackSecondaryOutputInfoPair.second =
+            VALUE_OR_RETURN(convertContainer<std::vector<audio_port_handle_t>>(
+                    aidl.secondaryOutputIds, aidl2legacy_int32_t_audio_io_handle_t));
+    return trackSecondaryOutputInfoPair;
+}
+
+ConversionResult<media::TrackSecondaryOutputInfo>
+legacy2aidl_TrackSecondaryOutputInfoPair_TrackSecondaryOutputInfo(
+        const TrackSecondaryOutputInfoPair& legacy) {
+    media::TrackSecondaryOutputInfo trackSecondaryOutputInfo;
+    trackSecondaryOutputInfo.portId =
+            VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.first));
+    trackSecondaryOutputInfo.secondaryOutputIds =
+            VALUE_OR_RETURN(convertContainer<std::vector<int32_t>>(
+                    legacy.second, legacy2aidl_audio_io_handle_t_int32_t));
+    return trackSecondaryOutputInfo;
+}
+
+}  // namespace android
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 0c40cbb..9c307ff 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,7 +1,17 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudioclient_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    host_supported: true,
 
     header_libs: [
         "libaudiofoundation_headers",
@@ -12,6 +22,21 @@
     export_header_lib_headers: [
         "libaudiofoundation_headers",
     ],
+    static_libs: [
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "av-types-aidl-cpp",
+    ],
+    export_static_lib_headers: [
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "av-types-aidl-cpp",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
@@ -21,10 +46,16 @@
         "AudioPolicy.cpp",
         "AudioProductStrategy.cpp",
         "AudioVolumeGroup.cpp",
+        "PolicyAidlConversion.cpp"
     ],
     shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
         "libaudiofoundation",
+        "libaudioclient_aidl_conversion",
         "libaudioutils",
         "libbinder",
         "libcutils",
@@ -38,18 +69,25 @@
     include_dirs: ["system/media/audio_utils/include"],
     export_include_dirs: ["include"],
     export_shared_lib_headers: [
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "libaudiofoundation",
+        "libaudioclient_aidl_conversion",
     ],
+    header_libs: ["libaudioclient_headers"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libaudioclient",
 
     aidl: {
         export_aidl_headers: true,
         local_include_dirs: ["aidl"],
         include_dirs: [
-            "frameworks/av/media/libaudioclient/aidl",
+            "frameworks/av/aidl",
         ],
     },
 
@@ -57,8 +95,6 @@
         // AIDL files for audioclient interfaces
         // The headers for these interfaces will be available to any modules that
         // include libaudioclient, at the path "aidl/package/path/BnFoo.h"
-        ":libaudioclient_aidl_callback",
-        ":libaudioclient_aidl_private",
         ":libaudioclient_aidl",
 
         "AudioEffect.cpp",
@@ -67,19 +103,19 @@
         "AudioTrack.cpp",
         "AudioTrackShared.cpp",
         "IAudioFlinger.cpp",
-        "IAudioFlingerClient.cpp",
-        "IAudioPolicyService.cpp",
-        "IAudioPolicyServiceClient.cpp",
-        "IAudioTrack.cpp",
-        "IEffect.cpp",
-        "IEffectClient.cpp",
         "ToneGenerator.cpp",
         "PlayerBase.cpp",
         "RecordingActivityTracker.cpp",
         "TrackPlayerBase.cpp",
     ],
     shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudioutils",
         "libaudiopolicy",
@@ -93,16 +129,24 @@
         "libmediautils",
         "libnblog",
         "libprocessgroup",
+        "libshmemcompat",
         "libutils",
         "libvibrator",
+        "framework-permission-aidl-cpp",
     ],
-    export_shared_lib_headers: ["libbinder"],
+    export_shared_lib_headers: [
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libbinder",
+    ],
 
     include_dirs: [
         "frameworks/av/media/libnbaio/include_mono/",
     ],
     local_include_dirs: [
-        "include/media", "aidl"
+        "include/media",
+        "aidl",
     ],
     header_libs: [
         "libaudioclient_headers",
@@ -110,10 +154,16 @@
         "libmedia_headers",
     ],
     export_header_lib_headers: ["libaudioclient_headers"],
+    export_static_lib_headers: [
+        "effect-aidl-cpp",
+        "shared-file-region-aidl-cpp",
+    ],
 
-    // for memory heap analysis
     static_libs: [
+        "effect-aidl-cpp",
+        // for memory heap analysis
         "libc_malloc_debug_backtrace",
+        "shared-file-region-aidl-cpp",
     ],
     cflags: [
         "-Wall",
@@ -121,13 +171,91 @@
         "-Wno-error=deprecated-declarations",
     ],
     sanitize: {
-        misc_undefined : [
+        misc_undefined: [
             "unsigned-integer-overflow",
             "signed-integer-overflow",
         ],
     },
 }
 
+// This is intended for clients that need to include AidlConversionUtil.h without dragging in a
+// lot of extra dependencies.
+cc_library_headers {
+    name: "libaudioclient_aidl_conversion_util",
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    min_sdk_version: "29",
+    export_include_dirs: [
+        "include",
+    ],
+    header_libs: [
+        "libbase_headers",
+    ],
+    export_header_lib_headers: [
+        "libbase_headers",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth.updatable",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_library {
+    name: "libaudioclient_aidl_conversion",
+    srcs: ["AidlConversion.cpp"],
+    export_include_dirs: ["include"],
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    min_sdk_version: "29",
+    header_libs: [
+        "libaudioclient_aidl_conversion_util",
+        "libaudio_system_headers",
+    ],
+    export_header_lib_headers: [
+        "libaudioclient_aidl_conversion_util",
+    ],
+    shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libshmemcompat",
+        "libutils",
+        "shared-file-region-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "audioclient-types-aidl-cpp",
+        "libbase",
+        "shared-file-region-aidl-cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 // AIDL interface between libaudioclient and framework.jar
 filegroup {
     name: "libaudioclient_aidl",
@@ -137,30 +265,221 @@
     path: "aidl",
 }
 
-// Used to strip the "aidl/" from the path, so the build system can predict the
-// output filename.
-filegroup {
-    name: "libaudioclient_aidl_private",
-    srcs: [
-        "aidl/android/media/IAudioRecord.aidl",
-    ],
-    path: "aidl",
-}
-
-// AIDL interface for audio track callback
-filegroup {
-    name: "libaudioclient_aidl_callback",
-    srcs: [
-        "aidl/android/media/IAudioTrackCallback.aidl",
-    ],
-    path: "aidl",
-}
-
 aidl_interface {
     name: "capture_state_listener-aidl",
     unstable: true,
     local_include_dir: "aidl",
+    host_supported: true,
+    double_loadable: true,
+    vendor_available: true,
     srcs: [
         "aidl/android/media/ICaptureStateListener.aidl",
     ],
 }
+
+aidl_interface {
+    name: "effect-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    host_supported: true,
+    double_loadable: true,
+    vendor_available: true,
+    srcs: [
+        "aidl/android/media/IEffect.aidl",
+        "aidl/android/media/IEffectClient.aidl",
+    ],
+    imports: [
+        "shared-file-region-aidl",
+    ],
+}
+
+aidl_interface {
+    name: "audioclient-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/AudioAttributesInternal.aidl",
+        "aidl/android/media/AudioClient.aidl",
+        "aidl/android/media/AudioConfig.aidl",
+        "aidl/android/media/AudioConfigBase.aidl",
+        "aidl/android/media/AudioContentType.aidl",
+        "aidl/android/media/AudioDevice.aidl",
+        "aidl/android/media/AudioDualMonoMode.aidl",
+        "aidl/android/media/AudioEncapsulationMode.aidl",
+        "aidl/android/media/AudioEncapsulationMetadataType.aidl",
+        "aidl/android/media/AudioEncapsulationType.aidl",
+        "aidl/android/media/AudioFlag.aidl",
+        "aidl/android/media/AudioGain.aidl",
+        "aidl/android/media/AudioGainConfig.aidl",
+        "aidl/android/media/AudioGainMode.aidl",
+        "aidl/android/media/AudioInputFlags.aidl",
+        "aidl/android/media/AudioIoConfigEvent.aidl",
+        "aidl/android/media/AudioIoDescriptor.aidl",
+        "aidl/android/media/AudioIoFlags.aidl",
+        "aidl/android/media/AudioMixLatencyClass.aidl",
+        "aidl/android/media/AudioMode.aidl",
+        "aidl/android/media/AudioOffloadInfo.aidl",
+        "aidl/android/media/AudioOutputFlags.aidl",
+        "aidl/android/media/AudioPatch.aidl",
+        "aidl/android/media/AudioPlaybackRate.aidl",
+        "aidl/android/media/AudioPort.aidl",
+        "aidl/android/media/AudioPortConfig.aidl",
+        "aidl/android/media/AudioPortConfigType.aidl",
+        "aidl/android/media/AudioPortConfigDeviceExt.aidl",
+        "aidl/android/media/AudioPortConfigExt.aidl",
+        "aidl/android/media/AudioPortConfigMixExt.aidl",
+        "aidl/android/media/AudioPortConfigMixExtUseCase.aidl",
+        "aidl/android/media/AudioPortConfigSessionExt.aidl",
+        "aidl/android/media/AudioPortDeviceExt.aidl",
+        "aidl/android/media/AudioPortExt.aidl",
+        "aidl/android/media/AudioPortMixExt.aidl",
+        "aidl/android/media/AudioPortRole.aidl",
+        "aidl/android/media/AudioPortSessionExt.aidl",
+        "aidl/android/media/AudioPortType.aidl",
+        "aidl/android/media/AudioProfile.aidl",
+        "aidl/android/media/AudioSourceType.aidl",
+        "aidl/android/media/AudioStandard.aidl",
+        "aidl/android/media/AudioStreamType.aidl",
+        "aidl/android/media/AudioTimestampInternal.aidl",
+        "aidl/android/media/AudioUniqueIdUse.aidl",
+        "aidl/android/media/AudioUsage.aidl",
+        "aidl/android/media/AudioUuid.aidl",
+        "aidl/android/media/AudioVibratorInfo.aidl",
+        "aidl/android/media/EffectDescriptor.aidl",
+        "aidl/android/media/ExtraAudioDescriptor.aidl",
+        "aidl/android/media/TrackSecondaryOutputInfo.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "framework-permission-aidl",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
+
+aidl_interface {
+    name: "audiopolicy-types-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/AudioAttributesEx.aidl",
+        "aidl/android/media/AudioMix.aidl",
+        "aidl/android/media/AudioMixCallbackFlag.aidl",
+        "aidl/android/media/AudioMixMatchCriterion.aidl",
+        "aidl/android/media/AudioMixMatchCriterionValue.aidl",
+        "aidl/android/media/AudioMixRouteFlag.aidl",
+        "aidl/android/media/AudioMixType.aidl",
+        "aidl/android/media/AudioOffloadMode.aidl",
+        "aidl/android/media/AudioPolicyDeviceState.aidl",
+        "aidl/android/media/AudioPolicyForceUse.aidl",
+        "aidl/android/media/AudioPolicyForcedConfig.aidl",
+        "aidl/android/media/AudioProductStrategy.aidl",
+        "aidl/android/media/AudioVolumeGroup.aidl",
+        "aidl/android/media/DeviceRole.aidl",
+        "aidl/android/media/SoundTriggerSession.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "audioclient-types-aidl",
+    ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
+
+aidl_interface {
+    name: "audioflinger-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    host_supported: true,
+    vendor_available: true,
+    srcs: [
+        "aidl/android/media/CreateEffectRequest.aidl",
+        "aidl/android/media/CreateEffectResponse.aidl",
+        "aidl/android/media/CreateRecordRequest.aidl",
+        "aidl/android/media/CreateRecordResponse.aidl",
+        "aidl/android/media/CreateTrackRequest.aidl",
+        "aidl/android/media/CreateTrackResponse.aidl",
+        "aidl/android/media/OpenInputRequest.aidl",
+        "aidl/android/media/OpenInputResponse.aidl",
+        "aidl/android/media/OpenOutputRequest.aidl",
+        "aidl/android/media/OpenOutputResponse.aidl",
+        "aidl/android/media/RenderPosition.aidl",
+
+        "aidl/android/media/IAudioFlingerService.aidl",
+        "aidl/android/media/IAudioFlingerClient.aidl",
+        "aidl/android/media/IAudioRecord.aidl",
+        "aidl/android/media/IAudioTrack.aidl",
+        "aidl/android/media/IAudioTrackCallback.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "audioclient-types-aidl",
+        "av-types-aidl",
+        "effect-aidl",
+        "shared-file-region-aidl",
+        "framework-permission-aidl",
+    ],
+    double_loadable: true,
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
+
+aidl_interface {
+    name: "audiopolicy-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    host_supported: true,
+    vendor_available: true,
+    srcs: [
+        "aidl/android/media/GetInputForAttrResponse.aidl",
+        "aidl/android/media/GetOutputForAttrResponse.aidl",
+        "aidl/android/media/Int.aidl",
+        "aidl/android/media/RecordClientInfo.aidl",
+        "aidl/android/media/IAudioPolicyService.aidl",
+        "aidl/android/media/IAudioPolicyServiceClient.aidl",
+    ],
+    imports: [
+        "audio_common-aidl",
+        "audioclient-types-aidl",
+        "audiopolicy-types-aidl",
+        "capture_state_listener-aidl",
+        "framework-permission-aidl",
+    ],
+
+    double_loadable: true,
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+    },
+}
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/AudioAttributes.cpp
index 1ee6930..83bf5a7 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/AudioAttributes.cpp
@@ -20,48 +20,46 @@
 
 #include <binder/Parcel.h>
 
+#include <media/AidlConversion.h>
 #include <media/AudioAttributes.h>
+#include <media/PolicyAidlConversion.h>
+
+#define RETURN_STATUS_IF_ERROR(x) \
+    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
 
 namespace android {
 
-status_t AudioAttributes::readFromParcel(const Parcel *parcel)
-{
-    status_t ret = NO_ERROR;
-    mAttributes.content_type = static_cast<audio_content_type_t>(parcel->readInt32());
-    mAttributes.usage = static_cast<audio_usage_t>(parcel->readInt32());
-    mAttributes.source = static_cast<audio_source_t>(parcel->readInt32());
-    mAttributes.flags = static_cast<audio_flags_mask_t>(parcel->readInt32());
-    const bool hasFlattenedTag = (parcel->readInt32() == 1);
-    if (hasFlattenedTag) {
-        std::string tags;
-        ret = parcel->readUtf8FromUtf16(&tags);
-        if (ret != NO_ERROR) {
-            return ret;
-        }
-        std::strncpy(mAttributes.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
-    } else {
-        strcpy(mAttributes.tags, "");
-    }
-    mStreamType = static_cast<audio_stream_type_t>(parcel->readInt32());
-    mGroupId = static_cast<volume_group_t>(parcel->readUint32());
-    return NO_ERROR;
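+// Parceling now round-trips through the AIDL-generated media::AudioAttributesEx type instead of
+// hand-written field-by-field (de)serialization.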
+status_t AudioAttributes::readFromParcel(const Parcel* parcel) {
+    media::AudioAttributesEx aidl;
+    RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
+    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_AudioAttributes(aidl));
+    return OK;
 }
 
-status_t AudioAttributes::writeToParcel(Parcel *parcel) const
-{
-    parcel->writeInt32(static_cast<int32_t>(mAttributes.content_type));
-    parcel->writeInt32(static_cast<int32_t>(mAttributes.usage));
-    parcel->writeInt32(static_cast<int32_t>(mAttributes.source));
-    parcel->writeInt32(static_cast<int32_t>(mAttributes.flags));
-    if (strlen(mAttributes.tags) == 0) {
-        parcel->writeInt32(0);
-    } else {
-        parcel->writeInt32(1);
-        parcel->writeUtf8AsUtf16(mAttributes.tags);
-    }
-    parcel->writeInt32(static_cast<int32_t>(mStreamType));
-    parcel->writeUint32(static_cast<uint32_t>(mGroupId));
-    return NO_ERROR;
+status_t AudioAttributes::writeToParcel(Parcel* parcel) const {
+    media::AudioAttributesEx aidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioAttributes_AudioAttributesEx(*this));
+    return aidl.writeToParcel(parcel);
+}
+
+ConversionResult<media::AudioAttributesEx>
+legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy) {
+    media::AudioAttributesEx aidl;
+    aidl.attributes = VALUE_OR_RETURN(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(legacy.getAttributes()));
+    aidl.streamType = VALUE_OR_RETURN(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.getStreamType()));
+    aidl.groupId = VALUE_OR_RETURN(legacy2aidl_volume_group_t_int32_t(legacy.getGroupId()));
+    return aidl;
+}
+
+ConversionResult<AudioAttributes>
+aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl) {
+    return AudioAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
+                           VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+                                   aidl.streamType)),
+                           VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(
+                                   aidl.attributes)));
 }
 
 } // namespace android
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 3ead6cb..6ad5483 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -23,77 +23,45 @@
 #include <sys/types.h>
 #include <limits.h>
 
-#include <private/media/AudioEffectShared.h>
-#include <media/AudioEffect.h>
-
-#include <utils/Log.h>
+#include <android/media/IAudioPolicyService.h>
 #include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
+#include <media/AudioEffect.h>
+#include <media/PolicyAidlConversion.h>
+#include <media/ShmemCompat.h>
+#include <private/media/AudioEffectShared.h>
+#include <utils/Log.h>
 
-
+#define RETURN_STATUS_IF_ERROR(x)    \
+    {                                \
+        auto _tmp = (x);             \
+        if (_tmp != OK) return _tmp; \
+    }
 
 namespace android {
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
+using media::IAudioPolicyService;
+
+namespace {
+
+// Copy from a raw pointer + size into a vector of bytes.
+void appendToBuffer(const void* data,
+                    size_t size,
+                    std::vector<uint8_t>* buffer) {
+    const uint8_t* p = reinterpret_cast<const uint8_t*>(data);
+    buffer->insert(buffer->end(), p, p + size);
+}
+
+}  // namespace
 
 // ---------------------------------------------------------------------------
 
-AudioEffect::AudioEffect(const String16& opPackageName)
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
+AudioEffect::AudioEffect(const android::content::AttributionSourceState& attributionSource)
+    : mClientAttributionSource(attributionSource)
 {
 }
 
-
-AudioEffect::AudioEffect(const effect_uuid_t *type,
-                const String16& opPackageName,
-                const effect_uuid_t *uuid,
-                int32_t priority,
-                effect_callback_t cbf,
-                void* user,
-                audio_session_t sessionId,
-                audio_io_handle_t io,
-                const AudioDeviceTypeAddr& device,
-                bool probe
-                )
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
-    AutoMutex lock(mConstructLock);
-    mStatus = set(type, uuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
-AudioEffect::AudioEffect(const char *typeStr,
-                const String16& opPackageName,
-                const char *uuidStr,
-                int32_t priority,
-                effect_callback_t cbf,
-                void* user,
-                audio_session_t sessionId,
-                audio_io_handle_t io,
-                const AudioDeviceTypeAddr& device,
-                bool probe
-                )
-    : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
-{
-    effect_uuid_t type;
-    effect_uuid_t *pType = NULL;
-    effect_uuid_t uuid;
-    effect_uuid_t *pUuid = NULL;
-
-    ALOGV("Constructor string\n - type: %s\n - uuid: %s", typeStr, uuidStr);
-
-    if (typeStr != NULL) {
-        if (stringToGuid(typeStr, &type) == NO_ERROR) {
-            pType = &type;
-        }
-    }
-
-    if (uuidStr != NULL) {
-        if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
-            pUuid = &uuid;
-        }
-    }
-
-    AutoMutex lock(mConstructLock);
-    mStatus = set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
-}
-
 status_t AudioEffect::set(const effect_uuid_t *type,
                 const effect_uuid_t *uuid,
                 int32_t priority,
@@ -104,7 +72,7 @@
                 const AudioDeviceTypeAddr& device,
                 bool probe)
 {
-    sp<IEffect> iEffect;
+    sp<media::IEffect> iEffect;
     sp<IMemory> cblk;
     int enabled;
 
@@ -139,13 +107,35 @@
     mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
     mDescriptor.uuid = *(uuid != NULL ? uuid : EFFECT_UUID_NULL);
 
+    // TODO b/182392769: use attribution source util
     mIEffectClient = new EffectClient(this);
-    mClientPid = IPCThreadState::self()->getCallingPid();
-    mClientUid = IPCThreadState::self()->getCallingUid();
+    pid_t pid = IPCThreadState::self()->getCallingPid();
+    mClientAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+    uid_t uid = IPCThreadState::self()->getCallingUid();
+    mClientAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
 
-    iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
-            mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
-            probe, &mStatus, &mId, &enabled);
+    media::CreateEffectRequest request;
+    request.desc = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_effect_descriptor_t_EffectDescriptor(mDescriptor));
+    request.client = mIEffectClient;
+    request.priority = priority;
+    request.output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(io));
+    request.sessionId = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(mSessionId));
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
+    request.attributionSource = mClientAttributionSource;
+    request.probe = probe;
+
+    media::CreateEffectResponse response;
+
+    mStatus = audioFlinger->createEffect(request, &response);
+
+    if (mStatus == OK) {
+        mId = response.id;
+        enabled = response.enabled;
+        iEffect = response.effect;
+        mDescriptor = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_EffectDescriptor_effect_descriptor_t(response.desc));
+    }
 
     // In probe mode, we stop here and return the status: the IEffect interface to
     // audio flinger will not be retained. initCheck() will return the creation status
@@ -166,8 +156,10 @@
 
     mEnabled = (volatile int32_t)enabled;
 
-    cblk = iEffect->getCblk();
-    if (cblk == 0) {
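+    // The control block is now returned as a SharedFileRegion and mapped back into an IMemory.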
+    if (media::SharedFileRegion shmem;
+            !iEffect->getCblk(&shmem).isOk()
+            || !convertSharedFileRegionToIMemory(shmem, &cblk)
+            || cblk == 0) {
         mStatus = NO_INIT;
         ALOGE("Could not get control block");
         return mStatus;
@@ -185,15 +177,43 @@
 
     IInterface::asBinder(iEffect)->linkToDeath(mIEffectClient);
     ALOGV("set() %p OK effect: %s id: %d status %d enabled %d pid %d", this, mDescriptor.name, mId,
-            mStatus, mEnabled, mClientPid);
+            mStatus, mEnabled, mClientAttributionSource.pid);
 
     if (!audio_is_global_session(mSessionId)) {
-        AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+        AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
     }
 
     return mStatus;
 }
 
+status_t AudioEffect::set(const char *typeStr,
+                const char *uuidStr,
+                int32_t priority,
+                effect_callback_t cbf,
+                void* user,
+                audio_session_t sessionId,
+                audio_io_handle_t io,
+                const AudioDeviceTypeAddr& device,
+                bool probe)
+{
+    effect_uuid_t type;
+    effect_uuid_t *pType = nullptr;
+    effect_uuid_t uuid;
+    effect_uuid_t *pUuid = nullptr;
+
+    ALOGV("AudioEffect::set string\n - type: %s\n - uuid: %s",
+            typeStr ? typeStr : "nullptr", uuidStr ? uuidStr : "nullptr");
+
+    if (stringToGuid(typeStr, &type) == NO_ERROR) {
+        pType = &type;
+    }
+    if (stringToGuid(uuidStr, &uuid) == NO_ERROR) {
+        pUuid = &uuid;
+    }
+
+    return set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
+}
+
 
 AudioEffect::~AudioEffect()
 {
@@ -201,7 +221,8 @@
 
     if (!mProbe && (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS)) {
         if (!audio_is_global_session(mSessionId)) {
-            AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+            AudioSystem::releaseAudioSessionId(mSessionId,
+                VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid)));
         }
         if (mIEffect != NULL) {
             mIEffect->disconnect();
@@ -242,15 +263,19 @@
     }
 
     status_t status = NO_ERROR;
-
     AutoMutex lock(mLock);
     if (enabled != mEnabled) {
+        Status bs;
+
         if (enabled) {
             ALOGV("enable %p", this);
-            status = mIEffect->enable();
+            bs = mIEffect->enable(&status);
         } else {
             ALOGV("disable %p", this);
-            status = mIEffect->disable();
+            bs = mIEffect->disable(&status);
+        }
+        if (!bs.isOk()) {
+            status = statusTFromBinderStatus(bs);
         }
         if (status == NO_ERROR) {
             mEnabled = enabled;
@@ -283,7 +308,20 @@
         mLock.lock();
     }
 
-    status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
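+    // Marshal the raw command buffer into a byte vector for the AIDL command() call.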
+    std::vector<uint8_t> data;
+    appendToBuffer(cmdData, cmdSize, &data);
+
+    status_t status;
+    std::vector<uint8_t> response;
+
+    Status bs = mIEffect->command(cmdCode, data, *replySize, &response, &status);
+    if (!bs.isOk()) {
+        status = statusTFromBinderStatus(bs);
+    }
+    if (status == NO_ERROR) {
+        memcpy(replyData, response.data(), response.size());
+        *replySize = response.size();
+    }
 
     if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
         if (status == NO_ERROR) {
@@ -298,7 +336,6 @@
     return status;
 }
 
-
 status_t AudioEffect::setParameter(effect_param_t *param)
 {
     if (mProbe) {
@@ -312,14 +349,27 @@
         return BAD_VALUE;
     }
 
-    uint32_t size = sizeof(int);
     uint32_t psize = ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize;
 
     ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data,
             (param->psize == 8) ? *((int *)param->data + 1): -1);
 
-    return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size,
-            &param->status);
+    std::vector<uint8_t> cmd;
+    appendToBuffer(param, sizeof(effect_param_t) + psize, &cmd);
+    std::vector<uint8_t> response;
+    status_t status;
+    Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM,
+                                  cmd,
+                                  sizeof(int),
+                                  &response,
+                                  &status);
+    if (!bs.isOk()) {
+        status = statusTFromBinderStatus(bs);
+        return status;
+    }
+    assert(response.size() == sizeof(int));
+    memcpy(&param->status, response.data(), response.size());
+    return status;
 }
 
 status_t AudioEffect::setParameterDeferred(effect_param_t *param)
@@ -364,8 +414,18 @@
     if (mCblk->clientIndex == 0) {
         return INVALID_OPERATION;
     }
-    uint32_t size = 0;
-    return mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT, 0, NULL, &size, NULL);
+    std::vector<uint8_t> cmd;
+    std::vector<uint8_t> response;
+    status_t status;
+    Status bs = mIEffect->command(EFFECT_CMD_SET_PARAM_COMMIT,
+                                  cmd,
+                                  0,
+                                  &response,
+                                  &status);
+    if (!bs.isOk()) {
+        status = statusTFromBinderStatus(bs);
+    }
+    return status;
 }
 
 status_t AudioEffect::getParameter(effect_param_t *param)
@@ -387,8 +447,18 @@
     uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) +
             param->vsize;
 
-    return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param,
-            &psize, param);
+    status_t status;
+    std::vector<uint8_t> cmd;
+    std::vector<uint8_t> response;
+    appendToBuffer(param, sizeof(effect_param_t) + param->psize, &cmd);
+
+    Status bs = mIEffect->command(EFFECT_CMD_GET_PARAM, cmd, psize, &response, &status);
+    if (!bs.isOk()) {
+        status = statusTFromBinderStatus(bs);
+        return status;
+    }
+    memcpy(param, response.data(), response.size());
+    return status;
 }
 
 
@@ -436,19 +506,18 @@
     }
 }
 
-void AudioEffect::commandExecuted(uint32_t cmdCode,
-                                  uint32_t cmdSize __unused,
-                                  void *cmdData,
-                                  uint32_t replySize __unused,
-                                  void *replyData)
+void AudioEffect::commandExecuted(int32_t cmdCode,
+                                  const std::vector<uint8_t>& cmdData,
+                                  const std::vector<uint8_t>& replyData)
 {
-    if (cmdData == NULL || replyData == NULL) {
+    if (cmdData.empty() || replyData.empty()) {
         return;
     }
 
     if (mCbf != NULL && cmdCode == EFFECT_CMD_SET_PARAM) {
-        effect_param_t *cmd = (effect_param_t *)cmdData;
-        cmd->status = *(int32_t *)replyData;
+        std::vector<uint8_t> cmdDataCopy(cmdData);
+        effect_param_t* cmd = reinterpret_cast<effect_param_t *>(cmdDataCopy.data());
+        cmd->status = *reinterpret_cast<const int32_t *>(replyData.data());
         mCbf(EVENT_PARAMETER_CHANGED, mUserData, cmd);
     }
 }
@@ -483,9 +552,23 @@
                                           effect_descriptor_t *descriptors,
                                           uint32_t *count)
 {
+    if (descriptors == nullptr || count == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->queryDefaultPreProcessing(audioSession, descriptors, count);
+
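+    // media::Int emulates the legacy inout 'count' parameter across the AIDL boundary.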
+    int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_session_t_int32_t(audioSession));
+    media::Int countAidl;
+    countAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*count));
+    std::vector<media::EffectDescriptor> retAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->queryDefaultPreProcessing(audioSessionAidl, &countAidl, &retAidl)));
+    *count = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(countAidl.value));
+    RETURN_STATUS_IF_ERROR(convertRange(retAidl.begin(), retAidl.end(), descriptors,
+                                        aidl2legacy_EffectDescriptor_effect_descriptor_t));
+    return OK;
 }
 
 status_t AudioEffect::newEffectUniqueId(audio_unique_id_t* id)
@@ -525,7 +608,18 @@
         uuid = *EFFECT_UUID_NULL;
     }
 
-    return aps->addSourceDefaultEffect(&type, opPackageName, &uuid, priority, source, id);
+    media::AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
+    media::AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
+    std::string opPackageNameAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_String16_string(opPackageName));
+    media::AudioSourceType sourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(source));
+    int32_t retAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->addSourceDefaultEffect(typeAidl, opPackageNameAidl, uuidAidl, priority, sourceAidl,
+                                        &retAidl)));
+    *id = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_unique_id_t(retAidl));
+    return OK;
 }
 
 status_t AudioEffect::addStreamDefaultEffect(const char *typeStr,
@@ -557,7 +651,18 @@
         uuid = *EFFECT_UUID_NULL;
     }
 
-    return aps->addStreamDefaultEffect(&type, opPackageName, &uuid, priority, usage, id);
+    media::AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
+    media::AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
+    std::string opPackageNameAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_String16_string(opPackageName));
+    media::AudioUsage usageAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_usage_t_AudioUsage(usage));
+    int32_t retAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->addStreamDefaultEffect(typeAidl, opPackageNameAidl, uuidAidl, priority, usageAidl,
+                                        &retAidl)));
+    *id = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_unique_id_t(retAidl));
+    return OK;
 }
 
 status_t AudioEffect::removeSourceDefaultEffect(audio_unique_id_t id)
@@ -565,7 +670,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->removeSourceDefaultEffect(id);
+    int32_t idAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(id));
+    return statusTFromBinderStatus(aps->removeSourceDefaultEffect(idAidl));
 }
 
 status_t AudioEffect::removeStreamDefaultEffect(audio_unique_id_t id)
@@ -573,7 +679,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->removeStreamDefaultEffect(id);
+    int32_t idAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(id));
+    return statusTFromBinderStatus(aps->removeStreamDefaultEffect(idAidl));
 }
 
 // -------------------------------------------------------------------------
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index 0522099..c2f7229 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -85,7 +85,7 @@
     mDeviceType = (audio_devices_t) parcel->readInt32();
     mDeviceAddress = parcel->readString8();
     mCbFlags = (uint32_t)parcel->readInt32();
-    mAllowPrivilegedPlaybackCapture = parcel->readBool();
+    mAllowPrivilegedMediaPlaybackCapture = parcel->readBool();
     mVoiceCommunicationCaptureAllowed = parcel->readBool();
     size_t size = (size_t)parcel->readInt32();
     if (size > MAX_CRITERIA_PER_MIX) {
@@ -110,7 +110,7 @@
     parcel->writeInt32(mDeviceType);
     parcel->writeString8(mDeviceAddress);
     parcel->writeInt32(mCbFlags);
-    parcel->writeBool(mAllowPrivilegedPlaybackCapture);
+    parcel->writeBool(mAllowPrivilegedMediaPlaybackCapture);
     parcel->writeBool(mVoiceCommunicationCaptureAllowed);
     size_t size = mCriteria.size();
     if (size > MAX_CRITERIA_PER_MIX) {
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index cff72fd..f98027a 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -19,55 +19,47 @@
 #include <utils/Log.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioAttributes.h>
-#include <media/AudioSystem.h>
+#include <media/PolicyAidlConversion.h>
+
+#define RETURN_STATUS_IF_ERROR(x) \
+    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
 
 namespace android {
 
-status_t AudioProductStrategy::readFromParcel(const Parcel *parcel)
-{
-    mId = static_cast<product_strategy_t>(parcel->readInt32());
-    status_t ret = parcel->readUtf8FromUtf16(&mName);
-    if (ret != NO_ERROR) {
-        return ret;
-    }
-    size_t size = static_cast<size_t>(parcel->readInt32());
-    for (size_t i = 0; i < size; i++) {
-        AudioAttributes attribute;
-        ret = attribute.readFromParcel(parcel);
-        if (ret != NO_ERROR) {
-            mAudioAttributes.clear();
-            return ret;
-        }
-        mAudioAttributes.push_back(attribute);
-    }
-    return NO_ERROR;
+status_t AudioProductStrategy::readFromParcel(const Parcel* parcel) {
+    media::AudioProductStrategy aidl;
+    RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
+    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioProductStrategy(aidl));
+    return OK;
 }
 
-status_t AudioProductStrategy::writeToParcel(Parcel *parcel) const
-{
-    parcel->writeInt32(static_cast<int32_t>(mId));
-    parcel->writeUtf8AsUtf16(mName);
-    size_t size = mAudioAttributes.size();
-    size_t sizePosition = parcel->dataPosition();
-    parcel->writeInt32(size);
-    size_t finalSize = size;
+status_t AudioProductStrategy::writeToParcel(Parcel* parcel) const {
+    media::AudioProductStrategy aidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioProductStrategy(*this));
+    return aidl.writeToParcel(parcel);
+}
 
-    for (size_t i = 0; i < size; i++) {
-        size_t position = parcel->dataPosition();
-        AudioAttributes attribute(mAudioAttributes[i]);
-        status_t ret = attribute.writeToParcel(parcel);
-        if (ret != NO_ERROR) {
-            parcel->setDataPosition(position);
-            finalSize--;
-        }
-    }
-    if (size != finalSize) {
-        size_t position = parcel->dataPosition();
-        parcel->setDataPosition(sizePosition);
-        parcel->writeInt32(finalSize);
-        parcel->setDataPosition(position);
-    }
-    return NO_ERROR;
+ConversionResult<media::AudioProductStrategy>
+legacy2aidl_AudioProductStrategy(const AudioProductStrategy& legacy) {
+    media::AudioProductStrategy aidl;
+    aidl.name = legacy.getName();
+    aidl.audioAttributes = VALUE_OR_RETURN(
+            convertContainer<std::vector<media::AudioAttributesEx>>(
+                    legacy.getAudioAttributes(),
+                    legacy2aidl_AudioAttributes_AudioAttributesEx));
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_product_strategy_t_int32_t(legacy.getId()));
+    return aidl;
+}
+
+ConversionResult<AudioProductStrategy>
+aidl2legacy_AudioProductStrategy(const media::AudioProductStrategy& aidl) {
+    return AudioProductStrategy(
+            aidl.name,
+            VALUE_OR_RETURN(
+                    convertContainer<std::vector<AudioAttributes>>(
+                            aidl.audioAttributes,
+                            aidl2legacy_AudioAttributesEx_AudioAttributes)),
+            VALUE_OR_RETURN(aidl2legacy_int32_t_product_strategy_t(aidl.id)));
 }
 
 // Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
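
AudioProductStrategy now round-trips through a legacy2aidl/aidl2legacy converter pair instead of hand-written Parcel code, and each direction returns a ConversionResult so a failed element aborts the whole conversion rather than being silently dropped (the old writeToParcel shrank the attribute list on error). A self-contained sketch of that converter-pair shape follows; ConversionResult is modeled with std::optional and the strategy types are simplified stand-ins.

    #include <cstdint>
    #include <optional>
    #include <string>
    #include <vector>

    // Stand-in for ConversionResult<T>: empty means the conversion failed.
    template <typename T>
    using ConversionResult = std::optional<T>;

    struct LegacyStrategy {                 // stand-in for AudioProductStrategy
        std::string name;
        std::vector<int> attributes;        // stand-in for the AudioAttributes vector
        int32_t id;
    };

    struct AidlStrategy {                   // stand-in for media::AudioProductStrategy
        std::string name;
        std::vector<int> audioAttributes;
        int32_t id;
    };

    ConversionResult<AidlStrategy> legacy2aidl(const LegacyStrategy& legacy) {
        if (legacy.id < 0) return std::nullopt;   // reject values the wire format can't carry
        return AidlStrategy{legacy.name, legacy.attributes, legacy.id};
    }

    ConversionResult<LegacyStrategy> aidl2legacy(const AidlStrategy& aidl) {
        if (aidl.id < 0) return std::nullopt;
        return LegacyStrategy{aidl.name, aidl.audioAttributes, aidl.id};
    }

    // The round-trip property the converter pair is expected to preserve.
    bool roundTrips(const LegacyStrategy& s) {
        auto aidl = legacy2aidl(s);
        if (!aidl) return false;
        auto back = aidl2legacy(*aidl);
        return back && back->name == s.name && back->id == s.id &&
               back->attributes == s.attributes;
    }
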
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index df47def..a1d3bdb 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -38,6 +38,10 @@
 #define WAIT_PERIOD_MS          10
 
 namespace android {
+
+using android::content::AttributionSourceState;
+using aidl_utils::statusTFromBinderStatus;
+
 // ---------------------------------------------------------------------------
 
 // static
@@ -103,6 +107,7 @@
         mMetricsItem->setInt32(MM_PREFIX "lastError.code", (int32_t)mLastError);
         mMetricsItem->setCString(MM_PREFIX "lastError.at", mLastErrorFunc.c_str());
     }
+    mMetricsItem->setCString(MM_PREFIX "logSessionId", record->mLogSessionId.c_str());
 }
 
 static const char *stateToString(bool active) {
@@ -121,12 +126,11 @@
     return NO_ERROR;
 }
 
-AudioRecord::AudioRecord(const String16 &opPackageName)
-    : mActive(false), mStatus(NO_INIT), mOpPackageName(opPackageName),
-      mSessionId(AUDIO_SESSION_ALLOCATE),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE), mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
+AudioRecord::AudioRecord(const AttributionSourceState &client)
+    : mActive(false), mStatus(NO_INIT), mClientAttributionSource(client),
+      mSessionId(AUDIO_SESSION_ALLOCATE), mPreviousPriority(ANDROID_PRIORITY_NORMAL),
+      mPreviousSchedulingGroup(SP_DEFAULT), mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE), mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
       mSelectedMicFieldDimension(MIC_FIELD_DIMENSION_DEFAULT)
 {
 }
@@ -136,7 +140,7 @@
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
-        const String16& opPackageName,
+        const AttributionSourceState& client,
         size_t frameCount,
         callback_t cbf,
         void* user,
@@ -144,24 +148,24 @@
         audio_session_t sessionId,
         transfer_type transferType,
         audio_input_flags_t flags,
-        uid_t uid,
-        pid_t pid,
         const audio_attributes_t* pAttributes,
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float microphoneFieldDimension)
     : mActive(false),
       mStatus(NO_INIT),
-      mOpPackageName(opPackageName),
+      mClientAttributionSource(client),
       mSessionId(AUDIO_SESSION_ALLOCATE),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
       mPreviousSchedulingGroup(SP_DEFAULT),
       mProxy(NULL)
 {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
     (void)set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
             notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
-            uid, pid, pAttributes, selectedDeviceId,
-            selectedMicDirection, microphoneFieldDimension);
+            uid, pid, pAttributes, selectedDeviceId, selectedMicDirection,
+            microphoneFieldDimension);
 }
 
 AudioRecord::~AudioRecord()
@@ -177,21 +181,9 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer empty condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioRecordThread != 0) {
-            mProxy->interrupt();
-            mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
-            mAudioRecordThread->requestExitAndWait();
-            mAudioRecordThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
-        }
         IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
         mAudioRecord.clear();
         mCblkMemory.clear();
@@ -199,10 +191,32 @@
         IPCThreadState::self()->flushCommands();
         ALOGV("%s(%d): releasing session id %d",
                 __func__, mPortId, mSessionId);
-        AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+        pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
+        AudioSystem::releaseAudioSessionId(mSessionId, pid);
     }
 }
 
+void AudioRecord::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer empty condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioRecordThread != 0) {
+        mProxy->interrupt();
+        mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
+        mAudioRecordThread->requestExitAndWait();
+        mAudioRecordThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+    }
+}
+
 status_t AudioRecord::set(
         audio_source_t inputSource,
         uint32_t sampleRate,
@@ -221,26 +235,41 @@
         const audio_attributes_t* pAttributes,
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
-        float microphoneFieldDimension)
+        float microphoneFieldDimension,
+        int32_t maxSharedAudioHistoryMs)
 {
     status_t status = NO_ERROR;
     uint32_t channelCount;
-    pid_t callingPid;
-    pid_t myPid;
 
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
-          "notificationFrames %u, sessionId %d, transferType %d, flags %#x, opPackageName %s "
+          "notificationFrames %u, sessionId %d, transferType %d, flags %#x, attributionSource %s"
           "uid %d, pid %d",
           __func__,
           inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
-          sessionId, transferType, flags, String8(mOpPackageName).string(), uid, pid);
+          sessionId, transferType, flags, mClientAttributionSource.toString().c_str(), uid, pid);
+
+    // TODO b/182392553: refactor or remove
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    pid_t myPid = getpid();
+    pid_t adjPid = pid;
+    if (pid == -1 || (callingPid != myPid)) {
+        adjPid = callingPid;
+    }
+    mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(adjPid));
+
+    uid_t adjUid = uid;
+    if (uid == -1 || (callingPid != myPid)) {
+        adjUid = IPCThreadState::self()->getCallingUid();
+    }
+    mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(adjUid));
 
     mTracker.reset(new RecordingActivityTracker());
 
     mSelectedDeviceId = selectedDeviceId;
     mSelectedMicDirection = selectedMicDirection;
     mSelectedMicFieldDimension = microphoneFieldDimension;
+    mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
 
     switch (transferType) {
     case TRANSFER_DEFAULT:
@@ -279,7 +308,8 @@
         mAttributes.source = inputSource;
         if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION
                 || inputSource == AUDIO_SOURCE_CAMCORDER) {
-            mAttributes.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            mAttributes.flags = static_cast<audio_flags_mask_t>(
+                    mAttributes.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
         }
     } else {
         // stream type shouldn't be looked at, this track has audio attributes
@@ -328,19 +358,6 @@
     mSessionId = sessionId;
     ALOGV("%s(): mSessionId %d", __func__, mSessionId);
 
-    callingPid = IPCThreadState::self()->getCallingPid();
-    myPid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
-        mClientUid = IPCThreadState::self()->getCallingUid();
-    } else {
-        mClientUid = uid;
-    }
-    if (pid == -1 || (callingPid != myPid)) {
-        mClientPid = callingPid;
-    } else {
-        mClientPid = pid;
-    }
-
     mOrigFlags = mFlags = flags;
     mCbf = cbf;
 
@@ -353,7 +370,7 @@
     // create the IAudioRecord
     {
         AutoMutex lock(mLock);
-        status = createRecord_l(0 /*epoch*/, mOpPackageName);
+        status = createRecord_l(0 /*epoch*/);
     }
 
     ALOGV("%s(%d): status %d", __func__, mPortId, status);
@@ -374,7 +391,7 @@
     mMarkerReached = false;
     mNewPosition = 0;
     mUpdatePeriod = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+    AudioSystem::acquireAudioSessionId(mSessionId, adjPid, adjUid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInOverrun = false;
@@ -440,7 +457,7 @@
     mActive = true;
 
     if (!(flags & CBLK_INVALID)) {
-        status = mAudioRecord->start(event, triggerSession).transactionError();
+        status = statusTFromBinderStatus(mAudioRecord->start(event, triggerSession));
         if (status == DEAD_OBJECT) {
             flags |= CBLK_INVALID;
         }
@@ -731,17 +748,18 @@
 }
 
 // must be called with mLock held
-status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName)
+status_t AudioRecord::createRecord_l(const Modulo<uint32_t> &epoch)
 {
     const int64_t beginNs = systemTime();
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     IAudioFlinger::CreateRecordInput input;
     IAudioFlinger::CreateRecordOutput output;
     audio_session_t originalSessionId;
-    sp<media::IAudioRecord> record;
     void *iMemPointer;
     audio_track_cblk_t* cblk;
     status_t status;
+    static const int32_t kMaxCreateAttempts = 3;
+    int32_t remainingAttempts = kMaxCreateAttempts;
 
     if (audioFlinger == 0) {
         ALOGE("%s(%d): Could not get audioflinger", __func__, mPortId);
@@ -783,15 +801,13 @@
     input.config.sample_rate = mSampleRate;
     input.config.channel_mask = mChannelMask;
     input.config.format = mFormat;
-    input.clientInfo.clientUid = mClientUid;
-    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.attributionSource = mClientAttributionSource;
     input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_INPUT_FLAG_FAST) {
         if (mAudioRecordThread != 0) {
             input.clientInfo.clientTid = mAudioRecordThread->getTid();
         }
     }
-    input.opPackageName = opPackageName;
     input.riid = mTracker->getRiid();
 
     input.flags = mFlags;
@@ -802,17 +818,29 @@
     input.selectedDeviceId = mSelectedDeviceId;
     input.sessionId = mSessionId;
     originalSessionId = mSessionId;
+    input.maxSharedAudioHistoryMs = mMaxSharedAudioHistoryMs;
 
-    record = audioFlinger->createRecord(input,
-                                                              output,
-                                                              &status);
+    do {
+        media::CreateRecordResponse response;
+        status = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response);
+        output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+        if (status == NO_ERROR) {
+            break;
+        }
+        if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
+            ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
+                  __func__, mPortId, status);
+            goto exit;
+        }
+        // FAILED_TRANSACTION happens under very specific conditions that cause a state mismatch
+        // between the audio policy manager and audio flinger during the input stream open
+        // sequence; it can be recovered by retrying.
+        // Leave time for the race condition to clear before retrying, and randomize the delay
+        // to reduce the probability of concurrent retries in lockstep.
+        usleep((20 + rand() % 30) * 10000);
+    } while (1);
 
-    if (status != NO_ERROR) {
-        ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
-              __func__, mPortId, status);
-        goto exit;
-    }
-    ALOG_ASSERT(record != 0);
+    ALOG_ASSERT(output.audioRecord != 0);
 
     // AudioFlinger now owns the reference to the I/O handle,
     // so we are no longer responsible for releasing it.
@@ -870,7 +898,7 @@
         IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
     }
-    mAudioRecord = record;
+    mAudioRecord = output.audioRecord;
     mCblkMemory = output.cblk;
     mBufferMemory = output.buffers;
     IPCThreadState::self()->flushCommands();
@@ -900,6 +928,10 @@
         AudioSystem::addAudioDeviceCallback(this, output.inputId, output.portId);
     }
 
+    if (!mSharedAudioPackageName.empty()) {
+        mAudioRecord->shareAudioHistory(mSharedAudioPackageName, mSharedAudioStartMs);
+    }
+
     mPortId = output.portId;
     // We retain a copy of the I/O handle, but don't own the reference
     mInput = output.inputId;
@@ -929,6 +961,7 @@
         .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, mLogSessionId)
         .set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
         .set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.inputId)
         .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
@@ -1092,7 +1125,7 @@
     }
 
     if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
-        // sanity-check. user is most-likely passing an error code, and it would
+        // Validation: the user is most likely passing an error code, and it would
         // make the return value ambiguous (actualSize vs error).
         ALOGE("%s(%d) (buffer=%p, size=%zu (%zu)",
                 __func__, mPortId, buffer, userSize, userSize);
@@ -1319,7 +1352,7 @@
         mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
         size_t readSize = audioBuffer.size;
 
-        // Sanity check on returned size
+        // Validate the returned size
         if (ssize_t(readSize) < 0 || readSize > reqSize) {
             ALOGE("%s(%d):  EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
                     __func__, mPortId, reqSize, ssize_t(readSize));
@@ -1411,14 +1444,14 @@
     // It will also delete the strong references on previous IAudioRecord and IMemory
     Modulo<uint32_t> position(mProxy->getPosition());
     mNewPosition = position + mUpdatePeriod;
-    result = createRecord_l(position, mOpPackageName);
+    result = createRecord_l(position);
 
     if (result == NO_ERROR) {
         if (mActive) {
             // callback thread or sync event hasn't changed
             // FIXME this fails if we have a new AudioFlinger instance
-            result = mAudioRecord->start(
-                AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE).transactionError();
+            result = statusTFromBinderStatus(mAudioRecord->start(
+                AudioSystem::SYNC_EVENT_SAME, AUDIO_SESSION_NONE));
         }
         mFramesReadServerOffset = mFramesRead; // server resets to zero so we need an offset.
     }
@@ -1508,7 +1541,13 @@
 status_t AudioRecord::getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones)
 {
     AutoMutex lock(mLock);
-    return mAudioRecord->getActiveMicrophones(activeMicrophones).transactionError();
+    std::vector<media::MicrophoneInfoData> mics;
+    status_t status = statusTFromBinderStatus(mAudioRecord->getActiveMicrophones(&mics));
+    activeMicrophones->resize(mics.size());
+    for (size_t i = 0; status == OK && i < mics.size(); ++i) {
+        status = activeMicrophones->at(i).readFromParcelable(mics[i]);
+    }
+    return status;
 }
 
 status_t AudioRecord::setPreferredMicrophoneDirection(audio_microphone_direction_t direction)
@@ -1524,7 +1563,7 @@
         // the internal AudioRecord hasn't been created yet, so just stash the attribute.
         return OK;
     } else {
-        return mAudioRecord->setPreferredMicrophoneDirection(direction).transactionError();
+        return statusTFromBinderStatus(mAudioRecord->setPreferredMicrophoneDirection(direction));
     }
 }
 
@@ -1540,10 +1579,39 @@
         // the internal AudioRecord hasn't been created yet, so just stash the attribute.
         return OK;
     } else {
-        return mAudioRecord->setPreferredMicrophoneFieldDimension(zoom).transactionError();
+        return statusTFromBinderStatus(mAudioRecord->setPreferredMicrophoneFieldDimension(zoom));
     }
 }
 
+void AudioRecord::setLogSessionId(const char *logSessionId)
+{
+    AutoMutex lock(mLock);
+    if (logSessionId == nullptr) logSessionId = "";  // an empty string is an unset session id.
+    if (mLogSessionId == logSessionId) return;
+
+    mLogSessionId = logSessionId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, logSessionId)
+        .record();
+}
+
+status_t AudioRecord::shareAudioHistory(const std::string& sharedPackageName,
+                                        int64_t sharedStartMs)
+{
+    AutoMutex lock(mLock);
+    if (mAudioRecord == 0) {
+        return NO_INIT;
+    }
+    status_t status = statusTFromBinderStatus(
+            mAudioRecord->shareAudioHistory(sharedPackageName, sharedStartMs));
+    if (status == NO_ERROR) {
+        mSharedAudioPackageName = sharedPackageName;
+        mSharedAudioStartMs = sharedStartMs;
+    }
+    return status;
+}
+
 // =========================================================================
 
 void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
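
createRecord_l() above now retries a FAILED_TRANSACTION from AudioFlinger up to three times, sleeping roughly 200-500 ms with a random component so concurrent clients do not retry in lockstep. A generic sketch of that bounded-retry-with-jitter loop is below, detached from AudioFlinger; the Status enum, the callWithRetry name, and the default delays are placeholders, not part of the patch.

    #include <chrono>
    #include <functional>
    #include <random>
    #include <thread>

    enum class Status { Ok, Retryable, Fatal };

    // Retry a transiently failing operation a bounded number of times, sleeping a
    // randomized interval between attempts to avoid lockstep retries across clients.
    Status callWithRetry(const std::function<Status()>& op,
                         int maxAttempts = 3,
                         std::chrono::milliseconds baseDelay = std::chrono::milliseconds(200),
                         std::chrono::milliseconds jitter = std::chrono::milliseconds(300)) {
        std::mt19937 rng{std::random_device{}()};
        std::uniform_int_distribution<int> extra(0, static_cast<int>(jitter.count()));
        for (int attempt = 1; ; ++attempt) {
            const Status s = op();
            if (s != Status::Retryable || attempt >= maxAttempts) {
                return s;           // success, a fatal error, or attempts exhausted
            }
            // Leave time for the transient condition to clear before the next attempt.
            std::this_thread::sleep_for(baseDelay + std::chrono::milliseconds(extra(rng)));
        }
    }
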
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 6357da4..88e752b 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -19,22 +19,40 @@
 
 #include <utils/Log.h>
 
+#include <android/media/IAudioPolicyService.h>
 #include <android/media/BnCaptureStateListener.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/IAudioFlinger.h>
-#include <media/IAudioPolicyService.h>
+#include <media/PolicyAidlConversion.h>
 #include <media/TypeConverter.h>
 #include <math.h>
 
 #include <system/audio.h>
+#include <android/media/GetInputForAttrResponse.h>
+
+#define VALUE_OR_RETURN_BINDER_STATUS(x) \
+    ({ auto _tmp = (x); \
+       if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
+       std::move(_tmp.value()); })
+
+#define RETURN_STATUS_IF_ERROR(x)    \
+    {                                \
+        auto _tmp = (x);             \
+        if (_tmp != OK) return _tmp; \
+    }
 
 // ----------------------------------------------------------------------------
 
 namespace android {
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
+using media::IAudioPolicyService;
+using android::content::AttributionSourceState;
 
 // client singleton for AudioFlinger binder interface
 Mutex AudioSystem::gLock;
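
The VALUE_OR_RETURN_BINDER_STATUS and RETURN_STATUS_IF_ERROR helpers defined above are early-return shims: the first unwraps a ConversionResult inside a Binder method and turns a conversion failure into a returned status, the second propagates any non-OK status_t. The sketch below reproduces that control flow with simplified stand-in types (status_t, ConversionResult modeled as std::optional, and a placeholder call); the value-producing macro relies on the same GNU statement-expression extension the real one uses.

    #include <cstdint>
    #include <optional>
    #include <utility>

    using status_t = int32_t;          // stand-in for the Android status_t typedef
    constexpr status_t OK = 0;
    constexpr status_t BAD_VALUE = -22;

    // Stand-in for ConversionResult<T>; empty means the conversion failed.
    template <typename T>
    using ConversionResult = std::optional<T>;

    // Early return on any non-OK status (same shape as RETURN_STATUS_IF_ERROR above).
    #define RETURN_STATUS_IF_ERROR(expr) \
        { const status_t _tmp = (expr); if (_tmp != OK) return _tmp; }

    // Value-or-early-return, using the GNU statement-expression extension
    // (non-standard C++, but supported by the clang toolchain Android builds with).
    #define VALUE_OR_RETURN_STATUS(x) \
        ({ auto _tmp = (x); if (!_tmp.has_value()) return BAD_VALUE; std::move(_tmp.value()); })

    ConversionResult<int32_t> legacyToWire(int value) {
        if (value < 0) return std::nullopt;    // value does not fit the wire format
        return value;
    }

    status_t placeholderBinderCall(int32_t /*wireValue*/) { return OK; }

    status_t sendValue(int legacyValue) {
        const int32_t wire = VALUE_OR_RETURN_STATUS(legacyToWire(legacyValue));
        RETURN_STATUS_IF_ERROR(placeholderBinderCall(wire));
        return OK;
    }
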
@@ -45,36 +63,62 @@
 std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
 dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
+routing_callback AudioSystem::gRoutingCallback = NULL;
 
 // Required to be held while calling into gSoundTriggerCaptureStateListener.
+class CaptureStateListenerImpl;
+
 Mutex gSoundTriggerCaptureStateListenerLock;
-sp<AudioSystem::CaptureStateListener> gSoundTriggerCaptureStateListener = nullptr;
+sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
+
+// Binder for the AudioFlinger service that's passed to this client process from the system server.
+// This allows specific isolated processes to access the audio system. Currently used only for the
+// HotwordDetectionService.
+sp<IBinder> gAudioFlingerBinder = nullptr;
+
+void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
+    if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
+        ALOGE("setAudioFlingerBinder: received a binder of type %s",
+              String8(audioFlinger->getInterfaceDescriptor()).string());
+        return;
+    }
+    Mutex::Autolock _l(gLock);
+    if (gAudioFlinger != nullptr) {
+        ALOGW("setAudioFlingerBinder: ignoring; AudioFlinger connection already established.");
+        return;
+    }
+    gAudioFlingerBinder = audioFlinger;
+}
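
setAudioFlingerBinder() above validates the interface descriptor and stores the injected binder under the same lock that guards the cached AudioFlinger connection, and get_audio_flinger() then prefers that injected binder over a service-manager lookup. The sketch below shows that guarded set-once-then-prefer pattern with generic types; Service, ServiceSlot and the "looked-up" fallback are illustrative, not Binder APIs, and the real code additionally ignores injection once a connection already exists.

    #include <memory>
    #include <mutex>
    #include <string>
    #include <utility>

    struct Service {                       // stand-in for an IBinder-backed service
        std::string descriptor;
    };

    class ServiceSlot {
    public:
        // Reject a wrong interface type, and keep only the first injected instance.
        bool inject(std::shared_ptr<Service> service, const std::string& expectedDescriptor) {
            if (!service || service->descriptor != expectedDescriptor) {
                return false;                          // descriptor check failed
            }
            std::lock_guard<std::mutex> lock(mLock);
            if (mInjected) {
                return false;                          // already set; ignore later injections
            }
            mInjected = std::move(service);
            return true;
        }

        // Prefer the injected instance; otherwise fall back to a normal lookup.
        std::shared_ptr<Service> get() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mInjected) {
                return mInjected;
            }
            auto lookedUp = std::make_shared<Service>();   // placeholder for getService()
            lookedUp->descriptor = "looked-up";
            return lookedUp;
        }

    private:
        std::mutex mLock;
        std::shared_ptr<Service> mInjected;
    };
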
 
 // establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::get_audio_flinger()
-{
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
     sp<IAudioFlinger> af;
     sp<AudioFlingerClient> afc;
     bool reportNoError = false;
     {
         Mutex::Autolock _l(gLock);
         if (gAudioFlinger == 0) {
-            sp<IServiceManager> sm = defaultServiceManager();
             sp<IBinder> binder;
-            do {
-                binder = sm->getService(String16("media.audio_flinger"));
-                if (binder != 0)
-                    break;
-                ALOGW("AudioFlinger not published, waiting...");
-                usleep(500000); // 0.5 s
-            } while (true);
+            if (gAudioFlingerBinder != nullptr) {
+                binder = gAudioFlingerBinder;
+            } else {
+                sp<IServiceManager> sm = defaultServiceManager();
+                do {
+                    binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+                    if (binder != 0)
+                        break;
+                    ALOGW("AudioFlinger not published, waiting...");
+                    usleep(500000); // 0.5 s
+                } while (true);
+            }
             if (gAudioFlingerClient == NULL) {
                 gAudioFlingerClient = new AudioFlingerClient();
             } else {
                 reportNoError = true;
             }
             binder->linkToDeath(gAudioFlingerClient);
-            gAudioFlinger = interface_cast<IAudioFlinger>(binder);
+            gAudioFlinger = new AudioFlingerClientAdapter(
+                    interface_cast<media::IAudioFlingerService>(binder));
             LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
             afc = gAudioFlingerClient;
             // Make sure callbacks can be received by gAudioFlingerClient
@@ -91,8 +135,7 @@
     return af;
 }
 
-const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient()
-{
+const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
     // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return 0;
@@ -100,8 +143,7 @@
     return gAudioFlingerClient;
 }
 
-sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle)
-{
+sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
     sp<AudioIoDescriptor> desc;
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc != 0) {
@@ -110,8 +152,7 @@
     return desc;
 }
 
-/* static */ status_t AudioSystem::checkAudioFlinger()
-{
+/* static */ status_t AudioSystem::checkAudioFlinger() {
     if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) {
         return NO_ERROR;
     }
@@ -120,47 +161,41 @@
 
 // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
 
-status_t AudioSystem::muteMicrophone(bool state)
-{
+status_t AudioSystem::muteMicrophone(bool state) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMicMute(state);
 }
 
-status_t AudioSystem::isMicrophoneMuted(bool* state)
-{
+status_t AudioSystem::isMicrophoneMuted(bool* state) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *state = af->getMicMute();
     return NO_ERROR;
 }
 
-status_t AudioSystem::setMasterVolume(float value)
-{
+status_t AudioSystem::setMasterVolume(float value) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterVolume(value);
     return NO_ERROR;
 }
 
-status_t AudioSystem::setMasterMute(bool mute)
-{
+status_t AudioSystem::setMasterMute(bool mute) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     af->setMasterMute(mute);
     return NO_ERROR;
 }
 
-status_t AudioSystem::getMasterVolume(float* volume)
-{
+status_t AudioSystem::getMasterVolume(float* volume) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *volume = af->masterVolume();
     return NO_ERROR;
 }
 
-status_t AudioSystem::getMasterMute(bool* mute)
-{
+status_t AudioSystem::getMasterMute(bool* mute) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *mute = af->masterMute();
@@ -168,8 +203,7 @@
 }
 
 status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
-        audio_io_handle_t output)
-{
+                                      audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -177,8 +211,7 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute)
-{
+status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -187,8 +220,7 @@
 }
 
 status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
-        audio_io_handle_t output)
-{
+                                      audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -196,8 +228,7 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute)
-{
+status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -205,23 +236,20 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::setMode(audio_mode_t mode)
-{
+status_t AudioSystem::setMode(audio_mode_t mode) {
     if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMode(mode);
 }
 
-status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
-{
+status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setParameters(ioHandle, keyValuePairs);
 }
 
-String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys)
-{
+String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     String8 result = String8("");
     if (af == 0) return result;
@@ -230,13 +258,11 @@
     return result;
 }
 
-status_t AudioSystem::setParameters(const String8& keyValuePairs)
-{
+status_t AudioSystem::setParameters(const String8& keyValuePairs) {
     return setParameters(AUDIO_IO_HANDLE_NONE, keyValuePairs);
 }
 
-String8 AudioSystem::getParameters(const String8& keys)
-{
+String8 AudioSystem::getParameters(const String8& keys) {
     return getParameters(AUDIO_IO_HANDLE_NONE, keys);
 }
 
@@ -248,16 +274,14 @@
 static const float dBConvert = -dBPerStep * 2.302585093f / 20.0f;
 static const float dBConvertInverse = 1.0f / dBConvert;
 
-float AudioSystem::linearToLog(int volume)
-{
+float AudioSystem::linearToLog(int volume) {
     // float v = volume ? exp(float(100 - volume) * dBConvert) : 0;
     // ALOGD("linearToLog(%d)=%f", volume, v);
     // return v;
     return volume ? exp(float(100 - volume) * dBConvert) : 0;
 }
 
-int AudioSystem::logToLinear(float volume)
-{
+int AudioSystem::logToLinear(float volume) {
     // int v = volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
     // ALOGD("logTolinear(%d)=%f", v, volume);
     // return v;
@@ -266,31 +290,30 @@
 
 /* static */ size_t AudioSystem::calculateMinFrameCount(
         uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
-        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
-{
+        uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/) {
     // Ensure that buffer depth covers at least audio hardware latency
     uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
     if (minBufCount < 2) {
         minBufCount = 2;
     }
 #if 0
-    // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
-    // but keeping the code here to make it easier to add later.
-    if (minBufCount < notificationsPerBufferReq) {
-        minBufCount = notificationsPerBufferReq;
-    }
+        // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
+        // but keeping the code here to make it easier to add later.
+        if (minBufCount < notificationsPerBufferReq) {
+            minBufCount = notificationsPerBufferReq;
+        }
 #endif
     ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  "
-            "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
-            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
-            /*, notificationsPerBufferReq*/);
+          "sampleRate %u  speed %f  minBufCount: %u" /*"  notificationsPerBufferReq %u"*/,
+          afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
+    /*, notificationsPerBufferReq*/);
     return minBufCount * sourceFramesNeededWithTimestretch(
             sampleRate, afFrameCount, afSampleRate, speed);
 }
 
 
-status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType)
-{
+status_t
+AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType) {
     audio_io_handle_t output;
 
     if (streamType == AUDIO_STREAM_DEFAULT) {
@@ -306,8 +329,7 @@
 }
 
 status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
-                                      uint32_t* samplingRate)
-{
+                                      uint32_t* samplingRate) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
@@ -326,8 +348,7 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_t streamType)
-{
+status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_t streamType) {
     audio_io_handle_t output;
 
     if (streamType == AUDIO_STREAM_DEFAULT) {
@@ -343,8 +364,7 @@
 }
 
 status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
-                                    size_t* frameCount)
-{
+                                    size_t* frameCount) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
@@ -363,8 +383,7 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::getOutputLatency(uint32_t* latency, audio_stream_type_t streamType)
-{
+status_t AudioSystem::getOutputLatency(uint32_t* latency, audio_stream_type_t streamType) {
     audio_io_handle_t output;
 
     if (streamType == AUDIO_STREAM_DEFAULT) {
@@ -380,8 +399,7 @@
 }
 
 status_t AudioSystem::getLatency(audio_io_handle_t output,
-                                 uint32_t* latency)
-{
+                                 uint32_t* latency) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
@@ -397,8 +415,7 @@
 }
 
 status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
-        audio_channel_mask_t channelMask, size_t* buffSize)
-{
+                                         audio_channel_mask_t channelMask, size_t* buffSize) {
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc == 0) {
         return NO_INIT;
@@ -406,24 +423,21 @@
     return afc->getInputBufferSize(sampleRate, format, channelMask, buffSize);
 }
 
-status_t AudioSystem::setVoiceVolume(float value)
-{
+status_t AudioSystem::setVoiceVolume(float value) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setVoiceVolume(value);
 }
 
-status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t *halFrames,
-                                        uint32_t *dspFrames)
-{
+status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
+                                        uint32_t* dspFrames) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
 
     return af->getRenderPosition(halFrames, dspFrames, output);
 }
 
-uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle)
-{
+uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     uint32_t result = 0;
     if (af == 0) return result;
@@ -433,47 +447,41 @@
     return result;
 }
 
-audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use)
-{
+audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
     // Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
     return af->newAudioUniqueId(use);
 }
 
-void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid)
-{
+void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af != 0) {
         af->acquireAudioSessionId(audioSession, pid, uid);
     }
 }
 
-void AudioSystem::releaseAudioSessionId(audio_session_t audioSession, pid_t pid)
-{
+void AudioSystem::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af != 0) {
         af->releaseAudioSessionId(audioSession, pid);
     }
 }
 
-audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId)
-{
+audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return AUDIO_HW_SYNC_INVALID;
     return af->getAudioHwSyncForSession(sessionId);
 }
 
-status_t AudioSystem::systemReady()
-{
+status_t AudioSystem::systemReady() {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return NO_INIT;
     return af->systemReady();
 }
 
 status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
-                                       size_t* frameCount)
-{
+                                       size_t* frameCount) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
@@ -495,8 +503,7 @@
 // ---------------------------------------------------------------------------
 
 
-void AudioSystem::AudioFlingerClient::clearIoCache()
-{
+void AudioSystem::AudioFlingerClient::clearIoCache() {
     Mutex::Autolock _l(mLock);
     mIoDescriptors.clear();
     mInBuffSize = 0;
@@ -505,8 +512,7 @@
     mInChannelMask = AUDIO_CHANNEL_NONE;
 }
 
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
-{
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
     {
         Mutex::Autolock _l(AudioSystem::gLock);
         AudioSystem::gAudioFlinger.clear();
@@ -520,11 +526,18 @@
     ALOGW("AudioFlinger server died!");
 }
 
-void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event event,
-                                                      const sp<AudioIoDescriptor>& ioDesc) {
+Status AudioSystem::AudioFlingerClient::ioConfigChanged(
+        media::AudioIoConfigEvent _event,
+        const media::AudioIoDescriptor& _ioDesc) {
+    audio_io_config_event event = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event));
+    sp<AudioIoDescriptor> ioDesc(
+            VALUE_OR_RETURN_BINDER_STATUS(
+                    aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(_ioDesc)));
+
     ALOGV("ioConfigChanged() event %d", event);
 
-    if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
+    if (ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return Status::ok();
 
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
@@ -533,95 +546,102 @@
         auto callbacks = std::map<audio_port_handle_t, wp<AudioDeviceCallback>>();
 
         switch (event) {
-        case AUDIO_OUTPUT_OPENED:
-        case AUDIO_OUTPUT_REGISTERED:
-        case AUDIO_INPUT_OPENED:
-        case AUDIO_INPUT_REGISTERED: {
-            sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
-            if (oldDesc == 0) {
-                mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
-            } else {
+            case AUDIO_OUTPUT_OPENED:
+            case AUDIO_OUTPUT_REGISTERED:
+            case AUDIO_INPUT_OPENED:
+            case AUDIO_INPUT_REGISTERED: {
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                if (oldDesc == 0) {
+                    mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
+                } else {
+                    deviceId = oldDesc->getDeviceId();
+                    mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
+                }
+
+                if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
+                    deviceId = ioDesc->getDeviceId();
+                    if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
+                        auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                        if (it != mAudioDeviceCallbacks.end()) {
+                            callbacks = it->second;
+                        }
+                    }
+                }
+                ALOGV("ioConfigChanged() new %s %s %d samplingRate %u, format %#x channel mask %#x "
+                      "frameCount %zu deviceId %d",
+                      event == AUDIO_OUTPUT_OPENED || event == AUDIO_OUTPUT_REGISTERED ?
+                      "output" : "input",
+                      event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED ?
+                      "opened" : "registered",
+                      ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
+                      ioDesc->mChannelMask,
+                      ioDesc->mFrameCount, ioDesc->getDeviceId());
+            }
+                break;
+            case AUDIO_OUTPUT_CLOSED:
+            case AUDIO_INPUT_CLOSED: {
+                if (getIoDescriptor_l(ioDesc->mIoHandle) == 0) {
+                    ALOGW("ioConfigChanged() closing unknown %s %d",
+                          event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
+                    break;
+                }
+                ALOGV("ioConfigChanged() %s %d closed",
+                      event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
+
+                mIoDescriptors.removeItem(ioDesc->mIoHandle);
+                mAudioDeviceCallbacks.erase(ioDesc->mIoHandle);
+            }
+                break;
+
+            case AUDIO_OUTPUT_CONFIG_CHANGED:
+            case AUDIO_INPUT_CONFIG_CHANGED: {
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                if (oldDesc == 0) {
+                    ALOGW("ioConfigChanged() modifying unknown %s! %d",
+                          event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
+                          ioDesc->mIoHandle);
+                    break;
+                }
+
                 deviceId = oldDesc->getDeviceId();
                 mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
-            }
 
-            if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-                deviceId = ioDesc->getDeviceId();
-                if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
+                if (deviceId != ioDesc->getDeviceId()) {
+                    deviceId = ioDesc->getDeviceId();
                     auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
                     if (it != mAudioDeviceCallbacks.end()) {
                         callbacks = it->second;
                     }
                 }
+                ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
+                      "channel mask %#x frameCount %zu frameCountHAL %zu deviceId %d",
+                      event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
+                      ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
+                      ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->mFrameCountHAL,
+                      ioDesc->getDeviceId());
+
             }
-            ALOGV("ioConfigChanged() new %s %s %d samplingRate %u, format %#x channel mask %#x "
-                    "frameCount %zu deviceId %d",
-                    event == AUDIO_OUTPUT_OPENED || event == AUDIO_OUTPUT_REGISTERED ?
-                            "output" : "input",
-                            event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED ?
-                            "opened" : "registered",
-                    ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat, ioDesc->mChannelMask,
-                    ioDesc->mFrameCount, ioDesc->getDeviceId());
-            } break;
-        case AUDIO_OUTPUT_CLOSED:
-        case AUDIO_INPUT_CLOSED: {
-            if (getIoDescriptor_l(ioDesc->mIoHandle) == 0) {
-                ALOGW("ioConfigChanged() closing unknown %s %d",
-                      event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
                 break;
-            }
-            ALOGV("ioConfigChanged() %s %d closed",
-                  event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
-
-            mIoDescriptors.removeItem(ioDesc->mIoHandle);
-            mAudioDeviceCallbacks.erase(ioDesc->mIoHandle);
-            } break;
-
-        case AUDIO_OUTPUT_CONFIG_CHANGED:
-        case AUDIO_INPUT_CONFIG_CHANGED: {
-            sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
-            if (oldDesc == 0) {
-                ALOGW("ioConfigChanged() modifying unknown output! %d", ioDesc->mIoHandle);
-                break;
-            }
-
-            deviceId = oldDesc->getDeviceId();
-            mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
-
-            if (deviceId != ioDesc->getDeviceId()) {
-                deviceId = ioDesc->getDeviceId();
+            case AUDIO_CLIENT_STARTED: {
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                if (oldDesc == 0) {
+                    ALOGW("ioConfigChanged() start client on unknown io! %d", ioDesc->mIoHandle);
+                    break;
+                }
+                ALOGV("ioConfigChanged() AUDIO_CLIENT_STARTED  io %d port %d num callbacks %zu",
+                      ioDesc->mIoHandle, ioDesc->mPortId, mAudioDeviceCallbacks.size());
+                oldDesc->mPatch = ioDesc->mPatch;
                 auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
                 if (it != mAudioDeviceCallbacks.end()) {
-                    callbacks = it->second;
+                    auto cbks = it->second;
+                    auto it2 = cbks.find(ioDesc->mPortId);
+                    if (it2 != cbks.end()) {
+                        callbacks.emplace(ioDesc->mPortId, it2->second);
+                        deviceId = oldDesc->getDeviceId();
+                    }
                 }
             }
-            ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
-                    "channel mask %#x frameCount %zu frameCountHAL %zu deviceId %d",
-                    event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
-                    ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
-                    ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->mFrameCountHAL,
-                    ioDesc->getDeviceId());
-
-        } break;
-        case AUDIO_CLIENT_STARTED: {
-            sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
-            if (oldDesc == 0) {
-                ALOGW("ioConfigChanged() start client on unknown io! %d", ioDesc->mIoHandle);
                 break;
-            }
-            ALOGV("ioConfigChanged() AUDIO_CLIENT_STARTED  io %d port %d num callbacks %zu",
-                ioDesc->mIoHandle, ioDesc->mPortId, mAudioDeviceCallbacks.size());
-            oldDesc->mPatch = ioDesc->mPatch;
-            auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
-            if (it != mAudioDeviceCallbacks.end()) {
-                auto cbks = it->second;
-                auto it2 = cbks.find(ioDesc->mPortId);
-                if (it2 != cbks.end()) {
-                   callbacks.emplace(ioDesc->mPortId, it2->second);
-                   deviceId = oldDesc->getDeviceId();
-                }
-            }
-        } break;
         }
 
         for (auto wpCbk : callbacks) {
@@ -638,12 +658,13 @@
         // If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
         cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
     }
+
+    return Status::ok();
 }
 
 status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
-                                                uint32_t sampleRate, audio_format_t format,
-                                                audio_channel_mask_t channelMask, size_t* buffSize)
-{
+        uint32_t sampleRate, audio_format_t format,
+        audio_channel_mask_t channelMask, size_t* buffSize) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) {
         return PERMISSION_DENIED;
@@ -655,7 +676,7 @@
         size_t inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
         if (inBuffSize == 0) {
             ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %#x",
-                    sampleRate, format, channelMask);
+                  sampleRate, format, channelMask);
             return BAD_VALUE;
         }
         // A benign race is possible here: we could overwrite a fresher cache entry
@@ -672,8 +693,8 @@
     return NO_ERROR;
 }
 
-sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle)
-{
+sp<AudioIoDescriptor>
+AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle) {
     sp<AudioIoDescriptor> desc;
     ssize_t index = mIoDescriptors.indexOfKey(ioHandle);
     if (index >= 0) {
@@ -682,19 +703,19 @@
     return desc;
 }
 
-sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle)
-{
+sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle) {
     Mutex::Autolock _l(mLock);
     return getIoDescriptor_l(ioHandle);
 }
 
 status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback(
         const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
-        audio_port_handle_t portId)
-{
+        audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
     Mutex::Autolock _l(mLock);
-    auto& callbacks = mAudioDeviceCallbacks.emplace(audioIo, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
+    auto& callbacks = mAudioDeviceCallbacks.emplace(
+            audioIo,
+            std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
     auto result = callbacks.try_emplace(portId, callback);
     if (!result.second) {
         return INVALID_OPERATION;
@@ -704,8 +725,7 @@
 
 status_t AudioSystem::AudioFlingerClient::removeAudioDeviceCallback(
         const wp<AudioDeviceCallback>& callback __unused, audio_io_handle_t audioIo,
-        audio_port_handle_t portId)
-{
+        audio_port_handle_t portId) {
     ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
     Mutex::Autolock _l(mLock);
     auto it = mAudioDeviceCallbacks.find(audioIo);
@@ -721,8 +741,7 @@
     return NO_ERROR;
 }
 
-/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb)
-{
+/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb) {
     Mutex::Autolock _l(gLockErrorCallbacks);
     gAudioErrorCallbacks.insert(cb);
     return reinterpret_cast<uintptr_t>(cb);
@@ -736,22 +755,25 @@
 /* static */ void AudioSystem::reportError(status_t err) {
     Mutex::Autolock _l(gLockErrorCallbacks);
     for (auto callback : gAudioErrorCallbacks) {
-      callback(err);
+        callback(err);
     }
 }
 
-/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb)
-{
+/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb) {
     Mutex::Autolock _l(gLock);
     gDynPolicyCallback = cb;
 }
 
-/*static*/ void AudioSystem::setRecordConfigCallback(record_config_callback cb)
-{
+/*static*/ void AudioSystem::setRecordConfigCallback(record_config_callback cb) {
     Mutex::Autolock _l(gLock);
     gRecordConfigCallback = cb;
 }
 
+/*static*/ void AudioSystem::setRoutingCallback(routing_callback cb) {
+    Mutex::Autolock _l(gLock);
+    gRoutingCallback = cb;
+}
+
 // client singleton for AudioPolicyService binder interface
 // protected by gLockAPS
 sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
@@ -759,8 +781,7 @@
 
 
 // establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service()
-{
+const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
     sp<IAudioPolicyService> ap;
     sp<AudioPolicyServiceClient> apc;
     {
@@ -800,8 +821,7 @@
 
 // ---------------------------------------------------------------------------
 
-void AudioSystem::onNewAudioModulesAvailable()
-{
+void AudioSystem::onNewAudioModulesAvailable() {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return;
     aps->onNewAudioModulesAvailable();
@@ -809,13 +829,12 @@
 
 status_t AudioSystem::setDeviceConnectionState(audio_devices_t device,
                                                audio_policy_dev_state_t state,
-                                               const char *device_address,
-                                               const char *device_name,
-                                               audio_format_t encodedFormat)
-{
+                                               const char* device_address,
+                                               const char* device_name,
+                                               audio_format_t encodedFormat) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    const char *address = "";
-    const char *name = "";
+    const char* address = "";
+    const char* name = "";
 
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -825,26 +844,46 @@
     if (device_name != NULL) {
         name = device_name;
     }
-    return aps->setDeviceConnectionState(device, state, address, name, encodedFormat);
+
+    media::AudioDevice deviceAidl;
+    deviceAidl.type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    deviceAidl.address = address;
+
+    return statusTFromBinderStatus(
+            aps->setDeviceConnectionState(
+                    deviceAidl,
+                    VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(state)),
+                    name,
+                    VALUE_OR_RETURN_STATUS(legacy2aidl_audio_format_t_AudioFormat(encodedFormat))));
 }
 
 audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
-                                                  const char *device_address)
-{
+                                                               const char* device_address) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
 
-    return aps->getDeviceConnectionState(device, device_address);
+    auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
+        media::AudioDevice deviceAidl;
+        deviceAidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(device));
+        deviceAidl.address = device_address;
+
+        media::AudioPolicyDeviceState result;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                aps->getDeviceConnectionState(deviceAidl, &result)));
+
+        return aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(result);
+    }();
+    return result.value_or(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
 }
 
 status_t AudioSystem::handleDeviceConfigChange(audio_devices_t device,
-                                               const char *device_address,
-                                               const char *device_name,
-                                               audio_format_t encodedFormat)
-{
+                                               const char* device_address,
+                                               const char* device_name,
+                                               audio_format_t encodedFormat) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    const char *address = "";
-    const char *name = "";
+    const char* address = "";
+    const char* name = "";
 
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -854,294 +893,528 @@
     if (device_name != NULL) {
         name = device_name;
     }
-    return aps->handleDeviceConfigChange(device, address, name, encodedFormat);
+
+    media::AudioDevice deviceAidl;
+    deviceAidl.type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    deviceAidl.address = address;
+
+    return statusTFromBinderStatus(
+            aps->handleDeviceConfigChange(deviceAidl, name, VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_format_t_AudioFormat(encodedFormat))));
 }
 
-status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid)
-{
+status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
     if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->setPhoneState(state, uid);
+    return statusTFromBinderStatus(aps->setPhoneState(
+            VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(state)),
+            VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid))));
 }
 
-status_t AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config)
-{
+status_t
+AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setForceUse(usage, config);
+
+    return statusTFromBinderStatus(
+            aps->setForceUse(
+                    VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(usage)),
+                    VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(
+                                    config))));
 }
 
-audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage)
-{
+audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
-    return aps->getForceUse(usage);
+
+    auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
+        media::AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(usage));
+        media::AudioPolicyForcedConfig configAidl;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                aps->getForceUse(usageAidl, &configAidl)));
+        return aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(configAidl);
+    }();
+
+    return result.value_or(AUDIO_POLICY_FORCE_NONE);
 }
 
 
-audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream)
-{
+audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    if (aps == 0) return 0;
-    return aps->getOutput(stream);
+    if (aps == 0) return AUDIO_IO_HANDLE_NONE;
+
+    auto result = [&]() -> ConversionResult<audio_io_handle_t> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        int32_t outputAidl;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->getOutput(streamAidl, &outputAidl)));
+        return aidl2legacy_int32_t_audio_io_handle_t(outputAidl);
+    }();
+
+    return result.value_or(AUDIO_IO_HANDLE_NONE);
 }
 
-status_t AudioSystem::getOutputForAttr(audio_attributes_t *attr,
-                                        audio_io_handle_t *output,
-                                        audio_session_t session,
-                                        audio_stream_type_t *stream,
-                                        pid_t pid,
-                                        uid_t uid,
-                                        const audio_config_t *config,
-                                        audio_output_flags_t flags,
-                                        audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId,
-                                        std::vector<audio_io_handle_t> *secondaryOutputs)
-{
+status_t AudioSystem::getOutputForAttr(audio_attributes_t* attr,
+                                       audio_io_handle_t* output,
+                                       audio_session_t session,
+                                       audio_stream_type_t* stream,
+                                       const AttributionSourceState& attributionSource,
+                                       const audio_config_t* config,
+                                       audio_output_flags_t flags,
+                                       audio_port_handle_t* selectedDeviceId,
+                                       audio_port_handle_t* portId,
+                                       std::vector<audio_io_handle_t>* secondaryOutputs) {
+    if (attr == nullptr) {
+        ALOGE("%s NULL audio attributes", __func__);
+        return BAD_VALUE;
+    }
+    if (output == nullptr) {
+        ALOGE("%s NULL output - shouldn't happen", __func__);
+        return BAD_VALUE;
+    }
+    if (selectedDeviceId == nullptr) {
+        ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
+        return BAD_VALUE;
+    }
+    if (portId == nullptr) {
+        ALOGE("%s NULL portId - shouldn't happen", __func__);
+        return BAD_VALUE;
+    }
+    if (secondaryOutputs == nullptr) {
+        ALOGE("%s NULL secondaryOutputs - shouldn't happen", __func__);
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
-    return aps->getOutputForAttr(attr, output, session, stream, pid, uid,
-                                 config,
-                                 flags, selectedDeviceId, portId, secondaryOutputs);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+    media::AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(*config));
+    int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
+
+    media::GetOutputForAttrResponse responseAidl;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
+                                  selectedDeviceIdAidl, &responseAidl)));
+
+    *output = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(responseAidl.output));
+
+    if (stream != nullptr) {
+        *stream = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioStreamType_audio_stream_type_t(responseAidl.stream));
+    }
+    *selectedDeviceId = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(responseAidl.selectedDeviceId));
+    *portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
+    *secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
+            responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
+
+    return OK;
 }
 
-status_t AudioSystem::startOutput(audio_port_handle_t portId)
-{
+status_t AudioSystem::startOutput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->startOutput(portId);
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(aps->startOutput(portIdAidl));
 }
 
-status_t AudioSystem::stopOutput(audio_port_handle_t portId)
-{
+status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->stopOutput(portId);
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(aps->stopOutput(portIdAidl));
 }
 
-void AudioSystem::releaseOutput(audio_port_handle_t portId)
-{
+void AudioSystem::releaseOutput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return;
-    aps->releaseOutput(portId);
+
+    auto status = [&]() -> status_t {
+        int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_port_handle_t_int32_t(portId));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->releaseOutput(portIdAidl)));
+        return OK;
+    }();
+
+    // Ignore status.
+    (void) status;
 }
 
-status_t AudioSystem::getInputForAttr(const audio_attributes_t *attr,
-                                audio_io_handle_t *input,
-                                audio_unique_id_t riid,
-                                audio_session_t session,
-                                pid_t pid,
-                                uid_t uid,
-                                const String16& opPackageName,
-                                const audio_config_base_t *config,
-                                audio_input_flags_t flags,
-                                audio_port_handle_t *selectedDeviceId,
-                                audio_port_handle_t *portId)
-{
+status_t AudioSystem::getInputForAttr(const audio_attributes_t* attr,
+                                      audio_io_handle_t* input,
+                                      audio_unique_id_t riid,
+                                      audio_session_t session,
+                                      const AttributionSourceState &attributionSource,
+                                      const audio_config_base_t* config,
+                                      audio_input_flags_t flags,
+                                      audio_port_handle_t* selectedDeviceId,
+                                      audio_port_handle_t* portId) {
+    if (attr == NULL) {
+        ALOGE("getInputForAttr NULL attr - shouldn't happen");
+        return BAD_VALUE;
+    }
+    if (input == NULL) {
+        ALOGE("getInputForAttr NULL input - shouldn't happen");
+        return BAD_VALUE;
+    }
+    if (selectedDeviceId == NULL) {
+        ALOGE("getInputForAttr NULL selectedDeviceId - shouldn't happen");
+        return BAD_VALUE;
+    }
+    if (portId == NULL) {
+        ALOGE("getInputForAttr NULL portId - shouldn't happen");
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NO_INIT;
-    return aps->getInputForAttr(
-            attr, input, riid, session, pid, uid, opPackageName,
-            config, flags, selectedDeviceId, portId);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
+    int32_t riidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(riid));
+    int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+    media::AudioConfigBase configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(*config));
+    int32_t flagsAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+    int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
+
+    media::GetInputForAttrResponse response;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, attributionSource,
+                configAidl, flagsAidl, selectedDeviceIdAidl, &response)));
+
+    *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
+    *selectedDeviceId = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(response.selectedDeviceId));
+    *portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(response.portId));
+
+    return OK;
 }
 
-status_t AudioSystem::startInput(audio_port_handle_t portId)
-{
+status_t AudioSystem::startInput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->startInput(portId);
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(aps->startInput(portIdAidl));
 }
 
-status_t AudioSystem::stopInput(audio_port_handle_t portId)
-{
+status_t AudioSystem::stopInput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->stopInput(portId);
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(aps->stopInput(portIdAidl));
 }
 
-void AudioSystem::releaseInput(audio_port_handle_t portId)
-{
+void AudioSystem::releaseInput(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return;
-    aps->releaseInput(portId);
+
+    auto status = [&]() -> status_t {
+        int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_port_handle_t_int32_t(portId));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->releaseInput(portIdAidl)));
+        return OK;
+    }();
+
+    // Ignore status.
+    (void) status;
 }
 
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
-                                    int indexMin,
-                                    int indexMax)
-{
+                                       int indexMin,
+                                       int indexMax) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->initStreamVolume(stream, indexMin, indexMax);
+
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t indexMinAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMin));
+    int32_t indexMaxAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMax));
+    return statusTFromBinderStatus(
+            aps->initStreamVolume(streamAidl, indexMinAidl, indexMaxAidl));
 }
 
 status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
                                            int index,
-                                           audio_devices_t device)
-{
+                                           audio_devices_t device) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setStreamVolumeIndex(stream, index, device);
+
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
+    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    return statusTFromBinderStatus(
+            aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl));
 }
 
 status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
-                                           int *index,
-                                           audio_devices_t device)
-{
+                                           int* index,
+                                           audio_devices_t device) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getStreamVolumeIndex(stream, index, device);
+
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    int32_t indexAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getStreamVolumeIndex(streamAidl, deviceAidl, &indexAidl)));
+    if (index != nullptr) {
+        *index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(indexAidl));
+    }
+    return OK;
 }
 
-status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t &attr,
+status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int index,
-                                                  audio_devices_t device)
-{
+                                                  audio_devices_t device) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setVolumeIndexForAttributes(attr, index, device);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
+    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    return statusTFromBinderStatus(
+            aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl));
 }
 
-status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                  int &index,
-                                                  audio_devices_t device)
-{
+status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
+                                                  int& index,
+                                                  audio_devices_t device) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getVolumeIndexForAttributes(attr, index, device);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    int32_t indexAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getVolumeIndexForAttributes(attrAidl, deviceAidl, &indexAidl)));
+    index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(indexAidl));
+    return OK;
 }
 
-status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t &attr, int &index)
-{
+status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getMaxVolumeIndexForAttributes(attr, index);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    int32_t indexAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getMaxVolumeIndexForAttributes(attrAidl, &indexAidl)));
+    index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(indexAidl));
+    return OK;
 }
 
-status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index)
-{
+status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getMinVolumeIndexForAttributes(attr, index);
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    int32_t indexAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getMinVolumeIndexForAttributes(attrAidl, &indexAidl)));
+    index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(indexAidl));
+    return OK;
 }
 
-uint32_t AudioSystem::getStrategyForStream(audio_stream_type_t stream)
-{
+product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PRODUCT_STRATEGY_NONE;
-    return aps->getStrategyForStream(stream);
+
+    auto result = [&]() -> ConversionResult<product_strategy_t> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        int32_t resultAidl;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                aps->getStrategyForStream(streamAidl, &resultAidl)));
+        return aidl2legacy_int32_t_product_strategy_t(resultAidl);
+    }();
+    return result.value_or(PRODUCT_STRATEGY_NONE);
 }
 
-audio_devices_t AudioSystem::getDevicesForStream(audio_stream_type_t stream)
-{
+audio_devices_t AudioSystem::getDevicesForStream(audio_stream_type_t stream) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return AUDIO_DEVICE_NONE;
-    return aps->getDevicesForStream(stream);
+
+    auto result = [&]() -> ConversionResult<audio_devices_t> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        int32_t resultAidl;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                aps->getDevicesForStream(streamAidl, &resultAidl)));
+        return aidl2legacy_int32_t_audio_devices_t(resultAidl);
+    }();
+    return result.value_or(AUDIO_DEVICE_NONE);
 }
 
-status_t AudioSystem::getDevicesForAttributes(const AudioAttributes &aa,
-                                              AudioDeviceTypeAddrVector *devices) {
+status_t AudioSystem::getDevicesForAttributes(const AudioAttributes& aa,
+                                              AudioDeviceTypeAddrVector* devices) {
     if (devices == nullptr) {
         return BAD_VALUE;
     }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getDevicesForAttributes(aa, devices);
+
+    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    std::vector<media::AudioDevice> retAidl;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, &retAidl)));
+    *devices = VALUE_OR_RETURN_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(
+                    retAidl,
+                    aidl2legacy_AudioDeviceTypeAddress));
+    return OK;
 }
 
-audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
-{
+audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     // FIXME change return type to status_t, and return PERMISSION_DENIED here
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
-    return aps->getOutputForEffect(desc);
+
+    auto result = [&]() -> ConversionResult<audio_io_handle_t> {
+        media::EffectDescriptor descAidl = VALUE_OR_RETURN(
+                legacy2aidl_effect_descriptor_t_EffectDescriptor(*desc));
+        int32_t retAidl;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->getOutputForEffect(descAidl, &retAidl)));
+        return aidl2legacy_int32_t_audio_io_handle_t(retAidl);
+    }();
+
+    return result.value_or(AUDIO_IO_HANDLE_NONE);
 }
 
-status_t AudioSystem::registerEffect(const effect_descriptor_t *desc,
-                                audio_io_handle_t io,
-                                uint32_t strategy,
-                                audio_session_t session,
-                                int id)
-{
+status_t AudioSystem::registerEffect(const effect_descriptor_t* desc,
+                                     audio_io_handle_t io,
+                                     product_strategy_t strategy,
+                                     audio_session_t session,
+                                     int id) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->registerEffect(desc, io, strategy, session, id);
+
+    media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_effect_descriptor_t_EffectDescriptor(*desc));
+    int32_t ioAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(io));
+    int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
+    int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+    int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
+    return statusTFromBinderStatus(
+            aps->registerEffect(descAidl, ioAidl, strategyAidl, sessionAidl, idAidl));
 }
 
-status_t AudioSystem::unregisterEffect(int id)
-{
+status_t AudioSystem::unregisterEffect(int id) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->unregisterEffect(id);
+
+    int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
+    return statusTFromBinderStatus(
+            aps->unregisterEffect(idAidl));
 }
 
-status_t AudioSystem::setEffectEnabled(int id, bool enabled)
-{
+status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setEffectEnabled(id, enabled);
+
+    int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
+    return statusTFromBinderStatus(
+            aps->setEffectEnabled(idAidl, enabled));
 }
 
-status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io)
-{
+status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->moveEffectsToIo(ids, io);
+
+    std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(ids, convertReinterpret<int32_t, int>));
+    int32_t ioAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(io));
+    return statusTFromBinderStatus(aps->moveEffectsToIo(idsAidl, ioAidl));
 }
 
-status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs)
-{
+status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
-    *state = aps->isStreamActive(stream, inPastMs);
-    return NO_ERROR;
+
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t inPastMsAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(inPastMs));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->isStreamActive(streamAidl, inPastMsAidl, state)));
+    return OK;
 }
 
 status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
-        uint32_t inPastMs)
-{
+                                             uint32_t inPastMs) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
-    *state = aps->isStreamActiveRemotely(stream, inPastMs);
-    return NO_ERROR;
+
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t inPastMsAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(inPastMs));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->isStreamActiveRemotely(streamAidl, inPastMsAidl, state)));
+    return OK;
 }
 
-status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state)
-{
+status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
-    *state = aps->isSourceActive(stream);
-    return NO_ERROR;
+
+    media::AudioSourceType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(stream));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->isSourceActive(streamAidl, state)));
+    return OK;
 }
 
-uint32_t AudioSystem::getPrimaryOutputSamplingRate()
-{
+uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputSamplingRate();
 }
 
-size_t AudioSystem::getPrimaryOutputFrameCount()
-{
+size_t AudioSystem::getPrimaryOutputFrameCount() {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return 0;
     return af->getPrimaryOutputFrameCount();
 }
 
-status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory)
-{
+status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setLowRamDevice(isLowRamDevice, totalMemory);
 }
 
-void AudioSystem::clearAudioConfigCache()
-{
+void AudioSystem::clearAudioConfigCache() {
     // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
     ALOGV("clearAudioConfigCache()");
     {
@@ -1160,74 +1433,152 @@
 status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
-    return aps->setSupportedSystemUsages(systemUsages);
+
+    std::vector<media::AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioUsage>>(systemUsages,
+                                                             legacy2aidl_audio_usage_t_AudioUsage));
+    return statusTFromBinderStatus(aps->setSupportedSystemUsages(systemUsagesAidl));
 }
 
-status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) {
+status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
-    return aps->setAllowedCapturePolicy(uid, flags);
+
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    int32_t capturePolicyAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_flags_mask_t_int32_t_mask(capturePolicy));
+    return statusTFromBinderStatus(aps->setAllowedCapturePolicy(uidAidl, capturePolicyAidl));
 }
 
-bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
-{
-    ALOGV("isOffloadSupported()");
+audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
+    ALOGV("%s", __func__);
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    if (aps == 0) return false;
-    return aps->isOffloadSupported(info);
+    if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
+
+    auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
+        media::AudioOffloadInfo infoAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_offload_info_t_AudioOffloadInfo(info));
+        media::AudioOffloadMode retAidl;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->getOffloadSupport(infoAidl, &retAidl)));
+        return aidl2legacy_AudioOffloadMode_audio_offload_mode_t(retAidl);
+    }();
+
+    return result.value_or(AUDIO_OFFLOAD_NOT_SUPPORTED);
 }
 
 status_t AudioSystem::listAudioPorts(audio_port_role_t role,
                                      audio_port_type_t type,
-                                     unsigned int *num_ports,
-                                     struct audio_port *ports,
-                                     unsigned int *generation)
-{
+                                     unsigned int* num_ports,
+                                     struct audio_port_v7* ports,
+                                     unsigned int* generation) {
+    if (num_ports == nullptr || (*num_ports != 0 && ports == nullptr) ||
+        generation == nullptr) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->listAudioPorts(role, type, num_ports, ports, generation);
+
+    media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_role_t_AudioPortRole(role));
+    media::AudioPortType typeAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_type_t_AudioPortType(type));
+    media::Int numPortsAidl;
+    numPortsAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_ports));
+    std::vector<media::AudioPort> portsAidl;
+    int32_t generationAidl;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->listAudioPorts(roleAidl, typeAidl, &numPortsAidl, &portsAidl, &generationAidl)));
+    *num_ports = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPortsAidl.value));
+    *generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
+    RETURN_STATUS_IF_ERROR(convertRange(portsAidl.begin(), portsAidl.end(), ports,
+                                        aidl2legacy_AudioPort_audio_port_v7));
+    return OK;
 }
 
-status_t AudioSystem::getAudioPort(struct audio_port *port)
-{
+status_t AudioSystem::getAudioPort(struct audio_port_v7* port) {
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getAudioPort(port);
+
+    media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(aps->getAudioPort(portAidl, &portAidl)));
+    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(portAidl));
+    return OK;
 }
 
-status_t AudioSystem::createAudioPatch(const struct audio_patch *patch,
-                                   audio_patch_handle_t *handle)
-{
+status_t AudioSystem::createAudioPatch(const struct audio_patch* patch,
+                                       audio_patch_handle_t* handle) {
+    if (patch == nullptr || handle == nullptr) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->createAudioPatch(patch, handle);
+
+    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_patch_AudioPatch(*patch));
+    int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(aps->createAudioPatch(patchAidl, handleAidl, &handleAidl)));
+    *handle = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
+    return OK;
 }
 
-status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle)
-{
+status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->releaseAudioPatch(handle);
+
+    int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
+    return statusTFromBinderStatus(aps->releaseAudioPatch(handleAidl));
 }
 
-status_t AudioSystem::listAudioPatches(unsigned int *num_patches,
-                                  struct audio_patch *patches,
-                                  unsigned int *generation)
-{
+status_t AudioSystem::listAudioPatches(unsigned int* num_patches,
+                                       struct audio_patch* patches,
+                                       unsigned int* generation) {
+    if (num_patches == nullptr || (*num_patches != 0 && patches == nullptr) ||
+        generation == nullptr) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->listAudioPatches(num_patches, patches, generation);
+
+    media::Int numPatchesAidl;
+    numPatchesAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
+    std::vector<media::AudioPatch> patchesAidl;
+    int32_t generationAidl;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->listAudioPatches(&numPatchesAidl, &patchesAidl, &generationAidl)));
+    *num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPatchesAidl.value));
+    *generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
+    RETURN_STATUS_IF_ERROR(convertRange(patchesAidl.begin(), patchesAidl.end(), patches,
+                                        aidl2legacy_AudioPatch_audio_patch));
+    return OK;
 }
 
-status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)
-{
+status_t AudioSystem::setAudioPortConfig(const struct audio_port_config* config) {
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setAudioPortConfig(config);
+
+    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfig(*config));
+    return statusTFromBinderStatus(aps->setAudioPortConfig(configAidl));
 }
 
-status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback)
-{
+status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -1243,8 +1594,7 @@
 }
 
 /*static*/
-status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback)
-{
+status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -1259,8 +1609,7 @@
     return (ret < 0) ? INVALID_OPERATION : NO_ERROR;
 }
 
-status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback)
-{
+status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -1275,8 +1624,7 @@
     return (ret < 0) ? INVALID_OPERATION : NO_ERROR;
 }
 
-status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback)
-{
+status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
@@ -1293,8 +1641,7 @@
 
 status_t AudioSystem::addAudioDeviceCallback(
         const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
-        audio_port_handle_t portId)
-{
+        audio_port_handle_t portId) {
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc == 0) {
         return NO_INIT;
@@ -1311,8 +1658,7 @@
 
 status_t AudioSystem::removeAudioDeviceCallback(
         const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
-        audio_port_handle_t portId)
-{
+        audio_port_handle_t portId) {
     const sp<AudioFlingerClient> afc = getAudioFlingerClient();
     if (afc == 0) {
         return NO_INIT;
@@ -1320,8 +1666,7 @@
     return afc->removeAudioDeviceCallback(callback, audioIo, portId);
 }
 
-audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo)
-{
+audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
@@ -1331,224 +1676,346 @@
     return desc->getDeviceId();
 }
 
-status_t AudioSystem::acquireSoundTriggerSession(audio_session_t *session,
-                                       audio_io_handle_t *ioHandle,
-                                       audio_devices_t *device)
-{
+status_t AudioSystem::acquireSoundTriggerSession(audio_session_t* session,
+                                                 audio_io_handle_t* ioHandle,
+                                                 audio_devices_t* device) {
+    if (session == nullptr || ioHandle == nullptr || device == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->acquireSoundTriggerSession(session, ioHandle, device);
+
+    media::SoundTriggerSession retAidl;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(aps->acquireSoundTriggerSession(&retAidl)));
+    *session = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_session_t(retAidl.session));
+    *ioHandle = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(retAidl.ioHandle));
+    *device = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_devices_t(retAidl.device));
+    return OK;
 }
 
-status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session)
-{
+status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->releaseSoundTriggerSession(session);
+
+    int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+    return statusTFromBinderStatus(aps->releaseSoundTriggerSession(sessionAidl));
 }
 
-audio_mode_t AudioSystem::getPhoneState()
-{
+audio_mode_t AudioSystem::getPhoneState() {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return AUDIO_MODE_INVALID;
-    return aps->getPhoneState();
+
+    auto result = [&]() -> ConversionResult<audio_mode_t> {
+        media::AudioMode retAidl;
+        RETURN_IF_ERROR(statusTFromBinderStatus(aps->getPhoneState(&retAidl)));
+        return aidl2legacy_AudioMode_audio_mode_t(retAidl);
+    }();
+
+    return result.value_or(AUDIO_MODE_INVALID);
 }
 
-status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration)
-{
+status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->registerPolicyMixes(mixes, registration);
+
+    size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
+    std::vector<media::AudioMix> mixesAidl;
+    RETURN_STATUS_IF_ERROR(
+            convertRange(mixes.begin(), mixes.begin() + mixesSize, std::back_inserter(mixesAidl),
+                         legacy2aidl_AudioMix));
+    return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
 }
 
-status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
-{
+status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setUidDeviceAffinities(uid, devices);
+
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(aps->setUidDeviceAffinities(uidAidl, devicesAidl));
 }
 
 status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->removeUidDeviceAffinities(uid);
+
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    return statusTFromBinderStatus(aps->removeUidDeviceAffinities(uidAidl));
 }
 
 status_t AudioSystem::setUserIdDeviceAffinities(int userId,
-                                                const Vector<AudioDeviceTypeAddr>& devices)
-{
+                                                const AudioDeviceTypeAddrVector& devices) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setUserIdDeviceAffinities(userId, devices);
+
+    int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->setUserIdDeviceAffinities(userIdAidl, devicesAidl));
 }
 
-status_t AudioSystem::removeUserIdDeviceAffinities(int userId)
-{
+status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->removeUserIdDeviceAffinities(userId);
+    int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
+    return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
 }
 
-status_t AudioSystem::startAudioSource(const struct audio_port_config *source,
-                                       const audio_attributes_t *attributes,
-                                       audio_port_handle_t *portId)
-{
+status_t AudioSystem::startAudioSource(const struct audio_port_config* source,
+                                       const audio_attributes_t* attributes,
+                                       audio_port_handle_t* portId) {
+    if (source == nullptr || attributes == nullptr || portId == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->startAudioSource(source, attributes, portId);
+
+    media::AudioPortConfig sourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfig(*source));
+    media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attributes));
+    int32_t portIdAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->startAudioSource(sourceAidl, attributesAidl, &portIdAidl)));
+    *portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+    return OK;
 }
 
-status_t AudioSystem::stopAudioSource(audio_port_handle_t portId)
-{
+status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->stopAudioSource(portId);
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(aps->stopAudioSource(portIdAidl));
 }
 
-status_t AudioSystem::setMasterMono(bool mono)
-{
+status_t AudioSystem::setMasterMono(bool mono) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setMasterMono(mono);
+    return statusTFromBinderStatus(aps->setMasterMono(mono));
 }
 
-status_t AudioSystem::getMasterMono(bool *mono)
-{
+status_t AudioSystem::getMasterMono(bool* mono) {
+    if (mono == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getMasterMono(mono);
+    return statusTFromBinderStatus(aps->getMasterMono(mono));
 }
 
-status_t AudioSystem::setMasterBalance(float balance)
-{
+status_t AudioSystem::setMasterBalance(float balance) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->setMasterBalance(balance);
 }
 
-status_t AudioSystem::getMasterBalance(float *balance)
-{
+status_t AudioSystem::getMasterBalance(float* balance) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMasterBalance(balance);
 }
 
-float AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device)
-{
+float
+AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return NAN;
-    return aps->getStreamVolumeDB(stream, index, device);
+
+    auto result = [&]() -> ConversionResult<float> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        int32_t indexAidl = VALUE_OR_RETURN(convertIntegral<int32_t>(index));
+        int32_t deviceAidl = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(device));
+        float retAidl;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                aps->getStreamVolumeDB(streamAidl, indexAidl, deviceAidl, &retAidl)));
+        return retAidl;
+    }();
+    return result.value_or(NAN);
 }
 
-status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
-{
+status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfo>* microphones) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMicrophones(microphones);
 }
 
 status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
-  const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
-  if (af == nullptr) return PERMISSION_DENIED;
-  return af->setAudioHalPids(pids);
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) return PERMISSION_DENIED;
+    return af->setAudioHalPids(pids);
 }
 
-status_t AudioSystem::getSurroundFormats(unsigned int *numSurroundFormats,
-                                         audio_format_t *surroundFormats,
-                                         bool *surroundFormatsEnabled,
-                                         bool reported)
-{
+status_t AudioSystem::getSurroundFormats(unsigned int* numSurroundFormats,
+                                         audio_format_t* surroundFormats,
+                                         bool* surroundFormatsEnabled) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 &&
+                                          (surroundFormats == nullptr ||
+                                           surroundFormatsEnabled == nullptr))) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getSurroundFormats(
-            numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
+    media::Int numSurroundFormatsAidl;
+    numSurroundFormatsAidl.value =
+            VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
+    std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
+    std::vector<bool> surroundFormatsEnabledAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl,
+                                    &surroundFormatsEnabledAidl)));
+
+    *numSurroundFormats = VALUE_OR_RETURN_STATUS(
+            convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
+    RETURN_STATUS_IF_ERROR(
+            convertRange(surroundFormatsAidl.begin(), surroundFormatsAidl.end(), surroundFormats,
+                         aidl2legacy_AudioFormat_audio_format_t));
+    std::copy(surroundFormatsEnabledAidl.begin(), surroundFormatsEnabledAidl.end(),
+            surroundFormatsEnabled);
+    return OK;
 }
 
-status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
-{
+status_t AudioSystem::getReportedSurroundFormats(unsigned int* numSurroundFormats,
+                                                 audio_format_t* surroundFormats) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 && surroundFormats == nullptr)) {
+        return BAD_VALUE;
+    }
+
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setSurroundFormatEnabled(audioFormat, enabled);
+    media::Int numSurroundFormatsAidl;
+    numSurroundFormatsAidl.value =
+            VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
+    std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getReportedSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl)));
+
+    *numSurroundFormats = VALUE_OR_RETURN_STATUS(
+            convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
+    RETURN_STATUS_IF_ERROR(
+            convertRange(surroundFormatsAidl.begin(), surroundFormatsAidl.end(), surroundFormats,
+                         aidl2legacy_AudioFormat_audio_format_t));
+    return OK;
 }
 
-status_t AudioSystem::setAssistantUid(uid_t uid)
-{
-    const sp <IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->setAssistantUid(uid);
+    media::audio::common::AudioFormat audioFormatAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_format_t_AudioFormat(audioFormat));
+    return statusTFromBinderStatus(
+            aps->setSurroundFormatEnabled(audioFormatAidl, enabled));
 }
 
-status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids)
-{
-    const sp <IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+status_t AudioSystem::setAssistantUid(uid_t uid) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->setA11yServicesUids(uids);
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    return statusTFromBinderStatus(aps->setAssistantUid(uidAidl));
 }
 
-status_t AudioSystem::setCurrentImeUid(uid_t uid)
-{
-    const sp <IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    return aps->setCurrentImeUid(uid);
+    std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
+    return statusTFromBinderStatus(aps->setA11yServicesUids(uidsAidl));
 }
 
-bool AudioSystem::isHapticPlaybackSupported()
-{
+status_t AudioSystem::setCurrentImeUid(uid_t uid) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    return statusTFromBinderStatus(aps->setCurrentImeUid(uidAidl));
+}
+
+bool AudioSystem::isHapticPlaybackSupported() {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return false;
-    return aps->isHapticPlaybackSupported();
+
+    auto result = [&]() -> ConversionResult<bool> {
+        bool retVal;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->isHapticPlaybackSupported(&retVal)));
+        return retVal;
+    }();
+    return result.value_or(false);
 }
 
 status_t AudioSystem::getHwOffloadEncodingFormatsSupportedForA2DP(
-                                std::vector<audio_format_t> *formats) {
-    const sp <IAudioPolicyService>
-        & aps = AudioSystem::get_audio_policy_service();
+        std::vector<audio_format_t>* formats) {
+    if (formats == nullptr) {
+        return BAD_VALUE;
+    }
+
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getHwOffloadEncodingFormatsSupportedForA2DP(formats);
+
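+    // Fetch the AIDL format list and convert it back to legacy audio_format_t values.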
+    std::vector<media::audio::common::AudioFormat> formatsAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getHwOffloadEncodingFormatsSupportedForA2DP(&formatsAidl)));
+    *formats = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<audio_format_t>>(formatsAidl,
+                                                          aidl2legacy_AudioFormat_audio_format_t));
+    return OK;
 }
 
-status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector &strategies)
-{
+status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->listAudioProductStrategies(strategies);
+
+    std::vector<media::AudioProductStrategy> strategiesAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->listAudioProductStrategies(&strategiesAidl)));
+    strategies = VALUE_OR_RETURN_STATUS(
+            convertContainer<AudioProductStrategyVector>(strategiesAidl,
+                                                         aidl2legacy_AudioProductStrategy));
+    return OK;
 }
 
-audio_attributes_t AudioSystem::streamTypeToAttributes(audio_stream_type_t stream)
-{
+audio_attributes_t AudioSystem::streamTypeToAttributes(audio_stream_type_t stream) {
     AudioProductStrategyVector strategies;
     listAudioProductStrategies(strategies);
-    for (const auto &strategy : strategies) {
+    for (const auto& strategy : strategies) {
         auto attrVect = strategy.getAudioAttributes();
-        auto iter = std::find_if(begin(attrVect), end(attrVect), [&stream](const auto &attributes) {
-                         return attributes.getStreamType() == stream; });
+        auto iter = std::find_if(begin(attrVect), end(attrVect), [&stream](const auto& attributes) {
+            return attributes.getStreamType() == stream;
+        });
         if (iter != end(attrVect)) {
             return iter->getAttributes();
         }
     }
-    ALOGE("invalid stream type %s when converting to attributes",  toString(stream).c_str());
+    ALOGE("invalid stream type %s when converting to attributes", toString(stream).c_str());
     return AUDIO_ATTRIBUTES_INITIALIZER;
 }
 
-audio_stream_type_t AudioSystem::attributesToStreamType(const audio_attributes_t &attr)
-{
+audio_stream_type_t AudioSystem::attributesToStreamType(const audio_attributes_t& attr) {
     product_strategy_t psId;
     status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(AudioAttributes(attr), psId);
     if (ret != NO_ERROR) {
-        ALOGE("no strategy found for attributes %s",  toString(attr).c_str());
+        ALOGE("no strategy found for attributes %s", toString(attr).c_str());
         return AUDIO_STREAM_MUSIC;
     }
     AudioProductStrategyVector strategies;
     listAudioProductStrategies(strategies);
-    for (const auto &strategy : strategies) {
+    for (const auto& strategy : strategies) {
         if (strategy.getId() == psId) {
             auto attrVect = strategy.getAudioAttributes();
-            auto iter = std::find_if(begin(attrVect), end(attrVect), [&attr](const auto &refAttr) {
-                             return AudioProductStrategy::attributesMatches(
-                                 refAttr.getAttributes(), attr); });
+            auto iter = std::find_if(begin(attrVect), end(attrVect), [&attr](const auto& refAttr) {
+                return AudioProductStrategy::attributesMatches(
+                        refAttr.getAttributes(), attr);
+            });
             if (iter != end(attrVect)) {
                 return iter->getStreamType();
             }
@@ -1559,125 +2026,274 @@
             // virtual source is not expected to have an associated product strategy
             break;
         default:
-            ALOGE("invalid attributes %s when converting to stream",  toString(attr).c_str());
+            ALOGE("invalid attributes %s when converting to stream", toString(attr).c_str());
             break;
     }
     return AUDIO_STREAM_MUSIC;
 }
 
-status_t AudioSystem::getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                            product_strategy_t &productStrategy)
-{
+status_t AudioSystem::getProductStrategyFromAudioAttributes(const AudioAttributes& aa,
+                                                            product_strategy_t& productStrategy,
+                                                            bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getProductStrategyFromAudioAttributes(aa,productStrategy);
+
+    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    int32_t productStrategyAidl;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getProductStrategyFromAudioAttributes(aaAidl, fallbackOnDefault,
+            &productStrategyAidl)));
+    productStrategy = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(productStrategyAidl));
+    return OK;
 }
 
-status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector &groups)
-{
+status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->listAudioVolumeGroups(groups);
+
+    std::vector<media::AudioVolumeGroup> groupsAidl;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(aps->listAudioVolumeGroups(&groupsAidl)));
+    groups = VALUE_OR_RETURN_STATUS(
+            convertContainer<AudioVolumeGroupVector>(groupsAidl, aidl2legacy_AudioVolumeGroup));
+    return OK;
 }
 
-status_t AudioSystem::getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                        volume_group_t &volumeGroup)
-{
+status_t AudioSystem::getVolumeGroupFromAudioAttributes(const AudioAttributes& aa,
+                                                        volume_group_t& volumeGroup,
+                                                        bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->getVolumeGroupFromAudioAttributes(aa, volumeGroup);
+
+    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    int32_t volumeGroupAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getVolumeGroupFromAudioAttributes(aaAidl, fallbackOnDefault, &volumeGroupAidl)));
+    volumeGroup = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_volume_group_t(volumeGroupAidl));
+    return OK;
 }
 
-status_t AudioSystem::setRttEnabled(bool enabled)
-{
+status_t AudioSystem::setRttEnabled(bool enabled) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    return aps->setRttEnabled(enabled);
+    return statusTFromBinderStatus(aps->setRttEnabled(enabled));
 }
 
-bool AudioSystem::isCallScreenModeSupported()
-{
+bool AudioSystem::isCallScreenModeSupported() {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return false;
-    return aps->isCallScreenModeSupported();
+
+    auto result = [&]() -> ConversionResult<bool> {
+        bool retAidl;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->isCallScreenModeSupported(&retAidl)));
+        return retAidl;
+    }();
+    return result.value_or(false);
 }
 
-status_t AudioSystem::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                    const AudioDeviceTypeAddr &device)
-{
+status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                device_role_t role,
+                                                const AudioDeviceTypeAddrVector& devices) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->setPreferredDeviceForStrategy(strategy, device);
+
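+    // Convert the strategy, role and device list to AIDL before the binder call.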
+    int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->setDevicesRoleForStrategy(strategyAidl, roleAidl, devicesAidl));
 }
 
-status_t AudioSystem::removePreferredDeviceForStrategy(product_strategy_t strategy)
-{
+status_t
+AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->removePreferredDeviceForStrategy(strategy);
+    int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    return statusTFromBinderStatus(
+            aps->removeDevicesRoleForStrategy(strategyAidl, roleAidl));
 }
 
-status_t AudioSystem::getPreferredDeviceForStrategy(product_strategy_t strategy,
-        AudioDeviceTypeAddr &device)
-{
+status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                   device_role_t role,
+                                                   AudioDeviceTypeAddrVector& devices) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    return aps->getPreferredDeviceForStrategy(strategy, device);
+    int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getDevicesForRoleAndStrategy(strategyAidl, roleAidl, &devicesAidl)));
+    devices = VALUE_OR_RETURN_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+    return OK;
+}
+
+status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                     device_role_t role,
+                                                     const AudioDeviceTypeAddrVector& devices) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->setDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
+}
+
+status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                     device_role_t role,
+                                                     const AudioDeviceTypeAddrVector& devices) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->addDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
+}
+
+status_t AudioSystem::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->removeDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
+}
+
+status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    return statusTFromBinderStatus(
+            aps->clearDevicesRoleForCapturePreset(audioSourceAidl, roleAidl));
+}
+
+status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        AudioDeviceTypeAddrVector& devices) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<media::AudioDevice> devicesAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getDevicesForRoleAndCapturePreset(audioSourceAidl, roleAidl, &devicesAidl)));
+    devices = VALUE_OR_RETURN_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+    return OK;
 }
 
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
 public:
+    CaptureStateListenerImpl(
+            const sp<IAudioPolicyService>& aps,
+            const sp<AudioSystem::CaptureStateListener>& listener)
+            : mAps(aps), mListener(listener) {}
+
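+    // Registers with the policy service, reports the initial capture state and links to its death.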
+    void init() {
+        bool active;
+        status_t status = statusTFromBinderStatus(
+                mAps->registerSoundTriggerCaptureStateListener(this, &active));
+        if (status != NO_ERROR) {
+            mListener->onServiceDied();
+            return;
+        }
+        mListener->onStateChanged(active);
+        IInterface::asBinder(mAps)->linkToDeath(this);
+    }
+
     binder::Status setCaptureState(bool active) override {
         Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
-        gSoundTriggerCaptureStateListener->onStateChanged(active);
+        mListener->onStateChanged(active);
         return binder::Status::ok();
     }
 
     void binderDied(const wp<IBinder>&) override {
         Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
-        gSoundTriggerCaptureStateListener->onServiceDied();
+        mListener->onServiceDied();
         gSoundTriggerCaptureStateListener = nullptr;
     }
+
+private:
+    // Need this in order to keep the death recipient alive.
+    sp<IAudioPolicyService> mAps;
+    sp<AudioSystem::CaptureStateListener> mListener;
 };
 
 status_t AudioSystem::registerSoundTriggerCaptureStateListener(
-    const sp<CaptureStateListener>& listener) {
+        const sp<CaptureStateListener>& listener) {
+    LOG_ALWAYS_FATAL_IF(listener == nullptr);
+
     const sp<IAudioPolicyService>& aps =
             AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
 
-    sp<CaptureStateListenerImpl> wrapper = new CaptureStateListenerImpl();
-
     Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
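+    // The wrapper keeps the policy service alive and does registration and death-linking in init().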
+    gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
+    gSoundTriggerCaptureStateListener->init();
 
-    bool active;
-    status_t status =
-        aps->registerSoundTriggerCaptureStateListener(wrapper, &active);
-    if (status != NO_ERROR) {
-        listener->onServiceDied();
-        return NO_ERROR;
-    }
-    gSoundTriggerCaptureStateListener = listener;
-    listener->onStateChanged(active);
-    sp<IBinder> binder = IInterface::asBinder(aps);
-    binder->linkToDeath(wrapper);
     return NO_ERROR;
 }
 
+status_t AudioSystem::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setVibratorInfos(vibratorInfos);
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
-        const sp<AudioPortCallback>& callback)
-{
+        const sp<AudioPortCallback>& callback) {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
         if (mAudioPortCallbacks[i] == callback) {
@@ -1689,8 +2305,7 @@
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback(
-        const sp<AudioPortCallback>& callback)
-{
+        const sp<AudioPortCallback>& callback) {
     Mutex::Autolock _l(mLock);
     size_t i;
     for (i = 0; i < mAudioPortCallbacks.size(); i++) {
@@ -1706,26 +2321,25 @@
 }
 
 
-void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
-{
+Status AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate() {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
         mAudioPortCallbacks[i]->onAudioPortListUpdate();
     }
+    return Status::ok();
 }
 
-void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
-{
+Status AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate() {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
         mAudioPortCallbacks[i]->onAudioPatchListUpdate();
     }
+    return Status::ok();
 }
 
 // ----------------------------------------------------------------------------
 int AudioSystem::AudioPolicyServiceClient::addAudioVolumeGroupCallback(
-        const sp<AudioVolumeGroupCallback>& callback)
-{
+        const sp<AudioVolumeGroupCallback>& callback) {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
         if (mAudioVolumeGroupCallback[i] == callback) {
@@ -1737,8 +2351,7 @@
 }
 
 int AudioSystem::AudioPolicyServiceClient::removeAudioVolumeGroupCallback(
-        const sp<AudioVolumeGroupCallback>& callback)
-{
+        const sp<AudioVolumeGroupCallback>& callback) {
     Mutex::Autolock _l(mLock);
     size_t i;
     for (i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
@@ -1753,20 +2366,26 @@
     return mAudioVolumeGroupCallback.size();
 }
 
-void AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(volume_group_t group,
-                                                                      int flags)
-{
+Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
+                                                                        int32_t flags) {
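+    // Convert the AIDL group id and flags back to legacy types before dispatching to callbacks.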
+    volume_group_t groupLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_volume_group_t(group));
+    int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
+
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
-        mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(group, flags);
+        mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
     }
+    return Status::ok();
 }
 // ----------------------------------------------------------------------------
 
-void AudioSystem::AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(
-        String8 regId, int32_t state)
-{
-    ALOGV("AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(%s, %d)", regId.string(), state);
+Status AudioSystem::AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(
+        const ::std::string& regId, int32_t state) {
+    ALOGV("AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(%s, %d)", regId.c_str(), state);
+
+    String8 regIdLegacy = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_string_view_String8(regId));
+    int stateLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(state));
     dynamic_policy_callback cb = NULL;
     {
         Mutex::Autolock _l(AudioSystem::gLock);
@@ -1774,19 +2393,20 @@
     }
 
     if (cb != NULL) {
-        cb(DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE, regId, state);
+        cb(DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE, regIdLegacy, stateLegacy);
     }
+    return Status::ok();
 }
 
-void AudioSystem::AudioPolicyServiceClient::onRecordingConfigurationUpdate(
-                                                int event,
-                                                const record_client_info_t *clientInfo,
-                                                const audio_config_base_t *clientConfig,
-                                                std::vector<effect_descriptor_t> clientEffects,
-                                                const audio_config_base_t *deviceConfig,
-                                                std::vector<effect_descriptor_t> effects,
-                                                audio_patch_handle_t patchHandle,
-                                                audio_source_t source) {
+Status AudioSystem::AudioPolicyServiceClient::onRecordingConfigurationUpdate(
+        int32_t event,
+        const media::RecordClientInfo& clientInfo,
+        const media::AudioConfigBase& clientConfig,
+        const std::vector<media::EffectDescriptor>& clientEffects,
+        const media::AudioConfigBase& deviceConfig,
+        const std::vector<media::EffectDescriptor>& effects,
+        int32_t patchHandle,
+        media::AudioSourceType source) {
     record_config_callback cb = NULL;
     {
         Mutex::Autolock _l(AudioSystem::gLock);
@@ -1794,13 +2414,45 @@
     }
 
     if (cb != NULL) {
-        cb(event, clientInfo, clientConfig, clientEffects,
-           deviceConfig, effects, patchHandle, source);
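+        // Convert the AIDL payload to legacy types only when a callback is actually registered.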
+        int eventLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(event));
+        record_client_info_t clientInfoLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_RecordClientInfo_record_client_info_t(clientInfo));
+        audio_config_base_t clientConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_AudioConfigBase_audio_config_base_t(clientConfig));
+        std::vector<effect_descriptor_t> clientEffectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                convertContainer<std::vector<effect_descriptor_t>>(
+                        clientEffects,
+                        aidl2legacy_EffectDescriptor_effect_descriptor_t));
+        audio_config_base_t deviceConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_AudioConfigBase_audio_config_base_t(deviceConfig));
+        std::vector<effect_descriptor_t> effectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                convertContainer<std::vector<effect_descriptor_t>>(
+                        effects,
+                        aidl2legacy_EffectDescriptor_effect_descriptor_t));
+        audio_patch_handle_t patchHandleLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_int32_t_audio_patch_handle_t(patchHandle));
+        audio_source_t sourceLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_AudioSourceType_audio_source_t(source));
+        cb(eventLegacy, &clientInfoLegacy, &clientConfigLegacy, clientEffectsLegacy,
+           &deviceConfigLegacy, effectsLegacy, patchHandleLegacy, sourceLegacy);
     }
+    return Status::ok();
 }
 
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
-{
+Status AudioSystem::AudioPolicyServiceClient::onRoutingUpdated() {
+    routing_callback cb = NULL;
+    {
+        Mutex::Autolock _l(AudioSystem::gLock);
+        cb = gRoutingCallback;
+    }
+
+    if (cb != NULL) {
+        cb();
+    }
+    return Status::ok();
+}
+
+void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
     {
         Mutex::Autolock _l(mLock);
         for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
@@ -1818,4 +2470,28 @@
     ALOGW("AudioPolicyService server died!");
 }
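+// Field-by-field converters between the AIDL RecordClientInfo and the legacy record_client_info_t.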
 
+ConversionResult<record_client_info_t>
+aidl2legacy_RecordClientInfo_record_client_info_t(const media::RecordClientInfo& aidl) {
+    record_client_info_t legacy;
+    legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+    legacy.uid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.uid));
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.port_id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    legacy.silenced = aidl.silenced;
+    return legacy;
+}
+
+ConversionResult<media::RecordClientInfo>
+legacy2aidl_record_client_info_t_RecordClientInfo(const record_client_info_t& legacy) {
+    media::RecordClientInfo aidl;
+    aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(legacy.riid));
+    aidl.uid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.uid));
+    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.port_id));
+    aidl.silenced = legacy.silenced;
+    return aidl;
+}
+
 } // namespace android
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 32129f0..5f802de 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -22,6 +22,7 @@
 #include <math.h>
 #include <sys/resource.h>
 
+#include <android/media/IAudioPolicyService.h>
 #include <android-base/macros.h>
 #include <audio_utils/clock.h>
 #include <audio_utils/primitives.h>
@@ -31,7 +32,6 @@
 #include <private/media/AudioTrackShared.h>
 #include <processgroup/sched_policy.h>
 #include <media/IAudioFlinger.h>
-#include <media/IAudioPolicyService.h>
 #include <media/AudioParameter.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
@@ -42,10 +42,13 @@
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
 static const int kMaxLoopCountNotifications = 32;
 
+using ::android::aidl_utils::statusTFromBinderStatus;
+
 namespace android {
 // ---------------------------------------------------------------------------
 
 using media::VolumeShaper;
+using android::content::AttributionSourceState;
 
 // TODO: Move to a separate .h
 
@@ -163,9 +166,20 @@
 bool AudioTrack::isDirectOutputSupported(const audio_config_base_t& config,
                                          const audio_attributes_t& attributes) {
     ALOGV("%s()", __FUNCTION__);
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    const sp<media::IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return false;
-    return aps->isDirectOutputSupported(config, attributes);
+
+    auto result = [&]() -> ConversionResult<bool> {
+        media::AudioConfigBase configAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+        media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_attributes_t_AudioAttributesInternal(attributes));
+        bool retAidl;
+        RETURN_IF_ERROR(aidl_utils::statusTFromBinderStatus(
+                aps->isDirectOutputSupported(configAidl, attributesAidl, &retAidl)));
+        return retAidl;
+    }();
+    return result.value_or(false);
 }
 
 // ---------------------------------------------------------------------------
@@ -196,6 +210,7 @@
     mMetricsItem->setCString(MM_PREFIX "encoding", toString(track->mFormat).c_str());
     mMetricsItem->setInt32(MM_PREFIX "frameCount", (int32_t)track->mFrameCount);
     mMetricsItem->setCString(MM_PREFIX "attributes", toString(track->mAttributes).c_str());
+    mMetricsItem->setCString(MM_PREFIX "logSessionId", track->mLogSessionId.c_str());
 }
 
 // hand the user a snapshot of the metrics.
@@ -210,7 +225,11 @@
     return NO_ERROR;
 }
 
-AudioTrack::AudioTrack()
+AudioTrack::AudioTrack() : AudioTrack(AttributionSourceState())
+{
+}
+
+AudioTrack::AudioTrack(const AttributionSourceState& attributionSource)
     : mStatus(NO_INIT),
       mState(STATE_STOPPED),
       mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -218,11 +237,12 @@
       mPausedPosition(0),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mClientAttributionSource(attributionSource),
       mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
     mAttributes.usage = AUDIO_USAGE_UNKNOWN;
-    mAttributes.flags = 0x0;
+    mAttributes.flags = AUDIO_FLAG_NONE;
     strcpy(mAttributes.tags, "");
 }
 
@@ -239,8 +259,7 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const AttributionSourceState& attributionSource,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed,
@@ -256,8 +275,8 @@
 
     (void)set(streamType, sampleRate, format, channelMask,
             frameCount, flags, cbf, user, notificationFrames,
-            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
-            offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
+            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 AudioTrack::AudioTrack(
@@ -273,8 +292,7 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const AttributionSourceState& attributionSource,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed)
@@ -291,7 +309,7 @@
     (void)set(streamType, sampleRate, format, channelMask,
             0 /*frameCount*/, flags, cbf, user, notificationFrames,
             sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
+            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed);
 }
 
 AudioTrack::~AudioTrack()
@@ -309,30 +327,42 @@
         .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)mStatus)
         .record();
 
+    stopAndJoinCallbacks(); // checks mStatus
+
     if (mStatus == NO_ERROR) {
-        // Make sure that callback function exits in the case where
-        // it is looping on buffer full condition in obtainBuffer().
-        // Otherwise the callback thread will never exit.
-        stop();
-        if (mAudioTrackThread != 0) {
-            mProxy->interrupt();
-            mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
-            mAudioTrackThread->requestExitAndWait();
-            mAudioTrackThread.clear();
-        }
-        // No lock here: worst case we remove a NULL callback which will be a nop
-        if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
-            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
-        }
         IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
         mAudioTrack.clear();
         mCblkMemory.clear();
         mSharedBuffer.clear();
         IPCThreadState::self()->flushCommands();
+        pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
         ALOGV("%s(%d), releasing session id %d from %d on behalf of %d",
                 __func__, mPortId,
-                mSessionId, IPCThreadState::self()->getCallingPid(), mClientPid);
-        AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+                mSessionId, IPCThreadState::self()->getCallingPid(), clientPid);
+        AudioSystem::releaseAudioSessionId(mSessionId, clientPid);
+    }
+}
+
+void AudioTrack::stopAndJoinCallbacks() {
+    // Prevent nullptr crash if it did not open properly.
+    if (mStatus != NO_ERROR) return;
+
+    // Make sure that callback function exits in the case where
+    // it is looping on buffer full condition in obtainBuffer().
+    // Otherwise the callback thread will never exit.
+    stop();
+    if (mAudioTrackThread != 0) { // not thread safe
+        mProxy->interrupt();
+        mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
+        mAudioTrackThread->requestExitAndWait();
+        mAudioTrackThread.clear();
+    }
+    // No lock here: worst case we remove a NULL callback which will be a nop
+    if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
+        // This may not stop all of these device callbacks!
+        // TODO: Add some sort of protection.
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+        mDeviceCallback.clear();
     }
 }
 
@@ -351,8 +381,7 @@
         audio_session_t sessionId,
         transfer_type transferType,
         const audio_offload_info_t *offloadInfo,
-        uid_t uid,
-        pid_t pid,
+        const AttributionSourceState& attributionSource,
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed,
@@ -362,13 +391,15 @@
     uint32_t channelCount;
     pid_t callingPid;
     pid_t myPid;
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
 
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
           __func__,
           streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
-          sessionId, transferType, uid, pid);
+          sessionId, transferType, attributionSource.uid, attributionSource.pid);
 
     mThreadCanCallJava = threadCanCallJava;
     mSelectedDeviceId = selectedDeviceId;
@@ -441,7 +472,7 @@
             status = BAD_VALUE;
             goto exit;
         }
-        mStreamType = streamType;
+        mOriginalStreamType = streamType;
 
     } else {
         // stream type shouldn't be looked at, this track has audio attributes
@@ -450,7 +481,7 @@
                 " usage=%d content=%d flags=0x%x tags=[%s]",
                 __func__,
                  mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
-        mStreamType = AUDIO_STREAM_DEFAULT;
+        mOriginalStreamType = AUDIO_STREAM_DEFAULT;
         audio_flags_to_audio_output_flags(mAttributes.flags, &flags);
     }
 
@@ -458,7 +489,7 @@
     if (format == AUDIO_FORMAT_DEFAULT) {
         format = AUDIO_FORMAT_PCM_16_BIT;
     } else if (format == AUDIO_FORMAT_IEC61937) { // HDMI pass-through?
-        mAttributes.flags |= AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+        flags = static_cast<audio_output_flags_t>(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
     }
 
     // validate parameters
@@ -529,6 +560,7 @@
     } else {
         mOffloadInfo = NULL;
         memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
+        mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
     }
 
     mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -563,17 +595,16 @@
                 notificationFrames, minNotificationsPerBuffer, maxNotificationsPerBuffer);
     }
     mNotificationFramesAct = 0;
+    // TODO b/182392553: refactor or remove
+    mClientAttributionSource = AttributionSourceState(attributionSource);
     callingPid = IPCThreadState::self()->getCallingPid();
     myPid = getpid();
-    if (uid == AUDIO_UID_INVALID || (callingPid != myPid)) {
-        mClientUid = IPCThreadState::self()->getCallingUid();
-    } else {
-        mClientUid = uid;
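+    // Use the binder calling identity if the given uid/pid is invalid or comes from another process.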
+    if (uid == -1 || (callingPid != myPid)) {
+        mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
+            IPCThreadState::self()->getCallingUid()));
     }
-    if (pid == -1 || (callingPid != myPid)) {
-        mClientPid = callingPid;
-    } else {
-        mClientPid = pid;
+    if (pid == (pid_t)-1 || (callingPid != myPid)) {
+        mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(callingPid));
     }
     mAuxEffectId = 0;
     mOrigFlags = mFlags = flags;
@@ -612,7 +643,7 @@
     mReleased = 0;
     mStartNs = 0;
     mStartFromZeroUs = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, mClientPid, mClientUid);
+    AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
@@ -635,6 +666,40 @@
     return status;
 }
 
+
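+// Legacy overload: wraps the raw uid/pid in an AttributionSourceState and forwards to the main set().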
+status_t AudioTrack::set(
+        audio_stream_type_t streamType,
+        uint32_t sampleRate,
+        audio_format_t format,
+        uint32_t channelMask,
+        size_t frameCount,
+        audio_output_flags_t flags,
+        callback_t cbf,
+        void* user,
+        int32_t notificationFrames,
+        const sp<IMemory>& sharedBuffer,
+        bool threadCanCallJava,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        const audio_offload_info_t *offloadInfo,
+        uid_t uid,
+        pid_t pid,
+        const audio_attributes_t* pAttributes,
+        bool doNotReconnect,
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
+{
+    AttributionSourceState attributionSource;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+    attributionSource.token = sp<BBinder>::make();
+    return set(streamType, sampleRate, format,
+            static_cast<audio_channel_mask_t>(channelMask),
+            frameCount, flags, cbf, user, notificationFrames, sharedBuffer,
+            threadCanCallJava, sessionId, transferType, offloadInfo, attributionSource,
+            pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+}
+
 // -------------------------------------------------------------------------
 
 status_t AudioTrack::start()
@@ -735,7 +800,7 @@
     int32_t flags = android_atomic_and(~(CBLK_STREAM_END_DONE | CBLK_DISABLED), &mCblk->mFlags);
 
     if (!(flags & CBLK_INVALID)) {
-        status = mAudioTrack->start();
+        mAudioTrack->start(&status);
         if (status == DEAD_OBJECT) {
             flags |= CBLK_INVALID;
         }
@@ -1036,6 +1101,53 @@
     return mOriginalSampleRate;
 }
 
+status_t AudioTrack::setDualMonoMode(audio_dual_mono_mode_t mode)
+{
+    AutoMutex lock(mLock);
+    return setDualMonoMode_l(mode);
+}
+
+status_t AudioTrack::setDualMonoMode_l(audio_dual_mono_mode_t mode)
+{
+    const status_t status = statusTFromBinderStatus(
+        mAudioTrack->setDualMonoMode(VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode))));
+    if (status == NO_ERROR) mDualMonoMode = mode;
+    return status;
+}
+
+status_t AudioTrack::getDualMonoMode(audio_dual_mono_mode_t* mode) const
+{
+    AutoMutex lock(mLock);
+    media::AudioDualMonoMode mediaMode;
+    const status_t status = statusTFromBinderStatus(mAudioTrack->getDualMonoMode(&mediaMode));
+    if (status == NO_ERROR) {
+        *mode = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(mediaMode));
+    }
+    return status;
+}
+
+status_t AudioTrack::setAudioDescriptionMixLevel(float leveldB)
+{
+    AutoMutex lock(mLock);
+    return setAudioDescriptionMixLevel_l(leveldB);
+}
+
+status_t AudioTrack::setAudioDescriptionMixLevel_l(float leveldB)
+{
+    const status_t status = statusTFromBinderStatus(
+             mAudioTrack->setAudioDescriptionMixLevel(leveldB));
+    if (status == NO_ERROR) mAudioDescriptionMixLeveldB = leveldB;
+    return status;
+}
+
+status_t AudioTrack::getAudioDescriptionMixLevel(float* leveldB) const
+{
+    AutoMutex lock(mLock);
+    return statusTFromBinderStatus(mAudioTrack->getAudioDescriptionMixLevel(leveldB));
+}
+
 status_t AudioTrack::setPlaybackRate(const AudioPlaybackRate &playbackRate)
 {
     AutoMutex lock(mLock);
@@ -1043,7 +1155,13 @@
         return NO_ERROR;
     }
     if (isOffloadedOrDirect_l()) {
-        return INVALID_OPERATION;
+        const status_t status = statusTFromBinderStatus(mAudioTrack->setPlaybackRateParameters(
+                VALUE_OR_RETURN_STATUS(
+                        legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
+        if (status == NO_ERROR) {
+            mPlaybackRate = playbackRate;
+        }
+        return status;
     }
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         return INVALID_OPERATION;
@@ -1108,9 +1226,18 @@
     return NO_ERROR;
 }
 
-const AudioPlaybackRate& AudioTrack::getPlaybackRate() const
+const AudioPlaybackRate& AudioTrack::getPlaybackRate()
 {
     AutoMutex lock(mLock);
+    if (isOffloadedOrDirect_l()) {
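+        // Offloaded/direct tracks may have their rate adjusted server-side; refresh the cached value.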
+        media::AudioPlaybackRate playbackRateTemp;
+        const status_t status = statusTFromBinderStatus(
+                mAudioTrack->getPlaybackRateParameters(&playbackRateTemp));
+        if (status == NO_ERROR) { // update local version if changed.
+            mPlaybackRate =
+                    aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(playbackRateTemp).value();
+        }
+    }
     return mPlaybackRate;
 }
 
@@ -1165,6 +1292,46 @@
     return finalBufferSize;
 }
 
+ssize_t AudioTrack::getStartThresholdInFrames() const
+{
+    AutoMutex lock(mLock);
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    return (ssize_t) mProxy->getStartThresholdInFrames();
+}
+
+ssize_t AudioTrack::setStartThresholdInFrames(size_t startThresholdInFrames)
+{
+    if (startThresholdInFrames > INT32_MAX || startThresholdInFrames == 0) {
+        // contractually we could simply return the current threshold in frames
+        // to indicate the request was ignored, but we return an error here.
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    // We do not permit calling setStartThresholdInFrames() between the AudioTrack
+    // default ctor AudioTrack() and set(...) but rather fail such an attempt.
+    // (To do so would require a cached mOrigStartThresholdInFrames and we may
+    // not have proper validation for the actual set value).
+    if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+        return NO_INIT;
+    }
+    const uint32_t original = mProxy->getStartThresholdInFrames();
+    const uint32_t final = mProxy->setStartThresholdInFrames(startThresholdInFrames);
+    if (original != final) {
+        android::mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD)
+                .set(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, (int32_t)final)
+                .record();
+        if (original > final) {
+            // restart track if it was disabled by audioflinger due to previous underrun
+            // and we reduced the number of frames for the threshold.
+            restartIfDisabled();
+        }
+    }
+    return final;
+}
+
 status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
 {
     if (mSharedBuffer == 0 || isOffloadedOrDirect()) {
@@ -1428,7 +1595,8 @@
 status_t AudioTrack::attachAuxEffect(int effectId)
 {
     AutoMutex lock(mLock);
-    status_t status = mAudioTrack->attachAuxEffect(effectId);
+    status_t status;
+    mAudioTrack->attachAuxEffect(effectId, &status);
     if (status == NO_ERROR) {
         mAuxEffectId = effectId;
     }
@@ -1437,9 +1605,6 @@
 
 audio_stream_type_t AudioTrack::streamType() const
 {
-    if (mStreamType == AUDIO_STREAM_DEFAULT) {
-        return AudioSystem::attributesToStreamType(mAttributes);
-    }
     return mStreamType;
 }
 
@@ -1520,8 +1685,9 @@
     }
 
     IAudioFlinger::CreateTrackInput input;
-    if (mStreamType != AUDIO_STREAM_DEFAULT) {
-        input.attr = AudioSystem::streamTypeToAttributes(mStreamType);
+    if (mOriginalStreamType != AUDIO_STREAM_DEFAULT) {
+        // Legacy: This is based on original parameters even if the track is recreated.
+        input.attr = AudioSystem::streamTypeToAttributes(mOriginalStreamType);
     } else {
         input.attr = mAttributes;
     }
@@ -1530,8 +1696,7 @@
     input.config.channel_mask = mChannelMask;
     input.config.format = mFormat;
     input.config.offload_info = mOffloadInfoCopy;
-    input.clientInfo.clientUid = mClientUid;
-    input.clientInfo.clientPid = mClientPid;
+    input.clientInfo.attributionSource = mClientAttributionSource;
     input.clientInfo.clientTid = -1;
     if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
         // It is currently meaningless to request SCHED_FIFO for a Java thread.  Even if the
@@ -1556,11 +1721,13 @@
     input.sessionId = mSessionId;
     input.audioTrackCallback = mAudioTrackCallback;
 
-    IAudioFlinger::CreateTrackOutput output;
+    media::CreateTrackResponse response;
+    status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
 
-    sp<IAudioTrack> track = audioFlinger->createTrack(input,
-                                                      output,
-                                                      &status);
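+    // On success, convert the AIDL CreateTrackResponse back into the legacy CreateTrackOutput.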
+    IAudioFlinger::CreateTrackOutput output{};
+    if (status == NO_ERROR) {
+        output = VALUE_OR_FATAL(IAudioFlinger::CreateTrackOutput::fromAidl(response));
+    }
 
     if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("%s(%d): AudioFlinger could not create track, status: %d output %d",
@@ -1570,12 +1737,13 @@
         }
         goto exit;
     }
-    ALOG_ASSERT(track != 0);
+    ALOG_ASSERT(output.audioTrack != 0);
 
     mFrameCount = output.frameCount;
     mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
     mRoutedDeviceId = output.selectedDeviceId;
     mSessionId = output.sessionId;
+    mStreamType = output.streamType;
 
     mSampleRate = output.sampleRate;
     if (mOriginalSampleRate == 0) {
@@ -1592,7 +1760,9 @@
     // so we are no longer responsible for releasing it.
 
     // FIXME compare to AudioRecord
-    sp<IMemory> iMem = track->getCblk();
+    std::optional<media::SharedFileRegion> sfr;
+    output.audioTrack->getCblk(&sfr);
+    sp<IMemory> iMem = VALUE_OR_FATAL(aidl2legacy_NullableSharedFileRegion_IMemory(sfr));
     if (iMem == 0) {
         ALOGE("%s(%d): Could not get control block", __func__, mPortId);
         status = NO_INIT;
@@ -1613,7 +1783,7 @@
         IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
         mDeathNotifier.clear();
     }
-    mAudioTrack = track;
+    mAudioTrack = output.audioTrack;
     mCblkMemory = iMem;
     IPCThreadState::self()->flushCommands();
 
@@ -1669,7 +1839,7 @@
         }
     }
 
-    mAudioTrack->attachAuxEffect(mAuxEffectId);
+    mAudioTrack->attachAuxEffect(mAuxEffectId, &status);
 
     // If IAudioTrack is re-created, don't let the requested frameCount
     // decrease.  This can confuse clients that cache frameCount().
@@ -1705,6 +1875,13 @@
     mProxy->setPlaybackRate(playbackRateTemp);
     mProxy->setMinimum(mNotificationFramesAct);
 
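+    // Reapply any non-default dual mono mode and audio description mix level to the new track.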
+    if (mDualMonoMode != AUDIO_DUAL_MONO_MODE_OFF) {
+        setDualMonoMode_l(mDualMonoMode);
+    }
+    if (mAudioDescriptionMixLeveldB != -std::numeric_limits<float>::infinity()) {
+        setAudioDescriptionMixLevel_l(mAudioDescriptionMixLeveldB);
+    }
+
     mDeathNotifier = new DeathNotifier(this);
     IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this);
 
@@ -1720,6 +1897,8 @@
         .set(AMEDIAMETRICS_PROP_FLAGS, toString(mFlags).c_str())
         .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
         .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, mLogSessionId)
+        .set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerIId)
         .set(AMEDIAMETRICS_PROP_TRACKID, mPortId) // dup from key
         .set(AMEDIAMETRICS_PROP_CONTENTTYPE, toString(mAttributes.content_type).c_str())
         .set(AMEDIAMETRICS_PROP_USAGE, toString(mAttributes.usage).c_str())
@@ -1913,7 +2092,8 @@
         ALOGW("%s(%d): releaseBuffer() track %p disabled due to previous underrun, restarting",
                 __func__, mPortId, this);
         // FIXME ignoring status
-        mAudioTrack->start();
+        status_t status;
+        mAudioTrack->start(&status);
     }
 }
 
@@ -1936,7 +2116,7 @@
     }
 
     if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
-        // Sanity-check: user is most-likely passing an error code, and it would
+        // Validation: the user is most likely passing an error code, and it would
         // make the return value ambiguous (actualSize vs error).
         ALOGE("%s(%d): AudioTrack::write(buffer=%p, size=%zu (%zd)",
                 __func__, mPortId, buffer, userSize, userSize);
@@ -2326,7 +2506,7 @@
                 mUserData, &audioBuffer);
         size_t writtenSize = audioBuffer.size;
 
-        // Sanity check on returned size
+        // Validate the returned size
         if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
             ALOGE("%s(%d): EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
                     __func__, mPortId, reqSize, ssize_t(writtenSize));
@@ -2470,6 +2650,10 @@
         staticPosition = mStaticProxy->getPosition().unsignedValue();
     }
 
+    // save the old startThreshold and frameCount
+    const uint32_t originalStartThresholdInFrames = mProxy->getStartThresholdInFrames();
+    const uint32_t originalFrameCount = mProxy->frameCount();
+
     // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
     // causes a lot of churn on the service side, and it can reject starting
     // playback of a previously created track. May also apply to other cases.
@@ -2521,11 +2705,29 @@
             if (shaper.isStarted()) {
                 operationToEnd->setNormalizedTime(1.f);
             }
-            return mAudioTrack->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
+            media::VolumeShaperConfiguration config;
+            shaper.mConfiguration->writeToParcelable(&config);
+            media::VolumeShaperOperation operation;
+            operationToEnd->writeToParcelable(&operation);
+            status_t status;
+            mAudioTrack->applyVolumeShaper(config, operation, &status);
+            return status;
         });
 
+        // restore the original start threshold if different from frameCount.
+        if (originalStartThresholdInFrames != originalFrameCount) {
+            // Note: mProxy->setStartThresholdInFrames() call is in the Proxy
+            // and does not trigger a restart.
+            // (Also CBLK_DISABLED is not set, buffers are empty after track recreation).
+            // Any start would be triggered on the mState == ACTIVE check below.
+            const uint32_t currentThreshold =
+                    mProxy->setStartThresholdInFrames(originalStartThresholdInFrames);
+            ALOGD_IF(originalStartThresholdInFrames != currentThreshold,
+                    "%s(%d) startThresholdInFrames changing from %u to %u",
+                    __func__, mPortId, originalStartThresholdInFrames, currentThreshold);
+        }
         if (mState == STATE_ACTIVE) {
-            result = mAudioTrack->start();
+            mAudioTrack->start(&result);
         }
         // server resets to zero so we offset
         mFramesWrittenServerOffset =
@@ -2595,7 +2797,9 @@
 status_t AudioTrack::setParameters(const String8& keyValuePairs)
 {
     AutoMutex lock(mLock);
-    return mAudioTrack->setParameters(keyValuePairs);
+    status_t status;
+    mAudioTrack->setParameters(keyValuePairs.c_str(), &status);
+    return status;
 }
 
 status_t AudioTrack::selectPresentation(int presentationId, int programId)
@@ -2607,7 +2811,9 @@
     ALOGV("%s(%d): PresentationId/ProgramId[%s]",
             __func__, mPortId, param.toString().string());
 
-    return mAudioTrack->setParameters(param.toString());
+    status_t status;
+    mAudioTrack->setParameters(param.toString().c_str(), &status);
+    return status;
 }
 
 VolumeShaper::Status AudioTrack::applyVolumeShaper(
@@ -2616,11 +2822,16 @@
 {
     AutoMutex lock(mLock);
     mVolumeHandler->setIdIfNecessary(configuration);
-    VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(configuration, operation);
+    media::VolumeShaperConfiguration config;
+    configuration->writeToParcelable(&config);
+    media::VolumeShaperOperation op;
+    operation->writeToParcelable(&op);
+    VolumeShaper::Status status;
+    mAudioTrack->applyVolumeShaper(config, op, &status);
 
     if (status == DEAD_OBJECT) {
         if (restoreTrack_l("applyVolumeShaper") == OK) {
-            status = mAudioTrack->applyVolumeShaper(configuration, operation);
+            mAudioTrack->applyVolumeShaper(config, op, &status);
         }
     }
     if (status >= 0) {
@@ -2640,10 +2851,20 @@
 sp<VolumeShaper::State> AudioTrack::getVolumeShaperState(int id)
 {
     AutoMutex lock(mLock);
-    sp<VolumeShaper::State> state = mAudioTrack->getVolumeShaperState(id);
+    std::optional<media::VolumeShaperState> vss;
+    mAudioTrack->getVolumeShaperState(id, &vss);
+    sp<VolumeShaper::State> state;
+    if (vss.has_value()) {
+        state = new VolumeShaper::State();
+        state->readFromParcelable(vss.value());
+    }
     if (state.get() == nullptr && (mCblk->mFlags & CBLK_INVALID) != 0) {
         if (restoreTrack_l("getVolumeShaperState") == OK) {
-            state = mAudioTrack->getVolumeShaperState(id);
+            mAudioTrack->getVolumeShaperState(id, &vss);
+            if (vss.has_value()) {
+                state = new VolumeShaper::State();
+                state->readFromParcelable(vss.value());
+            }
         }
     }
     return state;
@@ -2737,7 +2958,11 @@
     status_t status;
     if (isOffloadedOrDirect_l()) {
         // use Binder to get timestamp
-        status = mAudioTrack->getTimestamp(timestamp);
+        media::AudioTimestampInternal ts;
+        mAudioTrack->getTimestamp(&ts, &status);
+        if (status == OK) {
+            timestamp = VALUE_OR_FATAL(aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts));
+        }
     } else {
         // read timestamp from shared memory
         ExtendedTimestamp ets;
@@ -3058,8 +3283,6 @@
     result.appendFormat("  id(%d) status(%d), state(%d), session Id(%d), flags(%#x)\n",
                         mPortId, mStatus, mState, mSessionId, mFlags);
     result.appendFormat("  stream type(%d), left - right volume(%f, %f)\n",
-                        (mStreamType == AUDIO_STREAM_DEFAULT) ?
-                            AudioSystem::attributesToStreamType(mAttributes) :
                             mStreamType,
                         mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
     result.appendFormat("  format(%#x), channel mask(%#x), channel count(%u)\n",
@@ -3096,6 +3319,31 @@
     return mProxy->getUnderrunFrames();
 }
 
+void AudioTrack::setLogSessionId(const char *logSessionId)
+{
+    AutoMutex lock(mLock);
+    if (logSessionId == nullptr) logSessionId = "";  // an empty string is an unset session id.
+    if (mLogSessionId == logSessionId) return;
+
+    mLogSessionId = logSessionId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID)
+        .set(AMEDIAMETRICS_PROP_LOGSESSIONID, logSessionId)
+        .record();
+}
+
+void AudioTrack::setPlayerIId(int playerIId)
+{
+    AutoMutex lock(mLock);
+    if (mPlayerIId == playerIId) return;
+
+    mPlayerIId = playerIId;
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID)
+        .set(AMEDIAMETRICS_PROP_PLAYERIID, playerIId)
+        .record();
+}
+
 status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
 {
 
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index f1f8f9c..35719be 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioTrackShared"
 //#define LOG_NDEBUG 0
 
+#include <atomic>
 #include <android-base/macros.h>
 #include <private/media/AudioTrackShared.h>
 #include <utils/Log.h>
@@ -33,6 +34,21 @@
     return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
 }
 
+// compile-time safe atomics. TODO: update all methods to use them.
+template <typename T>
+T android_atomic_load(const volatile T* addr) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+    return atomic_load((std::atomic<T>*)addr);          // memory_order_seq_cst
+}
+
+template <typename T>
+void android_atomic_store(const volatile T* addr, T value) {
+    static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+    static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+    atomic_store((std::atomic<T>*)addr, value);         // memory_order_seq_cst
+}
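
The two helpers above wrap plain fields in std::atomic accesses with the default memory_order_seq_cst ordering. A minimal, self-contained sketch of the same pattern, using an invented control-block struct rather than the real audio_track_cblk_t layout:

#include <atomic>
#include <cstdint>

// Illustrative stand-in for a shared-memory control block (not the real
// audio_track_cblk_t).
struct ControlBlockSketch {
    uint32_t mStartThresholdInFrames;
};

uint32_t loadStartThreshold(const ControlBlockSketch* cblk) {
    // Same idea as android_atomic_load() above: the field is declared as a
    // plain uint32_t, but every access goes through std::atomic.
    return std::atomic_load(
            reinterpret_cast<const std::atomic<uint32_t>*>(
                    &cblk->mStartThresholdInFrames));
}

void storeStartThreshold(ControlBlockSketch* cblk, uint32_t frames) {
    std::atomic_store(
            reinterpret_cast<std::atomic<uint32_t>*>(
                    &cblk->mStartThresholdInFrames),
            frames);
}

The static_asserts in the real helpers are what make this safe to do generically: they reject any T whose std::atomic<T> specialization adds storage or falls back to a lock.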
+
 // incrementSequence is used to determine the next sequence value
 // for the loop and position sequence counters.  It should return
 // a value between "other" + 1 and "other" + INT32_MAX, the choice of
@@ -51,6 +67,7 @@
     : mServer(0), mFutex(0), mMinimum(0)
     , mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0)
     , mBufferSizeInFrames(0)
+    , mStartThresholdInFrames(0) // filled in by the server.
     , mFlags(0)
 {
     memset(&u, 0, sizeof(u));
@@ -66,6 +83,26 @@
 {
 }
 
+uint32_t Proxy::getStartThresholdInFrames() const
+{
+    const uint32_t startThresholdInFrames =
+            android_atomic_load(&mCblk->mStartThresholdInFrames);
+    if (startThresholdInFrames == 0 || startThresholdInFrames > mFrameCount) {
+        ALOGD("%s: startThresholdInFrames %u not between 1 and frameCount %zu, "
+                "setting to frameCount",
+                __func__, startThresholdInFrames, mFrameCount);
+        return mFrameCount;
+    }
+    return startThresholdInFrames;
+}
+
+uint32_t Proxy::setStartThresholdInFrames(uint32_t startThresholdInFrames)
+{
+    const uint32_t actual = std::min((size_t)startThresholdInFrames, frameCount());
+    android_atomic_store(&mCblk->mStartThresholdInFrames, actual);
+    return actual;
+}
+
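The getter above maps 0 and out-of-range values back to frameCount, while the setter only caps the requested threshold at frameCount. A small standalone sketch of that clamping, with the frame count passed explicitly and the helper name and values invented for illustration:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Mirrors the clamping in Proxy::setStartThresholdInFrames(): the requested
// threshold is only capped at frameCount; 0 is stored as-is and mapped back
// to frameCount by the getter.
uint32_t clampStartThreshold(uint32_t requested, size_t frameCount) {
    return static_cast<uint32_t>(std::min<size_t>(requested, frameCount));
}

int main() {
    const size_t kFrameCount = 4800;                         // e.g. a 4800-frame buffer
    printf("%u\n", clampStartThreshold(960, kFrameCount));   // 960: kept as requested
    printf("%u\n", clampStartThreshold(9600, kFrameCount));  // 4800: capped at frameCount
    printf("%u\n", clampStartThreshold(0, kFrameCount));     // 0: the getter later treats this as frameCount
    return 0;
}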
 // ---------------------------------------------------------------------------
 
 ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
@@ -663,6 +700,7 @@
     , mTimestampMutator(&cblk->mExtendedTimestampQueue)
 {
     cblk->mBufferSizeInFrames = frameCount;
+    cblk->mStartThresholdInFrames = frameCount;
 }
 
 __attribute__((no_sanitize("integer")))
@@ -900,11 +938,8 @@
     }
     audio_track_cblk_t* cblk = mCblk;
 
-    int32_t flush = cblk->u.mStreaming.mFlush;
-    if (flush != mFlush) {
-        // FIXME should return an accurate value, but over-estimate is better than under-estimate
-        return mFrameCount;
-    }
+    flushBufferIfNeeded();
+
     const int32_t rear = getRear();
     ssize_t filled = audio_utils::safe_sub_overflow(rear, cblk->u.mStreaming.mFront);
     // pipe should not already be overfull
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index e79a362..361f7b8 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -21,64 +21,57 @@
 #include <utils/Log.h>
 #include <binder/Parcel.h>
 
+#include <media/AidlConversion.h>
 #include <media/AudioVolumeGroup.h>
 #include <media/AudioAttributes.h>
+#include <media/PolicyAidlConversion.h>
+
+#define RETURN_STATUS_IF_ERROR(x) \
+    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
 
 namespace android {
 
 status_t AudioVolumeGroup::readFromParcel(const Parcel *parcel)
 {
-    status_t ret = parcel->readUtf8FromUtf16(&mName);
-    if (ret != NO_ERROR) {
-        return ret;
-    }
-    mGroupId = static_cast<volume_group_t>(parcel->readInt32());
-    size_t size = static_cast<size_t>(parcel->readInt32());
-    for (size_t i = 0; i < size; i++) {
-        AudioAttributes attribute;
-        attribute.readFromParcel(parcel);
-        if (ret != NO_ERROR) {
-            mAudioAttributes.clear();
-            return ret;
-        }
-        mAudioAttributes.push_back(attribute.getAttributes());
-    }
-    size = static_cast<size_t>(parcel->readInt32());
-    for (size_t i = 0; i < size; i++) {
-        audio_stream_type_t stream = static_cast<audio_stream_type_t>(parcel->readInt32());
-        mStreams.push_back(stream);
-    }
-    return NO_ERROR;
+    media::AudioVolumeGroup aidl;
+    RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
+    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioVolumeGroup(aidl));
+    return OK;
 }
 
 status_t AudioVolumeGroup::writeToParcel(Parcel *parcel) const
 {
-    parcel->writeUtf8AsUtf16(mName);
-    parcel->writeInt32(static_cast<int32_t>(mGroupId));
-    size_t size = mAudioAttributes.size();
-    size_t sizePosition = parcel->dataPosition();
-    parcel->writeInt32(size);
-    size_t finalSize = size;
-    for (const auto &attributes : mAudioAttributes) {
-        size_t position = parcel->dataPosition();
-        AudioAttributes attribute(attributes);
-        status_t ret = attribute.writeToParcel(parcel);
-        if (ret != NO_ERROR) {
-            parcel->setDataPosition(position);
-            finalSize--;
-        }
-    }
-    if (size != finalSize) {
-        size_t position = parcel->dataPosition();
-        parcel->setDataPosition(sizePosition);
-        parcel->writeInt32(finalSize);
-        parcel->setDataPosition(position);
-    }
-    parcel->writeInt32(mStreams.size());
-    for (const auto &stream : mStreams) {
-        parcel->writeInt32(static_cast<int32_t>(stream));
-    }
-    return NO_ERROR;
+    media::AudioVolumeGroup aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioVolumeGroup(*this));
+    return aidl.writeToParcel(parcel);
+}
+
+ConversionResult<media::AudioVolumeGroup>
+legacy2aidl_AudioVolumeGroup(const AudioVolumeGroup& legacy) {
+    media::AudioVolumeGroup aidl;
+    aidl.groupId = VALUE_OR_RETURN(legacy2aidl_volume_group_t_int32_t(legacy.getId()));
+    aidl.name = legacy.getName();
+    aidl.audioAttributes = VALUE_OR_RETURN(
+            convertContainer<std::vector<media::AudioAttributesInternal>>(
+                    legacy.getAudioAttributes(),
+                    legacy2aidl_audio_attributes_t_AudioAttributesInternal));
+    aidl.streams = VALUE_OR_RETURN(
+            convertContainer<std::vector<media::AudioStreamType>>(legacy.getStreamTypes(),
+            legacy2aidl_audio_stream_type_t_AudioStreamType));
+    return aidl;
+}
+
+ConversionResult<AudioVolumeGroup>
+aidl2legacy_AudioVolumeGroup(const media::AudioVolumeGroup& aidl) {
+    return AudioVolumeGroup(
+            aidl.name,
+            VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
+            VALUE_OR_RETURN(convertContainer<AttributesVector>(
+                    aidl.audioAttributes,
+                    aidl2legacy_AudioAttributesInternal_audio_attributes_t)),
+            VALUE_OR_RETURN(convertContainer<StreamTypeVector>(
+                    aidl.streams,
+                    aidl2legacy_AudioStreamType_audio_stream_type_t))
+    );
 }
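
The parceling above now reduces to a symmetric pair of converters, so the useful invariant is that a group survives a legacy -> AIDL -> legacy round trip. A toy, self-contained sketch of that shape; the types and field names below are invented stand-ins, not the real AudioVolumeGroup or media::AudioVolumeGroup:

#include <cassert>
#include <string>
#include <vector>

// Invented stand-ins for the legacy class and the AIDL parcelable.
struct LegacyGroupSketch {
    std::string name;
    int groupId;
    std::vector<int> streams;
};

struct AidlGroupSketch {
    std::string name;
    int groupId;
    std::vector<int> streams;
};

// Same two-function shape as legacy2aidl_AudioVolumeGroup() /
// aidl2legacy_AudioVolumeGroup(); the real code also converts element types
// and propagates errors through ConversionResult<>.
AidlGroupSketch legacy2aidl_GroupSketch(const LegacyGroupSketch& legacy) {
    return {legacy.name, legacy.groupId, legacy.streams};
}

LegacyGroupSketch aidl2legacy_GroupSketch(const AidlGroupSketch& aidl) {
    return {aidl.name, aidl.groupId, aidl.streams};
}

int main() {
    const LegacyGroupSketch original{"media", 3, {1, 2}};
    const LegacyGroupSketch roundTripped =
            aidl2legacy_GroupSketch(legacy2aidl_GroupSketch(original));
    assert(roundTripped.name == original.name);
    assert(roundTripped.groupId == original.groupId);
    assert(roundTripped.streams == original.streams);
    return 0;
}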
 
 } // namespace android
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 16d2232..e46b349 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -24,1588 +24,1202 @@
 
 #include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
-#include <mediautils/ServiceUtilities.h>
-#include <mediautils/TimeCheck.h>
 #include "IAudioFlinger.h"
 
 namespace android {
 
-enum {
-    CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION,
-    CREATE_RECORD,
-    SAMPLE_RATE,
-    RESERVED,   // obsolete, was CHANNEL_COUNT
-    FORMAT,
-    FRAME_COUNT,
-    LATENCY,
-    SET_MASTER_VOLUME,
-    SET_MASTER_MUTE,
-    MASTER_VOLUME,
-    MASTER_MUTE,
-    SET_STREAM_VOLUME,
-    SET_STREAM_MUTE,
-    STREAM_VOLUME,
-    STREAM_MUTE,
-    SET_MODE,
-    SET_MIC_MUTE,
-    GET_MIC_MUTE,
-    SET_RECORD_SILENCED,
-    SET_PARAMETERS,
-    GET_PARAMETERS,
-    REGISTER_CLIENT,
-    GET_INPUTBUFFERSIZE,
-    OPEN_OUTPUT,
-    OPEN_DUPLICATE_OUTPUT,
-    CLOSE_OUTPUT,
-    SUSPEND_OUTPUT,
-    RESTORE_OUTPUT,
-    OPEN_INPUT,
-    CLOSE_INPUT,
-    INVALIDATE_STREAM,
-    SET_VOICE_VOLUME,
-    GET_RENDER_POSITION,
-    GET_INPUT_FRAMES_LOST,
-    NEW_AUDIO_UNIQUE_ID,
-    ACQUIRE_AUDIO_SESSION_ID,
-    RELEASE_AUDIO_SESSION_ID,
-    QUERY_NUM_EFFECTS,
-    QUERY_EFFECT,
-    GET_EFFECT_DESCRIPTOR,
-    CREATE_EFFECT,
-    MOVE_EFFECTS,
-    LOAD_HW_MODULE,
-    GET_PRIMARY_OUTPUT_SAMPLING_RATE,
-    GET_PRIMARY_OUTPUT_FRAME_COUNT,
-    SET_LOW_RAM_DEVICE,
-    LIST_AUDIO_PORTS,
-    GET_AUDIO_PORT,
-    CREATE_AUDIO_PATCH,
-    RELEASE_AUDIO_PATCH,
-    LIST_AUDIO_PATCHES,
-    SET_AUDIO_PORT_CONFIG,
-    GET_AUDIO_HW_SYNC_FOR_SESSION,
-    SYSTEM_READY,
-    FRAME_COUNT_HAL,
-    GET_MICROPHONES,
-    SET_MASTER_BALANCE,
-    GET_MASTER_BALANCE,
-    SET_EFFECT_SUSPENDED,
-    SET_AUDIO_HAL_PIDS
-};
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
 
 #define MAX_ITEMS_PER_LIST 1024
 
+#define VALUE_OR_RETURN_BINDER(x)                                 \
+    ({                                                            \
+       auto _tmp = (x);                                           \
+       if (!_tmp.ok()) return Status::fromStatusT(_tmp.error());  \
+       std::move(_tmp.value());                                   \
+    })
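
A self-contained sketch of how a macro like VALUE_OR_RETURN_BINDER above is meant to be used inside a Binder method body: convert, early-return an error Status on failure, otherwise keep the converted value. The Status and ConversionResult types here are simplified stand-ins, and the macro relies on the same GNU statement-expression extension as the one above:

#include <cstdio>
#include <utility>

// Simplified stand-ins, for illustration only.
struct StatusSketch {
    int code;
    static StatusSketch ok() { return {0}; }
    static StatusSketch fromStatusT(int s) { return {s}; }
    bool isOk() const { return code == 0; }
};

template <typename T>
struct ConversionResultSketch {
    T value;
    int error;  // 0 means success
    bool ok() const { return error == 0; }
};

#define VALUE_OR_RETURN_BINDER_SKETCH(x)                               \
    ({                                                                 \
        auto _tmp = (x);                                               \
        if (!_tmp.ok()) return StatusSketch::fromStatusT(_tmp.error);  \
        std::move(_tmp.value);                                         \
    })

ConversionResultSketch<int> legacy2aidl_sampleRate(unsigned rateHz) {
    if (rateHz == 0 || rateHz > 384000) return {0, -22 /* BAD_VALUE */};
    return {static_cast<int>(rateHz), 0};
}

// A server-side method written in the style the macro targets.
StatusSketch setSampleRate(unsigned rateHz) {
    const int aidlRate = VALUE_OR_RETURN_BINDER_SKETCH(legacy2aidl_sampleRate(rateHz));
    printf("accepted sample rate %d\n", aidlRate);
    return StatusSketch::ok();
}

int main() {
    printf("44100 -> isOk=%d\n", setSampleRate(44100).isOk());  // accepted
    printf("0     -> isOk=%d\n", setSampleRate(0).isOk());      // rejected with BAD_VALUE
    return 0;
}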
 
-class BpAudioFlinger : public BpInterface<IAudioFlinger>
-{
-public:
-    explicit BpAudioFlinger(const sp<IBinder>& impl)
-        : BpInterface<IAudioFlinger>(impl)
-    {
+#define RETURN_STATUS_IF_ERROR(x)    \
+    {                                \
+       auto _tmp = (x);              \
+       if (_tmp != OK) return _tmp;  \
     }
 
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status)
-    {
-        Parcel data, reply;
-        sp<IAudioTrack> track;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-
-        if (status == nullptr) {
-            return track;
-        }
-
-        input.writeToParcel(&data);
-
-        status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply);
-        if (lStatus != NO_ERROR) {
-            ALOGE("createTrack transaction error %d", lStatus);
-            *status = DEAD_OBJECT;
-            return track;
-        }
-        *status = reply.readInt32();
-        if (*status != NO_ERROR) {
-            ALOGE("createTrack returned error %d", *status);
-            return track;
-        }
-        track = interface_cast<IAudioTrack>(reply.readStrongBinder());
-        if (track == 0) {
-            ALOGE("createTrack returned an NULL IAudioTrack with status OK");
-            *status = DEAD_OBJECT;
-            return track;
-        }
-        output.readFromParcel(&reply);
-        return track;
+#define RETURN_BINDER_IF_ERROR(x)                         \
+    {                                                     \
+       auto _tmp = (x);                                   \
+       if (_tmp != OK) return Status::fromStatusT(_tmp);  \
     }
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                                 CreateRecordOutput& output,
-                                                 status_t *status)
-    {
-        Parcel data, reply;
-        sp<media::IAudioRecord> record;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-
-        if (status == nullptr) {
-            return record;
-        }
-
-        input.writeToParcel(&data);
-
-        status_t lStatus = remote()->transact(CREATE_RECORD, data, &reply);
-        if (lStatus != NO_ERROR) {
-            ALOGE("createRecord transaction error %d", lStatus);
-            *status = DEAD_OBJECT;
-            return record;
-        }
-        *status = reply.readInt32();
-        if (*status != NO_ERROR) {
-            ALOGE("createRecord returned error %d", *status);
-            return record;
-        }
-
-        record = interface_cast<media::IAudioRecord>(reply.readStrongBinder());
-        if (record == 0) {
-            ALOGE("createRecord returned a NULL IAudioRecord with status OK");
-            *status = DEAD_OBJECT;
-            return record;
-        }
-        output.readFromParcel(&reply);
-        return record;
-    }
-
-    virtual uint32_t sampleRate(audio_io_handle_t ioHandle) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        remote()->transact(SAMPLE_RATE, data, &reply);
-        return reply.readInt32();
-    }
-
-    // RESERVED for channelCount()
-
-    virtual audio_format_t format(audio_io_handle_t output) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(FORMAT, data, &reply);
-        return (audio_format_t) reply.readInt32();
-    }
-
-    virtual size_t frameCount(audio_io_handle_t ioHandle) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        remote()->transact(FRAME_COUNT, data, &reply);
-        return reply.readInt64();
-    }
-
-    virtual uint32_t latency(audio_io_handle_t output) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(LATENCY, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setMasterVolume(float value)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeFloat(value);
-        remote()->transact(SET_MASTER_VOLUME, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setMasterMute(bool muted)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(muted);
-        remote()->transact(SET_MASTER_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual float masterVolume() const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        remote()->transact(MASTER_VOLUME, data, &reply);
-        return reply.readFloat();
-    }
-
-    virtual bool masterMute() const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        remote()->transact(MASTER_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    status_t setMasterBalance(float balance) override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeFloat(balance);
-        status_t status = remote()->transact(SET_MASTER_BALANCE, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return reply.readInt32();
-    }
-
-    status_t getMasterBalance(float *balance) const override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_MASTER_BALANCE, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        *balance = reply.readFloat();
-        return NO_ERROR;
-    }
-
-    virtual status_t setStreamVolume(audio_stream_type_t stream, float value,
-            audio_io_handle_t output)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        data.writeFloat(value);
-        data.writeInt32((int32_t) output);
-        remote()->transact(SET_STREAM_VOLUME, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setStreamMute(audio_stream_type_t stream, bool muted)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        data.writeInt32(muted);
-        remote()->transact(SET_STREAM_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual float streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        data.writeInt32((int32_t) output);
-        remote()->transact(STREAM_VOLUME, data, &reply);
-        return reply.readFloat();
-    }
-
-    virtual bool streamMute(audio_stream_type_t stream) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        remote()->transact(STREAM_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setMode(audio_mode_t mode)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(mode);
-        remote()->transact(SET_MODE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setMicMute(bool state)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(state);
-        remote()->transact(SET_MIC_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool getMicMute() const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        remote()->transact(GET_MIC_MUTE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual void setRecordSilenced(audio_port_handle_t portId, bool silenced)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(portId);
-        data.writeInt32(silenced ? 1 : 0);
-        remote()->transact(SET_RECORD_SILENCED, data, &reply);
-    }
-
-    virtual status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        data.writeString8(keyValuePairs);
-        remote()->transact(SET_PARAMETERS, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        data.writeString8(keys);
-        remote()->transact(GET_PARAMETERS, data, &reply);
-        return reply.readString8();
-    }
-
-    virtual void registerClient(const sp<IAudioFlingerClient>& client)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(client));
-        remote()->transact(REGISTER_CLIENT, data, &reply);
-    }
-
-    virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
-            audio_channel_mask_t channelMask) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(sampleRate);
-        data.writeInt32(format);
-        data.writeInt32(channelMask);
-        remote()->transact(GET_INPUTBUFFERSIZE, data, &reply);
-        return reply.readInt64();
-    }
-
-    virtual status_t openOutput(audio_module_handle_t module,
-                                audio_io_handle_t *output,
-                                audio_config_t *config,
-                                const sp<DeviceDescriptorBase>& device,
-                                uint32_t *latencyMs,
-                                audio_output_flags_t flags)
-    {
-        if (output == nullptr || config == nullptr || device == nullptr || latencyMs == nullptr) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(module);
-        data.write(config, sizeof(audio_config_t));
-        data.writeParcelable(*device);
-        data.writeInt32((int32_t) flags);
-        status_t status = remote()->transact(OPEN_OUTPUT, data, &reply);
-        if (status != NO_ERROR) {
-            *output = AUDIO_IO_HANDLE_NONE;
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        if (status != NO_ERROR) {
-            *output = AUDIO_IO_HANDLE_NONE;
-            return status;
-        }
-        *output = (audio_io_handle_t)reply.readInt32();
-        ALOGV("openOutput() returned output, %d", *output);
-        reply.read(config, sizeof(audio_config_t));
-        *latencyMs = reply.readInt32();
-        return NO_ERROR;
-    }
-
-    virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
-            audio_io_handle_t output2)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output1);
-        data.writeInt32((int32_t) output2);
-        remote()->transact(OPEN_DUPLICATE_OUTPUT, data, &reply);
-        return (audio_io_handle_t) reply.readInt32();
-    }
-
-    virtual status_t closeOutput(audio_io_handle_t output)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(CLOSE_OUTPUT, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t suspendOutput(audio_io_handle_t output)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(SUSPEND_OUTPUT, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t restoreOutput(audio_io_handle_t output)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(RESTORE_OUTPUT, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t openInput(audio_module_handle_t module,
-                               audio_io_handle_t *input,
-                               audio_config_t *config,
-                               audio_devices_t *device,
-                               const String8& address,
-                               audio_source_t source,
-                               audio_input_flags_t flags)
-    {
-        if (input == NULL || config == NULL || device == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(module);
-        data.writeInt32(*input);
-        data.write(config, sizeof(audio_config_t));
-        data.writeInt32(*device);
-        data.writeString8(address);
-        data.writeInt32(source);
-        data.writeInt32(flags);
-        status_t status = remote()->transact(OPEN_INPUT, data, &reply);
-        if (status != NO_ERROR) {
-            *input = AUDIO_IO_HANDLE_NONE;
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        if (status != NO_ERROR) {
-            *input = AUDIO_IO_HANDLE_NONE;
-            return status;
-        }
-        *input = (audio_io_handle_t)reply.readInt32();
-        reply.read(config, sizeof(audio_config_t));
-        *device = (audio_devices_t)reply.readInt32();
-        return NO_ERROR;
-    }
-
-    virtual status_t closeInput(int input)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(input);
-        remote()->transact(CLOSE_INPUT, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t invalidateStream(audio_stream_type_t stream)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        remote()->transact(INVALIDATE_STREAM, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t setVoiceVolume(float volume)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeFloat(volume);
-        remote()->transact(SET_VOICE_VOLUME, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
-            audio_io_handle_t output) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) output);
-        remote()->transact(GET_RENDER_POSITION, data, &reply);
-        status_t status = reply.readInt32();
-        if (status == NO_ERROR) {
-            uint32_t tmp = reply.readInt32();
-            if (halFrames != NULL) {
-                *halFrames = tmp;
-            }
-            tmp = reply.readInt32();
-            if (dspFrames != NULL) {
-                *dspFrames = tmp;
-            }
-        }
-        return status;
-    }
-
-    virtual uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        status_t status = remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
-        if (status != NO_ERROR) {
-            return 0;
-        }
-        return (uint32_t) reply.readInt32();
-    }
-
-    virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) use);
-        status_t status = remote()->transact(NEW_AUDIO_UNIQUE_ID, data, &reply);
-        audio_unique_id_t id = AUDIO_UNIQUE_ID_ALLOCATE;
-        if (status == NO_ERROR) {
-            id = reply.readInt32();
-        }
-        return id;
-    }
-
-    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(audioSession);
-        data.writeInt32((int32_t)pid);
-        data.writeInt32((int32_t)uid);
-        remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
-    }
-
-    virtual void releaseAudioSessionId(audio_session_t audioSession, int pid)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(audioSession);
-        data.writeInt32(pid);
-        remote()->transact(RELEASE_AUDIO_SESSION_ID, data, &reply);
-    }
-
-    virtual status_t queryNumberEffects(uint32_t *numEffects) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        status_t status = remote()->transact(QUERY_NUM_EFFECTS, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        if (numEffects != NULL) {
-            *numEffects = (uint32_t)reply.readInt32();
-        }
-        return NO_ERROR;
-    }
-
-    virtual status_t queryEffect(uint32_t index, effect_descriptor_t *pDescriptor) const
-    {
-        if (pDescriptor == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(index);
-        status_t status = remote()->transact(QUERY_EFFECT, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        reply.read(pDescriptor, sizeof(effect_descriptor_t));
-        return NO_ERROR;
-    }
-
-    virtual status_t getEffectDescriptor(const effect_uuid_t *pUuid,
-                                         const effect_uuid_t *pType,
-                                         uint32_t preferredTypeFlag,
-                                         effect_descriptor_t *pDescriptor) const
-    {
-        if (pUuid == NULL || pType == NULL || pDescriptor == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(pUuid, sizeof(effect_uuid_t));
-        data.write(pType, sizeof(effect_uuid_t));
-        data.writeUint32(preferredTypeFlag);
-        status_t status = remote()->transact(GET_EFFECT_DESCRIPTOR, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        reply.read(pDescriptor, sizeof(effect_descriptor_t));
-        return NO_ERROR;
-    }
-
-    virtual sp<IEffect> createEffect(
-                                    effect_descriptor_t *pDesc,
-                                    const sp<IEffectClient>& client,
-                                    int32_t priority,
-                                    audio_io_handle_t output,
-                                    audio_session_t sessionId,
-                                    const AudioDeviceTypeAddr& device,
-                                    const String16& opPackageName,
-                                    pid_t pid,
-                                    bool probe,
-                                    status_t *status,
-                                    int *id,
-                                    int *enabled)
-    {
-        Parcel data, reply;
-        sp<IEffect> effect;
-        if (pDesc == NULL) {
-            if (status != NULL) {
-                *status = BAD_VALUE;
-            }
-            return nullptr;
-        }
-
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(pDesc, sizeof(effect_descriptor_t));
-        data.writeStrongBinder(IInterface::asBinder(client));
-        data.writeInt32(priority);
-        data.writeInt32((int32_t) output);
-        data.writeInt32(sessionId);
-        if (data.writeParcelable(device) != NO_ERROR) {
-            if (status != NULL) {
-                *status = NO_INIT;
-            }
-            return nullptr;
-        }
-        data.writeString16(opPackageName);
-        data.writeInt32((int32_t) pid);
-        data.writeInt32(probe ? 1 : 0);
-
-        status_t lStatus = remote()->transact(CREATE_EFFECT, data, &reply);
-        if (lStatus != NO_ERROR) {
-            ALOGE("createEffect error: %s", strerror(-lStatus));
-        } else {
-            lStatus = reply.readInt32();
-            int tmp = reply.readInt32();
-            if (id != NULL) {
-                *id = tmp;
-            }
-            tmp = reply.readInt32();
-            if (enabled != NULL) {
-                *enabled = tmp;
-            }
-            effect = interface_cast<IEffect>(reply.readStrongBinder());
-            reply.read(pDesc, sizeof(effect_descriptor_t));
-        }
-        if (status != NULL) {
-            *status = lStatus;
-        }
-
-        return effect;
-    }
-
-    virtual status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
-            audio_io_handle_t dstOutput)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(session);
-        data.writeInt32((int32_t) srcOutput);
-        data.writeInt32((int32_t) dstOutput);
-        remote()->transact(MOVE_EFFECTS, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual void setEffectSuspended(int effectId,
-                                    audio_session_t sessionId,
-                                    bool suspended)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(effectId);
-        data.writeInt32(sessionId);
-        data.writeInt32(suspended ? 1 : 0);
-        remote()->transact(SET_EFFECT_SUSPENDED, data, &reply);
-    }
-
-    virtual audio_module_handle_t loadHwModule(const char *name)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeCString(name);
-        remote()->transact(LOAD_HW_MODULE, data, &reply);
-        return (audio_module_handle_t) reply.readInt32();
-    }
-
-    virtual uint32_t getPrimaryOutputSamplingRate()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        remote()->transact(GET_PRIMARY_OUTPUT_SAMPLING_RATE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual size_t getPrimaryOutputFrameCount()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        remote()->transact(GET_PRIMARY_OUTPUT_FRAME_COUNT, data, &reply);
-        return reply.readInt64();
-    }
-
-    virtual status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override
-    {
-        Parcel data, reply;
-
-        static_assert(NO_ERROR == 0, "NO_ERROR must be 0");
-        return data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor())
-                ?: data.writeInt32((int) isLowRamDevice)
-                ?: data.writeInt64(totalMemory)
-                ?: remote()->transact(SET_LOW_RAM_DEVICE, data, &reply)
-                ?: reply.readInt32();
-    }
-
-    virtual status_t listAudioPorts(unsigned int *num_ports,
-                                    struct audio_port *ports)
-    {
-        if (num_ports == NULL || *num_ports == 0 || ports == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(*num_ports);
-        status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        *num_ports = (unsigned int)reply.readInt32();
-        reply.read(ports, *num_ports * sizeof(struct audio_port));
-        return status;
-    }
-    virtual status_t getAudioPort(struct audio_port *port)
-    {
-        if (port == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(port, sizeof(struct audio_port));
-        status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        reply.read(port, sizeof(struct audio_port));
-        return status;
-    }
-    virtual status_t createAudioPatch(const struct audio_patch *patch,
-                                       audio_patch_handle_t *handle)
-    {
-        if (patch == NULL || handle == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(patch, sizeof(struct audio_patch));
-        data.write(handle, sizeof(audio_patch_handle_t));
-        status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        reply.read(handle, sizeof(audio_patch_handle_t));
-        return status;
-    }
-    virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(&handle, sizeof(audio_patch_handle_t));
-        status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
-        if (status != NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-    virtual status_t listAudioPatches(unsigned int *num_patches,
-                                      struct audio_patch *patches)
-    {
-        if (num_patches == NULL || *num_patches == 0 || patches == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(*num_patches);
-        status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        *num_patches = (unsigned int)reply.readInt32();
-        reply.read(patches, *num_patches * sizeof(struct audio_patch));
-        return status;
-    }
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config)
-    {
-        if (config == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.write(config, sizeof(struct audio_port_config));
-        status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
-        if (status != NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-    virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(sessionId);
-        status_t status = remote()->transact(GET_AUDIO_HW_SYNC_FOR_SESSION, data, &reply);
-        if (status != NO_ERROR) {
-            return AUDIO_HW_SYNC_INVALID;
-        }
-        return (audio_hw_sync_t)reply.readInt32();
-    }
-    virtual status_t systemReady()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        return remote()->transact(SYSTEM_READY, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-    virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32((int32_t) ioHandle);
-        status_t status = remote()->transact(FRAME_COUNT_HAL, data, &reply);
-        if (status != NO_ERROR) {
-            return 0;
-        }
-        return reply.readInt64();
-    }
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_MICROPHONES, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        status = reply.readParcelableVector(microphones);
-        return status;
-    }
-    virtual status_t setAudioHalPids(const std::vector<pid_t>& pids)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
-        data.writeInt32(pids.size());
-        for (auto pid : pids) {
-            data.writeInt32(pid);
-        }
-        status_t status = remote()->transact(SET_AUDIO_HAL_PIDS, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlinger::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    // make sure transactions reserved to AudioPolicyManager do not come from other processes
-    switch (code) {
-        case SET_STREAM_VOLUME:
-        case SET_STREAM_MUTE:
-        case OPEN_OUTPUT:
-        case OPEN_DUPLICATE_OUTPUT:
-        case CLOSE_OUTPUT:
-        case SUSPEND_OUTPUT:
-        case RESTORE_OUTPUT:
-        case OPEN_INPUT:
-        case CLOSE_INPUT:
-        case INVALIDATE_STREAM:
-        case SET_VOICE_VOLUME:
-        case MOVE_EFFECTS:
-        case SET_EFFECT_SUSPENDED:
-        case LOAD_HW_MODULE:
-        case LIST_AUDIO_PORTS:
-        case GET_AUDIO_PORT:
-        case CREATE_AUDIO_PATCH:
-        case RELEASE_AUDIO_PATCH:
-        case LIST_AUDIO_PATCHES:
-        case SET_AUDIO_PORT_CONFIG:
-        case SET_RECORD_SILENCED:
-            ALOGW("%s: transaction %d received from PID %d",
-                  __func__, code, IPCThreadState::self()->getCallingPid());
-            // return status only for non void methods
-            switch (code) {
-                case SET_RECORD_SILENCED:
-                case SET_EFFECT_SUSPENDED:
-                    break;
-                default:
-                    reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
-                    break;
-            }
-            return OK;
-        default:
-            break;
-    }
-
-    // make sure the following transactions come from system components
-    switch (code) {
-        case SET_MASTER_VOLUME:
-        case SET_MASTER_MUTE:
-        case SET_MODE:
-        case SET_MIC_MUTE:
-        case SET_LOW_RAM_DEVICE:
-        case SYSTEM_READY:
-        case SET_AUDIO_HAL_PIDS: {
-            if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
-                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
-                      __func__, code, IPCThreadState::self()->getCallingPid(),
-                      IPCThreadState::self()->getCallingUid());
-                // return status only for non void methods
-                switch (code) {
-                    case SYSTEM_READY:
-                        break;
-                    default:
-                        reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
-                        break;
-                }
-                return OK;
-            }
-        } break;
-        default:
-            break;
-    }
-
-    // Whitelist of relevant events to trigger log merging.
-    // Log merging should activate during audio activity of any kind. This are considered the
-    // most relevant events.
-    // TODO should select more wisely the items from the list
-    switch (code) {
-        case CREATE_TRACK:
-        case CREATE_RECORD:
-        case SET_MASTER_VOLUME:
-        case SET_MASTER_MUTE:
-        case SET_MIC_MUTE:
-        case SET_PARAMETERS:
-        case CREATE_EFFECT:
-        case SYSTEM_READY: {
-            requestLogMerge();
-            break;
-        }
-        default:
-            break;
-    }
-
-    std::string tag("IAudioFlinger command " + std::to_string(code));
-    TimeCheck check(tag.c_str());
-
-    switch (code) {
-        case CREATE_TRACK: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-
-            CreateTrackInput input;
-            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
-                reply->writeInt32(DEAD_OBJECT);
-                return NO_ERROR;
-            }
-
-            status_t status;
-            CreateTrackOutput output;
-
-            sp<IAudioTrack> track= createTrack(input,
-                                               output,
-                                               &status);
-
-            LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            reply->writeStrongBinder(IInterface::asBinder(track));
-            output.writeToParcel(reply);
-            return NO_ERROR;
-        } break;
-        case CREATE_RECORD: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-
-            CreateRecordInput input;
-            if (input.readFromParcel((Parcel*)&data) != NO_ERROR) {
-                reply->writeInt32(DEAD_OBJECT);
-                return NO_ERROR;
-            }
-
-            status_t status;
-            CreateRecordOutput output;
-
-            sp<media::IAudioRecord> record = createRecord(input,
-                                                          output,
-                                                          &status);
-
-            LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            reply->writeStrongBinder(IInterface::asBinder(record));
-            output.writeToParcel(reply);
-            return NO_ERROR;
-        } break;
-        case SAMPLE_RATE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( sampleRate((audio_io_handle_t) data.readInt32()) );
-            return NO_ERROR;
-        } break;
-
-        // RESERVED for channelCount()
-
-        case FORMAT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( format((audio_io_handle_t) data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case FRAME_COUNT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt64( frameCount((audio_io_handle_t) data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case LATENCY: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( latency((audio_io_handle_t) data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case SET_MASTER_VOLUME: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( setMasterVolume(data.readFloat()) );
-            return NO_ERROR;
-        } break;
-        case SET_MASTER_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( setMasterMute(data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case MASTER_VOLUME: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeFloat( masterVolume() );
-            return NO_ERROR;
-        } break;
-        case MASTER_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( masterMute() );
-            return NO_ERROR;
-        } break;
-        case SET_MASTER_BALANCE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( setMasterBalance(data.readFloat()) );
-            return NO_ERROR;
-        } break;
-        case GET_MASTER_BALANCE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            float f;
-            const status_t status = getMasterBalance(&f);
-            reply->writeInt32((int32_t)status);
-            if (status == NO_ERROR) {
-                (void)reply->writeFloat(f);
-            }
-            return NO_ERROR;
-        } break;
-        case SET_STREAM_VOLUME: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int stream = data.readInt32();
-            float volume = data.readFloat();
-            audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
-            reply->writeInt32( setStreamVolume((audio_stream_type_t) stream, volume, output) );
-            return NO_ERROR;
-        } break;
-        case SET_STREAM_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int stream = data.readInt32();
-            reply->writeInt32( setStreamMute((audio_stream_type_t) stream, data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case STREAM_VOLUME: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int stream = data.readInt32();
-            int output = data.readInt32();
-            reply->writeFloat( streamVolume((audio_stream_type_t) stream, output) );
-            return NO_ERROR;
-        } break;
-        case STREAM_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int stream = data.readInt32();
-            reply->writeInt32( streamMute((audio_stream_type_t) stream) );
-            return NO_ERROR;
-        } break;
-        case SET_MODE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_mode_t mode = (audio_mode_t) data.readInt32();
-            reply->writeInt32( setMode(mode) );
-            return NO_ERROR;
-        } break;
-        case SET_MIC_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int state = data.readInt32();
-            reply->writeInt32( setMicMute(state) );
-            return NO_ERROR;
-        } break;
-        case GET_MIC_MUTE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32( getMicMute() );
-            return NO_ERROR;
-        } break;
-        case SET_RECORD_SILENCED: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_port_handle_t portId = data.readInt32();
-            bool silenced = data.readInt32() == 1;
-            setRecordSilenced(portId, silenced);
-            return NO_ERROR;
-        } break;
-        case SET_PARAMETERS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
-            String8 keyValuePairs(data.readString8());
-            reply->writeInt32(setParameters(ioHandle, keyValuePairs));
-            return NO_ERROR;
-        } break;
-        case GET_PARAMETERS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
-            String8 keys(data.readString8());
-            reply->writeString8(getParameters(ioHandle, keys));
-            return NO_ERROR;
-        } break;
-
-        case REGISTER_CLIENT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(
-                    data.readStrongBinder());
-            registerClient(client);
-            return NO_ERROR;
-        } break;
-        case GET_INPUTBUFFERSIZE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            uint32_t sampleRate = data.readInt32();
-            audio_format_t format = (audio_format_t) data.readInt32();
-            audio_channel_mask_t channelMask = data.readInt32();
-            reply->writeInt64( getInputBufferSize(sampleRate, format, channelMask) );
-            return NO_ERROR;
-        } break;
-        case OPEN_OUTPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
-            audio_config_t config = {};
-            if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
-                ALOGE("b/23905951");
-            }
-            sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
-            status_t status = NO_ERROR;
-            if ((status = data.readParcelable(device.get())) != NO_ERROR) {
-                reply->writeInt32((int32_t)status);
-                return NO_ERROR;
-            }
-            audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
-            uint32_t latencyMs = 0;
-            audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status = openOutput(module, &output, &config, device, &latencyMs, flags);
-            ALOGV("OPEN_OUTPUT output, %d", output);
-            reply->writeInt32((int32_t)status);
-            if (status == NO_ERROR) {
-                reply->writeInt32((int32_t)output);
-                reply->write(&config, sizeof(audio_config_t));
-                reply->writeInt32(latencyMs);
-            }
-            return NO_ERROR;
-        } break;
-        case OPEN_DUPLICATE_OUTPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t output1 = (audio_io_handle_t) data.readInt32();
-            audio_io_handle_t output2 = (audio_io_handle_t) data.readInt32();
-            reply->writeInt32((int32_t) openDuplicateOutput(output1, output2));
-            return NO_ERROR;
-        } break;
-        case CLOSE_OUTPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(closeOutput((audio_io_handle_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case SUSPEND_OUTPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(suspendOutput((audio_io_handle_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case RESTORE_OUTPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(restoreOutput((audio_io_handle_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case OPEN_INPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
-            audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
-            audio_config_t config = {};
-            if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
-                ALOGE("b/23905951");
-            }
-            audio_devices_t device = (audio_devices_t)data.readInt32();
-            String8 address(data.readString8());
-            audio_source_t source = (audio_source_t)data.readInt32();
-            audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
-
-            status_t status = openInput(module, &input, &config,
-                                        &device, address, source, flags);
-            reply->writeInt32((int32_t) status);
-            if (status == NO_ERROR) {
-                reply->writeInt32((int32_t) input);
-                reply->write(&config, sizeof(audio_config_t));
-                reply->writeInt32(device);
-            }
-            return NO_ERROR;
-        } break;
-        case CLOSE_INPUT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(closeInput((audio_io_handle_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case INVALIDATE_STREAM: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
-            reply->writeInt32(invalidateStream(stream));
-            return NO_ERROR;
-        } break;
-        case SET_VOICE_VOLUME: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            float volume = data.readFloat();
-            reply->writeInt32( setVoiceVolume(volume) );
-            return NO_ERROR;
-        } break;
-        case GET_RENDER_POSITION: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
-            uint32_t halFrames = 0;
-            uint32_t dspFrames = 0;
-            status_t status = getRenderPosition(&halFrames, &dspFrames, output);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(halFrames);
-                reply->writeInt32(dspFrames);
-            }
-            return NO_ERROR;
-        }
-        case GET_INPUT_FRAMES_LOST: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
-            reply->writeInt32((int32_t) getInputFramesLost(ioHandle));
-            return NO_ERROR;
-        } break;
-        case NEW_AUDIO_UNIQUE_ID: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(newAudioUniqueId((audio_unique_id_use_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case ACQUIRE_AUDIO_SESSION_ID: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_session_t audioSession = (audio_session_t) data.readInt32();
-            const pid_t pid = (pid_t)data.readInt32();
-            const uid_t uid = (uid_t)data.readInt32();
-            acquireAudioSessionId(audioSession, pid, uid);
-            return NO_ERROR;
-        } break;
-        case RELEASE_AUDIO_SESSION_ID: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_session_t audioSession = (audio_session_t) data.readInt32();
-            int pid = data.readInt32();
-            releaseAudioSessionId(audioSession, pid);
-            return NO_ERROR;
-        } break;
-        case QUERY_NUM_EFFECTS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            uint32_t numEffects = 0;
-            status_t status = queryNumberEffects(&numEffects);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32((int32_t)numEffects);
-            }
-            return NO_ERROR;
-        }
-        case QUERY_EFFECT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            effect_descriptor_t desc = {};
-            status_t status = queryEffect(data.readInt32(), &desc);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&desc, sizeof(effect_descriptor_t));
-            }
-            return NO_ERROR;
-        }
-        case GET_EFFECT_DESCRIPTOR: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            effect_uuid_t uuid = {};
-            if (data.read(&uuid, sizeof(effect_uuid_t)) != NO_ERROR) {
-                android_errorWriteLog(0x534e4554, "139417189");
-            }
-            effect_uuid_t type = {};
-            if (data.read(&type, sizeof(effect_uuid_t)) != NO_ERROR) {
-                android_errorWriteLog(0x534e4554, "139417189");
-            }
-            uint32_t preferredTypeFlag = data.readUint32();
-            effect_descriptor_t desc = {};
-            status_t status = getEffectDescriptor(&uuid, &type, preferredTypeFlag, &desc);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&desc, sizeof(effect_descriptor_t));
-            }
-            return NO_ERROR;
-        }
-        case CREATE_EFFECT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            effect_descriptor_t desc = {};
-            if (data.read(&desc, sizeof(effect_descriptor_t)) != NO_ERROR) {
-                ALOGE("b/23905951");
-            }
-            sp<IEffectClient> client = interface_cast<IEffectClient>(data.readStrongBinder());
-            int32_t priority = data.readInt32();
-            audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            AudioDeviceTypeAddr device;
-            status_t status = NO_ERROR;
-            if ((status = data.readParcelable(&device)) != NO_ERROR) {
-                return status;
-            }
-            const String16 opPackageName = data.readString16();
-            pid_t pid = (pid_t)data.readInt32();
-            bool probe = data.readInt32() == 1;
-
-            int id = 0;
-            int enabled = 0;
-
-            sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, device,
-                    opPackageName, pid, probe, &status, &id, &enabled);
-            reply->writeInt32(status);
-            reply->writeInt32(id);
-            reply->writeInt32(enabled);
-            reply->writeStrongBinder(IInterface::asBinder(effect));
-            reply->write(&desc, sizeof(effect_descriptor_t));
-            return NO_ERROR;
-        } break;
-        case MOVE_EFFECTS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_session_t session = (audio_session_t) data.readInt32();
-            audio_io_handle_t srcOutput = (audio_io_handle_t) data.readInt32();
-            audio_io_handle_t dstOutput = (audio_io_handle_t) data.readInt32();
-            reply->writeInt32(moveEffects(session, srcOutput, dstOutput));
-            return NO_ERROR;
-        } break;
-        case SET_EFFECT_SUSPENDED: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int effectId = data.readInt32();
-            audio_session_t sessionId = (audio_session_t) data.readInt32();
-            bool suspended = data.readInt32() == 1;
-            setEffectSuspended(effectId, sessionId, suspended);
-            return NO_ERROR;
-        } break;
-        case LOAD_HW_MODULE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(loadHwModule(data.readCString()));
-            return NO_ERROR;
-        } break;
-        case GET_PRIMARY_OUTPUT_SAMPLING_RATE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(getPrimaryOutputSamplingRate());
-            return NO_ERROR;
-        } break;
-        case GET_PRIMARY_OUTPUT_FRAME_COUNT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt64(getPrimaryOutputFrameCount());
-            return NO_ERROR;
-        } break;
-        case SET_LOW_RAM_DEVICE: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            int32_t isLowRamDevice;
-            int64_t totalMemory;
-            const status_t status =
-                    data.readInt32(&isLowRamDevice) ?:
-                    data.readInt64(&totalMemory) ?:
-                    setLowRamDevice(isLowRamDevice != 0, totalMemory);
-            (void)reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-        case LIST_AUDIO_PORTS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            unsigned int numPortsReq = data.readInt32();
-            if (numPortsReq > MAX_ITEMS_PER_LIST) {
-                numPortsReq = MAX_ITEMS_PER_LIST;
-            }
-            unsigned int numPorts = numPortsReq;
-            struct audio_port *ports =
-                    (struct audio_port *)calloc(numPortsReq,
-                                                           sizeof(struct audio_port));
-            if (ports == NULL) {
-                reply->writeInt32(NO_MEMORY);
-                reply->writeInt32(0);
-                return NO_ERROR;
-            }
-            status_t status = listAudioPorts(&numPorts, ports);
-            reply->writeInt32(status);
-            reply->writeInt32(numPorts);
-            if (status == NO_ERROR) {
-                if (numPortsReq > numPorts) {
-                    numPortsReq = numPorts;
-                }
-                reply->write(ports, numPortsReq * sizeof(struct audio_port));
-            }
-            free(ports);
-            return NO_ERROR;
-        } break;
-        case GET_AUDIO_PORT: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            struct audio_port port = {};
-            if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
-                ALOGE("b/23905951");
-            }
-            status_t status = getAudioPort(&port);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&port, sizeof(struct audio_port));
-            }
-            return NO_ERROR;
-        } break;
-        case CREATE_AUDIO_PATCH: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            struct audio_patch patch;
-            data.read(&patch, sizeof(struct audio_patch));
-            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-            if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
-                ALOGE("b/23905951");
-            }
-            status_t status = createAudioPatch(&patch, &handle);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&handle, sizeof(audio_patch_handle_t));
-            }
-            return NO_ERROR;
-        } break;
-        case RELEASE_AUDIO_PATCH: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            audio_patch_handle_t handle;
-            data.read(&handle, sizeof(audio_patch_handle_t));
-            status_t status = releaseAudioPatch(handle);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-        case LIST_AUDIO_PATCHES: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            unsigned int numPatchesReq = data.readInt32();
-            if (numPatchesReq > MAX_ITEMS_PER_LIST) {
-                numPatchesReq = MAX_ITEMS_PER_LIST;
-            }
-            unsigned int numPatches = numPatchesReq;
-            struct audio_patch *patches =
-                    (struct audio_patch *)calloc(numPatchesReq,
-                                                 sizeof(struct audio_patch));
-            if (patches == NULL) {
-                reply->writeInt32(NO_MEMORY);
-                reply->writeInt32(0);
-                return NO_ERROR;
-            }
-            status_t status = listAudioPatches(&numPatches, patches);
-            reply->writeInt32(status);
-            reply->writeInt32(numPatches);
-            if (status == NO_ERROR) {
-                if (numPatchesReq > numPatches) {
-                    numPatchesReq = numPatches;
-                }
-                reply->write(patches, numPatchesReq * sizeof(struct audio_patch));
-            }
-            free(patches);
-            return NO_ERROR;
-        } break;
-        case SET_AUDIO_PORT_CONFIG: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            struct audio_port_config config;
-            data.read(&config, sizeof(struct audio_port_config));
-            status_t status = setAudioPortConfig(&config);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-        case GET_AUDIO_HW_SYNC_FOR_SESSION: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt32(getAudioHwSyncForSession((audio_session_t) data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case SYSTEM_READY: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            systemReady();
-            return NO_ERROR;
-        } break;
-        case FRAME_COUNT_HAL: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            reply->writeInt64( frameCountHAL((audio_io_handle_t) data.readInt32()) );
-            return NO_ERROR;
-        } break;
-        case GET_MICROPHONES: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            std::vector<media::MicrophoneInfo> microphones;
-            status_t status = getMicrophones(&microphones);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeParcelableVector(microphones);
-            }
-            return NO_ERROR;
-        }
-        case SET_AUDIO_HAL_PIDS: {
-            CHECK_INTERFACE(IAudioFlinger, data, reply);
-            std::vector<pid_t> pids;
-            int32_t size;
-            status_t status = data.readInt32(&size);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            if (size < 0) {
-                return BAD_VALUE;
-            }
-            if (size > MAX_ITEMS_PER_LIST) {
-                size = MAX_ITEMS_PER_LIST;
-            }
-            for (int32_t i = 0; i < size; i++) {
-                int32_t pid;
-                status =  data.readInt32(&pid);
-                if (status != NO_ERROR) {
-                    return status;
-                }
-                pids.push_back(pid);
-            }
-            reply->writeInt32(setAudioHalPids(pids));
-            return NO_ERROR;
-        }
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
+ConversionResult<media::CreateTrackRequest> IAudioFlinger::CreateTrackInput::toAidl() const {
+    media::CreateTrackRequest aidl;
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(config));
+    aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
+    aidl.sharedBuffer = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(sharedBuffer));
+    aidl.notificationsPerBuffer = VALUE_OR_RETURN(convertIntegral<int32_t>(notificationsPerBuffer));
+    aidl.speed = speed;
+    aidl.audioTrackCallback = audioTrackCallback;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    return aidl;
 }
 
-// ----------------------------------------------------------------------------
+ConversionResult<IAudioFlinger::CreateTrackInput>
+IAudioFlinger::CreateTrackInput::fromAidl(const media::CreateTrackRequest& aidl) {
+    IAudioFlinger::CreateTrackInput legacy;
+    legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.config));
+    legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
+    legacy.sharedBuffer = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.sharedBuffer));
+    legacy.notificationsPerBuffer = VALUE_OR_RETURN(
+            convertIntegral<uint32_t>(aidl.notificationsPerBuffer));
+    legacy.speed = aidl.speed;
+    legacy.audioTrackCallback = aidl.audioTrackCallback;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    return legacy;
+}
+
+ConversionResult<media::CreateTrackResponse>
+IAudioFlinger::CreateTrackOutput::toAidl() const {
+    media::CreateTrackResponse aidl;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+    aidl.streamType = VALUE_OR_RETURN(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(streamType));
+    aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
+    aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
+    aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
+    aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    aidl.audioTrack = audioTrack;
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateTrackOutput>
+IAudioFlinger::CreateTrackOutput::fromAidl(
+        const media::CreateTrackResponse& aidl) {
+    IAudioFlinger::CreateTrackOutput legacy;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.streamType = VALUE_OR_RETURN(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
+    legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
+    legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
+    legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
+    legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    legacy.audioTrack = aidl.audioTrack;
+    return legacy;
+}
+
+ConversionResult<media::CreateRecordRequest>
+IAudioFlinger::CreateRecordInput::toAidl() const {
+    media::CreateRecordRequest aidl;
+    aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+    aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
+    aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
+    aidl.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
+            convertIntegral<int32_t>(maxSharedAudioHistoryMs));
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordInput>
+IAudioFlinger::CreateRecordInput::fromAidl(
+        const media::CreateRecordRequest& aidl) {
+    IAudioFlinger::CreateRecordInput legacy;
+    legacy.attr = VALUE_OR_RETURN(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
+    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+    legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
+    legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
+    legacy.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
+            convertIntegral<int32_t>(aidl.maxSharedAudioHistoryMs));
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    return legacy;
+}
+
+ConversionResult<media::CreateRecordResponse>
+IAudioFlinger::CreateRecordOutput::toAidl() const {
+    media::CreateRecordResponse aidl;
+    aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
+    aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
+    aidl.selectedDeviceId = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+    aidl.inputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(inputId));
+    aidl.cblk = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(cblk));
+    aidl.buffers = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(buffers));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    aidl.audioRecord = audioRecord;
+    return aidl;
+}
+
+ConversionResult<IAudioFlinger::CreateRecordOutput>
+IAudioFlinger::CreateRecordOutput::fromAidl(
+        const media::CreateRecordResponse& aidl) {
+    IAudioFlinger::CreateRecordOutput legacy;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_input_flags_t_mask(aidl.flags));
+    legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    legacy.notificationFrameCount = VALUE_OR_RETURN(
+            convertIntegral<size_t>(aidl.notificationFrameCount));
+    legacy.selectedDeviceId = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+    legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
+    legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+    legacy.inputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.inputId));
+    legacy.cblk = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.cblk));
+    legacy.buffers = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.buffers));
+    legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    legacy.audioRecord = aidl.audioRecord;
+    return legacy;
+}
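
The four structs above each pair a toAidl() with a fromAidl(), so a legacy value can be carried across the new AIDL interface and reconstructed on the other side. A minimal round-trip sketch, assuming only the helpers defined in the patch (the function name itself is illustrative and not part of the change):

    ConversionResult<IAudioFlinger::CreateTrackInput> roundTripCreateTrackInput(
            const IAudioFlinger::CreateTrackInput& legacy) {
        // Legacy struct -> AIDL parcelable, as done on the client side of createTrack().
        media::CreateTrackRequest request = VALUE_OR_RETURN(legacy.toAidl());
        // AIDL parcelable -> legacy struct, as done on the server side.
        return IAudioFlinger::CreateTrackInput::fromAidl(request);
    }
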
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AudioFlingerClientAdapter
+
+AudioFlingerClientAdapter::AudioFlingerClientAdapter(
+        const sp<media::IAudioFlingerService> delegate) : mDelegate(delegate) {}
+
+status_t AudioFlingerClientAdapter::createTrack(const media::CreateTrackRequest& input,
+                                                media::CreateTrackResponse& output) {
+    return statusTFromBinderStatus(mDelegate->createTrack(input, &output));
+}
+
+status_t AudioFlingerClientAdapter::createRecord(const media::CreateRecordRequest& input,
+                                                 media::CreateRecordResponse& output) {
+    return statusTFromBinderStatus(mDelegate->createRecord(input, &output));
+}
+
+uint32_t AudioFlingerClientAdapter::sampleRate(audio_io_handle_t ioHandle) const {
+    auto result = [&]() -> ConversionResult<uint32_t> {
+        int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->sampleRate(ioHandleAidl, &aidlRet)));
+        return convertIntegral<uint32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
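
This is the pattern the const query methods below all follow: convert the legacy argument, call the AIDL delegate, convert the result, and fall back to a neutral default via value_or() because the legacy signature has no way to report an error. A hedged usage sketch from the caller's side; `service` and `ioHandle` are placeholders, not names from the patch:

    // Assuming `service` is a media::IAudioFlingerService binder obtained elsewhere
    // and `ioHandle` is a valid audio_io_handle_t.
    sp<AudioFlingerClientAdapter> af = new AudioFlingerClientAdapter(service);
    uint32_t rate = af->sampleRate(ioHandle);  // 0 if the binder call or a conversion failed
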
+
+audio_format_t AudioFlingerClientAdapter::format(audio_io_handle_t output) const {
+    auto result = [&]() -> ConversionResult<audio_format_t> {
+        int32_t outputAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output));
+        media::audio::common::AudioFormat aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->format(outputAidl, &aidlRet)));
+        return aidl2legacy_AudioFormat_audio_format_t(aidlRet);
+    }();
+    return result.value_or(AUDIO_FORMAT_INVALID);
+}
+
+size_t AudioFlingerClientAdapter::frameCount(audio_io_handle_t ioHandle) const {
+    auto result = [&]() -> ConversionResult<size_t> {
+        int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+        int64_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->frameCount(ioHandleAidl, &aidlRet)));
+        return convertIntegral<size_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+uint32_t AudioFlingerClientAdapter::latency(audio_io_handle_t output) const {
+    auto result = [&]() -> ConversionResult<uint32_t> {
+        int32_t outputAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->latency(outputAidl, &aidlRet)));
+        return convertIntegral<uint32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::setMasterVolume(float value) {
+    return statusTFromBinderStatus(mDelegate->setMasterVolume(value));
+}
+
+status_t AudioFlingerClientAdapter::setMasterMute(bool muted) {
+    return statusTFromBinderStatus(mDelegate->setMasterMute(muted));
+}
+
+float AudioFlingerClientAdapter::masterVolume() const {
+    auto result = [&]() -> ConversionResult<float> {
+        float aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->masterVolume(&aidlRet)));
+        return aidlRet;
+    }();
+    // Failure is ignored.
+    return result.value_or(0.f);
+}
+
+bool AudioFlingerClientAdapter::masterMute() const {
+    auto result = [&]() -> ConversionResult<bool> {
+        bool aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->masterMute(&aidlRet)));
+        return aidlRet;
+    }();
+    // Failure is ignored.
+    return result.value_or(false);
+}
+
+status_t AudioFlingerClientAdapter::setMasterBalance(float balance) {
+    return statusTFromBinderStatus(mDelegate->setMasterBalance(balance));
+}
+
+status_t AudioFlingerClientAdapter::getMasterBalance(float* balance) const {
+    return statusTFromBinderStatus(mDelegate->getMasterBalance(balance));
+}
+
+status_t AudioFlingerClientAdapter::setStreamVolume(audio_stream_type_t stream, float value,
+                                                    audio_io_handle_t output) {
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->setStreamVolume(streamAidl, value, outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::setStreamMute(audio_stream_type_t stream, bool muted) {
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
+}
+
+float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
+                                              audio_io_handle_t output) const {
+    auto result = [&]() -> ConversionResult<float> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+        float aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
+        return aidlRet;
+    }();
+    // Failure is ignored.
+    return result.value_or(0.f);
+}
+
+bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
+    auto result = [&]() -> ConversionResult<bool> {
+        media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        bool aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->streamMute(streamAidl, &aidlRet)));
+        return aidlRet;
+    }();
+    // Failure is ignored.
+    return result.value_or(false);
+}
+
+status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
+    media::AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
+    return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
+}
+
+status_t AudioFlingerClientAdapter::setMicMute(bool state) {
+    return statusTFromBinderStatus(mDelegate->setMicMute(state));
+}
+
+bool AudioFlingerClientAdapter::getMicMute() const {
+    auto result = [&]() -> ConversionResult<bool> {
+        bool aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getMicMute(&aidlRet)));
+        return aidlRet;
+    }();
+    // Failure is ignored.
+    return result.value_or(false);
+}
+
+void AudioFlingerClientAdapter::setRecordSilenced(audio_port_handle_t portId, bool silenced) {
+    auto result = [&]() -> status_t {
+        int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_port_handle_t_int32_t(portId));
+        return statusTFromBinderStatus(mDelegate->setRecordSilenced(portIdAidl, silenced));
+    }();
+    // Failure is ignored.
+    (void) result;
+}
+
+status_t AudioFlingerClientAdapter::setParameters(audio_io_handle_t ioHandle,
+                                                  const String8& keyValuePairs) {
+    int32_t ioHandleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+    std::string keyValuePairsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_String8_string(keyValuePairs));
+    return statusTFromBinderStatus(mDelegate->setParameters(ioHandleAidl, keyValuePairsAidl));
+}
+
+String8 AudioFlingerClientAdapter::getParameters(audio_io_handle_t ioHandle, const String8& keys)
+const {
+    auto result = [&]() -> ConversionResult<String8> {
+        int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+        std::string keysAidl = VALUE_OR_RETURN(legacy2aidl_String8_string(keys));
+        std::string aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getParameters(ioHandleAidl, keysAidl, &aidlRet)));
+        return aidl2legacy_string_view_String8(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(String8());
+}
+
+void AudioFlingerClientAdapter::registerClient(const sp<media::IAudioFlingerClient>& client) {
+    mDelegate->registerClient(client);
+    // Failure is ignored.
+}
+
+size_t AudioFlingerClientAdapter::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
+                                                     audio_channel_mask_t channelMask) const {
+    auto result = [&]() -> ConversionResult<size_t> {
+        int32_t sampleRateAidl = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+        media::audio::common::AudioFormat formatAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_format_t_AudioFormat(format));
+        int32_t channelMaskAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_channel_mask_t_int32_t(channelMask));
+        int64_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getInputBufferSize(sampleRateAidl, formatAidl, channelMaskAidl,
+                                              &aidlRet)));
+        return convertIntegral<size_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::openOutput(const media::OpenOutputRequest& request,
+                                               media::OpenOutputResponse* response) {
+    return statusTFromBinderStatus(mDelegate->openOutput(request, response));
+}
+
+audio_io_handle_t AudioFlingerClientAdapter::openDuplicateOutput(audio_io_handle_t output1,
+                                                                 audio_io_handle_t output2) {
+    auto result = [&]() -> ConversionResult<audio_io_handle_t> {
+        int32_t output1Aidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output1));
+        int32_t output2Aidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output2));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->openDuplicateOutput(output1Aidl, output2Aidl, &aidlRet)));
+        return aidl2legacy_int32_t_audio_io_handle_t(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::closeOutput(audio_io_handle_t output) {
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->closeOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::suspendOutput(audio_io_handle_t output) {
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->suspendOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::restoreOutput(audio_io_handle_t output) {
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->restoreOutput(outputAidl));
+}
+
+status_t AudioFlingerClientAdapter::openInput(const media::OpenInputRequest& request,
+                                              media::OpenInputResponse* response) {
+    return statusTFromBinderStatus(mDelegate->openInput(request, response));
+}
+
+status_t AudioFlingerClientAdapter::closeInput(audio_io_handle_t input) {
+    int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
+    return statusTFromBinderStatus(mDelegate->closeInput(inputAidl));
+}
+
+status_t AudioFlingerClientAdapter::invalidateStream(audio_stream_type_t stream) {
+    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+    return statusTFromBinderStatus(mDelegate->invalidateStream(streamAidl));
+}
+
+status_t AudioFlingerClientAdapter::setVoiceVolume(float volume) {
+    return statusTFromBinderStatus(mDelegate->setVoiceVolume(volume));
+}
+
+status_t AudioFlingerClientAdapter::getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
+                                                      audio_io_handle_t output) const {
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    media::RenderPosition aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getRenderPosition(outputAidl, &aidlRet)));
+    if (halFrames != nullptr) {
+        *halFrames = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet.halFrames));
+    }
+    if (dspFrames != nullptr) {
+        *dspFrames = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet.dspFrames));
+    }
+    return OK;
+}
+
+uint32_t AudioFlingerClientAdapter::getInputFramesLost(audio_io_handle_t ioHandle) const {
+    auto result = [&]() -> ConversionResult<uint32_t> {
+        int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getInputFramesLost(ioHandleAidl, &aidlRet)));
+        return convertIntegral<uint32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+audio_unique_id_t AudioFlingerClientAdapter::newAudioUniqueId(audio_unique_id_use_t use) {
+    auto result = [&]() -> ConversionResult<audio_unique_id_t> {
+        media::AudioUniqueIdUse useAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(use));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->newAudioUniqueId(useAidl, &aidlRet)));
+        return aidl2legacy_int32_t_audio_unique_id_t(aidlRet);
+    }();
+    return result.value_or(AUDIO_UNIQUE_ID_ALLOCATE);
+}
+
+void AudioFlingerClientAdapter::acquireAudioSessionId(audio_session_t audioSession, pid_t pid,
+                                                      uid_t uid) {
+    [&]() -> status_t {
+        int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_session_t_int32_t(audioSession));
+        int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+        int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(uid));
+        return statusTFromBinderStatus(
+                mDelegate->acquireAudioSessionId(audioSessionAidl, pidAidl, uidAidl));
+    }();
+    // Failure is ignored.
+}
+
+void AudioFlingerClientAdapter::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
+    [&]() -> status_t {
+        int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_session_t_int32_t(audioSession));
+        int32_t pidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(pid));
+        return statusTFromBinderStatus(
+                mDelegate->releaseAudioSessionId(audioSessionAidl, pidAidl));
+    }();
+    // Failure is ignored.
+}
+
+status_t AudioFlingerClientAdapter::queryNumberEffects(uint32_t* numEffects) const {
+    int32_t aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->queryNumberEffects(&aidlRet)));
+    if (numEffects != nullptr) {
+        *numEffects = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(aidlRet));
+    }
+    return OK;
+}
+
+status_t
+AudioFlingerClientAdapter::queryEffect(uint32_t index, effect_descriptor_t* pDescriptor) const {
+    int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
+    media::EffectDescriptor aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->queryEffect(indexAidl, &aidlRet)));
+    if (pDescriptor != nullptr) {
+        *pDescriptor = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_EffectDescriptor_effect_descriptor_t(aidlRet));
+    }
+    return OK;
+}
+
+status_t AudioFlingerClientAdapter::getEffectDescriptor(const effect_uuid_t* pEffectUUID,
+                                                        const effect_uuid_t* pTypeUUID,
+                                                        uint32_t preferredTypeFlag,
+                                                        effect_descriptor_t* pDescriptor) const {
+    media::AudioUuid effectUuidAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_uuid_t_AudioUuid(*pEffectUUID));
+    media::AudioUuid typeUuidAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_uuid_t_AudioUuid(*pTypeUUID));
+    int32_t preferredTypeFlagAidl = VALUE_OR_RETURN_STATUS(
+            convertReinterpret<int32_t>(preferredTypeFlag));
+    media::EffectDescriptor aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getEffectDescriptor(effectUuidAidl, typeUuidAidl, preferredTypeFlagAidl,
+                                           &aidlRet)));
+    if (pDescriptor != nullptr) {
+        *pDescriptor = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_EffectDescriptor_effect_descriptor_t(aidlRet));
+    }
+    return OK;
+}
+
+status_t AudioFlingerClientAdapter::createEffect(const media::CreateEffectRequest& request,
+                                                 media::CreateEffectResponse* response) {
+    return statusTFromBinderStatus(mDelegate->createEffect(request, response));
+}
+
+status_t
+AudioFlingerClientAdapter::moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
+                                       audio_io_handle_t dstOutput) {
+    int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
+    int32_t srcOutputAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(srcOutput));
+    int32_t dstOutputAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(dstOutput));
+    return statusTFromBinderStatus(
+            mDelegate->moveEffects(sessionAidl, srcOutputAidl, dstOutputAidl));
+}
+
+void AudioFlingerClientAdapter::setEffectSuspended(int effectId,
+                                                   audio_session_t sessionId,
+                                                   bool suspended) {
+    [&]() -> status_t {
+        int32_t effectIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(effectId));
+        int32_t sessionIdAidl = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_session_t_int32_t(sessionId));
+        return statusTFromBinderStatus(
+                mDelegate->setEffectSuspended(effectIdAidl, sessionIdAidl, suspended));
+    }();
+    // Failure is ignored.
+}
+
+audio_module_handle_t AudioFlingerClientAdapter::loadHwModule(const char* name) {
+    auto result = [&]() -> ConversionResult<audio_module_handle_t> {
+        std::string nameAidl(name);
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->loadHwModule(nameAidl, &aidlRet)));
+        return aidl2legacy_int32_t_audio_module_handle_t(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+uint32_t AudioFlingerClientAdapter::getPrimaryOutputSamplingRate() {
+    auto result = [&]() -> ConversionResult<uint32_t> {
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getPrimaryOutputSamplingRate(&aidlRet)));
+        return convertIntegral<uint32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+size_t AudioFlingerClientAdapter::getPrimaryOutputFrameCount() {
+    auto result = [&]() -> ConversionResult<size_t> {
+        int64_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getPrimaryOutputFrameCount(&aidlRet)));
+        return convertIntegral<size_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
+    return statusTFromBinderStatus(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
+}
+
+status_t AudioFlingerClientAdapter::getAudioPort(struct audio_port_v7* port) {
+    media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
+    media::AudioPort aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getAudioPort(portAidl, &aidlRet)));
+    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(aidlRet));
+    return OK;
+}
+
+status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
+                                                     audio_patch_handle_t* handle) {
+    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
+    int32_t aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->createAudioPatch(patchAidl, &aidlRet)));
+    if (handle != nullptr) {
+        *handle = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_patch_handle_t(aidlRet));
+    }
+    return OK;
+}
+
+status_t AudioFlingerClientAdapter::releaseAudioPatch(audio_patch_handle_t handle) {
+    int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
+    return statusTFromBinderStatus(mDelegate->releaseAudioPatch(handleAidl));
+}
+
+status_t AudioFlingerClientAdapter::listAudioPatches(unsigned int* num_patches,
+                                                     struct audio_patch* patches) {
+    std::vector<media::AudioPatch> aidlRet;
+    int32_t maxPatches = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->listAudioPatches(maxPatches, &aidlRet)));
+    *num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(aidlRet.size()));
+    return convertRange(aidlRet.begin(), aidlRet.end(), patches,
+                        aidl2legacy_AudioPatch_audio_patch);
+}
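
For the list call the adapter keeps the legacy in/out counting convention: *num_patches carries the capacity of the destination array in and the number of converted entries out. A hypothetical call, assuming `af` is an AudioFlingerClientAdapter and MAX_ITEMS_PER_LIST is the same limit used by the removed implementation above:

    unsigned int numPatches = MAX_ITEMS_PER_LIST;       // capacity of `patches` going in
    struct audio_patch patches[MAX_ITEMS_PER_LIST] = {};
    if (af->listAudioPatches(&numPatches, patches) == OK) {
        // numPatches now reports how many entries were written into `patches`.
    }
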
+
+status_t AudioFlingerClientAdapter::setAudioPortConfig(const struct audio_port_config* config) {
+    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfig(*config));
+    return statusTFromBinderStatus(mDelegate->setAudioPortConfig(configAidl));
+}
+
+audio_hw_sync_t AudioFlingerClientAdapter::getAudioHwSyncForSession(audio_session_t sessionId) {
+    auto result = [&]() -> ConversionResult<audio_hw_sync_t> {
+        int32_t sessionIdAidl = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getAudioHwSyncForSession(sessionIdAidl, &aidlRet)));
+        return aidl2legacy_int32_t_audio_hw_sync_t(aidlRet);
+    }();
+    return result.value_or(AUDIO_HW_SYNC_INVALID);
+}
+
+status_t AudioFlingerClientAdapter::systemReady() {
+    return statusTFromBinderStatus(mDelegate->systemReady());
+}
+
+size_t AudioFlingerClientAdapter::frameCountHAL(audio_io_handle_t ioHandle) const {
+    auto result = [&]() -> ConversionResult<size_t> {
+        int32_t ioHandleAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+        int64_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->frameCountHAL(ioHandleAidl, &aidlRet)));
+        return convertIntegral<size_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t
+AudioFlingerClientAdapter::getMicrophones(std::vector<media::MicrophoneInfo>* microphones) {
+    std::vector<media::MicrophoneInfoData> aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getMicrophones(&aidlRet)));
+    if (microphones != nullptr) {
+        *microphones = VALUE_OR_RETURN_STATUS(
+                convertContainer<std::vector<media::MicrophoneInfo>>(aidlRet,
+                         media::aidl2legacy_MicrophoneInfo));
+    }
+    return OK;
+}
+
+status_t AudioFlingerClientAdapter::setAudioHalPids(const std::vector<pid_t>& pids) {
+    std::vector<int32_t> pidsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(pids, legacy2aidl_pid_t_int32_t));
+    return statusTFromBinderStatus(mDelegate->setAudioHalPids(pidsAidl));
+}
+
+status_t AudioFlingerClientAdapter::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    return statusTFromBinderStatus(mDelegate->setVibratorInfos(vibratorInfos));
+}
+
+status_t AudioFlingerClientAdapter::updateSecondaryOutputs(
+        const TrackSecondaryOutputsMap& trackSecondaryOutputs) {
+    std::vector<media::TrackSecondaryOutputInfo> trackSecondaryOutputInfos =
+            VALUE_OR_RETURN_STATUS(
+                    convertContainer<std::vector<media::TrackSecondaryOutputInfo>>(
+                            trackSecondaryOutputs,
+                            legacy2aidl_TrackSecondaryOutputInfoPair_TrackSecondaryOutputInfo));
+    return statusTFromBinderStatus(mDelegate->updateSecondaryOutputs(trackSecondaryOutputInfos));
+}
+
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AudioFlingerServerAdapter
+AudioFlingerServerAdapter::AudioFlingerServerAdapter(
+        const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {}
+
+status_t AudioFlingerServerAdapter::onTransact(uint32_t code,
+                                               const Parcel& data,
+                                               Parcel* reply,
+                                               uint32_t flags) {
+    return mDelegate->onTransactWrapper(static_cast<Delegate::TransactionCode>(code),
+                                        data,
+                                        flags,
+                                        [&] {
+                                            return BnAudioFlingerService::onTransact(
+                                                    code,
+                                                    data,
+                                                    reply,
+                                                    flags);
+                                        });
+}
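
onTransact() hands every incoming transaction code to the Delegate's onTransactWrapper() together with a continuation that runs the generated BnAudioFlingerService handler, so the service can intercept a call before or after the standard AIDL dispatch. A minimal wiring sketch, assuming `delegate` implements AudioFlingerServerAdapter::Delegate; the registration call and service name are illustrative and not part of this patch:

    // Hypothetical registration; the real Delegate implementation lives in the
    // AudioFlinger service code, not in this file.
    sp<AudioFlingerServerAdapter> adapter = new AudioFlingerServerAdapter(delegate);
    defaultServiceManager()->addService(String16("media.audio_flinger"), adapter);
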
+
+status_t AudioFlingerServerAdapter::dump(int fd, const Vector<String16>& args) {
+    return mDelegate->dump(fd, args);
+}
+
+Status AudioFlingerServerAdapter::createTrack(const media::CreateTrackRequest& request,
+                                              media::CreateTrackResponse* _aidl_return) {
+    return Status::fromStatusT(mDelegate->createTrack(request, *_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::createRecord(const media::CreateRecordRequest& request,
+                                               media::CreateRecordResponse* _aidl_return) {
+    return Status::fromStatusT(mDelegate->createRecord(request, *_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::sampleRate(int32_t ioHandle, int32_t* _aidl_return) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int32_t>(mDelegate->sampleRate(ioHandleLegacy)));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::format(int32_t output,
+                                         media::audio::common::AudioFormat* _aidl_return) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            legacy2aidl_audio_format_t_AudioFormat(mDelegate->format(outputLegacy)));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::frameCount(int32_t ioHandle, int64_t* _aidl_return) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int64_t>(mDelegate->frameCount(ioHandleLegacy)));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::latency(int32_t output, int32_t* _aidl_return) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int32_t>(mDelegate->latency(outputLegacy)));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMasterVolume(float value) {
+    return Status::fromStatusT(mDelegate->setMasterVolume(value));
+}
+
+Status AudioFlingerServerAdapter::setMasterMute(bool muted) {
+    return Status::fromStatusT(mDelegate->setMasterMute(muted));
+}
+
+Status AudioFlingerServerAdapter::masterVolume(float* _aidl_return) {
+    *_aidl_return = mDelegate->masterVolume();
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::masterMute(bool* _aidl_return) {
+    *_aidl_return = mDelegate->masterMute();
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMasterBalance(float balance) {
+    return Status::fromStatusT(mDelegate->setMasterBalance(balance));
+}
+
+Status AudioFlingerServerAdapter::getMasterBalance(float* _aidl_return) {
+    return Status::fromStatusT(mDelegate->getMasterBalance(_aidl_return));
+}
+
+Status AudioFlingerServerAdapter::setStreamVolume(media::AudioStreamType stream, float value,
+                                                  int32_t output) {
+    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    return Status::fromStatusT(mDelegate->setStreamVolume(streamLegacy, value, outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::setStreamMute(media::AudioStreamType stream, bool muted) {
+    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+    return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
+}
+
+Status AudioFlingerServerAdapter::streamVolume(media::AudioStreamType stream, int32_t output,
+                                               float* _aidl_return) {
+    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::streamMute(media::AudioStreamType stream, bool* _aidl_return) {
+    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+    *_aidl_return = mDelegate->streamMute(streamLegacy);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setMode(media::AudioMode mode) {
+    audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
+    return Status::fromStatusT(mDelegate->setMode(modeLegacy));
+}
+
+Status AudioFlingerServerAdapter::setMicMute(bool state) {
+    return Status::fromStatusT(mDelegate->setMicMute(state));
+}
+
+Status AudioFlingerServerAdapter::getMicMute(bool* _aidl_return) {
+    *_aidl_return = mDelegate->getMicMute();
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setRecordSilenced(int32_t portId, bool silenced) {
+    audio_port_handle_t portIdLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_port_handle_t(portId));
+    mDelegate->setRecordSilenced(portIdLegacy, silenced);
+    return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::setParameters(int32_t ioHandle, const std::string& keyValuePairs) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    String8 keyValuePairsLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_string_view_String8(keyValuePairs));
+    return Status::fromStatusT(mDelegate->setParameters(ioHandleLegacy, keyValuePairsLegacy));
+}
+
+Status AudioFlingerServerAdapter::getParameters(int32_t ioHandle, const std::string& keys,
+                                                std::string* _aidl_return) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    String8 keysLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_string_view_String8(keys));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            legacy2aidl_String8_string(mDelegate->getParameters(ioHandleLegacy, keysLegacy)));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::registerClient(const sp<media::IAudioFlingerClient>& client) {
+    mDelegate->registerClient(client);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getInputBufferSize(int32_t sampleRate,
+                                                     media::audio::common::AudioFormat format,
+                                                     int32_t channelMask, int64_t* _aidl_return) {
+    uint32_t sampleRateLegacy = VALUE_OR_RETURN_BINDER(convertIntegral<uint32_t>(sampleRate));
+    audio_format_t formatLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioFormat_audio_format_t(format));
+    audio_channel_mask_t channelMaskLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_channel_mask_t(channelMask));
+    size_t size = mDelegate->getInputBufferSize(sampleRateLegacy, formatLegacy, channelMaskLegacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int64_t>(size));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::openOutput(const media::OpenOutputRequest& request,
+                                             media::OpenOutputResponse* _aidl_return) {
+    return Status::fromStatusT(mDelegate->openOutput(request, _aidl_return));
+}
+
+Status AudioFlingerServerAdapter::openDuplicateOutput(int32_t output1, int32_t output2,
+                                                      int32_t* _aidl_return) {
+    audio_io_handle_t output1Legacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output1));
+    audio_io_handle_t output2Legacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output2));
+    audio_io_handle_t result = mDelegate->openDuplicateOutput(output1Legacy, output2Legacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_io_handle_t_int32_t(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::closeOutput(int32_t output) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    return Status::fromStatusT(mDelegate->closeOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::suspendOutput(int32_t output) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    return Status::fromStatusT(mDelegate->suspendOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::restoreOutput(int32_t output) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    return Status::fromStatusT(mDelegate->restoreOutput(outputLegacy));
+}
+
+Status AudioFlingerServerAdapter::openInput(const media::OpenInputRequest& request,
+                                            media::OpenInputResponse* _aidl_return) {
+    return Status::fromStatusT(mDelegate->openInput(request, _aidl_return));
+}
+
+Status AudioFlingerServerAdapter::closeInput(int32_t input) {
+    audio_io_handle_t inputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(input));
+    return Status::fromStatusT(mDelegate->closeInput(inputLegacy));
+}
+
+Status AudioFlingerServerAdapter::invalidateStream(media::AudioStreamType stream) {
+    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+    return Status::fromStatusT(mDelegate->invalidateStream(streamLegacy));
+}
+
+Status AudioFlingerServerAdapter::setVoiceVolume(float volume) {
+    return Status::fromStatusT(mDelegate->setVoiceVolume(volume));
+}
+
+Status
+AudioFlingerServerAdapter::getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    uint32_t halFramesLegacy;
+    uint32_t dspFramesLegacy;
+    RETURN_BINDER_IF_ERROR(
+            mDelegate->getRenderPosition(&halFramesLegacy, &dspFramesLegacy, outputLegacy));
+    _aidl_return->halFrames = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(halFramesLegacy));
+    _aidl_return->dspFrames = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(dspFramesLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    uint32_t result = mDelegate->getInputFramesLost(ioHandleLegacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(result));
+    return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::newAudioUniqueId(media::AudioUniqueIdUse use, int32_t* _aidl_return) {
+    audio_unique_id_use_t useLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(use));
+    audio_unique_id_t result = mDelegate->newAudioUniqueId(useLegacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_unique_id_t_int32_t(result));
+    return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::acquireAudioSessionId(int32_t audioSession, int32_t pid, int32_t uid) {
+    audio_session_t audioSessionLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_session_t(audioSession));
+    pid_t pidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_pid_t(pid));
+    uid_t uidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_uid_t(uid));
+    mDelegate->acquireAudioSessionId(audioSessionLegacy, pidLegacy, uidLegacy);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::releaseAudioSessionId(int32_t audioSession, int32_t pid) {
+    audio_session_t audioSessionLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_session_t(audioSession));
+    pid_t pidLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_int32_t_pid_t(pid));
+    mDelegate->releaseAudioSessionId(audioSessionLegacy, pidLegacy);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::queryNumberEffects(int32_t* _aidl_return) {
+    uint32_t result;
+    RETURN_BINDER_IF_ERROR(mDelegate->queryNumberEffects(&result));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int32_t>(result));
+    return Status::ok();
+}
+
+Status
+AudioFlingerServerAdapter::queryEffect(int32_t index, media::EffectDescriptor* _aidl_return) {
+    uint32_t indexLegacy = VALUE_OR_RETURN_BINDER(convertIntegral<uint32_t>(index));
+    effect_descriptor_t result;
+    RETURN_BINDER_IF_ERROR(mDelegate->queryEffect(indexLegacy, &result));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            legacy2aidl_effect_descriptor_t_EffectDescriptor(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getEffectDescriptor(const media::AudioUuid& effectUUID,
+                                                      const media::AudioUuid& typeUUID,
+                                                      int32_t preferredTypeFlag,
+                                                      media::EffectDescriptor* _aidl_return) {
+    effect_uuid_t effectUuidLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioUuid_audio_uuid_t(effectUUID));
+    effect_uuid_t typeUuidLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioUuid_audio_uuid_t(typeUUID));
+    uint32_t preferredTypeFlagLegacy = VALUE_OR_RETURN_BINDER(
+            convertReinterpret<uint32_t>(preferredTypeFlag));
+    effect_descriptor_t result;
+    RETURN_BINDER_IF_ERROR(mDelegate->getEffectDescriptor(&effectUuidLegacy, &typeUuidLegacy,
+                                                          preferredTypeFlagLegacy, &result));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            legacy2aidl_effect_descriptor_t_EffectDescriptor(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::createEffect(const media::CreateEffectRequest& request,
+                                               media::CreateEffectResponse* _aidl_return) {
+    return Status::fromStatusT(mDelegate->createEffect(request, _aidl_return));
+}
+
+Status
+AudioFlingerServerAdapter::moveEffects(int32_t session, int32_t srcOutput, int32_t dstOutput) {
+    audio_session_t sessionLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_session_t(session));
+    audio_io_handle_t srcOutputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(srcOutput));
+    audio_io_handle_t dstOutputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(dstOutput));
+    return Status::fromStatusT(
+            mDelegate->moveEffects(sessionLegacy, srcOutputLegacy, dstOutputLegacy));
+}
+
+Status AudioFlingerServerAdapter::setEffectSuspended(int32_t effectId, int32_t sessionId,
+                                                     bool suspended) {
+    int effectIdLegacy = VALUE_OR_RETURN_BINDER(convertReinterpret<int>(effectId));
+    audio_session_t sessionIdLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_session_t(sessionId));
+    mDelegate->setEffectSuspended(effectIdLegacy, sessionIdLegacy, suspended);
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::loadHwModule(const std::string& name, int32_t* _aidl_return) {
+    audio_module_handle_t result = mDelegate->loadHwModule(name.c_str());
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_module_handle_t_int32_t(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getPrimaryOutputSamplingRate(int32_t* _aidl_return) {
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int32_t>(mDelegate->getPrimaryOutputSamplingRate()));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getPrimaryOutputFrameCount(int64_t* _aidl_return) {
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int64_t>(mDelegate->getPrimaryOutputFrameCount()));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
+    return Status::fromStatusT(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
+}
+
+Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPort& port,
+                                               media::AudioPort* _aidl_return) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+    RETURN_BINDER_IF_ERROR(mDelegate->getAudioPort(&portLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPort(portLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
+                                                   int32_t* _aidl_return) {
+    audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
+    audio_patch_handle_t handleLegacy;
+    RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_patch_handle_t_int32_t(handleLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::releaseAudioPatch(int32_t handle) {
+    audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_patch_handle_t(handle));
+    return Status::fromStatusT(mDelegate->releaseAudioPatch(handleLegacy));
+}
+
+Status AudioFlingerServerAdapter::listAudioPatches(int32_t maxCount,
+                            std::vector<media::AudioPatch>* _aidl_return) {
+    unsigned int count = VALUE_OR_RETURN_BINDER(convertIntegral<unsigned int>(maxCount));
+    count = std::min(count, static_cast<unsigned int>(MAX_ITEMS_PER_LIST));
+    std::unique_ptr<audio_patch[]> patchesLegacy(new audio_patch[count]);
+    RETURN_BINDER_IF_ERROR(mDelegate->listAudioPatches(&count, patchesLegacy.get()));
+    RETURN_BINDER_IF_ERROR(convertRange(&patchesLegacy[0],
+                           &patchesLegacy[count],
+                           std::back_inserter(*_aidl_return),
+                           legacy2aidl_audio_patch_AudioPatch));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfig& config) {
+    audio_port_config configLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioPortConfig_audio_port_config(config));
+    return Status::fromStatusT(mDelegate->setAudioPortConfig(&configLegacy));
+}
+
+Status AudioFlingerServerAdapter::getAudioHwSyncForSession(int32_t sessionId,
+                                                           int32_t* _aidl_return) {
+    audio_session_t sessionIdLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_session_t(sessionId));
+    audio_hw_sync_t result = mDelegate->getAudioHwSyncForSession(sessionIdLegacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_hw_sync_t_int32_t(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::systemReady() {
+    return Status::fromStatusT(mDelegate->systemReady());
+}
+
+Status AudioFlingerServerAdapter::frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) {
+    audio_io_handle_t ioHandleLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(ioHandle));
+    size_t result = mDelegate->frameCountHAL(ioHandleLegacy);
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int64_t>(result));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getMicrophones(
+        std::vector<media::MicrophoneInfoData>* _aidl_return) {
+    std::vector<media::MicrophoneInfo> resultLegacy;
+    RETURN_BINDER_IF_ERROR(mDelegate->getMicrophones(&resultLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(convertContainer<std::vector<media::MicrophoneInfoData>>(
+            resultLegacy, media::legacy2aidl_MicrophoneInfo));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setAudioHalPids(const std::vector<int32_t>& pids) {
+    std::vector<pid_t> pidsLegacy = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<pid_t>>(pids, aidl2legacy_int32_t_pid_t));
+    RETURN_BINDER_IF_ERROR(mDelegate->setAudioHalPids(pidsLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    return Status::fromStatusT(mDelegate->setVibratorInfos(vibratorInfos));
+}
+
+Status AudioFlingerServerAdapter::updateSecondaryOutputs(
+        const std::vector<media::TrackSecondaryOutputInfo>& trackSecondaryOutputInfos) {
+    TrackSecondaryOutputsMap trackSecondaryOutputs =
+            VALUE_OR_RETURN_BINDER(convertContainer<TrackSecondaryOutputsMap>(
+                    trackSecondaryOutputInfos,
+                    aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair));
+    return Status::fromStatusT(mDelegate->updateSecondaryOutputs(trackSecondaryOutputs));
+}
 
 } // namespace android
diff --git a/media/libaudioclient/IAudioFlingerClient.cpp b/media/libaudioclient/IAudioFlingerClient.cpp
deleted file mode 100644
index 47eb7dc..0000000
--- a/media/libaudioclient/IAudioFlingerClient.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioFlingerClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioFlingerClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
-    IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION
-};
-
-class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient>
-{
-public:
-    explicit BpAudioFlingerClient(const sp<IBinder>& impl)
-        : BpInterface<IAudioFlingerClient>(impl)
-    {
-    }
-
-    void ioConfigChanged(audio_io_config_event event, const sp<AudioIoDescriptor>& ioDesc)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
-        data.writeInt32(event);
-        data.writeInt32((int32_t)ioDesc->mIoHandle);
-        data.write(&ioDesc->mPatch, sizeof(struct audio_patch));
-        data.writeInt32(ioDesc->mSamplingRate);
-        data.writeInt32(ioDesc->mFormat);
-        data.writeInt32(ioDesc->mChannelMask);
-        data.writeInt64(ioDesc->mFrameCount);
-        data.writeInt64(ioDesc->mFrameCountHAL);
-        data.writeInt32(ioDesc->mLatency);
-        data.writeInt32(ioDesc->mPortId);
-        remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlingerClient::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-    case IO_CONFIG_CHANGED: {
-            CHECK_INTERFACE(IAudioFlingerClient, data, reply);
-            audio_io_config_event event = (audio_io_config_event)data.readInt32();
-            sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
-            ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32();
-            data.read(&ioDesc->mPatch, sizeof(struct audio_patch));
-            ioDesc->mSamplingRate = data.readInt32();
-            ioDesc->mFormat = (audio_format_t) data.readInt32();
-            ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
-            ioDesc->mFrameCount = data.readInt64();
-            ioDesc->mFrameCountHAL = data.readInt64();
-            ioDesc->mLatency = data.readInt32();
-            ioDesc->mPortId = data.readInt32();
-            ioConfigChanged(event, ioDesc);
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
deleted file mode 100644
index 60af84b..0000000
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ /dev/null
@@ -1,2805 +0,0 @@
-/*
-**
-** Copyright 2009, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IAudioPolicyService"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <math.h>
-#include <sys/types.h>
-
-#include <android/media/ICaptureStateListener.h>
-#include <binder/IPCThreadState.h>
-#include <binder/Parcel.h>
-#include <media/AudioEffect.h>
-#include <media/IAudioPolicyService.h>
-#include <mediautils/ServiceUtilities.h>
-#include <mediautils/TimeCheck.h>
-#include <system/audio.h>
-
-namespace android {
-
-using media::ICaptureStateListener;
-
-enum {
-    SET_DEVICE_CONNECTION_STATE = IBinder::FIRST_CALL_TRANSACTION,
-    GET_DEVICE_CONNECTION_STATE,
-    HANDLE_DEVICE_CONFIG_CHANGE,
-    SET_PHONE_STATE,
-    SET_RINGER_MODE,    // reserved, no longer used
-    SET_FORCE_USE,
-    GET_FORCE_USE,
-    GET_OUTPUT,
-    START_OUTPUT,
-    STOP_OUTPUT,
-    RELEASE_OUTPUT,
-    GET_INPUT_FOR_ATTR,
-    START_INPUT,
-    STOP_INPUT,
-    RELEASE_INPUT,
-    INIT_STREAM_VOLUME,
-    SET_STREAM_VOLUME,
-    GET_STREAM_VOLUME,
-    SET_VOLUME_ATTRIBUTES,
-    GET_VOLUME_ATTRIBUTES,
-    GET_MIN_VOLUME_FOR_ATTRIBUTES,
-    GET_MAX_VOLUME_FOR_ATTRIBUTES,
-    GET_STRATEGY_FOR_STREAM,
-    GET_OUTPUT_FOR_EFFECT,
-    REGISTER_EFFECT,
-    UNREGISTER_EFFECT,
-    IS_STREAM_ACTIVE,
-    IS_SOURCE_ACTIVE,
-    GET_DEVICES_FOR_STREAM,
-    QUERY_DEFAULT_PRE_PROCESSING,
-    SET_EFFECT_ENABLED,
-    IS_STREAM_ACTIVE_REMOTELY,
-    IS_OFFLOAD_SUPPORTED,
-    IS_DIRECT_OUTPUT_SUPPORTED,
-    LIST_AUDIO_PORTS,
-    GET_AUDIO_PORT,
-    CREATE_AUDIO_PATCH,
-    RELEASE_AUDIO_PATCH,
-    LIST_AUDIO_PATCHES,
-    SET_AUDIO_PORT_CONFIG,
-    REGISTER_CLIENT,
-    GET_OUTPUT_FOR_ATTR,
-    ACQUIRE_SOUNDTRIGGER_SESSION,
-    RELEASE_SOUNDTRIGGER_SESSION,
-    GET_PHONE_STATE,
-    REGISTER_POLICY_MIXES,
-    START_AUDIO_SOURCE,
-    STOP_AUDIO_SOURCE,
-    SET_AUDIO_PORT_CALLBACK_ENABLED,
-    SET_AUDIO_VOLUME_GROUP_CALLBACK_ENABLED,
-    SET_MASTER_MONO,
-    GET_MASTER_MONO,
-    GET_STREAM_VOLUME_DB,
-    GET_SURROUND_FORMATS,
-    SET_SURROUND_FORMAT_ENABLED,
-    ADD_STREAM_DEFAULT_EFFECT,
-    REMOVE_STREAM_DEFAULT_EFFECT,
-    ADD_SOURCE_DEFAULT_EFFECT,
-    REMOVE_SOURCE_DEFAULT_EFFECT,
-    SET_ASSISTANT_UID,
-    SET_A11Y_SERVICES_UIDS,
-    IS_HAPTIC_PLAYBACK_SUPPORTED,
-    SET_UID_DEVICE_AFFINITY,
-    REMOVE_UID_DEVICE_AFFINITY,
-    SET_USERID_DEVICE_AFFINITY,
-    REMOVE_USERID_DEVICE_AFFINITY,
-    GET_OFFLOAD_FORMATS_A2DP,
-    LIST_AUDIO_PRODUCT_STRATEGIES,
-    GET_STRATEGY_FOR_ATTRIBUTES,
-    LIST_AUDIO_VOLUME_GROUPS,
-    GET_VOLUME_GROUP_FOR_ATTRIBUTES,
-    SET_SUPPORTED_SYSTEM_USAGES,
-    SET_ALLOWED_CAPTURE_POLICY,
-    MOVE_EFFECTS_TO_IO,
-    SET_RTT_ENABLED,
-    IS_CALL_SCREEN_MODE_SUPPORTED,
-    SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-    REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-    GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-    GET_DEVICES_FOR_ATTRIBUTES,
-    AUDIO_MODULES_UPDATED,  // oneway
-    SET_CURRENT_IME_UID,
-    REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER,
-};
-
-#define MAX_ITEMS_PER_LIST 1024
-
-class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
-{
-public:
-    explicit BpAudioPolicyService(const sp<IBinder>& impl)
-        : BpInterface<IAudioPolicyService>(impl)
-    {
-    }
-
-    virtual status_t setDeviceConnectionState(
-                                    audio_devices_t device,
-                                    audio_policy_dev_state_t state,
-                                    const char *device_address,
-                                    const char *device_name,
-                                    audio_format_t encodedFormat)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(device));
-        data.writeInt32(static_cast <uint32_t>(state));
-        data.writeCString(device_address);
-        data.writeCString(device_name);
-        data.writeInt32(static_cast <uint32_t>(encodedFormat));
-        remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual audio_policy_dev_state_t getDeviceConnectionState(
-                                    audio_devices_t device,
-                                    const char *device_address)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(device));
-        data.writeCString(device_address);
-        remote()->transact(GET_DEVICE_CONNECTION_STATE, data, &reply);
-        return static_cast <audio_policy_dev_state_t>(reply.readInt32());
-    }
-
-    virtual status_t handleDeviceConfigChange(audio_devices_t device,
-                                              const char *device_address,
-                                              const char *device_name,
-                                              audio_format_t encodedFormat)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(device));
-        data.writeCString(device_address);
-        data.writeCString(device_name);
-        data.writeInt32(static_cast <uint32_t>(encodedFormat));
-        remote()->transact(HANDLE_DEVICE_CONFIG_CHANGE, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setPhoneState(audio_mode_t state, uid_t uid)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(state);
-        data.writeInt32(uid);
-        remote()->transact(SET_PHONE_STATE, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(usage));
-        data.writeInt32(static_cast <uint32_t>(config));
-        remote()->transact(SET_FORCE_USE, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(usage));
-        remote()->transact(GET_FORCE_USE, data, &reply);
-        return static_cast <audio_policy_forced_cfg_t> (reply.readInt32());
-    }
-
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        remote()->transact(GET_OUTPUT, data, &reply);
-        return static_cast <audio_io_handle_t> (reply.readInt32());
-    }
-
-    status_t getOutputForAttr(audio_attributes_t *attr,
-                              audio_io_handle_t *output,
-                              audio_session_t session,
-                              audio_stream_type_t *stream,
-                              pid_t pid,
-                              uid_t uid,
-                              const audio_config_t *config,
-                              audio_output_flags_t flags,
-                              audio_port_handle_t *selectedDeviceId,
-                              audio_port_handle_t *portId,
-                              std::vector<audio_io_handle_t> *secondaryOutputs) override
-        {
-            Parcel data, reply;
-            data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-            if (attr == nullptr) {
-                ALOGE("%s NULL audio attributes", __func__);
-                return BAD_VALUE;
-            }
-            if (output == nullptr) {
-                ALOGE("%s NULL output - shouldn't happen", __func__);
-                return BAD_VALUE;
-            }
-            if (selectedDeviceId == nullptr) {
-                ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
-                return BAD_VALUE;
-            }
-            if (portId == nullptr) {
-                ALOGE("%s NULL portId - shouldn't happen", __func__);
-                return BAD_VALUE;
-            }
-            if (secondaryOutputs == nullptr) {
-                ALOGE("%s NULL secondaryOutputs - shouldn't happen", __func__);
-                return BAD_VALUE;
-            }
-            data.write(attr, sizeof(audio_attributes_t));
-            data.writeInt32(session);
-            if (stream == NULL) {
-                data.writeInt32(0);
-            } else {
-                data.writeInt32(1);
-                data.writeInt32(*stream);
-            }
-            data.writeInt32(pid);
-            data.writeInt32(uid);
-            data.write(config, sizeof(audio_config_t));
-            data.writeInt32(static_cast <uint32_t>(flags));
-            data.writeInt32(*selectedDeviceId);
-            data.writeInt32(*portId);
-            status_t status = remote()->transact(GET_OUTPUT_FOR_ATTR, data, &reply);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            status = (status_t)reply.readInt32();
-            if (status != NO_ERROR) {
-                return status;
-            }
-            status = (status_t)reply.read(&attr, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            *output = (audio_io_handle_t)reply.readInt32();
-            audio_stream_type_t lStream = (audio_stream_type_t)reply.readInt32();
-            if (stream != NULL) {
-                *stream = lStream;
-            }
-            *selectedDeviceId = (audio_port_handle_t)reply.readInt32();
-            *portId = (audio_port_handle_t)reply.readInt32();
-            secondaryOutputs->resize(reply.readInt32());
-            return reply.read(secondaryOutputs->data(),
-                              secondaryOutputs->size() * sizeof(audio_io_handle_t));
-        }
-
-    virtual status_t startOutput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t)portId);
-        remote()->transact(START_OUTPUT, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t stopOutput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t)portId);
-        remote()->transact(STOP_OUTPUT, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual void releaseOutput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t)portId);
-        remote()->transact(RELEASE_OUTPUT, data, &reply);
-    }
-
-    virtual status_t getInputForAttr(const audio_attributes_t *attr,
-                                     audio_io_handle_t *input,
-                                     audio_unique_id_t riid,
-                                     audio_session_t session,
-                                     pid_t pid,
-                                     uid_t uid,
-                                     const String16& opPackageName,
-                                     const audio_config_base_t *config,
-                                     audio_input_flags_t flags,
-                                     audio_port_handle_t *selectedDeviceId,
-                                     audio_port_handle_t *portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        if (attr == NULL) {
-            ALOGE("getInputForAttr NULL attr - shouldn't happen");
-            return BAD_VALUE;
-        }
-        if (input == NULL) {
-            ALOGE("getInputForAttr NULL input - shouldn't happen");
-            return BAD_VALUE;
-        }
-        if (selectedDeviceId == NULL) {
-            ALOGE("getInputForAttr NULL selectedDeviceId - shouldn't happen");
-            return BAD_VALUE;
-        }
-        if (portId == NULL) {
-            ALOGE("getInputForAttr NULL portId - shouldn't happen");
-            return BAD_VALUE;
-        }
-
-        data.write(attr, sizeof(audio_attributes_t));
-        data.writeInt32(*input);
-        data.writeInt32(riid);
-        data.writeInt32(session);
-        data.writeInt32(pid);
-        data.writeInt32(uid);
-        data.writeString16(opPackageName);
-        data.write(config, sizeof(audio_config_base_t));
-        data.writeInt32(flags);
-        data.writeInt32(*selectedDeviceId);
-        data.writeInt32(*portId);
-        status_t status = remote()->transact(GET_INPUT_FOR_ATTR, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        *input = (audio_io_handle_t)reply.readInt32();
-        *selectedDeviceId = (audio_port_handle_t)reply.readInt32();
-        *portId = (audio_port_handle_t)reply.readInt32();
-        return NO_ERROR;
-    }
-
-    virtual status_t startInput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(portId);
-        remote()->transact(START_INPUT, data, &reply);
-        status_t status = static_cast <status_t> (reply.readInt32());
-        return status;
-    }
-
-    virtual status_t stopInput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(portId);
-        remote()->transact(STOP_INPUT, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual void releaseInput(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(portId);
-        remote()->transact(RELEASE_INPUT, data, &reply);
-    }
-
-    virtual status_t initStreamVolume(audio_stream_type_t stream,
-                                    int indexMin,
-                                    int indexMax)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        data.writeInt32(indexMin);
-        data.writeInt32(indexMax);
-        remote()->transact(INIT_STREAM_VOLUME, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
-                                          int index,
-                                          audio_devices_t device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        data.writeInt32(index);
-        data.writeInt32(static_cast <uint32_t>(device));
-        remote()->transact(SET_STREAM_VOLUME, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
-                                          int *index,
-                                          audio_devices_t device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        data.writeInt32(static_cast <uint32_t>(device));
-
-        remote()->transact(GET_STREAM_VOLUME, data, &reply);
-        int lIndex = reply.readInt32();
-        if (index) *index = lIndex;
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr, int index,
-                                                 audio_devices_t device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&attr, sizeof(audio_attributes_t));
-        data.writeInt32(index);
-        data.writeInt32(static_cast <uint32_t>(device));
-        status_t status = remote()->transact(SET_VOLUME_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-    virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr, int &index,
-                                                 audio_devices_t device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&attr, sizeof(audio_attributes_t));
-        data.writeInt32(static_cast <uint32_t>(device));
-        status_t status = remote()->transact(GET_VOLUME_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        index = reply.readInt32();
-        return NO_ERROR;
-    }
-    virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&attr, sizeof(audio_attributes_t));
-        status_t status = remote()->transact(GET_MIN_VOLUME_FOR_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        index = reply.readInt32();
-        return NO_ERROR;
-    }
-    virtual status_t getMaxVolumeIndexForAttributes(const audio_attributes_t &attr, int &index)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&attr, sizeof(audio_attributes_t));
-        status_t status = remote()->transact(GET_MAX_VOLUME_FOR_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        index = reply.readInt32();
-        return NO_ERROR;
-    }
-    virtual uint32_t getStrategyForStream(audio_stream_type_t stream)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        remote()->transact(GET_STRATEGY_FOR_STREAM, data, &reply);
-        return reply.readUint32();
-    }
-
-    virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <uint32_t>(stream));
-        remote()->transact(GET_DEVICES_FOR_STREAM, data, &reply);
-        return (audio_devices_t) reply.readInt32();
-    }
-
-    virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(desc, sizeof(effect_descriptor_t));
-        remote()->transact(GET_OUTPUT_FOR_EFFECT, data, &reply);
-        return static_cast <audio_io_handle_t> (reply.readInt32());
-    }
-
-    virtual status_t registerEffect(const effect_descriptor_t *desc,
-                                        audio_io_handle_t io,
-                                        uint32_t strategy,
-                                        audio_session_t session,
-                                        int id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(desc, sizeof(effect_descriptor_t));
-        data.writeInt32(io);
-        data.writeInt32(strategy);
-        data.writeInt32(session);
-        data.writeInt32(id);
-        remote()->transact(REGISTER_EFFECT, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t unregisterEffect(int id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(id);
-        remote()->transact(UNREGISTER_EFFECT, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setEffectEnabled(int id, bool enabled)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(id);
-        data.writeInt32(enabled);
-        remote()->transact(SET_EFFECT_ENABLED, data, &reply);
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    status_t moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) override
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(ids.size());
-        for (auto id : ids) {
-            data.writeInt32(id);
-        }
-        data.writeInt32(io);
-        status_t status = remote()->transact(MOVE_EFFECTS_TO_IO, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        data.writeInt32(inPastMs);
-        remote()->transact(IS_STREAM_ACTIVE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t) stream);
-        data.writeInt32(inPastMs);
-        remote()->transact(IS_STREAM_ACTIVE_REMOTELY, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool isSourceActive(audio_source_t source) const
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32((int32_t) source);
-        remote()->transact(IS_SOURCE_ACTIVE, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t queryDefaultPreProcessing(audio_session_t audioSession,
-                                               effect_descriptor_t *descriptors,
-                                               uint32_t *count)
-    {
-        if (descriptors == NULL || count == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(audioSession);
-        data.writeInt32(*count);
-        status_t status = remote()->transact(QUERY_DEFAULT_PRE_PROCESSING, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        uint32_t retCount = reply.readInt32();
-        if (retCount != 0) {
-            uint32_t numDesc = (retCount < *count) ? retCount : *count;
-            reply.read(descriptors, sizeof(effect_descriptor_t) * numDesc);
-        }
-        *count = retCount;
-        return status;
-    }
-
-    status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(systemUsages.size());
-        for (auto systemUsage : systemUsages) {
-            data.writeInt32(systemUsage);
-        }
-        status_t status = remote()->transact(SET_SUPPORTED_SYSTEM_USAGES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) override {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(uid);
-        data.writeInt32(flags);
-        remote()->transact(SET_ALLOWED_CAPTURE_POLICY, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool isOffloadSupported(const audio_offload_info_t& info)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&info, sizeof(audio_offload_info_t));
-        remote()->transact(IS_OFFLOAD_SUPPORTED, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual bool isDirectOutputSupported(const audio_config_base_t& config,
-                                         const audio_attributes_t& attributes) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&config, sizeof(audio_config_base_t));
-        data.write(&attributes, sizeof(audio_attributes_t));
-        status_t status = remote()->transact(IS_DIRECT_OUTPUT_SUPPORTED, data, &reply);
-        return status == NO_ERROR ? static_cast<bool>(reply.readInt32()) : false;
-    }
-
-    virtual status_t listAudioPorts(audio_port_role_t role,
-                                    audio_port_type_t type,
-                                    unsigned int *num_ports,
-                                    struct audio_port *ports,
-                                    unsigned int *generation)
-    {
-        if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
-                generation == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        unsigned int numPortsReq = (ports == NULL) ? 0 : *num_ports;
-        data.writeInt32(role);
-        data.writeInt32(type);
-        data.writeInt32(numPortsReq);
-        status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t)reply.readInt32();
-            *num_ports = (unsigned int)reply.readInt32();
-        }
-        if (status == NO_ERROR) {
-            if (numPortsReq > *num_ports) {
-                numPortsReq = *num_ports;
-            }
-            if (numPortsReq > 0) {
-                reply.read(ports, numPortsReq * sizeof(struct audio_port));
-            }
-            *generation = reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t getAudioPort(struct audio_port *port)
-    {
-        if (port == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(port, sizeof(struct audio_port));
-        status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        reply.read(port, sizeof(struct audio_port));
-        return status;
-    }
-
-    virtual status_t createAudioPatch(const struct audio_patch *patch,
-                                       audio_patch_handle_t *handle)
-    {
-        if (patch == NULL || handle == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(patch, sizeof(struct audio_patch));
-        data.write(handle, sizeof(audio_patch_handle_t));
-        status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
-        if (status != NO_ERROR ||
-                (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-        reply.read(handle, sizeof(audio_patch_handle_t));
-        return status;
-    }
-
-    virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(&handle, sizeof(audio_patch_handle_t));
-        status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
-        if (status != NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t listAudioPatches(unsigned int *num_patches,
-                                      struct audio_patch *patches,
-                                      unsigned int *generation)
-    {
-        if (num_patches == NULL || (*num_patches != 0 && patches == NULL) ||
-                generation == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        unsigned int numPatchesReq = (patches == NULL) ? 0 : *num_patches;
-        data.writeInt32(numPatchesReq);
-        status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t)reply.readInt32();
-            *num_patches = (unsigned int)reply.readInt32();
-        }
-        if (status == NO_ERROR) {
-            if (numPatchesReq > *num_patches) {
-                numPatchesReq = *num_patches;
-            }
-            if (numPatchesReq > 0) {
-                reply.read(patches, numPatchesReq * sizeof(struct audio_patch));
-            }
-            *generation = reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config)
-    {
-        if (config == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(config, sizeof(struct audio_port_config));
-        status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
-        if (status != NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual void registerClient(const sp<IAudioPolicyServiceClient>& client)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeStrongBinder(IInterface::asBinder(client));
-        remote()->transact(REGISTER_CLIENT, data, &reply);
-    }
-
-    virtual void setAudioPortCallbacksEnabled(bool enabled)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(enabled ? 1 : 0);
-        remote()->transact(SET_AUDIO_PORT_CALLBACK_ENABLED, data, &reply);
-    }
-
-    virtual void setAudioVolumeGroupCallbacksEnabled(bool enabled)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(enabled ? 1 : 0);
-        remote()->transact(SET_AUDIO_VOLUME_GROUP_CALLBACK_ENABLED, data, &reply);
-    }
-
-    virtual status_t acquireSoundTriggerSession(audio_session_t *session,
-                                            audio_io_handle_t *ioHandle,
-                                            audio_devices_t *device)
-    {
-        if (session == NULL || ioHandle == NULL || device == NULL) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = remote()->transact(ACQUIRE_SOUNDTRIGGER_SESSION, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        if (status == NO_ERROR) {
-            *session = (audio_session_t)reply.readInt32();
-            *ioHandle = (audio_io_handle_t)reply.readInt32();
-            *device = (audio_devices_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t releaseSoundTriggerSession(audio_session_t session)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(session);
-        status_t status = remote()->transact(RELEASE_SOUNDTRIGGER_SESSION, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return (status_t)reply.readInt32();
-    }
-
-    virtual audio_mode_t getPhoneState()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_PHONE_STATE, data, &reply);
-        if (status != NO_ERROR) {
-            return AUDIO_MODE_INVALID;
-        }
-        return (audio_mode_t)reply.readInt32();
-    }
-
-    virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(registration ? 1 : 0);
-        size_t size = mixes.size();
-        if (size > MAX_MIXES_PER_POLICY) {
-            size = MAX_MIXES_PER_POLICY;
-        }
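-        // Reserve a placeholder for the mix count, serialize each mix, and back-patch the
-        // count afterwards so that entries which fail to parcel are silently dropped.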
-        size_t sizePosition = data.dataPosition();
-        data.writeInt32(size);
-        size_t finalSize = size;
-        for (size_t i = 0; i < size; i++) {
-            size_t position = data.dataPosition();
-            if (mixes[i].writeToParcel(&data) != NO_ERROR) {
-                data.setDataPosition(position);
-                finalSize--;
-            }
-        }
-        if (size != finalSize) {
-            size_t position = data.dataPosition();
-            data.setDataPosition(sizePosition);
-            data.writeInt32(finalSize);
-            data.setDataPosition(position);
-        }
-        status_t status = remote()->transact(REGISTER_POLICY_MIXES, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t startAudioSource(const struct audio_port_config *source,
-                                      const audio_attributes_t *attributes,
-                                      audio_port_handle_t *portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        if (source == NULL || attributes == NULL || portId == NULL) {
-            return BAD_VALUE;
-        }
-        data.write(source, sizeof(struct audio_port_config));
-        data.write(attributes, sizeof(audio_attributes_t));
-        status_t status = remote()->transact(START_AUDIO_SOURCE, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        if (status != NO_ERROR) {
-            return status;
-        }
-        *portId = (audio_port_handle_t)reply.readInt32();
-        return status;
-    }
-
-    virtual status_t stopAudioSource(audio_port_handle_t portId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(portId);
-        status_t status = remote()->transact(STOP_AUDIO_SOURCE, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = (status_t)reply.readInt32();
-        return status;
-    }
-
-    virtual status_t setMasterMono(bool mono)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast<int32_t>(mono));
-        status_t status = remote()->transact(SET_MASTER_MONO, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual status_t getMasterMono(bool *mono)
-    {
-        if (mono == nullptr) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        status_t status = remote()->transact(GET_MASTER_MONO, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status == NO_ERROR) {
-            *mono = static_cast<bool>(reply.readInt32());
-        }
-        return status;
-    }
-
-    virtual float getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast <int32_t>(stream));
-        data.writeInt32(static_cast <int32_t>(index));
-        data.writeUint32(static_cast <uint32_t>(device));
-        status_t status = remote()->transact(GET_STREAM_VOLUME_DB, data, &reply);
-        if (status != NO_ERROR) {
-            return NAN;
-        }
-        return reply.readFloat();
-    }
-
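-    // Same capacity/total-count handshake as listAudioPatches: send the caller's capacity,
-    // read back the total number of surround formats, then copy at most the requested count.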
-    virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
-                                        audio_format_t *surroundFormats,
-                                        bool *surroundFormatsEnabled,
-                                        bool reported)
-    {
-        if (numSurroundFormats == NULL || (*numSurroundFormats != 0 &&
-                (surroundFormats == NULL || surroundFormatsEnabled == NULL))) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        unsigned int numSurroundFormatsReq = *numSurroundFormats;
-        data.writeUint32(numSurroundFormatsReq);
-        data.writeBool(reported);
-        status_t status = remote()->transact(GET_SURROUND_FORMATS, data, &reply);
-        if (status == NO_ERROR && (status = (status_t)reply.readInt32()) == NO_ERROR) {
-            *numSurroundFormats = reply.readUint32();
-        }
-        if (status == NO_ERROR) {
-            if (numSurroundFormatsReq > *numSurroundFormats) {
-                numSurroundFormatsReq = *numSurroundFormats;
-            }
-            if (numSurroundFormatsReq > 0) {
-                status = reply.read(surroundFormats,
-                                    numSurroundFormatsReq * sizeof(audio_format_t));
-                if (status != NO_ERROR) {
-                    return status;
-                }
-                status = reply.read(surroundFormatsEnabled,
-                                    numSurroundFormatsReq * sizeof(bool));
-            }
-        }
-        return status;
-    }
-
-    virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(audioFormat);
-        data.writeBool(enabled);
-        status_t status = remote()->transact(SET_SURROUND_FORMAT_ENABLED, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return reply.readInt32();
-    }
-
-    virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                std::vector<audio_format_t> *formats)
-    {
-        if (formats == NULL) {
-            return BAD_VALUE;
-        }
-
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_OFFLOAD_FORMATS_A2DP, data, &reply);
-        if (status != NO_ERROR || (status = (status_t)reply.readInt32()) != NO_ERROR) {
-            return status;
-        }
-
-        size_t list_size = reply.readUint32();
-
-        for (size_t i = 0; i < list_size; i++) {
-            formats->push_back(static_cast<audio_format_t>(reply.readInt32()));
-        }
-        return NO_ERROR;
-    }
-
-    virtual status_t addStreamDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_usage_t usage,
-                                            audio_unique_id_t* id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(type, sizeof(effect_uuid_t));
-        data.writeString16(opPackageName);
-        data.write(uuid, sizeof(effect_uuid_t));
-        data.writeInt32(priority);
-        data.writeInt32((int32_t) usage);
-        status_t status = remote()->transact(ADD_STREAM_DEFAULT_EFFECT, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        *id = reply.readInt32();
-        return status;
-    }
-
-    virtual status_t removeStreamDefaultEffect(audio_unique_id_t id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(id);
-        status_t status = remote()->transact(REMOVE_STREAM_DEFAULT_EFFECT, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t addSourceDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_source_t source,
-                                            audio_unique_id_t* id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.write(type, sizeof(effect_uuid_t));
-        data.writeString16(opPackageName);
-        data.write(uuid, sizeof(effect_uuid_t));
-        data.writeInt32(priority);
-        data.writeInt32((int32_t) source);
-        status_t status = remote()->transact(ADD_SOURCE_DEFAULT_EFFECT, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast <status_t> (reply.readInt32());
-        *id = reply.readInt32();
-        return status;
-    }
-
-    virtual status_t removeSourceDefaultEffect(audio_unique_id_t id)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(id);
-        status_t status = remote()->transact(REMOVE_SOURCE_DEFAULT_EFFECT, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setAssistantUid(uid_t uid)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(uid);
-        status_t status = remote()->transact(SET_ASSISTANT_UID, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setA11yServicesUids(const std::vector<uid_t>& uids)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(uids.size());
-        for (auto uid : uids) {
-            data.writeInt32(uid);
-        }
-        status_t status = remote()->transact(SET_A11Y_SERVICES_UIDS, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual status_t setCurrentImeUid(uid_t uid)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(uid);
-        status_t status = remote()->transact(SET_CURRENT_IME_UID, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast <status_t> (reply.readInt32());
-    }
-
-    virtual bool isHapticPlaybackSupported()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = remote()->transact(IS_HAPTIC_PLAYBACK_SUPPORTED, data, &reply);
-        if (status != NO_ERROR) {
-            return false;
-        }
-        return reply.readBool();
-    }
-
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        data.writeInt32((int32_t) uid);
-        size_t size = devices.size();
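-        // Same reserve/back-patch pattern as registerPolicyMixes: the count written below is
-        // corrected afterwards if any device entry fails to parcel.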
-        size_t sizePosition = data.dataPosition();
-        data.writeInt32((int32_t) size);
-        size_t finalSize = size;
-        for (size_t i = 0; i < size; i++) {
-            size_t position = data.dataPosition();
-            if (devices[i].writeToParcel(&data) != NO_ERROR) {
-                data.setDataPosition(position);
-                finalSize--;
-            }
-        }
-        if (size != finalSize) {
-            size_t position = data.dataPosition();
-            data.setDataPosition(sizePosition);
-            data.writeInt32(finalSize);
-            data.setDataPosition(position);
-        }
-
-        status_t status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t removeUidDeviceAffinities(uid_t uid) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        data.writeInt32((int32_t) uid);
-
-        status_t status =
-            remote()->transact(REMOVE_UID_DEVICE_AFFINITY, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t) reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t setUserIdDeviceAffinities(int userId,
-            const Vector<AudioDeviceTypeAddr>& devices)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        data.writeInt32((int32_t) userId);
-        size_t size = devices.size();
-        size_t sizePosition = data.dataPosition();
-        data.writeInt32((int32_t) size);
-        size_t finalSize = size;
-        for (size_t i = 0; i < size; i++) {
-            size_t position = data.dataPosition();
-            if (devices[i].writeToParcel(&data) != NO_ERROR) {
-                data.setDataPosition(position);
-                finalSize--;
-            }
-        }
-        if (size != finalSize) {
-            size_t position = data.dataPosition();
-            data.setDataPosition(sizePosition);
-            data.writeInt32(finalSize);
-            data.setDataPosition(position);
-        }
-
-        status_t status = remote()->transact(SET_USERID_DEVICE_AFFINITY, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t)reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t removeUserIdDeviceAffinities(int userId) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        data.writeInt32((int32_t) userId);
-
-        status_t status =
-            remote()->transact(REMOVE_USERID_DEVICE_AFFINITY, data, &reply);
-        if (status == NO_ERROR) {
-            status = (status_t) reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        status_t status = remote()->transact(LIST_AUDIO_PRODUCT_STRATEGIES, data, &reply);
-        if (status != NO_ERROR) {
-            ALOGE("%s: permission denied", __func__);
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        uint32_t numStrategies = static_cast<uint32_t>(reply.readInt32());
-        for (size_t i = 0; i < numStrategies; i++) {
-            AudioProductStrategy strategy;
-            status = strategy.readFromParcel(&reply);
-            if (status != NO_ERROR) {
-                ALOGE("%s: failed to read strategies", __FUNCTION__);
-                strategies.clear();
-                return status;
-            }
-            strategies.push_back(strategy);
-        }
-        return NO_ERROR;
-    }
-
-    virtual status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                           product_strategy_t &productStrategy)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = aa.writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(GET_STRATEGY_FOR_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        productStrategy = static_cast<product_strategy_t>(reply.readInt32());
-        return NO_ERROR;
-    }
-
-    virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-
-        status_t status = remote()->transact(LIST_AUDIO_VOLUME_GROUPS, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        uint32_t numGroups = static_cast<uint32_t>(reply.readInt32());
-        for (size_t i = 0; i < numGroups; i++) {
-            AudioVolumeGroup group;
-            status = group.readFromParcel(&reply);
-            if (status != NO_ERROR) {
-                ALOGE("%s: failed to read volume groups", __FUNCTION__);
-                groups.clear();
-                return status;
-            }
-            groups.push_back(group);
-        }
-        return NO_ERROR;
-    }
-
-    virtual status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                       volume_group_t &volumeGroup)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = aa.writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(GET_VOLUME_GROUP_FOR_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status != NO_ERROR) {
-            return status;
-        }
-        volumeGroup = static_cast<volume_group_t>(reply.readInt32());
-        return NO_ERROR;
-    }
-
-    virtual status_t setRttEnabled(bool enabled)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeInt32(static_cast<int32_t>(enabled));
-        status_t status = remote()->transact(SET_RTT_ENABLED, data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual bool isCallScreenModeSupported()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = remote()->transact(IS_CALL_SCREEN_MODE_SUPPORTED, data, &reply);
-        if (status != NO_ERROR) {
-            return false;
-        }
-        return reply.readBool();
-    }
-
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = device.writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return BAD_VALUE;
-        }
-        status = remote()->transact(SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-                data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = remote()->transact(REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-                data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        data.writeUint32(static_cast<uint32_t>(strategy));
-        status_t status = remote()->transact(GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
-                data, &reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = device.readFromParcel(&reply);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        return static_cast<status_t>(reply.readInt32());
-    }
-
-    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
-            AudioDeviceTypeAddrVector *devices) const
-    {
-        if (devices == nullptr) {
-            return BAD_VALUE;
-        }
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        status_t status = aa.writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return status;
-        }
-        status = remote()->transact(GET_DEVICES_FOR_ATTRIBUTES, data, &reply);
-        if (status != NO_ERROR) {
-            // transaction failed, return error
-            return status;
-        }
-        status = static_cast<status_t>(reply.readInt32());
-        if (status != NO_ERROR) {
-            // APM method call failed, return error
-            return status;
-        }
-
-        const size_t numberOfDevices = (size_t)reply.readInt32();
-        for (size_t i = 0; i < numberOfDevices; i++) {
-            AudioDeviceTypeAddr device;
-            if (device.readFromParcel((Parcel*)&reply) == NO_ERROR) {
-                devices->push_back(device);
-            } else {
-                return FAILED_TRANSACTION;
-            }
-        }
-        return NO_ERROR;
-    }
-
-    virtual void onNewAudioModulesAvailable()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        remote()->transact(AUDIO_MODULES_UPDATED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    status_t registerSoundTriggerCaptureStateListener(
-            const sp<media::ICaptureStateListener>& listener,
-            bool* result) override {
-        Parcel data, reply;
-        status_t status;
-        status =
-            data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
-        if (status != NO_ERROR) return status;
-        status = data.writeStrongBinder(IInterface::asBinder(listener));
-        if (status != NO_ERROR) return status;
-        status =
-            remote()->transact(REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER,
-                               data,
-                               &reply,
-                               0);
-        if (status != NO_ERROR) return status;
-        status = reply.readBool(result);
-        if (status != NO_ERROR) return status;
-        return NO_ERROR;
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
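-// Typical client-side usage (sketch; roughly what AudioSystem does to obtain this proxy):
-//     sp<IBinder> binder =
-//             defaultServiceManager()->getService(String16("media.audio_policy"));
-//     sp<IAudioPolicyService> aps = interface_cast<IAudioPolicyService>(binder);
-//     audio_mode_t mode = aps->getPhoneState();  // marshalled through BpAudioPolicyService above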
-
-// ----------------------------------------------------------------------
-
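-// Server side: onTransact unmarshals each transaction code, applies the caller checks below,
-// and forwards the call to the concrete AudioPolicyService implementation.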
-status_t BnAudioPolicyService::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    // make sure transactions reserved to AudioFlinger do not come from other processes
-    switch (code) {
-        case START_OUTPUT:
-        case STOP_OUTPUT:
-        case RELEASE_OUTPUT:
-        case GET_INPUT_FOR_ATTR:
-        case START_INPUT:
-        case STOP_INPUT:
-        case RELEASE_INPUT:
-        case GET_OUTPUT_FOR_EFFECT:
-        case REGISTER_EFFECT:
-        case UNREGISTER_EFFECT:
-        case SET_EFFECT_ENABLED:
-        case GET_OUTPUT_FOR_ATTR:
-        case MOVE_EFFECTS_TO_IO:
-            ALOGW("%s: transaction %d received from PID %d",
-                  __func__, code, IPCThreadState::self()->getCallingPid());
-            // return status only for non void methods
-            switch (code) {
-                case RELEASE_OUTPUT:
-                case RELEASE_INPUT:
-                    break;
-                default:
-                    reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
-                    break;
-            }
-            return OK;
-        default:
-            break;
-    }
-
-    // make sure the following transactions come from system components
-    switch (code) {
-        case SET_DEVICE_CONNECTION_STATE:
-        case HANDLE_DEVICE_CONFIG_CHANGE:
-        case SET_PHONE_STATE:
-//FIXME: Allow SET_FORCE_USE calls from system apps until a better use case routing API is available
-//      case SET_FORCE_USE:
-        case INIT_STREAM_VOLUME:
-        case SET_STREAM_VOLUME:
-        case REGISTER_POLICY_MIXES:
-        case SET_MASTER_MONO:
-        case GET_SURROUND_FORMATS:
-        case SET_SURROUND_FORMAT_ENABLED:
-        case SET_ASSISTANT_UID:
-        case SET_A11Y_SERVICES_UIDS:
-        case SET_UID_DEVICE_AFFINITY:
-        case REMOVE_UID_DEVICE_AFFINITY:
-        case SET_USERID_DEVICE_AFFINITY:
-        case REMOVE_USERID_DEVICE_AFFINITY:
-        case GET_OFFLOAD_FORMATS_A2DP:
-        case LIST_AUDIO_VOLUME_GROUPS:
-        case GET_VOLUME_GROUP_FOR_ATTRIBUTES:
-        case ACQUIRE_SOUNDTRIGGER_SESSION:
-        case RELEASE_SOUNDTRIGGER_SESSION:
-        case SET_RTT_ENABLED:
-        case IS_CALL_SCREEN_MODE_SUPPORTED:
-        case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case SET_SUPPORTED_SYSTEM_USAGES:
-        case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case GET_DEVICES_FOR_ATTRIBUTES:
-        case SET_ALLOWED_CAPTURE_POLICY:
-        case AUDIO_MODULES_UPDATED:
-        case SET_CURRENT_IME_UID:
-        case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER: {
-            if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
-                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
-                      __func__, code, IPCThreadState::self()->getCallingPid(),
-                      IPCThreadState::self()->getCallingUid());
-                reply->writeInt32(static_cast<int32_t> (INVALID_OPERATION));
-                return OK;
-            }
-        } break;
-        default:
-            break;
-    }
-
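-    // TimeCheck acts as a watchdog for this command: it fires if handling exceeds its
-    // timeout, capturing debug state so a stalled audio policy call can be diagnosed.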
-    std::string tag("IAudioPolicyService command " + std::to_string(code));
-    TimeCheck check(tag.c_str());
-
-    switch (code) {
-        case SET_DEVICE_CONNECTION_STATE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_devices_t device =
-                    static_cast <audio_devices_t>(data.readInt32());
-            audio_policy_dev_state_t state =
-                    static_cast <audio_policy_dev_state_t>(data.readInt32());
-            const char *device_address = data.readCString();
-            const char *device_name = data.readCString();
-            audio_format_t codecFormat = static_cast <audio_format_t>(data.readInt32());
-            if (device_address == nullptr || device_name == nullptr) {
-                ALOGE("Bad Binder transaction: SET_DEVICE_CONNECTION_STATE for device %u", device);
-                reply->writeInt32(static_cast<int32_t> (BAD_VALUE));
-            } else {
-                reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,
-                                                                                  state,
-                                                                                  device_address,
-                                                                                  device_name,
-                                                                                  codecFormat)));
-            }
-            return NO_ERROR;
-        } break;
-
-        case GET_DEVICE_CONNECTION_STATE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_devices_t device =
-                    static_cast<audio_devices_t> (data.readInt32());
-            const char *device_address = data.readCString();
-            if (device_address == nullptr) {
-                ALOGE("Bad Binder transaction: GET_DEVICE_CONNECTION_STATE for device %u", device);
-                reply->writeInt32(static_cast<int32_t> (AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-            } else {
-                reply->writeInt32(static_cast<uint32_t> (getDeviceConnectionState(device,
-                                                                                  device_address)));
-            }
-            return NO_ERROR;
-        } break;
-
-        case HANDLE_DEVICE_CONFIG_CHANGE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_devices_t device =
-                    static_cast <audio_devices_t>(data.readInt32());
-            const char *device_address = data.readCString();
-            const char *device_name = data.readCString();
-            audio_format_t codecFormat =
-                    static_cast <audio_format_t>(data.readInt32());
-            if (device_address == nullptr || device_name == nullptr) {
-                ALOGE("Bad Binder transaction: HANDLE_DEVICE_CONFIG_CHANGE for device %u", device);
-                reply->writeInt32(static_cast<int32_t> (BAD_VALUE));
-            } else {
-                reply->writeInt32(static_cast<uint32_t> (handleDeviceConfigChange(device,
-                                                                                  device_address,
-                                                                                  device_name,
-                                                                                  codecFormat)));
-            }
-            return NO_ERROR;
-        } break;
-
-        case SET_PHONE_STATE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            reply->writeInt32(static_cast <uint32_t>(setPhoneState(
-                    (audio_mode_t) data.readInt32(),
-                    (uid_t) data.readInt32())));
-            return NO_ERROR;
-        } break;
-
-        case SET_FORCE_USE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>(
-                    data.readInt32());
-            audio_policy_forced_cfg_t config =
-                    static_cast <audio_policy_forced_cfg_t>(data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(setForceUse(usage, config)));
-            return NO_ERROR;
-        } break;
-
-        case GET_FORCE_USE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>(
-                    data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(getForceUse(usage)));
-            return NO_ERROR;
-        } break;
-
-        case GET_OUTPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            audio_io_handle_t output = getOutput(stream);
-            reply->writeInt32(static_cast <int>(output));
-            return NO_ERROR;
-        } break;
-
-        case GET_OUTPUT_FOR_ATTR: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
-            status_t status = data.read(&attr, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            sanetizeAudioAttributes(&attr);
-            audio_session_t session = (audio_session_t)data.readInt32();
-            audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
-            bool hasStream = data.readInt32() != 0;
-            if (hasStream) {
-                stream = (audio_stream_type_t)data.readInt32();
-            }
-            pid_t pid = (pid_t)data.readInt32();
-            uid_t uid = (uid_t)data.readInt32();
-            audio_config_t config;
-            memset(&config, 0, sizeof(audio_config_t));
-            data.read(&config, sizeof(audio_config_t));
-            audio_output_flags_t flags =
-                    static_cast <audio_output_flags_t>(data.readInt32());
-            audio_port_handle_t selectedDeviceId = data.readInt32();
-            audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
-            audio_io_handle_t output = 0;
-            std::vector<audio_io_handle_t> secondaryOutputs;
-            status = getOutputForAttr(&attr,
-                    &output, session, &stream, pid, uid,
-                    &config,
-                    flags, &selectedDeviceId, &portId, &secondaryOutputs);
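-            // Reply layout read by the proxy: status, possibly-updated attributes, output
-            // handle, resolved stream type, selected device id, port id, then the secondary
-            // output count followed by the handles themselves.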
-            reply->writeInt32(status);
-            status = reply->write(&attr, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            reply->writeInt32(output);
-            reply->writeInt32(stream);
-            reply->writeInt32(selectedDeviceId);
-            reply->writeInt32(portId);
-            reply->writeInt32(secondaryOutputs.size());
-            return reply->write(secondaryOutputs.data(),
-                                secondaryOutputs.size() * sizeof(audio_io_handle_t));
-        } break;
-
-        case START_OUTPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(startOutput(portId)));
-            return NO_ERROR;
-        } break;
-
-        case STOP_OUTPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(stopOutput(portId)));
-            return NO_ERROR;
-        } break;
-
-        case RELEASE_OUTPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            releaseOutput(portId);
-            return NO_ERROR;
-        } break;
-
-        case GET_INPUT_FOR_ATTR: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attr = {};
-            data.read(&attr, sizeof(audio_attributes_t));
-            sanetizeAudioAttributes(&attr);
-            audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
-            audio_unique_id_t riid = (audio_unique_id_t)data.readInt32();
-            audio_session_t session = (audio_session_t)data.readInt32();
-            pid_t pid = (pid_t)data.readInt32();
-            uid_t uid = (uid_t)data.readInt32();
-            const String16 opPackageName = data.readString16();
-            audio_config_base_t config;
-            memset(&config, 0, sizeof(audio_config_base_t));
-            data.read(&config, sizeof(audio_config_base_t));
-            audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
-            audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
-            audio_port_handle_t portId = (audio_port_handle_t)data.readInt32();
-            status_t status = getInputForAttr(&attr, &input, riid, session, pid, uid,
-                                              opPackageName, &config,
-                                              flags, &selectedDeviceId, &portId);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(input);
-                reply->writeInt32(selectedDeviceId);
-                reply->writeInt32(portId);
-            }
-            return NO_ERROR;
-        } break;
-
-        case START_INPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            status_t status = startInput(portId);
-            reply->writeInt32(static_cast <uint32_t>(status));
-            return NO_ERROR;
-        } break;
-
-        case STOP_INPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(stopInput(portId)));
-            return NO_ERROR;
-        } break;
-
-        case RELEASE_INPUT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_port_handle_t portId = static_cast <audio_port_handle_t>(data.readInt32());
-            releaseInput(portId);
-            return NO_ERROR;
-        } break;
-
-        case INIT_STREAM_VOLUME: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            int indexMin = data.readInt32();
-            int indexMax = data.readInt32();
-            reply->writeInt32(static_cast <uint32_t>(initStreamVolume(stream, indexMin, indexMax)));
-            return NO_ERROR;
-        } break;
-
-        case SET_STREAM_VOLUME: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            int index = data.readInt32();
-            audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
-            reply->writeInt32(static_cast <uint32_t>(setStreamVolumeIndex(stream,
-                                                                          index,
-                                                                          device)));
-            return NO_ERROR;
-        } break;
-
-        case GET_STREAM_VOLUME: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
-            int index = 0;
-            status_t status = getStreamVolumeIndex(stream, &index, device);
-            reply->writeInt32(index);
-            reply->writeInt32(static_cast <uint32_t>(status));
-            return NO_ERROR;
-        } break;
-
-        case GET_STRATEGY_FOR_STREAM: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            reply->writeUint32(getStrategyForStream(stream));
-            return NO_ERROR;
-        } break;
-
-        case SET_VOLUME_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attributes = {};
-            status_t status = data.read(&attributes, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            int index = data.readInt32();
-            audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
-
-            reply->writeInt32(static_cast <uint32_t>(setVolumeIndexForAttributes(attributes,
-                                                                                 index, device)));
-            return NO_ERROR;
-        } break;
-
-        case GET_VOLUME_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attributes = {};
-            status_t status = data.read(&attributes, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
-
-            int index = 0;
-            status = getVolumeIndexForAttributes(attributes, index, device);
-            reply->writeInt32(static_cast <uint32_t>(status));
-            if (status == NO_ERROR) {
-                reply->writeInt32(index);
-            }
-            return NO_ERROR;
-        } break;
-
-        case GET_MIN_VOLUME_FOR_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attributes = {};
-            status_t status = data.read(&attributes, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-
-            int index = 0;
-            status = getMinVolumeIndexForAttributes(attributes, index);
-            reply->writeInt32(static_cast <uint32_t>(status));
-            if (status == NO_ERROR) {
-                reply->writeInt32(index);
-            }
-            return NO_ERROR;
-        } break;
-
-        case GET_MAX_VOLUME_FOR_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_attributes_t attributes = {};
-            status_t status = data.read(&attributes, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-
-            int index = 0;
-            status = getMaxVolumeIndexForAttributes(attributes, index);
-            reply->writeInt32(static_cast <uint32_t>(status));
-            if (status == NO_ERROR) {
-                reply->writeInt32(index);
-            }
-            return NO_ERROR;
-        } break;
-
-        case GET_DEVICES_FOR_STREAM: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            reply->writeInt32(static_cast <int>(getDevicesForStream(stream)));
-            return NO_ERROR;
-        } break;
-
-        case GET_OUTPUT_FOR_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            effect_descriptor_t desc = {};
-            if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
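-                // 0x534e4554 is ASCII "SNET": record the short-parcel attempt in the
-                // SafetyNet log (bug 73126106) and continue with the zero-initialized desc.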
-                android_errorWriteLog(0x534e4554, "73126106");
-            }
-            (void)sanitizeEffectDescriptor(&desc);
-            audio_io_handle_t output = getOutputForEffect(&desc);
-            reply->writeInt32(static_cast <int>(output));
-            return NO_ERROR;
-        } break;
-
-        case REGISTER_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            effect_descriptor_t desc = {};
-            if (data.read(&desc, sizeof(desc)) != NO_ERROR) {
-                android_errorWriteLog(0x534e4554, "73126106");
-            }
-            (void)sanitizeEffectDescriptor(&desc);
-            audio_io_handle_t io = data.readInt32();
-            uint32_t strategy = data.readInt32();
-            audio_session_t session = (audio_session_t) data.readInt32();
-            int id = data.readInt32();
-            reply->writeInt32(static_cast <int32_t>(registerEffect(&desc,
-                                                                   io,
-                                                                   strategy,
-                                                                   session,
-                                                                   id)));
-            return NO_ERROR;
-        } break;
-
-        case UNREGISTER_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            int id = data.readInt32();
-            reply->writeInt32(static_cast <int32_t>(unregisterEffect(id)));
-            return NO_ERROR;
-        } break;
-
-        case SET_EFFECT_ENABLED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            int id = data.readInt32();
-            bool enabled = static_cast <bool>(data.readInt32());
-            reply->writeInt32(static_cast <int32_t>(setEffectEnabled(id, enabled)));
-            return NO_ERROR;
-        } break;
-
-        case MOVE_EFFECTS_TO_IO: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            std::vector<int> ids;
-            int32_t size;
-            status_t status = data.readInt32(&size);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            if (size > MAX_ITEMS_PER_LIST) {
-                return BAD_VALUE;
-            }
-            for (int32_t i = 0; i < size; i++) {
-                int id;
-                status = data.readInt32(&id);
-                if (status != NO_ERROR) {
-                    return status;
-                }
-                ids.push_back(id);
-            }
-
-            audio_io_handle_t io = data.readInt32();
-            reply->writeInt32(static_cast <int32_t>(moveEffectsToIo(ids, io)));
-            return NO_ERROR;
-        } break;
-
-        case IS_STREAM_ACTIVE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
-            uint32_t inPastMs = (uint32_t)data.readInt32();
-            reply->writeInt32( isStreamActive(stream, inPastMs) );
-            return NO_ERROR;
-        } break;
-
-        case IS_STREAM_ACTIVE_REMOTELY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
-            uint32_t inPastMs = (uint32_t)data.readInt32();
-            reply->writeInt32( isStreamActiveRemotely(stream, inPastMs) );
-            return NO_ERROR;
-        } break;
-
-        case IS_SOURCE_ACTIVE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_source_t source = (audio_source_t) data.readInt32();
-            reply->writeInt32( isSourceActive(source));
-            return NO_ERROR;
-        }
-
-        case QUERY_DEFAULT_PRE_PROCESSING: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_session_t audioSession = (audio_session_t) data.readInt32();
-            uint32_t count = data.readInt32();
-            if (count > AudioEffect::kMaxPreProcessing) {
-                count = AudioEffect::kMaxPreProcessing;
-            }
-            uint32_t retCount = count;
-            effect_descriptor_t *descriptors = new effect_descriptor_t[count]{};
-            status_t status = queryDefaultPreProcessing(audioSession, descriptors, &retCount);
-            reply->writeInt32(status);
-            if (status != NO_ERROR && status != NO_MEMORY) {
-                retCount = 0;
-            }
-            reply->writeInt32(retCount);
-            if (retCount != 0) {
-                if (retCount < count) {
-                    count = retCount;
-                }
-                reply->write(descriptors, sizeof(effect_descriptor_t) * count);
-            }
-            delete[] descriptors;
-            return status;
-        }
-
-        case IS_OFFLOAD_SUPPORTED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_offload_info_t info = {};
-            data.read(&info, sizeof(audio_offload_info_t));
-            bool isSupported = isOffloadSupported(info);
-            reply->writeInt32(isSupported);
-            return NO_ERROR;
-        }
-
-        case IS_DIRECT_OUTPUT_SUPPORTED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_config_base_t config = {};
-            audio_attributes_t attributes = {};
-            status_t status = data.read(&config, sizeof(audio_config_base_t));
-            if (status != NO_ERROR) return status;
-            status = data.read(&attributes, sizeof(audio_attributes_t));
-            if (status != NO_ERROR) return status;
-            reply->writeInt32(isDirectOutputSupported(config, attributes));
-            return NO_ERROR;
-        }
-
-        case LIST_AUDIO_PORTS: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_port_role_t role = (audio_port_role_t)data.readInt32();
-            audio_port_type_t type = (audio_port_type_t)data.readInt32();
-            unsigned int numPortsReq = data.readInt32();
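-            // Clamp the caller-supplied count before calloc so an untrusted parcel cannot
-            // force an oversized allocation.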
-            if (numPortsReq > MAX_ITEMS_PER_LIST) {
-                numPortsReq = MAX_ITEMS_PER_LIST;
-            }
-            unsigned int numPorts = numPortsReq;
-            struct audio_port *ports =
-                    (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
-            if (ports == NULL) {
-                reply->writeInt32(NO_MEMORY);
-                reply->writeInt32(0);
-                return NO_ERROR;
-            }
-            unsigned int generation;
-            status_t status = listAudioPorts(role, type, &numPorts, ports, &generation);
-            reply->writeInt32(status);
-            reply->writeInt32(numPorts);
-
-            if (status == NO_ERROR) {
-                if (numPortsReq > numPorts) {
-                    numPortsReq = numPorts;
-                }
-                reply->write(ports, numPortsReq * sizeof(struct audio_port));
-                reply->writeInt32(generation);
-            }
-            free(ports);
-            return NO_ERROR;
-        }
-
-        case GET_AUDIO_PORT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            struct audio_port port = {};
-            if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
-                ALOGE("b/23912202");
-            }
-            status_t status = getAudioPort(&port);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&port, sizeof(struct audio_port));
-            }
-            return NO_ERROR;
-        }
-
-        case CREATE_AUDIO_PATCH: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            struct audio_patch patch = {};
-            data.read(&patch, sizeof(struct audio_patch));
-            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-            if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
-                ALOGE("b/23912202");
-            }
-            status_t status = createAudioPatch(&patch, &handle);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->write(&handle, sizeof(audio_patch_handle_t));
-            }
-            return NO_ERROR;
-        }
-
-        case RELEASE_AUDIO_PATCH: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-            data.read(&handle, sizeof(audio_patch_handle_t));
-            status_t status = releaseAudioPatch(handle);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case LIST_AUDIO_PATCHES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            unsigned int numPatchesReq = data.readInt32();
-            if (numPatchesReq > MAX_ITEMS_PER_LIST) {
-                numPatchesReq = MAX_ITEMS_PER_LIST;
-            }
-            unsigned int numPatches = numPatchesReq;
-            struct audio_patch *patches =
-                    (struct audio_patch *)calloc(numPatchesReq,
-                                                 sizeof(struct audio_patch));
-            if (patches == NULL) {
-                reply->writeInt32(NO_MEMORY);
-                reply->writeInt32(0);
-                return NO_ERROR;
-            }
-            unsigned int generation;
-            status_t status = listAudioPatches(&numPatches, patches, &generation);
-            reply->writeInt32(status);
-            reply->writeInt32(numPatches);
-            if (status == NO_ERROR) {
-                if (numPatchesReq > numPatches) {
-                    numPatchesReq = numPatches;
-                }
-                reply->write(patches, numPatchesReq * sizeof(struct audio_patch));
-                reply->writeInt32(generation);
-            }
-            free(patches);
-            return NO_ERROR;
-        }
-
-        case SET_AUDIO_PORT_CONFIG: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            struct audio_port_config config = {};
-            data.read(&config, sizeof(struct audio_port_config));
-            (void)sanitizeAudioPortConfig(&config);
-            status_t status = setAudioPortConfig(&config);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case REGISTER_CLIENT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
-                    data.readStrongBinder());
-            registerClient(client);
-            return NO_ERROR;
-        } break;
-
-        case SET_AUDIO_PORT_CALLBACK_ENABLED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            setAudioPortCallbacksEnabled(data.readInt32() == 1);
-            return NO_ERROR;
-        } break;
-
-        case SET_AUDIO_VOLUME_GROUP_CALLBACK_ENABLED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            setAudioVolumeGroupCallbacksEnabled(data.readInt32() == 1);
-            return NO_ERROR;
-        } break;
-
-        case ACQUIRE_SOUNDTRIGGER_SESSION: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_session_t session = AUDIO_SESSION_NONE;
-            audio_io_handle_t ioHandle = AUDIO_IO_HANDLE_NONE;
-            audio_devices_t device = AUDIO_DEVICE_NONE;
-            status_t status = acquireSoundTriggerSession(&session, &ioHandle, &device);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(session);
-                reply->writeInt32(ioHandle);
-                reply->writeInt32(device);
-            }
-            return NO_ERROR;
-        } break;
-
-        case RELEASE_SOUNDTRIGGER_SESSION: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_session_t session = (audio_session_t)data.readInt32();
-            status_t status = releaseSoundTriggerSession(session);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-
-        case GET_PHONE_STATE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            reply->writeInt32((int32_t)getPhoneState());
-            return NO_ERROR;
-        } break;
-
-        case REGISTER_POLICY_MIXES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool registration = data.readInt32() == 1;
-            Vector<AudioMix> mixes;
-            size_t size = (size_t)data.readInt32();
-            if (size > MAX_MIXES_PER_POLICY) {
-                size = MAX_MIXES_PER_POLICY;
-            }
-            for (size_t i = 0; i < size; i++) {
-                AudioMix mix;
-                if (mix.readFromParcel((Parcel*)&data) == NO_ERROR) {
-                    mixes.add(mix);
-                }
-            }
-            status_t status = registerPolicyMixes(mixes, registration);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-
-        case START_AUDIO_SOURCE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            struct audio_port_config source = {};
-            data.read(&source, sizeof(struct audio_port_config));
-            (void)sanitizeAudioPortConfig(&source);
-            audio_attributes_t attributes = {};
-            data.read(&attributes, sizeof(audio_attributes_t));
-            sanetizeAudioAttributes(&attributes);
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-            status_t status = startAudioSource(&source, &attributes, &portId);
-            reply->writeInt32(status);
-            reply->writeInt32(portId);
-            return NO_ERROR;
-        } break;
-
-        case STOP_AUDIO_SOURCE: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_port_handle_t portId = (audio_port_handle_t) data.readInt32();
-            status_t status = stopAudioSource(portId);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-
-        case SET_MASTER_MONO: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool mono = static_cast<bool>(data.readInt32());
-            status_t status = setMasterMono(mono);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-
-        case GET_MASTER_MONO: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool mono;
-            status_t status = getMasterMono(&mono);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(static_cast<int32_t>(mono));
-            }
-            return NO_ERROR;
-        } break;
-
-        case GET_STREAM_VOLUME_DB: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_stream_type_t stream =
-                    static_cast <audio_stream_type_t>(data.readInt32());
-            int index = static_cast <int>(data.readInt32());
-            audio_devices_t device =
-                    static_cast <audio_devices_t>(data.readUint32());
-            reply->writeFloat(getStreamVolumeDB(stream, index, device));
-            return NO_ERROR;
-        }
-
-        case GET_SURROUND_FORMATS: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            unsigned int numSurroundFormatsReq = data.readUint32();
-            if (numSurroundFormatsReq > MAX_ITEMS_PER_LIST) {
-                numSurroundFormatsReq = MAX_ITEMS_PER_LIST;
-            }
-            bool reported = data.readBool();
-            unsigned int numSurroundFormats = numSurroundFormatsReq;
-            audio_format_t *surroundFormats = (audio_format_t *)calloc(
-                    numSurroundFormats, sizeof(audio_format_t));
-            bool *surroundFormatsEnabled = (bool *)calloc(numSurroundFormats, sizeof(bool));
-            if (numSurroundFormatsReq > 0 &&
-                    (surroundFormats == NULL || surroundFormatsEnabled == NULL)) {
-                free(surroundFormats);
-                free(surroundFormatsEnabled);
-                reply->writeInt32(NO_MEMORY);
-                return NO_ERROR;
-            }
-            status_t status = getSurroundFormats(
-                    &numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
-            reply->writeInt32(status);
-
-            if (status == NO_ERROR) {
-                reply->writeUint32(numSurroundFormats);
-                if (numSurroundFormatsReq > numSurroundFormats) {
-                    numSurroundFormatsReq = numSurroundFormats;
-                }
-                reply->write(surroundFormats, numSurroundFormatsReq * sizeof(audio_format_t));
-                reply->write(surroundFormatsEnabled, numSurroundFormatsReq * sizeof(bool));
-            }
-            free(surroundFormats);
-            free(surroundFormatsEnabled);
-            return NO_ERROR;
-        }
-
-        case SET_SURROUND_FORMAT_ENABLED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_format_t audioFormat = (audio_format_t) data.readInt32();
-            bool enabled = data.readBool();
-            status_t status = setSurroundFormatEnabled(audioFormat, enabled);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case GET_OFFLOAD_FORMATS_A2DP: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            std::vector<audio_format_t> encodingFormats;
-            status_t status = getHwOffloadEncodingFormatsSupportedForA2DP(&encodingFormats);
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            reply->writeUint32(static_cast<uint32_t>(encodingFormats.size()));
-            for (size_t i = 0; i < encodingFormats.size(); i++)
-                reply->writeInt32(static_cast<int32_t>(encodingFormats[i]));
-            return NO_ERROR;
-        }
-
-
-        case ADD_STREAM_DEFAULT_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            effect_uuid_t type;
-            status_t status = data.read(&type, sizeof(effect_uuid_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            String16 opPackageName;
-            status = data.readString16(&opPackageName);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            effect_uuid_t uuid;
-            status = data.read(&uuid, sizeof(effect_uuid_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            int32_t priority = data.readInt32();
-            audio_usage_t usage = (audio_usage_t) data.readInt32();
-            audio_unique_id_t id = 0;
-            reply->writeInt32(static_cast <int32_t>(addStreamDefaultEffect(&type,
-                                                                           opPackageName,
-                                                                           &uuid,
-                                                                           priority,
-                                                                           usage,
-                                                                           &id)));
-            reply->writeInt32(id);
-            return NO_ERROR;
-        }
-
-        case REMOVE_STREAM_DEFAULT_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_unique_id_t id = static_cast<audio_unique_id_t>(data.readInt32());
-            reply->writeInt32(static_cast <int32_t>(removeStreamDefaultEffect(id)));
-            return NO_ERROR;
-        }
-
-        case ADD_SOURCE_DEFAULT_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            effect_uuid_t type;
-            status_t status = data.read(&type, sizeof(effect_uuid_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            String16 opPackageName;
-            status = data.readString16(&opPackageName);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            effect_uuid_t uuid;
-            status = data.read(&uuid, sizeof(effect_uuid_t));
-            if (status != NO_ERROR) {
-                return status;
-            }
-            int32_t priority = data.readInt32();
-            audio_source_t source = (audio_source_t) data.readInt32();
-            audio_unique_id_t id = 0;
-            reply->writeInt32(static_cast <int32_t>(addSourceDefaultEffect(&type,
-                                                                           opPackageName,
-                                                                           &uuid,
-                                                                           priority,
-                                                                           source,
-                                                                           &id)));
-            reply->writeInt32(id);
-            return NO_ERROR;
-        }
-
-        case REMOVE_SOURCE_DEFAULT_EFFECT: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            audio_unique_id_t id = static_cast<audio_unique_id_t>(data.readInt32());
-            reply->writeInt32(static_cast <int32_t>(removeSourceDefaultEffect(id)));
-            return NO_ERROR;
-        }
-
-        case SET_ASSISTANT_UID: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            int32_t uid;
-            status_t status = data.readInt32(&uid);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            status = setAssistantUid(uid);
-            reply->writeInt32(static_cast <int32_t>(status));
-            return NO_ERROR;
-        }
-
-        case SET_A11Y_SERVICES_UIDS: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            std::vector<uid_t> uids;
-            int32_t size;
-            status_t status = data.readInt32(&size);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            if (size > MAX_ITEMS_PER_LIST) {
-                size = MAX_ITEMS_PER_LIST;
-            }
-            for (int32_t i = 0; i < size; i++) {
-                int32_t uid;
-                status =  data.readInt32(&uid);
-                if (status != NO_ERROR) {
-                    return status;
-                }
-                uids.push_back(uid);
-            }
-            status = setA11yServicesUids(uids);
-            reply->writeInt32(static_cast <int32_t>(status));
-            return NO_ERROR;
-        }
-
-        case IS_HAPTIC_PLAYBACK_SUPPORTED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool isSupported = isHapticPlaybackSupported();
-            reply->writeBool(isSupported);
-            return NO_ERROR;
-        }
-        case SET_UID_DEVICE_AFFINITY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const uid_t uid = (uid_t) data.readInt32();
-            Vector<AudioDeviceTypeAddr> devices;
-            size_t size = (size_t)data.readInt32();
-            for (size_t i = 0; i < size; i++) {
-                AudioDeviceTypeAddr device;
-                if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
-                    devices.add(device);
-                }
-            }
-            status_t status = setUidDeviceAffinities(uid, devices);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case REMOVE_UID_DEVICE_AFFINITY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const uid_t uid = (uid_t) data.readInt32();
-            status_t status = removeUidDeviceAffinities(uid);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case SET_USERID_DEVICE_AFFINITY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const int userId = (int) data.readInt32();
-            Vector<AudioDeviceTypeAddr> devices;
-            size_t size = (size_t)data.readInt32();
-            for (size_t i = 0; i < size; i++) {
-                AudioDeviceTypeAddr device;
-                if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
-                    devices.add(device);
-                }
-            }
-            status_t status = setUserIdDeviceAffinities(userId, devices);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case REMOVE_USERID_DEVICE_AFFINITY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            const int userId = (int) data.readInt32();
-            status_t status = removeUserIdDeviceAffinities(userId);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case LIST_AUDIO_PRODUCT_STRATEGIES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            AudioProductStrategyVector strategies;
-            status_t status = listAudioProductStrategies(strategies);
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            size_t size = strategies.size();
-            size_t sizePosition = reply->dataPosition();
-            reply->writeInt32(size);
-            size_t finalSize = size;
-            for (size_t i = 0; i < size; i++) {
-                size_t position = reply->dataPosition();
-                if (strategies[i].writeToParcel(reply) != NO_ERROR) {
-                    reply->setDataPosition(position);
-                    finalSize--;
-                }
-            }
-            if (size != finalSize) {
-                size_t position = reply->dataPosition();
-                reply->setDataPosition(sizePosition);
-                reply->writeInt32(finalSize);
-                reply->setDataPosition(position);
-            }
-            return NO_ERROR;
-        }
-
-        case GET_STRATEGY_FOR_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            AudioAttributes attributes;
-            status_t status = attributes.readFromParcel(&data);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            product_strategy_t strategy;
-            status = getProductStrategyFromAudioAttributes(attributes, strategy);
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            reply->writeUint32(static_cast<int>(strategy));
-            return NO_ERROR;
-        }
-
-        case LIST_AUDIO_VOLUME_GROUPS: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            AudioVolumeGroupVector groups;
-            status_t status = listAudioVolumeGroups(groups);
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            size_t size = groups.size();
-            size_t sizePosition = reply->dataPosition();
-            reply->writeInt32(size);
-            size_t finalSize = size;
-            for (size_t i = 0; i < size; i++) {
-                size_t position = reply->dataPosition();
-                if (groups[i].writeToParcel(reply) != NO_ERROR) {
-                    reply->setDataPosition(position);
-                    finalSize--;
-                }
-            }
-            if (size != finalSize) {
-                size_t position = reply->dataPosition();
-                reply->setDataPosition(sizePosition);
-                reply->writeInt32(finalSize);
-                reply->setDataPosition(position);
-            }
-            return NO_ERROR;
-        }
-
-        case GET_VOLUME_GROUP_FOR_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            AudioAttributes attributes;
-            status_t status = attributes.readFromParcel(&data);
-            if (status != NO_ERROR) {
-                return status;
-            }
-
-            volume_group_t group;
-            status = getVolumeGroupFromAudioAttributes(attributes, group);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-
-            reply->writeInt32(status);
-            reply->writeUint32(static_cast<int>(group));
-            return NO_ERROR;
-        }
-
-        case SET_SUPPORTED_SYSTEM_USAGES: {
-             CHECK_INTERFACE(IAudioPolicyService, data, reply);
-             std::vector<audio_usage_t> systemUsages;
-
-             int32_t size;
-             status_t status = data.readInt32(&size);
-             if (status != NO_ERROR) {
-                 return status;
-             }
-             if (size > MAX_ITEMS_PER_LIST) {
-                 size = MAX_ITEMS_PER_LIST;
-             }
-
-             for (int32_t i = 0; i < size; i++) {
-                 int32_t systemUsageInt;
-                 status = data.readInt32(&systemUsageInt);
-                 if (status != NO_ERROR) {
-                     return status;
-                 }
-
-                 audio_usage_t systemUsage = static_cast<audio_usage_t>(systemUsageInt);
-                 systemUsages.push_back(systemUsage);
-             }
-             status = setSupportedSystemUsages(systemUsages);
-             reply->writeInt32(static_cast <int32_t>(status));
-             return NO_ERROR;
-        }
-
-        case SET_ALLOWED_CAPTURE_POLICY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            uid_t uid = data.readInt32();
-            audio_flags_mask_t flags = data.readInt32();
-            status_t status = setAllowedCapturePolicy(uid, flags);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case SET_RTT_ENABLED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool enabled = static_cast<bool>(data.readInt32());
-            status_t status = setRttEnabled(enabled);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case IS_CALL_SCREEN_MODE_SUPPORTED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            bool isAvailable = isCallScreenModeSupported();
-            reply->writeBool(isAvailable);
-            return NO_ERROR;
-        }
-
-        case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            AudioDeviceTypeAddr device;
-            status_t status = device.readFromParcel((Parcel*)&data);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            status = setPreferredDeviceForStrategy(strategy, device);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            status_t status = removePreferredDeviceForStrategy(strategy);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            product_strategy_t strategy = (product_strategy_t) data.readUint32();
-            AudioDeviceTypeAddr device;
-            status_t status = getPreferredDeviceForStrategy(strategy, device);
-            status_t marshall_status = device.writeToParcel(reply);
-            if (marshall_status != NO_ERROR) {
-                return marshall_status;
-            }
-            reply->writeInt32(status);
-            return NO_ERROR;
-        }
-
-        case GET_DEVICES_FOR_ATTRIBUTES: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            AudioAttributes attributes;
-            status_t status = attributes.readFromParcel(&data);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            AudioDeviceTypeAddrVector devices;
-            status = getDevicesForAttributes(attributes.getAttributes(), &devices);
-            // reply data formatted as:
-            //  - (int32) method call result from APM
-            //  - (int32) number of devices (n) if method call returned NO_ERROR
-            //  - n AudioDeviceTypeAddr         if method call returned NO_ERROR
-            reply->writeInt32(status);
-            if (status != NO_ERROR) {
-                return NO_ERROR;
-            }
-            status = reply->writeInt32(devices.size());
-            if (status != NO_ERROR) {
-                return status;
-            }
-            for (const auto& device : devices) {
-                status = device.writeToParcel(reply);
-                if (status != NO_ERROR) {
-                    return status;
-                }
-            }
-
-            return NO_ERROR;
-        }
-
-        case AUDIO_MODULES_UPDATED: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            onNewAudioModulesAvailable();
-            return NO_ERROR;
-        } break;
-
-        case SET_CURRENT_IME_UID: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            int32_t uid;
-            status_t status = data.readInt32(&uid);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            status = setCurrentImeUid(uid);
-            reply->writeInt32(static_cast <int32_t>(status));
-            return NO_ERROR;
-        }
-
-        case REGISTER_SOUNDTRIGGER_CAPTURE_STATE_LISTENER: {
-            CHECK_INTERFACE(IAudioPolicyService, data, reply);
-            sp<IBinder> binder = data.readStrongBinder();
-            if (binder == nullptr) {
-                return BAD_VALUE;
-            }
-            sp<ICaptureStateListener>
-                listener = interface_cast<ICaptureStateListener>(
-                binder);
-            if (listener == nullptr) {
-                return BAD_VALUE;
-            }
-            bool ret;
-            status_t status =
-                registerSoundTriggerCaptureStateListener(listener, &ret);
-            LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
-                                "Server returned unexpected status code: %d",
-                                status);
-            status = reply->writeBool(ret);
-            if (status != NO_ERROR) {
-                return status;
-            }
-            return NO_ERROR;
-        } break;
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-/** returns true if string overflow was prevented by zero termination */
-template <size_t size>
-static bool preventStringOverflow(char (&s)[size]) {
-    if (strnlen(s, size) < size) return false;
-    s[size - 1] = '\0';
-    return true;
-}
-
-void BnAudioPolicyService::sanetizeAudioAttributes(audio_attributes_t* attr)
-{
-    const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
-    if (strnlen(attr->tags, tagsMaxSize) >= tagsMaxSize) {
-        android_errorWriteLog(0x534e4554, "68953950"); // SafetyNet logging
-    }
-    attr->tags[tagsMaxSize - 1] = '\0';
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeEffectDescriptor(effect_descriptor_t* desc)
-{
-    if (preventStringOverflow(desc->name)
-        | /* always */ preventStringOverflow(desc->implementor)) {
-        android_errorWriteLog(0x534e4554, "73126106"); // SafetyNet logging
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
-}
-
-/** returns BAD_VALUE if sanitization was required. */
-status_t BnAudioPolicyService::sanitizeAudioPortConfig(struct audio_port_config* config)
-{
-    if (config->type == AUDIO_PORT_TYPE_DEVICE &&
-        preventStringOverflow(config->ext.device.address)) {
-        return BAD_VALUE;
-    }
-    return NO_ERROR;
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
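
Most of the IAudioPolicyService handlers removed above follow one reply-layout convention: the stub writes the call's status into the reply first and appends the payload only when that status is NO_ERROR, while onTransact() itself still returns NO_ERROR so the reply is delivered. A minimal sketch of that convention, assuming the usual libbinder types; the GET_FOO transaction code and the getFoo() virtual are invented here purely for illustration:

    case GET_FOO: {                               // hypothetical transaction code
        CHECK_INTERFACE(IAudioPolicyService, data, reply);
        int32_t foo = 0;
        status_t status = getFoo(&foo);           // hypothetical server-side call
        reply->writeInt32(status);                // status always goes first
        if (status == NO_ERROR) {
            reply->writeInt32(foo);               // payload only follows on success
        }
        return NO_ERROR;                          // the transaction itself succeeded
    }
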
diff --git a/media/libaudioclient/IAudioPolicyServiceClient.cpp b/media/libaudioclient/IAudioPolicyServiceClient.cpp
deleted file mode 100644
index 0f9580c..0000000
--- a/media/libaudioclient/IAudioPolicyServiceClient.cpp
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioPolicyServiceClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioPolicyServiceClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
-    PORT_LIST_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
-    PATCH_LIST_UPDATE,
-    MIX_STATE_UPDATE,
-    RECORDING_CONFIGURATION_UPDATE,
-    VOLUME_GROUP_CHANGED,
-};
-
-// ----------------------------------------------------------------------
-inline void readAudioConfigBaseFromParcel(const Parcel& data, audio_config_base_t *config) {
-    config->sample_rate = data.readUint32();
-    config->channel_mask = (audio_channel_mask_t) data.readInt32();
-    config->format = (audio_format_t) data.readInt32();
-}
-
-inline void writeAudioConfigBaseToParcel(Parcel& data, const audio_config_base_t *config)
-{
-    data.writeUint32(config->sample_rate);
-    data.writeInt32((int32_t) config->channel_mask);
-    data.writeInt32((int32_t) config->format);
-}
-
-inline void readRecordClientInfoFromParcel(const Parcel& data, record_client_info_t *clientInfo) {
-    clientInfo->riid = (audio_unique_id_t) data.readInt32();
-    clientInfo->uid = (uid_t) data.readUint32();
-    clientInfo->session = (audio_session_t) data.readInt32();
-    clientInfo->source = (audio_source_t) data.readInt32();
-    data.read(&clientInfo->port_id, sizeof(audio_port_handle_t));
-    clientInfo->silenced = data.readBool();
-}
-
-inline void writeRecordClientInfoToParcel(Parcel& data, const record_client_info_t *clientInfo) {
-    data.writeInt32((int32_t) clientInfo->riid);
-    data.writeUint32((uint32_t) clientInfo->uid);
-    data.writeInt32((int32_t) clientInfo->session);
-    data.writeInt32((int32_t) clientInfo->source);
-    data.write(&clientInfo->port_id, sizeof(audio_port_handle_t));
-    data.writeBool(clientInfo->silenced);
-}
-
-inline void readEffectVectorFromParcel(const Parcel& data,
-                                       std::vector<effect_descriptor_t> *effects) {
-    int32_t numEffects = data.readInt32();
-    for (int32_t i = 0; i < numEffects; i++) {
-        effect_descriptor_t effect;
-        if (data.read(&effect, sizeof(effect_descriptor_t)) != NO_ERROR) {
-            break;
-        }
-        (*effects).push_back(effect);
-    }
-}
-
-inline void writeEffectVectorToParcel(Parcel& data, std::vector<effect_descriptor_t> effects) {
-    data.writeUint32((uint32_t) effects.size());
-    for (const auto& effect : effects) {
-        if (data.write(&effect, sizeof(effect_descriptor_t)) != NO_ERROR) {
-            break;
-        }
-    }
-}
-
-// ----------------------------------------------------------------------
-class BpAudioPolicyServiceClient : public BpInterface<IAudioPolicyServiceClient>
-{
-public:
-    explicit BpAudioPolicyServiceClient(const sp<IBinder>& impl)
-        : BpInterface<IAudioPolicyServiceClient>(impl)
-    {
-    }
-
-    void onAudioPortListUpdate()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
-        remote()->transact(PORT_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void onAudioPatchListUpdate()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
-        remote()->transact(PATCH_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void onAudioVolumeGroupChanged(volume_group_t group, int flags)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
-        data.writeUint32(group);
-        data.writeInt32(flags);
-        remote()->transact(VOLUME_GROUP_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
-        data.writeString8(regId);
-        data.writeInt32(state);
-        remote()->transact(MIX_STATE_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void onRecordingConfigurationUpdate(int event,
-                                        const record_client_info_t *clientInfo,
-                                        const audio_config_base_t *clientConfig,
-                                        std::vector<effect_descriptor_t> clientEffects,
-                                        const audio_config_base_t *deviceConfig,
-                                        std::vector<effect_descriptor_t> effects,
-                                        audio_patch_handle_t patchHandle,
-                                        audio_source_t source) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
-        data.writeInt32(event);
-        writeRecordClientInfoToParcel(data, clientInfo);
-        writeAudioConfigBaseToParcel(data, clientConfig);
-        writeEffectVectorToParcel(data, clientEffects);
-        writeAudioConfigBaseToParcel(data, deviceConfig);
-        writeEffectVectorToParcel(data, effects);
-        data.writeInt32(patchHandle);
-        data.writeInt32((int32_t) source);
-        remote()->transact(RECORDING_CONFIGURATION_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioPolicyServiceClient, "android.media.IAudioPolicyServiceClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioPolicyServiceClient::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-    case PORT_LIST_UPDATE: {
-            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
-            onAudioPortListUpdate();
-            return NO_ERROR;
-        } break;
-    case PATCH_LIST_UPDATE: {
-            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
-            onAudioPatchListUpdate();
-            return NO_ERROR;
-        } break;
-    case VOLUME_GROUP_CHANGED: {
-            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
-            volume_group_t group = static_cast<volume_group_t>(data.readUint32());
-            int flags = data.readInt32();
-            onAudioVolumeGroupChanged(group, flags);
-            return NO_ERROR;
-        } break;
-    case MIX_STATE_UPDATE: {
-            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
-            String8 regId = data.readString8();
-            int32_t state = data.readInt32();
-            onDynamicPolicyMixStateUpdate(regId, state);
-            return NO_ERROR;
-        } break;
-    case RECORDING_CONFIGURATION_UPDATE: {
-            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
-            int event = (int) data.readInt32();
-            record_client_info_t clientInfo;
-            audio_config_base_t clientConfig;
-            audio_config_base_t deviceConfig;
-            readRecordClientInfoFromParcel(data, &clientInfo);
-            readAudioConfigBaseFromParcel(data, &clientConfig);
-            std::vector<effect_descriptor_t> clientEffects;
-            readEffectVectorFromParcel(data, &clientEffects);
-            readAudioConfigBaseFromParcel(data, &deviceConfig);
-            std::vector<effect_descriptor_t> effects;
-            readEffectVectorFromParcel(data, &effects);
-            audio_patch_handle_t patchHandle = (audio_patch_handle_t) data.readInt32();
-            audio_source_t source = (audio_source_t) data.readInt32();
-            onRecordingConfigurationUpdate(event, &clientInfo, &clientConfig, clientEffects,
-                                           &deviceConfig, effects, patchHandle, source);
-            return NO_ERROR;
-        } break;
-    default:
-        return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
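
The proxy side of this deleted callback interface sends every notification with IBinder::FLAG_ONEWAY, so the policy service never blocks on the app process and never reads anything back from the (empty) reply parcel. A minimal sketch of that pattern under the same BpInterface boilerplate; ON_SOMETHING_CHANGED and onSomethingChanged() are invented names, for illustration only:

    void onSomethingChanged(int32_t value)        // hypothetical notification
    {
        Parcel data, reply;
        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
        data.writeInt32(value);
        // Fire-and-forget: FLAG_ONEWAY returns once the transaction is queued,
        // so a slow or dead client cannot stall the audio policy service.
        remote()->transact(ON_SOMETHING_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
    }
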
diff --git a/media/libaudioclient/IAudioTrack.cpp b/media/libaudioclient/IAudioTrack.cpp
deleted file mode 100644
index 6219e7a..0000000
--- a/media/libaudioclient/IAudioTrack.cpp
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "IAudioTrack"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioTrack.h>
-
-namespace android {
-
-using media::VolumeShaper;
-
-enum {
-    GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
-    START,
-    STOP,
-    FLUSH,
-    RESERVED, // was MUTE
-    PAUSE,
-    ATTACH_AUX_EFFECT,
-    SET_PARAMETERS,
-    SELECT_PRESENTATION,
-    GET_TIMESTAMP,
-    SIGNAL,
-    APPLY_VOLUME_SHAPER,
-    GET_VOLUME_SHAPER_STATE,
-};
-
-class BpAudioTrack : public BpInterface<IAudioTrack>
-{
-public:
-    explicit BpAudioTrack(const sp<IBinder>& impl)
-        : BpInterface<IAudioTrack>(impl)
-    {
-    }
-
-    virtual sp<IMemory> getCblk() const
-    {
-        Parcel data, reply;
-        sp<IMemory> cblk;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_CBLK, data, &reply);
-        if (status == NO_ERROR) {
-            cblk = interface_cast<IMemory>(reply.readStrongBinder());
-            if (cblk != 0 && cblk->unsecurePointer() == NULL) {
-                cblk.clear();
-            }
-        }
-        return cblk;
-    }
-
-    virtual status_t start()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        status_t status = remote()->transact(START, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        } else {
-            ALOGW("start() error: %s", strerror(-status));
-        }
-        return status;
-    }
-
-    virtual void stop()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        remote()->transact(STOP, data, &reply);
-    }
-
-    virtual void flush()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        remote()->transact(FLUSH, data, &reply);
-    }
-
-    virtual void pause()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        remote()->transact(PAUSE, data, &reply);
-    }
-
-    virtual status_t attachAuxEffect(int effectId)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        data.writeInt32(effectId);
-        status_t status = remote()->transact(ATTACH_AUX_EFFECT, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        } else {
-            ALOGW("attachAuxEffect() error: %s", strerror(-status));
-        }
-        return status;
-    }
-
-    virtual status_t setParameters(const String8& keyValuePairs) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        data.writeString8(keyValuePairs);
-        status_t status = remote()->transact(SET_PARAMETERS, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        }
-        return status;
-    }
-
-    /* Selects the presentation (if available) */
-    virtual status_t selectPresentation(int presentationId, int programId) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        data.writeInt32(presentationId);
-        data.writeInt32(programId);
-        status_t status = remote()->transact(SELECT_PRESENTATION, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        }
-        return status;
-    }
-
-    virtual status_t getTimestamp(AudioTimestamp& timestamp) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_TIMESTAMP, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-            if (status == NO_ERROR) {
-                timestamp.mPosition = reply.readInt32();
-                timestamp.mTime.tv_sec = reply.readInt32();
-                timestamp.mTime.tv_nsec = reply.readInt32();
-            }
-        }
-        return status;
-    }
-
-    virtual void signal() {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-        remote()->transact(SIGNAL, data, &reply);
-    }
-
-    virtual VolumeShaper::Status applyVolumeShaper(
-            const sp<VolumeShaper::Configuration>& configuration,
-            const sp<VolumeShaper::Operation>& operation) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-
-        status_t status = configuration.get() == nullptr
-                ? data.writeInt32(0)
-                :  data.writeInt32(1)
-                    ?: configuration->writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return VolumeShaper::Status(status);
-        }
-
-        status = operation.get() == nullptr
-                ? status = data.writeInt32(0)
-                : data.writeInt32(1)
-                    ?: operation->writeToParcel(&data);
-        if (status != NO_ERROR) {
-            return VolumeShaper::Status(status);
-        }
-
-        int32_t remoteVolumeShaperStatus;
-        status = remote()->transact(APPLY_VOLUME_SHAPER, data, &reply)
-                 ?: reply.readInt32(&remoteVolumeShaperStatus);
-
-        return VolumeShaper::Status(status ?: remoteVolumeShaperStatus);
-    }
-
-    virtual sp<VolumeShaper::State> getVolumeShaperState(int id) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
-
-        data.writeInt32(id);
-        status_t status = remote()->transact(GET_VOLUME_SHAPER_STATE, data, &reply);
-        if (status != NO_ERROR) {
-            return nullptr;
-        }
-        sp<VolumeShaper::State> state = new VolumeShaper::State;
-        status = state->readFromParcel(&reply);
-        if (status != NO_ERROR) {
-            return nullptr;
-        }
-        return state;
-    }
-};
-
-IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioTrack::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case GET_CBLK: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            reply->writeStrongBinder(IInterface::asBinder(getCblk()));
-            return NO_ERROR;
-        } break;
-        case START: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            reply->writeInt32(start());
-            return NO_ERROR;
-        } break;
-        case STOP: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            stop();
-            return NO_ERROR;
-        } break;
-        case FLUSH: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            flush();
-            return NO_ERROR;
-        } break;
-        case PAUSE: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            pause();
-            return NO_ERROR;
-        }
-        case ATTACH_AUX_EFFECT: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            reply->writeInt32(attachAuxEffect(data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case SET_PARAMETERS: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            String8 keyValuePairs(data.readString8());
-            reply->writeInt32(setParameters(keyValuePairs));
-            return NO_ERROR;
-        } break;
-        case SELECT_PRESENTATION: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            reply->writeInt32(selectPresentation(data.readInt32(), data.readInt32()));
-            return NO_ERROR;
-        } break;
-        case GET_TIMESTAMP: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            AudioTimestamp timestamp;
-            status_t status = getTimestamp(timestamp);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                reply->writeInt32(timestamp.mPosition);
-                reply->writeInt32(timestamp.mTime.tv_sec);
-                reply->writeInt32(timestamp.mTime.tv_nsec);
-            }
-            return NO_ERROR;
-        } break;
-        case SIGNAL: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            signal();
-            return NO_ERROR;
-        } break;
-        case APPLY_VOLUME_SHAPER: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            sp<VolumeShaper::Configuration> configuration;
-            sp<VolumeShaper::Operation> operation;
-
-            int32_t present;
-            status_t status = data.readInt32(&present);
-            if (status == NO_ERROR && present != 0) {
-                configuration = new VolumeShaper::Configuration();
-                status = configuration->readFromParcel(&data);
-            }
-            status = status ?: data.readInt32(&present);
-            if (status == NO_ERROR && present != 0) {
-                operation = new VolumeShaper::Operation();
-                status = operation->readFromParcel(&data);
-            }
-            if (status == NO_ERROR) {
-                status = (status_t)applyVolumeShaper(configuration, operation);
-            }
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-        case GET_VOLUME_SHAPER_STATE: {
-            CHECK_INTERFACE(IAudioTrack, data, reply);
-            int id;
-            status_t status = data.readInt32(&id);
-            if (status == NO_ERROR) {
-                sp<VolumeShaper::State> state = getVolumeShaperState(id);
-                if (state.get() != nullptr) {
-                     status = state->writeToParcel(reply);
-                }
-            }
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-} // namespace android
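
Two idioms in the removed applyVolumeShaper() marshalling are easy to misread: each nullable parcelable is preceded by a 0/1 presence flag, and the chained writes use the GCC/Clang extension "a ?: b", which evaluates to a unless a is zero, so it keeps the first non-zero (error) status. Spelled out more explicitly, under the same assumptions about the VolumeShaper types; writeOptionalConfig() is an invented helper name:

    // Equivalent of: configuration.get() == nullptr
    //         ? data.writeInt32(0)
    //         : data.writeInt32(1) ?: configuration->writeToParcel(&data)
    static status_t writeOptionalConfig(Parcel& data,
                                        const sp<VolumeShaper::Configuration>& config) {
        if (config == nullptr) {
            return data.writeInt32(0);            // absent: presence flag only
        }
        status_t status = data.writeInt32(1);     // present: flag, then payload
        if (status != NO_ERROR) {
            return status;                        // keep the first failing status
        }
        return config->writeToParcel(&data);
    }
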
diff --git a/media/libaudioclient/IEffect.cpp b/media/libaudioclient/IEffect.cpp
deleted file mode 100644
index 5d47dff..0000000
--- a/media/libaudioclient/IEffect.cpp
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffect"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <media/IEffect.h>
-
-namespace android {
-
-// Maximum command/reply size expected
-#define EFFECT_PARAM_SIZE_MAX       65536
-
-enum {
-    ENABLE = IBinder::FIRST_CALL_TRANSACTION,
-    DISABLE,
-    COMMAND,
-    DISCONNECT,
-    GET_CBLK
-};
-
-class BpEffect: public BpInterface<IEffect>
-{
-public:
-    explicit BpEffect(const sp<IBinder>& impl)
-        : BpInterface<IEffect>(impl)
-    {
-    }
-
-    status_t enable()
-    {
-        ALOGV("enable");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(ENABLE, data, &reply);
-        return reply.readInt32();
-    }
-
-    status_t disable()
-    {
-        ALOGV("disable");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(DISABLE, data, &reply);
-        return reply.readInt32();
-    }
-
-    status_t command(uint32_t cmdCode,
-                     uint32_t cmdSize,
-                     void *pCmdData,
-                     uint32_t *pReplySize,
-                     void *pReplyData)
-    {
-        ALOGV("command");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        data.writeInt32(cmdCode);
-        int size = cmdSize;
-        if (pCmdData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pCmdData, size);
-        }
-        if (pReplySize == NULL) {
-            size = 0;
-        } else {
-            size = *pReplySize;
-        }
-        data.writeInt32(size);
-
-        status_t status = remote()->transact(COMMAND, data, &reply);
-        if (status == NO_ERROR) {
-            status = reply.readInt32();
-        }
-        if (status != NO_ERROR) {
-            if (pReplySize != NULL)
-                *pReplySize = 0;
-            return status;
-        }
-
-        size = reply.readInt32();
-        if (size != 0 && pReplyData != NULL && pReplySize != NULL) {
-            reply.read(pReplyData, size);
-            *pReplySize = size;
-        }
-        return status;
-    }
-
-    void disconnect()
-    {
-        ALOGV("disconnect");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        remote()->transact(DISCONNECT, data, &reply);
-        return;
-    }
-
-    virtual sp<IMemory> getCblk() const
-    {
-        Parcel data, reply;
-        sp<IMemory> cblk;
-        data.writeInterfaceToken(IEffect::getInterfaceDescriptor());
-        status_t status = remote()->transact(GET_CBLK, data, &reply);
-        if (status == NO_ERROR) {
-            cblk = interface_cast<IMemory>(reply.readStrongBinder());
-            if (cblk != 0 && cblk->unsecurePointer() == NULL) {
-                cblk.clear();
-            }
-        }
-        return cblk;
-    }
- };
-
-IMPLEMENT_META_INTERFACE(Effect, "android.media.IEffect");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffect::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case ENABLE: {
-            ALOGV("ENABLE");
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeInt32(enable());
-            return NO_ERROR;
-        } break;
-
-        case DISABLE: {
-            ALOGV("DISABLE");
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeInt32(disable());
-            return NO_ERROR;
-        } break;
-
-        case COMMAND: {
-            ALOGV("COMMAND");
-            CHECK_INTERFACE(IEffect, data, reply);
-            uint32_t cmdCode = data.readInt32();
-            uint32_t cmdSize = data.readInt32();
-            char *cmd = NULL;
-            if (cmdSize) {
-                if (cmdSize > EFFECT_PARAM_SIZE_MAX) {
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                cmd = (char *)calloc(cmdSize, 1);
-                if (cmd == NULL) {
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                data.read(cmd, cmdSize);
-            }
-            uint32_t replySize = data.readInt32();
-            uint32_t replySz = replySize;
-            char *resp = NULL;
-            if (replySize) {
-                if (replySize > EFFECT_PARAM_SIZE_MAX) {
-                    free(cmd);
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-                resp = (char *)calloc(replySize, 1);
-                if (resp == NULL) {
-                    free(cmd);
-                    reply->writeInt32(NO_MEMORY);
-                    return NO_ERROR;
-                }
-            }
-            status_t status = command(cmdCode, cmdSize, cmd, &replySz, resp);
-            reply->writeInt32(status);
-            if (status == NO_ERROR) {
-                if (replySz < replySize) {
-                    replySize = replySz;
-                }
-                reply->writeInt32(replySize);
-                if (replySize) {
-                    reply->write(resp, replySize);
-                }
-            }
-            if (cmd) {
-                free(cmd);
-            }
-            if (resp) {
-                free(resp);
-            }
-            return NO_ERROR;
-        } break;
-
-        case DISCONNECT: {
-            ALOGV("DISCONNECT");
-            CHECK_INTERFACE(IEffect, data, reply);
-            disconnect();
-            return NO_ERROR;
-        } break;
-
-        case GET_CBLK: {
-            CHECK_INTERFACE(IEffect, data, reply);
-            reply->writeStrongBinder(IInterface::asBinder(getCblk()));
-            return NO_ERROR;
-        } break;
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
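
The COMMAND handler deleted above shows the defensive pattern for variable-size payloads on the stub side: both sizes come from the (untrusted) parcel, so they are capped at EFFECT_PARAM_SIZE_MAX before allocation, and because *pReplySize is an in/out parameter the reply buffer is copied back only up to the size the callee actually produced. A condensed sketch of the reply half, reusing the names from the removed code and eliding the command-payload handling:

    uint32_t cmdCode = data.readInt32();
    // ... command payload handling elided ...
    uint32_t replySize = data.readInt32();        // requested reply capacity, untrusted
    if (replySize > EFFECT_PARAM_SIZE_MAX) {      // refuse oversized requests up front
        reply->writeInt32(NO_MEMORY);
        return NO_ERROR;
    }
    char *resp = NULL;
    if (replySize) {
        resp = (char *)calloc(replySize, 1);
        if (resp == NULL) {
            reply->writeInt32(NO_MEMORY);
            return NO_ERROR;
        }
    }
    uint32_t replySz = replySize;                 // in/out: callee may shrink it
    status_t status = command(cmdCode, 0, NULL, &replySz, resp);
    reply->writeInt32(status);
    if (status == NO_ERROR) {
        if (replySz < replySize) {
            replySize = replySz;                  // never send back more than was written
        }
        reply->writeInt32(replySize);
        if (replySize) {
            reply->write(resp, replySize);
        }
    }
    free(resp);                                   // free(NULL) is a no-op
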
diff --git a/media/libaudioclient/IEffectClient.cpp b/media/libaudioclient/IEffectClient.cpp
deleted file mode 100644
index 3f2c67d..0000000
--- a/media/libaudioclient/IEffectClient.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
-**
-** Copyright 2010, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IEffectClient"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <media/IEffectClient.h>
-
-namespace android {
-
-enum {
-    CONTROL_STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION,
-    ENABLE_STATUS_CHANGED,
-    COMMAND_EXECUTED
-};
-
-class BpEffectClient: public BpInterface<IEffectClient>
-{
-public:
-    explicit BpEffectClient(const sp<IBinder>& impl)
-        : BpInterface<IEffectClient>(impl)
-    {
-    }
-
-    void controlStatusChanged(bool controlGranted)
-    {
-        ALOGV("controlStatusChanged");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32((uint32_t)controlGranted);
-        remote()->transact(CONTROL_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void enableStatusChanged(bool enabled)
-    {
-        ALOGV("enableStatusChanged");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32((uint32_t)enabled);
-        remote()->transact(ENABLE_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-    void commandExecuted(uint32_t cmdCode,
-                         uint32_t cmdSize,
-                         void *pCmdData,
-                         uint32_t replySize,
-                         void *pReplyData)
-    {
-        ALOGV("commandExecuted");
-        Parcel data, reply;
-        data.writeInterfaceToken(IEffectClient::getInterfaceDescriptor());
-        data.writeInt32(cmdCode);
-        int size = cmdSize;
-        if (pCmdData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pCmdData, size);
-        }
-        size = replySize;
-        if (pReplyData == NULL) {
-            size = 0;
-        }
-        data.writeInt32(size);
-        if (size) {
-            data.write(pReplyData, size);
-        }
-        remote()->transact(COMMAND_EXECUTED, data, &reply, IBinder::FLAG_ONEWAY);
-    }
-
-};
-
-IMPLEMENT_META_INTERFACE(EffectClient, "android.media.IEffectClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnEffectClient::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-        case CONTROL_STATUS_CHANGED: {
-            ALOGV("CONTROL_STATUS_CHANGED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            bool hasControl = (bool)data.readInt32();
-            controlStatusChanged(hasControl);
-            return NO_ERROR;
-        } break;
-        case ENABLE_STATUS_CHANGED: {
-            ALOGV("ENABLE_STATUS_CHANGED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            bool enabled = (bool)data.readInt32();
-            enableStatusChanged(enabled);
-            return NO_ERROR;
-        } break;
-        case COMMAND_EXECUTED: {
-            ALOGV("COMMAND_EXECUTED");
-            CHECK_INTERFACE(IEffectClient, data, reply);
-            uint32_t cmdCode = data.readInt32();
-            uint32_t cmdSize = data.readInt32();
-            char *cmd = NULL;
-            if (cmdSize) {
-                cmd = (char *)malloc(cmdSize);
-                data.read(cmd, cmdSize);
-            }
-            uint32_t replySize = data.readInt32();
-            char *resp = NULL;
-            if (replySize) {
-                resp = (char *)malloc(replySize);
-                data.read(resp, replySize);
-            }
-            commandExecuted(cmdCode, cmdSize, cmd, replySize, resp);
-            if (cmd) {
-                free(cmd);
-            }
-            if (resp) {
-                free(resp);
-            }
-            return NO_ERROR;
-        } break;
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
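
Unlike the IEffect stub, the COMMAND_EXECUTED handler removed above allocates its buffers from sizes read straight out of the parcel, without an EFFECT_PARAM_SIZE_MAX-style cap and without checking that the reads succeed. A sketch of how the same bounded-read idea could be expressed as a helper; readBoundedBlob() is an invented name, not part of the original interface:

    // Reads a size-prefixed blob of at most maxSize bytes; returns NULL on any failure.
    static char *readBoundedBlob(const Parcel& data, uint32_t size, uint32_t maxSize) {
        if (size == 0 || size > maxSize) {
            return NULL;
        }
        char *buf = (char *)calloc(size, 1);
        if (buf != NULL && data.read(buf, size) != NO_ERROR) {
            free(buf);
            return NULL;
        }
        return buf;
    }

A caller would treat a NULL return the same way the existing handlers treat a NO_MEMORY allocation failure.
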
diff --git a/media/libaudioclient/OWNERS b/media/libaudioclient/OWNERS
index 482b9fb..034d161 100644
--- a/media/libaudioclient/OWNERS
+++ b/media/libaudioclient/OWNERS
@@ -1,3 +1,4 @@
 gkasten@google.com
+hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index b0c68e5..446a58c 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -15,20 +15,23 @@
  */
 
 #include <binder/IServiceManager.h>
+#include <media/AidlConversionUtil.h>
 #include <media/PlayerBase.h>
 
 #define max(a, b) ((a) > (b) ? (a) : (b))
 #define min(a, b) ((a) < (b) ? (a) : (b))
 
 namespace android {
-
-using media::VolumeShaper;
+using aidl_utils::binderStatusFromStatusT;
+using media::VolumeShaperConfiguration;
+using media::VolumeShaperOperation;
 
 //--------------------------------------------------------------------------------------------------
 PlayerBase::PlayerBase() : BnPlayer(),
         mPanMultiplierL(1.0f), mPanMultiplierR(1.0f),
         mVolumeMultiplierL(1.0f), mVolumeMultiplierR(1.0f),
-        mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN)
+        mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN),
+        mLastReportedDeviceId(AUDIO_PORT_HANDLE_NONE)
 {
     ALOGD("PlayerBase::PlayerBase()");
     // use checkService() to avoid blocking if audio service is not up yet
@@ -46,11 +49,12 @@
     baseDestroy();
 }
 
-void PlayerBase::init(player_type_t playerType, audio_usage_t usage) {
+void PlayerBase::init(player_type_t playerType, audio_usage_t usage, audio_session_t sessionId) {
     if (mAudioManager == 0) {
                 ALOGE("AudioPlayer realize: no audio service, player will not be registered");
     } else {
-        mPIId = mAudioManager->trackPlayer(playerType, usage, AUDIO_CONTENT_TYPE_UNKNOWN, this);
+        mPIId = mAudioManager->trackPlayer(playerType, usage, AUDIO_CONTENT_TYPE_UNKNOWN, this,
+                sessionId);
     }
 }
 
@@ -62,14 +66,26 @@
 }
 
 //------------------------------------------------------------------------------
-void PlayerBase::servicePlayerEvent(player_state_t event) {
+void PlayerBase::servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId) {
     if (mAudioManager != 0) {
-        // only report state change
-        Mutex::Autolock _l(mPlayerStateLock);
-        if (event != mLastReportedEvent
-                && mPIId != PLAYER_PIID_INVALID) {
-            mLastReportedEvent = event;
-            mAudioManager->playerEvent(mPIId, event);
+        bool changed = false;
+        {
+            Mutex::Autolock _l(mDeviceIdLock);
+            changed = mLastReportedDeviceId != deviceId;
+            mLastReportedDeviceId = deviceId;
+        }
+
+        {
+            Mutex::Autolock _l(mPlayerStateLock);
+            // PLAYER_UPDATE_DEVICE_ID is not saved as an actual state; it is only used
+            // to update the device ID.
+            if ((event != PLAYER_UPDATE_DEVICE_ID) && (event != mLastReportedEvent)) {
+                mLastReportedEvent = event;
+                changed = true;
+            }
+        }
+        if (changed && (mPIId != PLAYER_PIID_INVALID)) {
+            mAudioManager->playerEvent(mPIId, event, deviceId);
         }
     }
 }
@@ -82,14 +98,18 @@
 }
 
 //FIXME temporary method while some player state is outside of this class
-void PlayerBase::reportEvent(player_state_t event) {
-    servicePlayerEvent(event);
+void PlayerBase::reportEvent(player_state_t event, audio_port_handle_t deviceId) {
+    servicePlayerEvent(event, deviceId);
 }
 
-status_t PlayerBase::startWithStatus() {
+void PlayerBase::baseUpdateDeviceId(audio_port_handle_t deviceId) {
+    servicePlayerEvent(PLAYER_UPDATE_DEVICE_ID, deviceId);
+}
+
+status_t PlayerBase::startWithStatus(audio_port_handle_t deviceId) {
     status_t status = playerStart();
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_STARTED);
+        servicePlayerEvent(PLAYER_STATE_STARTED, deviceId);
     } else {
         ALOGW("PlayerBase::start() error %d", status);
     }
@@ -99,18 +119,18 @@
 status_t PlayerBase::pauseWithStatus() {
     status_t status = playerPause();
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_PAUSED);
+        servicePlayerEvent(PLAYER_STATE_PAUSED, AUDIO_PORT_HANDLE_NONE);
     } else {
         ALOGW("PlayerBase::pause() error %d", status);
     }
     return status;
 }
 
-
 status_t PlayerBase::stopWithStatus() {
     status_t status = playerStop();
+
     if (status == NO_ERROR) {
-        servicePlayerEvent(PLAYER_STATE_STOPPED);
+        servicePlayerEvent(PLAYER_STATE_STOPPED, AUDIO_PORT_HANDLE_NONE);
     } else {
         ALOGW("PlayerBase::stop() error %d", status);
     }
@@ -121,7 +141,12 @@
 // Implementation of IPlayer
 binder::Status PlayerBase::start() {
     ALOGD("PlayerBase::start() from IPlayer");
-    (void)startWithStatus();
+    audio_port_handle_t deviceId;
+    {
+        Mutex::Autolock _l(mDeviceIdLock);
+        deviceId = mLastReportedDeviceId;
+    }
+    (void)startWithStatus(deviceId);
     return binder::Status::ok();
 }
 
@@ -149,7 +174,7 @@
     if (status != NO_ERROR) {
         ALOGW("PlayerBase::setVolume() error %d", status);
     }
-    return binder::Status::fromStatusT(status);
+    return binderStatusFromStatusT(status);
 }
 
 binder::Status PlayerBase::setPan(float pan) {
@@ -169,7 +194,7 @@
     if (status != NO_ERROR) {
         ALOGW("PlayerBase::setPan() error %d", status);
     }
-    return binder::Status::fromStatusT(status);
+    return binderStatusFromStatusT(status);
 }
 
 binder::Status PlayerBase::setStartDelayMs(int32_t delayMs __unused) {
@@ -178,8 +203,8 @@
 }
 
 binder::Status PlayerBase::applyVolumeShaper(
-            const VolumeShaper::Configuration& configuration __unused,
-            const VolumeShaper::Operation& operation __unused) {
+            const VolumeShaperConfiguration& configuration __unused,
+            const VolumeShaperOperation& operation __unused) {
     ALOGW("applyVolumeShaper() is not supported");
     return binder::Status::ok();
 }
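
Note (not part of the change): the hunks above replace binder::Status::fromStatusT with the
aidl_utils::binderStatusFromStatusT helper from media/AidlConversionUtil.h. A minimal sketch of
that return pattern; the setPanChecked function and its validation logic are made up for
illustration only:

    #include <binder/Status.h>
    #include <media/AidlConversionUtil.h>
    #include <utils/Errors.h>

    namespace android {

    binder::Status setPanChecked(float pan) {
        const status_t status = (pan >= -1.0f && pan <= 1.0f) ? NO_ERROR : BAD_VALUE;
        // Converts the native status_t into a binder::Status the Binder client can inspect.
        return aidl_utils::binderStatusFromStatusT(status);
    }

    }  // namespace android
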
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
new file mode 100644
index 0000000..25fdb49
--- /dev/null
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -0,0 +1,469 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "PolicyAidlConversion"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "media/PolicyAidlConversion.h"
+
+#include "media/AidlConversion.h"
+
+namespace android {
+
+using base::unexpected;
+
+ConversionResult<volume_group_t>
+aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
+    return convertReinterpret<volume_group_t>(aidl);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixType_uint32_t(media::AudioMixType aidl) {
+    switch (aidl) {
+        case media::AudioMixType::PLAYERS:
+            return MIX_TYPE_PLAYERS;
+        case media::AudioMixType::RECORDERS:
+            return MIX_TYPE_RECORDERS;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMixType>
+legacy2aidl_uint32_t_AudioMixType(uint32_t legacy) {
+    switch (legacy) {
+        case MIX_TYPE_PLAYERS:
+            return media::AudioMixType::PLAYERS;
+        case MIX_TYPE_RECORDERS:
+            return media::AudioMixType::RECORDERS;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixCallbackFlag_uint32_t(media::AudioMixCallbackFlag aidl) {
+    switch (aidl) {
+        case media::AudioMixCallbackFlag::NOTIFY_ACTIVITY:
+            return AudioMix::kCbFlagNotifyActivity;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMixCallbackFlag>
+legacy2aidl_uint32_t_AudioMixCallbackFlag(uint32_t legacy) {
+    switch (legacy) {
+        case AudioMix::kCbFlagNotifyActivity:
+            return media::AudioMixCallbackFlag::NOTIFY_ACTIVITY;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(int32_t aidl) {
+    return convertBitmask<uint32_t, int32_t, uint32_t, media::AudioMixCallbackFlag>(
+            aidl,
+            aidl2legacy_AudioMixCallbackFlag_uint32_t,
+            indexToEnum_index<media::AudioMixCallbackFlag>,
+            enumToMask_bitmask<uint32_t, uint32_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(uint32_t legacy) {
+    return convertBitmask<int32_t, uint32_t, media::AudioMixCallbackFlag, uint32_t>(
+            legacy,
+            legacy2aidl_uint32_t_AudioMixCallbackFlag,
+            indexToEnum_bitmask<uint32_t>,
+            enumToMask_index<int32_t, media::AudioMixCallbackFlag>);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixRouteFlag_uint32_t(media::AudioMixRouteFlag aidl) {
+    switch (aidl) {
+        case media::AudioMixRouteFlag::RENDER:
+            return MIX_ROUTE_FLAG_RENDER;
+        case media::AudioMixRouteFlag::LOOP_BACK:
+            return MIX_ROUTE_FLAG_LOOP_BACK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMixRouteFlag>
+legacy2aidl_uint32_t_AudioMixRouteFlag(uint32_t legacy) {
+    switch (legacy) {
+        case MIX_ROUTE_FLAG_RENDER:
+            return media::AudioMixRouteFlag::RENDER;
+        case MIX_ROUTE_FLAG_LOOP_BACK:
+            return media::AudioMixRouteFlag::LOOP_BACK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixRouteFlag_uint32_t_mask(int32_t aidl) {
+    return convertBitmask<uint32_t, int32_t, uint32_t, media::AudioMixRouteFlag>(
+            aidl,
+            aidl2legacy_AudioMixRouteFlag_uint32_t,
+            indexToEnum_index<media::AudioMixRouteFlag>,
+            enumToMask_bitmask<uint32_t, uint32_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_uint32_t_AudioMixRouteFlag_mask(uint32_t legacy) {
+    return convertBitmask<int32_t, uint32_t, media::AudioMixRouteFlag, uint32_t>(
+            legacy,
+            legacy2aidl_uint32_t_AudioMixRouteFlag,
+            indexToEnum_bitmask<uint32_t>,
+            enumToMask_index<int32_t, media::AudioMixRouteFlag>);
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using AudioMixMatchCriterionValue = decltype(AudioMixMatchCriterion::mValue);
+
+ConversionResult<AudioMixMatchCriterionValue>
+aidl2legacy_AudioMixMatchCriterionValue(
+        const media::AudioMixMatchCriterionValue& aidl,
+        uint32_t* rule) {
+    AudioMixMatchCriterionValue legacy;
+    *rule = 0;
+    switch (aidl.getTag()) {
+        case media::AudioMixMatchCriterionValue::usage:
+            legacy.mUsage = VALUE_OR_RETURN(
+                    aidl2legacy_AudioUsage_audio_usage_t(UNION_GET(aidl, usage).value()));
+            *rule |= RULE_MATCH_ATTRIBUTE_USAGE;
+            return legacy;
+
+        case media::AudioMixMatchCriterionValue::source:
+            legacy.mSource = VALUE_OR_RETURN(
+                    aidl2legacy_AudioSourceType_audio_source_t(UNION_GET(aidl, source).value()));
+            *rule |= RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
+            return legacy;
+
+        case media::AudioMixMatchCriterionValue::uid:
+            legacy.mUid = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_uid_t(UNION_GET(aidl, uid).value()));
+            *rule |= RULE_MATCH_UID;
+            return legacy;
+
+        case media::AudioMixMatchCriterionValue::userId:
+            legacy.mUserId = VALUE_OR_RETURN(
+                    convertIntegral<int>(UNION_GET(aidl, userId).value()));
+            *rule |= RULE_MATCH_USERID;
+            return legacy;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioMixMatchCriterionValue>
+legacy2aidl_AudioMixMatchCriterionValue(
+        const AudioMixMatchCriterionValue& legacy,
+        uint32_t rule) {
+    media::AudioMixMatchCriterionValue aidl;
+    switch (rule) {
+        case RULE_MATCH_ATTRIBUTE_USAGE:
+            UNION_SET(aidl, usage,
+                      VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.mUsage)));
+            break;
+
+        case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+            UNION_SET(aidl, source,
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.mSource)));
+            break;
+
+        case RULE_MATCH_UID:
+            UNION_SET(aidl, uid, VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.mUid)));
+            break;
+
+        case RULE_MATCH_USERID:
+            UNION_SET(aidl, userId, VALUE_OR_RETURN(convertReinterpret<uint32_t>(legacy.mUserId)));
+            break;
+
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    return aidl;
+}
+
+
+ConversionResult<AudioMixMatchCriterion>
+aidl2legacy_AudioMixMatchCriterion(const media::AudioMixMatchCriterion& aidl) {
+    AudioMixMatchCriterion legacy;
+    legacy.mValue = VALUE_OR_RETURN(
+            aidl2legacy_AudioMixMatchCriterionValue(aidl.value, &legacy.mRule));
+    if (aidl.invert) {
+        legacy.mRule |= RULE_EXCLUSION_MASK;
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioMixMatchCriterion>
+legacy2aidl_AudioMixMatchCriterion(const AudioMixMatchCriterion& legacy) {
+    media::AudioMixMatchCriterion aidl;
+    uint32_t rule = legacy.mRule;
+    if (rule & RULE_EXCLUSION_MASK) {
+        aidl.invert = true;
+        rule &= ~RULE_EXCLUSION_MASK;
+    }
+    aidl.value = VALUE_OR_RETURN(legacy2aidl_AudioMixMatchCriterionValue(legacy.mValue, rule));
+    return aidl;
+}
+
+ConversionResult<AudioMix>
+aidl2legacy_AudioMix(const media::AudioMix& aidl) {
+    AudioMix legacy;
+    RETURN_IF_ERROR(convertRange(aidl.criteria.begin(), aidl.criteria.end(),
+                                 std::back_inserter(legacy.mCriteria),
+                                 aidl2legacy_AudioMixMatchCriterion));
+    legacy.mMixType = VALUE_OR_RETURN(aidl2legacy_AudioMixType_uint32_t(aidl.mixType));
+    legacy.mFormat = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.format));
+    legacy.mRouteFlags = VALUE_OR_RETURN(
+            aidl2legacy_AudioMixRouteFlag_uint32_t_mask(aidl.routeFlags));
+    legacy.mDeviceType = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.device.type));
+    legacy.mDeviceAddress = VALUE_OR_RETURN(aidl2legacy_string_view_String8(aidl.device.address));
+    legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
+    legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
+    legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+    return legacy;
+}
+
+ConversionResult<media::AudioMix>
+legacy2aidl_AudioMix(const AudioMix& legacy) {
+    media::AudioMix aidl;
+    aidl.criteria = VALUE_OR_RETURN(
+            convertContainer<std::vector<media::AudioMixMatchCriterion>>(
+                    legacy.mCriteria,
+                    legacy2aidl_AudioMixMatchCriterion));
+    aidl.mixType = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixType(legacy.mMixType));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(legacy.mFormat));
+    aidl.routeFlags = VALUE_OR_RETURN(
+            legacy2aidl_uint32_t_AudioMixRouteFlag_mask(legacy.mRouteFlags));
+    aidl.device.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.mDeviceType));
+    aidl.device.address = VALUE_OR_RETURN(legacy2aidl_String8_string(legacy.mDeviceAddress));
+    aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
+    aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
+    aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+    return aidl;
+}
+
+ConversionResult<audio_policy_dev_state_t>
+aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(media::AudioPolicyDeviceState aidl) {
+    switch (aidl) {
+        case media::AudioPolicyDeviceState::UNAVAILABLE:
+            return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+        case media::AudioPolicyDeviceState::AVAILABLE:
+            return AUDIO_POLICY_DEVICE_STATE_AVAILABLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPolicyDeviceState>
+legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(audio_policy_dev_state_t legacy) {
+    switch (legacy) {
+        case AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE:
+            return media::AudioPolicyDeviceState::UNAVAILABLE;
+        case AUDIO_POLICY_DEVICE_STATE_AVAILABLE:
+            return media::AudioPolicyDeviceState::AVAILABLE;
+        case AUDIO_POLICY_DEVICE_STATE_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl) {
+    switch (aidl) {
+        case media::AudioPolicyForceUse::COMMUNICATION:
+            return AUDIO_POLICY_FORCE_FOR_COMMUNICATION;
+        case media::AudioPolicyForceUse::MEDIA:
+            return AUDIO_POLICY_FORCE_FOR_MEDIA;
+        case media::AudioPolicyForceUse::RECORD:
+            return AUDIO_POLICY_FORCE_FOR_RECORD;
+        case media::AudioPolicyForceUse::DOCK:
+            return AUDIO_POLICY_FORCE_FOR_DOCK;
+        case media::AudioPolicyForceUse::SYSTEM:
+            return AUDIO_POLICY_FORCE_FOR_SYSTEM;
+        case media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO:
+            return AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO;
+        case media::AudioPolicyForceUse::ENCODED_SURROUND:
+            return AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND;
+        case media::AudioPolicyForceUse::VIBRATE_RINGING:
+            return AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy) {
+    switch (legacy) {
+        case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
+            return media::AudioPolicyForceUse::COMMUNICATION;
+        case AUDIO_POLICY_FORCE_FOR_MEDIA:
+            return media::AudioPolicyForceUse::MEDIA;
+        case AUDIO_POLICY_FORCE_FOR_RECORD:
+            return media::AudioPolicyForceUse::RECORD;
+        case AUDIO_POLICY_FORCE_FOR_DOCK:
+            return media::AudioPolicyForceUse::DOCK;
+        case AUDIO_POLICY_FORCE_FOR_SYSTEM:
+            return media::AudioPolicyForceUse::SYSTEM;
+        case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
+            return media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO;
+        case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
+            return media::AudioPolicyForceUse::ENCODED_SURROUND;
+        case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
+            return media::AudioPolicyForceUse::VIBRATE_RINGING;
+        case AUDIO_POLICY_FORCE_USE_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl) {
+    switch (aidl) {
+        case media::AudioPolicyForcedConfig::NONE:
+            return AUDIO_POLICY_FORCE_NONE;
+        case media::AudioPolicyForcedConfig::SPEAKER:
+            return AUDIO_POLICY_FORCE_SPEAKER;
+        case media::AudioPolicyForcedConfig::HEADPHONES:
+            return AUDIO_POLICY_FORCE_HEADPHONES;
+        case media::AudioPolicyForcedConfig::BT_SCO:
+            return AUDIO_POLICY_FORCE_BT_SCO;
+        case media::AudioPolicyForcedConfig::BT_A2DP:
+            return AUDIO_POLICY_FORCE_BT_A2DP;
+        case media::AudioPolicyForcedConfig::WIRED_ACCESSORY:
+            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+        case media::AudioPolicyForcedConfig::BT_CAR_DOCK:
+            return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
+        case media::AudioPolicyForcedConfig::BT_DESK_DOCK:
+            return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
+        case media::AudioPolicyForcedConfig::ANALOG_DOCK:
+            return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+        case media::AudioPolicyForcedConfig::DIGITAL_DOCK:
+            return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+        case media::AudioPolicyForcedConfig::NO_BT_A2DP:
+            return AUDIO_POLICY_FORCE_NO_BT_A2DP;
+        case media::AudioPolicyForcedConfig::SYSTEM_ENFORCED:
+            return AUDIO_POLICY_FORCE_SYSTEM_ENFORCED;
+        case media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED:
+            return AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED;
+        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
+        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
+        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy) {
+    switch (legacy) {
+        case AUDIO_POLICY_FORCE_NONE:
+            return media::AudioPolicyForcedConfig::NONE;
+        case AUDIO_POLICY_FORCE_SPEAKER:
+            return media::AudioPolicyForcedConfig::SPEAKER;
+        case AUDIO_POLICY_FORCE_HEADPHONES:
+            return media::AudioPolicyForcedConfig::HEADPHONES;
+        case AUDIO_POLICY_FORCE_BT_SCO:
+            return media::AudioPolicyForcedConfig::BT_SCO;
+        case AUDIO_POLICY_FORCE_BT_A2DP:
+            return media::AudioPolicyForcedConfig::BT_A2DP;
+        case AUDIO_POLICY_FORCE_WIRED_ACCESSORY:
+            return media::AudioPolicyForcedConfig::WIRED_ACCESSORY;
+        case AUDIO_POLICY_FORCE_BT_CAR_DOCK:
+            return media::AudioPolicyForcedConfig::BT_CAR_DOCK;
+        case AUDIO_POLICY_FORCE_BT_DESK_DOCK:
+            return media::AudioPolicyForcedConfig::BT_DESK_DOCK;
+        case AUDIO_POLICY_FORCE_ANALOG_DOCK:
+            return media::AudioPolicyForcedConfig::ANALOG_DOCK;
+        case AUDIO_POLICY_FORCE_DIGITAL_DOCK:
+            return media::AudioPolicyForcedConfig::DIGITAL_DOCK;
+        case AUDIO_POLICY_FORCE_NO_BT_A2DP:
+            return media::AudioPolicyForcedConfig::NO_BT_A2DP;
+        case AUDIO_POLICY_FORCE_SYSTEM_ENFORCED:
+            return media::AudioPolicyForcedConfig::SYSTEM_ENFORCED;
+        case AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
+            return media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
+            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS:
+            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
+            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
+        case AUDIO_POLICY_FORCE_CFG_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<device_role_t>
+aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl) {
+    switch (aidl) {
+        case media::DeviceRole::NONE:
+            return DEVICE_ROLE_NONE;
+        case media::DeviceRole::PREFERRED:
+            return DEVICE_ROLE_PREFERRED;
+        case media::DeviceRole::DISABLED:
+            return DEVICE_ROLE_DISABLED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::DeviceRole>
+legacy2aidl_device_role_t_DeviceRole(device_role_t legacy) {
+    switch (legacy) {
+        case DEVICE_ROLE_NONE:
+            return media::DeviceRole::NONE;
+        case DEVICE_ROLE_PREFERRED:
+            return media::DeviceRole::PREFERRED;
+        case DEVICE_ROLE_DISABLED:
+            return media::DeviceRole::DISABLED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_offload_mode_t>
+aidl2legacy_AudioOffloadMode_audio_offload_mode_t(media::AudioOffloadMode aidl) {
+    switch (aidl) {
+        case media::AudioOffloadMode::NOT_SUPPORTED:
+            return AUDIO_OFFLOAD_NOT_SUPPORTED;
+        case media::AudioOffloadMode::SUPPORTED:
+            return AUDIO_OFFLOAD_SUPPORTED;
+        case media::AudioOffloadMode::GAPLESS_SUPPORTED:
+            return AUDIO_OFFLOAD_GAPLESS_SUPPORTED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioOffloadMode>
+legacy2aidl_audio_offload_mode_t_AudioOffloadMode(audio_offload_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_OFFLOAD_NOT_SUPPORTED:
+            return media::AudioOffloadMode::NOT_SUPPORTED;
+        case AUDIO_OFFLOAD_SUPPORTED:
+            return media::AudioOffloadMode::SUPPORTED;
+        case AUDIO_OFFLOAD_GAPLESS_SUPPORTED:
+            return media::AudioOffloadMode::GAPLESS_SUPPORTED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+}  // namespace android
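
Note (not part of the change): the converters above all return ConversionResult<T>, the
base::expected<T, status_t> alias from media/AidlConversionUtil.h. Callers either propagate
failures with VALUE_OR_RETURN, as this file does, or unpack the result manually. A minimal
sketch under that assumption; deviceRoleOrNone is a made-up helper, not part of this diff:

    #include <media/PolicyAidlConversion.h>

    namespace android {

    // Hypothetical helper: fall back to DEVICE_ROLE_NONE instead of propagating BAD_VALUE.
    device_role_t deviceRoleOrNone(media::DeviceRole aidl) {
        const ConversionResult<device_role_t> result =
                aidl2legacy_DeviceRole_device_role_t(aidl);
        return result.has_value() ? result.value() : DEVICE_ROLE_NONE;
    }

    }  // namespace android
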
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 050ad65..e5e8496 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -17,6 +17,8 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ToneGenerator"
 
+#include <utility>
+
 #include <math.h>
 #include <utils/Log.h>
 #include <cutils/properties.h>
@@ -25,6 +27,7 @@
 
 namespace android {
 
+using android::content::AttributionSourceState;
 
 // Descriptors for all available tones (See ToneGenerator::ToneDescriptor class declaration for details)
 const ToneGenerator::ToneDescriptor ToneGenerator::sToneDescriptors[] = {
@@ -740,6 +743,11 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = ToneGenerator::TONEGEN_INF,
           .repeatSegment = 0 },                              // TONE_JAPAN_RADIO_ACK
+        { .segments = { { .duration = 1000, .waveFreq = { 400, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                              // TONE_JAPAN_RINGTONE
         { .segments = { { .duration = 375, .waveFreq = { 400, 0 }, 0, 0 },
                         { .duration = 375, .waveFreq = { 0 }, 0, 0 },
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
@@ -853,6 +861,11 @@
                       { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = ToneGenerator::TONEGEN_INF,
           .repeatSegment = 0 },                               // TONE_INDIA_RINGTONE
+        { .segments = { { .duration = 1000, .waveFreq = { 440, 480, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                               // TONE_TW_RINGTONE
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -876,7 +889,7 @@
             TONE_SUP_RADIO_NOTAVAIL,     // TONE_SUP_RADIO_NOTAVAIL
             TONE_SUP_ERROR,              // TONE_SUP_ERROR
             TONE_SUP_CALL_WAITING,       // TONE_SUP_CALL_WAITING
-            TONE_SUP_RINGTONE            // TONE_SUP_RINGTONE
+            TONE_JAPAN_RINGTONE          // TONE_SUP_RINGTONE
         },
         {   // GB
             TONE_ANSI_DIAL,              // TONE_SUP_DIAL
@@ -937,6 +950,16 @@
             TONE_SUP_ERROR,              // TONE_SUP_ERROR
             TONE_INDIA_CALL_WAITING,     // TONE_SUP_CALL_WAITING
             TONE_INDIA_RINGTONE          // TONE_SUP_RINGTONE
+        },
+        {   // TAIWAN
+            TONE_SUP_DIAL,               // TONE_SUP_DIAL
+            TONE_SUP_BUSY,               // TONE_SUP_BUSY
+            TONE_SUP_CONGESTION,         // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,          // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,     // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,              // TONE_SUP_ERROR
+            TONE_SUP_CALL_WAITING,       // TONE_SUP_CALL_WAITING
+            TONE_TW_RINGTONE             // TONE_SUP_RINGTONE
         }
 };
 
@@ -964,7 +987,9 @@
 //        none
 //
 ////////////////////////////////////////////////////////////////////////////////
-ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava) {
+ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava,
+        std::string opPackageName)
+        : mOpPackageName(std::move(opPackageName)) {
 
     ALOGV("ToneGenerator constructor: streamType=%d, volume=%f", streamType, volume);
 
@@ -1010,6 +1035,8 @@
         mRegion = IRELAND;
     } else if (strstr(value, "in") != NULL) {
         mRegion = INDIA;
+    } else if (strstr(value, "tw") != NULL) {
+        mRegion = TAIWAN;
     } else {
         mRegion = CEPT;
     }
@@ -1233,7 +1260,11 @@
 ////////////////////////////////////////////////////////////////////////////////
 bool ToneGenerator::initAudioTrack() {
     // Open audio track in mono, PCM 16bit, default sampling rate.
-    mpAudioTrack = new AudioTrack();
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource = AttributionSourceState();
+    attributionSource.packageName = mOpPackageName;
+    attributionSource.token = sp<BBinder>::make();
+    mpAudioTrack = new AudioTrack(attributionSource);
     ALOGV("AudioTrack(%p) created", mpAudioTrack.get());
 
     audio_attributes_t attr;
@@ -1259,8 +1290,7 @@
             AUDIO_SESSION_ALLOCATE,
             AudioTrack::TRANSFER_CALLBACK,
             nullptr,
-            AUDIO_UID_INVALID,
-            -1,
+            attributionSource,
             &attr);
     // Set caller name so it can be logged in destructor.
     // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_TONEGENERATOR
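
Note (not part of the change): the constructor now takes an opPackageName, which
initAudioTrack() forwards to AudioTrack inside an AttributionSourceState. A minimal usage
sketch; the package name, tone choice, and duration are made up:

    #include <media/ToneGenerator.h>

    void playRingbackTone() {
        android::ToneGenerator toneGen(AUDIO_STREAM_DTMF, /*volume=*/1.0f,
                                       /*threadCanCallJava=*/false,
                                       /*opPackageName=*/"com.example.dialer");
        // With the region property resolving to TAIWAN, TONE_SUP_RINGTONE maps to the new
        // TONE_TW_RINGTONE entry added above.
        toneGen.startTone(android::ToneGenerator::TONE_SUP_RINGTONE, /*durationMs=*/3000);
    }
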
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 0a914fc..188f321 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -17,7 +17,7 @@
 #include <media/TrackPlayerBase.h>
 
 namespace android {
-
+using aidl_utils::binderStatusFromStatusT;
 using media::VolumeShaper;
 
 //--------------------------------------------------------------------------------------------------
@@ -33,9 +33,15 @@
     doDestroy();
 }
 
-void TrackPlayerBase::init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage) {
-    PlayerBase::init(playerType, usage);
+void TrackPlayerBase::init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage,
+        audio_session_t sessionId) {
+    PlayerBase::init(playerType, usage, sessionId);
     mAudioTrack = pat;
+    if (mAudioTrack != 0) {
+        mSelfAudioDeviceCallback = new SelfAudioDeviceCallback(*this);
+        mAudioTrack->addAudioDeviceCallback(mSelfAudioDeviceCallback);
+        mAudioTrack->setPlayerIId(mPIId); // set in PlayerBase::init().
+    }
 }
 
 void TrackPlayerBase::destroy() {
@@ -43,9 +49,23 @@
     baseDestroy();
 }
 
+TrackPlayerBase::SelfAudioDeviceCallback::SelfAudioDeviceCallback(PlayerBase& self) :
+    AudioSystem::AudioDeviceCallback(), mSelf(self) {
+}
+
+TrackPlayerBase::SelfAudioDeviceCallback::~SelfAudioDeviceCallback() {
+}
+
+void TrackPlayerBase::SelfAudioDeviceCallback::onAudioDeviceUpdate(audio_io_handle_t __unused,
+                                                                   audio_port_handle_t deviceId) {
+    mSelf.baseUpdateDeviceId(deviceId);
+}
+
 void TrackPlayerBase::doDestroy() {
     if (mAudioTrack != 0) {
         mAudioTrack->stop();
+        mAudioTrack->removeAudioDeviceCallback(mSelfAudioDeviceCallback);
+        mSelfAudioDeviceCallback.clear();
         // Note that there may still be another reference in post-unlock phase of SetPlayState
         mAudioTrack.clear();
     }
@@ -106,11 +126,17 @@
 
 
 binder::Status TrackPlayerBase::applyVolumeShaper(
-        const VolumeShaper::Configuration& configuration,
-        const VolumeShaper::Operation& operation) {
+        const media::VolumeShaperConfiguration& configuration,
+        const media::VolumeShaperOperation& operation) {
 
-    sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration(configuration);
-    sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation(operation);
+    sp<VolumeShaper::Configuration> spConfiguration = new VolumeShaper::Configuration();
+    sp<VolumeShaper::Operation> spOperation = new VolumeShaper::Operation();
+
+    status_t s = spConfiguration->readFromParcelable(configuration)
+            ?: spOperation->readFromParcelable(operation);
+    if (s != OK) {
+        return binderStatusFromStatusT(s);
+    }
 
     if (mAudioTrack != 0) {
         ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
@@ -118,7 +144,7 @@
         if (status < 0) { // a non-negative value is the volume shaper id.
             ALOGE("TrackPlayerBase::applyVolumeShaper() failed with status %d", status);
         }
-        return binder::Status::fromStatusT(status);
+        return binderStatusFromStatusT(status);
     } else {
         ALOGD("TrackPlayerBase::applyVolumeShaper()"
               " no AudioTrack for volume control from IPlayer");
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
new file mode 100644
index 0000000..04a02c7
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioStreamType;
+
+/**
+ * This is the equivalent of the android::AudioAttributes C++ type.
+ * {@hide}
+ */
+parcelable AudioAttributesEx {
+    AudioAttributesInternal attributes;
+    AudioStreamType streamType;
+    /** Interpreted as volume_group_t. */
+    int groupId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
new file mode 100644
index 0000000..699df0a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioContentType;
+import android.media.AudioSourceType;
+import android.media.AudioUsage;
+
+/**
+ * The "Internal" suffix of this type name is to disambiguate it from the
+ * android.media.AudioAttributes SDK type.
+ * {@hide}
+ */
+parcelable AudioAttributesInternal {
+    AudioContentType contentType;
+    AudioUsage usage;
+    AudioSourceType source;
+    // Bitmask, indexed by AudioFlag.
+    int flags;
+    @utf8InCpp String tags; /* UTF8 */
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioClient.aidl b/media/libaudioclient/aidl/android/media/AudioClient.aidl
new file mode 100644
index 0000000..e98fed3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioClient.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.AttributionSourceState;
+
+/**
+ * {@hide}
+ */
+parcelable AudioClient {
+    /** Interpreted as pid_t. */
+    int clientTid;
+    AttributionSourceState attributionSource;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfig.aidl b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
new file mode 100644
index 0000000..8dc97d3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioOffloadInfo;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfig {
+    int sampleRate;
+    /**
+     * Interpreted as audio_channel_mask_t.
+     * TODO(ytai): Create a designated type.
+     */
+    int channelMask;
+    AudioFormat format;
+    AudioOffloadInfo offloadInfo;
+    long frameCount;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
new file mode 100644
index 0000000..8353c0d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioConfigBase {
+    int sampleRate;
+    /** Interpreted as audio_channel_mask_t. */
+    int channelMask;
+    AudioFormat format;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioContentType.aidl b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
new file mode 100644
index 0000000..f734fba
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioContentType {
+    UNKNOWN = 0,
+    SPEECH = 1,
+    MUSIC = 2,
+    MOVIE = 3,
+    SONIFICATION = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioDevice.aidl b/media/libaudioclient/aidl/android/media/AudioDevice.aidl
new file mode 100644
index 0000000..b200697
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioDevice.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioDevice {
+    /** Interpreted as audio_devices_t. */
+    int type;
+    @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl b/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl
new file mode 100644
index 0000000..f6220c2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+// TODO(b/175167149): Reconcile AudioDualMonoMode with framework-media-sources
+
+@Backing(type="int")
+enum AudioDualMonoMode {
+    OFF = 0,
+    LR = 1,
+    LL = 2,
+    RR = 3,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
new file mode 100644
index 0000000..b03adfe
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationMetadataType {
+    NONE = 0,
+    FRAMEWORK_TUNER = 1,
+    DVB_AD_DESCRIPTOR = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
new file mode 100644
index 0000000..9e04e82
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationMode {
+     NONE = 0,
+     ELEMENTARY_STREAM = 1,
+     HANDLE = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
new file mode 100644
index 0000000..b08a604
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Audio encapsulation type describes whether the audio data should be sent with a
+ * particular encapsulation type.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioEncapsulationType {
+    NONE     = 0,
+    IEC61937 = 1,
+}
\ No newline at end of file
diff --git a/media/libaudioclient/aidl/android/media/AudioFlag.aidl b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
new file mode 100644
index 0000000..58b493b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioFlag {
+    AUDIBILITY_ENFORCED = 0,
+    SECURE = 1,
+    SCO = 2,
+    BEACON = 3,
+    HW_AV_SYNC = 4,
+    HW_HOTWORD = 5,
+    BYPASS_INTERRUPTION_POLICY = 6,
+    BYPASS_MUTE = 7,
+    LOW_LATENCY = 8,
+    DEEP_BUFFER = 9,
+    NO_MEDIA_PROJECTION = 10,
+    MUTE_HAPTIC = 11,
+    NO_SYSTEM_CAPTURE = 12,
+    CAPTURE_PRIVATE = 13,
+}
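
Note (not part of the change): fields documented as "Bitmask, indexed by AudioFlag" (for
example AudioAttributesInternal.flags) treat each enum value as a bit index, not a bit value,
matching the indexToEnum_index/enumToMask_index helpers used in PolicyAidlConversion.cpp.
A minimal sketch, assuming the generated C++ header path follows the android.media package;
both helpers are made up:

    #include <android/media/AudioFlag.h>

    namespace android {

    bool hasAudioFlag(int32_t flagsBitmask, media::AudioFlag flag) {
        // e.g. AudioFlag::LOW_LATENCY == 8 selects bit 8, i.e. mask 0x100.
        return (flagsBitmask & (1 << static_cast<int32_t>(flag))) != 0;
    }

    int32_t withAudioFlag(int32_t flagsBitmask, media::AudioFlag flag) {
        return flagsBitmask | (1 << static_cast<int32_t>(flag));
    }

    }  // namespace android
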
diff --git a/media/libaudioclient/aidl/android/media/AudioGain.aidl b/media/libaudioclient/aidl/android/media/AudioGain.aidl
new file mode 100644
index 0000000..048b295
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGain.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGain {
+    int index;
+    boolean useInChannelMask;
+    boolean useForVolume;
+    /** Bitmask, indexed by AudioGainMode. */
+    int mode;
+    /** Interpreted as audio_channel_mask_t. */
+    int channelMask;
+    int minValue;
+    int maxValue;
+    int defaultValue;
+    int stepValue;
+    int minRampMs;
+    int maxRampMs;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
new file mode 100644
index 0000000..b93c2dc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGainConfig {
+    /** Index of the corresponding audio_gain in the audio_port gains[] table. */
+    int index;
+
+    /** Mode requested for this command. Bitfield indexed by AudioGainMode. */
+    int mode;
+
+    /**
+     * Channels which gain value follows. N/A in joint mode.
+     * Interpreted as audio_channel_mask_t.
+     */
+    int channelMask;
+
+    /**
+     * Gain values in millibels.
+     * For each channel ordered from LSb to MSb in channel mask. The number of values is 1 in joint
+     * mode, otherwise equals the number of bits implied by channelMask.
+     */
+    int[]  values;
+
+    /** Ramp duration in ms. */
+    int rampDurationMs;
+}
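
Note (not part of the change): the expected size of values[] follows from mode and channelMask
as described above. A minimal sketch of that rule, assuming bit 0 of the mode bitmask
corresponds to AudioGainMode::JOINT under the index convention; the helper name is made up:

    #include <cstddef>
    #include <cstdint>

    namespace android {

    size_t expectedGainValueCount(int32_t modeBitmask, uint32_t channelMask) {
        const bool joint = (modeBitmask & (1 << 0)) != 0;  // AudioGainMode::JOINT
        // Joint mode: one value applies to all channels; otherwise one value per bit
        // set in channelMask, ordered from LSb to MSb.
        return joint ? 1 : static_cast<size_t>(__builtin_popcount(channelMask));
    }

    }  // namespace android
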
diff --git a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
new file mode 100644
index 0000000..e1b9f0b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioGainMode {
+    JOINT    = 0,
+    CHANNELS = 1,
+    RAMP     = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
new file mode 100644
index 0000000..bfc0eb0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioInputFlags {
+    FAST       = 0,
+    HW_HOTWORD = 1,
+    RAW        = 2,
+    SYNC       = 3,
+    MMAP_NOIRQ = 4,
+    VOIP_TX    = 5,
+    HW_AV_SYNC = 6,
+    DIRECT     = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
new file mode 100644
index 0000000..d5f23a1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioIoConfigEvent {
+    OUTPUT_REGISTERED = 0,
+    OUTPUT_OPENED = 1,
+    OUTPUT_CLOSED = 2,
+    OUTPUT_CONFIG_CHANGED = 3,
+    INPUT_REGISTERED = 4,
+    INPUT_OPENED = 5,
+    INPUT_CLOSED = 6,
+    INPUT_CONFIG_CHANGED = 7,
+    CLIENT_STARTED = 8,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
new file mode 100644
index 0000000..876ef9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPatch;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioIoDescriptor {
+    /** Interpreted as audio_io_handle_t. */
+    int ioHandle;
+    AudioPatch patch;
+    int samplingRate;
+    AudioFormat format;
+    /** Interpreted as audio_channel_mask_t. */
+    int channelMask;
+    long frameCount;
+    long frameCountHAL;
+    /** Only valid for output. */
+    int latency;
+    /**
+     * Interpreted as audio_port_handle_t.
+     * valid for event AUDIO_CLIENT_STARTED.
+     */
+    int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
new file mode 100644
index 0000000..f9b25bf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+union AudioIoFlags {
+    /** Bitmask indexed by AudioInputFlags. */
+    int input;
+    /** Bitmask indexed by AudioOutputFlags. */
+    int output;
+}
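The two fields above are plain ints whose bits are indexed by the AudioInputFlags and AudioOutputFlags enums in this change, i.e. enumerator N corresponds to bit N. A minimal C++ sketch of composing and testing such a mask, using a local stand-in enum that mirrors a few of the AIDL values (not the generated AIDL types):

#include <cstdint>

// Stand-in mirroring a subset of the AIDL AudioInputFlags enumerators.
enum class AudioInputFlags : int32_t { FAST = 0, HW_HOTWORD = 1, RAW = 2 };

constexpr int32_t bitOf(AudioInputFlags f) { return 1 << static_cast<int32_t>(f); }

// "Bitmask indexed by AudioInputFlags": set the FAST and RAW bits.
constexpr int32_t kFastRawMask = bitOf(AudioInputFlags::FAST) | bitOf(AudioInputFlags::RAW);

constexpr bool hasFlag(int32_t mask, AudioInputFlags f) { return (mask & bitOf(f)) != 0; }
static_assert(hasFlag(kFastRawMask, AudioInputFlags::RAW), "RAW bit should be set");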
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
new file mode 100644
index 0000000..7473372
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioDevice;
+import android.media.AudioMixCallbackFlag;
+import android.media.AudioMixMatchCriterion;
+import android.media.AudioMixRouteFlag;
+import android.media.AudioMixType;
+
+/**
+ * {@hide}
+ */
+parcelable AudioMix {
+    AudioMixMatchCriterion[] criteria;
+    AudioMixType mixType;
+    AudioConfig format;
+    /** Bitmask, indexed by AudioMixRouteFlag. */
+    int routeFlags;
+    AudioDevice device;
+    /** Flags indicating which callbacks to use. Bitmask, indexed by AudioMixCallbackFlag. */
+    int cbFlags;
+    /** When true, the AUDIO_FLAG_NO_MEDIA_PROJECTION flag is ignored. */
+    boolean allowPrivilegedMediaPlaybackCapture;
+    /** Indicates if the caller can capture voice communication output */
+    boolean voiceCommunicationCaptureAllowed;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixCallbackFlag.aidl b/media/libaudioclient/aidl/android/media/AudioMixCallbackFlag.aidl
new file mode 100644
index 0000000..6cb3d38
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixCallbackFlag.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMixCallbackFlag {
+    NOTIFY_ACTIVITY = 0,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl b/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
new file mode 100644
index 0000000..d70b364
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMixLatencyClass {
+    LOW = 0,
+    NORMAL = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterion.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterion.aidl
new file mode 100644
index 0000000..2c63dc5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterion.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMixMatchCriterionValue;
+
+/**
+ * {@hide}
+ */
+parcelable AudioMixMatchCriterion {
+    /** When true, rule becomes "exclude" instead of "include". */
+    boolean invert;
+    AudioMixMatchCriterionValue value;
+}
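The invert flag above turns an "include" rule into an "exclude" rule. A hedged C++ sketch of that flip in isolation; how the policy engine combines multiple criteria is not specified in this file, so the all-must-accept loop below is purely illustrative:

#include <vector>

// Stand-in for one evaluated criterion: 'hit' is whether the criterion's value
// matched the client; 'invert' flips the result, turning include into exclude.
struct EvaluatedCriterion {
    bool invert;
    bool hit;
};

constexpr bool criterionAccepts(const EvaluatedCriterion& c) {
    return c.invert ? !c.hit : c.hit;
}

// Illustrative combination only: accept a client when every criterion accepts it.
inline bool allCriteriaAccept(const std::vector<EvaluatedCriterion>& criteria) {
    for (const auto& c : criteria) {
        if (!criterionAccepts(c)) return false;
    }
    return true;
}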
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
new file mode 100644
index 0000000..e26a9e1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+import android.media.AudioUsage;
+
+/**
+ * {@hide}
+ */
+union AudioMixMatchCriterionValue {
+    AudioUsage usage = AudioUsage.UNKNOWN;
+    AudioSourceType source;
+    /** Interpreted as uid_t. */
+    int uid;
+    int userId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixRouteFlag.aidl b/media/libaudioclient/aidl/android/media/AudioMixRouteFlag.aidl
new file mode 100644
index 0000000..f0cce2d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixRouteFlag.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMixRouteFlag {
+    RENDER = 0,
+    LOOP_BACK = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixType.aidl b/media/libaudioclient/aidl/android/media/AudioMixType.aidl
new file mode 100644
index 0000000..b6806dc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMixType {
+    PLAYERS = 0,
+    RECORDERS = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMode.aidl b/media/libaudioclient/aidl/android/media/AudioMode.aidl
new file mode 100644
index 0000000..7067dd3
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMode.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioMode {
+    INVALID = -2,
+    CURRENT = -1,
+    NORMAL = 0,
+    RINGTONE = 1,
+    IN_CALL = 2,
+    IN_COMMUNICATION = 3,
+    CALL_SCREEN = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
new file mode 100644
index 0000000..c86b3f0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfigBase;
+import android.media.AudioEncapsulationMode;
+import android.media.AudioStreamType;
+import android.media.AudioUsage;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioOffloadInfo {
+    /** Version of the info structure. Interpreted as a uint16_t version constant. */
+    int version;
+    /** Audio configuration. */
+    AudioConfigBase config;
+    /** Stream type. */
+    AudioStreamType streamType;
+    /** Bit rate in bits per second. */
+    int bitRate;
+    /** Duration in microseconds, -1 if unknown. */
+    long durationUs;
+    /** true if stream is tied to a video stream. */
+    boolean hasVideo;
+    /** true if streaming, false if local playback. */
+    boolean isStreaming;
+    int bitWidth;
+    /** Offload fragment size. */
+    int offloadBufferSize;
+    AudioUsage usage;
+    AudioEncapsulationMode encapsulationMode;
+    /** Content id from tuner HAL (0 if none). */
+    int contentId;
+    /** Sync id from tuner HAL (0 if none). */
+    int syncId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOffloadMode.aidl b/media/libaudioclient/aidl/android/media/AudioOffloadMode.aidl
new file mode 100644
index 0000000..45a44f2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOffloadMode.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioOffloadMode {
+    NOT_SUPPORTED = 0,
+    SUPPORTED = 1,
+    GAPLESS_SUPPORTED = 2
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
new file mode 100644
index 0000000..cebd8f0
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioOutputFlags {
+    DIRECT           = 0,
+    PRIMARY          = 1,
+    FAST             = 2,
+    DEEP_BUFFER      = 3,
+    COMPRESS_OFFLOAD = 4,
+    NON_BLOCKING     = 5,
+    HW_AV_SYNC       = 6,
+    TTS              = 7,
+    RAW              = 8,
+    SYNC             = 9,
+    IEC958_NONAUDIO  = 10,
+    DIRECT_PCM       = 11,
+    MMAP_NOIRQ       = 12,
+    VOIP_RX          = 13,
+    INCALL_MUSIC     = 14,
+    GAPLESS_OFFLOAD  = 15,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
new file mode 100644
index 0000000..8519faf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfig;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPatch {
+    /**
+     * Patch unique ID.
+     * Interpreted as audio_patch_handle_t.
+     */
+    int id;
+    AudioPortConfig[] sources;
+    AudioPortConfig[] sinks;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl b/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl
new file mode 100644
index 0000000..e29d398
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The AudioPlaybackRate.
+ *
+ * See https://developer.android.com/reference/android/media/PlaybackParams.
+ * TODO(b/175166815): Reconcile with framework-media-sources PlaybackParams.aidl.
+ *       As this is used for native wire serialization, no need to define
+ *       audio_timestretch_stretch_mode_t and audio_timestretch_fallback_mode_t enums
+ *       until we attempt to unify with PlaybackParams.
+ *
+ * {@hide}
+ */
+parcelable AudioPlaybackRate {
+    /** Speed of audio playback, >= 0.f, 1.f nominal (system limits are more restrictive). */
+    float speed;
+    /** Pitch of audio, >= 0.f, 1.f nominal (system limits are more restrictive). */
+    float pitch;
+    /** Interpreted as audio_timestretch_stretch_mode_t */
+    int stretchMode;
+    /** Interpreted as audio_timestretch_fallback_mode_t */
+    int fallbackMode;
+}
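Per the field comments above, a nominal (unmodified) playback rate is speed = 1.f and pitch = 1.f. A small sketch using a local struct with the same field names as the parcelable; the stretch/fallback mode values are placeholders, since the corresponding legacy enums are deliberately not redefined here:

#include <cstdint>

// Local stand-in mirroring the AudioPlaybackRate fields (not the generated type).
struct PlaybackRateSketch {
    float speed;          // >= 0.f, 1.f nominal
    float pitch;          // >= 0.f, 1.f nominal
    int32_t stretchMode;  // carries an audio_timestretch_stretch_mode_t value
    int32_t fallbackMode; // carries an audio_timestretch_fallback_mode_t value
};

// Nominal rate: playback unchanged; the mode values of 0 are placeholders for this sketch.
constexpr PlaybackRateSketch kNominal{/*speed=*/1.0f, /*pitch=*/1.0f,
                                      /*stretchMode=*/0, /*fallbackMode=*/0};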
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyDeviceState.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyDeviceState.aidl
new file mode 100644
index 0000000..a1072d2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyDeviceState.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPolicyDeviceState {
+    UNAVAILABLE = 0,
+    AVAILABLE = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
new file mode 100644
index 0000000..9bb0605
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPolicyForceUse {
+    COMMUNICATION = 0,
+    MEDIA = 1,
+    RECORD = 2,
+    DOCK = 3,
+    SYSTEM = 4,
+    HDMI_SYSTEM_AUDIO = 5,
+    ENCODED_SURROUND = 6,
+    VIBRATE_RINGING = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
new file mode 100644
index 0000000..2255d4c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPolicyForcedConfig {
+    NONE = 0,
+    SPEAKER = 1,
+    HEADPHONES = 2,
+    BT_SCO = 3,
+    BT_A2DP = 4,
+    WIRED_ACCESSORY = 5,
+    BT_CAR_DOCK = 6,
+    BT_DESK_DOCK = 7,
+    ANALOG_DOCK = 8,
+    DIGITAL_DOCK = 9,
+    NO_BT_A2DP = 10, /* A2DP sink is not preferred to speaker or wired HS */
+    SYSTEM_ENFORCED = 11,
+    HDMI_SYSTEM_AUDIO_ENFORCED = 12,
+    ENCODED_SURROUND_NEVER = 13,
+    ENCODED_SURROUND_ALWAYS = 14,
+    ENCODED_SURROUND_MANUAL = 15,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPort.aidl
new file mode 100644
index 0000000..bf0e5b7
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPort.aidl
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGain;
+import android.media.AudioPortConfig;
+import android.media.AudioPortExt;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.AudioProfile;
+import android.media.ExtraAudioDescriptor;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPort {
+    /** Port unique ID. Interpreted as audio_port_handle_t. */
+    int id;
+    /** Sink or source. */
+    AudioPortRole role;
+    /** Device, mix ... */
+    AudioPortType type;
+    @utf8InCpp String name;
+    /** AudioProfiles supported by this port (format, rates, channels). */
+    AudioProfile[] profiles;
+    /**
+     * ExtraAudioDescriptors supported by this port. The format is not recognized by the
+     * platform; the audio capability is instead described by a hardware descriptor.
+     */
+    ExtraAudioDescriptor[] extraAudioDescriptors;
+    /** Gain controllers. */
+    AudioGain[] gains;
+    /** Current audio port configuration. */
+    AudioPortConfig activeConfig;
+    AudioPortExt ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
new file mode 100644
index 0000000..2dd30a4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGainConfig;
+import android.media.AudioIoFlags;
+import android.media.AudioPortConfigExt;
+import android.media.AudioPortConfigType;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfig {
+    /**
+     * Port unique ID.
+     * Interpreted as audio_port_handle_t.
+     */
+    int id;
+    /** Sink or source. */
+    AudioPortRole role;
+    /** Device, mix ... */
+    AudioPortType type;
+    /** Bitmask, indexed by AudioPortConfigType. */
+    int configMask;
+    /** Sampling rate in Hz. */
+    int sampleRate;
+    /**
+     * Channel mask, if applicable.
+     * Interpreted as audio_channel_mask_t.
+     * TODO: bitmask?
+     */
+    int channelMask;
+    /**
+     * Format, if applicable.
+     */
+    AudioFormat format;
+    /** Gain to apply, if applicable. */
+    AudioGainConfig gain;
+    /** Framework only: HW_AV_SYNC, DIRECT, ... */
+    AudioIoFlags flags;
+    AudioPortConfigExt ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
new file mode 100644
index 0000000..a99aa9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigDeviceExt {
+    /**
+     * Module the device is attached to.
+     * Interpreted as audio_module_handle_t.
+     */
+    int hwModule;
+    /**
+     * Device type (e.g AUDIO_DEVICE_OUT_SPEAKER).
+     * Interpreted as audio_devices_t.
+     * TODO: Convert to a standalone AIDL representation.
+     */
+    int type;
+    /** Device address. "" if N/A. */
+    @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
new file mode 100644
index 0000000..5d635b6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigDeviceExt;
+import android.media.AudioPortConfigMixExt;
+import android.media.AudioPortConfigSessionExt;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigExt {
+    /**
+     * This represents an empty union. Value is ignored.
+     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+     *             established.
+     */
+    boolean unspecified;
+    /** Device specific info. */
+    AudioPortConfigDeviceExt device;
+    /** Mix specific info. */
+    AudioPortConfigMixExt mix;
+    /** Session specific info. */
+    AudioPortConfigSessionExt session;
+}
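The union above carries exactly one of the three "ext" payloads, with the boolean unspecified member standing in for the empty case noted in the TODO. A consumer-side dispatch sketch using std::variant as a stand-in; the generated C++ union class exposes a tag-based accessor instead, so the types and names below are illustrative only:

#include <cstdio>
#include <type_traits>
#include <variant>

// Stand-ins for the payloads; fields elided for brevity.
struct DeviceExt  { int hwModule; };
struct MixExt     { int hwModule; int handle; };
struct SessionExt { int session; };
struct Unspecified {};  // plays the role of the 'boolean unspecified' empty alternative

using PortConfigExt = std::variant<Unspecified, DeviceExt, MixExt, SessionExt>;

// Exactly one alternative is active at a time, matching the AIDL union semantics.
inline void describe(const PortConfigExt& ext) {
    std::visit([](const auto& e) {
        using T = std::decay_t<decltype(e)>;
        if constexpr (std::is_same_v<T, DeviceExt>)       std::printf("device ext\n");
        else if constexpr (std::is_same_v<T, MixExt>)     std::printf("mix ext\n");
        else if constexpr (std::is_same_v<T, SessionExt>) std::printf("session ext\n");
        else                                              std::printf("unspecified\n");
    }, ext);
}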
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
new file mode 100644
index 0000000..d3226f2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigMixExtUseCase;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigMixExt {
+    /**
+     * Module the stream is attached to.
+     * Interpreted as audio_module_handle_t.
+     */
+    int hwModule;
+    /**
+     * I/O handle of the input/output stream.
+     * Interpreted as audio_io_handle_t.
+     */
+    int handle;
+    AudioPortConfigMixExtUseCase usecase;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
new file mode 100644
index 0000000..c61f044
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+union AudioPortConfigMixExtUseCase {
+    /**
+     * This is to be set if the containing config has the AudioPortRole::NONE role.
+     * This represents an empty value (value is ignored).
+     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+     *             established.
+     */
+    boolean unspecified;
+    /** This is to be set if the containing config has the AudioPortRole::SOURCE role. */
+    AudioStreamType stream;
+    /** This is to be set if the containing config has the AudioPortRole::SINK role. */
+    AudioSourceType source;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
new file mode 100644
index 0000000..a2cbf62
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigSessionExt {
+    int session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
new file mode 100644
index 0000000..6e22b8d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortConfigType {
+    SAMPLE_RATE  = 0,
+    CHANNEL_MASK = 1,
+    FORMAT       = 2,
+    GAIN         = 3,
+    FLAGS        = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
new file mode 100644
index 0000000..b758f23
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioDevice;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortDeviceExt {
+    /** Module the device is attached to. Interpreted as audio_module_handle_t. */
+    int hwModule;
+    AudioDevice device;
+    /** Bitmask, indexed by AudioEncapsulationMode. */
+    int encapsulationModes;
+    /** Bitmask, indexed by AudioEncapsulationMetadataType. */
+    int encapsulationMetadataTypes;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
new file mode 100644
index 0000000..453784b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortDeviceExt;
+import android.media.AudioPortMixExt;
+import android.media.AudioPortSessionExt;
+
+/**
+ * {@hide}
+ */
+union AudioPortExt {
+    /**
+     * This represents an empty union. Value is ignored.
+     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
+     *             established.
+     */
+    boolean unspecified;
+    /** Device specific info. */
+    AudioPortDeviceExt device;
+    /** Mix specific info. */
+    AudioPortMixExt mix;
+    /** Session specific info. */
+    AudioPortSessionExt session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
new file mode 100644
index 0000000..62cdb8e
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMixLatencyClass;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortMixExt {
+    /** Module the stream is attached to. Interpreted as audio_module_handle_t. */
+    int hwModule;
+    /** I/O handle of the input/output stream. Interpreted as audio_io_handle_t. */
+    int handle;
+    /** Latency class */
+    AudioMixLatencyClass latencyClass;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortRole.aidl b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
new file mode 100644
index 0000000..ea2ef3a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortRole {
+    NONE = 0,
+    SOURCE = 1,
+    SINK = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
new file mode 100644
index 0000000..dbca168
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortSessionExt {
+    /** Audio session. Interpreted as audio_session_t. */
+    int session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortType.aidl b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
new file mode 100644
index 0000000..9e6af49
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioPortType {
+    NONE = 0,
+    DEVICE = 1,
+    MIX = 2,
+    SESSION = 3,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioProductStrategy.aidl b/media/libaudioclient/aidl/android/media/AudioProductStrategy.aidl
new file mode 100644
index 0000000..56c4a44
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioProductStrategy.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesEx;
+
+/**
+ * {@hide}
+ */
+parcelable AudioProductStrategy {
+    /** Interpreted as product_strategy_t. */
+    int id;
+    @utf8InCpp String name;
+    AudioAttributesEx[] audioAttributes;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioProfile.aidl b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
new file mode 100644
index 0000000..afb288f
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioEncapsulationType;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioProfile {
+    @utf8InCpp String name;
+    /** The format for an audio profile should only be set when initialized. */
+    AudioFormat format;
+    /** Interpreted as audio_channel_mask_t. */
+    int[] channelMasks;
+    int[] samplingRates;
+    boolean isDynamicFormat;
+    boolean isDynamicChannels;
+    boolean isDynamicRate;
+    AudioEncapsulationType encapsulationType;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
new file mode 100644
index 0000000..8673b92
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioSourceType {
+    INVALID = -1,
+    DEFAULT = 0,
+    MIC = 1,
+    VOICE_UPLINK = 2,
+    VOICE_DOWNLINK = 3,
+    VOICE_CALL = 4,
+    CAMCORDER = 5,
+    VOICE_RECOGNITION = 6,
+    VOICE_COMMUNICATION = 7,
+    REMOTE_SUBMIX = 8,
+    UNPROCESSED = 9,
+    VOICE_PERFORMANCE = 10,
+    ECHO_REFERENCE = 1997,
+    FM_TUNER = 1998,
+    /**
+     * A low-priority, preemptible audio source for background software
+     * hotword detection. Same tuning as VOICE_RECOGNITION.
+     * Used only internally by the framework.
+     */
+    HOTWORD = 1999,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioStandard.aidl b/media/libaudioclient/aidl/android/media/AudioStandard.aidl
new file mode 100644
index 0000000..e131d0d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioStandard.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * The audio standard that describes audio playback/capture capabilities.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioStandard {
+    NONE = 0,
+    EDID = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
new file mode 100644
index 0000000..d777882
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioStreamType {
+    DEFAULT = -1,
+    VOICE_CALL = 0,
+    SYSTEM = 1,
+    RING = 2,
+    MUSIC = 3,
+    ALARM = 4,
+    NOTIFICATION = 5,
+    BLUETOOTH_SCO = 6,
+    ENFORCED_AUDIBLE = 7,
+    DTMF = 8,
+    TTS = 9,
+    ACCESSIBILITY = 10,
+    ASSISTANT = 11,
+    /** For dynamic policy output mixes. Only used by the audio policy. */
+    REROUTING = 12,
+    /** For AudioFlinger track volume. Only used by AudioFlinger. */
+    PATCH = 13,
+    /** Stream corresponding to AUDIO_USAGE_CALL_ASSISTANT. */
+    CALL_ASSISTANT = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl b/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl
new file mode 100644
index 0000000..8bbfb57
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioTimestampInternal.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The "Internal" suffix is intended to disambiguate this type from the android.media.AudioTimestamp type.
+ *
+ * {@hide}
+ */
+parcelable AudioTimestampInternal {
+    /** A frame position in AudioTrack::getPosition() units. */
+    int position;
+    /** The corresponding CLOCK_MONOTONIC time (sec/nsec) when the frame is expected to present. */
+    long sec;
+    int nsec;
+}
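Since sec/nsec describe a CLOCK_MONOTONIC presentation time, a natural consumer-side step is to rebuild a timespec from them, e.g. for comparison against clock_gettime(CLOCK_MONOTONIC, ...). A minimal sketch with plain parameters standing in for the parcelable fields:

#include <cstdint>
#include <ctime>

// Rebuilds the CLOCK_MONOTONIC presentation time carried as (sec, nsec).
inline timespec presentationTime(int64_t sec, int32_t nsec) {
    timespec ts{};
    ts.tv_sec = static_cast<time_t>(sec);
    ts.tv_nsec = nsec;
    return ts;
}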
diff --git a/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl b/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl
new file mode 100644
index 0000000..fdb6d2d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUniqueIdUse.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioUniqueIdUse {
+    UNSPECIFIED = 0,
+    SESSION = 1, // audio_session_t
+                 // for allocated sessions, not special AUDIO_SESSION_*
+    MODULE = 2,  // audio_module_handle_t
+    EFFECT = 3,  // audio_effect_handle_t
+    PATCH = 4,   // audio_patch_handle_t
+    OUTPUT = 5,  // audio_io_handle_t
+    INPUT = 6,   // audio_io_handle_t
+    CLIENT = 7,  // client-side players and recorders
+                 // FIXME should move to a separate namespace;
+                 // these IDs are allocated by AudioFlinger on client request,
+                 // but are never used by AudioFlinger
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUsage.aidl b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
new file mode 100644
index 0000000..66c5c30
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioUsage {
+    UNKNOWN = 0,
+    MEDIA = 1,
+    VOICE_COMMUNICATION = 2,
+    VOICE_COMMUNICATION_SIGNALLING = 3,
+    ALARM = 4,
+    NOTIFICATION = 5,
+    NOTIFICATION_TELEPHONY_RINGTONE = 6,
+    NOTIFICATION_COMMUNICATION_REQUEST = 7,
+    NOTIFICATION_COMMUNICATION_INSTANT = 8,
+    NOTIFICATION_COMMUNICATION_DELAYED = 9,
+    NOTIFICATION_EVENT = 10,
+    ASSISTANCE_ACCESSIBILITY = 11,
+    ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+    ASSISTANCE_SONIFICATION = 13,
+    GAME = 14,
+    VIRTUAL_SOURCE = 15,
+    ASSISTANT = 16,
+    CALL_ASSISTANT = 17,
+    EMERGENCY = 1000,
+    SAFETY = 1001,
+    VEHICLE_STATUS = 1002,
+    ANNOUNCEMENT = 1003,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioUuid.aidl b/media/libaudioclient/aidl/android/media/AudioUuid.aidl
new file mode 100644
index 0000000..bba9039
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioUuid.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioUuid {
+    int timeLow;
+    int timeMid;
+    int timeHiAndVersion;
+    int clockSeq;
+    byte[] node;  // Length = 6
+}
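The fields above follow the usual UUID layout (timeLow, timeMid, timeHiAndVersion, clockSeq, 6-byte node). As a sketch, the canonical 8-4-4-4-12 text form can be produced as below; treating clockSeq as a single 16-bit clock-seq-and-variant value is an assumption, not something stated in this file:

#include <cstdint>
#include <cstdio>
#include <string>

// Formats UUID components in the canonical 8-4-4-4-12 textual form.
inline std::string formatUuid(uint32_t timeLow, uint16_t timeMid, uint16_t timeHiAndVersion,
                              uint16_t clockSeq, const uint8_t (&node)[6]) {
    char buf[37];  // 36 characters plus the terminating NUL
    std::snprintf(buf, sizeof(buf), "%08x-%04x-%04x-%04x-%02x%02x%02x%02x%02x%02x",
                  timeLow, timeMid, timeHiAndVersion, clockSeq,
                  node[0], node[1], node[2], node[3], node[4], node[5]);
    return std::string(buf);
}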
diff --git a/media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl b/media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl
new file mode 100644
index 0000000..f88fc3c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioVibratorInfo.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ * A class for vibrator information. The information will be used by the HapticGenerator effect.
+ */
+parcelable AudioVibratorInfo {
+    int id;
+    float resonantFrequency;
+    float qFactor;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
new file mode 100644
index 0000000..3a29a08
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+parcelable AudioVolumeGroup {
+    /** Interpreted as volume_group_t. */
+    int groupId;
+    @utf8InCpp String name;
+    AudioAttributesInternal[] audioAttributes;
+    AudioStreamType[] streams;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
new file mode 100644
index 0000000..2d274f4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioDevice;
+import android.media.EffectDescriptor;
+import android.media.IEffectClient;
+import android.content.AttributionSourceState;
+
+/**
+ * Input arguments of the createEffect() method.
+ *
+ * {@hide}
+ */
+parcelable CreateEffectRequest {
+    EffectDescriptor desc;
+    @nullable IEffectClient client;
+    int priority;
+    /** Interpreted as audio_io_handle_t. */
+    int output;
+    /** Interpreted as audio_session_t. */
+    int sessionId;
+    AudioDevice device;
+    AttributionSourceState attributionSource;
+    boolean probe;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
new file mode 100644
index 0000000..0aa640a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.EffectDescriptor;
+import android.media.IEffect;
+
+/**
+ * Output arguments of the createEffect() method.
+ *
+ * {@hide}
+ */
+parcelable CreateEffectResponse {
+    int id;
+    boolean enabled;
+    @nullable IEffect effect;
+    EffectDescriptor desc;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
new file mode 100644
index 0000000..7e3c240
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfigBase;
+
+/**
+ * CreateRecordRequest contains all input arguments sent by AudioRecord to AudioFlinger
+ * when calling createRecord(), including arguments that will be updated by AudioFlinger
+ * and returned in the CreateRecordResponse object.
+ *
+ * {@hide}
+ */
+parcelable CreateRecordRequest {
+    AudioAttributesInternal attr;
+    AudioConfigBase config;
+    AudioClient clientInfo;
+    /** Interpreted as audio_unique_id_t. */
+    int riid;
+    int maxSharedAudioHistoryMs;
+    /** Bitmask, indexed by AudioInputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
new file mode 100644
index 0000000..d78b3fc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IAudioRecord;
+import android.media.SharedFileRegion;
+
+/**
+ * CreateRecordResponse contains all output arguments returned by AudioFlinger to AudioRecord
+ * when calling createRecord(), including arguments that were passed as I/O for update by
+ * CreateRecordRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateRecordResponse {
+    /** Bitmask, indexed by AudioInputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+    int sampleRate;
+    /** Interpreted as audio_io_handle_t. */
+    int inputId;
+    @nullable SharedFileRegion cblk;
+    @nullable SharedFileRegion buffers;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+    /** The newly created record. */
+    @nullable IAudioRecord audioRecord;
+}
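Taken together, the request/response pair above describes a round trip: the client seeds a CreateRecordRequest, AudioFlinger may rewrite the negotiable fields (flags, frame counts, selected device, session), and the values actually applied come back in CreateRecordResponse along with the IAudioRecord. A minimal client-side sketch, assuming the cpp-backend generated types and an already-bound IAudioFlingerService proxy; every name below is an assumption for illustration, not something introduced by this change. The same pattern applies to CreateTrackRequest/CreateTrackResponse further down.

// Illustrative sketch only. Assumes cpp-backend generated headers and a connected proxy.
#include <binder/Status.h>
#include <utils/StrongPointer.h>
#include <android/media/CreateRecordRequest.h>
#include <android/media/CreateRecordResponse.h>
#include <android/media/IAudioFlingerService.h>

using android::sp;
using android::binder::Status;
using android::media::CreateRecordRequest;
using android::media::CreateRecordResponse;
using android::media::IAudioFlingerService;

Status openRecord(const sp<IAudioFlingerService>& af,
                  CreateRecordRequest request) {  // attr/config/clientInfo filled by the caller
    // Seed the fields AudioFlinger is allowed to update.
    request.flags = 0;                  // bitmask indexed by AudioInputFlags
    request.frameCount = 0;             // 0 = let AudioFlinger choose
    request.notificationFrameCount = 0;
    request.selectedDeviceId = 0;       // assumed to mean "no explicit device"
    request.sessionId = 0;              // assumed to mean "allocate a session"

    CreateRecordResponse response;
    Status status = af->createRecord(request, &response);
    if (status.isOk()) {
        // AudioFlinger reports what it actually applied: response.frameCount,
        // response.sampleRate, response.inputId, ... and response.audioRecord
        // is the IAudioRecord used to drive the capture.
    }
    return status;
}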
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
new file mode 100644
index 0000000..014b3ca
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioAttributesInternal;
+import android.media.AudioClient;
+import android.media.AudioConfig;
+import android.media.IAudioTrackCallback;
+import android.media.SharedFileRegion;
+
+/**
+ * CreateTrackRequest contains all input arguments sent by AudioTrack to AudioFlinger
+ * when calling createTrack(), including arguments that will be updated by AudioFlinger
+ * and returned in the CreateTrackResponse object.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackRequest {
+    AudioAttributesInternal attr;
+    AudioConfig config;
+    AudioClient clientInfo;
+    @nullable SharedFileRegion sharedBuffer;
+    int notificationsPerBuffer;
+    float speed;
+    IAudioTrackCallback audioTrackCallback;
+    @utf8InCpp String opPackageName;
+    /** Bitmask, indexed by AudioOutputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+}
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
new file mode 100644
index 0000000..40473fa
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioStreamType;
+import android.media.IAudioTrack;
+
+/**
+ * CreateTrackResponse contains all output arguments returned by AudioFlinger to AudioTrack
+ * when calling createTrack(), including arguments that were passed as I/O for update by
+ * CreateTrackRequest.
+ *
+ * {@hide}
+ */
+parcelable CreateTrackResponse {
+    /** Bitmask, indexed by AudioOutputFlags. */
+    int flags;
+    long frameCount;
+    long notificationFrameCount;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    int sessionId;
+    int sampleRate;
+    AudioStreamType streamType;
+    long afFrameCount;
+    int afSampleRate;
+    int afLatencyMs;
+    /** Interpreted as audio_io_handle_t. */
+    int outputId;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+    /** The newly created track. */
+    @nullable IAudioTrack audioTrack;
+}
diff --git a/media/libaudioclient/aidl/android/media/DeviceRole.aidl b/media/libaudioclient/aidl/android/media/DeviceRole.aidl
new file mode 100644
index 0000000..b741843
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/DeviceRole.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum DeviceRole {
+    /** Default routing rules and priority apply. */
+    NONE = 0,
+    /** Devices are specified as preferred devices. */
+    PREFERRED = 1,
+    /** Devices cannot be used. */
+    DISABLED = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
new file mode 100644
index 0000000..35a3d74
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioUuid;
+
+/**
+ * {@hide}
+ */
+parcelable EffectDescriptor {
+    /** UUID of the OpenSL ES interface implemented by this effect. */
+    AudioUuid type;
+    /** UUID for this particular implementation. */
+    AudioUuid uuid;
+    /** Version of the effect control API implemented. */
+    int apiVersion;
+    /** Effect engine capabilities/requirements flags. */
+    int flags;
+    /** CPU load indication. */
+    int cpuLoad;
+    /** Data memory usage. */
+    int memoryUsage;
+    /** Human readable effect name. */
+    @utf8InCpp String name;
+    /** Human readable effect implementor name. */
+    @utf8InCpp String implementor;
+}
diff --git a/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl b/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
new file mode 100644
index 0000000..ec5b67a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioEncapsulationType;
+import android.media.AudioStandard;
+
+/**
+ * The audio descriptor that describes playback/capture capabilities according to
+ * a particular standard.
+ *
+ * {@hide}
+ */
+parcelable ExtraAudioDescriptor {
+    AudioStandard standard;
+    byte[] audioDescriptor;
+    AudioEncapsulationType encapsulationType;
+}
diff --git a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
new file mode 100644
index 0000000..9696124
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable GetInputForAttrResponse {
+    /** Interpreted as audio_io_handle_t. */
+    int input;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
new file mode 100644
index 0000000..164fb9d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+parcelable GetOutputForAttrResponse {
+    /** Interpreted as audio_io_handle_t. */
+    int output;
+    AudioStreamType stream;
+    /** Interpreted as audio_port_handle_t. */
+    int selectedDeviceId;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+    /** Interpreted as audio_io_handle_t[]. */
+    int[] secondaryOutputs;
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
new file mode 100644
index 0000000..421c31c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioIoConfigEvent;
+import android.media.AudioIoDescriptor;
+
+/**
+ * A callback interface for AudioFlinger.
+ *
+ * {@hide}
+ */
+interface IAudioFlingerClient {
+    oneway void ioConfigChanged(AudioIoConfigEvent event,
+                                in AudioIoDescriptor ioDesc);
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
new file mode 100644
index 0000000..d2cae6d
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMode;
+import android.media.AudioPatch;
+import android.media.AudioPort;
+import android.media.AudioPortConfig;
+import android.media.AudioStreamType;
+import android.media.AudioUniqueIdUse;
+import android.media.AudioUuid;
+import android.media.AudioVibratorInfo;
+import android.media.CreateEffectRequest;
+import android.media.CreateEffectResponse;
+import android.media.CreateRecordRequest;
+import android.media.CreateRecordResponse;
+import android.media.CreateTrackRequest;
+import android.media.CreateTrackResponse;
+import android.media.OpenInputRequest;
+import android.media.OpenInputResponse;
+import android.media.OpenOutputRequest;
+import android.media.OpenOutputResponse;
+import android.media.EffectDescriptor;
+import android.media.IAudioFlingerClient;
+import android.media.IAudioRecord;
+import android.media.IAudioTrack;
+import android.media.MicrophoneInfoData;
+import android.media.RenderPosition;
+import android.media.TrackSecondaryOutputInfo;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+interface IAudioFlingerService {
+    /**
+     * Creates an audio track and registers it with AudioFlinger. Returns null if the track
+     * cannot be created.
+     */
+    CreateTrackResponse createTrack(in CreateTrackRequest request);
+
+    CreateRecordResponse createRecord(in CreateRecordRequest request);
+
+    // FIXME Surprisingly, format/latency don't work for input handles
+
+    /**
+     * Queries the audio hardware state. This state never changes, and therefore can be cached.
+     */
+    int sampleRate(int /* audio_io_handle_t */ ioHandle);
+
+    AudioFormat format(int /* audio_io_handle_t */ output);
+
+    long frameCount(int /* audio_io_handle_t */ ioHandle);
+
+    /**
+     * Return the estimated latency in milliseconds.
+     */
+    int latency(int /* audio_io_handle_t */ output);
+
+    /*
+     * Sets/gets the audio hardware state. This will probably be used by
+     * the preference panel, mostly.
+     */
+    void setMasterVolume(float value);
+    void setMasterMute(boolean muted);
+
+    float masterVolume();
+    boolean masterMute();
+
+    void setMasterBalance(float balance);
+    float getMasterBalance();
+
+    /*
+     * Sets/gets stream type state. This will probably be used by
+     * the preference panel, mostly.
+     */
+    void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
+    void setStreamMute(AudioStreamType stream, boolean muted);
+    float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
+    boolean streamMute(AudioStreamType stream);
+
+    // set audio mode.
+    void setMode(AudioMode mode);
+
+    // mic mute/state
+    void setMicMute(boolean state);
+    boolean getMicMute();
+    void setRecordSilenced(int /* audio_port_handle_t */ portId,
+                           boolean silenced);
+
+    void setParameters(int /* audio_io_handle_t */ ioHandle,
+                       @utf8InCpp String keyValuePairs);
+    @utf8InCpp String getParameters(int /* audio_io_handle_t */ ioHandle,
+                                    @utf8InCpp String keys);
+
+    // Register an object to receive audio input/output change and track notifications.
+    // For a given calling pid, AudioFlinger disregards any registrations after the first.
+    // Thus the IAudioFlingerClient must be a singleton per process.
+    void registerClient(IAudioFlingerClient client);
+
+    // Retrieve the audio recording buffer size in bytes.
+    // FIXME This API assumes a route, and so should be deprecated.
+    long getInputBufferSize(int sampleRate,
+                            AudioFormat format,
+                            int /* audio_channel_mask_t */ channelMask);
+
+    OpenOutputResponse openOutput(in OpenOutputRequest request);
+    int /* audio_io_handle_t */ openDuplicateOutput(int /* audio_io_handle_t */ output1,
+                                                    int /* audio_io_handle_t */ output2);
+    void closeOutput(int /* audio_io_handle_t */ output);
+    void suspendOutput(int /* audio_io_handle_t */ output);
+    void restoreOutput(int /* audio_io_handle_t */ output);
+
+    OpenInputResponse openInput(in OpenInputRequest request);
+    void closeInput(int /* audio_io_handle_t */ input);
+
+    void invalidateStream(AudioStreamType stream);
+
+    void setVoiceVolume(float volume);
+
+    RenderPosition getRenderPosition(int /* audio_io_handle_t */ output);
+
+    int getInputFramesLost(int /* audio_io_handle_t */ ioHandle);
+
+    int /* audio_unique_id_t */ newAudioUniqueId(AudioUniqueIdUse use);
+
+    void acquireAudioSessionId(int /* audio_session_t */ audioSession,
+                               int /* pid_t */ pid,
+                               int /* uid_t */ uid);
+    void releaseAudioSessionId(int /* audio_session_t */ audioSession,
+                               int /* pid_t */ pid);
+
+    int queryNumberEffects();
+
+    EffectDescriptor queryEffect(int index);
+
+    /** preferredTypeFlag is interpreted as a uint32_t with the "effect flag" format. */
+    EffectDescriptor getEffectDescriptor(in AudioUuid effectUUID,
+                                         in AudioUuid typeUUID,
+                                         int preferredTypeFlag);
+
+    CreateEffectResponse createEffect(in CreateEffectRequest request);
+
+    void moveEffects(int /* audio_session_t */ session,
+                     int /* audio_io_handle_t */ srcOutput,
+                     int /* audio_io_handle_t */ dstOutput);
+
+    void setEffectSuspended(int effectId,
+                            int /* audio_session_t */ sessionId,
+                            boolean suspended);
+
+    int /* audio_module_handle_t */ loadHwModule(@utf8InCpp String name);
+
+    // helpers for android.media.AudioManager.getProperty(), see description there for meaning
+    // FIXME move these APIs to AudioPolicy to permit a more accurate implementation
+    // that looks on primary device for a stream with fast flag, primary flag, or first one.
+    int getPrimaryOutputSamplingRate();
+    long getPrimaryOutputFrameCount();
+
+    // Intended for AudioService to inform AudioFlinger of device's low RAM attribute,
+    // and should be called at most once.  For a definition of what "low RAM" means, see
+    // android.app.ActivityManager.isLowRamDevice().  The totalMemory parameter
+    // is obtained from android.app.ActivityManager.MemoryInfo.totalMem.
+    void setLowRamDevice(boolean isLowRamDevice, long totalMemory);
+
+    /* Get attributes for a given audio port */
+    AudioPort getAudioPort(in AudioPort port);
+
+    /* Create an audio patch between several source and sink ports */
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch);
+
+    /* Release an audio patch */
+    void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
+
+    /* List existing audio patches */
+    AudioPatch[] listAudioPatches(int maxCount);
+    /* Set audio port configuration */
+    void setAudioPortConfig(in AudioPortConfig config);
+
+    /* Get the HW synchronization source used for an audio session */
+    int /* audio_hw_sync_t */ getAudioHwSyncForSession(int /* audio_session_t */ sessionId);
+
+    /* Indicate JAVA services are ready (scheduling, power management ...) */
+    oneway void systemReady();
+
+    // Returns the number of frames per audio HAL buffer.
+    long frameCountHAL(int /* audio_io_handle_t */ ioHandle);
+
+    /* List available microphones and their characteristics */
+    MicrophoneInfoData[] getMicrophones();
+
+    void setAudioHalPids(in int[] /* pid_t[] */ pids);
+
+    // Set vibrators' information.
+    // The value will be used to initialize HapticGenerator.
+    void setVibratorInfos(in AudioVibratorInfo[] vibratorInfos);
+
+    // Update secondary outputs.
+    // This usually happens when there is a dynamic policy registered.
+    void updateSecondaryOutputs(
+            in TrackSecondaryOutputInfo[] trackSecondaryOutputInfos);
+}
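A caller typically reaches this interface through the service manager and the cpp-backend proxy. The sketch below is illustrative only: the "media.audio_flinger" service name and the generated signatures (binder::Status return, AIDL return value as a trailing output pointer) are assumptions, not established by this file.

// Illustrative sketch only. Names and signatures are assumptions about generated code.
#include <cstdint>
#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <android/media/IAudioFlingerService.h>

using android::IBinder;
using android::String16;
using android::defaultServiceManager;
using android::interface_cast;
using android::sp;
using android::media::IAudioFlingerService;

void queryPrimaryOutput() {
    sp<IBinder> binder = defaultServiceManager()->getService(String16("media.audio_flinger"));
    if (binder == nullptr) return;
    sp<IAudioFlingerService> af = interface_cast<IAudioFlingerService>(binder);

    int32_t sampleRate = 0;
    int64_t frameCount = 0;
    if (af->getPrimaryOutputSamplingRate(&sampleRate).isOk() &&
        af->getPrimaryOutputFrameCount(&frameCount).isOk()) {
        // sampleRate / frameCount now describe the primary output.
    }
}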
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
new file mode 100644
index 0000000..65bcd82
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -0,0 +1,349 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.AttributionSourceState;
+
+import android.media.audio.common.AudioFormat;
+
+import android.media.AudioAttributesEx;
+import android.media.AudioAttributesInternal;
+import android.media.AudioConfig;
+import android.media.AudioConfigBase;
+import android.media.AudioDevice;
+import android.media.AudioMix;
+import android.media.AudioMode;
+import android.media.AudioOffloadInfo;
+import android.media.AudioOffloadMode;
+import android.media.AudioPatch;
+import android.media.AudioPolicyDeviceState;
+import android.media.AudioPolicyForcedConfig;
+import android.media.AudioPolicyForceUse;
+import android.media.AudioPort;
+import android.media.AudioPortConfig;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.AudioProductStrategy;
+import android.media.AudioSourceType;
+import android.media.AudioStreamType;
+import android.media.AudioUsage;
+import android.media.AudioUuid;
+import android.media.AudioVolumeGroup;
+import android.media.DeviceRole;
+import android.media.EffectDescriptor;
+import android.media.GetInputForAttrResponse;
+import android.media.GetOutputForAttrResponse;
+import android.media.IAudioPolicyServiceClient;
+import android.media.ICaptureStateListener;
+import android.media.Int;
+import android.media.SoundTriggerSession;
+
+/**
+ * IAudioPolicyService interface (see AudioPolicyInterface for method descriptions).
+ *
+ * {@hide}
+ */
+interface IAudioPolicyService {
+    oneway void onNewAudioModulesAvailable();
+
+    void setDeviceConnectionState(in AudioDevice device,
+                                  in AudioPolicyDeviceState state,
+                                  @utf8InCpp String deviceName,
+                                  in AudioFormat encodedFormat);
+
+    AudioPolicyDeviceState getDeviceConnectionState(in AudioDevice device);
+
+    void handleDeviceConfigChange(in AudioDevice device,
+                                  @utf8InCpp String deviceName,
+                                  in AudioFormat encodedFormat);
+
+    void setPhoneState(AudioMode state, int /* uid_t */ uid);
+
+    void setForceUse(AudioPolicyForceUse usage,
+                     AudioPolicyForcedConfig config);
+
+    AudioPolicyForcedConfig getForceUse(AudioPolicyForceUse usage);
+
+    int /* audio_io_handle_t */ getOutput(AudioStreamType stream);
+
+    GetOutputForAttrResponse getOutputForAttr(in AudioAttributesInternal attr,
+                                              int /* audio_session_t */ session,
+                                              in AttributionSourceState attributionSource,
+                                              in AudioConfig config,
+                                              int /* Bitmask, indexed by AudioOutputFlags */ flags,
+                                              int /* audio_port_handle_t */ selectedDeviceId);
+
+    void startOutput(int /* audio_port_handle_t */ portId);
+
+    void stopOutput(int /* audio_port_handle_t */ portId);
+
+    void releaseOutput(int /* audio_port_handle_t */ portId);
+
+    GetInputForAttrResponse getInputForAttr(in AudioAttributesInternal attr,
+                                            int /* audio_io_handle_t */ input,
+                                            int /* audio_unique_id_t */ riid,
+                                            int /* audio_session_t */ session,
+                                            in AttributionSourceState attributionSource,
+                                            in AudioConfigBase config,
+                                            int /* Bitmask, indexed by AudioInputFlags */ flags,
+                                            int /* audio_port_handle_t */ selectedDeviceId);
+
+
+    void startInput(int /* audio_port_handle_t */ portId);
+
+    void stopInput(int /* audio_port_handle_t */ portId);
+
+    void releaseInput(int /* audio_port_handle_t */ portId);
+
+    void initStreamVolume(AudioStreamType stream,
+                          int indexMin,
+                          int indexMax);
+
+    void setStreamVolumeIndex(AudioStreamType stream,
+                              int /* audio_devices_t */ device,
+                              int index);
+
+    int getStreamVolumeIndex(AudioStreamType stream,
+                             int /* audio_devices_t */ device);
+
+    void setVolumeIndexForAttributes(in AudioAttributesInternal attr,
+                                     int /* audio_devices_t */ device,
+                                     int index);
+
+    int getVolumeIndexForAttributes(in AudioAttributesInternal attr,
+                                    int /* audio_devices_t */ device);
+
+    int getMaxVolumeIndexForAttributes(in AudioAttributesInternal attr);
+
+    int getMinVolumeIndexForAttributes(in AudioAttributesInternal attr);
+
+    int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
+
+    int /* bitmask of audio_devices_t */ getDevicesForStream(AudioStreamType stream);
+
+    AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr);
+
+    int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
+
+    void registerEffect(in EffectDescriptor desc,
+                        int /* audio_io_handle_t */ io,
+                        int /* product_strategy_t */ strategy,
+                        int /* audio_session_t */ session,
+                        int id);
+
+    void unregisterEffect(int id);
+
+    void setEffectEnabled(int id, boolean enabled);
+
+    void moveEffectsToIo(in int[] ids, int /* audio_io_handle_t */ io);
+
+    boolean isStreamActive(AudioStreamType stream, int inPastMs);
+
+    boolean isStreamActiveRemotely(AudioStreamType stream, int inPastMs);
+
+    boolean isSourceActive(AudioSourceType source);
+
+    /**
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    EffectDescriptor[] queryDefaultPreProcessing(int /* audio_session_t */ audioSession,
+                                                 inout Int count);
+
+    int /* audio_unique_id_t */ addSourceDefaultEffect(in AudioUuid type,
+                                                       @utf8InCpp String opPackageName,
+                                                       in AudioUuid uuid,
+                                                       int priority,
+                                                       AudioSourceType source);
+
+    int /* audio_unique_id_t */ addStreamDefaultEffect(in AudioUuid type,
+                                                       @utf8InCpp String opPackageName,
+                                                       in AudioUuid uuid,
+                                                       int priority,
+                                                       AudioUsage usage);
+
+    void removeSourceDefaultEffect(int /* audio_unique_id_t */ id);
+
+    void removeStreamDefaultEffect(int /* audio_unique_id_t */ id);
+
+    void setSupportedSystemUsages(in AudioUsage[] systemUsages);
+
+    void setAllowedCapturePolicy(int /* uid_t */ uid,
+                                 int /* Bitmask of AudioFlags */ capturePolicy);
+
+    /**
+     * Check if offload is possible for the given format, stream type, sample rate, bit rate,
+     * duration, and video/streaming properties, and whether the offload property is enabled.
+     */
+    AudioOffloadMode getOffloadSupport(in AudioOffloadInfo info);
+
+    /**
+     * Check if direct playback is possible for given format, sample rate, channel mask and flags.
+     */
+    boolean isDirectOutputSupported(in AudioConfigBase config,
+                                    in AudioAttributesInternal attributes);
+
+    /**
+     * List available audio ports and their attributes. Returns the generation.
+     *
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    int listAudioPorts(AudioPortRole role,
+                       AudioPortType type,
+                       inout Int count,
+                       out AudioPort[] ports);
+
+    /** Get attributes for a given audio port. */
+    AudioPort getAudioPort(in AudioPort port);
+
+    /**
+     * Create an audio patch between several source and sink ports.
+     * The handle argument is used when updating an existing patch.
+     */
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch, int handle);
+
+    /** Release an audio patch. */
+    void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
+
+    /**
+     * List existing audio patches. Returns the generation.
+     *
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    int listAudioPatches(inout Int count, out AudioPatch[] patches);
+
+    /** Set audio port configuration. */
+    void setAudioPortConfig(in AudioPortConfig config);
+
+    void registerClient(IAudioPolicyServiceClient client);
+
+    void setAudioPortCallbacksEnabled(boolean enabled);
+
+    void setAudioVolumeGroupCallbacksEnabled(boolean enabled);
+
+    SoundTriggerSession acquireSoundTriggerSession();
+
+    void releaseSoundTriggerSession(int /* audio_session_t */ session);
+
+    AudioMode getPhoneState();
+
+    void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
+
+    void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
+
+    void removeUidDeviceAffinities(int /* uid_t */ uid);
+
+    void setUserIdDeviceAffinities(int userId, in AudioDevice[] devices);
+
+    void removeUserIdDeviceAffinities(int userId);
+
+    int /* audio_port_handle_t */ startAudioSource(in AudioPortConfig source,
+                                                   in AudioAttributesInternal attributes);
+
+    void stopAudioSource(int /* audio_port_handle_t */ portId);
+
+    void setMasterMono(boolean mono);
+
+    boolean getMasterMono();
+
+    float getStreamVolumeDB(AudioStreamType stream, int index, int /* audio_devices_t */ device);
+
+    /**
+     * Populates supported surround formats and their enabled state in formats and formatsEnabled.
+     *
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    void getSurroundFormats(inout Int count,
+                            out AudioFormat[] formats,
+                            out boolean[] formatsEnabled);
+
+    /**
+     * Populates the surround formats reported by the HDMI devices in formats.
+     *
+     * On input, count represents the maximum length of the returned array.
+     * On output, count is the total number of elements, which may be larger than the array size.
+     * Passing '0' on input and inspecting the value on output is a common way of determining the
+     * number of elements without actually retrieving them.
+     */
+    void getReportedSurroundFormats(inout Int count,
+                                    out AudioFormat[] formats);
+
+    AudioFormat[] getHwOffloadEncodingFormatsSupportedForA2DP();
+
+    void setSurroundFormatEnabled(AudioFormat audioFormat, boolean enabled);
+
+    void setAssistantUid(int /* uid_t */ uid);
+
+    void setA11yServicesUids(in int[] /* uid_t[] */ uids);
+
+    void setCurrentImeUid(int /* uid_t */ uid);
+
+    boolean isHapticPlaybackSupported();
+
+    AudioProductStrategy[] listAudioProductStrategies();
+    int /* product_strategy_t */ getProductStrategyFromAudioAttributes(in AudioAttributesEx aa,
+                                                                       boolean fallbackOnDefault);
+
+    AudioVolumeGroup[] listAudioVolumeGroups();
+    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesEx aa,
+                                                               boolean fallbackOnDefault);
+
+    void setRttEnabled(boolean enabled);
+
+    boolean isCallScreenModeSupported();
+
+    void setDevicesRoleForStrategy(int /* product_strategy_t */ strategy,
+                                   DeviceRole role,
+                                   in AudioDevice[] devices);
+
+    void removeDevicesRoleForStrategy(int /* product_strategy_t */ strategy,
+                                       DeviceRole role);
+
+    AudioDevice[] getDevicesForRoleAndStrategy(int /* product_strategy_t */ strategy,
+                                               DeviceRole role);
+
+    void setDevicesRoleForCapturePreset(AudioSourceType audioSource,
+                                        DeviceRole role,
+                                        in AudioDevice[] devices);
+
+    void addDevicesRoleForCapturePreset(AudioSourceType audioSource,
+                                        DeviceRole role,
+                                        in AudioDevice[] devices);
+
+    void removeDevicesRoleForCapturePreset(AudioSourceType audioSource,
+                                           DeviceRole role,
+                                           in AudioDevice[] devices);
+
+    void clearDevicesRoleForCapturePreset(AudioSourceType audioSource,
+                                          DeviceRole role);
+
+    AudioDevice[] getDevicesForRoleAndCapturePreset(AudioSourceType audioSource,
+                                                    DeviceRole role);
+
+    boolean registerSoundTriggerCaptureStateListener(ICaptureStateListener listener);
+}
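Several methods above (queryDefaultPreProcessing, listAudioPorts, listAudioPatches, getSurroundFormats, getReportedSurroundFormats) share the inout Int count convention described in their comments: the caller bounds the array on the way in and learns the true element count on the way out, so a first call with 0 can size a second call. A sketch of that two-step pattern against listAudioPatches, assuming the standard cpp-backend mapping (inout/out parameters become pointers, the AIDL return value becomes a trailing output pointer); the generated names are assumptions.

// Illustrative sketch only. Generated type names and parameter order are assumptions.
#include <cstdint>
#include <vector>
#include <android/media/AudioPatch.h>
#include <android/media/IAudioPolicyService.h>
#include <android/media/Int.h>

using android::sp;
using android::media::AudioPatch;
using android::media::IAudioPolicyService;
using android::media::Int;

bool listPatches(const sp<IAudioPolicyService>& aps, std::vector<AudioPatch>* out) {
    // Step 1: pass 0 to learn how many patches exist without retrieving them.
    Int count;
    count.value = 0;
    std::vector<AudioPatch> patches;
    int32_t generation = 0;
    if (!aps->listAudioPatches(&count, &patches, &generation).isOk()) return false;

    // Step 2: count.value now holds the total; ask again with that capacity.
    patches.clear();
    if (!aps->listAudioPatches(&count, &patches, &generation).isOk()) return false;
    *out = std::move(patches);
    return true;
}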
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
new file mode 100644
index 0000000..a7782b8
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfigBase;
+import android.media.AudioSourceType;
+import android.media.EffectDescriptor;
+import android.media.RecordClientInfo;
+
+/**
+ * {@hide}
+ */
+oneway interface IAudioPolicyServiceClient {
+    /** Notifies a change of volume group. */
+    void onAudioVolumeGroupChanged(int /* volume_group_t */ group,
+                                   int flags);
+    /** Notifies a change of audio port configuration. */
+    void onAudioPortListUpdate();
+    /** Notifies a change of audio patch configuration. */
+    void onAudioPatchListUpdate();
+    /** Notifies a change in the mixing state of a specific mix in a dynamic audio policy. */
+    void onDynamicPolicyMixStateUpdate(@utf8InCpp String regId,
+                                       int state);
+    /** Notifies a change of audio recording configuration. */
+    void onRecordingConfigurationUpdate(int event,
+                                        in RecordClientInfo clientInfo,
+                                        in AudioConfigBase clientConfig,
+                                        in EffectDescriptor[] clientEffects,
+                                        in AudioConfigBase deviceConfig,
+                                        in EffectDescriptor[] effects,
+                                        int /* audio_patch_handle_t */ patchHandle,
+                                        AudioSourceType source);
+    /** Notifies a change of audio routing. */
+    void onRoutingUpdated();
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index ecf58b6..44ef80b 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -16,9 +16,13 @@
 
 package android.media;
 
-import android.media.MicrophoneInfo;
+import android.media.MicrophoneInfoData;
 
-/* Native code must specify namespace media (media::IAudioRecord) when referring to this class */
+/**
+ * Native code must specify namespace media (media::IAudioRecord) when referring to this class.
+ *
+ * {@hide}
+ */
 interface IAudioRecord {
 
   /* After it's created the track is not active. Call start() to
@@ -35,7 +39,7 @@
 
   /* Get a list of current active microphones.
    */
-  void getActiveMicrophones(out MicrophoneInfo[] activeMicrophones);
+  void getActiveMicrophones(out MicrophoneInfoData[] activeMicrophones);
 
   /* Set the microphone direction (for processing).
    */
@@ -44,4 +48,6 @@
   /* Set the microphone zoom (for processing).
    */
   void setPreferredMicrophoneFieldDimension(float zoom);
+
+  void shareAudioHistory(@utf8InCpp String sharedAudioPackageName, long sharedAudioStartMs);
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioTrack.aidl b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
new file mode 100644
index 0000000..ac58925
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioDualMonoMode;
+import android.media.AudioPlaybackRate;
+import android.media.AudioTimestampInternal;
+import android.media.SharedFileRegion;
+import android.media.VolumeShaperConfiguration;
+import android.media.VolumeShaperOperation;
+import android.media.VolumeShaperState;
+
+/**
+ * Unless otherwise noted, methods returning int expect it to be interpreted as a status_t.
+ *
+ * {@hide}
+ */
+interface IAudioTrack {
+    /** Get this track's control block */
+    @nullable SharedFileRegion getCblk();
+
+    /**
+     * After it's created the track is not active. Call start() to
+     * make it active.
+     */
+    int start();
+
+    /**
+     * Stop a track. If a callback is set, it will cease being called and
+     * obtainBuffer will return an error. Buffers that are already released
+     * will continue to be processed, unless/until flush() is called.
+     */
+    void stop();
+
+    /**
+     * Flush a stopped or paused track. All pending/released buffers are discarded.
+     * This function has no effect if the track is not stopped or paused.
+     */
+    void flush();
+
+    /**
+     * Pause a track. If a callback is set, it will cease being called and
+     * obtainBuffer will return an error. Buffers that are already released
+     * will continue to be processed, unless/until flush() is called.
+     */
+    void pause();
+
+    /**
+     * Attach track auxiliary output to specified effect. Use effectId = 0
+     * to detach track from effect.
+     */
+    int attachAuxEffect(int effectId);
+
+    /** Send parameters to the audio hardware. */
+    int setParameters(@utf8InCpp String keyValuePairs);
+
+    /** Selects the presentation (if available). */
+    int selectPresentation(int presentationId, int programId);
+
+    /** Return NO_ERROR if timestamp is valid. */
+    int getTimestamp(out AudioTimestampInternal timestamp);
+
+    /** Signal the playback thread for a change in control block. */
+    void signal();
+
+    /** Sets the volume shaper. Returns the volume shaper status. */
+    int applyVolumeShaper(in VolumeShaperConfiguration configuration,
+                          in VolumeShaperOperation operation);
+
+    /** Gets the volume shaper state. */
+    @nullable VolumeShaperState getVolumeShaperState(int id);
+
+    /**
+     * Returns DualMonoMode setting associated with this AudioTrack.
+     */
+    AudioDualMonoMode getDualMonoMode();
+
+    /**
+     * Sets DualMonoMode setting.
+     */
+    void setDualMonoMode(in AudioDualMonoMode mode);
+
+    /**
+     * Returns the AudioDescriptionMixLevel.
+     */
+    float getAudioDescriptionMixLevel();
+
+    /**
+     * Sets the AudioDescriptionMixLevel.
+     */
+    void setAudioDescriptionMixLevel(float leveldB);
+
+    /**
+     * Returns the AudioPlaybackRate.
+     */
+    AudioPlaybackRate getPlaybackRateParameters();
+
+    /**
+     * Sets the AudioPlaybackRate.
+     */
+    void setPlaybackRateParameters(in AudioPlaybackRate playbackRate);
+}
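As the header comment notes, int results on this interface are to be read as a status_t, so a caller checks two layers: the binder transaction status and the transported status_t. A sketch assuming the cpp-backend generated types (assumptions, not part of this change).

// Illustrative sketch only. Generated type names are assumptions.
#include <cstdint>
#include <utils/Errors.h>
#include <android/media/AudioTimestampInternal.h>
#include <android/media/IAudioTrack.h>

using android::NO_ERROR;
using android::sp;
using android::status_t;
using android::binder::Status;
using android::media::AudioTimestampInternal;
using android::media::IAudioTrack;

bool readTimestamp(const sp<IAudioTrack>& track, AudioTimestampInternal* ts) {
    int32_t result = 0;  // status_t carried in the AIDL int return value
    Status binderStatus = track->getTimestamp(ts, &result);
    return binderStatus.isOk() && static_cast<status_t>(result) == NO_ERROR;
}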
diff --git a/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl b/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
index 21553b5..f593e22 100644
--- a/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioTrackCallback.aidl
@@ -17,7 +17,7 @@
 package android.media;
 
 /**
- * @hide
+ * {@hide}
  */
 interface IAudioTrackCallback {
     oneway void onCodecFormatChanged(in byte[] audioMetadata);
diff --git a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
index 8502282..3b2206a 100644
--- a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
+++ b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
@@ -16,6 +16,9 @@
 
 package android.media;
 
+/**
+ * {@hide}
+ */
 interface ICaptureStateListener {
     void setCaptureState(boolean active);
 }
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
new file mode 100644
index 0000000..813cd5c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SharedFileRegion;
+
+/**
+ * The IEffect interface enables control of the effect module activity and parameters.
+ *
+ * {@hide}
+ */
+interface IEffect {
+    /**
+     * Activates the effect module by connecting it to the audio path.
+     * @return a status_t code.
+     */
+    int enable();
+
+    /**
+     * Deactivates the effect module by disconnecting it from the audio path.
+     * @return a status_t code.
+     */
+    int disable();
+
+    /**
+     * Sends control, reads or writes parameters. Same behavior as the command() method in the
+     * effect control interface.
+     * Refer to system/audio_effect.h for a description of the valid command codes and their
+     * associated parameter and return messages. The cmdData and response parameters are expected to
+     * contain the respective types in a standard C memory layout.
+     *
+     * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+     */
+    int command(int cmdCode, in byte[] cmdData, int maxResponseSize, out byte[] response);
+
+    /**
+     * Disconnects the IEffect interface from the effect module.
+     * This will also delete the effect module and release the effect engine in the library if this
+     * is the last client disconnected. To release control of the effect module, the application can
+     * disconnect or delete the IEffect interface.
+     */
+    void disconnect();
+
+    /**
+     * Returns a shared memory area used to pass multiple parameters to the effect
+     * module without multiplying binder calls.
+     *
+     * TODO(ytai): Explain how this should be used exactly.
+     */
+    SharedFileRegion getCblk();
+}
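Because command() moves C-layout structs through byte arrays, callers generally memcpy a struct into cmdData and size maxResponseSize for the expected reply. The sketch below uses a purely hypothetical command code and payload struct; the real codes and layouts live in system/audio_effect.h and are deliberately not restated here.

// Illustrative sketch only. kHypotheticalCmdCode and HypotheticalPayload are placeholders,
// not real definitions from system/audio_effect.h.
#include <cstdint>
#include <cstring>
#include <vector>
#include <android/media/IEffect.h>

using android::sp;
using android::binder::Status;
using android::media::IEffect;

struct HypotheticalPayload {  // a plain C-layout struct, as command() expects
    uint32_t param;
    uint32_t value;
};

int32_t sendCommand(const sp<IEffect>& effect, uint32_t param, uint32_t value) {
    constexpr int32_t kHypotheticalCmdCode = 0;  // placeholder command code

    HypotheticalPayload payload{param, value};
    std::vector<uint8_t> cmdData(sizeof(payload));
    std::memcpy(cmdData.data(), &payload, sizeof(payload));

    std::vector<uint8_t> response;
    int32_t result = 0;  // status_t carried in the AIDL int return value
    Status status = effect->command(kHypotheticalCmdCode, cmdData,
                                    /*maxResponseSize=*/sizeof(int32_t),
                                    &response, &result);
    return status.isOk() ? result : -1;
}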
diff --git a/media/libaudioclient/aidl/android/media/IEffectClient.aidl b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
new file mode 100644
index 0000000..3b6bcf1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IEffectClient.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A callback interface for getting effect-related notifications.
+ *
+ * {@hide}
+ */
+interface IEffectClient {
+    /**
+     * Called whenever the status of granting control over the effect to the application
+     * has changed.
+     * @param controlGranted true iff the application has control of the effect module.
+     */
+    oneway void controlStatusChanged(boolean controlGranted);
+
+    /**
+     * Called whenever the effect has been enabled or disabled. Received only if the client is not
+     * currently controlling the effect.
+     * @param enabled true if the effect module has been activated, false if deactivated.
+     */
+    oneway void enableStatusChanged(boolean enabled);
+
+    /**
+     * A command has been sent to the effect engine. Received only if the client is not currently
+     * controlling the effect. See IEffect.command() for a description of buffer contents.
+     *
+     * TODO(ytai): replace opaque byte arrays with strongly typed parameters.
+     */
+    oneway void commandExecuted(int cmdCode, in byte[] cmdData, in byte[] replyData);
+}
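The receiving side of these notifications is a native binder object. A minimal listener sketch, assuming the cpp backend generates a BnEffectClient base class for this interface (an assumption consistent with the backend's usual naming); the oneway methods still return a binder Status in the generated C++ signatures.

// Illustrative sketch only. BnEffectClient and the method signatures are assumptions.
#include <cstdint>
#include <vector>
#include <android/media/BnEffectClient.h>

using android::binder::Status;

class MyEffectListener : public android::media::BnEffectClient {
public:
    Status controlStatusChanged(bool controlGranted) override {
        mHasControl = controlGranted;  // remember whether we currently own the effect
        return Status::ok();
    }
    Status enableStatusChanged(bool enabled) override {
        mEnabled = enabled;
        return Status::ok();
    }
    Status commandExecuted(int32_t cmdCode, const std::vector<uint8_t>& cmdData,
                           const std::vector<uint8_t>& replyData) override {
        (void)cmdCode; (void)cmdData; (void)replyData;  // observe-only client
        return Status::ok();
    }

private:
    bool mHasControl = false;
    bool mEnabled = false;
};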
diff --git a/media/libaudioclient/aidl/android/media/IPlayer.aidl b/media/libaudioclient/aidl/android/media/IPlayer.aidl
index a90fcdd..43bb7f3 100644
--- a/media/libaudioclient/aidl/android/media/IPlayer.aidl
+++ b/media/libaudioclient/aidl/android/media/IPlayer.aidl
@@ -16,11 +16,11 @@
 
 package android.media;
 
-import android.media.VolumeShaper.Configuration;
-import android.media.VolumeShaper.Operation;
+import android.media.VolumeShaperConfiguration;
+import android.media.VolumeShaperOperation;
 
 /**
- * @hide
+ * {@hide}
  */
 interface IPlayer {
     oneway void start();
@@ -29,6 +29,6 @@
     oneway void setVolume(float vol);
     oneway void setPan(float pan);
     oneway void setStartDelayMs(int delayMs);
-    oneway void applyVolumeShaper(in Configuration configuration,
-                                  in Operation operation);
+    oneway void applyVolumeShaper(in VolumeShaperConfiguration configuration,
+                                  in VolumeShaperOperation operation);
 }
diff --git a/media/libaudioclient/aidl/android/media/Int.aidl b/media/libaudioclient/aidl/android/media/Int.aidl
new file mode 100644
index 0000000..24f4d62
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/Int.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A simple parcelable wrapper around an 'int', so that it can be used as an inout parameter,
+ * be made @nullable, etc.
+ *
+ * {@hide}
+ */
+parcelable Int {
+    int value;
+}
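On the C++ side this parcelable should surface as a small generated struct with a single `value` field, so callers pass it by pointer for out/inout arguments. A minimal sketch against a hypothetical AIDL method `fetchCount(inout Int count)` (the method name and header path are illustrative only):

```cpp
#include <android/media/Int.h>  // assumed generated header location

// Round-trip a frame count through the hypothetical inout parameter.
int32_t negotiateCount(int32_t wanted) {
    android::media::Int count;
    count.value = wanted;             // in: what the client asks for
    // service->fetchCount(&count);   // hypothetical binder call that writes back into 'count'
    return count.value;               // out: what the service granted
}
```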
diff --git a/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl b/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl
deleted file mode 100644
index d6e46cb..0000000
--- a/media/libaudioclient/aidl/android/media/MicrophoneInfo.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-parcelable MicrophoneInfo cpp_header "media/MicrophoneInfo.h";
diff --git a/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
new file mode 100644
index 0000000..2e55526
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioDevice;
+import android.media.AudioSourceType;
+
+/**
+ * {@hide}
+ */
+parcelable OpenInputRequest {
+    /** Interpreted as audio_module_handle_t. */
+    int module;
+    /** Interpreted as audio_io_handle_t. */
+    int input;
+    AudioConfig config;
+    AudioDevice device;
+    AudioSourceType source;
+    /** Bitmask, indexed by AudioInputFlag. */
+    int flags;
+}
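The fuzzer later in this change (`invokeAudioInputDevice()` in `audioflinger_fuzzer.cpp`) populates this parcelable from legacy audio types via the `legacy2aidl_*` helpers. A condensed sketch of that pattern (the function name and the generated-header include are illustrative):

```cpp
#include <media/AidlConversion.h>          // legacy2aidl_* helpers; pulls in AidlConversionUtil.h
#include <media/AudioDeviceTypeAddr.h>
#include <android/media/OpenInputRequest.h>  // assumed generated header location

using namespace android;

// Convert legacy audio types into an OpenInputRequest, propagating conversion errors.
status_t fillOpenInputRequest(audio_module_handle_t module, audio_io_handle_t input,
                              const audio_config_t& config, const AudioDeviceTypeAddr& device,
                              audio_source_t source, audio_input_flags_t flags,
                              media::OpenInputRequest* request) {
    request->module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
    request->input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
    request->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
    request->device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
    request->source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
    request->flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
    return NO_ERROR;
}
```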
diff --git a/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
new file mode 100644
index 0000000..b613ba5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioDevice;
+
+/**
+ * {@hide}
+ */
+parcelable OpenInputResponse {
+    /** Interpreted as audio_io_handle_t. */
+    int input;
+    AudioConfig config;
+    AudioDevice device;
+}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
new file mode 100644
index 0000000..06b12e9
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+import android.media.AudioPort;
+
+/**
+ * {@hide}
+ */
+parcelable OpenOutputRequest {
+    /** Interpreted as audio_module_handle_t. */
+    int module;
+    AudioConfig config;
+    /** Type must be DEVICE. */
+    AudioPort device;
+    /** Bitmask, indexed by AudioOutputFlag. */
+    int flags;
+}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
new file mode 100644
index 0000000..a051969
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioConfig;
+
+/**
+ * {@hide}
+ */
+parcelable OpenOutputResponse {
+    /** Interpreted as audio_io_handle_t. */
+    int output;
+    AudioConfig config;
+    int latencyMs;
+    /** Bitmask, indexed by AudioOutputFlag. */
+    int flags;
+}
diff --git a/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
new file mode 100644
index 0000000..3280460
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+
+/**
+ * {@hide}
+ */
+parcelable RecordClientInfo {
+    /** Interpreted as audio_unique_id_t. */
+    int riid;
+    /** Interpreted as uid_t. */
+    int uid;
+    /** Interpreted as audio_session_t. */
+    int session;
+    AudioSourceType source;
+    /** Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean silenced;
+}
diff --git a/media/libaudioclient/aidl/android/media/RenderPosition.aidl b/media/libaudioclient/aidl/android/media/RenderPosition.aidl
new file mode 100644
index 0000000..98dc17a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/RenderPosition.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable RenderPosition {
+    int halFrames;
+    int dspFrames;
+}
diff --git a/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl b/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl
new file mode 100644
index 0000000..a829e59
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable SoundTriggerSession {
+    /** Interpreted as audio_session_t. */
+    int session;
+    /** Interpreted as audio_io_handle_t. */
+    int ioHandle;
+    /** Interpreted as audio_devices_t. */
+    int device;
+}
diff --git a/media/libaudioclient/aidl/android/media/TrackSecondaryOutputInfo.aidl b/media/libaudioclient/aidl/android/media/TrackSecondaryOutputInfo.aidl
new file mode 100644
index 0000000..113328e
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/TrackSecondaryOutputInfo.aidl
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * This class contains the port handle for a track and the handles for all of the track's
+ * secondary outputs.
+ * {@hide}
+ */
+parcelable TrackSecondaryOutputInfo {
+    int portId; // audio_port_handle_t
+    int[] secondaryOutputIds; // audio_io_handle_t[]
+}
\ No newline at end of file
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
deleted file mode 100644
index fd0e60f..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Configuration.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Configuration cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
deleted file mode 100644
index 4290d9d..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/Operation.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable Operation cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl b/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
deleted file mode 100644
index f6a22b8..0000000
--- a/media/libaudioclient/aidl/android/media/VolumeShaper/State.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.VolumeShaper;
-
-parcelable State cpp_header "media/VolumeShaper.h";
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
new file mode 100644
index 0000000..b290aa8
--- /dev/null
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "audioflinger_fuzzer",
+    srcs: [
+        "audioflinger_fuzzer.cpp",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "libaudioclient",
+        "libbase",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libcutils",
+        "libjsoncpp",
+        "liblog",
+        "libmediametrics",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+    ],
+    shared_libs: [
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudioflinger",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libbinder",
+        "libdl",
+        "libmediautils",
+        "libnblog",
+        "libutils",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    header_libs: [
+        "libaudiofoundation_headers",
+        "libmedia_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libaudioclient/fuzzer/README.md b/media/libaudioclient/fuzzer/README.md
new file mode 100644
index 0000000..ada6c49
--- /dev/null
+++ b/media/libaudioclient/fuzzer/README.md
@@ -0,0 +1,80 @@
+# Fuzzer for libaudioflinger
+
+## Plugin Design Considerations
+The fuzzer plugin for libaudioflinger is designed based on an understanding of the
+library and aims to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer. The fuzzer
+covers libaudioflinger APIs as called from libaudioclient through IPC.
+
+libaudioflinger supports the following parameters:
+1. Unique IDs (parameter name: `uniqueId`)
+2. Audio Mode (parameter name: `mode`)
+3. Session ID (parameter name: `sessionId`)
+4. Encapsulation Mode (parameter name: `encapsulationMode`)
+5. Audio Port Role (parameter name: `portRole`)
+6. Audio Port Type (parameter name: `portType`)
+7. Audio Stream Type (parameter name: `streamType`)
+8. Audio Format (parameter name: `format`)
+9. Audio Channel Mask (parameter name: `channelMask`)
+10. Usage (parameter name: `usage`)
+11. Audio Content Type (parameter name: `contentType`)
+12. Input Source (parameter name: `inputSource`)
+13. Input Flags (parameter name: `inputFlags`)
+14. Output Flags (parameter name: `outputFlags`)
+15. Audio Gain Mode (parameter name: `gainMode`)
+16. Audio Device (parameter name: `device`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `uniqueId`   | 0. `AUDIO_UNIQUE_ID_USE_UNSPECIFIED` 1. `AUDIO_UNIQUE_ID_USE_SESSION` 2. `AUDIO_UNIQUE_ID_USE_MODULE` 3. `AUDIO_UNIQUE_ID_USE_EFFECT` 4. `AUDIO_UNIQUE_ID_USE_PATCH` 5. `AUDIO_UNIQUE_ID_USE_OUTPUT` 6. `AUDIO_UNIQUE_ID_USE_INPUT` 7. `AUDIO_UNIQUE_ID_USE_CLIENT` 8. `AUDIO_UNIQUE_ID_USE_MAX` | Value obtained from FuzzedDataProvider|
+| `mode`   | 0. `AUDIO_MODE_INVALID` 1. `AUDIO_MODE_CURRENT` 2. `AUDIO_MODE_NORMAL` 3. `AUDIO_MODE_RINGTONE` 4. `AUDIO_MODE_IN_CALL` 5. `AUDIO_MODE_IN_COMMUNICATION` 6. `AUDIO_MODE_CALL_SCREEN` | Value obtained from FuzzedDataProvider|
+| `sessionId`   | 0. `AUDIO_SESSION_NONE` 1. `AUDIO_SESSION_OUTPUT_STAGE` 2. `AUDIO_SESSION_DEVICE` | Value obtained from FuzzedDataProvider|
+| `encapsulationMode`   | 0. `AUDIO_ENCAPSULATION_MODE_NONE` 1. `AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM` 2. `AUDIO_ENCAPSULATION_MODE_HANDLE` | Value obtained from FuzzedDataProvider|
+| `portRole`   | 0. `AUDIO_PORT_ROLE_NONE` 1. `AUDIO_PORT_ROLE_SOURCE` 2. `AUDIO_PORT_ROLE_SINK` | Value obtained from FuzzedDataProvider|
+| `portType`   | 0. `AUDIO_PORT_TYPE_NONE` 1. `AUDIO_PORT_TYPE_DEVICE` 2. `AUDIO_PORT_TYPE_MIX` 3. `AUDIO_PORT_TYPE_SESSION`| Value obtained from FuzzedDataProvider|
+| `streamType` | 15 values of type `audio_stream_type_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `format` | 77 values of type `audio_format_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `channelMask` | 83 values of type `audio_channel_mask_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `usage` | 22 values of type `audio_usage_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `contentType` | 5 values of type `audio_content_type_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `inputSource` | 14 values of type `audio_source_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `inputFlags` | 9 values of type `audio_input_flags_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `outputFlags` | 16 values of type `audio_output_flags_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `gainMode` | 3 values of type `audio_gain_mode_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `device` | 66 values of type `audio_devices_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
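+
+For instance, enum-valued parameters are chosen by using the fuzz input only as an index into a
+table of valid values, along the lines of this minimal sketch (the fuzzer's own helper in
+`audioflinger_fuzzer.cpp` follows the same pattern):
+
+```cpp
+#include "fuzzer/FuzzedDataProvider.h"
+
+// Pick one element of a fixed array, using fuzz input only to select the index.
+template <typename T, size_t N>
+T pickOne(FuzzedDataProvider *fdp, const T (&values)[N]) {
+    return values[fdp->ConsumeIntegralInRange<size_t>(0, N - 1)];
+}
+```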
+
+##### Maximize utilization of input data
+The plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the audioflinger_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) audioflinger_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/audioflinger_fuzzer/audioflinger_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.co
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
new file mode 100644
index 0000000..d03c6fa
--- /dev/null
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -0,0 +1,739 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/**
+ * NOTE
+ * 1) The inputs to AudioFlinger binder calls are fuzzed in this fuzzer.
+ * 2) AudioFlinger crashes caused by the fuzzer are detected by the Binder
+ *    DeathRecipient: the fuzzer aborts if AudioFlinger dies.
+ */
+
+#include <android_audio_policy_configuration_V7_0-enums.h>
+#include <android/content/AttributionSourceState.h>
+#include <binder/IServiceManager.h>
+#include <binder/MemoryDealer.h>
+#include <media/AidlConversion.h>
+#include <media/AudioEffect.h>
+#include <media/AudioRecord.h>
+#include <media/AudioSystem.h>
+#include <media/AudioTrack.h>
+#include <media/IAudioFlinger.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+#define MAX_STRING_LENGTH 256
+#define MAX_ARRAY_LENGTH 256
+
+constexpr int32_t kMinSampleRateHz = 4000;
+constexpr int32_t kMaxSampleRateHz = 192000;
+constexpr int32_t kSampleRateUnspecified = 0;
+
+using namespace std;
+using namespace android;
+
+namespace xsd {
+using namespace ::android::audio::policy::configuration::V7_0;
+}
+
+using android::content::AttributionSourceState;
+
+constexpr audio_unique_id_use_t kUniqueIds[] = {
+    AUDIO_UNIQUE_ID_USE_UNSPECIFIED, AUDIO_UNIQUE_ID_USE_SESSION, AUDIO_UNIQUE_ID_USE_MODULE,
+    AUDIO_UNIQUE_ID_USE_EFFECT,      AUDIO_UNIQUE_ID_USE_PATCH,   AUDIO_UNIQUE_ID_USE_OUTPUT,
+    AUDIO_UNIQUE_ID_USE_INPUT,       AUDIO_UNIQUE_ID_USE_CLIENT,  AUDIO_UNIQUE_ID_USE_MAX,
+};
+
+constexpr audio_mode_t kModes[] = {
+    AUDIO_MODE_INVALID, AUDIO_MODE_CURRENT,          AUDIO_MODE_NORMAL,     AUDIO_MODE_RINGTONE,
+    AUDIO_MODE_IN_CALL, AUDIO_MODE_IN_COMMUNICATION, AUDIO_MODE_CALL_SCREEN};
+
+constexpr audio_session_t kSessionId[] = {AUDIO_SESSION_NONE, AUDIO_SESSION_OUTPUT_STAGE,
+                                          AUDIO_SESSION_DEVICE};
+
+constexpr audio_encapsulation_mode_t kEncapsulation[] = {
+    AUDIO_ENCAPSULATION_MODE_NONE,
+    AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM,
+    AUDIO_ENCAPSULATION_MODE_HANDLE,
+};
+
+constexpr audio_port_role_t kPortRoles[] = {
+    AUDIO_PORT_ROLE_NONE,
+    AUDIO_PORT_ROLE_SOURCE,
+    AUDIO_PORT_ROLE_SINK,
+};
+
+constexpr audio_port_type_t kPortTypes[] = {
+    AUDIO_PORT_TYPE_NONE,
+    AUDIO_PORT_TYPE_DEVICE,
+    AUDIO_PORT_TYPE_MIX,
+    AUDIO_PORT_TYPE_SESSION,
+};
+
+template <typename T, typename X, typename FUNC>
+std::vector<T> getFlags(const xsdc_enum_range<X> &range, const FUNC &func,
+                        const std::string &findString = {}) {
+    std::vector<T> vec;
+    for (const auto &xsdEnumVal : range) {
+        T enumVal;
+        std::string enumString = toString(xsdEnumVal);
+        if (enumString.find(findString) != std::string::npos &&
+            func(enumString.c_str(), &enumVal)) {
+            vec.push_back(enumVal);
+        }
+    }
+    return vec;
+}
+
+static const std::vector<audio_stream_type_t> kStreamtypes =
+    getFlags<audio_stream_type_t, xsd::AudioStreamType, decltype(audio_stream_type_from_string)>(
+        xsdc_enum_range<xsd::AudioStreamType>{}, audio_stream_type_from_string);
+
+static const std::vector<audio_format_t> kFormats =
+    getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
+        xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string);
+
+static const std::vector<audio_channel_mask_t> kChannelMasks =
+    getFlags<audio_channel_mask_t, xsd::AudioChannelMask, decltype(audio_channel_mask_from_string)>(
+        xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string);
+
+static const std::vector<audio_usage_t> kUsages =
+    getFlags<audio_usage_t, xsd::AudioUsage, decltype(audio_usage_from_string)>(
+        xsdc_enum_range<xsd::AudioUsage>{}, audio_usage_from_string);
+
+static const std::vector<audio_content_type_t> kContentType =
+    getFlags<audio_content_type_t, xsd::AudioContentType, decltype(audio_content_type_from_string)>(
+        xsdc_enum_range<xsd::AudioContentType>{}, audio_content_type_from_string);
+
+static const std::vector<audio_source_t> kInputSources =
+    getFlags<audio_source_t, xsd::AudioSource, decltype(audio_source_from_string)>(
+        xsdc_enum_range<xsd::AudioSource>{}, audio_source_from_string);
+
+static const std::vector<audio_gain_mode_t> kGainModes =
+    getFlags<audio_gain_mode_t, xsd::AudioGainMode, decltype(audio_gain_mode_from_string)>(
+        xsdc_enum_range<xsd::AudioGainMode>{}, audio_gain_mode_from_string);
+
+static const std::vector<audio_devices_t> kDevices =
+    getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
+        xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string);
+
+static const std::vector<audio_input_flags_t> kInputFlags =
+    getFlags<audio_input_flags_t, xsd::AudioInOutFlag, decltype(audio_input_flag_from_string)>(
+        xsdc_enum_range<xsd::AudioInOutFlag>{}, audio_input_flag_from_string, "_INPUT_");
+
+static const std::vector<audio_output_flags_t> kOutputFlags =
+    getFlags<audio_output_flags_t, xsd::AudioInOutFlag, decltype(audio_output_flag_from_string)>(
+        xsdc_enum_range<xsd::AudioInOutFlag>{}, audio_output_flag_from_string, "_OUTPUT_");
+
+template <typename T, size_t size>
+T getValue(FuzzedDataProvider *fdp, const T (&arr)[size]) {
+    return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
+}
+
+template <typename T>
+T getValue(FuzzedDataProvider *fdp, std::vector<T> vec) {
+    return vec[fdp->ConsumeIntegralInRange<int32_t>(0, vec.size() - 1)];
+}
+
+int32_t getSampleRate(FuzzedDataProvider *fdp) {
+    if (fdp->ConsumeBool()) {
+        return fdp->ConsumeIntegralInRange<int32_t>(kMinSampleRateHz, kMaxSampleRateHz);
+    }
+    return kSampleRateUnspecified;
+}
+
+class DeathNotifier : public IBinder::DeathRecipient {
+   public:
+    void binderDied(const wp<IBinder> &) { abort(); }
+};
+
+class AudioFlingerFuzzer {
+   public:
+    AudioFlingerFuzzer(const uint8_t *data, size_t size);
+    void process();
+
+   private:
+    FuzzedDataProvider mFdp;
+    void invokeAudioTrack();
+    void invokeAudioRecord();
+    status_t invokeAudioEffect();
+    void invokeAudioSystem();
+    status_t invokeAudioInputDevice();
+    status_t invokeAudioOutputDevice();
+    void invokeAudioPatch();
+
+    sp<DeathNotifier> mDeathNotifier;
+};
+
+AudioFlingerFuzzer::AudioFlingerFuzzer(const uint8_t *data, size_t size) : mFdp(data, size) {
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.audio_flinger"));
+    if (binder == nullptr) {
+        return;
+    }
+    mDeathNotifier = new DeathNotifier();
+    binder->linkToDeath(mDeathNotifier);
+}
+
+void AudioFlingerFuzzer::invokeAudioTrack() {
+    uint32_t sampleRate = getSampleRate(&mFdp);
+    audio_format_t format = getValue(&mFdp, kFormats);
+    audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
+    size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
+    int32_t notificationFrames = mFdp.ConsumeIntegral<int32_t>();
+    uint32_t useSharedBuffer = mFdp.ConsumeBool();
+    audio_output_flags_t flags = getValue(&mFdp, kOutputFlags);
+    audio_session_t sessionId = getValue(&mFdp, kSessionId);
+    audio_usage_t usage = getValue(&mFdp, kUsages);
+    audio_content_type_t contentType = getValue(&mFdp, kContentType);
+    audio_attributes_t attributes = {};
+    sp<IMemory> sharedBuffer;
+    sp<MemoryDealer> heap = nullptr;
+    audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+
+    bool offload = false;
+    bool fast = ((flags & AUDIO_OUTPUT_FLAG_FAST) != 0);
+
+    if (useSharedBuffer != 0) {
+        size_t heapSize = audio_channel_count_from_out_mask(channelMask) *
+                          audio_bytes_per_sample(format) * frameCount;
+        heap = new MemoryDealer(heapSize, "AudioTrack Heap Base");
+        sharedBuffer = heap->allocate(heapSize);
+        frameCount = 0;
+        notificationFrames = 0;
+    }
+    if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+        offloadInfo.sample_rate = sampleRate;
+        offloadInfo.channel_mask = channelMask;
+        offloadInfo.format = format;
+        offload = true;
+    }
+
+    attributes.content_type = contentType;
+    attributes.usage = usage;
+    sp<AudioTrack> track = new AudioTrack();
+
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    track->set(AUDIO_STREAM_DEFAULT, sampleRate, format, channelMask, frameCount, flags, nullptr,
+               nullptr, notificationFrames, sharedBuffer, false, sessionId,
+               ((fast && sharedBuffer == 0) || offload) ? AudioTrack::TRANSFER_CALLBACK
+                                                        : AudioTrack::TRANSFER_DEFAULT,
+               offload ? &offloadInfo : nullptr, attributionSource, &attributes, false, 1.0f,
+               AUDIO_PORT_HANDLE_NONE);
+
+    status_t status = track->initCheck();
+    if (status != NO_ERROR) {
+        track.clear();
+        return;
+    }
+    track->getSampleRate();
+    track->latency();
+    track->getUnderrunCount();
+    track->streamType();
+    track->channelCount();
+    track->getNotificationPeriodInFrames();
+    uint32_t bufferSizeInFrames = mFdp.ConsumeIntegral<uint32_t>();
+    track->setBufferSizeInFrames(bufferSizeInFrames);
+    track->getBufferSizeInFrames();
+
+    int64_t duration = mFdp.ConsumeIntegral<int64_t>();
+    track->getBufferDurationInUs(&duration);
+    sp<IMemory> sharedBuffer2 = track->sharedBuffer();
+    track->setCallerName(mFdp.ConsumeRandomLengthString(MAX_STRING_LENGTH));
+
+    track->setVolume(mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>());
+    track->setVolume(mFdp.ConsumeFloatingPoint<float>());
+    track->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
+
+    float auxEffectSendLevel;
+    track->getAuxEffectSendLevel(&auxEffectSendLevel);
+    track->setSampleRate(getSampleRate(&mFdp));
+    track->getSampleRate();
+    track->getOriginalSampleRate();
+
+    AudioPlaybackRate playbackRate = {};
+    playbackRate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+    playbackRate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+    track->setPlaybackRate(playbackRate);
+    track->getPlaybackRate();
+    track->setLoop(mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeIntegral<uint32_t>(),
+                   mFdp.ConsumeIntegral<uint32_t>());
+    track->setMarkerPosition(mFdp.ConsumeIntegral<uint32_t>());
+
+    uint32_t marker = {};
+    track->getMarkerPosition(&marker);
+    track->setPositionUpdatePeriod(mFdp.ConsumeIntegral<uint32_t>());
+
+    uint32_t updatePeriod = {};
+    track->getPositionUpdatePeriod(&updatePeriod);
+    track->setPosition(mFdp.ConsumeIntegral<uint32_t>());
+    uint32_t position = {};
+    track->getPosition(&position);
+    track->getBufferPosition(&position);
+    track->reload();
+    track->start();
+    track->pause();
+    track->flush();
+    track->stop();
+    track->stopped();
+}
+
+void AudioFlingerFuzzer::invokeAudioRecord() {
+    int32_t notificationFrames = mFdp.ConsumeIntegral<int32_t>();
+    uint32_t sampleRate = getSampleRate(&mFdp);
+    size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
+    audio_format_t format = getValue(&mFdp, kFormats);
+    audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
+    audio_input_flags_t flags = getValue(&mFdp, kInputFlags);
+    audio_session_t sessionId = getValue(&mFdp, kSessionId);
+    audio_source_t inputSource = getValue(&mFdp, kInputSources);
+
+    audio_attributes_t attributes = {};
+    bool fast = ((flags & AUDIO_INPUT_FLAG_FAST) != 0);
+
+    attributes.source = inputSource;
+
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = std::string(mFdp.ConsumeRandomLengthString().c_str());
+    attributionSource.token = sp<BBinder>::make();
+    sp<AudioRecord> record = new AudioRecord(attributionSource);
+    record->set(AUDIO_SOURCE_DEFAULT, sampleRate, format, channelMask, frameCount, nullptr, nullptr,
+                notificationFrames, false, sessionId,
+                fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT, flags,
+                getuid(), getpid(), &attributes, AUDIO_PORT_HANDLE_NONE);
+    status_t status = record->initCheck();
+    if (status != NO_ERROR) {
+        return;
+    }
+    record->latency();
+    record->format();
+    record->channelCount();
+    record->frameCount();
+    record->frameSize();
+    record->inputSource();
+    record->getNotificationPeriodInFrames();
+    record->start();
+    record->stop();
+    record->stopped();
+
+    uint32_t marker = mFdp.ConsumeIntegral<uint32_t>();
+    record->setMarkerPosition(marker);
+    record->getMarkerPosition(&marker);
+
+    uint32_t updatePeriod = mFdp.ConsumeIntegral<uint32_t>();
+    record->setPositionUpdatePeriod(updatePeriod);
+    record->getPositionUpdatePeriod(&updatePeriod);
+
+    uint32_t position;
+    record->getPosition(&position);
+
+    ExtendedTimestamp timestamp;
+    record->getTimestamp(&timestamp);
+    record->getSessionId();
+    record->getCallerName();
+    android::AudioRecord::Buffer audioBuffer;
+    int32_t waitCount = mFdp.ConsumeIntegral<int32_t>();
+    size_t nonContig = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
+    audioBuffer.frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
+    record->obtainBuffer(&audioBuffer, waitCount, &nonContig);
+    bool blocking = false;
+    record->read(audioBuffer.raw, audioBuffer.size, blocking);
+    record->getInputFramesLost();
+    record->getFlags();
+
+    std::vector<media::MicrophoneInfo> activeMicrophones;
+    record->getActiveMicrophones(&activeMicrophones);
+    record->releaseBuffer(&audioBuffer);
+
+    audio_port_handle_t deviceId =
+        static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>());
+    record->setInputDevice(deviceId);
+    record->getInputDevice();
+    record->getRoutedDeviceId();
+    record->getPortId();
+}
+
+struct EffectClient : public android::media::BnEffectClient {
+    EffectClient() {}
+    binder::Status controlStatusChanged(bool controlGranted __unused) override {
+        return binder::Status::ok();
+    }
+    binder::Status enableStatusChanged(bool enabled __unused) override {
+        return binder::Status::ok();
+    }
+    binder::Status commandExecuted(int32_t cmdCode __unused,
+                                   const std::vector<uint8_t> &cmdData __unused,
+                                   const std::vector<uint8_t> &replyData __unused) override {
+        return binder::Status::ok();
+    }
+};
+
+status_t AudioFlingerFuzzer::invokeAudioEffect() {
+    effect_uuid_t type;
+    type.timeLow = mFdp.ConsumeIntegral<uint32_t>();
+    type.timeMid = mFdp.ConsumeIntegral<uint16_t>();
+    type.timeHiAndVersion = mFdp.ConsumeIntegral<uint16_t>();
+    type.clockSeq = mFdp.ConsumeIntegral<uint16_t>();
+    for (int i = 0; i < 6; ++i) {
+        type.node[i] = mFdp.ConsumeIntegral<uint8_t>();
+    }
+
+    effect_descriptor_t descriptor = {};
+    descriptor.type = type;
+    descriptor.uuid = *EFFECT_UUID_NULL;
+
+    sp<EffectClient> effectClient(new EffectClient());
+
+    const int32_t priority = mFdp.ConsumeIntegral<int32_t>();
+    audio_session_t sessionId = static_cast<audio_session_t>(mFdp.ConsumeIntegral<int32_t>());
+    const audio_io_handle_t io = mFdp.ConsumeIntegral<int32_t>();
+    std::string opPackageName = static_cast<std::string>(mFdp.ConsumeRandomLengthString().c_str());
+    AudioDeviceTypeAddr device;
+
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (!af) {
+        return NO_ERROR;
+    }
+
+    media::CreateEffectRequest request{};
+    request.desc =
+        VALUE_OR_RETURN_STATUS(legacy2aidl_effect_descriptor_t_EffectDescriptor(descriptor));
+    request.client = effectClient;
+    request.priority = priority;
+    request.output = io;
+    request.sessionId = sessionId;
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(device));
+    // TODO b/182392769: use attribution source util
+    request.attributionSource.packageName = opPackageName;
+    request.attributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(getpid()));
+    request.probe = false;
+
+    media::CreateEffectResponse response{};
+    status_t status = af->createEffect(request, &response);
+
+    if (status != OK) {
+        return NO_ERROR;
+    }
+
+    descriptor =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_EffectDescriptor_effect_descriptor_t(response.desc));
+
+    uint32_t numEffects;
+    af->queryNumberEffects(&numEffects);
+
+    uint32_t queryIndex = mFdp.ConsumeIntegral<uint32_t>();
+    af->queryEffect(queryIndex, &descriptor);
+
+    effect_descriptor_t getDescriptor;
+    uint32_t preferredTypeFlag = mFdp.ConsumeIntegral<int32_t>();
+    af->getEffectDescriptor(&descriptor.uuid, &descriptor.type, preferredTypeFlag, &getDescriptor);
+
+    sessionId = static_cast<audio_session_t>(mFdp.ConsumeIntegral<int32_t>());
+    audio_io_handle_t srcOutput = mFdp.ConsumeIntegral<int32_t>();
+    audio_io_handle_t dstOutput = mFdp.ConsumeIntegral<int32_t>();
+    af->moveEffects(sessionId, srcOutput, dstOutput);
+
+    int effectId = mFdp.ConsumeIntegral<int32_t>();
+    sessionId = static_cast<audio_session_t>(mFdp.ConsumeIntegral<int32_t>());
+    af->setEffectSuspended(effectId, sessionId, mFdp.ConsumeBool());
+    return NO_ERROR;
+}
+
+void AudioFlingerFuzzer::invokeAudioSystem() {
+    AudioSystem::muteMicrophone(mFdp.ConsumeBool());
+    AudioSystem::setMasterMute(mFdp.ConsumeBool());
+    AudioSystem::setMasterVolume(mFdp.ConsumeFloatingPoint<float>());
+    AudioSystem::setMasterBalance(mFdp.ConsumeFloatingPoint<float>());
+    AudioSystem::setVoiceVolume(mFdp.ConsumeFloatingPoint<float>());
+
+    float volume;
+    AudioSystem::getMasterVolume(&volume);
+
+    bool state;
+    AudioSystem::getMasterMute(&state);
+    AudioSystem::isMicrophoneMuted(&state);
+
+    audio_stream_type_t stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::setStreamMute(getValue(&mFdp, kStreamtypes), mFdp.ConsumeBool());
+
+    stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
+                                 mFdp.ConsumeIntegral<int32_t>());
+
+    audio_mode_t mode = getValue(&mFdp, kModes);
+    AudioSystem::setMode(mode);
+
+    size_t frameCount;
+    stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::getOutputFrameCount(&frameCount, stream);
+
+    uint32_t latency;
+    stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::getOutputLatency(&latency, stream);
+
+    stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::getStreamVolume(stream, &volume, mFdp.ConsumeIntegral<int32_t>());
+
+    stream = getValue(&mFdp, kStreamtypes);
+    AudioSystem::getStreamMute(stream, &state);
+
+    uint32_t samplingRate;
+    AudioSystem::getSamplingRate(mFdp.ConsumeIntegral<int32_t>(), &samplingRate);
+
+    AudioSystem::getFrameCount(mFdp.ConsumeIntegral<int32_t>(), &frameCount);
+    AudioSystem::getLatency(mFdp.ConsumeIntegral<int32_t>(), &latency);
+    AudioSystem::setVoiceVolume(mFdp.ConsumeFloatingPoint<float>());
+
+    uint32_t halFrames;
+    uint32_t dspFrames;
+    AudioSystem::getRenderPosition(mFdp.ConsumeIntegral<int32_t>(), &halFrames, &dspFrames);
+
+    AudioSystem::getInputFramesLost(mFdp.ConsumeIntegral<int32_t>());
+    AudioSystem::getInputFramesLost(mFdp.ConsumeIntegral<int32_t>());
+
+    audio_unique_id_use_t uniqueIdUse = getValue(&mFdp, kUniqueIds);
+    AudioSystem::newAudioUniqueId(uniqueIdUse);
+
+    audio_session_t sessionId = getValue(&mFdp, kSessionId);
+    pid_t pid = mFdp.ConsumeBool() ? getpid() : mFdp.ConsumeIntegral<int32_t>();
+    uid_t uid = mFdp.ConsumeBool() ? getuid() : mFdp.ConsumeIntegral<int32_t>();
+    AudioSystem::acquireAudioSessionId(sessionId, pid, uid);
+
+    pid = mFdp.ConsumeBool() ? getpid() : mFdp.ConsumeIntegral<int32_t>();
+    sessionId = getValue(&mFdp, kSessionId);
+    AudioSystem::releaseAudioSessionId(sessionId, pid);
+
+    sessionId = getValue(&mFdp, kSessionId);
+    AudioSystem::getAudioHwSyncForSession(sessionId);
+
+    AudioSystem::systemReady();
+    AudioSystem::getFrameCountHAL(mFdp.ConsumeIntegral<int32_t>(), &frameCount);
+
+    size_t buffSize;
+    uint32_t sampleRate = getSampleRate(&mFdp);
+    audio_format_t format = getValue(&mFdp, kFormats);
+    audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
+    AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &buffSize);
+
+    AudioSystem::getPrimaryOutputSamplingRate();
+    AudioSystem::getPrimaryOutputFrameCount();
+    AudioSystem::setLowRamDevice(mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int64_t>());
+
+    std::vector<media::MicrophoneInfo> microphones;
+    AudioSystem::getMicrophones(&microphones);
+
+    std::vector<pid_t> pids;
+    pids.insert(pids.begin(), getpid());
+    for (int i = 1; i < mFdp.ConsumeIntegralInRange<int32_t>(2, MAX_ARRAY_LENGTH); ++i) {
+        pids.insert(pids.begin() + i, static_cast<pid_t>(mFdp.ConsumeIntegral<int32_t>()));
+    }
+    AudioSystem::setAudioHalPids(pids);
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (!af) {
+        return;
+    }
+    af->setRecordSilenced(mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeBool());
+
+    float balance = mFdp.ConsumeFloatingPoint<float>();
+    af->getMasterBalance(&balance);
+    af->invalidateStream(static_cast<audio_stream_type_t>(mFdp.ConsumeIntegral<uint32_t>()));
+}
+
+status_t AudioFlingerFuzzer::invokeAudioInputDevice() {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (!af) {
+        return NO_ERROR;
+    }
+
+    audio_config_t config = {};
+    audio_module_handle_t module = mFdp.ConsumeIntegral<int32_t>();
+    audio_io_handle_t input = mFdp.ConsumeIntegral<int32_t>();
+    config.frame_count = mFdp.ConsumeIntegral<uint32_t>();
+    String8 address = static_cast<String8>(mFdp.ConsumeRandomLengthString().c_str());
+
+    config.channel_mask = getValue(&mFdp, kChannelMasks);
+    config.format = getValue(&mFdp, kFormats);
+
+    config.offload_info = AUDIO_INFO_INITIALIZER;
+    config.offload_info.bit_rate = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.bit_width = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.content_id = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.channel_mask = getValue(&mFdp, kChannelMasks);
+    config.offload_info.duration_us = mFdp.ConsumeIntegral<int64_t>();
+    config.offload_info.encapsulation_mode = getValue(&mFdp, kEncapsulation);
+    config.offload_info.format = getValue(&mFdp, kFormats);
+    config.offload_info.has_video = mFdp.ConsumeBool();
+    config.offload_info.is_streaming = mFdp.ConsumeBool();
+    config.offload_info.sample_rate = getSampleRate(&mFdp);
+    config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
+    config.offload_info.usage = getValue(&mFdp, kUsages);
+
+    config.sample_rate = getSampleRate(&mFdp);
+
+    audio_devices_t device = getValue(&mFdp, kDevices);
+    audio_source_t source = getValue(&mFdp, kInputSources);
+    audio_input_flags_t flags = getValue(&mFdp, kInputFlags);
+
+    AudioDeviceTypeAddr deviceTypeAddr(device, address.c_str());
+
+    media::OpenInputRequest request{};
+    request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+    request.input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
+    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(deviceTypeAddr));
+    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+
+    media::OpenInputResponse response{};
+    status_t status = af->openInput(request, &response);
+    if (status != NO_ERROR) {
+        return NO_ERROR;
+    }
+
+    input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
+    af->closeInput(input);
+    return NO_ERROR;
+}
+
+status_t AudioFlingerFuzzer::invokeAudioOutputDevice() {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (!af) {
+        return NO_ERROR;
+    }
+
+    audio_config_t config = {};
+    audio_module_handle_t module = mFdp.ConsumeIntegral<int32_t>();
+    audio_io_handle_t output = mFdp.ConsumeIntegral<int32_t>();
+    config.frame_count = mFdp.ConsumeIntegral<uint32_t>();
+    String8 address = static_cast<String8>(mFdp.ConsumeRandomLengthString().c_str());
+
+    config.channel_mask = getValue(&mFdp, kChannelMasks);
+
+    config.offload_info = AUDIO_INFO_INITIALIZER;
+    config.offload_info.bit_rate = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.bit_width = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.channel_mask = getValue(&mFdp, kChannelMasks);
+    config.offload_info.content_id = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.duration_us = mFdp.ConsumeIntegral<int64_t>();
+    config.offload_info.encapsulation_mode = getValue(&mFdp, kEncapsulation);
+    config.offload_info.format = getValue(&mFdp, kFormats);
+    config.offload_info.has_video = mFdp.ConsumeBool();
+    config.offload_info.is_streaming = mFdp.ConsumeBool();
+    config.offload_info.sample_rate = getSampleRate(&mFdp);
+    config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
+    config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
+    config.offload_info.usage = getValue(&mFdp, kUsages);
+
+    config.format = getValue(&mFdp, kFormats);
+    config.sample_rate = getSampleRate(&mFdp);
+
+    sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(getValue(&mFdp, kDevices));
+    audio_output_flags_t flags = getValue(&mFdp, kOutputFlags);
+
+    media::OpenOutputRequest request{};
+    media::OpenOutputResponse response{};
+
+    request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+
+    status_t status = af->openOutput(request, &response);
+    if (status != NO_ERROR) {
+        return NO_ERROR;
+    }
+    output = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.output));
+
+    audio_io_handle_t output1 = mFdp.ConsumeIntegral<int32_t>();
+    af->openDuplicateOutput(output, output1);
+    af->suspendOutput(output);
+    af->restoreOutput(output);
+    af->closeOutput(output);
+    return NO_ERROR;
+}
+
+void AudioFlingerFuzzer::invokeAudioPatch() {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (!af) {
+        return;
+    }
+    struct audio_patch patch = {};
+    audio_patch_handle_t handle = mFdp.ConsumeIntegral<int32_t>();
+
+    patch.id = mFdp.ConsumeIntegral<int32_t>();
+    patch.num_sources = mFdp.ConsumeIntegral<uint32_t>();
+    patch.num_sinks = mFdp.ConsumeIntegral<uint32_t>();
+
+    for (int i = 0; i < AUDIO_PATCH_PORTS_MAX; ++i) {
+        patch.sources[i].config_mask = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sources[i].channel_mask = getValue(&mFdp, kChannelMasks);
+        patch.sources[i].format = getValue(&mFdp, kFormats);
+        patch.sources[i].gain.channel_mask = getValue(&mFdp, kChannelMasks);
+        patch.sources[i].gain.index = mFdp.ConsumeIntegral<int32_t>();
+        patch.sources[i].gain.mode = getValue(&mFdp, kGainModes);
+        patch.sources[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sources[i].id = static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>());
+        patch.sources[i].role = getValue(&mFdp, kPortRoles);
+        patch.sources[i].sample_rate = getSampleRate(&mFdp);
+        patch.sources[i].type = getValue(&mFdp, kPortTypes);
+
+        patch.sinks[i].config_mask = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sinks[i].channel_mask = getValue(&mFdp, kChannelMasks);
+        patch.sinks[i].format = getValue(&mFdp, kFormats);
+        patch.sinks[i].gain.channel_mask = getValue(&mFdp, kChannelMasks);
+        patch.sinks[i].gain.index = mFdp.ConsumeIntegral<int32_t>();
+        patch.sinks[i].gain.mode = getValue(&mFdp, kGainModes);
+        patch.sinks[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
+        patch.sinks[i].id = static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>());
+        patch.sinks[i].role = getValue(&mFdp, kPortRoles);
+        patch.sinks[i].sample_rate = getSampleRate(&mFdp);
+        patch.sinks[i].type = getValue(&mFdp, kPortTypes);
+    }
+
+    status_t status = af->createAudioPatch(&patch, &handle);
+    if (status != NO_ERROR) {
+        return;
+    }
+
+    unsigned int num_patches = mFdp.ConsumeIntegral<uint32_t>();
+    struct audio_patch patches = {};
+    af->listAudioPatches(&num_patches, &patches);
+    af->releaseAudioPatch(handle);
+}
+
+void AudioFlingerFuzzer::process() {
+    invokeAudioEffect();
+    invokeAudioInputDevice();
+    invokeAudioOutputDevice();
+    invokeAudioPatch();
+    invokeAudioRecord();
+    invokeAudioSystem();
+    invokeAudioTrack();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    AudioFlingerFuzzer audioFuzzer(data, size);
+    audioFuzzer.process();
+    return 0;
+}
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
new file mode 100644
index 0000000..4ec69c7
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -0,0 +1,420 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+
+#include <system/audio.h>
+
+#include <android/media/AudioAttributesInternal.h>
+#include <android/media/AudioClient.h>
+#include <android/media/AudioConfig.h>
+#include <android/media/AudioConfigBase.h>
+#include <android/media/AudioDualMonoMode.h>
+#include <android/media/AudioEncapsulationMode.h>
+#include <android/media/AudioEncapsulationMetadataType.h>
+#include <android/media/AudioEncapsulationType.h>
+#include <android/media/AudioFlag.h>
+#include <android/media/AudioGain.h>
+#include <android/media/AudioGainMode.h>
+#include <android/media/AudioInputFlags.h>
+#include <android/media/AudioIoConfigEvent.h>
+#include <android/media/AudioIoDescriptor.h>
+#include <android/media/AudioMixLatencyClass.h>
+#include <android/media/AudioMode.h>
+#include <android/media/AudioOutputFlags.h>
+#include <android/media/AudioPlaybackRate.h>
+#include <android/media/AudioPort.h>
+#include <android/media/AudioPortConfigType.h>
+#include <android/media/AudioPortDeviceExt.h>
+#include <android/media/AudioPortExt.h>
+#include <android/media/AudioPortMixExt.h>
+#include <android/media/AudioPortSessionExt.h>
+#include <android/media/AudioProfile.h>
+#include <android/media/AudioTimestampInternal.h>
+#include <android/media/AudioUniqueIdUse.h>
+#include <android/media/EffectDescriptor.h>
+#include <android/media/ExtraAudioDescriptor.h>
+#include <android/media/TrackSecondaryOutputInfo.h>
+
+#include <android/media/SharedFileRegion.h>
+#include <binder/IMemory.h>
+#include <media/AidlConversionUtil.h>
+#include <media/AudioClient.h>
+#include <media/AudioCommonTypes.h>
+#include <media/AudioIoDescriptor.h>
+#include <media/AudioTimestamp.h>
+#include <system/audio_effect.h>
+
+namespace android {
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
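+// Example call shape (names illustrative): copy an AIDL string into a fixed-size legacy buffer,
+// where maxSize counts the terminating NUL:
+//     char legacyName[32];
+//     status_t result = aidl2legacy_string(aidlName, legacyName, sizeof(legacyName));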
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
+
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
+        media::AudioPortConfigType aidl);
+// The legacy enum is unnamed. Thus, we use int32_t.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
+        int32_t legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy);
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
+
+ConversionResult<std::optional<String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+        media::AudioIoConfigEvent aidl);
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+        audio_io_config_event legacy);
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+        media::AudioPortRole aidl);
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+        audio_port_role_t legacy);
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+        media::AudioPortType aidl);
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+        audio_port_type_t legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+        media::audio::common::AudioFormat aidl);
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+        audio_format_t legacy);
+
+ConversionResult<audio_gain_mode_t>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl);
+ConversionResult<media::AudioGainMode>
+legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+        media::AudioInputFlags aidl);
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+        audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+        media::AudioOutputFlags aidl);
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+        audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
+        const media::AudioPortConfigDeviceExt& aidl);
+ConversionResult<media::AudioPortConfigDeviceExt>
+legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
+        const audio_port_config_device_ext& legacy);
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        media::AudioStreamType aidl);
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+        media::AudioSourceType aidl);
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+        audio_source_t legacy);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role);
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role);
+
+ConversionResult<audio_port_config_session_ext>
+aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+        const media::AudioPortConfigSessionExt& aidl);
+ConversionResult<media::AudioPortConfigSessionExt>
+legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+        const audio_port_config_session_ext& legacy);
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::AudioPortConfig& aidl);
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy);
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+        const media::AudioPatch& aidl);
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+        const struct audio_patch& legacy);
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+        const media::AudioIoDescriptor& aidl);
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+        const sp<AudioIoDescriptor>& legacy);
+
+ConversionResult<AudioClient> aidl2legacy_AudioClient_AudioClient(
+        const media::AudioClient& aidl);
+ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
+        const AudioClient& legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl);
+ConversionResult<media::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl);
+ConversionResult<media::AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl);
+ConversionResult<media::AudioFlag>
+legacy2aidl_audio_flags_mask_t_AudioFlag(audio_flags_mask_t legacy);
+
+ConversionResult<audio_flags_mask_t>
+aidl2legacy_int32_t_audio_flags_mask_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_flags_mask_t_int32_t_mask(audio_flags_mask_t legacy);
+
+ConversionResult<audio_attributes_t>
+aidl2legacy_AudioAttributesInternal_audio_attributes_t(const media::AudioAttributesInternal& aidl);
+ConversionResult<media::AudioAttributesInternal>
+legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl);
+ConversionResult<media::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl);
+ConversionResult<media::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl);
+ConversionResult<media::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl);
+ConversionResult<media::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
+ConversionResult<media::SharedFileRegion>
+legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+ConversionResult<sp<IMemory>>
+aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl);
+ConversionResult<std::optional<media::SharedFileRegion>>
+legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy);
+
+ConversionResult<AudioTimestamp>
+aidl2legacy_AudioTimestampInternal_AudioTimestamp(const media::AudioTimestampInternal& aidl);
+ConversionResult<media::AudioTimestampInternal>
+legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy);
+
+ConversionResult<audio_uuid_t>
+aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl);
+ConversionResult<media::AudioUuid>
+legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy);
+
+ConversionResult<effect_descriptor_t>
+aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl);
+ConversionResult<media::EffectDescriptor>
+legacy2aidl_effect_descriptor_t_EffectDescriptor(const effect_descriptor_t& legacy);
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+        media::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+        audio_encapsulation_metadata_type_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+
+ConversionResult<audio_mix_latency_class_t>
+aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
+        media::AudioMixLatencyClass aidl);
+ConversionResult<media::AudioMixLatencyClass>
+legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
+        audio_mix_latency_class_t legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl);
+ConversionResult<media::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_mix_ext>
+aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl);
+ConversionResult<media::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_session_ext>
+aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl);
+ConversionResult<media::AudioPortSessionExt>
+legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy);
+
+ConversionResult<audio_profile>
+aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl);
+ConversionResult<media::AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy);
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl);
+ConversionResult<media::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
+ConversionResult<media::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl);
+ConversionResult<media::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+
+ConversionResult<audio_unique_id_use_t>
+aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl);
+ConversionResult<media::AudioUniqueIdUse>
+legacy2aidl_audio_unique_id_use_t_AudioUniqueIdUse(audio_unique_id_use_t legacy);
+
+ConversionResult<volume_group_t>
+aidl2legacy_int32_t_volume_group_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::AudioDualMonoMode aidl);
+ConversionResult<media::AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(audio_timestretch_fallback_mode_t legacy);
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(audio_timestretch_stretch_mode_t legacy);
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(const media::AudioPlaybackRate& aidl);
+ConversionResult<media::AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
+
+ConversionResult<audio_standard_t>
+aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl);
+ConversionResult<media::AudioStandard>
+legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy);
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::ExtraAudioDescriptor& aidl);
+ConversionResult<media::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy);
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::AudioEncapsulationType& aidl);
+ConversionResult<media::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy);
+
+using TrackSecondaryOutputInfoPair = std::pair<audio_port_handle_t, std::vector<audio_io_handle_t>>;
+ConversionResult<TrackSecondaryOutputInfoPair>
+aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair(
+        const media::TrackSecondaryOutputInfo& aidl);
+ConversionResult<media::TrackSecondaryOutputInfo>
+legacy2aidl_TrackSecondaryOutputInfoPair_TrackSecondaryOutputInfo(
+        const TrackSecondaryOutputInfoPair& legacy);
+
+
+}  // namespace android
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
new file mode 100644
index 0000000..c1a2be3
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <binder/Status.h>
+
+namespace android {
+
+template <typename T>
+using ConversionResult = base::expected<T, status_t>;
+
+// Convenience macros for working with ConversionResult, useful for writing converters for
+// aggregate types.
+
+#define VALUE_OR_RETURN(result)                                \
+    ({                                                         \
+        auto _tmp = (result);                                  \
+        if (!_tmp.ok()) return base::unexpected(_tmp.error()); \
+        std::move(_tmp.value());                               \
+    })
+
+#define RETURN_IF_ERROR(result) \
+    if (status_t _tmp = (result); _tmp != OK) return base::unexpected(_tmp);
+
+#define VALUE_OR_RETURN_STATUS(x)           \
+    ({                                      \
+       auto _tmp = (x);                     \
+       if (!_tmp.ok()) return _tmp.error(); \
+       std::move(_tmp.value());             \
+     })
+
+#define VALUE_OR_FATAL(result)                                        \
+    ({                                                                \
+       auto _tmp = (result);                                          \
+       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),                                \
+                           "Function: %s Line: %d Failed result (%d)",\
+                           __FUNCTION__, __LINE__, _tmp.error());     \
+       std::move(_tmp.value());                                       \
+     })
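+
+// Illustrative usage sketch (hypothetical type and function names): a converter for an aggregate
+// type typically chains VALUE_OR_RETURN per field so that the first failing sub-conversion
+// propagates its error; VALUE_OR_RETURN_STATUS is the equivalent inside functions that return a
+// plain status_t rather than a ConversionResult.
+//
+//   ConversionResult<legacy_foo_t> aidl2legacy_Foo_legacy_foo_t(const media::Foo& aidl) {
+//       legacy_foo_t legacy;
+//       legacy.volume = VALUE_OR_RETURN(aidl2legacy_float_volume(aidl.volume));  // hypothetical
+//       return legacy;
+//   }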
+
+/**
+ * A generic template to safely cast between integral types, respecting limits of the destination
+ * type.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+    // Special handling is required for signed vs. unsigned comparisons, since otherwise the
+    // signed value may be converted to unsigned and produce wrong results.
+    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+        if (from < 0 || from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+        if (from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    } else {
+        if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+            return base::unexpected(BAD_VALUE);
+        }
+    }
+    return static_cast<To>(from);
+}
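+
+// For example (illustrative only), converting an int32_t received over AIDL into an unsigned
+// legacy field fails with BAD_VALUE instead of silently wrapping when the value is negative:
+//
+//   ConversionResult<uint32_t> rate = convertIntegral<uint32_t>(aidlSampleRate);
+//   if (!rate.ok()) { /* BAD_VALUE for negative or out-of-range input */ }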
+
+/**
+ * A generic template to safely cast between types that are intended to be the same size but
+ * interpreted differently.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+    static_assert(sizeof(From) == sizeof(To));
+    return static_cast<To>(from);
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+status_t convertRange(InputIterator start,
+                      InputIterator end,
+                      OutputIterator out,
+                      const Func& itemConversion) {
+    for (InputIterator iter = start; iter != end; ++iter, ++out) {
+        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+    }
+    return OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion) {
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item : input) {
+        *ins = VALUE_OR_RETURN(itemConversion(item));
+    }
+    return output;
+}
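+
+// Illustrative usage sketch (aidlHandles is a hypothetical std::vector<int32_t>): converting a
+// vector of AIDL ints into legacy handles by passing a per-item converter declared elsewhere in
+// this library:
+//
+//   ConversionResult<std::vector<audio_io_handle_t>> handles =
+//           convertContainer<std::vector<audio_io_handle_t>>(
+//                   aidlHandles, aidl2legacy_int32_t_audio_io_handle_t);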
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_t<T>).
+
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+    using Type = T;
+};
+
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+    using Type = std::underlying_type_t<T>;
+};
+
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+template<typename Enum>
+Enum indexToEnum_index(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(index);
+}
+
+template<typename Enum>
+Enum indexToEnum_bitmask(int index) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    return static_cast<Enum>(1 << index);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(e);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+    static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+    static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+    return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+            << static_cast<int>(e));
+}
+
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+        SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+        const std::function<SrcEnum(int)>& srcIndexToEnum,
+        const std::function<DestMask(DestEnum)>& destEnumToMask) {
+    using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+    using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+    UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+    UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+    int srcBitIndex = 0;
+    while (usrc != 0) {
+        if (usrc & 1) {
+            SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+            DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+            DestMask destMask = destEnumToMask(destEnum);
+            dest |= destMask;
+        }
+        ++srcBitIndex;
+        usrc >>= 1;
+    }
+    return static_cast<DestMask>(dest);
+}
+
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+    return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
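+
+// Illustrative sketch of how a mask converter declared in AidlConversion.h might be implemented
+// with convertBitmask (a plausible shape, not necessarily the actual implementation): walk the
+// set bits of the source mask, convert each enum value, and OR the results together.
+//
+//   ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
+//       return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, audio_gain_mode_t>(
+//               legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
+//               indexToEnum_bitmask<audio_gain_mode_t>,
+//               enumToMask_index<int32_t, media::AudioGainMode>);
+//   }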
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for working with AIDL unions.
+// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
+//   value of the respective field, or BAD_VALUE if the union is not set to the requested field.
+// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
+
+template<typename T, typename T::Tag tag>
+using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
+
+template<typename T, typename T::Tag tag>
+ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
+    if (u.getTag() != tag) {
+        return base::unexpected(BAD_VALUE);
+    }
+    return u.template get<tag>();
+}
+
+#define UNION_GET(u, field) \
+    unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
+
+#define UNION_SET(u, field, value) \
+    (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
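+
+// Illustrative usage sketch (the union variables and their "device" field are hypothetical):
+//
+//   const auto& device = VALUE_OR_RETURN(UNION_GET(u, device));  // BAD_VALUE if another field is set
+//   // ... convert, then write back into an AIDL union:
+//   UNION_SET(aidlUnion, device, convertedDevice);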
+
+namespace aidl_utils {
+
+/**
+ * Return the equivalent Android status_t from a binder exception code.
+ *
+ * Generally one should use statusTFromBinderStatus() instead.
+ *
+ * Exception codes can be generated from a remote Java service exception; this translates
+ * them for use on the native side.
+ *
+ * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC, a more detailed error code
+ * can be found from transactionError() or serviceSpecificErrorCode().
+ */
+static inline status_t statusTFromExceptionCode(int32_t exceptionCode) {
+    using namespace ::android::binder;
+    switch (exceptionCode) {
+        case Status::EX_NONE:
+            return OK;
+        case Status::EX_SECURITY: // Java SecurityException, rethrows locally in Java
+            return PERMISSION_DENIED;
+        case Status::EX_BAD_PARCELABLE: // Java BadParcelableException, rethrows in Java
+        case Status::EX_ILLEGAL_ARGUMENT: // Java IllegalArgumentException, rethrows in Java
+        case Status::EX_NULL_POINTER: // Java NullPointerException, rethrows in Java
+            return BAD_VALUE;
+        case Status::EX_ILLEGAL_STATE: // Java IllegalStateException, rethrows in Java
+        case Status::EX_UNSUPPORTED_OPERATION: // Java UnsupportedOperationException, rethrows
+            return INVALID_OPERATION;
+        case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
+        case Status::EX_PARCELABLE: // Java bootclass loader (not standard exception), rethrows
+        case Status::EX_NETWORK_MAIN_THREAD: // Java NetworkOnMainThreadException, rethrows
+        case Status::EX_TRANSACTION_FAILED: // Native - see error code
+        case Status::EX_SERVICE_SPECIFIC:  // Java ServiceSpecificException,
+                                           // rethrows in Java with integer error code
+            return UNKNOWN_ERROR;
+    }
+    return UNKNOWN_ERROR;
+}
+
+/**
+ * Return the equivalent Android status_t from a binder status.
+ *
+ * Used to handle errors from an AIDL method declaration
+ *
+ * [oneway] void method(type0 param0, ...)
+ *
+ * or the following (where return_type is not a status_t)
+ *
+ * return_type method(type0 param0, ...)
+ */
+static inline status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
+    return status.isOk() ? OK // check OK,
+        : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
+                                            // (fromServiceSpecificError)
+        ?: status.transactionError() // a native binder transaction error (fromStatusT)
+        ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
+                                                    // standard Java exception (fromExceptionCode)
+}
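+
+// Illustrative usage sketch (the proxy, method, and argument names are hypothetical): a
+// client-side wrapper around a generated AIDL proxy call typically folds the binder::Status
+// into a status_t for legacy callers:
+//
+//   binder::Status st = audioFlinger->someMethod(request, &response);
+//   return statusTFromBinderStatus(st);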
+
+/**
+ * Return a binder::Status from native service status.
+ *
+ * This is used for methods not returning an explicit status_t,
+ * where Java callers expect an exception, not an integer return value.
+ */
+static inline ::android::binder::Status binderStatusFromStatusT(
+        status_t status, const char *optionalMessage = nullptr) {
+    const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
+    // From binder::Status instructions:
+    //  Prefer a generic exception code when possible, then a service specific
+    //  code, and finally a status_t for low level failures or legacy support.
+    //  Exception codes and service specific errors map to nicer exceptions for
+    //  Java clients.
+
+    using namespace ::android::binder;
+    switch (status) {
+        case OK:
+            return Status::ok();
+        case PERMISSION_DENIED: // throw SecurityException on Java side
+            return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
+        case BAD_VALUE: // throw IllegalArgumentException on Java side
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
+        case INVALID_OPERATION: // throw IllegalStateException on Java side
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
+    }
+
+    // A service-specific error will not show on status.transactionError(), so
+    // be sure to use statusTFromBinderStatus() for reliable error handling.
+
+    // throw a ServiceSpecificException.
+    return Status::fromServiceSpecificError(status, emptyIfNull);
+}
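+
+// Illustrative usage sketch (service and method names are hypothetical): a Bn-side
+// implementation that computes a legacy status_t usually hands it back to the generated AIDL
+// layer through this helper:
+//
+//   binder::Status SomeService::doWork(int32_t param) {
+//       status_t status = doWorkInternal(param);
+//       return binderStatusFromStatusT(status, "doWork failed");
+//   }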
+
+
+} // namespace aidl_utils
+
+}  // namespace android
diff --git a/media/libaudioclient/include/media/AudioAttributes.h b/media/libaudioclient/include/media/AudioAttributes.h
index 001c629..24bd179 100644
--- a/media/libaudioclient/include/media/AudioAttributes.h
+++ b/media/libaudioclient/include/media/AudioAttributes.h
@@ -17,7 +17,9 @@
 
 #pragma once
 
+#include <android/media/AudioAttributesEx.h>
 #include <media/AudioCommonTypes.h>
+#include <media/AidlConversionUtil.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <binder/Parcelable.h>
@@ -57,4 +59,10 @@
     volume_group_t mGroupId = VOLUME_GROUP_NONE;
 };
 
+// AIDL conversion routines.
+ConversionResult<media::AudioAttributesEx>
+legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy);
+ConversionResult<AudioAttributes>
+aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl);
+
 } // namespace android
diff --git a/media/libaudioclient/include/media/AudioClient.h b/media/libaudioclient/include/media/AudioClient.h
index 247af9e..3be8ce2 100644
--- a/media/libaudioclient/include/media/AudioClient.h
+++ b/media/libaudioclient/include/media/AudioClient.h
@@ -18,38 +18,18 @@
 #ifndef ANDROID_AUDIO_CLIENT_H
 #define ANDROID_AUDIO_CLIENT_H
 
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-#include <system/audio.h>
-#include <utils/String16.h>
+#include <sys/types.h>
+#include <android/content/AttributionSourceState.h>
 
 namespace android {
 
-class AudioClient : public Parcelable {
+class AudioClient {
  public:
     AudioClient() :
-        clientUid(-1), clientPid(-1), clientTid(-1), packageName("") {}
+        clientTid(-1) {}
 
-    uid_t clientUid;
-    pid_t clientPid;
     pid_t clientTid;
-    String16 packageName;
-
-    status_t readFromParcel(const Parcel *parcel) override {
-        clientUid = parcel->readInt32();
-        clientPid = parcel->readInt32();
-        clientTid = parcel->readInt32();
-        packageName = parcel->readString16();
-        return NO_ERROR;
-    }
-
-    status_t writeToParcel(Parcel *parcel) const override {
-        parcel->writeInt32(clientUid);
-        parcel->writeInt32(clientPid);
-        parcel->writeInt32(clientTid);
-        parcel->writeString16(packageName);
-        return NO_ERROR;
-    }
+    android::content::AttributionSourceState attributionSource;
 };
 
 }; // namespace android
diff --git a/media/libaudioclient/include/media/AudioCommonTypes.h b/media/libaudioclient/include/media/AudioCommonTypes.h
index 8e446ea..5dfe5fc 100644
--- a/media/libaudioclient/include/media/AudioCommonTypes.h
+++ b/media/libaudioclient/include/media/AudioCommonTypes.h
@@ -29,6 +29,8 @@
 using AttributesVector = std::vector<audio_attributes_t>;
 using StreamTypeVector = std::vector<audio_stream_type_t>;
 
+using TrackSecondaryOutputsMap = std::map<audio_port_handle_t, std::vector<audio_io_handle_t>>;
+
 constexpr bool operator==(const audio_attributes_t &lhs, const audio_attributes_t &rhs)
 {
     return lhs.usage == rhs.usage && lhs.content_type == rhs.content_type &&
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index cb76252..3c19ec1 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -21,16 +21,16 @@
 #include <sys/types.h>
 
 #include <media/IAudioFlinger.h>
-#include <media/IAudioPolicyService.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
 #include <media/AudioSystem.h>
 #include <system/audio_effect.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <utils/RefBase.h>
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
 
+#include "android/media/IEffect.h"
+#include "android/media/BnEffectClient.h"
 
 namespace android {
 
@@ -337,18 +337,23 @@
      *
      * Parameters:
      *
-     * opPackageName:      The package name used for app op checks.
+     * client:      Attribution source for app-op checks
      */
-    AudioEffect(const String16& opPackageName);
+    explicit AudioEffect(const android::content::AttributionSourceState& client);
 
+    /* Terminates the AudioEffect and unregisters it from AudioFlinger.
+     * The effect engine is also destroyed if this AudioEffect was the last one
+     * controlling the engine.
+     */
+                        ~AudioEffect();
 
-    /* Constructor.
+    /**
+     * Initialize an uninitialized AudioEffect.
      *
      * Parameters:
      *
      * type:  type of effect created: can be null if uuid is specified. This corresponds to
      *        the OpenSL ES interface implemented by this effect.
-     * opPackageName:  The package name used for app op checks.
      * uuid:  Uuid of effect created: can be null if type is specified. This uuid corresponds to
      *        a particular implementation of an effect type.
      * priority:    requested priority for effect control: the priority level corresponds to the
@@ -356,7 +361,7 @@
      *      higher priorities, 0 being the normal priority.
      * cbf:         optional callback function (see effect_callback_t)
      * user:        pointer to context for use by the callback receiver.
-     * sessionID:   audio session this effect is associated to.
+     * sessionId:   audio session this effect is associated to.
      *      If equal to AUDIO_SESSION_OUTPUT_MIX, the effect will be global to
      *      the output mix.  Otherwise, the effect will be applied to all players
      *      (AudioTrack or MediaPLayer) within the same audio session.
@@ -369,46 +374,13 @@
      *        In this mode, no IEffect interface to AudioFlinger is created and all actions
      *        besides getters implemented in client AudioEffect object are no ops
      *        after effect creation.
+     *
+     * Returned status (from utils/Errors.h) can be:
+     *  - NO_ERROR or ALREADY_EXISTS: successful initialization
+     *  - INVALID_OPERATION: AudioEffect is already initialized
+     *  - BAD_VALUE: invalid parameter
+     *  - NO_INIT: audio flinger or audio hardware not initialized
      */
-
-    AudioEffect(const effect_uuid_t *type,
-                const String16& opPackageName,
-                const effect_uuid_t *uuid = NULL,
-                int32_t priority = 0,
-                effect_callback_t cbf = NULL,
-                void* user = NULL,
-                audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
-                audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
-                const AudioDeviceTypeAddr& device = {},
-                bool probe = false);
-
-    /* Constructor.
-     *      Same as above but with type and uuid specified by character strings
-     */
-    AudioEffect(const char *typeStr,
-                    const String16& opPackageName,
-                    const char *uuidStr = NULL,
-                    int32_t priority = 0,
-                    effect_callback_t cbf = NULL,
-                    void* user = NULL,
-                    audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
-                    audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
-                    const AudioDeviceTypeAddr& device = {},
-                    bool probe = false);
-
-    /* Terminates the AudioEffect and unregisters it from AudioFlinger.
-     * The effect engine is also destroyed if this AudioEffect was the last controlling
-     * the engine.
-     */
-                        ~AudioEffect();
-
-    /* Initialize an uninitialized AudioEffect.
-    * Returned status (from utils/Errors.h) can be:
-    *  - NO_ERROR or ALREADY_EXISTS: successful initialization
-    *  - INVALID_OPERATION: AudioEffect is already initialized
-    *  - BAD_VALUE: invalid parameter
-    *  - NO_INIT: audio flinger or audio hardware not initialized
-    * */
             status_t    set(const effect_uuid_t *type,
                             const effect_uuid_t *uuid = NULL,
                             int32_t priority = 0,
@@ -418,6 +390,18 @@
                             audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
                             const AudioDeviceTypeAddr& device = {},
                             bool probe = false);
+    /*
+     * Same as above but with type and uuid specified by character strings.
+     */
+            status_t    set(const char *typeStr,
+                            const char *uuidStr = NULL,
+                            int32_t priority = 0,
+                            effect_callback_t cbf = NULL,
+                            void* user = NULL,
+                            audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+                            audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+                            const AudioDeviceTypeAddr& device = {},
+                            bool probe = false);
 
     /* Result of constructing the AudioEffect. This must be checked
      * before using any AudioEffect API.
@@ -547,90 +531,67 @@
      static const uint32_t kMaxPreProcessing = 10;
 
 protected:
-     bool                    mEnabled;           // enable state
-     audio_session_t         mSessionId;         // audio session ID
-     int32_t                 mPriority;          // priority for effect control
-     status_t                mStatus;            // effect status
-     bool                    mProbe;             // effect created in probe mode: all commands
+     android::content::AttributionSourceState mClientAttributionSource; // source for app op checks.
+     bool                    mEnabled = false;   // enable state
+     audio_session_t         mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
+     int32_t                 mPriority = 0;      // priority for effect control
+     status_t                mStatus = NO_INIT;  // effect status
+     bool                    mProbe = false;     // effect created in probe mode: all commands
                                                  // are no ops because mIEffect is NULL
-     effect_callback_t       mCbf;               // callback function for status, control and
+     effect_callback_t       mCbf = nullptr;     // callback function for status, control and
                                                  // parameter changes notifications
-     void*                   mUserData;          // client context for callback function
-     effect_descriptor_t     mDescriptor;        // effect descriptor
-     int32_t                 mId;                // system wide unique effect engine instance ID
+     void*                   mUserData = nullptr;// client context for callback function
+     effect_descriptor_t     mDescriptor = {};   // effect descriptor
+     int32_t                 mId = -1;           // system wide unique effect engine instance ID
      Mutex                   mLock;              // Mutex for mEnabled access
-     Mutex                   mConstructLock;     // Mutex for integrality construction
 
-     String16                mOpPackageName;     // The package name used for app op checks.
 
      // IEffectClient
      virtual void controlStatusChanged(bool controlGranted);
      virtual void enableStatusChanged(bool enabled);
-     virtual void commandExecuted(uint32_t cmdCode,
-             uint32_t cmdSize,
-             void *pCmdData,
-             uint32_t replySize,
-             void *pReplyData);
+     virtual void commandExecuted(int32_t cmdCode,
+                                  const std::vector<uint8_t>& cmdData,
+                                  const std::vector<uint8_t>& replyData);
 
 private:
 
      // Implements the IEffectClient interface
     class EffectClient :
-        public android::BnEffectClient, public android::IBinder::DeathRecipient
+        public media::BnEffectClient, public android::IBinder::DeathRecipient
     {
     public:
 
         EffectClient(AudioEffect *effect) : mEffect(effect){}
 
         // IEffectClient
-        virtual void controlStatusChanged(bool controlGranted) {
+        binder::Status controlStatusChanged(bool controlGranted) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->controlStatusChanged(controlGranted);
             }
+            return binder::Status::ok();
         }
-        virtual void enableStatusChanged(bool enabled) {
+        binder::Status enableStatusChanged(bool enabled) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->enableStatusChanged(enabled);
             }
+            return binder::Status::ok();
         }
-        virtual void commandExecuted(uint32_t cmdCode,
-                                     uint32_t cmdSize,
-                                     void *pCmdData,
-                                     uint32_t replySize,
-                                     void *pReplyData) {
+        binder::Status commandExecuted(int32_t cmdCode,
+                             const std::vector<uint8_t>& cmdData,
+                             const std::vector<uint8_t>& replyData) override {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
-                effect->commandExecuted(
-                    cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+                effect->commandExecuted(cmdCode, cmdData, replyData);
             }
+            return binder::Status::ok();
         }
 
         // IBinder::DeathRecipient
         virtual void binderDied(const wp<IBinder>& /*who*/) {
             sp<AudioEffect> effect = mEffect.promote();
             if (effect != 0) {
-                {
-                    // Got the mConstructLock means the construction of AudioEffect
-                    // has finished, we should release the mConstructLock immediately.
-                    AutoMutex lock(effect->mConstructLock);
-                }
                 effect->binderDied();
             }
         }
@@ -641,12 +602,10 @@
 
     void binderDied();
 
-    sp<IEffect>             mIEffect;           // IEffect binder interface
+    sp<media::IEffect>      mIEffect;           // IEffect binder interface
     sp<EffectClient>        mIEffectClient;     // IEffectClient implementation
     sp<IMemory>             mCblkMemory;        // shared memory for deferred parameter setting
-    effect_param_cblk_t*    mCblk;              // control block for deferred parameter setting
-    pid_t                   mClientPid = (pid_t)-1;
-    uid_t                   mClientUid = (uid_t)-1;
+    effect_param_cblk_t*    mCblk = nullptr;    // control block for deferred parameter setting
 };
 
 
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 00fe278..08b3da1 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -120,7 +120,7 @@
     String8         mDeviceAddress;
     uint32_t        mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
     /** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
-    bool            mAllowPrivilegedPlaybackCapture = false;
+    bool            mAllowPrivilegedMediaPlaybackCapture = false;
     /** Indicates if the caller can capture voice communication output */
     bool            mVoiceCommunicationCaptureAllowed = false;
 };
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index 7441095..b55b506 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -17,6 +17,8 @@
 
 #pragma once
 
+#include <android/media/AudioProductStrategy.h>
+#include <media/AidlConversionUtil.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioAttributes.h>
 #include <system/audio.h>
@@ -62,5 +64,11 @@
 
 using AudioProductStrategyVector = std::vector<AudioProductStrategy>;
 
+// AIDL conversion routines.
+ConversionResult<media::AudioProductStrategy>
+legacy2aidl_AudioProductStrategy(const AudioProductStrategy& legacy);
+ConversionResult<AudioProductStrategy>
+aidl2legacy_AudioProductStrategy(const media::AudioProductStrategy& aidl);
+
 } // namespace android
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 2f66658..326919a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -32,6 +32,7 @@
 #include <utils/threads.h>
 
 #include "android/media/IAudioRecord.h"
+#include <android/content/AttributionSourceState.h>
 
 namespace android {
 
@@ -148,9 +149,9 @@
      *
      * Parameters:
      *
-     * opPackageName:      The package name used for app ops.
+     * client:          The attribution source of the owner of the record
      */
-                        AudioRecord(const String16& opPackageName);
+                        AudioRecord(const android::content::AttributionSourceState& client);
 
     /* Creates an AudioRecord object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -163,7 +164,7 @@
      * format:             Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
      *                     16 bits per sample).
      * channelMask:        Channel mask, such that audio_is_input_channel(channelMask) is true.
-     * opPackageName:      The package name used for app ops.
+     * client:             The attribution source of the owner of the record
      * frameCount:         Minimum size of track PCM buffer in frames. This defines the
      *                     application's contribution to the
      *                     latency of the track.  The actual size selected by the AudioRecord could
@@ -186,7 +187,7 @@
                                     uint32_t sampleRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
-                                    const String16& opPackageName,
+                                    const android::content::AttributionSourceState& client,
                                     size_t frameCount = 0,
                                     callback_t cbf = NULL,
                                     void* user = NULL,
@@ -194,8 +195,6 @@
                                     audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
                                     const audio_attributes_t* pAttributes = NULL,
                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                                     audio_microphone_direction_t
@@ -242,7 +241,8 @@
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                             audio_microphone_direction_t
                                 selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT);
+                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT,
+                            int32_t maxSharedAudioHistoryMs = 0);
 
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
@@ -304,6 +304,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Calls stop() and then waits for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioRecord
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioRecord
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioRecord is being closed.
+     */
+            void        stopAndJoinCallbacks();
+
     /* Return the sink sample rate for this record track in Hz.
      * If specified as zero in constructor or set(), this will be the source sample rate.
      * Unlike AudioTrack, the sample rate is const after initialization, so doesn't need a lock.
@@ -578,6 +591,16 @@
       */
             audio_port_handle_t getPortId() const { return mPortId; };
 
+    /* Sets the LogSessionId field, which is used for metrics association of
+     * this object with other objects. A nullptr or empty string clears
+     * the logSessionId.
+     */
+            void setLogSessionId(const char *logSessionId);
+
+
+            status_t shareAudioHistory(const std::string& sharedPackageName,
+                                       int64_t sharedStartMs);
+
      /*
       * Dumps the state of an audio record.
       */
@@ -632,7 +655,7 @@
 
             // caller must hold lock on mLock for all _l methods
 
-            status_t createRecord_l(const Modulo<uint32_t> &epoch, const String16& opPackageName);
+            status_t createRecord_l(const Modulo<uint32_t> &epoch);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
             status_t restoreRecord_l(const char *from);
@@ -673,7 +696,7 @@
 
     status_t                mStatus;
 
-    String16                mOpPackageName;         // The package name used for app ops.
+    android::content::AttributionSourceState mClientAttributionSource; // Owner's attribution source
 
     size_t                  mFrameCount;            // corresponds to current IAudioRecord, value is
                                                     // reported back by AudioFlinger to the client
@@ -701,6 +724,14 @@
 
     audio_session_t         mSessionId;
     audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+
+    /**
+     * mLogSessionId is a string identifying this AudioRecord for the metrics service.
+     * It may be unique or shared with other objects.  An empty string means the
+     * logSessionId is not set.
+     */
+    std::string             mLogSessionId{};
+
     transfer_type           mTransfer;
 
     // Next 5 fields may be changed if IAudioRecord is re-created, but always != 0
@@ -740,8 +771,6 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioRecord attempt
-    uid_t                   mClientUid;
-    pid_t                   mClientPid;
     audio_attributes_t      mAttributes;
 
     // For Device Selection API
@@ -755,6 +784,10 @@
     audio_microphone_direction_t mSelectedMicDirection;
     float mSelectedMicFieldDimension;
 
+    int32_t                    mMaxSharedAudioHistoryMs = 0;
+    std::string                mSharedAudioPackageName = {};
+    int64_t                    mSharedAudioStartMs = 0;
+
 private:
     class MediaMetrics {
       public:
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 19c2cbd..a9109c8 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,13 +19,16 @@
 
 #include <sys/types.h>
 
+#include <android/media/AudioVibratorInfo.h>
+#include <android/media/BnAudioFlingerClient.h>
+#include <android/media/BnAudioPolicyServiceClient.h>
+#include <android/content/AttributionSourceState.h>
+#include <media/AidlConversionUtil.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
 #include <media/AudioIoDescriptor.h>
-#include <media/IAudioFlingerClient.h>
-#include <media/IAudioPolicyServiceClient.h>
 #include <media/MicrophoneInfo.h>
 #include <set>
 #include <system/audio.h>
@@ -35,8 +38,27 @@
 #include <utils/Mutex.h>
 #include <vector>
 
+using android::content::AttributionSourceState;
+
 namespace android {
 
+struct record_client_info {
+    audio_unique_id_t riid;
+    uid_t uid;
+    audio_session_t session;
+    audio_source_t source;
+    audio_port_handle_t port_id;
+    bool silenced;
+};
+
+typedef struct record_client_info record_client_info_t;
+
+// AIDL conversion functions.
+ConversionResult<record_client_info_t>
+aidl2legacy_RecordClientInfo_record_client_info_t(const media::RecordClientInfo& aidl);
+ConversionResult<media::RecordClientInfo>
+legacy2aidl_record_client_info_t_RecordClientInfo(const record_client_info_t& legacy);
+
 typedef void (*audio_error_callback)(status_t err);
 typedef void (*dynamic_policy_callback)(int event, String8 regId, int val);
 typedef void (*record_config_callback)(int event,
@@ -47,11 +69,15 @@
                                        std::vector<effect_descriptor_t> effects,
                                        audio_patch_handle_t patchHandle,
                                        audio_source_t source);
+typedef void (*routing_callback)();
 
 class IAudioFlinger;
-class IAudioPolicyService;
 class String8;
 
+namespace media {
+class IAudioPolicyService;
+}
+
 class AudioSystem
 {
 public:
@@ -120,6 +146,12 @@
 
     static void setDynPolicyCallback(dynamic_policy_callback cb);
     static void setRecordConfigCallback(record_config_callback);
+    static void setRoutingCallback(routing_callback cb);
+
+    // Sets the binder to use for accessing the AudioFlinger service. This enables the system server
+    // to grant specific isolated processes access to the audio system. Currently used only for the
+    // HotwordDetectionService.
+    static void setAudioFlingerBinder(const sp<IBinder>& audioFlinger);
 
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
@@ -239,8 +271,7 @@
                                      audio_io_handle_t *output,
                                      audio_session_t session,
                                      audio_stream_type_t *stream,
-                                     pid_t pid,
-                                     uid_t uid,
+                                     const AttributionSourceState& attributionSource,
                                      const audio_config_t *config,
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
@@ -256,9 +287,7 @@
                                     audio_io_handle_t *input,
                                     audio_unique_id_t riid,
                                     audio_session_t session,
-                                    pid_t pid,
-                                    uid_t uid,
-                                    const String16& opPackageName,
+                                     const AttributionSourceState& attributionSource,
                                     const audio_config_base_t *config,
                                     audio_input_flags_t flags,
                                     audio_port_handle_t *selectedDeviceId,
@@ -288,7 +317,7 @@
 
     static status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
-    static uint32_t getStrategyForStream(audio_stream_type_t stream);
+    static product_strategy_t getStrategyForStream(audio_stream_type_t stream);
     static audio_devices_t getDevicesForStream(audio_stream_type_t stream);
     static status_t getDevicesForAttributes(const AudioAttributes &aa,
                                             AudioDeviceTypeAddrVector *devices);
@@ -296,7 +325,7 @@
     static audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     static status_t registerEffect(const effect_descriptor_t *desc,
                                     audio_io_handle_t io,
-                                    uint32_t strategy,
+                                    product_strategy_t strategy,
                                     audio_session_t session,
                                     int id);
     static status_t unregisterEffect(int id);
@@ -307,7 +336,7 @@
     // and output configuration cache (gOutputs)
     static void clearAudioConfigCache();
 
-    static const sp<IAudioPolicyService> get_audio_policy_service();
+    static const sp<media::IAudioPolicyService> get_audio_policy_service();
 
     // helpers for android.media.AudioManager.getProperty(), see description there for meaning
     static uint32_t getPrimaryOutputSamplingRate();
@@ -317,11 +346,12 @@
 
     static status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
 
-    static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags);
+    static status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy);
 
-    // Check if hw offload is possible for given format, stream type, sample rate,
-    // bit rate, duration, video and streaming or offload property is enabled
-    static bool isOffloadSupported(const audio_offload_info_t& info);
+    // Indicates whether hw offload is possible for the given format, stream type, sample rate,
+    // bit rate, duration, video and streaming, or whether the offload property is enabled,
+    // and, when offload is possible, whether gapless transitions are supported.
+    static audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& info);
 
     // check presence of audio flinger service.
     // returns NO_ERROR if binding to service succeeds, DEAD_OBJECT otherwise
@@ -331,11 +361,11 @@
     static status_t listAudioPorts(audio_port_role_t role,
                                    audio_port_type_t type,
                                    unsigned int *num_ports,
-                                   struct audio_port *ports,
+                                   struct audio_port_v7 *ports,
                                    unsigned int *generation);
 
     /* Get attributes for a given audio port */
-    static status_t getAudioPort(struct audio_port *port);
+    static status_t getAudioPort(struct audio_port_v7 *port);
 
     /* Create an audio patch between several source and sink ports */
     static status_t createAudioPatch(const struct audio_patch *patch,
@@ -361,11 +391,11 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
-    static status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+    static status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
 
     static status_t removeUidDeviceAffinities(uid_t uid);
 
-    static status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    static status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
 
     static status_t removeUserIdDeviceAffinities(int userId);
 
@@ -393,8 +423,9 @@
     // populated. The actual number of surround formats should be returned at numSurroundFormats.
     static status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                        audio_format_t *surroundFormats,
-                                       bool *surroundFormatsEnabled,
-                                       bool reported);
+                                       bool *surroundFormatsEnabled);
+    static status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                               audio_format_t *surroundFormats);
     static status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
     static status_t setAssistantUid(uid_t uid);
@@ -404,16 +435,17 @@
     static bool     isHapticPlaybackSupported();
 
     static status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
-    static status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                        product_strategy_t &productStrategy);
+    static status_t getProductStrategyFromAudioAttributes(
+            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            bool fallbackOnDefault = true);
 
     static audio_attributes_t streamTypeToAttributes(audio_stream_type_t stream);
     static audio_stream_type_t attributesToStreamType(const audio_attributes_t &attr);
 
     static status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups);
 
-    static status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                      volume_group_t &volumeGroup);
+    static status_t getVolumeGroupFromAudioAttributes(
+            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault = true);
 
     static status_t setRttEnabled(bool enabled);
 
@@ -425,13 +457,29 @@
      */
     static status_t setAudioHalPids(const std::vector<pid_t>& pids);
 
-    static status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device);
+    static status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
 
-    static status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
+    static status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
 
-    static status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device);
+    static status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+            device_role_t role, AudioDeviceTypeAddrVector &devices);
+
+    static status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+    static status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+    static status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices);
+
+    static status_t clearDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role);
+
+    static status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices);
 
     static status_t getDeviceForStrategy(product_strategy_t strategy,
             AudioDeviceTypeAddr &device);
@@ -513,9 +561,11 @@
 
     static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
+    static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
+
 private:
 
-    class AudioFlingerClient: public IBinder::DeathRecipient, public BnAudioFlingerClient
+    class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
     {
     public:
         AudioFlingerClient() :
@@ -535,9 +585,9 @@
 
         // indicate a change in the configuration of an output or input: keeps the cached
         // values for output/input parameters up-to-date in client process
-        virtual void ioConfigChanged(audio_io_config_event event,
-                                     const sp<AudioIoDescriptor>& ioDesc);
-
+        binder::Status ioConfigChanged(
+                media::AudioIoConfigEvent event,
+                const media::AudioIoDescriptor& ioDesc) override;
 
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
                                                audio_io_handle_t audioIo,
@@ -563,7 +613,7 @@
     };
 
     class AudioPolicyServiceClient: public IBinder::DeathRecipient,
-                                    public BnAudioPolicyServiceClient
+                                    public media::BnAudioPolicyServiceClient
     {
     public:
         AudioPolicyServiceClient() {
@@ -581,18 +631,21 @@
         virtual void binderDied(const wp<IBinder>& who);
 
         // IAudioPolicyServiceClient
-        virtual void onAudioPortListUpdate();
-        virtual void onAudioPatchListUpdate();
-        virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags);
-        virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state);
-        virtual void onRecordingConfigurationUpdate(int event,
-                                                    const record_client_info_t *clientInfo,
-                                                    const audio_config_base_t *clientConfig,
-                                                    std::vector<effect_descriptor_t> clientEffects,
-                                                    const audio_config_base_t *deviceConfig,
-                                                    std::vector<effect_descriptor_t> effects,
-                                                    audio_patch_handle_t patchHandle,
-                                                    audio_source_t source);
+        binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
+        binder::Status onAudioPortListUpdate() override;
+        binder::Status onAudioPatchListUpdate() override;
+        binder::Status onDynamicPolicyMixStateUpdate(const std::string& regId,
+                                                     int32_t state) override;
+        binder::Status onRecordingConfigurationUpdate(
+                int32_t event,
+                const media::RecordClientInfo& clientInfo,
+                const media::AudioConfigBase& clientConfig,
+                const std::vector<media::EffectDescriptor>& clientEffects,
+                const media::AudioConfigBase& deviceConfig,
+                const std::vector<media::EffectDescriptor>& effects,
+                int32_t patchHandle,
+                media::AudioSourceType source) override;
+        binder::Status onRoutingUpdated() override;
 
     private:
         Mutex                               mLock;
@@ -619,6 +672,7 @@
     static std::set<audio_error_callback> gAudioErrorCallbacks;
     static dynamic_policy_callback gDynPolicyCallback;
     static record_config_callback gRecordConfigCallback;
+    static routing_callback gRoutingCallback;
 
     static size_t gInBuffSize;
     // previous parameters for recording buffer size queries
@@ -626,7 +680,7 @@
     static audio_format_t gPrevInFormat;
     static audio_channel_mask_t gPrevInChannelMask;
 
-    static sp<IAudioPolicyService> gAudioPolicyService;
+    static sp<media::IAudioPolicyService> gAudioPolicyService;
 };
 
 };  // namespace android
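// --- Hedged illustration (editorial, not part of the patch) ------------------
// A minimal sketch of how a client might exercise the new AudioSystem entry
// points declared above (setRoutingCallback, getOffloadSupport,
// getStrategyForStream, getDevicesForRoleAndStrategy), assuming libaudioclient
// linkage. DEVICE_ROLE_PREFERRED and the logging setup are assumptions.
#define LOG_TAG "AudioSystemUsageSketch"
#include <utils/Log.h>
#include <media/AudioSystem.h>

namespace {

// Matches the routing_callback typedef: no arguments, no return value.
void onRoutingUpdated() {
    ALOGI("audio routing updated");
}

}  // namespace

void exampleAudioSystemUsage(const audio_offload_info_t& offloadInfo) {
    using namespace android;

    // Register for routing updates forwarded by AudioPolicyServiceClient.
    AudioSystem::setRoutingCallback(onRoutingUpdated);

    // getOffloadSupport() replaces the boolean isOffloadSupported() and also
    // reports whether gapless transitions are supported.
    const audio_offload_mode_t mode = AudioSystem::getOffloadSupport(offloadInfo);
    ALOGI("offload support mode: %d", static_cast<int>(mode));

    // Strategies are now typed as product_strategy_t; query the preferred
    // devices for the music strategy (role constant assumed).
    const product_strategy_t strategy =
            AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
    AudioDeviceTypeAddrVector devices;
    if (AudioSystem::getDevicesForRoleAndStrategy(
                strategy, DEVICE_ROLE_PREFERRED, devices) == NO_ERROR) {
        ALOGI("music strategy has %zu preferred device(s)", devices.size());
    }
}
// -----------------------------------------------------------------------------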
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 0dbd842..cb00990 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -17,20 +17,27 @@
 #ifndef ANDROID_AUDIOTRACK_H
 #define ANDROID_AUDIOTRACK_H
 
+#include <binder/IMemory.h>
 #include <cutils/sched_policy.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTimestamp.h>
-#include <media/IAudioTrack.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/MediaMetricsItem.h>
 #include <media/Modulo.h>
+#include <media/VolumeShaper.h>
 #include <utils/threads.h>
+#include <android/content/AttributionSourceState.h>
+
+#include <string>
 
 #include "android/media/BnAudioTrackCallback.h"
+#include "android/media/IAudioTrack.h"
 #include "android/media/IAudioTrackCallback.h"
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // ----------------------------------------------------------------------------
 
 struct audio_track_cblk_t;
@@ -177,6 +184,8 @@
      */
                         AudioTrack();
 
+                        AudioTrack(const AttributionSourceState& attributionSourceState);
+
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
      * Unspecified values are set to appropriate default values.
@@ -223,10 +232,10 @@
      * transferType:       How data is transferred to AudioTrack.
      * offloadInfo:        If not NULL, provides offload parameters for
      *                     AudioSystem::getOutputForAttr().
-     * uid:                User ID of the app which initially requested this AudioTrack
-     *                     for power management tracking, or -1 for current user ID.
-     * pid:                Process ID of the app which initially requested this AudioTrack
-     *                     for power management tracking, or -1 for current process ID.
+     * attributionSource:  The attribution source of the app which initially requested this
+     *                     AudioTrack, used for power management tracking. It contains the
+     *                     UID and PID (or -1 for the current user/process ID) plus the
+     *                     package name.
      * pAttributes:        If not NULL, supersedes streamType for use case selection.
      * doNotReconnect:     If set to true, AudioTrack won't automatically recreate the IAudioTrack
                            binder to AudioFlinger.
@@ -253,8 +262,8 @@
                                     audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
+                                    const AttributionSourceState& attributionSource =
+                                        AttributionSourceState(),
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f,
@@ -284,8 +293,8 @@
                                     audio_session_t sessionId   = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     const audio_offload_info_t *offloadInfo = NULL,
-                                    uid_t uid = AUDIO_UID_INVALID,
-                                    pid_t pid = -1,
+                                    const AttributionSourceState& attributionSource =
+                                        AttributionSourceState(),
                                     const audio_attributes_t* pAttributes = NULL,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f);
@@ -332,6 +341,27 @@
                             audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
                             const audio_offload_info_t *offloadInfo = NULL,
+                            const AttributionSourceState& attributionSource =
+                                AttributionSourceState(),
+                            const audio_attributes_t* pAttributes = NULL,
+                            bool doNotReconnect = false,
+                            float maxRequiredSpeed = 1.0f,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+    // FIXME(b/169889714): Vendor code depends on the old method signature at link time
+            status_t    set(audio_stream_type_t streamType,
+                            uint32_t sampleRate,
+                            audio_format_t format,
+                            uint32_t channelMask,
+                            size_t frameCount   = 0,
+                            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                            callback_t cbf      = NULL,
+                            void* user          = NULL,
+                            int32_t notificationFrames = 0,
+                            const sp<IMemory>& sharedBuffer = 0,
+                            bool threadCanCallJava = false,
+                            audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
+                            transfer_type transferType = TRANSFER_DEFAULT,
+                            const audio_offload_info_t *offloadInfo = NULL,
                             uid_t uid = AUDIO_UID_INVALID,
                             pid_t pid = -1,
                             const audio_attributes_t* pAttributes = NULL,
@@ -402,6 +432,19 @@
      */
             ssize_t     setBufferSizeInFrames(size_t size);
 
+    /* Returns the start threshold on the buffer for audio streaming
+     * or a negative value if the AudioTrack is not initialized.
+     */
+            ssize_t     getStartThresholdInFrames() const;
+
+    /* Sets the start threshold in frames on the buffer for audio streaming.
+     *
+     * May be clamped internally. Returns the actual value set, or a negative
+     * value if the AudioTrack is not initialized or if the input
+     * is zero or greater than INT_MAX.
+     */
+            ssize_t     setStartThresholdInFrames(size_t startThresholdInFrames);
+
     /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */
             sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
@@ -439,6 +482,19 @@
             void        stop();
             bool        stopped() const;
 
+    /* Call stop() and then wait for all of the callbacks to return.
+     * It is safe to call this if stop() or pause() has already been called.
+     *
+     * This function is called from the destructor. But since AudioTrack
+     * is ref counted, the destructor may be called later than desired.
+     * This can be called explicitly as part of closing an AudioTrack
+     * if you want to be certain that callbacks have completely finished.
+     *
+     * This is not thread safe and should only be called from one thread,
+     * ideally as the AudioTrack is being closed.
+     */
+            void        stopAndJoinCallbacks();
+
     /* Flush a stopped or paused track. All previously buffered data is discarded immediately.
      * This has the effect of draining the buffers without mixing or output.
      * Flush is intended for streaming mode, for example before switching to non-contiguous content.
@@ -486,6 +542,18 @@
      */
             uint32_t    getOriginalSampleRate() const;
 
+    /* Sets the Dual Mono mode presentation on the output device. */
+            status_t    setDualMonoMode(audio_dual_mono_mode_t mode);
+
+    /* Returns the Dual Mono mode presentation setting. */
+            status_t    getDualMonoMode(audio_dual_mono_mode_t* mode) const;
+
+    /* Sets the Audio Description Mix level in dB. */
+            status_t    setAudioDescriptionMixLevel(float leveldB);
+
+    /* Returns the Audio Description Mix level in dB. */
+            status_t    getAudioDescriptionMixLevel(float* leveldB) const;
+
     /* Set source playback rate for timestretch
      * 1.0 is normal speed: < 1.0 is slower, > 1.0 is faster
      * 1.0 is normal pitch: < 1.0 is lower pitch, > 1.0 is higher pitch
@@ -499,7 +567,7 @@
             status_t    setPlaybackRate(const AudioPlaybackRate &playbackRate);
 
     /* Return current playback rate */
-            const AudioPlaybackRate& getPlaybackRate() const;
+            const AudioPlaybackRate& getPlaybackRate();
 
     /* Enables looping and sets the start and end points of looping.
      * Only supported for static buffer mode.
@@ -621,9 +689,7 @@
      *  handle on audio hardware output, or AUDIO_IO_HANDLE_NONE if the
      *  track needed to be re-created but that failed
      */
-private:
             audio_io_handle_t    getOutput() const;
-public:
 
     /* Selects the audio device to use for output of this AudioTrack. A value of
      * AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
@@ -947,6 +1013,23 @@
      */
             audio_port_handle_t getPortId() const { return mPortId; };
 
+    /* Sets the LogSessionId field which is used for metrics association of
+     * this object with other objects. A nullptr or empty string clears
+     * the logSessionId.
+     */
+            void setLogSessionId(const char *logSessionId);
+
+    /* Sets the playerIId field to associate the AudioTrack with an interface managed by
+     * AudioService.
+     *
+     * If this value is not set, then the playerIId is reported as -1
+     * (not associated with an AudioService player interface).
+     *
+     * For metrics purposes, we keep the playerIId association in the native
+     * client AudioTrack so that it remains robust across track restoration.
+     */
+            void setPlayerIId(int playerIId);
+
             void setAudioTrackCallback(const sp<media::IAudioTrackCallback>& callback) {
                 mAudioTrackCallback->setAudioTrackCallback(callback);
             }
@@ -1043,8 +1126,14 @@
 
             void     updateRoutedDeviceId_l();
 
+            /* Sets the Dual Mono mode presentation on the output device. */
+            status_t setDualMonoMode_l(audio_dual_mono_mode_t mode);
+
+            /* Sets the Audio Description Mix level in dB. */
+            status_t setAudioDescriptionMixLevel_l(float leveldB);
+
     // Next 4 fields may be changed if IAudioTrack is re-created, but always != 0
-    sp<IAudioTrack>         mAudioTrack;
+    sp<media::IAudioTrack>  mAudioTrack;
     sp<IMemory>             mCblkMemory;
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
     audio_io_handle_t       mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
@@ -1075,8 +1164,9 @@
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
-    audio_stream_type_t     mStreamType;            // mStreamType == AUDIO_STREAM_DEFAULT implies
-                                                    // this AudioTrack has valid attributes
+    // mOriginalStreamType == AUDIO_STREAM_DEFAULT implies this AudioTrack has valid attributes
+    audio_stream_type_t     mOriginalStreamType = AUDIO_STREAM_DEFAULT;
+    audio_stream_type_t     mStreamType = AUDIO_STREAM_DEFAULT;
     uint32_t                mChannelCount;
     audio_channel_mask_t    mChannelMask;
     sp<IMemory>             mSharedBuffer;
@@ -1210,6 +1300,19 @@
     int                     mAuxEffectId;
     audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
 
+    /**
+     * mPlayerIId is the player id of the AudioTrack used by AudioManager.
+     * For an AudioTrack created by the Java interface, this is generally set once.
+     */
+    int                     mPlayerIId = -1;  // AudioManager.h PLAYER_PIID_INVALID
+
+    /**
+     * mLogSessionId is a string identifying this AudioTrack for the metrics service.
+     * It may be unique or shared with other objects.  An empty string means the
+     * logSessionId is not set.
+     */
+    std::string             mLogSessionId{};
+
     mutable Mutex           mLock;
 
     int                     mPreviousPriority;          // before start()
@@ -1248,11 +1351,14 @@
 
     sp<DeathNotifier>       mDeathNotifier;
     uint32_t                mSequence;              // incremented for each new IAudioTrack attempt
-    uid_t                   mClientUid;
-    pid_t                   mClientPid;
+    AttributionSourceState mClientAttributionSource;
 
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 
+    // Cached values to restore along with the AudioTrack.
+    audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
+    float mAudioDescriptionMixLeveldB = -std::numeric_limits<float>::infinity();
+
 private:
     class MediaMetrics {
       public:
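// --- Hedged illustration (editorial, not part of the patch) ------------------
// A minimal sketch of the AudioTrack additions above (attribution-based
// construction, metrics ids, start threshold, dual-mono / AD mix level, and
// stopAndJoinCallbacks), assuming libaudioclient linkage. The package name,
// threshold, and AUDIO_DUAL_MONO_MODE_LR value are illustrative assumptions.
#include <media/AudioTrack.h>

void exampleAudioTrackLifecycle() {
    using android::AudioTrack;
    using android::content::AttributionSourceState;

    AttributionSourceState attribution;
    attribution.uid = -1;                          // -1: current user ID (see set() doc above)
    attribution.pid = -1;                          // -1: current process ID
    attribution.packageName = "com.example.app";   // hypothetical package name

    android::sp<AudioTrack> track = new AudioTrack(attribution);
    // set(...) and start() are omitted here; this only sketches the new calls.

    track->setLogSessionId("example-log-session"); // nullptr/empty clears the id
    track->setPlayerIId(-1);                       // -1: no AudioService player interface

    // The start threshold may be clamped internally; the return value is the
    // threshold actually applied (negative if the track is not initialized).
    const ssize_t appliedThreshold = track->setStartThresholdInFrames(1024);
    (void)appliedThreshold;

    // Dual-mono presentation and Audio Description mix level are cached so
    // they can be restored if the underlying IAudioTrack is re-created.
    track->setDualMonoMode(AUDIO_DUAL_MONO_MODE_LR);
    track->setAudioDescriptionMixLevel(-12.0f /* dB */);

    // At teardown, ensure all callbacks have returned before releasing.
    track->stopAndJoinCallbacks();
}
// -----------------------------------------------------------------------------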
diff --git a/media/libaudioclient/include/media/AudioVolumeGroup.h b/media/libaudioclient/include/media/AudioVolumeGroup.h
index 9a6ea07..946d58a 100644
--- a/media/libaudioclient/include/media/AudioVolumeGroup.h
+++ b/media/libaudioclient/include/media/AudioVolumeGroup.h
@@ -17,6 +17,8 @@
 
 #pragma once
 
+#include <android/media/AudioVolumeGroup.h>
+#include <media/AidlConversionUtil.h>
 #include <media/AudioProductStrategy.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
@@ -51,4 +53,10 @@
 
 using AudioVolumeGroupVector = std::vector<AudioVolumeGroup>;
 
+// AIDL conversion routines.
+ConversionResult<media::AudioVolumeGroup>
+legacy2aidl_AudioVolumeGroup(const AudioVolumeGroup& legacy);
+ConversionResult<AudioVolumeGroup>
+aidl2legacy_AudioVolumeGroup(const media::AudioVolumeGroup& aidl);
+
 } // namespace android
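// --- Hedged illustration (editorial, not part of the patch) ------------------
// A minimal sketch of the AIDL conversion routines declared above, assuming
// ConversionResult (from AidlConversionUtil.h) exposes ok() and value().
#include <media/AudioVolumeGroup.h>
#include <utils/Errors.h>

android::status_t exampleVolumeGroupRoundTrip(const android::AudioVolumeGroup& legacyGroup,
                                              android::media::AudioVolumeGroup* aidlOut) {
    // Legacy -> AIDL parcelable, e.g. before sending it over binder.
    auto aidl = android::legacy2aidl_AudioVolumeGroup(legacyGroup);
    if (!aidl.ok()) return android::BAD_VALUE;
    *aidlOut = aidl.value();

    // AIDL parcelable -> legacy, e.g. on the receiving side.
    auto roundTripped = android::aidl2legacy_AudioVolumeGroup(aidl.value());
    return roundTripped.ok() ? android::OK : android::BAD_VALUE;
}
// -----------------------------------------------------------------------------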
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 612ce7a..0e059f7 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -24,101 +24,57 @@
 #include <utils/RefBase.h>
 #include <utils/Errors.h>
 #include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
 #include <media/AudioClient.h>
+#include <media/AudioCommonTypes.h>
 #include <media/DeviceDescriptorBase.h>
-#include <media/IAudioTrack.h>
-#include <media/IAudioFlingerClient.h>
 #include <system/audio.h>
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
-#include <media/IEffect.h>
-#include <media/IEffectClient.h>
 #include <utils/String8.h>
 #include <media/MicrophoneInfo.h>
+#include <map>
+#include <string>
 #include <vector>
 
+#include <android/media/AudioVibratorInfo.h>
+#include <android/media/BnAudioFlingerService.h>
+#include <android/media/BpAudioFlingerService.h>
+#include <android/content/AttributionSourceState.h>
+#include "android/media/CreateEffectRequest.h"
+#include "android/media/CreateEffectResponse.h"
+#include "android/media/CreateRecordRequest.h"
+#include "android/media/CreateRecordResponse.h"
+#include "android/media/CreateTrackRequest.h"
+#include "android/media/CreateTrackResponse.h"
 #include "android/media/IAudioRecord.h"
+#include "android/media/IAudioFlingerClient.h"
+#include "android/media/IAudioTrack.h"
 #include "android/media/IAudioTrackCallback.h"
+#include "android/media/IEffect.h"
+#include "android/media/IEffectClient.h"
+#include "android/media/OpenInputRequest.h"
+#include "android/media/OpenInputResponse.h"
+#include "android/media/OpenOutputRequest.h"
+#include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
 
 // ----------------------------------------------------------------------------
 
-class IAudioFlinger : public IInterface
-{
+class IAudioFlinger : public RefBase {
 public:
-    DECLARE_META_INTERFACE(AudioFlinger);
+    static constexpr char DEFAULT_SERVICE_NAME[] = "media.audio_flinger";
+
+    virtual ~IAudioFlinger() = default;
 
     /* CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
      * when calling createTrack() including arguments that will be updated by AudioFlinger
      * and returned in CreateTrackOutput object
      */
-    class CreateTrackInput : public Parcelable {
+    class CreateTrackInput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input arguments*/
-            memset(&attr, 0, sizeof(audio_attributes_t));
-            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
-            memset(&config, 0, sizeof(audio_config_t));
-            if (parcel->read(&config, sizeof(audio_config_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (parcel->readInt32() != 0) {
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                sharedBuffer = interface_cast<IMemory>(parcel->readStrongBinder());
-                if (sharedBuffer == 0 || sharedBuffer->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            notificationsPerBuffer = parcel->readInt32();
-            speed = parcel->readFloat();
-            audioTrackCallback = interface_cast<media::IAudioTrackCallback>(
-                    parcel->readStrongBinder());
-
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input arguments*/
-            (void)parcel->write(&attr, sizeof(audio_attributes_t));
-            (void)parcel->write(&config, sizeof(audio_config_t));
-            (void)clientInfo.writeToParcel(parcel);
-            if (sharedBuffer != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(sharedBuffer));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            (void)parcel->writeInt32(notificationsPerBuffer);
-            (void)parcel->writeFloat(speed);
-            (void)parcel->writeStrongBinder(IInterface::asBinder(audioTrackCallback));
-
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
         /* input */
         audio_attributes_t attr;
         audio_config_t config;
@@ -134,50 +90,17 @@
         size_t notificationFrameCount;
         audio_port_handle_t selectedDeviceId;
         audio_session_t sessionId;
+
+        ConversionResult<media::CreateTrackRequest> toAidl() const;
+        static ConversionResult<CreateTrackInput> fromAidl(const media::CreateTrackRequest& aidl);
     };
 
     /* CreateTrackOutput contains all output arguments returned by AudioFlinger to AudioTrack
      * when calling createTrack() including arguments that were passed as I/O for update by
      * CreateTrackInput.
      */
-    class CreateTrackOutput : public Parcelable {
+    class CreateTrackOutput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_output_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            sampleRate = parcel->readUint32();
-            afFrameCount = parcel->readInt64();
-            afSampleRate = parcel->readInt64();
-            afLatencyMs = parcel->readInt32();
-            (void)parcel->read(&outputId, sizeof(audio_io_handle_t));
-            (void)parcel->read(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_output_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            (void)parcel->writeUint32(sampleRate);
-            (void)parcel->writeInt64(afFrameCount);
-            (void)parcel->writeInt64(afSampleRate);
-            (void)parcel->writeInt32(afLatencyMs);
-            (void)parcel->write(&outputId, sizeof(audio_io_handle_t));
-            (void)parcel->write(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
         /* input/output */
         audio_output_flags_t flags;
         size_t frameCount;
@@ -187,70 +110,30 @@
 
         /* output */
         uint32_t sampleRate;
+        audio_stream_type_t streamType;
         size_t   afFrameCount;
         uint32_t afSampleRate;
         uint32_t afLatencyMs;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
+        sp<media::IAudioTrack> audioTrack;
+
+        ConversionResult<media::CreateTrackResponse> toAidl() const;
+        static ConversionResult<CreateTrackOutput> fromAidl(const media::CreateTrackResponse& aidl);
     };
 
     /* CreateRecordInput contains all input arguments sent by AudioRecord to AudioFlinger
      * when calling createRecord() including arguments that will be updated by AudioFlinger
      * and returned in CreateRecordOutput object
      */
-    class CreateRecordInput : public Parcelable {
+    class CreateRecordInput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input arguments*/
-            memset(&attr, 0, sizeof(audio_attributes_t));
-            if (parcel->read(&attr, sizeof(audio_attributes_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            attr.tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE -1] = '\0';
-            memset(&config, 0, sizeof(audio_config_base_t));
-            if (parcel->read(&config, sizeof(audio_config_base_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            if (clientInfo.readFromParcel(parcel) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-            opPackageName = parcel->readString16();
-            if (parcel->read(&riid, sizeof(audio_unique_id_t)) != NO_ERROR) {
-                return DEAD_OBJECT;
-            }
-
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input arguments*/
-            (void)parcel->write(&attr, sizeof(audio_attributes_t));
-            (void)parcel->write(&config, sizeof(audio_config_base_t));
-            (void)clientInfo.writeToParcel(parcel);
-            (void)parcel->writeString16(opPackageName);
-            (void)parcel->write(&riid, sizeof(audio_unique_id_t));
-
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-            return NO_ERROR;
-        }
-
         /* input */
         audio_attributes_t attr;
         audio_config_base_t config;
         AudioClient clientInfo;
-        String16 opPackageName;
         audio_unique_id_t riid;
+        int32_t maxSharedAudioHistoryMs;
 
         /* input/output */
         audio_input_flags_t flags;
@@ -258,77 +141,17 @@
         size_t notificationFrameCount;
         audio_port_handle_t selectedDeviceId;
         audio_session_t sessionId;
+
+        ConversionResult<media::CreateRecordRequest> toAidl() const;
+        static ConversionResult<CreateRecordInput> fromAidl(const media::CreateRecordRequest& aidl);
     };
 
     /* CreateRecordOutput contains all output arguments returned by AudioFlinger to AudioRecord
      * when calling createRecord() including arguments that were passed as I/O for update by
      * CreateRecordInput.
      */
-    class CreateRecordOutput : public Parcelable {
+    class CreateRecordOutput {
     public:
-        status_t readFromParcel(const Parcel *parcel) override {
-            /* input/output arguments*/
-            (void)parcel->read(&flags, sizeof(audio_input_flags_t));
-            frameCount = parcel->readInt64();
-            notificationFrameCount = parcel->readInt64();
-            (void)parcel->read(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->read(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            sampleRate = parcel->readUint32();
-            (void)parcel->read(&inputId, sizeof(audio_io_handle_t));
-            if (parcel->readInt32() != 0) {
-                cblk = interface_cast<IMemory>(parcel->readStrongBinder());
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                if (cblk == 0 || cblk->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            if (parcel->readInt32() != 0) {
-                buffers = interface_cast<IMemory>(parcel->readStrongBinder());
-                // TODO: Using unsecurePointer() has some associated security
-                //       pitfalls (see declaration for details).
-                //       Either document why it is safe in this case or address
-                //       the issue (e.g. by copying).
-                if (buffers == 0 || buffers->unsecurePointer() == NULL) {
-                    return BAD_VALUE;
-                }
-            }
-            (void)parcel->read(&portId, sizeof(audio_port_handle_t));
-            return NO_ERROR;
-        }
-
-        status_t writeToParcel(Parcel *parcel) const override {
-            /* input/output arguments*/
-            (void)parcel->write(&flags, sizeof(audio_input_flags_t));
-            (void)parcel->writeInt64(frameCount);
-            (void)parcel->writeInt64(notificationFrameCount);
-            (void)parcel->write(&selectedDeviceId, sizeof(audio_port_handle_t));
-            (void)parcel->write(&sessionId, sizeof(audio_session_t));
-
-            /* output arguments*/
-            (void)parcel->writeUint32(sampleRate);
-            (void)parcel->write(&inputId, sizeof(audio_io_handle_t));
-            if (cblk != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(cblk));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            if (buffers != 0) {
-                (void)parcel->writeInt32(1);
-                (void)parcel->writeStrongBinder(IInterface::asBinder(buffers));
-            } else {
-                (void)parcel->writeInt32(0);
-            }
-            (void)parcel->write(&portId, sizeof(audio_port_handle_t));
-
-            return NO_ERROR;
-        }
-
         /* input/output */
         audio_input_flags_t flags;
         size_t frameCount;
@@ -342,21 +165,26 @@
         sp<IMemory> cblk;
         sp<IMemory> buffers;
         audio_port_handle_t portId;
+        sp<media::IAudioRecord> audioRecord;
+
+        ConversionResult<media::CreateRecordResponse> toAidl() const;
+        static ConversionResult<CreateRecordOutput>
+        fromAidl(const media::CreateRecordResponse& aidl);
     };
 
-    // invariant on exit for all APIs that return an sp<>:
-    //   (return value != 0) == (*status == NO_ERROR)
-
     /* create an audio track and registers it with AudioFlinger.
-     * return null if the track cannot be created.
+     * The audioTrack field will be null if the track cannot be created, and the status will
+     * reflect the failure.
      */
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status) = 0;
+    virtual status_t createTrack(const media::CreateTrackRequest& input,
+                                 media::CreateTrackResponse& output) = 0;
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                        CreateRecordOutput& output,
-                                        status_t *status) = 0;
+    /* Creates an audio record and registers it with AudioFlinger.
+     * The audioRecord field will be null if the record cannot be created, and the status will
+     * reflect the failure.
+     */
+    virtual status_t createRecord(const media::CreateRecordRequest& input,
+                                  media::CreateRecordResponse& output) = 0;
 
     // FIXME Surprisingly, format/latency don't work for input handles
 
@@ -412,32 +240,24 @@
     // Register an object to receive audio input/output change and track notifications.
     // For a given calling pid, AudioFlinger disregards any registrations after the first.
     // Thus the IAudioFlingerClient must be a singleton per process.
-    virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0;
+    virtual void registerClient(const sp<media::IAudioFlingerClient>& client) = 0;
 
     // retrieve the audio recording buffer size in bytes
     // FIXME This API assumes a route, and so should be deprecated.
     virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
             audio_channel_mask_t channelMask) const = 0;
 
-    virtual status_t openOutput(audio_module_handle_t module,
-                                audio_io_handle_t *output,
-                                audio_config_t *config,
-                                const sp<DeviceDescriptorBase>& device,
-                                uint32_t *latencyMs,
-                                audio_output_flags_t flags) = 0;
+    virtual status_t openOutput(const media::OpenOutputRequest& request,
+                                media::OpenOutputResponse* response) = 0;
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
                                     audio_io_handle_t output2) = 0;
     virtual status_t closeOutput(audio_io_handle_t output) = 0;
     virtual status_t suspendOutput(audio_io_handle_t output) = 0;
     virtual status_t restoreOutput(audio_io_handle_t output) = 0;
 
-    virtual status_t openInput(audio_module_handle_t module,
-                               audio_io_handle_t *input,
-                               audio_config_t *config,
-                               audio_devices_t *device,
-                               const String8& address,
-                               audio_source_t source,
-                               audio_input_flags_t flags) = 0;
+    virtual status_t openInput(const media::OpenInputRequest& request,
+                               media::OpenInputResponse* response) = 0;
+
     virtual status_t closeInput(audio_io_handle_t input) = 0;
 
     virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
@@ -463,20 +283,8 @@
                                          uint32_t preferredTypeFlag,
                                          effect_descriptor_t *pDescriptor) const = 0;
 
-    virtual sp<IEffect> createEffect(
-                                    effect_descriptor_t *pDesc,
-                                    const sp<IEffectClient>& client,
-                                    int32_t priority,
-                                    // AudioFlinger doesn't take over handle reference from client
-                                    audio_io_handle_t output,
-                                    audio_session_t sessionId,
-                                    const AudioDeviceTypeAddr& device,
-                                    const String16& callingPackage,
-                                    pid_t pid,
-                                    bool probe,
-                                    status_t *status,
-                                    int *id,
-                                    int *enabled) = 0;
+    virtual status_t createEffect(const media::CreateEffectRequest& request,
+                                  media::CreateEffectResponse* response) = 0;
 
     virtual status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
                                     audio_io_handle_t dstOutput) = 0;
@@ -499,12 +307,8 @@
     // is obtained from android.app.ActivityManager.MemoryInfo.totalMem.
     virtual status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) = 0;
 
-    /* List available audio ports and their attributes */
-    virtual status_t listAudioPorts(unsigned int *num_ports,
-                                    struct audio_port *ports) = 0;
-
     /* Get attributes for a given audio port */
-    virtual status_t getAudioPort(struct audio_port *port) = 0;
+    virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
 
     /* Create an audio patch between several source and sink ports */
     virtual status_t createAudioPatch(const struct audio_patch *patch,
@@ -532,24 +336,304 @@
     virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids) = 0;
+
+    // Set vibrators' information.
+    // The values will be used to initialize HapticGenerator.
+    virtual status_t setVibratorInfos(
+            const std::vector<media::AudioVibratorInfo>& vibratorInfos) = 0;
+
+    virtual status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
 };
 
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlinger : public BnInterface<IAudioFlinger>
-{
+/**
+ * A client-side adapter, wrapping an IAudioFlingerService instance and presenting it as an
+ * IAudioFlinger. Intended to be used by legacy client code that was written against IAudioFlinger,
+ * before IAudioFlingerService was introduced as an AIDL service.
+ * New clients should not use this adapter, but rather IAudioFlingerService directly, via
+ * BpAudioFlingerService.
+ */
+class AudioFlingerClientAdapter : public IAudioFlinger {
 public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
+    explicit AudioFlingerClientAdapter(const sp<media::IAudioFlingerService> delegate);
 
-    // Requests media.log to start merging log buffers
-    virtual void requestLogMerge() = 0;
+    status_t createTrack(const media::CreateTrackRequest& input,
+                         media::CreateTrackResponse& output) override;
+    status_t createRecord(const media::CreateRecordRequest& input,
+                          media::CreateRecordResponse& output) override;
+    uint32_t sampleRate(audio_io_handle_t ioHandle) const override;
+    audio_format_t format(audio_io_handle_t output) const override;
+    size_t frameCount(audio_io_handle_t ioHandle) const override;
+    uint32_t latency(audio_io_handle_t output) const override;
+    status_t setMasterVolume(float value) override;
+    status_t setMasterMute(bool muted) override;
+    float masterVolume() const override;
+    bool masterMute() const override;
+    status_t setMasterBalance(float balance) override;
+    status_t getMasterBalance(float* balance) const override;
+    status_t setStreamVolume(audio_stream_type_t stream, float value,
+                             audio_io_handle_t output) override;
+    status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
+    float streamVolume(audio_stream_type_t stream,
+                       audio_io_handle_t output) const override;
+    bool streamMute(audio_stream_type_t stream) const override;
+    status_t setMode(audio_mode_t mode) override;
+    status_t setMicMute(bool state) override;
+    bool getMicMute() const override;
+    void setRecordSilenced(audio_port_handle_t portId, bool silenced) override;
+    status_t setParameters(audio_io_handle_t ioHandle,
+                           const String8& keyValuePairs) override;
+    String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const override;
+    void registerClient(const sp<media::IAudioFlingerClient>& client) override;
+    size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
+                              audio_channel_mask_t channelMask) const override;
+    status_t openOutput(const media::OpenOutputRequest& request,
+                        media::OpenOutputResponse* response) override;
+    audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
+                                          audio_io_handle_t output2) override;
+    status_t closeOutput(audio_io_handle_t output) override;
+    status_t suspendOutput(audio_io_handle_t output) override;
+    status_t restoreOutput(audio_io_handle_t output) override;
+    status_t openInput(const media::OpenInputRequest& request,
+                       media::OpenInputResponse* response) override;
+    status_t closeInput(audio_io_handle_t input) override;
+    status_t invalidateStream(audio_stream_type_t stream) override;
+    status_t setVoiceVolume(float volume) override;
+    status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
+                               audio_io_handle_t output) const override;
+    uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const override;
+    audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use) override;
+    void acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) override;
+    void releaseAudioSessionId(audio_session_t audioSession, pid_t pid) override;
+    status_t queryNumberEffects(uint32_t* numEffects) const override;
+    status_t queryEffect(uint32_t index, effect_descriptor_t* pDescriptor) const override;
+    status_t getEffectDescriptor(const effect_uuid_t* pEffectUUID,
+                                 const effect_uuid_t* pTypeUUID,
+                                 uint32_t preferredTypeFlag,
+                                 effect_descriptor_t* pDescriptor) const override;
+    status_t createEffect(const media::CreateEffectRequest& request,
+                          media::CreateEffectResponse* response) override;
+    status_t moveEffects(audio_session_t session, audio_io_handle_t srcOutput,
+                         audio_io_handle_t dstOutput) override;
+    void setEffectSuspended(int effectId,
+                            audio_session_t sessionId,
+                            bool suspended) override;
+    audio_module_handle_t loadHwModule(const char* name) override;
+    uint32_t getPrimaryOutputSamplingRate() override;
+    size_t getPrimaryOutputFrameCount() override;
+    status_t setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
+    status_t getAudioPort(struct audio_port_v7* port) override;
+    status_t createAudioPatch(const struct audio_patch* patch,
+                              audio_patch_handle_t* handle) override;
+    status_t releaseAudioPatch(audio_patch_handle_t handle) override;
+    status_t listAudioPatches(unsigned int* num_patches,
+                              struct audio_patch* patches) override;
+    status_t setAudioPortConfig(const struct audio_port_config* config) override;
+    audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) override;
+    status_t systemReady() override;
+    size_t frameCountHAL(audio_io_handle_t ioHandle) const override;
+    status_t getMicrophones(std::vector<media::MicrophoneInfo>* microphones) override;
+    status_t setAudioHalPids(const std::vector<pid_t>& pids) override;
+    status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
+    status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
+
+private:
+    const sp<media::IAudioFlingerService> mDelegate;
 };
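// --- Hedged illustration (editorial, not part of the patch) ------------------
// A minimal sketch of how legacy client code might obtain an IAudioFlinger via
// the adapter above, by looking up the AIDL IAudioFlingerService under
// DEFAULT_SERVICE_NAME. The direct getService() lookup is an assumption;
// clients typically go through AudioSystem::get_audio_flinger() instead.
#include <binder/IServiceManager.h>

android::sp<android::IAudioFlinger> exampleAcquireAudioFlinger() {
    using android::IAudioFlinger;
    using android::media::IAudioFlingerService;

    android::sp<android::IBinder> binder = android::defaultServiceManager()->getService(
            android::String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
    if (binder == nullptr) {
        return nullptr;
    }
    // Wrap the AIDL proxy so existing IAudioFlinger call sites keep working.
    return new android::AudioFlingerClientAdapter(
            android::interface_cast<IAudioFlingerService>(binder));
}
// -----------------------------------------------------------------------------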
 
-// ----------------------------------------------------------------------------
+/**
+ * A server-side adapter, wrapping an IAudioFlinger instance and presenting it as an
+ * IAudioFlingerService. Intended to be used by legacy server code that was written against
+ * IAudioFlinger, before IAudioFlingerService was introduced as an AIDL service.
+ * New servers should not use this adapter, but rather implement IAudioFlingerService directly, via
+ * BnAudioFlingerService.
+ */
+class AudioFlingerServerAdapter : public media::BnAudioFlingerService {
+public:
+    using Status = binder::Status;
+
+    /**
+     * Legacy server should implement this interface in order to be wrapped.
+     */
+    class Delegate : public IAudioFlinger {
+    protected:
+        friend class AudioFlingerServerAdapter;
+
+        enum class TransactionCode {
+            CREATE_TRACK = media::BnAudioFlingerService::TRANSACTION_createTrack,
+            CREATE_RECORD = media::BnAudioFlingerService::TRANSACTION_createRecord,
+            SAMPLE_RATE = media::BnAudioFlingerService::TRANSACTION_sampleRate,
+            FORMAT = media::BnAudioFlingerService::TRANSACTION_format,
+            FRAME_COUNT = media::BnAudioFlingerService::TRANSACTION_frameCount,
+            LATENCY = media::BnAudioFlingerService::TRANSACTION_latency,
+            SET_MASTER_VOLUME = media::BnAudioFlingerService::TRANSACTION_setMasterVolume,
+            SET_MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_setMasterMute,
+            MASTER_VOLUME = media::BnAudioFlingerService::TRANSACTION_masterVolume,
+            MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
+            SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
+            SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
+            STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
+            STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
+            SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
+            SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
+            GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
+            SET_RECORD_SILENCED = media::BnAudioFlingerService::TRANSACTION_setRecordSilenced,
+            SET_PARAMETERS = media::BnAudioFlingerService::TRANSACTION_setParameters,
+            GET_PARAMETERS = media::BnAudioFlingerService::TRANSACTION_getParameters,
+            REGISTER_CLIENT = media::BnAudioFlingerService::TRANSACTION_registerClient,
+            GET_INPUTBUFFERSIZE = media::BnAudioFlingerService::TRANSACTION_getInputBufferSize,
+            OPEN_OUTPUT = media::BnAudioFlingerService::TRANSACTION_openOutput,
+            OPEN_DUPLICATE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_openDuplicateOutput,
+            CLOSE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_closeOutput,
+            SUSPEND_OUTPUT = media::BnAudioFlingerService::TRANSACTION_suspendOutput,
+            RESTORE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_restoreOutput,
+            OPEN_INPUT = media::BnAudioFlingerService::TRANSACTION_openInput,
+            CLOSE_INPUT = media::BnAudioFlingerService::TRANSACTION_closeInput,
+            INVALIDATE_STREAM = media::BnAudioFlingerService::TRANSACTION_invalidateStream,
+            SET_VOICE_VOLUME = media::BnAudioFlingerService::TRANSACTION_setVoiceVolume,
+            GET_RENDER_POSITION = media::BnAudioFlingerService::TRANSACTION_getRenderPosition,
+            GET_INPUT_FRAMES_LOST = media::BnAudioFlingerService::TRANSACTION_getInputFramesLost,
+            NEW_AUDIO_UNIQUE_ID = media::BnAudioFlingerService::TRANSACTION_newAudioUniqueId,
+            ACQUIRE_AUDIO_SESSION_ID = media::BnAudioFlingerService::TRANSACTION_acquireAudioSessionId,
+            RELEASE_AUDIO_SESSION_ID = media::BnAudioFlingerService::TRANSACTION_releaseAudioSessionId,
+            QUERY_NUM_EFFECTS = media::BnAudioFlingerService::TRANSACTION_queryNumberEffects,
+            QUERY_EFFECT = media::BnAudioFlingerService::TRANSACTION_queryEffect,
+            GET_EFFECT_DESCRIPTOR = media::BnAudioFlingerService::TRANSACTION_getEffectDescriptor,
+            CREATE_EFFECT = media::BnAudioFlingerService::TRANSACTION_createEffect,
+            MOVE_EFFECTS = media::BnAudioFlingerService::TRANSACTION_moveEffects,
+            LOAD_HW_MODULE = media::BnAudioFlingerService::TRANSACTION_loadHwModule,
+            GET_PRIMARY_OUTPUT_SAMPLING_RATE = media::BnAudioFlingerService::TRANSACTION_getPrimaryOutputSamplingRate,
+            GET_PRIMARY_OUTPUT_FRAME_COUNT = media::BnAudioFlingerService::TRANSACTION_getPrimaryOutputFrameCount,
+            SET_LOW_RAM_DEVICE = media::BnAudioFlingerService::TRANSACTION_setLowRamDevice,
+            GET_AUDIO_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioPort,
+            CREATE_AUDIO_PATCH = media::BnAudioFlingerService::TRANSACTION_createAudioPatch,
+            RELEASE_AUDIO_PATCH = media::BnAudioFlingerService::TRANSACTION_releaseAudioPatch,
+            LIST_AUDIO_PATCHES = media::BnAudioFlingerService::TRANSACTION_listAudioPatches,
+            SET_AUDIO_PORT_CONFIG = media::BnAudioFlingerService::TRANSACTION_setAudioPortConfig,
+            GET_AUDIO_HW_SYNC_FOR_SESSION = media::BnAudioFlingerService::TRANSACTION_getAudioHwSyncForSession,
+            SYSTEM_READY = media::BnAudioFlingerService::TRANSACTION_systemReady,
+            FRAME_COUNT_HAL = media::BnAudioFlingerService::TRANSACTION_frameCountHAL,
+            GET_MICROPHONES = media::BnAudioFlingerService::TRANSACTION_getMicrophones,
+            SET_MASTER_BALANCE = media::BnAudioFlingerService::TRANSACTION_setMasterBalance,
+            GET_MASTER_BALANCE = media::BnAudioFlingerService::TRANSACTION_getMasterBalance,
+            SET_EFFECT_SUSPENDED = media::BnAudioFlingerService::TRANSACTION_setEffectSuspended,
+            SET_AUDIO_HAL_PIDS = media::BnAudioFlingerService::TRANSACTION_setAudioHalPids,
+            SET_VIBRATOR_INFOS = media::BnAudioFlingerService::TRANSACTION_setVibratorInfos,
+            UPDATE_SECONDARY_OUTPUTS = media::BnAudioFlingerService::TRANSACTION_updateSecondaryOutputs,
+        };
+
+        /**
+         * An optional hook, called on every transaction, allowing additional operations to be
+         * performed before/after unparceling the data and dispatching to the respective
+         * method. Useful for cross-cutting concerns, such as logging or permission checks.
+         * The implementer is responsible for invoking the provided delegate function, which is the
+         * actual onTransact(), unless an error occurs.
+         * By default, this is just a pass-through to the delegate.
+         */
+        virtual status_t onTransactWrapper(TransactionCode code,
+                                           const Parcel& data,
+                                           uint32_t flags,
+                                           const std::function<status_t()>& delegate) {
+            (void) code;
+            (void) data;
+            (void) flags;
+            return delegate();
+        }
+
+        /**
+         * An optional hook for implementing diagnostics dumping.
+         */
+        virtual status_t dump(int fd, const Vector<String16>& args) {
+            (void) fd;
+            (void) args;
+            return OK;
+        }
+    };
+
+    explicit AudioFlingerServerAdapter(
+            const sp<AudioFlingerServerAdapter::Delegate>& delegate);
+
+    status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
+    status_t dump(int fd, const Vector<String16>& args) override;
+
+    Status createTrack(const media::CreateTrackRequest& request,
+                       media::CreateTrackResponse* _aidl_return) override;
+    Status createRecord(const media::CreateRecordRequest& request,
+                        media::CreateRecordResponse* _aidl_return) override;
+    Status sampleRate(int32_t ioHandle, int32_t* _aidl_return) override;
+    Status format(int32_t output, media::audio::common::AudioFormat* _aidl_return) override;
+    Status frameCount(int32_t ioHandle, int64_t* _aidl_return) override;
+    Status latency(int32_t output, int32_t* _aidl_return) override;
+    Status setMasterVolume(float value) override;
+    Status setMasterMute(bool muted) override;
+    Status masterVolume(float* _aidl_return) override;
+    Status masterMute(bool* _aidl_return) override;
+    Status setMasterBalance(float balance) override;
+    Status getMasterBalance(float* _aidl_return) override;
+    Status setStreamVolume(media::AudioStreamType stream, float value, int32_t output) override;
+    Status setStreamMute(media::AudioStreamType stream, bool muted) override;
+    Status
+    streamVolume(media::AudioStreamType stream, int32_t output, float* _aidl_return) override;
+    Status streamMute(media::AudioStreamType stream, bool* _aidl_return) override;
+    Status setMode(media::AudioMode mode) override;
+    Status setMicMute(bool state) override;
+    Status getMicMute(bool* _aidl_return) override;
+    Status setRecordSilenced(int32_t portId, bool silenced) override;
+    Status setParameters(int32_t ioHandle, const std::string& keyValuePairs) override;
+    Status
+    getParameters(int32_t ioHandle, const std::string& keys, std::string* _aidl_return) override;
+    Status registerClient(const sp<media::IAudioFlingerClient>& client) override;
+    Status getInputBufferSize(int32_t sampleRate, media::audio::common::AudioFormat format,
+                              int32_t channelMask, int64_t* _aidl_return) override;
+    Status openOutput(const media::OpenOutputRequest& request,
+                      media::OpenOutputResponse* _aidl_return) override;
+    Status openDuplicateOutput(int32_t output1, int32_t output2, int32_t* _aidl_return) override;
+    Status closeOutput(int32_t output) override;
+    Status suspendOutput(int32_t output) override;
+    Status restoreOutput(int32_t output) override;
+    Status openInput(const media::OpenInputRequest& request,
+                     media::OpenInputResponse* _aidl_return) override;
+    Status closeInput(int32_t input) override;
+    Status invalidateStream(media::AudioStreamType stream) override;
+    Status setVoiceVolume(float volume) override;
+    Status getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) override;
+    Status getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) override;
+    Status newAudioUniqueId(media::AudioUniqueIdUse use, int32_t* _aidl_return) override;
+    Status acquireAudioSessionId(int32_t audioSession, int32_t pid, int32_t uid) override;
+    Status releaseAudioSessionId(int32_t audioSession, int32_t pid) override;
+    Status queryNumberEffects(int32_t* _aidl_return) override;
+    Status queryEffect(int32_t index, media::EffectDescriptor* _aidl_return) override;
+    Status getEffectDescriptor(const media::AudioUuid& effectUUID, const media::AudioUuid& typeUUID,
+                               int32_t preferredTypeFlag,
+                               media::EffectDescriptor* _aidl_return) override;
+    Status createEffect(const media::CreateEffectRequest& request,
+                        media::CreateEffectResponse* _aidl_return) override;
+    Status moveEffects(int32_t session, int32_t srcOutput, int32_t dstOutput) override;
+    Status setEffectSuspended(int32_t effectId, int32_t sessionId, bool suspended) override;
+    Status loadHwModule(const std::string& name, int32_t* _aidl_return) override;
+    Status getPrimaryOutputSamplingRate(int32_t* _aidl_return) override;
+    Status getPrimaryOutputFrameCount(int64_t* _aidl_return) override;
+    Status setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
+    Status getAudioPort(const media::AudioPort& port, media::AudioPort* _aidl_return) override;
+    Status createAudioPatch(const media::AudioPatch& patch, int32_t* _aidl_return) override;
+    Status releaseAudioPatch(int32_t handle) override;
+    Status listAudioPatches(int32_t maxCount,
+                            std::vector<media::AudioPatch>* _aidl_return) override;
+    Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+    Status getAudioHwSyncForSession(int32_t sessionId, int32_t* _aidl_return) override;
+    Status systemReady() override;
+    Status frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) override;
+    Status getMicrophones(std::vector<media::MicrophoneInfoData>* _aidl_return) override;
+    Status setAudioHalPids(const std::vector<int32_t>& pids) override;
+    Status setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
+    Status updateSecondaryOutputs(
+            const std::vector<media::TrackSecondaryOutputInfo>& trackSecondaryOutputInfos) override;
+
+private:
+    const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
+};
 
 }; // namespace android
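
A minimal usage sketch of the server-side adapter above (illustrative only, not part of
this change; "MyAudioFlinger" and the "media.audio_flinger" service name are placeholders,
and the remaining pure-virtual IAudioFlinger methods are elided):

    // A legacy server, still written against IAudioFlinger, derives from Delegate.
    class MyAudioFlinger : public AudioFlingerServerAdapter::Delegate {
        // ... overrides of the legacy IAudioFlinger methods go here ...

    protected:
        // Optional hook: log every transaction, then dispatch to the real handler.
        status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
                                   const std::function<status_t()>& delegate) override {
            ALOGV("IAudioFlinger transaction %d", static_cast<int>(code));
            (void) data;
            (void) flags;
            return delegate();  // invokes the generated onTransact() for this code
        }
    };

    // Publish the wrapped instance as an IAudioFlingerService.
    sp<AudioFlingerServerAdapter> adapter = new AudioFlingerServerAdapter(new MyAudioFlinger());
    defaultServiceManager()->addService(String16("media.audio_flinger"), adapter);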
 
diff --git a/media/libaudioclient/include/media/IAudioFlingerClient.h b/media/libaudioclient/include/media/IAudioFlingerClient.h
deleted file mode 100644
index 0080bc9..0000000
--- a/media/libaudioclient/include/media/IAudioFlingerClient.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOFLINGERCLIENT_H
-#define ANDROID_IAUDIOFLINGERCLIENT_H
-
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <utils/KeyedVector.h>
-#include <system/audio.h>
-#include <media/AudioIoDescriptor.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioFlingerClient : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioFlingerClient);
-
-    // Notifies a change of audio input/output configuration.
-    virtual void ioConfigChanged(audio_io_config_event event,
-                                 const sp<AudioIoDescriptor>& ioDesc) = 0;
-
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlingerClient : public BnInterface<IAudioFlingerClient>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOFLINGERCLIENT_H
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
deleted file mode 100644
index bb1c07f..0000000
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOPOLICYSERVICE_H
-#define ANDROID_IAUDIOPOLICYSERVICE_H
-
-#include <stdint.h>
-#include <sys/types.h>
-#include <unistd.h>
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <media/AudioDeviceTypeAddr.h>
-#include <media/AudioSystem.h>
-#include <media/AudioPolicy.h>
-#include <media/IAudioPolicyServiceClient.h>
-#include <system/audio_policy.h>
-#include <vector>
-
-namespace android {
-namespace media {
-// Must be pre-declared, or else there isn't a good way to generate a header
-// library.
-class ICaptureStateListener;
-}
-
-// ----------------------------------------------------------------------------
-
-class IAudioPolicyService : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioPolicyService);
-
-    //
-    // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions)
-    //
-    virtual void onNewAudioModulesAvailable() = 0;
-    virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                              audio_policy_dev_state_t state,
-                                              const char *device_address,
-                                              const char *device_name,
-                                              audio_format_t encodedFormat) = 0;
-    virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                  const char *device_address) = 0;
-    virtual status_t handleDeviceConfigChange(audio_devices_t device,
-                                              const char *device_address,
-                                              const char *device_name,
-                                              audio_format_t encodedFormat) = 0;
-    virtual status_t setPhoneState(audio_mode_t state, uid_t uid) = 0;
-    virtual status_t setForceUse(audio_policy_force_use_t usage,
-                                    audio_policy_forced_cfg_t config) = 0;
-    virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) = 0;
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
-    virtual status_t getOutputForAttr(audio_attributes_t *attr,
-                                      audio_io_handle_t *output,
-                                      audio_session_t session,
-                                      audio_stream_type_t *stream,
-                                      pid_t pid,
-                                      uid_t uid,
-                                      const audio_config_t *config,
-                                      audio_output_flags_t flags,
-                                      audio_port_handle_t *selectedDeviceId,
-                                      audio_port_handle_t *portId,
-                                      std::vector<audio_io_handle_t> *secondaryOutputs) = 0;
-    virtual status_t startOutput(audio_port_handle_t portId) = 0;
-    virtual status_t stopOutput(audio_port_handle_t portId) = 0;
-    virtual void releaseOutput(audio_port_handle_t portId) = 0;
-    virtual status_t  getInputForAttr(const audio_attributes_t *attr,
-                              audio_io_handle_t *input,
-                              audio_unique_id_t riid,
-                              audio_session_t session,
-                              pid_t pid,
-                              uid_t uid,
-                              const String16& opPackageName,
-                              const audio_config_base_t *config,
-                              audio_input_flags_t flags,
-                              audio_port_handle_t *selectedDeviceId,
-                              audio_port_handle_t *portId) = 0;
-    virtual status_t startInput(audio_port_handle_t portId) = 0;
-    virtual status_t stopInput(audio_port_handle_t portId) = 0;
-    virtual void releaseInput(audio_port_handle_t portId) = 0;
-    virtual status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax) = 0;
-    virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
-                                          int index,
-                                          audio_devices_t device) = 0;
-    virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
-                                          int *index,
-                                          audio_devices_t device) = 0;
-
-    virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                 int index,
-                                                 audio_devices_t device) = 0;
-    virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                 int &index,
-                                                 audio_devices_t device) = 0;
-    virtual status_t getMaxVolumeIndexForAttributes(const audio_attributes_t &attr, int &index) = 0;
-
-    virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index) = 0;
-
-    virtual uint32_t getStrategyForStream(audio_stream_type_t stream) = 0;
-    virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
-    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
-            AudioDeviceTypeAddrVector *devices) const = 0;
-    virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
-    virtual status_t registerEffect(const effect_descriptor_t *desc,
-                                    audio_io_handle_t io,
-                                    uint32_t strategy,
-                                    audio_session_t session,
-                                    int id) = 0;
-    virtual status_t unregisterEffect(int id) = 0;
-    virtual status_t setEffectEnabled(int id, bool enabled) = 0;
-    virtual status_t moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) = 0;
-    virtual bool     isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const = 0;
-    virtual bool     isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0)
-                             const = 0;
-    virtual bool     isSourceActive(audio_source_t source) const = 0;
-    virtual status_t queryDefaultPreProcessing(audio_session_t audioSession,
-                                              effect_descriptor_t *descriptors,
-                                              uint32_t *count) = 0;
-    virtual status_t addSourceDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_source_t source,
-                                            audio_unique_id_t* id) = 0;
-    virtual status_t addStreamDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_usage_t usage,
-                                            audio_unique_id_t* id) = 0;
-    virtual status_t removeSourceDefaultEffect(audio_unique_id_t id) = 0;
-    virtual status_t removeStreamDefaultEffect(audio_unique_id_t id) = 0;
-    virtual status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) = 0;
-    virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
-   // Check if offload is possible for given format, stream type, sample rate,
-    // bit rate, duration, video and streaming or offload property is enabled
-    virtual bool isOffloadSupported(const audio_offload_info_t& info) = 0;
-
-    // Check if direct playback is possible for given format, sample rate, channel mask and flags.
-    virtual bool isDirectOutputSupported(const audio_config_base_t& config,
-                                         const audio_attributes_t& attributes) = 0;
-
-    /* List available audio ports and their attributes */
-    virtual status_t listAudioPorts(audio_port_role_t role,
-                                    audio_port_type_t type,
-                                    unsigned int *num_ports,
-                                    struct audio_port *ports,
-                                    unsigned int *generation) = 0;
-
-    /* Get attributes for a given audio port */
-    virtual status_t getAudioPort(struct audio_port *port) = 0;
-
-    /* Create an audio patch between several source and sink ports */
-    virtual status_t createAudioPatch(const struct audio_patch *patch,
-                                       audio_patch_handle_t *handle) = 0;
-
-    /* Release an audio patch */
-    virtual status_t releaseAudioPatch(audio_patch_handle_t handle) = 0;
-
-    /* List existing audio patches */
-    virtual status_t listAudioPatches(unsigned int *num_patches,
-                                      struct audio_patch *patches,
-                                      unsigned int *generation) = 0;
-    /* Set audio port configuration */
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
-
-    virtual void registerClient(const sp<IAudioPolicyServiceClient>& client) = 0;
-
-    virtual void setAudioPortCallbacksEnabled(bool enabled) = 0;
-
-    virtual void setAudioVolumeGroupCallbacksEnabled(bool enabled) = 0;
-
-    virtual status_t acquireSoundTriggerSession(audio_session_t *session,
-                                           audio_io_handle_t *ioHandle,
-                                           audio_devices_t *device) = 0;
-
-    virtual status_t releaseSoundTriggerSession(audio_session_t session) = 0;
-
-    virtual audio_mode_t getPhoneState() = 0;
-
-    virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) = 0;
-
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
-            = 0;
-
-    virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
-
-    virtual status_t setUserIdDeviceAffinities(int userId,
-            const Vector<AudioDeviceTypeAddr>& devices) = 0;
-
-    virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
-
-    virtual status_t startAudioSource(const struct audio_port_config *source,
-                                      const audio_attributes_t *attributes,
-                                      audio_port_handle_t *portId) = 0;
-    virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
-
-    virtual status_t setMasterMono(bool mono) = 0;
-    virtual status_t getMasterMono(bool *mono) = 0;
-    virtual float    getStreamVolumeDB(
-            audio_stream_type_t stream, int index, audio_devices_t device) = 0;
-
-    virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
-                                        audio_format_t *surroundFormats,
-                                        bool *surroundFormatsEnabled,
-                                        bool reported) = 0;
-    virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                                        std::vector<audio_format_t> *formats) = 0;
-    virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) = 0;
-
-    virtual status_t setAssistantUid(uid_t uid) = 0;
-    virtual status_t setA11yServicesUids(const std::vector<uid_t>& uids) = 0;
-    virtual status_t setCurrentImeUid(uid_t uid) = 0;
-
-    virtual bool     isHapticPlaybackSupported() = 0;
-    virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
-    virtual status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                           product_strategy_t &productStrategy) = 0;
-
-    virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) = 0;
-    virtual status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                       volume_group_t &volumeGroup) = 0;
-
-    virtual status_t setRttEnabled(bool enabled) = 0;
-
-    virtual bool     isCallScreenModeSupported() = 0;
-
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) = 0;
-
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
-
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) = 0;
-
-    // The return code here is only intended to represent transport errors. The
-    // actual server implementation should always return NO_ERROR.
-    virtual status_t registerSoundTriggerCaptureStateListener(
-        const sp<media::ICaptureStateListener>& listener,
-        bool* result) = 0;
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioPolicyService : public BnInterface<IAudioPolicyService>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-private:
-    void sanetizeAudioAttributes(audio_attributes_t* attr);
-    status_t sanitizeEffectDescriptor(effect_descriptor_t* desc);
-    status_t sanitizeAudioPortConfig(struct audio_port_config* config);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOPOLICYSERVICE_H
diff --git a/media/libaudioclient/include/media/IAudioPolicyServiceClient.h b/media/libaudioclient/include/media/IAudioPolicyServiceClient.h
deleted file mode 100644
index 47b31ee..0000000
--- a/media/libaudioclient/include/media/IAudioPolicyServiceClient.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOPOLICYSERVICECLIENT_H
-#define ANDROID_IAUDIOPOLICYSERVICECLIENT_H
-
-#include <vector>
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <system/audio.h>
-#include <system/audio_effect.h>
-#include <media/AudioPolicy.h>
-#include <media/AudioVolumeGroup.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-struct record_client_info {
-    audio_unique_id_t riid;
-    uid_t uid;
-    audio_session_t session;
-    audio_source_t source;
-    audio_port_handle_t port_id;
-    bool silenced;
-};
-
-typedef struct record_client_info record_client_info_t;
-
-// ----------------------------------------------------------------------------
-
-class IAudioPolicyServiceClient : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioPolicyServiceClient);
-
-    // Notifies a change of volume group
-    virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags) = 0;
-    // Notifies a change of audio port configuration.
-    virtual void onAudioPortListUpdate() = 0;
-    // Notifies a change of audio patch configuration.
-    virtual void onAudioPatchListUpdate() = 0;
-    // Notifies a change in the mixing state of a specific mix in a dynamic audio policy
-    virtual void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state) = 0;
-    // Notifies a change of audio recording configuration
-    virtual void onRecordingConfigurationUpdate(int event,
-            const record_client_info_t *clientInfo,
-            const audio_config_base_t *clientConfig,
-            std::vector<effect_descriptor_t> clientEffects,
-            const audio_config_base_t *deviceConfig,
-            std::vector<effect_descriptor_t> effects,
-            audio_patch_handle_t patchHandle,
-            audio_source_t source) = 0;
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioPolicyServiceClient : public BnInterface<IAudioPolicyServiceClient>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOPOLICYSERVICECLIENT_H
diff --git a/media/libaudioclient/include/media/IAudioTrack.h b/media/libaudioclient/include/media/IAudioTrack.h
deleted file mode 100644
index 06e786d..0000000
--- a/media/libaudioclient/include/media/IAudioTrack.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOTRACK_H
-#define ANDROID_IAUDIOTRACK_H
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <utils/RefBase.h>
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <binder/IMemory.h>
-#include <utils/String8.h>
-#include <media/AudioTimestamp.h>
-#include <media/VolumeShaper.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioTrack : public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(AudioTrack);
-
-    /* Get this track's control block */
-    virtual sp<IMemory> getCblk() const = 0;
-
-    /* After it's created the track is not active. Call start() to
-     * make it active.
-     */
-    virtual status_t    start() = 0;
-
-    /* Stop a track. If set, the callback will cease being called and
-     * obtainBuffer will return an error. Buffers that are already released
-     * will continue to be processed, unless/until flush() is called.
-     */
-    virtual void        stop() = 0;
-
-    /* Flush a stopped or paused track. All pending/released buffers are discarded.
-     * This function has no effect if the track is not stopped or paused.
-     */
-    virtual void        flush() = 0;
-
-    /* Pause a track. If set, the callback will cease being called and
-     * obtainBuffer will return an error. Buffers that are already released
-     * will continue to be processed, unless/until flush() is called.
-     */
-    virtual void        pause() = 0;
-
-    /* Attach track auxiliary output to specified effect. Use effectId = 0
-     * to detach track from effect.
-     */
-    virtual status_t    attachAuxEffect(int effectId) = 0;
-
-    /* Send parameters to the audio hardware */
-    virtual status_t    setParameters(const String8& keyValuePairs) = 0;
-
-    /* Selects the presentation (if available) */
-    virtual status_t    selectPresentation(int presentationId, int programId) = 0;
-
-    /* Return NO_ERROR if timestamp is valid.  timestamp is undefined otherwise. */
-    virtual status_t    getTimestamp(AudioTimestamp& timestamp) = 0;
-
-    /* Signal the playback thread for a change in control block */
-    virtual void        signal() = 0;
-
-    /* Sets the volume shaper */
-    virtual media::VolumeShaper::Status applyVolumeShaper(
-            const sp<media::VolumeShaper::Configuration>& configuration,
-            const sp<media::VolumeShaper::Operation>& operation) = 0;
-
-    /* gets the volume shaper state */
-    virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnAudioTrack : public BnInterface<IAudioTrack>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOTRACK_H
diff --git a/media/libaudioclient/include/media/IEffect.h b/media/libaudioclient/include/media/IEffect.h
deleted file mode 100644
index ff04869..0000000
--- a/media/libaudioclient/include/media/IEffect.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECT_H
-#define ANDROID_IEFFECT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffect: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(Effect);
-
-    virtual status_t enable() = 0;
-
-    virtual status_t disable() = 0;
-
-    virtual status_t command(uint32_t cmdCode,
-                             uint32_t cmdSize,
-                             void *pCmdData,
-                             uint32_t *pReplySize,
-                             void *pReplyData) = 0;
-
-    virtual void disconnect() = 0;
-
-    virtual sp<IMemory> getCblk() const = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffect: public BnInterface<IEffect>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECT_H
diff --git a/media/libaudioclient/include/media/IEffectClient.h b/media/libaudioclient/include/media/IEffectClient.h
deleted file mode 100644
index 2f78c98..0000000
--- a/media/libaudioclient/include/media/IEffectClient.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IEFFECTCLIENT_H
-#define ANDROID_IEFFECTCLIENT_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-
-namespace android {
-
-class IEffectClient: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(EffectClient);
-
-    virtual void controlStatusChanged(bool controlGranted) = 0;
-    virtual void enableStatusChanged(bool enabled) = 0;
-    virtual void commandExecuted(uint32_t cmdCode,
-                                 uint32_t cmdSize,
-                                 void *pCmdData,
-                                 uint32_t replySize,
-                                 void *pReplyData) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnEffectClient: public BnInterface<IEffectClient>
-{
-public:
-    virtual status_t    onTransact( uint32_t code,
-                                    const Parcel& data,
-                                    Parcel* reply,
-                                    uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif // ANDROID_IEFFECTCLIENT_H
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index e7a8abc..23b6bfd 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -19,6 +19,7 @@
 
 #include <audiomanager/AudioManager.h>
 #include <audiomanager/IAudioManager.h>
+#include <utils/Mutex.h>
 
 #include "android/media/BnPlayer.h"
 
@@ -40,19 +41,21 @@
     virtual binder::Status setPan(float pan) override;
     virtual binder::Status setStartDelayMs(int32_t delayMs) override;
     virtual binder::Status applyVolumeShaper(
-            const media::VolumeShaper::Configuration& configuration,
-            const media::VolumeShaper::Operation& operation) override;
+            const media::VolumeShaperConfiguration& configuration,
+            const media::VolumeShaperOperation& operation) override;
 
-            status_t startWithStatus();
+            status_t startWithStatus(audio_port_handle_t deviceId);
             status_t pauseWithStatus();
             status_t stopWithStatus();
 
             //FIXME temporary method while some player state is outside of this class
-            void reportEvent(player_state_t event);
+            void reportEvent(player_state_t event, audio_port_handle_t deviceId);
+
+            void baseUpdateDeviceId(audio_port_handle_t deviceId);
 
 protected:
 
-            void init(player_type_t playerType, audio_usage_t usage);
+            void init(player_type_t playerType, audio_usage_t usage, audio_session_t sessionId);
             void baseDestroy();
 
     //IPlayer methods handlers for derived classes
@@ -68,20 +71,24 @@
     float mPanMultiplierL, mPanMultiplierR;
     float mVolumeMultiplierL, mVolumeMultiplierR;
 
+    // Player interface ID; uniquely identifies the player in the system.
+    // Effectively const after PlayerBase::init().
+    audio_unique_id_t mPIId;
+
 private:
             // report events to AudioService
-            void servicePlayerEvent(player_state_t event);
+            void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
             void serviceReleasePlayer();
 
     // native interface to AudioService
     android::sp<android::IAudioManager> mAudioManager;
 
-    // player interface ID, uniquely identifies the player in the system
-    audio_unique_id_t mPIId;
-
     // Mutex for state reporting
     Mutex mPlayerStateLock;
     player_state_t mLastReportedEvent;
+
+    Mutex mDeviceIdLock;
+    audio_port_handle_t mLastReportedDeviceId;
 };
 
 } // namespace android
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
new file mode 100644
index 0000000..873f27a
--- /dev/null
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <limits>
+#include <type_traits>
+
+#include <system/audio.h>
+
+#include <android/media/AudioMix.h>
+#include <android/media/AudioMixCallbackFlag.h>
+#include <android/media/AudioMixLatencyClass.h>
+#include <android/media/AudioMixRouteFlag.h>
+#include <android/media/AudioMixType.h>
+#include <android/media/AudioMode.h>
+#include <android/media/AudioOffloadMode.h>
+#include <android/media/AudioPolicyForceUse.h>
+#include <android/media/AudioPolicyForcedConfig.h>
+#include <android/media/DeviceRole.h>
+
+#include <media/AidlConversionUtil.h>
+#include <media/AudioCommonTypes.h>
+#include <media/AudioPolicy.h>
+#include <android/media/AudioPolicyDeviceState.h>
+
+namespace android {
+
+ConversionResult<volume_group_t>
+aidl2legacy_int32_t_volume_group_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
+
+ConversionResult<product_strategy_t>
+aidl2legacy_int32_t_product_strategy_t(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_product_strategy_t_int32_t(product_strategy_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixType_uint32_t(media::AudioMixType aidl);
+ConversionResult<media::AudioMixType>
+legacy2aidl_uint32_t_AudioMixType(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixCallbackFlag_uint32_t(media::AudioMixCallbackFlag aidl);
+ConversionResult<media::AudioMixCallbackFlag>
+legacy2aidl_uint32_t_AudioMixCallbackFlag(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixRouteFlag_uint32_t(media::AudioMixRouteFlag aidl);
+ConversionResult<media::AudioMixRouteFlag>
+legacy2aidl_uint32_t_AudioMixRouteFlag(uint32_t legacy);
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioMixRouteFlag_uint32_t_mask(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_uint32_t_AudioMixRouteFlag_mask(uint32_t legacy);
+
+ConversionResult<AudioMixMatchCriterion>
+aidl2legacy_AudioMixMatchCriterion(const media::AudioMixMatchCriterion& aidl);
+ConversionResult<media::AudioMixMatchCriterion>
+legacy2aidl_AudioMixMatchCriterion(const AudioMixMatchCriterion& legacy);
+
+ConversionResult<AudioMix>
+aidl2legacy_AudioMix(const media::AudioMix& aidl);
+ConversionResult<media::AudioMix>
+legacy2aidl_AudioMix(const AudioMix& legacy);
+
+ConversionResult<audio_policy_dev_state_t>
+aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(media::AudioPolicyDeviceState aidl);
+ConversionResult<media::AudioPolicyDeviceState>
+legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(audio_policy_dev_state_t legacy);
+
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl);
+ConversionResult<media::AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy);
+
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl);
+ConversionResult<media::AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy);
+
+ConversionResult<device_role_t>
+aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl);
+ConversionResult<media::DeviceRole>
+legacy2aidl_device_role_t_DeviceRole(device_role_t legacy);
+
+ConversionResult<audio_offload_mode_t>
+aidl2legacy_AudioOffloadMode_audio_offload_mode_t(media::AudioOffloadMode aidl);
+ConversionResult<media::AudioOffloadMode>
+legacy2aidl_audio_offload_mode_t_AudioOffloadMode(audio_offload_mode_t legacy);
+
+}  // namespace android
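
The conversion helpers declared above are typically used in pairs at the binder boundary.
A minimal sketch (illustrative only; "setForceUseFromAidl" is a hypothetical function, it
assumes ConversionResult<T> from AidlConversionUtil.h exposes ok()/value(), and it uses the
usual AudioSystem::setForceUse() entry point with BAD_VALUE as an arbitrary error code):

    status_t setForceUseFromAidl(media::AudioPolicyForceUse usageAidl,
                                 media::AudioPolicyForcedConfig configAidl) {
        // AIDL -> legacy: each conversion can fail, so check before using the value.
        ConversionResult<audio_policy_force_use_t> usage =
                aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl);
        ConversionResult<audio_policy_forced_cfg_t> config =
                aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(configAidl);
        if (!usage.ok() || !config.ok()) return BAD_VALUE;
        return AudioSystem::setForceUse(usage.value(), config.value());
    }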
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 5b0689a..a575616 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_TONEGENERATOR_H_
 #define ANDROID_TONEGENERATOR_H_
 
+#include <string>
+
 #include <media/AudioSystem.h>
 #include <media/AudioTrack.h>
 #include <utils/Compat.h>
@@ -152,7 +154,8 @@
         NUM_SUP_TONES = LAST_SUP_TONE-FIRST_SUP_TONE+1
     };
 
-    ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava = false);
+    ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava = false,
+            std::string opPackageName = {});
     ~ToneGenerator();
 
     bool startTone(tone_type toneType, int durationMs = -1);
@@ -193,6 +196,7 @@
         TONE_JAPAN_DIAL,            // Dial tone: 400Hz, continuous
         TONE_JAPAN_BUSY,            // Busy tone: 400Hz, 500ms ON, 500ms OFF...
         TONE_JAPAN_RADIO_ACK,       // Radio path acknowlegment: 400Hz, 1s ON, 2s OFF...
+        TONE_JAPAN_RINGTONE,        // Ring Tone: 400 Hz repeated in a 1 s on, 2 s off pattern.
         // GB Supervisory tones
         TONE_GB_BUSY,               // Busy tone: 400 Hz, 375ms ON, 375ms OFF...
         TONE_GB_CONGESTION,         // Congestion Tone: 400 Hz, 400ms ON, 350ms OFF, 225ms ON, 525ms OFF...
@@ -218,6 +222,7 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
+        TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
         NUM_ALTERNATE_TONES
     };
 
@@ -230,6 +235,7 @@
         HONGKONG,
         IRELAND,
         INDIA,
+        TAIWAN,
         CEPT,
         NUM_REGIONS
     };
@@ -341,6 +347,8 @@
     };
 
     KeyedVector<uint16_t, WaveGenerator *> mWaveGens;  // list of active wave generators.
+
+    std::string mOpPackageName;
 };
 
 }
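
With the new opPackageName parameter, a caller can attribute tone playback to a package.
A short sketch (illustrative only; the package name is a placeholder):

    // Attribute the tone playback to the calling app.
    ToneGenerator toneGen(AUDIO_STREAM_DTMF, 0.5f /* volume */,
                          false /* threadCanCallJava */, "com.example.dialer");
    toneGen.startTone(ToneGenerator::TONE_SUP_DIAL, 2000 /* durationMs */);
    // ... tone plays asynchronously ...
    toneGen.stopTone();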
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 66e9b3b..80124b8 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -28,13 +28,14 @@
     explicit TrackPlayerBase();
     virtual ~TrackPlayerBase();
 
-            void init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage);
+            void init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage,
+                    audio_session_t sessionId);
     virtual void destroy();
 
     //IPlayer implementation
     virtual binder::Status applyVolumeShaper(
-            const media::VolumeShaper::Configuration& configuration,
-            const media::VolumeShaper::Operation& operation);
+            const media::VolumeShaperConfiguration& configuration,
+            const media::VolumeShaperOperation& operation);
 
     //FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
     sp<AudioTrack> mAudioTrack;
@@ -53,8 +54,20 @@
             void doDestroy();
             status_t doSetVolume();
 
+            class SelfAudioDeviceCallback : public AudioSystem::AudioDeviceCallback {
+            public:
+                SelfAudioDeviceCallback(PlayerBase& self);
+                virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                                         audio_port_handle_t deviceId);
+            private:
+                virtual ~SelfAudioDeviceCallback();
+                PlayerBase& mSelf;
+            };
+
     // volume coming from the player volume API
     float mPlayerVolumeL, mPlayerVolumeR;
+
+    sp<SelfAudioDeviceCallback> mSelfAudioDeviceCallback;
 };
 
 } // namespace android
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 350a780..def7ca6 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libaudioclient_tests_defaults",
     cflags: [
@@ -7,6 +16,18 @@
 }
 
 cc_test {
+    name: "audio_aidl_status_tests",
+    defaults: ["libaudioclient_tests_defaults"],
+    srcs: ["audio_aidl_status_tests.cpp"],
+    shared_libs: [
+        "libaudioclient_aidl_conversion",
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_test {
     name: "test_create_audiotrack",
     defaults: ["libaudioclient_tests_defaults"],
     srcs: ["test_create_audiotrack.cpp",
@@ -16,6 +37,7 @@
         "libmediametrics_headers",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libaudioclient",
         "libbinder",
         "libcutils",
@@ -38,6 +60,7 @@
         "libbinder",
         "libcutils",
         "libutils",
+        "framework-permission-aidl-cpp",
     ],
     data: ["record_test_input_*.txt"],
 }
diff --git a/media/libaudioclient/tests/audio_aidl_status_tests.cpp b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
new file mode 100644
index 0000000..5517091
--- /dev/null
+++ b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <media/AidlConversionUtil.h>
+#include <utils/Errors.h>
+
+using namespace android;
+using namespace android::aidl_utils;
+using android::binder::Status;
+
+// Tests for statusTFromBinderStatus() and binderStatusFromStatusT().
+
+// STATUS_T_SMALL_VALUE_LIMIT is an arbitrary limit where we exhaustively check status_t errors.
+// It is known that this limit doesn't cover UNKNOWN_ERROR ~ INT32_MIN.
+constexpr status_t STATUS_T_SMALL_VALUE_LIMIT = -1000;
+
+// Small status values are preserved on round trip
+TEST(audio_aidl_status_tests, statusRoundTripSmallValues) {
+    for (status_t status = 0; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+        ASSERT_EQ(status, statusTFromBinderStatus(binderStatusFromStatusT(status)));
+    }
+}
+
+// Special status values are preserved on round trip.
+TEST(audio_aidl_status_tests, statusRoundTripSpecialValues) {
+    for (status_t status : {
+            OK,
+            UNKNOWN_ERROR,
+            NO_MEMORY,
+            INVALID_OPERATION,
+            BAD_VALUE,
+            BAD_TYPE,
+            NAME_NOT_FOUND,
+            PERMISSION_DENIED,
+            NO_INIT,
+            ALREADY_EXISTS,
+            DEAD_OBJECT,
+            FAILED_TRANSACTION,
+            BAD_INDEX,
+            NOT_ENOUGH_DATA,
+            WOULD_BLOCK,
+            TIMED_OUT,
+            UNKNOWN_TRANSACTION,
+            FDS_NOT_ALLOWED}) {
+        ASSERT_EQ(status, statusTFromBinderStatus(binderStatusFromStatusT(status)));
+    }
+}
+
+// Binder exceptions show as an error (not fixed at this time); these come from fromExceptionCode().
+TEST(audio_aidl_status_tests, binderStatusExceptions) {
+    for (int exceptionCode : {
+            //Status::EX_NONE,
+            Status::EX_SECURITY,
+            Status::EX_BAD_PARCELABLE,
+            Status::EX_ILLEGAL_ARGUMENT,
+            Status::EX_NULL_POINTER,
+            Status::EX_ILLEGAL_STATE,
+            Status::EX_NETWORK_MAIN_THREAD,
+            Status::EX_UNSUPPORTED_OPERATION,
+            //Status::EX_SERVICE_SPECIFIC, -- tested via fromServiceSpecificError()
+            Status::EX_PARCELABLE,
+            // This is special and Java specific; see Parcel.java.
+            Status::EX_HAS_REPLY_HEADER,
+            // This is special, and indicates to C++ binder proxies that the
+            // transaction has failed at a low level.
+            //Status::EX_TRANSACTION_FAILED, -- tested via fromStatusT().
+            }) {
+        ASSERT_NE(OK, statusTFromBinderStatus(Status::fromExceptionCode(exceptionCode)));
+    }
+}
+
+// Binder transaction errors show exactly in status_t; these come from fromStatusT().
+TEST(audio_aidl_status_tests, binderStatusTransactionError) {
+    for (status_t status : {
+            OK, // Note: fromStatusT() checks for 0, so OK does not turn into an error.
+            UNKNOWN_ERROR,
+            NO_MEMORY,
+            INVALID_OPERATION,
+            BAD_VALUE,
+            BAD_TYPE,
+            NAME_NOT_FOUND,
+            PERMISSION_DENIED,
+            NO_INIT,
+            ALREADY_EXISTS,
+            DEAD_OBJECT,
+            FAILED_TRANSACTION,
+            BAD_INDEX,
+            NOT_ENOUGH_DATA,
+            WOULD_BLOCK,
+            TIMED_OUT,
+            UNKNOWN_TRANSACTION,
+            FDS_NOT_ALLOWED}) {
+        ASSERT_EQ(status, statusTFromBinderStatus(Status::fromStatusT(status)));
+    }
+}
+
+// Binder service-specific errors show in status_t; these come from fromServiceSpecificError().
+TEST(audio_aidl_status_tests, binderStatusServiceSpecificError) {
+    // fromServiceSpecificError() still stores an exception code when the status is 0, so start at -1.
+    for (status_t status = -1; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+        ASSERT_EQ(status, statusTFromBinderStatus(Status::fromServiceSpecificError(status)));
+    }
+}
+
+// Binder status with message.
+TEST(audio_aidl_status_tests, binderStatusMessage) {
+    const String8 message("abcd");
+    for (status_t status = -1; status > STATUS_T_SMALL_VALUE_LIMIT; --status) {
+        const Status binderStatus = binderStatusFromStatusT(status, message.c_str());
+        ASSERT_EQ(status, statusTFromBinderStatus(binderStatus));
+        ASSERT_EQ(message, binderStatus.exceptionMessage());
+    }
+}
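
The tests above pin down the behaviour of the status_t <-> binder::Status conversion helpers. For orientation, here is a minimal sketch of how a native service and its proxy might use them; the include path, the android:: qualification, and the function names are assumptions of the sketch, not part of this change.

#include <media/AidlConversionUtil.h>  // assumed location of the conversion helpers

using android::binder::Status;

// Server side of a hypothetical AIDL method: wrap a legacy error for the reply.
Status doSomething(android::status_t legacyResult) {
    return android::binderStatusFromStatusT(legacyResult, "doSomething failed");
}

// Proxy side: recover the status_t. Small negative values and the special codes
// listed in statusRoundTripSpecialValues round-trip exactly.
android::status_t callerView(const Status& reply) {
    return android::statusTFromBinderStatus(reply);
}
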
diff --git a/media/libaudioclient/tests/test_create_audiorecord.cpp b/media/libaudioclient/tests/test_create_audiorecord.cpp
index cf6a734..1cbcb71 100644
--- a/media/libaudioclient/tests/test_create_audiorecord.cpp
+++ b/media/libaudioclient/tests/test_create_audiorecord.cpp
@@ -19,6 +19,7 @@
 #include <string.h>
 #include <unistd.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
 #include <binder/MemoryHeapBase.h>
@@ -32,19 +33,25 @@
 
 namespace android {
 
+using android::content::AttributionSourceState;
+
 int testRecord(FILE *inputFile, int outputFileFd)
 {
     char line[MAX_INPUT_FILE_LINE_LENGTH];
     uint32_t testCount = 0;
     Vector<String16> args;
     int ret = 0;
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = std::string(PACKAGE_NAME);
+    attributionSource.token = sp<BBinder>::make();
 
     if (inputFile == nullptr) {
         sp<AudioRecord> record = new AudioRecord(AUDIO_SOURCE_DEFAULT,
                                               0 /* sampleRate */,
                                               AUDIO_FORMAT_DEFAULT,
                                               AUDIO_CHANNEL_IN_MONO,
-                                              String16(PACKAGE_NAME));
+                                              attributionSource);
         if (record == 0 || record->initCheck() != NO_ERROR) {
             write(outputFileFd, "Error creating AudioRecord\n",
                   sizeof("Error creating AudioRecord\n"));
@@ -90,7 +97,7 @@
         memset(&attributes, 0, sizeof(attributes));
         attributes.source = inputSource;
 
-        sp<AudioRecord> record = new AudioRecord(String16(PACKAGE_NAME));
+        sp<AudioRecord> record = new AudioRecord(attributionSource);
 
         record->set(AUDIO_SOURCE_DEFAULT,
                    sampleRate,
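
AudioRecord construction now takes an AttributionSourceState instead of a bare package name. Below is a minimal sketch of building one for a native client, using only the fields exercised above (packageName, token); the helper name and leaving every other field defaulted are assumptions of the sketch.

#include <string>

#include <android/content/AttributionSourceState.h>
#include <binder/Binder.h>

using android::content::AttributionSourceState;

AttributionSourceState makeAttribution(const std::string& packageName) {
    AttributionSourceState source;
    source.packageName = packageName;                      // client package identity
    source.token = android::sp<android::BBinder>::make();  // opaque identity token
    return source;
}
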
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index 548b080..3bef55b 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiofoundation_headers",
     vendor_available: true,
@@ -5,13 +14,27 @@
 
     export_include_dirs: ["include"],
     header_libs: [
+        "libaudioclient_aidl_conversion_util",
         "libaudio_system_headers",
         "libmedia_helper_headers",
     ],
     export_header_lib_headers: [
+        "libaudioclient_aidl_conversion_util",
         "libaudio_system_headers",
         "libmedia_helper_headers",
     ],
+    static_libs: [
+        "audioclient-types-aidl-cpp",
+    ],
+    export_static_lib_headers: [
+        "audioclient-types-aidl-cpp",
+    ],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library {
@@ -29,6 +52,8 @@
     ],
 
     shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudioutils",
         "libbase",
         "libbinder",
@@ -37,6 +62,11 @@
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "audioclient-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+
     header_libs: [
         "libaudiofoundation_headers",
     ],
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index b44043a..c5d7da8 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -15,13 +15,59 @@
  */
 
 #include <media/AudioDeviceTypeAddr.h>
+#include <arpa/inet.h>
+#include <iostream>
+#include <regex>
+#include <set>
+#include <sstream>
+
+#include <media/AidlConversion.h>
 
 namespace android {
 
+namespace {
+
+static const std::string SUPPRESSED = "SUPPRESSED";
+static const std::regex MAC_ADDRESS_REGEX("([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}");
+
+bool isSensitiveAddress(const std::string &address) {
+    if (std::regex_match(address, MAC_ADDRESS_REGEX)) {
+        return true;
+    }
+
+    sockaddr_storage ss4;
+    if (inet_pton(AF_INET, address.c_str(), &ss4) > 0) {
+        return true;
+    }
+
+    sockaddr_storage ss6;
+    if (inet_pton(AF_INET6, address.c_str(), &ss6) > 0) {
+        return true;
+    }
+
+    return false;
+}
+
+} // namespace
+
+AudioDeviceTypeAddr::AudioDeviceTypeAddr(audio_devices_t type, const std::string &address) :
+        mType(type), mAddress(address) {
+    mIsAddressSensitive = isSensitiveAddress(mAddress);
+}
+
 const char* AudioDeviceTypeAddr::getAddress() const {
     return mAddress.c_str();
 }
 
+const std::string& AudioDeviceTypeAddr::address() const {
+    return mAddress;
+}
+
+void AudioDeviceTypeAddr::setAddress(const std::string& address) {
+    mAddress = address;
+    mIsAddressSensitive = isSensitiveAddress(mAddress);
+}
+
 bool AudioDeviceTypeAddr::equals(const AudioDeviceTypeAddr& other) const {
     return mType == other.mType && mAddress == other.mAddress;
 }
@@ -36,14 +82,34 @@
     return false;
 }
 
+bool AudioDeviceTypeAddr::operator==(const AudioDeviceTypeAddr &rhs) const {
+    return equals(rhs);
+}
+
+bool AudioDeviceTypeAddr::operator!=(const AudioDeviceTypeAddr &rhs) const {
+    return !operator==(rhs);
+}
+
 void AudioDeviceTypeAddr::reset() {
     mType = AUDIO_DEVICE_NONE;
-    mAddress = "";
+    setAddress("");
+}
+
+std::string AudioDeviceTypeAddr::toString(bool includeSensitiveInfo) const {
+    std::stringstream sstream;
+    sstream << "type:0x" << std::hex << mType;
+    // IP and MAC addresses are sensitive information. They are suppressed
+    // when `includeSensitiveInfo` is false.
+    sstream << ",@:"
+            << (!includeSensitiveInfo && mIsAddressSensitive ? SUPPRESSED : mAddress);
+    return sstream.str();
 }
 
 status_t AudioDeviceTypeAddr::readFromParcel(const Parcel *parcel) {
     status_t status;
-    if ((status = parcel->readUint32(&mType)) != NO_ERROR) return status;
+    uint32_t rawDeviceType;
+    if ((status = parcel->readUint32(&rawDeviceType)) != NO_ERROR) return status;
+    mType = static_cast<audio_devices_t>(rawDeviceType);
     status = parcel->readUtf8FromUtf16(&mAddress);
     return status;
 }
@@ -64,4 +130,44 @@
     return deviceTypes;
 }
 
-}
\ No newline at end of file
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToExclude) {
+    std::set<AudioDeviceTypeAddr> devicesToExcludeSet(
+            devicesToExclude.begin(), devicesToExclude.end());
+    AudioDeviceTypeAddrVector remainedDevices;
+    for (const auto& device : devices) {
+        if (devicesToExcludeSet.count(device) == 0) {
+            remainedDevices.push_back(device);
+        }
+    }
+    return remainedDevices;
+}
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+                                          bool includeSensitiveInfo) {
+    std::stringstream stream;
+    for (auto it = deviceTypeAddrs.begin(); it != deviceTypeAddrs.end(); ++it) {
+        if (it != deviceTypeAddrs.begin()) {
+            stream << " ";
+        }
+        stream << it->toString(includeSensitiveInfo);
+    }
+    return stream.str();
+}
+
+ConversionResult<AudioDeviceTypeAddr>
+aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl) {
+    audio_devices_t type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
+    return AudioDeviceTypeAddr(type, aidl.address);
+}
+
+ConversionResult<media::AudioDevice>
+legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy) {
+    media::AudioDevice aidl;
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.mType));
+    aidl.address = legacy.getAddress();
+    return aidl;
+}
+
+} // namespace android
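
AudioDeviceTypeAddr::toString() now redacts addresses that look like MAC or IP addresses unless the caller opts in. A small usage sketch follows; the wrapper function, device type constant and address are example values, not taken from this change.

std::string exampleRedaction() {
    AudioDeviceTypeAddr btDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55");
    // Opting in keeps the full MAC address, e.g. for a bugreport dump.
    std::string forDump = btDevice.toString(true /*includeSensitiveInfo*/);
    // By default the address matches MAC_ADDRESS_REGEX and is replaced by "SUPPRESSED".
    return btDevice.toString();
}
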
diff --git a/media/libaudiofoundation/AudioGain.cpp b/media/libaudiofoundation/AudioGain.cpp
index 0d28335..1dee938 100644
--- a/media/libaudiofoundation/AudioGain.cpp
+++ b/media/libaudiofoundation/AudioGain.cpp
@@ -129,38 +129,51 @@
            mGain.max_ramp_ms == other->mGain.max_ramp_ms;
 }
 
-status_t AudioGain::writeToParcel(android::Parcel *parcel) const
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeInt32(mIndex)) != NO_ERROR) return status;
-    if ((status = parcel->writeBool(mUseInChannelMask)) != NO_ERROR) return status;
-    if ((status = parcel->writeBool(mUseForVolume)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.channel_mask)) != NO_ERROR) return status;
-    if ((status = parcel->writeInt32(mGain.min_value)) != NO_ERROR) return status;
-    if ((status = parcel->writeInt32(mGain.max_value)) != NO_ERROR) return status;
-    if ((status = parcel->writeInt32(mGain.default_value)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.step_value)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.min_ramp_ms)) != NO_ERROR) return status;
-    status = parcel->writeUint32(mGain.max_ramp_ms);
-    return status;
+status_t AudioGain::writeToParcel(android::Parcel *parcel) const {
+    media::AudioGain parcelable;
+    return writeToParcelable(&parcelable)
+        ?: parcelable.writeToParcel(parcel);
 }
 
-status_t AudioGain::readFromParcel(const android::Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->readInt32(&mIndex)) != NO_ERROR) return status;
-    if ((status = parcel->readBool(&mUseInChannelMask)) != NO_ERROR) return status;
-    if ((status = parcel->readBool(&mUseForVolume)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
-    if ((status = parcel->readInt32(&mGain.min_value)) != NO_ERROR) return status;
-    if ((status = parcel->readInt32(&mGain.max_value)) != NO_ERROR) return status;
-    if ((status = parcel->readInt32(&mGain.default_value)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.step_value)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.min_ramp_ms)) != NO_ERROR) return status;
-    status = parcel->readUint32(&mGain.max_ramp_ms);
-    return status;
+status_t AudioGain::writeToParcelable(media::AudioGain* parcelable) const {
+    parcelable->index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mIndex));
+    parcelable->useInChannelMask = mUseInChannelMask;
+    parcelable->useForVolume = mUseForVolume;
+    parcelable->mode = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
+    parcelable->channelMask = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
+    parcelable->minValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_value));
+    parcelable->maxValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_value));
+    parcelable->defaultValue = VALUE_OR_RETURN_STATUS(
+            convertIntegral<int32_t>(mGain.default_value));
+    parcelable->stepValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.step_value));
+    parcelable->minRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_ramp_ms));
+    parcelable->maxRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_ramp_ms));
+    return OK;
+}
+
+status_t AudioGain::readFromParcel(const android::Parcel *parcel) {
+    media::AudioGain parcelable;
+    return parcelable.readFromParcel(parcel)
+        ?: readFromParcelable(parcelable);
+}
+
+status_t AudioGain::readFromParcelable(const media::AudioGain& parcelable) {
+    mIndex = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.index));
+    mUseInChannelMask = parcelable.useInChannelMask;
+    mUseForVolume = parcelable.useForVolume;
+    mGain.mode = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.mode));
+    mGain.channel_mask = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
+    mGain.min_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.minValue));
+    mGain.max_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.maxValue));
+    mGain.default_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.defaultValue));
+    mGain.step_value = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.stepValue));
+    mGain.min_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.minRampMs));
+    mGain.max_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.maxRampMs));
+    return OK;
 }
 
 bool AudioGains::equals(const AudioGains &other) const
@@ -196,4 +209,34 @@
     return status;
 }
 
+ConversionResult<sp<AudioGain>>
+aidl2legacy_AudioGain(const media::AudioGain& aidl) {
+    sp<AudioGain> legacy = new AudioGain(0, false);
+    status_t status = legacy->readFromParcelable(aidl);
+    if (status != OK) {
+        return base::unexpected(status);
+    }
+    return legacy;
+}
+
+ConversionResult<media::AudioGain>
+legacy2aidl_AudioGain(const sp<AudioGain>& legacy) {
+    media::AudioGain aidl;
+    status_t status = legacy->writeToParcelable(&aidl);
+    if (status != OK) {
+        return base::unexpected(status);
+    }
+    return aidl;
+}
+
+ConversionResult<AudioGains>
+aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl) {
+    return convertContainer<AudioGains>(aidl, aidl2legacy_AudioGain);
+}
+
+ConversionResult<std::vector<media::AudioGain>>
+legacy2aidl_AudioGains(const AudioGains& legacy) {
+    return convertContainer<std::vector<media::AudioGain>>(legacy, legacy2aidl_AudioGain);
+}
+
 } // namespace android
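
AudioGain now serializes through the media::AudioGain parcelable, and the new aidl2legacy_AudioGain()/legacy2aidl_AudioGain() helpers expose that conversion outside Parcel I/O. A sketch of a round trip through them; the wrapper function and the constructor argument comments are assumptions of the sketch.

void roundTripGain() {
    sp<AudioGain> gain = new AudioGain(0 /*index*/, false /*useInChannelMask*/);
    // ... populate the gain ...
    ConversionResult<media::AudioGain> aidl = legacy2aidl_AudioGain(gain);
    if (aidl.ok()) {
        ConversionResult<sp<AudioGain>> restored = aidl2legacy_AudioGain(aidl.value());
        // On success, restored.value() carries the same index, mode, value range
        // and ramp times as the original gain.
    }
}
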
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index f988690..fafabd9 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -16,7 +16,9 @@
 #define LOG_TAG "AudioPort"
 
 #include <algorithm>
+#include <utility>
 
+#include <android/media/ExtraAudioDescriptor.h>
 #include <android-base/stringprintf.h>
 #include <media/AudioPort.h>
 #include <utils/Log.h>
@@ -38,6 +40,36 @@
     }
 }
 
+void AudioPort::importAudioPort(const audio_port_v7 &port) {
+    for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+        sp<AudioProfile> profile = new AudioProfile(port.audio_profiles[i].format,
+                ChannelMaskSet(port.audio_profiles[i].channel_masks,
+                        port.audio_profiles[i].channel_masks +
+                        port.audio_profiles[i].num_channel_masks),
+                SampleRateSet(port.audio_profiles[i].sample_rates,
+                        port.audio_profiles[i].sample_rates +
+                        port.audio_profiles[i].num_sample_rates),
+                port.audio_profiles[i].encapsulation_type);
+        if (!mProfiles.contains(profile)) {
+            addAudioProfile(profile);
+        }
+    }
+
+    for (size_t i = 0; i < port.num_extra_audio_descriptors; ++i) {
+        auto convertedResult = legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+                port.extra_audio_descriptors[i]);
+        if (!convertedResult.ok()) {
+            ALOGE("%s, failed to convert extra audio descriptor", __func__);
+            continue;
+        }
+        if (std::find(mExtraAudioDescriptors.begin(),
+                      mExtraAudioDescriptors.end(),
+                      convertedResult.value()) == mExtraAudioDescriptors.end()) {
+            mExtraAudioDescriptors.push_back(std::move(convertedResult.value()));
+        }
+    }
+}
+
 void AudioPort::toAudioPort(struct audio_port *port) const {
     // TODO: update this function once audio_port structure reflects the new profile definition.
     // For compatibility reason: flatening the AudioProfile into audio_port structure.
@@ -62,21 +94,56 @@
             }
         }
     }
-    port->role = mRole;
-    port->type = mType;
-    strlcpy(port->name, mName.c_str(), AUDIO_PORT_MAX_NAME_LEN);
+    toAudioPortBase(port);
     port->num_sample_rates = flatenedRates.size();
     port->num_channel_masks = flatenedChannels.size();
     port->num_formats = flatenedFormats.size();
     std::copy(flatenedRates.begin(), flatenedRates.end(), port->sample_rates);
     std::copy(flatenedChannels.begin(), flatenedChannels.end(), port->channel_masks);
     std::copy(flatenedFormats.begin(), flatenedFormats.end(), port->formats);
+}
 
-    ALOGV("AudioPort::toAudioPort() num gains %zu", mGains.size());
+void AudioPort::toAudioPort(struct audio_port_v7 *port) const {
+    toAudioPortBase(port);
+    port->num_audio_profiles = 0;
+    for (const auto& profile : mProfiles) {
+        if (profile->isValid()) {
+            const SampleRateSet &sampleRates = profile->getSampleRates();
+            const ChannelMaskSet &channelMasks = profile->getChannels();
 
-    port->num_gains = std::min(mGains.size(), (size_t) AUDIO_PORT_MAX_GAINS);
-    for (size_t i = 0; i < port->num_gains; i++) {
-        port->gains[i] = mGains[i]->getGain();
+            if (sampleRates.size() > AUDIO_PORT_MAX_SAMPLING_RATES ||
+                    channelMasks.size() > AUDIO_PORT_MAX_CHANNEL_MASKS ||
+                    port->num_audio_profiles >= AUDIO_PORT_MAX_AUDIO_PROFILES) {
+                ALOGE("%s: bailing out: cannot export profiles to port config", __func__);
+                break;
+            }
+
+            auto& dstProfile = port->audio_profiles[port->num_audio_profiles++];
+            dstProfile.format = profile->getFormat();
+            dstProfile.num_sample_rates = sampleRates.size();
+            std::copy(sampleRates.begin(), sampleRates.end(),
+                    std::begin(dstProfile.sample_rates));
+            dstProfile.num_channel_masks = channelMasks.size();
+            std::copy(channelMasks.begin(), channelMasks.end(),
+                    std::begin(dstProfile.channel_masks));
+            dstProfile.encapsulation_type = profile->getEncapsulationType();
+        }
+    }
+
+    port->num_extra_audio_descriptors = 0;
+    for (const auto& desc : mExtraAudioDescriptors) {
+        if (port->num_extra_audio_descriptors >= AUDIO_PORT_MAX_EXTRA_AUDIO_DESCRIPTORS) {
+            ALOGE("%s: bailing out: cannot export extra audio descriptor to port config", __func__);
+            return;
+        }
+
+        auto convertedResult = aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(desc);
+        if (!convertedResult.ok()) {
+            ALOGE("%s: failed to convert extra audio descriptor", __func__);
+            continue;
+        }
+        port->extra_audio_descriptors[port->num_extra_audio_descriptors++] =
+                std::move(convertedResult.value());
     }
 }
 
@@ -88,6 +155,22 @@
         std::string profilesStr;
         mProfiles.dump(&profilesStr, spaces);
         dst->append(profilesStr);
+        if (!mExtraAudioDescriptors.empty()) {
+            dst->append(base::StringPrintf("%*s- extra audio descriptors: \n", spaces, ""));
+            const int eadSpaces = spaces + 4;
+            const int descSpaces = eadSpaces + 4;
+            for (size_t i = 0; i < mExtraAudioDescriptors.size(); i++) {
+                dst->append(
+                        base::StringPrintf("%*s extra audio descriptor %zu:\n", eadSpaces, "", i));
+                dst->append(base::StringPrintf(
+                    "%*s- standard: %u\n", descSpaces, "", mExtraAudioDescriptors[i].standard));
+                dst->append(base::StringPrintf("%*s- descriptor:", descSpaces, ""));
+                for (auto v : mExtraAudioDescriptors[i].audioDescriptor) {
+                    dst->append(base::StringPrintf(" %02x", v));
+                }
+                dst->append("\n");
+            }
+        }
 
         if (mGains.size() != 0) {
             dst->append(base::StringPrintf("%*s- gains:\n", spaces, ""));
@@ -112,37 +195,41 @@
            mName.compare(other->getName()) == 0 &&
            mType == other->getType() &&
            mRole == other->getRole() &&
-           mProfiles.equals(other->getAudioProfiles());
+           mProfiles.equals(other->getAudioProfiles()) &&
+           mExtraAudioDescriptors == other->getExtraAudioDescriptors();
 }
 
 status_t AudioPort::writeToParcel(Parcel *parcel) const
 {
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeUtf8AsUtf16(mName)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mType)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mRole)) != NO_ERROR) return status;
-    if ((status = parcel->writeParcelable(mProfiles)) != NO_ERROR) return status;
-    if ((status = parcel->writeParcelable(mGains)) != NO_ERROR) return status;
-    return status;
+    media::AudioPort parcelable;
+    return writeToParcelable(&parcelable)
+        ?: parcelable.writeToParcel(parcel);
 }
 
-status_t AudioPort::readFromParcel(const Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->readUtf8FromUtf16(&mName)) != NO_ERROR) return status;
-    static_assert(sizeof(mType) == sizeof(uint32_t));
-    if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mType))) != NO_ERROR) {
-        return status;
-    }
-    static_assert(sizeof(mRole) == sizeof(uint32_t));
-    if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mRole))) != NO_ERROR) {
-        return status;
-    }
-    mProfiles.clear();
-    if ((status = parcel->readParcelable(&mProfiles)) != NO_ERROR) return status;
-    mGains.clear();
-    if ((status = parcel->readParcelable(&mGains)) != NO_ERROR) return status;
-    return status;
+status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
+    parcelable->name = mName;
+    parcelable->type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_type_t_AudioPortType(mType));
+    parcelable->role = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_role_t_AudioPortRole(mRole));
+    parcelable->profiles = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioProfileVector(mProfiles));
+    parcelable->extraAudioDescriptors = mExtraAudioDescriptors;
+    parcelable->gains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
+    return OK;
+}
+
+status_t AudioPort::readFromParcel(const Parcel *parcel) {
+    media::AudioPort parcelable;
+    return parcelable.readFromParcel(parcel)
+        ?: readFromParcelable(parcelable);
+}
+
+status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
+    mName = parcelable.name;
+    mType = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortType_audio_port_type_t(parcelable.type));
+    mRole = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortRole_audio_port_role_t(parcelable.role));
+    mProfiles = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioProfileVector(parcelable.profiles));
+    mExtraAudioDescriptors = parcelable.extraAudioDescriptors;
+    mGains = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGains(parcelable.gains));
+    return OK;
 }
 
 // --- AudioPortConfig class implementation
@@ -243,45 +330,56 @@
            mGain.ramp_duration_ms == other->mGain.ramp_duration_ms;
 }
 
-status_t AudioPortConfig::writeToParcel(Parcel *parcel) const
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeUint32(mSamplingRate)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mFormat)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mChannelMask)) != NO_ERROR) return status;
-    if ((status = parcel->writeInt32(mId)) != NO_ERROR) return status;
-    // Write mGain to parcel.
-    if ((status = parcel->writeInt32(mGain.index)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.channel_mask)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mGain.ramp_duration_ms)) != NO_ERROR) return status;
-    std::vector<int> values(std::begin(mGain.values), std::end(mGain.values));
-    if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
-    return status;
+status_t AudioPortConfig::writeToParcel(Parcel *parcel) const {
+    media::AudioPortConfig parcelable;
+    return writeToParcelable(&parcelable)
+        ?: parcelable.writeToParcel(parcel);
 }
 
-status_t AudioPortConfig::readFromParcel(const Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->readUint32(&mSamplingRate)) != NO_ERROR) return status;
-    static_assert(sizeof(mFormat) == sizeof(uint32_t));
-    if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mFormat))) != NO_ERROR) {
-        return status;
-    }
-    if ((status = parcel->readUint32(&mChannelMask)) != NO_ERROR) return status;
-    if ((status = parcel->readInt32(&mId)) != NO_ERROR) return status;
-    // Read mGain from parcel.
-    if ((status = parcel->readInt32(&mGain.index)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.mode)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.channel_mask)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mGain.ramp_duration_ms)) != NO_ERROR) return status;
-    std::vector<int> values;
-    if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
-    if (values.size() != std::size(mGain.values)) {
+status_t AudioPortConfig::writeToParcelable(media::AudioPortConfig* parcelable) const {
+    parcelable->sampleRate = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mSamplingRate));
+    parcelable->format = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+    parcelable->channelMask = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_channel_mask_t_int32_t(mChannelMask));
+    parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+    parcelable->gain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.index));
+    parcelable->gain.mode = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
+    parcelable->gain.channelMask = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
+    parcelable->gain.rampDurationMs = VALUE_OR_RETURN_STATUS(
+            convertIntegral<int32_t>(mGain.ramp_duration_ms));
+    parcelable->gain.values = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
+            mGain.values, convertIntegral<int32_t, int>));
+    return OK;
+}
+
+status_t AudioPortConfig::readFromParcel(const Parcel *parcel) {
+    media::AudioPortConfig parcelable;
+    return parcelable.readFromParcel(parcel)
+        ?: readFromParcelable(parcelable);
+}
+
+status_t AudioPortConfig::readFromParcelable(const media::AudioPortConfig& parcelable) {
+    mSamplingRate = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.sampleRate));
+    mFormat = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
+    mChannelMask = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
+    mId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(parcelable.id));
+    mGain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.index));
+    mGain.mode = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.gain.mode));
+    mGain.channel_mask = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.gain.channelMask));
+    mGain.ramp_duration_ms = VALUE_OR_RETURN_STATUS(
+            convertIntegral<unsigned int>(parcelable.gain.rampDurationMs));
+    if (parcelable.gain.values.size() > std::size(mGain.values)) {
         return BAD_VALUE;
     }
-    std::copy(values.begin(), values.end(), mGain.values);
-    return status;
+    for (size_t i = 0; i < parcelable.gain.values.size(); ++i) {
+        mGain.values[i] = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.values[i]));
+    }
+    return OK;
 }
 
 } // namespace android
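
AudioPort can now both export its capabilities to and import them from the audio_port_v7 structure, including the extra audio descriptors. A sketch of the two directions; the wrapper function is illustrative and audioPort stands for any already-populated port object.

void exportAndReimport(const sp<AudioPort>& audioPort) {
    struct audio_port_v7 portV7 = {};
    audioPort->toAudioPort(&portV7);
    // portV7 now holds up to AUDIO_PORT_MAX_AUDIO_PROFILES valid profiles
    // (format, sample rates, channel masks, encapsulation type) and up to
    // AUDIO_PORT_MAX_EXTRA_AUDIO_DESCRIPTORS extra audio descriptors.

    // importAudioPort() merges only the profiles and descriptors that are not
    // already present on the port.
    audioPort->importAudioPort(portV7);
}
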
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 91be346..8ac3f73 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -58,10 +58,18 @@
 AudioProfile::AudioProfile(audio_format_t format,
                            const ChannelMaskSet &channelMasks,
                            const SampleRateSet &samplingRateCollection) :
+        AudioProfile(format, channelMasks, samplingRateCollection,
+                     AUDIO_ENCAPSULATION_TYPE_NONE) {}
+
+AudioProfile::AudioProfile(audio_format_t format,
+                           const ChannelMaskSet &channelMasks,
+                           const SampleRateSet &samplingRateCollection,
+                           audio_encapsulation_type_t encapsulationType) :
         mName(""),
         mFormat(format),
         mChannelMasks(channelMasks),
-        mSamplingRates(samplingRateCollection) {}
+        mSamplingRates(samplingRateCollection),
+        mEncapsulationType(encapsulationType) {}
 
 void AudioProfile::setChannels(const ChannelMaskSet &channelMasks)
 {
@@ -116,6 +124,9 @@
         }
         dst->append("\n");
     }
+
+    dst->append(base::StringPrintf(
+            "%*s- encapsulation type: %#x\n", spaces, "", mEncapsulationType));
 }
 
 bool AudioProfile::equals(const sp<AudioProfile>& other) const
@@ -127,45 +138,83 @@
            mSamplingRates == other->getSampleRates() &&
            mIsDynamicFormat == other->isDynamicFormat() &&
            mIsDynamicChannels == other->isDynamicChannels() &&
-           mIsDynamicRate == other->isDynamicRate();
+           mIsDynamicRate == other->isDynamicRate() &&
+           mEncapsulationType == other->getEncapsulationType();
 }
 
-status_t AudioProfile::writeToParcel(Parcel *parcel) const
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeUtf8AsUtf16(mName)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mFormat)) != NO_ERROR) return status;
-    std::vector<int> values(mChannelMasks.begin(), mChannelMasks.end());
-    if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
-    values.clear();
-    values.assign(mSamplingRates.begin(), mSamplingRates.end());
-    if ((status = parcel->writeInt32Vector(values)) != NO_ERROR) return status;
-    if ((status = parcel->writeBool(mIsDynamicFormat)) != NO_ERROR) return status;
-    if ((status = parcel->writeBool(mIsDynamicChannels)) != NO_ERROR) return status;
-    if ((status = parcel->writeBool(mIsDynamicRate)) != NO_ERROR) return status;
-    return status;
+AudioProfile& AudioProfile::operator=(const AudioProfile& other) {
+    mName = other.mName;
+    mFormat = other.mFormat;
+    mChannelMasks = other.mChannelMasks;
+    mSamplingRates = other.mSamplingRates;
+    mEncapsulationType = other.mEncapsulationType;
+    mIsDynamicFormat = other.mIsDynamicFormat;
+    mIsDynamicChannels = other.mIsDynamicChannels;
+    mIsDynamicRate = other.mIsDynamicRate;
+    return *this;
 }
 
-status_t AudioProfile::readFromParcel(const Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->readUtf8FromUtf16(&mName)) != NO_ERROR) return status;
-    static_assert(sizeof(mFormat) == sizeof(uint32_t));
-    if ((status = parcel->readUint32(reinterpret_cast<uint32_t*>(&mFormat))) != NO_ERROR) {
+status_t AudioProfile::writeToParcel(Parcel *parcel) const {
+    media::AudioProfile parcelable = VALUE_OR_RETURN_STATUS(toParcelable());
+    return parcelable.writeToParcel(parcel);
+}
+
+ConversionResult<media::AudioProfile>
+AudioProfile::toParcelable() const {
+    media::AudioProfile parcelable;
+    parcelable.name = mName;
+    parcelable.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+    parcelable.channelMasks = VALUE_OR_RETURN(
+            convertContainer<std::vector<int32_t>>(mChannelMasks,
+                                                   legacy2aidl_audio_channel_mask_t_int32_t));
+    parcelable.samplingRates = VALUE_OR_RETURN(
+            convertContainer<std::vector<int32_t>>(mSamplingRates,
+                                                   convertIntegral<int32_t, uint32_t>));
+    parcelable.isDynamicFormat = mIsDynamicFormat;
+    parcelable.isDynamicChannels = mIsDynamicChannels;
+    parcelable.isDynamicRate = mIsDynamicRate;
+    parcelable.encapsulationType = VALUE_OR_RETURN(
+            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(mEncapsulationType));
+    return parcelable;
+}
+
+status_t AudioProfile::readFromParcel(const Parcel *parcel) {
+    media::AudioProfile parcelable;
+    if (status_t status = parcelable.readFromParcel(parcel); status != OK) {
         return status;
     }
-    std::vector<int> values;
-    if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
-    mChannelMasks.clear();
-    mChannelMasks.insert(values.begin(), values.end());
-    values.clear();
-    if ((status = parcel->readInt32Vector(&values)) != NO_ERROR) return status;
-    mSamplingRates.clear();
-    mSamplingRates.insert(values.begin(), values.end());
-    if ((status = parcel->readBool(&mIsDynamicFormat)) != NO_ERROR) return status;
-    if ((status = parcel->readBool(&mIsDynamicChannels)) != NO_ERROR) return status;
-    if ((status = parcel->readBool(&mIsDynamicRate)) != NO_ERROR) return status;
-    return status;
+    *this = *VALUE_OR_RETURN_STATUS(fromParcelable(parcelable));
+    return OK;
+}
+
+ConversionResult<sp<AudioProfile>>
+AudioProfile::fromParcelable(const media::AudioProfile& parcelable) {
+    sp<AudioProfile> legacy = new AudioProfile();
+    legacy->mName = parcelable.name;
+    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
+    legacy->mChannelMasks = VALUE_OR_RETURN(
+            convertContainer<ChannelMaskSet>(parcelable.channelMasks,
+                                             aidl2legacy_int32_t_audio_channel_mask_t));
+    legacy->mSamplingRates = VALUE_OR_RETURN(
+            convertContainer<SampleRateSet>(parcelable.samplingRates,
+                                            convertIntegral<uint32_t, int32_t>));
+    legacy->mIsDynamicFormat = parcelable.isDynamicFormat;
+    legacy->mIsDynamicChannels = parcelable.isDynamicChannels;
+    legacy->mIsDynamicRate = parcelable.isDynamicRate;
+    legacy->mEncapsulationType = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+                    parcelable.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile(const media::AudioProfile& aidl) {
+    return AudioProfile::fromParcelable(aidl);
+}
+
+ConversionResult<media::AudioProfile>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy) {
+    return legacy->toParcelable();
 }
 
 ssize_t AudioProfileVector::add(const sp<AudioProfile> &profile)
@@ -258,6 +307,16 @@
     return false;
 }
 
+bool AudioProfileVector::contains(const sp<AudioProfile>& profile) const
+{
+    for (const auto& audioProfile : *this) {
+        if (audioProfile->equals(profile)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 void AudioProfileVector::dump(std::string *dst, int spaces) const
 {
     dst->append(base::StringPrintf("%*s- Profiles:\n", spaces, ""));
@@ -304,4 +363,41 @@
                       });
 }
 
+ConversionResult<AudioProfileVector>
+aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl) {
+    return convertContainer<AudioProfileVector>(aidl, aidl2legacy_AudioProfile);
+}
+
+ConversionResult<std::vector<media::AudioProfile>>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy) {
+    return convertContainer<std::vector<media::AudioProfile>>(legacy, legacy2aidl_AudioProfile);
+}
+
+AudioProfileVector intersectAudioProfiles(const AudioProfileVector& profiles1,
+                                          const AudioProfileVector& profiles2)
+{
+    std::map<audio_format_t, std::pair<ChannelMaskSet, SampleRateSet>> infos2;
+    for (const auto& profile : profiles2) {
+        infos2.emplace(profile->getFormat(),
+                std::make_pair(profile->getChannels(), profile->getSampleRates()));
+    }
+    AudioProfileVector profiles;
+    for (const auto& profile : profiles1) {
+        const auto it = infos2.find(profile->getFormat());
+        if (it == infos2.end()) {
+            continue;
+        }
+        ChannelMaskSet channelMasks = SetIntersection(profile->getChannels(), it->second.first);
+        if (channelMasks.empty()) {
+            continue;
+        }
+        SampleRateSet sampleRates = SetIntersection(profile->getSampleRates(), it->second.second);
+        if (sampleRates.empty()) {
+            continue;
+        }
+        profiles.push_back(new AudioProfile(profile->getFormat(), channelMasks, sampleRates));
+    }
+    return profiles;
+}
+
 } // namespace android
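
intersectAudioProfiles() keeps only the formats present in both inputs, with channel masks and sample rates narrowed to their common subsets. A sketch of combining two capability sets; the wrapper function and argument names are illustrative.

AudioProfileVector commonProfiles(const AudioProfileVector& hwProfiles,
                                  const AudioProfileVector& policyProfiles) {
    // Profiles whose channel-mask or sample-rate intersection is empty are dropped.
    return intersectAudioProfiles(hwProfiles, policyProfiles);
}
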
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 3dbe37d..5cfea81 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -19,6 +19,7 @@
 
 #include <android-base/stringprintf.h>
 #include <audio_utils/string.h>
+#include <media/AidlConversion.h>
 #include <media/DeviceDescriptorBase.h>
 #include <media/TypeConverter.h>
 
@@ -40,11 +41,15 @@
                                          AUDIO_PORT_ROLE_SOURCE),
         mDeviceTypeAddr(deviceTypeAddr)
 {
-    if (mDeviceTypeAddr.mAddress.empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
-        mDeviceTypeAddr.mAddress = "0";
+    if (mDeviceTypeAddr.address().empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
+        mDeviceTypeAddr.setAddress("0");
     }
 }
 
+void DeviceDescriptorBase::setAddress(const std::string &address) {
+    mDeviceTypeAddr.setAddress(address);
+}
+
 void DeviceDescriptorBase::toAudioPortConfig(struct audio_port_config *dstConfig,
                                              const struct audio_port_config *srcConfig) const
 {
@@ -76,13 +81,12 @@
 void DeviceDescriptorBase::toAudioPort(struct audio_port *port) const
 {
     ALOGV("DeviceDescriptorBase::toAudioPort() handle %d type %08x", mId, mDeviceTypeAddr.mType);
-    AudioPort::toAudioPort(port);
-    toAudioPortConfig(&port->active_config);
-    port->id = mId;
-    port->ext.device.type = mDeviceTypeAddr.mType;
-    port->ext.device.encapsulation_modes = mEncapsulationModes;
-    port->ext.device.encapsulation_metadata_types = mEncapsulationMetadataTypes;
-    (void)audio_utils_strlcpy_zerofill(port->ext.device.address, mDeviceTypeAddr.getAddress());
+    toAudioPortInternal(port);
+}
+
+void DeviceDescriptorBase::toAudioPort(struct audio_port_v7 *port) const {
+    ALOGV("DeviceDescriptorBase::toAudioPort() v7 handle %d type %08x", mId, mDeviceTypeAddr.mType);
+    toAudioPortInternal(port);
 }
 
 status_t DeviceDescriptorBase::setEncapsulationModes(uint32_t encapsulationModes) {
@@ -118,23 +122,21 @@
             spaces, "", ::android::toString(mDeviceTypeAddr.mType).c_str()));
 
     dst->append(base::StringPrintf(
-            "%*s- supported encapsulation modes: %u", spaces, "", mEncapsulationModes));
+            "%*s- supported encapsulation modes: %u\n", spaces, "", mEncapsulationModes));
     dst->append(base::StringPrintf(
-            "%*s- supported encapsulation metadata types: %u",
+            "%*s- supported encapsulation metadata types: %u\n",
             spaces, "", mEncapsulationMetadataTypes));
 
-    if (mDeviceTypeAddr.mAddress.size() != 0) {
+    if (mDeviceTypeAddr.address().size() != 0) {
         dst->append(base::StringPrintf(
                 "%*s- address: %-32s\n", spaces, "", mDeviceTypeAddr.getAddress()));
     }
     AudioPort::dump(dst, spaces, verbose);
 }
 
-std::string DeviceDescriptorBase::toString() const
+std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
 {
-    std::stringstream sstream;
-    sstream << "type:0x" << std::hex << type() << ",@:" << mDeviceTypeAddr.mAddress;
-    return sstream.str();
+    return mDeviceTypeAddr.toString(includeSensitiveInfo);
 }
 
 void DeviceDescriptorBase::log() const
@@ -154,26 +156,53 @@
            mDeviceTypeAddr.equals(other->mDeviceTypeAddr);
 }
 
+
 status_t DeviceDescriptorBase::writeToParcel(Parcel *parcel) const
 {
-    status_t status = NO_ERROR;
-    if ((status = AudioPort::writeToParcel(parcel)) != NO_ERROR) return status;
-    if ((status = AudioPortConfig::writeToParcel(parcel)) != NO_ERROR) return status;
-    if ((status = parcel->writeParcelable(mDeviceTypeAddr)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mEncapsulationModes)) != NO_ERROR) return status;
-    if ((status = parcel->writeUint32(mEncapsulationMetadataTypes)) != NO_ERROR) return status;
-    return status;
+    media::AudioPort parcelable;
+    return writeToParcelable(&parcelable)
+        ?: parcelable.writeToParcel(parcel);
 }
 
-status_t DeviceDescriptorBase::readFromParcel(const Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    if ((status = AudioPort::readFromParcel(parcel)) != NO_ERROR) return status;
-    if ((status = AudioPortConfig::readFromParcel(parcel)) != NO_ERROR) return status;
-    if ((status = parcel->readParcelable(&mDeviceTypeAddr)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mEncapsulationModes)) != NO_ERROR) return status;
-    if ((status = parcel->readUint32(&mEncapsulationMetadataTypes)) != NO_ERROR) return status;
-    return status;
+status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
+    status_t status = AudioPort::writeToParcelable(parcelable)
+                      ?: AudioPortConfig::writeToParcelable(&parcelable->activeConfig);
+    if (status != OK) {
+        return status;
+    }
+    parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+
+    media::AudioPortDeviceExt ext;
+    ext.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(mDeviceTypeAddr));
+    ext.encapsulationModes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
+    ext.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
+    UNION_SET(parcelable->ext, device, std::move(ext));
+    return OK;
+}
+
+status_t DeviceDescriptorBase::readFromParcel(const Parcel *parcel) {
+    media::AudioPort parcelable;
+    return parcelable.readFromParcel(parcel)
+        ?: readFromParcelable(parcelable);
+}
+
+status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
+    if (parcelable.type != media::AudioPortType::DEVICE) {
+        return BAD_VALUE;
+    }
+    status_t status = AudioPort::readFromParcelable(parcelable)
+                      ?: AudioPortConfig::readFromParcelable(parcelable.activeConfig);
+    if (status != OK) {
+        return status;
+    }
+
+    media::AudioPortDeviceExt ext = VALUE_OR_RETURN_STATUS(UNION_GET(parcelable.ext, device));
+    mDeviceTypeAddr = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceTypeAddress(ext.device));
+    mEncapsulationModes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMode_mask(ext.encapsulationModes));
+    mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(ext.encapsulationMetadataTypes));
+    return OK;
 }
 
 std::string toString(const DeviceDescriptorBaseVector& devices)
@@ -197,4 +226,24 @@
     return deviceTypeAddrs;
 }
 
+ConversionResult<sp<DeviceDescriptorBase>>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl) {
+    sp<DeviceDescriptorBase> result = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
+    status_t status = result->readFromParcelable(aidl);
+    if (status != OK) {
+        return base::unexpected(status);
+    }
+    return result;
+}
+
+ConversionResult<media::AudioPort>
+legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy) {
+    media::AudioPort aidl;
+    status_t status = legacy->writeToParcelable(&aidl);
+    if (status != OK) {
+        return base::unexpected(status);
+    }
+    return aidl;
+}
+
 } // namespace android
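
DeviceDescriptorBase now parcels itself as a media::AudioPort carrying an AudioPortDeviceExt, and the new helpers expose the same conversion directly. A sketch of a round trip; the wrapper function and the speaker device type are example values.

void roundTripDevice() {
    sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(AUDIO_DEVICE_OUT_SPEAKER);
    ConversionResult<media::AudioPort> aidl = legacy2aidl_DeviceDescriptorBase(device);
    if (aidl.ok()) {
        // The parcelable's ext union holds the device type/address and the
        // encapsulation capabilities.
        ConversionResult<sp<DeviceDescriptorBase>> restored =
                aidl2legacy_DeviceDescriptorBase(aidl.value());
    }
}
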
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 72fda49..204b365 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -50,6 +50,15 @@
     return intersection;
 }
 
+template<typename T>
+static std::set<T> SetIntersection(const std::set<T>& a, const std::set<T>& b) {
+    std::set<T> intersection;
+    std::set_intersection(a.begin(), a.end(),
+                          b.begin(), b.end(),
+                          std::inserter(intersection, intersection.begin()));
+    return intersection;
+}
+
 static inline ChannelMaskSet asInMask(const ChannelMaskSet& channelMasks) {
     ChannelMaskSet inMaskSet;
     for (const auto &channel : channelMasks) {
@@ -96,7 +105,7 @@
 static inline audio_devices_t deviceTypesToBitMask(const DeviceTypeSet& deviceTypes) {
     audio_devices_t types = AUDIO_DEVICE_NONE;
     for (auto deviceType : deviceTypes) {
-        types |= deviceType;
+        types = static_cast<audio_devices_t>(types | deviceType);
     }
     return types;
 }
@@ -131,4 +140,4 @@
 std::string toString(const DeviceTypeSet& deviceTypes);
 
 
-} // namespace android
\ No newline at end of file
+} // namespace android
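
The new SetIntersection() overload works on any std::set and is what intersectAudioProfiles() relies on for channel masks and sample rates. A small usage sketch with example sample-rate values; the wrapper function is illustrative.

SampleRateSet commonRates() {
    SampleRateSet a = {44100, 48000, 96000};
    SampleRateSet b = {48000, 96000, 192000};
    return SetIntersection(a, b);  // {48000, 96000}
}
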
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index 60ea78e..8edcc58 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -19,36 +19,53 @@
 #include <string>
 #include <vector>
 
+#include <android/media/AudioDevice.h>
 #include <binder/Parcelable.h>
 #include <binder/Parcel.h>
 #include <media/AudioContainers.h>
+#include <media/AidlConversionUtil.h>
 #include <system/audio.h>
 #include <utils/Errors.h>
 
 namespace android {
 
-struct AudioDeviceTypeAddr : public Parcelable {
+class AudioDeviceTypeAddr : public Parcelable {
+public:
     AudioDeviceTypeAddr() = default;
 
-    AudioDeviceTypeAddr(audio_devices_t type, const std::string& address) :
-            mType(type), mAddress(address) {}
+    AudioDeviceTypeAddr(audio_devices_t type, const std::string& address);
 
     const char* getAddress() const;
 
+    const std::string& address() const;
+
+    void setAddress(const std::string& address);
+
+    bool isAddressSensitive();
+
     bool equals(const AudioDeviceTypeAddr& other) const;
 
     AudioDeviceTypeAddr& operator= (const AudioDeviceTypeAddr&) = default;
 
     bool operator<(const AudioDeviceTypeAddr& other) const;
 
+    bool operator==(const AudioDeviceTypeAddr& rhs) const;
+
+    bool operator!=(const AudioDeviceTypeAddr& rhs) const;
+
     void reset();
 
+    std::string toString(bool includeSensitiveInfo=false) const;
+
     status_t readFromParcel(const Parcel *parcel) override;
 
     status_t writeToParcel(Parcel *parcel) const override;
 
     audio_devices_t mType = AUDIO_DEVICE_NONE;
+
+private:
     std::string mAddress;
+    bool mIsAddressSensitive = false;
 };
 
 using AudioDeviceTypeAddrVector = std::vector<AudioDeviceTypeAddr>;
@@ -58,4 +75,21 @@
  */
 DeviceTypeSet getAudioDeviceTypes(const AudioDeviceTypeAddrVector& deviceTypeAddrs);
 
-}
+/**
+ * Return a collection of AudioDeviceTypeAddrs that are present in `devices` but not
+ * in `devicesToExclude`.
+ */
+AudioDeviceTypeAddrVector excludeDeviceTypeAddrsFrom(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToExclude);
+
+std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
+                                          bool includeSensitiveInfo=false);
+
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioDeviceTypeAddr>
+aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl);
+ConversionResult<media::AudioDevice>
+legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy);
+
+} // namespace android
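
excludeDeviceTypeAddrsFrom() and dumpAudioDeviceTypeAddrVector() operate on whole device lists. A sketch of filtering and dumping; the wrapper function, device types and MAC address are example values.

std::string dumpWithoutBluetooth() {
    const AudioDeviceTypeAddr speaker(AUDIO_DEVICE_OUT_SPEAKER, "");
    const AudioDeviceTypeAddr btA2dp(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55");

    // Keeps only the speaker entry.
    AudioDeviceTypeAddrVector remaining =
            excludeDeviceTypeAddrsFrom({speaker, btA2dp}, {btA2dp});

    // Space-separated dump; MAC and IP addresses show as SUPPRESSED unless
    // includeSensitiveInfo is passed as true.
    return dumpAudioDeviceTypeAddrVector(remaining);
}
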
diff --git a/media/libaudiofoundation/include/media/AudioGain.h b/media/libaudiofoundation/include/media/AudioGain.h
index 859f1e7..a06b686 100644
--- a/media/libaudiofoundation/include/media/AudioGain.h
+++ b/media/libaudiofoundation/include/media/AudioGain.h
@@ -16,8 +16,10 @@
 
 #pragma once
 
+#include <android/media/AudioGain.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 #include <system/audio.h>
@@ -72,6 +74,9 @@
     status_t writeToParcel(Parcel* parcel) const override;
     status_t readFromParcel(const Parcel* parcel) override;
 
+    status_t writeToParcelable(media::AudioGain* parcelable) const;
+    status_t readFromParcelable(const media::AudioGain& parcelable);
+
 private:
     int               mIndex;
     struct audio_gain mGain;
@@ -79,6 +84,12 @@
     bool              mUseForVolume = false;
 };
 
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<AudioGain>>
+aidl2legacy_AudioGain(const media::AudioGain& aidl);
+ConversionResult<media::AudioGain>
+legacy2aidl_AudioGain(const sp<AudioGain>& legacy);
+
 class AudioGains : public std::vector<sp<AudioGain> >, public Parcelable
 {
 public:
@@ -104,4 +115,10 @@
     status_t readFromParcel(const Parcel* parcel) override;
 };
 
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioGains>
+aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl);
+ConversionResult<std::vector<media::AudioGain>>
+legacy2aidl_AudioGains(const AudioGains& legacy);
+
 } // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 3c013cb..1cee1c9 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -17,7 +17,11 @@
 #pragma once
 
 #include <string>
+#include <type_traits>
 
+#include <android/media/AudioPort.h>
+#include <android/media/AudioPortConfig.h>
+#include <android/media/ExtraAudioDescriptor.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioGain.h>
@@ -48,6 +52,8 @@
 
     virtual void toAudioPort(struct audio_port *port) const;
 
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
+
     virtual void addAudioProfile(const sp<AudioProfile> &profile) {
         mProfiles.add(profile);
     }
@@ -62,8 +68,18 @@
     void setAudioProfiles(const AudioProfileVector &profiles) { mProfiles = profiles; }
     AudioProfileVector &getAudioProfiles() { return mProfiles; }
 
+    void setExtraAudioDescriptors(
+            const std::vector<media::ExtraAudioDescriptor> extraAudioDescriptors) {
+        mExtraAudioDescriptors = extraAudioDescriptors;
+    }
+    std::vector<media::ExtraAudioDescriptor> &getExtraAudioDescriptors() {
+        return mExtraAudioDescriptors;
+    }
+
     virtual void importAudioPort(const sp<AudioPort>& port, bool force = false);
 
+    virtual void importAudioPort(const audio_port_v7& port);
+
     status_t checkGain(const struct audio_gain_config *gainConfig, int index) const {
         if (index < 0 || (size_t)index >= mGains.size()) {
             return BAD_VALUE;
@@ -86,12 +102,31 @@
     status_t writeToParcel(Parcel* parcel) const override;
     status_t readFromParcel(const Parcel* parcel) override;
 
+    status_t writeToParcelable(media::AudioPort* parcelable) const;
+    status_t readFromParcelable(const media::AudioPort& parcelable);
+
     AudioGains mGains; // gain controllers
 protected:
     std::string  mName;
     audio_port_type_t mType;
     audio_port_role_t mRole;
     AudioProfileVector mProfiles; // AudioProfiles supported by this port (format, Rates, Channels)
+
+    // Audio capabilities that are defined by hardware descriptors when the format is unrecognized
+    // by the platform, e.g. short audio descriptor in EDID for HDMI.
+    std::vector<media::ExtraAudioDescriptor> mExtraAudioDescriptors;
+private:
+    template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+                                        || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+    void toAudioPortBase(T* port) const {
+        port->role = mRole;
+        port->type = mType;
+        strlcpy(port->name, mName.c_str(), AUDIO_PORT_MAX_NAME_LEN);
+        port->num_gains = std::min(mGains.size(), (size_t) AUDIO_PORT_MAX_GAINS);
+        for (size_t i = 0; i < port->num_gains; i++) {
+            port->gains[i] = mGains[i]->getGain();
+        }
+    }
 };
 
 
@@ -119,6 +154,8 @@
 
     status_t writeToParcel(Parcel* parcel) const override;
     status_t readFromParcel(const Parcel* parcel) override;
+    status_t writeToParcelable(media::AudioPortConfig* parcelable) const;
+    status_t readFromParcelable(const media::AudioPortConfig& parcelable);
 
 protected:
     unsigned int mSamplingRate = 0u;
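
mExtraAudioDescriptors carries capabilities the platform cannot express as a format, for example a short audio descriptor read from an HDMI EDID. A sketch of attaching one to a port; the wrapper function, the AudioStandard::EDID enumerator name and the payload bytes are assumptions of the sketch.

void attachEdidDescriptor(const sp<AudioPort>& audioPort) {
    media::ExtraAudioDescriptor desc;
    desc.standard = media::AudioStandard::EDID;  // assumed enumerator name
    desc.audioDescriptor = {0x0f, 0x7f, 0x07};   // example descriptor payload
    audioPort->setExtraAudioDescriptors({desc});
}
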
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 730138a..6a36e78 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -19,8 +19,10 @@
 #include <string>
 #include <vector>
 
+#include <android/media/AudioProfile.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
+#include <media/AidlConversion.h>
 #include <media/AudioContainers.h>
 #include <system/audio.h>
 #include <utils/RefBase.h>
@@ -36,6 +38,10 @@
     AudioProfile(audio_format_t format,
                  const ChannelMaskSet &channelMasks,
                  const SampleRateSet &samplingRateCollection);
+    AudioProfile(audio_format_t format,
+                 const ChannelMaskSet &channelMasks,
+                 const SampleRateSet &samplingRateCollection,
+                 audio_encapsulation_type_t encapsulationType);
 
     audio_format_t getFormat() const { return mFormat; }
     const ChannelMaskSet &getChannels() const { return mChannelMasks; }
@@ -66,6 +72,11 @@
 
     bool isDynamic() { return mIsDynamicFormat || mIsDynamicChannels || mIsDynamicRate; }
 
+    audio_encapsulation_type_t getEncapsulationType() const { return mEncapsulationType; }
+    void setEncapsulationType(audio_encapsulation_type_t encapsulationType) {
+        mEncapsulationType = encapsulationType;
+    }
+
     void dump(std::string *dst, int spaces) const;
 
     bool equals(const sp<AudioProfile>& other) const;
@@ -73,7 +84,11 @@
     status_t writeToParcel(Parcel* parcel) const override;
     status_t readFromParcel(const Parcel* parcel) override;
 
+    ConversionResult<media::AudioProfile> toParcelable() const;
+    static ConversionResult<sp<AudioProfile>> fromParcelable(const media::AudioProfile& parcelable);
+
 private:
+
     std::string  mName;
     audio_format_t mFormat; // The format for an audio profile should only be set when initialized.
     ChannelMaskSet mChannelMasks;
@@ -82,8 +97,19 @@
     bool mIsDynamicFormat = false;
     bool mIsDynamicChannels = false;
     bool mIsDynamicRate = false;
+
+    audio_encapsulation_type_t mEncapsulationType = AUDIO_ENCAPSULATION_TYPE_NONE;
+
+    AudioProfile() = default;
+    AudioProfile& operator=(const AudioProfile& other);
 };
 
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile(const media::AudioProfile& aidl);
+ConversionResult<media::AudioProfile>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy);
+
 class AudioProfileVector : public std::vector<sp<AudioProfile>>, public Parcelable
 {
 public:
@@ -105,6 +131,8 @@
     bool hasDynamicProfile() const;
     bool hasDynamicRateFor(audio_format_t format) const;
 
+    bool contains(const sp<AudioProfile>& profile) const;
+
     virtual void dump(std::string *dst, int spaces) const;
 
     bool equals(const AudioProfileVector& other) const;
@@ -115,4 +143,13 @@
 
 bool operator == (const AudioProfile &left, const AudioProfile &right);
 
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<AudioProfileVector>
+aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl);
+ConversionResult<std::vector<media::AudioProfile>>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy);
+
+AudioProfileVector intersectAudioProfiles(const AudioProfileVector& profiles1,
+                                          const AudioProfileVector& profiles2);
+
 } // namespace android
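A rough illustration of the value-or-error calling convention that the aidl2legacy_* / legacy2aidl_* declarations above follow. SimpleResult and the two profile structs are simplified stand-ins, not the real ConversionResult or media::AudioProfile types:

#include <string>
#include <variant>

template <typename T>
using SimpleResult = std::variant<T, int>;  // value on success, status-like error code otherwise

struct AidlProfileLike { std::string formatName; };    // stand-in for media::AudioProfile
struct LegacyProfileLike { std::string formatName; };  // stand-in for sp<AudioProfile>

SimpleResult<LegacyProfileLike> aidl2legacy_ProfileLike(const AidlProfileLike& aidl) {
    if (aidl.formatName.empty()) {
        return SimpleResult<LegacyProfileLike>{-22};  // reject invalid input, BAD_VALUE-style
    }
    return SimpleResult<LegacyProfileLike>{LegacyProfileLike{aidl.formatName}};
}

int main() {
    auto result = aidl2legacy_ProfileLike(AidlProfileLike{"AUDIO_FORMAT_PCM_16_BIT"});
    if (auto* legacy = std::get_if<LegacyProfileLike>(&result)) {
        return 0;  // success path: use *legacy
    }
    return 1;      // error path: callers propagate the contained status code
}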
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index af04721..140ce36 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -18,6 +18,7 @@
 
 #include <vector>
 
+#include <android/media/AudioPort.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioContainers.h>
@@ -41,8 +42,8 @@
     virtual ~DeviceDescriptorBase() {}
 
     audio_devices_t type() const { return mDeviceTypeAddr.mType; }
-    std::string address() const { return mDeviceTypeAddr.mAddress; }
-    void setAddress(const std::string &address) { mDeviceTypeAddr.mAddress = address; }
+    const std::string& address() const { return mDeviceTypeAddr.address(); }
+    void setAddress(const std::string &address);
     const AudioDeviceTypeAddr& getDeviceTypeAddr() const { return mDeviceTypeAddr; }
 
     // AudioPortConfig
@@ -54,6 +55,7 @@
 
     // AudioPort
     virtual void toAudioPort(struct audio_port *port) const;
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
 
     status_t setEncapsulationModes(uint32_t encapsulationModes);
     status_t setEncapsulationMetadataTypes(uint32_t encapsulationMetadataTypes);
@@ -61,17 +63,39 @@
     void dump(std::string *dst, int spaces, int index,
               const char* extraInfo = nullptr, bool verbose = true) const;
     void log() const;
-    std::string toString() const;
+
+    /**
+     * Return a string to describe the DeviceDescriptor.
+     *
+     * @param includeSensitiveInfo when true, sensitive information is included in the output.
+     * @return a string describing the DeviceDescriptor.
+     */
+    std::string toString(bool includeSensitiveInfo = false) const;
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
     status_t writeToParcel(Parcel* parcel) const override;
     status_t readFromParcel(const Parcel* parcel) override;
 
+    status_t writeToParcelable(media::AudioPort* parcelable) const;
+    status_t readFromParcelable(const media::AudioPort& parcelable);
+
 protected:
     AudioDeviceTypeAddr mDeviceTypeAddr;
     uint32_t mEncapsulationModes = 0;
     uint32_t mEncapsulationMetadataTypes = 0;
+private:
+    template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+                                        || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+    void toAudioPortInternal(T* port) const {
+        AudioPort::toAudioPort(port);
+        toAudioPortConfig(&port->active_config);
+        port->id = mId;
+        port->ext.device.type = mDeviceTypeAddr.mType;
+        port->ext.device.encapsulation_modes = mEncapsulationModes;
+        port->ext.device.encapsulation_metadata_types = mEncapsulationMetadataTypes;
+        (void)audio_utils_strlcpy_zerofill(port->ext.device.address, mDeviceTypeAddr.getAddress());
+    }
 };
 
 using DeviceDescriptorBaseVector = std::vector<sp<DeviceDescriptorBase>>;
@@ -87,4 +111,10 @@
  */
 AudioDeviceTypeAddrVector deviceTypeAddrsFromDescriptors(const DeviceDescriptorBaseVector& devices);
 
+// Conversion routines, according to AidlConversion.h conventions.
+ConversionResult<sp<DeviceDescriptorBase>>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl);
+ConversionResult<media::AudioPort>
+legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy);
+
 } // namespace android
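The two toAudioPort() overloads added above both forward to a single private template (toAudioPortInternal), so the shared field population lives in one place. A compact sketch of that overload-forwarding shape with placeholder structs (DeviceLike, LegacyPort, LegacyPortV7 are hypothetical names):

struct LegacyPort { int id; };     // placeholder for struct audio_port
struct LegacyPortV7 { int id; };   // placeholder for struct audio_port_v7

class DeviceLike {
public:
    void toAudioPort(LegacyPort* port) const { toAudioPortInternal(port); }
    void toAudioPort(LegacyPortV7* port) const { toAudioPortInternal(port); }
private:
    template <typename T>
    void toAudioPortInternal(T* port) const {
        port->id = mId;  // common fields are filled in exactly one place
    }
    int mId = 42;
};

int main() {
    LegacyPortV7 port{};
    DeviceLike().toAudioPort(&port);
    return port.id == 42 ? 0 : 1;
}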
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index f258b14..bb9a5f2 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "audiofoundation_parcelable_test",
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 1709d1e..bd24c84 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libaudiohal",
 
@@ -14,10 +23,10 @@
     ],
 
     required: [
-        "libaudiohal@2.0",
         "libaudiohal@4.0",
         "libaudiohal@5.0",
         "libaudiohal@6.0",
+        "libaudiohal@7.0",
     ],
 
     shared_libs: [
@@ -30,6 +39,7 @@
     header_libs: [
         "libaudiohal_headers",
         "libbase_headers",
+        "libmediautils_headers",
     ]
 }
 
@@ -62,8 +72,6 @@
     export_include_dirs: ["include"],
 
     // This is needed because the stream interface includes media/MicrophoneInfo.h
-    // which is not in any library but has a dependency on headers from libbinder.
-    header_libs: ["libbinder_headers"],
-
-    export_header_lib_headers: ["libbinder_headers"],
+    header_libs: ["av-headers"],
+    export_header_lib_headers: ["av-headers"],
 }
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 5985ef0..e420d07 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -31,10 +31,10 @@
 /** Supported HAL versions, in order of preference.
  */
 const char* sAudioHALVersions[] = {
+    "7.0",
     "6.0",
     "5.0",
     "4.0",
-    "2.0",
     nullptr
 };
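sAudioHALVersions is a nullptr-terminated array scanned front-to-back, so newer HAL versions win. A small sketch of that probing loop, with a hypothetical tryConnect() in place of the real HIDL service lookup:

#include <cstdio>
#include <cstring>

static const char* kVersions[] = { "7.0", "6.0", "5.0", "4.0", nullptr };

// Hypothetical probe; the real code asks the HIDL service manager for each version.
static bool tryConnect(const char* version) {
    return std::strcmp(version, "6.0") == 0;  // pretend only a 6.0 HAL is installed
}

int main() {
    for (const char** v = kVersions; *v != nullptr; ++v) {
        if (tryConnect(*v)) {
            std::printf("using audio HAL %s\n", *v);  // first match = most preferred
            return 0;
        }
    }
    std::printf("no audio HAL found\n");
    return 1;
}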
 
diff --git a/media/libaudiohal/OWNERS b/media/libaudiohal/OWNERS
index 1456ab6..71b17e6 100644
--- a/media/libaudiohal/OWNERS
+++ b/media/libaudiohal/OWNERS
@@ -1,2 +1 @@
-krocard@google.com
 mnaganov@google.com
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 967fba1..a2c6e8a 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libaudiohal_default",
 
@@ -26,6 +35,7 @@
         "android.hardware.audio.common-util",
         "android.hidl.allocator@1.0",
         "android.hidl.memory@1.0",
+        "av-types-aidl-cpp",
         "libaudiofoundation",
         "libaudiohal_deathhandler",
         "libaudioutils",
@@ -53,29 +63,15 @@
 }
 
 cc_library_shared {
-    name: "libaudiohal@2.0",
-    defaults: ["libaudiohal_default"],
-    shared_libs: [
-        "android.hardware.audio.common@2.0",
-        "android.hardware.audio.common@2.0-util",
-        "android.hardware.audio.effect@2.0",
-        "android.hardware.audio@2.0",
-    ],
-    cflags: [
-        "-DMAJOR_VERSION=2",
-        "-DMINOR_VERSION=0",
-        "-include common/all-versions/VersionMacro.h",
-    ]
-}
-
-cc_library_shared {
     name: "libaudiohal@4.0",
     defaults: ["libaudiohal_default"],
     shared_libs: [
         "android.hardware.audio.common@4.0",
         "android.hardware.audio.common@4.0-util",
         "android.hardware.audio.effect@4.0",
+        "android.hardware.audio.effect@4.0-util",
         "android.hardware.audio@4.0",
+        "android.hardware.audio@4.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=4",
@@ -91,7 +87,9 @@
         "android.hardware.audio.common@5.0",
         "android.hardware.audio.common@5.0-util",
         "android.hardware.audio.effect@5.0",
+        "android.hardware.audio.effect@5.0-util",
         "android.hardware.audio@5.0",
+        "android.hardware.audio@5.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=5",
@@ -107,7 +105,9 @@
         "android.hardware.audio.common@6.0",
         "android.hardware.audio.common@6.0-util",
         "android.hardware.audio.effect@6.0",
+        "android.hardware.audio.effect@6.0-util",
         "android.hardware.audio@6.0",
+        "android.hardware.audio@6.0-util",
     ],
     cflags: [
         "-DMAJOR_VERSION=6",
@@ -116,3 +116,20 @@
     ]
 }
 
+cc_library_shared {
+    name: "libaudiohal@7.0",
+    defaults: ["libaudiohal_default"],
+    shared_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
+        "android.hardware.audio@7.0",
+        "android.hardware.audio@7.0-util",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/ConversionHelperHidl.cpp
index ebed5fd..32eaa31 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperHidl.cpp
@@ -50,12 +50,20 @@
             halKeys.get(String8(AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_METADATA_TYPES),
                         value) == NO_ERROR;
 
+    const bool keepDelayValue =
+            halKeys.get(String8(AudioParameter::keyAdditionalOutputDeviceDelay),
+                        value) == NO_ERROR ||
+            halKeys.get(String8(AudioParameter::keyMaxAdditionalOutputDeviceDelay),
+                        value) == NO_ERROR;
+
     for (size_t i = 0; i < halKeys.size(); ++i) {
         String8 key;
         status_t status = halKeys.getAt(i, key);
         if (status != OK) return status;
         if ((keepFormatValue && key == AudioParameter::keyFormat) ||
-            (keepRoutingValue && key == AudioParameter::keyRouting)) {
+            (keepRoutingValue && key == AudioParameter::keyRouting) ||
+            (keepDelayValue && key == AudioParameter::keyAdditionalOutputDeviceDelay) ||
+            (keepDelayValue && key == AudioParameter::keyMaxAdditionalOutputDeviceDelay)) {
             AudioParameter keepValueParam;
             halKeys.getAt(i, key, value);
             keepValueParam.add(key, value);
@@ -112,129 +120,5 @@
     ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
 }
 
-#if MAJOR_VERSION >= 4
-// TODO: Use the same implementation in the hal when it moves to a util library.
-static std::string deviceAddressToHal(const DeviceAddress& address) {
-    // HAL assumes that the address is NUL-terminated.
-    char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN];
-    memset(halAddress, 0, sizeof(halAddress));
-    audio_devices_t halDevice = static_cast<audio_devices_t>(address.device);
-    if (getAudioDeviceOutAllA2dpSet().count(halDevice) > 0 ||
-        halDevice == AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
-        snprintf(halAddress, sizeof(halAddress), "%02X:%02X:%02X:%02X:%02X:%02X",
-                 address.address.mac[0], address.address.mac[1], address.address.mac[2],
-                 address.address.mac[3], address.address.mac[4], address.address.mac[5]);
-    } else if (halDevice == AUDIO_DEVICE_OUT_IP || halDevice == AUDIO_DEVICE_IN_IP) {
-        snprintf(halAddress, sizeof(halAddress), "%d.%d.%d.%d", address.address.ipv4[0],
-                 address.address.ipv4[1], address.address.ipv4[2], address.address.ipv4[3]);
-    } else if (getAudioDeviceOutAllUsbSet().count(halDevice) > 0 ||
-               getAudioDeviceInAllUsbSet().count(halDevice) > 0) {
-        snprintf(halAddress, sizeof(halAddress), "card=%d;device=%d", address.address.alsa.card,
-                 address.address.alsa.device);
-    } else if (halDevice == AUDIO_DEVICE_OUT_BUS || halDevice == AUDIO_DEVICE_IN_BUS) {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
-    } else if (halDevice == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ||
-               halDevice == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.rSubmixAddress.c_str());
-    } else {
-        snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
-    }
-    return halAddress;
-}
-
-//local conversion helpers
-
-static audio_microphone_channel_mapping_t  channelMappingToHal(AudioMicrophoneChannelMapping mapping) {
-    switch (mapping) {
-        case AudioMicrophoneChannelMapping::UNUSED:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
-        case AudioMicrophoneChannelMapping::DIRECT:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT;
-        case AudioMicrophoneChannelMapping::PROCESSED:
-            return AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED;
-        default:
-            LOG_ALWAYS_FATAL("Unknown channelMappingToHal conversion %d", mapping);
-    }
-}
-
-static audio_microphone_location_t locationToHal(AudioMicrophoneLocation location) {
-    switch (location) {
-        case AudioMicrophoneLocation::UNKNOWN:
-            return AUDIO_MICROPHONE_LOCATION_UNKNOWN;
-        case AudioMicrophoneLocation::MAINBODY:
-            return AUDIO_MICROPHONE_LOCATION_MAINBODY;
-        case AudioMicrophoneLocation::MAINBODY_MOVABLE:
-            return AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE;
-        case AudioMicrophoneLocation::PERIPHERAL:
-            return AUDIO_MICROPHONE_LOCATION_PERIPHERAL;
-        default:
-            LOG_ALWAYS_FATAL("Unknown locationToHal conversion %d", location);
-    }
-}
-static audio_microphone_directionality_t directionalityToHal(AudioMicrophoneDirectionality dir) {
-    switch (dir) {
-        case AudioMicrophoneDirectionality::UNKNOWN:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN;
-        case AudioMicrophoneDirectionality::OMNI:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
-        case AudioMicrophoneDirectionality::BI_DIRECTIONAL:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL;
-        case AudioMicrophoneDirectionality::CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID;
-        case AudioMicrophoneDirectionality::HYPER_CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID;
-        case AudioMicrophoneDirectionality::SUPER_CARDIOID:
-            return AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID;
-        default:
-            LOG_ALWAYS_FATAL("Unknown directionalityToHal conversion %d", dir);
-    }
-}
-
-void microphoneInfoToHal(const MicrophoneInfo& src,
-                         audio_microphone_characteristic_t *pDst) {
-    if (pDst != NULL) {
-        snprintf(pDst->device_id, sizeof(pDst->device_id),
-                 "%s", src.deviceId.c_str());
-        pDst->device = static_cast<audio_devices_t>(src.deviceAddress.device);
-        snprintf(pDst->address, sizeof(pDst->address),
-                 "%s", deviceAddressToHal(src.deviceAddress).c_str());
-        if (src.channelMapping.size() > AUDIO_CHANNEL_COUNT_MAX) {
-            ALOGW("microphoneInfoToStruct found %zu channelMapping elements. Max expected is %d",
-                  src.channelMapping.size(), AUDIO_CHANNEL_COUNT_MAX);
-        }
-        size_t ch;
-        for (ch = 0; ch < src.channelMapping.size() && ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
-            pDst->channel_mapping[ch] = channelMappingToHal(src.channelMapping[ch]);
-        }
-        for (; ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
-            pDst->channel_mapping[ch] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
-        }
-        pDst->location = locationToHal(src.location);
-        pDst->group = (audio_microphone_group_t)src.group;
-        pDst->index_in_the_group = (unsigned int)src.indexInTheGroup;
-        pDst->sensitivity = src.sensitivity;
-        pDst->max_spl = src.maxSpl;
-        pDst->min_spl = src.minSpl;
-        pDst->directionality = directionalityToHal(src.directionality);
-        pDst->num_frequency_responses = (unsigned int)src.frequencyResponse.size();
-        if (pDst->num_frequency_responses > AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES) {
-            ALOGW("microphoneInfoToStruct found %d frequency responses. Max expected is %d",
-                  pDst->num_frequency_responses, AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES);
-            pDst->num_frequency_responses = AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES;
-        }
-        for (size_t k = 0; k < pDst->num_frequency_responses; k++) {
-            pDst->frequency_responses[0][k] = src.frequencyResponse[k].frequency;
-            pDst->frequency_responses[1][k] = src.frequencyResponse[k].level;
-        }
-        pDst->geometric_location.x = src.position.x;
-        pDst->geometric_location.y = src.position.y;
-        pDst->geometric_location.z = src.position.z;
-        pDst->orientation.x = src.orientation.x;
-        pDst->orientation.y = src.orientation.y;
-        pDst->orientation.z = src.orientation.z;
-    }
-}
-#endif
-
 }  // namespace CPP_VERSION
 }  // namespace android
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index fb3bb9d..59122c7 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -82,12 +82,6 @@
     void emitError(const char* funcName, const char* description);
 };
 
-#if MAJOR_VERSION >= 4
-using ::android::hardware::audio::CPP_VERSION::MicrophoneInfo;
-void microphoneInfoToHal(const MicrophoneInfo& src,
-                         audio_microphone_characteristic_t *pDst);
-#endif
-
 }  // namespace CPP_VERSION
 }  // namespace android
 
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 7d0d83d..ca4f663 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -19,22 +19,24 @@
 #define LOG_TAG "DeviceHalHidl"
 //#define LOG_NDEBUG 0
 
-#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <cutils/native_handle.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioContainers.h>
 #include <utils/Log.h>
 
+#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
+#include <HidlUtils.h>
 #include <common/all-versions/VersionUtils.h>
+#include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
 #include "EffectHalHidl.h"
-#include "HidlUtils.h"
+#include "ParameterUtils.h"
 #include "StreamHalHidl.h"
-#include "VersionUtils.h"
 
 using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
+using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 
@@ -46,47 +48,6 @@
 
 using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
 
-namespace {
-
-status_t deviceAddressFromHal(
-        audio_devices_t device, const char* halAddress, DeviceAddress* address) {
-    address->device = AudioDevice(device);
-
-    if (halAddress == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
-        return OK;
-    }
-    if (getAudioDeviceOutAllA2dpSet().count(device) > 0
-            || device == AUDIO_DEVICE_IN_BLUETOOTH_A2DP) {
-        int status = sscanf(halAddress,
-                "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
-                &address->address.mac[0], &address->address.mac[1], &address->address.mac[2],
-                &address->address.mac[3], &address->address.mac[4], &address->address.mac[5]);
-        return status == 6 ? OK : BAD_VALUE;
-    } else if (device == AUDIO_DEVICE_OUT_IP || device == AUDIO_DEVICE_IN_IP) {
-        int status = sscanf(halAddress,
-                "%hhu.%hhu.%hhu.%hhu",
-                &address->address.ipv4[0], &address->address.ipv4[1],
-                &address->address.ipv4[2], &address->address.ipv4[3]);
-        return status == 4 ? OK : BAD_VALUE;
-    } else if (getAudioDeviceOutAllUsbSet().count(device) > 0
-            || getAudioDeviceInAllUsbSet().count(device) > 0) {
-        int status = sscanf(halAddress,
-                "card=%d;device=%d",
-                &address->address.alsa.card, &address->address.alsa.device);
-        return status == 2 ? OK : BAD_VALUE;
-    } else if (device == AUDIO_DEVICE_OUT_BUS || device == AUDIO_DEVICE_IN_BUS) {
-        address->busAddress = halAddress;
-        return OK;
-    } else if (device == AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-            || device == AUDIO_DEVICE_IN_REMOTE_SUBMIX) {
-        address->rSubmixAddress = halAddress;
-        return OK;
-    }
-    return OK;
-}
-
-}  // namespace
-
 DeviceHalHidl::DeviceHalHidl(const sp<IDevice>& device)
         : ConversionHelperHidl("Device"), mDevice(device),
           mPrimaryDevice(IPrimaryDevice::castFrom(device)) {
@@ -212,7 +173,7 @@
         const struct audio_config *config, size_t *size) {
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
-    HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+    HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
     Result retval;
     Return<void> ret = mDevice->getInputBufferSize(
             hidlConfig,
@@ -234,16 +195,22 @@
         sp<StreamOutHalInterface> *outStream) {
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
-    status_t status = deviceAddressFromHal(deviceType, address, &hidlDevice);
-    if (status != OK) return status;
+    if (status_t status = CoreUtils::deviceAddressFromHal(deviceType, address, &hidlDevice);
+            status != OK) {
+        return status;
+    }
     AudioConfig hidlConfig;
-    HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+    if (status_t status = HidlUtils::audioConfigFromHal(*config, false /*isInput*/, &hidlConfig);
+            status != OK) {
+        return status;
+    }
+    CoreUtils::AudioOutputFlags hidlFlags;
+    if (status_t status = CoreUtils::audioOutputFlagsFromHal(flags, &hidlFlags); status != OK) {
+        return status;
+    }
     Result retval = Result::NOT_INITIALIZED;
     Return<void> ret = mDevice->openOutputStream(
-            handle,
-            hidlDevice,
-            hidlConfig,
-            EnumBitfield<AudioOutputFlag>(flags),
+            handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
             {} /* metadata */,
 #endif
@@ -269,39 +236,54 @@
         sp<StreamInHalInterface> *inStream) {
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
-    status_t status = deviceAddressFromHal(devices, address, &hidlDevice);
-    if (status != OK) return status;
+    if (status_t status = CoreUtils::deviceAddressFromHal(devices, address, &hidlDevice);
+            status != OK) {
+        return status;
+    }
     AudioConfig hidlConfig;
-    HidlUtils::audioConfigFromHal(*config, &hidlConfig);
+    if (status_t status = HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
+            status != OK) {
+        return status;
+    }
+    CoreUtils::AudioInputFlags hidlFlags;
+#if MAJOR_VERSION <= 5
+    // Some flags were specific to the framework and must not leak to the HAL.
+    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
+#endif
+    if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
+        return status;
+    }
     Result retval = Result::NOT_INITIALIZED;
 #if MAJOR_VERSION == 2
     auto sinkMetadata = AudioSource(source);
 #elif MAJOR_VERSION >= 4
     // TODO: correctly propagate the tracks sources and volume
     //       for now, only send the main source at 1dbfs
-    SinkMetadata sinkMetadata = {{{ .source = AudioSource(source), .gain = 1 }}};
+    AudioSource hidlSource;
+    if (status_t status = HidlUtils::audioSourceFromHal(source, &hidlSource); status != OK) {
+        return status;
+    }
+    SinkMetadata sinkMetadata = {{{ .source = std::move(hidlSource), .gain = 1 }}};
 #endif
 #if MAJOR_VERSION < 5
     (void)outputDevice;
     (void)outputDeviceAddress;
 #else
+#if MAJOR_VERSION >= 7
+    (void)HidlUtils::audioChannelMaskFromHal(
+            AUDIO_CHANNEL_NONE, true /*isInput*/, &sinkMetadata.tracks[0].channelMask);
+#endif
     if (outputDevice != AUDIO_DEVICE_NONE) {
         DeviceAddress hidlOutputDevice;
-        status = deviceAddressFromHal(outputDevice, outputDeviceAddress, &hidlOutputDevice);
-        if (status != OK) return status;
+        if (status_t status = CoreUtils::deviceAddressFromHal(
+                        outputDevice, outputDeviceAddress, &hidlOutputDevice); status != OK) {
+            return status;
+        }
         sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
     }
 #endif
-#if MAJOR_VERSION <= 5
-    // Some flags were specific to framework and must not leak to the HAL.
-    flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
-#endif
     Return<void> ret = mDevice->openInputStream(
-            handle,
-            hidlDevice,
-            hidlConfig,
-            EnumBitfield<AudioInputFlag>(flags),
-            sinkMetadata,
+            handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
             [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
@@ -372,7 +354,8 @@
     return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
 }
 
-status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+template <typename HalPort>
+status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
     if (mDevice == 0) return NO_INIT;
     AudioPort hidlPort;
     HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -388,6 +371,30 @@
     return processReturn("getAudioPort", ret, retval);
 }
 
+status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+    return getAudioPortImpl(port);
+}
+
+status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+    return getAudioPortImpl(port);
+#else
+    struct audio_port audioPort = {};
+    status_t result = NO_ERROR;
+    if (!audio_populate_audio_port(port, &audioPort)) {
+        ALOGE("Failed to populate legacy audio port from audio_port_v7");
+        result = BAD_VALUE;
+    }
+    status_t status = getAudioPort(&audioPort);
+    if (status == NO_ERROR) {
+        audio_populate_audio_port_v7(&audioPort, port);
+    } else {
+        result = status;
+    }
+    return result;
+#endif
+}
+
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
@@ -411,7 +418,7 @@
         for (size_t k = 0; k < micArrayHal.size(); k++) {
             audio_microphone_characteristic_t dst;
             //convert
-            microphoneInfoToHal(micArrayHal[k], &dst);
+            (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
             media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
             microphonesInfo->push_back(microphone);
         }
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index d342d4a..2c847cf 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -107,6 +107,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port);
 
+    // Fills the list of supported attributes for a given audio port.
+    virtual status_t getAudioPort(struct audio_port_v7 *port);
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config);
 
@@ -128,6 +131,8 @@
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
+
+    template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
 };
 
 } // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index 8021d92..af7dc1a 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -180,6 +180,22 @@
     return mDev->get_audio_port(mDev, port);
 }
 
+status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+    if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
+        // get_audio_port_v7 is mandatory if the legacy HAL supports this API version.
+        return mDev->get_audio_port_v7(mDev, port);
+    }
+#endif
+    struct audio_port audioPort = {};
+    audio_populate_audio_port(port, &audioPort);
+    status_t status = getAudioPort(&audioPort);
+    if (status == NO_ERROR) {
+        audio_populate_audio_port_v7(&audioPort, port);
+    }
+    return status;
+}
+
 status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
     if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
         return mDev->set_audio_port_config(mDev, config);
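DeviceHalLocal::getAudioPort(audio_port_v7*) above falls back to the legacy query when the HAL predates get_audio_port_v7: convert the v7 request down, call the old API, then convert the result back up. A schematic of that down/up conversion with hypothetical structs and converters (the real code uses audio_populate_audio_port and audio_populate_audio_port_v7):

struct PortLegacy { int id; };
struct PortV7 { int id; bool hasV7Fields; };

// Hypothetical converters standing in for the audio_populate_audio_port* helpers.
static bool downconvert(const PortV7& in, PortLegacy* out) { out->id = in.id; return true; }
static void upconvert(const PortLegacy& in, PortV7* out) { out->id = in.id; out->hasV7Fields = false; }

// Stand-in for the legacy HAL query.
static int queryLegacyPort(PortLegacy* port) { port->id = 7; return 0 /*NO_ERROR*/; }

static int queryPortV7WithFallback(PortV7* port) {
    PortLegacy legacy{};
    if (!downconvert(*port, &legacy)) {
        return -22;  // BAD_VALUE-style: request not expressible in the legacy struct
    }
    int status = queryLegacyPort(&legacy);
    if (status == 0) {
        upconvert(legacy, port);  // copy the answer back into the v7 struct
    }
    return status;
}

int main() {
    PortV7 port{};
    return queryPortV7WithFallback(&port);
}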
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index d85e2a7..46b510b 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -100,6 +100,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port);
 
+    // Fills the list of supported attributes for a given audio port.
+    virtual status_t getAudioPort(struct audio_port_v7 *port);
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config);
 
@@ -114,6 +117,8 @@
     void closeOutputStream(struct audio_stream_out *stream_out);
     void closeInputStream(struct audio_stream_in *stream_in);
 
+    uint32_t version() const { return mDev->common.version; }
+
   private:
     audio_hw_device_t *mDev;
 
@@ -124,8 +129,6 @@
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalLocal();
-
-    uint32_t version() const { return mDev->common.version; }
 };
 
 } // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index caf575c..c589a48 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -23,12 +23,13 @@
 #include <media/EffectsFactoryApi.h>
 #include <utils/Log.h>
 
+#include <util/EffectUtils.h>
+
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
-#include "HidlUtils.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
+using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
@@ -42,6 +43,10 @@
 
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
         : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
+    effect_descriptor_t halDescriptor{};
+    if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
+        mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+    }
 }
 
 EffectHalHidl::~EffectHalHidl() {
@@ -56,59 +61,6 @@
 }
 
 // static
-void EffectHalHidl::effectDescriptorToHal(
-        const EffectDescriptor& descriptor, effect_descriptor_t* halDescriptor) {
-    HidlUtils::uuidToHal(descriptor.type, &halDescriptor->type);
-    HidlUtils::uuidToHal(descriptor.uuid, &halDescriptor->uuid);
-    halDescriptor->flags = static_cast<uint32_t>(descriptor.flags);
-    halDescriptor->cpuLoad = descriptor.cpuLoad;
-    halDescriptor->memoryUsage = descriptor.memoryUsage;
-    memcpy(halDescriptor->name, descriptor.name.data(), descriptor.name.size());
-    memcpy(halDescriptor->implementor,
-            descriptor.implementor.data(), descriptor.implementor.size());
-}
-
-// TODO(mnaganov): These buffer conversion functions should be shared with Effect wrapper
-// via HidlUtils. Move them there when hardware/interfaces will get un-frozen again.
-
-// static
-void EffectHalHidl::effectBufferConfigFromHal(
-        const buffer_config_t& halConfig, EffectBufferConfig* config) {
-    config->samplingRateHz = halConfig.samplingRate;
-    config->channels = EnumBitfield<AudioChannelMask>(halConfig.channels);
-    config->format = AudioFormat(halConfig.format);
-    config->accessMode = EffectBufferAccess(halConfig.accessMode);
-    config->mask = EnumBitfield<EffectConfigParameters>(halConfig.mask);
-}
-
-// static
-void EffectHalHidl::effectBufferConfigToHal(
-        const EffectBufferConfig& config, buffer_config_t* halConfig) {
-    halConfig->buffer.frameCount = 0;
-    halConfig->buffer.raw = NULL;
-    halConfig->samplingRate = config.samplingRateHz;
-    halConfig->channels = static_cast<uint32_t>(config.channels);
-    halConfig->bufferProvider.cookie = NULL;
-    halConfig->bufferProvider.getBuffer = NULL;
-    halConfig->bufferProvider.releaseBuffer = NULL;
-    halConfig->format = static_cast<uint8_t>(config.format);
-    halConfig->accessMode = static_cast<uint8_t>(config.accessMode);
-    halConfig->mask = static_cast<uint8_t>(config.mask);
-}
-
-// static
-void EffectHalHidl::effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config) {
-    effectBufferConfigFromHal(halConfig.inputCfg, &config->inputCfg);
-    effectBufferConfigFromHal(halConfig.outputCfg, &config->outputCfg);
-}
-
-// static
-void EffectHalHidl::effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig) {
-    effectBufferConfigToHal(config.inputCfg, &halConfig->inputCfg);
-    effectBufferConfigToHal(config.outputCfg, &halConfig->outputCfg);
-}
-
-// static
 status_t EffectHalHidl::analyzeResult(const Result& result) {
     switch (result) {
         case Result::OK: return OK;
@@ -269,7 +221,7 @@
             [&](Result r, const EffectDescriptor& result) {
                 retval = r;
                 if (retval == Result::OK) {
-                    effectDescriptorToHal(result, pDescriptor);
+                    EffectUtils::effectDescriptorToHal(result, pDescriptor);
                 }
             });
     return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
@@ -301,14 +253,16 @@
         ret = mEffect->getConfig([&] (Result r, const EffectConfig &hidlConfig) {
             result = analyzeResult(r);
             if (r == Result::OK) {
-                effectConfigToHal(hidlConfig, static_cast<effect_config_t*>(pReplyData));
+                EffectUtils::effectConfigToHal(
+                        hidlConfig, static_cast<effect_config_t*>(pReplyData));
             }
         });
     } else {
         ret = mEffect->getConfigReverse([&] (Result r, const EffectConfig &hidlConfig) {
             result = analyzeResult(r);
             if (r == Result::OK) {
-                effectConfigToHal(hidlConfig, static_cast<effect_config_t*>(pReplyData));
+                EffectUtils::effectConfigToHal(
+                        hidlConfig, static_cast<effect_config_t*>(pReplyData));
             }
         });
     }
@@ -332,7 +286,7 @@
         ALOGE("Buffer provider callbacks are not supported");
     }
     EffectConfig hidlConfig;
-    effectConfigFromHal(*halConfig, &hidlConfig);
+    EffectUtils::effectConfigFromHal(*halConfig, mIsInput, &hidlConfig);
     Return<Result> ret = cmdCode == EFFECT_CMD_SET_CONFIG ?
             mEffect->setConfig(hidlConfig, nullptr, nullptr) :
             mEffect->setConfigReverse(hidlConfig, nullptr, nullptr);
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 1f238c0..8e46638 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -65,9 +65,6 @@
 
     uint64_t effectId() const { return mEffectId; }
 
-    static void effectDescriptorToHal(
-            const EffectDescriptor& descriptor, effect_descriptor_t* halDescriptor);
-
   private:
     friend class EffectsFactoryHalHidl;
     typedef MessageQueue<Result, hardware::kSynchronizedReadWrite> StatusMQ;
@@ -79,14 +76,9 @@
     bool mBuffersChanged;
     std::unique_ptr<StatusMQ> mStatusMQ;
     EventFlag* mEfGroup;
+    bool mIsInput = false;
 
     static status_t analyzeResult(const Result& result);
-    static void effectBufferConfigFromHal(
-            const buffer_config_t& halConfig, EffectBufferConfig* config);
-    static void effectBufferConfigToHal(
-            const EffectBufferConfig& config, buffer_config_t* halConfig);
-    static void effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config);
-    static void effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig);
 
     // Can not be constructed directly by clients.
     EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 9192a31..9c4363c 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -19,13 +19,16 @@
 
 #include <cutils/native_handle.h>
 
+#include <UuidUtils.h>
+#include <util/EffectUtils.h>
+
 #include "ConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
 #include "EffectsFactoryHalHidl.h"
-#include "HidlUtils.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
+using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
 using ::android::hardware::Return;
 
 namespace android {
@@ -37,7 +40,7 @@
 
 EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
         : ConversionHelperHidl("EffectsFactory") {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IDevicesFactory service is NULL");
+    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
     mEffectsFactory = effectsFactory;
 }
 
@@ -76,7 +79,7 @@
         if (queryResult != OK) return queryResult;
     }
     if (index >= mLastDescriptors.size()) return NAME_NOT_FOUND;
-    EffectHalHidl::effectDescriptorToHal(mLastDescriptors[index], pDescriptor);
+    EffectUtils::effectDescriptorToHal(mLastDescriptors[index], pDescriptor);
     return OK;
 }
 
@@ -85,13 +88,13 @@
     // TODO: check for nullptr
     if (mEffectsFactory == 0) return NO_INIT;
     Uuid hidlUuid;
-    HidlUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
+    UuidUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
     Result retval = Result::NOT_INITIALIZED;
     Return<void> ret = mEffectsFactory->getDescriptor(hidlUuid,
             [&](Result r, const EffectDescriptor& result) {
                 retval = r;
                 if (retval == Result::OK) {
-                    EffectHalHidl::effectDescriptorToHal(result, pDescriptor);
+                    EffectUtils::effectDescriptorToHal(result, pDescriptor);
                 }
             });
     if (ret.isOk()) {
@@ -107,7 +110,7 @@
         int32_t deviceId __unused, sp<EffectHalInterface> *effect) {
     if (mEffectsFactory == 0) return NO_INIT;
     Uuid hidlUuid;
-    HidlUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
+    UuidUtils::uuidFromHal(*pEffectUuid, &hidlUuid);
     Result retval = Result::NOT_INITIALIZED;
     Return<void> ret;
 #if MAJOR_VERSION >= 6
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index dece1bb..5fa85e7 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -54,6 +54,8 @@
 
     virtual status_t dumpEffects(int fd);
 
+    virtual float getHalVersion() { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+
     status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
     status_t mirrorBuffer(void* external, size_t size,
                           sp<EffectBufferHalInterface>* buffer) override;
diff --git a/media/libaudiohal/impl/ParameterUtils.h b/media/libaudiohal/impl/ParameterUtils.h
new file mode 100644
index 0000000..9cab72e
--- /dev/null
+++ b/media/libaudiohal/impl/ParameterUtils.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include <hidl/HidlSupport.h>
+
+using ::android::hardware::audio::CPP_VERSION::ParameterValue;
+using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::Return;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::hidl_string;
+
+namespace android {
+namespace CPP_VERSION {
+namespace utils {
+
+#if MAJOR_VERSION == 2
+template <class T, class Callback>
+Return<void> getParameters(T& object, hidl_vec<ParameterValue> /*context*/,
+                           hidl_vec<hidl_string> keys, Callback callback) {
+    return object->getParameters(keys, callback);
+}
+
+template <class T>
+Return<Result> setParameters(T& object, hidl_vec<ParameterValue> /*context*/,
+                             hidl_vec<ParameterValue> keys) {
+    return object->setParameters(keys);
+}
+#elif MAJOR_VERSION >= 4
+template <class T, class Callback>
+Return<void> getParameters(T& object, hidl_vec<ParameterValue> context,
+                           hidl_vec<hidl_string> keys, Callback callback) {
+    return object->getParameters(context, keys, callback);
+}
+
+template <class T>
+Return<Result> setParameters(T& object, hidl_vec<ParameterValue> context,
+                             hidl_vec<ParameterValue> keys) {
+    return object->setParameters(context, keys);
+}
+#endif
+
+} // namespace utils
+} // namespace CPP_VERSION
+} // namespace android
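ParameterUtils.h above selects between two template signatures at preprocessor time so callers can pass a context vector unconditionally, and the 2.x build simply ignores it. A self-contained sketch of the same version-dispatch idea, using a fake VERSION macro, a fake HAL object, and plain std::vector arguments instead of HIDL types:

#include <string>
#include <vector>

#define FAKE_MAJOR_VERSION 4  // stand-in for MAJOR_VERSION, set per libaudiohal@X.Y build

struct FakeHal {
    int getParameters(const std::vector<std::string>& keys) const { return (int)keys.size(); }
    int getParameters(const std::vector<std::string>& context,
                      const std::vector<std::string>& keys) const {
        return (int)(context.size() + keys.size());
    }
};

#if FAKE_MAJOR_VERSION == 2
template <class T>
int getParameters(T& hal, const std::vector<std::string>& /*context*/,
                  const std::vector<std::string>& keys) {
    return hal.getParameters(keys);  // a 2.x-style HAL has no context argument
}
#else
template <class T>
int getParameters(T& hal, const std::vector<std::string>& context,
                  const std::vector<std::string>& keys) {
    return hal.getParameters(context, keys);  // a 4.x+-style HAL takes the context too
}
#endif

int main() {
    FakeHal hal;
    return getParameters(hal, {"routing=1"}, {"format"}) == 2 ? 0 : 1;
}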
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 2726e36..539a149 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,17 +17,23 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
 #include <mediautils/SchedulingPolicyService.h>
 #include <utils/Log.h>
 
+#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include <HidlUtils.h>
+#include <util/CoreUtils.h>
+
 #include "DeviceHalHidl.h"
 #include "EffectHalHidl.h"
+#include "ParameterUtils.h"
 #include "StreamHalHidl.h"
-#include "VersionUtils.h"
 
+using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
@@ -49,24 +55,17 @@
 
     // Instrument audio signal power logging.
     // Note: This assumes channel mask, format, and sample rate do not change after creation.
-    if (mStream != nullptr /* && mStreamPowerLog.isUserDebugOrEngBuild() */) {
-        // Obtain audio properties (see StreamHalHidl::getAudioProperties() below).
-        Return<void> ret = mStream->getAudioProperties(
-                [&](auto sr, auto m, auto f) {
-                mStreamPowerLog.init(sr,
-                        static_cast<audio_channel_mask_t>(m),
-                        static_cast<audio_format_t>(f));
-            });
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (/* mStreamPowerLog.isUserDebugOrEngBuild() && */
+        StreamHalHidl::getAudioProperties(&config) == NO_ERROR) {
+        mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 }
 
 StreamHalHidl::~StreamHalHidl() {
-    mStream = nullptr;
-}
-
-status_t StreamHalHidl::getSampleRate(uint32_t *rate) {
-    if (!mStream) return NO_INIT;
-    return processReturn("getSampleRate", mStream->getSampleRate(), rate);
+    // The last step is to flush all binder commands so that the deletion
+    // of IStreamIn / IStreamOut (mStream) is issued with less delay. See b/35394629.
+    hardware::IPCThreadState::self()->flushCommands();
 }
 
 status_t StreamHalHidl::getBufferSize(size_t *size) {
@@ -78,26 +77,33 @@
     return status;
 }
 
-status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
+status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+    *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
     if (!mStream) return NO_INIT;
-    return processReturn("getChannelMask", mStream->getChannelMask(), mask);
-}
-
-status_t StreamHalHidl::getFormat(audio_format_t *format) {
-    if (!mStream) return NO_INIT;
-    return processReturn("getFormat", mStream->getFormat(), format);
-}
-
-status_t StreamHalHidl::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
-    if (!mStream) return NO_INIT;
+#if MAJOR_VERSION <= 6
     Return<void> ret = mStream->getAudioProperties(
             [&](uint32_t sr, auto m, auto f) {
-                *sampleRate = sr;
-                *mask = static_cast<audio_channel_mask_t>(m);
-                *format = static_cast<audio_format_t>(f);
+                configBase->sample_rate = sr;
+                configBase->channel_mask = static_cast<audio_channel_mask_t>(m);
+                configBase->format = static_cast<audio_format_t>(f);
             });
     return processReturn("getAudioProperties", ret);
+#else
+    Result retval;
+    status_t conversionStatus = BAD_VALUE;
+    Return<void> ret = mStream->getAudioProperties(
+            [&](Result r, const AudioConfigBase& config) {
+                retval = r;
+                if (retval == Result::OK) {
+                    conversionStatus = HidlUtils::audioConfigBaseToHal(config, configBase);
+                }
+            });
+    if (status_t status = processReturn("getAudioProperties", ret, retval); status == NO_ERROR) {
+        return conversionStatus;
+    } else {
+        return status;
+    }
+#endif
 }
 
 status_t StreamHalHidl::setParameters(const String8& kvPairs) {
@@ -225,6 +231,24 @@
     return getBufferSize(size);
 }
 
+status_t StreamHalHidl::getHalPid(pid_t *pid) {
+    using ::android::hidl::base::V1_0::DebugInfo;
+    using ::android::hidl::manager::V1_0::IServiceManager;
+
+    DebugInfo debugInfo;
+    auto ret = mStream->getDebugInfo([&] (const auto &info) {
+        debugInfo = info;
+    });
+    if (!ret.isOk()) {
+        return INVALID_OPERATION;
+    }
+    if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+        *pid = debugInfo.pid;
+        return NO_ERROR;
+    }
+    return NAME_NOT_FOUND;
+}
+
 bool StreamHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
     if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
         return true;
@@ -285,7 +309,7 @@
     }
 
   private:
-    wp<StreamOutHalHidl> mStream;
+    const wp<StreamOutHalHidl> mStream;
 };
 
 }  // namespace
@@ -296,21 +320,19 @@
 
 StreamOutHalHidl::~StreamOutHalHidl() {
     if (mStream != 0) {
-        if (mCallback.unsafe_get()) {
+        if (mCallback.load().unsafe_get()) {
             processReturn("clearCallback", mStream->clearCallback());
         }
 #if MAJOR_VERSION >= 6
-        if (mEventCallback.unsafe_get() != nullptr) {
+        if (mEventCallback.load().unsafe_get() != nullptr) {
             processReturn("setEventCallback",
                     mStream->setEventCallback(nullptr));
         }
 #endif
         processReturn("close", mStream->close());
-        mStream.clear();
     }
-    mCallback.clear();
-    mEventCallback.clear();
-    hardware::IPCThreadState::self()->flushCommands();
+    mCallback = nullptr;
+    mEventCallback = nullptr;
     if (mEfGroup) {
         EventFlag::deleteEventFlag(&mEfGroup);
     }
@@ -363,7 +385,7 @@
 
     if (bytes == 0 && !mDataMQ) {
         // Can't determine the size for the MQ buffer. Wait for a non-empty write request.
-        ALOGW_IF(mCallback.unsafe_get(), "First call to async write with 0 bytes");
+        ALOGW_IF(mCallback.load().unsafe_get(), "First call to async write with 0 bytes");
         return OK;
     }
 
@@ -453,7 +475,7 @@
                     const CommandMQ::Descriptor& commandMQ,
                     const DataMQ::Descriptor& dataMQ,
                     const StatusMQ::Descriptor& statusMQ,
-                    const ThreadInfo& halThreadInfo) {
+                    const auto& halThreadInfo) {
                 retval = r;
                 if (retval == Result::OK) {
                     tempCommandMQ.reset(new CommandMQ(commandMQ));
@@ -462,8 +484,12 @@
                     if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
                         EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
                     }
+#if MAJOR_VERSION <= 6
                     halThreadPid = halThreadInfo.pid;
                     halThreadTid = halThreadInfo.tid;
+#else
+                    halThreadTid = halThreadInfo;
+#endif
                 }
             });
     if (!ret.isOk() || retval != Result::OK) {
@@ -484,6 +510,11 @@
         ALOGE_IF(!mEfGroup, "Event flag creation for writing failed");
         return NO_INIT;
     }
+#if MAJOR_VERSION >= 7
+    if (status_t status = getHalPid(&halThreadPid); status != NO_ERROR) {
+        return status;
+    }
+#endif
     requestHalThreadPriority(halThreadPid, halThreadTid);
 
     mCommandMQ = std::move(tempCommandMQ);
@@ -597,31 +628,45 @@
     return INVALID_OPERATION;
 }
 #elif MAJOR_VERSION >= 4
-/** Transform a standard collection to an HIDL vector. */
-template <class Values, class ElementConverter>
-static auto transformToHidlVec(const Values& values, ElementConverter converter) {
-    hidl_vec<decltype(converter(*values.begin()))> result{values.size()};
-    using namespace std;
-    transform(begin(values), end(values), begin(result), converter);
-    return result;
-}
-
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
-    CPP_VERSION::SourceMetadata halMetadata = {
-        .tracks = transformToHidlVec(sourceMetadata.tracks,
-              [](const playback_track_metadata& metadata) -> PlaybackTrackMetadata {
-                  return {
-                    .usage=static_cast<AudioUsage>(metadata.usage),
-                    .contentType=static_cast<AudioContentType>(metadata.content_type),
-                    .gain=metadata.gain,
-                  };
-              })};
-    return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(halMetadata));
+    CPP_VERSION::SourceMetadata hidlMetadata;
+    if (status_t status = CoreUtils::sourceMetadataFromHalV7(
+                    sourceMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+    return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(hidlMetadata));
 }
 #endif
 
 #if MAJOR_VERSION < 6
+status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalHidl::setDualMonoMode(audio_dual_mono_mode_t mode __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalHidl::getAudioDescriptionMixLevel(float* leveldB __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalHidl::setAudioDescriptionMixLevel(float leveldB __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalHidl::getPlaybackRateParameters(
+        audio_playback_rate_t* playbackRate __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalHidl::setPlaybackRateParameters(
+        const audio_playback_rate_t& playbackRate __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t StreamOutHalHidl::setEventCallback(
         const sp<StreamOutHalInterfaceEventCallback>& callback __unused) {
     // Codec format callback is supported starting from audio HAL V6.0
@@ -629,6 +674,73 @@
 }
 #else
 
+status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+    if (mStream == 0) return NO_INIT;
+    Result retval;
+    Return<void> ret = mStream->getDualMonoMode(
+            [&](Result r, DualMonoMode hidlMode) {
+                retval = r;
+                if (retval == Result::OK) {
+                    *mode = static_cast<audio_dual_mono_mode_t>(hidlMode);
+                }
+            });
+    return processReturn("getDualMonoMode", ret, retval);
+}
+
+status_t StreamOutHalHidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
+    if (mStream == 0) return NO_INIT;
+    return processReturn(
+            "setDualMonoMode", mStream->setDualMonoMode(static_cast<DualMonoMode>(mode)));
+}
+
+status_t StreamOutHalHidl::getAudioDescriptionMixLevel(float* leveldB) {
+    if (mStream == 0) return NO_INIT;
+    Result retval;
+    Return<void> ret = mStream->getAudioDescriptionMixLevel(
+            [&](Result r, float hidlLeveldB) {
+                retval = r;
+                if (retval == Result::OK) {
+                    *leveldB = hidlLeveldB;
+                }
+            });
+    return processReturn("getAudioDescriptionMixLevel", ret, retval);
+}
+
+status_t StreamOutHalHidl::setAudioDescriptionMixLevel(float leveldB) {
+    if (mStream == 0) return NO_INIT;
+    return processReturn(
+            "setAudioDescriptionMixLevel", mStream->setAudioDescriptionMixLevel(leveldB));
+}
+
+status_t StreamOutHalHidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    if (mStream == 0) return NO_INIT;
+    Result retval;
+    Return<void> ret = mStream->getPlaybackRateParameters(
+            [&](Result r, PlaybackRate hidlPlaybackRate) {
+                retval = r;
+                if (retval == Result::OK) {
+                    playbackRate->mSpeed = hidlPlaybackRate.speed;
+                    playbackRate->mPitch = hidlPlaybackRate.pitch;
+                    playbackRate->mStretchMode =
+                        static_cast<audio_timestretch_stretch_mode_t>(
+                            hidlPlaybackRate.timestretchMode);
+                    playbackRate->mFallbackMode =
+                        static_cast<audio_timestretch_fallback_mode_t>(
+                            hidlPlaybackRate.fallbackMode);
+                }
+            });
+    return processReturn("getPlaybackRateParameters", ret, retval);
+}
+
+status_t StreamOutHalHidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+    if (mStream == 0) return NO_INIT;
+    return processReturn(
+            "setPlaybackRateParameters", mStream->setPlaybackRateParameters(
+                PlaybackRate{playbackRate.mSpeed, playbackRate.mPitch,
+                    static_cast<TimestretchMode>(playbackRate.mStretchMode),
+                    static_cast<TimestretchFallbackMode>(playbackRate.mFallbackMode)}));
+}
+
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOutEventCallback.h)
 
 namespace {
@@ -666,28 +778,28 @@
 #endif
 
 void StreamOutHalHidl::onWriteReady() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.promote();
+    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onWriteReady");
     callback->onWriteReady();
 }
 
 void StreamOutHalHidl::onDrainReady() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.promote();
+    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onDrainReady");
     callback->onDrainReady();
 }
 
 void StreamOutHalHidl::onError() {
-    sp<StreamOutHalInterfaceCallback> callback = mCallback.promote();
+    sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
     callback->onError();
 }
 
 void StreamOutHalHidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) {
-    sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.promote();
+    sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
     if (callback == nullptr) return;
     ALOGV("asyncCodecFormatCallback %s", __func__);
     callback->onCodecFormatChanged(metadataBs);
@@ -701,8 +813,6 @@
 StreamInHalHidl::~StreamInHalHidl() {
     if (mStream != 0) {
         processReturn("close", mStream->close());
-        mStream.clear();
-        hardware::IPCThreadState::self()->flushCommands();
     }
     if (mEfGroup) {
         EventFlag::deleteEventFlag(&mEfGroup);
@@ -797,7 +907,7 @@
                     const CommandMQ::Descriptor& commandMQ,
                     const DataMQ::Descriptor& dataMQ,
                     const StatusMQ::Descriptor& statusMQ,
-                    const ThreadInfo& halThreadInfo) {
+                    const auto& halThreadInfo) {
                 retval = r;
                 if (retval == Result::OK) {
                     tempCommandMQ.reset(new CommandMQ(commandMQ));
@@ -806,8 +916,12 @@
                     if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
                         EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
                     }
+#if MAJOR_VERSION <= 6
                     halThreadPid = halThreadInfo.pid;
                     halThreadTid = halThreadInfo.tid;
+#else
+                    halThreadTid = halThreadInfo;
+#endif
                 }
             });
     if (!ret.isOk() || retval != Result::OK) {
@@ -828,6 +942,11 @@
         ALOGE_IF(!mEfGroup, "Event flag creation for reading failed");
         return NO_INIT;
     }
+#if MAJOR_VERSION >= 7
+    if (status_t status = getHalPid(&halThreadPid); status != NO_ERROR) {
+        return status;
+    }
+#endif
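+    // Starting with HAL V7 the callback above only provides the HAL thread's tid,
+    // so the pid is queried separately before requesting the thread priority below.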
     requestHalThreadPriority(halThreadPid, halThreadTid);
 
     mCommandMQ = std::move(tempCommandMQ);
@@ -890,7 +1009,7 @@
         for (size_t k = 0; k < micArrayHal.size(); k++) {
             audio_microphone_characteristic_t dst;
             // convert
-            microphoneInfoToHal(micArrayHal[k], &dst);
+            (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
             media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
             microphonesInfo->push_back(microphone);
         }
@@ -900,15 +1019,13 @@
 
 status_t StreamInHalHidl::updateSinkMetadata(const
         StreamInHalInterface::SinkMetadata& sinkMetadata) {
-    CPP_VERSION::SinkMetadata halMetadata = {
-        .tracks = transformToHidlVec(sinkMetadata.tracks,
-              [](const record_track_metadata& metadata) -> RecordTrackMetadata {
-                  return {
-                    .source=static_cast<AudioSource>(metadata.source),
-                    .gain=metadata.gain,
-                  };
-              })};
-    return processReturn("updateSinkMetadata", mStream->updateSinkMetadata(halMetadata));
+    CPP_VERSION::SinkMetadata hidlMetadata;
+    if (status_t status = CoreUtils::sinkMetadataFromHalV7(
+                    sinkMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+    return processReturn("updateSinkMetadata", mStream->updateSinkMetadata(hidlMetadata));
 }
 #endif
 
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 88f8587..970903b 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -25,6 +25,7 @@
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
 #include <media/audiohal/StreamHalInterface.h>
+#include <mediautils/Synchronization.h>
 
 #include "ConversionHelperHidl.h"
 #include "StreamPowerLog.h"
@@ -48,21 +49,14 @@
 class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - e.g. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
@@ -100,11 +94,12 @@
     // Subclasses can not be constructed directly by clients.
     explicit StreamHalHidl(IStream *stream);
 
-    // The destructor automatically closes the stream.
-    virtual ~StreamHalHidl();
+    ~StreamHalHidl() override;
 
     status_t getCachedBufferSize(size_t *size);
 
+    status_t getHalPid(pid_t *pid);
+
     bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
 
     // mStreamPowerLog is used for audio signal power logging.
@@ -112,7 +107,7 @@
 
   private:
     const int HAL_THREAD_PRIORITY_DEFAULT = -1;
-    IStream *mStream;
+    IStream * const mStream;
     int mHalThreadPriority;
     size_t mCachedBufferSize;
 };
@@ -173,6 +168,24 @@
     void onDrainReady();
     void onError();
 
+    // Returns the Dual Mono mode presentation setting.
+    status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
+
+    // Sets the Dual Mono mode presentation on the output device.
+    status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
+
+    // Returns the Audio Description Mix level in dB.
+    status_t getAudioDescriptionMixLevel(float* leveldB) override;
+
+    // Sets the Audio Description Mix level in dB.
+    status_t setAudioDescriptionMixLevel(float leveldB) override;
+
+    // Retrieves current playback rate parameters.
+    status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
+
+    // Sets the playback rate parameters that control playback behavior.
+    status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
+
     status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
 
     // Methods used by StreamCodecFormatCallback (HIDL).
@@ -184,9 +197,9 @@
     typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
     typedef MessageQueue<WriteStatus, hardware::kSynchronizedReadWrite> StatusMQ;
 
-    wp<StreamOutHalInterfaceCallback> mCallback;
-    wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-    sp<IStreamOut> mStream;
+    mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
+    mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
+    const sp<IStreamOut> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -242,7 +255,7 @@
     typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
     typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
 
-    sp<IStreamIn> mStream;
+    const sp<IStreamIn> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index 69be303..34bd5df 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -17,13 +17,14 @@
 #define LOG_TAG "StreamHalLocal"
 //#define LOG_NDEBUG 0
 
+#include <audio_utils/Metadata.h>
 #include <hardware/audio.h>
 #include <media/AudioParameter.h>
 #include <utils/Log.h>
 
 #include "DeviceHalLocal.h"
+#include "ParameterUtils.h"
 #include "StreamHalLocal.h"
-#include "VersionUtils.h"
 
 namespace android {
 namespace CPP_VERSION {
@@ -45,31 +46,15 @@
     mDevice.clear();
 }
 
-status_t StreamHalLocal::getSampleRate(uint32_t *rate) {
-    *rate = mStream->get_sample_rate(mStream);
-    return OK;
-}
-
 status_t StreamHalLocal::getBufferSize(size_t *size) {
     *size = mStream->get_buffer_size(mStream);
     return OK;
 }
 
-status_t StreamHalLocal::getChannelMask(audio_channel_mask_t *mask) {
-    *mask = mStream->get_channels(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getFormat(audio_format_t *format) {
-    *format = mStream->get_format(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(
-        uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
-    *sampleRate = mStream->get_sample_rate(mStream);
-    *mask = mStream->get_channels(mStream);
-    *format = mStream->get_format(mStream);
+status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
+    configBase->sample_rate = mStream->get_sample_rate(mStream);
+    configBase->channel_mask = mStream->get_channels(mStream);
+    configBase->format = mStream->get_format(mStream);
     return OK;
 }
 
@@ -241,19 +226,55 @@
     return mStream->get_presentation_position(mStream, frames, timestamp);
 }
 
+void StreamOutHalLocal::doUpdateSourceMetadata(const SourceMetadata& sourceMetadata) {
+    std::vector<playback_track_metadata> halTracks;
+    halTracks.reserve(sourceMetadata.tracks.size());
+    for (auto& metadata : sourceMetadata.tracks) {
+        playback_track_metadata halTrackMetadata;
+        playback_track_metadata_from_v7(&halTrackMetadata, &metadata);
+        halTracks.push_back(halTrackMetadata);
+    }
+    const source_metadata_t halMetadata = {
+        .track_count = halTracks.size(),
+        .tracks = halTracks.data(),
+    };
+    mStream->update_source_metadata(mStream, &halMetadata);
+}
+
+#if MAJOR_VERSION >= 7
+void StreamOutHalLocal::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
+    const source_metadata_v7_t metadata {
+        .track_count = sourceMetadata.tracks.size(),
+        // const cast is fine as it is in a const structure
+        .tracks = const_cast<playback_track_metadata_v7*>(sourceMetadata.tracks.data()),
+    };
+    mStream->update_source_metadata_v7(mStream, &metadata);
+}
+#endif
+
 status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
+#if MAJOR_VERSION < 7
     if (mStream->update_source_metadata == nullptr) {
         return INVALID_OPERATION;
     }
-    const source_metadata_t metadata {
-        .track_count = sourceMetadata.tracks.size(),
-        // const cast is fine as it is in a const structure
-        .tracks = const_cast<playback_track_metadata*>(sourceMetadata.tracks.data()),
-    };
-    mStream->update_source_metadata(mStream, &metadata);
+    doUpdateSourceMetadata(sourceMetadata);
+#else
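+    // The framework-side metadata now uses the *_v7 track structures (see SourceMetadata);
+    // devices implementing an audio device API older than 3.2 still expect the legacy
+    // layout, so doUpdateSourceMetadata() converts the tracks down for that case.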
+    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
+        if (mStream->update_source_metadata == nullptr) {
+            return INVALID_OPERATION;
+        }
+        doUpdateSourceMetadata(sourceMetadata);
+    } else {
+        if (mStream->update_source_metadata_v7 == nullptr) {
+            return INVALID_OPERATION;
+        }
+        doUpdateSourceMetadataV7(sourceMetadata);
+    }
+#endif
     return OK;
 }
 
 status_t StreamOutHalLocal::start() {
     if (mStream->start == NULL) return INVALID_OPERATION;
     return mStream->start(mStream);
@@ -275,6 +296,36 @@
     return mStream->get_mmap_position(mStream, position);
 }
 
+status_t StreamOutHalLocal::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+    if (mStream->get_dual_mono_mode == nullptr) return INVALID_OPERATION;
+    return mStream->get_dual_mono_mode(mStream, mode);
+}
+
+status_t StreamOutHalLocal::setDualMonoMode(audio_dual_mono_mode_t mode) {
+    if (mStream->set_dual_mono_mode == nullptr) return INVALID_OPERATION;
+    return mStream->set_dual_mono_mode(mStream, mode);
+}
+
+status_t StreamOutHalLocal::getAudioDescriptionMixLevel(float* leveldB) {
+    if (mStream->get_audio_description_mix_level == nullptr) return INVALID_OPERATION;
+    return mStream->get_audio_description_mix_level(mStream, leveldB);
+}
+
+status_t StreamOutHalLocal::setAudioDescriptionMixLevel(float leveldB) {
+    if (mStream->set_audio_description_mix_level == nullptr) return INVALID_OPERATION;
+    return mStream->set_audio_description_mix_level(mStream, leveldB);
+}
+
+status_t StreamOutHalLocal::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    if (mStream->get_playback_rate_parameters == nullptr) return INVALID_OPERATION;
+    return mStream->get_playback_rate_parameters(mStream, playbackRate);
+}
+
+status_t StreamOutHalLocal::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+    if (mStream->set_playback_rate_parameters == nullptr) return INVALID_OPERATION;
+    return mStream->set_playback_rate_parameters(mStream, &playbackRate);
+}
+
 status_t StreamOutHalLocal::setEventCallback(
         const sp<StreamOutHalInterfaceEventCallback>& callback) {
     if (mStream->set_event_callback == nullptr) {
@@ -303,7 +354,11 @@
     if (callback.get() == nullptr) return 0;
     switch (event) {
         case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED:
-            callback->onCodecFormatChanged(std::basic_string<uint8_t>((uint8_t*)param));
+            // void* param is the byte string buffer from byte_string_from_audio_metadata().
+            // As the byte string buffer may have embedded zeroes, we cannot use strlen()
+            callback->onCodecFormatChanged(std::basic_string<uint8_t>(
+                    (const uint8_t*)param,
+                    audio_utils::metadata::dataByteStringLen((const uint8_t*)param)));
             break;
         default:
             ALOGW("%s unknown event %d", __func__, event);
@@ -352,16 +407,51 @@
     return mStream->get_capture_position(mStream, frames, time);
 }
 
-status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
-    if (mStream->update_sink_metadata == nullptr) {
-        return INVALID_OPERATION;
+void StreamInHalLocal::doUpdateSinkMetadata(const SinkMetadata& sinkMetadata) {
+    std::vector<record_track_metadata> halTracks;
+    halTracks.reserve(sinkMetadata.tracks.size());
+    for (auto& metadata : sinkMetadata.tracks) {
+        record_track_metadata halTrackMetadata;
+        record_track_metadata_from_v7(&halTrackMetadata, &metadata);
+        halTracks.push_back(halTrackMetadata);
     }
-    const sink_metadata_t metadata {
+    const sink_metadata_t halMetadata = {
+        .track_count = halTracks.size(),
+        .tracks = halTracks.data(),
+    };
+    mStream->update_sink_metadata(mStream, &halMetadata);
+}
+
+#if MAJOR_VERSION >= 7
+void StreamInHalLocal::doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata) {
+    const sink_metadata_v7_t halMetadata {
         .track_count = sinkMetadata.tracks.size(),
         // const cast is fine as it is in a const structure
-        .tracks = const_cast<record_track_metadata*>(sinkMetadata.tracks.data()),
+        .tracks = const_cast<record_track_metadata_v7*>(sinkMetadata.tracks.data()),
     };
-    mStream->update_sink_metadata(mStream, &metadata);
+    mStream->update_sink_metadata_v7(mStream, &halMetadata);
+}
+#endif
+
+status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
+#if MAJOR_VERSION < 7
+    if (mStream->update_sink_metadata == nullptr) {
+        return INVALID_OPERATION;  // not supported by the HAL
+    }
+    doUpdateSinkMetadata(sinkMetadata);
+#else
+    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
+        if (mStream->update_sink_metadata == nullptr) {
+            return INVALID_OPERATION;  // not supported by the HAL
+        }
+        doUpdateSinkMetadata(sinkMetadata);
+    } else {
+        if (mStream->update_sink_metadata_v7 == nullptr) {
+            return INVALID_OPERATION;  // not supported by the HAL
+        }
+        doUpdateSinkMetadataV7(sinkMetadata);
+    }
+#endif
     return OK;
 }
 
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index d17f9f3..b260495 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -28,21 +28,14 @@
 class StreamHalLocal : public virtual StreamHalInterface
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate);
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size);
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format);
-
-    // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - e.g. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase);
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs);
@@ -156,6 +149,24 @@
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
+    // Returns the Dual Mono mode presentation setting.
+    status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
+
+    // Sets the Dual Mono mode presentation on the output device.
+    status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
+
+    // Returns the Audio Description Mix level in dB.
+    status_t getAudioDescriptionMixLevel(float* leveldB) override;
+
+    // Sets the Audio Description Mix level in dB.
+    status_t setAudioDescriptionMixLevel(float leveldB) override;
+
+    // Retrieves current playback rate parameters.
+    status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
+
+    // Sets the playback rate parameters that control playback behavior.
+    status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
+
     status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
 
   private:
@@ -173,6 +184,9 @@
     static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
 
     static int asyncEventCallback(stream_event_callback_type_t event, void *param, void *cookie);
+
+    void doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata);
+    void doUpdateSourceMetadata(const SourceMetadata& sourceMetadata);
 };
 
 class StreamInHalLocal : public StreamInHalInterface, public StreamHalLocal {
@@ -227,6 +241,9 @@
     StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device);
 
     virtual ~StreamInHalLocal();
+
+    void doUpdateSinkMetadata(const SinkMetadata& sinkMetadata);
+    void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
 };
 
 } // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index 5fd3912..f6a554b 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -19,6 +19,7 @@
 
 #include <audio_utils/clock.h>
 #include <audio_utils/PowerLog.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <system/audio.h>
 
diff --git a/media/libaudiohal/impl/VersionUtils.h b/media/libaudiohal/impl/VersionUtils.h
deleted file mode 100644
index eb0a42a..0000000
--- a/media/libaudiohal/impl/VersionUtils.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_VERSION_UTILS_H
-#define ANDROID_HARDWARE_VERSION_UTILS_H
-
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
-#include <hidl/HidlSupport.h>
-
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using ::android::hardware::audio::CPP_VERSION::Result;
-using ::android::hardware::Return;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::hidl_string;
-
-namespace android {
-namespace CPP_VERSION {
-namespace utils {
-
-#if MAJOR_VERSION == 2
-template <class T, class Callback>
-Return<void> getParameters(T& object, hidl_vec<ParameterValue> /*context*/,
-                           hidl_vec<hidl_string> keys, Callback callback) {
-    return object->getParameters(keys, callback);
-}
-
-template <class T>
-Return<Result> setParameters(T& object, hidl_vec<ParameterValue> /*context*/,
-                             hidl_vec<ParameterValue> keys) {
-    return object->setParameters(keys);
-}
-#elif MAJOR_VERSION >= 4
-template <class T, class Callback>
-Return<void> getParameters(T& object, hidl_vec<ParameterValue> context,
-                           hidl_vec<hidl_string> keys, Callback callback) {
-    return object->getParameters(context, keys, callback);
-}
-
-template <class T>
-Return<Result> setParameters(T& object, hidl_vec<ParameterValue> context,
-                             hidl_vec<ParameterValue> keys) {
-    return object->setParameters(context, keys);
-}
-#endif
-
-} // namespace utils
-} // namespace CPP_VERSION
-} // namespace android
-
-#endif // ANDROID_HARDWARE_VERSION_UTILS_H
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 1e04b21..29ef011 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -106,6 +106,9 @@
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port *port) = 0;
 
+    // Fills the list of supported attributes for a given audio port,
+    // using the audio_port_v7 structure.
+    virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
+
     // Set audio port configuration.
     virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
 
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index 3a76f9f..9fb56ae 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -46,6 +46,8 @@
 
     virtual status_t dumpEffects(int fd) = 0;
 
+    virtual float getHalVersion() = 0;
+
     static sp<EffectsFactoryHalInterface> create();
 
     virtual status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index e30cb72..2be12fb 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -31,21 +31,27 @@
 class StreamHalInterface : public virtual RefBase
 {
   public:
-    // Return the sampling rate in Hz - eg. 44100.
-    virtual status_t getSampleRate(uint32_t *rate) = 0;
-
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
     virtual status_t getBufferSize(size_t *size) = 0;
 
-    // Return the channel mask.
-    virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
-
-    // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
-    virtual status_t getFormat(audio_format_t *format) = 0;
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - e.g. 44100.
+    virtual status_t getAudioProperties(audio_config_base_t *configBase) = 0;
 
     // Convenience method.
-    virtual status_t getAudioProperties(
-            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
+    inline status_t getAudioProperties(
+            uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        const status_t result = getAudioProperties(&config);
+        if (result == NO_ERROR) {
+            if (sampleRate != nullptr) *sampleRate = config.sample_rate;
+            if (mask != nullptr) *mask = config.channel_mask;
+            if (format != nullptr) *format = config.format;
+        }
+        return result;
+    }
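+    // Usage sketch (variable names are illustrative): either query the full configuration,
+    //   audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    //   status_t status = stream->getAudioProperties(&config);
+    // or use the wrapper above, passing nullptr for any field that is not needed:
+    //   uint32_t sampleRate;
+    //   status = stream->getAudioProperties(&sampleRate, nullptr, nullptr);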
 
     // Set audio stream parameters.
     virtual status_t setParameters(const String8& kvPairs) = 0;
@@ -158,14 +164,33 @@
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
     struct SourceMetadata {
-        std::vector<playback_track_metadata_t> tracks;
+        std::vector<playback_track_metadata_v7_t> tracks;
     };
+
     /**
      * Called when the metadata of the stream's source has been changed.
      * @param sourceMetadata Description of the audio that is played by the clients.
      */
     virtual status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) = 0;
 
+    // Returns the Dual Mono mode presentation setting.
+    virtual status_t getDualMonoMode(audio_dual_mono_mode_t* mode) = 0;
+
+    // Sets the Dual Mono mode presentation on the output device.
+    virtual status_t setDualMonoMode(audio_dual_mono_mode_t mode) = 0;
+
+    // Returns the Audio Description Mix level in dB.
+    virtual status_t getAudioDescriptionMixLevel(float* leveldB) = 0;
+
+    // Sets the Audio Description Mix level in dB.
+    virtual status_t setAudioDescriptionMixLevel(float leveldB) = 0;
+
+    // Retrieves current playback rate parameters.
+    virtual status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) = 0;
+
+    // Sets the playback rate parameters that control playback behavior.
+    virtual status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) = 0;
+
     virtual status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) = 0;
 
   protected:
@@ -197,7 +222,7 @@
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
 
     struct SinkMetadata {
-        std::vector<record_track_metadata_t> tracks;
+        std::vector<record_track_metadata_v7_t> tracks;
     };
     /**
      * Called when the metadata of the stream's sink has been changed.
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 39b0ceb..309765a 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libaudioprocessing_defaults",
 
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 1a31420..d85e2e9 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -79,10 +79,14 @@
             && mixerChannelMask == (track->mMixerChannelMask | track->mMixerHapticChannelMask)) {
         return false;  // no need to change
     }
-    const audio_channel_mask_t hapticChannelMask = trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    trackChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
-    const audio_channel_mask_t mixerHapticChannelMask = mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    mixerChannelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+    const audio_channel_mask_t hapticChannelMask =
+            static_cast<audio_channel_mask_t>(trackChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    trackChannelMask = static_cast<audio_channel_mask_t>(
+            trackChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+    const audio_channel_mask_t mixerHapticChannelMask = static_cast<audio_channel_mask_t>(
+            mixerChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    mixerChannelMask = static_cast<audio_channel_mask_t>(
+            mixerChannelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
     // always recompute for both channel masks even if only one has changed.
     const uint32_t trackChannelCount = audio_channel_count_from_out_mask(trackChannelMask);
     const uint32_t mixerChannelCount = audio_channel_count_from_out_mask(mixerChannelMask);
@@ -362,7 +366,8 @@
             const audio_channel_mask_t trackChannelMask =
                 static_cast<audio_channel_mask_t>(valueInt);
             if (setChannelMasks(name, trackChannelMask,
-                    (track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
+                    static_cast<audio_channel_mask_t>(
+                            track->mMixerChannelMask | track->mMixerHapticChannelMask))) {
                 ALOGV("setParameter(TRACK, CHANNEL_MASK, %x)", trackChannelMask);
                 invalidate();
             }
@@ -407,7 +412,8 @@
         case MIXER_CHANNEL_MASK: {
             const audio_channel_mask_t mixerChannelMask =
                     static_cast<audio_channel_mask_t>(valueInt);
-            if (setChannelMasks(name, track->channelMask | track->mHapticChannelMask,
+            if (setChannelMasks(name, static_cast<audio_channel_mask_t>(
+                                    track->channelMask | track->mHapticChannelMask),
                     mixerChannelMask)) {
                 ALOGV("setParameter(TRACK, MIXER_CHANNEL_MASK, %#x)", mixerChannelMask);
                 invalidate();
@@ -423,7 +429,7 @@
             }
             } break;
         case HAPTIC_INTENSITY: {
-            const haptic_intensity_t hapticIntensity = static_cast<haptic_intensity_t>(valueInt);
+            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
             if (track->mHapticIntensity != hapticIntensity) {
                 track->mHapticIntensity = hapticIntensity;
             }
@@ -533,9 +539,10 @@
     Track* t = static_cast<Track*>(track);
 
     audio_channel_mask_t channelMask = t->channelMask;
-    t->mHapticChannelMask = channelMask & AUDIO_CHANNEL_HAPTIC_ALL;
+    t->mHapticChannelMask = static_cast<audio_channel_mask_t>(
+            channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
     t->mHapticChannelCount = audio_channel_count_from_out_mask(t->mHapticChannelMask);
-    channelMask &= ~AUDIO_CHANNEL_HAPTIC_ALL;
+    channelMask = static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
     t->channelCount = audio_channel_count_from_out_mask(channelMask);
     ALOGV_IF(audio_channel_mask_get_bits(channelMask) != AUDIO_CHANNEL_OUT_STEREO,
             "Non-stereo channel mask: %d\n", channelMask);
@@ -545,7 +552,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = HAPTIC_SCALE_NONE;
+    t->mHapticIntensity = os::HapticScale::NONE;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
     t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
@@ -590,19 +597,12 @@
             const std::shared_ptr<Track> &t = getTrack(name);
             if (t->mHapticPlaybackEnabled) {
                 size_t sampleCount = mFrameCount * t->mMixerHapticChannelCount;
-                float gamma = t->getHapticScaleGamma();
-                float maxAmplitudeRatio = t->getHapticMaxAmplitudeRatio();
                 uint8_t* buffer = (uint8_t*)pair.first + mFrameCount * audio_bytes_per_frame(
                         t->mMixerChannelCount, t->mMixerFormat);
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    float* fout = (float*) buffer;
-                    for (size_t i = 0; i < sampleCount; i++) {
-                        float mul = fout[i] >= 0 ? 1.0 : -1.0;
-                        fout[i] = powf(fabsf(fout[i] / HAPTIC_MAX_AMPLITUDE_FLOAT), gamma)
-                                * maxAmplitudeRatio * HAPTIC_MAX_AMPLITUDE_FLOAT * mul;
-                    }
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity);
                 } break;
                 default:
                     LOG_ALWAYS_FATAL("bad mMixerFormat: %#x", t->mMixerFormat);
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index a54e22f..f30eb54 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "AudioMixer"
 //#define LOG_NDEBUG 0
 
+#include <array>
 #include <sstream>
 #include <string.h>
 
@@ -1295,8 +1296,29 @@
 
 // Needs to derive a compile time constant (constexpr).  Could be targeted to go
 // to a MONOVOL mixtype based on MAX_NUM_VOLUMES, but that's an unnecessary complication.
-#define MIXTYPE_MONOVOL(mixtype) ((mixtype) == MIXTYPE_MULTI ? MIXTYPE_MULTI_MONOVOL : \
-        (mixtype) == MIXTYPE_MULTI_SAVEONLY ? MIXTYPE_MULTI_SAVEONLY_MONOVOL : (mixtype))
+
+constexpr int MIXTYPE_MONOVOL(int mixtype, int channels) {
+    if (channels <= FCC_2) {
+        return mixtype;
+    } else if (mixtype == MIXTYPE_MULTI) {
+        return MIXTYPE_MULTI_MONOVOL;
+    } else if (mixtype == MIXTYPE_MULTI_SAVEONLY) {
+        return MIXTYPE_MULTI_SAVEONLY_MONOVOL;
+    } else {
+        return mixtype;
+    }
+}
+
+// Helper to make a functional array from volumeRampMulti.
+template <int MIXTYPE, typename TO, typename TI, typename TV, typename TA, typename TAV,
+          std::size_t ... Is>
+static constexpr auto makeVRMArray(std::index_sequence<Is...>)
+{
+    using F = void(*)(TO*, size_t, const TI*, TA*, TV*, const TV*, TAV*, TAV);
+    return std::array<F, sizeof...(Is)>{
+            { &volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE, Is + 1), Is + 1, TO, TI, TV, TA, TAV> ...}
+        };
+}
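+// Note: make_index_sequence<FCC_LIMIT> expands Is to 0..FCC_LIMIT-1, so the array holds one
+// instantiation per channel count; e.g. (assuming FCC_LIMIT >= 3) element [2] is
+// &volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE, 3), 3, TO, TI, TV, TA, TAV>. Indexing with
+// [channels - 1] then replaces the previous per-channel switch.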
 
 /* MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
@@ -1308,40 +1330,26 @@
 static void volumeRampMulti(uint32_t channels, TO* out, size_t frameCount,
         const TI* in, TA* aux, TV *vol, const TV *volinc, TAV *vola, TAV volainc)
 {
-    switch (channels) {
-    case 1:
-        volumeRampMulti<MIXTYPE, 1>(out, frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 2:
-        volumeRampMulti<MIXTYPE, 2>(out, frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 3:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 3>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 4:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 4>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 5:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 5>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 6:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 6>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 7:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 7>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
-    case 8:
-        volumeRampMulti<MIXTYPE_MONOVOL(MIXTYPE), 8>(out,
-                frameCount, in, aux, vol, volinc, vola, volainc);
-        break;
+    static constexpr auto volumeRampMultiArray =
+            makeVRMArray<MIXTYPE, TO, TI, TV, TA, TAV>(std::make_index_sequence<FCC_LIMIT>());
+    if (channels > 0 && channels <= volumeRampMultiArray.size()) {
+        volumeRampMultiArray[channels - 1](out, frameCount, in, aux, vol, volinc, vola, volainc);
+    } else {
+        ALOGE("%s: invalid channel count:%d", __func__, channels);
     }
 }
 
+// Helper to make a functional array from volumeMulti.
+template <int MIXTYPE, typename TO, typename TI, typename TV, typename TA, typename TAV,
+          std::size_t ... Is>
+static constexpr auto makeVMArray(std::index_sequence<Is...>)
+{
+    using F = void(*)(TO*, size_t, const TI*, TA*, const TV*, TAV);
+    return std::array<F, sizeof...(Is)>{
+            { &volumeMulti<MIXTYPE_MONOVOL(MIXTYPE, Is + 1), Is + 1, TO, TI, TV, TA, TAV> ... }
+        };
+}
+
 /* MIXTYPE     (see AudioMixerOps.h MIXTYPE_* enumeration)
  * TO: int32_t (Q4.27) or float
  * TI: int32_t (Q4.27) or int16_t (Q0.15) or float
@@ -1352,31 +1360,12 @@
 static void volumeMulti(uint32_t channels, TO* out, size_t frameCount,
         const TI* in, TA* aux, const TV *vol, TAV vola)
 {
-    switch (channels) {
-    case 1:
-        volumeMulti<MIXTYPE, 1>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 2:
-        volumeMulti<MIXTYPE, 2>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 3:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 3>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 4:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 4>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 5:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 5>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 6:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 6>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 7:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 7>(out, frameCount, in, aux, vol, vola);
-        break;
-    case 8:
-        volumeMulti<MIXTYPE_MONOVOL(MIXTYPE), 8>(out, frameCount, in, aux, vol, vola);
-        break;
+    static constexpr auto volumeMultiArray =
+            makeVMArray<MIXTYPE, TO, TI, TV, TA, TAV>(std::make_index_sequence<FCC_LIMIT>());
+    if (channels > 0 && channels <= volumeMultiArray.size()) {
+        volumeMultiArray[channels - 1](out, frameCount, in, aux, vol, vola);
+    } else {
+        ALOGE("%s: invalid channel count:%d", __func__, channels);
     }
 }
 
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index 80bd093..cd47dc6 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_AUDIO_MIXER_OPS_H
 #define ANDROID_AUDIO_MIXER_OPS_H
 
+#include <system/audio.h>
+
 namespace android {
 
 // Hack to make static_assert work in a constexpr
@@ -231,20 +233,23 @@
         typename TO, typename TI, typename TV,
         typename F>
 void stereoVolumeHelper(TO*& out, const TI*& in, const TV *vol, F f) {
-    static_assert(NCHAN > 0 && NCHAN <= 8);
+    static_assert(NCHAN > 0 && NCHAN <= FCC_LIMIT);
     static_assert(MIXTYPE == MIXTYPE_MULTI_STEREOVOL
             || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
-            || MIXTYPE == MIXTYPE_STEREOEXPAND);
+            || MIXTYPE == MIXTYPE_STEREOEXPAND
+            || MIXTYPE == MIXTYPE_MONOEXPAND);
     auto proc = [](auto& a, const auto& b) {
         if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
-                || MIXTYPE == MIXTYPE_STEREOEXPAND) {
+                || MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             a += b;
         } else {
             a = b;
         }
     };
     auto inp = [&in]() -> const TI& {
-        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) {
+        if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND
+                || MIXTYPE == MIXTYPE_MONOEXPAND) {
             return *in;
         } else {
             return *in++;
@@ -288,6 +293,16 @@
     // NCHAN == 8
     proc(*out++, f(inp(), vol[0])); // side left
     proc(*out++, f(inp(), vol[1])); // side right
+    if constexpr (NCHAN > FCC_8) {
+        // Mutes to zero extended surround channels.
+        // 7.1.4 has the correct behavior.
+        // 22.2 has the behavior that FLC and FRC will be mixed instead
+        // of SL and SR, and LFE will be center, not left.
+        for (int i = 8; i < NCHAN; ++i) {
+            // TODO: Consider using android::audio_utils::channels::kSideFromChannelIdx
+            proc(*out++, f(inp(), 0.f));
+        }
+    }
 }
 
 /*
@@ -312,6 +327,8 @@
  *   TV/TAV: int32_t (U4.28) or int16_t (U4.12) or float
  *   Input channel count is 1.
  *   vol: represents volume array.
+ *   This uses stereo balanced volume vol[0] and vol[1].
+ *   Before R, this was a full volume array but was called only for channels <= 2.
  *
  *   This accumulates into the out pointer.
  *
@@ -356,17 +373,13 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                     vol[i] += volinc[i];
@@ -383,11 +396,13 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -401,17 +416,13 @@
     } else {
         do {
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                    vol[i] += volinc[i];
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                     vol[i] += volinc[i];
@@ -428,10 +439,12 @@
                 vol[0] += volinc[0];
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
                 vol[0] += volinc[0];
                 vol[1] += volinc[1];
@@ -454,15 +467,12 @@
         do {
             TA auxaccum = 0;
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMulAux<TO, TI, TV, TA>(*in, vol[i], &auxaccum);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMulAux<TO, TI, TV, TA>(*in++, vol[i], &auxaccum);
                 }
@@ -476,11 +486,13 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(
                         out, in, vol, [&auxaccum] (auto &a, const auto &b) {
                     return MixMulAux<TO, TI, TV, TA>(a, b, &auxaccum);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
@@ -490,16 +502,14 @@
         } while (--frameCount);
     } else {
         do {
+            // ALOGD("Mixtype:%d NCHAN:%d", MIXTYPE, NCHAN);
             if constexpr (MIXTYPE == MIXTYPE_MULTI) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ += MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
-            } else if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) {
-                for (int i = 0; i < NCHAN; ++i) {
-                    *out++ += MixMul<TO, TI, TV>(*in, vol[i]);
-                }
-                in++;
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_SAVEONLY) {
+                static_assert(NCHAN <= 2);
                 for (int i = 0; i < NCHAN; ++i) {
                     *out++ = MixMul<TO, TI, TV>(*in++, vol[i]);
                 }
@@ -513,10 +523,12 @@
                 }
             } else if constexpr (MIXTYPE == MIXTYPE_MULTI_STEREOVOL
                     || MIXTYPE == MIXTYPE_MULTI_SAVEONLY_STEREOVOL
+                    || MIXTYPE == MIXTYPE_MONOEXPAND
                     || MIXTYPE == MIXTYPE_STEREOEXPAND) {
                 stereoVolumeHelper<MIXTYPE, NCHAN>(out, in, vol, [] (auto &a, const auto &b) {
                     return MixMul<TO, TI, TV>(a, b);
                 });
+                if constexpr (MIXTYPE == MIXTYPE_MONOEXPAND) in += 1;
                 if constexpr (MIXTYPE == MIXTYPE_STEREOEXPAND) in += 2;
             } else /* constexpr */ {
                 static_assert(dependent_false<MIXTYPE>, "invalid mixtype");
diff --git a/media/libaudioprocessing/AudioResampler.cpp b/media/libaudioprocessing/AudioResampler.cpp
index c761b38..51673d7 100644
--- a/media/libaudioprocessing/AudioResampler.cpp
+++ b/media/libaudioprocessing/AudioResampler.cpp
@@ -268,7 +268,7 @@
         mPhaseFraction(0),
         mQuality(quality) {
 
-    const int maxChannels = quality < DYN_LOW_QUALITY ? 2 : 8;
+    const int maxChannels = quality < DYN_LOW_QUALITY ? FCC_2 : FCC_LIMIT;
     if (inChannelCount < 1
             || inChannelCount > maxChannels) {
         LOG_ALWAYS_FATAL("Unsupported sample format %d quality %d channels",
diff --git a/media/libaudioprocessing/AudioResamplerDyn.cpp b/media/libaudioprocessing/AudioResamplerDyn.cpp
index ec56b00..2292b19 100644
--- a/media/libaudioprocessing/AudioResamplerDyn.cpp
+++ b/media/libaudioprocessing/AudioResamplerDyn.cpp
@@ -25,7 +25,6 @@
 
 #include <cutils/compiler.h>
 #include <cutils/properties.h>
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include <audio_utils/primitives.h>
 
@@ -546,64 +545,76 @@
     // Note: A stride of 2 is achieved with non-SIMD processing.
     int stride = ((c.mHalfNumCoefs & 7) == 0) ? 16 : 2;
     LOG_ALWAYS_FATAL_IF(stride < 16, "Resampler stride must be 16 or more");
-    LOG_ALWAYS_FATAL_IF(mChannelCount < 1 || mChannelCount > 8,
-            "Resampler channels(%d) must be between 1 to 8", mChannelCount);
+    LOG_ALWAYS_FATAL_IF(mChannelCount < 1 || mChannelCount > FCC_LIMIT,
+            "Resampler channels(%d) must be between 1 to %d", mChannelCount, FCC_LIMIT);
     // stride 16 (falls back to stride 2 for machines that do not support NEON)
+
+
+// For now use a #define as a compiler generated function table requires renaming.
+#pragma push_macro("AUDIORESAMPLERDYN_CASE")
+#undef AUDIORESAMPLERDYN_CASE
+#define AUDIORESAMPLERDYN_CASE(CHANNEL, LOCKED) \
+    case CHANNEL: if constexpr (CHANNEL <= FCC_LIMIT) {\
+        mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<CHANNEL, LOCKED, 16>; \
+    } break
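+// The `if constexpr (CHANNEL <= FCC_LIMIT)` guard keeps resample<> from being instantiated
+// for channel counts above FCC_LIMIT, so all 24 case labels can be listed unconditionally.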
+
     if (locked) {
         switch (mChannelCount) {
-        case 1:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<1, true, 16>;
-            break;
-        case 2:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<2, true, 16>;
-            break;
-        case 3:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<3, true, 16>;
-            break;
-        case 4:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<4, true, 16>;
-            break;
-        case 5:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<5, true, 16>;
-            break;
-        case 6:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<6, true, 16>;
-            break;
-        case 7:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<7, true, 16>;
-            break;
-        case 8:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<8, true, 16>;
-            break;
+        AUDIORESAMPLERDYN_CASE(1, true);
+        AUDIORESAMPLERDYN_CASE(2, true);
+        AUDIORESAMPLERDYN_CASE(3, true);
+        AUDIORESAMPLERDYN_CASE(4, true);
+        AUDIORESAMPLERDYN_CASE(5, true);
+        AUDIORESAMPLERDYN_CASE(6, true);
+        AUDIORESAMPLERDYN_CASE(7, true);
+        AUDIORESAMPLERDYN_CASE(8, true);
+        AUDIORESAMPLERDYN_CASE(9, true);
+        AUDIORESAMPLERDYN_CASE(10, true);
+        AUDIORESAMPLERDYN_CASE(11, true);
+        AUDIORESAMPLERDYN_CASE(12, true);
+        AUDIORESAMPLERDYN_CASE(13, true);
+        AUDIORESAMPLERDYN_CASE(14, true);
+        AUDIORESAMPLERDYN_CASE(15, true);
+        AUDIORESAMPLERDYN_CASE(16, true);
+        AUDIORESAMPLERDYN_CASE(17, true);
+        AUDIORESAMPLERDYN_CASE(18, true);
+        AUDIORESAMPLERDYN_CASE(19, true);
+        AUDIORESAMPLERDYN_CASE(20, true);
+        AUDIORESAMPLERDYN_CASE(21, true);
+        AUDIORESAMPLERDYN_CASE(22, true);
+        AUDIORESAMPLERDYN_CASE(23, true);
+        AUDIORESAMPLERDYN_CASE(24, true);
         }
     } else {
         switch (mChannelCount) {
-        case 1:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<1, false, 16>;
-            break;
-        case 2:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<2, false, 16>;
-            break;
-        case 3:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<3, false, 16>;
-            break;
-        case 4:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<4, false, 16>;
-            break;
-        case 5:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<5, false, 16>;
-            break;
-        case 6:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<6, false, 16>;
-            break;
-        case 7:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<7, false, 16>;
-            break;
-        case 8:
-            mResampleFunc = &AudioResamplerDyn<TC, TI, TO>::resample<8, false, 16>;
-            break;
+        AUDIORESAMPLERDYN_CASE(1, false);
+        AUDIORESAMPLERDYN_CASE(2, false);
+        AUDIORESAMPLERDYN_CASE(3, false);
+        AUDIORESAMPLERDYN_CASE(4, false);
+        AUDIORESAMPLERDYN_CASE(5, false);
+        AUDIORESAMPLERDYN_CASE(6, false);
+        AUDIORESAMPLERDYN_CASE(7, false);
+        AUDIORESAMPLERDYN_CASE(8, false);
+        AUDIORESAMPLERDYN_CASE(9, false);
+        AUDIORESAMPLERDYN_CASE(10, false);
+        AUDIORESAMPLERDYN_CASE(11, false);
+        AUDIORESAMPLERDYN_CASE(12, false);
+        AUDIORESAMPLERDYN_CASE(13, false);
+        AUDIORESAMPLERDYN_CASE(14, false);
+        AUDIORESAMPLERDYN_CASE(15, false);
+        AUDIORESAMPLERDYN_CASE(16, false);
+        AUDIORESAMPLERDYN_CASE(17, false);
+        AUDIORESAMPLERDYN_CASE(18, false);
+        AUDIORESAMPLERDYN_CASE(19, false);
+        AUDIORESAMPLERDYN_CASE(20, false);
+        AUDIORESAMPLERDYN_CASE(21, false);
+        AUDIORESAMPLERDYN_CASE(22, false);
+        AUDIORESAMPLERDYN_CASE(23, false);
+        AUDIORESAMPLERDYN_CASE(24, false);
         }
     }
+#pragma pop_macro("AUDIORESAMPLERDYN_CASE")
+
 #ifdef DEBUG_RESAMPLER
     printf("channels:%d  %s  stride:%d  %s  coef:%d  shift:%d\n",
             mChannelCount, locked ? "locked" : "interpolated",
@@ -636,7 +647,7 @@
     const uint32_t phaseWrapLimit = c.mL << c.mShift;
     size_t inFrameCount = (phaseIncrement * (uint64_t)outFrameCount + phaseFraction)
             / phaseWrapLimit;
-    // sanity check that inFrameCount is in signed 32 bit integer range.
+    // validate that inFrameCount is in signed 32 bit integer range.
     ALOG_ASSERT(0 <= inFrameCount && inFrameCount < (1U << 31));
 
     //ALOGV("inFrameCount:%d  outFrameCount:%d"
@@ -646,7 +657,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     // the following logic is a bit convoluted to keep the main processing loop
     // as tight as possible with register allocation.
diff --git a/media/libaudioprocessing/AudioResamplerFirProcess.h b/media/libaudioprocessing/AudioResamplerFirProcess.h
index 9b70a1c..1fcffcc 100644
--- a/media/libaudioprocessing/AudioResamplerFirProcess.h
+++ b/media/libaudioprocessing/AudioResamplerFirProcess.h
@@ -381,7 +381,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     if (LOCKED) {
         // locked polyphase (no interpolation)
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index 5a03a0d..f2c386d 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -404,7 +404,7 @@
     // NOTE: be very careful when modifying the code here. register
     // pressure is very high and a small change might cause the compiler
     // to generate far less efficient code.
-    // Always sanity check the result with objdump or test-resample.
+    // Always validate the result with objdump or test-resample.
 
     // compute the index of the coefficient on the positive side and
     // negative side
diff --git a/media/libaudioprocessing/audio-resampler/Android.bp b/media/libaudioprocessing/audio-resampler/Android.bp
index dc70310..4ea75e7 100644
--- a/media/libaudioprocessing/audio-resampler/Android.bp
+++ b/media/libaudioprocessing/audio-resampler/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libaudio-resampler",
 
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 3f7cd48..70eafe3 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -22,10 +22,10 @@
 #include <stdint.h>
 #include <sys/types.h>
 
-#include <android/os/IExternalVibratorService.h>
 #include <media/AudioMixerBase.h>
 #include <media/BufferProviders.h>
 #include <utils/threads.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 // FIXME This is actually unity gain, which might not be max in future, expressed in U.12
 #define MAX_GAIN_INT AudioMixerBase::UNITY_GAIN_INT
@@ -55,32 +55,6 @@
                                   // parameter 'value' is a pointer to the new playback rate.
     };
 
-    typedef enum { // Haptic intensity, should keep consistent with VibratorService
-        HAPTIC_SCALE_MUTE = os::IExternalVibratorService::SCALE_MUTE,
-        HAPTIC_SCALE_VERY_LOW = os::IExternalVibratorService::SCALE_VERY_LOW,
-        HAPTIC_SCALE_LOW = os::IExternalVibratorService::SCALE_LOW,
-        HAPTIC_SCALE_NONE = os::IExternalVibratorService::SCALE_NONE,
-        HAPTIC_SCALE_HIGH = os::IExternalVibratorService::SCALE_HIGH,
-        HAPTIC_SCALE_VERY_HIGH = os::IExternalVibratorService::SCALE_VERY_HIGH,
-    } haptic_intensity_t;
-    static constexpr float HAPTIC_SCALE_VERY_LOW_RATIO = 2.0f / 3.0f;
-    static constexpr float HAPTIC_SCALE_LOW_RATIO = 3.0f / 4.0f;
-    static const constexpr float HAPTIC_MAX_AMPLITUDE_FLOAT = 1.0f;
-
-    static inline bool isValidHapticIntensity(haptic_intensity_t hapticIntensity) {
-        switch (hapticIntensity) {
-        case HAPTIC_SCALE_MUTE:
-        case HAPTIC_SCALE_VERY_LOW:
-        case HAPTIC_SCALE_LOW:
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return true;
-        default:
-            return false;
-        }
-    }
-
     AudioMixer(size_t frameCount, uint32_t sampleRate)
             : AudioMixerBase(frameCount, sampleRate) {
         pthread_once(&sOnceControl, &sInitRoutine);
@@ -170,7 +144,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        haptic_intensity_t   mHapticIntensity;
+        os::HapticScale      mHapticIntensity;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
         audio_channel_mask_t mMixerHapticChannelMask;
@@ -180,38 +154,6 @@
         uint32_t             mAdjustNonDestructiveInChannelCount;
         uint32_t             mAdjustNonDestructiveOutChannelCount;
         bool                 mKeepContractedChannels;
-
-        float getHapticScaleGamma() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return 2.0f;
-        case HAPTIC_SCALE_LOW:
-            return 1.5f;
-        case HAPTIC_SCALE_HIGH:
-            return 0.5f;
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 0.25f;
-        default:
-            return 1.0f;
-        }
-        }
-
-        float getHapticMaxAmplitudeRatio() const {
-        // Need to keep consistent with the value in VibratorService.
-        switch (mHapticIntensity) {
-        case HAPTIC_SCALE_VERY_LOW:
-            return HAPTIC_SCALE_VERY_LOW_RATIO;
-        case HAPTIC_SCALE_LOW:
-            return HAPTIC_SCALE_LOW_RATIO;
-        case HAPTIC_SCALE_NONE:
-        case HAPTIC_SCALE_HIGH:
-        case HAPTIC_SCALE_VERY_HIGH:
-            return 1.0f;
-        default:
-            return 0.0f;
-        }
-        }
     };
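The haptic handling that used to live in these inline helpers now comes from vibrator/ExternalVibrationUtils.h via os::HapticScale (see the include swap above). For reference, and purely as a restatement of the deleted methods rather than the ExternalVibrationUtils API (which is not part of this diff), the intensity-to-gamma / max-amplitude mapping was:

    // Sketch: the values are exactly those of the deleted getHapticScaleGamma() and
    // getHapticMaxAmplitudeRatio(); the enum is a stand-in for os::HapticScale.
    enum class HapticScaleSketch { MUTE, VERY_LOW, LOW, NONE, HIGH, VERY_HIGH };

    struct HapticScaleParams { float gamma; float maxAmplitudeRatio; };

    inline HapticScaleParams hapticParamsFor(HapticScaleSketch scale) {
        switch (scale) {
        case HapticScaleSketch::VERY_LOW:  return {2.0f,  2.0f / 3.0f};
        case HapticScaleSketch::LOW:       return {1.5f,  3.0f / 4.0f};
        case HapticScaleSketch::NONE:      return {1.0f,  1.0f};
        case HapticScaleSketch::HIGH:      return {0.5f,  1.0f};
        case HapticScaleSketch::VERY_HIGH: return {0.25f, 1.0f};
        default:                           return {1.0f,  0.0f};  // MUTE
        }
    }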
 
     inline std::shared_ptr<Track> getTrack(int name) {
diff --git a/media/libaudioprocessing/include/media/AudioMixerBase.h b/media/libaudioprocessing/include/media/AudioMixerBase.h
index cf84b83..3419816 100644
--- a/media/libaudioprocessing/include/media/AudioMixerBase.h
+++ b/media/libaudioprocessing/include/media/AudioMixerBase.h
@@ -45,8 +45,7 @@
 {
 public:
     // Do not change these unless underlying code changes.
-    // This mixer has a hard-coded upper limit of 8 channels for output.
-    static constexpr uint32_t MAX_NUM_CHANNELS = FCC_8;
+    static constexpr uint32_t MAX_NUM_CHANNELS = FCC_LIMIT;
     static constexpr uint32_t MAX_NUM_VOLUMES = FCC_2; // stereo volume only
 
     static const uint16_t UNITY_GAIN_INT = 0x1000;
diff --git a/media/libaudioprocessing/include/media/AudioResamplerPublic.h b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
index 1b39067..200a4c8 100644
--- a/media/libaudioprocessing/include/media/AudioResamplerPublic.h
+++ b/media/libaudioprocessing/include/media/AudioResamplerPublic.h
@@ -59,7 +59,7 @@
 
 static inline bool isAudioPlaybackRateValid(const AudioPlaybackRate &playbackRate) {
     if (playbackRate.mFallbackMode == AUDIO_TIMESTRETCH_FALLBACK_FAIL &&
-            (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_SPEECH ||
+            (playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_VOICE ||
                     playbackRate.mStretchMode == AUDIO_TIMESTRETCH_STRETCH_DEFAULT)) {
         //test sonic specific constraints
         return playbackRate.mSpeed >= TIMESTRETCH_SONIC_SPEED_MIN &&
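Callers typically use this check as a gate before handing a requested rate to the time stretcher. A minimal sketch, assuming the field and constant names declared in this header (canApplyVoiceRate is a hypothetical helper):

    #include <media/AudioResamplerPublic.h>

    namespace android {

    // Hypothetical helper: validate a requested rate before configuring the stretcher.
    bool canApplyVoiceRate(float speed, float pitch) {
        AudioPlaybackRate rate;
        rate.mSpeed = speed;
        rate.mPitch = pitch;
        rate.mStretchMode = AUDIO_TIMESTRETCH_STRETCH_VOICE;
        rate.mFallbackMode = AUDIO_TIMESTRETCH_FALLBACK_FAIL;
        // with FALLBACK_FAIL, out-of-range speeds/pitches are rejected up front
        // rather than silently clamped or muted
        return isAudioPlaybackRateValid(rate);
    }

    }  // namespace android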
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index 18acef7..3856817 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -1,5 +1,14 @@
 // Build the unit tests for libaudioprocessing
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libaudioprocessing_test_defaults",
 
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 1df47b7..8fb6fff 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_fuzz {
   name: "libaudioprocessing_resampler_fuzzer",
   srcs: [
@@ -8,3 +17,14 @@
     "libsndfile",
   ],
 }
+
+cc_fuzz {
+  name: "libaudioprocessing_record_buffer_converter_fuzzer",
+  srcs: [
+    "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+  ],
+  defaults: ["libaudioprocessing_test_defaults"],
+  static_libs: [
+    "libsndfile",
+  ],
+}
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
new file mode 100644
index 0000000..5165925
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_fuzz_utils.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+#define ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
+
+#include <media/AudioBufferProvider.h>
+#include <system/audio.h>
+
+namespace android {
+
+class Provider : public AudioBufferProvider {
+  const void* mAddr;        // base address
+  const size_t mNumFrames;  // total frames
+  const size_t mFrameSize;  // size of each frame in bytes
+  size_t mNextFrame;        // index of next frame to provide
+  size_t mUnrel;            // number of frames not yet released
+ public:
+  Provider(const void* addr, size_t frames, size_t frameSize)
+      : mAddr(addr),
+        mNumFrames(frames),
+        mFrameSize(frameSize),
+        mNextFrame(0),
+        mUnrel(0) {}
+  status_t getNextBuffer(Buffer* buffer) override {
+    if (buffer->frameCount > mNumFrames - mNextFrame) {
+      buffer->frameCount = mNumFrames - mNextFrame;
+    }
+    mUnrel = buffer->frameCount;
+    if (buffer->frameCount > 0) {
+      buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
+      return NO_ERROR;
+    } else {
+      buffer->raw = nullptr;
+      return NOT_ENOUGH_DATA;
+    }
+  }
+  void releaseBuffer(Buffer* buffer) override {
+    if (buffer->frameCount > mUnrel) {
+      mNextFrame += mUnrel;
+      mUnrel = 0;
+    } else {
+      mNextFrame += buffer->frameCount;
+      mUnrel -= buffer->frameCount;
+    }
+    buffer->frameCount = 0;
+    buffer->raw = nullptr;
+  }
+  void reset() { mNextFrame = 0; }
+};
+
+} // namespace android
+
+#endif // ANDROID_LIBAUDIOPROCESSING_FUZZ_UTILS_H
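A rough usage sketch of this Provider, mirroring how the fuzzers below drive it (feedFrames and the 128-frame request size are illustrative, not part of the header):

    #include "libaudioprocessing_fuzz_utils.h"

    #include <stddef.h>
    #include <stdint.h>
    #include <vector>

    void feedFrames(const std::vector<uint8_t>& pcm, size_t numFrames, size_t frameSize) {
        android::Provider provider(pcm.data(), numFrames, frameSize);
        android::AudioBufferProvider::Buffer buf;
        buf.frameCount = 128;                    // request up to 128 frames per call
        while (provider.getNextBuffer(&buf) == android::NO_ERROR) {
            // ... consume buf.frameCount frames starting at buf.raw ...
            provider.releaseBuffer(&buf);        // return them and advance the cursor
            buf.frameCount = 128;
        }
    }

getNextBuffer() clamps the request to the frames remaining and returns NOT_ENOUGH_DATA once the buffer is exhausted, so the loop terminates on its own.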
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
new file mode 100644
index 0000000..017598c
--- /dev/null
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_record_buffer_converter_fuzzer.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "libaudioprocessing_fuzz_utils.h"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <media/AudioResampler.h>
+#include <media/RecordBufferConverter.h>
+#include <stddef.h>
+#include <stdint.h>
+
+using namespace android;
+
+constexpr int MAX_FRAMES = 1024;
+
+#define AUDIO_FORMAT_PCM_MAIN 0
+
+// Copied and simplified from audio-hal-enums.h?l=571
+constexpr uint32_t FUZZ_AUDIO_FORMATS[] = {
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_16_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_32_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_8_24_BIT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_FLOAT,
+  AUDIO_FORMAT_PCM_MAIN | AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED,
+  0x01000000u,
+  0x02000000u,
+  0x03000000u,
+  0x04000000u,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_MAIN,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SSR,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LTP,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ERLC,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_LD,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_ELD,
+  AUDIO_FORMAT_AAC | AUDIO_FORMAT_AAC_SUB_XHE,
+  0x05000000u,
+  0x06000000u,
+  0x07000000u,
+  0x08000000u,
+  0x09000000u,
+  0x0A000000u,
+  AUDIO_FORMAT_E_AC3 | AUDIO_FORMAT_E_AC3_SUB_JOC,
+  0x0B000000u,
+  0x0C000000u,
+  0x0D000000u,
+  0x0E000000u,
+  0x10000000u,
+  0x11000000u,
+  0x12000000u,
+  0x13000000u,
+  0x14000000u,
+  0x15000000u,
+  0x16000000u,
+  0x17000000u,
+  0x18000000u,
+  0x19000000u,
+  0x1A000000u,
+  0x1B000000u,
+  0x1C000000u,
+  0x1D000000u,
+  0x1E000000u,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_MAIN,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SSR,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LTP,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_SCALABLE,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ERLC,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_LD,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_ELD,
+  AUDIO_FORMAT_AAC_ADTS | AUDIO_FORMAT_AAC_SUB_XHE,
+  0x1F000000u,
+  0x20000000u,
+  0x21000000u,
+  0x22000000u,
+  0x23000000u,
+  0x24000000u,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_1_0,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_0,
+  AUDIO_FORMAT_MAT | AUDIO_FORMAT_MAT_SUB_2_1,
+  0x25000000u,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_LC,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V1,
+  AUDIO_FORMAT_AAC_LATM | AUDIO_FORMAT_AAC_SUB_HE_V2,
+  0x26000000u,
+  0x27000000u,
+  0x28000000u,
+  0x29000000u,
+  0x2A000000u,
+  0x2B000000u,
+  0xFFFFFFFFu,
+  AUDIO_FORMAT_PCM_MAIN,
+  AUDIO_FORMAT_PCM,
+};
+constexpr size_t NUM_AUDIO_FORMATS = std::size(FUZZ_AUDIO_FORMATS);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  fdp.ConsumeIntegral<int>();
+
+  const audio_channel_mask_t srcChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+  const audio_format_t srcFormat =
+      (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+  const uint32_t srcSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+  const audio_channel_mask_t dstChannelMask = (audio_channel_mask_t)fdp.ConsumeIntegral<int>();
+  const audio_format_t dstFormat =
+      (audio_format_t)FUZZ_AUDIO_FORMATS[fdp.ConsumeIntegralInRange<int>(0, NUM_AUDIO_FORMATS - 1)];
+  const uint32_t dstSampleRate = fdp.ConsumeIntegralInRange<int>(1, 0x7fffffff);
+
+  // Certain formats will result in LOG_ALWAYS_FATAL errors that aren't interesting crashes
+  // for fuzzing.  Don't use those formats.
+  const uint32_t dstChannelCount = audio_channel_count_from_in_mask(dstChannelMask);
+  constexpr android::AudioResampler::src_quality quality =
+      android::AudioResampler::DEFAULT_QUALITY;
+  const int maxChannels =
+      quality < android::AudioResampler::DYN_LOW_QUALITY ? 2 : 8;
+  if (dstChannelCount < 1 || dstChannelCount > maxChannels) {
+    return 0;
+  }
+
+  const uint32_t srcChannelCount = audio_channel_count_from_in_mask(srcChannelMask);
+  if (srcChannelCount < 1 || srcChannelCount > maxChannels) {
+    return 0;
+  }
+
+  RecordBufferConverter converter(srcChannelMask, srcFormat, srcSampleRate,
+                                  dstChannelMask, dstFormat, dstSampleRate);
+  if (converter.initCheck() != NO_ERROR) {
+    return 0;
+  }
+
+  const uint32_t srcFrameSize = srcChannelCount * audio_bytes_per_sample(srcFormat);
+  const int srcNumFrames = fdp.ConsumeIntegralInRange<int>(0, MAX_FRAMES);
+  constexpr size_t metadataSize = 2 + 3 * sizeof(int) + 2 * sizeof(float);
+  std::vector<uint8_t> inputData = fdp.ConsumeBytes<uint8_t>(
+      metadataSize + (srcFrameSize * srcNumFrames));
+  Provider provider(inputData.data(), srcNumFrames, srcFrameSize);
+
+  const uint32_t dstFrameSize = dstChannelCount * audio_bytes_per_sample(dstFormat);
+  const size_t frames = fdp.ConsumeIntegralInRange<size_t>(0, MAX_FRAMES + 1);
+  int8_t dst[dstFrameSize * frames];
+  memset(dst, 0, sizeof(int8_t) * dstFrameSize * frames);
+
+  // Add a small number of loops to see if repeated calls to convert cause
+  // any change in behavior.
+  const int numLoops = fdp.ConsumeIntegralInRange<int>(1, 3);
+  for (int loop = 0; loop < numLoops; ++loop) {
+    switch (fdp.ConsumeIntegralInRange<int>(0, 1)) {
+      case 0:
+        converter.reset();
+        FALLTHROUGH_INTENDED;
+      case 1:
+        converter.convert(dst, &provider, frames);
+        break;
+    }
+  }
+
+  return 0;
+}
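Two notes on the arithmetic above: with 4-byte int and float, as on Android's 32- and 64-bit ABIs, the metadataSize reserved for the converter's non-PCM fuzz input works out to 2 + 3*4 + 2*4 = 22 bytes on top of the srcFrameSize * srcNumFrames PCM bytes. And since DEFAULT_QUALITY sorts below the DYN_* entries of src_quality, maxChannels evaluates to 2 here, so only mono and stereo channel masks survive the early returns.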
diff --git a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
index 938c610..65c9a3c 100644
--- a/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
+++ b/media/libaudioprocessing/tests/fuzzer/libaudioprocessing_resampler_fuzzer.cpp
@@ -34,6 +34,8 @@
 #include <unistd.h>
 #include <utils/Vector.h>
 
+#include "libaudioprocessing_fuzz_utils.h"
+
 #include <memory>
 
 using namespace android;
@@ -53,46 +55,6 @@
     AudioResampler::DYN_HIGH_QUALITY,
 };
 
-class Provider : public AudioBufferProvider {
-  const void* mAddr;        // base address
-  const size_t mNumFrames;  // total frames
-  const size_t mFrameSize;  // size of each frame in bytes
-  size_t mNextFrame;        // index of next frame to provide
-  size_t mUnrel;            // number of frames not yet released
- public:
-  Provider(const void* addr, size_t frames, size_t frameSize)
-      : mAddr(addr),
-        mNumFrames(frames),
-        mFrameSize(frameSize),
-        mNextFrame(0),
-        mUnrel(0) {}
-  status_t getNextBuffer(Buffer* buffer) override {
-    if (buffer->frameCount > mNumFrames - mNextFrame) {
-      buffer->frameCount = mNumFrames - mNextFrame;
-    }
-    mUnrel = buffer->frameCount;
-    if (buffer->frameCount > 0) {
-      buffer->raw = (char*)mAddr + mFrameSize * mNextFrame;
-      return NO_ERROR;
-    } else {
-      buffer->raw = nullptr;
-      return NOT_ENOUGH_DATA;
-    }
-  }
-  virtual void releaseBuffer(Buffer* buffer) {
-    if (buffer->frameCount > mUnrel) {
-      mNextFrame += mUnrel;
-      mUnrel = 0;
-    } else {
-      mNextFrame += buffer->frameCount;
-      mUnrel -= buffer->frameCount;
-    }
-    buffer->frameCount = 0;
-    buffer->raw = nullptr;
-  }
-  void reset() { mNextFrame = 0; }
-};
-
 audio_format_t chooseFormat(AudioResampler::src_quality quality,
                             uint8_t input_byte) {
   switch (quality) {
diff --git a/media/libaudioprocessing/tests/mixerops_benchmark.cpp b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
index 86f5429..7a4c5c7 100644
--- a/media/libaudioprocessing/tests/mixerops_benchmark.cpp
+++ b/media/libaudioprocessing/tests/mixerops_benchmark.cpp
@@ -74,28 +74,32 @@
     }
 }
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 2);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 2);
+// MULTI mode and MULTI_SAVEONLY mode are not used by AudioMixer for channels > 2,
+// which is ensured by a static_assert (won't compile for those configurations).
+// So we benchmark MIXTYPE_MULTI_MONOVOL and MIXTYPE_MULTI_SAVEONLY_MONOVOL compared
+// with MIXTYPE_MULTI_STEREOVOL and MIXTYPE_MULTI_SAVEONLY_STEREOVOL.
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 2);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 2);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 2);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 4);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 4);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 4);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 4);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 5);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 5);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 5);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 5);
 
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeRampMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI, 8);
-BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_MONOVOL, 8);
+BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_MONOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_STEREOVOL, 8);
 BENCHMARK_TEMPLATE(BM_VolumeMulti, MIXTYPE_MULTI_SAVEONLY_STEREOVOL, 8);
 
diff --git a/media/libaudioprocessing/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
index bc9d2a6..1bbb863 100644
--- a/media/libaudioprocessing/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -241,7 +241,8 @@
     // set up the tracks.
     for (size_t i = 0; i < providers.size(); ++i) {
         //printf("track %d out of %d\n", i, providers.size());
-        uint32_t channelMask = audio_channel_out_mask_from_count(providers[i].getNumChannels());
+        audio_channel_mask_t channelMask =
+                audio_channel_out_mask_from_count(providers[i].getNumChannels());
         const int name = i;
         const status_t status = mixer->create(
                 name, channelMask, formats[i], AUDIO_SESSION_OUTPUT_MIX);
diff --git a/media/libcpustats/Android.bp b/media/libcpustats/Android.bp
index 6e8ca1d..1ab1de0 100644
--- a/media/libcpustats/Android.bp
+++ b/media/libcpustats/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libcpustats",
 
diff --git a/media/libcpustats/ThreadCpuUsage.cpp b/media/libcpustats/ThreadCpuUsage.cpp
index 4b7549f..e71a7db 100644
--- a/media/libcpustats/ThreadCpuUsage.cpp
+++ b/media/libcpustats/ThreadCpuUsage.cpp
@@ -21,6 +21,7 @@
 #include <stdlib.h>
 #include <string.h>
 #include <time.h>
+#include <unistd.h>
 
 #include <utils/Log.h>
 
diff --git a/media/libdatasource/Android.bp b/media/libdatasource/Android.bp
index f191c21..e0c6808 100644
--- a/media/libdatasource/Android.bp
+++ b/media/libdatasource/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libdatasource",
 
diff --git a/media/libdatasource/DataSourceFactory.cpp b/media/libdatasource/DataSourceFactory.cpp
index bb6a08c..f91e3ea 100644
--- a/media/libdatasource/DataSourceFactory.cpp
+++ b/media/libdatasource/DataSourceFactory.cpp
@@ -65,6 +65,9 @@
         sp<HTTPBase> mediaHTTP = httpSource;
         if (mediaHTTP == NULL) {
             mediaHTTP = static_cast<HTTPBase *>(CreateMediaHTTP(httpService).get());
+            if (mediaHTTP == NULL) {
+                return NULL;
+            }
         }
 
         String8 cacheConfig;
diff --git a/media/libeffects/OWNERS b/media/libeffects/OWNERS
index 7f9ae81..b7832ea 100644
--- a/media/libeffects/OWNERS
+++ b/media/libeffects/OWNERS
@@ -1,4 +1,3 @@
 hunga@google.com
-krocard@google.com
 mnaganov@google.com
 rago@google.com
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 8493e30..b02dcb6 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -1,4 +1,13 @@
 // Effect configuration
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libeffectsconfig",
     vendor_available: true,
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 26eaaf8..1696233 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -138,7 +138,7 @@
 
 template <>
 bool stringToStreamType(const char *streamName, audio_devices_t* type) {
-    return deviceFromString(streamName, *type);
+    return DeviceConverter::fromString(streamName, *type);
 }
 
 /** Parse a library xml note and push the result in libraries or return false on failure. */
diff --git a/media/libeffects/data/audio_effects.xml b/media/libeffects/data/audio_effects.xml
index 2e5f529..93a2181 100644
--- a/media/libeffects/data/audio_effects.xml
+++ b/media/libeffects/data/audio_effects.xml
@@ -21,6 +21,7 @@
         <library name="downmix" path="libdownmix.so"/>
         <library name="loudness_enhancer" path="libldnhncr.so"/>
         <library name="dynamics_processing" path="libdynproc.so"/>
+        <library name="haptic_generator" path="libhapticgenerator.so"/>
     </libraries>
 
     <!-- list of effects to load.
@@ -58,6 +59,7 @@
         <effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
         <effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
         <effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
+        <effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
     </effects>
 
     <!-- Audio pre processor configurations.
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index 2a2f36e..b26d028 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -1,9 +1,28 @@
 // Multichannel downmix effect library
-cc_library_shared {
-    name: "libdownmix",
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_downmix_license",
+    ],
+}
 
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_downmix_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
+    name: "libdownmix",
+    host_supported: true,
     vendor: true,
-    srcs: ["EffectDownmix.c"],
+    srcs: ["EffectDownmix.cpp"],
 
     shared_libs: [
         "libaudioutils",
@@ -14,7 +33,6 @@
     relative_install_path: "soundfx",
 
     cflags: [
-        "-DBUILD_FLOAT",
         "-fvisibility=hidden",
         "-Wall",
         "-Werror",
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
deleted file mode 100644
index 99ac4f5..0000000
--- a/media/libeffects/downmix/EffectDownmix.c
+++ /dev/null
@@ -1,1432 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "EffectDownmix"
-//#define LOG_NDEBUG 0
-
-#include <inttypes.h>
-#include <stdbool.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include <log/log.h>
-
-#include "EffectDownmix.h"
-
-// Do not submit with DOWNMIX_TEST_CHANNEL_INDEX defined, strictly for testing
-//#define DOWNMIX_TEST_CHANNEL_INDEX 0
-// Do not submit with DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER defined, strictly for testing
-//#define DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER 0
-
-#ifdef BUILD_FLOAT
-#define MINUS_3_DB_IN_FLOAT 0.70710678f // -3dB = 0.70710678f
-const audio_format_t gTargetFormat = AUDIO_FORMAT_PCM_FLOAT;
-#else
-#define MINUS_3_DB_IN_Q19_12 2896 // -3dB = 0.707 * 2^12 = 2896
-const audio_format_t gTargetFormat = AUDIO_FORMAT_PCM_16_BIT;
-#endif
-
-// subset of possible audio_channel_mask_t values, and AUDIO_CHANNEL_OUT_* renamed to CHANNEL_MASK_*
-typedef enum {
-    CHANNEL_MASK_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD_BACK,
-    CHANNEL_MASK_QUAD_SIDE = AUDIO_CHANNEL_OUT_QUAD_SIDE,
-    CHANNEL_MASK_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1_BACK,
-    CHANNEL_MASK_5POINT1_SIDE = AUDIO_CHANNEL_OUT_5POINT1_SIDE,
-    CHANNEL_MASK_7POINT1 = AUDIO_CHANNEL_OUT_7POINT1,
-} downmix_input_channel_mask_t;
-
-// effect_handle_t interface implementation for downmix effect
-const struct effect_interface_s gDownmixInterface = {
-        Downmix_Process,
-        Downmix_Command,
-        Downmix_GetDescriptor,
-        NULL /* no process_reverse function, no reference stream needed */
-};
-
-// This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Downmix Library",
-    .implementor = "The Android Open Source Project",
-    .create_effect = DownmixLib_Create,
-    .release_effect = DownmixLib_Release,
-    .get_descriptor = DownmixLib_GetDescriptor,
-};
-
-
-// AOSP insert downmix UUID: 93f04452-e4fe-41cc-91f9-e475b6d1d69f
-static const effect_descriptor_t gDownmixDescriptor = {
-        EFFECT_UIID_DOWNMIX__, //type
-        {0x93f04452, 0xe4fe, 0x41cc, 0x91f9, {0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}}, // uuid
-        EFFECT_CONTROL_API_VERSION,
-        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
-        0, //FIXME what value should be reported? // cpu load
-        0, //FIXME what value should be reported? // memory usage
-        "Multichannel Downmix To Stereo", // human readable effect name
-        "The Android Open Source Project" // human readable effect implementor name
-};
-
-// gDescriptors contains pointers to all defined effect descriptor in this library
-static const effect_descriptor_t * const gDescriptors[] = {
-        &gDownmixDescriptor
-};
-
-// number of effects in this library
-const int kNbEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
-#ifdef BUILD_FLOAT
-static LVM_FLOAT clamp_float(LVM_FLOAT a) {
-    if (a > 1.0f) {
-        return 1.0f;
-    }
-    else if (a < -1.0f) {
-        return -1.0f;
-    }
-    else {
-        return a;
-    }
-}
-#endif
-/*----------------------------------------------------------------------------
- * Test code
- *--------------------------------------------------------------------------*/
-#ifdef DOWNMIX_TEST_CHANNEL_INDEX
-// strictly for testing, logs the indices of the channels for a given mask,
-// uses the same code as Downmix_foldGeneric()
-void Downmix_testIndexComputation(uint32_t mask) {
-    ALOGI("Testing index computation for 0x%" PRIx32 ":", mask);
-    // check against unsupported channels
-    if (mask & kUnsupported) {
-        ALOGE("Unsupported channels (top or front left/right of center)");
-        return;
-    }
-    // verify has FL/FR
-    if ((mask & AUDIO_CHANNEL_OUT_STEREO) != AUDIO_CHANNEL_OUT_STEREO) {
-        ALOGE("Front channels must be present");
-        return;
-    }
-    // verify uses SIDE as a pair (ok if not using SIDE at all)
-    bool hasSides = false;
-    if ((mask & kSides) != 0) {
-        if ((mask & kSides) != kSides) {
-            ALOGE("Side channels must be used as a pair");
-            return;
-        }
-        hasSides = true;
-    }
-    // verify uses BACK as a pair (ok if not using BACK at all)
-    bool hasBacks = false;
-    if ((mask & kBacks) != 0) {
-        if ((mask & kBacks) != kBacks) {
-            ALOGE("Back channels must be used as a pair");
-            return;
-        }
-        hasBacks = true;
-    }
-
-    const int numChan = audio_channel_count_from_out_mask(mask);
-    const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
-    const bool hasLFE =
-            ((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
-    const bool hasBC = ((mask & AUDIO_CHANNEL_OUT_BACK_CENTER) == AUDIO_CHANNEL_OUT_BACK_CENTER);
-    // compute at what index each channel is: samples will be in the following order:
-    //   FL FR FC LFE BL BR BC SL SR
-    // when a channel is not present, its index is set to the same as the index of the preceding
-    // channel
-    const int indexFC  = hasFC    ? 2            : 1;        // front center
-    const int indexLFE = hasLFE   ? indexFC + 1  : indexFC;  // low frequency
-    const int indexBL  = hasBacks ? indexLFE + 1 : indexLFE; // back left
-    const int indexBR  = hasBacks ? indexBL + 1  : indexBL;  // back right
-    const int indexBC  = hasBC    ? indexBR + 1  : indexBR;  // back center
-    const int indexSL  = hasSides ? indexBC + 1  : indexBC;  // side left
-    const int indexSR  = hasSides ? indexSL + 1  : indexSL;  // side right
-
-    ALOGI("  FL FR FC LFE BL BR BC SL SR");
-    ALOGI("   %d  %d  %d   %d  %d  %d  %d  %d  %d",
-            0, 1, indexFC, indexLFE, indexBL, indexBR, indexBC, indexSL, indexSR);
-}
-#endif
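As a worked example of the index computation in the helper above: for CHANNEL_MASK_5POINT1_BACK (FL FR FC LFE BL BR), hasFC, hasLFE and hasBacks are true while hasBC and hasSides are false, so the indices come out as FL=0, FR=1, FC=2, LFE=3, BL=4, BR=5, and the absent BC/SL/SR all collapse onto index 5 (the index of the preceding channel); the log line would print 0 1 2 3 4 5 5 5 5.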
-
-static bool Downmix_validChannelMask(uint32_t mask)
-{
-    if (!mask) {
-        return false;
-    }
-    // check against unsupported channels
-    if (mask & kUnsupported) {
-        ALOGE("Unsupported channels (top or front left/right of center)");
-        return false;
-    }
-    // verify has FL/FR
-    if ((mask & AUDIO_CHANNEL_OUT_STEREO) != AUDIO_CHANNEL_OUT_STEREO) {
-        ALOGE("Front channels must be present");
-        return false;
-    }
-    // verify uses SIDE as a pair (ok if not using SIDE at all)
-    if ((mask & kSides) != 0) {
-        if ((mask & kSides) != kSides) {
-            ALOGE("Side channels must be used as a pair");
-            return false;
-        }
-    }
-    // verify uses BACK as a pair (ok if not using BACK at all)
-    if ((mask & kBacks) != 0) {
-        if ((mask & kBacks) != kBacks) {
-            ALOGE("Back channels must be used as a pair");
-            return false;
-        }
-    }
-    return true;
-}
-
-/*----------------------------------------------------------------------------
- * Effect API implementation
- *--------------------------------------------------------------------------*/
-
-/*--- Effect Library Interface Implementation ---*/
-
-int32_t DownmixLib_Create(const effect_uuid_t *uuid,
-        int32_t sessionId __unused,
-        int32_t ioId __unused,
-        effect_handle_t *pHandle) {
-    int ret;
-    int i;
-    downmix_module_t *module;
-    const effect_descriptor_t *desc;
-
-    ALOGV("DownmixLib_Create()");
-
-#ifdef DOWNMIX_TEST_CHANNEL_INDEX
-    // should work (won't log an error)
-    ALOGI("DOWNMIX_TEST_CHANNEL_INDEX: should work:");
-    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
-                    AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_BACK_CENTER);
-    Downmix_testIndexComputation(CHANNEL_MASK_QUAD_SIDE | CHANNEL_MASK_QUAD_BACK);
-    Downmix_testIndexComputation(CHANNEL_MASK_5POINT1_SIDE | AUDIO_CHANNEL_OUT_BACK_CENTER);
-    Downmix_testIndexComputation(CHANNEL_MASK_5POINT1_BACK | AUDIO_CHANNEL_OUT_BACK_CENTER);
-    // shouldn't work (will log an error, won't display channel indices)
-    ALOGI("DOWNMIX_TEST_CHANNEL_INDEX: should NOT work:");
-    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
-                        AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_BACK_LEFT);
-    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
-                            AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_SIDE_LEFT);
-    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT |
-                        AUDIO_CHANNEL_OUT_BACK_LEFT | AUDIO_CHANNEL_OUT_BACK_RIGHT);
-    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT |
-                            AUDIO_CHANNEL_OUT_SIDE_LEFT | AUDIO_CHANNEL_OUT_SIDE_RIGHT);
-#endif
-
-    if (pHandle == NULL || uuid == NULL) {
-        return -EINVAL;
-    }
-
-    for (i = 0 ; i < kNbEffects ; i++) {
-        desc = gDescriptors[i];
-        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) == 0) {
-            break;
-        }
-    }
-
-    if (i == kNbEffects) {
-        return -ENOENT;
-    }
-
-    module = malloc(sizeof(downmix_module_t));
-
-    module->itfe = &gDownmixInterface;
-
-    module->context.state = DOWNMIX_STATE_UNINITIALIZED;
-
-    ret = Downmix_Init(module);
-    if (ret < 0) {
-        ALOGW("DownmixLib_Create() init failed");
-        free(module);
-        return ret;
-    }
-
-    *pHandle = (effect_handle_t) module;
-
-    ALOGV("DownmixLib_Create() %p , size %zu", module, sizeof(downmix_module_t));
-
-    return 0;
-}
-
-
-int32_t DownmixLib_Release(effect_handle_t handle) {
-    downmix_module_t *pDwmModule = (downmix_module_t *)handle;
-
-    ALOGV("DownmixLib_Release() %p", handle);
-    if (handle == NULL) {
-        return -EINVAL;
-    }
-
-    pDwmModule->context.state = DOWNMIX_STATE_UNINITIALIZED;
-
-    free(pDwmModule);
-    return 0;
-}
-
-
-int32_t DownmixLib_GetDescriptor(const effect_uuid_t *uuid, effect_descriptor_t *pDescriptor) {
-    ALOGV("DownmixLib_GetDescriptor()");
-    int i;
-
-    if (pDescriptor == NULL || uuid == NULL){
-        ALOGE("DownmixLib_Create() called with NULL pointer");
-        return -EINVAL;
-    }
-    ALOGV("DownmixLib_GetDescriptor() nb effects=%d", kNbEffects);
-    for (i = 0; i < kNbEffects; i++) {
-        ALOGV("DownmixLib_GetDescriptor() i=%d", i);
-        if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
-            memcpy(pDescriptor, gDescriptors[i], sizeof(effect_descriptor_t));
-            ALOGV("EffectGetDescriptor - UUID matched downmix type %d, UUID = %" PRIx32,
-                 i, gDescriptors[i]->uuid.timeLow);
-            return 0;
-        }
-    }
-
-    return -EINVAL;
-}
-
-#ifndef BUILD_FLOAT
-/*--- Effect Control Interface Implementation ---*/
-
-static int Downmix_Process(effect_handle_t self,
-        audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
-
-    downmix_object_t *pDownmixer;
-    int16_t *pSrc, *pDst;
-    downmix_module_t *pDwmModule = (downmix_module_t *)self;
-
-    if (pDwmModule == NULL) {
-        return -EINVAL;
-    }
-
-    if (inBuffer == NULL || inBuffer->raw == NULL ||
-        outBuffer == NULL || outBuffer->raw == NULL ||
-        inBuffer->frameCount != outBuffer->frameCount) {
-        return -EINVAL;
-    }
-
-    pDownmixer = (downmix_object_t*) &pDwmModule->context;
-
-    if (pDownmixer->state == DOWNMIX_STATE_UNINITIALIZED) {
-        ALOGE("Downmix_Process error: trying to use an uninitialized downmixer");
-        return -EINVAL;
-    } else if (pDownmixer->state == DOWNMIX_STATE_INITIALIZED) {
-        ALOGE("Downmix_Process error: trying to use a non-configured downmixer");
-        return -ENODATA;
-    }
-
-    pSrc = inBuffer->s16;
-    pDst = outBuffer->s16;
-    size_t numFrames = outBuffer->frameCount;
-
-    const bool accumulate =
-            (pDwmModule->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    const uint32_t downmixInputChannelMask = pDwmModule->config.inputCfg.channels;
-
-    switch(pDownmixer->type) {
-
-      case DOWNMIX_TYPE_STRIP:
-          if (accumulate) {
-              while (numFrames) {
-                  pDst[0] = clamp16(pDst[0] + pSrc[0]);
-                  pDst[1] = clamp16(pDst[1] + pSrc[1]);
-                  pSrc += pDownmixer->input_channel_count;
-                  pDst += 2;
-                  numFrames--;
-              }
-          } else {
-              while (numFrames) {
-                  pDst[0] = pSrc[0];
-                  pDst[1] = pSrc[1];
-                  pSrc += pDownmixer->input_channel_count;
-                  pDst += 2;
-                  numFrames--;
-              }
-          }
-          break;
-
-      case DOWNMIX_TYPE_FOLD:
-#ifdef DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER
-          // bypass the optimized downmix routines for the common formats
-          if (!Downmix_foldGeneric(
-                  downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
-              ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported", downmixInputChannelMask);
-              return -EINVAL;
-          }
-          break;
-#endif
-        // optimize for the common formats
-        switch((downmix_input_channel_mask_t)downmixInputChannelMask) {
-        case CHANNEL_MASK_QUAD_BACK:
-        case CHANNEL_MASK_QUAD_SIDE:
-            Downmix_foldFromQuad(pSrc, pDst, numFrames, accumulate);
-            break;
-        case CHANNEL_MASK_5POINT1_BACK:
-        case CHANNEL_MASK_5POINT1_SIDE:
-            Downmix_foldFrom5Point1(pSrc, pDst, numFrames, accumulate);
-            break;
-        case CHANNEL_MASK_7POINT1:
-            Downmix_foldFrom7Point1(pSrc, pDst, numFrames, accumulate);
-            break;
-        default:
-            if (!Downmix_foldGeneric(
-                    downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
-                ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported", downmixInputChannelMask);
-                return -EINVAL;
-            }
-            break;
-        }
-        break;
-
-      default:
-        return -EINVAL;
-    }
-
-    return 0;
-}
-#else /*BUILD_FLOAT*/
-/*--- Effect Control Interface Implementation ---*/
-
-static int Downmix_Process(effect_handle_t self,
-        audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
-
-    downmix_object_t *pDownmixer;
-    LVM_FLOAT *pSrc, *pDst;
-    downmix_module_t *pDwmModule = (downmix_module_t *)self;
-
-    if (pDwmModule == NULL) {
-        return -EINVAL;
-    }
-
-    if (inBuffer == NULL || inBuffer->raw == NULL ||
-        outBuffer == NULL || outBuffer->raw == NULL ||
-        inBuffer->frameCount != outBuffer->frameCount) {
-        return -EINVAL;
-    }
-
-    pDownmixer = (downmix_object_t*) &pDwmModule->context;
-
-    if (pDownmixer->state == DOWNMIX_STATE_UNINITIALIZED) {
-        ALOGE("Downmix_Process error: trying to use an uninitialized downmixer");
-        return -EINVAL;
-    } else if (pDownmixer->state == DOWNMIX_STATE_INITIALIZED) {
-        ALOGE("Downmix_Process error: trying to use a non-configured downmixer");
-        return -ENODATA;
-    }
-
-    pSrc = (LVM_FLOAT *) inBuffer->s16;
-    pDst = (LVM_FLOAT *) outBuffer->s16;
-    size_t numFrames = outBuffer->frameCount;
-
-    const bool accumulate =
-            (pDwmModule->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
-    const uint32_t downmixInputChannelMask = pDwmModule->config.inputCfg.channels;
-
-    switch(pDownmixer->type) {
-
-      case DOWNMIX_TYPE_STRIP:
-          if (accumulate) {
-              while (numFrames) {
-                  pDst[0] = clamp_float(pDst[0] + pSrc[0]);
-                  pDst[1] = clamp_float(pDst[1] + pSrc[1]);
-                  pSrc += pDownmixer->input_channel_count;
-                  pDst += 2;
-                  numFrames--;
-              }
-          } else {
-              while (numFrames) {
-                  pDst[0] = pSrc[0];
-                  pDst[1] = pSrc[1];
-                  pSrc += pDownmixer->input_channel_count;
-                  pDst += 2;
-                  numFrames--;
-              }
-          }
-          break;
-
-      case DOWNMIX_TYPE_FOLD:
-#ifdef DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER
-          // bypass the optimized downmix routines for the common formats
-          if (!Downmix_foldGeneric(
-                  downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
-              ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported",
-                    downmixInputChannelMask);
-              return -EINVAL;
-          }
-          break;
-#endif
-        // optimize for the common formats
-        switch((downmix_input_channel_mask_t)downmixInputChannelMask) {
-        case CHANNEL_MASK_QUAD_BACK:
-        case CHANNEL_MASK_QUAD_SIDE:
-            Downmix_foldFromQuad(pSrc, pDst, numFrames, accumulate);
-            break;
-        case CHANNEL_MASK_5POINT1_BACK:
-        case CHANNEL_MASK_5POINT1_SIDE:
-            Downmix_foldFrom5Point1(pSrc, pDst, numFrames, accumulate);
-            break;
-        case CHANNEL_MASK_7POINT1:
-            Downmix_foldFrom7Point1(pSrc, pDst, numFrames, accumulate);
-            break;
-        default:
-            if (!Downmix_foldGeneric(
-                    downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
-                ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported",
-                      downmixInputChannelMask);
-                return -EINVAL;
-            }
-            break;
-        }
-        break;
-
-      default:
-        return -EINVAL;
-    }
-
-    return 0;
-}
-#endif
-
-static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
-        void *pCmdData, uint32_t *replySize, void *pReplyData) {
-
-    downmix_module_t *pDwmModule = (downmix_module_t *) self;
-    downmix_object_t *pDownmixer;
-
-    if (pDwmModule == NULL || pDwmModule->context.state == DOWNMIX_STATE_UNINITIALIZED) {
-        return -EINVAL;
-    }
-
-    pDownmixer = (downmix_object_t*) &pDwmModule->context;
-
-    ALOGV("Downmix_Command command %" PRIu32 " cmdSize %" PRIu32, cmdCode, cmdSize);
-
-    switch (cmdCode) {
-    case EFFECT_CMD_INIT:
-        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
-            return -EINVAL;
-        }
-        *(int *) pReplyData = Downmix_Init(pDwmModule);
-        break;
-
-    case EFFECT_CMD_SET_CONFIG:
-        if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
-                || pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
-            return -EINVAL;
-        }
-        *(int *) pReplyData = Downmix_Configure(pDwmModule,
-                (effect_config_t *)pCmdData, false);
-        break;
-
-    case EFFECT_CMD_RESET:
-        Downmix_Reset(pDownmixer, false);
-        break;
-
-    case EFFECT_CMD_GET_PARAM:
-        ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM pCmdData %p, *replySize %" PRIu32 ", pReplyData: %p",
-                pCmdData, *replySize, pReplyData);
-        if (pCmdData == NULL || cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)) ||
-                pReplyData == NULL || replySize == NULL ||
-                *replySize < (int) sizeof(effect_param_t) + 2 * sizeof(int32_t)) {
-            return -EINVAL;
-        }
-        effect_param_t *rep = (effect_param_t *) pReplyData;
-        memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(int32_t));
-        ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM param %" PRId32 ", replySize %" PRIu32,
-                *(int32_t *)rep->data, rep->vsize);
-        rep->status = Downmix_getParameter(pDownmixer, *(int32_t *)rep->data, &rep->vsize,
-                rep->data + sizeof(int32_t));
-        *replySize = sizeof(effect_param_t) + sizeof(int32_t) + rep->vsize;
-        break;
-
-    case EFFECT_CMD_SET_PARAM:
-        ALOGV("Downmix_Command EFFECT_CMD_SET_PARAM cmdSize %d pCmdData %p, *replySize %" PRIu32
-                ", pReplyData %p", cmdSize, pCmdData, *replySize, pReplyData);
-        if (pCmdData == NULL || (cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)))
-                || pReplyData == NULL || replySize == NULL || *replySize != (int)sizeof(int32_t)) {
-            return -EINVAL;
-        }
-        effect_param_t *cmd = (effect_param_t *) pCmdData;
-        if (cmd->psize != sizeof(int32_t)) {
-            android_errorWriteLog(0x534e4554, "63662938");
-            return -EINVAL;
-        }
-        *(int *)pReplyData = Downmix_setParameter(pDownmixer, *(int32_t *)cmd->data,
-                cmd->vsize, cmd->data + sizeof(int32_t));
-        break;
-
-    case EFFECT_CMD_SET_PARAM_DEFERRED:
-        //FIXME implement
-        ALOGW("Downmix_Command command EFFECT_CMD_SET_PARAM_DEFERRED not supported, FIXME");
-        break;
-
-    case EFFECT_CMD_SET_PARAM_COMMIT:
-        //FIXME implement
-        ALOGW("Downmix_Command command EFFECT_CMD_SET_PARAM_COMMIT not supported, FIXME");
-        break;
-
-    case EFFECT_CMD_ENABLE:
-        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
-            return -EINVAL;
-        }
-        if (pDownmixer->state != DOWNMIX_STATE_INITIALIZED) {
-            return -ENOSYS;
-        }
-        pDownmixer->state = DOWNMIX_STATE_ACTIVE;
-        ALOGV("EFFECT_CMD_ENABLE() OK");
-        *(int *)pReplyData = 0;
-        break;
-
-    case EFFECT_CMD_DISABLE:
-        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
-            return -EINVAL;
-        }
-        if (pDownmixer->state != DOWNMIX_STATE_ACTIVE) {
-            return -ENOSYS;
-        }
-        pDownmixer->state = DOWNMIX_STATE_INITIALIZED;
-        ALOGV("EFFECT_CMD_DISABLE() OK");
-        *(int *)pReplyData = 0;
-        break;
-
-    case EFFECT_CMD_SET_DEVICE:
-        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t)) {
-            return -EINVAL;
-        }
-        // FIXME change type if playing on headset vs speaker
-        ALOGV("Downmix_Command EFFECT_CMD_SET_DEVICE: 0x%08" PRIx32, *(uint32_t *)pCmdData);
-        break;
-
-    case EFFECT_CMD_SET_VOLUME: {
-        // audio output is always stereo => 2 channel volumes
-        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t) * 2) {
-            return -EINVAL;
-        }
-        // FIXME change volume
-        ALOGW("Downmix_Command command EFFECT_CMD_SET_VOLUME not supported, FIXME");
-        float left = (float)(*(uint32_t *)pCmdData) / (1 << 24);
-        float right = (float)(*((uint32_t *)pCmdData + 1)) / (1 << 24);
-        ALOGV("Downmix_Command EFFECT_CMD_SET_VOLUME: left %f, right %f ", left, right);
-        break;
-    }
-
-    case EFFECT_CMD_SET_AUDIO_MODE:
-        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t)) {
-            return -EINVAL;
-        }
-        ALOGV("Downmix_Command EFFECT_CMD_SET_AUDIO_MODE: %" PRIu32, *(uint32_t *)pCmdData);
-        break;
-
-    case EFFECT_CMD_SET_CONFIG_REVERSE:
-    case EFFECT_CMD_SET_INPUT_DEVICE:
-        // these commands are ignored by a downmix effect
-        break;
-
-    default:
-        ALOGW("Downmix_Command invalid command %" PRIu32, cmdCode);
-        return -EINVAL;
-    }
-
-    return 0;
-}
-
-
-int Downmix_GetDescriptor(effect_handle_t self, effect_descriptor_t *pDescriptor)
-{
-    downmix_module_t *pDwnmxModule = (downmix_module_t *) self;
-
-    if (pDwnmxModule == NULL ||
-            pDwnmxModule->context.state == DOWNMIX_STATE_UNINITIALIZED) {
-        return -EINVAL;
-    }
-
-    memcpy(pDescriptor, &gDownmixDescriptor, sizeof(effect_descriptor_t));
-
-    return 0;
-}
-
-
-/*----------------------------------------------------------------------------
- * Downmix internal functions
- *--------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
- * Downmix_Init()
- *----------------------------------------------------------------------------
- * Purpose:
- * Initialize downmix context and apply default parameters
- *
- * Inputs:
- *  pDwmModule    pointer to downmix effect module
- *
- * Outputs:
- *
- * Returns:
- *  0             indicates success
- *
- * Side Effects:
- *  updates:
- *           pDwmModule->context.type
- *           pDwmModule->context.apply_volume_correction
- *           pDwmModule->config.inputCfg
- *           pDwmModule->config.outputCfg
- *           pDwmModule->config.inputCfg.samplingRate
- *           pDwmModule->config.outputCfg.samplingRate
- *           pDwmModule->context.state
- *  doesn't set:
- *           pDwmModule->itfe
- *
- *----------------------------------------------------------------------------
- */
-
-int Downmix_Init(downmix_module_t *pDwmModule) {
-
-    ALOGV("Downmix_Init module %p", pDwmModule);
-    int ret = 0;
-
-    memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
-
-    pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
-    pDwmModule->config.inputCfg.format = gTargetFormat;
-    pDwmModule->config.inputCfg.channels = AUDIO_CHANNEL_OUT_7POINT1;
-    pDwmModule->config.inputCfg.bufferProvider.getBuffer = NULL;
-    pDwmModule->config.inputCfg.bufferProvider.releaseBuffer = NULL;
-    pDwmModule->config.inputCfg.bufferProvider.cookie = NULL;
-    pDwmModule->config.inputCfg.mask = EFFECT_CONFIG_ALL;
-
-    pDwmModule->config.inputCfg.samplingRate = 44100;
-    pDwmModule->config.outputCfg.samplingRate = pDwmModule->config.inputCfg.samplingRate;
-
-    // set a default value for the access mode, but should be overwritten by caller
-    pDwmModule->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-    pDwmModule->config.outputCfg.format = gTargetFormat;
-    pDwmModule->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-    pDwmModule->config.outputCfg.bufferProvider.getBuffer = NULL;
-    pDwmModule->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-    pDwmModule->config.outputCfg.bufferProvider.cookie = NULL;
-    pDwmModule->config.outputCfg.mask = EFFECT_CONFIG_ALL;
-
-    ret = Downmix_Configure(pDwmModule, &pDwmModule->config, true);
-    if (ret != 0) {
-        ALOGV("Downmix_Init error %d on module %p", ret, pDwmModule);
-    } else {
-        pDwmModule->context.state = DOWNMIX_STATE_INITIALIZED;
-    }
-
-    return ret;
-}
-
-
-/*----------------------------------------------------------------------------
- * Downmix_Configure()
- *----------------------------------------------------------------------------
- * Purpose:
- *  Set input and output audio configuration.
- *
- * Inputs:
- *  pDwmModule  pointer to downmix effect module
- *  pConfig     pointer to effect_config_t structure containing input
- *                  and output audio parameters configuration
- *  init        true if called from init function
- *
- * Outputs:
- *
- * Returns:
- *  0           indicates success
- *
- * Side Effects:
- *
- *----------------------------------------------------------------------------
- */
-
-int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bool init) {
-
-    downmix_object_t *pDownmixer = &pDwmModule->context;
-
-    // Check configuration compatibility with build options, and effect capabilities
-    if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate
-        || pConfig->outputCfg.channels != DOWNMIX_OUTPUT_CHANNELS
-        || pConfig->inputCfg.format != gTargetFormat
-        || pConfig->outputCfg.format != gTargetFormat) {
-        ALOGE("Downmix_Configure error: invalid config");
-        return -EINVAL;
-    }
-
-    if (&pDwmModule->config != pConfig) {
-        memcpy(&pDwmModule->config, pConfig, sizeof(effect_config_t));
-    }
-
-    if (init) {
-        pDownmixer->type = DOWNMIX_TYPE_FOLD;
-        pDownmixer->apply_volume_correction = false;
-        pDownmixer->input_channel_count = 8; // matches default input of AUDIO_CHANNEL_OUT_7POINT1
-    } else {
-        // when configuring the effect, do not allow a blank or unsupported channel mask
-        if (!Downmix_validChannelMask(pConfig->inputCfg.channels)) {
-            ALOGE("Downmix_Configure error: input channel mask(0x%x) not supported",
-                                                        pConfig->inputCfg.channels);
-            return -EINVAL;
-        }
-        pDownmixer->input_channel_count =
-                audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
-    }
-
-    Downmix_Reset(pDownmixer, init);
-
-    return 0;
-}
-
-
-/*----------------------------------------------------------------------------
- * Downmix_Reset()
- *----------------------------------------------------------------------------
- * Purpose:
- *  Reset internal states.
- *
- * Inputs:
- *  pDownmixer   pointer to downmix context
- *  init         true if called from init function
- *
- * Outputs:
- *
- * Returns:
- *  0            indicates success
- *
- * Side Effects:
- *
- *----------------------------------------------------------------------------
- */
-
-int Downmix_Reset(downmix_object_t *pDownmixer __unused, bool init __unused) {
-    // nothing to do here
-    return 0;
-}
-
-
-/*----------------------------------------------------------------------------
- * Downmix_setParameter()
- *----------------------------------------------------------------------------
- * Purpose:
- * Set a Downmix parameter
- *
- * Inputs:
- *  pDownmixer    handle to instance data
- *  param         parameter
- *  pValue        pointer to parameter value
- *  size          value size
- *
- * Outputs:
- *
- * Returns:
- *  0             indicates success
- *
- * Side Effects:
- *
- *----------------------------------------------------------------------------
- */
-int Downmix_setParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t size, void *pValue) {
-
-    int16_t value16;
-    ALOGV("Downmix_setParameter, context %p, param %" PRId32 ", value16 %" PRId16 ", value32 %" PRId32,
-            pDownmixer, param, *(int16_t *)pValue, *(int32_t *)pValue);
-
-    switch (param) {
-
-      case DOWNMIX_PARAM_TYPE:
-        if (size != sizeof(downmix_type_t)) {
-            ALOGE("Downmix_setParameter(DOWNMIX_PARAM_TYPE) invalid size %" PRIu32 ", should be %zu",
-                    size, sizeof(downmix_type_t));
-            return -EINVAL;
-        }
-        value16 = *(int16_t *)pValue;
-        ALOGV("set DOWNMIX_PARAM_TYPE, type %" PRId16, value16);
-        if (!((value16 > DOWNMIX_TYPE_INVALID) && (value16 <= DOWNMIX_TYPE_LAST))) {
-            ALOGE("Downmix_setParameter invalid DOWNMIX_PARAM_TYPE value %" PRId16, value16);
-            return -EINVAL;
-        } else {
-            pDownmixer->type = (downmix_type_t) value16;
-        }
-        break;
-
-      default:
-        ALOGE("Downmix_setParameter unknown parameter %" PRId32, param);
-        return -EINVAL;
-    }
-
-    return 0;
-} /* end Downmix_setParameter */
-
-
-/*----------------------------------------------------------------------------
- * Downmix_getParameter()
- *----------------------------------------------------------------------------
- * Purpose:
- * Get a Downmix parameter
- *
- * Inputs:
- *  pDownmixer    handle to instance data
- *  param         parameter
- *  pValue        pointer to variable to hold retrieved value
- *  pSize         pointer to value size: maximum size as input
- *
- * Outputs:
- *  *pValue updated with parameter value
- *  *pSize updated with actual value size
- *
- * Returns:
- *  0             indicates success
- *
- * Side Effects:
- *
- *----------------------------------------------------------------------------
- */
-int Downmix_getParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue) {
-    int16_t *pValue16;
-
-    switch (param) {
-
-    case DOWNMIX_PARAM_TYPE:
-      if (*pSize < sizeof(int16_t)) {
-          ALOGE("Downmix_getParameter invalid parameter size %" PRIu32 " for DOWNMIX_PARAM_TYPE", *pSize);
-          return -EINVAL;
-      }
-      pValue16 = (int16_t *)pValue;
-      *pValue16 = (int16_t) pDownmixer->type;
-      *pSize = sizeof(int16_t);
-      ALOGV("Downmix_getParameter DOWNMIX_PARAM_TYPE is %" PRId16, *pValue16);
-      break;
-
-    default:
-      ALOGE("Downmix_getParameter unknown parameter %" PRId32, param);
-      return -EINVAL;
-    }
-
-    return 0;
-} /* end Downmix_getParameter */
-
-
-/*----------------------------------------------------------------------------
- * Downmix_foldFromQuad()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix a quad signal to stereo
- *
- * Inputs:
- *  pSrc       quad audio samples to downmix
- *  numFrames  the number of quad frames to downmix
- *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- *               or overwrite pDst (when false)
- *
- * Outputs:
- *  pDst       downmixed stereo audio samples
- *
- *----------------------------------------------------------------------------
- */
-#ifndef BUILD_FLOAT
-void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is RL
-    // sample at index 3 is RR
-    if (accumulate) {
-        while (numFrames) {
-            // FL + RL
-            pDst[0] = clamp16(pDst[0] + ((pSrc[0] + pSrc[2]) >> 1));
-            // FR + RR
-            pDst[1] = clamp16(pDst[1] + ((pSrc[1] + pSrc[3]) >> 1));
-            pSrc += 4;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // FL + RL
-            pDst[0] = clamp16((pSrc[0] + pSrc[2]) >> 1);
-            // FR + RR
-            pDst[1] = clamp16((pSrc[1] + pSrc[3]) >> 1);
-            pSrc += 4;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#else
-void Downmix_foldFromQuad(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate) {
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is RL
-    // sample at index 3 is RR
-    if (accumulate) {
-        while (numFrames) {
-            // FL + RL
-            pDst[0] = clamp_float(pDst[0] + ((pSrc[0] + pSrc[2]) / 2.0f));
-            // FR + RR
-            pDst[1] = clamp_float(pDst[1] + ((pSrc[1] + pSrc[3]) / 2.0f));
-            pSrc += 4;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // FL + RL
-            pDst[0] = clamp_float((pSrc[0] + pSrc[2]) / 2.0f);
-            // FR + RR
-            pDst[1] = clamp_float((pSrc[1] + pSrc[3]) / 2.0f);
-            pSrc += 4;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#endif
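The quad fold above is a per-pair average: L = (FL + RL) / 2 and R = (FR + RR) / 2, saturated to 16 bits. As a stand-alone illustration of that per-frame arithmetic (not part of the patch), here is a minimal sketch with a local saturating helper standing in for the library's clamp16():

#include <stdint.h>
#include <stdio.h>

// Local stand-in for clamp16(): saturate a 32-bit sum to the int16_t range.
static int16_t clamp16_local(int32_t sample) {
    if (sample > 32767) return 32767;
    if (sample < -32768) return -32768;
    return (int16_t) sample;
}

int main() {
    const int16_t src[4] = { 20000, 10000, 30000, 4000 };  // one frame: FL, FR, RL, RR
    int16_t dst[2];
    dst[0] = clamp16_local((src[0] + src[2]) >> 1);  // L = (FL + RL) / 2
    dst[1] = clamp16_local((src[1] + src[3]) >> 1);  // R = (FR + RR) / 2
    printf("L=%d R=%d\n", dst[0], dst[1]);           // prints L=25000 R=7000
    return 0;
}

Note that the halved sum of two int16_t values can never leave the int16_t range by itself, so the clamp only matters in the accumulate branch, where pDst already holds audio.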
-
-/*----------------------------------------------------------------------------
- * Downmix_foldFrom5Point1()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix a 5.1 signal to stereo
- *
- * Inputs:
- *  pSrc       5.1 audio samples to downmix
- *  numFrames  the number of 5.1 frames to downmix
- *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- *               or overwrite pDst (when false)
- *
- * Outputs:
- *  pDst       downmixed stereo audio samples
- *
- *----------------------------------------------------------------------------
- */
-#ifndef BUILD_FLOAT
-void Downmix_foldFrom5Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
-    int32_t lt, rt, centerPlusLfeContrib; // samples in Q19.12 format
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is FC
-    // sample at index 3 is LFE
-    // sample at index 4 is RL
-    // sample at index 5 is RR
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12)
-                    + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
-            // FL + centerPlusLfeContrib + RL
-            lt = (pSrc[0] << 12) + centerPlusLfeContrib + (pSrc[4] << 12);
-            // FR + centerPlusLfeContrib + RR
-            rt = (pSrc[1] << 12) + centerPlusLfeContrib + (pSrc[5] << 12);
-            // accumulate in destination
-            pDst[0] = clamp16(pDst[0] + (lt >> 13));
-            pDst[1] = clamp16(pDst[1] + (rt >> 13));
-            pSrc += 6;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12)
-                    + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
-            // FL + centerPlusLfeContrib + RL
-            lt = (pSrc[0] << 12) + centerPlusLfeContrib + (pSrc[4] << 12);
-            // FR + centerPlusLfeContrib + RR
-            rt = (pSrc[1] << 12) + centerPlusLfeContrib + (pSrc[5] << 12);
-            // store in destination
-            pDst[0] = clamp16(lt >> 13); // differs from when accumulate is true above
-            pDst[1] = clamp16(rt >> 13); // differs from when accumulate is true above
-            pSrc += 6;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#else
-void Downmix_foldFrom5Point1(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate) {
-    LVM_FLOAT lt, rt, centerPlusLfeContrib; // floating-point samples, no Q19.12 scaling here
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is FC
-    // sample at index 3 is LFE
-    // sample at index 4 is RL
-    // sample at index 5 is RR
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
-                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
-            // FL + centerPlusLfeContrib + RL
-            lt = pSrc[0] + centerPlusLfeContrib + pSrc[4];
-            // FR + centerPlusLfeContrib + RR
-            rt = pSrc[1] + centerPlusLfeContrib + pSrc[5];
-            // accumulate in destination
-            pDst[0] = clamp_float(pDst[0] + (lt / 2.0f));
-            pDst[1] = clamp_float(pDst[1] + (rt / 2.0f));
-            pSrc += 6;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
-                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
-            // FL + centerPlusLfeContrib + RL
-            lt = pSrc[0] + centerPlusLfeContrib + pSrc[4];
-            // FR + centerPlusLfeContrib + RR
-            rt = pSrc[1] + centerPlusLfeContrib + pSrc[5];
-            // store in destination
-            pDst[0] = clamp_float(lt / 2.0f); // differs from when accumulate is true above
-            pDst[1] = clamp_float(rt / 2.0f); // differs from when accumulate is true above
-            pSrc += 6;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#endif
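The 5.1 fold adds the center and LFE channels into both sides at -3 dB before the same halving. Assuming MINUS_3_DB_IN_Q19_12 is round(0.70710678 * 4096) = 2896, as the name implies (the define itself is outside this hunk), the '<< 12' lifts the 16-bit samples into Q19.12 and the final '>> 13' combines the '>> 12' back to integer with the same divide-by-two used in the quad fold. A small self-contained sketch of one frame under that assumption:

#include <stdint.h>
#include <stdio.h>

static int16_t clamp16_local(int32_t s) {
    return s > 32767 ? 32767 : (s < -32768 ? -32768 : (int16_t) s);
}

int main() {
    const int32_t m3db_q12 = 2896;  // assumed value of MINUS_3_DB_IN_Q19_12
    const int16_t src[6] = { 8000, 4000, 4000, 2000, 1000, 500 };  // FL FR FC LFE RL RR
    // FC and LFE each contribute at -3 dB, already in Q19.12 after the multiply.
    const int32_t centerPlusLfe = src[2] * m3db_q12 + src[3] * m3db_q12;
    // '* 4096' is the same scaling as the '<< 12' used in the code above.
    const int32_t lt = src[0] * 4096 + centerPlusLfe + src[4] * 4096;
    const int32_t rt = src[1] * 4096 + centerPlusLfe + src[5] * 4096;
    // '>> 13' = '>> 12' (drop the Q12 fraction) plus '>> 1' (the /2 headroom scaling)
    printf("L=%d R=%d\n", clamp16_local(lt >> 13), clamp16_local(rt >> 13));  // L=6621 R=4371
    return 0;
}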
-
-/*----------------------------------------------------------------------------
- * Downmix_foldFrom7Point1()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix a 7.1 signal to stereo
- *
- * Inputs:
- *  pSrc       7.1 audio samples to downmix
- *  numFrames  the number of 7.1 frames to downmix
- *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- *               or overwrite pDst (when false)
- *
- * Outputs:
- *  pDst       downmixed stereo audio samples
- *
- *----------------------------------------------------------------------------
- */
-#ifndef BUILD_FLOAT
-void Downmix_foldFrom7Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
-    int32_t lt, rt, centerPlusLfeContrib; // samples in Q19.12 format
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is FC
-    // sample at index 3 is LFE
-    // sample at index 4 is RL
-    // sample at index 5 is RR
-    // sample at index 6 is SL
-    // sample at index 7 is SR
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12)
-                    + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
-            // FL + centerPlusLfeContrib + SL + RL
-            lt = (pSrc[0] << 12) + centerPlusLfeContrib + (pSrc[6] << 12) + (pSrc[4] << 12);
-            // FR + centerPlusLfeContrib + SR + RR
-            rt = (pSrc[1] << 12) + centerPlusLfeContrib + (pSrc[7] << 12) + (pSrc[5] << 12);
-            //accumulate in destination
-            pDst[0] = clamp16(pDst[0] + (lt >> 13));
-            pDst[1] = clamp16(pDst[1] + (rt >> 13));
-            pSrc += 8;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12)
-                    + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
-            // FL + centerPlusLfeContrib + SL + RL
-            lt = (pSrc[0] << 12) + centerPlusLfeContrib + (pSrc[6] << 12) + (pSrc[4] << 12);
-            // FR + centerPlusLfeContrib + SR + RR
-            rt = (pSrc[1] << 12) + centerPlusLfeContrib + (pSrc[7] << 12) + (pSrc[5] << 12);
-            // store in destination
-            pDst[0] = clamp16(lt >> 13); // differs from when accumulate is true above
-            pDst[1] = clamp16(rt >> 13); // differs from when accumulate is true above
-            pSrc += 8;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#else
-void Downmix_foldFrom7Point1(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate) {
-    LVM_FLOAT lt, rt, centerPlusLfeContrib; // floating-point samples, no Q19.12 scaling here
-    // sample at index 0 is FL
-    // sample at index 1 is FR
-    // sample at index 2 is FC
-    // sample at index 3 is LFE
-    // sample at index 4 is RL
-    // sample at index 5 is RR
-    // sample at index 6 is SL
-    // sample at index 7 is SR
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
-                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
-            // FL + centerPlusLfeContrib + SL + RL
-            lt = pSrc[0] + centerPlusLfeContrib + pSrc[6] + pSrc[4];
-            // FR + centerPlusLfeContrib + SR + RR
-            rt = pSrc[1] + centerPlusLfeContrib + pSrc[7] + pSrc[5];
-            //accumulate in destination
-            pDst[0] = clamp_float(pDst[0] + (lt / 2.0f));
-            pDst[1] = clamp_float(pDst[1] + (rt / 2.0f));
-            pSrc += 8;
-            pDst += 2;
-            numFrames--;
-        }
-    } else { // same code as above but without adding and clamping pDst[i] to itself
-        while (numFrames) {
-            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
-            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
-                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
-            // FL + centerPlusLfeContrib + SL + RL
-            lt = pSrc[0] + centerPlusLfeContrib + pSrc[6] + pSrc[4];
-            // FR + centerPlusLfeContrib + SR + RR
-            rt = pSrc[1] + centerPlusLfeContrib + pSrc[7] + pSrc[5];
-            // store in destination
-            pDst[0] = clamp_float(lt / 2.0f); // differs from when accumulate is true above
-            pDst[1] = clamp_float(rt / 2.0f); // differs from when accumulate is true above
-            pSrc += 8;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-}
-#endif
-/*----------------------------------------------------------------------------
- * Downmix_foldGeneric()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix to stereo a multichannel signal whose format is:
- *  - has FL/FR
- *  - if using AUDIO_CHANNEL_OUT_SIDE*, it contains both left and right
- *  - if using AUDIO_CHANNEL_OUT_BACK*, it contains both left and right
- *  - doesn't use any of the AUDIO_CHANNEL_OUT_TOP* channels
- *  - doesn't use any of the AUDIO_CHANNEL_OUT_FRONT_*_OF_CENTER channels
- * Only handles channel masks not enumerated in downmix_input_channel_mask_t
- *
- * Inputs:
- *  mask       the channel mask of pSrc
- *  pSrc       multichannel audio buffer to downmix
- *  numFrames  the number of multichannel frames to downmix
- *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- *               or overwrite pDst (when false)
- *
- * Outputs:
- *  pDst       downmixed stereo audio samples
- *
- * Returns: false if multichannel format is not supported
- *
- *----------------------------------------------------------------------------
- */
-#ifndef BUILD_FLOAT
-bool Downmix_foldGeneric(
-        uint32_t mask, int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
-
-    if (!Downmix_validChannelMask(mask)) {
-        return false;
-    }
-
-    const bool hasSides = (mask & kSides) != 0;
-    const bool hasBacks = (mask & kBacks) != 0;
-
-    const int numChan = audio_channel_count_from_out_mask(mask);
-    const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
-    const bool hasLFE =
-            ((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
-    const bool hasBC = ((mask & AUDIO_CHANNEL_OUT_BACK_CENTER) == AUDIO_CHANNEL_OUT_BACK_CENTER);
-    // compute at what index each channel is: samples will be in the following order:
-    //   FL FR FC LFE BL BR BC SL SR
-    // when a channel is not present, its index is set to the same as the index of the preceding
-    // channel
-    const int indexFC  = hasFC    ? 2            : 1;        // front center
-    const int indexLFE = hasLFE   ? indexFC + 1  : indexFC;  // low frequency
-    const int indexBL  = hasBacks ? indexLFE + 1 : indexLFE; // back left
-    const int indexBR  = hasBacks ? indexBL + 1  : indexBL;  // back right
-    const int indexBC  = hasBC    ? indexBR + 1  : indexBR;  // back center
-    const int indexSL  = hasSides ? indexBC + 1  : indexBC;  // side left
-    const int indexSR  = hasSides ? indexSL + 1  : indexSL;  // side right
-
-    int32_t lt, rt, centersLfeContrib; // samples in Q19.12 format
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // compute contribution of FC, BC and LFE
-            centersLfeContrib = 0;
-            if (hasFC)  { centersLfeContrib += pSrc[indexFC]; }
-            if (hasLFE) { centersLfeContrib += pSrc[indexLFE]; }
-            if (hasBC)  { centersLfeContrib += pSrc[indexBC]; }
-            centersLfeContrib *= MINUS_3_DB_IN_Q19_12;
-            // always has FL/FR
-            lt = (pSrc[0] << 12);
-            rt = (pSrc[1] << 12);
-            // mix in sides and backs
-            if (hasSides) {
-                lt += pSrc[indexSL] << 12;
-                rt += pSrc[indexSR] << 12;
-            }
-            if (hasBacks) {
-                lt += pSrc[indexBL] << 12;
-                rt += pSrc[indexBR] << 12;
-            }
-            lt += centersLfeContrib;
-            rt += centersLfeContrib;
-            // accumulate in destination
-            pDst[0] = clamp16(pDst[0] + (lt >> 13));
-            pDst[1] = clamp16(pDst[1] + (rt >> 13));
-            pSrc += numChan;
-            pDst += 2;
-            numFrames--;
-        }
-    } else {
-        while (numFrames) {
-            // compute contribution of FC, BC and LFE
-            centersLfeContrib = 0;
-            if (hasFC)  { centersLfeContrib += pSrc[indexFC]; }
-            if (hasLFE) { centersLfeContrib += pSrc[indexLFE]; }
-            if (hasBC)  { centersLfeContrib += pSrc[indexBC]; }
-            centersLfeContrib *= MINUS_3_DB_IN_Q19_12;
-            // always has FL/FR
-            lt = (pSrc[0] << 12);
-            rt = (pSrc[1] << 12);
-            // mix in sides and backs
-            if (hasSides) {
-                lt += pSrc[indexSL] << 12;
-                rt += pSrc[indexSR] << 12;
-            }
-            if (hasBacks) {
-                lt += pSrc[indexBL] << 12;
-                rt += pSrc[indexBR] << 12;
-            }
-            lt += centersLfeContrib;
-            rt += centersLfeContrib;
-            // store in destination
-            pDst[0] = clamp16(lt >> 13); // differs from when accumulate is true above
-            pDst[1] = clamp16(rt >> 13); // differs from when accumulate is true above
-            pSrc += numChan;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-    return true;
-}
-#else
-bool Downmix_foldGeneric(
-        uint32_t mask, LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate) {
-
-    if (!Downmix_validChannelMask(mask)) {
-        return false;
-    }
-
-    const bool hasSides = (mask & kSides) != 0;
-    const bool hasBacks = (mask & kBacks) != 0;
-
-    const int numChan = audio_channel_count_from_out_mask(mask);
-    const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
-    const bool hasLFE =
-            ((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
-    const bool hasBC = ((mask & AUDIO_CHANNEL_OUT_BACK_CENTER) == AUDIO_CHANNEL_OUT_BACK_CENTER);
-    // compute at what index each channel is: samples will be in the following order:
-    //   FL FR FC LFE BL BR BC SL SR
-    // when a channel is not present, its index is set to the same as the index of the preceding
-    // channel
-    const int indexFC  = hasFC    ? 2            : 1;        // front center
-    const int indexLFE = hasLFE   ? indexFC + 1  : indexFC;  // low frequency
-    const int indexBL  = hasBacks ? indexLFE + 1 : indexLFE; // back left
-    const int indexBR  = hasBacks ? indexBL + 1  : indexBL;  // back right
-    const int indexBC  = hasBC    ? indexBR + 1  : indexBR;  // back center
-    const int indexSL  = hasSides ? indexBC + 1  : indexBC;  // side left
-    const int indexSR  = hasSides ? indexSL + 1  : indexSL;  // side right
-
-    LVM_FLOAT lt, rt, centersLfeContrib;
-    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
-    // for every sample
-    if (accumulate) {
-        while (numFrames) {
-            // compute contribution of FC, BC and LFE
-            centersLfeContrib = 0;
-            if (hasFC)  { centersLfeContrib += pSrc[indexFC]; }
-            if (hasLFE) { centersLfeContrib += pSrc[indexLFE]; }
-            if (hasBC)  { centersLfeContrib += pSrc[indexBC]; }
-            centersLfeContrib *= MINUS_3_DB_IN_FLOAT;
-            // always has FL/FR
-            lt = pSrc[0];
-            rt = pSrc[1];
-            // mix in sides and backs
-            if (hasSides) {
-                lt += pSrc[indexSL];
-                rt += pSrc[indexSR];
-            }
-            if (hasBacks) {
-                lt += pSrc[indexBL];
-                rt += pSrc[indexBR];
-            }
-            lt += centersLfeContrib;
-            rt += centersLfeContrib;
-            // accumulate in destination
-            pDst[0] = clamp_float(pDst[0] + (lt / 2.0f));
-            pDst[1] = clamp_float(pDst[1] + (rt / 2.0f));
-            pSrc += numChan;
-            pDst += 2;
-            numFrames--;
-        }
-    } else {
-        while (numFrames) {
-            // compute contribution of FC, BC and LFE
-            centersLfeContrib = 0;
-            if (hasFC)  { centersLfeContrib += pSrc[indexFC]; }
-            if (hasLFE) { centersLfeContrib += pSrc[indexLFE]; }
-            if (hasBC)  { centersLfeContrib += pSrc[indexBC]; }
-            centersLfeContrib *= MINUS_3_DB_IN_FLOAT;
-            // always has FL/FR
-            lt = pSrc[0];
-            rt = pSrc[1];
-            // mix in sides and backs
-            if (hasSides) {
-                lt += pSrc[indexSL];
-                rt += pSrc[indexSR];
-            }
-            if (hasBacks) {
-                lt += pSrc[indexBL];
-                rt += pSrc[indexBR];
-            }
-            lt += centersLfeContrib;
-            rt += centersLfeContrib;
-            // store in destination
-            pDst[0] = clamp_float(lt / 2.0f); // differs from when accumulate is true above
-            pDst[1] = clamp_float(rt / 2.0f); // differs from when accumulate is true above
-            pSrc += numChan;
-            pDst += 2;
-            numFrames--;
-        }
-    }
-    return true;
-}
-#endif
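The generic fold depends on the index chain above to locate each channel in the canonical FL FR FC LFE BL BR BC SL SR order; an absent channel simply aliases the previous index and is never read, because the matching has* flag is false in the mix loop. A stand-alone sketch of that computation for a 5.1 (back) layout, using plain booleans instead of the Android mask constants:

#include <stdio.h>

int main() {
    // 5.1 (back): FL FR FC LFE BL BR present; BC, SL, SR absent.
    const bool hasFC = true, hasLFE = true, hasBacks = true, hasBC = false, hasSides = false;
    const int indexFC  = hasFC    ? 2            : 1;        // front center
    const int indexLFE = hasLFE   ? indexFC + 1  : indexFC;  // low frequency
    const int indexBL  = hasBacks ? indexLFE + 1 : indexLFE; // back left
    const int indexBR  = hasBacks ? indexBL + 1  : indexBL;  // back right
    const int indexBC  = hasBC    ? indexBR + 1  : indexBR;  // back center (absent: aliases BR)
    const int indexSL  = hasSides ? indexBC + 1  : indexBC;  // side left   (absent: aliases BC)
    const int indexSR  = hasSides ? indexSL + 1  : indexSL;  // side right  (absent: aliases SL)
    printf("FC=%d LFE=%d BL=%d BR=%d BC=%d SL=%d SR=%d\n",
           indexFC, indexLFE, indexBL, indexBR, indexBC, indexSL, indexSR);
    // prints FC=2 LFE=3 BL=4 BR=5 BC=5 SL=5 SR=5
    return 0;
}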
diff --git a/media/libeffects/downmix/EffectDownmix.cpp b/media/libeffects/downmix/EffectDownmix.cpp
new file mode 100644
index 0000000..f500bc3
--- /dev/null
+++ b/media/libeffects/downmix/EffectDownmix.cpp
@@ -0,0 +1,1127 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectDownmix"
+//#define LOG_NDEBUG 0
+#include <log/log.h>
+
+#include "EffectDownmix.h"
+#include <math.h>
+
+// Do not submit with DOWNMIX_TEST_CHANNEL_INDEX defined, strictly for testing
+//#define DOWNMIX_TEST_CHANNEL_INDEX 0
+// Do not submit with DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER defined, strictly for testing
+//#define DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER 0
+
+#define MINUS_3_DB_IN_FLOAT M_SQRT1_2 // -3dB = 0.70710678
+
+typedef enum {
+    DOWNMIX_STATE_UNINITIALIZED,
+    DOWNMIX_STATE_INITIALIZED,
+    DOWNMIX_STATE_ACTIVE,
+} downmix_state_t;
+
+/* parameters for each downmixer */
+typedef struct {
+    downmix_state_t state;
+    downmix_type_t type;
+    bool apply_volume_correction;
+    uint8_t input_channel_count;
+} downmix_object_t;
+
+typedef struct downmix_module_s {
+    const struct effect_interface_s *itfe;
+    effect_config_t config;
+    downmix_object_t context;
+} downmix_module_t;
+
+
+// Audio Effect API
+static int32_t DownmixLib_Create(const effect_uuid_t *uuid,
+        int32_t sessionId,
+        int32_t ioId,
+        effect_handle_t *pHandle);
+static int32_t DownmixLib_Release(effect_handle_t handle);
+static int32_t DownmixLib_GetDescriptor(const effect_uuid_t *uuid,
+        effect_descriptor_t *pDescriptor);
+static int32_t Downmix_Process(effect_handle_t self,
+        audio_buffer_t *inBuffer,
+        audio_buffer_t *outBuffer);
+static int32_t Downmix_Command(effect_handle_t self,
+        uint32_t cmdCode,
+        uint32_t cmdSize,
+        void *pCmdData,
+        uint32_t *replySize,
+        void *pReplyData);
+static int32_t Downmix_GetDescriptor(effect_handle_t self,
+        effect_descriptor_t *pDescriptor);
+
+// Internal methods
+static int Downmix_Init(downmix_module_t *pDwmModule);
+static int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bool init);
+static int Downmix_Reset(downmix_object_t *pDownmixer, bool init);
+static int Downmix_setParameter(
+        downmix_object_t *pDownmixer, int32_t param, uint32_t size, void *pValue);
+static int Downmix_getParameter(
+        downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue);
+static void Downmix_foldFromQuad(float *pSrc, float *pDst, size_t numFrames, bool accumulate);
+static void Downmix_foldFrom5Point1(float *pSrc, float *pDst, size_t numFrames, bool accumulate);
+static void Downmix_foldFrom7Point1(float *pSrc, float *pDst, size_t numFrames, bool accumulate);
+static bool Downmix_foldGeneric(
+        uint32_t mask, float *pSrc, float *pDst, size_t numFrames, bool accumulate);
+
+// effect_handle_t interface implementation for downmix effect
+const struct effect_interface_s gDownmixInterface = {
+        Downmix_Process,
+        Downmix_Command,
+        Downmix_GetDescriptor,
+        NULL /* no process_reverse function, no reference stream needed */
+};
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+    .tag = AUDIO_EFFECT_LIBRARY_TAG,
+    .version = EFFECT_LIBRARY_API_VERSION,
+    .name = "Downmix Library",
+    .implementor = "The Android Open Source Project",
+    .create_effect = DownmixLib_Create,
+    .release_effect = DownmixLib_Release,
+    .get_descriptor = DownmixLib_GetDescriptor,
+};
+
+// AOSP insert downmix UUID: 93f04452-e4fe-41cc-91f9-e475b6d1d69f
+static const effect_descriptor_t gDownmixDescriptor = {
+        EFFECT_UIID_DOWNMIX__, //type
+        {0x93f04452, 0xe4fe, 0x41cc, 0x91f9, {0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+        0, //FIXME what value should be reported? // cpu load
+        0, //FIXME what value should be reported? // memory usage
+        "Multichannel Downmix To Stereo", // human readable effect name
+        "The Android Open Source Project" // human readable effect implementor name
+};
+
+// gDescriptors contains pointers to all defined effect descriptor in this library
+static const effect_descriptor_t * const gDescriptors[] = {
+        &gDownmixDescriptor
+};
+
+// number of effects in this library
+const int kNbEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
+
+static inline float clamp_float(float value) {
+    return fmin(fmax(value, -1.f), 1.f);
+}
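The float path replaces the fixed-point coefficient with M_SQRT1_2: an exact -3 dB gain is 10^(-3/20), roughly 0.7079, conventionally approximated as 1/sqrt(2), roughly 0.70711, and clamp_float() keeps accumulated samples inside the nominal [-1.0, 1.0] float PCM range. A quick stand-alone check, with a local copy of the clamp for illustration only:

#include <math.h>
#include <stdio.h>

static inline float clamp_float_local(float v) { return fminf(fmaxf(v, -1.f), 1.f); }

int main() {
    printf("1/sqrt(2)   = %.8f\n", (double) M_SQRT1_2);                 // 0.70710678
    printf("10^(-3/20)  = %.8f\n", pow(10.0, -3.0 / 20.0));             // 0.70794578
    printf("clamp(1.3)  = %.2f\n", (double) clamp_float_local(1.3f));   // 1.00
    printf("clamp(-0.2) = %.2f\n", (double) clamp_float_local(-0.2f));  // -0.20
    return 0;
}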
+
+/*----------------------------------------------------------------------------
+ * Test code
+ *--------------------------------------------------------------------------*/
+#ifdef DOWNMIX_TEST_CHANNEL_INDEX
+// strictly for testing, logs the indices of the channels for a given mask,
+// uses the same code as Downmix_foldGeneric()
+void Downmix_testIndexComputation(uint32_t mask) {
+    ALOGI("Testing index computation for %#x:", mask);
+    // check against unsupported channels
+    if (mask & kUnsupported) {
+        ALOGE("Unsupported channels (top or front left/right of center)");
+        return;
+    }
+    // verify has FL/FR
+    if ((mask & AUDIO_CHANNEL_OUT_STEREO) != AUDIO_CHANNEL_OUT_STEREO) {
+        ALOGE("Front channels must be present");
+        return;
+    }
+    // verify uses SIDE as a pair (ok if not using SIDE at all)
+    bool hasSides = false;
+    if ((mask & kSides) != 0) {
+        if ((mask & kSides) != kSides) {
+            ALOGE("Side channels must be used as a pair");
+            return;
+        }
+        hasSides = true;
+    }
+    // verify uses BACK as a pair (ok if not using BACK at all)
+    bool hasBacks = false;
+    if ((mask & kBacks) != 0) {
+        if ((mask & kBacks) != kBacks) {
+            ALOGE("Back channels must be used as a pair");
+            return;
+        }
+        hasBacks = true;
+    }
+
+    const int numChan = audio_channel_count_from_out_mask(mask);
+    const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
+    const bool hasLFE =
+            ((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
+    const bool hasBC = ((mask & AUDIO_CHANNEL_OUT_BACK_CENTER) == AUDIO_CHANNEL_OUT_BACK_CENTER);
+    // compute at what index each channel is: samples will be in the following order:
+    //   FL FR FC LFE BL BR BC SL SR
+    // when a channel is not present, its index is set to the same as the index of the preceding
+    // channel
+    const int indexFC  = hasFC    ? 2            : 1;        // front center
+    const int indexLFE = hasLFE   ? indexFC + 1  : indexFC;  // low frequency
+    const int indexBL  = hasBacks ? indexLFE + 1 : indexLFE; // back left
+    const int indexBR  = hasBacks ? indexBL + 1  : indexBL;  // back right
+    const int indexBC  = hasBC    ? indexBR + 1  : indexBR;  // back center
+    const int indexSL  = hasSides ? indexBC + 1  : indexBC;  // side left
+    const int indexSR  = hasSides ? indexSL + 1  : indexSL;  // side right
+
+    ALOGI("  FL FR FC LFE BL BR BC SL SR");
+    ALOGI("   %d  %d  %d   %d  %d  %d  %d  %d  %d",
+            0, 1, indexFC, indexLFE, indexBL, indexBR, indexBC, indexSL, indexSR);
+}
+#endif
+
+static bool Downmix_validChannelMask(uint32_t mask)
+{
+    if (!mask) {
+        return false;
+    }
+    // check against unsupported channels
+    if (mask & ~AUDIO_CHANNEL_OUT_22POINT2) {
+        ALOGE("Unsupported channels in %#x", mask & ~AUDIO_CHANNEL_OUT_22POINT2);
+        return false;
+    }
+    return true;
+}
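Downmix_validChannelMask() now only rejects an empty mask or any bit that falls outside AUDIO_CHANNEL_OUT_22POINT2. A small usage sketch that restates the check locally; the header providing the mask constants is an assumption here:

#include <stdint.h>
#include <stdio.h>
#include <system/audio.h>  // assumed location of the AUDIO_CHANNEL_OUT_* masks

// Same test as Downmix_validChannelMask(), restated for illustration only.
static bool maskIsAcceptable(uint32_t mask) {
    return mask != 0 && (mask & ~AUDIO_CHANNEL_OUT_22POINT2) == 0;
}

int main() {
    printf("7.1   -> %d\n", maskIsAcceptable(AUDIO_CHANNEL_OUT_7POINT1));  // 1 (accepted)
    printf("empty -> %d\n", maskIsAcceptable(0));                          // 0 (rejected)
    return 0;
}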
+
+/*----------------------------------------------------------------------------
+ * Effect API implementation
+ *--------------------------------------------------------------------------*/
+
+/*--- Effect Library Interface Implementation ---*/
+
+static int32_t DownmixLib_Create(const effect_uuid_t *uuid,
+        int32_t /* sessionId */,
+        int32_t /* ioId */,
+        effect_handle_t *pHandle) {
+    int ret;
+    int i;
+    downmix_module_t *module;
+    const effect_descriptor_t *desc;
+
+    ALOGV("DownmixLib_Create()");
+
+#ifdef DOWNMIX_TEST_CHANNEL_INDEX
+    // should work (won't log an error)
+    ALOGI("DOWNMIX_TEST_CHANNEL_INDEX: should work:");
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                    AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_BACK_CENTER);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_QUAD_SIDE | AUDIO_CHANNEL_OUT_QUAD_BACK);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_5POINT1_SIDE | AUDIO_CHANNEL_OUT_BACK_CENTER);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_5POINT1_BACK | AUDIO_CHANNEL_OUT_BACK_CENTER);
+    // shouldn't work (will log an error, won't display channel indices)
+    ALOGI("DOWNMIX_TEST_CHANNEL_INDEX: should NOT work:");
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                        AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_BACK_LEFT);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT | AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                            AUDIO_CHANNEL_OUT_LOW_FREQUENCY | AUDIO_CHANNEL_OUT_SIDE_LEFT);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                        AUDIO_CHANNEL_OUT_BACK_LEFT | AUDIO_CHANNEL_OUT_BACK_RIGHT);
+    Downmix_testIndexComputation(AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                            AUDIO_CHANNEL_OUT_SIDE_LEFT | AUDIO_CHANNEL_OUT_SIDE_RIGHT);
+#endif
+
+    if (pHandle == NULL || uuid == NULL) {
+        return -EINVAL;
+    }
+
+    for (i = 0 ; i < kNbEffects ; i++) {
+        desc = gDescriptors[i];
+        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) == 0) {
+            break;
+        }
+    }
+
+    if (i == kNbEffects) {
+        return -ENOENT;
+    }
+
+    module = new downmix_module_t{};
+
+    module->itfe = &gDownmixInterface;
+
+    module->context.state = DOWNMIX_STATE_UNINITIALIZED;
+
+    ret = Downmix_Init(module);
+    if (ret < 0) {
+        ALOGW("DownmixLib_Create() init failed");
+        delete module;
+        return ret;
+    }
+
+    *pHandle = (effect_handle_t) module;
+
+    ALOGV("DownmixLib_Create() %p , size %zu", module, sizeof(downmix_module_t));
+
+    return 0;
+}
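In normal operation the effects factory dlopen()s this library and resolves the AUDIO_EFFECT_LIBRARY_INFO_SYM table by its exported name; the sketch below performs the same create/release flow by linking directly against the library, reusing the UUID from gDownmixDescriptor above. It is illustration only, and the header path is assumed:

#include <hardware/audio_effect.h>  // assumed: audio_effect_library_t, effect_handle_t

// UUID of this downmix implementation, copied from gDownmixDescriptor.
static const effect_uuid_t kDownmixUuid =
        {0x93f04452, 0xe4fe, 0x41cc, 0x91f9, {0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}};

extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;

// Create one instance, then release it again (configuration omitted).
static int createAndReleaseOnce() {
    effect_handle_t handle = nullptr;
    const int ret = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(
            &kDownmixUuid, 0 /* sessionId */, 0 /* ioId */, &handle);
    if (ret != 0) {
        return ret;
    }
    // ... EFFECT_CMD_SET_CONFIG / EFFECT_CMD_ENABLE / process() would go here ...
    return AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(handle);
}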
+
+static int32_t DownmixLib_Release(effect_handle_t handle) {
+    downmix_module_t *pDwmModule = (downmix_module_t *)handle;
+
+    ALOGV("DownmixLib_Release() %p", handle);
+    if (handle == NULL) {
+        return -EINVAL;
+    }
+
+    pDwmModule->context.state = DOWNMIX_STATE_UNINITIALIZED;
+
+    delete pDwmModule;
+    return 0;
+}
+
+static int32_t DownmixLib_GetDescriptor(
+        const effect_uuid_t *uuid, effect_descriptor_t *pDescriptor) {
+    ALOGV("DownmixLib_GetDescriptor()");
+    int i;
+
+    if (pDescriptor == NULL || uuid == NULL){
+        ALOGE("DownmixLib_GetDescriptor() called with NULL pointer");
+        return -EINVAL;
+    }
+    ALOGV("DownmixLib_GetDescriptor() nb effects=%d", kNbEffects);
+    for (i = 0; i < kNbEffects; i++) {
+        ALOGV("DownmixLib_GetDescriptor() i=%d", i);
+        if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
+            memcpy(pDescriptor, gDescriptors[i], sizeof(effect_descriptor_t));
+            ALOGV("DownmixLib_GetDescriptor() - UUID matched downmix type %d, UUID = %#x",
+                 i, gDescriptors[i]->uuid.timeLow);
+            return 0;
+        }
+    }
+
+    return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+static int32_t Downmix_Process(effect_handle_t self,
+        audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+
+    downmix_object_t *pDownmixer;
+    float *pSrc, *pDst;
+    downmix_module_t *pDwmModule = (downmix_module_t *)self;
+
+    if (pDwmModule == NULL) {
+        return -EINVAL;
+    }
+
+    if (inBuffer == NULL || inBuffer->raw == NULL ||
+        outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount) {
+        return -EINVAL;
+    }
+
+    pDownmixer = (downmix_object_t*) &pDwmModule->context;
+
+    if (pDownmixer->state == DOWNMIX_STATE_UNINITIALIZED) {
+        ALOGE("Downmix_Process error: trying to use an uninitialized downmixer");
+        return -EINVAL;
+    } else if (pDownmixer->state == DOWNMIX_STATE_INITIALIZED) {
+        ALOGE("Downmix_Process error: trying to use a non-configured downmixer");
+        return -ENODATA;
+    }
+
+    pSrc = inBuffer->f32;
+    pDst = outBuffer->f32;
+    size_t numFrames = outBuffer->frameCount;
+
+    const bool accumulate =
+            (pDwmModule->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+    const uint32_t downmixInputChannelMask = pDwmModule->config.inputCfg.channels;
+
+    switch(pDownmixer->type) {
+
+      case DOWNMIX_TYPE_STRIP:
+          if (accumulate) {
+              while (numFrames) {
+                  pDst[0] = clamp_float(pDst[0] + pSrc[0]);
+                  pDst[1] = clamp_float(pDst[1] + pSrc[1]);
+                  pSrc += pDownmixer->input_channel_count;
+                  pDst += 2;
+                  numFrames--;
+              }
+          } else {
+              while (numFrames) {
+                  pDst[0] = pSrc[0];
+                  pDst[1] = pSrc[1];
+                  pSrc += pDownmixer->input_channel_count;
+                  pDst += 2;
+                  numFrames--;
+              }
+          }
+          break;
+
+      case DOWNMIX_TYPE_FOLD:
+#ifdef DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER
+          // bypass the optimized downmix routines for the common formats
+          if (!Downmix_foldGeneric(
+                  downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
+              ALOGE("Multichannel configuration %#x is not supported",
+                    downmixInputChannelMask);
+              return -EINVAL;
+          }
+          break;
+#endif
+        // optimize for the common formats
+        switch (downmixInputChannelMask) {
+        case AUDIO_CHANNEL_OUT_QUAD_BACK:
+        case AUDIO_CHANNEL_OUT_QUAD_SIDE:
+            Downmix_foldFromQuad(pSrc, pDst, numFrames, accumulate);
+            break;
+        case AUDIO_CHANNEL_OUT_5POINT1_BACK:
+        case AUDIO_CHANNEL_OUT_5POINT1_SIDE:
+            Downmix_foldFrom5Point1(pSrc, pDst, numFrames, accumulate);
+            break;
+        case AUDIO_CHANNEL_OUT_7POINT1:
+            Downmix_foldFrom7Point1(pSrc, pDst, numFrames, accumulate);
+            break;
+        default:
+            if (!Downmix_foldGeneric(
+                    downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
+                ALOGE("Multichannel configuration %#x is not supported",
+                      downmixInputChannelMask);
+                return -EINVAL;
+            }
+            break;
+        }
+        break;
+
+      default:
+        return -EINVAL;
+    }
+
+    return 0;
+}
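Downmix_Process() now consumes and produces float PCM through the audio_buffer_t f32 alias, and it refuses to run until the effect has been configured and enabled (DOWNMIX_STATE_ACTIVE). A hedged sketch of a single process call on a handle that has already been set up for 7.1-in / stereo-out; buffer shapes and the header path are assumptions:

#include <hardware/audio_effect.h>  // assumed: effect_handle_t, audio_buffer_t
#include <vector>

// Push numFrames of interleaved 7.1 float PCM through an active downmix handle.
static int downmixOnce(effect_handle_t handle, size_t numFrames) {
    std::vector<float> in(numFrames * 8, 0.0f);   // 8 input channels (7.1)
    std::vector<float> out(numFrames * 2, 0.0f);  // 2 output channels (stereo)

    audio_buffer_t inBuf = {};
    inBuf.frameCount = numFrames;
    inBuf.f32 = in.data();

    audio_buffer_t outBuf = {};
    outBuf.frameCount = numFrames;  // must match the input frame count
    outBuf.f32 = out.data();

    // effect_handle_t is a pointer to a pointer to the interface table.
    return (*handle)->process(handle, &inBuf, &outBuf);
}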
+
+static int32_t Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+        void *pCmdData, uint32_t *replySize, void *pReplyData) {
+
+    downmix_module_t *pDwmModule = (downmix_module_t *) self;
+    downmix_object_t *pDownmixer;
+
+    if (pDwmModule == NULL || pDwmModule->context.state == DOWNMIX_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    pDownmixer = (downmix_object_t*) &pDwmModule->context;
+
+    ALOGV("Downmix_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+    case EFFECT_CMD_INIT:
+        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = Downmix_Init(pDwmModule);
+        break;
+
+    case EFFECT_CMD_SET_CONFIG:
+        if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
+                || pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        *(int *) pReplyData = Downmix_Configure(pDwmModule,
+                (effect_config_t *)pCmdData, false);
+        break;
+
+    case EFFECT_CMD_RESET:
+        Downmix_Reset(pDownmixer, false);
+        break;
+
+    case EFFECT_CMD_GET_PARAM: {
+        ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM pCmdData %p, *replySize %u, pReplyData: %p",
+                pCmdData, *replySize, pReplyData);
+        if (pCmdData == NULL || cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)) ||
+                pReplyData == NULL || replySize == NULL ||
+                *replySize < (int) sizeof(effect_param_t) + 2 * sizeof(int32_t)) {
+            return -EINVAL;
+        }
+        effect_param_t *rep = (effect_param_t *) pReplyData;
+        memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(int32_t));
+        ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM param %d, replySize %u",
+                *(int32_t *)rep->data, rep->vsize);
+        rep->status = Downmix_getParameter(pDownmixer, *(int32_t *)rep->data, &rep->vsize,
+                rep->data + sizeof(int32_t));
+        *replySize = sizeof(effect_param_t) + sizeof(int32_t) + rep->vsize;
+        break;
+    }
+    case EFFECT_CMD_SET_PARAM: {
+        ALOGV("Downmix_Command EFFECT_CMD_SET_PARAM cmdSize %d pCmdData %p, *replySize %u"
+                ", pReplyData %p", cmdSize, pCmdData, *replySize, pReplyData);
+        if (pCmdData == NULL || (cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)))
+                || pReplyData == NULL || replySize == NULL || *replySize != (int)sizeof(int32_t)) {
+            return -EINVAL;
+        }
+        effect_param_t *cmd = (effect_param_t *) pCmdData;
+        if (cmd->psize != sizeof(int32_t)) {
+            android_errorWriteLog(0x534e4554, "63662938");
+            return -EINVAL;
+        }
+        *(int *)pReplyData = Downmix_setParameter(pDownmixer, *(int32_t *)cmd->data,
+                cmd->vsize, cmd->data + sizeof(int32_t));
+        break;
+    }
+
+    case EFFECT_CMD_SET_PARAM_DEFERRED:
+        //FIXME implement
+        ALOGW("Downmix_Command command EFFECT_CMD_SET_PARAM_DEFERRED not supported, FIXME");
+        break;
+
+    case EFFECT_CMD_SET_PARAM_COMMIT:
+        //FIXME implement
+        ALOGW("Downmix_Command command EFFECT_CMD_SET_PARAM_COMMIT not supported, FIXME");
+        break;
+
+    case EFFECT_CMD_ENABLE:
+        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pDownmixer->state != DOWNMIX_STATE_INITIALIZED) {
+            return -ENOSYS;
+        }
+        pDownmixer->state = DOWNMIX_STATE_ACTIVE;
+        ALOGV("EFFECT_CMD_ENABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+
+    case EFFECT_CMD_DISABLE:
+        if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            return -EINVAL;
+        }
+        if (pDownmixer->state != DOWNMIX_STATE_ACTIVE) {
+            return -ENOSYS;
+        }
+        pDownmixer->state = DOWNMIX_STATE_INITIALIZED;
+        ALOGV("EFFECT_CMD_DISABLE() OK");
+        *(int *)pReplyData = 0;
+        break;
+
+    case EFFECT_CMD_SET_DEVICE:
+        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t)) {
+            return -EINVAL;
+        }
+        // FIXME change type if playing on headset vs speaker
+        ALOGV("Downmix_Command EFFECT_CMD_SET_DEVICE: %#x", *(uint32_t *)pCmdData);
+        break;
+
+    case EFFECT_CMD_SET_VOLUME: {
+        // audio output is always stereo => 2 channel volumes
+        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t) * 2) {
+            return -EINVAL;
+        }
+        // FIXME change volume
+        ALOGW("Downmix_Command command EFFECT_CMD_SET_VOLUME not supported, FIXME");
+        float left = (float)(*(uint32_t *)pCmdData) / (1 << 24);
+        float right = (float)(*((uint32_t *)pCmdData + 1)) / (1 << 24);
+        ALOGV("Downmix_Command EFFECT_CMD_SET_VOLUME: left %f, right %f ", left, right);
+        break;
+    }
+
+    case EFFECT_CMD_SET_AUDIO_MODE:
+        if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t)) {
+            return -EINVAL;
+        }
+        ALOGV("Downmix_Command EFFECT_CMD_SET_AUDIO_MODE: %u", *(uint32_t *)pCmdData);
+        break;
+
+    case EFFECT_CMD_SET_CONFIG_REVERSE:
+    case EFFECT_CMD_SET_INPUT_DEVICE:
+        // these commands are ignored by a downmix effect
+        break;
+
+    default:
+        ALOGW("Downmix_Command invalid command %u", cmdCode);
+        return -EINVAL;
+    }
+
+    return 0;
+}
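For EFFECT_CMD_SET_PARAM the payload is an effect_param_t whose data[] holds the 32-bit parameter id followed by the value, and the reply is a single 32-bit status. A hedged sketch that switches the downmix type on an existing handle; the header paths are assumptions, while the size choices mirror the checks above:

#include <hardware/audio_effect.h>         // assumed: effect_param_t, EFFECT_CMD_SET_PARAM
#include <audio_effects/effect_downmix.h>  // assumed: DOWNMIX_PARAM_TYPE, downmix_type_t
#include <string.h>

static int setDownmixType(effect_handle_t handle, downmix_type_t type) {
    // data[] layout: psize bytes of parameter id, then vsize bytes of value.
    constexpr uint32_t kCmdSize =
            sizeof(effect_param_t) + sizeof(int32_t) + sizeof(downmix_type_t);
    alignas(effect_param_t) uint8_t buf[kCmdSize] = {};
    effect_param_t *p = reinterpret_cast<effect_param_t *>(buf);
    p->psize = sizeof(int32_t);         // Downmix_Command insists on a 4-byte parameter id
    p->vsize = sizeof(downmix_type_t);  // Downmix_setParameter checks this exact size
    const int32_t paramId = DOWNMIX_PARAM_TYPE;
    memcpy(p->data, &paramId, sizeof(paramId));
    memcpy(p->data + sizeof(paramId), &type, sizeof(type));

    int32_t reply = 0;
    uint32_t replySize = sizeof(reply);  // must be exactly sizeof(int32_t)
    const int ret = (*handle)->command(handle, EFFECT_CMD_SET_PARAM, kCmdSize, p,
                                       &replySize, &reply);
    return ret != 0 ? ret : reply;
}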
+
+static int32_t Downmix_GetDescriptor(effect_handle_t self, effect_descriptor_t *pDescriptor)
+{
+    downmix_module_t *pDwnmxModule = (downmix_module_t *) self;
+
+    if (pDwnmxModule == NULL ||
+            pDwnmxModule->context.state == DOWNMIX_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(pDescriptor, &gDownmixDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+
+/*----------------------------------------------------------------------------
+ * Downmix internal functions
+ *--------------------------------------------------------------------------*/
+
+/*----------------------------------------------------------------------------
+ * Downmix_Init()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * Initialize downmix context and apply default parameters
+ *
+ * Inputs:
+ *  pDwmModule    pointer to downmix effect module
+ *
+ * Outputs:
+ *
+ * Returns:
+ *  0             indicates success
+ *
+ * Side Effects:
+ *  updates:
+ *           pDwmModule->context.type
+ *           pDwmModule->context.apply_volume_correction
+ *           pDwmModule->config.inputCfg
+ *           pDwmModule->config.outputCfg
+ *           pDwmModule->config.inputCfg.samplingRate
+ *           pDwmModule->config.outputCfg.samplingRate
+ *           pDwmModule->context.state
+ *  doesn't set:
+ *           pDwmModule->itfe
+ *
+ *----------------------------------------------------------------------------
+ */
+
+static int Downmix_Init(downmix_module_t *pDwmModule) {
+
+    ALOGV("Downmix_Init module %p", pDwmModule);
+    int ret = 0;
+
+    memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
+
+    pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pDwmModule->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    pDwmModule->config.inputCfg.channels = AUDIO_CHANNEL_OUT_7POINT1;
+    pDwmModule->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pDwmModule->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pDwmModule->config.inputCfg.bufferProvider.cookie = NULL;
+    pDwmModule->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+
+    pDwmModule->config.inputCfg.samplingRate = 44100;
+    pDwmModule->config.outputCfg.samplingRate = pDwmModule->config.inputCfg.samplingRate;
+
+    // set a default value for the access mode, but should be overwritten by caller
+    pDwmModule->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pDwmModule->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    pDwmModule->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pDwmModule->config.outputCfg.bufferProvider.getBuffer = NULL;
+    pDwmModule->config.outputCfg.bufferProvider.releaseBuffer = NULL;
+    pDwmModule->config.outputCfg.bufferProvider.cookie = NULL;
+    pDwmModule->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    ret = Downmix_Configure(pDwmModule, &pDwmModule->config, true);
+    if (ret != 0) {
+        ALOGV("Downmix_Init error %d on module %p", ret, pDwmModule);
+    } else {
+        pDwmModule->context.state = DOWNMIX_STATE_INITIALIZED;
+    }
+
+    return ret;
+}
+
+
+/*----------------------------------------------------------------------------
+ * Downmix_Configure()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ *  Set input and output audio configuration.
+ *
+ * Inputs:
+ *  pDwmModule  pointer to downmix effect module
+ *  pConfig     pointer to effect_config_t structure containing input
+ *                  and output audio parameters configuration
+ *  init        true if called from init function
+ *
+ * Outputs:
+ *
+ * Returns:
+ *  0           indicates success
+ *
+ * Side Effects:
+ *
+ *----------------------------------------------------------------------------
+ */
+
+static int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bool init) {
+
+    downmix_object_t *pDownmixer = &pDwmModule->context;
+
+    // Check configuration compatibility with build options, and effect capabilities
+    if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate
+        || pConfig->outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
+        || pConfig->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT
+        || pConfig->outputCfg.format != AUDIO_FORMAT_PCM_FLOAT) {
+        ALOGE("Downmix_Configure error: invalid config");
+        return -EINVAL;
+    }
+
+    if (&pDwmModule->config != pConfig) {
+        memcpy(&pDwmModule->config, pConfig, sizeof(effect_config_t));
+    }
+
+    if (init) {
+        pDownmixer->type = DOWNMIX_TYPE_FOLD;
+        pDownmixer->apply_volume_correction = false;
+        pDownmixer->input_channel_count = 8; // matches default input of AUDIO_CHANNEL_OUT_7POINT1
+    } else {
+        // when configuring the effect, do not allow a blank or unsupported channel mask
+        if (!Downmix_validChannelMask(pConfig->inputCfg.channels)) {
+            ALOGE("Downmix_Configure error: input channel mask(0x%x) not supported",
+                                                        pConfig->inputCfg.channels);
+            return -EINVAL;
+        }
+        pDownmixer->input_channel_count =
+                audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
+    }
+
+    Downmix_Reset(pDownmixer, init);
+
+    return 0;
+}
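Downmix_Configure(), reached through EFFECT_CMD_SET_CONFIG, only accepts float PCM on both sides, identical input and output sample rates, and a stereo output mask. A sketch of a configuration that passes those checks; the header paths are assumptions, and any supported input mask could replace 5.1:

#include <hardware/audio_effect.h>  // assumed: effect_config_t, EFFECT_* constants
#include <system/audio.h>           // assumed: AUDIO_CHANNEL_OUT_*, AUDIO_FORMAT_PCM_FLOAT

// Build a 5.1-to-stereo configuration accepted by Downmix_Configure().
static effect_config_t makeDownmixConfig(uint32_t sampleRate) {
    effect_config_t cfg = {};
    cfg.inputCfg.accessMode   = EFFECT_BUFFER_ACCESS_READ;
    cfg.inputCfg.format       = AUDIO_FORMAT_PCM_FLOAT;
    cfg.inputCfg.channels     = AUDIO_CHANNEL_OUT_5POINT1;
    cfg.inputCfg.samplingRate = sampleRate;
    cfg.inputCfg.mask         = EFFECT_CONFIG_ALL;

    cfg.outputCfg.accessMode   = EFFECT_BUFFER_ACCESS_WRITE;  // overwrite rather than accumulate
    cfg.outputCfg.format       = AUDIO_FORMAT_PCM_FLOAT;
    cfg.outputCfg.channels     = AUDIO_CHANNEL_OUT_STEREO;
    cfg.outputCfg.samplingRate = sampleRate;                  // must equal the input rate
    cfg.outputCfg.mask         = EFFECT_CONFIG_ALL;
    return cfg;
}

The structure is then sent with EFFECT_CMD_SET_CONFIG (cmdSize = sizeof(effect_config_t), reply = int), which routes here with init set to false.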
+
+
+/*----------------------------------------------------------------------------
+ * Downmix_Reset()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ *  Reset internal states.
+ *
+ * Inputs:
+ *  pDownmixer   pointer to downmix context
+ *  init         true if called from init function
+ *
+ * Outputs:
+ *
+ * Returns:
+ *  0            indicates success
+ *
+ * Side Effects:
+ *
+ *----------------------------------------------------------------------------
+ */
+
+static int Downmix_Reset(downmix_object_t* /* pDownmixer */, bool /* init */) {
+    // nothing to do here
+    return 0;
+}
+
+
+/*----------------------------------------------------------------------------
+ * Downmix_setParameter()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * Set a Downmix parameter
+ *
+ * Inputs:
+ *  pDownmixer    handle to instance data
+ *  param         parameter
+ *  pValue        pointer to parameter value
+ *  size          value size
+ *
+ * Outputs:
+ *
+ * Returns:
+ *  0             indicates success
+ *
+ * Side Effects:
+ *
+ *----------------------------------------------------------------------------
+ */
+static int Downmix_setParameter(
+        downmix_object_t *pDownmixer, int32_t param, uint32_t size, void *pValue) {
+
+    int16_t value16;
+    ALOGV("Downmix_setParameter, context %p, param %d, value16 %d, value32 %d",
+            pDownmixer, param, *(int16_t *)pValue, *(int32_t *)pValue);
+
+    switch (param) {
+
+      case DOWNMIX_PARAM_TYPE:
+        if (size != sizeof(downmix_type_t)) {
+            ALOGE("Downmix_setParameter(DOWNMIX_PARAM_TYPE) invalid size %u, should be %zu",
+                    size, sizeof(downmix_type_t));
+            return -EINVAL;
+        }
+        value16 = *(int16_t *)pValue;
+        ALOGV("set DOWNMIX_PARAM_TYPE, type %d", value16);
+        if (!((value16 > DOWNMIX_TYPE_INVALID) && (value16 <= DOWNMIX_TYPE_LAST))) {
+            ALOGE("Downmix_setParameter invalid DOWNMIX_PARAM_TYPE value %d", value16);
+            return -EINVAL;
+        } else {
+            pDownmixer->type = (downmix_type_t) value16;
+        }
+        break;
+
+      default:
+        ALOGE("Downmix_setParameter unknown parameter %d", param);
+        return -EINVAL;
+    }
+
+    return 0;
+} /* end Downmix_setParameter */
+
+
+/*----------------------------------------------------------------------------
+ * Downmix_getParameter()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * Get a Downmix parameter
+ *
+ * Inputs:
+ *  pDownmixer    handle to instance data
+ *  param         parameter
+ *  pValue        pointer to variable to hold retrieved value
+ *  pSize         pointer to value size: maximum size as input
+ *
+ * Outputs:
+ *  *pValue updated with parameter value
+ *  *pSize updated with actual value size
+ *
+ * Returns:
+ *  0             indicates success
+ *
+ * Side Effects:
+ *
+ *----------------------------------------------------------------------------
+ */
+static int Downmix_getParameter(
+        downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue) {
+    int16_t *pValue16;
+
+    switch (param) {
+
+    case DOWNMIX_PARAM_TYPE:
+      if (*pSize < sizeof(int16_t)) {
+          ALOGE("Downmix_getParameter invalid parameter size %u for DOWNMIX_PARAM_TYPE", *pSize);
+          return -EINVAL;
+      }
+      pValue16 = (int16_t *)pValue;
+      *pValue16 = (int16_t) pDownmixer->type;
+      *pSize = sizeof(int16_t);
+      ALOGV("Downmix_getParameter DOWNMIX_PARAM_TYPE is %d", *pValue16);
+      break;
+
+    default:
+      ALOGE("Downmix_getParameter unknown parameter %d", param);
+      return -EINVAL;
+    }
+
+    return 0;
+} /* end Downmix_getParameter */
+
+
+/*----------------------------------------------------------------------------
+ * Downmix_foldFromQuad()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * downmix a quad signal to stereo
+ *
+ * Inputs:
+ *  pSrc       quad audio samples to downmix
+ *  numFrames  the number of quad frames to downmix
+ *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
+ *               or overwrite pDst (when false)
+ *
+ * Outputs:
+ *  pDst       downmixed stereo audio samples
+ *
+ *----------------------------------------------------------------------------
+ */
+void Downmix_foldFromQuad(float *pSrc, float *pDst, size_t numFrames, bool accumulate) {
+    // sample at index 0 is FL
+    // sample at index 1 is FR
+    // sample at index 2 is RL
+    // sample at index 3 is RR
+    if (accumulate) {
+        while (numFrames) {
+            // FL + RL
+            pDst[0] = clamp_float(pDst[0] + ((pSrc[0] + pSrc[2]) / 2.0f));
+            // FR + RR
+            pDst[1] = clamp_float(pDst[1] + ((pSrc[1] + pSrc[3]) / 2.0f));
+            pSrc += 4;
+            pDst += 2;
+            numFrames--;
+        }
+    } else { // same code as above but without adding and clamping pDst[i] to itself
+        while (numFrames) {
+            // FL + RL
+            pDst[0] = clamp_float((pSrc[0] + pSrc[2]) / 2.0f);
+            // FR + RR
+            pDst[1] = clamp_float((pSrc[1] + pSrc[3]) / 2.0f);
+            pSrc += 4;
+            pDst += 2;
+            numFrames--;
+        }
+    }
+}
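+
+// Worked example (illustrative only, not used by the effect): folding one quad frame
+// {FL, FR, RL, RR} = {0.6f, 0.2f, 0.2f, 0.4f} with accumulate == false:
+//     float src[4] = {0.6f, 0.2f, 0.2f, 0.4f};
+//     float dst[2] = {};
+//     Downmix_foldFromQuad(src, dst, 1 /*numFrames*/, false /*accumulate*/);
+//     // dst[0] == (0.6f + 0.2f) / 2.0f == 0.4f, dst[1] == (0.2f + 0.4f) / 2.0f == 0.3f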
+
+/*----------------------------------------------------------------------------
+ * Downmix_foldFrom5Point1()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * downmix a 5.1 signal to stereo
+ *
+ * Inputs:
+ *  pSrc       5.1 audio samples to downmix
+ *  numFrames  the number of 5.1 frames to downmix
+ *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
+ *               or overwrite pDst (when false)
+ *
+ * Outputs:
+ *  pDst       downmixed stereo audio samples
+ *
+ *----------------------------------------------------------------------------
+ */
+void Downmix_foldFrom5Point1(float *pSrc, float *pDst, size_t numFrames, bool accumulate) {
+    float lt, rt, centerPlusLfeContrib; // left, right and center+LFE accumulators in float
+    // sample at index 0 is FL
+    // sample at index 1 is FR
+    // sample at index 2 is FC
+    // sample at index 3 is LFE
+    // sample at index 4 is RL
+    // sample at index 5 is RR
+    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
+    // for every sample
+    if (accumulate) {
+        while (numFrames) {
+            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
+            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
+                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
+            // FL + centerPlusLfeContrib + RL
+            lt = pSrc[0] + centerPlusLfeContrib + pSrc[4];
+            // FR + centerPlusLfeContrib + RR
+            rt = pSrc[1] + centerPlusLfeContrib + pSrc[5];
+            // accumulate in destination
+            pDst[0] = clamp_float(pDst[0] + (lt / 2.0f));
+            pDst[1] = clamp_float(pDst[1] + (rt / 2.0f));
+            pSrc += 6;
+            pDst += 2;
+            numFrames--;
+        }
+    } else { // same code as above but without adding and clamping pDst[i] to itself
+        while (numFrames) {
+            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
+            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
+                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
+            // FL + centerPlusLfeContrib + RL
+            lt = pSrc[0] + centerPlusLfeContrib + pSrc[4];
+            // FR + centerPlusLfeContrib + RR
+            rt = pSrc[1] + centerPlusLfeContrib + pSrc[5];
+            // store in destination
+            pDst[0] = clamp_float(lt / 2.0f); // differs from when accumulate is true above
+            pDst[1] = clamp_float(rt / 2.0f); // differs from when accumulate is true above
+            pSrc += 6;
+            pDst += 2;
+            numFrames--;
+        }
+    }
+}
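+
+// Worked example (illustrative only): for a 5.1 frame carrying signal on FC alone,
+// e.g. {FL, FR, FC, LFE, RL, RR} = {0.f, 0.f, 0.8f, 0.f, 0.f, 0.f} with accumulate == false,
+// both outputs become clamp_float((0.8f * MINUS_3_DB_IN_FLOAT) / 2.0f): the center is
+// attenuated by 3 dB and then halved together with the other contributions.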
+
+/*----------------------------------------------------------------------------
+ * Downmix_foldFrom7Point1()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * downmix a 7.1 signal to stereo
+ *
+ * Inputs:
+ *  pSrc       7.1 audio samples to downmix
+ *  numFrames  the number of 7.1 frames to downmix
+ *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
+ *               or overwrite pDst (when false)
+ *
+ * Outputs:
+ *  pDst       downmixed stereo audio samples
+ *
+ *----------------------------------------------------------------------------
+ */
+void Downmix_foldFrom7Point1(float *pSrc, float *pDst, size_t numFrames, bool accumulate) {
+    float lt, rt, centerPlusLfeContrib; // left, right and center+LFE accumulators in float
+    // sample at index 0 is FL
+    // sample at index 1 is FR
+    // sample at index 2 is FC
+    // sample at index 3 is LFE
+    // sample at index 4 is RL
+    // sample at index 5 is RR
+    // sample at index 6 is SL
+    // sample at index 7 is SR
+    // code is mostly duplicated between the two values of accumulate to avoid repeating the test
+    // for every sample
+    if (accumulate) {
+        while (numFrames) {
+            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
+            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
+                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
+            // FL + centerPlusLfeContrib + SL + RL
+            lt = pSrc[0] + centerPlusLfeContrib + pSrc[6] + pSrc[4];
+            // FR + centerPlusLfeContrib + SR + RR
+            rt = pSrc[1] + centerPlusLfeContrib + pSrc[7] + pSrc[5];
+            // accumulate in destination
+            pDst[0] = clamp_float(pDst[0] + (lt / 2.0f));
+            pDst[1] = clamp_float(pDst[1] + (rt / 2.0f));
+            pSrc += 8;
+            pDst += 2;
+            numFrames--;
+        }
+    } else { // same code as above but without adding and clamping pDst[i] to itself
+        while (numFrames) {
+            // centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
+            centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
+                    + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
+            // FL + centerPlusLfeContrib + SL + RL
+            lt = pSrc[0] + centerPlusLfeContrib + pSrc[6] + pSrc[4];
+            // FR + centerPlusLfeContrib + SR + RR
+            rt = pSrc[1] + centerPlusLfeContrib + pSrc[7] + pSrc[5];
+            // store in destination
+            pDst[0] = clamp_float(lt / 2.0f); // differs from when accumulate is true above
+            pDst[1] = clamp_float(rt / 2.0f); // differs from when accumulate is true above
+            pSrc += 8;
+            pDst += 2;
+            numFrames--;
+        }
+    }
+}
+
+/*----------------------------------------------------------------------------
+ * Downmix_foldGeneric()
+ *----------------------------------------------------------------------------
+ * Purpose:
+ * downmix to stereo a multichannel signal of arbitrary channel position mask.
+ *
+ * Inputs:
+ *  mask       the channel mask of pSrc
+ *  pSrc       multichannel audio buffer to downmix
+ *  numFrames  the number of multichannel frames to downmix
+ *  accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
+ *               or overwrite pDst (when false)
+ *
+ * Outputs:
+ *  pDst       downmixed stereo audio samples
+ *
+ * Returns: false if multichannel format is not supported
+ *
+ *----------------------------------------------------------------------------
+ */
+bool Downmix_foldGeneric(
+        uint32_t mask, float *pSrc, float *pDst, size_t numFrames, bool accumulate) {
+
+    if (!Downmix_validChannelMask(mask)) {
+        return false;
+    }
+    const int numChan = audio_channel_count_from_out_mask(mask);
+
+    // compute at what index each channel is: samples will be in the following order:
+    //   FL  FR  FC    LFE   BL  BR  BC    SL  SR
+    //
+    //  (transfer matrix)
+    //   FL  FR  FC    LFE   BL  BR  BC    SL  SR
+    //   0.5     0.353 0.353 0.5     0.353 0.5
+    //       0.5 0.353 0.353     0.5 0.353     0.5
+
+    // derive the indices for the transfer matrix columns that have non-zero values.
+    int indexFL = -1;
+    int indexFR = -1;
+    int indexFC = -1;
+    int indexLFE = -1;
+    int indexBL = -1;
+    int indexBR = -1;
+    int indexBC = -1;
+    int indexSL = -1;
+    int indexSR = -1;
+    int index = 0;
+    for (unsigned tmp = mask;
+         (tmp & (AUDIO_CHANNEL_OUT_7POINT1 | AUDIO_CHANNEL_OUT_BACK_CENTER)) != 0;
+         ++index) {
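+        // tmp & -tmp isolates the lowest set channel bit and tmp ^= lowestBit clears it,
+        // so the loop visits the channels in ascending bit order, i.e. in the same order
+        // the samples are interleaved; 'index' is the channel's offset within a frame.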
+        const unsigned lowestBit = tmp & -(signed)tmp;
+        switch (lowestBit) {
+        case AUDIO_CHANNEL_OUT_FRONT_LEFT:
+            indexFL = index;
+            break;
+        case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
+            indexFR = index;
+            break;
+        case AUDIO_CHANNEL_OUT_FRONT_CENTER:
+            indexFC = index;
+            break;
+        case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
+            indexLFE = index;
+            break;
+        case AUDIO_CHANNEL_OUT_BACK_LEFT:
+            indexBL = index;
+            break;
+        case AUDIO_CHANNEL_OUT_BACK_RIGHT:
+            indexBR = index;
+            break;
+        case AUDIO_CHANNEL_OUT_BACK_CENTER:
+            indexBC = index;
+            break;
+        case AUDIO_CHANNEL_OUT_SIDE_LEFT:
+            indexSL = index;
+            break;
+        case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
+            indexSR = index;
+            break;
+        }
+        tmp ^= lowestBit;
+    }
+
+    // With good branch prediction, this should run reasonably fast.
+    // Also consider using a transfer matrix form.
+    while (numFrames) {
+        // compute contribution of FC, BC and LFE
+        float centersLfeContrib = 0;
+        if (indexFC >= 0) centersLfeContrib = pSrc[indexFC];
+        if (indexLFE >= 0) centersLfeContrib += pSrc[indexLFE];
+        if (indexBC >= 0) centersLfeContrib += pSrc[indexBC];
+        centersLfeContrib *= MINUS_3_DB_IN_FLOAT;
+
+        float ch[2];
+        ch[0] = centersLfeContrib;
+        ch[1] = centersLfeContrib;
+
+        // mix in left / right channels
+        if (indexFL >= 0) ch[0] += pSrc[indexFL];
+        if (indexFR >= 0) ch[1] += pSrc[indexFR];
+
+        if (indexSL >= 0) ch[0] += pSrc[indexSL];
+        if (indexSR >= 0) ch[1] += pSrc[indexSR]; // pair checks ensure SR exists whenever SL does
+
+        if (indexBL >= 0) ch[0] += pSrc[indexBL];
+        if (indexBR >= 0) ch[1] += pSrc[indexBR]; // pair checks ensure BR exists whenever BL does
+
+        // scale to prevent overflow.
+        ch[0] *= 0.5f;
+        ch[1] *= 0.5f;
+
+        if (accumulate) {
+            ch[0] += pDst[0];
+            ch[1] += pDst[1];
+        }
+
+        pDst[0] = clamp_float(ch[0]);
+        pDst[1] = clamp_float(ch[1]);
+        pSrc += numChan;
+        pDst += 2;
+        numFrames--;
+    }
+    return true;
+}
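+
+// Usage sketch (illustrative only): one 5.1 frame with signal on FL and FR only.
+//     float src[6] = {0.5f, 0.5f, 0.f, 0.f, 0.f, 0.f};
+//     float dst[2] = {};
+//     if (Downmix_foldGeneric(AUDIO_CHANNEL_OUT_5POINT1, src, dst,
+//             1 /*numFrames*/, false /*accumulate*/)) {
+//         // dst[0] == dst[1] == 0.25f, i.e. each front channel scaled by 0.5f.
+//     }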
diff --git a/media/libeffects/downmix/EffectDownmix.h b/media/libeffects/downmix/EffectDownmix.h
index c1be0f2..1206520 100644
--- a/media/libeffects/downmix/EffectDownmix.h
+++ b/media/libeffects/downmix/EffectDownmix.h
@@ -18,97 +18,9 @@
 #define ANDROID_EFFECTDOWNMIX_H_
 
 #include <audio_effects/effect_downmix.h>
-#include <audio_utils/primitives.h>
 #include <system/audio.h>
 
-/*------------------------------------
- * definitions
- *------------------------------------
-*/
-
-#define DOWNMIX_OUTPUT_CHANNELS AUDIO_CHANNEL_OUT_STEREO
-#ifdef BUILD_FLOAT
-#define LVM_FLOAT float
-#endif
-typedef enum {
-    DOWNMIX_STATE_UNINITIALIZED,
-    DOWNMIX_STATE_INITIALIZED,
-    DOWNMIX_STATE_ACTIVE,
-} downmix_state_t;
-
-/* parameters for each downmixer */
-typedef struct {
-    downmix_state_t state;
-    downmix_type_t type;
-    bool apply_volume_correction;
-    uint8_t input_channel_count;
-} downmix_object_t;
-
-
-typedef struct downmix_module_s {
-    const struct effect_interface_s *itfe;
-    effect_config_t config;
-    downmix_object_t context;
-} downmix_module_t;
-
-const uint32_t kSides = AUDIO_CHANNEL_OUT_SIDE_LEFT | AUDIO_CHANNEL_OUT_SIDE_RIGHT;
-const uint32_t kBacks = AUDIO_CHANNEL_OUT_BACK_LEFT | AUDIO_CHANNEL_OUT_BACK_RIGHT;
-const uint32_t kUnsupported =
-        AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER | AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER |
-        AUDIO_CHANNEL_OUT_TOP_CENTER |
-        AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT |
-        AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER |
-        AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT |
-        AUDIO_CHANNEL_OUT_TOP_BACK_LEFT |
-        AUDIO_CHANNEL_OUT_TOP_BACK_CENTER |
-        AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT;
-
-/*------------------------------------
- * Effect API
- *------------------------------------
-*/
-int32_t DownmixLib_Create(const effect_uuid_t *uuid,
-        int32_t sessionId,
-        int32_t ioId,
-        effect_handle_t *pHandle);
-int32_t DownmixLib_Release(effect_handle_t handle);
-int32_t DownmixLib_GetDescriptor(const effect_uuid_t *uuid,
-        effect_descriptor_t *pDescriptor);
-
-static int Downmix_Process(effect_handle_t self,
-        audio_buffer_t *inBuffer,
-        audio_buffer_t *outBuffer);
-static int Downmix_Command(effect_handle_t self,
-        uint32_t cmdCode,
-        uint32_t cmdSize,
-        void *pCmdData,
-        uint32_t *replySize,
-        void *pReplyData);
-static int Downmix_GetDescriptor(effect_handle_t self,
-        effect_descriptor_t *pDescriptor);
-
-
-/*------------------------------------
- * internal functions
- *------------------------------------
-*/
-int Downmix_Init(downmix_module_t *pDwmModule);
-int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bool init);
-int Downmix_Reset(downmix_object_t *pDownmixer, bool init);
-int Downmix_setParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t size, void *pValue);
-int Downmix_getParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue);
-#ifdef BUILD_FLOAT
-void Downmix_foldFromQuad(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFrom5Point1(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFrom7Point1(LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate);
-bool Downmix_foldGeneric(
-        uint32_t mask, LVM_FLOAT *pSrc, LVM_FLOAT *pDst, size_t numFrames, bool accumulate);
-#else
-void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFrom5Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFrom7Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-bool Downmix_foldGeneric(
-        uint32_t mask, int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-#endif
+// Use the following declaration to obtain the Downmix library information.
+// extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
 
 #endif /*ANDROID_EFFECTDOWNMIX_H_*/
diff --git a/media/libeffects/downmix/benchmark/Android.bp b/media/libeffects/downmix/benchmark/Android.bp
new file mode 100644
index 0000000..10f14e2
--- /dev/null
+++ b/media/libeffects/downmix/benchmark/Android.bp
@@ -0,0 +1,38 @@
+// Build testbench for downmix module.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_downmix_license",
+    ],
+}
+
+cc_benchmark {
+    name: "downmix_benchmark",
+    host_supported: false,
+    vendor: true,
+    include_dirs: [
+        "frameworks/av/media/libeffects/downmix",
+    ],
+    header_libs: [
+        "libaudioeffects",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libdownmix",
+    ],
+    srcs: [
+        "downmix_benchmark.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
diff --git a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
new file mode 100644
index 0000000..ee169c2
--- /dev/null
+++ b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <random>
+#include <vector>
+
+#include <audio_effects/effect_downmix.h>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/Statistics.h>
+#include <benchmark/benchmark.h>
+#include <log/log.h>
+#include <system/audio.h>
+
+#include "EffectDownmix.h"
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+static constexpr audio_channel_mask_t kChannelPositionMasks[] = {
+    AUDIO_CHANNEL_OUT_FRONT_LEFT,
+    AUDIO_CHANNEL_OUT_FRONT_CENTER,
+    AUDIO_CHANNEL_OUT_STEREO,
+    AUDIO_CHANNEL_OUT_2POINT1,
+    AUDIO_CHANNEL_OUT_2POINT0POINT2,
+    AUDIO_CHANNEL_OUT_QUAD,
+    AUDIO_CHANNEL_OUT_QUAD_BACK,
+    AUDIO_CHANNEL_OUT_QUAD_SIDE,
+    AUDIO_CHANNEL_OUT_SURROUND,
+    AUDIO_CHANNEL_OUT_2POINT1POINT2,
+    AUDIO_CHANNEL_OUT_3POINT0POINT2,
+    AUDIO_CHANNEL_OUT_PENTA,
+    AUDIO_CHANNEL_OUT_3POINT1POINT2,
+    AUDIO_CHANNEL_OUT_5POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1_BACK,
+    AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+    AUDIO_CHANNEL_OUT_6POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1POINT2,
+    AUDIO_CHANNEL_OUT_7POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1POINT4,
+    AUDIO_CHANNEL_OUT_7POINT1POINT2,
+    AUDIO_CHANNEL_OUT_7POINT1POINT4,
+    AUDIO_CHANNEL_OUT_13POINT_360RA,
+    AUDIO_CHANNEL_OUT_22POINT2,
+};
+
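+// UUID of the downmix effect implementation, passed to create_effect() below.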
+static constexpr effect_uuid_t downmix_uuid = {
+    0x93f04452, 0xe4fe, 0x41cc, 0x91f9, {0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}};
+
+static constexpr size_t kFrameCount = 1000;
+
+/*
+Pixel 3XL
+downmix_benchmark:
+  #BM_Downmix/0     4723 ns    4708 ns       148694
+  #BM_Downmix/1     4717 ns    4702 ns       148873
+  #BM_Downmix/2     4803 ns    4788 ns       145893
+  #BM_Downmix/3     5056 ns    5041 ns       139110
+  #BM_Downmix/4     4710 ns    4696 ns       149625
+  #BM_Downmix/5     1514 ns    1509 ns       463694
+  #BM_Downmix/6     1513 ns    1509 ns       463451
+  #BM_Downmix/7     1516 ns    1511 ns       463899
+  #BM_Downmix/8     4445 ns    4431 ns       157831
+  #BM_Downmix/9     5081 ns    5065 ns       138412
+  #BM_Downmix/10    4354 ns    4341 ns       161247
+  #BM_Downmix/11    4411 ns    4397 ns       158893
+  #BM_Downmix/12    4434 ns    4420 ns       157992
+  #BM_Downmix/13    4845 ns    4830 ns       144873
+  #BM_Downmix/14    4851 ns    4835 ns       144954
+  #BM_Downmix/15    4884 ns    4870 ns       144233
+  #BM_Downmix/16    5832 ns    5813 ns       120565
+  #BM_Downmix/17    5241 ns    5224 ns       133927
+  #BM_Downmix/18    5044 ns    5028 ns       139131
+  #BM_Downmix/19    5244 ns    5227 ns       132315
+  #BM_Downmix/20    5943 ns    5923 ns       117759
+  #BM_Downmix/21    5990 ns    5971 ns       117263
+  #BM_Downmix/22    4468 ns    4454 ns       156689
+  #BM_Downmix/23    7306 ns    7286 ns        95911
+--
+downmix_benchmark: (generic fold)
+  #BM_Downmix/0     4722 ns    4707 ns       149847
+  #BM_Downmix/1     4714 ns    4698 ns       148748
+  #BM_Downmix/2     4794 ns    4779 ns       145661
+  #BM_Downmix/3     5053 ns    5035 ns       139172
+  #BM_Downmix/4     4695 ns    4678 ns       149762
+  #BM_Downmix/5     4381 ns    4368 ns       159675
+  #BM_Downmix/6     4387 ns    4373 ns       160267
+  #BM_Downmix/7     4732 ns    4717 ns       148514
+  #BM_Downmix/8     4430 ns    4415 ns       158133
+  #BM_Downmix/9     5101 ns    5084 ns       138353
+  #BM_Downmix/10    4356 ns    4343 ns       160821
+  #BM_Downmix/11    4397 ns    4383 ns       159995
+  #BM_Downmix/12    4438 ns    4424 ns       158117
+  #BM_Downmix/13    5243 ns    5226 ns       133863
+  #BM_Downmix/14    5259 ns    5242 ns       131855
+  #BM_Downmix/15    5245 ns    5228 ns       133686
+  #BM_Downmix/16    5829 ns    5809 ns       120543
+  #BM_Downmix/17    5245 ns    5228 ns       133533
+  #BM_Downmix/18    5935 ns    5916 ns       118282
+  #BM_Downmix/19    5263 ns    5245 ns       133657
+  #BM_Downmix/20    5998 ns    5978 ns       114693
+  #BM_Downmix/21    5989 ns    5969 ns       117450
+  #BM_Downmix/22    4442 ns    4431 ns       157913
+  #BM_Downmix/23    7309 ns    7290 ns        95797
+*/
+
+static void BM_Downmix(benchmark::State& state) {
+    const audio_channel_mask_t channelMask = kChannelPositionMasks[state.range(0)];
+    const size_t channelCount = audio_channel_count_from_out_mask(channelMask);
+    const int sampleRate = 48000;
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(channelMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::vector<float> input(kFrameCount * channelCount);
+    std::vector<float> output(kFrameCount * 2);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    effect_handle_t effectHandle = nullptr;
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(
+            &downmix_uuid, 1, 1, &effectHandle);
+        status != 0) {
+        ALOGE("create_effect returned an error = %d\n", status);
+        return;
+    }
+
+    effect_config_t config{};
+    config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    config.inputCfg.bufferProvider.getBuffer = nullptr;
+    config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+    config.inputCfg.bufferProvider.cookie = nullptr;
+    config.inputCfg.mask = EFFECT_CONFIG_ALL;
+
+    config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE;
+    config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    config.outputCfg.bufferProvider.getBuffer = nullptr;
+    config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+    config.outputCfg.bufferProvider.cookie = nullptr;
+    config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    config.inputCfg.samplingRate = sampleRate;
+    config.inputCfg.channels = channelMask;
+
+    config.outputCfg.samplingRate = sampleRate;
+    config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; // output always stereo
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status = (*effectHandle)
+            ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                    &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    if (int status = (*effectHandle)
+            ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+        status != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return;
+    }
+
+    // Run the test
+    for (auto _ : state) {
+        benchmark::DoNotOptimize(input.data());
+        benchmark::DoNotOptimize(output.data());
+
+        audio_buffer_t inBuffer = {.frameCount = kFrameCount, .f32 = input.data()};
+        audio_buffer_t outBuffer = {.frameCount = kFrameCount, .f32 = output.data()};
+        (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void DownmixArgs(benchmark::internal::Benchmark* b) {
+    for (int i = 0; i < (int)std::size(kChannelPositionMasks); i++) {
+        b->Args({i});
+    }
+}
+
+BENCHMARK(BM_Downmix)->Apply(DownmixArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 63afc54..4940117 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -1,4 +1,52 @@
 // Build testbench for downmix module.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_downmix_license",
+    ],
+}
+
+// This is a gtest unit test.
+//
+// Use "atest downmix_tests" to run.
+cc_test {
+    name:"downmix_tests",
+    gtest: true,
+    host_supported: true,
+    vendor: true,
+    include_dirs: [
+        "frameworks/av/media/libeffects/downmix",
+    ],
+    header_libs: [
+        "libaudioeffects",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libdownmix",
+    ],
+    srcs: [
+        "downmix_tests.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+// This is a test application which generates downmixed files for regression
+// analysis.
+//
+// See build_and_run_all_unit_tests.sh for a test script that runs this
+// application, then compares the files it outputs in a local directory
+// on the device (/data/local/tmp/downmixtest/).
 cc_test {
     name:"downmixtest",
     host_supported: false,
diff --git a/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh b/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
index d0faebe..8aadfbf 100755
--- a/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/downmix/tests/build_and_run_all_unit_tests.sh
@@ -39,8 +39,7 @@
 echo "testing Downmix"
 adb shell mkdir $testdir
 
-adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw \
-$testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
 adb push $OUT/testcases/downmixtest/arm64/downmixtest $testdir
 
 #run the downmix test application for test.
diff --git a/media/libeffects/downmix/tests/downmix_tests.cpp b/media/libeffects/downmix/tests/downmix_tests.cpp
new file mode 100644
index 0000000..d4b7a3a
--- /dev/null
+++ b/media/libeffects/downmix/tests/downmix_tests.cpp
@@ -0,0 +1,253 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+
+#include "EffectDownmix.h"
+
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/Statistics.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+static constexpr audio_channel_mask_t kChannelPositionMasks[] = {
+    AUDIO_CHANNEL_OUT_FRONT_LEFT, // Legacy: the downmix effect treats MONO as FRONT_LEFT only.
+                                  // The AudioMixer interprets MONO as a special case requiring
+                                  // channel replication, bypassing the downmix effect.
+    AUDIO_CHANNEL_OUT_FRONT_CENTER,
+    AUDIO_CHANNEL_OUT_STEREO,
+    AUDIO_CHANNEL_OUT_2POINT1,
+    AUDIO_CHANNEL_OUT_2POINT0POINT2,
+    AUDIO_CHANNEL_OUT_QUAD,
+    AUDIO_CHANNEL_OUT_QUAD_BACK,
+    AUDIO_CHANNEL_OUT_QUAD_SIDE,
+    AUDIO_CHANNEL_OUT_SURROUND,
+    AUDIO_CHANNEL_OUT_2POINT1POINT2,
+    AUDIO_CHANNEL_OUT_3POINT0POINT2,
+    AUDIO_CHANNEL_OUT_PENTA,
+    AUDIO_CHANNEL_OUT_3POINT1POINT2,
+    AUDIO_CHANNEL_OUT_5POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1_BACK,
+    AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+    AUDIO_CHANNEL_OUT_6POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1POINT2,
+    AUDIO_CHANNEL_OUT_7POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1POINT4,
+    AUDIO_CHANNEL_OUT_7POINT1POINT2,
+    AUDIO_CHANNEL_OUT_7POINT1POINT4,
+    AUDIO_CHANNEL_OUT_13POINT_360RA,
+    AUDIO_CHANNEL_OUT_22POINT2,
+};
+
+static constexpr audio_channel_mask_t kConsideredChannels =
+    (audio_channel_mask_t)(AUDIO_CHANNEL_OUT_7POINT1 | AUDIO_CHANNEL_OUT_BACK_CENTER);
+
+// Downmix doesn't change with sample rate
+static constexpr size_t kSampleRates[] = {
+    48000,
+};
+
+// Our nearness tolerance is 16x the bit that doesn't fit the mantissa (8x epsilon below).
+// This holds as long as the values we add have similar exponents, bearing in mind that
+// errors accumulate roughly as sqrt(N) (random walk, law of large numbers, etc.).
+#define EXPECT_NEAR_EPSILON(e, v) EXPECT_NEAR((e), (v), \
+        abs((e) * std::numeric_limits<std::decay_t<decltype(e)>>::epsilon() * 8))
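+// For example, with e == 0.25f the allowed error is 0.25f * FLT_EPSILON * 8, about 2.4e-7.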
+
+template<typename T>
+static auto channelStatistics(const std::vector<T>& input, size_t channels) {
+    std::vector<android::audio_utils::Statistics<T>> result(channels);
+    const size_t frames = input.size() / channels;
+    if (frames > 0) {
+        const float *fptr = input.data();
+        for (size_t i = 0; i < frames; ++i) {
+            for (size_t j = 0; j < channels; ++j) {
+                result[j].add(*fptr++);
+            }
+        }
+    }
+    return result;
+}
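+// For example, for interleaved stereo data {L0, R0, L1, R1, ...},
+// channelStatistics(data, 2)[0].getVariance() is the variance of the left channel.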
+
+using DownmixParam = std::tuple<int /* sample rate */,  int /* channel mask */>;
+class DownmixTest : public ::testing::TestWithParam<DownmixParam> {
+public:
+    static constexpr effect_uuid_t downmix_uuid_ = {
+        0x93f04452, 0xe4fe, 0x41cc, 0x91f9, {0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}};
+    static constexpr size_t FRAME_LENGTH = 256;
+
+    void testBalance(int sampleRate, audio_channel_mask_t channelMask) {
+        using namespace ::android::audio_utils::channels;
+
+        size_t frames = 100;
+        unsigned outChannels = 2;
+        unsigned inChannels = audio_channel_count_from_out_mask(channelMask);
+        std::vector<float> input(frames * inChannels);
+        std::vector<float> output(frames * outChannels);
+
+        double savedPower[32][2]{};
+        for (unsigned i = 0, channel = channelMask; channel != 0; ++i) {
+            const int index = __builtin_ctz(channel);
+            ASSERT_LT(index, FCC_24);
+            const int pairIndex = pairIdxFromChannelIdx(index);
+            const AUDIO_GEOMETRY_SIDE side = sideFromChannelIdx(index);
+            const int channelBit = 1 << index;
+            channel &= ~channelBit;
+
+            // Generate a +1, -1 alternating stream in one channel, which has variance 1.
+            auto indata = input.data();
+            for (unsigned j = 0; j < frames; ++j) {
+                for (unsigned k = 0; k < inChannels; ++k) {
+                    *indata++ = (k == i) ? (j & 1 ? -1 : 1) : 0;
+                }
+            }
+            run(sampleRate, channelMask, input, output, frames);
+
+            auto stats = channelStatistics(output, 2 /* channels */);
+            // printf("power: %s %s\n", stats[0].toString().c_str(), stats[1].toString().c_str());
+            double power[2] = { stats[0].getVariance(), stats[1].getVariance() };
+
+            // Check symmetric power for pair channels on exchange of left/right position.
+            // to do this, we save previous power measurements.
+            if (pairIndex >= 0 && pairIndex < index) {
+                EXPECT_NEAR_EPSILON(power[0], savedPower[pairIndex][1]);
+                EXPECT_NEAR_EPSILON(power[1], savedPower[pairIndex][0]);
+            }
+            savedPower[index][0] = power[0];
+            savedPower[index][1] = power[1];
+
+            // Confirm exactly the mix amount prescribed by the existing downmix effect.
+            // For future changes to the downmix effect, the nearness tolerance needs to be
+            // relaxed when comparing against the S (or earlier) behavior.
+            if ((channelBit & kConsideredChannels) == 0) {
+                // for channels not considered, expect 0 power for legacy downmix
+                EXPECT_EQ(0.f, power[0]);
+                EXPECT_EQ(0.f, power[1]);
+                continue;
+            }
+            constexpr float POWER_TOLERANCE = 0.01;  // for variance sum error.
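+            // A +/-1 alternating input has variance 1. Left/right channels reach one output
+            // with gain 0.5, so the expected output variance is 0.5^2 = 0.25. Center/LFE
+            // channels get an extra -3dB (x 0.707) and feed both outputs, giving
+            // (0.5 * 0.707)^2 = 0.125 per output.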
+            switch (side) {
+            case AUDIO_GEOMETRY_SIDE_LEFT:
+                EXPECT_NEAR(0.25f, power[0], POWER_TOLERANCE);
+                EXPECT_EQ(0.f, power[1]);
+                break;
+            case AUDIO_GEOMETRY_SIDE_RIGHT:
+                EXPECT_EQ(0.f, power[0]);
+                EXPECT_NEAR(0.25f, power[1], POWER_TOLERANCE);
+                break;
+            case AUDIO_GEOMETRY_SIDE_CENTER:
+                EXPECT_NEAR(0.125f, power[0], POWER_TOLERANCE);
+                EXPECT_NEAR(0.125f, power[1], POWER_TOLERANCE);
+                EXPECT_NEAR_EPSILON(power[0], power[1]);
+                break;
+            }
+        }
+    }
+
+    void run(int sampleRate, audio_channel_mask_t channelMask,
+            std::vector<float>& input, std::vector<float>& output, size_t frames) {
+        reconfig(sampleRate, channelMask);
+
+        ASSERT_EQ(frames * inputChannelCount_, input.size());
+        ASSERT_EQ(frames * outputChannelCount_, output.size());
+
+        const int32_t sessionId = 0;
+        const int32_t ioId = 0;
+        int32_t err = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(
+                &downmix_uuid_, sessionId, ioId,  &handle_);
+        ASSERT_EQ(0, err);
+
+        const struct effect_interface_s * const downmixApi = *handle_;
+        int32_t reply = 0;
+        uint32_t replySize = (uint32_t)sizeof(reply);
+        err = (downmixApi->command)(
+                handle_, EFFECT_CMD_SET_CONFIG,
+                sizeof(effect_config_t), &config_, &replySize, &reply);
+        ASSERT_EQ(0, err);
+        err = (downmixApi->command)(
+                handle_, EFFECT_CMD_ENABLE,
+                0, nullptr, &replySize, &reply);
+        ASSERT_EQ(0, err);
+
+        process(input, output, frames);
+        err = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(handle_);
+        ASSERT_EQ(0, err);
+    }
+
+private:
+    void reconfig(int sampleRate, audio_channel_mask_t channelMask) {
+        config_.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+        config_.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+        config_.inputCfg.bufferProvider.getBuffer = nullptr;
+        config_.inputCfg.bufferProvider.releaseBuffer = nullptr;
+        config_.inputCfg.bufferProvider.cookie = nullptr;
+        config_.inputCfg.mask = EFFECT_CONFIG_ALL;
+
+        config_.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE;
+        config_.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+        config_.outputCfg.bufferProvider.getBuffer = nullptr;
+        config_.outputCfg.bufferProvider.releaseBuffer = nullptr;
+        config_.outputCfg.bufferProvider.cookie = nullptr;
+        config_.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+        config_.inputCfg.samplingRate = sampleRate;
+        config_.inputCfg.channels = channelMask;
+        inputChannelCount_ = audio_channel_count_from_out_mask(config_.inputCfg.channels);
+
+        config_.outputCfg.samplingRate = sampleRate;
+        config_.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; // output always stereo
+        outputChannelCount_ = audio_channel_count_from_out_mask(config_.outputCfg.channels);
+    }
+
+    void process(std::vector<float> &input, std::vector<float> &output, size_t frames) const {
+        const struct effect_interface_s * const downmixApi = *handle_;
+
+        for (size_t pos = 0; pos < frames;) {
+            const size_t transfer = std::min(frames - pos, FRAME_LENGTH);
+            audio_buffer_t inbuffer{.frameCount = transfer,
+                .f32 = input.data() + pos * inputChannelCount_};
+            audio_buffer_t outbuffer{.frameCount = transfer,
+                .f32 = output.data() + pos * outputChannelCount_};
+            const int32_t err = (downmixApi->process)(handle_, &inbuffer, &outbuffer);
+            ASSERT_EQ(0, err);
+            pos += transfer;
+        }
+    }
+
+    effect_handle_t handle_{};
+    effect_config_t config_{};
+    int outputChannelCount_{};
+    int inputChannelCount_{};
+};
+
+TEST_P(DownmixTest, basic) {
+    testBalance(kSampleRates[std::get<0>(GetParam())],
+            kChannelPositionMasks[std::get<1>(GetParam())]);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        DownmixTestAll, DownmixTest,
+        ::testing::Combine(
+                ::testing::Range(0, (int)std::size(kSampleRates)),
+                ::testing::Range(0, (int)std::size(kChannelPositionMasks))
+                ));
+
+int main(int argc, /* const */ char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    return status;
+}
diff --git a/media/libeffects/downmix/tests/downmixtest.cpp b/media/libeffects/downmix/tests/downmixtest.cpp
index 71f83e5..4076320 100644
--- a/media/libeffects/downmix/tests/downmixtest.cpp
+++ b/media/libeffects/downmix/tests/downmixtest.cpp
@@ -29,7 +29,6 @@
 #define MAX_NUM_CHANNELS 8
 
 struct downmix_cntxt_s {
-  effect_descriptor_t desc;
   effect_handle_t handle;
   effect_config_t config;
 
@@ -87,13 +86,12 @@
 }
 
 int32_t DownmixConfiureAndEnable(downmix_cntxt_s *pDescriptor) {
-  effect_handle_t *effectHandle = &pDescriptor->handle;
-  downmix_module_t *downmixEffectHandle = (downmix_module_t *)*effectHandle;
-  const struct effect_interface_s *Downmix_api = downmixEffectHandle->itfe;
+  effect_handle_t effectHandle = pDescriptor->handle;
+  const struct effect_interface_s *Downmix_api = *effectHandle;
   int32_t err = 0;
   uint32_t replySize = (uint32_t)sizeof(err);
 
-  err = (Downmix_api->command)(*effectHandle, EFFECT_CMD_SET_CONFIG,
+  err = (Downmix_api->command)(effectHandle, EFFECT_CMD_SET_CONFIG,
                                sizeof(effect_config_t), &(pDescriptor->config),
                                &replySize, &err);
   if (err != 0) {
@@ -101,7 +99,7 @@
     return err;
   }
 
-  err = ((Downmix_api->command))(*effectHandle, EFFECT_CMD_ENABLE, 0, nullptr,
+  err = ((Downmix_api->command))(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr,
                                  &replySize, &err);
   if (err != 0) {
     ALOGE("Downmix command to enable effect returned an error %d", err);
@@ -112,9 +110,8 @@
 
 int32_t DownmixExecute(downmix_cntxt_s *pDescriptor, FILE *finp,
                        FILE *fout) {
-  effect_handle_t *effectHandle = &pDescriptor->handle;
-  downmix_module_t *downmixEffectHandle = (downmix_module_t *)*effectHandle;
-  const struct effect_interface_s *Downmix_api = downmixEffectHandle->itfe;
+  effect_handle_t effectHandle = pDescriptor->handle;
+  const struct effect_interface_s *Downmix_api = *effectHandle;
 
   const int numFileChannels = pDescriptor->numFileChannels;
   const int numProcessChannels = pDescriptor->numProcessChannels;
@@ -150,7 +147,7 @@
     memcpy_to_float_from_i16(inFloat.data(), inS16.data(),
                              FRAME_LENGTH * numProcessChannels);
 
-    const int32_t err = (Downmix_api->process)(*effectHandle, pinbuf, poutbuf);
+    const int32_t err = (Downmix_api->process)(effectHandle, pinbuf, poutbuf);
     if (err != 0) {
       ALOGE("DownmixProcess returned an error %d", err);
       return -1;
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index eafc483..84131a4 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -13,6 +13,25 @@
 // limitations under the License.
 
 // DynamicsProcessing library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_dynamicsproc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_dynamicsproc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libdynproc",
 
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index ddbfdd8..22838a3 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libeffects_headers",
     vendor_available: true,
diff --git a/media/libeffects/factory/EffectsConfigLoader.c b/media/libeffects/factory/EffectsConfigLoader.c
index fcef36f..e23530e 100644
--- a/media/libeffects/factory/EffectsConfigLoader.c
+++ b/media/libeffects/factory/EffectsConfigLoader.c
@@ -394,7 +394,7 @@
        }
        sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object;
        effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object);
-       // Since we return a dummy descriptor for the proxy during
+       // Since we return a stub descriptor for the proxy during
        // get_descriptor call,we replace it with the correspoding
        // sw effect descriptor, but with Proxy UUID
        // check for Sw desc
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 505be7c..30a9007 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -283,7 +283,7 @@
             }
             listPush(effectLoadResult.effectDesc.get(), subEffectList);
 
-            // Since we return a dummy descriptor for the proxy during
+            // Since we return a stub descriptor for the proxy during
             // get_descriptor call, we replace it with the corresponding
             // sw effect descriptor, but keep the Proxy UUID
             *effectLoadResult.effectDesc = *swEffectLoadResult.effectDesc;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
new file mode 100644
index 0000000..a660957
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -0,0 +1,59 @@
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// HapticGenerator library
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_shared {
+    name: "libhapticgenerator",
+
+    vendor: true,
+
+    srcs: [
+        "EffectHapticGenerator.cpp",
+        "Processors.cpp",
+    ],
+
+    cflags: [
+        "-O2", // Turning on the optimization in order to reduce effect processing time.
+               // The latency is around 1/5 less than without the optimization.
+        "-Wall",
+        "-Werror",
+        "-ffast-math", // This is needed for the non-zero coefficients optimization for
+                       // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+                       // with/without `-ffast-math` for more context.
+        "-fvisibility=hidden",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "liblog",
+        "libutils",
+        "libvibrator",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..65a20a7
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -0,0 +1,568 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "EffectHapticGenerator.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include <errno.h>
+#include <inttypes.h>
+#include <math.h>
+
+#include <audio_effects/effect_hapticgenerator.h>
+#include <audio_utils/format.h>
+#include <system/audio.h>
+
+static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
+static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
+static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
+
+// This is the only symbol that needs to be exported
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "HapticGenerator Library",
+        .implementor = "The Android Open Source Project",
+        .create_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Create,
+        .release_effect = android::audio_effect::haptic_generator::HapticGeneratorLib_Release,
+        .get_descriptor = android::audio_effect::haptic_generator::HapticGeneratorLib_GetDescriptor,
+};
+
+namespace android::audio_effect::haptic_generator {
+
+// effect_handle_t interface implementation for haptic generator effect
+const struct effect_interface_s gHapticGeneratorInterface = {
+        HapticGenerator_Process,
+        HapticGenerator_Command,
+        HapticGenerator_GetDescriptor,
+        nullptr /* no process_reverse function, no reference stream needed */
+};
+
+//-----------------------------------------------------------------------------
+// Effect Descriptor
+//-----------------------------------------------------------------------------
+
+// UUIDs for effect types have been generated from http://www.itu.int/ITU-T/asn1/uuid.html
+// Haptic Generator
+static const effect_descriptor_t gHgDescriptor = {
+        FX_IID_HAPTICGENERATOR_, // type
+        {0x97c4acd1, 0x8b82, 0x4f2f, 0x832e, {0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}}, // uuid
+        EFFECT_CONTROL_API_VERSION,
+        EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST,
+        0, // FIXME what value should be reported? // cpu load
+        0, // FIXME what value should be reported? // memory usage
+        "Haptic Generator",
+        "The Android Open Source Project"
+};
+
+//-----------------------------------------------------------------------------
+// Internal functions
+//-----------------------------------------------------------------------------
+
+namespace {
+
+int HapticGenerator_Init(struct HapticGeneratorContext *context) {
+    context->itfe = &gHapticGeneratorInterface;
+
+    context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.inputCfg.samplingRate = 0;
+    context->config.inputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.inputCfg.bufferProvider.cookie = nullptr;
+    context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    context->config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    context->config.outputCfg.samplingRate = 0;
+    context->config.outputCfg.bufferProvider.getBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.releaseBuffer = nullptr;
+    context->config.outputCfg.bufferProvider.cookie = nullptr;
+    context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+    memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
+    context->param.hapticChannelCount = 0;
+    context->param.audioChannelCount = 0;
+    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+
+    context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
+    context->param.bpfQ = 1.0f;
+    context->param.slowEnvNormalizationPower = -0.8f;
+    context->param.bsfZeroQ = DEFAULT_BSF_ZERO_Q;
+    context->param.bsfPoleQ = DEFAULT_BSF_POLE_Q;
+    context->param.distortionCornerFrequency = 300.0f;
+    context->param.distortionInputGain = 0.3f;
+    context->param.distortionCubeThreshold = 0.1f;
+    context->param.distortionOutputGain = 1.5f;
+
+    context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+    return 0;
+}
+
+void addBiquadFilter(
+        std::vector<std::function<void(float *, const float *, size_t)>> &processingChain,
+        struct HapticGeneratorProcessorsRecord &processorsRecord,
+        std::shared_ptr<HapticBiquadFilter> filter) {
+    // The processing chain captures a shared pointer to the filter in the lambda.
+    // The processors record also keeps a shared pointer to the filter so that the filter
+    // remains accessible outside of the processing chain.
+    processorsRecord.filters.push_back(filter);
+    processingChain.push_back([filter](float *out, const float *in, size_t frameCount) {
+            filter->process(out, in, frameCount);
+    });
+}
+
+/**
+ * \brief build haptic generator processing chain.
+ *
+ * \param processingChain
+ * \param processorsRecord a structure to cache all the shared pointers for processors
+ * \param sampleRate the audio sampling rate. Use a float here as it may be used to create filters
+ * \param channelCount haptic channel count
+ */
+void HapticGenerator_buildProcessingChain(
+        std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        struct HapticGeneratorProcessorsRecord& processorsRecord, float sampleRate,
+        const struct HapticGeneratorParam* param) {
+    const size_t channelCount = param->hapticChannelCount;
+    float highPassCornerFrequency = 50.0f;
+    auto hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    float lowPassCornerFrequency = 9000.0f;
+    auto lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto ramp = std::make_shared<Ramp>(channelCount);  // ramp = half-wave rectifier.
+    // The process chain captures the shared pointer of the ramp in lambda. It will be the only
+    // reference to the ramp.
+    // The process record will keep a weak pointer to the ramp so that it is possible to access
+    // the ramp outside of the process chain.
+    processorsRecord.ramps.push_back(ramp);
+    processingChain.push_back([ramp](float *out, const float *in, size_t frameCount) {
+            ramp->process(out, in, frameCount);
+    });
+
+    highPassCornerFrequency = 60.0f;
+    hpf = createHPF2(highPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, hpf);
+    lowPassCornerFrequency = 700.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    lowPassCornerFrequency = 400.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+    lowPassCornerFrequency = 500.0f;
+    lpf = createLPF2(lowPassCornerFrequency, sampleRate, channelCount);
+    addBiquadFilter(processingChain, processorsRecord, lpf);
+
+    auto bpf = createBPF(param->resonantFrequency, param->bpfQ, sampleRate, channelCount);
+    processorsRecord.bpf = bpf;
+    addBiquadFilter(processingChain, processorsRecord, bpf);
+
+    float normalizationPower = param->slowEnvNormalizationPower;
+    // The process chain captures the shared pointer of the slow envelope in lambda. It will
+    // be the only reference to the slow envelope.
+    // The process record will keep a weak pointer to the slow envelope so that it is possible
+    // to access the slow envelope outside of the process chain.
+    auto slowEnv = std::make_shared<SlowEnvelope>(  // SlowEnvelope = partial normalizer, or AGC.
+            5.0f /*envCornerFrequency*/, sampleRate, normalizationPower,
+            0.01f /*envOffset*/, channelCount);
+    processorsRecord.slowEnvs.push_back(slowEnv);
+    processingChain.push_back([slowEnv](float *out, const float *in, size_t frameCount) {
+            slowEnv->process(out, in, frameCount);
+    });
+
+
+    auto bsf = createBSF(
+            param->resonantFrequency, param->bsfZeroQ, param->bsfPoleQ, sampleRate, channelCount);
+    processorsRecord.bsf = bsf;
+    addBiquadFilter(processingChain, processorsRecord, bsf);
+
+    // The process chain captures the shared pointer of the Distortion in lambda. It will
+    // be the only reference to the Distortion.
+    // The process record will keep a weak pointer to the Distortion so that it is possible
+    // to access the Distortion outside of the process chain.
+    auto distortion = std::make_shared<Distortion>(
+            param->distortionCornerFrequency, sampleRate, param->distortionInputGain,
+            param->distortionCubeThreshold, param->distortionOutputGain, channelCount);
+    processorsRecord.distortions.push_back(distortion);
+    processingChain.push_back([distortion](float *out, const float *in, size_t frameCount) {
+            distortion->process(out, in, frameCount);
+    });
+}
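+
+// Sketch of how such a chain is typically consumed (the effect's actual processing loop is
+// outside this excerpt): apply each stage in order, ping-ponging between two scratch buffers.
+//     float* in = buf1;
+//     float* out = buf2;
+//     for (const auto& stage : processingChain) {
+//         stage(out, in, frameCount);
+//         std::swap(in, out);
+//     }
+//     // 'in' now points at the fully processed haptic data.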
+
+int HapticGenerator_Configure(struct HapticGeneratorContext *context, effect_config_t *config) {
+    if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
+        config->inputCfg.format != config->outputCfg.format ||
+        config->inputCfg.format != AUDIO_FORMAT_PCM_FLOAT ||
+        config->inputCfg.channels != config->outputCfg.channels ||
+        config->inputCfg.buffer.frameCount != config->outputCfg.buffer.frameCount) {
+        return -EINVAL;
+    }
+    if (&context->config != config) {
+        context->processingChain.clear();
+        context->processorsRecord.filters.clear();
+        context->processorsRecord.ramps.clear();
+        context->processorsRecord.slowEnvs.clear();
+        context->processorsRecord.distortions.clear();
+        memcpy(&context->config, config, sizeof(effect_config_t));
+        context->param.audioChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->inputCfg.channels) & ~AUDIO_CHANNEL_HAPTIC_ALL);
+        context->param.hapticChannelCount = audio_channel_count_from_out_mask(
+                ((audio_channel_mask_t) config->outputCfg.channels) & AUDIO_CHANNEL_HAPTIC_ALL);
+        ALOG_ASSERT(context->param.hapticChannelCount <= 2,
+                    "haptic channel count(%u) is too large",
+                    context->param.hapticChannelCount);
+        context->audioDataBytesPerFrame = audio_bytes_per_frame(
+                context->param.audioChannelCount, (audio_format_t) config->inputCfg.format);
+        for (size_t i = 0; i < context->param.hapticChannelCount; ++i) {
+            // By default, use the first audio channel to generate haptic channels.
+            context->param.hapticChannelSource[i] = 0;
+        }
+
+        HapticGenerator_buildProcessingChain(context->processingChain,
+                                             context->processorsRecord,
+                                             config->inputCfg.samplingRate,
+                                             &context->param);
+    }
+    return 0;
+}
+
+int HapticGenerator_Reset(struct HapticGeneratorContext *context) {
+    for (auto& filter : context->processorsRecord.filters) {
+        filter->clear();
+    }
+    for (auto& slowEnv : context->processorsRecord.slowEnvs) {
+        slowEnv->clear();
+    }
+    for (auto& distortion : context->processorsRecord.distortions) {
+        distortion->clear();
+    }
+    return 0;
+}
+
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
+                                 int32_t param,
+                                 uint32_t size,
+                                 void *value) {
+    switch (param) {
+    case HG_PARAM_HAPTIC_INTENSITY: {
+        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+            return -EINVAL;
+        }
+        int id = *(int *) value;
+        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+        if (hapticIntensity == os::HapticScale::MUTE) {
+            context->param.id2Intensity.erase(id);
+        } else {
+            context->param.id2Intensity.emplace(id, hapticIntensity);
+        }
+        context->param.maxHapticIntensity = hapticIntensity;
+        for (const auto&[id, intensity] : context->param.id2Intensity) {
+            context->param.maxHapticIntensity = std::max(
+                    context->param.maxHapticIntensity, intensity);
+        }
+        break;
+    }
+    case HG_PARAM_VIBRATOR_INFO: {
+        if (value == nullptr || size != 2 * sizeof(float)) {
+            return -EINVAL;
+        }
+        const float resonantFrequency = *(float*) value;
+        const float qFactor = *((float *) value + 1);
+        context->param.resonantFrequency =
+                isnan(resonantFrequency) ? DEFAULT_RESONANT_FREQUENCY : resonantFrequency;
+        context->param.bsfZeroQ = isnan(qFactor) ? DEFAULT_BSF_POLE_Q : qFactor;
+        context->param.bsfPoleQ = context->param.bsfZeroQ / 2.0f;
+
+        if (context->processorsRecord.bpf != nullptr) {
+            context->processorsRecord.bpf->setCoefficients(
+                    bpfCoefs(context->param.resonantFrequency,
+                             context->param.bpfQ,
+                             context->config.inputCfg.samplingRate));
+        }
+        if (context->processorsRecord.bsf != nullptr) {
+            context->processorsRecord.bsf->setCoefficients(
+                    bsfCoefs(context->param.resonantFrequency,
+                             context->param.bsfZeroQ,
+                             context->param.bsfPoleQ,
+                             context->config.inputCfg.samplingRate));
+        }
+        HapticGenerator_Reset(context);
+    } break;
+    default:
+        ALOGW("Unknown param: %d", param);
+        return -EINVAL;
+    }
+
+    return 0;
+}
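+
+// Illustrative note on the parameter payloads parsed above (the names in the snippet are
+// hypothetical, not part of this effect):
+//   HG_PARAM_HAPTIC_INTENSITY: two int32_t values, {trackId, intensity (an os::HapticScale)}
+//   HG_PARAM_VIBRATOR_INFO:    two float values, {resonantFrequency, qFactor}
+// For example, a caller could pack the intensity parameter roughly like this:
+//   int32_t payload[2] = {trackId, static_cast<int32_t>(desiredScale)};
+//   HapticGenerator_SetParameter(context, HG_PARAM_HAPTIC_INTENSITY, sizeof(payload), payload);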
+
+/**
+ * \brief run the processing chain to generate haptic data from audio data
+ *
+ * \param processingChain the processing chain for generating haptic data
+ * \param buf1 a buffer containing the raw audio data
+ * \param buf2 a buffer that is large enough to hold all the processed data
+ * \param frameCount frame count of the data
+ * \return a pointer to the output buffer
+ */
+float* HapticGenerator_runProcessingChain(
+        const std::vector<std::function<void(float*, const float*, size_t)>>& processingChain,
+        float* buf1, float* buf2, size_t frameCount) {
+    float *in = buf1;
+    float *out = buf2;
+    for (const auto& processingFunc : processingChain) {
+        processingFunc(out, in, frameCount);
+        std::swap(in, out);
+    }
+    return in;
+}
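+
+// Note: HapticGenerator_runProcessingChain ping-pongs between buf1 and buf2 (std::swap after
+// each stage), so the returned pointer is buf1 when the chain length is even and buf2 when it
+// is odd. Callers should treat both buffers as scratch and rely only on the returned pointer.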
+
+} // namespace (anonymous)
+
+//-----------------------------------------------------------------------------
+// Effect API Implementation
+//-----------------------------------------------------------------------------
+
+/*--- Effect Library Interface Implementation ---*/
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId __unused,
+                                  int32_t ioId __unused,
+                                  effect_handle_t *handle) {
+    if (handle == nullptr || uuid == nullptr) {
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) != 0) {
+        return -EINVAL;
+    }
+
+    HapticGeneratorContext *context = new HapticGeneratorContext;
+    HapticGenerator_Init(context);
+
+    *handle = (effect_handle_t) context;
+    ALOGV("%s context is %p", __func__, context);
+    return 0;
+}
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) handle;
+    delete context;
+    return 0;
+}
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor) {
+
+    if (descriptor == nullptr || uuid == nullptr) {
+        ALOGE("%s() called with NULL pointer", __func__);
+        return -EINVAL;
+    }
+
+    if (memcmp(uuid, &gHgDescriptor.uuid, sizeof(*uuid)) == 0) {
+        *descriptor = gHgDescriptor;
+        return 0;
+    }
+
+    return -EINVAL;
+}
+
+/*--- Effect Control Interface Implementation ---*/
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (inBuffer == nullptr || inBuffer->raw == nullptr
+            || outBuffer == nullptr || outBuffer->raw == nullptr) {
+        return 0;
+    }
+
+    // The audio data must not be modified; it is only written to the output buffer
+    // according to the access mode.
+    size_t audioBytes = context->audioDataBytesPerFrame * inBuffer->frameCount;
+    size_t audioSampleCount = inBuffer->frameCount * context->param.audioChannelCount;
+    if (inBuffer->raw != outBuffer->raw) {
+        if (context->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i = 0; i < audioSampleCount; ++i) {
+                outBuffer->f32[i] += inBuffer->f32[i];
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, audioBytes);
+        }
+    }
+
+    if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+        ALOGE("State(%d) is not HAPTICGENERATOR_STATE_ACTIVE when calling %s",
+                context->state, __func__);
+        return -ENODATA;
+    }
+
+    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+        // Haptic channels are muted, no need to generate haptic data.
+        return 0;
+    }
+
+    // Resize buffer if the haptic sample count is greater than buffer size.
+    size_t hapticSampleCount = inBuffer->frameCount * context->param.hapticChannelCount;
+    if (hapticSampleCount > context->inputBuffer.size()) {
+        // The context->inputBuffer and context->outputBuffer must have the same size,
+        // which must be at least the haptic sample count.
+        context->inputBuffer.resize(hapticSampleCount);
+        context->outputBuffer.resize(hapticSampleCount);
+    }
+
+    // Construct input buffer according to haptic channel source
+    for (size_t i = 0; i < inBuffer->frameCount; ++i) {
+        for (size_t j = 0; j < context->param.hapticChannelCount; ++j) {
+            context->inputBuffer[i * context->param.hapticChannelCount + j] =
+                    inBuffer->f32[i * context->param.audioChannelCount
+                            + context->param.hapticChannelSource[j]];
+        }
+    }
+
+    float* hapticOutBuffer = HapticGenerator_runProcessingChain(
+            context->processingChain, context->inputBuffer.data(),
+            context->outputBuffer.data(), inBuffer->frameCount);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity);
+
+    // The haptic playback thread copies haptic data directly from the effect input buffer,
+    // where it is appended after the audio data, to the sink buffer. Therefore, write the
+    // generated haptic data back into the input buffer rather than the output buffer.
+    // Note: this may not work with rpc/binder calls
+    memcpy_by_audio_format(static_cast<char*>(inBuffer->raw) + audioBytes,
+                           static_cast<audio_format_t>(context->config.outputCfg.format),
+                           hapticOutBuffer,
+                           AUDIO_FORMAT_PCM_FLOAT,
+                           hapticSampleCount);
+
+    return 0;
+}
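+
+// Buffer layout assumed by HapticGenerator_Process above: the input buffer holds frameCount
+// interleaved audio frames (audioChannelCount samples each), and the generated haptic samples
+// (hapticChannelCount per frame) are written back as a contiguous block starting at byte offset
+// audioBytes, i.e. immediately after the audio data, where the haptic playback thread picks
+// them up.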
+
+int32_t HapticGenerator_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+                                void *cmdData, uint32_t *replySize, void *replyData) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr || context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    ALOGV("HapticGenerator_Command command %u cmdSize %u", cmdCode, cmdSize);
+
+    switch (cmdCode) {
+        case EFFECT_CMD_INIT:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Init(context);
+            break;
+
+        case EFFECT_CMD_SET_CONFIG:
+            if (cmdData == nullptr || cmdSize != sizeof(effect_config_t)
+                || replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            *(int *) replyData = HapticGenerator_Configure(
+                    context, (effect_config_t *) cmdData);
+            break;
+
+        case EFFECT_CMD_RESET:
+            HapticGenerator_Reset(context);
+            break;
+
+        case EFFECT_CMD_GET_PARAM:
+            ALOGV("HapticGenerator_Command EFFECT_CMD_GET_PARAM cmdData %p,"
+                  "*replySize %u, replyData: %p",
+                  cmdData, *replySize, replyData);
+            break;
+
+        case EFFECT_CMD_SET_PARAM: {
+            ALOGV("HapticGenerator_Command EFFECT_CMD_SET_PARAM cmdSize %d cmdData %p, "
+                  "*replySize %u, replyData %p", cmdSize, cmdData,
+                  replySize ? *replySize : 0, replyData);
+            if (cmdData == nullptr || (cmdSize < (int) (sizeof(effect_param_t) + sizeof(int32_t)))
+                || replyData == nullptr || replySize == nullptr ||
+                *replySize != (int) sizeof(int32_t)) {
+                return -EINVAL;
+            }
+            effect_param_t *cmd = (effect_param_t *) cmdData;
+            *(int *) replyData = HapticGenerator_SetParameter(
+                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+        }
+            break;
+
+        case EFFECT_CMD_ENABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_INITIALIZED) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_ACTIVE;
+            ALOGV("EFFECT_CMD_ENABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_DISABLE:
+            if (replyData == nullptr || replySize == nullptr || *replySize != sizeof(int)) {
+                return -EINVAL;
+            }
+            if (context->state != HAPTICGENERATOR_STATE_ACTIVE) {
+                return -ENOSYS;
+            }
+            context->state = HAPTICGENERATOR_STATE_INITIALIZED;
+            ALOGV("EFFECT_CMD_DISABLE() OK");
+            *(int *) replyData = 0;
+            break;
+
+        case EFFECT_CMD_SET_VOLUME:
+        case EFFECT_CMD_SET_DEVICE:
+        case EFFECT_CMD_SET_AUDIO_MODE:
+            break;
+
+        default:
+            ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
+            return -EINVAL;
+    }
+
+    return 0;
+}
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self, effect_descriptor_t *descriptor) {
+    HapticGeneratorContext *context = (HapticGeneratorContext *) self;
+
+    if (context == nullptr ||
+        context->state == HAPTICGENERATOR_STATE_UNINITIALIZED) {
+        return -EINVAL;
+    }
+
+    memcpy(descriptor, &gHgDescriptor, sizeof(effect_descriptor_t));
+
+    return 0;
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
new file mode 100644
index 0000000..96b744a
--- /dev/null
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECTHAPTICGENERATOR_H_
+#define ANDROID_EFFECTHAPTICGENERATOR_H_
+
+#include <functional>
+#include <vector>
+#include <map>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include "Processors.h"
+
+namespace android::audio_effect::haptic_generator {
+
+//-----------------------------------------------------------------------------
+// Definition
+//-----------------------------------------------------------------------------
+
+enum hapticgenerator_state_t {
+    HAPTICGENERATOR_STATE_UNINITIALIZED,
+    HAPTICGENERATOR_STATE_INITIALIZED,
+    HAPTICGENERATOR_STATE_ACTIVE,
+};
+
+// parameters for each haptic generator
+struct HapticGeneratorParam {
+    uint32_t hapticChannelSource[2]; // The audio channels used to generate haptic channels.
+                                     // The first channel is used to generate HAPTIC_A and the
+                                     // second channel is used to generate HAPTIC_B.
+                                     // Each value is the index of the source audio channel.
+    uint32_t audioChannelCount;
+    uint32_t hapticChannelCount;
+
+    // A map from track id to haptic intensity.
+    std::map<int, os::HapticScale> id2Intensity;
+    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+
+    float resonantFrequency;
+    float bpfQ;
+    float slowEnvNormalizationPower;
+    float bsfZeroQ;
+    float bsfPoleQ;
+    float distortionCornerFrequency;
+    float distortionInputGain;
+    float distortionCubeThreshold;
+    float distortionOutputGain;
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+    std::vector<std::shared_ptr<HapticBiquadFilter>> filters;
+    std::vector<std::shared_ptr<Ramp>> ramps;
+    std::vector<std::shared_ptr<SlowEnvelope>> slowEnvs;
+    std::vector<std::shared_ptr<Distortion>> distortions;
+
+    // Cache band-pass filter and band-stop filter for updating parameters
+    // according to vibrator info
+    std::shared_ptr<HapticBiquadFilter> bpf;
+    std::shared_ptr<HapticBiquadFilter> bsf;
+};
+
+// A structure to keep all the context for HapticGenerator.
+struct HapticGeneratorContext {
+    const struct effect_interface_s *itfe;
+    effect_config_t config;
+    hapticgenerator_state_t state;
+    struct HapticGeneratorParam param;
+    size_t audioDataBytesPerFrame;
+
+    // A cache for all shared pointers of the HapticGenerator
+    struct HapticGeneratorProcessorsRecord processorsRecord;
+
+    // A vector of functions recording the processing chain of the haptic-generating algorithm.
+    // The three parameters of each processing function are a pointer to the output buffer, a
+    // pointer to the input buffer, and the frame count.
+    std::vector<std::function<void(float*, const float*, size_t)>> processingChain;
+
+    // inputBuffer holds the input of the generating algorithm. It is constructed according to
+    // HapticGeneratorParam.hapticChannelSource.
+    std::vector<float> inputBuffer;
+
+    // outputBuffer has the same length as inputBuffer and can be used as an intermediate
+    // buffer by the generating algorithm.
+    std::vector<float> outputBuffer;
+};
+
+//-----------------------------------------------------------------------------
+// Effect API
+//-----------------------------------------------------------------------------
+
+int32_t HapticGeneratorLib_Create(const effect_uuid_t *uuid,
+                                  int32_t sessionId,
+                                  int32_t ioId,
+                                  effect_handle_t *handle);
+
+int32_t HapticGeneratorLib_Release(effect_handle_t handle);
+
+int32_t HapticGeneratorLib_GetDescriptor(const effect_uuid_t *uuid,
+                                         effect_descriptor_t *descriptor);
+
+int32_t HapticGenerator_Process(effect_handle_t self,
+                                audio_buffer_t *inBuffer,
+                                audio_buffer_t *outBuffer);
+
+int32_t HapticGenerator_Command(effect_handle_t self,
+                                uint32_t cmdCode,
+                                uint32_t cmdSize,
+                                void *cmdData,
+                                uint32_t *replySize,
+                                void *replyData);
+
+int32_t HapticGenerator_GetDescriptor(effect_handle_t self,
+                                      effect_descriptor_t *descriptor);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // ANDROID_EFFECTHAPTICGENERATOR_H_
diff --git a/media/libstagefright/codecs/amrwb/MODULE_LICENSE_APACHE2 b/media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/amrwb/MODULE_LICENSE_APACHE2
rename to media/libeffects/hapticgenerator/MODULE_LICENSE_APACHE2
diff --git a/media/libeffects/hapticgenerator/Processors.cpp b/media/libeffects/hapticgenerator/Processors.cpp
new file mode 100644
index 0000000..4fe3a75
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.cpp
@@ -0,0 +1,282 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectHG_Processors"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <assert.h>
+
+#include <cmath>
+
+#include "Processors.h"
+
+#if defined(__aarch64__) || defined(__ARM_NEON__)
+#ifndef USE_NEON
+#define USE_NEON (true)
+#endif
+#else
+#define USE_NEON (false)
+#endif
+#if USE_NEON
+#include <arm_neon.h>
+#endif
+
+namespace android::audio_effect::haptic_generator {
+
+float getRealPoleZ(float cornerFrequency, float sampleRate) {
+    // This will be a pole of a first order filter.
+    float realPoleS = -2 * M_PI * cornerFrequency;
+    return exp(realPoleS / sampleRate); // zero-pole matching
+}
+
+std::pair<float, float> getComplexPoleZ(float ringingFrequency, float q, float sampleRate) {
+    // This is the pole for 1/(s^2 + s/q + 1) in normalized frequency. The other pole is
+    // the complex conjugate of this.
+    float poleImagS = 2 * M_PI * ringingFrequency;
+    float poleRealS = -poleImagS / (2 * q);
+    float poleRadius = exp(poleRealS / sampleRate);
+    float poleImagZ = poleRadius * sin(poleImagS / sampleRate);
+    float poleRealZ = poleRadius * cos(poleImagS / sampleRate);
+    return {poleRealZ, poleImagZ};
+}
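+
+// Illustrative note: both helpers above use pole/zero matching (matched-z): an s-plane pole p is
+// mapped to the z-plane as z = exp(p / sampleRate). For the complex pair this is returned as
+// {radius * cos(angle), radius * sin(angle)} with radius = exp(Re(p) / sampleRate) and
+// angle = Im(p) / sampleRate; the conjugate pole is implied.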
+
+// Implementation of Ramp
+
+Ramp::Ramp(size_t channelCount) : mChannelCount(channelCount) {}
+
+void Ramp::process(float *out, const float *in, size_t frameCount) {
+    size_t i = 0;
+#if USE_NEON
+    size_t sampleCount = frameCount * mChannelCount;
+    float32x2_t allZero = vdup_n_f32(0.0f);
+    while (i + 1 < sampleCount) {
+        vst1_f32(out, vmax_f32(vld1_f32(in), allZero));
+        in += 2;
+        out += 2;
+        i += 2;
+    }
+#endif // USE_NEON
+    for (; i < frameCount * mChannelCount; ++i) {
+        *out = *in >= 0.0f ? *in : 0.0f;
+        out++;
+        in++;
+    }
+}
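+
+// Note: Ramp::process is a half-wave rectifier, out[i] = max(in[i], 0.0f). The NEON path handles
+// two samples per iteration; the scalar loop processes the remaining samples (and all samples
+// when NEON is unavailable).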
+
+// Implementation of SlowEnvelope
+
+SlowEnvelope::SlowEnvelope(
+        float cornerFrequency,
+        float sampleRate,
+        float normalizationPower,
+        float envOffset,
+        size_t channelCount)
+        : mLpf(createLPF(cornerFrequency, sampleRate, channelCount)),
+          mNormalizationPower(normalizationPower),
+          mEnvOffset(envOffset),
+          mChannelCount(channelCount) {}
+
+void SlowEnvelope::process(float* out, const float* in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfOutBuffer.size()) {
+        mLpfOutBuffer.resize(sampleCount);
+        mLpfInBuffer.resize(sampleCount);
+    }
+    for (size_t i = 0; i < sampleCount; ++i) {
+        mLpfInBuffer[i] = fabs(in[i]);
+    }
+    mLpf->process(mLpfOutBuffer.data(), mLpfInBuffer.data(), frameCount);
+    for (size_t i = 0; i < sampleCount; ++i) {
+        out[i] = in[i] * pow(mLpfOutBuffer[i] + mEnvOffset, mNormalizationPower);
+    }
+}
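+
+// Note: for each sample the slow envelope computes
+//   out[i] = in[i] * pow(LPF(|in|)[i] + envOffset, normalizationPower)
+// i.e. the rectified signal is low-pass filtered to obtain a slowly varying envelope, which then
+// (partially) normalizes the signal -- the "partial normalizer, or AGC" mentioned where it is
+// constructed.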
+
+void SlowEnvelope::setNormalizationPower(float normalizationPower) {
+    mNormalizationPower = normalizationPower;
+}
+
+void SlowEnvelope::clear() {
+    mLpf->clear();
+}
+
+// Implementation of distortion
+
+Distortion::Distortion(
+        float cornerFrequency,
+        float sampleRate,
+        float inputGain,
+        float cubeThreshold,
+        float outputGain,
+        size_t channelCount)
+        : mLpf(createLPF2(cornerFrequency, sampleRate, channelCount)),
+          mSampleRate(sampleRate),
+          mCornerFrequency(cornerFrequency),
+          mInputGain(inputGain),
+          mCubeThreshold(cubeThreshold),
+          mOutputGain(outputGain),
+          mChannelCount(channelCount) {}
+
+void Distortion::process(float *out, const float *in, size_t frameCount) {
+    size_t sampleCount = frameCount * mChannelCount;
+    if (sampleCount > mLpfInBuffer.size()) {
+        mLpfInBuffer.resize(sampleCount);
+    }
+    for (size_t i = 0; i < sampleCount; ++i) {
+        const float x = mInputGain * in[i];
+        mLpfInBuffer[i] = x * x * x / (mCubeThreshold + x * x);  // "Coring" nonlinearity.
+    }
+    mLpf->process(out, mLpfInBuffer.data(), frameCount);  // Reduce 3*F components.
+    for (size_t i = 0; i < sampleCount; ++i) {
+        const float x = out[i];
+        out[i] = mOutputGain * x / (1.0f + fabs(x));  // Soft limiter.
+    }
+}
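+
+// Note: the distortion path above is: input gain -> cubic "coring" nonlinearity
+// x^3 / (cubeThreshold + x^2) -> LPF (to attenuate the 3*F components introduced by the cubic)
+// -> soft limiter x / (1 + |x|) scaled by the output gain.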
+
+void Distortion::setCornerFrequency(float cornerFrequency) {
+    mCornerFrequency = cornerFrequency;
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, mSampleRate);
+    mLpf->setCoefficients(coefficient);
+}
+
+void Distortion::setInputGain(float inputGain) {
+    mInputGain = inputGain;
+}
+
+void Distortion::setCubeThrehold(float cubeThreshold) {
+    mCubeThreshold = cubeThreshold;
+}
+
+void Distortion::setOutputGain(float outputGain) {
+    mOutputGain = outputGain;
+}
+
+void Distortion::clear() {
+    mLpf->clear();
+}
+
+
+// Implementation of helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                   const BiquadFilterCoefficients &coefs2) {
+    assert(coefs1[2] == 0.0f);
+    assert(coefs2[2] == 0.0f);
+    assert(coefs1[4] == 0.0f);
+    assert(coefs2[4] == 0.0f);
+    return {coefs1[0] * coefs2[0],
+            coefs1[0] * coefs2[1] + coefs1[1] * coefs2[0],
+            coefs1[1] * coefs2[1],
+            coefs1[3] + coefs2[3],
+            coefs1[3] * coefs2[3]};
+}
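+
+// Illustrative note: the asserts above require both inputs to be first-order sections (zero z^-2
+// terms), and the result is simply the product of the two sections expanded into one biquad:
+//   numerator:   (b0 + b1 * z^-1) * (b0' + b1' * z^-1)
+//   denominator: (1 + a1 * z^-1) * (1 + a1' * z^-1)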
+
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+    // This is a zero at Nyquist
+    coefficient[0] = 0.5f * (1 - realPoleZ);
+    coefficient[1] = coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ; // This is the traditional 1/(s+1) filter
+    coefficient[4] = 0.0f;
+    return coefficient;
+}
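+
+// Illustrative note (an assumption based on how the coefficients are built and cascaded in this
+// file): a BiquadFilterCoefficients array c describes
+//   H(z) = (c[0] + c[1] * z^-1 + c[2] * z^-2) / (1 + c[3] * z^-1 + c[4] * z^-2)
+// so lpfCoefs above places a real pole at z = realPoleZ, a zero at z = -1 (Nyquist), and the
+// factor 0.5f * (1 - realPoleZ) normalizes the DC gain to 1.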
+
+BiquadFilterCoefficients bpfCoefs(const float ringingFrequency,
+                                  const float q,
+                                  const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    const auto [real, img] = getComplexPoleZ(ringingFrequency, q, sampleRate);
+    // Note: this is not a standard cookbook BPF, but a low pass filter with zero at DC
+    coefficient[0] = 1.0f;
+    coefficient[1] = -1.0f;
+    coefficient[2] = 0.0f;
+    coefficient[3] = -2 * real;
+    coefficient[4] = real * real + img * img;
+    return coefficient;
+}
+
+BiquadFilterCoefficients bsfCoefs(const float ringingFrequency,
+                                  const float zq,
+                                  const float pq,
+                                  const float sampleRate) {
+    BiquadFilterCoefficients coefficient;
+    const auto [zeroReal, zeroImg] = getComplexPoleZ(ringingFrequency, zq, sampleRate);
+    float zeroCoeff1 = -2 * zeroReal;
+    float zeroCoeff2 = zeroReal* zeroReal + zeroImg * zeroImg;
+    const auto [poleReal, poleImg] = getComplexPoleZ(ringingFrequency, pq, sampleRate);
+    float poleCoeff1 = -2 * poleReal;
+    float poleCoeff2 = poleReal * poleReal + poleImg * poleImg;
+    const float norm = (1.0f + poleCoeff1 + poleCoeff2) / (1.0f + zeroCoeff1 + zeroCoeff2);
+    coefficient[0] = 1.0f * norm;
+    coefficient[1] = zeroCoeff1 * norm;
+    coefficient[2] = zeroCoeff2 * norm;
+    coefficient[3] = poleCoeff1;
+    coefficient[4] = poleCoeff2;
+    return coefficient;
+}
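+
+// Note: in bsfCoefs the zero pair (from zq) and the pole pair (from pq) share the same ringing
+// frequency, and norm scales the numerator so that the gain at DC (z = 1) is exactly 1, since
+// H(1) = norm * (1 + zeroCoeff1 + zeroCoeff2) / (1 + poleCoeff1 + poleCoeff2).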
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = lpfCoefs(cornerFrequency, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount) {
+    BiquadFilterCoefficients coefficient;
+    // Note: this is valid only when the corner frequency is less than Nyquist / 2.
+    float realPoleZ = getRealPoleZ(cornerFrequency, sampleRate);
+
+    // Note: this is a zero at DC
+    coefficient[0] = 0.5f * (1 + realPoleZ);
+    coefficient[1] = -coefficient[0];
+    coefficient[2] = 0.0f;
+    coefficient[3] = -realPoleZ;
+    coefficient[4] = 0.0f;
+    return std::make_shared<HapticBiquadFilter>(
+            channelCount, cascadeFirstOrderFilters(coefficient, coefficient));
+}
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = bpfCoefs(ringingFrequency, q, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount) {
+    BiquadFilterCoefficients coefficient = bsfCoefs(ringingFrequency, zq, pq, sampleRate);
+    return std::make_shared<HapticBiquadFilter>(channelCount, coefficient);
+}
+
+} // namespace android::audio_effect::haptic_generator
diff --git a/media/libeffects/hapticgenerator/Processors.h b/media/libeffects/hapticgenerator/Processors.h
new file mode 100644
index 0000000..74ca77d
--- /dev/null
+++ b/media/libeffects/hapticgenerator/Processors.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+#define _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
+
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include <audio_utils/BiquadFilter.h>
+
+using HapticBiquadFilter = android::audio_utils::BiquadFilter<float>;
+using BiquadFilterCoefficients = std::array<float, android::audio_utils::kBiquadNumCoefs>;
+
+namespace android::audio_effect::haptic_generator {
+
+// A class providing a process function that makes input data non-negative.
+class Ramp {
+public:
+    explicit Ramp(size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+private:
+    const size_t mChannelCount;
+};
+
+
+class SlowEnvelope {
+public:
+    SlowEnvelope(float cornerFrequency, float sampleRate,
+                 float normalizationPower, float envOffset,
+                 size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void setNormalizationPower(float normalizationPower);
+
+    void clear();
+
+private:
+    const std::shared_ptr<HapticBiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    std::vector<float> mLpfOutBuffer;
+    float mNormalizationPower;
+    const float mEnvOffset;
+    const size_t mChannelCount;
+};
+
+
+// A class providing a process function that compressively distorts a waveform.
+class Distortion {
+public:
+    Distortion(float cornerFrequency, float sampleRate,
+               float inputGain, float cubeThreshold,
+               float outputGain, size_t channelCount);
+
+    void process(float *out, const float *in, size_t frameCount);
+
+    void setCornerFrequency(float cornerFrequency);
+    void setInputGain(float inputGain);
+    void setCubeThrehold(float cubeThreshold);
+    void setOutputGain(float outputGain);
+
+    void clear();
+
+private:
+    const std::shared_ptr<HapticBiquadFilter> mLpf;
+    std::vector<float> mLpfInBuffer;
+    float mSampleRate;
+    float mCornerFrequency;
+    float mInputGain;
+    float mCubeThreshold;
+    float mOutputGain;
+    const size_t mChannelCount;
+};
+
+// Helper functions
+
+BiquadFilterCoefficients cascadeFirstOrderFilters(const BiquadFilterCoefficients &coefs1,
+                                                  const BiquadFilterCoefficients &coefs2);
+
+BiquadFilterCoefficients lpfCoefs(const float cornerFrequency, const float sampleRate);
+
+BiquadFilterCoefficients bpfCoefs(const float ringingFrequency,
+                                  const float q,
+                                  const float sampleRate);
+
+BiquadFilterCoefficients bsfCoefs(const float ringingFrequency,
+                                  const float zq,
+                                  const float pq,
+                                  const float sampleRate);
+
+std::shared_ptr<HapticBiquadFilter> createLPF(const float cornerFrequency,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+// Create a biquad equivalent to two cascaded first-order LPFs with the same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createLPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+// Create a biquad equivalent to two cascaded first-order HPFs with the same corner frequency.
+std::shared_ptr<HapticBiquadFilter> createHPF2(const float cornerFrequency,
+                                         const float sampleRate,
+                                         const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBPF(const float ringingFrequency,
+                                        const float q,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+std::shared_ptr<HapticBiquadFilter> createBSF(const float ringingFrequency,
+                                        const float zq,
+                                        const float pq,
+                                        const float sampleRate,
+                                        const size_t channelCount);
+
+} // namespace android::audio_effect::haptic_generator
+
+#endif // _EFFECT_HAPTIC_GENERATOR_PROCESSORS_H_
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 5a13af6..bcd6947 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -1,4 +1,23 @@
 // LoudnessEnhancer library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_loudness_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_loudness_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libldnhncr",
 
diff --git a/media/libeffects/lvm/.clang-format b/media/libeffects/lvm/.clang-format
new file mode 100644
index 0000000..6f4b13e
--- /dev/null
+++ b/media/libeffects/lvm/.clang-format
@@ -0,0 +1,15 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
+# Following are specific to libeffects/lvm
+SortIncludes: false
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
new file mode 100644
index 0000000..8a25b85
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -0,0 +1,48 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+    name: "lvm_benchmark",
+    vendor: true,
+    host_supported: true,
+    srcs: ["lvm_benchmark.cpp"],
+    static_libs: [
+        "libbundlewrapper",
+        "libmusicbundle",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+}
+
+cc_benchmark {
+    name: "reverb_benchmark",
+    vendor: true,
+    host_supported: true,
+    include_dirs: [
+        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+    ],
+    srcs: ["reverb_benchmark.cpp"],
+    static_libs: [
+        "libreverb",
+        "libreverbwrapper",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+}
diff --git a/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
new file mode 100644
index 0000000..bdb66d8
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/lvm_benchmark.cpp
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <log/log.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <system/audio.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW BassBoost
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Virtualizer
+        {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Equalizer
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Volume
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+constexpr size_t kFrameCount = 2048;
+
+constexpr audio_channel_mask_t kChMasks[] = {
+        AUDIO_CHANNEL_INDEX_MASK_1,  AUDIO_CHANNEL_INDEX_MASK_2,  AUDIO_CHANNEL_INDEX_MASK_3,
+        AUDIO_CHANNEL_INDEX_MASK_4,  AUDIO_CHANNEL_INDEX_MASK_5,  AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_INDEX_MASK_7,  AUDIO_CHANNEL_INDEX_MASK_8,  AUDIO_CHANNEL_INDEX_MASK_9,
+        AUDIO_CHANNEL_INDEX_MASK_10, AUDIO_CHANNEL_INDEX_MASK_11, AUDIO_CHANNEL_INDEX_MASK_12,
+        AUDIO_CHANNEL_INDEX_MASK_13, AUDIO_CHANNEL_INDEX_MASK_14, AUDIO_CHANNEL_INDEX_MASK_15,
+        AUDIO_CHANNEL_INDEX_MASK_16, AUDIO_CHANNEL_INDEX_MASK_17, AUDIO_CHANNEL_INDEX_MASK_18,
+        AUDIO_CHANNEL_INDEX_MASK_19, AUDIO_CHANNEL_INDEX_MASK_20, AUDIO_CHANNEL_INDEX_MASK_21,
+        AUDIO_CHANNEL_INDEX_MASK_22, AUDIO_CHANNEL_INDEX_MASK_23, AUDIO_CHANNEL_INDEX_MASK_24,
+};
+
+constexpr size_t kNumChMasks = std::size(kChMasks);
+constexpr int kSampleRate = 44100;
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the number of channels.
+ * The second parameter indicates the effect.
+ * 0: Bass Boost, 1: Virtualizer, 2: Equalizer, 3: Volume
+ * -----------------------------------------------------
+ * Benchmark           Time             CPU   Iterations
+ * -----------------------------------------------------
+ * BM_LVM/1/0       52123 ns        51971 ns        13437
+ * BM_LVM/1/1       75397 ns        75175 ns         9382
+ * BM_LVM/1/2       40253 ns        40140 ns        17418
+ * BM_LVM/1/3       19918 ns        19860 ns        35230
+ * BM_LVM/2/0       62455 ns        62283 ns        11214
+ * BM_LVM/2/1      110086 ns       109751 ns         6350
+ * BM_LVM/2/2       44017 ns        43890 ns        15982
+ * BM_LVM/2/3       21660 ns        21596 ns        32568
+ * BM_LVM/3/0       71925 ns        71698 ns         9745
+ * BM_LVM/3/1      117043 ns       116754 ns         6007
+ * BM_LVM/3/2       48899 ns        48781 ns        14334
+ * BM_LVM/3/3       23607 ns        23540 ns        29739
+ * BM_LVM/4/0       81296 ns        81095 ns         8632
+ * BM_LVM/4/1      122435 ns       122132 ns         5733
+ * BM_LVM/4/2       53744 ns        53612 ns        13068
+ * BM_LVM/4/3       25846 ns        25783 ns        27188
+ * BM_LVM/5/0       98557 ns        98311 ns         7120
+ * BM_LVM/5/1      131626 ns       131269 ns         5296
+ * BM_LVM/5/2       66892 ns        66732 ns        10458
+ * BM_LVM/5/3       31797 ns        31721 ns        22092
+ * BM_LVM/6/0      111880 ns       111596 ns         6278
+ * BM_LVM/6/1      140207 ns       139846 ns         5000
+ * BM_LVM/6/2       75683 ns        75496 ns         9253
+ * BM_LVM/6/3       37669 ns        37571 ns        18663
+ * BM_LVM/7/0      128265 ns       127957 ns         5470
+ * BM_LVM/7/1      149522 ns       149159 ns         4699
+ * BM_LVM/7/2       92024 ns        91798 ns         7631
+ * BM_LVM/7/3       43372 ns        43268 ns        16181
+ * BM_LVM/8/0      141897 ns       141548 ns         4945
+ * BM_LVM/8/1      158062 ns       157661 ns         4438
+ * BM_LVM/8/2       98042 ns        97801 ns         7151
+ * BM_LVM/8/3       49044 ns        48923 ns        14314
+ * BM_LVM/9/0      174692 ns       174228 ns         4026
+ * BM_LVM/9/1      183048 ns       182560 ns         3834
+ * BM_LVM/9/2      131020 ns       130675 ns         5347
+ * BM_LVM/9/3       71102 ns        70915 ns         9801
+ * BM_LVM/10/0     189079 ns       188576 ns         3699
+ * BM_LVM/10/1     187989 ns       187472 ns         3737
+ * BM_LVM/10/2     140093 ns       139717 ns         5007
+ * BM_LVM/10/3      78175 ns        77963 ns         8919
+ * BM_LVM/11/0     207577 ns       207007 ns         3371
+ * BM_LVM/11/1     198186 ns       197640 ns         3535
+ * BM_LVM/11/2     157214 ns       156786 ns         4459
+ * BM_LVM/11/3      85912 ns        85681 ns         8153
+ * BM_LVM/12/0     220861 ns       220265 ns         3169
+ * BM_LVM/12/1     208759 ns       208184 ns         3355
+ * BM_LVM/12/2     165533 ns       165088 ns         4234
+ * BM_LVM/12/3      92616 ns        92364 ns         7528
+ * BM_LVM/13/0     238573 ns       237920 ns         2945
+ * BM_LVM/13/1     219130 ns       218520 ns         3209
+ * BM_LVM/13/2     183193 ns       182692 ns         3830
+ * BM_LVM/13/3     100546 ns       100274 ns         7005
+ * BM_LVM/14/0     254820 ns       254135 ns         2748
+ * BM_LVM/14/1     230161 ns       229530 ns         3049
+ * BM_LVM/14/2     192195 ns       191671 ns         3635
+ * BM_LVM/14/3     107770 ns       107477 ns         6502
+ * BM_LVM/15/0     273695 ns       272954 ns         2531
+ * BM_LVM/15/1     240718 ns       240049 ns         2801
+ * BM_LVM/15/2     220914 ns       220309 ns         3191
+ * BM_LVM/15/3     124321 ns       123978 ns         5664
+ * BM_LVM/16/0     285769 ns       284969 ns         2459
+ * BM_LVM/16/1     251692 ns       250983 ns         2789
+ * BM_LVM/16/2     224554 ns       223917 ns         3132
+ * BM_LVM/16/3     122048 ns       121706 ns         5753
+ * BM_LVM/17/0     310027 ns       309154 ns         2266
+ * BM_LVM/17/1     262008 ns       261259 ns         2681
+ * BM_LVM/17/2     247530 ns       246827 ns         2842
+ * BM_LVM/17/3     129513 ns       129146 ns         5418
+ * BM_LVM/18/0     322755 ns       321844 ns         2173
+ * BM_LVM/18/1     263266 ns       262514 ns         2671
+ * BM_LVM/18/2     257606 ns       256875 ns         2731
+ * BM_LVM/18/3     136550 ns       136164 ns         5129
+ * BM_LVM/19/0     338551 ns       337591 ns         2069
+ * BM_LVM/19/1     275929 ns       275134 ns         2535
+ * BM_LVM/19/2     270331 ns       269554 ns         2596
+ * BM_LVM/19/3     144551 ns       144138 ns         4838
+ * BM_LVM/20/0     352633 ns       351617 ns         1993
+ * BM_LVM/20/1     286607 ns       285713 ns         2371
+ * BM_LVM/20/2     283541 ns       282689 ns         2407
+ * BM_LVM/20/3     152355 ns       151904 ns         4604
+ * BM_LVM/21/0     370557 ns       369456 ns         1889
+ * BM_LVM/21/1     298251 ns       297351 ns         2352
+ * BM_LVM/21/2     296806 ns       295917 ns         2364
+ * BM_LVM/21/3     160212 ns       159735 ns         4330
+ * BM_LVM/22/0     386431 ns       385224 ns         1826
+ * BM_LVM/22/1     308901 ns       307925 ns         2273
+ * BM_LVM/22/2     309077 ns       308140 ns         2274
+ * BM_LVM/22/3     167492 ns       166987 ns         4194
+ * BM_LVM/23/0     404455 ns       403218 ns         1729
+ * BM_LVM/23/1     322026 ns       321014 ns         2187
+ * BM_LVM/23/2     326616 ns       325623 ns         2152
+ * BM_LVM/23/3     175873 ns       175328 ns         4007
+ * BM_LVM/24/0     416949 ns       415676 ns         1684
+ * BM_LVM/24/1     329803 ns       328779 ns         2128
+ * BM_LVM/24/2     337648 ns       336626 ns         2080
+ * BM_LVM/24/3     183192 ns       182634 ns         3824
+ *******************************************************************/
+
+static void BM_LVM(benchmark::State& state) {
+    const size_t chMask = kChMasks[state.range(0) - 1];
+    const effect_uuid_t uuid = kEffectUuids[state.range(1)];
+    const size_t channelCount = audio_channel_count_from_out_mask(chMask);
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(chMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::vector<float> input(kFrameCount * channelCount);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    effect_handle_t effectHandle = nullptr;
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&uuid, 1, 1, &effectHandle);
+        status != 0) {
+        ALOGE("create_effect returned an error = %d\n", status);
+        return;
+    }
+
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status = (*effectHandle)
+                             ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                       &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    if (int status =
+                (*effectHandle)
+                        ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+        status != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return;
+    }
+
+    // Run the test
+    for (auto _ : state) {
+        std::vector<float> output(kFrameCount * channelCount);
+
+        benchmark::DoNotOptimize(input.data());
+        benchmark::DoNotOptimize(output.data());
+
+        audio_buffer_t inBuffer = {.frameCount = kFrameCount, .f32 = input.data()};
+        audio_buffer_t outBuffer = {.frameCount = kFrameCount, .f32 = output.data()};
+        (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void LVMArgs(benchmark::internal::Benchmark* b) {
+    for (int i = FCC_1; i <= kNumChMasks; i++) {
+        for (int j = 0; j < kNumEffectUuids; ++j) {
+            b->Args({i, j});
+        }
+    }
+}
+
+BENCHMARK(BM_LVM)->Apply(LVMArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/lvm/benchmarks/reverb_benchmark.cpp b/media/libeffects/lvm/benchmarks/reverb_benchmark.cpp
new file mode 100644
index 0000000..00a7ff2
--- /dev/null
+++ b/media/libeffects/lvm/benchmarks/reverb_benchmark.cpp
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <log/log.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <system/audio.h>
+#include "EffectReverb.h"
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+constexpr effect_uuid_t kEffectUuids[] = {
+        {0x172cdf00,
+         0xa3bc,
+         0x11df,
+         0xa72f,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-insert mode
+        {0xf29a1400,
+         0xa3bb,
+         0x11df,
+         0x8ddc,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-aux mode
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+constexpr size_t kFrameCount = 2048;
+
+constexpr int kPresets[] = {
+        REVERB_PRESET_NONE,      REVERB_PRESET_SMALLROOM,  REVERB_PRESET_MEDIUMROOM,
+        REVERB_PRESET_LARGEROOM, REVERB_PRESET_MEDIUMHALL, REVERB_PRESET_LARGEHALL,
+        REVERB_PRESET_PLATE,
+};
+
+constexpr size_t kNumPresets = std::size(kPresets);
+
+constexpr int kSampleRate = 44100;
+
+int reverbSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {paramType, paramValue};
+    auto effectParam = (effect_param_t*)malloc(sizeof(effect_param_t) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*effectHandle)
+                         ->command(effectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    if (status != 0) {
+        ALOGE("Reverb set config returned an error = %d\n", status);
+        return status;
+    }
+    return reply;
+}
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the preset level id.
+ * The second parameter indicates the effect.
+ * 0: preset-insert mode, 1: preset-aux mode
+ * --------------------------------------------------------
+ * Benchmark              Time             CPU   Iterations
+ * --------------------------------------------------------
+ * BM_REVERB/0/0      19312 ns        19249 ns        36282
+ * BM_REVERB/0/1       5613 ns         5596 ns       125032
+ * BM_REVERB/1/0     605453 ns       603714 ns         1131
+ * BM_REVERB/1/1     589421 ns       587758 ns         1161
+ * BM_REVERB/2/0     605760 ns       604006 ns         1131
+ * BM_REVERB/2/1     589434 ns       587777 ns         1161
+ * BM_REVERB/3/0     605574 ns       603828 ns         1131
+ * BM_REVERB/3/1     589566 ns       587862 ns         1162
+ * BM_REVERB/4/0     605634 ns       603894 ns         1131
+ * BM_REVERB/4/1     589506 ns       587856 ns         1161
+ * BM_REVERB/5/0     605644 ns       603929 ns         1131
+ * BM_REVERB/5/1     589592 ns       587863 ns         1161
+ * BM_REVERB/6/0     610544 ns       608561 ns         1131
+ * BM_REVERB/6/1     589686 ns       587871 ns         1161
+ *******************************************************************/
+
+static void BM_REVERB(benchmark::State& state) {
+    const size_t chMask = AUDIO_CHANNEL_OUT_STEREO;
+    const size_t preset = kPresets[state.range(0)];
+    const effect_uuid_t uuid = kEffectUuids[state.range(1)];
+    const size_t channelCount = audio_channel_count_from_out_mask(chMask);
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(chMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::vector<float> input(kFrameCount * channelCount);
+    std::vector<float> output(kFrameCount * channelCount);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    effect_handle_t effectHandle = nullptr;
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&uuid, 1, 1, &effectHandle);
+        status != 0) {
+        ALOGE("create_effect returned an error = %d\n", status);
+        return;
+    }
+
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status = (*effectHandle)
+                             ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                       &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    if (int status =
+                (*effectHandle)
+                        ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+        status != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return;
+    }
+
+    if (int status = reverbSetConfigParam(REVERB_PARAM_PRESET, preset, effectHandle); status != 0) {
+        ALOGE("Invalid reverb preset. Error %d\n", status);
+        return;
+    }
+
+    // Run the test
+    for (auto _ : state) {
+        benchmark::DoNotOptimize(input.data());
+        benchmark::DoNotOptimize(output.data());
+
+        audio_buffer_t inBuffer = {.frameCount = kFrameCount, .f32 = input.data()};
+        audio_buffer_t outBuffer = {.frameCount = kFrameCount, .f32 = output.data()};
+        (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void REVERBArgs(benchmark::internal::Benchmark* b) {
+    for (int i = 0; i < kNumPresets; i++) {
+        for (int j = 0; j < kNumEffectUuids; ++j) {
+            b->Args({i, j});
+        }
+    }
+}
+
+BENCHMARK(BM_REVERB)->Apply(REVERBArgs);
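+
+// Apply(REVERBArgs) registers the full cross product of preset and effect indices, so every
+// {preset, insert/aux} combination in the table above gets its own benchmark case. Individual
+// cases can be selected at run time with the standard Google Benchmark filter flag, e.g.
+// --benchmark_filter="BM_REVERB/1/0".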
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 1f2a5e1..7998879 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -1,4 +1,23 @@
 // Music bundle
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_lvm_lib_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_lvm_lib_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libmusicbundle",
 
@@ -9,6 +28,7 @@
     },
 
     vendor: true,
+    host_supported: true,
     srcs: [
         "StereoWidening/src/LVCS_BypassMix.cpp",
         "StereoWidening/src/LVCS_Control.cpp",
@@ -30,7 +50,6 @@
         "Bundle/src/LVM_Control.cpp",
         "SpectrumAnalyzer/src/LVPSA_Control.cpp",
         "SpectrumAnalyzer/src/LVPSA_Init.cpp",
-        "SpectrumAnalyzer/src/LVPSA_Memory.cpp",
         "SpectrumAnalyzer/src/LVPSA_Process.cpp",
         "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
         "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
@@ -40,58 +59,20 @@
         "Eq/src/LVEQNB_Init.cpp",
         "Eq/src/LVEQNB_Process.cpp",
         "Eq/src/LVEQNB_Tables.cpp",
-        "Common/src/InstAlloc.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
         "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
-        "Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp",
-        "Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp",
-        "Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp",
-        "Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp",
-        "Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp",
-        "Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp",
-        "Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp",
-        "Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp",
-        "Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp",
-        "Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp",
-        "Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp",
-        "Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp",
-        "Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp",
-        "Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp",
-        "Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp",
-        "Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp",
-        "Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp",
-        "Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp",
-        "Common/src/PK_2I_D32F32C30G11_TRC_WRA_01.cpp",
-        "Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp",
-        "Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp",
-        "Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp",
-        "Common/src/Int16LShiftToInt32_16x32.cpp",
-        "Common/src/From2iToMono_16.cpp",
         "Common/src/Copy_16.cpp",
-        "Common/src/MonoTo2I_16.cpp",
         "Common/src/MonoTo2I_32.cpp",
-        "Common/src/LoadConst_16.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/dB_to_Lin32.cpp",
         "Common/src/Shift_Sat_v16xv16.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
-        "Common/src/Abs_32.cpp",
-        "Common/src/Int32RShiftToInt16_Sat_32x16.cpp",
         "Common/src/From2iToMono_32.cpp",
-        "Common/src/mult3s_16x16.cpp",
         "Common/src/Mult3s_32x16.cpp",
         "Common/src/NonLinComp_D16.cpp",
         "Common/src/DelayMix_16x16.cpp",
         "Common/src/MSTo2i_Sat_16x16.cpp",
         "Common/src/From2iToMS_16x16.cpp",
-        "Common/src/Mac3s_Sat_16x16.cpp",
         "Common/src/Mac3s_Sat_32x16.cpp",
-        "Common/src/Add2_Sat_16x16.cpp",
         "Common/src/Add2_Sat_32x32.cpp",
         "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
         "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
@@ -132,13 +113,14 @@
     shared_libs: [
         "liblog",
     ],
+    static_libs: [
+        "libaudioutils",
+    ],
     header_libs: [
-        "libhardware_headers"
+        "libhardware_headers",
     ],
     cppflags: [
         "-fvisibility=hidden",
-        "-DSUPPORT_MC",
-
         "-Wall",
         "-Werror",
     ],
@@ -156,27 +138,19 @@
     },
 
     vendor: true,
+    host_supported: true,
     srcs: [
         "Reverb/src/LVREV_ApplyNewSettings.cpp",
         "Reverb/src/LVREV_ClearAudioBuffers.cpp",
         "Reverb/src/LVREV_GetControlParameters.cpp",
         "Reverb/src/LVREV_GetInstanceHandle.cpp",
-        "Reverb/src/LVREV_GetMemoryTable.cpp",
         "Reverb/src/LVREV_Process.cpp",
         "Reverb/src/LVREV_SetControlParameters.cpp",
         "Reverb/src/LVREV_Tables.cpp",
-        "Common/src/Abs_32.cpp",
-        "Common/src/InstAlloc.cpp",
-        "Common/src/LoadConst_16.cpp",
-        "Common/src/LoadConst_32.cpp",
         "Common/src/From2iToMono_32.cpp",
         "Common/src/Mult3s_32x16.cpp",
-        "Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp",
-        "Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp",
-        "Common/src/DelayAllPass_Sat_32x16To32.cpp",
         "Common/src/Copy_16.cpp",
         "Common/src/Mac3s_Sat_32x16.cpp",
-        "Common/src/DelayWrite_32.cpp",
         "Common/src/Shift_Sat_v32xv32.cpp",
         "Common/src/Add2_Sat_32x32.cpp",
         "Common/src/JoinTo2i_32x32.cpp",
@@ -203,10 +177,11 @@
         "Reverb/lib",
         "Common/lib",
     ],
-
+    static_libs: [
+        "libaudioutils",
+    ],
     cppflags: [
         "-fvisibility=hidden",
-
         "-Wall",
         "-Werror",
     ],
diff --git a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
index 948d79c..e60ad07 100644
--- a/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
+++ b/media/libeffects/lvm/lib/Bass/lib/LVDBE.h
@@ -69,15 +69,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table*/
-#define LVDBE_NR_MEMORY_REGIONS        4                            /* Number of memory regions */
-
 /* Bass Enhancement effect level */
-#define LVDBE_EFFECT_03DB            3                              /* Effect defines for backwards compatibility */
-#define LVDBE_EFFECT_06DB            6
-#define LVDBE_EFFECT_09DB            9
-#define LVDBE_EFFECT_12DB            12
-#define LVDBE_EFFECT_15DB            15
+#define LVDBE_EFFECT_03DB 3 /* Effect defines for backwards compatibility */
+#define LVDBE_EFFECT_06DB 6
+#define LVDBE_EFFECT_09DB 9
+#define LVDBE_EFFECT_12DB 12
+#define LVDBE_EFFECT_15DB 15
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -86,52 +83,31 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void    *LVDBE_Handle_t;
+typedef void* LVDBE_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVDBE_OFF      = 0,
-    LVDBE_ON       = 1,
-    LVDBE_MODE_MAX = LVM_MAXINT_32
-} LVDBE_Mode_en;
+typedef enum { LVDBE_OFF = 0, LVDBE_ON = 1, LVDBE_MODE_MAX = LVM_MAXINT_32 } LVDBE_Mode_en;
 
 /* High pass filter */
-typedef enum
-{
+typedef enum {
     LVDBE_HPF_OFF = 0,
-    LVDBE_HPF_ON  = 1,
+    LVDBE_HPF_ON = 1,
     LVDBE_HPF_MAX = LVM_MAXINT_32
 } LVDBE_FilterSelect_en;
 
 /* Volume control */
-typedef enum
-{
+typedef enum {
     LVDBE_VOLUME_OFF = 0,
-    LVDBE_VOLUME_ON  = 1,
+    LVDBE_VOLUME_ON = 1,
     LVDBE_VOLUME_MAX = LVM_MAXINT_32
 } LVDBE_Volume_en;
 
-/* Memory Types */
-typedef enum
-{
-    LVDBE_PERSISTENT      = 0,
-    LVDBE_PERSISTENT_DATA = 1,
-    LVDBE_PERSISTENT_COEF = 2,
-    LVDBE_SCRATCH         = 3,
-    LVDBE_MEMORY_MAX      = LVM_MAXINT_32
-
-} LVDBE_MemoryTypes_en;
-
 /* Function return status */
-typedef enum
-{
-    LVDBE_SUCCESS        = 0,                        /* Successful return from a routine */
-    LVDBE_ALIGNMENTERROR = 1,                        /* Memory alignment error */
-    LVDBE_NULLADDRESS    = 2,                        /* NULL allocation address */
-    LVDBE_TOOMANYSAMPLES = 3,                        /* Maximum block size exceeded */
-    LVDBE_SIZEERROR      = 4,                        /* Incorrect structure size */
-    LVDBE_STATUS_MAX     = LVM_MAXINT_32
+typedef enum {
+    LVDBE_SUCCESS = 0,        /* Successful return from a routine */
+    LVDBE_NULLADDRESS = 1,    /* NULL allocation address */
+    LVDBE_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+    LVDBE_STATUS_MAX = LVM_MAXINT_32
 } LVDBE_ReturnStatus_en;
 
 /****************************************************************************************/
@@ -158,40 +134,38 @@
 /*
  * Bass Enhancement centre frequency
  */
-#define LVDBE_CAP_CENTRE_55Hz       1
-#define LVDBE_CAP_CENTRE_66Hz       2
-#define LVDBE_CAP_CENTRE_78Hz       4
-#define LVDBE_CAP_CENTRE_90Hz       8
+#define LVDBE_CAP_CENTRE_55Hz 1
+#define LVDBE_CAP_CENTRE_66Hz 2
+#define LVDBE_CAP_CENTRE_78Hz 4
+#define LVDBE_CAP_CENTRE_90Hz 8
 
-typedef enum
-{
+typedef enum {
     LVDBE_CENTRE_55HZ = 0,
     LVDBE_CENTRE_66HZ = 1,
     LVDBE_CENTRE_78HZ = 2,
     LVDBE_CENTRE_90HZ = 3,
-    LVDBE_CENTRE_MAX  = LVM_MAXINT_32
+    LVDBE_CENTRE_MAX = LVM_MAXINT_32
 } LVDBE_CentreFreq_en;
 
 /*
  * Supported sample rates in samples per second
  */
-#define LVDBE_CAP_FS_8000                1
-#define LVDBE_CAP_FS_11025               2
-#define LVDBE_CAP_FS_12000               4
-#define LVDBE_CAP_FS_16000               8
-#define LVDBE_CAP_FS_22050               16
-#define LVDBE_CAP_FS_24000               32
-#define LVDBE_CAP_FS_32000               64
-#define LVDBE_CAP_FS_44100               128
-#define LVDBE_CAP_FS_48000               256
-#define LVDBE_CAP_FS_88200               512
-#define LVDBE_CAP_FS_96000               1024
-#define LVDBE_CAP_FS_176400              2048
-#define LVDBE_CAP_FS_192000              4096
+#define LVDBE_CAP_FS_8000 1
+#define LVDBE_CAP_FS_11025 2
+#define LVDBE_CAP_FS_12000 4
+#define LVDBE_CAP_FS_16000 8
+#define LVDBE_CAP_FS_22050 16
+#define LVDBE_CAP_FS_24000 32
+#define LVDBE_CAP_FS_32000 64
+#define LVDBE_CAP_FS_44100 128
+#define LVDBE_CAP_FS_48000 256
+#define LVDBE_CAP_FS_88200 512
+#define LVDBE_CAP_FS_96000 1024
+#define LVDBE_CAP_FS_176400 2048
+#define LVDBE_CAP_FS_192000 4096
 
-typedef enum
-{
-    LVDBE_FS_8000  = 0,
+typedef enum {
+    LVDBE_FS_8000 = 0,
     LVDBE_FS_11025 = 1,
     LVDBE_FS_12000 = 2,
     LVDBE_FS_16000 = 3,
@@ -204,7 +178,7 @@
     LVDBE_FS_96000 = 10,
     LVDBE_FS_176400 = 11,
     LVDBE_FS_192000 = 12,
-    LVDBE_FS_MAX   = LVM_MAXINT_32
+    LVDBE_FS_MAX = LVM_MAXINT_32
 } LVDBE_Fs_en;
 
 /****************************************************************************************/
@@ -213,44 +187,25 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                Size;                        /* Region size in bytes */
-    LVM_UINT16                Alignment;                  /* Region alignment in bytes */
-    LVDBE_MemoryTypes_en      Type;                       /* Region type */
-    void                      *pBaseAddress;              /* Pointer to the region base address */
-} LVDBE_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVDBE_MemoryRegion_t    Region[LVDBE_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVDBE_MemTab_t;
-
 /* Parameter structure */
-typedef struct
-{
-    LVDBE_Mode_en           OperatingMode;
-    LVDBE_Fs_en             SampleRate;
-    LVM_INT16               EffectLevel;
-    LVDBE_CentreFreq_en     CentreFrequency;
-    LVDBE_FilterSelect_en   HPFSelect;
-    LVDBE_Volume_en         VolumeControl;
-    LVM_INT16               VolumedB;
-    LVM_INT16               HeadroomdB;
-#ifdef SUPPORT_MC
-    LVM_INT16               NrChannels;
-#endif
+typedef struct {
+    LVDBE_Mode_en OperatingMode;
+    LVDBE_Fs_en SampleRate;
+    LVM_INT16 EffectLevel;
+    LVDBE_CentreFreq_en CentreFrequency;
+    LVDBE_FilterSelect_en HPFSelect;
+    LVDBE_Volume_en VolumeControl;
+    LVM_INT16 VolumedB;
+    LVM_INT16 HeadroomdB;
+    LVM_INT16 NrChannels;
 
 } LVDBE_Params_t;
 
 /* Capability structure */
-typedef struct
-{
-      LVM_UINT16              SampleRate;               /* Sampling rate capabilities */
-      LVM_UINT16              CentreFrequency;          /* Centre frequency capabilities */
-      LVM_UINT16              MaxBlockSize;             /* Maximum block size in sample pairs */
+typedef struct {
+    LVM_UINT16 SampleRate;      /* Sampling rate capabilities */
+    LVM_UINT16 CentreFrequency; /* Centre frequency capabilities */
+    LVM_UINT16 MaxBlockSize;    /* Maximum block size in sample pairs */
 } LVDBE_Capabilities_t;
 
 /****************************************************************************************/
@@ -261,75 +216,39 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                 LVDBE_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*    This function is used for memory allocation and free. It can be called in         */
-/*    two ways:                                                                         */
-/*                                                                                      */
-/*        hInstance = NULL                Returns the memory requirements               */
-/*        hInstance = Instance handle        Returns the memory requirements and        */
-/*                                        allocated base addresses for the instance     */
-/*                                                                                      */
-/*    When this function is called for memory allocation (hInstance=NULL) the memory    */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*    When the function is called for free (hInstance = Instance Handle) the memory     */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance                Instance Handle                                            */
-/*  pMemoryTable             Pointer to an empty memory definition table                */
-/*    pCapabilities            Pointer to the default capabilites                       */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS            Succeeded                                                  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*    1.    This function may be interrupted by the LVDBE_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t           hInstance,
-                                   LVDBE_MemTab_t           *pMemoryTable,
-                                   LVDBE_Capabilities_t     *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                 LVDBE_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
 /*    Create and initialisation function for the Bass Enhancement module                */
 /*                                                                                      */
-/*    This function can be used to create an algorithm instance by calling with         */
-/*    hInstance set to NULL. In this case the algorithm returns the new instance        */
-/*    handle.                                                                           */
-/*                                                                                      */
-/*    This function can be used to force a full re-initialisation of the algorithm      */
-/*    by calling with hInstance = Instance Handle. In this case the memory table        */
-/*    should be correct for the instance, this can be ensured by calling the function   */
-/*    LVDBE_Memory before calling this function.                                        */
-/*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance                  Instance handle                                          */
-/*  pMemoryTable             Pointer to the memory definition table                     */
+/*  phInstance               Pointer to instance handle                                 */
 /*  pCapabilities            Pointer to the initialisation capabilities                 */
+/*  pScratch                 Pointer to the bundle scratch buffer                       */
 /*                                                                                      */
 /* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS                Initialisation succeeded                               */
-/*  LVDBE_ALIGNMENTERROR        Instance or scratch memory on incorrect alignment       */
-/*    LVDBE_NULLADDRESS            One or more memory has a NULL pointer                */
+/*  LVDBE_SUCCESS            Initialisation succeeded                                   */
+/*  LVDBE_NULLADDRESS        One or more memory pointers are NULL - malloc failure      */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.     The instance handle is the pointer to the base address of the first memory   */
-/*        region.                                                                       */
-/*    2.    This function must not be interrupted by the LVDBE_Process function         */
+/*  1.    This function must not be interrupted by the LVDBE_Process function           */
 /*                                                                                      */
 /****************************************************************************************/
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+                                 void* pScratch);
 
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t             *phInstance,
-                                   LVDBE_MemTab_t           *pMemoryTable,
-                                   LVDBE_Capabilities_t     *pCapabilities);
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                 LVDBE_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memory allocated by LVDBE_Init, including the instance handle            */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance               Pointer to instance handle                                 */
+/*                                                                                      */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance);
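+
+/* Illustrative usage sketch (not part of the API contract): with the memory-table interface
+ * removed, instance lifetime is expected to look roughly as follows, where Capabilities and
+ * pScratch are placeholders for the caller-provided capabilities and bundle scratch buffer:
+ *
+ *     LVDBE_Handle_t hInstance = LVM_NULL;
+ *     if (LVDBE_Init(&hInstance, &Capabilities, pScratch) == LVDBE_SUCCESS) {
+ *         ...                       // LVDBE_Control / LVDBE_Process calls
+ *         LVDBE_DeInit(&hInstance);
+ *     }
+ */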
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -351,8 +270,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t        hInstance,
-                                            LVDBE_Params_t      *pParams);
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -374,8 +292,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t            hInstance,
-                                              LVDBE_Capabilities_t    *pCapabilities);
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+                                            LVDBE_Capabilities_t* pCapabilities);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -414,8 +332,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t      hInstance,
-                                      LVDBE_Params_t    *pParams);
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -437,9 +354,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t          hInstance,
-                                       const LVM_FLOAT      *pInData,
-                                       LVM_FLOAT            *pOutData,
-                                       LVM_UINT16           NumSamples);
+LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
 #endif /* __LVDBE_H__ */
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
index b364dae..30e1692 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Coeffs.h
@@ -24,7 +24,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVDBE_SCALESHIFT                                    10         /* As a power of 2 */
+#define LVDBE_SCALESHIFT 10 /* As a power of 2 */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -32,289 +32,289 @@
 /*                                                                                  */
 /************************************************************************************/
 
- /* Coefficients for centre frequency 55Hz */
-#define HPF_Fs8000_Fc55_A0                        0.958849f
-#define HPF_Fs8000_Fc55_A1                        (-1.917698f)
-#define HPF_Fs8000_Fc55_A2                        0.958849f
-#define HPF_Fs8000_Fc55_B1                        (-1.939001f)
-#define HPF_Fs8000_Fc55_B2                        0.940807f
-#define HPF_Fs11025_Fc55_A0                       0.966909f
-#define HPF_Fs11025_Fc55_A1                       (-1.933818f)
-#define HPF_Fs11025_Fc55_A2                       0.966909f
-#define HPF_Fs11025_Fc55_B1                       (-1.955732f)
-#define HPF_Fs11025_Fc55_B2                       0.956690f
-#define HPF_Fs12000_Fc55_A0                       0.968650f
-#define HPF_Fs12000_Fc55_A1                       (-1.937300f)
-#define HPF_Fs12000_Fc55_A2                       0.968650f
-#define HPF_Fs12000_Fc55_B1                       (-1.959327f)
-#define HPF_Fs12000_Fc55_B2                       0.960138f
-#define HPF_Fs16000_Fc55_A0                       0.973588f
-#define HPF_Fs16000_Fc55_A1                       (-1.947176f)
-#define HPF_Fs16000_Fc55_A2                       0.973588f
-#define HPF_Fs16000_Fc55_B1                       (-1.969494f)
-#define HPF_Fs16000_Fc55_B2                       0.969952f
-#define HPF_Fs22050_Fc55_A0                       0.977671f
-#define HPF_Fs22050_Fc55_A1                       (-1.955343f)
-#define HPF_Fs22050_Fc55_A2                       0.977671f
-#define HPF_Fs22050_Fc55_B1                       (-1.977863f)
-#define HPF_Fs22050_Fc55_B2                       0.978105f
-#define HPF_Fs24000_Fc55_A0                       0.978551f
-#define HPF_Fs24000_Fc55_A1                       (-1.957102f)
-#define HPF_Fs24000_Fc55_A2                       0.978551f
-#define HPF_Fs24000_Fc55_B1                       (-1.979662f)
-#define HPF_Fs24000_Fc55_B2                       0.979866f
-#define HPF_Fs32000_Fc55_A0                       0.981042f
-#define HPF_Fs32000_Fc55_A1                       (-1.962084f)
-#define HPF_Fs32000_Fc55_A2                       0.981042f
-#define HPF_Fs32000_Fc55_B1                       (-1.984746f)
-#define HPF_Fs32000_Fc55_B2                       0.984861f
-#define HPF_Fs44100_Fc55_A0                       0.983097f
-#define HPF_Fs44100_Fc55_A1                       (-1.966194f)
-#define HPF_Fs44100_Fc55_A2                       0.983097f
-#define HPF_Fs44100_Fc55_B1                       (-1.988931f)
-#define HPF_Fs44100_Fc55_B2                       0.988992f
-#define HPF_Fs48000_Fc55_A0                       0.983539f
-#define HPF_Fs48000_Fc55_A1                       (-1.967079f)
-#define HPF_Fs48000_Fc55_A2                       0.983539f
-#define HPF_Fs48000_Fc55_B1                       (-1.989831f)
-#define HPF_Fs48000_Fc55_B2                       0.989882f
+/* Coefficients for centre frequency 55Hz */
+#define HPF_Fs8000_Fc55_A0 0.958849f
+#define HPF_Fs8000_Fc55_A1 (-1.917698f)
+#define HPF_Fs8000_Fc55_A2 0.958849f
+#define HPF_Fs8000_Fc55_B1 (-1.939001f)
+#define HPF_Fs8000_Fc55_B2 0.940807f
+#define HPF_Fs11025_Fc55_A0 0.966909f
+#define HPF_Fs11025_Fc55_A1 (-1.933818f)
+#define HPF_Fs11025_Fc55_A2 0.966909f
+#define HPF_Fs11025_Fc55_B1 (-1.955732f)
+#define HPF_Fs11025_Fc55_B2 0.956690f
+#define HPF_Fs12000_Fc55_A0 0.968650f
+#define HPF_Fs12000_Fc55_A1 (-1.937300f)
+#define HPF_Fs12000_Fc55_A2 0.968650f
+#define HPF_Fs12000_Fc55_B1 (-1.959327f)
+#define HPF_Fs12000_Fc55_B2 0.960138f
+#define HPF_Fs16000_Fc55_A0 0.973588f
+#define HPF_Fs16000_Fc55_A1 (-1.947176f)
+#define HPF_Fs16000_Fc55_A2 0.973588f
+#define HPF_Fs16000_Fc55_B1 (-1.969494f)
+#define HPF_Fs16000_Fc55_B2 0.969952f
+#define HPF_Fs22050_Fc55_A0 0.977671f
+#define HPF_Fs22050_Fc55_A1 (-1.955343f)
+#define HPF_Fs22050_Fc55_A2 0.977671f
+#define HPF_Fs22050_Fc55_B1 (-1.977863f)
+#define HPF_Fs22050_Fc55_B2 0.978105f
+#define HPF_Fs24000_Fc55_A0 0.978551f
+#define HPF_Fs24000_Fc55_A1 (-1.957102f)
+#define HPF_Fs24000_Fc55_A2 0.978551f
+#define HPF_Fs24000_Fc55_B1 (-1.979662f)
+#define HPF_Fs24000_Fc55_B2 0.979866f
+#define HPF_Fs32000_Fc55_A0 0.981042f
+#define HPF_Fs32000_Fc55_A1 (-1.962084f)
+#define HPF_Fs32000_Fc55_A2 0.981042f
+#define HPF_Fs32000_Fc55_B1 (-1.984746f)
+#define HPF_Fs32000_Fc55_B2 0.984861f
+#define HPF_Fs44100_Fc55_A0 0.983097f
+#define HPF_Fs44100_Fc55_A1 (-1.966194f)
+#define HPF_Fs44100_Fc55_A2 0.983097f
+#define HPF_Fs44100_Fc55_B1 (-1.988931f)
+#define HPF_Fs44100_Fc55_B2 0.988992f
+#define HPF_Fs48000_Fc55_A0 0.983539f
+#define HPF_Fs48000_Fc55_A1 (-1.967079f)
+#define HPF_Fs48000_Fc55_A2 0.983539f
+#define HPF_Fs48000_Fc55_B1 (-1.989831f)
+#define HPF_Fs48000_Fc55_B2 0.989882f
 
-#define HPF_Fs88200_Fc55_A0                       0.985818f
-#define HPF_Fs88200_Fc55_A1                       (-1.971636f)
-#define HPF_Fs88200_Fc55_A2                       0.985818f
-#define HPF_Fs88200_Fc55_B1                       (-1.994466f)
-#define HPF_Fs88200_Fc55_B2                       0.994481f
+#define HPF_Fs88200_Fc55_A0 0.985818f
+#define HPF_Fs88200_Fc55_A1 (-1.971636f)
+#define HPF_Fs88200_Fc55_A2 0.985818f
+#define HPF_Fs88200_Fc55_B1 (-1.994466f)
+#define HPF_Fs88200_Fc55_B2 0.994481f
 
-#define HPF_Fs96000_Fc55_A0                       0.986040f
-#define HPF_Fs96000_Fc55_A1                       (-1.972080f)
-#define HPF_Fs96000_Fc55_A2                       0.986040f
-#define HPF_Fs96000_Fc55_B1                       (-1.994915f)
-#define HPF_Fs96000_Fc55_B2                       0.994928f
+#define HPF_Fs96000_Fc55_A0 0.986040f
+#define HPF_Fs96000_Fc55_A1 (-1.972080f)
+#define HPF_Fs96000_Fc55_A2 0.986040f
+#define HPF_Fs96000_Fc55_B1 (-1.994915f)
+#define HPF_Fs96000_Fc55_B2 0.994928f
 
-#define HPF_Fs176400_Fc55_A0                      0.987183f
-#define HPF_Fs176400_Fc55_A1                      (-1.974366f)
-#define HPF_Fs176400_Fc55_A2                      0.987183f
-#define HPF_Fs176400_Fc55_B1                      (-1.997233f)
-#define HPF_Fs176400_Fc55_B2                      0.997237f
+#define HPF_Fs176400_Fc55_A0 0.987183f
+#define HPF_Fs176400_Fc55_A1 (-1.974366f)
+#define HPF_Fs176400_Fc55_A2 0.987183f
+#define HPF_Fs176400_Fc55_B1 (-1.997233f)
+#define HPF_Fs176400_Fc55_B2 0.997237f
 
-#define HPF_Fs192000_Fc55_A0                      0.987294f
-#define HPF_Fs192000_Fc55_A1                      (-1.974588f)
-#define HPF_Fs192000_Fc55_A2                      0.987294f
-#define HPF_Fs192000_Fc55_B1                      (-1.997458f)
-#define HPF_Fs192000_Fc55_B2                      0.997461f
+#define HPF_Fs192000_Fc55_A0 0.987294f
+#define HPF_Fs192000_Fc55_A1 (-1.974588f)
+#define HPF_Fs192000_Fc55_A2 0.987294f
+#define HPF_Fs192000_Fc55_B1 (-1.997458f)
+#define HPF_Fs192000_Fc55_B2 0.997461f
 
- /* Coefficients for centre frequency 66Hz */
-#define HPF_Fs8000_Fc66_A0                        0.953016f
-#define HPF_Fs8000_Fc66_A1                        (-1.906032f)
-#define HPF_Fs8000_Fc66_A2                        0.953016f
-#define HPF_Fs8000_Fc66_B1                        (-1.926810f)
-#define HPF_Fs8000_Fc66_B2                        0.929396f
-#define HPF_Fs11025_Fc66_A0                       0.962638f
-#define HPF_Fs11025_Fc66_A1                       (-1.925275f)
-#define HPF_Fs11025_Fc66_A2                       0.962638f
-#define HPF_Fs11025_Fc66_B1                       (-1.946881f)
-#define HPF_Fs11025_Fc66_B2                       0.948256f
-#define HPF_Fs12000_Fc66_A0                       0.964718f
-#define HPF_Fs12000_Fc66_A1                       (-1.929435f)
-#define HPF_Fs12000_Fc66_A2                       0.964718f
-#define HPF_Fs12000_Fc66_B1                       (-1.951196f)
-#define HPF_Fs12000_Fc66_B2                       0.952359f
-#define HPF_Fs16000_Fc66_A0                       0.970622f
-#define HPF_Fs16000_Fc66_A1                       (-1.941244f)
-#define HPF_Fs16000_Fc66_A2                       0.970622f
-#define HPF_Fs16000_Fc66_B1                       (-1.963394f)
-#define HPF_Fs16000_Fc66_B2                       0.964052f
-#define HPF_Fs22050_Fc66_A0                       0.975509f
-#define HPF_Fs22050_Fc66_A1                       (-1.951019f)
-#define HPF_Fs22050_Fc66_A2                       0.975509f
-#define HPF_Fs22050_Fc66_B1                       (-1.973436f)
-#define HPF_Fs22050_Fc66_B2                       0.973784f
-#define HPF_Fs24000_Fc66_A0                       0.976563f
-#define HPF_Fs24000_Fc66_A1                       (-1.953125f)
-#define HPF_Fs24000_Fc66_A2                       0.976563f
-#define HPF_Fs24000_Fc66_B1                       (-1.975594f)
-#define HPF_Fs24000_Fc66_B2                       0.975889f
-#define HPF_Fs32000_Fc66_A0                       0.979547f
-#define HPF_Fs32000_Fc66_A1                       (-1.959093f)
-#define HPF_Fs32000_Fc66_A2                       0.979547f
-#define HPF_Fs32000_Fc66_B1                       (-1.981695f)
-#define HPF_Fs32000_Fc66_B2                       0.981861f
-#define HPF_Fs44100_Fc66_A0                       0.982010f
-#define HPF_Fs44100_Fc66_A1                       (-1.964019f)
-#define HPF_Fs44100_Fc66_A2                       0.982010f
-#define HPF_Fs44100_Fc66_B1                       (-1.986718f)
-#define HPF_Fs44100_Fc66_B2                       0.986805f
-#define HPF_Fs48000_Fc66_A0                       0.982540f
-#define HPF_Fs48000_Fc66_A1                       (-1.965079f)
-#define HPF_Fs48000_Fc66_A2                       0.982540f
-#define HPF_Fs48000_Fc66_B1                       (-1.987797f)
-#define HPF_Fs48000_Fc66_B2                       0.987871f
+/* Coefficients for centre frequency 66Hz */
+#define HPF_Fs8000_Fc66_A0 0.953016f
+#define HPF_Fs8000_Fc66_A1 (-1.906032f)
+#define HPF_Fs8000_Fc66_A2 0.953016f
+#define HPF_Fs8000_Fc66_B1 (-1.926810f)
+#define HPF_Fs8000_Fc66_B2 0.929396f
+#define HPF_Fs11025_Fc66_A0 0.962638f
+#define HPF_Fs11025_Fc66_A1 (-1.925275f)
+#define HPF_Fs11025_Fc66_A2 0.962638f
+#define HPF_Fs11025_Fc66_B1 (-1.946881f)
+#define HPF_Fs11025_Fc66_B2 0.948256f
+#define HPF_Fs12000_Fc66_A0 0.964718f
+#define HPF_Fs12000_Fc66_A1 (-1.929435f)
+#define HPF_Fs12000_Fc66_A2 0.964718f
+#define HPF_Fs12000_Fc66_B1 (-1.951196f)
+#define HPF_Fs12000_Fc66_B2 0.952359f
+#define HPF_Fs16000_Fc66_A0 0.970622f
+#define HPF_Fs16000_Fc66_A1 (-1.941244f)
+#define HPF_Fs16000_Fc66_A2 0.970622f
+#define HPF_Fs16000_Fc66_B1 (-1.963394f)
+#define HPF_Fs16000_Fc66_B2 0.964052f
+#define HPF_Fs22050_Fc66_A0 0.975509f
+#define HPF_Fs22050_Fc66_A1 (-1.951019f)
+#define HPF_Fs22050_Fc66_A2 0.975509f
+#define HPF_Fs22050_Fc66_B1 (-1.973436f)
+#define HPF_Fs22050_Fc66_B2 0.973784f
+#define HPF_Fs24000_Fc66_A0 0.976563f
+#define HPF_Fs24000_Fc66_A1 (-1.953125f)
+#define HPF_Fs24000_Fc66_A2 0.976563f
+#define HPF_Fs24000_Fc66_B1 (-1.975594f)
+#define HPF_Fs24000_Fc66_B2 0.975889f
+#define HPF_Fs32000_Fc66_A0 0.979547f
+#define HPF_Fs32000_Fc66_A1 (-1.959093f)
+#define HPF_Fs32000_Fc66_A2 0.979547f
+#define HPF_Fs32000_Fc66_B1 (-1.981695f)
+#define HPF_Fs32000_Fc66_B2 0.981861f
+#define HPF_Fs44100_Fc66_A0 0.982010f
+#define HPF_Fs44100_Fc66_A1 (-1.964019f)
+#define HPF_Fs44100_Fc66_A2 0.982010f
+#define HPF_Fs44100_Fc66_B1 (-1.986718f)
+#define HPF_Fs44100_Fc66_B2 0.986805f
+#define HPF_Fs48000_Fc66_A0 0.982540f
+#define HPF_Fs48000_Fc66_A1 (-1.965079f)
+#define HPF_Fs48000_Fc66_A2 0.982540f
+#define HPF_Fs48000_Fc66_B1 (-1.987797f)
+#define HPF_Fs48000_Fc66_B2 0.987871f
 
-#define HPF_Fs88200_Fc66_A0                       0.985273f
-#define HPF_Fs88200_Fc66_A1                       (-1.970546f)
-#define HPF_Fs88200_Fc66_A2                       0.985273f
-#define HPF_Fs88200_Fc66_B1                       (-1.993359f)
-#define HPF_Fs88200_Fc66_B2                       0.993381f
+#define HPF_Fs88200_Fc66_A0 0.985273f
+#define HPF_Fs88200_Fc66_A1 (-1.970546f)
+#define HPF_Fs88200_Fc66_A2 0.985273f
+#define HPF_Fs88200_Fc66_B1 (-1.993359f)
+#define HPF_Fs88200_Fc66_B2 0.993381f
 
-#define HPF_Fs96000_Fc66_A0                       0.985539f
-#define HPF_Fs96000_Fc66_A1                       (-1.971077f)
-#define HPF_Fs96000_Fc66_A2                       0.985539f
-#define HPF_Fs96000_Fc66_B1                       (-1.993898f)
-#define HPF_Fs96000_Fc66_B2                       0.993917f
+#define HPF_Fs96000_Fc66_A0 0.985539f
+#define HPF_Fs96000_Fc66_A1 (-1.971077f)
+#define HPF_Fs96000_Fc66_A2 0.985539f
+#define HPF_Fs96000_Fc66_B1 (-1.993898f)
+#define HPF_Fs96000_Fc66_B2 0.993917f
 
-#define HPF_Fs176400_Fc66_A0                      0.986910f
-#define HPF_Fs176400_Fc66_A1                      (-1.973820f)
-#define HPF_Fs176400_Fc66_A2                      0.986910f
-#define HPF_Fs176400_Fc66_B1                      (-1.996679f)
-#define HPF_Fs176400_Fc66_B2                      0.996685f
+#define HPF_Fs176400_Fc66_A0 0.986910f
+#define HPF_Fs176400_Fc66_A1 (-1.973820f)
+#define HPF_Fs176400_Fc66_A2 0.986910f
+#define HPF_Fs176400_Fc66_B1 (-1.996679f)
+#define HPF_Fs176400_Fc66_B2 0.996685f
 
-#define HPF_Fs192000_Fc66_A0                      0.987043f
-#define HPF_Fs192000_Fc66_A1                      (-1.974086f)
-#define HPF_Fs192000_Fc66_A2                      0.987043f
-#define HPF_Fs192000_Fc66_B1                      (-1.996949f)
-#define HPF_Fs192000_Fc66_B2                      0.996954f
+#define HPF_Fs192000_Fc66_A0 0.987043f
+#define HPF_Fs192000_Fc66_A1 (-1.974086f)
+#define HPF_Fs192000_Fc66_A2 0.987043f
+#define HPF_Fs192000_Fc66_B1 (-1.996949f)
+#define HPF_Fs192000_Fc66_B2 0.996954f
 
 /* Coefficients for centre frequency 78Hz */
-#define HPF_Fs8000_Fc78_A0                        0.946693f
-#define HPF_Fs8000_Fc78_A1                        (-1.893387f)
-#define HPF_Fs8000_Fc78_A2                        0.946693f
-#define HPF_Fs8000_Fc78_B1                        (-1.913517f)
-#define HPF_Fs8000_Fc78_B2                        0.917105f
-#define HPF_Fs11025_Fc78_A0                       0.957999f
-#define HPF_Fs11025_Fc78_A1                       (-1.915998f)
-#define HPF_Fs11025_Fc78_A2                       0.957999f
-#define HPF_Fs11025_Fc78_B1                       (-1.937229f)
-#define HPF_Fs11025_Fc78_B2                       0.939140f
-#define HPF_Fs12000_Fc78_A0                       0.960446f
-#define HPF_Fs12000_Fc78_A1                       (-1.920892f)
-#define HPF_Fs12000_Fc78_A2                       0.960446f
-#define HPF_Fs12000_Fc78_B1                       (-1.942326f)
-#define HPF_Fs12000_Fc78_B2                       0.943944f
-#define HPF_Fs16000_Fc78_A0                       0.967397f
-#define HPF_Fs16000_Fc78_A1                       (-1.934794f)
-#define HPF_Fs16000_Fc78_A2                       0.967397f
-#define HPF_Fs16000_Fc78_B1                       (-1.956740f)
-#define HPF_Fs16000_Fc78_B2                       0.957656f
-#define HPF_Fs22050_Fc78_A0                       0.973156f
-#define HPF_Fs22050_Fc78_A1                       (-1.946313f)
-#define HPF_Fs22050_Fc78_A2                       0.973156f
-#define HPF_Fs22050_Fc78_B1                       (-1.968607f)
-#define HPF_Fs22050_Fc78_B2                       0.969092f
-#define HPF_Fs24000_Fc78_A0                       0.974398f
-#define HPF_Fs24000_Fc78_A1                       (-1.948797f)
-#define HPF_Fs24000_Fc78_A2                       0.974398f
-#define HPF_Fs24000_Fc78_B1                       (-1.971157f)
-#define HPF_Fs24000_Fc78_B2                       0.971568f
-#define HPF_Fs32000_Fc78_A0                       0.977918f
-#define HPF_Fs32000_Fc78_A1                       (-1.955836f)
-#define HPF_Fs32000_Fc78_A2                       0.977918f
-#define HPF_Fs32000_Fc78_B1                       (-1.978367f)
-#define HPF_Fs32000_Fc78_B2                       0.978599f
-#define HPF_Fs44100_Fc78_A0                       0.980824f
-#define HPF_Fs44100_Fc78_A1                       (-1.961649f)
-#define HPF_Fs44100_Fc78_A2                       0.980824f
-#define HPF_Fs44100_Fc78_B1                       (-1.984303f)
-#define HPF_Fs44100_Fc78_B2                       0.984425f
-#define HPF_Fs48000_Fc78_A0                       0.981450f
-#define HPF_Fs48000_Fc78_A1                       (-1.962900f)
-#define HPF_Fs48000_Fc78_A2                       0.981450f
-#define HPF_Fs48000_Fc78_B1                       (-1.985578f)
-#define HPF_Fs48000_Fc78_B2                       0.985681f
+#define HPF_Fs8000_Fc78_A0 0.946693f
+#define HPF_Fs8000_Fc78_A1 (-1.893387f)
+#define HPF_Fs8000_Fc78_A2 0.946693f
+#define HPF_Fs8000_Fc78_B1 (-1.913517f)
+#define HPF_Fs8000_Fc78_B2 0.917105f
+#define HPF_Fs11025_Fc78_A0 0.957999f
+#define HPF_Fs11025_Fc78_A1 (-1.915998f)
+#define HPF_Fs11025_Fc78_A2 0.957999f
+#define HPF_Fs11025_Fc78_B1 (-1.937229f)
+#define HPF_Fs11025_Fc78_B2 0.939140f
+#define HPF_Fs12000_Fc78_A0 0.960446f
+#define HPF_Fs12000_Fc78_A1 (-1.920892f)
+#define HPF_Fs12000_Fc78_A2 0.960446f
+#define HPF_Fs12000_Fc78_B1 (-1.942326f)
+#define HPF_Fs12000_Fc78_B2 0.943944f
+#define HPF_Fs16000_Fc78_A0 0.967397f
+#define HPF_Fs16000_Fc78_A1 (-1.934794f)
+#define HPF_Fs16000_Fc78_A2 0.967397f
+#define HPF_Fs16000_Fc78_B1 (-1.956740f)
+#define HPF_Fs16000_Fc78_B2 0.957656f
+#define HPF_Fs22050_Fc78_A0 0.973156f
+#define HPF_Fs22050_Fc78_A1 (-1.946313f)
+#define HPF_Fs22050_Fc78_A2 0.973156f
+#define HPF_Fs22050_Fc78_B1 (-1.968607f)
+#define HPF_Fs22050_Fc78_B2 0.969092f
+#define HPF_Fs24000_Fc78_A0 0.974398f
+#define HPF_Fs24000_Fc78_A1 (-1.948797f)
+#define HPF_Fs24000_Fc78_A2 0.974398f
+#define HPF_Fs24000_Fc78_B1 (-1.971157f)
+#define HPF_Fs24000_Fc78_B2 0.971568f
+#define HPF_Fs32000_Fc78_A0 0.977918f
+#define HPF_Fs32000_Fc78_A1 (-1.955836f)
+#define HPF_Fs32000_Fc78_A2 0.977918f
+#define HPF_Fs32000_Fc78_B1 (-1.978367f)
+#define HPF_Fs32000_Fc78_B2 0.978599f
+#define HPF_Fs44100_Fc78_A0 0.980824f
+#define HPF_Fs44100_Fc78_A1 (-1.961649f)
+#define HPF_Fs44100_Fc78_A2 0.980824f
+#define HPF_Fs44100_Fc78_B1 (-1.984303f)
+#define HPF_Fs44100_Fc78_B2 0.984425f
+#define HPF_Fs48000_Fc78_A0 0.981450f
+#define HPF_Fs48000_Fc78_A1 (-1.962900f)
+#define HPF_Fs48000_Fc78_A2 0.981450f
+#define HPF_Fs48000_Fc78_B1 (-1.985578f)
+#define HPF_Fs48000_Fc78_B2 0.985681f
 
-#define HPF_Fs88200_Fc78_A0                       0.984678f
-#define HPF_Fs88200_Fc78_A1                       (-1.969356f)
-#define HPF_Fs88200_Fc78_A2                       0.984678f
-#define HPF_Fs88200_Fc78_B1                       (-1.992151f)
-#define HPF_Fs88200_Fc78_B2                       0.992182f
+#define HPF_Fs88200_Fc78_A0 0.984678f
+#define HPF_Fs88200_Fc78_A1 (-1.969356f)
+#define HPF_Fs88200_Fc78_A2 0.984678f
+#define HPF_Fs88200_Fc78_B1 (-1.992151f)
+#define HPF_Fs88200_Fc78_B2 0.992182f
 
-#define HPF_Fs96000_Fc78_A0                       0.984992f
-#define HPF_Fs96000_Fc78_A1                       (-1.969984f)
-#define HPF_Fs96000_Fc78_A2                       0.984992f
-#define HPF_Fs96000_Fc78_B1                       (-1.992789f)
-#define HPF_Fs96000_Fc78_B2                       0.992815f
+#define HPF_Fs96000_Fc78_A0 0.984992f
+#define HPF_Fs96000_Fc78_A1 (-1.969984f)
+#define HPF_Fs96000_Fc78_A2 0.984992f
+#define HPF_Fs96000_Fc78_B1 (-1.992789f)
+#define HPF_Fs96000_Fc78_B2 0.992815f
 
-#define HPF_Fs176400_Fc78_A0                      0.986612f
-#define HPF_Fs176400_Fc78_A1                      (-1.973224f)
-#define HPF_Fs176400_Fc78_A2                      0.986612f
-#define HPF_Fs176400_Fc78_B1                      (-1.996076f)
-#define HPF_Fs176400_Fc78_B2                      0.996083f
+#define HPF_Fs176400_Fc78_A0 0.986612f
+#define HPF_Fs176400_Fc78_A1 (-1.973224f)
+#define HPF_Fs176400_Fc78_A2 0.986612f
+#define HPF_Fs176400_Fc78_B1 (-1.996076f)
+#define HPF_Fs176400_Fc78_B2 0.996083f
 
-#define HPF_Fs192000_Fc78_A0                      0.986769f
-#define HPF_Fs192000_Fc78_A1                      (-1.973539f)
-#define HPF_Fs192000_Fc78_A2                      0.986769f
-#define HPF_Fs192000_Fc78_B1                      (-1.996394f)
-#define HPF_Fs192000_Fc78_B2                      0.996401f
+#define HPF_Fs192000_Fc78_A0 0.986769f
+#define HPF_Fs192000_Fc78_A1 (-1.973539f)
+#define HPF_Fs192000_Fc78_A2 0.986769f
+#define HPF_Fs192000_Fc78_B1 (-1.996394f)
+#define HPF_Fs192000_Fc78_B2 0.996401f
 
 /* Coefficients for centre frequency 90Hz */
-#define HPF_Fs8000_Fc90_A0                       0.940412f
-#define HPF_Fs8000_Fc90_A1                       (-1.880825f)
-#define HPF_Fs8000_Fc90_A2                       0.940412f
-#define HPF_Fs8000_Fc90_B1                       (-1.900231f)
-#define HPF_Fs8000_Fc90_B2                       0.904977f
-#define HPF_Fs11025_Fc90_A0                      0.953383f
-#define HPF_Fs11025_Fc90_A1                      (-1.906766f)
-#define HPF_Fs11025_Fc90_A2                      0.953383f
-#define HPF_Fs11025_Fc90_B1                      (-1.927579f)
-#define HPF_Fs11025_Fc90_B2                      0.930111f
-#define HPF_Fs12000_Fc90_A0                      0.956193f
-#define HPF_Fs12000_Fc90_A1                      (-1.912387f)
-#define HPF_Fs12000_Fc90_A2                      0.956193f
-#define HPF_Fs12000_Fc90_B1                      (-1.933459f)
-#define HPF_Fs12000_Fc90_B2                      0.935603f
-#define HPF_Fs16000_Fc90_A0                      0.964183f
-#define HPF_Fs16000_Fc90_A1                      (-1.928365f)
-#define HPF_Fs16000_Fc90_A2                      0.964183f
-#define HPF_Fs16000_Fc90_B1                      (-1.950087f)
-#define HPF_Fs16000_Fc90_B2                      0.951303f
-#define HPF_Fs22050_Fc90_A0                      0.970809f
-#define HPF_Fs22050_Fc90_A1                      (-1.941618f)
-#define HPF_Fs22050_Fc90_A2                      0.970809f
-#define HPF_Fs22050_Fc90_B1                      (-1.963778f)
-#define HPF_Fs22050_Fc90_B2                      0.964423f
-#define HPF_Fs24000_Fc90_A0                      0.972239f
-#define HPF_Fs24000_Fc90_A1                      (-1.944477f)
-#define HPF_Fs24000_Fc90_A2                      0.972239f
-#define HPF_Fs24000_Fc90_B1                      (-1.966721f)
-#define HPF_Fs24000_Fc90_B2                      0.967266f
-#define HPF_Fs32000_Fc90_A0                      0.976292f
-#define HPF_Fs32000_Fc90_A1                      (-1.952584f)
-#define HPF_Fs32000_Fc90_A2                      0.976292f
-#define HPF_Fs32000_Fc90_B1                      (-1.975040f)
-#define HPF_Fs32000_Fc90_B2                      0.975347f
-#define HPF_Fs44100_Fc90_A0                      0.979641f
-#define HPF_Fs44100_Fc90_A1                      (-1.959282f)
-#define HPF_Fs44100_Fc90_A2                      0.979641f
-#define HPF_Fs44100_Fc90_B1                      (-1.981888f)
-#define HPF_Fs44100_Fc90_B2                      0.982050f
-#define HPF_Fs48000_Fc90_A0                      0.980362f
-#define HPF_Fs48000_Fc90_A1                      (-1.960724f)
-#define HPF_Fs48000_Fc90_A2                      0.980362f
-#define HPF_Fs48000_Fc90_B1                      (-1.983359f)
-#define HPF_Fs48000_Fc90_B2                      0.983497f
+#define HPF_Fs8000_Fc90_A0 0.940412f
+#define HPF_Fs8000_Fc90_A1 (-1.880825f)
+#define HPF_Fs8000_Fc90_A2 0.940412f
+#define HPF_Fs8000_Fc90_B1 (-1.900231f)
+#define HPF_Fs8000_Fc90_B2 0.904977f
+#define HPF_Fs11025_Fc90_A0 0.953383f
+#define HPF_Fs11025_Fc90_A1 (-1.906766f)
+#define HPF_Fs11025_Fc90_A2 0.953383f
+#define HPF_Fs11025_Fc90_B1 (-1.927579f)
+#define HPF_Fs11025_Fc90_B2 0.930111f
+#define HPF_Fs12000_Fc90_A0 0.956193f
+#define HPF_Fs12000_Fc90_A1 (-1.912387f)
+#define HPF_Fs12000_Fc90_A2 0.956193f
+#define HPF_Fs12000_Fc90_B1 (-1.933459f)
+#define HPF_Fs12000_Fc90_B2 0.935603f
+#define HPF_Fs16000_Fc90_A0 0.964183f
+#define HPF_Fs16000_Fc90_A1 (-1.928365f)
+#define HPF_Fs16000_Fc90_A2 0.964183f
+#define HPF_Fs16000_Fc90_B1 (-1.950087f)
+#define HPF_Fs16000_Fc90_B2 0.951303f
+#define HPF_Fs22050_Fc90_A0 0.970809f
+#define HPF_Fs22050_Fc90_A1 (-1.941618f)
+#define HPF_Fs22050_Fc90_A2 0.970809f
+#define HPF_Fs22050_Fc90_B1 (-1.963778f)
+#define HPF_Fs22050_Fc90_B2 0.964423f
+#define HPF_Fs24000_Fc90_A0 0.972239f
+#define HPF_Fs24000_Fc90_A1 (-1.944477f)
+#define HPF_Fs24000_Fc90_A2 0.972239f
+#define HPF_Fs24000_Fc90_B1 (-1.966721f)
+#define HPF_Fs24000_Fc90_B2 0.967266f
+#define HPF_Fs32000_Fc90_A0 0.976292f
+#define HPF_Fs32000_Fc90_A1 (-1.952584f)
+#define HPF_Fs32000_Fc90_A2 0.976292f
+#define HPF_Fs32000_Fc90_B1 (-1.975040f)
+#define HPF_Fs32000_Fc90_B2 0.975347f
+#define HPF_Fs44100_Fc90_A0 0.979641f
+#define HPF_Fs44100_Fc90_A1 (-1.959282f)
+#define HPF_Fs44100_Fc90_A2 0.979641f
+#define HPF_Fs44100_Fc90_B1 (-1.981888f)
+#define HPF_Fs44100_Fc90_B2 0.982050f
+#define HPF_Fs48000_Fc90_A0 0.980362f
+#define HPF_Fs48000_Fc90_A1 (-1.960724f)
+#define HPF_Fs48000_Fc90_A2 0.980362f
+#define HPF_Fs48000_Fc90_B1 (-1.983359f)
+#define HPF_Fs48000_Fc90_B2 0.983497f
 
-#define HPF_Fs88200_Fc90_A0                       0.984084f
-#define HPF_Fs88200_Fc90_A1                       (-1.968168f)
-#define HPF_Fs88200_Fc90_A2                       0.984084f
-#define HPF_Fs88200_Fc90_B1                       (-1.990944f)
-#define HPF_Fs88200_Fc90_B2                       0.990985f
+#define HPF_Fs88200_Fc90_A0 0.984084f
+#define HPF_Fs88200_Fc90_A1 (-1.968168f)
+#define HPF_Fs88200_Fc90_A2 0.984084f
+#define HPF_Fs88200_Fc90_B1 (-1.990944f)
+#define HPF_Fs88200_Fc90_B2 0.990985f
 
-#define HPF_Fs96000_Fc90_A0                       0.984446f
-#define HPF_Fs96000_Fc90_A1                       (-1.968892f)
-#define HPF_Fs96000_Fc90_A2                       0.984446f
-#define HPF_Fs96000_Fc90_B1                       (-1.991680f)
-#define HPF_Fs96000_Fc90_B2                       0.991714f
+#define HPF_Fs96000_Fc90_A0 0.984446f
+#define HPF_Fs96000_Fc90_A1 (-1.968892f)
+#define HPF_Fs96000_Fc90_A2 0.984446f
+#define HPF_Fs96000_Fc90_B1 (-1.991680f)
+#define HPF_Fs96000_Fc90_B2 0.991714f
 
-#define HPF_Fs176400_Fc90_A0                      0.986314f
-#define HPF_Fs176400_Fc90_A1                      (-1.972629f)
-#define HPF_Fs176400_Fc90_A2                      0.986314f
-#define HPF_Fs176400_Fc90_B1                      (-1.995472f)
-#define HPF_Fs176400_Fc90_B2                      0.995482f
+#define HPF_Fs176400_Fc90_A0 0.986314f
+#define HPF_Fs176400_Fc90_A1 (-1.972629f)
+#define HPF_Fs176400_Fc90_A2 0.986314f
+#define HPF_Fs176400_Fc90_B1 (-1.995472f)
+#define HPF_Fs176400_Fc90_B2 0.995482f
 
-#define HPF_Fs192000_Fc90_A0                      0.986496f
-#define HPF_Fs192000_Fc90_A1                      (-1.972992f)
-#define HPF_Fs192000_Fc90_A2                      0.986496f
-#define HPF_Fs192000_Fc90_B1                      (-1.995840f)
-#define HPF_Fs192000_Fc90_B2                      0.995848f
+#define HPF_Fs192000_Fc90_A0 0.986496f
+#define HPF_Fs192000_Fc90_A1 (-1.972992f)
+#define HPF_Fs192000_Fc90_A2 0.986496f
+#define HPF_Fs192000_Fc90_B1 (-1.995840f)
+#define HPF_Fs192000_Fc90_B2 0.995848f
 
 /************************************************************************************/
 /*                                                                                  */
@@ -323,288 +323,288 @@
 /************************************************************************************/
 
 /* Coefficients for centre frequency 55Hz */
-#define BPF_Fs8000_Fc55_A0                       0.009197f
-#define BPF_Fs8000_Fc55_A1                       0.000000f
-#define BPF_Fs8000_Fc55_A2                       (-0.009197f)
-#define BPF_Fs8000_Fc55_B1                       (-1.979545f)
-#define BPF_Fs8000_Fc55_B2                       0.981393f
-#define BPF_Fs11025_Fc55_A0                      0.006691f
-#define BPF_Fs11025_Fc55_A1                      0.000000f
-#define BPF_Fs11025_Fc55_A2                      (-0.006691f)
-#define BPF_Fs11025_Fc55_B1                      (-1.985488f)
-#define BPF_Fs11025_Fc55_B2                      0.986464f
-#define BPF_Fs12000_Fc55_A0                      0.006150f
-#define BPF_Fs12000_Fc55_A1                      0.000000f
-#define BPF_Fs12000_Fc55_A2                      (-0.006150f)
-#define BPF_Fs12000_Fc55_B1                      (-1.986733f)
-#define BPF_Fs12000_Fc55_B2                      0.987557f
-#define BPF_Fs16000_Fc55_A0                      0.004620f
-#define BPF_Fs16000_Fc55_A1                      0.000000f
-#define BPF_Fs16000_Fc55_A2                      (-0.004620f)
-#define BPF_Fs16000_Fc55_B1                      (-1.990189f)
-#define BPF_Fs16000_Fc55_B2                      0.990653f
-#define BPF_Fs22050_Fc55_A0                      0.003357f
-#define BPF_Fs22050_Fc55_A1                      0.000000f
-#define BPF_Fs22050_Fc55_A2                      (-0.003357f)
-#define BPF_Fs22050_Fc55_B1                      (-1.992964f)
-#define BPF_Fs22050_Fc55_B2                      0.993209f
-#define BPF_Fs24000_Fc55_A0                      0.003085f
-#define BPF_Fs24000_Fc55_A1                      0.000000f
-#define BPF_Fs24000_Fc55_A2                      (-0.003085f)
-#define BPF_Fs24000_Fc55_B1                      (-1.993552f)
-#define BPF_Fs24000_Fc55_B2                      0.993759f
-#define BPF_Fs32000_Fc55_A0                      0.002315f
-#define BPF_Fs32000_Fc55_A1                      0.000000f
-#define BPF_Fs32000_Fc55_A2                      (-0.002315f)
-#define BPF_Fs32000_Fc55_B1                      (-1.995199f)
-#define BPF_Fs32000_Fc55_B2                      0.995316f
-#define BPF_Fs44100_Fc55_A0                      0.001681f
-#define BPF_Fs44100_Fc55_A1                      0.000000f
-#define BPF_Fs44100_Fc55_A2                      (-0.001681f)
-#define BPF_Fs44100_Fc55_B1                      (-1.996537f)
-#define BPF_Fs44100_Fc55_B2                      0.996599f
-#define BPF_Fs48000_Fc55_A0                      0.001545f
-#define BPF_Fs48000_Fc55_A1                      0.000000f
-#define BPF_Fs48000_Fc55_A2                      (-0.001545f)
-#define BPF_Fs48000_Fc55_B1                      (-1.996823f)
-#define BPF_Fs48000_Fc55_B2                      0.996875f
+#define BPF_Fs8000_Fc55_A0 0.009197f
+#define BPF_Fs8000_Fc55_A1 0.000000f
+#define BPF_Fs8000_Fc55_A2 (-0.009197f)
+#define BPF_Fs8000_Fc55_B1 (-1.979545f)
+#define BPF_Fs8000_Fc55_B2 0.981393f
+#define BPF_Fs11025_Fc55_A0 0.006691f
+#define BPF_Fs11025_Fc55_A1 0.000000f
+#define BPF_Fs11025_Fc55_A2 (-0.006691f)
+#define BPF_Fs11025_Fc55_B1 (-1.985488f)
+#define BPF_Fs11025_Fc55_B2 0.986464f
+#define BPF_Fs12000_Fc55_A0 0.006150f
+#define BPF_Fs12000_Fc55_A1 0.000000f
+#define BPF_Fs12000_Fc55_A2 (-0.006150f)
+#define BPF_Fs12000_Fc55_B1 (-1.986733f)
+#define BPF_Fs12000_Fc55_B2 0.987557f
+#define BPF_Fs16000_Fc55_A0 0.004620f
+#define BPF_Fs16000_Fc55_A1 0.000000f
+#define BPF_Fs16000_Fc55_A2 (-0.004620f)
+#define BPF_Fs16000_Fc55_B1 (-1.990189f)
+#define BPF_Fs16000_Fc55_B2 0.990653f
+#define BPF_Fs22050_Fc55_A0 0.003357f
+#define BPF_Fs22050_Fc55_A1 0.000000f
+#define BPF_Fs22050_Fc55_A2 (-0.003357f)
+#define BPF_Fs22050_Fc55_B1 (-1.992964f)
+#define BPF_Fs22050_Fc55_B2 0.993209f
+#define BPF_Fs24000_Fc55_A0 0.003085f
+#define BPF_Fs24000_Fc55_A1 0.000000f
+#define BPF_Fs24000_Fc55_A2 (-0.003085f)
+#define BPF_Fs24000_Fc55_B1 (-1.993552f)
+#define BPF_Fs24000_Fc55_B2 0.993759f
+#define BPF_Fs32000_Fc55_A0 0.002315f
+#define BPF_Fs32000_Fc55_A1 0.000000f
+#define BPF_Fs32000_Fc55_A2 (-0.002315f)
+#define BPF_Fs32000_Fc55_B1 (-1.995199f)
+#define BPF_Fs32000_Fc55_B2 0.995316f
+#define BPF_Fs44100_Fc55_A0 0.001681f
+#define BPF_Fs44100_Fc55_A1 0.000000f
+#define BPF_Fs44100_Fc55_A2 (-0.001681f)
+#define BPF_Fs44100_Fc55_B1 (-1.996537f)
+#define BPF_Fs44100_Fc55_B2 0.996599f
+#define BPF_Fs48000_Fc55_A0 0.001545f
+#define BPF_Fs48000_Fc55_A1 0.000000f
+#define BPF_Fs48000_Fc55_A2 (-0.001545f)
+#define BPF_Fs48000_Fc55_B1 (-1.996823f)
+#define BPF_Fs48000_Fc55_B2 0.996875f
 
-#define BPF_Fs88200_Fc55_A0                      0.000831f
-#define BPF_Fs88200_Fc55_A1                      0.000000f
-#define BPF_Fs88200_Fc55_A2                      (-0.000831f)
-#define BPF_Fs88200_Fc55_B1                      (-1.998321f)
-#define BPF_Fs88200_Fc55_B2                      0.998338f
+#define BPF_Fs88200_Fc55_A0 0.000831f
+#define BPF_Fs88200_Fc55_A1 0.000000f
+#define BPF_Fs88200_Fc55_A2 (-0.000831f)
+#define BPF_Fs88200_Fc55_B1 (-1.998321f)
+#define BPF_Fs88200_Fc55_B2 0.998338f
 
-#define BPF_Fs96000_Fc55_A0                      0.000762f
-#define BPF_Fs96000_Fc55_A1                      0.000000f
-#define BPF_Fs96000_Fc55_A2                      (-0.000762f)
-#define BPF_Fs96000_Fc55_B1                      (-1.998461f)
-#define BPF_Fs96000_Fc55_B2                      0.998477f
+#define BPF_Fs96000_Fc55_A0 0.000762f
+#define BPF_Fs96000_Fc55_A1 0.000000f
+#define BPF_Fs96000_Fc55_A2 (-0.000762f)
+#define BPF_Fs96000_Fc55_B1 (-1.998461f)
+#define BPF_Fs96000_Fc55_B2 0.998477f
 
-#define BPF_Fs176400_Fc55_A0                     0.000416f
-#define BPF_Fs176400_Fc55_A1                     0.000000f
-#define BPF_Fs176400_Fc55_A2                     (-0.000416f)
-#define BPF_Fs176400_Fc55_B1                     (-1.999164f)
-#define BPF_Fs176400_Fc55_B2                     0.999169f
+#define BPF_Fs176400_Fc55_A0 0.000416f
+#define BPF_Fs176400_Fc55_A1 0.000000f
+#define BPF_Fs176400_Fc55_A2 (-0.000416f)
+#define BPF_Fs176400_Fc55_B1 (-1.999164f)
+#define BPF_Fs176400_Fc55_B2 0.999169f
 
-#define BPF_Fs192000_Fc55_A0                     0.000381f
-#define BPF_Fs192000_Fc55_A1                     0.000000f
-#define BPF_Fs192000_Fc55_A2                     (-0.000381f)
-#define BPF_Fs192000_Fc55_B1                     (-1.999234f)
-#define BPF_Fs192000_Fc55_B2                     0.999238f
+#define BPF_Fs192000_Fc55_A0 0.000381f
+#define BPF_Fs192000_Fc55_A1 0.000000f
+#define BPF_Fs192000_Fc55_A2 (-0.000381f)
+#define BPF_Fs192000_Fc55_B1 (-1.999234f)
+#define BPF_Fs192000_Fc55_B2 0.999238f
 
 /* Coefficients for centre frequency 66Hz */
-#define BPF_Fs8000_Fc66_A0                      0.012648f
-#define BPF_Fs8000_Fc66_A1                      0.000000f
-#define BPF_Fs8000_Fc66_A2                      (-0.012648f)
-#define BPF_Fs8000_Fc66_B1                      (-1.971760f)
-#define BPF_Fs8000_Fc66_B2                      0.974412f
-#define BPF_Fs11025_Fc66_A0                     0.009209f
-#define BPF_Fs11025_Fc66_A1                     0.000000f
-#define BPF_Fs11025_Fc66_A2                     (-0.009209f)
-#define BPF_Fs11025_Fc66_B1                     (-1.979966f)
-#define BPF_Fs11025_Fc66_B2                     0.981368f
-#define BPF_Fs12000_Fc66_A0                     0.008468f
-#define BPF_Fs12000_Fc66_A1                     0.000000f
-#define BPF_Fs12000_Fc66_A2                     (-0.008468f)
-#define BPF_Fs12000_Fc66_B1                     (-1.981685f)
-#define BPF_Fs12000_Fc66_B2                     0.982869f
-#define BPF_Fs16000_Fc66_A0                     0.006364f
-#define BPF_Fs16000_Fc66_A1                     0.000000f
-#define BPF_Fs16000_Fc66_A2                     (-0.006364f)
-#define BPF_Fs16000_Fc66_B1                     (-1.986457f)
-#define BPF_Fs16000_Fc66_B2                     0.987124f
-#define BPF_Fs22050_Fc66_A0                     0.004626f
-#define BPF_Fs22050_Fc66_A1                     0.000000f
-#define BPF_Fs22050_Fc66_A2                     (-0.004626f)
-#define BPF_Fs22050_Fc66_B1                     (-1.990288f)
-#define BPF_Fs22050_Fc66_B2                     0.990641f
-#define BPF_Fs24000_Fc66_A0                     0.004252f
-#define BPF_Fs24000_Fc66_A1                     0.000000f
-#define BPF_Fs24000_Fc66_A2                     (-0.004252f)
-#define BPF_Fs24000_Fc66_B1                     (-1.991100f)
-#define BPF_Fs24000_Fc66_B2                     0.991398f
-#define BPF_Fs32000_Fc66_A0                     0.003192f
-#define BPF_Fs32000_Fc66_A1                     0.000000f
-#define BPF_Fs32000_Fc66_A2                     (-0.003192f)
-#define BPF_Fs32000_Fc66_B1                     (-1.993374f)
-#define BPF_Fs32000_Fc66_B2                     0.993541f
-#define BPF_Fs44100_Fc66_A0                     0.002318f
-#define BPF_Fs44100_Fc66_A1                     0.000000f
-#define BPF_Fs44100_Fc66_A2                     (-0.002318f)
-#define BPF_Fs44100_Fc66_B1                     (-1.995221f)
-#define BPF_Fs44100_Fc66_B2                     0.995309f
-#define BPF_Fs48000_Fc66_A0                     0.002131f
-#define BPF_Fs48000_Fc66_A1                     0.000000f
-#define BPF_Fs48000_Fc66_A2                     (-0.002131f)
-#define BPF_Fs48000_Fc66_B1                     (-1.995615f)
-#define BPF_Fs48000_Fc66_B2                     0.995690f
+#define BPF_Fs8000_Fc66_A0 0.012648f
+#define BPF_Fs8000_Fc66_A1 0.000000f
+#define BPF_Fs8000_Fc66_A2 (-0.012648f)
+#define BPF_Fs8000_Fc66_B1 (-1.971760f)
+#define BPF_Fs8000_Fc66_B2 0.974412f
+#define BPF_Fs11025_Fc66_A0 0.009209f
+#define BPF_Fs11025_Fc66_A1 0.000000f
+#define BPF_Fs11025_Fc66_A2 (-0.009209f)
+#define BPF_Fs11025_Fc66_B1 (-1.979966f)
+#define BPF_Fs11025_Fc66_B2 0.981368f
+#define BPF_Fs12000_Fc66_A0 0.008468f
+#define BPF_Fs12000_Fc66_A1 0.000000f
+#define BPF_Fs12000_Fc66_A2 (-0.008468f)
+#define BPF_Fs12000_Fc66_B1 (-1.981685f)
+#define BPF_Fs12000_Fc66_B2 0.982869f
+#define BPF_Fs16000_Fc66_A0 0.006364f
+#define BPF_Fs16000_Fc66_A1 0.000000f
+#define BPF_Fs16000_Fc66_A2 (-0.006364f)
+#define BPF_Fs16000_Fc66_B1 (-1.986457f)
+#define BPF_Fs16000_Fc66_B2 0.987124f
+#define BPF_Fs22050_Fc66_A0 0.004626f
+#define BPF_Fs22050_Fc66_A1 0.000000f
+#define BPF_Fs22050_Fc66_A2 (-0.004626f)
+#define BPF_Fs22050_Fc66_B1 (-1.990288f)
+#define BPF_Fs22050_Fc66_B2 0.990641f
+#define BPF_Fs24000_Fc66_A0 0.004252f
+#define BPF_Fs24000_Fc66_A1 0.000000f
+#define BPF_Fs24000_Fc66_A2 (-0.004252f)
+#define BPF_Fs24000_Fc66_B1 (-1.991100f)
+#define BPF_Fs24000_Fc66_B2 0.991398f
+#define BPF_Fs32000_Fc66_A0 0.003192f
+#define BPF_Fs32000_Fc66_A1 0.000000f
+#define BPF_Fs32000_Fc66_A2 (-0.003192f)
+#define BPF_Fs32000_Fc66_B1 (-1.993374f)
+#define BPF_Fs32000_Fc66_B2 0.993541f
+#define BPF_Fs44100_Fc66_A0 0.002318f
+#define BPF_Fs44100_Fc66_A1 0.000000f
+#define BPF_Fs44100_Fc66_A2 (-0.002318f)
+#define BPF_Fs44100_Fc66_B1 (-1.995221f)
+#define BPF_Fs44100_Fc66_B2 0.995309f
+#define BPF_Fs48000_Fc66_A0 0.002131f
+#define BPF_Fs48000_Fc66_A1 0.000000f
+#define BPF_Fs48000_Fc66_A2 (-0.002131f)
+#define BPF_Fs48000_Fc66_B1 (-1.995615f)
+#define BPF_Fs48000_Fc66_B2 0.995690f
 
-#define BPF_Fs88200_Fc66_A0                     0.001146f
-#define BPF_Fs88200_Fc66_A1                     0.000000f
-#define BPF_Fs88200_Fc66_A2                     (-0.001146f)
-#define BPF_Fs88200_Fc66_B1                     (-1.997684f)
-#define BPF_Fs88200_Fc66_B2                     0.997708f
+#define BPF_Fs88200_Fc66_A0 0.001146f
+#define BPF_Fs88200_Fc66_A1 0.000000f
+#define BPF_Fs88200_Fc66_A2 (-0.001146f)
+#define BPF_Fs88200_Fc66_B1 (-1.997684f)
+#define BPF_Fs88200_Fc66_B2 0.997708f
 
-#define BPF_Fs96000_Fc66_A0                     0.001055f
-#define BPF_Fs96000_Fc66_A1                     0.000000f
-#define BPF_Fs96000_Fc66_A2                     (-0.001055f)
-#define BPF_Fs96000_Fc66_B1                     (-1.997868f)
-#define BPF_Fs96000_Fc66_B2                     0.997891f
+#define BPF_Fs96000_Fc66_A0 0.001055f
+#define BPF_Fs96000_Fc66_A1 0.000000f
+#define BPF_Fs96000_Fc66_A2 (-0.001055f)
+#define BPF_Fs96000_Fc66_B1 (-1.997868f)
+#define BPF_Fs96000_Fc66_B2 0.997891f
 
-#define BPF_Fs176400_Fc66_A0                    0.000573f
-#define BPF_Fs176400_Fc66_A1                    0.000000f
-#define BPF_Fs176400_Fc66_A2                    (-0.000573f)
-#define BPF_Fs176400_Fc66_B1                    (-1.998847f)
-#define BPF_Fs176400_Fc66_B2                    0.998853f
+#define BPF_Fs176400_Fc66_A0 0.000573f
+#define BPF_Fs176400_Fc66_A1 0.000000f
+#define BPF_Fs176400_Fc66_A2 (-0.000573f)
+#define BPF_Fs176400_Fc66_B1 (-1.998847f)
+#define BPF_Fs176400_Fc66_B2 0.998853f
 
-#define BPF_Fs192000_Fc66_A0                    0.000528f
-#define BPF_Fs192000_Fc66_A1                    0.000000f
-#define BPF_Fs192000_Fc66_A2                   (-0.000528f)
-#define BPF_Fs192000_Fc66_B1                   (-1.998939f)
-#define BPF_Fs192000_Fc66_B2                    0.998945f
+#define BPF_Fs192000_Fc66_A0 0.000528f
+#define BPF_Fs192000_Fc66_A1 0.000000f
+#define BPF_Fs192000_Fc66_A2 (-0.000528f)
+#define BPF_Fs192000_Fc66_B1 (-1.998939f)
+#define BPF_Fs192000_Fc66_B2 0.998945f
 
 /* Coefficients for centre frequency 78Hz */
-#define BPF_Fs8000_Fc78_A0                      0.018572f
-#define BPF_Fs8000_Fc78_A1                      0.000000f
-#define BPF_Fs8000_Fc78_A2                      (-0.018572f)
-#define BPF_Fs8000_Fc78_B1                      (-1.958745f)
-#define BPF_Fs8000_Fc78_B2                      0.962427f
-#define BPF_Fs11025_Fc78_A0                     0.013545f
-#define BPF_Fs11025_Fc78_A1                     0.000000f
-#define BPF_Fs11025_Fc78_A2                     (-0.013545f)
-#define BPF_Fs11025_Fc78_B1                     (-1.970647f)
-#define BPF_Fs11025_Fc78_B2                     0.972596f
-#define BPF_Fs12000_Fc78_A0                     0.012458f
-#define BPF_Fs12000_Fc78_A1                     0.000000f
-#define BPF_Fs12000_Fc78_A2                     (-0.012458f)
-#define BPF_Fs12000_Fc78_B1                     (-1.973148f)
-#define BPF_Fs12000_Fc78_B2                     0.974795f
-#define BPF_Fs16000_Fc78_A0                     0.009373f
-#define BPF_Fs16000_Fc78_A1                     0.000000f
-#define BPF_Fs16000_Fc78_A2                     (-0.009373f)
-#define BPF_Fs16000_Fc78_B1                     (-1.980108f)
-#define BPF_Fs16000_Fc78_B2                     0.981037f
-#define BPF_Fs22050_Fc78_A0                     0.006819f
-#define BPF_Fs22050_Fc78_A1                     0.000000f
-#define BPF_Fs22050_Fc78_A2                     (-0.006819f)
-#define BPF_Fs22050_Fc78_B1                     (-1.985714f)
-#define BPF_Fs22050_Fc78_B2                     0.986204f
-#define BPF_Fs24000_Fc78_A0                     0.006268f
-#define BPF_Fs24000_Fc78_A1                     0.000000f
-#define BPF_Fs24000_Fc78_A2                     (-0.006268f)
-#define BPF_Fs24000_Fc78_B1                     (-1.986904f)
-#define BPF_Fs24000_Fc78_B2                     0.987318f
-#define BPF_Fs32000_Fc78_A0                     0.004709f
-#define BPF_Fs32000_Fc78_A1                     0.000000f
-#define BPF_Fs32000_Fc78_A2                     (-0.004709f)
-#define BPF_Fs32000_Fc78_B1                     (-1.990240f)
-#define BPF_Fs32000_Fc78_B2                     0.990473f
-#define BPF_Fs44100_Fc78_A0                     0.003421f
-#define BPF_Fs44100_Fc78_A1                     0.000000f
-#define BPF_Fs44100_Fc78_A2                     (-0.003421f)
-#define BPF_Fs44100_Fc78_B1                     (-1.992955f)
-#define BPF_Fs44100_Fc78_B2                     0.993078f
-#define BPF_Fs48000_Fc78_A0                     0.003144f
-#define BPF_Fs48000_Fc78_A1                     0.000000f
-#define BPF_Fs48000_Fc78_A2                     (-0.003144f)
-#define BPF_Fs48000_Fc78_B1                     (-1.993535f)
-#define BPF_Fs48000_Fc78_B2                     0.993639f
+#define BPF_Fs8000_Fc78_A0 0.018572f
+#define BPF_Fs8000_Fc78_A1 0.000000f
+#define BPF_Fs8000_Fc78_A2 (-0.018572f)
+#define BPF_Fs8000_Fc78_B1 (-1.958745f)
+#define BPF_Fs8000_Fc78_B2 0.962427f
+#define BPF_Fs11025_Fc78_A0 0.013545f
+#define BPF_Fs11025_Fc78_A1 0.000000f
+#define BPF_Fs11025_Fc78_A2 (-0.013545f)
+#define BPF_Fs11025_Fc78_B1 (-1.970647f)
+#define BPF_Fs11025_Fc78_B2 0.972596f
+#define BPF_Fs12000_Fc78_A0 0.012458f
+#define BPF_Fs12000_Fc78_A1 0.000000f
+#define BPF_Fs12000_Fc78_A2 (-0.012458f)
+#define BPF_Fs12000_Fc78_B1 (-1.973148f)
+#define BPF_Fs12000_Fc78_B2 0.974795f
+#define BPF_Fs16000_Fc78_A0 0.009373f
+#define BPF_Fs16000_Fc78_A1 0.000000f
+#define BPF_Fs16000_Fc78_A2 (-0.009373f)
+#define BPF_Fs16000_Fc78_B1 (-1.980108f)
+#define BPF_Fs16000_Fc78_B2 0.981037f
+#define BPF_Fs22050_Fc78_A0 0.006819f
+#define BPF_Fs22050_Fc78_A1 0.000000f
+#define BPF_Fs22050_Fc78_A2 (-0.006819f)
+#define BPF_Fs22050_Fc78_B1 (-1.985714f)
+#define BPF_Fs22050_Fc78_B2 0.986204f
+#define BPF_Fs24000_Fc78_A0 0.006268f
+#define BPF_Fs24000_Fc78_A1 0.000000f
+#define BPF_Fs24000_Fc78_A2 (-0.006268f)
+#define BPF_Fs24000_Fc78_B1 (-1.986904f)
+#define BPF_Fs24000_Fc78_B2 0.987318f
+#define BPF_Fs32000_Fc78_A0 0.004709f
+#define BPF_Fs32000_Fc78_A1 0.000000f
+#define BPF_Fs32000_Fc78_A2 (-0.004709f)
+#define BPF_Fs32000_Fc78_B1 (-1.990240f)
+#define BPF_Fs32000_Fc78_B2 0.990473f
+#define BPF_Fs44100_Fc78_A0 0.003421f
+#define BPF_Fs44100_Fc78_A1 0.000000f
+#define BPF_Fs44100_Fc78_A2 (-0.003421f)
+#define BPF_Fs44100_Fc78_B1 (-1.992955f)
+#define BPF_Fs44100_Fc78_B2 0.993078f
+#define BPF_Fs48000_Fc78_A0 0.003144f
+#define BPF_Fs48000_Fc78_A1 0.000000f
+#define BPF_Fs48000_Fc78_A2 (-0.003144f)
+#define BPF_Fs48000_Fc78_B1 (-1.993535f)
+#define BPF_Fs48000_Fc78_B2 0.993639f
 
-#define BPF_Fs88200_Fc78_A0                    0.001693f
-#define BPF_Fs88200_Fc78_A1                    0.000000f
-#define BPF_Fs88200_Fc78_A2                    (-0.001693f)
-#define BPF_Fs88200_Fc78_B1                    (-1.996582f)
-#define BPF_Fs88200_Fc78_B2                    0.996615f
+#define BPF_Fs88200_Fc78_A0 0.001693f
+#define BPF_Fs88200_Fc78_A1 0.000000f
+#define BPF_Fs88200_Fc78_A2 (-0.001693f)
+#define BPF_Fs88200_Fc78_B1 (-1.996582f)
+#define BPF_Fs88200_Fc78_B2 0.996615f
 
-#define BPF_Fs96000_Fc78_A0                     0.001555f
-#define BPF_Fs96000_Fc78_A1                     0.000000f
-#define BPF_Fs96000_Fc78_A2                    (-0.0015555f)
-#define BPF_Fs96000_Fc78_B1                    (-1.996860f)
-#define BPF_Fs96000_Fc78_B2                     0.996891f
+#define BPF_Fs96000_Fc78_A0 0.001555f
+#define BPF_Fs96000_Fc78_A1 0.000000f
+#define BPF_Fs96000_Fc78_A2 (-0.001555f)
+#define BPF_Fs96000_Fc78_B1 (-1.996860f)
+#define BPF_Fs96000_Fc78_B2 0.996891f
 
-#define BPF_Fs176400_Fc78_A0                    0.000847f
-#define BPF_Fs176400_Fc78_A1                    0.000000f
-#define BPF_Fs176400_Fc78_A2                    (-0.000847f)
-#define BPF_Fs176400_Fc78_B1                    (-1.998298f)
-#define BPF_Fs176400_Fc78_B2                    0.998306f
+#define BPF_Fs176400_Fc78_A0 0.000847f
+#define BPF_Fs176400_Fc78_A1 0.000000f
+#define BPF_Fs176400_Fc78_A2 (-0.000847f)
+#define BPF_Fs176400_Fc78_B1 (-1.998298f)
+#define BPF_Fs176400_Fc78_B2 0.998306f
 
-#define BPF_Fs192000_Fc78_A0                    0.000778f
-#define BPF_Fs192000_Fc78_A1                    0.000000f
-#define BPF_Fs192000_Fc78_A2                   (-0.000778f)
-#define BPF_Fs192000_Fc78_B1                   (-1.998437f)
-#define BPF_Fs192000_Fc78_B2                    0.998444f
+#define BPF_Fs192000_Fc78_A0 0.000778f
+#define BPF_Fs192000_Fc78_A1 0.000000f
+#define BPF_Fs192000_Fc78_A2 (-0.000778f)
+#define BPF_Fs192000_Fc78_B1 (-1.998437f)
+#define BPF_Fs192000_Fc78_B2 0.998444f
 
 /* Coefficients for centre frequency 90Hz */
-#define BPF_Fs8000_Fc90_A0                       0.022760f
-#define BPF_Fs8000_Fc90_A1                       0.000000f
-#define BPF_Fs8000_Fc90_A2                       (-0.022760f)
-#define BPF_Fs8000_Fc90_B1                       (-1.949073f)
-#define BPF_Fs8000_Fc90_B2                       0.953953f
-#define BPF_Fs11025_Fc90_A0                      0.016619f
-#define BPF_Fs11025_Fc90_A1                      0.000000f
-#define BPF_Fs11025_Fc90_A2                      (-0.016619f)
-#define BPF_Fs11025_Fc90_B1                      (-1.963791f)
-#define BPF_Fs11025_Fc90_B2                      0.966377f
-#define BPF_Fs12000_Fc90_A0                      0.015289f
-#define BPF_Fs12000_Fc90_A1                      0.000000f
-#define BPF_Fs12000_Fc90_A2                      (-0.015289f)
-#define BPF_Fs12000_Fc90_B1                      (-1.966882f)
-#define BPF_Fs12000_Fc90_B2                      0.969067f
-#define BPF_Fs16000_Fc90_A0                      0.011511f
-#define BPF_Fs16000_Fc90_A1                      0.000000f
-#define BPF_Fs16000_Fc90_A2                      (-0.011511f)
-#define BPF_Fs16000_Fc90_B1                      (-1.975477f)
-#define BPF_Fs16000_Fc90_B2                      0.976711f
-#define BPF_Fs22050_Fc90_A0                      0.008379f
-#define BPF_Fs22050_Fc90_A1                      0.000000f
-#define BPF_Fs22050_Fc90_A2                      (-0.008379f)
-#define BPF_Fs22050_Fc90_B1                      (-1.982395f)
-#define BPF_Fs22050_Fc90_B2                      0.983047f
-#define BPF_Fs24000_Fc90_A0                      0.007704f
-#define BPF_Fs24000_Fc90_A1                      0.000000f
-#define BPF_Fs24000_Fc90_A2                      (-0.007704f)
-#define BPF_Fs24000_Fc90_B1                      (-1.983863f)
-#define BPF_Fs24000_Fc90_B2                      0.984414f
-#define BPF_Fs32000_Fc90_A0                      0.005789f
-#define BPF_Fs32000_Fc90_A1                      0.000000f
-#define BPF_Fs32000_Fc90_A2                      (-0.005789f)
-#define BPF_Fs32000_Fc90_B1                      (-1.987977f)
-#define BPF_Fs32000_Fc90_B2                      0.988288f
-#define BPF_Fs44100_Fc90_A0                      0.004207f
-#define BPF_Fs44100_Fc90_A1                      0.000000f
-#define BPF_Fs44100_Fc90_A2                      (-0.004207f)
-#define BPF_Fs44100_Fc90_B1                      (-1.991324f)
-#define BPF_Fs44100_Fc90_B2                      0.991488f
-#define BPF_Fs48000_Fc90_A0                      0.003867f
-#define BPF_Fs48000_Fc90_A1                      0.000000f
-#define BPF_Fs48000_Fc90_A2                      (-0.003867f)
-#define BPF_Fs48000_Fc90_B1                      (-1.992038f)
-#define BPF_Fs48000_Fc90_B2                      0.992177f
+#define BPF_Fs8000_Fc90_A0 0.022760f
+#define BPF_Fs8000_Fc90_A1 0.000000f
+#define BPF_Fs8000_Fc90_A2 (-0.022760f)
+#define BPF_Fs8000_Fc90_B1 (-1.949073f)
+#define BPF_Fs8000_Fc90_B2 0.953953f
+#define BPF_Fs11025_Fc90_A0 0.016619f
+#define BPF_Fs11025_Fc90_A1 0.000000f
+#define BPF_Fs11025_Fc90_A2 (-0.016619f)
+#define BPF_Fs11025_Fc90_B1 (-1.963791f)
+#define BPF_Fs11025_Fc90_B2 0.966377f
+#define BPF_Fs12000_Fc90_A0 0.015289f
+#define BPF_Fs12000_Fc90_A1 0.000000f
+#define BPF_Fs12000_Fc90_A2 (-0.015289f)
+#define BPF_Fs12000_Fc90_B1 (-1.966882f)
+#define BPF_Fs12000_Fc90_B2 0.969067f
+#define BPF_Fs16000_Fc90_A0 0.011511f
+#define BPF_Fs16000_Fc90_A1 0.000000f
+#define BPF_Fs16000_Fc90_A2 (-0.011511f)
+#define BPF_Fs16000_Fc90_B1 (-1.975477f)
+#define BPF_Fs16000_Fc90_B2 0.976711f
+#define BPF_Fs22050_Fc90_A0 0.008379f
+#define BPF_Fs22050_Fc90_A1 0.000000f
+#define BPF_Fs22050_Fc90_A2 (-0.008379f)
+#define BPF_Fs22050_Fc90_B1 (-1.982395f)
+#define BPF_Fs22050_Fc90_B2 0.983047f
+#define BPF_Fs24000_Fc90_A0 0.007704f
+#define BPF_Fs24000_Fc90_A1 0.000000f
+#define BPF_Fs24000_Fc90_A2 (-0.007704f)
+#define BPF_Fs24000_Fc90_B1 (-1.983863f)
+#define BPF_Fs24000_Fc90_B2 0.984414f
+#define BPF_Fs32000_Fc90_A0 0.005789f
+#define BPF_Fs32000_Fc90_A1 0.000000f
+#define BPF_Fs32000_Fc90_A2 (-0.005789f)
+#define BPF_Fs32000_Fc90_B1 (-1.987977f)
+#define BPF_Fs32000_Fc90_B2 0.988288f
+#define BPF_Fs44100_Fc90_A0 0.004207f
+#define BPF_Fs44100_Fc90_A1 0.000000f
+#define BPF_Fs44100_Fc90_A2 (-0.004207f)
+#define BPF_Fs44100_Fc90_B1 (-1.991324f)
+#define BPF_Fs44100_Fc90_B2 0.991488f
+#define BPF_Fs48000_Fc90_A0 0.003867f
+#define BPF_Fs48000_Fc90_A1 0.000000f
+#define BPF_Fs48000_Fc90_A2 (-0.003867f)
+#define BPF_Fs48000_Fc90_B1 (-1.992038f)
+#define BPF_Fs48000_Fc90_B2 0.992177f
 
-#define BPF_Fs88200_Fc90_A0                      0.002083f
-#define BPF_Fs88200_Fc90_A1                      0.000000f
-#define BPF_Fs88200_Fc90_A2                      (-0.002083f)
-#define BPF_Fs88200_Fc90_B1                      (-1.995791f)
-#define BPF_Fs88200_Fc90_B2                      0.995835f
+#define BPF_Fs88200_Fc90_A0 0.002083f
+#define BPF_Fs88200_Fc90_A1 0.000000f
+#define BPF_Fs88200_Fc90_A2 (-0.002083f)
+#define BPF_Fs88200_Fc90_B1 (-1.995791f)
+#define BPF_Fs88200_Fc90_B2 0.995835f
 
-#define BPF_Fs96000_Fc90_A0                      0.001913f
-#define BPF_Fs96000_Fc90_A1                      0.000000f
-#define BPF_Fs96000_Fc90_A2                     (-0.001913f)
-#define BPF_Fs96000_Fc90_B1                     (-1.996134f)
-#define BPF_Fs96000_Fc90_B2                      0.996174f
+#define BPF_Fs96000_Fc90_A0 0.001913f
+#define BPF_Fs96000_Fc90_A1 0.000000f
+#define BPF_Fs96000_Fc90_A2 (-0.001913f)
+#define BPF_Fs96000_Fc90_B1 (-1.996134f)
+#define BPF_Fs96000_Fc90_B2 0.996174f
 
-#define BPF_Fs176400_Fc90_A0                     0.001042f
-#define BPF_Fs176400_Fc90_A1                     0.000000f
-#define BPF_Fs176400_Fc90_A2                     (-0.001042f)
-#define BPF_Fs176400_Fc90_B1                     (-1.997904f)
-#define BPF_Fs176400_Fc90_B2                     0.997915f
+#define BPF_Fs176400_Fc90_A0 0.001042f
+#define BPF_Fs176400_Fc90_A1 0.000000f
+#define BPF_Fs176400_Fc90_A2 (-0.001042f)
+#define BPF_Fs176400_Fc90_B1 (-1.997904f)
+#define BPF_Fs176400_Fc90_B2 0.997915f
 
-#define BPF_Fs192000_Fc90_A0                     0.000958f
-#define BPF_Fs192000_Fc90_A1                     0.000000f
-#define BPF_Fs192000_Fc90_A2                    (-0.000958f)
-#define BPF_Fs192000_Fc90_B1                    (-1.998075f)
-#define BPF_Fs192000_Fc90_B2                     0.998085f
+#define BPF_Fs192000_Fc90_A0 0.000958f
+#define BPF_Fs192000_Fc90_A1 0.000000f
+#define BPF_Fs192000_Fc90_A2 (-0.000958f)
+#define BPF_Fs192000_Fc90_B1 (-1.998075f)
+#define BPF_Fs192000_Fc90_B2 0.998085f
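
For reference, every band-pass section above has the same shape: A1 is always zero and A2 is -A0, so the numerator is A0*(1 - z^-2), while B1/B2 place a resonant pole pair near the centre frequency (roughly B2 = r^2 and -B1 = 2*r*cos(2*pi*Fc/Fs)). A minimal sketch of evaluating one such section, assuming the usual Direct Form I convention y[n] = A0*x[n] + A1*x[n-1] + A2*x[n-2] - B1*y[n-1] - B2*y[n-2]; the sign convention and the helper name are assumptions for illustration, not taken from this change.

    // Sketch only: applies the BPF_Fs48000_Fc55_* section to a mono buffer,
    // assuming the Direct Form I convention named above (not the library API).
    #include <cstddef>

    static void applyBandPass55Hz48k(float* buf, size_t frames) {
        const float A0 = 0.001545f, A1 = 0.000000f, A2 = -0.001545f;  // numerator
        const float B1 = -1.996823f, B2 = 0.996875f;                  // denominator
        float x1 = 0.f, x2 = 0.f, y1 = 0.f, y2 = 0.f;                 // filter history
        for (size_t n = 0; n < frames; ++n) {
            const float x = buf[n];
            const float y = A0 * x + A1 * x1 + A2 * x2 - B1 * y1 - B2 * y2;
            x2 = x1; x1 = x;
            y2 = y1; y1 = y;
            buf[n] = y;
        }
    }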
 
 /************************************************************************************/
 /*                                                                                  */
@@ -613,74 +613,74 @@
 /************************************************************************************/
 
 /* AGC Time constants */
-#define AGC_ATTACK_Fs8000                             0.841395f
-#define AGC_ATTACK_Fs11025                            0.882223f
-#define AGC_ATTACK_Fs12000                            0.891251f
-#define AGC_ATTACK_Fs16000                            0.917276f
-#define AGC_ATTACK_Fs22050                            0.939267f
-#define AGC_ATTACK_Fs24000                            0.944061f
-#define AGC_ATTACK_Fs32000                            0.957745f
-#define AGC_ATTACK_Fs44100                            0.969158f
-#define AGC_ATTACK_Fs48000                            0.971628f
+#define AGC_ATTACK_Fs8000 0.841395f
+#define AGC_ATTACK_Fs11025 0.882223f
+#define AGC_ATTACK_Fs12000 0.891251f
+#define AGC_ATTACK_Fs16000 0.917276f
+#define AGC_ATTACK_Fs22050 0.939267f
+#define AGC_ATTACK_Fs24000 0.944061f
+#define AGC_ATTACK_Fs32000 0.957745f
+#define AGC_ATTACK_Fs44100 0.969158f
+#define AGC_ATTACK_Fs48000 0.971628f
 
-#define AGC_ATTACK_Fs88200                             0.984458f
-#define AGC_ATTACK_Fs96000                             0.985712f
-#define AGC_ATTACK_Fs176400                            0.992199f
-#define AGC_ATTACK_Fs192000                            0.992830f
+#define AGC_ATTACK_Fs88200 0.984458f
+#define AGC_ATTACK_Fs96000 0.985712f
+#define AGC_ATTACK_Fs176400 0.992199f
+#define AGC_ATTACK_Fs192000 0.992830f
 
-#define DECAY_SHIFT                                   10
+#define DECAY_SHIFT 10
 
-#define AGC_DECAY_Fs8000                              0.000042f
-#define AGC_DECAY_Fs11025                             0.000030f
-#define AGC_DECAY_Fs12000                             0.000028f
-#define AGC_DECAY_Fs16000                             0.000021f
-#define AGC_DECAY_Fs22050                             0.000015f
-#define AGC_DECAY_Fs24000                             0.000014f
-#define AGC_DECAY_Fs32000                             0.000010f
-#define AGC_DECAY_Fs44100                             0.000008f
-#define AGC_DECAY_Fs48000                             0.000007f
+#define AGC_DECAY_Fs8000 0.000042f
+#define AGC_DECAY_Fs11025 0.000030f
+#define AGC_DECAY_Fs12000 0.000028f
+#define AGC_DECAY_Fs16000 0.000021f
+#define AGC_DECAY_Fs22050 0.000015f
+#define AGC_DECAY_Fs24000 0.000014f
+#define AGC_DECAY_Fs32000 0.000010f
+#define AGC_DECAY_Fs44100 0.000008f
+#define AGC_DECAY_Fs48000 0.000007f
 
-#define AGC_DECAY_Fs88200                            0.0000038f
-#define AGC_DECAY_FS96000                            0.0000035f
-#define AGC_DECAY_Fs176400                          0.00000188f
-#define AGC_DECAY_FS192000                          0.00000175f
+#define AGC_DECAY_Fs88200 0.0000038f
+#define AGC_DECAY_FS96000 0.0000035f
+#define AGC_DECAY_Fs176400 0.00000188f
+#define AGC_DECAY_FS192000 0.00000175f
 
 /* AGC Gain settings */
-#define AGC_GAIN_SCALE                                        31         /* As a power of 2 */
-#define AGC_GAIN_SHIFT                                         4         /* As a power of 2 */
-#define AGC_TARGETLEVEL                            0.988553f
-#define AGC_HPFGAIN_0dB                            0.412538f
-#define AGC_GAIN_0dB                               0.000000f
-#define AGC_HPFGAIN_1dB                            0.584893f
-#define AGC_GAIN_1dB                               0.122018f
-#define AGC_HPFGAIN_2dB                            0.778279f
-#define AGC_GAIN_2dB                               0.258925f
-#define AGC_HPFGAIN_3dB                            0.995262f
-#define AGC_GAIN_3dB                               0.412538f
-#define AGC_HPFGAIN_4dB                            1.238721f
-#define AGC_GAIN_4dB                               0.584893f
-#define AGC_HPFGAIN_5dB                            1.511886f
-#define AGC_GAIN_5dB                               0.778279f
-#define AGC_HPFGAIN_6dB                            1.818383f
-#define AGC_GAIN_6dB                               0.995262f
-#define AGC_HPFGAIN_7dB                            2.162278f
-#define AGC_GAIN_7dB                               1.238721f
-#define AGC_HPFGAIN_8dB                            2.548134f
-#define AGC_GAIN_8dB                               1.511886f
-#define AGC_HPFGAIN_9dB                            2.981072f
-#define AGC_GAIN_9dB                               1.818383f
-#define AGC_HPFGAIN_10dB                           3.466836f
-#define AGC_GAIN_10dB                              2.162278f
-#define AGC_HPFGAIN_11dB                           4.011872f
-#define AGC_GAIN_11dB                              2.548134f
-#define AGC_HPFGAIN_12dB                           4.623413f
-#define AGC_GAIN_12dB                              2.981072f
-#define AGC_HPFGAIN_13dB                           5.309573f
-#define AGC_GAIN_13dB                              3.466836f
-#define AGC_HPFGAIN_14dB                           6.079458f
-#define AGC_GAIN_14dB                              4.011872f
-#define AGC_HPFGAIN_15dB                           6.943282f
-#define AGC_GAIN_15dB                              4.623413f
+#define AGC_GAIN_SCALE 31 /* As a power of 2 */
+#define AGC_GAIN_SHIFT 4  /* As a power of 2 */
+#define AGC_TARGETLEVEL 0.988553f
+#define AGC_HPFGAIN_0dB 0.412538f
+#define AGC_GAIN_0dB 0.000000f
+#define AGC_HPFGAIN_1dB 0.584893f
+#define AGC_GAIN_1dB 0.122018f
+#define AGC_HPFGAIN_2dB 0.778279f
+#define AGC_GAIN_2dB 0.258925f
+#define AGC_HPFGAIN_3dB 0.995262f
+#define AGC_GAIN_3dB 0.412538f
+#define AGC_HPFGAIN_4dB 1.238721f
+#define AGC_GAIN_4dB 0.584893f
+#define AGC_HPFGAIN_5dB 1.511886f
+#define AGC_GAIN_5dB 0.778279f
+#define AGC_HPFGAIN_6dB 1.818383f
+#define AGC_GAIN_6dB 0.995262f
+#define AGC_HPFGAIN_7dB 2.162278f
+#define AGC_GAIN_7dB 1.238721f
+#define AGC_HPFGAIN_8dB 2.548134f
+#define AGC_GAIN_8dB 1.511886f
+#define AGC_HPFGAIN_9dB 2.981072f
+#define AGC_GAIN_9dB 1.818383f
+#define AGC_HPFGAIN_10dB 3.466836f
+#define AGC_GAIN_10dB 2.162278f
+#define AGC_HPFGAIN_11dB 4.011872f
+#define AGC_GAIN_11dB 2.548134f
+#define AGC_HPFGAIN_12dB 4.623413f
+#define AGC_GAIN_12dB 2.981072f
+#define AGC_HPFGAIN_13dB 5.309573f
+#define AGC_GAIN_13dB 3.466836f
+#define AGC_HPFGAIN_14dB 6.079458f
+#define AGC_GAIN_14dB 4.011872f
+#define AGC_HPFGAIN_15dB 6.943282f
+#define AGC_GAIN_15dB 4.623413f
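
Numerically, the AGC tables above follow simple closed forms: the attack constants match exp(-1/(Fs*tau)) for tau of roughly 0.72 ms, the decay constants are close to 1/(Fs*tau) for tau of roughly 3 s, and the gain entries are 10^(dB/20) - 1, with the HPF variants offset by +3 dB (for example AGC_GAIN_1dB = 10^(1/20) - 1 = 0.122018 and AGC_HPFGAIN_0dB = 10^(3/20) - 1 = 0.412538). A small sketch that reproduces them; the two time constants are inferred by fitting the table values and are not documented in the source.

    // Sketch only: regenerates the AGC table values from the inferred formulas above.
    // tauAttack and tauDecay are fitted to the numbers, not documented constants.
    #include <cmath>
    #include <cstdio>
    #include <initializer_list>

    int main() {
        const float tauAttack = 0.7237e-3f;  // seconds (inferred)
        const float tauDecay = 3.0f;         // seconds (inferred)
        for (float fs : {8000.f, 48000.f, 192000.f}) {
            const float attack = std::exp(-1.f / (fs * tauAttack));  // ~0.9716 at 48 kHz
            const float decay = 1.f / (fs * tauDecay);               // ~0.000007 at 48 kHz
            std::printf("Fs=%6.0f attack=%.6f decay=%.6f\n", fs, attack, decay);
        }
        for (int dB = 0; dB <= 15; ++dB) {
            const float gain = std::pow(10.f, dB / 20.f) - 1.f;           // AGC_GAIN_<n>dB
            const float hpfGain = std::pow(10.f, (dB + 3) / 20.f) - 1.f;  // AGC_HPFGAIN_<n>dB
            std::printf("%2d dB: gain=%.6f hpfGain=%.6f\n", dB, gain, hpfGain);
        }
        return 0;
    }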
 
 /************************************************************************************/
 /*                                                                                  */
@@ -689,38 +689,38 @@
 /************************************************************************************/
 
 /* Volume control gain */
-#define VOLUME_MAX                                          0         /* In dBs */
-#define VOLUME_SHIFT                                        0         /* In dBs */
+#define VOLUME_MAX 0   /* In dBs */
+#define VOLUME_SHIFT 0 /* In dBs */
 
 /* Volume control time constants */
-#define VOL_TC_SHIFT                                       21         /* As a power of 2 */
-#define VOL_TC_Fs8000                                   0.024690f
-#define VOL_TC_Fs11025                                  0.017977f
-#define VOL_TC_Fs12000                                  0.016529f
-#define VOL_TC_Fs16000                                  0.012422f
-#define VOL_TC_Fs22050                                  0.009029f
-#define VOL_TC_Fs24000                                  0.008299f
-#define VOL_TC_Fs32000                                  0.006231f
-#define VOL_TC_Fs44100                                  0.004525f
-#define VOL_TC_Fs48000                                  0.004158f
-#define VOL_TC_Fs88200                                  0.002263f
-#define VOL_TC_Fs96000                                  0.002079f
-#define VOL_TC_Fs176400                                 0.001131f
-#define VOL_TC_Fs192000                                 0.001039f
-#define MIX_TC_Fs8000                                   29365         /* Floating point value 0.896151 */
-#define MIX_TC_Fs11025                                  30230         /* Floating point value 0.922548 */
-#define MIX_TC_Fs12000                                  30422         /* Floating point value 0.928415 */
-#define MIX_TC_Fs16000                                  30978         /* Floating point value 0.945387 */
-#define MIX_TC_Fs22050                                  31451         /* Floating point value 0.959804 */
-#define MIX_TC_Fs24000                                  31554         /* Floating point value 0.962956 */
-#define MIX_TC_Fs32000                                  31850         /* Floating point value 0.971973 */
-#define MIX_TC_Fs44100                                  32097         /* Floating point value 0.979515 */
-#define MIX_TC_Fs48000                                  32150         /* Floating point value 0.981150 */
+#define VOL_TC_SHIFT 21 /* As a power of 2 */
+#define VOL_TC_Fs8000 0.024690f
+#define VOL_TC_Fs11025 0.017977f
+#define VOL_TC_Fs12000 0.016529f
+#define VOL_TC_Fs16000 0.012422f
+#define VOL_TC_Fs22050 0.009029f
+#define VOL_TC_Fs24000 0.008299f
+#define VOL_TC_Fs32000 0.006231f
+#define VOL_TC_Fs44100 0.004525f
+#define VOL_TC_Fs48000 0.004158f
+#define VOL_TC_Fs88200 0.002263f
+#define VOL_TC_Fs96000 0.002079f
+#define VOL_TC_Fs176400 0.001131f
+#define VOL_TC_Fs192000 0.001039f
+#define MIX_TC_Fs8000 29365  /* Floating point value 0.896151 */
+#define MIX_TC_Fs11025 30230 /* Floating point value 0.922548 */
+#define MIX_TC_Fs12000 30422 /* Floating point value 0.928415 */
+#define MIX_TC_Fs16000 30978 /* Floating point value 0.945387 */
+#define MIX_TC_Fs22050 31451 /* Floating point value 0.959804 */
+#define MIX_TC_Fs24000 31554 /* Floating point value 0.962956 */
+#define MIX_TC_Fs32000 31850 /* Floating point value 0.971973 */
+#define MIX_TC_Fs44100 32097 /* Floating point value 0.979515 */
+#define MIX_TC_Fs48000 32150 /* Floating point value 0.981150 */
 /* Floating point value 0.989704 */
-#define MIX_TC_Fs88200                                  32430
-#define MIX_TC_Fs96000                                  32456         /* Floating point value 0.990530 */
+#define MIX_TC_Fs88200 32430
+#define MIX_TC_Fs96000 32456 /* Floating point value 0.990530 */
 /* Floating point value 0.994838 */
-#define MIX_TC_Fs176400                                 32598
-#define MIX_TC_Fs192000                                 32611         /* Floating point value 0.992524 */
+#define MIX_TC_Fs176400 32598
+#define MIX_TC_Fs192000 32611 /* Floating point value 0.992524 */
 
 #endif
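
The volume and mixer constants above fit a similar pattern: VOL_TC_FsNNN is consistent with 1 - exp(-1/(Fs*tau)) for tau of about 5 ms, and each MIX_TC_FsNNN integer is, to within a count or so, the Q15 fixed-point form of the float quoted in its comment (integer ~= float * 32767). A quick cross-check, with the 5 ms constant inferred from the table rather than documented:

    // Sketch only: cross-checks VOL_TC / MIX_TC against the inferred forms above.
    #include <cmath>
    #include <cstdio>

    int main() {
        const float tauVol = 0.005f;  // seconds (inferred from the table)
        const float fs = 48000.f;
        const float volTc = 1.f - std::exp(-1.f / (fs * tauVol));  // ~0.004158 (VOL_TC_Fs48000)
        const float mixTcFloat = 0.981150f;                        // comment value for MIX_TC_Fs48000
        const int mixTcQ15 = (int)(mixTcFloat * 32767.f + 0.5f);   // ~32150
        std::printf("VOL_TC ~ %.6f, MIX_TC ~ %d\n", volTc, mixTcQ15);
        return 0;
    }
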
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
index 53feae8..9fe8116 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Control.cpp
@@ -21,6 +21,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+#include <audio_utils/BiquadFilter.h>
+#include <system/audio.h>
 #include "LVDBE.h"
 #include "LVDBE_Private.h"
 #include "VectorArithmetic.h"
@@ -47,15 +49,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t        hInstance,
-                                            LVDBE_Params_t        *pParams)
-{
-
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetParameters(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
 
     *pParams = pInstance->Params;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
 
 /************************************************************************************/
@@ -77,15 +76,13 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t            hInstance,
-                                              LVDBE_Capabilities_t    *pCapabilities)
-{
-
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
+LVDBE_ReturnStatus_en LVDBE_GetCapabilities(LVDBE_Handle_t hInstance,
+                                            LVDBE_Capabilities_t* pCapabilities) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
 
     *pCapabilities = pInstance->Capabilities;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
 
 /************************************************************************************/
@@ -101,35 +98,30 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetFilters(LVDBE_Instance_t     *pInstance,
-                         LVDBE_Params_t       *pParams)
-{
-
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
     /*
      * Calculate the table offsets
      */
-    LVM_UINT16 Offset = (LVM_UINT16)((LVM_UINT16)pParams->SampleRate + \
-                                    (LVM_UINT16)(pParams->CentreFrequency * (1+LVDBE_FS_192000)));
+    LVM_UINT16 Offset =
+            (LVM_UINT16)((LVM_UINT16)pParams->SampleRate +
+                         (LVM_UINT16)(pParams->CentreFrequency * (1 + LVDBE_FS_192000)));
 
     /*
      * Setup the high pass filter
      */
-    LoadConst_Float(0,                                          /* Clear the history, value 0 */
-                   (LVM_FLOAT *)&pInstance->pData->HPFTaps,     /* Destination */
-                    sizeof(pInstance->pData->HPFTaps) / sizeof(LVM_FLOAT)); /* Number of words */
-    BQ_2I_D32F32Cll_TRC_WRA_01_Init(&pInstance->pCoef->HPFInstance,    /* Initialise the filter */
-                                    &pInstance->pData->HPFTaps,
-                                    (BQ_FLOAT_Coefs_t *)&LVDBE_HPF_Table[Offset]);
+    std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+            LVDBE_HPF_Table[Offset].A0, LVDBE_HPF_Table[Offset].A1, LVDBE_HPF_Table[Offset].A2,
+            -(LVDBE_HPF_Table[Offset].B1), -(LVDBE_HPF_Table[Offset].B2)};
+    pInstance->pHPFBiquad
+            ->setCoefficients<std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(coefs);
 
     /*
      * Setup the band pass filter
      */
-    LoadConst_Float(0,                                           /* Clear the history, value 0 */
-                 (LVM_FLOAT *)&pInstance->pData->BPFTaps,        /* Destination */
-                 sizeof(pInstance->pData->BPFTaps) / sizeof(LVM_FLOAT));   /* Number of words */
-    BP_1I_D32F32Cll_TRC_WRA_02_Init(&pInstance->pCoef->BPFInstance,    /* Initialise the filter */
-                                    &pInstance->pData->BPFTaps,
-                                    (BP_FLOAT_Coefs_t *)&LVDBE_BPF_Table[Offset]);
+    coefs = {LVDBE_BPF_Table[Offset].A0, 0.0, -(LVDBE_BPF_Table[Offset].A0),
+             -(LVDBE_BPF_Table[Offset].B1), -(LVDBE_BPF_Table[Offset].B2)};
+    pInstance->pBPFBiquad
+            ->setCoefficients<std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(coefs);
 }
 
 /************************************************************************************/
@@ -145,29 +137,26 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetAGC(LVDBE_Instance_t     *pInstance,
-                     LVDBE_Params_t       *pParams)
-{
-
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
     /*
      * Get the attack and decay time constants
      */
-    pInstance->pData->AGCInstance.AGC_Attack = LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate];  /* Attack multiplier */
-    pInstance->pData->AGCInstance.AGC_Decay  = LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate];   /* Decay multipler */
+    pInstance->pData->AGCInstance.AGC_Attack =
+            LVDBE_AGC_ATTACK_Table[(LVM_UINT16)pParams->SampleRate]; /* Attack multiplier */
+    pInstance->pData->AGCInstance.AGC_Decay =
+            LVDBE_AGC_DECAY_Table[(LVM_UINT16)pParams->SampleRate]; /* Decay multiplier */
 
     /*
      * Get the boost gain
      */
-    if (pParams->HPFSelect == LVDBE_HPF_ON)
-    {
-        pInstance->pData->AGCInstance.AGC_MaxGain   = LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel];  /* High pass filter on */
-    }
-    else
-    {
-        pInstance->pData->AGCInstance.AGC_MaxGain   = LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel];     /* High pass filter off */
+    if (pParams->HPFSelect == LVDBE_HPF_ON) {
+        pInstance->pData->AGCInstance.AGC_MaxGain =
+                LVDBE_AGC_HPFGAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter on */
+    } else {
+        pInstance->pData->AGCInstance.AGC_MaxGain =
+                LVDBE_AGC_GAIN_Table[(LVM_UINT16)pParams->EffectLevel]; /* High pass filter off */
     }
     pInstance->pData->AGCInstance.AGC_Target = AGC_TARGETLEVEL;
-
 }
 
 /************************************************************************************/
@@ -193,29 +182,22 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVDBE_SetVolume(LVDBE_Instance_t     *pInstance,
-                        LVDBE_Params_t       *pParams)
-{
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams) {
+    LVM_UINT16 dBShifts;  /* 6dB shifts */
+    LVM_UINT16 dBOffset;  /* Table offset */
+    LVM_INT16 Volume = 0; /* Required volume in dBs */
 
-    LVM_UINT16      dBShifts;                                   /* 6dB shifts */
-    LVM_UINT16      dBOffset;                                   /* Table offset */
-    LVM_INT16       Volume = 0;                                 /* Required volume in dBs */
-
-    LVM_FLOAT        dBShifts_fac;
+    LVM_FLOAT dBShifts_fac;
     /*
      * Apply the volume if enabled
      */
-    if (pParams->VolumeControl == LVDBE_VOLUME_ON)
-    {
+    if (pParams->VolumeControl == LVDBE_VOLUME_ON) {
         /*
          * Limit the gain to the maximum allowed
          */
-        if  (pParams->VolumedB > VOLUME_MAX)
-        {
+        if (pParams->VolumedB > VOLUME_MAX) {
             Volume = VOLUME_MAX;
-        }
-        else
-        {
+        } else {
             Volume = pParams->VolumedB;
         }
     }
@@ -223,8 +205,8 @@
     /*
      * Calculate the required gain and shifts
      */
-    dBOffset = (LVM_UINT16)(6 + Volume % 6);                    /* Get the dBs 0-5 */
-    dBShifts = (LVM_UINT16)(Volume / -6);                       /* Get the 6dB shifts */
+    dBOffset = (LVM_UINT16)(6 + Volume % 6); /* Get the dBs 0-5 */
+    dBShifts = (LVM_UINT16)(Volume / -6);    /* Get the 6dB shifts */
 
     dBShifts_fac = (LVM_FLOAT)(1 << dBShifts);
     /*
@@ -232,27 +214,23 @@
      */
     pInstance->pData->AGCInstance.Target = (LVDBE_VolumeTable[dBOffset]);
     pInstance->pData->AGCInstance.Target = pInstance->pData->AGCInstance.Target / dBShifts_fac;
-    pInstance->pData->AGCInstance.VolumeTC    = LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate];   /* Volume update time constant */
+    pInstance->pData->AGCInstance.VolumeTC =
+            LVDBE_VolumeTCTable[(LVM_UINT16)pParams->SampleRate]; /* Volume update time constant */
 
     /*
      * When DBE is disabled use the bypass volume control
      */
-    if(dBShifts > 0)
-    {
+    if (dBShifts > 0) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
                             LVDBE_VolumeTable[dBOffset] / dBShifts_fac);
-    }
-    else
-    {
+    } else {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassVolume.MixerStream[0],
                             LVDBE_VolumeTable[dBOffset]);
     }
 
     pInstance->pData->BypassVolume.MixerStream[0].CallbackSet = 1;
     LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->pData->BypassVolume.MixerStream[0],
-                                LVDBE_MIXER_TC,
-                                (LVM_Fs_en)pInstance->Params.SampleRate,
-                                2);
+                                       LVDBE_MIXER_TC, (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 }
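
For a concrete case of the arithmetic above: with Volume = -10 dB, C's truncating remainder and division give Volume % 6 = -4 and Volume / -6 = 1, so dBOffset = 2 and dBShifts = 1; the request is split into a fine part of dBOffset - 6 = -4 dB (dBOffset indexes LVDBE_VolumeTable) plus one 6 dB shift applied as a division by dBShifts_fac = 2^dBShifts = 2. A tiny standalone check of that split:

    // Sketch only: the dBOffset/dBShifts split used in LVDBE_SetVolume, for Volume <= 0.
    #include <cassert>
    #include <cstdint>

    int main() {
        const int16_t Volume = -10;                            // requested volume in dB
        const uint16_t dBOffset = (uint16_t)(6 + Volume % 6);  // fine step, here 2 (-4 dB)
        const uint16_t dBShifts = (uint16_t)(Volume / -6);     // whole 6 dB shifts, here 1
        const float dBShifts_fac = (float)(1 << dBShifts);     // 2.0f
        // The split reconstructs the request: (dBOffset - 6) - 6 * dBShifts == Volume.
        assert((dBOffset - 6) - 6 * (int)dBShifts == Volume);
        (void)dBShifts_fac;
        return 0;
    }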
 
 /****************************************************************************************/
@@ -292,21 +270,25 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t         hInstance,
-                                      LVDBE_Params_t         *pParams)
-{
+LVDBE_ReturnStatus_en LVDBE_Control(LVDBE_Handle_t hInstance, LVDBE_Params_t* pParams) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
+    LVMixer3_2St_FLOAT_st* pBypassMixer_Instance = &pInstance->pData->BypassMixer;
 
-    LVDBE_Instance_t    *pInstance =(LVDBE_Instance_t  *)hInstance;
-    LVMixer3_2St_FLOAT_st     *pBypassMixer_Instance = &pInstance->pData->BypassMixer;
-
+    /*
+     * Create biquad instance
+     */
+    if (pInstance->Params.NrChannels != pParams->NrChannels) {
+        pInstance->pHPFBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
+    }
     /*
      * Update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.CentreFrequency != pParams->CentreFrequency))
-    {
-        LVDBE_SetFilters(pInstance,                     /* Instance pointer */
-                         pParams);                      /* New parameters */
+        (pInstance->Params.NrChannels != pParams->NrChannels) ||
+        (pInstance->Params.CentreFrequency != pParams->CentreFrequency)) {
+        LVDBE_SetFilters(pInstance, /* Instance pointer */
+                         pParams);  /* New parameters */
     }
 
     /*
@@ -314,16 +296,14 @@
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
         (pInstance->Params.EffectLevel != pParams->EffectLevel) ||
-        (pInstance->Params.HPFSelect != pParams->HPFSelect))
-    {
-        LVDBE_SetAGC(pInstance,                         /* Instance pointer */
-                     pParams);                          /* New parameters */
-        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
-            LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
+        (pInstance->Params.HPFSelect != pParams->HPFSelect)) {
+        LVDBE_SetAGC(pInstance, /* Instance pointer */
+                     pParams);  /* New parameters */
+        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+                                  (LVM_Fs_en)pParams->SampleRate, 2);
 
-        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
-            LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate, 2);
-
+        LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+                                  (LVM_Fs_en)pParams->SampleRate, 2);
     }
 
     /*
@@ -332,19 +312,16 @@
     if ((pInstance->Params.VolumedB != pParams->VolumedB) ||
         (pInstance->Params.SampleRate != pParams->SampleRate) ||
         (pInstance->Params.HeadroomdB != pParams->HeadroomdB) ||
-        (pInstance->Params.VolumeControl != pParams->VolumeControl))
-    {
-        LVDBE_SetVolume(pInstance,                      /* Instance pointer */
-                       pParams);                        /* New parameters */
+        (pInstance->Params.VolumeControl != pParams->VolumeControl)) {
+        LVDBE_SetVolume(pInstance, /* Instance pointer */
+                        pParams);  /* New parameters */
     }
 
-    if (pInstance->Params.OperatingMode==LVDBE_ON && pParams->OperatingMode==LVDBE_OFF)
-    {
+    if (pInstance->Params.OperatingMode == LVDBE_ON && pParams->OperatingMode == LVDBE_OFF) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 0);
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 1.0f);
     }
-    if (pInstance->Params.OperatingMode==LVDBE_OFF && pParams->OperatingMode==LVDBE_ON)
-    {
+    if (pInstance->Params.OperatingMode == LVDBE_OFF && pParams->OperatingMode == LVDBE_ON) {
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[0], 1.0f);
         LVC_Mixer_SetTarget(&pInstance->pData->BypassMixer.MixerStream[1], 0);
     }
@@ -354,5 +331,5 @@
      */
     pInstance->Params = *pParams;
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
 }
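
LVDBE_Control above follows a read-modify-write pattern: a caller typically fetches the current settings with LVDBE_GetParameters, changes the fields of interest and passes the struct back, and the function reconfigures only the blocks whose inputs changed (filters, AGC, volume, bypass mixer) before storing the new parameters. A caller-side sketch using only names visible in this change; the helper name and the surrounding init and process calls are assumed context and are not shown.

    // Sketch only: a typical LVDBE_Control call sequence.
    #include "LVDBE.h"

    LVDBE_ReturnStatus_en enableBassBoost(LVDBE_Handle_t hInstance) {
        LVDBE_Params_t params;
        LVDBE_ReturnStatus_en status = LVDBE_GetParameters(hInstance, &params);
        if (status != LVDBE_SUCCESS) {
            return status;
        }
        params.OperatingMode = LVDBE_ON;  // effect path mixer target ramps to 1.0f
        params.HPFSelect = LVDBE_HPF_ON;  // AGC gain comes from LVDBE_AGC_HPFGAIN_Table
        return LVDBE_Control(hInstance, &params);  // only changed blocks are reconfigured
    }
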
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index ad77696..b113f48 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -21,209 +21,93 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+#include <system/audio.h>
+#include <stdlib.h>
 #include "LVDBE.h"
 #include "LVDBE_Private.h"
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                 LVDBE_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*    This function is used for memory allocation and free. It can be called in         */
-/*    two ways:                                                                         */
-/*                                                                                      */
-/*        hInstance = NULL                Returns the memory requirements               */
-/*        hInstance = Instance handle        Returns the memory requirements and        */
-/*                                        allocated base addresses for the instance     */
-/*                                                                                      */
-/*    When this function is called for memory allocation (hInstance=NULL) the memory    */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*    When the function is called for free (hInstance = Instance Handle) the memory     */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance                Instance Handle                                            */
-/*  pMemoryTable             Pointer to an empty memory definition table                */
-/*    pCapabilities           Pointer to the instance capabilities                      */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVDBE_SUCCESS            Succeeded                                                  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*    1.    This function may be interrupted by the LVDBE_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Memory(LVDBE_Handle_t            hInstance,
-                                   LVDBE_MemTab_t            *pMemoryTable,
-                                   LVDBE_Capabilities_t      *pCapabilities)
-{
-
-    LVM_UINT32          ScratchSize;
-    LVDBE_Instance_t    *pInstance = (LVDBE_Instance_t *)hInstance;
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Size         = sizeof(LVDBE_Instance_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Alignment    = LVDBE_INSTANCE_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].Type         = LVDBE_PERSISTENT;
-        pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Data memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size   = sizeof(LVDBE_Data_FLOAT_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Alignment    = LVDBE_PERSISTENT_DATA_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].Type         = LVDBE_PERSISTENT_DATA;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Coef memory
-         */
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size   = sizeof(LVDBE_Coef_FLOAT_t);
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Alignment    = LVDBE_PERSISTENT_COEF_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].Type         = LVDBE_PERSISTENT_COEF;
-        pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        ScratchSize = (LVM_UINT32)(LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT) * \
-                                        pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Size         = ScratchSize;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Alignment    = LVDBE_SCRATCH_ALIGN;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].Type         = LVDBE_SCRATCH;
-        pMemoryTable->Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVDBE_SUCCESS);
-}
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                 LVDBE_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*    Create and initialisation function for the Dynamic Bass Enhancement module        */
-/*                                                                                      */
-/*    This function can be used to create an algorithm instance by calling with         */
-/*    hInstance set to NULL. In this case the algorithm returns the new instance        */
-/*    handle.                                                                           */
-/*                                                                                      */
-/*    This function can be used to force a full re-initialisation of the algorithm      */
-/*    by calling with hInstance = Instance Handle. In this case the memory table        */
-/*    should be correct for the instance, this can be ensured by calling the function   */
-/*    DBE_Memory before calling this function.                                          */
+/*    Creation and initialisation function for the Bass Enhancement module              */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance                  Instance handle                                          */
-/*  pMemoryTable             Pointer to the memory definition table                     */
-/*  pCapabilities              Pointer to the instance capabilities                     */
+/*  phInstance               Pointer to instance handle                                 */
+/*  pCapabilities            Pointer to the initialisation capabilities                 */
+/*  pScratch                 Pointer to the bundle scratch buffer                       */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVDBE_SUCCESS            Initialisation succeeded                                   */
-/*  LVDBE_ALIGNMENTERROR    Instance or scratch memory on incorrect alignment           */
-/*    LVDBE_NULLADDRESS        Instance or scratch memory has a NULL pointer            */
+/*  LVDBE_NULLADDRESS        One or more memory allocations (malloc) returned NULL      */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.     The instance handle is the pointer to the base address of the first memory   */
-/*        region.                                                                       */
-/*    2.    This function must not be interrupted by the LVDBE_Process function         */
+/*  1.    This function must not be interrupted by the LVDBE_Process function           */
 /*                                                                                      */
 /****************************************************************************************/
-
-LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t         *phInstance,
-                                   LVDBE_MemTab_t       *pMemoryTable,
-                                   LVDBE_Capabilities_t *pCapabilities)
-{
-
-    LVDBE_Instance_t      *pInstance;
-    LVMixer3_1St_FLOAT_st       *pMixer_Instance;
-    LVMixer3_2St_FLOAT_st       *pBypassMixer_Instance;
-    LVM_FLOAT             MixGain;
-    LVM_INT16             i;
+LVDBE_ReturnStatus_en LVDBE_Init(LVDBE_Handle_t* phInstance, LVDBE_Capabilities_t* pCapabilities,
+                                 void* pScratch) {
+    LVDBE_Instance_t* pInstance;
+    LVMixer3_1St_FLOAT_st* pMixer_Instance;
+    LVMixer3_2St_FLOAT_st* pBypassMixer_Instance;
+    LVM_FLOAT MixGain;
 
     /*
-     * Set the instance handle if not already initialised
+     * Create the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = (LVDBE_Handle_t)pMemoryTable->Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress;
+    if (*phInstance == LVM_NULL) {
+        *phInstance = new LVDBE_Instance_t{};
     }
-    pInstance =(LVDBE_Instance_t  *)*phInstance;
-
-    /*
-     * Check the memory table for NULL pointers and incorrectly aligned data
-     */
-    for (i=0; i<LVDBE_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVDBE_NULLADDRESS);
-            }
-            if (((uintptr_t)pMemoryTable->Region[i].pBaseAddress % pMemoryTable->Region[i].Alignment)!=0){
-                return(LVDBE_ALIGNMENTERROR);
-            }
-        }
-    }
+    pInstance = (LVDBE_Instance_t*)*phInstance;
 
     /*
      * Save the capabilities in the instance structure
      */
     pInstance->Capabilities = *pCapabilities;
 
-    /*
-     * Save the memory table in the instance structure
-     */
-    pInstance->MemoryTable = *pMemoryTable;
+    pInstance->pScratch = pScratch;
 
     /*
      * Set the default instance parameters
      */
-    pInstance->Params.CentreFrequency   =    LVDBE_CENTRE_55HZ;
-    pInstance->Params.EffectLevel       =    0;
-    pInstance->Params.HeadroomdB        =    0;
-    pInstance->Params.HPFSelect         =    LVDBE_HPF_OFF;
-    pInstance->Params.OperatingMode     =    LVDBE_OFF;
-    pInstance->Params.SampleRate        =    LVDBE_FS_8000;
-    pInstance->Params.VolumeControl     =    LVDBE_VOLUME_OFF;
-    pInstance->Params.VolumedB          =    0;
+    pInstance->Params.CentreFrequency = LVDBE_CENTRE_55HZ;
+    pInstance->Params.EffectLevel = 0;
+    pInstance->Params.HeadroomdB = 0;
+    pInstance->Params.HPFSelect = LVDBE_HPF_OFF;
+    pInstance->Params.OperatingMode = LVDBE_OFF;
+    pInstance->Params.SampleRate = LVDBE_FS_8000;
+    pInstance->Params.VolumeControl = LVDBE_VOLUME_OFF;
+    pInstance->Params.VolumedB = 0;
+    pInstance->Params.NrChannels = FCC_2;
 
     /*
-     * Set pointer to data and coef memory
+     * Allocate the instance data memory
      */
-    pInstance->pData =
-         (LVDBE_Data_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress;
-    pInstance->pCoef =
-         (LVDBE_Coef_FLOAT_t *)pMemoryTable->Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress;
+    pInstance->pData = (LVDBE_Data_FLOAT_t*)calloc(1, sizeof(*(pInstance->pData)));
+    if (pInstance->pData == NULL) {
+        return LVDBE_NULLADDRESS;
+    }
+    /*
+     * Create biquad instance
+     */
+    pInstance->pHPFBiquad.reset(
+            new android::audio_utils::BiquadFilter<LVM_FLOAT>(pInstance->Params.NrChannels));
+    pInstance->pBPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1));
 
     /*
      * Initialise the filters
      */
-    LVDBE_SetFilters(pInstance,                 /* Set the filter taps and coefficients */
+    LVDBE_SetFilters(pInstance, /* Set the filter taps and coefficients */
                      &pInstance->Params);
 
     /*
      * Initialise the AGC
      */
-    LVDBE_SetAGC(pInstance,                                     /* Set the AGC gain */
+    LVDBE_SetAGC(pInstance, /* Set the AGC gain */
                  &pInstance->Params);
     pInstance->pData->AGCInstance.AGC_Gain = pInstance->pData->AGCInstance.AGC_MaxGain;
-                                                /* Default to the bass boost setting */
+    /* Default to the bass boost setting */
 
     // initialize the mixer with some fixes values since otherwise LVDBE_SetVolume ends up
     // reading uninitialized data
@@ -233,11 +117,11 @@
     /*
      * Initialise the volume
      */
-    LVDBE_SetVolume(pInstance,                                         /* Set the Volume */
+    LVDBE_SetVolume(pInstance, /* Set the Volume */
                     &pInstance->Params);
 
     pInstance->pData->AGCInstance.Volume = pInstance->pData->AGCInstance.Target;
-                                                /* Initialise as the target */
+    /* Initialise as the target */
     MixGain = LVC_Mixer_GetTarget(&pMixer_Instance->MixerStream[0]);
     LVC_Mixer_Init(&pMixer_Instance->MixerStream[0], MixGain, MixGain);
 
@@ -259,11 +143,11 @@
     pBypassMixer_Instance->MixerStream[0].CallbackParam = 0;
     pBypassMixer_Instance->MixerStream[0].pCallbackHandle = LVM_NULL;
     pBypassMixer_Instance->MixerStream[0].pCallBack = LVM_NULL;
-    pBypassMixer_Instance->MixerStream[0].CallbackSet=0;
+    pBypassMixer_Instance->MixerStream[0].CallbackSet = 0;
 
-    LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0],0,0);
-    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0],
-        LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate,2);
+    LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[0], 0, 0);
+    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[0], LVDBE_BYPASS_MIXER_TC,
+                              (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 
     /*
      * Setup the mixer gain for the unprocessed path
@@ -271,10 +155,34 @@
     pBypassMixer_Instance->MixerStream[1].CallbackParam = 0;
     pBypassMixer_Instance->MixerStream[1].pCallbackHandle = LVM_NULL;
     pBypassMixer_Instance->MixerStream[1].pCallBack = LVM_NULL;
-    pBypassMixer_Instance->MixerStream[1].CallbackSet=0;
+    pBypassMixer_Instance->MixerStream[1].CallbackSet = 0;
     LVC_Mixer_Init(&pBypassMixer_Instance->MixerStream[1], 1.0, 1.0);
-    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1],
-        LVDBE_BYPASS_MIXER_TC,(LVM_Fs_en)pInstance->Params.SampleRate, 2);
+    LVC_Mixer_SetTimeConstant(&pBypassMixer_Instance->MixerStream[1], LVDBE_BYPASS_MIXER_TC,
+                              (LVM_Fs_en)pInstance->Params.SampleRate, 2);
 
-    return(LVDBE_SUCCESS);
+    return (LVDBE_SUCCESS);
+}
+
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                 LVDBE_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memory allocated by LVDBE_Init, including the instance handle            */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance               Pointer to instance handle                                 */
+/*                                                                                      */
+/****************************************************************************************/
+void LVDBE_DeInit(LVDBE_Handle_t* phInstance) {
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)*phInstance;
+    if (pInstance == LVM_NULL) {
+        return;
+    }
+    if (pInstance->pData != LVM_NULL) {
+        free(pInstance->pData);
+        pInstance->pData = LVM_NULL;
+    }
+    delete pInstance;
+    *phInstance = LVM_NULL;
 }
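
The new create/destroy flow above replaces the old memory-table handshake: the caller
passes only the capabilities and a bundle scratch buffer, and LVDBE_Init allocates the
instance itself when *phInstance is LVM_NULL. A minimal caller-side sketch follows; the
helper names and the scratch sizing (two buffers of FCC_2 * MaxBlockSize floats, inferred
from LVDBE_Process) are illustrative assumptions, not part of this patch.

#include <cstdlib>

#include "LVDBE.h"

// Hypothetical helper: create an instance with the post-patch API.
// Only MaxBlockSize is filled in here; other capability fields are omitted for brevity.
LVDBE_ReturnStatus_en createBassEnhancer(LVDBE_Handle_t* phInstance, void** ppScratch,
                                         LVM_UINT16 maxBlockSize) {
    LVDBE_Capabilities_t caps{};
    caps.MaxBlockSize = maxBlockSize;

    // Assumed sizing: DBE path plus mono/bypass-volume path, stereo float frames.
    *ppScratch = calloc(2 * FCC_2 * maxBlockSize, sizeof(LVM_FLOAT));
    if (*ppScratch == LVM_NULL) {
        return LVDBE_NULLADDRESS;
    }

    *phInstance = LVM_NULL;  // request allocation of a new instance
    LVDBE_ReturnStatus_en status = LVDBE_Init(phInstance, &caps, *ppScratch);
    if (status != LVDBE_SUCCESS) {
        free(*ppScratch);
        *ppScratch = LVM_NULL;
    }
    return status;
}

// Hypothetical helper: tear-down pairs LVDBE_DeInit with the caller-owned scratch.
void destroyBassEnhancer(LVDBE_Handle_t* phInstance, void* pScratch) {
    LVDBE_DeInit(phInstance);  // frees pData and the instance, clears the handle
    free(pScratch);            // the scratch buffer belongs to the caller, not the library
}
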
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
index f3faaed..7ac5db3 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Private.h
@@ -33,7 +33,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#include "LVDBE.h"                                /* Calling or Application layer definitions */
+#include <audio_utils/BiquadFilter.h>
+#include "LVDBE.h" /* Calling or Application layer definitions */
 #include "BIQUAD.h"
 #include "LVC_Mixer.h"
 #include "AGC.h"
@@ -45,28 +46,10 @@
 /****************************************************************************************/
 
 /* General */
-#define    LVDBE_INVALID            0xFFFF        /* Invalid init parameter */
+#define LVDBE_INVALID 0xFFFF /* Invalid init parameter */
 
-/* Memory */
-#define LVDBE_MEMREGION_INSTANCE         0       /* Offset to the instance memory region */
-#define LVDBE_MEMREGION_PERSISTENT_DATA  1       /* Offset to persistent data memory region */
-#define LVDBE_MEMREGION_PERSISTENT_COEF  2       /* Offset to persistent coefficient region */
-#define LVDBE_MEMREGION_SCRATCH          3       /* Offset to data scratch memory region */
-
-#define LVDBE_INSTANCE_ALIGN             4       /* 32-bit alignment for structures */
-#define LVDBE_PERSISTENT_DATA_ALIGN      4       /* 32-bit alignment for data */
-#define LVDBE_PERSISTENT_COEF_ALIGN      4       /* 32-bit alignment for coef */
-#define LVDBE_SCRATCH_ALIGN              4       /* 32-bit alignment for long data */
-
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVDBE_SCRATCHBUFFERS_INPLACE     (LVM_MAX_CHANNELS * 3)
-#else
-#define LVDBE_SCRATCHBUFFERS_INPLACE     6       /* Number of buffers required for inplace processing */
-#endif
-
-#define LVDBE_MIXER_TC                   5       /* Mixer time  */
-#define LVDBE_BYPASS_MIXER_TC            100     /* Bypass mixer time */
+#define LVDBE_MIXER_TC 5          /* Mixer time  */
+#define LVDBE_BYPASS_MIXER_TC 100 /* Bypass mixer time */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -76,37 +59,30 @@
 
 /* Data structure */
-typedef struct
-{
+typedef struct {
     /* AGC parameters */
-    AGC_MIX_VOL_2St1Mon_FLOAT_t   AGCInstance;        /* AGC instance parameters */
+    AGC_MIX_VOL_2St1Mon_FLOAT_t AGCInstance; /* AGC instance parameters */
 
     /* Process variables */
-    Biquad_2I_Order2_FLOAT_Taps_t     HPFTaps;            /* High pass filter taps */
-    Biquad_1I_Order2_FLOAT_Taps_t     BPFTaps;            /* Band pass filter taps */
-    LVMixer3_1St_FLOAT_st             BypassVolume;       /* Bypass volume scaler */
-    LVMixer3_2St_FLOAT_st             BypassMixer;        /* Bypass Mixer for Click Removal */
+    LVMixer3_1St_FLOAT_st BypassVolume;    /* Bypass volume scaler */
+    LVMixer3_2St_FLOAT_st BypassMixer;     /* Bypass Mixer for Click Removal */
 
 } LVDBE_Data_FLOAT_t;
 
-/* Coefs structure */
-typedef struct
-{
-    /* Process variables */
-    Biquad_FLOAT_Instance_t           HPFInstance;        /* High pass filter instance */
-    Biquad_FLOAT_Instance_t           BPFInstance;        /* Band pass filter instance */
-} LVDBE_Coef_FLOAT_t;
+
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVDBE_MemTab_t                MemoryTable;        /* Instance memory allocation table */
-    LVDBE_Params_t                Params;             /* Instance parameters */
-    LVDBE_Capabilities_t        Capabilities;         /* Instance capabilities */
+    LVDBE_Params_t Params;             /* Instance parameters */
+    LVDBE_Capabilities_t Capabilities; /* Instance capabilities */
 
     /* Data and coefficient pointers */
-    LVDBE_Data_FLOAT_t                *pData;                /* Instance data */
-    LVDBE_Coef_FLOAT_t                *pCoef;                /* Instance coefficients */
+    LVDBE_Data_FLOAT_t* pData; /* Instance data */
+    void* pScratch;            /* Pointer to the bundle scratch buffer */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pHPFBiquad; /* Biquad filter instance for HPF */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pBPFBiquad; /* Biquad filter instance for BPF */
 } LVDBE_Instance_t;
 
 /****************************************************************************************/
@@ -115,13 +91,10 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void    LVDBE_SetAGC(LVDBE_Instance_t       *pInstance,
-                     LVDBE_Params_t         *pParams);
+void LVDBE_SetAGC(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-void    LVDBE_SetVolume(LVDBE_Instance_t    *pInstance,
-                        LVDBE_Params_t      *pParams);
+void LVDBE_SetVolume(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-void    LVDBE_SetFilters(LVDBE_Instance_t   *pInstance,
-                         LVDBE_Params_t     *pParams);
+void LVDBE_SetFilters(LVDBE_Instance_t* pInstance, LVDBE_Params_t* pParams);
 
-#endif      /* __LVDBE_PRIVATE_H__ */
+#endif /* __LVDBE_PRIVATE_H__ */
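
With the taps and coefficient memory regions gone, all biquad state now lives inside the
two BiquadFilter members declared above. The sketch below shows what re-creation looks
like (for example after a channel-count change); it only uses calls that appear in this
patch, and the helper itself is hypothetical.

#include <audio_utils/BiquadFilter.h>

#include "LVDBE_Private.h"

// Hypothetical helper: rebuild the filters the same way LVDBE_Init does above.
static void LVDBE_RecreateBiquads(LVDBE_Instance_t* pInstance) {
    // The HPF runs on the full interleaved stream, the BPF on the derived mono stream.
    pInstance->pHPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
            pInstance->Params.NrChannels));
    pInstance->pBPFBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1));
    // Coefficients are reloaded afterwards by LVDBE_SetFilters() for the current
    // sample rate and centre frequency.
}
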
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
index b4a71c7..0969053 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Process.cpp
@@ -20,13 +20,14 @@
 /*    Includes                                                                          */
 /*                                                                                      */
 /****************************************************************************************/
+#include <audio_utils/BiquadFilter.h>
 
-#include <string.h> // memset
+#include <string.h>  // memset
 #include "LVDBE.h"
 #include "LVDBE_Private.h"
 #include "VectorArithmetic.h"
 #include "AGC.h"
-#include "LVDBE_Coeffs.h"               /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
 #include <log/log.h>
 
 /********************************************************************************************/
@@ -73,179 +74,103 @@
 /*     overall end-to-end gain is 0dB.                                                      */
 /*                                                                                          */
 /********************************************************************************************/
-LVDBE_ReturnStatus_en LVDBE_Process(LVDBE_Handle_t hInstance,
-    const LVM_FLOAT *pInData,
-    LVM_FLOAT *pOutData,
-    const LVM_UINT16 NrFrames) // updated to use samples = frames * channels.
+LVDBE_ReturnStatus_en LVDBE_Process(
+        LVDBE_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+        const LVM_UINT16 NrFrames)  // updated to use samples = frames * channels.
 {
-  LVDBE_Instance_t *pInstance =(LVDBE_Instance_t *)hInstance;
+    LVDBE_Instance_t* pInstance = (LVDBE_Instance_t*)hInstance;
+    const LVM_INT32 NrChannels = pInstance->Params.NrChannels;
+    const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
-  /*Extract number of Channels info*/
-#ifdef SUPPORT_MC
-  // Mono passed in as stereo
-  const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
-      ? 2 : pInstance->Params.NrChannels;
-#else
-  const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
-  const LVM_INT32 NrSamples = NrChannels * NrFrames;
-
-  /* Space to store DBE path computation */
-  LVM_FLOAT * const pScratch =
-          (LVM_FLOAT *)pInstance->MemoryTable.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress;
-
-  /*
-   * Scratch for Mono path starts at offset of
-   * NrSamples float values from pScratch.
-   */
-  LVM_FLOAT * const pMono = pScratch + NrSamples;
-
-  /*
-   * TRICKY: pMono is used and discarded by the DBE path.
-   *         so it is available for use for the pScratchVol
-   *         path which is computed afterwards.
-   *
-   * Space to store Volume Control path computation.
-   * This is identical to pMono (see TRICKY comment).
-   */
-  LVM_FLOAT * const pScratchVol = pMono;
-
-  /*
-   * Check the number of frames is not too large
-   */
-  if (NrFrames > pInstance->Capabilities.MaxBlockSize)
-  {
-    return LVDBE_TOOMANYSAMPLES;
-  }
-
-  /*
-   * Check if the algorithm is enabled
-   */
-  /* DBE path is processed when DBE is ON or during On/Off transitions */
-  if ((pInstance->Params.OperatingMode == LVDBE_ON)||
-      (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0])
-          !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0])))
-  {
-    // make copy of input data
-    Copy_Float(pInData,
-        pScratch,
-        (LVM_INT16)NrSamples);
+    /* Space to store DBE path computation */
+    LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
-     * Apply the high pass filter if selected
+     * Scratch for Mono path starts at offset of
+     * NrSamples float values from pScratch.
      */
-    if (pInstance->Params.HPFSelect == LVDBE_HPF_ON)
-    {
-#ifdef SUPPORT_MC
-      BQ_MC_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance, /* Filter instance      */
-          pScratch, /* Source               */
-          pScratch, /* Destination          */
-          (LVM_INT16)NrFrames,
-          (LVM_INT16)NrChannels);
-#else
-      BQ_2I_D32F32C30_TRC_WRA_01(&pInstance->pCoef->HPFInstance,/* Filter instance      */
-          pScratch, /* Source               */
-          pScratch, /* Destination          */
-          (LVM_INT16)NrFrames);
-#endif
+    LVM_FLOAT* const pMono = pScratch + NrSamples;
+
+    /*
+     * TRICKY: pMono is used and discarded by the DBE path.
+     *         so it is available for use for the pScratchVol
+     *         path which is computed afterwards.
+     *
+     * Space to store Volume Control path computation.
+     * This is identical to pMono (see TRICKY comment).
+     */
+    LVM_FLOAT* const pScratchVol = pMono;
+
+    /*
+     * Check the number of frames is not too large
+     */
+    if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
+        return LVDBE_TOOMANYSAMPLES;
     }
 
     /*
-     * Create the mono stream
+     * Check if the algorithm is enabled
      */
-#ifdef SUPPORT_MC
-    FromMcToMono_Float(pScratch, /* Source */
-        pMono, /* Mono destination */
-        (LVM_INT16)NrFrames,  /* Number of frames */
-        (LVM_INT16)NrChannels);
-#else
-    From2iToMono_Float(pScratch, /* Stereo source         */
-        pMono, /* Mono destination      */
-        (LVM_INT16)NrFrames);
-#endif
+    /* DBE path is processed when DBE is ON or during On/Off transitions */
+    if ((pInstance->Params.OperatingMode == LVDBE_ON) ||
+        (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[0]) !=
+         LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[0]))) {
+        // make copy of input data
+        Copy_Float(pInData, pScratch, (LVM_INT16)NrSamples);
 
-    /*
-     * Apply the band pass filter
-     */
-    BP_1I_D32F32C30_TRC_WRA_02(&pInstance->pCoef->BPFInstance, /* Filter instance       */
-        pMono, /* Source                */
-        pMono, /* Destination           */
-        (LVM_INT16)NrFrames);
+        /*
+         * Apply the high pass filter if selected
+         */
+        if (pInstance->Params.HPFSelect == LVDBE_HPF_ON) {
+            pInstance->pHPFBiquad->process(pScratch, pScratch, NrFrames);
+        }
 
-    /*
-     * Apply the AGC and mix
-     */
-#ifdef SUPPORT_MC
-    AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
-        pScratch, /* Source         */
-        pMono, /* Mono band pass source */
-        pScratch, /* Destination    */
-        NrFrames, /* Number of frames     */
-        NrChannels); /* Number of channels     */
-#else
-    AGC_MIX_VOL_2St1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
-        pScratch, /* Stereo source         */
-        pMono, /* Mono band pass source */
-        pScratch, /* Stereo destination    */
-        NrFrames);
-#endif
+        /*
+         * Create the mono stream
+         */
+        FromMcToMono_Float(pScratch,            /* Source */
+                           pMono,               /* Mono destination */
+                           (LVM_INT16)NrFrames, /* Number of frames */
+                           (LVM_INT16)NrChannels);
 
-    for (LVM_INT32 ii = 0; ii < NrSamples; ++ii) {
-      //TODO: replace with existing clamping function
-      if (pScratch[ii] < -1.0) {
-        pScratch[ii] = -1.0;
-      } else if (pScratch[ii] > 1.0) {
-        pScratch[ii] = 1.0;
-      }
+        /*
+         * Apply the band pass filter
+         */
+        pInstance->pBPFBiquad->process(pMono, pMono, NrFrames);
+
+        /*
+         * Apply the AGC and mix
+         */
+        AGC_MIX_VOL_Mc1Mon_D32_WRA(&pInstance->pData->AGCInstance, /* Instance pointer      */
+                                   pScratch,                       /* Source         */
+                                   pMono,                          /* Mono band pass source */
+                                   pScratch,                       /* Destination    */
+                                   NrFrames,                       /* Number of frames     */
+                                   NrChannels);                    /* Number of channels     */
+    } else {
+        // clear DBE processed path
+        memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
     }
-  } else {
-    // clear DBE processed path
-    memset(pScratch, 0, sizeof(*pScratch) * NrSamples);
-  }
 
-  /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
-  if ((pInstance->Params.OperatingMode == LVDBE_OFF)||
-      (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1])
-          !=LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1])))
-  {
+    /* Bypass Volume path is processed when DBE is OFF or during On/Off transitions */
+    if ((pInstance->Params.OperatingMode == LVDBE_OFF) ||
+        (LVC_Mixer_GetCurrent(&pInstance->pData->BypassMixer.MixerStream[1]) !=
+         LVC_Mixer_GetTarget(&pInstance->pData->BypassMixer.MixerStream[1]))) {
+        /*
+         * The algorithm is disabled but volume management is required to compensate for
+         * headroom and volume (if enabled)
+         */
+        LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume, pInData, pScratchVol,
+                                  (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+    } else {
+        // clear bypass volume path
+        memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
+    }
 
     /*
-     * The algorithm is disabled but volume management is required to compensate for
-     * headroom and volume (if enabled)
+     * Mix DBE processed path and bypass volume path
      */
-#ifdef SUPPORT_MC
-    LVC_MixSoft_Mc_D16C31_SAT(&pInstance->pData->BypassVolume,
-        pInData,
-        pScratchVol,
-        (LVM_INT16)NrFrames,
-        (LVM_INT16)NrChannels);
-#else
-    LVC_MixSoft_1St_D16C31_SAT(&pInstance->pData->BypassVolume,
-        pInData,
-        pScratchVol,
-        (LVM_INT16)NrSamples); /* Left and right, really # samples */
-#endif
-  } else {
-    // clear bypass volume path
-    memset(pScratchVol, 0, sizeof(*pScratchVol) * NrSamples);
-  }
-
-  /*
-   * Mix DBE processed path and bypass volume path
-   */
-#ifdef SUPPORT_MC
-  LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer,
-      pScratch,
-      pScratchVol,
-      pOutData,
-      (LVM_INT16)NrFrames,
-      (LVM_INT16)NrChannels);
-#else
-  LVC_MixSoft_2St_D16C31_SAT(&pInstance->pData->BypassMixer,
-      pScratch,
-      pScratchVol,
-      pOutData,
-      (LVM_INT16)NrSamples);
-#endif
-  return LVDBE_SUCCESS;
+    LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->pData->BypassMixer, pScratch, pScratchVol, pOutData,
+                               (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
+    return LVDBE_SUCCESS;
 }
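
The scratch partitioning that LVDBE_Process relies on can be summarised as: the first
NrSamples floats carry the DBE path, and the next NrSamples floats hold the mono band-pass
signal and are then reused as the bypass-volume path. The sizing helper below is an
inference from this code, not a constant exported by the library; the bundle that owns
pScratch would presumably size its shared buffer at least this large.

#include <cstddef>

#include "LVDBE.h"

// Hypothetical sizing helper for the caller-provided pScratch buffer:
//   [0, NrSamples)             DBE path (copy of the input, filtered in place)
//   [NrSamples, 2 * NrSamples) mono band-pass signal, reused as pScratchVol
static inline size_t lvdbeScratchBytes(size_t maxFrames, size_t maxChannels) {
    const size_t nrSamples = maxFrames * maxChannels;
    return 2 * nrSamples * sizeof(LVM_FLOAT);
}
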
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
index 728575c..1b95812 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Tables.cpp
@@ -22,7 +22,7 @@
 /************************************************************************************/
 
 #include "LVDBE.h"
-#include "LVDBE_Coeffs.h"               /* Filter coefficients */
+#include "LVDBE_Coeffs.h" /* Filter coefficients */
 #include "LVDBE_Tables.h"
 #include "BIQUAD.h"
 
@@ -36,275 +36,119 @@
  * High Pass Filter Coefficient table
  */
 const BQ_FLOAT_Coefs_t LVDBE_HPF_Table[] = {
-    /* Coefficients for 55Hz centre frequency */
-    {HPF_Fs8000_Fc55_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc55_A1,
-     HPF_Fs8000_Fc55_A0,
-     -HPF_Fs8000_Fc55_B2,
-     -HPF_Fs8000_Fc55_B1},
-    {HPF_Fs11025_Fc55_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc55_A1,
-     HPF_Fs11025_Fc55_A0,
-     -HPF_Fs11025_Fc55_B2,
-     -HPF_Fs11025_Fc55_B1},
-    {HPF_Fs12000_Fc55_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc55_A1,
-     HPF_Fs12000_Fc55_A0,
-     -HPF_Fs12000_Fc55_B2,
-     -HPF_Fs12000_Fc55_B1},
-    {HPF_Fs16000_Fc55_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc55_A1,
-     HPF_Fs16000_Fc55_A0,
-     -HPF_Fs16000_Fc55_B2,
-     -HPF_Fs16000_Fc55_B1},
-    {HPF_Fs22050_Fc55_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc55_A1,
-     HPF_Fs22050_Fc55_A0,
-     -HPF_Fs22050_Fc55_B2,
-     -HPF_Fs22050_Fc55_B1},
-    {HPF_Fs24000_Fc55_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc55_A1,
-     HPF_Fs24000_Fc55_A0,
-     -HPF_Fs24000_Fc55_B2,
-     -HPF_Fs24000_Fc55_B1},
-    {HPF_Fs32000_Fc55_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc55_A1,
-     HPF_Fs32000_Fc55_A0,
-     -HPF_Fs32000_Fc55_B2,
-     -HPF_Fs32000_Fc55_B1},
-    {HPF_Fs44100_Fc55_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc55_A1,
-     HPF_Fs44100_Fc55_A0,
-     -HPF_Fs44100_Fc55_B2,
-     -HPF_Fs44100_Fc55_B1},
-    {HPF_Fs48000_Fc55_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc55_A1,
-     HPF_Fs48000_Fc55_A0,
-     -HPF_Fs48000_Fc55_B2,
-     -HPF_Fs48000_Fc55_B1},
-    {HPF_Fs88200_Fc55_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc55_A1,
-     HPF_Fs88200_Fc55_A0,
-     -HPF_Fs88200_Fc55_B2,
-     -HPF_Fs88200_Fc55_B1},
-    {HPF_Fs96000_Fc55_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc55_A1,
-     HPF_Fs96000_Fc55_A0,
-     -HPF_Fs96000_Fc55_B2,
-     -HPF_Fs96000_Fc55_B1},
-    {HPF_Fs176400_Fc55_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc55_A1,
-     HPF_Fs176400_Fc55_A0,
-     -HPF_Fs176400_Fc55_B2,
-     -HPF_Fs176400_Fc55_B1},
-    {HPF_Fs192000_Fc55_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc55_A1,
-     HPF_Fs192000_Fc55_A0,
-     -HPF_Fs192000_Fc55_B2,
-     -HPF_Fs192000_Fc55_B1},
+        /* Coefficients for 55Hz centre frequency */
+        {HPF_Fs8000_Fc55_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc55_A1, HPF_Fs8000_Fc55_A0, -HPF_Fs8000_Fc55_B2, -HPF_Fs8000_Fc55_B1},
+        {HPF_Fs11025_Fc55_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc55_A1, HPF_Fs11025_Fc55_A0, -HPF_Fs11025_Fc55_B2, -HPF_Fs11025_Fc55_B1},
+        {HPF_Fs12000_Fc55_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc55_A1, HPF_Fs12000_Fc55_A0, -HPF_Fs12000_Fc55_B2, -HPF_Fs12000_Fc55_B1},
+        {HPF_Fs16000_Fc55_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc55_A1, HPF_Fs16000_Fc55_A0, -HPF_Fs16000_Fc55_B2, -HPF_Fs16000_Fc55_B1},
+        {HPF_Fs22050_Fc55_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc55_A1, HPF_Fs22050_Fc55_A0, -HPF_Fs22050_Fc55_B2, -HPF_Fs22050_Fc55_B1},
+        {HPF_Fs24000_Fc55_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc55_A1, HPF_Fs24000_Fc55_A0, -HPF_Fs24000_Fc55_B2, -HPF_Fs24000_Fc55_B1},
+        {HPF_Fs32000_Fc55_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc55_A1, HPF_Fs32000_Fc55_A0, -HPF_Fs32000_Fc55_B2, -HPF_Fs32000_Fc55_B1},
+        {HPF_Fs44100_Fc55_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc55_A1, HPF_Fs44100_Fc55_A0, -HPF_Fs44100_Fc55_B2, -HPF_Fs44100_Fc55_B1},
+        {HPF_Fs48000_Fc55_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc55_A1, HPF_Fs48000_Fc55_A0, -HPF_Fs48000_Fc55_B2, -HPF_Fs48000_Fc55_B1},
+        {HPF_Fs88200_Fc55_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc55_A1, HPF_Fs88200_Fc55_A0, -HPF_Fs88200_Fc55_B2, -HPF_Fs88200_Fc55_B1},
+        {HPF_Fs96000_Fc55_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc55_A1, HPF_Fs96000_Fc55_A0, -HPF_Fs96000_Fc55_B2, -HPF_Fs96000_Fc55_B1},
+        {HPF_Fs176400_Fc55_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc55_A1, HPF_Fs176400_Fc55_A0, -HPF_Fs176400_Fc55_B2, -HPF_Fs176400_Fc55_B1},
+        {HPF_Fs192000_Fc55_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc55_A1, HPF_Fs192000_Fc55_A0, -HPF_Fs192000_Fc55_B2, -HPF_Fs192000_Fc55_B1},
 
-    /* Coefficients for 66Hz centre frequency */
-    {HPF_Fs8000_Fc66_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc66_A1,
-     HPF_Fs8000_Fc66_A0,
-     -HPF_Fs8000_Fc66_B2,
-     -HPF_Fs8000_Fc66_B1},
-    {HPF_Fs11025_Fc66_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc66_A1,
-     HPF_Fs11025_Fc66_A0,
-     -HPF_Fs11025_Fc66_B2,
-     -HPF_Fs11025_Fc66_B1},
-    {HPF_Fs12000_Fc66_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc66_A1,
-     HPF_Fs12000_Fc66_A0,
-     -HPF_Fs12000_Fc66_B2,
-     -HPF_Fs12000_Fc66_B1},
-    {HPF_Fs16000_Fc66_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc66_A1,
-     HPF_Fs16000_Fc66_A0,
-     -HPF_Fs16000_Fc66_B2,
-     -HPF_Fs16000_Fc66_B1},
-    {HPF_Fs22050_Fc66_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc66_A1,
-     HPF_Fs22050_Fc66_A0,
-     -HPF_Fs22050_Fc66_B2,
-     -HPF_Fs22050_Fc66_B1},
-    {HPF_Fs24000_Fc66_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc66_A1,
-     HPF_Fs24000_Fc66_A0,
-     -HPF_Fs24000_Fc66_B2,
-     -HPF_Fs24000_Fc66_B1},
-    {HPF_Fs32000_Fc66_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc66_A1,
-     HPF_Fs32000_Fc66_A0,
-     -HPF_Fs32000_Fc66_B2,
-     -HPF_Fs32000_Fc66_B1},
-    {HPF_Fs44100_Fc66_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc66_A1,
-     HPF_Fs44100_Fc66_A0,
-     -HPF_Fs44100_Fc66_B2,
-     -HPF_Fs44100_Fc66_B1},
-    {HPF_Fs48000_Fc66_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc66_A1,
-     HPF_Fs48000_Fc66_A0,
-     -HPF_Fs48000_Fc66_B2,
-     -HPF_Fs48000_Fc66_B1},
-    {HPF_Fs88200_Fc66_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc66_A1,
-     HPF_Fs88200_Fc66_A0,
-     -HPF_Fs88200_Fc66_B2,
-     -HPF_Fs88200_Fc66_B1},
-    {HPF_Fs96000_Fc66_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc66_A1,
-     HPF_Fs96000_Fc66_A0,
-     -HPF_Fs96000_Fc66_B2,
-     -HPF_Fs96000_Fc66_B1},
-    {HPF_Fs176400_Fc66_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc66_A1,
-     HPF_Fs176400_Fc66_A0,
-     -HPF_Fs176400_Fc66_B2,
-     -HPF_Fs176400_Fc66_B1},
-    {HPF_Fs192000_Fc66_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc66_A1,
-     HPF_Fs192000_Fc66_A0,
-     -HPF_Fs192000_Fc66_B2,
-     -HPF_Fs192000_Fc66_B1},
+        /* Coefficients for 66Hz centre frequency */
+        {HPF_Fs8000_Fc66_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc66_A1, HPF_Fs8000_Fc66_A0, -HPF_Fs8000_Fc66_B2, -HPF_Fs8000_Fc66_B1},
+        {HPF_Fs11025_Fc66_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc66_A1, HPF_Fs11025_Fc66_A0, -HPF_Fs11025_Fc66_B2, -HPF_Fs11025_Fc66_B1},
+        {HPF_Fs12000_Fc66_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc66_A1, HPF_Fs12000_Fc66_A0, -HPF_Fs12000_Fc66_B2, -HPF_Fs12000_Fc66_B1},
+        {HPF_Fs16000_Fc66_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc66_A1, HPF_Fs16000_Fc66_A0, -HPF_Fs16000_Fc66_B2, -HPF_Fs16000_Fc66_B1},
+        {HPF_Fs22050_Fc66_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc66_A1, HPF_Fs22050_Fc66_A0, -HPF_Fs22050_Fc66_B2, -HPF_Fs22050_Fc66_B1},
+        {HPF_Fs24000_Fc66_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc66_A1, HPF_Fs24000_Fc66_A0, -HPF_Fs24000_Fc66_B2, -HPF_Fs24000_Fc66_B1},
+        {HPF_Fs32000_Fc66_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc66_A1, HPF_Fs32000_Fc66_A0, -HPF_Fs32000_Fc66_B2, -HPF_Fs32000_Fc66_B1},
+        {HPF_Fs44100_Fc66_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc66_A1, HPF_Fs44100_Fc66_A0, -HPF_Fs44100_Fc66_B2, -HPF_Fs44100_Fc66_B1},
+        {HPF_Fs48000_Fc66_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc66_A1, HPF_Fs48000_Fc66_A0, -HPF_Fs48000_Fc66_B2, -HPF_Fs48000_Fc66_B1},
+        {HPF_Fs88200_Fc66_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc66_A1, HPF_Fs88200_Fc66_A0, -HPF_Fs88200_Fc66_B2, -HPF_Fs88200_Fc66_B1},
+        {HPF_Fs96000_Fc66_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc66_A1, HPF_Fs96000_Fc66_A0, -HPF_Fs96000_Fc66_B2, -HPF_Fs96000_Fc66_B1},
+        {HPF_Fs176400_Fc66_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc66_A1, HPF_Fs176400_Fc66_A0, -HPF_Fs176400_Fc66_B2, -HPF_Fs176400_Fc66_B1},
+        {HPF_Fs192000_Fc66_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc66_A1, HPF_Fs192000_Fc66_A0, -HPF_Fs192000_Fc66_B2, -HPF_Fs192000_Fc66_B1},
 
-    /* Coefficients for 78Hz centre frequency */
-    {HPF_Fs8000_Fc78_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc78_A1,
-     HPF_Fs8000_Fc78_A0,
-     -HPF_Fs8000_Fc78_B2,
-     -HPF_Fs8000_Fc78_B1},
-    {HPF_Fs11025_Fc78_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc78_A1,
-     HPF_Fs11025_Fc78_A0,
-     -HPF_Fs11025_Fc78_B2,
-     -HPF_Fs11025_Fc78_B1},
-    {HPF_Fs12000_Fc78_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc78_A1,
-     HPF_Fs12000_Fc78_A0,
-     -HPF_Fs12000_Fc78_B2,
-     -HPF_Fs12000_Fc78_B1},
-    {HPF_Fs16000_Fc78_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc78_A1,
-     HPF_Fs16000_Fc78_A0,
-     -HPF_Fs16000_Fc78_B2,
-     -HPF_Fs16000_Fc78_B1},
-    {HPF_Fs22050_Fc78_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc78_A1,
-     HPF_Fs22050_Fc78_A0,
-     -HPF_Fs22050_Fc78_B2,
-     -HPF_Fs22050_Fc78_B1},
-    {HPF_Fs24000_Fc78_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc78_A1,
-     HPF_Fs24000_Fc78_A0,
-     -HPF_Fs24000_Fc78_B2,
-     -HPF_Fs24000_Fc78_B1},
-    {HPF_Fs32000_Fc78_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc78_A1,
-     HPF_Fs32000_Fc78_A0,
-     -HPF_Fs32000_Fc78_B2,
-     -HPF_Fs32000_Fc78_B1},
-    {HPF_Fs44100_Fc78_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc78_A1,
-     HPF_Fs44100_Fc78_A0,
-     -HPF_Fs44100_Fc78_B2,
-     -HPF_Fs44100_Fc78_B1},
-    {HPF_Fs48000_Fc78_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc78_A1,
-     HPF_Fs48000_Fc78_A0,
-     -HPF_Fs48000_Fc78_B2,
-     -HPF_Fs48000_Fc78_B1},
-    {HPF_Fs88200_Fc78_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc78_A1,
-     HPF_Fs88200_Fc78_A0,
-     -HPF_Fs88200_Fc78_B2,
-     -HPF_Fs88200_Fc78_B1},
-    {HPF_Fs96000_Fc78_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc78_A1,
-     HPF_Fs96000_Fc78_A0,
-     -HPF_Fs96000_Fc78_B2,
-     -HPF_Fs96000_Fc78_B1},
-    {HPF_Fs176400_Fc78_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc78_A1,
-     HPF_Fs176400_Fc78_A0,
-     -HPF_Fs176400_Fc78_B2,
-     -HPF_Fs176400_Fc78_B1},
-    {HPF_Fs192000_Fc78_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc78_A1,
-     HPF_Fs192000_Fc78_A0,
-     -HPF_Fs192000_Fc78_B2,
-     -HPF_Fs192000_Fc78_B1},
+        /* Coefficients for 78Hz centre frequency */
+        {HPF_Fs8000_Fc78_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc78_A1, HPF_Fs8000_Fc78_A0, -HPF_Fs8000_Fc78_B2, -HPF_Fs8000_Fc78_B1},
+        {HPF_Fs11025_Fc78_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc78_A1, HPF_Fs11025_Fc78_A0, -HPF_Fs11025_Fc78_B2, -HPF_Fs11025_Fc78_B1},
+        {HPF_Fs12000_Fc78_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc78_A1, HPF_Fs12000_Fc78_A0, -HPF_Fs12000_Fc78_B2, -HPF_Fs12000_Fc78_B1},
+        {HPF_Fs16000_Fc78_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc78_A1, HPF_Fs16000_Fc78_A0, -HPF_Fs16000_Fc78_B2, -HPF_Fs16000_Fc78_B1},
+        {HPF_Fs22050_Fc78_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc78_A1, HPF_Fs22050_Fc78_A0, -HPF_Fs22050_Fc78_B2, -HPF_Fs22050_Fc78_B1},
+        {HPF_Fs24000_Fc78_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc78_A1, HPF_Fs24000_Fc78_A0, -HPF_Fs24000_Fc78_B2, -HPF_Fs24000_Fc78_B1},
+        {HPF_Fs32000_Fc78_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc78_A1, HPF_Fs32000_Fc78_A0, -HPF_Fs32000_Fc78_B2, -HPF_Fs32000_Fc78_B1},
+        {HPF_Fs44100_Fc78_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc78_A1, HPF_Fs44100_Fc78_A0, -HPF_Fs44100_Fc78_B2, -HPF_Fs44100_Fc78_B1},
+        {HPF_Fs48000_Fc78_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc78_A1, HPF_Fs48000_Fc78_A0, -HPF_Fs48000_Fc78_B2, -HPF_Fs48000_Fc78_B1},
+        {HPF_Fs88200_Fc78_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc78_A1, HPF_Fs88200_Fc78_A0, -HPF_Fs88200_Fc78_B2, -HPF_Fs88200_Fc78_B1},
+        {HPF_Fs96000_Fc78_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc78_A1, HPF_Fs96000_Fc78_A0, -HPF_Fs96000_Fc78_B2, -HPF_Fs96000_Fc78_B1},
+        {HPF_Fs176400_Fc78_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc78_A1, HPF_Fs176400_Fc78_A0, -HPF_Fs176400_Fc78_B2, -HPF_Fs176400_Fc78_B1},
+        {HPF_Fs192000_Fc78_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc78_A1, HPF_Fs192000_Fc78_A0, -HPF_Fs192000_Fc78_B2, -HPF_Fs192000_Fc78_B1},
 
-    /* Coefficients for 90Hz centre frequency */
-    {HPF_Fs8000_Fc90_A2,                /* 8kS/s coefficients */
-     HPF_Fs8000_Fc90_A1,
-     HPF_Fs8000_Fc90_A0,
-     -HPF_Fs8000_Fc90_B2,
-     -HPF_Fs8000_Fc90_B1},
-    {HPF_Fs11025_Fc90_A2,                /* 11kS/s coefficients */
-     HPF_Fs11025_Fc90_A1,
-     HPF_Fs11025_Fc90_A0,
-     -HPF_Fs11025_Fc90_B2,
-     -HPF_Fs11025_Fc90_B1},
-    {HPF_Fs12000_Fc90_A2,                /* 12kS/s coefficients */
-     HPF_Fs12000_Fc90_A1,
-     HPF_Fs12000_Fc90_A0,
-     -HPF_Fs12000_Fc90_B2,
-     -HPF_Fs12000_Fc90_B1},
-    {HPF_Fs16000_Fc90_A2,                /* 16kS/s coefficients */
-     HPF_Fs16000_Fc90_A1,
-     HPF_Fs16000_Fc90_A0,
-     -HPF_Fs16000_Fc90_B2,
-     -HPF_Fs16000_Fc90_B1},
-    {HPF_Fs22050_Fc90_A2,                /* 22kS/s coefficients */
-     HPF_Fs22050_Fc90_A1,
-     HPF_Fs22050_Fc90_A0,
-     -HPF_Fs22050_Fc90_B2,
-     -HPF_Fs22050_Fc90_B1},
-    {HPF_Fs24000_Fc90_A2,                /* 24kS/s coefficients */
-     HPF_Fs24000_Fc90_A1,
-     HPF_Fs24000_Fc90_A0,
-     -HPF_Fs24000_Fc90_B2,
-     -HPF_Fs24000_Fc90_B1},
-    {HPF_Fs32000_Fc90_A2,                /* 32kS/s coefficients */
-     HPF_Fs32000_Fc90_A1,
-     HPF_Fs32000_Fc90_A0,
-     -HPF_Fs32000_Fc90_B2,
-     -HPF_Fs32000_Fc90_B1},
-    {HPF_Fs44100_Fc90_A2,                /* 44kS/s coefficients */
-     HPF_Fs44100_Fc90_A1,
-     HPF_Fs44100_Fc90_A0,
-     -HPF_Fs44100_Fc90_B2,
-     -HPF_Fs44100_Fc90_B1},
-    {HPF_Fs48000_Fc90_A2,                /* 48kS/s coefficients */
-     HPF_Fs48000_Fc90_A1,
-     HPF_Fs48000_Fc90_A0,
-     -HPF_Fs48000_Fc90_B2,
-     -HPF_Fs48000_Fc90_B1}
+        /* Coefficients for 90Hz centre frequency */
+        {HPF_Fs8000_Fc90_A2, /* 8kS/s coefficients */
+         HPF_Fs8000_Fc90_A1, HPF_Fs8000_Fc90_A0, -HPF_Fs8000_Fc90_B2, -HPF_Fs8000_Fc90_B1},
+        {HPF_Fs11025_Fc90_A2, /* 11kS/s coefficients */
+         HPF_Fs11025_Fc90_A1, HPF_Fs11025_Fc90_A0, -HPF_Fs11025_Fc90_B2, -HPF_Fs11025_Fc90_B1},
+        {HPF_Fs12000_Fc90_A2, /* 12kS/s coefficients */
+         HPF_Fs12000_Fc90_A1, HPF_Fs12000_Fc90_A0, -HPF_Fs12000_Fc90_B2, -HPF_Fs12000_Fc90_B1},
+        {HPF_Fs16000_Fc90_A2, /* 16kS/s coefficients */
+         HPF_Fs16000_Fc90_A1, HPF_Fs16000_Fc90_A0, -HPF_Fs16000_Fc90_B2, -HPF_Fs16000_Fc90_B1},
+        {HPF_Fs22050_Fc90_A2, /* 22kS/s coefficients */
+         HPF_Fs22050_Fc90_A1, HPF_Fs22050_Fc90_A0, -HPF_Fs22050_Fc90_B2, -HPF_Fs22050_Fc90_B1},
+        {HPF_Fs24000_Fc90_A2, /* 24kS/s coefficients */
+         HPF_Fs24000_Fc90_A1, HPF_Fs24000_Fc90_A0, -HPF_Fs24000_Fc90_B2, -HPF_Fs24000_Fc90_B1},
+        {HPF_Fs32000_Fc90_A2, /* 32kS/s coefficients */
+         HPF_Fs32000_Fc90_A1, HPF_Fs32000_Fc90_A0, -HPF_Fs32000_Fc90_B2, -HPF_Fs32000_Fc90_B1},
+        {HPF_Fs44100_Fc90_A2, /* 44kS/s coefficients */
+         HPF_Fs44100_Fc90_A1, HPF_Fs44100_Fc90_A0, -HPF_Fs44100_Fc90_B2, -HPF_Fs44100_Fc90_B1},
+        {HPF_Fs48000_Fc90_A2, /* 48kS/s coefficients */
+         HPF_Fs48000_Fc90_A1, HPF_Fs48000_Fc90_A0, -HPF_Fs48000_Fc90_B2, -HPF_Fs48000_Fc90_B1}
 
-    ,
-    {HPF_Fs88200_Fc90_A2,                /* 88kS/s coefficients */
-     HPF_Fs88200_Fc90_A1,
-     HPF_Fs88200_Fc90_A0,
-     -HPF_Fs88200_Fc90_B2,
-     -HPF_Fs88200_Fc90_B1},
-    {HPF_Fs96000_Fc90_A2,                /* 96kS/s coefficients */
-     HPF_Fs96000_Fc90_A1,
-     HPF_Fs96000_Fc90_A0,
-     -HPF_Fs96000_Fc90_B2,
-     -HPF_Fs96000_Fc90_B1},
-    {HPF_Fs176400_Fc90_A2,                /* 176kS/s coefficients */
-     HPF_Fs176400_Fc90_A1,
-     HPF_Fs176400_Fc90_A0,
-     -HPF_Fs176400_Fc90_B2,
-     -HPF_Fs176400_Fc90_B1},
-    {HPF_Fs192000_Fc90_A2,                /* 192kS/s coefficients */
-     HPF_Fs192000_Fc90_A1,
-     HPF_Fs192000_Fc90_A0,
-     -HPF_Fs192000_Fc90_B2,
-     -HPF_Fs192000_Fc90_B1}
+        ,
+        {HPF_Fs88200_Fc90_A2, /* 88kS/s coefficients */
+         HPF_Fs88200_Fc90_A1, HPF_Fs88200_Fc90_A0, -HPF_Fs88200_Fc90_B2, -HPF_Fs88200_Fc90_B1},
+        {HPF_Fs96000_Fc90_A2, /* 96kS/s coefficients */
+         HPF_Fs96000_Fc90_A1, HPF_Fs96000_Fc90_A0, -HPF_Fs96000_Fc90_B2, -HPF_Fs96000_Fc90_B1},
+        {HPF_Fs176400_Fc90_A2, /* 176kS/s coefficients */
+         HPF_Fs176400_Fc90_A1, HPF_Fs176400_Fc90_A0, -HPF_Fs176400_Fc90_B2, -HPF_Fs176400_Fc90_B1},
+        {HPF_Fs192000_Fc90_A2, /* 192kS/s coefficients */
+         HPF_Fs192000_Fc90_A1, HPF_Fs192000_Fc90_A0, -HPF_Fs192000_Fc90_B2, -HPF_Fs192000_Fc90_B1}
 
 };
 
@@ -312,170 +156,117 @@
  * Band Pass Filter coefficient table
  */
 const BP_FLOAT_Coefs_t LVDBE_BPF_Table[] = {
-    /* Coefficients for 55Hz centre frequency */
-    {BPF_Fs8000_Fc55_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc55_B2,
-     -BPF_Fs8000_Fc55_B1},
-    {BPF_Fs11025_Fc55_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc55_B2,
-     -BPF_Fs11025_Fc55_B1},
-    {BPF_Fs12000_Fc55_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc55_B2,
-     -BPF_Fs12000_Fc55_B1},
-    {BPF_Fs16000_Fc55_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc55_B2,
-     -BPF_Fs16000_Fc55_B1},
-    {BPF_Fs22050_Fc55_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc55_B2,
-     -BPF_Fs22050_Fc55_B1},
-    {BPF_Fs24000_Fc55_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc55_B2,
-     -BPF_Fs24000_Fc55_B1},
-    {BPF_Fs32000_Fc55_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc55_B2,
-     -BPF_Fs32000_Fc55_B1},
-    {BPF_Fs44100_Fc55_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc55_B2,
-     -BPF_Fs44100_Fc55_B1},
-    {BPF_Fs48000_Fc55_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc55_B2,
-     -BPF_Fs48000_Fc55_B1},
-     {BPF_Fs88200_Fc55_A0,                /* 88kS/s coefficients */
-      -BPF_Fs88200_Fc55_B2,
-      -BPF_Fs88200_Fc55_B1},
-     {BPF_Fs96000_Fc55_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc55_B2,
-     -BPF_Fs96000_Fc55_B1},
-     {BPF_Fs176400_Fc55_A0,                /* 176kS/s coefficients */
-      -BPF_Fs176400_Fc55_B2,
-      -BPF_Fs176400_Fc55_B1},
-     {BPF_Fs192000_Fc55_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc55_B2,
-     -BPF_Fs192000_Fc55_B1},
+        /* Coefficients for 55Hz centre frequency */
+        {BPF_Fs8000_Fc55_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc55_B2, -BPF_Fs8000_Fc55_B1},
+        {BPF_Fs11025_Fc55_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc55_B2, -BPF_Fs11025_Fc55_B1},
+        {BPF_Fs12000_Fc55_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc55_B2, -BPF_Fs12000_Fc55_B1},
+        {BPF_Fs16000_Fc55_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc55_B2, -BPF_Fs16000_Fc55_B1},
+        {BPF_Fs22050_Fc55_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc55_B2, -BPF_Fs22050_Fc55_B1},
+        {BPF_Fs24000_Fc55_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc55_B2, -BPF_Fs24000_Fc55_B1},
+        {BPF_Fs32000_Fc55_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc55_B2, -BPF_Fs32000_Fc55_B1},
+        {BPF_Fs44100_Fc55_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc55_B2, -BPF_Fs44100_Fc55_B1},
+        {BPF_Fs48000_Fc55_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc55_B2, -BPF_Fs48000_Fc55_B1},
+        {BPF_Fs88200_Fc55_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc55_B2, -BPF_Fs88200_Fc55_B1},
+        {BPF_Fs96000_Fc55_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc55_B2, -BPF_Fs96000_Fc55_B1},
+        {BPF_Fs176400_Fc55_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc55_B2, -BPF_Fs176400_Fc55_B1},
+        {BPF_Fs192000_Fc55_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc55_B2, -BPF_Fs192000_Fc55_B1},
 
-    /* Coefficients for 66Hz centre frequency */
-    {BPF_Fs8000_Fc66_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc66_B2,
-     -BPF_Fs8000_Fc66_B1},
-    {BPF_Fs11025_Fc66_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc66_B2,
-     -BPF_Fs11025_Fc66_B1},
-    {BPF_Fs12000_Fc66_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc66_B2,
-     -BPF_Fs12000_Fc66_B1},
-    {BPF_Fs16000_Fc66_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc66_B2,
-     -BPF_Fs16000_Fc66_B1},
-    {BPF_Fs22050_Fc66_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc66_B2,
-     -BPF_Fs22050_Fc66_B1},
-    {BPF_Fs24000_Fc66_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc66_B2,
-     -BPF_Fs24000_Fc66_B1},
-    {BPF_Fs32000_Fc66_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc66_B2,
-     -BPF_Fs32000_Fc66_B1},
-    {BPF_Fs44100_Fc66_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc66_B2,
-     -BPF_Fs44100_Fc66_B1},
-    {BPF_Fs48000_Fc66_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc66_B2,
-     -BPF_Fs48000_Fc66_B1},
-    {BPF_Fs88200_Fc66_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc66_B2,
-     -BPF_Fs88200_Fc66_B1},
-    {BPF_Fs96000_Fc66_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc66_B2,
-     -BPF_Fs96000_Fc66_B1},
-    {BPF_Fs176400_Fc66_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc66_B2,
-     -BPF_Fs176400_Fc66_B1},
-    {BPF_Fs192000_Fc66_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc66_B2,
-     -BPF_Fs192000_Fc66_B1},
+        /* Coefficients for 66Hz centre frequency */
+        {BPF_Fs8000_Fc66_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc66_B2, -BPF_Fs8000_Fc66_B1},
+        {BPF_Fs11025_Fc66_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc66_B2, -BPF_Fs11025_Fc66_B1},
+        {BPF_Fs12000_Fc66_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc66_B2, -BPF_Fs12000_Fc66_B1},
+        {BPF_Fs16000_Fc66_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc66_B2, -BPF_Fs16000_Fc66_B1},
+        {BPF_Fs22050_Fc66_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc66_B2, -BPF_Fs22050_Fc66_B1},
+        {BPF_Fs24000_Fc66_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc66_B2, -BPF_Fs24000_Fc66_B1},
+        {BPF_Fs32000_Fc66_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc66_B2, -BPF_Fs32000_Fc66_B1},
+        {BPF_Fs44100_Fc66_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc66_B2, -BPF_Fs44100_Fc66_B1},
+        {BPF_Fs48000_Fc66_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc66_B2, -BPF_Fs48000_Fc66_B1},
+        {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1},
+        {BPF_Fs96000_Fc66_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc66_B2, -BPF_Fs96000_Fc66_B1},
+        {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1},
+        {BPF_Fs192000_Fc66_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc66_B2, -BPF_Fs192000_Fc66_B1},
 
-    /* Coefficients for 78Hz centre frequency */
-    {BPF_Fs8000_Fc78_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc78_B2,
-     -BPF_Fs8000_Fc78_B1},
-    {BPF_Fs11025_Fc78_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc78_B2,
-     -BPF_Fs11025_Fc78_B1},
-    {BPF_Fs12000_Fc78_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc78_B2,
-     -BPF_Fs12000_Fc78_B1},
-    {BPF_Fs16000_Fc78_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc78_B2,
-     -BPF_Fs16000_Fc78_B1},
-    {BPF_Fs22050_Fc78_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc78_B2,
-     -BPF_Fs22050_Fc78_B1},
-    {BPF_Fs24000_Fc78_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc78_B2,
-     -BPF_Fs24000_Fc78_B1},
-    {BPF_Fs32000_Fc78_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc78_B2,
-     -BPF_Fs32000_Fc78_B1},
-    {BPF_Fs44100_Fc78_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc78_B2,
-     -BPF_Fs44100_Fc78_B1},
-    {BPF_Fs48000_Fc78_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc78_B2,
-     -BPF_Fs48000_Fc78_B1},
-    {BPF_Fs88200_Fc66_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc66_B2,
-     -BPF_Fs88200_Fc66_B1},
-    {BPF_Fs96000_Fc78_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc78_B2,
-     -BPF_Fs96000_Fc78_B1},
-    {BPF_Fs176400_Fc66_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc66_B2,
-     -BPF_Fs176400_Fc66_B1},
-    {BPF_Fs192000_Fc78_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc78_B2,
-     -BPF_Fs192000_Fc78_B1},
+        /* Coefficients for 78Hz centre frequency */
+        {BPF_Fs8000_Fc78_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc78_B2, -BPF_Fs8000_Fc78_B1},
+        {BPF_Fs11025_Fc78_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc78_B2, -BPF_Fs11025_Fc78_B1},
+        {BPF_Fs12000_Fc78_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc78_B2, -BPF_Fs12000_Fc78_B1},
+        {BPF_Fs16000_Fc78_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc78_B2, -BPF_Fs16000_Fc78_B1},
+        {BPF_Fs22050_Fc78_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc78_B2, -BPF_Fs22050_Fc78_B1},
+        {BPF_Fs24000_Fc78_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc78_B2, -BPF_Fs24000_Fc78_B1},
+        {BPF_Fs32000_Fc78_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc78_B2, -BPF_Fs32000_Fc78_B1},
+        {BPF_Fs44100_Fc78_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc78_B2, -BPF_Fs44100_Fc78_B1},
+        {BPF_Fs48000_Fc78_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc78_B2, -BPF_Fs48000_Fc78_B1},
+        {BPF_Fs88200_Fc66_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc66_B2, -BPF_Fs88200_Fc66_B1},
+        {BPF_Fs96000_Fc78_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc78_B2, -BPF_Fs96000_Fc78_B1},
+        {BPF_Fs176400_Fc66_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc66_B2, -BPF_Fs176400_Fc66_B1},
+        {BPF_Fs192000_Fc78_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc78_B2, -BPF_Fs192000_Fc78_B1},
 
-    /* Coefficients for 90Hz centre frequency */
-    {BPF_Fs8000_Fc90_A0,                /* 8kS/s coefficients */
-     -BPF_Fs8000_Fc90_B2,
-     -BPF_Fs8000_Fc90_B1},
-    {BPF_Fs11025_Fc90_A0,                /* 11kS/s coefficients */
-     -BPF_Fs11025_Fc90_B2,
-     -BPF_Fs11025_Fc90_B1},
-    {BPF_Fs12000_Fc90_A0,                /* 12kS/s coefficients */
-     -BPF_Fs12000_Fc90_B2,
-     -BPF_Fs12000_Fc90_B1},
-    {BPF_Fs16000_Fc90_A0,                /* 16kS/s coefficients */
-     -BPF_Fs16000_Fc90_B2,
-     -BPF_Fs16000_Fc90_B1},
-    {BPF_Fs22050_Fc90_A0,                /* 22kS/s coefficients */
-     -BPF_Fs22050_Fc90_B2,
-     -BPF_Fs22050_Fc90_B1},
-    {BPF_Fs24000_Fc90_A0,                /* 24kS/s coefficients */
-     -BPF_Fs24000_Fc90_B2,
-     -BPF_Fs24000_Fc90_B1},
-    {BPF_Fs32000_Fc90_A0,                /* 32kS/s coefficients */
-     -BPF_Fs32000_Fc90_B2,
-     -BPF_Fs32000_Fc90_B1},
-    {BPF_Fs44100_Fc90_A0,                /* 44kS/s coefficients */
-     -BPF_Fs44100_Fc90_B2,
-     -BPF_Fs44100_Fc90_B1},
-    {BPF_Fs48000_Fc90_A0,                /* 48kS/s coefficients */
-     -BPF_Fs48000_Fc90_B2,
-     -BPF_Fs48000_Fc90_B1}
-    ,
-    {BPF_Fs88200_Fc90_A0,                /* 88kS/s coefficients */
-     -BPF_Fs88200_Fc90_B2,
-     -BPF_Fs88200_Fc90_B1},
-    {BPF_Fs96000_Fc90_A0,                /* 96kS/s coefficients */
-     -BPF_Fs96000_Fc90_B2,
-     -BPF_Fs96000_Fc90_B1},
-    {BPF_Fs176400_Fc90_A0,                /* 176kS/s coefficients */
-     -BPF_Fs176400_Fc90_B2,
-     -BPF_Fs176400_Fc90_B1},
-    {BPF_Fs192000_Fc90_A0,                /* 192kS/s coefficients */
-     -BPF_Fs192000_Fc90_B2,
-     -BPF_Fs192000_Fc90_B1}
+        /* Coefficients for 90Hz centre frequency */
+        {BPF_Fs8000_Fc90_A0, /* 8kS/s coefficients */
+         -BPF_Fs8000_Fc90_B2, -BPF_Fs8000_Fc90_B1},
+        {BPF_Fs11025_Fc90_A0, /* 11kS/s coefficients */
+         -BPF_Fs11025_Fc90_B2, -BPF_Fs11025_Fc90_B1},
+        {BPF_Fs12000_Fc90_A0, /* 12kS/s coefficients */
+         -BPF_Fs12000_Fc90_B2, -BPF_Fs12000_Fc90_B1},
+        {BPF_Fs16000_Fc90_A0, /* 16kS/s coefficients */
+         -BPF_Fs16000_Fc90_B2, -BPF_Fs16000_Fc90_B1},
+        {BPF_Fs22050_Fc90_A0, /* 22kS/s coefficients */
+         -BPF_Fs22050_Fc90_B2, -BPF_Fs22050_Fc90_B1},
+        {BPF_Fs24000_Fc90_A0, /* 24kS/s coefficients */
+         -BPF_Fs24000_Fc90_B2, -BPF_Fs24000_Fc90_B1},
+        {BPF_Fs32000_Fc90_A0, /* 32kS/s coefficients */
+         -BPF_Fs32000_Fc90_B2, -BPF_Fs32000_Fc90_B1},
+        {BPF_Fs44100_Fc90_A0, /* 44kS/s coefficients */
+         -BPF_Fs44100_Fc90_B2, -BPF_Fs44100_Fc90_B1},
+        {BPF_Fs48000_Fc90_A0, /* 48kS/s coefficients */
+         -BPF_Fs48000_Fc90_B2, -BPF_Fs48000_Fc90_B1},
+        {BPF_Fs88200_Fc90_A0, /* 88kS/s coefficients */
+         -BPF_Fs88200_Fc90_B2, -BPF_Fs88200_Fc90_B1},
+        {BPF_Fs96000_Fc90_A0, /* 96kS/s coefficients */
+         -BPF_Fs96000_Fc90_B2, -BPF_Fs96000_Fc90_B1},
+        {BPF_Fs176400_Fc90_A0, /* 176kS/s coefficients */
+         -BPF_Fs176400_Fc90_B2, -BPF_Fs176400_Fc90_B1},
+        {BPF_Fs192000_Fc90_A0, /* 192kS/s coefficients */
+         -BPF_Fs192000_Fc90_B2, -BPF_Fs192000_Fc90_B1}
 
 };
 
@@ -487,77 +278,34 @@
 
 /* Attack time (signal too large) */
 const LVM_FLOAT LVDBE_AGC_ATTACK_Table[] = {
-    AGC_ATTACK_Fs8000,
-    AGC_ATTACK_Fs11025,
-    AGC_ATTACK_Fs12000,
-    AGC_ATTACK_Fs16000,
-    AGC_ATTACK_Fs22050,
-    AGC_ATTACK_Fs24000,
-    AGC_ATTACK_Fs32000,
-    AGC_ATTACK_Fs44100,
-    AGC_ATTACK_Fs48000
-    ,AGC_ATTACK_Fs88200
-    ,AGC_ATTACK_Fs96000
-    ,AGC_ATTACK_Fs176400
-    ,AGC_ATTACK_Fs192000
+        AGC_ATTACK_Fs8000,  AGC_ATTACK_Fs11025, AGC_ATTACK_Fs12000, AGC_ATTACK_Fs16000,
+        AGC_ATTACK_Fs22050, AGC_ATTACK_Fs24000, AGC_ATTACK_Fs32000, AGC_ATTACK_Fs44100,
+        AGC_ATTACK_Fs48000, AGC_ATTACK_Fs88200, AGC_ATTACK_Fs96000, AGC_ATTACK_Fs176400,
+        AGC_ATTACK_Fs192000
 
 };
 
 /* Decay time (signal too small) */
-const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {
-    AGC_DECAY_Fs8000,
-    AGC_DECAY_Fs11025,
-    AGC_DECAY_Fs12000,
-    AGC_DECAY_Fs16000,
-    AGC_DECAY_Fs22050,
-    AGC_DECAY_Fs24000,
-    AGC_DECAY_Fs32000,
-    AGC_DECAY_Fs44100,
-    AGC_DECAY_Fs48000
-    ,AGC_DECAY_Fs88200
-    ,AGC_DECAY_FS96000
-    ,AGC_DECAY_Fs176400
-    ,AGC_DECAY_FS192000
+const LVM_FLOAT LVDBE_AGC_DECAY_Table[] = {AGC_DECAY_Fs8000,  AGC_DECAY_Fs11025, AGC_DECAY_Fs12000,
+                                           AGC_DECAY_Fs16000, AGC_DECAY_Fs22050, AGC_DECAY_Fs24000,
+                                           AGC_DECAY_Fs32000, AGC_DECAY_Fs44100, AGC_DECAY_Fs48000,
+                                           AGC_DECAY_Fs88200, AGC_DECAY_FS96000, AGC_DECAY_Fs176400,
+                                           AGC_DECAY_FS192000
 
 };
 
 /* Gain for use without the high pass filter */
 const LVM_FLOAT LVDBE_AGC_GAIN_Table[] = {
-    AGC_GAIN_0dB,
-    AGC_GAIN_1dB,
-    AGC_GAIN_2dB,
-    AGC_GAIN_3dB,
-    AGC_GAIN_4dB,
-    AGC_GAIN_5dB,
-    AGC_GAIN_6dB,
-    AGC_GAIN_7dB,
-    AGC_GAIN_8dB,
-    AGC_GAIN_9dB,
-    AGC_GAIN_10dB,
-    AGC_GAIN_11dB,
-    AGC_GAIN_12dB,
-    AGC_GAIN_13dB,
-    AGC_GAIN_14dB,
-    AGC_GAIN_15dB};
+        AGC_GAIN_0dB,  AGC_GAIN_1dB,  AGC_GAIN_2dB,  AGC_GAIN_3dB, AGC_GAIN_4dB,  AGC_GAIN_5dB,
+        AGC_GAIN_6dB,  AGC_GAIN_7dB,  AGC_GAIN_8dB,  AGC_GAIN_9dB, AGC_GAIN_10dB, AGC_GAIN_11dB,
+        AGC_GAIN_12dB, AGC_GAIN_13dB, AGC_GAIN_14dB, AGC_GAIN_15dB};
 
 /* Gain for use with the high pass filter */
 const LVM_FLOAT LVDBE_AGC_HPFGAIN_Table[] = {
-    AGC_HPFGAIN_0dB,
-    AGC_HPFGAIN_1dB,
-    AGC_HPFGAIN_2dB,
-    AGC_HPFGAIN_3dB,
-    AGC_HPFGAIN_4dB,
-    AGC_HPFGAIN_5dB,
-    AGC_HPFGAIN_6dB,
-    AGC_HPFGAIN_7dB,
-    AGC_HPFGAIN_8dB,
-    AGC_HPFGAIN_9dB,
-    AGC_HPFGAIN_10dB,
-    AGC_HPFGAIN_11dB,
-    AGC_HPFGAIN_12dB,
-    AGC_HPFGAIN_13dB,
-    AGC_HPFGAIN_14dB,
-    AGC_HPFGAIN_15dB};
+        AGC_HPFGAIN_0dB,  AGC_HPFGAIN_1dB,  AGC_HPFGAIN_2dB,  AGC_HPFGAIN_3dB,
+        AGC_HPFGAIN_4dB,  AGC_HPFGAIN_5dB,  AGC_HPFGAIN_6dB,  AGC_HPFGAIN_7dB,
+        AGC_HPFGAIN_8dB,  AGC_HPFGAIN_9dB,  AGC_HPFGAIN_10dB, AGC_HPFGAIN_11dB,
+        AGC_HPFGAIN_12dB, AGC_HPFGAIN_13dB, AGC_HPFGAIN_14dB, AGC_HPFGAIN_15dB};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -566,45 +314,23 @@
 /************************************************************************************/
 
 /* dB to linear conversion table */
-const LVM_FLOAT LVDBE_VolumeTable[] = {
-    0.500000f,         /* -6dB */
-    0.562341f,         /* -5dB */
-    0.630957f,         /* -4dB */
-    0.707946f,         /* -3dB */
-    0.794328f,         /* -2dB */
-    0.891251f,         /* -1dB */
-    1.000000f};        /*  0dB */
+const LVM_FLOAT LVDBE_VolumeTable[] = {0.500000f,  /* -6dB */
+                                       0.562341f,  /* -5dB */
+                                       0.630957f,  /* -4dB */
+                                       0.707946f,  /* -3dB */
+                                       0.794328f,  /* -2dB */
+                                       0.891251f,  /* -1dB */
+                                       1.000000f}; /*  0dB */
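
For reference, these entries follow the usual dB-to-linear mapping, with the -6 dB entry rounded to exactly 0.5; a minimal sketch of the relationship (not part of the library):

#include <math.h>

/* Illustrative only: powf(10.0f, -3.0f / 20.0f) reproduces the -3 dB entry, 0.707946f. */
static float db_to_linear(float gain_dB) {
    return powf(10.0f, gain_dB / 20.0f);
}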
 
 const LVM_FLOAT LVDBE_VolumeTCTable[] = {
-    VOL_TC_Fs8000,
-    VOL_TC_Fs11025,
-    VOL_TC_Fs12000,
-    VOL_TC_Fs16000,
-    VOL_TC_Fs22050,
-    VOL_TC_Fs24000,
-    VOL_TC_Fs32000,
-    VOL_TC_Fs44100,
-    VOL_TC_Fs48000
-    ,VOL_TC_Fs88200
-    ,VOL_TC_Fs96000
-    ,VOL_TC_Fs176400
-    ,VOL_TC_Fs192000
-};
+        VOL_TC_Fs8000,  VOL_TC_Fs11025,  VOL_TC_Fs12000, VOL_TC_Fs16000, VOL_TC_Fs22050,
+        VOL_TC_Fs24000, VOL_TC_Fs32000,  VOL_TC_Fs44100, VOL_TC_Fs48000, VOL_TC_Fs88200,
+        VOL_TC_Fs96000, VOL_TC_Fs176400, VOL_TC_Fs192000};
 
 const LVM_INT16 LVDBE_MixerTCTable[] = {
 
-    MIX_TC_Fs8000,
-    MIX_TC_Fs11025,
-    MIX_TC_Fs12000,
-    MIX_TC_Fs16000,
-    MIX_TC_Fs22050,
-    MIX_TC_Fs24000,
-    MIX_TC_Fs32000,
-    MIX_TC_Fs44100,
-    MIX_TC_Fs48000
-    ,MIX_TC_Fs88200
-    ,MIX_TC_Fs96000
-    ,MIX_TC_Fs176400
-    ,MIX_TC_Fs192000
+        MIX_TC_Fs8000,  MIX_TC_Fs11025,  MIX_TC_Fs12000, MIX_TC_Fs16000, MIX_TC_Fs22050,
+        MIX_TC_Fs24000, MIX_TC_Fs32000,  MIX_TC_Fs44100, MIX_TC_Fs48000, MIX_TC_Fs88200,
+        MIX_TC_Fs96000, MIX_TC_Fs176400, MIX_TC_Fs192000
 
 };
diff --git a/media/libeffects/lvm/lib/Bundle/lib/LVM.h b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
index e4e8450..c90c5cc 100644
--- a/media/libeffects/lvm/lib/Bundle/lib/LVM.h
+++ b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
@@ -67,31 +67,28 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table*/
-#define LVM_NR_MEMORY_REGIONS                 4     /* Number of memory regions */
-
 /* Concert Sound effect level presets */
-#define LVM_CS_EFFECT_NONE                    0     /* 0% effect, minimum value */
-#define LVM_CS_EFFECT_LOW                 16384     /* 50% effect */
-#define LVM_CS_EFFECT_MED                 24576     /* 75% effect */
-#define LVM_CS_EFFECT_HIGH                32767     /* 100% effect, maximum value */
+#define LVM_CS_EFFECT_NONE 0     /* 0% effect, minimum value */
+#define LVM_CS_EFFECT_LOW 16384  /* 50% effect */
+#define LVM_CS_EFFECT_MED 24576  /* 75% effect */
+#define LVM_CS_EFFECT_HIGH 32767 /* 100% effect, maximum value */
 
 /* Treble enhancement */
-#define LVM_TE_LOW_MIPS                   32767
+#define LVM_TE_LOW_MIPS 32767
 
 /* Bass enhancement effect level presets */
-#define LVM_BE_0DB                            0     /* 0dB boost, no effect */
-#define LVM_BE_3DB                            3     /* +3dB boost */
-#define LVM_BE_6DB                            6     /* +6dB boost */
-#define LVM_BE_9DB                            9     /* +9dB boost */
-#define LVM_BE_12DB                          12     /* +12dB boost */
-#define LVM_BE_15DB                          15     /* +15dB boost */
+#define LVM_BE_0DB 0   /* 0dB boost, no effect */
+#define LVM_BE_3DB 3   /* +3dB boost */
+#define LVM_BE_6DB 6   /* +6dB boost */
+#define LVM_BE_9DB 9   /* +9dB boost */
+#define LVM_BE_12DB 12 /* +12dB boost */
+#define LVM_BE_15DB 15 /* +15dB boost */
 
 /* N-Band Equalizer */
-#define LVM_EQ_NBANDS                         5    /* Number of bands for equalizer */
+#define LVM_EQ_NBANDS 5 /* Number of bands for equalizer */
 
 /* Headroom management */
-#define LVM_HEADROOM_MAX_NBANDS               5
+#define LVM_HEADROOM_MAX_NBANDS 5
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -100,123 +97,89 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVM_Handle_t;
+typedef void* LVM_Handle_t;
 
 /* Status return values */
-typedef enum
-{
-    LVM_SUCCESS            = 0,                     /* Successful return from a routine */
-    LVM_ALIGNMENTERROR     = 1,                     /* Memory alignment error */
-    LVM_NULLADDRESS        = 2,                     /* NULL allocation address */
-    LVM_OUTOFRANGE         = 3,                     /* Out of range control parameter */
-    LVM_INVALIDNUMSAMPLES  = 4,                     /* Invalid number of samples */
-    LVM_WRONGAUDIOTIME     = 5,                     /* Wrong time value for audio time*/
-    LVM_ALGORITHMDISABLED  = 6,                     /* Algorithm is disabled*/
-    LVM_ALGORITHMPSA       = 7,                     /* Algorithm PSA returns an error */
+typedef enum {
+    LVM_SUCCESS = 0,           /* Successful return from a routine */
+    LVM_ALIGNMENTERROR = 1,    /* Memory alignment error */
+    LVM_NULLADDRESS = 2,       /* NULL allocation address */
+    LVM_OUTOFRANGE = 3,        /* Out of range control parameter */
+    LVM_INVALIDNUMSAMPLES = 4, /* Invalid number of samples */
+    LVM_WRONGAUDIOTIME = 5,    /* Wrong time value for audio time*/
+    LVM_ALGORITHMDISABLED = 6, /* Algorithm is disabled*/
+    LVM_ALGORITHMPSA = 7,      /* Algorithm PSA returns an error */
     LVM_RETURNSTATUS_DUMMY = LVM_MAXENUM
 } LVM_ReturnStatus_en;
 
 /* Buffer Management mode */
-typedef enum
-{
-    LVM_MANAGED_BUFFERS   = 0,
+typedef enum {
+    LVM_MANAGED_BUFFERS = 0,
     LVM_UNMANAGED_BUFFERS = 1,
-    LVM_BUFFERS_DUMMY     = LVM_MAXENUM
+    LVM_BUFFERS_DUMMY = LVM_MAXENUM
 } LVM_BufferMode_en;
 
 /* Output device type */
-typedef enum
-{
-    LVM_HEADPHONES             = 0,
-    LVM_EX_HEADPHONES          = 1,
-    LVM_SPEAKERTYPE_MAX        = LVM_MAXENUM
+typedef enum {
+    LVM_HEADPHONES = 0,
+    LVM_EX_HEADPHONES = 1,
+    LVM_SPEAKERTYPE_MAX = LVM_MAXENUM
 } LVM_OutputDeviceType_en;
 
 /* Virtualizer mode selection*/
-typedef enum
-{
-    LVM_CONCERTSOUND       = 0,
-    LVM_VIRTUALIZERTYPE_DUMMY   = LVM_MAXENUM
+typedef enum {
+    LVM_CONCERTSOUND = 0,
+    LVM_VIRTUALIZERTYPE_DUMMY = LVM_MAXENUM
 } LVM_VirtualizerType_en;
 
 /* N-Band Equaliser operating mode */
-typedef enum
-{
-    LVM_EQNB_OFF   = 0,
-    LVM_EQNB_ON    = 1,
-    LVM_EQNB_DUMMY = LVM_MAXENUM
-} LVM_EQNB_Mode_en;
+typedef enum { LVM_EQNB_OFF = 0, LVM_EQNB_ON = 1, LVM_EQNB_DUMMY = LVM_MAXENUM } LVM_EQNB_Mode_en;
 
 /* Bass Enhancement operating mode */
-typedef enum
-{
-    LVM_BE_OFF   = 0,
-    LVM_BE_ON    = 1,
-    LVM_BE_DUMMY = LVM_MAXENUM
-} LVM_BE_Mode_en;
+typedef enum { LVM_BE_OFF = 0, LVM_BE_ON = 1, LVM_BE_DUMMY = LVM_MAXENUM } LVM_BE_Mode_en;
 
 /* Bass Enhancement centre frequency selection control */
-typedef enum
-{
-    LVM_BE_CENTRE_55Hz  = 0,
-    LVM_BE_CENTRE_66Hz  = 1,
-    LVM_BE_CENTRE_78Hz  = 2,
-    LVM_BE_CENTRE_90Hz  = 3,
+typedef enum {
+    LVM_BE_CENTRE_55Hz = 0,
+    LVM_BE_CENTRE_66Hz = 1,
+    LVM_BE_CENTRE_78Hz = 2,
+    LVM_BE_CENTRE_90Hz = 3,
     LVM_BE_CENTRE_DUMMY = LVM_MAXENUM
 } LVM_BE_CentreFreq_en;
 
 /* Bass Enhancement HPF selection control */
-typedef enum
-{
-    LVM_BE_HPF_OFF   = 0,
-    LVM_BE_HPF_ON    = 1,
+typedef enum {
+    LVM_BE_HPF_OFF = 0,
+    LVM_BE_HPF_ON = 1,
     LVM_BE_HPF_DUMMY = LVM_MAXENUM
 } LVM_BE_FilterSelect_en;
 
 /* Volume Control operating mode */
-typedef enum
-{
-    LVM_VC_OFF   = 0,
-    LVM_VC_ON    = 1,
-    LVM_VC_DUMMY = LVM_MAXENUM
-} LVM_VC_Mode_en;
+typedef enum { LVM_VC_OFF = 0, LVM_VC_ON = 1, LVM_VC_DUMMY = LVM_MAXENUM } LVM_VC_Mode_en;
 
 /* Treble Enhancement operating mode */
-typedef enum
-{
-    LVM_TE_OFF   = 0,
-    LVM_TE_ON    = 1,
-    LVM_TE_DUMMY = LVM_MAXENUM
-} LVM_TE_Mode_en;
+typedef enum { LVM_TE_OFF = 0, LVM_TE_ON = 1, LVM_TE_DUMMY = LVM_MAXENUM } LVM_TE_Mode_en;
 
 /* Headroom management operating mode */
-typedef enum
-{
-    LVM_HEADROOM_OFF   = 0,
-    LVM_HEADROOM_ON    = 1,
+typedef enum {
+    LVM_HEADROOM_OFF = 0,
+    LVM_HEADROOM_ON = 1,
     LVM_Headroom_DUMMY = LVM_MAXENUM
 } LVM_Headroom_Mode_en;
 
-typedef enum
-{
-    LVM_PSA_SPEED_SLOW,                                  /* Peak decaying at slow speed */
-    LVM_PSA_SPEED_MEDIUM,                                /* Peak decaying at medium speed */
-    LVM_PSA_SPEED_FAST,                                  /* Peak decaying at fast speed */
+typedef enum {
+    LVM_PSA_SPEED_SLOW,   /* Peak decaying at slow speed */
+    LVM_PSA_SPEED_MEDIUM, /* Peak decaying at medium speed */
+    LVM_PSA_SPEED_FAST,   /* Peak decaying at fast speed */
     LVM_PSA_SPEED_DUMMY = LVM_MAXENUM
 } LVM_PSA_DecaySpeed_en;
 
-typedef enum
-{
-    LVM_PSA_OFF   = 0,
-    LVM_PSA_ON    = 1,
-    LVM_PSA_DUMMY = LVM_MAXENUM
-} LVM_PSA_Mode_en;
+typedef enum { LVM_PSA_OFF = 0, LVM_PSA_ON = 1, LVM_PSA_DUMMY = LVM_MAXENUM } LVM_PSA_Mode_en;
 
 /* Version information */
-typedef struct
-{
-    LVM_CHAR                    *pVersionNumber;        /* Pointer to the version number in the format X.YY.ZZ */
-    LVM_CHAR                    *pPlatform;             /* Pointer to the library platform type */
+typedef struct {
+    LVM_CHAR* pVersionNumber; /* Pointer to the version number in the format X.YY.ZZ */
+    LVM_CHAR* pPlatform;      /* Pointer to the library platform type */
 } LVM_VersionInfo_st;
 
 /****************************************************************************************/
@@ -225,93 +188,80 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st         Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVM_MemTab_t;
-
 /* N-Band equaliser band definition */
-typedef struct
-{
-    LVM_INT16                   Gain;                   /* Band gain in dB */
-    LVM_UINT16                  Frequency;              /* Band centre frequency in Hz */
-    LVM_UINT16                  QFactor;                /* Band quality factor (x100) */
+typedef struct {
+    LVM_INT16 Gain;       /* Band gain in dB */
+    LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+    LVM_UINT16 QFactor;   /* Band quality factor (x100) */
 } LVM_EQNB_BandDef_t;
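
A usage sketch for this definition; the band values are illustrative assumptions. Note that QFactor is stored scaled by 100, so a Q of 0.96 is written as 96:

/* Hypothetical 60 Hz band with +3 dB gain and Q = 0.96 (stored as 96). */
static const LVM_EQNB_BandDef_t kExampleBand = {3, 60, 96};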
 
 /* Headroom band definition */
-typedef struct
-{
-    LVM_UINT16                  Limit_Low;              /* Low frequency limit of the band in Hertz */
-    LVM_UINT16                  Limit_High;             /* High frequency limit of the band in Hertz */
-    LVM_INT16                   Headroom_Offset;        /* Headroom = biggest band gain - Headroom_Offset */
+typedef struct {
+    LVM_UINT16 Limit_Low;      /* Low frequency limit of the band in Hertz */
+    LVM_UINT16 Limit_High;     /* High frequency limit of the band in Hertz */
+    LVM_INT16 Headroom_Offset; /* Headroom = biggest band gain - Headroom_Offset */
 } LVM_HeadroomBandDef_t;
 
 /* Control Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_Mode_en                 OperatingMode;          /* Bundle operating mode On/Bypass */
-    LVM_Fs_en                   SampleRate;             /* Sample rate */
-    LVM_Format_en               SourceFormat;           /* Input data format */
-    LVM_OutputDeviceType_en     SpeakerType;            /* Output device type */
+    LVM_Mode_en OperatingMode;           /* Bundle operating mode On/Bypass */
+    LVM_Fs_en SampleRate;                /* Sample rate */
+    LVM_Format_en SourceFormat;          /* Input data format */
+    LVM_OutputDeviceType_en SpeakerType; /* Output device type */
 
     /* Concert Sound Virtualizer parameters*/
-    LVM_Mode_en                 VirtualizerOperatingMode; /* Virtualizer operating mode On/Off */
-    LVM_VirtualizerType_en      VirtualizerType;          /* Virtualizer type: ConcertSound */
-    LVM_UINT16                  VirtualizerReverbLevel;   /* Virtualizer reverb level in % */
-    LVM_INT16                   CS_EffectLevel;           /* Concert Sound effect level */
+    LVM_Mode_en VirtualizerOperatingMode;   /* Virtualizer operating mode On/Off */
+    LVM_VirtualizerType_en VirtualizerType; /* Virtualizer type: ConcertSound */
+    LVM_UINT16 VirtualizerReverbLevel;      /* Virtualizer reverb level in % */
+    LVM_INT16 CS_EffectLevel;               /* Concert Sound effect level */
 
     /* N-Band Equaliser parameters */
-    LVM_EQNB_Mode_en            EQNB_OperatingMode;     /* N-Band Equaliser operating mode */
-    LVM_UINT16                  EQNB_NBands;            /* Number of bands */
-    LVM_EQNB_BandDef_t          *pEQNB_BandDefinition;  /* Pointer to equaliser definitions */
+    LVM_EQNB_Mode_en EQNB_OperatingMode;      /* N-Band Equaliser operating mode */
+    LVM_UINT16 EQNB_NBands;                   /* Number of bands */
+    LVM_EQNB_BandDef_t* pEQNB_BandDefinition; /* Pointer to equaliser definitions */
 
     /* Bass Enhancement parameters */
-    LVM_BE_Mode_en              BE_OperatingMode;       /* Bass Enhancement operating mode */
-    LVM_INT16                   BE_EffectLevel;         /* Bass Enhancement effect level */
-    LVM_BE_CentreFreq_en        BE_CentreFreq;          /* Bass Enhancement centre frequency */
-    LVM_BE_FilterSelect_en      BE_HPF;                 /* Bass Enhancement high pass filter selector */
+    LVM_BE_Mode_en BE_OperatingMode;    /* Bass Enhancement operating mode */
+    LVM_INT16 BE_EffectLevel;           /* Bass Enhancement effect level */
+    LVM_BE_CentreFreq_en BE_CentreFreq; /* Bass Enhancement centre frequency */
+    LVM_BE_FilterSelect_en BE_HPF;      /* Bass Enhancement high pass filter selector */
 
     /* Volume Control parameters */
-    LVM_INT16                   VC_EffectLevel;         /* Volume Control setting in dBs */
-    LVM_INT16                   VC_Balance;             /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
-                                                           Right channel while +ve value reduces Left channel*/
+    LVM_INT16 VC_EffectLevel; /* Volume Control setting in dBs */
+    LVM_INT16 VC_Balance;     /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce
+                                 the Right channel while +ve values reduce the Left channel */
 
     /* Treble Enhancement parameters */
-    LVM_TE_Mode_en              TE_OperatingMode;       /* Treble Enhancement On/Off */
-    LVM_INT16                   TE_EffectLevel;         /* Treble Enhancement gain dBs */
+    LVM_TE_Mode_en TE_OperatingMode; /* Treble Enhancement On/Off */
+    LVM_INT16 TE_EffectLevel;        /* Treble Enhancement gain dBs */
 
     /* Spectrum Analyzer parameters Control */
-    LVM_PSA_Mode_en             PSA_Enable;
-    LVM_PSA_DecaySpeed_en       PSA_PeakDecayRate;      /* Peak value decay rate*/
-#ifdef SUPPORT_MC
-    LVM_INT32                   NrChannels;
-    LVM_INT32                   ChMask;
-#endif
+    LVM_PSA_Mode_en PSA_Enable;
+    LVM_PSA_DecaySpeed_en PSA_PeakDecayRate; /* Peak value decay rate*/
+    LVM_INT32 NrChannels;
+    LVM_INT32 ChMask;
 
 } LVM_ControlParams_t;
 
 /* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_BufferMode_en           BufferMode;             /* Buffer management mode */
-    LVM_UINT16                  MaxBlockSize;           /* Maximum processing block size */
+    LVM_BufferMode_en BufferMode; /* Buffer management mode */
+    LVM_UINT16 MaxBlockSize;      /* Maximum processing block size */
 
     /* N-Band Equaliser */
-    LVM_UINT16                  EQNB_NumBands;          /* Maximum number of equaliser bands */
+    LVM_UINT16 EQNB_NumBands; /* Maximum number of equaliser bands */
 
     /* PSA */
-    LVM_PSA_Mode_en             PSA_Included;            /* Controls the instance memory allocation for PSA: ON/OFF */
+    LVM_PSA_Mode_en PSA_Included; /* Controls the instance memory allocation for PSA: ON/OFF */
 } LVM_InstParams_t;
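
A minimal sketch of filling in the instance parameters above; MaxBlockSize is an illustrative assumption, the other values come from the definitions in this header:

/* Hypothetical helper, not part of the library. */
static LVM_InstParams_t MakeExampleInstParams() {
    LVM_InstParams_t instParams;
    instParams.BufferMode = LVM_MANAGED_BUFFERS; /* let the bundle handle internal blocking */
    instParams.MaxBlockSize = 2048;              /* illustrative maximum processing block size */
    instParams.EQNB_NumBands = LVM_EQ_NBANDS;    /* five equaliser bands */
    instParams.PSA_Included = LVM_PSA_ON;        /* allocate spectrum analyser state */
    return instParams;
}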
 
 /* Headroom management parameter structure */
-typedef struct
-{
-    LVM_Headroom_Mode_en        Headroom_OperatingMode; /* Headroom Control On/Off */
-    LVM_HeadroomBandDef_t       *pHeadroomDefinition;   /* Pointer to headroom bands definition */
-    LVM_UINT16                  NHeadroomBands;         /* Number of headroom bands */
+typedef struct {
+    LVM_Headroom_Mode_en Headroom_OperatingMode; /* Headroom Control On/Off */
+    LVM_HeadroomBandDef_t* pHeadroomDefinition;  /* Pointer to headroom bands definition */
+    LVM_UINT16 NHeadroomBands;                   /* Number of headroom bands */
 
 } LVM_HeadroomParams_t;
 
@@ -339,55 +289,18 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st  *pVersion);
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVM_GetMemoryTable                                          */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pInstParams             Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVM_SUCCESS             Succeeded                                                   */
-/*  LVM_NULLADDRESS         When one of pMemoryTable or pInstParams is NULL             */
-/*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVM_Process function                    */
-/*                                                                                      */
-/****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t         hInstance,
-                                       LVM_MemTab_t         *pMemoryTable,
-                                       LVM_InstParams_t     *pInstParams);
+LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st* pVersion);
 
 /****************************************************************************************/
 /*                                                                                      */
 /* FUNCTION:                LVM_GetInstanceHandle                                       */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  This function is used to create a bundle instance. It returns the created instance  */
-/*  handle through phInstance. All parameters are set to their default, inactive state. */
+/*  This function is used to create a bundle instance.                                  */
+/*  All parameters are set to their default, inactive state.                            */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
+/*  phInstance              Pointer to the instance handle                              */
 /*  pInstParams             Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -399,9 +312,24 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t        *phInstance,
-                                          LVM_MemTab_t        *pMemoryTable,
-                                          LVM_InstParams_t    *pInstParams);
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams);
+
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVM_DelInstanceHandle                                       */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free a bundle instance and release the memory allocated    */
+/*  internally when the instance was created with LVM_GetInstanceHandle.                */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to the instance handle                              */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1. This function must not be interrupted by the LVM_Process function                */
+/*                                                                                      */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance);
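
With the memory table API removed, instance lifetime reduces to this create/destroy pair; a sketch assuming instance parameters filled in as sketched earlier:

/* Hypothetical lifetime sketch, not part of the library. */
static void CreateUseAndDestroy() {
    LVM_InstParams_t instParams = MakeExampleInstParams();
    LVM_Handle_t hInstance = LVM_NULL;
    if (LVM_GetInstanceHandle(&hInstance, &instParams) == LVM_SUCCESS) {
        /* ... LVM_SetControlParameters / LVM_Process calls ... */
        LVM_DelInstanceHandle(&hInstance); /* frees what LVM_GetInstanceHandle allocated */
    }
}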
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -421,7 +349,7 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t  hInstance);
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -443,8 +371,7 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -466,8 +393,7 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -499,11 +425,8 @@
 /*      STEREO              the number of sample pairs in the block                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t                hInstance,
-                                const LVM_FLOAT             *pInData,
-                                LVM_FLOAT                      *pOutData,
-                                LVM_UINT16                  NumSamples,
-                                LVM_UINT32                  AudioTime);
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -524,8 +447,8 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetHeadroomParams(  LVM_Handle_t            hInstance,
-                                            LVM_HeadroomParams_t    *pHeadroomParams);
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams);
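
A sketch of configuring headroom management; the two-band split, frequency limits and offsets are illustrative assumptions:

/* Hypothetical helper, not part of the library. */
static LVM_ReturnStatus_en ConfigureHeadroom(LVM_Handle_t hInstance) {
    static LVM_HeadroomBandDef_t bands[] = {
            {20, 4000, 3}, /* Limit_Low, Limit_High in Hz, Headroom_Offset */
            {4001, 20000, 4},
    };
    LVM_HeadroomParams_t headroom;
    headroom.Headroom_OperatingMode = LVM_HEADROOM_ON;
    headroom.pHeadroomDefinition = bands;
    headroom.NHeadroomBands = 2; /* stays within LVM_HEADROOM_MAX_NBANDS */
    return LVM_SetHeadroomParams(hInstance, &headroom);
}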
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -546,8 +469,8 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetHeadroomParams(  LVM_Handle_t            hInstance,
-                                            LVM_HeadroomParams_t    *pHeadroomParams);
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -574,10 +497,8 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum( LVM_Handle_t            hInstance,
-                                     LVM_UINT8               *pCurrentPeaks,
-                                     LVM_UINT8               *pPastPeaks,
-                                     LVM_INT32               AudioTime);
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+                                    LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime);
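
A polling sketch; the peak buffer size is an assumption and must cover the number of spectrum analyzer bands configured for the instance:

/* Hypothetical helper, not part of the library. */
static void PollSpectrum(LVM_Handle_t hInstance, LVM_INT32 audioTime) {
    LVM_UINT8 currentPeaks[32] = {0}; /* 32 bands is an assumption */
    LVM_UINT8 pastPeaks[32] = {0};
    LVM_ReturnStatus_en err = LVM_GetSpectrum(hInstance, currentPeaks, pastPeaks, audioTime);
    if (err == LVM_SUCCESS) {
        /* currentPeaks / pastPeaks now hold the per-band peak values */
    } else if (err == LVM_ALGORITHMDISABLED) {
        /* PSA_Enable is LVM_PSA_OFF, so no spectrum data is produced */
    }
}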
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -599,8 +520,6 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t           hInstance,
-                                              LVM_ControlParams_t    *pParams);
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams);
 
-#endif      /* __LVM_H__ */
-
+#endif /* __LVM_H__ */
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
index e241cdd..cea964c 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_API_Specials.cpp
@@ -47,69 +47,52 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_GetSpectrum(
-                                    LVM_Handle_t            hInstance,
-                                    LVM_UINT8               *pCurrentPeaks,
-                                    LVM_UINT8               *pPastPeaks,
-                                    LVM_INT32               AudioTime
-                                    )
-{
-    LVM_Instance_t           *pInstance   = (LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_GetSpectrum(LVM_Handle_t hInstance, LVM_UINT8* pCurrentPeaks,
+                                    LVM_UINT8* pPastPeaks, LVM_INT32 AudioTime) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
-    pLVPSA_Handle_t        *hPSAInstance;
-    LVPSA_RETURN           LVPSA_Status;
+    pLVPSA_Handle_t* hPSAInstance;
+    LVPSA_RETURN LVPSA_Status;
 
-    if(pInstance == LVM_NULL)
-    {
+    if (pInstance == LVM_NULL) {
         return LVM_NULLADDRESS;
     }
 
     /*If PSA is not included at the time of instance creation, return without any processing*/
-    if(pInstance->InstParams.PSA_Included!=LVM_PSA_ON)
-    {
+    if (pInstance->InstParams.PSA_Included != LVM_PSA_ON) {
         return LVM_SUCCESS;
     }
 
-    hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+    hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
 
-    if((pCurrentPeaks == LVM_NULL) ||
-        (pPastPeaks == LVM_NULL))
-    {
+    if ((pCurrentPeaks == LVM_NULL) || (pPastPeaks == LVM_NULL)) {
         return LVM_NULLADDRESS;
     }
 
     /*
      * Update new parameters if necessary
      */
-    if (pInstance->ControlPending == LVM_TRUE)
-    {
+    if (pInstance->ControlPending == LVM_TRUE) {
         LVM_ApplyNewSettings(hInstance);
     }
 
     /* If PSA module is disabled, do nothing */
-    if(pInstance->Params.PSA_Enable==LVM_PSA_OFF)
-    {
+    if (pInstance->Params.PSA_Enable == LVM_PSA_OFF) {
         return LVM_ALGORITHMDISABLED;
     }
 
-    LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance,
-                            (LVPSA_Time) (AudioTime),
-                            (LVM_UINT8*) pCurrentPeaks,
-                            (LVM_UINT8*) pPastPeaks );
+    LVPSA_Status = LVPSA_GetSpectrum(hPSAInstance, (LVPSA_Time)(AudioTime),
+                                     (LVM_UINT8*)pCurrentPeaks, (LVM_UINT8*)pPastPeaks);
 
-    if(LVPSA_Status != LVPSA_OK)
-    {
-        if(LVPSA_Status == LVPSA_ERROR_WRONGTIME)
-        {
-            return (LVM_ReturnStatus_en) LVM_WRONGAUDIOTIME;
-        }
-        else
-        {
-            return (LVM_ReturnStatus_en) LVM_NULLADDRESS;
+    if (LVPSA_Status != LVPSA_OK) {
+        if (LVPSA_Status == LVPSA_ERROR_WRONGTIME) {
+            return (LVM_ReturnStatus_en)LVM_WRONGAUDIOTIME;
+        } else {
+            return (LVM_ReturnStatus_en)LVM_NULLADDRESS;
         }
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -132,15 +115,12 @@
 /*  1. This function may be interrupted by the LVM_Process function                     */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t           hInstance,
-                                              LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
     LVM_ReturnStatus_en Error;
 
     /*Apply new controls*/
-    Error = LVM_SetControlParameters(hInstance,pParams);
+    Error = LVM_SetControlParameters(hInstance, pParams);
     pInstance->NoSmoothVolume = LVM_TRUE;
     return Error;
 }
-
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
index 3aeddbb..1d913d7 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Buffers.cpp
@@ -49,109 +49,90 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferManagedIn(LVM_Handle_t       hInstance,
-                         const LVM_FLOAT    *pInData,
-                         LVM_FLOAT          **pToProcess,
-                         LVM_FLOAT          **pProcessed,
-                         LVM_UINT16         *pNumSamples)
-{
-
-    LVM_INT16        SampleCount;           /* Number of samples to be processed this call */
-    LVM_INT16        NumSamples;            /* Number of samples in scratch buffer */
-    LVM_FLOAT        *pStart;
-    LVM_Instance_t   *pInstance = (LVM_Instance_t  *)hInstance;
-    LVM_Buffer_t     *pBuffer;
-    LVM_FLOAT        *pDest;
-#ifdef SUPPORT_MC
-    LVM_INT16        NumChannels = pInstance->NrChannels;
-#else
-    LVM_INT16        NumChannels = 2;
-#endif
+void LVM_BufferManagedIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                         LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+    LVM_INT16 SampleCount; /* Number of samples to be processed this call */
+    LVM_INT16 NumSamples;  /* Number of samples in scratch buffer */
+    LVM_FLOAT* pStart;
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_Buffer_t* pBuffer;
+    LVM_FLOAT* pDest;
+    LVM_INT16 NumChannels = pInstance->NrChannels;
 
     /*
      * Set the processing address pointers
      */
-    pBuffer     = pInstance->pBufferManagement;
-    pDest       = pBuffer->pScratch;
+    pBuffer = pInstance->pBufferManagement;
+    pDest = pBuffer->pScratch;
     *pToProcess = pBuffer->pScratch;
     *pProcessed = pBuffer->pScratch;
 
     /*
      * Check if it is the first call of a block
      */
-    if (pInstance->SamplesToProcess == 0)
-    {
+    if (pInstance->SamplesToProcess == 0) {
         /*
          * First call for a new block of samples
          */
         pInstance->SamplesToProcess = (LVM_INT16)(*pNumSamples + pBuffer->InDelaySamples);
-        pInstance->pInputSamples    = (LVM_FLOAT *)pInData;
-        pBuffer->BufferState        = LVM_FIRSTCALL;
+        pInstance->pInputSamples = (LVM_FLOAT*)pInData;
+        pBuffer->BufferState = LVM_FIRSTCALL;
     }
-    pStart = pInstance->pInputSamples;                 /* Pointer to the input samples */
-    pBuffer->SamplesToOutput  = 0;                     /* Samples to output is same as
-                                                          number read for inplace processing */
+    pStart = pInstance->pInputSamples; /* Pointer to the input samples */
+    pBuffer->SamplesToOutput = 0;      /* Samples to output is same as
+                                          number read for inplace processing */
 
     /*
      * Calculate the number of samples to process this call and update the buffer state
      */
-    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-    {
+    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
         /*
          * Process the maximum block size of samples.
          */
         SampleCount = pInstance->InternalBlockSize;
-        NumSamples  = pInstance->InternalBlockSize;
-    }
-    else
-    {
+        NumSamples = pInstance->InternalBlockSize;
+    } else {
         /*
          * Last call for the block, so calculate how many frames and samples to process
-          */
-        LVM_INT16   NumFrames;
+         */
+        LVM_INT16 NumFrames;
 
-        NumSamples  = pInstance->SamplesToProcess;
-        NumFrames    = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
+        NumSamples = pInstance->SamplesToProcess;
+        NumFrames = (LVM_INT16)(NumSamples >> MIN_INTERNAL_BLOCKSHIFT);
         SampleCount = (LVM_INT16)(NumFrames << MIN_INTERNAL_BLOCKSHIFT);
 
         /*
          * Update the buffer state
          */
-        if (pBuffer->BufferState == LVM_FIRSTCALL)
-        {
+        if (pBuffer->BufferState == LVM_FIRSTCALL) {
             pBuffer->BufferState = LVM_FIRSTLASTCALL;
-        }
-        else
-        {
+        } else {
             pBuffer->BufferState = LVM_LASTCALL;
         }
     }
-    *pNumSamples = (LVM_UINT16)SampleCount;  /* Set the number of samples to process this call */
+    *pNumSamples = (LVM_UINT16)SampleCount; /* Set the number of samples to process this call */
 
     /*
      * Copy samples from the delay buffer as required
      */
-    if (((pBuffer->BufferState == LVM_FIRSTCALL) ||
-        (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
-        (pBuffer->InDelaySamples != 0))
-    {
-        Copy_Float(&pBuffer->InDelayBuffer[0],                             /* Source */
-                   pDest,                                                  /* Destination */
-                   (LVM_INT16)(NumChannels * pBuffer->InDelaySamples));    /* Number of delay \
-                                                                       samples, left and right */
+    if (((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) &&
+        (pBuffer->InDelaySamples != 0)) {
+        Copy_Float(&pBuffer->InDelayBuffer[0],                          /* Source */
+                   pDest,                                               /* Destination */
+                   (LVM_INT16)(NumChannels * pBuffer->InDelaySamples)); /* Number of delay \
+                                                                    samples, left and right */
         NumSamples = (LVM_INT16)(NumSamples - pBuffer->InDelaySamples); /* Update sample count */
-        pDest += NumChannels * pBuffer->InDelaySamples;      /* Update the destination pointer */
+        pDest += NumChannels * pBuffer->InDelaySamples; /* Update the destination pointer */
     }
 
     /*
      * Copy the rest of the samples for this call from the input buffer
      */
-    if (NumSamples > 0)
-    {
-        Copy_Float(pStart,                                      /* Source */
-                   pDest,                                       /* Destination */
-                   (LVM_INT16)(NumChannels * NumSamples));      /* Number of input samples */
-        pStart += NumChannels * NumSamples;                     /* Update the input pointer */
+    if (NumSamples > 0) {
+        Copy_Float(pStart,                                 /* Source */
+                   pDest,                                  /* Destination */
+                   (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
+        pStart += NumChannels * NumSamples;                /* Update the input pointer */
 
         /*
          * Update the input data pointer and samples to output
@@ -161,33 +142,30 @@
     }
 
     /*
-      * Update the sample count and input pointer
+     * Update the sample count and input pointer
      */
     /* Update the count of samples */
-    pInstance->SamplesToProcess  = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
-    pInstance->pInputSamples     = pStart; /* Update input sample pointer */
+    pInstance->SamplesToProcess = (LVM_INT16)(pInstance->SamplesToProcess - SampleCount);
+    pInstance->pInputSamples = pStart; /* Update input sample pointer */
 
     /*
      * Save samples to the delay buffer if any left unprocessed
      */
-    if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) ||
-        (pBuffer->BufferState == LVM_LASTCALL))
-    {
+    if ((pBuffer->BufferState == LVM_FIRSTLASTCALL) || (pBuffer->BufferState == LVM_LASTCALL)) {
         NumSamples = pInstance->SamplesToProcess;
-        pStart     = pBuffer->pScratch;                             /* Start of the buffer */
-        pStart    += NumChannels * SampleCount; /* Offset by the number of processed samples */
-        if (NumSamples != 0)
-        {
-            Copy_Float(pStart,                                         /* Source */
-                       &pBuffer->InDelayBuffer[0],                     /* Destination */
-                       (LVM_INT16)(NumChannels * NumSamples));   /* Number of input samples */
+        pStart = pBuffer->pScratch;          /* Start of the buffer */
+        pStart += NumChannels * SampleCount; /* Offset by the number of processed samples */
+        if (NumSamples != 0) {
+            Copy_Float(pStart,                                 /* Source */
+                       &pBuffer->InDelayBuffer[0],             /* Destination */
+                       (LVM_INT16)(NumChannels * NumSamples)); /* Number of input samples */
         }
 
         /*
          * Update the delay sample count
          */
-        pBuffer->InDelaySamples     = NumSamples;       /* Number of delay sample pairs */
-        pInstance->SamplesToProcess = 0;                            /* All Samples used */
+        pBuffer->InDelaySamples = NumSamples; /* Number of delay sample pairs */
+        pInstance->SamplesToProcess = 0;      /* All Samples used */
     }
 }
 
@@ -213,33 +191,25 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferUnmanagedIn(LVM_Handle_t     hInstance,
-                           LVM_FLOAT        **pToProcess,
-                           LVM_FLOAT        **pProcessed,
-                           LVM_UINT16       *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance = (LVM_Instance_t  *)hInstance;
+void LVM_BufferUnmanagedIn(LVM_Handle_t hInstance, LVM_FLOAT** pToProcess, LVM_FLOAT** pProcessed,
+                           LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check if this is the first call of a block
      */
-    if (pInstance->SamplesToProcess == 0)
-    {
-        pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples;    /* Get the number of samples
-                                                                               on first call */
-        pInstance->pInputSamples    = *pToProcess;                /* Get the I/O pointers */
-        pInstance->pOutputSamples    = *pProcessed;
+    if (pInstance->SamplesToProcess == 0) {
+        pInstance->SamplesToProcess = (LVM_INT16)*pNumSamples; /* Get the number of samples
+                                                                            on first call */
+        pInstance->pInputSamples = *pToProcess;                /* Get the I/O pointers */
+        pInstance->pOutputSamples = *pProcessed;
 
         /*
          * Set the block size to process
          */
-        if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-        {
+        if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
             *pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
-        }
-        else
-        {
+        } else {
             *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
         }
     }
@@ -329,32 +299,17 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferIn(LVM_Handle_t      hInstance,
-                  const LVM_FLOAT   *pInData,
-                  LVM_FLOAT         **pToProcess,
-                  LVM_FLOAT         **pProcessed,
-                  LVM_UINT16        *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance = (LVM_Instance_t  *)hInstance;
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                  LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check which mode, managed or unmanaged
      */
-    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        LVM_BufferManagedIn(hInstance,
-                            pInData,
-                            pToProcess,
-                            pProcessed,
-                            pNumSamples);
-    }
-    else
-    {
-        LVM_BufferUnmanagedIn(hInstance,
-                              pToProcess,
-                              pProcessed,
-                              pNumSamples);
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        LVM_BufferManagedIn(hInstance, pInData, pToProcess, pProcessed, pNumSamples);
+    } else {
+        LVM_BufferUnmanagedIn(hInstance, pToProcess, pProcessed, pNumSamples);
     }
 }
 /****************************************************************************************/
@@ -377,196 +332,124 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferManagedOut(LVM_Handle_t        hInstance,
-                          LVM_FLOAT            *pOutData,
-                          LVM_UINT16        *pNumSamples)
-{
-
-    LVM_Instance_t  *pInstance  = (LVM_Instance_t  *)hInstance;
-    LVM_Buffer_t    *pBuffer    = pInstance->pBufferManagement;
-    LVM_INT16       SampleCount = (LVM_INT16)*pNumSamples;
-    LVM_INT16       NumSamples;
-    LVM_FLOAT       *pStart;
-    LVM_FLOAT       *pDest;
-#ifdef SUPPORT_MC
-    LVM_INT32       NrChannels = pInstance->NrChannels;
+void LVM_BufferManagedOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_Buffer_t* pBuffer = pInstance->pBufferManagement;
+    LVM_INT16 SampleCount = (LVM_INT16)*pNumSamples;
+    LVM_INT16 NumSamples;
+    LVM_FLOAT* pStart;
+    LVM_FLOAT* pDest;
+    LVM_INT32 NrChannels = pInstance->NrChannels;
 #define NrFrames NumSamples  // alias for clarity
 #define FrameCount SampleCount
-#endif
 
     /*
      * Set the pointers
      */
     NumSamples = pBuffer->SamplesToOutput;
-    pStart     = pBuffer->pScratch;
+    pStart = pBuffer->pScratch;
 
     /*
      * check if it is the first call of a block
-      */
-    if ((pBuffer->BufferState == LVM_FIRSTCALL) ||
-        (pBuffer->BufferState == LVM_FIRSTLASTCALL))
-    {
+     */
+    if ((pBuffer->BufferState == LVM_FIRSTCALL) || (pBuffer->BufferState == LVM_FIRSTLASTCALL)) {
         /* First call for a new block */
-        pInstance->pOutputSamples = pOutData;                 /* Initialise the destination */
+        pInstance->pOutputSamples = pOutData; /* Initialise the destination */
     }
-    pDest = pInstance->pOutputSamples;                        /* Set the output address */
+    pDest = pInstance->pOutputSamples; /* Set the output address */
 
     /*
      * If the number of samples is non-zero then there are still samples to send to
      * the output buffer
      */
-    if ((NumSamples != 0) &&
-        (pBuffer->OutDelaySamples != 0))
-    {
+    if ((NumSamples != 0) && (pBuffer->OutDelaySamples != 0)) {
         /*
          * Copy the delayed output buffer samples to the output
          */
-        if (pBuffer->OutDelaySamples <= NumSamples)
-        {
+        if (pBuffer->OutDelaySamples <= NumSamples) {
             /*
              * Copy all output delay samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                /* Source */
-                       pDest,                                      /* Destination */
+            Copy_Float(&pBuffer->OutDelayBuffer[0], /* Source */
+                       pDest,                       /* Destination */
                        /* Number of delay samples */
                        (LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                /* Source */
-                       pDest,                                      /* Destination */
-                       (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of delay samples */
-#endif
 
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
             pDest += NrChannels * pBuffer->OutDelaySamples; /* Output sample pointer */
-#else
-            pDest += 2 * pBuffer->OutDelaySamples; /* Output sample pointer */
-#endif
             NumSamples = (LVM_INT16)(NumSamples - pBuffer->OutDelaySamples); /* Samples left \
                                                                                 to send */
             pBuffer->OutDelaySamples = 0; /* No samples left in the buffer */
-        }
-        else
-        {
+        } else {
             /*
-             * Copy only some of the ouput delay samples to the output
+             * Copy only some of the output delay samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                    /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(NrChannels * NrFrames));       /* Number of delay samples */
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[0],                    /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(2 * NumSamples));       /* Number of delay samples */
-#endif
+            Copy_Float(&pBuffer->OutDelayBuffer[0],         /* Source */
+                       pDest,                               /* Destination */
+                       (LVM_INT16)(NrChannels * NrFrames)); /* Number of delay samples */
 
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
             pDest += NrChannels * NrFrames; /* Output sample pointer */
-#else
-            pDest += 2 * NumSamples; /* Output sample pointer */
-#endif
             /* No samples left in the buffer */
             pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples - NumSamples);
 
             /*
              * Realign the delay buffer data to avoid using circular buffer management
              */
-#ifdef SUPPORT_MC
-            Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames],         /* Source */
-                       &pBuffer->OutDelayBuffer[0],                    /* Destination */
+            Copy_Float(&pBuffer->OutDelayBuffer[NrChannels * NrFrames], /* Source */
+                       &pBuffer->OutDelayBuffer[0],                     /* Destination */
                        /* Number of samples to move */
                        (LVM_INT16)(NrChannels * pBuffer->OutDelaySamples));
-#else
-            Copy_Float(&pBuffer->OutDelayBuffer[2 * NumSamples],         /* Source */
-                       &pBuffer->OutDelayBuffer[0],                    /* Destination */
-                       (LVM_INT16)(2 * pBuffer->OutDelaySamples)); /* Number of samples to move */
-#endif
-            NumSamples = 0;                                /* Samples left to send */
+            NumSamples = 0; /* Samples left to send */
         }
     }
 
     /*
      * Copy the processed results to the output
      */
-    if ((NumSamples != 0) &&
-        (SampleCount != 0))
-    {
-        if (SampleCount <= NumSamples)
-        {
+    if ((NumSamples != 0) && (SampleCount != 0)) {
+        if (SampleCount <= NumSamples) {
             /*
              * Copy all processed samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(pStart,                                      /* Source */
-                       pDest,                                       /* Destination */
+            Copy_Float(pStart,                                /* Source */
+                       pDest,                                 /* Destination */
                        (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
-#else
-            Copy_Float(pStart,                                      /* Source */
-                       pDest,                                       /* Destination */
-                       (LVM_INT16)(2 * SampleCount)); /* Number of processed samples */
-#endif
             /*
              * Update the pointer and sample counts
              */
-#ifdef SUPPORT_MC
-            pDest      += NrChannels * FrameCount;                 /* Output sample pointer */
-#else
-            pDest      += 2 * SampleCount;                          /* Output sample pointer */
-#endif
-            NumSamples  = (LVM_INT16)(NumSamples - SampleCount);    /* Samples left to send */
-            SampleCount = 0; /* No samples left in the buffer */
-        }
-        else
-        {
+            pDest += NrChannels * FrameCount;                   /* Output sample pointer */
+            NumSamples = (LVM_INT16)(NumSamples - SampleCount); /* Samples left to send */
+            SampleCount = 0;                                    /* No samples left in the buffer */
+        } else {
             /*
              * Copy only some processed samples to the output
              */
-#ifdef SUPPORT_MC
-            Copy_Float(pStart,                                         /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(NrChannels * NrFrames));  /* Number of processed samples */
-#else
-            Copy_Float(pStart,                                         /* Source */
-                       pDest,                                          /* Destination */
-                       (LVM_INT16)(2 * NumSamples));     /* Number of processed samples */
-#endif
+            Copy_Float(pStart,                              /* Source */
+                       pDest,                               /* Destination */
+                       (LVM_INT16)(NrChannels * NrFrames)); /* Number of processed samples */
             /*
              * Update the pointers and sample counts
-               */
-#ifdef SUPPORT_MC
-            pStart      += NrChannels * NrFrames;               /* Processed sample pointer */
-            pDest       += NrChannels * NrFrames;               /* Output sample pointer */
-#else
-            pStart      += 2 * NumSamples;                        /* Processed sample pointer */
-            pDest       += 2 * NumSamples;                        /* Output sample pointer */
-#endif
-            SampleCount  = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
-            NumSamples   = 0;                                     /* Clear the sample count */
+             */
+            pStart += NrChannels * NrFrames;                     /* Processed sample pointer */
+            pDest += NrChannels * NrFrames;                      /* Output sample pointer */
+            SampleCount = (LVM_INT16)(SampleCount - NumSamples); /* Processed samples left */
+            NumSamples = 0;                                      /* Clear the sample count */
         }
     }
 
     /*
      * Copy the remaining processed data to the output delay buffer
      */
-    if (SampleCount != 0)
-    {
-#ifdef SUPPORT_MC
-        Copy_Float(pStart,                                                 /* Source */
+    if (SampleCount != 0) {
+        Copy_Float(pStart, /* Source */
                    /* Destination */
                    &pBuffer->OutDelayBuffer[NrChannels * pBuffer->OutDelaySamples],
-                   (LVM_INT16)(NrChannels * FrameCount));      /* Number of processed samples */
-#else
-        Copy_Float(pStart,                                                 /* Source */
-                   &pBuffer->OutDelayBuffer[2 * pBuffer->OutDelaySamples], /* Destination */
-                   (LVM_INT16)(2 * SampleCount));               /* Number of processed samples */
-#endif
+                   (LVM_INT16)(NrChannels * FrameCount)); /* Number of processed samples */
         /* Update the buffer count */
         pBuffer->OutDelaySamples = (LVM_INT16)(pBuffer->OutDelaySamples + SampleCount);
     }
@@ -574,10 +457,10 @@
     /*
      * pointers, counts and set default buffer processing
      */
-    pBuffer->SamplesToOutput  = NumSamples;                         /* Samples left to send */
-    pInstance->pOutputSamples = pDest;                              /* Output sample pointer */
-    pBuffer->BufferState      = LVM_MAXBLOCKCALL;                   /* Set for the default call \
-                                                                            block size */
+    pBuffer->SamplesToOutput = NumSamples;   /* Samples left to send */
+    pInstance->pOutputSamples = pDest;       /* Output sample pointer */
+    pBuffer->BufferState = LVM_MAXBLOCKCALL; /* Set for the default call \
+                                                     block size */
     /* This will terminate the loop when all samples processed */
     *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
 }
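The managed-output path above boils down to a three-stage pattern: drain whatever is still sitting in OutDelayBuffer, copy as many freshly processed samples as the caller's buffer can accept, then stash any remainder back into the delay buffer, realigned to the start so no circular-buffer bookkeeping is needed. A minimal self-contained sketch of that pattern follows, with illustrative types and names rather than the actual LVM structures:

#include <algorithm>
#include <cstring>
#include <vector>

// Illustrative only: mirrors the drain/copy/stash flow, not the LVM data layout.
struct ManagedOut {
    std::vector<float> delayBuf;  // interleaved samples carried over from the previous call
    size_t delayCount = 0;        // valid samples currently held in delayBuf
};

// Writes up to outCapacity interleaved samples to out and returns how many were written;
// processed samples that do not fit are kept for the next call.
size_t managedOut(ManagedOut& s, const float* processed, size_t processedCount,
                  float* out, size_t outCapacity) {
    size_t written = 0;

    // 1. Drain delayed samples first so the output stays in order.
    const size_t fromDelay = std::min(s.delayCount, outCapacity);
    if (fromDelay > 0) {
        std::memcpy(out, s.delayBuf.data(), fromDelay * sizeof(float));
        // Realign the leftover delay data to the start (avoids circular buffer management).
        std::memmove(s.delayBuf.data(), s.delayBuf.data() + fromDelay,
                     (s.delayCount - fromDelay) * sizeof(float));
        s.delayCount -= fromDelay;
        written += fromDelay;
    }

    // 2. Copy as many processed samples as still fit in the caller's buffer.
    const size_t fromProcessed = std::min(processedCount, outCapacity - written);
    if (fromProcessed > 0) {
        std::memcpy(out + written, processed, fromProcessed * sizeof(float));
        written += fromProcessed;
    }

    // 3. Stash the remainder in the delay buffer for the next call.
    const size_t leftover = processedCount - fromProcessed;
    if (leftover > 0) {
        s.delayBuf.resize(s.delayCount + leftover);
        std::memcpy(s.delayBuf.data() + s.delayCount, processed + fromProcessed,
                    leftover * sizeof(float));
        s.delayCount += leftover;
    }
    return written;
}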
@@ -601,44 +484,27 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void LVM_BufferUnmanagedOut(LVM_Handle_t        hInstance,
-                            LVM_UINT16          *pNumSamples)
-{
-
-    LVM_Instance_t      *pInstance  = (LVM_Instance_t  *)hInstance;
-#ifdef SUPPORT_MC
-    LVM_INT16           NumChannels = pInstance->NrChannels;
-    if (NumChannels == 1)
-    {
-        /* Mono input is processed as stereo by LVM module */
-        NumChannels = 2;
-    }
+void LVM_BufferUnmanagedOut(LVM_Handle_t hInstance, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_INT16 NumChannels = pInstance->NrChannels;
 #undef NrFrames
-#define NrFrames (*pNumSamples) // alias for clarity
-#else
-    LVM_INT16           NumChannels = 2;
-#endif
+#define NrFrames (*pNumSamples)  // alias for clarity
 
     /*
      * Update sample counts
      */
-    pInstance->pInputSamples    += (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
-#ifdef SUPPORT_MC
-    pInstance->pOutputSamples   += (LVM_INT16)(NrFrames * NumChannels);
-#else
-    pInstance->pOutputSamples   += (LVM_INT16)(*pNumSamples * 2);
-#endif
-    pInstance->SamplesToProcess  = (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
+    pInstance->pInputSamples +=
+            (LVM_INT16)(*pNumSamples * NumChannels); /* Update the I/O pointers */
+    pInstance->pOutputSamples += (LVM_INT16)(NrFrames * NumChannels);
+    pInstance->SamplesToProcess =
+            (LVM_INT16)(pInstance->SamplesToProcess - *pNumSamples); /* Update the sample count */
 
     /*
      * Set the block size to process
      */
-    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize)
-    {
+    if (pInstance->SamplesToProcess > pInstance->InternalBlockSize) {
         *pNumSamples = (LVM_UINT16)pInstance->InternalBlockSize;
-    }
-    else
-    {
+    } else {
         *pNumSamples = (LVM_UINT16)pInstance->SamplesToProcess;
     }
 }
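The unmanaged path is plain pointer bookkeeping: advance the interleaved I/O pointers by frames times channels, subtract the frames just consumed, and clamp the next request to the internal block size. A hedged sketch of that step (illustrative names, not the LVM_Instance_t fields):

#include <cstdint>

// Illustrative sketch of the unmanaged-mode bookkeeping after one processed block.
void unmanagedOutStep(const float*& pIn, float*& pOut, int16_t& samplesToProcess,
                      uint16_t& numSamples, int16_t numChannels, int16_t internalBlockSize) {
    pIn += numSamples * numChannels;   // advance interleaved input pointer
    pOut += numSamples * numChannels;  // advance interleaved output pointer
    samplesToProcess = static_cast<int16_t>(samplesToProcess - numSamples);

    // The next call processes at most one internal block.
    numSamples = static_cast<uint16_t>(samplesToProcess > internalBlockSize
                                               ? internalBlockSize
                                               : samplesToProcess);
}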
@@ -698,25 +564,15 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_BufferOut(LVM_Handle_t     hInstance,
-                   LVM_FLOAT        *pOutData,
-                   LVM_UINT16       *pNumSamples)
-{
-
-    LVM_Instance_t    *pInstance  = (LVM_Instance_t  *)hInstance;
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check which mode, managed or unmanaged
      */
-    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        LVM_BufferManagedOut(hInstance,
-                             pOutData,
-                             pNumSamples);
-    }
-    else
-    {
-        LVM_BufferUnmanagedOut(hInstance,
-                               pNumSamples);
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        LVM_BufferManagedOut(hInstance, pOutData, pNumSamples);
+    } else {
+        LVM_BufferUnmanagedOut(hInstance, pNumSamples);
     }
 }
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
index 812f8e5..c02caa1 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Coeffs.h
@@ -24,832 +24,832 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define TrebleBoostCorner                                  8000
-#define TrebleBoostMinRate                                    4
-#define TrebleBoostSteps                                     15
+#define TrebleBoostCorner 8000
+#define TrebleBoostMinRate 4
+#define TrebleBoostSteps 15
 
 /* Coefficients for sample rate 22050Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs22050_Gain1_A0                            1.038434
-#define HPF_Fs22050_Gain1_A1                            0.331599
-#define HPF_Fs22050_Gain1_A2                            0.000000
-#define HPF_Fs22050_Gain1_B1                            0.370033
-#define HPF_Fs22050_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs22050_Gain2_A0                            1.081557
-#define HPF_Fs22050_Gain2_A1                            0.288475
-#define HPF_Fs22050_Gain2_A2                            0.000000
-#define HPF_Fs22050_Gain2_B1                            0.370033
-#define HPF_Fs22050_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs22050_Gain3_A0                            1.129943
-#define HPF_Fs22050_Gain3_A1                            0.240090
-#define HPF_Fs22050_Gain3_A2                            0.000000
-#define HPF_Fs22050_Gain3_B1                            0.370033
-#define HPF_Fs22050_Gain3_B2                            0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs22050_Gain4_A0                            1.184232
-#define HPF_Fs22050_Gain4_A1                            0.185801
-#define HPF_Fs22050_Gain4_A2                            0.000000
-#define HPF_Fs22050_Gain4_B1                            0.370033
-#define HPF_Fs22050_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs22050_Gain5_A0                            1.245145
-#define HPF_Fs22050_Gain5_A1                            0.124887
-#define HPF_Fs22050_Gain5_A2                            0.000000
-#define HPF_Fs22050_Gain5_B1                            0.370033
-#define HPF_Fs22050_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs22050_Gain6_A0                            1.313491
-#define HPF_Fs22050_Gain6_A1                            0.056541
-#define HPF_Fs22050_Gain6_A2                            0.000000
-#define HPF_Fs22050_Gain6_B1                            0.370033
-#define HPF_Fs22050_Gain6_B2                            0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs22050_Gain7_A0                            1.390177
-#define HPF_Fs22050_Gain7_A1                            (-0.020144)
-#define HPF_Fs22050_Gain7_A2                            0.000000
-#define HPF_Fs22050_Gain7_B1                            0.370033
-#define HPF_Fs22050_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs22050_Gain8_A0                            1.476219
-#define HPF_Fs22050_Gain8_A1                            (-0.106187)
-#define HPF_Fs22050_Gain8_A2                            0.000000
-#define HPF_Fs22050_Gain8_B1                            0.370033
-#define HPF_Fs22050_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs22050_Gain9_A0                            1.572761
-#define HPF_Fs22050_Gain9_A1                            (-0.202728)
-#define HPF_Fs22050_Gain9_A2                            0.000000
-#define HPF_Fs22050_Gain9_B1                            0.370033
-#define HPF_Fs22050_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs22050_Gain10_A0                           1.681082
-#define HPF_Fs22050_Gain10_A1                           (-0.311049)
-#define HPF_Fs22050_Gain10_A2                           0.000000
-#define HPF_Fs22050_Gain10_B1                           0.370033
-#define HPF_Fs22050_Gain10_B2                           0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs22050_Gain11_A0                           1.802620
-#define HPF_Fs22050_Gain11_A1                           (-0.432588)
-#define HPF_Fs22050_Gain11_A2                           0.000000
-#define HPF_Fs22050_Gain11_B1                           0.370033
-#define HPF_Fs22050_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs22050_Gain12_A0                           1.938989
-#define HPF_Fs22050_Gain12_A1                           (-0.568956)
-#define HPF_Fs22050_Gain12_A2                           0.000000
-#define HPF_Fs22050_Gain12_B1                           0.370033
-#define HPF_Fs22050_Gain12_B2                           0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs22050_Gain13_A0                           2.091997
-#define HPF_Fs22050_Gain13_A1                           (-0.721964)
-#define HPF_Fs22050_Gain13_A2                           0.000000
-#define HPF_Fs22050_Gain13_B1                           0.370033
-#define HPF_Fs22050_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs22050_Gain14_A0                           2.263674
-#define HPF_Fs22050_Gain14_A1                           (-0.893641)
-#define HPF_Fs22050_Gain14_A2                           0.000000
-#define HPF_Fs22050_Gain14_B1                           0.370033
-#define HPF_Fs22050_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs22050_Gain15_A0                           2.456300
-#define HPF_Fs22050_Gain15_A1                           (-1.086267)
-#define HPF_Fs22050_Gain15_A2                           0.000000
-#define HPF_Fs22050_Gain15_B1                           0.370033
-#define HPF_Fs22050_Gain15_B2                           0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs22050_Gain1_A0 1.038434
+#define HPF_Fs22050_Gain1_A1 0.331599
+#define HPF_Fs22050_Gain1_A2 0.000000
+#define HPF_Fs22050_Gain1_B1 0.370033
+#define HPF_Fs22050_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs22050_Gain2_A0 1.081557
+#define HPF_Fs22050_Gain2_A1 0.288475
+#define HPF_Fs22050_Gain2_A2 0.000000
+#define HPF_Fs22050_Gain2_B1 0.370033
+#define HPF_Fs22050_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs22050_Gain3_A0 1.129943
+#define HPF_Fs22050_Gain3_A1 0.240090
+#define HPF_Fs22050_Gain3_A2 0.000000
+#define HPF_Fs22050_Gain3_B1 0.370033
+#define HPF_Fs22050_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs22050_Gain4_A0 1.184232
+#define HPF_Fs22050_Gain4_A1 0.185801
+#define HPF_Fs22050_Gain4_A2 0.000000
+#define HPF_Fs22050_Gain4_B1 0.370033
+#define HPF_Fs22050_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs22050_Gain5_A0 1.245145
+#define HPF_Fs22050_Gain5_A1 0.124887
+#define HPF_Fs22050_Gain5_A2 0.000000
+#define HPF_Fs22050_Gain5_B1 0.370033
+#define HPF_Fs22050_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs22050_Gain6_A0 1.313491
+#define HPF_Fs22050_Gain6_A1 0.056541
+#define HPF_Fs22050_Gain6_A2 0.000000
+#define HPF_Fs22050_Gain6_B1 0.370033
+#define HPF_Fs22050_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs22050_Gain7_A0 1.390177
+#define HPF_Fs22050_Gain7_A1 (-0.020144)
+#define HPF_Fs22050_Gain7_A2 0.000000
+#define HPF_Fs22050_Gain7_B1 0.370033
+#define HPF_Fs22050_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs22050_Gain8_A0 1.476219
+#define HPF_Fs22050_Gain8_A1 (-0.106187)
+#define HPF_Fs22050_Gain8_A2 0.000000
+#define HPF_Fs22050_Gain8_B1 0.370033
+#define HPF_Fs22050_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs22050_Gain9_A0 1.572761
+#define HPF_Fs22050_Gain9_A1 (-0.202728)
+#define HPF_Fs22050_Gain9_A2 0.000000
+#define HPF_Fs22050_Gain9_B1 0.370033
+#define HPF_Fs22050_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs22050_Gain10_A0 1.681082
+#define HPF_Fs22050_Gain10_A1 (-0.311049)
+#define HPF_Fs22050_Gain10_A2 0.000000
+#define HPF_Fs22050_Gain10_B1 0.370033
+#define HPF_Fs22050_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs22050_Gain11_A0 1.802620
+#define HPF_Fs22050_Gain11_A1 (-0.432588)
+#define HPF_Fs22050_Gain11_A2 0.000000
+#define HPF_Fs22050_Gain11_B1 0.370033
+#define HPF_Fs22050_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs22050_Gain12_A0 1.938989
+#define HPF_Fs22050_Gain12_A1 (-0.568956)
+#define HPF_Fs22050_Gain12_A2 0.000000
+#define HPF_Fs22050_Gain12_B1 0.370033
+#define HPF_Fs22050_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs22050_Gain13_A0 2.091997
+#define HPF_Fs22050_Gain13_A1 (-0.721964)
+#define HPF_Fs22050_Gain13_A2 0.000000
+#define HPF_Fs22050_Gain13_B1 0.370033
+#define HPF_Fs22050_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs22050_Gain14_A0 2.263674
+#define HPF_Fs22050_Gain14_A1 (-0.893641)
+#define HPF_Fs22050_Gain14_A2 0.000000
+#define HPF_Fs22050_Gain14_B1 0.370033
+#define HPF_Fs22050_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs22050_Gain15_A0 2.456300
+#define HPF_Fs22050_Gain15_A1 (-1.086267)
+#define HPF_Fs22050_Gain15_A2 0.000000
+#define HPF_Fs22050_Gain15_B1 0.370033
+#define HPF_Fs22050_Gain15_B2 0.000000
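Each five-value group above is one first-order treble-boost (high-shelf) section: A2 and B2 are always zero, so only A0, A1 and B1 are active, with one set per dB step and sample rate. The pole positions implied by the B1 values are consistent with a difference equation of the form y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1]; that convention is inferred from the numbers, not taken from the filter routines, so treat the sketch below as a self-check rather than a reference implementation. Using the 22.05 kHz / 1 dB set:

#include <cmath>
#include <cstdio>

// Hypothetical self-check: evaluate a first-order section of the assumed form
// y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1] at DC and at Nyquist.
// Coefficients are the HPF_Fs22050_Gain1_* values from the table above.
int main() {
    const double A0 = 1.038434, A1 = 0.331599, B1 = 0.370033;

    // H(z) = (A0 + A1*z^-1) / (1 + B1*z^-1); z = 1 at DC, z = -1 at Nyquist.
    const double dcGain = (A0 + A1) / (1.0 + B1);       // expected ~1.0  (0 dB)
    const double nyquistGain = (A0 - A1) / (1.0 - B1);  // expected ~1.122 (+1 dB shelf)

    std::printf("DC gain      = %.6f (%.2f dB)\n", dcGain, 20.0 * std::log10(dcGain));
    std::printf("Nyquist gain = %.6f (%.2f dB)\n", nyquistGain, 20.0 * std::log10(nyquistGain));
    return 0;
}

The check gives 0 dB at DC and +1.00 dB at Nyquist for this set, and the same pattern holds for the other gain steps; B1 collapsing to -0.000000 in the 32 kHz table below is consistent with the 8 kHz corner sitting at exactly fs/4, where the bilinear-transform pole lands at z = 0.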
 /* Coefficients for sample rate 24000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs24000_Gain1_A0                            1.044662
-#define HPF_Fs24000_Gain1_A1                            0.223287
-#define HPF_Fs24000_Gain1_A2                            0.000000
-#define HPF_Fs24000_Gain1_B1                            0.267949
-#define HPF_Fs24000_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs24000_Gain2_A0                            1.094773
-#define HPF_Fs24000_Gain2_A1                            0.173176
-#define HPF_Fs24000_Gain2_A2                            0.000000
-#define HPF_Fs24000_Gain2_B1                            0.267949
-#define HPF_Fs24000_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs24000_Gain3_A0                            1.150999
-#define HPF_Fs24000_Gain3_A1                            0.116950
-#define HPF_Fs24000_Gain3_A2                            0.000000
-#define HPF_Fs24000_Gain3_B1                            0.267949
-#define HPF_Fs24000_Gain3_B2                            0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs24000_Gain4_A0                            1.214086
-#define HPF_Fs24000_Gain4_A1                            0.053863
-#define HPF_Fs24000_Gain4_A2                            0.000000
-#define HPF_Fs24000_Gain4_B1                            0.267949
-#define HPF_Fs24000_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs24000_Gain5_A0                            1.284870
-#define HPF_Fs24000_Gain5_A1                            (-0.016921)
-#define HPF_Fs24000_Gain5_A2                            0.000000
-#define HPF_Fs24000_Gain5_B1                            0.267949
-#define HPF_Fs24000_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs24000_Gain6_A0                           1.364291
-#define HPF_Fs24000_Gain6_A1                           (-0.096342)
-#define HPF_Fs24000_Gain6_A2                           0.000000
-#define HPF_Fs24000_Gain6_B1                           0.267949
-#define HPF_Fs24000_Gain6_B2                           0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs24000_Gain7_A0                            1.453403
-#define HPF_Fs24000_Gain7_A1                            (-0.185454)
-#define HPF_Fs24000_Gain7_A2                            0.000000
-#define HPF_Fs24000_Gain7_B1                            0.267949
-#define HPF_Fs24000_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs24000_Gain8_A0                            1.553389
-#define HPF_Fs24000_Gain8_A1                            (-0.285440)
-#define HPF_Fs24000_Gain8_A2                            0.000000
-#define HPF_Fs24000_Gain8_B1                            0.267949
-#define HPF_Fs24000_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs24000_Gain9_A0                            1.665574
-#define HPF_Fs24000_Gain9_A1                            (-0.397625)
-#define HPF_Fs24000_Gain9_A2                            0.000000
-#define HPF_Fs24000_Gain9_B1                            0.267949
-#define HPF_Fs24000_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs24000_Gain10_A0                           1.791449
-#define HPF_Fs24000_Gain10_A1                           (-0.523499)
-#define HPF_Fs24000_Gain10_A2                           0.000000
-#define HPF_Fs24000_Gain10_B1                           0.267949
-#define HPF_Fs24000_Gain10_B2                           0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs24000_Gain11_A0                           1.932682
-#define HPF_Fs24000_Gain11_A1                           (-0.664733)
-#define HPF_Fs24000_Gain11_A2                           0.000000
-#define HPF_Fs24000_Gain11_B1                           0.267949
-#define HPF_Fs24000_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs24000_Gain12_A0                           2.091148
-#define HPF_Fs24000_Gain12_A1                           (-0.823199)
-#define HPF_Fs24000_Gain12_A2                           0.000000
-#define HPF_Fs24000_Gain12_B1                           0.267949
-#define HPF_Fs24000_Gain12_B2                           0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs24000_Gain13_A0                           2.268950
-#define HPF_Fs24000_Gain13_A1                           (-1.001001)
-#define HPF_Fs24000_Gain13_A2                           0.000000
-#define HPF_Fs24000_Gain13_B1                           0.267949
-#define HPF_Fs24000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs24000_Gain14_A0                           2.468447
-#define HPF_Fs24000_Gain14_A1                           (-1.200498)
-#define HPF_Fs24000_Gain14_A2                           0.000000
-#define HPF_Fs24000_Gain14_B1                           0.267949
-#define HPF_Fs24000_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs24000_Gain15_A0                           2.692287
-#define HPF_Fs24000_Gain15_A1                           (-1.424338)
-#define HPF_Fs24000_Gain15_A2                           0.000000
-#define HPF_Fs24000_Gain15_B1                           0.267949
-#define HPF_Fs24000_Gain15_B2                           0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs24000_Gain1_A0 1.044662
+#define HPF_Fs24000_Gain1_A1 0.223287
+#define HPF_Fs24000_Gain1_A2 0.000000
+#define HPF_Fs24000_Gain1_B1 0.267949
+#define HPF_Fs24000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs24000_Gain2_A0 1.094773
+#define HPF_Fs24000_Gain2_A1 0.173176
+#define HPF_Fs24000_Gain2_A2 0.000000
+#define HPF_Fs24000_Gain2_B1 0.267949
+#define HPF_Fs24000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs24000_Gain3_A0 1.150999
+#define HPF_Fs24000_Gain3_A1 0.116950
+#define HPF_Fs24000_Gain3_A2 0.000000
+#define HPF_Fs24000_Gain3_B1 0.267949
+#define HPF_Fs24000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs24000_Gain4_A0 1.214086
+#define HPF_Fs24000_Gain4_A1 0.053863
+#define HPF_Fs24000_Gain4_A2 0.000000
+#define HPF_Fs24000_Gain4_B1 0.267949
+#define HPF_Fs24000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs24000_Gain5_A0 1.284870
+#define HPF_Fs24000_Gain5_A1 (-0.016921)
+#define HPF_Fs24000_Gain5_A2 0.000000
+#define HPF_Fs24000_Gain5_B1 0.267949
+#define HPF_Fs24000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs24000_Gain6_A0 1.364291
+#define HPF_Fs24000_Gain6_A1 (-0.096342)
+#define HPF_Fs24000_Gain6_A2 0.000000
+#define HPF_Fs24000_Gain6_B1 0.267949
+#define HPF_Fs24000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs24000_Gain7_A0 1.453403
+#define HPF_Fs24000_Gain7_A1 (-0.185454)
+#define HPF_Fs24000_Gain7_A2 0.000000
+#define HPF_Fs24000_Gain7_B1 0.267949
+#define HPF_Fs24000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs24000_Gain8_A0 1.553389
+#define HPF_Fs24000_Gain8_A1 (-0.285440)
+#define HPF_Fs24000_Gain8_A2 0.000000
+#define HPF_Fs24000_Gain8_B1 0.267949
+#define HPF_Fs24000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs24000_Gain9_A0 1.665574
+#define HPF_Fs24000_Gain9_A1 (-0.397625)
+#define HPF_Fs24000_Gain9_A2 0.000000
+#define HPF_Fs24000_Gain9_B1 0.267949
+#define HPF_Fs24000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs24000_Gain10_A0 1.791449
+#define HPF_Fs24000_Gain10_A1 (-0.523499)
+#define HPF_Fs24000_Gain10_A2 0.000000
+#define HPF_Fs24000_Gain10_B1 0.267949
+#define HPF_Fs24000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs24000_Gain11_A0 1.932682
+#define HPF_Fs24000_Gain11_A1 (-0.664733)
+#define HPF_Fs24000_Gain11_A2 0.000000
+#define HPF_Fs24000_Gain11_B1 0.267949
+#define HPF_Fs24000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs24000_Gain12_A0 2.091148
+#define HPF_Fs24000_Gain12_A1 (-0.823199)
+#define HPF_Fs24000_Gain12_A2 0.000000
+#define HPF_Fs24000_Gain12_B1 0.267949
+#define HPF_Fs24000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs24000_Gain13_A0 2.268950
+#define HPF_Fs24000_Gain13_A1 (-1.001001)
+#define HPF_Fs24000_Gain13_A2 0.000000
+#define HPF_Fs24000_Gain13_B1 0.267949
+#define HPF_Fs24000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs24000_Gain14_A0 2.468447
+#define HPF_Fs24000_Gain14_A1 (-1.200498)
+#define HPF_Fs24000_Gain14_A2 0.000000
+#define HPF_Fs24000_Gain14_B1 0.267949
+#define HPF_Fs24000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs24000_Gain15_A0 2.692287
+#define HPF_Fs24000_Gain15_A1 (-1.424338)
+#define HPF_Fs24000_Gain15_A2 0.000000
+#define HPF_Fs24000_Gain15_B1 0.267949
+#define HPF_Fs24000_Gain15_B2 0.000000
 /* Coefficients for sample rate 32000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs32000_Gain1_A0                            1.061009
-#define HPF_Fs32000_Gain1_A1                            (-0.061009)
-#define HPF_Fs32000_Gain1_A2                            0.000000
-#define HPF_Fs32000_Gain1_B1                            (-0.000000)
-#define HPF_Fs32000_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs32000_Gain2_A0                             1.129463
-#define HPF_Fs32000_Gain2_A1                             (-0.129463)
-#define HPF_Fs32000_Gain2_A2                             0.000000
-#define HPF_Fs32000_Gain2_B1                             (-0.000000)
-#define HPF_Fs32000_Gain2_B2                             0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs32000_Gain3_A0                             1.206267
-#define HPF_Fs32000_Gain3_A1                             (-0.206267)
-#define HPF_Fs32000_Gain3_A2                             0.000000
-#define HPF_Fs32000_Gain3_B1                             (-0.000000)
-#define HPF_Fs32000_Gain3_B2                             0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs32000_Gain4_A0                            1.292447
-#define HPF_Fs32000_Gain4_A1                            (-0.292447)
-#define HPF_Fs32000_Gain4_A2                            0.000000
-#define HPF_Fs32000_Gain4_B1                            (-0.000000)
-#define HPF_Fs32000_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs32000_Gain5_A0                            1.389140
-#define HPF_Fs32000_Gain5_A1                            (-0.389140)
-#define HPF_Fs32000_Gain5_A2                            0.000000
-#define HPF_Fs32000_Gain5_B1                            (-0.000000)
-#define HPF_Fs32000_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs32000_Gain6_A0                             1.497631
-#define HPF_Fs32000_Gain6_A1                             (-0.497631)
-#define HPF_Fs32000_Gain6_A2                             0.000000
-#define HPF_Fs32000_Gain6_B1                             (-0.000000)
-#define HPF_Fs32000_Gain6_B2                             0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs32000_Gain7_A0                             1.619361
-#define HPF_Fs32000_Gain7_A1                             (-0.619361)
-#define HPF_Fs32000_Gain7_A2                             0.000000
-#define HPF_Fs32000_Gain7_B1                             (-0.000000)
-#define HPF_Fs32000_Gain7_B2                             0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs32000_Gain8_A0                             1.755943
-#define HPF_Fs32000_Gain8_A1                             (-0.755943)
-#define HPF_Fs32000_Gain8_A2                             0.000000
-#define HPF_Fs32000_Gain8_B1                             (-0.000000)
-#define HPF_Fs32000_Gain8_B2                             0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs32000_Gain9_A0                             1.909191
-#define HPF_Fs32000_Gain9_A1                             (-0.909191)
-#define HPF_Fs32000_Gain9_A2                             0.000000
-#define HPF_Fs32000_Gain9_B1                             (-0.000000)
-#define HPF_Fs32000_Gain9_B2                             0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs32000_Gain10_A0                            2.081139
-#define HPF_Fs32000_Gain10_A1                            (-1.081139)
-#define HPF_Fs32000_Gain10_A2                            0.000000
-#define HPF_Fs32000_Gain10_B1                            (-0.000000)
-#define HPF_Fs32000_Gain10_B2                            0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs32000_Gain11_A0                           2.274067
-#define HPF_Fs32000_Gain11_A1                           (-1.274067)
-#define HPF_Fs32000_Gain11_A2                           0.000000
-#define HPF_Fs32000_Gain11_B1                           (-0.000000)
-#define HPF_Fs32000_Gain11_B2                           0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs32000_Gain12_A0                          2.490536
-#define HPF_Fs32000_Gain12_A1                          (-1.490536)
-#define HPF_Fs32000_Gain12_A2                          0.000000
-#define HPF_Fs32000_Gain12_B1                          (-0.000000)
-#define HPF_Fs32000_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs32000_Gain13_A0                           2.733418
-#define HPF_Fs32000_Gain13_A1                           (-1.733418)
-#define HPF_Fs32000_Gain13_A2                           0.000000
-#define HPF_Fs32000_Gain13_B1                           (-0.000000)
-#define HPF_Fs32000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs32000_Gain14_A0                           3.005936
-#define HPF_Fs32000_Gain14_A1                           (-2.005936)
-#define HPF_Fs32000_Gain14_A2                           0.000000
-#define HPF_Fs32000_Gain14_B1                           (-0.000000)
-#define HPF_Fs32000_Gain14_B2                           0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs32000_Gain15_A0                          3.311707
-#define HPF_Fs32000_Gain15_A1                          (-2.311707)
-#define HPF_Fs32000_Gain15_A2                          0.000000
-#define HPF_Fs32000_Gain15_B1                          (-0.000000)
-#define HPF_Fs32000_Gain15_B2                          0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs32000_Gain1_A0 1.061009
+#define HPF_Fs32000_Gain1_A1 (-0.061009)
+#define HPF_Fs32000_Gain1_A2 0.000000
+#define HPF_Fs32000_Gain1_B1 (-0.000000)
+#define HPF_Fs32000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs32000_Gain2_A0 1.129463
+#define HPF_Fs32000_Gain2_A1 (-0.129463)
+#define HPF_Fs32000_Gain2_A2 0.000000
+#define HPF_Fs32000_Gain2_B1 (-0.000000)
+#define HPF_Fs32000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs32000_Gain3_A0 1.206267
+#define HPF_Fs32000_Gain3_A1 (-0.206267)
+#define HPF_Fs32000_Gain3_A2 0.000000
+#define HPF_Fs32000_Gain3_B1 (-0.000000)
+#define HPF_Fs32000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs32000_Gain4_A0 1.292447
+#define HPF_Fs32000_Gain4_A1 (-0.292447)
+#define HPF_Fs32000_Gain4_A2 0.000000
+#define HPF_Fs32000_Gain4_B1 (-0.000000)
+#define HPF_Fs32000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs32000_Gain5_A0 1.389140
+#define HPF_Fs32000_Gain5_A1 (-0.389140)
+#define HPF_Fs32000_Gain5_A2 0.000000
+#define HPF_Fs32000_Gain5_B1 (-0.000000)
+#define HPF_Fs32000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs32000_Gain6_A0 1.497631
+#define HPF_Fs32000_Gain6_A1 (-0.497631)
+#define HPF_Fs32000_Gain6_A2 0.000000
+#define HPF_Fs32000_Gain6_B1 (-0.000000)
+#define HPF_Fs32000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs32000_Gain7_A0 1.619361
+#define HPF_Fs32000_Gain7_A1 (-0.619361)
+#define HPF_Fs32000_Gain7_A2 0.000000
+#define HPF_Fs32000_Gain7_B1 (-0.000000)
+#define HPF_Fs32000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs32000_Gain8_A0 1.755943
+#define HPF_Fs32000_Gain8_A1 (-0.755943)
+#define HPF_Fs32000_Gain8_A2 0.000000
+#define HPF_Fs32000_Gain8_B1 (-0.000000)
+#define HPF_Fs32000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs32000_Gain9_A0 1.909191
+#define HPF_Fs32000_Gain9_A1 (-0.909191)
+#define HPF_Fs32000_Gain9_A2 0.000000
+#define HPF_Fs32000_Gain9_B1 (-0.000000)
+#define HPF_Fs32000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs32000_Gain10_A0 2.081139
+#define HPF_Fs32000_Gain10_A1 (-1.081139)
+#define HPF_Fs32000_Gain10_A2 0.000000
+#define HPF_Fs32000_Gain10_B1 (-0.000000)
+#define HPF_Fs32000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs32000_Gain11_A0 2.274067
+#define HPF_Fs32000_Gain11_A1 (-1.274067)
+#define HPF_Fs32000_Gain11_A2 0.000000
+#define HPF_Fs32000_Gain11_B1 (-0.000000)
+#define HPF_Fs32000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs32000_Gain12_A0 2.490536
+#define HPF_Fs32000_Gain12_A1 (-1.490536)
+#define HPF_Fs32000_Gain12_A2 0.000000
+#define HPF_Fs32000_Gain12_B1 (-0.000000)
+#define HPF_Fs32000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs32000_Gain13_A0 2.733418
+#define HPF_Fs32000_Gain13_A1 (-1.733418)
+#define HPF_Fs32000_Gain13_A2 0.000000
+#define HPF_Fs32000_Gain13_B1 (-0.000000)
+#define HPF_Fs32000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs32000_Gain14_A0 3.005936
+#define HPF_Fs32000_Gain14_A1 (-2.005936)
+#define HPF_Fs32000_Gain14_A2 0.000000
+#define HPF_Fs32000_Gain14_B1 (-0.000000)
+#define HPF_Fs32000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs32000_Gain15_A0 3.311707
+#define HPF_Fs32000_Gain15_A1 (-2.311707)
+#define HPF_Fs32000_Gain15_A2 0.000000
+#define HPF_Fs32000_Gain15_B1 (-0.000000)
+#define HPF_Fs32000_Gain15_B2 0.000000
 /* Coefficients for sample rate 44100Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs44100_Gain1_A0                            1.074364
-#define HPF_Fs44100_Gain1_A1                            (-0.293257)
-#define HPF_Fs44100_Gain1_A2                            0.000000
-#define HPF_Fs44100_Gain1_B1                            (-0.218894)
-#define HPF_Fs44100_Gain1_B2                            0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs44100_Gain2_A0                            1.157801
-#define HPF_Fs44100_Gain2_A1                            (-0.376695)
-#define HPF_Fs44100_Gain2_A2                            0.000000
-#define HPF_Fs44100_Gain2_B1                            (-0.218894)
-#define HPF_Fs44100_Gain2_B2                            0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs44100_Gain3_A0                           1.251420
-#define HPF_Fs44100_Gain3_A1                           (-0.470313)
-#define HPF_Fs44100_Gain3_A2                           0.000000
-#define HPF_Fs44100_Gain3_B1                           (-0.218894)
-#define HPF_Fs44100_Gain3_B2                           0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs44100_Gain4_A0                            1.356461
-#define HPF_Fs44100_Gain4_A1                            (-0.575355)
-#define HPF_Fs44100_Gain4_A2                            0.000000
-#define HPF_Fs44100_Gain4_B1                            (-0.218894)
-#define HPF_Fs44100_Gain4_B2                            0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs44100_Gain5_A0                            1.474320
-#define HPF_Fs44100_Gain5_A1                            (-0.693213)
-#define HPF_Fs44100_Gain5_A2                            0.000000
-#define HPF_Fs44100_Gain5_B1                            (-0.218894)
-#define HPF_Fs44100_Gain5_B2                            0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs44100_Gain6_A0                           1.606559
-#define HPF_Fs44100_Gain6_A1                           (-0.825453)
-#define HPF_Fs44100_Gain6_A2                           0.000000
-#define HPF_Fs44100_Gain6_B1                           (-0.218894)
-#define HPF_Fs44100_Gain6_B2                           0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs44100_Gain7_A0                           1.754935
-#define HPF_Fs44100_Gain7_A1                           (-0.973828)
-#define HPF_Fs44100_Gain7_A2                           0.000000
-#define HPF_Fs44100_Gain7_B1                           (-0.218894)
-#define HPF_Fs44100_Gain7_B2                           0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs44100_Gain8_A0                            1.921414
-#define HPF_Fs44100_Gain8_A1                            (-1.140308)
-#define HPF_Fs44100_Gain8_A2                            0.000000
-#define HPF_Fs44100_Gain8_B1                            (-0.218894)
-#define HPF_Fs44100_Gain8_B2                            0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs44100_Gain9_A0                            2.108208
-#define HPF_Fs44100_Gain9_A1                            (-1.327101)
-#define HPF_Fs44100_Gain9_A2                            0.000000
-#define HPF_Fs44100_Gain9_B1                            (-0.218894)
-#define HPF_Fs44100_Gain9_B2                            0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs44100_Gain10_A0                          2.317793
-#define HPF_Fs44100_Gain10_A1                          (-1.536687)
-#define HPF_Fs44100_Gain10_A2                          0.000000
-#define HPF_Fs44100_Gain10_B1                          (-0.218894)
-#define HPF_Fs44100_Gain10_B2                          0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs44100_Gain11_A0                          2.552952
-#define HPF_Fs44100_Gain11_A1                          (-1.771846)
-#define HPF_Fs44100_Gain11_A2                          0.000000
-#define HPF_Fs44100_Gain11_B1                          (-0.218894)
-#define HPF_Fs44100_Gain11_B2                          0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs44100_Gain12_A0                          2.816805
-#define HPF_Fs44100_Gain12_A1                          (-2.035698)
-#define HPF_Fs44100_Gain12_A2                          0.000000
-#define HPF_Fs44100_Gain12_B1                          (-0.218894)
-#define HPF_Fs44100_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs44100_Gain13_A0                           3.112852
-#define HPF_Fs44100_Gain13_A1                           (-2.331746)
-#define HPF_Fs44100_Gain13_A2                           0.000000
-#define HPF_Fs44100_Gain13_B1                           (-0.218894)
-#define HPF_Fs44100_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs44100_Gain14_A0                          3.445023
-#define HPF_Fs44100_Gain14_A1                          (-2.663916)
-#define HPF_Fs44100_Gain14_A2                          0.000000
-#define HPF_Fs44100_Gain14_B1                          (-0.218894)
-#define HPF_Fs44100_Gain14_B2                          0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs44100_Gain15_A0                          3.817724
-#define HPF_Fs44100_Gain15_A1                          (-3.036618)
-#define HPF_Fs44100_Gain15_A2                          0.000000
-#define HPF_Fs44100_Gain15_B1                          (-0.218894)
-#define HPF_Fs44100_Gain15_B2                          0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs44100_Gain1_A0 1.074364
+#define HPF_Fs44100_Gain1_A1 (-0.293257)
+#define HPF_Fs44100_Gain1_A2 0.000000
+#define HPF_Fs44100_Gain1_B1 (-0.218894)
+#define HPF_Fs44100_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs44100_Gain2_A0 1.157801
+#define HPF_Fs44100_Gain2_A1 (-0.376695)
+#define HPF_Fs44100_Gain2_A2 0.000000
+#define HPF_Fs44100_Gain2_B1 (-0.218894)
+#define HPF_Fs44100_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs44100_Gain3_A0 1.251420
+#define HPF_Fs44100_Gain3_A1 (-0.470313)
+#define HPF_Fs44100_Gain3_A2 0.000000
+#define HPF_Fs44100_Gain3_B1 (-0.218894)
+#define HPF_Fs44100_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs44100_Gain4_A0 1.356461
+#define HPF_Fs44100_Gain4_A1 (-0.575355)
+#define HPF_Fs44100_Gain4_A2 0.000000
+#define HPF_Fs44100_Gain4_B1 (-0.218894)
+#define HPF_Fs44100_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs44100_Gain5_A0 1.474320
+#define HPF_Fs44100_Gain5_A1 (-0.693213)
+#define HPF_Fs44100_Gain5_A2 0.000000
+#define HPF_Fs44100_Gain5_B1 (-0.218894)
+#define HPF_Fs44100_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs44100_Gain6_A0 1.606559
+#define HPF_Fs44100_Gain6_A1 (-0.825453)
+#define HPF_Fs44100_Gain6_A2 0.000000
+#define HPF_Fs44100_Gain6_B1 (-0.218894)
+#define HPF_Fs44100_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs44100_Gain7_A0 1.754935
+#define HPF_Fs44100_Gain7_A1 (-0.973828)
+#define HPF_Fs44100_Gain7_A2 0.000000
+#define HPF_Fs44100_Gain7_B1 (-0.218894)
+#define HPF_Fs44100_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs44100_Gain8_A0 1.921414
+#define HPF_Fs44100_Gain8_A1 (-1.140308)
+#define HPF_Fs44100_Gain8_A2 0.000000
+#define HPF_Fs44100_Gain8_B1 (-0.218894)
+#define HPF_Fs44100_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs44100_Gain9_A0 2.108208
+#define HPF_Fs44100_Gain9_A1 (-1.327101)
+#define HPF_Fs44100_Gain9_A2 0.000000
+#define HPF_Fs44100_Gain9_B1 (-0.218894)
+#define HPF_Fs44100_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs44100_Gain10_A0 2.317793
+#define HPF_Fs44100_Gain10_A1 (-1.536687)
+#define HPF_Fs44100_Gain10_A2 0.000000
+#define HPF_Fs44100_Gain10_B1 (-0.218894)
+#define HPF_Fs44100_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs44100_Gain11_A0 2.552952
+#define HPF_Fs44100_Gain11_A1 (-1.771846)
+#define HPF_Fs44100_Gain11_A2 0.000000
+#define HPF_Fs44100_Gain11_B1 (-0.218894)
+#define HPF_Fs44100_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs44100_Gain12_A0 2.816805
+#define HPF_Fs44100_Gain12_A1 (-2.035698)
+#define HPF_Fs44100_Gain12_A2 0.000000
+#define HPF_Fs44100_Gain12_B1 (-0.218894)
+#define HPF_Fs44100_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs44100_Gain13_A0 3.112852
+#define HPF_Fs44100_Gain13_A1 (-2.331746)
+#define HPF_Fs44100_Gain13_A2 0.000000
+#define HPF_Fs44100_Gain13_B1 (-0.218894)
+#define HPF_Fs44100_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs44100_Gain14_A0 3.445023
+#define HPF_Fs44100_Gain14_A1 (-2.663916)
+#define HPF_Fs44100_Gain14_A2 0.000000
+#define HPF_Fs44100_Gain14_B1 (-0.218894)
+#define HPF_Fs44100_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs44100_Gain15_A0 3.817724
+#define HPF_Fs44100_Gain15_A1 (-3.036618)
+#define HPF_Fs44100_Gain15_A2 0.000000
+#define HPF_Fs44100_Gain15_B1 (-0.218894)
+#define HPF_Fs44100_Gain15_B2 0.000000
 /* Coefficients for sample rate 48000Hz */
-                                                                    /* Gain =  1.000000 dB */
-#define HPF_Fs48000_Gain1_A0                          1.077357
-#define HPF_Fs48000_Gain1_A1                          (-0.345306)
-#define HPF_Fs48000_Gain1_A2                          0.000000
-#define HPF_Fs48000_Gain1_B1                          (-0.267949)
-#define HPF_Fs48000_Gain1_B2                          0.000000
-                                                                    /* Gain =  2.000000 dB */
-#define HPF_Fs48000_Gain2_A0                          1.164152
-#define HPF_Fs48000_Gain2_A1                          (-0.432101)
-#define HPF_Fs48000_Gain2_A2                          0.000000
-#define HPF_Fs48000_Gain2_B1                          (-0.267949)
-#define HPF_Fs48000_Gain2_B2                          0.000000
-                                                                    /* Gain =  3.000000 dB */
-#define HPF_Fs48000_Gain3_A0                          1.261538
-#define HPF_Fs48000_Gain3_A1                          (-0.529488)
-#define HPF_Fs48000_Gain3_A2                          0.000000
-#define HPF_Fs48000_Gain3_B1                          (-0.267949)
-#define HPF_Fs48000_Gain3_B2                          0.000000
-                                                                    /* Gain =  4.000000 dB */
-#define HPF_Fs48000_Gain4_A0                           1.370807
-#define HPF_Fs48000_Gain4_A1                           (-0.638757)
-#define HPF_Fs48000_Gain4_A2                           0.000000
-#define HPF_Fs48000_Gain4_B1                           (-0.267949)
-#define HPF_Fs48000_Gain4_B2                           0.000000
-                                                                    /* Gain =  5.000000 dB */
-#define HPF_Fs48000_Gain5_A0                           1.493409
-#define HPF_Fs48000_Gain5_A1                           (-0.761359)
-#define HPF_Fs48000_Gain5_A2                           0.000000
-#define HPF_Fs48000_Gain5_B1                           (-0.267949)
-#define HPF_Fs48000_Gain5_B2                           0.000000
-                                                                    /* Gain =  6.000000 dB */
-#define HPF_Fs48000_Gain6_A0                            1.630971
-#define HPF_Fs48000_Gain6_A1                            (-0.898920)
-#define HPF_Fs48000_Gain6_A2                            0.000000
-#define HPF_Fs48000_Gain6_B1                            (-0.267949)
-#define HPF_Fs48000_Gain6_B2                            0.000000
-                                                                    /* Gain =  7.000000 dB */
-#define HPF_Fs48000_Gain7_A0                            1.785318
-#define HPF_Fs48000_Gain7_A1                            (-1.053267)
-#define HPF_Fs48000_Gain7_A2                            0.000000
-#define HPF_Fs48000_Gain7_B1                            (-0.267949)
-#define HPF_Fs48000_Gain7_B2                            0.000000
-                                                                    /* Gain =  8.000000 dB */
-#define HPF_Fs48000_Gain8_A0                           1.958498
-#define HPF_Fs48000_Gain8_A1                           (-1.226447)
-#define HPF_Fs48000_Gain8_A2                           0.000000
-#define HPF_Fs48000_Gain8_B1                           (-0.267949)
-#define HPF_Fs48000_Gain8_B2                           0.000000
-                                                                    /* Gain =  9.000000 dB */
-#define HPF_Fs48000_Gain9_A0                          2.152809
-#define HPF_Fs48000_Gain9_A1                          (-1.420758)
-#define HPF_Fs48000_Gain9_A2                          0.000000
-#define HPF_Fs48000_Gain9_B1                          (-0.267949)
-#define HPF_Fs48000_Gain9_B2                          0.000000
-                                                                    /* Gain =  10.000000 dB */
-#define HPF_Fs48000_Gain10_A0                         2.370829
-#define HPF_Fs48000_Gain10_A1                         (-1.638778)
-#define HPF_Fs48000_Gain10_A2                         0.000000
-#define HPF_Fs48000_Gain10_B1                         (-0.267949)
-#define HPF_Fs48000_Gain10_B2                         0.000000
-                                                                    /* Gain =  11.000000 dB */
-#define HPF_Fs48000_Gain11_A0                          2.615452
-#define HPF_Fs48000_Gain11_A1                          (-1.883401)
-#define HPF_Fs48000_Gain11_A2                          0.000000
-#define HPF_Fs48000_Gain11_B1                          (-0.267949)
-#define HPF_Fs48000_Gain11_B2                          0.000000
-                                                                    /* Gain =  12.000000 dB */
-#define HPF_Fs48000_Gain12_A0                          2.889924
-#define HPF_Fs48000_Gain12_A1                          (-2.157873)
-#define HPF_Fs48000_Gain12_A2                          0.000000
-#define HPF_Fs48000_Gain12_B1                          (-0.267949)
-#define HPF_Fs48000_Gain12_B2                          0.000000
-                                                                    /* Gain =  13.000000 dB */
-#define HPF_Fs48000_Gain13_A0                           3.197886
-#define HPF_Fs48000_Gain13_A1                           (-2.465835)
-#define HPF_Fs48000_Gain13_A2                           0.000000
-#define HPF_Fs48000_Gain13_B1                           (-0.267949)
-#define HPF_Fs48000_Gain13_B2                           0.000000
-                                                                    /* Gain =  14.000000 dB */
-#define HPF_Fs48000_Gain14_A0                          3.543425
-#define HPF_Fs48000_Gain14_A1                          (-2.811374)
-#define HPF_Fs48000_Gain14_A2                          0.000000
-#define HPF_Fs48000_Gain14_B1                          (-0.267949)
-#define HPF_Fs48000_Gain14_B2                          0.000000
-                                                                    /* Gain =  15.000000 dB */
-#define HPF_Fs48000_Gain15_A0                         3.931127
-#define HPF_Fs48000_Gain15_A1                         (-3.199076)
-#define HPF_Fs48000_Gain15_A2                         0.000000
-#define HPF_Fs48000_Gain15_B1                         (-0.267949)
-#define HPF_Fs48000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs48000_Gain1_A0 1.077357
+#define HPF_Fs48000_Gain1_A1 (-0.345306)
+#define HPF_Fs48000_Gain1_A2 0.000000
+#define HPF_Fs48000_Gain1_B1 (-0.267949)
+#define HPF_Fs48000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs48000_Gain2_A0 1.164152
+#define HPF_Fs48000_Gain2_A1 (-0.432101)
+#define HPF_Fs48000_Gain2_A2 0.000000
+#define HPF_Fs48000_Gain2_B1 (-0.267949)
+#define HPF_Fs48000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs48000_Gain3_A0 1.261538
+#define HPF_Fs48000_Gain3_A1 (-0.529488)
+#define HPF_Fs48000_Gain3_A2 0.000000
+#define HPF_Fs48000_Gain3_B1 (-0.267949)
+#define HPF_Fs48000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs48000_Gain4_A0 1.370807
+#define HPF_Fs48000_Gain4_A1 (-0.638757)
+#define HPF_Fs48000_Gain4_A2 0.000000
+#define HPF_Fs48000_Gain4_B1 (-0.267949)
+#define HPF_Fs48000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs48000_Gain5_A0 1.493409
+#define HPF_Fs48000_Gain5_A1 (-0.761359)
+#define HPF_Fs48000_Gain5_A2 0.000000
+#define HPF_Fs48000_Gain5_B1 (-0.267949)
+#define HPF_Fs48000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs48000_Gain6_A0 1.630971
+#define HPF_Fs48000_Gain6_A1 (-0.898920)
+#define HPF_Fs48000_Gain6_A2 0.000000
+#define HPF_Fs48000_Gain6_B1 (-0.267949)
+#define HPF_Fs48000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs48000_Gain7_A0 1.785318
+#define HPF_Fs48000_Gain7_A1 (-1.053267)
+#define HPF_Fs48000_Gain7_A2 0.000000
+#define HPF_Fs48000_Gain7_B1 (-0.267949)
+#define HPF_Fs48000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs48000_Gain8_A0 1.958498
+#define HPF_Fs48000_Gain8_A1 (-1.226447)
+#define HPF_Fs48000_Gain8_A2 0.000000
+#define HPF_Fs48000_Gain8_B1 (-0.267949)
+#define HPF_Fs48000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs48000_Gain9_A0 2.152809
+#define HPF_Fs48000_Gain9_A1 (-1.420758)
+#define HPF_Fs48000_Gain9_A2 0.000000
+#define HPF_Fs48000_Gain9_B1 (-0.267949)
+#define HPF_Fs48000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs48000_Gain10_A0 2.370829
+#define HPF_Fs48000_Gain10_A1 (-1.638778)
+#define HPF_Fs48000_Gain10_A2 0.000000
+#define HPF_Fs48000_Gain10_B1 (-0.267949)
+#define HPF_Fs48000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs48000_Gain11_A0 2.615452
+#define HPF_Fs48000_Gain11_A1 (-1.883401)
+#define HPF_Fs48000_Gain11_A2 0.000000
+#define HPF_Fs48000_Gain11_B1 (-0.267949)
+#define HPF_Fs48000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs48000_Gain12_A0 2.889924
+#define HPF_Fs48000_Gain12_A1 (-2.157873)
+#define HPF_Fs48000_Gain12_A2 0.000000
+#define HPF_Fs48000_Gain12_B1 (-0.267949)
+#define HPF_Fs48000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs48000_Gain13_A0 3.197886
+#define HPF_Fs48000_Gain13_A1 (-2.465835)
+#define HPF_Fs48000_Gain13_A2 0.000000
+#define HPF_Fs48000_Gain13_B1 (-0.267949)
+#define HPF_Fs48000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs48000_Gain14_A0 3.543425
+#define HPF_Fs48000_Gain14_A1 (-2.811374)
+#define HPF_Fs48000_Gain14_A2 0.000000
+#define HPF_Fs48000_Gain14_B1 (-0.267949)
+#define HPF_Fs48000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs48000_Gain15_A0 3.931127
+#define HPF_Fs48000_Gain15_A1 (-3.199076)
+#define HPF_Fs48000_Gain15_A2 0.000000
+#define HPF_Fs48000_Gain15_B1 (-0.267949)
+#define HPF_Fs48000_Gain15_B2 0.000000
 
 /* Coefficients for sample rate 88200 */
 /* Gain = 1.000000 dB */
-#define HPF_Fs88200_Gain1_A0                          1.094374f
-#define HPF_Fs88200_Gain1_A1                          (-0.641256f)
-#define HPF_Fs88200_Gain1_A2                          0.000000f
-#define HPF_Fs88200_Gain1_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain1_B2                          0.000000f
+#define HPF_Fs88200_Gain1_A0 1.094374f
+#define HPF_Fs88200_Gain1_A1 (-0.641256f)
+#define HPF_Fs88200_Gain1_A2 0.000000f
+#define HPF_Fs88200_Gain1_B1 (-0.546882f)
+#define HPF_Fs88200_Gain1_B2 0.000000f
 /* Gain = 2.000000 dB */
-#define HPF_Fs88200_Gain2_A0                          1.200264f
-#define HPF_Fs88200_Gain2_A1                          (-0.747146f)
-#define HPF_Fs88200_Gain2_A2                          0.000000f
-#define HPF_Fs88200_Gain2_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain2_B2                          0.000000f
+#define HPF_Fs88200_Gain2_A0 1.200264f
+#define HPF_Fs88200_Gain2_A1 (-0.747146f)
+#define HPF_Fs88200_Gain2_A2 0.000000f
+#define HPF_Fs88200_Gain2_B1 (-0.546882f)
+#define HPF_Fs88200_Gain2_B2 0.000000f
 /* Gain = 3.000000 dB */
-#define HPF_Fs88200_Gain3_A0                          1.319074f
-#define HPF_Fs88200_Gain3_A1                          (-0.865956f)
-#define HPF_Fs88200_Gain3_A2                          0.000000f
-#define HPF_Fs88200_Gain3_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain3_B2                          0.000000f
+#define HPF_Fs88200_Gain3_A0 1.319074f
+#define HPF_Fs88200_Gain3_A1 (-0.865956f)
+#define HPF_Fs88200_Gain3_A2 0.000000f
+#define HPF_Fs88200_Gain3_B1 (-0.546882f)
+#define HPF_Fs88200_Gain3_B2 0.000000f
 /* Gain = 4.000000 dB */
-#define HPF_Fs88200_Gain4_A0                          1.452380f
-#define HPF_Fs88200_Gain4_A1                          (-0.999263f)
-#define HPF_Fs88200_Gain4_A2                          0.000000f
-#define HPF_Fs88200_Gain4_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain4_B2                          0.000000f
+#define HPF_Fs88200_Gain4_A0 1.452380f
+#define HPF_Fs88200_Gain4_A1 (-0.999263f)
+#define HPF_Fs88200_Gain4_A2 0.000000f
+#define HPF_Fs88200_Gain4_B1 (-0.546882f)
+#define HPF_Fs88200_Gain4_B2 0.000000f
 /* Gain = 5.000000 dB */
-#define HPF_Fs88200_Gain5_A0                          1.601953f
-#define HPF_Fs88200_Gain5_A1                          (-1.148836f)
-#define HPF_Fs88200_Gain5_A2                          0.000000f
-#define HPF_Fs88200_Gain5_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain5_B2                          0.000000f
+#define HPF_Fs88200_Gain5_A0 1.601953f
+#define HPF_Fs88200_Gain5_A1 (-1.148836f)
+#define HPF_Fs88200_Gain5_A2 0.000000f
+#define HPF_Fs88200_Gain5_B1 (-0.546882f)
+#define HPF_Fs88200_Gain5_B2 0.000000f
 /* Gain = 6.000000 dB */
-#define HPF_Fs88200_Gain6_A0                          1.769777f
-#define HPF_Fs88200_Gain6_A1                          (-1.316659f)
-#define HPF_Fs88200_Gain6_A2                          0.000000f
-#define HPF_Fs88200_Gain6_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain6_B2                          0.000000f
+#define HPF_Fs88200_Gain6_A0 1.769777f
+#define HPF_Fs88200_Gain6_A1 (-1.316659f)
+#define HPF_Fs88200_Gain6_A2 0.000000f
+#define HPF_Fs88200_Gain6_B1 (-0.546882f)
+#define HPF_Fs88200_Gain6_B2 0.000000f
 /* Gain = 7.000000 dB */
-#define HPF_Fs88200_Gain7_A0                          1.958078f
-#define HPF_Fs88200_Gain7_A1                          (-1.504960f)
-#define HPF_Fs88200_Gain7_A2                          0.000000f
-#define HPF_Fs88200_Gain7_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain7_B2                          0.000000f
+#define HPF_Fs88200_Gain7_A0 1.958078f
+#define HPF_Fs88200_Gain7_A1 (-1.504960f)
+#define HPF_Fs88200_Gain7_A2 0.000000f
+#define HPF_Fs88200_Gain7_B1 (-0.546882f)
+#define HPF_Fs88200_Gain7_B2 0.000000f
 /* Gain = 8.000000 dB */
-#define HPF_Fs88200_Gain8_A0                          2.169355f
-#define HPF_Fs88200_Gain8_A1                          (-1.716238f)
-#define HPF_Fs88200_Gain8_A2                          0.000000f
-#define HPF_Fs88200_Gain8_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain8_B2                          0.000000f
+#define HPF_Fs88200_Gain8_A0 2.169355f
+#define HPF_Fs88200_Gain8_A1 (-1.716238f)
+#define HPF_Fs88200_Gain8_A2 0.000000f
+#define HPF_Fs88200_Gain8_B1 (-0.546882f)
+#define HPF_Fs88200_Gain8_B2 0.000000f
 /* Gain = 9.000000 dB */
-#define HPF_Fs88200_Gain9_A0                          2.406412f
-#define HPF_Fs88200_Gain9_A1                          (-1.953295f)
-#define HPF_Fs88200_Gain9_A2                          0.000000f
-#define HPF_Fs88200_Gain9_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain9_B2                          0.000000f
+#define HPF_Fs88200_Gain9_A0 2.406412f
+#define HPF_Fs88200_Gain9_A1 (-1.953295f)
+#define HPF_Fs88200_Gain9_A2 0.000000f
+#define HPF_Fs88200_Gain9_B1 (-0.546882f)
+#define HPF_Fs88200_Gain9_B2 0.000000f
 /* Gain = 10.000000 dB */
-#define HPF_Fs88200_Gain10_A0                          2.672395f
-#define HPF_Fs88200_Gain10_A1                          (-2.219277f)
-#define HPF_Fs88200_Gain10_A2                          0.000000f
-#define HPF_Fs88200_Gain10_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain10_B2                          0.000000f
+#define HPF_Fs88200_Gain10_A0 2.672395f
+#define HPF_Fs88200_Gain10_A1 (-2.219277f)
+#define HPF_Fs88200_Gain10_A2 0.000000f
+#define HPF_Fs88200_Gain10_B1 (-0.546882f)
+#define HPF_Fs88200_Gain10_B2 0.000000f
 /* Gain = 11.000000 dB */
-#define HPF_Fs88200_Gain11_A0                          2.970832f
-#define HPF_Fs88200_Gain11_A1                          (-2.517714f)
-#define HPF_Fs88200_Gain11_A2                          0.000000f
-#define HPF_Fs88200_Gain11_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain11_B2                          0.000000f
+#define HPF_Fs88200_Gain11_A0 2.970832f
+#define HPF_Fs88200_Gain11_A1 (-2.517714f)
+#define HPF_Fs88200_Gain11_A2 0.000000f
+#define HPF_Fs88200_Gain11_B1 (-0.546882f)
+#define HPF_Fs88200_Gain11_B2 0.000000f
 /* Gain = 12.000000 dB */
-#define HPF_Fs88200_Gain12_A0                          3.305684f
-#define HPF_Fs88200_Gain12_A1                          (-2.852566f)
-#define HPF_Fs88200_Gain12_A2                          0.000000f
-#define HPF_Fs88200_Gain12_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain12_B2                          0.000000f
+#define HPF_Fs88200_Gain12_A0 3.305684f
+#define HPF_Fs88200_Gain12_A1 (-2.852566f)
+#define HPF_Fs88200_Gain12_A2 0.000000f
+#define HPF_Fs88200_Gain12_B1 (-0.546882f)
+#define HPF_Fs88200_Gain12_B2 0.000000f
 /* Gain = 13.000000 dB */
-#define HPF_Fs88200_Gain13_A0                          3.681394f
-#define HPF_Fs88200_Gain13_A1                          (-3.228276f)
-#define HPF_Fs88200_Gain13_A2                          0.000000f
-#define HPF_Fs88200_Gain13_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain13_B2                          0.000000f
+#define HPF_Fs88200_Gain13_A0 3.681394f
+#define HPF_Fs88200_Gain13_A1 (-3.228276f)
+#define HPF_Fs88200_Gain13_A2 0.000000f
+#define HPF_Fs88200_Gain13_B1 (-0.546882f)
+#define HPF_Fs88200_Gain13_B2 0.000000f
 /* Gain = 14.000000 dB */
-#define HPF_Fs88200_Gain14_A0                          4.102947f
-#define HPF_Fs88200_Gain14_A1                          (-3.649830f)
-#define HPF_Fs88200_Gain14_A2                          0.000000f
-#define HPF_Fs88200_Gain14_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain14_B2                          0.000000f
+#define HPF_Fs88200_Gain14_A0 4.102947f
+#define HPF_Fs88200_Gain14_A1 (-3.649830f)
+#define HPF_Fs88200_Gain14_A2 0.000000f
+#define HPF_Fs88200_Gain14_B1 (-0.546882f)
+#define HPF_Fs88200_Gain14_B2 0.000000f
 /* Gain = 15.000000 dB */
-#define HPF_Fs88200_Gain15_A0                          4.575938f
-#define HPF_Fs88200_Gain15_A1                          (-4.122820f)
-#define HPF_Fs88200_Gain15_A2                          0.000000f
-#define HPF_Fs88200_Gain15_B1                          (-0.546882f)
-#define HPF_Fs88200_Gain15_B2                          0.000000f
+#define HPF_Fs88200_Gain15_A0 4.575938f
+#define HPF_Fs88200_Gain15_A1 (-4.122820f)
+#define HPF_Fs88200_Gain15_A2 0.000000f
+#define HPF_Fs88200_Gain15_B1 (-0.546882f)
+#define HPF_Fs88200_Gain15_B2 0.000000f
 
 /* Coefficients for sample rate 96000Hz */
-                                                                 /* Gain =  1.000000 dB */
-#define HPF_Fs96000_Gain1_A0                          1.096233
-#define HPF_Fs96000_Gain1_A1                          (-0.673583)
-#define HPF_Fs96000_Gain1_A2                          0.000000
-#define HPF_Fs96000_Gain1_B1                          (-0.577350)
-#define HPF_Fs96000_Gain1_B2                          0.000000
-                                                                 /* Gain =  2.000000 dB */
-#define HPF_Fs96000_Gain2_A0                          1.204208
-#define HPF_Fs96000_Gain2_A1                          (-0.781558)
-#define HPF_Fs96000_Gain2_A2                          0.000000
-#define HPF_Fs96000_Gain2_B1                          (-0.577350)
-#define HPF_Fs96000_Gain2_B2                          0.000000
-                                                                 /* Gain =  3.000000 dB */
-#define HPF_Fs96000_Gain3_A0                          1.325358
-#define HPF_Fs96000_Gain3_A1                          (-0.902708)
-#define HPF_Fs96000_Gain3_A2                          0.000000
-#define HPF_Fs96000_Gain3_B1                          (-0.577350)
-#define HPF_Fs96000_Gain3_B2                          0.000000
-                                                                 /* Gain =  4.000000 dB */
-#define HPF_Fs96000_Gain4_A0                           1.461291
-#define HPF_Fs96000_Gain4_A1                           (-1.038641)
-#define HPF_Fs96000_Gain4_A2                           0.000000
-#define HPF_Fs96000_Gain4_B1                           (-0.577350)
-#define HPF_Fs96000_Gain4_B2                           0.000000
-                                                                 /* Gain =  5.000000 dB */
-#define HPF_Fs96000_Gain5_A0                           1.613810
-#define HPF_Fs96000_Gain5_A1                           (-1.191160)
-#define HPF_Fs96000_Gain5_A2                           0.000000
-#define HPF_Fs96000_Gain5_B1                           (-0.577350)
-#define HPF_Fs96000_Gain5_B2                           0.000000
-                                                                 /* Gain =  6.000000 dB */
-#define HPF_Fs96000_Gain6_A0                            1.784939
-#define HPF_Fs96000_Gain6_A1                            (-1.362289)
-#define HPF_Fs96000_Gain6_A2                            0.000000
-#define HPF_Fs96000_Gain6_B1                            (-0.577350)
-#define HPF_Fs96000_Gain6_B2                            0.000000
-                                                                /* Gain =  7.000000 dB */
-#define HPF_Fs96000_Gain7_A0                            1.976949
-#define HPF_Fs96000_Gain7_A1                            (-1.554299)
-#define HPF_Fs96000_Gain7_A2                            0.000000
-#define HPF_Fs96000_Gain7_B1                            (-0.577350)
-#define HPF_Fs96000_Gain7_B2                            0.000000
-                                                                 /* Gain =  8.000000 dB */
-#define HPF_Fs96000_Gain8_A0                           2.192387
-#define HPF_Fs96000_Gain8_A1                           (-1.769738)
-#define HPF_Fs96000_Gain8_A2                           0.000000
-#define HPF_Fs96000_Gain8_B1                           (-0.577350)
-#define HPF_Fs96000_Gain8_B2                           0.000000
-                                                                /* Gain =  9.000000 dB */
-#define HPF_Fs96000_Gain9_A0                          2.434113
-#define HPF_Fs96000_Gain9_A1                          (-2.011464)
-#define HPF_Fs96000_Gain9_A2                          0.000000
-#define HPF_Fs96000_Gain9_B1                          (-0.577350)
-#define HPF_Fs96000_Gain9_B2                          0.000000
-                                                               /* Gain =  10.000000 dB */
-#define HPF_Fs96000_Gain10_A0                        2.705335
-#define HPF_Fs96000_Gain10_A1                        (-2.282685)
-#define HPF_Fs96000_Gain10_A2                         0.000000
-#define HPF_Fs96000_Gain10_B1                         (-0.577350)
-#define HPF_Fs96000_Gain10_B2                         0.000000
-                                                              /* Gain =  11.000000 dB */
-#define HPF_Fs96000_Gain11_A0                          3.009650
-#define HPF_Fs96000_Gain11_A1                          (-2.587000)
-#define HPF_Fs96000_Gain11_A2                          0.000000
-#define HPF_Fs96000_Gain11_B1                          (-0.577350)
-#define HPF_Fs96000_Gain11_B2                          0.000000
-                                                                  /* Gain =  12.000000 dB */
-#define HPF_Fs96000_Gain12_A0                          3.351097
-#define HPF_Fs96000_Gain12_A1                          (-2.928447)
-#define HPF_Fs96000_Gain12_A2                          0.000000
-#define HPF_Fs96000_Gain12_B1                          (-0.577350)
-#define HPF_Fs96000_Gain12_B2                          0.000000
-                                                                /* Gain =  13.000000 dB */
-#define HPF_Fs96000_Gain13_A0                           3.734207
-#define HPF_Fs96000_Gain13_A1                           (-3.311558)
-#define HPF_Fs96000_Gain13_A2                           0.000000
-#define HPF_Fs96000_Gain13_B1                           (-0.577350)
-#define HPF_Fs96000_Gain13_B2                           0.000000
-                                                                 /* Gain =  14.000000 dB */
-#define HPF_Fs96000_Gain14_A0                         4.164064
-#define HPF_Fs96000_Gain14_A1                         (-3.741414)
-#define HPF_Fs96000_Gain14_A2                          0.000000
-#define HPF_Fs96000_Gain14_B1                          (-0.577350)
-#define HPF_Fs96000_Gain14_B2                          0.000000
-                                                                 /* Gain =  15.000000 dB */
-#define HPF_Fs96000_Gain15_A0                         4.646371
-#define HPF_Fs96000_Gain15_A1                         (-4.223721)
-#define HPF_Fs96000_Gain15_A2                         0.000000
-#define HPF_Fs96000_Gain15_B1                         (-0.577350)
-#define HPF_Fs96000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs96000_Gain1_A0 1.096233
+#define HPF_Fs96000_Gain1_A1 (-0.673583)
+#define HPF_Fs96000_Gain1_A2 0.000000
+#define HPF_Fs96000_Gain1_B1 (-0.577350)
+#define HPF_Fs96000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs96000_Gain2_A0 1.204208
+#define HPF_Fs96000_Gain2_A1 (-0.781558)
+#define HPF_Fs96000_Gain2_A2 0.000000
+#define HPF_Fs96000_Gain2_B1 (-0.577350)
+#define HPF_Fs96000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs96000_Gain3_A0 1.325358
+#define HPF_Fs96000_Gain3_A1 (-0.902708)
+#define HPF_Fs96000_Gain3_A2 0.000000
+#define HPF_Fs96000_Gain3_B1 (-0.577350)
+#define HPF_Fs96000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs96000_Gain4_A0 1.461291
+#define HPF_Fs96000_Gain4_A1 (-1.038641)
+#define HPF_Fs96000_Gain4_A2 0.000000
+#define HPF_Fs96000_Gain4_B1 (-0.577350)
+#define HPF_Fs96000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs96000_Gain5_A0 1.613810
+#define HPF_Fs96000_Gain5_A1 (-1.191160)
+#define HPF_Fs96000_Gain5_A2 0.000000
+#define HPF_Fs96000_Gain5_B1 (-0.577350)
+#define HPF_Fs96000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs96000_Gain6_A0 1.784939
+#define HPF_Fs96000_Gain6_A1 (-1.362289)
+#define HPF_Fs96000_Gain6_A2 0.000000
+#define HPF_Fs96000_Gain6_B1 (-0.577350)
+#define HPF_Fs96000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs96000_Gain7_A0 1.976949
+#define HPF_Fs96000_Gain7_A1 (-1.554299)
+#define HPF_Fs96000_Gain7_A2 0.000000
+#define HPF_Fs96000_Gain7_B1 (-0.577350)
+#define HPF_Fs96000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs96000_Gain8_A0 2.192387
+#define HPF_Fs96000_Gain8_A1 (-1.769738)
+#define HPF_Fs96000_Gain8_A2 0.000000
+#define HPF_Fs96000_Gain8_B1 (-0.577350)
+#define HPF_Fs96000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs96000_Gain9_A0 2.434113
+#define HPF_Fs96000_Gain9_A1 (-2.011464)
+#define HPF_Fs96000_Gain9_A2 0.000000
+#define HPF_Fs96000_Gain9_B1 (-0.577350)
+#define HPF_Fs96000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs96000_Gain10_A0 2.705335
+#define HPF_Fs96000_Gain10_A1 (-2.282685)
+#define HPF_Fs96000_Gain10_A2 0.000000
+#define HPF_Fs96000_Gain10_B1 (-0.577350)
+#define HPF_Fs96000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs96000_Gain11_A0 3.009650
+#define HPF_Fs96000_Gain11_A1 (-2.587000)
+#define HPF_Fs96000_Gain11_A2 0.000000
+#define HPF_Fs96000_Gain11_B1 (-0.577350)
+#define HPF_Fs96000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs96000_Gain12_A0 3.351097
+#define HPF_Fs96000_Gain12_A1 (-2.928447)
+#define HPF_Fs96000_Gain12_A2 0.000000
+#define HPF_Fs96000_Gain12_B1 (-0.577350)
+#define HPF_Fs96000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs96000_Gain13_A0 3.734207
+#define HPF_Fs96000_Gain13_A1 (-3.311558)
+#define HPF_Fs96000_Gain13_A2 0.000000
+#define HPF_Fs96000_Gain13_B1 (-0.577350)
+#define HPF_Fs96000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs96000_Gain14_A0 4.164064
+#define HPF_Fs96000_Gain14_A1 (-3.741414)
+#define HPF_Fs96000_Gain14_A2 0.000000
+#define HPF_Fs96000_Gain14_B1 (-0.577350)
+#define HPF_Fs96000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs96000_Gain15_A0 4.646371
+#define HPF_Fs96000_Gain15_A1 (-4.223721)
+#define HPF_Fs96000_Gain15_A2 0.000000
+#define HPF_Fs96000_Gain15_B1 (-0.577350)
+#define HPF_Fs96000_Gain15_B2 0.000000
 
 /* Coefficients for sample rate 176400 */
 /* Gain = 1.000000 dB */
-#define HPF_Fs176400_Gain1_A0                          1.106711f
-#define HPF_Fs176400_Gain1_A1                          (-0.855807f)
-#define HPF_Fs176400_Gain1_A2                          0.000000f
-#define HPF_Fs176400_Gain1_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain1_B2                          0.000000f
+#define HPF_Fs176400_Gain1_A0 1.106711f
+#define HPF_Fs176400_Gain1_A1 (-0.855807f)
+#define HPF_Fs176400_Gain1_A2 0.000000f
+#define HPF_Fs176400_Gain1_B1 (-0.749096f)
+#define HPF_Fs176400_Gain1_B2 0.000000f
 /* Gain = 2.000000 dB */
-#define HPF_Fs176400_Gain2_A0                          1.226443f
-#define HPF_Fs176400_Gain2_A1                          (-0.975539f)
-#define HPF_Fs176400_Gain2_A2                          0.000000f
-#define HPF_Fs176400_Gain2_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain2_B2                          0.000000f
+#define HPF_Fs176400_Gain2_A0 1.226443f
+#define HPF_Fs176400_Gain2_A1 (-0.975539f)
+#define HPF_Fs176400_Gain2_A2 0.000000f
+#define HPF_Fs176400_Gain2_B1 (-0.749096f)
+#define HPF_Fs176400_Gain2_B2 0.000000f
 /* Gain = 3.000000 dB */
-#define HPF_Fs176400_Gain3_A0                          1.360784f
-#define HPF_Fs176400_Gain3_A1                          (-1.109880f)
-#define HPF_Fs176400_Gain3_A2                          0.000000f
-#define HPF_Fs176400_Gain3_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain3_B2                          0.000000f
+#define HPF_Fs176400_Gain3_A0 1.360784f
+#define HPF_Fs176400_Gain3_A1 (-1.109880f)
+#define HPF_Fs176400_Gain3_A2 0.000000f
+#define HPF_Fs176400_Gain3_B1 (-0.749096f)
+#define HPF_Fs176400_Gain3_B2 0.000000f
 /* Gain = 4.000000 dB */
-#define HPF_Fs176400_Gain4_A0                          1.511517f
-#define HPF_Fs176400_Gain4_A1                          (-1.260613f)
-#define HPF_Fs176400_Gain4_A2                          0.000000f
-#define HPF_Fs176400_Gain4_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain4_B2                          0.000000f
+#define HPF_Fs176400_Gain4_A0 1.511517f
+#define HPF_Fs176400_Gain4_A1 (-1.260613f)
+#define HPF_Fs176400_Gain4_A2 0.000000f
+#define HPF_Fs176400_Gain4_B1 (-0.749096f)
+#define HPF_Fs176400_Gain4_B2 0.000000f
 /* Gain = 5.000000 dB */
-#define HPF_Fs176400_Gain5_A0                          1.680643f
-#define HPF_Fs176400_Gain5_A1                          (-1.429739f)
-#define HPF_Fs176400_Gain5_A2                          0.000000f
-#define HPF_Fs176400_Gain5_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain5_B2                          0.000000f
+#define HPF_Fs176400_Gain5_A0 1.680643f
+#define HPF_Fs176400_Gain5_A1 (-1.429739f)
+#define HPF_Fs176400_Gain5_A2 0.000000f
+#define HPF_Fs176400_Gain5_B1 (-0.749096f)
+#define HPF_Fs176400_Gain5_B2 0.000000f
 /* Gain = 6.000000 dB */
-#define HPF_Fs176400_Gain6_A0                          1.870405f
-#define HPF_Fs176400_Gain6_A1                          (-1.619501f)
-#define HPF_Fs176400_Gain6_A2                          0.000000f
-#define HPF_Fs176400_Gain6_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain6_B2                          0.000000f
+#define HPF_Fs176400_Gain6_A0 1.870405f
+#define HPF_Fs176400_Gain6_A1 (-1.619501f)
+#define HPF_Fs176400_Gain6_A2 0.000000f
+#define HPF_Fs176400_Gain6_B1 (-0.749096f)
+#define HPF_Fs176400_Gain6_B2 0.000000f
 /* Gain = 7.000000 dB */
-#define HPF_Fs176400_Gain7_A0                          2.083321f
-#define HPF_Fs176400_Gain7_A1                          (-1.832417f)
-#define HPF_Fs176400_Gain7_A2                          0.000000f
-#define HPF_Fs176400_Gain7_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain7_B2                          0.000000f
+#define HPF_Fs176400_Gain7_A0 2.083321f
+#define HPF_Fs176400_Gain7_A1 (-1.832417f)
+#define HPF_Fs176400_Gain7_A2 0.000000f
+#define HPF_Fs176400_Gain7_B1 (-0.749096f)
+#define HPF_Fs176400_Gain7_B2 0.000000f
 /* Gain = 8.000000 dB */
-#define HPF_Fs176400_Gain8_A0                          2.322217f
-#define HPF_Fs176400_Gain8_A1                          (-2.071313f)
-#define HPF_Fs176400_Gain8_A2                          0.000000f
-#define HPF_Fs176400_Gain8_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain8_B2                          0.000000f
+#define HPF_Fs176400_Gain8_A0 2.322217f
+#define HPF_Fs176400_Gain8_A1 (-2.071313f)
+#define HPF_Fs176400_Gain8_A2 0.000000f
+#define HPF_Fs176400_Gain8_B1 (-0.749096f)
+#define HPF_Fs176400_Gain8_B2 0.000000f
 /* Gain = 9.000000 dB */
-#define HPF_Fs176400_Gain9_A0                          2.590263f
-#define HPF_Fs176400_Gain9_A1                          (-2.339359f)
-#define HPF_Fs176400_Gain9_A2                          0.000000f
-#define HPF_Fs176400_Gain9_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain9_B2                          0.000000f
+#define HPF_Fs176400_Gain9_A0 2.590263f
+#define HPF_Fs176400_Gain9_A1 (-2.339359f)
+#define HPF_Fs176400_Gain9_A2 0.000000f
+#define HPF_Fs176400_Gain9_B1 (-0.749096f)
+#define HPF_Fs176400_Gain9_B2 0.000000f
 /* Gain = 10.000000 dB */
-#define HPF_Fs176400_Gain10_A0                          2.891016f
-#define HPF_Fs176400_Gain10_A1                          (-2.640112f)
-#define HPF_Fs176400_Gain10_A2                          0.000000f
-#define HPF_Fs176400_Gain10_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain10_B2                          0.000000f
+#define HPF_Fs176400_Gain10_A0 2.891016f
+#define HPF_Fs176400_Gain10_A1 (-2.640112f)
+#define HPF_Fs176400_Gain10_A2 0.000000f
+#define HPF_Fs176400_Gain10_B1 (-0.749096f)
+#define HPF_Fs176400_Gain10_B2 0.000000f
 /* Gain = 11.000000 dB */
-#define HPF_Fs176400_Gain11_A0                          3.228465f
-#define HPF_Fs176400_Gain11_A1                          (-2.977561f)
-#define HPF_Fs176400_Gain11_A2                          0.000000f
-#define HPF_Fs176400_Gain11_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain11_B2                          0.000000f
+#define HPF_Fs176400_Gain11_A0 3.228465f
+#define HPF_Fs176400_Gain11_A1 (-2.977561f)
+#define HPF_Fs176400_Gain11_A2 0.000000f
+#define HPF_Fs176400_Gain11_B1 (-0.749096f)
+#define HPF_Fs176400_Gain11_B2 0.000000f
 /* Gain = 12.000000 dB */
-#define HPF_Fs176400_Gain12_A0                          3.607090f
-#define HPF_Fs176400_Gain12_A1                          (-3.356186f)
-#define HPF_Fs176400_Gain12_A2                          0.000000f
-#define HPF_Fs176400_Gain12_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain12_B2                          0.000000f
+#define HPF_Fs176400_Gain12_A0 3.607090f
+#define HPF_Fs176400_Gain12_A1 (-3.356186f)
+#define HPF_Fs176400_Gain12_A2 0.000000f
+#define HPF_Fs176400_Gain12_B1 (-0.749096f)
+#define HPF_Fs176400_Gain12_B2 0.000000f
 /* Gain = 13.000000 dB */
-#define HPF_Fs176400_Gain13_A0                          4.031914f
-#define HPF_Fs176400_Gain13_A1                          (-3.781010f)
-#define HPF_Fs176400_Gain13_A2                          0.000000f
-#define HPF_Fs176400_Gain13_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain13_B2                          0.000000f
+#define HPF_Fs176400_Gain13_A0 4.031914f
+#define HPF_Fs176400_Gain13_A1 (-3.781010f)
+#define HPF_Fs176400_Gain13_A2 0.000000f
+#define HPF_Fs176400_Gain13_B1 (-0.749096f)
+#define HPF_Fs176400_Gain13_B2 0.000000f
 /* Gain = 14.000000 dB */
-#define HPF_Fs176400_Gain14_A0                          4.508575f
-#define HPF_Fs176400_Gain14_A1                          (-4.257671f)
-#define HPF_Fs176400_Gain14_A2                          0.000000f
-#define HPF_Fs176400_Gain14_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain14_B2                          0.000000f
+#define HPF_Fs176400_Gain14_A0 4.508575f
+#define HPF_Fs176400_Gain14_A1 (-4.257671f)
+#define HPF_Fs176400_Gain14_A2 0.000000f
+#define HPF_Fs176400_Gain14_B1 (-0.749096f)
+#define HPF_Fs176400_Gain14_B2 0.000000f
 /* Gain = 15.000000 dB */
-#define HPF_Fs176400_Gain15_A0                          5.043397f
-#define HPF_Fs176400_Gain15_A1                          (-4.792493f)
-#define HPF_Fs176400_Gain15_A2                          0.000000f
-#define HPF_Fs176400_Gain15_B1                          (-0.749096f)
-#define HPF_Fs176400_Gain15_B2                          0.000000f
+#define HPF_Fs176400_Gain15_A0 5.043397f
+#define HPF_Fs176400_Gain15_A1 (-4.792493f)
+#define HPF_Fs176400_Gain15_A2 0.000000f
+#define HPF_Fs176400_Gain15_B1 (-0.749096f)
+#define HPF_Fs176400_Gain15_B2 0.000000f
 
 /* Coefficients for sample rate 192000Hz */
-                                                                  /* Gain =  1.000000 dB */
-#define HPF_Fs192000_Gain1_A0                          1.107823
-#define HPF_Fs192000_Gain1_A1                          (-0.875150)
-#define HPF_Fs192000_Gain1_A2                          0.000000
-#define HPF_Fs192000_Gain1_B1                          (-0.767327)
-#define HPF_Fs192000_Gain1_B2                          0.000000
-                                                                  /* Gain =  2.000000 dB */
-#define HPF_Fs192000_Gain2_A0                          1.228803
-#define HPF_Fs192000_Gain2_A1                          (-0.996130)
-#define HPF_Fs192000_Gain2_A2                          0.000000
-#define HPF_Fs192000_Gain2_B1                          (-0.767327)
-#define HPF_Fs192000_Gain2_B2                          0.000000
-                                                                   /* Gain =  3.000000 dB */
-#define HPF_Fs192000_Gain3_A0                          1.364544
-#define HPF_Fs192000_Gain3_A1                          (-1.131871)
-#define HPF_Fs192000_Gain3_A2                          0.000000
-#define HPF_Fs192000_Gain3_B1                          (-0.767327)
-#define HPF_Fs192000_Gain3_B2                          0.000000
-                                                                   /* Gain =  4.000000 dB */
-#define HPF_Fs192000_Gain4_A0                          1.516849
-#define HPF_Fs192000_Gain4_A1                          (-1.284176)
-#define HPF_Fs192000_Gain4_A2                           0.000000
-#define HPF_Fs192000_Gain4_B1                           (-0.767327)
-#define HPF_Fs192000_Gain4_B2                           0.000000
-                                                                   /* Gain =  5.000000 dB */
-#define HPF_Fs192000_Gain5_A0                           1.687737
-#define HPF_Fs192000_Gain5_A1                           (-1.455064)
-#define HPF_Fs192000_Gain5_A2                           0.000000
-#define HPF_Fs192000_Gain5_B1                           (-0.767327)
-#define HPF_Fs192000_Gain5_B2                           0.000000
-                                                                   /* Gain =  6.000000 dB */
-#define HPF_Fs192000_Gain6_A0                            1.879477
-#define HPF_Fs192000_Gain6_A1                            (-1.646804)
-#define HPF_Fs192000_Gain6_A2                            0.000000
-#define HPF_Fs192000_Gain6_B1                            (-0.767327)
-#define HPF_Fs192000_Gain6_B2                            0.000000
-                                                                 /* Gain =  7.000000 dB */
-#define HPF_Fs192000_Gain7_A0                            2.094613
-#define HPF_Fs192000_Gain7_A1                            (-1.861940)
-#define HPF_Fs192000_Gain7_A2                            0.000000
-#define HPF_Fs192000_Gain7_B1                            (-0.767327)
-#define HPF_Fs192000_Gain7_B2                            0.000000
-                                                                   /* Gain =  8.000000 dB */
-#define HPF_Fs192000_Gain8_A0                           2.335999
-#define HPF_Fs192000_Gain8_A1                           (-2.103326)
-#define HPF_Fs192000_Gain8_A2                           0.000000
-#define HPF_Fs192000_Gain8_B1                           (-0.767327)
-#define HPF_Fs192000_Gain8_B2                           0.000000
-                                                                   /* Gain =  9.000000 dB */
-#define HPF_Fs192000_Gain9_A0                          2.606839
-#define HPF_Fs192000_Gain9_A1                          (-2.374166)
-#define HPF_Fs192000_Gain9_A2                          0.000000
-#define HPF_Fs192000_Gain9_B1                          (-0.767327)
-#define HPF_Fs192000_Gain9_B2                          0.000000
-                                                                 /* Gain =  10.000000 dB */
-#define HPF_Fs192000_Gain10_A0                        2.910726
-#define HPF_Fs192000_Gain10_A1                        (-2.678053)
-#define HPF_Fs192000_Gain10_A2                         0.000000
-#define HPF_Fs192000_Gain10_B1                         (-0.767327)
-#define HPF_Fs192000_Gain10_B2                         0.000000
-                                                                  /* Gain =  11.000000 dB */
-#define HPF_Fs192000_Gain11_A0                          3.251693
-#define HPF_Fs192000_Gain11_A1                          (-3.019020)
-#define HPF_Fs192000_Gain11_A2                          0.000000
-#define HPF_Fs192000_Gain11_B1                          (-0.767327)
-#define HPF_Fs192000_Gain11_B2                          0.000000
-                                                                  /* Gain =  12.000000 dB */
-#define HPF_Fs192000_Gain12_A0                          3.634264
-#define HPF_Fs192000_Gain12_A1                          (-3.401591)
-#define HPF_Fs192000_Gain12_A2                          0.000000
-#define HPF_Fs192000_Gain12_B1                          (-0.767327)
-#define HPF_Fs192000_Gain12_B2                          0.000000
-                                                                /* Gain =  13.000000 dB */
-#define HPF_Fs192000_Gain13_A0                           4.063516
-#define HPF_Fs192000_Gain13_A1                           (-3.830843)
-#define HPF_Fs192000_Gain13_A2                           0.000000
-#define HPF_Fs192000_Gain13_B1                           (-0.767327)
-#define HPF_Fs192000_Gain13_B2                           0.000000
-                                                                /* Gain =  14.000000 dB */
-#define HPF_Fs192000_Gain14_A0                          4.545145
-#define HPF_Fs192000_Gain14_A1                          (-4.312472)
-#define HPF_Fs192000_Gain14_A2                          0.000000
-#define HPF_Fs192000_Gain14_B1                          (-0.767327)
-#define HPF_Fs192000_Gain14_B2                          0.000000
-                                                                  /* Gain =  15.000000 dB */
-#define HPF_Fs192000_Gain15_A0                         5.085542
-#define HPF_Fs192000_Gain15_A1                         (-4.852868)
-#define HPF_Fs192000_Gain15_A2                         0.000000
-#define HPF_Fs192000_Gain15_B1                         (-0.767327)
-#define HPF_Fs192000_Gain15_B2                         0.000000
+/* Gain =  1.000000 dB */
+#define HPF_Fs192000_Gain1_A0 1.107823
+#define HPF_Fs192000_Gain1_A1 (-0.875150)
+#define HPF_Fs192000_Gain1_A2 0.000000
+#define HPF_Fs192000_Gain1_B1 (-0.767327)
+#define HPF_Fs192000_Gain1_B2 0.000000
+/* Gain =  2.000000 dB */
+#define HPF_Fs192000_Gain2_A0 1.228803
+#define HPF_Fs192000_Gain2_A1 (-0.996130)
+#define HPF_Fs192000_Gain2_A2 0.000000
+#define HPF_Fs192000_Gain2_B1 (-0.767327)
+#define HPF_Fs192000_Gain2_B2 0.000000
+/* Gain =  3.000000 dB */
+#define HPF_Fs192000_Gain3_A0 1.364544
+#define HPF_Fs192000_Gain3_A1 (-1.131871)
+#define HPF_Fs192000_Gain3_A2 0.000000
+#define HPF_Fs192000_Gain3_B1 (-0.767327)
+#define HPF_Fs192000_Gain3_B2 0.000000
+/* Gain =  4.000000 dB */
+#define HPF_Fs192000_Gain4_A0 1.516849
+#define HPF_Fs192000_Gain4_A1 (-1.284176)
+#define HPF_Fs192000_Gain4_A2 0.000000
+#define HPF_Fs192000_Gain4_B1 (-0.767327)
+#define HPF_Fs192000_Gain4_B2 0.000000
+/* Gain =  5.000000 dB */
+#define HPF_Fs192000_Gain5_A0 1.687737
+#define HPF_Fs192000_Gain5_A1 (-1.455064)
+#define HPF_Fs192000_Gain5_A2 0.000000
+#define HPF_Fs192000_Gain5_B1 (-0.767327)
+#define HPF_Fs192000_Gain5_B2 0.000000
+/* Gain =  6.000000 dB */
+#define HPF_Fs192000_Gain6_A0 1.879477
+#define HPF_Fs192000_Gain6_A1 (-1.646804)
+#define HPF_Fs192000_Gain6_A2 0.000000
+#define HPF_Fs192000_Gain6_B1 (-0.767327)
+#define HPF_Fs192000_Gain6_B2 0.000000
+/* Gain =  7.000000 dB */
+#define HPF_Fs192000_Gain7_A0 2.094613
+#define HPF_Fs192000_Gain7_A1 (-1.861940)
+#define HPF_Fs192000_Gain7_A2 0.000000
+#define HPF_Fs192000_Gain7_B1 (-0.767327)
+#define HPF_Fs192000_Gain7_B2 0.000000
+/* Gain =  8.000000 dB */
+#define HPF_Fs192000_Gain8_A0 2.335999
+#define HPF_Fs192000_Gain8_A1 (-2.103326)
+#define HPF_Fs192000_Gain8_A2 0.000000
+#define HPF_Fs192000_Gain8_B1 (-0.767327)
+#define HPF_Fs192000_Gain8_B2 0.000000
+/* Gain =  9.000000 dB */
+#define HPF_Fs192000_Gain9_A0 2.606839
+#define HPF_Fs192000_Gain9_A1 (-2.374166)
+#define HPF_Fs192000_Gain9_A2 0.000000
+#define HPF_Fs192000_Gain9_B1 (-0.767327)
+#define HPF_Fs192000_Gain9_B2 0.000000
+/* Gain =  10.000000 dB */
+#define HPF_Fs192000_Gain10_A0 2.910726
+#define HPF_Fs192000_Gain10_A1 (-2.678053)
+#define HPF_Fs192000_Gain10_A2 0.000000
+#define HPF_Fs192000_Gain10_B1 (-0.767327)
+#define HPF_Fs192000_Gain10_B2 0.000000
+/* Gain =  11.000000 dB */
+#define HPF_Fs192000_Gain11_A0 3.251693
+#define HPF_Fs192000_Gain11_A1 (-3.019020)
+#define HPF_Fs192000_Gain11_A2 0.000000
+#define HPF_Fs192000_Gain11_B1 (-0.767327)
+#define HPF_Fs192000_Gain11_B2 0.000000
+/* Gain =  12.000000 dB */
+#define HPF_Fs192000_Gain12_A0 3.634264
+#define HPF_Fs192000_Gain12_A1 (-3.401591)
+#define HPF_Fs192000_Gain12_A2 0.000000
+#define HPF_Fs192000_Gain12_B1 (-0.767327)
+#define HPF_Fs192000_Gain12_B2 0.000000
+/* Gain =  13.000000 dB */
+#define HPF_Fs192000_Gain13_A0 4.063516
+#define HPF_Fs192000_Gain13_A1 (-3.830843)
+#define HPF_Fs192000_Gain13_A2 0.000000
+#define HPF_Fs192000_Gain13_B1 (-0.767327)
+#define HPF_Fs192000_Gain13_B2 0.000000
+/* Gain =  14.000000 dB */
+#define HPF_Fs192000_Gain14_A0 4.545145
+#define HPF_Fs192000_Gain14_A1 (-4.312472)
+#define HPF_Fs192000_Gain14_A2 0.000000
+#define HPF_Fs192000_Gain14_B1 (-0.767327)
+#define HPF_Fs192000_Gain14_B2 0.000000
+/* Gain =  15.000000 dB */
+#define HPF_Fs192000_Gain15_A0 5.085542
+#define HPF_Fs192000_Gain15_A1 (-4.852868)
+#define HPF_Fs192000_Gain15_A2 0.000000
+#define HPF_Fs192000_Gain15_B1 (-0.767327)
+#define HPF_Fs192000_Gain15_B2 0.000000
 
 #endif
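
Every HPF_Fs<rate>_Gain<n> set above has A2 = B2 = 0, so each entry is a first-order treble shelf rather than a full biquad. The values line up with a recursion of the form y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1] (assumed here; it is the convention that makes the numbers match the commented gains: for HPF_Fs48000_Gain1 it gives exactly unity gain at DC and about +1.0 dB at Nyquist). The sketch below applies one coefficient set under that assumed convention; it is illustration only, not code from this patch, and applyFirstOrderShelf is a made-up helper name.

    #include <cstddef>
    #include <vector>

    // Illustrative helper (not part of this patch): applies one first-order
    // shelf in place, assuming y[n] = A0*x[n] + A1*x[n-1] - B1*y[n-1].
    static void applyFirstOrderShelf(std::vector<float>& samples,
                                     float A0, float A1, float B1) {
        float xPrev = 0.0f; /* x[n-1] */
        float yPrev = 0.0f; /* y[n-1] */
        for (size_t n = 0; n < samples.size(); ++n) {
            const float x = samples[n];
            const float y = A0 * x + A1 * xPrev - B1 * yPrev;
            xPrev = x;
            yPrev = y;
            samples[n] = y;
        }
    }

    // Example call using the 48 kHz, +1 dB entry from the table above:
    //   applyFirstOrderShelf(buf, 1.077357f, -0.345306f, -0.267949f);

As a quick sanity check on the assumed convention, a constant (DC) input passed through the 48 kHz Gain1 entry settles back to its original level, while an alternating Nyquist-rate input is boosted by roughly 12%, i.e. about 1 dB.
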
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
index ff2c90a..b2a35d8 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.cpp
@@ -50,78 +50,62 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t    *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
-    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
-    {
+    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     pInstance->NewParams = *pParams;
 
-    if(
-        /* General parameters */
-        ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON))                                         ||
-    ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) && (pParams->SampleRate != LVM_FS_12000)       &&
-     (pParams->SampleRate != LVM_FS_16000) && (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000)      &&
-     (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) && (pParams->SampleRate != LVM_FS_48000)      &&
-     (pParams->SampleRate != LVM_FS_88200) && (pParams->SampleRate != LVM_FS_96000) &&
-     (pParams->SampleRate != LVM_FS_176400) && (pParams->SampleRate != LVM_FS_192000))      ||
-#ifdef SUPPORT_MC
-        ((pParams->SourceFormat != LVM_STEREO) &&
-         (pParams->SourceFormat != LVM_MONOINSTEREO) &&
-         (pParams->SourceFormat != LVM_MONO) &&
-         (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
-#else
-        ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) && (pParams->SourceFormat != LVM_MONO)) ||
-#endif
-        (pParams->SpeakerType > LVM_EX_HEADPHONES))
-    {
+    if (
+            /* General parameters */
+            ((pParams->OperatingMode != LVM_MODE_OFF) && (pParams->OperatingMode != LVM_MODE_ON)) ||
+            ((pParams->SampleRate != LVM_FS_8000) && (pParams->SampleRate != LVM_FS_11025) &&
+             (pParams->SampleRate != LVM_FS_12000) && (pParams->SampleRate != LVM_FS_16000) &&
+             (pParams->SampleRate != LVM_FS_22050) && (pParams->SampleRate != LVM_FS_24000) &&
+             (pParams->SampleRate != LVM_FS_32000) && (pParams->SampleRate != LVM_FS_44100) &&
+             (pParams->SampleRate != LVM_FS_48000) && (pParams->SampleRate != LVM_FS_88200) &&
+             (pParams->SampleRate != LVM_FS_96000) && (pParams->SampleRate != LVM_FS_176400) &&
+             (pParams->SampleRate != LVM_FS_192000)) ||
+            ((pParams->SourceFormat != LVM_STEREO) && (pParams->SourceFormat != LVM_MONOINSTEREO) &&
+             (pParams->SourceFormat != LVM_MONO) && (pParams->SourceFormat != LVM_MULTICHANNEL)) ||
+            (pParams->SpeakerType > LVM_EX_HEADPHONES)) {
         return (LVM_OUTOFRANGE);
     }
 
-#ifdef SUPPORT_MC
     pInstance->Params.NrChannels = pParams->NrChannels;
-    pInstance->Params.ChMask     = pParams->ChMask;
-#endif
+    pInstance->Params.ChMask = pParams->ChMask;
     /*
      * Cinema Sound parameters
      */
-    if((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) && (pParams->VirtualizerOperatingMode != LVM_MODE_ON))
-    {
+    if ((pParams->VirtualizerOperatingMode != LVM_MODE_OFF) &&
+        (pParams->VirtualizerOperatingMode != LVM_MODE_ON)) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->VirtualizerType != LVM_CONCERTSOUND)
-    {
+    if (pParams->VirtualizerType != LVM_CONCERTSOUND) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL)
-    {
+    if (pParams->VirtualizerReverbLevel > LVM_VIRTUALIZER_MAX_REVERB_LEVEL) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL)
-    {
+    if (pParams->CS_EffectLevel < LVM_CS_MIN_EFFECT_LEVEL) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
      * N-Band Equalizer
      */
-    if(pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)
-    {
+    if (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands) {
         return (LVM_OUTOFRANGE);
     }
 
     /* Definition pointer */
-    if ((pParams->pEQNB_BandDefinition == LVM_NULL) &&
-        (pParams->EQNB_NBands != 0))
-    {
+    if ((pParams->pEQNB_BandDefinition == LVM_NULL) && (pParams->EQNB_NBands != 0)) {
         return (LVM_NULLADDRESS);
     }
 
@@ -129,35 +113,31 @@
      * Copy the filter definitions for the Equaliser
      */
     {
-        LVM_INT16           i;
+        LVM_INT16 i;
 
-        if (pParams->EQNB_NBands != 0)
-        {
-            for (i=0; i<pParams->EQNB_NBands; i++)
-            {
+        if (pParams->EQNB_NBands != 0) {
+            for (i = 0; i < pParams->EQNB_NBands; i++) {
                 pInstance->pEQNB_BandDefs[i] = pParams->pEQNB_BandDefinition[i];
             }
             pInstance->NewParams.pEQNB_BandDefinition = pInstance->pEQNB_BandDefs;
         }
     }
-    if( /* N-Band Equaliser parameters */
-        ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) && (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
-        (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands))
-    {
+    if (/* N-Band Equaliser parameters */
+        ((pParams->EQNB_OperatingMode != LVM_EQNB_OFF) &&
+         (pParams->EQNB_OperatingMode != LVM_EQNB_ON)) ||
+        (pParams->EQNB_NBands > pInstance->InstParams.EQNB_NumBands)) {
         return (LVM_OUTOFRANGE);
     }
     /* Band parameters*/
     {
         LVM_INT16 i;
-        for(i = 0; i < pParams->EQNB_NBands; i++)
-        {
-            if(((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ)  ||
-                (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
-                ((pParams->pEQNB_BandDefinition[i].Gain     < LVM_EQNB_MIN_BAND_GAIN)  ||
-                (pParams->pEQNB_BandDefinition[i].Gain      > LVM_EQNB_MAX_BAND_GAIN)) ||
-                ((pParams->pEQNB_BandDefinition[i].QFactor  < LVM_EQNB_MIN_QFACTOR)     ||
-                (pParams->pEQNB_BandDefinition[i].QFactor   > LVM_EQNB_MAX_QFACTOR)))
-            {
+        for (i = 0; i < pParams->EQNB_NBands; i++) {
+            if (((pParams->pEQNB_BandDefinition[i].Frequency < LVM_EQNB_MIN_BAND_FREQ) ||
+                 (pParams->pEQNB_BandDefinition[i].Frequency > LVM_EQNB_MAX_BAND_FREQ)) ||
+                ((pParams->pEQNB_BandDefinition[i].Gain < LVM_EQNB_MIN_BAND_GAIN) ||
+                 (pParams->pEQNB_BandDefinition[i].Gain > LVM_EQNB_MAX_BAND_GAIN)) ||
+                ((pParams->pEQNB_BandDefinition[i].QFactor < LVM_EQNB_MIN_QFACTOR) ||
+                 (pParams->pEQNB_BandDefinition[i].QFactor > LVM_EQNB_MAX_QFACTOR))) {
                 return (LVM_OUTOFRANGE);
             }
         }
@@ -166,24 +146,25 @@
     /*
      * Bass Enhancement parameters
      */
-    if(((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON))                      ||
-        ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL ) || (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL ))||
-        ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz)           &&
-        (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) && (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz))           ||
-        ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON)))
-    {
+    if (((pParams->BE_OperatingMode != LVM_BE_OFF) && (pParams->BE_OperatingMode != LVM_BE_ON)) ||
+        ((pParams->BE_EffectLevel < LVM_BE_MIN_EFFECTLEVEL) ||
+         (pParams->BE_EffectLevel > LVM_BE_MAX_EFFECTLEVEL)) ||
+        ((pParams->BE_CentreFreq != LVM_BE_CENTRE_55Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_66Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_78Hz) &&
+         (pParams->BE_CentreFreq != LVM_BE_CENTRE_90Hz)) ||
+        ((pParams->BE_HPF != LVM_BE_HPF_OFF) && (pParams->BE_HPF != LVM_BE_HPF_ON))) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
      * Volume Control parameters
      */
-    if((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL ) || (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL ))
-    {
+    if ((pParams->VC_EffectLevel < LVM_VC_MIN_EFFECTLEVEL) ||
+        (pParams->VC_EffectLevel > LVM_VC_MAX_EFFECTLEVEL)) {
         return (LVM_OUTOFRANGE);
     }
-    if((pParams->VC_Balance < LVM_VC_BALANCE_MIN ) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX ))
-    {
+    if ((pParams->VC_Balance < LVM_VC_BALANCE_MIN) || (pParams->VC_Balance > LVM_VC_BALANCE_MAX)) {
         return (LVM_OUTOFRANGE);
     }
 
@@ -191,22 +172,21 @@
      * PSA parameters
      */
     if (((LVPSA_LevelDetectSpeed_en)pParams->PSA_PeakDecayRate > LVPSA_SPEED_HIGH) ||
-        (pParams->PSA_Enable > LVM_PSA_ON))
-    {
+        (pParams->PSA_Enable > LVM_PSA_ON)) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
-    * Set the flag to indicate there are new parameters to use
-    *
-    * Protect the copy of the new parameters from interrupts to avoid possible problems
-    * with loss control parameters. This problem can occur if this control function is called more
-    * than once before a call to the process function. If the process function interrupts
-    * the copy to NewParams then one frame may have mixed parameters, some old and some new.
-    */
+     * Set the flag to indicate there are new parameters to use
+     *
+     * Protect the copy of the new parameters from interrupts to avoid possible problems
+     * with loss of control parameters. This problem can occur if this control function is called more
+     * than once before a call to the process function. If the process function interrupts
+     * the copy to NewParams then one frame may have mixed parameters, some old and some new.
+     */
     pInstance->ControlPending = LVM_TRUE;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -230,16 +210,13 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t           hInstance,
-                                             LVM_ControlParams_t    *pParams)
-{
-    LVM_Instance_t    *pInstance =(LVM_Instance_t  *)hInstance;
+LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, LVM_ControlParams_t* pParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
 
     /*
      * Check pointer
      */
-    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL))
-    {
+    if ((pParams == LVM_NULL) || (hInstance == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
     *pParams = pInstance->NewParams;
@@ -248,17 +225,16 @@
      * Copy the filter definitions for the Equaliser
      */
     {
-        LVM_INT16           i;
+        LVM_INT16 i;
 
         if (pInstance->NewParams.EQNB_NBands != 0)
-        for (i=0; i<pInstance->NewParams.EQNB_NBands; i++)
-        {
-            pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
-        }
+            for (i = 0; i < pInstance->NewParams.EQNB_NBands; i++) {
+                pInstance->pEQNB_UserDefs[i] = pInstance->pEQNB_BandDefs[i];
+            }
         pParams->pEQNB_BandDefinition = pInstance->pEQNB_UserDefs;
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
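
A minimal caller of the two entry points above might look like the sketch below. It assumes hInstance was obtained from the bundle's instance-creation API (not part of this hunk), and it changes only one field, relying on LVM_GetControlParameters() to supply valid values for everything else; invalid combinations are rejected by the checks in LVM_SetControlParameters() with LVM_OUTOFRANGE before ControlPending is set.

    /* Hypothetical helper, shown only to illustrate the Set/Get round trip. */
    LVM_ReturnStatus_en updateSampleRate(LVM_Handle_t hInstance) {
        LVM_ControlParams_t params;

        /* Read the currently pending parameters. */
        LVM_ReturnStatus_en status = LVM_GetControlParameters(hInstance, &params);
        if (status != LVM_SUCCESS) {
            return status; /* LVM_NULLADDRESS if either argument was LVM_NULL */
        }

        /* Change only the field of interest. */
        params.SampleRate = LVM_FS_48000;

        /* Hand the full block back; out-of-range values return LVM_OUTOFRANGE. */
        return LVM_SetControlParameters(hInstance, &params);
    }
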
@@ -274,56 +250,42 @@
 /*  pParams                 Pointer to the parameters to use                            */
 /*                                                                                      */
 /****************************************************************************************/
-void LVM_SetTrebleBoost(LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams)
-{
-    extern FO_FLOAT_LShx_Coefs_t  LVM_TrebleBoostCoefs[];
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
 
-    LVM_INT16               Offset;
-    LVM_INT16               EffectLevel = 0;
+    LVM_INT16 Offset;
+    LVM_INT16 EffectLevel = 0;
 
     /*
      * Load the coefficients
      */
-    if ((pParams->TE_OperatingMode == LVM_TE_ON) &&
-        (pParams->SampleRate >= TrebleBoostMinRate) &&
-        (pParams->OperatingMode == LVM_MODE_ON) &&
-        (pParams->TE_EffectLevel > 0))
-    {
-        if((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
-            ((pParams->SpeakerType == LVM_HEADPHONES)||
-            (pParams->SpeakerType == LVM_EX_HEADPHONES)))
-        {
+    if ((pParams->TE_OperatingMode == LVM_TE_ON) && (pParams->SampleRate >= TrebleBoostMinRate) &&
+        (pParams->OperatingMode == LVM_MODE_ON) && (pParams->TE_EffectLevel > 0)) {
+        if ((pParams->TE_EffectLevel == LVM_TE_LOW_MIPS) &&
+            ((pParams->SpeakerType == LVM_HEADPHONES) ||
+             (pParams->SpeakerType == LVM_EX_HEADPHONES))) {
             pInstance->TE_Active = LVM_FALSE;
-        }
-        else
-        {
+        } else {
             EffectLevel = pParams->TE_EffectLevel;
             pInstance->TE_Active = LVM_TRUE;
         }
 
-        if(pInstance->TE_Active == LVM_TRUE)
-        {
+        if (pInstance->TE_Active == LVM_TRUE) {
             /*
              * Load the coefficients and enabled the treble boost
              */
-            Offset = (LVM_INT16)(EffectLevel - 1 + TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
-            FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(&pInstance->pTE_State->TrebleBoost_State,
-                                            &pInstance->pTE_Taps->TrebleBoost_Taps,
-                                            &LVM_TrebleBoostCoefs[Offset]);
-
+            Offset = (LVM_INT16)(EffectLevel - 1 +
+                                 TrebleBoostSteps * (pParams->SampleRate - TrebleBoostMinRate));
             /*
-             * Clear the taps
+             * Create biquad instance
              */
-            LoadConst_Float((LVM_FLOAT)0,                                     /* Value */
-                            (LVM_FLOAT *)&pInstance->pTE_Taps->TrebleBoost_Taps,  /* Destination.\
-                                                     Cast to void: no dereferencing in function */
-                            (LVM_UINT16)(sizeof(pInstance->pTE_Taps->TrebleBoost_Taps) / \
-                                                        sizeof(LVM_FLOAT))); /* Number of words */
+            std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                    LVM_TrebleBoostCoefs[Offset].A0, LVM_TrebleBoostCoefs[Offset].A1, 0.0,
+                    -(LVM_TrebleBoostCoefs[Offset].B1), 0.0};
+            pInstance->pTEBiquad.reset(
+                    new android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels, coefs));
         }
-    }
-    else
-    {
+    } else {
         /*
          * Disable the treble boost
          */
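
The Offset expression above implies that LVM_TrebleBoostCoefs[] stores TrebleBoostSteps consecutive effect-level entries per supported sample rate, starting at TrebleBoostMinRate (both constants are defined elsewhere in the bundle, not in this hunk); the selected entry is then unpacked as {A0, A1, 0, -B1, 0} into the audio_utils biquad coefficient array, as the added lines show. A standalone sketch of the same flat indexing, with assumed values purely for illustration:

    #include <cstdio>

    int main() {
        /* Assumed values for illustration only; the real constants live in the
         * bundle headers, outside this hunk. */
        const int TrebleBoostSteps = 15;  /* one entry per dB of boost, 1..15 */
        const int TrebleBoostMinRate = 8; /* enum index of the lowest boosted rate */

        const int SampleRate = 9;  /* assumed enum index, one step above the minimum */
        const int EffectLevel = 4; /* +4 dB treble boost */

        /* Same arithmetic as in LVM_SetTrebleBoost(): row = sample rate,
         * column = effect level - 1. */
        const int Offset =
                (EffectLevel - 1) + TrebleBoostSteps * (SampleRate - TrebleBoostMinRate);
        std::printf("Offset = %d\n", Offset); /* 3 + 15 * 1 = 18 */
        return 0;
    }
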
@@ -345,94 +307,76 @@
 /*  pParams             Initialisation parameters                                   */
 /*                                                                                  */
 /************************************************************************************/
-void    LVM_SetVolume(LVM_Instance_t         *pInstance,
-                      LVM_ControlParams_t    *pParams)
-{
-
-    LVM_UINT16      dBShifts;                                   /* 6dB shifts */
-    LVM_UINT16      dBOffset;                                   /* Table offset */
-    LVM_INT16       Volume = 0;                                 /* Required volume in dBs */
-    LVM_FLOAT        Temp;
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    LVM_UINT16 dBShifts;  /* 6dB shifts */
+    LVM_UINT16 dBOffset;  /* Table offset */
+    LVM_INT16 Volume = 0; /* Required volume in dBs */
+    LVM_FLOAT Temp;
 
     /*
      * Limit the gain to the maximum allowed
      */
-     if  (pParams->VC_EffectLevel > 0)
-     {
-         Volume = 0;
-     }
-     else
-     {
-         Volume = pParams->VC_EffectLevel;
-     }
+    if (pParams->VC_EffectLevel > 0) {
+        Volume = 0;
+    } else {
+        Volume = pParams->VC_EffectLevel;
+    }
 
-     /* Compensate this volume in PSA plot */
-     if(Volume > -60)  /* Limit volume loss to PSA Limits*/
-         pInstance->PSA_GainOffset=(LVM_INT16)(-Volume);/* Loss is compensated by Gain*/
-     else
-         pInstance->PSA_GainOffset=(LVM_INT16)60;/* Loss is compensated by Gain*/
+    /* Compensate this volume in PSA plot */
+    if (Volume > -60)                                     /* Limit volume loss to PSA Limits*/
+        pInstance->PSA_GainOffset = (LVM_INT16)(-Volume); /* Loss is compensated by Gain*/
+    else
+        pInstance->PSA_GainOffset = (LVM_INT16)60; /* Loss is compensated by Gain*/
 
     pInstance->VC_AVLFixedVolume = 0;
 
     /*
      * Set volume control and AVL volumes according to headroom and volume user setting
      */
-    if(pParams->OperatingMode == LVM_MODE_ON)
-    {
+    if (pParams->OperatingMode == LVM_MODE_ON) {
         /* Default Situation with no AVL and no RS */
-        if(pParams->EQNB_OperatingMode == LVM_EQNB_ON)
-        {
-            if(Volume > -pInstance->Headroom)
-                Volume = (LVM_INT16)-pInstance->Headroom;
+        if (pParams->EQNB_OperatingMode == LVM_EQNB_ON) {
+            if (Volume > -pInstance->Headroom) Volume = (LVM_INT16)-pInstance->Headroom;
         }
     }
 
     /*
      * Activate volume control if necessary
      */
-    pInstance->VC_Active   = LVM_TRUE;
-    if (Volume != 0)
-    {
+    pInstance->VC_Active = LVM_TRUE;
+    if (Volume != 0) {
         pInstance->VC_VolumedB = Volume;
-    }
-    else
-    {
+    } else {
         pInstance->VC_VolumedB = 0;
     }
 
     /*
      * Calculate the required gain and shifts
      */
-    dBOffset = (LVM_UINT16)((-Volume) % 6);             /* Get the dBs 0-5 */
-    dBShifts = (LVM_UINT16)(Volume / -6);               /* Get the 6dB shifts */
+    dBOffset = (LVM_UINT16)((-Volume) % 6); /* Get the dBs 0-5 */
+    dBShifts = (LVM_UINT16)(Volume / -6);   /* Get the 6dB shifts */
 
     /*
      * Set the parameters
      */
-    if(dBShifts == 0)
-    {
+    if (dBShifts == 0) {
         LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0],
-                                (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
-        }
-    else
-    {
+                            (LVM_FLOAT)LVM_VolumeTable[dBOffset]);
+    } else {
         Temp = LVM_VolumeTable[dBOffset];
-        while(dBShifts) {
+        while (dBShifts) {
             Temp = Temp / 2.0f;
             dBShifts--;
         }
         LVC_Mixer_SetTarget(&pInstance->VC_Volume.MixerStream[0], Temp);
     }
     pInstance->VC_Volume.MixerStream[0].CallbackSet = 1;
-    if(pInstance->NoSmoothVolume == LVM_TRUE)
-    {
+    if (pInstance->NoSmoothVolume == LVM_TRUE) {
         LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0,
                                   pInstance->Params.SampleRate, 2);
-    }
-    else
-    {
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],
-                                           LVM_VC_MIXER_TIME, pInstance->Params.SampleRate, 2);
+    } else {
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], LVM_VC_MIXER_TIME,
+                                           pInstance->Params.SampleRate, 2);
     }
 }
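
    LVM_SetVolume only ever attenuates: a positive VC_EffectLevel is clamped to 0 dB,
    and the (non-positive) volume is split into whole 6 dB halvings plus a 0-5 dB
    remainder looked up in LVM_VolumeTable. A worked sketch of that decomposition,
    using a stand-in table (the real LVM_VolumeTable values are not reproduced here):

    #include <cstdint>

    // Stand-in for LVM_VolumeTable[0..5]: linear gains for 0 .. -5 dB.
    static const float kVolumeTable[6] = {1.000f, 0.891f, 0.794f, 0.708f, 0.631f, 0.562f};

    // Mirrors the arithmetic in LVM_SetVolume; Volume is 0 or negative, in dB.
    float volumeToLinearGain(int16_t Volume) {
        uint16_t dBOffset = (uint16_t)((-Volume) % 6);  // residual 0..5 dB
        uint16_t dBShifts = (uint16_t)(Volume / -6);    // whole 6 dB steps
        float gain = kVolumeTable[dBOffset];
        while (dBShifts-- > 0) gain /= 2.0f;            // each 6 dB step halves the gain
        return gain;                                    // e.g. -15 dB -> kVolumeTable[3] / 4
    }
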
 
@@ -453,43 +397,39 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-void    LVM_SetHeadroom(LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams)
-{
-    LVM_INT16   ii, jj;
-    LVM_INT16   Headroom = 0;
-    LVM_INT16   MaxGain = 0;
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams) {
+    LVM_INT16 ii, jj;
+    LVM_INT16 Headroom = 0;
+    LVM_INT16 MaxGain = 0;
 
-    if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON)
-           && (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON))
-    {
+    if (((LVEQNB_Mode_en)pParams->EQNB_OperatingMode == LVEQNB_ON) &&
+        (pInstance->HeadroomParams.Headroom_OperatingMode == LVM_HEADROOM_ON)) {
         /* Find typical headroom value */
-        for(jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++)
-        {
+        for (jj = 0; jj < pInstance->HeadroomParams.NHeadroomBands; jj++) {
             MaxGain = 0;
-            for( ii = 0; ii < pParams->EQNB_NBands; ii++)
-            {
-                if((pParams->pEQNB_BandDefinition[ii].Frequency >= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
-                   (pParams->pEQNB_BandDefinition[ii].Frequency <= pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High))
-                {
-                    if(pParams->pEQNB_BandDefinition[ii].Gain > MaxGain)
-                    {
+            for (ii = 0; ii < pParams->EQNB_NBands; ii++) {
+                if ((pParams->pEQNB_BandDefinition[ii].Frequency >=
+                     pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_Low) &&
+                    (pParams->pEQNB_BandDefinition[ii].Frequency <=
+                     pInstance->HeadroomParams.pHeadroomDefinition[jj].Limit_High)) {
+                    if (pParams->pEQNB_BandDefinition[ii].Gain > MaxGain) {
                         MaxGain = pParams->pEQNB_BandDefinition[ii].Gain;
                     }
                 }
             }
 
-            if((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) > Headroom){
-                Headroom = (LVM_INT16)(MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
+            if ((MaxGain - pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset) >
+                Headroom) {
+                Headroom = (LVM_INT16)(
+                        MaxGain -
+                        pInstance->HeadroomParams.pHeadroomDefinition[jj].Headroom_Offset);
             }
         }
 
         /* Saturate */
-        if(Headroom < 0)
-            Headroom = 0;
+        if (Headroom < 0) Headroom = 0;
     }
-    pInstance->Headroom = (LVM_UINT16)Headroom ;
-
+    pInstance->Headroom = (LVM_UINT16)Headroom;
 }
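
    LVM_SetHeadroom scans each headroom band, takes the largest equaliser gain whose
    centre frequency falls inside that band, subtracts the band's Headroom_Offset,
    and keeps the maximum over all bands, floored at zero. The same reduction
    restated on plain containers (the types and field names here are illustrative,
    not the LVM structures):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct EqBand       { int32_t frequencyHz; int16_t gainDb; };
    struct HeadroomBand { int32_t lowHz, highHz; int16_t offsetDb; };

    // Sketch only: same max-over-bands reduction as LVM_SetHeadroom.
    int16_t computeHeadroomDb(const std::vector<EqBand>& eq, const std::vector<HeadroomBand>& hr) {
        int16_t headroom = 0;
        for (const auto& band : hr) {
            int16_t maxGain = 0;
            for (const auto& b : eq) {
                if (b.frequencyHz >= band.lowHz && b.frequencyHz <= band.highHz) {
                    maxGain = std::max(maxGain, b.gainDb);
                }
            }
            headroom = std::max(headroom, (int16_t)(maxGain - band.offsetDb));
        }
        return std::max<int16_t>(headroom, 0);  // saturate at zero, as in the original
    }
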
 
 /****************************************************************************************/
@@ -510,32 +450,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t   hInstance)
-{
-    LVM_Instance_t         *pInstance =(LVM_Instance_t *)hInstance;
-    LVM_ControlParams_t    LocalParams;
-    LVM_INT16              Count = 5;
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_ControlParams_t LocalParams;
+    LVM_INT16 Count = 5;
 
     /*
      * Copy the new parameters but make sure they didn't change while copying
      */
-    do
-    {
+    do {
         pInstance->ControlPending = LVM_FALSE;
         LocalParams = pInstance->NewParams;
         pInstance->HeadroomParams = pInstance->NewHeadroomParams;
         Count--;
-    } while ((pInstance->ControlPending != LVM_FALSE) &&
-             (Count > 0));
+    } while ((pInstance->ControlPending != LVM_FALSE) && (Count > 0));
 
-#ifdef SUPPORT_MC
     pInstance->NrChannels = LocalParams.NrChannels;
     pInstance->ChMask = LocalParams.ChMask;
-#endif
 
     /* Clear all internal data if the format changes */
-    if(LocalParams.SourceFormat != pInstance->Params.SourceFormat)
-    {
+    if (LocalParams.SourceFormat != pInstance->Params.SourceFormat) {
         LVM_ClearAudioBuffers(pInstance);
         pInstance->ControlPending = LVM_FALSE;
     }
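
    The do/while at the top of LVM_ApplyNewSettings is a lightweight guard against a
    control call racing with the copy: ControlPending is cleared, the pending
    parameters are copied, and if a concurrent setter (for example
    LVM_SetHeadroomParams later in this file) re-raised the flag, the copy is
    retried, up to five times. A bare-bones sketch of the same pattern (not a
    formally race-free construct, and 'Params' here is a placeholder type):

    // Sketch only: copy-then-recheck-the-flag retry, as in LVM_ApplyNewSettings.
    struct Params { int volumeDb; /* ... */ };

    void applyPendingParams(volatile bool& pending, const Params& newParams, Params& active) {
        Params local;
        int count = 5;
        do {
            pending = false;    // cleared before copying
            local = newParams;  // a setter may race and re-raise 'pending'
            count--;
        } while (pending && count > 0);
        active = local;
    }
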
@@ -547,31 +481,27 @@
         (pInstance->Params.TE_EffectLevel != LocalParams.TE_EffectLevel) ||
         (pInstance->Params.TE_OperatingMode != LocalParams.TE_OperatingMode) ||
         (pInstance->Params.OperatingMode != LocalParams.OperatingMode) ||
-        (pInstance->Params.SpeakerType != LocalParams.SpeakerType))
-    {
-        LVM_SetTrebleBoost(pInstance,
-                           &LocalParams);
+        (pInstance->Params.SpeakerType != LocalParams.SpeakerType)) {
+        LVM_SetTrebleBoost(pInstance, &LocalParams);
     }
 
     /*
      * Update the headroom if required
      */
-        LVM_SetHeadroom(pInstance,                      /* Instance pointer */
-                        &LocalParams);                  /* New parameters */
+    LVM_SetHeadroom(pInstance,     /* Instance pointer */
+                    &LocalParams); /* New parameters */
 
     /*
      * Update the volume if required
      */
     {
-        LVM_SetVolume(pInstance,                      /* Instance pointer */
-                      &LocalParams);                  /* New parameters */
+        LVM_SetVolume(pInstance,     /* Instance pointer */
+                      &LocalParams); /* New parameters */
     }
     /* Apply balance changes*/
-    if(pInstance->Params.VC_Balance != LocalParams.VC_Balance)
-    {
+    if (pInstance->Params.VC_Balance != LocalParams.VC_Balance) {
         /* Configure Mixer module for gradual changes to volume*/
-        if(LocalParams.VC_Balance < 0)
-        {
+        if (LocalParams.VC_Balance < 0) {
             LVM_FLOAT Target_Float;
             /* Drop in right channel volume*/
             Target_Float = LVM_MAXFLOAT;
@@ -583,9 +513,7 @@
             LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
                                                LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
-        }
-        else if(LocalParams.VC_Balance >0)
-        {
+        } else if (LocalParams.VC_Balance > 0) {
             LVM_FLOAT Target_Float;
             /* Drop in left channel volume*/
             Target_Float = dB_to_LinFloat((LVM_INT16)((-LocalParams.VC_Balance) << 4));
@@ -597,63 +525,54 @@
             LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
                                                LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
-        }
-        else
-        {
+        } else {
             LVM_FLOAT Target_Float;
             /* No drop*/
             Target_Float = LVM_MAXFLOAT;
-            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0],Target_Float);
+            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[0], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],
-                                               LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+                                               LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
 
-            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1],Target_Float);
+            LVC_Mixer_SetTarget(&pInstance->VC_BalanceMix.MixerStream[1], Target_Float);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],
-                                               LVM_VC_MIXER_TIME,LocalParams.SampleRate, 1);
+                                               LVM_VC_MIXER_TIME, LocalParams.SampleRate, 1);
         }
     }
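
    The balance branch leaves the louder channel at full scale (LVM_MAXFLOAT) and
    attenuates the other by |VC_Balance| dB; the << 4 suggests dB_to_LinFloat
    consumes dB in 1/16-dB steps, although that helper is not shown in this hunk. An
    approximation of the attenuated channel's target gain, working directly in dB
    (assumption: dB_to_LinFloat is a plain dB-to-linear-amplitude conversion):

    #include <cmath>
    #include <cstdint>

    // Sketch only: linear gain applied to the channel being dropped by the balance.
    float balanceAttenuation(int16_t balanceDb) {
        int16_t dropDb = (int16_t)std::abs(balanceDb);  // positive balance drops the left,
                                                        // negative drops the right channel
        return std::pow(10.0f, -dropDb / 20.0f);        // e.g. 6 dB of balance -> gain ~0.5
    }
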
     /*
      * Update the bass enhancement
      */
     {
-        LVDBE_ReturnStatus_en       DBE_Status;
-        LVDBE_Params_t              DBE_Params;
-        LVDBE_Handle_t              *hDBEInstance = (LVDBE_Handle_t *)pInstance->hDBEInstance;
+        LVDBE_ReturnStatus_en DBE_Status;
+        LVDBE_Params_t DBE_Params;
+        LVDBE_Handle_t* hDBEInstance = (LVDBE_Handle_t*)pInstance->hDBEInstance;
 
         /*
          * Set the new parameters
          */
-        if(LocalParams.OperatingMode == LVM_MODE_OFF)
-        {
+        if (LocalParams.OperatingMode == LVM_MODE_OFF) {
             DBE_Params.OperatingMode = LVDBE_OFF;
+        } else {
+            DBE_Params.OperatingMode = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
         }
-        else
-        {
-            DBE_Params.OperatingMode    = (LVDBE_Mode_en)LocalParams.BE_OperatingMode;
-        }
-        DBE_Params.SampleRate       = (LVDBE_Fs_en)LocalParams.SampleRate;
-        DBE_Params.EffectLevel      = LocalParams.BE_EffectLevel;
-        DBE_Params.CentreFrequency  = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
-        DBE_Params.HPFSelect        = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
-        DBE_Params.HeadroomdB       = 0;
-        DBE_Params.VolumeControl    = LVDBE_VOLUME_OFF;
-        DBE_Params.VolumedB         = 0;
-#ifdef SUPPORT_MC
-        DBE_Params.NrChannels         = LocalParams.NrChannels;
-#endif
+        DBE_Params.SampleRate = (LVDBE_Fs_en)LocalParams.SampleRate;
+        DBE_Params.EffectLevel = LocalParams.BE_EffectLevel;
+        DBE_Params.CentreFrequency = (LVDBE_CentreFreq_en)LocalParams.BE_CentreFreq;
+        DBE_Params.HPFSelect = (LVDBE_FilterSelect_en)LocalParams.BE_HPF;
+        DBE_Params.HeadroomdB = 0;
+        DBE_Params.VolumeControl = LVDBE_VOLUME_OFF;
+        DBE_Params.VolumedB = 0;
+        DBE_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Make the changes
          */
-        DBE_Status = LVDBE_Control(hDBEInstance,
-                                   &DBE_Params);
+        DBE_Status = LVDBE_Control(hDBEInstance, &DBE_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (DBE_Status != LVDBE_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)DBE_Status);
+        if (DBE_Status != LVDBE_SUCCESS) {
+            return ((LVM_ReturnStatus_en)DBE_Status);
         }
 
         /*
@@ -666,168 +585,132 @@
      * Update the N-Band Equaliser
      */
     {
-        LVEQNB_ReturnStatus_en      EQNB_Status;
-        LVEQNB_Params_t             EQNB_Params;
-        LVEQNB_Handle_t             *hEQNBInstance = (LVEQNB_Handle_t *)pInstance->hEQNBInstance;
+        LVEQNB_ReturnStatus_en EQNB_Status;
+        LVEQNB_Params_t EQNB_Params;
+        LVEQNB_Handle_t* hEQNBInstance = (LVEQNB_Handle_t*)pInstance->hEQNBInstance;
 
         /*
          * Set the new parameters
          */
 
-        if(LocalParams.OperatingMode == LVM_MODE_OFF)
-        {
-            EQNB_Params.OperatingMode    = LVEQNB_BYPASS;
-        }
-        else
-        {
-            EQNB_Params.OperatingMode    = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
+        if (LocalParams.OperatingMode == LVM_MODE_OFF) {
+            EQNB_Params.OperatingMode = LVEQNB_BYPASS;
+        } else {
+            EQNB_Params.OperatingMode = (LVEQNB_Mode_en)LocalParams.EQNB_OperatingMode;
         }
 
-        EQNB_Params.SampleRate       = (LVEQNB_Fs_en)LocalParams.SampleRate;
-        EQNB_Params.NBands           = LocalParams.EQNB_NBands;
-        EQNB_Params.pBandDefinition  = (LVEQNB_BandDef_t *)LocalParams.pEQNB_BandDefinition;
-        if (LocalParams.SourceFormat == LVM_STEREO)    /* Mono format not supported */
+        EQNB_Params.SampleRate = (LVEQNB_Fs_en)LocalParams.SampleRate;
+        EQNB_Params.NBands = LocalParams.EQNB_NBands;
+        EQNB_Params.pBandDefinition = (LVEQNB_BandDef_t*)LocalParams.pEQNB_BandDefinition;
+        if (LocalParams.SourceFormat == LVM_STEREO) /* Mono format not supported */
         {
             EQNB_Params.SourceFormat = LVEQNB_STEREO;
         }
-#ifdef SUPPORT_MC
         /* Note: Currently the SourceFormat field of EQNB is not being
          *       used by the module.
          */
-        else if (LocalParams.SourceFormat == LVM_MULTICHANNEL)
-        {
+        else if (LocalParams.SourceFormat == LVM_MULTICHANNEL) {
             EQNB_Params.SourceFormat = LVEQNB_MULTICHANNEL;
+        } else {
+            EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
         }
-#endif
-        else
-        {
-            EQNB_Params.SourceFormat = LVEQNB_MONOINSTEREO;     /* Force to Mono-in-Stereo mode */
-        }
-#ifdef SUPPORT_MC
-        EQNB_Params.NrChannels         = LocalParams.NrChannels;
-#endif
+        EQNB_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Set the control flag
          */
         if ((LocalParams.OperatingMode == LVM_MODE_ON) &&
-            (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON))
-        {
+            (LocalParams.EQNB_OperatingMode == LVM_EQNB_ON)) {
             pInstance->EQNB_Active = LVM_TRUE;
-        }
-        else
-        {
+        } else {
             EQNB_Params.OperatingMode = LVEQNB_BYPASS;
         }
 
         /*
          * Make the changes
          */
-        EQNB_Status = LVEQNB_Control(hEQNBInstance,
-                                     &EQNB_Params);
+        EQNB_Status = LVEQNB_Control(hEQNBInstance, &EQNB_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (EQNB_Status != LVEQNB_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)EQNB_Status);
+        if (EQNB_Status != LVEQNB_SUCCESS) {
+            return ((LVM_ReturnStatus_en)EQNB_Status);
         }
-
     }
 
     /*
      * Update concert sound
      */
     {
-        LVCS_ReturnStatus_en        CS_Status;
-        LVCS_Params_t               CS_Params;
-        LVCS_Handle_t               *hCSInstance = (LVCS_Handle_t *)pInstance->hCSInstance;
-        LVM_Mode_en                 CompressorMode=LVM_MODE_ON;
+        LVCS_ReturnStatus_en CS_Status;
+        LVCS_Params_t CS_Params;
+        LVCS_Handle_t* hCSInstance = (LVCS_Handle_t*)pInstance->hCSInstance;
+        LVM_Mode_en CompressorMode = LVM_MODE_ON;
 
         /*
          * Set the new parameters
          */
-        if(LocalParams.VirtualizerOperatingMode == LVM_MODE_ON)
-        {
-            CS_Params.OperatingMode    = LVCS_ON;
-        }
-        else
-        {
-            CS_Params.OperatingMode    = LVCS_OFF;
+        if (LocalParams.VirtualizerOperatingMode == LVM_MODE_ON) {
+            CS_Params.OperatingMode = LVCS_ON;
+        } else {
+            CS_Params.OperatingMode = LVCS_OFF;
         }
 
-        if((LocalParams.TE_OperatingMode == LVM_TE_ON) && (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS))
-        {
-            CS_Params.SpeakerType  = LVCS_EX_HEADPHONES;
-        }
-        else
-        {
-            CS_Params.SpeakerType  = LVCS_HEADPHONES;
+        if ((LocalParams.TE_OperatingMode == LVM_TE_ON) &&
+            (LocalParams.TE_EffectLevel == LVM_TE_LOW_MIPS)) {
+            CS_Params.SpeakerType = LVCS_EX_HEADPHONES;
+        } else {
+            CS_Params.SpeakerType = LVCS_HEADPHONES;
         }
 
-#ifdef SUPPORT_MC
         /* Concert sound module processes only the left and right channels
          * data. So the Source Format is set to LVCS_STEREO for multichannel
          * input also.
          */
         if (LocalParams.SourceFormat == LVM_STEREO ||
-            LocalParams.SourceFormat == LVM_MULTICHANNEL)
-#else
-        if (LocalParams.SourceFormat == LVM_STEREO)    /* Mono format not supported */
-#endif
-        {
+            LocalParams.SourceFormat == LVM_MULTICHANNEL) {
             CS_Params.SourceFormat = LVCS_STEREO;
+        } else {
+            CS_Params.SourceFormat = LVCS_MONOINSTEREO; /* Force to Mono-in-Stereo mode */
         }
-        else
-        {
-            CS_Params.SourceFormat = LVCS_MONOINSTEREO;          /* Force to Mono-in-Stereo mode */
-        }
-        CS_Params.SampleRate  = LocalParams.SampleRate;
+        CS_Params.SampleRate = LocalParams.SampleRate;
         CS_Params.ReverbLevel = LocalParams.VirtualizerReverbLevel;
         CS_Params.EffectLevel = LocalParams.CS_EffectLevel;
-#ifdef SUPPORT_MC
-        CS_Params.NrChannels  = LocalParams.NrChannels;
-#endif
+        CS_Params.NrChannels = LocalParams.NrChannels;
 
         /*
          * Set the control flag
          */
         if (((LVM_Mode_en)LocalParams.OperatingMode == LVM_MODE_ON) &&
-            ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF))
-        {
+            ((LVCS_Modes_en)LocalParams.VirtualizerOperatingMode != LVCS_OFF)) {
             pInstance->CS_Active = LVM_TRUE;
-        }
-        else
-        {
+        } else {
             CS_Params.OperatingMode = LVCS_OFF;
         }
 
-        CS_Params.CompressorMode=CompressorMode;
+        CS_Params.CompressorMode = CompressorMode;
 
         /*
          * Make the changes
          */
-        CS_Status = LVCS_Control(hCSInstance,
-                                 &CS_Params);
+        CS_Status = LVCS_Control(hCSInstance, &CS_Params);
 
         /*
          * Quit if the changes were not accepted
          */
-        if (CS_Status != LVCS_SUCCESS)
-        {
-            return((LVM_ReturnStatus_en)CS_Status);
+        if (CS_Status != LVCS_SUCCESS) {
+            return ((LVM_ReturnStatus_en)CS_Status);
         }
-
     }
 
     /*
      * Update the Power Spectrum Analyser
      */
     {
-        LVPSA_RETURN                PSA_Status;
-        LVPSA_ControlParams_t       PSA_Params;
-        pLVPSA_Handle_t             *hPSAInstance = (pLVPSA_Handle_t *)pInstance->hPSAInstance;
+        LVPSA_RETURN PSA_Status;
+        LVPSA_ControlParams_t PSA_Params;
+        pLVPSA_Handle_t* hPSAInstance = (pLVPSA_Handle_t*)pInstance->hPSAInstance;
 
         /*
          * Set the new parameters
@@ -838,23 +721,19 @@
         /*
          * Make the changes
          */
-        if(pInstance->InstParams.PSA_Included==LVM_PSA_ON)
-        {
-            PSA_Status = LVPSA_Control(hPSAInstance,
-                &PSA_Params);
+        if (pInstance->InstParams.PSA_Included == LVM_PSA_ON) {
+            PSA_Status = LVPSA_Control(hPSAInstance, &PSA_Params);
 
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en)PSA_Status);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)PSA_Status);
             }
 
             /*
              * Apply new settings
              */
-            PSA_Status = LVPSA_ApplyNewSettings ((LVPSA_InstancePr_t*)hPSAInstance);
-            if(PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en)PSA_Status);
+            PSA_Status = LVPSA_ApplyNewSettings((LVPSA_InstancePr_t*)hPSAInstance);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)PSA_Status);
             }
         }
     }
@@ -863,9 +742,9 @@
      * Update the parameters and clear the flag
      */
     pInstance->NoSmoothVolume = LVM_FALSE;
-    pInstance->Params =  LocalParams;
+    pInstance->Params = LocalParams;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -887,36 +766,30 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t              hInstance,
-                                          LVM_HeadroomParams_t      *pHeadroomParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
-    LVM_UINT16          ii, NBands;
+LVM_ReturnStatus_en LVM_SetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 ii, NBands;
 
     /* Check for NULL pointers */
-    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
-    if ((pHeadroomParams->NHeadroomBands != 0) && (pHeadroomParams->pHeadroomDefinition == LVM_NULL))
-    {
+    if ((pHeadroomParams->NHeadroomBands != 0) &&
+        (pHeadroomParams->pHeadroomDefinition == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /* Consider only the LVM_HEADROOM_MAX_NBANDS first bands*/
-    if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS)
-    {
+    if (pHeadroomParams->NHeadroomBands > LVM_HEADROOM_MAX_NBANDS) {
         NBands = LVM_HEADROOM_MAX_NBANDS;
-    }
-    else
-    {
+    } else {
         NBands = pHeadroomParams->NHeadroomBands;
     }
     pInstance->NewHeadroomParams.NHeadroomBands = NBands;
 
     /* Copy settings in memory */
-    for(ii = 0; ii < NBands; ii++)
-    {
+    for (ii = 0; ii < NBands; ii++) {
         pInstance->pHeadroom_BandDefs[ii] = pHeadroomParams->pHeadroomDefinition[ii];
     }
 
@@ -924,7 +797,7 @@
     pInstance->NewHeadroomParams.Headroom_OperatingMode = pHeadroomParams->Headroom_OperatingMode;
     pInstance->ControlPending = LVM_TRUE;
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -947,29 +820,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t          hInstance,
-                                          LVM_HeadroomParams_t  *pHeadroomParams)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)hInstance;
-    LVM_UINT16          ii;
+LVM_ReturnStatus_en LVM_GetHeadroomParams(LVM_Handle_t hInstance,
+                                          LVM_HeadroomParams_t* pHeadroomParams) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 ii;
 
     /* Check for NULL pointers */
-    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pHeadroomParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     pHeadroomParams->NHeadroomBands = pInstance->NewHeadroomParams.NHeadroomBands;
 
     /* Copy settings in memory */
-    for(ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++)
-    {
+    for (ii = 0; ii < pInstance->NewHeadroomParams.NHeadroomBands; ii++) {
         pInstance->pHeadroom_UserDefs[ii] = pInstance->pHeadroom_BandDefs[ii];
     }
 
     pHeadroomParams->pHeadroomDefinition = pInstance->pHeadroom_UserDefs;
     pHeadroomParams->Headroom_OperatingMode = pInstance->NewHeadroomParams.Headroom_OperatingMode;
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -988,18 +858,14 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVM_AlgoCallBack( void          *pBundleHandle,
-                            void          *pData,
-                            LVM_INT16     callbackId)
-{
-    LVM_Instance_t      *pInstance =(LVM_Instance_t  *)pBundleHandle;
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
 
-    (void) pData;
+    (void)pData;
 
-    switch(callbackId & 0xFF00){
+    switch (callbackId & 0xFF00) {
         case ALGORITHM_CS_ID:
-            switch(callbackId & 0x00FF)
-            {
+            switch (callbackId & 0x00FF) {
                 case LVCS_EVENT_ALGOFF:
                     pInstance->CS_Active = LVM_FALSE;
                     break;
@@ -1008,8 +874,7 @@
             }
             break;
         case ALGORITHM_EQNB_ID:
-            switch(callbackId & 0x00FF)
-            {
+            switch (callbackId & 0x00FF) {
                 case LVEQNB_EVENT_ALGOFF:
                     pInstance->EQNB_Active = LVM_FALSE;
                     break;
@@ -1040,21 +905,17 @@
 /*  1.  This function may be interrupted by the LVM_Process function                    */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32    LVM_VCCallBack(void*   pBundleHandle,
-                            void*   pGeneralPurpose,
-                            short   CallBackParam)
-{
-    LVM_Instance_t *pInstance =(LVM_Instance_t  *)pBundleHandle;
-    LVM_FLOAT    Target;
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)pBundleHandle;
+    LVM_FLOAT Target;
 
-    (void) pGeneralPurpose;
-    (void) CallBackParam;
+    (void)pGeneralPurpose;
+    (void)CallBackParam;
 
     /* When the volume mixer has reached the 0 dB target, stop it to avoid
        unnecessary processing. */
     Target = LVC_Mixer_GetTarget(&pInstance->VC_Volume.MixerStream[0]);
-    if(Target == 1.0f)
-    {
+    if (Target == 1.0f) {
         pInstance->VC_Active = LVM_FALSE;
     }
     return 1;
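
    LVM_AlgoCallBack routes events by splitting the 16-bit callback ID: the high
    byte identifies the algorithm (ALGORITHM_CS_ID, ALGORITHM_EQNB_ID) and the low
    byte the event within it, while LVM_VCCallBack simply parks the volume mixer
    once its target reaches unity. A tiny illustration of the ID split, with
    made-up numeric values (the real ALGORITHM_*_ID and *_EVENT_ALGOFF codes live
    in the bundle headers):

    #include <cstdint>

    constexpr uint16_t kAlgorithmCsId = 0x0100;  // hypothetical value
    constexpr uint16_t kCsEventAlgOff = 0x0001;  // hypothetical value

    bool isCsAlgOff(uint16_t callbackId) {
        return (callbackId & 0xFF00) == kAlgorithmCsId &&  // which algorithm raised it
               (callbackId & 0x00FF) == kCsEventAlgOff;    // which event within that algorithm
    }
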
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index 5620529..12b86f3 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -20,578 +20,95 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
+#include <stdlib.h>
 
 #include "LVM_Private.h"
 #include "LVM_Tables.h"
 #include "VectorArithmetic.h"
-#include "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVM_GetMemoryTable                                          */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilities                         */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVM_SUCCESS             Succeeded                                                   */
-/*  LVM_NULLADDRESS         When one of pMemoryTable or pInstParams is NULL             */
-/*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVM_Process function                    */
-/*  2.  The scratch memory is the largest required by any of the sub-modules plus any   */
-/*      additional scratch requirements of the bundle                                   */
-/*                                                                                      */
-/****************************************************************************************/
-
-/*
- * 4 Types of Memory Regions of LVM
- * TODO: Allocate on the fly.
- * i)   LVM_MEMREGION_PERSISTENT_SLOW_DATA - For Instance Handles
- * ii)  LVM_MEMREGION_PERSISTENT_FAST_DATA - Persistent Buffers
- * iii) LVM_MEMREGION_PERSISTENT_FAST_COEF - For Holding Structure values
- * iv)  LVM_MEMREGION_TEMPORARY_FAST       - For Holding Structure values
- *
- * LVM_MEMREGION_PERSISTENT_SLOW_DATA:
- *   Total Memory size:
- *     sizeof(LVM_Instance_t) + \
- *     sizeof(LVM_Buffer_t) + \
- *     sizeof(LVPSA_InstancePr_t) + \
- *     sizeof(LVM_Buffer_t) - needed if buffer mode is LVM_MANAGED_BUFFER
- *
- * LVM_MEMREGION_PERSISTENT_FAST_DATA:
- *   Total Memory size:
- *     sizeof(LVM_TE_Data_t) + \
- *     2 * pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t) + \
- *     sizeof(LVCS_Data_t) + \
- *     sizeof(LVDBE_Data_FLOAT_t) + \
- *     sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(LVEQNB_BandDef_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(LVEQNB_BiquadType_en) + \
- *     2 * LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t) + \
- *     PSA_InitParams.nBands * sizeof(Biquad_1I_Order2_Taps_t) + \
- *     PSA_InitParams.nBands * sizeof(QPD_Taps_t)
- *
- * LVM_MEMREGION_PERSISTENT_FAST_COEF:
- *   Total Memory size:
- *     sizeof(LVM_TE_Coefs_t) + \
- *     sizeof(LVCS_Coefficient_t) + \
- *     sizeof(LVDBE_Coef_FLOAT_t) + \
- *     sizeof(Biquad_FLOAT_Instance_t) + \
- *     sizeof(Biquad_FLOAT_Instance_t) + \
- *     pInstParams->EQNB_NumBands * sizeof(Biquad_FLOAT_Instance_t) + \
- *     PSA_InitParams.nBands * sizeof(Biquad_Instance_t) + \
- *     PSA_InitParams.nBands * sizeof(QPD_State_t)
- *
- * LVM_MEMREGION_TEMPORARY_FAST (Scratch):
- *   Total Memory Size:
- *     BundleScratchSize + \
- *     MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT) + \
- *     MaxScratchOf (CS, EQNB, DBE, PSA)
- *
- *     a)BundleScratchSize:
- *         3 * LVM_MAX_CHANNELS \
- *         * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) * sizeof(LVM_FLOAT)
- *       This Memory is allocated only when Buffer mode is LVM_MANAGED_BUFFER.
- *     b)MaxScratchOf (CS, EQNB, DBE, PSA)
- *       This Memory is needed for scratch usage for CS, EQNB, DBE, PSA.
- *       CS   = (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       EQNB = (LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       DBE  = (LVDBE_SCRATCHBUFFERS_INPLACE*sizeof(LVM_FLOAT)
- *               * pCapabilities->MaxBlockSize)
- *       PSA  = (2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT))
- *              one MaxInputBlockSize for input and another for filter output
- *     c)MAX_INTERNAL_BLOCKSIZE
- *       This Memory is needed for PSAInput - Temp memory to store output
- *       from McToMono block and given as input to PSA block
- */
-
-LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t         hInstance,
-                                       LVM_MemTab_t         *pMemoryTable,
-                                       LVM_InstParams_t     *pInstParams)
-{
-
-    LVM_Instance_t      *pInstance = (LVM_Instance_t *)hInstance;
-    LVM_UINT32          AlgScratchSize;
-    LVM_UINT32          BundleScratchSize;
-    LVM_UINT16          InternalBlockSize;
-    INST_ALLOC          AllocMem[LVM_NR_MEMORY_REGIONS];
-    LVM_INT16           i;
-
-    /*
-     * Check parameters
-     */
-    if(pMemoryTable == LVM_NULL)
-    {
-        return LVM_NULLADDRESS;
-    }
-
-    /*
-     * Return memory table if the instance has already been created
-     */
-    if (hInstance != LVM_NULL)
-    {
-       /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-        return(LVM_SUCCESS);
-    }
-
-    if(pInstParams == LVM_NULL)
-    {
-        return LVM_NULLADDRESS;
-    }
-
-    /*
-     *  Power Spectrum Analyser
-     */
-    if(pInstParams->PSA_Included > LVM_PSA_ON)
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    /*
-     * Check the instance parameters
-     */
-    if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    /* N-Band Equalizer */
-    if( pInstParams->EQNB_NumBands > 32 )
-    {
-        return (LVM_OUTOFRANGE);
-    }
-
-    if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
-        {
-            return (LVM_OUTOFRANGE);
-        }
-    }
-    else
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
-        {
-            return (LVM_OUTOFRANGE);
-        }
-    }
-
-    /*
-    * Initialise the AllocMem structures
-    */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        InstAlloc_Init(&AllocMem[i], LVM_NULL);
-    }
-    InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
-
-    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
-    {
-        InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
-    }
-
-    /* Maximum Internal Black Size should not be more than MAX_INTERNAL_BLOCKSIZE*/
-    if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
-    {
-        InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
-    }
-
-    /*
-    * Bundle requirements
-    */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-        sizeof(LVM_Instance_t));
-
-    /*
-     * Set the algorithm and bundle scratch requirements
-     */
-    AlgScratchSize    = 0;
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        BundleScratchSize = 3 * LVM_MAX_CHANNELS \
-                            * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
-                            * sizeof(LVM_FLOAT);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],        /* Scratch buffer */
-                            BundleScratchSize);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                            sizeof(LVM_Buffer_t));
-    }
-
-    /*
-     * Treble Enhancement requirements
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                        sizeof(LVM_TE_Data_t));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                        sizeof(LVM_TE_Coefs_t));
-
-    /*
-     * N-Band Equalizer requirements
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],      /* Local storage */
-                        (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],      /* User storage */
-                        (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-
-    /*
-     * Concert Sound requirements
-     */
-    {
-        LVCS_MemTab_t           CS_MemTab;
-        LVCS_Capabilities_t     CS_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        CS_Capabilities.MaxBlockSize     = InternalBlockSize;
-
-        /*
-         * Get the memory requirements
-         */
-        LVCS_Memory(LVM_NULL,
-                    &CS_MemTab,
-                    &CS_Capabilities);
-
-        /*
-         * Update the memory allocation structures
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            CS_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = CS_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * Dynamic Bass Enhancement requirements
-     */
-    {
-        LVDBE_MemTab_t          DBE_MemTab;
-        LVDBE_Capabilities_t    DBE_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        DBE_Capabilities.SampleRate      = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
-                                           LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
-                                           LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
-                                           LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
-                                           LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
-                                           LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
-                                           LVDBE_CAP_FS_192000;
-        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
-        DBE_Capabilities.MaxBlockSize    = InternalBlockSize;
-
-        /*
-         * Get the memory requirements
-         */
-        LVDBE_Memory(LVM_NULL,
-                    &DBE_MemTab,
-
-                    &DBE_Capabilities);
-        /*
-         * Update the bundle table
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            DBE_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = DBE_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * N-Band equaliser requirements
-     */
-    {
-        LVEQNB_MemTab_t         EQNB_MemTab;            /* For N-Band Equaliser */
-        LVEQNB_Capabilities_t   EQNB_Capabilities;
-
-        /*
-         * Set the capabilities
-         */
-        EQNB_Capabilities.SampleRate   = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
-                                         LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
-                                         LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
-                                         LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
-                                         LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
-                                         LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
-                                         LVEQNB_CAP_FS_192000;
-        EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
-        EQNB_Capabilities.MaxBlockSize = InternalBlockSize;
-        EQNB_Capabilities.MaxBands     = pInstParams->EQNB_NumBands;
-
-        /*
-         * Get the memory requirements
-         */
-        LVEQNB_Memory(LVM_NULL,
-                      &EQNB_MemTab,
-                      &EQNB_Capabilities);
-
-        /*
-         * Update the bundle table
-         */
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                            EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                            EQNB_MemTab.Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        if (EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size > AlgScratchSize) AlgScratchSize = EQNB_MemTab.Region[LVM_MEMREGION_TEMPORARY_FAST].Size;
-
-    }
-
-    /*
-     * Headroom management memory allocation
-     */
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-
-    /*
-     * Spectrum Analyzer memory requirements
-     */
-    {
-        pLVPSA_Handle_t     hPSAInst = LVM_NULL;
-        LVPSA_MemTab_t      PSA_MemTab;
-        LVPSA_InitParams_t  PSA_InitParams;
-        LVPSA_FilterParam_t FiltersParams[9];
-        LVPSA_RETURN        PSA_Status;
-
-        if(pInstParams->PSA_Included == LVM_PSA_ON)
-        {
-            PSA_InitParams.SpectralDataBufferDuration   = (LVM_UINT16) 500;
-            PSA_InitParams.MaxInputBlockSize            = (LVM_UINT16) 1000;
-            PSA_InitParams.nBands                       = (LVM_UINT16) 9;
-
-            PSA_InitParams.pFiltersParams = &FiltersParams[0];
-            for(i = 0; i < PSA_InitParams.nBands; i++)
-            {
-                FiltersParams[i].CenterFrequency    = (LVM_UINT16) 1000;
-                FiltersParams[i].QFactor            = (LVM_UINT16) 25;
-                FiltersParams[i].PostGain           = (LVM_INT16)  0;
-            }
-
-            /*
-            * Get the memory requirements
-            */
-            PSA_Status = LVPSA_Memory (hPSAInst,
-                                        &PSA_MemTab,
-                                        &PSA_InitParams);
-
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
-            }
-
-            /*
-            * Update the bundle table
-            */
-            /* Slow Data */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
-            /* Fast Data */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
-            /* Fast Coef */
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
-            /* Fast Temporary */
-            InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
-                                MAX_INTERNAL_BLOCKSIZE * sizeof(LVM_FLOAT));
-
-            if (PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size > AlgScratchSize)
-            {
-                AlgScratchSize = PSA_MemTab.Region[LVM_TEMPORARY_FAST].Size;
-            }
-        }
-    }
-
-    /*
-     * Return the memory table
-     */
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_DATA].Size = 0;
-    }
-
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
-    pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_PERSISTENT_FAST_COEF].Size = 0;
-    }
-
-    InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                        AlgScratchSize);
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size             = InstAlloc_GetTotal(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Type             = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].pBaseAddress     = LVM_NULL;
-    if (pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size < 4)
-    {
-        pMemoryTable->Region[LVM_MEMREGION_TEMPORARY_FAST].Size = 0;
-    }
-
-    return(LVM_SUCCESS);
-
-}
 
 /****************************************************************************************/
 /*                                                                                      */
 /* FUNCTION:                LVM_GetInstanceHandle                                       */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  This function is used to create a bundle instance. It returns the created instance  */
-/*  handle through phInstance. All parameters are set to their default, inactive state. */
+/*  This function is used to create a bundle instance.                                  */
+/*  All parameters are set to their default, inactive state.                            */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pInstParams             Pointer to the initialisation capabilities                  */
+/*  phInstance              Pointer to the instance handle                              */
+/*  pInstParams             Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVM_SUCCESS             Initialisation succeeded                                    */
+/*  LVM_NULLADDRESS         When phInstance or pInstParams is NULL, or allocation fails */
 /*  LVM_OUTOFRANGE          When any of the Instance parameters are out of range        */
-/*  LVM_NULLADDRESS         When one of phInstance, pMemoryTable or pInstParams are NULL*/
 /*                                                                                      */
 /* NOTES:                                                                               */
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-
-LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t           *phInstance,
-                                          LVM_MemTab_t           *pMemoryTable,
-                                          LVM_InstParams_t       *pInstParams)
-{
-
-    LVM_ReturnStatus_en     Status = LVM_SUCCESS;
-    LVM_Instance_t          *pInstance;
-    INST_ALLOC              AllocMem[LVM_NR_MEMORY_REGIONS];
-    LVM_INT16               i;
-    LVM_UINT16              InternalBlockSize;
-    LVM_INT32               BundleScratchSize;
+LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t* phInstance, LVM_InstParams_t* pInstParams) {
+    LVM_ReturnStatus_en Status = LVM_SUCCESS;
+    LVM_Instance_t* pInstance;
+    LVM_INT16 i;
+    LVM_UINT16 InternalBlockSize;
+    LVM_INT32 BundleScratchSize;
 
     /*
      * Check valid pointers have been given
      */
-    if ((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstParams == LVM_NULL))
-    {
+    if ((phInstance == LVM_NULL) || (pInstParams == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /*
-     * Check the memory table for NULL pointers
-     */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        if ((pMemoryTable->Region[i].Size != 0) &&
-            (pMemoryTable->Region[i].pBaseAddress==LVM_NULL))
-        {
-            return(LVM_NULLADDRESS);
-        }
-    }
-
-    /*
      * Check the instance parameters
      */
-    if( (pInstParams->BufferMode != LVM_MANAGED_BUFFERS) && (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS) )
-    {
+    if ((pInstParams->BufferMode != LVM_MANAGED_BUFFERS) &&
+        (pInstParams->BufferMode != LVM_UNMANAGED_BUFFERS)) {
         return (LVM_OUTOFRANGE);
     }
 
-    if( pInstParams->EQNB_NumBands > 32 )
-    {
+    if (pInstParams->EQNB_NumBands > 32) {
         return (LVM_OUTOFRANGE);
     }
 
-    if(pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE ) )
-        {
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
+        if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+            (pInstParams->MaxBlockSize > LVM_MANAGED_MAX_MAXBLOCKSIZE)) {
             return (LVM_OUTOFRANGE);
         }
-    }
-    else
-    {
-        if( (pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE ) || (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE) )
-        {
+    } else {
+        if ((pInstParams->MaxBlockSize < LVM_MIN_MAXBLOCKSIZE) ||
+            (pInstParams->MaxBlockSize > LVM_UNMANAGED_MAX_MAXBLOCKSIZE)) {
             return (LVM_OUTOFRANGE);
         }
     }
 
-    if(pInstParams->PSA_Included > LVM_PSA_ON)
-    {
+    if (pInstParams->PSA_Included > LVM_PSA_ON) {
         return (LVM_OUTOFRANGE);
     }
 
     /*
-     * Initialise the AllocMem structures
+     * Create the instance handle
      */
-    for (i=0; i<LVM_NR_MEMORY_REGIONS; i++)
-    {
-        InstAlloc_Init(&AllocMem[i],
-                       pMemoryTable->Region[i].pBaseAddress);
-    }
+    *phInstance = new LVM_Instance_t{};
+    pInstance = (LVM_Instance_t*)*phInstance;
+
+    pInstance->InstParams = *pInstParams;
 
     /*
-     * Set the instance handle
+     * Create the bundle scratch memory and initialise the buffer management
      */
-    *phInstance  = (LVM_Handle_t)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                                                     sizeof(LVM_Instance_t));
-    pInstance =(LVM_Instance_t  *)*phInstance;
-
-    /*
-     * Save the memory table, parameters and capabilities
-     */
-    pInstance->MemoryTable    = *pMemoryTable;
-    pInstance->InstParams     = *pInstParams;
-
-    /*
-     * Set the bundle scratch memory and initialse the buffer management
-     */
-    InternalBlockSize = (LVM_UINT16)((pInstParams->MaxBlockSize) & MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
-    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE)
-    {
+    InternalBlockSize = (LVM_UINT16)(
+            (pInstParams->MaxBlockSize) &
+            MIN_INTERNAL_BLOCKMASK); /* Force to a multiple of MIN_INTERNAL_BLOCKSIZE */
+    if (InternalBlockSize < MIN_INTERNAL_BLOCKSIZE) {
         InternalBlockSize = MIN_INTERNAL_BLOCKSIZE;
     }
 
     /* Maximum Internal Block Size should not be more than MAX_INTERNAL_BLOCKSIZE */
-    if(InternalBlockSize > MAX_INTERNAL_BLOCKSIZE)
-    {
+    if (InternalBlockSize > MAX_INTERNAL_BLOCKSIZE) {
         InternalBlockSize = MAX_INTERNAL_BLOCKSIZE;
     }
     pInstance->InternalBlockSize = (LVM_INT16)InternalBlockSize;
@@ -599,40 +116,46 @@
     /*
      * Common settings for managed and unmanaged buffers
      */
-    pInstance->SamplesToProcess = 0;                /* No samples left to process */
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
+    pInstance->SamplesToProcess = 0; /* No samples left to process */
+    BundleScratchSize =
+            (LVM_INT32)(3 * LVM_MAX_CHANNELS * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) *
+                        sizeof(LVM_FLOAT));
+    pInstance->pScratch = calloc(1, BundleScratchSize);
+    if (pInstance->pScratch == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
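+
+    /*
+     * Sizing note (worked example): for MaxBlockSize = 4100 the mask MIN_INTERNAL_BLOCKMASK
+     * (0xFFF0) gives InternalBlockSize = 4096, so the bundle scratch above is
+     * 3 * LVM_MAX_CHANNELS * (16 + 4096) * sizeof(LVM_FLOAT) bytes, roughly 386 KiB if
+     * LVM_MAX_CHANNELS is 8 and LVM_FLOAT is a 32-bit float. The same scratch block is
+     * passed to each of the sub-module Init calls below.
+     */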
+
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
         /*
          * Managed buffers required
          */
-        pInstance->pBufferManagement = (LVM_Buffer_t *)
-            InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                                                           sizeof(LVM_Buffer_t));
-        BundleScratchSize = (LVM_INT32)
-                            (3 * LVM_MAX_CHANNELS \
-                             * (MIN_INTERNAL_BLOCKSIZE + InternalBlockSize) \
-                             * sizeof(LVM_FLOAT));
-        pInstance->pBufferManagement->pScratch = (LVM_FLOAT *)
-            InstAlloc_AddMember(
-                         &AllocMem[LVM_MEMREGION_TEMPORARY_FAST], /* Scratch 1 buffer */
-                                                  (LVM_UINT32)BundleScratchSize);
-        LoadConst_Float(0,                                   /* Clear the input delay buffer */
-                        (LVM_FLOAT *)&pInstance->pBufferManagement->InDelayBuffer,
-                        (LVM_INT16)(LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE));
-        pInstance->pBufferManagement->InDelaySamples = MIN_INTERNAL_BLOCKSIZE; /* Set the number of delay samples */
-        pInstance->pBufferManagement->OutDelaySamples = 0;                     /* No samples in the output buffer */
-        pInstance->pBufferManagement->BufferState = LVM_FIRSTCALL;             /* Set the state ready for the first call */
+        pInstance->pBufferManagement =
+                (LVM_Buffer_t*)calloc(1, sizeof(*(pInstance->pBufferManagement)));
+        if (pInstance->pBufferManagement == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
+
+        pInstance->pBufferManagement->pScratch = (LVM_FLOAT*)pInstance->pScratch;
+
+        memset(pInstance->pBufferManagement->InDelayBuffer, 0,
+                LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE *
+                sizeof(pInstance->pBufferManagement->InDelayBuffer[0]));
+        pInstance->pBufferManagement->InDelaySamples =
+                MIN_INTERNAL_BLOCKSIZE;                    /* Set the number of delay samples */
+        pInstance->pBufferManagement->OutDelaySamples = 0; /* No samples in the output buffer */
+        pInstance->pBufferManagement->BufferState =
+                LVM_FIRSTCALL; /* Set the state ready for the first call */
     }
 
     /*
      * Set default parameters
      */
-    pInstance->Params.OperatingMode    = LVM_MODE_OFF;
-    pInstance->Params.SampleRate       = LVM_FS_8000;
-    pInstance->Params.SourceFormat     = LVM_MONO;
-    pInstance->Params.SpeakerType      = LVM_HEADPHONES;
-    pInstance->Params.VC_EffectLevel   = 0;
-    pInstance->Params.VC_Balance       = 0;
+    pInstance->Params.OperatingMode = LVM_MODE_OFF;
+    pInstance->Params.SampleRate = LVM_FS_8000;
+    pInstance->Params.SourceFormat = LVM_MONO;
+    pInstance->Params.SpeakerType = LVM_HEADPHONES;
+    pInstance->Params.VC_EffectLevel = 0;
+    pInstance->Params.VC_Balance = 0;
 
     /*
      * Set callback
@@ -642,338 +165,257 @@
     /*
      * DC removal filter
      */
-#ifdef SUPPORT_MC
     DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
-    DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
 
     /*
      * Treble Enhancement
      */
-    pInstance->pTE_Taps  = (LVM_TE_Data_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                sizeof(LVM_TE_Data_t));
-
-    pInstance->pTE_State = (LVM_TE_Coefs_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                 sizeof(LVM_TE_Coefs_t));
     pInstance->Params.TE_OperatingMode = LVM_TE_OFF;
-    pInstance->Params.TE_EffectLevel   = 0;
-    pInstance->TE_Active               = LVM_FALSE;
+    pInstance->Params.TE_EffectLevel = 0;
+    pInstance->TE_Active = LVM_FALSE;
 
     /*
      * Set the volume control and initialise Current to Target
      */
-    pInstance->VC_Volume.MixerStream[0].CallbackParam      = 0;
-    pInstance->VC_Volume.MixerStream[0].CallbackSet        = 0;
-    pInstance->VC_Volume.MixerStream[0].pCallbackHandle    = pInstance;
-    pInstance->VC_Volume.MixerStream[0].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_Volume.MixerStream[0].CallbackParam = 0;
+    pInstance->VC_Volume.MixerStream[0].CallbackSet = 0;
+    pInstance->VC_Volume.MixerStream[0].pCallbackHandle = pInstance;
+    pInstance->VC_Volume.MixerStream[0].pCallBack = LVM_VCCallBack;
 
-    /* In managed buffering, start with low signal level as delay in buffer management causes a click*/
-    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS)
-    {
+    /* In managed buffering, start with low signal level as delay in buffer management causes a
+     * click*/
+    if (pInstParams->BufferMode == LVM_MANAGED_BUFFERS) {
         LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], 0, 0);
-    }
-    else
-    {
+    } else {
         LVC_Mixer_Init(&pInstance->VC_Volume.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
     }
 
-    LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0],0,LVM_FS_8000,2);
+    LVC_Mixer_SetTimeConstant(&pInstance->VC_Volume.MixerStream[0], 0, LVM_FS_8000, 2);
 
-    pInstance->VC_VolumedB                  = 0;
-    pInstance->VC_AVLFixedVolume            = 0;
-    pInstance->VC_Active                    = LVM_FALSE;
+    pInstance->VC_VolumedB = 0;
+    pInstance->VC_AVLFixedVolume = 0;
+    pInstance->VC_Active = LVM_FALSE;
 
-    pInstance->VC_BalanceMix.MixerStream[0].CallbackParam      = 0;
-    pInstance->VC_BalanceMix.MixerStream[0].CallbackSet        = 0;
-    pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle    = pInstance;
-    pInstance->VC_BalanceMix.MixerStream[0].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_BalanceMix.MixerStream[0].CallbackParam = 0;
+    pInstance->VC_BalanceMix.MixerStream[0].CallbackSet = 0;
+    pInstance->VC_BalanceMix.MixerStream[0].pCallbackHandle = pInstance;
+    pInstance->VC_BalanceMix.MixerStream[0].pCallBack = LVM_VCCallBack;
     LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[0], LVM_MAXFLOAT, LVM_MAXFLOAT);
-    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[0], LVM_VC_MIXER_TIME,
+                                       LVM_FS_8000, 2);
 
-    pInstance->VC_BalanceMix.MixerStream[1].CallbackParam      = 0;
-    pInstance->VC_BalanceMix.MixerStream[1].CallbackSet        = 0;
-    pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle    = pInstance;
-    pInstance->VC_BalanceMix.MixerStream[1].pCallBack          = LVM_VCCallBack;
+    pInstance->VC_BalanceMix.MixerStream[1].CallbackParam = 0;
+    pInstance->VC_BalanceMix.MixerStream[1].CallbackSet = 0;
+    pInstance->VC_BalanceMix.MixerStream[1].pCallbackHandle = pInstance;
+    pInstance->VC_BalanceMix.MixerStream[1].pCallBack = LVM_VCCallBack;
     LVC_Mixer_Init(&pInstance->VC_BalanceMix.MixerStream[1], LVM_MAXFLOAT, LVM_MAXFLOAT);
-    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1],LVM_VC_MIXER_TIME,LVM_FS_8000,2);
+    LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->VC_BalanceMix.MixerStream[1], LVM_VC_MIXER_TIME,
+                                       LVM_FS_8000, 2);
 
     /*
-     * Set the default EQNB pre-gain and pointer to the band definitions
+     * Create the default EQNB pre-gain and pointer to the band definitions
      */
-    pInstance->pEQNB_BandDefs =
-        (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                   (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
-    pInstance->pEQNB_UserDefs =
-        (LVM_EQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                   (pInstParams->EQNB_NumBands * sizeof(LVM_EQNB_BandDef_t)));
+    pInstance->pEQNB_BandDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+                                                            sizeof(*(pInstance->pEQNB_BandDefs)));
+    if (pInstance->pEQNB_BandDefs == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
+    pInstance->pEQNB_UserDefs = (LVM_EQNB_BandDef_t*)calloc(pInstParams->EQNB_NumBands,
+                                                            sizeof(*(pInstance->pEQNB_UserDefs)));
+    if (pInstance->pEQNB_UserDefs == LVM_NULL) {
+        return LVM_NULLADDRESS;
+    }
 
     /*
      * Initialise the Concert Sound module
      */
     {
-        LVCS_Handle_t           hCSInstance;                /* Instance handle */
-        LVCS_MemTab_t           CS_MemTab;                  /* Memory table */
-        LVCS_Capabilities_t     CS_Capabilities;            /* Initial capabilities */
-        LVCS_ReturnStatus_en    LVCS_Status;                /* Function call status */
+        LVCS_Handle_t hCSInstance;           /* Instance handle */
+        LVCS_Capabilities_t CS_Capabilities; /* Initial capabilities */
+        LVCS_ReturnStatus_en LVCS_Status;    /* Function call status */
 
         /*
          * Set default parameters
          */
-        pInstance->Params.VirtualizerReverbLevel    = 100;
-        pInstance->Params.VirtualizerType           = LVM_CONCERTSOUND;
-        pInstance->Params.VirtualizerOperatingMode  = LVM_MODE_OFF;
-        pInstance->CS_Active                        = LVM_FALSE;
+        pInstance->Params.VirtualizerReverbLevel = 100;
+        pInstance->Params.VirtualizerType = LVM_CONCERTSOUND;
+        pInstance->Params.VirtualizerOperatingMode = LVM_MODE_OFF;
+        pInstance->CS_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        CS_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
+        CS_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
         CS_Capabilities.CallBack = pInstance->CallBack;
         CS_Capabilities.pBundleInstance = (void*)pInstance;
 
         /*
-         * Get the memory requirements and then set the address pointers, forcing alignment
-         */
-        LVCS_Status = LVCS_Memory(LVM_NULL,                /* Get the memory requirements */
-                                  &CS_MemTab,
-                                  &CS_Capabilities);
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = &pInstance->CS_Instance;
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                         CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size);
-        CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                         CS_MemTab.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size);
-        CS_MemTab.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress       = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                         0);
-
-        /*
          * Initialise the Concert Sound instance and save the instance handle
          */
-        hCSInstance = LVM_NULL;                            /* Set to NULL to return handle */
-        LVCS_Status = LVCS_Init(&hCSInstance,              /* Initiailse */
-                                &CS_MemTab,
-                                &CS_Capabilities);
-        if (LVCS_Status != LVCS_SUCCESS) return((LVM_ReturnStatus_en)LVCS_Status);
-        pInstance->hCSInstance = hCSInstance;              /* Save the instance handle */
-
+        hCSInstance = LVM_NULL;               /* Set to NULL to return handle */
+        LVCS_Status = LVCS_Init(&hCSInstance, /* Create and initialise */
+                                &CS_Capabilities, pInstance->pScratch);
+        if (LVCS_Status != LVCS_SUCCESS) return ((LVM_ReturnStatus_en)LVCS_Status);
+        pInstance->hCSInstance = hCSInstance; /* Save the instance handle */
     }
 
     /*
      * Initialise the Bass Enhancement module
      */
     {
-        LVDBE_Handle_t          hDBEInstance;               /* Instance handle */
-        LVDBE_MemTab_t          DBE_MemTab;                 /* Memory table */
-        LVDBE_Capabilities_t    DBE_Capabilities;           /* Initial capabilities */
-        LVDBE_ReturnStatus_en   LVDBE_Status;               /* Function call status */
+        LVDBE_Handle_t hDBEInstance;           /* Instance handle */
+        LVDBE_Capabilities_t DBE_Capabilities; /* Initial capabilities */
+        LVDBE_ReturnStatus_en LVDBE_Status;    /* Function call status */
 
         /*
          * Set the initialisation parameters
          */
         pInstance->Params.BE_OperatingMode = LVM_BE_OFF;
-        pInstance->Params.BE_CentreFreq    = LVM_BE_CENTRE_55Hz;
-        pInstance->Params.BE_EffectLevel   = 0;
-        pInstance->Params.BE_HPF           = LVM_BE_HPF_OFF;
+        pInstance->Params.BE_CentreFreq = LVM_BE_CENTRE_55Hz;
+        pInstance->Params.BE_EffectLevel = 0;
+        pInstance->Params.BE_HPF = LVM_BE_HPF_OFF;
 
-        pInstance->DBE_Active              = LVM_FALSE;
+        pInstance->DBE_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        DBE_Capabilities.SampleRate      = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 |
-                                           LVDBE_CAP_FS_12000 | LVDBE_CAP_FS_16000 |
-                                           LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
-                                           LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 |
-                                           LVDBE_CAP_FS_48000 | LVDBE_CAP_FS_88200 |
-                                           LVDBE_CAP_FS_96000 | LVDBE_CAP_FS_176400 |
-                                           LVDBE_CAP_FS_192000;
-        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz | LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
-        DBE_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
+        DBE_Capabilities.SampleRate = LVDBE_CAP_FS_8000 | LVDBE_CAP_FS_11025 | LVDBE_CAP_FS_12000 |
+                                      LVDBE_CAP_FS_16000 | LVDBE_CAP_FS_22050 | LVDBE_CAP_FS_24000 |
+                                      LVDBE_CAP_FS_32000 | LVDBE_CAP_FS_44100 | LVDBE_CAP_FS_48000 |
+                                      LVDBE_CAP_FS_88200 | LVDBE_CAP_FS_96000 |
+                                      LVDBE_CAP_FS_176400 | LVDBE_CAP_FS_192000;
 
-        /*
-         * Get the memory requirements and then set the address pointers
-         */
-        LVDBE_Status = LVDBE_Memory(LVM_NULL,               /* Get the memory requirements */
-                                    &DBE_MemTab,
-                                    &DBE_Capabilities);
-        DBE_MemTab.Region[LVDBE_MEMREGION_INSTANCE].pBaseAddress        = &pInstance->DBE_Instance;
-        DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                      DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_DATA].Size);
-        DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                      DBE_MemTab.Region[LVDBE_MEMREGION_PERSISTENT_COEF].Size);
-        DBE_MemTab.Region[LVDBE_MEMREGION_SCRATCH].pBaseAddress         = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                      0);
+        DBE_Capabilities.CentreFrequency = LVDBE_CAP_CENTRE_55Hz | LVDBE_CAP_CENTRE_66Hz |
+                                           LVDBE_CAP_CENTRE_78Hz | LVDBE_CAP_CENTRE_90Hz;
+        DBE_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
 
         /*
          * Initialise the Dynamic Bass Enhancement instance and save the instance handle
          */
-        hDBEInstance = LVM_NULL;                            /* Set to NULL to return handle */
-        LVDBE_Status = LVDBE_Init(&hDBEInstance,            /* Initiailse */
-                                  &DBE_MemTab,
-                                  &DBE_Capabilities);
-        if (LVDBE_Status != LVDBE_SUCCESS) return((LVM_ReturnStatus_en)LVDBE_Status);
-        pInstance->hDBEInstance = hDBEInstance;             /* Save the instance handle */
+        hDBEInstance = LVM_NULL;                 /* Set to NULL to return handle */
+        LVDBE_Status = LVDBE_Init(&hDBEInstance, /* Create and initialise */
+                                  &DBE_Capabilities, pInstance->pScratch);
+        if (LVDBE_Status != LVDBE_SUCCESS) return ((LVM_ReturnStatus_en)LVDBE_Status);
+        pInstance->hDBEInstance = hDBEInstance; /* Save the instance handle */
     }
 
     /*
      * Initialise the N-Band Equaliser module
      */
     {
-        LVEQNB_Handle_t          hEQNBInstance;             /* Instance handle */
-        LVEQNB_MemTab_t          EQNB_MemTab;               /* Memory table */
-        LVEQNB_Capabilities_t    EQNB_Capabilities;         /* Initial capabilities */
-        LVEQNB_ReturnStatus_en   LVEQNB_Status;             /* Function call status */
+        LVEQNB_Handle_t hEQNBInstance;           /* Instance handle */
+        LVEQNB_Capabilities_t EQNB_Capabilities; /* Initial capabilities */
+        LVEQNB_ReturnStatus_en LVEQNB_Status;    /* Function call status */
 
         /*
          * Set the initialisation parameters
          */
-        pInstance->Params.EQNB_OperatingMode   = LVM_EQNB_OFF;
-        pInstance->Params.EQNB_NBands          = 0;
+        pInstance->Params.EQNB_OperatingMode = LVM_EQNB_OFF;
+        pInstance->Params.EQNB_NBands = 0;
         pInstance->Params.pEQNB_BandDefinition = LVM_NULL;
-        pInstance->EQNB_Active                 = LVM_FALSE;
+        pInstance->EQNB_Active = LVM_FALSE;
 
         /*
          * Set the initialisation capabilities
          */
-        EQNB_Capabilities.SampleRate      = LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 |
-                                            LVEQNB_CAP_FS_12000 | LVEQNB_CAP_FS_16000 |
-                                            LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
-                                            LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 |
-                                            LVEQNB_CAP_FS_48000 | LVEQNB_CAP_FS_88200 |
-                                            LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
-                                            LVEQNB_CAP_FS_192000;
-        EQNB_Capabilities.MaxBlockSize    = (LVM_UINT16)InternalBlockSize;
-        EQNB_Capabilities.MaxBands        = pInstParams->EQNB_NumBands;
-        EQNB_Capabilities.SourceFormat    = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
-        EQNB_Capabilities.CallBack        = pInstance->CallBack;
-        EQNB_Capabilities.pBundleInstance  = (void*)pInstance;
+        EQNB_Capabilities.SampleRate =
+                LVEQNB_CAP_FS_8000 | LVEQNB_CAP_FS_11025 | LVEQNB_CAP_FS_12000 |
+                LVEQNB_CAP_FS_16000 | LVEQNB_CAP_FS_22050 | LVEQNB_CAP_FS_24000 |
+                LVEQNB_CAP_FS_32000 | LVEQNB_CAP_FS_44100 | LVEQNB_CAP_FS_48000 |
+                LVEQNB_CAP_FS_88200 | LVEQNB_CAP_FS_96000 | LVEQNB_CAP_FS_176400 |
+                LVEQNB_CAP_FS_192000;
 
-        /*
-         * Get the memory requirements and then set the address pointers, forcing alignment
-         */
-        LVEQNB_Status = LVEQNB_Memory(LVM_NULL,             /* Get the memory requirements */
-                                      &EQNB_MemTab,
-                                      &EQNB_Capabilities);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress        = &pInstance->EQNB_Instance;
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                                                                                        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                                                                                                        EQNB_MemTab.Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size);
-        EQNB_MemTab.Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress         = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],
-                                                                                                        0);
+        EQNB_Capabilities.MaxBlockSize = (LVM_UINT16)InternalBlockSize;
+        EQNB_Capabilities.MaxBands = pInstParams->EQNB_NumBands;
+        EQNB_Capabilities.SourceFormat = LVEQNB_CAP_STEREO | LVEQNB_CAP_MONOINSTEREO;
+        EQNB_Capabilities.CallBack = pInstance->CallBack;
+        EQNB_Capabilities.pBundleInstance = (void*)pInstance;
 
         /*
          * Initialise the N-Band Equaliser instance and save the instance handle
          */
-        hEQNBInstance = LVM_NULL;                           /* Set to NULL to return handle */
-        LVEQNB_Status = LVEQNB_Init(&hEQNBInstance,         /* Initiailse */
-                                    &EQNB_MemTab,
-                                    &EQNB_Capabilities);
-        if (LVEQNB_Status != LVEQNB_SUCCESS) return((LVM_ReturnStatus_en)LVEQNB_Status);
-        pInstance->hEQNBInstance = hEQNBInstance;           /* Save the instance handle */
+        hEQNBInstance = LVM_NULL;                   /* Set to NULL to return handle */
+        LVEQNB_Status = LVEQNB_Init(&hEQNBInstance, /* Create and initialise */
+                                    &EQNB_Capabilities, pInstance->pScratch);
+        if (LVEQNB_Status != LVEQNB_SUCCESS) return ((LVM_ReturnStatus_en)LVEQNB_Status);
+        pInstance->hEQNBInstance = hEQNBInstance; /* Save the instance handle */
     }
 
     /*
      * Headroom management memory allocation
      */
     {
-        pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t *)
-              InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
-        pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t *)
-              InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                                       (LVM_HEADROOM_MAX_NBANDS * sizeof(LVM_HeadroomBandDef_t)));
+        pInstance->pHeadroom_BandDefs = (LVM_HeadroomBandDef_t*)calloc(
+                LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_BandDefs)));
+        if (pInstance->pHeadroom_BandDefs == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
+        pInstance->pHeadroom_UserDefs = (LVM_HeadroomBandDef_t*)calloc(
+                LVM_HEADROOM_MAX_NBANDS, sizeof(*(pInstance->pHeadroom_UserDefs)));
+        if (pInstance->pHeadroom_UserDefs == LVM_NULL) {
+            return LVM_NULLADDRESS;
+        }
 
         /* Headroom management parameters initialisation */
         pInstance->NewHeadroomParams.NHeadroomBands = 2;
         pInstance->NewHeadroomParams.pHeadroomDefinition = pInstance->pHeadroom_BandDefs;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low          = 20;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High         = 4999;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset    = 3;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low          = 5000;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High         = 24000;
-        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset    = 4;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_Low = 20;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Limit_High = 4999;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[0].Headroom_Offset = 3;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_Low = 5000;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Limit_High = 24000;
+        pInstance->NewHeadroomParams.pHeadroomDefinition[1].Headroom_Offset = 4;
         pInstance->NewHeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
 
-        pInstance->Headroom =0;
+        pInstance->Headroom = 0;
     }
 
     /*
      * Initialise the PSA module
      */
     {
-        pLVPSA_Handle_t     hPSAInstance = LVM_NULL;   /* Instance handle */
-        LVPSA_MemTab_t      PSA_MemTab;
-        LVPSA_RETURN        PSA_Status;                 /* Function call status */
+        pLVPSA_Handle_t hPSAInstance = LVM_NULL; /* Instance handle */
+        LVPSA_RETURN PSA_Status;                 /* Function call status */
         LVPSA_FilterParam_t FiltersParams[9];
 
-        if(pInstParams->PSA_Included==LVM_PSA_ON)
-        {
-            pInstance->PSA_InitParams.SpectralDataBufferDuration   = (LVM_UINT16) 500;
-            pInstance->PSA_InitParams.MaxInputBlockSize            = (LVM_UINT16) 2048;
-            pInstance->PSA_InitParams.nBands                       = (LVM_UINT16) 9;
-            pInstance->PSA_InitParams.pFiltersParams               = &FiltersParams[0];
-            for(i = 0; i < pInstance->PSA_InitParams.nBands; i++)
-            {
-                FiltersParams[i].CenterFrequency    = (LVM_UINT16) 1000;
-                FiltersParams[i].QFactor            = (LVM_UINT16) 100;
-                FiltersParams[i].PostGain           = (LVM_INT16)  0;
+        if (pInstParams->PSA_Included == LVM_PSA_ON) {
+            pInstance->PSA_InitParams.SpectralDataBufferDuration = (LVM_UINT16)500;
+            pInstance->PSA_InitParams.MaxInputBlockSize = (LVM_UINT16)2048;
+            pInstance->PSA_InitParams.nBands = (LVM_UINT16)9;
+            pInstance->PSA_InitParams.pFiltersParams = &FiltersParams[0];
+            for (i = 0; i < pInstance->PSA_InitParams.nBands; i++) {
+                FiltersParams[i].CenterFrequency = (LVM_UINT16)1000;
+                FiltersParams[i].QFactor = (LVM_UINT16)100;
+                FiltersParams[i].PostGain = (LVM_INT16)0;
             }
 
-            /*Get the memory requirements and then set the address pointers*/
-            PSA_Status = LVPSA_Memory (hPSAInstance,
-                                          &PSA_MemTab,
-                                          &pInstance->PSA_InitParams);
-
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
-            }
-
-            /* Slow Data */
-            PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_SLOW_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_SLOW_DATA].Size);
-
-            /* Fast Data */
-            PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_DATA],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_DATA].Size);
-
-            /* Fast Coef */
-            PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_PERSISTENT_FAST_COEF],
-                PSA_MemTab.Region[LVM_PERSISTENT_FAST_COEF].Size);
-
-            /* Fast Temporary */
-            pInstance->pPSAInput = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem[LVM_TEMPORARY_FAST],
-                                                       (LVM_UINT32) MAX_INTERNAL_BLOCKSIZE * \
-                                                       sizeof(LVM_FLOAT));
-            PSA_MemTab.Region[LVM_TEMPORARY_FAST].pBaseAddress       = (void *)InstAlloc_AddMember(&AllocMem[LVM_MEMREGION_TEMPORARY_FAST],0);
-
             /*Initialise PSA instance and save the instance handle*/
             pInstance->PSA_ControlParams.Fs = LVM_FS_48000;
-            pInstance->PSA_ControlParams.LevelDetectionSpeed  = LVPSA_SPEED_MEDIUM;
-            PSA_Status = LVPSA_Init (&hPSAInstance,
-                                    &pInstance->PSA_InitParams,
-                                    &pInstance->PSA_ControlParams,
-                                    &PSA_MemTab);
+            pInstance->PSA_ControlParams.LevelDetectionSpeed = LVPSA_SPEED_MEDIUM;
+            pInstance->pPSAInput = (LVM_FLOAT*)calloc(MAX_INTERNAL_BLOCKSIZE, sizeof(LVM_FLOAT));
+            if (pInstance->pPSAInput == LVM_NULL) {
+                return LVM_NULLADDRESS;
+            }
+            PSA_Status = LVPSA_Init(&hPSAInstance, &pInstance->PSA_InitParams,
+                                    &pInstance->PSA_ControlParams, pInstance->pScratch);
 
-            if (PSA_Status != LVPSA_OK)
-            {
-                return((LVM_ReturnStatus_en) LVM_ALGORITHMPSA);
+            if (PSA_Status != LVPSA_OK) {
+                return ((LVM_ReturnStatus_en)LVM_ALGORITHMPSA);
             }
 
-            pInstance->hPSAInstance = hPSAInstance;       /* Save the instance handle */
+            pInstance->hPSAInstance = hPSAInstance; /* Save the instance handle */
             pInstance->PSA_GainOffset = 0;
-        }
-        else
-        {
+        } else {
             pInstance->hPSAInstance = LVM_NULL;
         }
 
         /*
          * Set the initialisation parameters.
          */
-        pInstance->Params.PSA_PeakDecayRate   = LVM_PSA_SPEED_MEDIUM;
-        pInstance->Params.PSA_Enable          = LVM_PSA_OFF;
+        pInstance->Params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+        pInstance->Params.PSA_Enable = LVM_PSA_OFF;
     }
 
     /*
@@ -992,20 +434,113 @@
     pInstance->ConfigurationNumber += LVM_VC_MASK;
     pInstance->ConfigurationNumber += LVM_PSA_MASK;
 
-    if(((pInstance->ConfigurationNumber  & LVM_CS_MASK)!=0)  ||
-        ((pInstance->ConfigurationNumber & LVM_DBE_MASK)!=0) ||
-        ((pInstance->ConfigurationNumber & LVM_EQNB_MASK)!=0)||
-        ((pInstance->ConfigurationNumber & LVM_TE_MASK)!=0)  ||
-        ((pInstance->ConfigurationNumber & LVM_VC_MASK)!=0))
-    {
-        pInstance->BlickSizeMultiple    = 4;
-    }
-    else
-    {
-        pInstance->BlickSizeMultiple    = 1;
+    if (((pInstance->ConfigurationNumber & LVM_CS_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_DBE_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_EQNB_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_TE_MASK) != 0) ||
+        ((pInstance->ConfigurationNumber & LVM_VC_MASK) != 0)) {
+        pInstance->BlickSizeMultiple = 4;
+    } else {
+        pInstance->BlickSizeMultiple = 1;
     }
 
-    return(Status);
+    return (Status);
+}
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVM_DelInstanceHandle                                       */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to delete a bundle instance. It frees the instance memory,    */
+/*  the sub-module instances and all buffers allocated by LVM_GetInstanceHandle.        */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to the instance handle                              */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1. This function must not be interrupted by the LVM_Process function                */
+/*                                                                                      */
+/****************************************************************************************/
+void LVM_DelInstanceHandle(LVM_Handle_t* phInstance) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)*phInstance;
+
+    if (pInstance->pScratch != LVM_NULL) {
+        free(pInstance->pScratch);
+        pInstance->pScratch = LVM_NULL;
+    }
+
+    if (pInstance->InstParams.BufferMode == LVM_MANAGED_BUFFERS) {
+        /*
+         * Managed buffers required
+         */
+        if (pInstance->pBufferManagement != LVM_NULL) {
+            free(pInstance->pBufferManagement);
+            pInstance->pBufferManagement = LVM_NULL;
+        }
+    }
+
+    /*
+     * Treble Enhancement
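+     * (nothing to free here: the treble boost state is now a std::unique_ptr member of
+     *  LVM_Instance_t and is released automatically by the delete at the end)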
+     */
+
+    /*
+     * Free the default EQNB pre-gain and pointer to the band definitions
+     */
+    if (pInstance->pEQNB_BandDefs != LVM_NULL) {
+        free(pInstance->pEQNB_BandDefs);
+        pInstance->pEQNB_BandDefs = LVM_NULL;
+    }
+    if (pInstance->pEQNB_UserDefs != LVM_NULL) {
+        free(pInstance->pEQNB_UserDefs);
+        pInstance->pEQNB_UserDefs = LVM_NULL;
+    }
+
+    /*
+     * De-initialise the Concert Sound module
+     */
+    if (pInstance->hCSInstance != LVM_NULL) {
+        LVCS_DeInit(&pInstance->hCSInstance);
+    }
+
+    /*
+     * De-initialise the Bass Enhancement module
+     */
+    if (pInstance->hDBEInstance != LVM_NULL) {
+        LVDBE_DeInit(&pInstance->hDBEInstance);
+    }
+
+    /*
+     * De-initialise the N-Band Equaliser module
+     */
+    if (pInstance->hEQNBInstance != LVM_NULL) {
+        LVEQNB_DeInit(&pInstance->hEQNBInstance);
+    }
+
+    /*
+     * Free Headroom management memory.
+     */
+    if (pInstance->pHeadroom_BandDefs != LVM_NULL) {
+        free(pInstance->pHeadroom_BandDefs);
+        pInstance->pHeadroom_BandDefs = LVM_NULL;
+    }
+    if (pInstance->pHeadroom_UserDefs != LVM_NULL) {
+        free(pInstance->pHeadroom_UserDefs);
+        pInstance->pHeadroom_UserDefs = LVM_NULL;
+    }
+
+    /*
+     * De-initialise the PSA module
+     */
+    if (pInstance->hPSAInstance != LVM_NULL) {
+        LVPSA_DeInit(&pInstance->hPSAInstance);
+    }
+    if (pInstance->pPSAInput != LVM_NULL) {
+        free(pInstance->pPSAInput);
+        pInstance->pPSAInput = LVM_NULL;
+    }
+
+    delete pInstance;
+    return;
 }
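For reviewers following the API change, the sketch below shows how a caller might drive the new table-free lifecycle end to end. It is illustrative only: the parameter values are arbitrary, the configure/process step is elided, and error handling is reduced to a single check.

    LVM_Handle_t hInstance = LVM_NULL;
    LVM_InstParams_t instParams = {};
    instParams.BufferMode = LVM_MANAGED_BUFFERS;
    instParams.MaxBlockSize = 4096;  /* within [LVM_MIN_MAXBLOCKSIZE, LVM_MANAGED_MAX_MAXBLOCKSIZE] */
    instParams.EQNB_NumBands = 10;   /* must not exceed 32 */
    instParams.PSA_Included = LVM_PSA_ON;

    if (LVM_GetInstanceHandle(&hInstance, &instParams) == LVM_SUCCESS) {
        /* ... set control parameters and call LVM_Process() on audio blocks ... */
        LVM_ClearAudioBuffers(hInstance);   /* e.g. on a flush; control settings are preserved */
        LVM_DelInstanceHandle(&hInstance);  /* frees the sub-module instances and all buffers */
    }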
 
 /****************************************************************************************/
@@ -1027,48 +562,36 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t  hInstance)
-{
-    LVM_MemTab_t            MemTab;                                     /* Memory table */
-    LVM_InstParams_t        InstParams;                                 /* Instance parameters */
-    LVM_ControlParams_t     Params;                                     /* Control Parameters */
-    LVM_Instance_t          *pInstance  = (LVM_Instance_t  *)hInstance; /* Pointer to Instance */
-    LVM_HeadroomParams_t    HeadroomParams;
+LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance) {
+    LVM_InstParams_t InstParams;                            /* Instance parameters */
+    LVM_ControlParams_t Params;                             /* Control Parameters */
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance; /* Pointer to Instance */
+    LVM_HeadroomParams_t HeadroomParams;
 
-    if(hInstance == LVM_NULL){
+    if (hInstance == LVM_NULL) {
         return LVM_NULLADDRESS;
     }
 
-    /* Save the control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+    /* Save the control parameters */ /* coverity[unchecked_value] */
+    /* Do not check return value of internal function calls */
     LVM_GetControlParameters(hInstance, &Params);
 
     /*Save the headroom parameters*/
     LVM_GetHeadroomParams(hInstance, &HeadroomParams);
 
-    /*  Retrieve allocated buffers in memtab */
-    LVM_GetMemoryTable(hInstance, &MemTab,  LVM_NULL);
-
     /*  Save the instance parameters */
     InstParams = pInstance->InstParams;
 
     /*  Call  LVM_GetInstanceHandle to re-initialise the bundle */
-    LVM_GetInstanceHandle( &hInstance,
-                           &MemTab,
-                           &InstParams);
-
-    /* Restore control parameters */ /* coverity[unchecked_value] */ /* Do not check return value internal function calls */
+    /* Restore control parameters */ /* coverity[unchecked_value] */
+    /* Do not check return value of internal function calls */
     LVM_SetControlParameters(hInstance, &Params);
 
     /*Restore the headroom parameters*/
     LVM_SetHeadroomParams(hInstance, &HeadroomParams);
 
     /* DC removal filter */
-#ifdef SUPPORT_MC
     DC_Mc_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#else
-    DC_2I_D16_TRC_WRA_01_Init(&pInstance->DC_RemovalInstance);
-#endif
 
     return LVM_SUCCESS;
 }
-
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
index ddaac99..63c83c0 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
@@ -33,14 +33,15 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "LVM.h"                                /* LifeVibes */
-#include "LVM_Common.h"                         /* LifeVibes common */
-#include "BIQUAD.h"                             /* Biquad library */
-#include "LVC_Mixer.h"                          /* Mixer library */
-#include "LVCS_Private.h"                       /* Concert Sound */
-#include "LVDBE_Private.h"                      /* Dynamic Bass Enhancement */
-#include "LVEQNB_Private.h"                     /* N-Band equaliser */
-#include "LVPSA_Private.h"                      /* Parametric Spectrum Analyzer */
+#include <audio_utils/BiquadFilter.h>
+#include "LVM.h"            /* LifeVibes */
+#include "LVM_Common.h"     /* LifeVibes common */
+#include "BIQUAD.h"         /* Biquad library */
+#include "LVC_Mixer.h"      /* Mixer library */
+#include "LVCS_Private.h"   /* Concert Sound */
+#include "LVDBE_Private.h"  /* Dynamic Bass Enhancement */
+#include "LVEQNB_Private.h" /* N-Band equaliser */
+#include "LVPSA_Private.h"  /* Parametric Spectrum Analyzer */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -49,63 +50,64 @@
 /************************************************************************************/
 
 /* General */
-#define LVM_INVALID                     0xFFFF    /* Invalid init parameter */
+#define LVM_INVALID 0xFFFF /* Invalid init parameter */
 
 /* Memory */
-#define LVM_INSTANCE_ALIGN              4         /* 32-bit for structures */
-#define LVM_FIRSTCALL                   0         /* First call to the buffer */
-#define LVM_MAXBLOCKCALL                1         /* Maximum block size calls to the buffer */
-#define LVM_LASTCALL                    2         /* Last call to the buffer */
-#define LVM_FIRSTLASTCALL               3         /* Single call for small number of samples */
+#define LVM_INSTANCE_ALIGN 4 /* 32-bit for structures */
+#define LVM_FIRSTCALL 0      /* First call to the buffer */
+#define LVM_MAXBLOCKCALL 1   /* Maximum block size calls to the buffer */
+#define LVM_LASTCALL 2       /* Last call to the buffer */
+#define LVM_FIRSTLASTCALL 3  /* Single call for small number of samples */
 
 /* Block Size */
-#define LVM_MIN_MAXBLOCKSIZE            16        /* Minimum MaxBlockSize Limit*/
-#define LVM_MANAGED_MAX_MAXBLOCKSIZE    8191      /* Maximum MaxBlockSzie Limit for Managed Buffer Mode*/
-#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE  4096      /* Maximum MaxBlockSzie Limit for Unmanaged Buffer Mode */
+#define LVM_MIN_MAXBLOCKSIZE 16           /* Minimum MaxBlockSize Limit */
+#define LVM_MANAGED_MAX_MAXBLOCKSIZE 8191 /* Maximum MaxBlockSize Limit for Managed Buffer Mode */
+#define LVM_UNMANAGED_MAX_MAXBLOCKSIZE \
+    4096 /* Maximum MaxBlockSize Limit for Unmanaged Buffer Mode */
 
-#define MAX_INTERNAL_BLOCKSIZE          8128      /* Maximum multiple of 64  below 8191*/
+#define MAX_INTERNAL_BLOCKSIZE 8128 /* Maximum multiple of 64  below 8191*/
 
-#define MIN_INTERNAL_BLOCKSIZE          16        /* Minimum internal block size */
-#define MIN_INTERNAL_BLOCKSHIFT         4         /* Minimum internal block size as a power of 2 */
-#define MIN_INTERNAL_BLOCKMASK          0xFFF0    /* Minimum internal block size mask */
+#define MIN_INTERNAL_BLOCKSIZE 16     /* Minimum internal block size */
+#define MIN_INTERNAL_BLOCKSHIFT 4     /* Minimum internal block size as a power of 2 */
+#define MIN_INTERNAL_BLOCKMASK 0xFFF0 /* Minimum internal block size mask */
 
-#define LVM_PSA_DYNAMICRANGE            60        /* Spectral Dynamic range: used for offseting output*/
-#define LVM_PSA_BARHEIGHT               127       /* Spectral Bar Height*/
+#define LVM_PSA_DYNAMICRANGE 60 /* Spectral Dynamic range: used for offsetting output */
+#define LVM_PSA_BARHEIGHT 127   /* Spectral Bar Height*/
 
-#define LVM_TE_MIN_EFFECTLEVEL          0         /*TE Minimum EffectLevel*/
-#define LVM_TE_MAX_EFFECTLEVEL          15        /*TE Maximum Effect level*/
+#define LVM_TE_MIN_EFFECTLEVEL 0  /*TE Minimum EffectLevel*/
+#define LVM_TE_MAX_EFFECTLEVEL 15 /*TE Maximum Effect level*/
 
-#define LVM_VC_MIN_EFFECTLEVEL          (-96)     /*VC Minimum EffectLevel*/
-#define LVM_VC_MAX_EFFECTLEVEL          0         /*VC Maximum Effect level*/
+#define LVM_VC_MIN_EFFECTLEVEL (-96) /*VC Minimum EffectLevel*/
+#define LVM_VC_MAX_EFFECTLEVEL 0     /*VC Maximum Effect level*/
 
-#define LVM_BE_MIN_EFFECTLEVEL          0         /*BE Minimum EffectLevel*/
-#define LVM_BE_MAX_EFFECTLEVEL          15        /*BE Maximum Effect level*/
+#define LVM_BE_MIN_EFFECTLEVEL 0  /*BE Minimum EffectLevel*/
+#define LVM_BE_MAX_EFFECTLEVEL 15 /*BE Maximum Effect level*/
 
-#define LVM_EQNB_MIN_BAND_FREQ          20        /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_FREQ          24000     /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_BAND_GAIN          (-15)     /*EQNB Minimum Band Frequency*/
-#define LVM_EQNB_MAX_BAND_GAIN          15        /*EQNB Maximum Band Frequency*/
-#define LVM_EQNB_MIN_QFACTOR            25        /*EQNB Minimum Q Factor*/
-#define LVM_EQNB_MAX_QFACTOR            1200      /*EQNB Maximum Q Factor*/
-#define LVM_EQNB_MIN_LPF_FREQ           1000      /*EQNB Minimum Low Pass Corner frequency*/
-#define LVM_EQNB_MIN_HPF_FREQ           20        /*EQNB Minimum High Pass Corner frequency*/
-#define LVM_EQNB_MAX_HPF_FREQ           1000      /*EQNB Maximum High Pass Corner frequency*/
+#define LVM_EQNB_MIN_BAND_FREQ 20    /*EQNB Minimum Band Frequency*/
+#define LVM_EQNB_MAX_BAND_FREQ 24000 /*EQNB Maximum Band Frequency*/
+#define LVM_EQNB_MIN_BAND_GAIN (-15) /*EQNB Minimum Band Gain*/
+#define LVM_EQNB_MAX_BAND_GAIN 15    /*EQNB Maximum Band Gain*/
+#define LVM_EQNB_MIN_QFACTOR 25      /*EQNB Minimum Q Factor*/
+#define LVM_EQNB_MAX_QFACTOR 1200    /*EQNB Maximum Q Factor*/
+#define LVM_EQNB_MIN_LPF_FREQ 1000   /*EQNB Minimum Low Pass Corner frequency*/
+#define LVM_EQNB_MIN_HPF_FREQ 20     /*EQNB Minimum High Pass Corner frequency*/
+#define LVM_EQNB_MAX_HPF_FREQ 1000   /*EQNB Maximum High Pass Corner frequency*/
 
-#define LVM_CS_MIN_EFFECT_LEVEL         0         /*CS Minimum Effect Level*/
-#define LVM_CS_MAX_REVERB_LEVEL         100       /*CS Maximum Reverb Level*/
-#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100      /*Vitrualizer Maximum Reverb Level*/
+#define LVM_CS_MIN_EFFECT_LEVEL 0            /*CS Minimum Effect Level*/
+#define LVM_CS_MAX_REVERB_LEVEL 100          /*CS Maximum Reverb Level*/
+#define LVM_VIRTUALIZER_MAX_REVERB_LEVEL 100 /*Vitrualizer Maximum Reverb Level*/
 
-#define LVM_VC_MIXER_TIME              100       /*VC mixer time*/
-#define LVM_VC_BALANCE_MAX             96        /*VC balance max value*/
-#define LVM_VC_BALANCE_MIN             (-96)     /*VC balance min value*/
+#define LVM_VC_MIXER_TIME 100    /*VC mixer time*/
+#define LVM_VC_BALANCE_MAX 96    /*VC balance max value*/
+#define LVM_VC_BALANCE_MIN (-96) /*VC balance min value*/
 
 /* Algorithm masks */
-#define LVM_CS_MASK                     1
-#define LVM_EQNB_MASK                   2
-#define LVM_DBE_MASK                    4
-#define LVM_VC_MASK                     16
-#define LVM_TE_MASK                     32
-#define LVM_PSA_MASK                    2048
+#define LVM_CS_MASK 1
+#define LVM_EQNB_MASK 2
+#define LVM_DBE_MASK 4
+#define LVM_VC_MASK 16
+#define LVM_TE_MASK 32
+#define LVM_PSA_MASK 2048
 
 /************************************************************************************/
 /*                                                                                  */
@@ -113,133 +115,93 @@
 /*                                                                                  */
 /************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32              Size;               /* Region size in bytes */
-    LVM_UINT16              Alignment;          /* Byte alignment */
-    LVM_MemoryTypes_en      Type;               /* Region type */
-    void                    *pBaseAddress;      /* Pointer to the region base address */
-} LVM_IntMemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_IntMemoryRegion_t   Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVM_IntMemTab_t;
-
 /* Buffer Management */
-typedef struct
-{
-    LVM_FLOAT               *pScratch;          /* Bundle scratch buffer */
+typedef struct {
+    LVM_FLOAT* pScratch; /* Bundle scratch buffer */
 
-    LVM_INT16               BufferState;        /* Buffer status */
-#ifdef SUPPORT_MC
-    LVM_FLOAT               InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
-    LVM_FLOAT               InDelayBuffer[6 * MIN_INTERNAL_BLOCKSIZE]; /* Input buffer delay line, \
-                                                                           left and right */
-#endif
-    LVM_INT16               InDelaySamples;     /* Number of samples in the input delay buffer */
-#ifdef SUPPORT_MC
-    LVM_FLOAT               OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
-#else
-    LVM_FLOAT               OutDelayBuffer[2 * MIN_INTERNAL_BLOCKSIZE]; /* Output buffer delay \
-                                                                                      line */
-#endif
-    LVM_INT16               OutDelaySamples;    /* Number of samples in the output delay buffer, \
-                                                                             left and right */
-    LVM_INT16               SamplesToOutput;    /* Samples to write to the output */
+    LVM_INT16 BufferState; /* Buffer status */
+    LVM_FLOAT InDelayBuffer[3 * LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+    LVM_INT16 InDelaySamples; /* Number of samples in the input delay buffer */
+    LVM_FLOAT OutDelayBuffer[LVM_MAX_CHANNELS * MIN_INTERNAL_BLOCKSIZE];
+    LVM_INT16 OutDelaySamples; /* Number of samples in the output delay buffer,
+                                  left and right */
+    LVM_INT16 SamplesToOutput; /* Samples to write to the output */
 } LVM_Buffer_t;
 
-/* Filter taps */
-typedef struct
-{
-    Biquad_2I_Order1_FLOAT_Taps_t TrebleBoost_Taps;   /* Treble boost Taps */
-} LVM_TE_Data_t;
 
-/* Coefficients */
-typedef struct
-{
-    Biquad_FLOAT_Instance_t       TrebleBoost_State;  /* State for the treble boost filter */
-} LVM_TE_Coefs_t;
-
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVM_MemTab_t            MemoryTable;        /* Instance memory allocation table */
-    LVM_ControlParams_t     Params;             /* Control parameters */
-    LVM_InstParams_t        InstParams;         /* Instance parameters */
+    LVM_ControlParams_t Params;  /* Control parameters */
+    LVM_InstParams_t InstParams; /* Instance parameters */
 
     /* Private parameters */
-    LVM_UINT16              ControlPending;     /* Control flag to indicate update pending */
-    LVM_ControlParams_t     NewParams;          /* New control parameters pending update */
+    LVM_UINT16 ControlPending;     /* Control flag to indicate update pending */
+    LVM_ControlParams_t NewParams; /* New control parameters pending update */
 
     /* Buffer control */
-    LVM_INT16               InternalBlockSize;  /* Maximum internal block size */
-    LVM_Buffer_t            *pBufferManagement; /* Buffer management variables */
-    LVM_INT16               SamplesToProcess;   /* Input samples left to process */
-    LVM_FLOAT               *pInputSamples;     /* External input sample pointer */
-    LVM_FLOAT               *pOutputSamples;    /* External output sample pointer */
+    LVM_INT16 InternalBlockSize;     /* Maximum internal block size */
+    LVM_Buffer_t* pBufferManagement; /* Buffer management variables */
+    LVM_INT16 SamplesToProcess;      /* Input samples left to process */
+    LVM_FLOAT* pInputSamples;        /* External input sample pointer */
+    LVM_FLOAT* pOutputSamples;       /* External output sample pointer */
 
     /* Configuration number */
-    LVM_INT32               ConfigurationNumber;
-    LVM_INT32               BlickSizeMultiple;
+    LVM_INT32 ConfigurationNumber;
+    LVM_INT32 BlickSizeMultiple;
 
     /* DC removal */
-    Biquad_FLOAT_Instance_t       DC_RemovalInstance; /* DC removal filter instance */
+    Biquad_FLOAT_Instance_t DC_RemovalInstance; /* DC removal filter instance */
 
     /* Concert Sound */
-    LVCS_Handle_t           hCSInstance;        /* Concert Sound instance handle */
-    LVCS_Instance_t         CS_Instance;        /* Concert Sound instance */
-    LVM_INT16               CS_Active;          /* Control flag */
+    LVCS_Handle_t hCSInstance;   /* Concert Sound instance handle */
+    LVCS_Instance_t CS_Instance; /* Concert Sound instance */
+    LVM_INT16 CS_Active;         /* Control flag */
 
     /* Equalizer */
-    LVEQNB_Handle_t         hEQNBInstance;      /* N-Band Equaliser instance handle */
-    LVEQNB_Instance_t       EQNB_Instance;      /* N-Band Equaliser instance */
-    LVM_EQNB_BandDef_t      *pEQNB_BandDefs;    /* Local storage for new definitions */
-    LVM_EQNB_BandDef_t      *pEQNB_UserDefs;    /* Local storage for the user's definitions */
-    LVM_INT16               EQNB_Active;        /* Control flag */
+    LVEQNB_Handle_t hEQNBInstance;      /* N-Band Equaliser instance handle */
+    LVEQNB_Instance_t EQNB_Instance;    /* N-Band Equaliser instance */
+    LVM_EQNB_BandDef_t* pEQNB_BandDefs; /* Local storage for new definitions */
+    LVM_EQNB_BandDef_t* pEQNB_UserDefs; /* Local storage for the user's definitions */
+    LVM_INT16 EQNB_Active;              /* Control flag */
 
     /* Dynamic Bass Enhancement */
-    LVDBE_Handle_t          hDBEInstance;       /* Dynamic Bass Enhancement instance handle */
-    LVDBE_Instance_t        DBE_Instance;       /* Dynamic Bass Enhancement instance */
-    LVM_INT16               DBE_Active;         /* Control flag */
+    LVDBE_Handle_t hDBEInstance;   /* Dynamic Bass Enhancement instance handle */
+    LVDBE_Instance_t DBE_Instance; /* Dynamic Bass Enhancement instance */
+    LVM_INT16 DBE_Active;          /* Control flag */
 
     /* Volume Control */
-    LVMixer3_1St_FLOAT_st   VC_Volume;          /* Volume scaler */
-    LVMixer3_2St_FLOAT_st         VC_BalanceMix;      /* VC balance mixer */
-    LVM_INT16               VC_VolumedB;        /* Gain in dB */
-    LVM_INT16               VC_Active;          /* Control flag */
-    LVM_INT16               VC_AVLFixedVolume;  /* AVL fixed volume */
+    LVMixer3_1St_FLOAT_st VC_Volume;     /* Volume scaler */
+    LVMixer3_2St_FLOAT_st VC_BalanceMix; /* VC balance mixer */
+    LVM_INT16 VC_VolumedB;               /* Gain in dB */
+    LVM_INT16 VC_Active;                 /* Control flag */
+    LVM_INT16 VC_AVLFixedVolume;         /* AVL fixed volume */
 
     /* Treble Enhancement */
-    LVM_TE_Data_t           *pTE_Taps;          /* Treble boost Taps */
-    LVM_TE_Coefs_t          *pTE_State;         /* State for the treble boost filter */
-    LVM_INT16               TE_Active;          /* Control flag */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pTEBiquad; /* Biquad filter instance */
+    LVM_INT16 TE_Active;       /* Control flag */
 
     /* Headroom */
-    LVM_HeadroomParams_t    NewHeadroomParams;   /* New headroom parameters pending update */
-    LVM_HeadroomParams_t    HeadroomParams;      /* Headroom parameters */
-    LVM_HeadroomBandDef_t   *pHeadroom_BandDefs; /* Local storage for new definitions */
-    LVM_HeadroomBandDef_t   *pHeadroom_UserDefs; /* Local storage for the user's definitions */
-    LVM_UINT16              Headroom;            /* Value of the current headroom */
+    LVM_HeadroomParams_t NewHeadroomParams;    /* New headroom parameters pending update */
+    LVM_HeadroomParams_t HeadroomParams;       /* Headroom parameters */
+    LVM_HeadroomBandDef_t* pHeadroom_BandDefs; /* Local storage for new definitions */
+    LVM_HeadroomBandDef_t* pHeadroom_UserDefs; /* Local storage for the user's definitions */
+    LVM_UINT16 Headroom;                       /* Value of the current headroom */
 
     /* Spectrum Analyzer */
-    pLVPSA_Handle_t         hPSAInstance;       /* Spectrum Analyzer instance handle */
-    LVPSA_InstancePr_t      PSA_Instance;       /* Spectrum Analyzer instance */
-    LVPSA_InitParams_t      PSA_InitParams;     /* Spectrum Analyzer initialization parameters */
-    LVPSA_ControlParams_t   PSA_ControlParams;  /* Spectrum Analyzer control parameters */
-    LVM_INT16               PSA_GainOffset;     /* Tone control flag */
-    LVM_Callback            CallBack;
-    LVM_FLOAT               *pPSAInput;         /* PSA input pointer */
+    pLVPSA_Handle_t hPSAInstance;            /* Spectrum Analyzer instance handle */
+    LVPSA_InstancePr_t PSA_Instance;         /* Spectrum Analyzer instance */
+    LVPSA_InitParams_t PSA_InitParams;       /* Spectrum Analyzer initialization parameters */
+    LVPSA_ControlParams_t PSA_ControlParams; /* Spectrum Analyzer control parameters */
+    LVM_INT16 PSA_GainOffset;                /* Tone control flag */
+    LVM_Callback CallBack;
+    LVM_FLOAT* pPSAInput; /* PSA input pointer */
 
-    LVM_INT16              NoSmoothVolume;      /* Enable or disable smooth volume changes*/
+    LVM_INT16 NoSmoothVolume; /* Enable or disable smooth volume changes*/
 
-#ifdef SUPPORT_MC
-    LVM_INT16              NrChannels;
-    LVM_INT32              ChMask;
-#endif
+    LVM_INT16 NrChannels;
+    LVM_INT32 ChMask;
+    void* pScratch; /* Pointer to bundle scratch buffer*/
 
 } LVM_Instance_t;
 
@@ -249,32 +211,19 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t       hInstance);
+LVM_ReturnStatus_en LVM_ApplyNewSettings(LVM_Handle_t hInstance);
 
-void    LVM_SetTrebleBoost( LVM_Instance_t         *pInstance,
-                            LVM_ControlParams_t    *pParams);
+void LVM_SetTrebleBoost(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
 
-void    LVM_SetVolume(  LVM_Instance_t         *pInstance,
-                        LVM_ControlParams_t    *pParams);
+void LVM_SetVolume(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
 
-LVM_INT32    LVM_VCCallBack(void*   pBundleHandle,
-                            void*   pGeneralPurpose,
-                            short   CallBackParam);
+LVM_INT32 LVM_VCCallBack(void* pBundleHandle, void* pGeneralPurpose, short CallBackParam);
 
-void    LVM_SetHeadroom(    LVM_Instance_t         *pInstance,
-                            LVM_ControlParams_t    *pParams);
-void    LVM_BufferIn(   LVM_Handle_t      hInstance,
-                        const LVM_FLOAT   *pInData,
-                        LVM_FLOAT         **pToProcess,
-                        LVM_FLOAT         **pProcessed,
-                        LVM_UINT16        *pNumSamples);
-void    LVM_BufferOut(  LVM_Handle_t     hInstance,
-                        LVM_FLOAT        *pOutData,
-                        LVM_UINT16       *pNumSamples);
+void LVM_SetHeadroom(LVM_Instance_t* pInstance, LVM_ControlParams_t* pParams);
+void LVM_BufferIn(LVM_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT** pToProcess,
+                  LVM_FLOAT** pProcessed, LVM_UINT16* pNumSamples);
+void LVM_BufferOut(LVM_Handle_t hInstance, LVM_FLOAT* pOutData, LVM_UINT16* pNumSamples);
 
-LVM_INT32 LVM_AlgoCallBack(     void          *pBundleHandle,
-                                void          *pData,
-                                LVM_INT16     callbackId);
+LVM_INT32 LVM_AlgoCallBack(void* pBundleHandle, void* pData, LVM_INT16 callbackId);
 
-#endif      /* __LVM_PRIVATE_H__ */
-
+#endif /* __LVM_PRIVATE_H__ */
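For context on the struct change above: the raw pTE_Taps / pTE_State treble-boost pointers are replaced by a std::unique_ptr member, so the filter state is owned by the instance and released automatically. Below is a minimal sketch of that ownership pattern only; `Filter` is a placeholder type, since the real android::audio_utils::BiquadFilter constructor arguments are not shown in this hunk.

    #include <memory>

    // Placeholder for a filter object owned by the effect instance; the real
    // BiquadFilter type and its construction are not shown in this hunk.
    struct Filter {
        void process(float* out, const float* in, int frames) {
            (void)out; (void)in; (void)frames;  // no-op stand-in
        }
    };

    struct Instance {
        std::unique_ptr<Filter> treble;  // owned; freed when the instance is destroyed
    };

    void enableTreble(Instance& inst) {
        if (!inst.treble) {
            inst.treble = std::make_unique<Filter>();  // allocate lazily on first use
        }
    }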
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index dc86cfd..4eea04f 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -23,6 +23,7 @@
 #include <system/audio.h>
 
 #include "LVM_Private.h"
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 #include "LVM_Coeffs.h"
 
@@ -51,279 +52,166 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_ReturnStatus_en LVM_Process(LVM_Handle_t                hInstance,
-                                const LVM_FLOAT             *pInData,
-                                LVM_FLOAT                   *pOutData,
-                                LVM_UINT16                  NumSamples,
-                                LVM_UINT32                  AudioTime)
-{
-
-    LVM_Instance_t      *pInstance  = (LVM_Instance_t  *)hInstance;
-    LVM_UINT16          SampleCount = NumSamples;
-    LVM_FLOAT           *pInput     = (LVM_FLOAT *)pInData;
-    LVM_FLOAT           *pToProcess = (LVM_FLOAT *)pInData;
-    LVM_FLOAT           *pProcessed = pOutData;
-    LVM_ReturnStatus_en  Status;
-#ifdef SUPPORT_MC
-    LVM_INT32           NrChannels  = pInstance->NrChannels;
-    LVM_INT32           ChMask      = pInstance->ChMask;
+LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                LVM_FLOAT* pOutData, LVM_UINT16 NumSamples, LVM_UINT32 AudioTime) {
+    LVM_Instance_t* pInstance = (LVM_Instance_t*)hInstance;
+    LVM_UINT16 SampleCount = NumSamples;
+    LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pToProcess = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pProcessed = pOutData;
+    LVM_ReturnStatus_en Status;
+    LVM_INT32 NrChannels = pInstance->NrChannels;
+    LVM_INT32 ChMask = pInstance->ChMask;
 #define NrFrames SampleCount  // alias for clarity
-#endif
 
     /*
      * Check if the number of samples is zero
      */
-    if (NumSamples == 0)
-    {
-        return(LVM_SUCCESS);
+    if (NumSamples == 0) {
+        return (LVM_SUCCESS);
     }
 
     /*
      * Check valid pointers have been given
      */
-    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return (LVM_NULLADDRESS);
     }
 
     /*
      * For unmanaged mode only
      */
-    if(pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS)
-    {
-         /*
+    if (pInstance->InstParams.BufferMode == LVM_UNMANAGED_BUFFERS) {
+        /*
          * Check if the number of samples is a good multiple (unmanaged mode only)
          */
-        if((NumSamples % pInstance->BlickSizeMultiple) != 0)
-        {
-            return(LVM_INVALIDNUMSAMPLES);
+        if ((NumSamples % pInstance->BlickSizeMultiple) != 0) {
+            return (LVM_INVALIDNUMSAMPLES);
         }
 
         /*
          * Check the buffer alignment
          */
-        if((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
-        {
-            return(LVM_ALIGNMENTERROR);
+        if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
+            return (LVM_ALIGNMENTERROR);
         }
     }
 
     /*
      * Update new parameters if necessary
      */
-    if (pInstance->ControlPending == LVM_TRUE)
-    {
+    if (pInstance->ControlPending == LVM_TRUE) {
         Status = LVM_ApplyNewSettings(hInstance);
-#ifdef SUPPORT_MC
         /* Update the local variable NrChannels from pInstance->NrChannels value */
         NrChannels = pInstance->NrChannels;
-        ChMask     = pInstance->ChMask;
-#endif
+        ChMask = pInstance->ChMask;
 
-        if(Status != LVM_SUCCESS)
-        {
+        if (Status != LVM_SUCCESS) {
             return Status;
         }
     }
 
     /*
-     * Convert from Mono if necessary
-     */
-    if (pInstance->Params.SourceFormat == LVM_MONO)
-    {
-        MonoTo2I_Float(pInData,                                /* Source */
-                       pOutData,                               /* Destination */
-                       (LVM_INT16)NumSamples);                 /* Number of input samples */
-        pInput     = pOutData;
-        pToProcess = pOutData;
-#ifdef SUPPORT_MC
-        NrChannels = 2;
-        ChMask     = AUDIO_CHANNEL_OUT_STEREO;
-#endif
-    }
-
-    /*
      * Process the data with managed buffers
      */
-    while (SampleCount != 0)
-    {
+    while (SampleCount != 0) {
         /*
          * Manage the input buffer and frame processing
          */
-        LVM_BufferIn(hInstance,
-                     pInput,
-                     &pToProcess,
-                     &pProcessed,
-                     &SampleCount);
+        LVM_BufferIn(hInstance, pInput, &pToProcess, &pProcessed, &SampleCount);
 
         /*
          * Only process data when SampleCount is non-zero; a zero count can occur when
          * the BufferIn routine is working in managed mode.
          */
-        if (SampleCount != 0)
-        {
+        if (SampleCount != 0) {
             /*
              * Apply ConcertSound if required
              */
-            if (pInstance->CS_Active == LVM_TRUE)
-            {
-                (void)LVCS_Process(pInstance->hCSInstance,     /* Concert Sound instance handle */
-                                   pToProcess,
-                                   pProcessed,
-                                   SampleCount);
+            if (pInstance->CS_Active == LVM_TRUE) {
+                (void)LVCS_Process(pInstance->hCSInstance, /* Concert Sound instance handle */
+                                   pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Apply volume if required
              */
-            if (pInstance->VC_Active!=0)
-            {
-#ifdef SUPPORT_MC
-                LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume,
-                                       pToProcess,
-                                       pProcessed,
-                                       (LVM_INT16)(NrFrames),
-                                       NrChannels);
-#else
-                LVC_MixSoft_1St_D16C31_SAT(&pInstance->VC_Volume,
-                                       pToProcess,
-                                       pProcessed,
-                                       (LVM_INT16)(2 * SampleCount));     /* Left and right*/
-#endif
+            if (pInstance->VC_Active != 0) {
+                LVC_MixSoft_Mc_D16C31_SAT(&pInstance->VC_Volume, pToProcess, pProcessed,
+                                          (LVM_INT16)(NrFrames), NrChannels);
                 pToProcess = pProcessed;
             }
 
             /*
              * Call N-Band equaliser if enabled
              */
-            if (pInstance->EQNB_Active == LVM_TRUE)
-            {
-                LVEQNB_Process(pInstance->hEQNBInstance,    /* N-Band equaliser instance handle */
-                               pToProcess,
-                               pProcessed,
-                               SampleCount);
+            if (pInstance->EQNB_Active == LVM_TRUE) {
+                LVEQNB_Process(pInstance->hEQNBInstance, /* N-Band equaliser instance handle */
+                               pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Call bass enhancement if enabled
              */
-            if (pInstance->DBE_Active == LVM_TRUE)
-            {
-                LVDBE_Process(pInstance->hDBEInstance,       /* Dynamic Bass Enhancement \
-                                                                instance handle */
-                              pToProcess,
-                              pProcessed,
-                              SampleCount);
+            if (pInstance->DBE_Active == LVM_TRUE) {
+                LVDBE_Process(pInstance->hDBEInstance, /* Dynamic Bass Enhancement \
+                                                          instance handle */
+                              pToProcess, pProcessed, SampleCount);
                 pToProcess = pProcessed;
             }
 
             /*
              * Bypass mode or everything off, so copy the input to the output
              */
-            if (pToProcess != pProcessed)
-            {
-#ifdef SUPPORT_MC
-                Copy_Float(pToProcess,                             /* Source */
-                           pProcessed,                             /* Destination */
-                           (LVM_INT16)(NrChannels * NrFrames));    /* Copy all samples */
-#else
-                Copy_Float(pToProcess,                             /* Source */
-                           pProcessed,                             /* Destination */
-                           (LVM_INT16)(2 * SampleCount));          /* Left and right */
-#endif
+            if (pToProcess != pProcessed) {
+                Copy_Float(pToProcess,                          /* Source */
+                           pProcessed,                          /* Destination */
+                           (LVM_INT16)(NrChannels * NrFrames)); /* Copy all samples */
             }
 
             /*
              * Apply treble boost if required
              */
-            if (pInstance->TE_Active == LVM_TRUE)
-            {
+            if (pInstance->TE_Active == LVM_TRUE) {
                 /*
                  * Apply the filter
                  */
-#ifdef SUPPORT_MC
-                FO_Mc_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
-                                           pProcessed,
-                                           pProcessed,
-                                           (LVM_INT16)NrFrames,
-                                           (LVM_INT16)NrChannels);
-#else
-                FO_2I_D16F32C15_LShx_TRC_WRA_01(&pInstance->pTE_State->TrebleBoost_State,
-                                           pProcessed,
-                                           pProcessed,
-                                           (LVM_INT16)SampleCount);
-#endif
-
+                pInstance->pTEBiquad->process(pProcessed, pProcessed, NrFrames);
+                for (auto i = 0; i < NrChannels * NrFrames; i++) {
+                    pProcessed[i] = LVM_Clamp(pProcessed[i]);
+                }
             }
-#ifdef SUPPORT_MC
             /*
              * Volume balance
              */
-            LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix,
-                                          pProcessed,
-                                          pProcessed,
-                                          NrFrames,
-                                          NrChannels,
-                                          ChMask);
-#else
-            /*
-             * Volume balance
-             */
-            LVC_MixSoft_1St_2i_D16C31_SAT(&pInstance->VC_BalanceMix,
-                                          pProcessed,
-                                          pProcessed,
-                                          SampleCount);
-#endif
+            LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix, pProcessed, pProcessed,
+                                         NrFrames, NrChannels, ChMask);
 
             /*
              * Perform Parametric Spectrum Analysis
              */
             if ((pInstance->Params.PSA_Enable == LVM_PSA_ON) &&
-                                            (pInstance->InstParams.PSA_Included == LVM_PSA_ON))
-            {
-#ifdef SUPPORT_MC
-                FromMcToMono_Float(pProcessed,
-                                   pInstance->pPSAInput,
-                                   (LVM_INT16)(NrFrames),
+                (pInstance->InstParams.PSA_Included == LVM_PSA_ON)) {
+                FromMcToMono_Float(pProcessed, pInstance->pPSAInput, (LVM_INT16)(NrFrames),
                                    NrChannels);
-#else
-                From2iToMono_Float(pProcessed,
-                                   pInstance->pPSAInput,
-                                   (LVM_INT16)(SampleCount));
-#endif
 
-                LVPSA_Process(pInstance->hPSAInstance,
-                        pInstance->pPSAInput,
-                        (LVM_UINT16)(SampleCount),
-                        AudioTime);
+                LVPSA_Process(pInstance->hPSAInstance, pInstance->pPSAInput,
+                              (LVM_UINT16)(SampleCount), AudioTime);
             }
 
             /*
              * DC removal
              */
-#ifdef SUPPORT_MC
-            DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
-                                 pProcessed,
-                                 pProcessed,
-                                 (LVM_INT16)NrFrames,
-                                 NrChannels);
-#else
-            DC_2I_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance,
-                                 pProcessed,
-                                 pProcessed,
-                                 (LVM_INT16)SampleCount);
-#endif
+            DC_Mc_D16_TRC_WRA_01(&pInstance->DC_RemovalInstance, pProcessed, pProcessed,
+                                 (LVM_INT16)NrFrames, NrChannels);
         }
         /*
          * Manage the output buffer
          */
-        LVM_BufferOut(hInstance,
-                      pOutData,
-                      &SampleCount);
-
+        LVM_BufferOut(hInstance, pOutData, &SampleCount);
     }
 
-    return(LVM_SUCCESS);
+    return (LVM_SUCCESS);
 }
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
index 66392e2..860196b 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.cpp
@@ -30,431 +30,297 @@
 /*                                                                                  */
 /************************************************************************************/
 
-FO_FLOAT_LShx_Coefs_t    LVM_TrebleBoostCoefs[] = {
+FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[] = {
 
-                    /* 22kHz sampling rate */
-                    {HPF_Fs22050_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs22050_Gain1_A0,
-                     -HPF_Fs22050_Gain1_B1},
-                    {HPF_Fs22050_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs22050_Gain2_A0,
-                     -HPF_Fs22050_Gain2_B1},
-                    {HPF_Fs22050_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs22050_Gain3_A0,
-                     -HPF_Fs22050_Gain3_B1},
-                    {HPF_Fs22050_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs22050_Gain4_A0,
-                     -HPF_Fs22050_Gain4_B1},
-                    {HPF_Fs22050_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs22050_Gain5_A0,
-                     -HPF_Fs22050_Gain5_B1},
-                    {HPF_Fs22050_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs22050_Gain6_A0,
-                     -HPF_Fs22050_Gain6_B1},
-                    {HPF_Fs22050_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs22050_Gain7_A0,
-                     -HPF_Fs22050_Gain7_B1},
-                    {HPF_Fs22050_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs22050_Gain8_A0,
-                     -HPF_Fs22050_Gain8_B1},
-                    {HPF_Fs22050_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs22050_Gain9_A0,
-                     -HPF_Fs22050_Gain9_B1},
-                    {HPF_Fs22050_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs22050_Gain10_A0,
-                     -HPF_Fs22050_Gain10_B1},
-                    {HPF_Fs22050_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs22050_Gain11_A0,
-                     -HPF_Fs22050_Gain11_B1},
-                    {HPF_Fs22050_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs22050_Gain12_A0,
-                     -HPF_Fs22050_Gain12_B1},
-                    {HPF_Fs22050_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs22050_Gain13_A0,
-                     -HPF_Fs22050_Gain13_B1},
-                    {HPF_Fs22050_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs22050_Gain14_A0,
-                     -HPF_Fs22050_Gain14_B1},
-                    {HPF_Fs22050_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs22050_Gain15_A0,
-                     -HPF_Fs22050_Gain15_B1},
+        /* 22kHz sampling rate */
+        {HPF_Fs22050_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs22050_Gain1_A0, -HPF_Fs22050_Gain1_B1},
+        {HPF_Fs22050_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs22050_Gain2_A0, -HPF_Fs22050_Gain2_B1},
+        {HPF_Fs22050_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs22050_Gain3_A0, -HPF_Fs22050_Gain3_B1},
+        {HPF_Fs22050_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs22050_Gain4_A0, -HPF_Fs22050_Gain4_B1},
+        {HPF_Fs22050_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs22050_Gain5_A0, -HPF_Fs22050_Gain5_B1},
+        {HPF_Fs22050_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs22050_Gain6_A0, -HPF_Fs22050_Gain6_B1},
+        {HPF_Fs22050_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs22050_Gain7_A0, -HPF_Fs22050_Gain7_B1},
+        {HPF_Fs22050_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs22050_Gain8_A0, -HPF_Fs22050_Gain8_B1},
+        {HPF_Fs22050_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs22050_Gain9_A0, -HPF_Fs22050_Gain9_B1},
+        {HPF_Fs22050_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs22050_Gain10_A0, -HPF_Fs22050_Gain10_B1},
+        {HPF_Fs22050_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs22050_Gain11_A0, -HPF_Fs22050_Gain11_B1},
+        {HPF_Fs22050_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs22050_Gain12_A0, -HPF_Fs22050_Gain12_B1},
+        {HPF_Fs22050_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs22050_Gain13_A0, -HPF_Fs22050_Gain13_B1},
+        {HPF_Fs22050_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs22050_Gain14_A0, -HPF_Fs22050_Gain14_B1},
+        {HPF_Fs22050_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs22050_Gain15_A0, -HPF_Fs22050_Gain15_B1},
 
-                    /* 24kHz sampling rate */
-                    {HPF_Fs24000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs24000_Gain1_A0,
-                     -HPF_Fs24000_Gain1_B1},
-                    {HPF_Fs24000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs24000_Gain2_A0,
-                     -HPF_Fs24000_Gain2_B1},
-                    {HPF_Fs24000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs24000_Gain3_A0,
-                     -HPF_Fs24000_Gain3_B1},
-                    {HPF_Fs24000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs24000_Gain4_A0,
-                     -HPF_Fs24000_Gain4_B1},
-                    {HPF_Fs24000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs24000_Gain5_A0,
-                     -HPF_Fs24000_Gain5_B1},
-                    {HPF_Fs24000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs24000_Gain6_A0,
-                     -HPF_Fs24000_Gain6_B1},
-                    {HPF_Fs24000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs24000_Gain7_A0,
-                     -HPF_Fs24000_Gain7_B1},
-                    {HPF_Fs24000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs24000_Gain8_A0,
-                     -HPF_Fs24000_Gain8_B1},
-                    {HPF_Fs24000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs24000_Gain9_A0,
-                     -HPF_Fs24000_Gain9_B1},
-                    {HPF_Fs24000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs24000_Gain10_A0,
-                     -HPF_Fs24000_Gain10_B1},
-                    {HPF_Fs24000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs24000_Gain11_A0,
-                     -HPF_Fs24000_Gain11_B1},
-                    {HPF_Fs24000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs24000_Gain12_A0,
-                     -HPF_Fs24000_Gain12_B1},
-                    {HPF_Fs24000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs24000_Gain13_A0,
-                     -HPF_Fs24000_Gain13_B1},
-                    {HPF_Fs24000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs24000_Gain14_A0,
-                     -HPF_Fs24000_Gain14_B1},
-                    {HPF_Fs24000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs24000_Gain15_A0,
-                     -HPF_Fs24000_Gain15_B1},
+        /* 24kHz sampling rate */
+        {HPF_Fs24000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs24000_Gain1_A0, -HPF_Fs24000_Gain1_B1},
+        {HPF_Fs24000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs24000_Gain2_A0, -HPF_Fs24000_Gain2_B1},
+        {HPF_Fs24000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs24000_Gain3_A0, -HPF_Fs24000_Gain3_B1},
+        {HPF_Fs24000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs24000_Gain4_A0, -HPF_Fs24000_Gain4_B1},
+        {HPF_Fs24000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs24000_Gain5_A0, -HPF_Fs24000_Gain5_B1},
+        {HPF_Fs24000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs24000_Gain6_A0, -HPF_Fs24000_Gain6_B1},
+        {HPF_Fs24000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs24000_Gain7_A0, -HPF_Fs24000_Gain7_B1},
+        {HPF_Fs24000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs24000_Gain8_A0, -HPF_Fs24000_Gain8_B1},
+        {HPF_Fs24000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs24000_Gain9_A0, -HPF_Fs24000_Gain9_B1},
+        {HPF_Fs24000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs24000_Gain10_A0, -HPF_Fs24000_Gain10_B1},
+        {HPF_Fs24000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs24000_Gain11_A0, -HPF_Fs24000_Gain11_B1},
+        {HPF_Fs24000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs24000_Gain12_A0, -HPF_Fs24000_Gain12_B1},
+        {HPF_Fs24000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs24000_Gain13_A0, -HPF_Fs24000_Gain13_B1},
+        {HPF_Fs24000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs24000_Gain14_A0, -HPF_Fs24000_Gain14_B1},
+        {HPF_Fs24000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs24000_Gain15_A0, -HPF_Fs24000_Gain15_B1},
 
-                    /* 32kHz sampling rate */
-                    {HPF_Fs32000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs32000_Gain1_A0,
-                     -HPF_Fs32000_Gain1_B1},
-                    {HPF_Fs32000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs32000_Gain2_A0,
-                     -HPF_Fs32000_Gain2_B1},
-                    {HPF_Fs32000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs32000_Gain3_A0,
-                     -HPF_Fs32000_Gain3_B1},
-                    {HPF_Fs32000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs32000_Gain4_A0,
-                     -HPF_Fs32000_Gain4_B1},
-                    {HPF_Fs32000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs32000_Gain5_A0,
-                     -HPF_Fs32000_Gain5_B1},
-                    {HPF_Fs32000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs32000_Gain6_A0,
-                     -HPF_Fs32000_Gain6_B1},
-                    {HPF_Fs32000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs32000_Gain7_A0,
-                     -HPF_Fs32000_Gain7_B1},
-                    {HPF_Fs32000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs32000_Gain8_A0,
-                     -HPF_Fs32000_Gain8_B1},
-                    {HPF_Fs32000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs32000_Gain9_A0,
-                     -HPF_Fs32000_Gain9_B1},
-                    {HPF_Fs32000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs32000_Gain10_A0,
-                     -HPF_Fs32000_Gain10_B1},
-                    {HPF_Fs32000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs32000_Gain11_A0,
-                     -HPF_Fs32000_Gain11_B1},
-                    {HPF_Fs32000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs32000_Gain12_A0,
-                     -HPF_Fs32000_Gain12_B1},
-                    {HPF_Fs32000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs32000_Gain13_A0,
-                     -HPF_Fs32000_Gain13_B1},
-                    {HPF_Fs32000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs32000_Gain14_A0,
-                     -HPF_Fs32000_Gain14_B1},
-                    {HPF_Fs32000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs32000_Gain15_A0,
-                     -HPF_Fs32000_Gain15_B1},
+        /* 32kHz sampling rate */
+        {HPF_Fs32000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs32000_Gain1_A0, -HPF_Fs32000_Gain1_B1},
+        {HPF_Fs32000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs32000_Gain2_A0, -HPF_Fs32000_Gain2_B1},
+        {HPF_Fs32000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs32000_Gain3_A0, -HPF_Fs32000_Gain3_B1},
+        {HPF_Fs32000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs32000_Gain4_A0, -HPF_Fs32000_Gain4_B1},
+        {HPF_Fs32000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs32000_Gain5_A0, -HPF_Fs32000_Gain5_B1},
+        {HPF_Fs32000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs32000_Gain6_A0, -HPF_Fs32000_Gain6_B1},
+        {HPF_Fs32000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs32000_Gain7_A0, -HPF_Fs32000_Gain7_B1},
+        {HPF_Fs32000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs32000_Gain8_A0, -HPF_Fs32000_Gain8_B1},
+        {HPF_Fs32000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs32000_Gain9_A0, -HPF_Fs32000_Gain9_B1},
+        {HPF_Fs32000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs32000_Gain10_A0, -HPF_Fs32000_Gain10_B1},
+        {HPF_Fs32000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs32000_Gain11_A0, -HPF_Fs32000_Gain11_B1},
+        {HPF_Fs32000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs32000_Gain12_A0, -HPF_Fs32000_Gain12_B1},
+        {HPF_Fs32000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs32000_Gain13_A0, -HPF_Fs32000_Gain13_B1},
+        {HPF_Fs32000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs32000_Gain14_A0, -HPF_Fs32000_Gain14_B1},
+        {HPF_Fs32000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs32000_Gain15_A0, -HPF_Fs32000_Gain15_B1},
 
-                    /* 44kHz sampling rate */
-                    {HPF_Fs44100_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs44100_Gain1_A0,
-                     -HPF_Fs44100_Gain1_B1,},
-                    {HPF_Fs44100_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs44100_Gain2_A0,
-                     -HPF_Fs44100_Gain2_B1},
-                    {HPF_Fs44100_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs44100_Gain3_A0,
-                     -HPF_Fs44100_Gain3_B1},
-                    {HPF_Fs44100_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs44100_Gain4_A0,
-                     -HPF_Fs44100_Gain4_B1},
-                    {HPF_Fs44100_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs44100_Gain5_A0,
-                     -HPF_Fs44100_Gain5_B1},
-                    {HPF_Fs44100_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs44100_Gain6_A0,
-                     -HPF_Fs44100_Gain6_B1},
-                    {HPF_Fs44100_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs44100_Gain7_A0,
-                     -HPF_Fs44100_Gain7_B1},
-                    {HPF_Fs44100_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs44100_Gain8_A0,
-                     -HPF_Fs44100_Gain8_B1},
-                    {HPF_Fs44100_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs44100_Gain9_A0,
-                     -HPF_Fs44100_Gain9_B1},
-                    {HPF_Fs44100_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs44100_Gain10_A0,
-                     -HPF_Fs44100_Gain10_B1},
-                    {HPF_Fs44100_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs44100_Gain11_A0,
-                     -HPF_Fs44100_Gain11_B1},
-                    {HPF_Fs44100_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs44100_Gain12_A0,
-                     -HPF_Fs44100_Gain12_B1},
-                    {HPF_Fs44100_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs44100_Gain13_A0,
-                     -HPF_Fs44100_Gain13_B1},
-                    {HPF_Fs44100_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs44100_Gain14_A0,
-                     -HPF_Fs44100_Gain14_B1},
-                    {HPF_Fs44100_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs44100_Gain15_A0,
-                     -HPF_Fs44100_Gain15_B1},
+        /* 44kHz sampling rate */
+        {
+                HPF_Fs44100_Gain1_A1, /* Gain setting 1 */
+                HPF_Fs44100_Gain1_A0,
+                -HPF_Fs44100_Gain1_B1,
+        },
+        {HPF_Fs44100_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs44100_Gain2_A0, -HPF_Fs44100_Gain2_B1},
+        {HPF_Fs44100_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs44100_Gain3_A0, -HPF_Fs44100_Gain3_B1},
+        {HPF_Fs44100_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs44100_Gain4_A0, -HPF_Fs44100_Gain4_B1},
+        {HPF_Fs44100_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs44100_Gain5_A0, -HPF_Fs44100_Gain5_B1},
+        {HPF_Fs44100_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs44100_Gain6_A0, -HPF_Fs44100_Gain6_B1},
+        {HPF_Fs44100_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs44100_Gain7_A0, -HPF_Fs44100_Gain7_B1},
+        {HPF_Fs44100_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs44100_Gain8_A0, -HPF_Fs44100_Gain8_B1},
+        {HPF_Fs44100_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs44100_Gain9_A0, -HPF_Fs44100_Gain9_B1},
+        {HPF_Fs44100_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs44100_Gain10_A0, -HPF_Fs44100_Gain10_B1},
+        {HPF_Fs44100_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs44100_Gain11_A0, -HPF_Fs44100_Gain11_B1},
+        {HPF_Fs44100_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs44100_Gain12_A0, -HPF_Fs44100_Gain12_B1},
+        {HPF_Fs44100_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs44100_Gain13_A0, -HPF_Fs44100_Gain13_B1},
+        {HPF_Fs44100_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs44100_Gain14_A0, -HPF_Fs44100_Gain14_B1},
+        {HPF_Fs44100_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs44100_Gain15_A0, -HPF_Fs44100_Gain15_B1},
 
-                    /* 48kHz sampling rate */
-                    {HPF_Fs48000_Gain1_A1,             /* Gain setting 1 */
-                     HPF_Fs48000_Gain1_A0,
-                     -HPF_Fs48000_Gain1_B1},
-                    {HPF_Fs48000_Gain2_A1,             /* Gain setting 2 */
-                     HPF_Fs48000_Gain2_A0,
-                     -HPF_Fs48000_Gain2_B1},
-                    {HPF_Fs48000_Gain3_A1,             /* Gain setting 3 */
-                     HPF_Fs48000_Gain3_A0,
-                     -HPF_Fs48000_Gain3_B1},
-                    {HPF_Fs48000_Gain4_A1,             /* Gain setting 4 */
-                     HPF_Fs48000_Gain4_A0,
-                     -HPF_Fs48000_Gain4_B1},
-                    {HPF_Fs48000_Gain5_A1,             /* Gain setting 5 */
-                     HPF_Fs48000_Gain5_A0,
-                     -HPF_Fs48000_Gain5_B1},
-                    {HPF_Fs48000_Gain6_A1,             /* Gain setting 6 */
-                     HPF_Fs48000_Gain6_A0,
-                     -HPF_Fs48000_Gain6_B1},
-                    {HPF_Fs48000_Gain7_A1,             /* Gain setting 7 */
-                     HPF_Fs48000_Gain7_A0,
-                     -HPF_Fs48000_Gain7_B1},
-                    {HPF_Fs48000_Gain8_A1,             /* Gain setting 8 */
-                     HPF_Fs48000_Gain8_A0,
-                     -HPF_Fs48000_Gain8_B1},
-                    {HPF_Fs48000_Gain9_A1,             /* Gain setting 9 */
-                     HPF_Fs48000_Gain9_A0,
-                     -HPF_Fs48000_Gain9_B1},
-                    {HPF_Fs48000_Gain10_A1,             /* Gain setting 10 */
-                     HPF_Fs48000_Gain10_A0,
-                     -HPF_Fs48000_Gain10_B1},
-                    {HPF_Fs48000_Gain11_A1,             /* Gain setting 11 */
-                     HPF_Fs48000_Gain11_A0,
-                     -HPF_Fs48000_Gain11_B1},
-                    {HPF_Fs48000_Gain12_A1,             /* Gain setting 12 */
-                     HPF_Fs48000_Gain12_A0,
-                     -HPF_Fs48000_Gain12_B1},
-                    {HPF_Fs48000_Gain13_A1,             /* Gain setting 13 */
-                     HPF_Fs48000_Gain13_A0,
-                     -HPF_Fs48000_Gain13_B1},
-                    {HPF_Fs48000_Gain14_A1,             /* Gain setting 14 */
-                     HPF_Fs48000_Gain14_A0,
-                     -HPF_Fs48000_Gain14_B1},
-                    {HPF_Fs48000_Gain15_A1,             /* Gain setting 15 */
-                     HPF_Fs48000_Gain15_A0,
-                     -HPF_Fs48000_Gain15_B1}
-                    ,
-                    /* 88kHz Sampling rate */
-                    {HPF_Fs88200_Gain1_A1,             /* Gain Setting  1 */
-                    HPF_Fs88200_Gain1_A0,
-                    -HPF_Fs88200_Gain1_B1},
-                    {HPF_Fs88200_Gain2_A1,             /* Gain Setting  2 */
-                    HPF_Fs88200_Gain2_A0,
-                    -HPF_Fs88200_Gain2_B1},
-                    {HPF_Fs88200_Gain3_A1,             /* Gain Setting  3 */
-                    HPF_Fs88200_Gain3_A0,
-                    -HPF_Fs88200_Gain3_B1},
-                    {HPF_Fs88200_Gain4_A1,             /* Gain Setting  4 */
-                    HPF_Fs88200_Gain4_A0,
-                    -HPF_Fs88200_Gain4_B1},
-                    {HPF_Fs88200_Gain5_A1,             /* Gain Setting  5 */
-                    HPF_Fs88200_Gain5_A0,
-                    -HPF_Fs88200_Gain5_B1},
-                    {HPF_Fs88200_Gain6_A1,             /* Gain Setting  6 */
-                    HPF_Fs88200_Gain6_A0,
-                    -HPF_Fs88200_Gain6_B1},
-                    {HPF_Fs88200_Gain7_A1,             /* Gain Setting  7 */
-                    HPF_Fs88200_Gain7_A0,
-                    -HPF_Fs88200_Gain7_B1},
-                    {HPF_Fs88200_Gain8_A1,             /* Gain Setting  8 */
-                    HPF_Fs88200_Gain8_A0,
-                    -HPF_Fs88200_Gain8_B1},
-                    {HPF_Fs88200_Gain9_A1,             /* Gain Setting  9 */
-                    HPF_Fs88200_Gain9_A0,
-                    -HPF_Fs88200_Gain9_B1},
-                    {HPF_Fs88200_Gain10_A1,             /* Gain Setting  10 */
-                    HPF_Fs88200_Gain10_A0,
-                    -HPF_Fs88200_Gain10_B1},
-                    {HPF_Fs88200_Gain11_A1,             /* Gain Setting  11 */
-                    HPF_Fs88200_Gain11_A0,
-                    -HPF_Fs88200_Gain11_B1},
-                    {HPF_Fs88200_Gain12_A1,             /* Gain Setting  12 */
-                    HPF_Fs88200_Gain12_A0,
-                    -HPF_Fs88200_Gain12_B1},
-                    {HPF_Fs88200_Gain13_A1,             /* Gain Setting  13 */
-                    HPF_Fs88200_Gain13_A0,
-                    -HPF_Fs88200_Gain13_B1},
-                    {HPF_Fs88200_Gain14_A1,             /* Gain Setting  14 */
-                    HPF_Fs88200_Gain14_A0,
-                    -HPF_Fs88200_Gain14_B1},
-                    {HPF_Fs88200_Gain15_A1,             /* Gain Setting  15 */
-                    HPF_Fs88200_Gain15_A0,
-                    -HPF_Fs88200_Gain15_B1},
+        /* 48kHz sampling rate */
+        {HPF_Fs48000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs48000_Gain1_A0, -HPF_Fs48000_Gain1_B1},
+        {HPF_Fs48000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs48000_Gain2_A0, -HPF_Fs48000_Gain2_B1},
+        {HPF_Fs48000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs48000_Gain3_A0, -HPF_Fs48000_Gain3_B1},
+        {HPF_Fs48000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs48000_Gain4_A0, -HPF_Fs48000_Gain4_B1},
+        {HPF_Fs48000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs48000_Gain5_A0, -HPF_Fs48000_Gain5_B1},
+        {HPF_Fs48000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs48000_Gain6_A0, -HPF_Fs48000_Gain6_B1},
+        {HPF_Fs48000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs48000_Gain7_A0, -HPF_Fs48000_Gain7_B1},
+        {HPF_Fs48000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs48000_Gain8_A0, -HPF_Fs48000_Gain8_B1},
+        {HPF_Fs48000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs48000_Gain9_A0, -HPF_Fs48000_Gain9_B1},
+        {HPF_Fs48000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs48000_Gain10_A0, -HPF_Fs48000_Gain10_B1},
+        {HPF_Fs48000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs48000_Gain11_A0, -HPF_Fs48000_Gain11_B1},
+        {HPF_Fs48000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs48000_Gain12_A0, -HPF_Fs48000_Gain12_B1},
+        {HPF_Fs48000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs48000_Gain13_A0, -HPF_Fs48000_Gain13_B1},
+        {HPF_Fs48000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs48000_Gain14_A0, -HPF_Fs48000_Gain14_B1},
+        {HPF_Fs48000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs48000_Gain15_A0, -HPF_Fs48000_Gain15_B1},
+        /* 88kHz Sampling rate */
+        {HPF_Fs88200_Gain1_A1, /* Gain Setting  1 */
+         HPF_Fs88200_Gain1_A0, -HPF_Fs88200_Gain1_B1},
+        {HPF_Fs88200_Gain2_A1, /* Gain Setting  2 */
+         HPF_Fs88200_Gain2_A0, -HPF_Fs88200_Gain2_B1},
+        {HPF_Fs88200_Gain3_A1, /* Gain Setting  3 */
+         HPF_Fs88200_Gain3_A0, -HPF_Fs88200_Gain3_B1},
+        {HPF_Fs88200_Gain4_A1, /* Gain Setting  4 */
+         HPF_Fs88200_Gain4_A0, -HPF_Fs88200_Gain4_B1},
+        {HPF_Fs88200_Gain5_A1, /* Gain Setting  5 */
+         HPF_Fs88200_Gain5_A0, -HPF_Fs88200_Gain5_B1},
+        {HPF_Fs88200_Gain6_A1, /* Gain Setting  6 */
+         HPF_Fs88200_Gain6_A0, -HPF_Fs88200_Gain6_B1},
+        {HPF_Fs88200_Gain7_A1, /* Gain Setting  7 */
+         HPF_Fs88200_Gain7_A0, -HPF_Fs88200_Gain7_B1},
+        {HPF_Fs88200_Gain8_A1, /* Gain Setting  8 */
+         HPF_Fs88200_Gain8_A0, -HPF_Fs88200_Gain8_B1},
+        {HPF_Fs88200_Gain9_A1, /* Gain Setting  9 */
+         HPF_Fs88200_Gain9_A0, -HPF_Fs88200_Gain9_B1},
+        {HPF_Fs88200_Gain10_A1, /* Gain Setting  10 */
+         HPF_Fs88200_Gain10_A0, -HPF_Fs88200_Gain10_B1},
+        {HPF_Fs88200_Gain11_A1, /* Gain Setting  11 */
+         HPF_Fs88200_Gain11_A0, -HPF_Fs88200_Gain11_B1},
+        {HPF_Fs88200_Gain12_A1, /* Gain Setting  12 */
+         HPF_Fs88200_Gain12_A0, -HPF_Fs88200_Gain12_B1},
+        {HPF_Fs88200_Gain13_A1, /* Gain Setting  13 */
+         HPF_Fs88200_Gain13_A0, -HPF_Fs88200_Gain13_B1},
+        {HPF_Fs88200_Gain14_A1, /* Gain Setting  14 */
+         HPF_Fs88200_Gain14_A0, -HPF_Fs88200_Gain14_B1},
+        {HPF_Fs88200_Gain15_A1, /* Gain Setting  15 */
+         HPF_Fs88200_Gain15_A0, -HPF_Fs88200_Gain15_B1},
 
-                    /* 96kHz sampling rate */
-                    {HPF_Fs96000_Gain1_A1,             /* Gain setting 1 */
-                    HPF_Fs96000_Gain1_A0,
-                    -HPF_Fs96000_Gain1_B1},
-                    {HPF_Fs96000_Gain2_A1,             /* Gain setting 2 */
-                    HPF_Fs96000_Gain2_A0,
-                    -HPF_Fs96000_Gain2_B1},
-                    {HPF_Fs96000_Gain3_A1,             /* Gain setting 3 */
-                    HPF_Fs96000_Gain3_A0,
-                    -HPF_Fs96000_Gain3_B1},
-                    {HPF_Fs96000_Gain4_A1,             /* Gain setting 4 */
-                    HPF_Fs96000_Gain4_A0,
-                    -HPF_Fs96000_Gain4_B1},
-                    {HPF_Fs96000_Gain5_A1,             /* Gain setting 5 */
-                    HPF_Fs96000_Gain5_A0,
-                    -HPF_Fs96000_Gain5_B1},
-                    {HPF_Fs96000_Gain6_A1,             /* Gain setting 6 */
-                    HPF_Fs96000_Gain6_A0,
-                    -HPF_Fs96000_Gain6_B1},
-                    {HPF_Fs96000_Gain7_A1,             /* Gain setting 7 */
-                    HPF_Fs96000_Gain7_A0,
-                    -HPF_Fs96000_Gain7_B1},
-                    {HPF_Fs96000_Gain8_A1,             /* Gain setting 8 */
-                    HPF_Fs96000_Gain8_A0,
-                    -HPF_Fs96000_Gain8_B1},
-                    {HPF_Fs96000_Gain9_A1,             /* Gain setting 9 */
-                    HPF_Fs96000_Gain9_A0,
-                    -HPF_Fs96000_Gain9_B1},
-                    {HPF_Fs96000_Gain10_A1,             /* Gain setting 10 */
-                    HPF_Fs96000_Gain10_A0,
-                    -HPF_Fs96000_Gain10_B1},
-                    {HPF_Fs96000_Gain11_A1,             /* Gain setting 11 */
-                    HPF_Fs96000_Gain11_A0,
-                    -HPF_Fs96000_Gain11_B1},
-                    {HPF_Fs96000_Gain12_A1,             /* Gain setting 12 */
-                    HPF_Fs96000_Gain12_A0,
-                    -HPF_Fs96000_Gain12_B1},
-                    {HPF_Fs96000_Gain13_A1,             /* Gain setting 13 */
-                    HPF_Fs96000_Gain13_A0,
-                    -HPF_Fs96000_Gain13_B1},
-                    {HPF_Fs96000_Gain14_A1,             /* Gain setting 14 */
-                    HPF_Fs96000_Gain14_A0,
-                    -HPF_Fs96000_Gain14_B1},
-                    {HPF_Fs96000_Gain15_A1,             /* Gain setting 15 */
-                    HPF_Fs96000_Gain15_A0,
-                    -HPF_Fs96000_Gain15_B1},
+        /* 96kHz sampling rate */
+        {HPF_Fs96000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs96000_Gain1_A0, -HPF_Fs96000_Gain1_B1},
+        {HPF_Fs96000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs96000_Gain2_A0, -HPF_Fs96000_Gain2_B1},
+        {HPF_Fs96000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs96000_Gain3_A0, -HPF_Fs96000_Gain3_B1},
+        {HPF_Fs96000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs96000_Gain4_A0, -HPF_Fs96000_Gain4_B1},
+        {HPF_Fs96000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs96000_Gain5_A0, -HPF_Fs96000_Gain5_B1},
+        {HPF_Fs96000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs96000_Gain6_A0, -HPF_Fs96000_Gain6_B1},
+        {HPF_Fs96000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs96000_Gain7_A0, -HPF_Fs96000_Gain7_B1},
+        {HPF_Fs96000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs96000_Gain8_A0, -HPF_Fs96000_Gain8_B1},
+        {HPF_Fs96000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs96000_Gain9_A0, -HPF_Fs96000_Gain9_B1},
+        {HPF_Fs96000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs96000_Gain10_A0, -HPF_Fs96000_Gain10_B1},
+        {HPF_Fs96000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs96000_Gain11_A0, -HPF_Fs96000_Gain11_B1},
+        {HPF_Fs96000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs96000_Gain12_A0, -HPF_Fs96000_Gain12_B1},
+        {HPF_Fs96000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs96000_Gain13_A0, -HPF_Fs96000_Gain13_B1},
+        {HPF_Fs96000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs96000_Gain14_A0, -HPF_Fs96000_Gain14_B1},
+        {HPF_Fs96000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs96000_Gain15_A0, -HPF_Fs96000_Gain15_B1},
 
-                    /* 176kHz Sampling rate */
-                    {HPF_Fs176400_Gain1_A1,             /* Gain Setting  1 */
-                    HPF_Fs176400_Gain1_A0,
-                    -HPF_Fs176400_Gain1_B1},
-                    {HPF_Fs176400_Gain2_A1,             /* Gain Setting  2 */
-                    HPF_Fs176400_Gain2_A0,
-                    -HPF_Fs176400_Gain2_B1},
-                    {HPF_Fs176400_Gain3_A1,             /* Gain Setting  3 */
-                    HPF_Fs176400_Gain3_A0,
-                    -HPF_Fs176400_Gain3_B1},
-                    {HPF_Fs176400_Gain4_A1,             /* Gain Setting  4 */
-                    HPF_Fs176400_Gain4_A0,
-                    -HPF_Fs176400_Gain4_B1},
-                    {HPF_Fs176400_Gain5_A1,             /* Gain Setting  5 */
-                    HPF_Fs176400_Gain5_A0,
-                    -HPF_Fs176400_Gain5_B1},
-                    {HPF_Fs176400_Gain6_A1,             /* Gain Setting  6 */
-                    HPF_Fs176400_Gain6_A0,
-                    -HPF_Fs176400_Gain6_B1},
-                    {HPF_Fs176400_Gain7_A1,             /* Gain Setting  7 */
-                    HPF_Fs176400_Gain7_A0,
-                    -HPF_Fs176400_Gain7_B1},
-                    {HPF_Fs176400_Gain8_A1,             /* Gain Setting  8 */
-                    HPF_Fs176400_Gain8_A0,
-                    -HPF_Fs176400_Gain8_B1},
-                    {HPF_Fs176400_Gain9_A1,             /* Gain Setting  9 */
-                    HPF_Fs176400_Gain9_A0,
-                    -HPF_Fs176400_Gain9_B1},
-                    {HPF_Fs176400_Gain10_A1,             /* Gain Setting  10 */
-                    HPF_Fs176400_Gain10_A0,
-                    -HPF_Fs176400_Gain10_B1},
-                    {HPF_Fs176400_Gain11_A1,             /* Gain Setting  11 */
-                    HPF_Fs176400_Gain11_A0,
-                    -HPF_Fs176400_Gain11_B1},
-                    {HPF_Fs176400_Gain12_A1,             /* Gain Setting  12 */
-                    HPF_Fs176400_Gain12_A0,
-                    -HPF_Fs176400_Gain12_B1},
-                    {HPF_Fs176400_Gain13_A1,             /* Gain Setting  13 */
-                    HPF_Fs176400_Gain13_A0,
-                    -HPF_Fs176400_Gain13_B1},
-                    {HPF_Fs176400_Gain14_A1,             /* Gain Setting  14 */
-                    HPF_Fs176400_Gain14_A0,
-                    -HPF_Fs176400_Gain14_B1},
-                    {HPF_Fs176400_Gain15_A1,             /* Gain Setting  15 */
-                    HPF_Fs176400_Gain15_A0,
-                    -HPF_Fs176400_Gain15_B1},
+        /* 176kHz Sampling rate */
+        {HPF_Fs176400_Gain1_A1, /* Gain Setting  1 */
+         HPF_Fs176400_Gain1_A0, -HPF_Fs176400_Gain1_B1},
+        {HPF_Fs176400_Gain2_A1, /* Gain Setting  2 */
+         HPF_Fs176400_Gain2_A0, -HPF_Fs176400_Gain2_B1},
+        {HPF_Fs176400_Gain3_A1, /* Gain Setting  3 */
+         HPF_Fs176400_Gain3_A0, -HPF_Fs176400_Gain3_B1},
+        {HPF_Fs176400_Gain4_A1, /* Gain Setting  4 */
+         HPF_Fs176400_Gain4_A0, -HPF_Fs176400_Gain4_B1},
+        {HPF_Fs176400_Gain5_A1, /* Gain Setting  5 */
+         HPF_Fs176400_Gain5_A0, -HPF_Fs176400_Gain5_B1},
+        {HPF_Fs176400_Gain6_A1, /* Gain Setting  6 */
+         HPF_Fs176400_Gain6_A0, -HPF_Fs176400_Gain6_B1},
+        {HPF_Fs176400_Gain7_A1, /* Gain Setting  7 */
+         HPF_Fs176400_Gain7_A0, -HPF_Fs176400_Gain7_B1},
+        {HPF_Fs176400_Gain8_A1, /* Gain Setting  8 */
+         HPF_Fs176400_Gain8_A0, -HPF_Fs176400_Gain8_B1},
+        {HPF_Fs176400_Gain9_A1, /* Gain Setting  9 */
+         HPF_Fs176400_Gain9_A0, -HPF_Fs176400_Gain9_B1},
+        {HPF_Fs176400_Gain10_A1, /* Gain Setting  10 */
+         HPF_Fs176400_Gain10_A0, -HPF_Fs176400_Gain10_B1},
+        {HPF_Fs176400_Gain11_A1, /* Gain Setting  11 */
+         HPF_Fs176400_Gain11_A0, -HPF_Fs176400_Gain11_B1},
+        {HPF_Fs176400_Gain12_A1, /* Gain Setting  12 */
+         HPF_Fs176400_Gain12_A0, -HPF_Fs176400_Gain12_B1},
+        {HPF_Fs176400_Gain13_A1, /* Gain Setting  13 */
+         HPF_Fs176400_Gain13_A0, -HPF_Fs176400_Gain13_B1},
+        {HPF_Fs176400_Gain14_A1, /* Gain Setting  14 */
+         HPF_Fs176400_Gain14_A0, -HPF_Fs176400_Gain14_B1},
+        {HPF_Fs176400_Gain15_A1, /* Gain Setting  15 */
+         HPF_Fs176400_Gain15_A0, -HPF_Fs176400_Gain15_B1},
 
-                    /* 192kHz sampling rate */
-                    {HPF_Fs192000_Gain1_A1,             /* Gain setting 1 */
-                    HPF_Fs192000_Gain1_A0,
-                    -HPF_Fs192000_Gain1_B1},
-                    {HPF_Fs192000_Gain2_A1,             /* Gain setting 2 */
-                    HPF_Fs192000_Gain2_A0,
-                    -HPF_Fs192000_Gain2_B1},
-                    {HPF_Fs192000_Gain3_A1,             /* Gain setting 3 */
-                    HPF_Fs192000_Gain3_A0,
-                    -HPF_Fs192000_Gain3_B1},
-                    {HPF_Fs192000_Gain4_A1,             /* Gain setting 4 */
-                    HPF_Fs192000_Gain4_A0,
-                    -HPF_Fs192000_Gain4_B1},
-                    {HPF_Fs192000_Gain5_A1,             /* Gain setting 5 */
-                    HPF_Fs192000_Gain5_A0,
-                    -HPF_Fs192000_Gain5_B1},
-                    {HPF_Fs192000_Gain6_A1,             /* Gain setting 6 */
-                    HPF_Fs192000_Gain6_A0,
-                    -HPF_Fs192000_Gain6_B1},
-                    {HPF_Fs192000_Gain7_A1,             /* Gain setting 7 */
-                    HPF_Fs192000_Gain7_A0,
-                    -HPF_Fs192000_Gain7_B1},
-                    {HPF_Fs192000_Gain8_A1,             /* Gain setting 8 */
-                    HPF_Fs192000_Gain8_A0,
-                    -HPF_Fs192000_Gain8_B1},
-                    {HPF_Fs192000_Gain9_A1,             /* Gain setting 9 */
-                    HPF_Fs192000_Gain9_A0,
-                    -HPF_Fs192000_Gain9_B1},
-                    {HPF_Fs192000_Gain10_A1,             /* Gain setting 10 */
-                    HPF_Fs192000_Gain10_A0,
-                    -HPF_Fs192000_Gain10_B1},
-                    {HPF_Fs192000_Gain11_A1,             /* Gain setting 11 */
-                    HPF_Fs192000_Gain11_A0,
-                    -HPF_Fs192000_Gain11_B1},
-                    {HPF_Fs192000_Gain12_A1,             /* Gain setting 12 */
-                    HPF_Fs192000_Gain12_A0,
-                    -HPF_Fs192000_Gain12_B1},
-                    {HPF_Fs192000_Gain13_A1,             /* Gain setting 13 */
-                    HPF_Fs192000_Gain13_A0,
-                    -HPF_Fs192000_Gain13_B1},
-                    {HPF_Fs192000_Gain14_A1,             /* Gain setting 14 */
-                    HPF_Fs192000_Gain14_A0,
-                    -HPF_Fs192000_Gain14_B1},
-                    {HPF_Fs192000_Gain15_A1,             /* Gain setting 15 */
-                    HPF_Fs192000_Gain15_A0,
-                    -HPF_Fs192000_Gain15_B1}
-                    };
+        /* 192kHz sampling rate */
+        {HPF_Fs192000_Gain1_A1, /* Gain setting 1 */
+         HPF_Fs192000_Gain1_A0, -HPF_Fs192000_Gain1_B1},
+        {HPF_Fs192000_Gain2_A1, /* Gain setting 2 */
+         HPF_Fs192000_Gain2_A0, -HPF_Fs192000_Gain2_B1},
+        {HPF_Fs192000_Gain3_A1, /* Gain setting 3 */
+         HPF_Fs192000_Gain3_A0, -HPF_Fs192000_Gain3_B1},
+        {HPF_Fs192000_Gain4_A1, /* Gain setting 4 */
+         HPF_Fs192000_Gain4_A0, -HPF_Fs192000_Gain4_B1},
+        {HPF_Fs192000_Gain5_A1, /* Gain setting 5 */
+         HPF_Fs192000_Gain5_A0, -HPF_Fs192000_Gain5_B1},
+        {HPF_Fs192000_Gain6_A1, /* Gain setting 6 */
+         HPF_Fs192000_Gain6_A0, -HPF_Fs192000_Gain6_B1},
+        {HPF_Fs192000_Gain7_A1, /* Gain setting 7 */
+         HPF_Fs192000_Gain7_A0, -HPF_Fs192000_Gain7_B1},
+        {HPF_Fs192000_Gain8_A1, /* Gain setting 8 */
+         HPF_Fs192000_Gain8_A0, -HPF_Fs192000_Gain8_B1},
+        {HPF_Fs192000_Gain9_A1, /* Gain setting 9 */
+         HPF_Fs192000_Gain9_A0, -HPF_Fs192000_Gain9_B1},
+        {HPF_Fs192000_Gain10_A1, /* Gain setting 10 */
+         HPF_Fs192000_Gain10_A0, -HPF_Fs192000_Gain10_B1},
+        {HPF_Fs192000_Gain11_A1, /* Gain setting 11 */
+         HPF_Fs192000_Gain11_A0, -HPF_Fs192000_Gain11_B1},
+        {HPF_Fs192000_Gain12_A1, /* Gain setting 12 */
+         HPF_Fs192000_Gain12_A0, -HPF_Fs192000_Gain12_B1},
+        {HPF_Fs192000_Gain13_A1, /* Gain setting 13 */
+         HPF_Fs192000_Gain13_A0, -HPF_Fs192000_Gain13_B1},
+        {HPF_Fs192000_Gain14_A1, /* Gain setting 14 */
+         HPF_Fs192000_Gain14_A0, -HPF_Fs192000_Gain14_B1},
+        {HPF_Fs192000_Gain15_A1, /* Gain setting 15 */
+         HPF_Fs192000_Gain15_A0, -HPF_Fs192000_Gain15_B1}};
 
 /************************************************************************************/
 /*                                                                                    */
@@ -463,14 +329,13 @@
 /************************************************************************************/
 
 /* dB to linear conversion table */
-const LVM_FLOAT LVM_VolumeTable[] = {
-    1.000f,             /*  0dB */
-    0.891f,             /* -1dB */
-    0.794f,             /* -2dB */
-    0.708f,             /* -3dB */
-    0.631f,             /* -4dB */
-    0.562f,             /* -5dB */
-    0.501f};            /* -6dB */
+const LVM_FLOAT LVM_VolumeTable[] = {1.000f,  /*  0dB */
+                                     0.891f,  /* -1dB */
+                                     0.794f,  /* -2dB */
+                                     0.708f,  /* -3dB */
+                                     0.631f,  /* -4dB */
+                                     0.562f,  /* -5dB */
+                                     0.501f}; /* -6dB */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -478,24 +343,16 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVM_MIX_TC_Fs8000      32580         /* Floating point value 0.994262695 */
-#define LVM_MIX_TC_Fs11025     32632         /* Floating point value 0.995849609 */
-#define LVM_MIX_TC_Fs12000     32643         /* Floating point value 0.996185303 */
-#define LVM_MIX_TC_Fs16000     32674         /* Floating point value 0.997131348 */
-#define LVM_MIX_TC_Fs22050     32700         /* Floating point value 0.997924805 */
-#define LVM_MIX_TC_Fs24000     32705         /* Floating point value 0.998077393 */
-#define LVM_MIX_TC_Fs32000     32721         /* Floating point value 0.998565674 */
-#define LVM_MIX_TC_Fs44100     32734         /* Floating point value 0.998962402 */
-#define LVM_MIX_TC_Fs48000     32737         /* Floating point value 0.999053955 */
+#define LVM_MIX_TC_Fs8000 32580  /* Floating point value 0.994262695 */
+#define LVM_MIX_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVM_MIX_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVM_MIX_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVM_MIX_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVM_MIX_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVM_MIX_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVM_MIX_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVM_MIX_TC_Fs48000 32737 /* Floating point value 0.999053955 */
 
-const LVM_INT16 LVM_MixerTCTable[] = {
-    LVM_MIX_TC_Fs8000,
-    LVM_MIX_TC_Fs11025,
-    LVM_MIX_TC_Fs12000,
-    LVM_MIX_TC_Fs16000,
-    LVM_MIX_TC_Fs22050,
-    LVM_MIX_TC_Fs24000,
-    LVM_MIX_TC_Fs32000,
-    LVM_MIX_TC_Fs44100,
-    LVM_MIX_TC_Fs48000};
-
+const LVM_INT16 LVM_MixerTCTable[] = {LVM_MIX_TC_Fs8000,  LVM_MIX_TC_Fs11025, LVM_MIX_TC_Fs12000,
+                                      LVM_MIX_TC_Fs16000, LVM_MIX_TC_Fs22050, LVM_MIX_TC_Fs24000,
+                                      LVM_MIX_TC_Fs32000, LVM_MIX_TC_Fs44100, LVM_MIX_TC_Fs48000};
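
The two tables above are easy to sanity-check. The LVM_VolumeTable entries match the usual dB-to-amplitude conversion 10^(dB/20) rounded to three decimals, and each LVM_MIX_TC_* comment value is simply the Q15 integer divided by 32768; the table itself appears to be indexed by the LVM_Fs_en sampling-rate enum (LVM_FS_8000 = 0 through LVM_FS_48000 = 8). A standalone C++ sketch, not part of the patch:

    #include <cmath>
    #include <cstdio>

    int main() {
        // dB to linear: 0 dB -> 1.000, -1 dB -> 0.891, ..., -6 dB -> 0.501
        for (int dB = 0; dB >= -6; --dB) {
            printf("%3d dB -> %.3f\n", dB, std::pow(10.0, dB / 20.0));
        }
        // Mixer time constants: comment value = Q15 integer / 32768,
        // e.g. 32580 / 32768 = 0.994262695 for the 8 kHz entry.
        const int tc[] = {32580, 32632, 32643, 32674, 32700,
                          32705, 32721, 32734, 32737};
        for (int i = 0; i < 9; ++i) {
            printf("TC[%d] = %d -> %.9f\n", i, tc[i], tc[i] / 32768.0);
        }
        return 0;
    }
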
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
index fc82194..cf2fb5d 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Tables.h
@@ -33,7 +33,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-extern FO_FLOAT_LShx_Coefs_t     LVM_TrebleBoostCoefs[];
+extern FO_FLOAT_LShx_Coefs_t LVM_TrebleBoostCoefs[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -45,4 +45,3 @@
 extern const LVM_INT16 LVM_MixerTCTable[];
 
 #endif /* __LVM_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/lib/Common/lib/AGC.h b/media/libeffects/lvm/lib/Common/lib/AGC.h
index bef7fa1..31c8200 100644
--- a/media/libeffects/lvm/lib/Common/lib/AGC.h
+++ b/media/libeffects/lvm/lib/Common/lib/AGC.h
@@ -31,16 +31,15 @@
 /*    Types                                                                       */
 /*                                                                                */
 /**********************************************************************************/
-typedef struct
-{
-    LVM_FLOAT  AGC_Gain;                        /* The current AGC gain */
-    LVM_FLOAT  AGC_MaxGain;                     /* The maximum AGC gain */
-    LVM_FLOAT  Volume;                          /* The current volume setting */
-    LVM_FLOAT  Target;                          /* The target volume setting */
-    LVM_FLOAT  AGC_Target;                      /* AGC target level */
-    LVM_FLOAT  AGC_Attack;                      /* AGC attack scaler */
-    LVM_FLOAT  AGC_Decay;                       /* AGC decay scaler */
-    LVM_FLOAT  VolumeTC;                        /* Volume update time constant */
+typedef struct {
+    LVM_FLOAT AGC_Gain;    /* The current AGC gain */
+    LVM_FLOAT AGC_MaxGain; /* The maximum AGC gain */
+    LVM_FLOAT Volume;      /* The current volume setting */
+    LVM_FLOAT Target;      /* The target volume setting */
+    LVM_FLOAT AGC_Target;  /* AGC target level */
+    LVM_FLOAT AGC_Attack;  /* AGC attack scaler */
+    LVM_FLOAT AGC_Decay;   /* AGC decay scaler */
+    LVM_FLOAT VolumeTC;    /* Volume update time constant */
 
 } AGC_MIX_VOL_2St1Mon_FLOAT_t;
 
@@ -49,19 +48,11 @@
 /*    Function Prototypes                                                              */
 /*                                                                                */
 /**********************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,     /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,        /* Stereo source */
-                                 const LVM_FLOAT            *pMonoSrc,      /* Mono source */
-                                 LVM_FLOAT                  *pDst,          /* Stereo destination */
-                                 LVM_UINT16                 n);             /* Number of samples */
-#ifdef SUPPORT_MC
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,  /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,      /* Source */
-                                 const LVM_FLOAT            *pMonoSrc,    /* Mono source */
-                                 LVM_FLOAT                  *pDst,        /* Destination */
-                                 LVM_UINT16                 NrFrames,     /* Number of frames */
-                                 LVM_UINT16                 NrChannels);  /* Number of channels */
-#endif
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance,  /* Instance pointer */
+                                const LVM_FLOAT* pStSrc,                 /* Source */
+                                const LVM_FLOAT* pMonoSrc,               /* Mono source */
+                                LVM_FLOAT* pDst,                         /* Destination */
+                                LVM_UINT16 NrFrames,                     /* Number of frames */
+                                LVM_UINT16 NrChannels);                  /* Number of channels */
 
-#endif  /* __AGC_H__ */
-
+#endif /* __AGC_H__ */
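
For reference, a minimal call sketch for the remaining multichannel AGC entry point. The helper name and all field values are placeholders, and it assumes (this header does not say) that pStSrc and pDst are interleaved buffers of NrFrames x NrChannels samples while pMonoSrc holds NrFrames mono samples:

    #include "AGC.h"

    // Illustrative only: apply the AGC/mix/volume stage to one block of audio.
    void exampleAgcBlock(const LVM_FLOAT* in, const LVM_FLOAT* mono, LVM_FLOAT* out,
                         LVM_UINT16 frames, LVM_UINT16 channels) {
        AGC_MIX_VOL_2St1Mon_FLOAT_t agc = {};
        agc.AGC_Gain = 1.0f;    /* current gain: start at unity (placeholder) */
        agc.AGC_MaxGain = 2.0f; /* placeholder gain ceiling */
        agc.AGC_Target = 0.5f;  /* placeholder AGC target level */
        agc.AGC_Attack = 0.99f; /* placeholder attack/decay scalers */
        agc.AGC_Decay = 0.01f;
        agc.Volume = 1.0f;      /* current and target volume */
        agc.Target = 1.0f;
        agc.VolumeTC = 0.01f;   /* placeholder volume time constant */
        AGC_MIX_VOL_Mc1Mon_D32_WRA(&agc, in, mono, out, frames, channels);
    }
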
diff --git a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
index c050cd0..00b539a 100644
--- a/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
+++ b/media/libeffects/lvm/lib/Common/lib/BIQUAD.h
@@ -22,281 +22,67 @@
 /**********************************************************************************
    INSTANCE MEMORY TYPE DEFINITION
 ***********************************************************************************/
-typedef struct
-{
-#ifdef SUPPORT_MC
+typedef struct {
     /* The memory region created by this structure instance is typecast
      * into another structure containing a pointer and an array of filter
      * coefficients. In one case this memory region is used for storing
      * DC component of channels
      */
-    LVM_FLOAT *pStorage;
+    LVM_FLOAT* pStorage;
     LVM_FLOAT Storage[LVM_MAX_CHANNELS];
-#else
-    LVM_FLOAT Storage[6];
-#endif
 } Biquad_FLOAT_Instance_t;
 /**********************************************************************************
    COEFFICIENT TYPE DEFINITIONS
 ***********************************************************************************/
 
 /*** Biquad coefficients **********************************************************/
-typedef struct
-{
-    LVM_FLOAT  A2;   /*  a2  */
-    LVM_FLOAT  A1;   /*  a1  */
-    LVM_FLOAT  A0;   /*  a0  */
-    LVM_FLOAT  B2;   /* -b2! */
-    LVM_FLOAT  B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A2; /*  a2  */
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
 } BQ_FLOAT_Coefs_t;
 
 /*** First order coefficients *****************************************************/
-typedef struct
-{
-    LVM_FLOAT A1;   /*  a1  */
-    LVM_FLOAT A0;   /*  a0  */
-    LVM_FLOAT B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B1; /* -b1! */
 } FO_FLOAT_Coefs_t;
 
 /*** First order coefficients with Shift*****************************************************/
-typedef struct
-{
-    LVM_FLOAT A1;    /*  a1  */
-    LVM_FLOAT A0;    /*  a0  */
-    LVM_FLOAT B1;    /* -b1! */
+typedef struct {
+    LVM_FLOAT A1; /*  a1  */
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B1; /* -b1! */
 } FO_FLOAT_LShx_Coefs_t;
 /*** Band pass coefficients *******************************************************/
-typedef struct
-{
-    LVM_FLOAT  A0;   /*  a0  */
-    LVM_FLOAT  B2;   /* -b2! */
-    LVM_FLOAT  B1;   /* -b1! */
+typedef struct {
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
 } BP_FLOAT_Coefs_t;
 
 /*** Peaking coefficients *********************************************************/
-typedef struct
-{
-    LVM_FLOAT A0;   /*  a0  */
-    LVM_FLOAT B2;   /* -b2! */
-    LVM_FLOAT B1;   /* -b1! */
-    LVM_FLOAT  G;   /* Gain */
+typedef struct {
+    LVM_FLOAT A0; /*  a0  */
+    LVM_FLOAT B2; /* -b2! */
+    LVM_FLOAT B1; /* -b1! */
+    LVM_FLOAT G;  /* Gain */
 } PK_FLOAT_Coefs_t;
 
-/**********************************************************************************
-   TAPS TYPE DEFINITIONS
-***********************************************************************************/
-
-/*** Types used for first order and shelving filter *******************************/
-typedef struct
-{
-    LVM_FLOAT Storage[ (1 * 2) ];  /* One channel, two taps of size LVM_INT32 */
-} Biquad_1I_Order1_FLOAT_Taps_t;
-
-typedef struct
-{
-#ifdef SUPPORT_MC
-    /* LVM_MAX_CHANNELS channels, two taps of size LVM_FLOAT */
-    LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 2) ];
-#else
-    LVM_FLOAT Storage[ (2 * 2) ];  /* Two channels, two taps of size LVM_FLOAT */
-#endif
-} Biquad_2I_Order1_FLOAT_Taps_t;
-
-/*** Types used for biquad, band pass and peaking filter **************************/
-typedef struct
-{
-    LVM_FLOAT Storage[ (1 * 4) ];  /* One channel, four taps of size LVM_FLOAT */
-} Biquad_1I_Order2_FLOAT_Taps_t;
-
-typedef struct
-{
-#ifdef SUPPORT_MC
-    /* LVM_MAX_CHANNELS, four taps of size LVM_FLOAT */
-    LVM_FLOAT Storage[ (LVM_MAX_CHANNELS * 4) ];
-#else
-    LVM_FLOAT Storage[ (2 * 4) ];  /* Two channels, four taps of size LVM_FLOAT */
-#endif
-} Biquad_2I_Order2_FLOAT_Taps_t;
-/* The names of the functions are changed to satisfy QAC rules: Name should be Unique withing 16 characters*/
-#define BQ_2I_D32F32Cll_TRC_WRA_01_Init  Init_BQ_2I_D32F32Cll_TRC_WRA_01
-#define BP_1I_D32F32C30_TRC_WRA_02       TWO_BP_1I_D32F32C30_TRC_WRA_02
-
-/**********************************************************************************
-   FUNCTION PROTOTYPES: BIQUAD FILTERS
-***********************************************************************************/
-
-/*** 16 bit data path *************************************************************/
-
-void BQ_2I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
-
-void BQ_2I_D16F32C15_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
-
-void BQ_2I_D16F32C14_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
-
-void BQ_2I_D16F32C13_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
-
-void BQ_2I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
-
-void BQ_2I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
-
-void BQ_2I_D16F16C14_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
-
-void BQ_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
-
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples);
-
-void BQ_1I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef);
-
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT              *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples);
-/*** 32 bit data path *************************************************************/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init (      Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
-                                            BQ_FLOAT_Coefs_t          *pCoef);
-void BQ_2I_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t  *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                 NrSamples);
-#ifdef SUPPORT_MC
-void BQ_MC_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t      *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrFrames,
-                                            LVM_INT16                    NrChannels);
-#endif
-
-/**********************************************************************************
-   FUNCTION PROTOTYPES: FIRST ORDER FILTERS
-***********************************************************************************/
-
-/*** 16 bit data path *************************************************************/
-void FO_1I_D16F16Css_TRC_WRA_01_Init(    Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                         FO_FLOAT_Coefs_t            *pCoef);
-
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples);
-
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t       *pInstance,
-                                          Biquad_2I_Order1_FLOAT_Taps_t *pTaps,
-                                          FO_FLOAT_LShx_Coefs_t     *pCoef);
-
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples);
-/*** 32 bit data path *************************************************************/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order1_FLOAT_Taps_t *pTaps,
-                                      FO_FLOAT_Coefs_t          *pCoef);
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT                     *pDataIn,
-                                 LVM_FLOAT                     *pDataOut,
-                                 LVM_INT16                     NrSamples);
-#ifdef SUPPORT_MC
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t  *pInstance,
-                                     LVM_FLOAT                *pDataIn,
-                                     LVM_FLOAT                *pDataOut,
-                                     LVM_INT16                NrFrames,
-                                     LVM_INT16                NrChannels);
-#endif
-/**********************************************************************************
-   FUNCTION PROTOTYPES: BAND PASS FILTERS
-***********************************************************************************/
-
-/*** 16 bit data path *************************************************************/
-void BP_1I_D16F16Css_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                      BP_FLOAT_Coefs_t              *pCoef);
-void BP_1I_D16F16C14_TRC_WRA_01 (     Biquad_FLOAT_Instance_t       *pInstance,
-                                      LVM_FLOAT                     *pDataIn,
-                                      LVM_FLOAT                     *pDataOut,
-                                      LVM_INT16                     NrSamples);
-void BP_1I_D16F32Cll_TRC_WRA_01_Init (Biquad_FLOAT_Instance_t       *pInstance,
-                                      Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                      BP_FLOAT_Coefs_t              *pCoef);
-void BP_1I_D16F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
-/*** 32 bit data path *************************************************************/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init (      Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_1I_Order2_FLOAT_Taps_t *pTaps,
-                                            BP_FLOAT_Coefs_t          *pCoef);
-void BP_1I_D32F32C30_TRC_WRA_02(            Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples);
-
-/*** 32 bit data path STEREO ******************************************************/
-void PK_2I_D32F32CssGss_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t       *pInstance,
-                                            Biquad_2I_Order2_FLOAT_Taps_t *pTaps,
-                                            PK_FLOAT_Coefs_t          *pCoef);
-void PK_2I_D32F32C14G11_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                    LVM_FLOAT               *pDataIn,
-                                    LVM_FLOAT               *pDataOut,
-                                    LVM_INT16               NrSamples);
-#ifdef SUPPORT_MC
-void PK_Mc_D32F32C14G11_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                   LVM_FLOAT               *pDataIn,
-                                   LVM_FLOAT               *pDataOut,
-                                   LVM_INT16               NrFrames,
-                                   LVM_INT16               NrChannels);
-#endif
 
 /**********************************************************************************
    FUNCTION PROTOTYPES: DC REMOVAL FILTERS
 ***********************************************************************************/
 
 /*** 16 bit data path STEREO ******************************************************/
-#ifdef SUPPORT_MC
-void DC_Mc_D16_TRC_WRA_01_Init     (        Biquad_FLOAT_Instance_t       *pInstance);
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance);
 
-void DC_Mc_D16_TRC_WRA_01          (        Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT               *pDataIn,
-                                            LVM_FLOAT               *pDataOut,
-                                            LVM_INT16               NrFrames,
-                                            LVM_INT16               NrChannels);
-#else
-void DC_2I_D16_TRC_WRA_01_Init     (        Biquad_FLOAT_Instance_t       *pInstance);
-
-void DC_2I_D16_TRC_WRA_01          (        Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT               *pDataIn,
-                                            LVM_FLOAT               *pDataOut,
-                                            LVM_INT16               NrSamples);
-#endif
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                          LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
 /**********************************************************************************/
 
-#endif  /** _BIQUAD_H_ **/
-
+#endif /** _BIQUAD_H_ **/
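
The "-b1!" / "-b2!" comments are the key to these coefficient structs: the denominator coefficients are stored already negated, so a direct-form recursion adds the feedback terms instead of subtracting them. A plain sketch of that convention (illustrative only; the library's optimized kernels, now declared elsewhere, are the real implementations):

    #include <cstddef>

    struct BqCoefs { float A2, A1, A0, B2, B1; };  // mirrors BQ_FLOAT_Coefs_t

    // y[n] = A0*x[n] + A1*x[n-1] + A2*x[n-2] + B1*y[n-1] + B2*y[n-2],
    // where B1 = -b1 and B2 = -b2 of the textbook transfer function.
    void biquadProcess(const BqCoefs& c, const float* in, float* out, size_t n) {
        float x1 = 0.f, x2 = 0.f, y1 = 0.f, y2 = 0.f;
        for (size_t i = 0; i < n; ++i) {
            const float x = in[i];
            const float y = c.A0 * x + c.A1 * x1 + c.A2 * x2 + c.B1 * y1 + c.B2 * y2;
            x2 = x1;
            x1 = x;
            y2 = y1;
            y1 = y;
            out[i] = y;
        }
    }
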
diff --git a/media/libeffects/lvm/lib/Common/lib/CompLim.h b/media/libeffects/lvm/lib/Common/lib/CompLim.h
index 5b7cb1b..2fc78b4 100644
--- a/media/libeffects/lvm/lib/Common/lib/CompLim.h
+++ b/media/libeffects/lvm/lib/Common/lib/CompLim.h
@@ -35,21 +35,21 @@
 typedef struct /* Compressor state */
 {
     /* Normaliser */
-    LVM_INT16   Norm_Attack;        /* Attack time constant of the Normaliser integrator */
-    LVM_INT16   Norm_Decay;         /* Decay time constant of the Normaliser integrator */
-    LVM_INT32   NormInt;            /* Normaliser integrator current value */
-    LVM_INT16   Shift;              /* Shift gain */
-    LVM_INT16   Threshold;          /* Target threshold */
+    LVM_INT16 Norm_Attack; /* Attack time constant of the Normaliser integrator */
+    LVM_INT16 Norm_Decay;  /* Decay time constant of the Normaliser integrator */
+    LVM_INT32 NormInt;     /* Normaliser integrator current value */
+    LVM_INT16 Shift;       /* Shift gain */
+    LVM_INT16 Threshold;   /* Target threshold */
 
     /* Compressor */
-    LVM_INT16   Comp_Atten;         /* Attenuation applied before soft knee compressor */
-    LVM_INT16   Comp_Attack_S;      /* Attack time constant of the slow integrator */
-    LVM_INT16   Comp_Decay_S;       /* Decay time constant of slow the integrator */
-    LVM_INT16   Comp_Attack_F;      /* Attack time constant of fast the integrator */
-    LVM_INT16   Comp_Decay_F;       /* Decay time constant of fast the integrator */
-    LVM_INT16   SoftClipGain;       /* Soft clip gain control */
-    LVM_INT32   CompIntSlow;        /* Compressor slow integrator current value */
-    LVM_INT32   CompIntFast;        /* Compressor fast integrator current value */
+    LVM_INT16 Comp_Atten;    /* Attenuation applied before soft knee compressor */
+    LVM_INT16 Comp_Attack_S; /* Attack time constant of the slow integrator */
+    LVM_INT16 Comp_Decay_S;  /* Decay time constant of the slow integrator */
+    LVM_INT16 Comp_Attack_F; /* Attack time constant of the fast integrator */
+    LVM_INT16 Comp_Decay_F;  /* Decay time constant of the fast integrator */
+    LVM_INT16 SoftClipGain;  /* Soft clip gain control */
+    LVM_INT32 CompIntSlow;   /* Compressor slow integrator current value */
+    LVM_INT32 CompIntFast;   /* Compressor fast integrator current value */
 
 } CompLim_Instance_t;
 
@@ -58,10 +58,7 @@
 /*  Function Prototypes                                                             */
 /*                                                                                  */
 /************************************************************************************/
-void NonLinComp_Float(LVM_FLOAT        Gain,
-                      LVM_FLOAT        *pDataIn,
-                      LVM_FLOAT        *pDataOut,
-                      LVM_INT32        BlockLength);
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+                      LVM_INT32 BlockLength);
 
 #endif /* #ifndef _COMP_LIM_H */
-
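
A minimal call sketch for the compressor entry point above. The helper name is illustrative, the Gain value is a placeholder (its exact meaning is not documented in this header), and the buffers are assumed to hold BlockLength contiguous samples:

    #include "CompLim.h"

    // Illustrative only: run the soft-knee compressor helper over one block.
    void exampleCompressBlock(LVM_FLOAT* in, LVM_FLOAT* out, LVM_INT32 blockLength) {
        NonLinComp_Float(1.0f /* placeholder gain */, in, out, blockLength);
    }
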
diff --git a/media/libeffects/lvm/lib/Common/lib/Filter.h b/media/libeffects/lvm/lib/Common/lib/Filter.h
index 1eeb321..0ba5223 100644
--- a/media/libeffects/lvm/lib/Common/lib/Filter.h
+++ b/media/libeffects/lvm/lib/Common/lib/Filter.h
@@ -27,26 +27,20 @@
 /**********************************************************************************
    DEFINES
 ***********************************************************************************/
-#define FILTER_LOSS     32730       /* -0.01dB loss to avoid wrapping due to band ripple */
-#define FILTER_LOSS_FLOAT    0.998849f
+#define FILTER_LOSS 32730 /* -0.01dB loss to avoid wrapping due to band ripple */
+#define FILTER_LOSS_FLOAT 0.998849f
 /**********************************************************************************
    FUNCTION PROTOTYPES
 ***********************************************************************************/
 
-LVM_FLOAT LVM_Power10(   LVM_FLOAT  X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X);
 
-LVM_FLOAT LVM_Polynomial(LVM_UINT16 N,
-                         LVM_FLOAT  *pCoefficients,
-                         LVM_FLOAT  X);
-LVM_FLOAT   LVM_GetOmega(LVM_UINT32  Fc,
-                         LVM_Fs_en   SampleRate);
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X);
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en SampleRate);
 
-LVM_FLOAT LVM_FO_LPF(    LVM_FLOAT  w,
-                         FO_FLOAT_Coefs_t  *pCoeffs);
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
 
-LVM_FLOAT LVM_FO_HPF(    LVM_FLOAT  w,
-                         FO_FLOAT_Coefs_t  *pCoeffs);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs);
 /**********************************************************************************/
 
-#endif  /** _FILTER_H_ **/
-
+#endif /** _FILTER_H_ **/
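
A usage sketch for the first-order design helpers above, assuming LVM_GetOmega returns the normalized angular frequency for a cut-off and sampling rate and that LVM_FO_LPF / LVM_FO_HPF fill the coefficient structure; the design functions' return value is not documented here, so it is ignored. The helper name is illustrative:

    #include "BIQUAD.h"
    #include "Filter.h"
    #include "LVM_Types.h"

    // Sketch: derive first-order low-pass coefficients for a 1 kHz corner
    // at a 48 kHz sampling rate.
    void exampleDesignLowPass(FO_FLOAT_Coefs_t* pCoefs) {
        const LVM_FLOAT w = LVM_GetOmega(1000, LVM_FS_48000);
        (void)LVM_FO_LPF(w, pCoefs);
    }
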
diff --git a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h b/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
deleted file mode 100644
index bae84e7..0000000
--- a/media/libeffects/lvm/lib/Common/lib/InstAlloc.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef __INSTALLOC_H__
-#define __INSTALLOC_H__
-
-#include "LVM_Types.h"
-/*######################################################################################*/
-/*  Type declarations                                                                   */
-/*######################################################################################*/
-typedef struct
-{
-    LVM_UINT32              TotalSize;      /*  Accumulative total memory size                      */
-    uintptr_t               pNextMember;    /*  Pointer to the next instance member to be allocated */
-}   INST_ALLOC;
-
-/*######################################################################################*/
-/*  Function prototypes                                                          */
-/*######################################################################################*/
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void   InstAlloc_Init( INST_ALLOC *pms, void *StartAddr );
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the new added member
- *  Returns     : A pointer to the new added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32 bit alligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void* InstAlloc_AddMember( INST_ALLOC *pms, LVM_UINT32 Size );
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This functions returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms);
-
-void*   InstAlloc_AddMemberAllRet(     INST_ALLOC                 *pms,
-                                     LVM_UINT32               Size[],
-                                     void                    **ptr);
-
-void*   InstAlloc_AddMemberAll( INST_ALLOC                     *pms,
-                                 LVM_UINT32                   Size[],
-                                 LVM_MemoryTable_st           *pMemoryTable);
-
-void    InstAlloc_InitAll( INST_ALLOC                      *pms,
-                           LVM_MemoryTable_st             *pMemoryTable);
-
-void    InstAlloc_InitAll_NULL( INST_ALLOC              *pms);
-
-#endif /* __JBS_INSTALLOC_H__ */
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
index 49f16ad..d3d128a 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Common.h
@@ -39,12 +39,11 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* Algorithm identification */
-#define ALGORITHM_NONE_ID      0x0000
-#define ALGORITHM_CS_ID        0x0100
-#define ALGORITHM_EQNB_ID      0x0200
-#define ALGORITHM_DBE_ID       0x0300
-#define ALGORITHM_VC_ID        0x0500
-#define ALGORITHM_TE_ID        0x0600
+#define ALGORITHM_NONE_ID 0x0000
+#define ALGORITHM_CS_ID 0x0100
+#define ALGORITHM_EQNB_ID 0x0200
+#define ALGORITHM_DBE_ID 0x0300
+#define ALGORITHM_VC_ID 0x0500
+#define ALGORITHM_TE_ID 0x0600
 
-#endif      /* __LVM_COMMON_H__ */
-
+#endif /* __LVM_COMMON_H__ */
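
The algorithm IDs above occupy the high byte of a callback ID, and LVM_Types.h (later in this patch) keeps LVM_MODULEID_MASK / LVM_EVENTID_MASK for splitting such an ID back apart. A small standalone sketch, assuming a callback ID composed as (algorithm ID | event ID):

    #include <cstdio>

    // Values mirror LVM_Common.h / LVM_Types.h.
    constexpr int kAlgorithmCsId = 0x0100;  // ALGORITHM_CS_ID
    constexpr int kModuleIdMask = 0xFF00;   // LVM_MODULEID_MASK
    constexpr int kEventIdMask = 0x00FF;    // LVM_EVENTID_MASK

    int main() {
        const int callbackId = kAlgorithmCsId | 0x02;  // hypothetical event 0x02
        printf("module 0x%04X, event 0x%02X\n",
               callbackId & kModuleIdMask, callbackId & kEventIdMask);
        return 0;
    }
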
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
index 1a15125..b984745 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Macros.h
@@ -28,31 +28,36 @@
         of overflow is undefined.
 
 ***********************************************************************************/
-#define MUL32x32INTO32(A,B,C,ShiftR)   \
-        {LVM_INT32 MUL32x32INTO32_temp,MUL32x32INTO32_temp2,MUL32x32INTO32_mask,MUL32x32INTO32_HH,MUL32x32INTO32_HL,MUL32x32INTO32_LH,MUL32x32INTO32_LL;\
-         LVM_INT32  shiftValue;\
-        shiftValue = (ShiftR);\
-        MUL32x32INTO32_mask=0x0000FFFF;\
-        MUL32x32INTO32_HH= ((LVM_INT32)((LVM_INT16)((A)>>16))*((LVM_INT16)((B)>>16)) );\
-        MUL32x32INTO32_HL= ((LVM_INT32)((B)&MUL32x32INTO32_mask)*((LVM_INT16)((A)>>16))) ;\
-        MUL32x32INTO32_LH= ((LVM_INT32)((A)&MUL32x32INTO32_mask)*((LVM_INT16)((B)>>16)));\
-        MUL32x32INTO32_LL= (LVM_INT32)((A)&MUL32x32INTO32_mask)*(LVM_INT32)((B)&MUL32x32INTO32_mask);\
-        MUL32x32INTO32_temp= (LVM_INT32)(MUL32x32INTO32_HL&MUL32x32INTO32_mask)+(LVM_INT32)(MUL32x32INTO32_LH&MUL32x32INTO32_mask)+(LVM_INT32)((MUL32x32INTO32_LL>>16)&MUL32x32INTO32_mask);\
-        MUL32x32INTO32_HH= MUL32x32INTO32_HH+(LVM_INT32)(MUL32x32INTO32_HL>>16)+(LVM_INT32)(MUL32x32INTO32_LH>>16)+(LVM_INT32)(MUL32x32INTO32_temp>>16);\
-        MUL32x32INTO32_LL=MUL32x32INTO32_LL+(LVM_INT32)(MUL32x32INTO32_HL<<16)+(LVM_INT32)(MUL32x32INTO32_LH<<16);\
-        if(shiftValue<32)\
-        {\
-        MUL32x32INTO32_HH=MUL32x32INTO32_HH<<(32-shiftValue);\
-        MUL32x32INTO32_mask=((LVM_INT32)1<<(32-shiftValue))-1;\
-        MUL32x32INTO32_LL=(MUL32x32INTO32_LL>>shiftValue)&MUL32x32INTO32_mask;\
-        MUL32x32INTO32_temp2=MUL32x32INTO32_HH|MUL32x32INTO32_LL;\
-        }\
-        else\
-       {\
-        MUL32x32INTO32_temp2=(LVM_INT32)MUL32x32INTO32_HH>>(shiftValue-32);\
-       }\
-       (C) = MUL32x32INTO32_temp2;\
-       }
+#define MUL32x32INTO32(A, B, C, ShiftR)                                                        \
+    {                                                                                          \
+        LVM_INT32 MUL32x32INTO32_temp, MUL32x32INTO32_temp2, MUL32x32INTO32_mask,              \
+                MUL32x32INTO32_HH, MUL32x32INTO32_HL, MUL32x32INTO32_LH, MUL32x32INTO32_LL;    \
+        LVM_INT32 shiftValue;                                                                  \
+        shiftValue = (ShiftR);                                                                 \
+        MUL32x32INTO32_mask = 0x0000FFFF;                                                      \
+        MUL32x32INTO32_HH = ((LVM_INT32)((LVM_INT16)((A) >> 16)) * ((LVM_INT16)((B) >> 16)));  \
+        MUL32x32INTO32_HL = ((LVM_INT32)((B)&MUL32x32INTO32_mask) * ((LVM_INT16)((A) >> 16))); \
+        MUL32x32INTO32_LH = ((LVM_INT32)((A)&MUL32x32INTO32_mask) * ((LVM_INT16)((B) >> 16))); \
+        MUL32x32INTO32_LL =                                                                    \
+                (LVM_INT32)((A)&MUL32x32INTO32_mask) * (LVM_INT32)((B)&MUL32x32INTO32_mask);   \
+        MUL32x32INTO32_temp = (LVM_INT32)(MUL32x32INTO32_HL & MUL32x32INTO32_mask) +           \
+                              (LVM_INT32)(MUL32x32INTO32_LH & MUL32x32INTO32_mask) +           \
+                              (LVM_INT32)((MUL32x32INTO32_LL >> 16) & MUL32x32INTO32_mask);    \
+        MUL32x32INTO32_HH = MUL32x32INTO32_HH + (LVM_INT32)(MUL32x32INTO32_HL >> 16) +         \
+                            (LVM_INT32)(MUL32x32INTO32_LH >> 16) +                             \
+                            (LVM_INT32)(MUL32x32INTO32_temp >> 16);                            \
+        MUL32x32INTO32_LL = MUL32x32INTO32_LL + (LVM_INT32)(MUL32x32INTO32_HL << 16) +         \
+                            (LVM_INT32)(MUL32x32INTO32_LH << 16);                              \
+        if (shiftValue < 32) {                                                                 \
+            MUL32x32INTO32_HH = MUL32x32INTO32_HH << (32 - shiftValue);                        \
+            MUL32x32INTO32_mask = ((LVM_INT32)1 << (32 - shiftValue)) - 1;                     \
+            MUL32x32INTO32_LL = (MUL32x32INTO32_LL >> shiftValue) & MUL32x32INTO32_mask;       \
+            MUL32x32INTO32_temp2 = MUL32x32INTO32_HH | MUL32x32INTO32_LL;                      \
+        } else {                                                                               \
+            MUL32x32INTO32_temp2 = (LVM_INT32)MUL32x32INTO32_HH >> (shiftValue - 32);          \
+        }                                                                                      \
+        (C) = MUL32x32INTO32_temp2;                                                            \
+    }
 
 /**********************************************************************************
    MUL32x16INTO32(A,B,C,ShiftR)
@@ -65,25 +70,24 @@
         of overflow is undefined.
 
 ***********************************************************************************/
-#define MUL32x16INTO32(A,B,C,ShiftR)   \
-        {LVM_INT32 MUL32x16INTO32_mask,MUL32x16INTO32_HH,MUL32x16INTO32_LL;\
-         LVM_INT32  shiftValue;\
-        shiftValue = (ShiftR);\
-        MUL32x16INTO32_mask=0x0000FFFF;\
-        MUL32x16INTO32_HH= ((LVM_INT32)(B)*((LVM_INT16)((A)>>16)));\
-        MUL32x16INTO32_LL= ((LVM_INT32)((A)&MUL32x16INTO32_mask)*(B));\
-        if(shiftValue<16)\
-        {\
-        MUL32x16INTO32_HH=(LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH<<(16-shiftValue));\
-        (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
-        }\
-        else if(shiftValue<32) {\
-        MUL32x16INTO32_HH=(LVM_INT32)(MUL32x16INTO32_HH>>(shiftValue-16));\
-        (C)=MUL32x16INTO32_HH+(LVM_INT32)(MUL32x16INTO32_LL>>shiftValue);\
-        }\
-        else {\
-        (C)=MUL32x16INTO32_HH>>(shiftValue-16);}\
-        }
+#define MUL32x16INTO32(A, B, C, ShiftR)                                                          \
+    {                                                                                            \
+        LVM_INT32 MUL32x16INTO32_mask, MUL32x16INTO32_HH, MUL32x16INTO32_LL;                     \
+        LVM_INT32 shiftValue;                                                                    \
+        shiftValue = (ShiftR);                                                                   \
+        MUL32x16INTO32_mask = 0x0000FFFF;                                                        \
+        MUL32x16INTO32_HH = ((LVM_INT32)(B) * ((LVM_INT16)((A) >> 16)));                         \
+        MUL32x16INTO32_LL = ((LVM_INT32)((A)&MUL32x16INTO32_mask) * (B));                        \
+        if (shiftValue < 16) {                                                                   \
+            MUL32x16INTO32_HH = (LVM_INT32)((LVM_UINT32)MUL32x16INTO32_HH << (16 - shiftValue)); \
+            (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue);              \
+        } else if (shiftValue < 32) {                                                            \
+            MUL32x16INTO32_HH = (LVM_INT32)(MUL32x16INTO32_HH >> (shiftValue - 16));             \
+            (C) = MUL32x16INTO32_HH + (LVM_INT32)(MUL32x16INTO32_LL >> shiftValue);              \
+        } else {                                                                                 \
+            (C) = MUL32x16INTO32_HH >> (shiftValue - 16);                                        \
+        }                                                                                        \
+    }
 
 /**********************************************************************************
    ADD2_SAT_32x32(A,B,C)
@@ -91,16 +95,16 @@
 
         A,B and C are 32 bit SIGNED numbers.
 ***********************************************************************************/
-#define ADD2_SAT_32x32(A,B,C)   \
-        {(C)=(A)+(B);\
-         if ((((C) ^ (A)) & ((C) ^ (B))) >> 31)\
-            {\
-                if((A)<0)\
-                    (C)=0x80000000l;\
-                else\
-                    (C)=0x7FFFFFFFl;\
-            }\
-        }
+#define ADD2_SAT_32x32(A, B, C)                  \
+    {                                            \
+        (C) = (A) + (B);                         \
+        if ((((C) ^ (A)) & ((C) ^ (B))) >> 31) { \
+            if ((A) < 0)                         \
+                (C) = 0x80000000l;               \
+            else                                 \
+                (C) = 0x7FFFFFFFl;               \
+        }                                        \
+    }
 
 #endif /* _LVM_MACROS_H_ */
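
In plainer terms, MUL32x16INTO32(A, B, C, ShiftR) stores the full signed product of a 32-bit and a 16-bit value, shifted right by ShiftR, into C (MUL32x32INTO32 does the same for two 32-bit operands), and ADD2_SAT_32x32 clamps on overflow instead of wrapping. A standalone reference sketch, not a drop-in replacement for the macros:

    #include <cstdint>
    #include <cstdio>

    // Reference behaviour of MUL32x16INTO32, overflow cases aside.
    static int32_t mul32x16(int32_t a, int16_t b, int shiftR) {
        return (int32_t)(((int64_t)a * b) >> shiftR);
    }

    // Reference behaviour of ADD2_SAT_32x32.
    static int32_t add2Sat(int32_t a, int32_t b) {
        const int64_t sum = (int64_t)a + b;
        if (sum > INT32_MAX) return INT32_MAX;  // 0x7FFFFFFF, as in the macro
        if (sum < INT32_MIN) return INT32_MIN;  // 0x80000000, as in the macro
        return (int32_t)sum;
    }

    int main() {
        // 0.5 (Q31) * 0.25 (Q15), shifted by 15, gives 0.125 in Q31.
        printf("0x%08X\n", (unsigned)mul32x16(0x40000000, 0x2000, 15));  // 0x10000000
        // Saturating add clamps instead of wrapping.
        printf("0x%08X\n", (unsigned)add2Sat(0x7F000000, 0x10000000));   // 0x7FFFFFFF
        return 0;
    }
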
 
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
index dbf9e6a..75f4785 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Timer.h
@@ -37,8 +37,7 @@
 /*  TYPE DEFINITIONS                                                                    */
 /****************************************************************************************/
 
-typedef struct
-{
+typedef struct {
     /*
      * The memory area created using this structure is internally
      * typecast to LVM_Timer_Instance_Private_t and used.
@@ -51,14 +50,13 @@
 
 } LVM_Timer_Instance_t;
 
-typedef struct
-{
-    LVM_INT32  SamplingRate;
-    LVM_INT16  TimeInMs;
-    LVM_INT32  CallBackParam;
-    void       *pCallBackParams;
-    void       *pCallbackInstance;
-    void       (*pCallBack)(void*,void*,LVM_INT32);
+typedef struct {
+    LVM_INT32 SamplingRate;
+    LVM_INT16 TimeInMs;
+    LVM_INT32 CallBackParam;
+    void* pCallBackParams;
+    void* pCallbackInstance;
+    void (*pCallBack)(void*, void*, LVM_INT32);
 
 } LVM_Timer_Params_t;
 
@@ -66,14 +64,12 @@
 /*  FUNCTION PROTOTYPES                                                                 */
 /****************************************************************************************/
 
-void LVM_Timer_Init (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_Timer_Params_t         *pParams     );
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams);
 
-void LVM_Timer      (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_INT16                       BlockSize );
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize);
 
 /****************************************************************************************/
 /*  END OF HEADER                                                                       */
 /****************************************************************************************/
 
-#endif  /* __LVM_TIMER_H__ */
+#endif /* __LVM_TIMER_H__ */
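
A hypothetical wiring of the timer, based only on the prototypes above: the timer is armed for TimeInMs at the given sampling rate, then advanced by the number of samples in each processed block; pCallBack is presumably invoked once the programmed time has elapsed (an assumption, not stated in this header). Helper names are illustrative:

    #include "LVM_Timer.h"

    static void onTimer(void* pCallBackParams, void* pCallbackInstance, LVM_INT32 callBackParam) {
        (void)pCallBackParams;
        (void)pCallbackInstance;
        (void)callBackParam;
        /* e.g. end a volume ramp or an initial mute period */
    }

    void exampleTimerSetup(LVM_Timer_Instance_t* pTimer) {
        LVM_Timer_Params_t params;
        params.SamplingRate = 48000;
        params.TimeInMs = 100;
        params.CallBackParam = 0;
        params.pCallBackParams = nullptr;
        params.pCallbackInstance = nullptr;
        params.pCallBack = onTimer;
        LVM_Timer_Init(pTimer, &params);
    }

    void examplePerBlock(LVM_Timer_Instance_t* pTimer, LVM_INT16 blockSize) {
        LVM_Timer(pTimer, blockSize);  /* call once per processed block */
    }
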
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index 8b687f6..7cfaf27 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -26,53 +26,34 @@
 #define LVM_TYPES_H
 
 #include <stdint.h>
-
+#include <system/audio.h>
 /****************************************************************************************/
 /*                                                                                      */
 /*  definitions                                                                         */
 /*                                                                                      */
 /****************************************************************************************/
 
-#define LVM_NULL                0                   /* NULL pointer */
+#define LVM_NULL 0 /* NULL pointer */
 
-#define LVM_TRUE                1                   /* Booleans */
-#define LVM_FALSE               0
+#define LVM_TRUE 1 /* Booleans */
+#define LVM_FALSE 0
 
-#define LVM_MAXINT_8            127                 /* Maximum positive integer size */
-#define LVM_MAXINT_16           32767
-#define LVM_MAXINT_32           2147483647
-#define LVM_MAXENUM             2147483647
+#define LVM_MAXINT_8 127 /* Maximum positive integer size */
+#define LVM_MAXINT_16 32767
+#define LVM_MAXINT_32 2147483647
+#define LVM_MAXENUM 2147483647
 
-#define LVM_MODULEID_MASK       0xFF00              /* Mask to extract the calling module ID from callbackId */
-#define LVM_EVENTID_MASK        0x00FF              /* Mask to extract the callback event from callbackId */
+#define LVM_MODULEID_MASK 0xFF00 /* Mask to extract the calling module ID from callbackId */
+#define LVM_EVENTID_MASK 0x00FF  /* Mask to extract the callback event from callbackId */
 
 /* Memory table*/
-#define LVM_MEMREGION_PERSISTENT_SLOW_DATA      0   /* Offset to the instance memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_DATA      1   /* Offset to the persistent data memory region */
-#define LVM_MEMREGION_PERSISTENT_FAST_COEF      2   /* Offset to the persistent coefficient memory region */
-#define LVM_MEMREGION_TEMPORARY_FAST            3   /* Offset to temporary memory region */
+#define LVM_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */
+#define LVM_MEMREGION_PERSISTENT_FAST_COEF \
+    2                                  /* Offset to the persistent coefficient memory region */
+#define LVM_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */
 
-#define LVM_NR_MEMORY_REGIONS                   4   /* Number of memory regions */
-
-/* Memory partition type */
-#define LVM_MEM_PARTITION0      0                   /* 1st memory partition */
-#define LVM_MEM_PARTITION1      1                   /* 2nd memory partition */
-#define LVM_MEM_PARTITION2      2                   /* 3rd memory partition */
-#define LVM_MEM_PARTITION3      3                   /* 4th memory partition */
-
-/* Use type */
-#define LVM_MEM_PERSISTENT      0                   /* Persistent memory type */
-#define LVM_MEM_SCRATCH         4                   /* Scratch  memory type */
-
-/* Access type */
-#define LVM_MEM_INTERNAL        0                   /* Internal (fast) access memory */
-#define LVM_MEM_EXTERNAL        8                   /* External (slow) access memory */
-
-/* Platform specific */
-#define LVM_PERSISTENT          (LVM_MEM_PARTITION0+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_DATA     (LVM_MEM_PARTITION1+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_PERSISTENT_COEF     (LVM_MEM_PARTITION2+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL)
-#define LVM_SCRATCH             (LVM_MEM_PARTITION3+LVM_MEM_SCRATCH+LVM_MEM_INTERNAL)
+#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -80,33 +61,28 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-typedef     char                LVM_CHAR;           /* ASCII character */
+typedef char LVM_CHAR; /* ASCII character */
 
-typedef     int8_t              LVM_INT8;           /* Signed 8-bit word */
-typedef     uint8_t             LVM_UINT8;          /* Unsigned 8-bit word */
+typedef int8_t LVM_INT8;   /* Signed 8-bit word */
+typedef uint8_t LVM_UINT8; /* Unsigned 8-bit word */
 
-typedef     int16_t             LVM_INT16;          /* Signed 16-bit word */
-typedef     uint16_t            LVM_UINT16;         /* Unsigned 16-bit word */
+typedef int16_t LVM_INT16;   /* Signed 16-bit word */
+typedef uint16_t LVM_UINT16; /* Unsigned 16-bit word */
 
-typedef     int32_t             LVM_INT32;          /* Signed 32-bit word */
-typedef     uint32_t            LVM_UINT32;         /* Unsigned 32-bit word */
-typedef     int64_t             LVM_INT64;          /* Signed 64-bit word */
+typedef int32_t LVM_INT32;   /* Signed 32-bit word */
+typedef uint32_t LVM_UINT32; /* Unsigned 32-bit word */
+typedef int64_t LVM_INT64;   /* Signed 64-bit word */
 
-#define LVM_MAXFLOAT            1.f
+#define LVM_MAXFLOAT 1.f
 
-typedef     float               LVM_FLOAT;          /* single precision floating point */
+typedef float LVM_FLOAT; /* single precision floating point */
 
 // Select whether we expose int16_t or float buffers.
 
-#define    EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
-typedef     float               effect_buffer_t;
+#define EFFECT_BUFFER_FORMAT AUDIO_FORMAT_PCM_FLOAT
+typedef float effect_buffer_t;
 
-
-#ifdef SUPPORT_MC
-#define LVM_MAX_CHANNELS 8 // FCC_8
-#else
-#define LVM_MAX_CHANNELS 2 // FCC_2
-#endif
+#define LVM_MAX_CHANNELS FCC_24
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -115,29 +91,20 @@
 /****************************************************************************************/
 
 /* Operating mode */
-typedef enum
-{
-    LVM_MODE_OFF    = 0,
-    LVM_MODE_ON     = 1,
-    LVM_MODE_DUMMY  = LVM_MAXENUM
-} LVM_Mode_en;
+typedef enum { LVM_MODE_OFF = 0, LVM_MODE_ON = 1, LVM_MODE_DUMMY = LVM_MAXENUM } LVM_Mode_en;
 
 /* Format */
-typedef enum
-{
-    LVM_STEREO          = 0,
-    LVM_MONOINSTEREO    = 1,
-    LVM_MONO            = 2,
-#ifdef SUPPORT_MC
-    LVM_MULTICHANNEL    = 3,
-#endif
-    LVM_SOURCE_DUMMY    = LVM_MAXENUM
+typedef enum {
+    LVM_STEREO = 0,
+    LVM_MONOINSTEREO = 1,
+    LVM_MONO = 2,
+    LVM_MULTICHANNEL = 3,
+    LVM_SOURCE_DUMMY = LVM_MAXENUM
 } LVM_Format_en;
 
 /* LVM sampling rates */
-typedef enum
-{
-    LVM_FS_8000  = 0,
+typedef enum {
+    LVM_FS_8000 = 0,
     LVM_FS_11025 = 1,
     LVM_FS_12000 = 2,
     LVM_FS_16000 = 3,
@@ -150,42 +117,33 @@
     LVM_FS_96000 = 10,
     LVM_FS_176400 = 11,
     LVM_FS_192000 = 12,
-    LVM_FS_INVALID = LVM_MAXENUM-1,
+    LVM_FS_INVALID = LVM_MAXENUM - 1,
     LVM_FS_DUMMY = LVM_MAXENUM
 } LVM_Fs_en;
 
-/* Memory Types */
-typedef enum
-{
-    LVM_PERSISTENT_SLOW_DATA    = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
-    LVM_PERSISTENT_FAST_DATA    = LVM_MEMREGION_PERSISTENT_FAST_DATA,
-    LVM_PERSISTENT_FAST_COEF    = LVM_MEMREGION_PERSISTENT_FAST_COEF,
-    LVM_TEMPORARY_FAST          = LVM_MEMREGION_TEMPORARY_FAST,
-    LVM_MEMORYTYPE_DUMMY        = LVM_MAXENUM
-} LVM_MemoryTypes_en;
-
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                  Size;                   /* Region size in bytes */
-    LVM_MemoryTypes_en          Type;                   /* Region type */
-    void                        *pBaseAddress;          /* Pointer to the region base address */
-} LVM_MemoryRegion_st;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st         Region[LVM_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVM_MemoryTable_st;
+static inline LVM_Fs_en lvmFsForSampleRate(int sampleRate) {
+    static const std::map<int, LVM_Fs_en> kLVMFsMap = {
+            {8000, LVM_FS_8000},    {11025, LVM_FS_11025}, {12000, LVM_FS_12000},
+            {16000, LVM_FS_16000},  {22050, LVM_FS_22050}, {24000, LVM_FS_24000},
+            {32000, LVM_FS_32000},  {44100, LVM_FS_44100}, {48000, LVM_FS_48000},
+            {88200, LVM_FS_88200},  {96000, LVM_FS_96000}, {176400, LVM_FS_176400},
+            {192000, LVM_FS_192000}};
+    if (kLVMFsMap.find(sampleRate) != kLVMFsMap.end()) {
+        return kLVMFsMap.at(sampleRate);
+    }
+    return LVM_FS_INVALID;
+}
 
 /****************************************************************************************/
 /*                                                                                      */
 /*  Standard Function Prototypes                                                        */
 /*                                                                                      */
 /****************************************************************************************/
-typedef LVM_INT32 (*LVM_Callback)(void          *pCallbackData,     /* Pointer to the callback data structure */
-                                  void          *pGeneralPurpose,   /* General purpose pointer (e.g. to a data structure needed in the callback) */
-                                  LVM_INT16     GeneralPurpose );   /* General purpose variable (e.g. to be used as callback ID) */
+typedef LVM_INT32 (*LVM_Callback)(
+        void* pCallbackData,   /* Pointer to the callback data structure */
+        void* pGeneralPurpose, /* General purpose pointer (e.g. to a data structure needed in the
+                                  callback) */
+        LVM_INT16 GeneralPurpose); /* General purpose variable (e.g. to be used as callback ID) */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -193,4 +151,4 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#endif  /* LVM_TYPES_H */
+#endif /* LVM_TYPES_H */
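
A usage sketch for the new lvmFsForSampleRate() helper: translate a rate in Hz into the LVM_Fs_en enum and reject anything outside the supported set. The wrapper name is illustrative:

    #include "LVM_Types.h"

    bool exampleSetSampleRate(int sampleRateHz, LVM_Fs_en* pFsOut) {
        const LVM_Fs_en fs = lvmFsForSampleRate(sampleRateHz);
        if (fs == LVM_FS_INVALID) {
            return false;  // e.g. 44056 Hz is not in the table
        }
        *pFsOut = fs;      // e.g. 44100 -> LVM_FS_44100
        return true;
    }
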
diff --git a/media/libeffects/lvm/lib/Common/lib/Mixer.h b/media/libeffects/lvm/lib/Common/lib/Mixer.h
index b2e0195..ba605e5 100644
--- a/media/libeffects/lvm/lib/Common/lib/Mixer.h
+++ b/media/libeffects/lvm/lib/Common/lib/Mixer.h
@@ -24,80 +24,62 @@
    INSTANCE MEMORY TYPE DEFINITION
 ***********************************************************************************/
 
-typedef struct
-{
-    LVM_FLOAT   Alpha;                   /* Time constant. Set by calling application. \
-                                            Can be changed at any time */
-    LVM_FLOAT   Target;                  /* Target value.  Set by calling application. \
-                                            Can be changed at any time */
-    LVM_FLOAT   Current;                 /* Current value.  Set by the mixer function. */
-    LVM_INT16   CallbackSet;             /* Boolean.  Should be set by calling application \
-                                            each time the target value is updated */
-    LVM_INT16   CallbackParam;           /* Parameter that will be used in the calback function */
-    void        *pCallbackHandle;        /* Pointer to the instance of the callback function */
-    void        *pGeneralPurpose;        /* Pointer for general purpose usage */
-    LVM_Callback pCallBack;              /* Pointer to the callback function */
+typedef struct {
+    LVM_FLOAT Alpha;         /* Time constant. Set by calling application. \
+                                Can be changed at any time */
+    LVM_FLOAT Target;        /* Target value.  Set by calling application. \
+                                Can be changed at any time */
+    LVM_FLOAT Current;       /* Current value.  Set by the mixer function. */
+    LVM_INT16 CallbackSet;   /* Boolean.  Should be set by calling application \
+                                each time the target value is updated */
+    LVM_INT16 CallbackParam; /* Parameter that will be used in the callback function */
+    void* pCallbackHandle;   /* Pointer to the instance of the callback function */
+    void* pGeneralPurpose;   /* Pointer for general purpose usage */
+    LVM_Callback pCallBack;  /* Pointer to the callback function */
 } Mix_1St_Cll_FLOAT_t;
-typedef struct
-{
-    LVM_FLOAT   Alpha1;
-    LVM_FLOAT   Target1;
-    LVM_FLOAT   Current1;
-    LVM_INT16   CallbackSet1;
-    LVM_INT16   CallbackParam1;
-    void        *pCallbackHandle1;
-    void        *pGeneralPurpose1;
+typedef struct {
+    LVM_FLOAT Alpha1;
+    LVM_FLOAT Target1;
+    LVM_FLOAT Current1;
+    LVM_INT16 CallbackSet1;
+    LVM_INT16 CallbackParam1;
+    void* pCallbackHandle1;
+    void* pGeneralPurpose1;
     LVM_Callback pCallBack1;
 
-    LVM_FLOAT   Alpha2;                   /* Warning the address of this location is passed as a \
-                                             pointer to Mix_1St_Cll_t in some functions */
-    LVM_FLOAT   Target2;
-    LVM_FLOAT   Current2;
-    LVM_INT16   CallbackSet2;
-    LVM_INT16   CallbackParam2;
-    void        *pCallbackHandle2;
-    void        *pGeneralPurpose2;
+    LVM_FLOAT Alpha2; /* Warning: the address of this location is passed as a \
+                         pointer to Mix_1St_Cll_FLOAT_t in some functions */
+    LVM_FLOAT Target2;
+    LVM_FLOAT Current2;
+    LVM_INT16 CallbackSet2;
+    LVM_INT16 CallbackParam2;
+    void* pCallbackHandle2;
+    void* pGeneralPurpose2;
     LVM_Callback pCallBack2;
 } Mix_2St_Cll_FLOAT_t;
 
 /*** General functions ************************************************************/
 
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32   tc,
-                                 LVM_UINT32   Fs,
-                                 LVM_UINT16   NumChannels);
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels);
 
-void MixSoft_1St_D32C31_WRA(    Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                            LVM_INT16 n);
 
-void MixSoft_2St_D32C31_SAT(    Mix_2St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src1,
-                                const LVM_FLOAT     *src2,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                            const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
 
-void MixInSoft_D32C31_SAT(      Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                LVM_FLOAT     *dst,
-                                LVM_INT16     n);
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 n);
 
 /**********************************************************************************
    FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
 ***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA(   Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-void Core_MixHard_2St_D32C31_SAT(   Mix_2St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-void Core_MixInSoft_D32C31_SAT(     Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                                 const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n);
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                               LVM_INT16 n);
 
 /**********************************************************************************/
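
The Mix_1St_Cll_FLOAT_t fields above (Alpha, Target, Current plus the callback bookkeeping) describe a first-order ramp of a gain toward Target. The sketch below shows how those fields typically fit together; it is only an illustration of the data structure, assuming the header above is on the include path, and is not the library's actual MixSoft_1St_D32C31_WRA implementation.

    #include <math.h>

    #include "Mixer.h" /* the header shown above */

    /* Illustrative one-pole ramp: Current moves a fraction Alpha of the remaining
       distance to Target on every sample, and the callback fires once the ramp
       has effectively landed. */
    static void MixRampSketch(Mix_1St_Cll_FLOAT_t* m, const LVM_FLOAT* src, LVM_FLOAT* dst,
                              LVM_INT16 n) {
        for (LVM_INT16 i = 0; i < n; i++) {
            m->Current += m->Alpha * (m->Target - m->Current);
            dst[i] = src[i] * m->Current;
        }
        if (m->CallbackSet && m->pCallBack != nullptr &&
            fabs(m->Target - m->Current) < 1.0e-6) {
            m->CallbackSet = 0;
            (*m->pCallBack)(m->pCallbackHandle, m->pGeneralPurpose, m->CallbackParam);
        }
    }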
 
diff --git a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
index ae54419..36c4cd2 100644
--- a/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/ScalarArithmetic.h
@@ -21,7 +21,7 @@
 /*######################################################################################*/
 /*  Include files                                                                       */
 /*######################################################################################*/
-
+#include <math.h>
 #include "LVM_Types.h"
 
 /*######################################################################################*/
@@ -30,7 +30,13 @@
 
 /* Absolute value including the corner case for the extreme negative value */
 
-LVM_FLOAT   Abs_Float(LVM_FLOAT     input);
+static inline LVM_FLOAT Abs_Float(LVM_FLOAT input) {
+    return fabs(input);
+}
+
+static inline LVM_FLOAT LVM_Clamp(LVM_FLOAT val) {
+    return fmin(fmax(val, -1.0f), 1.0f);
+}
 
 /****************************************************************************************
  *  Name        : dB_to_Lin32()
@@ -44,7 +50,6 @@
  *                  (15->01) = decimal part
  *  Returns     : Lin value format 1.16.15
  ****************************************************************************************/
-LVM_FLOAT dB_to_LinFloat(LVM_INT16    db_fix);
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix);
 
-#endif  /* __SCALARARITHMETIC_H__ */
-
+#endif /* __SCALARARITHMETIC_H__ */
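
Abs_Float and LVM_Clamp are now trivial inline wrappers over fabs/fmin/fmax, so a header include is all a caller needs. A small usage sketch (the buffer helper is illustrative; only the two inline functions come from the header above):

    #include "ScalarArithmetic.h" /* the header shown above */

    /* Clamp an audio buffer to the [-1.0, 1.0] full-scale float range before it
       is handed to a consumer that cannot tolerate overshoot. */
    static void ClampBufferSketch(LVM_FLOAT* buf, int n) {
        for (int i = 0; i < n; i++) {
            buf[i] = LVM_Clamp(buf[i]); /* fmin(fmax(x, -1.0f), 1.0f) */
        }
    }
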
diff --git a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
index b27bac5..10f351e 100644
--- a/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
+++ b/media/libeffects/lvm/lib/Common/lib/VectorArithmetic.h
@@ -24,127 +24,41 @@
     VARIOUS FUNCTIONS
 ***********************************************************************************/
 
-void LoadConst_Float(          const LVM_FLOAT val,
-                               LVM_FLOAT *dst,
-                               LVM_INT16 n );
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                          LVM_INT32 NrChannels);
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, const LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, LVM_INT32 NrChannels);
 
-void Copy_Float(                 const LVM_FLOAT *src,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 n );
-#ifdef SUPPORT_MC
-void Copy_Float_Mc_Stereo(       const LVM_FLOAT *src,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 NrFrames,
-                                 LVM_INT32 NrChannels);
-void Copy_Float_Stereo_Mc(       const LVM_FLOAT *src,
-                                 LVM_FLOAT *StereoOut,
-                                 LVM_FLOAT *dst,
-                                 LVM_INT16 NrFrames,
-                                 LVM_INT32 NrChannels);
-#endif
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
-/*********************************************************************************
- * note: In Mult3s_16x16() saturation of result is not taken care when           *
- *       overflow occurs.                                                        *
- *       For example when *src = 0x8000, val = *0x8000                           *
- *       The function gives the output as 0x8000 instead of 0x7fff               *
- *       This is the only case which will give wrong result.                     *
- *       For more information refer to Vector_Arithmetic.doc in /doc folder      *
- *********************************************************************************/
-void Mult3s_Float(            const LVM_FLOAT *src,
-                              const LVM_FLOAT val,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n);
-
-/*********************************************************************************
- * note: In Mult3s_32x16() saturation of result is not taken care when           *
- *       overflow occurs.                                                        *
- *       For example when *src = 0x8000000, val = *0x8000                        *
- *       The function gives the output as 0x8000000 instead of 0x7fffffff        *
- *       This is the only extreme condition which is giving unexpected result    *
- *       For more information refer to Vector_Arithmetic.doc in /doc folder      *
- *********************************************************************************/
-void Mult3s_32x16(            const LVM_INT32  *src,
-                              const LVM_INT16 val,
-                                    LVM_INT32  *dst,
-                                    LVM_INT16 n);
-void DelayMix_Float(const LVM_FLOAT *src,           /* Source 1, to be delayed */
-                    LVM_FLOAT *delay,         /* Delay buffer */
-                    LVM_INT16 size,           /* Delay size */
-                    LVM_FLOAT *dst,           /* Source/destination */
-                    LVM_INT16 *pOffset,       /* Delay offset */
-                    LVM_INT16 n)  ;            /* Number of stereo samples */
-void DelayWrite_32(           const LVM_INT32  *src,               /* Source 1, to be delayed */
-                                    LVM_INT32  *delay,             /* Delay buffer */
-                                    LVM_UINT16 size,               /* Delay size */
-                                    LVM_UINT16 *pOffset,           /* Delay offset */
-                                    LVM_INT16 n);
-void Add2_Sat_Float(          const LVM_FLOAT *src,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n );
-void Mac3s_Sat_Float(         const LVM_FLOAT *src,
-                              const LVM_FLOAT val,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n);
-void DelayAllPass_Sat_32x16To32(    LVM_INT32  *delay,              /* Delay buffer */
-                                    LVM_UINT16 size,                /* Delay size */
-                                    LVM_INT16 coeff,                /* All pass filter coefficient */
-                                    LVM_UINT16 DelayOffset,         /* Simple delay offset */
-                                    LVM_UINT16 *pAllPassOffset,     /* All pass filter delay offset */
-                                    LVM_INT32  *dst,                /* Source/destination */
-                                    LVM_INT16 n);
+void DelayMix_Float(const LVM_FLOAT* src,  /* Source 1, to be delayed */
+                    LVM_FLOAT* delay,      /* Delay buffer */
+                    LVM_INT16 size,        /* Delay size */
+                    LVM_FLOAT* dst,        /* Source/destination */
+                    LVM_INT16* pOffset,    /* Delay offset */
+                    LVM_INT16 n,           /* Number of stereo samples */
+                    LVM_INT32 NrChannels); /* Number of channels */
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n);
 
 /**********************************************************************************
     SHIFT FUNCTIONS
 ***********************************************************************************/
-void Shift_Sat_Float (const   LVM_INT16   val,
-                      const   LVM_FLOAT   *src,
-                      LVM_FLOAT   *dst,
-                      LVM_INT16   n);
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
 /**********************************************************************************
     AUDIO FORMAT CONVERSION FUNCTIONS
 ***********************************************************************************/
-void MonoTo2I_Float( const LVM_FLOAT     *src,
-                     LVM_FLOAT     *dst,
-                     LVM_INT16 n);
-void From2iToMono_Float(         const LVM_FLOAT  *src,
-                                 LVM_FLOAT  *dst,
-                                 LVM_INT16 n);
-#ifdef SUPPORT_MC
-void FromMcToMono_Float(const LVM_FLOAT *src,
-                        LVM_FLOAT *dst,
-                        LVM_INT16 NrFrames,
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n);
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
                         LVM_INT16 NrChannels);
-#endif
-void MSTo2i_Sat_Float(        const LVM_FLOAT *srcM,
-                              const LVM_FLOAT *srcS,
-                              LVM_FLOAT *dst,
-                              LVM_INT16 n );
-void From2iToMS_Float(        const LVM_FLOAT *src,
-                              LVM_FLOAT *dstM,
-                              LVM_FLOAT *dstS,
-                              LVM_INT16 n );
-void JoinTo2i_Float(          const LVM_FLOAT  *srcL,
-                              const LVM_FLOAT  *srcR,
-                              LVM_FLOAT  *dst,
-                              LVM_INT16 n );
-
-/**********************************************************************************
-    DATA TYPE CONVERSION FUNCTIONS
-***********************************************************************************/
-
-void Int16LShiftToInt32_16x32(const LVM_INT16 *src,
-                                    LVM_INT32  *dst,
-                                    LVM_INT16 n,
-                                    LVM_INT16 shift );
-
-void Int32RShiftToInt16_Sat_32x16(const  LVM_INT32  *src,
-                                    LVM_INT16 *dst,
-                                    LVM_INT16 n,
-                                    LVM_INT16 shift );
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n);
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n);
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n);
 
 /**********************************************************************************/
 
-#endif  /* _VECTOR_ARITHMETIC_H_ */
+#endif /* _VECTOR_ARITHMETIC_H_ */
 
 /**********************************************************************************/
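
As an aid to reading the condensed prototypes, here is a sketch of what a routine with MonoTo2I_Float's signature conventionally does: duplicate each mono sample into an interleaved left/right pair. It is an illustration of the naming convention only (stdint types stand in for the LVM_* typedefs), not the library's own source.

    #include <stdint.h>

    /* Mono to interleaved stereo. The loop runs backwards so that src and dst may
       overlap (in-place expansion into a buffer with room for 2*n samples). */
    static void MonoTo2I_Sketch(const float* src, float* dst, int16_t n) {
        for (int16_t i = (int16_t)(n - 1); i >= 0; i--) {
            dst[2 * i] = src[i];     /* left  */
            dst[2 * i + 1] = src[i]; /* right */
        }
    }
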
diff --git a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
index e18aa78..78f329e 100644
--- a/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp
@@ -20,7 +20,6 @@
 /*    Includes                                                                          */
 /*                                                                                      */
 /****************************************************************************************/
-
 #include "AGC.h"
 #include "ScalarArithmetic.h"
 
@@ -30,151 +29,13 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define VOL_TC_SHIFT                                        21          /* As a power of 2 */
-#define DECAY_SHIFT                                        10           /* As a power of 2 */
-#define VOL_TC_FLOAT                                      2.0f          /* As a power of 2 */
-#define DECAY_FAC_FLOAT                                  64.0f          /* As a power of 2 */
+#define VOL_TC_SHIFT 21       /* As a power of 2 */
+#define DECAY_SHIFT 10        /* As a power of 2 */
+#define VOL_TC_FLOAT 2.0f     /* As a power of 2 */
+#define DECAY_FAC_FLOAT 64.0f /* As a power of 2 */
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                  AGC_MIX_VOL_2St1Mon_D32_WRA                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*    Apply AGC and mix signals                                                         */
-/*                                                                                      */
-/*                                                                                      */
-/*  StSrc   ------------------|                                                         */
-/*                            |                                                         */
-/*              ______       _|_        ________                                        */
-/*             |      |     |   |      |        |                                       */
-/*  MonoSrc -->| AGC  |---->| + |----->| Volume |------------------------------+--->    */
-/*             | Gain |     |___|      | Gain   |                              |        */
-/*             |______|                |________|                              |        */
-/*                /|\                               __________     ________    |        */
-/*                 |                               |          |   |        |   |        */
-/*                 |-------------------------------| AGC Gain |<--| Peak   |<--|        */
-/*                                                 | Update   |   | Detect |            */
-/*                                                 |__________|   |________|            */
-/*                                                                                      */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  pInstance               Instance pointer                                            */
-/*  pStereoIn               Stereo source                                               */
-/*  pMonoIn                 Mono band pass source                                       */
-/*  pStereoOut              Stereo destination                                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  Void                                                                                */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*                                                                                      */
-/****************************************************************************************/
-void AGC_MIX_VOL_2St1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,     /* Instance pointer */
-                                 const LVM_FLOAT            *pStSrc,        /* Stereo source */
-                                 const LVM_FLOAT            *pMonoSrc,      /* Mono source */
-                                 LVM_FLOAT                  *pDst,          /* Stereo destination */
-                                 LVM_UINT16                 NumSamples)     /* Number of samples */
-{
-
-    /*
-     * General variables
-     */
-    LVM_UINT16      i;                                          /* Sample index */
-    LVM_FLOAT       Left;                                       /* Left sample */
-    LVM_FLOAT       Right;                                      /* Right sample */
-    LVM_FLOAT       Mono;                                       /* Mono sample */
-    LVM_FLOAT       AbsPeak;                                    /* Absolute peak signal */
-    LVM_FLOAT       AGC_Mult;                                   /* Short AGC gain */
-    LVM_FLOAT       Vol_Mult;                                   /* Short volume */
-
-    /*
-     * Instance control variables
-     */
-    LVM_FLOAT      AGC_Gain      = pInstance->AGC_Gain;         /* Get the current AGC gain */
-    LVM_FLOAT      AGC_MaxGain   = pInstance->AGC_MaxGain;      /* Get maximum AGC gain */
-    LVM_FLOAT      AGC_Attack    = pInstance->AGC_Attack;       /* Attack scaler */
-    LVM_FLOAT      AGC_Decay     = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));/* Decay scaler */
-    LVM_FLOAT      AGC_Target    = pInstance->AGC_Target;       /* Get the target level */
-    LVM_FLOAT      Vol_Current   = pInstance->Volume;           /* Actual volume setting */
-    LVM_FLOAT      Vol_Target    = pInstance->Target;           /* Target volume setting */
-    LVM_FLOAT      Vol_TC        = pInstance->VolumeTC;         /* Time constant */
-
-    /*
-     * Process on a sample by sample basis
-     */
-    for (i = 0; i < NumSamples; i++)                                  /* For each sample */
-    {
-
-        /*
-         * Get the short scalers
-         */
-        AGC_Mult    = (LVM_FLOAT)(AGC_Gain);              /* Get the short AGC gain */
-        Vol_Mult    = (LVM_FLOAT)(Vol_Current);           /* Get the short volume gain */
-
-        /*
-         * Get the input samples
-         */
-        Left  = *pStSrc++;                                      /* Get the left sample */
-        Right = *pStSrc++;                                      /* Get the right sample */
-        Mono  = *pMonoSrc++;                                    /* Get the mono sample */
-
-        /*
-         * Apply the AGC gain to the mono input and mix with the stereo signal
-         */
-        Left  += (Mono * AGC_Mult);                               /* Mix in the mono signal */
-        Right += (Mono * AGC_Mult);
-
-        /*
-         * Apply the volume and write to the output stream
-         */
-        Left  = Left  * Vol_Mult;
-        Right = Right * Vol_Mult;
-        *pDst++ = Left;                                         /* Save the results */
-        *pDst++ = Right;
-
-        /*
-         * Update the AGC gain
-         */
-        AbsPeak = Abs_Float(Left) > Abs_Float(Right) ? Abs_Float(Left) : Abs_Float(Right);
-        if (AbsPeak > AGC_Target)
-        {
-            /*
-             * The signal is too large so decrease the gain
-             */
-            AGC_Gain = AGC_Gain * AGC_Attack;
-        }
-        else
-        {
-            /*
-             * The signal is too small so increase the gain
-             */
-            if (AGC_Gain > AGC_MaxGain)
-            {
-                AGC_Gain -= (AGC_Decay);
-            }
-            else
-            {
-                AGC_Gain += (AGC_Decay);
-            }
-        }
-
-        /*
-         * Update the gain
-         */
-        Vol_Current +=  (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
-    }
-
-    /*
-     * Update the parameters
-     */
-    pInstance->Volume = Vol_Current;                            /* Actual volume setting */
-    pInstance->AGC_Gain = AGC_Gain;
-
-    return;
-}
-#ifdef SUPPORT_MC
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                  AGC_MIX_VOL_Mc1Mon_D32_WRA                                */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
@@ -209,93 +70,80 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t  *pInstance,
-                                 const LVM_FLOAT            *pMcSrc,
-                                 const LVM_FLOAT            *pMonoSrc,
-                                 LVM_FLOAT                  *pDst,
-                                 LVM_UINT16                 NrFrames,
-                                 LVM_UINT16                 NrChannels)
-{
-
+void AGC_MIX_VOL_Mc1Mon_D32_WRA(AGC_MIX_VOL_2St1Mon_FLOAT_t* pInstance, const LVM_FLOAT* pMcSrc,
+                                const LVM_FLOAT* pMonoSrc, LVM_FLOAT* pDst, LVM_UINT16 NrFrames,
+                                LVM_UINT16 NrChannels) {
     /*
      * General variables
      */
-    LVM_UINT16      i, jj;                                      /* Sample index */
-    LVM_FLOAT       SampleVal;                                  /* Sample value */
-    LVM_FLOAT       Mono;                                       /* Mono sample */
-    LVM_FLOAT       AbsPeak;                                    /* Absolute peak signal */
-    LVM_FLOAT       AGC_Mult;                                   /* Short AGC gain */
-    LVM_FLOAT       Vol_Mult;                                   /* Short volume */
+    LVM_UINT16 i, jj;    /* Sample index */
+    LVM_FLOAT SampleVal; /* Sample value */
+    LVM_FLOAT Mono;      /* Mono sample */
+    LVM_FLOAT AbsPeak;   /* Absolute peak signal */
+    LVM_FLOAT AGC_Mult;  /* Short AGC gain */
+    LVM_FLOAT Vol_Mult;  /* Short volume */
 
     /*
      * Instance control variables
      */
-    LVM_FLOAT      AGC_Gain      = pInstance->AGC_Gain;         /* Get the current AGC gain */
-    LVM_FLOAT      AGC_MaxGain   = pInstance->AGC_MaxGain;      /* Get maximum AGC gain */
-    LVM_FLOAT      AGC_Attack    = pInstance->AGC_Attack;       /* Attack scaler */
+    LVM_FLOAT AGC_Gain = pInstance->AGC_Gain;       /* Get the current AGC gain */
+    LVM_FLOAT AGC_MaxGain = pInstance->AGC_MaxGain; /* Get maximum AGC gain */
+    LVM_FLOAT AGC_Attack = pInstance->AGC_Attack;   /* Attack scaler */
     /* Decay scaler */
-    LVM_FLOAT      AGC_Decay     = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
-    LVM_FLOAT      AGC_Target    = pInstance->AGC_Target;       /* Get the target level */
-    LVM_FLOAT      Vol_Current   = pInstance->Volume;           /* Actual volume setting */
-    LVM_FLOAT      Vol_Target    = pInstance->Target;           /* Target volume setting */
-    LVM_FLOAT      Vol_TC        = pInstance->VolumeTC;         /* Time constant */
+    LVM_FLOAT AGC_Decay = (pInstance->AGC_Decay * (1 << (DECAY_SHIFT)));
+    LVM_FLOAT AGC_Target = pInstance->AGC_Target; /* Get the target level */
+    LVM_FLOAT Vol_Current = pInstance->Volume;    /* Actual volume setting */
+    LVM_FLOAT Vol_Target = pInstance->Target;     /* Target volume setting */
+    LVM_FLOAT Vol_TC = pInstance->VolumeTC;       /* Time constant */
 
     /*
      * Process on a sample by sample basis
      */
-    for (i = 0; i < NrFrames; i++)                                  /* For each frame */
+    for (i = 0; i < NrFrames; i++) /* For each frame */
     {
-
         /*
          * Get the scalers
          */
-        AGC_Mult    = (LVM_FLOAT)(AGC_Gain);              /* Get the AGC gain */
-        Vol_Mult    = (LVM_FLOAT)(Vol_Current);           /* Get the volume gain */
+        AGC_Mult = (LVM_FLOAT)(AGC_Gain);    /* Get the AGC gain */
+        Vol_Mult = (LVM_FLOAT)(Vol_Current); /* Get the volume gain */
 
         AbsPeak = 0.0f;
         /*
          * Get the input samples
          */
-        for (jj = 0; jj < NrChannels; jj++)
-        {
-            SampleVal  = *pMcSrc++;                       /* Get the sample value of jj Channel*/
-            Mono       = *pMonoSrc;                       /* Get the mono sample */
+        for (jj = 0; jj < NrChannels; jj++) {
+            SampleVal = *pMcSrc++; /* Get the sample value of channel jj */
+            Mono = *pMonoSrc;      /* Get the mono sample */
 
             /*
              * Apply the AGC gain to the mono input and mix with the input signal
              */
-            SampleVal  += (Mono * AGC_Mult);                        /* Mix in the mono signal */
+            SampleVal += (Mono * AGC_Mult); /* Mix in the mono signal */
 
             /*
              * Apply the volume and write to the output stream
              */
-            SampleVal  = SampleVal  * Vol_Mult;
+            SampleVal = SampleVal * Vol_Mult;
 
-            *pDst++ = SampleVal;                                         /* Save the results */
+            *pDst++ = LVM_Clamp(SampleVal); /* Save the results */
 
             /*
              * Update the AGC gain
              */
             AbsPeak = Abs_Float(SampleVal) > AbsPeak ? Abs_Float(SampleVal) : AbsPeak;
         }
-        if (AbsPeak > AGC_Target)
-        {
+        if (AbsPeak > AGC_Target) {
             /*
              * The signal is too large so decrease the gain
              */
             AGC_Gain = AGC_Gain * AGC_Attack;
-        }
-        else
-        {
+        } else {
             /*
              * The signal is too small so increase the gain
              */
-            if (AGC_Gain > AGC_MaxGain)
-            {
+            if (AGC_Gain > AGC_MaxGain) {
                 AGC_Gain -= (AGC_Decay);
-            }
-            else
-            {
+            } else {
                 AGC_Gain += (AGC_Decay);
             }
         }
@@ -303,15 +151,14 @@
         /*
          * Update the gain
          */
-        Vol_Current +=  (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
+        Vol_Current += (Vol_Target - Vol_Current) * ((LVM_FLOAT)Vol_TC / VOL_TC_FLOAT);
     }
 
     /*
      * Update the parameters
      */
-    pInstance->Volume = Vol_Current;                            /* Actual volume setting */
+    pInstance->Volume = Vol_Current; /* Actual volume setting */
     pInstance->AGC_Gain = AGC_Gain;
 
     return;
 }
-#endif /*SUPPORT_MC*/
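
The surviving multichannel AGC above updates its gain once per frame from the per-frame absolute peak. Pulled out of the loop for readability, the gain-update rule looks like the sketch below (variable roles follow the function above; the free-standing helper itself is illustrative, with LVM_FLOAT assumed to be float):

    /* Per-frame AGC gain update: multiplicative attack when the peak exceeds the
       target, additive decay otherwise, with the sign of the decay step chosen so
       the gain converges on AGC_MaxGain. */
    static float UpdateAgcGainSketch(float gain, float absPeak, float target, float maxGain,
                                     float attack, float decay) {
        if (absPeak > target) {
            gain *= attack; /* signal too large: pull the gain down quickly */
        } else if (gain > maxGain) {
            gain -= decay;  /* above the maximum: drift back down */
        } else {
            gain += decay;  /* headroom available: creep back up */
        }
        return gain;
    }
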
diff --git a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp b/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
deleted file mode 100644
index e013809..0000000
--- a/media/libeffects/lvm/lib/Common/src/Abs_32.cpp
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*######################################################################################*/
-/*  Include files                                                                       */
-/*######################################################################################*/
-
-#include    "ScalarArithmetic.h"
-
-/****************************************************************************************
- *  Name        : Abs_32()
- *  Input       : Signed 32-bit integer
- *  Output      :
- *  Returns     : Absolute value
- *  Description : Absolute value with maximum negative value corner case
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_INT32    Abs_32(LVM_INT32    input)
-{
-    if(input <  0)
-    {
-        if (input == (LVM_INT32)(0x80000000U))
-        {
-            /* The corner case, so set to the maximum positive value */
-            input=(LVM_INT32) 0x7fffffff;
-        }
-        else
-        {
-            /* Negative input, so invert */
-            input = (LVM_INT32)(-input);
-        }
-    }
-    return input;
-}
-LVM_FLOAT    Abs_Float(LVM_FLOAT    input)
-{
-    if(input <  0)
-    {
-        /* Negative input, so invert */
-        input = (LVM_FLOAT)(-input);
-    }
-    return input;
-}
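
The deleted Abs_32 existed mostly for one corner case: negating INT32_MIN overflows, so the result was pinned to INT32_MAX. The float replacement (Abs_Float, now an inline fabs wrapper) has no such corner. For reference, the integer behaviour in stdint terms:

    #include <stdint.h>

    /* Saturating absolute value: |INT32_MIN| does not fit in int32_t, so return
       INT32_MAX instead of overflowing - the same behaviour as the deleted Abs_32. */
    static int32_t AbsSat32(int32_t x) {
        if (x == INT32_MIN) return INT32_MAX;
        return (x < 0) ? -x : x;
    }
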
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
deleted file mode 100644
index 6978fe7..0000000
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_16x16.cpp
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION ADD2_SAT_16X16
-***********************************************************************************/
-
-void Add2_Sat_16x16( const LVM_INT16 *src,
-                           LVM_INT16 *dst,
-                           LVM_INT16  n )
-{
-    LVM_INT32 Temp;
-    LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_INT32) *src) + ((LVM_INT32) *dst);
-        src++;
-
-        if (Temp > 0x00007FFF)
-        {
-            *dst = 0x7FFF;
-        }
-        else if (Temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
-            *dst = (LVM_INT16)Temp;
-        }
-        dst++;
-    }
-    return;
-}
-
-/**********************************************************************************/
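
The removed Add2_Sat_16x16 saturated by widening each pair of samples to 32 bits and comparing against the 16-bit range. A per-sample sketch of that strategy in stdint terms:

    #include <stdint.h>

    /* Widen, add, then clamp to [INT16_MIN, INT16_MAX] - the approach used by the
       deleted Add2_Sat_16x16, written for a single sample pair. */
    static int16_t AddSat16(int16_t a, int16_t b) {
        int32_t sum = (int32_t)a + (int32_t)b;
        if (sum > INT16_MAX) return INT16_MAX;
        if (sum < INT16_MIN) return INT16_MIN;
        return (int16_t)sum;
    }
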
diff --git a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
index a48e668..1981edd 100644
--- a/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Add2_Sat_32x32.cpp
@@ -18,69 +18,15 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION ADD2_SAT_32X32
-***********************************************************************************/
-
-void Add2_Sat_32x32( const LVM_INT32  *src,
-                           LVM_INT32  *dst,
-                           LVM_INT16  n )
-{
-    LVM_INT32 a,b,c;
-    LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        a=*src;
-        src++;
-
-        b=*dst;
-        c=a+b;
-        if ((((c ^ a) & (c ^ b)) >> 31)!=0)     /* overflow / underflow */
-        {
-            if(a<0)
-            {
-                c=0x80000000L;
-            }
-            else
-            {
-                c=0x7FFFFFFFL;
-            }
-        }
-
-        *dst = c;
-        dst++;
-    }
-    return;
-}
-
-void Add2_Sat_Float( const LVM_FLOAT  *src,
-                           LVM_FLOAT  *dst,
-                           LVM_INT16  n )
-{
+void Add2_Sat_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_FLOAT Temp;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_FLOAT) *src) + ((LVM_FLOAT) *dst);
-        src++;
-
-        if (Temp > 1.000000f)
-        {
-            *dst = 1.000000f;
-        }
-        else if (Temp < -1.000000f)
-        {
-            *dst = -1.000000f;
-        }
-        else
-        {
-            *dst = Temp;
-        }
-        dst++;
+    for (ii = n; ii != 0; ii--) {
+        Temp = *src++ + *dst;
+        *dst++ = LVM_Clamp(Temp);
     }
     return;
 }
-/**********************************************************************************/
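
The deleted 32-bit integer variant could not simply widen, so it detected overflow from the sign bits: if the sum's sign differs from the sign of both operands, the addition wrapped and the result is saturated toward the operands' common sign. A per-sample sketch of that check (the unsigned add avoids signed-overflow undefined behaviour; otherwise it mirrors the deleted Add2_Sat_32x32):

    #include <stdint.h>

    /* ((c ^ a) & (c ^ b)) has its sign bit set exactly when c's sign differs from
       both a's and b's, i.e. when the two's-complement addition wrapped. */
    static int32_t AddSat32(int32_t a, int32_t b) {
        int32_t c = (int32_t)((uint32_t)a + (uint32_t)b); /* wraparound add */
        if (((c ^ a) & (c ^ b)) < 0) {
            c = (a < 0) ? INT32_MIN : INT32_MAX; /* saturate toward the operands' sign */
        }
        return c;
    }
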
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
deleted file mode 100644
index 1a5e07f..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16C14_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BP_1I_D16F16Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A0,
- pBiquadState->coefs[1] is -B2,
- pBiquadState->coefs[2] is -B1, these are in Q14 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-2)L in Q0 format
- pBiquadState->pDelays[2] is y(n-1)L in Q0 format
- pBiquadState->pDelays[3] is y(n-2)L in Q0 format
-***************************************************************************/
-void BP_1I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-
-    {
-        LVM_FLOAT ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
-            ynL = pBiquadState->coefs[0] * ((*pDataIn)-pBiquadState->pDelays[1]);
-
-            // ynL+= ((-B2  * y(n-2)L  ) )
-            ynL += pBiquadState->coefs[1] * pBiquadState->pDelays[3];
-
-            // ynL+= ((-B1  * y(n-1)L  ) )
-            ynL += pBiquadState->coefs[2] * pBiquadState->pDelays[2];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++=ynL; // Write Left output
-
-        }
-
-    }
-
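
For readers tracking what the removed band-pass kernels computed, the ASSUMPTIONS block above fixes the layout: coefs[] holds A0, -B2, -B1 and pDelays[] holds x(n-1), x(n-2), y(n-1), y(n-2). Written out as a plain float routine (illustrative names, same difference equation):

    /* y(n) = A0*(x(n) - x(n-2)) - B2*y(n-2) - B1*y(n-1), with the minus signs
       already folded into coefs[1] and coefs[2] as documented above. */
    static void BandPassBiquadSketch(const float coefs[3], float delays[4], const float* in,
                                     float* out, int n) {
        for (int i = 0; i < n; i++) {
            float y = coefs[0] * (in[i] - delays[1]) /*  A0 * (x(n) - x(n-2)) */
                    + coefs[1] * delays[3]           /* -B2 * y(n-2)          */
                    + coefs[2] * delays[2];          /* -B1 * y(n-1)          */
            delays[3] = delays[2]; /* y(n-2) <- y(n-1) */
            delays[1] = delays[0]; /* x(n-2) <- x(n-1) */
            delays[2] = y;         /* y(n-1) <- y(n)   */
            delays[0] = in[i];     /* x(n-1) <- x(n)   */
            out[i] = y;
        }
    }
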
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
deleted file mode 100644
index 60b6c16..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BP_1I_D16F16Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BP_1I_D16F16Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   These functions initializes a BIQUAD filter defined as a cascade of   */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BP_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t          *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t    *pTaps,
-                                         BP_FLOAT_Coefs_t                  *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
-
-    pBiquadState->coefs[0] = pCoef->A0;
-    pBiquadState->coefs[1] = pCoef->B2;
-    pBiquadState->coefs[2] = pCoef->B1;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BP_1I_D16F16Css_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
deleted file mode 100644
index 8a000b6..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F16Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef _BP_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-#define _BP_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-#endif /*_BP_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
deleted file mode 100644
index c844d03..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32C30_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BP_1I_D16F32Cll_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A0,
- pBiquadState->coefs[1] is -B2,
- pBiquadState->coefs[2] is -B1, these are in Q30 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-2)L in Q0 format
- pBiquadState->pDelays[2] is y(n-1)L in Q16 format
- pBiquadState->pDelays[3] is y(n-2)L in Q16 format
-***************************************************************************/
-void BP_1I_D16F32C30_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-{
-    LVM_FLOAT ynL,templ;
-    LVM_INT16 ii;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)pInstance;
-
-    for (ii = NrSamples; ii != 0; ii--)
-    {
-        /**************************************************************************
-                       PROCESSING OF THE LEFT CHANNEL
-        ***************************************************************************/
-        // ynL= (A0 * (x(n)L - x(n-2)L ))
-        templ = (LVM_FLOAT) *pDataIn - pBiquadState->pDelays[1];
-        ynL = pBiquadState->coefs[0] * templ;
-
-        // ynL+= ((-B2  * y(n-2)L  ) )
-        templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
-        ynL += templ;
-
-        // ynL+= ((-B1  * y(n-1)L  ))
-        templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
-        ynL += templ;
-
-        /**************************************************************************
-                        UPDATING THE DELAYS
-        ***************************************************************************/
-        pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-        pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-        pBiquadState->pDelays[2] = ynL; // Update y(n-1)L in Q16
-        pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L in Q0
-
-        /**************************************************************************
-                        WRITING THE OUTPUT
-        ***************************************************************************/
-        *pDataOut++ = (ynL); // Write Left output
-        }
-}
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
deleted file mode 100644
index eb15032..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BP_1I_D16F32Cll_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BP_1I_D16F32Cll_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   These functions initializes a Band pass filter (BIQUAD)               */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/*                                                                         */
-/*        The coefficients are modified in the init() function such that lower               */
-/*        half word is right shifted by one and most significant bit of the lower            */
-/*        word is made to be zero.                                                           */
-/*                                                                                           */
-/*       Reason: For MIPS effciency,we are using DSP 32*16 multiplication                    */
-/*       instruction. But we have 32*32 multiplication. This can be realized by two 32*16    */
-/*       multiplication. But 16th bit in the 32 bit word is not a sign bit. So this is done  */
-/*       by putting 16th bit to zero and lossing one bit precision by division of lower      */
-/*       half word by 2.                                                                     */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BP_1I_D16F32Cll_TRC_WRA_01_Init (    Biquad_FLOAT_Instance_t         *pInstance,
-                                          Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                          BP_FLOAT_Coefs_t                *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays       =(LVM_FLOAT *) pTaps;
-
-    pBiquadState->coefs[0] =  pCoef->A0;
-    pBiquadState->coefs[1] =  pCoef->B2;
-    pBiquadState->coefs[2] =  pCoef->B1;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BP_1I_D16F32Cll_TRC_WRA_01_Init.c                              */
-
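
The deleted init routine's comment explains why its fixed-point coefficients stored the lower half-word pre-shifted right by one: a 32x32 product was assembled from two signed 32x16 multiplies, and bit 15 of the low half must not be interpreted as a sign bit. An illustrative stdint sketch of that decomposition (the dropped LSB of the low half is the one-bit precision loss the comment mentions):

    #include <stdint.h>

    /* x * c rebuilt from two 32x16 products, with c split as
       c = c_hi * 2^16 + (c_lo << 1) + dropped_lsb. */
    static int64_t Mult32x32ViaTwo32x16(int32_t x, int32_t c) {
        int16_t c_hi = (int16_t)(c >> 16);           /* signed high half-word      */
        int16_t c_lo = (int16_t)((c & 0xFFFF) >> 1); /* low half-word, pre-shifted */
        return (((int64_t)x * c_hi) << 16)           /* x * c_hi * 2^16            */
             + (((int64_t)x * c_lo) << 1);           /* x * (c_lo << 1)            */
    }
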
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
deleted file mode 100644
index 6d754e2..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D16F32Cll_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef _BP_1I_D16F32CLL_TRC_WRA_01_PRIVATE_H_
-#define _BP_1I_D16F32CLL_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float * PFilter_State_FLOAT ;
-#endif /*_BP_1I_D16F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
deleted file mode 100644
index d0ba206..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32C30_TRC_WRA_02.cpp
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BP_1I_D32F32Cll_TRC_WRA_02_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A0,
- pBiquadState->coefs[1] is -B2,
- pBiquadState->coefs[2] is -B1, these are in Q30 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-2)L in Q0 format
- pBiquadState->pDelays[2] is y(n-1)L in Q0 format
- pBiquadState->pDelays[3] is y(n-2)L in Q0 format
-***************************************************************************/
-void BP_1I_D32F32C30_TRC_WRA_02 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT ynL,templ;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-        for (ii = NrSamples; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL= (A0  * (x(n)L  - x(n-2)L  ) )
-            templ = (*pDataIn) - pBiquadState->pDelays[1];
-            ynL = pBiquadState->coefs[0] * templ;
-
-            // ynL+= ((-B2  * y(n-2)L  ) )
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[3];
-            ynL += templ;
-
-            // ynL+= ((-B1  * y(n-1)L  ) )
-            templ = pBiquadState->coefs[2] * pBiquadState->pDelays[2];
-            ynL += templ;
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = ynL; // Write Left output in Q0
-
-        }
-
-    }
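For orientation: the band-pass kernel deleted above keeps only three coefficients, laid out as { A0, -B2, -B1 }, and evaluates y(n) = A0*(x(n) - x(n-2)) - B2*y(n-2) - B1*y(n-1) per sample. A minimal standalone sketch of that recurrence follows; the names (BandPassStateSketch, BandPassProcessSketch) are illustrative stand-ins and are not part of the library or of this change.

// Sketch only, assuming plain float samples; mirrors the removed
// BP_1I_D32F32C30_TRC_WRA_02 recurrence with coefs = { A0, -B2, -B1 }.
#include <cstddef>

struct BandPassStateSketch {              // hypothetical stand-in for Filter_State_Float
    float delays[4] = {};                 // x(n-1), x(n-2), y(n-1), y(n-2)
    float coefs[3]  = {};                 // A0, -B2, -B1
};

inline void BandPassProcessSketch(BandPassStateSketch& s,
                                  const float* in, float* out, std::size_t n) {
    for (std::size_t i = 0; i < n; ++i) {
        float yn = s.coefs[0] * (in[i] - s.delays[1])   // A0 * (x(n) - x(n-2))
                 + s.coefs[1] * s.delays[3]             // (-B2) * y(n-2)
                 + s.coefs[2] * s.delays[2];            // (-B1) * y(n-1)
        s.delays[3] = s.delays[2];                      // y(n-2) = y(n-1)
        s.delays[1] = s.delays[0];                      // x(n-2) = x(n-1)
        s.delays[2] = yn;                               // update y(n-1)
        s.delays[0] = in[i];                            // update x(n-1)
        out[i] = yn;                                    // write the band-pass output
    }
}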
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
deleted file mode 100644
index 6f7d0b5..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Init.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BP_1I_D32F32Cll_TRC_WRA_02_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BP_1I_D32F32Cll_TRC_WRA_02_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BP_1I_D32F32Cll_TRC_WRA_02_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BP_FLOAT_Coefs_t            *pCoef)
-{
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays       =(LVM_FLOAT *) pTaps;
-
-    pBiquadState->coefs[0] = pCoef->A0;
-
-    pBiquadState->coefs[1] = pCoef->B2;
-
-    pBiquadState->coefs[2] = pCoef->B1;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BP_1I_D32F32Cll_TRC_WRA_02_Init.c                              */
-
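The *_Init routines removed in this change share one pattern: the public, opaque Biquad_FLOAT_Instance_t is reinterpreted as the private Filter_State_Float declared in the matching _Private.h header, pDelays is pointed at the caller-supplied taps buffer, and the coefficients are copied across (B1 and B2 are copied as supplied; the processing kernels assume they already hold the negated values). A hedged sketch of that pattern, using hypothetical stand-in types (OpaqueInstanceSketch, PrivateStateSketch, BandPassCoefsSketch) rather than the library's own:

// Sketch only, not the library's API; illustrates the opaque-instance /
// private-state initialization pattern used by the removed *_Init files.
#include <cstddef>

struct OpaqueInstanceSketch {             // stand-in for Biquad_FLOAT_Instance_t
    alignas(alignof(std::max_align_t)) unsigned char storage[64];
};

struct PrivateStateSketch {               // stand-in for Filter_State_Float
    float* pDelays;                       // delay line lives in the caller's taps memory
    float  coefs[3];                      // A0, -B2, -B1 for the band-pass variant
};

struct BandPassCoefsSketch { float A0, B2, B1; };   // stand-in for BP_FLOAT_Coefs_t

inline void BandPassInitSketch(OpaqueInstanceSketch* pInstance,
                               float* pTaps,
                               const BandPassCoefsSketch* pCoef) {
    // Mirrors the C-style cast used by the removed Init routines.
    PrivateStateSketch* state = reinterpret_cast<PrivateStateSketch*>(pInstance);
    state->pDelays  = pTaps;              // x(n-1), x(n-2), y(n-1), y(n-2)
    state->coefs[0] = pCoef->A0;
    state->coefs[1] = pCoef->B2;          // copied as-is; kernels treat it as -B2
    state->coefs[2] = pCoef->B1;          // copied as-is; kernels treat it as -B1
}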
diff --git a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h b/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
deleted file mode 100644
index 9f1c66a..0000000
--- a/media/libeffects/lvm/lib/Common/src/BP_1I_D32F32Cll_TRC_WRA_02_Private.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef _BP_1I_D32F32CLL_TRC_WRA_02_PRIVATE_H_
-#define _BP_1I_D32F32CLL_TRC_WRA_02_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_Float;
-typedef Filter_State_Float* PFilter_State_FLOAT ;
-
-#endif /*_BP_1I_D32F32CLL_TRC_WRA_02_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
deleted file mode 100644
index 9aecc40..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16C15_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_1I_D16F16Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q15 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-2)L in Q0 format
- pBiquadState->pDelays[2] is y(n-1)L in Q0 format
- pBiquadState->pDelays[3] is y(n-2)L in Q0 format
-***************************************************************************/
-void BQ_1I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
-
-            // ynL+=A1 * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            // ynL+=A0 * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            // ynL+=  (-B2  * y(n-2)L )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[3];
-
-            // ynL+= (-B1  * y(n-1)L  )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[2];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2]; // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0]; // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
-
-        }
-
-    }
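The mono kernels deleted above are direct-form I biquads with the coefficient order { A2, A1, A0, -B2, -B1 } and the delay order { x(n-1), x(n-2), y(n-1), y(n-2) } spelled out in their ASSUMPTIONS blocks. A compact standalone sketch of the same loop, with illustrative names (MonoBiquadSketch, MonoBiquadProcessSketch) that do not exist in the library:

// Sketch only: mono float direct-form I biquad with pre-negated feedback
// coefficients, matching the layout assumed by the removed kernels.
#include <cstddef>

struct MonoBiquadSketch {                 // hypothetical stand-in for Filter_State_FLOAT
    float delays[4] = {};                 // x(n-1), x(n-2), y(n-1), y(n-2)
    float coefs[5]  = {};                 // A2, A1, A0, -B2, -B1
};

inline void MonoBiquadProcessSketch(MonoBiquadSketch& s,
                                    const float* in, float* out, std::size_t n) {
    for (std::size_t i = 0; i < n; ++i) {
        float yn = s.coefs[0] * s.delays[1]   // A2 * x(n-2)
                 + s.coefs[1] * s.delays[0]   // A1 * x(n-1)
                 + s.coefs[2] * in[i]         // A0 * x(n)
                 + s.coefs[3] * s.delays[3]   // (-B2) * y(n-2)
                 + s.coefs[4] * s.delays[2];  // (-B1) * y(n-1)
        s.delays[3] = s.delays[2];            // y(n-2) = y(n-1)
        s.delays[1] = s.delays[0];            // x(n-2) = x(n-1)
        s.delays[2] = yn;                     // update y(n-1)
        s.delays[0] = in[i];                  // update x(n-1)
        out[i] = yn;
    }
}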
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
deleted file mode 100644
index f0b5d06..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BQ_1I_D16F16Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BQ_1I_D16F16Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BQ_1I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps ;
-    temp = pCoef->A2;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A1;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[2] = temp;
-    temp = pCoef->B2;
-    pBiquadState->coefs[3] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[4] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BQ_1I_D16F16Css_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
deleted file mode 100644
index fad345d..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F16Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef _BQ_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-#define _BQ_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[5];       /* pointer to the filter coefficients */
-
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-#endif /*_BQ_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
deleted file mode 100644
index 043bc5f..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32C14_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_1I_D16F32Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q14 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-2)L in Q0 format
- pBiquadState->pDelays[2] is y(n-1)L in Q16 format
- pBiquadState->pDelays[3] is y(n-2)L in Q16 format
-***************************************************************************/
-void BQ_1I_D16F32C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[1];
-
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            // ynL+= ( (-B2  * y(n-2)L )
-            ynL += pBiquadState->pDelays[3] * pBiquadState->coefs[3];
-
-            // ynL+= -B1  * y(n-1)L
-            ynL += pBiquadState->pDelays[2] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[2];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[1] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[2] = ynL;                    // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)(ynL); // Write Left output
-
-        }
-    }
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
deleted file mode 100644
index 6a61d9a..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef _BQ_1I_D16F32CSS_TRC_WRA_01_PRIVATE_H_
-#define _BQ_1I_D16F32CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *   pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16     coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *   pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT     coefs[5];       /* pointer to the filter coefficients */
-
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-#endif /*_BQ_1I_D16F32CSS_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
deleted file mode 100644
index 2b80691..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_1I_D16F32Css_TRC_WRA_01_init.cpp
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BQ_1I_D16F32Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BQ_1I_D16F32Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BQ_1I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)pTaps;
-
-    temp = pCoef->A2;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A1;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[2] = temp;
-    temp = pCoef->B2;
-    pBiquadState->coefs[3] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[4] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BQ_1I_D16F32Css_TRC_WRA_01_Init                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
deleted file mode 100644
index 51cd918..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C14_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F16Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q14 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q0 format
- pBiquadState->pDelays[5] is y(n-1)R in Q0 format
- pBiquadState->pDelays[6] is y(n-2)L in Q0 format
- pBiquadState->pDelays[7] is y(n-2)R in Q0 format
-***************************************************************************/
-void BQ_2I_D16F16C14_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            // ynL+= ( -B2  * y(n-2)L  )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
-
-            // ynL+=( -B1  * y(n-1)L )
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            // ynR=A2  * x(n-2)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            // ynR+=A1  * x(n-1)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-
-            // ynR+=A0  * x(n)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
-
-            // ynR+= ( -B2  * y(n-2)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
-
-            // ynR+=( -B1  * y(n-1)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
-            pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-            pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-            *pDataOut++ = (LVM_FLOAT)ynR; // Write Right output
-
-        }
-
-    }
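The stereo (2I) kernels deleted above process interleaved L/R frames with one shared coefficient set and separate per-channel delay lines, in the delay order given in their ASSUMPTIONS blocks. A standalone sketch of that interleaved loop, again with hypothetical names (StereoBiquadSketch, StereoBiquadProcessSketch):

// Sketch only: stereo interleaved float direct-form I biquad, shared
// coefficients { A2, A1, A0, -B2, -B1 }, per-channel delay lines.
#include <cstddef>

struct StereoBiquadSketch {                   // hypothetical stand-in
    // delays: x1L, x1R, x2L, x2R, y1L, y1R, y2L, y2R
    float delays[8] = {};
    float coefs[5]  = {};                     // A2, A1, A0, -B2, -B1
};

inline void StereoBiquadProcessSketch(StereoBiquadSketch& s,
                                      const float* in, float* out,
                                      std::size_t frames) {
    for (std::size_t i = 0; i < frames; ++i) {
        const float xL = in[2 * i];
        const float xR = in[2 * i + 1];
        const float yL = s.coefs[0] * s.delays[2] + s.coefs[1] * s.delays[0]
                       + s.coefs[2] * xL
                       + s.coefs[3] * s.delays[6] + s.coefs[4] * s.delays[4];
        const float yR = s.coefs[0] * s.delays[3] + s.coefs[1] * s.delays[1]
                       + s.coefs[2] * xR
                       + s.coefs[3] * s.delays[7] + s.coefs[4] * s.delays[5];
        s.delays[7] = s.delays[5];  s.delays[6] = s.delays[4];   // y(n-2) = y(n-1)
        s.delays[3] = s.delays[1];  s.delays[2] = s.delays[0];   // x(n-2) = x(n-1)
        s.delays[5] = yR;           s.delays[4] = yL;            // update y(n-1)
        s.delays[1] = xR;           s.delays[0] = xL;            // update x(n-1)
        out[2 * i]     = yL;                                     // write left
        out[2 * i + 1] = yR;                                     // write right
    }
}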
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
deleted file mode 100644
index 8f74749..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16C15_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F16Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q15 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q0 format
- pBiquadState->pDelays[5] is y(n-1)R in Q0 format
- pBiquadState->pDelays[6] is y(n-2)L in Q0 format
- pBiquadState->pDelays[7] is y(n-2)R in Q0 format
-***************************************************************************/
-void BQ_2I_D16F16C15_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                  LVM_FLOAT               *pDataIn,
-                                  LVM_FLOAT               *pDataOut,
-                                  LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A2  * x(n-2)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            // ynL+=A1  * x(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            // ynL+= ( -B2  * y(n-2)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[6];
-
-            // ynL+=( -B1  * y(n-1)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[4];
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            // ynR=A2  * x(n-2)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            // ynR+=A1  * x(n-1)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-
-            // ynR+=A0  * x(n)R
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
-
-            // ynR+= ( -B2  * y(n-2)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[3] * pBiquadState->pDelays[7];
-
-            // ynR+=( -B1  * y(n-1)R  )
-            ynR += (LVM_FLOAT)pBiquadState->coefs[4] * pBiquadState->pDelays[5];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  // y(n-2)R=y(n-1)R
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  // y(n-2)L=y(n-1)L
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  // x(n-2)R=x(n-1)R
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  // x(n-2)L=x(n-1)L
-            pBiquadState->pDelays[5] = ynR;                       // Update y(n-1)R
-            pBiquadState->pDelays[4] = ynL;                       // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++);              // Update x(n-1)L
-            pBiquadState->pDelays[1] = (*pDataIn++);              // Update x(n-1)R
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-            *pDataOut++ = (LVM_FLOAT)ynR; // Write Right output
-
-        }
-
-    }
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
deleted file mode 100644
index 987cbcf..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BQ_2I_D16F16Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BQ_2I_D16F16Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BQ_2I_D16F16Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps            ;
-
-    temp = pCoef->A2;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A1;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[2] = temp;
-    temp = pCoef->B2;
-    pBiquadState->coefs[3] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[4] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BQ_2I_D16F16Css_TRC_WRA_01_Init.c                              */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
deleted file mode 100644
index 5a9a0e9..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F16Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _BQ_2I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-#define _BQ_2I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *   pDelays;            /* pointer to the delayed samples (data of 32 bits) */
-  LVM_INT16     coefs[5];           /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *   pDelays;            /* pointer to the delayed samples (data of 32 bits) */
-    LVM_FLOAT     coefs[5];           /* pointer to the filter coefficients */
-
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-
-#endif /* _BQ_2I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
deleted file mode 100644
index 331c97f..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C13_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F32Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q13 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q16 format
- pBiquadState->pDelays[5] is y(n-1)R in Q16 format
- pBiquadState->pDelays[6] is y(n-2)L in Q16 format
- pBiquadState->pDelays[7] is y(n-2)R in Q16 format
-***************************************************************************/
-void BQ_2I_D16F32C13_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-        for (ii = NrSamples; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2 * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            /* ynL+=A1* x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            /* ynL+=A0* x(n)L   */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            /* ynL+=-B2*y(n-2)L */
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
-
-            /* ynL+=-B1*y(n-1)L */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2 * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            /* ynR+=A1* x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-
-            /* ynR+=A0* x(n)R   */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
-
-            /* ynR+=-B2 * y(n-2)R */
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
-
-            /* ynR+=-B1 * y(n-1)R */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR;                       /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL;                       /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn);                /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn);                /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
-            pDataOut++;
-        }
-    }
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
deleted file mode 100644
index 3a396df..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C14_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F32Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q14 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q16 format
- pBiquadState->pDelays[5] is y(n-1)R in Q16 format
- pBiquadState->pDelays[6] is y(n-2)L in Q16 format
- pBiquadState->pDelays[7] is y(n-2)R in Q16 format
-***************************************************************************/
-void BQ_2I_D16F32C14_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-        for (ii = NrSamples; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2  * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            /* ynL+=A1  * x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            /* ynL+=A0  * x(n)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            /* ynL+= ( (-B2  * y(n-2)L  ))*/
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
-
-            /* ynL+=( (-B1  * y(n-1)L  ))  */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2  * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            /* ynR+=A1  * x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-
-            /* ynR+=A0  * x(n)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
-
-            /* ynR+= ( (-B2  * y(n-2)R  ))*/
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
-
-            /* ynR+=( (-B1  * y(n-1)R  ))  */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5];  /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4];  /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1];  /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0];  /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR;                    /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL;                    /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn);                /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn);                /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output */
-            pDataOut++;
-        }
-
-    }
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
deleted file mode 100644
index 1cbff1a..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32C15_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F32Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q15 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q16 format
- pBiquadState->pDelays[5] is y(n-1)R in Q16 format
- pBiquadState->pDelays[6] is y(n-2)L in Q16 format
- pBiquadState->pDelays[7] is y(n-2)R in Q16 format
-***************************************************************************/
-void BQ_2I_D16F32C15_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-    {
-        LVM_FLOAT  ynL,ynR;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL=A2  * x(n-2)L */
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            /* ynL+=A1  * x(n-1)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-
-            /* ynL+=A0  * x(n)L */
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * (*pDataIn);
-
-            /* ynL+= ( (-B2  * y(n-2)L )  */
-            ynL += pBiquadState->pDelays[6] * pBiquadState->coefs[3];
-
-            /* ynL+=( (-B1  * y(n-1)L  ))  */
-            ynL += pBiquadState->pDelays[4] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR=A2  * x(n-2)R */
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            /* ynR+=A1  * x(n-1)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-
-            /* ynR+=A0  * x(n)R */
-            ynR += (LVM_FLOAT)pBiquadState->coefs[2] * (*(pDataIn+1));
-
-            /* ynR+= ( (-B2  * y(n-2)R ) */
-            ynR += pBiquadState->pDelays[7] * pBiquadState->coefs[3];
-
-            /* ynR+=( (-B1  * y(n-1)R  )) in Q15 */
-            ynR += pBiquadState->pDelays[5] * pBiquadState->coefs[4];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R*/
-            pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L*/
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L*/
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R*/
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)(ynL); /* Write Left output*/
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)(ynR); /* Write Right output*/
-            pDataOut++;
-        }
-
-    }
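
The routine removed above is a direct-form-I biquad over interleaved stereo data: y(n) = A0*x(n) + A1*x(n-1) + A2*x(n-2) - B1*y(n-1) - B2*y(n-2), with the feedback coefficients stored pre-negated (coefs[3] = -B2, coefs[4] = -B1) so each channel costs five multiply-accumulates. A minimal standalone sketch of the same recurrence, using plain float in place of the LVM_FLOAT/LVM_INT16 typedefs and illustrative names:

    // Sketch only: stereo direct-form-I biquad with coefs[] = { A2, A1, A0, -B2, -B1 },
    // mirroring the coefficient/delay layout documented in the deleted file.
    struct StereoBiquadSketch {
        float coefs[5];
        float xL[2], xR[2];  // x(n-1), x(n-2) per channel
        float yL[2], yR[2];  // y(n-1), y(n-2) per channel
    };

    static void processStereoSketch(StereoBiquadSketch* s, const float* in, float* out, int frames) {
        for (int i = 0; i < frames; ++i) {
            float inL = in[2 * i], inR = in[2 * i + 1];
            float outL = s->coefs[0] * s->xL[1] + s->coefs[1] * s->xL[0] + s->coefs[2] * inL +
                         s->coefs[3] * s->yL[1] + s->coefs[4] * s->yL[0];
            float outR = s->coefs[0] * s->xR[1] + s->coefs[1] * s->xR[0] + s->coefs[2] * inR +
                         s->coefs[3] * s->yR[1] + s->coefs[4] * s->yR[0];
            s->xL[1] = s->xL[0]; s->xL[0] = inL;   // shift the x history
            s->xR[1] = s->xR[0]; s->xR[0] = inR;
            s->yL[1] = s->yL[0]; s->yL[0] = outL;  // shift the y history
            s->yR[1] = s->yR[0]; s->yR[0] = outR;
            out[2 * i] = outL;
            out[2 * i + 1] = outR;
        }
    }
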
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
deleted file mode 100644
index 314388a..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _BQ_2I_D16F32CSS_TRC_WRA_01_PRIVATE_H_
-#define _BQ_2I_D16F32CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *                          pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16                           coefs[5];         /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                           (data of 32 bits)   */
-    LVM_FLOAT                           coefs[5];        /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-
-#endif /* _BQ_2I_D16F32CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
deleted file mode 100644
index 058541a..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D16F32Css_TRC_WRA_01_init.cpp
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D16F32Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BQ_2I_D16F32Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BQ_2I_D16F32Css_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
-    temp = pCoef->A2;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A1;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[2] = temp;
-    temp = pCoef->B2;
-    pBiquadState->coefs[3] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[4] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BQ_2I_D16F32Css_TRC_WRA_01_Init                              */
-
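
As the description block above notes, this Init only captured the coefficient set (A2, A1, A0, B2, B1) and pointed the shared state at caller-provided tap memory; filtering was done by the matching process routine in BQ_2I_D16F32C15_TRC_WRA_01.cpp. A hedged sketch of how these entry points were wired together before this change removed them (buffer handling is illustrative; the type and field names come from the deleted sources):

    #include "BIQUAD.h"  // library header included by the deleted sources

    // Sketch: initialize a stereo biquad as a pass-through (A0 = 1) and run one block.
    static void runDeletedBiquadSketch(LVM_FLOAT* in, LVM_FLOAT* out, LVM_INT16 frames) {
        Biquad_FLOAT_Instance_t instance;         // cast internally to PFilter_State_FLOAT
        Biquad_2I_Order2_FLOAT_Taps_t taps = {};  // x(n-1..2), y(n-1..2) for both channels
        BQ_FLOAT_Coefs_t coefs = {};
        coefs.A0 = 1.0f;                          // identity filter for illustration

        BQ_2I_D16F32Css_TRC_WRA_01_Init(&instance, &taps, &coefs);
        BQ_2I_D16F32C15_TRC_WRA_01(&instance, in, out, frames);
    }
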
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
deleted file mode 100644
index 78d1ba1..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32C30_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "BQ_2I_D32F32Cll_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1, these are in Q30 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q0 format
- pBiquadState->pDelays[5] is y(n-1)R in Q0 format
- pBiquadState->pDelays[6] is y(n-2)L in Q0 format
- pBiquadState->pDelays[7] is y(n-2)R in Q0 format
-***************************************************************************/
-void BQ_2I_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t       *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrSamples)
-
-    {
-        LVM_FLOAT ynL,ynR,templ,tempd;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL= ( A2  * x(n-2)L  ) */
-            ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            /* ynL+= ( A1  * x(n-1)L  )*/
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[0];
-            ynL += templ;
-
-            /* ynL+= ( A0  * x(n)L  ) */
-            templ = pBiquadState->coefs[2] * (*pDataIn);
-            ynL += templ;
-
-             /* ynL+= (-B2  * y(n-2)L  ) */
-            templ = pBiquadState->coefs[3] * pBiquadState->pDelays[6];
-            ynL += templ;
-
-            /* ynL+= (-B1  * y(n-1)L  )*/
-            templ = pBiquadState->coefs[4] * pBiquadState->pDelays[4];
-            ynL += templ;
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR= ( A2  * x(n-2)R  ) */
-            ynR = pBiquadState->coefs[0] * pBiquadState->pDelays[3];
-
-            /* ynR+= ( A1  * x(n-1)R  ) */
-            templ = pBiquadState->coefs[1] * pBiquadState->pDelays[1];
-            ynR += templ;
-
-            /* ynR+= ( A0  * x(n)R  ) */
-            tempd = *(pDataIn+1);
-            templ = pBiquadState->coefs[2] * tempd;
-            ynR += templ;
-
-            /* ynR+= (-B2  * y(n-2)R  ) */
-            templ = pBiquadState->coefs[3] * pBiquadState->pDelays[7];
-            ynR += templ;
-
-            /* ynR+= (-B1  * y(n-1)R  )  */
-            templ = pBiquadState->coefs[4] * pBiquadState->pDelays[5];
-            ynR += templ;
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = (LVM_FLOAT)ynR; /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = (LVM_FLOAT)ynL; /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = (LVM_FLOAT)ynL; /* Write Left output */
-            pDataOut++;
-            *pDataOut = (LVM_FLOAT)ynR; /* Write Right output */
-            pDataOut++;
-
-        }
-
-    }
-
-#ifdef SUPPORT_MC
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A2, pBiquadState->coefs[1] is A1
- pBiquadState->coefs[2] is A0, pBiquadState->coefs[3] is -B2
- pBiquadState->coefs[4] is -B1
-
- DELAYS-
- pBiquadState->pDelays[0] to
- pBiquadState->pDelays[NrChannels - 1] is x(n-1) for all NrChannels
-
- pBiquadState->pDelays[NrChannels] to
- pBiquadState->pDelays[2*NrChannels - 1] is x(n-2) for all NrChannels
-
- pBiquadState->pDelays[2*NrChannels] to
- pBiquadState->pDelays[3*NrChannels - 1] is y(n-1) for all NrChannels
-
- pBiquadState->pDelays[3*NrChannels] to
- pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
-***************************************************************************/
-void BQ_MC_D32F32C30_TRC_WRA_01 (           Biquad_FLOAT_Instance_t      *pInstance,
-                                            LVM_FLOAT                    *pDataIn,
-                                            LVM_FLOAT                    *pDataOut,
-                                            LVM_INT16                    NrFrames,
-                                            LVM_INT16                    NrChannels)
-
-    {
-        LVM_FLOAT yn, temp;
-        LVM_INT16 ii, jj;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrFrames; ii != 0; ii--)
-         {
-            /**************************************************************************
-                            PROCESSING CHANNEL-WISE
-            ***************************************************************************/
-            for (jj = 0; jj < NrChannels; jj++)
-            {
-                /* yn= (A2  * x(n-2)) */
-                yn = pBiquadState->coefs[0] * pBiquadState->pDelays[NrChannels + jj];
-
-                /* yn+= (A1  * x(n-1)) */
-                temp = pBiquadState->coefs[1] * pBiquadState->pDelays[jj];
-                yn += temp;
-
-                /* yn+= (A0  * x(n)) */
-                temp = pBiquadState->coefs[2] * (*pDataIn);
-                yn += temp;
-
-                 /* yn+= (-B2  * y(n-2)) */
-                temp = pBiquadState->coefs[3] * pBiquadState->pDelays[NrChannels*3 + jj];
-                yn += temp;
-
-                /* yn+= (-B1  * y(n-1)) */
-                temp = pBiquadState->coefs[4] * pBiquadState->pDelays[NrChannels*2 + jj];
-                yn += temp;
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pBiquadState->pDelays[NrChannels * 3 + jj] =
-                    pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
-                pBiquadState->pDelays[NrChannels * 1 + jj] =
-                    pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
-                pBiquadState->pDelays[NrChannels * 2 + jj] = (LVM_FLOAT)yn; /* Update y(n-1)*/
-                pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
-                pDataIn++;
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-                *pDataOut = (LVM_FLOAT)yn; /* Write jj Channel output */
-                pDataOut++;
-            }
-        }
-
-    }
-#endif /*SUPPORT_MC*/
-
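
The multichannel variant above stores its history grouped by delay tap rather than by channel: the first NrChannels entries hold x(n-1) for every channel, the next NrChannels hold x(n-2), then y(n-1) and y(n-2). A standalone plain-float sketch of that indexing (names and types are illustrative, not the library's):

    // Sketch: per-channel DF1 biquad over a delay line laid out as
    //   delays[0*C .. 1*C-1] = x(n-1), delays[1*C .. 2*C-1] = x(n-2),
    //   delays[2*C .. 3*C-1] = y(n-1), delays[3*C .. 4*C-1] = y(n-2)
    static void biquadMcSketch(const float coefs[5], float* delays, const float* in, float* out,
                               int frames, int channels) {
        for (int f = 0; f < frames; ++f) {
            for (int ch = 0; ch < channels; ++ch) {
                float x = *in++;
                float y = coefs[0] * delays[channels + ch]       // A2 * x(n-2)
                        + coefs[1] * delays[ch]                  // A1 * x(n-1)
                        + coefs[2] * x                           // A0 * x(n)
                        + coefs[3] * delays[3 * channels + ch]   // -B2 * y(n-2)
                        + coefs[4] * delays[2 * channels + ch];  // -B1 * y(n-1)
                delays[3 * channels + ch] = delays[2 * channels + ch];  // y(n-2) = y(n-1)
                delays[channels + ch] = delays[ch];                     // x(n-2) = x(n-1)
                delays[2 * channels + ch] = y;                          // update y(n-1)
                delays[ch] = x;                                         // update x(n-1)
                *out++ = y;
            }
        }
    }
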
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
deleted file mode 100644
index 492a9e0..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "BQ_2I_D32F32Cll_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   BQ_2I_D32F32Cll_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void BQ_2I_D32F32Cll_TRC_WRA_01_Init (   Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         BQ_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps;
-    temp = pCoef->A2;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A1;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[2] = temp;
-    temp = pCoef->B2;
-    pBiquadState->coefs[3] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[4] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: BQ_2I_D32F32Cll_TRC_WRA_01_Init.cpp                            */
-
diff --git a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
deleted file mode 100644
index 7eb6474..0000000
--- a/media/libeffects/lvm/lib/Common/src/BQ_2I_D32F32Cll_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _BQ_2I_D32F32CLL_TRC_WRA_01_PRIVATE_H_
-#define _BQ_2I_D32F32CLL_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *                          pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32                            coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                            (data of 32 bits)   */
-    LVM_FLOAT                            coefs[5];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-
-#endif /* _BQ_2I_D32F32CLL_TRC_WRA_01_PRIVATE_H_*/
diff --git a/media/libeffects/lvm/lib/Common/src/CompLim_private.h b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
index 06a21c3..9c7a96b 100644
--- a/media/libeffects/lvm/lib/Common/src/CompLim_private.h
+++ b/media/libeffects/lvm/lib/Common/src/CompLim_private.h
@@ -28,17 +28,16 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define FS_48K      48000
+#define FS_48K 48000
 
-#define INTEGER_16  0xFFFF /*   65535*/
-#define INTEGER_15  0x7FFF /*   32767*/
+#define INTEGER_16 0xFFFF /*   65535*/
+#define INTEGER_15 0x7FFF /*   32767*/
 
-#define GAIN_6DB    1
-#define GAIN_12DB   2
-#define GAIN_18DB   3
-#define GAIN_24DB   4
+#define GAIN_6DB 1
+#define GAIN_12DB 2
+#define GAIN_18DB 3
+#define GAIN_24DB 4
 
 #endif /* #ifndef _COMP_LIM_PRIVATE_ */
 
 /*** End of file ******************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
index 3a50554..1fe7470 100644
--- a/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Copy_16.cpp
@@ -18,130 +18,65 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
+#include <string.h>
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION COPY_16
-***********************************************************************************/
-
-void Copy_16( const LVM_INT16 *src,
-                    LVM_INT16 *dst,
-                    LVM_INT16  n )
-{
-    LVM_INT16 ii;
-
-    if (src > dst)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
-            *dst = *src;
-            dst++;
-            src++;
-        }
-    }
-    else
-    {
-        src += n - 1;
-        dst += n - 1;
-        for (ii = n; ii != 0; ii--)
-        {
-            *dst = *src;
-            dst--;
-            src--;
-        }
-    }
-
+void Copy_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
+    memmove(dst, src, n * sizeof(LVM_FLOAT));
     return;
 }
-void Copy_Float( const LVM_FLOAT *src,
-                 LVM_FLOAT *dst,
-                 LVM_INT16  n )
-{
-    LVM_INT16 ii;
-
-    if (src > dst)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
-            *dst = *src;
-            dst++;
-            src++;
-        }
-    }
-    else
-    {
-        src += n - 1;
-        dst += n - 1;
-        for (ii = n; ii != 0; ii--)
-        {
-            *dst = *src;
-            dst--;
-            src--;
-        }
-    }
-
-    return;
-}
-#ifdef SUPPORT_MC
 // Extract out the stereo channel pair from multichannel source.
-void Copy_Float_Mc_Stereo(const LVM_FLOAT *src,
-                 LVM_FLOAT *dst,
-                 LVM_INT16 NrFrames, /* Number of frames */
-                 LVM_INT32 NrChannels)
-{
+void Copy_Float_Mc_Stereo(const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, /* Number of frames */
+                          LVM_INT32 NrChannels) {
     LVM_INT16 ii;
 
-    if (NrChannels >= 2)
-    {
-        for (ii = NrFrames; ii != 0; ii--)
-        {
+    if (NrChannels >= 2) {
+        for (ii = NrFrames; ii != 0; ii--) {
             dst[0] = src[0];
             dst[1] = src[1];
             dst += 2;
             src += NrChannels;
         }
-    }
-    else if (NrChannels == 1)
-    {   // not expected to occur, provided for completeness.
+    } else if (NrChannels == 1) {  // not expected to occur, provided for completeness.
         src += (NrFrames - 1);
         dst += 2 * (NrFrames - 1);
-        for (ii = NrFrames; ii != 0; ii--)
-        {
+        for (ii = NrFrames; ii != 0; ii--) {
             dst[0] = src[0];
             dst[1] = src[0];
             dst -= 2;
-            src --;
+            src--;
         }
     }
 }
 
 // Merge a multichannel source with stereo contained in StereoOut, to dst.
-void Copy_Float_Stereo_Mc(const LVM_FLOAT *src,
-                 LVM_FLOAT *StereoOut,
-                 LVM_FLOAT *dst,
-                 LVM_INT16 NrFrames, /* Number of frames*/
-                 LVM_INT32 NrChannels)
-{
+void Copy_Float_Stereo_Mc(const LVM_FLOAT* src, const LVM_FLOAT* StereoOut, LVM_FLOAT* dst,
+                          LVM_INT16 NrFrames, /* Number of frames*/
+                          LVM_INT32 NrChannels) {
     LVM_INT16 ii, jj;
 
-    // pack dst with stereo information of StereoOut
-    // together with the upper channels of src.
-    StereoOut += 2 * (NrFrames - 1);
-    dst += NrChannels * (NrFrames - 1);
-    src += NrChannels * (NrFrames - 1);
-    for (ii = NrFrames; ii != 0; ii--)
-    {
-        dst[1] = StereoOut[1];
-        dst[0] = StereoOut[0]; // copy 1 before 0 is required for NrChannels == 3.
-        for (jj = 2; jj < NrChannels; jj++)
-        {
-            dst[jj] = src[jj];
+    if (NrChannels >= FCC_2) {
+        // pack dst with stereo information of StereoOut
+        // together with the upper channels of src.
+        StereoOut += 2 * (NrFrames - 1);
+        dst += NrChannels * (NrFrames - 1);
+        src += NrChannels * (NrFrames - 1);
+
+        for (ii = NrFrames; ii != 0; ii--) {
+            dst[1] = StereoOut[1];
+            dst[0] = StereoOut[0];  // copy 1 before 0 is required for NrChannels == 3.
+            for (jj = FCC_2; jj < NrChannels; jj++) {
+                dst[jj] = src[jj];
+            }
+            dst -= NrChannels;
+            src -= NrChannels;
+            StereoOut -= 2;
         }
-        dst    -= NrChannels;
-        src    -= NrChannels;
-        StereoOut -= 2;
+    } else {
+        Copy_Float((const LVM_FLOAT*)StereoOut, /* Source */
+                   (LVM_FLOAT*)dst,             /* Destination */
+                   (LVM_INT16)NrFrames);        /* Number of frames */
     }
 }
-#endif
 /**********************************************************************************/
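
The rewritten copy helpers pair up: Copy_Float_Mc_Stereo extracts the first two channels of an interleaved multichannel buffer into a stereo working buffer, and Copy_Float_Stereo_Mc writes a processed stereo pair back over those channels while keeping the upper channels from the source. A plain-float sketch of that layout is below; it assumes non-overlapping buffers, whereas the library code iterates backwards so the destination may alias the source:

    // Sketch: pull the stereo pair out of an interleaved multichannel block...
    static void mcToStereoSketch(const float* src, float* stereo, int frames, int channels) {
        for (int f = 0; f < frames; ++f) {
            stereo[2 * f] = src[channels * f];          // left
            stereo[2 * f + 1] = src[channels * f + 1];  // right
        }
    }

    // ...and merge the processed pair back, preserving the remaining channels.
    static void stereoToMcSketch(const float* src, const float* stereo, float* dst, int frames,
                                 int channels) {
        for (int f = 0; f < frames; ++f) {
            dst[channels * f] = stereo[2 * f];          // processed left
            dst[channels * f + 1] = stereo[2 * f + 1];  // processed right
            for (int ch = 2; ch < channels; ++ch) {
                dst[channels * f + ch] = src[channels * f + ch];  // untouched upper channels
            }
        }
    }
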
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
index 5e77335..2c2061a 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixHard_2St_D32C31_SAT.cpp
@@ -25,13 +25,9 @@
 /**********************************************************************************
    FUNCTION CORE_MIXHARD_2ST_D32C31_SAT
 ***********************************************************************************/
-void Core_MixHard_2St_D32C31_SAT(   Mix_2St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n)
-{
-    LVM_FLOAT  Temp1,Temp2,Temp3;
+void Core_MixHard_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                                 const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2, Temp3;
     LVM_INT16 ii;
     LVM_FLOAT Current1Short;
     LVM_FLOAT Current2Short;
@@ -39,7 +35,7 @@
     Current1Short = (pInstance->Current1);
     Current2Short = (pInstance->Current2);
 
-    for (ii = n; ii != 0; ii--){
+    for (ii = n; ii != 0; ii--) {
         Temp1 = *src1++;
         Temp3 = Temp1 * Current1Short;
         Temp2 = *src2++;
@@ -47,11 +43,11 @@
         Temp2 = (Temp1 / 2.0f) + (Temp3 / 2.0f);
         if (Temp2 > 0.5f)
             Temp2 = 1.0f;
-        else if (Temp2 < -0.5f )
+        else if (Temp2 < -0.5f)
             Temp2 = -1.0f;
         else
             Temp2 = (Temp2 * 2);
-            *dst++ = Temp2;
+        *dst++ = Temp2;
     }
 }
 /**********************************************************************************/
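
The "hard" stereo mix above sums the two gain-scaled inputs at half scale, saturates at +/-0.5, and rescales by two, which behaves like clamping the full-scale sum to +/-1.0. A per-sample sketch in plain float (names are illustrative):

    // Sketch: hard two-source mix with saturation, equivalent to
    // out = clamp(src1*gain1 + src2*gain2, -1.0f, 1.0f)
    static inline float mixHardSampleSketch(float s1, float s2, float gain1, float gain2) {
        float half = (s1 * gain1) / 2.0f + (s2 * gain2) / 2.0f;  // half-scale sum
        if (half > 0.5f) return 1.0f;
        if (half < -0.5f) return -1.0f;
        return half * 2.0f;
    }
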
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
index 8f5c0ae..b7f4b55 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixInSoft_D32C31_SAT.cpp
@@ -18,72 +18,54 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "Mixer_private.h"
 #include "LVM_Macros.h"
+#include "ScalarArithmetic.h"
 
 /**********************************************************************************
    FUNCTION CORE_MIXINSOFT_D32C31_SAT
 ***********************************************************************************/
 
-void Core_MixInSoft_D32C31_SAT(     Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     n)
-{
-    LVM_FLOAT    Temp1,Temp2,Temp3;
-    LVM_INT16     OutLoop;
-    LVM_INT16     InLoop;
-    LVM_FLOAT    TargetTimesOneMinAlpha;
-    LVM_FLOAT    CurrentTimesAlpha;
-    LVM_INT16     ii,jj;
+void Core_MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                               LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2, Temp3;
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_FLOAT TargetTimesOneMinAlpha;
+    LVM_FLOAT CurrentTimesAlpha;
+    LVM_INT16 ii, jj;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    TargetTimesOneMinAlpha = ((1.0f -pInstance->Alpha) * pInstance->Target);
-    if (pInstance->Target >= pInstance->Current){
-        TargetTimesOneMinAlpha +=(LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
+    TargetTimesOneMinAlpha = ((1.0f - pInstance->Alpha) * pInstance->Target);
+    if (pInstance->Target >= pInstance->Current) {
+        TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
     }
 
-    if (OutLoop){
-
+    if (OutLoop) {
         CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-        for (ii = OutLoop; ii != 0; ii--){
-        Temp1 = *src++;
-        Temp2 = *dst;
-
-        Temp3 = Temp1 * (pInstance->Current);
-        Temp1 = Temp2 + Temp3;
-
-        if (Temp1 > 1.0f)
-            Temp1 = 1.0f;
-        else if (Temp1 < -1.0f)
-            Temp1 = -1.0f;
-
-        *dst++ = Temp1;
-        }
-    }
-
-    for (ii = InLoop; ii != 0; ii--){
-
-        CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
-        pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
-
-        for (jj = 4; jj!=0 ; jj--){
+        for (ii = OutLoop; ii != 0; ii--) {
             Temp1 = *src++;
             Temp2 = *dst;
 
             Temp3 = Temp1 * (pInstance->Current);
-            Temp1 = Temp2 + Temp3;
+            *dst++ = LVM_Clamp(Temp2 + Temp3);
+        }
+    }
 
-            if (Temp1 > 1.0f)
-                Temp1 = 1.0f;
-            else if (Temp1 < -1.0f)
-                Temp1 = -1.0f;
-            *dst++ = Temp1;
+    for (ii = InLoop; ii != 0; ii--) {
+        CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
+        pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
+
+        for (jj = 4; jj != 0; jj--) {
+            Temp1 = *src++;
+            Temp2 = *dst;
+
+            Temp3 = Temp1 * (pInstance->Current);
+            *dst++ = LVM_Clamp(Temp2 + Temp3);
         }
     }
 }
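
The soft mix-in above ramps the applied gain toward Target with a one-pole smoother, Current = (1 - Alpha) * Target + Alpha * Current, updating the gain once per four-sample group (plus once for the leftover samples) rather than per sample; Core_MixSoft in the next file uses the same ramp without the output clamp. A standalone plain-float sketch of the mix-in step; the tiny 2/2^31 "ceil" nudge and the remainder-first ordering of the original are omitted:

    // Sketch: soft mix-in with a one-pole gain ramp and output clamping.
    static void mixInSoftSketch(float* current, float target, float alpha, const float* src,
                                float* dst, int n) {
        for (int i = 0; i < n; i += 4) {
            *current = (1.0f - alpha) * target + alpha * (*current);  // ramp once per group
            int groupEnd = (i + 4 < n) ? i + 4 : n;
            for (int j = i; j < groupEnd; ++j) {
                float sum = dst[j] + src[j] * (*current);
                dst[j] = (sum > 1.0f) ? 1.0f : (sum < -1.0f) ? -1.0f : sum;  // saturate
            }
        }
    }
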
diff --git a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
index 6ff7853..61a4752 100644
--- a/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Core_MixSoft_1St_D32C31_WRA.cpp
@@ -25,12 +25,9 @@
 /**********************************************************************************
    FUNCTION CORE_MIXSOFT_1ST_D32C31_WRA
 ***********************************************************************************/
-void Core_MixSoft_1St_D32C31_WRA(   Mix_1St_Cll_FLOAT_t       *pInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n)
-{
-    LVM_FLOAT Temp1,Temp2;
+void Core_MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp1, Temp2;
     LVM_INT16 OutLoop;
     LVM_INT16 InLoop;
     LVM_FLOAT TargetTimesOneMinAlpha;
@@ -41,19 +38,17 @@
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    TargetTimesOneMinAlpha = (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
-    if (pInstance->Target >= pInstance->Current)
-    {
+    TargetTimesOneMinAlpha =
+            (1.0f - pInstance->Alpha) * pInstance->Target; /* float * float in float */
+    if (pInstance->Target >= pInstance->Current) {
         TargetTimesOneMinAlpha += (LVM_FLOAT)(2.0f / 2147483647.0f); /* Ceil*/
     }
 
-    if (OutLoop != 0)
-    {
+    if (OutLoop != 0) {
         CurrentTimesAlpha = (pInstance->Current * pInstance->Alpha);
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-        for (ii = OutLoop; ii != 0; ii--)
-        {
+        for (ii = OutLoop; ii != 0; ii--) {
             Temp1 = *src;
             src++;
 
@@ -63,37 +58,36 @@
         }
     }
 
-    for (ii = InLoop; ii != 0; ii--)
-    {
+    for (ii = InLoop; ii != 0; ii--) {
         CurrentTimesAlpha = pInstance->Current * pInstance->Alpha;
         pInstance->Current = TargetTimesOneMinAlpha + CurrentTimesAlpha;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
+        Temp1 = *src;
+        src++;
 
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
 
-            Temp1 = *src;
-            src++;
-            Temp2 = Temp1 * (pInstance->Current);
-            *dst = Temp2;
-            dst++;
+        Temp1 = *src;
+        src++;
+        Temp2 = Temp1 * (pInstance->Current);
+        *dst = Temp2;
+        dst++;
     }
 }
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
index a7ce4d3..6e859f4 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01.cpp
@@ -14,54 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 #include "BIQUAD.h"
 #include "DC_2I_D16_TRC_WRA_01_Private.h"
 #include "LVM_Macros.h"
-void DC_2I_D16_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                           LVM_FLOAT               *pDataIn,
-                           LVM_FLOAT               *pDataOut,
-                           LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT LeftDC,RightDC;
-        LVM_FLOAT Diff;
-        LVM_INT32 j;
-        PFilter_FLOAT_State pBiquadState = (PFilter_FLOAT_State) pInstance;
-
-        LeftDC = pBiquadState->LeftDC;
-        RightDC = pBiquadState->RightDC;
-        for(j = NrSamples-1; j >= 0; j--)
-        {
-            /* Subtract DC and saturate */
-            Diff = *(pDataIn++) - (LeftDC);
-            if (Diff > 1.0f) {
-                Diff = 1.0f; }
-            else if (Diff < -1.0f) {
-                Diff = -1.0f; }
-            *(pDataOut++) = (LVM_FLOAT)Diff;
-            if (Diff < 0) {
-                LeftDC -= DC_FLOAT_STEP; }
-            else {
-                LeftDC += DC_FLOAT_STEP; }
-
-            /* Subtract DC and saturate */
-            Diff = *(pDataIn++) - (RightDC);
-            if (Diff > 1.0f) {
-                Diff = 1.0f; }
-            else if (Diff < -1.0f) {
-                Diff = -1.0f; }
-            *(pDataOut++) = (LVM_FLOAT)Diff;
-            if (Diff < 0) {
-                RightDC -= DC_FLOAT_STEP; }
-            else {
-                RightDC += DC_FLOAT_STEP; }
-
-        }
-        pBiquadState->LeftDC = LeftDC;
-        pBiquadState->RightDC = RightDC;
-
-    }
-#ifdef SUPPORT_MC
+#include "ScalarArithmetic.h"
 /*
  * FUNCTION:       DC_Mc_D16_TRC_WRA_01
  *
@@ -79,37 +35,25 @@
  *  void
  *
  */
-void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                          LVM_FLOAT               *pDataIn,
-                          LVM_FLOAT               *pDataOut,
-                          LVM_INT16               NrFrames,
-                          LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT *ChDC;
-        LVM_FLOAT Diff;
-        LVM_INT32 j;
-        LVM_INT32 i;
-        PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01(Biquad_FLOAT_Instance_t* pInstance, LVM_FLOAT* pDataIn,
+                          LVM_FLOAT* pDataOut, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_FLOAT* ChDC;
+    LVM_FLOAT Diff;
+    LVM_INT32 j;
+    LVM_INT32 i;
+    PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
 
-        ChDC = &pBiquadState->ChDC[0];
-        for (j = NrFrames - 1; j >= 0; j--)
-        {
-            /* Subtract DC and saturate */
-            for (i = NrChannels - 1; i >= 0; i--)
-            {
-                Diff = *(pDataIn++) - (ChDC[i]);
-                if (Diff > 1.0f) {
-                    Diff = 1.0f;
-                } else if (Diff < -1.0f) {
-                    Diff = -1.0f; }
-                *(pDataOut++) = (LVM_FLOAT)Diff;
-                if (Diff < 0) {
-                    ChDC[i] -= DC_FLOAT_STEP;
-                } else {
-                    ChDC[i] += DC_FLOAT_STEP; }
+    ChDC = &pBiquadState->ChDC[0];
+    for (j = NrFrames - 1; j >= 0; j--) {
+        /* Subtract DC and saturate */
+        for (i = NrChannels - 1; i >= 0; i--) {
+            Diff = *(pDataIn++) - (ChDC[i]);
+            *(pDataOut++) = LVM_Clamp(Diff);
+            if (Diff < 0) {
+                ChDC[i] -= DC_FLOAT_STEP;
+            } else {
+                ChDC[i] += DC_FLOAT_STEP;
             }
-
         }
-
     }
-#endif
+}
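
The retained multichannel DC blocker is a sign-driven tracker: each channel keeps a running DC estimate that moves by a fixed DC_FLOAT_STEP toward the current input every sample, and the estimate is subtracted from the input with the result clamped to +/-1.0 (LVM_Clamp). A plain-float single-channel sketch; the step value is the one defined in the private header:

    // Sketch: single-channel DC removal with a fixed-step sign tracker.
    static const float kDcStepSketch = 0.0000002384f;  // DC_FLOAT_STEP from the private header

    static void dcRemoveSketch(float* dcEstimate, const float* in, float* out, int n) {
        float dc = *dcEstimate;
        for (int i = 0; i < n; ++i) {
            float diff = in[i] - dc;
            out[i] = (diff > 1.0f) ? 1.0f : (diff < -1.0f) ? -1.0f : diff;  // clamp to +/-1
            dc += (diff < 0) ? -kDcStepSketch : kDcStepSketch;  // walk the estimate toward the input
        }
        *dcEstimate = dc;
    }
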
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
index beee112..c16718c 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp
@@ -17,20 +17,10 @@
 
 #include "BIQUAD.h"
 #include "DC_2I_D16_TRC_WRA_01_Private.h"
-void  DC_2I_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t   *pInstance)
-{
-    PFilter_FLOAT_State pBiquadState  = (PFilter_FLOAT_State) pInstance;
-    pBiquadState->LeftDC        = 0.0f;
-    pBiquadState->RightDC       = 0.0f;
-}
-#ifdef SUPPORT_MC
-void  DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t   *pInstance)
-{
-    PFilter_FLOAT_State_Mc pBiquadState  = (PFilter_FLOAT_State_Mc) pInstance;
+void DC_Mc_D16_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t* pInstance) {
+    PFilter_FLOAT_State_Mc pBiquadState = (PFilter_FLOAT_State_Mc)pInstance;
     LVM_INT32 i;
-    for (i = 0; i < LVM_MAX_CHANNELS; i++)
-    {
+    for (i = 0; i < LVM_MAX_CHANNELS; i++) {
         pBiquadState->ChDC[i] = 0.0f;
     }
 }
-#endif
diff --git a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
index 6508b73..8f459d2 100644
--- a/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/DC_2I_D16_TRC_WRA_01_Private.h
@@ -18,21 +18,17 @@
 #ifndef _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
 #define _DC_2I_D16_TRC_WRA_01_PRIVATE_H_
 
-#define DC_FLOAT_STEP   0.0000002384f;
+#define DC_FLOAT_STEP 0.0000002384f
 
 /* The internal state variables are implemented in a (for the user)  hidden structure */
 /* In this (private) file, the internal structure is declared for private use.*/
-typedef struct _Filter_FLOAT_State_
-{
-    LVM_FLOAT  LeftDC;     /* LeftDC  */
-    LVM_FLOAT  RightDC;    /* RightDC  */
-}Filter_FLOAT_State;
-typedef Filter_FLOAT_State * PFilter_FLOAT_State ;
-#ifdef SUPPORT_MC
-typedef struct _Filter_FLOAT_State_Mc_
-{
-    LVM_FLOAT  ChDC[LVM_MAX_CHANNELS];     /* ChannelDC  */
+typedef struct _Filter_FLOAT_State_ {
+    LVM_FLOAT LeftDC;  /* LeftDC  */
+    LVM_FLOAT RightDC; /* RightDC  */
+} Filter_FLOAT_State;
+typedef Filter_FLOAT_State* PFilter_FLOAT_State;
+typedef struct _Filter_FLOAT_State_Mc_ {
+    LVM_FLOAT ChDC[LVM_MAX_CHANNELS]; /* ChannelDC  */
 } Filter_FLOAT_State_Mc;
-typedef Filter_FLOAT_State_Mc * PFilter_FLOAT_State_Mc ;
-#endif
+typedef Filter_FLOAT_State_Mc* PFilter_FLOAT_State_Mc;
 #endif /* _DC_2I_D16_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp b/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
deleted file mode 100644
index 771fae2..0000000
--- a/media/libeffects/lvm/lib/Common/src/DelayAllPass_Sat_32x16To32.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "LVM_Types.h"
-#include "LVM_Macros.h"
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION DelayAllPass_32x32
-***********************************************************************************/
-
-void DelayAllPass_Sat_32x16To32(  LVM_INT32  *delay,                    /* Delay buffer */
-                                  LVM_UINT16 size,                      /* Delay size */
-                                  LVM_INT16 coeff,                      /* All pass filter coefficient */
-                                  LVM_UINT16 DelayOffset,               /* Simple delay offset */
-                                  LVM_UINT16 *pAllPassOffset,           /* All pass filter delay offset */
-                                  LVM_INT32  *dst,                      /* Source/destination */
-                                  LVM_INT16 n)                          /* Number of  samples */
-{
-    LVM_INT16   i;
-    LVM_UINT16   AllPassOffset = *pAllPassOffset;
-    LVM_INT32    temp;
-    LVM_INT32    a,b,c;
-
-    for (i = 0; i < n; i++)
-    {
-
-        MUL32x16INTO32(delay[AllPassOffset], coeff, temp, 15)
-        a = temp;
-        b = delay[DelayOffset];
-        DelayOffset++;
-
-        c = a + b;
-        if ((((c ^ a) & (c ^ b)) >> 31) != 0)  /* overflow / underflow */
-        {
-            if(a < 0)
-            {
-                c = 0x80000000L;
-            }
-            else
-            {
-                c = 0x7FFFFFFFL;
-            }
-        }
-        *dst = c;
-        dst++;
-
-        MUL32x16INTO32(c, -coeff, temp, 15)
-        a = temp;
-        b = delay[AllPassOffset];
-        c = a + b;
-        if ((((c ^ a) & (c ^ b)) >> 31)!=0)  /* overflow / underflow */
-        {
-            if(a < 0)
-            {
-                c = 0x80000000L;
-            }
-            else
-            {
-                c = 0x7FFFFFFFL;
-            }
-        }
-        delay[AllPassOffset] = c;
-        AllPassOffset++;
-
-        /* Make the delay buffer a circular buffer */
-        if (DelayOffset >= size)
-        {
-            DelayOffset = 0;
-        }
-
-        if (AllPassOffset >= size)
-        {
-            AllPassOffset = 0;
-        }
-    }
-
-    /* Update the offset */
-    *pAllPassOffset = AllPassOffset;
-
-    return;
-}
-
-/**********************************************************************************/
-
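
The deleted DelayAllPass_Sat_32x16To32 combined a simple delay tap with an all-pass feedback path in 32x16 fixed point: out = sat(coeff * delay[ap] + delay[d]) is written to the destination, and the all-pass state is updated as delay[ap] = sat(delay[ap] - coeff * out), with both offsets wrapping around the same circular buffer. A plain-float sketch of that update, dropping the Q15 scaling and saturation (names are illustrative):

    // Sketch: combined simple-delay read plus all-pass state update over one circular buffer.
    static void delayAllPassSketch(float* delayBuf, int size, float coeff, int* pDelayOffset,
                                   int* pAllPassOffset, float* dst, int n) {
        int d = *pDelayOffset, ap = *pAllPassOffset;
        for (int i = 0; i < n; ++i) {
            float out = coeff * delayBuf[ap] + delayBuf[d];  // all-pass tap + simple delay tap
            dst[i] = out;
            delayBuf[ap] = delayBuf[ap] - coeff * out;       // feedback into the all-pass state
            if (++d >= size) d = 0;                          // keep both offsets circular
            if (++ap >= size) ap = 0;
        }
        *pDelayOffset = d;
        *pAllPassOffset = ap;
    }
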
diff --git a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
index 52d263f..a346636 100644
--- a/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/DelayMix_16x16.cpp
@@ -21,45 +21,55 @@
 
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION DelayMix_16x16
-***********************************************************************************/
-
-void DelayMix_16x16(const LVM_INT16 *src,           /* Source 1, to be delayed */
-                          LVM_INT16 *delay,         /* Delay buffer */
-                          LVM_INT16 size,           /* Delay size */
-                          LVM_INT16 *dst,           /* Source/destination */
-                          LVM_INT16 *pOffset,       /* Delay offset */
-                          LVM_INT16 n)              /* Number of stereo samples */
+void DelayMix_Float(const LVM_FLOAT* src, /* Source 1, to be delayed */
+                    LVM_FLOAT* delay,     /* Delay buffer */
+                    LVM_INT16 size,       /* Delay size */
+                    LVM_FLOAT* dst,       /* Source/destination */
+                    LVM_INT16* pOffset,   /* Delay offset */
+                    LVM_INT16 n,          /* Number of samples */
+                    LVM_INT32 NrChannels) /* Number of channels */
 {
-    LVM_INT16   i;
-    LVM_INT16   Offset  = *pOffset;
-    LVM_INT16   temp;
+    LVM_INT16 i;
+    LVM_INT16 Offset = *pOffset;
+    LVM_FLOAT temp;
 
-    for (i = 0; i < n; i++)
-    {
-        /* Left channel */
-        temp = (LVM_INT16)((LVM_UINT32)((LVM_INT32)(*dst) + (LVM_INT32)delay[Offset]) >> 1);
-        *dst = temp;
-        dst++;
+    for (i = 0; i < n; i++) {
+        if (NrChannels == FCC_1) {
+            temp = (LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
 
-        delay[Offset] = *src;
-        Offset++;
-        src++;
+            delay[Offset] = *src;
+            Offset++;
+            src++;
 
-        /* Right channel */
-        temp = (LVM_INT16)((LVM_UINT32)((LVM_INT32)(*dst) - (LVM_INT32)delay[Offset]) >> 1);
-        *dst = temp;
-        dst++;
+            /* Make the reverb delay buffer a circular buffer */
+            if (Offset >= size) {
+                Offset = 0;
+            }
+        } else {
+            /* Left channel */
+            temp = (LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
 
-        delay[Offset] = *src;
-        Offset++;
-        src++;
+            delay[Offset] = *src;
+            Offset++;
+            src++;
 
-        /* Make the reverb delay buffer a circular buffer */
-        if (Offset >= size)
-        {
-            Offset = 0;
+            /* Right channel */
+            temp = (LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f;
+            *dst = temp;
+            dst++;
+
+            delay[Offset] = *src;
+            Offset++;
+            src++;
+
+            /* Make the reverb delay buffer a circular buffer */
+            if (Offset >= size) {
+                Offset = 0;
+            }
         }
     }
 
@@ -68,47 +78,3 @@
 
     return;
 }
-void DelayMix_Float(const LVM_FLOAT *src,           /* Source 1, to be delayed */
-                          LVM_FLOAT *delay,         /* Delay buffer */
-                          LVM_INT16 size,           /* Delay size */
-                          LVM_FLOAT *dst,           /* Source/destination */
-                          LVM_INT16 *pOffset,       /* Delay offset */
-                          LVM_INT16 n)              /* Number of stereo samples */
-{
-    LVM_INT16   i;
-    LVM_INT16   Offset  = *pOffset;
-    LVM_FLOAT   temp;
-
-    for (i=0; i<n; i++)
-    {
-        /* Left channel */
-        temp            = (LVM_FLOAT)((LVM_FLOAT)(*dst + (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst            = temp;
-        dst++;
-
-        delay[Offset] = *src;
-        Offset++;
-        src++;
-
-        /* Right channel */
-        temp            = (LVM_FLOAT)((LVM_FLOAT)(*dst - (LVM_FLOAT)delay[Offset]) / 2.0f);
-        *dst            = temp;
-        dst++;
-
-        delay[Offset] = *src;
-        Offset++;
-        src++;
-
-        /* Make the reverb delay buffer a circular buffer */
-        if (Offset >= size)
-        {
-            Offset = 0;
-        }
-    }
-
-    /* Update the offset */
-    *pOffset = Offset;
-
-    return;
-}
-/**********************************************************************************/
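
For the stereo case, the reworked DelayMix_Float adds the delayed source into the left output and subtracts it from the right, averaging each at half gain, while pushing the incoming samples into a circular delay buffer; the mono branch simply averages. A plain-float sketch of the stereo branch (the offset is wrapped after every write here, rather than once per frame as in the code above):

    // Sketch: stereo delay-and-mix, dstL = (dstL + delayed)/2, dstR = (dstR - delayed)/2,
    // with the interleaved source pushed into a circular delay buffer.
    static void delayMixStereoSketch(const float* src, float* delayBuf, int size, float* dst,
                                     int* pOffset, int frames) {
        int offset = *pOffset;
        for (int i = 0; i < frames; ++i) {
            dst[2 * i] = (dst[2 * i] + delayBuf[offset]) / 2.0f;  // left: add delayed sample
            delayBuf[offset] = src[2 * i];                        // push left source into the delay
            if (++offset >= size) offset = 0;

            dst[2 * i + 1] = (dst[2 * i + 1] - delayBuf[offset]) / 2.0f;  // right: subtract delayed
            delayBuf[offset] = src[2 * i + 1];                            // push right source
            if (++offset >= size) offset = 0;
        }
        *pOffset = offset;
    }
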
diff --git a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp b/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
deleted file mode 100644
index 809cddc..0000000
--- a/media/libeffects/lvm/lib/Common/src/DelayWrite_32.cpp
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION DelayWrite_32
-***********************************************************************************/
-
-void DelayWrite_32(const LVM_INT32  *src,               /* Source 1, to be delayed */
-                         LVM_INT32  *delay,             /* Delay buffer */
-                         LVM_UINT16 size,               /* Delay size */
-                         LVM_UINT16 *pOffset,           /* Delay offset */
-                         LVM_INT16  n)                  /* Number of samples */
-{
-    LVM_INT16   i;
-    LVM_INT16   Offset  = (LVM_INT16)*pOffset;
-
-    for (i=0; i<n; i++)
-    {
-        delay[Offset] = *src;
-        Offset++;
-        src++;
-
-        /* Make the delay buffer a circular buffer */
-        if (Offset >= size)
-        {
-            Offset = 0;
-        }
-    }
-
-    /* Update the offset */
-    *pOffset = (LVM_UINT16)Offset;
-
-    return;
-}
-
-/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
deleted file mode 100644
index bef0d62..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16C15_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "FO_1I_D16F16Css_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A1,
- pBiquadState->coefs[1] is A0,
- pBiquadState->coefs[2] is -B1, these are in Q15 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is y(n-1)L in Q0 format
-***************************************************************************/
-
-void FO_1I_D16F16C15_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL;
-        LVM_INT16 ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A1  * x(n-1)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
-
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
-
-            // ynL+= (-B1  * y(n-1)L)
-            ynL += (LVM_FLOAT)pBiquadState->coefs[2] * pBiquadState->pDelays[1];
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output
-
-        }
-
-    }
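
The deleted first-order section follows the same pattern as the biquads with a single pole and zero: y(n) = A0*x(n) + A1*x(n-1) - B1*y(n-1), where coefs[] = { A1, A0, -B1 } and pDelays[] = { x(n-1), y(n-1) }. A plain-float sketch:

    // Sketch: mono first-order filter, coefs = { A1, A0, -B1 }, state = { x(n-1), y(n-1) }.
    static void firstOrderSketch(const float coefs[3], float state[2], const float* in, float* out,
                                 int n) {
        for (int i = 0; i < n; ++i) {
            float y = coefs[0] * state[0]   // A1 * x(n-1)
                    + coefs[1] * in[i]      // A0 * x(n)
                    + coefs[2] * state[1];  // -B1 * y(n-1)
            state[1] = y;      // update y(n-1)
            state[0] = in[i];  // update x(n-1)
            out[i] = y;
        }
    }
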
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
deleted file mode 100644
index 161225e..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "FO_1I_D16F16Css_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   FO_1I_D16F16Css_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void FO_1I_D16F16Css_TRC_WRA_01_Init(    Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                         FO_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)  pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)pTaps;
-    temp = pCoef->A1;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[2] = temp;
-}
-/*------------------------------------------------*/
-/* End Of File: FO_1I_D16F16Css_TRC_WRA_01_Init.c */
-
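
The Init above copies A1, A0 and B1 straight into coefs[0..2], while the matching process function documents coefs[2] as -B1, which suggests callers supply B1 already negated. A hypothetical caller of the deleted Init/process pair, relying only on the types visible in these signatures (BIQUAD.h assumed on the include path; coefficient values are illustrative):

    #include "BIQUAD.h"

    void runFirstOrderSection(LVM_FLOAT* in, LVM_FLOAT* out, LVM_INT16 nSamples) {
        Biquad_FLOAT_Instance_t       instance;
        Biquad_1I_Order1_FLOAT_Taps_t taps = {};  // x(n-1), y(n-1) storage
        FO_FLOAT_Coefs_t              coefs;

        coefs.A0 = 0.5f;   // illustrative values only
        coefs.A1 = 0.25f;
        coefs.B1 = -0.1f;  // pre-negated, per the -B1 convention noted above

        FO_1I_D16F16Css_TRC_WRA_01_Init(&instance, &taps, &coefs);
        FO_1I_D16F16C15_TRC_WRA_01(&instance, in, out, nSamples);
    }
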
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
deleted file mode 100644
index 34f3df9..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D16F16Css_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _FO_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-#define _FO_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32*        pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT16         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT
-{
-    LVM_FLOAT *                          pDelays;        /* pointer to the delayed samples \
-                                                            (data of 32 bits)   */
-    LVM_FLOAT                            coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-#endif /* _FO_1I_D16F16CSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
deleted file mode 100644
index e3efad7..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32C31_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "FO_1I_D32F32Cll_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A1,
- pBiquadState->coefs[1] is A0,
- pBiquadState->coefs[2] is -B1, these are in Q31 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is y(n-1)L in Q0 format
-***************************************************************************/
-void FO_1I_D32F32C31_TRC_WRA_01( Biquad_FLOAT_Instance_t       *pInstance,
-                                 LVM_FLOAT               *pDataIn,
-                                 LVM_FLOAT               *pDataOut,
-                                 LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT  ynL,templ;
-        LVM_INT16  ii;
-        PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT) pInstance;
-
-        for (ii = NrSamples; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            // ynL=A1  * x(n-1)L
-            ynL = pBiquadState->coefs[0] * pBiquadState->pDelays[0];
-
-            // ynL+=A0  * x(n)L
-            templ = pBiquadState->coefs[1] * (*pDataIn);
-            ynL += templ;
-
-            // ynL+=  (-B1  * y(n-1)L
-            templ = pBiquadState->coefs[2] * pBiquadState->pDelays[1];
-            ynL += templ;
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut++ = (LVM_FLOAT)ynL; // Write Left output in Q0
-        }
-
-    }
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
deleted file mode 100644
index bb5295c..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "FO_1I_D32F32Cll_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   FO_1I_D32F32Cll_TRC_WRA_01_Init                                       */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void FO_1I_D32F32Cll_TRC_WRA_01_Init( Biquad_FLOAT_Instance_t         *pInstance,
-                                      Biquad_1I_Order1_FLOAT_Taps_t   *pTaps,
-                                      FO_FLOAT_Coefs_t            *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_State_FLOAT pBiquadState = (PFilter_State_FLOAT)  pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *)    pTaps;
-
-    temp = pCoef->A1;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[2] = temp;
-}
-/*------------------------------------------------*/
-/* End Of File: FO_1I_D32F32Cll_TRC_WRA_01_Init.c */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
deleted file mode 100644
index 67d1384..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_1I_D32F32Cll_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _FO_1I_D32F32CLL_TRC_WRA_01_PRIVATE_H_
-#define _FO_1I_D32F32CLL_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-typedef struct _Filter_State_FLOAT_
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[3];       /* pointer to the filter coefficients */
-}Filter_State_FLOAT;
-
-typedef Filter_State_FLOAT * PFilter_State_FLOAT ;
-#endif /* _FO_1I_D32F32CLL_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
deleted file mode 100644
index 6ca819a..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32C15_LShx_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
-ASSUMPTIONS:
-COEFS-
-pBiquadState->coefs[0] is A1,
-pBiquadState->coefs[1] is A0,
-pBiquadState->coefs[2] is -B1, these are in Q15 format
-pBiquadState->Shift    is Shift value
-DELAYS-
-pBiquadState->pDelays[0] is x(n-1)L in Q15 format
-pBiquadState->pDelays[1] is y(n-1)L in Q30 format
-pBiquadState->pDelays[2] is x(n-1)R in Q15 format
-pBiquadState->pDelays[3] is y(n-1)R in Q30 format
-***************************************************************************/
-void FO_2I_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT   ynL,ynR;
-        LVM_FLOAT   Temp;
-        LVM_FLOAT   NegSatValue;
-        LVM_INT16   ii;
-
-        PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
-
-        NegSatValue = -1.0f;
-
-        for (ii = NrSamples; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-
-            // ynL =A1  * x(n-1)L
-            ynL = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[0];
-            // ynR =A1  * x(n-1)R
-            ynR = (LVM_FLOAT)pBiquadState->coefs[0] * pBiquadState->pDelays[2];
-
-            // ynL+=A0  * x(n)L
-            ynL += (LVM_FLOAT)pBiquadState->coefs[1] * (*pDataIn);
-            // ynR+=A0  * x(n)L
-            ynR += (LVM_FLOAT)pBiquadState->coefs[1] * (*(pDataIn+1));
-
-            // ynL +=  (-B1  * y(n-1)L  )
-            Temp = pBiquadState->pDelays[1] * pBiquadState->coefs[2];
-            ynL += Temp;
-            // ynR +=  (-B1  * y(n-1)R ) )
-            Temp = pBiquadState->pDelays[3] * pBiquadState->coefs[2];
-            ynR += Temp;
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[1] = ynL; // Update y(n-1)L
-            pBiquadState->pDelays[0] = (*pDataIn++); // Update x(n-1)L
-
-            pBiquadState->pDelays[3] = ynR; // Update y(n-1)R
-            pBiquadState->pDelays[2] = (*pDataIn++); // Update x(n-1)R
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-
-            /*Saturate results*/
-            if(ynL > 1.0f)
-            {
-                ynL = 1.0f;
-            }
-            else
-            {
-                if(ynL < NegSatValue)
-                {
-                    ynL = NegSatValue;
-                }
-            }
-
-            if(ynR > 1.0f)
-            {
-                ynR = 1.0f;
-            }
-            else
-            {
-                if(ynR < NegSatValue)
-                {
-                    ynR = NegSatValue;
-                }
-            }
-
-            *pDataOut++ = (LVM_FLOAT)ynL;
-            *pDataOut++ = (LVM_FLOAT)ynR;
-        }
-
-    }
-#ifdef SUPPORT_MC
-/**************************************************************************
-ASSUMPTIONS:
-COEFS-
-pBiquadState->coefs[0] is A1,
-pBiquadState->coefs[1] is A0,
-pBiquadState->coefs[2] is -B1,
-DELAYS-
-pBiquadState->pDelays[2*ch + 0] is x(n-1) of the 'ch' - channel
-pBiquadState->pDelays[2*ch + 1] is y(n-1) of the 'ch' - channel
-The index 'ch' runs from 0 to (NrChannels - 1)
-
-PARAMETERS:
- pInstance        Pointer Instance
- pDataIn          Input/Source
- pDataOut         Output/Destination
- NrFrames         Number of frames
- NrChannels       Number of channels
-
-RETURNS:
- void
-***************************************************************************/
-void FO_Mc_D16F32C15_LShx_TRC_WRA_01(Biquad_FLOAT_Instance_t *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrFrames,
-                                     LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT   yn;
-        LVM_FLOAT   Temp;
-        LVM_INT16   ii;
-        LVM_INT16   ch;
-        PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
-
-        LVM_FLOAT   *pDelays = pBiquadState->pDelays;
-        LVM_FLOAT   *pCoefs  = &pBiquadState->coefs[0];
-        LVM_FLOAT   A0 = pCoefs[1];
-        LVM_FLOAT   A1 = pCoefs[0];
-        LVM_FLOAT   B1 = pCoefs[2];
-
-        for (ii = NrFrames; ii != 0; ii--)
-        {
-
-            /**************************************************************************
-                            PROCESSING OF THE CHANNELS
-            ***************************************************************************/
-            for (ch = 0; ch < NrChannels; ch++)
-            {
-                // yn =A1  * x(n-1)
-                yn = (LVM_FLOAT)A1 * pDelays[0];
-
-                // yn+=A0  * x(n)
-                yn += (LVM_FLOAT)A0 * (*pDataIn);
-
-                // yn +=  (-B1  * y(n-1))
-                Temp = B1 * pDelays[1];
-                yn += Temp;
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pDelays[1] = yn; // Update y(n-1)
-                pDelays[0] = (*pDataIn++); // Update x(n-1)
-
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-
-                /*Saturate results*/
-                if (yn > 1.0f)
-                {
-                    yn = 1.0f;
-                } else if (yn < -1.0f) {
-                    yn = -1.0f;
-                }
-
-                *pDataOut++ = (LVM_FLOAT)yn;
-                pDelays += 2;
-            }
-            pDelays -= NrChannels * 2;
-        }
-    }
-#endif
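
The multichannel variant above keeps one {x(n-1), y(n-1)} pair per channel, interleaved as pDelays[2*ch + 0] / pDelays[2*ch + 1], and clamps each output to [-1.0, 1.0]. A sketch of the same layout with illustrative names (firstOrderMc), assuming frame-interleaved input as in the deleted loop:

    #include <algorithm>
    #include <cstddef>

    void firstOrderMc(const float* in, float* out, float* delays,
                      size_t nFrames, size_t nChannels,
                      float a0, float a1, float negB1) {
        for (size_t f = 0; f < nFrames; ++f) {
            for (size_t ch = 0; ch < nChannels; ++ch) {
                float* d = delays + 2 * ch;  // d[0] = x(n-1), d[1] = y(n-1)
                float x = in[f * nChannels + ch];
                float y = a1 * d[0] + a0 * x + negB1 * d[1];
                d[1] = y;
                d[0] = x;
                out[f * nChannels + ch] = std::clamp(y, -1.0f, 1.0f);
            }
        }
    }
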
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
deleted file mode 100644
index b81b976..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*-------------------------------------------------------------------------*/
-#include "BIQUAD.h"
-#include "FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h"
-
-/*-------------------------------------------------------------------------*/
-/* FUNCTION:                                                               */
-/*   FO_2I_D16F32Css_LShx_TRC_WRA_01_Init                                  */
-/*                                                                         */
-/* DESCRIPTION:                                                            */
-/*   This function initializes a BIQUAD filter defined as a cascade of     */
-/*   biquadratic Filter Sections.                                          */
-/*                                                                         */
-/* PARAMETERS:                                                             */
-/*   pInstance    - output, returns the pointer to the State Variable      */
-/*                   This state pointer must be passed to any subsequent   */
-/*                   call to "Biquad" functions.                           */
-/*   pTaps         - input, pointer to the taps memory                     */
-/*   pCoef         - input, pointer to the coefficient structure           */
-/*   N             - M coefficient factor of QM.N                          */
-/* RETURNS:                                                                */
-/*   void return code                                                      */
-/*-------------------------------------------------------------------------*/
-void FO_2I_D16F32Css_LShx_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t         *pInstance,
-                                          Biquad_2I_Order1_FLOAT_Taps_t   *pTaps,
-                                          FO_FLOAT_LShx_Coefs_t        *pCoef)
-{
-    LVM_FLOAT temp;
-    PFilter_Float_State pBiquadState = (PFilter_Float_State) pInstance;
-    pBiquadState->pDelays      = (LVM_FLOAT *) pTaps            ;
-
-    temp = pCoef->A1;
-    pBiquadState->coefs[0] = temp;
-    temp = pCoef->A0;
-    pBiquadState->coefs[1] = temp;
-    temp = pCoef->B1;
-    pBiquadState->coefs[2] = temp;
-}
-/*-------------------------------------------------------------------------*/
-/* End Of File: FO_2I_D16F32Css_LShx_TRC_WRA_01_Init.c                     */
-
diff --git a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
deleted file mode 100644
index 5022500..0000000
--- a/media/libeffects/lvm/lib/Common/src/FO_2I_D16F32Css_LShx_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _FO_2I_D16F32CSS_LSHX_TRC_WRA_01_PRIVATE_H_
-#define _FO_2I_D16F32CSS_LSHX_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared for private use.        */
-typedef struct _Filter_State_
-{
-    LVM_FLOAT     *pDelays;       /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT     coefs[3];       /* pointer to the filter coefficients */
-}Filter_Float_State;
-
-typedef Filter_Float_State * PFilter_Float_State ;
-#endif /* _FO_2I_D16F32CSS_LSHX_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Filters.h b/media/libeffects/lvm/lib/Common/src/Filters.h
index b5db8f4..8eb3e76 100644
--- a/media/libeffects/lvm/lib/Common/src/Filters.h
+++ b/media/libeffects/lvm/lib/Common/src/Filters.h
@@ -30,26 +30,23 @@
  * Biquad with coefficients A0, A1, A2, B1 and B2 coefficients
  */
 /* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
-    LVM_FLOAT   A0;
-    LVM_FLOAT   A1;
-    LVM_FLOAT   A2;
-    LVM_FLOAT   B1;
-    LVM_FLOAT   B2;
-    LVM_UINT16  Scale;
+typedef struct {
+    LVM_FLOAT A0;
+    LVM_FLOAT A1;
+    LVM_FLOAT A2;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_UINT16 Scale;
 } BiquadA012B12CoefsSP_t;
 /*
  * Biquad with coefficients A0, A1 and B1 coefficients
  */
 /* Single precision (16-bit) Biquad section coefficients */
-typedef struct
-{
-    LVM_FLOAT   A0;
-    LVM_FLOAT   A1;
-    LVM_FLOAT   B1;
-    LVM_UINT16  Scale;
+typedef struct {
+    LVM_FLOAT A0;
+    LVM_FLOAT A1;
+    LVM_FLOAT B1;
+    LVM_UINT16 Scale;
 } BiquadA01B1CoefsSP_t;
 
-#endif      /* FILTERS_H */
-
+#endif /* FILTERS_H */
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
index c3f6648..e2f8c67 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMS_16x16.cpp
@@ -21,47 +21,10 @@
 
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION  From2iToMS_16x16
-***********************************************************************************/
-
-void From2iToMS_16x16( const LVM_INT16  *src,
-                             LVM_INT16  *dstM,
-                             LVM_INT16  *dstS,
-                             LVM_INT16  n )
-{
-    LVM_INT32 temp1,left,right;
+void From2iToMS_Float(const LVM_FLOAT* src, LVM_FLOAT* dstM, LVM_FLOAT* dstS, LVM_INT16 n) {
+    LVM_FLOAT temp1, left, right;
     LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
-        left = (LVM_INT32)*src;
-        src++;
-
-        right = (LVM_INT32)*src;
-        src++;
-
-        /* Compute M signal*/
-        temp1 =  (left+right)>>1;
-        *dstM = (LVM_INT16)temp1;
-        dstM++;
-
-        /* Compute S signal*/
-        temp1 =  (left-right)>>1;
-        *dstS = (LVM_INT16)temp1;
-        dstS++;
-    }
-
-    return;
-}
-void From2iToMS_Float( const LVM_FLOAT  *src,
-                             LVM_FLOAT  *dstM,
-                             LVM_FLOAT  *dstS,
-                             LVM_INT16  n )
-{
-    LVM_FLOAT temp1,left,right;
-    LVM_INT16 ii;
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         left = (LVM_FLOAT)*src;
         src++;
 
@@ -69,16 +32,15 @@
         src++;
 
         /* Compute M signal*/
-        temp1 =  (left + right) / 2.0f;
+        temp1 = (left + right) / 2.0f;
         *dstM = (LVM_FLOAT)temp1;
         dstM++;
 
         /* Compute S signal*/
-        temp1 =  (left - right) / 2.0f;
+        temp1 = (left - right) / 2.0f;
         *dstS = (LVM_FLOAT)temp1;
         dstS++;
     }
 
     return;
 }
-/**********************************************************************************/
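
The retained From2iToMS_Float maps interleaved stereo to mid/side with M = (L + R) / 2 and S = (L - R) / 2; for example L = 0.8, R = 0.2 gives M = 0.5, S = 0.3, and the inverse L = M + S, R = M - S recovers the input exactly in float. A minimal sketch of the same mapping with illustrative names:

    #include <cstddef>

    void stereoToMidSide(const float* interleavedLR, float* mid, float* side, size_t nFrames) {
        for (size_t i = 0; i < nFrames; ++i) {
            float left  = interleavedLR[2 * i];
            float right = interleavedLR[2 * i + 1];
            mid[i]  = (left + right) * 0.5f;
            side[i] = (left - right) * 0.5f;
        }
    }
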
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
deleted file mode 100644
index b758ee7..0000000
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_16.cpp
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION From2iToMono_16
-***********************************************************************************/
-
-void From2iToMono_16( const LVM_INT16 *src,
-                            LVM_INT16 *dst,
-                            LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_INT32 Temp;
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = (LVM_INT32)*src;
-        src++;
-
-        Temp += (LVM_INT32)*src;
-        src++;
-
-        *dst  = (LVM_INT16)(Temp >>1);
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
index a8688b4..039ee14 100644
--- a/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/From2iToMono_32.cpp
@@ -21,40 +21,11 @@
 
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION From2iToMono_32
-***********************************************************************************/
-
-void From2iToMono_32( const LVM_INT32 *src,
-                            LVM_INT32 *dst,
-                            LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_INT32 Temp;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = (*src>>1);
-        src++;
-
-        Temp +=(*src>>1);
-        src++;
-
-        *dst = Temp;
-        dst++;
-    }
-
-    return;
-}
-void From2iToMono_Float( const LVM_FLOAT *src,
-                         LVM_FLOAT *dst,
-                         LVM_INT16 n)
-{
+void From2iToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_FLOAT Temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         Temp = (*src);
         src++;
 
@@ -67,7 +38,6 @@
 
     return;
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       FromMcToMono_Float
  *
@@ -85,19 +55,14 @@
  *  void
  *
  */
-void FromMcToMono_Float(const LVM_FLOAT *src,
-                        LVM_FLOAT *dst,
-                        LVM_INT16 NrFrames,
-                        LVM_INT16 NrChannels)
-{
+void FromMcToMono_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                        LVM_INT16 NrChannels) {
     LVM_INT16 ii, jj;
     LVM_FLOAT Temp;
 
-    for (ii = NrFrames; ii != 0; ii--)
-    {
+    for (ii = NrFrames; ii != 0; ii--) {
         Temp = 0.0f;
-        for (jj = NrChannels; jj !=0; jj--)
-        {
+        for (jj = NrChannels; jj != 0; jj--) {
             Temp += (*src);
             src++;
         }
@@ -107,6 +72,3 @@
 
     return;
 }
-#endif
-
-/**********************************************************************************/
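
FromMcToMono_Float above accumulates the NrChannels samples of each frame; the sketch below assumes a per-frame mean (matching the stereo averaging From2iToMono_Float performs), since the final divide falls outside the hunks shown here. Names are illustrative:

    #include <cstddef>

    void mcToMono(const float* in, float* out, size_t nFrames, size_t nChannels) {
        for (size_t f = 0; f < nFrames; ++f) {
            float sum = 0.0f;
            for (size_t ch = 0; ch < nChannels; ++ch) {
                sum += in[f * nChannels + ch];
            }
            out[f] = sum / static_cast<float>(nChannels);  // one divide per frame
        }
    }
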
diff --git a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp b/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
deleted file mode 100644
index a039bf5..0000000
--- a/media/libeffects/lvm/lib/Common/src/InstAlloc.cpp
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "InstAlloc.h"
-
-/****************************************************************************************
- *  Name        : InstAlloc_Init()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  StartAddr - Base address of the instance memory
- *  Returns     : Error code
- *  Description : Initializes the instance distribution and memory size calculation function
- *  Remarks     :
- ****************************************************************************************/
-
-void    InstAlloc_Init( INST_ALLOC      *pms,
-                        void            *StartAddr )
-{
-    pms->TotalSize = 3;
-    pms->pNextMember = (((uintptr_t)StartAddr + 3) & (uintptr_t)~3);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_AddMember()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
-                  Size - The size in bytes of the newly added member
- *  Returns     : A pointer to the newly added member
- *  Description : Allocates space for a new member in the instance memory and returns
-                  a pointer to this new member.  The start address of all members will
-                  be 32-bit aligned.
- *  Remarks     :
- ****************************************************************************************/
-
-void*   InstAlloc_AddMember( INST_ALLOC         *pms,
-                             LVM_UINT32           Size )
-{
-    void *NewMemberAddress; /* Variable to temporarily store the return value */
-    NewMemberAddress = (void*)pms->pNextMember;
-
-    Size = ((Size + 3) & (LVM_UINT32)~3); /* Ceil the size to a multiple of four */
-
-    pms->TotalSize += Size;
-    pms->pNextMember += Size;
-
-    return(NewMemberAddress);
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_GetTotal()
- *  Input       : pms  - Pointer to the INST_ALLOC instance
- *  Returns     : The instance memory size
- *  Description : This function returns the calculated instance memory size
- *  Remarks     :
- ****************************************************************************************/
-
-LVM_UINT32 InstAlloc_GetTotal( INST_ALLOC *pms)
-{
-    if (pms->TotalSize > 3)
-    {
-        return(pms->TotalSize);
-    }
-    else
-    {
-        return 0;           /* No memory added */
-    }
-}
-
-void    InstAlloc_InitAll( INST_ALLOC                      *pms,
-                           LVM_MemoryTable_st             *pMemoryTable)
-{
-    uintptr_t StartAddr;
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress;
-
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-    StartAddr = (uintptr_t)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = ((StartAddr + 3) & (uintptr_t)~3);
-
-}
-
-/****************************************************************************************
- *  Name        : InstAlloc_InitAll_NULL()
- *  Input       : pms  - Pointer to array of four INST_ALLOC instances
- *  Returns     : Nothing
- *  Description : This function reserves Size of 3 bytes for all memory regions and
- *                initializes pNextMember for all regions to 0
- *  Remarks     :
- ****************************************************************************************/
-
-void    InstAlloc_InitAll_NULL( INST_ALLOC  *pms)
-{
-    pms[0].TotalSize = 3;
-    pms[0].pNextMember = 0;
-
-    pms[1].TotalSize = 3;
-    pms[1].pNextMember = 0;
-
-    pms[2].TotalSize = 3;
-    pms[2].pNextMember = 0;
-
-    pms[3].TotalSize = 3;
-    pms[3].pNextMember = 0;
-
-}
-
-void*   InstAlloc_AddMemberAll( INST_ALLOC                     *pms,
-                                 LVM_UINT32                   Size[],
-                                 LVM_MemoryTable_st           *pMemoryTable)
-{
-    void *NewMemberAddress; /* Variable to temporarily store the return value */
-
-    /* coverity[returned_pointer] Ignore coverity warning that ptr is not used */
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_SLOW_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_DATA]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&pms[LVM_PERSISTENT_FAST_COEF]);
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
-    pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-    NewMemberAddress = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Size                 = InstAlloc_GetTotal(&pms[LVM_TEMPORARY_FAST]);
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].Type                 = LVM_TEMPORARY_FAST;
-    pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress         = LVM_NULL;
-
-    return(NewMemberAddress);
-}
-
-void*   InstAlloc_AddMemberAllRet(     INST_ALLOC                 *pms,
-                                     LVM_UINT32               Size[],
-                                     void                    **ptr)
-{
-    ptr[0] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_SLOW_DATA], Size[LVM_PERSISTENT_SLOW_DATA]);
-    ptr[1] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_DATA], Size[LVM_PERSISTENT_FAST_DATA]);
-    ptr[2] = InstAlloc_AddMember(&pms[LVM_PERSISTENT_FAST_COEF], Size[LVM_PERSISTENT_FAST_COEF]);
-    ptr[3] = InstAlloc_AddMember(&pms[LVM_TEMPORARY_FAST], Size[LVM_TEMPORARY_FAST]);
-
-    return (ptr[0]);
-}
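
The deleted allocator works in two passes: InstAlloc_InitAll_NULL plus InstAlloc_AddMemberAll first tally region sizes against NULL base addresses, then the caller provides real memory and InstAlloc_AddMemberAllRet hands out the members. Both the running pointer and every member size are rounded up with (value + 3) & ~3, keeping everything on a 4-byte boundary. A sketch of just that rounding step (alignUp4 is an illustrative name):

    #include <cstdint>

    inline std::uintptr_t alignUp4(std::uintptr_t value) {
        // Bump past any partial word, then clear the two low bits.
        return (value + 3u) & ~static_cast<std::uintptr_t>(3);
    }

    // Example: alignUp4(0x1001) == 0x1004, and alignUp4(0x1004) == 0x1004.
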
diff --git a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp b/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
deleted file mode 100644
index 9f09e4d..0000000
--- a/media/libeffects/lvm/lib/Common/src/Int16LShiftToInt32_16x32.cpp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION INT16LSHIFTTOINT32_16X32
-***********************************************************************************/
-
-void Int16LShiftToInt32_16x32(const LVM_INT16   *src,
-                              LVM_INT32         *dst,
-                              LVM_INT16         n,
-                              LVM_INT16         shift )
-{
-    LVM_INT16 ii;
-
-    src += n-1;
-    dst += n-1;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = ( ((LVM_INT32)*src) << shift);
-        src--;
-        dst--;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
deleted file mode 100644
index 8c9980d..0000000
--- a/media/libeffects/lvm/lib/Common/src/Int32RShiftToInt16_Sat_32x16.cpp
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION INT32RSHIFTTOINT16_SAT_32X16
-***********************************************************************************/
-
-void Int32RShiftToInt16_Sat_32x16(const LVM_INT32  *src,
-                                  LVM_INT16 *dst,
-                                  LVM_INT16 n,
-                                  LVM_INT16 shift )
-{
-    LVM_INT32 temp;
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        temp = *src >> shift;
-        src++;
-
-        if (temp > 0x00007FFF)
-        {
-            *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
-            *dst = (LVM_INT16)temp;
-        }
-
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
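
The two vector helpers removed above convert between 16-bit and 32-bit samples: the left-shift version widens first and shifts up, while Int32RShiftToInt16_Sat_32x16 shifts down and then pins the result to the int16 range before narrowing. A sketch of the saturating direction (shiftRightSat16 is an illustrative name):

    #include <cstdint>

    inline std::int16_t shiftRightSat16(std::int32_t value, int shift) {
        std::int32_t t = value >> shift;
        if (t > 0x7FFF) return 0x7FFF;    // clamp to INT16_MAX
        if (t < -0x8000) return -0x8000;  // clamp to INT16_MIN
        return static_cast<std::int16_t>(t);
    }
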
diff --git a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
index 05df656..6c7c8ae 100644
--- a/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/JoinTo2i_32x32.cpp
@@ -21,47 +21,14 @@
 
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION JoinTo2i_32x32
-***********************************************************************************/
-
-void JoinTo2i_32x32( const LVM_INT32    *srcL,
-                     const LVM_INT32    *srcR,
-                           LVM_INT32    *dst,
-                           LVM_INT16    n )
-{
-    LVM_INT16 ii;
-
-    srcL += n-1;
-    srcR += n-1;
-    dst  += ((2*n)-1);
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = *srcR;
-        dst--;
-        srcR--;
-
-        *dst = *srcL;
-        dst--;
-        srcL--;
-    }
-
-    return;
-}
-void JoinTo2i_Float( const LVM_FLOAT    *srcL,
-                     const LVM_FLOAT    *srcR,
-                           LVM_FLOAT    *dst,
-                           LVM_INT16    n )
-{
+void JoinTo2i_Float(const LVM_FLOAT* srcL, const LVM_FLOAT* srcR, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
 
     srcL += n - 1;
     srcR += n - 1;
-    dst  += ((2 * n) - 1);
+    dst += ((2 * n) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *srcR;
         dst--;
         srcR--;
@@ -74,4 +41,3 @@
     return;
 }
 /**********************************************************************************/
-
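
JoinTo2i_Float interleaves the left and right buffers starting from the last frame, which appears intended to let the destination overlap the left source buffer without overwriting samples before they are read. A sketch of the same backwards interleave with illustrative names:

    #include <cstddef>

    void joinToStereo(const float* srcL, const float* srcR, float* dst, size_t n) {
        for (size_t i = n; i != 0; --i) {
            dst[2 * i - 1] = srcR[i - 1];  // right sample of frame i-1
            dst[2 * i - 2] = srcL[i - 1];  // left sample of frame i-1
        }
    }
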
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
index 14d61bd..d670b3d 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp
@@ -18,67 +18,19 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "LVC_Mixer_Private.h"
 #include "LVM_Macros.h"
 #include "ScalarArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION LVC_Core_MixHard_1St_2i_D16C31_SAT
-***********************************************************************************/
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n)
-{
-    LVM_FLOAT  Temp;
-    LVM_INT16 ii;
-    Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
-    for (ii = n; ii != 0; ii--)
-    {
-        Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
-        if (Temp > 1.0f)
-            *dst++ = 1.0f;
-        else if (Temp < -1.0f)
-            *dst++ = -1.0f;
-        else
-            *dst++ = (LVM_FLOAT)Temp;
-
-        Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance2->Current);
-        if (Temp > 1.0f)
-            *dst++ = 1.0f;
-        else if (Temp < -1.0f)
-            *dst++ = -1.0f;
-        else
-            *dst++ = (LVM_FLOAT)Temp;
-    }
-
-}
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT (Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels)
-{
-    LVM_FLOAT  Temp;
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_FLOAT Temp;
     LVM_INT16 ii, jj;
-    for (ii = NrFrames; ii != 0; ii--)
-    {
-        for (jj = 0; jj < NrChannels; jj++)
-        {
-            Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance[jj]);
-            Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
-            if (Temp > 1.0f)
-                *dst++ = 1.0f;
-            else if (Temp < -1.0f)
-                *dst++ = -1.0f;
-            else
-                *dst++ = (LVM_FLOAT)Temp;
+    for (ii = NrFrames; ii != 0; ii--) {
+        for (jj = 0; jj < NrChannels; jj++) {
+            Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance[jj]);
+            Temp = *src++ * pInstance1->Current;
+            *dst++ = LVM_Clamp(Temp);
         }
     }
 }
-#endif
-/**********************************************************************************/
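
The rewritten loop above replaces the branchy ±1.0 saturation with LVM_Clamp from ScalarArithmetic.h. Assuming LVM_Clamp pins to the same [-1.0, 1.0] range the removed branches used, an equivalent stand-in (clampUnit is an illustrative name, not the library helper) looks like:

    inline float clampUnit(float x) {
        if (x > 1.0f) return 1.0f;
        if (x < -1.0f) return -1.0f;
        return x;
    }

    // Per-channel hard mix step, as in the retained loop:
    //   *dst++ = clampUnit(*src++ * perChannelGain);
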
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
index 841fa1e..417c1f0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp
@@ -18,38 +18,28 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "LVC_Mixer_Private.h"
+#include "ScalarArithmetic.h"
 
 /**********************************************************************************
    FUNCTION LVCore_MIXHARD_2ST_D16C31_SAT
 ***********************************************************************************/
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance1,
-                                    LVMixer3_FLOAT_st         *ptrInstance2,
-                                    const LVM_FLOAT     *src1,
-                                    const LVM_FLOAT     *src2,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     n)
-{
-    LVM_FLOAT  Temp;
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance1,
+                                     LVMixer3_FLOAT_st* ptrInstance2, const LVM_FLOAT* src1,
+                                     const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT Temp;
     LVM_INT16 ii;
     LVM_FLOAT Current1;
     LVM_FLOAT Current2;
-    Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
+    Mix_Private_FLOAT_st* pInstance1 = (Mix_Private_FLOAT_st*)(ptrInstance1->PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 = (Mix_Private_FLOAT_st*)(ptrInstance2->PrivateParams);
 
-    Current1 = (pInstance1->Current);
-    Current2 = (pInstance2->Current);
+    Current1 = pInstance1->Current;
+    Current2 = pInstance2->Current;
 
-    for (ii = n; ii != 0; ii--){
-        Temp = (((LVM_FLOAT)*(src1++) * (LVM_FLOAT)Current1)) +
-               (((LVM_FLOAT)*(src2++) * (LVM_FLOAT)Current2));
-        if (Temp > 1.0f)
-            *dst++ = 1.0f;
-        else if (Temp < -1.0f)
-            *dst++ = -1.0f;
-        else
-            *dst++ = Temp;
+    for (ii = n; ii != 0; ii--) {
+        Temp = *src1++ * Current1 + *src2++ * Current2;
+        *dst++ = LVM_Clamp(Temp);
     }
 }
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
index 318138d..d8c25c9 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp
@@ -18,102 +18,72 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "LVC_Mixer_Private.h"
 #include "LVM_Macros.h"
+#include "ScalarArithmetic.h"
 
 /**********************************************************************************
    FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
 ***********************************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                   const LVM_FLOAT   *src,
-                                         LVM_FLOAT   *dst,
-                                         LVM_INT16   n)
-{
-
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii,jj;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta = pInstance->Delta;
-    LVM_FLOAT   Current = pInstance->Current;
-    LVM_FLOAT   Target = pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = pInstance->Delta;
+    LVM_FLOAT Current = pInstance->Current;
+    LVM_FLOAT Target = pInstance->Target;
+    LVM_FLOAT Temp;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    if(Current < Target){
-        if (OutLoop){
+    if (Current < Target) {
+        if (OutLoop) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-           for (ii = OutLoop; ii != 0; ii--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = (LVM_FLOAT)Temp;
+            for (ii = OutLoop; ii != 0; ii--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-            for (jj = 4; jj != 0 ; jj--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = (LVM_FLOAT)Temp;
+            for (jj = 4; jj != 0; jj--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
-    }
-    else{
-        if (OutLoop){
+    } else {
+        if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop; ii != 0; ii--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = (LVM_FLOAT)Temp;
+            for (ii = OutLoop; ii != 0; ii--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = 4; jj != 0 ; jj--){
-                Temp = ((LVM_FLOAT)*dst) + (((LVM_FLOAT)*(src++) * Current));
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = (LVM_FLOAT)Temp;
+            for (jj = 4; jj != 0; jj--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
     }
     pInstance->Current = Current;
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_Core_MixInSoft_Mc_D16C31_SAT
  *
@@ -131,21 +101,16 @@
  *  void
  *
  */
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                      const LVM_FLOAT   *src,
-                                            LVM_FLOAT   *dst,
-                                            LVM_INT16   NrFrames,
-                                            LVM_INT16   NrChannels)
-{
-
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii, jj;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta = pInstance->Delta;
-    LVM_FLOAT   Current = pInstance->Current;
-    LVM_FLOAT   Target = pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                      LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = pInstance->Delta;
+    LVM_FLOAT Current = pInstance->Current;
+    LVM_FLOAT Target = pInstance->Target;
+    LVM_FLOAT Temp;
 
     /*
      * Same operation is performed on consecutive frames.
@@ -160,90 +125,52 @@
         if (OutLoop) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-           for (ii = OutLoop*NrChannels; ii != 0; ii--) {
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
+            for (ii = OutLoop * NrChannels; ii != 0; ii--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
             Temp = Current + Delta;
             Current = Temp;
-            if (Current > Target)
-                Current = Target;
+            if (Current > Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--) {
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
+            for (jj = NrChannels; jj != 0; jj--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
 
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
-
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
-    }
-    else{
+    } else {
         if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop*NrChannels; ii != 0; ii--) {
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
+            for (ii = OutLoop * NrChannels; ii != 0; ii--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--) {
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
+            for (jj = NrChannels; jj != 0; jj--) {
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
 
-                Temp = (*dst) + (*(src++) * Current);
-                if (Temp > 1.0f)
-                    *dst++ = 1.0f;
-                else if (Temp < -1.0f)
-                    *dst++ = -1.0f;
-                else
-                    *dst++ = Temp;
-
+                Temp = *dst + *src++ * Current;
+                *dst++ = LVM_Clamp(Temp);
             }
         }
     }
     pInstance->Current = Current;
 }
 
-#endif
 /**********************************************************************************/
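Both cores in LVC_Core_MixInSoft_D16C31_SAT.cpp implement the same ramped, saturating mix-in: each output sample becomes dst + src * gain, saturated to [-1.0, 1.0] (the clamp the removed branches spelled out and that LVM_Clamp now supplies), while the gain steps by Delta toward Target once per remainder block and once per group of four samples (or per pair of frames in the multichannel variant). The sketch below restates that per sample under those assumptions; mixInSoftSketch and its parameter names are illustrative, not part of the library.

#include <algorithm>

// Condensed per-sample equivalent of the ramped, saturating mix-in above.
// Assumes float samples and a [-1.0f, 1.0f] clamp; the library steps the gain
// per block rather than per sample, so ramp timing differs slightly.
static void mixInSoftSketch(const float* src, float* dst, int n,
                            float current, float target, float delta) {
    for (int i = 0; i < n; ++i) {
        if (current < target) {
            current = std::min(current + delta, target);  // ramp up toward target
        } else {
            current = std::max(current - delta, target);  // ramp down toward target
        }
        const float t = dst[i] + src[i] * current;        // mix into destination
        dst[i] = std::clamp(t, -1.0f, 1.0f);              // saturate
    }
}

Stepping the gain per block rather than per sample, as the library does, trades a slightly coarser ramp for fewer branches in the inner loop.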
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
index 1f4b08a..0968cf8 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp
@@ -18,176 +18,38 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "LVC_Mixer_Private.h"
-#include "ScalarArithmetic.h"
 #include "LVM_Macros.h"
+#include "ScalarArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION LVC_Core_MixSoft_1St_2i_D16C31_WRA
-***********************************************************************************/
-static LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a,
-                                LVM_FLOAT b,
-                                LVM_FLOAT c)
-{
-    LVM_FLOAT temp;
-    temp = a + b ;
-    if (temp < -1.0f)
-        c = -1.0f;
-    else if (temp > 1.0f)
-        c = 1.0f;
-    else
-        c = temp;
-    return c;
+static inline LVM_FLOAT ADD2_SAT_FLOAT(LVM_FLOAT a, LVM_FLOAT b) {
+    return LVM_Clamp(a + b);
 }
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii;
-    Mix_Private_FLOAT_st  *pInstanceL = (Mix_Private_FLOAT_st *)(ptrInstance1->PrivateParams);
-    Mix_Private_FLOAT_st  *pInstanceR = (Mix_Private_FLOAT_st *)(ptrInstance2->PrivateParams);
-
-    LVM_FLOAT   DeltaL = pInstanceL->Delta;
-    LVM_FLOAT   CurrentL = pInstanceL->Current;
-    LVM_FLOAT   TargetL = pInstanceL->Target;
-
-    LVM_FLOAT   DeltaR = pInstanceR->Delta;
-    LVM_FLOAT   CurrentR = pInstanceR->Current;
-    LVM_FLOAT   TargetR = pInstanceR->Target;
-
-    LVM_FLOAT   Temp = 0;
-
-    InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
-    OutLoop = (LVM_INT16)(n - (InLoop << 2));
-
-    if (OutLoop)
-    {
-        if(CurrentL < TargetL)
-        {
-            ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
-            CurrentL = Temp;
-            if (CurrentL > TargetL)
-                CurrentL = TargetL;
-        }
-        else
-        {
-            CurrentL -= DeltaL;
-            if (CurrentL < TargetL)
-                CurrentL = TargetL;
-        }
-
-        if(CurrentR < TargetR)
-        {
-            ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
-            CurrentR = Temp;
-            if (CurrentR > TargetR)
-                CurrentR = TargetR;
-        }
-        else
-        {
-            CurrentR -= DeltaR;
-            if (CurrentR < TargetR)
-                CurrentR = TargetR;
-        }
-
-        for (ii = OutLoop * 2; ii != 0; ii -= 2)
-        {
-            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-            *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        }
-    }
-
-    for (ii = InLoop * 2; ii != 0; ii-=2)
-    {
-        if(CurrentL < TargetL)
-        {
-            ADD2_SAT_FLOAT(CurrentL, DeltaL, Temp);
-            CurrentL = Temp;
-            if (CurrentL > TargetL)
-                CurrentL = TargetL;
-        }
-        else
-        {
-            CurrentL -= DeltaL;
-            if (CurrentL < TargetL)
-                CurrentL = TargetL;
-        }
-
-        if(CurrentR < TargetR)
-        {
-            ADD2_SAT_FLOAT(CurrentR, DeltaR, Temp);
-            CurrentR = Temp;
-            if (CurrentR > TargetR)
-                CurrentR = TargetR;
-        }
-        else
-        {
-            CurrentR -= DeltaR;
-            if (CurrentR < TargetR)
-                CurrentR = TargetR;
-        }
-
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentL));
-        *(dst++) = (LVM_FLOAT)(((LVM_FLOAT)*(src++) * (LVM_FLOAT)CurrentR));
-    }
-    pInstanceL->Current = CurrentL;
-    pInstanceR->Current = CurrentR;
-
-}
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA (Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels)
-{
-    LVM_INT32   ii, ch;
-    LVM_FLOAT   Temp =0.0f;
-    LVM_FLOAT   tempCurrent[NrChannels];
-    for (ch = 0; ch < NrChannels; ch++)
-    {
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT32 ii, ch;
+    LVM_FLOAT tempCurrent[NrChannels];
+    for (ch = 0; ch < NrChannels; ch++) {
         tempCurrent[ch] = ptrInstance[ch]->Current;
     }
-    for (ii = NrFrames; ii > 0; ii--)
-    {
-        for (ch = 0; ch < NrChannels; ch++)
-        {
-            Mix_Private_FLOAT_st *pInstance = ptrInstance[ch];
-            const LVM_FLOAT   Delta = pInstance->Delta;
-            LVM_FLOAT         Current = tempCurrent[ch];
-            const LVM_FLOAT   Target = pInstance->Target;
-            if (Current < Target)
-            {
-                ADD2_SAT_FLOAT(Current, Delta, Temp);
-                Current = Temp;
-                if (Current > Target)
-                    Current = Target;
-            }
-            else
-            {
+    for (ii = NrFrames; ii > 0; ii--) {
+        for (ch = 0; ch < NrChannels; ch++) {
+            Mix_Private_FLOAT_st* pInstance = ptrInstance[ch];
+            const LVM_FLOAT Delta = pInstance->Delta;
+            LVM_FLOAT Current = tempCurrent[ch];
+            const LVM_FLOAT Target = pInstance->Target;
+            if (Current < Target) {
+                Current = ADD2_SAT_FLOAT(Current, Delta);
+                if (Current > Target) Current = Target;
+            } else {
                 Current -= Delta;
-                if (Current < Target)
-                    Current = Target;
+                if (Current < Target) Current = Target;
             }
             *dst++ = *src++ * Current;
             tempCurrent[ch] = Current;
         }
     }
-    for (ch = 0; ch < NrChannels; ch++)
-    {
+    for (ch = 0; ch < NrChannels; ch++) {
         ptrInstance[ch]->Current = tempCurrent[ch];
     }
 }
-#endif
-/**********************************************************************************/
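In LVC_Core_MixSoft_1St_MC_float_WRA above, every channel carries its own Current/Delta/Target triple (one Mix_Private_FLOAT_st per channel); each frame, the channel gain takes one saturated step toward its target via ADD2_SAT_FLOAT and the sample is only scaled, never accumulated. A minimal sketch under those assumptions, with ChannelGain and mixSoftMcSketch as illustrative stand-ins:

// Illustrative per-channel gain ramp for a soft (non-mixing) multichannel apply.
// ChannelGain stands in for the per-channel Mix_Private_FLOAT_st fields.
struct ChannelGain {
    float current;
    float target;
    float delta;
};

static void mixSoftMcSketch(ChannelGain* ch, const float* src, float* dst,
                            int frames, int channels) {
    for (int f = 0; f < frames; ++f) {
        for (int c = 0; c < channels; ++c) {
            ChannelGain& g = ch[c];
            if (g.current < g.target) {
                float t = g.current + g.delta;            // saturated step up
                if (t > 1.0f) t = 1.0f;
                g.current = (t > g.target) ? g.target : t;
            } else {
                g.current -= g.delta;                     // step down
                if (g.current < g.target) g.current = g.target;
            }
            *dst++ = *src++ * g.current;                  // scale only, no mix-in
        }
    }
}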
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
index 5d8aadc..fc464e6 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp
@@ -18,7 +18,6 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
 #include "LVC_Mixer_Private.h"
 #include "LVM_Macros.h"
 #include "ScalarArithmetic.h"
@@ -26,86 +25,62 @@
 /**********************************************************************************
    FUNCTION LVCore_MIXSOFT_1ST_D16C31_WRA
 ***********************************************************************************/
-void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                     const LVM_FLOAT   *src,
-                                           LVM_FLOAT   *dst,
-                                           LVM_INT16   n)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii;
-    Mix_Private_FLOAT_st  *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta= (LVM_FLOAT)pInstance->Delta;
-    LVM_FLOAT   Current = (LVM_FLOAT)pInstance->Current;
-    LVM_FLOAT   Target= (LVM_FLOAT)pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                     LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+    LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+    LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
 
     InLoop = (LVM_INT16)(n >> 2); /* Process per 4 samples */
     OutLoop = (LVM_INT16)(n - (InLoop << 2));
 
-    if(Current<Target){
-        if (OutLoop){
+    if (Current < Target) {
+        if (OutLoop) {
+            Current = LVM_Clamp(Current + Delta);
+            if (Current > Target) Current = Target;
 
-            Temp = Current + Delta;
-            if (Temp > 1.0f)
-                Temp = 1.0f;
-            else if (Temp < -1.0f)
-                Temp = -1.0f;
-
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
-
-            for (ii = OutLoop; ii != 0; ii--){
-                *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                *(dst++) = (LVM_FLOAT)*(src++) * (LVM_FLOAT)Current;
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
+            Current = LVM_Clamp(Current + Delta);
 
-            Temp = Current + Delta;
+            if (Current > Target) Current = Target;
 
-            if (Temp > 1.0f)
-                Temp = 1.0f;
-            else if (Temp < -1.0f)
-                Temp = -1.0f;
-
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
-
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current) );
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
         }
-    }
-    else{
-        if (OutLoop){
+    } else {
+        if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (ii = OutLoop; ii != 0; ii--){
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (ii = OutLoop; ii != 0; ii--) {
+                *(dst++) = (LVM_FLOAT)*(src++) * Current;
             }
         }
 
-        for (ii = InLoop; ii != 0; ii--){
+        for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-            *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
+            *(dst++) = (LVM_FLOAT)*(src++) * Current;
         }
     }
-    pInstance->Current=Current;
+    pInstance->Current = Current;
 }
 
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_Core_MixSoft_Mc_D16C31_WRA
  *
@@ -123,20 +98,15 @@
  *  void
  *
  */
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT   *src,
-                                          LVM_FLOAT   *dst,
-                                          LVM_INT16   NrFrames,
-                                          LVM_INT16   NrChannels)
-{
-    LVM_INT16   OutLoop;
-    LVM_INT16   InLoop;
-    LVM_INT32   ii, jj;
-    Mix_Private_FLOAT_st  *pInstance=(Mix_Private_FLOAT_st *)(ptrInstance->PrivateParams);
-    LVM_FLOAT   Delta= (LVM_FLOAT)pInstance->Delta;
-    LVM_FLOAT   Current = (LVM_FLOAT)pInstance->Current;
-    LVM_FLOAT   Target= (LVM_FLOAT)pInstance->Target;
-    LVM_FLOAT   Temp;
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                    LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    LVM_INT16 OutLoop;
+    LVM_INT16 InLoop;
+    LVM_INT32 ii, jj;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)(ptrInstance->PrivateParams);
+    LVM_FLOAT Delta = (LVM_FLOAT)pInstance->Delta;
+    LVM_FLOAT Current = (LVM_FLOAT)pInstance->Current;
+    LVM_FLOAT Target = (LVM_FLOAT)pInstance->Target;
 
     /*
      * Same operation is performed on consecutive frames.
@@ -147,73 +117,50 @@
     /* OutLoop is calculated to handle cases where NrFrames value can be odd.*/
     OutLoop = (LVM_INT16)(NrFrames - (InLoop << 1));
 
-    if (Current<Target) {
+    if (Current < Target) {
         if (OutLoop) {
-
-            Temp = Current + Delta;
-            if (Temp > 1.0f)
-                Temp = 1.0f;
-            else if (Temp < -1.0f)
-                Temp = -1.0f;
-
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
+            Current = LVM_Clamp(Current + Delta);
+            if (Current > Target) Current = Target;
 
             for (ii = OutLoop; ii != 0; ii--) {
-                for (jj = NrChannels; jj !=0; jj--) {
-                    *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+                for (jj = NrChannels; jj != 0; jj--) {
+                    *(dst++) = (LVM_FLOAT)*(src++) * (LVM_FLOAT)Current;
                 }
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
+            Current = LVM_Clamp(Current + Delta);
+            if (Current > Target) Current = Target;
 
-            Temp = Current + Delta;
-
-            if (Temp > 1.0f)
-                Temp = 1.0f;
-            else if (Temp < -1.0f)
-                Temp = -1.0f;
-
-            Current=Temp;
-            if (Current > Target)
-                Current = Target;
-
-            for (jj = NrChannels; jj != 0 ; jj--)
-            {
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (jj = NrChannels; jj != 0; jj--) {
+                *(dst++) = (LVM_FLOAT)*(src++) * Current;
+                *(dst++) = (LVM_FLOAT)*(src++) * Current;
             }
         }
-    }
-    else{
+    } else {
         if (OutLoop) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
             for (ii = OutLoop; ii != 0; ii--) {
-                for (jj = NrChannels; jj !=0; jj--) {
-                    *(dst++) = (((LVM_FLOAT)*(src++) * (LVM_FLOAT)Current));
+                for (jj = NrChannels; jj != 0; jj--) {
+                    *(dst++) = (LVM_FLOAT)*(src++) * (LVM_FLOAT)Current;
                 }
             }
         }
 
         for (ii = InLoop; ii != 0; ii--) {
             Current -= Delta;
-            if (Current < Target)
-                Current = Target;
+            if (Current < Target) Current = Target;
 
-            for (jj = NrChannels; jj != 0 ; jj--)
-            {
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
-                *(dst++) = (((LVM_FLOAT)*(src++) * Current));
+            for (jj = NrChannels; jj != 0; jj--) {
+                *(dst++) = (LVM_FLOAT)*(src++) * Current;
+                *(dst++) = (LVM_FLOAT)*(src++) * Current;
             }
         }
     }
-    pInstance->Current=Current;
+    pInstance->Current = Current;
 }
-#endif
 
 /**********************************************************************************/
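Both WRA cores in this file split the buffer so the gain is stepped once per group rather than once per sample: the stereo core uses InLoop = n >> 2 groups of four samples, the multichannel core uses InLoop = NrFrames >> 1 groups of two frames, and OutLoop absorbs the remainder up front. The sketch below condenses that split, assuming n counts samples and group is the group size in samples; applyGainBlockedSketch and step are illustrative names only.

// Hypothetical illustration of the OutLoop/InLoop split used by the WRA cores:
// the remainder is processed first with one gain step, then each whole group
// gets exactly one further gain step.
static void applyGainBlockedSketch(const float* src, float* dst, int n,
                                   float& current, float target, float delta,
                                   int group) {   // 4 samples, or 2 * NrChannels
    const int inLoop = n / group;                 // whole groups
    const int outLoop = n - inLoop * group;       // leftover samples
    const auto step = [&]() {
        if (current < target) {
            current += delta;
            if (current > 1.0f) current = 1.0f;   // LVM_Clamp-style saturation
            if (current > target) current = target;
        } else {
            current -= delta;
            if (current < target) current = target;
        }
    };
    if (outLoop) {
        step();
        for (int i = 0; i < outLoop; ++i) *dst++ = *src++ * current;
    }
    for (int g = 0; g < inLoop; ++g) {
        step();
        for (int i = 0; i < group; ++i) *dst++ = *src++ * current;
    }
}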
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
index 2bec3be..270c7e0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixInSoft_D16C31_SAT.cpp
@@ -27,39 +27,35 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXINSOFT_D16C31_SAT
 ***********************************************************************************/
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                              const LVM_FLOAT       *src,
-                                    LVM_FLOAT       *dst,
-                                    LVM_INT16       n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                              LVM_FLOAT* dst, LVM_INT16 n) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Delta == 1.0f){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             LVC_Core_MixInSoft_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,111 +66,12 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
-        if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
-            if ((pInstance->Target) == 1.0f){
-                Add2_Sat_Float(src, dst, n);
-            }
-            else{
-                Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
-                /* In case the LVCore function would have changed the Current value */
-                pInstance->Current = pInstance->Target;
-            }
-        }
-    }
-
-    /******************************************************************************
-       CALL BACK
-    *******************************************************************************/
-
-    if (ptrInstance->MixerStream[0].CallbackSet){
-        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
-            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
-                                                       Make them equal. */
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
-            ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0){
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
-            }
-        }
-    }
-
-}
-
-#ifdef SUPPORT_MC
-/*
- * FUNCTION:       LVC_MixInSoft_Mc_D16C31_SAT
- *
- * DESCRIPTION:
- *  Mixer function with support for processing multichannel input
- *
- * PARAMETERS:
- *  ptrInstance    Instance pointer
- *  src            Source
- *  dst            Destination
- *  NrFrames       Number of frames
- *  NrChannels     Number of channels
- *
- * RETURNS:
- *  void
- *
- */
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                                 const LVM_FLOAT       *src,
-                                       LVM_FLOAT       *dst,
-                                       LVM_INT16       NrFrames,
-                                       LVM_INT16       NrChannels)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-
-    if (NrFrames <= 0)    return;
-
-    /******************************************************************************
-       SOFT MIXING
-    *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if (pInstance->Delta == 1.0f) {
-            pInstance->Current = pInstance->Target;
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
-            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
-                                                       Make them equal. */
-            TargetGain = pInstance->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
-            /* Soft mixing has to be applied */
-            HardMixing = FALSE;
-            LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]),
-                                             src,
-                                             dst,
-                                             NrFrames,
-                                             NrChannels);
-        }
-    }
-
-    /******************************************************************************
-       HARD MIXING
-    *******************************************************************************/
-
     if (HardMixing) {
         if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
             if ((pInstance->Target) == 1.0f) {
-                Add2_Sat_Float(src, dst, NrFrames*NrChannels);
-            }
-            else{
-                Mac3s_Sat_Float(src,
-                                (pInstance->Target),
-                                dst,
-                                NrFrames * NrChannels);
+                Add2_Sat_Float(src, dst, n);
+            } else {
+                Mac3s_Sat_Float(src, (pInstance->Target), dst, n);
                 /* In case the LVCore function would have changed the Current value */
                 pInstance->Current = pInstance->Target;
             }
@@ -193,15 +90,97 @@
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
             if (ptrInstance->MixerStream[0].pCallBack != 0) {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam);
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
+            }
+        }
+    }
+}
+
+/*
+ * FUNCTION:       LVC_MixInSoft_Mc_D16C31_SAT
+ *
+ * DESCRIPTION:
+ *  Mixer function with support for processing multichannel input
+ *
+ * PARAMETERS:
+ *  ptrInstance    Instance pointer
+ *  src            Source
+ *  dst            Destination
+ *  NrFrames       Number of frames
+ *  NrChannels     Number of channels
+ *
+ * RETURNS:
+ *  void
+ *
+ */
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+
+    if (NrFrames <= 0) return;
+
+    /******************************************************************************
+       SOFT MIXING
+    *******************************************************************************/
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
+            pInstance->Current = pInstance->Target;
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+                                                       Make them equal. */
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+        } else {
+            /* Soft mixing has to be applied */
+            HardMixing = FALSE;
+            LVC_Core_MixInSoft_Mc_D16C31_SAT(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
+                                             NrChannels);
+        }
+    }
+
+    /******************************************************************************
+       HARD MIXING
+    *******************************************************************************/
+
+    if (HardMixing) {
+        if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
+            if ((pInstance->Target) == 1.0f) {
+                Add2_Sat_Float(src, dst, NrFrames * NrChannels);
+            } else {
+                Mac3s_Sat_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
+                /* In case the LVCore function would have changed the Current value */
+                pInstance->Current = pInstance->Target;
             }
         }
     }
 
+    /******************************************************************************
+       CALL BACK
+    *******************************************************************************/
+
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+            pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
+                                                       Make them equal. */
+            TargetGain = pInstance->Target;
+            LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
+            ptrInstance->MixerStream[0].CallbackSet = FALSE;
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
+            }
+        }
+    }
 }
-#endif
 
 /**********************************************************************************/
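The wrappers in LVC_MixInSoft_D16C31_SAT.cpp only choose between soft and hard mixing and drive the completion callback: once the gain has effectively reached its target (already equal, Delta == 1.0, or within one Delta of it), Current is snapped to Target and the fixed-gain hard path runs (nothing when Target == 0, a saturating add when Target == 1.0, otherwise a saturating multiply-accumulate); while still ramping, the soft core is called instead. The callback fires when CallbackSet is set and the ramp has converged to within one Delta. A condensed restatement of that decision, with MixInPath and chooseMixInPath as hypothetical names:

#include <cmath>

// Hypothetical restatement of the soft/hard decision made by the mix-in wrappers.
enum class MixInPath { Nothing, HardAdd, HardMac, Soft };

static MixInPath chooseMixInPath(float current, float target, float delta) {
    const bool converged = (current == target) || (delta == 1.0f) ||
                           (std::fabs(current - target) < delta);
    if (!converged) return MixInPath::Soft;          // keep ramping in the core
    // Hard mixing: the wrapper also snaps Current to Target at this point.
    if (target == 0.0f) return MixInPath::Nothing;   // nothing to mix in
    if (target == 1.0f) return MixInPath::HardAdd;   // Add2_Sat_Float
    return MixInPath::HardMac;                       // Mac3s_Sat_Float at fixed gain
}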
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
index 3153ada..b0aa172 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp
@@ -29,144 +29,118 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 #define ARRAY_SIZE(a) ((sizeof(a)) / (sizeof(*(a))))
 
 /**********************************************************************************
-   FUNCTION LVC_MixSoft_1St_2i_D16C31_SAT
+   FUNCTION LVC_MixSoft_1St_MC_float_SAT
 ***********************************************************************************/
-#ifdef SUPPORT_MC
 /* This threshold is used to decide on the processing to be applied on
  * front center and back center channels
  */
 #define LVM_VOL_BAL_THR (0.000016f)
-void LVC_MixSoft_1St_MC_float_SAT (LVMixer3_2St_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT       *src,
-                                    LVM_FLOAT             *dst,
-                                    LVM_INT16             NrFrames,
-                                    LVM_INT32             NrChannels,
-                                    LVM_INT32             ChMask)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
-    Mix_Private_FLOAT_st  Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
-    Mix_Private_FLOAT_st  *pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
-    Mix_Private_FLOAT_st  *pInstance[NrChannels];
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                  LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT32 NrChannels,
+                                  LVM_INT32 ChMask) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
+    Mix_Private_FLOAT_st* pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
+    Mix_Private_FLOAT_st* pInstance[NrChannels];
 
-    if (audio_channel_mask_get_representation(ChMask)
-            == AUDIO_CHANNEL_REPRESENTATION_INDEX)
-    {
-        for (int i = 0; i < 2; i++)
-        {
+    if (audio_channel_mask_get_representation(ChMask) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        int loopLimit = (NrChannels == FCC_1) ? NrChannels : FCC_2;
+        for (int i = 0; i < loopLimit; i++) {
             pInstance[i] = pMixPrivInst[i];
         }
-        for (int i = 2; i < NrChannels; i++)
-        {
+        for (int i = loopLimit; i < NrChannels; i++) {
             pInstance[i] = pMixPrivInst[2];
         }
-    }
-    else
-    {
+    } else {
         // TODO: Combine with system/media/audio_utils/Balance.cpp
         // Constants in system/media/audio/include/system/audio-base.h
         // 'mixInstIdx' is used to map the appropriate mixer instance for each channel.
         const int mixInstIdx[] = {
-            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
-            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
-            2, // AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
-            3, // AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
-            0, // AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
-            1, // AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
-            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
-            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
-            2, // AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
-            0, // AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
-            1, // AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
-            2, // AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
-            0, // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
-            2, // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
-            0, // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
-            2, // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
-            0, // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
-            1, // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u
+                0,  // AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
+                1,  // AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
+                2,  // AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
+                3,  // AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
+                0,  // AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
+                1,  // AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
+                0,  // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
+                1,  // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
+                2,  // AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
+                0,  // AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
+                1,  // AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
+                2,  // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
+                0,  // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
+                1,  // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u
         };
-        if (pInstance1->Target <= LVM_VOL_BAL_THR ||
-            pInstance2->Target <= LVM_VOL_BAL_THR)
-        {
-            Target_ctr.Target  = 0.0f;
+        if (pInstance1->Target <= LVM_VOL_BAL_THR || pInstance2->Target <= LVM_VOL_BAL_THR) {
+            Target_ctr.Target = 0.0f;
             Target_ctr.Current = 0.0f;
-            Target_ctr.Delta   = 0.0f;
+            Target_ctr.Delta = 0.0f;
         }
         const unsigned int idxArrSize = ARRAY_SIZE(mixInstIdx);
-        for (unsigned int i = 0, channel = ChMask; channel !=0 ; ++i)
-        {
+        for (unsigned int i = 0, channel = ChMask; channel != 0; ++i) {
             const unsigned int idx = __builtin_ctz(channel);
-            if (idx < idxArrSize)
-            {
+            if (idx < idxArrSize) {
                 pInstance[i] = pMixPrivInst[mixInstIdx[idx]];
-            }
-            else
-            {
+            } else {
                 pInstance[i] = pMixPrivInst[2];
             }
             channel &= ~(1 << idx);
         }
     }
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
 
     if ((pInstance1->Current != pInstance1->Target) ||
-        (pInstance2->Current != pInstance2->Target))
-    {
+        (pInstance2->Current != pInstance2->Target)) {
         // TODO: combine similar checks below.
-        if (pInstance1->Delta == LVM_MAXFLOAT
-                || Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+        if (pInstance1->Delta == LVM_MAXFLOAT ||
+            Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             /* Difference is not significant anymore. Make them equal. */
             pInstance1->Current = pInstance1->Target;
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else
-        {
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
         }
 
-        if (HardMixing == TRUE)
-        {
-            if (pInstance2->Delta == LVM_MAXFLOAT
-                    || Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-            {
+        if (HardMixing == TRUE) {
+            if (pInstance2->Delta == LVM_MAXFLOAT ||
+                Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
                 /* Difference is not significant anymore. Make them equal. */
                 pInstance2->Current = pInstance2->Target;
                 TargetGain = pInstance2->Target;
                 LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else
-            {
+            } else {
                 /* Soft mixing has to be applied */
                 HardMixing = FALSE;
             }
         }
 
-        if (HardMixing == FALSE)
-        {
-             LVC_Core_MixSoft_1St_MC_float_WRA (&pInstance[0],
-                                                 src, dst, NrFrames, NrChannels);
+        if (HardMixing == FALSE) {
+            LVC_Core_MixSoft_1St_MC_float_WRA(&pInstance[0], src, dst, NrFrames, NrChannels);
         }
     }
 
@@ -174,19 +148,13 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing == TRUE)
-    {
-        if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT))
-        {
-            if (src != dst)
-            {
-                Copy_Float(src, dst, NrFrames*NrChannels);
+    if (HardMixing == TRUE) {
+        if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT)) {
+            if (src != dst) {
+                Copy_Float(src, dst, NrFrames * NrChannels);
             }
-        }
-        else
-        {
-            LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]),
-                                               src, dst, NrFrames, NrChannels);
+        } else {
+            LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]), src, dst, NrFrames, NrChannels);
         }
     }
 
@@ -194,172 +162,34 @@
        CALL BACK
     *******************************************************************************/
 
-    if (ptrInstance->MixerStream[0].CallbackSet)
-    {
-        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
+    if (ptrInstance->MixerStream[0].CallbackSet) {
+        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta) {
             pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
                                                          Make them equal. */
             TargetGain = pInstance1->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                    ptrInstance->MixerStream[0].pCallbackHandle,
-                    ptrInstance->MixerStream[0].pGeneralPurpose,
-                    ptrInstance->MixerStream[0].CallbackParam);
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
-    if (ptrInstance->MixerStream[1].CallbackSet)
-    {
-        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-        {
+    if (ptrInstance->MixerStream[1].CallbackSet) {
+        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta) {
             pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
                                                          Make them equal. */
             TargetGain = pInstance2->Target;
             LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
             ptrInstance->MixerStream[1].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[1].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[1].pCallBack) (\
-                    ptrInstance->MixerStream[1].pCallbackHandle,
-                    ptrInstance->MixerStream[1].pGeneralPurpose,
-                    ptrInstance->MixerStream[1].CallbackParam);
+            if (ptrInstance->MixerStream[1].pCallBack != 0) {
+                (*ptrInstance->MixerStream[1].pCallBack)(
+                        ptrInstance->MixerStream[1].pCallbackHandle,
+                        ptrInstance->MixerStream[1].pGeneralPurpose,
+                        ptrInstance->MixerStream[1].CallbackParam);
             }
         }
     }
 }
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT( LVMixer3_2St_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT             *src,
-                                    LVM_FLOAT             *dst,
-                                    LVM_INT16             n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
-
-    if(n <= 0)    return;
-
-    /******************************************************************************
-       SOFT MIXING
-    *******************************************************************************/
-    if ((pInstance1->Current != pInstance1->Target) || (pInstance2->Current != pInstance2->Target))
-    {
-        if(pInstance1->Delta == 1.0f)
-        {
-            pInstance1->Current = pInstance1->Target;
-            TargetGain = pInstance1->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
-            pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
-                                                         Make them equal. */
-            TargetGain = pInstance1->Target;
-            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }
-        else
-        {
-            /* Soft mixing has to be applied */
-            HardMixing = FALSE;
-        }
-
-        if(HardMixing == TRUE)
-        {
-            if(pInstance2->Delta == 1.0f)
-            {
-                pInstance2->Current = pInstance2->Target;
-                TargetGain = pInstance2->Target;
-                LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-            {
-                pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore. \
-                                                             Make them equal. */
-                TargetGain = pInstance2->Target;
-                LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
-            }
-            else
-            {
-                /* Soft mixing has to be applied */
-                HardMixing = FALSE;
-            }
-        }
-
-        if(HardMixing == FALSE)
-        {
-             LVC_Core_MixSoft_1St_2i_D16C31_WRA( &(ptrInstance->MixerStream[0]),
-                                                 &(ptrInstance->MixerStream[1]),
-                                                 src, dst, n);
-        }
-    }
-
-    /******************************************************************************
-       HARD MIXING
-    *******************************************************************************/
-
-    if (HardMixing)
-    {
-        if ((pInstance1->Target == 1.0f) && (pInstance2->Target == 1.0f))
-        {
-            if(src != dst)
-            {
-                Copy_Float(src, dst, n);
-            }
-        }
-        else
-        {
-            LVC_Core_MixHard_1St_2i_D16C31_SAT(&(ptrInstance->MixerStream[0]),
-                                               &(ptrInstance->MixerStream[1]),
-                                               src, dst, n);
-        }
-    }
-
-    /******************************************************************************
-       CALL BACK
-    *******************************************************************************/
-
-    if (ptrInstance->MixerStream[0].CallbackSet)
-    {
-        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
-        {
-            pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
-                                                         Make them equal. */
-            TargetGain = pInstance1->Target;
-            LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
-            ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
-            }
-        }
-    }
-    if (ptrInstance->MixerStream[1].CallbackSet)
-    {
-        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
-        {
-            pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
-                                                         Make them equal. */
-            TargetGain = pInstance2->Target;
-            LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
-            ptrInstance->MixerStream[1].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[1].pCallBack != 0)
-            {
-                (*ptrInstance->MixerStream[1].pCallBack) (
-                                                ptrInstance->MixerStream[1].pCallbackHandle,
-                                                ptrInstance->MixerStream[1].pGeneralPurpose,
-                                                ptrInstance->MixerStream[1].CallbackParam );
-            }
-        }
-    }
-}
-/**********************************************************************************/
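LVC_MixSoft_1St_MC_float_SAT above routes every output channel to one of four gain instances ordered {left, right, center-like, LFE} before mixing. For positional masks it walks the set bits of ChMask with __builtin_ctz and maps each bit position through mixInstIdx, falling back to the center-like instance for positions beyond the table; for index masks the first channel (or first two, per FCC_1/FCC_2, presumably 1 and 2) takes the stereo instances and the rest take the center-like instance, which is itself zeroed when either stereo target is at or below LVM_VOL_BAL_THR. The positional mapping is sketched below with simplified types and a deliberately truncated table; mapChannelsSketch is an illustrative name only.

#include <cstdint>

// Illustrative positional channel-mask mapping; the instance table is ordered
// {left, right, center-like, lfe} like pMixPrivInst, and the real mixInstIdx
// covers all 20 positional bits rather than the few shown here.
static void mapChannelsSketch(void* const inst[4], void* mapped[],
                              uint32_t chMask) {
    static const int mixInstIdx[] = {0, 1, 2, 3, 0, 1};  // FL, FR, FC, LFE, BL, BR
    const unsigned tableSize = sizeof(mixInstIdx) / sizeof(mixInstIdx[0]);
    unsigned i = 0;
    for (uint32_t mask = chMask; mask != 0; ++i) {
        const unsigned bit = __builtin_ctz(mask);        // lowest set channel bit
        mapped[i] = (bit < tableSize) ? inst[mixInstIdx[bit]]
                                      : inst[2];         // unknown -> center-like
        mask &= ~(1u << bit);                            // consume this channel
    }
}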
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
index 4d229da..5a67bda 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -27,39 +28,35 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION LVMixer3_MIXSOFT_1ST_D16C31_SAT
 ***********************************************************************************/
-void LVC_MixSoft_1St_D16C31_SAT( LVMixer3_1St_FLOAT_st *ptrInstance,
-                                  const LVM_FLOAT             *src,
-                                        LVM_FLOAT             *dst,
-                                        LVM_INT16             n)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                          (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                LVM_FLOAT* dst, LVM_INT16 n) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Delta == 1.0f){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             LVC_Core_MixSoft_1St_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, n);
@@ -70,39 +67,37 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
+    if (HardMixing) {
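+        /* Constant gain for the whole block: write silence, scale by Target, or just copy. */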
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, n);
+            memset(dst, 0, n * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, n);
-            else if(src != dst)
+            else if (src != dst)
                 Copy_Float(src, dst, n);
         }
-
     }
 
     /******************************************************************************
        CALL BACK
     *******************************************************************************/
 
-    if (ptrInstance->MixerStream[0].CallbackSet){
-        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta){
+    if (ptrInstance->MixerStream[0].CallbackSet) {
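+        /* Once the ramp has converged, latch the target and notify the application; */
+        /* CallbackSet is cleared so the callback fires only once per target update. */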
+        if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
-            if (ptrInstance->MixerStream[0].pCallBack != 0){
-                (*ptrInstance->MixerStream[0].pCallBack) ( \
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam );
+            if (ptrInstance->MixerStream[0].pCallBack != 0) {
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
 }
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_MixSoft_Mc_D16C31_SAT
  *
@@ -120,40 +115,32 @@
  *  void
  *
  */
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *ptrInstance,
-                                  const LVM_FLOAT      *src,
-                                        LVM_FLOAT      *dst,
-                                        LVM_INT16      NrFrames,
-                                        LVM_INT16      NrChannels)
-{
-    char        HardMixing = TRUE;
-    LVM_FLOAT   TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = \
-                          (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                               LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels) {
+    char HardMixing = TRUE;
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
+    if (pInstance->Current != pInstance->Target) {
         if (pInstance->Delta == 1.0f) {
             pInstance->Current = pInstance->Target;
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
+        } else if (Abs_Float(pInstance->Current - pInstance->Target) < pInstance->Delta) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             TargetGain = pInstance->Target;
             LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
-            LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]),
-                                           src,
-                                           dst,
-                                           NrFrames,
+            LVC_Core_MixSoft_Mc_D16C31_WRA(&(ptrInstance->MixerStream[0]), src, dst, NrFrames,
                                            NrChannels);
         }
     }
@@ -164,14 +151,13 @@
 
     if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0.0, dst, NrFrames * NrChannels);
+            memset(dst, 0, NrFrames * NrChannels * sizeof(*dst));
         else {
             if ((pInstance->Target) != 1.0f)
                 Mult3s_Float(src, (pInstance->Target), dst, NrFrames * NrChannels);
             else if (src != dst)
                 Copy_Float(src, dst, NrFrames * NrChannels);
         }
-
     }
 
     /******************************************************************************
@@ -186,15 +172,13 @@
             LVC_Mixer_SetTarget(ptrInstance->MixerStream, TargetGain);
             ptrInstance->MixerStream[0].CallbackSet = FALSE;
             if (ptrInstance->MixerStream[0].pCallBack != 0) {
-                (*ptrInstance->MixerStream[0].pCallBack) (\
-                                                ptrInstance->MixerStream[0].pCallbackHandle,
-                                                ptrInstance->MixerStream[0].pGeneralPurpose,
-                                                ptrInstance->MixerStream[0].CallbackParam);
+                (*ptrInstance->MixerStream[0].pCallBack)(
+                        ptrInstance->MixerStream[0].pCallbackHandle,
+                        ptrInstance->MixerStream[0].pGeneralPurpose,
+                        ptrInstance->MixerStream[0].CallbackParam);
             }
         }
     }
 }
 
-#endif
-
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
index 54ab79d..882a8ce 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp
@@ -25,49 +25,39 @@
 /**********************************************************************************
    FUNCTION LVC_MixSoft_2St_D16C31_SAT.c
 ***********************************************************************************/
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       n)
-{
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)){
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                    src2, dst, n);
-    }
-    else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)){
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                    src1, dst, n);
-    }
-    else if ((pInstance1->Current != pInstance1->Target) || \
-                                    (pInstance2->Current != pInstance2->Target))
-    {
-        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                   src1, dst, n);
-        LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                  src2, dst, n);
-    }
-    else{
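+    /* A stream that has settled at zero gain contributes nothing, so only the other stream */
+    /* is mixed; while either gain is still ramping, both streams are soft-mixed into dst.  */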
+    if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+                                   dst, n);
+    } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+                                   dst, n);
+    } else if ((pInstance1->Current != pInstance1->Target) ||
+               (pInstance2->Current != pInstance2->Target)) {
+        LVC_MixSoft_1St_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1,
+                                   dst, n);
+        LVC_MixInSoft_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+                                 n);
+    } else {
         /******************************************************************************
            HARD MIXING
         *******************************************************************************/
-        LVC_Core_MixHard_2St_D16C31_SAT( &ptrInstance->MixerStream[0],
-                                         &ptrInstance->MixerStream[1],
-                                         src1, src2, dst, n);
+        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
+                                        src1, src2, dst, n);
     }
 }
 
-#ifdef SUPPORT_MC
 /*
  * FUNCTION:       LVC_MixSoft_2Mc_D16C31_SAT
  *
@@ -86,48 +76,38 @@
  *  void
  *
  */
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *ptrInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       NrFrames,
-                                      LVM_INT16       NrChannels)
-{
-    Mix_Private_FLOAT_st  *pInstance1 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
-    Mix_Private_FLOAT_st  *pInstance2 = \
-                             (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* ptrInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 NrFrames,
+                                LVM_INT16 NrChannels) {
+    Mix_Private_FLOAT_st* pInstance1 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st* pInstance2 =
+            (Mix_Private_FLOAT_st*)(ptrInstance->MixerStream[1].PrivateParams);
 
-    if (NrFrames <= 0)    return;
+    if (NrFrames <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
     if ((pInstance1->Current == pInstance1->Target) && (pInstance1->Current == 0)) {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                    src2, dst, NrFrames, NrChannels);
-    }
-    else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                    src1, dst, NrFrames, NrChannels);
-    }
-    else if ((pInstance1->Current != pInstance1->Target) || \
-                                    (pInstance2->Current != pInstance2->Target))
-    {
-        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[0]),
-                                   src1, dst, NrFrames, NrChannels);
-        LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st *)(&ptrInstance->MixerStream[1]),
-                                   src2, dst, NrFrames, NrChannels);
-    }
-    else{
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2, dst,
+                                  NrFrames, NrChannels);
+    } else if ((pInstance2->Current == pInstance2->Target) && (pInstance2->Current == 0)) {
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+                                  NrFrames, NrChannels);
+    } else if ((pInstance1->Current != pInstance1->Target) ||
+               (pInstance2->Current != pInstance2->Target)) {
+        LVC_MixSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[0]), src1, dst,
+                                  NrFrames, NrChannels);
+        LVC_MixInSoft_Mc_D16C31_SAT((LVMixer3_1St_FLOAT_st*)(&ptrInstance->MixerStream[1]), src2,
+                                    dst, NrFrames, NrChannels);
+    } else {
         /******************************************************************************
            HARD MIXING
         *******************************************************************************/
-        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0],
-                                        &ptrInstance->MixerStream[1],
+        LVC_Core_MixHard_2St_D16C31_SAT(&ptrInstance->MixerStream[0], &ptrInstance->MixerStream[1],
                                         src1, src2, dst, NrFrames * NrChannels);
     }
 }
-#endif
 
 /**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
index ce42d2e..1eb2dea 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
@@ -25,34 +25,31 @@
 ***********************************************************************************/
 
 /* LVMixer3_st structure stores Instance parameters for one audio stream */
-typedef struct
-{
-    LVM_FLOAT       PrivateParams[3];   /* Private Instance params for \
-                                           Audio Stream shift parameter */
-    LVM_INT16       CallbackSet;        /* Boolean.  Should be set by calling application \
-                                           each time the target value is updated */
-    LVM_INT16       CallbackParam;      /* Parameter that will be used in the calback function */
-    void            *pCallbackHandle;   /* Pointer to the instance of the callback function */
-    void            *pGeneralPurpose;   /* Pointer for general purpose usage */
-    LVM_Callback    pCallBack;          /* Pointer to the callback function */
+typedef struct {
+    LVM_FLOAT PrivateParams[3]; /* Private Instance params for \
+                                   Audio Stream shift parameter */
+    LVM_INT16 CallbackSet;      /* Boolean.  Should be set by calling application \
+                                   each time the target value is updated */
+    LVM_INT16 CallbackParam;    /* Parameter that will be used in the callback function */
+    void* pCallbackHandle;      /* Pointer to the instance of the callback function */
+    void* pGeneralPurpose;      /* Pointer for general purpose usage */
+    LVM_Callback pCallBack;     /* Pointer to the callback function */
 } LVMixer3_FLOAT_st;
-typedef struct
-{
-    LVMixer3_FLOAT_st     MixerStream[1];    /* Instance Params for one Audio Stream */
+typedef struct {
+    LVMixer3_FLOAT_st MixerStream[1]; /* Instance Params for one Audio Stream */
 } LVMixer3_1St_FLOAT_st;
-typedef struct
-{
-    LVMixer3_FLOAT_st     MixerStream[2];    /* Instance Params for two Audio Streams */
+typedef struct {
+    LVMixer3_FLOAT_st MixerStream[2]; /* Instance Params for two Audio Streams */
 } LVMixer3_2St_FLOAT_st;
 /**********************************************************************************
    FUNCTION PROTOTYPES (HIGH LEVEL FUNCTIONS)
 ***********************************************************************************/
 
 /* Function names should be unique within first 16 characters  */
-#define    LVMixer3_MixSoft_1St_D16C31_SAT   LVMixer3_1St_D16C31_SAT_MixSoft
-#define    LVMixer3_MixInSoft_D16C31_SAT     LVMixer3_D16C31_SAT_MixInSoft
-#define    LVMixer3_MixSoft_2St_D16C31_SAT   LVMixer3_2St_D16C31_SAT_MixSoft
-#define    LVMixer3_MixSoft_3St_D16C31_SAT   LVMixer3_3St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_1St_D16C31_SAT LVMixer3_1St_D16C31_SAT_MixSoft
+#define LVMixer3_MixInSoft_D16C31_SAT LVMixer3_D16C31_SAT_MixInSoft
+#define LVMixer3_MixSoft_2St_D16C31_SAT LVMixer3_2St_D16C31_SAT_MixSoft
+#define LVMixer3_MixSoft_3St_D16C31_SAT LVMixer3_3St_D16C31_SAT_MixSoft
 
 /*** General functions ************************************************************/
 
@@ -62,85 +59,49 @@
 /* then the calculation will give an incorrect value for alpha, see the mixer     */
 /* documentation for further details.                                             */
 /* ********************************************************************************/
-void LVC_Mixer_SetTarget( LVMixer3_FLOAT_st *pStream,
-                          LVM_FLOAT        TargetGain);
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream);
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain);
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream);
 
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream);
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream);
 
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
-                     LVM_FLOAT           TargetGain,
-                     LVM_FLOAT           CurrentGain);
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain);
 
-void LVC_Mixer_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                LVM_INT32           Tc_millisec,
-                                LVM_Fs_en           Fs,
-                                LVM_INT16           NumChannels);
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+                               LVM_INT16 NumChannels);
 
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                         LVM_INT32           Tc_millisec,
-                                         LVM_Fs_en           Fs,
-                                         LVM_INT16           NumChannels);
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+                                        LVM_Fs_en Fs, LVM_INT16 NumChannels);
 
 /*** 16 bit functions *************************************************************/
 
-void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src,
-                                      LVM_FLOAT       *dst,
-                                      LVM_INT16       n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                               const LVM_FLOAT       *src,
-                                     LVM_FLOAT       *dst,
-                                     LVM_INT16       NrFrames,
-                                     LVM_INT16       NrChannels);
-#endif
+void LVC_MixSoft_1St_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                               LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
-void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                              const LVM_FLOAT       *src,
-                                    LVM_FLOAT       *dst,
-                                    LVM_INT16       n);
-#ifdef SUPPORT_MC
-void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st *pInstance,
-                                 const LVM_FLOAT       *src,
-                                       LVM_FLOAT       *dst,
-                                       LVM_INT16       NrFrames,
-                                       LVM_INT16       NrChannels);
-#endif
+void LVC_MixInSoft_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                              LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_MixInSoft_Mc_D16C31_SAT(LVMixer3_1St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                 LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
-void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                LVM_FLOAT             *dst,  /* dst cannot be equal to src2 */
-                                LVM_INT16             n);
-#ifdef SUPPORT_MC
-void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                const LVM_FLOAT       *src1,
-                                const LVM_FLOAT       *src2,
-                                LVM_FLOAT             *dst,  /* dst cannot be equal to src2 */
-                                LVM_INT16             NrFrames,
-                                LVM_INT16             NrChannels);
-#endif
+void LVC_MixSoft_2St_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2,
+                                LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+                                LVM_INT16 n);
+void LVC_MixSoft_2Mc_D16C31_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src1,
+                                const LVM_FLOAT* src2,
+                                LVM_FLOAT* dst, /* dst cannot be equal to src2 */
+                                LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 /**********************************************************************************/
 /* For applying different gains to Left and right channels                        */
 /* MixerStream[0] applies to Left channel                                         */
 /* MixerStream[1] applies to Right channel                                        */
 /* Gain values should not be more than 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                   const   LVM_FLOAT     *src,
-                                   LVM_FLOAT             *dst,   /* dst can be equal to src */
-                                   LVM_INT16             NrFrames,
-                                   LVM_INT32             NrChannels,
-                                   LVM_INT32             ChMask);
-#endif
-void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
-                                   const   LVM_FLOAT     *src,
-                                   LVM_FLOAT             *dst,   /* dst can be equal to src */
-                                   LVM_INT16             n);     /* Number of stereo samples */
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st* pInstance, const LVM_FLOAT* src,
+                                  LVM_FLOAT* dst, /* dst can be equal to src */
+                                  LVM_INT16 NrFrames, LVM_INT32 NrChannels, LVM_INT32 ChMask);
 
 /**********************************************************************************/
 
-#endif //#ifndef __LVC_MIXER_H__
-
+#endif  //#ifndef __LVC_MIXER_H__
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
index d0b50e6..03de8b0 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetCurrent.cpp
@@ -30,10 +30,9 @@
 /*  CurrentGain      - CurrentGain value in Q 16.15 format              */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVC_Mixer_GetCurrent( LVMixer3_FLOAT_st *pStream)
-{
-    LVM_FLOAT       CurrentGain;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetCurrent(LVMixer3_FLOAT_st* pStream) {
+    LVM_FLOAT CurrentGain;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
     CurrentGain = pInstance->Current;  // CurrentGain
     return CurrentGain;
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
index 3ae5ba4..21ebac1f 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_GetTarget.cpp
@@ -30,10 +30,9 @@
 /*  TargetGain      - TargetGain value in Q 16.15 format                */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVC_Mixer_GetTarget( LVMixer3_FLOAT_st *pStream)
-{
-    LVM_FLOAT       TargetGain;
-    Mix_Private_FLOAT_st  *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+LVM_FLOAT LVC_Mixer_GetTarget(LVMixer3_FLOAT_st* pStream) {
+    LVM_FLOAT TargetGain;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
 
     TargetGain = pInstance->Target;  // TargetGain
     return TargetGain;
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
index c9fd344..e37f635 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Init.cpp
@@ -44,14 +44,10 @@
 /*  void                                                                */
 /*                                                                      */
 /************************************************************************/
-void LVC_Mixer_Init( LVMixer3_FLOAT_st *pStream,
-                     LVM_FLOAT           TargetGain,
-                     LVM_FLOAT           CurrentGain)
-{
+void LVC_Mixer_Init(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain, LVM_FLOAT CurrentGain) {
     LVM_FLOAT MaxGain = TargetGain;
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
-    if(CurrentGain > MaxGain)
-        MaxGain = CurrentGain;
-    pInstance->Target = TargetGain;   // Update fractional gain Target
-    pInstance->Current = CurrentGain; // Update fractional gain Current
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+    if (CurrentGain > MaxGain) MaxGain = CurrentGain;
+    pInstance->Target = TargetGain;    // Update fractional gain Target
+    pInstance->Current = CurrentGain;  // Update fractional gain Current
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
index 123d22b..9206fae 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
@@ -26,54 +26,36 @@
 #include "VectorArithmetic.h"
 
 /* Instance parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_FLOAT                       Target;           /*number specifying value of Target Gain */
-    LVM_FLOAT                       Current;          /*number specifying value of Current Gain */
-    LVM_FLOAT                       Delta;            /*number specifying value of Delta Gain */
+    LVM_FLOAT Target;  /*number specifying value of Target Gain */
+    LVM_FLOAT Current; /*number specifying value of Current Gain */
+    LVM_FLOAT Delta;   /*number specifying value of Delta Gain */
 } Mix_Private_FLOAT_st;
 
 /**********************************************************************************
    DEFINITIONS
 ***********************************************************************************/
-#define LVCore_MixInSoft_D32C31_SAT    LVCore_InSoft_D32C31_SAT
-#define LVCore_MixSoft_1St_D32C31_WRA  LVCore_Soft_1St_D32C31_WRA
-#define LVCore_MixHard_2St_D32C31_SAT  LVCore_Hard_2St_D32C31_SAT
+#define LVCore_MixInSoft_D32C31_SAT LVCore_InSoft_D32C31_SAT
+#define LVCore_MixSoft_1St_D32C31_WRA LVCore_Soft_1St_D32C31_WRA
+#define LVCore_MixHard_2St_D32C31_SAT LVCore_Hard_2St_D32C31_SAT
 
 /**********************************************************************************
    FUNCTION PROTOTYPES (LOW LEVEL SUBFUNCTIONS)
 ***********************************************************************************/
 
 /*** 16 bit functions *************************************************************/
-void LVC_Core_MixInSoft_D16C31_SAT( LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                    LVM_FLOAT     *dst,
-                                    LVM_INT16     n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     NrFrames,
-                                          LVM_INT16     NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_D16C31_WRA( LVMixer3_FLOAT_st *ptrInstance,
-                                      const LVM_FLOAT     *src,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n);
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st *ptrInstance,
-                                    const LVM_FLOAT     *src,
-                                          LVM_FLOAT     *dst,
-                                          LVM_INT16     NrFrames,
-                                          LVM_INT16     NrChannels);
-#endif
-void LVC_Core_MixHard_2St_D16C31_SAT( LVMixer3_FLOAT_st *pInstance1,
-                                      LVMixer3_FLOAT_st         *pInstance2,
-                                      const LVM_FLOAT     *src1,
-                                      const LVM_FLOAT     *src2,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n);
+void LVC_Core_MixInSoft_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                   LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixInSoft_Mc_D16C31_SAT(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                      LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixSoft_1St_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                     LVM_FLOAT* dst, LVM_INT16 n);
+void LVC_Core_MixSoft_Mc_D16C31_WRA(LVMixer3_FLOAT_st* ptrInstance, const LVM_FLOAT* src,
+                                    LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
+void LVC_Core_MixHard_2St_D16C31_SAT(LVMixer3_FLOAT_st* pInstance1, LVMixer3_FLOAT_st* pInstance2,
+                                     const LVM_FLOAT* src1, const LVM_FLOAT* src2, LVM_FLOAT* dst,
+                                     LVM_INT16 n);
 
 /**********************************************************************************/
 /* For applying different gains to Left and right channels                        */
@@ -81,18 +63,8 @@
 /* ptrInstance2 applies to Right channel                                          */
 /* Gain values should not be more than 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels);
-#endif
-void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n);
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
 /**********************************************************************************/
 /* For applying different gains to Left and right channels                        */
@@ -100,21 +72,7 @@
 /* ptrInstance2 applies to Right channel                                          */
 /* Gain values should not be more than 1.0                                        */
 /**********************************************************************************/
-#ifdef SUPPORT_MC
-void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st **ptrInstance,
-                                         const LVM_FLOAT      *src,
-                                         LVM_FLOAT            *dst,
-                                         LVM_INT16            NrFrames,
-                                         LVM_INT16            NrChannels);
-#endif
-void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st        *ptrInstance1,
-                                         LVMixer3_FLOAT_st        *ptrInstance2,
-                                         const LVM_FLOAT    *src,
-                                         LVM_FLOAT          *dst,
-                                         LVM_INT16          n);
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st** ptrInstance, const LVM_FLOAT* src,
+                                       LVM_FLOAT* dst, LVM_INT16 NrFrames, LVM_INT16 NrChannels);
 
-/*** 32 bit functions *************************************************************/
-/**********************************************************************************/
-
-#endif //#ifndef __LVC_MIXER_PRIVATE_H__
-
+#endif  //#ifndef __LVC_MIXER_PRIVATE_H__
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
index 47b0cec..d8015c4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTarget.cpp
@@ -43,9 +43,7 @@
 /*  void                                                                */
 /*                                                                      */
 /************************************************************************/
-void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st *pStream,
-                         LVM_FLOAT         TargetGain)
-{
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
-    pInstance->Target = TargetGain;               // Update gain Target
+void LVC_Mixer_SetTarget(LVMixer3_FLOAT_st* pStream, LVM_FLOAT TargetGain) {
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
+    pInstance->Target = TargetGain;  // Update gain Target
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
index 1a8da7a..715b908 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_SetTimeConstant.cpp
@@ -44,36 +44,33 @@
 /* RETURNS:                                                             */
 /*  void                                                                */
 /************************************************************************/
-void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st *pStream,
-                               LVM_INT32           Tc_millisec,
-                               LVM_Fs_en           Fs,
-                               LVM_INT16           NumChannels)
-{
-    LVM_FLOAT   DeltaTable[13] = {0.500000f,/*8000*/
-                                  0.362812f,/*11025*/
-                                  0.333333f,/*12000*/
-                                  0.250000f,/*16000*/
-                                  0.181406f,/*22050*/
-                                  0.166666f,/*24000*/
-                                  0.125000f,/*32000*/
-                                  0.090703f,/*44100*/
-                                  0.083333f,/*48000*/
-                                  0.045352f,/*88200*/
-                                  0.041667f,/*96000*/
-                                  0.022676f,/*176400*/
-                                  0.020833f};/*192000*/
+void LVC_Mixer_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec, LVM_Fs_en Fs,
+                               LVM_INT16 NumChannels) {
+    LVM_FLOAT DeltaTable[13] = {0.500000f,  /*8000*/
+                                0.362812f,  /*11025*/
+                                0.333333f,  /*12000*/
+                                0.250000f,  /*16000*/
+                                0.181406f,  /*22050*/
+                                0.166666f,  /*24000*/
+                                0.125000f,  /*32000*/
+                                0.090703f,  /*44100*/
+                                0.083333f,  /*48000*/
+                                0.045352f,  /*88200*/
+                                0.041667f,  /*96000*/
+                                0.022676f,  /*176400*/
+                                0.020833f}; /*192000*/
 
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
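+    /* DeltaTable[Fs] is approximately 4000/Fs, so after the divisions below      */
+    /* Delta ~= 4000 / (NumChannels * Fs * Tc_millisec), cf. the closing comment. */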
     LVM_FLOAT Delta = DeltaTable[Fs];
     Delta = Delta / (NumChannels);
 
-    if(Tc_millisec == 0)
+    if (Tc_millisec == 0)
         Delta = 1.000000f;
     else
         Delta = Delta / Tc_millisec;
 
-    if(Delta == 0)
-        Delta = 0.0000000005f;  /* If Time Constant is so large that Delta is 0, \
-                                  assign minimum value to Delta */
+    if (Delta == 0)
+        Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
+                                 assign minimum value to Delta */
     pInstance->Delta = Delta;  // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
index f335a1e..cf84613 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp
@@ -44,57 +44,50 @@
 /* RETURNS:                                                             */
 /*  void                                                                */
 /************************************************************************/
-void LVC_Mixer_VarSlope_SetTimeConstant( LVMixer3_FLOAT_st *pStream,
-                                         LVM_INT32           Tc_millisec,
-                                         LVM_Fs_en           Fs,
-                                         LVM_INT16           NumChannels)
-{
-     LVM_FLOAT   DeltaTable[13] = {0.500000f,/*8000*/
-                                   0.362812f,/*11025*/
-                                   0.333333f,/*12000*/
-                                   0.250000f,/*16000*/
-                                   0.181406f,/*22050*/
-                                   0.166666f,/*24000*/
-                                   0.125000f,/*32000*/
-                                   0.090703f,/*44100*/
-                                   0.083333f,/*48000*/
-                                   0.045352f,/*88200*/
-                                   0.041666f,/*96000*/
-                                   0.022676f,/*176400*/
-                                   0.020833f};/*192000*/
+void LVC_Mixer_VarSlope_SetTimeConstant(LVMixer3_FLOAT_st* pStream, LVM_INT32 Tc_millisec,
+                                        LVM_Fs_en Fs, LVM_INT16 NumChannels) {
+    LVM_FLOAT DeltaTable[13] = {0.500000f,  /*8000*/
+                                0.362812f,  /*11025*/
+                                0.333333f,  /*12000*/
+                                0.250000f,  /*16000*/
+                                0.181406f,  /*22050*/
+                                0.166666f,  /*24000*/
+                                0.125000f,  /*32000*/
+                                0.090703f,  /*44100*/
+                                0.083333f,  /*48000*/
+                                0.045352f,  /*88200*/
+                                0.041666f,  /*96000*/
+                                0.022676f,  /*176400*/
+                                0.020833f}; /*192000*/
     LVM_FLOAT Tc_millisec_float;
-    Mix_Private_FLOAT_st *pInstance = (Mix_Private_FLOAT_st *)pStream->PrivateParams;
+    Mix_Private_FLOAT_st* pInstance = (Mix_Private_FLOAT_st*)pStream->PrivateParams;
     LVM_FLOAT Delta = DeltaTable[Fs];
 
-    LVM_FLOAT   Current;
-    LVM_FLOAT   Target;
+    LVM_FLOAT Current;
+    LVM_FLOAT Target;
 
-    Delta=Delta / (NumChannels);
+    Delta = Delta / (NumChannels);
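+    /* Unlike LVC_Mixer_SetTimeConstant, Tc_millisec is divided by (Current - Target) below, */
+    /* so the resulting Delta scales with the remaining gain change (variable slope).        */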
 
     /*  Get gain values  */
     Current = pInstance->Current;
     Target = pInstance->Target;
 
-    if (Current != Target)
-    {
+    if (Current != Target) {
         Tc_millisec_float = (LVM_FLOAT)(Tc_millisec) / (Current - Target);
-        if (Tc_millisec_float < 0)
-            Tc_millisec_float = -Tc_millisec_float;
+        if (Tc_millisec_float < 0) Tc_millisec_float = -Tc_millisec_float;
 
-        if(Tc_millisec == 0)
+        if (Tc_millisec == 0)
             Delta = 1.000000f;
         else
             Delta = Delta / Tc_millisec_float;
 
-        if(Delta == 0)
+        if (Delta == 0)
             Delta = 0.0000000005f; /* If Time Constant is so large that Delta is 0, \
                                       assign minimum value to Delta */
-    }
-    else
-    {
-        Delta = 0.0000000005f;  /* Minimum value for proper call-backs \
-                             (setting it to zero has some problems, to be corrected) */
+    } else {
+        Delta = 0.0000000005f; /* Minimum value for proper call-backs \
+                            (setting it to zero has some problems, to be corrected) */
     }
 
-    pInstance->Delta = Delta;     // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
+    pInstance->Delta = Delta;  // Delta=(2147483647*4*1000)/(NumChannels*SampleRate*Tc_millisec)
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
index 2497d29..59095df 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_HPF.cpp
@@ -67,31 +67,19 @@
 /* RETURNS:                                                                */
 /*                                                                         */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_HPF(   LVM_FLOAT       w,
-                        FO_FLOAT_Coefs_t  *pCoeffs)
-{
-    LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
-                                    0.999801f,
-                                    -0.497824f,
-                                    0.322937f,
-                                    -0.180880f,
-                                    0.087658f,
-                                    -0.032102f,
-                                    0.008163f,
-                                    -0.001252f,
-                                    0.000089f,
-                                    0,
-                                    0,
-                                    0};
-    Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+LVM_FLOAT LVM_FO_HPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
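+    /* Polynomial approximation (order parameter 9, evaluated by LVM_Polynomial) in the   */
+    /* normalized frequency w; the coefficients B1, A0 and A1 are derived from its value. */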
+    LVM_FLOAT Y, Coefficients[13] = {-0.999996f, 0.999801f,  -0.497824f, 0.322937f,  -0.180880f,
+                                     0.087658f,  -0.032102f, 0.008163f,  -0.001252f, 0.000089f,
+                                     0,          0,          0};
+    Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
 
-    pCoeffs->B1 = -Y;         /* Store -B1 in filter structure instead of B1!*/
-                            /* A0=(1-B1)/2= B1/2 - 0.5*/
-    Y = Y / 2.0f;                 /* A0=Y=B1/2*/
-    Y = Y - 0.5f;         /* A0=Y=(B1/2 - 0.5)*/
+    pCoeffs->B1 = -Y; /* Store -B1 in filter structure instead of B1!*/
+                      /* A0=(1-B1)/2= B1/2 - 0.5*/
+    Y = Y / 2.0f;     /* A0=Y=B1/2*/
+    Y = Y - 0.5f;     /* A0=Y=(B1/2 - 0.5)*/
 
-    pCoeffs->A0 = Y * FILTER_LOSS_FLOAT;                  /* Apply loss to avoid overflow*/
-    pCoeffs->A1 = -pCoeffs->A0;                           /* Store A1=-A0*/
+    pCoeffs->A0 = Y * FILTER_LOSS_FLOAT; /* Apply loss to avoid overflow*/
+    pCoeffs->A1 = -pCoeffs->A0;          /* Store A1=-A0*/
 
     return 1;
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
index 7bc6046..91964fb 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_FO_LPF.cpp
@@ -67,25 +67,15 @@
 /* RETURNS:                                                                */
 /*                                                                         */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_FO_LPF(   LVM_FLOAT       w,
-                        FO_FLOAT_Coefs_t  *pCoeffs)
-{
-    LVM_FLOAT Y,Coefficients[13] = {-0.999996f,
-                                    0.999801f,
-                                    -0.497824f,
-                                    0.322937f,
-                                    -0.180880f,
-                                    0.087658f,
-                                    -0.032102f,
-                                    0.008163f,
-                                    -0.001252f,
-                                    0.000089f,
-                                    0};
-    Y=LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
-    pCoeffs->B1 = -Y;     // Store -B1 in filter structure instead of B1!
-                        // A0=(1+B1)/2= B1/2 + 0.5
-    Y = Y / 2.0f;             // A0=Y=B1/2
-    Y = Y + 0.5f;     // A0=Y=(B1/2 + 0.5)
+LVM_FLOAT LVM_FO_LPF(LVM_FLOAT w, FO_FLOAT_Coefs_t* pCoeffs) {
+    LVM_FLOAT Y,
+            Coefficients[13] = {-0.999996f, 0.999801f, -0.497824f, 0.322937f, -0.180880f, 0.087658f,
+                                -0.032102f, 0.008163f, -0.001252f, 0.000089f, 0};
+    Y = LVM_Polynomial((LVM_UINT16)9, Coefficients, w);
+    pCoeffs->B1 = -Y;  // Store -B1 in filter structure instead of B1!
+                       // A0=(1+B1)/2= B1/2 + 0.5
+    Y = Y / 2.0f;      // A0=Y=B1/2
+    Y = Y + 0.5f;      // A0=Y=(B1/2 + 0.5)
 
     pCoeffs->A0 = Y * FILTER_LOSS_FLOAT;
     pCoeffs->A1 = pCoeffs->A0;
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
index 2a7cca2..5f25677 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_GetOmega.cpp
@@ -25,41 +25,32 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVVDL_2PiBy_8000        1727108826  /* In Q41 format */
-#define LVVDL_2PiBy_11025       1253230894  /* In Q41 format */
-#define LVVDL_2PiBy_12000       1151405884  /* In Q41 format */
+#define LVVDL_2PiBy_8000 1727108826  /* In Q41 format */
+#define LVVDL_2PiBy_11025 1253230894 /* In Q41 format */
+#define LVVDL_2PiBy_12000 1151405884 /* In Q41 format */
 
-#define LVVDL_2PiByFs_SHIFT1    12          /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
-#define LVVDL_2PiByFs_SHIFT2    13          /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
-#define LVVDL_2PiByFs_SHIFT3    14          /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
-#define LVVDL_2PiBy_8000_f        0.000785398f
-#define LVVDL_2PiBy_11025_f       0.000569903f
-#define LVVDL_2PiBy_12000_f       0.000523599f
-#define LVVDL_2PiBy_16000_f       0.000392700f
-#define LVVDL_2PiBy_22050_f       0.000284952f
-#define LVVDL_2PiBy_24000_f       0.000261800f
-#define LVVDL_2PiBy_32000_f       0.000196350f
-#define LVVDL_2PiBy_44100_f       0.000142476f
-#define LVVDL_2PiBy_48000_f       0.000130900f
+#define LVVDL_2PiByFs_SHIFT1 12 /* Qformat shift for 8kHz, 11.025kHz and 12kHz i.e. 12=41-29 */
+#define LVVDL_2PiByFs_SHIFT2 13 /* Qformat shift for 16kHz, 22.050kHz and 24kHz i.e. 13=42-29 */
+#define LVVDL_2PiByFs_SHIFT3 14 /* Qformat shift for 32kHz, 44.1kHz and 48kHz i.e. 14=43-29 */
+#define LVVDL_2PiBy_8000_f 0.000785398f
+#define LVVDL_2PiBy_11025_f 0.000569903f
+#define LVVDL_2PiBy_12000_f 0.000523599f
+#define LVVDL_2PiBy_16000_f 0.000392700f
+#define LVVDL_2PiBy_22050_f 0.000284952f
+#define LVVDL_2PiBy_24000_f 0.000261800f
+#define LVVDL_2PiBy_32000_f 0.000196350f
+#define LVVDL_2PiBy_44100_f 0.000142476f
+#define LVVDL_2PiBy_48000_f 0.000130900f
 
-#define LVVDL_2PiBy_88200_f       0.000071238f
-#define LVVDL_2PiBy_96000_f       0.000065450f
-#define LVVDL_2PiBy_176400_f      0.000035619f
-#define LVVDL_2PiBy_192000_f      0.000032725f
-const LVM_FLOAT     LVVDL_2PiOnFsTable[] =  {LVVDL_2PiBy_8000_f,
-                                             LVVDL_2PiBy_11025_f,
-                                             LVVDL_2PiBy_12000_f,
-                                             LVVDL_2PiBy_16000_f,
-                                             LVVDL_2PiBy_22050_f,
-                                             LVVDL_2PiBy_24000_f,
-                                             LVVDL_2PiBy_32000_f,
-                                             LVVDL_2PiBy_44100_f,
-                                             LVVDL_2PiBy_48000_f
-                                            ,LVVDL_2PiBy_88200_f
-                                            ,LVVDL_2PiBy_96000_f
-                                            ,LVVDL_2PiBy_176400_f
-                                            ,LVVDL_2PiBy_192000_f
-                                           };
+#define LVVDL_2PiBy_88200_f 0.000071238f
+#define LVVDL_2PiBy_96000_f 0.000065450f
+#define LVVDL_2PiBy_176400_f 0.000035619f
+#define LVVDL_2PiBy_192000_f 0.000032725f
+const LVM_FLOAT LVVDL_2PiOnFsTable[] = {
+        LVVDL_2PiBy_8000_f,  LVVDL_2PiBy_11025_f, LVVDL_2PiBy_12000_f, LVVDL_2PiBy_16000_f,
+        LVVDL_2PiBy_22050_f, LVVDL_2PiBy_24000_f, LVVDL_2PiBy_32000_f, LVVDL_2PiBy_44100_f,
+        LVVDL_2PiBy_48000_f, LVVDL_2PiBy_88200_f, LVVDL_2PiBy_96000_f, LVVDL_2PiBy_176400_f,
+        LVVDL_2PiBy_192000_f};
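+/* Each entry is 2*pi/Fs for the corresponding LVM_Fs_en index; w = Fc * 2*pi/Fs. */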
 /*-------------------------------------------------------------------------*/
 /* FUNCTION:                                                               */
 /*   LVM_GetOmega                                                          */
@@ -77,10 +68,8 @@
 /* RETURNS:                                                                */
 /*   w=2*pi*Fc/Fs in Q2.29 format                                          */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_GetOmega(LVM_UINT32                  Fc,
-                       LVM_Fs_en                   Fs)
-{
-    LVM_FLOAT   w;
+LVM_FLOAT LVM_GetOmega(LVM_UINT32 Fc, LVM_Fs_en Fs) {
+    LVM_FLOAT w;
     w = (LVM_FLOAT)Fc * LVVDL_2PiOnFsTable[Fs];
     return w;
 }
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
index 244f09d..31dcaa4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_FilterCoeffs.h
@@ -33,109 +33,109 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define Alpha_TableSize                        50      /* The number of table entires */
-#define ALPHA_0                        2147480769      /* Floating point Alpha = 0.999999 */
-#define ALPHA_1                        2147479577      /* Floating point Alpha = 0.999998 */
-#define ALPHA_2                        2147477892      /* Floating point Alpha = 0.999997 */
-#define ALPHA_3                        2147475510      /* Floating point Alpha = 0.999996 */
-#define ALPHA_4                        2147472141      /* Floating point Alpha = 0.999995 */
-#define ALPHA_5                        2147467377      /* Floating point Alpha = 0.999992 */
-#define ALPHA_6                        2147460642      /* Floating point Alpha = 0.999989 */
-#define ALPHA_7                        2147451118      /* Floating point Alpha = 0.999985 */
-#define ALPHA_8                        2147437651      /* Floating point Alpha = 0.999979 */
-#define ALPHA_9                        2147418608      /* Floating point Alpha = 0.999970 */
-#define ALPHA_10                       2147391683      /* Floating point Alpha = 0.999957 */
-#define ALPHA_11                       2147353611      /* Floating point Alpha = 0.999939 */
-#define ALPHA_12                       2147299779      /* Floating point Alpha = 0.999914 */
-#define ALPHA_13                       2147223662      /* Floating point Alpha = 0.999879 */
-#define ALPHA_14                       2147116037      /* Floating point Alpha = 0.999829 */
-#define ALPHA_15                       2146963865      /* Floating point Alpha = 0.999758 */
-#define ALPHA_16                       2146748712      /* Floating point Alpha = 0.999658 */
-#define ALPHA_17                       2146444522      /* Floating point Alpha = 0.999516 */
-#define ALPHA_18                       2146014472      /* Floating point Alpha = 0.999316 */
-#define ALPHA_19                       2145406527      /* Floating point Alpha = 0.999033 */
-#define ALPHA_20                       2144547188      /* Floating point Alpha = 0.998633 */
-#define ALPHA_21                       2143332669      /* Floating point Alpha = 0.998067 */
-#define ALPHA_22                       2141616514      /* Floating point Alpha = 0.997268 */
-#define ALPHA_23                       2139192215      /* Floating point Alpha = 0.996139 */
-#define ALPHA_24                       2135768939      /* Floating point Alpha = 0.994545 */
-#define ALPHA_25                       2130937774      /* Floating point Alpha = 0.992295 */
-#define ALPHA_26                       2124125153      /* Floating point Alpha = 0.989123 */
-#define ALPHA_27                       2114529263      /* Floating point Alpha = 0.984654 */
-#define ALPHA_28                       2101034612      /* Floating point Alpha = 0.978370 */
-#define ALPHA_29                       2082100030      /* Floating point Alpha = 0.969553 */
-#define ALPHA_30                       2055617398      /* Floating point Alpha = 0.957221 */
-#define ALPHA_31                       2018744824      /* Floating point Alpha = 0.940051 */
-#define ALPHA_32                       1967733015      /* Floating point Alpha = 0.916297 */
-#define ALPHA_33                       1897794587      /* Floating point Alpha = 0.883729 */
-#define ALPHA_34                       1803123234      /* Floating point Alpha = 0.839645 */
-#define ALPHA_35                       1677262220      /* Floating point Alpha = 0.781036 */
-#define ALPHA_36                       1514142675      /* Floating point Alpha = 0.705078 */
-#define ALPHA_37                       1310197875      /* Floating point Alpha = 0.610108 */
-#define ALPHA_38                       1067813480      /* Floating point Alpha = 0.497239 */
-#define ALPHA_39                        799601371      /* Floating point Alpha = 0.372343 */
-#define ALPHA_40                        531183049      /* Floating point Alpha = 0.247351 */
-#define ALPHA_41                        297904007      /* Floating point Alpha = 0.138722 */
-#define ALPHA_42                        131499768      /* Floating point Alpha = 0.061234 */
-#define ALPHA_43                         41375282      /* Floating point Alpha = 0.019267 */
-#define ALPHA_44                          8065899      /* Floating point Alpha = 0.003756 */
-#define ALPHA_45                           799076      /* Floating point Alpha = 0.000372 */
-#define ALPHA_46                            30398      /* Floating point Alpha = 0.000014 */
-#define ALPHA_47                              299      /* Floating point Alpha = 0.000000 */
-#define ALPHA_48                                0      /* Floating point Alpha = 0.000000 */
-#define ALPHA_49                                0      /* Floating point Alpha = 0.000000 */
-#define ALPHA_50                                0      /* Floating point Alpha = 0.000000 */
+#define Alpha_TableSize 50  /* The number of table entries */
+#define ALPHA_0 2147480769  /* Floating point Alpha = 0.999999 */
+#define ALPHA_1 2147479577  /* Floating point Alpha = 0.999998 */
+#define ALPHA_2 2147477892  /* Floating point Alpha = 0.999997 */
+#define ALPHA_3 2147475510  /* Floating point Alpha = 0.999996 */
+#define ALPHA_4 2147472141  /* Floating point Alpha = 0.999995 */
+#define ALPHA_5 2147467377  /* Floating point Alpha = 0.999992 */
+#define ALPHA_6 2147460642  /* Floating point Alpha = 0.999989 */
+#define ALPHA_7 2147451118  /* Floating point Alpha = 0.999985 */
+#define ALPHA_8 2147437651  /* Floating point Alpha = 0.999979 */
+#define ALPHA_9 2147418608  /* Floating point Alpha = 0.999970 */
+#define ALPHA_10 2147391683 /* Floating point Alpha = 0.999957 */
+#define ALPHA_11 2147353611 /* Floating point Alpha = 0.999939 */
+#define ALPHA_12 2147299779 /* Floating point Alpha = 0.999914 */
+#define ALPHA_13 2147223662 /* Floating point Alpha = 0.999879 */
+#define ALPHA_14 2147116037 /* Floating point Alpha = 0.999829 */
+#define ALPHA_15 2146963865 /* Floating point Alpha = 0.999758 */
+#define ALPHA_16 2146748712 /* Floating point Alpha = 0.999658 */
+#define ALPHA_17 2146444522 /* Floating point Alpha = 0.999516 */
+#define ALPHA_18 2146014472 /* Floating point Alpha = 0.999316 */
+#define ALPHA_19 2145406527 /* Floating point Alpha = 0.999033 */
+#define ALPHA_20 2144547188 /* Floating point Alpha = 0.998633 */
+#define ALPHA_21 2143332669 /* Floating point Alpha = 0.998067 */
+#define ALPHA_22 2141616514 /* Floating point Alpha = 0.997268 */
+#define ALPHA_23 2139192215 /* Floating point Alpha = 0.996139 */
+#define ALPHA_24 2135768939 /* Floating point Alpha = 0.994545 */
+#define ALPHA_25 2130937774 /* Floating point Alpha = 0.992295 */
+#define ALPHA_26 2124125153 /* Floating point Alpha = 0.989123 */
+#define ALPHA_27 2114529263 /* Floating point Alpha = 0.984654 */
+#define ALPHA_28 2101034612 /* Floating point Alpha = 0.978370 */
+#define ALPHA_29 2082100030 /* Floating point Alpha = 0.969553 */
+#define ALPHA_30 2055617398 /* Floating point Alpha = 0.957221 */
+#define ALPHA_31 2018744824 /* Floating point Alpha = 0.940051 */
+#define ALPHA_32 1967733015 /* Floating point Alpha = 0.916297 */
+#define ALPHA_33 1897794587 /* Floating point Alpha = 0.883729 */
+#define ALPHA_34 1803123234 /* Floating point Alpha = 0.839645 */
+#define ALPHA_35 1677262220 /* Floating point Alpha = 0.781036 */
+#define ALPHA_36 1514142675 /* Floating point Alpha = 0.705078 */
+#define ALPHA_37 1310197875 /* Floating point Alpha = 0.610108 */
+#define ALPHA_38 1067813480 /* Floating point Alpha = 0.497239 */
+#define ALPHA_39 799601371  /* Floating point Alpha = 0.372343 */
+#define ALPHA_40 531183049  /* Floating point Alpha = 0.247351 */
+#define ALPHA_41 297904007  /* Floating point Alpha = 0.138722 */
+#define ALPHA_42 131499768  /* Floating point Alpha = 0.061234 */
+#define ALPHA_43 41375282   /* Floating point Alpha = 0.019267 */
+#define ALPHA_44 8065899    /* Floating point Alpha = 0.003756 */
+#define ALPHA_45 799076     /* Floating point Alpha = 0.000372 */
+#define ALPHA_46 30398      /* Floating point Alpha = 0.000014 */
+#define ALPHA_47 299        /* Floating point Alpha = 0.000000 */
+#define ALPHA_48 0          /* Floating point Alpha = 0.000000 */
+#define ALPHA_49 0          /* Floating point Alpha = 0.000000 */
+#define ALPHA_50 0          /* Floating point Alpha = 0.000000 */
 
-#define ALPHA_Float_0                        0.999999f
-#define ALPHA_Float_1                        0.999998f
-#define ALPHA_Float_2                        0.999997f
-#define ALPHA_Float_3                        0.999996f
-#define ALPHA_Float_4                        0.999995f
-#define ALPHA_Float_5                        0.999992f
-#define ALPHA_Float_6                        0.999989f
-#define ALPHA_Float_7                        0.999985f
-#define ALPHA_Float_8                        0.999979f
-#define ALPHA_Float_9                        0.999970f
-#define ALPHA_Float_10                       0.999957f
-#define ALPHA_Float_11                       0.999939f
-#define ALPHA_Float_12                       0.999914f
-#define ALPHA_Float_13                       0.999879f
-#define ALPHA_Float_14                       0.999829f
-#define ALPHA_Float_15                       0.999758f
-#define ALPHA_Float_16                       0.999658f
-#define ALPHA_Float_17                       0.999516f
-#define ALPHA_Float_18                       0.999316f
-#define ALPHA_Float_19                       0.999033f
-#define ALPHA_Float_20                       0.998633f
-#define ALPHA_Float_21                       0.998067f
-#define ALPHA_Float_22                       0.997268f
-#define ALPHA_Float_23                       0.996139f
-#define ALPHA_Float_24                       0.994545f
-#define ALPHA_Float_25                       0.992295f
-#define ALPHA_Float_26                       0.989123f
-#define ALPHA_Float_27                       0.984654f
-#define ALPHA_Float_28                       0.978370f
-#define ALPHA_Float_29                       0.969553f
-#define ALPHA_Float_30                       0.957221f
-#define ALPHA_Float_31                       0.940051f
-#define ALPHA_Float_32                       0.916297f
-#define ALPHA_Float_33                       0.883729f
-#define ALPHA_Float_34                       0.839645f
-#define ALPHA_Float_35                       0.781036f
-#define ALPHA_Float_36                       0.705078f
-#define ALPHA_Float_37                       0.610108f
-#define ALPHA_Float_38                       0.497239f
-#define ALPHA_Float_39                       0.372343f
-#define ALPHA_Float_40                       0.247351f
-#define ALPHA_Float_41                       0.138722f
-#define ALPHA_Float_42                       0.061234f
-#define ALPHA_Float_43                       0.019267f
-#define ALPHA_Float_44                       0.003756f
-#define ALPHA_Float_45                       0.000372f
-#define ALPHA_Float_46                       0.000014f
-#define ALPHA_Float_47                       0.000000f
-#define ALPHA_Float_48                       0.000000f
-#define ALPHA_Float_49                       0.000000f
-#define ALPHA_Float_50                       0.000000f
+#define ALPHA_Float_0 0.999999f
+#define ALPHA_Float_1 0.999998f
+#define ALPHA_Float_2 0.999997f
+#define ALPHA_Float_3 0.999996f
+#define ALPHA_Float_4 0.999995f
+#define ALPHA_Float_5 0.999992f
+#define ALPHA_Float_6 0.999989f
+#define ALPHA_Float_7 0.999985f
+#define ALPHA_Float_8 0.999979f
+#define ALPHA_Float_9 0.999970f
+#define ALPHA_Float_10 0.999957f
+#define ALPHA_Float_11 0.999939f
+#define ALPHA_Float_12 0.999914f
+#define ALPHA_Float_13 0.999879f
+#define ALPHA_Float_14 0.999829f
+#define ALPHA_Float_15 0.999758f
+#define ALPHA_Float_16 0.999658f
+#define ALPHA_Float_17 0.999516f
+#define ALPHA_Float_18 0.999316f
+#define ALPHA_Float_19 0.999033f
+#define ALPHA_Float_20 0.998633f
+#define ALPHA_Float_21 0.998067f
+#define ALPHA_Float_22 0.997268f
+#define ALPHA_Float_23 0.996139f
+#define ALPHA_Float_24 0.994545f
+#define ALPHA_Float_25 0.992295f
+#define ALPHA_Float_26 0.989123f
+#define ALPHA_Float_27 0.984654f
+#define ALPHA_Float_28 0.978370f
+#define ALPHA_Float_29 0.969553f
+#define ALPHA_Float_30 0.957221f
+#define ALPHA_Float_31 0.940051f
+#define ALPHA_Float_32 0.916297f
+#define ALPHA_Float_33 0.883729f
+#define ALPHA_Float_34 0.839645f
+#define ALPHA_Float_35 0.781036f
+#define ALPHA_Float_36 0.705078f
+#define ALPHA_Float_37 0.610108f
+#define ALPHA_Float_38 0.497239f
+#define ALPHA_Float_39 0.372343f
+#define ALPHA_Float_40 0.247351f
+#define ALPHA_Float_41 0.138722f
+#define ALPHA_Float_42 0.061234f
+#define ALPHA_Float_43 0.019267f
+#define ALPHA_Float_44 0.003756f
+#define ALPHA_Float_45 0.000372f
+#define ALPHA_Float_46 0.000014f
+#define ALPHA_Float_47 0.000000f
+#define ALPHA_Float_48 0.000000f
+#define ALPHA_Float_49 0.000000f
+#define ALPHA_Float_50 0.000000f
 
 #endif
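
The two tables above hold the same smoothing coefficients twice: ALPHA_n as Q31 integers and ALPHA_Float_n as plain floats, the Q31 form being the coefficient scaled by 2^31 and the float form rounded to six decimals. A standalone check of that relationship for the first entry:

    // Divide a Q31 alpha by 2^31 and compare it with its float twin.
    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const std::int64_t q31 = 2147480769;  // ALPHA_0 above
        const double floatTwin = 0.999999;    // ALPHA_Float_0 above
        const double fromQ31 = static_cast<double>(q31) / 2147483648.0;  // 2^31
        std::printf("from Q31: %.6f  float table: %.6f  diff: %.1e\n",
                    fromQ31, floatTwin, std::fabs(fromQ31 - floatTwin));
        return 0;
    }
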
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
index 73da2cf..3ec103a 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Mixer_TimeConstant.cpp
@@ -56,83 +56,37 @@
 /*  Alpha   - the filter coefficient Q31 format                         */
 /*                                                                      */
 /************************************************************************/
-LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32   tc,
-                                  LVM_UINT32   Fs,
-                                  LVM_UINT16   NumChannels)
-{
-
-    LVM_UINT32  Product;
-    LVM_FLOAT  ProductFloat;
-    LVM_INT16   InterpolateShort;
-    LVM_FLOAT   Interpolate;
-    LVM_UINT16  Shift;
-    LVM_FLOAT   Diff;
-    LVM_FLOAT  Table[] = {ALPHA_Float_0,             /* Log spaced look-up table */
-                          ALPHA_Float_1,
-                          ALPHA_Float_2,
-                          ALPHA_Float_3,
-                          ALPHA_Float_4,
-                          ALPHA_Float_5,
-                          ALPHA_Float_6,
-                          ALPHA_Float_7,
-                          ALPHA_Float_8,
-                          ALPHA_Float_9,
-                          ALPHA_Float_10,
-                          ALPHA_Float_11,
-                          ALPHA_Float_12,
-                          ALPHA_Float_13,
-                          ALPHA_Float_14,
-                          ALPHA_Float_15,
-                          ALPHA_Float_16,
-                          ALPHA_Float_17,
-                          ALPHA_Float_18,
-                          ALPHA_Float_19,
-                          ALPHA_Float_20,
-                          ALPHA_Float_21,
-                          ALPHA_Float_22,
-                          ALPHA_Float_23,
-                          ALPHA_Float_24,
-                          ALPHA_Float_25,
-                          ALPHA_Float_26,
-                          ALPHA_Float_27,
-                          ALPHA_Float_28,
-                          ALPHA_Float_29,
-                          ALPHA_Float_30,
-                          ALPHA_Float_31,
-                          ALPHA_Float_32,
-                          ALPHA_Float_33,
-                          ALPHA_Float_34,
-                          ALPHA_Float_35,
-                          ALPHA_Float_36,
-                          ALPHA_Float_37,
-                          ALPHA_Float_38,
-                          ALPHA_Float_39,
-                          ALPHA_Float_40,
-                          ALPHA_Float_41,
-                          ALPHA_Float_42,
-                          ALPHA_Float_43,
-                          ALPHA_Float_44,
-                          ALPHA_Float_45,
-                          ALPHA_Float_46,
-                          ALPHA_Float_47,
-                          ALPHA_Float_48,
-                          ALPHA_Float_49,
-                          ALPHA_Float_50};
+LVM_FLOAT LVM_Mixer_TimeConstant(LVM_UINT32 tc, LVM_UINT32 Fs, LVM_UINT16 NumChannels) {
+    LVM_UINT32 Product;
+    LVM_FLOAT ProductFloat;
+    LVM_INT16 InterpolateShort;
+    LVM_FLOAT Interpolate;
+    LVM_UINT16 Shift;
+    LVM_FLOAT Diff;
+    LVM_FLOAT Table[] = {
+            ALPHA_Float_0, /* Log spaced look-up table */
+            ALPHA_Float_1,  ALPHA_Float_2,  ALPHA_Float_3,  ALPHA_Float_4,  ALPHA_Float_5,
+            ALPHA_Float_6,  ALPHA_Float_7,  ALPHA_Float_8,  ALPHA_Float_9,  ALPHA_Float_10,
+            ALPHA_Float_11, ALPHA_Float_12, ALPHA_Float_13, ALPHA_Float_14, ALPHA_Float_15,
+            ALPHA_Float_16, ALPHA_Float_17, ALPHA_Float_18, ALPHA_Float_19, ALPHA_Float_20,
+            ALPHA_Float_21, ALPHA_Float_22, ALPHA_Float_23, ALPHA_Float_24, ALPHA_Float_25,
+            ALPHA_Float_26, ALPHA_Float_27, ALPHA_Float_28, ALPHA_Float_29, ALPHA_Float_30,
+            ALPHA_Float_31, ALPHA_Float_32, ALPHA_Float_33, ALPHA_Float_34, ALPHA_Float_35,
+            ALPHA_Float_36, ALPHA_Float_37, ALPHA_Float_38, ALPHA_Float_39, ALPHA_Float_40,
+            ALPHA_Float_41, ALPHA_Float_42, ALPHA_Float_43, ALPHA_Float_44, ALPHA_Float_45,
+            ALPHA_Float_46, ALPHA_Float_47, ALPHA_Float_48, ALPHA_Float_49, ALPHA_Float_50};
 
     /* Calculate the product of the time constant and the sample rate */
-    Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13;  /* Stereo value */
+    Product = ((tc >> 16) * (LVM_UINT32)Fs) << 13; /* Stereo value */
     Product = Product + (((tc & 0x0000FFFF) * (LVM_UINT32)Fs) >> 3);
 
-    if (NumChannels == 1)
-    {
-        Product = Product >> 1;   /* Mono value */
+    if (NumChannels == 1) {
+        Product = Product >> 1; /* Mono value */
     }
 
     /* Normalize to get the table index and interpolation factor */
-    for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++)
-    {
-        if ((Product & 0x80000000) != 0)
-        {
+    for (Shift = 0; Shift < ((Alpha_TableSize - 1) / 2); Shift++) {
+        if ((Product & 0x80000000) != 0) {
             break;
         }
 
@@ -140,8 +94,7 @@
     }
     Shift = (LVM_UINT16)((Shift << 1));
 
-    if ((Product & 0x40000000)==0)
-    {
+    if ((Product & 0x40000000) == 0) {
         Shift++;
     }
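
The normalization loop above is a coarse base-2 logarithm: Product is shifted up until bit 31 is set (two bits per pass, judging by the halved loop bound and the later Shift << 1), and bit 30 then refines the result by one step, producing an index into the log-spaced alpha table. A plain-integer sketch of that index computation, with the two-bit shift stated as an assumption since the shift statement itself falls in the elided part of the hunk:

    // Derive a log-spaced table index from a 32-bit magnitude (sketch).
    #include <cstdint>

    unsigned LogIndexSketch(std::uint32_t product, unsigned maxPairs) {
        unsigned pairs = 0;
        while (pairs < maxPairs && (product & 0x80000000u) == 0) {
            product <<= 2;  // assumed: two bits per iteration
            ++pairs;
        }
        unsigned index = pairs << 1;                // two table steps per pair
        if ((product & 0x40000000u) == 0) ++index;  // refine with bit 30
        return index;
    }
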
 
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
index 2c3e9ec..8382529 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Polynomial.cpp
@@ -40,33 +40,25 @@
 /* RETURNS:                                                                */
 /*   The result of the polynomial expansion in Q1.31 format                */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Polynomial(LVM_UINT16    N,
-                         LVM_FLOAT    *pCoefficients,
-                         LVM_FLOAT    X)
-{
+LVM_FLOAT LVM_Polynomial(LVM_UINT16 N, LVM_FLOAT* pCoefficients, LVM_FLOAT X) {
     LVM_INT32 i;
-    LVM_FLOAT Y,A,XTemp,Temp,sign;
+    LVM_FLOAT Y, A, XTemp, Temp, sign;
 
     Y = *pCoefficients; /* Y=A0*/
     pCoefficients++;
 
-    if(X == -1.0f)
-    {
+    if (X == -1.0f) {
         Temp = -1;
         sign = Temp;
-        for(i = 1; i <= N; i++)
-        {
+        for (i = 1; i <= N; i++) {
             Y += ((*pCoefficients) * sign);
             pCoefficients++;
             sign *= Temp;
         }
 
-    }
-    else
-    {
+    } else {
         XTemp = X;
-        for(i = N-1; i >= 0; i--)
-        {
+        for (i = N - 1; i >= 0; i--) {
             A = *pCoefficients;
             pCoefficients++;
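
In the X == -1.0f branch above the polynomial collapses to a sum of coefficients with alternating signs, i.e. the sum of c_i * (-1)^i. For the general branch, which the hunk mostly elides, a polynomial in this form is conventionally evaluated with Horner's rule; the sketch below shows that rule with plain floats and is not claimed to be the library's exact loop:

    // Horner evaluation of y = c[0] + c[1]*x + ... + c[n]*x^n (sketch).
    float PolynomialSketch(unsigned n, const float* c, float x) {
        float y = c[n];
        for (unsigned i = n; i > 0; --i) {
            y = y * x + c[i - 1];
        }
        return y;
    }
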
 
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
index ae8e9d1..85596aa 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Power10.cpp
@@ -53,23 +53,10 @@
 /* RETURNS:                                                                */
 /*   The result of the 10x expansion in Q8.24 format                       */
 /*-------------------------------------------------------------------------*/
-LVM_FLOAT LVM_Power10(LVM_FLOAT     X)
-{
-    LVM_FLOAT Y,Coefficients[13]={0.999906f,
-                                  2.302475f,
-                                  2.652765f,
-                                  2.035494f,
-                                  1.165667f,
-                                  0.537676f,
-                                  0.213192f,
-                                  0.069603f,
-                                  0.016553f,
-                                  0.004373f,
-                                  0.001817f,
-                                  0.000367f,
-                                  0};
-    Y=LVM_Polynomial((LVM_UINT16)11,
-                     Coefficients,
-                     X);
+LVM_FLOAT LVM_Power10(LVM_FLOAT X) {
+    LVM_FLOAT Y, Coefficients[13] = {0.999906f, 2.302475f, 2.652765f, 2.035494f, 1.165667f,
+                                     0.537676f, 0.213192f, 0.069603f, 0.016553f, 0.004373f,
+                                     0.001817f, 0.000367f, 0};
+    Y = LVM_Polynomial((LVM_UINT16)11, Coefficients, X);
     return Y;
 }
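
The coefficient list in the rewritten LVM_Power10 tracks the series for 10^x = e^(x ln 10), whose exact terms are (ln 10)^k / k!: 1, 2.3026, 2.6509, 2.0347, 1.1712, 0.5394, ... A standalone comparison of those series terms against the first few table entries:

    // Print (ln 10)^k / k! next to the LVM_Power10 coefficients above.
    #include <cmath>
    #include <cstdio>

    int main() {
        const double tabled[] = {0.999906, 2.302475, 2.652765,
                                 2.035494, 1.165667, 0.537676};
        double term = 1.0;  // (ln 10)^0 / 0!
        for (int k = 0; k < 6; ++k) {
            std::printf("k=%d  series=%.6f  table=%.6f\n", k, term, tabled[k]);
            term = term * std::log(10.0) / (k + 1);  // next series term
        }
        return 0;
    }
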
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
index 5995f54..be7c8e4 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer.cpp
@@ -26,19 +26,16 @@
 /*  TIMER FUNCTION                                                                      */
 /****************************************************************************************/
 
-void LVM_Timer      (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_INT16                  BlockSize ){
+void LVM_Timer(LVM_Timer_Instance_t* pInstance, LVM_INT16 BlockSize) {
+    LVM_Timer_Instance_Private_t* pInstancePr;
+    pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
 
-    LVM_Timer_Instance_Private_t *pInstancePr;
-    pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
-    if (pInstancePr->TimerArmed){
+    if (pInstancePr->TimerArmed) {
         pInstancePr->RemainingTimeInSamples -= BlockSize;
-        if (pInstancePr->RemainingTimeInSamples <= 0){
+        if (pInstancePr->RemainingTimeInSamples <= 0) {
             pInstancePr->TimerArmed = 0;
-            (*pInstancePr->pCallBack) ( pInstancePr->pCallbackInstance,
-                                        pInstancePr->pCallBackParams,
-                                        pInstancePr->CallBackParam );
+            (*pInstancePr->pCallBack)(pInstancePr->pCallbackInstance, pInstancePr->pCallBackParams,
+                                      pInstancePr->CallBackParam);
         }
     }
 }
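
LVM_Timer above is a countdown expressed in samples: each processed block subtracts BlockSize from the remaining count, and when the count reaches zero the stored callback fires once and the timer disarms. A self-contained sketch of the same pattern; the struct and names are illustrative, not the library's private types:

    // Block-based countdown timer that fires a callback once, then disarms.
    struct TimerSketch {
        long remainingSamples = 0;
        bool armed = false;
        void (*callback)(void* cookie) = nullptr;
        void* cookie = nullptr;
    };

    void TimerTick(TimerSketch& t, int blockSize) {
        if (!t.armed) return;
        t.remainingSamples -= blockSize;
        if (t.remainingSamples <= 0) {
            t.armed = false;  // one-shot: disarm before invoking the callback
            if (t.callback) t.callback(t.cookie);
        }
    }
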
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
index 3015057..bfd6bcf 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Init.cpp
@@ -33,20 +33,20 @@
 /*  INIT FUNCTION                                                                       */
 /****************************************************************************************/
 
-void LVM_Timer_Init (   LVM_Timer_Instance_t       *pInstance,
-                        LVM_Timer_Params_t         *pParams     ){
+void LVM_Timer_Init(LVM_Timer_Instance_t* pInstance, LVM_Timer_Params_t* pParams) {
+    LVM_Timer_Instance_Private_t* pInstancePr;
+    pInstancePr = (LVM_Timer_Instance_Private_t*)pInstance;
 
-    LVM_Timer_Instance_Private_t *pInstancePr;
-    pInstancePr = (LVM_Timer_Instance_Private_t *)pInstance;
-
-    pInstancePr->CallBackParam     = pParams->CallBackParam;
-    pInstancePr->pCallBackParams   = (LVM_INT32 *)pParams->pCallBackParams;
+    pInstancePr->CallBackParam = pParams->CallBackParam;
+    pInstancePr->pCallBackParams = (LVM_INT32*)pParams->pCallBackParams;
     pInstancePr->pCallbackInstance = pParams->pCallbackInstance;
-    pInstancePr->pCallBack         = pParams->pCallBack;
-    pInstancePr->TimerArmed        = 1;
+    pInstancePr->pCallBack = pParams->pCallBack;
+    pInstancePr->TimerArmed = 1;
 
-    MUL32x16INTO32(pParams->SamplingRate,OneOverThousandInQ24,pInstancePr->RemainingTimeInSamples,16);  /* (Q0 * Q24) >>16 into Q8*/
-    MUL32x16INTO32(pInstancePr->RemainingTimeInSamples,pParams->TimeInMs,pInstancePr->RemainingTimeInSamples,8);  /* (Q8 * Q0) >>8 into Q0*/
+    MUL32x16INTO32(pParams->SamplingRate, OneOverThousandInQ24, pInstancePr->RemainingTimeInSamples,
+                   16); /* (Q0 * Q24) >>16 into Q8*/
+    MUL32x16INTO32(pInstancePr->RemainingTimeInSamples, pParams->TimeInMs,
+                   pInstancePr->RemainingTimeInSamples, 8); /* (Q8 * Q0) >>8 into Q0*/
 }
 
 /****************************************************************************************/
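
The two MUL32x16INTO32 calls above compute RemainingTimeInSamples = SamplingRate * TimeInMs / 1000 in fixed point: Fs times a Q24 reciprocal of 1000 is dropped to Q8, then multiplied by the millisecond count and dropped back to Q0. A sketch comparing that route with plain arithmetic; the Q24 constant is assumed to be round(2^24 / 1000):

    // samples = Fs * ms / 1000, once in floating point and once through the
    // Q-format steps noted in the comments above (Q24 constant assumed).
    #include <cstdint>
    #include <cstdio>

    int main() {
        const std::uint32_t fs = 48000, ms = 250;
        const std::uint32_t oneOverThousandQ24 = 16777;  // ~2^24 / 1000

        const std::uint64_t q8 =
                (static_cast<std::uint64_t>(fs) * oneOverThousandQ24) >> 16;
        const std::uint64_t samplesFixed = (q8 * ms) >> 8;  // (Q8 * Q0) >> 8
        const double samplesFloat = static_cast<double>(fs) * ms / 1000.0;

        std::printf("fixed: %llu  float: %.1f\n",
                    static_cast<unsigned long long>(samplesFixed), samplesFloat);
        return 0;
    }
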
diff --git a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
index a372b82..3e8aba8 100644
--- a/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVM_Timer_Private.h
@@ -24,14 +24,13 @@
 /*  TYPE DEFINITIONS                                                                    */
 /****************************************************************************************/
 
-typedef struct
-{
-    LVM_INT32  RemainingTimeInSamples;
-    LVM_INT32  CallBackParam;
-    LVM_INT32  *pCallBackParams;
-    void  *pCallbackInstance;
-    void  (*pCallBack)(void*,void*,LVM_INT32);
-    LVM_INT16 TimerArmed;                        /* Boolean, true between init and callback */
+typedef struct {
+    LVM_INT32 RemainingTimeInSamples;
+    LVM_INT32 CallBackParam;
+    LVM_INT32* pCallBackParams;
+    void* pCallbackInstance;
+    void (*pCallBack)(void*, void*, LVM_INT32);
+    LVM_INT16 TimerArmed; /* Boolean, true between init and callback */
 
 } LVM_Timer_Instance_Private_t;
 
@@ -39,4 +38,4 @@
 /*  END OF HEADER                                                                       */
 /****************************************************************************************/
 
-#endif  /* LVM_TIMER_PRIVATE_H */
+#endif /* LVM_TIMER_PRIVATE_H */
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
deleted file mode 100644
index f88ca0e..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_16.cpp
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_16
-***********************************************************************************/
-
-void LoadConst_16(const LVM_INT16 val,
-                  LVM_INT16 *dst,
-                  LVM_INT16 n )
-{
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp b/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
deleted file mode 100644
index c789756..0000000
--- a/media/libeffects/lvm/lib/Common/src/LoadConst_32.cpp
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION LoadConst_32
-***********************************************************************************/
-void LoadConst_Float(const LVM_FLOAT   val,
-                     LVM_FLOAT  *dst,
-                     LVM_INT16 n )
-{
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = val;
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
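
The LoadConst_16 and LoadConst_Float helpers deleted above were plain constant-fill loops, which the standard library already covers; a one-line equivalent for the float case:

    // Constant fill without the removed helpers.
    #include <algorithm>
    #include <vector>

    int main() {
        std::vector<float> buf(256);
        std::fill_n(buf.data(), buf.size(), 0.25f);  // was LoadConst_Float(...)
        return 0;
    }
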
diff --git a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
index 1ea765a..e5c6f15 100644
--- a/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MSTo2i_Sat_16x16.cpp
@@ -18,74 +18,14 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION  MSTO2I_SAT_16X16
-***********************************************************************************/
-
-void MSTo2i_Sat_16x16(const LVM_INT16  *srcM,
-                      const LVM_INT16  *srcS,
-                      LVM_INT16  *dst,
-                      LVM_INT16  n )
-{
-    LVM_INT32 temp,mVal,sVal;
+void MSTo2i_Sat_Float(const LVM_FLOAT* srcM, const LVM_FLOAT* srcS, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT temp, mVal, sVal;
     LVM_INT16 ii;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        mVal=(LVM_INT32)*srcM;
-        srcM++;
-
-        sVal=(LVM_INT32)*srcS;
-        srcS++;
-
-        temp = mVal + sVal;
-
-        if (temp > 0x00007FFF)
-        {
-            *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
-            *dst = (LVM_INT16)temp;
-        }
-        dst++;
-
-        temp = mVal - sVal;
-
-        if (temp > 0x00007FFF)
-        {
-            *dst = 0x7FFF;
-        }
-        else if (temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
-            *dst = (LVM_INT16)temp;
-        }
-        dst++;
-    }
-
-    return;
-}
-void MSTo2i_Sat_Float(const LVM_FLOAT  *srcM,
-                      const LVM_FLOAT  *srcS,
-                      LVM_FLOAT  *dst,
-                      LVM_INT16  n )
-{
-    LVM_FLOAT temp,mVal,sVal;
-    LVM_INT16 ii;
-
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         mVal = (LVM_FLOAT)*srcM;
         srcM++;
 
@@ -93,38 +33,11 @@
         srcS++;
 
         temp = mVal + sVal;
-
-        if (temp > 1.0f)
-        {
-            *dst = 1.0f;
-        }
-        else if (temp < -1.0f)
-        {
-            *dst = -1.0f;
-        }
-        else
-        {
-            *dst = (LVM_FLOAT)temp;
-        }
-        dst++;
+        *dst++ = LVM_Clamp(temp);
 
         temp = mVal - sVal;
-
-        if (temp > 1.0f)
-        {
-            *dst = 1.0f;
-        }
-        else if (temp < -1.0f)
-        {
-            *dst = - 1.0f;
-        }
-        else
-        {
-            *dst = (LVM_FLOAT)temp;
-        }
-        dst++;
+        *dst++ = LVM_Clamp(temp);
     }
 
     return;
 }
-/**********************************************************************************/
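
MSTo2i_Sat_Float reconstructs an interleaved left/right pair from middle/side inputs, L = M + S and R = M - S, each saturated to the [-1, 1] float full-scale range (the removed 16-bit variant did the same against the int16 limits). A plain-float sketch with a local clamp standing in for LVM_Clamp:

    // Mid/side to interleaved stereo with saturation (sketch).
    #include <algorithm>

    void MsToStereoSketch(const float* m, const float* s, float* dst, int n) {
        for (int i = 0; i < n; ++i) {
            *dst++ = std::clamp(m[i] + s[i], -1.0f, 1.0f);  // left
            *dst++ = std::clamp(m[i] - s[i], -1.0f, 1.0f);  // right
        }
    }
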
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
deleted file mode 100644
index 6584251..0000000
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_16x16.cpp
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-
-     %created_by:    sra % (CM/S)
-     %name:          Mac3s_Sat_16x16.c % (CM/S)
-     %version:       1 % (CM/S)
-     %date_created:  Fri Nov 13 12:07:13 2009 % (CM/S)
-
-***********************************************************************************/
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-#include "LVM_Macros.h"
-
-/**********************************************************************************
-   FUNCTION Mac3S_16X16
-***********************************************************************************/
-
-void Mac3s_Sat_16x16( const LVM_INT16 *src,
-                     const LVM_INT16 val,
-                     LVM_INT16 *dst,
-                     LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_INT16 srcval;
-    LVM_INT32 Temp,dInVal;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
-        src++;
-
-        Temp = (srcval *val)>>15;
-
-        dInVal  = (LVM_INT32)*dst;
-
-        Temp = Temp + dInVal;
-
-        if (Temp > 0x00007FFF)
-        {
-            *dst = 0x7FFF;
-        }
-        else if (Temp < -0x00008000)
-        {
-            *dst = - 0x8000;
-        }
-        else
-        {
-            *dst = (LVM_INT16)Temp;
-        }
-
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
index 5d5564f..24bdf3e 100644
--- a/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mac3s_Sat_32x16.cpp
@@ -18,85 +18,19 @@
 /**********************************************************************************
    INCLUDE FILES
 ***********************************************************************************/
-
+#include "ScalarArithmetic.h"
 #include "VectorArithmetic.h"
 #include "LVM_Macros.h"
 
-/**********************************************************************************
-   FUNCTION MAC3S_16X16
-***********************************************************************************/
-
-void Mac3s_Sat_32x16(  const LVM_INT32 *src,
-                     const LVM_INT16 val,
-                     LVM_INT32 *dst,
-                     LVM_INT16 n)
-{
+void Mac3s_Sat_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
-    LVM_INT32 srcval,temp, dInVal, dOutVal;
 
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
-        src++;
+    for (ii = n; ii != 0; ii--) {
+        LVM_FLOAT Temp = *src++ * val;
+        Temp += *dst;
 
-        MUL32x16INTO32(srcval,val,temp,15)
-
-            dInVal  = *dst;
-        dOutVal = temp + dInVal;
-
-        if ((((dOutVal ^ temp) & (dOutVal ^ dInVal)) >> 31)!=0)     /* overflow / underflow */
-        {
-            if(temp<0)
-            {
-                dOutVal=0x80000000L;
-            }
-            else
-            {
-                dOutVal=0x7FFFFFFFL;
-            }
-        }
-
-        *dst = dOutVal;
-        dst++;
+        *dst++ = LVM_Clamp(Temp);
     }
 
     return;
 }
-void Mac3s_Sat_Float(const LVM_FLOAT *src,
-                     const LVM_FLOAT val,
-                     LVM_FLOAT *dst,
-                     LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_FLOAT srcval;
-    LVM_FLOAT Temp,dInVal;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval = *src;
-        src++;
-
-        Temp = srcval * val;
-
-        dInVal  = (LVM_FLOAT)*dst;
-        Temp = Temp + dInVal;
-
-        if (Temp > 1.000000f)
-        {
-            *dst = 1.000000f;
-        }
-        else if (Temp < -1.000000f)
-        {
-            *dst = -1.000000f;
-        }
-        else
-        {
-            *dst = Temp;
-        }
-        dst++;
-    }
-
-    return;
-}
-/**********************************************************************************/
-
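
The new Mac3s_Sat_Float body is a saturating multiply-accumulate, dst[i] += src[i] * val with the result clipped back to full scale, where the clipping is now delegated to LVM_Clamp from ScalarArithmetic.h. Judging from the float branch it replaces, that helper limits values to [-1.0, 1.0]; a sketch under that assumption:

    // Saturating MAC, assuming full scale is [-1, 1] as in the removed code;
    // ClampSketch stands in for LVM_Clamp.
    static inline float ClampSketch(float x) {
        if (x > 1.0f) return 1.0f;
        if (x < -1.0f) return -1.0f;
        return x;
    }

    void Mac3sSatSketch(const float* src, float val, float* dst, int n) {
        for (int i = 0; i < n; ++i) {
            dst[i] = ClampSketch(dst[i] + src[i] * val);
        }
    }
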
diff --git a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
index 7c7b36f..9663998 100644
--- a/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixInSoft_D32C31_SAT.cpp
@@ -26,33 +26,29 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXINSOFT_D32C31_SAT
 ***********************************************************************************/
-void MixInSoft_D32C31_SAT( Mix_1St_Cll_FLOAT_t        *pInstance,
-                           const LVM_FLOAT      *src,
-                           LVM_FLOAT      *dst,
-                           LVM_INT16      n)
-{
+void MixInSoft_D32C31_SAT(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                          LVM_INT16 n) {
     char HardMixing = TRUE;
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Alpha == 0){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Alpha == 0) {
             pInstance->Current = pInstance->Target;
-        }else if ((pInstance->Current-pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-                 (pInstance->Current-pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+        } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+                   (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
@@ -63,11 +59,11 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
-        if (pInstance->Target != 0){ /* Nothing to do in case Target = 0 */
+    if (HardMixing) {
+        if (pInstance->Target != 0) { /* Nothing to do in case Target = 0 */
             if ((pInstance->Target) == 1.0f)
                 Add2_Sat_Float(src, dst, n);
-            else{
+            else {
                 Core_MixInSoft_D32C31_SAT(pInstance, src, dst, n);
                 pInstance->Current = pInstance->Target; /* In case the core function would \
                                                            have changed the Current value */
@@ -81,16 +77,15 @@
     /* Call back before the hard mixing, because in this case, hard mixing makes
        use of the core soft mix function which can change the Current value!      */
 
-    if (pInstance->CallbackSet){
+    if (pInstance->CallbackSet) {
         if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             pInstance->CallbackSet = FALSE;
-            if (pInstance->pCallBack != 0){
-                (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
-                                          pInstance->pGeneralPurpose,
-                                          pInstance->CallbackParam );
+            if (pInstance->pCallBack != 0) {
+                (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+                                        pInstance->CallbackParam);
             }
         }
     }
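
MixInSoft_D32C31_SAT above chooses between three paths: snap straight to the target gain when the ramp coefficient is zero or the remaining difference is below the 0.01 dB threshold, run the per-sample soft-mix core while the ramp still matters, or fall back to a plain saturated add when the target gain is exactly 1.0. A condensed sketch of that decision flow; the two helpers are stubs, not the Core_* routines:

    // Decision skeleton for "mix src into dst with a soft gain ramp" (sketch).
    struct MixStateSketch {
        float current, target, alpha;
    };

    void SoftRampMixIn(MixStateSketch&, const float*, float*, int) { /* ramp */ }
    void SaturatedAdd(const float*, float*, int) { /* dst += src, clipped */ }

    void MixInSketch(MixStateSketch& st, const float* src, float* dst, int n,
                     float threshold /* linear gain delta for ~0.01 dB */) {
        if (n <= 0) return;
        bool hardMixing = true;
        if (st.current != st.target) {
            if (st.alpha == 0.0f || (st.current - st.target < threshold &&
                                     st.current - st.target > -threshold)) {
                st.current = st.target;          // difference no longer audible
            } else {
                hardMixing = false;
                SoftRampMixIn(st, src, dst, n);  // per-sample gain ramp
            }
        }
        if (hardMixing && st.target != 0.0f) {
            if (st.target == 1.0f) {
                SaturatedAdd(src, dst, n);       // unity gain: plain add
            } else {
                SoftRampMixIn(st, src, dst, n);  // core handles the fixed gain
                st.current = st.target;
            }
        }
    }
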
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
index d3325ec..58a9102 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_1St_D32C31_WRA.cpp
@@ -19,6 +19,7 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <string.h>
 #include "Mixer_private.h"
 #include "VectorArithmetic.h"
 
@@ -26,33 +27,29 @@
    DEFINITIONS
 ***********************************************************************************/
 
-#define TRUE          1
-#define FALSE         0
+#define TRUE 1
+#define FALSE 0
 
 /**********************************************************************************
    FUNCTION MIXSOFT_1ST_D32C31_WRA
 ***********************************************************************************/
-void MixSoft_1St_D32C31_WRA(    Mix_1St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n)
-{
+void MixSoft_1St_D32C31_WRA(Mix_1St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src, LVM_FLOAT* dst,
+                            LVM_INT16 n) {
     char HardMixing = TRUE;
 
-    if(n <= 0)    return;
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if (pInstance->Current != pInstance->Target)
-    {
-        if(pInstance->Alpha == 0){
+    if (pInstance->Current != pInstance->Target) {
+        if (pInstance->Alpha == 0) {
             pInstance->Current = pInstance->Target;
-        }else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-                 (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+        } else if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
+                   (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
-        }else{
+        } else {
             /* Soft mixing has to be applied */
             HardMixing = FALSE;
             Core_MixSoft_1St_D32C31_WRA(pInstance, src, dst, n);
@@ -63,14 +60,12 @@
        HARD MIXING
     *******************************************************************************/
 
-    if (HardMixing){
+    if (HardMixing) {
         if (pInstance->Target == 0)
-            LoadConst_Float(0, dst, n);
-        else if ((pInstance->Target) == 1.0f){
-            if (src != dst)
-                Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
-        }
-        else
+            memset(dst, 0, n * sizeof(*dst));
+        else if ((pInstance->Target) == 1.0f) {
+            if (src != dst) Copy_Float((LVM_FLOAT*)src, (LVM_FLOAT*)dst, (LVM_INT16)(n));
+        } else
             Mult3s_Float(src, pInstance->Current, dst, n);
     }
 
@@ -78,16 +73,15 @@
        CALL BACK
     *******************************************************************************/
 
-    if (pInstance->CallbackSet){
+    if (pInstance->CallbackSet) {
         if ((pInstance->Current - pInstance->Target < POINT_ZERO_ONE_DB_FLOAT) &&
-            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)){
+            (pInstance->Current - pInstance->Target > -POINT_ZERO_ONE_DB_FLOAT)) {
             pInstance->Current = pInstance->Target; /* Difference is not significant anymore. \
                                                        Make them equal. */
             pInstance->CallbackSet = FALSE;
-            if (pInstance->pCallBack != 0){
-                (*pInstance->pCallBack) ( pInstance->pCallbackHandle,
-                                          pInstance->pGeneralPurpose,
-                                          pInstance->CallbackParam );
+            if (pInstance->pCallBack != 0) {
+                (*pInstance->pCallBack)(pInstance->pCallbackHandle, pInstance->pGeneralPurpose,
+                                        pInstance->CallbackParam);
             }
         }
     }
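
One behavioural note on the hunk above: LoadConst_Float(0, dst, n) became memset(dst, 0, n * sizeof(*dst)). For a float buffer that substitution is valid only because IEEE-754 encodes 0.0f as all-zero bits; a quick standalone demonstration:

    // Zeroing a float buffer with memset relies on 0.0f being all-zero bits.
    #include <cassert>
    #include <cstring>

    int main() {
        float buf[8];
        std::memset(buf, 0, sizeof(buf));
        for (float v : buf) assert(v == 0.0f);  // holds on IEEE-754 targets
        return 0;
    }
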
diff --git a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
index b002738..aba8537 100644
--- a/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MixSoft_2St_D32C31_SAT.cpp
@@ -25,42 +25,35 @@
 /**********************************************************************************
    FUNCTION MIXSOFT_2ST_D32C31_SAT
 ***********************************************************************************/
-void MixSoft_2St_D32C31_SAT(    Mix_2St_Cll_FLOAT_t       *pInstance,
-                                const LVM_FLOAT     *src1,
-                                const LVM_FLOAT     *src2,
-                                      LVM_FLOAT     *dst,
-                                      LVM_INT16     n)
-{
-
-    if(n <= 0)    return;
+void MixSoft_2St_D32C31_SAT(Mix_2St_Cll_FLOAT_t* pInstance, const LVM_FLOAT* src1,
+                            const LVM_FLOAT* src2, LVM_FLOAT* dst, LVM_INT16 n) {
+    if (n <= 0) return;
 
     /******************************************************************************
        SOFT MIXING
     *******************************************************************************/
-    if ((pInstance->Current1 != pInstance->Target1) || (pInstance->Current2 != pInstance->Target2))
-    {
+    if ((pInstance->Current1 != pInstance->Target1) ||
+        (pInstance->Current2 != pInstance->Target2)) {
         MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
-        MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t *)&pInstance->Alpha2, /* Cast to void: \
-                                                              no dereferencing in function*/
-                              src2, dst, n);
+        MixInSoft_D32C31_SAT((Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: \
+                                                             no dereferencing in function*/
+                             src2, dst, n);
     }
 
     /******************************************************************************
        HARD MIXING
     *******************************************************************************/
 
-    else
-    {
+    else {
         if (pInstance->Current1 == 0)
             MixSoft_1St_D32C31_WRA(
-                    (Mix_1St_Cll_FLOAT_t *) &pInstance->Alpha2, /* Cast to void: no \
-                                                             dereferencing in function*/
-                                    src2, dst, n);
+                    (Mix_1St_Cll_FLOAT_t*)&pInstance->Alpha2, /* Cast to void: no \
+                                                           dereferencing in function*/
+                    src2, dst, n);
         else if (pInstance->Current2 == 0)
-            MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*) pInstance, src1, dst, n);
+            MixSoft_1St_D32C31_WRA((Mix_1St_Cll_FLOAT_t*)pInstance, src1, dst, n);
         else
             Core_MixHard_2St_D32C31_SAT(pInstance, src1, src2, dst, n);
     }
 }
 /**********************************************************************************/
-
diff --git a/media/libeffects/lvm/lib/Common/src/Mixer_private.h b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
index 1d653bb..e1e62c5 100644
--- a/media/libeffects/lvm/lib/Common/src/Mixer_private.h
+++ b/media/libeffects/lvm/lib/Common/src/Mixer_private.h
@@ -26,13 +26,13 @@
 
 #define POINT_ZERO_ONE_DB 2473805 /* 0.01 dB on a full scale signal = (10^(0.01/20) -1) * 2^31 */
 
-#define POINT_ZERO_ONE_DB_FLOAT 0.001152 /* 0.01 dB on a full scale \
-                                            signal = (10^(0.01/20) -1) * 2^31 */
+#define POINT_ZERO_ONE_DB_FLOAT         \
+    0.001152 /* 0.01 dB on a full scale \
+                signal = (10^(0.01/20) -1) * 2^31 */
 /**********************************************************************************
    DEFINITIONS
 ***********************************************************************************/
 
 /**********************************************************************************/
 
-#endif //#ifndef __MIXER_PRIVATE_H__
-
+#endif  //#ifndef __MIXER_PRIVATE_H__
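
The reflowed POINT_ZERO_ONE_DB_FLOAT comment keeps the original derivation: a 0.01 dB step on a full-scale signal corresponds to a linear gain difference of 10^(0.01/20) - 1, roughly 0.001152, and the Q31 sibling is the same quantity scaled by 2^31, in line with the 2473805 defined above. A two-line recomputation:

    // Recompute the 0.01 dB mixer threshold from its definition.
    #include <cmath>
    #include <cstdio>

    int main() {
        const double linear = std::pow(10.0, 0.01 / 20.0) - 1.0;  // ~0.001152
        std::printf("float: %.6f  Q31: %.0f\n", linear, linear * 2147483648.0);
        return 0;
    }
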
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
deleted file mode 100644
index ead798d..0000000
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_16.cpp
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION MonoTo2I_16
-***********************************************************************************/
-
-void MonoTo2I_16( const LVM_INT16 *src,
-                 LVM_INT16 *dst,
-                 LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    src += (n-1);
-    dst += ((n*2)-1);
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = *src;
-        dst--;
-
-        *dst = *src;
-        dst--;
-        src--;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
index 603d1fc..ef3e633 100644
--- a/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/MonoTo2I_32.cpp
@@ -21,40 +21,12 @@
 
 #include "VectorArithmetic.h"
 
-/**********************************************************************************
-   FUNCTION MonoTo2I_32
-***********************************************************************************/
-
-void MonoTo2I_32( const LVM_INT32  *src,
-                 LVM_INT32  *dst,
-                 LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    src += (n-1);
-    dst += ((n*2)-1);
-
-    for (ii = n; ii != 0; ii--)
-    {
-        *dst = *src;
-        dst--;
-
-        *dst = *src;
-        dst--;
-        src--;
-    }
-
-    return;
-}
-void MonoTo2I_Float( const LVM_FLOAT  *src,
-                     LVM_FLOAT  *dst,
-                     LVM_INT16 n)
-{
+void MonoTo2I_Float(const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     src += (n - 1);
     dst += ((n * 2) - 1);
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         *dst = *src;
         dst--;
 
@@ -65,4 +37,3 @@
 
     return;
 }
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
index 370c39a..babfef3 100644
--- a/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Mult3s_32x16.cpp
@@ -22,41 +22,11 @@
 #include "VectorArithmetic.h"
 #include "LVM_Macros.h"
 
-/**********************************************************************************
-FUNCTION MULT3S_16X16
-***********************************************************************************/
-
-void Mult3s_32x16( const LVM_INT32 *src,
-                  const LVM_INT16 val,
-                  LVM_INT32 *dst,
-                  LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_INT32 srcval,temp;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        srcval=*src;
-        src++;
-
-        MUL32x16INTO32(srcval,val,temp,15)
-
-        *dst = temp;
-        dst++;
-    }
-
-    return;
-}
-void Mult3s_Float( const LVM_FLOAT *src,
-                   const LVM_FLOAT val,
-                   LVM_FLOAT *dst,
-                   LVM_INT16 n)
-{
+void Mult3s_Float(const LVM_FLOAT* src, const LVM_FLOAT val, LVM_FLOAT* dst, LVM_INT16 n) {
     LVM_INT16 ii;
     LVM_FLOAT temp;
 
-    for (ii = n; ii != 0; ii--)
-    {
+    for (ii = n; ii != 0; ii--) {
         temp = (*src) * val;
         src++;
         *dst = temp;
@@ -64,4 +34,3 @@
     }
     return;
 }
-/**********************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
index 36d1149..f3a1a67 100644
--- a/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
+++ b/media/libeffects/lvm/lib/Common/src/NonLinComp_D16.cpp
@@ -61,22 +61,16 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void NonLinComp_D16(LVM_INT16        Gain,
-                      LVM_INT16        *pDataIn,
-                    LVM_INT16        *pDataOut,
-                    LVM_INT32        BlockLength)
-{
-
-    LVM_INT16            Sample;                    /* Input samples */
-    LVM_INT32            SampleNo;                /* Sample index */
-    LVM_INT16            Temp;
+void NonLinComp_Float(LVM_FLOAT Gain, LVM_FLOAT* pDataIn, LVM_FLOAT* pDataOut,
+                      LVM_INT32 BlockLength) {
+    LVM_FLOAT Sample;   /* Input samples */
+    LVM_INT32 SampleNo; /* Sample index */
+    LVM_FLOAT Temp;
 
     /*
      * Process a block of samples
      */
-    for(SampleNo = 0; SampleNo<BlockLength; SampleNo++)
-    {
-
+    for (SampleNo = 0; SampleNo < BlockLength; SampleNo++) {
         /*
          * Read the input
          */
@@ -88,64 +82,12 @@
          * harmonic distortion. The amount of compression is controlled by the
          * gain factor
          */
-        if ((LVM_INT32)Sample != -32768)
-        {
-            Temp = (LVM_INT16)((Sample * Sample) >> 15);
-            if(Sample >0)
-            {
-                Sample = (LVM_INT16)(Sample + ((Gain * (Sample - Temp)) >> 15));
-            }
-            else
-            {
-                Sample = (LVM_INT16)(Sample + ((Gain * (Sample + Temp)) >> 15));
-            }
-        }
-
-        /*
-         * Save the output
-         */
-        *pDataOut = Sample;
-        pDataOut++;
-
-    }
-
-}
-void NonLinComp_Float(LVM_FLOAT        Gain,
-                      LVM_FLOAT        *pDataIn,
-                      LVM_FLOAT        *pDataOut,
-                      LVM_INT32        BlockLength)
-{
-
-    LVM_FLOAT            Sample;                    /* Input samples */
-    LVM_INT32            SampleNo;                /* Sample index */
-    LVM_FLOAT            Temp;
-
-    /*
-     * Process a block of samples
-     */
-    for(SampleNo = 0; SampleNo < BlockLength; SampleNo++)
-    {
-        /*
-         * Read the input
-         */
-        Sample = *pDataIn;
-        pDataIn++;
-
-        /*
-         * Apply the compander, this compresses the signal at the expense of
-         * harmonic distortion. The amount of compression is control by the
-         * gain factor
-         */
-        if (Sample != -1.0f)
-        {
+        if (Sample != -1.0f) {
             Temp = ((Sample * Sample));
-            if(Sample > 0)
-            {
-                Sample = (Sample + ((Gain * (Sample - Temp)) ));
-            }
-            else
-            {
-                Sample = (Sample + ((Gain * (Sample + Temp)) ));
+            if (Sample > 0) {
+                Sample = (Sample + ((Gain * (Sample - Temp))));
+            } else {
+                Sample = (Sample + ((Gain * (Sample + Temp))));
             }
         }
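
The surviving float compander above computes, for a sample x and gain g, y = x + g*(x - x^2) when x > 0 and y = x + g*(x + x^2) otherwise, which is the single expression y = x + g*x*(1 - |x|): full-scale samples pass through unchanged while smaller ones are pushed up, at the cost of added harmonics. The x == -1.0f guard appears to be carried over from the removed fixed-point variant, where squaring -32768 needed special handling; in float the formula already returns x unchanged at that point. A one-sample sketch:

    // One sample of the soft compander: y = x + g * x * (1 - |x|) (sketch).
    #include <cmath>

    float CompanderSampleSketch(float x, float gain) {
        return x + gain * x * (1.0f - std::fabs(x));
    }
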
 
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
deleted file mode 100644
index 3f62f99..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C14G11_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "PK_2I_D32F32CssGss_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A0,
- pBiquadState->coefs[1] is -B2,
- pBiquadState->coefs[2] is -B1, these are in Q14 format
- pBiquadState->coefs[3] is Gain, in Q11 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q0 format
- pBiquadState->pDelays[5] is y(n-1)R in Q0 format
- pBiquadState->pDelays[6] is y(n-2)L in Q0 format
- pBiquadState->pDelays[7] is y(n-2)R in Q0 format
-***************************************************************************/
-void PK_2I_D32F32C14G11_TRC_WRA_01 ( Biquad_FLOAT_Instance_t       *pInstance,
-                                     LVM_FLOAT               *pDataIn,
-                                     LVM_FLOAT               *pDataOut,
-                                     LVM_INT16               NrSamples)
-    {
-        LVM_FLOAT ynL,ynR,ynLO,ynRO,templ;
-        LVM_INT16 ii;
-        PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
-
-         for (ii = NrSamples; ii != 0; ii--)
-         {
-
-            /**************************************************************************
-                            PROCESSING OF THE LEFT CHANNEL
-            ***************************************************************************/
-            /* ynL= (A0  * (x(n)L - x(n-2)L  ) )*/
-            templ = (*pDataIn) - pBiquadState->pDelays[2];
-            ynL = templ * pBiquadState->coefs[0];
-
-            /* ynL+= ((-B2  * y(n-2)L  )) */
-            templ = pBiquadState->pDelays[6] * pBiquadState->coefs[1];
-            ynL += templ;
-
-            /* ynL+= ((-B1 * y(n-1)L  ) ) */
-            templ = pBiquadState->pDelays[4] * pBiquadState->coefs[2];
-            ynL += templ;
-
-            /* ynLO= ((Gain * ynL )) */
-            ynLO = ynL * pBiquadState->coefs[3];
-
-            /* ynLO=( ynLO + x(n)L  )*/
-            ynLO += (*pDataIn);
-
-            /**************************************************************************
-                            PROCESSING OF THE RIGHT CHANNEL
-            ***************************************************************************/
-            /* ynR= (A0  * (x(n)R  - x(n-2)R  ) ) */
-            templ = (*(pDataIn + 1)) - pBiquadState->pDelays[3];
-            ynR = templ * pBiquadState->coefs[0];
-
-            /* ynR+= ((-B2  * y(n-2)R  ) )  */
-            templ = pBiquadState->pDelays[7] * pBiquadState->coefs[1];
-            ynR += templ;
-
-            /* ynR+= ((-B1  * y(n-1)R  ) )   */
-            templ = pBiquadState->pDelays[5] * pBiquadState->coefs[2];
-            ynR += templ;
-
-            /* ynRO= ((Gain  * ynR )) */
-            ynRO = ynR * pBiquadState->coefs[3];
-
-            /* ynRO=( ynRO + x(n)R  )*/
-            ynRO += (*(pDataIn+1));
-
-            /**************************************************************************
-                            UPDATING THE DELAYS
-            ***************************************************************************/
-            pBiquadState->pDelays[7] = pBiquadState->pDelays[5]; /* y(n-2)R=y(n-1)R*/
-            pBiquadState->pDelays[6] = pBiquadState->pDelays[4]; /* y(n-2)L=y(n-1)L*/
-            pBiquadState->pDelays[3] = pBiquadState->pDelays[1]; /* x(n-2)R=x(n-1)R*/
-            pBiquadState->pDelays[2] = pBiquadState->pDelays[0]; /* x(n-2)L=x(n-1)L*/
-            pBiquadState->pDelays[5] = ynR; /* Update y(n-1)R */
-            pBiquadState->pDelays[4] = ynL; /* Update y(n-1)L */
-            pBiquadState->pDelays[0] = (*pDataIn); /* Update x(n-1)L */
-            pDataIn++;
-            pBiquadState->pDelays[1] = (*pDataIn); /* Update x(n-1)R */
-            pDataIn++;
-
-            /**************************************************************************
-                            WRITING THE OUTPUT
-            ***************************************************************************/
-            *pDataOut = ynLO; /* Write Left output*/
-            pDataOut++;
-            *pDataOut = ynRO; /* Write Right ouput*/
-            pDataOut++;
-
-        }
-
-    }
-
-#ifdef SUPPORT_MC
-/**************************************************************************
-DELAYS-
-pBiquadState->pDelays[0] to
-pBiquadState->pDelays[NrChannels - 1] is x(n-1) for all NrChannels
-
-pBiquadState->pDelays[NrChannels] to
-pBiquadState->pDelays[2*NrChannels - 1] is x(n-2) for all NrChannels
-
-pBiquadState->pDelays[2*NrChannels] to
-pBiquadState->pDelays[3*NrChannels - 1] is y(n-1) for all NrChannels
-
-pBiquadState->pDelays[3*NrChannels] to
-pBiquadState->pDelays[4*NrChannels - 1] is y(n-2) for all NrChannels
-***************************************************************************/
-
-void PK_Mc_D32F32C14G11_TRC_WRA_01 (Biquad_FLOAT_Instance_t       *pInstance,
-                                    LVM_FLOAT               *pDataIn,
-                                    LVM_FLOAT               *pDataOut,
-                                    LVM_INT16               NrFrames,
-                                    LVM_INT16               NrChannels)
-    {
-        LVM_FLOAT yn, ynO, temp;
-        LVM_INT16 ii, jj;
-        PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
-
-         for (ii = NrFrames; ii != 0; ii--)
-         {
-
-            for (jj = 0; jj < NrChannels; jj++)
-            {
-                /**************************************************************************
-                                PROCESSING OF THE jj CHANNEL
-                ***************************************************************************/
-                /* yn= (A0  * (x(n) - x(n-2)))*/
-                temp = (*pDataIn) - pBiquadState->pDelays[NrChannels + jj];
-                yn = temp * pBiquadState->coefs[0];
-
-                /* yn+= ((-B2  * y(n-2))) */
-                temp = pBiquadState->pDelays[NrChannels*3 + jj] * pBiquadState->coefs[1];
-                yn += temp;
-
-                /* yn+= ((-B1 * y(n-1))) */
-                temp = pBiquadState->pDelays[NrChannels*2 + jj] * pBiquadState->coefs[2];
-                yn += temp;
-
-                /* ynO= ((Gain * yn)) */
-                ynO = yn * pBiquadState->coefs[3];
-
-                /* ynO=(ynO + x(n))*/
-                ynO += (*pDataIn);
-
-                /**************************************************************************
-                                UPDATING THE DELAYS
-                ***************************************************************************/
-                pBiquadState->pDelays[NrChannels * 3 + jj] =
-                    pBiquadState->pDelays[NrChannels * 2 + jj]; /* y(n-2)=y(n-1)*/
-                pBiquadState->pDelays[NrChannels * 1 + jj] =
-                    pBiquadState->pDelays[jj]; /* x(n-2)=x(n-1)*/
-                pBiquadState->pDelays[NrChannels * 2 + jj] = yn; /* Update y(n-1) */
-                pBiquadState->pDelays[jj] = (*pDataIn); /* Update x(n-1)*/
-                pDataIn++;
-
-                /**************************************************************************
-                                WRITING THE OUTPUT
-                ***************************************************************************/
-                *pDataOut = ynO; /* Write output*/
-                pDataOut++;
-            }
-        }
-
-    }
-#endif
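
The reference loops removed here implement a peaking biquad in direct form: yn = A0 * (x[n] - x[n-2]) - B2 * y[n-2] - B1 * y[n-1], with the filtered band mixed back onto the input as out = x[n] + Gain * yn (coefs[1] and coefs[2] already hold -B2 and -B1). A compact single-channel sketch of that recursion, with names chosen for illustration rather than taken from the library:

    // State for one channel of the peaking section.
    struct PeakingState {
        float x1 = 0.f, x2 = 0.f;  // x(n-1), x(n-2)
        float y1 = 0.f, y2 = 0.f;  // y(n-1), y(n-2)
    };

    // coefs[0] = A0, coefs[1] = -B2, coefs[2] = -B1, coefs[3] = Gain.
    void peaking_process(PeakingState& s, const float coefs[4],
                         const float* in, float* out, int n) {
        for (int i = 0; i < n; ++i) {
            const float x = in[i];
            const float yn = coefs[0] * (x - s.x2) + coefs[1] * s.y2 + coefs[2] * s.y1;
            out[i] = x + coefs[3] * yn;  // band output mixed back onto the dry input
            s.x2 = s.x1; s.x1 = x;       // shift input history
            s.y2 = s.y1; s.y1 = yn;      // shift output history
        }
    }
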
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C30G11_TRC_WRA_01.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C30G11_TRC_WRA_01.cpp
deleted file mode 100644
index 41de1de..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32C30G11_TRC_WRA_01.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "PK_2I_D32F32CllGss_TRC_WRA_01_Private.h"
-#include "LVM_Macros.h"
-
-/**************************************************************************
- ASSUMPTIONS:
- COEFS-
- pBiquadState->coefs[0] is A0,
- pBiquadState->coefs[1] is -B2,
- pBiquadState->coefs[2] is -B1, these are in Q30 format
- pBiquadState->coefs[3] is Gain, in Q11 format
-
- DELAYS-
- pBiquadState->pDelays[0] is x(n-1)L in Q0 format
- pBiquadState->pDelays[1] is x(n-1)R in Q0 format
- pBiquadState->pDelays[2] is x(n-2)L in Q0 format
- pBiquadState->pDelays[3] is x(n-2)R in Q0 format
- pBiquadState->pDelays[4] is y(n-1)L in Q0 format
- pBiquadState->pDelays[5] is y(n-1)R in Q0 format
- pBiquadState->pDelays[6] is y(n-2)L in Q0 format
- pBiquadState->pDelays[7] is y(n-2)R in Q0 format
-***************************************************************************/
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
deleted file mode 100644
index 714aa52..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "PK_2I_D32F32CllGss_TRC_WRA_01_Private.h"
-
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
deleted file mode 100644
index c5f9c7c..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CllGss_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _PK_2I_D32F32CLLGSS_TRC_WRA_01_PRIVATE_H_
-#define _PK_2I_D32F32CLLGSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared fro private use.        */
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-#endif /* _PK_2I_D32F32CLLGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
deleted file mode 100644
index f6c05da..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Init.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "BIQUAD.h"
-#include "PK_2I_D32F32CssGss_TRC_WRA_01_Private.h"
-void  PK_2I_D32F32CssGss_TRC_WRA_01_Init(Biquad_FLOAT_Instance_t         *pInstance,
-                                         Biquad_2I_Order2_FLOAT_Taps_t   *pTaps,
-                                         PK_FLOAT_Coefs_t            *pCoef)
-{
-    PFilter_State_Float pBiquadState = (PFilter_State_Float) pInstance;
-    pBiquadState->pDelays       = (LVM_FLOAT *) pTaps;
-
-    pBiquadState->coefs[0] = pCoef->A0;
-
-    pBiquadState->coefs[1] = pCoef->B2;
-
-    pBiquadState->coefs[2] = pCoef->B1;
-
-    pBiquadState->coefs[3] = pCoef->G;
-}
diff --git a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h b/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
deleted file mode 100644
index cc924c4..0000000
--- a/media/libeffects/lvm/lib/Common/src/PK_2I_D32F32CssGss_TRC_WRA_01_Private.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _PK_2I_D32F32CSSGSS_TRC_WRA_01_PRIVATE_H_
-#define _PK_2I_D32F32CSSGSS_TRC_WRA_01_PRIVATE_H_
-
-/* The internal state variables are implemented in a (for the user)  hidden structure */
-/* In this (private) file, the internal structure is declared fro private use.        */
-
-typedef struct _Filter_State_Float_
-{
-    LVM_FLOAT *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State_Float;
-
-typedef Filter_State_Float * PFilter_State_Float ;
-typedef struct _Filter_State_
-{
-  LVM_INT32 *       pDelays;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32         coefs[5];       /* pointer to the filter coefficients */
-}Filter_State;
-
-typedef Filter_State * PFilter_State ;
-
-#endif /* _PK_2I_D32F32CSSGSS_TRC_WRA_01_PRIVATE_H_ */
diff --git a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
index 97a04c1..f54ba90 100644
--- a/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/Shift_Sat_v32xv32.cpp
@@ -24,55 +24,39 @@
 /**********************************************************************************
    FUNCTION Shift_Sat_v32xv32
 ***********************************************************************************/
-void Shift_Sat_Float (const   LVM_INT16   val,
-                      const   LVM_FLOAT   *src,
-                      LVM_FLOAT   *dst,
-                      LVM_INT16   n)
-{
-    LVM_FLOAT   temp;
-    LVM_INT32   ii,ij;
-    LVM_INT16   RShift;
+void Shift_Sat_Float(const LVM_INT16 val, const LVM_FLOAT* src, LVM_FLOAT* dst, LVM_INT16 n) {
+    LVM_FLOAT temp;
+    LVM_INT32 ii, ij;
+    LVM_INT16 RShift;
 
-    if(val > 0)
-    {
-        for (ii = n; ii != 0; ii--)
-        {
+    if (val > 0) {
+        for (ii = n; ii != 0; ii--) {
             temp = (LVM_FLOAT)*src;
             src++;
-            for(ij = 0; ij < val; ij++)
-            {
+            for (ij = 0; ij < val; ij++) {
                 temp = temp * 2;
             }
 
-            if(temp > 1.0)
-                temp = 1.0;
-            if(temp < -1.0)
-                temp = -1.0;
+            if (temp > 1.0) temp = 1.0;
+            if (temp < -1.0) temp = -1.0;
 
             *dst = (LVM_FLOAT)temp;
             dst++;
         }
-    }
-    else if(val < 0)
-    {
-        RShift=(LVM_INT16)(-val);
+    } else if (val < 0) {
+        RShift = (LVM_INT16)(-val);
 
-        for (ii = n; ii != 0; ii--)
-        {
+        for (ii = n; ii != 0; ii--) {
             temp = (LVM_FLOAT)*src;
             src++;
-            for(ij = 0; ij < RShift; ij++)
-            {
+            for (ij = 0; ij < RShift; ij++) {
                 temp = temp / 2;
             }
             *dst = (LVM_FLOAT)temp;
             dst++;
         }
-    }
-    else
-    {
-        if(src != dst)
-        {
+    } else {
+        if (src != dst) {
             Copy_Float(src, dst, n);
         }
     }
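
Functionally the branches above scale every sample by 2^val, saturating to [-1.0, 1.0] only when boosting (val > 0), and degenerate to a copy when val is zero. An equivalent compact formulation, assuming LVM_INT16/LVM_FLOAT map to int16_t/float; this is a sketch, not the library routine:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    void shift_sat_sketch(int16_t val, const float* src, float* dst, int16_t n) {
        const float scale = std::ldexp(1.0f, val);  // 2^val
        for (int16_t i = 0; i < n; ++i) {
            float s = src[i] * scale;
            if (val > 0) s = std::clamp(s, -1.0f, 1.0f);  // saturate only on the boost path
            dst[i] = s;
        }
    }
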
diff --git a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
index 4da2013..2143465 100644
--- a/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
+++ b/media/libeffects/lvm/lib/Common/src/dB_to_Lin32.cpp
@@ -57,17 +57,16 @@
  *
  ****************************************************************************************/
 
-#define FOUR_OVER_SIX    21846                  /* (4 / 6) * 2^15 */
-#define SIX_DB           96                     /* 6 * 16 or 6dB in Q11.4 format */
-#define FIRST_COEF_NEG   14884305
-#define FIRST_COEF_POS   7442152                /* FIRST_COEF_NEG / 2 */
-#define SECOND_COEF      38836
-#define MAX_VALUE        1536                   /* 96 * 16 */
+#define FOUR_OVER_SIX 21846 /* (4 / 6) * 2^15 */
+#define SIX_DB 96           /* 6 * 16 or 6dB in Q11.4 format */
+#define FIRST_COEF_NEG 14884305
+#define FIRST_COEF_POS 7442152 /* FIRST_COEF_NEG / 2 */
+#define SECOND_COEF 38836
+#define MAX_VALUE 1536 /* 96 * 16 */
 
-LVM_FLOAT   dB_to_LinFloat(LVM_INT16    db_fix)
-{
-    LVM_FLOAT    dB_Float;
-    LVM_FLOAT    LinFloat;
+LVM_FLOAT dB_to_LinFloat(LVM_INT16 db_fix) {
+    LVM_FLOAT dB_Float;
+    LVM_FLOAT LinFloat;
 
     dB_Float = (LVM_FLOAT)((LVM_FLOAT)db_fix / 16.0f);
     LinFloat = pow(10, dB_Float / 20.0);
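
As a spot check of the Q11.4 input convention: db_fix = 96 is 96 / 16 = 6 dB, so the routine returns 10^(6/20) = 1.9953 (approximately), -96 gives about 0.5012, and 0 gives 1.0. A small verification helper, assuming the LVM types map to float/int16_t:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    // Declaration mirroring the definition above (assumption: LVM_FLOAT/LVM_INT16 are float/int16_t).
    float dB_to_LinFloat(int16_t db_fix);

    void check_dB_to_LinFloat() {
        assert(std::fabs(dB_to_LinFloat(96) - 1.99526f) < 1e-4f);   // +6 dB
        assert(std::fabs(dB_to_LinFloat(-96) - 0.50119f) < 1e-4f);  // -6 dB
        assert(std::fabs(dB_to_LinFloat(0) - 1.0f) < 1e-6f);        //  0 dB
    }
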
diff --git a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp b/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
deleted file mode 100644
index 4092560..0000000
--- a/media/libeffects/lvm/lib/Common/src/mult3s_16x16.cpp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**********************************************************************************
-   INCLUDE FILES
-***********************************************************************************/
-
-#include "VectorArithmetic.h"
-
-/**********************************************************************************
-   FUNCTION MULT3S_16X16
-***********************************************************************************/
-
-void Mult3s_16x16( const LVM_INT16 *src,
-                  const LVM_INT16 val,
-                  LVM_INT16 *dst,
-                  LVM_INT16 n)
-{
-    LVM_INT16 ii;
-    LVM_INT32 temp;
-
-    for (ii = n; ii != 0; ii--)
-    {
-        temp = (LVM_INT32)(*src) * (LVM_INT32)val;
-        src++;
-
-        *dst = (LVM_INT16)(temp >> 15);
-        dst++;
-    }
-
-    return;
-}
-
-/**********************************************************************************/
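
The removed helper was a plain Q15 fixed-point scale, dst[i] = (src[i] * val) >> 15; 0.5 * 0.5 in Q15 (16384 * 16384) comes out as 8192, i.e. 0.25. Since the file is deleted the float paths no longer need it; a one-line sketch of the removed behaviour for reference:

    #include <cstdint>

    // Q15 multiply as performed per sample by the removed Mult3s_16x16.
    static inline int16_t q15_mul(int16_t a, int16_t b) {
        return (int16_t)(((int32_t)a * (int32_t)b) >> 15);
    }
    // q15_mul(16384, 16384) == 8192  (0.5 * 0.5 == 0.25 in Q15)
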
diff --git a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
index c5ddf77..f1afcd6 100644
--- a/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
+++ b/media/libeffects/lvm/lib/Eq/lib/LVEQNB.h
@@ -86,16 +86,9 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table */
-#define LVEQNB_MEMREGION_INSTANCE          0   /* Offset to the instance memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_DATA   1   /* Offset to persistent data memory region */
-#define LVEQNB_MEMREGION_PERSISTENT_COEF   2   /* Offset to persistent coefficient region */
-#define LVEQNB_MEMREGION_SCRATCH           3   /* Offset to data scratch memory region */
-#define LVEQNB_NR_MEMORY_REGIONS           4   /* Number of memory regions */
-
 /* Callback events */
-#define LVEQNB_EVENT_NONE                   0x0000    /* Not a valid event */
-#define LVEQNB_EVENT_ALGOFF                 0x0001    /* EQNB has completed switch off */
+#define LVEQNB_EVENT_NONE 0x0000   /* Not a valid event */
+#define LVEQNB_EVENT_ALGOFF 0x0001 /* EQNB has completed switch off */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -104,42 +97,25 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVEQNB_Handle_t;
+typedef void* LVEQNB_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVEQNB_BYPASS   = 0,
-    LVEQNB_ON       = 1,
-    LVEQNB_MODE_MAX = LVM_MAXINT_32
-} LVEQNB_Mode_en;
+typedef enum { LVEQNB_BYPASS = 0, LVEQNB_ON = 1, LVEQNB_MODE_MAX = LVM_MAXINT_32 } LVEQNB_Mode_en;
 
 /* Filter mode control */
-typedef enum
-{
-    LVEQNB_FILTER_OFF   = 0,
-    LVEQNB_FILTER_ON    = 1,
+typedef enum {
+    LVEQNB_FILTER_OFF = 0,
+    LVEQNB_FILTER_ON = 1,
     LVEQNB_FILTER_DUMMY = LVM_MAXINT_32
 } LVEQNB_FilterMode_en;
 
-/* Memory Types */
-typedef enum
-{
-    LVEQNB_PERSISTENT      = 0,
-    LVEQNB_PERSISTENT_DATA = 1,
-    LVEQNB_PERSISTENT_COEF = 2,
-    LVEQNB_SCRATCH         = 3,
-    LVEQNB_MEMORY_MAX      = LVM_MAXINT_32
-} LVEQNB_MemoryTypes_en;
-
 /* Function return status */
-typedef enum
-{
-    LVEQNB_SUCCESS        = 0,                          /* Successful return from a routine */
-    LVEQNB_ALIGNMENTERROR = 1,                          /* Memory alignment error */
-    LVEQNB_NULLADDRESS    = 2,                          /* NULL allocation address */
-    LVEQNB_TOOMANYSAMPLES = 3,                          /* Maximum block size exceeded */
-    LVEQNB_STATUS_MAX     = LVM_MAXINT_32
+typedef enum {
+    LVEQNB_SUCCESS = 0,        /* Successful return from a routine */
+    LVEQNB_ALIGNMENTERROR = 1, /* Memory alignment error */
+    LVEQNB_NULLADDRESS = 2,    /* NULL allocation address */
+    LVEQNB_TOOMANYSAMPLES = 3, /* Maximum block size exceeded */
+    LVEQNB_STATUS_MAX = LVM_MAXINT_32
 } LVEQNB_ReturnStatus_en;
 
 /****************************************************************************************/
@@ -166,39 +142,35 @@
 /*
  * Supported source data formats
  */
-#define LVEQNB_CAP_STEREO                  1
-#define LVEQNB_CAP_MONOINSTEREO            2
+#define LVEQNB_CAP_STEREO 1
+#define LVEQNB_CAP_MONOINSTEREO 2
 
-typedef enum
-{
-    LVEQNB_STEREO       = 0,
+typedef enum {
+    LVEQNB_STEREO = 0,
     LVEQNB_MONOINSTEREO = 1,
-#ifdef SUPPORT_MC
     LVEQNB_MULTICHANNEL = 2,
-#endif
-    LVEQNB_SOURCE_MAX   = LVM_MAXINT_32
+    LVEQNB_SOURCE_MAX = LVM_MAXINT_32
 } LVEQNB_SourceFormat_en;
 
 /*
  * Supported sample rates in samples per second
  */
-#define LVEQNB_CAP_FS_8000                 1
-#define LVEQNB_CAP_FS_11025                2
-#define LVEQNB_CAP_FS_12000                4
-#define LVEQNB_CAP_FS_16000                8
-#define LVEQNB_CAP_FS_22050                16
-#define LVEQNB_CAP_FS_24000                32
-#define LVEQNB_CAP_FS_32000                64
-#define LVEQNB_CAP_FS_44100                128
-#define LVEQNB_CAP_FS_48000                256
-#define LVEQNB_CAP_FS_88200                512
-#define LVEQNB_CAP_FS_96000                1024
-#define LVEQNB_CAP_FS_176400               2048
-#define LVEQNB_CAP_FS_192000               4096
+#define LVEQNB_CAP_FS_8000 1
+#define LVEQNB_CAP_FS_11025 2
+#define LVEQNB_CAP_FS_12000 4
+#define LVEQNB_CAP_FS_16000 8
+#define LVEQNB_CAP_FS_22050 16
+#define LVEQNB_CAP_FS_24000 32
+#define LVEQNB_CAP_FS_32000 64
+#define LVEQNB_CAP_FS_44100 128
+#define LVEQNB_CAP_FS_48000 256
+#define LVEQNB_CAP_FS_88200 512
+#define LVEQNB_CAP_FS_96000 1024
+#define LVEQNB_CAP_FS_176400 2048
+#define LVEQNB_CAP_FS_192000 4096
 
-typedef enum
-{
-    LVEQNB_FS_8000  = 0,
+typedef enum {
+    LVEQNB_FS_8000 = 0,
     LVEQNB_FS_11025 = 1,
     LVEQNB_FS_12000 = 2,
     LVEQNB_FS_16000 = 3,
@@ -211,7 +183,7 @@
     LVEQNB_FS_96000 = 10,
     LVEQNB_FS_176400 = 11,
     LVEQNB_FS_192000 = 12,
-    LVEQNB_FS_MAX   = LVM_MAXINT_32
+    LVEQNB_FS_MAX = LVM_MAXINT_32
 } LVEQNB_Fs_en;
 
 /****************************************************************************************/
@@ -220,58 +192,38 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                  Size;                   /* Region size in bytes */
-    LVM_UINT16                  Alignment;              /* Region alignment in bytes */
-    LVEQNB_MemoryTypes_en       Type;                   /* Region type */
-    void                        *pBaseAddress;          /* Pointer to the region base address */
-} LVEQNB_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVEQNB_MemoryRegion_t       Region[LVEQNB_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVEQNB_MemTab_t;
-
 /* Equaliser band definition */
-typedef struct
-{
-    LVM_INT16                   Gain;                   /* Band gain in dB */
-    LVM_UINT16                  Frequency;              /* Band centre frequency in Hz */
-    LVM_UINT16                  QFactor;                /* Band quality factor */
+typedef struct {
+    LVM_INT16 Gain;       /* Band gain in dB */
+    LVM_UINT16 Frequency; /* Band centre frequency in Hz */
+    LVM_UINT16 QFactor;   /* Band quality factor */
 } LVEQNB_BandDef_t;
 
 /* Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVEQNB_Mode_en              OperatingMode;
-    LVEQNB_Fs_en                SampleRate;
-    LVEQNB_SourceFormat_en      SourceFormat;
+    LVEQNB_Mode_en OperatingMode;
+    LVEQNB_Fs_en SampleRate;
+    LVEQNB_SourceFormat_en SourceFormat;
 
     /* Equaliser parameters */
-    LVM_UINT16                  NBands;                 /* Number of bands */
-    LVEQNB_BandDef_t            *pBandDefinition;       /* Pointer to equaliser definitions */
-#ifdef SUPPORT_MC
-    LVM_INT16                   NrChannels;
-#endif
+    LVM_UINT16 NBands;                 /* Number of bands */
+    LVEQNB_BandDef_t* pBandDefinition; /* Pointer to equaliser definitions */
+    LVM_INT16 NrChannels;
 } LVEQNB_Params_t;
 
 /* Capability structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_UINT16                  SampleRate;
+    LVM_UINT16 SampleRate;
 
-    LVM_UINT16                  SourceFormat;
-    LVM_UINT16                  MaxBlockSize;
-    LVM_UINT16                  MaxBands;
+    LVM_UINT16 SourceFormat;
+    LVM_UINT16 MaxBlockSize;
+    LVM_UINT16 MaxBands;
 
     /* Callback parameters */
-    LVM_Callback                CallBack;               /* Bundle callback */
-    void                        *pBundleInstance;       /* Bundle instance handle */
+    LVM_Callback CallBack; /* Bundle callback */
+    void* pBundleInstance; /* Bundle instance handle */
 
 } LVEQNB_Capabilities_t;
 
@@ -283,78 +235,43 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilities                         */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVEQNB_SUCCESS          Succeeded                                                   */
-/*  LVEQNB_NULLADDRESS      When any of pMemoryTable and pCapabilities is NULL address  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVEQNB_Process function                 */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t            hInstance,
-                                     LVEQNB_MemTab_t            *pMemoryTable,
-                                     LVEQNB_Capabilities_t      *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVEQNB_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the N-Band equalliser module                 */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  LVEQNB_Memory before calling this function.                                         */
+/*  Creation and initialisation function for the N-Band equaliser module.               */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
+/*  phInstance              Pointer to instance handle                                  */
 /*  pCapabilities           Pointer to the initialisation capabilities                  */
+/*  pScratch                Pointer to bundle scratch buffer                            */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVEQNB_SUCCESS          Initialisation succeeded                                    */
-/*  LVEQNB_NULLADDRESS        When pCapabilities or pMemoryTableis or phInstance are NULL */
-/*  LVEQNB_NULLADDRESS        One or more of the memory regions has a NULL base address   */
-/*                          pointer for a memory region with a non-zero size.           */
-/*                                                                                      */
+/*  LVEQNB_NULLADDRESS      When pCapabilities or phInstance are NULL                   */
+/*  LVEQNB_NULLADDRESS      When allocated memory has a NULL base address               */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVEQNB_Process function            */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
 /*                                                                                      */
 /****************************************************************************************/
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+                                   LVEQNB_Capabilities_t* pCapabilities, void* pScratch);
 
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t          *phInstance,
-                                   LVEQNB_MemTab_t          *pMemoryTable,
-                                   LVEQNB_Capabilities_t    *pCapabilities);
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVEQNB_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memories created during LVEQNB_Init including instance handle            */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to instance handle                                  */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
+/*                                                                                      */
+/****************************************************************************************/
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -377,8 +294,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t     hInstance,
-                                            LVEQNB_Params_t     *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -401,8 +317,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t           hInstance,
-                                              LVEQNB_Capabilities_t     *pCapabilities);
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+                                              LVEQNB_Capabilities_t* pCapabilities);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -426,8 +342,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t       hInstance,
-                                      LVEQNB_Params_t       *pParams);
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -451,10 +366,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t       hInstance,
-                                      const LVM_FLOAT       *pInData,
-                                      LVM_FLOAT             *pOutData,
-                                      LVM_UINT16            NumSamples);
+LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                      LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif      /* __LVEQNB__ */
-
+#endif /* __LVEQNB__ */
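
With the memory-table API removed, a client now creates the equaliser with LVEQNB_Init, configures it through LVEQNB_Control, runs LVEQNB_Process, and releases everything with LVEQNB_DeInit. A usage sketch based only on the declarations above; the band values, the sample count and in particular the scratch size are placeholders (the scratch buffer is provided by the bundle, per the pScratch description):

    #include <vector>
    #include "LVEQNB.h"

    void eqnb_example(const LVM_FLOAT* in, LVM_FLOAT* out, LVM_UINT16 numSamples) {
        LVEQNB_Handle_t handle = LVM_NULL;

        LVEQNB_Capabilities_t caps{};
        caps.SampleRate = LVEQNB_CAP_FS_48000;
        caps.SourceFormat = LVEQNB_CAP_STEREO;
        caps.MaxBlockSize = numSamples;
        caps.MaxBands = 1;

        std::vector<LVM_FLOAT> scratch(16384);  // placeholder size, normally bundle-provided
        if (LVEQNB_Init(&handle, &caps, scratch.data()) != LVEQNB_SUCCESS) return;

        LVEQNB_BandDef_t band = {3 /* dB */, 1000 /* Hz */, 96 /* Q * 100 */};
        LVEQNB_Params_t params{};
        params.OperatingMode = LVEQNB_ON;
        params.SampleRate = LVEQNB_FS_48000;
        params.SourceFormat = LVEQNB_STEREO;
        params.NBands = 1;
        params.pBandDefinition = &band;
        params.NrChannels = 2;
        (void)LVEQNB_Control(handle, &params);

        (void)LVEQNB_Process(handle, in, out, numSamples);
        LVEQNB_DeInit(&handle);
    }
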
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
index c3c0fad..f8a5f2a 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_CalcCoef.cpp
@@ -111,43 +111,37 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16        Fs,
-                                              LVEQNB_BandDef_t  *pFilterDefinition,
-                                              PK_FLOAT_Coefs_t  *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVEQNB_GainTable[];
-    extern LVM_FLOAT    LVEQNB_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVEQNB_DTable[];
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+                                              PK_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVEQNB_GainTable[];
+    extern LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVEQNB_DTable[];
 
     /*
      * Get the filter definition
      */
-    LVM_INT16           Gain        = pFilterDefinition->Gain;
-    LVM_UINT16          Frequency   = pFilterDefinition->Frequency;
+    LVM_INT16 Gain = pFilterDefinition->Gain;
+    LVM_UINT16 Frequency = pFilterDefinition->Frequency;
     /* As mentioned in effectbundle.h */
-    LVM_FLOAT           QFactor     = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
+    LVM_FLOAT QFactor = (LVM_FLOAT)pFilterDefinition->QFactor / 100.0f;
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs];        /* T0 = 2 * Pi * Fc / Fs */
-    if (Gain >= 0)
-    {
-        D = LVEQNB_DTable[15];                         /* D = 1            if GaindB >= 0 */
-    }
-    else
-    {
-        D = LVEQNB_DTable[Gain + 15];                    /* D = 1 / (1 + G)  if GaindB <  0 */
+    T0 = Frequency * LVEQNB_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    if (Gain >= 0) {
+        D = LVEQNB_DTable[15]; /* D = 1            if GaindB >= 0 */
+    } else {
+        D = LVEQNB_DTable[Gain + 15]; /* D = 1 / (1 + G)  if GaindB <  0 */
     }
 
     /*
@@ -164,7 +158,7 @@
     pCoefficients->A0 = 2 * A0;
     pCoefficients->B1 = 2 * B1;
     pCoefficients->B2 = 2 * B2;
-    pCoefficients->G  = LVEQNB_GainTable[Gain + 15];
+    pCoefficients->G = LVEQNB_GainTable[Gain + 15];
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
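
Worked numbers for the two lookups: at Fs = 48 kHz the table entry is 2*Pi/48000 = 0.000131, so a 1 kHz band gives T0 = 1000 * 0.000131 = 0.131 rad per sample; for Gain = -3 dB, D = DTable[-3 + 15] = 1 / (1 + (10^(-3/20) - 1)) = 10^(3/20) = 1.4125, matching LVEQNB_100D_Neg3_dB in LVEQNB_Coeffs.h.
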
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
index 6329181..c44a9be 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Coeffs.h
@@ -23,78 +23,78 @@
 /* Gain table for (10^(Gain/20) - 1)                                                */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_Gain_Neg15_dB                             (-0.822172f)
-#define LVEQNB_Gain_Neg14_dB                             (-0.800474f)
-#define LVEQNB_Gain_Neg13_dB                             (-0.776128f)
-#define LVEQNB_Gain_Neg12_dB                             (-0.748811f)
-#define LVEQNB_Gain_Neg11_dB                             (-0.718162f)
-#define LVEQNB_Gain_Neg10_dB                             (-0.683772f)
-#define LVEQNB_Gain_Neg9_dB                              (-0.645187f)
-#define LVEQNB_Gain_Neg8_dB                              (-0.601893f)
-#define LVEQNB_Gain_Neg7_dB                              (-0.553316f)
-#define LVEQNB_Gain_Neg6_dB                              (-0.498813f)
-#define LVEQNB_Gain_Neg5_dB                              (-0.437659f)
-#define LVEQNB_Gain_Neg4_dB                              (-0.369043f)
-#define LVEQNB_Gain_Neg3_dB                              (-0.292054f)
-#define LVEQNB_Gain_Neg2_dB                              (-0.205672f)
-#define LVEQNB_Gain_Neg1_dB                              (-0.108749f)
-#define LVEQNB_Gain_0_dB                                  0.000000f
-#define LVEQNB_Gain_1_dB                                  0.122018f
-#define LVEQNB_Gain_2_dB                                  0.258925f
-#define LVEQNB_Gain_3_dB                                  0.412538f
-#define LVEQNB_Gain_4_dB                                  0.584893f
-#define LVEQNB_Gain_5_dB                                  0.778279f
-#define LVEQNB_Gain_6_dB                                  0.995262f
-#define LVEQNB_Gain_7_dB                                  1.238721f
-#define LVEQNB_Gain_8_dB                                  1.511886f
-#define LVEQNB_Gain_9_dB                                  1.818383f
-#define LVEQNB_Gain_10_dB                                 2.162278f
-#define LVEQNB_Gain_11_dB                                 2.548134f
-#define LVEQNB_Gain_12_dB                                 2.981072f
-#define LVEQNB_Gain_13_dB                                 3.466836f
-#define LVEQNB_Gain_14_dB                                 4.011872f
-#define LVEQNB_Gain_15_dB                                 4.623413f
+#define LVEQNB_Gain_Neg15_dB (-0.822172f)
+#define LVEQNB_Gain_Neg14_dB (-0.800474f)
+#define LVEQNB_Gain_Neg13_dB (-0.776128f)
+#define LVEQNB_Gain_Neg12_dB (-0.748811f)
+#define LVEQNB_Gain_Neg11_dB (-0.718162f)
+#define LVEQNB_Gain_Neg10_dB (-0.683772f)
+#define LVEQNB_Gain_Neg9_dB (-0.645187f)
+#define LVEQNB_Gain_Neg8_dB (-0.601893f)
+#define LVEQNB_Gain_Neg7_dB (-0.553316f)
+#define LVEQNB_Gain_Neg6_dB (-0.498813f)
+#define LVEQNB_Gain_Neg5_dB (-0.437659f)
+#define LVEQNB_Gain_Neg4_dB (-0.369043f)
+#define LVEQNB_Gain_Neg3_dB (-0.292054f)
+#define LVEQNB_Gain_Neg2_dB (-0.205672f)
+#define LVEQNB_Gain_Neg1_dB (-0.108749f)
+#define LVEQNB_Gain_0_dB 0.000000f
+#define LVEQNB_Gain_1_dB 0.122018f
+#define LVEQNB_Gain_2_dB 0.258925f
+#define LVEQNB_Gain_3_dB 0.412538f
+#define LVEQNB_Gain_4_dB 0.584893f
+#define LVEQNB_Gain_5_dB 0.778279f
+#define LVEQNB_Gain_6_dB 0.995262f
+#define LVEQNB_Gain_7_dB 1.238721f
+#define LVEQNB_Gain_8_dB 1.511886f
+#define LVEQNB_Gain_9_dB 1.818383f
+#define LVEQNB_Gain_10_dB 2.162278f
+#define LVEQNB_Gain_11_dB 2.548134f
+#define LVEQNB_Gain_12_dB 2.981072f
+#define LVEQNB_Gain_13_dB 3.466836f
+#define LVEQNB_Gain_14_dB 4.011872f
+#define LVEQNB_Gain_15_dB 4.623413f
 
 /************************************************************************************/
 /*                                                                                  */
 /* Frequency table for 2*Pi/Fs                                                      */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_2PiOn_8000                                0.000785f
-#define LVEQNB_2PiOn_11025                               0.000570f
-#define LVEQNB_2PiOn_12000                               0.000524f
-#define LVEQNB_2PiOn_16000                               0.000393f
-#define LVEQNB_2PiOn_22050                               0.000285f
-#define LVEQNB_2PiOn_24000                               0.000262f
-#define LVEQNB_2PiOn_32000                               0.000196f
-#define LVEQNB_2PiOn_44100                               0.000142f
-#define LVEQNB_2PiOn_48000                               0.000131f
+#define LVEQNB_2PiOn_8000 0.000785f
+#define LVEQNB_2PiOn_11025 0.000570f
+#define LVEQNB_2PiOn_12000 0.000524f
+#define LVEQNB_2PiOn_16000 0.000393f
+#define LVEQNB_2PiOn_22050 0.000285f
+#define LVEQNB_2PiOn_24000 0.000262f
+#define LVEQNB_2PiOn_32000 0.000196f
+#define LVEQNB_2PiOn_44100 0.000142f
+#define LVEQNB_2PiOn_48000 0.000131f
 
-#define LVEQNB_2PiOn_88200                               0.000071f
-#define LVEQNB_2PiOn_96000                               0.000065f
-#define LVEQNB_2PiOn_176400                              0.000036f
-#define LVEQNB_2PiOn_192000                              0.000033f
+#define LVEQNB_2PiOn_88200 0.000071f
+#define LVEQNB_2PiOn_96000 0.000065f
+#define LVEQNB_2PiOn_176400 0.000036f
+#define LVEQNB_2PiOn_192000 0.000033f
 
 /************************************************************************************/
 /*                                                                                  */
 /* D table for 1 / ( 1 + Gain )                                                       */
 /*                                                                                  */
 /************************************************************************************/
-#define LVEQNB_100D_Neg15_dB                             5.623413f
-#define LVEQNB_100D_Neg14_dB                             5.011872f
-#define LVEQNB_100D_Neg13_dB                             4.466836f
-#define LVEQNB_100D_Neg12_dB                             3.981072f
-#define LVEQNB_100D_Neg11_dB                             3.548134f
-#define LVEQNB_100D_Neg10_dB                             3.162278f
-#define LVEQNB_100D_Neg9_dB                              2.818383f
-#define LVEQNB_100D_Neg8_dB                              2.511886f
-#define LVEQNB_100D_Neg7_dB                              2.238721f
-#define LVEQNB_100D_Neg6_dB                              1.995262f
-#define LVEQNB_100D_Neg5_dB                              1.778279f
-#define LVEQNB_100D_Neg4_dB                              1.584893f
-#define LVEQNB_100D_Neg3_dB                              1.412538f
-#define LVEQNB_100D_Neg2_dB                              1.258925f
-#define LVEQNB_100D_Neg1_dB                              1.122018f
-#define LVEQNB_100D_0_dB                                 1.000000f
+#define LVEQNB_100D_Neg15_dB 5.623413f
+#define LVEQNB_100D_Neg14_dB 5.011872f
+#define LVEQNB_100D_Neg13_dB 4.466836f
+#define LVEQNB_100D_Neg12_dB 3.981072f
+#define LVEQNB_100D_Neg11_dB 3.548134f
+#define LVEQNB_100D_Neg10_dB 3.162278f
+#define LVEQNB_100D_Neg9_dB 2.818383f
+#define LVEQNB_100D_Neg8_dB 2.511886f
+#define LVEQNB_100D_Neg7_dB 2.238721f
+#define LVEQNB_100D_Neg6_dB 1.995262f
+#define LVEQNB_100D_Neg5_dB 1.778279f
+#define LVEQNB_100D_Neg4_dB 1.584893f
+#define LVEQNB_100D_Neg3_dB 1.412538f
+#define LVEQNB_100D_Neg2_dB 1.258925f
+#define LVEQNB_100D_Neg1_dB 1.122018f
+#define LVEQNB_100D_0_dB 1.000000f
 
 #endif
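
Each gain entry is 10^(Gain_dB/20) - 1 as stated in the header comment; for example +6 dB gives 10^0.3 - 1 = 0.995262 (LVEQNB_Gain_6_dB) and -6 dB gives 10^-0.3 - 1 = -0.498813 (LVEQNB_Gain_Neg6_dB). A small sketch that regenerates the table for comparison:

    #include <cmath>
    #include <cstdio>

    // Print the (10^(Gain/20) - 1) values for -15..+15 dB to compare against the macros above.
    void print_gain_table() {
        for (int db = -15; db <= 15; ++db) {
            std::printf("%+3d dB -> %+f\n", db, std::pow(10.0, db / 20.0) - 1.0);
        }
    }
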
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 6bb4a7e..7e5caed 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -21,6 +21,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+#include <system/audio.h>
 #include "LVEQNB.h"
 #include "LVEQNB_Private.h"
 #include "VectorArithmetic.h"
@@ -32,8 +33,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define     LOW_FREQ            298             /* 32768/110 for low test frequency */
-#define     HIGH_FREQ           386             /* 32768/85 for high test frequency */
+#define LOW_FREQ 298  /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -56,23 +57,19 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t     hInstance,
-                                            LVEQNB_Params_t     *pParams)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetParameters(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
 
-    LVEQNB_Instance_t    *pInstance =(LVEQNB_Instance_t  *)hInstance;
-
-   /*
+    /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     *pParams = pInstance->Params;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
 
 /************************************************************************************/
@@ -96,20 +93,17 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t           hInstance,
-                                              LVEQNB_Capabilities_t     *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_GetCapabilities(LVEQNB_Handle_t hInstance,
+                                              LVEQNB_Capabilities_t* pCapabilities) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
 
-    LVEQNB_Instance_t    *pInstance =(LVEQNB_Instance_t  *)hInstance;
-
-    if((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pCapabilities == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     *pCapabilities = pInstance->Capabilities;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
 
 /************************************************************************************/
@@ -134,33 +128,30 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVEQNB_SetFilters(LVEQNB_Instance_t     *pInstance,
-                          LVEQNB_Params_t       *pParams)
-{
-    extern const LVM_UINT32   LVEQNB_SampleRateTab[];           /* Sample rate table */
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams) {
+    extern const LVM_UINT32 LVEQNB_SampleRateTab[]; /* Sample rate table */
 
-    LVM_UINT16          i;                                      /* Filter band index */
-    LVM_UINT32          fs = (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate];  /* Sample rate */
-    LVM_UINT32          fc;                                     /* Filter centre frequency */
-    LVM_INT16           QFactor;                                /* Filter Q factor */
+    LVM_UINT16 i; /* Filter band index */
+    LVM_UINT32 fs =
+            (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
+    LVM_UINT32 fc;     /* Filter centre frequency */
+    LVM_INT16 QFactor; /* Filter Q factor */
 
     pInstance->NBands = pParams->NBands;
 
-    for (i=0; i<pParams->NBands; i++)
-    {
+    for (i = 0; i < pParams->NBands; i++) {
         /*
          * Get the filter settings
          */
-        fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency;     /* Get the band centre frequency */
-        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor;   /* Get the band Q factor */
+        fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
+        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
 
         pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
 
         /*
          * Check for out of range frequencies
          */
-        if (fc > (fs >> 1))
-        {
+        if (fc > (fs >> 1)) {
             pInstance->pBiquadType[i] = LVEQNB_OutOfRange;
         }
 
@@ -168,7 +159,6 @@
          * Copy the filter definition to persistent memory
          */
         pInstance->pBandDefinitions[i] = pParams->pBandDefinition[i];
-
     }
 }
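
LVEQNB_SetFilters resolves the enumerated sample rate through LVEQNB_SampleRateTab and flags any band whose centre frequency lies above Nyquist (fs/2) as LVEQNB_OutOfRange so it is skipped later. A minimal sketch of just that check; the helper BandIsRealisable is hypothetical and only uses the table declared in LVEQNB_Tables.h:

#include "LVEQNB.h"
#include "LVEQNB_Tables.h"

/* Hypothetical helper: true when a band centred at frequencyHz is realisable at the
 * sample rate selected by its enumerated index (same lookup as LVEQNB_SetFilters). */
static bool BandIsRealisable(LVM_UINT32 frequencyHz, LVM_UINT16 sampleRateIndex) {
    const LVM_UINT32 fs = (LVM_UINT32)LVEQNB_SampleRateTab[sampleRateIndex];
    return frequencyHz <= (fs >> 1); /* anything above Nyquist is marked out of range */
}
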
 
@@ -186,46 +176,44 @@
 /*                                                                                  */
 /************************************************************************************/
 
-void    LVEQNB_SetCoefficients(LVEQNB_Instance_t     *pInstance)
-{
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance) {
+    LVM_UINT16 i;                    /* Filter band index */
+    LVEQNB_BiquadType_en BiquadType; /* Filter biquad type */
 
-    LVM_UINT16              i;                          /* Filter band index */
-    LVEQNB_BiquadType_en    BiquadType;                 /* Filter biquad type */
-
+    pInstance->gain.resize(pInstance->Params.NBands);
     /*
      * Set the coefficients for each band by the init function
      */
-    for (i=0; i<pInstance->Params.NBands; i++)
-    {
-
+    for (i = 0; i < pInstance->Params.NBands; i++) {
         /*
          * Check band type for correct initialisation method and recalculate the coefficients
          */
         BiquadType = pInstance->pBiquadType[i];
-        switch  (BiquadType)
-        {
-            case    LVEQNB_SinglePrecision_Float:
-            {
-                PK_FLOAT_Coefs_t      Coefficients;
+        switch (BiquadType) {
+            case LVEQNB_SinglePrecision_Float: {
+                PK_FLOAT_Coefs_t Coefficients;
                 /*
                  * Calculate the single precision coefficients
                  */
                 LVEQNB_SinglePrecCoefs((LVM_UINT16)pInstance->Params.SampleRate,
-                                       &pInstance->pBandDefinitions[i],
-                                       &Coefficients);
+                                       &pInstance->pBandDefinitions[i], &Coefficients);
                 /*
                  * Set the coefficients
                  */
-                PK_2I_D32F32CssGss_TRC_WRA_01_Init(&pInstance->pEQNB_FilterState_Float[i],
-                                                   &pInstance->pEQNB_Taps_Float[i],
-                                                   &Coefficients);
+                pInstance->gain[i] = Coefficients.G;
+                std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                        Coefficients.A0, 0.0, -(Coefficients.A0), -(Coefficients.B1),
+                        -(Coefficients.B2)};
+                pInstance->eqBiquad[i]
+                        .setCoefficients<
+                                std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(
+                                coefs);
                 break;
             }
             default:
                 break;
         }
     }
-
 }
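
Each active band is now realised as a unity direct path plus a second-order section scaled by the per-band gain: the array {A0, 0, -A0, -B1, -B2} built above programs the android::audio_utils::BiquadFilter, and Coefficients.G is kept in pInstance->gain[i] so that LVEQNB_Process can form out = in + G * section(in). A minimal single-band sketch of that structure; the helper ApplyPeakingBand, the channel count and every coefficient/gain value are placeholders, not output of LVEQNB_SinglePrecCoefs:

#include <array>
#include <vector>
#include <audio_utils/BiquadFilter.h>

using android::audio_utils::BiquadFilter;
using android::audio_utils::kBiquadNumCoefs;

/* Hypothetical helper: one peaking band, output = input + gain * section(input),
 * on interleaved stereo data. */
static void ApplyPeakingBand(const float* in, float* out, size_t frames) {
    constexpr size_t kChannels = 2;
    BiquadFilter<float> section(kChannels);

    /* Arbitrary placeholder coefficients in the five-element layout used above. */
    const std::array<float, kBiquadNumCoefs> coefs = {0.2f, 0.0f, -0.2f, -1.6f, 0.7f};
    section.setCoefficients<std::array<float, kBiquadNumCoefs>>(coefs);

    const float gain = 0.5f; /* stands in for Coefficients.G */
    std::vector<float> band(frames * kChannels);
    section.process(band.data(), in, frames);
    for (size_t i = 0; i < frames * kChannels; ++i) {
        out[i] = in[i] + gain * band[i]; /* same accumulation as LVEQNB_Process */
    }
}
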
 
 /************************************************************************************/
@@ -239,20 +227,9 @@
 /*  pInstance           Pointer to the instance                                     */
 /*                                                                                  */
 /************************************************************************************/
-void    LVEQNB_ClearFilterHistory(LVEQNB_Instance_t     *pInstance)
-{
-    LVM_FLOAT       *pTapAddress;
-    LVM_INT16       NumTaps;
-
-    pTapAddress = (LVM_FLOAT *)pInstance->pEQNB_Taps_Float;
-    NumTaps     = (LVM_INT16)((pInstance->Capabilities.MaxBands * \
-                                    sizeof(Biquad_2I_Order2_FLOAT_Taps_t)) / sizeof(LVM_FLOAT));
-
-    if (NumTaps != 0)
-    {
-        LoadConst_Float(0,                                 /* Clear the history, value 0 */
-                        pTapAddress,                       /* Destination */
-                        NumTaps);                          /* Number of words */
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance) {
+    for (size_t i = 0; i < pInstance->eqBiquad.size(); i++) {
+        pInstance->eqBiquad[i].clear();
     }
 }
 /****************************************************************************************/
@@ -277,56 +254,47 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t        hInstance,
-                                      LVEQNB_Params_t        *pParams)
-{
-
-    LVEQNB_Instance_t    *pInstance = (LVEQNB_Instance_t  *)hInstance;
-    LVM_INT16            bChange    = LVM_FALSE;
-    LVM_INT16            i = 0;
-    LVEQNB_Mode_en       OperatingModeSave ;
+LVEQNB_ReturnStatus_en LVEQNB_Control(LVEQNB_Handle_t hInstance, LVEQNB_Params_t* pParams) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+    LVM_INT16 bChange = LVM_FALSE;
+    LVM_INT16 i = 0;
+    LVEQNB_Mode_en OperatingModeSave;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
-    if((pParams->NBands !=0) && (pParams->pBandDefinition==LVM_NULL))
-    {
+    if ((pParams->NBands != 0) && (pParams->pBandDefinition == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     OperatingModeSave = pInstance->Params.OperatingMode;
 
     /* Set the alpha factor of the mixer */
-    if (pParams->SampleRate != pInstance->Params.SampleRate)
-    {
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
-        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+    if (pParams->SampleRate != pInstance->Params.SampleRate) {
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+                                           LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+                                           2);
+        LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+                                           LVEQNB_BYPASS_MIXER_TC, (LVM_Fs_en)pParams->SampleRate,
+                                           2);
     }
 
-    if( (pInstance->Params.NBands            !=  pParams->NBands          ) ||
-        (pInstance->Params.OperatingMode     !=  pParams->OperatingMode   ) ||
-        (pInstance->Params.pBandDefinition   !=  pParams->pBandDefinition ) ||
-        (pInstance->Params.SampleRate        !=  pParams->SampleRate      ) ||
-        (pInstance->Params.SourceFormat      !=  pParams->SourceFormat    ))
-    {
-
+    if ((pInstance->Params.NBands != pParams->NBands) ||
+        (pInstance->Params.OperatingMode != pParams->OperatingMode) ||
+        (pInstance->Params.pBandDefinition != pParams->pBandDefinition) ||
+        (pInstance->Params.SampleRate != pParams->SampleRate) ||
+        (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
         bChange = LVM_TRUE;
-    }
-    else
-    {
-        for(i = 0; i < pParams->NBands; i++)
-        {
-
-            if((pInstance->pBandDefinitions[i].Frequency  != pParams->pBandDefinition[i].Frequency )||
-                (pInstance->pBandDefinitions[i].Gain       != pParams->pBandDefinition[i].Gain      )||
-                (pInstance->pBandDefinitions[i].QFactor    != pParams->pBandDefinition[i].QFactor   ))
-            {
-
+    } else {
+        for (i = 0; i < pParams->NBands; i++) {
+            if ((pInstance->pBandDefinitions[i].Frequency !=
+                 pParams->pBandDefinition[i].Frequency) ||
+                (pInstance->pBandDefinitions[i].Gain != pParams->pBandDefinition[i].Gain) ||
+                (pInstance->pBandDefinitions[i].QFactor != pParams->pBandDefinition[i].QFactor)) {
                 bChange = LVM_TRUE;
             }
         }
@@ -335,19 +303,24 @@
     // During operating mode transition, there is a race condition where the mode
     // is still LVEQNB_ON, but the effect is considered disabled in the upper layers.
     // modeChange handles this special race condition.
-    const int /* bool */ modeChange = pParams->OperatingMode != OperatingModeSave
-            || (OperatingModeSave == LVEQNB_ON
-                    && pInstance->bInOperatingModeTransition
-                    && LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
+    const int /* bool */ modeChange =
+            pParams->OperatingMode != OperatingModeSave ||
+            (OperatingModeSave == LVEQNB_ON && pInstance->bInOperatingModeTransition &&
+             LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0);
+
+    /*
+     * Create biquad instance
+     */
+    pInstance->eqBiquad.resize(pParams->NBands,
+                               android::audio_utils::BiquadFilter<LVM_FLOAT>(pParams->NrChannels));
+    LVEQNB_ClearFilterHistory(pInstance);
 
     if (bChange || modeChange) {
-
         /*
          * If the sample rate has changed clear the history
          */
-        if (pInstance->Params.SampleRate != pParams->SampleRate)
-        {
-            LVEQNB_ClearFilterHistory(pInstance);           /* Clear the history */
+        if (pInstance->Params.SampleRate != pParams->SampleRate) {
+            LVEQNB_ClearFilterHistory(pInstance); /* Clear the history */
         }
 
         /*
@@ -358,45 +331,45 @@
         /*
          * Reset the filters except if the algo is switched off
          */
-        if(pParams->OperatingMode != LVEQNB_BYPASS){
+        if (pParams->OperatingMode != LVEQNB_BYPASS) {
             /*
              * Reset the filters as all parameters could have changed
              */
-            LVEQNB_SetFilters(pInstance,                        /* Instance pointer */
-                              pParams);                         /* New parameters */
+            LVEQNB_SetFilters(pInstance, /* Instance pointer */
+                              pParams);  /* New parameters */
 
             /*
              * Update the filters
              */
-            LVEQNB_SetCoefficients(pInstance);                  /* Instance pointer */
+            LVEQNB_SetCoefficients(pInstance); /* Instance pointer */
         }
 
         if (modeChange) {
-            if(pParams->OperatingMode == LVEQNB_ON)
-            {
+            if (pParams->OperatingMode == LVEQNB_ON) {
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 1.0f);
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 0.0f);
-                pInstance->BypassMixer.MixerStream[0].CallbackSet        = 1;
-                pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
-            }
-            else
-            {
+                pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+                pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+            } else {
                 /* Stay on the ON operating mode until the transition is done */
                 // This may introduce a state race condition if the effect is enabled again
                 // while in transition.  This is fixed in the modeChange logic.
                 pInstance->Params.OperatingMode = LVEQNB_ON;
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[0], 0.0f);
                 LVC_Mixer_SetTarget(&pInstance->BypassMixer.MixerStream[1], 1.0f);
-                pInstance->BypassMixer.MixerStream[0].CallbackSet        = 1;
-                pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
+                pInstance->BypassMixer.MixerStream[0].CallbackSet = 1;
+                pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
             }
-            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
-            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],LVEQNB_BYPASS_MIXER_TC,(LVM_Fs_en)pParams->SampleRate,2);
+            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],
+                                               LVEQNB_BYPASS_MIXER_TC,
+                                               (LVM_Fs_en)pParams->SampleRate, 2);
+            LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1],
+                                               LVEQNB_BYPASS_MIXER_TC,
+                                               (LVM_Fs_en)pParams->SampleRate, 2);
             pInstance->bInOperatingModeTransition = LVM_TRUE;
         }
-
     }
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
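
A minimal caller-side sketch of driving LVEQNB_Control; the helper EnableTwoBands and all band values are hypothetical, the current parameter block would normally come from LVEQNB_GetParameters, and in real code the band table is owned persistently by the calling bundle because the instance keeps the pBandDefinition pointer for change detection:

#include <vector>
#include "LVEQNB.h"

/* Hypothetical helper: switch the equaliser on with two bands.  `bands` must outlive the
 * instance's use of it; `params` is a copy of the current parameter block. */
static LVEQNB_ReturnStatus_en EnableTwoBands(LVEQNB_Handle_t hEq, LVEQNB_Params_t params,
                                             std::vector<LVEQNB_BandDef_t>& bands) {
    bands.resize(2);
    bands[0].Frequency = 60;   bands[0].Gain = 3;  bands[0].QFactor = 96; /* example values */
    bands[1].Frequency = 8000; bands[1].Gain = -2; bands[1].QFactor = 96;

    params.OperatingMode   = LVEQNB_ON;
    params.NBands          = (LVM_UINT16)bands.size();
    params.pBandDefinition = bands.data();

    return LVEQNB_Control(hEq, &params); /* LVEQNB_NULLADDRESS if NBands != 0 with a NULL table */
}
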
 
 /****************************************************************************************/
@@ -408,23 +381,22 @@
 /*  transition                                                                          */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance,
-                                      void *pGeneralPurpose,
-                                      LVM_INT16 CallbackParam)
-{
-    LVEQNB_Instance_t      *pInstance =(LVEQNB_Instance_t  *)hInstance;
-    LVM_Callback            CallBack  = pInstance->Capabilities.CallBack;
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+                                     LVM_INT16 CallbackParam) {
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+    LVM_Callback CallBack = pInstance->Capabilities.CallBack;
 
-    (void) pGeneralPurpose;
+    (void)pGeneralPurpose;
 
-     /*
-      * Send an ALGOFF event if the ON->OFF switch transition is finished
-      */
-    if((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
-       (CallbackParam == 0)){
+    /*
+     * Send an ALGOFF event if the ON->OFF switch transition is finished
+     */
+    if ((LVC_Mixer_GetTarget(&pInstance->BypassMixer.MixerStream[0]) == 0) &&
+        (CallbackParam == 0)) {
         pInstance->Params.OperatingMode = LVEQNB_BYPASS;
-        if (CallBack != LVM_NULL){
-            CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL, ALGORITHM_EQNB_ID|LVEQNB_EVENT_ALGOFF);
+        if (CallBack != LVM_NULL) {
+            CallBack(pInstance->Capabilities.pBundleInstance, LVM_NULL,
+                     ALGORITHM_EQNB_ID | LVEQNB_EVENT_ALGOFF);
         }
     }
 
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 271a914..3473262 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -21,302 +21,127 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+#include <stdlib.h>
 #include "LVEQNB.h"
 #include "LVEQNB_Private.h"
-#include "InstAlloc.h"
 #include <string.h> /* For memset */
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the instance capabilities                        */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVEQNB_SUCCESS          Succeeded                                                   */
-/*  LVEQNB_NULLADDRESS      When any of pMemoryTable and pCapabilities is NULL address  */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVEQNB_Process function                 */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVEQNB_ReturnStatus_en LVEQNB_Memory(LVEQNB_Handle_t            hInstance,
-                                     LVEQNB_MemTab_t            *pMemoryTable,
-                                     LVEQNB_Capabilities_t      *pCapabilities)
-{
-
-    INST_ALLOC          AllocMem;
-    LVEQNB_Instance_t   *pInstance = (LVEQNB_Instance_t *)hInstance;
-
-    if((pMemoryTable == LVM_NULL)|| (pCapabilities == LVM_NULL))
-    {
-        return LVEQNB_NULLADDRESS;
-    }
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(LVEQNB_Instance_t));
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Alignment    = LVEQNB_INSTANCE_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].Type         = LVEQNB_PERSISTENT;
-        pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistant data memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-        InstAlloc_AddMember(&AllocMem,                              /* High pass filter */
-                            sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-        /* Equaliser Biquad Taps */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t)));
-        /* Filter definitions */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t)));
-        /* Biquad types */
-        InstAlloc_AddMember(&AllocMem,
-                            (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en)));
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Alignment    = LVEQNB_DATA_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].Type         = LVEQNB_PERSISTENT_DATA;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistant coefficient memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            sizeof(Biquad_FLOAT_Instance_t));
-        InstAlloc_AddMember(&AllocMem,                              /* High pass filter */
-                            sizeof(Biquad_FLOAT_Instance_t));
-        /* Equaliser Biquad Instance */
-        InstAlloc_AddMember(&AllocMem,
-                            pCapabilities->MaxBands * sizeof(Biquad_FLOAT_Instance_t));
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Size         = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Alignment    = LVEQNB_COEF_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].Type         = LVEQNB_PERSISTENT_COEF;
-        pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        InstAlloc_Init(&AllocMem,
-                       LVM_NULL);
-        InstAlloc_AddMember(&AllocMem,                              /* Low pass filter */
-                            LVEQNB_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * \
-                                             pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Size              = InstAlloc_GetTotal(&AllocMem);
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Alignment         = LVEQNB_SCRATCH_ALIGN;
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].Type              = LVEQNB_SCRATCH;
-        pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress      = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVEQNB_SUCCESS);
-}
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVEQNB_Init                                                 */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the N-Band equaliser module                  */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  DBE_Memory before calling this function.                                            */
+/*  Creation and initialisation function for the N-Band equaliser module.               */
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pCapabilities           Pointer to the instance capabilities                        */
+/*  phInstance              Pointer to instance handle                                  */
+/*  pCapabilities           Pointer to the initialisation capabilities                  */
+/*  pScratch                Pointer to bundle scratch buffer                            */
 /*                                                                                      */
 /* RETURNS:                                                                             */
 /*  LVEQNB_SUCCESS          Initialisation succeeded                                    */
-/*  LVEQNB_NULLADDRESS        When pCapabilities or pMemoryTableis or phInstance are NULL */
-/*  LVEQNB_NULLADDRESS        One or more of the memory regions has a NULL base address   */
-/*                          pointer for a memory region with a non-zero size.           */
+/*  LVEQNB_NULLADDRESS      Memory allocation failed - malloc returned NULL             */
 /*                                                                                      */
 /* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVEQNB_Process function            */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
 /*                                                                                      */
 /****************************************************************************************/
 
-LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t          *phInstance,
-                                   LVEQNB_MemTab_t          *pMemoryTable,
-                                   LVEQNB_Capabilities_t    *pCapabilities)
-{
+LVEQNB_ReturnStatus_en LVEQNB_Init(LVEQNB_Handle_t* phInstance,
+                                   LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
+    LVEQNB_Instance_t* pInstance;
 
-    LVEQNB_Instance_t   *pInstance;
-    LVM_UINT32          MemSize;
-    INST_ALLOC          AllocMem;
-    LVM_INT32           i;
+    *phInstance = new LVEQNB_Instance_t{};
+    pInstance = (LVEQNB_Instance_t*)*phInstance;
 
-    /*
-     * Check for NULL pointers
-     */
-    if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pCapabilities == LVM_NULL))
-    {
-        return LVEQNB_NULLADDRESS;
-    }
-
-    /*
-     * Check the memory table for NULL pointers
-     */
-    for (i = 0; i < LVEQNB_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVEQNB_NULLADDRESS);
-            }
-        }
-    }
-
-    /*
-     * Set the instance handle if not already initialised
-     */
-
-    InstAlloc_Init(&AllocMem,  pMemoryTable->Region[LVEQNB_MEMREGION_INSTANCE].pBaseAddress);
-
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = InstAlloc_AddMember(&AllocMem, sizeof(LVEQNB_Instance_t));
-    }
-    pInstance =(LVEQNB_Instance_t  *)*phInstance;
-
-    /*
-     * Save the memory table in the instance structure
-     */
     pInstance->Capabilities = *pCapabilities;
-
-    /*
-     * Save the memory table in the instance structure and
-     * set the structure pointers
-     */
-    pInstance->MemoryTable       = *pMemoryTable;
-
-    /*
-     * Allocate coefficient memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_COEF].pBaseAddress);
+    pInstance->pScratch = pScratch;
 
     /* Equaliser Biquad Instance */
-    pInstance->pEQNB_FilterState_Float = (Biquad_FLOAT_Instance_t *)
-        InstAlloc_AddMember(&AllocMem, pCapabilities->MaxBands * \
-                                                       sizeof(Biquad_FLOAT_Instance_t));
-
-    /*
-     * Allocate data memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_PERSISTENT_DATA].pBaseAddress);
-
-    MemSize = (pCapabilities->MaxBands * sizeof(Biquad_2I_Order2_FLOAT_Taps_t));
-    pInstance->pEQNB_Taps_Float = (Biquad_2I_Order2_FLOAT_Taps_t *)InstAlloc_AddMember(&AllocMem,
-                                                                                       MemSize);
-    MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BandDef_t));
-    pInstance->pBandDefinitions  = (LVEQNB_BandDef_t *)InstAlloc_AddMember(&AllocMem,
-                                                                           MemSize);
+    LVM_UINT32 MemSize = pCapabilities->MaxBands * sizeof(*(pInstance->pBandDefinitions));
+    pInstance->pBandDefinitions = (LVEQNB_BandDef_t*)calloc(1, MemSize);
+    if (pInstance->pBandDefinitions == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
+    }
     // clear all the bands, setting their gain to 0, otherwise when applying new params,
     // it will compare against uninitialized values
     memset(pInstance->pBandDefinitions, 0, MemSize);
-    MemSize = (pCapabilities->MaxBands * sizeof(LVEQNB_BiquadType_en));
-    pInstance->pBiquadType = (LVEQNB_BiquadType_en *)InstAlloc_AddMember(&AllocMem,
-                                                                         MemSize);
 
-    /*
-     * Internally map, structure and allign scratch memory
-     */
-    InstAlloc_Init(&AllocMem,
-                   pMemoryTable->Region[LVEQNB_MEMREGION_SCRATCH].pBaseAddress);
+    MemSize = (pCapabilities->MaxBands * sizeof(*(pInstance->pBiquadType)));
+    pInstance->pBiquadType = (LVEQNB_BiquadType_en*)calloc(1, MemSize);
+    if (pInstance->pBiquadType == LVM_NULL) {
+        return LVEQNB_NULLADDRESS;
+    }
 
-    pInstance->pFastTemporary = (LVM_FLOAT *)InstAlloc_AddMember(&AllocMem,
-                                                                 sizeof(LVM_FLOAT));
+    pInstance->pFastTemporary = (LVM_FLOAT*)pScratch;
 
     /*
      * Update the instance parameters
      */
-    pInstance->Params.NBands          = 0;
-    pInstance->Params.OperatingMode   = LVEQNB_BYPASS;
+    pInstance->Params.NBands = 0;
+    pInstance->Params.OperatingMode = LVEQNB_BYPASS;
     pInstance->Params.pBandDefinition = LVM_NULL;
-    pInstance->Params.SampleRate      = LVEQNB_FS_8000;
-    pInstance->Params.SourceFormat    = LVEQNB_STEREO;
+    pInstance->Params.SampleRate = LVEQNB_FS_8000;
+    pInstance->Params.SourceFormat = LVEQNB_STEREO;
 
     /*
      * Initialise the filters
      */
-    LVEQNB_SetFilters(pInstance,                        /* Set the filter types */
+    LVEQNB_SetFilters(pInstance, /* Set the filter types */
                       &pInstance->Params);
 
-    LVEQNB_SetCoefficients(pInstance);                  /* Set the filter coefficients */
-
-    LVEQNB_ClearFilterHistory(pInstance);               /* Clear the filter history */
-
     /*
      * Initialise the bypass variables
      */
-    pInstance->BypassMixer.MixerStream[0].CallbackSet        = 0;
-    pInstance->BypassMixer.MixerStream[0].CallbackParam      = 0;
-    pInstance->BypassMixer.MixerStream[0].pCallbackHandle    = (void*)pInstance;
-    pInstance->BypassMixer.MixerStream[0].pCallBack          = LVEQNB_BypassMixerCallBack;
+    pInstance->BypassMixer.MixerStream[0].CallbackSet = 0;
+    pInstance->BypassMixer.MixerStream[0].CallbackParam = 0;
+    pInstance->BypassMixer.MixerStream[0].pCallbackHandle = (void*)pInstance;
+    pInstance->BypassMixer.MixerStream[0].pCallBack = LVEQNB_BypassMixerCallBack;
 
-    LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0],0,0);
-    LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0],0,LVM_FS_8000,2);
+    LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[0], 0, 0);
+    LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[0], 0, LVM_FS_8000, 2);
 
-    pInstance->BypassMixer.MixerStream[1].CallbackSet        = 1;
-    pInstance->BypassMixer.MixerStream[1].CallbackParam      = 0;
-    pInstance->BypassMixer.MixerStream[1].pCallbackHandle    = LVM_NULL;
-    pInstance->BypassMixer.MixerStream[1].pCallBack          = LVM_NULL;
+    pInstance->BypassMixer.MixerStream[1].CallbackSet = 1;
+    pInstance->BypassMixer.MixerStream[1].CallbackParam = 0;
+    pInstance->BypassMixer.MixerStream[1].pCallbackHandle = LVM_NULL;
+    pInstance->BypassMixer.MixerStream[1].pCallBack = LVM_NULL;
     LVC_Mixer_Init(&pInstance->BypassMixer.MixerStream[1], 0, 1.0f);
     LVC_Mixer_SetTimeConstant(&pInstance->BypassMixer.MixerStream[1], 0, LVM_FS_8000, 2);
 
-    pInstance->bInOperatingModeTransition      = LVM_FALSE;
+    pInstance->bInOperatingModeTransition = LVM_FALSE;
 
-    return(LVEQNB_SUCCESS);
+    return (LVEQNB_SUCCESS);
 }
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVEQNB_DeInit                                               */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*    Free the memory allocated during LVEQNB_Init, including the instance handle       */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  phInstance              Pointer to instance handle                                  */
+/*                                                                                      */
+/* NOTES:                                                                               */
+/*  1.  This function must not be interrupted by the LVEQNB_Process function            */
+/*                                                                                      */
+/****************************************************************************************/
 
+void LVEQNB_DeInit(LVEQNB_Handle_t* phInstance) {
+    LVEQNB_Instance_t* pInstance;
+    if (phInstance == LVM_NULL) {
+        return;
+    }
+    pInstance = (LVEQNB_Instance_t*)*phInstance;
+
+    if (pInstance->pBandDefinitions != LVM_NULL) {
+        free(pInstance->pBandDefinitions);
+        pInstance->pBandDefinitions = LVM_NULL;
+    }
+    if (pInstance->pBiquadType != LVM_NULL) {
+        free(pInstance->pBiquadType);
+        pInstance->pBiquadType = LVM_NULL;
+    }
+    delete pInstance;
+    *phInstance = LVM_NULL;
+}
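
The memory-table API (LVEQNB_Memory) is gone: LVEQNB_Init now allocates the instance and its band tables itself and only borrows the bundle's scratch buffer, while LVEQNB_DeInit releases everything and nulls the handle. A minimal lifecycle sketch; the helper EqLifecycle, the MaxBands/MaxBlockSize values and the scratch size of 2 * channels * MaxBlockSize floats are assumptions based on how LVEQNB_Process lays out pFastTemporary:

#include <vector>
#include "LVEQNB.h"

/* Hypothetical helper: create an instance, use it, and tear it down again. */
static LVEQNB_ReturnStatus_en EqLifecycle() {
    LVEQNB_Capabilities_t caps{};
    caps.MaxBands     = 5;    /* example capability values */
    caps.MaxBlockSize = 4096;

    /* Scratch assumption: Process copies NrSamples floats into scratch and writes each
     * band's output immediately behind them, so 2 * 2 * MaxBlockSize floats for stereo. */
    std::vector<LVM_FLOAT> scratch(2 * 2 * caps.MaxBlockSize);

    LVEQNB_Handle_t hEq = LVM_NULL;
    const LVEQNB_ReturnStatus_en status = LVEQNB_Init(&hEq, &caps, scratch.data());
    if (status != LVEQNB_SUCCESS) {
        LVEQNB_DeInit(&hEq); /* frees whatever was allocated before the failure */
        return status;
    }

    /* ... LVEQNB_Control / LVEQNB_Process calls go here ... */

    LVEQNB_DeInit(&hEq);     /* also resets hEq to LVM_NULL */
    return LVEQNB_SUCCESS;
}
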
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
index 40facfb..2225fec 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Private.h
@@ -24,7 +24,8 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#include "LVEQNB.h"                                     /* Calling or Application layer definitions */
+#include <audio_utils/BiquadFilter.h>
+#include "LVEQNB.h" /* Calling or Application layer definitions */
 #include "BIQUAD.h"
 #include "LVC_Mixer.h"
 
@@ -35,21 +36,8 @@
 /****************************************************************************************/
 
 /* General */
-#define LVEQNB_INVALID              0xFFFF              /* Invalid init parameter */
-
-/* Memory */
-#define LVEQNB_INSTANCE_ALIGN       4                   /* 32-bit alignment for instance structures */
-#define LVEQNB_DATA_ALIGN           4                   /* 32-bit alignment for structures */
-#define LVEQNB_COEF_ALIGN           4                   /* 32-bit alignment for long words */
-#ifdef SUPPORT_MC
-/* Number of buffers required for inplace processing */
-#define LVEQNB_SCRATCHBUFFERS       (LVM_MAX_CHANNELS * 2)
-#else
-#define LVEQNB_SCRATCHBUFFERS       4                   /* Number of buffers required for inplace processing */
-#endif
-#define LVEQNB_SCRATCH_ALIGN        4                   /* 32-bit alignment for long data */
-
-#define LVEQNB_BYPASS_MIXER_TC      100                 /* Bypass Mixer TC */
+#define LVEQNB_INVALID 0xFFFF      /* Invalid init parameter */
+#define LVEQNB_BYPASS_MIXER_TC 100 /* Bypass Mixer TC */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -58,13 +46,12 @@
 /****************************************************************************************/
 
 /* Filter biquad types */
-typedef enum
-{
+typedef enum {
     LVEQNB_SinglePrecision_Float = -1,
     LVEQNB_SinglePrecision = 0,
     LVEQNB_DoublePrecision = 1,
-    LVEQNB_OutOfRange      = 2,
-    LVEQNB_BIQUADTYPE_MAX  = LVM_MAXINT_32
+    LVEQNB_OutOfRange = 2,
+    LVEQNB_BIQUADTYPE_MAX = LVM_MAXINT_32
 } LVEQNB_BiquadType_en;
 
 /****************************************************************************************/
@@ -74,28 +61,28 @@
 /****************************************************************************************/
 
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVEQNB_MemTab_t                 MemoryTable;        /* Instance memory allocation table */
-    LVEQNB_Params_t                 Params;             /* Instance parameters */
-    LVEQNB_Capabilities_t           Capabilities;       /* Instance capabilities */
+    void* pScratch;                     /* Pointer to bundle scratch buffer */
+    LVEQNB_Params_t Params;             /* Instance parameters */
+    LVEQNB_Capabilities_t Capabilities; /* Instance capabilities */
 
     /* Aligned memory pointers */
-    LVM_FLOAT                      *pFastTemporary;        /* Fast temporary data base address */
+    LVM_FLOAT* pFastTemporary; /* Fast temporary data base address */
 
-    Biquad_2I_Order2_FLOAT_Taps_t   *pEQNB_Taps_Float;        /* Equaliser Taps */
-    Biquad_FLOAT_Instance_t         *pEQNB_FilterState_Float; /* State for each filter band */
+    std::vector<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            eqBiquad;            /* Biquad filter instances */
+    std::vector<LVM_FLOAT> gain; /* Gain values for all bands */
 
     /* Filter definitions and call back */
-    LVM_UINT16                      NBands;             /* Number of bands */
-    LVEQNB_BandDef_t                *pBandDefinitions;  /* Filter band definitions */
-    LVEQNB_BiquadType_en            *pBiquadType;       /* Filter biquad types */
+    LVM_UINT16 NBands;                  /* Number of bands */
+    LVEQNB_BandDef_t* pBandDefinitions; /* Filter band definitions */
+    LVEQNB_BiquadType_en* pBiquadType;  /* Filter biquad types */
 
     /* Bypass variable */
-    LVMixer3_2St_FLOAT_st     BypassMixer;
+    LVMixer3_2St_FLOAT_st BypassMixer;
 
-    LVM_INT16               bInOperatingModeTransition; /* Operating mode transition flag */
+    LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
 
 } LVEQNB_Instance_t;
 
@@ -105,17 +92,15 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-void    LVEQNB_SetFilters(LVEQNB_Instance_t   *pInstance,
-                          LVEQNB_Params_t     *pParams);
+void LVEQNB_SetFilters(LVEQNB_Instance_t* pInstance, LVEQNB_Params_t* pParams);
 
-void    LVEQNB_SetCoefficients(LVEQNB_Instance_t    *pInstance);
+void LVEQNB_SetCoefficients(LVEQNB_Instance_t* pInstance);
 
-void    LVEQNB_ClearFilterHistory(LVEQNB_Instance_t *pInstance);
-LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16        Fs,
-                                              LVEQNB_BandDef_t  *pFilterDefinition,
-                                              PK_FLOAT_Coefs_t    *pCoefficients);
+void LVEQNB_ClearFilterHistory(LVEQNB_Instance_t* pInstance);
+LVEQNB_ReturnStatus_en LVEQNB_SinglePrecCoefs(LVM_UINT16 Fs, LVEQNB_BandDef_t* pFilterDefinition,
+                                              PK_FLOAT_Coefs_t* pCoefficients);
 
-LVM_INT32 LVEQNB_BypassMixerCallBack (void* hInstance, void *pGeneralPurpose, LVM_INT16 CallbackParam);
+LVM_INT32 LVEQNB_BypassMixerCallBack(void* hInstance, void* pGeneralPurpose,
+                                     LVM_INT16 CallbackParam);
 
 #endif /* __LVEQNB_PRIVATE_H__ */
-
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
index 65eff53..b177dd4 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Process.cpp
@@ -34,7 +34,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-#define SHIFT       13
+#define SHIFT 13
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -58,89 +58,60 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVEQNB_ReturnStatus_en LVEQNB_Process(LVEQNB_Handle_t       hInstance,
-                                      const LVM_FLOAT       *pInData,
-                                      LVM_FLOAT             *pOutData,
-                                      const LVM_UINT16      NrFrames)
-{                                     // updated to use samples = frames * channels.
-    LVEQNB_Instance_t   *pInstance = (LVEQNB_Instance_t  *)hInstance;
-
-#ifdef SUPPORT_MC
-    // Mono passed in as stereo
-    const LVM_INT32 NrChannels = pInstance->Params.NrChannels == 1
-        ? 2 : pInstance->Params.NrChannels;
-#else
-    const LVM_INT32 NrChannels = 2; // FCC_2
-#endif
+LVEQNB_ReturnStatus_en LVEQNB_Process(
+        LVEQNB_Handle_t hInstance, const LVM_FLOAT* pInData, LVM_FLOAT* pOutData,
+        const LVM_UINT16 NrFrames) {  // updated to use samples = frames * channels.
+    LVEQNB_Instance_t* pInstance = (LVEQNB_Instance_t*)hInstance;
+    const LVM_INT32 NrChannels = pInstance->Params.NrChannels;
     const LVM_INT32 NrSamples = NrChannels * NrFrames;
 
-     /* Check for NULL pointers */
-    if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    /* Check for NULL pointers */
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return LVEQNB_NULLADDRESS;
     }
 
     /* Check if the input and output data buffers are 32-bit aligned */
-    if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0))
-    {
+    if ((((uintptr_t)pInData % 4) != 0) || (((uintptr_t)pOutData % 4) != 0)) {
         return LVEQNB_ALIGNMENTERROR;
     }
 
-    LVM_FLOAT * const pScratch = (LVM_FLOAT *)pInstance->pFastTemporary;
+    LVM_FLOAT* const pScratch = (LVM_FLOAT*)pInstance->pFastTemporary;
 
     /*
-    * Check the number of frames is not too large
-    */
-    if (NrFrames > pInstance->Capabilities.MaxBlockSize)
-    {
+     * Check the number of frames is not too large
+     */
+    if (NrFrames > pInstance->Capabilities.MaxBlockSize) {
         return LVEQNB_TOOMANYSAMPLES;
     }
 
-    if (pInstance->Params.OperatingMode == LVEQNB_ON)
-    {
+    if (pInstance->Params.OperatingMode == LVEQNB_ON) {
         /*
          * Copy input data in to scratch buffer
          */
-        Copy_Float(pInData,     /* Source */
-                   pScratch,    /* Destination */
+        Copy_Float(pInData,  /* Source */
+                   pScratch, /* Destination */
                    (LVM_INT16)NrSamples);
 
         /*
          * For each section execute the filter unless the gain is 0dB
          */
-        if (pInstance->NBands != 0)
-        {
-            for (LVM_UINT16 i = 0; i < pInstance->NBands; i++)
-            {
+        if (pInstance->NBands != 0) {
+            for (LVM_UINT16 i = 0; i < pInstance->NBands; i++) {
                 /*
                  * Check if band is non-zero dB gain
                  */
-                if (pInstance->pBandDefinitions[i].Gain != 0)
-                {
-                    /*
-                     * Get the address of the biquad instance
-                     */
-                    Biquad_FLOAT_Instance_t *pBiquad = &pInstance->pEQNB_FilterState_Float[i];
-
+                if (pInstance->pBandDefinitions[i].Gain != 0) {
                     /*
                      * Select single or double precision as required
                      */
-                    switch (pInstance->pBiquadType[i])
-                    {
-                        case LVEQNB_SinglePrecision_Float:
-                        {
-#ifdef SUPPORT_MC
-                            PK_Mc_D32F32C14G11_TRC_WRA_01(pBiquad,
-                                                          pScratch,
-                                                          pScratch,
-                                                          (LVM_INT16)NrFrames,
-                                                          (LVM_INT16)NrChannels);
-#else
-                            PK_2I_D32F32C14G11_TRC_WRA_01(pBiquad,
-                                                          pScratch,
-                                                          pScratch,
-                                                          (LVM_INT16)NrFrames);
-#endif
+                    switch (pInstance->pBiquadType[i]) {
+                        case LVEQNB_SinglePrecision_Float: {
+                            LVM_FLOAT* pTemp = pScratch + NrSamples;
+                            pInstance->eqBiquad[i].process(pTemp, pScratch, NrFrames);
+                            const auto gain = pInstance->gain[i];
+                            for (unsigned j = 0; j < NrSamples; ++j) {
+                                pScratch[j] += pTemp[j] * gain;
+                            }
                             break;
                         }
                         default:
@@ -150,44 +121,27 @@
             }
         }
 
-        if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-#ifdef SUPPORT_MC
-            LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer,
-                                       pScratch,
-                                       pInData,
-                                       pScratch,
-                                       (LVM_INT16)NrFrames,
-                                       (LVM_INT16)NrChannels);
-#else
-            LVC_MixSoft_2St_D16C31_SAT(&pInstance->BypassMixer,
-                                       pScratch,
-                                       pInData,
-                                       pScratch,
-                                       (LVM_INT16)NrSamples);
-#endif
+        if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
+            LVC_MixSoft_2Mc_D16C31_SAT(&pInstance->BypassMixer, pScratch, pInData, pScratch,
+                                       (LVM_INT16)NrFrames, (LVM_INT16)NrChannels);
             // duplicate with else clause(s)
-            Copy_Float(pScratch,                         /* Source */
-                       pOutData,                         /* Destination */
-                       (LVM_INT16)NrSamples);            /* All channel samples */
-        }
-        else{
+            Copy_Float(pScratch,              /* Source */
+                       pOutData,              /* Destination */
+                       (LVM_INT16)NrSamples); /* All channel samples */
+        } else {
             Copy_Float(pScratch,              /* Source */
                        pOutData,              /* Destination */
                        (LVM_INT16)NrSamples); /* All channel samples */
         }
-    }
-    else
-    {
+    } else {
         /*
          * Mode is OFF so copy the data if necessary
          */
-        if (pInData != pOutData)
-        {
-            Copy_Float(pInData,                          /* Source */
-                       pOutData,                         /* Destination */
-                       (LVM_INT16)NrSamples);            /* All channel samples */
+        if (pInData != pOutData) {
+            Copy_Float(pInData,               /* Source */
+                       pOutData,              /* Destination */
+                       (LVM_INT16)NrSamples); /* All channel samples */
         }
     }
     return LVEQNB_SUCCESS;
-
 }
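
With the SUPPORT_MC paths removed, LVEQNB_Process always works on Params.NrChannels interleaved channels: the input is copied into scratch, every band with non-zero gain runs its BiquadFilter into the upper half of scratch and is accumulated back scaled by gain[i], and the result passes through the bypass mixer during mode transitions. A minimal caller-side sketch; the helper RunOneBlock, the frame count and the silent buffers are hypothetical, and the instance is assumed to be initialised and configured as in the earlier sketches:

#include <vector>
#include "LVEQNB.h"

/* Hypothetical helper: run one block of interleaved audio through the equaliser. */
static LVEQNB_ReturnStatus_en RunOneBlock(LVEQNB_Handle_t hEq, LVM_UINT16 channels) {
    const LVM_UINT16 frames = 256; /* must not exceed Capabilities.MaxBlockSize */
    std::vector<LVM_FLOAT> input(frames * channels, 0.0f); /* silence, for illustration */
    std::vector<LVM_FLOAT> output(frames * channels);

    /* Buffers must be 32-bit aligned or LVEQNB_ALIGNMENTERROR is returned;
     * vector<float> storage already satisfies that. */
    return LVEQNB_Process(hEq, input.data(), output.data(), frames);
}
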
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
index 0628114..d79d7c9 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.cpp
@@ -35,20 +35,9 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVEQNB_SampleRateTab[] = {8000,                    /* 8kS/s  */
-                                              11025,
-                                              12000,
-                                              16000,
-                                              22050,
-                                              24000,
-                                              32000,
-                                              44100,
-                                              48000,
-                                              88200,
-                                              96000,
-                                              176400,
-                                              192000
-};
+const LVM_UINT32 LVEQNB_SampleRateTab[] = {8000, /* 8kS/s  */
+                                           11025, 12000, 16000, 22050, 24000,  32000,
+                                           44100, 48000, 88200, 96000, 176400, 192000};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -59,74 +48,34 @@
 /*
  * Table for 2 * Pi / Fs
  */
-const LVM_FLOAT     LVEQNB_TwoPiOnFsTable[] = {LVEQNB_2PiOn_8000,      /* 8kS/s */
-                                               LVEQNB_2PiOn_11025,
-                                               LVEQNB_2PiOn_12000,
-                                               LVEQNB_2PiOn_16000,
-                                               LVEQNB_2PiOn_22050,
-                                               LVEQNB_2PiOn_24000,
-                                               LVEQNB_2PiOn_32000,
-                                               LVEQNB_2PiOn_44100,
-                                               LVEQNB_2PiOn_48000
-                                              ,LVEQNB_2PiOn_88200
-                                              ,LVEQNB_2PiOn_96000
-                                              ,LVEQNB_2PiOn_176400
-                                              ,LVEQNB_2PiOn_192000
-                                               };
+const LVM_FLOAT LVEQNB_TwoPiOnFsTable[] = {
+        LVEQNB_2PiOn_8000, /* 8kS/s */
+        LVEQNB_2PiOn_11025, LVEQNB_2PiOn_12000, LVEQNB_2PiOn_16000,  LVEQNB_2PiOn_22050,
+        LVEQNB_2PiOn_24000, LVEQNB_2PiOn_32000, LVEQNB_2PiOn_44100,  LVEQNB_2PiOn_48000,
+        LVEQNB_2PiOn_88200, LVEQNB_2PiOn_96000, LVEQNB_2PiOn_176400, LVEQNB_2PiOn_192000};
 
 /*
  * Gain table
  */
-const LVM_FLOAT     LVEQNB_GainTable[] = {LVEQNB_Gain_Neg15_dB,        /* -15dB gain */
-                                          LVEQNB_Gain_Neg14_dB,
-                                          LVEQNB_Gain_Neg13_dB,
-                                          LVEQNB_Gain_Neg12_dB,
-                                          LVEQNB_Gain_Neg11_dB,
-                                          LVEQNB_Gain_Neg10_dB,
-                                          LVEQNB_Gain_Neg9_dB,
-                                          LVEQNB_Gain_Neg8_dB,
-                                          LVEQNB_Gain_Neg7_dB,
-                                          LVEQNB_Gain_Neg6_dB,
-                                          LVEQNB_Gain_Neg5_dB,
-                                          LVEQNB_Gain_Neg4_dB,
-                                          LVEQNB_Gain_Neg3_dB,
-                                          LVEQNB_Gain_Neg2_dB,
-                                          LVEQNB_Gain_Neg1_dB,
-                                          LVEQNB_Gain_0_dB,            /* 0dB gain */
-                                          LVEQNB_Gain_1_dB,
-                                          LVEQNB_Gain_2_dB,
-                                          LVEQNB_Gain_3_dB,
-                                          LVEQNB_Gain_4_dB,
-                                          LVEQNB_Gain_5_dB,
-                                          LVEQNB_Gain_6_dB,
-                                          LVEQNB_Gain_7_dB,
-                                          LVEQNB_Gain_8_dB,
-                                          LVEQNB_Gain_9_dB,
-                                          LVEQNB_Gain_10_dB,
-                                          LVEQNB_Gain_11_dB,
-                                          LVEQNB_Gain_12_dB,
-                                          LVEQNB_Gain_13_dB,
-                                          LVEQNB_Gain_14_dB,
-                                          LVEQNB_Gain_15_dB};          /* +15dB gain */
+const LVM_FLOAT LVEQNB_GainTable[] = {
+        LVEQNB_Gain_Neg15_dB, /* -15dB gain */
+        LVEQNB_Gain_Neg14_dB, LVEQNB_Gain_Neg13_dB, LVEQNB_Gain_Neg12_dB, LVEQNB_Gain_Neg11_dB,
+        LVEQNB_Gain_Neg10_dB, LVEQNB_Gain_Neg9_dB,  LVEQNB_Gain_Neg8_dB,  LVEQNB_Gain_Neg7_dB,
+        LVEQNB_Gain_Neg6_dB,  LVEQNB_Gain_Neg5_dB,  LVEQNB_Gain_Neg4_dB,  LVEQNB_Gain_Neg3_dB,
+        LVEQNB_Gain_Neg2_dB,  LVEQNB_Gain_Neg1_dB,  LVEQNB_Gain_0_dB, /* 0dB gain */
+        LVEQNB_Gain_1_dB,     LVEQNB_Gain_2_dB,     LVEQNB_Gain_3_dB,     LVEQNB_Gain_4_dB,
+        LVEQNB_Gain_5_dB,     LVEQNB_Gain_6_dB,     LVEQNB_Gain_7_dB,     LVEQNB_Gain_8_dB,
+        LVEQNB_Gain_9_dB,     LVEQNB_Gain_10_dB,    LVEQNB_Gain_11_dB,    LVEQNB_Gain_12_dB,
+        LVEQNB_Gain_13_dB,    LVEQNB_Gain_14_dB,    LVEQNB_Gain_15_dB}; /* +15dB gain */
 /*
  * D table for 100 / (Gain + 1)
  */
-const LVM_FLOAT    LVEQNB_DTable[] = {LVEQNB_100D_Neg15_dB,            /* -15dB gain */
-                                      LVEQNB_100D_Neg14_dB,
-                                      LVEQNB_100D_Neg13_dB,
-                                      LVEQNB_100D_Neg12_dB,
-                                      LVEQNB_100D_Neg11_dB,
-                                      LVEQNB_100D_Neg10_dB,
-                                      LVEQNB_100D_Neg9_dB,
-                                      LVEQNB_100D_Neg8_dB,
-                                      LVEQNB_100D_Neg7_dB,
-                                      LVEQNB_100D_Neg6_dB,
-                                      LVEQNB_100D_Neg5_dB,
-                                      LVEQNB_100D_Neg4_dB,
-                                      LVEQNB_100D_Neg3_dB,
-                                      LVEQNB_100D_Neg2_dB,
-                                      LVEQNB_100D_Neg1_dB,
-                                      LVEQNB_100D_0_dB};               /* 0dB gain */
+const LVM_FLOAT LVEQNB_DTable[] = {
+        LVEQNB_100D_Neg15_dB, /* -15dB gain */
+        LVEQNB_100D_Neg14_dB, LVEQNB_100D_Neg13_dB, LVEQNB_100D_Neg12_dB, LVEQNB_100D_Neg11_dB,
+        LVEQNB_100D_Neg10_dB, LVEQNB_100D_Neg9_dB,  LVEQNB_100D_Neg8_dB,  LVEQNB_100D_Neg7_dB,
+        LVEQNB_100D_Neg6_dB,  LVEQNB_100D_Neg5_dB,  LVEQNB_100D_Neg4_dB,  LVEQNB_100D_Neg3_dB,
+        LVEQNB_100D_Neg2_dB,  LVEQNB_100D_Neg1_dB,  LVEQNB_100D_0_dB}; /* 0dB gain */
 /************************************************************************************/
 /*                                                                                  */
 /*    Filter polynomial coefficients                                                */
@@ -142,13 +91,13 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-const LVM_INT16     LVEQNB_CosCoef[] = {3,                             /* Shifts */
-                                        4096,                          /* a0 */
-                                        -36,                           /* a1 */
-                                        -19725,                        /* a2 */
-                                        -2671,                         /* a3 */
-                                        23730,                         /* a4 */
-                                        -9490};                        /* a5 */
+const LVM_INT16 LVEQNB_CosCoef[] = {3,      /* Shifts */
+                                    4096,   /* a0 */
+                                    -36,    /* a1 */
+                                    -19725, /* a2 */
+                                    -2671,  /* a3 */
+                                    23730,  /* a4 */
+                                    -9490}; /* a5 */
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -164,9 +113,8 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-const LVM_INT16     LVEQNB_DPCosCoef[] = {1,                           /* Shifts */
-                                          0,                           /* a0 */
-                                          -6,                          /* a1 */
-                                          16586,                       /* a2 */
-                                          -44};                        /* a3 */
-
+const LVM_INT16 LVEQNB_DPCosCoef[] = {1,     /* Shifts */
+                                      0,     /* a0 */
+                                      -6,    /* a1 */
+                                      16586, /* a2 */
+                                      -44};  /* a3 */
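The coefficient tables above encode the fixed-point cosine approximation described in the comments: element 0 is a shift count and the remaining elements are the polynomial coefficients a0..aN, with the double-precision variant using the error form Cos(x) = 1.0 - CosErr(x). The sketch below only illustrates that table layout; the Horner evaluation and the placement of the final shift are assumptions, since the library routine that actually consumes these tables is not part of this diff.

#include <cstdint>

// Illustrative consumer of the table layout above: table[0] is a shift count,
// table[1..numCoefs] are polynomial coefficients a0..a(numCoefs-1), and x_q15
// is a Q15 fraction of the 0..Pi input range. The final-shift placement and
// the Q-format of the accumulator are assumptions, not the library's code.
static int32_t EvalCoefTable(const int16_t* table, int numCoefs, int32_t x_q15) {
    int32_t acc = 0;
    for (int i = numCoefs; i >= 1; --i) {  // Horner: a0 + x*(a1 + x*(a2 + ...))
        acc = table[i] + (int32_t)(((int64_t)acc * x_q15) >> 15);
    }
    return acc << (int)table[0];
}

With LVEQNB_CosCoef this would be called with numCoefs = 6 (a0..a5), and with LVEQNB_DPCosCoef with numCoefs = 4; again, only the table layout, not the exact Q-format, is taken from the diff.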
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
index a71eeb9..ab51196 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Tables.h
@@ -27,7 +27,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVEQNB_SampleRateTab[];
+extern const LVM_UINT32 LVEQNB_SampleRateTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -38,17 +38,17 @@
 /*
  * Table for 2 * Pi / Fs
  */
-extern const LVM_FLOAT     LVEQNB_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVEQNB_TwoPiOnFsTable[];
 
 /*
  * Gain table
  */
-extern const LVM_FLOAT     LVEQNB_GainTable[];
+extern const LVM_FLOAT LVEQNB_GainTable[];
 
 /*
  * D table for 100 / (Gain + 1)
  */
-extern const LVM_FLOAT     LVEQNB_DTable[];
+extern const LVM_FLOAT LVEQNB_DTable[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -65,7 +65,7 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-extern const LVM_INT16     LVEQNB_CosCoef[];
+extern const LVM_INT16 LVEQNB_CosCoef[];
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -81,6 +81,6 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-extern const LVM_INT16     LVEQNB_DPCosCoef[];
+extern const LVM_INT16 LVEQNB_DPCosCoef[];
 
 #endif /* __LVEQNB_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 8c91ea9..82e94da 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -41,11 +41,11 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* General */
-#define LVREV_BLOCKSIZE_MULTIPLE                1       /* Processing block size multiple */
-#define LVREV_MAX_T60                        7000       /* Maximum decay time is 7000ms */
+#define LVREV_BLOCKSIZE_MULTIPLE 1 /* Processing block size multiple */
+#define LVREV_MAX_T60 7000         /* Maximum decay time is 7000ms */
 
 /* Memory table*/
-#define LVREV_NR_MEMORY_REGIONS                 4       /* Number of memory regions */
+#define LVREV_NR_MEMORY_REGIONS 4 /* Number of memory regions */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -53,24 +53,22 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* Instance handle */
-typedef void *LVREV_Handle_t;
+typedef void* LVREV_Handle_t;
 
 /* Status return values */
-typedef enum
-{
-    LVREV_SUCCESS            = 0,                       /* Successful return from a routine */
-    LVREV_NULLADDRESS        = 1,                       /* NULL allocation address */
-    LVREV_OUTOFRANGE         = 2,                       /* Out of range control parameter */
-    LVREV_INVALIDNUMSAMPLES  = 3,                       /* Invalid number of samples */
+typedef enum {
+    LVREV_SUCCESS = 0,           /* Successful return from a routine */
+    LVREV_NULLADDRESS = 1,       /* NULL allocation address */
+    LVREV_OUTOFRANGE = 2,        /* Out of range control parameter */
+    LVREV_INVALIDNUMSAMPLES = 3, /* Invalid number of samples */
     LVREV_RETURNSTATUS_DUMMY = LVM_MAXENUM
 } LVREV_ReturnStatus_en;
 
 /* Reverb delay lines */
-typedef enum
-{
-    LVREV_DELAYLINES_1     = 1,                         /* One delay line */
-    LVREV_DELAYLINES_2     = 2,                         /* Two delay lines */
-    LVREV_DELAYLINES_4     = 4,                         /* Four delay lines */
+typedef enum {
+    LVREV_DELAYLINES_1 = 1, /* One delay line */
+    LVREV_DELAYLINES_2 = 2, /* Two delay lines */
+    LVREV_DELAYLINES_4 = 4, /* Four delay lines */
     LVREV_DELAYLINES_DUMMY = LVM_MAXENUM
 } LVREV_NumDelayLines_en;
 
@@ -80,41 +78,33 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVM_MemoryRegion_st        Region[LVREV_NR_MEMORY_REGIONS];  /* One definition for each region */
-} LVREV_MemoryTable_st;
-
 /* Control Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_Mode_en                 OperatingMode;          /* Operating mode */
-    LVM_Fs_en                   SampleRate;             /* Sample rate */
-    LVM_Format_en               SourceFormat;           /* Source data format */
+    LVM_Mode_en OperatingMode;  /* Operating mode */
+    LVM_Fs_en SampleRate;       /* Sample rate */
+    LVM_Format_en SourceFormat; /* Source data format */
 
     /* Parameters for REV */
-    LVM_UINT16                  Level;                  /* Level, 0 to 100 representing percentage of reverb */
-    LVM_UINT32                  LPF;                    /* Low pass filter, in Hz */
-    LVM_UINT32                  HPF;                    /* High pass filter, in Hz */
+    LVM_UINT16 Level; /* Level, 0 to 100 representing percentage of reverb */
+    LVM_UINT32 LPF;   /* Low pass filter, in Hz */
+    LVM_UINT32 HPF;   /* High pass filter, in Hz */
 
-    LVM_UINT16                  T60;                    /* Decay time constant, in ms */
-    LVM_UINT16                  Density;                /* Echo density, 0 to 100 for minimum to maximum density */
-    LVM_UINT16                  Damping;                /* Damping */
-    LVM_UINT16                  RoomSize;               /* Simulated room size, 1 to 100 for minimum to maximum size */
+    LVM_UINT16 T60;      /* Decay time constant, in ms */
+    LVM_UINT16 Density;  /* Echo density, 0 to 100 for minimum to maximum density */
+    LVM_UINT16 Damping;  /* Damping */
+    LVM_UINT16 RoomSize; /* Simulated room size, 1 to 100 for minimum to maximum size */
 
 } LVREV_ControlParams_st;
 
 /* Instance Parameter structure */
-typedef struct
-{
+typedef struct {
     /* General */
-    LVM_UINT16                  MaxBlockSize;           /* Maximum processing block size */
+    LVM_UINT16 MaxBlockSize; /* Maximum processing block size */
 
     /* Reverb */
-    LVM_Format_en               SourceFormat;           /* Source data formats to support */
-    LVREV_NumDelayLines_en      NumDelays;              /* The number of delay lines, 1, 2 or 4 */
+    LVM_Format_en SourceFormat;       /* Source data formats to support */
+    LVREV_NumDelayLines_en NumDelays; /* The number of delay lines, 1, 2 or 4 */
 
 } LVREV_InstanceParams_st;
 
@@ -126,46 +116,6 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used to obtain the LVREV module memory requirements to support     */
-/*  memory allocation. It can also be used to return the memory base address provided   */
-/*  during memory allocation to support freeing of memory when the LVREV module is no   */
-/*  longer required. It is called in two ways:                                          */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called with hInstance = NULL the memory base address pointers */
-/*  will be NULL on return.                                                             */
-/*                                                                                      */
-/*  When the function is called for freeing memory, hInstance = Instance Handle the     */
-/*  memory table returns the allocated memory and base addresses used during            */
-/*  initialisation.                                                                     */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_SUCCESS           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t           hInstance,
-                                           LVREV_MemoryTable_st     *pMemoryTable,
-                                           LVREV_InstanceParams_st  *pInstanceParams);
-
-/****************************************************************************************/
-/*                                                                                      */
 /* FUNCTION:                LVREV_GetInstanceHandle                                     */
 /*                                                                                      */
 /* DESCRIPTION:                                                                         */
@@ -179,7 +129,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              Pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -190,9 +139,25 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t            *phInstance,
-                                              LVREV_MemoryTable_st      *pMemoryTable,
-                                              LVREV_InstanceParams_st   *pInstanceParams);
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+                                              LVREV_InstanceParams_st* pInstanceParams);
+
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVREV_FreeInstance                                          */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free the internal allocations of the module.               */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  hInstance               Instance handle                                             */
+/*                                                                                      */
+/* RETURNS:                                                                             */
+/*  LVREV_SUCCESS          Free instance succeeded                                      */
+/*  LVREV_NULLADDRESS      Instance is NULL                                             */
+/*                                                                                      */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -214,8 +179,8 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pControlParams);
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pControlParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -236,8 +201,8 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pNewParams);
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pNewParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -257,7 +222,7 @@
 /*  1. This function must not be interrupted by the LVREV_Process function              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t  hInstance);
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -280,11 +245,9 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t      hInstance,
-                                    const LVM_FLOAT     *pInData,
-                                    LVM_FLOAT           *pOutData,
-                                    const LVM_UINT16          NumSamples);
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples);
 
-#endif      /* __LVREV_H__ */
+#endif /* __LVREV_H__ */
 
 /* End of file */
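Taken together, the header changes above replace the memory-table workflow (LVREV_GetMemoryTable plus caller-provided regions) with internal allocation and an explicit LVREV_FreeInstance. A minimal lifecycle sketch of the new API follows; the enum spellings LVM_STEREO and LVM_FS_48000 and the parameter values are assumptions for illustration, and error handling is abbreviated.

#include "LVREV.h"

// Minimal lifecycle sketch for the reworked API: create, configure, process, free.
LVREV_ReturnStatus_en RunReverbOnce(const LVM_FLOAT* pIn, LVM_FLOAT* pOut, LVM_UINT16 numSamples) {
    LVREV_Handle_t hInstance = LVM_NULL;

    LVREV_InstanceParams_st instParams;
    instParams.MaxBlockSize = 512;               /* Must be non-zero */
    instParams.SourceFormat = LVM_STEREO;        /* Assumed LVM_Format_en spelling */
    instParams.NumDelays = LVREV_DELAYLINES_4;

    /* No memory table argument: the module now allocates internally. */
    LVREV_ReturnStatus_en status = LVREV_GetInstanceHandle(&hInstance, &instParams);
    if (status != LVREV_SUCCESS) return status;

    LVREV_ControlParams_st params;
    LVREV_GetControlParameters(hInstance, &params);
    params.OperatingMode = LVM_MODE_ON;
    params.SampleRate = LVM_FS_48000;            /* Assumed LVM_Fs_en spelling */
    params.Level = 50;                           /* 0..100 % reverb */
    params.T60 = 1000;                           /* In ms, bounded by LVREV_MAX_T60 */
    LVREV_SetControlParameters(hInstance, &params);

    status = LVREV_Process(hInstance, pIn, pOut, numSamples);

    /* Internal allocations are released by the new call. */
    LVREV_FreeInstance(hInstance);
    return status;
}

Per the notes above, the buffers passed to LVREV_Process must be 32-bit aligned and the block size must not exceed the MaxBlockSize given at instance creation.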
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
index 1f0d39b..b7883f5 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ApplyNewSettings.cpp
@@ -20,6 +20,8 @@
 /*  Includes                                                                            */
 /*                                                                                      */
 /****************************************************************************************/
+
+#include <system/audio.h>
 #include "LVREV_Private.h"
 #include "Filter.h"
 
@@ -41,160 +43,130 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVREV_ReturnStatus_en LVREV_ApplyNewSettings (LVREV_Instance_st     *pPrivate)
-{
-
-    LVM_Mode_en  OperatingMode;
-    LVM_INT32    NumberOfDelayLines;
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate) {
+    LVM_Mode_en OperatingMode;
+    LVM_INT32 NumberOfDelayLines;
 
     /* Check for NULL pointer */
-    if(pPrivate == LVM_NULL)
-    {
+    if (pPrivate == LVM_NULL) {
         return LVREV_NULLADDRESS;
     }
 
     OperatingMode = pPrivate->NewParams.OperatingMode;
 
-    if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
+    if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
         NumberOfDelayLines = 4;
-    }
-    else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-    {
+    } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
         NumberOfDelayLines = 2;
-    }
-    else
-    {
+    } else {
         NumberOfDelayLines = 1;
     }
 
     /*
      * Update the high pass filter coefficients
      */
-    if((pPrivate->NewParams.HPF        != pPrivate->CurrentParams.HPF)        ||
-       (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-       (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
+    if ((pPrivate->NewParams.HPF != pPrivate->CurrentParams.HPF) ||
+        (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
 
         Omega = LVM_GetOmega(pPrivate->NewParams.HPF, pPrivate->NewParams.SampleRate);
         LVM_FO_HPF(Omega, &Coeffs);
-        FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->HPCoefs,
-                                         &pPrivate->pFastData->HPTaps, &Coeffs);
-        LoadConst_Float(0,
-                (LVM_FLOAT *)&pPrivate->pFastData->HPTaps,
-                        sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
+        const std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                Coeffs.A0, Coeffs.A1, 0.0, -(Coeffs.B1), 0.0};
+        pPrivate->pRevHPFBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
     }
 
     /*
      * Update the low pass filter coefficients
      */
-    if((pPrivate->NewParams.LPF        != pPrivate->CurrentParams.LPF)        ||
-       (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-       (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
+    if ((pPrivate->NewParams.LPF != pPrivate->CurrentParams.LPF) ||
+        (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
 
         Coeffs.A0 = 1;
         Coeffs.A1 = 0;
         Coeffs.B1 = 0;
-        if(pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
-        {
+        if (pPrivate->NewParams.LPF <= (LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
             Omega = LVM_GetOmega(pPrivate->NewParams.LPF, pPrivate->NewParams.SampleRate);
 
             /*
              * Do not apply filter if w =2*pi*fc/fs >= 2.9
              */
-            if(Omega <= (LVM_FLOAT)LVREV_2_9_INQ29)
-            {
+            if (Omega <= (LVM_FLOAT)LVREV_2_9_INQ29) {
                 LVM_FO_LPF(Omega, &Coeffs);
             }
         }
-        FO_1I_D32F32Cll_TRC_WRA_01_Init( &pPrivate->pFastCoef->LPCoefs,
-                                         &pPrivate->pFastData->LPTaps, &Coeffs);
-        LoadConst_Float(0,
-                (LVM_FLOAT *)&pPrivate->pFastData->LPTaps,
-                        sizeof(Biquad_1I_Order1_FLOAT_Taps_t) / sizeof(LVM_FLOAT));
+        const std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                Coeffs.A0, Coeffs.A1, 0.0, -(Coeffs.B1), 0.0};
+        pPrivate->pRevLPFBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
     }
 
     /*
      * Calculate the room size parameter
      */
-    if( pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize)
-    {
+    if (pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) {
         /* Room size range is 10ms to 200ms
          * 0%   -- 10ms
          * 50%  -- 65ms
          * 100% -- 120ms
          */
-        pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize*11) + 5) / 10);
+        pPrivate->RoomSizeInms = 10 + (((pPrivate->NewParams.RoomSize * 11) + 5) / 10);
     }
 
     /*
      * Update the T delay number of samples and the all pass delay number of samples
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-
-        LVM_UINT32  Temp;
-        LVM_INT32   APDelaySize;
-        LVM_INT32   Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
-        LVM_UINT32  DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
-        LVM_INT16   i;
-        LVM_FLOAT   ScaleTable[]  = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4, \
-                                     LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
-        LVM_INT16   MaxT_Delay[]  = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, \
-                                     LVREV_MAX_T2_DELAY, LVREV_MAX_T3_DELAY};
-        LVM_INT16   MaxAP_Delay[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY, \
-                                     LVREV_MAX_AP2_DELAY, LVREV_MAX_AP3_DELAY};
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_UINT32 Temp;
+        LVM_INT32 APDelaySize;
+        LVM_INT32 Fs = LVM_GetFsFromTable(pPrivate->NewParams.SampleRate);
+        LVM_UINT32 DelayLengthSamples = (LVM_UINT32)(Fs * pPrivate->RoomSizeInms);
+        LVM_INT16 i;
+        LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+                                  LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
 
         /*
          * For each delay line
          */
-        for (i = 0; i < NumberOfDelayLines; i++)
-        {
-            if (i != 0)
-            {
-                LVM_FLOAT Temp1;  /* to avoid QAC warning on type conversion */
+        for (i = 0; i < NumberOfDelayLines; i++) {
+            if (i != 0) {
+                LVM_FLOAT Temp1; /* to avoid QAC warning on type conversion */
 
-                Temp1=(LVM_FLOAT)DelayLengthSamples;
+                Temp1 = (LVM_FLOAT)DelayLengthSamples;
                 Temp = (LVM_UINT32)(Temp1 * ScaleTable[i]);
+            } else {
+                Temp = DelayLengthSamples;
             }
-            else
-            {
-               Temp = DelayLengthSamples;
-            }
-            APDelaySize = Temp  / 1500;
+            APDelaySize = Temp / 1500;
 
             /*
              * Set the fixed delay
              */
 
-            Temp  = (MaxT_Delay[i] - MaxAP_Delay[i]) * Fs / 192000;
+            Temp = (LVREV_MAX_T_DELAY[i] - LVREV_MAX_AP_DELAY[i]) * Fs / 192000;
             pPrivate->Delay_AP[i] = pPrivate->T[i] - Temp;
 
             /*
              * Set the tap selection
              */
-            if (pPrivate->AB_Selection)
-            {
+            if (pPrivate->AB_Selection) {
                 /* Smooth from tap A to tap B */
-                pPrivate->pOffsetB[i]             = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
-                                                                           Temp - APDelaySize];
-                pPrivate->B_DelaySize[i]          = APDelaySize;
+                pPrivate->pOffsetB[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+                pPrivate->B_DelaySize[i] = APDelaySize;
                 pPrivate->Mixer_APTaps[i].Target1 = 0;
                 pPrivate->Mixer_APTaps[i].Target2 = 1.0f;
-            }
-            else
-            {
+            } else {
                 /* Smooth from tap B to tap A */
-                pPrivate->pOffsetA[i]             = &pPrivate->pDelay_T[i][pPrivate->T[i] - \
-                                                                           Temp - APDelaySize];
-                pPrivate->A_DelaySize[i]          = APDelaySize;
+                pPrivate->pOffsetA[i] = &pPrivate->pDelay_T[i][pPrivate->T[i] - Temp - APDelaySize];
+                pPrivate->A_DelaySize[i] = APDelaySize;
                 pPrivate->Mixer_APTaps[i].Target2 = 0;
                 pPrivate->Mixer_APTaps[i].Target1 = 1.0f;
             }
@@ -202,22 +174,17 @@
             /*
              * Set the maximum block size to the smallest delay size
              */
-            pPrivate->MaxBlkLen   = Temp;
-            if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i])
-            {
+            pPrivate->MaxBlkLen = Temp;
+            if (pPrivate->MaxBlkLen > pPrivate->A_DelaySize[i]) {
                 pPrivate->MaxBlkLen = pPrivate->A_DelaySize[i];
             }
-            if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i])
-            {
+            if (pPrivate->MaxBlkLen > pPrivate->B_DelaySize[i]) {
                 pPrivate->MaxBlkLen = pPrivate->B_DelaySize[i];
             }
         }
-        if (pPrivate->AB_Selection)
-        {
+        if (pPrivate->AB_Selection) {
             pPrivate->AB_Selection = 0;
-        }
-        else
-        {
+        } else {
             pPrivate->AB_Selection = 1;
         }
 
@@ -226,8 +193,7 @@
          */
         /* Just as a precaution, but no problem if we remove this line      */
         pPrivate->MaxBlkLen = pPrivate->MaxBlkLen - 2;
-        if(pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize)
-        {
+        if (pPrivate->MaxBlkLen > pPrivate->InstanceParams.MaxBlockSize) {
             pPrivate->MaxBlkLen = (LVM_INT32)pPrivate->InstanceParams.MaxBlockSize;
         }
     }
@@ -235,72 +201,61 @@
     /*
      * Update the low pass filter coefficient
      */
-    if( (pPrivate->NewParams.Damping    != pPrivate->CurrentParams.Damping)    ||
+    if ((pPrivate->NewParams.Damping != pPrivate->CurrentParams.Damping) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
-
-        LVM_INT32       Temp;
-        LVM_FLOAT       Omega;
-        FO_FLOAT_Coefs_t  Coeffs;
-        LVM_INT16       i;
-        LVM_INT16       Damping      = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
-        LVM_FLOAT       ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
-                                        LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_INT32 Temp;
+        LVM_FLOAT Omega;
+        FO_FLOAT_Coefs_t Coeffs;
+        LVM_INT16 i;
+        LVM_INT16 Damping = (LVM_INT16)((pPrivate->NewParams.Damping * 100) + 1000);
+        LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_0_on_4, LVREV_T_3_Power_1_on_4,
+                                  LVREV_T_3_Power_2_on_4, LVREV_T_3_Power_3_on_4};
 
         /*
          * For each filter
          */
-        for (i = 0; i < NumberOfDelayLines; i++)
-        {
-            if (i != 0)
-            {
+        for (i = 0; i < NumberOfDelayLines; i++) {
+            if (i != 0) {
                 Temp = (LVM_INT32)(ScaleTable[i] * Damping);
-            }
-            else
-            {
+            } else {
                 Temp = Damping;
             }
-            if(Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1))
-            {
+            if (Temp <= (LVM_INT32)(LVM_FsTable[pPrivate->NewParams.SampleRate] >> 1)) {
                 Omega = LVM_GetOmega(Temp, pPrivate->NewParams.SampleRate);
                 LVM_FO_LPF(Omega, &Coeffs);
-            }
-            else
-            {
+            } else {
                 Coeffs.A0 = 1;
                 Coeffs.A1 = 0;
                 Coeffs.B1 = 0;
             }
-            FO_1I_D32F32Cll_TRC_WRA_01_Init(&pPrivate->pFastCoef->RevLPCoefs[i],
-                                            &pPrivate->pFastData->RevLPTaps[i], &Coeffs);
+            const std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                    Coeffs.A0, Coeffs.A1, 0.0, -(Coeffs.B1), 0.0};
+            pPrivate->revLPFBiquad[i].reset(
+                    new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
         }
     }
 
     /*
      * Update All-pass filter mixer time constants
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->NewParams.Density    != pPrivate->CurrentParams.Density))
-    {
-        LVM_INT16   i;
-        LVM_FLOAT   Alpha;
-        LVM_FLOAT   AlphaTap;
+        (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density)) {
+        LVM_INT16 i;
+        LVM_FLOAT Alpha;
+        LVM_FLOAT AlphaTap;
 
         Alpha = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TC,
-                                       LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
-                                       1);
+                                       LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
 
         AlphaTap = LVM_Mixer_TimeConstant(LVREV_ALLPASS_TAP_TC,
-                                          LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
-                                          1);
+                                          LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), 1);
 
-        for (i = 0; i < 4; i++)
-        {
-            pPrivate->Mixer_APTaps[i].Alpha1       = AlphaTap;
-            pPrivate->Mixer_APTaps[i].Alpha2       = AlphaTap;
-            pPrivate->Mixer_SGFeedback[i].Alpha    = Alpha;
+        for (i = 0; i < 4; i++) {
+            pPrivate->Mixer_APTaps[i].Alpha1 = AlphaTap;
+            pPrivate->Mixer_APTaps[i].Alpha2 = AlphaTap;
+            pPrivate->Mixer_SGFeedback[i].Alpha = Alpha;
             pPrivate->Mixer_SGFeedforward[i].Alpha = Alpha;
         }
     }
@@ -308,150 +263,121 @@
     /*
      * Update the feed back gain
      */
-    if( (pPrivate->NewParams.RoomSize   != pPrivate->CurrentParams.RoomSize)   ||
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
         (pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
-        (pPrivate->NewParams.T60        != pPrivate->CurrentParams.T60)        ||
-        (pPrivate->bFirstControl        == LVM_TRUE))
-    {
+        (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_FLOAT G[4]; /* Feedback gain (Q7.24) */
 
-        LVM_FLOAT               G[4];                       /* Feedback gain (Q7.24) */
-
-        if(pPrivate->NewParams.T60 == 0)
-        {
+        if (pPrivate->NewParams.T60 == 0) {
             G[3] = 0;
             G[2] = 0;
             G[1] = 0;
             G[0] = 0;
-        }
-        else
-        {
-            LVM_FLOAT   Temp1;
-            LVM_FLOAT   Temp2;
-            LVM_INT16   i;
-            LVM_FLOAT   ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
-                                        LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
+        } else {
+            LVM_FLOAT Temp1;
+            LVM_FLOAT Temp2;
+            LVM_INT16 i;
+            LVM_FLOAT ScaleTable[] = {LVREV_T_3_Power_minus0_on_4, LVREV_T_3_Power_minus1_on_4,
+                                      LVREV_T_3_Power_minus2_on_4, LVREV_T_3_Power_minus3_on_4};
 
             /*
              * For each delay line
              */
-            for (i = 0; i < NumberOfDelayLines; i++)
-            {
+            for (i = 0; i < NumberOfDelayLines; i++) {
                 Temp1 = (3 * pPrivate->RoomSizeInms * ScaleTable[i]) / pPrivate->NewParams.T60;
-                if(Temp1 >= (4))
-                {
+                if (Temp1 >= (4)) {
                     G[i] = 0;
-                }
-                else if((Temp1 >= (2)))
-                {
+                } else if ((Temp1 >= (2))) {
                     Temp2 = LVM_Power10(-(Temp1 / 2.0f));
                     Temp1 = LVM_Power10(-(Temp1 / 2.0f));
                     Temp1 = Temp1 * Temp2;
-                }
-                else
-                {
+                } else {
                     Temp1 = LVM_Power10(-(Temp1));
                 }
-                if (NumberOfDelayLines == 1)
-                {
+                if (NumberOfDelayLines == 1) {
                     G[i] = Temp1;
-                }
-                else
-                {
-                    LVM_FLOAT   TempG;
+                } else {
+                    LVM_FLOAT TempG;
                     TempG = Temp1 * ONE_OVER_SQRT_TWO;
-                    G[i]=TempG;
+                    G[i] = TempG;
                 }
             }
         }
 
         /* Set up the feedback mixers for four delay lines */
-        pPrivate->FeedbackMixer[0].Target=G[0];
-        pPrivate->FeedbackMixer[1].Target=G[1];
-        pPrivate->FeedbackMixer[2].Target=G[2];
-        pPrivate->FeedbackMixer[3].Target=G[3];
+        pPrivate->FeedbackMixer[0].Target = G[0];
+        pPrivate->FeedbackMixer[1].Target = G[1];
+        pPrivate->FeedbackMixer[2].Target = G[2];
+        pPrivate->FeedbackMixer[3].Target = G[3];
     }
 
     /*
      * Calculate the gain correction
      */
-    if((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
-       (pPrivate->NewParams.Level    != pPrivate->CurrentParams.Level)    ||
-       (pPrivate->NewParams.T60      != pPrivate->CurrentParams.T60) )
-    {
-        LVM_INT32 Index=0;
+    if ((pPrivate->NewParams.RoomSize != pPrivate->CurrentParams.RoomSize) ||
+        (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) ||
+        (pPrivate->NewParams.T60 != pPrivate->CurrentParams.T60)) {
+        LVM_INT32 Index = 0;
         LVM_FLOAT Index_FLOAT;
-        LVM_INT32 i=0;
-        LVM_FLOAT Gain=0;
-        LVM_INT32 RoomSize=0;
+        LVM_INT32 i = 0;
+        LVM_FLOAT Gain = 0;
+        LVM_INT32 RoomSize = 0;
         LVM_FLOAT T60;
         LVM_FLOAT Coefs[5];
 
-        if(pPrivate->NewParams.RoomSize == 0)
-        {
+        if (pPrivate->NewParams.RoomSize == 0) {
             RoomSize = 1;
-        }
-        else
-        {
+        } else {
             RoomSize = (LVM_INT32)pPrivate->NewParams.RoomSize;
         }
 
-        if(pPrivate->NewParams.T60 < 100)
-        {
+        if (pPrivate->NewParams.T60 < 100) {
             T60 = 100 * LVREV_T60_SCALE;
-        }
-        else
-        {
+        } else {
             T60 = pPrivate->NewParams.T60 * LVREV_T60_SCALE;
         }
 
         /* Find the nearest room size in table */
-        for(i = 0; i < 24; i++)
-        {
-            if(RoomSize <= LVREV_GainPolyTable[i][0])
-            {
+        for (i = 0; i < 24; i++) {
+            if (RoomSize <= LVREV_GainPolyTable[i][0]) {
                 Index = i;
                 break;
             }
         }
 
-        if(RoomSize == LVREV_GainPolyTable[Index][0])
-        {
+        if (RoomSize == LVREV_GainPolyTable[Index][0]) {
             /* Take table values if the room size is in table */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
             }
             Coefs[4] = 0;
-            Gain = LVM_Polynomial(3, Coefs, T60);       /* Q.24 result */
-        }
-        else
-        {
+            Gain = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
+        } else {
             /* Interpolate the gain between nearest room sizes */
 
-            LVM_FLOAT Gain1,Gain2;
-            LVM_INT32 Tot_Dist,Dist;
+            LVM_FLOAT Gain1, Gain2;
+            LVM_INT32 Tot_Dist, Dist;
 
-            Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] - \
-                                            (LVM_UINT32)LVREV_GainPolyTable[Index-1][0];
+            Tot_Dist = (LVM_UINT32)LVREV_GainPolyTable[Index][0] -
+                       (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
             Dist = RoomSize - (LVM_UINT32)LVREV_GainPolyTable[Index - 1][0];
 
             /* Get gain for first */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index-1][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index - 1][i];
             }
             Coefs[4] = 0;
 
-            Gain1 = LVM_Polynomial(3, Coefs, T60);      /* Q.24 result */
+            Gain1 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
 
             /* Get gain for second */
-            for(i = 1; i < 5; i++)
-            {
-                Coefs[i-1] = LVREV_GainPolyTable[Index][i];
+            for (i = 1; i < 5; i++) {
+                Coefs[i - 1] = LVREV_GainPolyTable[Index][i];
             }
             Coefs[4] = 0;
 
-            Gain2 = LVM_Polynomial(3, Coefs, T60);      /* Q.24 result */
+            Gain2 = LVM_Polynomial(3, Coefs, T60); /* Q.24 result */
 
             /* Linear Interpolate the gain */
             Gain = Gain1 + (((Gain2 - Gain1) * Dist) / (Tot_Dist));
@@ -461,32 +387,27 @@
          * Get the inverse of gain: Q.15
          * Gain is mostly above one except few cases, take only gains above 1
          */
-        if(Gain < 1)
-        {
+        if (Gain < 1) {
             pPrivate->Gain = 1;
-        }
-        else
-        {
+        } else {
             pPrivate->Gain = 1 / Gain;
         }
 
         Index_FLOAT = 100.0f / (LVM_FLOAT)(100 + pPrivate->NewParams.Level);
         pPrivate->Gain = pPrivate->Gain * Index_FLOAT;
-        pPrivate->GainMixer.Target = (pPrivate->Gain*Index_FLOAT) / 2;
+        pPrivate->GainMixer.Target = (pPrivate->Gain * Index_FLOAT) / 2;
     }
 
     /*
      * Update the all pass comb filter coefficient
      */
-    if( (pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
-        (pPrivate->bFirstControl     == LVM_TRUE))
-    {
-        LVM_INT16   i;
-        LVM_FLOAT   b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
+    if ((pPrivate->NewParams.Density != pPrivate->CurrentParams.Density) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_INT16 i;
+        LVM_FLOAT b = (LVM_FLOAT)pPrivate->NewParams.Density * LVREV_B_8_on_1000;
 
-        for (i = 0; i < 4; i++)
-        {
-            pPrivate->Mixer_SGFeedback[i].Target    = b;
+        for (i = 0; i < 4; i++) {
+            pPrivate->Mixer_SGFeedback[i].Target = b;
             pPrivate->Mixer_SGFeedforward[i].Target = b;
         }
     }
@@ -494,11 +415,10 @@
     /*
      * Update the bypass mixer time constant
      */
-    if((pPrivate->NewParams.SampleRate   != pPrivate->CurrentParams.SampleRate)   ||
-       (pPrivate->bFirstControl          == LVM_TRUE))
-    {
-        LVM_UINT16   NumChannels = 1;                       /* Assume MONO format */
-        LVM_FLOAT    Alpha;
+    if ((pPrivate->NewParams.SampleRate != pPrivate->CurrentParams.SampleRate) ||
+        (pPrivate->bFirstControl == LVM_TRUE)) {
+        LVM_UINT16 NumChannels = 1; /* Assume MONO format */
+        LVM_FLOAT Alpha;
 
         Alpha = LVM_Mixer_TimeConstant(LVREV_FEEDBACKMIXER_TC,
                                        LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
@@ -508,67 +428,55 @@
         pPrivate->FeedbackMixer[2].Alpha = Alpha;
         pPrivate->FeedbackMixer[3].Alpha = Alpha;
 
-        NumChannels = 2;                                    /* Always stereo output */
-        pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(LVREV_BYPASSMIXER_TC,
-                             LVM_GetFsFromTable(pPrivate->NewParams.SampleRate), NumChannels);
+        NumChannels = 2; /* Always stereo output */
+        pPrivate->BypassMixer.Alpha1 = LVM_Mixer_TimeConstant(
+                LVREV_BYPASSMIXER_TC, LVM_GetFsFromTable(pPrivate->NewParams.SampleRate),
+                NumChannels);
         pPrivate->BypassMixer.Alpha2 = pPrivate->BypassMixer.Alpha1;
-        pPrivate->GainMixer.Alpha    = pPrivate->BypassMixer.Alpha1;
+        pPrivate->GainMixer.Alpha = pPrivate->BypassMixer.Alpha1;
     }
 
     /*
      * Update the bypass mixer targets
      */
-    if( (pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
-        (pPrivate->NewParams.OperatingMode == LVM_MODE_ON))
-    {
-        pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+    if ((pPrivate->NewParams.Level != pPrivate->CurrentParams.Level) &&
+        (pPrivate->NewParams.OperatingMode == LVM_MODE_ON)) {
+        pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
         pPrivate->BypassMixer.Target1 = 0x00000000;
-        if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE))
-        {
+        if ((pPrivate->NewParams.Level == 0) && (pPrivate->bFirstControl == LVM_FALSE)) {
             pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
         }
-        if (pPrivate->NewParams.Level != 0)
-        {
+        if (pPrivate->NewParams.Level != 0) {
             pPrivate->bDisableReverb = LVM_FALSE;
         }
     }
 
-    if(pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode)
-    {
-        if(pPrivate->NewParams.OperatingMode == LVM_MODE_ON)
-        {
-            pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level ) / 100.0f;
+    if (pPrivate->NewParams.OperatingMode != pPrivate->CurrentParams.OperatingMode) {
+        if (pPrivate->NewParams.OperatingMode == LVM_MODE_ON) {
+            pPrivate->BypassMixer.Target2 = (LVM_FLOAT)(pPrivate->NewParams.Level) / 100.0f;
             pPrivate->BypassMixer.Target1 = 0x00000000;
 
             pPrivate->BypassMixer.CallbackSet2 = LVM_FALSE;
-            OperatingMode                      = LVM_MODE_ON;
-            if (pPrivate->NewParams.Level == 0)
-            {
+            OperatingMode = LVM_MODE_ON;
+            if (pPrivate->NewParams.Level == 0) {
                 pPrivate->bDisableReverb = LVM_TRUE;
-            }
-            else
-            {
+            } else {
                 pPrivate->bDisableReverb = LVM_FALSE;
             }
-        }
-        else if (pPrivate->bFirstControl == LVM_FALSE)
-        {
+        } else if (pPrivate->bFirstControl == LVM_FALSE) {
             pPrivate->BypassMixer.Target2 = 0x00000000;
             pPrivate->BypassMixer.Target1 = 0x00000000;
             pPrivate->BypassMixer.CallbackSet2 = LVM_TRUE;
-            pPrivate->GainMixer.Target    = 0.03125f;
+            pPrivate->GainMixer.Target = 0.03125f;
             OperatingMode = LVM_MODE_ON;
-        }
-        else
-        {
+        } else {
             OperatingMode = LVM_MODE_OFF;
         }
     }
 
     /*  If it is the first call to ApplyNewSettings, force the current to the target \
         to begin immediate playback of the effect */
-    if(pPrivate->bFirstControl == LVM_TRUE)
-    {
+    if (pPrivate->bFirstControl == LVM_TRUE) {
         pPrivate->BypassMixer.Current1 = pPrivate->BypassMixer.Target1;
         pPrivate->BypassMixer.Current2 = pPrivate->BypassMixer.Target2;
     }
@@ -582,8 +490,7 @@
     /*
      * Update flag
      */
-    if(pPrivate->bFirstControl == LVM_TRUE)
-    {
+    if (pPrivate->bFirstControl == LVM_TRUE) {
         pPrivate->bFirstControl = LVM_FALSE;
     }
 
@@ -606,12 +513,9 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 BypassMixer_Callback (void *pCallbackData,
-                                void *pGeneralPurpose,
-                                LVM_INT16 GeneralPurpose )
-{
-
-    LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)pCallbackData;
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+                               LVM_INT16 GeneralPurpose) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)pCallbackData;
 
     /*
      * Avoid build warnings
@@ -623,11 +527,10 @@
      * Turn off
      */
     pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_OFF;
-    pLVREV_Private->bDisableReverb              = LVM_TRUE;
+    pLVREV_Private->bDisableReverb = LVM_TRUE;
     LVREV_ClearAudioBuffers((LVREV_Handle_t)pCallbackData);
 
     return 0;
 }
 
 /* End of file */
-
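A recurring pattern in this file is the replacement of FO_1I_D32F32Cll_TRC_WRA_01_Init with an android::audio_utils::BiquadFilter built from the first-order coefficients, packed as {A0, A1, 0, -B1, 0} in the {b0, b1, b2, a1, a2} layout; the sign flip on B1 reflects the differing sign conventions for the recursive term, as taken from the initializers above. A small helper capturing that packing might look like the sketch below; the helper name is hypothetical and the header path is assumed to be the usual audio_utils one.

#include <array>
#include <memory>

#include <audio_utils/BiquadFilter.h>  // Assumed header for android::audio_utils::BiquadFilter

// Hypothetical helper: pack first-order coefficients (A0, A1, B1) into the
// five-coefficient biquad layout used above and construct the filter.
template <typename T>
static std::unique_ptr<android::audio_utils::BiquadFilter<T>> makeFirstOrderBiquad(
        size_t channelCount, T A0, T A1, T B1) {
    const std::array<T, android::audio_utils::kBiquadNumCoefs> coefs = {
            A0, A1, T(0), -B1, T(0)};  // {b0, b1, b2, a1, a2}
    return std::make_unique<android::audio_utils::BiquadFilter<T>>(channelCount, coefs);
}

The numeric updates in this hunk are otherwise unchanged: RoomSizeInms = 10 + ((RoomSize * 11) + 5) / 10 still gives 65 ms at RoomSize = 50 with integer division, matching the 50% row in the comment, and the feedback gains follow the usual T60 relation g = 10^(-3 * t_line / T60) via the 3 * RoomSizeInms * ScaleTable[i] / T60 exponent.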
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
index 586539f..be3505f 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_ClearAudioBuffers.cpp
@@ -41,17 +41,14 @@
 /*  1. This function must not be interrupted by the LVM_Process function                */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t  hInstance)
-{
-
-   LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if(hInstance == LVM_NULL)
-    {
+    if (hInstance == LVM_NULL) {
         return LVREV_NULLADDRESS;
     }
 
@@ -59,38 +56,12 @@
      * Clear all filter tap data, delay-lines and other signal related data
      */
 
-    LoadConst_Float(0,
-        (LVM_FLOAT *)&pLVREV_Private->pFastData->HPTaps,
-        2);
-    LoadConst_Float(0,
-        (LVM_FLOAT *)&pLVREV_Private->pFastData->LPTaps,
-        2);
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[3], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[2], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
-
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[2], LVREV_MAX_T2_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[1], LVREV_MAX_T1_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
-    }
-
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_2)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[1], 2);
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
-
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[1], LVREV_MAX_T1_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
-    }
-
-    if((LVM_UINT16)pLVREV_Private->InstanceParams.NumDelays >= LVREV_DELAYLINES_1)
-    {
-        LoadConst_Float(0, (LVM_FLOAT *)&pLVREV_Private->pFastData->RevLPTaps[0], 2);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
+    pLVREV_Private->pRevHPFBiquad->clear();
+    pLVREV_Private->pRevLPFBiquad->clear();
+    for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
+        pLVREV_Private->revLPFBiquad[i]->clear();
+        memset(pLVREV_Private->pDelay_T[i], 0, LVREV_MAX_T_DELAY[i] *
+                sizeof(pLVREV_Private->pDelay_T[i][0]));
     }
     return LVREV_SUCCESS;
 }
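The rewritten clearing loop iterates with i < NumDelays, which relies on the LVREV_NumDelayLines_en values (1, 2, 4) doubling as the number of active delay lines; the old code spelled the same fact out as three cascaded if blocks. A minimal compile-time check of that assumption:

#include "LVREV.h"

static_assert(LVREV_DELAYLINES_1 == 1 && LVREV_DELAYLINES_2 == 2 && LVREV_DELAYLINES_4 == 4,
              "delay-line enumerators are used directly as loop bounds");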
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
index e0b0142..f858b74 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetControlParameters.cpp
@@ -42,17 +42,14 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pControlParams)
-{
-
-    LVREV_Instance_st  *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pControlParams) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pControlParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pControlParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 68f883a..3a63698 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -21,7 +21,6 @@
 /*                                                                                      */
 /****************************************************************************************/
 #include "LVREV_Private.h"
-#include "InstAlloc.h"
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -34,7 +33,6 @@
 /*                                                                                      */
 /* PARAMETERS:                                                                          */
 /*  phInstance              pointer to the instance handle                              */
-/*  pMemoryTable            Pointer to the memory definition table                      */
 /*  pInstanceParams         Pointer to the instance parameters                          */
 /*                                                                                      */
 /* RETURNS:                                                                             */
@@ -45,201 +43,72 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t            *phInstance,
-                                              LVREV_MemoryTable_st      *pMemoryTable,
-                                              LVREV_InstanceParams_st   *pInstanceParams)
-{
-
-    INST_ALLOC              SlowData;
-    INST_ALLOC              FastData;
-    INST_ALLOC              FastCoef;
-    INST_ALLOC              Temporary;
-    LVREV_Instance_st       *pLVREV_Private;
-    LVM_INT16               i;
-    LVM_UINT16              MaxBlockSize;
+LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t* phInstance,
+                                              LVREV_InstanceParams_st* pInstanceParams) {
+    LVREV_Instance_st* pLVREV_Private;
+    LVM_INT16 i;
+    LVM_UINT16 MaxBlockSize;
 
     /*
      * Check for error conditions
      */
     /* Check for NULL pointers */
-    if((phInstance == LVM_NULL) || (pMemoryTable == LVM_NULL) || (pInstanceParams == LVM_NULL))
-    {
+    if ((phInstance == LVM_NULL) || (pInstanceParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
-    /* Check the memory table for NULL pointers */
-    for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++)
-    {
-        if (pMemoryTable->Region[i].Size!=0)
-        {
-            if (pMemoryTable->Region[i].pBaseAddress==LVM_NULL)
-            {
-                return(LVREV_NULLADDRESS);
-            }
-        }
-    }
-
     /*
      * Check all instance parameters are in range
      */
     /* Check for a non-zero block size */
-    if (pInstanceParams->MaxBlockSize == 0)
-    {
+    if (pInstanceParams->MaxBlockSize == 0) {
         return LVREV_OUTOFRANGE;
     }
 
     /* Check for a valid number of delay lines */
-    if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1)&&
-        (pInstanceParams->NumDelays != LVREV_DELAYLINES_2)&&
-        (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
-    {
+    if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
+        (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
+        (pInstanceParams->NumDelays != LVREV_DELAYLINES_4)) {
         return LVREV_OUTOFRANGE;
     }
 
     /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData,  pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress);
-    InstAlloc_Init(&FastData,  pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress);
-    InstAlloc_Init(&FastCoef,  pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress);
-    InstAlloc_Init(&Temporary, pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress);
-
-    /*
-     * Zero all memory regions
-     */
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    LoadConst_Float(0,
-                    (LVM_FLOAT *)pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress,
-                    (LVM_INT16)((pMemoryTable->Region[LVM_TEMPORARY_FAST].Size) / \
-                                                    sizeof(LVM_FLOAT)));
-    /*
      * Set the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
+    if (*phInstance == LVM_NULL) {
+        *phInstance = new LVREV_Instance_st{};
     }
-    pLVREV_Private              =(LVREV_Instance_st *)*phInstance;
-    pLVREV_Private->MemoryTable = *pMemoryTable;
+    pLVREV_Private = (LVREV_Instance_st*)*phInstance;
 
-    if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
-    {
-        MaxBlockSize = LVREV_MAX_AP3_DELAY;
-    }
-    else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
-    {
-        MaxBlockSize = LVREV_MAX_AP1_DELAY;
-    }
-    else
-    {
-        MaxBlockSize = LVREV_MAX_AP0_DELAY;
+    if (pInstanceParams->NumDelays == LVREV_DELAYLINES_4) {
+        MaxBlockSize = LVREV_MAX_AP_DELAY[3];
+    } else if (pInstanceParams->NumDelays == LVREV_DELAYLINES_2) {
+        MaxBlockSize = LVREV_MAX_AP_DELAY[1];
+    } else {
+        MaxBlockSize = LVREV_MAX_AP_DELAY[0];
     }
 
-    if(MaxBlockSize>pInstanceParams->MaxBlockSize)
-    {
-        MaxBlockSize=pInstanceParams->MaxBlockSize;
+    if (MaxBlockSize > pInstanceParams->MaxBlockSize) {
+        MaxBlockSize = pInstanceParams->MaxBlockSize;
     }
 
     /*
      * Set the data, coefficient and temporary memory pointers
      */
-    /* Fast data memory base address */
-    pLVREV_Private->pFastData = (LVREV_FastData_st *)
-        InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-    {
-        pLVREV_Private->pDelay_T[3]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[2]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[1]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[0]     =
-            (LVM_FLOAT *)InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
-                                                              sizeof(LVM_FLOAT));
-
-        for(i = 0; i < 4; i++)
-        {
-            /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
-        }
-
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[3], LVREV_MAX_T3_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[2], LVREV_MAX_T2_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[1], LVREV_MAX_T1_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], LVREV_MAX_T0_DELAY);
-    }
-
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-    {
-        pLVREV_Private->pDelay_T[1]  = (LVM_FLOAT *)
-                InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * \
-                                                           sizeof(LVM_FLOAT));
-        pLVREV_Private->pDelay_T[0]  = (LVM_FLOAT *)
-                InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * \
-                                                           sizeof(LVM_FLOAT));
-
-        for(i = 0; i < 2; i++)
-        {
-            /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
-        }
-
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[1], (LVM_INT16)LVREV_MAX_T1_DELAY);
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
-    }
-
-    if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-    {
-        pLVREV_Private->pDelay_T[0]  = (LVM_FLOAT *)InstAlloc_AddMember(&FastData,
-                                                           LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
-
-        for(i = 0; i < 1; i++)
-        {
-            /* Scratch for each delay line output */
-            pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT *)InstAlloc_AddMember(&Temporary,
-                                                                       sizeof(LVM_FLOAT) * \
-                                                                       MaxBlockSize);
-        }
-
-        LoadConst_Float(0, pLVREV_Private->pDelay_T[0], (LVM_INT16)LVREV_MAX_T0_DELAY);
+    for (size_t i = 0; i < pInstanceParams->NumDelays; i++) {
+        pLVREV_Private->pDelay_T[i] = (LVM_FLOAT*)calloc(LVREV_MAX_T_DELAY[i], sizeof(LVM_FLOAT));
+        /* Scratch for each delay line output */
+        pLVREV_Private->pScratchDelayLine[i] = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     }
     /* All-pass delay buffer addresses and sizes */
-    pLVREV_Private->T[0]         = LVREV_MAX_T0_DELAY;
-    pLVREV_Private->T[1]         = LVREV_MAX_T1_DELAY;
-    pLVREV_Private->T[2]         = LVREV_MAX_T2_DELAY;
-    pLVREV_Private->T[3]         = LVREV_MAX_T3_DELAY;
-    pLVREV_Private->AB_Selection = 1;       /* Select smoothing A to B */
+    for (size_t i = 0; i < LVREV_DELAYLINES_4; i++) {
+        pLVREV_Private->T[i] = LVREV_MAX_T_DELAY[i];
+    }
+    pLVREV_Private->AB_Selection = 1; /* Select smoothing A to B */
 
-    /* Fast coefficient memory base address */
-    pLVREV_Private->pFastCoef       =
-        (LVREV_FastCoef_st *)InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
     /* General purpose scratch */
-    pLVREV_Private->pScratch        =
-            (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * \
-                                                          MaxBlockSize);
+    pLVREV_Private->pScratch = (LVM_FLOAT*)calloc(MaxBlockSize, sizeof(LVM_FLOAT));
     /* Mono->stereo input save for end mix */
-    pLVREV_Private->pInputSave      =
-            (LVM_FLOAT *)InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * \
-                                                          MaxBlockSize);
-    LoadConst_Float(0, pLVREV_Private->pInputSave, (LVM_INT16)(MaxBlockSize * 2));
+    pLVREV_Private->pInputSave = (LVM_FLOAT*)calloc(FCC_2 * MaxBlockSize, sizeof(LVM_FLOAT));
 
     /*
      * Save the instance parameters in the instance structure
@@ -249,105 +118,153 @@
     /*
      * Set the parameters to invalid
      */
-    pLVREV_Private->CurrentParams.SampleRate    = LVM_FS_INVALID;
+    pLVREV_Private->CurrentParams.SampleRate = LVM_FS_INVALID;
     pLVREV_Private->CurrentParams.OperatingMode = LVM_MODE_DUMMY;
-    pLVREV_Private->CurrentParams.SourceFormat  = LVM_SOURCE_DUMMY;
+    pLVREV_Private->CurrentParams.SourceFormat = LVM_SOURCE_DUMMY;
 
-    pLVREV_Private->bControlPending             = LVM_FALSE;
-    pLVREV_Private->bFirstControl               = LVM_TRUE;
-    pLVREV_Private->bDisableReverb              = LVM_FALSE;
+    pLVREV_Private->bControlPending = LVM_FALSE;
+    pLVREV_Private->bFirstControl = LVM_TRUE;
+    pLVREV_Private->bDisableReverb = LVM_FALSE;
 
     /*
      * Set mixer parameters
      */
-    pLVREV_Private->BypassMixer.CallbackParam2      = 0;
-    pLVREV_Private->BypassMixer.pCallbackHandle2    = pLVREV_Private;
-    pLVREV_Private->BypassMixer.pGeneralPurpose2    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pCallBack2          = BypassMixer_Callback;
-    pLVREV_Private->BypassMixer.CallbackSet2        = LVM_FALSE;
-    pLVREV_Private->BypassMixer.Current2            = 0;
-    pLVREV_Private->BypassMixer.Target2             = 0;
-    pLVREV_Private->BypassMixer.CallbackParam1      = 0;
-    pLVREV_Private->BypassMixer.pCallbackHandle1    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pGeneralPurpose1    = LVM_NULL;
-    pLVREV_Private->BypassMixer.pCallBack1          = LVM_NULL;
-    pLVREV_Private->BypassMixer.CallbackSet1        = LVM_FALSE;
-    pLVREV_Private->BypassMixer.Current1            = 0x00000000;
-    pLVREV_Private->BypassMixer.Target1             = 0x00000000;
+    pLVREV_Private->BypassMixer.CallbackParam2 = 0;
+    pLVREV_Private->BypassMixer.pCallbackHandle2 = pLVREV_Private;
+    pLVREV_Private->BypassMixer.pGeneralPurpose2 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pCallBack2 = BypassMixer_Callback;
+    pLVREV_Private->BypassMixer.CallbackSet2 = LVM_FALSE;
+    pLVREV_Private->BypassMixer.Current2 = 0;
+    pLVREV_Private->BypassMixer.Target2 = 0;
+    pLVREV_Private->BypassMixer.CallbackParam1 = 0;
+    pLVREV_Private->BypassMixer.pCallbackHandle1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pGeneralPurpose1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.pCallBack1 = LVM_NULL;
+    pLVREV_Private->BypassMixer.CallbackSet1 = LVM_FALSE;
+    pLVREV_Private->BypassMixer.Current1 = 0x00000000;
+    pLVREV_Private->BypassMixer.Target1 = 0x00000000;
 
-    pLVREV_Private->RoomSizeInms                    = 100;  // 100 msec
+    pLVREV_Private->RoomSizeInms = 100;  // 100 msec
 
     /*
      *  Set the output gain mixer parameters
      */
-    pLVREV_Private->GainMixer.CallbackParam      = 0;
-    pLVREV_Private->GainMixer.pCallbackHandle    = LVM_NULL;
-    pLVREV_Private->GainMixer.pGeneralPurpose    = LVM_NULL;
-    pLVREV_Private->GainMixer.pCallBack          = LVM_NULL;
-    pLVREV_Private->GainMixer.CallbackSet        = LVM_FALSE;
-    pLVREV_Private->GainMixer.Current            = 0.03125f;//0x03ffffff;
-    pLVREV_Private->GainMixer.Target             = 0.03125f;//0x03ffffff;
+    pLVREV_Private->GainMixer.CallbackParam = 0;
+    pLVREV_Private->GainMixer.pCallbackHandle = LVM_NULL;
+    pLVREV_Private->GainMixer.pGeneralPurpose = LVM_NULL;
+    pLVREV_Private->GainMixer.pCallBack = LVM_NULL;
+    pLVREV_Private->GainMixer.CallbackSet = LVM_FALSE;
+    pLVREV_Private->GainMixer.Current = 0.03125f;  // 0x03ffffff;
+    pLVREV_Private->GainMixer.Target = 0.03125f;   // 0x03ffffff;
 
     /*
      * Set the All-Pass Filter mixers
      */
-    for (i=0; i<4; i++)
-    {
+    for (i = 0; i < 4; i++) {
         pLVREV_Private->pOffsetA[i] = pLVREV_Private->pDelay_T[i];
         pLVREV_Private->pOffsetB[i] = pLVREV_Private->pDelay_T[i];
         /* Delay tap selection mixer */
-        pLVREV_Private->Mixer_APTaps[i].CallbackParam2   = 0;
+        pLVREV_Private->Mixer_APTaps[i].CallbackParam2 = 0;
         pLVREV_Private->Mixer_APTaps[i].pCallbackHandle2 = LVM_NULL;
         pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose2 = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].pCallBack2       = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].CallbackSet2     = LVM_FALSE;
-        pLVREV_Private->Mixer_APTaps[i].Current2         = 0;
-        pLVREV_Private->Mixer_APTaps[i].Target2          = 0;
-        pLVREV_Private->Mixer_APTaps[i].CallbackParam1   = 0;
+        pLVREV_Private->Mixer_APTaps[i].pCallBack2 = LVM_NULL;
+        pLVREV_Private->Mixer_APTaps[i].CallbackSet2 = LVM_FALSE;
+        pLVREV_Private->Mixer_APTaps[i].Current2 = 0;
+        pLVREV_Private->Mixer_APTaps[i].Target2 = 0;
+        pLVREV_Private->Mixer_APTaps[i].CallbackParam1 = 0;
         pLVREV_Private->Mixer_APTaps[i].pCallbackHandle1 = LVM_NULL;
         pLVREV_Private->Mixer_APTaps[i].pGeneralPurpose1 = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].pCallBack1       = LVM_NULL;
-        pLVREV_Private->Mixer_APTaps[i].CallbackSet1     = LVM_FALSE;
-        pLVREV_Private->Mixer_APTaps[i].Current1         = 0;
-        pLVREV_Private->Mixer_APTaps[i].Target1          = 1;
+        pLVREV_Private->Mixer_APTaps[i].pCallBack1 = LVM_NULL;
+        pLVREV_Private->Mixer_APTaps[i].CallbackSet1 = LVM_FALSE;
+        pLVREV_Private->Mixer_APTaps[i].Current1 = 0;
+        pLVREV_Private->Mixer_APTaps[i].Target1 = 1;
         /* Feedforward mixer */
-        pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam   = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].CallbackParam = 0;
         pLVREV_Private->Mixer_SGFeedforward[i].pCallbackHandle = LVM_NULL;
         pLVREV_Private->Mixer_SGFeedforward[i].pGeneralPurpose = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedforward[i].pCallBack       = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet     = LVM_FALSE;
-        pLVREV_Private->Mixer_SGFeedforward[i].Current         = 0;
-        pLVREV_Private->Mixer_SGFeedforward[i].Target          = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].pCallBack = LVM_NULL;
+        pLVREV_Private->Mixer_SGFeedforward[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->Mixer_SGFeedforward[i].Current = 0;
+        pLVREV_Private->Mixer_SGFeedforward[i].Target = 0;
         /* Feedback mixer */
-        pLVREV_Private->Mixer_SGFeedback[i].CallbackParam   = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].CallbackParam = 0;
         pLVREV_Private->Mixer_SGFeedback[i].pCallbackHandle = LVM_NULL;
         pLVREV_Private->Mixer_SGFeedback[i].pGeneralPurpose = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedback[i].pCallBack       = LVM_NULL;
-        pLVREV_Private->Mixer_SGFeedback[i].CallbackSet     = LVM_FALSE;
-        pLVREV_Private->Mixer_SGFeedback[i].Current         = 0;
-        pLVREV_Private->Mixer_SGFeedback[i].Target          = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].pCallBack = LVM_NULL;
+        pLVREV_Private->Mixer_SGFeedback[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->Mixer_SGFeedback[i].Current = 0;
+        pLVREV_Private->Mixer_SGFeedback[i].Target = 0;
         /* Feedback gain mixer */
-        pLVREV_Private->FeedbackMixer[i].CallbackParam    = 0;
-        pLVREV_Private->FeedbackMixer[i].pCallbackHandle  = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].pGeneralPurpose  = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].pCallBack        = LVM_NULL;
-        pLVREV_Private->FeedbackMixer[i].CallbackSet      = LVM_FALSE;
-        pLVREV_Private->FeedbackMixer[i].Current          = 0;
-        pLVREV_Private->FeedbackMixer[i].Target           = 0;
+        pLVREV_Private->FeedbackMixer[i].CallbackParam = 0;
+        pLVREV_Private->FeedbackMixer[i].pCallbackHandle = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].pGeneralPurpose = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].pCallBack = LVM_NULL;
+        pLVREV_Private->FeedbackMixer[i].CallbackSet = LVM_FALSE;
+        pLVREV_Private->FeedbackMixer[i].Current = 0;
+        pLVREV_Private->FeedbackMixer[i].Target = 0;
     }
     /* Delay tap index */
-    pLVREV_Private->A_DelaySize[0] = LVREV_MAX_AP0_DELAY;
-    pLVREV_Private->B_DelaySize[0] = LVREV_MAX_AP0_DELAY;
-    pLVREV_Private->A_DelaySize[1] = LVREV_MAX_AP1_DELAY;
-    pLVREV_Private->B_DelaySize[1] = LVREV_MAX_AP1_DELAY;
-    pLVREV_Private->A_DelaySize[2] = LVREV_MAX_AP2_DELAY;
-    pLVREV_Private->B_DelaySize[2] = LVREV_MAX_AP2_DELAY;
-    pLVREV_Private->A_DelaySize[3] = LVREV_MAX_AP3_DELAY;
-    pLVREV_Private->B_DelaySize[3] = LVREV_MAX_AP3_DELAY;
+    for (size_t i = 0; i < LVREV_DELAYLINES_4; i++) {
+        pLVREV_Private->A_DelaySize[i] = LVREV_MAX_AP_DELAY[i];
+        pLVREV_Private->B_DelaySize[i] = LVREV_MAX_AP_DELAY[i];
+    }
+
+    pLVREV_Private->pRevHPFBiquad.reset(
+            new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+    pLVREV_Private->pRevLPFBiquad.reset(
+            new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+    for (int i = 0; i < LVREV_DELAYLINES_4; i++) {
+        pLVREV_Private->revLPFBiquad[i].reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(LVM_MAX_CHANNELS));
+    }
 
     LVREV_ClearAudioBuffers(*phInstance);
 
     return LVREV_SUCCESS;
 }
 
+/****************************************************************************************/
+/*                                                                                      */
+/* FUNCTION:                LVREV_FreeInstance                                          */
+/*                                                                                      */
+/* DESCRIPTION:                                                                         */
+/*  This function is used to free the internal allocations of the module.               */
+/*                                                                                      */
+/* PARAMETERS:                                                                          */
+/*  hInstance               Instance handle                                             */
+/*                                                                                      */
+/* RETURNS:                                                                             */
+/*  LVREV_SUCCESS          free instance succeeded                                      */
+/*  LVREV_NULLADDRESS      Instance is NULL                                             */
+/*                                                                                      */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance) {
+    if (hInstance == LVM_NULL) {
+        return LVREV_NULLADDRESS;
+    }
+
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+
+    for (size_t i = 0; i < pLVREV_Private->InstanceParams.NumDelays; i++) {
+        if (pLVREV_Private->pDelay_T[i]) {
+            free(pLVREV_Private->pDelay_T[i]);
+            pLVREV_Private->pDelay_T[i] = LVM_NULL;
+        }
+        if (pLVREV_Private->pScratchDelayLine[i]) {
+            free(pLVREV_Private->pScratchDelayLine[i]);
+            pLVREV_Private->pScratchDelayLine[i] = LVM_NULL;
+        }
+    }
+    if (pLVREV_Private->pScratch) {
+        free(pLVREV_Private->pScratch);
+        pLVREV_Private->pScratch = LVM_NULL;
+    }
+    if (pLVREV_Private->pInputSave) {
+        free(pLVREV_Private->pInputSave);
+        pLVREV_Private->pInputSave = LVM_NULL;
+    }
+
+    delete pLVREV_Private;
+    return LVREV_SUCCESS;
+}
 /* End of file */
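With the memory table path removed, LVREV_GetInstanceHandle now allocates everything internally (new for the instance, calloc for the delay and scratch buffers) and LVREV_FreeInstance releases it all. A minimal lifecycle sketch built only from the signatures and parameter fields shown above; the block size and delay-line count are illustrative and error handling is trimmed:

    // Hypothetical caller sketch; not part of this patch.
    LVREV_Handle_t hInstance = LVM_NULL;
    LVREV_InstanceParams_st instanceParams{};
    instanceParams.MaxBlockSize = 256;              // must be non-zero
    instanceParams.NumDelays = LVREV_DELAYLINES_4;  // LVREV_DELAYLINES_1, _2 or _4
    // (any remaining LVREV_InstanceParams_st fields are set as the application requires)

    if (LVREV_GetInstanceHandle(&hInstance, &instanceParams) == LVREV_SUCCESS) {
        // ... configure and call LVREV_Process() as before ...
        LVREV_FreeInstance(hInstance);  // frees the delay lines, scratch buffers and the instance
    }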
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
deleted file mode 100644
index f59933c..0000000
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetMemoryTable.cpp
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/****************************************************************************************/
-/*                                                                                      */
-/*  Includes                                                                            */
-/*                                                                                      */
-/****************************************************************************************/
-#include "LVREV_Private.h"
-#include "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVREV_GetMemoryTable                                        */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*  hInstance = NULL                Returns the memory requirements                     */
-/*  hInstance = Instance handle     Returns the memory requirements and allocated       */
-/*                                  base addresses.                                     */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory table                            */
-/*  pInstanceParams         Pointer to the instance parameters                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVREV_Success           Succeeded                                                   */
-/*  LVREV_NULLADDRESS       When pMemoryTable is NULL                                   */
-/*  LVREV_NULLADDRESS       When requesting memory requirements and pInstanceParams     */
-/*                          is NULL                                                     */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVREV_Process function                  */
-/*                                                                                      */
-/****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t           hInstance,
-                                           LVREV_MemoryTable_st     *pMemoryTable,
-                                           LVREV_InstanceParams_st  *pInstanceParams)
-{
-
-    INST_ALLOC              SlowData;
-    INST_ALLOC              FastData;
-    INST_ALLOC              FastCoef;
-    INST_ALLOC              Temporary;
-    LVM_INT16               i;
-    LVM_UINT16              MaxBlockSize;
-
-    /*
-     * Check for error conditions
-     */
-    /* Check for NULL pointer */
-    if (pMemoryTable == LVM_NULL)
-    {
-        return(LVREV_NULLADDRESS);
-    }
-
-    /*
-     * Check all instance parameters are in range
-     */
-    if (pInstanceParams != LVM_NULL)
-    {
-        /*
-         * Call for memory allocation, so check the parameters
-         */
-        /* Check for a non-zero block size */
-        if (pInstanceParams->MaxBlockSize == 0)
-        {
-            return LVREV_OUTOFRANGE;
-        }
-
-        /* Check for a valid number of delay lines */
-        if ((pInstanceParams->NumDelays != LVREV_DELAYLINES_1) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_2) &&
-            (pInstanceParams->NumDelays != LVREV_DELAYLINES_4))
-        {
-            return LVREV_OUTOFRANGE;
-        }
-    }
-
-    /*
-     * Initialise the InstAlloc instances
-     */
-    InstAlloc_Init(&SlowData,  (void *)LVM_NULL);
-    InstAlloc_Init(&FastData,  (void *)LVM_NULL);
-    InstAlloc_Init(&FastCoef,  (void *)LVM_NULL);
-    InstAlloc_Init(&Temporary, (void *)LVM_NULL);
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Check for null pointers
-         */
-        if (pInstanceParams == LVM_NULL)
-        {
-            return(LVREV_NULLADDRESS);
-        }
-
-        /*
-         * Select the maximum internal block size
-         */
-        if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_4)
-        {
-            MaxBlockSize = LVREV_MAX_AP3_DELAY;
-        }
-        else if(pInstanceParams->NumDelays ==LVREV_DELAYLINES_2)
-        {
-            MaxBlockSize = LVREV_MAX_AP1_DELAY;
-        }
-        else
-        {
-            MaxBlockSize = LVREV_MAX_AP0_DELAY;
-        }
-
-        if(MaxBlockSize>pInstanceParams->MaxBlockSize)
-        {
-            MaxBlockSize=pInstanceParams->MaxBlockSize;
-        }
-
-        /*
-         * Slow data memory
-         */
-        InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Size         = InstAlloc_GetTotal(&SlowData);
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].Type         = LVM_PERSISTENT_SLOW_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast data memory
-         */
-        InstAlloc_AddMember(&FastData, sizeof(LVREV_FastData_st));
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-        {
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T3_DELAY * sizeof(LVM_FLOAT));
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T2_DELAY * sizeof(LVM_FLOAT));
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
-        }
-
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-        {
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T1_DELAY * sizeof(LVM_FLOAT));
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
-        }
-
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-        {
-            InstAlloc_AddMember(&FastData, LVREV_MAX_T0_DELAY * sizeof(LVM_FLOAT));
-        }
-
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Size         = InstAlloc_GetTotal(&FastData);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].Type         = LVM_PERSISTENT_FAST_DATA;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent fast coefficient memory
-         */
-        InstAlloc_AddMember(&FastCoef, sizeof(LVREV_FastCoef_st));
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Size         = InstAlloc_GetTotal(&FastCoef);
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].Type         = LVM_PERSISTENT_FAST_COEF;
-        pMemoryTable->Region[LVM_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Temporary fast memory
-         */
-        /* General purpose scratch memory */
-        InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-        /* Mono->stereo input saved for end mix */
-        InstAlloc_AddMember(&Temporary, 2 * sizeof(LVM_FLOAT) * MaxBlockSize);
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_4)
-        {
-            for(i=0; i<4; i++)
-            {
-                /* A Scratch buffer for each delay line */
-                InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-            }
-        }
-
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_2)
-        {
-            for(i=0; i<2; i++)
-            {
-                /* A Scratch buffer for each delay line */
-                InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-            }
-        }
-
-        if(pInstanceParams->NumDelays == LVREV_DELAYLINES_1)
-        {
-            for(i=0; i<1; i++)
-            {
-                /* A Scratch buffer for each delay line */
-                InstAlloc_AddMember(&Temporary, sizeof(LVM_FLOAT) * MaxBlockSize);
-            }
-        }
-
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Size         = InstAlloc_GetTotal(&Temporary);
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].Type         = LVM_TEMPORARY_FAST;
-        pMemoryTable->Region[LVM_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-
-    }
-    else
-    {
-        LVREV_Instance_st   *pLVREV_Private = (LVREV_Instance_st *)hInstance;
-
-        /*
-         * Read back memory allocation table
-         */
-        *pMemoryTable = pLVREV_Private->MemoryTable;
-    }
-
-    return(LVREV_SUCCESS);
-}
-
-/* End of file */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
index 2c27c6e..9a2f9ca 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Private.h
@@ -23,6 +23,8 @@
 /*  Includes                                                                            */
 /*                                                                                      */
 /****************************************************************************************/
+
+#include <audio_utils/BiquadFilter.h>
 #include "LVREV.h"
 #include "LVREV_Tables.h"
 #include "BIQUAD.h"
@@ -37,63 +39,63 @@
 /*                                                                                      */
 /****************************************************************************************/
 /* General */
-#define ONE_OVER_SQRT_TWO            0.707107f           /* 1/sqrt(2) * 2^15 */
-#define LVREV_B_8_on_1000               0.008f           /* 0.8 * 2^31 */
-#define LVREV_HEADROOM                   0.25f           /* -12dB * 2^15 */
-#define LVREV_2_9_INQ29                   2.9f           /* 2.9 in Q29 format */
-#define LVREV_MIN3DB                0.7079457f           /* -3dB in Q15 format */
+#define ONE_OVER_SQRT_TWO 0.707107f /* 1/sqrt(2) * 2^15 */
+#define LVREV_B_8_on_1000 0.008f    /* 0.8 * 2^31 */
+#define LVREV_HEADROOM 0.25f        /* -12dB * 2^15 */
+#define LVREV_2_9_INQ29 2.9f        /* 2.9 in Q29 format */
+#define LVREV_MIN3DB 0.7079457f     /* -3dB in Q15 format */
 
 /* Internal constants */
-#define LVREV_LP_Poly_Order                 4
-#define LVREV_LP_Poly_Shift                 5
+#define LVREV_LP_Poly_Order 4
+#define LVREV_LP_Poly_Shift 5
 
-#define LVREV_T60_SCALE                0.000142f           /*(1/7000) */
+#define LVREV_T60_SCALE 0.000142f /*(1/7000) */
 
-#define LVREV_T_3_Power_0_on_4              1.0f
-#define LVREV_T_3_Power_1_on_4         1.316074f
-#define LVREV_T_3_Power_2_on_4         1.732051f
-#define LVREV_T_3_Power_3_on_4         2.279507f
-#define LVREV_T_3_Power_minus0_on_4         1.0f        /* 3^(-0/4) * 2^15 */
-#define LVREV_T_3_Power_minus1_on_4    0.759836f        /* 3^(-1/4) * 2^15 */
-#define LVREV_T_3_Power_minus2_on_4    0.577350f        /* 3^(-2/4) * 2^15 */
-#define LVREV_T_3_Power_minus3_on_4    0.438691f        /* 3^(-3/4) * 2^15 */
+#define LVREV_T_3_Power_0_on_4 1.0f
+#define LVREV_T_3_Power_1_on_4 1.316074f
+#define LVREV_T_3_Power_2_on_4 1.732051f
+#define LVREV_T_3_Power_3_on_4 2.279507f
+#define LVREV_T_3_Power_minus0_on_4 1.0f      /* 3^(-0/4) * 2^15 */
+#define LVREV_T_3_Power_minus1_on_4 0.759836f /* 3^(-1/4) * 2^15 */
+#define LVREV_T_3_Power_minus2_on_4 0.577350f /* 3^(-2/4) * 2^15 */
+#define LVREV_T_3_Power_minus3_on_4 0.438691f /* 3^(-3/4) * 2^15 */
 
-    /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T3_DELAY               10108
-    /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T2_DELAY               13304
-    /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T1_DELAY               17508
-    /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
-#define LVREV_MAX_T0_DELAY               23040
-    /* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP3_DELAY               6740
-    /* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP2_DELAY               8872
-    /* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP1_DELAY              11672
-    /* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
-#define LVREV_MAX_AP0_DELAY              15360
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T3_DELAY 10108
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T2_DELAY 13304
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T1_DELAY 17508
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1000 */
+#define LVREV_MAX_T0_DELAY 23040
+/* ((192000 * 120 * LVREV_T_3_Power_minus3_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP3_DELAY 6740
+/* ((192000 * 120 * LVREV_T_3_Power_minus2_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP2_DELAY 8872
+/* ((192000 * 120 * LVREV_T_3_Power_minus1_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP1_DELAY 11672
+/* ((192000 * 120 * LVREV_T_3_Power_minus0_on_4) >> 15) / 1500 */
+#define LVREV_MAX_AP0_DELAY 15360
 
-#define LVREV_BYPASSMIXER_TC             1000           /* Bypass mixer time constant*/
-#define LVREV_ALLPASS_TC                 1000           /* All-pass filter time constant */
-#define LVREV_ALLPASS_TAP_TC             10000           /* All-pass filter dely tap change */
-#define LVREV_FEEDBACKMIXER_TC            100           /* Feedback mixer time constant*/
-#define LVREV_OUTPUTGAIN_SHIFT              5           /* Bits shift for output gain correction */
+#define LVREV_BYPASSMIXER_TC 1000  /* Bypass mixer time constant */
+#define LVREV_ALLPASS_TC 1000      /* All-pass filter time constant */
+#define LVREV_ALLPASS_TAP_TC 10000 /* All-pass filter delay tap change */
+#define LVREV_FEEDBACKMIXER_TC 100 /* Feedback mixer time constant */
+#define LVREV_OUTPUTGAIN_SHIFT 5   /* Bits shift for output gain correction */
 
 /* Parameter limits */
-#define LVREV_NUM_FS                       13           /* Number of supported sample rates */
+#define LVREV_NUM_FS 13 /* Number of supported sample rates */
 
-#define LVREV_MAXBLKSIZE_LIMIT             64           /* Maximum block size low limit */
-#define LVREV_MAX_LEVEL                   100           /* Maximum level, 100% */
-#define LVREV_MIN_LPF_CORNER               50           /* Low pass filter limits */
-#define LVREV_MAX_LPF_CORNER            23999
-#define LVREV_MIN_HPF_CORNER               20           /* High pass filrer limits */
-#define LVREV_MAX_HPF_CORNER             1000
-#define LVREV_MAX_T60                    7000           /* Maximum T60 time in ms */
-#define LVREV_MAX_DENSITY                 100           /* Maximum density, 100% */
-#define LVREV_MAX_DAMPING                 100           /* Maximum damping, 100% */
-#define LVREV_MAX_ROOMSIZE                100           /* Maximum room size, 100% */
+#define LVREV_MAXBLKSIZE_LIMIT 64 /* Maximum block size low limit */
+#define LVREV_MAX_LEVEL 100       /* Maximum level, 100% */
+#define LVREV_MIN_LPF_CORNER 50   /* Low pass filter limits */
+#define LVREV_MAX_LPF_CORNER 23999
+#define LVREV_MIN_HPF_CORNER 20 /* High pass filter limits */
+#define LVREV_MAX_HPF_CORNER 1000
+#define LVREV_MAX_T60 7000     /* Maximum T60 time in ms */
+#define LVREV_MAX_DENSITY 100  /* Maximum density, 100% */
+#define LVREV_MAX_DAMPING 100  /* Maximum damping, 100% */
+#define LVREV_MAX_ROOMSIZE 100 /* Maximum room size, 100% */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -101,73 +103,57 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Fast data structure */
-typedef struct
-{
-    Biquad_1I_Order1_FLOAT_Taps_t HPTaps;                     /* High pass filter taps */
-    Biquad_1I_Order1_FLOAT_Taps_t LPTaps;                     /* Low pass filter taps */
-    Biquad_1I_Order1_FLOAT_Taps_t RevLPTaps[4];               /* Reverb low pass filters taps */
-
-} LVREV_FastData_st;
-
-/* Fast coefficient structure */
-typedef struct
-{
-
-    Biquad_FLOAT_Instance_t       HPCoefs;              /* High pass filter coefficients */
-    Biquad_FLOAT_Instance_t       LPCoefs;              /* Low pass filter coefficients */
-    Biquad_FLOAT_Instance_t       RevLPCoefs[4];        /* Reverb low pass filters coefficients */
-
-} LVREV_FastCoef_st;
-typedef struct
-{
+typedef struct {
     /* General */
-    LVREV_InstanceParams_st InstanceParams;           /* Initialisation time instance parameters */
-    LVREV_MemoryTable_st    MemoryTable;              /* Memory table */
-    LVREV_ControlParams_st  CurrentParams;            /* Parameters being used */
-    LVREV_ControlParams_st  NewParams;                /* New parameters from the \
-                                                         calling application */
-    LVM_CHAR                bControlPending;          /* Flag to indicate new parameters \
-                                                         are available */
-    LVM_CHAR                bFirstControl;            /* Flag to indicate that the control \
-                                                         function is called for the first time */
-    LVM_CHAR                bDisableReverb;           /* Flag to indicate that the mix level is
-                                                         0% and the reverb can be disabled */
-    LVM_INT32               RoomSizeInms;             /* Room size in msec */
-    LVM_INT32               MaxBlkLen;                /* Maximum block size for internal
-                                                         processing */
+    LVREV_InstanceParams_st InstanceParams; /* Initialisation time instance parameters */
+    LVREV_ControlParams_st CurrentParams;   /* Parameters being used */
+    LVREV_ControlParams_st NewParams;       /* New parameters from the \
+                                               calling application */
+    LVM_CHAR bControlPending;               /* Flag to indicate new parameters \
+                                               are available */
+    LVM_CHAR bFirstControl;                 /* Flag to indicate that the control \
+                                               function is called for the first time */
+    LVM_CHAR bDisableReverb;                /* Flag to indicate that the mix level is
+                                               0% and the reverb can be disabled */
+    LVM_INT32 RoomSizeInms;                 /* Room size in msec */
+    LVM_INT32 MaxBlkLen;                    /* Maximum block size for internal
+                                               processing */
 
     /* Aligned memory pointers */
-    LVREV_FastData_st       *pFastData;               /* Fast data memory base address */
-    LVREV_FastCoef_st       *pFastCoef;               /* Fast coefficient memory base address */
-    LVM_FLOAT               *pScratchDelayLine[4];    /* Delay line scratch memory */
-    LVM_FLOAT               *pScratch;                /* Multi ussge scratch */
-    LVM_FLOAT               *pInputSave;              /* Reverb block input save for dry/wet
-                                                         mixing*/
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pRevHPFBiquad; /* Biquad filter instance for HPF */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pRevLPFBiquad; /* Biquad filter instance for LPF */
+    std::array<std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>, LVREV_DELAYLINES_4>
+            revLPFBiquad; /* Biquad filter instance for Reverb LPF */
+    LVM_FLOAT* pScratchDelayLine[LVREV_DELAYLINES_4]; /* Delay line scratch memory */
+    LVM_FLOAT* pScratch;             /* Multi usage scratch */
+    LVM_FLOAT* pInputSave;           /* Reverb block input save for dry/wet
+                                        mixing*/
 
     /* Feedback matrix */
-    Mix_1St_Cll_FLOAT_t     FeedbackMixer[4];         /* Mixer for Pop and Click Supression \
-                                                         caused by feedback Gain */
+    Mix_1St_Cll_FLOAT_t FeedbackMixer[LVREV_DELAYLINES_4]; /* Mixer for Pop and Click Suppression \
+                                             caused by feedback Gain */
 
     /* All-Pass Filter */
-    LVM_INT32               T[4];                     /* Maximum delay size of buffer */
-    LVM_FLOAT               *pDelay_T[4];             /* Pointer to delay buffers */
-    LVM_INT32               Delay_AP[4];              /* Offset to AP delay buffer start */
-    LVM_INT16               AB_Selection;             /* Smooth from tap A to B when 1 \
-                                                         otherwise B to A */
-    LVM_INT32               A_DelaySize[4];           /* A delay length in samples */
-    LVM_INT32               B_DelaySize[4];           /* B delay length in samples */
-    LVM_FLOAT               *pOffsetA[4];             /* Offset for the A delay tap */
-    LVM_FLOAT               *pOffsetB[4];             /* Offset for the B delay tap */
-    Mix_2St_Cll_FLOAT_t     Mixer_APTaps[4];          /* Smoothed AP delay mixer */
-    Mix_1St_Cll_FLOAT_t     Mixer_SGFeedback[4];      /* Smoothed SAfeedback gain */
-    Mix_1St_Cll_FLOAT_t     Mixer_SGFeedforward[4];   /* Smoothed AP feedforward gain */
+    LVM_INT32 T[LVREV_DELAYLINES_4];                          /* Maximum delay size of buffer */
+    LVM_FLOAT* pDelay_T[LVREV_DELAYLINES_4];                  /* Pointer to delay buffers */
+    LVM_INT32 Delay_AP[LVREV_DELAYLINES_4];                   /* Offset to AP delay buffer start */
+    LVM_INT16 AB_Selection;                     /* Smooth from tap A to B when 1 \
+                                                   otherwise B to A */
+    LVM_INT32 A_DelaySize[LVREV_DELAYLINES_4];                /* A delay length in samples */
+    LVM_INT32 B_DelaySize[LVREV_DELAYLINES_4];                /* B delay length in samples */
+    LVM_FLOAT* pOffsetA[LVREV_DELAYLINES_4];                  /* Offset for the A delay tap */
+    LVM_FLOAT* pOffsetB[LVREV_DELAYLINES_4];                  /* Offset for the B delay tap */
+    Mix_2St_Cll_FLOAT_t Mixer_APTaps[LVREV_DELAYLINES_4];     /* Smoothed AP delay mixer */
+    Mix_1St_Cll_FLOAT_t Mixer_SGFeedback[LVREV_DELAYLINES_4]; /* Smoothed AP feedback gain */
+    Mix_1St_Cll_FLOAT_t Mixer_SGFeedforward[LVREV_DELAYLINES_4]; /* Smoothed AP feedforward gain */
 
     /* Output gain */
-    Mix_2St_Cll_FLOAT_t     BypassMixer;              /* Dry/wet mixer */
-    LVM_FLOAT               Gain;                     /* Gain applied to output to maintain
-                                                         average signal power */
-    Mix_1St_Cll_FLOAT_t     GainMixer;                /* Gain smoothing */
+    Mix_2St_Cll_FLOAT_t BypassMixer; /* Dry/wet mixer */
+    LVM_FLOAT Gain;                  /* Gain applied to output to maintain
+                                        average signal power */
+    Mix_1St_Cll_FLOAT_t GainMixer;   /* Gain smoothing */
 
 } LVREV_Instance_st;
 
@@ -177,15 +163,12 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVREV_ReturnStatus_en   LVREV_ApplyNewSettings(LVREV_Instance_st     *pPrivate);
-void                    ReverbBlock(LVM_FLOAT           *pInput,
-                                    LVM_FLOAT           *pOutput,
-                                    LVREV_Instance_st   *pPrivate,
-                                    LVM_UINT16          NumSamples);
-LVM_INT32               BypassMixer_Callback(void       *pCallbackData,
-                                             void       *pGeneralPurpose,
-                                             LVM_INT16  GeneralPurpose );
+LVREV_ReturnStatus_en LVREV_ApplyNewSettings(LVREV_Instance_st* pPrivate);
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+                 LVM_UINT16 NumSamples);
+LVM_INT32 BypassMixer_Callback(void* pCallbackData, void* pGeneralPurpose,
+                               LVM_INT16 GeneralPurpose);
 
-#endif  /** __LVREV_PRIVATE_H__ **/
+#endif /** __LVREV_PRIVATE_H__ **/
 
 /* End of file */
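The instance code above indexes LVREV_MAX_T_DELAY[] and LVREV_MAX_AP_DELAY[] instead of the individual per-line macros, but their definitions do not appear in these hunks. A plausible shape, assuming they simply collect the existing #defines in delay-line order (the real declarations are introduced elsewhere in this change):

    // Hypothetical aggregation of the per-line maxima, for illustration only.
    static const LVM_UINT16 LVREV_MAX_T_DELAY[] = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY,
                                                   LVREV_MAX_T2_DELAY, LVREV_MAX_T3_DELAY};
    static const LVM_UINT16 LVREV_MAX_AP_DELAY[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY,
                                                    LVREV_MAX_AP2_DELAY, LVREV_MAX_AP3_DELAY};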
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
index 35f9ad8..f9c3266 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Process.cpp
@@ -45,43 +45,37 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t      hInstance,
-                                    const LVM_FLOAT     *pInData,
-                                    LVM_FLOAT           *pOutData,
-                                    const LVM_UINT16    NumSamples)
-{
-   LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
-   LVM_FLOAT             *pInput  = (LVM_FLOAT *)pInData;
-   LVM_FLOAT             *pOutput = pOutData;
-   LVM_INT32             SamplesToProcess, RemainingSamples;
-   LVM_INT32             format = 1;
+LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                    LVM_FLOAT* pOutData, const LVM_UINT16 NumSamples) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
+    LVM_FLOAT* pInput = (LVM_FLOAT*)pInData;
+    LVM_FLOAT* pOutput = pOutData;
+    LVM_INT32 SamplesToProcess, RemainingSamples;
+    LVM_INT32 format = 1;
 
     /*
      * Check for error conditions
      */
 
     /* Check for NULL pointers */
-    if((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pInData == LVM_NULL) || (pOutData == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
     /*
      * Apply the new controls settings if required
      */
-    if(pLVREV_Private->bControlPending == LVM_TRUE)
-    {
-        LVREV_ReturnStatus_en   errorCode;
+    if (pLVREV_Private->bControlPending == LVM_TRUE) {
+        LVREV_ReturnStatus_en errorCode;
 
         /*
          * Clear the pending flag and update the control settings
          */
         pLVREV_Private->bControlPending = LVM_FALSE;
 
-        errorCode = LVREV_ApplyNewSettings (pLVREV_Private);
+        errorCode = LVREV_ApplyNewSettings(pLVREV_Private);
 
-        if(errorCode != LVREV_SUCCESS)
-        {
+        if (errorCode != LVREV_SUCCESS) {
             return errorCode;
         }
     }
@@ -89,27 +83,23 @@
     /*
      * Trap the case where the number of samples is zero.
      */
-    if (NumSamples == 0)
-    {
+    if (NumSamples == 0) {
         return LVREV_SUCCESS;
     }
 
     /*
      * If OFF copy and reformat the data as necessary
      */
-    if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF)
-    {
-        if(pInput != pOutput)
-        {
+    if (pLVREV_Private->CurrentParams.OperatingMode == LVM_MODE_OFF) {
+        if (pInput != pOutput) {
             /*
             * Copy the data to the output buffer, converting to stereo if required
              */
-            if(pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO){
+            if (pLVREV_Private->CurrentParams.SourceFormat == LVM_MONO) {
                 MonoTo2I_Float(pInput, pOutput, NumSamples);
             } else {
-                Copy_Float(pInput,
-                           pOutput,
-                           (LVM_INT16)(NumSamples << 1)); // 32 bit data, stereo
+                Copy_Float(pInput, pOutput,
+                           (LVM_INT16)(NumSamples << 1));  // 32 bit data, stereo
             }
         }
 
@@ -118,31 +108,26 @@
 
     RemainingSamples = (LVM_INT32)NumSamples;
 
-    if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO)
-    {
+    if (pLVREV_Private->CurrentParams.SourceFormat != LVM_MONO) {
         format = 2;
     }
 
-    while (RemainingSamples!=0)
-    {
+    while (RemainingSamples != 0) {
         /*
          * Process the data
          */
 
-        if(RemainingSamples >  pLVREV_Private->MaxBlkLen)
-        {
-            SamplesToProcess =  pLVREV_Private->MaxBlkLen;
+        if (RemainingSamples > pLVREV_Private->MaxBlkLen) {
+            SamplesToProcess = pLVREV_Private->MaxBlkLen;
             RemainingSamples = (LVM_INT16)(RemainingSamples - SamplesToProcess);
-        }
-        else
-        {
+        } else {
             SamplesToProcess = RemainingSamples;
             RemainingSamples = 0;
         }
 
         ReverbBlock(pInput, pOutput, pLVREV_Private, (LVM_UINT16)SamplesToProcess);
-        pInput  = (LVM_FLOAT *)(pInput + (SamplesToProcess * format));
-        pOutput = (LVM_FLOAT *)(pOutput + (SamplesToProcess * 2));      // Always stereo output
+        pInput = (LVM_FLOAT*)(pInput + (SamplesToProcess * format));
+        pOutput = (LVM_FLOAT*)(pOutput + (SamplesToProcess * 2));  // Always stereo output
     }
 
     return LVREV_SUCCESS;
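The loop above always advances the output pointer by two samples per frame ("Always stereo output"), so the caller's output buffer must hold interleaved stereo, 2 * NumSamples floats, even when the source format has been configured as mono via the control parameters. A small sizing sketch under that assumption, reusing an hInstance obtained as in the lifecycle sketch earlier (std::vector is used only for brevity):

    // Hypothetical buffer sizing for a mono-in / stereo-out call.
    const LVM_UINT16 numSamples = 480;
    std::vector<LVM_FLOAT> input(numSamples);       // mono input frames
    std::vector<LVM_FLOAT> output(2 * numSamples);  // interleaved stereo output
    (void)LVREV_Process(hInstance, input.data(), output.data(), numSamples);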
@@ -170,16 +155,15 @@
 /*  1. The input and output buffers must be 32-bit aligned                              */
 /*                                                                                      */
 /****************************************************************************************/
-void ReverbBlock(LVM_FLOAT *pInput, LVM_FLOAT *pOutput,
-                 LVREV_Instance_st *pPrivate, LVM_UINT16 NumSamples)
-{
-    LVM_INT16   j, size;
-    LVM_FLOAT   *pDelayLine;
-    LVM_FLOAT   *pDelayLineInput = pPrivate->pScratch;
-    LVM_FLOAT   *pScratch = pPrivate->pScratch;
-    LVM_FLOAT   *pIn;
-    LVM_FLOAT   *pTemp = pPrivate->pInputSave;
-    LVM_INT32   NumberOfDelayLines;
+void ReverbBlock(LVM_FLOAT* pInput, LVM_FLOAT* pOutput, LVREV_Instance_st* pPrivate,
+                 LVM_UINT16 NumSamples) {
+    LVM_INT16 j, size;
+    LVM_FLOAT* pDelayLine;
+    LVM_FLOAT* pDelayLineInput = pPrivate->pScratch;
+    LVM_FLOAT* pScratch = pPrivate->pScratch;
+    LVM_FLOAT* pIn;
+    LVM_FLOAT* pTemp = pPrivate->pInputSave;
+    LVM_INT32 NumberOfDelayLines;
 
     /******************************************************************************
      * All calculations will go into the buffer pointed to by pTemp, this will    *
@@ -196,85 +180,61 @@
      * and the final output is converted to STEREO after the mixer                *
      ******************************************************************************/
 
-    if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-    {
+    if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
         NumberOfDelayLines = 4;
-    }
-    else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-    {
+    } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
         NumberOfDelayLines = 2;
-    }
-    else
-    {
+    } else {
         NumberOfDelayLines = 1;
     }
 
-    if(pPrivate->CurrentParams.SourceFormat == LVM_MONO)
-    {
+    if (pPrivate->CurrentParams.SourceFormat == LVM_MONO) {
         pIn = pInput;
-    }
-    else
-    {
+    } else {
         /*
          *  Stereo to mono conversion
          */
 
-        From2iToMono_Float(pInput,
-                           pTemp,
-                           (LVM_INT16)NumSamples);
+        From2iToMono_Float(pInput, pTemp, (LVM_INT16)NumSamples);
         pIn = pTemp;
     }
 
-    Mult3s_Float(pIn,
-                 (LVM_FLOAT)LVREV_HEADROOM,
-                 pTemp,
-                 (LVM_INT16)NumSamples);
+    Mult3s_Float(pIn, (LVM_FLOAT)LVREV_HEADROOM, pTemp, (LVM_INT16)NumSamples);
 
     /*
      *  High pass filter
      */
-    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->HPCoefs,
-                               pTemp,
-                               pTemp,
-                               (LVM_INT16)NumSamples);
+    pPrivate->pRevHPFBiquad->process(pTemp, pTemp, NumSamples);
+
     /*
      *  Low pass filter
      */
-    FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->LPCoefs,
-                               pTemp,
-                               pTemp,
-                               (LVM_INT16)NumSamples);
+    pPrivate->pRevLPFBiquad->process(pTemp, pTemp, NumSamples);
 
     /*
      *  Process all delay lines
      */
 
-    for(j = 0; j < NumberOfDelayLines; j++)
-    {
+    for (j = 0; j < NumberOfDelayLines; j++) {
         pDelayLine = pPrivate->pScratchDelayLine[j];
 
         /*
          * All-pass filter with pop and click suppression
          */
         /* Get the smoothed, delayed output. Put it in the output buffer */
-        MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j],
-                               pPrivate->pOffsetA[j],
-                               pPrivate->pOffsetB[j],
-                               pDelayLine,
-                               (LVM_INT16)NumSamples);
+        MixSoft_2St_D32C31_SAT(&pPrivate->Mixer_APTaps[j], pPrivate->pOffsetA[j],
+                               pPrivate->pOffsetB[j], pDelayLine, (LVM_INT16)NumSamples);
         /* Re-align the all pass filter delay buffer and copy the fixed delay data \
            to the AP delay in the process */
-        Copy_Float(&pPrivate->pDelay_T[j][NumSamples],
-                   pPrivate->pDelay_T[j],
-                   (LVM_INT16)(pPrivate->T[j] - NumSamples));         /* 32-bit data */
+        Copy_Float(&pPrivate->pDelay_T[j][NumSamples], pPrivate->pDelay_T[j],
+                   (LVM_INT16)(pPrivate->T[j] - NumSamples)); /* 32-bit data */
         /* Apply the smoothed feedback and save to fixed delay input (currently empty) */
-        MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j],
-                               pDelayLine,
+        MixSoft_1St_D32C31_WRA(&pPrivate->Mixer_SGFeedback[j], pDelayLine,
                                &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
                                (LVM_INT16)NumSamples);
         /* Sum into the AP delay line */
         Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                        -1.0f,    /* Invert since the feedback coefficient is negative */
+                        -1.0f, /* Invert since the feedback coefficient is negative */
                         &pPrivate->pDelay_T[j][pPrivate->Delay_AP[j] - NumSamples],
                         (LVM_INT16)NumSamples);
         /* Apply smoothed feedforward and save to fixed delay input (currently empty) */
@@ -283,9 +243,7 @@
                                &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
                                (LVM_INT16)NumSamples);
         /* Sum into the AP output */
-        Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                        1.0f,
-                        pDelayLine,
+        Mac3s_Sat_Float(&pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples], 1.0f, pDelayLine,
                         (LVM_INT16)NumSamples);
 
         /*
@@ -296,34 +254,26 @@
         /*
          *  Low pass filter
          */
-        FO_1I_D32F32C31_TRC_WRA_01(&pPrivate->pFastCoef->RevLPCoefs[j],
-                                   pDelayLine,
-                                   pDelayLine,
-                                   (LVM_INT16)NumSamples);
+        pPrivate->revLPFBiquad[j]->process(pDelayLine, pDelayLine, NumSamples);
     }
 
     /*
      *  Apply rotation matrix and delay samples
      */
-    for(j = 0; j < NumberOfDelayLines; j++)
-    {
-
-        Copy_Float(pTemp,
-                   pDelayLineInput,
-                   (LVM_INT16)(NumSamples));
+    for (j = 0; j < NumberOfDelayLines; j++) {
+        Copy_Float(pTemp, pDelayLineInput, (LVM_INT16)(NumSamples));
         /*
          *  Rotation matrix mix
          */
-        switch(j)
-        {
+        switch (j) {
             case 3:
                 /*
                  *  Add delay line 1 and 2 contribution
                  */
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[2], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
 
                 break;
             case 2:
@@ -331,61 +281,52 @@
                 /*
                  *  Add delay line 0 and 3 contribution
                  */
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
-                 Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f,
-                                 pDelayLineInput, (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
+                Mac3s_Sat_Float(pPrivate->pScratchDelayLine[3], -1.0f, pDelayLineInput,
+                                (LVM_INT16)NumSamples);
 
                 break;
             case 1:
-                if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-                {
+                if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
                     /*
                      *  Add delay line 0 and 3 contribution
                      */
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
 
-                }
-                else
-                {
+                } else {
                     /*
                      *  Add delay line 0 and 1 contribution
                      */
-                     Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f,
-                                     pDelayLineInput, (LVM_INT16)NumSamples);
-                     Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                     pDelayLineInput, (LVM_INT16)NumSamples);
-
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                 }
                 break;
             case 0:
-                if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4)
-                {
+                if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_4) {
                     /*
                      *  Add delay line 1 and 2 contribution
                      */
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
 
-                }
-                else if(pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2)
-                {
+                } else if (pPrivate->InstanceParams.NumDelays == LVREV_DELAYLINES_2) {
                     /*
                      *  Add delay line 0 and 1 contribution
                      */
                     Add2_Sat_Float(pPrivate->pScratchDelayLine[0], pDelayLineInput,
                                    (LVM_INT16)NumSamples);
-                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f,
-                                    pDelayLineInput, (LVM_INT16)NumSamples);
+                    Mac3s_Sat_Float(pPrivate->pScratchDelayLine[1], -1.0f, pDelayLineInput,
+                                    (LVM_INT16)NumSamples);
 
-                }
-                else
-                {
+                } else {
                     /*
                      *  Add delay line 0 contribution
                      */
@@ -402,54 +343,37 @@
         /*
          *  Delay samples
          */
-        Copy_Float(pDelayLineInput,
-                   &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
-                   (LVM_INT16)(NumSamples));              /* 32-bit data */
+        Copy_Float(pDelayLineInput, &pPrivate->pDelay_T[j][pPrivate->T[j] - NumSamples],
+                   (LVM_INT16)(NumSamples)); /* 32-bit data */
     }
 
     /*
      *  Create stereo output
      */
-    switch(pPrivate->InstanceParams.NumDelays)
-    {
+    switch (pPrivate->InstanceParams.NumDelays) {
         case LVREV_DELAYLINES_4:
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[3],
-                            pPrivate->pScratchDelayLine[0],
-                            (LVM_INT16)NumSamples);
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[2],
-                            pPrivate->pScratchDelayLine[1],
-                            (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[3], pPrivate->pScratchDelayLine[0],
+                           (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[2], pPrivate->pScratchDelayLine[1],
+                           (LVM_INT16)NumSamples);
 
-            JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
-                           pPrivate->pScratchDelayLine[1],
-                           pTemp,
+            JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pPrivate->pScratchDelayLine[1], pTemp,
                            (LVM_INT16)NumSamples);
 
             break;
         case LVREV_DELAYLINES_2:
 
-             Copy_Float(pPrivate->pScratchDelayLine[1],
-                        pScratch,
-                        (LVM_INT16)(NumSamples));
+            Copy_Float(pPrivate->pScratchDelayLine[1], pScratch, (LVM_INT16)(NumSamples));
 
-             Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0],
-                            -1.0f,
-                            pScratch,
-                            (LVM_INT16)NumSamples);
+            Mac3s_Sat_Float(pPrivate->pScratchDelayLine[0], -1.0f, pScratch, (LVM_INT16)NumSamples);
 
-             Add2_Sat_Float(pPrivate->pScratchDelayLine[1],
-                            pPrivate->pScratchDelayLine[0],
-                            (LVM_INT16)NumSamples);
+            Add2_Sat_Float(pPrivate->pScratchDelayLine[1], pPrivate->pScratchDelayLine[0],
+                           (LVM_INT16)NumSamples);
 
-             JoinTo2i_Float(pPrivate->pScratchDelayLine[0],
-                            pScratch,
-                            pTemp,
-                            (LVM_INT16)NumSamples);
+            JoinTo2i_Float(pPrivate->pScratchDelayLine[0], pScratch, pTemp, (LVM_INT16)NumSamples);
             break;
         case LVREV_DELAYLINES_1:
-            MonoTo2I_Float(pPrivate->pScratchDelayLine[0],
-                           pTemp,
-                           (LVM_INT16)NumSamples);
+            MonoTo2I_Float(pPrivate->pScratchDelayLine[0], pTemp, (LVM_INT16)NumSamples);
             break;
         default:
             break;
@@ -460,25 +384,14 @@
      */
 
     size = (LVM_INT16)(NumSamples << 1);
-    MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer,
-                           pTemp,
-                           pTemp,
-                           pOutput,
-                           size);
+    MixSoft_2St_D32C31_SAT(&pPrivate->BypassMixer, pTemp, pTemp, pOutput, size);
 
     /* Apply Gain*/
 
-    Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT,
-                    pOutput,
-                    pOutput,
-                    size);
+    Shift_Sat_Float(LVREV_OUTPUTGAIN_SHIFT, pOutput, pOutput, size);
 
-    MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer,
-                           pOutput,
-                           pOutput,
-                           size);
+    MixSoft_1St_D32C31_WRA(&pPrivate->GainMixer, pOutput, pOutput, size);
 
     return;
 }
 /* End of file */
-
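
The main functional change in this file is that the first-order high-pass and low-pass stages, previously run through FO_1I_D32F32C31_TRC_WRA_01() with coefficient structs, are now driven through filter objects via process(in, out, frames). The sketch below is not the class behind pRevHPFBiquad/pRevLPFBiquad (that type is defined elsewhere in the tree); it is a generic transposed direct form II biquad shown only to illustrate what such a process() call computes, and with b2 = a2 = 0 it reduces to a first-order section like the ones replaced here.

// Generic illustration only; not the filter class used by this diff.
struct SimpleBiquad {
    float b0, b1, b2, a1, a2;  // coefficients normalized so a0 == 1
    float z1 = 0.f, z2 = 0.f;  // state

    void process(const float* in, float* out, int frames) {
        for (int i = 0; i < frames; ++i) {
            const float x = in[i];
            const float y = b0 * x + z1;
            z1 = b1 * x - a1 * y + z2;
            z2 = b2 * x - a2 * y;
            out[i] = y;  // in-place use (in == out) is safe with this form
        }
    }
};
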
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
index 2a75559..e5a0bc8 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_SetControlParameters.cpp
@@ -42,84 +42,67 @@
 /*  1.  This function may be interrupted by the LVREV_Process function                  */
 /*                                                                                      */
 /****************************************************************************************/
-LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t           hInstance,
-                                                 LVREV_ControlParams_st   *pNewParams)
-{
-
-    LVREV_Instance_st     *pLVREV_Private = (LVREV_Instance_st *)hInstance;
+LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance,
+                                                 LVREV_ControlParams_st* pNewParams) {
+    LVREV_Instance_st* pLVREV_Private = (LVREV_Instance_st*)hInstance;
 
     /*
      * Check for error conditions
      */
-    if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
-    {
+    if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
         return LVREV_NULLADDRESS;
     }
 
     /*
      * Check all new control parameters are in range
      */
-    if(    ((pNewParams->OperatingMode != LVM_MODE_OFF) && (pNewParams->OperatingMode != LVM_MODE_ON))                                         ||
-        (
-        (pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) && (pNewParams->SampleRate != LVM_FS_12000)       &&
-        (pNewParams->SampleRate != LVM_FS_16000) && (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000)       &&
-        (pNewParams->SampleRate != LVM_FS_32000) &&
-        (pNewParams->SampleRate != LVM_FS_44100) &&
-        (pNewParams->SampleRate != LVM_FS_48000)
-        && (pNewParams->SampleRate != LVM_FS_88200) && (pNewParams->SampleRate != LVM_FS_96000)
-        && (pNewParams->SampleRate != LVM_FS_176400) && (pNewParams->SampleRate != LVM_FS_192000)
-        )
-#ifdef SUPPORT_MC
-        || ((pNewParams->SourceFormat != LVM_STEREO)       &&
-            (pNewParams->SourceFormat != LVM_MONOINSTEREO) &&
-            (pNewParams->SourceFormat != LVM_MONO)         &&
-            (pNewParams->SourceFormat != LVM_MULTICHANNEL)))
-#else
-        || ((pNewParams->SourceFormat != LVM_STEREO) && (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO)) )
-#endif
-    {
+    if (((pNewParams->OperatingMode != LVM_MODE_OFF) &&
+         (pNewParams->OperatingMode != LVM_MODE_ON)) ||
+        ((pNewParams->SampleRate != LVM_FS_8000) && (pNewParams->SampleRate != LVM_FS_11025) &&
+         (pNewParams->SampleRate != LVM_FS_12000) && (pNewParams->SampleRate != LVM_FS_16000) &&
+         (pNewParams->SampleRate != LVM_FS_22050) && (pNewParams->SampleRate != LVM_FS_24000) &&
+         (pNewParams->SampleRate != LVM_FS_32000) && (pNewParams->SampleRate != LVM_FS_44100) &&
+         (pNewParams->SampleRate != LVM_FS_48000) && (pNewParams->SampleRate != LVM_FS_88200) &&
+         (pNewParams->SampleRate != LVM_FS_96000) && (pNewParams->SampleRate != LVM_FS_176400) &&
+         (pNewParams->SampleRate != LVM_FS_192000)) ||
+        ((pNewParams->SourceFormat != LVM_STEREO) &&
+         (pNewParams->SourceFormat != LVM_MONOINSTEREO) && (pNewParams->SourceFormat != LVM_MONO) &&
+         (pNewParams->SourceFormat != LVM_MULTICHANNEL))) {
         return (LVREV_OUTOFRANGE);
     }
 
-    if (pNewParams->Level > LVREV_MAX_LEVEL)
-    {
+    if (pNewParams->Level > LVREV_MAX_LEVEL) {
         return LVREV_OUTOFRANGE;
     }
 
-    if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER))
-    {
+    if ((pNewParams->LPF < LVREV_MIN_LPF_CORNER) || (pNewParams->LPF > LVREV_MAX_LPF_CORNER)) {
         return LVREV_OUTOFRANGE;
     }
 
-    if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER))
-    {
+    if ((pNewParams->HPF < LVREV_MIN_HPF_CORNER) || (pNewParams->HPF > LVREV_MAX_HPF_CORNER)) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->T60 > LVREV_MAX_T60)
-    {
+    if (pNewParams->T60 > LVREV_MAX_T60) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->Density > LVREV_MAX_DENSITY)
-    {
+    if (pNewParams->Density > LVREV_MAX_DENSITY) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->Damping > LVREV_MAX_DAMPING)
-    {
+    if (pNewParams->Damping > LVREV_MAX_DAMPING) {
         return LVREV_OUTOFRANGE;
     }
 
-    if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE)
-    {
+    if (pNewParams->RoomSize > LVREV_MAX_ROOMSIZE) {
         return LVREV_OUTOFRANGE;
     }
 
     /*
      * Copy the new parameters and set the flag to indicate they are available
      */
-    pLVREV_Private->NewParams       = *pNewParams;
+    pLVREV_Private->NewParams = *pNewParams;
     pLVREV_Private->bControlPending = LVM_TRUE;
 
     return LVREV_SUCCESS;
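
LVREV_SetControlParameters() only validates and stages the new settings: every field of LVREV_ControlParams_st is range-checked against the supported enums and LVREV_MAX_* limits, then the struct is copied into NewParams and bControlPending is raised so the processing path applies the change later. A hedged usage sketch follows; it assumes hInstance was obtained from the library's instance-creation call (not part of this diff), that the public header LVREV.h is on the include path, and that the placeholder numeric values sit inside the documented limits.

#include <LVREV.h>

LVREV_ReturnStatus_en applyDefaultReverb(LVREV_Handle_t hInstance) {
    LVREV_ControlParams_st params{};  // zero-init any fields not set explicitly
    params.OperatingMode = LVM_MODE_ON;
    params.SampleRate = LVM_FS_48000;
    params.SourceFormat = LVM_STEREO;
    params.Level = 50;     // assumed <= LVREV_MAX_LEVEL
    params.LPF = 12000;    // assumed within the LPF corner limits (Hz)
    params.HPF = 50;       // assumed within the HPF corner limits (Hz)
    params.T60 = 1000;     // assumed <= LVREV_MAX_T60
    params.Density = 50;   // assumed <= LVREV_MAX_DENSITY
    params.Damping = 50;   // assumed <= LVREV_MAX_DAMPING
    params.RoomSize = 50;  // assumed <= LVREV_MAX_ROOMSIZE
    // The new values are only staged here; they take effect once the processing
    // side services the bControlPending flag set by this call.
    return LVREV_SetControlParameters(hInstance, &params);
}
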
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
index 5cd623e..bb6cf12 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.cpp
@@ -29,26 +29,17 @@
 /*                                                                                      */
 /****************************************************************************************/
 
+const LVM_INT16 LVREV_MAX_T_DELAY[] = {LVREV_MAX_T0_DELAY, LVREV_MAX_T1_DELAY, LVREV_MAX_T2_DELAY,
+                                       LVREV_MAX_T3_DELAY};
+const LVM_INT16 LVREV_MAX_AP_DELAY[] = {LVREV_MAX_AP0_DELAY, LVREV_MAX_AP1_DELAY,
+                                        LVREV_MAX_AP2_DELAY, LVREV_MAX_AP3_DELAY};
+
 /* Table with supported sampling rates.  The table can be indexed using LVM_Fs_en       */
-const LVM_UINT32 LVM_FsTable[] = {
-    8000 ,
-    11025,
-    12000,
-    16000,
-    22050,
-    24000,
-    32000,
-    44100,
-    48000,
-    88200,
-    96000,
-    176400,
-    192000
-};
+const LVM_UINT32 LVM_FsTable[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                  44100, 48000, 88200, 96000, 176400, 192000};
 /* Table with supported sampling rates.  The table can be indexed using LVM_Fs_en       */
-LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex){
-    if (FsIndex > LVM_FS_192000)
-        return 0;
+LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex) {
+    if (FsIndex > LVM_FS_192000) return 0;
 
     return (LVM_FsTable[FsIndex]);
 }
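
LVM_GetFsFromTable() is just a guarded lookup: indices up to LVM_FS_192000 map through LVM_FsTable to the rate in Hz, and anything beyond returns 0. A minimal usage sketch, assuming the LVM_FS_* constants follow the same order as the table (which is what the comment above states):

static void exampleFsLookup() {
    LVM_UINT32 fs = LVM_GetFsFromTable(LVM_FS_44100);      // yields 44100
    LVM_UINT32 none = LVM_GetFsFromTable((LVM_Fs_en)999);  // past LVM_FS_192000, yields 0
    (void)fs;
    (void)none;
}
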
@@ -73,30 +64,174 @@
   */
 
 /* Normalizing output including Reverb Level part (only shift up)*/
-const LVM_FLOAT LVREV_GainPolyTable[24][5]={{1,1.045909f,7.681098f,-7.211500f,3.025605f,},
-                                            {2,1.088194f,10.291749f,-11.513787f,5.265817f,},
-                                            {3,0.988919f,8.299956f,-8.920862f,3.979806f,},
-                                            {4,1.035927f,10.182567f,-10.346134f,4.546533f,},
-                                            {5,1.130313f,12.538727f,-13.627023f,6.165208f,},
-                                            {6,1.060743f,8.091713f,-8.588079f,3.834230f,},
-                                            {7,1.040381f,10.406566f,-11.176650f,5.075132f,},
-                                            {8,1.026944f,8.387302f,-8.689796f,3.895863f,},
-                                            {9,1.013312f,9.727236f,-10.534165f,4.742272f,},
-                                            {10,0.996095f,8.492249f,-7.947677f,3.478917f,},
-                                            {13,1.079346f,8.894425f,-9.641768f,4.434442f,},
-                                            {15,0.994327f,7.441335f,-8.003979f,3.581177f,},
-                                            {17,0.991067f,7.208373f,-7.257859f,3.167774f,},
-                                            {20,1.033445f,7.476371f,-7.546960f,3.369703f,},
-                                            {25,0.982830f,5.913867f,-5.638448f,2.420932f,},
-                                            {30,0.928782f,5.035343f,-4.492104f,1.844904f,},
-                                            {40,0.953714f,5.060232f,-4.472204f,1.829642f,},
-                                            {50,0.899258f,4.273357f,-3.537492f,1.387576f,},
-                                            {60,0.943584f,4.093228f,-3.469658f,1.410911f,},
-                                            {70,0.926021f,3.973125f,-3.331985f,1.344690f,},
-                                            {75,0.894853f,2.871747f,-1.438758f,0.311856f,},
-                                            {80,0.935122f,2.991857f,-2.038882f,0.686395f,},
-                                            {90,0.953872f,2.880315f,-2.122365f,0.784032f,},
-                                            {100,0.951005f,2.894294f,-2.009086f,0.698316f,},
+const LVM_FLOAT LVREV_GainPolyTable[24][5] = {
+        {
+                1,
+                1.045909f,
+                7.681098f,
+                -7.211500f,
+                3.025605f,
+        },
+        {
+                2,
+                1.088194f,
+                10.291749f,
+                -11.513787f,
+                5.265817f,
+        },
+        {
+                3,
+                0.988919f,
+                8.299956f,
+                -8.920862f,
+                3.979806f,
+        },
+        {
+                4,
+                1.035927f,
+                10.182567f,
+                -10.346134f,
+                4.546533f,
+        },
+        {
+                5,
+                1.130313f,
+                12.538727f,
+                -13.627023f,
+                6.165208f,
+        },
+        {
+                6,
+                1.060743f,
+                8.091713f,
+                -8.588079f,
+                3.834230f,
+        },
+        {
+                7,
+                1.040381f,
+                10.406566f,
+                -11.176650f,
+                5.075132f,
+        },
+        {
+                8,
+                1.026944f,
+                8.387302f,
+                -8.689796f,
+                3.895863f,
+        },
+        {
+                9,
+                1.013312f,
+                9.727236f,
+                -10.534165f,
+                4.742272f,
+        },
+        {
+                10,
+                0.996095f,
+                8.492249f,
+                -7.947677f,
+                3.478917f,
+        },
+        {
+                13,
+                1.079346f,
+                8.894425f,
+                -9.641768f,
+                4.434442f,
+        },
+        {
+                15,
+                0.994327f,
+                7.441335f,
+                -8.003979f,
+                3.581177f,
+        },
+        {
+                17,
+                0.991067f,
+                7.208373f,
+                -7.257859f,
+                3.167774f,
+        },
+        {
+                20,
+                1.033445f,
+                7.476371f,
+                -7.546960f,
+                3.369703f,
+        },
+        {
+                25,
+                0.982830f,
+                5.913867f,
+                -5.638448f,
+                2.420932f,
+        },
+        {
+                30,
+                0.928782f,
+                5.035343f,
+                -4.492104f,
+                1.844904f,
+        },
+        {
+                40,
+                0.953714f,
+                5.060232f,
+                -4.472204f,
+                1.829642f,
+        },
+        {
+                50,
+                0.899258f,
+                4.273357f,
+                -3.537492f,
+                1.387576f,
+        },
+        {
+                60,
+                0.943584f,
+                4.093228f,
+                -3.469658f,
+                1.410911f,
+        },
+        {
+                70,
+                0.926021f,
+                3.973125f,
+                -3.331985f,
+                1.344690f,
+        },
+        {
+                75,
+                0.894853f,
+                2.871747f,
+                -1.438758f,
+                0.311856f,
+        },
+        {
+                80,
+                0.935122f,
+                2.991857f,
+                -2.038882f,
+                0.686395f,
+        },
+        {
+                90,
+                0.953872f,
+                2.880315f,
+                -2.122365f,
+                0.784032f,
+        },
+        {
+                100,
+                0.951005f,
+                2.894294f,
+                -2.009086f,
+                0.698316f,
+        },
 };
 /* End of file */
-
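
LVREV_GainPolyTable packs 24 rows of five values: a selector in the first column followed by four polynomial coefficients used to normalize the output gain. How the selector and the evaluation variable map onto the reverb controls is decided by consumer code elsewhere in the reverb library and is not visible in this diff, so the snippet below is only a hypothetical illustration of the row layout, evaluating the cubic with Horner's rule.

// Hypothetical helper: evaluate one {selector, c0, c1, c2, c3} row at x.
// The meaning of the selector and of x is defined by the code that consumes the table.
static LVM_FLOAT evalGainRow(const LVM_FLOAT row[5], LVM_FLOAT x) {
    return row[1] + x * (row[2] + x * (row[3] + x * row[4]));
}

Horner's form keeps the evaluation to three multiply-adds per row.
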
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
index e100d8a..723d1ff 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_Tables.h
@@ -31,11 +31,13 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-extern const    LVM_UINT32  LVM_FsTable[];
-extern          LVM_UINT32  LVM_GetFsFromTable(LVM_Fs_en FsIndex);
+extern const LVM_UINT32 LVM_FsTable[];
+extern LVM_UINT32 LVM_GetFsFromTable(LVM_Fs_en FsIndex);
 
-extern const    LVM_FLOAT   LVREV_GainPolyTable[24][5];
+extern const LVM_FLOAT LVREV_GainPolyTable[24][5];
+extern const LVM_INT16 LVREV_MAX_T_DELAY[];
+extern const LVM_INT16 LVREV_MAX_AP_DELAY[];
 
-#endif  /** _LVREV_TABLES_H_ **/
+#endif /** _LVREV_TABLES_H_ **/
 
 /* End of file */
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
index c9fa7ad..85e3ab9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/lib/LVPSA.h
@@ -22,255 +22,191 @@
 
 /****************************************************************************************/
 /*                                                                                      */
-/*  CONSTANTS DEFINITIONS                                                               */
-/*                                                                                      */
-/****************************************************************************************/
-
-/* Memory table*/
-#define     LVPSA_NR_MEMORY_REGIONS                  4      /* Number of memory regions                                          */
-
-/****************************************************************************************/
-/*                                                                                      */
 /*  TYPES DEFINITIONS                                                                   */
 /*                                                                                      */
 /****************************************************************************************/
-/* Memory Types */
-typedef enum
-{
-    LVPSA_PERSISTENT      = LVM_PERSISTENT,
-    LVPSA_PERSISTENT_DATA = LVM_PERSISTENT_DATA,
-    LVPSA_PERSISTENT_COEF = LVM_PERSISTENT_COEF,
-    LVPSA_SCRATCH         = LVM_SCRATCH,
-    LVPSA_MEMORY_DUMMY = LVM_MAXINT_32                      /* Force 32 bits enum, don't use it!                                 */
-} LVPSA_MemoryTypes_en;
-
 /* Level detection speed control parameters */
-typedef enum
-{
-    LVPSA_SPEED_LOW,                                        /* Low speed level   detection                                       */
-    LVPSA_SPEED_MEDIUM,                                     /* Medium speed level   detection                                    */
-    LVPSA_SPEED_HIGH,                                       /* High speed level   detection                                      */
-    LVPSA_SPEED_DUMMY = LVM_MAXINT_32                       /* Force 32 bits enum, don't use it!                                 */
+typedef enum {
+    LVPSA_SPEED_LOW,    /* Low speed level   detection                                       */
+    LVPSA_SPEED_MEDIUM, /* Medium speed level   detection                                    */
+    LVPSA_SPEED_HIGH,   /* High speed level   detection                                      */
+    LVPSA_SPEED_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
 } LVPSA_LevelDetectSpeed_en;
 
 /* Filter control parameters */
-typedef struct
-{
-    LVM_UINT16                 CenterFrequency;             /* Center frequency of the band-pass filter (in Hz)                  */
-    LVM_UINT16                 QFactor;                     /* Quality factor of the filter             (in 1/100)               */
-    LVM_INT16                  PostGain;                    /* Postgain to apply after the filtering    (in dB Q16.0)            */
+typedef struct {
+    LVM_UINT16 CenterFrequency; /* Center frequency of the band-pass filter (in Hz) */
+    LVM_UINT16 QFactor; /* Quality factor of the filter             (in 1/100)               */
+    LVM_INT16 PostGain; /* Postgain to apply after the filtering    (in dB Q16.0)            */
 
 } LVPSA_FilterParam_t;
 
 /* LVPSA initialization parameters */
-typedef struct
-{
-    LVM_UINT16                 SpectralDataBufferDuration;  /* Spectral data buffer duration in time (ms in Q16.0)               */
-    LVM_UINT16                 MaxInputBlockSize;           /* Maximum expected input block size (in samples)                    */
-    LVM_UINT16                 nBands;                      /* Number of bands of the SA                                         */
-    LVPSA_FilterParam_t       *pFiltersParams;              /* Points to nBands filter param structures for filters settings     */
+typedef struct {
+    LVM_UINT16
+            SpectralDataBufferDuration; /* Spectral data buffer duration in time (ms in Q16.0) */
+    LVM_UINT16 MaxInputBlockSize;       /* Maximum expected input block size (in samples)       */
+    LVM_UINT16 nBands; /* Number of bands of the SA                                         */
+    LVPSA_FilterParam_t*
+            pFiltersParams; /* Points to nBands filter param structures for filters settings     */
 
 } LVPSA_InitParams_t, *pLVPSA_InitParams_t;
 
 /* LVPSA control parameters */
-typedef struct
-{
-    LVM_Fs_en                  Fs;                          /* Input sampling rate                                               */
-    LVPSA_LevelDetectSpeed_en  LevelDetectionSpeed;         /* Level detection speed                                             */
+typedef struct {
+    LVM_Fs_en Fs; /* Input sampling rate                                               */
+    LVPSA_LevelDetectSpeed_en LevelDetectionSpeed; /* Level detection speed */
 
 } LVPSA_ControlParams_t, *pLVPSA_ControlParams_t;
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32                 Size;                        /* Region size in bytes                                              */
-    LVPSA_MemoryTypes_en       Type;                        /* Region type                                                       */
-    void                       *pBaseAddress;               /* Pointer to the region base address                                */
-} LVPSA_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVPSA_MemoryRegion_t       Region[LVPSA_NR_MEMORY_REGIONS];/* One definition for each region                                 */
-} LVPSA_MemTab_t;
-
 /* Audio time type */
 typedef LVM_INT32 LVPSA_Time;
 
 /* Module instance Handle */
-typedef void *pLVPSA_Handle_t;
+typedef void* pLVPSA_Handle_t;
 
 /* LVPSA return codes */
-typedef enum
-{
-    LVPSA_OK,                                               /* The function ran without any problem                              */
-    LVPSA_ERROR_INVALIDPARAM,                               /* A parameter is incorrect                                          */
-    LVPSA_ERROR_WRONGTIME,                                  /* An incorrect AudioTime is used                                    */
-    LVPSA_ERROR_NULLADDRESS,                                /* A pointer has a NULL value                                        */
-    LVPSA_RETURN_DUMMY = LVM_MAXINT_32                      /* Force 32 bits enum, don't use it!                                 */
+typedef enum {
+    LVPSA_OK, /* The function ran without any problem                              */
+    LVPSA_ERROR_INVALIDPARAM, /* A parameter is incorrect */
+    LVPSA_ERROR_WRONGTIME,   /* An incorrect AudioTime is used                                    */
+    LVPSA_ERROR_NULLADDRESS, /* A pointer has a NULL value                                        */
+    LVPSA_RETURN_DUMMY = LVM_MAXINT_32 /* Force 32 bits enum, don't use it! */
 } LVPSA_RETURN;
 
 /*********************************************************************************************************************************
    FUNCTIONS PROTOTYPE
 **********************************************************************************************************************************/
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_Memory                                                                                         */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  This function is used for memory allocation and free. It can be called in                                                    */
-/*  two ways:                                                                                                                    */
-/*                                                                                                                               */
-/*      hInstance = NULL                Returns the memory requirements                                                          */
-/*      hInstance = Instance handle     Returns the memory requirements and                                                      */
-/*                                      allocated base addresses for the instance                                                */
-/*                                                                                                                               */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory                                               */
-/*  base address pointers are NULL on return.                                                                                    */
-/*                                                                                                                               */
-/*  When the function is called for free (hInstance = Instance Handle) the memory                                                */
-/*  table returns the allocated memory and base addresses used during initialisation.                                            */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pMemoryTable        Pointer to an empty memory definition table                                                              */
-/*  pInitParams         Pointer to the instance init parameters                                                                  */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Memory            ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_MemTab_t             *pMemoryTable,
-                                       LVPSA_InitParams_t         *pInitParams    );
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_Init                                                  */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Create and Initialize the LVPSA module including instance handle                */
+/*                                                                                  */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*  InitParams          Init parameters structure                                   */
+/*  ControlParams       Control parameters structure                                */
+/*  pScratch            Pointer to bundle scratch memory area                       */
+/*                                                                                  */
+/*                                                                                  */
+/* RETURNS:                                                                         */
+/*  LVPSA_OK            Succeeds                                                    */
+/*  otherwise           Error due to bad parameters                                 */
+/*                                                                                  */
+/************************************************************************************/
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+                        LVPSA_ControlParams_t* pControlParams, void* pScratch);
+
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_DeInit                                                */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*    Free the memories created in LVPSA_Init call including instance handle        */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*                                                                                  */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Init                                                                                               */
+/* FUNCTION:            LVPSA_Control */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Initializes the LVPSA module.                                                                                                */
+/* DESCRIPTION: */
+/*  Controls the LVPSA module. */
 /*                                                                                                                               */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pNewParams          Pointer to the instance new control parameters */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  phInstance          Pointer to the instance Handle                                                                           */
-/*  pInitParams         Pointer to the instance init parameters                                                                  */
-/*  pControlParams      Pointer to the instance control parameters                                                               */
-/*  pMemoryTable        Pointer to the memory definition table                                                                   */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Init              ( pLVPSA_Handle_t             *phInstance,
-                                       LVPSA_InitParams_t          *pInitParams,
-                                       LVPSA_ControlParams_t       *pControlParams,
-                                       LVPSA_MemTab_t              *pMemoryTable  );
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Control                                                                                            */
+/* FUNCTION:            LVPSA_Process */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Controls the LVPSA module.                                                                                                   */
+/* DESCRIPTION: */
+/*  The process calculates the levels of the frequency bands. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pNewParams          Pointer to the instance new control parameters                                                           */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pLVPSA_InputSamples Pointer to the input samples buffer */
+/*  InputBlockSize      Number of mono samples to process */
+/*  AudioTime           Playback time of the first input sample */
 /*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/*                                                                                                                               */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Control           ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_ControlParams_t      *pNewParams     );
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+                           LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime);
+/*********************************************************************************************************************************/
+/*                                                                                                                               */
+/* FUNCTION:            LVPSA_GetSpectrum */
+/*                                                                                                                               */
+/* DESCRIPTION: */
+/*  This function gets the current and peak spectrum level values at a given audio time. */
+/*                                                                                                                               */
+/*                                                                                                                               */
+/* PARAMETERS: */
+/*  hInstance            Instance Handle */
+/*  GetSpectrumAudioTime Time to retrieve the values at */
+/*  pCurrentValues       Pointer to an empty buffer : Current level values output */
+/*  pPeakValues          Pointer to an empty buffer : Peak level values output */
+/*                                                                                                                               */
+/*                                                                                                                               */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
+/*                                                                                                                               */
+/*********************************************************************************************************************************/
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+                               LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_Process                                                                                            */
+/* FUNCTION:            LVPSA_GetControlParams */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  The process calculates the levels of the frequency bands.                                                                    */
+/* DESCRIPTION: */
+/*  Get the current control parameters of the LVPSA module. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pLVPSA_InputSamples Pointer to the input samples buffer                                                                      */
-/*  InputBlockSize      Number of mono samples to process                                                                        */
-/*  AudioTime           Playback time of the first input sample                                                                  */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pParams             Pointer to an empty control parameters structure */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_Process           ( pLVPSA_Handle_t      hInstance,
-                                       LVM_FLOAT           *pLVPSA_InputSamples,
-                                       LVM_UINT16           InputBlockSize,
-                                       LVPSA_Time           AudioTime             );
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetSpectrum                                                                                        */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  This function is used for memory allocation and free.                                                                        */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance            Instance Handle                                                                                         */
-/*  GetSpectrumAudioTime Time to retrieve the values at                                                                          */
-/*  pCurrentValues       Pointer to an empty buffer : Current level values output                                                */
-/*  pPeakValues          Pointer to an empty buffer : Peak level values output                                                   */
-/*                                                                                                                               */
-/*                                                                                                                               */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum       ( pLVPSA_Handle_t      hInstance,
-                                       LVPSA_Time           GetSpectrumAudioTime,
-                                       LVM_UINT8           *pCurrentValues,
-                                       LVM_UINT8           *pPeakValues           );
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams);
 
 /*********************************************************************************************************************************/
 /*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetControlParams                                                                                   */
+/* FUNCTION:            LVPSA_GetInitParams */
 /*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Get the current control parameters of the LVPSA module.                                                                      */
+/* DESCRIPTION: */
+/*  Get the initialization parameters of the LVPSA module. */
 /*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pParams             Pointer to an empty control parameters structure                                                         */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
+/* PARAMETERS: */
+/*  hInstance           Instance Handle */
+/*  pParams             Pointer to an empty init parameters structure */
+/* RETURNS: */
+/*  LVPSA_OK            Succeeds */
+/*  otherwise           Error due to bad parameters */
 /*                                                                                                                               */
 /*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams  (    pLVPSA_Handle_t            hInstance,
-                                          LVPSA_ControlParams_t     *pParams      );
-
-/*********************************************************************************************************************************/
-/*                                                                                                                               */
-/* FUNCTION:            LVPSA_GetInitParams                                                                                      */
-/*                                                                                                                               */
-/* DESCRIPTION:                                                                                                                  */
-/*  Get the initialization parameters of the LVPSA module.                                                                       */
-/*                                                                                                                               */
-/* PARAMETERS:                                                                                                                   */
-/*  hInstance           Instance Handle                                                                                          */
-/*  pParams             Pointer to an empty init parameters structure                                                            */
-/* RETURNS:                                                                                                                      */
-/*  LVPSA_OK            Succeeds                                                                                                 */
-/*  otherwise           Error due to bad parameters                                                                              */
-/*                                                                                                                               */
-/*********************************************************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams     (    pLVPSA_Handle_t            hInstance,
-                                          LVPSA_InitParams_t        *pParams      );
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams);
 
 #endif /* _LVPSA_H */
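For reference, a minimal caller-side sketch of the two getters declared above, assuming a handle that was already created by LVPSA_Init; the helper name and error handling are illustrative only:

    #include "LVPSA.h"

    // Hypothetical helper: reads back both parameter structures from an existing instance.
    static LVPSA_RETURN LVPSA_QueryParams_Example(pLVPSA_Handle_t hInstance) {
        LVPSA_ControlParams_t controlParams;
        LVPSA_InitParams_t initParams;
        LVPSA_RETURN err = LVPSA_GetControlParams(hInstance, &controlParams);
        if (err != LVPSA_OK) {
            return err;  // NULL handle or NULL output pointer
        }
        return LVPSA_GetInitParams(hInstance, &initParams);  // fills the init-time settings
    }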
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index deafaa7..8e63502 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -15,30 +15,26 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "VectorArithmetic.h"
+#include <system/audio.h>
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "VectorArithmetic.h"
 
-#define     LOW_FREQ            298             /* 32768/110 for low test frequency */
-#define     HIGH_FREQ           386             /* 32768/85 for high test frequency */
+#define LOW_FREQ 298  /* 32768/110 for low test frequency */
+#define HIGH_FREQ 386 /* 32768/85 for high test frequency */
 
-LVPSA_RETURN LVPSA_SetBPFiltersType (  LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_SetQPFCoefficients( LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs(  LVM_UINT16             Fs,
-                                       LVPSA_FilterParam_t   *pFilterParams,
-                                       BP_FLOAT_Coefs_t        *pCoefficients);
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients);
 
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs(  LVM_UINT16            Fs,
-                                       LVPSA_FilterParam_t  *pFilterParams,
-                                       BP_FLOAT_Coefs_t       *pCoefficients);
-LVPSA_RETURN LVPSA_SetBPFCoefficients( LVPSA_InstancePr_t        *pInst,
-                                       LVPSA_ControlParams_t      *pParams  );
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients);
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams);
 
-LVPSA_RETURN LVPSA_ClearFilterHistory( LVPSA_InstancePr_t        *pInst);
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst);
 
 /************************************************************************************/
 /*                                                                                  */
@@ -56,29 +52,23 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Control           ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_ControlParams_t      *pNewParams     )
-{
+LVPSA_RETURN LVPSA_Control(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pNewParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
-
-    if((hInstance == LVM_NULL) || (pNewParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pNewParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
-    if(pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (pNewParams->Fs >= LVPSA_NR_SUPPORTED_RATE) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
-    if(pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (pNewParams->LevelDetectionSpeed >= LVPSA_NR_SUPPORTED_SPEED) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
 
     pLVPSA_Inst->NewParams = *pNewParams;
     pLVPSA_Inst->bControlPending = LVM_TRUE;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
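LVPSA_Control above only validates and stages the request: it copies the parameters into NewParams and raises bControlPending, and the change takes effect when LVPSA_ApplyNewSettings (later in this file) is run. A sketch of that handshake; the actual call site and the clearing of the flag live outside this hunk and are assumptions here:

    // Illustrative only; the real check happens on the processing path, not shown in this patch.
    static void applyPendingControl(LVPSA_InstancePr_t* pInst) {
        if (pInst->bControlPending == LVM_TRUE) {
            pInst->bControlPending = LVM_FALSE;  // assumption: the flag is cleared before applying
            LVPSA_ApplyNewSettings(pInst);
        }
    }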
 
 /************************************************************************************/
@@ -96,20 +86,17 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetControlParams         (    pLVPSA_Handle_t            hInstance,
-                                                 LVPSA_ControlParams_t     *pParams )
-{
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetControlParams(pLVPSA_Handle_t hInstance, LVPSA_ControlParams_t* pParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
-    pParams->Fs                     = pLVPSA_Inst->CurrentParams.Fs;
-    pParams->LevelDetectionSpeed    = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
+    pParams->Fs = pLVPSA_Inst->CurrentParams.Fs;
+    pParams->LevelDetectionSpeed = pLVPSA_Inst->CurrentParams.LevelDetectionSpeed;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -127,22 +114,19 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetInitParams         (    pLVPSA_Handle_t            hInstance,
-                                              LVPSA_InitParams_t        *pParams )
-{
-    LVPSA_InstancePr_t     *pLVPSA_Inst    = (LVPSA_InstancePr_t*)hInstance;
+LVPSA_RETURN LVPSA_GetInitParams(pLVPSA_Handle_t hInstance, LVPSA_InitParams_t* pParams) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
 
-    if((hInstance == LVM_NULL) || (pParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if ((hInstance == LVM_NULL) || (pParams == LVM_NULL)) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
-    pParams->SpectralDataBufferDuration   = pLVPSA_Inst->SpectralDataBufferDuration;
-    pParams->MaxInputBlockSize            = pLVPSA_Inst->MaxInputBlockSize;
-    pParams->nBands                       = pLVPSA_Inst->nBands;
-    pParams->pFiltersParams               = pLVPSA_Inst->pFiltersParams;
+    pParams->SpectralDataBufferDuration = pLVPSA_Inst->SpectralDataBufferDuration;
+    pParams->MaxInputBlockSize = pLVPSA_Inst->MaxInputBlockSize;
+    pParams->nBands = pLVPSA_Inst->nBands;
+    pParams->pFiltersParams = pLVPSA_Inst->pFiltersParams;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
 
 /************************************************************************************/
@@ -163,46 +147,47 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t     *pInst)
-{
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst) {
     LVM_UINT16 ii;
     LVM_UINT16 Freq;
-    LVPSA_ControlParams_t   Params;
-    extern LVM_INT16        LVPSA_nSamplesBufferUpdate[];
-    extern LVM_UINT32       LVPSA_SampleRateTab[];
-    extern LVM_UINT16       LVPSA_DownSamplingFactor[];
+    LVPSA_ControlParams_t Params;
+    extern LVM_INT16 LVPSA_nSamplesBufferUpdate[];
+    extern LVM_UINT32 LVPSA_SampleRateTab[];
+    extern LVM_UINT16 LVPSA_DownSamplingFactor[];
 
-    if(pInst == 0)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (pInst == 0) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
     Params = pInst->NewParams;
 
     /* Modifies filter types and coefficients, clears the taps and
        re-initializes parameters if the sample frequency has changed    */
-    if(Params.Fs != pInst->CurrentParams.Fs)
-    {
+    if (Params.Fs != pInst->CurrentParams.Fs) {
         pInst->CurrentParams.Fs = Params.Fs;
 
         /* Initialize the center frequencies as a function of the sample rate */
-        Freq = (LVM_UINT16) ((LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1) / (pInst->nBands + 1));
-        for(ii = pInst->nBands; ii > 0; ii--)
-        {
-            pInst->pFiltersParams[ii-1].CenterFrequency = (LVM_UINT16) (Freq * ii);
+        Freq = (LVM_UINT16)((LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1) /
+                            (pInst->nBands + 1));
+        for (ii = pInst->nBands; ii > 0; ii--) {
+            pInst->pFiltersParams[ii - 1].CenterFrequency = (LVM_UINT16)(Freq * ii);
         }
 
         /* Count the number of relevant filters. If the center frequency of a filter is
            greater than the Nyquist frequency, the filter is not relevant and does not
            need to be used */
-        for(ii = pInst->nBands; ii > 0; ii--)
-        {
-            if(pInst->pFiltersParams[ii-1].CenterFrequency < (LVPSA_SampleRateTab[pInst->CurrentParams.Fs]>>1))
-            {
+        for (ii = pInst->nBands; ii > 0; ii--) {
+            if (pInst->pFiltersParams[ii - 1].CenterFrequency <
+                (LVPSA_SampleRateTab[pInst->CurrentParams.Fs] >> 1)) {
                 pInst->nRelevantFilters = ii;
                 break;
             }
         }
+        /*
+         * Create biquad instance
+         */
+        pInst->specBiquad.resize(pInst->nRelevantFilters,
+                                 android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1));
         LVPSA_SetBPFiltersType(pInst, &Params);
         LVPSA_SetBPFCoefficients(pInst, &Params);
         LVPSA_SetQPFCoefficients(pInst, &Params);
@@ -211,19 +196,14 @@
         pInst->BufferUpdateSamplesCount = 0;
         pInst->DownSamplingFactor = LVPSA_DownSamplingFactor[Params.Fs];
         pInst->DownSamplingCount = 0;
-        for(ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++)
-        {
+        for (ii = 0; ii < (pInst->nBands * pInst->SpectralDataBufferLength); ii++) {
             pInst->pSpectralDataBufferStart[ii] = 0;
         }
-        for(ii = 0; ii < pInst->nBands; ii++)
-        {
+        for (ii = 0; ii < pInst->nBands; ii++) {
             pInst->pPreviousPeaks[ii] = 0;
         }
-    }
-    else
-    {
-        if(Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed)
-        {
+    } else {
+        if (Params.LevelDetectionSpeed != pInst->CurrentParams.LevelDetectionSpeed) {
             LVPSA_SetQPFCoefficients(pInst, &Params);
         }
     }
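As a worked example of the band layout computed above (the sample rate and band count are illustrative; the arithmetic follows the code): the centre frequencies are f_k = k * floor((Fs/2) / (nBands + 1)) for k = 1..nBands, so with Fs = 48000 Hz and nBands = 6, floor(24000 / 7) = 3428 Hz and the centres are 3428, 6856, 10284, 13712, 17140 and 20568 Hz. All six lie below the 24 kHz Nyquist limit, so nRelevantFilters is set to 6 and six BiquadFilter instances are created.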
@@ -253,47 +233,43 @@
 /*          Single precision    otherwise                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFiltersType (   LVPSA_InstancePr_t        *pInst,
-                                        LVPSA_ControlParams_t      *pParams  )
-{
-    extern LVM_UINT32   LVPSA_SampleRateTab[];                 /* Sample rate table */
-    LVM_UINT16          ii;                                                         /* Filter band index */
-    LVM_UINT32          fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs];      /* Sample rate */
-    LVM_UINT32          fc;                                                         /* Filter centre frequency */
-    LVM_INT16           QFactor;                                                    /* Filter Q factor */
+LVPSA_RETURN LVPSA_SetBPFiltersType(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    extern LVM_UINT32 LVPSA_SampleRateTab[]; /* Sample rate table */
+    LVM_UINT16 ii;                           /* Filter band index */
+    LVM_UINT32 fs = (LVM_UINT32)LVPSA_SampleRateTab[(LVM_UINT16)pParams->Fs]; /* Sample rate */
+    LVM_UINT32 fc;     /* Filter centre frequency */
+    LVM_INT16 QFactor; /* Filter Q factor */
 
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
         /*
          * Get the filter settings
          */
-        fc = (LVM_UINT32)pInst->pFiltersParams[ii].CenterFrequency;     /* Get the band centre frequency */
-        QFactor =(LVM_INT16) pInst->pFiltersParams[ii].QFactor;                    /* Get the band Q factor */
+        fc = (LVM_UINT32)pInst->pFiltersParams[ii]
+                     .CenterFrequency;                          /* Get the band centre frequency */
+        QFactor = (LVM_INT16)pInst->pFiltersParams[ii].QFactor; /* Get the band Q factor */
 
         /*
          * For each filter set the type of biquad required
          */
-        pInst->pBPFiltersPrecision[ii] = LVPSA_SimplePrecisionFilter;     /* Default to single precision */
-        if ((LOW_FREQ * fs) >= (fc << 15))
-        {
+        pInst->pBPFiltersPrecision[ii] =
+                LVPSA_SimplePrecisionFilter; /* Default to single precision */
+        if ((LOW_FREQ * fs) >= (fc << 15)) {
             /*
              * fc <= fs/110
              */
             pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
-        }
-        else
-        {
-            if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) && (QFactor > 300))
-            {
+        } else {
+            if (((LOW_FREQ * fs) < (fc << 15)) && ((fc << 15) < (HIGH_FREQ * fs)) &&
+                (QFactor > 300)) {
                 /*
-                * (fs/110 < fc < fs/85) & (Q>3)
-                */
+                 * (fs/110 < fc < fs/85) & (Q>3)
+                 */
                 pInst->pBPFiltersPrecision[ii] = LVPSA_DoublePrecisionFilter;
             }
         }
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
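Decoded from the fixed-point constants above (LOW_FREQ = 298 ~ 32768/110, HIGH_FREQ = 386 ~ 32768/85, Q factors stored in hundredths), the rule is: double precision when fc <= fs/110, double precision when fs/110 < fc < fs/85 and Q > 3.00, single precision otherwise. A hypothetical restatement with the thresholds written as plain ratios:

    // Sketch only: the original compares against the 2^15-scaled constants rather than exact ratios.
    static LVPSA_BPFilterPrecision_en choosePrecision(LVM_UINT32 fs, LVM_UINT32 fc,
                                                      LVM_INT16 qFactorTimes100) {
        if (110 * fc <= fs) {                         // fc <= fs/110
            return LVPSA_DoublePrecisionFilter;
        }
        if (85 * fc < fs && qFactorTimes100 > 300) {  // fs/110 < fc < fs/85 and Q > 3.00
            return LVPSA_DoublePrecisionFilter;
        }
        return LVPSA_SimplePrecisionFilter;
    }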
 
 /************************************************************************************/
@@ -314,60 +290,59 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetBPFCoefficients(  LVPSA_InstancePr_t        *pInst,
-                                        LVPSA_ControlParams_t      *pParams)
-{
-
-    LVM_UINT16                      ii;
+LVPSA_RETURN LVPSA_SetBPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    LVM_UINT16 ii;
 
     /*
      * Set the coefficients for each band by the init function
      */
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
-        switch  (pInst->pBPFiltersPrecision[ii])
-        {
-            case    LVPSA_DoublePrecisionFilter:
-            {
-                BP_FLOAT_Coefs_t      Coefficients;
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+        switch (pInst->pBPFiltersPrecision[ii]) {
+            case LVPSA_DoublePrecisionFilter: {
+                BP_FLOAT_Coefs_t Coefficients;
                 /*
                  * Calculate the double precision coefficients
                  */
-                LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs,
-                                        &pInst->pFiltersParams[ii],
+                LVPSA_BPDoublePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
                                         &Coefficients);
                 /*
                  * Set the coefficients
                  */
-                BP_1I_D16F32Cll_TRC_WRA_01_Init ( &pInst->pBP_Instances[ii],
-                                                  &pInst->pBP_Taps[ii],
-                                                  &Coefficients);
+                const std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                        Coefficients.A0, 0.0, -(Coefficients.A0), -(Coefficients.B1),
+                        -(Coefficients.B2)};
+                pInst->specBiquad[ii]
+                        .setCoefficients<
+                                std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(
+                                coefs);
                 break;
             }
 
-            case    LVPSA_SimplePrecisionFilter:
-            {
-                BP_FLOAT_Coefs_t      Coefficients;
+            case LVPSA_SimplePrecisionFilter: {
+                BP_FLOAT_Coefs_t Coefficients;
 
                 /*
                  * Calculate the single precision coefficients
                  */
-                LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs,
-                                        &pInst->pFiltersParams[ii],
+                LVPSA_BPSinglePrecCoefs((LVM_UINT16)pParams->Fs, &pInst->pFiltersParams[ii],
                                         &Coefficients);
 
                 /*
                  * Set the coefficients
                  */
-                BP_1I_D16F16Css_TRC_WRA_01_Init (&pInst->pBP_Instances[ii],
-                                                  &pInst->pBP_Taps[ii],
-                                                  &Coefficients);
+                const std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                        Coefficients.A0, 0.0, -(Coefficients.A0), -(Coefficients.B1),
+                        -(Coefficients.B2)};
+                pInst->specBiquad[ii]
+                        .setCoefficients<
+                                std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs>>(
+                                coefs);
                 break;
             }
         }
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
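The coefficient array built in both branches above, {A0, 0, -A0, -B1, -B2}, replaces the removed BP_1I_D16F32Cll / BP_1I_D16F16Css init calls. Assuming the usual android::audio_utils::BiquadFilter ordering of {b0, b1, b2, a1, a2} with output b0*x[n] + b1*x[n-1] + b2*x[n-2] - a1*y[n-1] - a2*y[n-2] (the convention itself is not restated in this patch), that array realizes the band-pass recursion

    y[n] = A0 * (x[n] - x[n-2]) + B1 * y[n-1] + B2 * y[n-2]

which appears to match what the legacy fixed-coefficient kernels computed.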
 
 /************************************************************************************/
@@ -388,26 +363,20 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_SetQPFCoefficients(   LVPSA_InstancePr_t        *pInst,
-                                         LVPSA_ControlParams_t      *pParams  )
-{
-    LVM_UINT16     ii;
-    LVM_Fs_en      Fs = pParams->Fs;
-    QPD_FLOAT_Coefs  *pCoefficients;
-    extern         QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[];
+LVPSA_RETURN LVPSA_SetQPFCoefficients(LVPSA_InstancePr_t* pInst, LVPSA_ControlParams_t* pParams) {
+    LVM_UINT16 ii;
+    LVM_Fs_en Fs = pParams->Fs;
+    QPD_FLOAT_Coefs* pCoefficients;
+    extern QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
 
-    pCoefficients = &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * \
-                                    LVPSA_NR_SUPPORTED_RATE) + Fs];
+    pCoefficients =
+            &LVPSA_QPD_Float_Coefs[(pParams->LevelDetectionSpeed * LVPSA_NR_SUPPORTED_RATE) + Fs];
 
-    for (ii = 0; ii < pInst->nRelevantFilters; ii++)
-    {
-        LVPSA_QPD_Init_Float (&pInst->pQPD_States[ii],
-                              &pInst->pQPD_Taps[ii],
-                              pCoefficients );
+    for (ii = 0; ii < pInst->nRelevantFilters; ii++) {
+        LVPSA_QPD_Init_Float(&pInst->pQPD_States[ii], &pInst->pQPD_Taps[ii], pCoefficients);
     }
 
-    return(LVPSA_OK);
-
+    return (LVPSA_OK);
 }
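The lookup above picks one QPD_FLOAT_Coefs entry per (level-detection speed, sample rate) pair: the table is laid out as LVPSA_NR_SUPPORTED_SPEED rows of LVPSA_NR_SUPPORTED_RATE entries, so with the 13 supported rates defined later in LVPSA_Private.h, a speed index of 1 and a rate index of 3 (both illustrative) resolve to element 1 * 13 + 3 = 16.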
 
 /****************************************************************************************/
@@ -443,49 +412,46 @@
 /*     of the n bands equalizer (LVEQNB)                                                */
 /*                                                                                      */
 /****************************************************************************************/
-LVPSA_RETURN LVPSA_BPSinglePrecCoefs(    LVM_UINT16              Fs,
-                                         LVPSA_FilterParam_t     *pFilterParams,
-                                         BP_FLOAT_Coefs_t        *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVPSA_Float_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVPSA_Float_CosCoef[];
+LVPSA_RETURN LVPSA_BPSinglePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVPSA_Float_CosCoef[];
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
-    LVM_FLOAT           Dt0;
-    LVM_FLOAT           B2_Den;
-    LVM_FLOAT           B2_Num;
-    LVM_FLOAT           COS_T0;
-    LVM_FLOAT           coef;
-    LVM_FLOAT           factor;
-    LVM_FLOAT           t0;
-    LVM_INT16           i;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_FLOAT Dt0;
+    LVM_FLOAT B2_Den;
+    LVM_FLOAT B2_Num;
+    LVM_FLOAT COS_T0;
+    LVM_FLOAT coef;
+    LVM_FLOAT factor;
+    LVM_FLOAT t0;
+    LVM_INT16 i;
 
     /*
      * Get the filter definition
      */
-    LVM_FLOAT          Frequency   = (LVM_FLOAT)(pFilterParams->CenterFrequency);
-    LVM_FLOAT          QFactor     = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+    LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+    LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs];   /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                 /* Floating point value 1.000000 (1*100*2^5) */
-                    /* Force D = 1 : the function was originally used for a peaking filter.
-                       The D parameter do not exist for a BandPass filter coefficients */
+    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
+    /* Force D = 1 : the function was originally used for a peaking filter.
+       The D parameter does not exist for band-pass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
      */
-    Dt0 =  T0 / 2048 ;
+    Dt0 = T0 / 2048;
     B2_Den = QFactor + Dt0;
     B2_Num = Dt0 - QFactor;
     B2 = B2_Num / (2 * B2_Den);
@@ -495,20 +461,19 @@
      *
      *  Cos += coef(n) * t0^n                   For n = 0 to 6
      */
-    T0 = (T0 / 2048) * 0.63658558f;              /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
-    t0 = T0 ;
-    factor = 1.0f;                            /* Initialise to 1.0 for the a0 coefficient */
-    COS_T0 = 0.0f;                                 /* Initialise the error to zero */
-    for (i = 1; i < 7; i++)
-    {
-        coef    = LVPSA_Float_CosCoef[i];                /* Get the nth coefficient */
-        COS_T0 += (factor * coef);         /* The nth partial sum */
-        factor  = (factor * t0) ;           /* Calculate t0^n */
+    T0 = (T0 / 2048) * 0.63658558f; /* Scale to 1.0 in 16-bit for range 0 to fs/2 */
+    t0 = T0;
+    factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+    COS_T0 = 0.0f; /* Initialise the error to zero */
+    for (i = 1; i < 7; i++) {
+        coef = LVPSA_Float_CosCoef[i]; /* Get the nth coefficient */
+        COS_T0 += (factor * coef);     /* The nth partial sum */
+        factor = (factor * t0);        /* Calculate t0^n */
     }
-    COS_T0 = COS_T0 * 8;    /*LVPSA_CosCoef_float[0]*/      /* Correct the scaling */
+    COS_T0 = COS_T0 * 8; /*LVPSA_CosCoef_float[0]*/ /* Correct the scaling */
 
-    B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0);    /* B1 = (0.5 - b2) * cos(t0) */
-    A0 = ((LVM_FLOAT)0.5 + B2) / 2;                        /* A0 = (0.5 + b2) / 2 */
+    B1 = ((LVM_FLOAT)0.5 - B2) * (COS_T0); /* B1 = (0.5 - b2) * cos(t0) */
+    A0 = ((LVM_FLOAT)0.5 + B2) / 2;        /* A0 = (0.5 + b2) / 2 */
 
     /*
      * Write coeff into the data structure
@@ -517,7 +482,7 @@
     pCoefficients->B1 = B1 * 2;
     pCoefficients->B2 = B2 * 2;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
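In code form, a minimal sketch of what the routine above computes, assuming Q = QFactor/100 and T0 = 2*pi*fc/fs in the table's scaling, and with cosT0 standing in for the 6-term LVPSA_Float_CosCoef approximation evaluated above:

    static void bpSinglePrecSketch(LVM_FLOAT T0, LVM_FLOAT Q, LVM_FLOAT cosT0,
                                   LVM_FLOAT* A0, LVM_FLOAT* B1, LVM_FLOAT* B2) {
        const LVM_FLOAT Dt0 = T0 / 2048;          // scaled centre frequency, as in the code above
        *B2 = (Dt0 - Q) / (2 * (Q + Dt0));        // B2 = (Dt0 - Q) / (2 * (Q + Dt0))
        *B1 = ((LVM_FLOAT)0.5 - *B2) * cosT0;     // B1 = (0.5 - B2) * cos(T0)
        *A0 = ((LVM_FLOAT)0.5 + *B2) / 2;         // A0 = (0.5 + B2) / 2
        // The routine above then stores B1 * 2 and B2 * 2 into BP_FLOAT_Coefs_t.
    }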
 /****************************************************************************************/
 /*                                                                                      */
@@ -561,49 +526,46 @@
 /*     of the n bands equalizer (LVEQNB)                                                */
 /*                                                                                      */
 /****************************************************************************************/
-LVPSA_RETURN LVPSA_BPDoublePrecCoefs(   LVM_UINT16            Fs,
-                                        LVPSA_FilterParam_t   *pFilterParams,
-                                        BP_FLOAT_Coefs_t      *pCoefficients)
-{
-
-    extern LVM_FLOAT    LVPSA_Float_TwoPiOnFsTable[];
-    extern LVM_FLOAT    LVPSA_Float_DPCosCoef[];
+LVPSA_RETURN LVPSA_BPDoublePrecCoefs(LVM_UINT16 Fs, LVPSA_FilterParam_t* pFilterParams,
+                                     BP_FLOAT_Coefs_t* pCoefficients) {
+    extern LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
+    extern LVM_FLOAT LVPSA_Float_DPCosCoef[];
 
     /*
      * Intermediate variables and temporary values
      */
-    LVM_FLOAT           T0;
-    LVM_FLOAT           D;
-    LVM_FLOAT           A0;
-    LVM_FLOAT           B1;
-    LVM_FLOAT           B2;
-    LVM_FLOAT           Dt0;
-    LVM_FLOAT           B2_Den;
-    LVM_FLOAT           B2_Num;
-    LVM_FLOAT           CosErr;
-    LVM_FLOAT           coef;
-    LVM_FLOAT           factor;
-    LVM_FLOAT           t0;
-    LVM_INT16           i;
+    LVM_FLOAT T0;
+    LVM_FLOAT D;
+    LVM_FLOAT A0;
+    LVM_FLOAT B1;
+    LVM_FLOAT B2;
+    LVM_FLOAT Dt0;
+    LVM_FLOAT B2_Den;
+    LVM_FLOAT B2_Num;
+    LVM_FLOAT CosErr;
+    LVM_FLOAT coef;
+    LVM_FLOAT factor;
+    LVM_FLOAT t0;
+    LVM_INT16 i;
 
     /*
      * Get the filter definition
      */
-    LVM_FLOAT          Frequency   = (LVM_FLOAT)(pFilterParams->CenterFrequency);
-    LVM_FLOAT          QFactor     = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
+    LVM_FLOAT Frequency = (LVM_FLOAT)(pFilterParams->CenterFrequency);
+    LVM_FLOAT QFactor = ((LVM_FLOAT)(pFilterParams->QFactor)) / 100;
 
     /*
      * Calculating the intermediate values
      */
-    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs];   /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;    /* Floating point value 1.000000 (1*100*2^5) */
-                 /* Force D = 1 : the function was originally used for a peaking filter.
-                    The D parameter do not exist for a BandPass filter coefficients */
+    T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
+    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
+    /* Force D = 1 : the function was originally used for a peaking filter.
+       The D parameter does not exist for band-pass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
      */
-    Dt0 =  T0 / 2048 ;
+    Dt0 = T0 / 2048;
     B2_Den = QFactor + Dt0;
     B2_Num = Dt0 - QFactor;
     B2 = B2_Num / (2 * B2_Den);
@@ -613,25 +575,24 @@
      *
      *  CosErr += coef(n) * t0^n                For n = 0 to 4
      */
-    T0 = T0 * 0.994750f;                    /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
+    T0 = T0 * 0.994750f; /* Scale to 1.0 in 16-bit for range 0 to fs/50 */
     t0 = T0;
-    factor = 1.0f;                            /* Initialise to 1.0 for the a0 coefficient */
-    CosErr = 0.0f;                                 /* Initialise the error to zero */
-    for (i = 1; i < 5; i++)
-    {
-        coef = LVPSA_Float_DPCosCoef[i];              /* Get the nth coefficient */
+    factor = 1.0f; /* Initialise to 1.0 for the a0 coefficient */
+    CosErr = 0.0f; /* Initialise the error to zero */
+    for (i = 1; i < 5; i++) {
+        coef = LVPSA_Float_DPCosCoef[i]; /* Get the nth coefficient */
         CosErr += factor * coef;         /* The nth partial sum */
-        factor = factor * t0;           /* Calculate t0^n */
+        factor = factor * t0;            /* Calculate t0^n */
     }
-    CosErr = CosErr * 2;          /* Correct the scaling */
+    CosErr = CosErr * 2; /* Correct the scaling */
 
     /*
      * Calculate the B1 and A0 coefficients
      */
-    B1 = ((LVM_FLOAT)0.5 - B2);                     /* B1 = (0.5 - b2) */
-    A0 = B1 * CosErr ;    /* Temporary storage for (0.5 - b2) * coserr(t0) */
-    B1 -= A0;                                   /* B1 = (0.5 - b2) * (1 - coserr(t0))  */
-    A0 = ((LVM_FLOAT)0.5  + B2) / 2;                /* A0 = (0.5 + b2) / 2 */
+    B1 = ((LVM_FLOAT)0.5 - B2);     /* B1 = (0.5 - b2) */
+    A0 = B1 * CosErr;               /* Temporary storage for (0.5 - b2) * coserr(t0) */
+    B1 -= A0;                       /* B1 = (0.5 - b2) * (1 - coserr(t0))  */
+    A0 = ((LVM_FLOAT)0.5 + B2) / 2; /* A0 = (0.5 + b2) / 2 */
 
     /*
      * Write coeff into the data structure
@@ -640,7 +601,7 @@
     pCoefficients->B1 = B1;
     pCoefficients->B2 = B2;
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
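The double-precision path differs only in how B1 is formed: with coserrT0 standing in for the 4-term LVPSA_Float_DPCosCoef polynomial evaluated above, and Q and T0 as in the single-precision case, a sketch is:

    static void bpDoublePrecSketch(LVM_FLOAT T0, LVM_FLOAT Q, LVM_FLOAT coserrT0,
                                   LVM_FLOAT* A0, LVM_FLOAT* B1, LVM_FLOAT* B2) {
        const LVM_FLOAT Dt0 = T0 / 2048;
        *B2 = (Dt0 - Q) / (2 * (Q + Dt0));              // same B2 as the single-precision path
        *B1 = ((LVM_FLOAT)0.5 - *B2) * (1 - coserrT0);  // B1 = (0.5 - b2) * (1 - coserr(t0))
        *A0 = ((LVM_FLOAT)0.5 + *B2) / 2;               // A0 = (0.5 + b2) / 2
        // Here B1 and B2 are written back without the extra factor of 2 used in the single-precision path.
    }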
 /************************************************************************************/
 /*                                                                                  */
@@ -658,24 +619,10 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t        *pInst)
-{
-    LVM_INT8       *pTapAddress;
-    LVM_UINT32       i;
-
-    /* Band Pass filters taps */
-    pTapAddress = (LVM_INT8 *)pInst->pBP_Taps;
-    for(i = 0; i < pInst->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t); i++)
-    {
-        pTapAddress[i] = 0;
-    }
-    /* Quasi-peak filters taps */
-    pTapAddress = (LVM_INT8 *)pInst->pQPD_Taps;
-    for(i = 0; i < pInst->nBands * sizeof(QPD_Taps_t); i++)
-    {
-        pTapAddress[i] = 0;
+LVPSA_RETURN LVPSA_ClearFilterHistory(LVPSA_InstancePr_t* pInst) {
+    for (size_t i = 0; i < pInst->specBiquad.size(); i++) {
+        pInst->specBiquad[i].clear();
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
-
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 9fcd82f..a0f28b1 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -15,23 +15,23 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "InstAlloc.h"
+#include <stdlib.h>
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
 
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_Init                                                  */
 /*                                                                                  */
 /* DESCRIPTION:                                                                     */
-/*  Initialize the LVPSA module                                                     */
+/*  Create and initialize the LVPSA module, including the instance handle           */
 /*                                                                                  */
 /*                                                                                  */
 /* PARAMETERS:                                                                      */
-/*  phInstance          Pointer to pointer to the instance                          */
+/*  phInstance          Pointer to the instance handle                              */
 /*  InitParams          Init parameters structure                                   */
 /*  ControlParams       Control parameters structure                                */
-/*  pMemoryTable        Memory table that contains memory areas definition          */
+/*  pScratch            Pointer to bundle scratch memory area                       */
 /*                                                                                  */
 /*                                                                                  */
 /* RETURNS:                                                                         */
@@ -39,75 +39,19 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Init              ( pLVPSA_Handle_t             *phInstance,
-                                       LVPSA_InitParams_t          *pInitParams,
-                                       LVPSA_ControlParams_t       *pControlParams,
-                                       LVPSA_MemTab_t              *pMemoryTable )
-{
-    LVPSA_InstancePr_t          *pLVPSA_Inst;
-    LVPSA_RETURN                errorCode       = LVPSA_OK;
-    LVM_UINT32                  ii;
-    extern LVM_FLOAT            LVPSA_Float_GainTable[];
-    LVM_UINT32                  BufferLength = 0;
-
-    /* Ints_Alloc instances, needed for memory alignment management */
-    INST_ALLOC          Instance;
-    INST_ALLOC          Scratch;
-    INST_ALLOC          Data;
-    INST_ALLOC          Coef;
-
-    /* Check parameters */
-    if((phInstance == LVM_NULL) || (pInitParams == LVM_NULL) || (pControlParams == LVM_NULL) || (pMemoryTable == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
-    }
-    if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION)   ||
-        (pInitParams->SpectralDataBufferDuration == 0)                        ||
-        (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE)      ||
-        (pInitParams->MaxInputBlockSize == 0)                           ||
-        (pInitParams->nBands < LVPSA_NBANDSMIN)                         ||
-        (pInitParams->nBands > LVPSA_NBANDSMAX)                         ||
-        (pInitParams->pFiltersParams == 0))
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
-    }
-    for(ii = 0; ii < pInitParams->nBands; ii++)
-    {
-        if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
-           (pInitParams->pFiltersParams[ii].PostGain        > LVPSA_MAXPOSTGAIN)   ||
-           (pInitParams->pFiltersParams[ii].PostGain        < LVPSA_MINPOSTGAIN)   ||
-           (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR)            ||
-           (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
-           {
-                return(LVPSA_ERROR_INVALIDPARAM);
-           }
-    }
-
-    /*Inst_Alloc instances initialization */
-    InstAlloc_Init( &Instance   , pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress);
-    InstAlloc_Init( &Scratch    , pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress);
-    InstAlloc_Init( &Data       , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress);
-    InstAlloc_Init( &Coef       , pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress);
+LVPSA_RETURN LVPSA_Init(pLVPSA_Handle_t* phInstance, LVPSA_InitParams_t* pInitParams,
+                        LVPSA_ControlParams_t* pControlParams, void* pScratch) {
+    LVPSA_InstancePr_t* pLVPSA_Inst;
+    LVPSA_RETURN errorCode = LVPSA_OK;
+    LVM_UINT32 ii;
+    extern LVM_FLOAT LVPSA_Float_GainTable[];
+    LVM_UINT32 BufferLength = 0;
 
     /* Set the instance handle if not already initialised */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
-    }
-    pLVPSA_Inst =(LVPSA_InstancePr_t*)*phInstance;
+    *phInstance = new LVPSA_InstancePr_t{};
+    pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
 
-    /* Check the memory table for NULL pointers */
-    for (ii = 0; ii < LVPSA_NR_MEMORY_REGIONS; ii++)
-    {
-        if (pMemoryTable->Region[ii].Size!=0)
-        {
-            if (pMemoryTable->Region[ii].pBaseAddress==LVM_NULL)
-            {
-                return(LVPSA_ERROR_NULLADDRESS);
-            }
-            pLVPSA_Inst->MemoryTable.Region[ii] = pMemoryTable->Region[ii];
-        }
-    }
+    pLVPSA_Inst->pScratch = pScratch;
 
     /* Initialize module's internal parameters */
     pLVPSA_Inst->bControlPending = LVM_FALSE;
@@ -117,81 +61,132 @@
     pLVPSA_Inst->CurrentParams.Fs = LVM_FS_DUMMY;
     pLVPSA_Inst->CurrentParams.LevelDetectionSpeed = LVPSA_SPEED_DUMMY;
 
-    {   /* for avoiding QAC warnings */
-        LVM_INT32 SDBD=(LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
-        LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
+    { /* for avoiding QAC warnings */
+        LVM_INT32 SDBD = (LVM_INT32)pLVPSA_Inst->SpectralDataBufferDuration;
+        LVM_INT32 IRTI = (LVM_INT32)LVPSA_InternalRefreshTimeInv;
         LVM_INT32 BL;
 
-        MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
+        MUL32x32INTO32(SDBD, IRTI, BL, LVPSA_InternalRefreshTimeShift)
 
-        BufferLength=(LVM_UINT32)BL;
+        BufferLength = (LVM_UINT32)BL;
     }
 
-    if((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration)
-    {
+    if ((BufferLength * LVPSA_InternalRefreshTime) != pLVPSA_Inst->SpectralDataBufferDuration) {
         pLVPSA_Inst->SpectralDataBufferLength = BufferLength + 1;
-    }
-    else
-    {
+    } else {
         pLVPSA_Inst->SpectralDataBufferLength = BufferLength;
     }
 
     /* Assign the pointers */
-    pLVPSA_Inst->pPostGains             =
-        (LVM_FLOAT *)InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_FLOAT));
-    pLVPSA_Inst->pFiltersParams             = (LVPSA_FilterParam_t *)
-        InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t));
-    pLVPSA_Inst->pSpectralDataBufferStart   = (LVM_UINT8 *)
-        InstAlloc_AddMember(&Instance, pInitParams->nBands * \
-                                pLVPSA_Inst->SpectralDataBufferLength * sizeof(LVM_UINT8));
-    pLVPSA_Inst->pPreviousPeaks             = (LVM_UINT8 *)
-                  InstAlloc_AddMember(&Instance, pInitParams->nBands * sizeof(LVM_UINT8));
-    pLVPSA_Inst->pBPFiltersPrecision        = (LVPSA_BPFilterPrecision_en *)
-                  InstAlloc_AddMember(&Instance, pInitParams->nBands * \
-                                                       sizeof(LVPSA_BPFilterPrecision_en));
-    pLVPSA_Inst->pBP_Instances          = (Biquad_FLOAT_Instance_t *)
-                  InstAlloc_AddMember(&Coef, pInitParams->nBands * \
-                                                          sizeof(Biquad_FLOAT_Instance_t));
-    pLVPSA_Inst->pQPD_States            = (QPD_FLOAT_State_t *)
-                  InstAlloc_AddMember(&Coef, pInitParams->nBands * \
-                                                                sizeof(QPD_FLOAT_State_t));
-
-    pLVPSA_Inst->pBP_Taps               = (Biquad_1I_Order2_FLOAT_Taps_t *)
-        InstAlloc_AddMember(&Data, pInitParams->nBands * \
-                                                     sizeof(Biquad_1I_Order2_FLOAT_Taps_t));
-    pLVPSA_Inst->pQPD_Taps              = (QPD_FLOAT_Taps_t *)
-        InstAlloc_AddMember(&Data, pInitParams->nBands * \
-                                                    sizeof(QPD_FLOAT_Taps_t));
+    pLVPSA_Inst->pPostGains =
+            (LVM_FLOAT*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPostGains)));
+    if (pLVPSA_Inst->pPostGains == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pFiltersParams = (LVPSA_FilterParam_t*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pFiltersParams)));
+    if (pLVPSA_Inst->pFiltersParams == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pSpectralDataBufferStart = (LVM_UINT8*)calloc(
+            pInitParams->nBands, pLVPSA_Inst->SpectralDataBufferLength *
+                                         sizeof(*(pLVPSA_Inst->pSpectralDataBufferStart)));
+    if (pLVPSA_Inst->pSpectralDataBufferStart == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pPreviousPeaks =
+            (LVM_UINT8*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pPreviousPeaks)));
+    if (pLVPSA_Inst->pPreviousPeaks == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pBPFiltersPrecision = (LVPSA_BPFilterPrecision_en*)calloc(
+            pInitParams->nBands, sizeof(*(pLVPSA_Inst->pBPFiltersPrecision)));
+    if (pLVPSA_Inst->pBPFiltersPrecision == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pQPD_States =
+            (QPD_FLOAT_State_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_States)));
+    if (pLVPSA_Inst->pQPD_States == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
+    pLVPSA_Inst->pQPD_Taps =
+            (QPD_FLOAT_Taps_t*)calloc(pInitParams->nBands, sizeof(*(pLVPSA_Inst->pQPD_Taps)));
+    if (pLVPSA_Inst->pQPD_Taps == LVM_NULL) {
+        return LVPSA_ERROR_NULLADDRESS;
+    }
 
     /* Copy filters parameters in the private instance */
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
         pLVPSA_Inst->pFiltersParams[ii] = pInitParams->pFiltersParams[ii];
     }
 
     /* Set Post filters gains*/
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
-        pLVPSA_Inst->pPostGains[ii] = LVPSA_Float_GainTable[15 + \
-                                                        pInitParams->pFiltersParams[ii].PostGain];
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+        pLVPSA_Inst->pPostGains[ii] =
+                LVPSA_Float_GainTable[15 + pInitParams->pFiltersParams[ii].PostGain];
     }
     pLVPSA_Inst->pSpectralDataBufferWritePointer = pLVPSA_Inst->pSpectralDataBufferStart;
 
     /* Initialize control dependant internal parameters */
-    errorCode = LVPSA_Control (*phInstance, pControlParams);
+    errorCode = LVPSA_Control(*phInstance, pControlParams);
 
-    if(errorCode!=0)
-    {
+    if (errorCode != 0) {
         return errorCode;
     }
 
-    errorCode = LVPSA_ApplyNewSettings (pLVPSA_Inst);
+    errorCode = LVPSA_ApplyNewSettings(pLVPSA_Inst);
 
-    if(errorCode!=0)
-    {
+    if (errorCode != 0) {
         return errorCode;
     }
 
-    return(errorCode);
+    return (errorCode);
 }
 
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:            LVPSA_DeInit                                                */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*    Free the memory allocated by LVPSA_Init, including the instance handle        */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance          Pointer to the instance handle                              */
+/*                                                                                  */
+/************************************************************************************/
+void LVPSA_DeInit(pLVPSA_Handle_t* phInstance) {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
+    if (pLVPSA_Inst == LVM_NULL) {
+        return;
+    }
+    if (pLVPSA_Inst->pPostGains != LVM_NULL) {
+        free(pLVPSA_Inst->pPostGains);
+        pLVPSA_Inst->pPostGains = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pFiltersParams != LVM_NULL) {
+        free(pLVPSA_Inst->pFiltersParams);
+        pLVPSA_Inst->pFiltersParams = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pSpectralDataBufferStart != LVM_NULL) {
+        free(pLVPSA_Inst->pSpectralDataBufferStart);
+        pLVPSA_Inst->pSpectralDataBufferStart = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pPreviousPeaks != LVM_NULL) {
+        free(pLVPSA_Inst->pPreviousPeaks);
+        pLVPSA_Inst->pPreviousPeaks = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pBPFiltersPrecision != LVM_NULL) {
+        free(pLVPSA_Inst->pBPFiltersPrecision);
+        pLVPSA_Inst->pBPFiltersPrecision = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pQPD_States != LVM_NULL) {
+        free(pLVPSA_Inst->pQPD_States);
+        pLVPSA_Inst->pQPD_States = LVM_NULL;
+    }
+    if (pLVPSA_Inst->pQPD_Taps != LVM_NULL) {
+        free(pLVPSA_Inst->pQPD_Taps);
+        pLVPSA_Inst->pQPD_Taps = LVM_NULL;
+    }
+    delete pLVPSA_Inst;
+    *phInstance = LVM_NULL;
+}
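With the separate memory-table step removed (the LVPSA_Memory.cpp deletion follows), creation and teardown are now a single Init/DeInit pair. A minimal lifecycle sketch; the parameter structures and the bundle-provided scratch pointer are assumed to be prepared by the caller, and the helper name is illustrative:

    // Every failure path shown in LVPSA_Init above runs after the instance has been created,
    // so LVPSA_DeInit can be used for cleanup on error as well.
    static LVPSA_RETURN createUseDestroy(LVPSA_InitParams_t* pInitParams,
                                         LVPSA_ControlParams_t* pControlParams, void* pScratch) {
        pLVPSA_Handle_t hInstance = LVM_NULL;
        LVPSA_RETURN err = LVPSA_Init(&hInstance, pInitParams, pControlParams, pScratch);
        if (err != LVPSA_OK) {
            LVPSA_DeInit(&hInstance);  // releases any members that were already allocated
            return err;
        }
        // ... LVPSA_Control() / processing calls go here ...
        LVPSA_DeInit(&hInstance);      // frees all members, deletes the instance, nulls the handle
        return LVPSA_OK;
    }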
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
deleted file mode 100644
index eafcbe6..0000000
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Memory.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Copyright (C) 2004-2010 NXP Software
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "InstAlloc.h"
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVEQNB_Memory                                               */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL         Returns the memory requirements                        */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) the memory      */
-/*  base address pointers are NULL on return.                                           */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the memory       */
-/*  table returns the allocated memory and base addresses used during initialisation.   */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  InitParams              Pointer to the instance init parameters                     */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVPSA_OK            Succeeds                                                        */
-/*  otherwise           Error due to bad parameters                                     */
-/*                                                                                      */
-/****************************************************************************************/
-LVPSA_RETURN LVPSA_Memory            ( pLVPSA_Handle_t             hInstance,
-                                       LVPSA_MemTab_t             *pMemoryTable,
-                                       LVPSA_InitParams_t         *pInitParams    )
-{
-    LVM_UINT32          ii;
-    LVM_UINT32          BufferLength;
-    INST_ALLOC          Instance;
-    INST_ALLOC          Scratch;
-    INST_ALLOC          Data;
-    INST_ALLOC          Coef;
-    LVPSA_InstancePr_t *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-
-    InstAlloc_Init( &Instance   , LVM_NULL);
-    InstAlloc_Init( &Scratch    , LVM_NULL);
-    InstAlloc_Init( &Data       , LVM_NULL);
-    InstAlloc_Init( &Coef       , LVM_NULL);
-
-    if((pMemoryTable == LVM_NULL) || (pInitParams == LVM_NULL))
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
-    }
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-
-        /* Check init parameter */
-        if( (pInitParams->SpectralDataBufferDuration > LVPSA_MAXBUFFERDURATION)   ||
-            (pInitParams->SpectralDataBufferDuration == 0)                        ||
-            (pInitParams->MaxInputBlockSize > LVPSA_MAXINPUTBLOCKSIZE)      ||
-            (pInitParams->MaxInputBlockSize == 0)                           ||
-            (pInitParams->nBands < LVPSA_NBANDSMIN)                         ||
-            (pInitParams->nBands > LVPSA_NBANDSMAX)                         ||
-            (pInitParams->pFiltersParams == 0))
-        {
-            return(LVPSA_ERROR_INVALIDPARAM);
-        }
-        for(ii = 0; ii < pInitParams->nBands; ii++)
-        {
-            if((pInitParams->pFiltersParams[ii].CenterFrequency > LVPSA_MAXCENTERFREQ) ||
-               (pInitParams->pFiltersParams[ii].PostGain        > LVPSA_MAXPOSTGAIN)   ||
-               (pInitParams->pFiltersParams[ii].PostGain        < LVPSA_MINPOSTGAIN)   ||
-               (pInitParams->pFiltersParams[ii].QFactor < LVPSA_MINQFACTOR)            ||
-               (pInitParams->pFiltersParams[ii].QFactor > LVPSA_MAXQFACTOR))
-               {
-                    return(LVPSA_ERROR_INVALIDPARAM);
-               }
-        }
-
-        /*
-         * Instance memory
-         */
-
-        InstAlloc_AddMember( &Instance, sizeof(LVPSA_InstancePr_t) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_FLOAT) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_FilterParam_t) );
-
-        {
-            /* for avoiding QAC warnings as MUL32x32INTO32 works on LVM_INT32 only*/
-            LVM_INT32 SDBD=(LVM_INT32)pInitParams->SpectralDataBufferDuration;
-            LVM_INT32 IRTI=(LVM_INT32)LVPSA_InternalRefreshTimeInv;
-            LVM_INT32 BL;
-
-            MUL32x32INTO32(SDBD,IRTI,BL,LVPSA_InternalRefreshTimeShift)
-            BufferLength=(LVM_UINT32)BL;
-        }
-
-        if((BufferLength * LVPSA_InternalRefreshTime) != pInitParams->SpectralDataBufferDuration)
-        {
-            BufferLength++;
-        }
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * BufferLength * sizeof(LVM_UINT8) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVM_UINT8) );
-        InstAlloc_AddMember( &Instance, pInitParams->nBands * sizeof(LVPSA_BPFilterPrecision_en) );
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Size         = InstAlloc_GetTotal(&Instance);
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].Type         = LVPSA_PERSISTENT;
-        pMemoryTable->Region[LVPSA_MEMREGION_INSTANCE].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        InstAlloc_AddMember( &Scratch, 2 * pInitParams->MaxInputBlockSize * sizeof(LVM_FLOAT) );
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Size         = InstAlloc_GetTotal(&Scratch);
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].Type         = LVPSA_SCRATCH;
-        pMemoryTable->Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent coefficients memory
-         */
-        InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(Biquad_FLOAT_Instance_t) );
-        InstAlloc_AddMember( &Coef, pInitParams->nBands * sizeof(QPD_FLOAT_State_t) );
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Size         = InstAlloc_GetTotal(&Coef);
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].Type         = LVPSA_PERSISTENT_COEF;
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Persistent data memory
-         */
-        InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(Biquad_1I_Order2_FLOAT_Taps_t) );
-        InstAlloc_AddMember( &Data, pInitParams->nBands * sizeof(QPD_FLOAT_Taps_t) );
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Size         = InstAlloc_GetTotal(&Data);
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].Type         = LVPSA_PERSISTENT_DATA;
-        pMemoryTable->Region[LVPSA_MEMREGION_PERSISTENT_DATA].pBaseAddress = LVM_NULL;
-
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pLVPSA_Inst->MemoryTable;
-    }
-
-    return(LVPSA_OK);
-}
-
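
The deleted block above sizes the per-band history buffer by dividing the requested duration by the 20 ms internal refresh period and rounding up. A minimal standalone sketch of that arithmetic (plain C++, illustrative names, not part of the patch):

#include <cstdint>
#include <cstdio>

// Assumed constant, mirroring LVPSA_InternalRefreshTime (20 ms per stored value).
constexpr uint32_t kRefreshTimeMs = 20;

// Number of per-band values the buffer must hold for a duration in ms,
// rounded up to a whole number of refresh periods (ceiling division).
static uint32_t spectralBufferLength(uint32_t durationMs) {
    return (durationMs + kRefreshTimeMs - 1) / kRefreshTimeMs;
}

int main() {
    printf("%u\n", spectralBufferLength(500));   // 25 values per band
    printf("%u\n", spectralBufferLength(4000));  // 200 values per band (max duration)
    return 0;
}
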
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
index 61987b5..605a22e 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Private.h
@@ -18,6 +18,7 @@
 #ifndef _LVPSA_PRIVATE_H_
 #define _LVPSA_PRIVATE_H_
 
+#include <audio_utils/BiquadFilter.h>
 #include "LVPSA.h"
 #include "BIQUAD.h"
 #include "LVPSA_QPD.h"
@@ -27,95 +28,99 @@
    CONSTANT DEFINITIONS
 ***********************************************************************************/
 
-/* Memory */
-#define LVPSA_INSTANCE_ALIGN             4      /* 32-bit alignment for structures                                  */
-#define LVPSA_SCRATCH_ALIGN              4      /* 32-bit alignment for long data                                   */
-#define LVPSA_COEF_ALIGN                 4      /* 32-bit alignment for long words                                  */
-#define LVPSA_DATA_ALIGN                 4      /* 32-bit alignment for long data                                   */
+#define LVPSA_NR_SUPPORTED_RATE 13 /* From 8000Hz to 192000Hz*/
+#define LVPSA_NR_SUPPORTED_SPEED \
+    3 /* LOW, MEDIUM, HIGH                                                */
 
-#define LVPSA_MEMREGION_INSTANCE         0      /* Offset to instance memory region in memory table                 */
-#define LVPSA_MEMREGION_PERSISTENT_COEF  1      /* Offset to persistent coefficients  memory region in memory table */
-#define LVPSA_MEMREGION_PERSISTENT_DATA  2      /* Offset to persistent taps  memory region in memory table         */
-#define LVPSA_MEMREGION_SCRATCH          3      /* Offset to scratch  memory region in memory table                 */
-#define LVPSA_NR_SUPPORTED_RATE          13      /* From 8000Hz to 192000Hz*/
-#define LVPSA_NR_SUPPORTED_SPEED         3      /* LOW, MEDIUM, HIGH                                                */
+#define LVPSA_MAXBUFFERDURATION \
+    4000 /* Maximum length in ms of the levels buffer                        */
+#define LVPSA_MAXINPUTBLOCKSIZE \
+    5000                   /* Maximum length in mono samples of the block to process           */
+#define LVPSA_NBANDSMIN 1  /* Minimum number of frequency band                                 */
+#define LVPSA_NBANDSMAX 30 /* Maximum number of frequency band                                 */
+#define LVPSA_MAXCENTERFREQ \
+    20000 /* Maximum possible center frequency                                */
+#define LVPSA_MINPOSTGAIN \
+    (-15)                     /* Minimum possible post gain                                       */
+#define LVPSA_MAXPOSTGAIN 15  /* Maximum possible post gain  */
+#define LVPSA_MINQFACTOR 25   /* Minimum possible Q factor                                        */
+#define LVPSA_MAXQFACTOR 1200 /* Maximum possible Q factor */
 
-#define LVPSA_MAXBUFFERDURATION          4000   /* Maximum length in ms of the levels buffer                        */
-#define LVPSA_MAXINPUTBLOCKSIZE          5000   /* Maximum length in mono samples of the block to process           */
-#define LVPSA_NBANDSMIN                  1      /* Minimum number of frequency band                                 */
-#define LVPSA_NBANDSMAX                  30     /* Maximum number of frequency band                                 */
-#define LVPSA_MAXCENTERFREQ              20000  /* Maximum possible center frequency                                */
-#define LVPSA_MINPOSTGAIN              (-15)    /* Minimum possible post gain                                       */
-#define LVPSA_MAXPOSTGAIN                15     /* Maximum possible post gain                                       */
-#define LVPSA_MINQFACTOR                 25     /* Minimum possible Q factor                                        */
-#define LVPSA_MAXQFACTOR                 1200   /* Maximum possible Q factor                                        */
+#define LVPSA_MAXLEVELDECAYFACTOR \
+    0x4111 /* Decay factor for the maximum values calculation                  */
+#define LVPSA_MAXLEVELDECAYSHIFT \
+    14 /* Decay shift for the maximum values calculation                   */
 
-#define LVPSA_MAXLEVELDECAYFACTOR        0x4111 /* Decay factor for the maximum values calculation                  */
-#define LVPSA_MAXLEVELDECAYSHIFT         14     /* Decay shift for the maximum values calculation                   */
+#define LVPSA_MAXUNSIGNEDCHAR 0xFF
 
-#define LVPSA_MAXUNSIGNEDCHAR            0xFF
-
-#define LVPSA_FsInvertShift              31
-#define LVPSA_GAINSHIFT                  11
-#define LVPSA_FREQSHIFT                  25
+#define LVPSA_FsInvertShift 31
+#define LVPSA_GAINSHIFT 11
+#define LVPSA_FREQSHIFT 25
 
 /**********************************************************************************
    TYPES DEFINITIONS
 ***********************************************************************************/
 
-#define LVPSA_InternalRefreshTime       0x0014    /* 20 ms (50Hz) in Q16.0      */
-#define LVPSA_InternalRefreshTimeInv    0x0666    /* 1/20ms left shifted by 15  */
-#define LVPSA_InternalRefreshTimeShift  15
+#define LVPSA_InternalRefreshTime 0x0014    /* 20 ms (50Hz) in Q16.0      */
+#define LVPSA_InternalRefreshTimeInv 0x0666 /* 1/20ms left shifted by 15  */
+#define LVPSA_InternalRefreshTimeShift 15
 
 /* Precision of the filter */
-typedef enum
-{
-    LVPSA_SimplePrecisionFilter,    /* Simple precision */
-    LVPSA_DoublePrecisionFilter     /* Double precision */
+typedef enum {
+    LVPSA_SimplePrecisionFilter, /* Simple precision */
+    LVPSA_DoublePrecisionFilter  /* Double precision */
 } LVPSA_BPFilterPrecision_en;
 
-typedef struct
-{
-    LVM_CHAR                    bControlPending;                    /* Flag incating a change of the control parameters                                             */
-    LVM_UINT16                  nBands;                             /* Number of bands of the spectrum analyzer                                                     */
-    LVM_UINT16                  MaxInputBlockSize;                  /* Maximum input data buffer size                                                               */
+typedef struct {
+    LVM_CHAR bControlPending;     /* Flag indicating a change of the control parameters   */
+    LVM_UINT16 nBands;            /* Number of bands of the spectrum analyzer            */
+    LVM_UINT16 MaxInputBlockSize; /* Maximum input data buffer size */
 
-    LVPSA_ControlParams_t       CurrentParams;                      /* Current control parameters of the module                                                     */
-    LVPSA_ControlParams_t       NewParams;                          /* New control parameters given by the user                                                     */
-    LVPSA_MemTab_t              MemoryTable;
+    LVPSA_ControlParams_t CurrentParams; /* Current control parameters of the module */
+    LVPSA_ControlParams_t NewParams;     /* New control parameters given by the user     */
+    void* pScratch;
+    /* Pointer to bundle scratch buffer */
 
-    LVPSA_BPFilterPrecision_en *pBPFiltersPrecision;                /* Points a nBands elements array that contains the filter precision for each band              */
-    Biquad_FLOAT_Instance_t          *pBP_Instances;
-    /* Points a nBands elements array that contains the band pass filter taps for each band */
-    Biquad_1I_Order2_FLOAT_Taps_t    *pBP_Taps;
+    LVPSA_BPFilterPrecision_en* pBPFiltersPrecision; /* Points a nBands elements array that contains
+                                                        the filter precision for each band */
+    std::vector<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            specBiquad; /* Biquad filter instances */
     /* Points a nBands elements array that contains the QPD filter instance for each band */
-    QPD_FLOAT_State_t                *pQPD_States;
+    QPD_FLOAT_State_t* pQPD_States;
     /* Points a nBands elements array that contains the QPD filter taps for each band */
-    QPD_FLOAT_Taps_t                 *pQPD_Taps;
+    QPD_FLOAT_Taps_t* pQPD_Taps;
 
     /* Points a nBands elements array that contains the post-filter gains for each band */
-    LVM_FLOAT                  *pPostGains;
-    LVPSA_FilterParam_t        *pFiltersParams;                     /* Copy of the filters parameters from the input parameters                                     */
+    LVM_FLOAT* pPostGains;
+    LVPSA_FilterParam_t*
+            pFiltersParams; /* Copy of the filters parameters from the input parameters */
 
-    LVM_UINT16                  nSamplesBufferUpdate;               /* Number of samples to make 20ms                                                               */
-    LVM_INT32                   BufferUpdateSamplesCount;           /* Counter used to know when to put a new value in the buffer                                   */
-    LVM_UINT16                  nRelevantFilters;                   /* Number of relevent filters depending on sampling frequency and bands center frequency        */
-    LVM_UINT16                  LocalSamplesCount;                  /* Counter used to update the SpectralDataBufferAudioTime                                       */
+    LVM_UINT16 nSamplesBufferUpdate;    /* Number of samples to make 20ms    */
+    LVM_INT32 BufferUpdateSamplesCount; /* Counter used to know when to put a new value in the
+                                           buffer                                   */
+    LVM_UINT16 nRelevantFilters;  /* Number of relevant filters depending on sampling frequency and
+                                     bands center frequency        */
+    LVM_UINT16 LocalSamplesCount; /* Counter used to update the SpectralDataBufferAudioTime */
 
-    LVM_UINT16                  DownSamplingFactor;                 /* Down sampling factor depending on the sampling frequency                                     */
-    LVM_UINT16                  DownSamplingCount;                  /* Counter used for the downsampling handling                                                   */
+    LVM_UINT16 DownSamplingFactor; /* Down sampling factor depending on the sampling frequency */
+    LVM_UINT16 DownSamplingCount;  /* Counter used for the downsampling handling  */
 
-    LVM_UINT16                  SpectralDataBufferDuration;         /* Length of the buffer in time (ms) defined by the application                                 */
-    LVM_UINT8                  *pSpectralDataBufferStart;           /* Starting address of the buffer                                                               */
-    LVM_UINT8                  *pSpectralDataBufferWritePointer;    /* Current position of the writting pointer of the buffer                                       */
-    LVPSA_Time                  SpectralDataBufferAudioTime;        /* AudioTime at which the last value save occured in the buffer                                 */
-    LVM_UINT32                  SpectralDataBufferLength;           /* Number of spectrum data value that the buffer can contain (per band)
-                                                                       = SpectralDataBufferDuration/20ms                                                            */
+    LVM_UINT16 SpectralDataBufferDuration;      /* Length of the buffer in time (ms) defined by the
+                                                   application                                 */
+    LVM_UINT8* pSpectralDataBufferStart;        /* Starting address of the buffer        */
+    LVM_UINT8* pSpectralDataBufferWritePointer; /* Current position of the writing pointer of the
+                                                   buffer                                       */
+    LVPSA_Time SpectralDataBufferAudioTime; /* AudioTime at which the last value save occurred in
+                                               the buffer                                 */
+    LVM_UINT32
+            SpectralDataBufferLength; /* Number of spectrum data value that the buffer can contain
+                                         (per band) = SpectralDataBufferDuration/20ms */
 
-    LVM_UINT8                  *pPreviousPeaks;                     /* Points to a nBands elements array that contains the previous peak value of the level
-                                                                     detection. Those values are decremented after each call to the GetSpectrum function          */
+    LVM_UINT8* pPreviousPeaks; /* Points to a nBands elements array that contains the previous peak
+                                value of the level detection. Those values are decremented after
+                                each call to the GetSpectrum function          */
 
-}LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
+} LVPSA_InstancePr_t, *pLVPSA_InstancePr_t;
 
 /**********************************************************************************
    FUNCTIONS PROTOTYPE
@@ -135,6 +140,6 @@
 /*  LVPSA_OK            Always succeeds                                             */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_ApplyNewSettings (LVPSA_InstancePr_t     *pInst);
+LVPSA_RETURN LVPSA_ApplyNewSettings(LVPSA_InstancePr_t* pInst);
 
 #endif /* _LVPSA_PRIVATE_H */
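
The reworked private structure drops the caller-provided memory table in favour of a bundle scratch pointer and a std::vector of audio_utils BiquadFilter objects, one per band. A self-contained sketch of that container pattern follows; OnePole is a stand-in filter, not the real BiquadFilter API:

#include <cstddef>
#include <vector>

class OnePole {                        // stand-in for a per-band biquad filter
  public:
    explicit OnePole(float a) : mA(a) {}
    void process(float* out, const float* in, size_t frames) {
        for (size_t i = 0; i < frames; ++i) {
            mZ = mA * in[i] + (1.0f - mA) * mZ;   // simple smoothing recursion
            out[i] = mZ;
        }
    }
  private:
    float mA;
    float mZ = 0.0f;
};

int main() {
    const size_t kBands = 3, kFrames = 8;
    std::vector<OnePole> bands(kBands, OnePole(0.5f));   // one filter object per band
    std::vector<float> in(kFrames, 1.0f), out(kFrames);
    for (auto& band : bands) {
        band.process(out.data(), in.data(), kFrames);    // one pass per band
    }
    return 0;
}
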
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
index 81a88c5..c89c4f6 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Process.cpp
@@ -15,23 +15,23 @@
  * limitations under the License.
  */
 
-#include    "LVPSA.h"
-#include    "LVPSA_Private.h"
-#include    "LVM_Macros.h"
-#include    "VectorArithmetic.h"
+#include "LVPSA.h"
+#include "LVPSA_Private.h"
+#include "LVM_Macros.h"
+#include "VectorArithmetic.h"
 
-#define LVM_MININT_32   0x80000000
+#define LVM_MININT_32 0x80000000
 
 static LVM_INT32 mult32x32in32_shiftr(LVM_INT32 a, LVM_INT32 b, LVM_INT32 c) {
-  LVM_INT64 result = ((LVM_INT64)a * b) >> c;
+    LVM_INT64 result = ((LVM_INT64)a * b) >> c;
 
-  if (result >= INT32_MAX) {
-    return INT32_MAX;
-  } else if (result <= INT32_MIN) {
-    return INT32_MIN;
-  } else {
-    return (LVM_INT32)result;
-  }
+    if (result >= INT32_MAX) {
+        return INT32_MAX;
+    } else if (result <= INT32_MIN) {
+        return INT32_MIN;
+    } else {
+        return (LVM_INT32)result;
+    }
 }
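
A quick standalone illustration of the helper above (mulShiftSat mirrors mult32x32in32_shiftr; the inputs are arbitrary): widen to 64 bits, shift, then clamp to the 32-bit range.

#include <cstdint>
#include <cstdio>

static int32_t mulShiftSat(int32_t a, int32_t b, int shift) {
    const int64_t wide = (int64_t)a * b >> shift;   // full-precision product, then shift
    if (wide >= INT32_MAX) return INT32_MAX;        // saturate instead of wrapping
    if (wide <= INT32_MIN) return INT32_MIN;
    return (int32_t)wide;
}

int main() {
    printf("%d\n", mulShiftSat(1 << 20, 1 << 20, 8));  // 2^40 >> 8 saturates to INT32_MAX
    printf("%d\n", mulShiftSat(48000, 268435, 13));    // in-range result: 1572861
    return 0;
}
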
 
 /************************************************************************************/
@@ -54,42 +54,36 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_Process           ( pLVPSA_Handle_t      hInstance,
-                                       LVM_FLOAT           *pLVPSA_InputSamples,
-                                       LVM_UINT16           InputBlockSize,
-                                       LVPSA_Time           AudioTime            )
+LVPSA_RETURN LVPSA_Process(pLVPSA_Handle_t hInstance, LVM_FLOAT* pLVPSA_InputSamples,
+                           LVM_UINT16 InputBlockSize, LVPSA_Time AudioTime)
 
 {
-    LVPSA_InstancePr_t     *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    LVM_FLOAT               *pScratch;
-    LVM_INT16               ii;
-    LVM_INT32               AudioTimeInc;
-    extern LVM_UINT32       LVPSA_SampleRateInvTab[];
-    LVM_UINT8               *pWrite_Save;         /* Position of the write pointer
-                                                     at the beginning of the process  */
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    LVM_FLOAT* pScratch;
+    LVM_INT16 ii;
+    LVM_INT32 AudioTimeInc;
+    extern LVM_UINT32 LVPSA_SampleRateInvTab[];
+    LVM_UINT8* pWrite_Save; /* Position of the write pointer
+                               at the beginning of the process  */
 
     /******************************************************************************
        CHECK PARAMETERS
     *******************************************************************************/
-    if(hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (hInstance == LVM_NULL || pLVPSA_InputSamples == LVM_NULL) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
-    if(InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize)
-    {
-        return(LVPSA_ERROR_INVALIDPARAM);
+    if (InputBlockSize == 0 || InputBlockSize > pLVPSA_Inst->MaxInputBlockSize) {
+        return (LVPSA_ERROR_INVALIDPARAM);
     }
-
-    pScratch = (LVM_FLOAT*)pLVPSA_Inst->MemoryTable.Region[LVPSA_MEMREGION_SCRATCH].pBaseAddress;
+    pScratch = (LVM_FLOAT*)pLVPSA_Inst->pScratch;
     pWrite_Save = pLVPSA_Inst->pSpectralDataBufferWritePointer;
 
     /******************************************************************************
        APPLY NEW SETTINGS IF NEEDED
     *******************************************************************************/
-    if (pLVPSA_Inst->bControlPending == LVM_TRUE)
-    {
+    if (pLVPSA_Inst->bControlPending == LVM_TRUE) {
         pLVPSA_Inst->bControlPending = 0;
-        LVPSA_ApplyNewSettings( pLVPSA_Inst);
+        LVPSA_ApplyNewSettings(pLVPSA_Inst);
     }
 
     /******************************************************************************
@@ -99,39 +93,30 @@
     Copy_Float(pLVPSA_InputSamples, pScratch, (LVM_INT16)InputBlockSize);
     Shift_Sat_Float(-1, pScratch, pScratch, (LVM_INT16)InputBlockSize);
 
-    for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++)
-    {
-        switch(pLVPSA_Inst->pBPFiltersPrecision[ii])
-        {
+    for (ii = 0; ii < pLVPSA_Inst->nRelevantFilters; ii++) {
+        switch (pLVPSA_Inst->pBPFiltersPrecision[ii]) {
             case LVPSA_SimplePrecisionFilter:
-                BP_1I_D16F16C14_TRC_WRA_01  ( &pLVPSA_Inst->pBP_Instances[ii],
-                                              pScratch,
-                                              pScratch + InputBlockSize,
-                                              (LVM_INT16)InputBlockSize);
+                pLVPSA_Inst->specBiquad[ii].process(pScratch + InputBlockSize, pScratch,
+                                                    (LVM_INT16)InputBlockSize);
                 break;
 
             case LVPSA_DoublePrecisionFilter:
-                BP_1I_D16F32C30_TRC_WRA_01  ( &pLVPSA_Inst->pBP_Instances[ii],
-                                              pScratch,
-                                              pScratch + InputBlockSize,
-                                              (LVM_INT16)InputBlockSize);
+                pLVPSA_Inst->specBiquad[ii].process(pScratch + InputBlockSize, pScratch,
+                                                    (LVM_INT16)InputBlockSize);
                 break;
             default:
                 break;
         }
 
-        LVPSA_QPD_Process_Float   ( pLVPSA_Inst,
-                                    pScratch + InputBlockSize,
-                                    (LVM_INT16)InputBlockSize,
-                                    ii);
+        LVPSA_QPD_Process_Float(pLVPSA_Inst, pScratch + InputBlockSize, (LVM_INT16)InputBlockSize,
+                                ii);
     }
 
     /******************************************************************************
        UPDATE SpectralDataBufferAudioTime
     *******************************************************************************/
 
-    if(pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save)
-    {
+    if (pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite_Save) {
         AudioTimeInc = mult32x32in32_shiftr(
                 (AudioTime + ((LVM_INT32)pLVPSA_Inst->LocalSamplesCount * 1000)),
                 (LVM_INT32)LVPSA_SampleRateInvTab[pLVPSA_Inst->CurrentParams.Fs],
@@ -139,7 +124,7 @@
         pLVPSA_Inst->SpectralDataBufferAudioTime = AudioTime + AudioTimeInc;
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
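
For context, LVPSA_Process uses the bundle scratch as two halves of MaxInputBlockSize floats: the halved input copy sits in the first half and each band-pass filter writes into the second half before level detection. A minimal sketch of that layout (illustrative sizes, the filter call is a placeholder):

#include <vector>

int main() {
    const int kBlock = 4;                         // stand-in for MaxInputBlockSize
    std::vector<float> scratch(2 * kBlock);       // two halves, as allocated by the bundle
    const float input[kBlock] = {0.5f, -0.5f, 0.25f, -0.25f};

    for (int i = 0; i < kBlock; ++i) {
        scratch[i] = input[i] * 0.5f;             // copy plus the "-1 shift" attenuation
    }
    float* bandOut = scratch.data() + kBlock;     // second half: per-band filter output
    for (int i = 0; i < kBlock; ++i) {
        bandOut[i] = scratch[i];                  // placeholder for specBiquad[ii].process()
    }
    return 0;
}
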
 
 /************************************************************************************/
@@ -162,99 +147,95 @@
 /*  otherwise           Error due to bad parameters                                 */
 /*                                                                                  */
 /************************************************************************************/
-LVPSA_RETURN LVPSA_GetSpectrum       ( pLVPSA_Handle_t      hInstance,
-                                       LVPSA_Time           GetSpectrumAudioTime,
-                                       LVM_UINT8           *pCurrentValues,
-                                       LVM_UINT8           *pPeakValues           )
+LVPSA_RETURN LVPSA_GetSpectrum(pLVPSA_Handle_t hInstance, LVPSA_Time GetSpectrumAudioTime,
+                               LVM_UINT8* pCurrentValues, LVM_UINT8* pPeakValues)
 
 {
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    LVM_INT32 StatusDelta, ii;
+    LVM_UINT8* pRead;
 
-    LVPSA_InstancePr_t      *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    LVM_INT32               StatusDelta, ii;
-    LVM_UINT8               *pRead;
-
-    if(hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL)
-    {
-        return(LVPSA_ERROR_NULLADDRESS);
+    if (hInstance == LVM_NULL || pCurrentValues == LVM_NULL || pPeakValues == LVM_NULL) {
+        return (LVPSA_ERROR_NULLADDRESS);
     }
 
     /* First find the place where to look in the status buffer */
-    if(GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime)
-    {
-        MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift);
-        if((StatusDelta * LVPSA_InternalRefreshTime) != (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime))
-        {
+    if (GetSpectrumAudioTime <= pLVPSA_Inst->SpectralDataBufferAudioTime) {
+        MUL32x32INTO32((pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime),
+                       LVPSA_InternalRefreshTimeInv, StatusDelta, LVPSA_InternalRefreshTimeShift);
+        if ((StatusDelta * LVPSA_InternalRefreshTime) !=
+            (pLVPSA_Inst->SpectralDataBufferAudioTime - GetSpectrumAudioTime)) {
             StatusDelta += 1;
         }
-    }
-    else
-    {
+    } else {
         /* This part handles the wrap around */
-        MUL32x32INTO32(((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),LVPSA_InternalRefreshTimeInv,StatusDelta,LVPSA_InternalRefreshTimeShift)
-        if(((LVM_INT32)(StatusDelta * LVPSA_InternalRefreshTime)) != ((LVM_INT32)((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) + ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime))))
-        {
+        MUL32x32INTO32(
+                ((pLVPSA_Inst->SpectralDataBufferAudioTime - (LVM_INT32)LVM_MININT_32) +
+                 ((LVM_INT32)LVM_MAXINT_32 - GetSpectrumAudioTime)),
+                LVPSA_InternalRefreshTimeInv, StatusDelta,
+                LVPSA_InternalRefreshTimeShift) if (((LVM_INT32)(StatusDelta *
+                                                                 LVPSA_InternalRefreshTime)) !=
+                                                    ((LVM_INT32)(
+                                                            (pLVPSA_Inst
+                                                                     ->SpectralDataBufferAudioTime -
+                                                             (LVM_INT32)LVM_MININT_32) +
+                                                            ((LVM_INT32)LVM_MAXINT_32 -
+                                                             GetSpectrumAudioTime)))) {
             StatusDelta += 1;
         }
     }
     /* Check whether the desired level is not too "old" (see 2.10 in LVPSA_DesignNotes.doc)*/
-    if(
-        ((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime)&&
-         ((GetSpectrumAudioTime<0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>0))&&
-         (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime))>LVM_MAXINT_32))||
+    if (((GetSpectrumAudioTime < pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+         ((GetSpectrumAudioTime < 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime > 0)) &&
+         (((LVM_INT32)(-GetSpectrumAudioTime + pLVPSA_Inst->SpectralDataBufferAudioTime)) >
+          LVM_MAXINT_32)) ||
 
-         ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime)&&
-         (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime>=0))||
-          ((GetSpectrumAudioTime<=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))||
-         (((GetSpectrumAudioTime>=0)&&(pLVPSA_Inst->SpectralDataBufferAudioTime<=0))&&
-         (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime))<LVM_MAXINT_32))))||
+        ((GetSpectrumAudioTime > pLVPSA_Inst->SpectralDataBufferAudioTime) &&
+         (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime >= 0)) ||
+          ((GetSpectrumAudioTime <= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) ||
+          (((GetSpectrumAudioTime >= 0) && (pLVPSA_Inst->SpectralDataBufferAudioTime <= 0)) &&
+           (((LVM_INT32)(GetSpectrumAudioTime - pLVPSA_Inst->SpectralDataBufferAudioTime)) <
+            LVM_MAXINT_32)))) ||
 
-        (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) ||
-        (!StatusDelta))
-    {
-        for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-        {
-            pCurrentValues[ii]  = 0;
-            pPeakValues[ii]      = 0;
+        (StatusDelta > (LVM_INT32)pLVPSA_Inst->SpectralDataBufferLength) || (!StatusDelta)) {
+        for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
+            pCurrentValues[ii] = 0;
+            pPeakValues[ii] = 0;
         }
-        return(LVPSA_OK);
+        return (LVPSA_OK);
     }
     /* Set the reading pointer */
-    if((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) > (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart))
-    {
-        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer + (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) * pLVPSA_Inst->nBands;
-    }
-    else
-    {
-        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer  - StatusDelta * pLVPSA_Inst->nBands;
+    if ((LVM_INT32)(StatusDelta * pLVPSA_Inst->nBands) >
+        (pLVPSA_Inst->pSpectralDataBufferWritePointer - pLVPSA_Inst->pSpectralDataBufferStart)) {
+        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer +
+                (pLVPSA_Inst->SpectralDataBufferLength - (LVM_UINT32)StatusDelta) *
+                        pLVPSA_Inst->nBands;
+    } else {
+        pRead = pLVPSA_Inst->pSpectralDataBufferWritePointer - StatusDelta * pLVPSA_Inst->nBands;
     }
 
     /* Read the status buffer and fill the output buffers */
-    for(ii = 0; ii < pLVPSA_Inst->nBands; ii++)
-    {
+    for (ii = 0; ii < pLVPSA_Inst->nBands; ii++) {
         pCurrentValues[ii] = pRead[ii];
-        if(pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii])
-        {
+        if (pLVPSA_Inst->pPreviousPeaks[ii] <= pRead[ii]) {
             pLVPSA_Inst->pPreviousPeaks[ii] = pRead[ii];
-        }
-        else if(pLVPSA_Inst->pPreviousPeaks[ii] != 0)
-        {
+        } else if (pLVPSA_Inst->pPreviousPeaks[ii] != 0) {
             LVM_INT32 temp;
             /*Re-compute max values for decay */
             temp = (LVM_INT32)(LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]);
-            temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR)>>LVPSA_MAXLEVELDECAYSHIFT);
+            temp = ((temp * LVPSA_MAXLEVELDECAYFACTOR) >> LVPSA_MAXLEVELDECAYSHIFT);
             /* If the gain has no effect, "help" the value to increase */
-            if(temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii]))
-            {
+            if (temp == (LVPSA_MAXUNSIGNEDCHAR - pLVPSA_Inst->pPreviousPeaks[ii])) {
                 temp += 1;
             }
             /* Saturate */
             temp = (temp > LVPSA_MAXUNSIGNEDCHAR) ? LVPSA_MAXUNSIGNEDCHAR : temp;
             /* Store new max level */
-            pLVPSA_Inst->pPreviousPeaks[ii] =  (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
+            pLVPSA_Inst->pPreviousPeaks[ii] = (LVM_UINT8)(LVPSA_MAXUNSIGNEDCHAR - temp);
         }
 
         pPeakValues[ii] = pLVPSA_Inst->pPreviousPeaks[ii];
     }
 
-    return(LVPSA_OK);
+    return (LVPSA_OK);
 }
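
The read-pointer computation above steps back StatusDelta snapshots of nBands bytes from the write position and wraps around the start of the circular spectral-data buffer. A standalone sketch with illustrative sizes:

#include <cstdint>
#include <vector>

int main() {
    const int nBands = 4, length = 8;                 // snapshots the buffer can hold
    std::vector<uint8_t> buffer(nBands * length);
    uint8_t* start = buffer.data();
    uint8_t* write = start + 2 * nBands;              // example write position
    int delta = 5;                                    // snapshots to step back

    uint8_t* read;
    if (delta * nBands > write - start) {
        read = write + (length - delta) * nBands;     // wrap to the buffer tail
    } else {
        read = write - delta * nBands;
    }
    (void)read;                                       // read now points at the requested snapshot
    return 0;
}
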
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
index 609a485..2f752bf 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD.h
@@ -20,41 +20,35 @@
 
 #include "LVM_Types.h"
 
-typedef struct
-{
-  LVM_INT32                            *pDelay;        /* pointer to the delayed samples (data of 32 bits)   */
-  LVM_INT32                            Coefs[2];       /* pointer to the filter coefficients */
-}QPD_State_t, *pQPD_State_t;
+typedef struct {
+    LVM_INT32* pDelay;  /* pointer to the delayed samples (data of 32 bits)   */
+    LVM_INT32 Coefs[2]; /* pointer to the filter coefficients */
+} QPD_State_t, *pQPD_State_t;
 
-typedef struct
-{
+typedef struct {
     /* pointer to the delayed samples (data of 32 bits)   */
-    LVM_FLOAT                            *pDelay;
-    LVM_FLOAT                            Coefs[2];       /* pointer to the filter coefficients */
-}QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
+    LVM_FLOAT* pDelay;
+    LVM_FLOAT Coefs[2]; /* pointer to the filter coefficients */
+} QPD_FLOAT_State_t, *pQPD_FLOAT_State_t;
 
-typedef struct
-{
-    LVM_INT32 KP;    /*should store a0*/
-    LVM_INT32 KM;    /*should store b2*/
+typedef struct {
+    LVM_INT32 KP; /*should store a0*/
+    LVM_INT32 KM; /*should store b2*/
 
 } QPD_C32_Coefs, *PQPD_C32_Coefs;
 
-typedef struct
-{
-    LVM_FLOAT KP;    /*should store a0*/
-    LVM_FLOAT KM;    /*should store b2*/
+typedef struct {
+    LVM_FLOAT KP; /*should store a0*/
+    LVM_FLOAT KM; /*should store b2*/
 
 } QPD_FLOAT_Coefs, *PQPD_FLOAT_Coefs;
 
-typedef struct
-{
+typedef struct {
     LVM_INT32 Storage[1];
 
 } QPD_Taps_t, *pQPD_Taps_t;
 
-typedef struct
-{
+typedef struct {
     LVM_FLOAT Storage[1];
 
 } QPD_FLOAT_Taps_t, *pQPD_FLOAT_Taps_t;
@@ -72,15 +66,11 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Process (            void                               *hInstance,
-                                    LVM_INT16                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex);
+void LVPSA_QPD_Process(void* hInstance, LVM_INT16* pInSamps, LVM_INT16 numSamples,
+                       LVM_INT16 BandIndex);
 
-void LVPSA_QPD_Process_Float (      void                               *hInstance,
-                                    LVM_FLOAT                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex);
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+                             LVM_INT16 BandIndex);
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_QPD_Init                                              */
@@ -96,13 +86,9 @@
 /* RETURNS:     void                                                                */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Init (   QPD_State_t       *pInstance,
-                        QPD_Taps_t        *pTaps,
-                        QPD_C32_Coefs     *pCoef     );
+void LVPSA_QPD_Init(QPD_State_t* pInstance, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef);
 
-void LVPSA_QPD_Init_Float (   QPD_FLOAT_State_t       *pInstance,
-                              QPD_FLOAT_Taps_t        *pTaps,
-                              QPD_FLOAT_Coefs         *pCoef     );
+void LVPSA_QPD_Init_Float(QPD_FLOAT_State_t* pInstance, QPD_FLOAT_Taps_t* pTaps,
+                          QPD_FLOAT_Coefs* pCoef);
 
 #endif
-
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
index 2dbf694..c5023c3 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp
@@ -32,20 +32,15 @@
 /* RETURNS:     void                                                                */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Init (   pQPD_State_t       pQPD_State,
-                        QPD_Taps_t        *pTaps,
-                        QPD_C32_Coefs     *pCoef     )
-{
-    pQPD_State->pDelay  = pTaps->Storage;
-    pQPD_State->Coefs[0]  = pCoef->KP;
-    pQPD_State->Coefs[1]  = pCoef->KM;
+void LVPSA_QPD_Init(pQPD_State_t pQPD_State, QPD_Taps_t* pTaps, QPD_C32_Coefs* pCoef) {
+    pQPD_State->pDelay = pTaps->Storage;
+    pQPD_State->Coefs[0] = pCoef->KP;
+    pQPD_State->Coefs[1] = pCoef->KM;
 }
 
-void LVPSA_QPD_Init_Float (   pQPD_FLOAT_State_t       pQPD_State,
-                              QPD_FLOAT_Taps_t         *pTaps,
-                              QPD_FLOAT_Coefs          *pCoef     )
-{
-    pQPD_State->pDelay  = pTaps->Storage;
-    pQPD_State->Coefs[0]  = ((LVM_FLOAT)pCoef->KP);
-    pQPD_State->Coefs[1]  = ((LVM_FLOAT)pCoef->KM);
+void LVPSA_QPD_Init_Float(pQPD_FLOAT_State_t pQPD_State, QPD_FLOAT_Taps_t* pTaps,
+                          QPD_FLOAT_Coefs* pCoef) {
+    pQPD_State->pDelay = pTaps->Storage;
+    pQPD_State->Coefs[0] = ((LVM_FLOAT)pCoef->KP);
+    pQPD_State->Coefs[1] = ((LVM_FLOAT)pCoef->KM);
 }
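
The init functions only bind the detector state to externally owned taps storage and copy the two coefficients. A minimal sketch of that pattern (generic names, not the library types):

#include <cstdio>

struct Taps { float storage[1]; };     // delay line owned outside the state
struct Coefs { float kp, km; };
struct State {
    float* delay;
    float coefs[2];
};

static void initState(State* s, Taps* taps, const Coefs* c) {
    s->delay = taps->storage;          // state aliases the taps buffer
    s->coefs[0] = c->kp;
    s->coefs[1] = c->km;
}

int main() {
    Taps taps{};
    Coefs c{0.25f, -0.125f};
    State s{};
    initState(&s, &taps, &c);
    printf("%f %f\n", s.coefs[0], s.coefs[1]);
    return 0;
}
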
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
index 8805420..e301cf9 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp
@@ -34,15 +34,11 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_WritePeak(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                            LVM_UINT8                 **ppWrite,
-                            LVM_INT16                 BandIndex,
-                            LVM_INT16                 Value   );
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+                         LVM_INT16 Value);
 
-void LVPSA_QPD_WritePeak_Float(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                                  LVM_UINT8             **ppWrite,
-                                  LVM_INT16               BandIndex,
-                                  LVM_FLOAT               Value   );
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+                               LVM_INT16 BandIndex, LVM_FLOAT Value);
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:            LVPSA_QPD_Process                                           */
@@ -56,38 +52,34 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_Process_Float (      void                               *hInstance,
-                                    LVM_FLOAT                          *pInSamps,
-                                    LVM_INT16                           numSamples,
-                                    LVM_INT16                           BandIndex)
-{
-
+void LVPSA_QPD_Process_Float(void* hInstance, LVM_FLOAT* pInSamps, LVM_INT16 numSamples,
+                             LVM_INT16 BandIndex) {
     /******************************************************************************
        PARAMETERS
     *******************************************************************************/
-    LVPSA_InstancePr_t     *pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
-    QPD_FLOAT_State_t *pQPDState =  (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
+    LVPSA_InstancePr_t* pLVPSA_Inst = (LVPSA_InstancePr_t*)hInstance;
+    QPD_FLOAT_State_t* pQPDState = (QPD_FLOAT_State_t*)&pLVPSA_Inst->pQPD_States[BandIndex];
 
     /* Pointer to taps */
-    LVM_FLOAT* pDelay  = pQPDState->pDelay;
+    LVM_FLOAT* pDelay = pQPDState->pDelay;
 
     /* Parameters needed during quasi peak calculations */
-    LVM_FLOAT   X0;
-    LVM_FLOAT   temp,temp2;
-    LVM_FLOAT   accu;
-    LVM_FLOAT   Xg0;
-    LVM_FLOAT   D0;
-    LVM_FLOAT   V0 = (LVM_FLOAT)(*pDelay);
+    LVM_FLOAT X0;
+    LVM_FLOAT temp, temp2;
+    LVM_FLOAT accu;
+    LVM_FLOAT Xg0;
+    LVM_FLOAT D0;
+    LVM_FLOAT V0 = (LVM_FLOAT)(*pDelay);
 
     /* Filter's coef */
-    LVM_FLOAT   Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
-    LVM_FLOAT   Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
+    LVM_FLOAT Kp = ((LVM_FLOAT)(pQPDState->Coefs[0]));
+    LVM_FLOAT Km = ((LVM_FLOAT)(pQPDState->Coefs[1]));
 
-    LVM_INT16   ii = numSamples;
+    LVM_INT16 ii = numSamples;
 
-    LVM_UINT8  *pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
-    LVM_INT32   BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
-    LVM_UINT16  DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
+    LVM_UINT8* pWrite = pLVPSA_Inst->pSpectralDataBufferWritePointer;
+    LVM_INT32 BufferUpdateSamplesCount = pLVPSA_Inst->BufferUpdateSamplesCount;
+    LVM_UINT16 DownSamplingFactor = pLVPSA_Inst->DownSamplingFactor;
 
     /******************************************************************************
        INITIALIZATION
@@ -97,29 +89,27 @@
     /* Correct also the number of samples */
     ii = (LVM_INT16)(ii - (LVM_INT16)pLVPSA_Inst->DownSamplingCount);
 
-    while (ii > 0)
-    {
+    while (ii > 0) {
         /* Apply post gain */
         /* - 1 to compensate scaling in process function*/
         X0 = (*pInSamps) * pLVPSA_Inst->pPostGains[BandIndex];
         pInSamps = pInSamps + DownSamplingFactor;
 
         /* Saturate and take absolute value */
-        if(X0 < 0.0f)
-            X0 = -X0;
+        if (X0 < 0.0f) X0 = -X0;
         if (X0 > 1.0f)
             Xg0 = 1.0f;
         else
-            Xg0 =X0;
+            Xg0 = X0;
 
         /* Quasi peak filter calculation */
-        D0  = Xg0 - V0;
+        D0 = Xg0 - V0;
 
         temp2 = D0;
 
         accu = temp2 * Kp;
-        D0    = D0 / 2.0f;
-        if (D0 < 0.0f){
+        D0 = D0 / 2.0f;
+        if (D0 < 0.0f) {
             D0 = -D0;
         }
 
@@ -130,17 +120,13 @@
 
         if (accu > 1.0f)
             accu = 1.0f;
-        else if(accu < 0.0f)
+        else if (accu < 0.0f)
             accu = 0.0f;
 
         V0 = accu;
 
-        if(((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor))
-        {
-            LVPSA_QPD_WritePeak_Float( pLVPSA_Inst,
-                                       &pWrite,
-                                       BandIndex,
-                                       V0);
+        if (((pLVPSA_Inst->nSamplesBufferUpdate - BufferUpdateSamplesCount) < DownSamplingFactor)) {
+            LVPSA_QPD_WritePeak_Float(pLVPSA_Inst, &pWrite, BandIndex, V0);
 
             BufferUpdateSamplesCount -= pLVPSA_Inst->nSamplesBufferUpdate;
             pLVPSA_Inst->LocalSamplesCount = (LVM_UINT16)(numSamples - ii);
@@ -148,7 +134,6 @@
         BufferUpdateSamplesCount += DownSamplingFactor;
 
         ii = (LVM_INT16)(ii - DownSamplingFactor);
-
     }
 
     /* Store last taps in memory */
@@ -156,20 +141,15 @@
 
     /* If this is the last call to the function after last band processing,
        update the parameters. */
-    if(BandIndex == (pLVPSA_Inst->nRelevantFilters - 1))
-    {
+    if (BandIndex == (pLVPSA_Inst->nRelevantFilters - 1)) {
         pLVPSA_Inst->pSpectralDataBufferWritePointer = pWrite;
         /* Adjustment for 11025Hz input, 220,5 is normally
            the exact number of samples for 20ms.*/
-        if((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite)&&
-                                        (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025))
-        {
-            if(pLVPSA_Inst->nSamplesBufferUpdate == 220)
-            {
+        if ((pLVPSA_Inst->pSpectralDataBufferWritePointer != pWrite) &&
+            (pLVPSA_Inst->CurrentParams.Fs == LVM_FS_11025)) {
+            if (pLVPSA_Inst->nSamplesBufferUpdate == 220) {
                 pLVPSA_Inst->nSamplesBufferUpdate = 221;
-            }
-            else
-            {
+            } else {
                 pLVPSA_Inst->nSamplesBufferUpdate = 220;
             }
         }
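
At 11025 Hz a 20 ms block is exactly 220.5 samples, so the update count toggles between 220 and 221 to keep the average period at 20 ms. A small sketch of that toggle's effect:

#include <cstdio>

int main() {
    int nSamplesBufferUpdate = 220;
    long total = 0;
    for (int block = 0; block < 10; ++block) {
        total += nSamplesBufferUpdate;
        nSamplesBufferUpdate = (nSamplesBufferUpdate == 220) ? 221 : 220;  // alternate, as above
    }
    printf("average block = %.1f samples\n", total / 10.0);  // 220.5
    return 0;
}
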
@@ -194,37 +174,29 @@
 /* RETURNS:             void                                                        */
 /*                                                                                  */
 /************************************************************************************/
-void LVPSA_QPD_WritePeak(   pLVPSA_InstancePr_t       pLVPSA_Inst,
-                            LVM_UINT8             **ppWrite,
-                            LVM_INT16               BandIndex,
-                            LVM_INT16               Value   )
-{
-    LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite, LVM_INT16 BandIndex,
+                         LVM_INT16 Value) {
+    LVM_UINT8* pWrite = *ppWrite;
 
     /* Write the value and update the write pointer */
-    *(pWrite + BandIndex) = (LVM_UINT8)(Value>>7);
+    *(pWrite + BandIndex) = (LVM_UINT8)(Value >> 7);
     pWrite += pLVPSA_Inst->nBands;
-    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength))
-    {
+    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+                   pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
         pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
     }
 
     *ppWrite = pWrite;
-
 }
-void LVPSA_QPD_WritePeak_Float(   pLVPSA_InstancePr_t     pLVPSA_Inst,
-                                  LVM_UINT8               **ppWrite,
-                                  LVM_INT16               BandIndex,
-                                  LVM_FLOAT               Value   )
-{
-    LVM_UINT8 *pWrite = *ppWrite;
+void LVPSA_QPD_WritePeak_Float(pLVPSA_InstancePr_t pLVPSA_Inst, LVM_UINT8** ppWrite,
+                               LVM_INT16 BandIndex, LVM_FLOAT Value) {
+    LVM_UINT8* pWrite = *ppWrite;
 
     /* Write the value and update the write pointer */
     *(pWrite + BandIndex) = (LVM_UINT8)(Value * 256);
     pWrite += pLVPSA_Inst->nBands;
-    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart + pLVPSA_Inst->nBands * \
-                                    pLVPSA_Inst->SpectralDataBufferLength))
-    {
+    if (pWrite == (pLVPSA_Inst->pSpectralDataBufferStart +
+                   pLVPSA_Inst->nBands * pLVPSA_Inst->SpectralDataBufferLength)) {
         pWrite = pLVPSA_Inst->pSpectralDataBufferStart;
     }
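
LVPSA_QPD_WritePeak_Float quantizes the peak level to 8 bits, stores it in the band's slot, then advances the write pointer by one snapshot (nBands bytes), wrapping at the end of the buffer. A standalone sketch with illustrative sizes:

#include <cstdint>
#include <vector>

int main() {
    const int nBands = 4, length = 8;
    std::vector<uint8_t> buffer(nBands * length);
    uint8_t* start = buffer.data();
    uint8_t* write = start + (length - 1) * nBands;   // last snapshot slot

    float value = 0.75f;                              // peak level in [0, 1]
    int band = 2;
    write[band] = (uint8_t)(value * 256);             // 8-bit quantization, as in the patch
    write += nBands;
    if (write == start + nBands * length) {
        write = start;                                // wrap to the first snapshot
    }
    return 0;
}
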
 
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
index 9f0aa02..4fbff6f 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.cpp
@@ -34,19 +34,9 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVPSA_SampleRateTab[] = {   8000,                    /* 8kS/s  */
-                                                11025,
-                                                12000,
-                                                16000,
-                                                22050,
-                                                24000,
-                                                32000,
-                                                44100,
-                                                48000,
-                                                88200,
-                                                96000,
-                                               176400,
-                                               192000};                  /* 192kS/s */
+const LVM_UINT32 LVPSA_SampleRateTab[] = {8000, /* 8kS/s  */
+                                          11025, 12000, 16000, 22050, 24000,  32000,
+                                          44100, 48000, 88200, 96000, 176400, 192000}; /* 192kS/s */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -58,20 +48,11 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-const LVM_UINT32    LVPSA_SampleRateInvTab[] = {    268435,                    /* 8kS/s  */
-                                                    194783,
-                                                    178957,
-                                                    134218,
-                                                    97391,
-                                                    89478,
-                                                    67109,
-                                                    48696,
-                                                    44739
-                                                    ,24348
-                                                    ,22369
-                                                    ,12174
-                                                    ,11185                  /* 192kS/s */
-                                               };
+const LVM_UINT32 LVPSA_SampleRateInvTab[] = {
+        268435, /* 8kS/s  */
+        194783, 178957, 134218, 97391, 89478, 67109,
+        48696,  44739,  24348,  22369, 12174, 11185 /* 192kS/s */
+};
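
The inverse-rate entries appear to be round(2^31 / Fs), consistent with LVPSA_FsInvertShift being 31; a quick check (an assumption about the derivation, not taken from the sources):

#include <cstdint>
#include <cstdio>

int main() {
    const uint32_t rates[] = {8000, 11025, 48000, 192000};
    for (uint32_t fs : rates) {
        uint32_t inv = (uint32_t)(((1LL << 31) + fs / 2) / fs);  // rounded 2^31 / Fs
        printf("%6u -> %u\n", fs, inv);                          // 268435, 194783, 44739, 11185
    }
    return 0;
}
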
 
 /************************************************************************************/
 /*                                                                                  */
@@ -83,20 +64,10 @@
  * Table for converting between the enumerated type and the number of samples
  * during 20ms
  */
-const LVM_UINT16    LVPSA_nSamplesBufferUpdate[]  = {   160,                   /* 8kS/s  */
-                                                        220,
-                                                        240,
-                                                        320,
-                                                        441,
-                                                        480,
-                                                        640,
-                                                        882,
-                                                        960
-                                                        ,1764
-                                                        ,1920
-                                                        ,3528
-                                                        ,3840                  /* 192kS/s */
-                                                    };
+const LVM_UINT16 LVPSA_nSamplesBufferUpdate[] = {
+        160,                                                           /* 8kS/s  */
+        220, 240, 320, 441, 480, 640, 882, 960, 1764, 1920, 3528, 3840 /* 192kS/s */
+};
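
These entries are Fs * 20 ms with integer truncation, which is why 11025 Hz maps to 220 rather than 220.5 (the process code compensates by alternating 220 and 221). A quick check:

#include <cstdio>

int main() {
    const unsigned rates[] = {8000, 11025, 44100, 192000};
    for (unsigned fs : rates) {
        printf("%6u -> %u samples per 20 ms\n", fs, fs * 20 / 1000);
    }
    // Prints 160, 220, 882, 3840, matching LVPSA_nSamplesBufferUpdate above.
    return 0;
}
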
 /************************************************************************************/
 /*                                                                                  */
 /*  Down sampling factors                                                           */
@@ -106,20 +77,25 @@
 /*
  * Table for converting between the enumerated type and the down sampling factor
  */
-const LVM_UINT16    LVPSA_DownSamplingFactor[]  = {     5,                    /* 8000  S/s  */
-                                                        7,                    /* 11025 S/s  */
-                                                        8,                    /* 12000 S/s  */
-                                                        10,                   /* 16000 S/s  */
-                                                        15,                   /* 22050 S/s  */
-                                                        16,                   /* 24000 S/s  */
-                                                        21,                   /* 32000 S/s  */
-                                                        30,                   /* 44100 S/s  */
-                                                        32                    /* 48000 S/s  */
-                                                       ,60                   /* 88200 S/s  */
-                                                       ,64                   /* 96000 S/s  */
-                                                       ,120                  /* 176400 S/s  */
-                                                       ,128                  /*192000 S/s  */
-                                                  };
+const LVM_UINT16 LVPSA_DownSamplingFactor[] = {
+        5,  /* 8000  S/s  */
+        7,  /* 11025 S/s  */
+        8,  /* 12000 S/s  */
+        10, /* 16000 S/s  */
+        15, /* 22050 S/s  */
+        16, /* 24000 S/s  */
+        21, /* 32000 S/s  */
+        30, /* 44100 S/s  */
+        32  /* 48000 S/s  */
+        ,
+        60 /* 88200 S/s  */
+        ,
+        64 /* 96000 S/s  */
+        ,
+        120 /* 176400 S/s  */
+        ,
+        128 /*192000 S/s  */
+};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -130,102 +106,34 @@
 /*
  * Table for 2 * Pi / Fs
  */
-const LVM_INT16     LVPSA_TwoPiOnFsTable[] = {  26354,      /* 8kS/s */
-                                                19123,
-                                                17569,
-                                                13177,
-                                                 9561,
-                                                 8785,
-                                                 6588,
-                                                 4781,
-                                                 4392
-                                                ,2390
-                                                ,2196
-                                                ,1195
-                                                ,1098    /* 192kS/s */
-                                             };
+const LVM_INT16 LVPSA_TwoPiOnFsTable[] = {
+        26354,                                                                    /* 8kS/s */
+        19123, 17569, 13177, 9561, 8785, 6588, 4781, 4392, 2390, 2196, 1195, 1098 /* 192kS/s */
+};
 
-const LVM_FLOAT     LVPSA_Float_TwoPiOnFsTable[] = {  0.8042847f,      /* 8kS/s */
-                                                      0.5836054f,
-                                                      0.5361796f,
-                                                      0.4021423f,
-                                                      0.2917874f,
-                                                      0.2681051f,
-                                                      0.2010559f,
-                                                      0.1459089f,
-                                                      0.1340372f
-                                                     ,0.0729476f
-                                                     ,0.0670186f
-                                                     ,0.0364738f
-                                                     ,0.0335093f    /* 192kS/s */
-                                                   };
+const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[] = {
+        0.8042847f, /* 8kS/s */
+        0.5836054f, 0.5361796f, 0.4021423f, 0.2917874f, 0.2681051f, 0.2010559f,
+        0.1459089f, 0.1340372f, 0.0729476f, 0.0670186f, 0.0364738f, 0.0335093f /* 192kS/s */
+};
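
The fixed-point entries appear to be round(2*pi/Fs * 2^25), matching LVPSA_FREQSHIFT = 25; a quick check (an assumption about the derivation, not taken from the sources):

#include <cmath>
#include <cstdio>

int main() {
    const double kTwoPi = 6.283185307179586;
    const int rates[] = {8000, 48000, 192000};
    for (int fs : rates) {
        printf("%6d -> %ld\n", fs, std::lround(kTwoPi / fs * (1 << 25)));
    }
    // Prints 26354, 4392, 1098, matching the fixed-point table above.
    return 0;
}
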
 
 /*
  * Gain table
  */
-const LVM_INT16     LVPSA_GainTable[] = {   364,          /* -15dB gain */
-                                            408,
-                                            458,
-                                            514,
-                                            577,
-                                            647,
-                                            726,
-                                            815,
-                                            914,
-                                            1026,
-                                            1151,
-                                            1292,
-                                            1449,
-                                            1626,
-                                            1825,
-                                            2048,         /* 0dB gain */
-                                            2297,
-                                            2578,
-                                            2892,
-                                            3245,
-                                            3641,
-                                            4096,
-                                            4584,
-                                            5144,
-                                            5772,
-                                            6476,
-                                            7266,
-                                            8153,
-                                            9148,
-                                            10264,
-                                            11576};        /* +15dB gain */
+const LVM_INT16 LVPSA_GainTable[] = {364, /* -15dB gain */
+                                     408,  458,  514,  577,  647,  726,   815,  914,
+                                     1026, 1151, 1292, 1449, 1626, 1825,  2048, /* 0dB gain */
+                                     2297, 2578, 2892, 3245, 3641, 4096,  4584, 5144,
+                                     5772, 6476, 7266, 8153, 9148, 10264, 11576}; /* +15dB gain */
 
-const LVM_FLOAT  LVPSA_Float_GainTable[]={  0.177734375f,          /* -15dB gain */
-                                            0.199218750f,
-                                            0.223632812f,
-                                            0.250976562f,
-                                            0.281738281f,
-                                            0.315917968f,
-                                            0.354492187f,
-                                            0.397949218f,
-                                            0.446289062f,
-                                            0.500976562f,
-                                            0.562011718f,
-                                            0.630859375f,
-                                            0.707519531f,
-                                            0.793945312f,
-                                            0.891113281f,
-                                            1.000000000f,         /* 0dB gain */
-                                            1.121582031f,
-                                            1.258789062f,
-                                            1.412109375f,
-                                            1.584472656f,
-                                            1.777832031f,
-                                            2.000000000f,
-                                            2.238281250f,
-                                            2.511718750f,
-                                            2.818359375f,
-                                            3.162109375f,
-                                            3.547851562f,
-                                            3.980957031f,
-                                            4.466796875f,
-                                            5.011718750f,
-                                            5.652343750f};        /* +15dB gain */
+const LVM_FLOAT LVPSA_Float_GainTable[] = {
+        0.177734375f, /* -15dB gain */
+        0.199218750f, 0.223632812f, 0.250976562f, 0.281738281f, 0.315917968f,
+        0.354492187f, 0.397949218f, 0.446289062f, 0.500976562f, 0.562011718f,
+        0.630859375f, 0.707519531f, 0.793945312f, 0.891113281f, 1.000000000f, /* 0dB gain */
+        1.121582031f, 1.258789062f, 1.412109375f, 1.584472656f, 1.777832031f,
+        2.000000000f, 2.238281250f, 2.511718750f, 2.818359375f, 3.162109375f,
+        3.547851562f, 3.980957031f, 4.466796875f, 5.011718750f, 5.652343750f}; /* +15dB gain */
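For orientation: the 31 entries above run from -15 dB to +15 dB in nominal 1 dB steps, so adjacent entries differ by roughly 10^(1/20) ≈ 1.122. The integer table uses 2048 as unity (0 dB), and the first step checks out: 2048 × 1.122 ≈ 2298, matching the tabulated 2297 to within rounding. The float table is the same data normalised to 1.0 at 0 dB (for example 2297 / 2048 = 1.121582031 and 11576 / 2048 = 5.652343750, which are exactly the float entries).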
 /************************************************************************************/
 /*                                                                                  */
 /*  Cosine polynomial coefficients                                                  */
@@ -241,20 +149,20 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-const LVM_INT16     LVPSA_CosCoef[] = { 3,                             /* Shifts */
-                                        4096,                          /* a0 */
-                                        -36,                           /* a1 */
-                                        -19725,                        /* a2 */
-                                        -2671,                         /* a3 */
-                                        23730,                         /* a4 */
-                                        -9490};                        /* a5 */
-const LVM_FLOAT     LVPSA_Float_CosCoef[] = { 3,                             /* Shifts */
-                                              0.1250038f,                          /* a0 */
-                                              -0.0010986f,                           /* a1 */
-                                              -0.6019775f,                        /* a2 */
-                                              -0.0815149f,                         /* a3 */
-                                              0.7242042f,                         /* a4 */
-                                              -0.2896206f};                        /* a5 */
+const LVM_INT16 LVPSA_CosCoef[] = {3,                  /* Shifts */
+                                   4096,               /* a0 */
+                                   -36,                /* a1 */
+                                   -19725,             /* a2 */
+                                   -2671,              /* a3 */
+                                   23730,              /* a4 */
+                                   -9490};             /* a5 */
+const LVM_FLOAT LVPSA_Float_CosCoef[] = {3,            /* Shifts */
+                                         0.1250038f,   /* a0 */
+                                         -0.0010986f,  /* a1 */
+                                         -0.6019775f,  /* a2 */
+                                         -0.0815149f,  /* a3 */
+                                         0.7242042f,   /* a4 */
+                                         -0.2896206f}; /* a5 */
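As a quick check on the float coefficient set above, the table can be read as a power-of-two scale ("Shifts", here 3) followed by six polynomial coefficients in x = angle / Pi; evaluated that way it reproduces cos() over 0..Pi to within roughly 1e-3. A minimal standalone sketch under that assumed interpretation (the table is copied locally so the snippet builds without the LVM headers; this is not the library's own evaluation routine):

#include <cmath>
#include <cstdio>

// Local copy of LVPSA_Float_CosCoef: {Shifts, a0 .. a5} (assumed layout).
static const float kCosCoef[7] = {3.0f,        0.1250038f, -0.0010986f, -0.6019775f,
                                  -0.0815149f, 0.7242042f, -0.2896206f};

// Assumed evaluation: 2^Shifts * (a0 + a1*x + ... + a5*x^5) with x = angle / Pi.
static float ApproxCos(float angle) {
    const float kPi = 3.14159265f;
    const float x = angle / kPi;
    float acc = 0.0f;
    for (int i = 6; i >= 1; --i) {  // Horner form; unwinds to a0 + a1*x + ... + a5*x^5
        acc = acc * x + kCosCoef[i];
    }
    return std::ldexp(acc, (int)kCosCoef[0]);  // apply the 2^Shifts scale (here x8)
}

int main() {
    const float kPi = 3.14159265f;
    for (int n = 0; n <= 8; ++n) {
        const float a = n * kPi / 8.0f;
        std::printf("angle %.4f  poly %+.5f  std::cos %+.5f\n", a, ApproxCos(a), std::cos(a));
    }
    return 0;
}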
 /*
  * Coefficients for calculating the cosine error with the equation:
  *
@@ -269,101 +177,100 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-const LVM_INT16     LVPSA_DPCosCoef[] = {   1,                           /* Shifts */
-                                            0,                           /* a0 */
-                                            -6,                          /* a1 */
-                                            16586,                       /* a2 */
-                                            -44};                        /* a3 */
-const LVM_FLOAT    LVPSA_Float_DPCosCoef[] = {1.0f,                        /* Shifts */
-                                              0.0f,                        /* a0 */
-                                              -0.00008311f,                 /* a1 */
-                                              0.50617999f,                 /* a2 */
-                                              -0.00134281f};                /* a3 */
+const LVM_INT16 LVPSA_DPCosCoef[] = {1,                   /* Shifts */
+                                     0,                   /* a0 */
+                                     -6,                  /* a1 */
+                                     16586,               /* a2 */
+                                     -44};                /* a3 */
+const LVM_FLOAT LVPSA_Float_DPCosCoef[] = {1.0f,          /* Shifts */
+                                           0.0f,          /* a0 */
+                                           -0.00008311f,  /* a1 */
+                                           0.50617999f,   /* a2 */
+                                           -0.00134281f}; /* a3 */
 /************************************************************************************/
 /*                                                                                  */
 /*  Quasi peak filter coefficients table                                            */
 /*                                                                                  */
 /************************************************************************************/
-const QPD_C32_Coefs     LVPSA_QPD_Coefs[] = {
-                                         /* 8kS/s  */    /* LVPSA_SPEED_LOW   */
-                                         {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
-                                         {(LVM_INT32)0x80D242E7,0x00CED11D},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},
-                                         {(LVM_INT32)0x80CEFD2B,0x00CB9B17},
-                                         {(LVM_INT32)0x80E13739,0x00DD7CD3},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},
-                                         {(LVM_INT32)0x80D94BAF,0x00D5B7E7},
-                                         {(LVM_INT32)0x80E13739,0x00DD7CD3},
-                                         {(LVM_INT32)0x80DCBAF5,0x00D91679},  /* 48kS/s */
+const QPD_C32_Coefs LVPSA_QPD_Coefs[] = {
+        /* 8kS/s  */ /* LVPSA_SPEED_LOW   */
+        {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+        {(LVM_INT32)0x80D242E7, 0x00CED11D},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+        {(LVM_INT32)0x80CEFD2B, 0x00CB9B17},
+        {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679},
+        {(LVM_INT32)0x80D94BAF, 0x00D5B7E7},
+        {(LVM_INT32)0x80E13739, 0x00DD7CD3},
+        {(LVM_INT32)0x80DCBAF5, 0x00D91679}, /* 48kS/s */
 
-                                         /* 8kS/s  */    /* LVPSA_SPEED_MEDIUM */
-                                         {(LVM_INT32)0x8587513D,0x055C22CF},
-                                         {(LVM_INT32)0x859D2967,0x0570F007},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},
-                                         {(LVM_INT32)0x8587513D,0x055C22CF},
-                                         {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},
-                                         {(LVM_INT32)0x85CC1018,0x059D8F69},
-                                         {(LVM_INT32)0x8600C7B9,0x05CFA6CF},
-                                         {(LVM_INT32)0x85E2EFAC,0x05B34D79},  /* 48kS/s */
+        /* 8kS/s  */ /* LVPSA_SPEED_MEDIUM */
+        {(LVM_INT32)0x8587513D, 0x055C22CF},
+        {(LVM_INT32)0x859D2967, 0x0570F007},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+        {(LVM_INT32)0x8587513D, 0x055C22CF},
+        {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79},
+        {(LVM_INT32)0x85CC1018, 0x059D8F69},
+        {(LVM_INT32)0x8600C7B9, 0x05CFA6CF},
+        {(LVM_INT32)0x85E2EFAC, 0x05B34D79}, /* 48kS/s */
 
-                                         /* 8kS/s  */   /* LVPSA_SPEED_HIGH    */
-                                         {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
-                                         {(LVM_INT32)0xA18475F0,0x1D2C83A2},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E},
-                                         {(LVM_INT32)0xA115EA7A,0x1CDB3F5C},
-                                         {(LVM_INT32)0xA375B2C6,0x1E943BBC},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E},
-                                         {(LVM_INT32)0xA26FF6BD,0x1DD81530},
-                                         {(LVM_INT32)0xA375B2C6,0x1E943BBC},
-                                         {(LVM_INT32)0xA2E1E950,0x1E2A532E}}; /* 48kS/s */
+        /* 8kS/s  */ /* LVPSA_SPEED_HIGH    */
+        {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+        {(LVM_INT32)0xA18475F0, 0x1D2C83A2},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+        {(LVM_INT32)0xA115EA7A, 0x1CDB3F5C},
+        {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E},
+        {(LVM_INT32)0xA26FF6BD, 0x1DD81530},
+        {(LVM_INT32)0xA375B2C6, 0x1E943BBC},
+        {(LVM_INT32)0xA2E1E950, 0x1E2A532E}}; /* 48kS/s */
 
-const QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[] = {
+const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[] = {
 
-                                         /* 8kS/s  */    /* LVPSA_SPEED_LOW   */
-                                         {-0.9936831989325583f,0.0062135565094650f},
-                                         {-0.9935833332128823f,0.0063115493394434f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9936831989325583f,0.0062135565094650f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9933686633594334f,0.0065221670083702f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                          /* 48kS/s */
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         {-0.9931269618682563f,0.0067592649720609f},
-                                         {-0.9932638457976282f,0.0066249934025109f},
-                                         /* 8kS/s  */    /* LVPSA_SPEED_MEDIUM      */
-                                         {-0.9568079425953329f,0.0418742666952312f},
-                                         {-0.9561413046903908f,0.0425090822391212f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9568079425953329f,0.0418742666952312f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9547099955379963f,0.0438708555884659f},
-                                          //{0x8600C7B9,0x05CFA6CF},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                          /* 48kS/s */
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                         {-0.9531011912040412f,0.0453995238058269f},
-                                         {-0.9540119562298059f,0.0445343819446862f},
-                                          /* 8kS/s  */   /* LVPSA_SPEED_HIGH      */
-                                         {-0.7415186790749431f,0.2254409026354551f},
-                                         {-0.7381451204419136f,0.2279209652915597f},
-                                         {-0.7274807319045067f,0.2356666540727019f},
-                                         {-0.7415186790749431f,0.2254409026354551f},
-                                         {-0.7229706319049001f,0.2388987224549055f},
-                                         {-0.7274807319045067f,0.2356666540727019f},
-                                         {-0.7309581353329122f,0.2331568226218224f},
-                                         {-0.7229706319049001f,0.2388987224549055f},
-                                           /* 48kS/s */
-                                         {-0.7274807319045067f,0.2356666540727019f}
-                                        ,{-0.7229706319049001f,0.2388987224549055f}
-                                        ,{-0.7274807319045067f,0.2356666540727019f}
-                                        ,{-0.7229706319049001f,0.2388987224549055f}
-                                        ,{-0.7274807319045067f,0.2356666540727019f}
-                                        };
+        /* 8kS/s  */ /* LVPSA_SPEED_LOW   */
+        {-0.9936831989325583f, 0.0062135565094650f},
+        {-0.9935833332128823f, 0.0063115493394434f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9936831989325583f, 0.0062135565094650f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9933686633594334f, 0.0065221670083702f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        /* 48kS/s */
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        {-0.9931269618682563f, 0.0067592649720609f},
+        {-0.9932638457976282f, 0.0066249934025109f},
+        /* 8kS/s  */ /* LVPSA_SPEED_MEDIUM      */
+        {-0.9568079425953329f, 0.0418742666952312f},
+        {-0.9561413046903908f, 0.0425090822391212f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9568079425953329f, 0.0418742666952312f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9547099955379963f, 0.0438708555884659f},
+        //{0x8600C7B9,0x05CFA6CF},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        /* 48kS/s */
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        {-0.9531011912040412f, 0.0453995238058269f},
+        {-0.9540119562298059f, 0.0445343819446862f},
+        /* 8kS/s  */ /* LVPSA_SPEED_HIGH      */
+        {-0.7415186790749431f, 0.2254409026354551f},
+        {-0.7381451204419136f, 0.2279209652915597f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7415186790749431f, 0.2254409026354551f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7309581353329122f, 0.2331568226218224f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        /* 48kS/s */
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f},
+        {-0.7229706319049001f, 0.2388987224549055f},
+        {-0.7274807319045067f, 0.2356666540727019f}};
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
index 65872fe..c771dad 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Tables.h
@@ -27,7 +27,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVPSA_SampleRateTab[];
+extern const LVM_UINT32 LVPSA_SampleRateTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -39,7 +39,7 @@
  * Sample rate table for converting between the enumerated type and the actual
  * frequency
  */
-extern const LVM_UINT32    LVPSA_SampleRateInvTab[];
+extern const LVM_UINT32 LVPSA_SampleRateInvTab[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -51,7 +51,7 @@
  * Table for converting between the enumerated type and the number of samples
  * during 20ms
  */
-extern const LVM_UINT16    LVPSA_nSamplesBufferUpdate[];
+extern const LVM_UINT16 LVPSA_nSamplesBufferUpdate[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -62,7 +62,7 @@
 /*
  * Table for converting between the enumerated type and the down sampling factor
  */
-extern const LVM_UINT16    LVPSA_DownSamplingFactor[];
+extern const LVM_UINT16 LVPSA_DownSamplingFactor[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -73,14 +73,14 @@
 /*
  * Table for 2 * Pi / Fs
  */
-extern const LVM_INT16     LVPSA_TwoPiOnFsTable[];
-extern const LVM_FLOAT     LVPSA_Float_TwoPiOnFsTable[];
+extern const LVM_INT16 LVPSA_TwoPiOnFsTable[];
+extern const LVM_FLOAT LVPSA_Float_TwoPiOnFsTable[];
 
 /*
  * Gain table
  */
-extern const LVM_INT16     LVPSA_GainTable[];
-extern const LVM_FLOAT     LVPSA_Float_GainTable[];
+extern const LVM_INT16 LVPSA_GainTable[];
+extern const LVM_FLOAT LVPSA_Float_GainTable[];
 
 /************************************************************************************/
 /*                                                                                  */
@@ -97,8 +97,8 @@
  * a range of 0 to Pi. The output is in the range 32767 to -32768 representing the range
  * +1.0 to -1.0
  */
-extern const LVM_INT16     LVPSA_CosCoef[];
-extern const LVM_FLOAT     LVPSA_Float_CosCoef[];
+extern const LVM_INT16 LVPSA_CosCoef[];
+extern const LVM_FLOAT LVPSA_Float_CosCoef[];
 
 /*
  * Coefficients for calculating the cosine error with the equation:
@@ -114,15 +114,15 @@
  *
  * Cos(x) = 1.0 - CosErr(x)
  */
-extern const LVM_INT16     LVPSA_DPCosCoef[];
-extern const LVM_FLOAT    LVPSA_Float_DPCosCoef[];
+extern const LVM_INT16 LVPSA_DPCosCoef[];
+extern const LVM_FLOAT LVPSA_Float_DPCosCoef[];
 
 /************************************************************************************/
 /*                                                                                  */
 /*  Quasi peak filter coefficients table                                            */
 /*                                                                                  */
 /************************************************************************************/
-extern const QPD_C32_Coefs     LVPSA_QPD_Coefs[];
-extern const QPD_FLOAT_Coefs     LVPSA_QPD_Float_Coefs[];
+extern const QPD_C32_Coefs LVPSA_QPD_Coefs[];
+extern const QPD_FLOAT_Coefs LVPSA_QPD_Float_Coefs[];
 
 #endif /* __LVPSA_TABLES_H__ */
diff --git a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
index 0adfd1b..ffe7902 100644
--- a/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
+++ b/media/libeffects/lvm/lib/StereoWidening/lib/LVCS.h
@@ -71,21 +71,14 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory table */
-#define LVCS_MEMREGION_PERSISTENT_SLOW_DATA    0    /* Offset to the instance memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_DATA    1    /* Offset to the persistent data memory region */
-#define LVCS_MEMREGION_PERSISTENT_FAST_COEF    2    /* Offset to the persistent coefficient memory region */
-#define LVCS_MEMREGION_TEMPORARY_FAST          3    /* Offset to temporary memory region */
-#define LVCS_NR_MEMORY_REGIONS                 4    /* Number of memory regions */
-
 /* Effect Level */
-#define LVCS_EFFECT_LOW                    16384    /* Effect scaling 50% */
-#define LVCS_EFFECT_MEDIUM                 24576    /* Effect scaling 75% */
-#define LVCS_EFFECT_HIGH                   32767    /* Effect Scaling 100% */
+#define LVCS_EFFECT_LOW 16384    /* Effect scaling 50% */
+#define LVCS_EFFECT_MEDIUM 24576 /* Effect scaling 75% */
+#define LVCS_EFFECT_HIGH 32767   /* Effect Scaling 100% */
 
 /* Callback events */
-#define LVCS_EVENT_NONE                   0x0000    /* Not a valid event */
-#define LVCS_EVENT_ALGOFF                 0x0001    /* CS has completed switch off */
+#define LVCS_EVENT_NONE 0x0000   /* Not a valid event */
+#define LVCS_EVENT_ALGOFF 0x0001 /* CS has completed switch off */
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -94,70 +87,49 @@
 /****************************************************************************************/
 
 /* Instance handle */
-typedef void *LVCS_Handle_t;
+typedef void* LVCS_Handle_t;
 
 /* Operating modes */
-typedef enum
-{
-    LVCS_OFF = 0,
-    LVCS_ON  = 15,
-    LVCS_MAX = LVM_MAXENUM
-} LVCS_Modes_en;
-
-/* Memory Types */
-typedef enum
-{
-    LVCS_SCRATCH        = 0,
-    LVCS_DATA           = 1,
-    LVCS_COEFFICIENT    = 2,
-    LVCS_PERSISTENT     = 3,
-    LVCS_MEMORYTYPE_MAX = LVM_MAXENUM
-} LVCS_MemoryTypes_en;
+typedef enum { LVCS_OFF = 0, LVCS_ON = 15, LVCS_MAX = LVM_MAXENUM } LVCS_Modes_en;
 
 /* Function return status */
-typedef enum
-{
-    LVCS_SUCCESS        = 0,                        /* Successful return from a routine */
-    LVCS_ALIGNMENTERROR = 1,                        /* Memory alignment error */
-    LVCS_NULLADDRESS    = 2,                        /* NULL allocation address */
-    LVCS_TOOMANYSAMPLES = 3,                        /* Maximum block size exceeded */
-    LVCS_INVALIDBUFFER  = 4,                        /* Invalid buffer processing request */
-    LVCS_STATUSMAX      = LVM_MAXENUM
+typedef enum {
+    LVCS_SUCCESS = 0,        /* Successful return from a routine */
+    LVCS_NULLADDRESS = 1,    /* NULL allocation address */
+    LVCS_TOOMANYSAMPLES = 2, /* Maximum block size exceeded */
+    LVCS_STATUSMAX = LVM_MAXENUM
 } LVCS_ReturnStatus_en;
 
 /*
  * Source data formats
  */
-typedef enum
-{
-    LVCS_STEREO       = 0,
+typedef enum {
+    LVCS_STEREO = 0,
     LVCS_MONOINSTEREO = 1,
-    LVCS_SOURCEMAX    = LVM_MAXENUM
+    LVCS_SOURCEMAX = LVM_MAXENUM
 } LVCS_SourceFormat_en;
 
 /*
  * Supported output devices
  */
-typedef enum
-{
-    LVCS_HEADPHONES             = 0,
-    LVCS_EX_HEADPHONES          = 1,
-    LVCS_SPEAKERTYPE_MAX        = LVM_MAXENUM
+typedef enum {
+    LVCS_HEADPHONES = 0,
+    LVCS_EX_HEADPHONES = 1,
+    LVCS_SPEAKERTYPE_MAX = LVM_MAXENUM
 } LVCS_SpeakerType_en;
 
 /*
  * Speaker Coefficients Table
  */
-typedef struct
-{
-    void    *pTable1;
-    void    *pTable2;
-    void    *pTable3;
-    void    *pTable4;
-    void    *pTable5;
-    void    *pTable6;
-    void    *pTable7;
-    void    *pTable8;
+typedef struct {
+    void* pTable1;
+    void* pTable2;
+    void* pTable3;
+    void* pTable4;
+    void* pTable5;
+    void* pTable6;
+    void* pTable7;
+    void* pTable8;
 } LVCS_CSMS_Coef_Tables_t;
 
 /****************************************************************************************/
@@ -166,44 +138,26 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/* Memory region definition */
-typedef struct
-{
-    LVM_UINT32              Size;                   /* Region size in bytes */
-    LVCS_MemoryTypes_en     Type;                   /* Region type */
-    void                    *pBaseAddress;          /* Pointer to the region base address */
-} LVCS_MemoryRegion_t;
-
-/* Memory table containing the region definitions */
-typedef struct
-{
-    LVCS_MemoryRegion_t Region[LVCS_NR_MEMORY_REGIONS]; /* One definition for each region */
-} LVCS_MemTab_t;
-
 /* Concert Sound parameter structure */
-typedef struct
-{
-    LVCS_Modes_en           OperatingMode;          /* Algorithm mode */
-    LVCS_SpeakerType_en     SpeakerType;            /* Output device type */
-    LVCS_SourceFormat_en    SourceFormat;           /* Source data format */
-    LVM_Mode_en             CompressorMode;         /* Non-Linear Compressor Mode */
-    LVM_Fs_en               SampleRate;             /* Sampling rate */
-    LVM_INT16               EffectLevel;            /* Effect level */
-    LVM_UINT16              ReverbLevel;            /* Reverb level in % */
-#ifdef SUPPORT_MC
-    LVM_INT32               NrChannels;
-#endif
+typedef struct {
+    LVCS_Modes_en OperatingMode;       /* Algorithm mode */
+    LVCS_SpeakerType_en SpeakerType;   /* Output device type */
+    LVCS_SourceFormat_en SourceFormat; /* Source data format */
+    LVM_Mode_en CompressorMode;        /* Non-Linear Compressor Mode */
+    LVM_Fs_en SampleRate;              /* Sampling rate */
+    LVM_INT16 EffectLevel;             /* Effect level */
+    LVM_UINT16 ReverbLevel;            /* Reverb level in % */
+    LVM_INT32 NrChannels;
 } LVCS_Params_t;
 
 /* Concert Sound Capability structure */
-typedef struct
-{
+typedef struct {
     /* General parameters */
-    LVM_UINT16              MaxBlockSize;           /* Maximum block size in sample pairs */
+    LVM_UINT16 MaxBlockSize; /* Maximum block size in sample pairs */
 
     /* Callback parameters */
-    LVM_Callback            CallBack;               /* Bundle callback */
-    void                    *pBundleInstance;       /* Bundle instance handle */
+    LVM_Callback CallBack; /* Bundle callback */
+    void* pBundleInstance; /* Bundle instance handle */
 
 } LVCS_Capabilities_t;
 
@@ -213,82 +167,44 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Memory                                                 */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) it is           */
-/*  passed the default capabilities, of these only the buffer processing setting is     */
-/*  used.                                                                               */
-/*                                                                                      */
-/*  When called for memory allocation the memory base address pointers are NULL on      */
-/*  return.                                                                             */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the              */
-/*  capabilities are ignored and the memory table returns the allocated memory and      */
-/*  base addresses used during initialisation.                                          */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilites                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Succeeded                                                   */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVCS_Process function                   */
-/*                                                                                      */
-/****************************************************************************************/
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_Init                                               */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Creation and initialisation function for the Concert Sound module               */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*  pCapabilities           Pointer to the capabilities structure                   */
+/*  pScratch                Pointer to the scratch buffer                           */
+/*                                                                                  */
+/* RETURNS:                                                                         */
+/*  LVCS_Success            Initialisation succeeded                                */
+/*  LVCS_NULLADDRESS        One or more of the input pointers is NULL               */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+                               void* pScratch);
 
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t          hInstance,
-                                 LVCS_MemTab_t          *pMemoryTable,
-                                 LVCS_Capabilities_t    *pCapabilities);
-
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Init                                                   */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  Create and initialisation function for the Concert Sound module                     */
-/*                                                                                      */
-/*  This function can be used to create an algorithm instance by calling with           */
-/*  hInstance set to NULL. In this case the algorithm returns the new instance          */
-/*  handle.                                                                             */
-/*                                                                                      */
-/*  This function can be used to force a full re-initialisation of the algorithm        */
-/*  by calling with hInstance = Instance Handle. In this case the memory table          */
-/*  should be correct for the instance, this can be ensured by calling the function     */
-/*  LVCS_Memory before calling this function.                                           */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance handle                                             */
-/*  pMemoryTable            Pointer to the memory definition table                      */
-/*  pCapabilities           Pointer to the initialisation capabilities                  */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Initialisation succeeded                                    */
-/*  LVCS_AlignmentError     Instance or scratch memory on incorrect alignment           */
-/*  LVCS_NullAddress        Instance or scratch memory has a NULL pointer               */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  The instance handle is the pointer to the base address of the first memory      */
-/*      region.                                                                         */
-/*  2.  This function must not be interrupted by the LVCS_Process function              */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t            *phInstance,
-                               LVCS_MemTab_t            *pMemoryTable,
-                               LVCS_Capabilities_t      *pCapabilities);
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_DeInit                                             */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Free the memory allocated by LVCS_Init, including the instance handle           */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance);
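Taken together with LVCS_Init above, this is the whole create/use/destroy flow that replaces the removed LVCS_Memory table handling: the caller supplies capabilities and a scratch buffer, configures the effect with LVCS_Control, runs blocks through LVCS_Process, and releases everything with LVCS_DeInit. A hedged usage sketch only; the scratch sizing, the LVM_FS_44100 enumerator and the parameter values are illustrative assumptions, not values taken from this patch:

#include "LVCS.h"

// Illustrative only: pScratch must be sized per the bundle's requirements (not shown here).
LVCS_ReturnStatus_en RunConcertSoundBlock(const LVM_FLOAT* pIn, LVM_FLOAT* pOut,
                                          LVM_UINT16 NumSamples, void* pScratch) {
    LVCS_Handle_t hInstance = LVM_NULL;

    LVCS_Capabilities_t Caps;
    Caps.MaxBlockSize = NumSamples;  // maximum block size in sample pairs
    Caps.CallBack = LVM_NULL;        // no bundle callback in this sketch
    Caps.pBundleInstance = LVM_NULL;

    LVCS_ReturnStatus_en Status = LVCS_Init(&hInstance, &Caps, pScratch);
    if (Status != LVCS_SUCCESS) return Status;

    LVCS_Params_t Params;
    Params.OperatingMode = LVCS_ON;
    Params.SpeakerType = LVCS_HEADPHONES;
    Params.SourceFormat = LVCS_STEREO;
    Params.CompressorMode = LVM_MODE_ON;
    Params.SampleRate = LVM_FS_44100;         // assumed LVM_Fs_en value, defined elsewhere
    Params.EffectLevel = LVCS_EFFECT_MEDIUM;  // 75% effect scaling
    Params.ReverbLevel = 100;                 // reverb level in %
    Params.NrChannels = FCC_2;                // stereo
    Status = LVCS_Control(hInstance, &Params);

    if (Status == LVCS_SUCCESS) {
        Status = LVCS_Process(hInstance, pIn, pOut, NumSamples);
    }

    LVCS_DeInit(&hInstance);  // frees everything LVCS_Init allocated, including the handle
    return Status;
}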
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -310,8 +226,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t   hInstance,
-                                        LVCS_Params_t   *pParams);
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -332,8 +247,7 @@
 /*                                                                                      */
 /****************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t     hInstance,
-                                  LVCS_Params_t     *pParams);
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
 /****************************************************************************************/
 /*                                                                                      */
@@ -356,9 +270,7 @@
 /* NOTES:                                                                               */
 /*                                                                                      */
 /****************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t             hInstance,
-                                  const LVM_FLOAT           *pInData,
-                                  LVM_FLOAT                 *pOutData,
-                                  LVM_UINT16                NumSamples);
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                  LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif  /* LVCS_H */
+#endif /* LVCS_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
index ba152c0..f805fca 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.cpp
@@ -32,9 +32,8 @@
 /*  Function Prototypes                                                                 */
 /*                                                                                      */
 /****************************************************************************************/
-LVM_INT32 LVCS_MixerCallback(   LVCS_Handle_t   hInstance,
-                                void            *pGeneralPurpose,
-                                LVM_INT16       CallbackParam);
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+                             LVM_INT16 CallbackParam);
 
 /************************************************************************************/
 /*                                                                                  */
@@ -65,29 +64,22 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
-
-    LVM_UINT16          Offset;
-    LVM_FLOAT           Gain;
-    LVM_FLOAT           Current;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_BypassMix_t    *pConfig   = (LVCS_BypassMix_t *)&pInstance->BypassMix;
-    const Gain_t        *pOutputGainTable;
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVM_FLOAT Gain;
+    LVM_FLOAT Current;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
+    const Gain_t* pOutputGainTable;
 
     /*
      * Set the transition gain
      */
-    if ((pParams->OperatingMode == LVCS_ON) &&
-        (pInstance->bTimerDone == LVM_TRUE)
-        && (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transtion */
-        )
-    {
+    if ((pParams->OperatingMode == LVCS_ON) && (pInstance->bTimerDone == LVM_TRUE) &&
+        (pInstance->MSTarget1 != 0x7FFF) /* this indicates an off->on transition */
+    ) {
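+        /* EffectLevel runs from 0 to LVCS_EFFECT_HIGH (32767), so dividing by 32767
+           maps the requested effect level to a 0..1 transition gain */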
         pInstance->TransitionGain = ((LVM_FLOAT)pParams->EffectLevel / 32767);
-    }
-    else
-    {
+    } else {
         /* Select no effect level */
         pInstance->TransitionGain = 0;
     }
@@ -95,18 +87,19 @@
     /*
      * Calculate the output gain table offset
      */
-    Offset = (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat*(1+LVCS_EX_HEADPHONES)));
+    Offset =
+            (LVM_UINT16)(pParams->SpeakerType + (pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES)));
     pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
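+    /* i.e. Offset = SpeakerType + SourceFormat * 2 (since LVCS_EX_HEADPHONES = 1);
+       e.g. LVCS_MONOINSTEREO on LVCS_HEADPHONES selects entry 0 + 1 * 2 = 2 */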
 
     /*
      * Setup the mixer gain for the processed path
      */
-    Gain =  (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
+    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * pInstance->TransitionGain);
 
     pConfig->Mixer_Instance.MixerStream[0].CallbackParam = 0;
     pConfig->Mixer_Instance.MixerStream[0].pCallbackHandle = LVM_NULL;
     pConfig->Mixer_Instance.MixerStream[0].pCallBack = LVM_NULL;
-    pConfig->Mixer_Instance.MixerStream[0].CallbackSet=1;
+    pConfig->Mixer_Instance.MixerStream[0].CallbackSet = 1;
 
     Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[0]);
     LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[0], (LVM_FLOAT)(Gain), Current);
@@ -116,8 +109,8 @@
     /*
      * Setup the mixer gain for the unprocessed path
      */
-    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss * (1.0 - \
-                                    (LVM_FLOAT)pInstance->TransitionGain));
+    Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss *
+                       (1.0 - (LVM_FLOAT)pInstance->TransitionGain));
     Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * Gain;
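+    /* Together with the processed path above this forms a cross-fade: the processed
+       stream is weighted by TransitionGain, the unprocessed stream by (1 - TransitionGain) */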
     Current = LVC_Mixer_GetCurrent(&pConfig->Mixer_Instance.MixerStream[1]);
     LVC_Mixer_Init(&pConfig->Mixer_Instance.MixerStream[1], (LVM_FLOAT)(Gain), Current);
@@ -125,7 +118,7 @@
                                        LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
     pConfig->Mixer_Instance.MixerStream[1].CallbackParam = 0;
     pConfig->Mixer_Instance.MixerStream[1].pCallbackHandle = hInstance;
-    pConfig->Mixer_Instance.MixerStream[1].CallbackSet=1;
+    pConfig->Mixer_Instance.MixerStream[1].CallbackSet = 1;
     pConfig->Mixer_Instance.MixerStream[1].pCallBack = LVCS_MixerCallback;
 
     /*
@@ -137,45 +130,42 @@
      * Correct gain for the effect level
      */
     {
-        LVM_FLOAT           GainCorrect;
-        LVM_FLOAT           Gain1;
-        LVM_FLOAT           Gain2;
+        LVM_FLOAT GainCorrect;
+        LVM_FLOAT Gain1;
+        LVM_FLOAT Gain2;
 
         Gain1 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[0]);
         Gain2 = LVC_Mixer_GetTarget(&pConfig->Mixer_Instance.MixerStream[1]);
         /*
          * Calculate the gain correction
          */
-        if (pInstance->Params.CompressorMode == LVM_MODE_ON)
-        {
-        GainCorrect = (LVM_FLOAT)(  pInstance->VolCorrect.GainMin
-                                    - (((LVM_FLOAT)pInstance->VolCorrect.GainMin * \
-                                                         ((LVM_FLOAT)pInstance->TransitionGain)))
-                                    + (((LVM_FLOAT)pInstance->VolCorrect.GainFull * \
-                                                        ((LVM_FLOAT)pInstance->TransitionGain))));
+        if (pInstance->Params.CompressorMode == LVM_MODE_ON) {
+            GainCorrect = (LVM_FLOAT)(pInstance->VolCorrect.GainMin -
+                                      (((LVM_FLOAT)pInstance->VolCorrect.GainMin *
+                                        ((LVM_FLOAT)pInstance->TransitionGain))) +
+                                      (((LVM_FLOAT)pInstance->VolCorrect.GainFull *
+                                        ((LVM_FLOAT)pInstance->TransitionGain))));
 
-        /*
-         * Apply the gain correction
-         */
-        Gain1 = (Gain1 * GainCorrect);
-        Gain2 = (Gain2 * GainCorrect);
-
+            /*
+             * Apply the gain correction
+             */
+            Gain1 = (Gain1 * GainCorrect);
+            Gain2 = (Gain2 * GainCorrect);
         }
 
         /*
          * Set the gain values
          */
         pConfig->Output_Shift = pConfig->Output_Shift;
-        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0],Gain1);
+        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[0], Gain1);
         LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[0],
                                            LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
-        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1],Gain2);
+        LVC_Mixer_SetTarget(&pConfig->Mixer_Instance.MixerStream[1], Gain2);
         LVC_Mixer_VarSlope_SetTimeConstant(&pConfig->Mixer_Instance.MixerStream[1],
                                            LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
     }
 
-    return(LVCS_SUCCESS);
-
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -205,39 +195,31 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t         hInstance,
-                                      const LVM_FLOAT       *pProcessed,
-                                      const LVM_FLOAT       *pUnprocessed,
-                                      LVM_FLOAT             *pOutData,
-                                      LVM_UINT16            NumSamples)
-{
-
-    LVCS_Instance_t     *pInstance      = (LVCS_Instance_t  *)hInstance;
-    LVCS_BypassMix_t    *pConfig        = (LVCS_BypassMix_t *)&pInstance->BypassMix;
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+                                      const LVM_FLOAT* pUnprocessed, LVM_FLOAT* pOutData,
+                                      LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_BypassMix_t* pConfig = (LVCS_BypassMix_t*)&pInstance->BypassMix;
+    LVM_UINT16 destNumSamples =
+            (pInstance->Params.NrChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
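+    /* NumSamples is a per-channel count: a mono buffer holds NumSamples values, a stereo
+       buffer holds FCC_2 * NumSamples values (left and right) */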
 
     /*
      * Check if the bypass mixer is enabled
      */
-    if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0)
-    {
+    if ((pInstance->Params.OperatingMode & LVCS_BYPASSMIXSWITCH) != 0) {
         /*
          * Apply the bypass mix
          */
-        LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance,
-                                   pProcessed,
-                                   (LVM_FLOAT *) pUnprocessed,
-                                   pOutData,
-                                   (LVM_INT16)(2 * NumSamples));
+        LVC_MixSoft_2St_D16C31_SAT(&pConfig->Mixer_Instance, pProcessed, (LVM_FLOAT*)pUnprocessed,
+                                   pOutData, (LVM_INT16)destNumSamples);
         /*
          * Apply output gain correction shift
          */
-        Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift,
-                        (LVM_FLOAT*)pOutData,
-                        (LVM_FLOAT*)pOutData,
-                        (LVM_INT16)(2 * NumSamples));          /* Left and right*/
+        Shift_Sat_Float((LVM_INT16)pConfig->Output_Shift, (LVM_FLOAT*)pOutData,
+                        (LVM_FLOAT*)pOutData, (LVM_INT16)destNumSamples);
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -245,22 +227,18 @@
 /* FUNCTION:                LVCS_MixerCallback                                      */
 /*                                                                                  */
 /************************************************************************************/
-LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t      hInstance,
-                            void                *pGeneralPurpose,
-                            LVM_INT16           CallbackParam)
-{
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
+LVM_INT32 LVCS_MixerCallback(LVCS_Handle_t hInstance, void* pGeneralPurpose,
+                             LVM_INT16 CallbackParam) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
-   (void)pGeneralPurpose;
+    (void)pGeneralPurpose;
 
     /*
      * Off transition has completed in Headphone mode
      */
-    if ((pInstance->OutputDevice == LVCS_HEADPHONE) &&
-        (pInstance->bInOperatingModeTransition)     &&
-        (pInstance->MSTarget0 == 0x0000)&&  /* this indicates an on->off transition */
-        (CallbackParam == 0))
-    {
+    if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->bInOperatingModeTransition) &&
+        (pInstance->MSTarget0 == 0x0000) && /* this indicates an on->off transition */
+        (CallbackParam == 0)) {
         /* Set operating mode to OFF */
         pInstance->Params.OperatingMode = LVCS_OFF;
 
@@ -268,21 +246,17 @@
         pInstance->bInOperatingModeTransition = LVM_FALSE;
 
         /* Signal to the bundle */
-        if((*pInstance->Capabilities.CallBack) != LVM_NULL){
-            (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance,
-                                                LVM_NULL,
+        if ((*pInstance->Capabilities.CallBack) != LVM_NULL) {
+            (*pInstance->Capabilities.CallBack)(pInstance->Capabilities.pBundleInstance, LVM_NULL,
                                                 (ALGORITHM_CS_ID | LVCS_EVENT_ALGOFF));
         }
     }
 
-    if ((pInstance->OutputDevice == LVCS_HEADPHONE)  &&
-        (pInstance->MSTarget0 == 1) &&
-        (pInstance->bTimerDone == LVM_TRUE)){
-
+    if ((pInstance->OutputDevice == LVCS_HEADPHONE) && (pInstance->MSTarget0 == 1) &&
+        (pInstance->bTimerDone == LVM_TRUE)) {
         /* Exit transition state */
         pInstance->bInOperatingModeTransition = LVM_FALSE;
     }
 
     return 1;
 }
-
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
index fcd8ee3..69afcbb 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_BypassMix.h
@@ -33,20 +33,18 @@
 /************************************************************************************/
 
 /* Bypass mixer structure */
-typedef struct
-{
+typedef struct {
     /* Mixer settings */
-    LVMixer3_2St_FLOAT_st   Mixer_Instance;             /* Mixer instance */
-    LVM_UINT16              Output_Shift;               /* Correcting gain output shift */
+    LVMixer3_2St_FLOAT_st Mixer_Instance; /* Mixer instance */
+    LVM_UINT16 Output_Shift;              /* Correcting gain output shift */
 
 } LVCS_BypassMix_t;
 
-typedef struct
-{
+typedef struct {
     /* Output gain settings, Gain = (Loss/32768) * 2^Shift */
-    LVM_UINT16             Shift;                      /* Left shifts required */
-    LVM_FLOAT              Loss;                       /* Loss required */
-    LVM_FLOAT              UnprocLoss;                 /* Unprocessed path loss */
+    LVM_UINT16 Shift;     /* Left shifts required */
+    LVM_FLOAT Loss;       /* Loss required */
+    LVM_FLOAT UnprocLoss; /* Unprocessed path loss */
 } Gain_t;
 /************************************************************************************/
 /*                                                                                    */
@@ -54,13 +52,10 @@
 /*                                                                                    */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t       hInstance,
-                                           LVCS_Params_t    *pParams);
+LVCS_ReturnStatus_en LVCS_BypassMixInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
-LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t         hInstance,
-                                      const LVM_FLOAT       *pProcessed,
-                                      const LVM_FLOAT       *unProcessed,
-                                      LVM_FLOAT       *pOutData,
-                                      LVM_UINT16      NumSamples);
+LVCS_ReturnStatus_en LVCS_BypassMixer(LVCS_Handle_t hInstance, const LVM_FLOAT* pProcessed,
+                                      const LVM_FLOAT* unProcessed, LVM_FLOAT* pOutData,
+                                      LVM_UINT16 NumSamples);
 
-#endif  /* BYPASSMIX_H */
+#endif /* BYPASSMIX_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
index 50db03d..89f2f3b 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Control.cpp
@@ -45,15 +45,12 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t   hInstance,
-                                        LVCS_Params_t   *pParams)
-{
-
-    LVCS_Instance_t     *pInstance =(LVCS_Instance_t  *)hInstance;
+LVCS_ReturnStatus_en LVCS_GetParameters(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
     *pParams = pInstance->Params;
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /************************************************************************************/
@@ -75,34 +72,29 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t      hInstance,
-                                  LVCS_Params_t      *pParams)
-{
-    LVM_INT16                   Offset;
-    LVCS_Instance_t             *pInstance =(LVCS_Instance_t  *)hInstance;
-    LVCS_ReturnStatus_en        err;
-    LVCS_Modes_en               OperatingModeSave = pInstance->Params.OperatingMode;
+LVCS_ReturnStatus_en LVCS_Control(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_INT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReturnStatus_en err;
+    LVCS_Modes_en OperatingModeSave = pInstance->Params.OperatingMode;
 
-    if (pParams->SampleRate != pInstance->Params.SampleRate)
-    {
+    if (pParams->SampleRate != pInstance->Params.SampleRate) {
         pInstance->TimerParams.SamplingRate = LVCS_SampleRateTable[pParams->SampleRate];
     }
 
     /*
      * If the reverb level has changed
      */
-    if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
-    {
-        err=LVCS_ReverbGeneratorInit(hInstance,pParams);
+    if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+        err = LVCS_ReverbGeneratorInit(hInstance, pParams);
     }
 
     /*
      * If the sample rate or speaker has changed then perform a full re-initialisation
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-       (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
-        const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
+        const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
         /*
          * Output device
@@ -114,15 +106,16 @@
          */
         /* Use internal coefficient table */
         pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-        Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+        Offset = (LVM_INT16)(pParams->SpeakerType +
+                             pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
 
         pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
 
         pInstance->CompressGain = pInstance->VolCorrect.CompMin;
         LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
         {
-            LVM_FLOAT          Gain;
-            const Gain_t        *pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
+            LVM_FLOAT Gain;
+            const Gain_t* pOutputGainTable = (Gain_t*)&LVCS_OutputGainTable[0];
             Gain = (LVM_FLOAT)(pOutputGainTable[Offset].Loss);
             Gain = (LVM_FLOAT)pOutputGainTable[Offset].UnprocLoss * (Gain);
 
@@ -133,22 +126,18 @@
 
             LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, Gain);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],
-                    LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+                                               LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
             LVC_Mixer_VarSlope_SetTimeConstant(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],
-                    LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
+                                               LVCS_BYPASS_MIXER_TC, pParams->SampleRate, 2);
         }
 
-        err=LVCS_SEnhancerInit(hInstance,
-                           pParams);
+        err = LVCS_SEnhancerInit(hInstance, pParams);
 
-        err=LVCS_ReverbGeneratorInit(hInstance,
-                                 pParams);
+        err = LVCS_ReverbGeneratorInit(hInstance, pParams);
 
-        err=LVCS_EqualiserInit(hInstance,
-                           pParams);
+        err = LVCS_EqualiserInit(hInstance, pParams);
 
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
     }
 
@@ -156,30 +145,26 @@
      * Check if the effect level or source format has changed
      */
     else if ((pInstance->Params.EffectLevel != pParams->EffectLevel) ||
-            (pInstance->Params.SourceFormat != pParams->SourceFormat))
-    {
-        const LVCS_VolCorrect_t *pLVCS_VolCorrectTable;
+             (pInstance->Params.SourceFormat != pParams->SourceFormat)) {
+        const LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
         /*
          * Get the volume correction parameters
          */
         /* Use internal coefficient table */
         pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-        Offset = (LVM_INT16)(pParams->SpeakerType + pParams->SourceFormat*(1+LVCS_EX_HEADPHONES));
+        Offset = (LVM_INT16)(pParams->SpeakerType +
+                             pParams->SourceFormat * (1 + LVCS_EX_HEADPHONES));
 
         pInstance->VolCorrect = pLVCS_VolCorrectTable[Offset];
 
         /* Update the effect level and alpha-mixer gains */
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
-        if(err != LVCS_SUCCESS)
-        {
+        if (err != LVCS_SUCCESS) {
             return err;
         }
-    }
-    else
-    {
+    } else {
         pInstance->Params = *pParams;
     }
 
@@ -189,40 +174,38 @@
     pInstance->Params = *pParams;
 
     /* Stay on the current operating mode until the transition is done */
-    if((pParams->OperatingMode != OperatingModeSave) ||
-       (pInstance->bInOperatingModeTransition == LVM_TRUE)){
-
+    if ((pParams->OperatingMode != OperatingModeSave) ||
+        (pInstance->bInOperatingModeTransition == LVM_TRUE)) {
         /* Set the reverb delay timeout */
-        if(pInstance->bInOperatingModeTransition != LVM_TRUE){
+        if (pInstance->bInOperatingModeTransition != LVM_TRUE) {
             pInstance->bTimerDone = LVM_FALSE;
             pInstance->TimerParams.TimeInMs =
-            (LVM_INT16)(((pInstance->Reverberation.DelaySize << 2)
-            /pInstance->TimerParams.SamplingRate) + 1);
-            LVM_Timer_Init ( &pInstance->TimerInstance,
-                             &pInstance->TimerParams);
+                    (LVM_INT16)(((pInstance->Params.NrChannels == FCC_1
+                                          ? pInstance->Reverberation.DelaySize << 3
+                                          : pInstance->Reverberation.DelaySize << 2) /
+                                 pInstance->TimerParams.SamplingRate) +
+                                1);
+            LVM_Timer_Init(&pInstance->TimerInstance, &pInstance->TimerParams);
         }
 
         /* Update the effect level and alpha-mixer gains */
-        err=LVCS_BypassMixInit(hInstance,
-                           pParams);
+        err = LVCS_BypassMixInit(hInstance, pParams);
 
         /* Change transition bypass mixer settings if needed depending on transition type */
-        if(pParams->OperatingMode != LVCS_OFF){
-            pInstance->MSTarget0=LVM_MAXINT_16;
-            pInstance->MSTarget1=0;
-        }
-        else
-        {
+        if (pParams->OperatingMode != LVCS_OFF) {
+            pInstance->MSTarget0 = LVM_MAXINT_16;
+            pInstance->MSTarget1 = 0;
+        } else {
             pInstance->Params.OperatingMode = OperatingModeSave;
-            pInstance->MSTarget1=LVM_MAXINT_16;
-            pInstance->MSTarget0=0;
+            pInstance->MSTarget1 = LVM_MAXINT_16;
+            pInstance->MSTarget0 = 0;
         }
 
         /* Set transition flag */
         pInstance->bInOperatingModeTransition = LVM_TRUE;
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
 /****************************************************************************************/
@@ -233,12 +216,11 @@
 /*  CallBack function of the Timer.                                                     */
 /*                                                                                      */
 /****************************************************************************************/
-void LVCS_TimerCallBack (void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam)
-{
-    LVCS_Instance_t     *pInstance  = (LVCS_Instance_t  *)hInstance;
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
     /* Avoid warnings because pCallBackParams and CallbackParam are not used*/
-    if((pCallBackParams != LVM_NULL) || (CallbackParam != 0)){
+    if ((pCallBackParams != LVM_NULL) || (CallbackParam != 0)) {
         pCallBackParams = hInstance;
         CallbackParam = 0;
         return;
@@ -248,4 +230,3 @@
 
     return;
 }
-
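
The LVCS_Control hunk above now scales the reverb delay timeout by channel count: the mono (FCC_1) path shifts DelaySize left by 3 where the stereo path keeps the original shift of 2, before dividing by the timer sampling rate and adding one. A hedged sketch of just that arithmetic; the type widths and the FCC_1 value are stand-ins for the LVM definitions:

#include <cstdint>

constexpr int kFcc1 = 1;  // stand-in for FCC_1 (mono channel count)

// Mirrors the TimeInMs expression from LVCS_Control:
//   ((NrChannels == FCC_1 ? DelaySize << 3 : DelaySize << 2) / SamplingRate) + 1
static int16_t ReverbTimeoutMs(int nrChannels, uint32_t delaySize, uint32_t samplingRate) {
    const uint32_t scaledDelay =
            (nrChannels == kFcc1) ? (delaySize << 3) : (delaySize << 2);
    return static_cast<int16_t>(scaledDelay / samplingRate + 1);
}
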
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
index 431b7e3..2b628f1 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.cpp
@@ -21,6 +21,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
+#include <system/audio.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_Equaliser.h"
@@ -53,68 +54,31 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    const BiquadA012B12CoefsSP_t* pEqualiserCoefTable;
 
-    LVM_UINT16          Offset;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_Equaliser_t    *pConfig   = (LVCS_Equaliser_t *)&pInstance->Equaliser;
-    LVCS_Data_t         *pData;
-    LVCS_Coefficient_t  *pCoefficients;
-    BQ_FLOAT_Coefs_t      Coeffs;
-    const BiquadA012B12CoefsSP_t *pEqualiserCoefTable;
-
-    pData = (LVCS_Data_t *) \
-                pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
     /*
      * If the sample rate changes re-initialise the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
         /*
          * Setup the filter coefficients and clear the history
          */
         Offset = (LVM_UINT16)(pParams->SampleRate + (pParams->SpeakerType * (1 + LVM_FS_48000)));
         pEqualiserCoefTable = (BiquadA012B12CoefsSP_t*)&LVCS_EqualiserCoefTable[0];
 
-        /* Left and right filters */
-        /* Convert incoming coefficients to the required format/ordering */
-        Coeffs.A0 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A0;
-        Coeffs.A1 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A1;
-        Coeffs.A2 = (LVM_FLOAT) pEqualiserCoefTable[Offset].A2;
-        Coeffs.B1 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B1;
-        Coeffs.B2 = (LVM_FLOAT)-pEqualiserCoefTable[Offset].B2;
-
-        LoadConst_Float((LVM_INT16)0,                                         /* Value */
-                        (LVM_FLOAT *)&pData->EqualiserBiquadTaps, /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pData->EqualiserBiquadTaps) / sizeof(LVM_FLOAT)));
-
-        BQ_2I_D16F32Css_TRC_WRA_01_Init(&pCoefficients->EqualiserBiquadInstance,
-                                        &pData->EqualiserBiquadTaps,
-                                        &Coeffs);
-
-        /* Callbacks */
-        switch(pEqualiserCoefTable[Offset].Scale)
-        {
-            case 13:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C13_TRC_WRA_01;
-                break;
-            case 14:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C14_TRC_WRA_01;
-                break;
-            case 15:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F32C15_TRC_WRA_01;
-                break;
-        }
+        std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                pEqualiserCoefTable[Offset].A0, pEqualiserCoefTable[Offset].A1,
+                pEqualiserCoefTable[Offset].A2, pEqualiserCoefTable[Offset].B1,
+                pEqualiserCoefTable[Offset].B2};
+        pInstance->pEqBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
+                (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 /************************************************************************************/
 /*                                                                                  */
@@ -135,30 +99,17 @@
 /*  1.  Always processes in place.                                                  */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t       hInstance,
-                                    LVM_FLOAT           *pInputOutput,
-                                    LVM_UINT16          NumSamples)
-{
-
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_Equaliser_t    *pConfig   = (LVCS_Equaliser_t  *)&pInstance->Equaliser;
-    LVCS_Coefficient_t  *pCoefficients;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+                                    LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
 
     /*
      * Check if the equaliser is required
      */
-    if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0)
-    {
+    if ((pInstance->Params.OperatingMode & LVCS_EQUALISERSWITCH) != 0) {
         /* Apply filter to the left and right channels */
-        (pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*) \
-                                        &pCoefficients->EqualiserBiquadInstance,
-                                        (LVM_FLOAT *)pInputOutput,
-                                        (LVM_FLOAT *)pInputOutput,
-                                        (LVM_INT16)NumSamples);
+        pInstance->pEqBiquad->process(pInputOutput, pInputOutput, NumSamples);
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
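
The two equaliser hunks above drop the BQ_2I_* callback tables in favour of audio_utils' templated BiquadFilter, building the coefficient array directly from the table entry and filtering in place. A minimal sketch of that usage pattern, assuming the audio_utils/BiquadFilter.h header; the coefficient values are the CS 48 kHz entries from LVCS_Headphone_Coeffs.h below, used purely for illustration:

#include <array>
#include <cstddef>
#include <audio_utils/BiquadFilter.h>

// Process a stereo buffer in place, mirroring LVCS_Equaliser(). The filter is
// constructed locally for brevity; LVCS keeps it in pInstance->pEqBiquad.
static void EqualiserSketch(float* inOut, size_t frames) {
    // Coefficients laid out as in LVCS_EqualiserInit: {A0, A1, A2, B1, B2}.
    const std::array<float, android::audio_utils::kBiquadNumCoefs> coefs = {
            1.641177f, -2.364687f, 0.759910f, -1.166774f, 0.178074f};
    android::audio_utils::BiquadFilter<float> biquad(2 /* channelCount */, coefs);
    biquad.process(inOut, inOut, frames);
}
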
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
index 918d931..5237344 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Equaliser.h
@@ -23,23 +23,14 @@
 /*    Structures                                                                    */
 /*                                                                                  */
 /************************************************************************************/
-
-/* Equaliser structure */
-typedef struct
-{
-    void (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*, LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
-} LVCS_Equaliser_t;
-
 /************************************************************************************/
 /*                                                                                  */
 /*    Function prototypes                                                           */
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams);
-LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t            hInstance,
-                                    LVM_FLOAT                *pInputOutput,
-                                    LVM_UINT16                NumSamples);
+LVCS_ReturnStatus_en LVCS_EqualiserInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_Equaliser(LVCS_Handle_t hInstance, LVM_FLOAT* pInputOutput,
+                                    LVM_UINT16 NumSamples);
 
-#endif  /* EQUALISER_H */
+#endif /* EQUALISER_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
index c7ee232..69c46c6 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Headphone_Coeffs.h
@@ -24,201 +24,201 @@
 /*                                                                                  */
 /************************************************************************************/
 /* Stereo Enhancer coefficients for 8000 Hz sample rate, scaled with 0.161258 */
-#define CS_MIDDLE_8000_A0                           0.227720
-#define CS_MIDDLE_8000_A1                          (-0.215125)
-#define CS_MIDDLE_8000_A2                           0.000000
-#define CS_MIDDLE_8000_B1                          (-0.921899)
-#define CS_MIDDLE_8000_B2                           0.000000
-#define CS_MIDDLE_8000_SCALE                        15
-#define CS_SIDE_8000_A0                             0.611441
-#define CS_SIDE_8000_A1                            (-0.380344)
-#define CS_SIDE_8000_A2                            (-0.231097)
-#define CS_SIDE_8000_B1                            (-0.622470)
-#define CS_SIDE_8000_B2                            (-0.130759)
-#define CS_SIDE_8000_SCALE                         15
+#define CS_MIDDLE_8000_A0 0.227720
+#define CS_MIDDLE_8000_A1 (-0.215125)
+#define CS_MIDDLE_8000_A2 0.000000
+#define CS_MIDDLE_8000_B1 (-0.921899)
+#define CS_MIDDLE_8000_B2 0.000000
+#define CS_MIDDLE_8000_SCALE 15
+#define CS_SIDE_8000_A0 0.611441
+#define CS_SIDE_8000_A1 (-0.380344)
+#define CS_SIDE_8000_A2 (-0.231097)
+#define CS_SIDE_8000_B1 (-0.622470)
+#define CS_SIDE_8000_B2 (-0.130759)
+#define CS_SIDE_8000_SCALE 15
 
 /* Stereo Enhancer coefficients for 11025Hz sample rate, scaled with 0.162943 */
-#define CS_MIDDLE_11025_A0                       0.230838
-#define CS_MIDDLE_11025_A1                      (-0.221559)
-#define CS_MIDDLE_11025_A2                       0.000000
-#define CS_MIDDLE_11025_B1                      (-0.943056)
-#define CS_MIDDLE_11025_B2                       0.000000
-#define CS_MIDDLE_11025_SCALE                    15
-#define CS_SIDE_11025_A0                         0.557372
-#define CS_SIDE_11025_A1                        (-0.391490)
-#define CS_SIDE_11025_A2                        (-0.165881)
-#define CS_SIDE_11025_B1                        (-0.880608)
-#define CS_SIDE_11025_B2                         0.032397
-#define CS_SIDE_11025_SCALE                      15
+#define CS_MIDDLE_11025_A0 0.230838
+#define CS_MIDDLE_11025_A1 (-0.221559)
+#define CS_MIDDLE_11025_A2 0.000000
+#define CS_MIDDLE_11025_B1 (-0.943056)
+#define CS_MIDDLE_11025_B2 0.000000
+#define CS_MIDDLE_11025_SCALE 15
+#define CS_SIDE_11025_A0 0.557372
+#define CS_SIDE_11025_A1 (-0.391490)
+#define CS_SIDE_11025_A2 (-0.165881)
+#define CS_SIDE_11025_B1 (-0.880608)
+#define CS_SIDE_11025_B2 0.032397
+#define CS_SIDE_11025_SCALE 15
 
 /* Stereo Enhancer coefficients for 12000Hz sample rate, scaled with 0.162191 */
-#define CS_MIDDLE_12000_A0                        0.229932
-#define CS_MIDDLE_12000_A1                       (-0.221436)
-#define CS_MIDDLE_12000_A2                        0.000000
-#define CS_MIDDLE_12000_B1                       (-0.947616)
-#define CS_MIDDLE_12000_B2                        0.000000
-#define CS_MIDDLE_12000_SCALE                        15
-#define CS_SIDE_12000_A0                         0.558398
-#define CS_SIDE_12000_A1                        (-0.392211)
-#define CS_SIDE_12000_A2                        (-0.166187)
-#define CS_SIDE_12000_B1                        (-0.892550)
-#define CS_SIDE_12000_B2                         0.032856
-#define CS_SIDE_12000_SCALE                          15
+#define CS_MIDDLE_12000_A0 0.229932
+#define CS_MIDDLE_12000_A1 (-0.221436)
+#define CS_MIDDLE_12000_A2 0.000000
+#define CS_MIDDLE_12000_B1 (-0.947616)
+#define CS_MIDDLE_12000_B2 0.000000
+#define CS_MIDDLE_12000_SCALE 15
+#define CS_SIDE_12000_A0 0.558398
+#define CS_SIDE_12000_A1 (-0.392211)
+#define CS_SIDE_12000_A2 (-0.166187)
+#define CS_SIDE_12000_B1 (-0.892550)
+#define CS_SIDE_12000_B2 0.032856
+#define CS_SIDE_12000_SCALE 15
 
 /* Stereo Enhancer coefficients for 16000Hz sample rate, scaled with 0.162371 */
-#define CS_MIDDLE_16000_A0                       0.230638
-#define CS_MIDDLE_16000_A1                      (-0.224232)
-#define CS_MIDDLE_16000_A2                       0.000000
-#define CS_MIDDLE_16000_B1                      (-0.960550)
-#define CS_MIDDLE_16000_B2                       0.000000
-#define CS_MIDDLE_16000_SCALE                        15
-#define CS_SIDE_16000_A0                         0.499695
-#define CS_SIDE_16000_A1                        (-0.355543)
-#define CS_SIDE_16000_A2                        (-0.144152)
-#define CS_SIDE_16000_B1                        (-1.050788)
-#define CS_SIDE_16000_B2                         0.144104
-#define CS_SIDE_16000_SCALE                          14
+#define CS_MIDDLE_16000_A0 0.230638
+#define CS_MIDDLE_16000_A1 (-0.224232)
+#define CS_MIDDLE_16000_A2 0.000000
+#define CS_MIDDLE_16000_B1 (-0.960550)
+#define CS_MIDDLE_16000_B2 0.000000
+#define CS_MIDDLE_16000_SCALE 15
+#define CS_SIDE_16000_A0 0.499695
+#define CS_SIDE_16000_A1 (-0.355543)
+#define CS_SIDE_16000_A2 (-0.144152)
+#define CS_SIDE_16000_B1 (-1.050788)
+#define CS_SIDE_16000_B2 0.144104
+#define CS_SIDE_16000_SCALE 14
 
 /* Stereo Enhancer coefficients for 22050Hz sample rate, scaled with 0.160781 */
-#define CS_MIDDLE_22050_A0                       0.228749
-#define CS_MIDDLE_22050_A1                      (-0.224128)
-#define CS_MIDDLE_22050_A2                       0.000000
-#define CS_MIDDLE_22050_B1                      (-0.971262)
-#define CS_MIDDLE_22050_B2                       0.000000
-#define CS_MIDDLE_22050_SCALE                        15
-#define CS_SIDE_22050_A0                          0.440112
-#define CS_SIDE_22050_A1                         (-0.261096)
-#define CS_SIDE_22050_A2                         (-0.179016)
-#define CS_SIDE_22050_B1                         (-1.116786)
-#define CS_SIDE_22050_B2                          0.182507
-#define CS_SIDE_22050_SCALE                          14
+#define CS_MIDDLE_22050_A0 0.228749
+#define CS_MIDDLE_22050_A1 (-0.224128)
+#define CS_MIDDLE_22050_A2 0.000000
+#define CS_MIDDLE_22050_B1 (-0.971262)
+#define CS_MIDDLE_22050_B2 0.000000
+#define CS_MIDDLE_22050_SCALE 15
+#define CS_SIDE_22050_A0 0.440112
+#define CS_SIDE_22050_A1 (-0.261096)
+#define CS_SIDE_22050_A2 (-0.179016)
+#define CS_SIDE_22050_B1 (-1.116786)
+#define CS_SIDE_22050_B2 0.182507
+#define CS_SIDE_22050_SCALE 14
 
 /* Stereo Enhancer coefficients for 24000Hz sample rate, scaled with 0.161882 */
-#define CS_MIDDLE_24000_A0                         0.230395
-#define CS_MIDDLE_24000_A1                        (-0.226117)
-#define CS_MIDDLE_24000_A2                         0.000000
-#define CS_MIDDLE_24000_B1                        (-0.973573)
-#define CS_MIDDLE_24000_B2                         0.000000
-#define CS_MIDDLE_24000_SCALE                        15
-#define CS_SIDE_24000_A0                           0.414770
-#define CS_SIDE_24000_A1                          (-0.287182)
-#define CS_SIDE_24000_A2                          (-0.127588)
-#define CS_SIDE_24000_B1                          (-1.229648)
-#define CS_SIDE_24000_B2                           0.282177
-#define CS_SIDE_24000_SCALE                          14
+#define CS_MIDDLE_24000_A0 0.230395
+#define CS_MIDDLE_24000_A1 (-0.226117)
+#define CS_MIDDLE_24000_A2 0.000000
+#define CS_MIDDLE_24000_B1 (-0.973573)
+#define CS_MIDDLE_24000_B2 0.000000
+#define CS_MIDDLE_24000_SCALE 15
+#define CS_SIDE_24000_A0 0.414770
+#define CS_SIDE_24000_A1 (-0.287182)
+#define CS_SIDE_24000_A2 (-0.127588)
+#define CS_SIDE_24000_B1 (-1.229648)
+#define CS_SIDE_24000_B2 0.282177
+#define CS_SIDE_24000_SCALE 14
 
 /* Stereo Enhancer coefficients for 32000Hz sample rate, scaled with 0.160322 */
-#define CS_MIDDLE_32000_A0                          0.228400
-#define CS_MIDDLE_32000_A1                         (-0.225214)
-#define CS_MIDDLE_32000_A2                          0.000000
-#define CS_MIDDLE_32000_B1                         (-0.980126)
-#define CS_MIDDLE_32000_B2                          0.000000
-#define CS_MIDDLE_32000_SCALE                        15
-#define CS_SIDE_32000_A0                            0.364579
-#define CS_SIDE_32000_A1                           (-0.207355)
-#define CS_SIDE_32000_A2                           (-0.157224)
-#define CS_SIDE_32000_B1                           (-1.274231)
-#define CS_SIDE_32000_B2                            0.312495
-#define CS_SIDE_32000_SCALE                          14
+#define CS_MIDDLE_32000_A0 0.228400
+#define CS_MIDDLE_32000_A1 (-0.225214)
+#define CS_MIDDLE_32000_A2 0.000000
+#define CS_MIDDLE_32000_B1 (-0.980126)
+#define CS_MIDDLE_32000_B2 0.000000
+#define CS_MIDDLE_32000_SCALE 15
+#define CS_SIDE_32000_A0 0.364579
+#define CS_SIDE_32000_A1 (-0.207355)
+#define CS_SIDE_32000_A2 (-0.157224)
+#define CS_SIDE_32000_B1 (-1.274231)
+#define CS_SIDE_32000_B2 0.312495
+#define CS_SIDE_32000_SCALE 14
 
 /* Stereo Enhancer coefficients for 44100Hz sample rate, scaled with 0.163834 */
-#define CS_MIDDLE_44100_A0                     0.233593
-#define CS_MIDDLE_44100_A1                    (-0.231225)
-#define CS_MIDDLE_44100_A2                     0.000000
-#define CS_MIDDLE_44100_B1                    (-0.985545)
-#define CS_MIDDLE_44100_B2                     0.000000
-#define CS_MIDDLE_44100_SCALE                        15
-#define CS_SIDE_44100_A0                       0.284573
-#define CS_SIDE_44100_A1                      (-0.258910)
-#define CS_SIDE_44100_A2                      (-0.025662)
-#define CS_SIDE_44100_B1                      (-1.572248)
-#define CS_SIDE_44100_B2                       0.588399
-#define CS_SIDE_44100_SCALE                  14
+#define CS_MIDDLE_44100_A0 0.233593
+#define CS_MIDDLE_44100_A1 (-0.231225)
+#define CS_MIDDLE_44100_A2 0.000000
+#define CS_MIDDLE_44100_B1 (-0.985545)
+#define CS_MIDDLE_44100_B2 0.000000
+#define CS_MIDDLE_44100_SCALE 15
+#define CS_SIDE_44100_A0 0.284573
+#define CS_SIDE_44100_A1 (-0.258910)
+#define CS_SIDE_44100_A2 (-0.025662)
+#define CS_SIDE_44100_B1 (-1.572248)
+#define CS_SIDE_44100_B2 0.588399
+#define CS_SIDE_44100_SCALE 14
 
 /* Stereo Enhancer coefficients for 48000Hz sample rate, scaled with 0.164402 */
-#define CS_MIDDLE_48000_A0                     0.234445
-#define CS_MIDDLE_48000_A1                    (-0.232261)
-#define CS_MIDDLE_48000_A2                     0.000000
-#define CS_MIDDLE_48000_B1                    (-0.986713)
-#define CS_MIDDLE_48000_B2                     0.000000
-#define CS_MIDDLE_48000_SCALE                        15
-#define CS_SIDE_48000_A0                     0.272606
-#define CS_SIDE_48000_A1                    (-0.266952)
-#define CS_SIDE_48000_A2                    (-0.005654)
-#define CS_SIDE_48000_B1                    (-1.617141)
-#define CS_SIDE_48000_B2                     0.630405
-#define CS_SIDE_48000_SCALE                          14
+#define CS_MIDDLE_48000_A0 0.234445
+#define CS_MIDDLE_48000_A1 (-0.232261)
+#define CS_MIDDLE_48000_A2 0.000000
+#define CS_MIDDLE_48000_B1 (-0.986713)
+#define CS_MIDDLE_48000_B2 0.000000
+#define CS_MIDDLE_48000_SCALE 15
+#define CS_SIDE_48000_A0 0.272606
+#define CS_SIDE_48000_A1 (-0.266952)
+#define CS_SIDE_48000_A2 (-0.005654)
+#define CS_SIDE_48000_B1 (-1.617141)
+#define CS_SIDE_48000_B2 0.630405
+#define CS_SIDE_48000_SCALE 14
 
 /* Coefficients for 88200Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_MIDDLE_88200_A0                     0.233846f
-#define CS_MIDDLE_88200_A1                     (-0.232657f)
-#define CS_MIDDLE_88200_A2                     0.000000f
-#define CS_MIDDLE_88200_B1                     (-0.992747f)
-#define CS_MIDDLE_88200_B2                     0.000000f
-#define CS_MIDDLE_88200_SCALE                  15
-#define CS_SIDE_88200_A0                       0.231541f
-#define CS_SIDE_88200_A1                       (-0.289586f)
-#define CS_SIDE_88200_A2                       0.058045f
-#define CS_SIDE_88200_B1                       (-1.765300f)
-#define CS_SIDE_88200_B2                       0.769816f
-#define CS_SIDE_88200_SCALE                    14
+#define CS_MIDDLE_88200_A0 0.233846f
+#define CS_MIDDLE_88200_A1 (-0.232657f)
+#define CS_MIDDLE_88200_A2 0.000000f
+#define CS_MIDDLE_88200_B1 (-0.992747f)
+#define CS_MIDDLE_88200_B2 0.000000f
+#define CS_MIDDLE_88200_SCALE 15
+#define CS_SIDE_88200_A0 0.231541f
+#define CS_SIDE_88200_A1 (-0.289586f)
+#define CS_SIDE_88200_A2 0.058045f
+#define CS_SIDE_88200_B1 (-1.765300f)
+#define CS_SIDE_88200_B2 0.769816f
+#define CS_SIDE_88200_SCALE 14
 
 /* Stereo Enhancer coefficients for 96000Hz sample rate, scaled with  0.165*/
 /* high pass filter with cutoff frequency 102.18 Hz*/
-#define CS_MIDDLE_96000_A0                     0.235532
-#define CS_MIDDLE_96000_A1                    (-0.234432)
-#define CS_MIDDLE_96000_A2                     0.000000
-#define CS_MIDDLE_96000_B1                    (-0.993334)
-#define CS_MIDDLE_96000_B2                     0.000000
-#define CS_MIDDLE_96000_SCALE                        15
+#define CS_MIDDLE_96000_A0 0.235532
+#define CS_MIDDLE_96000_A1 (-0.234432)
+#define CS_MIDDLE_96000_A2 0.000000
+#define CS_MIDDLE_96000_B1 (-0.993334)
+#define CS_MIDDLE_96000_B2 0.000000
+#define CS_MIDDLE_96000_SCALE 15
 /* Coefficients calculated using tf2ss and ss2tf functions based on
  * coefficients available for 48000Hz sampling frequency
  */
-#define CS_SIDE_96000_A0                     0.224326f
-#define CS_SIDE_96000_A1                     (-0.294937f)
-#define CS_SIDE_96000_A2                     0.070611f
-#define CS_SIDE_96000_B1                     (-1.792166f)
-#define CS_SIDE_96000_B2                     0.795830f
-#define CS_SIDE_96000_SCALE                  14
+#define CS_SIDE_96000_A0 0.224326f
+#define CS_SIDE_96000_A1 (-0.294937f)
+#define CS_SIDE_96000_A2 0.070611f
+#define CS_SIDE_96000_B1 (-1.792166f)
+#define CS_SIDE_96000_B2 0.795830f
+#define CS_SIDE_96000_SCALE 14
 
 /* Stereo Enhancer coefficients for 176400Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_MIDDLE_176400_A0                     0.233973f
-#define CS_MIDDLE_176400_A1                     (-0.233378f)
-#define CS_MIDDLE_176400_A2                     0.000000f
-#define CS_MIDDLE_176400_B1                     (-0.996367f)
-#define CS_MIDDLE_176400_B2                     0.000000f
-#define CS_MIDDLE_176400_SCALE                  15
-#define CS_SIDE_176400_A0                       0.199836f
-#define CS_SIDE_176400_A1                       (-0.307544f)
-#define CS_SIDE_176400_A2                       0.107708f
-#define CS_SIDE_176400_B1                       (-1.876572f)
-#define CS_SIDE_176400_B2                       0.877771f
-#define CS_SIDE_176400_SCALE                    14
+#define CS_MIDDLE_176400_A0 0.233973f
+#define CS_MIDDLE_176400_A1 (-0.233378f)
+#define CS_MIDDLE_176400_A2 0.000000f
+#define CS_MIDDLE_176400_B1 (-0.996367f)
+#define CS_MIDDLE_176400_B2 0.000000f
+#define CS_MIDDLE_176400_SCALE 15
+#define CS_SIDE_176400_A0 0.199836f
+#define CS_SIDE_176400_A1 (-0.307544f)
+#define CS_SIDE_176400_A2 0.107708f
+#define CS_SIDE_176400_B1 (-1.876572f)
+#define CS_SIDE_176400_B2 0.877771f
+#define CS_SIDE_176400_SCALE 14
 
 /* Stereo Enhancer coefficients for 192000Hz sample rate, scaled with  0.1689*/
-#define CS_MIDDLE_192000_A0                     0.241219
-#define CS_MIDDLE_192000_A1                    (-0.240656)
-#define CS_MIDDLE_192000_A2                     0.000000
-#define CS_MIDDLE_192000_B1                    (-0.996661)
-#define CS_MIDDLE_192000_B2                     0.000000
-#define CS_MIDDLE_192000_SCALE                        15
+#define CS_MIDDLE_192000_A0 0.241219
+#define CS_MIDDLE_192000_A1 (-0.240656)
+#define CS_MIDDLE_192000_A2 0.000000
+#define CS_MIDDLE_192000_B1 (-0.996661)
+#define CS_MIDDLE_192000_B2 0.000000
+#define CS_MIDDLE_192000_SCALE 15
 /* Coefficients calculated using tf2ss and ss2tf functions based on
  * coefficients available for 48000Hz sampling frequency
  */
-#define CS_SIDE_192000_A0                    0.196039f
-#define CS_SIDE_192000_A1                    (-0.311027f)
-#define CS_SIDE_192000_A2                    0.114988f
-#define CS_SIDE_192000_B1                    (-1.891380f)
-#define CS_SIDE_192000_B2                    0.8923460f
-#define CS_SIDE_192000_SCALE                 14
+#define CS_SIDE_192000_A0 0.196039f
+#define CS_SIDE_192000_A1 (-0.311027f)
+#define CS_SIDE_192000_A2 0.114988f
+#define CS_SIDE_192000_B1 (-1.891380f)
+#define CS_SIDE_192000_B2 0.8923460f
+#define CS_SIDE_192000_SCALE 14
 
 /************************************************************************************/
 /*                                                                                  */
@@ -227,133 +227,133 @@
 /************************************************************************************/
 
 /* Reverb delay settings in samples */
-#define LVCS_STEREODELAY_CS_8KHZ                     93         /* Sample rate 8kS/s */
-#define LVCS_STEREODELAY_CS_11KHZ                   128         /* Sample rate 11kS/s */
-#define LVCS_STEREODELAY_CS_12KHZ                   139         /* Sample rate 12kS/s */
-#define LVCS_STEREODELAY_CS_16KHZ                   186         /* Sample rate 16kS/s */
-#define LVCS_STEREODELAY_CS_22KHZ                   256         /* Sample rate 22kS/s */
-#define LVCS_STEREODELAY_CS_24KHZ                   279         /* Sample rate 24kS/s */
-#define LVCS_STEREODELAY_CS_32KHZ                   372         /* Sample rate 32kS/s */
-#define LVCS_STEREODELAY_CS_44KHZ                   512         /* Sample rate 44kS/s */
-#define LVCS_STEREODELAY_CS_48KHZ                   557         /* Sample rate 48kS/s */
-#define LVCS_STEREODELAY_CS_88KHZ                   1024        /* Sample rate 88.2kS/s */
-#define LVCS_STEREODELAY_CS_96KHZ                   1115        /* Sample rate 96kS/s */
-#define LVCS_STEREODELAY_CS_176KHZ                  2048        /* Sample rate 176.4kS/s */
-#define LVCS_STEREODELAY_CS_192KHZ                  2229        /* Sample rate 196kS/s */
-#define LVCS_STEREODELAY_CS_MAX_VAL                 LVCS_STEREODELAY_CS_192KHZ
+#define LVCS_STEREODELAY_CS_8KHZ 93     /* Sample rate 8kS/s */
+#define LVCS_STEREODELAY_CS_11KHZ 128   /* Sample rate 11kS/s */
+#define LVCS_STEREODELAY_CS_12KHZ 139   /* Sample rate 12kS/s */
+#define LVCS_STEREODELAY_CS_16KHZ 186   /* Sample rate 16kS/s */
+#define LVCS_STEREODELAY_CS_22KHZ 256   /* Sample rate 22kS/s */
+#define LVCS_STEREODELAY_CS_24KHZ 279   /* Sample rate 24kS/s */
+#define LVCS_STEREODELAY_CS_32KHZ 372   /* Sample rate 32kS/s */
+#define LVCS_STEREODELAY_CS_44KHZ 512   /* Sample rate 44kS/s */
+#define LVCS_STEREODELAY_CS_48KHZ 557   /* Sample rate 48kS/s */
+#define LVCS_STEREODELAY_CS_88KHZ 1024  /* Sample rate 88.2kS/s */
+#define LVCS_STEREODELAY_CS_96KHZ 1115  /* Sample rate 96kS/s */
+#define LVCS_STEREODELAY_CS_176KHZ 2048 /* Sample rate 176.4kS/s */
+#define LVCS_STEREODELAY_CS_192KHZ 2229 /* Sample rate 192kS/s */
+#define LVCS_STEREODELAY_CS_MAX_VAL LVCS_STEREODELAY_CS_192KHZ
 
 /* Reverb coefficients for 8000 Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_8000_A0                          0.667271
-#define CS_REVERB_8000_A1                         (-0.667271)
-#define CS_REVERB_8000_A2                          0.000000
-#define CS_REVERB_8000_B1                         (-0.668179)
-#define CS_REVERB_8000_B2                          0.000000
-#define CS_REVERB_8000_SCALE                         15
+#define CS_REVERB_8000_A0 0.667271
+#define CS_REVERB_8000_A1 (-0.667271)
+#define CS_REVERB_8000_A2 0.000000
+#define CS_REVERB_8000_B1 (-0.668179)
+#define CS_REVERB_8000_B2 0.000000
+#define CS_REVERB_8000_SCALE 15
 
 /* Reverb coefficients for 11025Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_11025_A0                     0.699638
-#define CS_REVERB_11025_A1                    (-0.699638)
-#define CS_REVERB_11025_A2                     0.000000
-#define CS_REVERB_11025_B1                    (-0.749096)
-#define CS_REVERB_11025_B2                     0.000000
-#define CS_REVERB_11025_SCALE                  15
+#define CS_REVERB_11025_A0 0.699638
+#define CS_REVERB_11025_A1 (-0.699638)
+#define CS_REVERB_11025_A2 0.000000
+#define CS_REVERB_11025_B1 (-0.749096)
+#define CS_REVERB_11025_B2 0.000000
+#define CS_REVERB_11025_SCALE 15
 
 /* Reverb coefficients for 12000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_12000_A0                   0.706931
-#define CS_REVERB_12000_A1                  (-0.706931)
-#define CS_REVERB_12000_A2                   0.000000
-#define CS_REVERB_12000_B1                  (-0.767327)
-#define CS_REVERB_12000_B2                   0.000000
-#define CS_REVERB_12000_SCALE                15
+#define CS_REVERB_12000_A0 0.706931
+#define CS_REVERB_12000_A1 (-0.706931)
+#define CS_REVERB_12000_A2 0.000000
+#define CS_REVERB_12000_B1 (-0.767327)
+#define CS_REVERB_12000_B2 0.000000
+#define CS_REVERB_12000_SCALE 15
 
 /* Reverb coefficients for 16000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_16000_A0                      0.728272
-#define CS_REVERB_16000_A1                     (-0.728272)
-#define CS_REVERB_16000_A2                      0.000000
-#define CS_REVERB_16000_B1                     (-0.820679)
-#define CS_REVERB_16000_B2                      0.000000
-#define CS_REVERB_16000_SCALE                        15
+#define CS_REVERB_16000_A0 0.728272
+#define CS_REVERB_16000_A1 (-0.728272)
+#define CS_REVERB_16000_A2 0.000000
+#define CS_REVERB_16000_B1 (-0.820679)
+#define CS_REVERB_16000_B2 0.000000
+#define CS_REVERB_16000_SCALE 15
 
 /* Reverb coefficients for 22050Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_22050_A0                     0.516396
-#define CS_REVERB_22050_A1                     0.000000
-#define CS_REVERB_22050_A2                    (-0.516396)
-#define CS_REVERB_22050_B1                    (-0.518512)
-#define CS_REVERB_22050_B2                    (-0.290990)
-#define CS_REVERB_22050_SCALE                        15
+#define CS_REVERB_22050_A0 0.516396
+#define CS_REVERB_22050_A1 0.000000
+#define CS_REVERB_22050_A2 (-0.516396)
+#define CS_REVERB_22050_B1 (-0.518512)
+#define CS_REVERB_22050_B2 (-0.290990)
+#define CS_REVERB_22050_SCALE 15
 
 /* Reverb coefficients for 24000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_24000_A0                       0.479565
-#define CS_REVERB_24000_A1                       0.000000
-#define CS_REVERB_24000_A2                      (-0.479565)
-#define CS_REVERB_24000_B1                      (-0.637745)
-#define CS_REVERB_24000_B2                      (-0.198912)
-#define CS_REVERB_24000_SCALE                        15
+#define CS_REVERB_24000_A0 0.479565
+#define CS_REVERB_24000_A1 0.000000
+#define CS_REVERB_24000_A2 (-0.479565)
+#define CS_REVERB_24000_B1 (-0.637745)
+#define CS_REVERB_24000_B2 (-0.198912)
+#define CS_REVERB_24000_SCALE 15
 
 /* Reverb coefficients for 32000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_32000_A0                      0.380349
-#define CS_REVERB_32000_A1                      0.000000
-#define CS_REVERB_32000_A2                     (-0.380349)
-#define CS_REVERB_32000_B1                     (-0.950873)
-#define CS_REVERB_32000_B2                      0.049127
-#define CS_REVERB_32000_SCALE                        15
+#define CS_REVERB_32000_A0 0.380349
+#define CS_REVERB_32000_A1 0.000000
+#define CS_REVERB_32000_A2 (-0.380349)
+#define CS_REVERB_32000_B1 (-0.950873)
+#define CS_REVERB_32000_B2 0.049127
+#define CS_REVERB_32000_SCALE 15
 
 /* Reverb coefficients for 44100Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_44100_A0                         0.297389
-#define CS_REVERB_44100_A1                         0.000000
-#define CS_REVERB_44100_A2                        (-0.297389)
-#define CS_REVERB_44100_B1                        (-1.200423)
-#define CS_REVERB_44100_B2                         0.256529
-#define CS_REVERB_44100_SCALE                        14
+#define CS_REVERB_44100_A0 0.297389
+#define CS_REVERB_44100_A1 0.000000
+#define CS_REVERB_44100_A2 (-0.297389)
+#define CS_REVERB_44100_B1 (-1.200423)
+#define CS_REVERB_44100_B2 0.256529
+#define CS_REVERB_44100_SCALE 14
 
 /* Reverb coefficients for 48000Hz sample rate, scaled with 1.038030 */
-#define CS_REVERB_48000_A0                       0.278661
-#define CS_REVERB_48000_A1                       0.000000
-#define CS_REVERB_48000_A2                      (-0.278661)
-#define CS_REVERB_48000_B1                      (-1.254993)
-#define CS_REVERB_48000_B2                       0.303347
-#define CS_REVERB_48000_SCALE                        14
+#define CS_REVERB_48000_A0 0.278661
+#define CS_REVERB_48000_A1 0.000000
+#define CS_REVERB_48000_A2 (-0.278661)
+#define CS_REVERB_48000_B1 (-1.254993)
+#define CS_REVERB_48000_B2 0.303347
+#define CS_REVERB_48000_SCALE 14
 
 /* Reverb coefficients for 88200Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_88200_A0                       0.171901f
-#define CS_REVERB_88200_A1                       0.000000f
-#define CS_REVERB_88200_A2                      (-0.171901f)
-#define CS_REVERB_88200_B1                      (-1.553948f)
-#define CS_REVERB_88200_B2                      (0.570248f)
-#define CS_REVERB_88200_SCALE                      14
+#define CS_REVERB_88200_A0 0.171901f
+#define CS_REVERB_88200_A1 0.000000f
+#define CS_REVERB_88200_A2 (-0.171901f)
+#define CS_REVERB_88200_B1 (-1.553948f)
+#define CS_REVERB_88200_B2 (0.570248f)
+#define CS_REVERB_88200_SCALE 14
 /* Reverb coefficients for 96000Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_96000_A0                       0.1602488
-#define CS_REVERB_96000_A1                       0.000000
-#define CS_REVERB_96000_A2                      (-0.1602488)
-#define CS_REVERB_96000_B1                      (-1.585413)
-#define CS_REVERB_96000_B2                       0.599377
-#define CS_REVERB_96000_SCALE                        14
+#define CS_REVERB_96000_A0 0.1602488
+#define CS_REVERB_96000_A1 0.000000
+#define CS_REVERB_96000_A2 (-0.1602488)
+#define CS_REVERB_96000_B1 (-1.585413)
+#define CS_REVERB_96000_B2 0.599377
+#define CS_REVERB_96000_SCALE 14
 
 /* Reverb coefficients for 176400Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000 */
-#define CS_REVERB_176400_A0                       0.094763f
-#define CS_REVERB_176400_A1                       0.000000f
-#define CS_REVERB_176400_A2                      (-0.094763f)
-#define CS_REVERB_176400_B1                      (-1.758593f)
-#define CS_REVERB_176400_B2                      (0.763091f)
-#define CS_REVERB_176400_SCALE                      14
+#define CS_REVERB_176400_A0 0.094763f
+#define CS_REVERB_176400_A1 0.000000f
+#define CS_REVERB_176400_A2 (-0.094763f)
+#define CS_REVERB_176400_B1 (-1.758593f)
+#define CS_REVERB_176400_B2 (0.763091f)
+#define CS_REVERB_176400_SCALE 14
 /* Reverb coefficients for 192000Hz sample rate, scaled with 0.8 */
 /* Band pass filter with fc1=500 and fc2=8000*/
-#define CS_REVERB_192000_A0                       0.0878369
-#define CS_REVERB_192000_A1                       0.000000
-#define CS_REVERB_192000_A2                      (-0.0878369)
-#define CS_REVERB_192000_B1                      (-1.7765764)
-#define CS_REVERB_192000_B2                       0.7804076
-#define CS_REVERB_192000_SCALE                        14
+#define CS_REVERB_192000_A0 0.0878369
+#define CS_REVERB_192000_A1 0.000000
+#define CS_REVERB_192000_A2 (-0.0878369)
+#define CS_REVERB_192000_B1 (-1.7765764)
+#define CS_REVERB_192000_B2 0.7804076
+#define CS_REVERB_192000_SCALE 14
 
 /* Reverb Gain Settings */
-#define LVCS_HEADPHONE_DELAYGAIN               0.800000         /* Algorithm delay path gain */
-#define LVCS_HEADPHONE_OUTPUTGAIN              1.000000         /* Algorithm output gain */
-#define LVCS_HEADPHONE_PROCGAIN                   18403         /* Processed path gain */
-#define LVCS_HEADPHONE_UNPROCGAIN                 18403         /* Unprocessed path gain */
-#define LVCS_HEADPHONE_GAINCORRECT             1.009343         /* Delay mixer gain correction */
+#define LVCS_HEADPHONE_DELAYGAIN 0.800000   /* Algorithm delay path gain */
+#define LVCS_HEADPHONE_OUTPUTGAIN 1.000000  /* Algorithm output gain */
+#define LVCS_HEADPHONE_PROCGAIN 18403       /* Processed path gain */
+#define LVCS_HEADPHONE_UNPROCGAIN 18403     /* Unprocessed path gain */
+#define LVCS_HEADPHONE_GAINCORRECT 1.009343 /* Delay mixer gain correction */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -363,205 +363,204 @@
 
 /* Equaliser coefficients for 8000 Hz sample rate, \
    CS scaled with 1.038497 and CSEX scaled with 0.775480 */
-#define CS_EQUALISER_8000_A0                     1.263312
-#define CS_EQUALISER_8000_A1                    (-0.601748)
-#define CS_EQUALISER_8000_A2                    (-0.280681)
-#define CS_EQUALISER_8000_B1                    (-0.475865)
-#define CS_EQUALISER_8000_B2                    (-0.408154)
-#define CS_EQUALISER_8000_SCALE                      14
-#define CSEX_EQUALISER_8000_A0                    0.943357
-#define CSEX_EQUALISER_8000_A1                   (-0.449345)
-#define CSEX_EQUALISER_8000_A2                   (-0.209594)
-#define CSEX_EQUALISER_8000_B1                   (-0.475865)
-#define CSEX_EQUALISER_8000_B2                   (-0.408154)
-#define CSEX_EQUALISER_8000_SCALE                    15
+#define CS_EQUALISER_8000_A0 1.263312
+#define CS_EQUALISER_8000_A1 (-0.601748)
+#define CS_EQUALISER_8000_A2 (-0.280681)
+#define CS_EQUALISER_8000_B1 (-0.475865)
+#define CS_EQUALISER_8000_B2 (-0.408154)
+#define CS_EQUALISER_8000_SCALE 14
+#define CSEX_EQUALISER_8000_A0 0.943357
+#define CSEX_EQUALISER_8000_A1 (-0.449345)
+#define CSEX_EQUALISER_8000_A2 (-0.209594)
+#define CSEX_EQUALISER_8000_B1 (-0.475865)
+#define CSEX_EQUALISER_8000_B2 (-0.408154)
+#define CSEX_EQUALISER_8000_SCALE 15
 
 /* Equaliser coefficients for 11025Hz sample rate, \
    CS scaled with 1.027761 and CSEX scaled with 0.767463 */
-#define CS_EQUALISER_11025_A0                    1.101145
-#define CS_EQUALISER_11025_A1                    0.139020
-#define CS_EQUALISER_11025_A2                   (-0.864423)
-#define CS_EQUALISER_11025_B1                    0.024541
-#define CS_EQUALISER_11025_B2                   (-0.908930)
-#define CS_EQUALISER_11025_SCALE                     14
-#define CSEX_EQUALISER_11025_A0                    0.976058
-#define CSEX_EQUALISER_11025_A1                   (-0.695326)
-#define CSEX_EQUALISER_11025_A2                   (-0.090809)
-#define CSEX_EQUALISER_11025_B1                   (-0.610594)
-#define CSEX_EQUALISER_11025_B2                   (-0.311149)
-#define CSEX_EQUALISER_11025_SCALE                   15
+#define CS_EQUALISER_11025_A0 1.101145
+#define CS_EQUALISER_11025_A1 0.139020
+#define CS_EQUALISER_11025_A2 (-0.864423)
+#define CS_EQUALISER_11025_B1 0.024541
+#define CS_EQUALISER_11025_B2 (-0.908930)
+#define CS_EQUALISER_11025_SCALE 14
+#define CSEX_EQUALISER_11025_A0 0.976058
+#define CSEX_EQUALISER_11025_A1 (-0.695326)
+#define CSEX_EQUALISER_11025_A2 (-0.090809)
+#define CSEX_EQUALISER_11025_B1 (-0.610594)
+#define CSEX_EQUALISER_11025_B2 (-0.311149)
+#define CSEX_EQUALISER_11025_SCALE 15
 
 /* Equaliser coefficients for 12000Hz sample rate, \
    CS scaled with 1.032521 and CSEX scaled with 0.771017 */
-#define CS_EQUALISER_12000_A0                      1.276661
-#define CS_EQUALISER_12000_A1                     (-1.017519)
-#define CS_EQUALISER_12000_A2                     (-0.044128)
-#define CS_EQUALISER_12000_B1                     (-0.729616)
-#define CS_EQUALISER_12000_B2                     (-0.204532)
-#define CS_EQUALISER_12000_SCALE                     14
-#define CSEX_EQUALISER_12000_A0                 1.007095
-#define CSEX_EQUALISER_12000_A1                (-0.871912)
-#define CSEX_EQUALISER_12000_A2                 0.023232
-#define CSEX_EQUALISER_12000_B1                (-0.745857)
-#define CSEX_EQUALISER_12000_B2                (-0.189171)
-#define CSEX_EQUALISER_12000_SCALE                   14
+#define CS_EQUALISER_12000_A0 1.276661
+#define CS_EQUALISER_12000_A1 (-1.017519)
+#define CS_EQUALISER_12000_A2 (-0.044128)
+#define CS_EQUALISER_12000_B1 (-0.729616)
+#define CS_EQUALISER_12000_B2 (-0.204532)
+#define CS_EQUALISER_12000_SCALE 14
+#define CSEX_EQUALISER_12000_A0 1.007095
+#define CSEX_EQUALISER_12000_A1 (-0.871912)
+#define CSEX_EQUALISER_12000_A2 0.023232
+#define CSEX_EQUALISER_12000_B1 (-0.745857)
+#define CSEX_EQUALISER_12000_B2 (-0.189171)
+#define CSEX_EQUALISER_12000_SCALE 14
 
 /* Equaliser coefficients for 16000Hz sample rate, \
    CS scaled with 1.031378 and CSEX scaled with 0.770164 */
-#define CS_EQUALISER_16000_A0                     1.281629
-#define CS_EQUALISER_16000_A1                    (-1.075872)
-#define CS_EQUALISER_16000_A2                    (-0.041365)
-#define CS_EQUALISER_16000_B1                    (-0.725239)
-#define CS_EQUALISER_16000_B2                    (-0.224358)
-#define CS_EQUALISER_16000_SCALE                     14
-#define CSEX_EQUALISER_16000_A0                  1.081091
-#define CSEX_EQUALISER_16000_A1                 (-0.867183)
-#define CSEX_EQUALISER_16000_A2                 (-0.070247)
-#define CSEX_EQUALISER_16000_B1                 (-0.515121)
-#define CSEX_EQUALISER_16000_B2                 (-0.425893)
-#define CSEX_EQUALISER_16000_SCALE                   14
+#define CS_EQUALISER_16000_A0 1.281629
+#define CS_EQUALISER_16000_A1 (-1.075872)
+#define CS_EQUALISER_16000_A2 (-0.041365)
+#define CS_EQUALISER_16000_B1 (-0.725239)
+#define CS_EQUALISER_16000_B2 (-0.224358)
+#define CS_EQUALISER_16000_SCALE 14
+#define CSEX_EQUALISER_16000_A0 1.081091
+#define CSEX_EQUALISER_16000_A1 (-0.867183)
+#define CSEX_EQUALISER_16000_A2 (-0.070247)
+#define CSEX_EQUALISER_16000_B1 (-0.515121)
+#define CSEX_EQUALISER_16000_B2 (-0.425893)
+#define CSEX_EQUALISER_16000_SCALE 14
 
 /* Equaliser coefficients for 22050Hz sample rate, \
    CS scaled with 1.041576 and CSEX scaled with 0.777779 */
-#define CS_EQUALISER_22050_A0                   1.388605
-#define CS_EQUALISER_22050_A1                  (-1.305799)
-#define CS_EQUALISER_22050_A2                   0.039922
-#define CS_EQUALISER_22050_B1                  (-0.719494)
-#define CS_EQUALISER_22050_B2                  (-0.243245)
-#define CS_EQUALISER_22050_SCALE                     14
-#define CSEX_EQUALISER_22050_A0                   1.272910
-#define CSEX_EQUALISER_22050_A1                  (-1.341014)
-#define CSEX_EQUALISER_22050_A2                   0.167462
-#define CSEX_EQUALISER_22050_B1                  (-0.614219)
-#define CSEX_EQUALISER_22050_B2                  (-0.345384)
-#define CSEX_EQUALISER_22050_SCALE                   14
+#define CS_EQUALISER_22050_A0 1.388605
+#define CS_EQUALISER_22050_A1 (-1.305799)
+#define CS_EQUALISER_22050_A2 0.039922
+#define CS_EQUALISER_22050_B1 (-0.719494)
+#define CS_EQUALISER_22050_B2 (-0.243245)
+#define CS_EQUALISER_22050_SCALE 14
+#define CSEX_EQUALISER_22050_A0 1.272910
+#define CSEX_EQUALISER_22050_A1 (-1.341014)
+#define CSEX_EQUALISER_22050_A2 0.167462
+#define CSEX_EQUALISER_22050_B1 (-0.614219)
+#define CSEX_EQUALISER_22050_B2 (-0.345384)
+#define CSEX_EQUALISER_22050_SCALE 14
 
 /* Equaliser coefficients for 24000Hz sample rate, \
    CS scaled with 1.034495 and CSEX scaled with 0.772491 */
-#define CS_EQUALISER_24000_A0                    1.409832
-#define CS_EQUALISER_24000_A1                   (-1.456506)
-#define CS_EQUALISER_24000_A2                    0.151410
-#define CS_EQUALISER_24000_B1                   (-0.804201)
-#define CS_EQUALISER_24000_B2                   (-0.163783)
-#define CS_EQUALISER_24000_SCALE                     14
-#define CSEX_EQUALISER_24000_A0                  1.299198
-#define CSEX_EQUALISER_24000_A1                 (-1.452447)
-#define CSEX_EQUALISER_24000_A2                  0.240489
-#define CSEX_EQUALISER_24000_B1                 (-0.669303)
-#define CSEX_EQUALISER_24000_B2                 (-0.294984)
-#define CSEX_EQUALISER_24000_SCALE                   14
+#define CS_EQUALISER_24000_A0 1.409832
+#define CS_EQUALISER_24000_A1 (-1.456506)
+#define CS_EQUALISER_24000_A2 0.151410
+#define CS_EQUALISER_24000_B1 (-0.804201)
+#define CS_EQUALISER_24000_B2 (-0.163783)
+#define CS_EQUALISER_24000_SCALE 14
+#define CSEX_EQUALISER_24000_A0 1.299198
+#define CSEX_EQUALISER_24000_A1 (-1.452447)
+#define CSEX_EQUALISER_24000_A2 0.240489
+#define CSEX_EQUALISER_24000_B1 (-0.669303)
+#define CSEX_EQUALISER_24000_B2 (-0.294984)
+#define CSEX_EQUALISER_24000_SCALE 14
 
 /* Equaliser coefficients for 32000Hz sample rate, \
    CS scaled with 1.044559 and CSEX scaled with 0.780006 */
-#define CS_EQUALISER_32000_A0                     1.560988
-#define CS_EQUALISER_32000_A1                    (-1.877724)
-#define CS_EQUALISER_32000_A2                     0.389741
-#define CS_EQUALISER_32000_B1                    (-0.907410)
-#define CS_EQUALISER_32000_B2                    (-0.070489)
-#define CS_EQUALISER_32000_SCALE                     14
-#define CSEX_EQUALISER_32000_A0                  1.785049
-#define CSEX_EQUALISER_32000_A1                 (-2.233497)
-#define CSEX_EQUALISER_32000_A2                  0.526431
-#define CSEX_EQUALISER_32000_B1                 (-0.445939)
-#define CSEX_EQUALISER_32000_B2                 (-0.522446)
-#define CSEX_EQUALISER_32000_SCALE                   13
+#define CS_EQUALISER_32000_A0 1.560988
+#define CS_EQUALISER_32000_A1 (-1.877724)
+#define CS_EQUALISER_32000_A2 0.389741
+#define CS_EQUALISER_32000_B1 (-0.907410)
+#define CS_EQUALISER_32000_B2 (-0.070489)
+#define CS_EQUALISER_32000_SCALE 14
+#define CSEX_EQUALISER_32000_A0 1.785049
+#define CSEX_EQUALISER_32000_A1 (-2.233497)
+#define CSEX_EQUALISER_32000_A2 0.526431
+#define CSEX_EQUALISER_32000_B1 (-0.445939)
+#define CSEX_EQUALISER_32000_B2 (-0.522446)
+#define CSEX_EQUALISER_32000_SCALE 13
 
 /* Equaliser coefficients for 44100Hz sample rate, \
    CS scaled with 1.022170 and CSEX scaled with 0.763288 */
-#define CS_EQUALISER_44100_A0                  1.623993
-#define CS_EQUALISER_44100_A1                 (-2.270743)
-#define CS_EQUALISER_44100_A2                  0.688829
-#define CS_EQUALISER_44100_B1                 (-1.117190)
-#define CS_EQUALISER_44100_B2                  0.130208
-#define CS_EQUALISER_44100_SCALE                     13
-#define CSEX_EQUALISER_44100_A0                   2.028315
-#define CSEX_EQUALISER_44100_A1                  (-2.882459)
-#define CSEX_EQUALISER_44100_A2                   0.904535
-#define CSEX_EQUALISER_44100_B1                  (-0.593308)
-#define CSEX_EQUALISER_44100_B2                  (-0.385816)
-#define CSEX_EQUALISER_44100_SCALE                   13
+#define CS_EQUALISER_44100_A0 1.623993
+#define CS_EQUALISER_44100_A1 (-2.270743)
+#define CS_EQUALISER_44100_A2 0.688829
+#define CS_EQUALISER_44100_B1 (-1.117190)
+#define CS_EQUALISER_44100_B2 0.130208
+#define CS_EQUALISER_44100_SCALE 13
+#define CSEX_EQUALISER_44100_A0 2.028315
+#define CSEX_EQUALISER_44100_A1 (-2.882459)
+#define CSEX_EQUALISER_44100_A2 0.904535
+#define CSEX_EQUALISER_44100_B1 (-0.593308)
+#define CSEX_EQUALISER_44100_B2 (-0.385816)
+#define CSEX_EQUALISER_44100_SCALE 13
 
 /* Equaliser coefficients for 48000Hz sample rate, \
    CS scaled with 1.018635 and CSEX scaled with 0.760648 */
-#define CS_EQUALISER_48000_A0                    1.641177
-#define CS_EQUALISER_48000_A1                   (-2.364687)
-#define CS_EQUALISER_48000_A2                    0.759910
-#define CS_EQUALISER_48000_B1                   (-1.166774)
-#define CS_EQUALISER_48000_B2                    0.178074
-#define CS_EQUALISER_48000_SCALE                     13
-#define CSEX_EQUALISER_48000_A0                  2.099655
-#define CSEX_EQUALISER_48000_A1                 (-3.065220)
-#define CSEX_EQUALISER_48000_A2                  1.010417
-#define CSEX_EQUALISER_48000_B1                 (-0.634021)
-#define CSEX_EQUALISER_48000_B2                 (-0.347332)
-#define CSEX_EQUALISER_48000_SCALE                   13
+#define CS_EQUALISER_48000_A0 1.641177
+#define CS_EQUALISER_48000_A1 (-2.364687)
+#define CS_EQUALISER_48000_A2 0.759910
+#define CS_EQUALISER_48000_B1 (-1.166774)
+#define CS_EQUALISER_48000_B2 0.178074
+#define CS_EQUALISER_48000_SCALE 13
+#define CSEX_EQUALISER_48000_A0 2.099655
+#define CSEX_EQUALISER_48000_A1 (-3.065220)
+#define CSEX_EQUALISER_48000_A2 1.010417
+#define CSEX_EQUALISER_48000_B1 (-0.634021)
+#define CSEX_EQUALISER_48000_B2 (-0.347332)
+#define CSEX_EQUALISER_48000_SCALE 13
 
 /* Equaliser coefficients for 88200Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_EQUALISER_88200_A0                   1.771899f
-#define CS_EQUALISER_88200_A1                   (-2.930762f)
-#define CS_EQUALISER_88200_A2                   1.172175f
-#define CS_EQUALISER_88200_B1                   (-1.438349f)
-#define CS_EQUALISER_88200_B2                   0.442520f
-#define CS_EQUALISER_88200_SCALE                13
-#define CSEX_EQUALISER_88200_A0                 2.675241f
-#define CSEX_EQUALISER_88200_A1                 (-4.466154f)
-#define CSEX_EQUALISER_88200_A2                 1.810305f
-#define CSEX_EQUALISER_88200_B1                 (-0.925350f)
-#define CSEX_EQUALISER_88200_B2                 (-0.066616f)
-#define CSEX_EQUALISER_88200_SCALE              13
+#define CS_EQUALISER_88200_A0 1.771899f
+#define CS_EQUALISER_88200_A1 (-2.930762f)
+#define CS_EQUALISER_88200_A2 1.172175f
+#define CS_EQUALISER_88200_B1 (-1.438349f)
+#define CS_EQUALISER_88200_B2 0.442520f
+#define CS_EQUALISER_88200_SCALE 13
+#define CSEX_EQUALISER_88200_A0 2.675241f
+#define CSEX_EQUALISER_88200_A1 (-4.466154f)
+#define CSEX_EQUALISER_88200_A2 1.810305f
+#define CSEX_EQUALISER_88200_B1 (-0.925350f)
+#define CSEX_EQUALISER_88200_B2 (-0.066616f)
+#define CSEX_EQUALISER_88200_SCALE 13
 
-#define CS_EQUALISER_96000_A0                    1.784497
-#define CS_EQUALISER_96000_A1                   (-3.001435)
-#define CS_EQUALISER_96000_A2                    1.228422
-#define CS_EQUALISER_96000_B1                   (-1.477804)
-#define CS_EQUALISER_96000_B2                    0.481369
-#define CS_EQUALISER_96000_SCALE                     13
-#define CSEX_EQUALISER_96000_A0                  2.7573
-#define CSEX_EQUALISER_96000_A1                 (-4.6721)
-#define CSEX_EQUALISER_96000_A2                  1.9317
-#define CSEX_EQUALISER_96000_B1                 (-0.971718)
-#define CSEX_EQUALISER_96000_B2                 (-0.021216)
-#define CSEX_EQUALISER_96000_SCALE                   13
+#define CS_EQUALISER_96000_A0 1.784497
+#define CS_EQUALISER_96000_A1 (-3.001435)
+#define CS_EQUALISER_96000_A2 1.228422
+#define CS_EQUALISER_96000_B1 (-1.477804)
+#define CS_EQUALISER_96000_B2 0.481369
+#define CS_EQUALISER_96000_SCALE 13
+#define CSEX_EQUALISER_96000_A0 2.7573
+#define CSEX_EQUALISER_96000_A1 (-4.6721)
+#define CSEX_EQUALISER_96000_A2 1.9317
+#define CSEX_EQUALISER_96000_B1 (-0.971718)
+#define CSEX_EQUALISER_96000_B2 (-0.021216)
+#define CSEX_EQUALISER_96000_SCALE 13
 /* Equaliser coefficients for 176400Hz sample rate.
  * The filter coefficients are obtained by carrying out
  * state-space analysis using the coefficients available
  * for 44100Hz.
  */
-#define CS_EQUALISER_176400_A0                  1.883440f
-#define CS_EQUALISER_176400_A1                  (-3.414272f)
-#define CS_EQUALISER_176400_A2                  1.534702f
-#define CS_EQUALISER_176400_B1                  (-1.674614f)
-#define CS_EQUALISER_176400_B2                  0.675827f
-#define CS_EQUALISER_176400_SCALE               13
-#define CSEX_EQUALISER_176400_A0                3.355068f
-#define CSEX_EQUALISER_176400_A1                (-6.112578f)
-#define CSEX_EQUALISER_176400_A2                2.764135f
-#define CSEX_EQUALISER_176400_B1                (-1.268533f)
-#define CSEX_EQUALISER_176400_B2                0.271277f
-#define CSEX_EQUALISER_176400_SCALE             13
+#define CS_EQUALISER_176400_A0 1.883440f
+#define CS_EQUALISER_176400_A1 (-3.414272f)
+#define CS_EQUALISER_176400_A2 1.534702f
+#define CS_EQUALISER_176400_B1 (-1.674614f)
+#define CS_EQUALISER_176400_B2 0.675827f
+#define CS_EQUALISER_176400_SCALE 13
+#define CSEX_EQUALISER_176400_A0 3.355068f
+#define CSEX_EQUALISER_176400_A1 (-6.112578f)
+#define CSEX_EQUALISER_176400_A2 2.764135f
+#define CSEX_EQUALISER_176400_B1 (-1.268533f)
+#define CSEX_EQUALISER_176400_B2 0.271277f
+#define CSEX_EQUALISER_176400_SCALE 13
 
-#define CS_EQUALISER_192000_A0                    1.889582
-#define CS_EQUALISER_192000_A1                   (-3.456140)
-#define CS_EQUALISER_192000_A2                    1.569864
-#define CS_EQUALISER_192000_B1                   (-1.700798)
-#define CS_EQUALISER_192000_B2                    0.701824
-#define CS_EQUALISER_192000_SCALE                     13
-#define CSEX_EQUALISER_192000_A0                  3.4273
-#define CSEX_EQUALISER_192000_A1                 (-6.2936)
-#define CSEX_EQUALISER_192000_A2                  2.8720
-#define CSEX_EQUALISER_192000_B1                 (-1.31074)
-#define CSEX_EQUALISER_192000_B2                 0.31312
-#define CSEX_EQUALISER_192000_SCALE                   13
+#define CS_EQUALISER_192000_A0 1.889582
+#define CS_EQUALISER_192000_A1 (-3.456140)
+#define CS_EQUALISER_192000_A2 1.569864
+#define CS_EQUALISER_192000_B1 (-1.700798)
+#define CS_EQUALISER_192000_B2 0.701824
+#define CS_EQUALISER_192000_SCALE 13
+#define CSEX_EQUALISER_192000_A0 3.4273
+#define CSEX_EQUALISER_192000_A1 (-6.2936)
+#define CSEX_EQUALISER_192000_A2 2.8720
+#define CSEX_EQUALISER_192000_B1 (-1.31074)
+#define CSEX_EQUALISER_192000_B2 0.31312
+#define CSEX_EQUALISER_192000_SCALE 13
 
-#define LVCS_HEADPHONE_SHIFT                          2              /* Output Shift */
-#define LVCS_HEADPHONE_SHIFTLOSS                  0.8477735          /* Output Shift loss */
-#define LVCS_HEADPHONE_GAIN                       0.2087465          /* Unprocessed path gain */
-#define LVCS_EX_HEADPHONE_SHIFT                       3              /* EX Output Shift */
-#define LVCS_EX_HEADPHONE_SHIFTLOSS               0.569225           /* EX Output Shift loss */
-#define LVCS_EX_HEADPHONE_GAIN                    0.07794425         /* EX Unprocessed path gain */
+#define LVCS_HEADPHONE_SHIFT 2               /* Output Shift */
+#define LVCS_HEADPHONE_SHIFTLOSS 0.8477735   /* Output Shift loss */
+#define LVCS_HEADPHONE_GAIN 0.2087465        /* Unprocessed path gain */
+#define LVCS_EX_HEADPHONE_SHIFT 3            /* EX Output Shift */
+#define LVCS_EX_HEADPHONE_SHIFTLOSS 0.569225 /* EX Output Shift loss */
+#define LVCS_EX_HEADPHONE_GAIN 0.07794425    /* EX Unprocessed path gain */
 #endif
-
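Before moving to the next file, a minimal sketch of how one of the coefficient sets above would be consumed, assuming the equaliser follows the same pattern this patch applies to the reverb filter in LVCS_ReverbGenerator.cpp: the five constants for the active sample rate are packed, in {A0, A1, A2, B1, B2} order, into the coefficient array expected by android::audio_utils::BiquadFilter. The helper name, the plain float type, and the choice of the 48000 Hz CS set are illustrative; whether the equaliser keeps the legacy sign flip on B1/B2 should be confirmed against LVCS_Equaliser.cpp.

#include <array>
#include <cstddef>
#include <memory>
#include <audio_utils/BiquadFilter.h>

// Hypothetical helper: pack the 48000 Hz CS equaliser constants into the
// coefficient array layout used elsewhere in this patch and build a
// BiquadFilter for the requested channel count.
static std::unique_ptr<android::audio_utils::BiquadFilter<float>> makeCsEqualiser48k(
        size_t channels) {
    std::array<float, android::audio_utils::kBiquadNumCoefs> coefs = {
            1.641177f,   // CS_EQUALISER_48000_A0
            -2.364687f,  // CS_EQUALISER_48000_A1
            0.759910f,   // CS_EQUALISER_48000_A2
            -1.166774f,  // CS_EQUALISER_48000_B1 (sign convention to confirm)
            0.178074f};  // CS_EQUALISER_48000_B2
    return std::make_unique<android::audio_utils::BiquadFilter<float>>(channels, coefs);
}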
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index 630ecf7..dd1baf3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -20,99 +20,11 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
-
+#include <stdlib.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_Tables.h"
 
-/****************************************************************************************/
-/*                                                                                      */
-/* FUNCTION:                LVCS_Memory                                                 */
-/*                                                                                      */
-/* DESCRIPTION:                                                                         */
-/*  This function is used for memory allocation and free. It can be called in           */
-/*  two ways:                                                                           */
-/*                                                                                      */
-/*      hInstance = NULL                Returns the memory requirements                 */
-/*      hInstance = Instance handle     Returns the memory requirements and             */
-/*                                      allocated base addresses for the instance       */
-/*                                                                                      */
-/*  When this function is called for memory allocation (hInstance=NULL) it is           */
-/*  passed the default capabilities.                                                    */
-/*                                                                                      */
-/*  When called for memory allocation the memory base address pointers are NULL on      */
-/*  return.                                                                             */
-/*                                                                                      */
-/*  When the function is called for free (hInstance = Instance Handle) the              */
-/*  capabilities are ignored and the memory table returns the allocated memory and      */
-/*  base addresses used during initialisation.                                          */
-/*                                                                                      */
-/* PARAMETERS:                                                                          */
-/*  hInstance               Instance Handle                                             */
-/*  pMemoryTable            Pointer to an empty memory definition table                 */
-/*  pCapabilities           Pointer to the default capabilites                          */
-/*                                                                                      */
-/* RETURNS:                                                                             */
-/*  LVCS_Success            Succeeded                                                   */
-/*                                                                                      */
-/* NOTES:                                                                               */
-/*  1.  This function may be interrupted by the LVCS_Process function                   */
-/*                                                                                      */
-/****************************************************************************************/
-
-LVCS_ReturnStatus_en LVCS_Memory(LVCS_Handle_t          hInstance,
-                                 LVCS_MemTab_t          *pMemoryTable,
-                                 LVCS_Capabilities_t    *pCapabilities)
-{
-
-    LVM_UINT32          ScratchSize;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t *)hInstance;
-
-    /*
-     * Fill in the memory table
-     */
-    if (hInstance == LVM_NULL)
-    {
-        /*
-         * Instance memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Size         = (LVM_UINT32)sizeof(LVCS_Instance_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].Type         = LVCS_PERSISTENT;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Data memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Size         = (LVM_UINT32)sizeof(LVCS_Data_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].Type         = LVCS_DATA;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress = LVM_NULL;
-
-        /*
-         * Coefficient memory
-         */
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Size         = (LVM_UINT32)sizeof(LVCS_Coefficient_t);
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].Type         = LVCS_COEFFICIENT;
-        pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress = LVM_NULL;
-
-        /*
-         * Scratch memory
-         */
-        /* Inplace processing */
-        ScratchSize = (LVM_UINT32) \
-                        (LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * pCapabilities->MaxBlockSize);
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Size         = ScratchSize;
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].Type         = LVCS_SCRATCH;
-        pMemoryTable->Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress = LVM_NULL;
-    }
-    else
-    {
-        /* Read back memory allocation table */
-        *pMemoryTable = pInstance->MemoryTable;
-    }
-
-    return(LVCS_SUCCESS);
-}
-
 /************************************************************************************/
 /*                                                                                  */
 /* FUNCTION:                LVCS_Init                                               */
@@ -120,91 +32,95 @@
 /* DESCRIPTION:                                                                     */
 /*  Creation and initialisation function for the Concert Sound module               */
 /*                                                                                  */
-/*  This function can be used to create an algorithm instance by calling with       */
-/*  hInstance set to LVM_NULL. In this case the algorithm returns the new instance  */
-/*  handle.                                                                         */
-/*                                                                                  */
-/*  This function can be used to force a full re-initialisation of the algorithm    */
-/*  by calling with hInstance = Instance Handle. In this case the memory table      */
-/*  should be correct for the instance, this can be ensured by calling the function */
-/*  LVCS_Memory before calling this function.                                       */
-/*                                                                                  */
 /* PARAMETERS:                                                                      */
-/*  hInstance               Instance handle                                         */
-/*  pMemoryTable            Pointer to the memory definition table                  */
+/*  phInstance              Pointer to instance handle                              */
 /*  pCapabilities           Pointer to the capabilities structure                   */
+/*  pScratch                Pointer to scratch buffer                               */
 /*                                                                                  */
 /* RETURNS:                                                                         */
 /*  LVCS_Success            Initialisation succeeded                                */
+/*  LVCS_NULLADDRESS        One or more memory regions are NULL - malloc failure    */
 /*                                                                                  */
 /* NOTES:                                                                           */
-/*  1.  The instance handle is the pointer to the base address of the first memory  */
-/*      region.                                                                     */
-/*  2.  This function must not be interrupted by the LVCS_Process function          */
-/*  3.  This function must be called with the same capabilities as used for the     */
-/*      call to the memory function                                                 */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t         *phInstance,
-                               LVCS_MemTab_t         *pMemoryTable,
-                               LVCS_Capabilities_t   *pCapabilities)
-{
-
-    LVCS_Instance_t                 *pInstance;
-    LVCS_VolCorrect_t               *pLVCS_VolCorrectTable;
+LVCS_ReturnStatus_en LVCS_Init(LVCS_Handle_t* phInstance, LVCS_Capabilities_t* pCapabilities,
+                               void* pScratch) {
+    LVCS_Instance_t* pInstance;
+    LVCS_VolCorrect_t* pLVCS_VolCorrectTable;
 
     /*
-     * Set the instance handle if not already initialised
+     * Create the instance handle if not already initialised
      */
-    if (*phInstance == LVM_NULL)
-    {
-        *phInstance = (LVCS_Handle_t)pMemoryTable->Region[LVCS_MEMREGION_PERSISTENT_SLOW_DATA].pBaseAddress;
+    if (*phInstance == LVM_NULL) {
+        *phInstance = new LVCS_Instance_t{};
     }
-    pInstance =(LVCS_Instance_t  *)*phInstance;
+    pInstance = (LVCS_Instance_t*)*phInstance;
 
     /*
      * Save the capabilities in the instance structure
      */
     pInstance->Capabilities = *pCapabilities;
 
-    /*
-     * Save the memory table in the instance structure
-     */
-    pInstance->MemoryTable = *pMemoryTable;
+    pInstance->pScratch = pScratch;
 
     /*
      * Set all initial parameters to invalid to force a full initialisation
      */
-    pInstance->Params.OperatingMode  = LVCS_OFF;
-    pInstance->Params.SpeakerType    = LVCS_SPEAKERTYPE_MAX;
-    pInstance->OutputDevice          = LVCS_HEADPHONE;
-    pInstance->Params.SourceFormat   = LVCS_SOURCEMAX;
+    pInstance->Params.OperatingMode = LVCS_OFF;
+    pInstance->Params.SpeakerType = LVCS_SPEAKERTYPE_MAX;
+    pInstance->OutputDevice = LVCS_HEADPHONE;
+    pInstance->Params.SourceFormat = LVCS_SOURCEMAX;
     pInstance->Params.CompressorMode = LVM_MODE_OFF;
-    pInstance->Params.SampleRate     = LVM_FS_INVALID;
-    pInstance->Params.EffectLevel    = 0;
-    pInstance->Params.ReverbLevel    = (LVM_UINT16)0x8000;
-    pLVCS_VolCorrectTable            = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
-    pInstance->VolCorrect            = pLVCS_VolCorrectTable[0];
-    pInstance->TransitionGain        = 0;
+    pInstance->Params.SampleRate = LVM_FS_INVALID;
+    pInstance->Params.EffectLevel = 0;
+    pInstance->Params.ReverbLevel = (LVM_UINT16)0x8000;
+    pLVCS_VolCorrectTable = (LVCS_VolCorrect_t*)&LVCS_VolCorrectTable[0];
+    pInstance->VolCorrect = pLVCS_VolCorrectTable[0];
+    pInstance->TransitionGain = 0;
 
     /* These current and target values are initialized again in LVCS_Control.c */
-    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0],0,0);
+    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[0], 0, 0);
     /* These current and target values are initialized again in LVCS_Control.c */
-    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1],0,0);
+    LVC_Mixer_Init(&pInstance->BypassMix.Mixer_Instance.MixerStream[1], 0, 0);
 
     /*
      * Initialise the bypass variables
      */
-    pInstance->MSTarget0=0;
-    pInstance->MSTarget1=0;
-    pInstance->bInOperatingModeTransition          = LVM_FALSE;
-    pInstance->bTimerDone                        = LVM_FALSE;
-    pInstance->TimerParams.CallBackParam         = 0;
-    pInstance->TimerParams.pCallBack             = LVCS_TimerCallBack;
-    pInstance->TimerParams.pCallbackInstance     = pInstance;
-    pInstance->TimerParams.pCallBackParams       = LVM_NULL;
+    pInstance->MSTarget0 = 0;
+    pInstance->MSTarget1 = 0;
+    pInstance->bInOperatingModeTransition = LVM_FALSE;
+    pInstance->bTimerDone = LVM_FALSE;
+    pInstance->TimerParams.CallBackParam = 0;
+    pInstance->TimerParams.pCallBack = LVCS_TimerCallBack;
+    pInstance->TimerParams.pCallbackInstance = pInstance;
+    pInstance->TimerParams.pCallBackParams = LVM_NULL;
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
 
+/************************************************************************************/
+/*                                                                                  */
+/* FUNCTION:                LVCS_DeInit                                             */
+/*                                                                                  */
+/* DESCRIPTION:                                                                     */
+/*  Free memory allocated during the LVCS_Init call, including the instance handle  */
+/*                                                                                  */
+/* PARAMETERS:                                                                      */
+/*  phInstance              Pointer to instance handle                              */
+/*                                                                                  */
+/* NOTES:                                                                           */
+/*  1.  This function must not be interrupted by the LVCS_Process function          */
+/*                                                                                  */
+/************************************************************************************/
+void LVCS_DeInit(LVCS_Handle_t* phInstance) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)*phInstance;
+    if (pInstance == LVM_NULL) {
+        return;
+    }
+    delete pInstance;
+    *phInstance = LVM_NULL;
+    return;
+}
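A minimal usage sketch of the reworked lifecycle, under the assumption that the caller (the effect bundle, or a test harness) now owns the scratch buffer and sizes it the way the removed LVCS_Memory() did, i.e. LVCS_SCRATCHBUFFERS * sizeof(LVM_FLOAT) * MaxBlockSize. The block size, the calloc/free pairing, and the function name are illustrative, not taken from this patch.

#include <stdlib.h>
#include "LVCS.h"
#include "LVCS_Private.h"  // for LVCS_SCRATCHBUFFERS (private header, sketch only)

// Sketch only: create, use and tear down an LVCS instance with the new API.
static void exampleLvcsLifecycle(void) {
    LVCS_Capabilities_t caps{};
    caps.MaxBlockSize = 4096;  // illustrative value, normally provided by the bundle

    // Scratch sizing follows the formula the removed LVCS_Memory() used.
    void* pScratch = calloc(LVCS_SCRATCHBUFFERS * caps.MaxBlockSize, sizeof(LVM_FLOAT));

    LVCS_Handle_t hInstance = LVM_NULL;
    if (pScratch != nullptr && LVCS_Init(&hInstance, &caps, pScratch) == LVCS_SUCCESS) {
        // ... LVCS_Control() and LVCS_Process() calls would go here ...
        LVCS_DeInit(&hInstance);  // deletes the instance and resets the handle to LVM_NULL
    }
    free(pScratch);
}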
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
index 154ea55..58e21bd 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Private.h
@@ -33,11 +33,12 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "LVCS.h"                               /* Calling or Application layer definitions */
-#include "LVCS_StereoEnhancer.h"                /* Stereo enhancer module definitions */
-#include "LVCS_ReverbGenerator.h"               /* Reverberation module definitions */
-#include "LVCS_Equaliser.h"                     /* Equaliser module definitions */
-#include "LVCS_BypassMix.h"                     /* Bypass Mixer module definitions */
+#include <audio_utils/BiquadFilter.h>
+#include "LVCS.h"                 /* Calling or Application layer definitions */
+#include "LVCS_StereoEnhancer.h"  /* Stereo enhancer module definitions */
+#include "LVCS_ReverbGenerator.h" /* Reverberation module definitions */
+#include "LVCS_Equaliser.h"       /* Equaliser module definitions */
+#include "LVCS_BypassMix.h"       /* Bypass Mixer module definitions */
 #include "LVM_Timer.h"
 
 /************************************************************************************/
@@ -47,35 +48,29 @@
 /************************************************************************************/
 
 /* Configuration switch controls */
-#define LVCS_STEREOENHANCESWITCH    0x0001      /* Stereo enhancement enable control */
-#define LVCS_REVERBSWITCH           0x0002      /* Reverberation enable control */
-#define LVCS_EQUALISERSWITCH        0x0004      /* Equaliser enable control */
-#define LVCS_BYPASSMIXSWITCH        0x0008      /* Bypass mixer enable control */
-#define LVCS_COMPGAINFRAME          64          /* Compressor gain update interval */
+#define LVCS_STEREOENHANCESWITCH 0x0001 /* Stereo enhancement enable control */
+#define LVCS_REVERBSWITCH 0x0002        /* Reverberation enable control */
+#define LVCS_EQUALISERSWITCH 0x0004     /* Equaliser enable control */
+#define LVCS_BYPASSMIXSWITCH 0x0008     /* Bypass mixer enable control */
+#define LVCS_COMPGAINFRAME 64           /* Compressor gain update interval */
 
 /* Memory */
-#ifdef SUPPORT_MC
-#define LVCS_SCRATCHBUFFERS              8      /* Number of buffers required for inplace processing */
-#else
-#define LVCS_SCRATCHBUFFERS              6      /* Number of buffers required for inplace processing */
-#endif
-#ifdef SUPPORT_MC
+#define LVCS_SCRATCHBUFFERS 8 /* Number of buffers required for inplace processing */
 /*
  * The Concert Surround module applies processing only on the first two
  * channels of a multichannel input. The data of first two channels is copied
  * from the multichannel input into scratch buffer. The buffers added here
  * are used for this purpose
  */
-#define LVCS_MC_SCRATCHBUFFERS           2
-#endif
+#define LVCS_MC_SCRATCHBUFFERS 2
 
 /* General */
-#define LVCS_INVALID                0xFFFF      /* Invalid init parameter */
-#define LVCS_BYPASS_MIXER_TC        100         /* Bypass mixer time */
+#define LVCS_INVALID 0xFFFF      /* Invalid init parameter */
+#define LVCS_BYPASS_MIXER_TC 100 /* Bypass mixer time */
 
 /* Access to external coefficients table */
-#define LVCS_NR_OF_FS                    9
-#define LVCS_NR_OF_CHAN_CFG              2
+#define LVCS_NR_OF_FS 9
+#define LVCS_NR_OF_CHAN_CFG 2
 
 /************************************************************************************/
 /*                                                                                  */
@@ -83,13 +78,9 @@
 /*                                                                                  */
 /************************************************************************************/
 
-typedef LVM_UINT16  LVCS_Configuration_t;       /* Internal algorithm configuration */
+typedef LVM_UINT16 LVCS_Configuration_t; /* Internal algorithm configuration */
 
-typedef enum
-{
-    LVCS_HEADPHONE  = 0,
-    LVCS_DEVICE_MAX = LVM_MAXENUM
-} LVCS_OutputDevice_en;
+typedef enum { LVCS_HEADPHONE = 0, LVCS_DEVICE_MAX = LVM_MAXENUM } LVCS_OutputDevice_en;
 
 /************************************************************************************/
 /*                                                                                  */
@@ -98,65 +89,50 @@
 /************************************************************************************/
 
 /* Volume correction structure */
-typedef struct
-{
-    LVM_FLOAT   CompFull;                       /* Post CS compression 100% effect */
-    LVM_FLOAT   CompMin;                        /* Post CS compression 0% effect */
-    LVM_FLOAT   GainFull;                       /* CS gain correct 100% effect */
-    LVM_FLOAT   GainMin;                        /* CS gain correct 0% effect */
+typedef struct {
+    LVM_FLOAT CompFull; /* Post CS compression 100% effect */
+    LVM_FLOAT CompMin;  /* Post CS compression 0% effect */
+    LVM_FLOAT GainFull; /* CS gain correct 100% effect */
+    LVM_FLOAT GainMin;  /* CS gain correct 0% effect */
 } LVCS_VolCorrect_t;
 
 /* Instance structure */
-typedef struct
-{
+typedef struct {
     /* Public parameters */
-    LVCS_MemTab_t           MemoryTable;        /* Instance memory allocation table */
-    LVCS_Params_t           Params;             /* Instance parameters */
-    LVCS_Capabilities_t     Capabilities;       /* Initialisation capabilities */
+    LVCS_Params_t Params;             /* Instance parameters */
+    LVCS_Capabilities_t Capabilities; /* Initialisation capabilities */
 
     /* Private parameters */
-    LVCS_OutputDevice_en    OutputDevice;       /* Selected output device type */
-    LVCS_VolCorrect_t       VolCorrect;         /* Volume correction settings */
-    LVM_FLOAT               TransitionGain;     /* Transition gain */
-    LVM_FLOAT               CompressGain;       /* Last used compressor gain*/
+    LVCS_OutputDevice_en OutputDevice; /* Selected output device type */
+    LVCS_VolCorrect_t VolCorrect;      /* Volume correction settings */
+    LVM_FLOAT TransitionGain;          /* Transition gain */
+    LVM_FLOAT CompressGain;            /* Last used compressor gain*/
 
     /* Sub-block configurations */
-    LVCS_StereoEnhancer_t   StereoEnhancer;     /* Stereo enhancer configuration */
-    LVCS_ReverbGenerator_t  Reverberation;      /* Reverberation configuration */
-    LVCS_Equaliser_t        Equaliser;          /* Equaliser configuration */
-    LVCS_BypassMix_t        BypassMix;          /* Bypass mixer configuration */
+    LVCS_StereoEnhancer_t StereoEnhancer; /* Stereo enhancer configuration */
+    LVCS_ReverbGenerator_t Reverberation; /* Reverberation configuration */
+    LVCS_BypassMix_t BypassMix;           /* Bypass mixer configuration */
 
     /* Bypass variable */
-    LVM_INT16               MSTarget0;                          /* Mixer state control variable for smooth transtion */
-    LVM_INT16               MSTarget1;                          /* Mixer state control variable for smooth transtion */
-    LVM_INT16               bInOperatingModeTransition;         /* Operating mode transition flag */
-    LVM_INT16               bTimerDone;                         /* Timer completion flag */
-    LVM_Timer_Params_t      TimerParams;                        /* Timer parameters */
-    LVM_Timer_Instance_t    TimerInstance;                      /* Timer instance */
+    LVM_INT16 MSTarget0;                  /* Mixer state control variable for smooth transition */
+    LVM_INT16 MSTarget1;                  /* Mixer state control variable for smooth transition */
+    LVM_INT16 bInOperatingModeTransition; /* Operating mode transition flag */
+    LVM_INT16 bTimerDone;                 /* Timer completion flag */
+    LVM_Timer_Params_t TimerParams;       /* Timer parameters */
+    LVM_Timer_Instance_t TimerInstance;   /* Timer instance */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pEqBiquad; /* Biquad filter instance for Equaliser */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pRevBiquad; /* Biquad filter instance for Reverberation */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pSEMidBiquad; /* Biquad filter instance for Stereo enhancement mid */
+    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>>
+            pSESideBiquad; /* Biquad filter instance for Stereo enhancement side */
+    void* pScratch;                       /* Pointer to bundle scratch buffer */
 
 } LVCS_Instance_t;
 
-/* Coefficient Structure */
-typedef struct
-{
-    Biquad_FLOAT_Instance_t       EqualiserBiquadInstance;
-    Biquad_FLOAT_Instance_t       ReverbBiquadInstance;
-    Biquad_FLOAT_Instance_t       SEBiquadInstanceMid;
-    Biquad_FLOAT_Instance_t       SEBiquadInstanceSide;
-} LVCS_Coefficient_t;
 
-/* Data Structure */
-typedef struct
-{
-    Biquad_2I_Order2_FLOAT_Taps_t EqualiserBiquadTaps;
-    Biquad_2I_Order2_FLOAT_Taps_t ReverbBiquadTaps;
-    Biquad_1I_Order1_FLOAT_Taps_t SEBiquadTapsMid;
-    Biquad_1I_Order2_FLOAT_Taps_t SEBiquadTapsSide;
-} LVCS_Data_t;
+void LVCS_TimerCallBack(void* hInstance, void* pCallBackParams, LVM_INT32 CallbackParam);
 
-void LVCS_TimerCallBack (   void* hInstance,
-                            void* pCallBackParams,
-                            LVM_INT32 CallbackParam);
-
-#endif      /* PRIVATE_H */
-
+#endif /* PRIVATE_H */
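A small sketch of how the configuration switch constants above are intended to be combined, assuming they are OR-ed into an LVCS_Configuration_t word and tested with a bit mask; the helper names are illustrative and not part of this header.

#include "LVCS_Private.h"

// Editorial sketch: the switch defines are bit flags in an LVCS_Configuration_t word.
static LVCS_Configuration_t lvcsEnableAllBlocks(void) {
    return (LVCS_Configuration_t)(LVCS_STEREOENHANCESWITCH | LVCS_REVERBSWITCH |
                                  LVCS_EQUALISERSWITCH | LVCS_BYPASSMIXSWITCH);
}

static bool lvcsBlockEnabled(LVCS_Configuration_t config, LVM_UINT16 blockMask) {
    return (config & blockMask) != 0;
}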
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
index 8e09be2..6af0f75 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Process.cpp
@@ -65,116 +65,75 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t              hInstance,
-                                     const LVM_FLOAT            *pInData,
-                                     LVM_FLOAT                  *pOutData,
-                                     LVM_UINT16                 NumSamples)
-{
-    const LVM_FLOAT     *pInput;
-    LVCS_Instance_t     *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVM_FLOAT           *pScratch;
+LVCS_ReturnStatus_en LVCS_Process_CS(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                     LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    const LVM_FLOAT* pInput;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVM_FLOAT* pScratch;
     LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
-    LVM_FLOAT           *pStIn;
-    LVM_INT32           channels = pInstance->Params.NrChannels;
+    LVM_FLOAT* pStIn;
+    LVM_INT32 channels = pInstance->Params.NrChannels;
 #define NrFrames NumSamples  // alias for clarity
 
-    /*In case of mono processing, stereo input is created from mono
-     *and stored in pInData before applying any of the effects.
-     *However we do not update the value pInstance->Params.NrChannels
-     *at this point.
-     *So to treat the pInData as stereo we are setting channels to 2
-     */
-    if (channels == 1)
-    {
-        channels = 2;
-    }
-#endif
-
-    pScratch  = (LVM_FLOAT *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
      * Check if the processing is inplace
      */
-#ifdef SUPPORT_MC
     /*
      * The pInput buffer holds the first 2 (Left, Right) channels information.
      * Hence the memory required by this buffer is 2 * NumFrames.
      * The Concert Surround module carries out processing only on L, R.
      */
     pInput = pScratch + (2 * NrFrames);
-    pStIn  = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
-    /* The first two channel data is extracted from the input data and
-     * copied into pInput buffer
-     */
-    Copy_Float_Mc_Stereo((LVM_FLOAT *)pInData,
-                         (LVM_FLOAT *)pInput,
-                         NrFrames,
-                         channels);
-    Copy_Float((LVM_FLOAT *)pInput,
-               (LVM_FLOAT *)pStIn,
-               (LVM_INT16)(2 * NrFrames));
-#else
-    if (pInData == pOutData)
-    {
-        /* Processing inplace */
-        pInput = pScratch + (2 * NumSamples);
-        Copy_Float((LVM_FLOAT *)pInData,           /* Source */
-                   (LVM_FLOAT *)pInput,            /* Destination */
-                   (LVM_INT16)(2 * NumSamples));     /* Left and right */
+    pStIn = pScratch + ((LVCS_SCRATCHBUFFERS - 2) * NrFrames);
+    if (channels == FCC_1) {
+        Copy_Float((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, (LVM_INT16)NrFrames);
+        Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)NrFrames);
+    } else {
+        /* The first two channel data is extracted from the input data and
+         * copied into pInput buffer
+         */
+        Copy_Float_Mc_Stereo((LVM_FLOAT*)pInData, (LVM_FLOAT*)pInput, NrFrames, channels);
+        Copy_Float((LVM_FLOAT*)pInput, (LVM_FLOAT*)pStIn, (LVM_INT16)(FCC_2 * NrFrames));
     }
-    else
-    {
-        /* Processing outplace */
-        pInput = pInData;
-    }
-#endif
     /*
      * Call the stereo enhancer
      */
-#ifdef SUPPORT_MC
-    err = LVCS_StereoEnhancer(hInstance,              /* Instance handle */
-                              pStIn,                  /* Pointer to the input data */
-                              pOutData,               /* Pointer to the output data */
-                              NrFrames);              /* Number of frames to process */
-#else
-    err = LVCS_StereoEnhancer(hInstance,              /* Instance handle */
-                              pInData,                    /* Pointer to the input data */
-                              pOutData,                   /* Pointer to the output data */
-                              NumSamples);                /* Number of samples to process */
-#endif
+    err = LVCS_StereoEnhancer(hInstance, /* Instance handle */
+                              pStIn,     /* Pointer to the input data */
+                              pOutData,  /* Pointer to the output data */
+                              NrFrames); /* Number of frames to process */
 
     /*
      * Call the reverb generator
      */
-    err = LVCS_ReverbGenerator(hInstance,             /* Instance handle */
-                               pOutData,                  /* Pointer to the input data */
-                               pOutData,                  /* Pointer to the output data */
-                               NumSamples);               /* Number of samples to process */
+    err = LVCS_ReverbGenerator(hInstance,   /* Instance handle */
+                               pOutData,    /* Pointer to the input data */
+                               pOutData,    /* Pointer to the output data */
+                               NumSamples); /* Number of samples to process */
 
     /*
      * Call the equaliser
      */
-    err = LVCS_Equaliser(hInstance,                   /* Instance handle */
-                         pOutData,                        /* Pointer to the input data */
-                         NumSamples);                     /* Number of samples to process */
+    err = LVCS_Equaliser(hInstance,   /* Instance handle */
+                         pOutData,    /* Pointer to the input data */
+                         NumSamples); /* Number of samples to process */
 
     /*
      * Call the bypass mixer
      */
-    err = LVCS_BypassMixer(hInstance,                 /* Instance handle */
-                           pOutData,                      /* Pointer to the processed data */
-                           pInput,                        /* Pointer to the input (unprocessed) data */
-                           pOutData,                      /* Pointer to the output data */
-                           NumSamples);                   /* Number of samples to process */
+    err = LVCS_BypassMixer(hInstance,   /* Instance handle */
+                           pOutData,    /* Pointer to the processed data */
+                           pInput,      /* Pointer to the input (unprocessed) data */
+                           pOutData,    /* Pointer to the output data */
+                           NumSamples); /* Number of samples to process */
 
-    if(err != LVCS_SUCCESS)
-    {
+    if (err != LVCS_SUCCESS) {
         return err;
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
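For readability, a self-contained sketch of the scratch partitioning implied by the pointer arithmetic above (LVCS_SCRATCHBUFFERS == 8, i.e. four chunks of 2 * NrFrames floats). The struct and function names are editorial, not part of the module.

#include "LVCS_Private.h"

// Editorial sketch: how the single bundle scratch buffer is carved up for
// in-place processing in LVCS_Process_CS / LVCS_Process.
struct LvcsScratchLayout {
    LVM_FLOAT* pWork;       // chunk 0: StereoEnhancer / ReverbGenerator workspace
    LVM_FLOAT* pInput;      // chunk 1: unprocessed L/R copy kept for the bypass mixer
    LVM_FLOAT* pStereoOut;  // chunk 2: intermediate output when pInData == pOutData
    LVM_FLOAT* pStIn;       // chunk 3: L/R input handed to LVCS_StereoEnhancer
};

static LvcsScratchLayout lvcsScratchLayout(void* pScratch, LVM_UINT16 NrFrames) {
    LVM_FLOAT* p = (LVM_FLOAT*)pScratch;
    return {p,                                          // offset 0
            p + 2 * NrFrames,                           // offset 2 * NrFrames
            p + (LVCS_SCRATCHBUFFERS - 4) * NrFrames,   // offset 4 * NrFrames
            p + (LVCS_SCRATCHBUFFERS - 2) * NrFrames};  // offset 6 * NrFrames
}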
 /************************************************************************************/
 /*                                                                                  */
@@ -202,171 +161,117 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t             hInstance,
-                                  const LVM_FLOAT           *pInData,
-                                  LVM_FLOAT                 *pOutData,
-                                  LVM_UINT16                NumSamples)
-{
-
-    LVCS_Instance_t *pInstance = (LVCS_Instance_t  *)hInstance;
+LVCS_ReturnStatus_en LVCS_Process(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                  LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
     LVCS_ReturnStatus_en err;
-#ifdef SUPPORT_MC
     /*Extract number of Channels info*/
     LVM_INT32 channels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples = (channels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
+    LVM_INT32 compGainInterval =
+            (channels == FCC_1) ? LVCS_COMPGAINFRAME : FCC_2 * LVCS_COMPGAINFRAME;
 #define NrFrames NumSamples  // alias for clarity
-    if (channels == 1)
-    {
-        channels = 2;
-    }
-#endif
     /*
      * Check the number of samples is not too large
      */
-    if (NumSamples > pInstance->Capabilities.MaxBlockSize)
-    {
-        return(LVCS_TOOMANYSAMPLES);
+    if (NumSamples > pInstance->Capabilities.MaxBlockSize) {
+        return (LVCS_TOOMANYSAMPLES);
     }
 
     /*
      * Check if the algorithm is enabled
      */
-    if (pInstance->Params.OperatingMode != LVCS_OFF)
-    {
-#ifdef SUPPORT_MC
-        LVM_FLOAT *pStereoOut;
+    if (pInstance->Params.OperatingMode != LVCS_OFF) {
+        LVM_FLOAT* pStereoOut;
         /*
          * LVCS_Process_CS uses output buffer to store intermediate outputs of StereoEnhancer,
          * Equalizer, ReverbGenerator and BypassMixer.
          * So, to avoid i/o data overlapping, when i/o buffers are common, use scratch buffer
          * to store intermediate outputs.
          */
-        if (pOutData == pInData)
-        {
-          /*
-           * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
-           * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
-           * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
-           * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
-           */
-            pStereoOut = (LVM_FLOAT *) \
-                          pInstance->MemoryTable. \
-                          Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress +
-                          ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
-        }
-        else
-        {
+        if (pOutData == pInData) {
+            /*
+             * Scratch memory is used in 4 chunks of (2 * NrFrames) size.
+             * First chunk of memory is used by LVCS_StereoEnhancer and LVCS_ReverbGenerator,
+             * second and fourth are used as input buffers by pInput and pStIn in LVCS_Process_CS.
+             * Hence, pStereoOut is pointed to use unused third portion of scratch memory.
+             */
+            pStereoOut = (LVM_FLOAT*)pInstance->pScratch + ((LVCS_SCRATCHBUFFERS - 4) * NrFrames);
+        } else {
             pStereoOut = pOutData;
         }
 
         /*
          * Call CS process function
          */
-            err = LVCS_Process_CS(hInstance,
-                                  pInData,
-                                  pStereoOut,
-                                  NrFrames);
-#else
-            err = LVCS_Process_CS(hInstance,
-                                  pInData,
-                                  pOutData,
-                                  NumSamples);
-#endif
+        err = LVCS_Process_CS(hInstance, pInData, pStereoOut, NrFrames);
 
         /*
          * Compress to reduce expansion effect of Concert Sound and correct volume
          * differences for difference settings. Not applied in test modes
          */
-        if ((pInstance->Params.OperatingMode == LVCS_ON)&& \
-                                        (pInstance->Params.CompressorMode == LVM_MODE_ON))
-        {
+        if ((pInstance->Params.OperatingMode == LVCS_ON) &&
+            (pInstance->Params.CompressorMode == LVM_MODE_ON)) {
             LVM_FLOAT Gain = pInstance->VolCorrect.CompMin;
             LVM_FLOAT Current1;
 
             Current1 = LVC_Mixer_GetCurrent(&pInstance->BypassMix.Mixer_Instance.MixerStream[0]);
-            Gain = (LVM_FLOAT)(  pInstance->VolCorrect.CompMin
-                               - (((LVM_FLOAT)pInstance->VolCorrect.CompMin  * (Current1)))
-                               + (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
+            Gain = (LVM_FLOAT)(pInstance->VolCorrect.CompMin -
+                               (((LVM_FLOAT)pInstance->VolCorrect.CompMin * (Current1))) +
+                               (((LVM_FLOAT)pInstance->VolCorrect.CompFull * (Current1))));
 
-            if(NumSamples < LVCS_COMPGAINFRAME)
-            {
-#ifdef SUPPORT_MC
-                NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                 pStereoOut,
-                                 pStereoOut,
-                                 (LVM_INT32)(2 * NrFrames));
-#else
-                NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                 pOutData,
-                                 pOutData,
-                                 (LVM_INT32)(2 * NumSamples));
-#endif
-            }
-            else
-            {
-                LVM_FLOAT  GainStep;
-                LVM_FLOAT  FinalGain;
-                LVM_INT16  SampleToProcess = NumSamples;
-                LVM_FLOAT  *pOutPtr;
+            if (NumSamples < LVCS_COMPGAINFRAME) {
+                NonLinComp_Float(Gain, /* Compressor gain setting */
+                                 pStereoOut, pStereoOut, (LVM_INT32)destNumSamples);
+            } else {
+                LVM_FLOAT GainStep;
+                LVM_FLOAT FinalGain;
+                LVM_INT16 SampleToProcess = NumSamples;
+                LVM_FLOAT* pOutPtr;
 
                 /* Large changes in Gain can cause clicks in output
                    Split data into small blocks and use interpolated gain values */
 
-                GainStep = (LVM_FLOAT)(((Gain-pInstance->CompressGain) * \
-                                                LVCS_COMPGAINFRAME) / NumSamples);
+                GainStep = (LVM_FLOAT)(((Gain - pInstance->CompressGain) * LVCS_COMPGAINFRAME) /
+                                       NumSamples);
 
-                if((GainStep == 0) && (pInstance->CompressGain < Gain))
-                {
+                if ((GainStep == 0) && (pInstance->CompressGain < Gain)) {
                     GainStep = 1;
-                }
-                else
-                {
-                    if((GainStep == 0) && (pInstance->CompressGain > Gain))
-                    {
+                } else {
+                    if ((GainStep == 0) && (pInstance->CompressGain > Gain)) {
                         GainStep = -1;
                     }
                 }
 
                 FinalGain = Gain;
                 Gain = pInstance->CompressGain;
-#ifdef SUPPORT_MC
                 pOutPtr = pStereoOut;
-#else
-                pOutPtr = pOutData;
-#endif
 
-                while(SampleToProcess > 0)
-                {
+                while (SampleToProcess > 0) {
                     Gain = (LVM_FLOAT)(Gain + GainStep);
-                    if((GainStep > 0) && (FinalGain <= Gain))
-                    {
+                    if ((GainStep > 0) && (FinalGain <= Gain)) {
                         Gain = FinalGain;
                         GainStep = 0;
                     }
 
-                    if((GainStep < 0) && (FinalGain > Gain))
-                    {
+                    if ((GainStep < 0) && (FinalGain > Gain)) {
                         Gain = FinalGain;
                         GainStep = 0;
                     }
 
-                    if(SampleToProcess > LVCS_COMPGAINFRAME)
-                    {
-                        NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                         pOutPtr,
-                                         pOutPtr,
-                                         (LVM_INT32)(2 * LVCS_COMPGAINFRAME));
-                        pOutPtr += (2 * LVCS_COMPGAINFRAME);
+                    if (SampleToProcess > LVCS_COMPGAINFRAME) {
+                        NonLinComp_Float(Gain, /* Compressor gain setting */
+                                         pOutPtr, pOutPtr, compGainInterval);
+                        pOutPtr += compGainInterval;
                         SampleToProcess = (LVM_INT16)(SampleToProcess - LVCS_COMPGAINFRAME);
-                    }
-                    else
-                    {
-                        NonLinComp_Float(Gain,                    /* Compressor gain setting */
-                                         pOutPtr,
-                                         pOutPtr,
-                                         (LVM_INT32)(2 * SampleToProcess));
+                    } else {
+                        NonLinComp_Float(Gain, /* Compressor gain setting */
+                                         pOutPtr, pOutPtr,
+                                         (channels == FCC_1)
+                                                 ? (LVM_INT32)(SampleToProcess)
+                                                 : (LVM_INT32)(FCC_2 * SampleToProcess));
                         SampleToProcess = 0;
                     }
-
                 }
             }
 
@@ -374,57 +279,33 @@
             pInstance->CompressGain = Gain;
         }
 
-        if(pInstance->bInOperatingModeTransition == LVM_TRUE){
-
+        if (pInstance->bInOperatingModeTransition == LVM_TRUE) {
             /*
              * Re-init bypass mix when timer has completed
              */
             if ((pInstance->bTimerDone == LVM_TRUE) &&
-                (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0))
-            {
-                err = LVCS_BypassMixInit(hInstance,
-                                         &pInstance->Params);
+                (pInstance->BypassMix.Mixer_Instance.MixerStream[1].CallbackSet == 0)) {
+                err = LVCS_BypassMixInit(hInstance, &pInstance->Params);
 
-                if(err != LVCS_SUCCESS)
-                {
+                if (err != LVCS_SUCCESS) {
                     return err;
                 }
 
-            }
-            else{
-                LVM_Timer ( &pInstance->TimerInstance,
-                            (LVM_INT16)NumSamples);
+            } else {
+                LVM_Timer(&pInstance->TimerInstance, (LVM_INT16)NumSamples);
             }
         }
-#ifdef SUPPORT_MC
-        Copy_Float_Stereo_Mc(pInData,
-                             pStereoOut,
-                             pOutData,
-                             NrFrames,
-                             channels);
-#endif
-    }
-    else
-    {
-        if (pInData != pOutData)
-        {
-#ifdef SUPPORT_MC
+        Copy_Float_Stereo_Mc(pInData, (const LVM_FLOAT*)pStereoOut, pOutData, NrFrames, channels);
+    } else {
+        if (pInData != pOutData) {
             /*
              * The algorithm is disabled so just copy the data
              */
-            Copy_Float((LVM_FLOAT *)pInData,               /* Source */
-                       (LVM_FLOAT *)pOutData,                  /* Destination */
-                       (LVM_INT16)(channels * NrFrames));    /* All Channels*/
-#else
-            /*
-             * The algorithm is disabled so just copy the data
-             */
-            Copy_Float((LVM_FLOAT *)pInData,               /* Source */
-                       (LVM_FLOAT *)pOutData,                  /* Destination */
-                       (LVM_INT16)(2 * NumSamples));             /* Left and right */
-#endif
+            Copy_Float((LVM_FLOAT*)pInData,               /* Source */
+                       (LVM_FLOAT*)pOutData,              /* Destination */
+                       (LVM_INT16)(channels * NrFrames)); /* All Channels*/
         }
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
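The anti-click handling above ("split data into small blocks and use interpolated gain values") distilled into a standalone sketch: the gain steps from the previously applied value towards the new target in LVCS_COMPGAINFRAME-sized pieces. The plain multiply stands in for NonLinComp_Float(), and all names are illustrative.

#include <algorithm>
#include <cstddef>

// Editorial sketch of the interpolated compressor gain ramp.
static void rampedGain(float* buf, size_t frames, size_t channels, float prevGain,
                       float targetGain, size_t frameStep /* e.g. LVCS_COMPGAINFRAME */) {
    float gain = prevGain;
    float step = (targetGain - prevGain) * (float)frameStep / (float)frames;
    for (size_t i = 0; i < frames; i += frameStep) {
        gain += step;
        if ((step > 0 && gain > targetGain) || (step < 0 && gain < targetGain)) {
            gain = targetGain;  // clamp once the target has been reached
        }
        size_t n = std::min(frameStep, frames - i) * channels;
        for (size_t j = 0; j < n; ++j) {
            buf[i * channels + j] *= gain;  // stand-in for NonLinComp_Float()
        }
    }
}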
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
index d0e6e09..de23d07 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.cpp
@@ -20,7 +20,8 @@
 /*  Includes                                                                        */
 /*                                                                                  */
 /************************************************************************************/
-
+#include <system/audio.h>
+#include <stdlib.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_ReverbGenerator.h"
@@ -57,31 +58,19 @@
 /*  2.  The numerator coefficients of the filter are negated to cause an inversion. */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t     hInstance,
-                                              LVCS_Params_t     *pParams)
-{
-
-    LVM_UINT16              Delay;
-    LVM_UINT16              Offset;
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_ReverbGenerator_t  *pConfig   = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
-    LVCS_Data_t             *pData;
-    LVCS_Coefficient_t      *pCoefficients;
-    BQ_FLOAT_Coefs_t         Coeffs;
-    const BiquadA012B12CoefsSP_t  *pReverbCoefTable;
-
-    pData = (LVCS_Data_t *) \
-                 pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficients = (LVCS_Coefficient_t *) \
-                 pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Delay;
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+    const BiquadA012B12CoefsSP_t* pReverbCoefTable;
 
     /*
      * Initialise the delay and filters if:
      *  - the sample rate has changed
      *  - the speaker type has changed to or from the mobile speaker
      */
-    if(pInstance->Params.SampleRate != pParams->SampleRate )      /* Sample rate change test */
+    if (pInstance->Params.SampleRate != pParams->SampleRate) /* Sample rate change test */
 
     {
         /*
@@ -89,59 +78,36 @@
          */
         Delay = (LVM_UINT16)LVCS_StereoDelayCS[(LVM_UINT16)pParams->SampleRate];
 
-        pConfig->DelaySize      = (LVM_INT16)(2 * Delay);
-        pConfig->DelayOffset    = 0;
-        LoadConst_Float(0,                                            /* Value */
-                        (LVM_FLOAT *)&pConfig->StereoSamples[0],      /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pConfig->StereoSamples) / sizeof(LVM_FLOAT)));
+        pConfig->DelaySize =
+                (pParams->NrChannels == FCC_1) ? (LVM_INT16)Delay : (LVM_INT16)(FCC_2 * Delay);
+        pConfig->DelayOffset = 0;
+        memset(pConfig->StereoSamples, 0, sizeof(pConfig->StereoSamples));
         /*
          * Setup the filters
          */
         Offset = (LVM_UINT16)pParams->SampleRate;
         pReverbCoefTable = (BiquadA012B12CoefsSP_t*)&LVCS_ReverbCoefTable[0];
 
-        /* Convert incoming coefficients to the required format/ordering */
-        Coeffs.A0 = (LVM_FLOAT)pReverbCoefTable[Offset].A0;
-        Coeffs.A1 = (LVM_FLOAT)pReverbCoefTable[Offset].A1;
-        Coeffs.A2 = (LVM_FLOAT)pReverbCoefTable[Offset].A2;
-        Coeffs.B1 = (LVM_FLOAT)-pReverbCoefTable[Offset].B1;
-        Coeffs.B2 = (LVM_FLOAT)-pReverbCoefTable[Offset].B2;
-
-        LoadConst_Float(0,                                 /* Value */
-                        (LVM_FLOAT *)&pData->ReverbBiquadTaps, /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pData->ReverbBiquadTaps) / sizeof(LVM_FLOAT)));
-
-        BQ_2I_D16F16Css_TRC_WRA_01_Init(&pCoefficients->ReverbBiquadInstance,
-                                        &pData->ReverbBiquadTaps,
-                                        &Coeffs);
-
-        /* Callbacks */
-        switch(pReverbCoefTable[Offset].Scale)
-        {
-            case 14:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F16C14_TRC_WRA_01;
-                break;
-            case 15:
-                pConfig->pBiquadCallBack  = BQ_2I_D16F16C15_TRC_WRA_01;
-                break;
-        }
+        std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                pReverbCoefTable[Offset].A0, pReverbCoefTable[Offset].A1,
+                pReverbCoefTable[Offset].A2, pReverbCoefTable[Offset].B1,
+                pReverbCoefTable[Offset].B2};
+        pInstance->pRevBiquad.reset(new android::audio_utils::BiquadFilter<LVM_FLOAT>(
+                (pParams->NrChannels == FCC_1) ? FCC_1 : FCC_2, coefs));
 
         /*
          * Setup the mixer
          */
         pConfig->ProcGain = (LVM_UINT16)(HEADPHONEGAINPROC);
-        pConfig->UnprocGain  = (LVM_UINT16)(HEADPHONEGAINUNPROC);
+        pConfig->UnprocGain = (LVM_UINT16)(HEADPHONEGAINUNPROC);
     }
 
-    if(pInstance->Params.ReverbLevel != pParams->ReverbLevel)
-    {
-        LVM_INT32   ReverbPercentage = 83886;      // 1 Percent Reverb i.e 1/100 in Q 23 format
+    if (pInstance->Params.ReverbLevel != pParams->ReverbLevel) {
+        LVM_INT32 ReverbPercentage = 83886;        // 1 percent reverb, i.e. 1/100 in Q23 format
         ReverbPercentage *= pParams->ReverbLevel;  // Actual Reverb Level in Q 23 format
-        pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage>>8)) / 32767.0f;
+        pConfig->ReverbLevel = ((LVM_FLOAT)(ReverbPercentage >> 8)) / 32767.0f;
     }
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
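
The ReverbLevel branch above converts a percentage setting into a linear gain through Q23 fixed point. A standalone sketch of the same arithmetic, using an illustrative level of 50 (not a value taken from this change):

#include <cstdint>
#include <cstdio>

int main() {
    int32_t reverbPercentage = 83886;   // ~1 % in Q23: (1 << 23) / 100 = 83886.08
    int16_t reverbLevel = 50;           // hypothetical 50 % setting, for illustration only
    reverbPercentage *= reverbLevel;    // 4,194,300, roughly 0.5 in Q23
    float gain = (float)(reverbPercentage >> 8) / 32767.0f;  // Q23 -> ~Q15 -> float
    printf("gain = %f\n", gain);        // ~0.49998; a level of 100 lands on exactly 1.0f
    return 0;
}
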
 /************************************************************************************/
 /*                                                                                  */
@@ -181,46 +147,38 @@
 /*  2.  The Gain is combined with the LPF and incorporated in to the coefficients   */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t         hInstance,
-                                          const LVM_FLOAT       *pInData,
-                                          LVM_FLOAT             *pOutData,
-                                          LVM_UINT16            NumSamples)
-{
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                          LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_ReverbGenerator_t* pConfig = (LVCS_ReverbGenerator_t*)&pInstance->Reverberation;
+    LVM_FLOAT* pScratch;
+    LVM_INT32 NumChannels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples =
+            (pInstance->Params.NrChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
 
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_ReverbGenerator_t  *pConfig   = (LVCS_ReverbGenerator_t *)&pInstance->Reverberation;
-    LVCS_Coefficient_t      *pCoefficients;
-    LVM_FLOAT               *pScratch;
-
-    pCoefficients = (LVCS_Coefficient_t *)\
-                   pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pScratch  = (LVM_FLOAT *)\
-                    pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
 
     /*
      * Copy the data to the output in outplace processing
      */
-    if (pInData != pOutData)
-    {
+    if (pInData != pOutData) {
         /*
          * Reverb not required so just copy the data
          */
-        Copy_Float((LVM_FLOAT *)pInData,                                       /* Source */
-                   (LVM_FLOAT *)pOutData,                                      /* Destination */
-                   (LVM_INT16)(2 * NumSamples));                                 /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pOutData,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Total samples across all channels */
     }
 
     /*
      * Check if the reverb is required
      */
     /* Disable when CS4MS in stereo mode */
-    if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) || \
+    if ((((LVCS_OutputDevice_en)pInstance->Params.SpeakerType == LVCS_HEADPHONE) ||
          (pInstance->Params.SpeakerType == LVCS_EX_HEADPHONES) ||
-         (pInstance->Params.SourceFormat != LVCS_STEREO))  &&
-                                    /* For validation testing */
-        ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) !=0))
-    {
+         (pInstance->Params.SourceFormat != LVCS_STEREO)) &&
+        /* For validation testing */
+        ((pInstance->Params.OperatingMode & LVCS_REVERBSWITCH) != 0)) {
         /********************************************************************************/
         /*                                                                              */
         /* Copy the input data to scratch memory and filter it                          */
@@ -230,34 +188,24 @@
         /*
          * Copy the input data to the scratch memory
          */
-        Copy_Float((LVM_FLOAT *)pInData,                                     /* Source */
-                   (LVM_FLOAT *)pScratch,                                    /* Destination */
-                   (LVM_INT16)(2 * NumSamples));                               /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pScratch,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Total samples across all channels */
 
         /*
          * Filter the data
          */
-        (pConfig->pBiquadCallBack)((Biquad_FLOAT_Instance_t*)&pCoefficients->ReverbBiquadInstance,
-                                   (LVM_FLOAT *)pScratch,
-                                   (LVM_FLOAT *)pScratch,
-                                   (LVM_INT16)NumSamples);
+        pInstance->pRevBiquad->process(pScratch, pScratch, NumSamples);
 
-        Mult3s_Float( (LVM_FLOAT *)pScratch,
-                      pConfig->ReverbLevel,
-                      (LVM_FLOAT *)pScratch,
-                      (LVM_INT16)(2 * NumSamples));
+        Mult3s_Float((LVM_FLOAT*)pScratch, pConfig->ReverbLevel, (LVM_FLOAT*)pScratch,
+                     (LVM_INT16)destNumSamples); /* Total samples across all channels */
 
         /*
          * Apply the delay mix
          */
-        DelayMix_Float((LVM_FLOAT *)pScratch,
-                       &pConfig->StereoSamples[0],
-                       pConfig->DelaySize,
-                       pOutData,
-                       &pConfig->DelayOffset,
-                       (LVM_INT16)NumSamples);
-
+        DelayMix_Float((LVM_FLOAT*)pScratch, &pConfig->StereoSamples[0], pConfig->DelaySize,
+                       pOutData, &pConfig->DelayOffset, (LVM_INT16)NumSamples, NumChannels);
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
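
After this change the reverb block flow is: copy the input to the output when processing out of place, then, for headphone output with the reverb switch set, filter a scratch copy with pRevBiquad, scale it by ReverbLevel, and delay-mix it into the output. Below is a simplified standalone model of that ordering; the biquad is left as a comment and the delay mix is reduced to a plain add, so this illustrates the flow only, not DelayMix_Float itself.

#include <cstddef>
#include <vector>

// Simplified model of the reverb path: a scratch copy is filtered and scaled, then a delayed
// version is mixed into the (already copied) output.  'channels' mirrors the mono/stereo
// handling that destNumSamples performs above.  delayLine must be non-empty; its length
// stands in for DelaySize.
static void reverbFlowSketch(const float* in, float* out, size_t frames, size_t channels,
                             float reverbLevel, std::vector<float>& delayLine, size_t& offset) {
    const size_t samples = frames * channels;          // destNumSamples
    std::vector<float> scratch(in, in + samples);      // Copy_Float input -> scratch
    // ... pRevBiquad->process(scratch.data(), scratch.data(), frames) would run here ...
    for (size_t i = 0; i < samples; ++i) scratch[i] *= reverbLevel;  // Mult3s_Float
    for (size_t i = 0; i < samples; ++i) {             // crude stand-in for DelayMix_Float
        const float delayed = delayLine[offset];
        delayLine[offset] = scratch[i];
        offset = (offset + 1) % delayLine.size();
        out[i] = in[i] + delayed;                      // dry signal plus delayed reverb
    }
}
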
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
index 1bc4338..049eef2 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_ReverbGenerator.h
@@ -32,8 +32,8 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define     HEADPHONEGAINPROC           LVCS_HEADPHONE_PROCGAIN
-#define     HEADPHONEGAINUNPROC         LVCS_HEADPHONE_UNPROCGAIN
+#define HEADPHONEGAINPROC LVCS_HEADPHONE_PROCGAIN
+#define HEADPHONEGAINUNPROC LVCS_HEADPHONE_UNPROCGAIN
 
 /************************************************************************************/
 /*                                                                                  */
@@ -42,20 +42,15 @@
 /************************************************************************************/
 
 /* Reverberation module structure */
-typedef struct
-{
-
+typedef struct {
     /* Stereo delay */
-    LVM_INT16                   DelaySize;
-    LVM_INT16                   DelayOffset;
-    LVM_INT16                   ProcGain;
-    LVM_INT16                   UnprocGain;
-    LVM_FLOAT                   StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
+    LVM_INT16 DelaySize;
+    LVM_INT16 DelayOffset;
+    LVM_INT16 ProcGain;
+    LVM_INT16 UnprocGain;
+    LVM_FLOAT StereoSamples[2 * LVCS_STEREODELAY_CS_MAX_VAL];
     /* Reverb Level */
-    LVM_FLOAT                   ReverbLevel;
-    /* Filter */
-    void                        (*pBiquadCallBack) (Biquad_FLOAT_Instance_t*,
-                                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
+    LVM_FLOAT ReverbLevel;
 } LVCS_ReverbGenerator_t;
 
 /************************************************************************************/
@@ -64,11 +59,8 @@
 /*                                                                                    */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t     hInstance,
-                                                 LVCS_Params_t  *pParams);
-LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t         hInstance,
-                                          const LVM_FLOAT       *pInput,
-                                          LVM_FLOAT             *pOutput,
-                                          LVM_UINT16            NumSamples);
+LVCS_ReturnStatus_en LVCS_ReverbGeneratorInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
+LVCS_ReturnStatus_en LVCS_ReverbGenerator(LVCS_Handle_t hInstance, const LVM_FLOAT* pInput,
+                                          LVM_FLOAT* pOutput, LVM_UINT16 NumSamples);
 
-#endif  /* REVERB_H */
+#endif /* REVERB_H */
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
index 7fd8444..3ca25f9 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.cpp
@@ -21,6 +21,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
+#include <system/audio.h>
 #include "LVCS.h"
 #include "LVCS_Private.h"
 #include "LVCS_StereoEnhancer.h"
@@ -49,96 +50,39 @@
 /* NOTES:                                                                           */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t       hInstance,
-                                        LVCS_Params_t       *pParams)
-{
-
-    LVM_UINT16              Offset;
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_StereoEnhancer_t   *pConfig   = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
-    LVCS_Data_t             *pData;
-    LVCS_Coefficient_t      *pCoefficient;
-    FO_FLOAT_Coefs_t          CoeffsMid;
-    BQ_FLOAT_Coefs_t          CoeffsSide;
-    const BiquadA012B12CoefsSP_t *pSESideCoefs;
-
-    pData     = (LVCS_Data_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_DATA].pBaseAddress;
-
-    pCoefficient = (LVCS_Coefficient_t *) \
-                  pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams) {
+    LVM_UINT16 Offset;
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    const BiquadA012B12CoefsSP_t* pSESideCoefs;
 
     /*
      * If the sample rate or speaker type has changed update the filters
      */
     if ((pInstance->Params.SampleRate != pParams->SampleRate) ||
-        (pInstance->Params.SpeakerType != pParams->SpeakerType))
-    {
+        (pInstance->Params.SpeakerType != pParams->SpeakerType)) {
         /*
          * Set the filter coefficients based on the sample rate
          */
         /* Mid filter */
         Offset = (LVM_UINT16)pParams->SampleRate;
 
-        /* Convert incoming coefficients to the required format/ordering */
-        CoeffsMid.A0 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A0;
-        CoeffsMid.A1 = (LVM_FLOAT) LVCS_SEMidCoefTable[Offset].A1;
-        CoeffsMid.B1 = (LVM_FLOAT)-LVCS_SEMidCoefTable[Offset].B1;
-
-        /* Clear the taps */
-        LoadConst_Float(0,                                  /* Value */
-                        (LVM_FLOAT *)&pData->SEBiquadTapsMid,    /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pData->SEBiquadTapsMid) / sizeof(LVM_FLOAT)));
-
-        FO_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceMid,
-                                        &pData->SEBiquadTapsMid,
-                                        &CoeffsMid);
-
-        /* Callbacks */
-        if(LVCS_SEMidCoefTable[Offset].Scale == 15)
-        {
-            pConfig->pBiquadCallBack_Mid  = FO_1I_D16F16C15_TRC_WRA_01;
-        }
+        std::array<LVM_FLOAT, android::audio_utils::kBiquadNumCoefs> coefs = {
+                LVCS_SEMidCoefTable[Offset].A0, LVCS_SEMidCoefTable[Offset].A1, 0.0,
+                LVCS_SEMidCoefTable[Offset].B1, 0.0};
+        pInstance->pSEMidBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
 
         Offset = (LVM_UINT16)(pParams->SampleRate);
         pSESideCoefs = (BiquadA012B12CoefsSP_t*)&LVCS_SESideCoefTable[0];
 
         /* Side filter */
-        /* Convert incoming coefficients to the required format/ordering */
-        CoeffsSide.A0 = (LVM_FLOAT) pSESideCoefs[Offset].A0;
-        CoeffsSide.A1 = (LVM_FLOAT) pSESideCoefs[Offset].A1;
-        CoeffsSide.A2 = (LVM_FLOAT) pSESideCoefs[Offset].A2;
-        CoeffsSide.B1 = (LVM_FLOAT)-pSESideCoefs[Offset].B1;
-        CoeffsSide.B2 = (LVM_FLOAT)-pSESideCoefs[Offset].B2;
-
-        /* Clear the taps */
-        LoadConst_Float(0,                                /* Value */
-                        (LVM_FLOAT *)&pData->SEBiquadTapsSide, /* Destination */
-                        /* Number of words */
-                        (LVM_UINT16)(sizeof(pData->SEBiquadTapsSide) / sizeof(LVM_FLOAT)));
-        /* Callbacks */
-        switch(pSESideCoefs[Offset].Scale)
-        {
-            case 14:
-                BQ_1I_D16F32Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
-                                                &pData->SEBiquadTapsSide,
-                                                &CoeffsSide);
-
-                pConfig->pBiquadCallBack_Side  = BQ_1I_D16F32C14_TRC_WRA_01;
-                break;
-            case 15:
-                BQ_1I_D16F16Css_TRC_WRA_01_Init(&pCoefficient->SEBiquadInstanceSide,
-                                                &pData->SEBiquadTapsSide,
-                                                &CoeffsSide);
-
-                pConfig->pBiquadCallBack_Side  = BQ_1I_D16F16C15_TRC_WRA_01;
-                break;
-        }
-
+        coefs = {pSESideCoefs[Offset].A0, pSESideCoefs[Offset].A1, pSESideCoefs[Offset].A2,
+                 pSESideCoefs[Offset].B1, pSESideCoefs[Offset].B2};
+        pInstance->pSESideBiquad.reset(
+                new android::audio_utils::BiquadFilter<LVM_FLOAT>(FCC_1, coefs));
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
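
The mid filter set up above is first order, so it is packed into the five-slot biquad array with the second-order terms zeroed, following the {A0, A1, A2, B1, B2} layout used throughout this change. A minimal illustration of that packing (symbolic arguments only):

#include <array>

// A first-order section expressed as a biquad: the x[n-2] and y[n-2] slots are simply zero.
static std::array<float, 5> packFirstOrderAsBiquad(float a0, float a1, float b1) {
    return {a0, a1, 0.0f, b1, 0.0f};  // {A0, A1, A2, B1, B2}
}
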
 /************************************************************************************/
 /*                                                                                  */
@@ -178,85 +122,67 @@
 /*  1.  The side filter is not used in Mobile Speaker mode                          */
 /*                                                                                  */
 /************************************************************************************/
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t          hInstance,
-                                         const LVM_FLOAT        *pInData,
-                                         LVM_FLOAT              *pOutData,
-                                         LVM_UINT16             NumSamples)
-{
-
-    LVCS_Instance_t         *pInstance = (LVCS_Instance_t  *)hInstance;
-    LVCS_StereoEnhancer_t   *pConfig   = (LVCS_StereoEnhancer_t *)&pInstance->StereoEnhancer;
-    LVCS_Coefficient_t      *pCoefficient;
-    LVM_FLOAT               *pScratch;
-
-    pCoefficient = (LVCS_Coefficient_t *) \
-                   pInstance->MemoryTable.Region[LVCS_MEMREGION_PERSISTENT_FAST_COEF].pBaseAddress;
-
-    pScratch  = (LVM_FLOAT *) \
-                    pInstance->MemoryTable.Region[LVCS_MEMREGION_TEMPORARY_FAST].pBaseAddress;
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                         LVM_FLOAT* pOutData, LVM_UINT16 NumSamples) {
+    LVCS_Instance_t* pInstance = (LVCS_Instance_t*)hInstance;
+    LVCS_StereoEnhancer_t* pConfig = (LVCS_StereoEnhancer_t*)&pInstance->StereoEnhancer;
+    LVM_FLOAT* pScratch;
+    pScratch = (LVM_FLOAT*)pInstance->pScratch;
+    LVM_INT32 NumChannels = pInstance->Params.NrChannels;
+    LVM_UINT16 destNumSamples = (NumChannels == FCC_1) ? NumSamples : FCC_2 * NumSamples;
     /*
      * Check if the Stereo Enhancer is enabled
      */
-    if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0)
-        {
+    if ((pInstance->Params.OperatingMode & LVCS_STEREOENHANCESWITCH) != 0) {
         /*
          * Convert from stereo to middle and side
          */
-        From2iToMS_Float(pInData,
-                         pScratch,
-                         pScratch + NumSamples,
-                         (LVM_INT16)NumSamples);
+        if (NumChannels == 1) {
+            // Mono input: copy it to scratch as the middle channel
+            Copy_Float((LVM_FLOAT*)pInData, (LVM_FLOAT*)pScratch, (LVM_INT16)NumSamples);
+        } else {
+            From2iToMS_Float(pInData, pScratch, pScratch + NumSamples, (LVM_INT16)NumSamples);
+        }
 
         /*
          * Apply filter to the middle signal
          */
-        if (pInstance->OutputDevice == LVCS_HEADPHONE)
-        {
-            (pConfig->pBiquadCallBack_Mid)((Biquad_FLOAT_Instance_t*)\
-                                            &pCoefficient->SEBiquadInstanceMid,
-                                            (LVM_FLOAT *)pScratch,
-                                            (LVM_FLOAT *)pScratch,
-                                            (LVM_INT16)NumSamples);
-        }
-        else
-        {
-            Mult3s_Float(pScratch,              /* Source */
-                         (LVM_FLOAT)pConfig->MidGain,      /* Gain */
-                         pScratch,              /* Destination */
-                         (LVM_INT16)NumSamples);           /* Number of samples */
+        if (pInstance->OutputDevice == LVCS_HEADPHONE) {
+            pInstance->pSEMidBiquad->process(pScratch, pScratch, NumSamples);
+        } else {
+            Mult3s_Float(pScratch,                    /* Source */
+                         (LVM_FLOAT)pConfig->MidGain, /* Gain */
+                         pScratch,                    /* Destination */
+                         (LVM_INT16)NumSamples);      /* Number of samples */
         }
 
         /*
          * Apply the filter the side signal only in stereo mode for headphones
          * and in all modes for mobile speakers
          */
-        if (pInstance->Params.SourceFormat == LVCS_STEREO)
-        {
-            (pConfig->pBiquadCallBack_Side)((Biquad_FLOAT_Instance_t*) \
-                                            &pCoefficient->SEBiquadInstanceSide,
-                                            (LVM_FLOAT *)(pScratch + NumSamples),
-                                            (LVM_FLOAT *)(pScratch + NumSamples),
-                                            (LVM_INT16)NumSamples);
+        if (pInstance->Params.SourceFormat == LVCS_STEREO) {
+            pInstance->pSESideBiquad->process(pScratch + NumSamples, pScratch + NumSamples,
+                                              NumSamples);
         }
 
-        /*
-         * Convert from middle and side to stereo
-         */
-        MSTo2i_Sat_Float(pScratch,
-                         pScratch + NumSamples,
-                         pOutData,
-                         (LVM_INT16)NumSamples);
+        if (NumChannels == 1) {
+            // Copy processed Middle data from scratch to pOutData
+            Copy_Float((LVM_FLOAT*)pScratch, (LVM_FLOAT*)pOutData, (LVM_INT16)NumSamples);
+        } else {
+            /*
+             * Convert from middle and side to stereo
+             */
+            MSTo2i_Sat_Float(pScratch, pScratch + NumSamples, pOutData, (LVM_INT16)NumSamples);
+        }
 
-    }
-    else
-    {
+    } else {
         /*
          * The stereo enhancer is disabled so just copy the data
          */
-        Copy_Float((LVM_FLOAT *)pInData,           /* Source */
-                   (LVM_FLOAT *)pOutData,          /* Destination */
-                   (LVM_INT16)(2 * NumSamples));     /* Left and right */
+        Copy_Float((LVM_FLOAT*)pInData,        /* Source */
+                   (LVM_FLOAT*)pOutData,       /* Destination */
+                   (LVM_INT16)destNumSamples); /* Total samples across all channels */
     }
 
-    return(LVCS_SUCCESS);
+    return (LVCS_SUCCESS);
 }
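
The stereo path above works in the mid/side domain: split, filter (or gain-scale) the mid, filter the side in stereo mode, then recombine. A standalone sketch of that flow follows; the 0.5 scaling and the recombination formula are the usual mid/side convention and are assumptions here, since From2iToMS_Float and MSTo2i_Sat_Float are not part of this diff.

#include <cstddef>

// Simplified model of the headphone stereo-enhancer flow (the filters are shown as comments).
static void stereoEnhanceSketch(const float* in, float* out, size_t frames) {
    for (size_t i = 0; i < frames; ++i) {
        const float l = in[2 * i], r = in[2 * i + 1];
        float mid = 0.5f * (l + r);      // assumed From2iToMS_Float convention
        float side = 0.5f * (l - r);
        // ... pSEMidBiquad->process() on mid, pSESideBiquad->process() on side ...
        out[2 * i] = mid + side;         // assumed MSTo2i_Sat_Float convention (saturation omitted)
        out[2 * i + 1] = mid - side;
    }
}
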
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
index 12a5982..0bed591 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_StereoEnhancer.h
@@ -24,8 +24,8 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "Filters.h"                        /* Filter definitions */
-#include "LVCS_Headphone_Coeffs.h"          /* Headphone coefficients */
+#include "Filters.h"               /* Filter definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
 #include "BIQUAD.h"
 
 /************************************************************************************/
@@ -35,21 +35,8 @@
 /************************************************************************************/
 
 /* Stereo enhancer structure */
-typedef struct
-{
-
-    /*
-     * Middle filter
-     */
-    void                    (*pBiquadCallBack_Mid)(Biquad_FLOAT_Instance_t*,
-                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
-
-    /*
-     * Side filter
-     */
-    void                    (*pBiquadCallBack_Side)(Biquad_FLOAT_Instance_t*,
-                                    LVM_FLOAT*, LVM_FLOAT*, LVM_INT16);
-    LVM_FLOAT              MidGain;            /* Middle gain in mobile speaker mode */
+typedef struct {
+    LVM_FLOAT MidGain; /* Middle gain in mobile speaker mode */
 } LVCS_StereoEnhancer_t;
 
 /************************************************************************************/
@@ -58,12 +45,9 @@
 /*                                                                                  */
 /************************************************************************************/
 
-LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t        hInstance,
-                                        LVCS_Params_t        *pParams);
+LVCS_ReturnStatus_en LVCS_SEnhancerInit(LVCS_Handle_t hInstance, LVCS_Params_t* pParams);
 
-LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t        hInstance,
-                                         const LVM_FLOAT    *pInData,
-                                         LVM_FLOAT            *pOutData,
-                                         LVM_UINT16            NumSamples);
+LVCS_ReturnStatus_en LVCS_StereoEnhancer(LVCS_Handle_t hInstance, const LVM_FLOAT* pInData,
+                                         LVM_FLOAT* pOutData, LVM_UINT16 NumSamples);
 
-#endif  /* STEREOENHANCE_H */
+#endif /* STEREOENHANCE_H */
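
The refactored sources reach their filters through instance members (pRevBiquad, pSEMidBiquad, pSESideBiquad) and a shared pScratch buffer instead of the removed callback pointers; the real declarations live in LVCS_Private.h, outside this excerpt. A purely hypothetical sketch of their shape (the smart-pointer type and the LVM_FLOAT typedef are assumptions):

#include <memory>
#include <audio_utils/BiquadFilter.h>  // android::audio_utils::BiquadFilter

using LVM_FLOAT = float;  // assumption: floating-point LVM build

// Hypothetical shape of the members referenced above; not the real LVCS_Instance_t.
struct LVCS_InstanceFilters {
    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>> pRevBiquad;     // reverb LPF
    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>> pSEMidBiquad;   // SE mid filter
    std::unique_ptr<android::audio_utils::BiquadFilter<LVM_FLOAT>> pSESideBiquad;  // SE side filter
    void* pScratch = nullptr;                                                      // shared scratch
};
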
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
index d79db61..55b5243 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.cpp
@@ -23,9 +23,9 @@
 
 #include "LVCS_Private.h"
 #include "LVCS_Tables.h"
-#include "Filters.h"                            /* Filter definitions */
-#include "BIQUAD.h"                             /* Biquad definitions */
-#include "LVCS_Headphone_Coeffs.h"              /* Headphone coefficients */
+#include "Filters.h"               /* Filter definitions */
+#include "BIQUAD.h"                /* Biquad definitions */
+#include "LVCS_Headphone_Coeffs.h" /* Headphone coefficients */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -35,144 +35,75 @@
 
 /* Coefficient table for the middle filter */
 const BiquadA01B1CoefsSP_t LVCS_SEMidCoefTable[] = {
-    {CS_MIDDLE_8000_A0,         /* 8kS/s coefficients */
-     CS_MIDDLE_8000_A1,
-     CS_MIDDLE_8000_B1,
-     (LVM_UINT16 )CS_MIDDLE_8000_SCALE},
-    {CS_MIDDLE_11025_A0,        /* 11kS/s coefficients */
-     CS_MIDDLE_11025_A1,
-     CS_MIDDLE_11025_B1,
-     (LVM_UINT16 )CS_MIDDLE_11025_SCALE},
-    {CS_MIDDLE_12000_A0,        /* 12kS/s coefficients */
-     CS_MIDDLE_12000_A1,
-     CS_MIDDLE_12000_B1,
-     (LVM_UINT16 )CS_MIDDLE_12000_SCALE},
-    {CS_MIDDLE_16000_A0,        /* 16kS/s coefficients */
-     CS_MIDDLE_16000_A1,
-     CS_MIDDLE_16000_B1,
-     (LVM_UINT16 )CS_MIDDLE_16000_SCALE},
-    {CS_MIDDLE_22050_A0,        /* 22kS/s coefficients */
-     CS_MIDDLE_22050_A1,
-     CS_MIDDLE_22050_B1,
-     (LVM_UINT16 )CS_MIDDLE_22050_SCALE},
-    {CS_MIDDLE_24000_A0,        /* 24kS/s coefficients */
-     CS_MIDDLE_24000_A1,
-     CS_MIDDLE_24000_B1,
-     (LVM_UINT16 )CS_MIDDLE_24000_SCALE},
-    {CS_MIDDLE_32000_A0,        /* 32kS/s coefficients */
-     CS_MIDDLE_32000_A1,
-     CS_MIDDLE_32000_B1,
-     (LVM_UINT16 )CS_MIDDLE_32000_SCALE},
-    {CS_MIDDLE_44100_A0,        /* 44kS/s coefficients */
-     CS_MIDDLE_44100_A1,
-     CS_MIDDLE_44100_B1,
-     (LVM_UINT16 )CS_MIDDLE_44100_SCALE},
-    {CS_MIDDLE_48000_A0,        /* 48kS/s coefficients */
-     CS_MIDDLE_48000_A1,
-     CS_MIDDLE_48000_B1,
-     (LVM_UINT16 )CS_MIDDLE_48000_SCALE}
-    ,
-    {CS_MIDDLE_88200_A0,        /* 88kS/s coefficients */
-     CS_MIDDLE_88200_A1,
-     CS_MIDDLE_88200_B1,
-     (LVM_UINT16)CS_MIDDLE_88200_SCALE},
-    {CS_MIDDLE_96000_A0,        /* 96kS/s coefficients */
-     CS_MIDDLE_96000_A1,
-     CS_MIDDLE_96000_B1,
-     (LVM_UINT16 )CS_MIDDLE_96000_SCALE},
-    {CS_MIDDLE_176400_A0,        /* 176kS/s coefficients */
-     CS_MIDDLE_176400_A1,
-     CS_MIDDLE_176400_B1,
-     (LVM_UINT16)CS_MIDDLE_176400_SCALE},
-    {CS_MIDDLE_192000_A0,        /* 192kS/s coefficients */
-     CS_MIDDLE_192000_A1,
-     CS_MIDDLE_192000_B1,
-     (LVM_UINT16 )CS_MIDDLE_192000_SCALE}
-    };
+        {CS_MIDDLE_8000_A0, /* 8kS/s coefficients */
+         CS_MIDDLE_8000_A1, CS_MIDDLE_8000_B1, (LVM_UINT16)CS_MIDDLE_8000_SCALE},
+        {CS_MIDDLE_11025_A0, /* 11kS/s coefficients */
+         CS_MIDDLE_11025_A1, CS_MIDDLE_11025_B1, (LVM_UINT16)CS_MIDDLE_11025_SCALE},
+        {CS_MIDDLE_12000_A0, /* 12kS/s coefficients */
+         CS_MIDDLE_12000_A1, CS_MIDDLE_12000_B1, (LVM_UINT16)CS_MIDDLE_12000_SCALE},
+        {CS_MIDDLE_16000_A0, /* 16kS/s coefficients */
+         CS_MIDDLE_16000_A1, CS_MIDDLE_16000_B1, (LVM_UINT16)CS_MIDDLE_16000_SCALE},
+        {CS_MIDDLE_22050_A0, /* 22kS/s coefficients */
+         CS_MIDDLE_22050_A1, CS_MIDDLE_22050_B1, (LVM_UINT16)CS_MIDDLE_22050_SCALE},
+        {CS_MIDDLE_24000_A0, /* 24kS/s coefficients */
+         CS_MIDDLE_24000_A1, CS_MIDDLE_24000_B1, (LVM_UINT16)CS_MIDDLE_24000_SCALE},
+        {CS_MIDDLE_32000_A0, /* 32kS/s coefficients */
+         CS_MIDDLE_32000_A1, CS_MIDDLE_32000_B1, (LVM_UINT16)CS_MIDDLE_32000_SCALE},
+        {CS_MIDDLE_44100_A0, /* 44kS/s coefficients */
+         CS_MIDDLE_44100_A1, CS_MIDDLE_44100_B1, (LVM_UINT16)CS_MIDDLE_44100_SCALE},
+        {CS_MIDDLE_48000_A0, /* 48kS/s coefficients */
+         CS_MIDDLE_48000_A1, CS_MIDDLE_48000_B1, (LVM_UINT16)CS_MIDDLE_48000_SCALE},
+        {CS_MIDDLE_88200_A0, /* 88kS/s coefficients */
+         CS_MIDDLE_88200_A1, CS_MIDDLE_88200_B1, (LVM_UINT16)CS_MIDDLE_88200_SCALE},
+        {CS_MIDDLE_96000_A0, /* 96kS/s coefficients */
+         CS_MIDDLE_96000_A1, CS_MIDDLE_96000_B1, (LVM_UINT16)CS_MIDDLE_96000_SCALE},
+        {CS_MIDDLE_176400_A0, /* 176kS/s coefficients */
+         CS_MIDDLE_176400_A1, CS_MIDDLE_176400_B1, (LVM_UINT16)CS_MIDDLE_176400_SCALE},
+        {CS_MIDDLE_192000_A0, /* 192kS/s coefficients */
+         CS_MIDDLE_192000_A1, CS_MIDDLE_192000_B1, (LVM_UINT16)CS_MIDDLE_192000_SCALE}};
 
 /* Coefficient table for the side filter */
 const BiquadA012B12CoefsSP_t LVCS_SESideCoefTable[] = {
-    /* Headphone Side coefficients */
-    {CS_SIDE_8000_A0,           /* 8kS/s coefficients */
-     CS_SIDE_8000_A1,
-     CS_SIDE_8000_A2,
-     CS_SIDE_8000_B1,
-     CS_SIDE_8000_B2,
-     (LVM_UINT16 )CS_SIDE_8000_SCALE},
-    {CS_SIDE_11025_A0,          /* 11kS/s coefficients */
-     CS_SIDE_11025_A1,
-     CS_SIDE_11025_A2,
-     CS_SIDE_11025_B1,
-     CS_SIDE_11025_B2,
-     (LVM_UINT16 )CS_SIDE_11025_SCALE},
-    {CS_SIDE_12000_A0,          /* 12kS/s coefficients */
-     CS_SIDE_12000_A1,
-     CS_SIDE_12000_A2,
-     CS_SIDE_12000_B1,
-     CS_SIDE_12000_B2,
-     (LVM_UINT16 )CS_SIDE_12000_SCALE},
-    {CS_SIDE_16000_A0,          /* 16kS/s coefficients */
-     CS_SIDE_16000_A1,
-     CS_SIDE_16000_A2,
-     CS_SIDE_16000_B1,
-     CS_SIDE_16000_B2,
-     (LVM_UINT16 )CS_SIDE_16000_SCALE},
-    {CS_SIDE_22050_A0,          /* 22kS/s coefficients */
-     CS_SIDE_22050_A1,
-     CS_SIDE_22050_A2,
-     CS_SIDE_22050_B1,
-     CS_SIDE_22050_B2,
-     (LVM_UINT16 )CS_SIDE_22050_SCALE},
-    {CS_SIDE_24000_A0,          /* 24kS/s coefficients */
-     CS_SIDE_24000_A1,
-     CS_SIDE_24000_A2,
-     CS_SIDE_24000_B1,
-     CS_SIDE_24000_B2,
-     (LVM_UINT16 )CS_SIDE_24000_SCALE},
-    {CS_SIDE_32000_A0,          /* 32kS/s coefficients */
-     CS_SIDE_32000_A1,
-     CS_SIDE_32000_A2,
-     CS_SIDE_32000_B1,
-     CS_SIDE_32000_B2,
-     (LVM_UINT16 )CS_SIDE_32000_SCALE},
-    {CS_SIDE_44100_A0,          /* 44kS/s coefficients */
-     CS_SIDE_44100_A1,
-     CS_SIDE_44100_A2,
-     CS_SIDE_44100_B1,
-     CS_SIDE_44100_B2,
-     (LVM_UINT16 )CS_SIDE_44100_SCALE},
-    {CS_SIDE_48000_A0,          /* 48kS/s coefficients */
-     CS_SIDE_48000_A1,
-     CS_SIDE_48000_A2,
-     CS_SIDE_48000_B1,
-     CS_SIDE_48000_B2,
-     (LVM_UINT16 )CS_SIDE_48000_SCALE}
-     ,
-    {CS_SIDE_88200_A0,          /* 88kS/s coefficients */
-     CS_SIDE_88200_A1,
-     CS_SIDE_88200_A2,
-     CS_SIDE_88200_B1,
-     CS_SIDE_88200_B2,
-     (LVM_UINT16)CS_SIDE_88200_SCALE},
-     {CS_SIDE_96000_A0,          /* 96kS/s coefficients */
-     CS_SIDE_96000_A1,
-     CS_SIDE_96000_A2,
-     CS_SIDE_96000_B1,
-     CS_SIDE_96000_B2,
-     (LVM_UINT16 )CS_SIDE_96000_SCALE},
-    {CS_SIDE_176400_A0,          /*176kS/s coefficients */
-     CS_SIDE_176400_A1,
-     CS_SIDE_176400_A2,
-     CS_SIDE_176400_B1,
-     CS_SIDE_176400_B2,
-     (LVM_UINT16)CS_SIDE_176400_SCALE},
-     {CS_SIDE_192000_A0,          /* 192kS/s coefficients */
-     CS_SIDE_192000_A1,
-     CS_SIDE_192000_A2,
-     CS_SIDE_192000_B1,
-     CS_SIDE_192000_B2,
-     (LVM_UINT16 )CS_SIDE_192000_SCALE}
-};
+        /* Headphone Side coefficients */
+        {CS_SIDE_8000_A0, /* 8kS/s coefficients */
+         CS_SIDE_8000_A1, CS_SIDE_8000_A2, CS_SIDE_8000_B1, CS_SIDE_8000_B2,
+         (LVM_UINT16)CS_SIDE_8000_SCALE},
+        {CS_SIDE_11025_A0, /* 11kS/s coefficients */
+         CS_SIDE_11025_A1, CS_SIDE_11025_A2, CS_SIDE_11025_B1, CS_SIDE_11025_B2,
+         (LVM_UINT16)CS_SIDE_11025_SCALE},
+        {CS_SIDE_12000_A0, /* 12kS/s coefficients */
+         CS_SIDE_12000_A1, CS_SIDE_12000_A2, CS_SIDE_12000_B1, CS_SIDE_12000_B2,
+         (LVM_UINT16)CS_SIDE_12000_SCALE},
+        {CS_SIDE_16000_A0, /* 16kS/s coefficients */
+         CS_SIDE_16000_A1, CS_SIDE_16000_A2, CS_SIDE_16000_B1, CS_SIDE_16000_B2,
+         (LVM_UINT16)CS_SIDE_16000_SCALE},
+        {CS_SIDE_22050_A0, /* 22kS/s coefficients */
+         CS_SIDE_22050_A1, CS_SIDE_22050_A2, CS_SIDE_22050_B1, CS_SIDE_22050_B2,
+         (LVM_UINT16)CS_SIDE_22050_SCALE},
+        {CS_SIDE_24000_A0, /* 24kS/s coefficients */
+         CS_SIDE_24000_A1, CS_SIDE_24000_A2, CS_SIDE_24000_B1, CS_SIDE_24000_B2,
+         (LVM_UINT16)CS_SIDE_24000_SCALE},
+        {CS_SIDE_32000_A0, /* 32kS/s coefficients */
+         CS_SIDE_32000_A1, CS_SIDE_32000_A2, CS_SIDE_32000_B1, CS_SIDE_32000_B2,
+         (LVM_UINT16)CS_SIDE_32000_SCALE},
+        {CS_SIDE_44100_A0, /* 44kS/s coefficients */
+         CS_SIDE_44100_A1, CS_SIDE_44100_A2, CS_SIDE_44100_B1, CS_SIDE_44100_B2,
+         (LVM_UINT16)CS_SIDE_44100_SCALE},
+        {CS_SIDE_48000_A0, /* 48kS/s coefficients */
+         CS_SIDE_48000_A1, CS_SIDE_48000_A2, CS_SIDE_48000_B1, CS_SIDE_48000_B2,
+         (LVM_UINT16)CS_SIDE_48000_SCALE},
+        {CS_SIDE_88200_A0, /* 88kS/s coefficients */
+         CS_SIDE_88200_A1, CS_SIDE_88200_A2, CS_SIDE_88200_B1, CS_SIDE_88200_B2,
+         (LVM_UINT16)CS_SIDE_88200_SCALE},
+        {CS_SIDE_96000_A0, /* 96kS/s coefficients */
+         CS_SIDE_96000_A1, CS_SIDE_96000_A2, CS_SIDE_96000_B1, CS_SIDE_96000_B2,
+         (LVM_UINT16)CS_SIDE_96000_SCALE},
+        {CS_SIDE_176400_A0, /* 176kS/s coefficients */
+         CS_SIDE_176400_A1, CS_SIDE_176400_A2, CS_SIDE_176400_B1, CS_SIDE_176400_B2,
+         (LVM_UINT16)CS_SIDE_176400_SCALE},
+        {CS_SIDE_192000_A0, /* 192kS/s coefficients */
+         CS_SIDE_192000_A1, CS_SIDE_192000_A2, CS_SIDE_192000_B1, CS_SIDE_192000_B2,
+         (LVM_UINT16)CS_SIDE_192000_SCALE}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -181,167 +112,87 @@
 /************************************************************************************/
 
 const BiquadA012B12CoefsSP_t LVCS_EqualiserCoefTable[] = {
-    /* Headphone coefficients */
-    {CS_EQUALISER_8000_A0,      /* 8kS/s coefficients */
-     CS_EQUALISER_8000_A1,
-     CS_EQUALISER_8000_A2,
-     CS_EQUALISER_8000_B1,
-     CS_EQUALISER_8000_B2,
-     (LVM_UINT16 )CS_EQUALISER_8000_SCALE},
-    {CS_EQUALISER_11025_A0,     /* 11kS/s coefficients */
-     CS_EQUALISER_11025_A1,
-     CS_EQUALISER_11025_A2,
-     CS_EQUALISER_11025_B1,
-     CS_EQUALISER_11025_B2,
-     (LVM_UINT16 )CS_EQUALISER_11025_SCALE},
-    {CS_EQUALISER_12000_A0,     /* 12kS/s coefficients */
-     CS_EQUALISER_12000_A1,
-     CS_EQUALISER_12000_A2,
-     CS_EQUALISER_12000_B1,
-     CS_EQUALISER_12000_B2,
-     (LVM_UINT16 )CS_EQUALISER_12000_SCALE},
-    {CS_EQUALISER_16000_A0,     /* 16kS/s coefficients */
-     CS_EQUALISER_16000_A1,
-     CS_EQUALISER_16000_A2,
-     CS_EQUALISER_16000_B1,
-     CS_EQUALISER_16000_B2,
-     (LVM_UINT16 )CS_EQUALISER_16000_SCALE},
-    {CS_EQUALISER_22050_A0,     /* 22kS/s coefficients */
-     CS_EQUALISER_22050_A1,
-     CS_EQUALISER_22050_A2,
-     CS_EQUALISER_22050_B1,
-     CS_EQUALISER_22050_B2,
-     (LVM_UINT16 )CS_EQUALISER_22050_SCALE},
-    {CS_EQUALISER_24000_A0,     /* 24kS/s coefficients */
-     CS_EQUALISER_24000_A1,
-     CS_EQUALISER_24000_A2,
-     CS_EQUALISER_24000_B1,
-     CS_EQUALISER_24000_B2,
-     (LVM_UINT16 )CS_EQUALISER_24000_SCALE},
-    {CS_EQUALISER_32000_A0,     /* 32kS/s coefficients */
-     CS_EQUALISER_32000_A1,
-     CS_EQUALISER_32000_A2,
-     CS_EQUALISER_32000_B1,
-     CS_EQUALISER_32000_B2,
-     (LVM_UINT16 )CS_EQUALISER_32000_SCALE},
-    {CS_EQUALISER_44100_A0,     /* 44kS/s coefficients */
-     CS_EQUALISER_44100_A1,
-     CS_EQUALISER_44100_A2,
-     CS_EQUALISER_44100_B1,
-     CS_EQUALISER_44100_B2,
-     (LVM_UINT16 )CS_EQUALISER_44100_SCALE},
-    {CS_EQUALISER_48000_A0,     /* 48kS/s coefficients */
-     CS_EQUALISER_48000_A1,
-     CS_EQUALISER_48000_A2,
-     CS_EQUALISER_48000_B1,
-     CS_EQUALISER_48000_B2,
-     (LVM_UINT16 )CS_EQUALISER_48000_SCALE},
-    {CS_EQUALISER_88200_A0,     /* 88kS/s coeffieients */
-     CS_EQUALISER_88200_A1,
-     CS_EQUALISER_88200_A2,
-     CS_EQUALISER_88200_B1,
-     CS_EQUALISER_88200_B2,
-     (LVM_UINT16)CS_EQUALISER_88200_SCALE},
-    {CS_EQUALISER_96000_A0,     /* 96kS/s coefficients */
-     CS_EQUALISER_96000_A1,
-     CS_EQUALISER_96000_A2,
-     CS_EQUALISER_96000_B1,
-     CS_EQUALISER_96000_B2,
-     (LVM_UINT16 )CS_EQUALISER_96000_SCALE},
-    {CS_EQUALISER_176400_A0,     /* 176kS/s coefficients */
-     CS_EQUALISER_176400_A1,
-     CS_EQUALISER_176400_A2,
-     CS_EQUALISER_176400_B1,
-     CS_EQUALISER_176400_B2,
-     (LVM_UINT16)CS_EQUALISER_176400_SCALE},
-    {CS_EQUALISER_192000_A0,     /* 192kS/s coefficients */
-     CS_EQUALISER_192000_A1,
-     CS_EQUALISER_192000_A2,
-     CS_EQUALISER_192000_B1,
-     CS_EQUALISER_192000_B2,
-     (LVM_UINT16 )CS_EQUALISER_192000_SCALE},
+        /* Headphone coefficients */
+        {CS_EQUALISER_8000_A0, /* 8kS/s coefficients */
+         CS_EQUALISER_8000_A1, CS_EQUALISER_8000_A2, CS_EQUALISER_8000_B1, CS_EQUALISER_8000_B2,
+         (LVM_UINT16)CS_EQUALISER_8000_SCALE},
+        {CS_EQUALISER_11025_A0, /* 11kS/s coefficients */
+         CS_EQUALISER_11025_A1, CS_EQUALISER_11025_A2, CS_EQUALISER_11025_B1, CS_EQUALISER_11025_B2,
+         (LVM_UINT16)CS_EQUALISER_11025_SCALE},
+        {CS_EQUALISER_12000_A0, /* 12kS/s coefficients */
+         CS_EQUALISER_12000_A1, CS_EQUALISER_12000_A2, CS_EQUALISER_12000_B1, CS_EQUALISER_12000_B2,
+         (LVM_UINT16)CS_EQUALISER_12000_SCALE},
+        {CS_EQUALISER_16000_A0, /* 16kS/s coefficients */
+         CS_EQUALISER_16000_A1, CS_EQUALISER_16000_A2, CS_EQUALISER_16000_B1, CS_EQUALISER_16000_B2,
+         (LVM_UINT16)CS_EQUALISER_16000_SCALE},
+        {CS_EQUALISER_22050_A0, /* 22kS/s coefficients */
+         CS_EQUALISER_22050_A1, CS_EQUALISER_22050_A2, CS_EQUALISER_22050_B1, CS_EQUALISER_22050_B2,
+         (LVM_UINT16)CS_EQUALISER_22050_SCALE},
+        {CS_EQUALISER_24000_A0, /* 24kS/s coefficients */
+         CS_EQUALISER_24000_A1, CS_EQUALISER_24000_A2, CS_EQUALISER_24000_B1, CS_EQUALISER_24000_B2,
+         (LVM_UINT16)CS_EQUALISER_24000_SCALE},
+        {CS_EQUALISER_32000_A0, /* 32kS/s coefficients */
+         CS_EQUALISER_32000_A1, CS_EQUALISER_32000_A2, CS_EQUALISER_32000_B1, CS_EQUALISER_32000_B2,
+         (LVM_UINT16)CS_EQUALISER_32000_SCALE},
+        {CS_EQUALISER_44100_A0, /* 44kS/s coefficients */
+         CS_EQUALISER_44100_A1, CS_EQUALISER_44100_A2, CS_EQUALISER_44100_B1, CS_EQUALISER_44100_B2,
+         (LVM_UINT16)CS_EQUALISER_44100_SCALE},
+        {CS_EQUALISER_48000_A0, /* 48kS/s coefficients */
+         CS_EQUALISER_48000_A1, CS_EQUALISER_48000_A2, CS_EQUALISER_48000_B1, CS_EQUALISER_48000_B2,
+         (LVM_UINT16)CS_EQUALISER_48000_SCALE},
+        {CS_EQUALISER_88200_A0, /* 88kS/s coefficients */
+         CS_EQUALISER_88200_A1, CS_EQUALISER_88200_A2, CS_EQUALISER_88200_B1, CS_EQUALISER_88200_B2,
+         (LVM_UINT16)CS_EQUALISER_88200_SCALE},
+        {CS_EQUALISER_96000_A0, /* 96kS/s coefficients */
+         CS_EQUALISER_96000_A1, CS_EQUALISER_96000_A2, CS_EQUALISER_96000_B1, CS_EQUALISER_96000_B2,
+         (LVM_UINT16)CS_EQUALISER_96000_SCALE},
+        {CS_EQUALISER_176400_A0, /* 176kS/s coefficients */
+         CS_EQUALISER_176400_A1, CS_EQUALISER_176400_A2, CS_EQUALISER_176400_B1,
+         CS_EQUALISER_176400_B2, (LVM_UINT16)CS_EQUALISER_176400_SCALE},
+        {CS_EQUALISER_192000_A0, /* 192kS/s coefficients */
+         CS_EQUALISER_192000_A1, CS_EQUALISER_192000_A2, CS_EQUALISER_192000_B1,
+         CS_EQUALISER_192000_B2, (LVM_UINT16)CS_EQUALISER_192000_SCALE},
 
-    /* Concert Sound EX Headphone coefficients */
-    {CSEX_EQUALISER_8000_A0,    /* 8kS/s coefficients */
-     CSEX_EQUALISER_8000_A1,
-     CSEX_EQUALISER_8000_A2,
-     CSEX_EQUALISER_8000_B1,
-     CSEX_EQUALISER_8000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_8000_SCALE},
-    {CSEX_EQUALISER_11025_A0,   /* 11kS/s coefficients */
-     CSEX_EQUALISER_11025_A1,
-     CSEX_EQUALISER_11025_A2,
-     CSEX_EQUALISER_11025_B1,
-     CSEX_EQUALISER_11025_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_11025_SCALE},
-    {CSEX_EQUALISER_12000_A0,   /* 12kS/s coefficients */
-     CSEX_EQUALISER_12000_A1,
-     CSEX_EQUALISER_12000_A2,
-     CSEX_EQUALISER_12000_B1,
-     CSEX_EQUALISER_12000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_12000_SCALE},
-    {CSEX_EQUALISER_16000_A0,   /* 16kS/s coefficients */
-     CSEX_EQUALISER_16000_A1,
-     CSEX_EQUALISER_16000_A2,
-     CSEX_EQUALISER_16000_B1,
-     CSEX_EQUALISER_16000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_16000_SCALE},
-    {CSEX_EQUALISER_22050_A0,   /* 22kS/s coefficients */
-     CSEX_EQUALISER_22050_A1,
-     CSEX_EQUALISER_22050_A2,
-     CSEX_EQUALISER_22050_B1,
-     CSEX_EQUALISER_22050_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_22050_SCALE},
-    {CSEX_EQUALISER_24000_A0,   /* 24kS/s coefficients */
-     CSEX_EQUALISER_24000_A1,
-     CSEX_EQUALISER_24000_A2,
-     CSEX_EQUALISER_24000_B1,
-     CSEX_EQUALISER_24000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_24000_SCALE},
-    {CSEX_EQUALISER_32000_A0,   /* 32kS/s coefficients */
-     CSEX_EQUALISER_32000_A1,
-     CSEX_EQUALISER_32000_A2,
-     CSEX_EQUALISER_32000_B1,
-     CSEX_EQUALISER_32000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_32000_SCALE},
-    {CSEX_EQUALISER_44100_A0,   /* 44kS/s coefficients */
-     CSEX_EQUALISER_44100_A1,
-     CSEX_EQUALISER_44100_A2,
-     CSEX_EQUALISER_44100_B1,
-     CSEX_EQUALISER_44100_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_44100_SCALE},
-    {CSEX_EQUALISER_48000_A0,   /* 48kS/s coefficients */
-     CSEX_EQUALISER_48000_A1,
-     CSEX_EQUALISER_48000_A2,
-     CSEX_EQUALISER_48000_B1,
-     CSEX_EQUALISER_48000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_48000_SCALE}
-    ,
-    {CSEX_EQUALISER_88200_A0,   /* 88kS/s coefficients */
-     CSEX_EQUALISER_88200_A1,
-     CSEX_EQUALISER_88200_A2,
-     CSEX_EQUALISER_88200_B1,
-     CSEX_EQUALISER_88200_B2,
-     (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
-    {CSEX_EQUALISER_96000_A0,   /* 96kS/s coefficients */
-     CSEX_EQUALISER_96000_A1,
-     CSEX_EQUALISER_96000_A2,
-     CSEX_EQUALISER_96000_B1,
-     CSEX_EQUALISER_96000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_96000_SCALE},
-    {CSEX_EQUALISER_176400_A0,   /* 176kS/s coefficients */
-     CSEX_EQUALISER_176400_A1,
-     CSEX_EQUALISER_176400_A2,
-     CSEX_EQUALISER_176400_B1,
-     CSEX_EQUALISER_176400_B2,
-     (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
-     {CSEX_EQUALISER_192000_A0,   /* 192kS/s coefficients */
-     CSEX_EQUALISER_192000_A1,
-     CSEX_EQUALISER_192000_A2,
-     CSEX_EQUALISER_192000_B1,
-     CSEX_EQUALISER_192000_B2,
-     (LVM_UINT16 )CSEX_EQUALISER_192000_SCALE}
-};
+        /* Concert Sound EX Headphone coefficients */
+        {CSEX_EQUALISER_8000_A0, /* 8kS/s coefficients */
+         CSEX_EQUALISER_8000_A1, CSEX_EQUALISER_8000_A2, CSEX_EQUALISER_8000_B1,
+         CSEX_EQUALISER_8000_B2, (LVM_UINT16)CSEX_EQUALISER_8000_SCALE},
+        {CSEX_EQUALISER_11025_A0, /* 11kS/s coefficients */
+         CSEX_EQUALISER_11025_A1, CSEX_EQUALISER_11025_A2, CSEX_EQUALISER_11025_B1,
+         CSEX_EQUALISER_11025_B2, (LVM_UINT16)CSEX_EQUALISER_11025_SCALE},
+        {CSEX_EQUALISER_12000_A0, /* 12kS/s coefficients */
+         CSEX_EQUALISER_12000_A1, CSEX_EQUALISER_12000_A2, CSEX_EQUALISER_12000_B1,
+         CSEX_EQUALISER_12000_B2, (LVM_UINT16)CSEX_EQUALISER_12000_SCALE},
+        {CSEX_EQUALISER_16000_A0, /* 16kS/s coefficients */
+         CSEX_EQUALISER_16000_A1, CSEX_EQUALISER_16000_A2, CSEX_EQUALISER_16000_B1,
+         CSEX_EQUALISER_16000_B2, (LVM_UINT16)CSEX_EQUALISER_16000_SCALE},
+        {CSEX_EQUALISER_22050_A0, /* 22kS/s coefficients */
+         CSEX_EQUALISER_22050_A1, CSEX_EQUALISER_22050_A2, CSEX_EQUALISER_22050_B1,
+         CSEX_EQUALISER_22050_B2, (LVM_UINT16)CSEX_EQUALISER_22050_SCALE},
+        {CSEX_EQUALISER_24000_A0, /* 24kS/s coefficients */
+         CSEX_EQUALISER_24000_A1, CSEX_EQUALISER_24000_A2, CSEX_EQUALISER_24000_B1,
+         CSEX_EQUALISER_24000_B2, (LVM_UINT16)CSEX_EQUALISER_24000_SCALE},
+        {CSEX_EQUALISER_32000_A0, /* 32kS/s coefficients */
+         CSEX_EQUALISER_32000_A1, CSEX_EQUALISER_32000_A2, CSEX_EQUALISER_32000_B1,
+         CSEX_EQUALISER_32000_B2, (LVM_UINT16)CSEX_EQUALISER_32000_SCALE},
+        {CSEX_EQUALISER_44100_A0, /* 44kS/s coefficients */
+         CSEX_EQUALISER_44100_A1, CSEX_EQUALISER_44100_A2, CSEX_EQUALISER_44100_B1,
+         CSEX_EQUALISER_44100_B2, (LVM_UINT16)CSEX_EQUALISER_44100_SCALE},
+        {CSEX_EQUALISER_48000_A0, /* 48kS/s coefficients */
+         CSEX_EQUALISER_48000_A1, CSEX_EQUALISER_48000_A2, CSEX_EQUALISER_48000_B1,
+         CSEX_EQUALISER_48000_B2, (LVM_UINT16)CSEX_EQUALISER_48000_SCALE},
+        {CSEX_EQUALISER_88200_A0, /* 88kS/s coefficients */
+         CSEX_EQUALISER_88200_A1, CSEX_EQUALISER_88200_A2, CSEX_EQUALISER_88200_B1,
+         CSEX_EQUALISER_88200_B2, (LVM_UINT16)CSEX_EQUALISER_88200_SCALE},
+        {CSEX_EQUALISER_96000_A0, /* 96kS/s coefficients */
+         CSEX_EQUALISER_96000_A1, CSEX_EQUALISER_96000_A2, CSEX_EQUALISER_96000_B1,
+         CSEX_EQUALISER_96000_B2, (LVM_UINT16)CSEX_EQUALISER_96000_SCALE},
+        {CSEX_EQUALISER_176400_A0, /* 176kS/s coefficients */
+         CSEX_EQUALISER_176400_A1, CSEX_EQUALISER_176400_A2, CSEX_EQUALISER_176400_B1,
+         CSEX_EQUALISER_176400_B2, (LVM_UINT16)CSEX_EQUALISER_176400_SCALE},
+        {CSEX_EQUALISER_192000_A0, /* 192kS/s coefficients */
+         CSEX_EQUALISER_192000_A1, CSEX_EQUALISER_192000_A2, CSEX_EQUALISER_192000_B1,
+         CSEX_EQUALISER_192000_B2, (LVM_UINT16)CSEX_EQUALISER_192000_SCALE}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -350,20 +201,12 @@
 /************************************************************************************/
 
 /* Stereo delay table for Concert Sound */
-const LVM_UINT16    LVCS_StereoDelayCS[] = {
-    LVCS_STEREODELAY_CS_8KHZ,
-    LVCS_STEREODELAY_CS_11KHZ,
-    LVCS_STEREODELAY_CS_12KHZ,
-    LVCS_STEREODELAY_CS_16KHZ,
-    LVCS_STEREODELAY_CS_22KHZ,
-    LVCS_STEREODELAY_CS_24KHZ,
-    LVCS_STEREODELAY_CS_32KHZ,
-    LVCS_STEREODELAY_CS_44KHZ,
-    LVCS_STEREODELAY_CS_48KHZ,
-    LVCS_STEREODELAY_CS_88KHZ,
-    LVCS_STEREODELAY_CS_96KHZ,
-    LVCS_STEREODELAY_CS_176KHZ,
-    LVCS_STEREODELAY_CS_192KHZ,
+const LVM_UINT16 LVCS_StereoDelayCS[] = {
+        LVCS_STEREODELAY_CS_8KHZ,   LVCS_STEREODELAY_CS_11KHZ, LVCS_STEREODELAY_CS_12KHZ,
+        LVCS_STEREODELAY_CS_16KHZ,  LVCS_STEREODELAY_CS_22KHZ, LVCS_STEREODELAY_CS_24KHZ,
+        LVCS_STEREODELAY_CS_32KHZ,  LVCS_STEREODELAY_CS_44KHZ, LVCS_STEREODELAY_CS_48KHZ,
+        LVCS_STEREODELAY_CS_88KHZ,  LVCS_STEREODELAY_CS_96KHZ, LVCS_STEREODELAY_CS_176KHZ,
+        LVCS_STEREODELAY_CS_192KHZ,
 };
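
LVCS_ReverbGeneratorInit above indexes this table directly with the sample-rate value and doubles the result for stereo. A small sketch of that lookup; the index 8 for 48 kHz assumes the LVM sample-rate enum follows the table order (8 kHz through 192 kHz):

#include <cstdint>

using LVM_UINT16 = uint16_t;  // assumption: matches the LVM typedefs
using LVM_INT16 = int16_t;

extern const LVM_UINT16 LVCS_StereoDelayCS[];  // the table defined above

// Mirrors the lookup in LVCS_ReverbGeneratorInit: the per-channel delay is read by sample-rate
// index, then doubled for an interleaved stereo buffer.
static LVM_INT16 delaySizeAt48kHz(bool stereo) {
    const LVM_UINT16 delay = LVCS_StereoDelayCS[8];
    return (LVM_INT16)(stereo ? 2 * delay : delay);
}
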
 
 /************************************************************************************/
@@ -373,87 +216,46 @@
 /************************************************************************************/
 
 const BiquadA012B12CoefsSP_t LVCS_ReverbCoefTable[] = {
-    /* Headphone coefficients */
-    {CS_REVERB_8000_A0,             /* 8kS/s coefficients */
-     CS_REVERB_8000_A1,
-     CS_REVERB_8000_A2,
-     CS_REVERB_8000_B1,
-     CS_REVERB_8000_B2,
-     (LVM_UINT16 )CS_REVERB_8000_SCALE},
-    {CS_REVERB_11025_A0,            /* 11kS/s coefficients */
-     CS_REVERB_11025_A1,
-     CS_REVERB_11025_A2,
-     CS_REVERB_11025_B1,
-     CS_REVERB_11025_B2,
-     (LVM_UINT16 )CS_REVERB_11025_SCALE},
-    {CS_REVERB_12000_A0,            /* 12kS/s coefficients */
-     CS_REVERB_12000_A1,
-     CS_REVERB_12000_A2,
-     CS_REVERB_12000_B1,
-     CS_REVERB_12000_B2,
-     (LVM_UINT16 )CS_REVERB_12000_SCALE},
-    {CS_REVERB_16000_A0,            /* 16kS/s coefficients */
-     CS_REVERB_16000_A1,
-     CS_REVERB_16000_A2,
-     CS_REVERB_16000_B1,
-     CS_REVERB_16000_B2,
-     (LVM_UINT16 )CS_REVERB_16000_SCALE},
-    {CS_REVERB_22050_A0,            /* 22kS/s coefficients */
-     CS_REVERB_22050_A1,
-     CS_REVERB_22050_A2,
-     CS_REVERB_22050_B1,
-     CS_REVERB_22050_B2,
-     (LVM_UINT16 )CS_REVERB_22050_SCALE},
-    {CS_REVERB_24000_A0,            /* 24kS/s coefficients */
-     CS_REVERB_24000_A1,
-     CS_REVERB_24000_A2,
-     CS_REVERB_24000_B1,
-     CS_REVERB_24000_B2,
-     (LVM_UINT16 )CS_REVERB_24000_SCALE},
-    {CS_REVERB_32000_A0,            /* 32kS/s coefficients */
-     CS_REVERB_32000_A1,
-     CS_REVERB_32000_A2,
-     CS_REVERB_32000_B1,
-     CS_REVERB_32000_B2,
-     (LVM_UINT16 )CS_REVERB_32000_SCALE},
-    {CS_REVERB_44100_A0,            /* 44kS/s coefficients */
-     CS_REVERB_44100_A1,
-     CS_REVERB_44100_A2,
-     CS_REVERB_44100_B1,
-     CS_REVERB_44100_B2,
-     (LVM_UINT16 )CS_REVERB_44100_SCALE},
-    {CS_REVERB_48000_A0,            /* 48kS/s coefficients */
-     CS_REVERB_48000_A1,
-     CS_REVERB_48000_A2,
-     CS_REVERB_48000_B1,
-     CS_REVERB_48000_B2,
-     (LVM_UINT16 )CS_REVERB_48000_SCALE}
-    ,
-    {CS_REVERB_88200_A0,            /* 88kS/s coefficients */
-     CS_REVERB_88200_A1,
-     CS_REVERB_88200_A2,
-     CS_REVERB_88200_B1,
-     CS_REVERB_88200_B2,
-     (LVM_UINT16)CS_REVERB_88200_SCALE},
-    {CS_REVERB_96000_A0,            /* 96kS/s coefficients */
-     CS_REVERB_96000_A1,
-     CS_REVERB_96000_A2,
-     CS_REVERB_96000_B1,
-     CS_REVERB_96000_B2,
-     (LVM_UINT16 )CS_REVERB_96000_SCALE},
-    {CS_REVERB_176400_A0,            /* 176kS/s coefficients */
-     CS_REVERB_176400_A1,
-     CS_REVERB_176400_A2,
-     CS_REVERB_176400_B1,
-     CS_REVERB_176400_B2,
-     (LVM_UINT16)CS_REVERB_176400_SCALE},
-     {CS_REVERB_192000_A0,            /* 192kS/s coefficients */
-     CS_REVERB_192000_A1,
-     CS_REVERB_192000_A2,
-     CS_REVERB_192000_B1,
-     CS_REVERB_192000_B2,
-     (LVM_UINT16 )CS_REVERB_192000_SCALE}
-};
+        /* Headphone coefficients */
+        {CS_REVERB_8000_A0, /* 8kS/s coefficients */
+         CS_REVERB_8000_A1, CS_REVERB_8000_A2, CS_REVERB_8000_B1, CS_REVERB_8000_B2,
+         (LVM_UINT16)CS_REVERB_8000_SCALE},
+        {CS_REVERB_11025_A0, /* 11kS/s coefficients */
+         CS_REVERB_11025_A1, CS_REVERB_11025_A2, CS_REVERB_11025_B1, CS_REVERB_11025_B2,
+         (LVM_UINT16)CS_REVERB_11025_SCALE},
+        {CS_REVERB_12000_A0, /* 12kS/s coefficients */
+         CS_REVERB_12000_A1, CS_REVERB_12000_A2, CS_REVERB_12000_B1, CS_REVERB_12000_B2,
+         (LVM_UINT16)CS_REVERB_12000_SCALE},
+        {CS_REVERB_16000_A0, /* 16kS/s coefficients */
+         CS_REVERB_16000_A1, CS_REVERB_16000_A2, CS_REVERB_16000_B1, CS_REVERB_16000_B2,
+         (LVM_UINT16)CS_REVERB_16000_SCALE},
+        {CS_REVERB_22050_A0, /* 22kS/s coefficients */
+         CS_REVERB_22050_A1, CS_REVERB_22050_A2, CS_REVERB_22050_B1, CS_REVERB_22050_B2,
+         (LVM_UINT16)CS_REVERB_22050_SCALE},
+        {CS_REVERB_24000_A0, /* 24kS/s coefficients */
+         CS_REVERB_24000_A1, CS_REVERB_24000_A2, CS_REVERB_24000_B1, CS_REVERB_24000_B2,
+         (LVM_UINT16)CS_REVERB_24000_SCALE},
+        {CS_REVERB_32000_A0, /* 32kS/s coefficients */
+         CS_REVERB_32000_A1, CS_REVERB_32000_A2, CS_REVERB_32000_B1, CS_REVERB_32000_B2,
+         (LVM_UINT16)CS_REVERB_32000_SCALE},
+        {CS_REVERB_44100_A0, /* 44kS/s coefficients */
+         CS_REVERB_44100_A1, CS_REVERB_44100_A2, CS_REVERB_44100_B1, CS_REVERB_44100_B2,
+         (LVM_UINT16)CS_REVERB_44100_SCALE},
+        {CS_REVERB_48000_A0, /* 48kS/s coefficients */
+         CS_REVERB_48000_A1, CS_REVERB_48000_A2, CS_REVERB_48000_B1, CS_REVERB_48000_B2,
+         (LVM_UINT16)CS_REVERB_48000_SCALE},
+        {CS_REVERB_88200_A0, /* 88kS/s coefficients */
+         CS_REVERB_88200_A1, CS_REVERB_88200_A2, CS_REVERB_88200_B1, CS_REVERB_88200_B2,
+         (LVM_UINT16)CS_REVERB_88200_SCALE},
+        {CS_REVERB_96000_A0, /* 96kS/s coefficients */
+         CS_REVERB_96000_A1, CS_REVERB_96000_A2, CS_REVERB_96000_B1, CS_REVERB_96000_B2,
+         (LVM_UINT16)CS_REVERB_96000_SCALE},
+        {CS_REVERB_176400_A0, /* 176kS/s coefficients */
+         CS_REVERB_176400_A1, CS_REVERB_176400_A2, CS_REVERB_176400_B1, CS_REVERB_176400_B2,
+         (LVM_UINT16)CS_REVERB_176400_SCALE},
+        {CS_REVERB_192000_A0, /* 192kS/s coefficients */
+         CS_REVERB_192000_A1, CS_REVERB_192000_A2, CS_REVERB_192000_B1, CS_REVERB_192000_B2,
+         (LVM_UINT16)CS_REVERB_192000_SCALE}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -461,20 +263,14 @@
 /*                                                                                  */
 /************************************************************************************/
 
-const Gain_t LVCS_OutputGainTable[] = {
-    {LVCS_HEADPHONE_SHIFT,         /* Headphone, stereo mode */
-     LVCS_HEADPHONE_SHIFTLOSS,
-     LVCS_HEADPHONE_GAIN},
-    {LVCS_EX_HEADPHONE_SHIFT,      /* EX Headphone, stereo mode */
-     LVCS_EX_HEADPHONE_SHIFTLOSS,
-     LVCS_EX_HEADPHONE_GAIN},
-    {LVCS_HEADPHONE_SHIFT,         /* Headphone, mono mode */
-     LVCS_HEADPHONE_SHIFTLOSS,
-     LVCS_HEADPHONE_GAIN},
-    {LVCS_EX_HEADPHONE_SHIFT,      /* EX Headphone, mono mode */
-     LVCS_EX_HEADPHONE_SHIFTLOSS,
-     LVCS_EX_HEADPHONE_GAIN}
-};
+const Gain_t LVCS_OutputGainTable[] = {{LVCS_HEADPHONE_SHIFT, /* Headphone, stereo mode */
+                                        LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+                                       {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, stereo mode */
+                                        LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN},
+                                       {LVCS_HEADPHONE_SHIFT, /* Headphone, mono mode */
+                                        LVCS_HEADPHONE_SHIFTLOSS, LVCS_HEADPHONE_GAIN},
+                                       {LVCS_EX_HEADPHONE_SHIFT, /* EX Headphone, mono mode */
+                                        LVCS_EX_HEADPHONE_SHIFTLOSS, LVCS_EX_HEADPHONE_GAIN}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -501,24 +297,14 @@
 /*          1024    is -12dB gain                                                   */
 /*                                                                                  */
 /************************************************************************************/
-const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {
-    {0.433362f,          /* Headphone, stereo mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {0.433362f,          /* EX Headphone, stereo mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {1.000000f,         /* Headphone, mono mode */
-     0.000000f,
-     1.000024f,
-     1.412640f},
-    {1.000000f,         /* EX Headphone, mono mode */
-     0.000000f,
-     1.000024f,
-     1.412640f}
-};
+const LVCS_VolCorrect_t LVCS_VolCorrectTable[] = {{0.433362f, /* Headphone, stereo mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {0.433362f, /* EX Headphone, stereo mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {1.000000f, /* Headphone, mono mode */
+                                                   0.000000f, 1.000024f, 1.412640f},
+                                                  {1.000000f, /* EX Headphone, mono mode */
+                                                   0.000000f, 1.000024f, 1.412640f}};
 
 /************************************************************************************/
 /*                                                                                  */
@@ -526,51 +312,32 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#define LVCS_VOL_TC_Fs8000      32580       /* Floating point value 0.994262695 */
-#define LVCS_VOL_TC_Fs11025     32632       /* Floating point value 0.995849609 */
-#define LVCS_VOL_TC_Fs12000     32643       /* Floating point value 0.996185303 */
-#define LVCS_VOL_TC_Fs16000     32674       /* Floating point value 0.997131348 */
-#define LVCS_VOL_TC_Fs22050     32700       /* Floating point value 0.997924805 */
-#define LVCS_VOL_TC_Fs24000     32705       /* Floating point value 0.998077393 */
-#define LVCS_VOL_TC_Fs32000     32721       /* Floating point value 0.998565674 */
-#define LVCS_VOL_TC_Fs44100     32734       /* Floating point value 0.998962402 */
-#define LVCS_VOL_TC_Fs48000     32737       /* Floating point value 0.999053955 */
-#define LVCS_VOL_TC_Fs88200     32751       /* Floating point value 0.999481066 */
-#define LVCS_VOL_TC_Fs96000     32751       /* Floating point value 0.999511703 */   /* Todo @ need to re check this value*/
-#define LVCS_VOL_TC_Fs176400    32759       /* Floating point value 0.999740499 */
-#define LVCS_VOL_TC_Fs192000    32763       /* Floating point value 0.999877925 */  /* Todo @ need to re check this value*/
+#define LVCS_VOL_TC_Fs8000 32580  /* Floating point value 0.994262695 */
+#define LVCS_VOL_TC_Fs11025 32632 /* Floating point value 0.995849609 */
+#define LVCS_VOL_TC_Fs12000 32643 /* Floating point value 0.996185303 */
+#define LVCS_VOL_TC_Fs16000 32674 /* Floating point value 0.997131348 */
+#define LVCS_VOL_TC_Fs22050 32700 /* Floating point value 0.997924805 */
+#define LVCS_VOL_TC_Fs24000 32705 /* Floating point value 0.998077393 */
+#define LVCS_VOL_TC_Fs32000 32721 /* Floating point value 0.998565674 */
+#define LVCS_VOL_TC_Fs44100 32734 /* Floating point value 0.998962402 */
+#define LVCS_VOL_TC_Fs48000 32737 /* Floating point value 0.999053955 */
+#define LVCS_VOL_TC_Fs88200 32751 /* Floating point value 0.999481066 */
+#define LVCS_VOL_TC_Fs96000 \
+    32751 /* Floating point value 0.999511703 */ /* TODO: need to recheck this value */
+#define LVCS_VOL_TC_Fs176400 32759               /* Floating point value 0.999740499 */
+#define LVCS_VOL_TC_Fs192000 \
+    32763 /* Floating point value 0.999877925 */ /* TODO: need to recheck this value */
 
-const LVM_INT16 LVCS_VolumeTCTable[13] = {LVCS_VOL_TC_Fs8000,
-                                          LVCS_VOL_TC_Fs11025,
-                                          LVCS_VOL_TC_Fs12000,
-                                          LVCS_VOL_TC_Fs16000,
-                                          LVCS_VOL_TC_Fs22050,
-                                          LVCS_VOL_TC_Fs24000,
-                                          LVCS_VOL_TC_Fs32000,
-                                          LVCS_VOL_TC_Fs44100,
-                                          LVCS_VOL_TC_Fs48000,
-                                          LVCS_VOL_TC_Fs88200,
-                                          LVCS_VOL_TC_Fs96000,
-                                          LVCS_VOL_TC_Fs176400,
-                                          LVCS_VOL_TC_Fs192000
-};
+const LVM_INT16 LVCS_VolumeTCTable[13] = {
+        LVCS_VOL_TC_Fs8000,  LVCS_VOL_TC_Fs11025, LVCS_VOL_TC_Fs12000, LVCS_VOL_TC_Fs16000,
+        LVCS_VOL_TC_Fs22050, LVCS_VOL_TC_Fs24000, LVCS_VOL_TC_Fs32000, LVCS_VOL_TC_Fs44100,
+        LVCS_VOL_TC_Fs48000, LVCS_VOL_TC_Fs88200, LVCS_VOL_TC_Fs96000, LVCS_VOL_TC_Fs176400,
+        LVCS_VOL_TC_Fs192000};
 
 /************************************************************************************/
 /*                                                                                  */
 /*  Sample rate table                                                               */
 /*                                                                                  */
 /************************************************************************************/
-const LVM_INT32   LVCS_SampleRateTable[13] = {8000,
-                                              11025,
-                                              12000,
-                                              16000,
-                                              22050,
-                                              24000,
-                                              32000,
-                                              44100,
-                                              48000,
-                                              88200,
-                                              96000,
-                                              176400,
-                                              192000
-};
+const LVM_INT32 LVCS_SampleRateTable[13] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                            44100, 48000, 88200, 96000, 176400, 192000};
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
index 5490699..766f5f2 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Tables.h
@@ -24,7 +24,7 @@
 /*                                                                                  */
 /************************************************************************************/
 
-#include "BIQUAD.h"                             /* Biquad definitions */
+#include "BIQUAD.h" /* Biquad definitions */
 
 /************************************************************************************/
 /*                                                                                  */
@@ -106,35 +106,34 @@
 /*                                                                                  */
 /************************************************************************************/
 
-extern const LVM_INT32          LVCS_SampleRateTable[];
+extern const LVM_INT32 LVCS_SampleRateTable[];
 
 /* Speaker coefficient tables */
-extern LVM_UINT16               LVCS_MS_Small_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Small_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Small_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Small_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Small_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Small_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Small_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Small_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Small_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Small_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Small_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Small_ReverbGainTable[];
 
-extern LVM_UINT16               LVCS_MS_Medium_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Medium_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Medium_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Medium_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Medium_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Medium_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Medium_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Medium_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Medium_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Medium_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Medium_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Medium_ReverbGainTable[];
 
-extern LVM_UINT16               LVCS_MS_Large_SEMiddleGainTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_SESideCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_EqualiserCoefTable[];
-extern BiquadA012B12CoefsSP_t   LVCS_MS_Large_ReverbCoefTable[] ;
-extern LVM_UINT16               LVCS_MS_Large_StereoDelayCS4MS[];
-extern Gain_t                   LVCS_MS_Large_OutputGainTable[];
-extern LVCS_VolCorrect_t        LVCS_MS_Large_VolCorrectTable[];
-extern LVM_UINT16               LVCS_MS_Large_ReverbGainTable[];
+extern LVM_UINT16 LVCS_MS_Large_SEMiddleGainTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_SESideCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_EqualiserCoefTable[];
+extern BiquadA012B12CoefsSP_t LVCS_MS_Large_ReverbCoefTable[];
+extern LVM_UINT16 LVCS_MS_Large_StereoDelayCS4MS[];
+extern Gain_t LVCS_MS_Large_OutputGainTable[];
+extern LVCS_VolCorrect_t LVCS_MS_Large_VolCorrectTable[];
+extern LVM_UINT16 LVCS_MS_Large_ReverbGainTable[];
 
 #endif /* __LVCS_TABLES_H__ */
-
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 674c246..9939ed1 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -1,5 +1,64 @@
 // Build the unit tests for effects
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "EffectReverbTest",
+    vendor: true,
+    gtest: true,
+    host_supported: true,
+    srcs: [
+        "EffectReverbTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libeffects/lvm/lib/Common/lib",
+        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libreverb",
+        "libreverbwrapper",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+}
+
+cc_test {
+    name: "EffectBundleTest",
+    vendor: true,
+    gtest: true,
+    host_supported: true,
+    test_suites: ["device-tests"],
+    srcs: [
+        "EffectBundleTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libbundlewrapper",
+        "libmusicbundle",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+}
+
 cc_test {
     name: "lvmtest",
     host_supported: false,
@@ -44,6 +103,40 @@
 }
 
 cc_test {
+    name: "reverb_test",
+    host_supported: true,
+    proprietary: true,
+    gtest: false,
+
+    include_dirs: [
+        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libreverb",
+        "libreverbwrapper",
+    ],
+    srcs: [
+        "reverb_test.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
     name: "snr",
     host_supported: false,
 
diff --git a/media/libeffects/lvm/tests/EffectBundleTest.cpp b/media/libeffects/lvm/tests/EffectBundleTest.cpp
new file mode 100644
index 0000000..881ffb1
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectBundleTest.cpp
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+using namespace android;
+
+// Update isBassBoost, if the order of effects is updated
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW BassBoost
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Virtualizer
+        {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Equalizer
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW Volume
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+static bool isBassBoost(const effect_uuid_t* uuid) {
+    // Update this, if the order of effects in kEffectUuids is updated
+    return uuid == &kEffectUuids[0];
+}
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
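+// Test parameter tuple: indices into EffectTestHelper::kChMasks, kSampleRates, kFrameCounts,
+// kLoopCounts and kEffectUuids, in that order (see the constructor below).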
+typedef std::tuple<int, int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_out_mask(mChMask)),
+          mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<4>(GetParam())]) {}
+
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << "chMask: " << mChMask << " sampleRate: " << mSampleRate
+                 << " frameCount: " << mFrameCount << " loopCount: " << mLoopCount);
+
+    EffectTestHelper effect(mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(mTotalFrameCount * mChannelCount);
+    std::vector<float> output(mTotalFrameCount * mChannelCount);
+    std::minstd_rand gen(mChMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectBundleTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids)));
+
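+// Comparison test parameter tuple: indices into kSampleRates, kFrameCounts, kLoopCounts and
+// kEffectUuids; the channel mask is swept inside the test body instead.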
+typedef std::tuple<int, int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<3>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+};
+
+// Compares first two channels in multi-channel output to stereo output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate << " frameCount: "
+                                    << mFrameCount << " loopCount: " << mLoopCount);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<float> monoInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+
+    // Generate stereo by repeating mono channel data
+    std::vector<float> stereoInput(mTotalFrameCount * FCC_2);
+    adjust_channels(monoInput.data(), FCC_1, stereoInput.data(), FCC_2, sizeof(float),
+                    mTotalFrameCount * sizeof(float) * FCC_1);
+
+    // Apply effect on stereo channels
+    EffectTestHelper stereoEffect(mUuid, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_STEREO,
+                                  mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
+
+    std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.process(stereoInput.data(), stereoOutput.data()));
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.releaseEffect());
+
+    // Convert stereo float data to stereo int16_t to be used as reference
+    std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
+    memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
+
+    for (size_t chMask : EffectTestHelper::kChMasks) {
+        size_t channelCount = audio_channel_count_from_out_mask(chMask);
+        EffectTestHelper testEffect(mUuid, chMask, chMask, mSampleRate, mFrameCount, mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+
+        std::vector<float> testInput(mTotalFrameCount * channelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fp = &testInput[i * channelCount];
+            std::fill(fp, fp + channelCount, monoInput[i]);
+        }
+
+        std::vector<float> testOutput(mTotalFrameCount * channelCount);
+        ASSERT_NO_FATAL_FAILURE(testEffect.process(testInput.data(), testOutput.data()));
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        // Extract first two channels
+        std::vector<float> stereoTestOutput(mTotalFrameCount * FCC_2);
+        adjust_channels(testOutput.data(), channelCount, stereoTestOutput.data(), FCC_2,
+                        sizeof(float), mTotalFrameCount * sizeof(float) * channelCount);
+
+        // Convert the test data to int16_t
+        std::vector<int16_t> stereoTestI16(mTotalFrameCount * FCC_2);
+        memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
+                                 mTotalFrameCount * FCC_2);
+
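+        // BassBoost output is not expected to be bit-exact across channel configurations,
+        // so it is checked against the stereo reference with an SNR threshold; the other
+        // effects are required to match the stereo reference exactly.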
+        if (isBassBoost(mUuid)) {
+            // SNR must be above the threshold
+            float snr = computeSnr<int16_t>(stereoRefI16.data(), stereoTestI16.data(),
+                                            mTotalFrameCount * FCC_2);
+            ASSERT_GT(snr, EffectTestHelper::kSNRThreshold)
+                    << "SNR " << snr << " is lower than " << EffectTestHelper::kSNRThreshold;
+        } else {
+            ASSERT_EQ(0,
+                      memcmp(stereoRefI16.data(), stereoTestI16.data(), mTotalFrameCount * FCC_2))
+                    << "First two channels do not match with stereo output \n";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectBundleTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/libeffects/lvm/tests/EffectReverbTest.cpp b/media/libeffects/lvm/tests/EffectReverbTest.cpp
new file mode 100644
index 0000000..59453eb
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectReverbTest.cpp
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_effects/effect_presetreverb.h>
+#include <VectorArithmetic.h>
+
+#include "EffectTestHelper.h"
+using namespace android;
+
+constexpr effect_uuid_t kEffectUuids[] = {
+        // NXP SW insert environmental reverb
+        {0xc7a511a0, 0xa3bb, 0x11df, 0x860e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW insert preset reverb
+        {0x172cdf00, 0xa3bc, 0x11df, 0xa72f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW auxiliary environmental reverb
+        {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // NXP SW auxiliary preset reverb
+        {0xf29a1400, 0xa3bb, 0x11df, 0x8ddc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+static bool isAuxMode(const effect_uuid_t* uuid) {
+    // Update this, if the order of effects in kEffectUuids is updated
+    return (uuid == &kEffectUuids[2] || uuid == &kEffectUuids[3]);
+}
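+// Auxiliary reverbs consume a mono send buffer, so the tests below force the input
+// channel mask to AUDIO_CHANNEL_OUT_MONO whenever isAuxMode() is true.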
+
+constexpr int kPresets[] = {
+        REVERB_PRESET_NONE,      REVERB_PRESET_SMALLROOM,  REVERB_PRESET_MEDIUMROOM,
+        REVERB_PRESET_LARGEROOM, REVERB_PRESET_MEDIUMHALL, REVERB_PRESET_LARGEHALL,
+        REVERB_PRESET_PLATE,
+};
+
+constexpr size_t kNumPresets = std::size(kPresets);
+
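+// Test parameter tuple: indices into kChMasks, kSampleRates, kFrameCounts, kLoopCounts,
+// kEffectUuids and kPresets, in that order (see the constructor below).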
+typedef std::tuple<int, int, int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<4>(GetParam())]),
+          mInChMask(isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO
+                                     : EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+          mOutChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+          mPreset(kPresets[std::get<5>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+    const size_t mInChMask;
+    const size_t mInChannelCount;
+    const size_t mOutChMask;
+    const size_t mOutChannelCount;
+    const size_t mPreset;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << "outChMask: " << mOutChMask << " sampleRate: " << mSampleRate
+                                    << " frameCount: " << mFrameCount
+                                    << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+    EffectTestHelper effect(mUuid, mInChMask, mOutChMask, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(effect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(mTotalFrameCount * mInChannelCount);
+    std::vector<float> output(mTotalFrameCount * mOutChannelCount);
+    std::minstd_rand gen(mOutChMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectReverbTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids),
+                           ::testing::Range(0, (int)kNumPresets)));
+
+typedef std::tuple<int, int, int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kEffectUuids[std::get<3>(GetParam())]),
+          mPreset(kPresets[std::get<4>(GetParam())]) {}
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+    const size_t mPreset;
+};
+
+// Compares first two channels in multi-channel output to stereo output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << " sampleRate: " << mSampleRate << " frameCount: " << mFrameCount
+                 << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<float> monoInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+
+    // Generate stereo by repeating mono channel data
+    std::vector<float> stereoInput(mTotalFrameCount * FCC_2);
+    adjust_channels(monoInput.data(), FCC_1, stereoInput.data(), FCC_2, sizeof(float),
+                    mTotalFrameCount * sizeof(float) * FCC_1);
+
+    // Apply effect on stereo channels
+    EffectTestHelper stereoEffect(
+            mUuid, isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_CHANNEL_OUT_STEREO, mSampleRate, mFrameCount, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
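+    // Auxiliary reverbs are driven with the mono send buffer directly; insert reverbs
+    // receive the duplicated stereo input.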
+    std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.process(
+            (isAuxMode(mUuid) ? monoInput.data() : stereoInput.data()), stereoOutput.data()));
+    ASSERT_NO_FATAL_FAILURE(stereoEffect.releaseEffect());
+
+    // Average of both channels data is stored for mono comparison
+    std::vector<float> monoOutput(mTotalFrameCount);
+    From2iToMono_Float((const float*)stereoOutput.data(), monoOutput.data(), mTotalFrameCount);
+
+    // Convert stereo float data to stereo int16_t to be used as reference
+    std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
+    memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
+
+    // mono int16_t to be used as reference for mono comparison
+    std::vector<int16_t> monoRefI16(mTotalFrameCount);
+    memcpy_to_i16_from_float(monoRefI16.data(), monoOutput.data(), mTotalFrameCount);
+
+    for (size_t outChMask : EffectTestHelper::kChMasks) {
+        size_t outChannelCount = audio_channel_count_from_out_mask(outChMask);
+        size_t inChMask = isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : outChMask;
+
+        EffectTestHelper testEffect(mUuid, inChMask, outChMask, mSampleRate, mFrameCount,
+                                    mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+        std::vector<float> testInput(mTotalFrameCount * outChannelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fp = &testInput[i * outChannelCount];
+            std::fill(fp, fp + outChannelCount, monoInput[i]);
+        }
+
+        std::vector<float> testOutput(mTotalFrameCount * outChannelCount);
+        ASSERT_NO_FATAL_FAILURE(testEffect.process(
+                (isAuxMode(mUuid) ? monoInput.data() : testInput.data()), testOutput.data()));
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        if (outChannelCount == FCC_1) {
+            // Convert the test data to int16_t
+            std::vector<int16_t> monoTestI16(mTotalFrameCount);
+            memcpy_to_i16_from_float(monoTestI16.data(), testOutput.data(), mTotalFrameCount);
+
+            ASSERT_EQ(0, memcmp(monoRefI16.data(), monoTestI16.data(), mTotalFrameCount * FCC_2))
+                    << "Mono channel does not match the reference output \n";
+        } else {
+            // Extract first two channels
+            std::vector<float> stereoTestOutput(mTotalFrameCount * FCC_2);
+            adjust_channels(testOutput.data(), outChannelCount, stereoTestOutput.data(), FCC_2,
+                            sizeof(float), mTotalFrameCount * sizeof(float) * outChannelCount);
+
+            // Convert the test data to int16_t
+            std::vector<int16_t> stereoTestI16(mTotalFrameCount * FCC_2);
+            memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
+                                     mTotalFrameCount * FCC_2);
+
+            ASSERT_EQ(0,
+                      memcmp(stereoRefI16.data(), stereoTestI16.data(), mTotalFrameCount * FCC_2))
+                    << "First two channels do not match with stereo output \n";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        EffectReverbTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumEffectUuids),
+                           ::testing::Range(0, (int)kNumPresets)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/lvm/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..625c15a
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+namespace android {
+
+void EffectTestHelper::createEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+    ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+    ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
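+// Sends EFFECT_CMD_SET_CONFIG with the requested channel masks, sample rate and
+// AUDIO_FORMAT_PCM_FLOAT, then enables the effect with EFFECT_CMD_ENABLE. Both commands
+// report errors through the returned status and the 32-bit reply value.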
+void EffectTestHelper::setConfig() {
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+    config.inputCfg.channels = mInChMask;
+    config.outputCfg.channels = mOutChMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                   &config, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+    status = (*mEffectHandle)
+                     ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+    ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
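+// Packs a single (type, value) pair into an effect_param_t blob: psize and vsize give the
+// sizes of the parameter id and its value, and both are copied back-to-back into data[]
+// before issuing EFFECT_CMD_SET_PARAM.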
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {type, value};
+    auto effectParam = new effect_param_t[sizeof(effect_param_t) + sizeof(paramData)];
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    delete[] effectParam;
+    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
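+// Processes mLoopCount buffers of mFrameCount frames each, advancing the input and output
+// pointers by frameCount * channelCount float samples after every call.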
+void EffectTestHelper::process(float* input, float* output) {
+    audio_buffer_t inBuffer = {.frameCount = mFrameCount, .f32 = input};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .f32 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        inBuffer.f32 += mFrameCount * mInChannelCount;
+        outBuffer.f32 += mFrameCount * mOutChannelCount;
+    }
+}
+}  // namespace android
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/lvm/tests/EffectTestHelper.h
new file mode 100644
index 0000000..3854d46
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <cfloat>
+#include <climits>
+#include <cmath>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+namespace android {
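+// computeSnr() returns 10 * log10(signal power / noise power) in dB, where the noise is
+// the sample-wise difference between the test and reference buffers; identical buffers
+// yield FLT_MAX.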
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+    double signal{};
+    double noise{};
+
+    for (size_t i = 0; i < count; ++i) {
+        const double value(ref[i]);
+        const double diff(tst[i] - value);
+        signal += value * value;
+        noise += diff * diff;
+    }
+    // Initialized to large value to handle
+    // cases where ref and tst match exactly
+    float snr = FLT_MAX;
+    if (signal > 0.0f && noise > 0.0f) {
+        snr = 10.f * log10(signal / noise);
+    }
+    return snr;
+}
+
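+// Typical usage in the tests:
+//   EffectTestHelper effect(&uuid, inChMask, outChMask, sampleRate, frameCount, loopCount);
+//   effect.createEffect();
+//   effect.setConfig();
+//   effect.setParam(type, value);   // optional
+//   effect.process(input, output);
+//   effect.releaseEffect();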
+class EffectTestHelper {
+  public:
+    EffectTestHelper(const effect_uuid_t* uuid, size_t inChMask, size_t outChMask,
+                     size_t sampleRate, size_t frameCount, size_t loopCount)
+        : mUuid(uuid),
+          mInChMask(inChMask),
+          mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+          mOutChMask(outChMask),
+          mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+          mSampleRate(sampleRate),
+          mFrameCount(frameCount),
+          mLoopCount(loopCount) {}
+    void createEffect();
+    void releaseEffect();
+    void setConfig();
+    void setParam(uint32_t type, uint32_t val);
+    void process(float* input, float* output);
+
+    // Corresponds to SNR for 1 bit difference between two int16_t signals
+    static constexpr float kSNRThreshold = 90.308998;
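+    // i.e. 20 * log10(INT16_MAX) ~= 90.309 dB: full-scale reference with a +/-1 LSB error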
+
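+    // Output channel masks under test: positional masks up to 7.1 plus index masks up to
+    // 24 channels, matching the lvmConfigChMask table in lvmtest.cpp.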
+    static constexpr audio_channel_mask_t kChMasks[] = {
+            AUDIO_CHANNEL_OUT_MONO,          AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_CHANNEL_OUT_2POINT1,       AUDIO_CHANNEL_OUT_2POINT0POINT2,
+            AUDIO_CHANNEL_OUT_QUAD,          AUDIO_CHANNEL_OUT_QUAD_BACK,
+            AUDIO_CHANNEL_OUT_QUAD_SIDE,     AUDIO_CHANNEL_OUT_SURROUND,
+            AUDIO_CHANNEL_INDEX_MASK_4,      AUDIO_CHANNEL_OUT_2POINT1POINT2,
+            AUDIO_CHANNEL_OUT_3POINT0POINT2, AUDIO_CHANNEL_OUT_PENTA,
+            AUDIO_CHANNEL_INDEX_MASK_5,      AUDIO_CHANNEL_OUT_3POINT1POINT2,
+            AUDIO_CHANNEL_OUT_5POINT1,       AUDIO_CHANNEL_OUT_5POINT1_BACK,
+            AUDIO_CHANNEL_OUT_5POINT1_SIDE,  AUDIO_CHANNEL_INDEX_MASK_6,
+            AUDIO_CHANNEL_OUT_6POINT1,       AUDIO_CHANNEL_INDEX_MASK_7,
+            AUDIO_CHANNEL_OUT_5POINT1POINT2, AUDIO_CHANNEL_OUT_7POINT1,
+            AUDIO_CHANNEL_INDEX_MASK_8,      AUDIO_CHANNEL_INDEX_MASK_9,
+            AUDIO_CHANNEL_INDEX_MASK_10,     AUDIO_CHANNEL_INDEX_MASK_11,
+            AUDIO_CHANNEL_INDEX_MASK_12,     AUDIO_CHANNEL_INDEX_MASK_13,
+            AUDIO_CHANNEL_INDEX_MASK_14,     AUDIO_CHANNEL_INDEX_MASK_15,
+            AUDIO_CHANNEL_INDEX_MASK_16,     AUDIO_CHANNEL_INDEX_MASK_17,
+            AUDIO_CHANNEL_INDEX_MASK_18,     AUDIO_CHANNEL_INDEX_MASK_19,
+            AUDIO_CHANNEL_INDEX_MASK_20,     AUDIO_CHANNEL_INDEX_MASK_21,
+            AUDIO_CHANNEL_INDEX_MASK_22,     AUDIO_CHANNEL_INDEX_MASK_23,
+            AUDIO_CHANNEL_INDEX_MASK_24,
+    };
+
+    static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,  24000, 32000,
+                                              44100, 48000, 88200, 96000, 176400, 192000};
+
+    static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+    static constexpr size_t kFrameCounts[] = {4, 2048};
+
+    static constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
+
+    static constexpr size_t kLoopCounts[] = {1, 4};
+
+    static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+  private:
+    const effect_uuid_t* mUuid;
+    const size_t mInChMask;
+    const size_t mInChannelCount;
+    const size_t mOutChMask;
+    const size_t mOutChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    effect_handle_t mEffectHandle{};
+};
+}  // namespace android
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index a97acc9..7571a24 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -23,7 +23,7 @@
 echo "========================================"
 echo "testing lvm"
 adb shell mkdir -p $testdir
-adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
 adb push $OUT/testcases/snr/arm64/snr $testdir
 
 E_VAL=1
@@ -53,16 +53,16 @@
 flags_arr=(
     "-csE"
     "-eqE"
-    "-tE"
-    "-csE -tE -eqE"
+    "-tE -trebleLvl:15"
+    "-csE -tE -trebleLvl:15 -eqE"
     "-bE -M"
-    "-csE -tE"
-    "-csE -eqE" "-tE -eqE"
-    "-csE -tE -bE -M -eqE"
-    "-tE -eqE -vcBal:96 -M"
-    "-tE -eqE -vcBal:-96 -M"
-    "-tE -eqE -vcBal:0 -M"
-    "-tE -eqE -bE -vcBal:30 -M"
+    "-csE -tE -trebleLvl:15"
+    "-csE -eqE" "-tE -trebleLvl:15 -eqE"
+    "-csE -tE -trebleLvl:15 -bE -M -eqE"
+    "-tE -trebleLvl:15 -eqE -vcBal:96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:-96 -M"
+    "-tE -trebleLvl:15 -eqE -vcBal:0 -M"
+    "-tE -trebleLvl:15 -eqE -bE -vcBal:30 -M"
 )
 
 fs_arr=(
@@ -91,7 +91,7 @@
     do
         for fs in ${fs_arr[*]}
         do
-            for chMask in {0..22}
+            for chMask in {0..38}
             do
                 adb shell $testdir/lvmtest -i:$testdir/sinesweepraw.raw \
                     -o:$testdir/sinesweep_$((chMask))_$((fs)).raw -chMask:$chMask -fs:$fs $flags
@@ -102,6 +102,11 @@
                     ((++error_count))
                 fi
 
+                # Do not compare cases where -vcBal is in flags and chMask is 0 (due to
+                # stereo computation)
+                if [[ $flags == *"-vcBal:"* ]] && [[ $chMask -eq 0 ]]; then
+                    continue
+                fi
 
                 # two channel files should be identical to higher channel
                 # computation (first 2 channels).
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
new file mode 100755
index 0000000..72b370e
--- /dev/null
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests_reverb.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+#
+# reverb test
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/revTest"
+
+echo "========================================"
+echo "testing reverb"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+
+E_VAL=1
+cmds="adb push $OUT/testcases/reverb_test/arm/reverb_test $testdir"
+
+fs_arr=(
+    8000
+    16000
+    22050
+    32000
+    44100
+    48000
+    88200
+    96000
+    176400
+    192000
+)
+
+flags_arr=(
+    "--M --fch 1"
+    "--fch 2"
+)
+
+# run reverb at different configs, saving only the stereo channel
+# pair.
+error_count=0
+testcase_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for preset_val in {0..6}
+        do
+            for fs in ${fs_arr[*]}
+            do
+                for chMask in {0..38}
+                do
+                    adb shell $testdir/reverb_test \
+                        --input $testdir/sinesweepraw.raw \
+                        --output $testdir/sinesweep_$((chMask))_$((fs)).raw \
+                        --chMask $chMask $flags --fs $fs --preset $preset_val
+
+                    shell_ret=$?
+                    if [ $shell_ret -ne 0 ]; then
+                        echo "error: $shell_ret"
+                        ((++error_count))
+                    fi
+
+                    if [[ "$chMask" -gt 0 ]] && [[ $flags != *"--fch 2"* ]]
+                    then
+                        # single channel files should be identical to higher channel
+                        # computation (first channel).
+                        adb shell cmp $testdir/sinesweep_0_$((fs)).raw \
+                            $testdir/sinesweep_$((chMask))_$((fs)).raw
+                    elif [[ "$chMask" -gt 1 ]]
+                    then
+                        # two channel files should be identical to higher channel
+                        # computation (first 2 channels).
+                        adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                            $testdir/sinesweep_$((chMask))_$((fs)).raw
+                    fi
+
+                    # cmp returns EXIT_FAILURE on mismatch.
+                    shell_ret=$?
+                    if [ $shell_ret -ne 0 ]; then
+                        echo "error: $shell_ret"
+                        ((++error_count))
+                    fi
+                    ((++testcase_count))
+                done
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$testcase_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index a4ace6c..e65228c 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -33,198 +33,165 @@
 #define ALOGVV ALOGV
 #else
 #define ALOGVV(a...) \
-  do {               \
-  } while (false)
+    do {             \
+    } while (false)
 #endif
 
-#define CHECK_ARG(cond)                                \
-  {                                                    \
-    if (!(cond)) {                                     \
-      ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
-      return -EINVAL;                                  \
-    }                                                  \
-  \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGE("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)     \
-  {                                                             \
-    if ((LvmStatus) == LVM_NULLADDRESS) {                       \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "null pointer returned by %s in %s\n\n\n\n",          \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_ALIGNMENTERROR) {                    \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "bad alignment returned by %s in %s\n\n\n\n",         \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                 \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "bad number of samples returned by %s in %s\n\n\n\n", \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-    if ((LvmStatus) == LVM_OUTOFRANGE) {                        \
-      ALOGE(                                                    \
-          "\tLVM_ERROR : Parameter error - "                    \
-          "out of range returned by %s in %s\n",                \
-          callingFunc, calledFunc);                             \
-    }                                                           \
-  }
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVM_NULLADDRESS) {                           \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_ALIGNMENTERROR) {                        \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "bad alignment returned by %s in %s\n\n\n\n",         \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                     \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_OUTOFRANGE) {                            \
+            ALOGE("\tLVM_ERROR : Parameter error - "                    \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+    }
 
 struct lvmConfigParams_t {
-  int              samplingFreq    = 44100;
-  int              nrChannels      = 2;
-  int              chMask          = AUDIO_CHANNEL_OUT_STEREO;
-  int              vcBal           = 0;
-  int              fChannels       = 2;
-  bool             monoMode        = false;
-  int              bassEffectLevel = 0;
-  int              eqPresetLevel   = 0;
-  int              frameLength     = 256;
-  LVM_BE_Mode_en   bassEnable      = LVM_BE_OFF;
-  LVM_TE_Mode_en   trebleEnable    = LVM_TE_OFF;
-  LVM_EQNB_Mode_en eqEnable        = LVM_EQNB_OFF;
-  LVM_Mode_en      csEnable        = LVM_MODE_OFF;
+    int samplingFreq = 44100;
+    int nrChannels = 2;
+    int chMask = AUDIO_CHANNEL_OUT_STEREO;
+    int vcBal = 0;
+    int fChannels = 2;
+    bool monoMode = false;
+    int bassEffectLevel = 0;
+    int eqPresetLevel = 0;
+    int frameLength = 256;
+    int trebleEffectLevel = 0;
+    LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
+    LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
+    LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
+    LVM_Mode_en csEnable = LVM_MODE_OFF;
 };
 
 constexpr audio_channel_mask_t lvmConfigChMask[] = {
-    AUDIO_CHANNEL_OUT_MONO,
-    AUDIO_CHANNEL_OUT_STEREO,
-    AUDIO_CHANNEL_OUT_2POINT1,
-    AUDIO_CHANNEL_OUT_2POINT0POINT2,
-    AUDIO_CHANNEL_OUT_QUAD,
-    AUDIO_CHANNEL_OUT_QUAD_BACK,
-    AUDIO_CHANNEL_OUT_QUAD_SIDE,
-    AUDIO_CHANNEL_OUT_SURROUND,
-    (1 << 4) - 1,
-    AUDIO_CHANNEL_OUT_2POINT1POINT2,
-    AUDIO_CHANNEL_OUT_3POINT0POINT2,
-    AUDIO_CHANNEL_OUT_PENTA,
-    (1 << 5) - 1,
-    AUDIO_CHANNEL_OUT_3POINT1POINT2,
-    AUDIO_CHANNEL_OUT_5POINT1,
-    AUDIO_CHANNEL_OUT_5POINT1_BACK,
-    AUDIO_CHANNEL_OUT_5POINT1_SIDE,
-    (1 << 6) - 1,
-    AUDIO_CHANNEL_OUT_6POINT1,
-    (1 << 7) - 1,
-    AUDIO_CHANNEL_OUT_5POINT1POINT2,
-    AUDIO_CHANNEL_OUT_7POINT1,
-    (1 << 8) - 1,
+        AUDIO_CHANNEL_OUT_MONO,
+        AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,
+        AUDIO_CHANNEL_OUT_2POINT0POINT2,
+        AUDIO_CHANNEL_OUT_QUAD,
+        AUDIO_CHANNEL_OUT_QUAD_BACK,
+        AUDIO_CHANNEL_OUT_QUAD_SIDE,
+        AUDIO_CHANNEL_OUT_SURROUND,
+        AUDIO_CHANNEL_INDEX_MASK_4,
+        AUDIO_CHANNEL_OUT_2POINT1POINT2,
+        AUDIO_CHANNEL_OUT_3POINT0POINT2,
+        AUDIO_CHANNEL_OUT_PENTA,
+        AUDIO_CHANNEL_INDEX_MASK_5,
+        AUDIO_CHANNEL_OUT_3POINT1POINT2,
+        AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_5POINT1_BACK,
+        AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+        AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_OUT_6POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_7,
+        AUDIO_CHANNEL_OUT_5POINT1POINT2,
+        AUDIO_CHANNEL_OUT_7POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_8,
+        AUDIO_CHANNEL_INDEX_MASK_9,
+        AUDIO_CHANNEL_INDEX_MASK_10,
+        AUDIO_CHANNEL_INDEX_MASK_11,
+        AUDIO_CHANNEL_INDEX_MASK_12,
+        AUDIO_CHANNEL_INDEX_MASK_13,
+        AUDIO_CHANNEL_INDEX_MASK_14,
+        AUDIO_CHANNEL_INDEX_MASK_15,
+        AUDIO_CHANNEL_INDEX_MASK_16,
+        AUDIO_CHANNEL_INDEX_MASK_17,
+        AUDIO_CHANNEL_INDEX_MASK_18,
+        AUDIO_CHANNEL_INDEX_MASK_19,
+        AUDIO_CHANNEL_INDEX_MASK_20,
+        AUDIO_CHANNEL_INDEX_MASK_21,
+        AUDIO_CHANNEL_INDEX_MASK_22,
+        AUDIO_CHANNEL_INDEX_MASK_23,
+        AUDIO_CHANNEL_INDEX_MASK_24,
 };
 
-
 void printUsage() {
-  printf("\nUsage: ");
-  printf("\n     <executable> -i:<input_file> -o:<out_file> [options]\n");
-  printf("\nwhere, \n     <inputfile>  is the input file name");
-  printf("\n                  on which LVM effects are applied");
-  printf("\n     <outputfile> processed output file");
-  printf("\n     and options are mentioned below");
-  printf("\n");
-  printf("\n     -help (or) -h");
-  printf("\n           Prints this usage information");
-  printf("\n");
-  printf("\n     -chMask:<channel_mask>\n");
-  printf("\n         0  - AUDIO_CHANNEL_OUT_MONO");
-  printf("\n         1  - AUDIO_CHANNEL_OUT_STEREO");
-  printf("\n         2  - AUDIO_CHANNEL_OUT_2POINT1");
-  printf("\n         3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
-  printf("\n         4  - AUDIO_CHANNEL_OUT_QUAD");
-  printf("\n         5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
-  printf("\n         6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
-  printf("\n         7  - AUDIO_CHANNEL_OUT_SURROUND");
-  printf("\n         8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
-  printf("\n         9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
-  printf("\n         10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
-  printf("\n         11 - AUDIO_CHANNEL_OUT_PENTA");
-  printf("\n         12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
-  printf("\n         13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
-  printf("\n         14 - AUDIO_CHANNEL_OUT_5POINT1");
-  printf("\n         15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
-  printf("\n         16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
-  printf("\n         17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
-  printf("\n         18 - AUDIO_CHANNEL_OUT_6POINT1");
-  printf("\n         19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
-  printf("\n         20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
-  printf("\n         21 - AUDIO_CHANNEL_OUT_7POINT1");
-  printf("\n         22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
-  printf("\n         default 0");
-  printf("\n     -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
-  printf("\n            -ve values reduce Right channel while +ve value reduces Left channel");
-  printf("\n                 default 0");
-  printf("\n     -fch:<file_channels> (1 through 8)\n\n");
-  printf("\n     -M");
-  printf("\n           Mono mode (force all input audio channels to be identical)");
-  printf("\n     -basslvl:<effect_level>");
-  printf("\n           A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
-    LVM_BE_MAX_EFFECTLEVEL);
-  printf("\n");
-  printf("\n     -eqPreset:<preset Value>");
-  const size_t numPresetLvls  = std::size(gEqualizerPresets);
-  for (size_t i = 0; i < numPresetLvls; ++i) {
-    printf("\n           %zu - %s", i, gEqualizerPresets[i].name);
-  }
-  printf("\n           default - 0");
-  printf("\n     -bE ");
-  printf("\n           Enable Dynamic Bass Enhancement");
-  printf("\n");
-  printf("\n     -tE ");
-  printf("\n           Enable Treble Boost");
-  printf("\n");
-  printf("\n     -csE ");
-  printf("\n           Enable Concert Surround");
-  printf("\n");
-  printf("\n     -eqE ");
-  printf("\n           Enable Equalizer");
-}
-
-//----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-//  pContext:   effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(struct EffectContext *pContext) {
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-  LVM_MemTab_t MemTab;
-
-  /* Free the algorithm memory */
-  LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance, &MemTab,
-                                 LVM_NULL);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
-  for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-    if (MemTab.Region[i].Size != 0) {
-      if (MemTab.Region[i].pBaseAddress != NULL) {
-        ALOGV("\tLvmEffect_free - START freeing %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-
-        free(MemTab.Region[i].pBaseAddress);
-
-        ALOGV("\tLvmEffect_free - END   freeing %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      } else {
-        ALOGE(
-            "\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer "
-            "%" PRIu32 " bytes for region %u at %p ERROR\n",
-            MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      }
+    printf("\nUsage: ");
+    printf("\n     <executable> -i:<input_file> -o:<out_file> [options]\n");
+    printf("\nwhere, \n     <inputfile>  is the input file name");
+    printf("\n                  on which LVM effects are applied");
+    printf("\n     <outputfile> processed output file");
+    printf("\n     and options are mentioned below");
+    printf("\n");
+    printf("\n     -help (or) -h");
+    printf("\n           Prints this usage information");
+    printf("\n");
+    printf("\n     -chMask:<channel_mask>\n");
+    printf("\n         0  - AUDIO_CHANNEL_OUT_MONO");
+    printf("\n         1  - AUDIO_CHANNEL_OUT_STEREO");
+    printf("\n         2  - AUDIO_CHANNEL_OUT_2POINT1");
+    printf("\n         3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+    printf("\n         4  - AUDIO_CHANNEL_OUT_QUAD");
+    printf("\n         5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
+    printf("\n         6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+    printf("\n         7  - AUDIO_CHANNEL_OUT_SURROUND");
+    printf("\n         8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
+    printf("\n         9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+    printf("\n         10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+    printf("\n         11 - AUDIO_CHANNEL_OUT_PENTA");
+    printf("\n         12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+    printf("\n         13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+    printf("\n         14 - AUDIO_CHANNEL_OUT_5POINT1");
+    printf("\n         15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+    printf("\n         16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+    printf("\n         17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+    printf("\n         18 - AUDIO_CHANNEL_OUT_6POINT1");
+    printf("\n         19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+    printf("\n         20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+    printf("\n         21 - AUDIO_CHANNEL_OUT_7POINT1");
+    printf("\n         22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+    printf("\n         default 0");
+    printf("\n     -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
+    printf("\n            -ve values reduce Right channel while +ve value reduces Left channel");
+    printf("\n                 default 0");
+    printf("\n     -fch:<file_channels> (1 through 8)\n\n");
+    printf("\n     -M");
+    printf("\n           Mono mode (force all input audio channels to be identical)");
+    printf("\n     -basslvl:<effect_level>");
+    printf("\n           A value that ranges between %d - %d default 0", LVM_BE_MIN_EFFECTLEVEL,
+           LVM_BE_MAX_EFFECTLEVEL);
+    printf("\n");
+    printf("\n     -eqPreset:<preset Value>");
+    const size_t numPresetLvls = std::size(gEqualizerPresets);
+    for (size_t i = 0; i < numPresetLvls; ++i) {
+        printf("\n           %zu - %s", i, gEqualizerPresets[i].name);
     }
-  }
-} /* end LvmEffect_free */
+    printf("\n           default - 0");
+    printf("\n     -bE ");
+    printf("\n           Enable Dynamic Bass Enhancement");
+    printf("\n");
+    printf("\n     -tE ");
+    printf("\n           Enable Treble Boost");
+    printf("\n");
+    printf("\n     -csE ");
+    printf("\n           Enable Concert Surround");
+    printf("\n");
+    printf("\n     -eqE ");
+    printf("\n           Enable Equalizer");
+}
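+
+// Illustrative only (not printed by the tool): a typical invocation built from
+// the options documented above. The file paths are hypothetical placeholders.
+//
+//     <executable> -i:/data/local/tmp/sine.raw -o:/data/local/tmp/sine_out.raw \
+//             -fs:44100 -chMask:1 -fch:2 -csE -eqE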
 
 //----------------------------------------------------------------------------
 // LvmBundle_init()
@@ -239,586 +206,467 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmBundle_init(struct EffectContext *pContext, LVM_ControlParams_t *params) {
-  ALOGV("\tLvmBundle_init start");
+int LvmBundle_init(struct EffectContext* pContext, LVM_ControlParams_t* params) {
+    ALOGV("\tLvmBundle_init start");
 
-  pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
-  pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-  pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
-  pContext->config.inputCfg.samplingRate = 44100;
-  pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
-  pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
-  pContext->config.inputCfg.bufferProvider.cookie = NULL;
-  pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
-  pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-  pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
-  pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
-  pContext->config.outputCfg.samplingRate = 44100;
-  pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
-  pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-  pContext->config.outputCfg.bufferProvider.cookie = NULL;
-  pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
-  if (pContext->pBundledContext->hInstance != NULL) {
-    ALOGV(
-        "\tLvmBundle_init pContext->pBassBoost != NULL "
-        "-> Calling pContext->pBassBoost->free()");
+    if (pContext->pBundledContext->hInstance != NULL) {
+        ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+              "-> Calling pContext->pBassBoost->free()");
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
 
-    LvmEffect_free(pContext);
-
-    ALOGV(
-        "\tLvmBundle_init pContext->pBassBoost != NULL "
-        "-> Called pContext->pBassBoost->free()");
-  }
-
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-  LVM_InstParams_t InstParams;                 /* Instance parameters */
-  LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
-  LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
-  LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
-  LVM_MemTab_t MemTab; /* Memory allocation table */
-  bool bMallocFailure = LVM_FALSE;
-
-  /* Set the capabilities */
-  InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
-  InstParams.MaxBlockSize = MAX_CALL_SIZE;
-  InstParams.EQNB_NumBands = MAX_NUM_BANDS;
-  InstParams.PSA_Included = LVM_PSA_ON;
-
-  /* Allocate memory, forcing alignment */
-  LvmStatus = LVM_GetMemoryTable(LVM_NULL, &MemTab, &InstParams);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
-  /* Allocate memory */
-  for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-    if (MemTab.Region[i].Size != 0) {
-      MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
-      if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
-            "%" PRIu32 " bytes for region %u\n",
-            MemTab.Region[i].Size, i);
-        bMallocFailure = LVM_TRUE;
-        break;
-      } else {
-        ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
-              " bytes for region %u at %p\n",
-              MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-      }
+        ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
+              "-> Called pContext->pBassBoost->free()");
     }
-  }
 
-  /* If one or more of the memory regions failed to allocate, free the regions
-   * that were
-   * succesfully allocated and return with an error
-   */
-  if (bMallocFailure == LVM_TRUE) {
-    for (int i = 0; i < LVM_NR_MEMORY_REGIONS; i++) {
-      if (MemTab.Region[i].pBaseAddress == LVM_NULL) {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate "
-            "%" PRIu32 " bytes for region %u Not freeing\n",
-            MemTab.Region[i].Size, i);
-      } else {
-        ALOGE(
-            "\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated "
-            "%" PRIu32 " bytes for region %u at %p- free\n",
-            MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-        free(MemTab.Region[i].pBaseAddress);
-      }
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_InstParams_t InstParams;                 /* Instance parameters */
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
+    LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
+    LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
+
+    /* Set the capabilities */
+    InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+    InstParams.PSA_Included = LVM_PSA_ON;
+
+    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
+
+    LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_GetInstanceHandle\n");
+
+    /* Set the initial process parameters */
+    /* General parameters */
+    params->OperatingMode = LVM_MODE_ON;
+    params->SampleRate = LVM_FS_44100;
+    params->SourceFormat = LVM_STEREO;
+    params->ChMask = AUDIO_CHANNEL_OUT_STEREO;
+    params->SpeakerType = LVM_HEADPHONES;
+
+    pContext->pBundledContext->SampleRate = LVM_FS_44100;
+
+    /* Concert Sound parameters */
+    params->VirtualizerOperatingMode = LVM_MODE_OFF;
+    params->VirtualizerType = LVM_CONCERTSOUND;
+    params->VirtualizerReverbLevel = 100;
+    params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+    /* N-Band Equaliser parameters */
+    params->EQNB_OperatingMode = LVM_EQNB_ON;
+    params->EQNB_NBands = FIVEBAND_NUMBANDS;
+    params->pEQNB_BandDefinition = &BandDefs[0];
+
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
     }
-    return -EINVAL;
-  }
-  ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
 
-  /* Initialise */
-  pContext->pBundledContext->hInstance = LVM_NULL;
+    /* Volume Control parameters */
+    params->VC_EffectLevel = 0;
+    params->VC_Balance = 0;
 
-  /* Init sets the instance handle */
-  LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
-                                    &MemTab, &InstParams);
+    /* Treble Enhancement parameters */
+    params->TE_OperatingMode = LVM_TE_OFF;
+    params->TE_EffectLevel = 0;
 
-  LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_OFF;
+    params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
 
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_GetInstanceHandle\n");
+    /* Bass Enhancement parameters */
+    params->BE_OperatingMode = LVM_BE_ON;
+    params->BE_EffectLevel = 0;
+    params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params->BE_HPF = LVM_BE_HPF_ON;
 
-  /* Set the initial process parameters */
-  /* General parameters */
-  params->OperatingMode = LVM_MODE_ON;
-  params->SampleRate = LVM_FS_44100;
-  params->SourceFormat = LVM_STEREO;
-  params->ChMask       = AUDIO_CHANNEL_OUT_STEREO;
-  params->SpeakerType = LVM_HEADPHONES;
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_OFF;
+    params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
-  pContext->pBundledContext->SampleRate = LVM_FS_44100;
+    /* Activate the initial settings */
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
 
-  /* Concert Sound parameters */
-  params->VirtualizerOperatingMode = LVM_MODE_OFF;
-  params->VirtualizerType = LVM_CONCERTSOUND;
-  params->VirtualizerReverbLevel = 100;
-  params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-  /* N-Band Equaliser parameters */
-  params->EQNB_OperatingMode = LVM_EQNB_ON;
-  params->EQNB_NBands = FIVEBAND_NUMBANDS;
-  params->pEQNB_BandDefinition = &BandDefs[0];
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_SetControlParameters\n");
 
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-    BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
-    BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
-  }
+    /* Set the headroom parameters */
+    HeadroomBandDef[0].Limit_Low = 20;
+    HeadroomBandDef[0].Limit_High = 4999;
+    HeadroomBandDef[0].Headroom_Offset = 0;
+    HeadroomBandDef[1].Limit_Low = 5000;
+    HeadroomBandDef[1].Limit_High = 24000;
+    HeadroomBandDef[1].Headroom_Offset = 0;
+    HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
+    HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
+    HeadroomParams.NHeadroomBands = 2;
 
-  /* Volume Control parameters */
-  params->VC_EffectLevel = 0;
-  params->VC_Balance = 0;
+    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
 
-  /* Treble Enhancement parameters */
-  params->TE_OperatingMode = LVM_TE_OFF;
-  params->TE_EffectLevel = 0;
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_OFF;
-  params->PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
-
-  /* Bass Enhancement parameters */
-  params->BE_OperatingMode = LVM_BE_ON;
-  params->BE_EffectLevel = 0;
-  params->BE_CentreFreq = LVM_BE_CENTRE_90Hz;
-  params->BE_HPF = LVM_BE_HPF_ON;
-
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_OFF;
-  params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
-
-  /* TE Control parameters */
-  params->TE_OperatingMode = LVM_TE_OFF;
-  params->TE_EffectLevel = 0;
-
-  /* Activate the initial settings */
-  LvmStatus =
-      LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_SetControlParameters\n");
-
-  /* Set the headroom parameters */
-  HeadroomBandDef[0].Limit_Low = 20;
-  HeadroomBandDef[0].Limit_High = 4999;
-  HeadroomBandDef[0].Headroom_Offset = 0;
-  HeadroomBandDef[1].Limit_Low = 5000;
-  HeadroomBandDef[1].Limit_High = 24000;
-  HeadroomBandDef[1].Headroom_Offset = 0;
-  HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
-  HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
-  HeadroomParams.NHeadroomBands = 2;
-
-  LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
-                                    &HeadroomParams);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  ALOGV(
-      "\tLvmBundle_init CreateInstance Succesfully called "
-      "LVM_SetHeadroomParams\n");
-  ALOGV("\tLvmBundle_init End");
-  return 0;
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called "
+          "LVM_SetHeadroomParams\n");
+    ALOGV("\tLvmBundle_init End");
+    return 0;
 } /* end LvmBundle_init */
 
-int lvmCreate(struct EffectContext *pContext,
-              lvmConfigParams_t    *plvmConfigParams,
-              LVM_ControlParams_t  *params) {
-  int ret = 0;
-  pContext->pBundledContext = NULL;
-  pContext->pBundledContext = (BundledEffectContext *)malloc(sizeof(struct BundledEffectContext));
-  if (NULL == pContext->pBundledContext) {
-    return -EINVAL;
-  }
-
-  pContext->pBundledContext->SessionNo = 0;
-  pContext->pBundledContext->SessionId = 0;
-  pContext->pBundledContext->hInstance = NULL;
-  pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
-  pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
-  pContext->pBundledContext->bBassEnabled = LVM_FALSE;
-  pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
-  pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
-  pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
-  pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
-  pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
-  pContext->pBundledContext->NumberEffectsEnabled = 0;
-  pContext->pBundledContext->NumberEffectsCalled = 0;
-  pContext->pBundledContext->firstVolume = LVM_TRUE;
-  pContext->pBundledContext->volume = 0;
-
-  /* Saved strength is used to return the exact strength that was used in the
-   * set to the get
-   * because we map the original strength range of 0:1000 to 1:15, and this will
-   * avoid
-   * quantisation like effect when returning
-   */
-  pContext->pBundledContext->BassStrengthSaved = 0;
-  pContext->pBundledContext->VirtStrengthSaved = 0;
-  pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
-  pContext->pBundledContext->levelSaved = 0;
-  pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
-  pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
-  pContext->pBundledContext->positionSaved = 0;
-  pContext->pBundledContext->workBuffer = NULL;
-  pContext->pBundledContext->frameCount = -1;
-  pContext->pBundledContext->SamplesToExitCountVirt = 0;
-  pContext->pBundledContext->SamplesToExitCountBb = 0;
-  pContext->pBundledContext->SamplesToExitCountEq = 0;
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
-  }
-  pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
-  ALOGV("\tEffectCreate - Calling LvmBundle_init");
-  ret = LvmBundle_init(pContext, params);
-
-  if (ret < 0) {
-    ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
-    return ret;
-  }
-  return 0;
-}
-
-int lvmControl(struct EffectContext *pContext,
-               lvmConfigParams_t    *plvmConfigParams,
-               LVM_ControlParams_t  *params) {
-  LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
-
-  /* Set the initial process parameters */
-  /* General parameters */
-  params->OperatingMode = LVM_MODE_ON;
-  params->SpeakerType = LVM_HEADPHONES;
-
-  params->ChMask     = plvmConfigParams->chMask;
-  params->NrChannels = plvmConfigParams->nrChannels;
-  if (params->NrChannels == 1) {
-    params->SourceFormat = LVM_MONO;
-  } else if (params->NrChannels == 2) {
-    params->SourceFormat = LVM_STEREO;
-  } else if (params->NrChannels > 2 && params->NrChannels <= 8) { // FCC_2 FCC_8
-    params->SourceFormat = LVM_MULTICHANNEL;
-  } else {
-      return -EINVAL;
-  }
-
-  LVM_Fs_en sampleRate;
-  switch (plvmConfigParams->samplingFreq) {
-    case 8000:
-      sampleRate = LVM_FS_8000;
-      break;
-    case 11025:
-      sampleRate = LVM_FS_11025;
-      break;
-    case 12000:
-      sampleRate = LVM_FS_12000;
-      break;
-    case 16000:
-      sampleRate = LVM_FS_16000;
-      break;
-    case 22050:
-      sampleRate = LVM_FS_22050;
-      break;
-    case 24000:
-      sampleRate = LVM_FS_24000;
-      break;
-    case 32000:
-      sampleRate = LVM_FS_32000;
-      break;
-    case 44100:
-      sampleRate = LVM_FS_44100;
-      break;
-    case 48000:
-      sampleRate = LVM_FS_48000;
-      break;
-    case 88200:
-      sampleRate = LVM_FS_88200;
-      break;
-    case 96000:
-      sampleRate = LVM_FS_96000;
-      break;
-    case 176400:
-      sampleRate = LVM_FS_176400;
-      break;
-    case 192000:
-      sampleRate = LVM_FS_192000;
-      break;
-    default:
-      return -EINVAL;
-  }
-  params->SampleRate = sampleRate;
-
-  /* Concert Sound parameters */
-  params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
-  params->VirtualizerType = LVM_CONCERTSOUND;
-  params->VirtualizerReverbLevel = 100;
-  params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
-
-  /* N-Band Equaliser parameters */
-  const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
-  LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
-  for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-    BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-    BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
-    BandDefs[i].Gain =
-        EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
-  }
-  params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
- // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
-  params->pEQNB_BandDefinition = &BandDefs[0];
-
-  /* Volume Control parameters */
-  params->VC_EffectLevel = 0;
-  params->VC_Balance = plvmConfigParams->vcBal;
-
-  /* Treble Enhancement parameters */
-  params->TE_OperatingMode = plvmConfigParams->trebleEnable;
-
-  /* PSA Control parameters */
-  params->PSA_Enable = LVM_PSA_ON;
-
-  /* Bass Enhancement parameters */
-  params->BE_OperatingMode = plvmConfigParams->bassEnable;
-
-  /* Activate the initial settings */
-  LvmStatus =
-      LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
-
-  LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
-
-  if (LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-  return 0;
-}
-
-int lvmExecute(float *floatIn, float *floatOut, struct EffectContext *pContext,
-               lvmConfigParams_t *plvmConfigParams) {
-  const int frameLength = plvmConfigParams->frameLength;
-  return
-      LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
-                  floatIn,                              /* Input buffer */
-                  floatOut,                             /* Output buffer */
-                  (LVM_UINT16)frameLength, /* Number of samples to read */
-                  0);                      /* Audio Time */
-}
-
-int lvmMainProcess(EffectContext *pContext,
-                   LVM_ControlParams_t *pParams,
-                   lvmConfigParams_t *plvmConfigParams,
-                   FILE *finp,
-                   FILE *fout) {
-  int errCode = lvmControl(pContext, plvmConfigParams, pParams);
-  if (errCode) {
-    ALOGE("Error: lvmControl returned with %d\n", errCode);
-    return errCode;
-  }
-
-  const int channelCount = plvmConfigParams->nrChannels;
-  const int frameLength = plvmConfigParams->frameLength;
-  const int frameSize = channelCount * sizeof(float);  // processing size
-  const int ioChannelCount = plvmConfigParams->fChannels;
-  const int ioFrameSize = ioChannelCount * sizeof(short); // file load size
-  const int maxChannelCount = std::max(channelCount, ioChannelCount);
-  /*
-   * Mono input will be converted to 2 channels internally in the process call
-   * by copying the same data into the second channel.
-   * Hence when channelCount is 1, output buffer should be allocated for
-   * 2 channels. The memAllocChCount takes care of allocation of sufficient
-   * memory for the output buffer.
-   */
-  const int memAllocChCount = (channelCount == 1 ? 2 : channelCount);
-
-  std::vector<short> in(frameLength * maxChannelCount);
-  std::vector<short> out(frameLength * maxChannelCount);
-  std::vector<float> floatIn(frameLength * channelCount);
-  std::vector<float> floatOut(frameLength * memAllocChCount);
-
-  int frameCounter = 0;
-  while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
-    if (ioChannelCount != channelCount) {
-        adjust_channels(in.data(), ioChannelCount, in.data(), channelCount,
-               sizeof(short), frameLength * ioFrameSize);
+int lvmCreate(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+              LVM_ControlParams_t* params) {
+    int ret = 0;
+    pContext->pBundledContext = NULL;
+    pContext->pBundledContext = (BundledEffectContext*)malloc(sizeof(struct BundledEffectContext));
+    if (NULL == pContext->pBundledContext) {
+        return -EINVAL;
     }
-    memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
 
-    // Mono mode will replicate the first channel to all other channels.
-    // This ensures all audio channels are identical. This is useful for testing
-    // Bass Boost, which extracts a mono signal for processing.
-    if (plvmConfigParams->monoMode && channelCount > 1) {
-        for (int i = 0; i < frameLength; ++i) {
-            auto *fp = &floatIn[i * channelCount];
-            std::fill(fp + 1, fp + channelCount, *fp); // replicate ch 0
+    pContext->pBundledContext->SessionNo = 0;
+    pContext->pBundledContext->SessionId = 0;
+    pContext->pBundledContext->hInstance = NULL;
+    pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+    pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+    pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+    pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+    pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
+    pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
+    pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
+    pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
+    pContext->pBundledContext->NumberEffectsEnabled = 0;
+    pContext->pBundledContext->NumberEffectsCalled = 0;
+    pContext->pBundledContext->firstVolume = LVM_TRUE;
+    pContext->pBundledContext->volume = 0;
+
+    /* The saved strength lets a get return exactly the strength that was last
+     * set: the original strength range of 0:1000 is mapped to 1:15 internally,
+     * so deriving the returned value from the mapped range would introduce a
+     * quantisation-like effect.
+     */
+    pContext->pBundledContext->BassStrengthSaved = 0;
+    pContext->pBundledContext->VirtStrengthSaved = 0;
+    pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+    pContext->pBundledContext->levelSaved = 0;
+    pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+    pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+    pContext->pBundledContext->positionSaved = 0;
+    pContext->pBundledContext->workBuffer = NULL;
+    pContext->pBundledContext->frameCount = -1;
+    pContext->pBundledContext->SamplesToExitCountVirt = 0;
+    pContext->pBundledContext->SamplesToExitCountBb = 0;
+    pContext->pBundledContext->SamplesToExitCountEq = 0;
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
+    }
+    pContext->config.inputCfg.channels = plvmConfigParams->nrChannels;
+    ALOGV("\tEffectCreate - Calling LvmBundle_init");
+    ret = LvmBundle_init(pContext, params);
+
+    if (ret < 0) {
+        ALOGE("\tLVM_ERROR : lvmCreate() Bundle init failed");
+        return ret;
+    }
+    return 0;
+}
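+
+// A minimal sketch (using only calls already present in this file) of the
+// create/teardown pairing for the LVM instance; main() below follows the same
+// sequence:
+//
+//     EffectContext context;
+//     LVM_ControlParams_t params;
+//     if (lvmCreate(&context, &lvmConfigParams, &params) == 0) {
+//         // ... lvmControl() and lvmExecute() per block of audio ...
+//         LVM_DelInstanceHandle(&context.pBundledContext->hInstance);
+//         free(context.pBundledContext);
+//     }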
+
+int lvmControl(struct EffectContext* pContext, lvmConfigParams_t* plvmConfigParams,
+               LVM_ControlParams_t* params) {
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+
+    /* Set the initial process parameters */
+    /* General parameters */
+    params->OperatingMode = LVM_MODE_ON;
+    params->SpeakerType = LVM_HEADPHONES;
+
+    params->ChMask = plvmConfigParams->chMask;
+    params->NrChannels = plvmConfigParams->nrChannels;
+    if (params->NrChannels == 1) {
+        params->SourceFormat = LVM_MONO;
+    } else if (params->NrChannels == 2) {
+        params->SourceFormat = LVM_STEREO;
+    } else if (params->NrChannels > FCC_2 && params->NrChannels <= FCC_24) {
+        params->SourceFormat = LVM_MULTICHANNEL;
+    } else {
+        return -EINVAL;
+    }
+    params->SampleRate = lvmFsForSampleRate(plvmConfigParams->samplingFreq);
+    if (params->SampleRate == LVM_FS_INVALID) {
+        ALOGE("lvmControl invalid sampling rate %d", plvmConfigParams->samplingFreq);
+        return -EINVAL;
+    }
+
+    /* Concert Sound parameters */
+    params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
+    params->VirtualizerType = LVM_CONCERTSOUND;
+    params->VirtualizerReverbLevel = 100;
+    params->CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+    /* N-Band Equaliser parameters */
+    const int eqPresetLevel = plvmConfigParams->eqPresetLevel;
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS]; /* Equaliser band definitions */
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[(FIVEBAND_NUMBANDS * eqPresetLevel) + i];
+    }
+    params->EQNB_OperatingMode = plvmConfigParams->eqEnable;
+    // Caution: raw pointer to stack data, stored in instance by LVM_SetControlParameters.
+    params->pEQNB_BandDefinition = &BandDefs[0];
+
+    /* Volume Control parameters */
+    params->VC_EffectLevel = 0;
+    params->VC_Balance = plvmConfigParams->vcBal;
+
+    /* Treble Enhancement parameters */
+    params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+    params->TE_EffectLevel = plvmConfigParams->trebleEffectLevel;
+
+    /* PSA Control parameters */
+    params->PSA_Enable = LVM_PSA_ON;
+
+    /* Bass Enhancement parameters */
+    params->BE_OperatingMode = plvmConfigParams->bassEnable;
+
+    /* Activate the initial settings */
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
+
+    LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    LvmStatus = LVM_ApplyNewSettings(pContext->pBundledContext->hInstance);
+
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+
+    return 0;
+}
+
+int lvmExecute(float* floatIn, float* floatOut, struct EffectContext* pContext,
+               lvmConfigParams_t* plvmConfigParams) {
+    const int frameLength = plvmConfigParams->frameLength;
+    return LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
+                       floatIn,                              /* Input buffer */
+                       floatOut,                             /* Output buffer */
+                       (LVM_UINT16)frameLength,              /* Number of samples to read */
+                       0);                                   /* Audio Time */
+}
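+
+// Illustrative buffer sizing for a single lvmExecute() call, mirroring
+// lvmMainProcess() below: the float buffers hold frameLength * nrChannels
+// interleaved samples, and frameLength is the count handed to LVM_Process().
+//
+//     std::vector<float> floatIn(plvmConfigParams->frameLength * plvmConfigParams->nrChannels);
+//     std::vector<float> floatOut(floatIn.size());
+//     int err = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);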
+
+int lvmMainProcess(EffectContext* pContext, LVM_ControlParams_t* pParams,
+                   lvmConfigParams_t* plvmConfigParams, FILE* finp, FILE* fout) {
+    int errCode = lvmControl(pContext, plvmConfigParams, pParams);
+    if (errCode) {
+        ALOGE("Error: lvmControl returned with %d\n", errCode);
+        return errCode;
+    }
+
+    const int channelCount = plvmConfigParams->nrChannels;
+    const int frameLength = plvmConfigParams->frameLength;
+    const int frameSize = channelCount * sizeof(float);  // processing size
+    const int ioChannelCount = plvmConfigParams->fChannels;
+    const int ioFrameSize = ioChannelCount * sizeof(short);  // file load size
+    const int maxChannelCount = std::max(channelCount, ioChannelCount);
+
+    std::vector<short> in(frameLength * maxChannelCount);
+    std::vector<short> out(frameLength * maxChannelCount);
+    std::vector<float> floatIn(frameLength * channelCount);
+    std::vector<float> floatOut(frameLength * channelCount);
+
+    int frameCounter = 0;
+    while (fread(in.data(), ioFrameSize, frameLength, finp) == (size_t)frameLength) {
+        if (ioChannelCount != channelCount) {
+            adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+                            frameLength * ioFrameSize);
+        }
+        memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+        // Mono mode will replicate the first channel to all other channels.
+        // This ensures all audio channels are identical. This is useful for testing
+        // Bass Boost, which extracts a mono signal for processing.
+        if (plvmConfigParams->monoMode && channelCount > 1) {
+            for (int i = 0; i < frameLength; ++i) {
+                auto* fp = &floatIn[i * channelCount];
+                std::fill(fp + 1, fp + channelCount, *fp);  // replicate ch 0
+            }
+        }
+#ifndef BYPASS_EXEC
+        errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
+        if (errCode) {
+            printf("\nError: lvmExecute returned with %d\n", errCode);
+            return errCode;
+        }
+
+        (void)frameSize;  // eliminate warning
+#else
+        memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
+        if (ioChannelCount != channelCount) {
+            adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+                            frameLength * channelCount * sizeof(short));
+        }
+        (void)fwrite(out.data(), ioFrameSize, frameLength, fout);
+        frameCounter += frameLength;
+    }
+    printf("frameCounter: [%d]\n", frameCounter);
+    return 0;
+}
+
+int main(int argc, const char* argv[]) {
+    if (argc == 1) {
+        printUsage();
+        return -1;
+    }
+
+    lvmConfigParams_t lvmConfigParams{};  // default initialize
+    const char* infile = nullptr;
+    const char* outfile = nullptr;
+
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+        if (!strncmp(argv[i], "-i:", 3)) {
+            infile = argv[i] + 3;
+        } else if (!strncmp(argv[i], "-o:", 3)) {
+            outfile = argv[i] + 3;
+        } else if (!strncmp(argv[i], "-fs:", 4)) {
+            const int samplingFreq = atoi(argv[i] + 4);
+            if (samplingFreq != 8000 && samplingFreq != 11025 && samplingFreq != 12000 &&
+                samplingFreq != 16000 && samplingFreq != 22050 && samplingFreq != 24000 &&
+                samplingFreq != 32000 && samplingFreq != 44100 && samplingFreq != 48000 &&
+                samplingFreq != 88200 && samplingFreq != 96000 && samplingFreq != 176400 &&
+                samplingFreq != 192000) {
+                printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
+                return -1;
+            }
+            lvmConfigParams.samplingFreq = samplingFreq;
+        } else if (!strncmp(argv[i], "-chMask:", 8)) {
+            const int chMaskConfigIdx = atoi(argv[i] + 8);
+            if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
+                ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
+                return -1;
+            }
+            const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
+            lvmConfigParams.chMask = chMask;
+            lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
+        } else if (!strncmp(argv[i], "-vcBal:", 7)) {
+            const int vcBalance = atoi(argv[i] + 7);
+            if (vcBalance > 96 || vcBalance < -96) {
+                ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
+                return -1;
+            }
+            lvmConfigParams.vcBal = vcBalance;
+        } else if (!strncmp(argv[i], "-fch:", 5)) {
+            const int fChannels = atoi(argv[i] + 5);
+            if (fChannels > 8 || fChannels < 1) {
+                printf("Error: Unsupported number of file channels : %d\n", fChannels);
+                return -1;
+            }
+            lvmConfigParams.fChannels = fChannels;
+        } else if (!strcmp(argv[i], "-M")) {
+            lvmConfigParams.monoMode = true;
+        } else if (!strncmp(argv[i], "-basslvl:", 9)) {
+            const int bassEffectLevel = atoi(argv[i] + 9);
+            if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL ||
+                bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
+                printf("Error: Unsupported Bass Effect Level : %d\n", bassEffectLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.bassEffectLevel = bassEffectLevel;
+        } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
+            const int eqPresetLevel = atoi(argv[i] + 10);
+            const int numPresetLvls = std::size(gEqualizerPresets);
+            if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
+                printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.eqPresetLevel = eqPresetLevel;
+        } else if (!strncmp(argv[i], "-trebleLvl:", 11)) {
+            const int trebleEffectLevel = atoi(argv[i] + 11);
+            if (trebleEffectLevel > LVM_TE_MAX_EFFECTLEVEL ||
+                trebleEffectLevel < LVM_TE_MIN_EFFECTLEVEL) {
+                printf("Error: Unsupported Treble Effect Level : %d\n", trebleEffectLevel);
+                printUsage();
+                return -1;
+            }
+            lvmConfigParams.trebleEffectLevel = trebleEffectLevel;
+        } else if (!strcmp(argv[i], "-bE")) {
+            lvmConfigParams.bassEnable = LVM_BE_ON;
+        } else if (!strcmp(argv[i], "-eqE")) {
+            lvmConfigParams.eqEnable = LVM_EQNB_ON;
+        } else if (!strcmp(argv[i], "-tE")) {
+            lvmConfigParams.trebleEnable = LVM_TE_ON;
+        } else if (!strcmp(argv[i], "-csE")) {
+            lvmConfigParams.csEnable = LVM_MODE_ON;
+        } else if (!strcmp(argv[i], "-h")) {
+            printUsage();
+            return 0;
         }
     }
-#ifndef BYPASS_EXEC
-    errCode = lvmExecute(floatIn.data(), floatOut.data(), pContext, plvmConfigParams);
-    if (errCode) {
-      printf("\nError: lvmExecute returned with %d\n", errCode);
-      return errCode;
-    }
 
-    (void)frameSize; // eliminate warning
-#else
-    memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
-#endif
-    memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
-    if (ioChannelCount != channelCount) {
-        adjust_channels(out.data(), channelCount, out.data(), ioChannelCount,
-               sizeof(short), frameLength * channelCount * sizeof(short));
-    }
-    (void) fwrite(out.data(), ioFrameSize, frameLength, fout);
-    frameCounter += frameLength;
-  }
-  printf("frameCounter: [%d]\n", frameCounter);
-  return 0;
-}
-
-int main(int argc, const char *argv[]) {
-  if (argc == 1) {
-    printUsage();
-    return -1;
-  }
-
-  lvmConfigParams_t lvmConfigParams{}; // default initialize
-  const char *infile = nullptr;
-  const char *outfile = nullptr;
-
-  for (int i = 1; i < argc; i++) {
-    printf("%s ", argv[i]);
-    if (!strncmp(argv[i], "-i:", 3)) {
-      infile = argv[i] + 3;
-    } else if (!strncmp(argv[i], "-o:", 3)) {
-      outfile = argv[i] + 3;
-    } else if (!strncmp(argv[i], "-fs:", 4)) {
-      const int samplingFreq = atoi(argv[i] + 4);
-      if (samplingFreq != 8000 && samplingFreq != 11025 &&
-          samplingFreq != 12000 && samplingFreq != 16000 &&
-          samplingFreq != 22050 && samplingFreq != 24000 &&
-          samplingFreq != 32000 && samplingFreq != 44100 &&
-          samplingFreq != 48000 && samplingFreq != 88200 &&
-          samplingFreq != 96000 && samplingFreq != 176400 &&
-          samplingFreq != 192000) {
-        printf("Error: Unsupported Sampling Frequency : %d\n", samplingFreq);
-        return -1;
-      }
-      lvmConfigParams.samplingFreq = samplingFreq;
-    } else if (!strncmp(argv[i], "-chMask:", 8)) {
-      const int chMaskConfigIdx = atoi(argv[i] + 8);
-      if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
-        ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
-        return -1;
-      }
-      const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
-      lvmConfigParams.chMask = chMask;
-      lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
-    } else if (!strncmp(argv[i], "-vcBal:", 7)) {
-      const int vcBalance = atoi(argv[i] + 7);
-      if (vcBalance > 96 || vcBalance < -96) {
-        ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
-      }
-      lvmConfigParams.vcBal = vcBalance;
-    } else if (!strncmp(argv[i], "-fch:", 5)) {
-      const int fChannels = atoi(argv[i] + 5);
-      if (fChannels > 8 || fChannels < 1) {
-             printf("Error: Unsupported number of file channels : %d\n", fChannels);
-             return -1;
-           }
-           lvmConfigParams.fChannels = fChannels;
-    } else if (!strcmp(argv[i],"-M")) {
-          lvmConfigParams.monoMode = true;
-    } else if (!strncmp(argv[i], "-basslvl:", 9)) {
-      const int bassEffectLevel = atoi(argv[i] + 9);
-      if (bassEffectLevel > LVM_BE_MAX_EFFECTLEVEL || bassEffectLevel < LVM_BE_MIN_EFFECTLEVEL) {
-        printf("Error: Unsupported Bass Effect Level : %d\n",
-               bassEffectLevel);
+    if (infile == nullptr || outfile == nullptr) {
+        printf("Error: missing input/output files\n");
         printUsage();
         return -1;
-      }
-      lvmConfigParams.bassEffectLevel = bassEffectLevel;
-    } else if (!strncmp(argv[i], "-eqPreset:", 10)) {
-      const int eqPresetLevel = atoi(argv[i] + 10);
-      const int numPresetLvls = std::size(gEqualizerPresets);
-      if (eqPresetLevel >= numPresetLvls || eqPresetLevel < 0) {
-        printf("Error: Unsupported Equalizer Preset : %d\n", eqPresetLevel);
-        printUsage();
-        return -1;
-      }
-      lvmConfigParams.eqPresetLevel = eqPresetLevel;
-    } else if (!strcmp(argv[i], "-bE")) {
-      lvmConfigParams.bassEnable = LVM_BE_ON;
-    } else if (!strcmp(argv[i], "-eqE")) {
-      lvmConfigParams.eqEnable = LVM_EQNB_ON;
-    } else if (!strcmp(argv[i], "-tE")) {
-      lvmConfigParams.trebleEnable = LVM_TE_ON;
-    } else if (!strcmp(argv[i], "-csE")) {
-      lvmConfigParams.csEnable = LVM_MODE_ON;
-    } else if (!strcmp(argv[i], "-h")) {
-      printUsage();
-      return 0;
     }
-  }
 
-  if (infile == nullptr || outfile == nullptr) {
-    printf("Error: missing input/output files\n");
-    printUsage();
-    return -1;
-  }
+    FILE* finp = fopen(infile, "rb");
+    if (finp == nullptr) {
+        printf("Cannot open input file %s", infile);
+        return -1;
+    }
 
-  FILE *finp = fopen(infile, "rb");
-  if (finp == nullptr) {
-    printf("Cannot open input file %s", infile);
-    return -1;
-  }
+    FILE* fout = fopen(outfile, "wb");
+    if (fout == nullptr) {
+        printf("Cannot open output file %s", outfile);
+        fclose(finp);
+        return -1;
+    }
 
-  FILE *fout = fopen(outfile, "wb");
-  if (fout == nullptr) {
-    printf("Cannot open output file %s", outfile);
+    EffectContext context;
+    LVM_ControlParams_t params;
+    int errCode = lvmCreate(&context, &lvmConfigParams, &params);
+    if (errCode == 0) {
+        errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
+        if (errCode != 0) {
+            printf("Error: lvmMainProcess returned with the error: %d", errCode);
+        }
+    } else {
+        printf("Error: lvmCreate returned with the error: %d", errCode);
+    }
     fclose(finp);
-    return -1;
-  }
-
-  EffectContext context;
-  LVM_ControlParams_t params;
-  int errCode = lvmCreate(&context, &lvmConfigParams, &params);
-  if (errCode == 0) {
-    errCode = lvmMainProcess(&context, &params, &lvmConfigParams, finp, fout);
-    if (errCode != 0) {
-      printf("Error: lvmMainProcess returned with the error: %d",errCode);
+    fclose(fout);
+    /* Free the allocated buffers */
+    if (context.pBundledContext != nullptr) {
+        if (context.pBundledContext->hInstance != nullptr) {
+            LVM_DelInstanceHandle(&context.pBundledContext->hInstance);
+        }
+        free(context.pBundledContext);
     }
-  } else {
-    printf("Error: lvmCreate returned with the error: %d", errCode);
-  }
-  fclose(finp);
-  fclose(fout);
-  /* Free the allocated buffers */
-  if (context.pBundledContext != nullptr) {
-    if (context.pBundledContext->hInstance != nullptr) {
-      LvmEffect_free(&context);
-    }
-    free(context.pBundledContext);
-  }
 
-  if (errCode) {
-    return -1;
-  }
-  return 0;
+    if (errCode) {
+        return -1;
+    }
+    return 0;
 }
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
new file mode 100644
index 0000000..dfb6970
--- /dev/null
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -0,0 +1,403 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <getopt.h>
+#include <inttypes.h>
+#include <iterator>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <vector>
+
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <log/log.h>
+#include <system/audio.h>
+
+#include "EffectReverb.h"
+
+// This is the only symbol that needs to be exported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+// Global Variables
+enum ReverbParams {
+    ARG_HELP = 1,
+    ARG_INPUT,
+    ARG_OUTPUT,
+    ARG_FS,
+    ARG_CH_MASK,
+    ARG_PRESET,
+    ARG_AUX,
+    ARG_MONO_MODE,
+    ARG_FILE_CH,
+};
+
+const effect_uuid_t kReverbUuids[] = {
+        {0x172cdf00,
+         0xa3bc,
+         0x11df,
+         0xa72f,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-insert mode
+        {0xf29a1400,
+         0xa3bb,
+         0x11df,
+         0x8ddc,
+         {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // preset-aux mode
+};
+
+// structures
+struct reverbConfigParams_t {
+    int fChannels = 2;
+    int monoMode = false;
+    int frameLength = 256;
+    int preset = 0;
+    int nrChannels = 2;
+    int sampleRate = 48000;
+    int auxiliary = 0;
+    audio_channel_mask_t chMask = AUDIO_CHANNEL_OUT_STEREO;
+};
+
+constexpr audio_channel_mask_t kReverbConfigChMask[] = {
+        AUDIO_CHANNEL_OUT_MONO,
+        AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,
+        AUDIO_CHANNEL_OUT_2POINT0POINT2,
+        AUDIO_CHANNEL_OUT_QUAD,
+        AUDIO_CHANNEL_OUT_QUAD_BACK,
+        AUDIO_CHANNEL_OUT_QUAD_SIDE,
+        AUDIO_CHANNEL_OUT_SURROUND,
+        AUDIO_CHANNEL_INDEX_MASK_4,
+        AUDIO_CHANNEL_OUT_2POINT1POINT2,
+        AUDIO_CHANNEL_OUT_3POINT0POINT2,
+        AUDIO_CHANNEL_OUT_PENTA,
+        AUDIO_CHANNEL_INDEX_MASK_5,
+        AUDIO_CHANNEL_OUT_3POINT1POINT2,
+        AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_5POINT1_BACK,
+        AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+        AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_OUT_6POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_7,
+        AUDIO_CHANNEL_OUT_5POINT1POINT2,
+        AUDIO_CHANNEL_OUT_7POINT1,
+        AUDIO_CHANNEL_INDEX_MASK_8,
+        AUDIO_CHANNEL_INDEX_MASK_9,
+        AUDIO_CHANNEL_INDEX_MASK_10,
+        AUDIO_CHANNEL_INDEX_MASK_11,
+        AUDIO_CHANNEL_INDEX_MASK_12,
+        AUDIO_CHANNEL_INDEX_MASK_13,
+        AUDIO_CHANNEL_INDEX_MASK_14,
+        AUDIO_CHANNEL_INDEX_MASK_15,
+        AUDIO_CHANNEL_INDEX_MASK_16,
+        AUDIO_CHANNEL_INDEX_MASK_17,
+        AUDIO_CHANNEL_INDEX_MASK_18,
+        AUDIO_CHANNEL_INDEX_MASK_19,
+        AUDIO_CHANNEL_INDEX_MASK_20,
+        AUDIO_CHANNEL_INDEX_MASK_21,
+        AUDIO_CHANNEL_INDEX_MASK_22,
+        AUDIO_CHANNEL_INDEX_MASK_23,
+        AUDIO_CHANNEL_INDEX_MASK_24,
+};
+
+constexpr int kReverbConfigChMaskCount = std::size(kReverbConfigChMask);
+
+int reverbCreateEffect(effect_handle_t* pEffectHandle, effect_config_t* pConfig, int sessionId,
+                       int ioId, int auxFlag) {
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kReverbUuids[auxFlag], sessionId,
+                                                                 ioId, pEffectHandle);
+        status != 0) {
+        ALOGE("Reverb create returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    (**pEffectHandle)
+            ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+                      &replySize, &reply);
+    return reply;
+}
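+
+// Illustrative only, mirroring main() below: auxFlag selects between the
+// preset-insert (0) and preset-aux (1) UUIDs in kReverbUuids, and the effect is
+// configured immediately after creation.
+//
+//     effect_handle_t handle = nullptr;
+//     effect_config_t cfg{};  // sampling rate, channel mask and format set by the caller
+//     int status = reverbCreateEffect(&handle, &cfg, 1 /* sessionId */, 1 /* ioId */,
+//                                     0 /* insert mode */);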
+
+int reverbSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {paramType, paramValue};
+    effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*effectHandle)
+                         ->command(effectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    if (status != 0) {
+        ALOGE("Reverb set config returned an error = %d\n", status);
+        return status;
+    }
+    return reply;
+}
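+
+// Illustrative only: the helper packs the parameter id into the first
+// sizeof(uint32_t) bytes of effect_param_t::data (psize) and the value into the
+// next sizeof(uint32_t) bytes (vsize). main() below uses it to select a preset:
+//
+//     reverbSetConfigParam(REVERB_PARAM_PRESET, (uint32_t)revConfigParams.preset, effectHandle);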
+
+void printUsage() {
+    printf("\nUsage: ");
+    printf("\n     <executable> [options]\n");
+    printf("\nwhere options are, ");
+    printf("\n     --input <inputfile>");
+    printf("\n           path to the input file");
+    printf("\n     --output <outputfile>");
+    printf("\n           path to the output file");
+    printf("\n     --help");
+    printf("\n           prints this usage information");
+    printf("\n     --chMask <channel_mask>\n");
+    printf("\n           0  - AUDIO_CHANNEL_OUT_MONO");
+    printf("\n           1  - AUDIO_CHANNEL_OUT_STEREO");
+    printf("\n           2  - AUDIO_CHANNEL_OUT_2POINT1");
+    printf("\n           3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+    printf("\n           4  - AUDIO_CHANNEL_OUT_QUAD");
+    printf("\n           5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
+    printf("\n           6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+    printf("\n           7  - AUDIO_CHANNEL_OUT_SURROUND");
+    printf("\n           8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
+    printf("\n           9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+    printf("\n           10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+    printf("\n           11 - AUDIO_CHANNEL_OUT_PENTA");
+    printf("\n           12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+    printf("\n           13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+    printf("\n           14 - AUDIO_CHANNEL_OUT_5POINT1");
+    printf("\n           15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+    printf("\n           16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+    printf("\n           17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+    printf("\n           18 - AUDIO_CHANNEL_OUT_6POINT1");
+    printf("\n           19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+    printf("\n           20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+    printf("\n           21 - AUDIO_CHANNEL_OUT_7POINT1");
+    printf("\n           22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+    printf("\n           default 0");
+    printf("\n     --fs <sampling_freq>");
+    printf("\n           Sampling frequency in Hz, default 48000.");
+    printf("\n     --preset <preset_value>");
+    printf("\n           0 - None");
+    printf("\n           1 - Small Room");
+    printf("\n           2 - Medium Room");
+    printf("\n           3 - Large Room");
+    printf("\n           4 - Medium Hall");
+    printf("\n           5 - Large Hall");
+    printf("\n           6 - Plate");
+    printf("\n           default 0");
+    printf("\n     --fch <file_channels>");
+    printf("\n           number of channels in input file (1 through 8), default 1");
+    printf("\n     --M");
+    printf("\n           Mono mode (force all input audio channels to be identical)");
+    printf("\n     --aux <auxiliary_flag> ");
+    printf("\n           0 - Insert Mode on");
+    printf("\n           1 - auxiliary Mode on");
+    printf("\n           default 0");
+    printf("\n");
+}
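+
+// Illustrative only (file paths are hypothetical placeholders): a typical
+// invocation using the long options documented above.
+//
+//     <executable> --input /data/local/tmp/sine.raw --output /data/local/tmp/rev_out.raw \
+//                  --fs 48000 --chMask 1 --preset 2 --fch 2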
+
+int main(int argc, const char* argv[]) {
+    if (argc == 1) {
+        printUsage();
+        return EXIT_FAILURE;
+    }
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
+    reverbConfigParams_t revConfigParams{};  // default initialize
+    const char* inputFile = nullptr;
+    const char* outputFile = nullptr;
+
+    const option long_opts[] = {
+            {"help", no_argument, nullptr, ARG_HELP},
+            {"input", required_argument, nullptr, ARG_INPUT},
+            {"output", required_argument, nullptr, ARG_OUTPUT},
+            {"fs", required_argument, nullptr, ARG_FS},
+            {"chMask", required_argument, nullptr, ARG_CH_MASK},
+            {"preset", required_argument, nullptr, ARG_PRESET},
+            {"aux", required_argument, nullptr, ARG_AUX},
+            {"M", no_argument, &revConfigParams.monoMode, true},
+            {"fch", required_argument, nullptr, ARG_FILE_CH},
+            {nullptr, 0, nullptr, 0},
+    };
+
+    while (true) {
+        const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
+        if (opt == -1) {
+            break;
+        }
+        switch (opt) {
+            case ARG_HELP:
+                printUsage();
+                return EXIT_SUCCESS;
+            case ARG_INPUT: {
+                inputFile = (char*)optarg;
+                break;
+            }
+            case ARG_OUTPUT: {
+                outputFile = (char*)optarg;
+                break;
+            }
+            case ARG_FS: {
+                revConfigParams.sampleRate = atoi(optarg);
+                break;
+            }
+            case ARG_CH_MASK: {
+                int chMaskIdx = atoi(optarg);
+                if (chMaskIdx < 0 || chMaskIdx >= kReverbConfigChMaskCount) {
+                    ALOGE("Channel mask index out of range\n");
+                    printUsage();
+                    return EXIT_FAILURE;
+                }
+                revConfigParams.chMask = kReverbConfigChMask[chMaskIdx];
+                break;
+            }
+            case ARG_PRESET: {
+                revConfigParams.preset = atoi(optarg);
+                break;
+            }
+            case ARG_AUX: {
+                revConfigParams.auxiliary = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
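+                // No action needed: the "M" entry in long_opts carries a flag pointer, so
+                // getopt_long writes revConfigParams.monoMode itself (and returns 0 rather than
+                // this value).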
+                break;
+            }
+            case ARG_FILE_CH: {
+                revConfigParams.fChannels = atoi(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (inputFile == nullptr) {
+        ALOGE("Error: missing input file\n");
+        printUsage();
+        return EXIT_FAILURE;
+    }
+    std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+
+    if (inputFp == nullptr) {
+        ALOGE("Cannot open input file %s\n", inputFile);
+        return EXIT_FAILURE;
+    }
+
+    if (outputFile == nullptr) {
+        ALOGE("Error: missing output file\n");
+        printUsage();
+        return EXIT_FAILURE;
+    }
+    std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+
+    if (outputFp == nullptr) {
+        ALOGE("Cannot open output file %s\n", outputFile);
+        return EXIT_FAILURE;
+    }
+
+    int32_t sessionId = 1;
+    int32_t ioId = 1;
+    effect_handle_t effectHandle = nullptr;
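+    // Configure the effect symmetrically: input and output share the requested sample rate,
+    // channel mask and float PCM format.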
+    effect_config_t config;
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = revConfigParams.sampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = revConfigParams.chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+    if (int status = reverbCreateEffect(&effectHandle, &config, sessionId, ioId,
+                                        revConfigParams.auxiliary);
+        status != 0) {
+        ALOGE("Create effect call returned error %i", status);
+        return EXIT_FAILURE;
+    }
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    (*effectHandle)->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    if (reply != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return EXIT_FAILURE;
+    }
+
+    if (int status = reverbSetConfigParam(REVERB_PARAM_PRESET, (uint32_t)revConfigParams.preset,
+                                          effectHandle);
+        status != 0) {
+        ALOGE("Invalid reverb preset. Error %d\n", status);
+        return EXIT_FAILURE;
+    }
+
+    revConfigParams.nrChannels = audio_channel_count_from_out_mask(revConfigParams.chMask);
+    const int channelCount = revConfigParams.nrChannels;
+    const int frameLength = revConfigParams.frameLength;
+#ifdef BYPASS_EXEC
+    const int frameSize = (int)channelCount * sizeof(float);
+#endif
+    const int ioChannelCount = revConfigParams.fChannels;
+    const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int maxChannelCount = std::max(channelCount, ioChannelCount);
+
+    std::vector<short> in(frameLength * maxChannelCount);
+    std::vector<short> out(frameLength * maxChannelCount);
+    std::vector<float> floatIn(frameLength * channelCount);
+    std::vector<float> floatOut(frameLength * channelCount);
+
+    int frameCounter = 0;
+
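+    // Per-block pipeline: read 16-bit interleaved frames, adjust to the effect's channel count,
+    // convert to float, run the effect (skipped when BYPASS_EXEC is defined), then convert back
+    // to 16-bit at the file's channel count and write the block out.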
+    while (fread(in.data(), ioFrameSize, frameLength, inputFp.get()) == (size_t)frameLength) {
+        if (ioChannelCount != channelCount) {
+            adjust_channels(in.data(), ioChannelCount, in.data(), channelCount, sizeof(short),
+                            frameLength * ioFrameSize);
+        }
+        memcpy_to_float_from_i16(floatIn.data(), in.data(), frameLength * channelCount);
+
+        // Mono mode will replicate the first channel to all other channels.
+        // This ensures all audio channels are identical. This is useful for testing
+        // Bass Boost, which extracts a mono signal for processing.
+        if (revConfigParams.monoMode && channelCount > 1) {
+            for (int i = 0; i < frameLength; ++i) {
+                auto* fp = &floatIn[i * channelCount];
+                std::fill(fp + 1, fp + channelCount, *fp);  // replicate ch 0
+            }
+        }
+
+        audio_buffer_t inputBuffer, outputBuffer;
+        inputBuffer.frameCount = outputBuffer.frameCount = frameLength;
+        inputBuffer.f32 = floatIn.data();
+        outputBuffer.f32 = floatOut.data();
+#ifndef BYPASS_EXEC
+        if (int status = (*effectHandle)->process(effectHandle, &inputBuffer, &outputBuffer);
+            status != 0) {
+            ALOGE("\nError: Process returned with error %d\n", status);
+            return EXIT_FAILURE;
+        }
+#else
+        memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
+#endif
+        memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
+
+        if (ioChannelCount != channelCount) {
+            adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+                            frameLength * channelCount * sizeof(short));
+        }
+        (void)fwrite(out.data(), ioFrameSize, frameLength, outputFp.get());
+        frameCounter += frameLength;
+    }
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    printf("frameCounter: [%d]\n", frameCounter);
+
+    return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/lvm/tests/snr.cpp b/media/libeffects/lvm/tests/snr.cpp
index 885994c..0fef334 100644
--- a/media/libeffects/lvm/tests/snr.cpp
+++ b/media/libeffects/lvm/tests/snr.cpp
@@ -22,84 +22,83 @@
 #include <vector>
 
 template <typename T, typename A = float>
-std::pair<A, A> getSignalNoise(FILE *finp, FILE *fref) {
-  constexpr size_t framesize = 256;
-  std::vector<T> in(framesize);
-  std::vector<T> ref(framesize);
-  A signal{};
-  A noise{};
+std::pair<A, A> getSignalNoise(FILE* finp, FILE* fref) {
+    constexpr size_t framesize = 256;
+    std::vector<T> in(framesize);
+    std::vector<T> ref(framesize);
+    A signal{};
+    A noise{};
 
-  for (;;) {
-    size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
-    const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
-    if (read_samples_in != read_samples_ref) {
-      printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
-      read_samples_in = std::min(read_samples_in, read_samples_ref);
+    for (;;) {
+        size_t read_samples_in = fread(&in[0], sizeof(T), framesize, finp);
+        const size_t read_samples_ref = fread(&ref[0], sizeof(T), framesize, fref);
+        if (read_samples_in != read_samples_ref) {
+            printf("file sizes do not match (last %zu %zu)", read_samples_in, read_samples_ref);
+            read_samples_in = std::min(read_samples_in, read_samples_ref);
+        }
+        if (read_samples_in == 0) {
+            return {signal, noise};
+        }
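+        // Accumulate reference energy (signal) and the squared difference between test and
+        // reference (noise) over this block.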
+        for (size_t i = 0; i < read_samples_in; ++i) {
+            const A value(ref[i]);
+            const A diff(A(in[i]) - value);
+            signal += value * value;
+            noise += diff * diff;
+        }
     }
-    if (read_samples_in == 0) {
-        return { signal, noise };
-    }
-    for (size_t i = 0; i < read_samples_in; ++i) {
-       const A value(ref[i]);
-       const A diff(A(in[i]) - value);
-       signal += value * value;
-       noise += diff * diff;
-    }
-  }
 }
 
 void printUsage() {
-  printf("\nUsage: ");
-  printf("\n     snr <ref_file> <test_file> [options]\n");
-  printf("\nwhere, \n     <ref_file>  is the reference file name");
-  printf("\n                  on which will be taken as pure signal");
-  printf("\n     <test_file> is test file for snr calculation");
-  printf("\n     and options are mentioned below");
-  printf("\n");
-  printf("\n     -pcm_format:<pcm format of input files>");
-  printf("\n           0 - 16 bit pcm");
-  printf("\n           1 - 32 bit float");
-  printf("\n           default 0");
-  printf("\n     -thr:<threshold value>");
-  printf("\n           default - negative infinity\n\n");
+    printf("\nUsage: ");
+    printf("\n     snr <ref_file> <test_file> [options]\n");
+    printf("\nwhere, \n     <ref_file>  is the reference file name,");
+    printf("\n                  which is taken as the pure signal");
+    printf("\n     <test_file> is the test file for SNR calculation");
+    printf("\n     and the available options are listed below");
+    printf("\n");
+    printf("\n     -pcm_format:<pcm format of input files>");
+    printf("\n           0 - 16 bit pcm");
+    printf("\n           1 - 32 bit float");
+    printf("\n           default 0");
+    printf("\n     -thr:<threshold value>");
+    printf("\n           default - negative infinity\n\n");
 }
 
-int main(int argc, const char *argv[]) {
-  if (argc < 3) {
-    printUsage();
-    return -1;
-  }
-  int pcm_format = 0;
-  float thr = - std::numeric_limits<float>::infinity();
-  FILE *fref = fopen(argv[1], "rb");
-  FILE *finp = fopen(argv[2], "rb");
-  for (int i = 3; i < argc; i++) {
-    if (!strncmp(argv[i], "-pcm_format:", 12)) {
-      pcm_format = atoi(argv[i] + 12);
-    } else if (!strncmp(argv[i], "-thr:", 5)) {
-      thr = atof(argv[i] + 5);
+int main(int argc, const char* argv[]) {
+    if (argc < 3) {
+        printUsage();
+        return -1;
     }
-  }
-  if (finp == nullptr || fref == nullptr) {
-    printf("\nError: missing input/reference files\n");
-    return -1;
-  }
-  int ret = EXIT_SUCCESS;
-  auto sn = pcm_format == 0
-      ? getSignalNoise<short>(finp, fref)
-      : getSignalNoise<float>(finp, fref);
-  if (sn.first > 0.f && sn.second > 0.f) {
-    float snr = 10.f * log(sn.first / sn.second);
-    // compare the measured snr value with threshold
-    if (snr < thr) {
-      printf("%.6f less than threshold %.6f\n", snr, thr);
-      ret = EXIT_FAILURE;
-    } else {
-      printf("%.6f\n", snr);
+    int pcm_format = 0;
+    float thr = -std::numeric_limits<float>::infinity();
+    FILE* fref = fopen(argv[1], "rb");
+    FILE* finp = fopen(argv[2], "rb");
+    for (int i = 3; i < argc; i++) {
+        if (!strncmp(argv[i], "-pcm_format:", 12)) {
+            pcm_format = atoi(argv[i] + 12);
+        } else if (!strncmp(argv[i], "-thr:", 5)) {
+            thr = atof(argv[i] + 5);
+        }
     }
-  }
-  fclose(finp);
-  fclose(fref);
+    if (finp == nullptr || fref == nullptr) {
+        printf("\nError: missing input/reference files\n");
+        return -1;
+    }
+    int ret = EXIT_SUCCESS;
+    auto sn =
+            pcm_format == 0 ? getSignalNoise<short>(finp, fref) : getSignalNoise<float>(finp, fref);
+    if (sn.first > 0.f && sn.second > 0.f) {
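+        // Note: log() here is the natural logarithm, so this is 10*ln(S/N) rather than the usual
+        // 10*log10(S/N) dB figure; the threshold is presumably meant to be on the same scale.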
+        float snr = 10.f * log(sn.first / sn.second);
+        // compare the measured snr value with threshold
+        if (snr < thr) {
+            printf("%.6f less than threshold %.6f\n", snr, thr);
+            ret = EXIT_FAILURE;
+        } else {
+            printf("%.6f\n", snr);
+        }
+    }
+    fclose(finp);
+    fclose(fref);
 
-  return ret;
+    return ret;
 }
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index afc4220..e169e3c 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,24 @@
 // music bundle wrapper
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_lvm_wrapper_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_lvm_wrapper_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libbundlewrapper",
 
     arch: {
@@ -9,11 +28,11 @@
     },
 
     vendor: true,
+    host_supported: true,
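+    // host_supported is presumably enabled so the wrapper can also be built for host-side
+    // tests and benchmarks.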
     srcs: ["Bundle/EffectBundle.cpp"],
 
     cppflags: [
         "-fvisibility=hidden",
-        "-DSUPPORT_MC",
 
         "-Wall",
         "-Werror",
@@ -26,7 +45,6 @@
     shared_libs: [
         "libaudioutils",
         "libcutils",
-        "libdl",
         "liblog",
     ],
 
@@ -39,7 +57,7 @@
 }
 
 // reverb wrapper
-cc_library_shared {
+cc_library {
     name: "libreverbwrapper",
 
     arch: {
@@ -49,6 +67,7 @@
     },
 
     vendor: true,
+    host_supported: true,
     srcs: ["Reverb/EffectReverb.cpp"],
 
     cppflags: [
@@ -65,7 +84,6 @@
     shared_libs: [
         "libaudioutils",
         "libcutils",
-        "libdl",
         "liblog",
     ],
 
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 6fca0e7..df64676 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -18,7 +18,7 @@
 typedef float LVM_FLOAT;
 #endif
 #define LOG_TAG "Bundle"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
 //#define LOG_NDEBUG 0
 
 #include <assert.h>
@@ -42,26 +42,33 @@
 #ifdef VERY_VERY_VERBOSE_LOGGING
 #define ALOGVV ALOGV
 #else
-#define ALOGVV(a...) do { } while (false)
+#define ALOGVV(a...) \
+    do {             \
+    } while (false)
 #endif
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
-        if ((LvmStatus) == LVM_NULLADDRESS){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_ALIGNMENTERROR){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "bad alignment returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVM_OUTOFRANGE){\
-            ALOGV("\tLVM_ERROR : Parameter error - "\
-                    "out of range returned by %s in %s\n", callingFunc, calledFunc);\
-        }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVM_NULLADDRESS) {                           \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_ALIGNMENTERROR) {                        \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "bad alignment returned by %s in %s\n\n\n\n",         \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_INVALIDNUMSAMPLES) {                     \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVM_OUTOFRANGE) {                            \
+            ALOGV("\tLVM_ERROR : Parameter error - "                    \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
     }
 
 // Namespaces
@@ -74,20 +81,21 @@
 int SessionIndex[LVM_MAX_SESSIONS];
 
 /* local functions */
-#define CHECK_ARG(cond) {                     \
-    if (!(cond)) {                            \
-        ALOGV("\tLVM_ERROR : Invalid argument: "#cond);      \
-        return -EINVAL;                       \
-    }                                         \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
 // NXP SW BassBoost UUID
 const effect_descriptor_t gBassBoostDescriptor = {
-        {0x0634f220, 0xddd4, 0x11db, 0xa0fc, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
-        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid
+        {0x0634f220, 0xddd4, 0x11db, 0xa0fc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND
-        | EFFECT_FLAG_VOLUME_CTRL),
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND |
+         EFFECT_FLAG_VOLUME_CTRL),
         BASS_BOOST_CUP_LOAD_ARM9E,
         BUNDLE_MEM_USAGE,
         "Dynamic Bass Boost",
@@ -99,8 +107,8 @@
         {0x37cc2c00, 0xdddd, 0x11db, 0x8577, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         {0x1d4033c0, 0x8557, 0x11df, 0x9f2d, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND
-        | EFFECT_FLAG_VOLUME_CTRL),
+        (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND |
+         EFFECT_FLAG_VOLUME_CTRL),
         VIRTUALIZER_CUP_LOAD_ARM9E,
         BUNDLE_MEM_USAGE,
         "Virtualizer",
@@ -109,8 +117,8 @@
 
 // NXP SW Equalizer UUID
 const effect_descriptor_t gEqualizerDescriptor = {
-        {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type
-        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP
+        {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // type
+        {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid Eq NXP
         EFFECT_CONTROL_API_VERSION,
         (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL),
         EQUALIZER_CUP_LOAD_ARM9E,
@@ -121,8 +129,8 @@
 
 // NXP SW Volume UUID
 const effect_descriptor_t gVolumeDescriptor = {
-        {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }},
-        {0x119341a0, 0x8469, 0x11df, 0x81f9, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, //uuid VOL NXP
+        {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        {0x119341a0, 0x8469, 0x11df, 0x81f9, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid VOL NXP
         EFFECT_CONTROL_API_VERSION,
         (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL),
         VOLUME_CUP_LOAD_ARM9E,
@@ -132,77 +140,50 @@
 };
 
 //--- local function prototypes
-void LvmGlobalBundle_init      (void);
-int  LvmBundle_init            (EffectContext *pContext);
-int  LvmEffect_enable          (EffectContext *pContext);
-int  LvmEffect_disable         (EffectContext *pContext);
-void LvmEffect_free            (EffectContext *pContext);
-int  Effect_setConfig          (EffectContext *pContext, effect_config_t *pConfig);
-void Effect_getConfig          (EffectContext *pContext, effect_config_t *pConfig);
-int  BassBoost_setParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  BassBoost_getParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Virtualizer_setParameter  (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Virtualizer_getParameter  (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Equalizer_setParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Equalizer_getParameter    (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int  Volume_setParameter       (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t       valueSize,
-                                void          *pValue);
-int  Volume_getParameter       (EffectContext *pContext,
-                                uint32_t       paramSize,
-                                void          *pParam,
-                                uint32_t      *pValueSize,
-                                void          *pValue);
-int Effect_setEnabled(EffectContext *pContext, bool enabled);
+void LvmGlobalBundle_init(void);
+int LvmBundle_init(EffectContext* pContext);
+int LvmEffect_enable(EffectContext* pContext);
+int LvmEffect_disable(EffectContext* pContext);
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig);
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig);
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue);
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue);
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t valueSize, void* pValue);
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t* pValueSize, void* pValue);
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue);
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue);
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t valueSize, void* pValue);
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t* pValueSize, void* pValue);
+int Effect_setEnabled(EffectContext* pContext, bool enabled);
 
 /* Effect Library Interface Implementation */
 
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
-                            int32_t             sessionId,
-                            int32_t             ioId __unused,
-                            effect_handle_t  *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId,
+                            int32_t /* ioId __unused */, effect_handle_t* pHandle) {
     int ret = 0;
     int sessionNo = -1;
     int i;
-    EffectContext *pContext = NULL;
+    EffectContext* pContext = NULL;
     bool newBundle = false;
-    SessionContext *pSessionContext;
+    SessionContext* pSessionContext;
 
     ALOGV("\n\tEffectCreate start session %d", sessionId);
 
-    if (pHandle == NULL || uuid == NULL){
+    if (pHandle == NULL || uuid == NULL) {
         ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
         ret = -EINVAL;
         goto exit;
     }
 
-    if(LvmInitFlag == LVM_FALSE){
+    if (LvmInitFlag == LVM_FALSE) {
         LvmInitFlag = LVM_TRUE;
         ALOGV("\tEffectCreate - Initializing all global memory");
         LvmGlobalBundle_init();
@@ -210,7 +191,7 @@
 
     // Find sessionNo: if one already exists for the sessionId use it,
     // otherwise choose the first available empty slot.
-    for(i=0; i<LVM_MAX_SESSIONS; i++){
+    for (i = 0; i < LVM_MAX_SESSIONS; i++) {
         if (SessionIndex[i] == sessionId) {
             sessionNo = i;
             break;
@@ -232,106 +213,102 @@
     pContext = new EffectContext;
 
     // If this is the first create in this session
-    if(GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE){
+    if (GlobalSessionMemory[sessionNo].bBundledEffectsEnabled == LVM_FALSE) {
         ALOGV("\tEffectCreate - This is the first effect in current sessionId %d sessionNo %d",
-                sessionId, sessionNo);
+              sessionId, sessionNo);
 
         GlobalSessionMemory[sessionNo].bBundledEffectsEnabled = LVM_TRUE;
-        GlobalSessionMemory[sessionNo].pBundledContext        = new BundledEffectContext;
+        GlobalSessionMemory[sessionNo].pBundledContext = new BundledEffectContext;
         newBundle = true;
 
         pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
-        pContext->pBundledContext->SessionNo                = sessionNo;
-        pContext->pBundledContext->SessionId                = sessionId;
-        pContext->pBundledContext->hInstance                = NULL;
-        pContext->pBundledContext->bVolumeEnabled           = LVM_FALSE;
-        pContext->pBundledContext->bEqualizerEnabled        = LVM_FALSE;
-        pContext->pBundledContext->bBassEnabled             = LVM_FALSE;
-        pContext->pBundledContext->bBassTempDisabled        = LVM_FALSE;
-        pContext->pBundledContext->bVirtualizerEnabled      = LVM_FALSE;
+        pContext->pBundledContext->SessionNo = sessionNo;
+        pContext->pBundledContext->SessionId = sessionId;
+        pContext->pBundledContext->hInstance = NULL;
+        pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+        pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+        pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+        pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
+        pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
-        pContext->pBundledContext->nOutputDevice            = AUDIO_DEVICE_NONE;
+        pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
         pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
-        pContext->pBundledContext->NumberEffectsEnabled     = 0;
-        pContext->pBundledContext->NumberEffectsCalled      = 0;
-        pContext->pBundledContext->firstVolume              = LVM_TRUE;
-        pContext->pBundledContext->volume                   = 0;
-
+        pContext->pBundledContext->NumberEffectsEnabled = 0;
+        pContext->pBundledContext->NumberEffectsCalled = 0;
+        pContext->pBundledContext->firstVolume = LVM_TRUE;
+        pContext->pBundledContext->volume = 0;
 
         /* Saved strength is used to return the exact strength that was used in the set to the get
          * because we map the original strength range of 0:1000 to 1:15, and this will avoid
          * quantisation like effect when returning
          */
-        pContext->pBundledContext->BassStrengthSaved        = 0;
-        pContext->pBundledContext->VirtStrengthSaved        = 0;
-        pContext->pBundledContext->CurPreset                = PRESET_CUSTOM;
-        pContext->pBundledContext->levelSaved               = 0;
-        pContext->pBundledContext->bMuteEnabled             = LVM_FALSE;
-        pContext->pBundledContext->bStereoPositionEnabled   = LVM_FALSE;
-        pContext->pBundledContext->positionSaved            = 0;
-        pContext->pBundledContext->workBuffer               = NULL;
-        pContext->pBundledContext->frameCount               = -1;
-        pContext->pBundledContext->SamplesToExitCountVirt   = 0;
-        pContext->pBundledContext->SamplesToExitCountBb     = 0;
-        pContext->pBundledContext->SamplesToExitCountEq     = 0;
+        pContext->pBundledContext->BassStrengthSaved = 0;
+        pContext->pBundledContext->VirtStrengthSaved = 0;
+        pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
+        pContext->pBundledContext->levelSaved = 0;
+        pContext->pBundledContext->bMuteEnabled = LVM_FALSE;
+        pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE;
+        pContext->pBundledContext->positionSaved = 0;
+        pContext->pBundledContext->workBuffer = NULL;
+        pContext->pBundledContext->frameCount = -1;
+        pContext->pBundledContext->SamplesToExitCountVirt = 0;
+        pContext->pBundledContext->SamplesToExitCountBb = 0;
+        pContext->pBundledContext->SamplesToExitCountEq = 0;
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
             pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
         }
-        pContext->pBundledContext->effectProcessCalled      = 0;
-        pContext->pBundledContext->effectInDrain            = 0;
+        pContext->pBundledContext->effectProcessCalled = 0;
+        pContext->pBundledContext->effectInDrain = 0;
 
         ALOGV("\tEffectCreate - Calling LvmBundle_init");
         ret = LvmBundle_init(pContext);
 
-        if (ret < 0){
+        if (ret < 0) {
             ALOGV("\tLVM_ERROR : EffectCreate() Bundle init failed");
             goto exit;
         }
-    }
-    else{
+    } else {
         ALOGV("\tEffectCreate - Assigning memory for previously created effect on sessionNo %d",
-                sessionNo);
-        pContext->pBundledContext =
-                GlobalSessionMemory[sessionNo].pBundledContext;
+              sessionNo);
+        pContext->pBundledContext = GlobalSessionMemory[sessionNo].pBundledContext;
     }
     ALOGV("\tEffectCreate - pBundledContext is %p", pContext->pBundledContext);
 
     pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
 
     // Create each Effect
-    if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    if (memcmp(uuid, &gBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Bass Boost
         ALOGV("\tEffectCreate - Effect to be created is LVM_BASS_BOOST");
         pSessionContext->bBassInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountBb = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_BASS_BOOST;
-    } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Virtualizer
         ALOGV("\tEffectCreate - Effect to be created is LVM_VIRTUALIZER");
-        pSessionContext->bVirtualizerInstantiated=LVM_TRUE;
+        pSessionContext->bVirtualizerInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountVirt = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_VIRTUALIZER;
-    } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Equalizer
         ALOGV("\tEffectCreate - Effect to be created is LVM_EQUALIZER");
         pSessionContext->bEqualizerInstantiated = LVM_TRUE;
         pContext->pBundledContext->SamplesToExitCountEq = 0;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_EQUALIZER;
-    } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0){
+    } else if (memcmp(uuid, &gVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0) {
         // Create Volume
         ALOGV("\tEffectCreate - Effect to be created is LVM_VOLUME");
         pSessionContext->bVolumeInstantiated = LVM_TRUE;
 
-        pContext->itfe       = &gLvmEffectInterface;
+        pContext->itfe = &gLvmEffectInterface;
         pContext->EffectType = LVM_VOLUME;
-    }
-    else{
+    } else {
         ALOGV("\tLVM_ERROR : EffectCreate() invalid UUID");
         ret = -EINVAL;
         goto exit;
@@ -347,66 +324,64 @@
             }
             delete pContext;
         }
-        if (pHandle != NULL)
-          *pHandle = (effect_handle_t)NULL;
+        if (pHandle != NULL) *pHandle = (effect_handle_t)NULL;
     } else {
-      if (pHandle != NULL)
-        *pHandle = (effect_handle_t)pContext;
+        if (pHandle != NULL) *pHandle = (effect_handle_t)pContext;
     }
     ALOGV("\tEffectCreate end..\n\n");
     return ret;
 } /* end EffectCreate */
 
-extern "C" int EffectRelease(effect_handle_t handle){
+extern "C" int EffectRelease(effect_handle_t handle) {
     ALOGV("\n\tEffectRelease start %p", handle);
-    EffectContext * pContext = (EffectContext *)handle;
+    EffectContext* pContext = (EffectContext*)handle;
 
     ALOGV("\tEffectRelease start handle: %p, context %p", handle, pContext->pBundledContext);
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
         return -EINVAL;
     }
 
-    SessionContext *pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
+    SessionContext* pSessionContext = &GlobalSessionMemory[pContext->pBundledContext->SessionNo];
 
     // Clear the instantiated flag for the effect
     // protect against the case where an effect is un-instantiated without being disabled
 
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tEffectRelease LVM_BASS_BOOST Clearing global instantiated flag");
         pSessionContext->bBassInstantiated = LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+        if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountBb = 0;
-    } else if(pContext->EffectType == LVM_VIRTUALIZER) {
+    } else if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tEffectRelease LVM_VIRTUALIZER Clearing global instantiated flag");
         pSessionContext->bVirtualizerInstantiated = LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
+        if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountVirt = 0;
-    } else if(pContext->EffectType == LVM_EQUALIZER) {
+    } else if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tEffectRelease LVM_EQUALIZER Clearing global instantiated flag");
-        pSessionContext->bEqualizerInstantiated =LVM_FALSE;
-        if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+        pSessionContext->bEqualizerInstantiated = LVM_FALSE;
+        if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
         pContext->pBundledContext->SamplesToExitCountEq = 0;
-    } else if(pContext->EffectType == LVM_VOLUME) {
+    } else if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tEffectRelease LVM_VOLUME Clearing global instantiated flag");
         pSessionContext->bVolumeInstantiated = LVM_FALSE;
         // There is no samplesToExitCount for volume so we also use the drain flag to check
         // if we should decrement the effects enabled.
-        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE
-                || (effectInDrain & 1 << LVM_VOLUME) != 0) {
+        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE ||
+            (effectInDrain & 1 << LVM_VOLUME) != 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
     } else {
         ALOGV("\tLVM_ERROR : EffectRelease : Unsupported effect\n\n\n\n\n\n\n");
     }
-    effectInDrain &= ~(1 << pContext->EffectType); // no need to drain if released
+    effectInDrain &= ~(1 << pContext->EffectType);  // no need to drain if released
 
     // Disable effect, in this case ignore errors (return codes)
     // if an effect has already been disabled
@@ -414,17 +389,15 @@
 
     // if all effects are no longer instantiated, free the lvm memory and delete BundledEffectContext
     if ((pSessionContext->bBassInstantiated == LVM_FALSE) &&
-            (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
-            (pSessionContext->bEqualizerInstantiated ==LVM_FALSE) &&
-            (pSessionContext->bVirtualizerInstantiated==LVM_FALSE))
-    {
-
+        (pSessionContext->bVolumeInstantiated == LVM_FALSE) &&
+        (pSessionContext->bEqualizerInstantiated == LVM_FALSE) &&
+        (pSessionContext->bVirtualizerInstantiated == LVM_FALSE)) {
         // Clear the SessionIndex
-        for(int i=0; i<LVM_MAX_SESSIONS; i++){
-            if(SessionIndex[i] == pContext->pBundledContext->SessionId){
+        for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+            if (SessionIndex[i] == pContext->pBundledContext->SessionId) {
                 SessionIndex[i] = LVM_UNUSED_SESSION;
-                ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n",
-                        i, pContext->pBundledContext->SessionId);
+                ALOGV("\tEffectRelease: Clearing SessionIndex SessionNo %d for SessionId %d\n", i,
+                      pContext->pBundledContext->SessionId);
                 break;
             }
         }
@@ -433,7 +406,7 @@
         pSessionContext->bBundledEffectsEnabled = LVM_FALSE;
         pSessionContext->pBundledContext = LVM_NULL;
         ALOGV("\tEffectRelease: Freeing LVM Bundle memory\n");
-        LvmEffect_free(pContext);
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
         ALOGV("\tEffectRelease: Deleting LVM Bundle context %p\n", pContext->pBundledContext);
         if (pContext->pBundledContext->workBuffer != NULL) {
             free(pContext->pBundledContext->workBuffer);
@@ -449,11 +422,10 @@
 
 } /* end EffectRelease */
 
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
-                                   effect_descriptor_t *pDescriptor) {
-    const effect_descriptor_t *desc = NULL;
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
+    const effect_descriptor_t* desc = NULL;
 
-    if (pDescriptor == NULL || uuid == NULL){
+    if (pDescriptor == NULL || uuid == NULL) {
         ALOGV("EffectGetDescriptor() called with NULL pointer");
         return -EINVAL;
     }
@@ -469,7 +441,7 @@
     }
 
     if (desc == NULL) {
-        return  -EINVAL;
+        return -EINVAL;
     }
 
     *pDescriptor = *desc;
@@ -477,15 +449,15 @@
     return 0;
 } /* end EffectGetDescriptor */
 
-void LvmGlobalBundle_init(){
+void LvmGlobalBundle_init() {
     ALOGV("\tLvmGlobalBundle_init start");
-    for(int i=0; i<LVM_MAX_SESSIONS; i++){
-        GlobalSessionMemory[i].bBundledEffectsEnabled   = LVM_FALSE;
-        GlobalSessionMemory[i].bVolumeInstantiated      = LVM_FALSE;
-        GlobalSessionMemory[i].bEqualizerInstantiated   = LVM_FALSE;
-        GlobalSessionMemory[i].bBassInstantiated        = LVM_FALSE;
+    for (int i = 0; i < LVM_MAX_SESSIONS; i++) {
+        GlobalSessionMemory[i].bBundledEffectsEnabled = LVM_FALSE;
+        GlobalSessionMemory[i].bVolumeInstantiated = LVM_FALSE;
+        GlobalSessionMemory[i].bEqualizerInstantiated = LVM_FALSE;
+        GlobalSessionMemory[i].bBassInstantiated = LVM_FALSE;
         GlobalSessionMemory[i].bVirtualizerInstantiated = LVM_FALSE;
-        GlobalSessionMemory[i].pBundledContext          = LVM_NULL;
+        GlobalSessionMemory[i].pBundledContext = LVM_NULL;
 
         SessionIndex[i] = LVM_UNUSED_SESSION;
     }
@@ -504,203 +476,140 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmBundle_init(EffectContext *pContext){
+int LvmBundle_init(EffectContext* pContext) {
     ALOGV("\tLvmBundle_init start");
 
-    pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
-    pContext->config.inputCfg.channels                      = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
-    pContext->config.inputCfg.samplingRate                  = 44100;
-    pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
-    pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
-    pContext->config.inputCfg.bufferProvider.cookie         = NULL;
-    pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
-    pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-    pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
-    pContext->config.outputCfg.samplingRate                 = 44100;
-    pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-    pContext->config.outputCfg.bufferProvider.cookie        = NULL;
-    pContext->config.outputCfg.mask                         = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
     CHECK_ARG(pContext != NULL);
 
-    if (pContext->pBundledContext->hInstance != NULL){
+    if (pContext->pBundledContext->hInstance != NULL) {
         ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
-                "-> Calling pContext->pBassBoost->free()");
-
-        LvmEffect_free(pContext);
+              "-> Calling pContext->pBassBoost->free()");
+        LVM_DelInstanceHandle(&pContext->pBundledContext->hInstance);
 
         ALOGV("\tLvmBundle_init pContext->pBassBoost != NULL "
-                "-> Called pContext->pBassBoost->free()");
+              "-> Called pContext->pBassBoost->free()");
     }
 
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;          /* Function call status */
-    LVM_ControlParams_t     params;                         /* Control Parameters */
-    LVM_InstParams_t        InstParams;                     /* Instance parameters */
-    LVM_EQNB_BandDef_t      BandDefs[MAX_NUM_BANDS];        /* Equaliser band definitions */
-    LVM_HeadroomParams_t    HeadroomParams;                 /* Headroom parameters */
-    LVM_HeadroomBandDef_t   HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
-    LVM_MemTab_t            MemTab;                         /* Memory allocation table */
-    bool                    bMallocFailure = LVM_FALSE;
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_ControlParams_t params;                  /* Control Parameters */
+    LVM_InstParams_t InstParams;                 /* Instance parameters */
+    LVM_EQNB_BandDef_t BandDefs[MAX_NUM_BANDS];  /* Equaliser band definitions */
+    LVM_HeadroomParams_t HeadroomParams;         /* Headroom parameters */
+    LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS];
 
     /* Set the capabilities */
-    InstParams.BufferMode       = LVM_UNMANAGED_BUFFERS;
-    InstParams.MaxBlockSize     = MAX_CALL_SIZE;
-    InstParams.EQNB_NumBands    = MAX_NUM_BANDS;
-    InstParams.PSA_Included     = LVM_PSA_ON;
+    InstParams.BufferMode = LVM_UNMANAGED_BUFFERS;
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.EQNB_NumBands = MAX_NUM_BANDS;
+    InstParams.PSA_Included = LVM_PSA_ON;
 
-    /* Allocate memory, forcing alignment */
-    LvmStatus = LVM_GetMemoryTable(LVM_NULL,
-                                  &MemTab,
-                                  &InstParams);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-
-    ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
-    /* Allocate memory */
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u\n", MemTab.Region[i].Size, i );
-                bMallocFailure = LVM_TRUE;
-            }else{
-                ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
-                        " bytes for region %u at %p\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-
-    /* If one or more of the memory regions failed to allocate, free the regions that were
-     * succesfully allocated and return with an error
-     */
-    if(bMallocFailure == LVM_TRUE){
-        for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u Not freeing\n", MemTab.Region[i].Size, i );
-            }else{
-                ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %" PRIu32
-                     " bytes for region %u at %p- free\n",
-                     MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-                free(MemTab.Region[i].pBaseAddress);
-            }
-        }
-        return -EINVAL;
-    }
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully malloc'd memory\n");
-
-    /* Initialise */
-    pContext->pBundledContext->hInstance = LVM_NULL;
-
-    /* Init sets the instance handle */
-    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance,
-                                      &MemTab,
-                                      &InstParams);
+    LvmStatus = LVM_GetInstanceHandle(&pContext->pBundledContext->hInstance, &InstParams);
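+    // Note: the explicit LVM_GetMemoryTable()/malloc() sequence that used to precede this call
+    // has been dropped; LVM_GetInstanceHandle() is assumed to manage the instance memory
+    // internally now.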
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
 
     /* Set the initial process parameters */
     /* General parameters */
-    params.OperatingMode          = LVM_MODE_ON;
-    params.SampleRate             = LVM_FS_44100;
-    params.SourceFormat           = LVM_STEREO;
-    params.SpeakerType            = LVM_HEADPHONES;
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    params.SourceFormat = LVM_STEREO;
+    params.SpeakerType = LVM_HEADPHONES;
 
     pContext->pBundledContext->SampleRate = LVM_FS_44100;
-#ifdef SUPPORT_MC
     pContext->pBundledContext->ChMask = AUDIO_CHANNEL_OUT_STEREO;
-#endif
 
     /* Concert Sound parameters */
-    params.VirtualizerOperatingMode   = LVM_MODE_OFF;
-    params.VirtualizerType            = LVM_CONCERTSOUND;
-    params.VirtualizerReverbLevel     = 100;
-    params.CS_EffectLevel             = LVM_CS_EFFECT_NONE;
+    params.VirtualizerOperatingMode = LVM_MODE_OFF;
+    params.VirtualizerType = LVM_CONCERTSOUND;
+    params.VirtualizerReverbLevel = 100;
+    params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
 
     /* N-Band Equaliser parameters */
-    params.EQNB_OperatingMode     = LVM_EQNB_OFF;
-    params.EQNB_NBands            = FIVEBAND_NUMBANDS;
-    params.pEQNB_BandDefinition   = &BandDefs[0];
+    params.EQNB_OperatingMode = LVM_EQNB_OFF;
+    params.EQNB_NBands = FIVEBAND_NUMBANDS;
+    params.pEQNB_BandDefinition = &BandDefs[0];
 
-    for (int i=0; i<FIVEBAND_NUMBANDS; i++)
-    {
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
         BandDefs[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-        BandDefs[i].QFactor   = EQNB_5BandPresetsQFactors[i];
-        BandDefs[i].Gain      = EQNB_5BandSoftPresets[i];
+        BandDefs[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        BandDefs[i].Gain = EQNB_5BandSoftPresets[i];
     }
 
     /* Volume Control parameters */
-    params.VC_EffectLevel         = 0;
-    params.VC_Balance             = 0;
+    params.VC_EffectLevel = 0;
+    params.VC_Balance = 0;
 
     /* Treble Enhancement parameters */
-    params.TE_OperatingMode       = LVM_TE_OFF;
-    params.TE_EffectLevel         = 0;
+    params.TE_OperatingMode = LVM_TE_OFF;
+    params.TE_EffectLevel = 0;
 
     /* PSA Control parameters */
-    params.PSA_Enable             = LVM_PSA_OFF;
-    params.PSA_PeakDecayRate      = (LVM_PSA_DecaySpeed_en)0;
+    params.PSA_Enable = LVM_PSA_OFF;
+    params.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0;
 
     /* Bass Enhancement parameters */
-    params.BE_OperatingMode       = LVM_BE_OFF;
-    params.BE_EffectLevel         = 0;
-    params.BE_CentreFreq          = LVM_BE_CENTRE_90Hz;
-    params.BE_HPF                 = LVM_BE_HPF_ON;
+    params.BE_OperatingMode = LVM_BE_OFF;
+    params.BE_EffectLevel = 0;
+    params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params.BE_HPF = LVM_BE_HPF_ON;
 
     /* PSA Control parameters */
-    params.PSA_Enable             = LVM_PSA_OFF;
-    params.PSA_PeakDecayRate      = LVM_PSA_SPEED_MEDIUM;
+    params.PSA_Enable = LVM_PSA_OFF;
+    params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
 
     /* TE Control parameters */
-    params.TE_OperatingMode       = LVM_TE_OFF;
-    params.TE_EffectLevel         = 0;
+    params.TE_OperatingMode = LVM_TE_OFF;
+    params.TE_EffectLevel = 0;
 
-#ifdef SUPPORT_MC
-    params.NrChannels             =
-        audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
-    params.ChMask                 = AUDIO_CHANNEL_OUT_STEREO;
-#endif
+    params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+    params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
     /* Activate the initial settings */
-    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance,
-                                         &params);
+    LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &params);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetControlParameters\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetControlParameters\n");
 
     /* Set the headroom parameters */
-    HeadroomBandDef[0].Limit_Low          = 20;
-    HeadroomBandDef[0].Limit_High         = 4999;
-    HeadroomBandDef[0].Headroom_Offset    = 0;
-    HeadroomBandDef[1].Limit_Low          = 5000;
-    HeadroomBandDef[1].Limit_High         = 24000;
-    HeadroomBandDef[1].Headroom_Offset    = 0;
-    HeadroomParams.pHeadroomDefinition    = &HeadroomBandDef[0];
+    HeadroomBandDef[0].Limit_Low = 20;
+    HeadroomBandDef[0].Limit_High = 4999;
+    HeadroomBandDef[0].Headroom_Offset = 0;
+    HeadroomBandDef[1].Limit_Low = 5000;
+    HeadroomBandDef[1].Limit_High = 24000;
+    HeadroomBandDef[1].Headroom_Offset = 0;
+    HeadroomParams.pHeadroomDefinition = &HeadroomBandDef[0];
     HeadroomParams.Headroom_OperatingMode = LVM_HEADROOM_ON;
-    HeadroomParams.NHeadroomBands         = 2;
+    HeadroomParams.NHeadroomBands = 2;
 
-    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance,
-                                      &HeadroomParams);
+    LvmStatus = LVM_SetHeadroomParams(pContext->pBundledContext->hInstance, &HeadroomParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetHeadroomParams", "LvmBundle_init")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    ALOGV("\tLvmBundle_init CreateInstance Succesfully called LVM_SetHeadroomParams\n");
+    ALOGV("\tLvmBundle_init CreateInstance Successfully called LVM_SetHeadroomParams\n");
     ALOGV("\tLvmBundle_init End");
     return 0;
-}   /* end LvmBundle_init */
+} /* end LvmBundle_init */
 
 //----------------------------------------------------------------------------
 // LvmBundle_process()
@@ -719,25 +628,22 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-int LvmBundle_process(effect_buffer_t  *pIn,
-                      effect_buffer_t  *pOut,
-                      int              frameCount,
-                      EffectContext    *pContext){
-
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    effect_buffer_t         *pOutTmp;
+int LvmBundle_process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount,
+                      EffectContext* pContext) {
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    effect_buffer_t* pOutTmp;
     const LVM_INT32 NrChannels =
-        audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+            audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE){
+    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE) {
         pOutTmp = pOut;
-    } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+    } else if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
         if (pContext->pBundledContext->frameCount != frameCount) {
             if (pContext->pBundledContext->workBuffer != NULL) {
                 free(pContext->pBundledContext->workBuffer);
             }
             pContext->pBundledContext->workBuffer =
-                    (effect_buffer_t *)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
+                    (effect_buffer_t*)calloc(frameCount, sizeof(effect_buffer_t) * NrChannels);
             if (pContext->pBundledContext->workBuffer == NULL) {
                 return -ENOMEM;
             }
@@ -749,7 +655,6 @@
         return -EINVAL;
     }
 
-
     /* Process the samples */
     LvmStatus = LVM_Process(pContext->pBundledContext->hInstance, /* Instance handle */
                             pIn,                                  /* Input buffer */
@@ -757,16 +662,15 @@
                             (LVM_UINT16)frameCount,               /* Number of samples to read */
                             0);                                   /* Audio Time */
     LVM_ERROR_CHECK(LvmStatus, "LVM_Process", "LvmBundle_process")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
+    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
         for (int i = 0; i < frameCount * NrChannels; i++) {
             pOut[i] = pOut[i] + pOutTmp[i];
         }
     }
     return 0;
-}    /* end LvmBundle_process */
+} /* end LvmBundle_process */
 
 //----------------------------------------------------------------------------
 // EqualizerUpdateActiveParams()
@@ -779,29 +683,28 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-void EqualizerUpdateActiveParams(EffectContext *pContext) {
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void EqualizerUpdateActiveParams(EffectContext* pContext) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerUpdateActiveParams")
-    //ALOGV("\tEqualizerUpdateActiveParams Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
+    // ALOGV("\tEqualizerUpdateActiveParams Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
     for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-           ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
-           ActiveParams.pEQNB_BandDefinition[i].QFactor   = EQNB_5BandPresetsQFactors[i];
-           ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
-       }
+        ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+        ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
+        ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
+    }
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerUpdateActiveParams")
-    //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
+    // ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
-
 }
 
 //----------------------------------------------------------------------------
@@ -816,19 +719,19 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-void LvmEffect_limitLevel(EffectContext *pContext) {
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void LvmEffect_limitLevel(EffectContext* pContext) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_limitLevel")
-    //ALOGV("\tLvmEffect_limitLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
+    // ALOGV("\tLvmEffect_limitLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
     int gainCorrection = 0;
-    //Count the energy contribution per band for EQ and BassBoost only if they are active.
+    // Count the energy contribution per band for EQ and BassBoost only if they are active.
     float energyContribution = 0;
     float energyCross = 0;
     float energyBassBoost = 0;
@@ -838,88 +741,83 @@
     bool bbEnabled = pContext->pBundledContext->bBassEnabled == LVM_TRUE;
     bool viEnabled = pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE;
 
-    //EQ contribution
+    // EQ contribution
     if (eqEnabled) {
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+            float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
             float bandCoefficient = LimitLevel_bandEnergyCoefficient[i];
             float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
-            if (bandEnergy > 0)
-                energyContribution += bandEnergy;
+            if (bandEnergy > 0) energyContribution += bandEnergy;
         }
 
-        //cross EQ coefficients
+        // cross EQ coefficients
         float bandFactorSum = 0;
-        for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) {
-            float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0;
-            float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0;
+        for (int i = 0; i < FIVEBAND_NUMBANDS - 1; i++) {
+            float bandFactor1 = pContext->pBundledContext->bandGaindB[i] / 15.0;
+            float bandFactor2 = pContext->pBundledContext->bandGaindB[i + 1] / 15.0;
 
             if (bandFactor1 > 0 && bandFactor2 > 0) {
-                float crossEnergy = bandFactor1 * bandFactor2 *
-                        LimitLevel_bandEnergyCrossCoefficient[i];
+                float crossEnergy =
+                        bandFactor1 * bandFactor2 * LimitLevel_bandEnergyCrossCoefficient[i];
                 bandFactorSum += bandFactor1 * bandFactor2;
 
-                if (crossEnergy > 0)
-                    energyCross += crossEnergy;
+                if (crossEnergy > 0) energyCross += crossEnergy;
             }
         }
         bandFactorSum -= 1.0;
-        if (bandFactorSum > 0)
-          crossCorrection = bandFactorSum * 0.7;
+        if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
     }
 
-    //BassBoost contribution
+    // BassBoost contribution
     if (bbEnabled) {
-        float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0;
+        float boostFactor = (pContext->pBundledContext->BassStrengthSaved) / 1000.0;
         float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient;
 
         energyContribution += boostFactor * boostCoefficient * boostCoefficient;
 
         if (eqEnabled) {
             for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-                float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+                float bandFactor = pContext->pBundledContext->bandGaindB[i] / 15.0;
                 float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i];
-                float bandEnergy = boostFactor * bandFactor *
-                    bandCrossCoefficient;
-                if (bandEnergy > 0)
-                  energyBassBoost += bandEnergy;
+                float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+                if (bandEnergy > 0) energyBassBoost += bandEnergy;
             }
         }
     }
 
-    //Virtualizer contribution
+    // Virtualizer contribution
     if (viEnabled) {
-        energyContribution += LimitLevel_virtualizerContribution *
-                LimitLevel_virtualizerContribution;
+        energyContribution +=
+                LimitLevel_virtualizerContribution * LimitLevel_virtualizerContribution;
     }
 
-    double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) -
-            crossCorrection;
+    double totalEnergyEstimation =
+            sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
     ALOGV(" TOTAL energy estimation: %0.2f dB", totalEnergyEstimation);
 
-    //roundoff
+    // roundoff
     int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
     if (maxLevelRound + pContext->pBundledContext->volume > 0) {
         gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
     }
 
-    ActiveParams.VC_EffectLevel  = pContext->pBundledContext->volume - gainCorrection;
+    ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
     if (ActiveParams.VC_EffectLevel < -96) {
         ActiveParams.VC_EffectLevel = -96;
     }
     ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume,
-            gainCorrection, ActiveParams.VC_EffectLevel);
+          gainCorrection, ActiveParams.VC_EffectLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_limitLevel")
 
     ALOGV("LVM_SetControlParameters return:%d", (int)LvmStatus);
-    //ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
+    // ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
     //          ActiveParams.pEQNB_BandDefinition[band].Gain);
 
-    //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
-    if (pContext->pBundledContext->firstVolume == LVM_TRUE){
+    // ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
+    if (pContext->pBundledContext->firstVolume == LVM_TRUE) {
         LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
         ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
@@ -939,42 +837,41 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmEffect_enable(EffectContext *pContext){
-    //ALOGV("\tLvmEffect_enable start");
+int LvmEffect_enable(EffectContext* pContext) {
+    // ALOGV("\tLvmEffect_enable start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_enable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-    //ALOGV("\tLvmEffect_enable Succesfully called LVM_GetControlParameters\n");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    // ALOGV("\tLvmEffect_enable Successfully called LVM_GetControlParameters\n");
 
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_BASS_BOOST");
-        ActiveParams.BE_OperatingMode       = LVM_BE_ON;
+        ActiveParams.BE_OperatingMode = LVM_BE_ON;
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER) {
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_VIRTUALIZER");
-        ActiveParams.VirtualizerOperatingMode   = LVM_MODE_ON;
+        ActiveParams.VirtualizerOperatingMode = LVM_MODE_ON;
     }
-    if(pContext->EffectType == LVM_EQUALIZER) {
+    if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_EQUALIZER");
-        ActiveParams.EQNB_OperatingMode     = LVM_EQNB_ON;
+        ActiveParams.EQNB_OperatingMode = LVM_EQNB_ON;
     }
-    if(pContext->EffectType == LVM_VOLUME) {
+    if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tLvmEffect_enable : Enabling LVM_VOLUME");
     }
 
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_enable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tLvmEffect_enable Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tLvmEffect_enable end");
+    // ALOGV("\tLvmEffect_enable Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tLvmEffect_enable end");
     LvmEffect_limitLevel(pContext);
     return 0;
 }
@@ -991,82 +888,45 @@
 //
 //----------------------------------------------------------------------------
 
-int LvmEffect_disable(EffectContext *pContext){
-    //ALOGV("\tLvmEffect_disable start");
+int LvmEffect_disable(EffectContext* pContext) {
+    // ALOGV("\tLvmEffect_disable start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_disable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-    //ALOGV("\tLvmEffect_disable Succesfully called LVM_GetControlParameters\n");
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+    // ALOGV("\tLvmEffect_disable Successfully called LVM_GetControlParameters\n");
 
-    if(pContext->EffectType == LVM_BASS_BOOST) {
+    if (pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_BASS_BOOST");
-        ActiveParams.BE_OperatingMode       = LVM_BE_OFF;
+        ActiveParams.BE_OperatingMode = LVM_BE_OFF;
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER) {
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_VIRTUALIZER");
-        ActiveParams.VirtualizerOperatingMode   = LVM_MODE_OFF;
+        ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF;
     }
-    if(pContext->EffectType == LVM_EQUALIZER) {
+    if (pContext->EffectType == LVM_EQUALIZER) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_EQUALIZER");
-        ActiveParams.EQNB_OperatingMode     = LVM_EQNB_OFF;
+        ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF;
     }
-    if(pContext->EffectType == LVM_VOLUME) {
+    if (pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tLvmEffect_disable : Disabling LVM_VOLUME");
     }
 
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_disable")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tLvmEffect_disable Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tLvmEffect_disable end");
+    // ALOGV("\tLvmEffect_disable Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tLvmEffect_disable end");
     LvmEffect_limitLevel(pContext);
     return 0;
 }
 
 //----------------------------------------------------------------------------
-// LvmEffect_free()
-//----------------------------------------------------------------------------
-// Purpose: Free all memory associated with the Bundle.
-//
-// Inputs:
-//  pContext:   effect engine context
-//
-// Outputs:
-//
-//----------------------------------------------------------------------------
-
-void LvmEffect_free(EffectContext *pContext){
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;         /* Function call status */
-    LVM_MemTab_t            MemTab;
-
-    /* Free the algorithm memory */
-    LvmStatus = LVM_GetMemoryTable(pContext->pBundledContext->hInstance,
-                                   &MemTab,
-                                   LVM_NULL);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "LvmEffect_free")
-
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            if (MemTab.Region[i].pBaseAddress != NULL){
-                free(MemTab.Region[i].pBaseAddress);
-            }else{
-                ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %" PRIu32
-                        " bytes for region %u at %p ERROR\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-}    /* end LvmEffect_free */
-
-//----------------------------------------------------------------------------
 // Effect_setConfig()
 //----------------------------------------------------------------------------
 // Purpose: Set input and output audio configuration.
@@ -1080,9 +940,9 @@
 //
 //----------------------------------------------------------------------------
 
-int Effect_setConfig(EffectContext *pContext, effect_config_t *pConfig){
-    LVM_Fs_en   SampleRate;
-    //ALOGV("\tEffect_setConfig start");
+int Effect_setConfig(EffectContext* pContext, effect_config_t* pConfig) {
+    LVM_Fs_en SampleRate;
+    // ALOGV("\tEffect_setConfig start");
 
     CHECK_ARG(pContext != NULL);
     CHECK_ARG(pConfig != NULL);
@@ -1090,107 +950,64 @@
     CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
     CHECK_ARG(pConfig->inputCfg.channels == pConfig->outputCfg.channels);
     CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
-#ifdef SUPPORT_MC
     CHECK_ARG(audio_channel_count_from_out_mask(pConfig->inputCfg.channels) <= LVM_MAX_CHANNELS);
-#else
-    CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
-#endif
-    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
-              || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+              pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
     pContext->config = *pConfig;
     const LVM_INT16 NrChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
 
-    switch (pConfig->inputCfg.samplingRate) {
-    case 8000:
-        SampleRate = LVM_FS_8000;
-        pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
-        break;
-    case 16000:
-        SampleRate = LVM_FS_16000;
-        pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
-        break;
-    case 22050:
-        SampleRate = LVM_FS_22050;
-        pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
-        break;
-    case 32000:
-        SampleRate = LVM_FS_32000;
-        pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
-        break;
-    case 44100:
-        SampleRate = LVM_FS_44100;
-        pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
-        break;
-    case 48000:
-        SampleRate = LVM_FS_48000;
-        pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
-        break;
-    case 88200:
-        SampleRate = LVM_FS_88200;
-        pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
-        break;
-    case 96000:
-        SampleRate = LVM_FS_96000;
-        pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
-        break;
-    case 176400:
-        SampleRate = LVM_FS_176400;
-        pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
-        break;
-    case 192000:
-        SampleRate = LVM_FS_192000;
-        pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
-        break;
-    default:
-        ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+    SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+    if (SampleRate == LVM_FS_INVALID) {
+        ALOGV("Effect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
         return -EINVAL;
     }
+    pContext->pBundledContext->SamplesPerSecond = pConfig->inputCfg.samplingRate * NrChannels;
 
-#ifdef SUPPORT_MC
     if (pContext->pBundledContext->SampleRate != SampleRate ||
         pContext->pBundledContext->ChMask != pConfig->inputCfg.channels) {
-#else
-    if(pContext->pBundledContext->SampleRate != SampleRate){
-#endif
-
-        LVM_ControlParams_t     ActiveParams;
-        LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;
+        LVM_ControlParams_t ActiveParams;
+        LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS;
 
         ALOGV("\tEffect_setConfig change sampling rate to %d", SampleRate);
 
         /* Get the current settings */
-        LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+        LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "Effect_setConfig")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
         ActiveParams.SampleRate = SampleRate;
 
-#ifdef SUPPORT_MC
         ActiveParams.NrChannels = NrChannels;
         ActiveParams.ChMask = pConfig->inputCfg.channels;
-#endif
+
+        if (NrChannels == 1) {
+            ActiveParams.SourceFormat = LVM_MONO;
+        } else if (NrChannels == 2) {
+            ActiveParams.SourceFormat = LVM_STEREO;
+        } else if (NrChannels > 2 && NrChannels <= LVM_MAX_CHANNELS) {
+            ActiveParams.SourceFormat = LVM_MULTICHANNEL;
+        } else {
+            return -EINVAL;
+        }
 
         LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "Effect_setConfig")
-        ALOGV("\tEffect_setConfig Succesfully called LVM_SetControlParameters\n");
+        ALOGV("\tEffect_setConfig Successfully called LVM_SetControlParameters\n");
         pContext->pBundledContext->SampleRate = SampleRate;
-#ifdef SUPPORT_MC
         pContext->pBundledContext->ChMask = pConfig->inputCfg.channels;
-#endif
 
         LvmEffect_limitLevel(pContext);
 
-    }else{
-        //ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
+    } else {
+        // ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate);
     }
 
-    //ALOGV("\tEffect_setConfig End....");
+    // ALOGV("\tEffect_setConfig End....");
     return 0;
-}   /* end Effect_setConfig */
+} /* end Effect_setConfig */
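The sample-rate switch removed above is replaced by a call to lvmFsForSampleRate(), whose definition is outside this hunk. Based on the rates the old switch handled, it presumably behaves roughly like the sketch below; treat this as an illustration of the mapping, not the actual helper or its exact signature.

// Illustrative only: a mapping consistent with the switch this change removes.
static LVM_Fs_en lvmFsForSampleRateSketch(uint32_t sampleRate) {
    switch (sampleRate) {
        case 8000:   return LVM_FS_8000;
        case 16000:  return LVM_FS_16000;
        case 22050:  return LVM_FS_22050;
        case 32000:  return LVM_FS_32000;
        case 44100:  return LVM_FS_44100;
        case 48000:  return LVM_FS_48000;
        case 88200:  return LVM_FS_88200;
        case 96000:  return LVM_FS_96000;
        case 176400: return LVM_FS_176400;
        case 192000: return LVM_FS_192000;
        default:     return LVM_FS_INVALID;  // Effect_setConfig() then rejects with -EINVAL
    }
}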
 
 //----------------------------------------------------------------------------
 // Effect_getConfig()
@@ -1206,10 +1023,9 @@
 //
 //----------------------------------------------------------------------------
 
-void Effect_getConfig(EffectContext *pContext, effect_config_t *pConfig)
-{
+void Effect_getConfig(EffectContext* pContext, effect_config_t* pConfig) {
     *pConfig = pContext->config;
-}   /* end Effect_getConfig */
+} /* end Effect_getConfig */
 
 //----------------------------------------------------------------------------
 // BassGetStrength()
@@ -1225,32 +1041,31 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t BassGetStrength(EffectContext *pContext){
-    //ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
+uint32_t BassGetStrength(EffectContext* pContext) {
+    // ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved);
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassGetStrength")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tBassGetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tBassGetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Check that the strength returned matches the strength that was set earlier */
-    if(ActiveParams.BE_EffectLevel !=
-       (LVM_INT16)((15*pContext->pBundledContext->BassStrengthSaved)/1000)){
+    if (ActiveParams.BE_EffectLevel !=
+        (LVM_INT16)((15 * pContext->pBundledContext->BassStrengthSaved) / 1000)) {
         ALOGV("\tLVM_ERROR : BassGetStrength module strength does not match savedStrength %d %d\n",
-                ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
+              ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved);
         return -EINVAL;
     }
 
-    //ALOGV("\tBassGetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
-    //ALOGV("\tBassGetStrength() (saved)  -> %d\n", pContext->pBundledContext->BassStrengthSaved );
+    // ALOGV("\tBassGetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
+    // ALOGV("\tBassGetStrength() (saved)  -> %d\n", pContext->pBundledContext->BassStrengthSaved );
     return pContext->pBundledContext->BassStrengthSaved;
-}    /* end BassGetStrength */
+} /* end BassGetStrength */
 
 //----------------------------------------------------------------------------
 // BassSetStrength()
@@ -1264,35 +1079,34 @@
 //
 //----------------------------------------------------------------------------
 
-void BassSetStrength(EffectContext *pContext, uint32_t strength){
-    //ALOGV("\tBassSetStrength(%d)", strength);
+void BassSetStrength(EffectContext* pContext, uint32_t strength) {
+    // ALOGV("\tBassSetStrength(%d)", strength);
 
     pContext->pBundledContext->BassStrengthSaved = (int)strength;
 
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "BassSetStrength")
-    //ALOGV("\tBassSetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tBassSetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Bass Enhancement parameters */
-    ActiveParams.BE_EffectLevel    = (LVM_INT16)((15*strength)/1000);
-    ActiveParams.BE_CentreFreq     = LVM_BE_CENTRE_90Hz;
+    ActiveParams.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+    ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
 
-    //ALOGV("\tBassSetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
+    // ALOGV("\tBassSetStrength() (0-15)   -> %d\n", ActiveParams.BE_EffectLevel );
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "BassSetStrength")
-    //ALOGV("\tBassSetStrength Succesfully called LVM_SetControlParameters\n");
+    // ALOGV("\tBassSetStrength Successfully called LVM_SetControlParameters\n");
 
     LvmEffect_limitLevel(pContext);
-}    /* end BassSetStrength */
+} /* end BassSetStrength */
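As a quick sanity check of the strength mapping used above (and cross-checked in BassGetStrength()), the 0-1000 API range collapses onto 0-15 with integer truncation:

// BE_EffectLevel = (15 * strength) / 1000:
//   strength 1000 -> 15        strength 500 -> 7  (7.5 truncated)
//   strength  250 ->  3        strength   0 -> 0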
 
 //----------------------------------------------------------------------------
 // VirtualizerGetStrength()
@@ -1308,21 +1122,23 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t VirtualizerGetStrength(EffectContext *pContext){
-    //ALOGV("\tVirtualizerGetStrength (0-1000) -> %d\n",pContext->pBundledContext->VirtStrengthSaved);
+uint32_t VirtualizerGetStrength(EffectContext* pContext) {
+    // ALOGV("\tVirtualizerGetStrength (0-1000) ->
+    // %d\n",pContext->pBundledContext->VirtStrengthSaved);
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerGetStrength")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVirtualizerGetStrength Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tVirtualizerGetStrength() (0-100)   -> %d\n", ActiveParams.VirtualizerReverbLevel*10);
+    // ALOGV("\tVirtualizerGetStrength Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVirtualizerGetStrength() (0-100)   -> %d\n",
+    // ActiveParams.VirtualizerReverbLevel*10);
     return pContext->pBundledContext->VirtStrengthSaved;
-}    /* end getStrength */
+} /* end getStrength */
 
 //----------------------------------------------------------------------------
 // VirtualizerSetStrength()
@@ -1336,31 +1152,31 @@
 //
 //----------------------------------------------------------------------------
 
-void VirtualizerSetStrength(EffectContext *pContext, uint32_t strength){
-    //ALOGV("\tVirtualizerSetStrength(%d)", strength);
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+void VirtualizerSetStrength(EffectContext* pContext, uint32_t strength) {
+    // ALOGV("\tVirtualizerSetStrength(%d)", strength);
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     pContext->pBundledContext->VirtStrengthSaved = (int)strength;
 
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VirtualizerSetStrength")
-    //ALOGV("\tVirtualizerSetStrength Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVirtualizerSetStrength Successfully returned from LVM_GetControlParameters\n");
 
     /* Virtualizer parameters */
-    ActiveParams.CS_EffectLevel             = (int)((strength*32767)/1000);
+    ActiveParams.CS_EffectLevel = (int)((strength * 32767) / 1000);
 
-    ALOGV("\tVirtualizerSetStrength() (0-1000)   -> %d\n", strength );
-    ALOGV("\tVirtualizerSetStrength() (0- 100)   -> %d\n", ActiveParams.CS_EffectLevel );
+    ALOGV("\tVirtualizerSetStrength() (0-1000)   -> %d\n", strength);
+    ALOGV("\tVirtualizerSetStrength() (0- 100)   -> %d\n", ActiveParams.CS_EffectLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VirtualizerSetStrength")
-    //ALOGV("\tVirtualizerSetStrength Succesfully called LVM_SetControlParameters\n\n");
+    // ALOGV("\tVirtualizerSetStrength Successfully called LVM_SetControlParameters\n\n");
     LvmEffect_limitLevel(pContext);
-}    /* end setStrength */
+} /* end setStrength */
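The virtualizer takes the same 0-1000 API range but spreads it over 0-32767. A hypothetical standalone helper mirroring the conversion above (the real code writes CS_EffectLevel in place):

#include <cstdint>

// 0-1000 maps onto 0-32767 with integer truncation, e.g. 1000 -> 32767, 500 -> 16383.
static int virtStrengthToEffectLevel(uint32_t strength) {
    return (int)((strength * 32767) / 1000);
}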
 
 //----------------------------------------------------------------------------
 // VirtualizerIsDeviceSupported()
@@ -1376,14 +1192,14 @@
 //----------------------------------------------------------------------------
 int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
     switch (deviceType) {
-    case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-    case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-    case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-    case AUDIO_DEVICE_OUT_USB_HEADSET:
-    // case AUDIO_DEVICE_OUT_USB_DEVICE:  // For USB testing of the virtualizer only.
-        return 0;
-    default :
-        return -EINVAL;
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+            // case AUDIO_DEVICE_OUT_USB_DEVICE:  // For USB testing of the virtualizer only.
+            return 0;
+        default:
+            return -EINVAL;
     }
 }
 
@@ -1401,9 +1217,9 @@
 //  0            if the configuration is supported
 //----------------------------------------------------------------------------
 int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask,
-        audio_devices_t deviceType) {
+                                        audio_devices_t deviceType) {
     uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
-    if (channelCount < 1 || channelCount > FCC_2) { // TODO: update to 8 channels when supported.
+    if (channelCount < 1 || channelCount > FCC_2) {  // TODO: update to 8 channels when supported.
         return -EINVAL;
     }
     return VirtualizerIsDeviceSupported(deviceType);
@@ -1423,16 +1239,16 @@
 //  0            if the device is supported and the virtualization mode forced
 //
 //----------------------------------------------------------------------------
-int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t forcedDevice) {
+int VirtualizerForceVirtualizationMode(EffectContext* pContext, audio_devices_t forcedDevice) {
     ALOGV("VirtualizerForceVirtualizationMode: forcedDev=0x%x enabled=%d tmpDisabled=%d",
-            forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
-            pContext->pBundledContext->bVirtualizerTempDisabled);
+          forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
+          pContext->pBundledContext->bVirtualizerTempDisabled);
     int status = 0;
     bool useVirtualizer = false;
 
     if (VirtualizerIsDeviceSupported(forcedDevice) != 0) {
         if (forcedDevice != AUDIO_DEVICE_NONE) {
-            //forced device is not supported, make it behave as a reset of forced mode
+            // forced device is not supported, make it behave as a reset of forced mode
             forcedDevice = AUDIO_DEVICE_NONE;
             // but return an error
             status = -EINVAL;
@@ -1472,8 +1288,8 @@
     }
 
     ALOGV("\tafter VirtualizerForceVirtualizationMode: enabled=%d tmpDisabled=%d",
-            pContext->pBundledContext->bVirtualizerEnabled,
-            pContext->pBundledContext->bVirtualizerTempDisabled);
+          pContext->pBundledContext->bVirtualizerEnabled,
+          pContext->pBundledContext->bVirtualizerTempDisabled);
 
     return status;
 }
@@ -1499,23 +1315,24 @@
 //
 //----------------------------------------------------------------------------
 void VirtualizerGetSpeakerAngles(audio_channel_mask_t channelMask,
-        audio_devices_t deviceType __unused, int32_t *pSpeakerAngles) {
+                                 audio_devices_t /* deviceType __unused */,
+                                 int32_t* pSpeakerAngles) {
     // the channel count is guaranteed to be 1 or 2
     // the device is guaranteed to be of type headphone
     // this virtualizer is always using 2 virtual speakers at -90 and 90deg of azimuth, 0deg of
     // elevation but the return information is sized for nbChannels * 3, so we have to consider
     // the (false here) case of a single channel, and return only 3 fields.
     if (audio_channel_count_from_out_mask(channelMask) == 1) {
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_MONO; // same as FRONT_LEFT
-        *pSpeakerAngles++ = 0; // azimuth
-        *pSpeakerAngles = 0; // elevation
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_MONO;  // same as FRONT_LEFT
+        *pSpeakerAngles++ = 0;                                // azimuth
+        *pSpeakerAngles = 0;                                  // elevation
     } else {
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_LEFT;
-        *pSpeakerAngles++ = -90; // azimuth
-        *pSpeakerAngles++ = 0;   // elevation
-        *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_RIGHT;
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_LEFT;
+        *pSpeakerAngles++ = -90;  // azimuth
+        *pSpeakerAngles++ = 0;    // elevation
+        *pSpeakerAngles++ = (int32_t)AUDIO_CHANNEL_OUT_FRONT_RIGHT;
         *pSpeakerAngles++ = 90;  // azimuth
-        *pSpeakerAngles   = 0;   // elevation
+        *pSpeakerAngles = 0;     // elevation
     }
 }
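The angles are returned as (channel mask, azimuth, elevation) triplets, one per channel. A minimal consumer sketch, assuming the caller sized the array for the channel count as described above:

#include <cstdint>
#include <cstdio>

// Reads the triplets written by VirtualizerGetSpeakerAngles(); for a stereo mask the
// array holds {FRONT_LEFT, -90, 0, FRONT_RIGHT, 90, 0}.
static void dumpSpeakerAngles(const int32_t* angles, int channelCount) {
    for (int i = 0; i < channelCount; i++) {
        printf("channel mask 0x%x: azimuth %d deg, elevation %d deg\n",
               (unsigned)angles[3 * i], (int)angles[3 * i + 1], (int)angles[3 * i + 2]);
    }
}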
 
@@ -1529,10 +1346,10 @@
 //   AUDIO_DEVICE_NONE if the effect is not virtualizing
 //   or the device type if the effect is virtualizing
 //----------------------------------------------------------------------------
-audio_devices_t VirtualizerGetVirtualizationMode(EffectContext *pContext) {
+audio_devices_t VirtualizerGetVirtualizationMode(EffectContext* pContext) {
     audio_devices_t virtDevice = AUDIO_DEVICE_NONE;
-    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE)
-            && (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
+    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) &&
+        (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
         if (pContext->pBundledContext->nVirtualizerForcedDevice != AUDIO_DEVICE_NONE) {
             // virtualization mode is forced, return that device
             virtDevice = pContext->pBundledContext->nVirtualizerForcedDevice;
@@ -1557,8 +1374,8 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBandLevel(EffectContext *pContext, int32_t band){
-    //ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
+int32_t EqualizerGetBandLevel(EffectContext* pContext, int32_t band) {
+    // ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] );
     return pContext->pBundledContext->bandGaindB[band] * 100;
 }
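Band levels cross this API in millibels: the getter above scales the stored whole-dB gain by 100, and EqualizerSetBandLevel() below rounds millibels back to the nearest dB. A few worked values, assuming the same arithmetic:

// Millibel <-> dB round trip:
//   stored gain    3 dB  -> reported     300 mB
//   request     +250 mB  -> (250 + 50) / 100  =  3 dB
//   request     -250 mB  -> (-250 - 50) / 100 = -3 dB
//   request     +249 mB  -> (249 + 50) / 100  =  2 dB  (integer truncation)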
 
@@ -1576,14 +1393,14 @@
 // Outputs:
 //
 //---------------------------------------------------------------------------
-void EqualizerSetBandLevel(EffectContext *pContext, int band, short Gain){
+void EqualizerSetBandLevel(EffectContext* pContext, int band, short Gain) {
     int gainRounded;
-    if(Gain > 0){
-        gainRounded = (int)((Gain+50)/100);
-    }else{
-        gainRounded = (int)((Gain-50)/100);
+    if (Gain > 0) {
+        gainRounded = (int)((Gain + 50) / 100);
+    } else {
+        gainRounded = (int)((Gain - 50) / 100);
     }
-    //ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
+    // ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded);
     pContext->pBundledContext->bandGaindB[band] = gainRounded;
     pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
 
@@ -1603,23 +1420,22 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetCentreFrequency(EffectContext *pContext, int32_t band){
-    int32_t Frequency =0;
+int32_t EqualizerGetCentreFrequency(EffectContext* pContext, int32_t band) {
+    int32_t Frequency = 0;
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_EQNB_BandDef_t      *BandDef;
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_EQNB_BandDef_t* BandDef;
     /* Get the current settings */
-    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance,
-                                         &ActiveParams);
+    LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerGetCentreFrequency")
 
-    BandDef   = ActiveParams.pEQNB_BandDefinition;
-    Frequency = (int32_t)BandDef[band].Frequency*1000;     // Convert to millibels
+    BandDef = ActiveParams.pEQNB_BandDefinition;
+    Frequency = (int32_t)BandDef[band].Frequency * 1000;  // Convert Hz to milliHz
 
-    //ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
-    //ALOGV("\tEqualizerGetCentreFrequency Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency );
+    // ALOGV("\tEqualizerGetCentreFrequency Successfully returned from LVM_GetControlParameters\n");
     return Frequency;
 }
 
@@ -1641,10 +1457,10 @@
 //  pLow:       lower band range
 //  pHi:        upper band range
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBandFreqRange(EffectContext *pContext __unused, int32_t band, uint32_t *pLow,
-                                  uint32_t *pHi){
+int32_t EqualizerGetBandFreqRange(EffectContext* /* pContext __unused */, int32_t band,
+                                  uint32_t* pLow, uint32_t* pHi) {
     *pLow = bandFreqRange[band][0];
-    *pHi  = bandFreqRange[band][1];
+    *pHi = bandFreqRange[band][1];
     return 0;
 }
 
@@ -1665,16 +1481,16 @@
 //  band index that contains targetFreq, or -EINVAL if targetFreq is below the lowest band
 //----------------------------------------------------------------------------
-int32_t EqualizerGetBand(EffectContext *pContext __unused, uint32_t targetFreq){
+int32_t EqualizerGetBand(EffectContext* /* pContext __unused */, uint32_t targetFreq) {
     int band = 0;
 
-    if(targetFreq < bandFreqRange[0][0]){
+    if (targetFreq < bandFreqRange[0][0]) {
         return -EINVAL;
-    }else if(targetFreq == bandFreqRange[0][0]){
+    } else if (targetFreq == bandFreqRange[0][0]) {
         return 0;
     }
-    for(int i=0; i<FIVEBAND_NUMBANDS;i++){
-        if((targetFreq > bandFreqRange[i][0])&&(targetFreq <= bandFreqRange[i][1])){
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+        if ((targetFreq > bandFreqRange[i][0]) && (targetFreq <= bandFreqRange[i][1])) {
             band = i;
         }
     }
@@ -1694,7 +1510,7 @@
 //  pContext:    effect engine context
 //
 //----------------------------------------------------------------------------
-int32_t EqualizerGetPreset(EffectContext *pContext){
+int32_t EqualizerGetPreset(EffectContext* pContext) {
     return pContext->pBundledContext->CurPreset;
 }
 
@@ -1711,14 +1527,12 @@
 //  preset       The preset ID.
 //
 //----------------------------------------------------------------------------
-void EqualizerSetPreset(EffectContext *pContext, int preset){
-
-    //ALOGV("\tEqualizerSetPreset(%d)", preset);
+void EqualizerSetPreset(EffectContext* pContext, int preset) {
+    // ALOGV("\tEqualizerSetPreset(%d)", preset);
     pContext->pBundledContext->CurPreset = preset;
 
-    //ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
-    for (int i=0; i<FIVEBAND_NUMBANDS; i++)
-    {
+    // ActiveParams.pEQNB_BandDefinition = &BandDefs[0];
+    for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
         pContext->pBundledContext->bandGaindB[i] =
                 EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS];
     }
@@ -1726,11 +1540,11 @@
     EqualizerUpdateActiveParams(pContext);
     LvmEffect_limitLevel(pContext);
 
-    //ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n");
+    // ALOGV("\tEqualizerSetPreset Successfully called LVM_SetControlParameters\n");
     return;
 }
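The preset gains live in one flat array with FIVEBAND_NUMBANDS entries per preset, which is what the i + preset * FIVEBAND_NUMBANDS indexing above walks:

// EQNB_5BandSoftPresets is flattened row-major: band i of preset p sits at index
// i + p * FIVEBAND_NUMBANDS, so preset 0 occupies indices 0..4, preset 1 indices 5..9, etc.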
 
-int32_t EqualizerGetNumPresets(){
+int32_t EqualizerGetNumPresets() {
     return sizeof(gEqualizerPresets) / sizeof(PresetConfig);
 }
 
@@ -1745,14 +1559,14 @@
 // preset       The preset ID. Must be less than number of presets.
 //
 //-------------------------------------------------------------------------
-const char * EqualizerGetPresetName(int32_t preset){
-    //ALOGV("\tEqualizerGetPresetName start(%d)", preset);
+const char* EqualizerGetPresetName(int32_t preset) {
+    // ALOGV("\tEqualizerGetPresetName start(%d)", preset);
     if (preset == PRESET_CUSTOM) {
         return "Custom";
     } else {
         return gEqualizerPresets[preset].name;
     }
-    //ALOGV("\tEqualizerGetPresetName end(%d)", preset);
+    // ALOGV("\tEqualizerGetPresetName end(%d)", preset);
     return 0;
 }
 
@@ -1767,8 +1581,7 @@
 //
 //----------------------------------------------------------------------------
 
-int VolumeSetVolumeLevel(EffectContext *pContext, int16_t level){
-
+int VolumeSetVolumeLevel(EffectContext* pContext, int16_t level) {
     if (level > 0 || level < -9600) {
         return -EINVAL;
     }
@@ -1782,7 +1595,7 @@
     LvmEffect_limitLevel(pContext);
 
     return 0;
-}    /* end VolumeSetVolumeLevel */
+} /* end VolumeSetVolumeLevel */
 
 //----------------------------------------------------------------------------
 // VolumeGetVolumeLevel()
@@ -1794,15 +1607,14 @@
 //
 //----------------------------------------------------------------------------
 
-int VolumeGetVolumeLevel(EffectContext *pContext, int16_t *level){
-
+int VolumeGetVolumeLevel(EffectContext* pContext, int16_t* level) {
     if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
         *level = pContext->pBundledContext->levelSaved * 100;
     } else {
         *level = pContext->pBundledContext->volume * 100;
     }
     return 0;
-}    /* end VolumeGetVolumeLevel */
+} /* end VolumeGetVolumeLevel */
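Volume likewise crosses the API in hundredths of a dB: the getter above scales the stored dB value by 100, and VolumeSetVolumeLevel() accepts only 0 down to -9600 (the -96 dB floor). For example:

//   stored volume   -24 dB  -> reported level -2400
//   requested level  -9600  -> accepted (exactly the -96 dB floor)
//   requested level  -9700 or +10 -> rejected with -EINVAL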
 
 //----------------------------------------------------------------------------
 // VolumeSetMute()
@@ -1815,23 +1627,23 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t VolumeSetMute(EffectContext *pContext, uint32_t mute){
-    //ALOGV("\tVolumeSetMute start(%d)", mute);
+int32_t VolumeSetMute(EffectContext* pContext, uint32_t mute) {
+    // ALOGV("\tVolumeSetMute start(%d)", mute);
 
     pContext->pBundledContext->bMuteEnabled = mute;
 
     /* Set appropriate volume level */
-    if(pContext->pBundledContext->bMuteEnabled == LVM_TRUE){
+    if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) {
         pContext->pBundledContext->levelSaved = pContext->pBundledContext->volume;
         pContext->pBundledContext->volume = -96;
-    }else{
+    } else {
         pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved;
     }
 
     LvmEffect_limitLevel(pContext);
 
     return 0;
-}    /* end setMute */
+} /* end setMute */
 
 //----------------------------------------------------------------------------
 // VolumeGetMute()
@@ -1845,26 +1657,25 @@
 //  mute:       enable/disable flag
 //----------------------------------------------------------------------------
 
-int32_t VolumeGetMute(EffectContext *pContext, uint32_t *mute){
-    //ALOGV("\tVolumeGetMute start");
-    if((pContext->pBundledContext->bMuteEnabled == LVM_FALSE)||
-       (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)){
+int32_t VolumeGetMute(EffectContext* pContext, uint32_t* mute) {
+    // ALOGV("\tVolumeGetMute start");
+    if ((pContext->pBundledContext->bMuteEnabled == LVM_FALSE) ||
+        (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)) {
         *mute = pContext->pBundledContext->bMuteEnabled;
         return 0;
-    }else{
+    } else {
         ALOGV("\tLVM_ERROR : VolumeGetMute read an invalid value from context %d",
               pContext->pBundledContext->bMuteEnabled);
         return -EINVAL;
     }
-    //ALOGV("\tVolumeGetMute end");
-}    /* end getMute */
+    // ALOGV("\tVolumeGetMute end");
+} /* end getMute */
 
-int16_t VolumeConvertStereoPosition(int16_t position){
+int16_t VolumeConvertStereoPosition(int16_t position) {
     int16_t convertedPosition = 0;
 
-    convertedPosition = (int16_t)(((float)position/1000)*96);
+    convertedPosition = (int16_t)(((float)position / 1000) * 96);
     return convertedPosition;
-
 }
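VolumeConvertStereoPosition() maps the API's -1000..+1000 position onto the -96..+96 range used by VC_Balance; with the float scaling and int16_t cast, some quick values:

//   position +1000 -> +96        position -500 -> -48
//   position  +250 -> +24        position   +1 ->   0  (0.096 truncates to 0)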
 
 //----------------------------------------------------------------------------
@@ -1879,55 +1690,55 @@
 // Outputs:
 //----------------------------------------------------------------------------
 
-int VolumeSetStereoPosition(EffectContext *pContext, int16_t position){
-
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
-    LVM_INT16               Balance = 0;
+int VolumeSetStereoPosition(EffectContext* pContext, int16_t position) {
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_INT16 Balance = 0;
 
     pContext->pBundledContext->positionSaved = position;
     Balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
 
-    //ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+    // pContext->pBundledContext->positionSaved);
 
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
-
-        //ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+        // ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance);
         pContext->pBundledContext->positionSaved = position;
         /* Get the current settings */
         LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-        //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got:"
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+        // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters
+        // got:"
         //     " %d\n", ActiveParams.VC_Balance);
 
         /* Volume parameters */
-        ActiveParams.VC_Balance  = Balance;
-        //ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB)   -> %d\n", ActiveParams.VC_Balance );
+        ActiveParams.VC_Balance = Balance;
+        // ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB)   -> %d\n", ActiveParams.VC_Balance
+        // );
 
         /* Activate the initial settings */
         LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-        //ALOGV("\tVolumeSetStereoPosition Succesfully called LVM_SetControlParameters\n");
+        // ALOGV("\tVolumeSetStereoPosition Successfully called LVM_SetControlParameters\n");
 
         /* Get the current settings */
         LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
         LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-        if(LvmStatus != LVM_SUCCESS) return -EINVAL;
-        //ALOGV("\tVolumeSetStereoPosition Succesfully returned from LVM_GetControlParameters got: "
+        if (LvmStatus != LVM_SUCCESS) return -EINVAL;
+        // ALOGV("\tVolumeSetStereoPosition Successfully returned from LVM_GetControlParameters got:
+        // "
         //     "%d\n", ActiveParams.VC_Balance);
+    } else {
+        // ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
+        // position, Balance);
     }
-    else{
-        //ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n",
-        //position, Balance);
-    }
-    //ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n",
+    // pContext->pBundledContext->positionSaved);
     return 0;
-}    /* end VolumeSetStereoPosition */
+} /* end VolumeSetStereoPosition */
 
 //----------------------------------------------------------------------------
 // VolumeGetStereoPosition()
@@ -1941,35 +1752,35 @@
 //  position:       stereo position
 //----------------------------------------------------------------------------
 
-int32_t VolumeGetStereoPosition(EffectContext *pContext, int16_t *position){
-    //ALOGV("\tVolumeGetStereoPosition start");
+int32_t VolumeGetStereoPosition(EffectContext* pContext, int16_t* position) {
+    // ALOGV("\tVolumeGetStereoPosition start");
 
-    LVM_ControlParams_t     ActiveParams;                           /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus = LVM_SUCCESS;                /* Function call status */
-    LVM_INT16               balance;
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
+    LVM_INT16 balance;
 
-    //ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
-    //pContext->pBundledContext->positionSaved);
+    // ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d",
+    // pContext->pBundledContext->positionSaved);
 
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeGetStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
-    //ALOGV("\tVolumeGetStereoPosition Succesfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVolumeGetStereoPosition -> %d\n", ActiveParams.VC_Balance);
+    // ALOGV("\tVolumeGetStereoPosition Successfully returned from LVM_GetControlParameters\n");
 
     balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
 
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){
-        if(balance != ActiveParams.VC_Balance){
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE) {
+        if (balance != ActiveParams.VC_Balance) {
             return -EINVAL;
         }
     }
-    *position = (LVM_INT16)pContext->pBundledContext->positionSaved;     // Convert dB to millibels
-    //ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved =%d\n",
-    //pContext->pBundledContext->positionSaved);
+    *position = (LVM_INT16)pContext->pBundledContext->positionSaved;  // Convert dB to millibels
+    // ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved
+    // =%d\n", pContext->pBundledContext->positionSaved);
     return 0;
-}    /* end VolumeGetStereoPosition */
+} /* end VolumeGetStereoPosition */
 
 //----------------------------------------------------------------------------
 // VolumeEnableStereoPosition()
@@ -1982,40 +1793,40 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t VolumeEnableStereoPosition(EffectContext *pContext, uint32_t enabled){
-    //ALOGV("\tVolumeEnableStereoPosition start()");
+int32_t VolumeEnableStereoPosition(EffectContext* pContext, uint32_t enabled) {
+    // ALOGV("\tVolumeEnableStereoPosition start()");
 
     pContext->pBundledContext->bStereoPositionEnabled = enabled;
 
-    LVM_ControlParams_t     ActiveParams;              /* Current control Parameters */
-    LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;     /* Function call status */
+    LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+    LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeEnableStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeEnableStereoPosition Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
+    // ALOGV("\tVolumeEnableStereoPosition Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n",
     //     enabled, ActiveParams.VC_Balance );
 
     /* Set appropriate stereo position */
-    if(pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE){
+    if (pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE) {
         ActiveParams.VC_Balance = 0;
-    }else{
-        ActiveParams.VC_Balance  =
-                            VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
+    } else {
+        ActiveParams.VC_Balance =
+                VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved);
     }
 
     /* Activate the initial settings */
     LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeEnableStereoPosition")
-    if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
-    //ALOGV("\tVolumeEnableStereoPosition Succesfully called LVM_SetControlParameters\n");
-    //ALOGV("\tVolumeEnableStereoPosition end()\n");
+    // ALOGV("\tVolumeEnableStereoPosition Successfully called LVM_SetControlParameters\n");
+    // ALOGV("\tVolumeEnableStereoPosition end()\n");
     return 0;
-}    /* end VolumeEnableStereoPosition */
+} /* end VolumeEnableStereoPosition */
 
 //----------------------------------------------------------------------------
 // BassBoost_getParameter()
@@ -2038,13 +1849,10 @@
 //
 //----------------------------------------------------------------------------
 
-int BassBoost_getParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t      *pValueSize,
-                           void          *pValue) {
+int BassBoost_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2055,28 +1863,27 @@
     switch (params[0]) {
         case BASSBOOST_PARAM_STRENGTH_SUPPORTED:
             if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
-                ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = 1;
-            ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t *)pValue);
+            *(uint32_t*)pValue = 1;
+            ALOGVV("%s BASSBOOST_PARAM_STRENGTH_SUPPORTED %u", __func__, *(uint32_t*)pValue);
             break;
 
         case BASSBOOST_PARAM_STRENGTH:
             if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
-                ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s BASSBOOST_PARAM_STRENGTH invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(int16_t *)pValue = BassGetStrength(pContext);
-            ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+            *(int16_t*)pValue = BassGetStrength(pContext);
+            ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
             break;
 
         default:
@@ -2104,13 +1911,10 @@
 //
 //----------------------------------------------------------------------------
 
-int BassBoost_setParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t       valueSize,
-                           void          *pValue) {
+int BassBoost_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2126,7 +1930,7 @@
                 break;
             }
 
-            const int16_t strength = *(int16_t *)pValue;
+            const int16_t strength = *(int16_t*)pValue;
             ALOGVV("%s BASSBOOST_PARAM_STRENGTH %d", __func__, strength);
             ALOGVV("%s BASSBOOST_PARAM_STRENGTH Calling BassSetStrength", __func__);
             BassSetStrength(pContext, (int32_t)strength);
@@ -2164,13 +1968,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Virtualizer_getParameter(EffectContext *pContext,
-                             uint32_t       paramSize,
-                             void          *pParam,
-                             uint32_t      *pValueSize,
-                             void          *pValue) {
+int Virtualizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2180,47 +1981,47 @@
     }
     switch (params[0]) {
         case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED:
-            if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = 1;
-            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t *)pValue);
+            *(uint32_t*)pValue = 1;
+            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH_SUPPORTED %d", __func__, *(uint32_t*)pValue);
             break;
 
         case VIRTUALIZER_PARAM_STRENGTH:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_STRENGTH invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(int16_t *)pValue = VirtualizerGetStrength(pContext);
+            *(int16_t*)pValue = VirtualizerGetStrength(pContext);
 
-            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t *)pValue);
+            ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, *(int16_t*)pValue);
             break;
 
         case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
             if (paramSize < 3 * sizeof(int32_t)) {
-                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u",
-                        __func__, paramSize);
+                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid paramSize: %u", __func__,
+                      paramSize);
                 status = -EINVAL;
                 break;
             }
 
-            const audio_channel_mask_t channelMask = (audio_channel_mask_t) params[1];
-            const audio_devices_t deviceType = (audio_devices_t) params[2];
+            const audio_channel_mask_t channelMask = (audio_channel_mask_t)params[1];
+            const audio_devices_t deviceType = (audio_devices_t)params[2];
             const uint32_t nbChannels = audio_channel_count_from_out_mask(channelMask);
             const uint32_t valueSizeRequired = 3 * nbChannels * sizeof(int32_t);
             if (*pValueSize < valueSizeRequired) {
-                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s VIRTUALIZER_PARAM_SPEAKER_ANGLES invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
@@ -2229,23 +2030,23 @@
             // verify the configuration is supported
             status = VirtualizerIsConfigurationSupported(channelMask, deviceType);
             if (status == 0) {
-                ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x",
-                        __func__, channelMask, deviceType);
+                ALOGV("%s VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES mask=0x%x device=0x%x", __func__,
+                      channelMask, deviceType);
                 // configuration is supported, get the angles
-                VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t *)pValue);
+                VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t*)pValue);
             }
         } break;
 
         case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE:
-            if (*pValueSize != sizeof(uint32_t)) { // legacy: check equality here.
-                ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(uint32_t)) {  // legacy: check equality here.
+                ALOGV("%s VIRTUALIZER_PARAM_VIRTUALIZATION_MODE invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            *(uint32_t *)pValue = (uint32_t) VirtualizerGetVirtualizationMode(pContext);
+            *(uint32_t*)pValue = (uint32_t)VirtualizerGetVirtualizationMode(pContext);
             break;
 
         default:
@@ -2273,17 +2074,14 @@
 //
 //----------------------------------------------------------------------------
 
-int Virtualizer_setParameter(EffectContext *pContext,
-                             uint32_t       paramSize,
-                             void          *pParam,
-                             uint32_t       valueSize,
-                             void          *pValue) {
+int Virtualizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                             uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
-    if (paramSize != sizeof(int32_t)) { // legacy: check equality here.
+    if (paramSize != sizeof(int32_t)) {  // legacy: check equality here.
         ALOGV("%s invalid paramSize: %u", __func__, paramSize);
         return -EINVAL;
     }
@@ -2295,7 +2093,7 @@
                 break;
             }
 
-            const int16_t strength = *(int16_t *)pValue;
+            const int16_t strength = *(int16_t*)pValue;
             ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH %d", __func__, strength);
             ALOGVV("%s VIRTUALIZER_PARAM_STRENGTH Calling VirtualizerSetStrength", __func__);
             VirtualizerSetStrength(pContext, (int32_t)strength);
@@ -2305,16 +2103,16 @@
         case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
             if (valueSize < sizeof(int32_t)) {
                 ALOGV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE invalid valueSize: %u",
-                        __func__, valueSize);
+                      __func__, valueSize);
                 android_errorWriteLog(0x534e4554, "64478003");
                 status = -EINVAL;
                 break;
             }
 
-            const audio_devices_t deviceType = (audio_devices_t)*(int32_t *)pValue;
+            const audio_devices_t deviceType = (audio_devices_t) * (int32_t*)pValue;
             status = VirtualizerForceVirtualizationMode(pContext, deviceType);
-            ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d",
-                    __func__, deviceType, status);
+            ALOGVV("%s VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=%#x result=%d", __func__,
+                   deviceType, status);
         } break;
 
         default:
@@ -2347,13 +2145,10 @@
 // Side Effects:
 //
 //----------------------------------------------------------------------------
-int Equalizer_getParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t      *pValueSize,
-                           void          *pValue) {
+int Equalizer_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2362,211 +2157,210 @@
         return -EINVAL;
     }
     switch (params[0]) {
-    case EQ_PARAM_NUM_BANDS:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)FIVEBAND_NUMBANDS;
-        ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_CUR_PRESET:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetPreset(pContext);
-        ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_GET_NUM_OF_PRESETS:
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetNumPresets();
-        ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t *)pValue);
-        break;
-
-    case EQ_PARAM_GET_BAND: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < sizeof(uint16_t)) {
-            ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(uint16_t);
-
-        const int32_t frequency = params[1];
-        *(uint16_t *)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
-        ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u",
-                __func__, frequency, *(uint16_t *)pValue);
-    } break;
-
-    case EQ_PARAM_BAND_LEVEL: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(int16_t);
-
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32438598");
-                ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+        case EQ_PARAM_NUM_BANDS:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_NUM_BANDS invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            status = -EINVAL;
-            break;
-        }
-        *(int16_t *)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
-        ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d",
-                __func__, band, *(int16_t *)pValue);
-    } break;
+            *pValueSize = sizeof(uint16_t);
 
-    case EQ_PARAM_LEVEL_RANGE:
-        if (*pValueSize < 2 * sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)FIVEBAND_NUMBANDS;
+            ALOGVV("%s EQ_PARAM_NUM_BANDS %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        *pValueSize = 2 * sizeof(int16_t);
 
-        *(int16_t *)pValue = -1500;
-        *((int16_t *)pValue + 1) = 1500;
-        ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d",
-                __func__, *(int16_t *)pValue, *((int16_t *)pValue + 1));
-        break;
-
-    case EQ_PARAM_BAND_FREQ_RANGE: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (*pValueSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = 2 * sizeof(int32_t);
-
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32247948");
-                ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d",
-                        __func__, band);
+        case EQ_PARAM_CUR_PRESET:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            status = -EINVAL;
-            break;
-        }
-        EqualizerGetBandFreqRange(pContext, band, (uint32_t *)pValue, ((uint32_t *)pValue + 1));
-        ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d",
-                __func__, band, *(int32_t *)pValue, *((int32_t *)pValue + 1));
+            *pValueSize = sizeof(uint16_t);
 
-    } break;
-
-    case EQ_PARAM_CENTER_FREQ: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetPreset(pContext);
+            ALOGVV("%s EQ_PARAM_CUR_PRESET %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        if (*pValueSize < sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = sizeof(int32_t);
 
-        const int32_t band = params[1];
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            status = -EINVAL;
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32436341");
-                ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+        case EQ_PARAM_GET_NUM_OF_PRESETS:
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_GET_NUM_OF_PRESETS invalid *pValueSize %u", __func__,
+                      *pValueSize);
+                status = -EINVAL;
+                break;
             }
-            break;
-        }
-        *(int32_t *)pValue = EqualizerGetCentreFrequency(pContext, band);
-        ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d",
-                __func__, band, *(int32_t *)pValue);
-    } break;
+            *pValueSize = sizeof(uint16_t);
 
-    case EQ_PARAM_GET_PRESET_NAME: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetNumPresets();
+            ALOGVV("%s EQ_PARAM_GET_NUM_OF_PRESETS %u", __func__, *(uint16_t*)pValue);
             break;
-        }
-        if (*pValueSize < 1) {
-            android_errorWriteLog(0x534e4554, "37536407");
-            status = -EINVAL;
-            break;
-        }
 
-        const int32_t preset = params[1];
-        if ((preset < 0 && preset != PRESET_CUSTOM) ||  preset >= EqualizerGetNumPresets()) {
-            if (preset < 0) {
-                android_errorWriteLog(0x534e4554, "32448258");
-                ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+        case EQ_PARAM_GET_BAND: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_GET_BAND invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
             }
+            if (*pValueSize < sizeof(uint16_t)) {
+                ALOGV("%s EQ_PARAM_GET_BAND invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(uint16_t);
+
+            const int32_t frequency = params[1];
+            *(uint16_t*)pValue = (uint16_t)EqualizerGetBand(pContext, frequency);
+            ALOGVV("%s EQ_PARAM_GET_BAND frequency %d, band %u", __func__, frequency,
+                   *(uint16_t*)pValue);
+        } break;
+
+        case EQ_PARAM_BAND_LEVEL: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(int16_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32438598");
+                    ALOGW("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            *(int16_t*)pValue = (int16_t)EqualizerGetBandLevel(pContext, band);
+            ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, *(int16_t*)pValue);
+        } break;
+
+        case EQ_PARAM_LEVEL_RANGE:
+            if (*pValueSize < 2 * sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_LEVEL_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = 2 * sizeof(int16_t);
+
+            *(int16_t*)pValue = -1500;
+            *((int16_t*)pValue + 1) = 1500;
+            ALOGVV("%s EQ_PARAM_LEVEL_RANGE min %d, max %d", __func__, *(int16_t*)pValue,
+                   *((int16_t*)pValue + 1));
+            break;
+
+        case EQ_PARAM_BAND_FREQ_RANGE: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_FREQ_RANGE invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = 2 * sizeof(int32_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32247948");
+                    ALOGW("%s EQ_PARAM_BAND_FREQ_RANGE invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            EqualizerGetBandFreqRange(pContext, band, (uint32_t*)pValue, ((uint32_t*)pValue + 1));
+            ALOGVV("%s EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d", __func__, band,
+                   *(int32_t*)pValue, *((int32_t*)pValue + 1));
+
+        } break;
+
+        case EQ_PARAM_CENTER_FREQ: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_CENTER_FREQ invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_CENTER_FREQ invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = sizeof(int32_t);
+
+            const int32_t band = params[1];
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                status = -EINVAL;
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32436341");
+                    ALOGW("%s EQ_PARAM_CENTER_FREQ invalid band %d", __func__, band);
+                }
+                break;
+            }
+            *(int32_t*)pValue = EqualizerGetCentreFrequency(pContext, band);
+            ALOGVV("%s EQ_PARAM_CENTER_FREQ band %d, frequency %d", __func__, band,
+                   *(int32_t*)pValue);
+        } break;
+
+        case EQ_PARAM_GET_PRESET_NAME: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_PRESET_NAME invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (*pValueSize < 1) {
+                android_errorWriteLog(0x534e4554, "37536407");
+                status = -EINVAL;
+                break;
+            }
+
+            const int32_t preset = params[1];
+            if ((preset < 0 && preset != PRESET_CUSTOM) || preset >= EqualizerGetNumPresets()) {
+                if (preset < 0) {
+                    android_errorWriteLog(0x534e4554, "32448258");
+                    ALOGE("%s EQ_PARAM_GET_PRESET_NAME preset %d", __func__, preset);
+                }
+                status = -EINVAL;
+                break;
+            }
+
+            char* const name = (char*)pValue;
+            strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
+            name[*pValueSize - 1] = 0;
+            *pValueSize = strlen(name) + 1;
+            ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d", __func__, preset, name,
+                   *pValueSize);
+
+        } break;
+
+        case EQ_PARAM_PROPERTIES: {
+            constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
+            if (*pValueSize < requiredValueSize) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
+                status = -EINVAL;
+                break;
+            }
+            *pValueSize = requiredValueSize;
+
+            int16_t* p = (int16_t*)pValue;
+            ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
+            p[0] = (int16_t)EqualizerGetPreset(pContext);
+            p[1] = (int16_t)FIVEBAND_NUMBANDS;
+            for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+                p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
+            }
+        } break;
+
+        default:
+            ALOGV("%s invalid param %d", __func__, params[0]);
             status = -EINVAL;
             break;
-        }
-
-        char * const name = (char *)pValue;
-        strncpy(name, EqualizerGetPresetName(preset), *pValueSize - 1);
-        name[*pValueSize - 1] = 0;
-        *pValueSize = strlen(name) + 1;
-        ALOGVV("%s EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d",
-               __func__, preset, name, *pValueSize);
-
-    } break;
-
-    case EQ_PARAM_PROPERTIES: {
-        constexpr uint32_t requiredValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t);
-        if (*pValueSize < requiredValueSize) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid *pValueSize %u", __func__, *pValueSize);
-            status = -EINVAL;
-            break;
-        }
-        *pValueSize = requiredValueSize;
-
-        int16_t *p = (int16_t *)pValue;
-        ALOGV("%s EQ_PARAM_PROPERTIES", __func__);
-        p[0] = (int16_t)EqualizerGetPreset(pContext);
-        p[1] = (int16_t)FIVEBAND_NUMBANDS;
-        for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-            p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i);
-        }
-    } break;
-
-    default:
-        ALOGV("%s invalid param %d", __func__, params[0]);
-        status = -EINVAL;
-        break;
     }
 
     ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2589,13 +2383,10 @@
 // Outputs:
 //
 //----------------------------------------------------------------------------
-int Equalizer_setParameter(EffectContext *pContext,
-                           uint32_t       paramSize,
-                           void          *pParam,
-                           uint32_t       valueSize,
-                           void          *pValue) {
+int Equalizer_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                           uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2604,87 +2395,87 @@
         return -EINVAL;
     }
     switch (params[0]) {
-    case EQ_PARAM_CUR_PRESET: {
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        const int32_t preset = (int32_t)*(uint16_t *)pValue;
-
-        ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
-        if (preset >= EqualizerGetNumPresets() || preset < 0) {
-            ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
-            status = -EINVAL;
-            break;
-        }
-        EqualizerSetPreset(pContext, preset);
-    } break;
-
-    case EQ_PARAM_BAND_LEVEL: {
-        if (paramSize < 2 * sizeof(int32_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
-            status = -EINVAL;
-            break;
-        }
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        const int32_t band =  params[1];
-        const int32_t level = (int32_t)*(int16_t *)pValue;
-        ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
-        if (band < 0 || band >= FIVEBAND_NUMBANDS) {
-            if (band < 0) {
-                android_errorWriteLog(0x534e4554, "32095626");
-                ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
-            }
-            status = -EINVAL;
-            break;
-        }
-        EqualizerSetBandLevel(pContext, band, level);
-    } break;
-
-    case EQ_PARAM_PROPERTIES: {
-        ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
-        if (valueSize < sizeof(int16_t)) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
-            status = -EINVAL;
-            break;
-        }
-        int16_t *p = (int16_t *)pValue;
-        if ((int)p[0] >= EqualizerGetNumPresets()) {
-            ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
-            status = -EINVAL;
-            break;
-        }
-        if (p[0] >= 0) {
-            EqualizerSetPreset(pContext, (int)p[0]);
-        } else {
-            constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
-            if (valueSize < valueSizeRequired) {
-              android_errorWriteLog(0x534e4554, "37563371");
-              ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u",
-                      __func__, valueSize, valueSizeRequired);
-              status = -EINVAL;
-              break;
-            }
-            if ((int)p[1] != FIVEBAND_NUMBANDS) {
-                ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+        case EQ_PARAM_CUR_PRESET: {
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid valueSize %u", __func__, valueSize);
                 status = -EINVAL;
                 break;
             }
-            for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
-                EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
-            }
-        }
-    } break;
+            const int32_t preset = (int32_t) * (uint16_t*)pValue;
 
-    default:
-        ALOGV("%s invalid param %d", __func__, params[0]);
-        status = -EINVAL;
-        break;
+            ALOGVV("%s EQ_PARAM_CUR_PRESET %d", __func__, preset);
+            if (preset >= EqualizerGetNumPresets() || preset < 0) {
+                ALOGV("%s EQ_PARAM_CUR_PRESET invalid preset %d", __func__, preset);
+                status = -EINVAL;
+                break;
+            }
+            EqualizerSetPreset(pContext, preset);
+        } break;
+
+        case EQ_PARAM_BAND_LEVEL: {
+            if (paramSize < 2 * sizeof(int32_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid paramSize: %u", __func__, paramSize);
+                status = -EINVAL;
+                break;
+            }
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_BAND_LEVEL invalid valueSize %u", __func__, valueSize);
+                status = -EINVAL;
+                break;
+            }
+            const int32_t band = params[1];
+            const int32_t level = (int32_t) * (int16_t*)pValue;
+            ALOGVV("%s EQ_PARAM_BAND_LEVEL band %d, level %d", __func__, band, level);
+            if (band < 0 || band >= FIVEBAND_NUMBANDS) {
+                if (band < 0) {
+                    android_errorWriteLog(0x534e4554, "32095626");
+                    ALOGE("%s EQ_PARAM_BAND_LEVEL invalid band %d", __func__, band);
+                }
+                status = -EINVAL;
+                break;
+            }
+            EqualizerSetBandLevel(pContext, band, level);
+        } break;
+
+        case EQ_PARAM_PROPERTIES: {
+            ALOGVV("%s EQ_PARAM_PROPERTIES", __func__);
+            if (valueSize < sizeof(int16_t)) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid valueSize %u", __func__, valueSize);
+                status = -EINVAL;
+                break;
+            }
+            int16_t* p = (int16_t*)pValue;
+            if ((int)p[0] >= EqualizerGetNumPresets()) {
+                ALOGV("%s EQ_PARAM_PROPERTIES invalid preset %d", __func__, (int)p[0]);
+                status = -EINVAL;
+                break;
+            }
+            if (p[0] >= 0) {
+                EqualizerSetPreset(pContext, (int)p[0]);
+            } else {
+                constexpr uint32_t valueSizeRequired = (2 + FIVEBAND_NUMBANDS) * sizeof(int16_t);
+                if (valueSize < valueSizeRequired) {
+                    android_errorWriteLog(0x534e4554, "37563371");
+                    ALOGE("%s EQ_PARAM_PROPERTIES invalid valueSize %u < %u", __func__, valueSize,
+                          valueSizeRequired);
+                    status = -EINVAL;
+                    break;
+                }
+                if ((int)p[1] != FIVEBAND_NUMBANDS) {
+                    ALOGV("%s EQ_PARAM_PROPERTIES invalid bands %d", __func__, (int)p[1]);
+                    status = -EINVAL;
+                    break;
+                }
+                for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+                    EqualizerSetBandLevel(pContext, i, (int)p[2 + i]);
+                }
+            }
+        } break;
+
+        default:
+            ALOGV("%s invalid param %d", __func__, params[0]);
+            status = -EINVAL;
+            break;
     }
 
     ALOGVV("%s end param: %d, status: %d", __func__, params[0], status);
@@ -2712,13 +2503,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Volume_getParameter(EffectContext *pContext,
-                        uint32_t       paramSize,
-                        void          *pParam,
-                        uint32_t      *pValueSize,
-                        void          *pValue) {
+int Volume_getParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2728,19 +2516,19 @@
     }
     switch (params[0]) {
         case VOLUME_PARAM_LEVEL:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
                 ALOGV("%s VOLUME_PARAM_LEVEL invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            status = VolumeGetVolumeLevel(pContext, (int16_t *)(pValue));
-            ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t *)pValue);
+            status = VolumeGetVolumeLevel(pContext, (int16_t*)(pValue));
+            ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_MAXLEVEL:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
                 ALOGV("%s VOLUME_PARAM_MAXLEVEL invalid *pValueSize %u", __func__, *pValueSize);
                 status = -EINVAL;
                 break;
@@ -2748,21 +2536,21 @@
             // no need to set *pValueSize
 
             // in millibel
-            *(int16_t *)pValue = 0;
-            ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t *)pValue);
+            *(int16_t*)pValue = 0;
+            ALOGVV("%s VOLUME_PARAM_MAXLEVEL %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_STEREOPOSITION:
-            if (*pValueSize != sizeof(int16_t)) { // legacy: check equality here.
-                ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u",
-                        __func__, *pValueSize);
+            if (*pValueSize != sizeof(int16_t)) {  // legacy: check equality here.
+                ALOGV("%s VOLUME_PARAM_STEREOPOSITION invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             // no need to set *pValueSize
 
-            VolumeGetStereoPosition(pContext, (int16_t *)pValue);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t *)pValue);
+            VolumeGetStereoPosition(pContext, (int16_t*)pValue);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, *(int16_t*)pValue);
             break;
 
         case VOLUME_PARAM_MUTE:
@@ -2773,21 +2561,21 @@
             }
             *pValueSize = sizeof(uint32_t);
 
-            status = VolumeGetMute(pContext, (uint32_t *)pValue);
-            ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t *)pValue);
+            status = VolumeGetMute(pContext, (uint32_t*)pValue);
+            ALOGV("%s VOLUME_PARAM_MUTE %u", __func__, *(uint32_t*)pValue);
             break;
 
         case VOLUME_PARAM_ENABLESTEREOPOSITION:
             if (*pValueSize < sizeof(int32_t)) {
-                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u",
-                        __func__, *pValueSize);
+                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid *pValueSize %u", __func__,
+                      *pValueSize);
                 status = -EINVAL;
                 break;
             }
             *pValueSize = sizeof(int32_t);
 
-            *(int32_t *)pValue = pContext->pBundledContext->bStereoPositionEnabled;
-            ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t *)pValue);
+            *(int32_t*)pValue = pContext->pBundledContext->bStereoPositionEnabled;
+            ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION %d", __func__, *(int32_t*)pValue);
 
             break;
 
@@ -2816,13 +2604,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Volume_setParameter(EffectContext *pContext,
-                        uint32_t       paramSize,
-                        void          *pParam,
-                        uint32_t       valueSize,
-                        void          *pValue) {
+int Volume_setParameter(EffectContext* pContext, uint32_t paramSize, void* pParam,
+                        uint32_t valueSize, void* pValue) {
     int status = 0;
-    int32_t *params = (int32_t *)pParam;
+    int32_t* params = (int32_t*)pParam;
 
     ALOGVV("%s start", __func__);
 
@@ -2838,7 +2623,7 @@
                 break;
             }
 
-            const int16_t level = *(int16_t *)pValue;
+            const int16_t level = *(int16_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_LEVEL %d", __func__, level);
             ALOGVV("%s VOLUME_PARAM_LEVEL Calling VolumeSetVolumeLevel", __func__);
             status = VolumeSetVolumeLevel(pContext, level);
@@ -2853,7 +2638,7 @@
                 break;
             }
 
-            const uint32_t mute = *(uint32_t *)pValue;
+            const uint32_t mute = *(uint32_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_MUTE %d", __func__, mute);
             ALOGVV("%s VOLUME_PARAM_MUTE Calling VolumeSetMute", __func__);
             status = VolumeSetMute(pContext, mute);
@@ -2862,15 +2647,16 @@
 
         case VOLUME_PARAM_ENABLESTEREOPOSITION: {
             if (valueSize < sizeof(uint32_t)) {
-                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u",
-                        __func__, valueSize);
+                ALOGV("%s VOLUME_PARAM_ENABLESTEREOPOSITION invalid valueSize %u", __func__,
+                      valueSize);
                 status = -EINVAL;
                 break;
             }
 
-            const uint32_t positionEnabled = *(uint32_t *)pValue;
+            const uint32_t positionEnabled = *(uint32_t*)pValue;
             status = VolumeEnableStereoPosition(pContext, positionEnabled)
-                    ?: VolumeSetStereoPosition(pContext, pContext->pBundledContext->positionSaved);
+                             ?: VolumeSetStereoPosition(pContext,
+                                                        pContext->pBundledContext->positionSaved);
             ALOGVV("%s VOLUME_PARAM_ENABLESTEREOPOSITION called", __func__);
         } break;
 
@@ -2881,13 +2667,11 @@
                 break;
             }
 
-            const int16_t position = *(int16_t *)pValue;
+            const int16_t position = *(int16_t*)pValue;
             ALOGVV("%s VOLUME_PARAM_STEREOPOSITION %d", __func__, position);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition",
-                    __func__);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Calling VolumeSetStereoPosition", __func__);
             status = VolumeSetStereoPosition(pContext, position);
-            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition",
-                    __func__);
+            ALOGVV("%s VOLUME_PARAM_STEREOPOSITION Called VolumeSetStereoPosition", __func__);
         } break;
 
         default:
@@ -2912,18 +2696,15 @@
  *  Remarks     :
  ****************************************************************************************/
 
-LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix)
-{
-    LVM_INT16   db_fix;
-    LVM_INT16   Shift;
-    LVM_INT16   SmallRemainder;
-    LVM_UINT32  Remainder = (LVM_UINT32)Lin_fix;
+LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) {
+    LVM_INT16 db_fix;
+    LVM_INT16 Shift;
+    LVM_INT16 SmallRemainder;
+    LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
 
     /* Count leading bits, 1 cycle in assembly*/
-    for (Shift = 0; Shift<32; Shift++)
-    {
-        if ((Remainder & 0x80000000U)!=0)
-        {
+    for (Shift = 0; Shift < 32; Shift++) {
+        if ((Remainder & 0x80000000U) != 0) {
             break;
         }
         Remainder = Remainder << 1;
@@ -2934,9 +2715,9 @@
      *
      * dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2)
      */
-    db_fix    = (LVM_INT16)(-96 * Shift);               /* Six dB steps in Q11.4 format*/
+    db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
     SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24);
-    db_fix = (LVM_INT16)(db_fix + SmallRemainder );
+    db_fix = (LVM_INT16)(db_fix + SmallRemainder);
     SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder);
     db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9));
 
@@ -2960,11 +2741,10 @@
 //
 //----------------------------------------------------------------------------
 
-int Effect_setEnabled(EffectContext *pContext, bool enabled)
-{
-    ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__,
-            pContext->EffectType, enabled, pContext->pBundledContext->NumberEffectsEnabled);
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
+int Effect_setEnabled(EffectContext* pContext, bool enabled) {
+    ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__, pContext->EffectType,
+          enabled, pContext->pBundledContext->NumberEffectsEnabled);
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
     if (enabled) {
         // Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due
         // to their nature.
@@ -2972,15 +2752,15 @@
         switch (pContext->EffectType) {
             case LVM_BASS_BOOST:
                 if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
-                     ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
-                     return -EINVAL;
+                    ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled");
+                    return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountBb <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_BASS_BOOST);
                 pContext->pBundledContext->SamplesToExitCountBb =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bBassEnabled = LVM_TRUE;
                 tempDisabled = pContext->pBundledContext->bBassTempDisabled;
                 break;
@@ -2989,12 +2769,12 @@
                     ALOGV("\tEffect_setEnabled() LVM_EQUALIZER is already enabled");
                     return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountEq <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_EQUALIZER);
                 pContext->pBundledContext->SamplesToExitCountEq =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bEqualizerEnabled = LVM_TRUE;
                 break;
             case LVM_VIRTUALIZER:
@@ -3002,12 +2782,12 @@
                     ALOGV("\tEffect_setEnabled() LVM_VIRTUALIZER is already enabled");
                     return -EINVAL;
                 }
-                if(pContext->pBundledContext->SamplesToExitCountVirt <= 0){
+                if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
                 effectInDrain &= ~(1 << LVM_VIRTUALIZER);
                 pContext->pBundledContext->SamplesToExitCountVirt =
-                     (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
+                        (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond * 0.1);
                 pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
                 tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled;
                 break;
@@ -3084,41 +2864,39 @@
 //
 //-----------------------------------------------------------------------
 
-int16_t LVC_Convert_VolToDb(uint32_t vol){
-    int16_t  dB;
+int16_t LVC_Convert_VolToDb(uint32_t vol) {
+    int16_t dB;
 
-    dB = LVC_ToDB_s32Tos16(vol <<7);
-    dB = (dB +8)>>4;
-    dB = (dB <-96) ? -96 : dB ;
+    dB = LVC_ToDB_s32Tos16(vol << 7);
+    dB = (dB + 8) >> 4;
+    dB = (dB < -96) ? -96 : dB;
 
     return dB;
 }
 
-} // namespace
-} // namespace
+}  // namespace
+}  // namespace android
 
 extern "C" {
 /* Effect Control Interface Implementation: Process */
-int Effect_process(effect_handle_t     self,
-                              audio_buffer_t         *inBuffer,
-                              audio_buffer_t         *outBuffer){
-    EffectContext * pContext = (EffectContext *) self;
-    int    status = 0;
-    int    processStatus = 0;
+int Effect_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+    EffectContext* pContext = (EffectContext*)self;
+    int status = 0;
+    int processStatus = 0;
     const int NrChannels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
-//ALOGV("\tEffect_process Start : Enabled = %d     Called = %d (%8d %8d %8d)",
-//pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
-//    pContext->pBundledContext->SamplesToExitCountBb,
-//    pContext->pBundledContext->SamplesToExitCountVirt,
-//    pContext->pBundledContext->SamplesToExitCountEq);
+    // ALOGV("\tEffect_process Start : Enabled = %d     Called = %d (%8d %8d %8d)",
+    // pContext->pBundledContext->NumberEffectsEnabled,pContext->pBundledContext->NumberEffectsCalled,
+    //    pContext->pBundledContext->SamplesToExitCountBb,
+    //    pContext->pBundledContext->SamplesToExitCountVirt,
+    //    pContext->pBundledContext->SamplesToExitCountEq);
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Effect_process() ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //if(pContext->EffectType == LVM_BASS_BOOST){
+    // if(pContext->EffectType == LVM_BASS_BOOST){
     //  ALOGV("\tEffect_process: Effect type is BASS_BOOST");
     //}else if(pContext->EffectType == LVM_EQUALIZER){
     //  ALOGV("\tEffect_process: Effect type is LVM_EQUALIZER");
@@ -3126,15 +2904,14 @@
     //  ALOGV("\tEffect_process: Effect type is LVM_VIRTUALIZER");
     //}
 
-    if (inBuffer == NULL  || inBuffer->raw == NULL  ||
-            outBuffer == NULL || outBuffer->raw == NULL ||
-            inBuffer->frameCount != outBuffer->frameCount){
+    if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount) {
         ALOGV("\tLVM_ERROR : Effect_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
         return -EINVAL;
     }
 
-    int &effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
-    int &effectInDrain = pContext->pBundledContext->effectInDrain;
+    int& effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
+    int& effectInDrain = pContext->pBundledContext->effectInDrain;
     if ((effectProcessCalled & 1 << pContext->EffectType) != 0) {
         ALOGW("Effect %d already called", pContext->EffectType);
         const int undrainedEffects = effectInDrain & ~effectProcessCalled;
@@ -3164,12 +2941,12 @@
     }
     effectProcessCalled |= 1 << pContext->EffectType;
 
-    if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_BASS_BOOST)){
-        //ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountBb > 0){
+    if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_BASS_BOOST)) {
+        // ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountBb > 0) {
             pContext->pBundledContext->SamplesToExitCountBb -= outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
+            // ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountBb);
         }
         if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
@@ -3181,21 +2958,21 @@
             ALOGV("\tEffect_process() this is the last frame for LVM_BASS_BOOST");
         }
     }
-    if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_VOLUME)){
-        //ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
+    if ((pContext->pBundledContext->bVolumeEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_VOLUME)) {
+        // ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
         status = -ENODATA;
         if ((effectInDrain & 1 << LVM_VOLUME) != 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
             effectInDrain &= ~(1 << LVM_VOLUME);
         }
     }
-    if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_EQUALIZER)){
-        //ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountEq > 0){
+    if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_EQUALIZER)) {
+        // ALOGV("\tEffect_process() LVM_EQUALIZER Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountEq > 0) {
             pContext->pBundledContext->SamplesToExitCountEq -= outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
+            // ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountEq);
         }
         if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
@@ -3207,13 +2984,12 @@
             ALOGV("\tEffect_process() this is the last frame for LVM_EQUALIZER");
         }
     }
-    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE)&&
-        (pContext->EffectType == LVM_VIRTUALIZER)){
-        //ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
-        if(pContext->pBundledContext->SamplesToExitCountVirt > 0){
-            pContext->pBundledContext->SamplesToExitCountVirt -=
-                outBuffer->frameCount * NrChannels;
-            //ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
+    if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE) &&
+        (pContext->EffectType == LVM_VIRTUALIZER)) {
+        // ALOGV("\tEffect_process() LVM_VIRTUALIZER Effect is not enabled");
+        if (pContext->pBundledContext->SamplesToExitCountVirt > 0) {
+            pContext->pBundledContext->SamplesToExitCountVirt -= outBuffer->frameCount * NrChannels;
+            // ALOGV("\tEffect_process: Waiting to turn off VIRTUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountVirt);
         }
         if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
@@ -3226,37 +3002,34 @@
         }
     }
 
-    if(status != -ENODATA){
+    if (status != -ENODATA) {
         pContext->pBundledContext->NumberEffectsCalled++;
     }
 
     if (pContext->pBundledContext->NumberEffectsCalled >=
-            pContext->pBundledContext->NumberEffectsEnabled) {
-
+        pContext->pBundledContext->NumberEffectsEnabled) {
         // We expect the # effects called to be equal to # effects enabled in sequence (including
         // draining effects).  Warn if this is not the case due to inconsistent calls.
         ALOGW_IF(pContext->pBundledContext->NumberEffectsCalled >
-                pContext->pBundledContext->NumberEffectsEnabled,
-                "%s Number of effects called %d is greater than number of effects enabled %d",
-                __func__, pContext->pBundledContext->NumberEffectsCalled,
-                pContext->pBundledContext->NumberEffectsEnabled);
-        effectProcessCalled = 0; // reset our consistency check.
+                         pContext->pBundledContext->NumberEffectsEnabled,
+                 "%s Number of effects called %d is greater than number of effects enabled %d",
+                 __func__, pContext->pBundledContext->NumberEffectsCalled,
+                 pContext->pBundledContext->NumberEffectsEnabled);
+        effectProcessCalled = 0;  // reset our consistency check.
 
-        //ALOGV("\tEffect_process     Calling process with %d effects enabled, %d called: Effect %d",
-        //pContext->pBundledContext->NumberEffectsEnabled,
-        //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+        // ALOGV("\tEffect_process     Calling process with %d effects enabled, %d called: Effect
+        // %d", pContext->pBundledContext->NumberEffectsEnabled,
+        // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
 
-        if (status == -ENODATA){
+        if (status == -ENODATA) {
             ALOGV("\tEffect_process() processing last frame");
         }
         pContext->pBundledContext->NumberEffectsCalled = 0;
         /* Process all the available frames, block processing is
            handled internally by the LVM bundle */
-        processStatus = android::LvmBundle_process(inBuffer->f32,
-                                                   outBuffer->f32,
-                                                   outBuffer->frameCount,
-                                                   pContext);
-        if (processStatus != 0){
+        processStatus = android::LvmBundle_process(inBuffer->f32, outBuffer->f32,
+                                                   outBuffer->frameCount, pContext);
+        if (processStatus != 0) {
             ALOGV("\tLVM_ERROR : LvmBundle_process returned error %d", processStatus);
             if (status == 0) {
                 status = processStatus;
@@ -3264,133 +3037,126 @@
             return status;
         }
     } else {
-        //ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
-        //pContext->pBundledContext->NumberEffectsEnabled,
-        //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
+        // ALOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect
+        // %d", pContext->pBundledContext->NumberEffectsEnabled,
+        // pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
 
         if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
             for (size_t i = 0; i < outBuffer->frameCount * NrChannels; ++i) {
                 outBuffer->f32[i] += inBuffer->f32[i];
             }
         } else if (outBuffer->raw != inBuffer->raw) {
-            memcpy(outBuffer->raw,
-                    inBuffer->raw,
-                    outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
+            memcpy(outBuffer->raw, inBuffer->raw,
+                   outBuffer->frameCount * sizeof(effect_buffer_t) * FCC_2);
         }
     }
 
     return status;
-}   /* end Effect_process */
+} /* end Effect_process */
 
 // The value offset of an effect parameter is computed by rounding up
 // the parameter size to the next 32 bit alignment.
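+// For example, psize == 5 is rounded up to a value offset of 8, while psize == 4 stays at 4.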
-static inline uint32_t computeParamVOffset(const effect_param_t *p) {
-    return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) *
-            sizeof(int32_t);
+static inline uint32_t computeParamVOffset(const effect_param_t* p) {
+    return ((p->psize + sizeof(int32_t) - 1) / sizeof(int32_t)) * sizeof(int32_t);
 }
 
 /* Effect Control Interface Implementation: Command */
-int Effect_command(effect_handle_t  self,
-                              uint32_t            cmdCode,
-                              uint32_t            cmdSize,
-                              void                *pCmdData,
-                              uint32_t            *replySize,
-                              void                *pReplyData){
-    EffectContext * pContext = (EffectContext *) self;
+int Effect_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+                   uint32_t* replySize, void* pReplyData) {
+    EffectContext* pContext = (EffectContext*)self;
 
-    //ALOGV("\t\nEffect_command start");
+    // ALOGV("\t\nEffect_command start");
 
-    if(pContext->EffectType == LVM_BASS_BOOST){
-        //ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
+    if (pContext->EffectType == LVM_BASS_BOOST) {
+        // ALOGV("\tEffect_command setting command for LVM_BASS_BOOST");
     }
-    if(pContext->EffectType == LVM_VIRTUALIZER){
-        //ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
+    if (pContext->EffectType == LVM_VIRTUALIZER) {
+        // ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER");
     }
-    if(pContext->EffectType == LVM_EQUALIZER){
-        //ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
+    if (pContext->EffectType == LVM_EQUALIZER) {
+        // ALOGV("\tEffect_command setting command for LVM_EQUALIZER");
     }
-    if(pContext->EffectType == LVM_VOLUME){
-        //ALOGV("\tEffect_command setting command for LVM_VOLUME");
+    if (pContext->EffectType == LVM_VOLUME) {
+        // ALOGV("\tEffect_command setting command for LVM_VOLUME");
     }
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Effect_command ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+    // ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
 
     // In case we disable an effect, the next time process is
     // called the number of effects called could be greater
     // pContext->pBundledContext->NumberEffectsCalled = 0;
 
-    //ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
+    // ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d",
     //        pContext->pBundledContext->NumberEffectsCalled,
     //        pContext->pBundledContext->NumberEffectsEnabled);
 
-    switch (cmdCode){
+    switch (cmdCode) {
         case EFFECT_CMD_INIT:
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR, EFFECT_CMD_INIT: ERROR for effect type %d",
-                        pContext->EffectType);
+                      pContext->EffectType);
                 return -EINVAL;
             }
-            *(int *) pReplyData = 0;
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
+            *(int*)pReplyData = 0;
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start");
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST");
                 android::BassSetStrength(pContext, 0);
             }
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER");
                 android::VirtualizerSetStrength(pContext, 0);
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER");
                 android::EqualizerSetPreset(pContext, 0);
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
-                *(int *) pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME");
+                *(int*)pReplyData = android::VolumeSetVolumeLevel(pContext, 0);
             }
             break;
 
         case EFFECT_CMD_SET_CONFIG:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
-            if (pCmdData    == NULL || cmdSize     != sizeof(effect_config_t) ||
-                    pReplyData  == NULL || replySize == NULL || *replySize  != sizeof(int)) {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start");
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG: ERROR");
+                      "EFFECT_CMD_SET_CONFIG: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = android::Effect_setConfig(pContext, (effect_config_t *) pCmdData);
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
+            *(int*)pReplyData = android::Effect_setConfig(pContext, (effect_config_t*)pCmdData);
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end");
             break;
 
         case EFFECT_CMD_GET_CONFIG:
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_CONFIG: ERROR");
                 return -EINVAL;
             }
 
-            android::Effect_getConfig(pContext, (effect_config_t *)pReplyData);
+            android::Effect_getConfig(pContext, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_RESET:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start");
             android::Effect_setConfig(pContext, &pContext->config);
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end");
             break;
 
-        case EFFECT_CMD_GET_PARAM:{
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+        case EFFECT_CMD_GET_PARAM: {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
 
-            effect_param_t *p = (effect_param_t *)pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
             if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) ||
-                    cmdSize < (sizeof(effect_param_t) + p->psize) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < (sizeof(effect_param_t) + p->psize)) {
+                cmdSize < (sizeof(effect_param_t) + p->psize) || pReplyData == NULL ||
+                replySize == NULL || *replySize < (sizeof(effect_param_t) + p->psize)) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: ERROR");
                 return -EINVAL;
             }
@@ -3401,75 +3167,62 @@
             }
             const uint32_t paddedParamSize = computeParamVOffset(p);
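+            // Reject requests where sizeof(effect_param_t) + padded psize + vsize would exceed
+            // EFFECT_PARAM_SIZE_MAX; the subtraction form below avoids integer overflow.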
             if ((EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) < paddedParamSize) ||
-                (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize <
-                    p->vsize)) {
+                (EFFECT_PARAM_SIZE_MAX - sizeof(effect_param_t) - paddedParamSize < p->vsize)) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: padded_psize or vsize too big");
                 return -EINVAL;
             }
             uint32_t expectedReplySize = sizeof(effect_param_t) + paddedParamSize + p->vsize;
             if (*replySize < expectedReplySize) {
                 ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: min. replySize %u, got %u bytes",
-                        expectedReplySize, *replySize);
+                      expectedReplySize, *replySize);
                 android_errorWriteLog(0x534e4554, "32705438");
                 return -EINVAL;
             }
 
             memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
 
-            p = (effect_param_t *)pReplyData;
+            p = (effect_param_t*)pReplyData;
 
             uint32_t voffset = paddedParamSize;
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                p->status = android::BassBoost_getParameter(pContext,
-                                                            p->psize,
-                                                            p->data,
-                                                            &p->vsize,
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                p->status = android::BassBoost_getParameter(pContext, p->psize, p->data, &p->vsize,
                                                             p->data + voffset);
-                //ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
             }
 
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-                p->status = android::Virtualizer_getParameter(pContext,
-                                                              p->psize,
-                                                              (void *)p->data,
-                                                              &p->vsize,
-                                                              p->data + voffset);
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                p->status = android::Virtualizer_getParameter(pContext, p->psize, (void*)p->data,
+                                                              &p->vsize, p->data + voffset);
 
-                //ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-                //ALOGV("\tEqualizer_command cmdCode Case: "
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEqualizer_command cmdCode Case: "
                 //        "EFFECT_CMD_GET_PARAM start");
-                p->status = android::Equalizer_getParameter(pContext,
-                                                            p->psize,
-                                                            p->data,
-                                                            &p->vsize,
+                p->status = android::Equalizer_getParameter(pContext, p->psize, p->data, &p->vsize,
                                                             p->data + voffset);
 
-                //ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
+                // ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, "
                 //       "*pReplyData %08x %08x",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), *replySize,
                 //        *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset),
                 //        *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset +
                 //        sizeof(int32_t)));
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
-                p->status = android::Volume_getParameter(pContext,
-                                                         p->psize,
-                                                         (void *)p->data,
-                                                         &p->vsize,
-                                                         p->data + voffset);
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start");
+                p->status = android::Volume_getParameter(pContext, p->psize, (void*)p->data,
+                                                         &p->vsize, p->data + voffset);
 
-                //ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
+                // ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM "
                 //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
@@ -3477,123 +3230,114 @@
             }
             *replySize = sizeof(effect_param_t) + voffset + p->vsize;
 
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end");
         } break;
-        case EFFECT_CMD_SET_PARAM:{
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                //ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
+        case EFFECT_CMD_SET_PARAM: {
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                // ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+                // %d",
                 //       *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //       *replySize,
                 //       *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        cmdSize    != (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL ||
+                    cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : BassBoost_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                //ALOGV("\tnBassBoost_command cmdSize is %d\n"
+                // ALOGV("\tnBassBoost_command cmdSize is %d\n"
                 //        "\tsizeof(effect_param_t) is  %d\n"
                 //        "\tp->psize is %d\n"
                 //        "\tp->vsize is %d"
                 //        "\n",
                 //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-                *(int *)pReplyData = android::BassBoost_setParameter(pContext,
-                                                                     p->psize,
-                                                                     (void *)p->data,
-                                                                     p->vsize,
-                                                                     p->data + voffset);
+                *(int*)pReplyData = android::BassBoost_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-              // Warning this log will fail to properly read an int32_t value, assumes int16_t
-              //ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
-              //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
-              //        *replySize,
-              //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                // Warning: this log fails to properly read an int32_t value; it assumes int16_t.
+                // ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value
+                // %d",
+                //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+                //        *replySize,
+                //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+                //        sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        // legal parameters are int16_t or int32_t
-                        cmdSize    > (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int32_t)) ||
-                        cmdSize    < (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL ||
+                    // legal parameters are int16_t or int32_t
+                    cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int32_t)) ||
+                    cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) + sizeof(int16_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                //ALOGV("\tnVirtualizer_command cmdSize is %d\n"
+                // ALOGV("\tnVirtualizer_command cmdSize is %d\n"
                 //        "\tsizeof(effect_param_t) is  %d\n"
                 //        "\tp->psize is %d\n"
                 //        "\tp->vsize is %d"
                 //        "\n",
                 //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-                *(int *)pReplyData = android::Virtualizer_setParameter(pContext,
-                                                                       p->psize,
-                                                                       (void *)p->data,
-                                                                       p->vsize,
-                                                                       p->data + voffset);
+                *(int*)pReplyData = android::Virtualizer_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_EQUALIZER){
-               //ALOGV("\tEqualizer_command cmdCode Case: "
-               //        "EFFECT_CMD_SET_PARAM start");
-               //ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
-               //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
-               //        *replySize,
-               //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
+            if (pContext->EffectType == LVM_EQUALIZER) {
+                // ALOGV("\tEqualizer_command cmdCode Case: "
+                //        "EFFECT_CMD_SET_PARAM start");
+                // ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d
+                // ",
+                //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
+                //        *replySize,
+                //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +
+                //        sizeof(int32_t)));
 
                 if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
-                        pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Equalizer_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                *(int *)pReplyData = android::Equalizer_setParameter(pContext,
-                                                                     p->psize,
-                                                                     (void *)p->data,
-                                                                     p->vsize,
-                                                                     p->data + voffset);
+                *(int*)pReplyData = android::Equalizer_setParameter(
+                        pContext, p->psize, (void*)p->data, p->vsize, p->data + voffset);
             }
-            if(pContext->EffectType == LVM_VOLUME){
-                //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
-                //ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+            if (pContext->EffectType == LVM_VOLUME) {
+                // ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start");
+                // ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
                 //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
                 //        *replySize,
                 //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +sizeof(int32_t)));
 
-                if (pCmdData   == NULL ||
-                        cmdSize    < (sizeof(effect_param_t) + sizeof(int32_t)) ||
-                        pReplyData == NULL || replySize == NULL ||
-                        *replySize != sizeof(int32_t)) {
+                if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) ||
+                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                     ALOGV("\tLVM_ERROR : Volume_command cmdCode Case: "
-                            "EFFECT_CMD_SET_PARAM: ERROR");
+                          "EFFECT_CMD_SET_PARAM: ERROR");
                     return -EINVAL;
                 }
 
-                effect_param_t * const p = (effect_param_t *) pCmdData;
+                effect_param_t* const p = (effect_param_t*)pCmdData;
                 const uint32_t voffset = computeParamVOffset(p);
 
-                *(int *)pReplyData = android::Volume_setParameter(pContext,
-                                                                  p->psize,
-                                                                  (void *)p->data,
-                                                                  p->vsize,
-                                                                  p->data + voffset);
+                *(int*)pReplyData = android::Volume_setParameter(pContext, p->psize, (void*)p->data,
+                                                                 p->vsize, p->data + voffset);
             }
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end");
         } break;
 
         case EFFECT_CMD_ENABLE:
@@ -3603,57 +3347,56 @@
                 return -EINVAL;
             }
 
-            *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
+            *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE);
             break;
 
         case EFFECT_CMD_DISABLE:
-            //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
+            // ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start");
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_DISABLE: ERROR");
                 return -EINVAL;
             }
-            *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
+            *(int*)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE);
             break;
 
-        case EFFECT_CMD_SET_DEVICE:
-        {
+        case EFFECT_CMD_SET_DEVICE: {
             ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
-            if (pCmdData   == NULL){
+            if (pCmdData == NULL) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR");
                 return -EINVAL;
             }
 
-            uint32_t device = *(uint32_t *)pCmdData;
-            pContext->pBundledContext->nOutputDevice = (audio_devices_t) device;
+            audio_devices_t device = *(audio_devices_t*)pCmdData;
+            pContext->pBundledContext->nOutputDevice = device;
 
             if (pContext->EffectType == LVM_BASS_BOOST) {
-                if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
-                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
-                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+                if ((device == AUDIO_DEVICE_OUT_SPEAKER) ||
+                    (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
+                    (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)) {
                     ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
-                          *(int32_t *)pCmdData);
+                          *(int32_t*)pCmdData);
                     ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BASS_BOOST");
 
-                    // If a device doesnt support bassboost the effect must be temporarily disabled
+                    // If a device doesn't support bassboost the effect must be temporarily disabled
                     // the effect must still report its original state as this can only be changed
                     // by the ENABLE/DISABLE command
 
                     if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d",
-                             *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         android::LvmEffect_disable(pContext);
                     }
                     pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
                 } else {
                     ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d",
-                         *(int32_t *)pCmdData);
+                          *(int32_t*)pCmdData);
 
                     // If a device supports bassboost and the effect has been temporarily disabled
                     // previously then re-enable it
 
                     if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d",
-                             *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         android::LvmEffect_enable(pContext);
                     }
                     pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
@@ -3664,129 +3407,128 @@
                     // default case unless configuration is forced
                     if (android::VirtualizerIsDeviceSupported(device) != 0) {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
-                                *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
                         ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
 
-                        //If a device doesnt support virtualizer the effect must be temporarily
+                        // If a device doesn't support virtualizer the effect must be temporarily
                         // disabled the effect must still report its original state as this can
                         // only be changed by the ENABLE/DISABLE command
 
                         if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
                             ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
-                                    *(int32_t *)pCmdData);
+                                  *(int32_t*)pCmdData);
                             android::LvmEffect_disable(pContext);
                         }
                         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
                     } else {
                         ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
-                                *(int32_t *)pCmdData);
+                              *(int32_t*)pCmdData);
 
                         // If a device supports virtualizer and the effect has been temporarily
                         // disabled previously then re-enable it
 
-                        if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+                        if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
                             ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
-                                    *(int32_t *)pCmdData);
+                                  *(int32_t*)pCmdData);
                             android::LvmEffect_enable(pContext);
                         }
                         pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
                     }
-                } // else virtualization mode is forced to a certain device, nothing to do
+                }  // else virtualization mode is forced to a certain device, nothing to do
             }
             ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
             break;
         }
-        case EFFECT_CMD_SET_VOLUME:
-        {
+        case EFFECT_CMD_SET_VOLUME: {
             uint32_t leftVolume, rightVolume;
-            int16_t  leftdB, rightdB;
-            int16_t  maxdB, pandB;
-            int32_t  vol_ret[2] = {1<<24,1<<24}; // Apply no volume
-            LVM_ControlParams_t     ActiveParams;           /* Current control Parameters */
-            LVM_ReturnStatus_en     LvmStatus=LVM_SUCCESS;  /* Function call status */
+            int16_t leftdB, rightdB;
+            int16_t maxdB, pandB;
+            int32_t vol_ret[2] = {1 << 24, 1 << 24};     // Apply no volume
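+            // 1 << 24 (0x1000000) is unity gain in the 8.24 fixed-point volume format.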
+            LVM_ControlParams_t ActiveParams;            /* Current control Parameters */
+            LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */
 
             // if pReplyData is NULL, VOL_CTRL is delegated to another effect
-            if(pReplyData == LVM_NULL){
+            if (pReplyData == LVM_NULL) {
                 break;
             }
 
             if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL ||
-                    replySize == NULL || *replySize < 2*sizeof(int32_t)) {
+                replySize == NULL || *replySize < 2 * sizeof(int32_t)) {
                 ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: "
-                        "EFFECT_CMD_SET_VOLUME: ERROR");
+                      "EFFECT_CMD_SET_VOLUME: ERROR");
                 return -EINVAL;
             }
 
-            leftVolume  = ((*(uint32_t *)pCmdData));
-            rightVolume = ((*((uint32_t *)pCmdData + 1)));
+            leftVolume = ((*(uint32_t*)pCmdData));
+            rightVolume = ((*((uint32_t*)pCmdData + 1)));
 
-            if(leftVolume == 0x1000000){
+            if (leftVolume == 0x1000000) {
                 leftVolume -= 1;
             }
-            if(rightVolume == 0x1000000){
+            if (rightVolume == 0x1000000) {
                 rightVolume -= 1;
             }
 
             // Convert volume to dB
-            leftdB  = android::LVC_Convert_VolToDb(leftVolume);
+            leftdB = android::LVC_Convert_VolToDb(leftVolume);
             rightdB = android::LVC_Convert_VolToDb(rightVolume);
 
             pandB = rightdB - leftdB;
 
             // Calculate max volume in dB
             maxdB = leftdB;
-            if(rightdB > maxdB){
+            if (rightdB > maxdB) {
                 maxdB = rightdB;
             }
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB, "
             //      "effect is %d",
-            //pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
+            // pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId,
             //(int32_t)maxdB, pContext->EffectType);
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
-            //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume);
+            // ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB",
             //        leftdB, rightdB, pandB);
 
-            memcpy(pReplyData, vol_ret, sizeof(int32_t)*2);
-            android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB*100));
+            memcpy(pReplyData, vol_ret, sizeof(int32_t) * 2);
+            android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB * 100));
 
             /* Get the current settings */
-            LvmStatus =LVM_GetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+            LvmStatus =
+                    LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "VolumeSetStereoPosition")
-            if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+            if (LvmStatus != LVM_SUCCESS) return -EINVAL;
 
             /* Volume parameters */
-            ActiveParams.VC_Balance  = pandB;
-            ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance );
+            ActiveParams.VC_Balance = pandB;
+            ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", ActiveParams.VC_Balance);
 
             /* Activate the initial settings */
-            LvmStatus =LVM_SetControlParameters(pContext->pBundledContext->hInstance,&ActiveParams);
+            LvmStatus =
+                    LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VolumeSetStereoPosition")
-            if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+            if (LvmStatus != LVM_SUCCESS) return -EINVAL;
             break;
-         }
+        }
         case EFFECT_CMD_SET_AUDIO_MODE:
             break;
         default:
             return -EINVAL;
     }
 
-    //ALOGV("\tEffect_command end...\n\n");
+    // ALOGV("\tEffect_command end...\n\n");
     return 0;
-}    /* end Effect_command */
+} /* end Effect_command */
 
 /* Effect Control Interface Implementation: get_descriptor */
-int Effect_getDescriptor(effect_handle_t   self,
-                                    effect_descriptor_t *pDescriptor)
-{
-    EffectContext * pContext = (EffectContext *) self;
-    const effect_descriptor_t *desc;
+int Effect_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+    EffectContext* pContext = (EffectContext*)self;
+    const effect_descriptor_t* desc;
 
     if (pContext == NULL || pDescriptor == NULL) {
         ALOGV("Effect_getDescriptor() invalid param");
         return -EINVAL;
     }
 
-    switch(pContext->EffectType) {
+    switch (pContext->EffectType) {
         case LVM_BASS_BOOST:
             desc = &android::gBassBoostDescriptor;
             break;
@@ -3806,26 +3548,24 @@
     *pDescriptor = *desc;
 
     return 0;
-}   /* end Effect_getDescriptor */
+} /* end Effect_getDescriptor */
 
 // effect_handle_t interface implementation for effect
 const struct effect_interface_s gLvmEffectInterface = {
-    Effect_process,
-    Effect_command,
-    Effect_getDescriptor,
-    NULL,
-};    /* end gLvmEffectInterface */
+        Effect_process,
+        Effect_command,
+        Effect_getDescriptor,
+        NULL,
+}; /* end gLvmEffectInterface */
 
 // This is the only symbol that needs to be exported
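+// (the audio effects factory loads this library at runtime and resolves
+// AUDIO_EFFECT_LIBRARY_INFO_SYM to create effect instances)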
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Effect Bundle Library",
-    .implementor = "NXP Software Ltd.",
-    .create_effect = android::EffectCreate,
-    .release_effect = android::EffectRelease,
-    .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "Effect Bundle Library",
+        .implementor = "NXP Software Ltd.",
+        .create_effect = android::EffectCreate,
+        .release_effect = android::EffectRelease,
+        .get_descriptor = android::EffectGetDescriptor,
 };
-
 }
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 524e103..f3e7884 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -23,176 +23,148 @@
 #include <LVM.h>
 #include <limits.h>
 
-#define FIVEBAND_NUMBANDS          5
-#define MAX_NUM_BANDS              5
-#define MAX_CALL_SIZE              256
-#define LVM_MAX_SESSIONS           32
-#define LVM_UNUSED_SESSION         INT_MAX
-#define BASS_BOOST_CUP_LOAD_ARM9E  150    // Expressed in 0.1 MIPS
-#define VIRTUALIZER_CUP_LOAD_ARM9E 120    // Expressed in 0.1 MIPS
-#define EQUALIZER_CUP_LOAD_ARM9E   220    // Expressed in 0.1 MIPS
-#define VOLUME_CUP_LOAD_ARM9E      0      // Expressed in 0.1 MIPS
-#define BUNDLE_MEM_USAGE           25     // Expressed in kB
+#define FIVEBAND_NUMBANDS 5
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVM_MAX_SESSIONS 32
+#define LVM_UNUSED_SESSION INT_MAX
+#define BASS_BOOST_CUP_LOAD_ARM9E 150   // Expressed in 0.1 MIPS
+#define VIRTUALIZER_CUP_LOAD_ARM9E 120  // Expressed in 0.1 MIPS
+#define EQUALIZER_CUP_LOAD_ARM9E 220    // Expressed in 0.1 MIPS
+#define VOLUME_CUP_LOAD_ARM9E 0         // Expressed in 0.1 MIPS
+#define BUNDLE_MEM_USAGE 25             // Expressed in kB
 
 #ifndef OPENSL_ES_H_
-static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6,
-                                            { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
-const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_;
-#endif //OPENSL_ES_H_
+static const effect_uuid_t SL_IID_VOLUME_ = {
+        0x09e8ede0, 0xddde, 0x11db, 0xb4f6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+const effect_uuid_t* const SL_IID_VOLUME = &SL_IID_VOLUME_;
+#endif  // OPENSL_ES_H_
 
-typedef enum
-{
-    LVM_BASS_BOOST,
-    LVM_VIRTUALIZER,
-    LVM_EQUALIZER,
-    LVM_VOLUME
-} lvm_effect_en;
+typedef enum { LVM_BASS_BOOST, LVM_VIRTUALIZER, LVM_EQUALIZER, LVM_VOLUME } lvm_effect_en;
 
 // Preset configuration.
 struct PresetConfig {
     // Human-readable name.
-    const char * name;
+    const char* name;
     // An array of size nBands where each element is a configuration for the
     // corresponding band.
-    //const BandConfig * bandConfigs;
+    // const BandConfig * bandConfigs;
 };
 
 /* BundledEffectContext : One per session */
-struct BundledEffectContext{
-    LVM_Handle_t                    hInstance;                /* Instance handle */
-    int                             SessionNo;                /* Current session number */
-    int                             SessionId;                /* Current session id */
-    bool                            bVolumeEnabled;           /* Flag for Volume */
-    bool                            bEqualizerEnabled;        /* Flag for EQ */
-    bool                            bBassEnabled;             /* Flag for Bass */
-    bool                            bBassTempDisabled;        /* Flag for Bass to be re-enabled */
-    bool                            bVirtualizerEnabled;      /* Flag for Virtualizer */
-    bool                            bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */
-    audio_devices_t                 nOutputDevice;            /* Output device for the effect */
-    audio_devices_t                 nVirtualizerForcedDevice; /* Forced device virtualization mode*/
-    int                             NumberEffectsEnabled;     /* Effects in this session */
-    int                             NumberEffectsCalled;      /* Effects called so far */
-    bool                            firstVolume;              /* No smoothing on first Vol change */
+struct BundledEffectContext {
+    LVM_Handle_t hInstance;                   /* Instance handle */
+    int SessionNo;                            /* Current session number */
+    int SessionId;                            /* Current session id */
+    bool bVolumeEnabled;                      /* Flag for Volume */
+    bool bEqualizerEnabled;                   /* Flag for EQ */
+    bool bBassEnabled;                        /* Flag for Bass */
+    bool bBassTempDisabled;                   /* Flag for Bass to be re-enabled */
+    bool bVirtualizerEnabled;                 /* Flag for Virtualizer */
+    bool bVirtualizerTempDisabled;            /* Flag for effect to be re-enabled */
+    audio_devices_t nOutputDevice;            /* Output device for the effect */
+    audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode*/
+    int NumberEffectsEnabled;                 /* Effects in this session */
+    int NumberEffectsCalled;                  /* Effects called so far */
+    bool firstVolume;                         /* No smoothing on first Vol change */
     // Saved parameters for each effect
     // Bass Boost
-    int                             BassStrengthSaved;        /* Conversion between Get/Set */
+    int BassStrengthSaved; /* Conversion between Get/Set */
     // Equalizer
-    int                             CurPreset;                /* Current preset being used */
+    int CurPreset; /* Current preset being used */
     // Virtualizer
-    int                             VirtStrengthSaved;        /* Conversion between Get/Set */
+    int VirtStrengthSaved; /* Conversion between Get/Set */
     // Volume
-    int                             levelSaved;     /* for when mute is set, level must be saved */
-    int                             positionSaved;
-    bool                            bMuteEnabled;   /* Must store as mute = -96dB level */
-    bool                            bStereoPositionEnabled;
-    LVM_Fs_en                       SampleRate;
-    int                             SamplesPerSecond;
-    int                             SamplesToExitCountEq;
-    int                             SamplesToExitCountBb;
-    int                             SamplesToExitCountVirt;
-    effect_buffer_t                 *workBuffer;
-    int                             frameCount;
-    int32_t                         bandGaindB[FIVEBAND_NUMBANDS];
-    int                             volume;
-#ifdef SUPPORT_MC
-    LVM_INT32                       ChMask;
-#endif
+    int levelSaved; /* for when mute is set, level must be saved */
+    int positionSaved;
+    bool bMuteEnabled; /* Must store as mute = -96dB level */
+    bool bStereoPositionEnabled;
+    LVM_Fs_en SampleRate;
+    int SamplesPerSecond;
+    int SamplesToExitCountEq;
+    int SamplesToExitCountBb;
+    int SamplesToExitCountVirt;
+    effect_buffer_t* workBuffer;
+    int frameCount;
+    int32_t bandGaindB[FIVEBAND_NUMBANDS];
+    int volume;
+    LVM_INT32 ChMask;
 
     /* Bitmask whether drain is in progress due to disabling the effect.
        The corresponding bit to an effect is set by 1 << lvm_effect_en. */
-    int                             effectInDrain;
+    int effectInDrain;
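+    /* e.g. while the equalizer drains, bit 1 << LVM_EQUALIZER (0x4) is set here. */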
 
     /* Bitmask whether process() was called for a particular effect.
        The corresponding bit to an effect is set by 1 << lvm_effect_en. */
-    int                             effectProcessCalled;
+    int effectProcessCalled;
 };
 
 /* SessionContext : One session */
-struct SessionContext{
-    bool                            bBundledEffectsEnabled;
-    bool                            bVolumeInstantiated;
-    bool                            bEqualizerInstantiated;
-    bool                            bBassInstantiated;
-    bool                            bVirtualizerInstantiated;
-    BundledEffectContext            *pBundledContext;
+struct SessionContext {
+    bool bBundledEffectsEnabled;
+    bool bVolumeInstantiated;
+    bool bEqualizerInstantiated;
+    bool bBassInstantiated;
+    bool bVirtualizerInstantiated;
+    BundledEffectContext* pBundledContext;
 };
 
-struct EffectContext{
-    const struct effect_interface_s *itfe;
-    effect_config_t                 config;
-    lvm_effect_en                   EffectType;
-    BundledEffectContext            *pBundledContext;
+struct EffectContext {
+    const struct effect_interface_s* itfe;
+    effect_config_t config;
+    lvm_effect_en EffectType;
+    BundledEffectContext* pBundledContext;
 };
 
 /* enumerated parameter settings for Volume effect */
-typedef enum
-{
-    VOLUME_PARAM_LEVEL,                       // type SLmillibel = typedef SLuint16 (set & get)
-    VOLUME_PARAM_MAXLEVEL,                    // type SLmillibel = typedef SLuint16 (get)
-    VOLUME_PARAM_MUTE,                        // type SLboolean  = typedef SLuint32 (set & get)
-    VOLUME_PARAM_ENABLESTEREOPOSITION,        // type SLboolean  = typedef SLuint32 (set & get)
-    VOLUME_PARAM_STEREOPOSITION,              // type SLpermille = typedef SLuint16 (set & get)
+typedef enum {
+    VOLUME_PARAM_LEVEL,                 // type SLmillibel = typedef SLuint16 (set & get)
+    VOLUME_PARAM_MAXLEVEL,              // type SLmillibel = typedef SLuint16 (get)
+    VOLUME_PARAM_MUTE,                  // type SLboolean  = typedef SLuint32 (set & get)
+    VOLUME_PARAM_ENABLESTEREOPOSITION,  // type SLboolean  = typedef SLuint32 (set & get)
+    VOLUME_PARAM_STEREOPOSITION,        // type SLpermille = typedef SLuint16 (set & get)
 } t_volume_params;
 
 static const int PRESET_CUSTOM = -1;
 
-static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {
-                                       {30000, 120000},
-                                       {120001, 460000},
-                                       {460001, 1800000},
-                                       {1800001, 7000000},
-                                       {7000001, 20000000}};
+static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {{30000, 120000},
+                                                             {120001, 460000},
+                                                             {460001, 1800000},
+                                                             {1800001, 7000000},
+                                                             {7000001, 20000000}};
 
-//Note: If these frequencies change, please update LimitLevel values accordingly.
-static const LVM_UINT16  EQNB_5BandPresetsFrequencies[] = {
-                                       60,           /* Frequencies in Hz */
-                                       230,
-                                       910,
-                                       3600,
-                                       14000};
+// Note: If these frequencies change, please update LimitLevel values accordingly.
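+// Each preset table below stores FIVEBAND_NUMBANDS values per preset, one per band, in the same
+// band order as these frequencies.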
+static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {60, /* Frequencies in Hz */
+                                                          230, 910, 3600, 14000};
 
-static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {
-                                       96,               /* Q factor multiplied by 100 */
-                                       96,
-                                       96,
-                                       96,
-                                       96};
+static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = {96, /* Q factor multiplied by 100 */
+                                                       96, 96, 96, 96};
 
-static const LVM_INT16 EQNB_5BandNormalPresets[] = {
-                                       3, 0, 0, 0, 3,       /* Normal Preset */
-                                       8, 5, -3, 5, 6,      /* Classical Preset */
-                                       15, -6, 7, 13, 10,   /* Dance Preset */
-                                       0, 0, 0, 0, 0,       /* Flat Preset */
-                                       6, -2, -2, 6, -3,    /* Folk Preset */
-                                       8, -8, 13, -1, -4,   /* Heavy Metal Preset */
-                                       10, 6, -4, 5, 8,     /* Hip Hop Preset */
-                                       8, 5, -4, 5, 9,      /* Jazz Preset */
-                                      -6, 4, 9, 4, -5,      /* Pop Preset */
-                                       10, 6, -1, 8, 10};   /* Rock Preset */
+static const LVM_INT16 EQNB_5BandNormalPresets[] = {3,  0,  0,  0,  3,   /* Normal Preset */
+                                                    8,  5,  -3, 5,  6,   /* Classical Preset */
+                                                    15, -6, 7,  13, 10,  /* Dance Preset */
+                                                    0,  0,  0,  0,  0,   /* Flat Preset */
+                                                    6,  -2, -2, 6,  -3,  /* Folk Preset */
+                                                    8,  -8, 13, -1, -4,  /* Heavy Metal Preset */
+                                                    10, 6,  -4, 5,  8,   /* Hip Hop Preset */
+                                                    8,  5,  -4, 5,  9,   /* Jazz Preset */
+                                                    -6, 4,  9,  4,  -5,  /* Pop Preset */
+                                                    10, 6,  -1, 8,  10}; /* Rock Preset */
 
-static const LVM_INT16 EQNB_5BandSoftPresets[] = {
-                                        3, 0, 0, 0, 3,      /* Normal Preset */
-                                        5, 3, -2, 4, 4,     /* Classical Preset */
-                                        6, 0, 2, 4, 1,      /* Dance Preset */
-                                        0, 0, 0, 0, 0,      /* Flat Preset */
-                                        3, 0, 0, 2, -1,     /* Folk Preset */
-                                        4, 1, 9, 3, 0,      /* Heavy Metal Preset */
-                                        5, 3, 0, 1, 3,      /* Hip Hop Preset */
-                                        4, 2, -2, 2, 5,     /* Jazz Preset */
-                                       -1, 2, 5, 1, -2,     /* Pop Preset */
-                                        5, 3, -1, 3, 5};    /* Rock Preset */
+static const LVM_INT16 EQNB_5BandSoftPresets[] = {3,  0, 0,  0, 3,  /* Normal Preset */
+                                                  5,  3, -2, 4, 4,  /* Classical Preset */
+                                                  6,  0, 2,  4, 1,  /* Dance Preset */
+                                                  0,  0, 0,  0, 0,  /* Flat Preset */
+                                                  3,  0, 0,  2, -1, /* Folk Preset */
+                                                  4,  1, 9,  3, 0,  /* Heavy Metal Preset */
+                                                  5,  3, 0,  1, 3,  /* Hip Hop Preset */
+                                                  4,  2, -2, 2, 5,  /* Jazz Preset */
+                                                  -1, 2, 5,  1, -2, /* Pop Preset */
+                                                  5,  3, -1, 3, 5}; /* Rock Preset */
 
-static const PresetConfig gEqualizerPresets[] = {
-                                        {"Normal"},
-                                        {"Classical"},
-                                        {"Dance"},
-                                        {"Flat"},
-                                        {"Folk"},
-                                        {"Heavy Metal"},
-                                        {"Hip Hop"},
-                                        {"Jazz"},
-                                        {"Pop"},
-                                        {"Rock"}};
+static const PresetConfig gEqualizerPresets[] = {{"Normal"}, {"Classical"},   {"Dance"},   {"Flat"},
+                                                 {"Folk"},   {"Heavy Metal"}, {"Hip Hop"}, {"Jazz"},
+                                                 {"Pop"},    {"Rock"}};
 
 /* The following tables have been computed using the actual levels measured by the output of
  * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
@@ -201,14 +173,14 @@
  * updated.
  */
 
-static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {
-        7.56, 9.69, 9.59, 7.37, 2.88};
+static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {7.56, 9.69, 9.59, 7.37,
+                                                                          2.88};
 
-static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = {
-        126.0, 115.0, 125.0, 104.0 };
+static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS - 1] = {126.0, 115.0,
+                                                                                   125.0, 104.0};
 
 static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = {
-        221.21, 208.10, 28.16, 0.0, 0.0 };
+        221.21, 208.10, 28.16, 0.0, 0.0};
 
 static const float LimitLevel_bassBoostEnergyCoefficient = 9.00;
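As an illustration only (the actual limiter math lives in the bundle wrapper sources, which this hunk does not show), the LimitLevel_* coefficient tables above are typically consumed as weights in a headroom/energy estimate. The helper name below, the use of linear per-band gains, and the omission of the cross-band and bass-boost terms are assumptions for the sketch, not the wrapper's real formula.

// Illustrative only: estimate output energy from linear per-band gains using
// the per-band coefficients above. Cross-band and bass-boost terms are
// deliberately omitted; this is not the library's actual limiter math.
static inline float estimateBandEnergy(const float (&bandGains)[FIVEBAND_NUMBANDS]) {
    float energy = 0.0f;
    for (int i = 0; i < FIVEBAND_NUMBANDS; ++i) {
        energy += LimitLevel_bandEnergyCoefficient[i] * bandGains[i] * bandGains[i];
    }
    return energy;
}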
 
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 39f5bb6..290a7b1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -18,7 +18,7 @@
 typedef float LVM_FLOAT;
 #endif
 #define LOG_TAG "Reverb"
-#define ARRAY_SIZE(array) (sizeof (array) / sizeof (array)[0])
+#define ARRAY_SIZE(array) (sizeof(array) / sizeof(array)[0])
 //#define LOG_NDEBUG 0
 
 #include <assert.h>
@@ -33,23 +33,28 @@
 #include "EffectReverb.h"
 // from Reverb/lib
 #include "LVREV.h"
+#include "VectorArithmetic.h"
 
 // effect_handle_t interface implementation for reverb
 extern "C" const struct effect_interface_s gReverbInterface;
 
-#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\
-        if ((LvmStatus) == LVREV_NULLADDRESS){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\
-        }\
-        if ((LvmStatus) == LVREV_OUTOFRANGE){\
-            ALOGV("\tLVREV_ERROR : Parameter error - "\
-                    "out of range returned by %s in %s\n", callingFunc, calledFunc);\
-        }\
+#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc)             \
+    {                                                                   \
+        if ((LvmStatus) == LVREV_NULLADDRESS) {                         \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "null pointer returned by %s in %s\n\n\n\n",          \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVREV_INVALIDNUMSAMPLES) {                   \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "bad number of samples returned by %s in %s\n\n\n\n", \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
+        if ((LvmStatus) == LVREV_OUTOFRANGE) {                          \
+            ALOGV("\tLVREV_ERROR : Parameter error - "                  \
+                  "out of range returned by %s in %s\n",                \
+                  callingFunc, calledFunc);                             \
+        }                                                               \
     }
 
 // Namespaces
@@ -81,8 +86,8 @@
 
 // NXP SW auxiliary environmental reverb
 const effect_descriptor_t gAuxEnvReverbDescriptor = {
-        { 0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, { 0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e } },
-        { 0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } },
+        {0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}},
+        {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
         EFFECT_CONTROL_API_VERSION,
         EFFECT_FLAG_TYPE_AUXILIARY,
         LVREV_CUP_LOAD_ARM9E,
@@ -128,42 +133,39 @@
 };
 
 // gDescriptors contains pointers to all defined effect descriptor in this library
-static const effect_descriptor_t * const gDescriptors[] = {
-        &gAuxEnvReverbDescriptor,
-        &gInsertEnvReverbDescriptor,
-        &gAuxPresetReverbDescriptor,
-        &gInsertPresetReverbDescriptor
-};
+static const effect_descriptor_t* const gDescriptors[] = {
+        &gAuxEnvReverbDescriptor, &gInsertEnvReverbDescriptor, &gAuxPresetReverbDescriptor,
+        &gInsertPresetReverbDescriptor};
 
-typedef     float               process_buffer_t; // process in float
+typedef float process_buffer_t;  // process in float
 
-struct ReverbContext{
-    const struct effect_interface_s *itfe;
-    effect_config_t                 config;
-    LVREV_Handle_t                  hInstance;
-    int16_t                         SavedRoomLevel;
-    int16_t                         SavedHfLevel;
-    int16_t                         SavedDecayTime;
-    int16_t                         SavedDecayHfRatio;
-    int16_t                         SavedReverbLevel;
-    int16_t                         SavedDiffusion;
-    int16_t                         SavedDensity;
-    bool                            bEnabled;
-    LVM_Fs_en                       SampleRate;
-    process_buffer_t                *InFrames;
-    process_buffer_t                *OutFrames;
-    size_t                          bufferSizeIn;
-    size_t                          bufferSizeOut;
-    bool                            auxiliary;
-    bool                            preset;
-    uint16_t                        curPreset;
-    uint16_t                        nextPreset;
-    int                             SamplesToExitCount;
-    LVM_INT16                       leftVolume;
-    LVM_INT16                       rightVolume;
-    LVM_INT16                       prevLeftVolume;
-    LVM_INT16                       prevRightVolume;
-    int                             volumeMode;
+struct ReverbContext {
+    const struct effect_interface_s* itfe;
+    effect_config_t config;
+    LVREV_Handle_t hInstance;
+    int16_t SavedRoomLevel;
+    int16_t SavedHfLevel;
+    int16_t SavedDecayTime;
+    int16_t SavedDecayHfRatio;
+    int16_t SavedReverbLevel;
+    int16_t SavedDiffusion;
+    int16_t SavedDensity;
+    bool bEnabled;
+    LVM_Fs_en SampleRate;
+    process_buffer_t* InFrames;
+    process_buffer_t* OutFrames;
+    size_t bufferSizeIn;
+    size_t bufferSizeOut;
+    bool auxiliary;
+    bool preset;
+    uint16_t curPreset;
+    uint16_t nextPreset;
+    int SamplesToExitCount;
+    LVM_INT16 leftVolume;
+    LVM_INT16 rightVolume;
+    LVM_INT16 prevLeftVolume;
+    LVM_INT16 prevRightVolume;
+    int volumeMode;
 };
 
 enum {
@@ -174,44 +176,38 @@
 
 #define REVERB_DEFAULT_PRESET REVERB_PRESET_NONE
 
-#define REVERB_SEND_LEVEL   0.75f // 0.75 in 4.12 format
-#define REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
+#define REVERB_SEND_LEVEL 0.75f      // 0.75 in 4.12 format
+#define REVERB_UNIT_VOLUME (0x1000)  // 1.0 in 4.12 format
 
 //--- local function prototypes
-int  Reverb_init            (ReverbContext *pContext);
-void Reverb_free            (ReverbContext *pContext);
-int  Reverb_setConfig       (ReverbContext *pContext, effect_config_t *pConfig);
-void Reverb_getConfig       (ReverbContext *pContext, effect_config_t *pConfig);
-int  Reverb_setParameter    (ReverbContext *pContext, void *pParam, void *pValue, int vsize);
-int  Reverb_getParameter    (ReverbContext *pContext,
-                             void          *pParam,
-                             uint32_t      *pValueSize,
-                             void          *pValue);
-int Reverb_LoadPreset       (ReverbContext   *pContext);
-int Reverb_paramValueSize   (int32_t param);
+int Reverb_init(ReverbContext* pContext);
+void Reverb_free(ReverbContext* pContext);
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig);
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig);
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize);
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue);
+int Reverb_LoadPreset(ReverbContext* pContext);
+int Reverb_paramValueSize(int32_t param);
 
 /* Effect Library Interface Implementation */
 
-extern "C" int EffectCreate(const effect_uuid_t *uuid,
-                            int32_t             sessionId __unused,
-                            int32_t             ioId __unused,
-                            effect_handle_t  *pHandle){
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t /* sessionId __unused */,
+                            int32_t /* ioId __unused */, effect_handle_t* pHandle) {
     int ret;
     int i;
-    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
-    const effect_descriptor_t *desc;
+    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
+    const effect_descriptor_t* desc;
 
     ALOGV("\t\nEffectCreate start");
 
-    if (pHandle == NULL || uuid == NULL){
+    if (pHandle == NULL || uuid == NULL) {
         ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer");
         return -EINVAL;
     }
 
     for (i = 0; i < length; i++) {
         desc = gDescriptors[i];
-        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t))
-                == 0) {
+        if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) == 0) {
             ALOGV("\tEffectCreate - UUID matched Reverb type %d, UUID = %x", i, desc->uuid.timeLow);
             break;
         }
@@ -221,16 +217,16 @@
         return -ENOENT;
     }
 
-    ReverbContext *pContext = new ReverbContext;
+    ReverbContext* pContext = new ReverbContext;
 
-    pContext->itfe      = &gReverbInterface;
+    pContext->itfe = &gReverbInterface;
     pContext->hInstance = NULL;
 
     pContext->auxiliary = false;
-    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY){
+    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
         pContext->auxiliary = true;
         ALOGV("\tEffectCreate - AUX");
-    }else{
+    } else {
         ALOGV("\tEffectCreate - INS");
     }
 
@@ -241,14 +237,14 @@
         pContext->curPreset = REVERB_PRESET_LAST + 1;
         pContext->nextPreset = REVERB_DEFAULT_PRESET;
         ALOGV("\tEffectCreate - PRESET");
-    }else{
+    } else {
         ALOGV("\tEffectCreate - ENVIRONMENTAL");
     }
 
     ALOGV("\tEffectCreate - Calling Reverb_init");
     ret = Reverb_init(pContext);
 
-    if (ret < 0){
+    if (ret < 0) {
         ALOGV("\tLVM_ERROR : EffectCreate() init failed");
         delete pContext;
         return ret;
@@ -256,25 +252,25 @@
 
     *pHandle = (effect_handle_t)pContext;
 
-
     int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
 
+    channels = pContext->auxiliary ? channels : FCC_2;
     // Allocate memory for reverb process (*2 is for STEREO)
     pContext->bufferSizeIn = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * channels;
     pContext->bufferSizeOut = LVREV_MAX_FRAME_SIZE * sizeof(process_buffer_t) * FCC_2;
-    pContext->InFrames  = (process_buffer_t *)calloc(pContext->bufferSizeIn, 1 /* size */);
-    pContext->OutFrames = (process_buffer_t *)calloc(pContext->bufferSizeOut, 1 /* size */);
+    pContext->InFrames = (process_buffer_t*)calloc(pContext->bufferSizeIn, 1 /* size */);
+    pContext->OutFrames = (process_buffer_t*)calloc(pContext->bufferSizeOut, 1 /* size */);
 
     ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
     ALOGV("\tEffectCreate end\n");
     return 0;
 } /* end EffectCreate */
 
-extern "C" int EffectRelease(effect_handle_t handle){
-    ReverbContext * pContext = (ReverbContext *)handle;
+extern "C" int EffectRelease(effect_handle_t handle) {
+    ReverbContext* pContext = (ReverbContext*)handle;
 
     ALOGV("\tEffectRelease %p", handle);
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer");
         return -EINVAL;
     }
@@ -288,12 +284,11 @@
     return 0;
 } /* end EffectRelease */
 
-extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid,
-                                   effect_descriptor_t *pDescriptor) {
+extern "C" int EffectGetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
     int i;
-    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *);
+    int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
 
-    if (pDescriptor == NULL || uuid == NULL){
+    if (pDescriptor == NULL || uuid == NULL) {
         ALOGV("EffectGetDescriptor() called with NULL pointer");
         return -EINVAL;
     }
@@ -301,8 +296,8 @@
     for (i = 0; i < length; i++) {
         if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
             *pDescriptor = *gDescriptors[i];
-            ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x",
-                 i, gDescriptors[i]->uuid.timeLow);
+            ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x", i,
+                  gDescriptors[i]->uuid.timeLow);
             return 0;
         }
     }
@@ -311,12 +306,13 @@
 } /* end EffectGetDescriptor */
 
 /* local functions */
-#define CHECK_ARG(cond) {                     \
-    if (!(cond)) {                            \
-        ALOGV("\tLVM_ERROR : Invalid argument: "#cond);      \
-        return -EINVAL;                       \
-    }                                         \
-}
+#define CHECK_ARG(cond)                                      \
+    {                                                        \
+        if (!(cond)) {                                       \
+            ALOGV("\tLVM_ERROR : Invalid argument: " #cond); \
+            return -EINVAL;                                  \
+        }                                                    \
+    }
 
 //----------------------------------------------------------------------------
 // process()
@@ -335,91 +331,101 @@
 //  pOut:       pointer to updated stereo 16 bit output data
 //
 //----------------------------------------------------------------------------
-int process( effect_buffer_t   *pIn,
-             effect_buffer_t   *pOut,
-             int           frameCount,
-             ReverbContext *pContext){
-
+int process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount, ReverbContext* pContext) {
     int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
-    LVREV_ReturnStatus_en   LvmStatus = LVREV_SUCCESS;              /* Function call status */
+    int outChannels = audio_channel_count_from_out_mask(pContext->config.outputCfg.channels);
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
-    // Check that the input is either mono or stereo
-    if (!(channels == 1 || channels == FCC_2) ) {
-        ALOGE("\tLVREV_ERROR : process invalid PCM format");
+    // Reverb only affects the stereo channels in a multichannel source.
+    if (channels < 1 || channels > LVM_MAX_CHANNELS) {
+        ALOGE("\tLVREV_ERROR : process invalid PCM channels %d", channels);
         return -EINVAL;
     }
 
     size_t inSize = frameCount * sizeof(process_buffer_t) * channels;
     size_t outSize = frameCount * sizeof(process_buffer_t) * FCC_2;
-    if (pContext->InFrames == NULL ||
-            pContext->bufferSizeIn < inSize) {
+    if (pContext->InFrames == NULL || pContext->bufferSizeIn < inSize) {
         free(pContext->InFrames);
         pContext->bufferSizeIn = inSize;
-        pContext->InFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeIn);
+        pContext->InFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeIn);
     }
-    if (pContext->OutFrames == NULL ||
-            pContext->bufferSizeOut < outSize) {
+    if (pContext->OutFrames == NULL || pContext->bufferSizeOut < outSize) {
         free(pContext->OutFrames);
         pContext->bufferSizeOut = outSize;
-        pContext->OutFrames = (process_buffer_t *)calloc(1, pContext->bufferSizeOut);
+        pContext->OutFrames = (process_buffer_t*)calloc(1, pContext->bufferSizeOut);
     }
 
-
     // Check for NULL pointers
     if ((pContext->InFrames == NULL) || (pContext->OutFrames == NULL)) {
         ALOGE("\tLVREV_ERROR : process failed to allocate memory for temporary buffers ");
         return -EINVAL;
     }
 
-
     if (pContext->preset && pContext->nextPreset != pContext->curPreset) {
         Reverb_LoadPreset(pContext);
     }
 
     if (pContext->auxiliary) {
         static_assert(std::is_same<decltype(*pIn), decltype(*pContext->InFrames)>::value,
-                "pIn and InFrames must be same type");
+                      "pIn and InFrames must be same type");
         memcpy(pContext->InFrames, pIn, frameCount * channels * sizeof(*pIn));
+    } else {
+        // stereo (or wider) input feeds L/R directly; mono input is duplicated below
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                pContext->InFrames[FCC_2 * i] =
+                        (process_buffer_t)pIn[channels * i] * REVERB_SEND_LEVEL;
+                pContext->InFrames[FCC_2 * i + 1] =
+                        (process_buffer_t)pIn[channels * i + 1] * REVERB_SEND_LEVEL;
+            }
         } else {
-        // insert reverb input is always stereo
-        for (int i = 0; i < frameCount; i++) {
-            pContext->InFrames[2 * i] = (process_buffer_t)pIn[2 * i] * REVERB_SEND_LEVEL;
-            pContext->InFrames[2 * i + 1] = (process_buffer_t)pIn[2 * i + 1] * REVERB_SEND_LEVEL;
+            for (int i = 0; i < frameCount; i++) {
+                pContext->InFrames[FCC_2 * i] = pContext->InFrames[FCC_2 * i + 1] =
+                        (process_buffer_t)pIn[i] * REVERB_SEND_LEVEL;
+            }
         }
     }
 
     if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) {
         memset(pContext->OutFrames, 0,
-                frameCount * sizeof(*pContext->OutFrames) * FCC_2); //always stereo here
+               frameCount * sizeof(*pContext->OutFrames) * FCC_2);  // always stereo here
     } else {
-        if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
-            memset(pContext->InFrames, 0,
-                    frameCount * sizeof(*pContext->OutFrames) * channels);
+        if (pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) {
+            memset(pContext->InFrames, 0, frameCount * sizeof(*pContext->OutFrames) * channels);
             ALOGV("\tZeroing %d samples per frame at the end of call", channels);
         }
 
         /* Process the samples, producing a stereo output */
-        LvmStatus = LVREV_Process(pContext->hInstance,      /* Instance handle */
-                                  pContext->InFrames,     /* Input buffer */
-                                  pContext->OutFrames,    /* Output buffer */
-                                  frameCount);              /* Number of samples to read */
+        LvmStatus = LVREV_Process(pContext->hInstance, /* Instance handle */
+                                  pContext->InFrames,  /* Input buffer */
+                                  pContext->OutFrames, /* Output buffer */
+                                  frameCount);         /* Number of samples to read */
     }
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
     // Convert to 16 bits
     if (pContext->auxiliary) {
         // nothing to do here
     } else {
-        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
-            // Mix with dry input
-            pContext->OutFrames[i] += pIn[i];
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                pContext->OutFrames[FCC_2 * i] += pIn[channels * i];
+                pContext->OutFrames[FCC_2 * i + 1] += pIn[channels * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                pContext->OutFrames[FCC_2 * i] += pIn[i];
+                pContext->OutFrames[FCC_2 * i + 1] += pIn[i];
+            }
         }
         // apply volume with ramp if needed
         if ((pContext->leftVolume != pContext->prevLeftVolume ||
-                pContext->rightVolume != pContext->prevRightVolume) &&
-                pContext->volumeMode == REVERB_VOLUME_RAMP) {
+             pContext->rightVolume != pContext->prevRightVolume) &&
+            pContext->volumeMode == REVERB_VOLUME_RAMP) {
             // FIXME: still using int16 volumes.
             // For reference: REVERB_UNIT_VOLUME  (0x1000) // 1.0 in 4.12 format
             float vl = (float)pContext->prevLeftVolume / 4096;
@@ -450,20 +456,51 @@
         }
     }
 
-
-    // Accumulate if required
-    if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){
-        //ALOGV("\tBuffer access is ACCUMULATE");
-        for (int i = 0; i < frameCount * FCC_2; i++) { // always stereo here
-            pOut[i] += pContext->OutFrames[i];
+    if (outChannels > 2) {
+        // Accumulate if required
+        if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (int i = 0; i < frameCount; i++) {
+                pOut[outChannels * i] += pContext->OutFrames[FCC_2 * i];
+                pOut[outChannels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                pOut[outChannels * i] = pContext->OutFrames[FCC_2 * i];
+                pOut[outChannels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
+            }
         }
-    }else{
-        //ALOGV("\tBuffer access is WRITE");
-        memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+        if (!pContext->auxiliary) {
+            for (int i = 0; i < frameCount; i++) {
+                // channels and outChannels are expected to be the same.
+                for (int j = FCC_2; j < outChannels; j++) {
+                    pOut[outChannels * i + j] = pIn[outChannels * i + j];
+                }
+            }
+        }
+    } else {
+        if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            if (outChannels == FCC_1) {
+                for (int i = 0; i < frameCount; i++) {
+                    pOut[i] +=
+                            ((pContext->OutFrames[i * FCC_2] + pContext->OutFrames[i * FCC_2 + 1]) *
+                             0.5f);
+                }
+            } else {
+                for (int i = 0; i < frameCount * FCC_2; i++) {
+                    pOut[i] += pContext->OutFrames[i];
+                }
+            }
+        } else {
+            if (outChannels == FCC_1) {
+                From2iToMono_Float((const process_buffer_t*)pContext->OutFrames, pOut, frameCount);
+            } else {
+                memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+            }
+        }
     }
 
     return 0;
-}    /* end process */
+} /* end process */
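process() always renders the wet signal into a stereo OutFrames buffer and then adapts it to the output channel count: stereo passes straight through, wider layouts receive the wet signal on the first two channels, and a mono output takes the average of the two reverb channels (explicitly in the accumulate branch, and via From2iToMono_Float in the write branch). A minimal standalone sketch of that stereo-to-mono fold-down over interleaved float frames, shown purely for illustration:

// Illustrative only: average an interleaved stereo buffer into mono, mirroring
// the accumulate path's (L + R) * 0.5f per frame.
static void foldStereoToMono(const float* stereoIn, float* monoOut, int frameCount) {
    for (int i = 0; i < frameCount; ++i) {
        monoOut[i] = 0.5f * (stereoIn[2 * i] + stereoIn[2 * i + 1]);
    }
}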
 
 //----------------------------------------------------------------------------
 // Reverb_free()
@@ -477,30 +514,12 @@
 //
 //----------------------------------------------------------------------------
 
-void Reverb_free(ReverbContext *pContext){
+void Reverb_free(ReverbContext* pContext) {
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;         /* Function call status */
-    LVREV_MemoryTable_st      MemTab;
-
-    /* Free the algorithm memory */
-    LvmStatus = LVREV_GetMemoryTable(pContext->hInstance,
-                                   &MemTab,
-                                   LVM_NULL);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free")
-
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            if (MemTab.Region[i].pBaseAddress != NULL){
-                free(MemTab.Region[i].pBaseAddress);
-            }else{
-                ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32 " bytes "
-                        "for region %u at %p ERROR\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-}    /* end Reverb_free */
+    LvmStatus = LVREV_FreeInstance(pContext->hInstance);
+    LVM_ERROR_CHECK(LvmStatus, "LVREV_FreeInstance", "Reverb_free")
+} /* end Reverb_free */
 
 //----------------------------------------------------------------------------
 // Reverb_setConfig()
@@ -516,89 +535,59 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){
-    LVM_Fs_en   SampleRate;
-    //ALOGV("\tReverb_setConfig start");
+int Reverb_setConfig(ReverbContext* pContext, effect_config_t* pConfig) {
+    LVM_Fs_en SampleRate;
+    // ALOGV("\tReverb_setConfig start");
 
     CHECK_ARG(pContext != NULL);
     CHECK_ARG(pConfig != NULL);
 
     CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate);
     CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format);
+    int inputChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
     CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) ||
-              ((!pContext->auxiliary) && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO));
-    CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO);
-    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE
-              || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
+              ((!pContext->auxiliary) && (inputChannels <= LVM_MAX_CHANNELS)));
+    int outputChannels = audio_channel_count_from_out_mask(pConfig->outputCfg.channels);
+    CHECK_ARG(outputChannels <= LVM_MAX_CHANNELS);
+    CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
+              pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
-    //ALOGV("\tReverb_setConfig calling memcpy");
+    // ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
-    switch (pConfig->inputCfg.samplingRate) {
-    case 8000:
-        SampleRate = LVM_FS_8000;
-        break;
-    case 16000:
-        SampleRate = LVM_FS_16000;
-        break;
-    case 22050:
-        SampleRate = LVM_FS_22050;
-        break;
-    case 32000:
-        SampleRate = LVM_FS_32000;
-        break;
-    case 44100:
-        SampleRate = LVM_FS_44100;
-        break;
-    case 48000:
-        SampleRate = LVM_FS_48000;
-        break;
-    case 88200:
-        SampleRate = LVM_FS_88200;
-        break;
-    case 96000:
-        SampleRate = LVM_FS_96000;
-        break;
-    case 176400:
-        SampleRate = LVM_FS_176400;
-        break;
-    case 192000:
-        SampleRate = LVM_FS_192000;
-        break;
-    default:
-        ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+    SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+    if (SampleRate == LVM_FS_INVALID) {
+        ALOGE("Reverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
         return -EINVAL;
     }
 
     if (pContext->SampleRate != SampleRate) {
+        LVREV_ControlParams_st ActiveParams;
+        LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS;
 
-        LVREV_ControlParams_st    ActiveParams;
-        LVREV_ReturnStatus_en     LvmStatus = LVREV_SUCCESS;
-
-        //ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
+        // ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate);
 
         /* Get the current settings */
-        LvmStatus = LVREV_GetControlParameters(pContext->hInstance,
-                                         &ActiveParams);
+        LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig")
-        if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+        if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
         ActiveParams.SampleRate = SampleRate;
 
         LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig")
-        if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
-        //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n");
+        if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
+        // ALOGV("\tReverb_setConfig Successfully called LVREV_SetControlParameters\n");
         pContext->SampleRate = SampleRate;
-    }else{
-        //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
+    } else {
+        // ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
     }
 
-    //ALOGV("\tReverb_setConfig End");
+    // ALOGV("\tReverb_setConfig End");
     return 0;
-}   /* end Reverb_setConfig */
+} /* end Reverb_setConfig */
 
 //----------------------------------------------------------------------------
 // Reverb_getConfig()
@@ -614,10 +603,9 @@
 //
 //----------------------------------------------------------------------------
 
-void Reverb_getConfig(ReverbContext *pContext, effect_config_t *pConfig)
-{
+void Reverb_getConfig(ReverbContext* pContext, effect_config_t* pConfig) {
     *pConfig = pContext->config;
-}   /* end Reverb_getConfig */
+} /* end Reverb_getConfig */
 
 //----------------------------------------------------------------------------
 // Reverb_init()
@@ -631,35 +619,35 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_init(ReverbContext *pContext){
+int Reverb_init(ReverbContext* pContext) {
     ALOGV("\tReverb_init start");
 
     CHECK_ARG(pContext != NULL);
 
-    if (pContext->hInstance != NULL){
+    if (pContext->hInstance != NULL) {
         Reverb_free(pContext);
     }
 
-    pContext->config.inputCfg.accessMode                    = EFFECT_BUFFER_ACCESS_READ;
+    pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
     if (pContext->auxiliary) {
-        pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_MONO;
+        pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO;
     } else {
-        pContext->config.inputCfg.channels                  = AUDIO_CHANNEL_OUT_STEREO;
+        pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
     }
-    pContext->config.inputCfg.format                        = EFFECT_BUFFER_FORMAT;
-    pContext->config.inputCfg.samplingRate                  = 44100;
-    pContext->config.inputCfg.bufferProvider.getBuffer      = NULL;
-    pContext->config.inputCfg.bufferProvider.releaseBuffer  = NULL;
-    pContext->config.inputCfg.bufferProvider.cookie         = NULL;
-    pContext->config.inputCfg.mask                          = EFFECT_CONFIG_ALL;
-    pContext->config.outputCfg.accessMode                   = EFFECT_BUFFER_ACCESS_ACCUMULATE;
-    pContext->config.outputCfg.channels                     = AUDIO_CHANNEL_OUT_STEREO;
-    pContext->config.outputCfg.format                       = EFFECT_BUFFER_FORMAT;
-    pContext->config.outputCfg.samplingRate                 = 44100;
-    pContext->config.outputCfg.bufferProvider.getBuffer     = NULL;
+    pContext->config.inputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.inputCfg.samplingRate = 44100;
+    pContext->config.inputCfg.bufferProvider.getBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+    pContext->config.inputCfg.bufferProvider.cookie = NULL;
+    pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+    pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    pContext->config.outputCfg.format = EFFECT_BUFFER_FORMAT;
+    pContext->config.outputCfg.samplingRate = 44100;
+    pContext->config.outputCfg.bufferProvider.getBuffer = NULL;
     pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL;
-    pContext->config.outputCfg.bufferProvider.cookie        = NULL;
-    pContext->config.outputCfg.mask                         = EFFECT_CONFIG_ALL;
+    pContext->config.outputCfg.bufferProvider.cookie = NULL;
+    pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL;
 
     pContext->leftVolume = REVERB_UNIT_VOLUME;
     pContext->rightVolume = REVERB_UNIT_VOLUME;
@@ -667,123 +655,75 @@
     pContext->prevRightVolume = REVERB_UNIT_VOLUME;
     pContext->volumeMode = REVERB_VOLUME_FLAT;
 
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;        /* Function call status */
-    LVREV_ControlParams_st    params;                         /* Control Parameters */
-    LVREV_InstanceParams_st   InstParams;                     /* Instance parameters */
-    LVREV_MemoryTable_st      MemTab;                         /* Memory allocation table */
-    bool                      bMallocFailure = LVM_FALSE;
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVREV_ControlParams_st params;                   /* Control Parameters */
+    LVREV_InstanceParams_st InstParams;              /* Instance parameters */
 
     /* Set the capabilities */
-    InstParams.MaxBlockSize  = MAX_CALL_SIZE;
-    InstParams.SourceFormat  = LVM_STEREO;          // Max format, could be mono during process
-    InstParams.NumDelays     = LVREV_DELAYLINES_4;
-
-    /* Allocate memory, forcing alignment */
-    LvmStatus = LVREV_GetMemoryTable(LVM_NULL,
-                                  &MemTab,
-                                  &InstParams);
-
-    LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
-
-    ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n");
-
-    /* Allocate memory */
-    for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-        if (MemTab.Region[i].Size != 0){
-            MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
-
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u\n", MemTab.Region[i].Size, i );
-                bMallocFailure = LVM_TRUE;
-            }else{
-                ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
-                        " bytes for region %u at %p\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-            }
-        }
-    }
-
-    /* If one or more of the memory regions failed to allocate, free the regions that were
-     * succesfully allocated and return with an error
-     */
-    if(bMallocFailure == LVM_TRUE){
-        for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
-            if (MemTab.Region[i].pBaseAddress == LVM_NULL){
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
-                        " bytes for region %u - Not freeing\n", MemTab.Region[i].Size, i );
-            }else{
-                ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
-                        " bytes for region %u at %p- free\n",
-                        MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
-                free(MemTab.Region[i].pBaseAddress);
-            }
-        }
-        return -EINVAL;
-    }
-    ALOGV("\tReverb_init CreateInstance Succesfully malloc'd memory\n");
+    InstParams.MaxBlockSize = MAX_CALL_SIZE;
+    InstParams.SourceFormat = LVM_STEREO;  // Max format, could be mono during process
+    InstParams.NumDelays = LVREV_DELAYLINES_4;
 
     /* Initialise */
     pContext->hInstance = LVM_NULL;
 
     /* Init sets the instance handle */
-    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance,
-                                        &MemTab,
-                                        &InstParams);
+    LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, &InstParams);
 
     LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
-    ALOGV("\tReverb_init CreateInstance Succesfully called LVM_GetInstanceHandle\n");
+    ALOGV("\tReverb_init CreateInstance Successfully called LVM_GetInstanceHandle\n");
 
     /* Set the initial process parameters */
     /* General parameters */
-    params.OperatingMode  = LVM_MODE_ON;
-    params.SampleRate     = LVM_FS_44100;
-    pContext->SampleRate  = LVM_FS_44100;
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    pContext->SampleRate = LVM_FS_44100;
 
-    if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){
-        params.SourceFormat   = LVM_MONO;
+    if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) {
+        params.SourceFormat = LVM_MONO;
     } else {
-        params.SourceFormat   = LVM_STEREO;
+        params.SourceFormat = LVM_STEREO;
     }
 
+    if (!pContext->auxiliary && params.SourceFormat == LVM_MONO) {
+        params.SourceFormat = LVM_STEREO;
+    }
     /* Reverb parameters */
-    params.Level          = 0;
-    params.LPF            = 23999;
-    params.HPF            = 50;
-    params.T60            = 1490;
-    params.Density        = 100;
-    params.Damping        = 21;
-    params.RoomSize       = 100;
+    params.Level = 0;
+    params.LPF = 23999;
+    params.HPF = 50;
+    params.T60 = 1490;
+    params.Density = 100;
+    params.Damping = 21;
+    params.RoomSize = 100;
 
-    pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate)/1000;
+    pContext->SamplesToExitCount = (params.T60 * pContext->config.inputCfg.samplingRate) / 1000;
 
     /* The saved strength is used to return, on get, the exact strength that was passed to set,
      * because we map the original strength range of 0:1000 to 1:15; returning the saved value
      * avoids a quantisation-like effect.
      */
-    pContext->SavedRoomLevel    = -6000;
-    pContext->SavedHfLevel      = 0;
-    pContext->bEnabled          = LVM_FALSE;
-    pContext->SavedDecayTime    = params.T60;
-    pContext->SavedDecayHfRatio = params.Damping*20;
-    pContext->SavedDensity      = params.RoomSize*10;
-    pContext->SavedDiffusion    = params.Density*10;
-    pContext->SavedReverbLevel  = -6000;
+    pContext->SavedRoomLevel = -6000;
+    pContext->SavedHfLevel = 0;
+    pContext->bEnabled = LVM_FALSE;
+    pContext->SavedDecayTime = params.T60;
+    pContext->SavedDecayHfRatio = params.Damping * 20;
+    pContext->SavedDensity = params.RoomSize * 10;
+    pContext->SavedDiffusion = params.Density * 10;
+    pContext->SavedReverbLevel = -6000;
 
     /* Activate the initial settings */
-    LvmStatus = LVREV_SetControlParameters(pContext->hInstance,
-                                         &params);
+    LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &params);
 
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_init")
-    if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
+    if (LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
-    ALOGV("\tReverb_init CreateInstance Succesfully called LVREV_SetControlParameters\n");
+    ALOGV("\tReverb_init CreateInstance Successfully called LVREV_SetControlParameters\n");
     ALOGV("\tReverb_init End");
     return 0;
-}   /* end Reverb_init */
+} /* end Reverb_init */
 
 //----------------------------------------------------------------------------
 // ReverbConvertLevel()
@@ -796,27 +736,21 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbConvertLevel(int16_t level){
-    static int16_t LevelArray[101] =
-    {
-       -12000, -4000,  -3398,  -3046,  -2796,  -2603,  -2444,  -2310,  -2194,  -2092,
-       -2000,  -1918,  -1842,  -1773,  -1708,  -1648,  -1592,  -1540,  -1490,  -1443,
-       -1398,  -1356,  -1316,  -1277,  -1240,  -1205,  -1171,  -1138,  -1106,  -1076,
-       -1046,  -1018,  -990,   -963,   -938,   -912,   -888,   -864,   -841,   -818,
-       -796,   -775,   -754,   -734,   -714,   -694,   -675,   -656,   -638,   -620,
-       -603,   -585,   -568,   -552,   -536,   -520,   -504,   -489,   -474,   -459,
-       -444,   -430,   -416,   -402,   -388,   -375,   -361,   -348,   -335,   -323,
-       -310,   -298,   -286,   -274,   -262,   -250,   -239,   -228,   -216,   -205,
-       -194,   -184,   -173,   -162,   -152,   -142,   -132,   -121,   -112,   -102,
-       -92,    -82,    -73,    -64,    -54,    -45,    -36,    -27,    -18,    -9,
-       0
-    };
+int16_t ReverbConvertLevel(int16_t level) {
+    static int16_t LevelArray[101] = {
+            -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, -2000, -1918,
+            -1842,  -1773, -1708, -1648, -1592, -1540, -1490, -1443, -1398, -1356, -1316, -1277,
+            -1240,  -1205, -1171, -1138, -1106, -1076, -1046, -1018, -990,  -963,  -938,  -912,
+            -888,   -864,  -841,  -818,  -796,  -775,  -754,  -734,  -714,  -694,  -675,  -656,
+            -638,   -620,  -603,  -585,  -568,  -552,  -536,  -520,  -504,  -489,  -474,  -459,
+            -444,   -430,  -416,  -402,  -388,  -375,  -361,  -348,  -335,  -323,  -310,  -298,
+            -286,   -274,  -262,  -250,  -239,  -228,  -216,  -205,  -194,  -184,  -173,  -162,
+            -152,   -142,  -132,  -121,  -112,  -102,  -92,   -82,   -73,   -64,   -54,   -45,
+            -36,    -27,   -18,   -9,    0};
     int16_t i;
 
-    for(i = 0; i < 101; i++)
-    {
-       if(level <= LevelArray[i])
-           break;
+    for (i = 0; i < 101; i++) {
+        if (level <= LevelArray[i]) break;
     }
     return i;
 }
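ReverbConvertLevel() above scans a monotonically increasing 101-entry table and returns the first index whose entry is greater than or equal to the requested combined level in millibels (for example, -1490 mB maps to index 18). Because the table is sorted, the same lookup can be expressed with std::lower_bound; the sketch below illustrates the semantics only and is not a proposed change to the code:

#include <algorithm>
#include <iterator>

// Illustrative equivalent of the linear scan: return the index of the first
// table entry >= level, or 101 if level exceeds every entry (matching the
// loop's fall-through behaviour).
static int16_t convertLevelByLowerBound(int16_t level, const int16_t (&table)[101]) {
    const int16_t* it = std::lower_bound(std::begin(table), std::end(table), level);
    return static_cast<int16_t>(std::distance(std::begin(table), it));
}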
@@ -832,37 +766,31 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbConvertHfLevel(int16_t Hflevel){
+int16_t ReverbConvertHfLevel(int16_t Hflevel) {
     int16_t i;
 
-    static LPFPair_t LPFArray[97] =
-    {   // Limit range to 50 for LVREV parameter range
-        {-10000, 50}, { -5000, 50 }, { -4000, 50},  { -3000, 158}, { -2000, 502},
-        {-1000, 1666},{ -900, 1897}, { -800, 2169}, { -700, 2496}, { -600, 2895},
-        {-500, 3400}, { -400, 4066}, { -300, 5011}, { -200, 6537}, { -100,  9826},
-        {-99, 9881 }, { -98, 9937 }, { -97, 9994 }, { -96, 10052}, { -95, 10111},
-        {-94, 10171}, { -93, 10231}, { -92, 10293}, { -91, 10356}, { -90, 10419},
-        {-89, 10484}, { -88, 10549}, { -87, 10616}, { -86, 10684}, { -85, 10753},
-        {-84, 10823}, { -83, 10895}, { -82, 10968}, { -81, 11042}, { -80, 11117},
-        {-79, 11194}, { -78, 11272}, { -77, 11352}, { -76, 11433}, { -75, 11516},
-        {-74, 11600}, { -73, 11686}, { -72, 11774}, { -71, 11864}, { -70, 11955},
-        {-69, 12049}, { -68, 12144}, { -67, 12242}, { -66, 12341}, { -65, 12443},
-        {-64, 12548}, { -63, 12654}, { -62, 12763}, { -61, 12875}, { -60, 12990},
-        {-59, 13107}, { -58, 13227}, { -57, 13351}, { -56, 13477}, { -55, 13607},
-        {-54, 13741}, { -53, 13878}, { -52, 14019}, { -51, 14164}, { -50, 14313},
-        {-49, 14467}, { -48, 14626}, { -47, 14789}, { -46, 14958}, { -45, 15132},
-        {-44, 15312}, { -43, 15498}, { -42, 15691}, { -41, 15890}, { -40, 16097},
-        {-39, 16311}, { -38, 16534}, { -37, 16766}, { -36, 17007}, { -35, 17259},
-        {-34, 17521}, { -33, 17795}, { -32, 18081}, { -31, 18381}, { -30, 18696},
-        {-29, 19027}, { -28, 19375}, { -27, 19742}, { -26, 20129}, { -25, 20540},
-        {-24, 20976}, { -23, 21439}, { -22, 21934}, { -21, 22463}, { -20, 23031},
-        {-19, 23643}, { -18, 23999}
-    };
+    static LPFPair_t LPFArray[97] = {
+            // Limit range to 50 for LVREV parameter range
+            {-10000, 50}, {-5000, 50},  {-4000, 50},  {-3000, 158}, {-2000, 502}, {-1000, 1666},
+            {-900, 1897}, {-800, 2169}, {-700, 2496}, {-600, 2895}, {-500, 3400}, {-400, 4066},
+            {-300, 5011}, {-200, 6537}, {-100, 9826}, {-99, 9881},  {-98, 9937},  {-97, 9994},
+            {-96, 10052}, {-95, 10111}, {-94, 10171}, {-93, 10231}, {-92, 10293}, {-91, 10356},
+            {-90, 10419}, {-89, 10484}, {-88, 10549}, {-87, 10616}, {-86, 10684}, {-85, 10753},
+            {-84, 10823}, {-83, 10895}, {-82, 10968}, {-81, 11042}, {-80, 11117}, {-79, 11194},
+            {-78, 11272}, {-77, 11352}, {-76, 11433}, {-75, 11516}, {-74, 11600}, {-73, 11686},
+            {-72, 11774}, {-71, 11864}, {-70, 11955}, {-69, 12049}, {-68, 12144}, {-67, 12242},
+            {-66, 12341}, {-65, 12443}, {-64, 12548}, {-63, 12654}, {-62, 12763}, {-61, 12875},
+            {-60, 12990}, {-59, 13107}, {-58, 13227}, {-57, 13351}, {-56, 13477}, {-55, 13607},
+            {-54, 13741}, {-53, 13878}, {-52, 14019}, {-51, 14164}, {-50, 14313}, {-49, 14467},
+            {-48, 14626}, {-47, 14789}, {-46, 14958}, {-45, 15132}, {-44, 15312}, {-43, 15498},
+            {-42, 15691}, {-41, 15890}, {-40, 16097}, {-39, 16311}, {-38, 16534}, {-37, 16766},
+            {-36, 17007}, {-35, 17259}, {-34, 17521}, {-33, 17795}, {-32, 18081}, {-31, 18381},
+            {-30, 18696}, {-29, 19027}, {-28, 19375}, {-27, 19742}, {-26, 20129}, {-25, 20540},
+            {-24, 20976}, {-23, 21439}, {-22, 21934}, {-21, 22463}, {-20, 23031}, {-19, 23643},
+            {-18, 23999}};
 
-    for(i = 0; i < 96; i++)
-    {
-        if(Hflevel <= LPFArray[i].Room_HF)
-            break;
+    for (i = 0; i < 96; i++) {
+        if (Hflevel <= LPFArray[i].Room_HF) break;
     }
     return LPFArray[i].LPF;
 }
@@ -879,26 +807,26 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetRoomHfLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
+void ReverbSetRoomHfLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetRoomHfLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomHfLevel")
-    //ALOGV("\tReverbSetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbSetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
 
     ActiveParams.LPF = ReverbConvertHfLevel(level);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomHfLevel")
-    //ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF);
     pContext->SavedHfLevel = level;
-    //ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
+    // ALOGV("\tReverbSetHfRoomLevel end.. saving %d", pContext->SavedHfLevel);
     return;
 }
 
@@ -913,30 +841,31 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetRoomHfLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomHfLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
+    // ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomHfLevel")
-    //ALOGV("\tReverbGetRoomHfLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
+    // ALOGV("\tReverbGetRoomHfLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetRoomHfLevel() just Got -> %d\n", ActiveParams.LPF);
 
     level = ReverbConvertHfLevel(pContext->SavedHfLevel);
 
-    //ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
+    // ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, "
     //     "converted level: %d\n", ActiveParams.LPF, pContext->SavedHfLevel, level);
 
-    if((int16_t)ActiveParams.LPF != level){
-        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d %d\n",
-               ActiveParams.Level, level);
+    if ((int16_t)ActiveParams.LPF != level) {
+        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d "
+              "%d\n",
+              ActiveParams.LPF, level);
     }
 
-    //ALOGV("\tReverbGetRoomHfLevel end");
+    // ALOGV("\tReverbGetRoomHfLevel end");
     return pContext->SavedHfLevel;
 }
 
@@ -952,35 +881,35 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetReverbLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
+void ReverbSetReverbLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\n\tReverbSetReverbLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetReverbLevel")
-    //ALOGV("\tReverbSetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetReverbLevel just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (level + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
-    //ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
+    CombinedLevel = (level + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
+    // ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n",
     //      CombinedLevel, level, pContext->SavedRoomLevel);
 
     ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetReverbLevel")
-    //ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level);
 
     pContext->SavedReverbLevel = level;
-    //ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
+    // ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n",
     //     pContext->SavedReverbLevel);
     return;
 }
@@ -996,37 +925,40 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetReverbLevel(ReverbContext *pContext){
+int16_t ReverbGetReverbLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetReverbLevel start");
+    // ALOGV("\tReverbGetReverbLevel start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetReverbLevel")
-    //ALOGV("\tReverbGetReverbLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbGetReverbLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetReverbLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (pContext->SavedReverbLevel + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL;
+    CombinedLevel =
+            (pContext->SavedReverbLevel + pContext->SavedRoomLevel) - LVREV_MAX_REVERB_LEVEL;
 
-    //ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
-    //CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
+    // ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n",
+    // CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel);
     level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
+    // ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, "
     //"pContext->SavedRoomLevel: %d, CombinedLevel: %d, converted level: %d\n",
-    //ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel, CombinedLevel,level);
+    // ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel,
+    // CombinedLevel,level);
 
-    if(ActiveParams.Level != level){
-        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d %d\n",
-                ActiveParams.Level, level);
+    if (ActiveParams.Level != level) {
+        ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d "
+              "%d\n",
+              ActiveParams.Level, level);
     }
 
-    //ALOGV("\tReverbGetReverbLevel end\n");
+    // ALOGV("\tReverbGetReverbLevel end\n");
 
     return pContext->SavedReverbLevel;
 }
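
For reference, the room/reverb handling in the hunks above folds the two saved level controls into a single LVREV level: the two millibel values are summed and the shared maximum is subtracted once (LVREV_MAX_REVERB_LEVEL). Below is a minimal standalone sketch of that arithmetic only; the constant is an arbitrary placeholder, not the value from the LVREV headers.

```cpp
// Sketch of the CombinedLevel computation in ReverbSet/GetReverbLevel()
// and ReverbSet/GetRoomLevel() above. kMaxReverbLevelMb is a placeholder;
// the real code uses LVREV_MAX_REVERB_LEVEL from the LVREV headers.
#include <cstdint>
#include <cstdio>

static const int32_t kMaxReverbLevelMb = 2000;  // placeholder value only

static int32_t combineLevels(int16_t reverbLevelMb, int16_t roomLevelMb) {
    // Sum the two controls and subtract the shared maximum once, as the
    // "needs to subtract max levels" comments in the code above describe.
    return static_cast<int32_t>(reverbLevelMb) + roomLevelMb - kMaxReverbLevelMb;
}

int main() {
    printf("combined level = %d mB\n", (int)combineLevels(-600, -1000));
    return 0;
}
```
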
@@ -1043,30 +975,30 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetRoomLevel(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetRoomLevel start (%d)", level);
+void ReverbSetRoomLevel(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetRoomLevel start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetRoomLevel")
-    //ALOGV("\tReverbSetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetRoomLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (level + pContext->SavedReverbLevel)-LVREV_MAX_REVERB_LEVEL;
+    CombinedLevel = (level + pContext->SavedReverbLevel) - LVREV_MAX_REVERB_LEVEL;
     ActiveParams.Level = ReverbConvertLevel(CombinedLevel);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetRoomLevel")
-    //ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level);
 
     pContext->SavedRoomLevel = level;
-    //ALOGV("\tReverbSetRoomLevel end");
+    // ALOGV("\tReverbSetRoomLevel end");
     return;
 }
 
@@ -1081,35 +1013,36 @@
 //
 //----------------------------------------------------------------------------
 
-int16_t ReverbGetRoomLevel(ReverbContext *pContext){
+int16_t ReverbGetRoomLevel(ReverbContext* pContext) {
     int16_t level;
-    //ALOGV("\tReverbGetRoomLevel start");
+    // ALOGV("\tReverbGetRoomLevel start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT32                 CombinedLevel;             // Sum of room and reverb level controls
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT32 CombinedLevel;                         // Sum of room and reverb level controls
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetRoomLevel")
-    //ALOGV("\tReverbGetRoomLevel Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
+    // ALOGV("\tReverbGetRoomLevel Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetRoomLevel() just Got -> %d\n", ActiveParams.Level);
 
     // needs to subtract max levels for both RoomLevel and ReverbLevel
-    CombinedLevel = (pContext->SavedRoomLevel + pContext->SavedReverbLevel-LVREV_MAX_REVERB_LEVEL);
+    CombinedLevel =
+            (pContext->SavedRoomLevel + pContext->SavedReverbLevel - LVREV_MAX_REVERB_LEVEL);
     level = ReverbConvertLevel(CombinedLevel);
 
-    //ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
+    // ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, "
     //     "pContext->SavedReverbLevel = %d, CombinedLevel = %d, level = %d",
     //     ActiveParams.Level, pContext->SavedRoomLevel,
     //     pContext->SavedReverbLevel, CombinedLevel, level);
 
-    if(ActiveParams.Level != level){
+    if (ActiveParams.Level != level) {
         ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomLevel() has wrong level -> %d %d\n",
               ActiveParams.Level, level);
     }
 
-    //ALOGV("\tReverbGetRoomLevel end");
+    // ALOGV("\tReverbGetRoomLevel end");
     return pContext->SavedRoomLevel;
 }
 
@@ -1125,34 +1058,35 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDecayTime(ReverbContext *pContext, uint32_t time){
-    //ALOGV("\tReverbSetDecayTime start (%d)", time);
+void ReverbSetDecayTime(ReverbContext* pContext, uint32_t time) {
+    // ALOGV("\tReverbSetDecayTime start (%d)", time);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayTime")
-    //ALOGV("\tReverbSetDecayTime Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbSetDecayTime Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDecayTime() just Got -> %d\n", ActiveParams.T60);
 
     if (time <= LVREV_MAX_T60) {
         ActiveParams.T60 = (LVM_UINT16)time;
-    }
-    else {
+    } else {
         ActiveParams.T60 = LVREV_MAX_T60;
     }
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayTime")
-    //ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60);
 
-    pContext->SamplesToExitCount = (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
-    //ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount-> %d\n",pContext->SamplesToExitCount);
+    pContext->SamplesToExitCount =
+            (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
+    // ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount->
+    // %d\n",pContext->SamplesToExitCount);
     pContext->SavedDecayTime = (int16_t)time;
-    //ALOGV("\tReverbSetDecayTime end");
+    // ALOGV("\tReverbSetDecayTime end");
     return;
 }
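
Besides updating T60, ReverbSetDecayTime() above recomputes SamplesToExitCount, the number of frames of reverb tail still to be rendered once the effect is disabled (the same expression reappears in the EFFECT_CMD_ENABLE handler near the end of the file). A small sketch of that millisecond-to-frames conversion, with illustrative names:

```cpp
// Sketch: decay time in ms -> frames of tail to keep rendering, as in
// ReverbSetDecayTime() above. Names are illustrative, not from the patch.
#include <cstdint>
#include <cstdio>

static uint32_t samplesToExit(uint16_t t60Ms, uint32_t samplingRateHz) {
    // T60 is in milliseconds, hence the division by 1000.
    return static_cast<uint32_t>(t60Ms) * samplingRateHz / 1000;
}

int main() {
    // e.g. a 7000 ms decay at 48 kHz leaves 336000 frames of tail.
    printf("%u frames\n", (unsigned)samplesToExit(7000, 48000));
    return 0;
}
```
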
 
@@ -1167,25 +1101,25 @@
 //
 //----------------------------------------------------------------------------
 
-uint32_t ReverbGetDecayTime(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDecayTime start");
+uint32_t ReverbGetDecayTime(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDecayTime start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayTime")
-    //ALOGV("\tReverbGetDecayTime Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
+    // ALOGV("\tReverbGetDecayTime Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDecayTime() just Got -> %d\n", ActiveParams.T60);
 
-    if(ActiveParams.T60 != pContext->SavedDecayTime){
+    if (ActiveParams.T60 != pContext->SavedDecayTime) {
         // This will fail if the decay time is set to more than 7000
-        ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n",
-         ActiveParams.T60, pContext->SavedDecayTime);
+        ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n", ActiveParams.T60,
+              pContext->SavedDecayTime);
     }
 
-    //ALOGV("\tReverbGetDecayTime end");
+    // ALOGV("\tReverbGetDecayTime end");
     return (uint32_t)ActiveParams.T60;
 }
 
@@ -1201,27 +1135,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDecayHfRatio(ReverbContext *pContext, int16_t ratio){
-    //ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio);
+void ReverbSetDecayHfRatio(ReverbContext* pContext, int16_t ratio) {
+    // ALOGV("\tReverbSetDecayHfRatio start (%d)", ratio);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;   /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDecayHfRatio")
-    //ALOGV("\tReverbSetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbSetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
 
-    ActiveParams.Damping = (LVM_INT16)(ratio/20);
+    ActiveParams.Damping = (LVM_INT16)(ratio / 20);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDecayHfRatio")
-    //ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping);
 
     pContext->SavedDecayHfRatio = ratio;
-    //ALOGV("\tReverbSetDecayHfRatio end");
+    // ALOGV("\tReverbSetDecayHfRatio end");
     return;
 }
 
@@ -1236,24 +1170,24 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDecayHfRatio(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDecayHfRatio start");
+int32_t ReverbGetDecayHfRatio(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDecayHfRatio start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;   /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDecayHfRatio")
-    //ALOGV("\tReverbGetDecayHfRatio Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
+    // ALOGV("\tReverbGetDecayHfRatio Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDecayHfRatio() just Got -> %d\n", ActiveParams.Damping);
 
-    if(ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)){
+    if (ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)) {
         ALOGV("\tLVM_ERROR : ReverbGetDecayHfRatio() has wrong level -> %d %d\n",
-         ActiveParams.Damping, pContext->SavedDecayHfRatio);
+              ActiveParams.Damping, pContext->SavedDecayHfRatio);
     }
 
-    //ALOGV("\tReverbGetDecayHfRatio end");
+    // ALOGV("\tReverbGetDecayHfRatio end");
     return pContext->SavedDecayHfRatio;
 }
 
@@ -1269,27 +1203,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDiffusion(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetDiffusion start (%d)", level);
+void ReverbSetDiffusion(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetDiffusion start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDiffusion")
-    //ALOGV("\tReverbSetDiffusion Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbSetDiffusion Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDiffusion() just Got -> %d\n", ActiveParams.Density);
 
-    ActiveParams.Density = (LVM_INT16)(level/10);
+    ActiveParams.Density = (LVM_INT16)(level / 10);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDiffusion")
-    //ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density);
 
     pContext->SavedDiffusion = level;
-    //ALOGV("\tReverbSetDiffusion end");
+    // ALOGV("\tReverbSetDiffusion end");
     return;
 }
 
@@ -1304,26 +1238,26 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDiffusion(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDiffusion start");
+int32_t ReverbGetDiffusion(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDiffusion start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT16                 Temp;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT16 Temp;
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDiffusion")
-    //ALOGV("\tReverbGetDiffusion Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
+    // ALOGV("\tReverbGetDiffusion Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDiffusion just Got -> %d\n", ActiveParams.Density);
 
-    Temp = (LVM_INT16)(pContext->SavedDiffusion/10);
+    Temp = (LVM_INT16)(pContext->SavedDiffusion / 10);
 
-    if(ActiveParams.Density != Temp){
+    if (ActiveParams.Density != Temp) {
         ALOGV("\tLVM_ERROR : ReverbGetDiffusion invalid value %d %d", Temp, ActiveParams.Density);
     }
 
-    //ALOGV("\tReverbGetDiffusion end");
+    // ALOGV("\tReverbGetDiffusion end");
     return pContext->SavedDiffusion;
 }
 
@@ -1339,27 +1273,27 @@
 //
 //----------------------------------------------------------------------------
 
-void ReverbSetDensity(ReverbContext *pContext, int16_t level){
-    //ALOGV("\tReverbSetDensity start (%d)", level);
+void ReverbSetDensity(ReverbContext* pContext, int16_t level) {
+    // ALOGV("\tReverbSetDensity start (%d)", level);
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbSetDensity")
-    //ALOGV("\tReverbSetDensity Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbSetDensity Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbSetDensity just Got -> %d\n", ActiveParams.RoomSize);
 
     ActiveParams.RoomSize = (LVM_INT16)(((level * 99) / 1000) + 1);
 
     /* Activate the initial settings */
     LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "ReverbSetDensity")
-    //ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize);
 
     pContext->SavedDensity = level;
-    //ALOGV("\tReverbSetDensity end");
+    // ALOGV("\tReverbSetDensity end");
     return;
 }
 
@@ -1374,25 +1308,25 @@
 //
 //----------------------------------------------------------------------------
 
-int32_t ReverbGetDensity(ReverbContext *pContext){
-    //ALOGV("\tReverbGetDensity start");
+int32_t ReverbGetDensity(ReverbContext* pContext) {
+    // ALOGV("\tReverbGetDensity start");
 
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
-    LVM_INT16                 Temp;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
+    LVM_INT16 Temp;
     /* Get the current settings */
     LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
     LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "ReverbGetDensity")
-    //ALOGV("\tReverbGetDensity Succesfully returned from LVM_GetControlParameters\n");
-    //ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
+    // ALOGV("\tReverbGetDensity Successfully returned from LVM_GetControlParameters\n");
+    // ALOGV("\tReverbGetDensity() just Got -> %d\n", ActiveParams.RoomSize);
 
     Temp = (LVM_INT16)(((pContext->SavedDensity * 99) / 1000) + 1);
 
-    if(Temp != ActiveParams.RoomSize){
+    if (Temp != ActiveParams.RoomSize) {
         ALOGV("\tLVM_ERROR : ReverbGetDensity invalid value %d %d", Temp, ActiveParams.RoomSize);
     }
 
-    //ALOGV("\tReverbGetDensity end");
+    // ALOGV("\tReverbGetDensity end");
     return pContext->SavedDensity;
 }
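
ReverbSetDensity()/ReverbGetDensity() above map the density control from its 0..1000 permille range (see Reverb_paramValueSize() later in the file) onto LVREV's RoomSize field with ((level * 99) / 1000) + 1, i.e. roughly 1 to 100. A minimal sketch of that mapping, with illustrative names:

```cpp
// Sketch: density in permille (0..1000) -> LVREV RoomSize (1..100),
// mirroring ReverbSetDensity()/ReverbGetDensity() above.
#include <cstdint>
#include <cstdio>

static int16_t densityToRoomSize(int16_t densityPermille) {
    return static_cast<int16_t>(((densityPermille * 99) / 1000) + 1);
}

int main() {
    for (int16_t d : {0, 500, 1000}) {
        // Prints 1, 50 and 100 respectively.
        printf("density %4d -> RoomSize %3d\n", d, densityToRoomSize(d));
    }
    return 0;
}
```
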
 
@@ -1410,20 +1344,19 @@
 // Side Effects:
 //
 //----------------------------------------------------------------------------
-int Reverb_LoadPreset(ReverbContext   *pContext)
-{
-    //TODO: add reflections delay, level and reverb delay when early reflections are
+int Reverb_LoadPreset(ReverbContext* pContext) {
+    // TODO: add reflections delay, level and reverb delay when early reflections are
     // implemented
     pContext->curPreset = pContext->nextPreset;
 
     if (pContext->curPreset != REVERB_PRESET_NONE) {
-        const t_reverb_settings *preset = &sReverbPresets[pContext->curPreset];
+        const t_reverb_settings* preset = &sReverbPresets[pContext->curPreset];
         ReverbSetRoomLevel(pContext, preset->roomLevel);
         ReverbSetRoomHfLevel(pContext, preset->roomHFLevel);
         ReverbSetDecayTime(pContext, preset->decayTime);
         ReverbSetDecayHfRatio(pContext, preset->decayHFRatio);
-        //reflectionsLevel
-        //reflectionsDelay
+        // reflectionsLevel
+        // reflectionsDelay
         ReverbSetReverbLevel(pContext, preset->reverbLevel);
         // reverbDelay
         ReverbSetDiffusion(pContext, preset->diffusion);
@@ -1454,99 +1387,96 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_getParameter(ReverbContext *pContext,
-                        void          *pParam,
-                        uint32_t      *pValueSize,
-                        void          *pValue){
+int Reverb_getParameter(ReverbContext* pContext, void* pParam, uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    int32_t *pParamTemp = (int32_t *)pParam;
+    int32_t* pParamTemp = (int32_t*)pParam;
     int32_t param = *pParamTemp++;
-    t_reverb_settings *pProperties;
+    t_reverb_settings* pProperties;
 
-    //ALOGV("\tReverb_getParameter start");
+    // ALOGV("\tReverb_getParameter start");
     if (pContext->preset) {
         if (param != REVERB_PARAM_PRESET || *pValueSize < sizeof(uint16_t)) {
             return -EINVAL;
         }
 
-        *(uint16_t *)pValue = pContext->nextPreset;
+        *(uint16_t*)pValue = pContext->nextPreset;
         ALOGV("get REVERB_PARAM_PRESET, preset %d", pContext->nextPreset);
         return 0;
     }
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_ROOM_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize1 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_ROOM_HF_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize12 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_DECAY_TIME:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize3 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_DECAY_HF_RATIO:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize4 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize5 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REFLECTIONS_DELAY:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize6 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_REVERB_LEVEL:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize7 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_REVERB_DELAY:
-            if (*pValueSize != sizeof(uint32_t)){
+            if (*pValueSize != sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize8 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(uint32_t);
             break;
         case REVERB_PARAM_DIFFUSION:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize9 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_DENSITY:
-            if (*pValueSize != sizeof(int16_t)){
+            if (*pValueSize != sizeof(int16_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize10 %d", *pValueSize);
                 return -EINVAL;
             }
             *pValueSize = sizeof(int16_t);
             break;
         case REVERB_PARAM_PROPERTIES:
-            if (*pValueSize != sizeof(t_reverb_settings)){
+            if (*pValueSize != sizeof(t_reverb_settings)) {
                 ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize11 %d", *pValueSize);
                 return -EINVAL;
             }
@@ -1558,9 +1488,9 @@
             return -EINVAL;
     }
 
-    pProperties = (t_reverb_settings *) pValue;
+    pProperties = (t_reverb_settings*)pValue;
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_PROPERTIES:
             pProperties->roomLevel = ReverbGetRoomLevel(pContext);
             pProperties->roomHFLevel = ReverbGetRoomHfLevel(pContext);
@@ -1574,74 +1504,74 @@
             pProperties->density = ReverbGetDensity(pContext);
 
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomLevel        %d",
-                pProperties->roomLevel);
+                  pProperties->roomLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomHFLevel      %d",
-                pProperties->roomHFLevel);
+                  pProperties->roomHFLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayTime        %d",
-                pProperties->decayTime);
+                  pProperties->decayTime);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayHFRatio     %d",
-                pProperties->decayHFRatio);
+                  pProperties->decayHFRatio);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsLevel %d",
-                pProperties->reflectionsLevel);
+                  pProperties->reflectionsLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsDelay %d",
-                pProperties->reflectionsDelay);
+                  pProperties->reflectionsDelay);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbDelay      %d",
-                pProperties->reverbDelay);
+                  pProperties->reverbDelay);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbLevel      %d",
-                pProperties->reverbLevel);
+                  pProperties->reverbLevel);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is diffusion        %d",
-                pProperties->diffusion);
+                  pProperties->diffusion);
             ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is density          %d",
-                pProperties->density);
+                  pProperties->density);
             break;
 
         case REVERB_PARAM_ROOM_LEVEL:
-            *(int16_t *)pValue = ReverbGetRoomLevel(pContext);
+            *(int16_t*)pValue = ReverbGetRoomLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_ROOM_HF_LEVEL:
-            *(int16_t *)pValue = ReverbGetRoomHfLevel(pContext);
+            *(int16_t*)pValue = ReverbGetRoomHfLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DECAY_TIME:
-            *(uint32_t *)pValue = ReverbGetDecayTime(pContext);
+            *(uint32_t*)pValue = ReverbGetDecayTime(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d",
             //        *(int32_t *)pValue);
             break;
         case REVERB_PARAM_DECAY_HF_RATIO:
-            *(int16_t *)pValue = ReverbGetDecayHfRatio(pContext);
+            *(int16_t*)pValue = ReverbGetDecayHfRatio(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATIO Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_REVERB_LEVEL:
-             *(int16_t *)pValue = ReverbGetReverbLevel(pContext);
+            *(int16_t*)pValue = ReverbGetReverbLevel(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DIFFUSION:
-            *(int16_t *)pValue = ReverbGetDiffusion(pContext);
+            *(int16_t*)pValue = ReverbGetDiffusion(pContext);
 
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d",
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DIFFUSION Value is %d",
             //        *(int16_t *)pValue);
             break;
         case REVERB_PARAM_DENSITY:
-            *(int16_t *)pValue = ReverbGetDensity(pContext);
-            //ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
+            *(int16_t*)pValue = ReverbGetDensity(pContext);
+            // ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d",
             //        *(uint32_t *)pValue);
             break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
-            *(uint16_t *)pValue = 0;
+            *(uint16_t*)pValue = 0;
             break;
         case REVERB_PARAM_REFLECTIONS_DELAY:
         case REVERB_PARAM_REVERB_DELAY:
-            *(uint32_t *)pValue = 0;
+            *(uint32_t*)pValue = 0;
             break;
 
         default:
@@ -1650,7 +1580,7 @@
             break;
     }
 
-    //ALOGV("\tReverb_getParameter end");
+    // ALOGV("\tReverb_getParameter end");
     return status;
 } /* end Reverb_getParameter */
 
@@ -1670,16 +1600,16 @@
 //
 //----------------------------------------------------------------------------
 
-int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue, int vsize){
+int Reverb_setParameter(ReverbContext* pContext, void* pParam, void* pValue, int vsize) {
     int status = 0;
     int16_t level;
     int16_t ratio;
     uint32_t time;
-    t_reverb_settings *pProperties;
-    int32_t *pParamTemp = (int32_t *)pParam;
+    t_reverb_settings* pProperties;
+    int32_t* pParamTemp = (int32_t*)pParam;
     int32_t param = *pParamTemp++;
 
-    //ALOGV("\tReverb_setParameter start");
+    // ALOGV("\tReverb_setParameter start");
     if (pContext->preset) {
         if (param != REVERB_PARAM_PRESET) {
             return -EINVAL;
@@ -1689,7 +1619,7 @@
             return -EINVAL;
         }
 
-        uint16_t preset = *(uint16_t *)pValue;
+        uint16_t preset = *(uint16_t*)pValue;
         ALOGV("set REVERB_PARAM_PRESET, preset %d", preset);
         if (preset > REVERB_PRESET_LAST) {
             return -EINVAL;
@@ -1703,10 +1633,10 @@
         return -EINVAL;
     }
 
-    switch (param){
+    switch (param) {
         case REVERB_PARAM_PROPERTIES:
             ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES");
-            pProperties = (t_reverb_settings *) pValue;
+            pProperties = (t_reverb_settings*)pValue;
             ReverbSetRoomLevel(pContext, pProperties->roomLevel);
             ReverbSetRoomHfLevel(pContext, pProperties->roomHFLevel);
             ReverbSetDecayTime(pContext, pProperties->decayTime);
@@ -1716,55 +1646,55 @@
             ReverbSetDensity(pContext, pProperties->density);
             break;
         case REVERB_PARAM_ROOM_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel");
             ReverbSetRoomLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
-           break;
-        case REVERB_PARAM_ROOM_HF_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
-            ReverbSetRoomHfLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
-           break;
-        case REVERB_PARAM_DECAY_TIME:
-            time = *(uint32_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
-            ReverbSetDecayTime(pContext, time);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
-           break;
-        case REVERB_PARAM_DECAY_HF_RATIO:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
-            ReverbSetDecayHfRatio(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel");
             break;
-         case REVERB_PARAM_REVERB_LEVEL:
-            level = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
+        case REVERB_PARAM_ROOM_HF_LEVEL:
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel");
+            ReverbSetRoomHfLevel(pContext, level);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel");
+            break;
+        case REVERB_PARAM_DECAY_TIME:
+            time = *(uint32_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime");
+            ReverbSetDecayTime(pContext, time);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime");
+            break;
+        case REVERB_PARAM_DECAY_HF_RATIO:
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio");
+            ReverbSetDecayHfRatio(pContext, ratio);
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio");
+            break;
+        case REVERB_PARAM_REVERB_LEVEL:
+            level = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel");
             ReverbSetReverbLevel(pContext, level);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
-           break;
+            // ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel");
+            break;
         case REVERB_PARAM_DIFFUSION:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DIFFUSION value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion");
             ReverbSetDiffusion(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion");
             break;
         case REVERB_PARAM_DENSITY:
-            ratio = *(int16_t *)pValue;
-            //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio);
-            //ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
+            ratio = *(int16_t*)pValue;
+            // ALOGV("\tReverb_setParameter() REVERB_PARAM_DENSITY value is %d", ratio);
+            // ALOGV("\tReverb_setParameter() Calling ReverbSetDensity");
             ReverbSetDensity(pContext, ratio);
-            //ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
+            // ALOGV("\tReverb_setParameter() Called ReverbSetDensity");
             break;
-           break;
         case REVERB_PARAM_REFLECTIONS_LEVEL:
         case REVERB_PARAM_REFLECTIONS_DELAY:
         case REVERB_PARAM_REVERB_DELAY:
@@ -1774,7 +1704,7 @@
             break;
     }
 
-    //ALOGV("\tReverb_setParameter end");
+    // ALOGV("\tReverb_setParameter end");
     return status;
 } /* end Reverb_setParameter */
 
@@ -1783,52 +1713,46 @@
  */
 int Reverb_paramValueSize(int32_t param) {
     switch (param) {
-    case REVERB_PARAM_ROOM_LEVEL:
-    case REVERB_PARAM_ROOM_HF_LEVEL:
-    case REVERB_PARAM_REFLECTIONS_LEVEL:
-    case REVERB_PARAM_REVERB_LEVEL:
-        return sizeof(int16_t); // millibel
-    case REVERB_PARAM_DECAY_TIME:
-    case REVERB_PARAM_REFLECTIONS_DELAY:
-    case REVERB_PARAM_REVERB_DELAY:
-        return sizeof(uint32_t); // milliseconds
-    case REVERB_PARAM_DECAY_HF_RATIO:
-    case REVERB_PARAM_DIFFUSION:
-    case REVERB_PARAM_DENSITY:
-        return sizeof(int16_t); // permille
-    case REVERB_PARAM_PROPERTIES:
-        return sizeof(s_reverb_settings); // struct of all reverb properties
+        case REVERB_PARAM_ROOM_LEVEL:
+        case REVERB_PARAM_ROOM_HF_LEVEL:
+        case REVERB_PARAM_REFLECTIONS_LEVEL:
+        case REVERB_PARAM_REVERB_LEVEL:
+            return sizeof(int16_t);  // millibel
+        case REVERB_PARAM_DECAY_TIME:
+        case REVERB_PARAM_REFLECTIONS_DELAY:
+        case REVERB_PARAM_REVERB_DELAY:
+            return sizeof(uint32_t);  // milliseconds
+        case REVERB_PARAM_DECAY_HF_RATIO:
+        case REVERB_PARAM_DIFFUSION:
+        case REVERB_PARAM_DENSITY:
+            return sizeof(int16_t);  // permille
+        case REVERB_PARAM_PROPERTIES:
+            return sizeof(s_reverb_settings);  // struct of all reverb properties
     }
     return sizeof(int32_t);
 }
 
-} // namespace
-} // namespace
+}  // namespace
+}  // namespace android
 
 extern "C" {
 /* Effect Control Interface Implementation: Process */
-int Reverb_process(effect_handle_t   self,
-                                 audio_buffer_t         *inBuffer,
-                                 audio_buffer_t         *outBuffer){
-    android::ReverbContext * pContext = (android::ReverbContext *) self;
-    int    status = 0;
+int Reverb_process(effect_handle_t self, audio_buffer_t* inBuffer, audio_buffer_t* outBuffer) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    int status = 0;
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Reverb_process() ERROR pContext == NULL");
         return -EINVAL;
     }
-    if (inBuffer == NULL  || inBuffer->raw == NULL  ||
-            outBuffer == NULL || outBuffer->raw == NULL ||
-            inBuffer->frameCount != outBuffer->frameCount){
+    if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL ||
+        inBuffer->frameCount != outBuffer->frameCount) {
         ALOGV("\tLVM_ERROR : Reverb_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
         return -EINVAL;
     }
-    //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
+    // ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount);
     /* Process all the available frames; block processing is handled internally by the LVM bundle */
-    status = process(    inBuffer->f32,
-                         outBuffer->f32,
-                         outBuffer->frameCount,
-                         pContext);
+    status = process(inBuffer->f32, outBuffer->f32, outBuffer->frameCount, pContext);
 
     if (pContext->bEnabled == LVM_FALSE) {
         if (pContext->SamplesToExitCount > 0) {
@@ -1840,72 +1764,67 @@
     }
 
     return status;
-}   /* end Reverb_process */
+} /* end Reverb_process */
 
 /* Effect Control Interface Implementation: Command */
-int Reverb_command(effect_handle_t  self,
-                              uint32_t            cmdCode,
-                              uint32_t            cmdSize,
-                              void                *pCmdData,
-                              uint32_t            *replySize,
-                              void                *pReplyData){
-    android::ReverbContext * pContext = (android::ReverbContext *) self;
-    LVREV_ControlParams_st    ActiveParams;              /* Current control Parameters */
-    LVREV_ReturnStatus_en     LvmStatus=LVREV_SUCCESS;     /* Function call status */
+int Reverb_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+                   uint32_t* replySize, void* pReplyData) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    LVREV_ControlParams_st ActiveParams;             /* Current control Parameters */
+    LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
 
-    if (pContext == NULL){
+    if (pContext == NULL) {
         ALOGV("\tLVM_ERROR : Reverb_command ERROR pContext == NULL");
         return -EINVAL;
     }
 
-    //ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
+    // ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize);
 
-    switch (cmdCode){
+    switch (cmdCode) {
         case EFFECT_CMD_INIT:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_INIT start");
 
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_INIT: ERROR");
+                      "EFFECT_CMD_INIT: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = 0;
+            *(int*)pReplyData = 0;
             break;
 
         case EFFECT_CMD_SET_CONFIG:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_SET_CONFIG start");
-            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) ||
-                    pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG: ERROR");
+                      "EFFECT_CMD_SET_CONFIG: ERROR");
                 return -EINVAL;
             }
-            *(int *) pReplyData = android::Reverb_setConfig(pContext,
-                                                            (effect_config_t *) pCmdData);
+            *(int*)pReplyData = android::Reverb_setConfig(pContext, (effect_config_t*)pCmdData);
             break;
 
         case EFFECT_CMD_GET_CONFIG:
             if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_CONFIG: ERROR");
                 return -EINVAL;
             }
 
-            android::Reverb_getConfig(pContext, (effect_config_t *)pReplyData);
+            android::Reverb_getConfig(pContext, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_RESET:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_RESET start");
             Reverb_setConfig(pContext, &pContext->config);
             break;
 
-        case EFFECT_CMD_GET_PARAM:{
-            //ALOGV("\tReverb_command cmdCode Case: "
+        case EFFECT_CMD_GET_PARAM: {
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_GET_PARAM start");
-            effect_param_t *p = (effect_param_t *)pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
             if (pCmdData == nullptr) {
                 ALOGW("\tLVM_ERROR : pCmdData is NULL");
                 return -EINVAL;
@@ -1914,163 +1833,156 @@
                 android_errorWriteLog(0x534e4554, "26347509");
                 return -EINVAL;
             }
-            if (cmdSize < sizeof(effect_param_t) ||
-                    cmdSize < (sizeof(effect_param_t) + p->psize) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < (sizeof(effect_param_t) + p->psize)) {
+            if (cmdSize < sizeof(effect_param_t) || cmdSize < (sizeof(effect_param_t) + p->psize) ||
+                pReplyData == NULL || replySize == NULL ||
+                *replySize < (sizeof(effect_param_t) + p->psize)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_GET_PARAM: ERROR");
+                      "EFFECT_CMD_GET_PARAM: ERROR");
                 return -EINVAL;
             }
 
             memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
 
-            p = (effect_param_t *)pReplyData;
+            p = (effect_param_t*)pReplyData;
 
             int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
 
-            p->status = android::Reverb_getParameter(pContext,
-                                                         (void *)p->data,
-                                                          &p->vsize,
-                                                          p->data + voffset);
+            p->status = android::Reverb_getParameter(pContext, (void*)p->data, &p->vsize,
+                                                     p->data + voffset);
 
             *replySize = sizeof(effect_param_t) + voffset + p->vsize;
 
-            //ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
+            // ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM "
             //        "*pCmdData %d, *replySize %d, *pReplyData %d ",
             //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
             //        *replySize,
             //        *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset));
 
         } break;
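
The EFFECT_CMD_GET_PARAM case above copies the request into the reply buffer and then writes the value at voffset, which is the parameter size rounded up to the next multiple of sizeof(int32_t) so the value area stays 32-bit aligned. The rounding expression can be checked in isolation; this is only a sketch with an illustrative helper name:

```cpp
// Sketch: round psize up to a multiple of sizeof(int32_t), as done for
// 'voffset' in the EFFECT_CMD_GET_PARAM handler above.
#include <cstdint>
#include <cstdio>

static uint32_t valueOffset(uint32_t psize) {
    return static_cast<uint32_t>(((psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t));
}

int main() {
    for (uint32_t psize : {1u, 4u, 5u, 8u}) {
        // 1 -> 4, 4 -> 4, 5 -> 8, 8 -> 8: the value area stays int32-aligned.
        printf("psize %u -> voffset %u\n", (unsigned)psize, (unsigned)valueOffset(psize));
    }
    return 0;
}
```
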
-        case EFFECT_CMD_SET_PARAM:{
-
-            //ALOGV("\tReverb_command cmdCode Case: "
+        case EFFECT_CMD_SET_PARAM: {
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_SET_PARAM start");
-            //ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
+            // ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ",
             //        *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
             //        *replySize,
             //        *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
 
             if (pCmdData == NULL || (cmdSize < (sizeof(effect_param_t) + sizeof(int32_t))) ||
-                    pReplyData == NULL ||  replySize == NULL || *replySize != sizeof(int32_t)) {
+                pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR");
+                      "EFFECT_CMD_SET_PARAM: ERROR");
                 return -EINVAL;
             }
 
-            effect_param_t *p = (effect_param_t *) pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
 
-            if (p->psize != sizeof(int32_t)){
+            if (p->psize != sizeof(int32_t)) {
                 ALOGV("\t4LVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
+                      "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
                 return -EINVAL;
             }
 
-            //ALOGV("\tn5Reverb_command cmdSize is %d\n"
+            // ALOGV("\tn5Reverb_command cmdSize is %d\n"
             //        "\tsizeof(effect_param_t) is  %d\n"
             //        "\tp->psize is %d\n"
             //        "\tp->vsize is %d"
             //        "\n",
             //        cmdSize, sizeof(effect_param_t), p->psize, p->vsize );
 
-            *(int *)pReplyData = android::Reverb_setParameter(pContext,
-                                                             (void *)p->data,
-                                                              p->data + p->psize,
-                                                              p->vsize);
+            *(int*)pReplyData = android::Reverb_setParameter(pContext, (void*)p->data,
+                                                             p->data + p->psize, p->vsize);
         } break;
 
         case EFFECT_CMD_ENABLE:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_ENABLE start");
 
-            if (pReplyData == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_ENABLE: ERROR");
+                      "EFFECT_CMD_ENABLE: ERROR");
                 return -EINVAL;
             }
-            if(pContext->bEnabled == LVM_TRUE){
-                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                         "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
-                 return -EINVAL;
-             }
-            *(int *)pReplyData = 0;
+            if (pContext->bEnabled == LVM_TRUE) {
+                ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+                      "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled");
+                return -EINVAL;
+            }
+            *(int*)pReplyData = 0;
             pContext->bEnabled = LVM_TRUE;
             /* Get the current settings */
             LvmStatus = LVREV_GetControlParameters(pContext->hInstance, &ActiveParams);
             LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "EFFECT_CMD_ENABLE")
             pContext->SamplesToExitCount =
-                    (ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000;
+                    (ActiveParams.T60 * pContext->config.inputCfg.samplingRate) / 1000;
             // force no volume ramp for first buffer processed after enabling the effect
             pContext->volumeMode = android::REVERB_VOLUME_FLAT;
-            //ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
+            // ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount);
             break;
         case EFFECT_CMD_DISABLE:
-            //ALOGV("\tReverb_command cmdCode Case: "
+            // ALOGV("\tReverb_command cmdCode Case: "
             //        "EFFECT_CMD_DISABLE start");
 
-            if (pReplyData == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || *replySize != sizeof(int)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_DISABLE: ERROR");
+                      "EFFECT_CMD_DISABLE: ERROR");
                 return -EINVAL;
             }
-            if(pContext->bEnabled == LVM_FALSE){
-                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                         "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
-                 return -EINVAL;
-             }
-            *(int *)pReplyData = 0;
+            if (pContext->bEnabled == LVM_FALSE) {
+                ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
+                      "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled");
+                return -EINVAL;
+            }
+            *(int*)pReplyData = 0;
             pContext->bEnabled = LVM_FALSE;
             break;
 
         case EFFECT_CMD_SET_VOLUME:
-            if (pCmdData == NULL ||
-                cmdSize != 2 * sizeof(uint32_t)) {
+            if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t)) {
                 ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                        "EFFECT_CMD_SET_VOLUME: ERROR");
+                      "EFFECT_CMD_SET_VOLUME: ERROR");
                 return -EINVAL;
             }
 
-            if (pReplyData != NULL) { // we have volume control
-                pContext->leftVolume = (LVM_INT16)((*(uint32_t *)pCmdData + (1 << 11)) >> 12);
-                pContext->rightVolume = (LVM_INT16)((*((uint32_t *)pCmdData + 1) + (1 << 11)) >> 12);
-                *(uint32_t *)pReplyData = (1 << 24);
-                *((uint32_t *)pReplyData + 1) = (1 << 24);
+            if (pReplyData != NULL) {  // we have volume control
+                pContext->leftVolume = (LVM_INT16)((*(uint32_t*)pCmdData + (1 << 11)) >> 12);
+                pContext->rightVolume = (LVM_INT16)((*((uint32_t*)pCmdData + 1) + (1 << 11)) >> 12);
+                *(uint32_t*)pReplyData = (1 << 24);
+                *((uint32_t*)pReplyData + 1) = (1 << 24);
                 if (pContext->volumeMode == android::REVERB_VOLUME_OFF) {
                     // force no volume ramp for first buffer processed after getting volume control
                     pContext->volumeMode = android::REVERB_VOLUME_FLAT;
                 }
-            } else { // we don't have volume control
+            } else {  // we don't have volume control
                 pContext->leftVolume = REVERB_UNIT_VOLUME;
                 pContext->rightVolume = REVERB_UNIT_VOLUME;
                 pContext->volumeMode = android::REVERB_VOLUME_OFF;
             }
-            ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d",
-                    pContext->leftVolume, pContext->rightVolume,  pContext->volumeMode);
+            ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d", pContext->leftVolume,
+                  pContext->rightVolume, pContext->volumeMode);
             break;
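
The EFFECT_CMD_SET_VOLUME branch above works in fixed point: the framework hands the wrapper left/right gains in 8.24 format, the wrapper rounds them to a 12-fractional-bit LVM_INT16, and it replies with 1 << 24 (unity) to signal that it applies the volume itself. A minimal sketch of that conversion, with an illustrative helper name:

#include <cstdint>

// Round an 8.24 fixed-point volume to the 12-fractional-bit value stored in
// pContext->leftVolume / rightVolume above (helper name is illustrative).
constexpr int16_t VolumeQ8_24ToQ12(uint32_t volQ8_24) {
    return static_cast<int16_t>((volQ8_24 + (1u << 11)) >> 12);
}

static_assert(VolumeQ8_24ToQ12(1u << 24) == (1 << 12), "unity gain maps to 1.0 in Q12");
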
 
         case EFFECT_CMD_SET_DEVICE:
         case EFFECT_CMD_SET_AUDIO_MODE:
-        //ALOGV("\tReverb_command cmdCode Case: "
-        //        "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE start");
+            // ALOGV("\tReverb_command cmdCode Case: "
+            //        "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE
+            //        start");
             break;
 
         default:
             ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: "
-                    "DEFAULT start %d ERROR",cmdCode);
+                  "DEFAULT start %d ERROR",
+                  cmdCode);
             return -EINVAL;
     }
 
-    //ALOGV("\tReverb_command end\n\n");
+    // ALOGV("\tReverb_command end\n\n");
     return 0;
-}    /* end Reverb_command */
+} /* end Reverb_command */
 
 /* Effect Control Interface Implementation: get_descriptor */
-int Reverb_getDescriptor(effect_handle_t   self,
-                                    effect_descriptor_t *pDescriptor)
-{
-    android::ReverbContext * pContext = (android::ReverbContext *)self;
-    const effect_descriptor_t *desc;
+int Reverb_getDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+    android::ReverbContext* pContext = (android::ReverbContext*)self;
+    const effect_descriptor_t* desc;
 
     if (pContext == NULL || pDescriptor == NULL) {
         ALOGV("Reverb_getDescriptor() invalid param");
@@ -2094,26 +2006,24 @@
     *pDescriptor = *desc;
 
     return 0;
-}   /* end Reverb_getDescriptor */
+} /* end Reverb_getDescriptor */
 
 // effect_handle_t interface implementation for Reverb effect
 const struct effect_interface_s gReverbInterface = {
-    Reverb_process,
-    Reverb_command,
-    Reverb_getDescriptor,
-    NULL,
-};    /* end gReverbInterface */
+        Reverb_process,
+        Reverb_command,
+        Reverb_getDescriptor,
+        NULL,
+}; /* end gReverbInterface */
 
 // This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Reverb Library",
-    .implementor = "NXP Software Ltd.",
-    .create_effect = android::EffectCreate,
-    .release_effect = android::EffectRelease,
-    .get_descriptor = android::EffectGetDescriptor,
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "Reverb Library",
+        .implementor = "NXP Software Ltd.",
+        .create_effect = android::EffectCreate,
+        .release_effect = android::EffectRelease,
+        .get_descriptor = android::EffectGetDescriptor,
 };
-
 }
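
The initializer above is the only exported symbol of the reverb wrapper. A minimal host-side sketch of how such a soundfx library is typically reached: dlopen the shared object, resolve the library-info symbol, and sanity-check its tag. The library file name is an illustrative assumption.

#include <dlfcn.h>
#include <cstdio>
#include <hardware/audio_effect.h>

int main() {
    void* lib = dlopen("libreverbwrapper.so", RTLD_NOW);  // illustrative path
    if (lib == nullptr) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }
    // AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR is the string form of the exported
    // symbol name provided by the audio_effect headers.
    auto* effectLib = static_cast<audio_effect_library_t*>(
            dlsym(lib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR));
    if (effectLib == nullptr || effectLib->tag != AUDIO_EFFECT_LIBRARY_TAG) {
        fprintf(stderr, "no valid audio_effect_library_t found\n");
        dlclose(lib);
        return 1;
    }
    printf("loaded \"%s\" by %s\n", effectLib->name, effectLib->implementor);
    dlclose(lib);
    return 0;
}
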
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
index 96223a8..227d953 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.h
@@ -20,16 +20,15 @@
 #include <audio_effects/effect_environmentalreverb.h>
 #include <audio_effects/effect_presetreverb.h>
 
-#define MAX_NUM_BANDS           5
-#define MAX_CALL_SIZE           256
-#define LVREV_MAX_T60           7000
-#define LVREV_MAX_REVERB_LEVEL  2000
-#define LVREV_MAX_FRAME_SIZE    2560
-#define LVREV_CUP_LOAD_ARM9E    470    // Expressed in 0.1 MIPS
-#define LVREV_MEM_USAGE         (71+(LVREV_MAX_FRAME_SIZE>>7))     // Expressed in kB
+#define MAX_NUM_BANDS 5
+#define MAX_CALL_SIZE 256
+#define LVREV_MAX_T60 7000
+#define LVREV_MAX_REVERB_LEVEL 2000
+#define LVREV_MAX_FRAME_SIZE 2560
+#define LVREV_CUP_LOAD_ARM9E 470                            // Expressed in 0.1 MIPS
+#define LVREV_MEM_USAGE (71 + (LVREV_MAX_FRAME_SIZE >> 7))  // Expressed in kB
 
-typedef struct _LPFPair_t
-{
+typedef struct _LPFPair_t {
     int16_t Room_HF;
     int16_t LPF;
 } LPFPair_t;
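
The reflowed macros above keep their original values; as a quick check of the LVREV_MEM_USAGE arithmetic (2560 >> 7 is 20, so the reported footprint is 91 kB), here is a compile-time sketch with the constants copied from the header:

constexpr int kLvrevMaxFrameSize = 2560;                          // LVREV_MAX_FRAME_SIZE
constexpr int kLvrevMemUsageKb = 71 + (kLvrevMaxFrameSize >> 7);  // LVREV_MEM_USAGE

static_assert(kLvrevMemUsageKb == 91, "71 + (2560 >> 7) == 91 kB");
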
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
new file mode 120000
index 0000000..f1b4f69
--- /dev/null
+++ b/media/libeffects/preprocessing/.clang-format
@@ -0,0 +1 @@
+../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c87635f..c6e036a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,35 +1,60 @@
 // audio preprocessing wrapper
-cc_library_shared {
-    name: "libaudiopreprocessing",
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_preprocessing_license",
+    ],
+}
 
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_preprocessing_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_defaults {
+    name: "libaudiopreprocessing-defaults",
     vendor: true,
-
-    relative_install_path: "soundfx",
-
-    srcs: ["PreProcessing.cpp"],
-
-    include_dirs: [
-        "external/webrtc",
-        "external/webrtc/webrtc/modules/include",
-        "external/webrtc/webrtc/modules/audio_processing/include",
+    host_supported: true,
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-Wno-unused-parameter",
     ],
 
     shared_libs: [
-        "libwebrtc_audio_preprocessing",
-        "libspeexresampler",
-        "libutils",
         "liblog",
+        "libutils",
     ],
 
-    cflags: [
-        "-DWEBRTC_POSIX",
-        "-fvisibility=hidden",
-        "-Wall",
-        "-Werror",
+    static_libs: [
+        "webrtc_audio_processing",
     ],
 
     header_libs: [
         "libaudioeffects",
         "libhardware_headers",
     ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_library {
+    name: "libaudiopreprocessing",
+    defaults: ["libaudiopreprocessing-defaults"],
+    relative_install_path: "soundfx",
+    srcs: ["PreProcessing.cpp"],
+    header_libs: [
+        "libwebrtc_absl_headers",
+    ],
 }
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 5fab5be..19a8b2f 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -18,15 +18,15 @@
 #include <string.h>
 #define LOG_TAG "PreProcessing"
 //#define LOG_NDEBUG 0
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <hardware/audio_effect.h>
 #include <audio_effects/effect_aec.h>
 #include <audio_effects/effect_agc.h>
+#include <hardware/audio_effect.h>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+#include <audio_effects/effect_agc2.h>
 #include <audio_effects/effect_ns.h>
-#include <module_common_types.h>
 #include <audio_processing.h>
-#include "speex/speex_resampler.h"
+#include <module_common_types.h>
 
 // undefine to perform multi channels API functional tests
 //#define DUAL_MIC_TEST
@@ -39,26 +39,26 @@
 #define PREPROC_NUM_SESSIONS 8
 
 // types of pre processing modules
-enum preproc_id
-{
-    PREPROC_AGC,        // Automatic Gain Control
-    PREPROC_AEC,        // Acoustic Echo Canceler
-    PREPROC_NS,         // Noise Suppressor
+enum preproc_id {
+    PREPROC_AGC,  // Automatic Gain Control
+    PREPROC_AGC2,  // Automatic Gain Control 2
+    PREPROC_AEC,  // Acoustic Echo Canceler
+    PREPROC_NS,   // Noise Suppressor
     PREPROC_NUM_EFFECTS
 };
 
 // Session state
 enum preproc_session_state {
-    PREPROC_SESSION_STATE_INIT,        // initialized
-    PREPROC_SESSION_STATE_CONFIG       // configuration received
+    PREPROC_SESSION_STATE_INIT,   // initialized
+    PREPROC_SESSION_STATE_CONFIG  // configuration received
 };
 
 // Effect/Preprocessor state
 enum preproc_effect_state {
-    PREPROC_EFFECT_STATE_INIT,         // initialized
-    PREPROC_EFFECT_STATE_CREATED,      // webRTC engine created
-    PREPROC_EFFECT_STATE_CONFIG,       // configuration received/disabled
-    PREPROC_EFFECT_STATE_ACTIVE        // active/enabled
+    PREPROC_EFFECT_STATE_INIT,     // initialized
+    PREPROC_EFFECT_STATE_CREATED,  // webRTC engine created
+    PREPROC_EFFECT_STATE_CONFIG,   // configuration received/disabled
+    PREPROC_EFFECT_STATE_ACTIVE    // active/enabled
 };
 
 // handle on webRTC engine
@@ -71,74 +71,66 @@
 // Effect operation table. Functions for all pre processors are declared in sPreProcOps[] table.
 // Function pointer can be null if no action required.
 struct preproc_ops_s {
-    int (* create)(preproc_effect_t *fx);
-    int (* init)(preproc_effect_t *fx);
-    int (* reset)(preproc_effect_t *fx);
-    void (* enable)(preproc_effect_t *fx);
-    void (* disable)(preproc_effect_t *fx);
-    int (* set_parameter)(preproc_effect_t *fx, void *param, void *value);
-    int (* get_parameter)(preproc_effect_t *fx, void *param, uint32_t *size, void *value);
-    int (* set_device)(preproc_effect_t *fx, uint32_t device);
+    int (*create)(preproc_effect_t* fx);
+    int (*init)(preproc_effect_t* fx);
+    int (*reset)(preproc_effect_t* fx);
+    void (*enable)(preproc_effect_t* fx);
+    void (*disable)(preproc_effect_t* fx);
+    int (*set_parameter)(preproc_effect_t* fx, void* param, void* value);
+    int (*get_parameter)(preproc_effect_t* fx, void* param, uint32_t* size, void* value);
+    int (*set_device)(preproc_effect_t* fx, uint32_t device);
 };
 
 // Effect context
 struct preproc_effect_s {
-    const struct effect_interface_s *itfe;
-    uint32_t procId;                // type of pre processor (enum preproc_id)
-    uint32_t state;                 // current state (enum preproc_effect_state)
-    preproc_session_t *session;     // session the effect is on
-    const preproc_ops_t *ops;       // effect ops table
-    preproc_fx_handle_t engine;     // handle on webRTC engine
-    uint32_t type;                  // subtype of effect
+    const struct effect_interface_s* itfe;
+    uint32_t procId;             // type of pre processor (enum preproc_id)
+    uint32_t state;              // current state (enum preproc_effect_state)
+    preproc_session_t* session;  // session the effect is on
+    const preproc_ops_t* ops;    // effect ops table
+    preproc_fx_handle_t engine;  // handle on webRTC engine
+    uint32_t type;               // subtype of effect
 #ifdef DUAL_MIC_TEST
-    bool aux_channels_on;           // support auxiliary channels
-    size_t cur_channel_config;      // current auciliary channel configuration
+    bool aux_channels_on;       // support auxiliary channels
+    size_t cur_channel_config;  // current auxiliary channel configuration
 #endif
 };
 
 // Session context
 struct preproc_session_s {
-    struct preproc_effect_s effects[PREPROC_NUM_EFFECTS]; // effects in this session
-    uint32_t state;                     // current state (enum preproc_session_state)
-    int id;                             // audio session ID
-    int io;                             // handle of input stream this session is on
-    webrtc::AudioProcessing* apm;       // handle on webRTC audio processing module (APM)
-    size_t apmFrameCount;               // buffer size for webRTC process (10 ms)
-    uint32_t apmSamplingRate;           // webRTC APM sampling rate (8/16 or 32 kHz)
-    size_t frameCount;                  // buffer size before input resampler ( <=> apmFrameCount)
-    uint32_t samplingRate;              // sampling rate at effect process interface
-    uint32_t inChannelCount;            // input channel count
-    uint32_t outChannelCount;           // output channel count
-    uint32_t createdMsk;                // bit field containing IDs of crested pre processors
-    uint32_t enabledMsk;                // bit field containing IDs of enabled pre processors
-    uint32_t processedMsk;              // bit field containing IDs of pre processors already
-                                        // processed in current round
-    webrtc::AudioFrame *procFrame;      // audio frame passed to webRTC AMP ProcessStream()
-    int16_t *inBuf;                     // input buffer used when resampling
-    size_t inBufSize;                   // input buffer size in frames
-    size_t framesIn;                    // number of frames in input buffer
-    SpeexResamplerState *inResampler;   // handle on input speex resampler
-    int16_t *outBuf;                    // output buffer used when resampling
-    size_t outBufSize;                  // output buffer size in frames
-    size_t framesOut;                   // number of frames in output buffer
-    SpeexResamplerState *outResampler;  // handle on output speex resampler
-    uint32_t revChannelCount;           // number of channels on reverse stream
-    uint32_t revEnabledMsk;             // bit field containing IDs of enabled pre processors
-                                        // with reverse channel
-    uint32_t revProcessedMsk;           // bit field containing IDs of pre processors with reverse
-                                        // channel already processed in current round
-    webrtc::AudioFrame *revFrame;       // audio frame passed to webRTC AMP AnalyzeReverseStream()
-    int16_t *revBuf;                    // reverse channel input buffer
-    size_t revBufSize;                  // reverse channel input buffer size
-    size_t framesRev;                   // number of frames in reverse channel input buffer
-    SpeexResamplerState *revResampler;  // handle on reverse channel input speex resampler
+    struct preproc_effect_s effects[PREPROC_NUM_EFFECTS];  // effects in this session
+    uint32_t state;                // current state (enum preproc_session_state)
+    int id;                        // audio session ID
+    int io;                        // handle of input stream this session is on
+    webrtc::AudioProcessing* apm;  // handle on webRTC audio processing module (APM)
+    // Audio Processing module builder
+    webrtc::AudioProcessingBuilder ap_builder;
+    // frameCount represents the size of the buffers used for processing, and must represent 10ms.
+    size_t frameCount;
+    uint32_t samplingRate;     // sampling rate at effect process interface
+    uint32_t inChannelCount;   // input channel count
+    uint32_t outChannelCount;  // output channel count
+    uint32_t createdMsk;       // bit field containing IDs of created pre processors
+    uint32_t enabledMsk;       // bit field containing IDs of enabled pre processors
+    uint32_t processedMsk;     // bit field containing IDs of pre processors already
+                               // processed in current round
+    // audio config structure
+    webrtc::AudioProcessing::Config config;
+    webrtc::StreamConfig inputConfig;   // input stream configuration
+    webrtc::StreamConfig outputConfig;  // output stream configuration
+    uint32_t revChannelCount;  // number of channels on reverse stream
+    uint32_t revEnabledMsk;    // bit field containing IDs of enabled pre processors
+                               // with reverse channel
+    uint32_t revProcessedMsk;  // bit field containing IDs of pre processors with reverse
+                               // channel already processed in current round
+    webrtc::StreamConfig revConfig;     // reverse stream configuration.
 };
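
The comment on frameCount above says the processing buffers must represent 10 ms of audio at the session sampling rate; a minimal sketch of that relationship (the helper name is an assumption, not part of the patch):

#include <cstddef>
#include <cstdint>

// 10 ms of audio at samplingRateHz, i.e. samplingRateHz / 100 frames.
constexpr size_t FrameCountFor10Ms(uint32_t samplingRateHz) {
    return samplingRateHz / 100;
}

static_assert(FrameCountFor10Ms(16000) == 160, "16 kHz -> 160 frames per 10 ms");
static_assert(FrameCountFor10Ms(48000) == 480, "48 kHz -> 480 frames per 10 ms");
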
 
 #ifdef DUAL_MIC_TEST
 enum {
-    PREPROC_CMD_DUAL_MIC_ENABLE = EFFECT_CMD_FIRST_PROPRIETARY, // enable dual mic mode
-    PREPROC_CMD_DUAL_MIC_PCM_DUMP_START,                        // start pcm capture
-    PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP                          // stop pcm capture
+    PREPROC_CMD_DUAL_MIC_ENABLE = EFFECT_CMD_FIRST_PROPRIETARY,  // enable dual mic mode
+    PREPROC_CMD_DUAL_MIC_PCM_DUMP_START,                         // start pcm capture
+    PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP                           // stop pcm capture
 };
 
 enum {
@@ -151,24 +143,22 @@
 };
 
 const channel_config_t sDualMicConfigs[CHANNEL_CFG_CNT] = {
-        {AUDIO_CHANNEL_IN_MONO , 0},
-        {AUDIO_CHANNEL_IN_STEREO , 0},
-        {AUDIO_CHANNEL_IN_FRONT , AUDIO_CHANNEL_IN_BACK},
-        {AUDIO_CHANNEL_IN_STEREO , AUDIO_CHANNEL_IN_RIGHT}
-};
+        {AUDIO_CHANNEL_IN_MONO, 0},
+        {AUDIO_CHANNEL_IN_STEREO, 0},
+        {AUDIO_CHANNEL_IN_FRONT, AUDIO_CHANNEL_IN_BACK},
+        {AUDIO_CHANNEL_IN_STEREO, AUDIO_CHANNEL_IN_RIGHT}};
 
 bool sHasAuxChannels[PREPROC_NUM_EFFECTS] = {
-        false,   // PREPROC_AGC
+        false,  // PREPROC_AGC
         true,   // PREPROC_AEC
         true,   // PREPROC_NS
 };
 
 bool gDualMicEnabled;
-FILE *gPcmDumpFh;
+FILE* gPcmDumpFh;
 static pthread_mutex_t gPcmDumpLock = PTHREAD_MUTEX_INITIALIZER;
 #endif
 
-
 //------------------------------------------------------------------------------
 // Effect descriptors
 //------------------------------------------------------------------------------
@@ -178,68 +168,69 @@
 
 // Automatic Gain Control
 static const effect_descriptor_t sAgcDescriptor = {
-        { 0x0a8abfe0, 0x654c, 0x11e0, 0xba26, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
-        { 0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+        {0x0a8abfe0, 0x654c, 0x11e0, 0xba26, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // type
+        {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
-        0, //FIXME indicate CPU load
-        0, //FIXME indicate memory usage
+        (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+        0,  // FIXME indicate CPU load
+        0,  // FIXME indicate memory usage
         "Automatic Gain Control",
-        "The Android Open Source Project"
-};
+        "The Android Open Source Project"};
+
+// Automatic Gain Control 2
+static const effect_descriptor_t sAgc2Descriptor = {
+        {0xae3c653b, 0xbe18, 0x4ab8, 0x8938, {0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}},  // type
+        {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}},  // uuid
+        EFFECT_CONTROL_API_VERSION,
+        (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+        0,  // FIXME indicate CPU load
+        0,  // FIXME indicate memory usage
+        "Automatic Gain Control 2",
+        "The Android Open Source Project"};
 
 // Acoustic Echo Cancellation
 static const effect_descriptor_t sAecDescriptor = {
-        { 0x7b491460, 0x8d4d, 0x11e0, 0xbd61, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
-        { 0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+        {0x7b491460, 0x8d4d, 0x11e0, 0xbd61, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // type
+        {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
-        0, //FIXME indicate CPU load
-        0, //FIXME indicate memory usage
+        (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+        0,  // FIXME indicate CPU load
+        0,  // FIXME indicate memory usage
         "Acoustic Echo Canceler",
-        "The Android Open Source Project"
-};
+        "The Android Open Source Project"};
 
 // Noise suppression
 static const effect_descriptor_t sNsDescriptor = {
-        { 0x58b4b260, 0x8e06, 0x11e0, 0xaa8e, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // type
-        { 0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, // uuid
+        {0x58b4b260, 0x8e06, 0x11e0, 0xaa8e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // type
+        {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // uuid
         EFFECT_CONTROL_API_VERSION,
-        (EFFECT_FLAG_TYPE_PRE_PROC|EFFECT_FLAG_DEVICE_IND),
-        0, //FIXME indicate CPU load
-        0, //FIXME indicate memory usage
+        (EFFECT_FLAG_TYPE_PRE_PROC | EFFECT_FLAG_DEVICE_IND),
+        0,  // FIXME indicate CPU load
+        0,  // FIXME indicate memory usage
         "Noise Suppression",
-        "The Android Open Source Project"
-};
+        "The Android Open Source Project"};
 
-
-static const effect_descriptor_t *sDescriptors[PREPROC_NUM_EFFECTS] = {
-        &sAgcDescriptor,
-        &sAecDescriptor,
-        &sNsDescriptor
-};
+static const effect_descriptor_t* sDescriptors[PREPROC_NUM_EFFECTS] = {&sAgcDescriptor,
+                                                                       &sAgc2Descriptor,
+                                                                       &sAecDescriptor,
+                                                                       &sNsDescriptor};
 
 //------------------------------------------------------------------------------
 // Helper functions
 //------------------------------------------------------------------------------
 
-const effect_uuid_t * const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {
-        FX_IID_AGC,
-        FX_IID_AEC,
-        FX_IID_NS
-};
+const effect_uuid_t* const sUuidToPreProcTable[PREPROC_NUM_EFFECTS] = {FX_IID_AGC,
+                                                                       FX_IID_AGC2,
+                                                                       FX_IID_AEC, FX_IID_NS};
 
-
-const effect_uuid_t * ProcIdToUuid(int procId)
-{
+const effect_uuid_t* ProcIdToUuid(int procId) {
     if (procId >= PREPROC_NUM_EFFECTS) {
         return EFFECT_UUID_NULL;
     }
     return sUuidToPreProcTable[procId];
 }
 
-uint32_t UuidToProcId(const effect_uuid_t * uuid)
-{
+uint32_t UuidToProcId(const effect_uuid_t* uuid) {
     size_t i;
     for (i = 0; i < PREPROC_NUM_EFFECTS; i++) {
         if (memcmp(uuid, sUuidToPreProcTable[i], sizeof(*uuid)) == 0) {
@@ -249,15 +240,13 @@
     return i;
 }
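
ProcIdToUuid() and UuidToProcId() above rely on sUuidToPreProcTable staying index-aligned with preproc_id, just as sDescriptors and sPreProcOps must; with PREPROC_AGC2 inserted, each of those tables gained an entry at index 1 (note that sHasAuxChannels, compiled only under DUAL_MIC_TEST, still lists three initializers, so its entries no longer line up with the enum). A reminder-style guard, illustrative and not part of the patch:

// The parallel tables are declared with size PREPROC_NUM_EFFECTS, so a too-short
// initializer list value-initializes the tail instead of failing to compile;
// bumping this assert forces a review of every table when the enum grows.
static_assert(PREPROC_NUM_EFFECTS == 4,
              "update sUuidToPreProcTable, sDescriptors and sPreProcOps together");
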
 
-bool HasReverseStream(uint32_t procId)
-{
+bool HasReverseStream(uint32_t procId) {
     if (procId == PREPROC_AEC) {
         return true;
     }
     return false;
 }
 
-
 //------------------------------------------------------------------------------
 // Automatic Gain Control (AGC)
 //------------------------------------------------------------------------------
@@ -266,499 +255,538 @@
 static const int kAgcDefaultCompGain = 9;
 static const bool kAgcDefaultLimiter = true;
 
-int  AgcInit (preproc_effect_t *effect)
-{
-    ALOGV("AgcInit");
-    webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
-    agc->set_mode(webrtc::GainControl::kFixedDigital);
-    agc->set_target_level_dbfs(kAgcDefaultTargetLevel);
-    agc->set_compression_gain_db(kAgcDefaultCompGain);
-    agc->enable_limiter(kAgcDefaultLimiter);
+int Agc2Init(preproc_effect_t* effect) {
+    ALOGV("Agc2Init");
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.fixed_digital.gain_db = 0.f;
+    effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+            effect->session->config.gain_controller2.kRms;
+    effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db = 2.f;
+    effect->session->apm->ApplyConfig(effect->session->config);
     return 0;
 }
 
-int  AgcCreate(preproc_effect_t *effect)
-{
-    webrtc::GainControl *agc = effect->session->apm->gain_control();
-    ALOGV("AgcCreate got agc %p", agc);
-    if (agc == NULL) {
-        ALOGW("AgcCreate Error");
-        return -ENOMEM;
-    }
-    effect->engine = static_cast<preproc_fx_handle_t>(agc);
+int AgcInit(preproc_effect_t* effect) {
+    ALOGV("AgcInit");
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+    effect->session->config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+    effect->session->config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+    effect->session->apm->ApplyConfig(effect->session->config);
+    return 0;
+}
+
+int Agc2Create(preproc_effect_t* effect) {
+    Agc2Init(effect);
+    return 0;
+}
+
+int AgcCreate(preproc_effect_t* effect) {
     AgcInit(effect);
     return 0;
 }
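
AgcInit()/Agc2Init() above illustrate the pattern this rewrite uses throughout: instead of holding per-component handles (webrtc::GainControl and friends), each tuner reads the whole webrtc::AudioProcessing::Config, edits the relevant fields, and writes it back with ApplyConfig(). A minimal sketch of that read-modify-write step, assuming the WebRTC header already included by this file and a hypothetical, already-created apm instance:

#include <audio_processing.h>

// Enable AGC1 with explicit settings via the config read-modify-write pattern.
void EnableFixedDigitalAgc(webrtc::AudioProcessing* apm, int targetLevelDbfs,
                           int compressionGainDb) {
    webrtc::AudioProcessing::Config config = apm->GetConfig();  // read current state
    config.gain_controller1.enabled = true;
    config.gain_controller1.target_level_dbfs = targetLevelDbfs;
    config.gain_controller1.compression_gain_db = compressionGainDb;
    config.gain_controller1.enable_limiter = true;
    apm->ApplyConfig(config);                                   // write the updated config back
}
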
 
-int AgcGetParameter(preproc_effect_t *effect,
-                    void *pParam,
-                    uint32_t *pValueSize,
-                    void *pValue)
-{
+int Agc2GetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    uint32_t param = *(uint32_t *)pParam;
-    t_agc_settings *pProperties = (t_agc_settings *)pValue;
-    webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
+    uint32_t param = *(uint32_t*)pParam;
+    agc2_settings_t* pProperties = (agc2_settings_t*)pValue;
 
     switch (param) {
-    case AGC_PARAM_TARGET_LEVEL:
-    case AGC_PARAM_COMP_GAIN:
-        if (*pValueSize < sizeof(int16_t)) {
-            *pValueSize = 0;
-            return -EINVAL;
-        }
-        break;
-    case AGC_PARAM_LIMITER_ENA:
-        if (*pValueSize < sizeof(bool)) {
-            *pValueSize = 0;
-            return -EINVAL;
-        }
-        break;
-    case AGC_PARAM_PROPERTIES:
-        if (*pValueSize < sizeof(t_agc_settings)) {
-            *pValueSize = 0;
-            return -EINVAL;
-        }
-        break;
+        case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+            if (*pValueSize < sizeof(float)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+            if (*pValueSize < sizeof(int32_t)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+            if (*pValueSize < sizeof(float)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+        case AGC2_PARAM_PROPERTIES:
+            if (*pValueSize < sizeof(agc2_settings_t)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
 
-    default:
-        ALOGW("AgcGetParameter() unknown param %08x", param);
-        status = -EINVAL;
-        break;
+        default:
+            ALOGW("Agc2GetParameter() unknown param %08x", param);
+            status = -EINVAL;
+            break;
     }
 
+    effect->session->config = effect->session->apm->GetConfig();
     switch (param) {
-    case AGC_PARAM_TARGET_LEVEL:
-        *(int16_t *) pValue = (int16_t)(agc->target_level_dbfs() * -100);
-        ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t *) pValue);
-        break;
-    case AGC_PARAM_COMP_GAIN:
-        *(int16_t *) pValue = (int16_t)(agc->compression_gain_db() * 100);
-        ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t *) pValue);
-        break;
-    case AGC_PARAM_LIMITER_ENA:
-        *(bool *) pValue = (bool)agc->is_limiter_enabled();
-        ALOGV("AgcGetParameter() limiter enabled %s",
-             (*(int16_t *) pValue != 0) ? "true" : "false");
-        break;
-    case AGC_PARAM_PROPERTIES:
-        pProperties->targetLevel = (int16_t)(agc->target_level_dbfs() * -100);
-        pProperties->compGain = (int16_t)(agc->compression_gain_db() * 100);
-        pProperties->limiterEnabled = (bool)agc->is_limiter_enabled();
-        break;
-    default:
-        ALOGW("AgcGetParameter() unknown param %d", param);
-        status = -EINVAL;
-        break;
+        case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+            *(float*)pValue =
+                    (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+            ALOGV("Agc2GetParameter() target level %f dB", *(float*)pValue);
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+            *(uint32_t*)pValue = (uint32_t)(
+                    effect->session->config.gain_controller2.adaptive_digital.level_estimator);
+            ALOGV("Agc2GetParameter() level estimator %d",
+                  *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+            *(float*)pValue = (float)(effect->session->config.gain_controller2.adaptive_digital
+                                              .extra_saturation_margin_db);
+            ALOGV("Agc2GetParameter() extra saturation margin %f dB", *(float*)pValue);
+            break;
+        case AGC2_PARAM_PROPERTIES:
+            pProperties->fixedDigitalGain =
+                    (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
+            pProperties->level_estimator = (uint32_t)(
+                    effect->session->config.gain_controller2.adaptive_digital.level_estimator);
+            pProperties->extraSaturationMargin =
+                    (float)(effect->session->config.gain_controller2.adaptive_digital
+                                    .extra_saturation_margin_db);
+            break;
+        default:
+            ALOGW("Agc2GetParameter() unknown param %d", param);
+            status = -EINVAL;
+            break;
+    }
+
+    return status;
+}
+
+int AgcGetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
+    int status = 0;
+    uint32_t param = *(uint32_t*)pParam;
+    t_agc_settings* pProperties = (t_agc_settings*)pValue;
+
+    switch (param) {
+        case AGC_PARAM_TARGET_LEVEL:
+        case AGC_PARAM_COMP_GAIN:
+            if (*pValueSize < sizeof(int16_t)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+        case AGC_PARAM_LIMITER_ENA:
+            if (*pValueSize < sizeof(bool)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+        case AGC_PARAM_PROPERTIES:
+            if (*pValueSize < sizeof(t_agc_settings)) {
+                *pValueSize = 0;
+                return -EINVAL;
+            }
+            break;
+
+        default:
+            ALOGW("AgcGetParameter() unknown param %08x", param);
+            status = -EINVAL;
+            break;
+    }
+
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+        case AGC_PARAM_TARGET_LEVEL:
+            *(int16_t*)pValue =
+                    (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+            ALOGV("AgcGetParameter() target level %d milliBels", *(int16_t*)pValue);
+            break;
+        case AGC_PARAM_COMP_GAIN:
+            *(int16_t*)pValue =
+                    (int16_t)(effect->session->config.gain_controller1.compression_gain_db * -100);
+            ALOGV("AgcGetParameter() comp gain %d milliBels", *(int16_t*)pValue);
+            break;
+        case AGC_PARAM_LIMITER_ENA:
+            *(bool*)pValue = (bool)(effect->session->config.gain_controller1.enable_limiter);
+            ALOGV("AgcGetParameter() limiter enabled %s",
+                  (*(int16_t*)pValue != 0) ? "true" : "false");
+            break;
+        case AGC_PARAM_PROPERTIES:
+            pProperties->targetLevel =
+                    (int16_t)(effect->session->config.gain_controller1.target_level_dbfs * -100);
+            pProperties->compGain =
+                    (int16_t)(effect->session->config.gain_controller1.compression_gain_db * -100);
+            pProperties->limiterEnabled =
+                    (bool)(effect->session->config.gain_controller1.enable_limiter);
+            break;
+        default:
+            ALOGW("AgcGetParameter() unknown param %d", param);
+            status = -EINVAL;
+            break;
     }
     return status;
 }
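
AgcGetParameter()/AgcSetParameter() above translate between the effect API's milliBel values and gain_controller1.target_level_dbfs, which stores the target as positive dB below full scale. A small worked sketch of that conversion (helper names are illustrative):

#include <cstdint>

constexpr int16_t TargetLevelToMillibels(int targetLevelDbfs) {
    return static_cast<int16_t>(targetLevelDbfs * -100);  // 3 dB below FS -> -300 mB
}
constexpr int MillibelsToTargetLevel(int16_t millibels) {
    return -(millibels / 100);                            // -300 mB -> 3 dB below FS
}

static_assert(TargetLevelToMillibels(3) == -300, "matches the * -100 above");
static_assert(MillibelsToTargetLevel(-300) == 3, "matches the -(x / 100) above");
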
 
-int AgcSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int Agc2SetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
     int status = 0;
-    uint32_t param = *(uint32_t *)pParam;
-    t_agc_settings *pProperties = (t_agc_settings *)pValue;
-    webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
-
+    uint32_t param = *(uint32_t*)pParam;
+    float valueFloat = 0.f;
+    agc2_settings_t* pProperties = (agc2_settings_t*)pValue;
+    effect->session->config = effect->session->apm->GetConfig();
     switch (param) {
-    case AGC_PARAM_TARGET_LEVEL:
-        ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t *)pValue);
-        status = agc->set_target_level_dbfs(-(*(int16_t *)pValue / 100));
-        break;
-    case AGC_PARAM_COMP_GAIN:
-        ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t *)pValue);
-        status = agc->set_compression_gain_db(*(int16_t *)pValue / 100);
-        break;
-    case AGC_PARAM_LIMITER_ENA:
-        ALOGV("AgcSetParameter() limiter enabled %s", *(bool *)pValue ? "true" : "false");
-        status = agc->enable_limiter(*(bool *)pValue);
-        break;
-    case AGC_PARAM_PROPERTIES:
-        ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
-             pProperties->targetLevel,
-             pProperties->compGain,
-             pProperties->limiterEnabled);
-        status = agc->set_target_level_dbfs(-(pProperties->targetLevel / 100));
-        if (status != 0) break;
-        status = agc->set_compression_gain_db(pProperties->compGain / 100);
-        if (status != 0) break;
-        status = agc->enable_limiter(pProperties->limiterEnabled);
-        break;
-    default:
-        ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
-        status = -EINVAL;
-        break;
+        case AGC2_PARAM_FIXED_DIGITAL_GAIN:
+            valueFloat = (float)(*(int32_t*)pValue);
+            ALOGV("Agc2SetParameter() fixed digital gain %f dB", valueFloat);
+            effect->session->config.gain_controller2.fixed_digital.gain_db = valueFloat;
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
+            ALOGV("Agc2SetParameter() level estimator %d",
+                  *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+            effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+                    (*(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+            break;
+        case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
+            valueFloat = (float)(*(int32_t*)pValue);
+            ALOGV("Agc2SetParameter() extra saturation margin %f dB", valueFloat);
+            effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+                    valueFloat;
+            break;
+        case AGC2_PARAM_PROPERTIES:
+            ALOGV("Agc2SetParameter() properties gain %f, level %d margin %f",
+                  pProperties->fixedDigitalGain, pProperties->level_estimator,
+                  pProperties->extraSaturationMargin);
+            effect->session->config.gain_controller2.fixed_digital.gain_db =
+                    pProperties->fixedDigitalGain;
+            effect->session->config.gain_controller2.adaptive_digital.level_estimator =
+                    (webrtc::AudioProcessing::Config::GainController2::LevelEstimator)
+                            pProperties->level_estimator;
+            effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
+                    pProperties->extraSaturationMargin;
+            break;
+        default:
+            ALOGW("Agc2SetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+            status = -EINVAL;
+            break;
     }
+    effect->session->apm->ApplyConfig(effect->session->config);
+
+    ALOGV("Agc2SetParameter() done status %d", status);
+
+    return status;
+}
+
+int AgcSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
+    int status = 0;
+    uint32_t param = *(uint32_t*)pParam;
+    t_agc_settings* pProperties = (t_agc_settings*)pValue;
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
+        case AGC_PARAM_TARGET_LEVEL:
+            ALOGV("AgcSetParameter() target level %d milliBels", *(int16_t*)pValue);
+            effect->session->config.gain_controller1.target_level_dbfs =
+                    (-(*(int16_t*)pValue / 100));
+            break;
+        case AGC_PARAM_COMP_GAIN:
+            ALOGV("AgcSetParameter() comp gain %d milliBels", *(int16_t*)pValue);
+            effect->session->config.gain_controller1.compression_gain_db =
+                    (*(int16_t*)pValue / 100);
+            break;
+        case AGC_PARAM_LIMITER_ENA:
+            ALOGV("AgcSetParameter() limiter enabled %s", *(bool*)pValue ? "true" : "false");
+            effect->session->config.gain_controller1.enable_limiter = (*(bool*)pValue);
+            break;
+        case AGC_PARAM_PROPERTIES:
+            ALOGV("AgcSetParameter() properties level %d, gain %d limiter %d",
+                  pProperties->targetLevel, pProperties->compGain, pProperties->limiterEnabled);
+            effect->session->config.gain_controller1.target_level_dbfs =
+                    -(pProperties->targetLevel / 100);
+            effect->session->config.gain_controller1.compression_gain_db =
+                    pProperties->compGain / 100;
+            effect->session->config.gain_controller1.enable_limiter = pProperties->limiterEnabled;
+            break;
+        default:
+            ALOGW("AgcSetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+            status = -EINVAL;
+            break;
+    }
+    effect->session->apm->ApplyConfig(effect->session->config);
 
     ALOGV("AgcSetParameter() done status %d", status);
 
     return status;
 }
 
-void AgcEnable(preproc_effect_t *effect)
-{
-    webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
-    ALOGV("AgcEnable agc %p", agc);
-    agc->Enable(true);
+void Agc2Enable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
 
-void AgcDisable(preproc_effect_t *effect)
-{
-    ALOGV("AgcDisable");
-    webrtc::GainControl *agc = static_cast<webrtc::GainControl *>(effect->engine);
-    agc->Enable(false);
+void AgcEnable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
 
+void Agc2Disable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller2.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+}
 
-static const preproc_ops_t sAgcOps = {
-        AgcCreate,
-        AgcInit,
-        NULL,
-        AgcEnable,
-        AgcDisable,
-        AgcSetParameter,
-        AgcGetParameter,
-        NULL
-};
+void AgcDisable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.gain_controller1.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
+}
 
+static const preproc_ops_t sAgcOps = {AgcCreate,       AgcInit,         NULL, AgcEnable, AgcDisable,
+                                      AgcSetParameter, AgcGetParameter, NULL};
+
+static const preproc_ops_t sAgc2Ops = {Agc2Create,       Agc2Init,    NULL,
+                                       Agc2Enable,       Agc2Disable, Agc2SetParameter,
+                                       Agc2GetParameter, NULL};
 
 //------------------------------------------------------------------------------
 // Acoustic Echo Canceler (AEC)
 //------------------------------------------------------------------------------
 
-static const webrtc::EchoControlMobile::RoutingMode kAecDefaultMode =
-        webrtc::EchoControlMobile::kEarpiece;
-static const bool kAecDefaultComfortNoise = true;
 
-int  AecInit (preproc_effect_t *effect)
-{
+int AecInit(preproc_effect_t* effect) {
     ALOGV("AecInit");
-    webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
-    aec->set_routing_mode(kAecDefaultMode);
-    aec->enable_comfort_noise(kAecDefaultComfortNoise);
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.mobile_mode = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
     return 0;
 }
 
-int  AecCreate(preproc_effect_t *effect)
-{
-    webrtc::EchoControlMobile *aec = effect->session->apm->echo_control_mobile();
-    ALOGV("AecCreate got aec %p", aec);
-    if (aec == NULL) {
-        ALOGW("AgcCreate Error");
-        return -ENOMEM;
-    }
-    effect->engine = static_cast<preproc_fx_handle_t>(aec);
-    AecInit (effect);
+int AecCreate(preproc_effect_t* effect) {
+    AecInit(effect);
     return 0;
 }
 
-int AecGetParameter(preproc_effect_t  *effect,
-                    void              *pParam,
-                    uint32_t          *pValueSize,
-                    void              *pValue)
-{
+int AecGetParameter(preproc_effect_t* effect, void* pParam, uint32_t* pValueSize, void* pValue) {
     int status = 0;
-    uint32_t param = *(uint32_t *)pParam;
+    uint32_t param = *(uint32_t*)pParam;
 
     if (*pValueSize < sizeof(uint32_t)) {
         return -EINVAL;
     }
     switch (param) {
-    case AEC_PARAM_ECHO_DELAY:
-    case AEC_PARAM_PROPERTIES:
-        *(uint32_t *)pValue = 1000 * effect->session->apm->stream_delay_ms();
-        ALOGV("AecGetParameter() echo delay %d us", *(uint32_t *)pValue);
-        break;
-    default:
-        ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
-        status = -EINVAL;
-        break;
+        case AEC_PARAM_ECHO_DELAY:
+        case AEC_PARAM_PROPERTIES:
+            *(uint32_t*)pValue = 1000 * effect->session->apm->stream_delay_ms();
+            ALOGV("AecGetParameter() echo delay %d us", *(uint32_t*)pValue);
+            break;
+        case AEC_PARAM_MOBILE_MODE:
+            effect->session->config = effect->session->apm->GetConfig();
+            *(uint32_t*)pValue = effect->session->config.echo_canceller.mobile_mode;
+            ALOGV("AecGetParameter() mobile mode %d us", *(uint32_t*)pValue);
+            break;
+        default:
+            ALOGW("AecGetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+            status = -EINVAL;
+            break;
     }
     return status;
 }
 
-int AecSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int AecSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
     int status = 0;
-    uint32_t param = *(uint32_t *)pParam;
-    uint32_t value = *(uint32_t *)pValue;
+    uint32_t param = *(uint32_t*)pParam;
+    uint32_t value = *(uint32_t*)pValue;
 
     switch (param) {
-    case AEC_PARAM_ECHO_DELAY:
-    case AEC_PARAM_PROPERTIES:
-        status = effect->session->apm->set_stream_delay_ms(value/1000);
-        ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
-        break;
-    default:
-        ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t *)pValue);
-        status = -EINVAL;
-        break;
+        case AEC_PARAM_ECHO_DELAY:
+        case AEC_PARAM_PROPERTIES:
+            status = effect->session->apm->set_stream_delay_ms(value / 1000);
+            ALOGV("AecSetParameter() echo delay %d us, status %d", value, status);
+            break;
+        case AEC_PARAM_MOBILE_MODE:
+            effect->session->config = effect->session->apm->GetConfig();
+            effect->session->config.echo_canceller.mobile_mode = value;
+            ALOGV("AecSetParameter() mobile mode %d us", value);
+            effect->session->apm->ApplyConfig(effect->session->config);
+            break;
+        default:
+            ALOGW("AecSetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
+            status = -EINVAL;
+            break;
     }
     return status;
 }
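
AecGetParameter()/AecSetParameter() above exchange the echo delay in microseconds over the effect API while the APM's stream delay is expressed in milliseconds, hence the * 1000 and / 1000. A tiny sketch of the conversion (names are illustrative):

#include <cstdint>

constexpr uint32_t EchoDelayUsToMs(uint32_t delayUs) { return delayUs / 1000; }
constexpr uint32_t EchoDelayMsToUs(uint32_t delayMs) { return delayMs * 1000; }

static_assert(EchoDelayUsToMs(25000) == 25, "25000 us -> 25 ms");
static_assert(EchoDelayMsToUs(25) == 25000, "25 ms -> 25000 us");
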
 
-void AecEnable(preproc_effect_t *effect)
-{
-    webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
-    ALOGV("AecEnable aec %p", aec);
-    aec->Enable(true);
+void AecEnable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
 
-void AecDisable(preproc_effect_t *effect)
-{
-    ALOGV("AecDisable");
-    webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
-    aec->Enable(false);
+void AecDisable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.echo_canceller.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
 
-int AecSetDevice(preproc_effect_t *effect, uint32_t device)
-{
+int AecSetDevice(preproc_effect_t* effect, uint32_t device) {
     ALOGV("AecSetDevice %08x", device);
-    webrtc::EchoControlMobile *aec = static_cast<webrtc::EchoControlMobile *>(effect->engine);
-    webrtc::EchoControlMobile::RoutingMode mode = webrtc::EchoControlMobile::kQuietEarpieceOrHeadset;
 
     if (audio_is_input_device(device)) {
         return 0;
     }
 
-    switch(device) {
-    case AUDIO_DEVICE_OUT_EARPIECE:
-        mode = webrtc::EchoControlMobile::kEarpiece;
-        break;
-    case AUDIO_DEVICE_OUT_SPEAKER:
-        mode = webrtc::EchoControlMobile::kSpeakerphone;
-        break;
-    case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-    case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-    case AUDIO_DEVICE_OUT_USB_HEADSET:
-    default:
-        break;
-    }
-    aec->set_routing_mode(mode);
     return 0;
 }
 
-static const preproc_ops_t sAecOps = {
-        AecCreate,
-        AecInit,
-        NULL,
-        AecEnable,
-        AecDisable,
-        AecSetParameter,
-        AecGetParameter,
-        AecSetDevice
-};
+static const preproc_ops_t sAecOps = {AecCreate,       AecInit,     NULL,
+                                      AecEnable,       AecDisable,  AecSetParameter,
+                                      AecGetParameter, AecSetDevice};
 
 //------------------------------------------------------------------------------
 // Noise Suppression (NS)
 //------------------------------------------------------------------------------
 
-static const webrtc::NoiseSuppression::Level kNsDefaultLevel = webrtc::NoiseSuppression::kModerate;
+static const webrtc::AudioProcessing::Config::NoiseSuppression::Level kNsDefaultLevel =
+        webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
 
-int  NsInit (preproc_effect_t *effect)
-{
+int NsInit(preproc_effect_t* effect) {
     ALOGV("NsInit");
-    webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
-    ns->set_level(kNsDefaultLevel);
-    webrtc::Config config;
-    std::vector<webrtc::Point> geometry;
-    // TODO(aluebs): Make the geometry settable.
-    geometry.push_back(webrtc::Point(-0.03f, 0.f, 0.f));
-    geometry.push_back(webrtc::Point(-0.01f, 0.f, 0.f));
-    geometry.push_back(webrtc::Point(0.01f, 0.f, 0.f));
-    geometry.push_back(webrtc::Point(0.03f, 0.f, 0.f));
-    // The geometry needs to be set with Beamforming enabled.
-    config.Set<webrtc::Beamforming>(
-            new webrtc::Beamforming(true, geometry));
-    effect->session->apm->SetExtraOptions(config);
-    config.Set<webrtc::Beamforming>(
-            new webrtc::Beamforming(false, geometry));
-    effect->session->apm->SetExtraOptions(config);
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.level = kNsDefaultLevel;
+    effect->session->apm->ApplyConfig(effect->session->config);
     effect->type = NS_TYPE_SINGLE_CHANNEL;
     return 0;
 }
 
-int  NsCreate(preproc_effect_t *effect)
-{
-    webrtc::NoiseSuppression *ns = effect->session->apm->noise_suppression();
-    ALOGV("NsCreate got ns %p", ns);
-    if (ns == NULL) {
-        ALOGW("AgcCreate Error");
-        return -ENOMEM;
-    }
-    effect->engine = static_cast<preproc_fx_handle_t>(ns);
-    NsInit (effect);
+int NsCreate(preproc_effect_t* effect) {
+    NsInit(effect);
     return 0;
 }
 
-int NsGetParameter(preproc_effect_t  *effect __unused,
-                   void              *pParam __unused,
-                   uint32_t          *pValueSize __unused,
-                   void              *pValue __unused)
-{
+int NsGetParameter(preproc_effect_t* /*effect __unused*/, void* /*pParam __unused*/,
+                   uint32_t* /*pValueSize __unused*/, void* /*pValue __unused*/) {
     int status = 0;
     return status;
 }
 
-int NsSetParameter (preproc_effect_t *effect, void *pParam, void *pValue)
-{
+int NsSetParameter(preproc_effect_t* effect, void* pParam, void* pValue) {
     int status = 0;
-    webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
-    uint32_t param = *(uint32_t *)pParam;
-    uint32_t value = *(uint32_t *)pValue;
-    switch(param) {
+    uint32_t param = *(uint32_t*)pParam;
+    uint32_t value = *(uint32_t*)pValue;
+    effect->session->config = effect->session->apm->GetConfig();
+    switch (param) {
         case NS_PARAM_LEVEL:
-            ns->set_level((webrtc::NoiseSuppression::Level)value);
+            effect->session->config.noise_suppression.level =
+                    (webrtc::AudioProcessing::Config::NoiseSuppression::Level)value;
             ALOGV("NsSetParameter() level %d", value);
             break;
-        case NS_PARAM_TYPE:
-        {
-            webrtc::Config config;
-            std::vector<webrtc::Point> geometry;
-            bool is_beamforming_enabled =
-                    value == NS_TYPE_MULTI_CHANNEL && ns->is_enabled();
-            config.Set<webrtc::Beamforming>(
-                    new webrtc::Beamforming(is_beamforming_enabled, geometry));
-            effect->session->apm->SetExtraOptions(config);
-            effect->type = value;
-            ALOGV("NsSetParameter() type %d", value);
-            break;
-        }
         default:
             ALOGW("NsSetParameter() unknown param %08x value %08x", param, value);
             status = -EINVAL;
     }
+    effect->session->apm->ApplyConfig(effect->session->config);
 
     return status;
 }
 
-void NsEnable(preproc_effect_t *effect)
-{
-    webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
-    ALOGV("NsEnable ns %p", ns);
-    ns->Enable(true);
-    if (effect->type == NS_TYPE_MULTI_CHANNEL) {
-        webrtc::Config config;
-        std::vector<webrtc::Point> geometry;
-        config.Set<webrtc::Beamforming>(new webrtc::Beamforming(true, geometry));
-        effect->session->apm->SetExtraOptions(config);
-    }
+void NsEnable(preproc_effect_t* effect) {
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.enabled = true;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
 
-void NsDisable(preproc_effect_t *effect)
-{
+void NsDisable(preproc_effect_t* effect) {
     ALOGV("NsDisable");
-    webrtc::NoiseSuppression *ns = static_cast<webrtc::NoiseSuppression *>(effect->engine);
-    ns->Enable(false);
-    webrtc::Config config;
-    std::vector<webrtc::Point> geometry;
-    config.Set<webrtc::Beamforming>(new webrtc::Beamforming(false, geometry));
-    effect->session->apm->SetExtraOptions(config);
+    effect->session->config = effect->session->apm->GetConfig();
+    effect->session->config.noise_suppression.enabled = false;
+    effect->session->apm->ApplyConfig(effect->session->config);
 }
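
NsInit()/NsEnable()/NsDisable() above show that with the updated APM the suppression level is just another Config field, and the old Beamforming extra options are gone entirely. A minimal sketch of the same pattern, assuming the WebRTC header included by this file and a hypothetical apm instance:

#include <audio_processing.h>

void SetNoiseSuppression(webrtc::AudioProcessing* apm, bool enabled,
                         webrtc::AudioProcessing::Config::NoiseSuppression::Level level) {
    webrtc::AudioProcessing::Config config = apm->GetConfig();
    config.noise_suppression.enabled = enabled;
    config.noise_suppression.level = level;  // e.g. kModerate, as in kNsDefaultLevel
    apm->ApplyConfig(config);
}
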
 
-static const preproc_ops_t sNsOps = {
-        NsCreate,
-        NsInit,
-        NULL,
-        NsEnable,
-        NsDisable,
-        NsSetParameter,
-        NsGetParameter,
-        NULL
-};
+static const preproc_ops_t sNsOps = {NsCreate,  NsInit,         NULL,           NsEnable,
+                                     NsDisable, NsSetParameter, NsGetParameter, NULL};
 
-
-static const preproc_ops_t *sPreProcOps[PREPROC_NUM_EFFECTS] = {
-        &sAgcOps,
-        &sAecOps,
-        &sNsOps
-};
-
+static const preproc_ops_t* sPreProcOps[PREPROC_NUM_EFFECTS] = {&sAgcOps, &sAgc2Ops, &sAecOps,
+                                                                &sNsOps};
 
 //------------------------------------------------------------------------------
 // Effect functions
 //------------------------------------------------------------------------------
 
-void Session_SetProcEnabled(preproc_session_t *session, uint32_t procId, bool enabled);
+void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled);
 
 extern "C" const struct effect_interface_s sEffectInterface;
 extern "C" const struct effect_interface_s sEffectInterfaceReverse;
 
-#define BAD_STATE_ABORT(from, to) \
-        LOG_ALWAYS_FATAL("Bad state transition from %d to %d", from, to);
+#define BAD_STATE_ABORT(from, to) LOG_ALWAYS_FATAL("Bad state transition from %d to %d", from, to);
 
-int Effect_SetState(preproc_effect_t *effect, uint32_t state)
-{
+int Effect_SetState(preproc_effect_t* effect, uint32_t state) {
     int status = 0;
     ALOGV("Effect_SetState proc %d, new %d old %d", effect->procId, state, effect->state);
-    switch(state) {
-    case PREPROC_EFFECT_STATE_INIT:
-        switch(effect->state) {
-        case PREPROC_EFFECT_STATE_ACTIVE:
-            effect->ops->disable(effect);
-            Session_SetProcEnabled(effect->session, effect->procId, false);
+    switch (state) {
+        case PREPROC_EFFECT_STATE_INIT:
+            switch (effect->state) {
+                case PREPROC_EFFECT_STATE_ACTIVE:
+                    effect->ops->disable(effect);
+                    Session_SetProcEnabled(effect->session, effect->procId, false);
+                    break;
+                case PREPROC_EFFECT_STATE_CONFIG:
+                case PREPROC_EFFECT_STATE_CREATED:
+                case PREPROC_EFFECT_STATE_INIT:
+                    break;
+                default:
+                    BAD_STATE_ABORT(effect->state, state);
+            }
+            break;
+        case PREPROC_EFFECT_STATE_CREATED:
+            switch (effect->state) {
+                case PREPROC_EFFECT_STATE_INIT:
+                    status = effect->ops->create(effect);
+                    break;
+                case PREPROC_EFFECT_STATE_CREATED:
+                case PREPROC_EFFECT_STATE_ACTIVE:
+                case PREPROC_EFFECT_STATE_CONFIG:
+                    ALOGE("Effect_SetState invalid transition");
+                    status = -ENOSYS;
+                    break;
+                default:
+                    BAD_STATE_ABORT(effect->state, state);
+            }
             break;
         case PREPROC_EFFECT_STATE_CONFIG:
-        case PREPROC_EFFECT_STATE_CREATED:
-        case PREPROC_EFFECT_STATE_INIT:
+            switch (effect->state) {
+                case PREPROC_EFFECT_STATE_INIT:
+                    ALOGE("Effect_SetState invalid transition");
+                    status = -ENOSYS;
+                    break;
+                case PREPROC_EFFECT_STATE_ACTIVE:
+                    effect->ops->disable(effect);
+                    Session_SetProcEnabled(effect->session, effect->procId, false);
+                    break;
+                case PREPROC_EFFECT_STATE_CREATED:
+                case PREPROC_EFFECT_STATE_CONFIG:
+                    break;
+                default:
+                    BAD_STATE_ABORT(effect->state, state);
+            }
+            break;
+        case PREPROC_EFFECT_STATE_ACTIVE:
+            switch (effect->state) {
+                case PREPROC_EFFECT_STATE_INIT:
+                case PREPROC_EFFECT_STATE_CREATED:
+                    ALOGE("Effect_SetState invalid transition");
+                    status = -ENOSYS;
+                    break;
+                case PREPROC_EFFECT_STATE_ACTIVE:
+                    // enabling an already enabled effect is just ignored
+                    break;
+                case PREPROC_EFFECT_STATE_CONFIG:
+                    effect->ops->enable(effect);
+                    Session_SetProcEnabled(effect->session, effect->procId, true);
+                    break;
+                default:
+                    BAD_STATE_ABORT(effect->state, state);
+            }
             break;
         default:
             BAD_STATE_ABORT(effect->state, state);
-        }
-        break;
-    case PREPROC_EFFECT_STATE_CREATED:
-        switch(effect->state) {
-        case PREPROC_EFFECT_STATE_INIT:
-            status = effect->ops->create(effect);
-            break;
-        case PREPROC_EFFECT_STATE_CREATED:
-        case PREPROC_EFFECT_STATE_ACTIVE:
-        case PREPROC_EFFECT_STATE_CONFIG:
-            ALOGE("Effect_SetState invalid transition");
-            status = -ENOSYS;
-            break;
-        default:
-            BAD_STATE_ABORT(effect->state, state);
-        }
-        break;
-    case PREPROC_EFFECT_STATE_CONFIG:
-        switch(effect->state) {
-        case PREPROC_EFFECT_STATE_INIT:
-            ALOGE("Effect_SetState invalid transition");
-            status = -ENOSYS;
-            break;
-        case PREPROC_EFFECT_STATE_ACTIVE:
-            effect->ops->disable(effect);
-            Session_SetProcEnabled(effect->session, effect->procId, false);
-            break;
-        case PREPROC_EFFECT_STATE_CREATED:
-        case PREPROC_EFFECT_STATE_CONFIG:
-            break;
-        default:
-            BAD_STATE_ABORT(effect->state, state);
-        }
-        break;
-    case PREPROC_EFFECT_STATE_ACTIVE:
-        switch(effect->state) {
-        case PREPROC_EFFECT_STATE_INIT:
-        case PREPROC_EFFECT_STATE_CREATED:
-            ALOGE("Effect_SetState invalid transition");
-            status = -ENOSYS;
-            break;
-        case PREPROC_EFFECT_STATE_ACTIVE:
-            // enabling an already enabled effect is just ignored
-            break;
-        case PREPROC_EFFECT_STATE_CONFIG:
-            effect->ops->enable(effect);
-            Session_SetProcEnabled(effect->session, effect->procId, true);
-            break;
-        default:
-            BAD_STATE_ABORT(effect->state, state);
-        }
-        break;
-    default:
-        BAD_STATE_ABORT(effect->state, state);
     }
     if (status == 0) {
         effect->state = state;
@@ -766,8 +794,7 @@
     return status;
 }
 
-int Effect_Init(preproc_effect_t *effect, uint32_t procId)
-{
+int Effect_Init(preproc_effect_t* effect, uint32_t procId) {
     if (HasReverseStream(procId)) {
         effect->itfe = &sEffectInterfaceReverse;
     } else {
@@ -779,21 +806,17 @@
     return 0;
 }
 
-int Effect_Create(preproc_effect_t *effect,
-               preproc_session_t *session,
-               effect_handle_t  *interface)
-{
+int Effect_Create(preproc_effect_t* effect, preproc_session_t* session,
+                  effect_handle_t* interface) {
     effect->session = session;
     *interface = (effect_handle_t)&effect->itfe;
     return Effect_SetState(effect, PREPROC_EFFECT_STATE_CREATED);
 }
 
-int Effect_Release(preproc_effect_t *effect)
-{
+int Effect_Release(preproc_effect_t* effect) {
     return Effect_SetState(effect, PREPROC_EFFECT_STATE_INIT);
 }
 
-
 //------------------------------------------------------------------------------
 // Session functions
 //------------------------------------------------------------------------------
@@ -803,8 +826,7 @@
 static const int kPreprocDefaultSr = 16000;
 static const int kPreProcDefaultCnl = 1;
 
-int Session_Init(preproc_session_t *session)
-{
+int Session_Init(preproc_session_t* session) {
     size_t i;
     int status = 0;
 
@@ -812,129 +834,69 @@
     session->id = 0;
     session->io = 0;
     session->createdMsk = 0;
-    session->apm = NULL;
     for (i = 0; i < PREPROC_NUM_EFFECTS && status == 0; i++) {
         status = Effect_Init(&session->effects[i], i);
     }
     return status;
 }
 
-
-extern "C" int Session_CreateEffect(preproc_session_t *session,
-                                    int32_t procId,
-                                    effect_handle_t  *interface)
-{
+extern "C" int Session_CreateEffect(preproc_session_t* session, int32_t procId,
+                                    effect_handle_t* interface) {
     int status = -ENOMEM;
 
     ALOGV("Session_CreateEffect procId %d, createdMsk %08x", procId, session->createdMsk);
 
     if (session->createdMsk == 0) {
-        session->apm = webrtc::AudioProcessing::Create();
+        session->apm = session->ap_builder.Create();
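+        // The APM is now obtained from the session's ap_builder (rather than
+        // webrtc::AudioProcessing::Create()) and is driven through the StreamConfig
+        // descriptors set up below in place of the old AudioFrame buffers.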
         if (session->apm == NULL) {
             ALOGW("Session_CreateEffect could not get apm engine");
             goto error;
         }
-        const webrtc::ProcessingConfig processing_config = {
-            {{kPreprocDefaultSr, kPreProcDefaultCnl},
-             {kPreprocDefaultSr, kPreProcDefaultCnl},
-             {kPreprocDefaultSr, kPreProcDefaultCnl},
-             {kPreprocDefaultSr, kPreProcDefaultCnl}}};
-        session->apm->Initialize(processing_config);
-        session->procFrame = new webrtc::AudioFrame();
-        if (session->procFrame == NULL) {
-            ALOGW("Session_CreateEffect could not allocate audio frame");
-            goto error;
-        }
-        session->revFrame = new webrtc::AudioFrame();
-        if (session->revFrame == NULL) {
-            ALOGW("Session_CreateEffect could not allocate reverse audio frame");
-            goto error;
-        }
-        session->apmSamplingRate = kPreprocDefaultSr;
-        session->apmFrameCount = (kPreprocDefaultSr) / 100;
-        session->frameCount = session->apmFrameCount;
+        session->frameCount = kPreprocDefaultSr / 100;
         session->samplingRate = kPreprocDefaultSr;
         session->inChannelCount = kPreProcDefaultCnl;
         session->outChannelCount = kPreProcDefaultCnl;
-        session->procFrame->sample_rate_hz_ = kPreprocDefaultSr;
-        session->procFrame->num_channels_ = kPreProcDefaultCnl;
+        session->inputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->inputConfig.set_num_channels(kPreProcDefaultCnl);
+        session->outputConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->outputConfig.set_num_channels(kPreProcDefaultCnl);
         session->revChannelCount = kPreProcDefaultCnl;
-        session->revFrame->sample_rate_hz_ = kPreprocDefaultSr;
-        session->revFrame->num_channels_ = kPreProcDefaultCnl;
+        session->revConfig.set_sample_rate_hz(kPreprocDefaultSr);
+        session->revConfig.set_num_channels(kPreProcDefaultCnl);
         session->enabledMsk = 0;
         session->processedMsk = 0;
         session->revEnabledMsk = 0;
         session->revProcessedMsk = 0;
-        session->inResampler = NULL;
-        session->inBuf = NULL;
-        session->inBufSize = 0;
-        session->outResampler = NULL;
-        session->outBuf = NULL;
-        session->outBufSize = 0;
-        session->revResampler = NULL;
-        session->revBuf = NULL;
-        session->revBufSize = 0;
     }
     status = Effect_Create(&session->effects[procId], session, interface);
     if (status < 0) {
         goto error;
     }
     ALOGV("Session_CreateEffect OK");
-    session->createdMsk |= (1<<procId);
+    session->createdMsk |= (1 << procId);
     return status;
 
 error:
     if (session->createdMsk == 0) {
-        delete session->revFrame;
-        session->revFrame = NULL;
-        delete session->procFrame;
-        session->procFrame = NULL;
         delete session->apm;
-        session->apm = NULL; // NOLINT(clang-analyzer-cplusplus.NewDelete)
+        session->apm = NULL;
     }
     return status;
 }
 
-int Session_ReleaseEffect(preproc_session_t *session,
-                          preproc_effect_t *fx)
-{
+int Session_ReleaseEffect(preproc_session_t* session, preproc_effect_t* fx) {
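+    // Tearing down the session once the last effect is released now only requires
+    // deleting the APM; there are no frames, buffers or resamplers left to free.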
     ALOGW_IF(Effect_Release(fx) != 0, " Effect_Release() failed for proc ID %d", fx->procId);
-    session->createdMsk &= ~(1<<fx->procId);
+    session->createdMsk &= ~(1 << fx->procId);
     if (session->createdMsk == 0) {
         delete session->apm;
         session->apm = NULL;
-        delete session->procFrame;
-        session->procFrame = NULL;
-        delete session->revFrame;
-        session->revFrame = NULL;
-        if (session->inResampler != NULL) {
-            speex_resampler_destroy(session->inResampler);
-            session->inResampler = NULL;
-        }
-        if (session->outResampler != NULL) {
-            speex_resampler_destroy(session->outResampler);
-            session->outResampler = NULL;
-        }
-        if (session->revResampler != NULL) {
-            speex_resampler_destroy(session->revResampler);
-            session->revResampler = NULL;
-        }
-        delete session->inBuf;
-        session->inBuf = NULL;
-        delete session->outBuf;
-        session->outBuf = NULL;
-        delete session->revBuf;
-        session->revBuf = NULL;
-
         session->id = 0;
     }
 
     return 0;
 }
 
-
-int Session_SetConfig(preproc_session_t *session, effect_config_t *config)
-{
+int Session_SetConfig(preproc_session_t* session, effect_config_t* config) {
     uint32_t inCnl = audio_channel_count_from_in_mask(config->inputCfg.channels);
     uint32_t outCnl = audio_channel_count_from_in_mask(config->outputCfg.channels);
 
@@ -944,112 +906,27 @@
         return -EINVAL;
     }
 
-    ALOGV("Session_SetConfig sr %d cnl %08x",
-         config->inputCfg.samplingRate, config->inputCfg.channels);
-    int status;
-
-    // AEC implementation is limited to 16kHz
-    if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
-        session->apmSamplingRate = 32000;
-    } else
-    if (config->inputCfg.samplingRate >= 16000) {
-        session->apmSamplingRate = 16000;
-    } else if (config->inputCfg.samplingRate >= 8000) {
-        session->apmSamplingRate = 8000;
-    }
-
-    const webrtc::ProcessingConfig processing_config = {
-      {{static_cast<int>(session->apmSamplingRate), inCnl},
-       {static_cast<int>(session->apmSamplingRate), outCnl},
-       {static_cast<int>(session->apmSamplingRate), inCnl},
-       {static_cast<int>(session->apmSamplingRate), inCnl}}};
-    status = session->apm->Initialize(processing_config);
-    if (status < 0) {
-        return -EINVAL;
-    }
+    ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
+          config->inputCfg.channels);
 
     session->samplingRate = config->inputCfg.samplingRate;
-    session->apmFrameCount = session->apmSamplingRate / 100;
-    if (session->samplingRate == session->apmSamplingRate) {
-        session->frameCount = session->apmFrameCount;
-    } else {
-        session->frameCount = (session->apmFrameCount * session->samplingRate) /
-                session->apmSamplingRate  + 1;
-    }
+    session->frameCount = session->samplingRate / 100;
     session->inChannelCount = inCnl;
     session->outChannelCount = outCnl;
-    session->procFrame->num_channels_ = inCnl;
-    session->procFrame->sample_rate_hz_ = session->apmSamplingRate;
+    session->inputConfig.set_sample_rate_hz(session->samplingRate);
+    session->inputConfig.set_num_channels(inCnl);
+    session->outputConfig.set_sample_rate_hz(session->samplingRate);
+    session->outputConfig.set_num_channels(inCnl);
 
     session->revChannelCount = inCnl;
-    session->revFrame->num_channels_ = inCnl;
-    session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
-
-    // force process buffer reallocation
-    session->inBufSize = 0;
-    session->outBufSize = 0;
-    session->framesIn = 0;
-    session->framesOut = 0;
-
-
-    if (session->inResampler != NULL) {
-        speex_resampler_destroy(session->inResampler);
-        session->inResampler = NULL;
-    }
-    if (session->outResampler != NULL) {
-        speex_resampler_destroy(session->outResampler);
-        session->outResampler = NULL;
-    }
-    if (session->revResampler != NULL) {
-        speex_resampler_destroy(session->revResampler);
-        session->revResampler = NULL;
-    }
-    if (session->samplingRate != session->apmSamplingRate) {
-        int error;
-        session->inResampler = speex_resampler_init(session->inChannelCount,
-                                                    session->samplingRate,
-                                                    session->apmSamplingRate,
-                                                    RESAMPLER_QUALITY,
-                                                    &error);
-        if (session->inResampler == NULL) {
-            ALOGW("Session_SetConfig Cannot create speex resampler: %s",
-                 speex_resampler_strerror(error));
-            return -EINVAL;
-        }
-        session->outResampler = speex_resampler_init(session->outChannelCount,
-                                                    session->apmSamplingRate,
-                                                    session->samplingRate,
-                                                    RESAMPLER_QUALITY,
-                                                    &error);
-        if (session->outResampler == NULL) {
-            ALOGW("Session_SetConfig Cannot create speex resampler: %s",
-                 speex_resampler_strerror(error));
-            speex_resampler_destroy(session->inResampler);
-            session->inResampler = NULL;
-            return -EINVAL;
-        }
-        session->revResampler = speex_resampler_init(session->inChannelCount,
-                                                    session->samplingRate,
-                                                    session->apmSamplingRate,
-                                                    RESAMPLER_QUALITY,
-                                                    &error);
-        if (session->revResampler == NULL) {
-            ALOGW("Session_SetConfig Cannot create speex resampler: %s",
-                 speex_resampler_strerror(error));
-            speex_resampler_destroy(session->inResampler);
-            session->inResampler = NULL;
-            speex_resampler_destroy(session->outResampler);
-            session->outResampler = NULL;
-            return -EINVAL;
-        }
-    }
+    session->revConfig.set_sample_rate_hz(session->samplingRate);
+    session->revConfig.set_num_channels(inCnl);
 
     session->state = PREPROC_SESSION_STATE_CONFIG;
     return 0;
 }
 
-void Session_GetConfig(preproc_session_t *session, effect_config_t *config)
-{
+void Session_GetConfig(preproc_session_t* session, effect_config_t* config) {
     memset(config, 0, sizeof(effect_config_t));
     config->inputCfg.samplingRate = config->outputCfg.samplingRate = session->samplingRate;
     config->inputCfg.format = config->outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1060,46 +937,30 @@
             (EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS | EFFECT_CONFIG_FORMAT);
 }
 
-int Session_SetReverseConfig(preproc_session_t *session, effect_config_t *config)
-{
+int Session_SetReverseConfig(preproc_session_t* session, effect_config_t* config) {
     if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
-            config->inputCfg.format != config->outputCfg.format ||
-            config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
+        config->inputCfg.format != config->outputCfg.format ||
+        config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
         return -EINVAL;
     }
 
-    ALOGV("Session_SetReverseConfig sr %d cnl %08x",
-         config->inputCfg.samplingRate, config->inputCfg.channels);
+    ALOGV("Session_SetReverseConfig sr %d cnl %08x", config->inputCfg.samplingRate,
+          config->inputCfg.channels);
 
     if (session->state < PREPROC_SESSION_STATE_CONFIG) {
         return -ENOSYS;
     }
     if (config->inputCfg.samplingRate != session->samplingRate ||
-            config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
+        config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
         return -EINVAL;
     }
     uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
-    const webrtc::ProcessingConfig processing_config = {
-       {{static_cast<int>(session->apmSamplingRate), session->inChannelCount},
-        {static_cast<int>(session->apmSamplingRate), session->outChannelCount},
-        {static_cast<int>(session->apmSamplingRate), inCnl},
-        {static_cast<int>(session->apmSamplingRate), inCnl}}};
-    int status = session->apm->Initialize(processing_config);
-    if (status < 0) {
-        return -EINVAL;
-    }
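+    // The reverse stream no longer triggers an APM re-initialization; only the
+    // channel count is recorded here.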
     session->revChannelCount = inCnl;
-    session->revFrame->num_channels_ = inCnl;
-    session->revFrame->sample_rate_hz_ = session->apmSamplingRate;
-    // force process buffer reallocation
-    session->revBufSize = 0;
-    session->framesRev = 0;
 
     return 0;
 }
 
-void Session_GetReverseConfig(preproc_session_t *session, effect_config_t *config)
-{
+void Session_GetReverseConfig(preproc_session_t* session, effect_config_t* config) {
     memset(config, 0, sizeof(effect_config_t));
     config->inputCfg.samplingRate = config->outputCfg.samplingRate = session->samplingRate;
     config->inputCfg.format = config->outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1109,25 +970,10 @@
             (EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS | EFFECT_CONFIG_FORMAT);
 }
 
-void Session_SetProcEnabled(preproc_session_t *session, uint32_t procId, bool enabled)
-{
+void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
     if (enabled) {
-        if(session->enabledMsk == 0) {
-            session->framesIn = 0;
-            if (session->inResampler != NULL) {
-                speex_resampler_reset_mem(session->inResampler);
-            }
-            session->framesOut = 0;
-            if (session->outResampler != NULL) {
-                speex_resampler_reset_mem(session->outResampler);
-            }
-        }
         session->enabledMsk |= (1 << procId);
         if (HasReverseStream(procId)) {
-            session->framesRev = 0;
-            if (session->revResampler != NULL) {
-                speex_resampler_reset_mem(session->revResampler);
-            }
             session->revEnabledMsk |= (1 << procId);
         }
     } else {
@@ -1136,8 +982,8 @@
             session->revEnabledMsk &= ~(1 << procId);
         }
     }
-    ALOGV("Session_SetProcEnabled proc %d, enabled %d enabledMsk %08x revEnabledMsk %08x",
-         procId, enabled, session->enabledMsk, session->revEnabledMsk);
+    ALOGV("Session_SetProcEnabled proc %d, enabled %d enabledMsk %08x revEnabledMsk %08x", procId,
+          enabled, session->enabledMsk, session->revEnabledMsk);
     session->processedMsk = 0;
     if (HasReverseStream(procId)) {
         session->revProcessedMsk = 0;
@@ -1151,8 +997,7 @@
 static int sInitStatus = 1;
 static preproc_session_t sSessions[PREPROC_NUM_SESSIONS];
 
-preproc_session_t *PreProc_GetSession(int32_t procId, int32_t  sessionId, int32_t  ioId)
-{
+preproc_session_t* PreProc_GetSession(int32_t procId, int32_t sessionId, int32_t ioId) {
     size_t i;
     for (i = 0; i < PREPROC_NUM_SESSIONS; i++) {
         if (sSessions[i].id == sessionId) {
@@ -1172,7 +1017,6 @@
     return NULL;
 }
 
-
 int PreProc_Init() {
     size_t i;
     int status = 0;
@@ -1187,8 +1031,7 @@
     return sInitStatus;
 }
 
-const effect_descriptor_t *PreProc_GetDescriptor(const effect_uuid_t *uuid)
-{
+const effect_descriptor_t* PreProc_GetDescriptor(const effect_uuid_t* uuid) {
     size_t i;
     for (i = 0; i < PREPROC_NUM_EFFECTS; i++) {
         if (memcmp(&sDescriptors[i]->uuid, uuid, sizeof(effect_uuid_t)) == 0) {
@@ -1198,234 +1041,86 @@
     return NULL;
 }
 
-
 extern "C" {
 
 //------------------------------------------------------------------------------
 // Effect Control Interface Implementation
 //------------------------------------------------------------------------------
 
-int PreProcessingFx_Process(effect_handle_t     self,
-                            audio_buffer_t    *inBuffer,
-                            audio_buffer_t    *outBuffer)
-{
-    preproc_effect_t * effect = (preproc_effect_t *)self;
+int PreProcessingFx_Process(effect_handle_t self, audio_buffer_t* inBuffer,
+                            audio_buffer_t* outBuffer) {
+    preproc_effect_t* effect = (preproc_effect_t*)self;
 
-    if (effect == NULL){
+    if (effect == NULL) {
         ALOGV("PreProcessingFx_Process() ERROR effect == NULL");
         return -EINVAL;
     }
-    preproc_session_t * session = (preproc_session_t *)effect->session;
+    preproc_session_t* session = (preproc_session_t*)effect->session;
 
-    if (inBuffer == NULL  || inBuffer->raw == NULL  ||
-            outBuffer == NULL || outBuffer->raw == NULL){
+    if (inBuffer == NULL || inBuffer->raw == NULL || outBuffer == NULL || outBuffer->raw == NULL) {
         ALOGW("PreProcessingFx_Process() ERROR bad pointer");
         return -EINVAL;
     }
 
-    session->processedMsk |= (1<<effect->procId);
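+    // The new AudioProcessing path does no internal buffering or resampling, so the
+    // caller must supply exactly one 10 ms block and matching in/out buffer sizes.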
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
 
-//    ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
-//         inBuffer->frameCount, session->enabledMsk, session->processedMsk);
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
 
+    session->processedMsk |= (1 << effect->procId);
+
+    //    ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
+    //         inBuffer->frameCount, session->enabledMsk, session->processedMsk);
     if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
         effect->session->processedMsk = 0;
-        size_t framesRq = outBuffer->frameCount;
-        size_t framesWr = 0;
-        if (session->framesOut) {
-            size_t fr = session->framesOut;
-            if (outBuffer->frameCount < fr) {
-                fr = outBuffer->frameCount;
-            }
-            memcpy(outBuffer->s16,
-                  session->outBuf,
-                  fr * session->outChannelCount * sizeof(int16_t));
-            memcpy(session->outBuf,
-                  session->outBuf + fr * session->outChannelCount,
-                  (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-            session->framesOut -= fr;
-            framesWr += fr;
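+        // Hand the 10 ms block directly to the APM's ProcessStream() using the
+        // stored input/output StreamConfig descriptors.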
+        if (int status = effect->session->apm->ProcessStream(
+                    (const int16_t* const)inBuffer->s16,
+                    (const webrtc::StreamConfig)effect->session->inputConfig,
+                    (const webrtc::StreamConfig)effect->session->outputConfig,
+                    (int16_t* const)outBuffer->s16);
+            status != 0) {
+            ALOGE("Process Stream failed with error %d\n", status);
+            return status;
         }
-        outBuffer->frameCount = framesWr;
-        if (framesWr == framesRq) {
-            inBuffer->frameCount = 0;
-            return 0;
-        }
-
-        if (session->inResampler != NULL) {
-            size_t fr = session->frameCount - session->framesIn;
-            if (inBuffer->frameCount < fr) {
-                fr = inBuffer->frameCount;
-            }
-            if (session->inBufSize < session->framesIn + fr) {
-                int16_t *buf;
-                session->inBufSize = session->framesIn + fr;
-                buf = (int16_t *)realloc(session->inBuf,
-                                 session->inBufSize * session->inChannelCount * sizeof(int16_t));
-                if (buf == NULL) {
-                    session->framesIn = 0;
-                    free(session->inBuf);
-                    session->inBuf = NULL;
-                    return -ENOMEM;
-                }
-                session->inBuf = buf;
-            }
-            memcpy(session->inBuf + session->framesIn * session->inChannelCount,
-                   inBuffer->s16,
-                   fr * session->inChannelCount * sizeof(int16_t));
-#ifdef DUAL_MIC_TEST
-            pthread_mutex_lock(&gPcmDumpLock);
-            if (gPcmDumpFh != NULL) {
-                fwrite(inBuffer->raw,
-                       fr * session->inChannelCount * sizeof(int16_t), 1, gPcmDumpFh);
-            }
-            pthread_mutex_unlock(&gPcmDumpLock);
-#endif
-
-            session->framesIn += fr;
-            inBuffer->frameCount = fr;
-            if (session->framesIn < session->frameCount) {
-                return 0;
-            }
-            spx_uint32_t frIn = session->framesIn;
-            spx_uint32_t frOut = session->apmFrameCount;
-            if (session->inChannelCount == 1) {
-                speex_resampler_process_int(session->inResampler,
-                                            0,
-                                            session->inBuf,
-                                            &frIn,
-                                            session->procFrame->data_,
-                                            &frOut);
-            } else {
-                speex_resampler_process_interleaved_int(session->inResampler,
-                                                        session->inBuf,
-                                                        &frIn,
-                                                        session->procFrame->data_,
-                                                        &frOut);
-            }
-            memcpy(session->inBuf,
-                   session->inBuf + frIn * session->inChannelCount,
-                   (session->framesIn - frIn) * session->inChannelCount * sizeof(int16_t));
-            session->framesIn -= frIn;
-        } else {
-            size_t fr = session->frameCount - session->framesIn;
-            if (inBuffer->frameCount < fr) {
-                fr = inBuffer->frameCount;
-            }
-            memcpy(session->procFrame->data_ + session->framesIn * session->inChannelCount,
-                   inBuffer->s16,
-                   fr * session->inChannelCount * sizeof(int16_t));
-
-#ifdef DUAL_MIC_TEST
-            pthread_mutex_lock(&gPcmDumpLock);
-            if (gPcmDumpFh != NULL) {
-                fwrite(inBuffer->raw,
-                       fr * session->inChannelCount * sizeof(int16_t), 1, gPcmDumpFh);
-            }
-            pthread_mutex_unlock(&gPcmDumpLock);
-#endif
-
-            session->framesIn += fr;
-            inBuffer->frameCount = fr;
-            if (session->framesIn < session->frameCount) {
-                return 0;
-            }
-            session->framesIn = 0;
-        }
-        session->procFrame->samples_per_channel_ = session->apmFrameCount;
-
-        effect->session->apm->ProcessStream(session->procFrame);
-
-        if (session->outBufSize < session->framesOut + session->frameCount) {
-            int16_t *buf;
-            session->outBufSize = session->framesOut + session->frameCount;
-            buf = (int16_t *)realloc(session->outBuf,
-                             session->outBufSize * session->outChannelCount * sizeof(int16_t));
-            if (buf == NULL) {
-                session->framesOut = 0;
-                free(session->outBuf);
-                session->outBuf = NULL;
-                return -ENOMEM;
-            }
-            session->outBuf = buf;
-        }
-
-        if (session->outResampler != NULL) {
-            spx_uint32_t frIn = session->apmFrameCount;
-            spx_uint32_t frOut = session->frameCount;
-            if (session->inChannelCount == 1) {
-                speex_resampler_process_int(session->outResampler,
-                                    0,
-                                    session->procFrame->data_,
-                                    &frIn,
-                                    session->outBuf + session->framesOut * session->outChannelCount,
-                                    &frOut);
-            } else {
-                speex_resampler_process_interleaved_int(session->outResampler,
-                                    session->procFrame->data_,
-                                    &frIn,
-                                    session->outBuf + session->framesOut * session->outChannelCount,
-                                    &frOut);
-            }
-            session->framesOut += frOut;
-        } else {
-            memcpy(session->outBuf + session->framesOut * session->outChannelCount,
-                   session->procFrame->data_,
-                   session->frameCount * session->outChannelCount * sizeof(int16_t));
-            session->framesOut += session->frameCount;
-        }
-        size_t fr = session->framesOut;
-        if (framesRq - framesWr < fr) {
-            fr = framesRq - framesWr;
-        }
-        memcpy(outBuffer->s16 + framesWr * session->outChannelCount,
-              session->outBuf,
-              fr * session->outChannelCount * sizeof(int16_t));
-        memcpy(session->outBuf,
-              session->outBuf + fr * session->outChannelCount,
-              (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
-        session->framesOut -= fr;
-        outBuffer->frameCount += fr;
-
         return 0;
     } else {
         return -ENODATA;
     }
 }
 
-int PreProcessingFx_Command(effect_handle_t  self,
-                            uint32_t            cmdCode,
-                            uint32_t            cmdSize,
-                            void                *pCmdData,
-                            uint32_t            *replySize,
-                            void                *pReplyData)
-{
-    preproc_effect_t * effect = (preproc_effect_t *) self;
+int PreProcessingFx_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+                            void* pCmdData, uint32_t* replySize, void* pReplyData) {
+    preproc_effect_t* effect = (preproc_effect_t*)self;
 
-    if (effect == NULL){
+    if (effect == NULL) {
         return -EINVAL;
     }
 
-    //ALOGV("PreProcessingFx_Command: command %d cmdSize %d",cmdCode, cmdSize);
+    // ALOGV("PreProcessingFx_Command: command %d cmdSize %d",cmdCode, cmdSize);
 
-    switch (cmdCode){
+    switch (cmdCode) {
         case EFFECT_CMD_INIT:
-            if (pReplyData == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || *replySize != sizeof(int)) {
                 return -EINVAL;
             }
             if (effect->ops->init) {
                 effect->ops->init(effect);
             }
-            *(int *)pReplyData = 0;
+            *(int*)pReplyData = 0;
             break;
 
         case EFFECT_CMD_SET_CONFIG: {
-            if (pCmdData    == NULL||
-                cmdSize     != sizeof(effect_config_t)||
-                pReplyData  == NULL||
-                *replySize  != sizeof(int)){
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
+                *replySize != sizeof(int)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG: ERROR");
+                      "EFFECT_CMD_SET_CONFIG: ERROR");
                 return -EINVAL;
             }
 #ifdef DUAL_MIC_TEST
@@ -1436,55 +1131,51 @@
                 effect->session->enabledMsk = 0;
             }
 #endif
-            *(int *)pReplyData = Session_SetConfig(effect->session, (effect_config_t *)pCmdData);
+            *(int*)pReplyData = Session_SetConfig(effect->session, (effect_config_t*)pCmdData);
 #ifdef DUAL_MIC_TEST
             if (gDualMicEnabled) {
                 effect->session->enabledMsk = enabledMsk;
             }
 #endif
-            if (*(int *)pReplyData != 0) {
+            if (*(int*)pReplyData != 0) {
                 break;
             }
             if (effect->state != PREPROC_EFFECT_STATE_ACTIVE) {
-                *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+                *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
             }
-            } break;
+        } break;
 
         case EFFECT_CMD_GET_CONFIG:
-            if (pReplyData == NULL ||
-                *replySize != sizeof(effect_config_t)) {
+            if (pReplyData == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("\tLVM_ERROR : PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_CONFIG: ERROR");
                 return -EINVAL;
             }
 
-            Session_GetConfig(effect->session, (effect_config_t *)pReplyData);
+            Session_GetConfig(effect->session, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_SET_CONFIG_REVERSE:
-            if (pCmdData == NULL ||
-                cmdSize != sizeof(effect_config_t) ||
-                pReplyData == NULL ||
+            if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || pReplyData == NULL ||
                 *replySize != sizeof(int)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_SET_CONFIG_REVERSE: ERROR");
+                      "EFFECT_CMD_SET_CONFIG_REVERSE: ERROR");
                 return -EINVAL;
             }
-            *(int *)pReplyData = Session_SetReverseConfig(effect->session,
-                                                          (effect_config_t *)pCmdData);
-            if (*(int *)pReplyData != 0) {
+            *(int*)pReplyData =
+                    Session_SetReverseConfig(effect->session, (effect_config_t*)pCmdData);
+            if (*(int*)pReplyData != 0) {
                 break;
             }
             break;
 
         case EFFECT_CMD_GET_CONFIG_REVERSE:
-            if (pReplyData == NULL ||
-                *replySize != sizeof(effect_config_t)){
+            if (pReplyData == NULL || *replySize != sizeof(effect_config_t)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_GET_CONFIG_REVERSE: ERROR");
+                      "EFFECT_CMD_GET_CONFIG_REVERSE: ERROR");
                 return -EINVAL;
             }
-            Session_GetReverseConfig(effect->session, (effect_config_t *)pCmdData);
+            Session_GetReverseConfig(effect->session, (effect_config_t*)pReplyData);
             break;
 
         case EFFECT_CMD_RESET:
@@ -1494,80 +1185,74 @@
             break;
 
         case EFFECT_CMD_GET_PARAM: {
-            effect_param_t *p = (effect_param_t *)pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
 
             if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) ||
-                    cmdSize < (sizeof(effect_param_t) + p->psize) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < (sizeof(effect_param_t) + p->psize)){
+                cmdSize < (sizeof(effect_param_t) + p->psize) || pReplyData == NULL ||
+                replySize == NULL || *replySize < (sizeof(effect_param_t) + p->psize)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_GET_PARAM: ERROR");
+                      "EFFECT_CMD_GET_PARAM: ERROR");
                 return -EINVAL;
             }
 
             memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize);
 
-            p = (effect_param_t *)pReplyData;
+            p = (effect_param_t*)pReplyData;
 
             int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
 
             if (effect->ops->get_parameter) {
-                p->status = effect->ops->get_parameter(effect, p->data,
-                                                       &p->vsize,
-                                                       p->data + voffset);
+                p->status =
+                        effect->ops->get_parameter(effect, p->data, &p->vsize, p->data + voffset);
                 *replySize = sizeof(effect_param_t) + voffset + p->vsize;
             }
         } break;
 
-        case EFFECT_CMD_SET_PARAM:{
-            if (pCmdData == NULL||
-                    cmdSize < sizeof(effect_param_t) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize != sizeof(int32_t)){
+        case EFFECT_CMD_SET_PARAM: {
+            if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize != sizeof(int32_t)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR");
+                      "EFFECT_CMD_SET_PARAM: ERROR");
                 return -EINVAL;
             }
-            effect_param_t *p = (effect_param_t *) pCmdData;
+            effect_param_t* p = (effect_param_t*)pCmdData;
 
-            if (p->psize != sizeof(int32_t)){
+            if (p->psize != sizeof(int32_t)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
+                      "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)");
                 return -EINVAL;
             }
             if (effect->ops->set_parameter) {
-                *(int *)pReplyData = effect->ops->set_parameter(effect,
-                                                                (void *)p->data,
-                                                                p->data + p->psize);
+                *(int*)pReplyData =
+                        effect->ops->set_parameter(effect, (void*)p->data, p->data + p->psize);
             }
         } break;
 
         case EFFECT_CMD_ENABLE:
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_ENABLE: ERROR");
                 return -EINVAL;
             }
-            *(int *)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_ACTIVE);
+            *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_ACTIVE);
             break;
 
         case EFFECT_CMD_DISABLE:
-            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){
+            if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_DISABLE: ERROR");
                 return -EINVAL;
             }
-            *(int *)pReplyData  = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
+            *(int*)pReplyData = Effect_SetState(effect, PREPROC_EFFECT_STATE_CONFIG);
             break;
 
         case EFFECT_CMD_SET_DEVICE:
         case EFFECT_CMD_SET_INPUT_DEVICE:
-            if (pCmdData == NULL ||
-                cmdSize != sizeof(uint32_t)) {
+            if (pCmdData == NULL || cmdSize != sizeof(uint32_t)) {
                 ALOGV("PreProcessingFx_Command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR");
                 return -EINVAL;
             }
 
             if (effect->ops->set_device) {
-                effect->ops->set_device(effect, *(uint32_t *)pCmdData);
+                effect->ops->set_device(effect, *(uint32_t*)pCmdData);
             }
             break;
 
@@ -1578,30 +1263,30 @@
 #ifdef DUAL_MIC_TEST
         ///// test commands start
         case PREPROC_CMD_DUAL_MIC_ENABLE: {
-            if (pCmdData == NULL|| cmdSize != sizeof(uint32_t) ||
-                    pReplyData == NULL || replySize == NULL) {
+            if (pCmdData == NULL || cmdSize != sizeof(uint32_t) || pReplyData == NULL ||
+                replySize == NULL) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "PREPROC_CMD_DUAL_MIC_ENABLE: ERROR");
+                      "PREPROC_CMD_DUAL_MIC_ENABLE: ERROR");
                 *replySize = 0;
                 return -EINVAL;
             }
-            gDualMicEnabled = *(bool *)pCmdData;
+            gDualMicEnabled = *(bool*)pCmdData;
             if (gDualMicEnabled) {
                 effect->aux_channels_on = sHasAuxChannels[effect->procId];
             } else {
                 effect->aux_channels_on = false;
             }
-            effect->cur_channel_config = (effect->session->inChannelCount == 1) ?
-                    CHANNEL_CFG_MONO : CHANNEL_CFG_STEREO;
+            effect->cur_channel_config =
+                    (effect->session->inChannelCount == 1) ? CHANNEL_CFG_MONO : CHANNEL_CFG_STEREO;
 
             ALOGV("PREPROC_CMD_DUAL_MIC_ENABLE: %s", gDualMicEnabled ? "enabled" : "disabled");
             *replySize = sizeof(int);
-            *(int *)pReplyData = 0;
-            } break;
+            *(int*)pReplyData = 0;
+        } break;
         case PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: {
-            if (pCmdData == NULL|| pReplyData == NULL || replySize == NULL) {
+            if (pCmdData == NULL || pReplyData == NULL || replySize == NULL) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: ERROR");
+                      "PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: ERROR");
                 *replySize = 0;
                 return -EINVAL;
             }
@@ -1610,20 +1295,19 @@
                 fclose(gPcmDumpFh);
                 gPcmDumpFh = NULL;
             }
-            char *path = strndup((char *)pCmdData, cmdSize);
-            gPcmDumpFh = fopen((char *)path, "wb");
+            char* path = strndup((char*)pCmdData, cmdSize);
+            gPcmDumpFh = fopen((char*)path, "wb");
             pthread_mutex_unlock(&gPcmDumpLock);
-            ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: path %s gPcmDumpFh %p",
-                  path, gPcmDumpFh);
+            ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_START: path %s gPcmDumpFh %p", path, gPcmDumpFh);
             ALOGE_IF(gPcmDumpFh <= 0, "gPcmDumpFh open error %d %s", errno, strerror(errno));
             free(path);
             *replySize = sizeof(int);
-            *(int *)pReplyData = 0;
-            } break;
+            *(int*)pReplyData = 0;
+        } break;
         case PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: {
             if (pReplyData == NULL || replySize == NULL) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: ERROR");
+                      "PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP: ERROR");
                 *replySize = 0;
                 return -EINVAL;
             }
@@ -1635,118 +1319,116 @@
             pthread_mutex_unlock(&gPcmDumpLock);
             ALOGV("PREPROC_CMD_DUAL_MIC_PCM_DUMP_STOP");
             *replySize = sizeof(int);
-            *(int *)pReplyData = 0;
-            } break;
-        ///// test commands end
+            *(int*)pReplyData = 0;
+        } break;
+            ///// test commands end
 
         case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: {
-            if(!gDualMicEnabled) {
+            if (!gDualMicEnabled) {
                 return -EINVAL;
             }
-            if (pCmdData == NULL|| cmdSize != 2 * sizeof(uint32_t) ||
-                    pReplyData == NULL || replySize == NULL) {
+            if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL ||
+                replySize == NULL) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: ERROR");
+                      "EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: ERROR");
                 *replySize = 0;
                 return -EINVAL;
             }
-            if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS ||
-                  !effect->aux_channels_on) {
+            if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
                 ALOGV("PreProcessingFx_Command feature EFFECT_FEATURE_AUX_CHANNELS not supported by"
-                        " fx %d", effect->procId);
-                *(uint32_t *)pReplyData = -ENOSYS;
+                      " fx %d",
+                      effect->procId);
+                *(uint32_t*)pReplyData = -ENOSYS;
                 *replySize = sizeof(uint32_t);
                 break;
             }
-            size_t num_configs = *((uint32_t *)pCmdData + 1);
-            if (*replySize < (2 * sizeof(uint32_t) +
-                              num_configs * sizeof(channel_config_t))) {
+            size_t num_configs = *((uint32_t*)pCmdData + 1);
+            if (*replySize < (2 * sizeof(uint32_t) + num_configs * sizeof(channel_config_t))) {
                 *replySize = 0;
                 return -EINVAL;
             }
 
-            *((uint32_t *)pReplyData + 1) = CHANNEL_CFG_CNT;
+            *((uint32_t*)pReplyData + 1) = CHANNEL_CFG_CNT;
             if (num_configs < CHANNEL_CFG_CNT ||
-                    *replySize < (2 * sizeof(uint32_t) +
-                                     CHANNEL_CFG_CNT * sizeof(channel_config_t))) {
-                *(uint32_t *)pReplyData = -ENOMEM;
+                *replySize < (2 * sizeof(uint32_t) + CHANNEL_CFG_CNT * sizeof(channel_config_t))) {
+                *(uint32_t*)pReplyData = -ENOMEM;
             } else {
                 num_configs = CHANNEL_CFG_CNT;
-                *(uint32_t *)pReplyData = 0;
+                *(uint32_t*)pReplyData = 0;
             }
             ALOGV("PreProcessingFx_Command EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS num config %d",
                   num_configs);
 
             *replySize = 2 * sizeof(uint32_t) + num_configs * sizeof(channel_config_t);
-            *((uint32_t *)pReplyData + 1) = num_configs;
-            memcpy((uint32_t *)pReplyData + 2, &sDualMicConfigs, num_configs * sizeof(channel_config_t));
-            } break;
+            *((uint32_t*)pReplyData + 1) = num_configs;
+            memcpy((uint32_t*)pReplyData + 2, &sDualMicConfigs,
+                   num_configs * sizeof(channel_config_t));
+        } break;
         case EFFECT_CMD_GET_FEATURE_CONFIG:
-            if(!gDualMicEnabled) {
+            if (!gDualMicEnabled) {
                 return -EINVAL;
             }
-            if (pCmdData == NULL|| cmdSize != sizeof(uint32_t) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < sizeof(uint32_t) + sizeof(channel_config_t)) {
+            if (pCmdData == NULL || cmdSize != sizeof(uint32_t) || pReplyData == NULL ||
+                replySize == NULL || *replySize < sizeof(uint32_t) + sizeof(channel_config_t)) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_GET_FEATURE_CONFIG: ERROR");
+                      "EFFECT_CMD_GET_FEATURE_CONFIG: ERROR");
                 return -EINVAL;
             }
-            if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
-                *(uint32_t *)pReplyData = -ENOSYS;
+            if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
+                *(uint32_t*)pReplyData = -ENOSYS;
                 *replySize = sizeof(uint32_t);
                 break;
             }
             ALOGV("PreProcessingFx_Command EFFECT_CMD_GET_FEATURE_CONFIG");
-            *(uint32_t *)pReplyData = 0;
+            *(uint32_t*)pReplyData = 0;
             *replySize = sizeof(uint32_t) + sizeof(channel_config_t);
-            memcpy((uint32_t *)pReplyData + 1,
-                   &sDualMicConfigs[effect->cur_channel_config],
+            memcpy((uint32_t*)pReplyData + 1, &sDualMicConfigs[effect->cur_channel_config],
                    sizeof(channel_config_t));
             break;
         case EFFECT_CMD_SET_FEATURE_CONFIG: {
             ALOGV("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG: "
-                    "gDualMicEnabled %d effect->aux_channels_on %d",
+                  "gDualMicEnabled %d effect->aux_channels_on %d",
                   gDualMicEnabled, effect->aux_channels_on);
-            if(!gDualMicEnabled) {
+            if (!gDualMicEnabled) {
                 return -EINVAL;
             }
-            if (pCmdData == NULL|| cmdSize != (sizeof(uint32_t) + sizeof(channel_config_t)) ||
-                    pReplyData == NULL || replySize == NULL ||
-                    *replySize < sizeof(uint32_t)) {
+            if (pCmdData == NULL || cmdSize != (sizeof(uint32_t) + sizeof(channel_config_t)) ||
+                pReplyData == NULL || replySize == NULL || *replySize < sizeof(uint32_t)) {
                 ALOGE("PreProcessingFx_Command cmdCode Case: "
-                        "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
-                        "pCmdData %p cmdSize %d pReplyData %p replySize %p *replySize %d",
-                        pCmdData, cmdSize, pReplyData, replySize, replySize ? *replySize : -1);
+                      "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
+                      "pCmdData %p cmdSize %d pReplyData %p replySize %p *replySize %d",
+                      pCmdData, cmdSize, pReplyData, replySize, replySize ? *replySize : -1);
                 return -EINVAL;
             }
             *replySize = sizeof(uint32_t);
-            if (*(uint32_t *)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
-                *(uint32_t *)pReplyData = -ENOSYS;
+            if (*(uint32_t*)pCmdData != EFFECT_FEATURE_AUX_CHANNELS || !effect->aux_channels_on) {
+                *(uint32_t*)pReplyData = -ENOSYS;
                 ALOGV("PreProcessingFx_Command cmdCode Case: "
-                                        "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
-                                        "CmdData %d effect->aux_channels_on %d",
-                                        *(uint32_t *)pCmdData, effect->aux_channels_on);
+                      "EFFECT_CMD_SET_FEATURE_CONFIG: ERROR\n"
+                      "CmdData %d effect->aux_channels_on %d",
+                      *(uint32_t*)pCmdData, effect->aux_channels_on);
                 break;
             }
             size_t i;
-            for (i = 0; i < CHANNEL_CFG_CNT;i++) {
-                if (memcmp((uint32_t *)pCmdData + 1,
-                           &sDualMicConfigs[i], sizeof(channel_config_t)) == 0) {
+            for (i = 0; i < CHANNEL_CFG_CNT; i++) {
+                if (memcmp((uint32_t*)pCmdData + 1, &sDualMicConfigs[i],
+                           sizeof(channel_config_t)) == 0) {
                     break;
                 }
             }
             if (i == CHANNEL_CFG_CNT) {
-                *(uint32_t *)pReplyData = -EINVAL;
+                *(uint32_t*)pReplyData = -EINVAL;
                 ALOGW("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG invalid config"
-                        "[%08x].[%08x]", *((uint32_t *)pCmdData + 1), *((uint32_t *)pCmdData + 2));
+                      "[%08x].[%08x]",
+                      *((uint32_t*)pCmdData + 1), *((uint32_t*)pCmdData + 2));
             } else {
                 effect->cur_channel_config = i;
-                *(uint32_t *)pReplyData = 0;
+                *(uint32_t*)pReplyData = 0;
                 ALOGV("PreProcessingFx_Command EFFECT_CMD_SET_FEATURE_CONFIG New config"
-                        "[%08x].[%08x]", sDualMicConfigs[i].main_channels, sDualMicConfigs[i].aux_channels);
+                      "[%08x].[%08x]",
+                      sDualMicConfigs[i].main_channels, sDualMicConfigs[i].aux_channels);
             }
-            } break;
+        } break;
 #endif
         default:
             return -EINVAL;
@@ -1754,11 +1436,8 @@
     return 0;
 }
 
-
-int PreProcessingFx_GetDescriptor(effect_handle_t   self,
-                                  effect_descriptor_t *pDescriptor)
-{
-    preproc_effect_t * effect = (preproc_effect_t *) self;
+int PreProcessingFx_GetDescriptor(effect_handle_t self, effect_descriptor_t* pDescriptor) {
+    preproc_effect_t* effect = (preproc_effect_t*)self;
 
     if (effect == NULL || pDescriptor == NULL) {
         return -EINVAL;
@@ -1769,137 +1448,81 @@
     return 0;
 }
 
-int PreProcessingFx_ProcessReverse(effect_handle_t     self,
-                                   audio_buffer_t    *inBuffer,
-                                   audio_buffer_t    *outBuffer __unused)
-{
-    preproc_effect_t * effect = (preproc_effect_t *)self;
+int PreProcessingFx_ProcessReverse(effect_handle_t self, audio_buffer_t* inBuffer,
+                                   audio_buffer_t* outBuffer) {
+    preproc_effect_t* effect = (preproc_effect_t*)self;
 
-    if (effect == NULL){
+    if (effect == NULL) {
         ALOGW("PreProcessingFx_ProcessReverse() ERROR effect == NULL");
         return -EINVAL;
     }
-    preproc_session_t * session = (preproc_session_t *)effect->session;
+    preproc_session_t* session = (preproc_session_t*)effect->session;
 
-    if (inBuffer == NULL  || inBuffer->raw == NULL){
+    if (inBuffer == NULL || inBuffer->raw == NULL) {
         ALOGW("PreProcessingFx_ProcessReverse() ERROR bad pointer");
         return -EINVAL;
     }
 
-    session->revProcessedMsk |= (1<<effect->procId);
+    if (inBuffer->frameCount != outBuffer->frameCount) {
+        ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+              inBuffer->frameCount, outBuffer->frameCount);
+        return -EINVAL;
+    }
 
-//    ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk %08x",
-//         inBuffer->frameCount, session->revEnabledMsk, session->revProcessedMsk);
+    if (inBuffer->frameCount != session->frameCount) {
+        ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+              inBuffer->frameCount, session->frameCount, session->samplingRate);
+        return -EINVAL;
+    }
 
+    session->revProcessedMsk |= (1 << effect->procId);
+
+    //    ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk %08x",
+    //          inBuffer->frameCount, session->revEnabledMsk, session->revProcessedMsk);
 
     if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
         effect->session->revProcessedMsk = 0;
-        if (session->revResampler != NULL) {
-            size_t fr = session->frameCount - session->framesRev;
-            if (inBuffer->frameCount < fr) {
-                fr = inBuffer->frameCount;
-            }
-            if (session->revBufSize < session->framesRev + fr) {
-                int16_t *buf;
-                session->revBufSize = session->framesRev + fr;
-                buf = (int16_t *)realloc(session->revBuf,
-                                 session->revBufSize * session->inChannelCount * sizeof(int16_t));
-                if (buf == NULL) {
-                    session->framesRev = 0;
-                    free(session->revBuf);
-                    session->revBuf = NULL;
-                    return -ENOMEM;
-                }
-                session->revBuf = buf;
-            }
-            memcpy(session->revBuf + session->framesRev * session->inChannelCount,
-                   inBuffer->s16,
-                   fr * session->inChannelCount * sizeof(int16_t));
-
-            session->framesRev += fr;
-            inBuffer->frameCount = fr;
-            if (session->framesRev < session->frameCount) {
-                return 0;
-            }
-            spx_uint32_t frIn = session->framesRev;
-            spx_uint32_t frOut = session->apmFrameCount;
-            if (session->inChannelCount == 1) {
-                speex_resampler_process_int(session->revResampler,
-                                            0,
-                                            session->revBuf,
-                                            &frIn,
-                                            session->revFrame->data_,
-                                            &frOut);
-            } else {
-                speex_resampler_process_interleaved_int(session->revResampler,
-                                                        session->revBuf,
-                                                        &frIn,
-                                                        session->revFrame->data_,
-                                                        &frOut);
-            }
-            memcpy(session->revBuf,
-                   session->revBuf + frIn * session->inChannelCount,
-                   (session->framesRev - frIn) * session->inChannelCount * sizeof(int16_t));
-            session->framesRev -= frIn;
-        } else {
-            size_t fr = session->frameCount - session->framesRev;
-            if (inBuffer->frameCount < fr) {
-                fr = inBuffer->frameCount;
-            }
-            memcpy(session->revFrame->data_ + session->framesRev * session->inChannelCount,
-                   inBuffer->s16,
-                   fr * session->inChannelCount * sizeof(int16_t));
-            session->framesRev += fr;
-            inBuffer->frameCount = fr;
-            if (session->framesRev < session->frameCount) {
-                return 0;
-            }
-            session->framesRev = 0;
+        if (int status = effect->session->apm->ProcessReverseStream(
+                    (const int16_t* const)inBuffer->s16,
+                    (const webrtc::StreamConfig)effect->session->revConfig,
+                    (const webrtc::StreamConfig)effect->session->revConfig,
+                    (int16_t* const)outBuffer->s16);
+            status != 0) {
+            ALOGE("Process Reverse Stream failed with error %d\n", status);
+            return status;
         }
-        session->revFrame->samples_per_channel_ = session->apmFrameCount;
-        effect->session->apm->AnalyzeReverseStream(session->revFrame);
         return 0;
     } else {
         return -ENODATA;
     }
 }
 
-
 // effect_handle_t interface implementation for effect
 const struct effect_interface_s sEffectInterface = {
-    PreProcessingFx_Process,
-    PreProcessingFx_Command,
-    PreProcessingFx_GetDescriptor,
-    NULL
-};
+        PreProcessingFx_Process, PreProcessingFx_Command, PreProcessingFx_GetDescriptor, NULL};
 
 const struct effect_interface_s sEffectInterfaceReverse = {
-    PreProcessingFx_Process,
-    PreProcessingFx_Command,
-    PreProcessingFx_GetDescriptor,
-    PreProcessingFx_ProcessReverse
-};
+        PreProcessingFx_Process, PreProcessingFx_Command, PreProcessingFx_GetDescriptor,
+        PreProcessingFx_ProcessReverse};
 
 //------------------------------------------------------------------------------
 // Effect Library Interface Implementation
 //------------------------------------------------------------------------------
 
-int PreProcessingLib_Create(const effect_uuid_t *uuid,
-                            int32_t             sessionId,
-                            int32_t             ioId,
-                            effect_handle_t  *pInterface)
-{
+int PreProcessingLib_Create(const effect_uuid_t* uuid, int32_t sessionId, int32_t ioId,
+                            effect_handle_t* pInterface) {
     ALOGV("EffectCreate: uuid: %08x session %d IO: %d", uuid->timeLow, sessionId, ioId);
 
     int status;
-    const effect_descriptor_t *desc;
-    preproc_session_t *session;
+    const effect_descriptor_t* desc;
+    preproc_session_t* session;
     uint32_t procId;
 
     if (PreProc_Init() != 0) {
         return sInitStatus;
     }
-    desc =  PreProc_GetDescriptor(uuid);
+    desc = PreProc_GetDescriptor(uuid);
     if (desc == NULL) {
         ALOGW("EffectCreate: fx not found uuid: %08x", uuid->timeLow);
         return -EINVAL;
@@ -1920,14 +1543,13 @@
     return status;
 }
 
-int PreProcessingLib_Release(effect_handle_t interface)
-{
+int PreProcessingLib_Release(effect_handle_t interface) {
     ALOGV("EffectRelease start %p", interface);
     if (PreProc_Init() != 0) {
         return sInitStatus;
     }
 
-    preproc_effect_t *fx = (preproc_effect_t *)interface;
+    preproc_effect_t* fx = (preproc_effect_t*)interface;
 
     if (fx->session->id == 0) {
         return -EINVAL;
@@ -1935,17 +1557,15 @@
     return Session_ReleaseEffect(fx->session, fx);
 }
 
-int PreProcessingLib_GetDescriptor(const effect_uuid_t *uuid,
-                                   effect_descriptor_t *pDescriptor) {
-
-    if (pDescriptor == NULL || uuid == NULL){
+int PreProcessingLib_GetDescriptor(const effect_uuid_t* uuid, effect_descriptor_t* pDescriptor) {
+    if (pDescriptor == NULL || uuid == NULL) {
         return -EINVAL;
     }
 
-    const effect_descriptor_t *desc = PreProc_GetDescriptor(uuid);
+    const effect_descriptor_t* desc = PreProc_GetDescriptor(uuid);
     if (desc == NULL) {
         ALOGV("PreProcessingLib_GetDescriptor() not found");
-        return  -EINVAL;
+        return -EINVAL;
     }
 
     ALOGV("PreProcessingLib_GetDescriptor() got fx %s", desc->name);
@@ -1955,15 +1575,13 @@
 }
 
 // This is the only symbol that needs to be exported
-__attribute__ ((visibility ("default")))
-audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    .tag = AUDIO_EFFECT_LIBRARY_TAG,
-    .version = EFFECT_LIBRARY_API_VERSION,
-    .name = "Audio Preprocessing Library",
-    .implementor = "The Android Open Source Project",
-    .create_effect = PreProcessingLib_Create,
-    .release_effect = PreProcessingLib_Release,
-    .get_descriptor = PreProcessingLib_GetDescriptor
-};
+__attribute__((visibility("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+        .tag = AUDIO_EFFECT_LIBRARY_TAG,
+        .version = EFFECT_LIBRARY_API_VERSION,
+        .name = "Audio Preprocessing Library",
+        .implementor = "The Android Open Source Project",
+        .create_effect = PreProcessingLib_Create,
+        .release_effect = PreProcessingLib_Release,
+        .get_descriptor = PreProcessingLib_GetDescriptor};
 
-}; // extern "C"
+};  // extern "C"
diff --git a/media/libeffects/preprocessing/README.md b/media/libeffects/preprocessing/README.md
new file mode 100644
index 0000000..af46376
--- /dev/null
+++ b/media/libeffects/preprocessing/README.md
@@ -0,0 +1,7 @@
+# Preprocessing effects
+
+## Limitations
+- Preprocessing effects currently work on 10 ms worth of data and do not support
+  arbitrary frame counts. This limitation comes from the underlying effects in
+  the webrtc modules.
+- There is currently no API to communicate this requirement.
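+
+As an illustration only (this helper is not part of the effect API), a caller that
+honors the constraint sizes each process() call to exactly one 10 ms block; a minimal
+sketch, assuming the sampling rate configured through the effect framework:
+
+```cpp
+#include <cstddef>
+
+// Hypothetical helper: frames contained in one 10 ms block at sampleRate.
+// e.g. 16000 Hz -> 160 frames; a buffer then holds frames * channelCount samples.
+constexpr size_t framesPerTenMs(size_t sampleRate) {
+    return sampleRate / 100;  // 10 ms is 1/100 of a second
+}
+```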
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
new file mode 100644
index 0000000..fbbcab4
--- /dev/null
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -0,0 +1,20 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_preprocessing_license",
+    ],
+}
+
+cc_benchmark {
+    name: "preprocessing_benchmark",
+    defaults: ["libaudiopreprocessing-defaults"],
+    srcs: ["preprocessing_benchmark.cpp"],
+    static_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+    ],
+}
diff --git a/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp b/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp
new file mode 100644
index 0000000..9501d4d
--- /dev/null
+++ b/media/libeffects/preprocessing/benchmarks/preprocessing_benchmark.cpp
@@ -0,0 +1,322 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*******************************************************************
+ * A test result running on Pixel 3 for comparison.
+ * The first parameter indicates the channel mask index.
+ * The second parameter indicates the effect index.
+ * 0: Automatic Gain Control,
+ * 1: Acoustic Echo Canceler,
+ * 2: Noise Suppressor,
+ * 3: Automatic Gain Control 2
+ * ---------------------------------------------------------------
+ * Benchmark                     Time             CPU   Iterations
+ * ---------------------------------------------------------------
+ * BM_PREPROCESSING/1/0       48179 ns        48041 ns        12349
+ * BM_PREPROCESSING/1/1       57559 ns        57403 ns        12270
+ * BM_PREPROCESSING/1/2       17524 ns        17466 ns        39982
+ * BM_PREPROCESSING/1/3        2608 ns         2599 ns       268399
+ * BM_PREPROCESSING/2/0       94198 ns        93926 ns         7470
+ * BM_PREPROCESSING/2/1      109196 ns       108899 ns         6459
+ * BM_PREPROCESSING/2/2       34098 ns        33986 ns        20576
+ * BM_PREPROCESSING/2/3        3231 ns         3221 ns       216606
+ * BM_PREPROCESSING/3/0      141532 ns       141132 ns         5030
+ * BM_PREPROCESSING/3/1      161199 ns       160745 ns         4387
+ * BM_PREPROCESSING/3/2       50663 ns        50535 ns        13619
+ * BM_PREPROCESSING/3/3        3967 ns         3955 ns       177005
+ * BM_PREPROCESSING/4/0      187032 ns       186486 ns         3706
+ * BM_PREPROCESSING/4/1      212872 ns       212264 ns         3304
+ * BM_PREPROCESSING/4/2       67649 ns        67476 ns        10128
+ * BM_PREPROCESSING/4/3        4728 ns         4713 ns       148547
+ * BM_PREPROCESSING/5/0      233874 ns       233188 ns         2954
+ * BM_PREPROCESSING/5/1      262798 ns       262052 ns         2680
+ * BM_PREPROCESSING/5/2       84592 ns        84368 ns         8203
+ * BM_PREPROCESSING/5/3        5472 ns         5455 ns       127784
+ * BM_PREPROCESSING/6/0      284777 ns       283911 ns         2468
+ * BM_PREPROCESSING/6/1      315631 ns       314726 ns         2233
+ * BM_PREPROCESSING/6/2      101200 ns       100931 ns         6802
+ * BM_PREPROCESSING/6/3        6152 ns         6133 ns       113951
+ * BM_PREPROCESSING/7/0      327207 ns       326153 ns         2112
+ * BM_PREPROCESSING/7/1      367510 ns       366410 ns         1915
+ * BM_PREPROCESSING/7/2      118574 ns       118250 ns         5795
+ * BM_PREPROCESSING/7/3        6956 ns         6935 ns       100783
+ * BM_PREPROCESSING/8/0      372603 ns       371470 ns         1880
+ * BM_PREPROCESSING/8/1      418882 ns       417625 ns         1685
+ * BM_PREPROCESSING/8/2      136155 ns       135777 ns         4986
+ * BM_PREPROCESSING/8/3        7734 ns         7711 ns        91581
+ * BM_PREPROCESSING/9/0      424795 ns       423464 ns         1657
+ * BM_PREPROCESSING/9/1      469073 ns       467687 ns         1506
+ * BM_PREPROCESSING/9/2      153170 ns       152737 ns         4519
+ * BM_PREPROCESSING/9/3        8393 ns         8363 ns        83603
+ * BM_PREPROCESSING/10/0     472440 ns       470926 ns         1489
+ * BM_PREPROCESSING/10/1     516984 ns       515480 ns         1000
+ * BM_PREPROCESSING/10/2     168802 ns       168348 ns         4097
+ * BM_PREPROCESSING/10/3       9127 ns         9100 ns        76913
+ * BM_PREPROCESSING/11/0     509690 ns       508113 ns         1360
+ * BM_PREPROCESSING/11/1     569076 ns       567390 ns         1310
+ * BM_PREPROCESSING/11/2     185678 ns       185165 ns         3729
+ * BM_PREPROCESSING/11/3       9789 ns         9760 ns        71342
+ * BM_PREPROCESSING/12/0     563858 ns       562108 ns         1270
+ * BM_PREPROCESSING/12/1     619656 ns       617791 ns         1198
+ * BM_PREPROCESSING/12/2     202882 ns       202316 ns         3406
+ * BM_PREPROCESSING/12/3      10610 ns        10579 ns        66287
+ * BM_PREPROCESSING/13/0     602944 ns       601094 ns         1167
+ * BM_PREPROCESSING/13/1     675401 ns       673293 ns         1107
+ * BM_PREPROCESSING/13/2     220677 ns       220051 ns         3131
+ * BM_PREPROCESSING/13/3      11301 ns        11265 ns        62022
+ * BM_PREPROCESSING/14/0     659495 ns       657375 ns         1071
+ * BM_PREPROCESSING/14/1     726551 ns       724295 ns         1024
+ * BM_PREPROCESSING/14/2     238595 ns       237922 ns         2901
+ * BM_PREPROCESSING/14/3      11941 ns        11906 ns        58788
+ * BM_PREPROCESSING/15/0     698377 ns       696134 ns         1014
+ * BM_PREPROCESSING/15/1     772532 ns       770217 ns          960
+ * BM_PREPROCESSING/15/2     253219 ns       252505 ns         2736
+ * BM_PREPROCESSING/15/3      12669 ns        12632 ns        55452
+ * BM_PREPROCESSING/16/0     742054 ns       739708 ns          936
+ * BM_PREPROCESSING/16/1     828029 ns       825484 ns          902
+ * BM_PREPROCESSING/16/2     272419 ns       271658 ns         2545
+ * BM_PREPROCESSING/16/3      13473 ns        13431 ns        52088
+ * BM_PREPROCESSING/17/0     794444 ns       791916 ns          891
+ * BM_PREPROCESSING/17/1     879429 ns       876704 ns          841
+ * BM_PREPROCESSING/17/2     290059 ns       289216 ns         2391
+ * BM_PREPROCESSING/17/3      14257 ns        14210 ns        49425
+ * BM_PREPROCESSING/18/0     852221 ns       849430 ns          839
+ * BM_PREPROCESSING/18/1     931121 ns       928308 ns          799
+ * BM_PREPROCESSING/18/2     307995 ns       307104 ns         2253
+ * BM_PREPROCESSING/18/3      14947 ns        14900 ns        46872
+ * BM_PREPROCESSING/19/0     888752 ns       885893 ns          781
+ * BM_PREPROCESSING/19/1     983398 ns       980285 ns          756
+ * BM_PREPROCESSING/19/2     325669 ns       324705 ns         2132
+ * BM_PREPROCESSING/19/3      15677 ns        15629 ns        44693
+ * BM_PREPROCESSING/20/0     933651 ns       930697 ns          746
+ * BM_PREPROCESSING/20/1    1033396 ns      1030235 ns          713
+ * BM_PREPROCESSING/20/2     342081 ns       341077 ns         2031
+ * BM_PREPROCESSING/20/3      16422 ns        16370 ns        42622
+ * BM_PREPROCESSING/21/0     982521 ns       979388 ns          706
+ * BM_PREPROCESSING/21/1    1085340 ns      1081926 ns          682
+ * BM_PREPROCESSING/21/2     360862 ns       359810 ns         1926
+ * BM_PREPROCESSING/21/3      17161 ns        17107 ns        40885
+ * BM_PREPROCESSING/22/0    1043560 ns      1040219 ns          678
+ * BM_PREPROCESSING/22/1    1137203 ns      1133687 ns          653
+ * BM_PREPROCESSING/22/2     377421 ns       376315 ns         1841
+ * BM_PREPROCESSING/22/3      17903 ns        17847 ns        38984
+ * BM_PREPROCESSING/23/0    1090097 ns      1086523 ns          650
+ * BM_PREPROCESSING/23/1    1199267 ns      1194231 ns          619
+ * BM_PREPROCESSING/23/2     395429 ns       394263 ns         1759
+ * BM_PREPROCESSING/23/3      18879 ns        18818 ns        37242
+ * BM_PREPROCESSING/24/0    1128638 ns      1125076 ns          629
+ * BM_PREPROCESSING/24/1    1239909 ns      1236019 ns          598
+ * BM_PREPROCESSING/24/2     414294 ns       413055 ns         1680
+ * BM_PREPROCESSING/24/3      19583 ns        19521 ns        35771
+ *******************************************************************/
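+
+/*
+ * A single configuration can be benchmarked in isolation with the standard
+ * Google Benchmark filter flag, e.g. channel-mask index 1 with the Noise
+ * Suppressor (effect index 2):
+ *   preprocessing_benchmark --benchmark_filter='BM_PREPROCESSING/1/2'
+ */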
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <array>
+#include <climits>
+#include <cstdlib>
+#include <random>
+#include <vector>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <sys/stat.h>
+#include <system/audio.h>
+
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+constexpr int kSampleRate = 16000;
+constexpr float kTenMilliSecVal = 0.01;
+constexpr unsigned int kStreamDelayMs = 0;
+constexpr effect_uuid_t kEffectUuids[] = {
+        // agc uuid
+        {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // aec uuid
+        {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // ns  uuid
+        {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+        // agc2 uuid
+        {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}},
+};
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+constexpr audio_channel_mask_t kChMasks[] = {
+        AUDIO_CHANNEL_INDEX_MASK_1,  AUDIO_CHANNEL_INDEX_MASK_2,  AUDIO_CHANNEL_INDEX_MASK_3,
+        AUDIO_CHANNEL_INDEX_MASK_4,  AUDIO_CHANNEL_INDEX_MASK_5,  AUDIO_CHANNEL_INDEX_MASK_6,
+        AUDIO_CHANNEL_INDEX_MASK_7,  AUDIO_CHANNEL_INDEX_MASK_8,  AUDIO_CHANNEL_INDEX_MASK_9,
+        AUDIO_CHANNEL_INDEX_MASK_10, AUDIO_CHANNEL_INDEX_MASK_11, AUDIO_CHANNEL_INDEX_MASK_12,
+        AUDIO_CHANNEL_INDEX_MASK_13, AUDIO_CHANNEL_INDEX_MASK_14, AUDIO_CHANNEL_INDEX_MASK_15,
+        AUDIO_CHANNEL_INDEX_MASK_16, AUDIO_CHANNEL_INDEX_MASK_17, AUDIO_CHANNEL_INDEX_MASK_18,
+        AUDIO_CHANNEL_INDEX_MASK_19, AUDIO_CHANNEL_INDEX_MASK_20, AUDIO_CHANNEL_INDEX_MASK_21,
+        AUDIO_CHANNEL_INDEX_MASK_22, AUDIO_CHANNEL_INDEX_MASK_23, AUDIO_CHANNEL_INDEX_MASK_24,
+};
+constexpr size_t kNumChMasks = std::size(kChMasks);
+
+// types of pre processing modules
+enum PreProcId {
+    PREPROC_AGC,  // Automatic Gain Control
+    PREPROC_AEC,  // Acoustic Echo Canceler
+    PREPROC_NS,   // Noise Suppressor
+    PREPROC_AGC2,  // Automatic Gain Control 2
+    PREPROC_NUM_EFFECTS
+};
+
+int preProcCreateEffect(effect_handle_t* pEffectHandle, uint32_t effectType,
+                        effect_config_t* pConfig, int sessionId, int ioId) {
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kEffectUuids[effectType],
+                                                                 sessionId, ioId, pEffectHandle);
+        status != 0) {
+        ALOGE("Audio Preprocessing create returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (effectType == PREPROC_AEC) {
+        if (int status = (**pEffectHandle)
+                                 ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE,
+                                           sizeof(effect_config_t), pConfig, &replySize, &reply);
+            status != 0) {
+            ALOGE("Set config reverse command returned an error = %d\n", status);
+            return EXIT_FAILURE;
+        }
+    }
+    if (int status = (**pEffectHandle)
+                             ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG,
+                                       sizeof(effect_config_t), pConfig, &replySize, &reply);
+        status != 0) {
+        ALOGE("Set config command returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    return reply;
+}
+
+int preProcSetConfigParam(effect_handle_t effectHandle, uint32_t paramType, uint32_t paramValue) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {paramType, paramValue};
+    effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    (*effectHandle)
+            ->command(effectHandle, EFFECT_CMD_SET_PARAM, sizeof(effect_param_t), effectParam,
+                      &replySize, &reply);
+    free(effectParam);
+    return reply;
+}
+
+short preProcGetShortVal(float paramValue) {
+    return static_cast<short>(paramValue * std::numeric_limits<short>::max());
+}
+
+static void BM_PREPROCESSING(benchmark::State& state) {
+    const size_t chMask = kChMasks[state.range(0) - 1];
+    const size_t channelCount = audio_channel_count_from_in_mask(chMask);
+
+    PreProcId effectType = (PreProcId)state.range(1);
+
+    int32_t sessionId = 1;
+    int32_t ioId = 1;
+    effect_handle_t effectHandle = nullptr;
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = kSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+    if (int status = preProcCreateEffect(&effectHandle, state.range(1), &config, sessionId, ioId);
+        status != 0) {
+        ALOGE("Create effect call returned error %i", status);
+        return;
+    }
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status =
+                (*effectHandle)
+                        ->command(effectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+        status != 0) {
+        ALOGE("Command enable call returned error %d\n", reply);
+        return;
+    }
+
+    // Initialize input buffer with deterministic pseudo-random values
+    const int frameLength = (int)(kSampleRate * kTenMilliSecVal);
+    std::minstd_rand gen(chMask);
+    std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+    std::vector<short> in(frameLength * channelCount);
+    for (auto& i : in) {
+        i = preProcGetShortVal(dis(gen));
+    }
+    std::vector<short> farIn(frameLength * channelCount);
+    for (auto& i : farIn) {
+        i = preProcGetShortVal(dis(gen));
+    }
+    std::vector<short> out(frameLength * channelCount);
+
+    // Run the test
+    for (auto _ : state) {
+        benchmark::DoNotOptimize(in.data());
+        benchmark::DoNotOptimize(out.data());
+        benchmark::DoNotOptimize(farIn.data());
+
+        audio_buffer_t inBuffer = {.frameCount = (size_t)frameLength, .s16 = in.data()};
+        audio_buffer_t outBuffer = {.frameCount = (size_t)frameLength, .s16 = out.data()};
+        audio_buffer_t farInBuffer = {.frameCount = (size_t)frameLength, .s16 = farIn.data()};
+
+        if (PREPROC_AEC == effectType) {
+            if (int status =
+                        preProcSetConfigParam(effectHandle, AEC_PARAM_ECHO_DELAY, kStreamDelayMs);
+                status != 0) {
+                ALOGE("preProcSetConfigParam returned Error %d\n", status);
+                return;
+            }
+        }
+        if (int status = (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+            status != 0) {
+            ALOGE("\nError: Process i = %d returned with error %d\n", (int)state.range(1), status);
+            return;
+        }
+        if (PREPROC_AEC == effectType) {
+            if (int status =
+                        (*effectHandle)->process_reverse(effectHandle, &farInBuffer, &outBuffer);
+                status != 0) {
+                ALOGE("\nError: Process reverse i = %d returned with error %d\n",
+                      (int)state.range(1), status);
+                return;
+            }
+        }
+    }
+    benchmark::ClobberMemory();
+
+    state.SetComplexityN(state.range(0));
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void preprocessingArgs(benchmark::internal::Benchmark* b) {
+    for (int i = 1; i <= (int)kNumChMasks; i++) {
+        for (int j = 0; j < (int)kNumEffectUuids; ++j) {
+            b->Args({i, j});
+        }
+    }
+}
+
+BENCHMARK(BM_PREPROCESSING)->Apply(preprocessingArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
new file mode 100644
index 0000000..d80b135
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -0,0 +1,49 @@
+// audio preprocessing unit test
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_preprocessing_license",
+    ],
+}
+
+cc_test {
+    name: "EffectPreprocessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
+    gtest: true,
+    test_suites: ["device-tests"],
+    srcs: [
+        "EffectPreprocessingTest.cpp",
+        "EffectTestHelper.cpp",
+    ],
+    static_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+    ],
+}
+
+cc_test {
+    name: "AudioPreProcessingTest",
+    defaults: ["libaudiopreprocessing-defaults"],
+    gtest: false,
+    srcs: ["PreProcessingTest.cpp"],
+    static_libs: [
+        "libaudiopreprocessing",
+        "libaudioutils",
+    ],
+}
+
+cc_test {
+    name: "correlation",
+    host_supported: true,
+    srcs: ["correlation.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    gtest: false,
+}
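+
+// The gtest-based module above is typically run by module name, e.g. with
+// `atest EffectPreprocessingTest`; the non-gtest binaries are pushed to and
+// run directly on the device.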
diff --git a/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
new file mode 100644
index 0000000..07006a1
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <tuple>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <log/log.h>
+
+constexpr effect_uuid_t kAGCUuid = {
+        0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kAGC2Uuid = {
+        0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}};
+constexpr effect_uuid_t kAECUuid = {
+        0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+constexpr effect_uuid_t kNSUuid = {
+        0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+
+static bool isAGCEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAGCUuid;
+}
+static bool isAGC2Effect(const effect_uuid_t* uuid) {
+    return uuid == &kAGC2Uuid;
+}
+static bool isAECEffect(const effect_uuid_t* uuid) {
+    return uuid == &kAECUuid;
+}
+static bool isNSEffect(const effect_uuid_t* uuid) {
+    return uuid == &kNSUuid;
+}
+
+constexpr int kAGCTargetLevels[] = {0, -300, -500, -1000, -3100};
+
+constexpr int kAGCCompLevels[] = {0, -300, -500, -1000, -9000};
+
+constexpr size_t kAGC2FixedDigitalGains[] = {0, 3, 10, 20, 49};
+
+constexpr size_t kAGC2AdaptGigitalLevelEstimators[] = {0, 1};
+
+constexpr size_t kAGC2ExtraSaturationMargins[] = {0, 3, 10, 20, 100};
+
+constexpr size_t kAECEchoDelays[] = {0, 250, 500};
+
+constexpr size_t kNSLevels[] = {0, 1, 2, 3};
+
+struct AGCParams {
+    int targetLevel;
+    int compLevel;
+};
+
+struct AGC2Params {
+    size_t fixedDigitalGain;
+    size_t adaptDigiLevelEstimator;
+    size_t extraSaturationMargin;
+};
+
+struct AECParams {
+    size_t echoDelay;
+};
+
+struct NSParams {
+    size_t level;
+};
+
+struct PreProcParams {
+    const effect_uuid_t* uuid;
+    union {
+        AGCParams agcParams;
+        AGC2Params agc2Params;
+        AECParams aecParams;
+        NSParams nsParams;
+    };
+};
+
+// Create a list of pre-processing parameters to be used for testing
+static const std::vector<PreProcParams> kPreProcParams = [] {
+    std::vector<PreProcParams> result;
+
+    for (const auto targetLevel : kAGCTargetLevels) {
+        for (const auto compLevel : kAGCCompLevels) {
+            AGCParams agcParams = {.targetLevel = targetLevel, .compLevel = compLevel};
+            PreProcParams params = {.uuid = &kAGCUuid, .agcParams = agcParams};
+            result.push_back(params);
+        }
+    }
+
+    for (const auto fixedDigitalGain : kAGC2FixedDigitalGains) {
+        for (const auto adaptDigiLevelEstimator : kAGC2AdaptGigitalLevelEstimators) {
+            for (const auto extraSaturationMargin : kAGC2ExtraSaturationMargins) {
+                AGC2Params agc2Params = {.fixedDigitalGain = fixedDigitalGain,
+                                         .adaptDigiLevelEstimator = adaptDigiLevelEstimator,
+                                         .extraSaturationMargin = extraSaturationMargin};
+                PreProcParams params = {.uuid = &kAGC2Uuid, .agc2Params = agc2Params};
+                result.push_back(params);
+            }
+        }
+    }
+
+    for (const auto echoDelay : kAECEchoDelays) {
+        AECParams aecParams = {.echoDelay = echoDelay};
+        PreProcParams params = {.uuid = &kAECUuid, .aecParams = aecParams};
+        result.push_back(params);
+    }
+
+    for (const auto level : kNSLevels) {
+        NSParams nsParams = {.level = level};
+        PreProcParams params = {.uuid = &kNSUuid, .nsParams = nsParams};
+        result.push_back(params);
+    }
+    return result;
+}();
+
+static const size_t kNumPreProcParams = std::size(kPreProcParams);
+
+void setPreProcParams(const effect_uuid_t* uuid, EffectTestHelper& effect, size_t paramIdx) {
+    const PreProcParams* params = &kPreProcParams[paramIdx];
+    if (isAGCEffect(uuid)) {
+        const AGCParams* agcParams = &params->agcParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_TARGET_LEVEL, agcParams->targetLevel));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC_PARAM_COMP_GAIN, agcParams->compLevel));
+    } else if (isAGC2Effect(uuid)) {
+        const AGC2Params* agc2Params = &params->agc2Params;
+        ASSERT_NO_FATAL_FAILURE(
+                effect.setParam(AGC2_PARAM_FIXED_DIGITAL_GAIN, agc2Params->fixedDigitalGain));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+                                                agc2Params->adaptDigiLevelEstimator));
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+                                                agc2Params->extraSaturationMargin));
+    } else if (isAECEffect(uuid)) {
+        const AECParams* aecParams = &params->aecParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(AEC_PARAM_ECHO_DELAY, aecParams->echoDelay));
+    } else if (isNSEffect(uuid)) {
+        const NSParams* nsParams = &params->nsParams;
+        ASSERT_NO_FATAL_FAILURE(effect.setParam(NS_PARAM_LEVEL, nsParams->level));
+    }
+}
+
+typedef std::tuple<int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mParamIdx(std::get<3>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " chMask: " << mChMask << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    EffectTestHelper effect(mUuid, mChMask, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, effect, mParamIdx));
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<int16_t> input(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> output(mTotalFrameCount * mChannelCount);
+    std::vector<int16_t> farInput(mTotalFrameCount * mChannelCount);
+    std::minstd_rand gen(mChMask);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : farInput) {
+            farIn = dis(gen);
+        }
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(effect.process_reverse(farInput.data(), output.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+typedef std::tuple<int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(mSampleRate * EffectTestHelper::kTenMilliSecVal),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<1>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mParamIdx(std::get<2>(GetParam())),
+          mUuid(kPreProcParams[mParamIdx].uuid){};
+
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const size_t mParamIdx;
+    const effect_uuid_t* mUuid;
+};
+
+// Compares first channel in multi-channel output to mono output when same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate
+                                    << " loopCount: " << mLoopCount << " paramIdx " << mParamIdx);
+
+    // Initialize mono input buffer with deterministic pseudo-random values
+    std::vector<int16_t> monoInput(mTotalFrameCount);
+    std::vector<int16_t> monoFarInput(mTotalFrameCount);
+
+    std::minstd_rand gen(mSampleRate);
+    std::uniform_int_distribution<int16_t> dis(INT16_MIN, INT16_MAX);
+    for (auto& in : monoInput) {
+        in = dis(gen);
+    }
+    if (isAECEffect(mUuid)) {
+        for (auto& farIn : monoFarInput) {
+            farIn = dis(gen);
+        }
+    }
+
+    // Apply effect on mono channel
+    EffectTestHelper monoEffect(mUuid, AUDIO_CHANNEL_INDEX_MASK_1, mSampleRate, mLoopCount);
+
+    ASSERT_NO_FATAL_FAILURE(monoEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(monoEffect.setConfig(isAECEffect(mUuid)));
+    ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, monoEffect, mParamIdx));
+
+    std::vector<int16_t> monoOutput(mTotalFrameCount);
+    ASSERT_NO_FATAL_FAILURE(
+            monoEffect.process(monoInput.data(), monoOutput.data(), isAECEffect(mUuid)));
+    if (isAECEffect(mUuid)) {
+        ASSERT_NO_FATAL_FAILURE(monoEffect.process_reverse(monoFarInput.data(), monoOutput.data()));
+    }
+    ASSERT_NO_FATAL_FAILURE(monoEffect.releaseEffect());
+
+    for (size_t chMask : EffectTestHelper::kChMasks) {
+        size_t channelCount = audio_channel_count_from_in_mask(chMask);
+
+        EffectTestHelper testEffect(mUuid, chMask, mSampleRate, mLoopCount);
+
+        ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+        ASSERT_NO_FATAL_FAILURE(testEffect.setConfig(isAECEffect(mUuid)));
+        ASSERT_NO_FATAL_FAILURE(setPreProcParams(mUuid, testEffect, mParamIdx));
+
+        std::vector<int16_t> testInput(mTotalFrameCount * channelCount);
+        std::vector<int16_t> testFarInput(mTotalFrameCount * channelCount);
+
+        // Repeat mono channel data to all the channels
+        // adjust_channels() zero fills channels > 2, hence can't be used here
+        for (size_t i = 0; i < mTotalFrameCount; ++i) {
+            auto* fpInput = &testInput[i * channelCount];
+            std::fill(fpInput, fpInput + channelCount, monoInput[i]);
+        }
+        if (isAECEffect(mUuid)) {
+            for (size_t i = 0; i < mTotalFrameCount; ++i) {
+                auto* fpFarInput = &testFarInput[i * channelCount];
+                std::fill(fpFarInput, fpFarInput + channelCount, monoFarInput[i]);
+            }
+        }
+
+        std::vector<int16_t> testOutput(mTotalFrameCount * channelCount);
+        ASSERT_NO_FATAL_FAILURE(
+                testEffect.process(testInput.data(), testOutput.data(), isAECEffect(mUuid)));
+        if (isAECEffect(mUuid)) {
+            ASSERT_NO_FATAL_FAILURE(
+                    testEffect.process_reverse(testFarInput.data(), testOutput.data()));
+        }
+        ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+        // Adjust the test output to mono channel
+        std::vector<int16_t> monoTestOutput(mTotalFrameCount);
+        adjust_channels(testOutput.data(), channelCount, monoTestOutput.data(), FCC_1,
+                        sizeof(int16_t), mTotalFrameCount * sizeof(int16_t) * channelCount);
+
+        ASSERT_EQ(0, memcmp(monoOutput.data(), monoTestOutput.data(),
+                            mTotalFrameCount * sizeof(int16_t)))
+                << "Mono channel output does not match with reference output \n";
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        PreProcTestAll, SingleEffectComparisonTest,
+        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+                           ::testing::Range(0, (int)kNumPreProcParams)));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d", status);
+    return status;
+}
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.cpp b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..79200b6
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+void EffectTestHelper::createEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+    ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+    int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+    ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
+void EffectTestHelper::setConfig(bool configReverse) {
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+    config.inputCfg.channels = config.outputCfg.channels = mChMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                   &config, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+    if (configReverse) {
+        int status = (*mEffectHandle)
+                             ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE,
+                                       sizeof(effect_config_t), &config, &replySize, &reply);
+        ASSERT_EQ(status, 0) << "set_config_reverse returned an error " << status;
+        ASSERT_EQ(reply, 0) << "set_config_reverse reply non zero " << reply;
+    }
+
+    status = (*mEffectHandle)
+                     ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+    ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+    ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {type, value};
+    auto effectParam = (effect_param_t*)malloc(sizeof(effect_param_t) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    int status = (*mEffectHandle)
+                         ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                   sizeof(effect_param_t) + sizeof(paramData), effectParam,
+                                   &replySize, &reply);
+    free(effectParam);
+    ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+    ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
+void EffectTestHelper::process(int16_t* input, int16_t* output, bool setAecEchoDelay) {
+    audio_buffer_t inBuffer = {.frameCount = mFrameCount, .s16 = input};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        if (setAecEchoDelay) ASSERT_NO_FATAL_FAILURE(setParam(AEC_PARAM_ECHO_DELAY, kAECDelay));
+        int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        inBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
+
+void EffectTestHelper::process_reverse(int16_t* farInput, int16_t* output) {
+    audio_buffer_t farInBuffer = {.frameCount = mFrameCount, .s16 = farInput};
+    audio_buffer_t outBuffer = {.frameCount = mFrameCount, .s16 = output};
+    for (size_t i = 0; i < mLoopCount; i++) {
+        int status = (*mEffectHandle)->process_reverse(mEffectHandle, &farInBuffer, &outBuffer);
+        ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+        farInBuffer.s16 += mFrameCount * mChannelCount;
+        outBuffer.s16 += mFrameCount * mChannelCount;
+    }
+}
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.h b/media/libeffects/preprocessing/tests/EffectTestHelper.h
new file mode 100644
index 0000000..117cf7b
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_effects/effect_aec.h>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <climits>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+    double signal{};
+    double noise{};
+
+    for (size_t i = 0; i < count; ++i) {
+        const double value(ref[i]);
+        const double diff(tst[i] - value);
+        signal += value * value;
+        noise += diff * diff;
+    }
+    // Initialized to large value to handle
+    // cases where ref and tst match exactly
+    float snr = FLT_MAX;
+    if (signal > 0.0f && noise > 0.0f) {
+        snr = 10.f * log(signal / noise);
+    }
+    return snr;
+}
+
+class EffectTestHelper {
+  public:
+    EffectTestHelper(const effect_uuid_t* uuid, size_t chMask, size_t sampleRate, size_t loopCount)
+        : mUuid(uuid),
+          mChMask(chMask),
+          mChannelCount(audio_channel_count_from_in_mask(mChMask)),
+          mSampleRate(sampleRate),
+          mFrameCount(mSampleRate * kTenMilliSecVal),
+          mLoopCount(loopCount) {}
+    void createEffect();
+    void releaseEffect();
+    void setConfig(bool configReverse);
+    void setParam(uint32_t type, uint32_t val);
+    void process(int16_t* input, int16_t* output, bool setAecEchoDelay);
+    void process_reverse(int16_t* farInput, int16_t* output);
+
+    // Corresponds to SNR for 1 bit difference between two int16_t signals
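+    // (i.e. roughly 20 * log10(2^15) ~= 90.309 dB)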
+    static constexpr float kSNRThreshold = 90.308998;
+
+    static constexpr audio_channel_mask_t kChMasks[] = {
+            AUDIO_CHANNEL_IN_MONO,
+            AUDIO_CHANNEL_IN_STEREO,
+            AUDIO_CHANNEL_IN_FRONT_BACK,
+            AUDIO_CHANNEL_IN_6,
+            AUDIO_CHANNEL_IN_2POINT0POINT2,
+            AUDIO_CHANNEL_IN_2POINT1POINT2,
+            AUDIO_CHANNEL_IN_3POINT0POINT2,
+            AUDIO_CHANNEL_IN_3POINT1POINT2,
+            AUDIO_CHANNEL_IN_5POINT1,
+            AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+            AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+    };
+
+    static constexpr float kTenMilliSecVal = 0.01;
+
+    static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+    static constexpr size_t kSampleRates[] = {8000,  11025, 12000, 16000, 22050,
+                                              24000, 32000, 44100, 48000};
+
+    static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+    static constexpr size_t kLoopCounts[] = {1, 4};
+
+    static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+    static constexpr size_t kAECDelay = 0;
+
+  private:
+    const effect_uuid_t* mUuid;
+    const size_t mChMask;
+    const size_t mChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    effect_handle_t mEffectHandle{};
+};
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
new file mode 100644
index 0000000..3bd93f8
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -0,0 +1,532 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <getopt.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/stat.h>
+#include <vector>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <audio_utils/channels.h>
+#include <log/log.h>
+
+// This is the only symbol that needs to be imported
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+//------------------------------------------------------------------------------
+// local definitions
+//------------------------------------------------------------------------------
+
+// types of pre processing modules
+enum PreProcId {
+    PREPROC_AGC,  // Automatic Gain Control
+    PREPROC_AGC2,  // Automatic Gain Control 2
+    PREPROC_AEC,  // Acoustic Echo Canceler
+    PREPROC_NS,   // Noise Suppressor
+    PREPROC_NUM_EFFECTS
+};
+
+enum PreProcParams {
+    ARG_HELP = 1,
+    ARG_INPUT,
+    ARG_OUTPUT,
+    ARG_FAR,
+    ARG_FS,
+    ARG_CH_MASK,
+    ARG_AGC_TGT_LVL,
+    ARG_AGC_COMP_LVL,
+    ARG_AEC_DELAY,
+    ARG_NS_LVL,
+    ARG_AGC2_GAIN,
+    ARG_AGC2_LVL,
+    ARG_AGC2_SAT_MGN,
+    ARG_FILE_CHANNELS,
+    ARG_MONO_MODE
+};
+
+struct preProcConfigParams_t {
+    int samplingFreq = 16000;
+    audio_channel_mask_t chMask = AUDIO_CHANNEL_IN_MONO;
+    int nsLevel = 0;         // a value between 0-3
+    int agcTargetLevel = 3;  // in dB
+    int agcCompLevel = 9;    // in dB
+    float agc2Gain = 0.f;              // in dB
+    float agc2SaturationMargin = 2.f;  // in dB
+    int agc2Level = 0;                 // either kRms(0) or kPeak(1)
+    int aecDelay = 0;  // in ms
+    int fileChannels = 1;
+    int monoMode = 0;
+};
+
+const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
+        {0xaa8130e0, 0x66fc, 0x11e0, 0xbad0, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // agc uuid
+        {0x89f38e65, 0xd4d2, 0x4d64, 0xad0e, {0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}},  // agc2 uuid
+        {0xbb392ec0, 0x8d4d, 0x11e0, 0xa896, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // aec uuid
+        {0xc06c8400, 0x8e06, 0x11e0, 0x9cb6, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},  // ns  uuid
+};
+
+constexpr audio_channel_mask_t kPreProcConfigChMask[] = {
+        AUDIO_CHANNEL_IN_MONO,
+        AUDIO_CHANNEL_IN_STEREO,
+        AUDIO_CHANNEL_IN_FRONT_BACK,
+        AUDIO_CHANNEL_IN_6,
+        AUDIO_CHANNEL_IN_2POINT0POINT2,
+        AUDIO_CHANNEL_IN_2POINT1POINT2,
+        AUDIO_CHANNEL_IN_3POINT0POINT2,
+        AUDIO_CHANNEL_IN_3POINT1POINT2,
+        AUDIO_CHANNEL_IN_5POINT1,
+        AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO,
+        AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO,
+        AUDIO_CHANNEL_IN_VOICE_CALL_MONO,
+};
+
+constexpr int kPreProcConfigChMaskCount = std::size(kPreProcConfigChMask);
+
+void printUsage() {
+    printf("\nUsage: ");
+    printf("\n     <executable> [options]\n");
+    printf("\nwhere options are, ");
+    printf("\n     --input <inputfile>");
+    printf("\n           path to the input file");
+    printf("\n     --output <outputfile>");
+    printf("\n           path to the output file");
+    printf("\n     --help");
+    printf("\n           Prints this usage information");
+    printf("\n     --fs <sampling_freq>");
+    printf("\n           Sampling frequency in Hz, default 16000.");
+    printf("\n     --ch_mask <channel_mask>\n");
+    printf("\n         0  - AUDIO_CHANNEL_IN_MONO");
+    printf("\n         1  - AUDIO_CHANNEL_IN_STEREO");
+    printf("\n         2  - AUDIO_CHANNEL_IN_FRONT_BACK");
+    printf("\n         3  - AUDIO_CHANNEL_IN_6");
+    printf("\n         4  - AUDIO_CHANNEL_IN_2POINT0POINT2");
+    printf("\n         5  - AUDIO_CHANNEL_IN_2POINT1POINT2");
+    printf("\n         6  - AUDIO_CHANNEL_IN_3POINT0POINT2");
+    printf("\n         7  - AUDIO_CHANNEL_IN_3POINT1POINT2");
+    printf("\n         8  - AUDIO_CHANNEL_IN_5POINT1");
+    printf("\n         9  - AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO");
+    printf("\n         10 - AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO ");
+    printf("\n         11 - AUDIO_CHANNEL_IN_VOICE_CALL_MONO ");
+    printf("\n         default 0");
+    printf("\n     --far <farend_file>");
+    printf("\n           Path to far-end file needed for echo cancellation");
+    printf("\n     --aec");
+    printf("\n           Enable Echo Cancellation, default disabled");
+    printf("\n     --ns");
+    printf("\n           Enable Noise Suppression, default disabled");
+    printf("\n     --agc");
+    printf("\n           Enable Gain Control, default disabled");
+    printf("\n     --agc2");
+    printf("\n           Enable Gain Controller 2, default disabled");
+    printf("\n     --ns_lvl <ns_level>");
+    printf("\n           Noise Suppression level in dB, default value 0dB");
+    printf("\n     --agc_tgt_lvl <target_level>");
+    printf("\n           AGC Target Level in dB, default value 3dB");
+    printf("\n     --agc_comp_lvl <comp_level>");
+    printf("\n           AGC Comp Level in dB, default value 9dB");
+    printf("\n     --agc2_gain <fixed_digital_gain>");
+    printf("\n           AGC Fixed Digital Gain in dB, default value 0dB");
+    printf("\n     --agc2_lvl <level_estimator>");
+    printf("\n           AGC Adaptive Digital Level Estimator, default value kRms");
+    printf("\n     --agc2_sat_mgn <saturation_margin>");
+    printf("\n           AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
+    printf("\n     --aec_delay <delay>");
+    printf("\n           AEC delay value in ms, default value 0ms");
+    printf("\n     --fch <fileChannels>");
+    printf("\n           number of channels in the input file");
+    printf("\n     --mono <Mono Mode>");
+    printf("\n           Mode to make data of all channels the same as first channel");
+    printf("\n");
+}
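+
+// Example invocation (binary name as pushed by the accompanying test script, file
+// names hypothetical), enabling AGC2 and NS on a mono 16 kHz raw PCM file:
+//   AudioPreProcessingTest --input in.raw --output out.raw --fs 16000 --fch 1 \
+//       --agc2 --ns --ns_lvl 2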
+
+constexpr float kTenMilliSecVal = 0.01f;  // effects are processed in 10 ms frames
+
+int preProcCreateEffect(effect_handle_t* pEffectHandle, uint32_t effectType,
+                        effect_config_t* pConfig, int sessionId, int ioId) {
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kPreProcUuids[effectType],
+                                                                 sessionId, ioId, pEffectHandle);
+        status != 0) {
+        ALOGE("Audio Preprocessing create returned an error = %d\n", status);
+        return EXIT_FAILURE;
+    }
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
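+    // AEC also needs the reverse-stream (far-end) configuration; send it before the
+    // primary stream configuration.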
+    if (effectType == PREPROC_AEC) {
+        (**pEffectHandle)
+                ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG_REVERSE, sizeof(effect_config_t),
+                          pConfig, &replySize, &reply);
+    }
+    (**pEffectHandle)
+            ->command(*pEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), pConfig,
+                      &replySize, &reply);
+    return reply;
+}
+
+int preProcSetConfigParam(uint32_t paramType, uint32_t paramValue, effect_handle_t effectHandle) {
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    uint32_t paramData[2] = {paramType, paramValue};
+    effect_param_t* effectParam = (effect_param_t*)malloc(sizeof(*effectParam) + sizeof(paramData));
+    memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+    effectParam->psize = sizeof(paramData[0]);
+    effectParam->vsize = sizeof(paramData[1]);
+    (*effectHandle)
+            ->command(effectHandle, EFFECT_CMD_SET_PARAM, sizeof(effect_param_t), effectParam,
+                      &replySize, &reply);
+    free(effectParam);
+    return reply;
+}
+
+int main(int argc, const char* argv[]) {
+    if (argc == 1) {
+        printUsage();
+        return EXIT_FAILURE;
+    }
+
+    // Print the arguments passed
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
+
+    const char* inputFile = nullptr;
+    const char* outputFile = nullptr;
+    const char* farFile = nullptr;
+    int effectEn[PREPROC_NUM_EFFECTS] = {0};
+    struct preProcConfigParams_t preProcCfgParams {};
+
+    const option long_opts[] = {
+            {"help", no_argument, nullptr, ARG_HELP},
+            {"input", required_argument, nullptr, ARG_INPUT},
+            {"output", required_argument, nullptr, ARG_OUTPUT},
+            {"far", required_argument, nullptr, ARG_FAR},
+            {"fs", required_argument, nullptr, ARG_FS},
+            {"ch_mask", required_argument, nullptr, ARG_CH_MASK},
+            {"agc_tgt_lvl", required_argument, nullptr, ARG_AGC_TGT_LVL},
+            {"agc_comp_lvl", required_argument, nullptr, ARG_AGC_COMP_LVL},
+            {"agc2_gain", required_argument, nullptr, ARG_AGC2_GAIN},
+            {"agc2_lvl", required_argument, nullptr, ARG_AGC2_LVL},
+            {"agc2_sat_mgn", required_argument, nullptr, ARG_AGC2_SAT_MGN},
+            {"aec_delay", required_argument, nullptr, ARG_AEC_DELAY},
+            {"ns_lvl", required_argument, nullptr, ARG_NS_LVL},
+            {"aec", no_argument, &effectEn[PREPROC_AEC], 1},
+            {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
+            {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
+            {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+            {"fch", required_argument, nullptr, ARG_FILE_CHANNELS},
+            {"mono", no_argument, &preProcCfgParams.monoMode, 1},
+            {nullptr, 0, nullptr, 0},
+    };
+
+    while (true) {
+        const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
+        if (opt == -1) {
+            break;
+        }
+        switch (opt) {
+            case ARG_HELP:
+                printUsage();
+                return 0;
+            case ARG_INPUT: {
+                inputFile = (char*)optarg;
+                break;
+            }
+            case ARG_OUTPUT: {
+                outputFile = (char*)optarg;
+                break;
+            }
+            case ARG_FAR: {
+                farFile = (char*)optarg;
+                break;
+            }
+            case ARG_FS: {
+                preProcCfgParams.samplingFreq = atoi(optarg);
+                break;
+            }
+            case ARG_CH_MASK: {
+                int chMaskIdx = atoi(optarg);
+                if (chMaskIdx < 0 or chMaskIdx >= kPreProcConfigChMaskCount) {
+                    ALOGE("Channel Mask index %d out of range [0, %d]\n", chMaskIdx,
+                          kPreProcConfigChMaskCount - 1);
+                    printUsage();
+                    return EXIT_FAILURE;
+                }
+                preProcCfgParams.chMask = kPreProcConfigChMask[chMaskIdx];
+                break;
+            }
+            case ARG_AGC_TGT_LVL: {
+                preProcCfgParams.agcTargetLevel = atoi(optarg);
+                break;
+            }
+            case ARG_AGC_COMP_LVL: {
+                preProcCfgParams.agcCompLevel = atoi(optarg);
+                break;
+            }
+            case ARG_AGC2_GAIN: {
+                preProcCfgParams.agc2Gain = atof(optarg);
+                break;
+            }
+            case ARG_AGC2_LVL: {
+                preProcCfgParams.agc2Level = atoi(optarg);
+                break;
+            }
+            case ARG_AGC2_SAT_MGN: {
+                preProcCfgParams.agc2SaturationMargin = atof(optarg);
+                break;
+            }
+            case ARG_AEC_DELAY: {
+                preProcCfgParams.aecDelay = atoi(optarg);
+                break;
+            }
+            case ARG_NS_LVL: {
+                preProcCfgParams.nsLevel = atoi(optarg);
+                break;
+            }
+            case ARG_FILE_CHANNELS: {
+                preProcCfgParams.fileChannels = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
+                preProcCfgParams.monoMode = 1;
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (inputFile == nullptr) {
+        ALOGE("Error: missing input file\n");
+        printUsage();
+        return EXIT_FAILURE;
+    }
+
+    std::unique_ptr<FILE, decltype(&fclose)> inputFp(fopen(inputFile, "rb"), &fclose);
+    if (inputFp == nullptr) {
+        ALOGE("Cannot open input file %s\n", inputFile);
+        return EXIT_FAILURE;
+    }
+
+    std::unique_ptr<FILE, decltype(&fclose)> farFp(fopen(farFile, "rb"), &fclose);
+    std::unique_ptr<FILE, decltype(&fclose)> outputFp(fopen(outputFile, "wb"), &fclose);
+    if (effectEn[PREPROC_AEC]) {
+        if (farFile == nullptr) {
+            ALOGE("Far end signal file required for echo cancellation \n");
+            return EXIT_FAILURE;
+        }
+        if (farFp == nullptr) {
+            ALOGE("Cannot open far end stream file %s\n", farFile);
+            return EXIT_FAILURE;
+        }
+        struct stat statInput, statFar;
+        (void)fstat(fileno(inputFp.get()), &statInput);
+        (void)fstat(fileno(farFp.get()), &statFar);
+        if (statInput.st_size != statFar.st_size) {
+            ALOGE("Near and far end signals are of different sizes");
+            return EXIT_FAILURE;
+        }
+    }
+    if (outputFile != nullptr && outputFp == nullptr) {
+        ALOGE("Cannot open output file %s\n", outputFile);
+        return EXIT_FAILURE;
+    }
+
+    int32_t sessionId = 1;
+    int32_t ioId = 1;
+    effect_handle_t effectHandle[PREPROC_NUM_EFFECTS] = {nullptr};
+    effect_config_t config;
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = preProcCfgParams.samplingFreq;
+    config.inputCfg.channels = config.outputCfg.channels = preProcCfgParams.chMask;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+
+    // Create all the effect handles
+    for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+        if (int status = preProcCreateEffect(&effectHandle[i], i, &config, sessionId, ioId);
+            status != 0) {
+            ALOGE("Create effect call returned error %i", status);
+            return EXIT_FAILURE;
+        }
+    }
+
+    for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+        if (effectEn[i] == 1) {
+            int reply = 0;
+            uint32_t replySize = sizeof(reply);
+            (*effectHandle[i])
+                    ->command(effectHandle[i], EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+            if (reply != 0) {
+                ALOGE("Command enable call returned error %d\n", reply);
+                return EXIT_FAILURE;
+            }
+        }
+    }
+
+    // Set Config Params of the effects
+    if (effectEn[PREPROC_AGC]) {
+        if (int status = preProcSetConfigParam(AGC_PARAM_TARGET_LEVEL,
+                                               (uint32_t)preProcCfgParams.agcTargetLevel,
+                                               effectHandle[PREPROC_AGC]);
+            status != 0) {
+            ALOGE("Invalid AGC Target Level. Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+        if (int status = preProcSetConfigParam(AGC_PARAM_COMP_GAIN,
+                                               (uint32_t)preProcCfgParams.agcCompLevel,
+                                               effectHandle[PREPROC_AGC]);
+            status != 0) {
+            ALOGE("Invalid AGC Comp Gain. Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+    }
+    if (effectEn[PREPROC_AGC2]) {
+        if (int status = preProcSetConfigParam(AGC2_PARAM_FIXED_DIGITAL_GAIN,
+                                               (float)preProcCfgParams.agc2Gain,
+                                               effectHandle[PREPROC_AGC2]);
+            status != 0) {
+            ALOGE("Invalid AGC2 Fixed Digital Gain. Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+        if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
+                                               (uint32_t)preProcCfgParams.agc2Level,
+                                               effectHandle[PREPROC_AGC2]);
+            status != 0) {
+            ALOGE("Invalid AGC2 Level Estimator. Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+        if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
+                                               (float)preProcCfgParams.agc2SaturationMargin,
+                                               effectHandle[PREPROC_AGC2]);
+            status != 0) {
+            ALOGE("Invalid AGC2 Saturation Margin. Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+    }
+    if (effectEn[PREPROC_NS]) {
+        if (int status = preProcSetConfigParam(NS_PARAM_LEVEL, (uint32_t)preProcCfgParams.nsLevel,
+                                               effectHandle[PREPROC_NS]);
+            status != 0) {
+            ALOGE("Invalid Noise Suppression level Error %d\n", status);
+            return EXIT_FAILURE;
+        }
+    }
+
+    // Processing loop: read the input (and far-end, for AEC) in 10 ms frames and run
+    // them through every enabled effect.
+    const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
+    const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+    const int fileChannelCount = preProcCfgParams.fileChannels;
+    const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int inFrameSize = fileChannelCount * sizeof(short);
+    int frameCounter = 0;
+    while (true) {
+        std::vector<short> in(frameLength * ioChannelCount);
+        std::vector<short> out(frameLength * ioChannelCount);
+        std::vector<short> farIn(frameLength * ioChannelCount);
+        size_t samplesRead = fread(in.data(), inFrameSize, frameLength, inputFp.get());
+        if (samplesRead == 0) {
+            break;
+        }
+        if (fileChannelCount != ioChannelCount) {
+            adjust_channels(in.data(), fileChannelCount, in.data(), ioChannelCount, sizeof(short),
+                            frameLength * inFrameSize);
+            if (preProcCfgParams.monoMode == 1) {
+                for (int i = 0; i < frameLength; ++i) {
+                    auto* fp = &in[i * ioChannelCount];
+                    std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                }
+            }
+        }
+        audio_buffer_t inputBuffer, outputBuffer;
+        audio_buffer_t farInBuffer{};
+        inputBuffer.frameCount = frameLength;
+        outputBuffer.frameCount = frameLength;
+        inputBuffer.s16 = in.data();
+        outputBuffer.s16 = out.data();
+
+        if (farFp != nullptr) {
+            samplesRead = fread(farIn.data(), inFrameSize, frameLength, farFp.get());
+            if (samplesRead == 0) {
+                break;
+            }
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(farIn.data(), fileChannelCount, farIn.data(), ioChannelCount,
+                                sizeof(short), frameLength * inFrameSize);
+                if (preProcCfgParams.monoMode == 1) {
+                    for (int i = 0; i < frameLength; ++i) {
+                        auto* fp = &farIn[i * ioChannelCount];
+                        std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                    }
+                }
+            }
+
+            farInBuffer.frameCount = frameLength;
+            farInBuffer.s16 = farIn.data();
+        }
+
+        for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+            if (effectEn[i] == 1) {
+                if (i == PREPROC_AEC) {
+                    if (int status = preProcSetConfigParam(AEC_PARAM_ECHO_DELAY,
+                                                           (uint32_t)preProcCfgParams.aecDelay,
+                                                           effectHandle[PREPROC_AEC]);
+                        status != 0) {
+                        ALOGE("preProcSetConfigParam returned Error %d\n", status);
+                        return EXIT_FAILURE;
+                    }
+                }
+                if (int status = (*effectHandle[i])
+                                         ->process(effectHandle[i], &inputBuffer, &outputBuffer);
+                    status != 0) {
+                    ALOGE("\nError: Process i = %d returned with error %d\n", i, status);
+                    return EXIT_FAILURE;
+                }
+                if (i == PREPROC_AEC) {
+                    if (int status = (*effectHandle[i])
+                                             ->process_reverse(effectHandle[i], &farInBuffer,
+                                                               &outputBuffer);
+                        status != 0) {
+                        ALOGE("\nError: Process reverse i = %d returned with error %d\n", i,
+                              status);
+                        return EXIT_FAILURE;
+                    }
+                }
+            }
+        }
+        if (outputFp != nullptr) {
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(out.data(), ioChannelCount, out.data(), fileChannelCount,
+                                sizeof(short), frameLength * ioFrameSize);
+            }
+            size_t samplesWritten =
+                    fwrite(out.data(), inFrameSize, outputBuffer.frameCount, outputFp.get());
+            if (samplesWritten != outputBuffer.frameCount) {
+                ALOGE("\nError: Output file writing failed");
+                break;
+            }
+        }
+        frameCounter += frameLength;
+    }
+    printf("frameCounter: [%d]\n", frameCounter);
+    // Release all the effect handles created
+    for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
+        if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
+            status != 0) {
+            ALOGE("Audio Preprocessing release returned an error = %d\n", status);
+            return EXIT_FAILURE;
+        }
+    }
+    return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..35da13e
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/AudioPreProcessingTest"
+
+echo "========================================"
+echo "testing PreProcessing modules"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+adb push $OUT/testcases/snr/arm64/snr $testdir
+
+E_VAL=1
+if [ -z "$1" ]
+then
+    cmds=("adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+          "adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+)
+elif [ "$1" == "32" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+elif [ "$1" == "64" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+else
+    echo ""
+    echo "Invalid \"val\""
+    echo "Usage:"
+    echo "      "$0" [val]"
+    echo "      where, val can be either 32 or 64."
+    echo ""
+    echo "      If val is not specified then both 32 bit and 64 bit binaries"
+    echo "      are tested."
+    exit $E_VAL
+fi
+
+flags_arr=(
+    "--agc --mono"
+    "--ns --mono"
+    "--agc2 --mono"
+    "--aec --mono"
+)
+
+fs_arr=(
+    8000
+    11025
+    12000
+    16000
+    22050
+    24000
+    32000
+    44100
+    48000
+)
+
+# run multichannel effects at different configs, saving only the mono channel
+error_count=0
+test_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for fs in ${fs_arr[*]}
+        do
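+            # chMask indices 0-7 map to the first eight entries of the channel mask
+            # table in AudioPreProcessingTest (mono up through 3.1.2).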
+            for chMask in {0..7}
+            do
+                adb shell $testdir/AudioPreProcessingTest $flags \
+                    --i $testdir/sinesweepraw.raw --far $testdir/sinesweepraw.raw \
+                    --output $testdir/sinesweep_$((chMask))_$((fs)).raw --ch_mask $chMask \
+                    --fs $fs --fch 1
+
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error shell_ret here is zero: $shell_ret"
+                    ((++error_count))
+                fi
+
+                # Because --mono replicates channel 0 and the output is saved with a
+                # single channel, the output for every channel mask should be identical.
+                if  [[ "$chMask" -gt 1 ]]
+                then
+                    adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                        $testdir/sinesweep_$((chMask))_$((fs)).raw
+                fi
+
+                # cmp returns a non-zero status on mismatch.
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error: $shell_ret"
+                    ((++error_count))
+                fi
+                ((++test_count))
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$test_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libeffects/preprocessing/tests/correlation.cpp b/media/libeffects/preprocessing/tests/correlation.cpp
new file mode 100644
index 0000000..eb56fc3
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/correlation.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+#include <iostream>
+#include <memory>
+#include <utility>
+#include <vector>
+
+constexpr int kMinLoopLimitValue = 1;
+constexpr int kNumPeaks = 3;
+
+/*!
+  \brief            Compute the length-normalized correlation of two signals
+
+  \param sigX             Pointer to signal 1
+  \param sigY             Pointer to signal 2
+  \param len              Length of the signals
+  \param enableCrossCorr  Set to 1 to compute the correlation over all lags
+                          (cross-correlation); otherwise only lag 0 is evaluated
+
+  \return           A pair: a vector of correlation peak indices and a vector of the
+                    corresponding correlation peak values
+*/
+
+static std::pair<std::vector<int>, std::vector<float>> correlation(const int16_t* sigX,
+                                                                   const int16_t* sigY, int len,
+                                                                   int16_t enableCrossCorr) {
+    float maxCorrVal = 0.f, prevCorrVal = 0.f;
+    int delay = 0, peakIndex = 0, flag = 0;
+    int loopLim = (1 == enableCrossCorr) ? len : kMinLoopLimitValue;
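+    // With cross-correlation disabled, only lag 0 is evaluated, i.e. the zero-lag
+    // (length-normalized) correlation of the two signals.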
+    std::vector<int> peakIndexVect(kNumPeaks, 0);
+    std::vector<float> peakValueVect(kNumPeaks, 0.f);
+    for (int i = 0; i < loopLim; i++) {
+        float corrVal = 0.f;
+        for (int j = i; j < len; j++) {
+            corrVal += (float)(sigX[j] * sigY[j - i]);
+        }
+        corrVal /= len - i;
+        if (corrVal > maxCorrVal) {
+            delay = i;
+            maxCorrVal = corrVal;
+        }
+        // Correlation peaks are expected to be observed at equal intervals. The interval length is
+        // expected to match with wave period.
+        // The following block of code saves the first kNumPeaks number of peaks and the index at
+        // which they occur.
+        if (peakIndex < kNumPeaks) {
+            if (corrVal > prevCorrVal) {
+                peakIndexVect[peakIndex] = i;
+                peakValueVect[peakIndex] = corrVal;
+                flag = 0;
+            } else if (0 == flag) {
+                peakIndex++;
+                flag = 1;
+            }
+        }
+        if (peakIndex == kNumPeaks) break;
+        prevCorrVal = corrVal;
+    }
+    return {peakIndexVect, peakValueVect};
+}
+
+void printUsage() {
+    printf("\nUsage: ");
+    printf("\n     correlation <firstFile> <secondFile> [enableCrossCorr]\n");
+    printf("\nwhere, \n     <firstFile>       is the first file name");
+    printf("\n     <secondFile>      is the second file name");
+    printf("\n     [enableCrossCorr] is flag to set for cross-correlation (Default 1)\n\n");
+}
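+
+// Example invocation (hypothetical file names), comparing an AGC output against its
+// input with cross-correlation enabled:
+//   correlation sinesweep_in.raw sinesweep_agc_out.raw 1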
+
+int main(int argc, const char* argv[]) {
+    if (argc < 3) {
+        printUsage();
+        return EXIT_FAILURE;
+    }
+
+    std::unique_ptr<FILE, decltype(&fclose)> fInput1(fopen(argv[1], "rb"), &fclose);
+    if (fInput1.get() == NULL) {
+        printf("\nError: missing file %s\n", argv[1]);
+        return EXIT_FAILURE;
+    }
+    std::unique_ptr<FILE, decltype(&fclose)> fInput2(fopen(argv[2], "rb"), &fclose);
+    if (fInput2.get() == NULL) {
+        printf("\nError: missing file %s\n", argv[2]);
+        return EXIT_FAILURE;
+    }
+    int16_t enableCrossCorr = (4 == argc) ? atoi(argv[3]) : 1;
+
+    fseek(fInput1.get(), 0L, SEEK_END);
+    unsigned int fileSize1 = ftell(fInput1.get());
+    rewind(fInput1.get());
+    fseek(fInput2.get(), 0L, SEEK_END);
+    unsigned int fileSize2 = ftell(fInput2.get());
+    rewind(fInput2.get());
+    if (fileSize1 != fileSize2) {
+        printf("\nError: File sizes different\n");
+        return EXIT_FAILURE;
+    }
+
+    size_t numFrames = fileSize1 / sizeof(int16_t);
+    std::unique_ptr<int16_t[]> inBuffer1(new int16_t[numFrames]());
+    std::unique_ptr<int16_t[]> inBuffer2(new int16_t[numFrames]());
+
+    if (numFrames != fread(inBuffer1.get(), sizeof(int16_t), numFrames, fInput1.get())) {
+        printf("\nError: Unable to read %zu samples from file %s\n", numFrames, argv[1]);
+        return EXIT_FAILURE;
+    }
+
+    if (numFrames != fread(inBuffer2.get(), sizeof(int16_t), numFrames, fInput2.get())) {
+        printf("\nError: Unable to read %zu samples from file %s\n", numFrames, argv[2]);
+        return EXIT_FAILURE;
+    }
+
+    auto pairAutoCorr1 = correlation(inBuffer1.get(), inBuffer1.get(), numFrames, enableCrossCorr);
+    auto pairAutoCorr2 = correlation(inBuffer2.get(), inBuffer2.get(), numFrames, enableCrossCorr);
+
+    // The following block compares the pitch period (the spacing between correlation
+    // peaks) of the two input signals. The periods must match because AGC applies only
+    // gain and performs no frequency-related computation.
+    bool pitchMatch = false;
+    for (unsigned i = 0; i < pairAutoCorr1.first.size() - 1; i++) {
+        if (pairAutoCorr1.first[i + 1] - pairAutoCorr1.first[i] !=
+            pairAutoCorr2.first[i + 1] - pairAutoCorr2.first[i]) {
+            pitchMatch = false;
+            break;
+        }
+        pitchMatch = true;
+    }
+    if (pitchMatch) {
+        printf("Auto-correlation  : Pitch matched\n");
+    } else {
+        printf("Auto-correlation  : Pitch mismatch\n");
+        return EXIT_FAILURE;
+    }
+
+    if (enableCrossCorr) {
+        auto pairCrossCorr =
+                correlation(inBuffer1.get(), inBuffer2.get(), numFrames, enableCrossCorr);
+
+        // Since AGC applies only gain, the pitch information obtained from the
+        // cross-correlation of input and output is expected to be the same as the
+        // input signal's pitch information.
+        pitchMatch = false;
+        for (unsigned i = 0; i < pairCrossCorr.first.size() - 1; i++) {
+            if (pairAutoCorr1.first[i + 1] - pairAutoCorr1.first[i] !=
+                pairCrossCorr.first[i + 1] - pairCrossCorr.first[i]) {
+                pitchMatch = false;
+                break;
+            }
+            pitchMatch = true;
+        }
+        if (pitchMatch) {
+            printf("Cross-correlation : Pitch matched for AGC\n");
+            if (pairAutoCorr1.second[0]) {
+                printf("Expected gain     : (maxCrossCorr / maxAutoCorr1) = %f\n",
+                       pairCrossCorr.second[0] / pairAutoCorr1.second[0]);
+            }
+        } else {
+            printf("Cross-correlation : Pitch mismatch\n");
+            return EXIT_FAILURE;
+        }
+    }
+
+    return EXIT_SUCCESS;
+}
diff --git a/media/libeffects/proxy/Android.bp b/media/libeffects/proxy/Android.bp
index c6abb9e..6256eda 100644
--- a/media/libeffects/proxy/Android.bp
+++ b/media/libeffects/proxy/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libeffectproxy",
     relative_install_path: "soundfx",
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index 42e44f0..be9f8c0 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -30,7 +30,7 @@
 #include <media/EffectsFactoryApi.h>
 
 namespace android {
-// This is a dummy proxy descriptor just to return to Factory during the initial
+// This is a stub proxy descriptor just to return to Factory during the initial
 // GetDescriptor call. Later in the factory, it is replaced with the
 // SW sub effect descriptor
 // proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b
@@ -116,6 +116,16 @@
         pContext->sube[SUB_FX_OFFLOAD] = sube[1];
         pContext->desc[SUB_FX_OFFLOAD] = desc[1];
         pContext->aeli[SUB_FX_OFFLOAD] = aeli[1];
+    } else {
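+        // The proxy expects exactly one sub effect flagged EFFECT_FLAG_HW_ACC_TUNNEL
+        // (the offloaded one) and one without it (the software one); anything else is
+        // an invalid configuration, so free everything allocated so far and bail out.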
+        ALOGE("Both effects have (or don't have) EFFECT_FLAG_HW_ACC_TUNNEL flag");
+        delete[] sube;
+        delete[] desc;
+        delete[] aeli;
+        delete[] pContext->sube;
+        delete[] pContext->desc;
+        delete[] pContext->aeli;
+        delete pContext;
+        return -EINVAL;
     }
     delete[] desc;
     delete[] aeli;
diff --git a/media/libeffects/res/raw/sinesweepraw.raw b/media/libeffects/res/raw/sinesweepraw.raw
new file mode 100644
index 0000000..c0d48ce
--- /dev/null
+++ b/media/libeffects/res/raw/sinesweepraw.raw
Binary files differ
diff --git a/media/libeffects/testlibs/EffectsMath.h b/media/libeffects/testlibs/EffectsMath.h
index 2a44399..dd43b49 100644
--- a/media/libeffects/testlibs/EffectsMath.h
+++ b/media/libeffects/testlibs/EffectsMath.h
@@ -251,22 +251,6 @@
 */
 
 /* use LFO_GAIN_TO_CENTS to convert the LFO gain value to cents */
-#if 0
-#define    DOUBLE_LOG2_10    (double) (3.32192809488736)    /* log2(10) */
-
-#define    DOUBLE_LFO_GAIN_TO_CENTS    (double)                \
-    (                                                        \
-                (DOUBLE_LOG2_10) *                            \
-                1200.0    /                                    \
-                20.0                                        \
-    )
-
-#define    LFO_GAIN_TO_CENTS    (int32_t)                        \
-    (                                                        \
-                DOUBLE_LFO_GAIN_TO_CENTS *                    \
-                (0x1L << NUM_EG1_FRAC_BITS)                    \
-    )
-#endif
 
 #define LFO_GAIN_TO_CENTS (int32_t) (1671981156L >> (23 - NUM_EG1_FRAC_BITS))
 
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
new file mode 100644
index 0000000..8dd6789
--- /dev/null
+++ b/media/libeffects/visualizer/Android.bp
@@ -0,0 +1,51 @@
+// Visualizer library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_visualizer_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libeffects_visualizer_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library_shared {
+    name: "libvisualizer",
+
+    vendor: true,
+
+    srcs: [
+        "EffectVisualizer.cpp",
+    ],
+
+    cflags: [
+        "-O2",
+        "-fvisibility=hidden",
+
+        "-DBUILD_FLOAT",
+        "-DSUPPORT_MC",
+
+        "-Wall",
+        "-Werror",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    relative_install_path: "soundfx",
+
+    header_libs: [
+        "libaudioeffects",
+        "libaudioutils_headers",
+    ],
+}
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
deleted file mode 100644
index 35e2f3d..0000000
--- a/media/libeffects/visualizer/Android.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# Visualizer library
-include $(CLEAR_VARS)
-
-LOCAL_VENDOR_MODULE := true
-LOCAL_SRC_FILES:= \
-	EffectVisualizer.cpp
-
-LOCAL_CFLAGS+= -O2 -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror
-LOCAL_CFLAGS += -DBUILD_FLOAT -DSUPPORT_MC
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	liblog \
-	libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libvisualizer
-
-LOCAL_C_INCLUDES := \
-	$(call include-path-for, audio-effects) \
-	$(call include-path-for, audio-utils)
-
-
-LOCAL_HEADER_LIBRARIES += libhardware_headers
-include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index f838892..1551e33 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -157,7 +157,7 @@
     if (pConfig->inputCfg.format != pConfig->outputCfg.format) return -EINVAL;
     const uint32_t channelCount = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
 #ifdef SUPPORT_MC
-    if (channelCount < 1 || channelCount > FCC_8) return -EINVAL;
+    if (channelCount < 1 || channelCount > FCC_LIMIT) return -EINVAL;
 #else
     if (channelCount != FCC_2) return -EINVAL;
 #endif
diff --git a/media/libheif/Android.bp b/media/libheif/Android.bp
index 7d5a4eb..6a3427e 100644
--- a/media/libheif/Android.bp
+++ b/media/libheif/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libheif",
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 3fd3fc3..e98d7d8 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libmedia_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_headers {
     name: "libmedia_headers",
     vendor_available: true,
@@ -5,18 +22,42 @@
 
     export_include_dirs: ["include"],
     header_libs: [
+        "av-headers",
         "libbase_headers",
         "libgui_headers",
         "libstagefright_headers",
         "media_plugin_headers",
     ],
     export_header_lib_headers: [
+        "av-headers",
         "libgui_headers",
         "libstagefright_headers",
         "media_plugin_headers",
     ],
 }
 
+cc_library_headers {
+    name: "libmedia_datasource_headers",
+    export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    header_libs: [
+        "libbinder_headers",
+    ],
+    export_header_lib_headers: [
+        "libbinder_headers",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+}
+
 filegroup {
     name: "libmedia_omx_aidl",
     srcs: [
@@ -33,28 +74,6 @@
     path: "aidl",
 }
 
-filegroup {
-    name: "resourcemanager_aidl",
-    srcs: [
-        "aidl/android/media/IResourceManagerClient.aidl",
-        "aidl/android/media/IResourceManagerService.aidl",
-        "aidl/android/media/MediaResourceType.aidl",
-        "aidl/android/media/MediaResourceSubType.aidl",
-        "aidl/android/media/MediaResourceParcel.aidl",
-        "aidl/android/media/MediaResourcePolicyParcel.aidl",
-    ],
-    path: "aidl",
-}
-
-aidl_interface {
-    name: "resourcemanager_aidl_interface",
-    unstable: true,
-    local_include_dir: "aidl",
-    srcs: [
-        ":resourcemanager_aidl",
-    ],
-}
-
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
@@ -203,7 +222,7 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
         "media_ndk_headers",
     ],
 
@@ -220,6 +239,14 @@
         ],
         cfi: true,
     },
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
@@ -309,15 +336,19 @@
     header_libs: [
         "libstagefright_headers",
         "media_ndk_headers",
+        "jni_headers",
     ],
 
     export_header_lib_headers: [
         "libstagefright_headers",
         "media_ndk_headers",
+        "jni_headers",
     ],
 
     shared_libs: [
         "android.hidl.token@1.0-utils",
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
         "liblog",
         "libcutils",
         "libprocessgroup",
@@ -334,6 +365,7 @@
         "libaudioclient",
         "libmedia_codeclist",
         "libmedia_omx",
+        "framework-permission-aidl-cpp",
     ],
 
     export_shared_lib_headers: [
@@ -342,14 +374,17 @@
         "libandroidicu",
         //"libsonivox",
         "libmedia_omx",
+        "framework-permission-aidl-cpp",
     ],
 
     static_libs: [
         "resourcemanager_aidl_interface-ndk_platform",
+        "framework-permission-aidl-cpp",
     ],
 
     export_static_lib_headers: [
         "resourcemanager_aidl_interface-ndk_platform",
+        "framework-permission-aidl-cpp",
     ],
 
     export_include_dirs: [
@@ -372,3 +407,39 @@
         cfi: true,
     },
 }
+
+cc_library_static {
+    name: "libmedia_ndkformatpriv",
+
+    host_supported: true,
+
+    srcs: [
+        "NdkMediaFormatPriv.cpp",
+        "NdkMediaErrorPriv.cpp",
+    ],
+
+    header_libs: [
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
+        "media_ndk_headers",
+    ],
+
+    cflags: [
+        "-DEXPORT=__attribute__((visibility(\"default\")))",
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["include"],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media"
+    ],
+}
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
index 5c6b981..64ba977 100644
--- a/media/libmedia/CharacterEncodingDetector.cpp
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -268,7 +268,7 @@
 
                 ucnv_convertEx(mUtf8Conv, conv, &target, target + targetLength,
                         &source, source + strlen(source),
-                        NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
+                        NULL, NULL, NULL, NULL, true, true, &status);
 
                 if (U_FAILURE(status)) {
                     ALOGE("ucnv_convertEx failed: %d", status);
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 39caf53..eb436d1 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -38,7 +38,9 @@
     FLAGS,
     SETMEDIACAS,
     NAME,
-    GETMETRICS
+    GETMETRICS,
+    SETENTRYPOINT,
+    SETLOGSESSIONID
 };
 
 class BpMediaExtractor : public BpInterface<IMediaExtractor> {
@@ -142,6 +144,20 @@
         }
         return nm;
     }
+
+    virtual status_t setEntryPoint(EntryPoint entryPoint) {
+        Parcel data, reply;
+        data.writeInterfaceToken(BpMediaExtractor::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(entryPoint));
+        return remote()->transact(SETENTRYPOINT, data, &reply);
+    }
+
+    virtual status_t setLogSessionId(const String8& logSessionId) {
+        Parcel data, reply;
+        data.writeInterfaceToken(BpMediaExtractor::getInterfaceDescriptor());
+        data.writeString8(logSessionId);
+        return remote()->transact(SETLOGSESSIONID, data, &reply);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaExtractor, "android.media.IMediaExtractor");
@@ -232,6 +248,26 @@
             reply->writeString8(nm);
             return NO_ERROR;
         }
+        case SETENTRYPOINT: {
+            ALOGV("setEntryPoint");
+            CHECK_INTERFACE(IMediaExtractor, data, reply);
+            int32_t entryPoint;
+            status_t err = data.readInt32(&entryPoint);
+            if (err == OK) {
+                setEntryPoint(EntryPoint(entryPoint));
+            }
+            return err;
+        }
+        case SETLOGSESSIONID: {
+            ALOGV("setLogSessionId");
+            CHECK_INTERFACE(IMediaExtractor, data, reply);
+            String8 logSessionId;
+            status_t status = data.readString8(&logSessionId);
+            if (status == OK) {
+              setLogSessionId(logSessionId);
+            }
+            return status;
+        }
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 20bc23d..c89c023 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -20,6 +20,7 @@
 #include <sys/types.h>
 
 #include <android/IDataSource.h>
+#include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <media/AudioResamplerPublic.h>
@@ -34,58 +35,37 @@
 
 using media::VolumeShaper;
 
-enum {
-    DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
-    SET_DATA_SOURCE_URL,
-    SET_DATA_SOURCE_FD,
-    SET_DATA_SOURCE_STREAM,
-    SET_DATA_SOURCE_CALLBACK,
-    SET_BUFFERING_SETTINGS,
-    GET_BUFFERING_SETTINGS,
-    PREPARE_ASYNC,
-    START,
-    STOP,
-    IS_PLAYING,
-    SET_PLAYBACK_SETTINGS,
-    GET_PLAYBACK_SETTINGS,
-    SET_SYNC_SETTINGS,
-    GET_SYNC_SETTINGS,
-    PAUSE,
-    SEEK_TO,
-    GET_CURRENT_POSITION,
-    GET_DURATION,
-    RESET,
-    NOTIFY_AT,
-    SET_AUDIO_STREAM_TYPE,
-    SET_LOOPING,
-    SET_VOLUME,
-    INVOKE,
-    SET_METADATA_FILTER,
-    GET_METADATA,
-    SET_AUX_EFFECT_SEND_LEVEL,
-    ATTACH_AUX_EFFECT,
-    SET_VIDEO_SURFACETEXTURE,
-    SET_PARAMETER,
-    GET_PARAMETER,
-    SET_RETRANSMIT_ENDPOINT,
-    GET_RETRANSMIT_ENDPOINT,
-    SET_NEXT_PLAYER,
-    APPLY_VOLUME_SHAPER,
-    GET_VOLUME_SHAPER_STATE,
-    // Modular DRM
-    PREPARE_DRM,
-    RELEASE_DRM,
-    // AudioRouting
-    SET_OUTPUT_DEVICE,
-    GET_ROUTED_DEVICE_ID,
-    ENABLE_AUDIO_DEVICE_CALLBACK,
-};
-
 // ModDrm helpers
-static void readVector(const Parcel& reply, Vector<uint8_t>& vector) {
-    uint32_t size = reply.readUint32();
-    vector.insertAt((size_t)0, size);
-    reply.read(vector.editArray(), size);
+static status_t readVector(const Parcel& reply, Vector<uint8_t>& vector) {
+    uint32_t size = 0;
+    status_t status = reply.readUint32(&size);
+    if (status == OK) {
+        status = size <= reply.dataAvail() ? OK : BAD_VALUE;
+    }
+    if (status == OK) {
+        status = vector.insertAt((size_t) 0, size) >= 0 ? OK : NO_MEMORY;
+    }
+    if (status == OK) {
+        status = reply.read(vector.editArray(), size);
+    }
+    if (status != OK) {
+        char errorMsg[100];
+        char buganizerId[] = "173720767";
+        snprintf(errorMsg,
+                sizeof(errorMsg),
+                "%s: failed to read array. Size: %d, status: %d.",
+                __func__,
+                size,
+                status);
+        android_errorWriteWithInfoLog(
+                /* safetyNet tag= */ 0x534e4554,
+                buganizerId,
+                IPCThreadState::self()->getCallingUid(),
+                errorMsg,
+                strlen(errorMsg));
+        ALOGE("%s (b/%s)", errorMsg, buganizerId);
+    }
+    return status;
 }
 
 static void writeVector(Parcel& data, Vector<uint8_t> const& vector) {
@@ -161,6 +141,15 @@
         return reply.readInt32();
     }
 
+    status_t setDataSource(const String8& rtpParams) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeString8(rtpParams);
+        remote()->transact(SET_DATA_SOURCE_RTP, data, &reply);
+
+        return reply.readInt32();
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)
     {
@@ -685,6 +674,12 @@
             }
             return NO_ERROR;
         }
+        case SET_DATA_SOURCE_RTP: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            String8 rtpParams = data.readString8();
+            reply->writeInt32(setDataSource(rtpParams));
+            return NO_ERROR;
+        }
         case SET_VIDEO_SURFACETEXTURE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             sp<IGraphicBufferProducer> bufferProducer =
@@ -958,11 +953,13 @@
         case PREPARE_DRM: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
 
-            uint8_t uuid[16];
+            uint8_t uuid[16] = {};
             data.read(uuid, sizeof(uuid));
             Vector<uint8_t> drmSessionId;
-            readVector(data, drmSessionId);
-
+            status_t status = readVector(data, drmSessionId);
+            if (status != OK) {
+              return status;
+            }
             uint32_t result = prepareDrm(uuid, drmSessionId);
             reply->writeInt32(result);
             return OK;
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index bd18a40..07c0ac5 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -35,6 +35,8 @@
 
 namespace android {
 
+using android::content::AttributionSourceState;
+
 enum {
     CREATE = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_MEDIA_RECORDER,
@@ -62,21 +64,23 @@
     }
 
     virtual sp<IMediaPlayer> create(
-            const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId) {
+            const sp<IMediaPlayerClient>& client, audio_session_t audioSessionId,
+            const AttributionSourceState& attributionSource) {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
         data.writeStrongBinder(IInterface::asBinder(client));
         data.writeInt32(audioSessionId);
+        data.writeParcelable(attributionSource);
 
         remote()->transact(CREATE, data, &reply);
         return interface_cast<IMediaPlayer>(reply.readStrongBinder());
     }
 
-    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName)
+    virtual sp<IMediaRecorder> createMediaRecorder(const AttributionSourceState& attributionSource)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
-        data.writeString16(opPackageName);
+        data.writeParcelable(attributionSource);
         remote()->transact(CREATE_MEDIA_RECORDER, data, &reply);
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
@@ -127,14 +131,23 @@
             sp<IMediaPlayerClient> client =
                 interface_cast<IMediaPlayerClient>(data.readStrongBinder());
             audio_session_t audioSessionId = (audio_session_t) data.readInt32();
-            sp<IMediaPlayer> player = create(client, audioSessionId);
+            AttributionSourceState attributionSource;
+            status_t status = data.readParcelable(&attributionSource);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            sp<IMediaPlayer> player = create(client, audioSessionId, attributionSource);
             reply->writeStrongBinder(IInterface::asBinder(player));
             return NO_ERROR;
         } break;
         case CREATE_MEDIA_RECORDER: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
-            const String16 opPackageName = data.readString16();
-            sp<IMediaRecorder> recorder = createMediaRecorder(opPackageName);
+            AttributionSourceState attributionSource;
+            status_t status = data.readParcelable(&attributionSource);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            sp<IMediaRecorder> recorder = createMediaRecorder(attributionSource);
             reply->writeStrongBinder(IInterface::asBinder(recorder));
             return NO_ERROR;
         } break;
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index ac86f72..154988d 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -66,6 +66,7 @@
     ENABLE_AUDIO_DEVICE_CALLBACK,
     GET_ACTIVE_MICROPHONES,
     GET_PORT_ID,
+    GET_RTP_DATA_USAGE,
     SET_PREFERRED_MICROPHONE_DIRECTION,
     SET_PREFERRED_MICROPHONE_FIELD_DIMENSION,
     SET_PRIVACY_SENSITIVE,
@@ -476,6 +477,23 @@
         *portId = (audio_port_handle_t)reply.readInt32();
         return NO_ERROR;
     }
+
+    status_t getRtpDataUsage(uint64_t *bytes)
+    {
+        ALOGV("getRtpDataUsage");
+        if (bytes == nullptr) {
+            return BAD_VALUE;
+        }
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        status_t status = remote()->transact(GET_RTP_DATA_USAGE, data, &reply);
+        if (status != OK
+                || (status = (status_t)reply.readInt32()) != NO_ERROR) {
+            *bytes = 0;
+            return status;
+        }
+        return reply.readUint64(bytes);
+    }
 };
 
 IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
@@ -759,6 +777,17 @@
             }
             return NO_ERROR;
         }
+        case GET_RTP_DATA_USAGE: {
+            ALOGV("GET_RTP_DATA_USAGE");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            uint64_t bytes;
+            status_t status = getRtpDataUsage(&bytes);
+            reply->writeInt32(status);
+            if (status == NO_ERROR) {
+                reply->writeUint64(bytes);
+            }
+            return NO_ERROR;
+        }
         case SET_PREFERRED_MICROPHONE_DIRECTION: {
             ALOGV("SET_PREFERRED_MICROPHONE_DIRECTION");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 637322f..67d33fa 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -63,7 +63,7 @@
                      searchDirs[1] + fileName,
                      searchDirs[2] + fileName,
                      searchDirs[3] + fileName,
-                     "system/etc/media_profiles_V1_0.xml" // System fallback
+                     "system/etc/media_profiles.xml" // System fallback
                    };
         }();
     static std::array<char const*, 5> const cPaths = {
@@ -121,6 +121,11 @@
     {"1080p", CAMCORDER_QUALITY_1080P},
     {"2160p", CAMCORDER_QUALITY_2160P},
     {"qvga", CAMCORDER_QUALITY_QVGA},
+    {"vga", CAMCORDER_QUALITY_VGA},
+    {"4kdci", CAMCORDER_QUALITY_4KDCI},
+    {"qhd", CAMCORDER_QUALITY_QHD},
+    {"2k", CAMCORDER_QUALITY_2K},
+    {"8kuhd", CAMCORDER_QUALITY_8KUHD},
 
     {"timelapselow",  CAMCORDER_QUALITY_TIME_LAPSE_LOW},
     {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
@@ -131,6 +136,10 @@
     {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P},
     {"timelapse2160p", CAMCORDER_QUALITY_TIME_LAPSE_2160P},
     {"timelapseqvga", CAMCORDER_QUALITY_TIME_LAPSE_QVGA},
+    {"timelapsevga", CAMCORDER_QUALITY_TIME_LAPSE_VGA},
+    {"timelapse4kdci", CAMCORDER_QUALITY_TIME_LAPSE_4KDCI},
+    {"timelapseqhd", CAMCORDER_QUALITY_TIME_LAPSE_QHD},
+    {"timelapse2k", CAMCORDER_QUALITY_TIME_LAPSE_2K},
 
     {"highspeedlow",  CAMCORDER_QUALITY_HIGH_SPEED_LOW},
     {"highspeedhigh", CAMCORDER_QUALITY_HIGH_SPEED_HIGH},
@@ -138,19 +147,11 @@
     {"highspeed720p", CAMCORDER_QUALITY_HIGH_SPEED_720P},
     {"highspeed1080p", CAMCORDER_QUALITY_HIGH_SPEED_1080P},
     {"highspeed2160p", CAMCORDER_QUALITY_HIGH_SPEED_2160P},
-
-    // Vendor-specific profiles
-    {"vga", CAMCORDER_QUALITY_VGA},
-    {"4kdci", CAMCORDER_QUALITY_4KDCI},
-    {"timelapsevga", CAMCORDER_QUALITY_TIME_LAPSE_VGA},
-    {"timelapse4kdci", CAMCORDER_QUALITY_TIME_LAPSE_4KDCI},
     {"highspeedcif", CAMCORDER_QUALITY_HIGH_SPEED_CIF},
     {"highspeedvga", CAMCORDER_QUALITY_HIGH_SPEED_VGA},
     {"highspeed4kdci", CAMCORDER_QUALITY_HIGH_SPEED_4KDCI},
-    {"qhd", CAMCORDER_QUALITY_QHD},
-    {"2k", CAMCORDER_QUALITY_2k},
-    {"timelapseqhd", CAMCORDER_QUALITY_TIME_LAPSE_QHD},
-    {"timelapse2k", CAMCORDER_QUALITY_TIME_LAPSE_2k},
+
+    // Vendor-specific profiles
 };
 
 #if LOG_NDEBUG
@@ -168,6 +169,7 @@
     ALOGV("frame width: %d", codec.mFrameWidth);
     ALOGV("frame height: %d", codec.mFrameHeight);
     ALOGV("frame rate: %d", codec.mFrameRate);
+    ALOGV("profile: %d", codec.mProfile);
 }
 
 /*static*/ void
@@ -178,6 +180,7 @@
     ALOGV("bit rate: %d", codec.mBitRate);
     ALOGV("sample rate: %d", codec.mSampleRate);
     ALOGV("number of channels: %d", codec.mChannels);
+    ALOGV("profile: %d", codec.mProfile);
 }
 
 /*static*/ void
@@ -229,10 +232,11 @@
     return tag;
 }
 
-/*static*/ MediaProfiles::VideoCodec*
-MediaProfiles::createVideoCodec(const char **atts, MediaProfiles *profiles)
+/*static*/ void
+MediaProfiles::createVideoCodec(const char **atts, size_t natts, MediaProfiles *profiles)
 {
-    CHECK(!strcmp("codec",     atts[0]) &&
+    CHECK(natts >= 10 &&
+          !strcmp("codec",     atts[0]) &&
           !strcmp("bitRate",   atts[2]) &&
           !strcmp("width",     atts[4]) &&
           !strcmp("height",    atts[6]) &&
@@ -240,49 +244,69 @@
 
     const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
     const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+        ALOGE("MediaProfiles::createVideoCodec failed to locate codec %s", atts[1]);
+        return;
+    }
 
-    MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(static_cast<video_encoder>(codec),
-            atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), atoi(atts[9]));
-    logVideoCodec(*videoCodec);
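+    // An optional "profile" attribute may follow the mandatory ones; -1 means no
+    // profile was specified.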
+    int profile = -1;
+    if (natts >= 12 && !strcmp("profile", atts[10])) {
+        profile = atoi(atts[11]);
+    }
+
+    VideoCodec videoCodec {
+            static_cast<video_encoder>(codec),
+            atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), atoi(atts[9]), profile };
+    logVideoCodec(videoCodec);
 
     size_t nCamcorderProfiles;
     CHECK((nCamcorderProfiles = profiles->mCamcorderProfiles.size()) >= 1);
-    profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mVideoCodec = videoCodec;
-    return videoCodec;
+    profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mVideoCodecs.emplace_back(videoCodec);
 }
 
-/*static*/ MediaProfiles::AudioCodec*
-MediaProfiles::createAudioCodec(const char **atts, MediaProfiles *profiles)
+/*static*/ void
+MediaProfiles::createAudioCodec(const char **atts, size_t natts, MediaProfiles *profiles)
 {
-    CHECK(!strcmp("codec",      atts[0]) &&
+    CHECK(natts >= 8 &&
+          !strcmp("codec",      atts[0]) &&
           !strcmp("bitRate",    atts[2]) &&
           !strcmp("sampleRate", atts[4]) &&
           !strcmp("channels",   atts[6]));
     const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
     const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+        ALOGE("MediaProfiles::createAudioCodec failed to locate codec %s", atts[1]);
+        return;
+    }
 
-    MediaProfiles::AudioCodec *audioCodec =
-        new MediaProfiles::AudioCodec(static_cast<audio_encoder>(codec),
-            atoi(atts[3]), atoi(atts[5]), atoi(atts[7]));
-    logAudioCodec(*audioCodec);
+    int profile = -1;
+    if (natts >= 10 && !strcmp("profile", atts[8])) {
+        profile = atoi(atts[9]);
+    }
+
+    AudioCodec audioCodec{
+            static_cast<audio_encoder>(codec),
+            atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), profile };
+    logAudioCodec(audioCodec);
 
     size_t nCamcorderProfiles;
     CHECK((nCamcorderProfiles = profiles->mCamcorderProfiles.size()) >= 1);
-    profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mAudioCodec = audioCodec;
-    return audioCodec;
+    profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mAudioCodecs.emplace_back(audioCodec);
 }
+
 /*static*/ MediaProfiles::AudioDecoderCap*
-MediaProfiles::createAudioDecoderCap(const char **atts)
+MediaProfiles::createAudioDecoderCap(const char **atts, size_t natts)
 {
-    CHECK(!strcmp("name",    atts[0]) &&
+    CHECK(natts >= 4 &&
+          !strcmp("name",    atts[0]) &&
           !strcmp("enabled", atts[2]));
 
     const size_t nMappings = sizeof(sAudioDecoderNameMap)/sizeof(sAudioDecoderNameMap[0]);
     const int codec = findTagForName(sAudioDecoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createAudioDecoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::AudioDecoderCap *cap =
         new MediaProfiles::AudioDecoderCap(static_cast<audio_decoder>(codec));
@@ -291,14 +315,18 @@
 }
 
 /*static*/ MediaProfiles::VideoDecoderCap*
-MediaProfiles::createVideoDecoderCap(const char **atts)
+MediaProfiles::createVideoDecoderCap(const char **atts, size_t natts)
 {
-    CHECK(!strcmp("name",    atts[0]) &&
+    CHECK(natts >= 4 &&
+          !strcmp("name",    atts[0]) &&
           !strcmp("enabled", atts[2]));
 
     const size_t nMappings = sizeof(sVideoDecoderNameMap)/sizeof(sVideoDecoderNameMap[0]);
     const int codec = findTagForName(sVideoDecoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createVideoDecoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::VideoDecoderCap *cap =
         new MediaProfiles::VideoDecoderCap(static_cast<video_decoder>(codec));
@@ -307,9 +335,10 @@
 }
 
 /*static*/ MediaProfiles::VideoEncoderCap*
-MediaProfiles::createVideoEncoderCap(const char **atts)
+MediaProfiles::createVideoEncoderCap(const char **atts, size_t natts)
 {
-    CHECK(!strcmp("name",           atts[0])  &&
+    CHECK(natts >= 20 &&
+          !strcmp("name",           atts[0])  &&
           !strcmp("enabled",        atts[2])  &&
           !strcmp("minBitRate",     atts[4])  &&
           !strcmp("maxBitRate",     atts[6])  &&
@@ -322,7 +351,10 @@
 
     const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
     const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createVideoEncoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::VideoEncoderCap *cap =
         new MediaProfiles::VideoEncoderCap(static_cast<video_encoder>(codec),
@@ -333,9 +365,10 @@
 }
 
 /*static*/ MediaProfiles::AudioEncoderCap*
-MediaProfiles::createAudioEncoderCap(const char **atts)
+MediaProfiles::createAudioEncoderCap(const char **atts, size_t natts)
 {
-    CHECK(!strcmp("name",          atts[0])  &&
+    CHECK(natts >= 16 &&
+          !strcmp("name",          atts[0])  &&
           !strcmp("enabled",       atts[2])  &&
           !strcmp("minBitRate",    atts[4])  &&
           !strcmp("maxBitRate",    atts[6])  &&
@@ -346,7 +379,10 @@
 
     const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
     const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
-    CHECK(codec != -1);
+    if (codec == -1) {
+      ALOGE("MediaProfiles::createAudioEncoderCap failed to locate codec %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::AudioEncoderCap *cap =
         new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]),
@@ -356,9 +392,10 @@
 }
 
 /*static*/ output_format
-MediaProfiles::createEncoderOutputFileFormat(const char **atts)
+MediaProfiles::createEncoderOutputFileFormat(const char **atts, size_t natts)
 {
-    CHECK(!strcmp("name", atts[0]));
+    CHECK(natts >= 2 &&
+          !strcmp("name", atts[0]));
 
     const size_t nMappings =sizeof(sFileFormatMap)/sizeof(sFileFormatMap[0]);
     const int format = findTagForName(sFileFormatMap, nMappings, atts[1]);
@@ -377,20 +414,28 @@
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createCamcorderProfile(int cameraId, const char **atts, Vector<int>& cameraIds)
+MediaProfiles::createCamcorderProfile(
+        int cameraId, const char **atts, size_t natts, Vector<int>& cameraIds)
 {
-    CHECK(!strcmp("quality",    atts[0]) &&
+    CHECK(natts >= 6 &&
+          !strcmp("quality",    atts[0]) &&
           !strcmp("fileFormat", atts[2]) &&
           !strcmp("duration",   atts[4]));
 
     const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/
             sizeof(sCamcorderQualityNameMap[0]);
     const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]);
-    CHECK(quality != -1);
+    if (quality == -1) {
+      ALOGE("MediaProfiles::createCamcorderProfile failed to locate quality %s", atts[1]);
+      return nullptr;
+    }
 
     const size_t nFormatMappings = sizeof(sFileFormatMap)/sizeof(sFileFormatMap[0]);
     const int fileFormat = findTagForName(sFileFormatMap, nFormatMappings, atts[3]);
-    CHECK(fileFormat != -1);
+    if (fileFormat == -1) {
+      ALOGE("MediaProfiles::createCamcorderProfile failed to locate file format %s", atts[1]);
+      return nullptr;
+    }
 
     MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = cameraId;
@@ -416,9 +461,10 @@
     return NULL;
 }
 
-void MediaProfiles::addImageEncodingQualityLevel(int cameraId, const char** atts)
+void MediaProfiles::addImageEncodingQualityLevel(int cameraId, const char** atts, size_t natts)
 {
-    CHECK(!strcmp("quality", atts[0]));
+    CHECK(natts >= 2 &&
+          !strcmp("quality", atts[0]));
     int quality = atoi(atts[1]);
     ALOGV("%s: cameraId=%d, quality=%d", __func__, cameraId, quality);
     ImageEncodingQualityLevels *levels = findImageEncodingQualityLevels(cameraId);
@@ -433,18 +479,19 @@
 }
 
 /*static*/ int
-MediaProfiles::getCameraId(const char** atts)
+MediaProfiles::getCameraId(const char** atts, size_t natts)
 {
     if (!atts[0]) return 0;  // default cameraId = 0
-    CHECK(!strcmp("cameraId", atts[0]));
+    CHECK(natts >= 2 &&
+          !strcmp("cameraId", atts[0]));
     return atoi(atts[1]);
 }
 
-void MediaProfiles::addStartTimeOffset(int cameraId, const char** atts)
+void MediaProfiles::addStartTimeOffset(int cameraId, const char** atts, size_t natts)
 {
     int offsetTimeMs = 1000;
-    if (atts[2]) {
-        CHECK(!strcmp("startOffsetMs", atts[2]));
+    if (natts >= 3 && atts[2]) {
+        CHECK(natts >= 4 && !strcmp("startOffsetMs", atts[2]));
         offsetTimeMs = atoi(atts[3]);
     }
 
@@ -455,33 +502,58 @@
 /*static*/ void
 MediaProfiles::startElementHandler(void *userData, const char *name, const char **atts)
 {
-    MediaProfiles *profiles = (MediaProfiles *) userData;
+    // determine number of attributes
+    size_t natts = 0;
+    while (atts[natts]) {
+        ++natts;
+    }
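+    // Note: atts is a NULL-terminated array of alternating attribute names and
+    // values, so natts counts names and values together; an element with N
+    // attributes yields natts == 2 * N.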
+
+    MediaProfiles *profiles = (MediaProfiles *)userData;
     if (strcmp("Video", name) == 0) {
-        createVideoCodec(atts, profiles);
+        createVideoCodec(atts, natts, profiles);
     } else if (strcmp("Audio", name) == 0) {
-        createAudioCodec(atts, profiles);
+        createAudioCodec(atts, natts, profiles);
     } else if (strcmp("VideoEncoderCap", name) == 0 &&
+               natts >= 4 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mVideoEncoders.add(createVideoEncoderCap(atts));
+        MediaProfiles::VideoEncoderCap* cap = createVideoEncoderCap(atts, natts);
+        if (cap != nullptr) {
+            profiles->mVideoEncoders.add(cap);
+        }
     } else if (strcmp("AudioEncoderCap", name) == 0 &&
+               natts >= 4 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mAudioEncoders.add(createAudioEncoderCap(atts));
+        MediaProfiles::AudioEncoderCap* cap = createAudioEncoderCap(atts, natts);
+        if (cap != nullptr) {
+            profiles->mAudioEncoders.add(cap);
+        }
     } else if (strcmp("VideoDecoderCap", name) == 0 &&
+               natts >= 4 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mVideoDecoders.add(createVideoDecoderCap(atts));
+        MediaProfiles::VideoDecoderCap* cap = createVideoDecoderCap(atts, natts);
+        if (cap != nullptr) {
+            profiles->mVideoDecoders.add(cap);
+        }
     } else if (strcmp("AudioDecoderCap", name) == 0 &&
+               natts >= 4 &&
                strcmp("true", atts[3]) == 0) {
-        profiles->mAudioDecoders.add(createAudioDecoderCap(atts));
+        MediaProfiles::AudioDecoderCap* cap = createAudioDecoderCap(atts, natts);
+        if (cap != nullptr) {
+            profiles->mAudioDecoders.add(cap);
+        }
     } else if (strcmp("EncoderOutputFileFormat", name) == 0) {
-        profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts));
+        profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts, natts));
     } else if (strcmp("CamcorderProfiles", name) == 0) {
-        profiles->mCurrentCameraId = getCameraId(atts);
-        profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts);
+        profiles->mCurrentCameraId = getCameraId(atts, natts);
+        profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts, natts);
     } else if (strcmp("EncoderProfile", name) == 0) {
-        profiles->mCamcorderProfiles.add(
-            createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
+        MediaProfiles::CamcorderProfile* profile = createCamcorderProfile(
+                profiles->mCurrentCameraId, atts, natts, profiles->mCameraIds);
+        if (profile != nullptr) {
+            profiles->mCamcorderProfiles.add(profile);
+        }
     } else if (strcmp("ImageEncoding", name) == 0) {
-        profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
+        profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts, natts);
     }
 }
 
@@ -535,8 +607,20 @@
     initRequiredProfileRefs(mCameraIds);
 
     for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
-        int product = mCamcorderProfiles[i]->mVideoCodec->mFrameWidth *
-                      mCamcorderProfiles[i]->mVideoCodec->mFrameHeight;
+        // ensure at least one video and audio profile is added
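+        // (These fallback values mirror the built-in QCIF H.263 / AMR-NB
+        // defaults used by createDefaultCamcorderQcifProfile.)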
+        if (mCamcorderProfiles[i]->mVideoCodecs.empty()) {
+            mCamcorderProfiles[i]->mVideoCodecs.emplace_back(
+                    VIDEO_ENCODER_H263, 192000 /* bitrate */,
+                    176 /* width */, 144 /* height */, 20 /* frameRate */);
+        }
+        if (mCamcorderProfiles[i]->mAudioCodecs.empty()) {
+            mCamcorderProfiles[i]->mAudioCodecs.emplace_back(
+                    AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+                    8000 /* sampleRate */, 1 /* channels */);
+        }
+
+        int product = mCamcorderProfiles[i]->mVideoCodecs[0].mFrameWidth *
+                      mCamcorderProfiles[i]->mVideoCodecs[0].mFrameHeight;
 
         camcorder_quality quality = mCamcorderProfiles[i]->mQuality;
         int cameraId = mCamcorderProfiles[i]->mCameraId;
@@ -705,34 +789,35 @@
 /*static*/ MediaProfiles::CamcorderProfile*
 MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
 {
-    MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
-
-    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
     CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
     profile->mQuality = quality;
     profile->mDuration = 60;
-    profile->mVideoCodec = videoCodec;
-    profile->mAudioCodec = audioCodec;
+    profile->mVideoCodecs.emplace_back(
+            VIDEO_ENCODER_H263, 1000000 /* bitrate */,
+            176 /* width */, 144 /* height */, 20 /* frameRate */);
+    profile->mAudioCodecs.emplace_back(
+            AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+            8000 /* sampleRate */, 1 /* channels */);
+
     return profile;
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
 MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
 {
-    MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
-
-    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
     CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
     profile->mQuality = quality;
     profile->mDuration = 60;
-    profile->mVideoCodec = videoCodec;
-    profile->mAudioCodec = audioCodec;
+    profile->mVideoCodecs.emplace_back(
+            VIDEO_ENCODER_H263, 20000000 /* bitrate */,
+            720 /* width */, 480 /* height */, 20 /* frameRate */);
+    profile->mAudioCodecs.emplace_back(
+            AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+            8000 /* sampleRate */, 1 /* channels */);
     return profile;
 }
 
@@ -759,36 +844,34 @@
 /*static*/ MediaProfiles::CamcorderProfile*
 MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
 {
-    MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
-
-    MediaProfiles::AudioCodec *audioCodec =
-        new MediaProfiles::AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
-
-    MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+    CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
     profile->mQuality = quality;
     profile->mDuration = 30;
-    profile->mVideoCodec = videoCodec;
-    profile->mAudioCodec = audioCodec;
+    profile->mVideoCodecs.emplace_back(
+            VIDEO_ENCODER_H263, 192000 /* bitrate */,
+            176 /* width */, 144 /* height */, 20 /* frameRate */);
+    profile->mAudioCodecs.emplace_back(
+            AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+            8000 /* sampleRate */, 1 /* channels */);
     return profile;
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
 MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
 {
-    MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
-
-    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
     CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
     profile->mQuality = quality;
     profile->mDuration = 60;
-    profile->mVideoCodec = videoCodec;
-    profile->mAudioCodec = audioCodec;
+    profile->mVideoCodecs.emplace_back(
+            VIDEO_ENCODER_H263, 360000 /* bitrate */,
+            352 /* width */, 288 /* height */, 20 /* frameRate */);
+    profile->mAudioCodecs.emplace_back(
+            AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+            8000 /* sampleRate */, 1 /* channels */);
     return profile;
 }
 
@@ -1072,6 +1155,36 @@
     return index;
 }
 
+const MediaProfiles::CamcorderProfile *MediaProfiles::getCamcorderProfile(
+            int cameraId, camcorder_quality quality) const {
+    int index = getCamcorderProfileIndex(cameraId, quality);
+    if (index == -1) {
+        ALOGE("The given camcorder profile camera %d quality %d is not found",
+            cameraId, quality);
+        return nullptr;
+    }
+
+    return mCamcorderProfiles[index];
+}
+
+std::vector<const MediaProfiles::AudioCodec *>
+MediaProfiles::CamcorderProfile::getAudioCodecs() const {
+    std::vector<const MediaProfiles::AudioCodec *> res;
+    for (const MediaProfiles::AudioCodec &ac : mAudioCodecs) {
+        res.push_back(&ac);
+    }
+    return res;
+}
+
+std::vector<const MediaProfiles::VideoCodec *>
+MediaProfiles::CamcorderProfile::getVideoCodecs() const {
+    std::vector<const MediaProfiles::VideoCodec *> res;
+    for (const MediaProfiles::VideoCodec &vc : mVideoCodecs) {
+        res.push_back(&vc);
+    }
+    return res;
+}
+
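+// Illustrative usage of the accessors above (a sketch, not part of this change;
+// camera id 0 and CAMCORDER_QUALITY_HIGH are placeholder arguments):
+//
+//   MediaProfiles *mp = MediaProfiles::getInstance();
+//   const MediaProfiles::CamcorderProfile *p =
+//           mp->getCamcorderProfile(0 /* cameraId */, CAMCORDER_QUALITY_HIGH);
+//   if (p != nullptr) {
+//       for (const MediaProfiles::VideoCodec *vc : p->getVideoCodecs()) {
+//           ALOGV("codec %d profile %d", vc->getCodec(), vc->getProfile());
+//       }
+//   }
+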
 int MediaProfiles::getCamcorderProfileParamByName(const char *name,
                                                   int cameraId,
                                                   camcorder_quality quality) const
@@ -1088,15 +1201,15 @@
 
     if (!strcmp("duration", name)) return mCamcorderProfiles[index]->mDuration;
     if (!strcmp("file.format", name)) return mCamcorderProfiles[index]->mFileFormat;
-    if (!strcmp("vid.codec", name)) return mCamcorderProfiles[index]->mVideoCodec->mCodec;
-    if (!strcmp("vid.width", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameWidth;
-    if (!strcmp("vid.height", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameHeight;
-    if (!strcmp("vid.bps", name)) return mCamcorderProfiles[index]->mVideoCodec->mBitRate;
-    if (!strcmp("vid.fps", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameRate;
-    if (!strcmp("aud.codec", name)) return mCamcorderProfiles[index]->mAudioCodec->mCodec;
-    if (!strcmp("aud.bps", name)) return mCamcorderProfiles[index]->mAudioCodec->mBitRate;
-    if (!strcmp("aud.ch", name)) return mCamcorderProfiles[index]->mAudioCodec->mChannels;
-    if (!strcmp("aud.hz", name)) return mCamcorderProfiles[index]->mAudioCodec->mSampleRate;
+    if (!strcmp("vid.codec", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mCodec;
+    if (!strcmp("vid.width", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameWidth;
+    if (!strcmp("vid.height", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameHeight;
+    if (!strcmp("vid.bps", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mBitRate;
+    if (!strcmp("vid.fps", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameRate;
+    if (!strcmp("aud.codec", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mCodec;
+    if (!strcmp("aud.bps", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mBitRate;
+    if (!strcmp("aud.ch", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mChannels;
+    if (!strcmp("aud.hz", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mSampleRate;
 
     ALOGE("The given camcorder profile param id %d name %s is not found", cameraId, name);
     return -1;
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index 0936a99..ec52a49 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -35,7 +35,7 @@
     this->value = value;
 }
 
-MediaResource::MediaResource(Type type, const std::vector<int8_t> &id, int64_t value) {
+MediaResource::MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value) {
     this->type = type;
     this->subType = SubType::kUnspecifiedSubType;
     this->id = id;
@@ -43,11 +43,11 @@
 }
 
 //static
-MediaResource MediaResource::CodecResource(bool secure, bool video) {
+MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
     return MediaResource(
             secure ? Type::kSecureCodec : Type::kNonSecureCodec,
             video ? SubType::kVideoCodec : SubType::kAudioCodec,
-            1);
+            instanceCount);
 }
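+// Example (illustrative): request three non-secure video codec instances at once:
+//   MediaResource res = MediaResource::CodecResource(
+//           false /* secure */, true /* video */, 3 /* instanceCount */);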
 
 //static
@@ -66,11 +66,11 @@
 }
 
 //static
-MediaResource MediaResource::DrmSessionResource(const std::vector<int8_t> &id, int64_t value) {
+MediaResource MediaResource::DrmSessionResource(const std::vector<uint8_t> &id, int64_t value) {
     return MediaResource(Type::kDrmSession, id, value);
 }
 
-static String8 bytesToHexString(const std::vector<int8_t> &bytes) {
+static String8 bytesToHexString(const std::vector<uint8_t> &bytes) {
     String8 str;
     for (auto &b : bytes) {
         str.appendFormat("%02x", b);
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
index 12a87d1..f682b6e 100644
--- a/media/libmedia/MidiIoWrapper.cpp
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -18,8 +18,8 @@
 #define LOG_TAG "MidiIoWrapper"
 #include <utils/Log.h>
 
-#include <sys/stat.h>
 #include <fcntl.h>
+#include <sys/stat.h>
 #include <unistd.h>
 #include <algorithm>
 
diff --git a/media/libmedia/NdkMediaFormatPriv.cpp b/media/libmedia/NdkMediaFormatPriv.cpp
index 3a9fb8b..7983184 100644
--- a/media/libmedia/NdkMediaFormatPriv.cpp
+++ b/media/libmedia/NdkMediaFormatPriv.cpp
@@ -24,8 +24,6 @@
 #include <media/NdkMediaFormatPriv.h>
 #include <media/stagefright/foundation/AMessage.h>
 
-#include <jni.h>
-
 using namespace android;
 
 namespace android {
diff --git a/media/libmedia/TEST_MAPPING b/media/libmedia/TEST_MAPPING
new file mode 100644
index 0000000..65390ed
--- /dev/null
+++ b/media/libmedia/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test_mapping for frameworks/av/media/libmedia
+{
+  "presubmit": [
+    { "name": "CodecListTest" }
+  ]
+}
diff --git a/media/libmedia/aidl/android/media/IResourceManagerService.aidl b/media/libmedia/aidl/android/media/IResourceManagerService.aidl
deleted file mode 100644
index 1b2d522..0000000
--- a/media/libmedia/aidl/android/media/IResourceManagerService.aidl
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Copyright (c) 2019, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.IResourceManagerClient;
-import android.media.MediaResourceParcel;
-import android.media.MediaResourcePolicyParcel;
-
-/**
- * ResourceManagerService interface that keeps track of media resource
- * owned by clients, and reclaims resources based on configured policies
- * when necessary.
- *
- * {@hide}
- */
-interface IResourceManagerService {
-    const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
-            = "supports-multiple-secure-codecs";
-    const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
-            = "supports-secure-with-non-secure-codec";
-
-    /**
-     * Configure the ResourceManagerService to adopted particular policies when
-     * managing the resources.
-     *
-     * @param policies an array of policies to be adopted.
-     */
-    void config(in MediaResourcePolicyParcel[] policies);
-
-    /**
-     * Add a client to a process with a list of resources.
-     *
-     * @param pid pid of the client.
-     * @param uid uid of the client.
-     * @param clientId an identifier that uniquely identifies the client within the pid.
-     * @param client interface for the ResourceManagerService to call the client.
-     * @param resources an array of resources to be added.
-     */
-    void addResource(
-            int pid,
-            int uid,
-            long clientId,
-            IResourceManagerClient client,
-            in MediaResourceParcel[] resources);
-
-    /**
-     * Remove the listed resources from a client.
-     *
-     * @param pid pid from which the list of resources will be removed.
-     * @param clientId clientId within the pid from which the list of resources will be removed.
-     * @param resources an array of resources to be removed from the client.
-     */
-    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
-
-    /**
-     * Remove all resources from a client.
-     *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
-     */
-    void removeClient(int pid, long clientId);
-
-    /**
-     * Tries to reclaim resource from processes with lower priority than the
-     * calling process according to the requested resources.
-     *
-     * @param callingPid pid of the calling process.
-     * @param resources an array of resources to be reclaimed.
-     *
-     * @return true if the reclaim was successful and false otherwise.
-     */
-    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
-
-    /**
-     * Override the pid of original calling process with the pid of the process
-     * who actually use the requested resources.
-     *
-     * @param originalPid pid of the original calling process.
-     * @param newPid pid of the actual process who use the resources.
-     *        remove existing override on originalPid if newPid is -1.
-     */
-    void overridePid(int originalPid, int newPid);
-
-    /**
-     * Mark a client for pending removal
-     *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
-     */
-    void markClientForPendingRemoval(int pid, long clientId);
-}
diff --git a/media/libmedia/include/android/IMediaExtractor.h b/media/libmedia/include/android/IMediaExtractor.h
index 3e035ad..bb1eb99 100644
--- a/media/libmedia/include/android/IMediaExtractor.h
+++ b/media/libmedia/include/android/IMediaExtractor.h
@@ -63,6 +63,17 @@
     virtual status_t setMediaCas(const HInterfaceToken &casToken) = 0;
 
     virtual String8 name() = 0;
+
+    enum class EntryPoint {
+        SDK = 1,
+        NDK_WITH_JVM = 2,
+        NDK_NO_JVM = 3,
+        OTHER = 4,
+    };
+
+    virtual status_t setEntryPoint(EntryPoint entryPoint) = 0;
+
+    virtual status_t setLogSessionId(const String8& logSessionId) = 0;
 };
 
 
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index a4c0ec6..28684d1 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -59,6 +59,7 @@
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
     virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
+    virtual status_t        setDataSource(const String8& rtpParams) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
     virtual status_t        getBufferingSettings(
@@ -136,6 +137,56 @@
     virtual status_t        setOutputDevice(audio_port_handle_t deviceId) = 0;
     virtual status_t        getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
     virtual status_t        enableAudioDeviceCallback(bool enabled) = 0;
+protected:
+
+    friend class IMediaPlayerTest;
+    enum {
+        DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
+        SET_DATA_SOURCE_URL,
+        SET_DATA_SOURCE_FD,
+        SET_DATA_SOURCE_STREAM,
+        SET_DATA_SOURCE_CALLBACK,
+        SET_DATA_SOURCE_RTP,
+        SET_BUFFERING_SETTINGS,
+        GET_BUFFERING_SETTINGS,
+        PREPARE_ASYNC,
+        START,
+        STOP,
+        IS_PLAYING,
+        SET_PLAYBACK_SETTINGS,
+        GET_PLAYBACK_SETTINGS,
+        SET_SYNC_SETTINGS,
+        GET_SYNC_SETTINGS,
+        PAUSE,
+        SEEK_TO,
+        GET_CURRENT_POSITION,
+        GET_DURATION,
+        RESET,
+        NOTIFY_AT,
+        SET_AUDIO_STREAM_TYPE,
+        SET_LOOPING,
+        SET_VOLUME,
+        INVOKE,
+        SET_METADATA_FILTER,
+        GET_METADATA,
+        SET_AUX_EFFECT_SEND_LEVEL,
+        ATTACH_AUX_EFFECT,
+        SET_VIDEO_SURFACETEXTURE,
+        SET_PARAMETER,
+        GET_PARAMETER,
+        SET_RETRANSMIT_ENDPOINT,
+        GET_RETRANSMIT_ENDPOINT,
+        SET_NEXT_PLAYER,
+        APPLY_VOLUME_SHAPER,
+        GET_VOLUME_SHAPER_STATE,
+        // Modular DRM
+        PREPARE_DRM,
+        RELEASE_DRM,
+        // AudioRouting
+        SET_OUTPUT_DEVICE,
+        GET_ROUTED_DEVICE_ID,
+        ENABLE_AUDIO_DEVICE_CALLBACK,
+    };
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index f2e2060..6070673 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -27,6 +27,9 @@
 
 #include <media/IMediaPlayerClient.h>
 #include <media/IMediaMetadataRetriever.h>
+#include <android/content/AttributionSourceState.h>
+
+#include <string>
 
 namespace android {
 
@@ -44,10 +47,13 @@
 public:
     DECLARE_META_INTERFACE(MediaPlayerService);
 
-    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName) = 0;
+    virtual sp<IMediaRecorder> createMediaRecorder(
+        const android::content::AttributionSourceState &attributionSource) = 0;
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
-            audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
+            audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE,
+            const android::content::AttributionSourceState &attributionSource =
+                android::content::AttributionSourceState()) = 0;
     virtual sp<IMediaCodecList> getCodecList() const = 0;
 
     // Connects to a remote display.
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 651bd5e..6e69782 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -78,6 +78,7 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) = 0;
+    virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/MediaProfiles.h b/media/libmedia/include/media/MediaProfiles.h
index 4cc5b95..4a898e2 100644
--- a/media/libmedia/include/media/MediaProfiles.h
+++ b/media/libmedia/include/media/MediaProfiles.h
@@ -21,6 +21,8 @@
 #include <utils/threads.h>
 #include <media/mediarecorder.h>
 
+#include <vector>
+
 namespace android {
 
 enum camcorder_quality {
@@ -37,8 +39,9 @@
     CAMCORDER_QUALITY_VGA = 9,
     CAMCORDER_QUALITY_4KDCI = 10,
     CAMCORDER_QUALITY_QHD = 11,
-    CAMCORDER_QUALITY_2k = 12,
-    CAMCORDER_QUALITY_LIST_END = 12,
+    CAMCORDER_QUALITY_2K = 12,
+    CAMCORDER_QUALITY_8KUHD = 13,
+    CAMCORDER_QUALITY_LIST_END = 13,
 
     CAMCORDER_QUALITY_TIME_LAPSE_LIST_START = 1000,
     CAMCORDER_QUALITY_TIME_LAPSE_LOW  = 1000,
@@ -53,8 +56,9 @@
     CAMCORDER_QUALITY_TIME_LAPSE_VGA = 1009,
     CAMCORDER_QUALITY_TIME_LAPSE_4KDCI = 1010,
     CAMCORDER_QUALITY_TIME_LAPSE_QHD = 1011,
-    CAMCORDER_QUALITY_TIME_LAPSE_2k = 1012,
-    CAMCORDER_QUALITY_TIME_LAPSE_LIST_END = 1012,
+    CAMCORDER_QUALITY_TIME_LAPSE_2K = 1012,
+    CAMCORDER_QUALITY_TIME_LAPSE_8KUHD = 1013,
+    CAMCORDER_QUALITY_TIME_LAPSE_LIST_END = 1013,
 
     CAMCORDER_QUALITY_HIGH_SPEED_LIST_START = 2000,
     CAMCORDER_QUALITY_HIGH_SPEED_LOW  = 2000,
@@ -98,6 +102,193 @@
     static MediaProfiles* getInstance();
 
     /**
+     * Configuration for a video encoder.
+     */
+    struct VideoCodec {
+    public:
+        /**
+         * Constructs a video encoder configuration.
+         *
+         * @param codec codec type
+         * @param bitrate bitrate in bps
+         * @param frameWidth frame width in pixels
+         * @param frameHeight frame height in pixels
+         * @param frameRate frame rate in fps
+         * @param profile codec profile (for MediaCodec) or -1 for none
+         */
+        VideoCodec(video_encoder codec, int bitrate, int frameWidth, int frameHeight, int frameRate,
+                   int profile = -1)
+            : mCodec(codec),
+              mBitRate(bitrate),
+              mFrameWidth(frameWidth),
+              mFrameHeight(frameHeight),
+              mFrameRate(frameRate),
+              mProfile(profile) {
+        }
+
+        VideoCodec(const VideoCodec&) = default;
+
+        ~VideoCodec() {}
+
+        /** Returns the codec type. */
+        video_encoder getCodec() const {
+            return mCodec;
+        }
+
+        /** Returns the bitrate in bps. */
+        int getBitrate() const {
+            return mBitRate;
+        }
+
+        /** Returns the frame width in pixels. */
+        int getFrameWidth() const {
+            return mFrameWidth;
+        }
+
+        /** Returns the frame height in pixels. */
+        int getFrameHeight() const {
+            return mFrameHeight;
+        }
+
+        /** Returns the frame rate in fps. */
+        int getFrameRate() const {
+            return mFrameRate;
+        }
+
+        /** Returns the codec profile (or -1 for no profile). */
+        int getProfile() const {
+            return mProfile;
+        }
+
+    private:
+        video_encoder mCodec;
+        int mBitRate;
+        int mFrameWidth;
+        int mFrameHeight;
+        int mFrameRate;
+        int mProfile;
+        friend class MediaProfiles;
+    };
+
+    /**
+     * Configuration for an audio encoder.
+     */
+    struct AudioCodec {
+    public:
+        /**
+         * Constructs an audio encoder configuration.
+         *
+         * @param codec codec type
+         * @param bitrate bitrate in bps
+         * @param sampleRate sample rate in Hz
+         * @param channels number of channels
+         * @param profile codec profile (for MediaCodec) or -1 for none
+         */
+        AudioCodec(audio_encoder codec, int bitrate, int sampleRate, int channels, int profile = -1)
+            : mCodec(codec),
+              mBitRate(bitrate),
+              mSampleRate(sampleRate),
+              mChannels(channels),
+              mProfile(profile) {
+        }
+
+        AudioCodec(const AudioCodec&) = default;
+
+        ~AudioCodec() {}
+
+        /** Returns the codec type. */
+        audio_encoder getCodec() const {
+            return mCodec;
+        }
+
+        /** Returns the bitrate in bps. */
+        int getBitrate() const {
+            return mBitRate;
+        }
+
+        /** Returns the sample rate in Hz. */
+        int getSampleRate() const {
+            return mSampleRate;
+        }
+
+        /** Returns the number of channels. */
+        int getChannels() const {
+            return mChannels;
+        }
+
+        /** Returns the codec profile (or -1 for no profile). */
+        int getProfile() const {
+            return mProfile;
+        }
+
+    private:
+        audio_encoder mCodec;
+        int mBitRate;
+        int mSampleRate;
+        int mChannels;
+        int mProfile;
+        friend class MediaProfiles;
+    };
+
+    /**
+     * Configuration for a camcorder profile/encoder profiles object.
+     */
+    struct CamcorderProfile {
+        /**
+         *  Returns an ordered list of the video codec configurations, in
+         *  decreasing order of preference. The returned pointers are only
+         *  valid during the lifetime of this object.
+         */
+        std::vector<const VideoCodec *> getVideoCodecs() const;
+
+        /**
+         *  Returns an ordered list of the audio codec configurations, in
+         *  decreasing order of preference. The returned pointers are only
+         *  valid during the lifetime of this object.
+         */
+        std::vector<const AudioCodec *> getAudioCodecs() const;
+
+        /** Returns the default duration in seconds. */
+        int getDuration() const {
+            return mDuration;
+        }
+
+        /** Returns the preferred file format. */
+        int getFileFormat() const {
+            return mFileFormat;
+        }
+
+        CamcorderProfile(const CamcorderProfile& copy) = default;
+
+        ~CamcorderProfile() = default;
+
+    private:
+        /**
+         * Constructs an empty object with no audio/video profiles.
+         */
+        CamcorderProfile()
+            : mCameraId(0),
+              mFileFormat(OUTPUT_FORMAT_THREE_GPP),
+              mQuality(CAMCORDER_QUALITY_HIGH),
+              mDuration(0) {}
+
+        int mCameraId;
+        output_format mFileFormat;
+        camcorder_quality mQuality;
+        int mDuration;
+        std::vector<VideoCodec> mVideoCodecs;
+        std::vector<AudioCodec> mAudioCodecs;
+        friend class MediaProfiles;
+    };
+
+    /**
+     * Returns the CamcorderProfile object for the given camera at
+     * the given quality level, or null if it does not exist.
+     */
+    const CamcorderProfile *getCamcorderProfile(
+            int cameraId, camcorder_quality quality) const;
+
+    /**
      * Returns the value for the given param name for the given camera at
      * the given quality level, or -1 if error.
      *
@@ -200,84 +391,6 @@
     MediaProfiles() {}                               // Dummy default constructor
     ~MediaProfiles();                                // Don't delete me
 
-    struct VideoCodec {
-        VideoCodec(video_encoder codec, int bitRate, int frameWidth, int frameHeight, int frameRate)
-            : mCodec(codec),
-              mBitRate(bitRate),
-              mFrameWidth(frameWidth),
-              mFrameHeight(frameHeight),
-              mFrameRate(frameRate) {}
-
-        VideoCodec(const VideoCodec& copy) {
-            mCodec = copy.mCodec;
-            mBitRate = copy.mBitRate;
-            mFrameWidth = copy.mFrameWidth;
-            mFrameHeight = copy.mFrameHeight;
-            mFrameRate = copy.mFrameRate;
-        }
-
-        ~VideoCodec() {}
-
-        video_encoder mCodec;
-        int mBitRate;
-        int mFrameWidth;
-        int mFrameHeight;
-        int mFrameRate;
-    };
-
-    struct AudioCodec {
-        AudioCodec(audio_encoder codec, int bitRate, int sampleRate, int channels)
-            : mCodec(codec),
-              mBitRate(bitRate),
-              mSampleRate(sampleRate),
-              mChannels(channels) {}
-
-        AudioCodec(const AudioCodec& copy) {
-            mCodec = copy.mCodec;
-            mBitRate = copy.mBitRate;
-            mSampleRate = copy.mSampleRate;
-            mChannels = copy.mChannels;
-        }
-
-        ~AudioCodec() {}
-
-        audio_encoder mCodec;
-        int mBitRate;
-        int mSampleRate;
-        int mChannels;
-    };
-
-    struct CamcorderProfile {
-        CamcorderProfile()
-            : mCameraId(0),
-              mFileFormat(OUTPUT_FORMAT_THREE_GPP),
-              mQuality(CAMCORDER_QUALITY_HIGH),
-              mDuration(0),
-              mVideoCodec(0),
-              mAudioCodec(0) {}
-
-        CamcorderProfile(const CamcorderProfile& copy) {
-            mCameraId = copy.mCameraId;
-            mFileFormat = copy.mFileFormat;
-            mQuality = copy.mQuality;
-            mDuration = copy.mDuration;
-            mVideoCodec = new VideoCodec(*copy.mVideoCodec);
-            mAudioCodec = new AudioCodec(*copy.mAudioCodec);
-        }
-
-        ~CamcorderProfile() {
-            delete mVideoCodec;
-            delete mAudioCodec;
-        }
-
-        int mCameraId;
-        output_format mFileFormat;
-        camcorder_quality mQuality;
-        int mDuration;
-        VideoCodec *mVideoCodec;
-        AudioCodec *mAudioCodec;
-    };
-
     struct VideoEncoderCap {
         // Ugly constructor
         VideoEncoderCap(video_encoder codec,
@@ -362,23 +475,23 @@
     // If the xml configuration file does exist, use the settings
     // from the xml
     static MediaProfiles* createInstanceFromXmlFile(const char *xml);
-    static output_format createEncoderOutputFileFormat(const char **atts);
-    static VideoCodec* createVideoCodec(const char **atts, MediaProfiles *profiles);
-    static AudioCodec* createAudioCodec(const char **atts, MediaProfiles *profiles);
-    static AudioDecoderCap* createAudioDecoderCap(const char **atts);
-    static VideoDecoderCap* createVideoDecoderCap(const char **atts);
-    static VideoEncoderCap* createVideoEncoderCap(const char **atts);
-    static AudioEncoderCap* createAudioEncoderCap(const char **atts);
+    static output_format createEncoderOutputFileFormat(const char **atts, size_t natts);
+    static void createVideoCodec(const char **atts, size_t natts, MediaProfiles *profiles);
+    static void createAudioCodec(const char **atts, size_t natts, MediaProfiles *profiles);
+    static AudioDecoderCap* createAudioDecoderCap(const char **atts, size_t natts);
+    static VideoDecoderCap* createVideoDecoderCap(const char **atts, size_t natts);
+    static VideoEncoderCap* createVideoEncoderCap(const char **atts, size_t natts);
+    static AudioEncoderCap* createAudioEncoderCap(const char **atts, size_t natts);
 
     static CamcorderProfile* createCamcorderProfile(
-                int cameraId, const char **atts, Vector<int>& cameraIds);
+                int cameraId, const char **atts, size_t natts, Vector<int>& cameraIds);
 
-    static int getCameraId(const char **atts);
+    static int getCameraId(const char **atts, size_t natts);
 
-    void addStartTimeOffset(int cameraId, const char **atts);
+    void addStartTimeOffset(int cameraId, const char **atts, size_t natts);
 
     ImageEncodingQualityLevels* findImageEncodingQualityLevels(int cameraId) const;
-    void addImageEncodingQualityLevel(int cameraId, const char** atts);
+    void addImageEncodingQualityLevel(int cameraId, const char** atts, size_t natts);
 
     // Customized element tag handler for parsing the xml configuration file.
     static void startElementHandler(void *userData, const char *name, const char **atts);
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 8493f64..2b7818d 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -21,6 +21,7 @@
 #include <media/AudioSystem.h>
 #include <media/MicrophoneInfo.h>
 #include <media/mediarecorder.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <system/audio.h>
 
@@ -33,8 +34,8 @@
 struct PersistentSurface;
 
 struct MediaRecorderBase {
-    MediaRecorderBase(const String16 &opPackageName)
-        : mOpPackageName(opPackageName) {}
+    explicit MediaRecorderBase(const android::content::AttributionSourceState &attributionSource)
+        : mAttributionSource(attributionSource) {}
     virtual ~MediaRecorderBase() {}
 
     virtual status_t init() = 0;
@@ -77,12 +78,13 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) const = 0;
+    virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
 
 
 
 protected:
 
-    String16 mOpPackageName;
+    android::content::AttributionSourceState mAttributionSource;
 
 private:
     MediaRecorderBase(const MediaRecorderBase &);
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index e7362c1..4712528 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -35,13 +35,13 @@
     MediaResource() = delete;
     MediaResource(Type type, int64_t value);
     MediaResource(Type type, SubType subType, int64_t value);
-    MediaResource(Type type, const std::vector<int8_t> &id, int64_t value);
+    MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
 
-    static MediaResource CodecResource(bool secure, bool video);
+    static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
     static MediaResource VideoBatteryResource();
-    static MediaResource DrmSessionResource(const std::vector<int8_t> &id, int64_t value);
+    static MediaResource DrmSessionResource(const std::vector<uint8_t> &id, int64_t value);
 };
 
 inline static const char *asString(MediaResource::Type i, const char *def = "??") {
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index 138a014..fba1a30 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -73,6 +73,9 @@
     METADATA_KEY_COLOR_RANGE     = 37,
     METADATA_KEY_SAMPLERATE      = 38,
     METADATA_KEY_BITS_PER_SAMPLE = 39,
+    METADATA_KEY_VIDEO_CODEC_MIME_TYPE = 40,
+    METADATA_KEY_XMP_OFFSET      = 41,
+    METADATA_KEY_XMP_LENGTH      = 42,
 
     // Add more here...
 };
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 2335c5a..de4c7db 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -29,10 +29,13 @@
 #include <media/IMediaPlayer.h>
 #include <media/IMediaDeathNotifier.h>
 #include <media/IStreamSource.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <utils/KeyedVector.h>
 #include <utils/String8.h>
 
+#include <string>
+
 struct ANativeWindow;
 
 namespace android {
@@ -60,6 +63,7 @@
     MEDIA_META_DATA         = 202,
     MEDIA_DRM_INFO          = 210,
     MEDIA_TIME_DISCONTINUITY = 211,
+    MEDIA_IMS_RX_NOTICE     = 300,
     MEDIA_AUDIO_ROUTING_CHANGED = 10000,
 };
 
@@ -177,7 +181,10 @@
     KEY_PARAMETER_PLAYBACK_RATE_PERMILLE = 1300,                // set only
 
     // Set a Parcel containing the value of a parcelled Java AudioAttribute instance
-    KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400                       // set only
+    KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400,                       // set only
+
+    // Set a Parcel containing the values of RTP attribute
+    KEY_PARAMETER_RTP_ATTRIBUTES = 2000                       // set only
 };
 
 // Keep INVOKE_ID_* in sync with MediaPlayer.java.
@@ -205,7 +212,8 @@
                     public virtual IMediaDeathNotifier
 {
 public:
-    MediaPlayer();
+    explicit MediaPlayer(const android::content::AttributionSourceState& attributionSource =
+        android::content::AttributionSourceState());
     ~MediaPlayer();
             void            died();
             void            disconnect();
@@ -217,6 +225,7 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IDataSource> &source);
+            status_t        setDataSource(const String8& rtpParams);
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
@@ -308,6 +317,7 @@
     float                       mSendLevel;
     struct sockaddr_in          mRetransmitEndpoint;
     bool                        mRetransmitEndpointValid;
+    const android::content::AttributionSourceState mAttributionSource;
 };
 
 }; // namespace android
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 6e2d94d..d54ff32 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -25,6 +25,7 @@
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaDeathNotifier.h>
 #include <media/MicrophoneInfo.h>
+#include <android/content/AttributionSourceState.h>
 
 namespace android {
 
@@ -226,7 +227,7 @@
                       public virtual IMediaDeathNotifier
 {
 public:
-    MediaRecorder(const String16& opPackageName);
+    explicit MediaRecorder(const android::content::AttributionSourceState& attributionSource);
     ~MediaRecorder();
 
     void        died();
@@ -270,6 +271,7 @@
     status_t    setPreferredMicrophoneFieldDimension(float zoom);
 
     status_t    getPortId(audio_port_handle_t *portId) const;
+    status_t    getRtpDataUsage(uint64_t *bytes);
 
 private:
     void                    doCleanUp();
@@ -291,6 +293,8 @@
     bool                        mIsOutputFileSet;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
+
+    output_format               mOutputFormat;
 };
 
 };  // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1fadc94..1c9b9e4 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -40,8 +40,10 @@
 namespace android {
 
 using media::VolumeShaper;
+using content::AttributionSourceState;
 
-MediaPlayer::MediaPlayer()
+MediaPlayer::MediaPlayer(const AttributionSourceState& attributionSource)
+        : mAttributionSource(attributionSource)
 {
     ALOGV("constructor");
     mListener = NULL;
@@ -152,7 +154,7 @@
     if (url != NULL) {
         const sp<IMediaPlayerService> service(getMediaPlayerService());
         if (service != 0) {
-            sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+            sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource));
             if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
                 (NO_ERROR != player->setDataSource(httpService, url, headers))) {
                 player.clear();
@@ -169,7 +171,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(fd, offset, length))) {
             player.clear();
@@ -185,7 +187,7 @@
     status_t err = UNKNOWN_ERROR;
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != 0) {
-        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource));
         if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
             (NO_ERROR != player->setDataSource(source))) {
             player.clear();
@@ -195,6 +197,22 @@
     return err;
 }
 
+status_t MediaPlayer::setDataSource(const String8& rtpParams)
+{
+    ALOGV("setDataSource(rtpParams)");
+    status_t err = UNKNOWN_ERROR;
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
+    if (service != 0) {
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId, mAttributionSource));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(rtpParams))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
 status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
 {
     Mutex::Autolock _l(mLock);
@@ -943,6 +961,9 @@
     case MEDIA_META_DATA:
         ALOGV("Received timed metadata message");
         break;
+    case MEDIA_IMS_RX_NOTICE:
+        ALOGV("Received IMS Rx notice message");
+        break;
     default:
         ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
         break;
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 70655d5..cf12c36 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -33,6 +33,8 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 status_t MediaRecorder::setCamera(const sp<hardware::ICamera>& camera,
         const sp<ICameraRecordingProxy>& proxy)
 {
@@ -244,6 +246,7 @@
         mCurrentState = MEDIA_RECORDER_ERROR;
         return ret;
     }
+    mOutputFormat = (output_format)of;
     mCurrentState = MEDIA_RECORDER_DATASOURCE_CONFIGURED;
     return ret;
 }
@@ -479,6 +482,13 @@
                            (MEDIA_RECORDER_PREPARED |
                             MEDIA_RECORDER_RECORDING |
                             MEDIA_RECORDER_ERROR));
+
+    // For RTP output, parameters may be set dynamically while recording.
+    if (isInvalidState && mCurrentState == MEDIA_RECORDER_RECORDING &&
+            mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        isInvalidState = false;
+    }
     if (isInvalidState) {
         ALOGE("setParameters is called in an invalid state: %d", mCurrentState);
         return INVALID_OPERATION;
@@ -737,6 +747,7 @@
     mIsAudioEncoderSet = false;
     mIsVideoEncoderSet = false;
     mIsOutputFileSet   = false;
+    mOutputFormat      = OUTPUT_FORMAT_DEFAULT;
 }
 
 // Release should be OK in any state
@@ -749,13 +760,14 @@
     return INVALID_OPERATION;
 }
 
-MediaRecorder::MediaRecorder(const String16& opPackageName) : mSurfaceMediaSource(NULL)
+MediaRecorder::MediaRecorder(const AttributionSourceState &attributionSource)
+        : mSurfaceMediaSource(NULL)
 {
     ALOGV("constructor");
 
     const sp<IMediaPlayerService> service(getMediaPlayerService());
     if (service != NULL) {
-        mMediaRecorder = service->createMediaRecorder(opPackageName);
+        mMediaRecorder = service->createMediaRecorder(attributionSource);
     }
     if (mMediaRecorder != NULL) {
         mCurrentState = MEDIA_RECORDER_IDLE;
@@ -904,4 +916,14 @@
     return mMediaRecorder->getPortId(portId);
 }
 
+status_t MediaRecorder::getRtpDataUsage(uint64_t *bytes)
+{
+    ALOGV("getRtpDataUsage");
+
+    if (mMediaRecorder == NULL) {
+        ALOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    return mMediaRecorder->getRtpDataUsage(bytes);
+}
 } // namespace android
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
new file mode 100644
index 0000000..7dd0caa
--- /dev/null
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_test {
+    name: "CodecListTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "CodecListTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia_codeclist",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_xmlparser",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libmedia/tests/codeclist/CodecListTest.cpp b/media/libmedia/tests/codeclist/CodecListTest.cpp
new file mode 100644
index 0000000..bd2adf7
--- /dev/null
+++ b/media/libmedia/tests/codeclist/CodecListTest.cpp
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecListTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+
+#define kSwCodecXmlPath "/apex/com.android.media.swcodec/etc/"
+
+using namespace android;
+
+struct CddReq {
+    CddReq(const char *type, bool encoder) {
+        mediaType = type;
+        isEncoder = encoder;
+    }
+
+    const char *mediaType;
+    bool isEncoder;
+};
+
+TEST(CodecListTest, CodecListSanityTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+    EXPECT_GT(list->countCodecs(), 0) << "No codecs in CodecList";
+    for (size_t i = 0; i < list->countCodecs(); ++i) {
+        sp<MediaCodecInfo> info = list->getCodecInfo(i);
+        ASSERT_NE(info, nullptr) << "CodecInfo is null";
+        ssize_t index = list->findCodecByName(info->getCodecName());
+        EXPECT_GE(index, 0) << "Wasn't able to find existing codec: " << info->getCodecName();
+    }
+}
+
+TEST(CodecListTest, CodecListByTypeTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance.";
+
+    std::vector<CddReq> cddReq{
+            // media type, isEncoder
+            CddReq(MIMETYPE_AUDIO_AAC, false),
+            CddReq(MIMETYPE_AUDIO_AAC, true),
+
+            CddReq(MIMETYPE_VIDEO_AVC, false),
+            CddReq(MIMETYPE_VIDEO_HEVC, false),
+            CddReq(MIMETYPE_VIDEO_MPEG4, false),
+            CddReq(MIMETYPE_VIDEO_VP8, false),
+            CddReq(MIMETYPE_VIDEO_VP9, false),
+
+            CddReq(MIMETYPE_VIDEO_AVC, true),
+            CddReq(MIMETYPE_VIDEO_VP8, true),
+    };
+
+    for (CddReq codecReq : cddReq) {
+        ssize_t index = list->findCodecByType(codecReq.mediaType, codecReq.isEncoder);
+        EXPECT_GE(index, 0) << "Wasn't able to find codec for media type: " << codecReq.mediaType
+                            << (codecReq.isEncoder ? " encoder" : " decoder");
+    }
+}
+
+TEST(CodecInfoTest, ListInfoTest) {
+    ALOGV("Compare CodecInfo with info in XML");
+    MediaCodecsXmlParser parser;
+    status_t status = parser.parseXmlFilesInSearchDirs();
+    ASSERT_EQ(status, OK) << "XML Parsing failed for default paths";
+
+    const std::vector<std::string> &xmlFiles = MediaCodecsXmlParser::getDefaultXmlNames();
+    const std::vector<std::string> &searchDirsApex{std::string(kSwCodecXmlPath)};
+    status = parser.parseXmlFilesInSearchDirs(xmlFiles, searchDirsApex);
+    ASSERT_EQ(status, OK) << "XML Parsing of " << kSwCodecXmlPath << " failed";
+
+    MediaCodecsXmlParser::CodecMap codecMap = parser.getCodecMap();
+
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+    // Compare CodecMap from XML to CodecList
+    for (auto mapIter : codecMap) {
+        ssize_t index = list->findCodecByName(mapIter.first.c_str());
+        if (index < 0) {
+            std::cout << "[   WARN   ] " << mapIter.first << " not found in CodecList \n";
+            continue;
+        }
+
+        sp<MediaCodecInfo> info = list->getCodecInfo(index);
+        ASSERT_NE(info, nullptr) << "CodecInfo is null";
+
+        MediaCodecsXmlParser::CodecProperties codecProperties = mapIter.second;
+        ASSERT_EQ(codecProperties.isEncoder, info->isEncoder()) << "Encoder property mismatch";
+
+        ALOGV("codec name: %s", info->getCodecName());
+        ALOGV("codec rank: %d", info->getRank());
+        ALOGV("codec ownername: %s", info->getOwnerName());
+        ALOGV("codec isEncoder: %d", info->isEncoder());
+
+        ALOGV("attributeFlags: kFlagIsHardwareAccelerated, kFlagIsSoftwareOnly, kFlagIsVendor, "
+              "kFlagIsEncoder");
+        std::bitset<4> attr(info->getAttributes());
+        ALOGV("codec attributes: %s", attr.to_string().c_str());
+
+        Vector<AString> mediaTypes;
+        info->getSupportedMediaTypes(&mediaTypes);
+        ALOGV("supported media types count: %zu", mediaTypes.size());
+        ASSERT_FALSE(mediaTypes.isEmpty())
+                << "no media type supported by codec: " << info->getCodecName();
+
+        MediaCodecsXmlParser::TypeMap typeMap = codecProperties.typeMap;
+        for (auto mediaType : mediaTypes) {
+            ALOGV("codec mediaTypes: %s", mediaType.c_str());
+            auto searchTypeMap = typeMap.find(mediaType.c_str());
+            ASSERT_NE(searchTypeMap, typeMap.end())
+                    << "CodecList doesn't contain codec media type: " << mediaType.c_str();
+            MediaCodecsXmlParser::AttributeMap attributeMap = searchTypeMap->second;
+
+            const sp<MediaCodecInfo::Capabilities> &capabilities =
+                    info->getCapabilitiesFor(mediaType.c_str());
+
+            Vector<uint32_t> colorFormats;
+            capabilities->getSupportedColorFormats(&colorFormats);
+            for (auto colorFormat : colorFormats) {
+                ALOGV("supported color formats: %d", colorFormat);
+            }
+
+            Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+            capabilities->getSupportedProfileLevels(&profileLevels);
+            if (!profileLevels.empty()) {
+                ALOGV("supported profilelevel for media type: %s", mediaType.c_str());
+            }
+            for (auto profileLevel : profileLevels) {
+                ALOGV("profile: %d, level: %d", profileLevel.mProfile, profileLevel.mLevel);
+            }
+
+            sp<AMessage> details = capabilities->getDetails();
+            ASSERT_NE(details, nullptr) << "Details in codec capabilities is null";
+            ALOGV("no. of entries in details: %zu", details->countEntries());
+
+            for (size_t idxDetail = 0; idxDetail < details->countEntries(); idxDetail++) {
+                AMessage::Type type;
+                const char *name = details->getEntryNameAt(idxDetail, &type);
+                ALOGV("details entry name: %s", name);
+                AMessage::ItemData itemData = details->getEntryAt(idxDetail);
+                switch (type) {
+                    case AMessage::kTypeInt32:
+                        int32_t val32;
+                        if (itemData.find(&val32)) {
+                            ALOGV("entry int val: %d", val32);
+                            auto searchAttr = attributeMap.find(name);
+                            if (searchAttr == attributeMap.end()) {
+                                ALOGW("Parser doesn't have key: %s", name);
+                            } else if (stoi(searchAttr->second) != val32) {
+                                ALOGW("Values didn't match for key: %s", name);
+                                ALOGV("Values act/exp: %d / %d", val32, stoi(searchAttr->second));
+                            }
+                        }
+                        break;
+                    case AMessage::kTypeString:
+                        if (AString valStr; itemData.find(&valStr)) {
+                            ALOGV("entry str val: %s", valStr.c_str());
+                            auto searchAttr = attributeMap.find(name);
+                            if (searchAttr == attributeMap.end()) {
+                                ALOGW("Parser doesn't have key: %s", name);
+                            } else if (searchAttr->second != valStr.c_str()) {
+                                ALOGW("Values didn't match for key: %s", name);
+                                ALOGV("Values act/exp: %s / %s", valStr.c_str(),
+                                      searchAttr->second.c_str());
+                            }
+                        }
+                        break;
+                    default:
+                        ALOGV("data type: %d shouldn't be present in details", type);
+                        break;
+                }
+            }
+        }
+
+        Parcel *codecInfoParcel = new Parcel();
+        ASSERT_NE(codecInfoParcel, nullptr) << "Unable to create parcel";
+
+        status_t status = info->writeToParcel(codecInfoParcel);
+        ASSERT_EQ(status, OK) << "Writing to parcel failed";
+
+        codecInfoParcel->setDataPosition(0);
+        sp<MediaCodecInfo> parcelCodecInfo = info->FromParcel(*codecInfoParcel);
+        ASSERT_NE(parcelCodecInfo, nullptr) << "CodecInfo from parcel is null";
+        delete codecInfoParcel;
+
+        EXPECT_STREQ(info->getCodecName(), parcelCodecInfo->getCodecName())
+                << "Returned codec name in info doesn't match";
+        EXPECT_EQ(info->getRank(), parcelCodecInfo->getRank())
+                << "Returned component rank in info doesn't match";
+    }
+}
+
+TEST(CodecListTest, CodecListGlobalSettingsTest) {
+    sp<IMediaCodecList> list = MediaCodecList::getInstance();
+    ASSERT_NE(list, nullptr) << "Unable to get MediaCodecList instance";
+
+    sp<AMessage> globalSettings = list->getGlobalSettings();
+    ASSERT_NE(globalSettings, nullptr) << "GlobalSettings AMessage is null";
+    ALOGV("global settings: %s", globalSettings->debugString(0).c_str());
+}
diff --git a/media/libmedia/tests/fuzzer/Android.bp b/media/libmedia/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..c03b5b1
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/Android.bp
@@ -0,0 +1,19 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_fuzz {
+  name: "libmedia_metadata_fuzzer",
+  srcs: [
+    "libmedia_metadata_fuzzer.cpp",
+  ],
+  shared_libs: [
+    "libmedia",
+    "libbinder",
+  ],
+}
diff --git a/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
new file mode 100644
index 0000000..058e4e5
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
@@ -0,0 +1,52 @@
+// This program fuzzes Metadata.cpp
+
+#include <stddef.h>
+#include <stdint.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/Metadata.h>
+#include <binder/Parcel.h>
+
+using namespace android;
+using namespace media;
+
+static const float want_prob = 0.5;
+
+bool bytesRemain(FuzzedDataProvider *fdp);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    Parcel p;
+    Metadata md = Metadata(&p);
+
+    md.appendHeader();
+    while (bytesRemain(&fdp)) {
+
+        float got_prob = fdp.ConsumeProbability<float>();
+        if (!bytesRemain(&fdp)) {
+            break;
+        }
+
+        if (got_prob < want_prob) {
+            int32_t key_bool = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            bool val_bool = fdp.ConsumeBool();
+            md.appendBool(key_bool, val_bool);
+        } else {
+            int32_t key_int32 = fdp.ConsumeIntegral<int32_t>();
+            if (!bytesRemain(&fdp)) {
+                break;
+            }
+            int32_t val_int32 = fdp.ConsumeIntegral<int32_t>();
+            md.appendInt32(key_int32, val_int32);
+        }
+        md.updateLength();
+    }
+    md.resetParcel();
+    return 0;
+}
+
+bool bytesRemain(FuzzedDataProvider *fdp) {
+    return fdp->remaining_bytes() > 0;
+}
\ No newline at end of file
diff --git a/media/libmedia/tests/mediaplayer/Android.bp b/media/libmedia/tests/mediaplayer/Android.bp
new file mode 100644
index 0000000..50f35ea
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/Android.bp
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_test {
+    name: "IMediaPlayerTest",
+    test_suites: ["device-tests", "mts"],
+    gtest: true,
+
+    srcs: [
+        "IMediaPlayerTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+        "media_permission-aidl-cpp",
+    ],
+    compile_multilib: "first",
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
new file mode 100644
index 0000000..cc60933
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IServiceManager.h>
+#include <binder/Parcel.h>
+#include <gtest/gtest.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/IMediaPlayer.h>
+#include <media/IMediaPlayerService.h>
+#include <media/mediaplayer.h>
+
+namespace android {
+
+constexpr uint8_t kMockByteArray[16] = {};
+
+class IMediaPlayerTest : public testing::Test {
+  protected:
+    static constexpr uint32_t PREPARE_DRM = IMediaPlayer::PREPARE_DRM;
+
+    void SetUp() override {
+        mediaPlayer_ = sp<MediaPlayer>::make();
+        sp<IServiceManager> serviceManager = defaultServiceManager();
+        sp<IBinder> mediaPlayerService = serviceManager->getService(String16("media.player"));
+        sp<IMediaPlayerService> iMediaPlayerService =
+                IMediaPlayerService::asInterface(mediaPlayerService);
+        iMediaPlayer_ = iMediaPlayerService->create(mediaPlayer_);
+    }
+
+    sp<MediaPlayer> mediaPlayer_;
+    sp<IMediaPlayer> iMediaPlayer_;
+};
+
+TEST_F(IMediaPlayerTest, PrepareDrmInvalidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length greater than the following session id array. Should be discarded.
+    data.writeUint32(2);
+    data.writeUnpadded(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, BAD_VALUE);
+}
+
+TEST_F(IMediaPlayerTest, PrepareDrmValidTransaction) {
+    Parcel data, reply;
+    data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+    data.write(kMockByteArray, 16);
+
+    // We write a length equal to the length of the following data. The transaction should be valid.
+    data.writeUint32(1);
+    data.write(kMockByteArray, 1);
+
+    status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+            ->transact(PREPARE_DRM, data, &reply);
+    ASSERT_EQ(result, OK);
+}
+}  // namespace android
diff --git a/media/libmedia/xsd/Android.bp b/media/libmedia/xsd/Android.bp
index 1635f70..36e2808 100644
--- a/media/libmedia/xsd/Android.bp
+++ b/media/libmedia/xsd/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
 xsd_config {
     name: "media_profiles",
     srcs: ["media_profiles.xsd"],
diff --git a/media/libmedia/xsd/vts/Android.bp b/media/libmedia/xsd/vts/Android.bp
index 598e41b..83ab977 100644
--- a/media/libmedia/xsd/vts/Android.bp
+++ b/media/libmedia/xsd/vts/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
 cc_test {
     name: "vts_mediaProfiles_validate_test",
     srcs: [
diff --git a/media/libmedia/xsd/vts/Android.mk b/media/libmedia/xsd/vts/Android.mk
deleted file mode 100644
index 52c3779..0000000
--- a/media/libmedia/xsd/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaProfiles
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libmedia/xsd/vts/AndroidTest.xml b/media/libmedia/xsd/vts/AndroidTest.xml
deleted file mode 100644
index e68721b..0000000
--- a/media/libmedia/xsd/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaProfiles.">
-    <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
-    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
-        <option name="abort-on-push-failure" value="false"/>
-        <option name="push-group" value="HostDrivenTest.push"/>
-        <option name="push" value="DATA/etc/media_profiles.xsd->/data/local/tmp/media_profiles.xsd"/>
-    </target_preparer>
-    <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
-        <option name="test-module-name" value="VtsValidateMediaProfiles"/>
-        <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
-        <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaProfiles_validate_test/vts_mediaProfiles_validate_test" />
-        <option name="binary-test-type" value="gtest"/>
-        <option name="test-timeout" value="30s"/>
-    </test>
-</configuration>
diff --git a/media/libmediaformatshaper/Android.bp b/media/libmediaformatshaper/Android.bp
new file mode 100644
index 0000000..bdd1465
--- /dev/null
+++ b/media/libmediaformatshaper/Android.bp
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+// these headers include the structure of needed function pointers
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_headers {
+    name: "libmediaformatshaper_headers",
+    export_include_dirs: ["include"],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "libmediaformatshaper_defaults",
+    srcs: [
+        "CodecProperties.cpp",
+        "CodecSeeding.cpp",
+        "FormatShaper.cpp",
+        "ManageShapingCodecs.cpp",
+        "VideoShaper.cpp",
+        "VQApply.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+        "-fvisibility=hidden",
+        "-Wthread-safety",                      // enables GUARDED_BY()
+    ],
+
+    target: {
+        android: {
+            shared_libs: [
+                "libmediandk#29",
+            ],
+        },
+    },
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_library {
+    name: "libmediaformatshaper",
+    defaults: ["libmediaformatshaper_defaults"],
+
+    min_sdk_version: "29",
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+
+    version_script: "exports.lds",
+
+}
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
new file mode 100644
index 0000000..c57b693
--- /dev/null
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -0,0 +1,500 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecProperties"
+#include <utils/Log.h>
+
+#include <string>
+#include <stdlib.h>
+
+#include "CodecProperties.h"
+
+#include <media/stagefright/MediaCodecConstants.h>
+
+
+// we aren't going to mess with shaping points dimensions beyond this
+static const int32_t DIMENSION_LIMIT = 16384;
+
+namespace android {
+namespace mediaformatshaper {
+
+CodecProperties::CodecProperties(std::string name, std::string mediaType) {
+    ALOGV("CodecProperties(%s, %s)", name.c_str(), mediaType.c_str());
+    mName = name;
+    mMediaType = mediaType;
+}
+
+std::string CodecProperties::getName(){
+    return mName;
+}
+
+std::string CodecProperties::getMediaType(){
+    return mMediaType;
+}
+
+int CodecProperties::supportedMinimumQuality() {
+    return mMinimumQuality;
+}
+void CodecProperties::setSupportedMinimumQuality(int vmaf) {
+    mMinimumQuality = vmaf;
+}
+
+void CodecProperties::setMissingQpBoost(double boost) {
+    mMissingQpBoost = boost;
+}
+void CodecProperties::setPhaseOut(double phaseout) {
+    mPhaseOut = phaseout;
+}
+
+// what API is this codec set up for (e.g. API of the associated partition)
+// vendor-side (OEM) codecs may be older, due to 'vendor freeze' and treble
+int CodecProperties::supportedApi() {
+    return mApi;
+}
+
+void CodecProperties::setFeatureValue(std::string key, int32_t value) {
+    ALOGD("setFeatureValue(%s,%d)", key.c_str(), value);
+    mFeatures.insert({key, value});
+
+    if (!strcmp(key.c_str(), FEATURE_QpBounds)) {
+        setSupportsQp(1);
+    } else if (!strcmp(key.c_str(), "video-minimum-quality")) {
+        setSupportedMinimumQuality(1);
+    } else if (!strcmp(key.c_str(), "vq-minimum-quality")) { // from prototyping
+        setSupportedMinimumQuality(1);
+    }
+}
+
+bool CodecProperties::getFeatureValue(std::string key, int32_t *valuep) {
+    ALOGV("getFeatureValue(%s)", key.c_str());
+    if (valuep == nullptr) {
+        return false;
+    }
+    auto mapped = mFeatures.find(key);
+    if (mapped != mFeatures.end()) {
+        *valuep = mapped->second;
+        return true;
+    }
+    return false;
+}
+
+// Tuning values (which differ from Features)
+// this is where we set up things like target bitrates and QP ranges
+// NB: the tuning values arrive as strings, allowing us to convert them into an appropriate
+// format (int, float, ranges, other combinations)
+//
+void CodecProperties::setTuningValue(std::string key, std::string value) {
+    ALOGD("setTuningValue(%s,%s)", key.c_str(), value.c_str());
+    mTunings.insert({key, value});
+
+    bool legal = false;
+    // NB: old school strtol() because std::stoi() throws exceptions
+    if (!strcmp(key.c_str(), "vq-target-qpmax")) {
+        const char *p = value.c_str();
+        char *q;
+        int32_t iValue =  strtol(p, &q, 0);
+        if (q != p) {
+            setTargetQpMax(iValue);
+            legal = true;
+        }
+    } else if (!strncmp(key.c_str(), "vq-target-qpmax-", strlen("vq-target-qpmax-"))) {
+        std::string resolution = key.substr(strlen("vq-target-qpmax-"));
+        if (qpMaxPoint(resolution, value)) {
+            legal = true;
+        }
+    } else if (!strcmp(key.c_str(), "vq-target-bpp")) {
+        const char *p = value.c_str();
+        char *q;
+        double bpp = strtod(p, &q);
+        if (q != p) {
+            setBpp(bpp);
+            legal = true;
+        }
+    } else if (!strncmp(key.c_str(), "vq-target-bpp-", strlen("vq-target-bpp-"))) {
+        std::string resolution = key.substr(strlen("vq-target-bpp-"));
+        if (bppPoint(resolution, value)) {
+            legal = true;
+        }
+    } else if (!strcmp(key.c_str(), "vq-target-bppx100")) {
+        // legacy, prototyping
+        const char *p = value.c_str();
+        char *q;
+        int32_t iValue =  strtol(p, &q, 0);
+        if (q != p) {
+            double bpp = iValue / 100.0;
+            setBpp(bpp);
+            legal = true;
+        }
+    } else if (!strcmp(key.c_str(), "vq-bitrate-phaseout")) {
+        const char *p = value.c_str();
+        char *q;
+        double phaseout = strtod(p, &q);
+        if (q != p) {
+            setPhaseOut(phaseout);
+            legal = true;
+        }
+    } else if (!strcmp(key.c_str(), "vq-boost-missing-qp")) {
+        const char *p = value.c_str();
+        char *q;
+        double boost = strtod(p, &q);
+        if (q != p) {
+            setMissingQpBoost(boost);
+            legal = true;
+        }
+    } else {
+        legal = true;
+    }
+
+    if (!legal) {
+        ALOGW("setTuningValue() unable to apply tuning '%s' with value '%s'",
+              key.c_str(), value.c_str());
+    }
+    return;
+}
+
+bool CodecProperties::getTuningValue(std::string key, std::string &value) {
+    ALOGV("getTuningValue(%s)", key.c_str());
+    auto mapped = mTunings.find(key);
+    if (mapped != mTunings.end()) {
+        value = mapped->second;
+        return true;
+    }
+    return false;
+}
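+
+// A minimal illustration of how the string tunings above land in typed state; the
+// codec name and the values here are hypothetical, the keys are the ones parsed by
+// setTuningValue() above:
+//
+//   CodecProperties props("c2.example.avc.encoder", "video/avc");
+//   props.setTuningValue("vq-target-bpp-720p", "2.25");   // stored via bppPoint()
+//   props.setTuningValue("vq-target-qpmax", "45");        // stored via setTargetQpMax()
+//   double bpp = props.getBpp(1280, 720);                 // -> 2.25
+//   int qpMax = props.targetQpMax(1280, 720);             // -> 45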
+
+bool CodecProperties::bppPoint(std::string resolution, std::string value) {
+
+    int32_t width = 0;
+    int32_t height = 0;
+    double bpp = -1;
+
+    // resolution is "WxH", "W*H" or a standard name like "720p"
+    if (resolution == "1080p") {
+        width = 1080; height = 1920;
+    } else if (resolution == "720p") {
+        width = 720; height = 1280;
+    } else if (resolution == "540p") {
+        width = 540; height = 960;
+    } else if (resolution == "480p") {
+        width = 480; height = 854;
+    } else {
+        size_t sep = resolution.find('x');
+        if (sep == std::string::npos) {
+            sep = resolution.find('*');
+        }
+        if (sep == std::string::npos) {
+            ALOGW("unable to parse resolution: '%s'", resolution.c_str());
+            return false;
+        }
+        std::string w = resolution.substr(0, sep);
+        std::string h = resolution.substr(sep+1);
+
+        char *q;
+        const char *p = w.c_str();
+        width = strtol(p, &q, 0);
+        if (q == p) {
+            width = -1;
+        }
+        p = h.c_str();
+        height = strtol(p, &q, 0);
+        if (q == p) {
+            height = -1;
+        }
+        if (width <= 0 || height <= 0 || width > DIMENSION_LIMIT || height > DIMENSION_LIMIT) {
+            ALOGW("unparseable: width, height '%s'", resolution.c_str());
+            return false;
+        }
+    }
+
+    const char *p = value.c_str();
+    char *q;
+    bpp = strtod(p, &q);
+    if (q == p) {
+        ALOGW("unparseable bpp '%s'", value.c_str());
+        return false;
+    }
+
+    struct bpp_point *point = (struct bpp_point*) malloc(sizeof(*point));
+    if (point == nullptr) {
+        ALOGW("unable to allocate memory for bpp point");
+        return false;
+    }
+
+    point->pixels = width * height;
+    point->width = width;
+    point->height = height;
+    point->bpp = bpp;
+
+    if (mBppPoints == nullptr) {
+        point->next = nullptr;
+        mBppPoints = point;
+    } else if (point->pixels < mBppPoints->pixels) {
+        // at the front
+        point->next = mBppPoints;
+        mBppPoints = point;
+    } else {
+        struct bpp_point *after = mBppPoints;
+        while (after->next) {
+            if (point->pixels > after->next->pixels) {
+                after = after->next;
+                continue;
+            }
+
+            // insert before after->next
+            point->next = after->next;
+            after->next = point;
+            break;
+        }
+        if (after->next == nullptr) {
+            // hasn't gone in yet
+            point->next = nullptr;
+            after->next = point;
+        }
+    }
+
+    return true;
+}
+
+double CodecProperties::getBpp(int32_t width, int32_t height) {
+    // look in the per-resolution list
+
+    int32_t pixels = width * height;
+
+    if (mBppPoints) {
+        struct bpp_point *point = mBppPoints;
+        while (point && point->pixels < pixels) {
+            point = point->next;
+        }
+        if (point) {
+            ALOGV("getBpp(w=%d,h=%d) returns %f from bpppoint w=%d h=%d",
+                width, height, point->bpp, point->width, point->height);
+            return point->bpp;
+        }
+    }
+
+    ALOGV("defaulting to %f bpp", mBpp);
+    return mBpp;
+}
+
+bool CodecProperties::qpMaxPoint(std::string resolution, std::string value) {
+
+    int32_t width = 0;
+    int32_t height = 0;
+    int qpMax = INT32_MAX;
+
+    // resolution is "WxH", "W*H" or a standard name like "720p"
+    if (resolution == "1080p") {
+        width = 1080; height = 1920;
+    } else if (resolution == "720p") {
+        width = 720; height = 1280;
+    } else if (resolution == "540p") {
+        width = 540; height = 960;
+    } else if (resolution == "480p") {
+        width = 480; height = 854;
+    } else {
+        size_t sep = resolution.find('x');
+        if (sep == std::string::npos) {
+            sep = resolution.find('*');
+        }
+        if (sep == std::string::npos) {
+            ALOGW("unable to parse resolution: '%s'", resolution.c_str());
+            return false;
+        }
+        std::string w = resolution.substr(0, sep);
+        std::string h = resolution.substr(sep+1);
+
+        char *q;
+        const char *p = w.c_str();
+        width = strtol(p, &q, 0);
+        if (q == p) {
+            width = -1;
+        }
+        p = h.c_str();
+        height = strtol(p, &q, 0);
+        if (q == p) {
+            height = -1;
+        }
+        if (width <= 0 || height <= 0 || width > DIMENSION_LIMIT || height > DIMENSION_LIMIT) {
+            ALOGW("unparseable: width, height '%s'", resolution.c_str());
+            return false;
+        }
+    }
+
+    const char *p = value.c_str();
+    char *q;
+    qpMax = strtol(p, &q, 0);
+    if (q == p) {
+        ALOGW("unparseable qpmax '%s'", value.c_str());
+        return false;
+    }
+
+    // convert to our internal 'unspecified' notation
+    if (qpMax == -1)
+        qpMax = INT32_MAX;
+
+    struct qpmax_point *point = (struct qpmax_point*) malloc(sizeof(*point));
+    if (point == nullptr) {
+        ALOGW("unable to allocate memory for qpmax point");
+        return false;
+    }
+
+    point->pixels = width * height;
+    point->width = width;
+    point->height = height;
+    point->qpMax = qpMax;
+
+    if (mQpMaxPoints == nullptr) {
+        point->next = nullptr;
+        mQpMaxPoints = point;
+    } else if (point->pixels < mQpMaxPoints->pixels) {
+        // at the front
+        point->next = mQpMaxPoints;
+        mQpMaxPoints = point;
+    } else {
+        struct qpmax_point *after = mQpMaxPoints;
+        while (after->next) {
+            if (point->pixels > after->next->pixels) {
+                after = after->next;
+                continue;
+            }
+
+            // insert before after->next
+            point->next = after->next;
+            after->next = point;
+            break;
+        }
+        if (after->next == nullptr) {
+            // hasn't gone in yet
+            point->next = nullptr;
+            after->next = point;
+        }
+    }
+
+    return true;
+}
+
+int CodecProperties::targetQpMax(int32_t width, int32_t height) {
+    // look in the per-resolution list
+
+    int32_t pixels = width * height;
+
+    if (mQpMaxPoints) {
+        struct qpmax_point *point = mQpMaxPoints;
+        while (point && point->pixels < pixels) {
+            point = point->next;
+        }
+        if (point) {
+            ALOGV("targetQpMax(w=%d,h=%d) returns %d from qpmax_point w=%d h=%d",
+                width, height, point->qpMax, point->width, point->height);
+            return point->qpMax;
+        }
+    }
+
+    ALOGV("defaulting to %d qpmax", mTargetQpMax);
+    return mTargetQpMax;
+}
+
+void CodecProperties::setTargetQpMax(int qpMax) {
+    // convert to our internal 'unspecified' notation
+    if (qpMax == -1)
+        qpMax = INT32_MAX;
+    mTargetQpMax = qpMax;
+}
+
+std::string CodecProperties::getMapping(std::string key, std::string kind) {
+    ALOGV("getMapping(key %s, kind %s )", key.c_str(), kind.c_str());
+    // look up "<kind>-<key>" in mMappings
+    auto mapped = mMappings.find(kind + "-" + key);
+    if (mapped != mMappings.end()) {
+        std::string result = mapped->second;
+        ALOGV("getMapping(%s, %s) -> %s", key.c_str(), kind.c_str(), result.c_str());
+        return result;
+    }
+    ALOGV("nope, return unchanged key");
+    return key;
+}
+
+
+// really a bit of debugging code here.
+void CodecProperties::showMappings() {
+    ALOGD("Mappings:");
+    int count = 0;
+    for (const auto& [key, value] : mMappings) {
+         count++;
+         ALOGD("'%s' -> '%s'", key.c_str(), value.c_str());
+    }
+    ALOGD("total %d mappings", count);
+}
+
+void CodecProperties::setMapping(std::string kind, std::string key, std::string value) {
+    ALOGV("setMapping(%s,%s,%s)", kind.c_str(), key.c_str(), value.c_str());
+    std::string metaKey = kind + "-" + key;
+    mMappings.insert({metaKey, value});
+}
+
+const char **CodecProperties::getMappings(std::string kind, bool reverse) {
+    ALOGV("getMappings(kind %s, reverse %d)", kind.c_str(), reverse);
+    // how many do we need?
+    int count = mMappings.size();
+    if (count == 0) {
+        ALOGV("empty mappings");
+        return nullptr;
+    }
+    size_t size = sizeof(char *) * (2 * count + 2);
+    const char **result = (const char **)malloc(size);
+    if (result == nullptr) {
+        ALOGW("no memory to return mappings");
+        return nullptr;
+    }
+    memset(result, '\0', size);
+
+    const char **pp = result;
+    for (const auto& [key, value] : mMappings) {
+        // split out the kind/key
+        size_t pos = key.find('-');
+        if (pos == std::string::npos) {
+            ALOGD("ignoring malformed key: %s", key.c_str());
+            continue;
+        }
+        std::string actualKind = key.substr(0,pos);
+        if (kind.length() != 0 && kind != actualKind) {
+            ALOGD("kinds don't match: want '%s' got '%s'", kind.c_str(), actualKind.c_str());
+            continue;
+        }
+        if (reverse) {
+            // codec specific -> std aka 'unmapping'
+            pp[0] = strdup( value.c_str());
+            pp[1] = strdup( key.substr(pos+1).c_str());
+        } else {
+            // std -> codec specific
+            pp[0] = strdup( key.substr(pos+1).c_str());
+            pp[1] = strdup( value.c_str());
+        }
+        ALOGV(" %s -> %s", pp[0], pp[1]);
+        pp += 2;
+    }
+
+    pp[0] = nullptr;
+    pp[1] = nullptr;
+
+    return result;
+}
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
diff --git a/media/libmediaformatshaper/CodecProperties.h b/media/libmediaformatshaper/CodecProperties.h
new file mode 100644
index 0000000..196a40f
--- /dev/null
+++ b/media/libmediaformatshaper/CodecProperties.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+#define _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
+
+#include <map>
+#include <mutex>
+#include <string>
+
+#include <inttypes.h>
+
+#include <utils/RefBase.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+class CodecProperties {
+
+  public:
+    CodecProperties(std::string name, std::string mediaType);
+
+    // seed the codec with some preconfigured values
+    // (e.g. mediaType-granularity defaults)
+    // runs from the constructor
+    void Seed();
+    void Finish();
+
+    std::string getName();
+    std::string getMediaType();
+
+    // establish a mapping from standard 'key' to non-standard 'value' in the namespace 'kind'
+    void setMapping(std::string kind, std::string key, std::string value);
+
+    // translate from standard key to non-standard key
+    // return original standard key if there is no mapping
+    std::string getMapping(std::string key, std::string kind);
+
+    // returns an array of char *, which are paired "from" and "to" values
+    // for mapping (or unmapping). it's always expressed as from->to
+    // and 'reverse' describes which strings are to be on which side.
+    const char **getMappings(std::string kind, bool reverse);
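+
+    // Illustrative use of the mapping calls above (the kind/key/value strings are
+    // hypothetical, not real codec keys):
+    //   setMapping("tuning", "video-qp-max", "vendor.example.qp-max");
+    //   getMapping("video-qp-max", "tuning")  -> "vendor.example.qp-max"
+    //   getMapping("other-key", "tuning")     -> "other-key" (no mapping, unchanged)
+    //   getMappings("tuning", false)          -> { "video-qp-max", "vendor.example.qp-max",
+    //                                              nullptr, nullptr }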
+
+    // keep a map of all features and their parameters
+    void setFeatureValue(std::string key, int32_t value);
+    bool getFeatureValue(std::string key, int32_t *valuep);
+
+    // keep a map of all tunings and their parameters
+    void setTuningValue(std::string key, std::string value);
+    bool getTuningValue(std::string key, std::string &value);
+
+    // does the codec support the Android S minimum quality rules
+    void setSupportedMinimumQuality(int vmaf);
+    int supportedMinimumQuality();
+
+    // qp max bound used to compensate when SupportedMinimumQuality == 0
+    // 0 == let a system default handle it
+    void setTargetQpMax(int qpmax);
+    int targetQpMax(int32_t width, int32_t height);
+
+    // target bits-per-pixel (per second) for encoding operations.
+    // This is used to calculate a minimum bitrate for any particular resolution.
+    // A 1080p stream (1920*1080 = 2073600 pixels) encoded at 5Mbps has a bpp == 2.41
+    void setBpp(double bpp) { mBpp = bpp;}
+    double getBpp(int32_t width, int32_t height);
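+    // Worked example of the figures above, in the other direction: a 2.41 bpp target
+    // at 1080p implies a bitrate floor of roughly 2.41 * 2073600 ~= 5.0 Mbps.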
+
+    // Does this codec support QP bounding
+    // The getMapping() methods provide any needed mapping to non-standard keys.
+    void setSupportsQp(bool supported) { mSupportsQp = supported;}
+    bool supportsQp() { return mSupportsQp;}
+
+    // defines our range of operation -- multiplier on the floor bitrate
+    double getPhaseOut() { return mPhaseOut; }
+    void setPhaseOut(double overageMultiplier);
+
+    // how much (0.20 = +20%) do we add if Qp is requested but unsupported
+    double getMissingQpBoost() {return mMissingQpBoost; }
+    void setMissingQpBoost(double boost);
+
+    int  supportedApi();
+
+    // a codec is not usable until it has been registered with its
+    // name/mediaType.
+    bool isRegistered() { return mIsRegistered;}
+    void setRegistered(bool registered) { mIsRegistered = registered;}
+
+  private:
+    std::string mName;
+    std::string mMediaType;
+    int mApi = 0;
+    int mMinimumQuality = 0;
+    int mTargetQpMax = INT32_MAX;
+    bool mSupportsQp = false;
+    double mBpp = 0.0;
+
+    // target bitrates above floor * mPhaseOut are left untouched
+    double mPhaseOut = 1.75;
+    // 20% bump if QP is configured but it is unavailable
+    double mMissingQpBoost = 0.20;
+
+    // allow different target bits-per-pixel based on resolution
+    // similar to codec 'performance points'
+    // uses 'next largest' (by pixel count) point as minimum bpp
+    struct bpp_point {
+        struct bpp_point *next;
+        int32_t pixels;
+        int32_t width, height;
+        double bpp;
+    };
+    struct bpp_point *mBppPoints = nullptr;
+    bool bppPoint(std::string resolution, std::string value);
+
+    // same idea for qpmax -- allow different values based on resolution
+    // similar to codec 'performance points'
+    // uses the 'next largest' (by pixel count) point for the qpmax bound
+    struct qpmax_point {
+        struct qpmax_point *next;
+        int32_t pixels;
+        int32_t width, height;
+        int qpMax;
+    };
+    struct qpmax_point *mQpMaxPoints = nullptr;
+    bool qpMaxPoint(std::string resolution, std::string value);
+
+    std::mutex mMappingLock;
+    // XXX figure out why I'm having problems getting compiler to like GUARDED_BY
+    std::map<std::string, std::string> mMappings /*GUARDED_BY(mMappingLock)*/ ;
+
+    std::map<std::string, int32_t> mFeatures /*GUARDED_BY(mMappingLock)*/ ;
+    std::map<std::string, std::string> mTunings /*GUARDED_BY(mMappingLock)*/ ;
+
+    // Seed() and Finish() use this as the underlying implementation
+    void addMediaDefaults(bool overrideable);
+
+    bool mIsRegistered = false;
+
+    // debugging of what's in the mapping dictionary
+    void showMappings();
+
+    // DISALLOW_EVIL_CONSTRUCTORS(CodecProperties);
+};
+
+extern CodecProperties *findCodec(const char *codecName, const char *mediaType);
+extern CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType);
+
+
+} // namespace mediaformatshaper
+} // namespace android
+
+#endif  //  _LIBMEDIAFORMATSHAPER_CODECPROPERTIES_H_
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/libmediaformatshaper/CodecSeeding.cpp
new file mode 100644
index 0000000..d2ad7ac
--- /dev/null
+++ b/media/libmediaformatshaper/CodecSeeding.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecSeeding"
+#include <utils/Log.h>
+
+#include <string>
+
+#include "CodecProperties.h"
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * a block of pre-loaded tunings for codecs.
+ *
+ * things the library seeds into the codecproperties based
+ * on the mediaType.
+ * XXX: parsing from a file is likely better than embedding in code.
+ */
+typedef struct {
+    bool overrideable;
+    const char *key;
+    const char *value;
+} preloadTuning_t;
+
+typedef struct {
+    const char *mediaType;
+    preloadTuning_t *features;
+} preloadTunings_t;
+
+/*
+ * bpp == bits per pixel per second, for 30fps.
+ */
+
+static preloadTuning_t featuresAvc[] = {
+      {true, "vq-target-bpp", "0"},
+      {true, "vq-target-bpp-1080p", "1.90"},
+      {true, "vq-target-bpp-720p", "2.25"},
+      {true, "vq-target-bpp-540p", "2.65"},
+      {true, "vq-target-bpp-480p", "3.00"},
+      {true, "vq-target-bpp-320x240", "0"},
+      {true, "vq-target-qpmax", "-1"},
+      {true, "vq-target-qpmax-1080p", "45"},
+      {true, "vq-target-qpmax-720p", "43"},
+      {true, "vq-target-qpmax-540p", "42"},
+      {true, "vq-target-qpmax-480p", "38"},
+      {true, "vq-bitrate-phaseout", "1.75"},
+      {true, "vq-boost-missing-qp", "0.20"},
+      {true, nullptr, 0}
+};
+
+static preloadTuning_t featuresHevc[] = {
+      {true, "vq-target-bpp", "0"},
+      {true, "vq-target-bpp-1080p", "1.50"},
+      {true, "vq-target-bpp-720p", "1.80"},
+      {true, "vq-target-bpp-540p", "2.10"},
+      {true, "vq-target-bpp-480p", "2.30"},
+      {true, "vq-target-bpp-320x240", "0"},
+      {true, "vq-target-qpmax", "-1"},
+      {true, "vq-target-qpmax-1080p", "45"},
+      {true, "vq-target-qpmax-720p", "44"},
+      {true, "vq-target-qpmax-540p", "43"},
+      {true, "vq-target-qpmax-480p", "42"},
+      {true, "vq-bitrate-phaseout", "1.75"},
+      {true, "vq-boost-missing-qp", "0.20"},
+      {true, nullptr, 0}
+};
+
+static preloadTuning_t featuresGenericVideo[] = {
+        // 0 == off
+      {true, "vq-target-bpp", "0"},
+      {true, nullptr, 0}
+};
+
+static preloadTunings_t preloadTunings[] = {
+    { "video/avc", featuresAvc},
+    { "video/hevc", &featuresHevc[0]},
+
+    // wildcard for any video format not already captured
+    { "video/*", &featuresGenericVideo[0]},
+
+    { nullptr, nullptr}
+};
+
+void CodecProperties::addMediaDefaults(bool overrideable) {
+    ALOGD("Seed: codec %s, mediatype %s, overrideable %d",
+          mName.c_str(), mMediaType.c_str(), overrideable);
+
+    // load me up with initial configuration data
+    int count = 0;
+    for (int i = 0; ; i++) {
+        preloadTunings_t *p = &preloadTunings[i];
+        if (p->mediaType == nullptr) {
+            break;
+        }
+        bool found = false;
+        if (strcmp(p->mediaType, mMediaType.c_str()) == 0) {
+            found = true;
+        }
+        const char *r;
+        if (!found && (r = strchr(p->mediaType, '*')) != NULL) {
+            // wildcard; check the prefix
+            size_t len = r - p->mediaType;
+            if (strncmp(p->mediaType, mMediaType.c_str(), len) == 0) {
+                found = true;
+            }
+        }
+
+        if (!found) {
+            continue;
+        }
+        ALOGV("seeding from mediaType '%s'", p->mediaType);
+
+        // walk through, filling things
+        if (p->features != nullptr) {
+            for (int j=0;; j++) {
+                preloadTuning_t *q = &p->features[j];
+                if (q->key == nullptr) {
+                    break;
+                }
+                if (q->overrideable != overrideable) {
+                    continue;
+                }
+                setTuningValue(q->key, q->value);
+                count++;
+            }
+            break;
+        }
+    }
+    ALOGV("loaded %d preset values", count);
+}
+
+// a chance, as we create the codec, to inject any default behaviors we want.
+// XXX: consider whether we need pre/post or just post. it affects what can be
+// overridden by way of the codec XML
+//
+void CodecProperties::Seed() {
+    ALOGV("Seed: for codec %s, mediatype %s", mName.c_str(), mMediaType.c_str());
+    addMediaDefaults(true);
+}
+
+void CodecProperties::Finish() {
+    ALOGV("Finish: for codec %s, mediatype %s", mName.c_str(), mMediaType.c_str());
+    addMediaDefaults(false);
+}
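+
+// Note on ordering (as wired up through createShaper()/registerShaper()): entries
+// marked overrideable==true are applied by Seed() at creation time, before any
+// per-device XML tunings arrive, so the XML may replace them; entries marked
+// overrideable==false are applied by Finish() at registration time, after the XML,
+// so they take precedence. All entries in the tables above are currently overrideable.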
+
+} // namespace mediaformatshaper
+} // namespace android
+
diff --git a/media/libmediaformatshaper/FormatShaper.cpp b/media/libmediaformatshaper/FormatShaper.cpp
new file mode 100644
index 0000000..451f772
--- /dev/null
+++ b/media/libmediaformatshaper/FormatShaper.cpp
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FormatShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include "CodecProperties.h"
+#include "VideoShaper.h"
+#include "VQops.h"
+
+#include <media/formatshaper/FormatShaper.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+//
+// the interface to the outside
+//
+
+int shapeFormat(shaperHandle_t shaper, AMediaFormat* inFormat, int flags) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    if (!codec->isRegistered()) {
+        return -1;
+    }
+
+    // run through the list of possible transformations
+    //
+
+    std::string mediaType = codec->getMediaType();
+    if (strncmp(mediaType.c_str(), "video/", 6) == 0) {
+        // video specific shaping
+        (void) videoShaper(codec, inFormat, flags);
+
+    } else if (strncmp(mediaType.c_str(), "audio/", 6) == 0) {
+        // audio specific shaping
+
+    } else {
+        ALOGV("unknown mediatype '%s', left untouched", mediaType.c_str());
+
+    }
+
+    return 0;
+}
+
+int setMap(shaperHandle_t shaper,  const char *kind, const char *key, const char *value) {
+    ALOGV("setMap: kind %s key %s -> value %s", kind, key, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    codec->setMapping(kind, key, value);
+    return 0;
+}
+
+int setFeature(shaperHandle_t shaper, const char *feature, int value) {
+    ALOGV("set_feature: feature %s value %d", feature, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    // save a map of all features
+    codec->setFeatureValue(feature, value);
+
+    return 0;
+}
+
+int setTuning(shaperHandle_t shaper, const char *tuning, const char *value) {
+    ALOGV("setTuning: tuning %s value %s", tuning, value);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return -1;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return -1;
+    }
+
+    // save a map of all tunings
+    codec->setTuningValue(tuning, value);
+
+    return 0;
+}
+
+/*
+ * The routines that manage finding, creating, and registering the shapers.
+ */
+
+shaperHandle_t findShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = findCodec(codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t createShaper(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = new CodecProperties(codecName, mediaType);
+    if (codec != nullptr) {
+        codec->Seed();
+    }
+    return (shaperHandle_t) codec;
+}
+
+shaperHandle_t registerShaper(shaperHandle_t shaper, const char *codecName, const char *mediaType) {
+    ALOGV("registerShaper(handle, codecName %s, mediaType %s)", codecName, mediaType);
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr) {
+        return nullptr;
+    }
+    // must not yet be registered
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    // any final cleanup for the parameters. This allows us to override
+    // bad parameters from a device's XML file.
+    codec->Finish();
+
+    // may return a different codec, if we lost a race.
+    // if so, registerCodec() reclaims the one we tried to register for us.
+    codec = registerCodec(codec, codecName, mediaType);
+    return (shaperHandle_t) codec;
+}
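+
+// A minimal caller-side sketch of the find/create/register flow above; codecName,
+// mediaType and inFormat are placeholders supplied by the caller, and the tuning
+// key shown is only illustrative:
+//
+//   shaperHandle_t h = findShaper(codecName, mediaType);
+//   if (h == nullptr) {
+//       h = createShaper(codecName, mediaType);        // seeds per-mediaType defaults
+//       setTuning(h, "vq-target-bpp-720p", "2.25");    // only allowed before registration
+//       h = registerShaper(h, codecName, mediaType);   // may hand back an existing handle
+//   }
+//   (void) shapeFormat(h, inFormat, 0 /* flags */);    // inFormat stays owned by the caller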
+
+// mapping & unmapping
+// give me the mappings for 'kind'.
+// kind==null (or empty string), means *all* mappings
+
+const char **getMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ false);
+}
+
+const char **getReverseMappings(shaperHandle_t shaper, const char *kind) {
+    CodecProperties *codec = (CodecProperties*) shaper;
+    if (codec == nullptr)
+        return nullptr;
+    if (kind == nullptr)
+        kind = "";
+
+    return codec->getMappings(kind, /* reverse */ true);
+}
+
+
+// the system grabs this structure
+__attribute__ ((visibility ("default")))
+extern "C" FormatShaperOps_t shaper_ops = {
+    .version = SHAPER_VERSION_V1,
+
+    .findShaper = findShaper,
+    .createShaper = createShaper,
+    .setMap = setMap,
+    .setFeature = setFeature,
+    .registerShaper = registerShaper,
+
+    .shapeFormat = shapeFormat,
+    .getMappings = getMappings,
+    .getReverseMappings = getReverseMappings,
+
+    .setTuning = setTuning,
+};
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/ManageShapingCodecs.cpp b/media/libmediaformatshaper/ManageShapingCodecs.cpp
new file mode 100644
index 0000000..3061d0b
--- /dev/null
+++ b/media/libmediaformatshaper/ManageShapingCodecs.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ManageShapingCodecs"
+#include <utils/Log.h>
+
+#include <mutex>
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include "CodecProperties.h"
+
+namespace android {
+namespace mediaformatshaper {
+
+// manage the list of codec information.
+//
+// XXX: the mutex here is too heavy; rework that.
+//
+
+static std::mutex sCodecMutex;
+static std::map<std::string, CodecProperties*> sCodecTraits;
+
+CodecProperties *findCodec(const char *codecName, const char *mediaType) {
+    CodecProperties *codec = nullptr;
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
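+    // e.g. a hypothetical key: "c2.android.avc.encoder-video/avc"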
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        codec = it->second;
+    }
+
+    return codec;
+}
+
+CodecProperties *registerCodec(CodecProperties *codec, const char *codecName,
+                               const char *mediaType) {
+
+    CodecProperties *registeredCodec = nullptr;
+
+    if (codec->isRegistered()) {
+        return nullptr;
+    }
+
+    // synthesize a name from both codecName + mediaType
+    // some codecs support multiple media types and may have different capabilities
+    // for each media type
+    //
+    std::string codecKey = codecName;
+    codecKey += "-";
+    codecKey += mediaType;
+
+    std::lock_guard  _l(sCodecMutex);
+
+    auto it = sCodecTraits.find(codecKey);
+    if (it != sCodecTraits.end()) {
+        registeredCodec = it->second;
+    }
+
+    if (registeredCodec == nullptr) {
+        // register the one that was passed to us
+        ALOGV("Creating entry for codec %s, mediaType %s, key %s", codecName, mediaType,
+              codecKey.c_str());
+        sCodecTraits.insert({codecKey, codec});
+        registeredCodec = codec;
+        codec->setRegistered(true);
+    } else {
+        // one has already been registered, use that
+        // and discard the candidate
+        delete codec;
+        codec = nullptr;
+    }
+
+    return registeredCodec;
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
new file mode 100644
index 0000000..26ff446
--- /dev/null
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -0,0 +1,342 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VQApply"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include "VQops.h"
+#include "CodecProperties.h"
+#include "VideoShaper.h"
+
+namespace android {
+namespace mediaformatshaper {
+
+
+// these keys are defined in NDK API 31, but we build against NDK API 29 (to stay within the module);
+// the __builtin_available(android 31, *) constructs did not work here.
+//
+#define	AMEDIAFORMAT_VIDEO_QP_MAX	"video-qp-max"
+#define	AMEDIAFORMAT_VIDEO_QP_MIN	"video-qp-min"
+
+#define	AMEDIAFORMAT_VIDEO_QP_B_MAX	"video-qp-b-max"
+#define	AMEDIAFORMAT_VIDEO_QP_B_MIN	"video-qp-b-min"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MAX	"video-qp-i-max"
+#define	AMEDIAFORMAT_VIDEO_QP_I_MIN	"video-qp-i-min"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MAX	"video-qp-p-max"
+#define	AMEDIAFORMAT_VIDEO_QP_P_MIN	"video-qp-p-min"
+
+// defined in the SDK, but not in the NDK
+//
+static const int BITRATE_MODE_VBR = 1;
+
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags) {
+    ALOGV("codecName %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+    (void) info; // unused for now
+
+    int32_t bitRateMode = -1;
+    if (AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BITRATE_MODE, &bitRateMode)
+        && bitRateMode != BITRATE_MODE_VBR) {
+        ALOGD("minquality: applies only to VBR encoding");
+        return 0;
+    }
+
+    // only proceed if we're in the handheld category.
+    // We embed this information within the codec record when we build up features
+    // and pass them in from MediaCodec; it's the easiest place to store it
+    //
+    // TODO: make a #define for ' _vq_eligible.device' here and in MediaCodec.cpp
+    //
+    int32_t isVQEligible = 0;
+    (void) codec->getFeatureValue("_vq_eligible.device", &isVQEligible);
+    if (!isVQEligible) {
+        ALOGD("minquality: not an eligible device class");
+        return 0;
+    }
+
+    // look at resolution to determine if we want any shaping/modification at all.
+    //
+    // we currently only shape (or ask the underlying codec to shape) for
+    // resolution range 320x240 < target <= 1920x1088 (1088 allows for 16-aligned 1080p)
+    // NB: the < vs <= is deliberate.
+    //
+
+    int32_t width = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+    int32_t height = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+    int64_t pixels = ((int64_t)width) * height;
+
+    bool eligibleSize = true;
+    if (pixels <= 320 * 240) {
+        eligibleSize = false;
+    } else if (pixels > 1920 * 1088) {
+        eligibleSize = false;
+    }
+
+    if (!eligibleSize) {
+        // we won't shape, and ask that the codec not shape
+        ALOGD("minquality: %dx%d outside of shaping range", width, height);
+        AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+        return 0;
+    }
+
+    if (codec->supportedMinimumQuality() > 0) {
+        // the codec provides its own minimum-quality behavior; let it handle this
+        ALOGD("minquality: codec claims to implement minquality=%d",
+              codec->supportedMinimumQuality());
+
+        // tell the underlying codec to do its thing; we won't try to second guess.
+        // default to 1, aka S_HANDHELD;
+        int32_t qualityTarget = 1;
+        (void) codec->getFeatureValue("_quality.target", &qualityTarget);
+        AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", qualityTarget);
+        return 0;
+    }
+
+    // let the codec know that we'll be enforcing the minimum quality standards
+    AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+
+    //
+    // consider any and all tools available
+    // -- qp
+    // -- minimum bits-per-pixel
+    //
+    int64_t bitrateChosen = 0;
+    int32_t qpChosen = INT32_MAX;
+
+    int64_t bitrateConfigured = 0;
+    int32_t bitrateConfiguredTmp = 0;
+    (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, &bitrateConfiguredTmp);
+    bitrateConfigured = bitrateConfiguredTmp;
+    bitrateChosen = bitrateConfigured;
+
+    // width, height, and pixels are calculated above
+
+    double minimumBpp = codec->getBpp(width, height);
+
+    int64_t bitrateFloor = pixels * minimumBpp;
+    int64_t bitrateCeiling = bitrateFloor * codec->getPhaseOut();
+    if (bitrateFloor > INT32_MAX) bitrateFloor = INT32_MAX;
+    if (bitrateCeiling > INT32_MAX) bitrateCeiling = INT32_MAX;
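+    // e.g. with a hypothetical minimum of 2.0 bpp and a 2.0x phase-out factor, a 1920x1080
+    // stream (~2.07 Mpixels) gets a floor of ~4.1 Mbps and a shaping ceiling of ~8.3 Mbps.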
+
+    // if we are far enough above the target bpp, leave it alone
+    //
+    ALOGV("bitrate: configured %" PRId64 " floor %" PRId64, bitrateConfigured, bitrateFloor);
+    if (bitrateConfigured >= bitrateCeiling) {
+        ALOGV("high enough bitrate: configured %" PRId64 " >= ceiling %" PRId64,
+                bitrateConfigured, bitrateCeiling);
+        return 0;
+    }
+
+    // raise anything below the bitrate floor
+    if (bitrateConfigured < bitrateFloor) {
+        ALOGD("raise bitrate: configured %" PRId64 " to floor %" PRId64,
+                bitrateConfigured, bitrateFloor);
+        bitrateChosen = bitrateFloor;
+    }
+
+    bool qpPresent = hasQpMax(inFormat);
+
+    // calculate a target QP value
+    int32_t qpmax = codec->targetQpMax(width, height);
+    if (!qpPresent) {
+        // the user didn't set one, so the shaper's value wins
+        if (qpmax != INT32_MAX) {
+            ALOGV("choosing qp=%d", qpmax);
+            qpChosen = qpmax;
+        }
+    } else if (qpmax == INT32_MAX) {
+        // the shaper didn't set one, so the user's value wins
+        qpChosen = INT32_MAX;
+        AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, &qpChosen);
+    } else {
+        // both sides want it, choose most restrictive
+        int32_t value = INT32_MAX;
+        AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, &value);
+        qpChosen = std::min(qpmax, value);
+    }
+
+    // if QP is desired but not supported, compensate with additional bits
+    if (!codec->supportsQp()) {
+        if (qpChosen != INT32_MAX) {
+            int64_t boost = bitrateChosen * codec->getMissingQpBoost();
+            ALOGD("minquality: requested QP unsupported, boost bitrate %" PRId64 " by %" PRId64,
+                bitrateChosen, boost);
+            bitrateChosen =  bitrateChosen + boost;
+            qpChosen = INT32_MAX;
+        }
+    }
+
+    // limits
+    // apply our chosen values
+    //
+    if (qpChosen != INT32_MAX) {
+        ALOGD("minquality by QP: inject %s=%d", AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_VIDEO_QP_MAX, qpChosen);
+
+        // caller (VideoShaper) handles spreading this across the per-frame-type keys
+    }
+
+    if (bitrateChosen != bitrateConfigured) {
+        if (bitrateChosen > bitrateCeiling) {
+            ALOGD("minquality: bitrate increase clamped at ceiling %" PRId64,  bitrateCeiling);
+            bitrateChosen = bitrateCeiling;
+        }
+        ALOGD("minquality/target bitrate raised from %" PRId64 " to %" PRId64 " bps",
+              bitrateConfigured, bitrateChosen);
+        AMediaFormat_setInt32(inFormat, AMEDIAFORMAT_KEY_BIT_RATE, (int32_t)bitrateChosen);
+    }
+
+    return 0;
+}
+
+
+bool hasQpMaxPerFrameType(AMediaFormat *format) {
+    int32_t value;
+
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &value)) {
+        return true;
+    }
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &value)) {
+        return true;
+    }
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &value)) {
+        return true;
+    }
+    return false;
+}
+
+bool hasQpMaxGlobal(AMediaFormat *format) {
+    int32_t value;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &value)
+        || AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &value)) {
+        return true;
+    }
+    return false;
+}
+
+bool hasQpMax(AMediaFormat *format) {
+    if (hasQpMaxGlobal(format)) {
+        return true;
+    }
+    return hasQpMaxPerFrameType(format);
+}
+
+void qpSpreadPerFrameType(AMediaFormat *format, int delta,
+                           int qplow, int qphigh, bool override) {
+
+     qpSpreadMinPerFrameType(format, qplow, override);
+     qpSpreadMaxPerFrameType(format, delta, qphigh, override);
+     // make sure that min<max for all the QP fields.
+     qpVerifyMinMaxOrdering(format);
+}
+
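+// Illustrative sketch with hypothetical values: given video-qp-max=38, delta=3, and qphigh=51,
+// the max spread below yields I<=38, P<=41, B<=44; any tighter per-frame-type values already
+// present in the format are kept, since we always take the most restrictive bound.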
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override) {
+    ALOGV("format %p delta %d  hi %d override %d", format, delta, qphigh, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MAX, &qpOffered)) {
+        // propagate to frame-specific keys, choosing most restrictive
+        // ensure that we don't violate min<=max rules
+        {
+            int32_t maxI = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI);
+            int32_t value = std::min({qpOffered, qphigh, maxI});
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, value);
+        }
+        {
+            int32_t maxP = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP);
+            int32_t value = std::min({(std::min(qpOffered, INT32_MAX-1*delta) + 1*delta),
+                                     qphigh, maxP});
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, value);
+        }
+        {
+            int32_t maxB = INT32_MAX;
+            AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB);
+            int32_t value = std::min({(std::min(qpOffered, INT32_MAX-2*delta) + 2*delta),
+                                     qphigh, maxB});
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, value);
+        }
+    }
+}
+
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override) {
+    ALOGV("format %p lo %d override %d", format, qplow, override);
+
+    int32_t qpOffered = 0;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_MIN, &qpOffered)) {
+        int value = std::max(qplow, qpOffered);
+        // propagate to frame-specific keys, use lowest of this and existing per-frame value
+        int32_t minI = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI);
+        int32_t setI = std::min(value, minI);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, setI);
+
+        int32_t minP = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP);
+        int32_t setP = std::min(value, minP);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, setP);
+
+        int32_t minB = INT32_MAX;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB);
+        int32_t setB = std::min(value, minB);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, setB);
+    }
+}
+
+// XXX: decide whether we allow min==max, or insist that min<max
+void qpVerifyMinMaxOrdering(AMediaFormat *format) {
+    // ensure that we don't violate min<=max rules
+    int32_t maxI = INT32_MAX;
+    int32_t minI = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MAX, &maxI)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, &minI)
+        && minI > maxI) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_I_MIN, maxI);
+    }
+    int32_t maxP = INT32_MAX;
+    int32_t minP = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MAX, &maxP)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, &minP)
+        && minP > maxP) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_P_MIN, maxP);
+    }
+    int32_t maxB = INT32_MAX;
+    int32_t minB = INT32_MIN;
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MAX, &maxB)
+        && AMediaFormat_getInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, &minB)
+        && minB > maxB) {
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_VIDEO_QP_B_MIN, maxB);
+    }
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VQops.h b/media/libmediaformatshaper/VQops.h
new file mode 100644
index 0000000..74cce18
--- /dev/null
+++ b/media/libmediaformatshaper/VQops.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VQOPS_H_
+#define LIBMEDIAFORMATSHAPER_VQOPS_H_
+
+#include "CodecProperties.h"
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+namespace mediaformatshaper {
+
+// parameterized info for the different media types
+typedef struct {
+    const char *mediaType;
+
+    int32_t qpMin;      // codec type limit (e.g. h264, not c2.android.avc.encoder)
+    int32_t qpMax;
+    int32_t qpDelta;    // from I to P to B
+
+} vqOps_t;
+
+int VQApply(CodecProperties *codec, vqOps_t *info, AMediaFormat* inFormat, int flags);
+
+// spread the overall QP setting to any un-set per-frame-type settings
+void qpSpreadPerFrameType(AMediaFormat *format, int delta, int qplow, int qphigh, bool override);
+void qpSpreadMaxPerFrameType(AMediaFormat *format, int delta, int qphigh, bool override);
+void qpSpreadMinPerFrameType(AMediaFormat *format, int qplow, bool override);
+void qpVerifyMinMaxOrdering(AMediaFormat *format);
+
+// does the format have QP bounding entries
+bool hasQpMax(AMediaFormat *format);
+bool hasQpMaxGlobal(AMediaFormat *format);
+bool hasQpMaxPerFrameType(AMediaFormat *format);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VQOPS_H_
diff --git a/media/libmediaformatshaper/VideoShaper.cpp b/media/libmediaformatshaper/VideoShaper.cpp
new file mode 100644
index 0000000..cf8b50f
--- /dev/null
+++ b/media/libmediaformatshaper/VideoShaper.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoShaper"
+#include <utils/Log.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include <media/NdkMediaFormat.h>
+
+#include "CodecProperties.h"
+#include "VideoShaper.h"
+#include "VQops.h"
+
+namespace android {
+namespace mediaformatshaper {
+
+// mediatype-specific operations
+
+vqOps_t mediaInfo[] = {
+    {
+        .mediaType = "video/avc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = "video/hevc",
+        .qpMin = 0,
+        .qpMax = 51,
+        .qpDelta = 3,
+    },
+    {
+        .mediaType = NULL,                // matches everything, it must come last
+        .qpMin = INT32_MIN,
+        .qpMax = INT32_MAX,
+        .qpDelta = 3,
+    }
+};
+int nMediaInfos = sizeof(mediaInfo) / sizeof(mediaInfo[0]);
+
+//
+// Caller retains ownership of and responsibility for inFormat
+//
+
+int videoShaper(CodecProperties *codec, AMediaFormat* inFormat, int flags) {
+    if (codec == nullptr) {
+        return -1;
+    }
+    ALOGV("codec %s inFormat %p flags x%x", codec->getName().c_str(), inFormat, flags);
+
+    int ix;
+
+    std::string mediaType = codec->getMediaType();
+    // we should always come out of this with a selection, because the final entry
+    // is deliberately a NULL -- so that it will act as a default
+    for(ix = 0; mediaInfo[ix].mediaType != NULL; ix++) {
+        if (strcmp(mediaType.c_str(), mediaInfo[ix].mediaType) == 0) {
+            break;
+        }
+    }
+    if (ix >= nMediaInfos) {
+        // shouldn't happen: the NULL sentinel above guarantees a match before we run off the end
+    }
+
+    vqOps_t *info = &mediaInfo[ix];
+
+    // apply any quality transforms in here..
+    (void) VQApply(codec, info, inFormat, flags);
+
+    // We always spread any QP parameters.
+    // Sometimes it's something we inserted here, sometimes it's a value that the user injected.
+    //
+    qpSpreadPerFrameType(inFormat, info->qpDelta, info->qpMin, info->qpMax, /* override */ true);
+
+    //
+    return 0;
+
+}
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
diff --git a/media/libmediaformatshaper/VideoShaper.h b/media/libmediaformatshaper/VideoShaper.h
new file mode 100644
index 0000000..53f1b13
--- /dev/null
+++ b/media/libmediaformatshaper/VideoShaper.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * runs through video-specific shaping operations for the codec/format combination.
+ * updates inFormat in place.
+ */
+int videoShaper(CodecProperties *codec,  AMediaFormat* inFormat, int flags);
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_VIDEOSHAPER_H_
diff --git a/media/libmediaformatshaper/exports.lds b/media/libmediaformatshaper/exports.lds
new file mode 100644
index 0000000..a29cadb
--- /dev/null
+++ b/media/libmediaformatshaper/exports.lds
@@ -0,0 +1,6 @@
+{
+    global:
+        shaper_ops;
+    local:
+        *;
+};
diff --git a/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
new file mode 100644
index 0000000..a1747cc
--- /dev/null
+++ b/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * structure defining the function pointers that system-side code
+ * uses to invoke operations within the MediaFormat shaping library
+ *
+ * This is the include file the outside world uses.
+ */
+
+#ifndef LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+#define LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
+
+namespace android {
+namespace mediaformatshaper {
+
+/*
+ * An opaque handle clients use to refer to codec+mediatype being shaped.
+ */
+typedef void (*shaperHandle_t);
+
+/*
+ * shapeFormat applies any re-shaping on the passed AMediaFormat.
+ * The updated format is returned in-place.
+ */
+typedef int (*shapeFormat_t)(shaperHandle_t shaperHandle,
+                             AMediaFormat* inFormat, int flags);
+
+/*
+ * getMappings returns any mappings from standard keys to codec-specific keys.
+ * The return is an array of const char* arranged in pairs of "from" and "to".
+ * The array is always terminated by a pair of nulls (a null "from" and a
+ * null "to").
+ */
+
+typedef const char **(*getMappings_t)(shaperHandle_t shaperHandle, const char *kind);
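+
+/*
+ * A minimal sketch (hypothetical names) of walking the returned pairs:
+ *
+ *     const char **map = ops->getMappings(shaperHandle, "");
+ *     for (int i = 0; map != nullptr && map[i] != nullptr; i += 2) {
+ *         // map[i] is the "from" key, map[i+1] is the codec-specific "to" key
+ *     }
+ */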
+
+/*
+ * Returns a handle to the shaperHandle for the specified codec and mediatype.
+ * If none exists, it returns null.
+ */
+typedef shaperHandle_t (*findShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Creates and returns an empty shaperHandle that the client can populate using the
+ * setFeature() and setMap() operations.
+ */
+typedef shaperHandle_t (*createShaper_t)(const char *codecName, const char *mediaType);
+
+/*
+ * Registers the indicated shaperHandle for the indicated codec and mediatype.
+ * This call returns the shaperHandle that is to be used for further shaper operations.
+ * The returned value may differ from the one passed as an argument if another
+ * shaper was registered for this codec/mediaType while the passed one was being configured.
+ */
+typedef shaperHandle_t (*registerShaper_t)(shaperHandle_t shaper, const char *codecName,
+                                         const char *mediaType);
+
+/*
+ * establishes a mapping between the standard key "from" and the codec-specific key "to"
+ * in the "kind" namespace. This mapping is specific to the indicated codecName when
+ * encoding for the indicated mediaType.
+ */
+typedef int (*setMap_t)(shaperHandle_t shaper, const char *kind, const char *from, const char *to);
+
+/*
+ * establishes that codec "codecName" encoding for "mediaType" supports the indicated
+ * feature at the indicated value
+ */
+typedef int (*setFeature_t)(shaperHandle_t shaper, const char *feature, int value);
+
+/*
+ * establishes that codec "codecName" encoding for "mediaType" supports the indicated
+ * tuning at the indicated value
+ */
+typedef int (*setTuning_t)(shaperHandle_t shaper, const char *tuning, const char *value);
+
+/*
+ * The expectation is that the client will implement a flow similar to the following when
+ * setting up an encoding.
+ *
+ * if ((shaper=formatShaperops->findShaper(codecName, mediaType)) == NULL) {
+ *     for (all codec features) {
+ *         get feature name, feature value
+ *         formatShaperops->setFeature(shaper,, featurename, featurevalue)
+ *     }
+ *     for (all codec mappings) {
+ *         get mapping 'kind', mapping 'from', mapping 'to'
+ *         formatShaperops->setMap(shaper, kind, from, to)
+ *     }
+ * }
+ *
+ */
+
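+/*
+ * A hypothetical sketch of how the ops table below might be located, assuming the library
+ * is built as libmediaformatshaper.so and loaded with dlopen() (only "shaper_ops" is
+ * exported; see exports.lds):
+ *
+ *     void *lib = dlopen("libmediaformatshaper.so", RTLD_NOW | RTLD_LOCAL);
+ *     FormatShaperOps_t *ops = (FormatShaperOps_t *) dlsym(lib, "shaper_ops");
+ *     if (ops != nullptr && ops->version == SHAPER_VERSION_V1) {
+ *         // safe to call the entry points described above
+ *     }
+ */
+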
+typedef struct FormatShaperOps {
+    const uint32_t version;
+
+    /*
+     * find, create, setup, and register the shaper info
+     */
+    findShaper_t findShaper;
+    createShaper_t createShaper;
+    setMap_t setMap;
+    setFeature_t setFeature;
+    registerShaper_t registerShaper;
+
+    /*
+     * use the shaper info
+     */
+    shapeFormat_t shapeFormat;
+    getMappings_t getMappings;
+    getMappings_t getReverseMappings;
+
+    setTuning_t setTuning;
+
+    // additions happen at the end of the structure
+} FormatShaperOps_t;
+
+// versioning information
+const uint32_t SHAPER_VERSION_UNKNOWN = 0;
+const uint32_t SHAPER_VERSION_V1 = 1;
+
+}  // namespace mediaformatshaper
+}  // namespace android
+
+#endif  // LIBMEDIAFORMATSHAPER_FORMATSHAPER_H_
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 6fcbc7b..9b54199 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -1,8 +1,29 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libmedia_helper_headers",
     vendor_available: true,
     min_sdk_version: "29",
     export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth.updatable",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
 }
 
 cc_library {
@@ -12,7 +33,11 @@
         enabled: true,
     },
     double_loadable: true,
-    srcs: ["AudioParameter.cpp", "TypeConverter.cpp"],
+    srcs: [
+        "AudioParameter.cpp",
+        "AudioValidator.cpp",
+        "TypeConverter.cpp",
+    ],
     cflags: [
         "-Werror",
         "-Wextra",
@@ -27,4 +52,10 @@
         "libmedia_helper_headers",
     ],
     clang: true,
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index fc8306c..382a920 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -57,6 +57,10 @@
 //        AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_MODES;
 // const char * const AudioParameter::keyDeviceSupportedEncapsulationMetadataTypes =
 //        AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_METADATA_TYPES;
+const char * const AudioParameter::keyAdditionalOutputDeviceDelay =
+        AUDIO_PARAMETER_DEVICE_ADDITIONAL_OUTPUT_DELAY;
+const char * const AudioParameter::keyMaxAdditionalOutputDeviceDelay =
+        AUDIO_PARAMETER_DEVICE_MAX_ADDITIONAL_OUTPUT_DELAY;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
new file mode 100644
index 0000000..7eddbe1
--- /dev/null
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -0,0 +1,183 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/AudioValidator.h>
+#include <cmath>
+
+namespace android {
+
+/** returns true if the string is not null-terminated within its buffer (overflow). */
+template <size_t size>
+bool checkStringOverflow(const char (&s)[size]) {
+    return strnlen(s, size) >= size;
+}
+
+status_t safetyNetLog(status_t status, std::string_view bugNumber) {
+    if (status != NO_ERROR && !bugNumber.empty()) {
+        android_errorWriteLog(0x534e4554, bugNumber.data()); // SafetyNet logging
+    }
+    return status;
+}
+
+status_t AudioValidator::validateAudioAttributes(
+        const audio_attributes_t& attr, std::string_view bugNumber)
+{
+    status_t status = NO_ERROR;
+    const size_t tagsMaxSize = AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+    if (strnlen(attr.tags, tagsMaxSize) >= tagsMaxSize) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+status_t AudioValidator::validateEffectDescriptor(
+        const effect_descriptor_t& desc, std::string_view bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (checkStringOverflow(desc.name)
+        | /* always */ checkStringOverflow(desc.implementor)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+status_t AudioValidator::validateAudioPortConfig(
+        const struct audio_port_config& config, std::string_view bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (config.type == AUDIO_PORT_TYPE_DEVICE &&
+        checkStringOverflow(config.ext.device.address)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+namespace {
+
+template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+                                    || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+static status_t validateAudioPortInternal(const T& port, std::string_view bugNumber = {}) {
+    status_t status = NO_ERROR;
+    if (checkStringOverflow(port.name)) {
+        status = BAD_VALUE;
+    }
+    if (AudioValidator::validateAudioPortConfig(port.active_config) != NO_ERROR) {
+        status = BAD_VALUE;
+    }
+    if (port.type == AUDIO_PORT_TYPE_DEVICE &&
+        checkStringOverflow(port.ext.device.address)) {
+        status = BAD_VALUE;
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+} // namespace
+
+status_t AudioValidator::validateAudioPort(
+        const struct audio_port& port, std::string_view bugNumber)
+{
+    return validateAudioPortInternal(port, bugNumber);
+}
+
+status_t AudioValidator::validateAudioPort(
+        const struct audio_port_v7& port, std::string_view bugNumber)
+{
+    return validateAudioPortInternal(port, bugNumber);
+}
+
+/** returns BAD_VALUE if the patch has invalid port counts or invalid port configs. */
+status_t AudioValidator::validateAudioPatch(
+        const struct audio_patch& patch, std::string_view bugNumber)
+{
+    status_t status = NO_ERROR;
+    if (patch.num_sources > AUDIO_PATCH_PORTS_MAX) {
+        status = BAD_VALUE;
+    }
+    if (patch.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+        status = BAD_VALUE;
+    }
+    for (size_t i = 0; i < patch.num_sources; i++) {
+        if (validateAudioPortConfig(patch.sources[i]) != NO_ERROR) {
+            status = BAD_VALUE;
+        }
+    }
+    for (size_t i = 0; i < patch.num_sinks; i++) {
+        if (validateAudioPortConfig(patch.sinks[i]) != NO_ERROR) {
+            status = BAD_VALUE;
+        }
+    }
+    return safetyNetLog(status, bugNumber);
+}
+
+/* static */
+status_t AudioValidator::validateAudioDescriptionMixLevel(float leveldB)
+{
+    constexpr float MAX_AUDIO_DESCRIPTION_MIX_LEVEL = 48.f;
+    return std::isnan(leveldB) || leveldB > MAX_AUDIO_DESCRIPTION_MIX_LEVEL ? BAD_VALUE : OK;
+}
+
+/* static */
+status_t AudioValidator::validateDualMonoMode(audio_dual_mono_mode_t dualMonoMode)
+{
+    switch (dualMonoMode) {
+        case AUDIO_DUAL_MONO_MODE_OFF:
+        case AUDIO_DUAL_MONO_MODE_LR:
+        case AUDIO_DUAL_MONO_MODE_LL:
+        case AUDIO_DUAL_MONO_MODE_RR:
+        return OK;
+    }
+    return BAD_VALUE;
+}
+
+/* static */
+status_t AudioValidator::validatePlaybackRateFallbackMode(
+        audio_timestretch_fallback_mode_t fallbackMode)
+{
+    switch (fallbackMode) {
+        case AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT:
+            // This is coarse-sounding timestretching used for internal debugging,
+            // not intended for general use.
+            break; // if this case were not listed, the compiler would warn; falls through to BAD_VALUE.
+        case AUDIO_TIMESTRETCH_FALLBACK_DEFAULT:
+        case AUDIO_TIMESTRETCH_FALLBACK_MUTE:
+        case AUDIO_TIMESTRETCH_FALLBACK_FAIL:
+            return OK;
+    }
+    return BAD_VALUE;
+}
+
+/* static */
+status_t AudioValidator::validatePlaybackRateStretchMode(
+        audio_timestretch_stretch_mode_t stretchMode)
+{
+    switch (stretchMode) {
+        case AUDIO_TIMESTRETCH_STRETCH_DEFAULT:
+        case AUDIO_TIMESTRETCH_STRETCH_VOICE:
+            return OK;
+    }
+    return BAD_VALUE;
+}
+
+/* static */
+status_t AudioValidator::validatePlaybackRate(
+        const audio_playback_rate_t& playbackRate)
+{
+    if (playbackRate.mSpeed < 0.f || playbackRate.mPitch < 0.f) return BAD_VALUE;
+    return validatePlaybackRateFallbackMode(playbackRate.mFallbackMode) ?:
+            validatePlaybackRateStretchMode(playbackRate.mStretchMode);
+}
+
+}; // namespace android
diff --git a/media/libmediahelper/TEST_MAPPING b/media/libmediahelper/TEST_MAPPING
new file mode 100644
index 0000000..f9594bd
--- /dev/null
+++ b/media/libmediahelper/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "libmedia_helper_tests"
+    }
+  ]
+}
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index 6382ce4..d3a517f 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -18,307 +18,9 @@
 
 namespace android {
 
-#define MAKE_STRING_FROM_ENUM(string) { #string, string }
+#define MAKE_STRING_FROM_ENUM(enumval) { #enumval, enumval }
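+// e.g. MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID) expands to { "AUDIO_MODE_INVALID", AUDIO_MODE_INVALID }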
 #define TERMINATOR { .literal = nullptr }
 
-template <>
-const OutputDeviceConverter::Table OutputDeviceConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ALL_USB),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_PROXY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HEARING_AID),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ECHO_CANCELLER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DEFAULT),
-    // STUB must be after DEFAULT, so the latter is picked up by toString first.
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
-    TERMINATOR
-};
-
-template <>
-const InputDeviceConverter::Table InputDeviceConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_SCO),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI_ARC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ALL_USB),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_PROXY),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_HEADSET),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_BLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ECHO_REFERENCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DEFAULT),
-    // STUB must be after DEFAULT, so the latter is picked up by toString first.
-    MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
-    TERMINATOR
-};
-
-
-template <>
-const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT_PCM),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_VOIP_RX),
-    MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_INCALL_MUSIC),
-    TERMINATOR
-};
-
-
-template <>
-const InputFlagConverter::Table InputFlagConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_NONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_MMAP_NOIRQ),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_VOIP_TX),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_AV_SYNC),
-    MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_DIRECT),
-    TERMINATOR
-};
-
-
-template <>
-const FormatConverter::Table FormatConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_NB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_XHE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_MAIN),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SSR),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LTP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SCALABLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ERLC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ELD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_XHE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCWB),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCNW),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADIF),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA_PRO),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB_PLUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_QCELP),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DSD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_FLAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_ALAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_SBC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_HD),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC4),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LDAC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3_JOC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_1_0),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_0),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_LC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V1),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V2),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_CELT),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
-    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_TWSP),
-    TERMINATOR
-};
-
-
-template <>
-const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_SIDE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_SURROUND),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_PENTA),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_SIDE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT4),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_6POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT4),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_AB),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB),
-    TERMINATOR
-};
-
-
-template <>
-const InputChannelConverter::Table InputChannelConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_6),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT0POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT1POINT2),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_5POINT1),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO),
-    MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_CALL_MONO),
-    TERMINATOR
-};
-
-template <>
-const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
-    {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
-    {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
-    {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
-    {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
-    {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
-    {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
-    {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
-    {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
-    TERMINATOR
-};
-
-
-template <>
-const GainModeConverter::Table GainModeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
-    MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
-    TERMINATOR
-};
-
-
-template <>
-const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DEFAULT),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO ),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
-    MAKE_STRING_FROM_ENUM(AUDIO_STREAM_CALL_ASSISTANT),
-    TERMINATOR
-};
-
 template<>
 const AudioModeConverter::Table AudioModeConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID),
@@ -331,62 +33,6 @@
     TERMINATOR
 };
 
-template<>
-const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_UNKNOWN),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SPEECH),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MUSIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MOVIE),
-    MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SONIFICATION),
-    TERMINATOR
-};
-
-template <>
-const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MEDIA),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ALARM),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_EVENT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_GAME),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
-    MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
-    TERMINATOR
-};
-
-template <>
-const SourceTypeConverter::Table SourceTypeConverter::mTable[] = {
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_DEFAULT),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MIC),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_UPLINK),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_DOWNLINK),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_CALL),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CAMCORDER),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_RECOGNITION),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_COMMUNICATION),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_REMOTE_SUBMIX),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_UNPROCESSED),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_PERFORMANCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_ECHO_REFERENCE),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_FM_TUNER),
-    MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_HOTWORD),
-    TERMINATOR
-};
-
 template <>
 const AudioFlagConverter::Table AudioFlagConverter::mTable[] = {
     MAKE_STRING_FROM_ENUM(AUDIO_FLAG_NONE),
@@ -409,6 +55,7 @@
 
 template class TypeConverter<OutputDeviceTraits>;
 template class TypeConverter<InputDeviceTraits>;
+template class TypeConverter<DeviceTraits>;
 template class TypeConverter<OutputFlagTraits>;
 template class TypeConverter<InputFlagTraits>;
 template class TypeConverter<FormatTraits>;
@@ -422,11 +69,6 @@
 template class TypeConverter<SourceTraits>;
 template class TypeConverter<AudioFlagTraits>;
 
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
-    return InputDeviceConverter::fromString(literalDevice, device) ||
-            OutputDeviceConverter::fromString(literalDevice, device);
-}
-
 SampleRateTraits::Collection samplingRatesFromString(
         const std::string &samplingRates, const char *del)
 {
@@ -446,21 +88,20 @@
 audio_format_t formatFromString(const std::string &literalFormat, audio_format_t defaultFormat)
 {
     audio_format_t format;
-    if (literalFormat.empty()) {
-        return defaultFormat;
+    if (!literalFormat.empty() && FormatConverter::fromString(literalFormat, format)) {
+        return format;
     }
-    FormatConverter::fromString(literalFormat, format);
-    return format;
+    return defaultFormat;
 }
 
 audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
 {
     audio_channel_mask_t channels;
-    if (!OutputChannelConverter::fromString(literalChannels, channels) &&
-            !InputChannelConverter::fromString(literalChannels, channels)) {
-        return AUDIO_CHANNEL_INVALID;
+    if (!literalChannels.empty() &&
+            audio_channel_mask_from_string(literalChannels.c_str(), &channels)) {
+        return channels;
     }
-    return channels;
+    return AUDIO_CHANNEL_INVALID;
 }
 
 ChannelTraits::Collection channelMasksFromString(
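
A minimal usage sketch of the reworked helpers above, assuming the standard enum spellings from <system/audio.h>; an unparsable format now falls back to the caller-supplied default, and an unparsable or empty channel-mask string yields AUDIO_CHANNEL_INVALID:

    // Hedged sketch; the literal strings are illustrative.
    audio_format_t fmt = formatFromString("AUDIO_FORMAT_PCM_16_BIT", AUDIO_FORMAT_DEFAULT);
    audio_format_t fallback = formatFromString("not_a_format", AUDIO_FORMAT_PCM_8_BIT);
    // fallback == AUDIO_FORMAT_PCM_8_BIT because parsing failed.
    audio_channel_mask_t mask = channelMaskFromString("AUDIO_CHANNEL_OUT_STEREO");
    audio_channel_mask_t invalid = channelMaskFromString("");  // AUDIO_CHANNEL_INVALID
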
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 66d8dfb..9a6ca8a 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -104,6 +104,9 @@
     // static const char * const keyDeviceSupportedEncapsulationModes;
     // static const char * const keyDeviceSupportedEncapsulationMetadataTypes;
 
+    static const char * const keyAdditionalOutputDeviceDelay;
+    static const char * const keyMaxAdditionalOutputDeviceDelay;
+
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
 
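
A hedged sketch of how the new delay keys above might be used with the existing AudioParameter helpers; the key string values themselves are defined elsewhere and the numeric value is illustrative:

    // Hedged sketch; 50 is an arbitrary example delay in milliseconds.
    AudioParameter param;
    param.addInt(String8(AudioParameter::keyAdditionalOutputDeviceDelay), 50);
    String8 kvPairs = param.toString();  // key/value pairs, e.g. for AudioSystem::setParameters()
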
diff --git a/media/libmediahelper/include/media/AudioValidator.h b/media/libmediahelper/include/media/AudioValidator.h
new file mode 100644
index 0000000..56c2fa6
--- /dev/null
+++ b/media/libmediahelper/include/media/AudioValidator.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_VALIDATOR_H_
+#define ANDROID_AUDIO_VALIDATOR_H_
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+
+#include <string_view>
+
+namespace android {
+
+/**
+ * AudioValidator is a class for validating audio data received in binder calls. NO_ERROR is
+ * returned only when there is no error with the data.
+ */
+class AudioValidator {
+public:
+    /**
+     * Return NO_ERROR only when there is no error with the given audio attributes.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateAudioAttributes(
+            const audio_attributes_t& attr, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR only when there is no error with the given effect descriptor.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateEffectDescriptor(
+            const effect_descriptor_t& desc, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR only when there is no error with the given audio port config.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateAudioPortConfig(
+            const struct audio_port_config& config, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR only when there is no error with the given audio port.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateAudioPort(
+            const struct audio_port& port, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR only when there is no error with the given audio_port_v7.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateAudioPort(
+            const struct audio_port_v7& port, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR only when there is no error with the given audio patch.
+     * Otherwise, return BAD_VALUE.
+     */
+    static status_t validateAudioPatch(
+            const struct audio_patch& patch, std::string_view bugNumber = {});
+
+    /**
+     * Return NO_ERROR if leveldB is acceptable, otherwise BAD_VALUE.
+     */
+    static status_t validateAudioDescriptionMixLevel(float leveldB);
+
+    /**
+     * Return NO_ERROR if dualMonoMode is one of the enum values, otherwise BAD_VALUE.
+     */
+    static status_t validateDualMonoMode(audio_dual_mono_mode_t dualMonoMode);
+
+    /**
+     * Return NO_ERROR if fallbackMode is one of the enum values, otherwise BAD_VALUE.
+     */
+    static status_t validatePlaybackRateFallbackMode(
+            audio_timestretch_fallback_mode_t fallbackMode);
+
+    /**
+     * Return NO_ERROR if fallbackMode is one of the enum values, otherwise BAD_VALUE.
+     */
+    static status_t validatePlaybackRateStretchMode(audio_timestretch_stretch_mode_t stretchMode);
+
+    /**
+     * Return NO_ERROR if playbackRate is acceptable - the enums are valid and the
+     * rate and speed are non-negative; otherwise BAD_VALUE.
+     *
+     * This is a basic bounds check - the system might have stricter requirements for
+     * playbackRate on a particular stream / device.
+     */
+    static status_t validatePlaybackRate(const audio_playback_rate_t& playbackRate);
+};
+
+}; // namespace android
+
+#endif  /*ANDROID_AUDIO_VALIDATOR_H_*/
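
A minimal sketch of how a binder-facing service might use AudioValidator; the surrounding handler function is assumed for illustration and is not part of this header:

    // Hedged sketch: reject malformed data before acting on it.
    status_t handleCreateAudioPatch(const struct audio_patch& patch) {
        const status_t s = AudioValidator::validateAudioPatch(patch, "b/123456789" /* example */);
        if (s != NO_ERROR) {
            return s;  // BAD_VALUE when the patch fails validation
        }
        // ... proceed with the validated patch ...
        return NO_ERROR;
    }
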
diff --git a/media/libmediahelper/include/media/TypeConverter.h b/media/libmediahelper/include/media/TypeConverter.h
index 011498a..42ccb5f 100644
--- a/media/libmediahelper/include/media/TypeConverter.h
+++ b/media/libmediahelper/include/media/TypeConverter.h
@@ -24,8 +24,6 @@
 
 #include <system/audio.h>
 #include <utils/Log.h>
-#include <utils/Vector.h>
-#include <utils/SortedVector.h>
 
 #include <media/AudioParameter.h>
 #include "convert.h"
@@ -43,16 +41,6 @@
     }
 };
 template <typename T>
-struct SortedVectorTraits
-{
-    typedef T Type;
-    typedef SortedVector<Type> Collection;
-    static void add(Collection &collection, Type value)
-    {
-        collection.add(value);
-    }
-};
-template <typename T>
 struct SetTraits
 {
     typedef T Type;
@@ -108,13 +96,20 @@
                                      typename Traits::Collection &collection,
                                      const char *del = AudioParameter::valueListSeparator);
 
-    static uint32_t maskFromString(
+    static typename Traits::Type maskFromString(
             const std::string &str, const char *del = AudioParameter::valueListSeparator);
 
     static void maskToString(
-            uint32_t mask, std::string &str, const char *del = AudioParameter::valueListSeparator);
+            typename Traits::Type mask, std::string &str,
+            const char *del = AudioParameter::valueListSeparator);
 
 protected:
+    // Default implementations use mTable for to/from string conversions
+    // of each individual enum value.
+    // These functions may be specialized to use external converters instead.
+    static bool toStringImpl(const typename Traits::Type &value, std::string &str);
+    static bool fromStringImpl(const std::string &str, typename Traits::Type &result);
+
     struct Table {
         const char *literal;
         typename Traits::Type value;
@@ -124,26 +119,22 @@
 };
 
 template <class Traits>
-inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
-{
+inline bool TypeConverter<Traits>::toStringImpl(
+        const typename Traits::Type &value, std::string &str) {
     for (size_t i = 0; mTable[i].literal; i++) {
         if (mTable[i].value == value) {
             str = mTable[i].literal;
             return true;
         }
     }
-    char result[64];
-    snprintf(result, sizeof(result), "Unknown enum value %d", value);
-    str = result;
     return false;
 }
 
 template <class Traits>
-inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
-{
+inline bool TypeConverter<Traits>::fromStringImpl(
+        const std::string &str, typename Traits::Type &result) {
     for (size_t i = 0; mTable[i].literal; i++) {
         if (strcmp(mTable[i].literal, str.c_str()) == 0) {
-            ALOGV("stringToEnum() found %s", mTable[i].literal);
             result = mTable[i].value;
             return true;
         }
@@ -152,6 +143,26 @@
 }
 
 template <class Traits>
+inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
+{
+    const bool success = toStringImpl(value, str);
+    if (!success) {
+        char result[64];
+        snprintf(result, sizeof(result), "Unknown enum value %d", value);
+        str = result;
+    }
+    return success;
+}
+
+template <class Traits>
+inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
+{
+    const bool success = fromStringImpl(str, result);
+    ALOGV_IF(success, "stringToEnum() found %s", str.c_str());
+    return success;
+}
+
+template <class Traits>
 inline void TypeConverter<Traits>::collectionFromString(const std::string &str,
         typename Traits::Collection &collection,
         const char *del)
@@ -168,7 +179,8 @@
 }
 
 template <class Traits>
-inline uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
+inline typename Traits::Type TypeConverter<Traits>::maskFromString(
+        const std::string &str, const char *del)
 {
     char *literal = strdup(str.c_str());
     uint32_t value = 0;
@@ -179,20 +191,24 @@
         }
     }
     free(literal);
-    return value;
+    return static_cast<typename Traits::Type>(value);
 }
 
 template <class Traits>
-inline void TypeConverter<Traits>::maskToString(uint32_t mask, std::string &str, const char *del)
+inline void TypeConverter<Traits>::maskToString(
+        typename Traits::Type mask, std::string &str, const char *del)
 {
     if (mask != 0) {
         bool first_flag = true;
-        for (size_t i = 0; mTable[i].literal; i++) {
-            uint32_t value = static_cast<uint32_t>(mTable[i].value);
-            if (mTable[i].value != 0 && ((mask & value) == value)) {
-                if (!first_flag) str += del;
-                first_flag = false;
-                str += mTable[i].literal;
+        for (size_t bit = 0; bit < sizeof(uint32_t) * 8; ++bit) {
+            uint32_t flag = 1u << bit;
+            if ((flag & mask) == flag) {
+                std::string flag_str;
+                if (toString(static_cast<typename Traits::Type>(flag), flag_str)) {
+                    if (!first_flag) str += del;
+                    first_flag = false;
+                    str += flag_str;
+                }
             }
         }
     } else {
@@ -200,6 +216,7 @@
     }
 }
 
+typedef TypeConverter<DeviceTraits> DeviceConverter;
 typedef TypeConverter<OutputDeviceTraits> OutputDeviceConverter;
 typedef TypeConverter<InputDeviceTraits> InputDeviceConverter;
 typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
@@ -216,23 +233,227 @@
 typedef TypeConverter<SourceTraits> SourceTypeConverter;
 typedef TypeConverter<AudioFlagTraits> AudioFlagConverter;
 
-template<> const OutputDeviceConverter::Table OutputDeviceConverter::mTable[];
-template<> const InputDeviceConverter::Table InputDeviceConverter::mTable[];
-template<> const OutputFlagConverter::Table OutputFlagConverter::mTable[];
-template<> const InputFlagConverter::Table InputFlagConverter::mTable[];
-template<> const FormatConverter::Table FormatConverter::mTable[];
-template<> const OutputChannelConverter::Table OutputChannelConverter::mTable[];
-template<> const InputChannelConverter::Table InputChannelConverter::mTable[];
-template<> const ChannelIndexConverter::Table ChannelIndexConverter::mTable[];
-template<> const GainModeConverter::Table GainModeConverter::mTable[];
-template<> const StreamTypeConverter::Table StreamTypeConverter::mTable[];
 template<> const AudioModeConverter::Table AudioModeConverter::mTable[];
-template<> const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[];
-template<> const UsageTypeConverter::Table UsageTypeConverter::mTable[];
-template<> const SourceTypeConverter::Table SourceTypeConverter::mTable[];
 template<> const AudioFlagConverter::Table AudioFlagConverter::mTable[];
 
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
+template <>
+inline bool TypeConverter<DeviceTraits>::toStringImpl(
+        const DeviceTraits::Type &value, std::string &str) {
+    str = audio_device_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<DeviceTraits>::fromStringImpl(
+        const std::string &str, DeviceTraits::Type &result) {
+    return audio_device_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::toStringImpl(
+        const OutputDeviceTraits::Type &value, std::string &str) {
+    if (audio_is_output_device(value)) {
+        str = audio_device_to_string(value);
+        return !str.empty();
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::fromStringImpl(
+        const std::string &str, OutputDeviceTraits::Type &result) {
+    OutputDeviceTraits::Type temp;
+    if (audio_device_from_string(str.c_str(), &temp) &&
+            audio_is_output_device(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::toStringImpl(
+        const InputDeviceTraits::Type &value, std::string &str) {
+    if (audio_is_input_device(value)) {
+        str = audio_device_to_string(value);
+        return !str.empty();
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::fromStringImpl(
+        const std::string &str, InputDeviceTraits::Type &result) {
+    InputDeviceTraits::Type temp;
+    if (audio_device_from_string(str.c_str(), &temp) &&
+            audio_is_input_device(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::toStringImpl(
+        const audio_input_flags_t &value, std::string &str) {
+    str = audio_input_flag_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::fromStringImpl(
+        const std::string &str, audio_input_flags_t &result) {
+    return audio_input_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::toStringImpl(
+        const audio_output_flags_t &value, std::string &str) {
+    str = audio_output_flag_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::fromStringImpl(
+        const std::string &str, audio_output_flags_t &result) {
+    return audio_output_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::toStringImpl(
+        const audio_format_t &value, std::string &str) {
+    str = audio_format_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::fromStringImpl(
+        const std::string &str, audio_format_t &result) {
+    return audio_format_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_out_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    OutputChannelTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_is_output_channel(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_in_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    InputChannelTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_is_input_channel(temp)) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::toStringImpl(
+        const audio_channel_mask_t &value, std::string &str) {
+    str = audio_channel_index_mask_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::fromStringImpl(
+        const std::string &str, audio_channel_mask_t &result) {
+    ChannelIndexTraits::Type temp;
+    if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+            audio_channel_mask_get_representation(temp) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        result = temp;
+        return true;
+    }
+    return false;
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::toStringImpl(
+        const audio_stream_type_t &value, std::string &str) {
+    str = audio_stream_type_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::fromStringImpl(
+        const std::string &str, audio_stream_type_t &result)
+{
+    return audio_stream_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::toStringImpl(
+        const audio_gain_mode_t &value, std::string &str) {
+    str = audio_gain_mode_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::fromStringImpl(
+        const std::string &str, audio_gain_mode_t &result) {
+    return audio_gain_mode_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::toStringImpl(
+        const audio_content_type_t &value, std::string &str) {
+    str = audio_content_type_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::fromStringImpl(
+        const std::string &str, audio_content_type_t &result) {
+    return audio_content_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::toStringImpl(const audio_usage_t &value, std::string &str)
+{
+    str = audio_usage_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::fromStringImpl(
+        const std::string &str, audio_usage_t &result) {
+    return audio_usage_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::toStringImpl(const audio_source_t &value, std::string &str)
+{
+    str = audio_source_to_string(value);
+    return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::fromStringImpl(
+        const std::string &str, audio_source_t &result) {
+    return audio_source_from_string(str.c_str(), &result);
+}
 
 SampleRateTraits::Collection samplingRatesFromString(
         const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
@@ -256,6 +477,7 @@
 
 // counting enumerations
 template <typename T, std::enable_if_t<std::is_same<T, audio_content_type_t>::value
+                                    || std::is_same<T, audio_devices_t>::value
                                     || std::is_same<T, audio_mode_t>::value
                                     || std::is_same<T, audio_source_t>::value
                                     || std::is_same<T, audio_stream_type_t>::value
@@ -282,17 +504,6 @@
     return result;
 }
 
-static inline std::string toString(const audio_devices_t& devices)
-{
-    std::string result;
-    if ((devices & AUDIO_DEVICE_BIT_IN) != 0) {
-        InputDeviceConverter::maskToString(devices, result);
-    } else {
-        OutputDeviceConverter::maskToString(devices, result);
-    }
-    return result;
-}
-
 static inline std::string toString(const audio_attributes_t& attributes)
 {
     std::ostringstream result;
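
A hedged sketch of the new DeviceConverter defined above; with the specializations in place, both directions delegate to the audio_device_to_string/audio_device_from_string helpers rather than a per-enum table:

    // Hedged round-trip sketch; the device name is a standard <system/audio.h> spelling.
    audio_devices_t device;
    if (DeviceConverter::fromString("AUDIO_DEVICE_OUT_SPEAKER", device)) {
        std::string name;
        DeviceConverter::toString(device, name);  // name == "AUDIO_DEVICE_OUT_SPEAKER"
    }
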
diff --git a/media/libmediahelper/tests/Android.bp b/media/libmediahelper/tests/Android.bp
new file mode 100644
index 0000000..a5f2819
--- /dev/null
+++ b/media/libmediahelper/tests/Android.bp
@@ -0,0 +1,31 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "libmedia_helper_tests",
+
+    generated_headers: ["audio_policy_configuration_V7_0"],
+    generated_sources: ["audio_policy_configuration_V7_0"],
+    header_libs: ["libxsdc-utils"],
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libmedia_helper",
+        "libxml2",
+    ],
+
+    srcs: ["typeconverter_tests.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
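
As a hedged note: a cc_test registered in the device-tests suite like this is normally run by module name, for example with "atest libmedia_helper_tests", assuming a connected device and a build that provides the generated audio_policy_configuration_V7_0 sources.
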
diff --git a/media/libmediahelper/tests/typeconverter_tests.cpp b/media/libmediahelper/tests/typeconverter_tests.cpp
new file mode 100644
index 0000000..181d636
--- /dev/null
+++ b/media/libmediahelper/tests/typeconverter_tests.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "TypeConverter_Test"
+#include <log/log.h>
+
+#include <android_audio_policy_configuration_V7_0.h>
+#include <media/TypeConverter.h>
+#include <system/audio.h>
+#include <xsdc/XsdcSupport.h>
+
+using namespace android;
+namespace xsd {
+using namespace android::audio::policy::configuration::V7_0;
+}
+
+TEST(TypeConverter, ParseChannelMasks) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_channel_mask_t channelMask = channelMaskFromString(stringVal);
+        EXPECT_EQ(enumVal != xsd::AudioChannelMask::AUDIO_CHANNEL_NONE,
+                audio_channel_mask_is_valid(channelMask))
+                << "Validity of \"" << stringVal << "\" is not as expected";
+    }
+}
+
+TEST(TypeConverter, ParseInputOutputIndexChannelMask) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_channel_mask_t channelMask, channelMaskBack;
+        std::string stringValBack;
+        if (stringVal.find("_CHANNEL_IN_") != std::string::npos) {
+            EXPECT_TRUE(InputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as input channel mask)";
+            EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of input channel mask " << channelMask << " failed";
+            // Due to aliased values, the result of 'toString' might not be the same
+            // as 'stringVal', thus we need to compare the results of parsing instead.
+            EXPECT_TRUE(InputChannelConverter::fromString(stringValBack, channelMaskBack))
+                    << "Conversion of \"" << stringValBack << "\" failed (as input channel mask)";
+            EXPECT_EQ(channelMask, channelMaskBack);
+        } else if (stringVal.find("_CHANNEL_OUT_") != std::string::npos) {
+            EXPECT_TRUE(OutputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as output channel mask)";
+            EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of output channel mask " << channelMask << " failed";
+            EXPECT_TRUE(OutputChannelConverter::fromString(stringValBack, channelMaskBack))
+                    << "Conversion of \"" << stringValBack << "\" failed (as output channel mask)";
+            EXPECT_EQ(channelMask, channelMaskBack);
+        } else if (stringVal.find("_CHANNEL_INDEX_") != std::string::npos) {
+            EXPECT_TRUE(ChannelIndexConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" failed (as indexed channel mask)";
+            EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+                    << "Conversion of indexed channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+        } else if (stringVal == toString(xsd::AudioChannelMask::AUDIO_CHANNEL_NONE)) {
+            EXPECT_FALSE(InputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as input channel mask)";
+            EXPECT_FALSE(OutputChannelConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as output channel mask)";
+            EXPECT_FALSE(ChannelIndexConverter::fromString(stringVal, channelMask))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as index channel mask)";
+            // None of the converters can parse this because 'NONE' isn't a 'valid' channel mask.
+            channelMask = AUDIO_CHANNEL_NONE;
+            // However they all must succeed in converting it back.
+            EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of input channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+            EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+                    << "Conversion of output channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+            EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+                    << "Conversion of indexed channel mask " << channelMask << " failed";
+            EXPECT_EQ(stringVal, stringValBack);
+        } else {
+            FAIL() << "Unrecognized channel mask \"" << stringVal << "\"";
+        }
+    }
+}
+
+TEST(TypeConverter, ParseContentTypes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioContentType>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_content_type_t contentType;
+        EXPECT_TRUE(AudioContentTypeConverter::fromString(stringVal, contentType))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(contentType));
+    }
+}
+
+TEST(TypeConverter, ParseDevices) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_devices_t device, deviceBack;
+        std::string stringValBack;
+        EXPECT_TRUE(DeviceConverter::fromString(stringVal, device))
+                << "Conversion of \"" << stringVal << "\" failed";
+        if (enumVal != xsd::AudioDevice::AUDIO_DEVICE_NONE) {
+            EXPECT_TRUE(audio_is_input_device(device) || audio_is_output_device(device))
+                    << "Device \"" << stringVal << "\" is neither input, nor output device";
+        } else {
+            EXPECT_FALSE(audio_is_input_device(device));
+            EXPECT_FALSE(audio_is_output_device(device));
+        }
+        // Due to aliased values, the result of 'toString' might not be the same
+        // as 'stringVal', thus we need to compare the results of parsing instead.
+        stringValBack = toString(device);
+        EXPECT_TRUE(DeviceConverter::fromString(stringValBack, deviceBack))
+                << "Conversion of \"" << stringValBack << "\" failed";
+        EXPECT_EQ(device, deviceBack);
+    }
+}
+
+TEST(TypeConverter, ParseInOutDevices) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_devices_t device, deviceBack;
+        std::string stringValBack;
+        if (stringVal.find("_DEVICE_IN_") != std::string::npos) {
+            EXPECT_TRUE(InputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" failed (as input device)";
+            // Due to aliased values, the result of 'toString' might not be the same
+            // as 'stringVal', thus we need to compare the results of parsing instead.
+            stringValBack = toString(device);
+            EXPECT_TRUE(InputDeviceConverter::fromString(stringValBack, deviceBack))
+                    << "Conversion of \"" << stringValBack << "\" failed";
+            EXPECT_EQ(device, deviceBack);
+        } else if (stringVal.find("_DEVICE_OUT_") != std::string::npos) {
+            EXPECT_TRUE(OutputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" failed (as output device)";
+            stringValBack = toString(device);
+            EXPECT_TRUE(OutputDeviceConverter::fromString(stringValBack, deviceBack))
+                    << "Conversion of \"" << stringValBack << "\" failed";
+            EXPECT_EQ(device, deviceBack);
+        } else if (stringVal == toString(xsd::AudioDevice::AUDIO_DEVICE_NONE)) {
+            EXPECT_FALSE(InputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as input device)";
+            EXPECT_FALSE(OutputDeviceConverter::fromString(stringVal, device))
+                    << "Conversion of \"" << stringVal << "\" succeeded (as output device)";
+            EXPECT_EQ(stringVal, toString(device));
+        } else {
+            FAIL() << "Unrecognized audio device \"" << stringVal << "\"";
+        }
+    }
+}
+
+TEST(TypeConverter, ParseInOutFlags) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioInOutFlag>{}) {
+        const std::string stringVal = toString(enumVal);
+        if (stringVal.find("_INPUT_FLAG_") != std::string::npos) {
+            audio_input_flags_t flag;
+            EXPECT_TRUE(InputFlagConverter::fromString(stringVal, flag))
+                    << "Conversion of \"" << stringVal << "\" failed (as input flag)";
+            EXPECT_EQ(stringVal, toString(flag));
+        } else {
+            audio_output_flags_t flag;
+            EXPECT_TRUE(OutputFlagConverter::fromString(stringVal, flag))
+                    << "Conversion of \"" << stringVal << "\" failed (as output flag)";
+            EXPECT_EQ(stringVal, toString(flag));
+        }
+    }
+}
+
+TEST(TypeConverter, ParseFormats) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_format_t format;
+        EXPECT_TRUE(FormatConverter::fromString(stringVal, format))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(enumVal != xsd::AudioFormat::AUDIO_FORMAT_DEFAULT,
+                audio_is_valid_format(format))
+                << "Validity of \"" << stringVal << "\" is not as expected";
+        EXPECT_EQ(stringVal, toString(format));
+    }
+}
+
+TEST(TypeConverter, ParseGainModes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioGainMode>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_gain_mode_t gainMode;
+        EXPECT_TRUE(GainModeConverter::fromString(stringVal, gainMode))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(gainMode));
+    }
+}
+
+TEST(TypeConverter, ParseSources) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_source_t source;
+        EXPECT_TRUE(SourceTypeConverter::fromString(stringVal, source))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(source != AUDIO_SOURCE_DEFAULT, audio_is_valid_audio_source(source))
+                << "Validity of \"" << stringVal << "\" is not as expected";
+        EXPECT_EQ(stringVal, toString(source));
+    }
+}
+
+TEST(TypeConverter, ParseStreamTypes) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioStreamType>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_stream_type_t streamType;
+        EXPECT_TRUE(StreamTypeConverter::fromString(stringVal, streamType))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(streamType));
+    }
+}
+
+TEST(TypeConverter, ParseUsages) {
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioUsage>{}) {
+        const std::string stringVal = toString(enumVal);
+        audio_usage_t usage;
+        EXPECT_TRUE(UsageTypeConverter::fromString(stringVal, usage))
+                << "Conversion of \"" << stringVal << "\" failed";
+        EXPECT_EQ(stringVal, toString(usage));
+    }
+}
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index a63b8b4..d758391 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -1,13 +1,21 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libmediametrics_headers",
     export_include_dirs: ["include"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libmediametrics",
 
     srcs: [
-        "IMediaMetricsService.cpp",
         "MediaMetricsItem.cpp",
         "MediaMetrics.cpp",
     ],
@@ -17,6 +25,7 @@
         "libcutils",
         "liblog",
         "libutils",
+        "mediametricsservice-aidl-cpp",
     ],
 
     export_include_dirs: ["include"],
@@ -58,3 +67,21 @@
         "//frameworks/base/media/jni",
     ],
 }
+
+aidl_interface {
+    name: "mediametricsservice-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    vendor_available: true,
+    srcs: [
+        "aidl/android/media/IMediaMetricsService.aidl",
+    ],
+    double_loadable: true,
+    backend: {
+        cpp: {
+            apex_available: [
+                "//apex_available:platform",
+            ],
+        },
+    },
+}
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 7cdbe5f..d597a4d 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -31,8 +31,9 @@
 #include <utils/SortedVector.h>
 #include <utils/threads.h>
 
+#include <android/media/BnMediaMetricsService.h> // for direct Binder access
+#include <android/media/IMediaMetricsService.h>
 #include <binder/IServiceManager.h>
-#include <media/IMediaMetricsService.h>
 #include <media/MediaMetricsItem.h>
 #include <private/android_filesystem_config.h>
 
@@ -278,17 +279,19 @@
 // calls the appropriate daemon
 bool mediametrics::Item::selfrecord() {
     ALOGD_IF(DEBUG_API, "%s: delivering %s", __func__, this->toString().c_str());
-    sp<IMediaMetricsService> svc = getService();
-    if (svc != NULL) {
-        status_t status = svc->submit(this);
-        if (status != NO_ERROR) {
-            ALOGW("%s: failed to record: %s", __func__, this->toString().c_str());
-            return false;
-        }
-        return true;
-    } else {
+
+    char *str;
+    size_t size;
+    status_t status = writeToByteString(&str, &size);
+    if (status == NO_ERROR) {
+        status = submitBuffer(str, size);
+        free(str);
+    }
+    if (status != NO_ERROR) {
+        ALOGW("%s: failed to record: %s", __func__, this->toString().c_str());
         return false;
     }
+    return true;
 }
 
 //static
@@ -305,6 +308,17 @@
     switch (uid) {
     case AID_RADIO:     // telephony subsystem, RIL
         return false;
+    default:
+        // Some isolated processes can access the audio system; see
+        // AudioSystem::setAudioFlingerBinder (currently only the HotwordDetectionService). Instead
+        // of also allowing access to the MediaMetrics service, it's simpler to just disable it for
+        // now.
+        // TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
+        // make this disabling specific to that process.
+        if (uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END) {
+            return false;
+        }
+        break;
     }
 
     int enabled = property_get_int32(Item::EnabledProperty, -1);
@@ -327,7 +341,7 @@
 
 static sp<MediaMetricsDeathNotifier> sNotifier;
 // static
-sp<IMediaMetricsService> BaseItem::sMediaMetricsService;
+sp<media::IMediaMetricsService> BaseItem::sMediaMetricsService;
 static std::mutex sServiceMutex;
 static int sRemainingBindAttempts = SVC_TRIES;
 
@@ -339,29 +353,67 @@
 }
 
 // static
-bool BaseItem::submitBuffer(const char *buffer, size_t size) {
-/*
-    mediametrics::Item item;
-    status_t status = item.readFromByteString(buffer, size);
-    ALOGD("%s: status:%d, size:%zu, item:%s", __func__, status, size, item.toString().c_str());
-    return item.selfrecord();
-    */
-
+status_t BaseItem::submitBuffer(const char *buffer, size_t size) {
     ALOGD_IF(DEBUG_API, "%s: delivering %zu bytes", __func__, size);
-    sp<IMediaMetricsService> svc = getService();
-    if (svc != nullptr) {
-        const status_t status = svc->submitBuffer(buffer, size);
-        if (status != NO_ERROR) {
-            ALOGW("%s: failed(%d) to record: %zu bytes", __func__, status, size);
-            return false;
-        }
-        return true;
+
+    // Validate size
+    if (size > std::numeric_limits<int32_t>::max()) return BAD_VALUE;
+
+    // Do we have the service available?
+    sp<media::IMediaMetricsService> svc = getService();
+    if (svc == nullptr)  return NO_INIT;
+
+    ::android::status_t status = NO_ERROR;
+    if constexpr (/* DISABLES CODE */ (false)) {
+        // THIS PATH IS FOR REFERENCE ONLY.
+        // It is compiled so that any changes to IMediaMetricsService::submitBuffer()
+        // will lead here.  If this code is changed, the else branch must
+        // be changed as well.
+        //
+        // Use the AIDL calling interface - this is a bit slower as a byte vector must be
+        // constructed. As the call is one-way, only a transaction error can occur.
+        status = svc->submitBuffer({buffer, buffer + size}).transactionError();
+    } else {
+        // Use the Binder calling interface - this direct implementation avoids
+        // malloc/copy/free for the vector and reduces the overhead for logging.
+        // We based this off of the AIDL generated file:
+        // out/soong/.intermediates/frameworks/av/media/libmediametrics/mediametricsservice-aidl-unstable-cpp-source/gen/android/media/IMediaMetricsService.cpp
+        // TODO: Create an AIDL C++ back end optimized form of vector writing.
+        ::android::Parcel _aidl_data;
+        ::android::Parcel _aidl_reply; // we don't care about this as it is one-way.
+
+        status = _aidl_data.writeInterfaceToken(svc->getInterfaceDescriptor());
+        if (status != ::android::OK) goto _aidl_error;
+
+        status = _aidl_data.writeInt32(static_cast<int32_t>(size));
+        if (status != ::android::OK) goto _aidl_error;
+
+        status = _aidl_data.write(buffer, static_cast<int32_t>(size));
+        if (status != ::android::OK) goto _aidl_error;
+
+        status = ::android::IInterface::asBinder(svc)->transact(
+                ::android::media::BnMediaMetricsService::TRANSACTION_submitBuffer,
+                _aidl_data, &_aidl_reply, ::android::IBinder::FLAG_ONEWAY);
+
+        // AIDL permits setting a default implementation for additional functionality.
+        // See go/aog/713984. This is not used here.
+        // if (status == ::android::UNKNOWN_TRANSACTION
+        //         && ::android::media::IMediaMetricsService::getDefaultImpl()) {
+        //     status = ::android::media::IMediaMetricsService::getDefaultImpl()
+        //             ->submitBuffer(immutableByteVectorFromBuffer(buffer, size))
+        //             .transactionError();
+        // }
     }
-    return false;
+
+    if (status == NO_ERROR) return NO_ERROR;
+
+    _aidl_error:
+    ALOGW("%s: failed(%d) to record: %zu bytes", __func__, status, size);
+    return status;
 }
 
 //static
-sp<IMediaMetricsService> BaseItem::getService() {
+sp<media::IMediaMetricsService> BaseItem::getService() {
     static const char *servicename = "media.metrics";
     static const bool enabled = isEnabled(); // singleton initialized
 
@@ -379,7 +431,7 @@
         if (sm != nullptr) {
             sp<IBinder> binder = sm->getService(String16(servicename));
             if (binder != nullptr) {
-                sMediaMetricsService = interface_cast<IMediaMetricsService>(binder);
+                sMediaMetricsService = interface_cast<media::IMediaMetricsService>(binder);
                 sNotifier = new MediaMetricsDeathNotifier();
                 binder->linkToDeath(sNotifier);
             } else {
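
A minimal sketch of the client-side flow after this change, assuming the mediametrics::Item setters declared in MediaMetricsItem.h; selfrecord() now serializes the item to a byte string and delivers it through the optimized submitBuffer() path:

    // Hedged sketch; the item key and property names are illustrative.
    mediametrics::Item item("audiotrack");
    item.setInt32("sampleRate", 48000);
    item.setCString("outputDevices", "AUDIO_DEVICE_OUT_SPEAKER");
    const bool delivered = item.selfrecord();  // false if serialization or delivery failed
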
diff --git a/media/libmediametrics/aidl/android/media/IMediaMetricsService.aidl b/media/libmediametrics/aidl/android/media/IMediaMetricsService.aidl
new file mode 100644
index 0000000..b14962d
--- /dev/null
+++ b/media/libmediametrics/aidl/android/media/IMediaMetricsService.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * MediaMetrics service interface
+ *
+ * {@hide}
+ */
+interface IMediaMetricsService {
+    oneway void submitBuffer(in byte[] buffer);
+}
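
A hedged sketch of calling this interface through the generated C++ proxy, matching the reference branch shown in MediaMetricsItem.cpp above; 'svc', 'buffer' and 'size' are assumed to come from the caller:

    // Hedged sketch; the AIDL C++ backend generates a std::vector<uint8_t> parameter.
    // 'svc' is a sp<android::media::IMediaMetricsService> obtained elsewhere.
    std::vector<uint8_t> payload(buffer, buffer + size);
    ::android::binder::Status st = svc->submitBuffer(payload);
    if (!st.isOk()) {
        ALOGW("submitBuffer failed: %d", st.transactionError());
    }
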
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 84388c9..a09a673 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -123,14 +123,17 @@
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
+#define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
 #define AMEDIAMETRICS_PROP_PERFORMANCEMODE "performanceMode"    // string value, "none", "lowLatency"
 #define AMEDIAMETRICS_PROP_PLAYBACK_PITCH "playback.pitch" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
+#define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
 #define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
@@ -139,6 +142,7 @@
 #define AMEDIAMETRICS_PROP_SESSIONID      "sessionId"      // int32
 #define AMEDIAMETRICS_PROP_SHARINGMODE    "sharingMode"    // string value, "exclusive", "shared"
 #define AMEDIAMETRICS_PROP_SOURCE         "source"         // string (AudioAttributes)
+#define AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES "startThresholdFrames" // int32 (AudioTrack)
 #define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
 // State is "ACTIVE" or "STOPPED" for AudioRecord
 #define AMEDIAMETRICS_PROP_STATE          "state"          // string
@@ -156,6 +160,14 @@
 #define AMEDIAMETRICS_PROP_VOLUME_LEFT    "volume.left"    // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_VOLUME_RIGHT   "volume.right"   // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_WHERE          "where"          // string value
+// EncodingClient is the encoding format requested by the client
+#define AMEDIAMETRICS_PROP_ENCODINGCLIENT "encodingClient" // string
+// PerformanceModeActual is the actually selected performance mode; it can be "none", "lowLatency" or
+// "powerSaving"
+#define AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL "performanceModeActual" // string
+#define AMEDIAMETRICS_PROP_FRAMESTRANSFERRED "framesTransferred" // int64_t, transferred frames
+// string value, "exclusive" or "shared"; the sharing mode actually selected by the server
+#define AMEDIAMETRICS_PROP_SHARINGMODEACTUAL "sharingModeActual"
 
 // Timing values: millisecond values are suffixed with MS and the type is double
 // nanosecond values are suffixed with NS and the type is int64.
@@ -170,6 +182,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR       "ctor"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT "disconnect"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR       "dtor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM "endAAudioStream" // AAudioStream
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAUDIOINTERVALGROUP "endAudioIntervalGroup"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_FLUSH      "flush"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_INVALIDATE "invalidate" // server track, record
@@ -180,7 +193,10 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_RESTORE    "restore"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE    "setMode" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE    "setBufferSize" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
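
A hedged sketch of attaching the new property keys above to a metrics item; the values are illustrative and the item key is arbitrary:

    // Hedged sketch; the property macros expand to the string keys defined above.
    mediametrics::Item item("audiotrack");
    item.setInt32(AMEDIAMETRICS_PROP_PLAYERIID, -1);               // -1 == invalid/unset IID
    item.setInt32(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, 1536);  // example frame count
    item.setCString(AMEDIAMETRICS_PROP_LOGSESSIONID, "");          // "" means no session id
    item.selfrecord();
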
diff --git a/media/libmediametrics/include/media/IMediaMetricsService.h b/media/libmediametrics/include/media/IMediaMetricsService.h
deleted file mode 100644
index d6871ec..0000000
--- a/media/libmediametrics/include/media/IMediaMetricsService.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IMEDIAANALYTICSSERVICE_H
-#define ANDROID_IMEDIAANALYTICSSERVICE_H
-
-#include <utils/String8.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-
-#include <sys/types.h>
-#include <utils/Errors.h>
-#include <utils/Log.h>
-#include <utils/RefBase.h>
-#include <utils/List.h>
-
-#include <binder/IServiceManager.h>
-
-#include <media/MediaMetricsItem.h>
-
-namespace android {
-
-class IMediaMetricsService: public IInterface
-{
-public:
-    DECLARE_META_INTERFACE(MediaMetricsService);
-
-    /**
-     * Submits the indicated record to the mediaanalytics service, where
-     * it will be merged (if appropriate) with incomplete records that
-     * share the same key and sessionID.
-     *
-     * \param item the item to submit.
-     * \return status which is negative if an error is detected (some errors
-               may be silent and return 0 - success).
-     */
-    virtual status_t submit(mediametrics::Item *item) = 0;
-
-    virtual status_t submitBuffer(const char *buffer, size_t length) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnMediaMetricsService: public BnInterface<IMediaMetricsService>
-{
-public:
-    status_t onTransact(uint32_t code,
-                        const Parcel& data,
-                        Parcel* reply,
-                        uint32_t flags = 0) override;
-
-protected:
-    // Internal call where release is true if the service is to delete the item.
-    virtual status_t submitInternal(
-            mediametrics::Item *item, bool release) = 0;
-};
-
-}; // namespace android
-
-#endif // ANDROID_IMEDIASTATISTICSSERVICE_H
diff --git a/media/libmediametrics/include/media/MediaMetricsItem.h b/media/libmediametrics/include/media/MediaMetricsItem.h
index 303343f..428992c 100644
--- a/media/libmediametrics/include/media/MediaMetricsItem.h
+++ b/media/libmediametrics/include/media/MediaMetricsItem.h
@@ -32,7 +32,8 @@
 
 namespace android {
 
-class IMediaMetricsService;
+namespace media { class IMediaMetricsService; }
+
 class Parcel;
 
 /*
@@ -239,7 +240,10 @@
 public:
     // are we collecting metrics data
     static bool isEnabled();
-    static sp<IMediaMetricsService> getService();
+    // returns the MediaMetrics service if active.
+    static sp<media::IMediaMetricsService> getService();
+    // submits a raw buffer directly to the MediaMetrics service - this is highly optimized.
+    static status_t submitBuffer(const char *buffer, size_t len);
 
 protected:
     static constexpr const char * const EnabledProperty = "media.metrics.enabled";
@@ -247,10 +251,9 @@
     static const int EnabledProperty_default = 1;
 
     // let's reuse a binder connection
-    static sp<IMediaMetricsService> sMediaMetricsService;
+    static sp<media::IMediaMetricsService> sMediaMetricsService;
 
     static void dropInstance();
-    static bool submitBuffer(const char *buffer, size_t len);
 
     template <typename T>
     struct is_item_type {
@@ -573,7 +576,7 @@
 
     bool record() {
         return updateHeader()
-                && BaseItem::submitBuffer(getBuffer(), getLength());
+                && BaseItem::submitBuffer(getBuffer(), getLength()) == OK;
     }
 
     bool isValid () const {
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 5301f5c..f55678d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -1,4 +1,23 @@
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libmediaplayerservice_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
 
     srcs: [
         "ActivityManager.cpp",
@@ -15,7 +34,12 @@
     shared_libs: [
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
+        "av-types-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libbase",
+        "libactivitymanager_aidl",
+        "libandroid_net",
         "libaudioclient",
         "libbinder",
         "libcamera_client",
@@ -52,9 +76,13 @@
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
+        "framework-permission-aidl-cpp",
     ],
 
-    export_shared_lib_headers: ["libmedia"],
+    export_shared_lib_headers: [
+        "libmedia",
+        "framework-permission-aidl-cpp",
+    ],
 
     include_dirs: [
         "frameworks/av/media/libstagefright/rtsp",
@@ -76,4 +104,3 @@
     },
 
 }
-
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c0da0ce..d278a01 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -49,6 +49,7 @@
 
 #include <codec2/hidl/client.h>
 #include <datasource/HTTPBase.h>
+#include <media/AidlConversion.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IRemoteDisplay.h>
 #include <media/IRemoteDisplayClient.h>
@@ -94,6 +95,7 @@
 using android::NOT_ENOUGH_DATA;
 using android::Parcel;
 using android::media::VolumeShaper;
+using android::content::AttributionSourceState;
 
 // Max number of entries in the filter.
 const int kMaxFilterSize = 64;  // I pulled that out of thin air.
@@ -453,14 +455,22 @@
     ALOGV("MediaPlayerService destroyed");
 }
 
-sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(const String16 &opPackageName)
+sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(
+        const AttributionSourceState& attributionSource)
 {
-    pid_t pid = IPCThreadState::self()->getCallingPid();
-    sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid, opPackageName);
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState verifiedAttributionSource = attributionSource;
+    verifiedAttributionSource.uid = VALUE_OR_FATAL(
+      legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    verifiedAttributionSource.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+    sp<MediaRecorderClient> recorder =
+        new MediaRecorderClient(this, verifiedAttributionSource);
     wp<MediaRecorderClient> w = recorder;
     Mutex::Autolock lock(mLock);
     mMediaRecorderClients.add(w);
-    ALOGV("Create new media recorder client from pid %d", pid);
+    ALOGV("Create new media recorder client from %s",
+        verifiedAttributionSource.toString().c_str());
     return recorder;
 }
 
@@ -480,17 +490,21 @@
 }
 
 sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId)
+        audio_session_t audioSessionId, const AttributionSourceState& attributionSource)
 {
-    pid_t pid = IPCThreadState::self()->getCallingPid();
     int32_t connId = android_atomic_inc(&mNextConnId);
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState verifiedAttributionSource = attributionSource;
+    verifiedAttributionSource.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+    verifiedAttributionSource.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
 
     sp<Client> c = new Client(
-            this, pid, connId, client, audioSessionId,
-            IPCThreadState::self()->getCallingUid());
+            this, verifiedAttributionSource, connId, client, audioSessionId);
 
-    ALOGV("Create new client(%d) from pid %d, uid %d, ", connId, pid,
-         IPCThreadState::self()->getCallingUid());
+    ALOGV("Create new client(%d) from %s, ", connId,
+        verifiedAttributionSource.toString().c_str());
 
     wp<Client> w = c;
     {
@@ -543,8 +557,8 @@
     char buffer[SIZE];
     String8 result;
     result.append(" Client\n");
-    snprintf(buffer, 255, "  pid(%d), connId(%d), status(%d), looping(%s)\n",
-            mPid, mConnId, mStatus, mLoop?"true": "false");
+    snprintf(buffer, 255, "  AttributionSource(%s), connId(%d), status(%d), looping(%s)\n",
+        mAttributionSource.toString().c_str(), mConnId, mStatus, mLoop?"true": "false");
     result.append(buffer);
 
     sp<MediaPlayerBase> p;
@@ -608,7 +622,8 @@
             for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
                 sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
                 if (c != 0) {
-                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n", c->mPid);
+                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
+                            c->mAttributionSource.pid);
                     result.append(buffer);
                     write(fd, result.string(), result.size());
                     result = "\n";
@@ -731,19 +746,18 @@
 }
 
 MediaPlayerService::Client::Client(
-        const sp<MediaPlayerService>& service, pid_t pid,
+        const sp<MediaPlayerService>& service, const AttributionSourceState& attributionSource,
         int32_t connId, const sp<IMediaPlayerClient>& client,
-        audio_session_t audioSessionId, uid_t uid)
+        audio_session_t audioSessionId)
+        : mAttributionSource(attributionSource)
 {
     ALOGV("Client(%d) constructor", connId);
-    mPid = pid;
     mConnId = connId;
     mService = service;
     mClient = client;
     mLoop = false;
     mStatus = NO_INIT;
     mAudioSessionId = audioSessionId;
-    mUid = uid;
     mRetransmitEndpointValid = false;
     mAudioAttributes = NULL;
     mListener = new Listener(this);
@@ -756,7 +770,8 @@
 
 MediaPlayerService::Client::~Client()
 {
-    ALOGV("Client(%d) destructor pid = %d", mConnId, mPid);
+    ALOGV("Client(%d) destructor AttributionSource = %s", mConnId,
+            mAttributionSource.toString().c_str());
     mAudioOutput.clear();
     wp<Client> client(this);
     disconnect();
@@ -769,7 +784,8 @@
 
 void MediaPlayerService::Client::disconnect()
 {
-    ALOGV("disconnect(%d) from pid %d", mConnId, mPid);
+    ALOGV("disconnect(%d) from AttributionSource %s", mConnId,
+            mAttributionSource.toString().c_str());
     // grab local reference and clear main reference to prevent future
     // access to object
     sp<MediaPlayerBase> p;
@@ -809,11 +825,12 @@
         p.clear();
     }
     if (p == NULL) {
-        p = MediaPlayerFactory::createPlayer(playerType, mListener, mPid);
+        p = MediaPlayerFactory::createPlayer(playerType, mListener,
+            VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid)));
     }
 
     if (p != NULL) {
-        p->setUID(mUid);
+        p->setUID(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid)));
     }
 
     return p;
@@ -921,8 +938,8 @@
     mAudioDeviceUpdatedListener = new AudioDeviceUpdatedNotifier(p);
 
     if (!p->hardwareOutput()) {
-        mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
-                mPid, mAudioAttributes, mAudioDeviceUpdatedListener);
+        mAudioOutput = new AudioOutput(mAudioSessionId, mAttributionSource,
+                mAudioAttributes, mAudioDeviceUpdatedListener);
         static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
     }
 
@@ -1062,6 +1079,17 @@
     return mStatus = setDataSource_post(p, p->setDataSource(dataSource));
 }
 
+status_t MediaPlayerService::Client::setDataSource(
+        const String8& rtpParams) {
+    player_type playerType = NU_PLAYER;
+    sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(rtpParams));
+}
+
 void MediaPlayerService::Client::disconnectNativeWindow_l() {
     if (mConnectedWindow != NULL) {
         status_t err = nativeWindowDisconnect(
@@ -1760,8 +1788,9 @@
 
 #undef LOG_TAG
 #define LOG_TAG "AudioSink"
-MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId, uid_t uid, int pid,
-        const audio_attributes_t* attr, const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
+MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId,
+        const AttributionSourceState& attributionSource, const audio_attributes_t* attr,
+        const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
     : mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
@@ -1773,8 +1802,7 @@
       mMsecsPerFrame(0),
       mFrameSize(0),
       mSessionId(sessionId),
-      mUid(uid),
-      mPid(pid),
+      mAttributionSource(attributionSource),
       mSendLevel(0.0),
       mAuxEffectId(0),
       mFlags(AUDIO_OUTPUT_FLAG_NONE),
@@ -1808,8 +1836,7 @@
 //static
 void MediaPlayerService::AudioOutput::setMinBufferCount()
 {
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("ro.kernel.qemu", value, 0)) {
+    if (property_get_bool("ro.boot.qemu", false)) {
         mIsOnEmulator = true;
         mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
     }
@@ -2171,8 +2198,7 @@
                     mSessionId,
                     AudioTrack::TRANSFER_CALLBACK,
                     offloadInfo,
-                    mUid,
-                    mPid,
+                    mAttributionSource,
                     mAttributes,
                     doNotReconnect,
                     1.0f,  // default value for maxRequiredSpeed
@@ -2199,8 +2225,7 @@
                     mSessionId,
                     AudioTrack::TRANSFER_DEFAULT,
                     NULL, // offload info
-                    mUid,
-                    mPid,
+                    mAttributionSource,
                     mAttributes,
                     doNotReconnect,
                     targetSpeed,
@@ -2230,6 +2255,12 @@
                       mRecycledTrack->frameCount(), t->frameCount());
                 reuse = false;
             }
+            // If recycled and new tracks are not on the same output,
+            // don't reuse the recycled one.
+            if (mRecycledTrack->getOutput() != t->getOutput()) {
+                ALOGV("output has changed, don't reuse track");
+                reuse = false;
+            }
         }
 
         if (reuse) {
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 6431ca1..98091be 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -19,6 +19,7 @@
 #define ANDROID_MEDIAPLAYERSERVICE_H
 
 #include <arpa/inet.h>
+#include <string>
 
 #include <utils/threads.h>
 #include <utils/Errors.h>
@@ -26,17 +27,20 @@
 #include <utils/String8.h>
 #include <utils/Vector.h>
 
+#include <media/AidlConversion.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/Metadata.h>
 #include <media/stagefright/foundation/ABase.h>
-
+#include <android/content/AttributionSourceState.h>
 
 #include <system/audio.h>
 
 namespace android {
 
+using content::AttributionSourceState;
+
 class AudioTrack;
 struct AVSyncSettings;
 class DeathNotifier;
@@ -78,8 +82,7 @@
      public:
                                 AudioOutput(
                                         audio_session_t sessionId,
-                                        uid_t uid,
-                                        int pid,
+                                        const AttributionSourceState& attributionSource,
                                         const audio_attributes_t * attr,
                                         const sp<AudioSystem::AudioDeviceCallback>& deviceCallback);
         virtual                 ~AudioOutput();
@@ -98,6 +101,7 @@
         virtual audio_session_t getSessionId() const;
         virtual uint32_t        getSampleRate() const;
         virtual int64_t         getBufferDurationInUs() const;
+        virtual audio_output_flags_t getFlags() const { return mFlags; }
 
         virtual status_t        open(
                 uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
@@ -167,8 +171,7 @@
         float                   mMsecsPerFrame;
         size_t                  mFrameSize;
         audio_session_t         mSessionId;
-        uid_t                   mUid;
-        int                     mPid;
+        AttributionSourceState  mAttributionSource;
         float                   mSendLevel;
         int                     mAuxEffectId;
         audio_output_flags_t    mFlags;
@@ -230,12 +233,13 @@
     static  void                instantiate();
 
     // IMediaPlayerService interface
-    virtual sp<IMediaRecorder>  createMediaRecorder(const String16 &opPackageName);
+    virtual sp<IMediaRecorder> createMediaRecorder(const AttributionSourceState &attributionSource);
     void    removeMediaRecorderClient(const wp<MediaRecorderClient>& client);
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever();
 
     virtual sp<IMediaPlayer>    create(const sp<IMediaPlayerClient>& client,
-                                       audio_session_t audioSessionId);
+                                       audio_session_t audioSessionId,
+                                       const AttributionSourceState& attributionSource);
 
     virtual sp<IMediaCodecList> getCodecList() const;
 
@@ -368,6 +372,7 @@
 
         virtual status_t        setDataSource(const sp<IStreamSource> &source);
         virtual status_t        setDataSource(const sp<IDataSource> &source);
+        virtual status_t        setDataSource(const String8& rtpParams);
 
 
         sp<MediaPlayerBase>     setDataSource_pre(player_type playerType);
@@ -376,7 +381,9 @@
 
                 void            notify(int msg, int ext1, int ext2, const Parcel *obj);
 
-                pid_t           pid() const { return mPid; }
+                pid_t           pid() const {
+                    return VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid));
+                }
         virtual status_t        dump(int fd, const Vector<String16>& args);
 
                 audio_session_t getAudioSessionId() { return mAudioSessionId; }
@@ -406,11 +413,10 @@
 
         friend class MediaPlayerService;
                                 Client( const sp<MediaPlayerService>& service,
-                                        pid_t pid,
+                                        const AttributionSourceState& attributionSource,
                                         int32_t connId,
                                         const sp<IMediaPlayerClient>& client,
-                                        audio_session_t audioSessionId,
-                                        uid_t uid);
+                                        audio_session_t audioSessionId);
                                 Client();
         virtual                 ~Client();
 
@@ -454,13 +460,12 @@
                     sp<MediaPlayerService>        mService;
                     sp<IMediaPlayerClient>        mClient;
                     sp<AudioOutput>               mAudioOutput;
-                    pid_t                         mPid;
+                    const AttributionSourceState  mAttributionSource;
                     status_t                      mStatus;
                     bool                          mLoop;
                     int32_t                       mConnId;
                     audio_session_t               mAudioSessionId;
                     audio_attributes_t *          mAudioAttributes;
-                    uid_t                         mUid;
                     sp<ANativeWindow>             mConnectedWindow;
                     sp<IBinder>                   mConnectedWindowBinder;
                     struct sockaddr_in            mRetransmitEndpoint;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 9b1974b..a914006 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -28,6 +28,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryBase.h>
+#include <camera/CameraUtils.h>
 #include <codec2/hidl/client.h>
 #include <cutils/atomic.h>
 #include <cutils/properties.h> // for property_get
@@ -123,11 +124,11 @@
         ALOGE("Invalid audio source: %d", as);
         return BAD_VALUE;
     }
-    pid_t pid = IPCThreadState::self()->getCallingPid();
-    uid_t uid = IPCThreadState::self()->getCallingUid();
 
-    if ((as == AUDIO_SOURCE_FM_TUNER && !captureAudioOutputAllowed(pid, uid))
-            || !recordingAllowed(String16(""), pid, uid)) {
+    if ((as == AUDIO_SOURCE_FM_TUNER
+            && !(captureAudioOutputAllowed(mAttributionSource)
+                    || captureTunerAudioInputAllowed(mAttributionSource)))
+            || !recordingAllowed(mAttributionSource, (audio_source_t)as)) {
         return PERMISSION_DENIED;
     }
     Mutex::Autolock lock(mLock);
@@ -376,12 +377,13 @@
     return NO_ERROR;
 }
 
-MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid,
-        const String16& opPackageName)
+MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service,
+        const AttributionSourceState& attributionSource)
 {
     ALOGV("Client constructor");
-    mPid = pid;
-    mRecorder = new StagefrightRecorder(opPackageName);
+    // attribution source already validated in createMediaRecorder
+    mAttributionSource = attributionSource;
+    mRecorder = new StagefrightRecorder(attributionSource);
     mMediaPlayerService = service;
 }
 
@@ -423,30 +425,35 @@
 
     sp<IServiceManager> sm = defaultServiceManager();
 
-    // WORKAROUND: We don't know if camera exists here and getService might block for 5 seconds.
-    // Use checkService for camera if we don't know it exists.
-    static std::atomic<bool> sCameraChecked(false);  // once true never becomes false.
-    static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
-    sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
-        ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
-    // If the device does not have a camera, do not create a death listener for it.
-    if (binder != NULL) {
-        sCameraVerified = true;
-        mDeathNotifiers.emplace_back(
-                binder, [l = wp<IMediaRecorderClient>(listener)](){
-            sp<IMediaRecorderClient> listener = l.promote();
-            if (listener) {
-                ALOGV("media.camera service died. "
-                      "Sending death notification.");
-                listener->notify(
-                        MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
-                        MediaPlayerService::CAMERA_PROCESS_DEATH);
-            } else {
-                ALOGW("media.camera service died without a death handler.");
-            }
-        });
+    static const bool sCameraDisabled = CameraUtils::isCameraServiceDisabled();
+
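+    // Devices with the camera service disabled never register a camera death listener.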
+    if (!sCameraDisabled) {
+        // WORKAROUND: We don't know if camera exists here and getService might block for 5 seconds.
+        // Use checkService for camera if we don't know it exists.
+        static std::atomic<bool> sCameraChecked(false);  // once true never becomes false.
+        static std::atomic<bool> sCameraVerified(false); // once true never becomes false.
+
+        sp<IBinder> binder = (sCameraVerified || !sCameraChecked)
+            ? sm->getService(String16("media.camera")) : sm->checkService(String16("media.camera"));
+        // If the device does not have a camera, do not create a death listener for it.
+        if (binder != NULL) {
+            sCameraVerified = true;
+            mDeathNotifiers.emplace_back(
+                    binder, [l = wp<IMediaRecorderClient>(listener)](){
+                sp<IMediaRecorderClient> listener = l.promote();
+                if (listener) {
+                    ALOGV("media.camera service died. "
+                          "Sending death notification.");
+                    listener->notify(
+                            MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED,
+                            MediaPlayerService::CAMERA_PROCESS_DEATH);
+                } else {
+                    ALOGW("media.camera service died without a death handler.");
+                }
+            });
+        }
+        sCameraChecked = true;
     }
-    sCameraChecked = true;
 
     {
         using ::android::hidl::base::V1_0::IBase;
@@ -585,4 +592,13 @@
     }
     return NO_INIT;
 }
+
+status_t MediaRecorderClient::getRtpDataUsage(uint64_t *bytes) {
+    ALOGV("getRtpDataUsage");
+    Mutex::Autolock lock(mLock);
+    if (mRecorder != NULL) {
+        return mRecorder->getRtpDataUsage(bytes);
+    }
+    return NO_INIT;
+}
 }; // namespace android
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 12257e5..dcb9f82 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -22,6 +22,7 @@
 
 #include <media/AudioSystem.h>
 #include <media/IMediaRecorder.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <vector>
 
@@ -86,20 +87,20 @@
     virtual     status_t   setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual     status_t   setPreferredMicrophoneFieldDimension(float zoom);
                 status_t   getPortId(audio_port_handle_t *portId) override;
+    virtual     status_t   getRtpDataUsage(uint64_t *bytes);
 
 private:
     friend class           MediaPlayerService;  // for accessing private constructor
 
                            MediaRecorderClient(
                                    const sp<MediaPlayerService>& service,
-                                                               pid_t pid,
-                                                               const String16& opPackageName);
+                                   const content::AttributionSourceState& attributionSource);
     virtual                ~MediaRecorderClient();
 
     std::vector<DeathNotifier> mDeathNotifiers;
     sp<AudioDeviceUpdatedNotifier> mAudioDeviceUpdatedNotifier;
 
-    pid_t                  mPid;
+    content::AttributionSourceState mAttributionSource;
     mutable Mutex          mLock;
     MediaRecorderBase      *mRecorder;
     sp<MediaPlayerService> mMediaPlayerService;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 41b6f72..2aabd53 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -174,9 +174,7 @@
         ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
 
         const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!strncasecmp(mime, "image/", 6)) {
+        if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "image/", 6)) {
             int32_t isPrimary;
             if ((index < 0 && meta->findInt32(
                     kKeyTrackIsDefault, &isPrimary) && isPrimary)
@@ -208,12 +206,19 @@
     }
 
     const char *mime;
-    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+    if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("image track has no mime type");
+        return NULL;
+    }
     ALOGV("extracting from %s track", mime);
     if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mime = MEDIA_MIMETYPE_VIDEO_HEVC;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+        mime = MEDIA_MIMETYPE_VIDEO_AV1;
+        trackMeta = new MetaData(*trackMeta);
+        trackMeta->setCString(kKeyMIMEType, mime);
     }
 
     bool preferhw = property_get_bool(
@@ -228,7 +233,7 @@
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
         const AString &componentName = matchingCodecs[i];
-        sp<ImageDecoder> decoder = new ImageDecoder(componentName, trackMeta, source);
+        sp<MediaImageDecoder> decoder = new MediaImageDecoder(componentName, trackMeta, source);
         int64_t frameTimeUs = thumbnail ? -1 : 0;
         if (decoder->init(frameTimeUs, 0 /*option*/, colorFormat) == OK) {
             sp<IMemory> frame = decoder->extractFrame(rect);
@@ -299,9 +304,7 @@
         }
 
         const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
-
-        if (!strncasecmp(mime, "video/", 6)) {
+        if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "video/", 6)) {
             break;
         }
     }
@@ -337,7 +340,10 @@
     }
 
     const char *mime;
-    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+    if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+        ALOGE("video track has no mime information.");
+        return NULL;
+    }
 
     bool preferhw = property_get_bool(
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
@@ -523,6 +529,15 @@
         mMetaData.add(METADATA_KEY_EXIF_LENGTH, String8(tmp));
     }
 
+    int64_t xmpOffset, xmpSize;
+    if (meta->findInt64(kKeyXmpOffset, &xmpOffset)
+     && meta->findInt64(kKeyXmpSize, &xmpSize)) {
+        sprintf(tmp, "%lld", (long long)xmpOffset);
+        mMetaData.add(METADATA_KEY_XMP_OFFSET, String8(tmp));
+        sprintf(tmp, "%lld", (long long)xmpSize);
+        mMetaData.add(METADATA_KEY_XMP_LENGTH, String8(tmp));
+    }
+
     bool hasAudio = false;
     bool hasVideo = false;
     int32_t videoWidth = -1;
@@ -531,14 +546,14 @@
     int32_t audioBitrate = -1;
     int32_t rotationAngle = -1;
     int32_t imageCount = 0;
-    int32_t imagePrimary = 0;
+    int32_t imagePrimary = -1;
     int32_t imageWidth = -1;
     int32_t imageHeight = -1;
     int32_t imageRotation = -1;
 
     // The overall duration is the duration of the longest track.
     int64_t maxDurationUs = 0;
-    String8 timedTextLang;
+    String8 timedTextLang, videoMime;
     for (size_t i = 0; i < numTracks; ++i) {
         sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
         if (!trackMeta) {
@@ -574,28 +589,33 @@
                     mMetaData.add(METADATA_KEY_SAMPLERATE, String8(tmp));
                 }
             } else if (!hasVideo && !strncasecmp("video/", mime, 6)) {
-                hasVideo = true;
-
-                CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
-                CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
                 if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
                     rotationAngle = 0;
                 }
                 if (!trackMeta->findInt32(kKeyFrameCount, &videoFrameCount)) {
                     videoFrameCount = 0;
                 }
-
-                parseColorAspects(trackMeta);
+                if (trackMeta->findInt32(kKeyWidth, &videoWidth)
+                    && trackMeta->findInt32(kKeyHeight, &videoHeight)) {
+                    hasVideo = true;
+                    videoMime = String8(mime);
+                    parseColorAspects(trackMeta);
+                } else {
+                    ALOGE("video track ignored for missing dimensions");
+                }
             } else if (!strncasecmp("image/", mime, 6)) {
                 int32_t isPrimary;
                 if (trackMeta->findInt32(
                         kKeyTrackIsDefault, &isPrimary) && isPrimary) {
-                    imagePrimary = imageCount;
-                    CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
-                    CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
                     if (!trackMeta->findInt32(kKeyRotation, &imageRotation)) {
                         imageRotation = 0;
                     }
+                    if (trackMeta->findInt32(kKeyWidth, &imageWidth)
+                        && trackMeta->findInt32(kKeyHeight, &imageHeight)) {
+                        imagePrimary = imageCount;
+                    } else {
+                        ALOGE("primary image track ignored for missing dimensions");
+                    }
                 }
                 imageCount++;
             } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
@@ -618,7 +638,8 @@
     }
 
     // The duration value is a string representing the duration in ms.
-    sprintf(tmp, "%" PRId64, (maxDurationUs + 500) / 1000);
+    sprintf(tmp, "%" PRId64,
+           (maxDurationUs > (INT64_MAX - 500) ? INT64_MAX : (maxDurationUs + 500)) / 1000);
     mMetaData.add(METADATA_KEY_DURATION, String8(tmp));
 
     if (hasAudio) {
@@ -628,22 +649,27 @@
     if (hasVideo) {
         mMetaData.add(METADATA_KEY_HAS_VIDEO, String8("yes"));
 
+        CHECK(videoWidth >= 0);
         sprintf(tmp, "%d", videoWidth);
         mMetaData.add(METADATA_KEY_VIDEO_WIDTH, String8(tmp));
 
+        CHECK(videoHeight >= 0);
         sprintf(tmp, "%d", videoHeight);
         mMetaData.add(METADATA_KEY_VIDEO_HEIGHT, String8(tmp));
 
         sprintf(tmp, "%d", rotationAngle);
         mMetaData.add(METADATA_KEY_VIDEO_ROTATION, String8(tmp));
 
+        mMetaData.add(METADATA_KEY_VIDEO_CODEC_MIME_TYPE, videoMime);
+
         if (videoFrameCount > 0) {
             sprintf(tmp, "%d", videoFrameCount);
             mMetaData.add(METADATA_KEY_VIDEO_FRAME_COUNT, String8(tmp));
         }
     }
 
-    if (imageCount > 0) {
+    // only if we have a primary image
+    if (imageCount > 0 && imagePrimary >= 0) {
         mMetaData.add(METADATA_KEY_HAS_IMAGE, String8("yes"));
 
         sprintf(tmp, "%d", imageCount);
@@ -652,9 +678,11 @@
         sprintf(tmp, "%d", imagePrimary);
         mMetaData.add(METADATA_KEY_IMAGE_PRIMARY, String8(tmp));
 
+        CHECK(imageWidth >= 0);
         sprintf(tmp, "%d", imageWidth);
         mMetaData.add(METADATA_KEY_IMAGE_WIDTH, String8(tmp));
 
+        CHECK(imageHeight >= 0);
         sprintf(tmp, "%d", imageHeight);
         mMetaData.add(METADATA_KEY_IMAGE_HEIGHT, String8(tmp));
 
@@ -682,10 +710,9 @@
                 !strcasecmp(fileMIME, "video/x-matroska")) {
             sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
             const char *trackMIME;
-            if (trackMeta != nullptr) {
-                CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
-            }
-            if (!strncasecmp("audio/", trackMIME, 6)) {
+            if (trackMeta != nullptr
+                && trackMeta->findCString(kKeyMIMEType, &trackMIME)
+                && !strncasecmp("audio/", trackMIME, 6)) {
                 // The matroska file only contains a single audio track,
                 // rewrite its mime type.
                 mMetaData.add(
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 7897959..bffd7b3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -17,6 +17,9 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "StagefrightRecorder"
 #include <inttypes.h>
+// TODO/workaround: android-base/logging.h conflicts with ADebug.h and must be
+// included first.
+#include <android-base/logging.h>
 #include <utils/Log.h>
 
 #include "WebmWriter.h"
@@ -30,6 +33,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 
+#include <media/AidlConversion.h>
 #include <media/IMediaPlayerService.h>
 #include <media/MediaMetricsItem.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -44,6 +48,7 @@
 #include <media/stagefright/CameraSourceTimeLapse.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaCodecSource.h>
@@ -74,6 +79,7 @@
 // NB: these are matched with public Java API constants defined
 // in frameworks/base/media/java/android/media/MediaRecorder.java
 // These must be kept synchronized with the constants there.
+static const char *kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
 static const char *kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
 static const char *kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
 static const char *kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
@@ -110,13 +116,18 @@
 }
 
 
-StagefrightRecorder::StagefrightRecorder(const String16 &opPackageName)
-    : MediaRecorderBase(opPackageName),
+StagefrightRecorder::StagefrightRecorder(const AttributionSourceState& client)
+    : MediaRecorderBase(client),
       mWriter(NULL),
       mOutputFd(-1),
       mAudioSource((audio_source_t)AUDIO_SOURCE_CNT), // initialize with invalid value
       mPrivacySensitive(PRIVACY_SENSITIVE_DEFAULT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
+      mRTPCVOExtMap(-1),
+      mRTPCVODegrees(0),
+      mRTPSockDscp(0),
+      mRTPSockNetwork(0),
+      mLastSeqNo(0),
       mStarted(false),
       mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
       mDeviceCallbackEnabled(false),
@@ -125,6 +136,7 @@
 
     ALOGV("Constructor");
 
+    mMetricsItem = NULL;
     mAnalyticsDirty = false;
     reset();
 }
@@ -147,7 +159,9 @@
 
     // we run as part of the media player service; what we really want to
     // know is the app which requested the recording.
-    mMetricsItem->setUid(mClientUid);
+    mMetricsItem->setUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid)));
+
+    mMetricsItem->setCString(kRecorderLogSessionId, mLogSessionId.c_str());
 
     // populate the values from the raw fields.
 
@@ -199,10 +213,12 @@
 void StagefrightRecorder::flushAndResetMetrics(bool reinitialize) {
     ALOGV("flushAndResetMetrics");
     // flush anything we have, maybe setup a new record
-    if (mAnalyticsDirty && mMetricsItem != NULL) {
-        updateMetrics();
-        if (mMetricsItem->count() > 0) {
-            mMetricsItem->selfrecord();
+    if (mMetricsItem != NULL) {
+        if (mAnalyticsDirty) {
+            updateMetrics();
+            if (mMetricsItem->count() > 0) {
+                mMetricsItem->selfrecord();
+            }
         }
         delete mMetricsItem;
         mMetricsItem = NULL;
@@ -567,6 +583,32 @@
     // range that a specific encoder supports. The mismatch between the
     // the target and requested bit rate will NOT be treated as an error.
     mVideoBitRate = bitRate;
+
+    // A new bitrate (TMMBR) should also be applied at runtime if the output format is RTP_AVP.
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        // Regular I frames may overload the network so we reduce the bitrate to allow
+        // margins for the I frame overruns.
+        // Still send requested bitrate (TMMBR) in the reply (TMMBN).
+        const float coefficient = 0.8f;
+        mVideoBitRate = (bitRate * coefficient) / 1000 * 1000;
+    }
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP && mStarted && mPauseStartTimeUs == 0) {
+        mVideoEncoderSource->setEncodingBitrate(mVideoBitRate);
+        ARTPWriter* rtpWriter = static_cast<ARTPWriter*>(mWriter.get());
+        rtpWriter->setTMMBNInfo(mOpponentID, bitRate);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoBitRateMode(int32_t bitRateMode) {
+    ALOGV("setParamVideoBitRateMode: %d", bitRateMode);
+    // TODO: clarify what a bitrate mode of -1 means, since the defined modes start at 0.
+    if (bitRateMode < -1) {
+        ALOGE("Unsupported video bitrate mode: %d", bitRateMode);
+        return BAD_VALUE;
+    }
+    mVideoBitRateMode = bitRateMode;
     return OK;
 }
 
@@ -776,6 +818,113 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpLocalIp(const String8 &localIp) {
+    ALOGV("setParamVideoLocalIp: %s", localIp.string());
+
+    mLocalIp.setTo(localIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpLocalPort(int32_t localPort) {
+    ALOGV("setParamVideoLocalPort: %d", localPort);
+
+    mLocalPort = localPort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemoteIp(const String8 &remoteIp) {
+    ALOGV("setParamVideoRemoteIp: %s", remoteIp.string());
+
+    mRemoteIp.setTo(remoteIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemotePort(int32_t remotePort) {
+    ALOGV("setParamVideoRemotePort: %d", remotePort);
+
+    mRemotePort = remotePort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamSelfID(int32_t selfID) {
+    ALOGV("setParamSelfID: %x", selfID);
+
+    mSelfID = selfID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamVideoOpponentID(int32_t opponentID) {
+    mOpponentID = opponentID;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamPayloadType(int32_t payloadType) {
+    ALOGV("setParamPayloadType: %d", payloadType);
+
+    mPayloadType = payloadType;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updatePayloadType(mPayloadType);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVOExtMap(int32_t extmap) {
+    ALOGV("setRtpCvoExtMap: %d", extmap);
+
+    mRTPCVOExtMap = extmap;
+    return OK;
+}
+
+status_t StagefrightRecorder::setRTPCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autolock(mLock);
+    ALOGV("setRtpCvoDegrees: %d", cvoDegrees);
+
+    mRTPCVODegrees = cvoDegrees;
+
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateCVODegrees(mRTPCVODegrees);
+    }
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpDscp(int32_t dscp) {
+    ALOGV("setParamRtpDscp: %d", dscp);
+
+    mRTPSockDscp = dscp;
+    return OK;
+}
+
+status_t StagefrightRecorder::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long) networkHandle);
+
+    mRTPSockNetwork = networkHandle;
+    if (mStarted && mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        mWriter->updateSocketNetwork(mRTPSockNetwork);
+    }
+    return OK;
+}
+
+status_t StagefrightRecorder::requestIDRFrame() {
+    status_t ret = BAD_VALUE;
+    if (mVideoEncoderSource != NULL) {
+        ret = mVideoEncoderSource->requestIDRFrame();
+    } else {
+        ALOGV("requestIDRFrame: Encoder not ready");
+    }
+    return ret;
+}
+
+status_t StagefrightRecorder::setLogSessionId(const String8 &log_session_id) {
+    ALOGV("setLogSessionId: %s", log_session_id.string());
+
+    // TODO: validate that log_session_id is a 32-character hex string.
+    mLogSessionId.setTo(log_session_id.string());
+    return OK;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -844,6 +993,11 @@
         if (safe_strtoi32(value.string(), &video_bitrate)) {
             return setParamVideoEncodingBitRate(video_bitrate);
         }
+    } else if (key == "video-param-bitrate-mode") {
+        int32_t video_bitrate_mode;
+        if (safe_strtoi32(value.string(), &video_bitrate_mode)) {
+            return setParamVideoBitRateMode(video_bitrate_mode);
+        }
     } else if (key == "video-param-rotation-angle-degrees") {
         int32_t degrees;
         if (safe_strtoi32(value.string(), &degrees)) {
@@ -884,6 +1038,63 @@
         if (safe_strtod(value.string(), &fps)) {
             return setParamCaptureFps(fps);
         }
+    } else if (key == "rtp-param-local-ip") {
+        return setParamRtpLocalIp(value);
+    } else if (key == "rtp-param-local-port") {
+        int32_t localPort;
+        if (safe_strtoi32(value.string(), &localPort)) {
+            return setParamRtpLocalPort(localPort);
+        }
+    } else if (key == "rtp-param-remote-ip") {
+        return setParamRtpRemoteIp(value);
+    } else if (key == "rtp-param-remote-port") {
+        int32_t remotePort;
+        if (safe_strtoi32(value.string(), &remotePort)) {
+            return setParamRtpRemotePort(remotePort);
+        }
+    } else if (key == "rtp-param-self-id") {
+        int32_t selfID;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            selfID = static_cast<int32_t>(temp);
+            return setParamSelfID(selfID);
+        }
+    } else if (key == "rtp-param-opponent-id") {
+        int32_t opnId;
+        int64_t temp;
+        if (safe_strtoi64(value.string(), &temp)) {
+            opnId = static_cast<int32_t>(temp);
+            return setParamVideoOpponentID(opnId);
+        }
+    } else if (key == "rtp-param-payload-type") {
+        int32_t payloadType;
+        if (safe_strtoi32(value.string(), &payloadType)) {
+            return setParamPayloadType(payloadType);
+        }
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        int32_t extmap;
+        if (safe_strtoi32(value.string(), &extmap)) {
+            return setRTPCVOExtMap(extmap);
+        }
+    } else if (key == "rtp-param-ext-cvo-degrees") {
+        int32_t degrees;
+        if (safe_strtoi32(value.string(), &degrees)) {
+            return setRTPCVODegrees(degrees);
+        }
+    } else if (key == "video-param-request-i-frame") {
+        return requestIDRFrame();
+    } else if (key == "rtp-param-set-socket-dscp") {
+        int32_t dscp;
+        if (safe_strtoi32(value.string(), &dscp)) {
+            return setParamRtpDscp(dscp);
+        }
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle;
+        if (safe_strtoi64(value.string(), &networkHandle)) {
+            return setSocketNetwork(networkHandle);
+        }
+    } else if (key == "log-session-id") {
+        return setLogSessionId(value);
     } else {
         ALOGE("setParameter: failed to find key %s", key.string());
     }
@@ -932,7 +1143,9 @@
 }
 
 status_t StagefrightRecorder::setClientName(const String16& clientName) {
-    mClientName = clientName;
+    mAttributionSource.packageName = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_String16_string(clientName));
 
     return OK;
 }
@@ -944,10 +1157,6 @@
         return INVALID_OPERATION;
     }
 
-    // Get UID and PID here for permission checking
-    mClientUid = IPCThreadState::self()->getCallingUid();
-    mClientPid = IPCThreadState::self()->getCallingPid();
-
     status_t status = OK;
 
     switch (mOutputFormat) {
@@ -1050,6 +1259,17 @@
             sp<MetaData> meta = new MetaData;
             int64_t startTimeUs = systemTime() / 1000;
             meta->setInt64(kKeyTime, startTimeUs);
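+            // RTP-specific parameters consumed by the writer when it is started below.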
+            meta->setInt32(kKeySelfID, mSelfID);
+            meta->setInt32(kKeyPayloadType, mPayloadType);
+            meta->setInt64(kKeySocketNetwork, mRTPSockNetwork);
+            if (mRTPCVOExtMap > 0) {
+                meta->setInt32(kKeyRtpExtMap, mRTPCVOExtMap);
+                meta->setInt32(kKeyRtpCvoDegrees, mRTPCVODegrees);
+            }
+            if (mRTPSockDscp > 0) {
+                meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
+            }
+
             status = mWriter->start(meta.get());
             break;
         }
@@ -1113,7 +1333,7 @@
     if (mPrivacySensitive == PRIVACY_SENSITIVE_DEFAULT) {
         if (attr.source == AUDIO_SOURCE_VOICE_COMMUNICATION
                 || attr.source == AUDIO_SOURCE_CAMCORDER) {
-            attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
             mPrivacySensitive = PRIVACY_SENSITIVE_ENABLED;
         } else {
             mPrivacySensitive = PRIVACY_SENSITIVE_DISABLED;
@@ -1129,19 +1349,17 @@
             return NULL;
         }
         if (mPrivacySensitive == PRIVACY_SENSITIVE_ENABLED) {
-            attr.flags |= AUDIO_FLAG_CAPTURE_PRIVATE;
+            attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_CAPTURE_PRIVATE);
         }
     }
 
     sp<AudioSource> audioSource =
         new AudioSource(
                 &attr,
-                mOpPackageName,
+                mAttributionSource,
                 sourceSampleRate,
                 mAudioChannels,
                 mSampleRate,
-                mClientUid,
-                mClientPid,
                 mSelectedDeviceId,
                 mSelectedMicDirection,
                 mSelectedMicFieldDimension);
@@ -1330,7 +1548,7 @@
         mVideoEncoderSource = source;
     }
 
-    mWriter = new ARTPWriter(mOutputFd);
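+    // Hand the last RTP sequence number to the new writer so sequence numbering can
+    // continue across writer restarts (it is captured again when the writer stops).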
+    mWriter = new ARTPWriter(mOutputFd, mLocalIp, mLocalPort, mRemoteIp, mRemotePort, mLastSeqNo);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
@@ -1663,6 +1881,10 @@
     Size videoSize;
     videoSize.width = mVideoWidth;
     videoSize.height = mVideoHeight;
+    uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(mAttributionSource.uid));
+    pid_t pid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(mAttributionSource.pid));
+    String16 clientName = VALUE_OR_RETURN_STATUS(
+        aidl2legacy_string_view_String16(mAttributionSource.packageName.value_or("")));
     if (mCaptureFpsEnable) {
         if (!(mCaptureFps > 0.)) {
             ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps);
@@ -1670,13 +1892,13 @@
         }
 
         mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
-                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
+                mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
                 videoSize, mFrameRate, mPreviewSurface,
                 std::llround(1e6 / mCaptureFps));
         *cameraSource = mCameraSourceTimeLapse;
     } else {
         *cameraSource = CameraSource::CreateFromCamera(
-                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid,
+                mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
                 videoSize, mFrameRate,
                 mPreviewSurface);
     }
@@ -1784,7 +2006,13 @@
         }
     }
 
+    if (mOutputFormat == OUTPUT_FORMAT_RTP_AVP) {
+        // This indicates that a raw image provided to the encoder needs to be rotated.
+        format->setInt32("rotation-degrees", mRotationDegrees);
+    }
+
     format->setInt32("bitrate", mVideoBitRate);
+    format->setInt32("bitrate-mode", mVideoBitRateMode);
     format->setInt32("frame-rate", mFrameRate);
     format->setInt32("i-frame-interval", mIFramesIntervalSec);
 
@@ -2130,6 +2358,7 @@
 
     if (mWriter != NULL) {
         err = mWriter->stop();
+        mLastSeqNo = mWriter->getSequenceNum();
         mWriter.clear();
     }
 
@@ -2206,6 +2435,8 @@
     mVideoHeight   = 144;
     mFrameRate     = -1;
     mVideoBitRate  = 192000;
+    // Following MediaCodec's default
+    mVideoBitRateMode = BITRATE_MODE_VBR;
     mSampleRate    = 8000;
     mAudioChannels = 1;
     mAudioBitRate  = 12200;
@@ -2351,6 +2582,14 @@
     return NO_INIT;
 }
 
+status_t StagefrightRecorder::getRtpDataUsage(uint64_t *bytes) {
+    if (mWriter != 0) {
+        *bytes = mWriter->getAccumulativeBytes();
+        return OK;
+    }
+    return NO_INIT;
+}
+
 status_t StagefrightRecorder::dump(
         int fd, const Vector<String16>& args) const {
     ALOGV("dump");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index a725bee..d6de47f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -26,9 +26,12 @@
 #include <system/audio.h>
 
 #include <media/hardware/MetadataBufferType.h>
+#include <android/content/AttributionSourceState.h>
 
 namespace android {
 
+using content::AttributionSourceState;
+
 class Camera;
 class ICameraRecordingProxy;
 class CameraSource;
@@ -42,9 +45,10 @@
 struct ALooper;
 
 struct StagefrightRecorder : public MediaRecorderBase {
-    explicit StagefrightRecorder(const String16 &opPackageName);
+    explicit StagefrightRecorder(const AttributionSourceState& attributionSource);
     virtual ~StagefrightRecorder();
     virtual status_t init();
+    virtual status_t setLogSessionId(const String8 &id);
     virtual status_t setAudioSource(audio_source_t as);
             status_t setPrivacySensitive(bool privacySensitive) override;
             status_t isPrivacySensitive(bool *privacySensitive) const override;
@@ -82,6 +86,7 @@
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
             status_t getPortId(audio_port_handle_t *portId) const override;
+    virtual status_t getRtpDataUsage(uint64_t *bytes);
 
 private:
 
@@ -97,9 +102,6 @@
     sp<IGraphicBufferProducer> mPreviewSurface;
     sp<PersistentSurface> mPersistentSurface;
     sp<IMediaRecorderClient> mListener;
-    String16 mClientName;
-    uid_t mClientUid;
-    pid_t mClientPid;
     sp<MediaWriter> mWriter;
     int mOutputFd;
     sp<AudioSource> mAudioSourceNode;
@@ -109,6 +111,7 @@
     void flushAndResetMetrics(bool reinitialize);
     void updateMetrics();
 
+    AString mLogSessionId;
     audio_source_t mAudioSource;
     privacy_sensitive_t mPrivacySensitive;
     video_source mVideoSource;
@@ -119,6 +122,7 @@
     int32_t mVideoWidth, mVideoHeight;
     int32_t mFrameRate;
     int32_t mVideoBitRate;
+    int32_t mVideoBitRateMode;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -138,6 +142,18 @@
     int32_t mLongitudex10000;
     int32_t mStartTimeOffsetMs;
     int32_t mTotalBitRate;
+    String8 mLocalIp;
+    String8 mRemoteIp;
+    int32_t mLocalPort;
+    int32_t mRemotePort;
+    int32_t mSelfID;
+    int32_t mOpponentID;
+    int32_t mPayloadType;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
+    int32_t mRTPSockDscp;
+    int64_t mRTPSockNetwork;
+    uint32_t mLastSeqNo;
 
     int64_t mDurationRecordedUs;
     int64_t mStartedRecordingUs;
@@ -205,6 +221,7 @@
     status_t setParamCaptureFpsEnable(int32_t timeLapseEnable);
     status_t setParamCaptureFps(double fps);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
+    status_t setParamVideoBitRateMode(int32_t bitRateMode);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
     status_t setParamVideoEncoderLevel(int32_t level);
@@ -219,6 +236,18 @@
     status_t setParamMovieTimeScale(int32_t timeScale);
     status_t setParamGeoDataLongitude(int64_t longitudex10000);
     status_t setParamGeoDataLatitude(int64_t latitudex10000);
+    status_t setParamRtpLocalIp(const String8 &localIp);
+    status_t setParamRtpLocalPort(int32_t localPort);
+    status_t setParamRtpRemoteIp(const String8 &remoteIp);
+    status_t setParamRtpRemotePort(int32_t remotePort);
+    status_t setParamSelfID(int32_t selfID);
+    status_t setParamVideoOpponentID(int32_t opponentID);
+    status_t setParamPayloadType(int32_t payloadType);
+    status_t setRTPCVOExtMap(int32_t extmap);
+    status_t setRTPCVODegrees(int32_t cvoDegrees);
+    status_t setParamRtpDscp(int32_t dscp);
+    status_t setSocketNetwork(int64_t networkHandle);
+    status_t requestIDRFrame();
     void clipVideoBitRate();
     void clipVideoFrameRate();
     void clipVideoFrameWidth();
diff --git a/media/libmediaplayerservice/datasource/Android.bp b/media/libmediaplayerservice/datasource/Android.bp
index 71fa50b..19fc172 100644
--- a/media/libmediaplayerservice/datasource/Android.bp
+++ b/media/libmediaplayerservice/datasource/Android.bp
@@ -1,3 +1,14 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmediaplayerservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_license",
+    ],
+}
+
 cc_library_static {
     name: "libplayerservice_datasource",
 
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 436cb31..1cbd8a0 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -60,7 +60,7 @@
 #define DEFAULT_AUDIOSINK_SAMPLERATE 44100
 
 // when the channel mask isn't known, use the channel count to derive a mask in AudioSink::open()
-#define CHANNEL_MASK_USE_CHANNEL_ORDER 0
+#define CHANNEL_MASK_USE_CHANNEL_ORDER AUDIO_CHANNEL_NONE
 
 // duration below which we do not allow deep audio buffering
 #define AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US 5000000
@@ -108,6 +108,7 @@
         virtual audio_stream_type_t getAudioStreamType() const = 0;
         virtual uint32_t    getSampleRate() const = 0;
         virtual int64_t     getBufferDurationInUs() const = 0;
+        virtual audio_output_flags_t getFlags() const = 0;
 
         // If no callback is specified, use the "write" API below to submit
         // audio data.
@@ -183,6 +184,10 @@
         return INVALID_OPERATION;
     }
 
+    virtual status_t    setDataSource(const String8& /* rtpParams */) {
+        return INVALID_OPERATION;
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<IGraphicBufferProducer>& bufferProducer) = 0;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 684ba2e..af9cf45 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -52,18 +52,19 @@
             if (binder == NULL) {
                 ALOGW("could not get the power manager service");
             } else {
-                mPowerManager = interface_cast<IPowerManager>(binder);
+                mPowerManager = interface_cast<os::IPowerManager>(binder);
                 binder->linkToDeath(mDeathRecipient);
             }
         }
         if (mPowerManager != NULL) {
             sp<IBinder> binder = new BBinder();
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            status_t status = mPowerManager->acquireWakeLock(
-                    POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder, String16("AWakeLock"), String16("media"));
+            binder::Status status = mPowerManager->acquireWakeLock(
+                    binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
+                    String16("AWakeLock"), String16("media"),
+                    {} /* workSource */, {} /* historyTag */, -1 /* displayId */);
             IPCThreadState::self()->restoreCallingIdentity(token);
-            if (status == NO_ERROR) {
+            if (status.isOk()) {
                 mWakeLockToken = binder;
                 mWakeLockCount++;
                 return true;
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.h b/media/libmediaplayerservice/nuplayer/AWakeLock.h
index 323e7d7..8aa3b41 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.h
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.h
@@ -18,7 +18,7 @@
 #define A_WAKELOCK_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 #include <utils/RefBase.h>
 
 namespace android {
@@ -37,7 +37,7 @@
     virtual ~AWakeLock();
 
 private:
-    sp<IPowerManager> mPowerManager;
+    sp<os::IPowerManager> mPowerManager;
     sp<IBinder>       mWakeLockToken;
     uint32_t          mWakeLockCount;
 
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 32c97cf..6d338db 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_nuplayer_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libmediaplayerservice_nuplayer_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
 
     srcs: [
@@ -14,6 +33,7 @@
         "NuPlayerRenderer.cpp",
         "NuPlayerStreamListener.cpp",
         "RTSPSource.cpp",
+        "RTPSource.cpp",
         "StreamingSource.cpp",
     ],
 
@@ -30,6 +50,7 @@
         "frameworks/av/media/libstagefright/mpeg2ts",
         "frameworks/av/media/libstagefright/rtsp",
         "frameworks/av/media/libstagefright/timedtext",
+        "frameworks/native/include/android",
     ],
 
     cflags: [
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c1c4b55..d94cecf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -31,6 +31,7 @@
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
+#include "RTPSource.h"
 #include "RTSPSource.h"
 #include "StreamingSource.h"
 #include "GenericSource.h"
@@ -368,6 +369,18 @@
     return err;
 }
 
+void NuPlayer::setDataSourceAsync(const String8& rtpParams) {
+    ALOGD("setDataSourceAsync for RTP = %s", rtpParams.string());
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+    sp<Source> source = new RTPSource(notify, rtpParams);
+
+    msg->setObject("source", source);
+    msg->post();
+    mDataSourceType = DATA_SOURCE_TYPE_RTP;
+}
+
 void NuPlayer::prepareAsync() {
     ALOGV("prepareAsync");
 
@@ -858,10 +871,12 @@
             readFromAMessage(msg, &rate);
             status_t err = OK;
             if (mRenderer != NULL) {
-                // AudioSink allows only 1.f and 0.f for offload mode.
-                // For other speed, switch to non-offload mode.
-                if (mOffloadAudio && ((rate.mSpeed != 0.f && rate.mSpeed != 1.f)
-                        || rate.mPitch != 1.f)) {
+                // AudioSink allows only 1.f and 0.f for offload and direct modes.
+                // For other speeds, restart audio to fall back to supported paths.
+                bool audioDirectOutput = (mAudioSink->getFlags() & AUDIO_OUTPUT_FLAG_DIRECT) != 0;
+                if ((mOffloadAudio || audioDirectOutput) &&
+                        ((rate.mSpeed != 0.f && rate.mSpeed != 1.f) || rate.mPitch != 1.f)) {
                     int64_t currentPositionUs;
                     if (getCurrentPosition(&currentPositionUs) != OK) {
                         currentPositionUs = mPreviousSeekTimeUs;
@@ -1689,6 +1704,12 @@
     updateRebufferingTimer(false /* stopping */, false /* exiting */);
 }
 
+void NuPlayer::setTargetBitrate(int bitrate) {
+    if (mSource != NULL) {
+        mSource->setTargetBitrate(bitrate);
+    }
+}
+
 void NuPlayer::onPause() {
 
     updatePlaybackTimer(true /* stopping */, "onPause");
@@ -1915,6 +1936,11 @@
 
     format->setInt32("priority", 0 /* realtime */);
 
+    if (mDataSourceType == DATA_SOURCE_TYPE_RTP) {
+        ALOGV("instantiateDecoder: set decoder error free on stream corrupt.");
+        format->setInt32("corrupt-free", true);
+    }
+
     if (!audio) {
         AString mime;
         CHECK(format->findString("mime", &mime));
@@ -2715,6 +2741,14 @@
             break;
         }
 
+        case Source::kWhatIMSRxNotice:
+        {
+            sp<AMessage> IMSRxNotice;
+            CHECK(msg->findMessage("message", &IMSRxNotice));
+            sendIMSRxNotice(IMSRxNotice);
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -2817,11 +2851,75 @@
     }
 }
 
+void NuPlayer::sendIMSRxNotice(const sp<AMessage> &msg) {
+    int32_t payloadType;
+
+    CHECK(msg->findInt32("payload-type", &payloadType));
+
+    Parcel in;
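+    // The order of these writes defines the payload layout delivered with
+    // MEDIA_IMS_RX_NOTICE; the listener must read the fields back in the same order.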
+    in.writeInt32(payloadType);
+
+    switch (payloadType) {
+        case ARTPSource::RTCP_TSFB:   // RTCP TSFB
+        case ARTPSource::RTCP_PSFB:   // RTCP PSFB
+        case ARTPSource::RTP_AUTODOWN:
+        {
+            int32_t feedbackType, id;
+            CHECK(msg->findInt32("feedback-type", &feedbackType));
+            CHECK(msg->findInt32("sender", &id));
+            in.writeInt32(feedbackType);
+            in.writeInt32(id);
+            if (payloadType == ARTPSource::RTCP_TSFB) {
+                int32_t bitrate;
+                CHECK(msg->findInt32("bit-rate", &bitrate));
+                in.writeInt32(bitrate);
+            }
+            break;
+        }
+        case ARTPSource::RTP_QUALITY:
+        case ARTPSource::RTP_QUALITY_EMC:
+        {
+            int32_t feedbackType, bitrate;
+            int32_t highestSeqNum, baseSeqNum, prevExpected;
+            int32_t numBufRecv, prevNumBufRecv;
+            CHECK(msg->findInt32("feedback-type", &feedbackType));
+            CHECK(msg->findInt32("bit-rate", &bitrate));
+            CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
+            CHECK(msg->findInt32("base-seq-num", &baseSeqNum));
+            CHECK(msg->findInt32("prev-expected", &prevExpected));
+            CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
+            CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+            in.writeInt32(feedbackType);
+            in.writeInt32(bitrate);
+            in.writeInt32(highestSeqNum);
+            in.writeInt32(baseSeqNum);
+            in.writeInt32(prevExpected);
+            in.writeInt32(numBufRecv);
+            in.writeInt32(prevNumBufRecv);
+            break;
+        }
+        case ARTPSource::RTP_CVO:
+        {
+            int32_t cvo;
+            CHECK(msg->findInt32("cvo", &cvo));
+            in.writeInt32(cvo);
+            break;
+        }
+        default:
+            break;
+    }
+
+    notifyListener(MEDIA_IMS_RX_NOTICE, 0, 0, &in);
+}
+
 const char *NuPlayer::getDataSourceType() {
     switch (mDataSourceType) {
         case DATA_SOURCE_TYPE_HTTP_LIVE:
             return "HTTPLive";
 
+        case DATA_SOURCE_TYPE_RTP:
+            return "RTP";
+
         case DATA_SOURCE_TYPE_RTSP:
             return "RTSP";
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index ef4354c..adb7075 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -51,6 +51,8 @@
 
     void setDataSourceAsync(const sp<DataSource> &source);
 
+    void setDataSourceAsync(const String8& rtpParams);
+
     status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
 
@@ -100,6 +102,8 @@
 
     void updateInternalTimers();
 
+    void setTargetBitrate(int bitrate /* bps */);
+
 protected:
     virtual ~NuPlayer();
 
@@ -117,6 +121,7 @@
     struct GenericSource;
     struct HTTPLiveSource;
     struct Renderer;
+    struct RTPSource;
     struct RTSPSource;
     struct StreamingSource;
     struct Action;
@@ -257,6 +262,7 @@
     typedef enum {
         DATA_SOURCE_TYPE_NONE,
         DATA_SOURCE_TYPE_HTTP_LIVE,
+        DATA_SOURCE_TYPE_RTP,
         DATA_SOURCE_TYPE_RTSP,
         DATA_SOURCE_TYPE_GENERIC_URL,
         DATA_SOURCE_TYPE_GENERIC_FD,
@@ -334,6 +340,7 @@
     void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
     void sendTimedMetaData(const sp<ABuffer> &buffer);
     void sendTimedTextData(const sp<ABuffer> &buffer);
+    void sendIMSRxNotice(const sp<AMessage> &msg);
 
     void writeTrackInfo(Parcel* reply, const sp<AMessage>& format) const;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index f734439..2c1f158 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -302,7 +302,7 @@
     ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), mSurface.get());
 
     mCodec = MediaCodec::CreateByType(
-            mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid, mUid);
+            mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid, mUid, format);
     int32_t secure = 0;
     if (format->findInt32("secure", &secure) && secure != 0) {
         if (mCodec != NULL) {
@@ -746,9 +746,15 @@
 
     mOutputBuffers.editItemAt(index) = buffer;
 
+    int64_t frameIndex;
+    bool frameIndexFound = buffer->meta()->findInt64("frameIndex", &frameIndex);
+
     buffer->setRange(offset, size);
     buffer->meta()->clear();
     buffer->meta()->setInt64("timeUs", timeUs);
+    if (frameIndexFound) {
+        buffer->meta()->setInt64("frameIndex", frameIndex);
+    }
 
     bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
     // we do not expect CODECCONFIG or SYNCFRAME for decoder
@@ -1050,7 +1056,7 @@
         uint32_t flags = 0;
         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
-        int32_t eos, csd;
+        int32_t eos, csd, cvo;
         // we do not expect SYNCFRAME for decoder
         if (buffer->meta()->findInt32("eos", &eos) && eos) {
             flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -1058,6 +1064,24 @@
             flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
         }
 
+        if (buffer->meta()->findInt32("cvo", (int32_t*)&cvo)) {
+            ALOGV("[%s] cvo(%d) found at %lld us", mComponentName.c_str(), cvo, (long long)timeUs);
+            switch (cvo) {
+                case 0:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_0);
+                    break;
+                case 1:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_90);
+                    break;
+                case 2:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_180);
+                    break;
+                case 3:
+                    codecBuffer->meta()->setInt32("cvo", MediaCodec::CVO_DEGREE_270);
+                    break;
+            }
+        }
+
         // Modular DRM
         MediaBufferBase *mediaBuf = NULL;
         NuPlayerDrm::CryptoInfo *cryptInfo = NULL;
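The cvo handling added above translates the orientation index carried in the buffer meta into MediaCodec::CVO_DEGREE_* constants in 90-degree steps. A standalone sketch of the same mapping, purely illustrative (the decoder itself forwards the MediaCodec constants rather than raw degrees):

    // Maps an RTP CVO orientation index (0..3) to degrees; mirrors the switch above.
    static inline int32_t cvoIndexToDegrees(int32_t cvo) {
        return (cvo >= 0 && cvo <= 3) ? cvo * 90 : -1;  // -1 for an unknown index
    }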
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index dc144b2..2a50fc2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -218,6 +218,26 @@
     return mAsyncResult;
 }
 
+status_t NuPlayerDriver::setDataSource(const String8& rtpParams) {
+    ALOGV("setDataSource(%p) rtp source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(rtpParams);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<IGraphicBufferProducer> &bufferProducer) {
     ALOGV("setVideoSurfaceTexture(%p)", this);
@@ -797,7 +817,11 @@
 }
 
 status_t NuPlayerDriver::setParameter(
-        int /* key */, const Parcel & /* request */) {
+        int key, const Parcel &request) {
+    if (key == KEY_PARAMETER_RTP_ATTRIBUTES) {
+        mPlayer->setTargetBitrate(request.readInt32());
+        return OK;
+    }
     return INVALID_OPERATION;
 }
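The setParameter() change above consumes exactly one int32 from the request Parcel when the key is KEY_PARAMETER_RTP_ATTRIBUTES and forwards it as the target bitrate in bps. A caller-side sketch of packing a matching request; the helper name and the 1 Mbps value are illustrative, not part of this change:

    // Hypothetical helper: fill the Parcel that
    // NuPlayerDriver::setParameter(KEY_PARAMETER_RTP_ATTRIBUTES, ...) reads above.
    static void fillRtpAttributes(android::Parcel *request, int32_t targetBitrateBps) {
        request->writeInt32(targetBitrateBps);  // e.g. 1000000 for a 1 Mbps target
    }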
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index f4b1968..55a0fad 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -43,6 +43,8 @@
 
     virtual status_t setDataSource(const sp<DataSource>& dataSource);
 
+    virtual status_t setDataSource(const String8& rtpParams);
+
     virtual status_t setVideoSurfaceTexture(
             const sp<IGraphicBufferProducer> &bufferProducer);
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index c30f048..4a65f71 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -922,6 +922,11 @@
             firstEntry = false;
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+            if (mediaTimeUs < 0) {
+                ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
+                       mediaTimeUs / 1E6);
+                mediaTimeUs = 0;
+            }
             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
         }
@@ -1928,11 +1933,12 @@
     int32_t numChannels;
     CHECK(format->findInt32("channel-count", &numChannels));
 
-    int32_t channelMask;
-    if (!format->findInt32("channel-mask", &channelMask)) {
-        // signal to the AudioSink to derive the mask from count.
-        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
-    }
+    int32_t rawChannelMask;
+    audio_channel_mask_t channelMask =
+            format->findInt32("channel-mask", &rawChannelMask) ?
+                    static_cast<audio_channel_mask_t>(rawChannelMask)
+                    // signal to the AudioSink to derive the mask from count.
+                    : CHANNEL_MASK_USE_CHANNEL_ORDER;
 
     int32_t sampleRate;
     CHECK(format->findInt32("sample-rate", &sampleRate));
@@ -1956,7 +1962,7 @@
             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                     mime.c_str(), audioFormat);
 
-            int avgBitRate = -1;
+            int avgBitRate = 0;
             format->findInt32("bitrate", &avgBitRate);
 
             int32_t aacProfile = -1;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index f137c52..bf6b539 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -58,6 +58,7 @@
         kWhatInstantiateSecureDecoders,
         // Modular DRM
         kWhatDrmInfo,
+        kWhatIMSRxNotice,
     };
 
     // The provides message is used to notify the player about various
@@ -131,6 +132,8 @@
 
     virtual void setOffloadAudio(bool /* offload */) {}
 
+    virtual void setTargetBitrate(int32_t) {}
+
     // Modular DRM
     virtual status_t prepareDrm(
             const uint8_t /*uuid*/[16], const Vector<uint8_t> &/*drmSessionId*/,
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
new file mode 100644
index 0000000..d2d978a
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -0,0 +1,808 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSource"
+#include <utils/Log.h>
+
+#include "RTPSource.h"
+
+
+
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <string.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
+NuPlayer::RTPSource::RTPSource(
+        const sp<AMessage> &notify,
+        const String8& rtpParams)
+    : Source(notify),
+      mRTPParams(rtpParams),
+      mFlags(0),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0),
+      mFirstAccessUnit(true),
+      mAllTracksHaveTime(false),
+      mNTPAnchorUs(-1),
+      mMediaAnchorUs(-1),
+      mLastMediaTimeUs(-1),
+      mNumAccessUnitsReceived(0),
+      mLastCVOUpdated(-1),
+      mReceivedFirstRTCPPacket(false),
+      mReceivedFirstRTPPacket(false),
+      mPausing(false),
+      mPauseGeneration(0) {
+    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+}
+
+NuPlayer::RTPSource::~RTPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->unregisterHandler(mRTPConn->id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer::RTPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer::RTPSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+        mLooper->registerHandler(mRTPConn);
+    }
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    setParameters(mRTPParams);
+
+    TrackInfo *info = NULL;
+    unsigned i;
+    for (i = 0; i < mTracks.size(); i++) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        AString sdp;
+        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
+                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
+                NULL, info->mWidth, info->mHeight, info->mCVOExtMap);
+        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());
+
+        sp<ASessionDescription> desc = new ASessionDescription;
+        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
+        ALOGV("RTPSource isValidSdp => %d", isValidSdp);
+
+        int sockRtp, sockRtcp;
+        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+
+        sp<AMessage> notify = new AMessage('accu', this);
+
+        ALOGV("RTPSource addStream. track-index=%d", i);
+        notify->setSize("trackIndex", i);
+        // index(i) should start from 1; 0 is reserved for [root].
+        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
+        mRTPConn->setSelfID(info->mSelfID);
+        mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+
+        unsigned long PT;
+        AString formatDesc, formatParams;
+        // index(i) should start from 1; 0 is reserved for [root].
+        desc->getFormatType(i + 1, &PT, &formatDesc, &formatParams);
+
+        int32_t clockRate, numChannels;
+        ASessionDescription::ParseFormatDesc(formatDesc.c_str(), &clockRate, &numChannels);
+        info->mTimeScale = clockRate;
+
+        info->mRTPSocket = sockRtp;
+        info->mRTCPSocket = sockRtcp;
+        info->mFirstSeqNumInSegment = 0;
+        info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mRTPAnchor = 0;
+        info->mNTPAnchorUs = -1;
+        info->mNormalPlayTimeRTP = 0;
+        info->mNormalPlayTimeUs = 0ll;
+
+        // index(i) should start from 1; 0 is reserved for [root].
+        info->mPacketSource = new APacketSource(desc, i + 1);
+
+        int32_t timeScale;
+        sp<MetaData> format = getTrackFormat(i, &timeScale);
+        sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+        if (info->mIsAudio) {
+            mAudioTrack = source;
+        } else {
+            mVideoTrack = source;
+        }
+
+        info->mSource = source;
+        info->mRTPTime = 0;
+        info->mNormalPlaytimeUs = 0;
+        info->mNPTMappingValid = false;
+    }
+
+    if (mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+}
+
+void NuPlayer::RTPSource::start() {
+}
+
+void NuPlayer::RTPSource::pause() {
+    mState = PAUSED;
+}
+
+void NuPlayer::RTPSource::resume() {
+    mState = CONNECTING;
+}
+
+void NuPlayer::RTPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+status_t NuPlayer::RTPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (mState == PAUSED) {
+        ALOGV("-EWOULDBLOCK");
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            sp<AnotherPacketSource> otherSource = getSource(!audio);
+            status_t otherFinalResult;
+
+            // If the other source has already signaled EOS, this source should also signal EOS
+            if (otherSource != NULL &&
+                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
+                    otherFinalResult == ERROR_END_OF_STREAM) {
+                source->signalEOS(ERROR_END_OF_STREAM);
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has detected near end, give it some time to retrieve more
+            // data before signaling EOS
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    source->signalEOS(ERROR_END_OF_STREAM);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+                // We should not enter buffering mode
+                // if any of the sources has already detected EOS.
+                // TODO: check whether the line below is needed or not.
+                // startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    finalResult = source->dequeueAccessUnit(accessUnit);
+    if (finalResult != OK) {
+        return finalResult;
+    }
+
+    int32_t cvo;
+    if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
+            cvo != mLastCVOUpdated) {
+        sp<AMessage> msg = new AMessage();
+        msg->setInt32("payload-type", ARTPSource::RTP_CVO);
+        msg->setInt32("cvo", cvo);
+
+        sp<AMessage> notify = dupNotify();
+        notify->setInt32("what", kWhatIMSRxNotice);
+        notify->setMessage("message", msg);
+        notify->post();
+
+        ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
+        mLastCVOUpdated = cvo;
+    }
+
+    return finalResult;
+}
+
+sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
+    *durationUs = 0ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
+    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
+    return OK;
+}
+
+void NuPlayer::RTPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(kBufferingPollIntervalUs); // 1 second intervals
+}
+
+void NuPlayer::RTPSource::onPollBuffering() {
+    schedulePollBuffering();
+}
+
+bool NuPlayer::RTPSource::isRealTime() const {
+    ALOGD("RTPSource::isRealTime=%d", true);
+    return true;
+}
+
+void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("onMessageReceived =%d", msg->what());
+
+    switch (msg->what()) {
+        case kWhatAccessUnitComplete:
+        {
+            if (mState == CONNECTING) {
+                mState = CONNECTED;
+            }
+
+            int32_t timeUpdate;
+            //"time-update" raised from ARTPConnection::parseSR()
+            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
+                size_t trackIndex;
+                CHECK(msg->findSize("trackIndex", &trackIndex));
+
+                uint32_t rtpTime;
+                uint64_t ntpTime;
+                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
+                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
+
+                onTimeUpdate(trackIndex, rtpTime, ntpTime);
+                break;
+            }
+
+            int32_t firstRTCP;
+            if (msg->findInt32("first-rtcp", &firstRTCP)) {
+                // There won't be an access unit here, it's just a notification
+                // that the data communication worked since we got the first
+                // rtcp packet.
+                ALOGV("first-rtcp");
+                break;
+            }
+
+            int32_t IMSRxNotice;
+            if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
+                int32_t payloadType, feedbackType;
+                CHECK(msg->findInt32("payload-type", &payloadType));
+                CHECK(msg->findInt32("feedback-type", &feedbackType));
+
+                sp<AMessage> notify = dupNotify();
+                notify->setInt32("what", kWhatIMSRxNotice);
+                notify->setMessage("message", msg);
+                notify->post();
+
+                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
+                      payloadType, feedbackType);
+                break;
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            sp<ABuffer> accessUnit;
+            if (msg->findBuffer("access-unit", &accessUnit) == false) {
+                break;
+            }
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGD("dropping damaged access unit.");
+                break;
+            }
+
+            // Implicitly assert on valid trackIndex here, which we ensure by
+            // never removing tracks.
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                /* AnotherPacketSource asserts if no NTP time is provided;
+                   RTPSource should always provide ntpUs.
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream, we didn't receive any normal
+                    // playtime mapping. We won't map to npt time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                */
+                accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());
+
+                source->queueAccessUnit(accessUnit);
+            }
+
+            break;
+        }
+        case kWhatDisconnect:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                TrackInfo *info = &mTracks.editItemAt(i);
+
+                if (info->mIsAudio) {
+                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mAudioTrack = NULL;
+                    ALOGV("mAudioTrack disconnected");
+                } else {
+                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mVideoTrack = NULL;
+                    ALOGV("mVideoTrack disconnected");
+                }
+
+                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
+                close(info->mRTPSocket);
+                close(info->mRTCPSocket);
+            }
+
+            mTracks.clear();
+            mFirstAccessUnit = true;
+            mAllTracksHaveTime = false;
+            mNTPAnchorUs = -1;
+            mMediaAnchorUs = -1;
+            mLastMediaTimeUs = -1;
+            mNumAccessUnitsReceived = 0;
+            mReceivedFirstRTCPPacket = false;
+            mReceivedFirstRTPPacket = false;
+            mPausing = false;
+            mPauseGeneration = 0;
+
+            (new AMessage)->postReply(replyID);
+
+            break;
+        }
+        case kWhatPollBuffering:
+            break;
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer::RTPSource::setTargetBitrate(int32_t bitrate) {
+    mRTPConn->setTargetBitrate(bitrate);
+}
+
+void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
+    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
+         trackIndex, rtpTime, (long long)ntpTime);
+
+    // convert ntpTime in Q32 seconds to microseconds. Note: this will not lose precision
+    // because ntpTimeUs is at most 52 bits (double holds 53 bits)
+    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
+
+    TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+    track->mRTPAnchor = rtpTime;
+    track->mNTPAnchorUs = ntpTimeUs;
+
+    if (mNTPAnchorUs < 0) {
+        mNTPAnchorUs = ntpTimeUs;
+        mMediaAnchorUs = mLastMediaTimeUs;
+    }
+
+    if (!mAllTracksHaveTime) {
+        bool allTracksHaveTime = (mTracks.size() > 0);
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *track = &mTracks.editItemAt(i);
+            if (track->mNTPAnchorUs < 0) {
+                allTracksHaveTime = false;
+                break;
+            }
+        }
+        if (allTracksHaveTime) {
+            mAllTracksHaveTime = true;
+            ALOGI("Time now established for all tracks.");
+        }
+    }
+    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+        // Time is now established; let's start timestamping immediately.
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *trackInfo = &mTracks.editItemAt(i);
+            while (!trackInfo->mPackets.empty()) {
+                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
+                trackInfo->mPackets.erase(trackInfo->mPackets.begin());
+
+                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
+                    postQueueAccessUnit(i, accessUnit);
+                }
+            }
+        }
+    }
+}
+
+bool NuPlayer::RTPSource::addMediaTimestamp(
+        int32_t trackIndex, const TrackInfo *track,
+        const sp<ABuffer> &accessUnit) {
+
+    uint32_t rtpTime;
+    CHECK(accessUnit->meta()->findInt32(
+                "rtp-time", (int32_t *)&rtpTime));
+
+    int64_t relRtpTimeUs =
+        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
+        / track->mTimeScale;
+
+    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;
+
+    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;
+
+    if (mediaTimeUs > mLastMediaTimeUs) {
+        mLastMediaTimeUs = mediaTimeUs;
+    }
+
+    if (mediaTimeUs < 0) {
+        ALOGV("dropping early accessUnit.");
+        return false;
+    }
+
+    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
+            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);
+
+    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);
+
+    return true;
+}
+
+bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
+    TrackInfo *track;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        track = &mTracks.editItemAt(i);
+        if (track->mPackets.empty()) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void NuPlayer::RTPSource::postQueueAccessUnit(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) {
+    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
+    msg->setInt32("what", kWhatAccessUnit);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setBuffer("accessUnit", accessUnit);
+    msg->post();
+}
+
+void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
+    sp<AMessage> msg = new AMessage(kWhatEOS, this);
+    msg->setInt32("what", kWhatEOS);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    const TrackInfo &info = mTracks.itemAt(index);
+
+    *timeScale = info.mTimeScale;
+
+    return info.mPacketSource->getFormat();
+}
+
+void NuPlayer::RTPSource::onConnected() {
+    ALOGV("onConnected");
+    mState = CONNECTED;
+}
+
+void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+//    mLooper->unregisterHandler(mHandler->id());
+//    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase, signal that it
+        // failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+//    setError(err);
+
+}
+
+status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
+    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+
+    bool isAudioKey = key.contains("audio");
+    TrackInfo *info = NULL;
+    for (unsigned i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+        if (info != NULL && info->mIsAudio == isAudioKey) {
+            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
+            break;
+        }
+    }
+
+    if (info == NULL) {
+        TrackInfo newTrackInfo;
+        newTrackInfo.mIsAudio = isAudioKey;
+        mTracks.push(newTrackInfo);
+        info = &mTracks.editTop();
+        info->mJbTimeMs = kStaticJitterTimeMs;
+    }
+
+    if (key == "rtp-param-mime-type") {
+        info->mMimeType = value;
+
+        const char *mime = value.string();
+        const char *delimiter = strchr(mime, '/');
+        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";
+
+        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
+                info->mMimeType.string(), info->mCodecName.string());
+    } else if (key == "video-param-decoder-profile") {
+        info->mCodecProfile = atoi(value);
+    } else if (key == "video-param-decoder-level") {
+        info->mCodecLevel = atoi(value);
+    } else if (key == "video-param-width") {
+        info->mWidth = atoi(value);
+    } else if (key == "video-param-height") {
+        info->mHeight = atoi(value);
+    } else if (key == "rtp-param-local-ip") {
+        info->mLocalIp = value;
+    } else if (key == "rtp-param-local-port") {
+        info->mLocalPort = atoi(value);
+    } else if (key == "rtp-param-remote-ip") {
+        info->mRemoteIp = value;
+    } else if (key == "rtp-param-remote-port") {
+        info->mRemotePort = atoi(value);
+    } else if (key == "rtp-param-payload-type") {
+        info->mPayloadType = atoi(value);
+    } else if (key == "rtp-param-as") {
+        // AS is the guaranteed bit rate negotiated from the SDP.
+        info->mAS = atoi(value);
+    } else if (key == "rtp-param-rtp-timeout") {
+    } else if (key == "rtp-param-rtcp-timeout") {
+    } else if (key == "rtp-param-time-scale") {
+    } else if (key == "rtp-param-self-id") {
+        info->mSelfID = atoi(value);
+    } else if (key == "rtp-param-ext-cvo-extmap") {
+        info->mCVOExtMap = atoi(value);
+    } else if (key == "rtp-param-set-socket-network") {
+        int64_t networkHandle = atoll(value);
+        setSocketNetwork(networkHandle);
+    } else if (key == "rtp-param-jitter-buffer-time") {
+        // clamping min at 40, max at 3000
+        info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
+    ALOGV("setParameters: %s", params.string());
+    const char *cparams = params.string();
+    const char *key_start = cparams;
+    for (;;) {
+        const char *equal_pos = strchr(key_start, '=');
+        if (equal_pos == NULL) {
+            ALOGE("Parameters %s miss a value", cparams);
+            return BAD_VALUE;
+        }
+        String8 key(key_start, equal_pos - key_start);
+        TrimString(&key);
+        if (key.length() == 0) {
+            ALOGE("Parameters %s contains an empty key", cparams);
+            return BAD_VALUE;
+        }
+        const char *value_start = equal_pos + 1;
+        const char *semicolon_pos = strchr(value_start, ';');
+        String8 value;
+        if (semicolon_pos == NULL) {
+            value.setTo(value_start);
+        } else {
+            value.setTo(value_start, semicolon_pos - value_start);
+        }
+        if (setParameter(key, value) != OK) {
+            return BAD_VALUE;
+        }
+        if (semicolon_pos == NULL) {
+            break;  // Reaches the end
+        }
+        key_start = semicolon_pos + 1;
+    }
+    return OK;
+}
+
+void NuPlayer::RTPSource::setSocketNetwork(int64_t networkHandle) {
+    ALOGV("setSocketNetwork: %llu", (unsigned long long)networkHandle);
+
+    TrackInfo *info = NULL;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        info->mSocketNetwork = networkHandle;
+    }
+}
+
+// Trim both leading and trailing whitespace from the given string.
+//static
+void NuPlayer::RTPSource::TrimString(String8 *s) {
+    size_t num_bytes = s->bytes();
+    const char *data = s->string();
+
+    size_t leading_space = 0;
+    while (leading_space < num_bytes && isspace(data[leading_space])) {
+        ++leading_space;
+    }
+
+    size_t i = num_bytes;
+    while (i > leading_space && isspace(data[i - 1])) {
+        --i;
+    }
+
+    s->setTo(String8(&data[leading_space], i - leading_space));
+}
+
+}  // namespace android
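RTPSource::setParameters() above expects rtpParams as a semicolon-separated key=value list, with the recognized keys handled in setParameter(). An illustrative string as it would be passed through NuPlayerDriver::setDataSource(const String8&); the keys come from the parser above, the concrete values are placeholders:

    // Example rtpParams string (values are made up for illustration).
    const android::String8 kExampleRtpParams(
            "rtp-param-mime-type=video/avc;"
            "video-param-width=640;video-param-height=480;"
            "rtp-param-payload-type=96;"
            "rtp-param-local-ip=192.0.2.1;rtp-param-local-port=40000;"
            "rtp-param-remote-ip=192.0.2.2;rtp-param-remote-port=40002;"
            "rtp-param-self-id=12345;"
            "rtp-param-jitter-buffer-time=300");
    // driver->setDataSource(kExampleRtpParams);  // NuPlayerDriver entry point added above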
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
new file mode 100644
index 0000000..3b4f9e9
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SOURCE_H_
+
+#define RTP_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <media/BufferingSettings.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+#include "AnotherPacketSource.h"
+#include "APacketSource.h"
+#include "ARTPConnection.h"
+#include "ARTPSource.h"
+#include "ASessionDescription.h"
+#include "NuPlayerSource.h"
+
+
+
+
+
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+
+struct NuPlayer::RTPSource : public NuPlayer::Source {
+    RTPSource(
+            const sp<AMessage> &notify,
+            const String8& rtpParams);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    virtual bool isRealTime() const;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+    virtual void setTargetBitrate(int32_t bitrate) override;
+
+protected:
+    virtual ~RTPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatAccessUnit = 'accU',
+        kWhatAccessUnitComplete = 'accu',
+        kWhatDisconnect = 'disc',
+        kWhatEOS = 'eos!',
+        kWhatPollBuffering = 'poll',
+        kWhatSetBufferingSettings = 'sBuS',
+    };
+
+    const int64_t kBufferingPollIntervalUs = 1000000ll;
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        PAUSED,
+    };
+
+    struct TrackInfo {
+
+        /* SDP of track */
+        bool mIsAudio;
+        int32_t mPayloadType;
+        String8 mMimeType;
+        String8 mCodecName;
+        int32_t mCodecProfile;
+        int32_t mCodecLevel;
+        int32_t mWidth;
+        int32_t mHeight;
+        String8 mLocalIp;
+        String8 mRemoteIp;
+        int32_t mLocalPort;
+        int32_t mRemotePort;
+        int64_t mSocketNetwork;
+        int32_t mTimeScale;
+        int32_t mAS;
+
+        /* RTP jitter buffer time in milliseconds */
+        uint32_t mJbTimeMs;
+        /* Unique ID that identifies this source itself */
+        uint32_t mSelfID;
+        /* extmap:<value> for CVO is stored here */
+        int32_t mCVOExtMap;
+
+        /* a copy of TrackInfo in RTSPSource */
+        sp<AnotherPacketSource> mSource;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+
+        /* a copy of TrackInfo in MyHandler.h */
+        int mRTPSocket;
+        int mRTCPSocket;
+        uint32_t mFirstSeqNumInSegment;
+        bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
+        uint32_t mRTPAnchor;
+        int64_t mNTPAnchorUs;
+        bool mEOSReceived;
+        uint32_t mNormalPlayTimeRTP;
+        int64_t mNormalPlayTimeUs;
+        sp<APacketSource> mPacketSource;
+        List<sp<ABuffer>> mPackets;
+    };
+
+    const String8 mRTPParams;
+    uint32_t mFlags;
+    State mState;
+    status_t mFinalResult;
+
+    // TODO: check whether the 3 members below are still needed.
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+
+    sp<ARTPConnection> mRTPConn;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    /* MyHandler.h */
+    bool mFirstAccessUnit;
+    bool mAllTracksHaveTime;
+    int64_t mNTPAnchorUs;
+    int64_t mMediaAnchorUs;
+    int64_t mLastMediaTimeUs;
+    int64_t mNumAccessUnitsReceived;
+    int32_t mLastCVOUpdated;
+    bool mReceivedFirstRTCPPacket;
+    bool mReceivedFirstRTPPacket;
+    bool mPausing;
+    int32_t mPauseGeneration;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    /* MyHandler.h */
+    void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime);
+    bool addMediaTimestamp(int32_t trackIndex, const TrackInfo *track,
+            const sp<ABuffer> &accessUnit);
+    bool dataReceivedOnAllChannels();
+    void postQueueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
+    void postQueueEOS(size_t trackIndex, status_t finalResult);
+    sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale);
+    void onConnected();
+    void onDisconnected(const sp<AMessage> &msg);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+
+    status_t setParameters(const String8 &params);
+    status_t setParameter(const String8 &key, const String8 &value);
+    void setSocketNetwork(int64_t networkHandle);
+    static void TrimString(String8 *s);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTPSource);
+};
+
+}  // namespace android
+
+#endif  // RTP_SOURCE_H_
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 83da092..8e05de8 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -145,13 +145,17 @@
         return;
     }
 
-    // Close socket before posting message to RTSPSource message handler.
-    close(mHandler->getARTSPConnection()->getSocket());
-
     sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
 
     sp<AMessage> dummy;
     msg->postAndAwaitResponse(&dummy);
+
+    // Close socket after posting message to RTSPSource message handler.
+    if (mHandler != NULL && mHandler->getARTSPConnection()->getSocket() >= 0) {
+        ALOGD("closing rtsp socket if not closed yet.");
+        close(mHandler->getARTSPConnection()->getSocket());
+    }
+
 }
 
 status_t NuPlayer::RTSPSource::feedMoreTSData() {
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index e845c33..98626fd 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -1,3 +1,14 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmediaplayerservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_license",
+    ],
+}
+
 cc_test {
 
     name: "DrmSessionManager_test",
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index f114046..c81a659 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -65,6 +65,14 @@
         return true;
     }
 
+    virtual bool overrideProcessInfo(
+            int /* pid */, int /* procState */, int /* oomScore */) {
+        return true;
+    }
+
+    virtual void removeProcessInfoOverride(int /* pid */) {
+    }
+
 private:
     DISALLOW_EVIL_CONSTRUCTORS(FakeProcessInfo);
 };
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
new file mode 100644
index 0000000..92236ea
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmediaplayerservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_license",
+    ],
+}
+
+cc_test {
+    name: "StagefrightRecorderTest",
+    gtest: true,
+
+    srcs: [
+        "StagefrightRecorderTest.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libmediaplayerservice",
+    ],
+
+    static_libs: [
+        "libmediaplayerservice",
+        "libstagefright_httplive",
+        "libstagefright_rtsp",
+    ],
+
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.omx@1.0",
+        "libbase",
+        "libandroid_net",
+        "libaudioclient",
+        "libbinder",
+        "libcamera_client",
+        "libcodec2_client",
+        "libcrypto",
+        "libcutils",
+        "libdatasource",
+        "libdl",
+        "libdrmframework",
+        "libgui",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmediadrm",
+        "libmediandk",
+        "libmediametrics",
+        "libmediautils",
+        "libmemunreachable",
+        "libnetd_client",
+        "libpowermanager",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/AndroidTest.xml b/media/libmediaplayerservice/tests/stagefrightRecorder/AndroidTest.xml
new file mode 100644
index 0000000..427026d
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for StagefrightRecorder unit tests">
+    <option name="test-suite-tag" value="StagefrightRecorderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer">
+        <option name="force-root" value="true" />
+    </target_preparer>
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push"
+                value="StagefrightRecorderTest->/data/local/tmp/StagefrightRecorderTest" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="StagefrightRecorderTest" />
+    </test>
+</configuration>
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
new file mode 100644
index 0000000..162c187
--- /dev/null
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "StagefrightRecorderTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <chrono>
+#include <ctime>
+#include <iostream>
+#include <string>
+#include <thread>
+
+#include <MediaPlayerService.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/stagefright/MediaCodec.h>
+#include <system/audio.h>
+
+#include "StagefrightRecorder.h"
+
+#define OUTPUT_INFO_FILE_NAME "/data/local/tmp/stfrecorder_audio.info"
+#define OUTPUT_FILE_NAME_AUDIO "/data/local/tmp/stfrecorder_audio.raw"
+
+const bool kDebug = false;
+constexpr int32_t kMaxLoopCount = 10;
+constexpr int32_t kClipDurationInSec = 4;
+constexpr int32_t kPauseTimeInSec = 2;
+// Tolerance for the extracted clip duration: at most 10% of the total clip duration
+constexpr int32_t kToleranceValueInUs = kClipDurationInSec * 100000;
+
+using namespace android;
+
+class StagefrightRecorderTest
+    : public ::testing::TestWithParam<std::pair<output_format, audio_encoder>> {
+  public:
+    StagefrightRecorderTest() : mStfRecorder(nullptr), mOutputAudioFp(nullptr) {
+        mExpectedDurationInMs = 0;
+        mExpectedPauseInMs = 0;
+    }
+
+    ~StagefrightRecorderTest() {
+        if (mStfRecorder) delete mStfRecorder;
+        if (mOutputAudioFp) fclose(mOutputAudioFp);
+    }
+
+    void SetUp() override {
+        // TODO b/182392769: use attribution source util
+        AttributionSourceState attributionSource;
+        attributionSource.packageName = std::string(LOG_TAG);
+        attributionSource.token = sp<BBinder>::make();
+        mStfRecorder = new StagefrightRecorder(attributionSource);
+        ASSERT_NE(mStfRecorder, nullptr) << "Failed to create the instance of recorder";
+
+        mOutputAudioFp = fopen(OUTPUT_FILE_NAME_AUDIO, "wb");
+        ASSERT_NE(mOutputAudioFp, nullptr) << "Failed to open output file "
+                                           << OUTPUT_FILE_NAME_AUDIO << " for stagefright recorder";
+
+        int32_t fd = fileno(mOutputAudioFp);
+        ASSERT_GE(fd, 0) << "Failed to get the file descriptor of the output file for "
+                         << OUTPUT_FILE_NAME_AUDIO;
+
+        status_t status = mStfRecorder->setOutputFile(fd);
+        ASSERT_EQ(status, OK) << "Failed to set the output file " << OUTPUT_FILE_NAME_AUDIO
+                              << " for stagefright recorder";
+    }
+
+    void TearDown() override {
+        if (mOutputAudioFp) {
+            fclose(mOutputAudioFp);
+            mOutputAudioFp = nullptr;
+        }
+        if (!kDebug) {
+            int32_t status = remove(OUTPUT_FILE_NAME_AUDIO);
+            ASSERT_EQ(status, 0) << "Unable to delete the output file " << OUTPUT_FILE_NAME_AUDIO;
+        }
+    }
+
+    void setAudioRecorderFormat(output_format outputFormat, audio_encoder encoder,
+                                audio_source_t audioSource = AUDIO_SOURCE_DEFAULT);
+    void recordMedia(bool isPaused = false, int32_t numStart = 0, int32_t numPause = 0);
+    void dumpInfo();
+    void setupExtractor(AMediaExtractor *extractor, int32_t &trackCount);
+    void validateOutput();
+
+    MediaRecorderBase *mStfRecorder;
+    FILE *mOutputAudioFp;
+    double mExpectedDurationInMs;
+    double mExpectedPauseInMs;
+};
+
+void StagefrightRecorderTest::setAudioRecorderFormat(output_format outputFormat,
+                                                     audio_encoder encoder,
+                                                     audio_source_t audioSource) {
+    status_t status = mStfRecorder->setAudioSource(audioSource);
+    ASSERT_EQ(status, OK) << "Failed to set the audio source: " << audioSource;
+
+    status = mStfRecorder->setOutputFormat(outputFormat);
+    ASSERT_EQ(status, OK) << "Failed to set the output format: " << outputFormat;
+
+    status = mStfRecorder->setAudioEncoder(encoder);
+    ASSERT_EQ(status, OK) << "Failed to set the audio encoder: " << encoder;
+}
+
+void StagefrightRecorderTest::recordMedia(bool isPause, int32_t numStart, int32_t numPause) {
+    status_t status = mStfRecorder->init();
+    ASSERT_EQ(status, OK) << "Failed to initialize stagefright recorder";
+
+    status = mStfRecorder->prepare();
+    ASSERT_EQ(status, OK) << "Failed to preapre the reorder";
+
+    // first start should succeed.
+    status = mStfRecorder->start();
+    ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+    for (int32_t count = 0; count < numStart; count++) {
+        status = mStfRecorder->start();
+    }
+
+    auto tStart = std::chrono::high_resolution_clock::now();
+    // Recording media for 4 secs
+    std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+    auto tEnd = std::chrono::high_resolution_clock::now();
+    mExpectedDurationInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+    if (isPause) {
+        // first pause should succeed.
+        status = mStfRecorder->pause();
+        ASSERT_EQ(status, OK) << "Failed to pause the recorder";
+
+        tStart = std::chrono::high_resolution_clock::now();
+        // Paused recorder for 2 secs
+        std::this_thread::sleep_for(std::chrono::seconds(kPauseTimeInSec));
+
+        for (int32_t count = 0; count < numPause; count++) {
+            status = mStfRecorder->pause();
+        }
+
+        tEnd = std::chrono::high_resolution_clock::now();
+        mExpectedPauseInMs = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+
+        status = mStfRecorder->resume();
+        ASSERT_EQ(status, OK) << "Failed to resume the recorder";
+
+        auto tStart = std::chrono::high_resolution_clock::now();
+        // Recording media for 4 secs
+        std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+        auto tEnd = std::chrono::high_resolution_clock::now();
+        mExpectedDurationInMs += std::chrono::duration<double, std::milli>(tEnd - tStart).count();
+    }
+    status = mStfRecorder->stop();
+    ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+}
+
+void StagefrightRecorderTest::dumpInfo() {
+    FILE *dumpOutput = fopen(OUTPUT_INFO_FILE_NAME, "wb");
+    ASSERT_NE(dumpOutput, nullptr) << "Failed to open the output info file "
+                                   << OUTPUT_INFO_FILE_NAME;
+    int32_t dumpFd = fileno(dumpOutput);
+    Vector<String16> args;
+    status_t status = mStfRecorder->dump(dumpFd, args);
+    ASSERT_EQ(status, OK) << "Failed to dump the info for the recorder";
+    fclose(dumpOutput);
+}
+
+void StagefrightRecorderTest::setupExtractor(AMediaExtractor *extractor, int32_t &trackCount) {
+    int32_t fd = open(OUTPUT_FILE_NAME_AUDIO, O_RDONLY);
+    ASSERT_GE(fd, 0) << "Failed to open recorder's output file " << OUTPUT_FILE_NAME_AUDIO
+                     << " to validate";
+
+    struct stat buf;
+    int32_t status = fstat(fd, &buf);
+    ASSERT_EQ(status, 0) << "Failed to get properties of input file " << OUTPUT_FILE_NAME_AUDIO
+                         << " for extractor";
+
+    size_t fileSize = buf.st_size;
+    ASSERT_GT(fileSize, 0) << "Size of input file " << OUTPUT_FILE_NAME_AUDIO
+                           << " to extractor cannot be zero";
+    ALOGV("Size of input file to extractor: %zu", fileSize);
+
+    status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+    trackCount = AMediaExtractor_getTrackCount(extractor);
+    ALOGV("Number of tracks reported by extractor : %d", trackCount);
+}
+
+// Validate recorder's output using extractor
+void StagefrightRecorderTest::validateOutput() {
+    int32_t trackCount = -1;
+    AMediaExtractor *extractor = AMediaExtractor_new();
+    ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+    ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, trackCount));
+    ASSERT_EQ(trackCount, 1) << "Expected 1 track, saw " << trackCount;
+
+    for (int32_t idx = 0; idx < trackCount; idx++) {
+        AMediaExtractor_selectTrack(extractor, idx);
+        AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+        ASSERT_NE(format, nullptr) << "Track format is NULL";
+        ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+        int64_t clipDurationUs;
+        AMediaFormat_getInt64(format, AMEDIAFORMAT_KEY_DURATION, &clipDurationUs);
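+        // The extractor reports the clip duration in microseconds; compare it with the measured
+        // wall-clock recording time (milliseconds converted to microseconds).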
+        int32_t diff = abs((mExpectedDurationInMs * 1000) - clipDurationUs);
+        ASSERT_LE(diff, kToleranceValueInUs)
+                << "Expected duration: " << (mExpectedDurationInMs * 1000)
+                << " Actual duration: " << clipDurationUs << " Difference: " << diff
+                << " Difference is expected to be less than tolerance value: " << kToleranceValueInUs;
+
+        const char *mime = nullptr;
+        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+        ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+        ALOGI("Track mime = %s", mime);
+
+        int32_t sampleRate, channelCount, bitRate;
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+        ALOGI("Channel count reported by extractor: %d", channelCount);
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+        ALOGI("Sample Rate reported by extractor: %d", sampleRate);
+        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &bitRate);
+        ALOGI("Bit Rate reported by extractor: %d", bitRate);
+        AMediaFormat_delete(format);
+    }
+    AMediaExtractor_delete(extractor);
+}
+
+TEST_F(StagefrightRecorderTest, RecordingAudioSanityTest) {
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
+
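+    // Before recording has started, the recorder is expected to report a max amplitude of 0.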
+    int32_t maxAmplitude = -1;
+    status_t status = mStfRecorder->getMaxAmplitude(&maxAmplitude);
+    ASSERT_EQ(maxAmplitude, 0) << "Invalid value of max amplitude";
+
+    ASSERT_NO_FATAL_FAILURE(recordMedia());
+
+    // Verify getMetrics() behavior
+    Parcel parcel;
+    status = mStfRecorder->getMetrics(&parcel);
+    ASSERT_EQ(status, OK) << "Failed to get the parcel from getMetrics";
+    ALOGV("Size of the Parcel returned by getMetrics: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel size reports empty record";
+    ASSERT_NO_FATAL_FAILURE(validateOutput());
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_P(StagefrightRecorderTest, MultiFormatAudioRecordTest) {
+    output_format outputFormat = GetParam().first;
+    audio_encoder audioEncoder = GetParam().second;
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(outputFormat, audioEncoder));
+    ASSERT_NO_FATAL_FAILURE(recordMedia());
+    // TODO(b/161687761)
+    // Skip for AMR-NB/WB output format
+    if (!(outputFormat == OUTPUT_FORMAT_AMR_NB || outputFormat == OUTPUT_FORMAT_AMR_WB)) {
+        ASSERT_NO_FATAL_FAILURE(validateOutput());
+    }
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_F(StagefrightRecorderTest, GetActiveMicrophonesTest) {
+    ASSERT_NO_FATAL_FAILURE(
+            setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT, AUDIO_SOURCE_MIC));
+
+    status_t status = mStfRecorder->init();
+    ASSERT_EQ(status, OK) << "Init failed for stagefright recorder";
+
+    status = mStfRecorder->prepare();
+    ASSERT_EQ(status, OK) << "Failed to prepare the recorder";
+
+    status = mStfRecorder->start();
+    ASSERT_EQ(status, OK) << "Failed to start the recorder";
+
+    // Record media for 4 secs
+    std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
+
+    std::vector<media::MicrophoneInfo> activeMicrophones{};
+    status = mStfRecorder->getActiveMicrophones(&activeMicrophones);
+    ASSERT_EQ(status, OK) << "Failed to get Active Microphones";
+    ASSERT_GT(activeMicrophones.size(), 0) << "No active microphones are found";
+
+    status = mStfRecorder->stop();
+    ASSERT_EQ(status, OK) << "Failed to stop the recorder";
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+TEST_F(StagefrightRecorderTest, MultiStartPauseTest) {
+    ASSERT_NO_FATAL_FAILURE(setAudioRecorderFormat(OUTPUT_FORMAT_DEFAULT, AUDIO_ENCODER_DEFAULT));
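+    // Record with pause/resume, issuing kMaxLoopCount extra start() and pause() calls to make
+    // sure repeated calls don't break the recording.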
+    ASSERT_NO_FATAL_FAILURE(recordMedia(true, kMaxLoopCount, kMaxLoopCount));
+    ASSERT_NO_FATAL_FAILURE(validateOutput());
+    if (kDebug) {
+        ASSERT_NO_FATAL_FAILURE(dumpInfo());
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        StagefrightRecorderTestAll, StagefrightRecorderTest,
+        ::testing::Values(std::make_pair(OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB),
+                          std::make_pair(OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB),
+                          std::make_pair(OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC),
+                          std::make_pair(OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS)));
+
+int main(int argc, char **argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/libmediatranscoding/.clang-format b/media/libmediatranscoding/.clang-format
index 3198d00..f23b842 100644
--- a/media/libmediatranscoding/.clang-format
+++ b/media/libmediatranscoding/.clang-format
@@ -26,4 +26,26 @@
 DerivePointerAlignment: false
 IndentWidth: 4
 PointerAlignment: Left
-TabWidth: 4
\ No newline at end of file
+TabWidth: 4
+
+# Deviations from the above file:
+# "Don't indent the section label"
+AccessModifierOffset: -4
+# "Each line of text in your code should be at most 100 columns long."
+ColumnLimit: 100
+# "Constructor initializer lists can be all on one line or with subsequent
+# lines indented eight spaces.". clang-format does not support having the colon
+# on the same line as the constructor function name, so this is the best
+# approximation of that rule, which makes all entries in the list (except the
+# first one) have an eight space indentation.
+ConstructorInitializerIndentWidth: 6
+# There is nothing in go/droidcppstyle about case labels, but there seems to be
+# more code that does not indent the case labels in frameworks/base.
+IndentCaseLabels: false
+# There have been some bugs in which subsequent formatting operations introduce
+# weird comment jumps.
+ReflowComments: false
+# Android supports C++17 now, but it seems only Cpp11 works for now.
+# "Cpp11 is a deprecated alias for Latest" according to
+# https://clang.llvm.org/docs/ClangFormatStyleOptions.html
+Standard: Cpp11
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index f948bd8..042850c 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -14,48 +14,111 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "libmediatranscoding_aidl",
+    srcs: [
+        "aidl/android/media/IMediaTranscodingService.aidl",
+        "aidl/android/media/ITranscodingClient.aidl",
+        "aidl/android/media/ITranscodingClientCallback.aidl",
+        "aidl/android/media/TranscodingErrorCode.aidl",
+        "aidl/android/media/TranscodingSessionPriority.aidl",
+        "aidl/android/media/TranscodingSessionStats.aidl",
+        "aidl/android/media/TranscodingType.aidl",
+        "aidl/android/media/TranscodingVideoCodecType.aidl",
+        "aidl/android/media/TranscodingVideoTrackFormat.aidl",
+        "aidl/android/media/TranscodingSessionParcel.aidl",
+        "aidl/android/media/TranscodingRequestParcel.aidl",
+        "aidl/android/media/TranscodingResultParcel.aidl",
+        "aidl/android/media/TranscodingTestConfig.aidl",
+    ],
+    path: "aidl",
+}
+
 // AIDL interfaces of MediaTranscoding.
 aidl_interface {
     name: "mediatranscoding_aidl_interface",
     unstable: true,
     local_include_dir: "aidl",
-    srcs: [
-        "aidl/android/media/IMediaTranscodingService.aidl",
-        "aidl/android/media/ITranscodingServiceClient.aidl",
-        "aidl/android/media/TranscodingErrorCode.aidl",
-        "aidl/android/media/TranscodingJobPriority.aidl",
-        "aidl/android/media/TranscodingType.aidl",
-        "aidl/android/media/TranscodingVideoCodecType.aidl",
-        "aidl/android/media/TranscodingJobParcel.aidl",
-        "aidl/android/media/TranscodingRequestParcel.aidl",
-        "aidl/android/media/TranscodingResultParcel.aidl",
-    ],
+    srcs: [":libmediatranscoding_aidl"],
+    backend:
+    {
+        java: {
+            enabled: true,
+            apex_available: [
+                "com.android.media",
+                "test_com.android.media",
+            ],
+            min_sdk_version: "29",
+        },
+        ndk: {
+            enabled: true,
+            apex_available: [
+                "com.android.media",
+                "test_com.android.media",
+            ],
+            min_sdk_version: "29",
+        },
+    },
 }
 
-cc_library_shared {
+cc_library {
     name: "libmediatranscoding",
 
+    min_sdk_version: "29",
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
+
     srcs: [
-        "TranscodingClientManager.cpp"
+        "TranscoderWrapper.cpp",
+        "TranscodingClientManager.cpp",
+        "TranscodingLogger.cpp",
+        "TranscodingResourcePolicy.cpp",
+        "TranscodingSessionController.cpp",
+        "TranscodingThermalPolicy.cpp",
+        "TranscodingUidPolicy.cpp",
     ],
 
     shared_libs: [
+        "libandroid#31",
         "libbinder_ndk",
         "libcutils",
         "liblog",
         "libutils",
+        "libmediatranscoder",
+        "libmediandk",
+        "libstatssocket#30",
+    ],
+    export_shared_lib_headers: [
+        "libmediandk",
     ],
 
     export_include_dirs: ["include"],
 
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
+        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "libstatslog_media",
     ],
 
     cflags: [
-        "-Werror",
-        "-Wno-error=deprecated-declarations",
         "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
     ],
 
     sanitize: {
@@ -66,3 +129,43 @@
         cfi: true,
     },
 }
+
+cc_library_static {
+    name: "libstatslog_media",
+    generated_sources: ["statslog_media.cpp"],
+    generated_headers: ["statslog_media.h"],
+    min_sdk_version: "29",
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    export_generated_headers: ["statslog_media.h"],
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
+    shared_libs: [
+        "libcutils",
+        "liblog",
+        "libstatssocket#30",
+        "libutils",
+    ],
+}
+
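+// statslog_media.h/.cpp are generated by stats-log-api-gen for the "media" module and compiled
+// into libstatslog_media above, which the transcoding code links against to log statsd atoms.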
+genrule {
+    name: "statslog_media.h",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --header $(genDir)/statslog_media.h --module media --namespace android,media,stats",
+    out: [
+        "statslog_media.h",
+    ],
+}
+
+genrule {
+    name: "statslog_media.cpp",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --cpp $(genDir)/statslog_media.cpp --module media --namespace android,media,stats --importHeader statslog_media.h",
+    out: [
+        "statslog_media.cpp",
+    ],
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/OWNERS b/media/libmediatranscoding/OWNERS
index 02287cb..b08d573 100644
--- a/media/libmediatranscoding/OWNERS
+++ b/media/libmediatranscoding/OWNERS
@@ -1,3 +1,5 @@
-akersten@google.com
+chz@google.com
+gokrishnan@google.com
 hkuang@google.com
 lnilsson@google.com
+pawin@google.com
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/libmediatranscoding/TEST_MAPPING
new file mode 100644
index 0000000..40f7b21
--- /dev/null
+++ b/media/libmediatranscoding/TEST_MAPPING
@@ -0,0 +1,35 @@
+{
+    "presubmit": [
+        {
+            "name": "MediaSampleQueueTests"
+        },
+        {
+            "name": "MediaSampleReaderNDKTests"
+        },
+        {
+            "name": "MediaSampleWriterTests"
+        },
+        {
+            "name": "MediaTrackTranscoderTests"
+        },
+        {
+            "name": "MediaTranscoderTests"
+        },
+        {
+            "name": "PassthroughTrackTranscoderTests"
+        },
+        {
+            "name": "TranscodingClientManager_tests"
+        },
+        {
+            "name": "TranscodingSessionController_tests"
+        },
+        {
+            "name": "VideoTrackTranscoderTests"
+        },
+        {
+            "name": "CtsMediaTranscodingTestCases"
+        }
+    ]
+}
+
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
new file mode 100644
index 0000000..60b780f
--- /dev/null
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -0,0 +1,627 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscoderWrapper"
+
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/TranscoderWrapper.h>
+#include <media/TranscodingRequest.h>
+#include <utils/AndroidThreads.h>
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingVideoCodecType;
+using ::aidl::android::media::TranscodingVideoTrackFormat;
+
+static TranscodingErrorCode toTranscodingError(media_status_t status) {
+    switch (status) {
+    case AMEDIA_OK:
+        return TranscodingErrorCode::kNoError;
+    case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE:  // FALLTHRU
+    case AMEDIACODEC_ERROR_RECLAIMED:
+        return TranscodingErrorCode::kInsufficientResources;
+    case AMEDIA_ERROR_MALFORMED:
+        return TranscodingErrorCode::kMalformed;
+    case AMEDIA_ERROR_UNSUPPORTED:
+        return TranscodingErrorCode::kUnsupported;
+    case AMEDIA_ERROR_INVALID_OBJECT:  // FALLTHRU
+    case AMEDIA_ERROR_INVALID_PARAMETER:
+        return TranscodingErrorCode::kInvalidParameter;
+    case AMEDIA_ERROR_INVALID_OPERATION:
+        return TranscodingErrorCode::kInvalidOperation;
+    case AMEDIA_ERROR_IO:
+        return TranscodingErrorCode::kErrorIO;
+    case AMEDIA_ERROR_UNKNOWN:  // FALLTHRU
+    default:
+        return TranscodingErrorCode::kUnknown;
+    }
+}
+
+static std::shared_ptr<AMediaFormat> getVideoFormat(
+        const char* originalMime,
+        const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
+    if (requestedFormat == std::nullopt) {
+        return nullptr;
+    }
+
+    std::shared_ptr<AMediaFormat> format =
+            std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    bool changed = false;
+    if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
+        strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
+        AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+        changed = true;
+    } else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
+               strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
+        AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        changed = true;
+    }
+    if (requestedFormat->bitrateBps > 0) {
+        AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+        changed = true;
+    }
+    // TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
+    // Also need to determine more settings to expose in TranscodingVideoTrackFormat.
+    if (!changed) {
+        // Use null format for passthru.
+        format.reset();
+    }
+    return format;
+}
+
+//static
+std::string TranscoderWrapper::toString(const Event& event) {
+    std::string typeStr;
+    switch (event.type) {
+    case Event::Start:
+        typeStr = "Start";
+        break;
+    case Event::Pause:
+        typeStr = "Pause";
+        break;
+    case Event::Resume:
+        typeStr = "Resume";
+        break;
+    case Event::Stop:
+        typeStr = "Stop";
+        break;
+    case Event::Finish:
+        typeStr = "Finish";
+        break;
+    case Event::Error:
+        typeStr = "Error";
+        break;
+    case Event::Progress:
+        typeStr = "Progress";
+        break;
+    case Event::HeartBeat:
+        typeStr = "HeartBeat";
+        break;
+    case Event::Abandon:
+        typeStr = "Abandon";
+        break;
+    default:
+        return "(unknown)";
+    }
+    std::string result;
+    result = "session {" + std::to_string(event.clientId) + "," + std::to_string(event.sessionId) +
+             "}: " + typeStr;
+    if (event.type == Event::Error || event.type == Event::Progress) {
+        result += " " + std::to_string(event.arg);
+    }
+    return result;
+}
+
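+// Forwards MediaTranscoder callbacks to the owning TranscoderWrapper. Only a weak_ptr to the
+// wrapper is held, so callbacks arriving after the wrapper is destroyed are simply dropped.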
+class TranscoderWrapper::CallbackImpl : public MediaTranscoder::CallbackInterface {
+public:
+    CallbackImpl(const std::shared_ptr<TranscoderWrapper>& owner, ClientIdType clientId,
+                 SessionIdType sessionId)
+          : mOwner(owner), mClientId(clientId), mSessionId(sessionId) {}
+
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onFinish(mClientId, mSessionId);
+        }
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onError(mClientId, mSessionId, error);
+        }
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onProgress(mClientId, mSessionId, progress);
+        }
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {
+        auto owner = mOwner.lock();
+        if (owner != nullptr) {
+            owner->onHeartBeat(mClientId, mSessionId);
+        }
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {
+        ALOGV("%s: session {%lld, %d}", __FUNCTION__, (long long)mClientId, mSessionId);
+    }
+
+private:
+    std::weak_ptr<TranscoderWrapper> mOwner;
+    ClientIdType mClientId;
+    SessionIdType mSessionId;
+};
+
+TranscoderWrapper::TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+                                     const std::shared_ptr<TranscodingLogger>& logger,
+                                     int64_t heartBeatIntervalUs)
+      : mCallback(cb),
+        mLogger(logger),
+        mHeartBeatIntervalUs(heartBeatIntervalUs),
+        mCurrentClientId(0),
+        mCurrentSessionId(-1),
+        mLooperReady(false) {
+    ALOGV("TranscoderWrapper CTOR: %p", this);
+}
+
+TranscoderWrapper::~TranscoderWrapper() {
+    ALOGV("TranscoderWrapper DTOR: %p", this);
+}
+
+static bool isResourceError(media_status_t err) {
+    return err == AMEDIACODEC_ERROR_RECLAIMED || err == AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+}
+
+void TranscoderWrapper::reportError(ClientIdType clientId, SessionIdType sessionId,
+                                    media_status_t err) {
+    auto callback = mCallback.lock();
+    if (callback != nullptr) {
+        if (isResourceError(err)) {
+            // Add a placeholder pause state to mPausedStateMap. This is required when resuming.
+            // TODO: remove this when transcoder pause/resume logic is ready. New logic will
+            // no longer use the pause states.
+            auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+            if (it == mPausedStateMap.end()) {
+                mPausedStateMap.emplace(SessionKeyType(clientId, sessionId),
+                                        new ndk::ScopedAParcel());
+            }
+
+            callback->onResourceLost(clientId, sessionId);
+        } else {
+            callback->onError(clientId, sessionId, toTranscodingError(err));
+        }
+    }
+}
+
+void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
+                              const TranscodingRequestParcel& requestParcel, uid_t callingUid,
+                              const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Start, clientId, sessionId, [=] {
+        media_status_t err = handleStart(clientId, sessionId, request, callingUid, clientCb);
+        if (err != AMEDIA_OK) {
+            cleanup();
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onStarted(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::pause(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Pause, clientId, sessionId, [=] {
+        media_status_t err = handlePause(clientId, sessionId);
+
+        cleanup();
+
+        if (err != AMEDIA_OK) {
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onPaused(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
+                               const TranscodingRequestParcel& requestParcel, uid_t callingUid,
+                               const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    TranscodingRequest request{requestParcel};
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
+        media_status_t err = handleResume(clientId, sessionId, request, callingUid, clientCb);
+        if (err != AMEDIA_OK) {
+            cleanup();
+            reportError(clientId, sessionId, err);
+        } else {
+            auto callback = mCallback.lock();
+            if (callback != nullptr) {
+                callback->onResumed(clientId, sessionId);
+            }
+        }
+    });
+}
+
+void TranscoderWrapper::stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) {
+    queueEvent(Event::Stop, clientId, sessionId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+            sessionId == mCurrentSessionId) {
+            // Cancelling the currently running session.
+            media_status_t err = mTranscoder->cancel();
+            if (err != AMEDIA_OK) {
+                ALOGW("failed to stop transcoder: %d", err);
+            } else {
+                ALOGI("transcoder stopped");
+            }
+            logSessionEnded(TranscodingLogger::SessionEndedReason::CANCELLED, err);
+            cleanup();
+        } else {
+            // For a session that isn't currently running, release any pausedState for it.
+            mPausedStateMap.erase(SessionKeyType(clientId, sessionId));
+        }
+        // No callback needed for stop.
+    });
+
+    if (abandon) {
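+        // Post an Abandon event so that threadLoop() exits and drops its shared reference
+        // to this TranscoderWrapper.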
+        queueEvent(Event::Abandon, 0, 0, nullptr);
+    }
+}
+
+void TranscoderWrapper::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Finish, clientId, sessionId, [=] {
+        if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+            sessionId == mCurrentSessionId) {
+            logSessionEnded(TranscodingLogger::SessionEndedReason::FINISHED, AMEDIA_OK);
+            cleanup();
+        }
+
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onFinish(clientId, sessionId);
+        }
+    });
+}
+
+void TranscoderWrapper::onError(ClientIdType clientId, SessionIdType sessionId,
+                                media_status_t error) {
+    queueEvent(
+            Event::Error, clientId, sessionId,
+            [=] {
+                if (mTranscoder != nullptr && clientId == mCurrentClientId &&
+                    sessionId == mCurrentSessionId) {
+                    logSessionEnded(TranscodingLogger::SessionEndedReason::ERROR, error);
+                    cleanup();
+                }
+                reportError(clientId, sessionId, error);
+            },
+            error);
+}
+
+void TranscoderWrapper::onProgress(ClientIdType clientId, SessionIdType sessionId,
+                                   int32_t progress) {
+    queueEvent(
+            Event::Progress, clientId, sessionId,
+            [=] {
+                auto callback = mCallback.lock();
+                if (callback != nullptr) {
+                    callback->onProgressUpdate(clientId, sessionId, progress);
+                }
+            },
+            progress);
+}
+
+void TranscoderWrapper::onHeartBeat(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::HeartBeat, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onHeartBeat(clientId, sessionId);
+        }
+    });
+}
+
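+// Opens the source and destination (through the client callback when no fd was passed in),
+// creates the MediaTranscoder, and configures its source, track formats and destination,
+// sending a client heartbeat after each step.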
+media_status_t TranscoderWrapper::setupTranscoder(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+        TranscodingLogger::SessionEndedReason* failureReason,
+        const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+    if (clientCb == nullptr) {
+        ALOGE("client callback is null");
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (mTranscoder != nullptr) {
+        ALOGE("transcoder already running");
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    // Unwrap the callback and send heartbeats to the client after each operation during setup.
+    auto callback = mCallback.lock();
+    if (callback == nullptr) {
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    Status status;
+    ::ndk::ScopedFileDescriptor srcFd, dstFd;
+    int srcFdInt = request.sourceFd.get();
+    if (srcFdInt < 0) {
+        status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
+        if (!status.isOk() || srcFd.get() < 0) {
+            ALOGE("failed to open source");
+            *failureReason = TranscodingLogger::SessionEndedReason::OPEN_SRC_FD_FAILED;
+            return AMEDIA_ERROR_IO;
+        }
+        srcFdInt = srcFd.get();
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    int dstFdInt = request.destinationFd.get();
+    if (dstFdInt < 0) {
+        // Open dest file with "rw", as the transcoder could potentially reuse part of it
+        // for the resume case. We might want to further differentiate and open with "w" only
+        // for start.
+        status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
+        if (!status.isOk() || dstFd.get() < 0) {
+            ALOGE("failed to open destination");
+            *failureReason = TranscodingLogger::SessionEndedReason::OPEN_DST_FD_FAILED;
+            return AMEDIA_ERROR_IO;
+        }
+        dstFdInt = dstFd.get();
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    mCurrentClientId = clientId;
+    mCurrentSessionId = sessionId;
+    mCurrentCallingUid = callingUid;
+    mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, sessionId);
+    mTranscoder = MediaTranscoder::create(mTranscoderCb, mHeartBeatIntervalUs, request.clientPid,
+                                          request.clientUid, pausedState);
+    if (mTranscoder == nullptr) {
+        ALOGE("failed to create transcoder");
+        *failureReason = TranscodingLogger::SessionEndedReason::CREATE_FAILED;
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    media_status_t err = mTranscoder->configureSource(srcFdInt);
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure source: %d", err);
+        *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_SRC_FAILED;
+        return err;
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
+    if (trackFormats.size() == 0) {
+        ALOGE("failed to get track formats!");
+        *failureReason = TranscodingLogger::SessionEndedReason::NO_TRACKS;
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        std::shared_ptr<AMediaFormat> format;
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (!strncmp(mime, "video/", 6)) {
+            format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+
+            mSrcFormat = trackFormats[i];
+            mDstFormat = format;
+        }
+
+        err = mTranscoder->configureTrackFormat(i, format.get());
+        if (err != AMEDIA_OK) {
+            ALOGE("failed to configure track format for track %d: %d", i, err);
+            *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_TRACK_FAILED;
+            return err;
+        }
+
+        callback->onHeartBeat(clientId, sessionId);
+    }
+
+    err = mTranscoder->configureDestination(dstFdInt);
+    if (err != AMEDIA_OK) {
+        ALOGE("failed to configure dest: %d", err);
+        *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_DST_FAILED;
+        return err;
+    }
+
+    callback->onHeartBeat(clientId, sessionId);
+
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleStart(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    ALOGI("%s: setting up transcoder for start", __FUNCTION__);
+    TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+    media_status_t err =
+            setupTranscoder(clientId, sessionId, request, callingUid, clientCb, &reason);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to setup transcoder", __FUNCTION__);
+        logSessionEnded(reason, err);
+        return err;
+    }
+
+    mTranscodeStartTime = std::chrono::steady_clock::now();
+
+    err = mTranscoder->start();
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(TranscodingLogger::SessionEndedReason::START_FAILED, err);
+        return err;
+    }
+
+    ALOGI("%s: transcoder started", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handlePause(ClientIdType clientId, SessionIdType sessionId) {
+    if (mTranscoder == nullptr) {
+        ALOGE("%s: transcoder is not running", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    if (clientId != mCurrentClientId || sessionId != mCurrentSessionId) {
+        ALOGW("%s: pausing session {%lld, %d} that's not the current session {%lld, %d}",
+              __FUNCTION__, (long long)clientId, sessionId, (long long)mCurrentClientId,
+              mCurrentSessionId);
+    }
+
+    ALOGI("%s: pausing transcoder", __FUNCTION__);
+
+    std::shared_ptr<ndk::ScopedAParcel> pauseStates;
+    media_status_t err = mTranscoder->pause(&pauseStates);
+    logSessionEnded(TranscodingLogger::SessionEndedReason::PAUSED, err);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
+        return err;
+    }
+    mPausedStateMap[SessionKeyType(clientId, sessionId)] = pauseStates;
+
+    ALOGI("%s: transcoder paused", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+media_status_t TranscoderWrapper::handleResume(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+    std::shared_ptr<ndk::ScopedAParcel> pausedState;
+    auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
+    if (it != mPausedStateMap.end()) {
+        pausedState = it->second;
+        mPausedStateMap.erase(it);
+    } else {
+        ALOGE("%s: can't find paused state", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    ALOGI("%s: setting up transcoder for resume", __FUNCTION__);
+    TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+    media_status_t err = setupTranscoder(clientId, sessionId, request, callingUid, clientCb,
+                                         &reason, pausedState);
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to setup transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(reason, err);
+        return err;
+    }
+
+    // Note: For now resume() will just restart transcoding from the beginning, so there is no need
+    // to distinguish between resume and start from a performance perspective.
+    mTranscodeStartTime = std::chrono::steady_clock::now();
+
+    err = mTranscoder->resume();
+    if (err != AMEDIA_OK) {
+        ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+        logSessionEnded(TranscodingLogger::SessionEndedReason::RESUME_FAILED, err);
+        return err;
+    }
+
+    ALOGI("%s: transcoder resumed", __FUNCTION__);
+    return AMEDIA_OK;
+}
+
+void TranscoderWrapper::cleanup() {
+    mCurrentClientId = 0;
+    mCurrentSessionId = -1;
+    mCurrentCallingUid = -1;
+    mTranscoderCb = nullptr;
+    mTranscoder = nullptr;
+    mSrcFormat = nullptr;
+    mDstFormat = nullptr;
+}
+
+void TranscoderWrapper::logSessionEnded(const TranscodingLogger::SessionEndedReason& reason,
+                                        int error) {
+    std::chrono::microseconds transcodeDuration(-1);
+    if (reason == TranscodingLogger::SessionEndedReason::FINISHED && error == AMEDIA_OK) {
+        transcodeDuration = std::chrono::duration_cast<std::chrono::microseconds>(
+                std::chrono::steady_clock::now() - mTranscodeStartTime);
+    }
+
+    mLogger->logSessionEnded(reason, mCurrentCallingUid, error, transcodeDuration, mSrcFormat.get(),
+                             mDstFormat.get());
+}
+
+void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                                   const std::function<void()> runnable, int32_t arg) {
+    std::scoped_lock lock{mLock};
+
+    if (!mLooperReady) {
+        // A shared_ptr to ourselves is given to the thread's stack, so that the TranscoderWrapper
+        // object doesn't go away until the thread exits. When a watchdog timeout happens, this
+        // allows the session controller to release its reference to the TranscoderWrapper object
+        // without blocking on the thread's exit.
+        std::thread([owner = shared_from_this()]() { owner->threadLoop(); }).detach();
+        mLooperReady = true;
+    }
+
+    mQueue.push_back({type, clientId, sessionId, runnable, arg});
+    mCondition.notify_one();
+}
+
+void TranscoderWrapper::threadLoop() {
+    androidSetThreadPriority(0 /*tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
+    std::unique_lock<std::mutex> lock{mLock};
+    // TranscoderWrapper currently lives in the transcoding service, for as long as
+    // MediaTranscodingService itself does.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            mCondition.wait(lock);
+        }
+
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
+
+        ALOGV("%s: %s", __FUNCTION__, toString(event).c_str());
+
+        if (event.type == Event::Abandon) {
+            break;
+        }
+
+        lock.unlock();
+        event.runnable();
+        lock.lock();
+    }
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 7252437..6dbcaf9 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -17,129 +17,449 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManager"
 
+#include <aidl/android/media/BnTranscodingClient.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <android/binder_ibinder.h>
+#include <android/permission_manager.h>
 #include <inttypes.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
+#include <media/TranscodingUidPolicy.h>
+#include <private/android_filesystem_config.h>
 #include <utils/Log.h>
+#include <utils/String16.h>
 
 namespace android {
 
-using Status = ::ndk::ScopedAStatus;
+static_assert(sizeof(ClientIdType) == sizeof(void*), "ClientIdType should be pointer-sized");
 
-// static
-TranscodingClientManager& TranscodingClientManager::getInstance() {
-    static TranscodingClientManager gInstance{};
-    return gInstance;
+using ::aidl::android::media::BnTranscodingClient;
+using ::aidl::android::media::IMediaTranscodingService;  // For service error codes
+using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+using Status = ::ndk::ScopedAStatus;
+using ::ndk::SpAIBinder;
+
+//static
+std::atomic<ClientIdType> TranscodingClientManager::sCookieCounter = 0;
+//static
+std::mutex TranscodingClientManager::sCookie2ClientLock;
+//static
+std::map<ClientIdType, std::shared_ptr<TranscodingClientManager::ClientImpl>>
+        TranscodingClientManager::sCookie2Client;
+///////////////////////////////////////////////////////////////////////////////
+
+// Convenience methods for constructing binder::Status objects for error returns
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    Status::fromServiceSpecificErrorWithMessage(      \
+            errorCode,                                \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
+
+/**
+ * ClientImpl implements a single client and contains all its information.
+ */
+struct TranscodingClientManager::ClientImpl : public BnTranscodingClient {
+    /* The remote client callback that this ClientImpl is associated with.
+     * Once the ClientImpl is created, we hold an SpAIBinder so that the binder
+     * object doesn't get re-created later; otherwise the binder object pointer
+     * may not be unique.
+     */
+    SpAIBinder mClientBinder;
+    std::shared_ptr<ITranscodingClientCallback> mClientCallback;
+    /* A unique id assigned to the client by the service. This number is used
+     * by the service for indexing. Here we use a monotonically increasing
+     * counter (sCookieCounter) to generate the client id.
+     */
+    ClientIdType mClientId;
+    std::string mClientName;
+    std::string mClientOpPackageName;
+
+    // Next sessionId to assign.
+    std::atomic<int32_t> mNextSessionId;
+    // Whether this client has been unregistered already.
+    std::atomic<bool> mAbandoned;
+    // Weak pointer to the client manager for this client.
+    std::weak_ptr<TranscodingClientManager> mOwner;
+
+    ClientImpl(const std::shared_ptr<ITranscodingClientCallback>& callback,
+               const std::string& clientName, const std::string& opPackageName,
+               const std::weak_ptr<TranscodingClientManager>& owner);
+
+    Status submitRequest(const TranscodingRequestParcel& /*in_request*/,
+                         TranscodingSessionParcel* /*out_session*/,
+                         bool* /*_aidl_return*/) override;
+
+    Status cancelSession(int32_t /*in_sessionId*/, bool* /*_aidl_return*/) override;
+
+    Status getSessionWithId(int32_t /*in_sessionId*/, TranscodingSessionParcel* /*out_session*/,
+                            bool* /*_aidl_return*/) override;
+
+    Status addClientUid(int32_t /*in_sessionId*/, int32_t /*in_clientUid*/,
+                        bool* /*_aidl_return*/) override;
+
+    Status getClientUids(int32_t /*in_sessionId*/,
+                         std::optional<std::vector<int32_t>>* /*_aidl_return*/) override;
+
+    Status unregister() override;
+};
+
+TranscodingClientManager::ClientImpl::ClientImpl(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+        const std::string& opPackageName, const std::weak_ptr<TranscodingClientManager>& owner)
+      : mClientBinder((callback != nullptr) ? callback->asBinder() : nullptr),
+        mClientCallback(callback),
+        mClientId(sCookieCounter.fetch_add(1, std::memory_order_relaxed)),
+        mClientName(clientName),
+        mClientOpPackageName(opPackageName),
+        mNextSessionId(0),
+        mAbandoned(false),
+        mOwner(owner) {}
+
+Status TranscodingClientManager::ClientImpl::submitRequest(
+        const TranscodingRequestParcel& in_request, TranscodingSessionParcel* out_session,
+        bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_request.sourceFilePath.empty() || in_request.destinationFilePath.empty()) {
+        return Status::ok();
+    }
+
+    int32_t callingPid = AIBinder_getCallingPid();
+    int32_t callingUid = AIBinder_getCallingUid();
+    int32_t in_clientUid = in_request.clientUid;
+    int32_t in_clientPid = in_request.clientPid;
+
+    // Check if we can trust clientUid. Only a privileged caller may forward the
+    // uid on an app client's behalf.
+    if (in_clientUid == IMediaTranscodingService::USE_CALLING_UID) {
+        in_clientUid = callingUid;
+    } else if (in_clientUid < 0) {
+        return Status::ok();
+    } else if (in_clientUid != callingUid && !owner->isTrustedCaller(callingPid, callingUid)) {
+        ALOGE("submitRequest rejected (clientPid %d, clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientPid, in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                                "submitRequest rejected (clientPid %d, clientUid %d) "
+                                "(don't trust callingUid %d)",
+                                in_clientPid, in_clientUid, callingUid);
+    }
+
+    // Check if we can trust clientPid. Only a privileged caller may forward the
+    // pid on an app client's behalf.
+    if (in_clientPid == IMediaTranscodingService::USE_CALLING_PID) {
+        in_clientPid = callingPid;
+    } else if (in_clientPid < 0) {
+        return Status::ok();
+    } else if (in_clientPid != callingPid && !owner->isTrustedCaller(callingPid, callingUid)) {
+        ALOGE("submitRequest rejected (clientPid %d, clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientPid, in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                                "submitRequest rejected (clientPid %d, clientUid %d) "
+                                "(don't trust callingUid %d)",
+                                in_clientPid, in_clientUid, callingUid);
+    }
+
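+    // Session ids only need to be unique within this client, so use a per-client counter.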
+    int32_t sessionId = mNextSessionId.fetch_add(1);
+
+    *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, callingUid,
+                                                      in_clientUid, in_request, mClientCallback);
+
+    if (*_aidl_return) {
+        out_session->sessionId = sessionId;
+
+        // TODO(chz): is some of this coming from SessionController?
+        *(TranscodingRequest*)&out_session->request = in_request;
+        out_session->awaitNumberOfSessions = 0;
+    }
+
+    return Status::ok();
 }
 
+Status TranscodingClientManager::ClientImpl::cancelSession(int32_t in_sessionId,
+                                                           bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return = owner->mSessionController->cancel(mClientId, in_sessionId);
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getSessionWithId(int32_t in_sessionId,
+                                                              TranscodingSessionParcel* out_session,
+                                                              bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    *_aidl_return =
+            owner->mSessionController->getSession(mClientId, in_sessionId, &out_session->request);
+
+    if (*_aidl_return) {
+        out_session->sessionId = in_sessionId;
+        out_session->awaitNumberOfSessions = 0;
+    }
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::addClientUid(int32_t in_sessionId,
+                                                          int32_t in_clientUid,
+                                                          bool* _aidl_return) {
+    *_aidl_return = false;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    int32_t callingPid = AIBinder_getCallingPid();
+    int32_t callingUid = AIBinder_getCallingUid();
+
+    // Check if we can trust clientUid. Only a privileged caller may add a uid to
+    // existing sessions.
+    if (in_clientUid == IMediaTranscodingService::USE_CALLING_UID) {
+        in_clientUid = callingUid;
+    } else if (in_clientUid < 0) {
+        return Status::ok();
+    } else if (in_clientUid != callingUid && !owner->isTrustedCaller(callingPid, callingUid)) {
+        ALOGE("addClientUid rejected (clientUid %d) "
+              "(don't trust callingUid %d)",
+              in_clientUid, callingUid);
+        return STATUS_ERROR_FMT(IMediaTranscodingService::ERROR_PERMISSION_DENIED,
+                                "addClientUid rejected (clientUid %d) "
+                                "(don't trust callingUid %d)",
+                                in_clientUid, callingUid);
+    }
+
+    *_aidl_return = owner->mSessionController->addClientUid(mClientId, in_sessionId, in_clientUid);
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::getClientUids(
+        int32_t in_sessionId, std::optional<std::vector<int32_t>>* _aidl_return) {
+    *_aidl_return = std::nullopt;
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (mAbandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    if (in_sessionId < 0) {
+        return Status::ok();
+    }
+
+    std::vector<int32_t> result;
+
+    if (owner->mSessionController->getClientUids(mClientId, in_sessionId, &result)) {
+        *_aidl_return = result;
+    }
+    return Status::ok();
+}
+
+Status TranscodingClientManager::ClientImpl::unregister() {
+    bool abandoned = mAbandoned.exchange(true);
+
+    std::shared_ptr<TranscodingClientManager> owner;
+    if (abandoned || (owner = mOwner.lock()) == nullptr) {
+        return Status::fromServiceSpecificError(IMediaTranscodingService::ERROR_DISCONNECTED);
+    }
+
+    // Use sessionId == -1 to cancel all realtime sessions for this client with the controller.
+    owner->mSessionController->cancel(mClientId, -1);
+    owner->removeClient(mClientId);
+
+    return Status::ok();
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
 // static
 void TranscodingClientManager::BinderDiedCallback(void* cookie) {
-    int32_t clientId = static_cast<int32_t>(reinterpret_cast<intptr_t>(cookie));
-    ALOGD("Client %" PRId32 " is dead", clientId);
-    // Don't check for pid validity since we know it's already dead.
-    TranscodingClientManager& manager = TranscodingClientManager::getInstance();
-    manager.removeClient(clientId);
+    ClientIdType clientId = reinterpret_cast<ClientIdType>(cookie);
+
+    ALOGD("Client %lld is dead", (long long)clientId);
+
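+    // The death cookie is the client id; look up the ClientImpl registered under it and
+    // unregister on behalf of the dead client.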
+    std::shared_ptr<ClientImpl> client;
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+
+        auto it = sCookie2Client.find(clientId);
+        if (it != sCookie2Client.end()) {
+            client = it->second;
+        }
+    }
+
+    if (client != nullptr) {
+        client->unregister();
+    }
 }
 
-TranscodingClientManager::TranscodingClientManager()
-    : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+TranscodingClientManager::TranscodingClientManager(
+        const std::shared_ptr<ControllerClientInterface>& controller)
+      : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)),
+        mSessionController(controller) {
     ALOGD("TranscodingClientManager started");
+    for (uid_t uid : {AID_ROOT, AID_SYSTEM, AID_SHELL, AID_MEDIA}) {
+        mTrustedUids.insert(uid);
+    }
 }
 
 TranscodingClientManager::~TranscodingClientManager() {
     ALOGD("TranscodingClientManager exited");
 }
 
-bool TranscodingClientManager::isClientIdRegistered(int32_t clientId) const {
-    std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.find(clientId) != mClientIdToClientInfoMap.end();
-}
-
 void TranscodingClientManager::dumpAllClients(int fd, const Vector<String16>& args __unused) {
     String8 result;
 
     const size_t SIZE = 256;
     char buffer[SIZE];
+    std::scoped_lock lock{mLock};
 
-    snprintf(buffer, SIZE, "    Total num of Clients: %zu\n", mClientIdToClientInfoMap.size());
-    result.append(buffer);
-
-    if (mClientIdToClientInfoMap.size() > 0) {
-        snprintf(buffer, SIZE, "========== Dumping all clients =========\n");
+    if (mClientIdToClientMap.size() > 0) {
+        snprintf(buffer, SIZE, "\n========== Dumping all clients =========\n");
         result.append(buffer);
     }
 
-    for (const auto& iter : mClientIdToClientInfoMap) {
-        const std::shared_ptr<ITranscodingServiceClient> client = iter.second->mClient;
-        std::string clientName;
-        Status status = client->getName(&clientName);
-        if (!status.isOk()) {
-            ALOGE("Failed to get client: %d information", iter.first);
-            continue;
-        }
-        snprintf(buffer, SIZE, "    -- Clients: %d  name: %s\n", iter.first, clientName.c_str());
+    snprintf(buffer, SIZE, "  Total num of Clients: %zu\n", mClientIdToClientMap.size());
+    result.append(buffer);
+
+    for (const auto& iter : mClientIdToClientMap) {
+        snprintf(buffer, SIZE, "    Client %lld:  pkg: %s\n", (long long)iter.first,
+                 iter.second->mClientName.c_str());
         result.append(buffer);
     }
 
     write(fd, result.string(), result.size());
 }
 
-status_t TranscodingClientManager::addClient(std::unique_ptr<ClientInfo> client) {
-    // Validate the client.
-    if (client == nullptr || client->mClientId < 0 || client->mClientPid < 0 ||
-        client->mClientUid < 0 || client->mClientOpPackageName.empty() ||
-        client->mClientOpPackageName == "") {
-        ALOGE("Invalid client");
-        return BAD_VALUE;
+bool TranscodingClientManager::isTrustedCaller(pid_t pid, uid_t uid) {
+    if (uid > 0 && mTrustedUids.count(uid) > 0) {
+        return true;
     }
 
+    int32_t result;
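+    // Not in the trusted-uid set; check WRITE_MEDIA_STORAGE and cache the uid if it is granted.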
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (APermissionManager_checkPermission("android.permission.WRITE_MEDIA_STORAGE", pid, uid,
+                                               &result) == PERMISSION_MANAGER_STATUS_OK &&
+            result == PERMISSION_MANAGER_PERMISSION_GRANTED) {
+            mTrustedUids.insert(uid);
+            return true;
+        }
+    }
+
+    return false;
+}
+
+status_t TranscodingClientManager::addClient(
+        const std::shared_ptr<ITranscodingClientCallback>& callback, const std::string& clientName,
+        const std::string& opPackageName, std::shared_ptr<ITranscodingClient>* outClient) {
+    int32_t callingPid = AIBinder_getCallingPid();
+    int32_t callingUid = AIBinder_getCallingUid();
+
+    // Check if client has the permission
+    if (!isTrustedCaller(callingPid, callingUid)) {
+        ALOGE("addClient rejected (clientPid %d, clientUid %d)", callingPid, callingUid);
+        return IMediaTranscodingService::ERROR_PERMISSION_DENIED;
+    }
+
+    // Validate the client.
+    if (callback == nullptr || clientName.empty() || opPackageName.empty()) {
+        ALOGE("Invalid client");
+        return IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT;
+    }
+
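+    // Use the callback's binder as the key for the duplicate-registration check and death linking.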
+    SpAIBinder binder = callback->asBinder();
+
     std::scoped_lock lock{mLock};
 
-    // Check if the client already exists.
-    if (mClientIdToClientInfoMap.count(client->mClientId) != 0) {
-        ALOGW("Client already exists.");
-        return ALREADY_EXISTS;
+    // Check if the client is already registered.
+    if (mRegisteredCallbacks.count((uintptr_t)binder.get()) > 0) {
+        return IMediaTranscodingService::ERROR_ALREADY_EXISTS;
     }
 
-    ALOGD("Adding client id %d pid: %d uid: %d %s", client->mClientId, client->mClientPid,
-          client->mClientUid, client->mClientOpPackageName.c_str());
+    // Creates the client (with the id assigned by ClientImpl).
+    std::shared_ptr<ClientImpl> client = ::ndk::SharedRefBase::make<ClientImpl>(
+            callback, clientName, opPackageName, shared_from_this());
 
-    AIBinder_linkToDeath(client->mClient->asBinder().get(), mDeathRecipient.get(),
+    ALOGD("Adding client id %lld, name %s, package %s", (long long)client->mClientId,
+          client->mClientName.c_str(), client->mClientOpPackageName.c_str());
+
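+    // Track the client under its id, which is also passed as the death-recipient cookie below.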
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.emplace(std::make_pair(client->mClientId, client));
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
                          reinterpret_cast<void*>(client->mClientId));
 
     // Adds the new client to the map.
-    mClientIdToClientInfoMap[client->mClientId] = std::move(client);
+    mRegisteredCallbacks.insert((uintptr_t)binder.get());
+    mClientIdToClientMap[client->mClientId] = client;
+
+    *outClient = client;
 
     return OK;
 }
 
-status_t TranscodingClientManager::removeClient(int32_t clientId) {
-    ALOGD("Removing client id %d", clientId);
+status_t TranscodingClientManager::removeClient(ClientIdType clientId) {
+    ALOGD("Removing client id %lld", (long long)clientId);
     std::scoped_lock lock{mLock};
 
     // Checks if the client is valid.
-    auto it = mClientIdToClientInfoMap.find(clientId);
-    if (it == mClientIdToClientInfoMap.end()) {
-        ALOGE("Client id %d does not exist", clientId);
-        return INVALID_OPERATION;
+    auto it = mClientIdToClientMap.find(clientId);
+    if (it == mClientIdToClientMap.end()) {
+        ALOGE("Client id %lld does not exist", (long long)clientId);
+        return IMediaTranscodingService::ERROR_INVALID_OPERATION;
     }
 
-    std::shared_ptr<ITranscodingServiceClient> client = it->second->mClient;
+    SpAIBinder binder = it->second->mClientBinder;
 
     // Check if the client is still alive. If so, unlink the death recipient.
-    if (client) {
-        AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(),
-                               reinterpret_cast<void*>(clientId));
+    if (binder.get() != nullptr) {
+        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+                               reinterpret_cast<void*>(it->second->mClientId));
+    }
+
+    {
+        std::scoped_lock lock{sCookie2ClientLock};
+        sCookie2Client.erase(it->second->mClientId);
     }
 
     // Erase the entry.
-    mClientIdToClientInfoMap.erase(it);
+    mClientIdToClientMap.erase(it);
+    mRegisteredCallbacks.erase((uintptr_t)binder.get());
 
     return OK;
 }
 
 size_t TranscodingClientManager::getNumOfClients() const {
     std::scoped_lock lock{mLock};
-    return mClientIdToClientInfoMap.size();
+    return mClientIdToClientMap.size();
 }
 
 }  // namespace android
diff --git a/media/libmediatranscoding/TranscodingLogger.cpp b/media/libmediatranscoding/TranscodingLogger.cpp
new file mode 100644
index 0000000..29a52b0
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingLogger.cpp
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLogger"
+
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <cmath>
+#include <string>
+
+namespace android {
+
+static_assert(TranscodingLogger::UNKNOWN ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__UNKNOWN,
+              "Session event mismatch");
+static_assert(TranscodingLogger::FINISHED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__FINISHED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::ERROR ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__ERROR,
+              "Session event mismatch");
+static_assert(TranscodingLogger::PAUSED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__PAUSED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::CANCELLED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CANCELLED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::START_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__START_FAILED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::RESUME_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__RESUME_FAILED,
+              "Session event mismatch");
+static_assert(TranscodingLogger::CREATE_FAILED ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CREATE_FAILED,
+              "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_SRC_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_SRC_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_DST_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_DST_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::CONFIG_TRACK_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_TRACK_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::OPEN_SRC_FD_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_SRC_FD_FAILED,
+        "Session event mismatch");
+static_assert(
+        TranscodingLogger::OPEN_DST_FD_FAILED ==
+                android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_DST_FD_FAILED,
+        "Session event mismatch");
+static_assert(TranscodingLogger::NO_TRACKS ==
+                      android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__NO_TRACKS,
+              "Session event mismatch");
+
+static inline int32_t getInt32(AMediaFormat* fmt, const char* key, int32_t defaultValue = -1) {
+    int32_t value;
+    if (fmt == nullptr || !AMediaFormat_getInt32(fmt, key, &value)) {
+        ALOGW("Unable to get %s", key);
+        value = defaultValue;
+    }
+    return value;
+}
+
+// Note: returned string is owned by format and only valid until the next getString.
+static inline const char* getString(AMediaFormat* fmt, const char* key,
+                                    const char* defaultValue = "(null)") {
+    const char* value;
+    if (fmt == nullptr || !AMediaFormat_getString(fmt, key, &value)) {
+        ALOGW("Unable to get %s", key);
+        value = defaultValue;
+    }
+    return value;
+}
+
+TranscodingLogger::TranscodingLogger()
+      : mSessionEndedAtomWriter(&android::media::stats::stats_write) {}
+
+void TranscodingLogger::logSessionEnded(enum SessionEndedReason reason, uid_t callingUid,
+                                        int status, std::chrono::microseconds duration,
+                                        AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+    logSessionEnded(std::chrono::steady_clock::now(), reason, callingUid, status, duration,
+                    srcFormat, dstFormat);
+}
+
+void TranscodingLogger::logSessionEnded(const std::chrono::steady_clock::time_point& now,
+                                        enum SessionEndedReason reason, uid_t callingUid,
+                                        int status, std::chrono::microseconds duration,
+                                        AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+    if (srcFormat == nullptr) {
+        ALOGE("Source format is null. Dropping event.");
+        return;
+    }
+
+    if (!shouldLogAtom(now, status)) {
+        ALOGD("Maximum logged event count reached. Dropping event.");
+        return;
+    }
+
+    // Extract the pieces of information to log.
+    const int32_t srcWidth = getInt32(srcFormat, AMEDIAFORMAT_KEY_WIDTH);
+    const int32_t srcHeight = getInt32(srcFormat, AMEDIAFORMAT_KEY_HEIGHT);
+    const char* srcMime = getString(srcFormat, AMEDIAFORMAT_KEY_MIME);
+    const int32_t srcProfile = getInt32(srcFormat, AMEDIAFORMAT_KEY_PROFILE);
+    const int32_t srcLevel = getInt32(srcFormat, AMEDIAFORMAT_KEY_LEVEL);
+    const int32_t srcFrameRate = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_RATE);
+    const int32_t srcFrameCount = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT);
+    const bool srcIsHdr = AMediaFormatUtils::VideoIsHdr(srcFormat);
+
+    int32_t dstWidth = getInt32(dstFormat, AMEDIAFORMAT_KEY_WIDTH, srcWidth);
+    int32_t dstHeight = getInt32(dstFormat, AMEDIAFORMAT_KEY_HEIGHT, srcHeight);
+    const char* dstMime = dstFormat == nullptr
+                                  ? "passthrough"
+                                  : getString(dstFormat, AMEDIAFORMAT_KEY_MIME, srcMime);
+    const bool dstIsHdr = false;  // The transcoder always requests SDR output.
+
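+    // AMEDIAFORMAT_KEY_DURATION is reported in microseconds; convert to milliseconds for the atom.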
+    int64_t tmpDurationUs;
+    const int32_t srcDurationMs =
+            AMediaFormat_getInt64(srcFormat, AMEDIAFORMAT_KEY_DURATION, &tmpDurationUs)
+                    ? static_cast<int32_t>(tmpDurationUs / 1000)
+                    : -1;
+
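+    // Effective transcoding frame rate: source frame count divided by the wall-clock transcoding time.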
+    int32_t transcodeFrameRate = -1;
+    if (status == 0 && srcFrameCount > 0 && duration.count() > 0) {
+        std::chrono::duration<double> seconds{duration};
+        transcodeFrameRate = static_cast<int32_t>(
+                std::round(static_cast<double>(srcFrameCount) / seconds.count()));
+    }
+
+    // Write the atom.
+    mSessionEndedAtomWriter(android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED,
+                            static_cast<int>(reason), callingUid, status, transcodeFrameRate,
+                            srcWidth, srcHeight, srcMime, srcProfile, srcLevel, srcFrameRate,
+                            srcDurationMs, srcIsHdr, dstWidth, dstHeight, dstMime, dstIsHdr);
+}
+
+bool TranscodingLogger::shouldLogAtom(const std::chrono::steady_clock::time_point& now,
+                                      int status) {
+    std::scoped_lock lock{mLock};
+    static const std::chrono::hours oneDay(24);
+
+    // Remove events older than one day.
+    while (mLastLoggedAtoms.size() > 0 && (now - mLastLoggedAtoms.front().first) >= oneDay) {
+        if (mLastLoggedAtoms.front().second == AMEDIA_OK) {
+            --mSuccessfulCount;
+        }
+        mLastLoggedAtoms.pop();
+    }
+
+    // Don't log if maximum number of events is reached.
+    if (mLastLoggedAtoms.size() >= kMaxAtomsPerDay) {
+        return false;
+    }
+
+    // Don't log if the event is successful and the maximum number of successful events is reached.
+    if (status == AMEDIA_OK && mSuccessfulCount >= kMaxSuccessfulAtomsPerDay) {
+        return false;
+    }
+
+    // Record the event.
+    if (status == AMEDIA_OK) {
+        ++mSuccessfulCount;
+    }
+    mLastLoggedAtoms.emplace(now, status);
+    return true;
+}
+
+void TranscodingLogger::setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer) {
+    mSessionEndedAtomWriter = writer;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
new file mode 100644
index 0000000..af53f64
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingResourcePolicy"
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <aidl/android/media/IResourceObserverService.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::IResourceObserverService;
+using ::aidl::android::media::MediaObservableEvent;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+static std::string toString(const MediaObservableParcel& observable) {
+    return "{" + ::aidl::android::media::toString(observable.type) + ", " +
+           std::to_string(observable.value) + "}";
+}
+
+struct TranscodingResourcePolicy::ResourceObserver : public BnResourceObserver {
+    explicit ResourceObserver(TranscodingResourcePolicy* owner) : mOwner(owner) {}
+
+    // IResourceObserver
+    ::ndk::ScopedAStatus onStatusChanged(
+            MediaObservableEvent event, int32_t uid, int32_t pid,
+            const std::vector<MediaObservableParcel>& observables) override {
+        ALOGD("%s: %s, uid %d, pid %d, %s", __FUNCTION__,
+              ::aidl::android::media::toString(event).c_str(), uid, pid,
+              toString(observables[0]).c_str());
+
+        // Only report kIdle event.
+        if (((uint64_t)event & (uint64_t)MediaObservableEvent::kIdle) != 0) {
+            for (auto& observable : observables) {
+                if (observable.type == MediaObservableType::kVideoSecureCodec ||
+                    observable.type == MediaObservableType::kVideoNonSecureCodec) {
+                    mOwner->onResourceAvailable(pid);
+                    break;
+                }
+            }
+        }
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    TranscodingResourcePolicy* mOwner;
+};
+
+// static
+void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
+    TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
+    if (owner != nullptr) {
+        owner->unregisterSelf();
+    }
+    // TODO(chz): retry connecting to IResourceObserverService after failure.
+    // Also need back-up logic if IResourceObserverService is offline for a
+    // prolonged period of time. A possible alternative: while
+    // IResourceObserverService is unavailable, trigger onResourceAvailable() every
+    // time the top uid changes (in the hope that this frees up some codec instances
+    // that we could reclaim).
+}
+
+TranscodingResourcePolicy::TranscodingResourcePolicy()
+      : mRegistered(false),
+        mResourceLostPid(-1),
+        mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+    registerSelf();
+}
+
+TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+    unregisterSelf();
+}
+
+void TranscodingResourcePolicy::registerSelf() {
+    ALOGI("TranscodingResourcePolicy: registerSelf");
+
+    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_observer"));
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (mRegistered) {
+        return;
+    }
+
+    // TODO(chz): retry connecting to IResourceObserverService after failure.
+    mService = IResourceObserverService::fromBinder(binder);
+    if (mService == nullptr) {
+        ALOGE("Failed to get IResourceObserverService");
+        return;
+    }
+
+    // Only register filters for codec resource available.
+    mObserver = ::ndk::SharedRefBase::make<ResourceObserver>(this);
+    std::vector<MediaObservableFilter> filters = {
+            {MediaObservableType::kVideoSecureCodec, MediaObservableEvent::kIdle},
+            {MediaObservableType::kVideoNonSecureCodec, MediaObservableEvent::kIdle}};
+
+    Status status = mService->registerObserver(mObserver, filters);
+    if (!status.isOk()) {
+        ALOGE("failed to register: error %d", status.getServiceSpecificError());
+        mService = nullptr;
+        mObserver = nullptr;
+        return;
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+
+    ALOGD("@@@ registered observer");
+    mRegistered = true;
+}
+
+void TranscodingResourcePolicy::unregisterSelf() {
+    ALOGI("TranscodingResourcePolicy: unregisterSelf");
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (!mRegistered) {
+        return;
+    }
+
+    ::ndk::SpAIBinder binder = mService->asBinder();
+    if (binder.get() != nullptr) {
+        Status status = mService->unregisterObserver(mObserver);
+        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+    }
+
+    mService = nullptr;
+    mObserver = nullptr;
+    mRegistered = false;
+}
+
+void TranscodingResourcePolicy::setCallback(
+        const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) {
+    std::scoped_lock lock{mCallbackLock};
+    mResourcePolicyCallback = cb;
+}
+
+void TranscodingResourcePolicy::setPidResourceLost(pid_t pid) {
+    std::scoped_lock lock{mCallbackLock};
+    mResourceLostPid = pid;
+}
+
+void TranscodingResourcePolicy::onResourceAvailable(pid_t pid) {
+    std::shared_ptr<ResourcePolicyCallbackInterface> cb;
+    {
+        std::scoped_lock lock{mCallbackLock};
+        // Only callback if codec resource is released from other processes.
+        if (mResourceLostPid != -1 && mResourceLostPid != pid) {
+            cb = mResourcePolicyCallback.lock();
+            mResourceLostPid = -1;
+        }
+    }
+
+    if (cb != nullptr) {
+        cb->onResourceAvailable();
+    }
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
new file mode 100644
index 0000000..ea3e518
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -0,0 +1,1190 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionController"
+
+#define VALIDATE_STATE 1
+
+#include <android/permission_manager.h>
+#include <inttypes.h>
+#include <media/TranscodingSessionController.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/AndroidThreads.h>
+#include <utils/Log.h>
+
+#include <thread>
+#include <utility>
+
+namespace android {
+
+static_assert((SessionIdType)-1 < 0, "SessionIdType should be signed");
+
+constexpr static uid_t OFFLINE_UID = -1;
+constexpr static size_t kSessionHistoryMax = 100;
+
+//static
+String8 TranscodingSessionController::sessionToString(const SessionKeyType& sessionKey) {
+    return String8::format("{client:%lld, session:%d}", (long long)sessionKey.first,
+                           sessionKey.second);
+}
+
+//static
+const char* TranscodingSessionController::sessionStateToString(const Session::State sessionState) {
+    switch (sessionState) {
+    case Session::State::NOT_STARTED:
+        return "NOT_STARTED";
+    case Session::State::RUNNING:
+        return "RUNNING";
+    case Session::State::PAUSED:
+        return "PAUSED";
+    case Session::State::FINISHED:
+        return "FINISHED";
+    case Session::State::CANCELED:
+        return "CANCELED";
+    case Session::State::ERROR:
+        return "ERROR";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+///////////////////////////////////////////////////////////////////////////////
+struct TranscodingSessionController::Watchdog {
+    Watchdog(TranscodingSessionController* owner, int64_t timeoutUs);
+    ~Watchdog();
+
+    // Starts monitoring the session.
+    void start(const SessionKeyType& key);
+    // Stops monitoring the session.
+    void stop();
+    // Signals that the session is still alive. Must be called at least every mTimeoutUs.
+    // (A timeout occurs if no ping arrives within mTimeoutUs of the last ping.)
+    void keepAlive();
+
+private:
+    void threadLoop();
+    void updateTimer_l();
+
+    TranscodingSessionController* mOwner;
+    const int64_t mTimeoutUs;
+    mutable std::mutex mLock;
+    std::condition_variable mCondition GUARDED_BY(mLock);
+    // Whether watchdog is monitoring a session for timeout.
+    bool mActive GUARDED_BY(mLock);
+    // Whether watchdog is aborted and the monitoring thread should exit.
+    bool mAbort GUARDED_BY(mLock);
+    // When watchdog is active, the next timeout time point.
+    std::chrono::steady_clock::time_point mNextTimeoutTime GUARDED_BY(mLock);
+    // When watchdog is active, the session being watched.
+    SessionKeyType mSessionToWatch GUARDED_BY(mLock);
+    std::thread mThread;
+};
+
+TranscodingSessionController::Watchdog::Watchdog(TranscodingSessionController* owner,
+                                                 int64_t timeoutUs)
+      : mOwner(owner),
+        mTimeoutUs(timeoutUs),
+        mActive(false),
+        mAbort(false),
+        mThread(&Watchdog::threadLoop, this) {
+    ALOGV("Watchdog CTOR: %p", this);
+}
+
+TranscodingSessionController::Watchdog::~Watchdog() {
+    ALOGV("Watchdog DTOR: %p", this);
+
+    {
+        // Exit the looper thread.
+        std::scoped_lock lock{mLock};
+
+        mAbort = true;
+        mCondition.notify_one();
+    }
+
+    mThread.join();
+    ALOGV("Watchdog DTOR: %p, done.", this);
+}
+
+void TranscodingSessionController::Watchdog::start(const SessionKeyType& key) {
+    std::scoped_lock lock{mLock};
+
+    if (!mActive) {
+        ALOGI("Watchdog start: %s", sessionToString(key).c_str());
+
+        mActive = true;
+        mSessionToWatch = key;
+        updateTimer_l();
+        mCondition.notify_one();
+    }
+}
+
+void TranscodingSessionController::Watchdog::stop() {
+    std::scoped_lock lock{mLock};
+
+    if (mActive) {
+        ALOGI("Watchdog stop: %s", sessionToString(mSessionToWatch).c_str());
+
+        mActive = false;
+        mCondition.notify_one();
+    }
+}
+
+void TranscodingSessionController::Watchdog::keepAlive() {
+    std::scoped_lock lock{mLock};
+
+    if (mActive) {
+        ALOGI("Watchdog keepAlive: %s", sessionToString(mSessionToWatch).c_str());
+
+        updateTimer_l();
+        mCondition.notify_one();
+    }
+}
+
+// updateTimer_l() is only called with lock held.
+void TranscodingSessionController::Watchdog::updateTimer_l() NO_THREAD_SAFETY_ANALYSIS {
+    std::chrono::microseconds timeout(mTimeoutUs);
+    mNextTimeoutTime = std::chrono::steady_clock::now() + timeout;
+}
+
+// Unfortunately std::unique_lock is incompatible with -Wthread-safety.
+void TranscodingSessionController::Watchdog::threadLoop() NO_THREAD_SAFETY_ANALYSIS {
+    androidSetThreadPriority(0 /*tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
+    std::unique_lock<std::mutex> lock{mLock};
+
+    while (!mAbort) {
+        if (!mActive) {
+            mCondition.wait(lock);
+            continue;
+        }
+        // Watchdog active, wait till next timeout time.
+        if (mCondition.wait_until(lock, mNextTimeoutTime) == std::cv_status::timeout) {
+            // If timeout happens, report timeout and deactivate watchdog.
+            mActive = false;
+            // Make a copy of the session key, as it's no longer protected once we unlock.
+            SessionKeyType sessionKey = mSessionToWatch;
+
+            ALOGE("Watchdog timeout: %s", sessionToString(sessionKey).c_str());
+
+            lock.unlock();
+            mOwner->onError(sessionKey.first, sessionKey.second,
+                            TranscodingErrorCode::kWatchdogTimeout);
+            lock.lock();
+        }
+    }
+}
+///////////////////////////////////////////////////////////////////////////////
+struct TranscodingSessionController::Pacer {
+    Pacer(const ControllerConfig& config)
+          : mBurstThresholdMs(config.pacerBurstThresholdMs),
+            mBurstCountQuota(config.pacerBurstCountQuota),
+            mBurstTimeQuotaSec(config.pacerBurstTimeQuotaSeconds) {}
+
+    ~Pacer() = default;
+
+    bool onSessionStarted(uid_t uid, uid_t callingUid);
+    void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime);
+    void onSessionCancelled(uid_t uid);
+
+private:
+    // Threshold of time between finish/start below which a back-to-back start is counted.
+    int32_t mBurstThresholdMs;
+    // Maximum allowed back-to-back start count.
+    int32_t mBurstCountQuota;
+    // Maximum allowed back-to-back running time.
+    int32_t mBurstTimeQuotaSec;
+
+    struct UidHistoryEntry {
+        bool sessionActive = false;
+        int32_t burstCount = 0;
+        std::chrono::steady_clock::duration burstDuration{0};
+        std::chrono::steady_clock::time_point lastCompletedTime;
+    };
+    std::map<uid_t, UidHistoryEntry> mUidHistoryMap;
+    std::unordered_set<uid_t> mMtpUids;
+    std::unordered_set<uid_t> mNonMtpUids;
+
+    bool isSubjectToQuota(uid_t uid, uid_t callingUid);
+};
+
+bool TranscodingSessionController::Pacer::isSubjectToQuota(uid_t uid, uid_t callingUid) {
+    // Submitting with one's own uid is not limited (which can only happen if this is used
+    // as an app-facing API). MediaProvider usage always submits on behalf of other uids.
+    if (uid == callingUid) {
+        return false;
+    }
+
+    if (mMtpUids.find(uid) != mMtpUids.end()) {
+        return false;
+    }
+
+    if (mNonMtpUids.find(uid) != mNonMtpUids.end()) {
+        return true;
+    }
+
+    // We don't have MTP permission info about this uid yet, check permission and save the result.
+    int32_t result;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (APermissionManager_checkPermission("android.permission.ACCESS_MTP", -1 /*pid*/, uid,
+                                               &result) == PERMISSION_MANAGER_STATUS_OK &&
+            result == PERMISSION_MANAGER_PERMISSION_GRANTED) {
+            mMtpUids.insert(uid);
+            return false;
+        }
+    }
+
+    mNonMtpUids.insert(uid);
+    return true;
+}
+
+bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid, uid_t callingUid) {
+    if (!isSubjectToQuota(uid, callingUid)) {
+        ALOGI("Pacer::onSessionStarted: uid %d (calling uid: %d): not subject to quota", uid,
+              callingUid);
+        return true;
+    }
+
+    // If uid doesn't exist, only insert the entry and mark session active. Skip quota checking.
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+        mUidHistoryMap.emplace(uid, UidHistoryEntry{});
+        mUidHistoryMap[uid].sessionActive = true;
+        ALOGV("Pacer::onSessionStarted: uid %d: new", uid);
+        return true;
+    }
+
+    // TODO: if thermal throttling or resource loss occurred between this start
+    // and the previous completion, we should deduct the paused time from the elapsed time.
+    // (An individual session's pause time, on the other hand, doesn't need to be deducted
+    // because it doesn't affect the gap between the last completion and this start.)
+    auto timeSinceLastComplete =
+            std::chrono::steady_clock::now() - mUidHistoryMap[uid].lastCompletedTime;
+    if (mUidHistoryMap[uid].burstCount >= mBurstCountQuota &&
+        mUidHistoryMap[uid].burstDuration >= std::chrono::seconds(mBurstTimeQuotaSec)) {
+        ALOGW("Pacer::onSessionStarted: uid %d: over quota, burst count %d, time %lldms", uid,
+              mUidHistoryMap[uid].burstCount,
+              (long long)mUidHistoryMap[uid].burstDuration.count() / 1000000);
+        return false;
+    }
+
+    // If not over quota, allow the session, and reset as long as this is not too close
+    // to previous completion.
+    if (timeSinceLastComplete > std::chrono::milliseconds(mBurstThresholdMs)) {
+        ALOGV("Pacer::onSessionStarted: uid %d: reset quota", uid);
+        mUidHistoryMap[uid].burstCount = 0;
+        mUidHistoryMap[uid].burstDuration = std::chrono::milliseconds(0);
+    } else {
+        ALOGV("Pacer::onSessionStarted: uid %d: burst count %d, time %lldms", uid,
+              mUidHistoryMap[uid].burstCount,
+              (long long)mUidHistoryMap[uid].burstDuration.count() / 1000000);
+    }
+
+    mUidHistoryMap[uid].sessionActive = true;
+    return true;
+}
+
+void TranscodingSessionController::Pacer::onSessionCompleted(
+        uid_t uid, std::chrono::microseconds runningTime) {
+    // Skip quota update if this uid missed the start. (Could happen if the uid is added via
+    // addClientUid() after the session start.)
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end() || !mUidHistoryMap[uid].sessionActive) {
+        ALOGV("Pacer::onSessionCompleted: uid %d: not started", uid);
+        return;
+    }
+    ALOGV("Pacer::onSessionCompleted: uid %d: runningTime %lld", uid, runningTime.count() / 1000);
+    mUidHistoryMap[uid].sessionActive = false;
+    mUidHistoryMap[uid].burstCount++;
+    mUidHistoryMap[uid].burstDuration += runningTime;
+    mUidHistoryMap[uid].lastCompletedTime = std::chrono::steady_clock::now();
+}
+
+void TranscodingSessionController::Pacer::onSessionCancelled(uid_t uid) {
+    if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+        ALOGV("Pacer::onSessionCancelled: uid %d: not present", uid);
+        return;
+    }
+    // This is only called when a uid is removed from a session (because the uid was killed,
+    // or the original submitting client went away while the session was kept for offline use).
+    // Since the uid will miss the onSessionCompleted(), we can't track this
+    // session, and have to check back at the next onSessionStarted().
+    mUidHistoryMap[uid].sessionActive = false;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+TranscodingSessionController::TranscodingSessionController(
+        const TranscoderFactoryType& transcoderFactory,
+        const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+        const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
+        const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+        const ControllerConfig* config)
+      : mTranscoderFactory(transcoderFactory),
+        mUidPolicy(uidPolicy),
+        mResourcePolicy(resourcePolicy),
+        mThermalPolicy(thermalPolicy),
+        mCurrentSession(nullptr),
+        mResourceLost(false) {
+    // Only push empty offline queue initially. Realtime queues are added when requests come in.
+    mUidSortedList.push_back(OFFLINE_UID);
+    mOfflineUidIterator = mUidSortedList.begin();
+    mSessionQueues.emplace(OFFLINE_UID, SessionQueueType());
+    mUidPackageNames[OFFLINE_UID] = "(offline)";
+    mThermalThrottling = thermalPolicy->getThrottlingStatus();
+    if (config != nullptr) {
+        mConfig = *config;
+    }
+    mPacer.reset(new Pacer(mConfig));
+    ALOGD("@@@ watchdog %lld, burst count %d, burst time %d, burst threshold %d",
+          (long long)mConfig.watchdogTimeoutUs, mConfig.pacerBurstCountQuota,
+          mConfig.pacerBurstTimeQuotaSeconds, mConfig.pacerBurstThresholdMs);
+}
+
+TranscodingSessionController::~TranscodingSessionController() {}
+
+void TranscodingSessionController::dumpSession_l(const Session& session, String8& result,
+                                                 bool closedSession) {
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    const TranscodingRequestParcel& request = session.request;
+    snprintf(buffer, SIZE, "      Session: %s, %s, %d%%\n", sessionToString(session.key).c_str(),
+             sessionStateToString(session.getState()), session.lastProgress);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "        pkg: %s\n", request.clientPackageName.c_str());
+    result.append(buffer);
+    snprintf(buffer, SIZE, "        src: %s\n", request.sourceFilePath.c_str());
+    result.append(buffer);
+    snprintf(buffer, SIZE, "        dst: %s\n", request.destinationFilePath.c_str());
+    result.append(buffer);
+
+    if (closedSession) {
+        snprintf(buffer, SIZE,
+                 "        waiting: %.1fs, running: %.1fs, paused: %.1fs, paused count: %d\n",
+                 session.waitingTime.count() / 1000000.0f, session.runningTime.count() / 1000000.0f,
+                 session.pausedTime.count() / 1000000.0f, session.pauseCount);
+        result.append(buffer);
+    }
+}
+
+void TranscodingSessionController::dumpAllSessions(int fd, const Vector<String16>& args __unused) {
+    String8 result;
+
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    std::scoped_lock lock{mLock};
+
+    snprintf(buffer, SIZE, "\n========== Dumping live sessions queues =========\n");
+    result.append(buffer);
+    snprintf(buffer, SIZE, "  Total num of Sessions: %zu\n", mSessionMap.size());
+    result.append(buffer);
+
+    std::vector<int32_t> uids(mUidSortedList.begin(), mUidSortedList.end());
+
+    for (int32_t i = 0; i < uids.size(); i++) {
+        const uid_t uid = uids[i];
+
+        if (mSessionQueues[uid].empty()) {
+            continue;
+        }
+        snprintf(buffer, SIZE, "    uid: %d, pkg: %s\n", uid,
+                 mUidPackageNames.count(uid) > 0 ? mUidPackageNames[uid].c_str() : "(unknown)");
+        result.append(buffer);
+        snprintf(buffer, SIZE, "      Num of sessions: %zu\n", mSessionQueues[uid].size());
+        result.append(buffer);
+        for (auto& sessionKey : mSessionQueues[uid]) {
+            auto sessionIt = mSessionMap.find(sessionKey);
+            if (sessionIt == mSessionMap.end()) {
+                snprintf(buffer, SIZE, "Failed to look up Session %s  \n",
+                         sessionToString(sessionKey).c_str());
+                result.append(buffer);
+                continue;
+            }
+            dumpSession_l(sessionIt->second, result);
+        }
+    }
+
+    snprintf(buffer, SIZE, "\n========== Dumping past sessions =========\n");
+    result.append(buffer);
+    for (auto& session : mSessionHistory) {
+        dumpSession_l(session, result, true /*closedSession*/);
+    }
+
+    write(fd, result.string(), result.size());
+}
+
+/*
+ * Returns nullptr if there is no session, or we're paused globally (due to resource lost,
+ * thermal throttling, etc.). Otherwise, return the session that should be run next.
+ */
+TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
+    if (mSessionMap.empty()) {
+        return nullptr;
+    }
+
+    // Return nullptr if we're paused globally due to resource lost or thermal throttling.
+    if (((mResourcePolicy != nullptr && mResourceLost) ||
+         (mThermalPolicy != nullptr && mThermalThrottling))) {
+        return nullptr;
+    }
+
+    uid_t topUid = *mUidSortedList.begin();
+    // If the current session is running, and it's in the topUid's queue, let it continue
+    // to run even if it's not the earliest in that uid's queue.
+    // For example, uid(B) is added to a session while it's pending in uid(A)'s queue, then
+    // B is brought to the front, which causes the session to run, and the user then switches
+    // back to A.
+    if (mCurrentSession != nullptr && mCurrentSession->getState() == Session::RUNNING &&
+        mCurrentSession->allClientUids.count(topUid) > 0) {
+        return mCurrentSession;
+    }
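+    // Otherwise, run the earliest session in the top uid's queue.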
+    SessionKeyType topSessionKey = *mSessionQueues[topUid].begin();
+    return &mSessionMap[topSessionKey];
+}
+
+void TranscodingSessionController::setSessionState_l(Session* session, Session::State state) {
+    bool wasRunning = (session->getState() == Session::RUNNING);
+    session->setState(state);
+    bool isRunning = (session->getState() == Session::RUNNING);
+
+    if (wasRunning == isRunning) {
+        return;
+    }
+
+    // Currently we only have 1 running session, and we always put the previous
+    // session in non-running state before we run the new session, so it's okay
+    // to start/stop the watchdog here. If this assumption changes, we need to
+    // track the number of running sessions and start/stop watchdog based on that.
+    if (isRunning) {
+        mWatchdog->start(session->key);
+    } else {
+        mWatchdog->stop();
+    }
+}
+
+void TranscodingSessionController::Session::setState(Session::State newState) {
+    if (state == newState) {
+        return;
+    }
+    auto nowTime = std::chrono::steady_clock::now();
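+    // Attribute the elapsed time to the state we're leaving before switching to the new state.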
+    if (state != INVALID) {
+        std::chrono::microseconds elapsedTime =
+                std::chrono::duration_cast<std::chrono::microseconds>(nowTime - stateEnterTime);
+        switch (state) {
+        case PAUSED:
+            pausedTime = pausedTime + elapsedTime;
+            break;
+        case RUNNING:
+            runningTime = runningTime + elapsedTime;
+            break;
+        case NOT_STARTED:
+            waitingTime = waitingTime + elapsedTime;
+            break;
+        default:
+            break;
+        }
+    }
+    if (newState == PAUSED) {
+        pauseCount++;
+    }
+    stateEnterTime = nowTime;
+    state = newState;
+}
+
+void TranscodingSessionController::updateCurrentSession_l() {
+    Session* curSession = mCurrentSession;
+    Session* topSession = nullptr;
+
+    // Delayed init of transcoder and watchdog.
+    if (mTranscoder == nullptr) {
+        mTranscoder = mTranscoderFactory(shared_from_this());
+        mWatchdog = std::make_shared<Watchdog>(this, mConfig.watchdogTimeoutUs);
+    }
+
+    // If we found a different top session, or the top session's running state is not
+    // correct, take action to make it correct.
+    while ((topSession = getTopSession_l()) != curSession ||
+           (topSession != nullptr && !topSession->isRunning())) {
+        ALOGV("updateCurrentSession_l: topSession is %s, curSession is %s",
+              topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
+              curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+
+        // If the current session is running, pause it first. Note this is needed in either
+        // case: 1) the top session is changing to another session, or 2) the top session is
+        // changing to null (which means we should be globally paused).
+        if (curSession != nullptr && curSession->getState() == Session::RUNNING) {
+            mTranscoder->pause(curSession->key.first, curSession->key.second);
+            setSessionState_l(curSession, Session::PAUSED);
+        }
+
+        if (topSession == nullptr) {
+            // Nothing more to run (either no session or globally paused).
+            break;
+        }
+
+        // Otherwise, ensure topSession is running.
+        if (topSession->getState() == Session::NOT_STARTED) {
+            // Check if at least one client has quota to start the session.
+            bool keepForClient = false;
+            for (uid_t uid : topSession->allClientUids) {
+                if (mPacer->onSessionStarted(uid, topSession->callingUid)) {
+                    keepForClient = true;
+                    // DO NOT break here, because book-keeping still needs to happen
+                    // for the other uids.
+                }
+            }
+            if (!keepForClient) {
+                // Unfortunately all uids requesting this session are out of quota.
+                // Drop this session and try the next one.
+                {
+                    auto clientCallback = mSessionMap[topSession->key].callback.lock();
+                    if (clientCallback != nullptr) {
+                        clientCallback->onTranscodingFailed(
+                                topSession->key.second, TranscodingErrorCode::kDroppedByService);
+                    }
+                }
+                removeSession_l(topSession->key, Session::DROPPED_BY_PACER);
+                continue;
+            }
+            mTranscoder->start(topSession->key.first, topSession->key.second, topSession->request,
+                               topSession->callingUid, topSession->callback.lock());
+            setSessionState_l(topSession, Session::RUNNING);
+        } else if (topSession->getState() == Session::PAUSED) {
+            mTranscoder->resume(topSession->key.first, topSession->key.second, topSession->request,
+                                topSession->callingUid, topSession->callback.lock());
+            setSessionState_l(topSession, Session::RUNNING);
+        }
+        break;
+    }
+    mCurrentSession = topSession;
+}
+
+void TranscodingSessionController::addUidToSession_l(uid_t clientUid,
+                                                     const SessionKeyType& sessionKey) {
+    // If it's an offline session, the queue was already added in the constructor.
+    // If it's a real-time session, check if a queue is already present for the uid,
+    // and add a new queue if needed.
+    if (clientUid != OFFLINE_UID) {
+        if (mSessionQueues.count(clientUid) == 0) {
+            mUidPolicy->registerMonitorUid(clientUid);
+            if (mUidPolicy->isUidOnTop(clientUid)) {
+                mUidSortedList.push_front(clientUid);
+            } else {
+                // Real-time requests shouldn't be submitted from a non-top app;
+                // put it just in front of the offline queue.
+                mUidSortedList.insert(mOfflineUidIterator, clientUid);
+            }
+        } else if (clientUid != *mUidSortedList.begin()) {
+            if (mUidPolicy->isUidOnTop(clientUid)) {
+                mUidSortedList.remove(clientUid);
+                mUidSortedList.push_front(clientUid);
+            }
+        }
+    }
+    // Append this session to the uid's queue.
+    mSessionQueues[clientUid].push_back(sessionKey);
+}
+
+void TranscodingSessionController::removeSession_l(
+        const SessionKeyType& sessionKey, Session::State finalState,
+        const std::shared_ptr<std::function<bool(uid_t uid)>>& keepUid) {
+    ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    // Remove session from uid's queue.
+    bool uidQueueRemoved = false;
+    std::unordered_set<uid_t> remainingUids;
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        if (keepUid != nullptr) {
+            if ((*keepUid)(uid)) {
+                remainingUids.insert(uid);
+                continue;
+            }
+            // If we have uids to keep, the session is not going to any final state, so
+            // we can't use onSessionCompleted() as the running time will not be valid.
+            // Only notify the pacer to stop tracking this session.
+            mPacer->onSessionCancelled(uid);
+        }
+        SessionQueueType& sessionQueue = mSessionQueues[uid];
+        auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
+        if (it == sessionQueue.end()) {
+            ALOGW("couldn't find session %s in queue for uid %d",
+                  sessionToString(sessionKey).c_str(), uid);
+            continue;
+        }
+        sessionQueue.erase(it);
+
+        // If this is the last session in a real-time queue, remove this uid's queue.
+        if (uid != OFFLINE_UID && sessionQueue.empty()) {
+            mUidSortedList.remove(uid);
+            mSessionQueues.erase(uid);
+            mUidPolicy->unregisterMonitorUid(uid);
+
+            uidQueueRemoved = true;
+        }
+    }
+
+    if (uidQueueRemoved) {
+        std::unordered_set<uid_t> topUids = mUidPolicy->getTopUids();
+        moveUidsToTop_l(topUids, false /*preserveTopUid*/);
+    }
+
+    if (keepUid != nullptr) {
+        mSessionMap[sessionKey].allClientUids = remainingUids;
+        return;
+    }
+
+    // Clear current session.
+    if (mCurrentSession == &mSessionMap[sessionKey]) {
+        mCurrentSession = nullptr;
+    }
+
+    setSessionState_l(&mSessionMap[sessionKey], finalState);
+
+    // We can use onSessionCompleted() even for CANCELLED, because runningTime is
+    // now updated by setSessionState_l().
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        mPacer->onSessionCompleted(uid, mSessionMap[sessionKey].runningTime);
+    }
+
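+    // Keep a bounded history (at most kSessionHistoryMax entries) of closed sessions for dumping.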
+    mSessionHistory.push_back(mSessionMap[sessionKey]);
+    if (mSessionHistory.size() > kSessionHistoryMax) {
+        mSessionHistory.erase(mSessionHistory.begin());
+    }
+
+    // Remove session from session map.
+    mSessionMap.erase(sessionKey);
+}
+
+/**
+ * Moves the set of uids to the front of mUidSortedList (which is used to pick
+ * the next session to run).
+ *
+ * This is called when 1) we received a onTopUidsChanged() callback from UidPolicy,
+ * or 2) we removed the session queue for a uid because it becomes empty.
+ *
+ * In case 1), if there are multiple uids in the set, and the current front
+ * uid in mUidSortedList is still in the set, we try to keep that uid at the front
+ * so that the current session run is not interrupted. (This is not a concern for case 2,
+ * because the queue for that uid was just removed entirely.)
+ */
+void TranscodingSessionController::moveUidsToTop_l(const std::unordered_set<uid_t>& uids,
+                                                   bool preserveTopUid) {
+    // If uid set is empty, nothing to do. Do not change the queue status.
+    if (uids.empty()) {
+        return;
+    }
+
+    // Save the current top uid.
+    uid_t curTopUid = *mUidSortedList.begin();
+    bool pushCurTopToFront = false;
+    int32_t numUidsMoved = 0;
+
+    // Go through the sorted uid list once, and move the ones in top set to front.
+    for (auto it = mUidSortedList.begin(); it != mUidSortedList.end();) {
+        uid_t uid = *it;
+
+        if (uid != OFFLINE_UID && uids.count(uid) > 0) {
+            it = mUidSortedList.erase(it);
+
+            // If this is the top we're preserving, don't push it here, push
+            // it after the for-loop.
+            if (uid == curTopUid && preserveTopUid) {
+                pushCurTopToFront = true;
+            } else {
+                mUidSortedList.push_front(uid);
+            }
+
+            // If we found all uids in the set, break out.
+            if (++numUidsMoved == uids.size()) {
+                break;
+            }
+        } else {
+            ++it;
+        }
+    }
+
+    if (pushCurTopToFront) {
+        mUidSortedList.push_front(curTopUid);
+    }
+}
+
+bool TranscodingSessionController::submit(
+        ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
+        const TranscodingRequestParcel& request,
+        const std::weak_ptr<ITranscodingClientCallback>& callback) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    ALOGV("%s: session %s, uid %d, priority %d", __FUNCTION__, sessionToString(sessionKey).c_str(),
+          clientUid, (int32_t)request.priority);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) > 0) {
+        ALOGE("session %s already exists", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    // Add the uid package name to the store of package names we already know.
+    if (mUidPackageNames.count(clientUid) == 0) {
+        mUidPackageNames.emplace(clientUid, request.clientPackageName);
+    }
+
+    // TODO(chz): only support offline vs real-time for now. All kUnspecified sessions
+    // go to offline queue.
+    if (request.priority == TranscodingSessionPriority::kUnspecified) {
+        clientUid = OFFLINE_UID;
+    }
+
+    // Add session to session map.
+    mSessionMap[sessionKey].key = sessionKey;
+    mSessionMap[sessionKey].callingUid = callingUid;
+    mSessionMap[sessionKey].allClientUids.insert(clientUid);
+    mSessionMap[sessionKey].request = request;
+    mSessionMap[sessionKey].callback = callback;
+    setSessionState_l(&mSessionMap[sessionKey], Session::NOT_STARTED);
+
+    addUidToSession_l(clientUid, sessionKey);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::cancel(ClientIdType clientId, SessionIdType sessionId) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    ALOGV("%s: session %s", __FUNCTION__, sessionToString(sessionKey).c_str());
+
+    std::list<SessionKeyType> sessionsToRemove, sessionsForOffline;
+
+    std::scoped_lock lock{mLock};
+
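+    // A negative sessionId means all sessions belonging to this client should be cancelled.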
+    if (sessionId < 0) {
+        for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
+            if (it->first.first == clientId) {
+                // If there is offline request, only keep the offline client;
+                // otherwise remove the session.
+                if (it->second.allClientUids.count(OFFLINE_UID) > 0) {
+                    sessionsForOffline.push_back(it->first);
+                } else {
+                    sessionsToRemove.push_back(it->first);
+                }
+            }
+        }
+    } else {
+        if (mSessionMap.count(sessionKey) == 0) {
+            ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+            return false;
+        }
+        sessionsToRemove.push_back(sessionKey);
+    }
+
+    for (auto it = sessionsToRemove.begin(); it != sessionsToRemove.end(); ++it) {
+        // If the session has ever been started, stop it now.
+        // Note that stop() is needed even if the session is currently paused. This instructs
+        // the transcoder to discard any states for the session, otherwise the states may
+        // never be discarded.
+        if (mSessionMap[*it].getState() != Session::NOT_STARTED) {
+            mTranscoder->stop(it->first, it->second);
+        }
+
+        // Remove the session.
+        removeSession_l(*it, Session::CANCELED);
+    }
+
+    auto keepUid = std::make_shared<std::function<bool(uid_t)>>(
+            [](uid_t uid) { return uid == OFFLINE_UID; });
+    for (auto it = sessionsForOffline.begin(); it != sessionsForOffline.end(); ++it) {
+        removeSession_l(*it, Session::CANCELED, keepUid);
+    }
+
+    // Start next session.
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::addClientUid(ClientIdType clientId, SessionIdType sessionId,
+                                                uid_t clientUid) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    if (mSessionMap[sessionKey].allClientUids.count(clientUid) > 0) {
+        ALOGE("session %s already has uid %d", sessionToString(sessionKey).c_str(), clientUid);
+        return false;
+    }
+
+    mSessionMap[sessionKey].allClientUids.insert(clientUid);
+    addUidToSession_l(clientUid, sessionKey);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+    return true;
+}
+
+bool TranscodingSessionController::getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                                                 std::vector<int32_t>* out_clientUids) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    out_clientUids->clear();
+    for (uid_t uid : mSessionMap[sessionKey].allClientUids) {
+        if (uid != OFFLINE_UID) {
+            out_clientUids->push_back(uid);
+        }
+    }
+    return true;
+}
+
+bool TranscodingSessionController::getSession(ClientIdType clientId, SessionIdType sessionId,
+                                              TranscodingRequestParcel* request) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGE("session %s doesn't exist", sessionToString(sessionKey).c_str());
+        return false;
+    }
+
+    *(TranscodingRequest*)request = mSessionMap[sessionKey].request;
+    return true;
+}
+
+void TranscodingSessionController::notifyClient(ClientIdType clientId, SessionIdType sessionId,
+                                                const char* reason,
+                                                std::function<void(const SessionKeyType&)> func) {
+    SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+    std::scoped_lock lock{mLock};
+
+    if (mSessionMap.count(sessionKey) == 0) {
+        ALOGW("%s: ignoring %s for session %s that doesn't exist", __FUNCTION__, reason,
+              sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    // Only ignore if session was never started. In particular, propagate the status
+    // to client if the session is paused. Transcoder could have posted finish when
+    // we're pausing it, and the finish arrived after we changed current session.
+    if (mSessionMap[sessionKey].getState() == Session::NOT_STARTED) {
+        ALOGW("%s: ignoring %s for session %s that was never started", __FUNCTION__, reason,
+              sessionToString(sessionKey).c_str());
+        return;
+    }
+
+    ALOGV("%s: session %s %s", __FUNCTION__, sessionToString(sessionKey).c_str(), reason);
+    func(sessionKey);
+}
+
+void TranscodingSessionController::onStarted(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "started", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingStarted(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onPaused(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "paused", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingPaused(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onResumed(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "resumed", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onTranscodingResumed(sessionId);
+        }
+    });
+}
+
+void TranscodingSessionController::onFinish(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "finish", [=](const SessionKeyType& sessionKey) {
+        {
+            auto clientCallback = mSessionMap[sessionKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        sessionId, TranscodingResultParcel({sessionId, -1 /*actualBitrateBps*/,
+                                                            std::nullopt /*sessionStats*/}));
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(sessionKey, Session::FINISHED);
+
+        // Start next session.
+        updateCurrentSession_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingSessionController::onError(ClientIdType clientId, SessionIdType sessionId,
+                                           TranscodingErrorCode err) {
+    notifyClient(clientId, sessionId, "error", [=](const SessionKeyType& sessionKey) {
+        if (err == TranscodingErrorCode::kWatchdogTimeout) {
+            // Abandon the transcoder, as its handler thread might be stuck in a call into
+            // MediaTranscoder and may not be able to handle any new tasks.
+            mTranscoder->stop(clientId, sessionId, true /*abandon*/);
+            // Drop the last reference before we create a new transcoder.
+            mTranscoder = nullptr;
+            mTranscoder = mTranscoderFactory(shared_from_this());
+        }
+
+        {
+            auto clientCallback = mSessionMap[sessionKey].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(sessionId, err);
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(sessionKey, Session::ERROR);
+
+        // Start next session.
+        updateCurrentSession_l();
+
+        validateState_l();
+    });
+}
+
+void TranscodingSessionController::onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                                                    int32_t progress) {
+    notifyClient(clientId, sessionId, "progress", [=](const SessionKeyType& sessionKey) {
+        auto callback = mSessionMap[sessionKey].callback.lock();
+        if (callback != nullptr) {
+            callback->onProgressUpdate(sessionId, progress);
+        }
+        mSessionMap[sessionKey].lastProgress = progress;
+    });
+}
+
+void TranscodingSessionController::onHeartBeat(ClientIdType clientId, SessionIdType sessionId) {
+    notifyClient(clientId, sessionId, "heart-beat",
+                 [=](const SessionKeyType& /*sessionKey*/) { mWatchdog->keepAlive(); });
+}
+
+void TranscodingSessionController::onResourceLost(ClientIdType clientId, SessionIdType sessionId) {
+    ALOGI("%s", __FUNCTION__);
+
+    notifyClient(clientId, sessionId, "resource_lost", [=](const SessionKeyType& sessionKey) {
+        if (mResourceLost) {
+            return;
+        }
+
+        Session* resourceLostSession = &mSessionMap[sessionKey];
+        if (resourceLostSession->getState() != Session::RUNNING) {
+            ALOGW("session %s lost resource but is no longer running",
+                  sessionToString(sessionKey).c_str());
+            return;
+        }
+        // If we receive a resource loss event, the transcoder already paused the transcoding,
+        // so we don't need to call onPaused() to pause it. However, we still need to notify
+        // the client and update the session state here.
+        setSessionState_l(resourceLostSession, Session::PAUSED);
+        // Notify the client as a paused event.
+        auto clientCallback = resourceLostSession->callback.lock();
+        if (clientCallback != nullptr) {
+            clientCallback->onTranscodingPaused(sessionKey.second);
+        }
+        if (mResourcePolicy != nullptr) {
+            mResourcePolicy->setPidResourceLost(resourceLostSession->request.clientPid);
+        }
+        mResourceLost = true;
+
+        validateState_l();
+    });
+}
+
+void TranscodingSessionController::onTopUidsChanged(const std::unordered_set<uid_t>& uids) {
+    if (uids.empty()) {
+        ALOGW("%s: ignoring empty uids", __FUNCTION__);
+        return;
+    }
+
+    std::string uidStr;
+    for (auto it = uids.begin(); it != uids.end(); it++) {
+        if (!uidStr.empty()) {
+            uidStr += ", ";
+        }
+        uidStr += std::to_string(*it);
+    }
+
+    ALOGD("%s: topUids: size %zu, uids: %s", __FUNCTION__, uids.size(), uidStr.c_str());
+
+    std::scoped_lock lock{mLock};
+
+    moveUidsToTop_l(uids, true /*preserveTopUid*/);
+
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onUidGone(uid_t goneUid) {
+    ALOGD("%s: gone uid %u", __FUNCTION__, goneUid);
+
+    std::list<SessionKeyType> sessionsToRemove, sessionsForOtherUids;
+
+    std::scoped_lock lock{mLock};
+
+    for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
+        if (it->second.allClientUids.count(goneUid) > 0) {
+            // If goneUid is the only uid, remove the session; otherwise, only
+            // remove the uid from the session.
+            if (it->second.allClientUids.size() > 1) {
+                sessionsForOtherUids.push_back(it->first);
+            } else {
+                sessionsToRemove.push_back(it->first);
+            }
+        }
+    }
+
+    for (auto it = sessionsToRemove.begin(); it != sessionsToRemove.end(); ++it) {
+        // If the session has ever been started, stop it now.
+        // Note that stop() is needed even if the session is currently paused. This instructs
+        // the transcoder to discard any state for the session; otherwise that state may
+        // never be discarded.
+        if (mSessionMap[*it].getState() != Session::NOT_STARTED) {
+            mTranscoder->stop(it->first, it->second);
+        }
+
+        {
+            auto clientCallback = mSessionMap[*it].callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(it->second,
+                                                    TranscodingErrorCode::kUidGoneCancelled);
+            }
+        }
+
+        // Remove the session.
+        removeSession_l(*it, Session::CANCELED);
+    }
+
+    auto keepUid = std::make_shared<std::function<bool(uid_t)>>(
+            [goneUid](uid_t uid) { return uid != goneUid; });
+    for (auto it = sessionsForOtherUids.begin(); it != sessionsForOtherUids.end(); ++it) {
+        removeSession_l(*it, Session::CANCELED, keepUid);
+    }
+
+    // Start next session.
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onResourceAvailable() {
+    std::scoped_lock lock{mLock};
+
+    if (!mResourceLost) {
+        return;
+    }
+
+    ALOGI("%s", __FUNCTION__);
+
+    mResourceLost = false;
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onThrottlingStarted() {
+    std::scoped_lock lock{mLock};
+
+    if (mThermalThrottling) {
+        return;
+    }
+
+    ALOGI("%s", __FUNCTION__);
+
+    mThermalThrottling = true;
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::onThrottlingStopped() {
+    std::scoped_lock lock{mLock};
+
+    if (!mThermalThrottling) {
+        return;
+    }
+
+    ALOGI("%s", __FUNCTION__);
+
+    mThermalThrottling = false;
+    updateCurrentSession_l();
+
+    validateState_l();
+}
+
+void TranscodingSessionController::validateState_l() {
+#ifdef VALIDATE_STATE
+    LOG_ALWAYS_FATAL_IF(mSessionQueues.count(OFFLINE_UID) != 1,
+                        "mSessionQueues offline queue number is not 1");
+    LOG_ALWAYS_FATAL_IF(*mOfflineUidIterator != OFFLINE_UID,
+                        "mOfflineUidIterator not pointing to offline uid");
+    LOG_ALWAYS_FATAL_IF(mUidSortedList.size() != mSessionQueues.size(),
+                        "mUidSortedList and mSessionQueues size mismatch, %zu vs %zu",
+                        mUidSortedList.size(), mSessionQueues.size());
+
+    int32_t totalSessions = 0;
+    for (auto uid : mUidSortedList) {
+        LOG_ALWAYS_FATAL_IF(mSessionQueues.count(uid) != 1,
+                            "mSessionQueues count for uid %d is not 1", uid);
+        for (auto& sessionKey : mSessionQueues[uid]) {
+            LOG_ALWAYS_FATAL_IF(mSessionMap.count(sessionKey) != 1,
+                                "mSessions count for session %s is not 1",
+                                sessionToString(sessionKey).c_str());
+        }
+
+        totalSessions += mSessionQueues[uid].size();
+    }
+    int32_t totalSessionsAlternative = 0;
+    for (auto const& s : mSessionMap) {
+        totalSessionsAlternative += s.second.allClientUids.size();
+    }
+    LOG_ALWAYS_FATAL_IF(totalSessions != totalSessionsAlternative,
+                        "session count (including dup) from mSessionQueues doesn't match that from "
+                        "mSessionMap, %d vs %d",
+                        totalSessions, totalSessionsAlternative);
+#endif  // VALIDATE_STATE
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingThermalPolicy.cpp b/media/libmediatranscoding/TranscodingThermalPolicy.cpp
new file mode 100644
index 0000000..88f445c
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingThermalPolicy.cpp
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingThermalPolicy"
+
+#include <media/TranscodingDefs.h>
+#include <media/TranscodingThermalPolicy.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+namespace android {
+
+static bool needThrottling(AThermalStatus status) {
+    return (status >= ATHERMAL_STATUS_SEVERE);
+}
+
+//static
+void TranscodingThermalPolicy::onStatusChange(void* data, AThermalStatus status) {
+    TranscodingThermalPolicy* policy = static_cast<TranscodingThermalPolicy*>(data);
+    policy->onStatusChange(status);
+}
+
+TranscodingThermalPolicy::TranscodingThermalPolicy()
+      : mRegistered(false), mThermalManager(nullptr), mIsThrottling(false) {
+    registerSelf();
+}
+
+TranscodingThermalPolicy::~TranscodingThermalPolicy() {
+    unregisterSelf();
+}
+
+void TranscodingThermalPolicy::registerSelf() {
+    ALOGI("TranscodingThermalPolicy: registerSelf");
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (mRegistered) {
+        return;
+    }
+
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        AThermalManager* thermalManager = AThermal_acquireManager();
+        if (thermalManager == nullptr) {
+            ALOGE("Failed to acquire thermal manager");
+            return;
+        }
+
+        int ret = AThermal_registerThermalStatusListener(thermalManager, onStatusChange, this);
+        if (ret != 0) {
+            ALOGE("Failed to register thermal status listener");
+            AThermal_releaseManager(thermalManager);
+            return;
+        }
+
+        mIsThrottling = needThrottling(AThermal_getCurrentThermalStatus(thermalManager));
+        mThermalManager = thermalManager;
+    }
+
+    mRegistered = true;
+}
+
+void TranscodingThermalPolicy::unregisterSelf() {
+    ALOGI("TranscodingThermalPolicy: unregisterSelf");
+
+    std::scoped_lock lock{mRegisteredLock};
+
+    if (!mRegistered) {
+        return;
+    }
+
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (mThermalManager != nullptr) {
+            // Unregister listener
+            int ret =
+                    AThermal_unregisterThermalStatusListener(mThermalManager, onStatusChange, this);
+            if (ret != 0) {
+                ALOGW("Failed to unregister thermal status listener");
+            }
+            AThermal_releaseManager(mThermalManager);
+            mThermalManager = nullptr;
+        }
+    }
+
+    mRegistered = false;
+}
+
+void TranscodingThermalPolicy::setCallback(
+        const std::shared_ptr<ThermalPolicyCallbackInterface>& cb) {
+    std::scoped_lock lock{mCallbackLock};
+    mThermalPolicyCallback = cb;
+}
+
+bool TranscodingThermalPolicy::getThrottlingStatus() {
+    std::scoped_lock lock{mRegisteredLock};
+    return mIsThrottling;
+}
+
+void TranscodingThermalPolicy::onStatusChange(AThermalStatus status) {
+    bool isThrottling = needThrottling(status);
+
+    {
+        std::scoped_lock lock{mRegisteredLock};
+        if (isThrottling == mIsThrottling) {
+            return;
+        }
+        ALOGI("Transcoding thermal throttling changed: %d", isThrottling);
+        mIsThrottling = isThrottling;
+    }
+
+    std::scoped_lock lock{mCallbackLock};
+    std::shared_ptr<ThermalPolicyCallbackInterface> cb;
+    if ((cb = mThermalPolicyCallback.lock()) != nullptr) {
+        if (isThrottling) {
+            cb->onThrottlingStarted();
+        } else {
+            cb->onThrottlingStopped();
+        }
+    }
+}
+}  // namespace android
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/libmediatranscoding/TranscodingUidPolicy.cpp
new file mode 100644
index 0000000..f33c54c
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingUidPolicy.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingUidPolicy"
+
+#include <android/activity_manager.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <inttypes.h>
+#include <media/TranscodingDefs.h>
+#include <media/TranscodingUidPolicy.h>
+#include <utils/Log.h>
+
+#include <utility>
+
+namespace android {
+
+constexpr static uid_t OFFLINE_UID = -1;
+constexpr static int32_t IMPORTANCE_UNKNOWN = INT32_MAX;
+
+TranscodingUidPolicy::TranscodingUidPolicy()
+      : mUidObserver(nullptr), mRegistered(false), mTopUidState(IMPORTANCE_UNKNOWN) {
+    registerSelf();
+}
+
+TranscodingUidPolicy::~TranscodingUidPolicy() {
+    unregisterSelf();
+}
+
+void TranscodingUidPolicy::OnUidImportance(uid_t uid, int32_t uidImportance, void* cookie) {
+    TranscodingUidPolicy* owner = reinterpret_cast<TranscodingUidPolicy*>(cookie);
+    owner->onUidStateChanged(uid, uidImportance);
+}
+
+void TranscodingUidPolicy::registerSelf() {
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        mUidObserver = AActivityManager_addUidImportanceListener(&OnUidImportance, -1, (void*)this);
+    }
+
+    if (mUidObserver == nullptr) {
+        ALOGE("Failed to register uid observer");
+        return;
+    }
+
+    Mutex::Autolock _l(mUidLock);
+    mRegistered = true;
+    ALOGI("Registered uid observer");
+}
+
+void TranscodingUidPolicy::unregisterSelf() {
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        AActivityManager_removeUidImportanceListener(mUidObserver);
+        mUidObserver = nullptr;
+
+        Mutex::Autolock _l(mUidLock);
+        mRegistered = false;
+        ALOGI("Unregistered uid observer");
+    } else {
+        ALOGE("Failed to unregister uid observer");
+    }
+}
+
+void TranscodingUidPolicy::setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) {
+    mUidPolicyCallback = cb;
+}
+
+void TranscodingUidPolicy::registerMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+    if (uid == OFFLINE_UID) {
+        ALOGW("Ignoring the offline uid");
+        return;
+    }
+    if (mUidStateMap.find(uid) != mUidStateMap.end()) {
+        ALOGE("%s: Trying to register uid: %d which is already monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    int32_t state = IMPORTANCE_UNKNOWN;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        if (mRegistered && AActivityManager_isUidActive(uid)) {
+            state = AActivityManager_getUidImportance(uid);
+        }
+    }
+
+    ALOGV("%s: inserting new uid: %u, procState %d", __FUNCTION__, uid, state);
+
+    mUidStateMap.emplace(std::pair<uid_t, int32_t>(uid, state));
+    mStateUidMap[state].insert(uid);
+
+    updateTopUid_l();
+}
+
+void TranscodingUidPolicy::unregisterMonitorUid(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    auto it = mUidStateMap.find(uid);
+    if (it == mUidStateMap.end()) {
+        ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
+        return;
+    }
+
+    auto stateIt = mStateUidMap.find(it->second);
+    if (stateIt != mStateUidMap.end()) {
+        stateIt->second.erase(uid);
+        if (stateIt->second.empty()) {
+            mStateUidMap.erase(stateIt);
+        }
+    }
+    mUidStateMap.erase(it);
+
+    updateTopUid_l();
+}
+
+bool TranscodingUidPolicy::isUidOnTop(uid_t uid) {
+    Mutex::Autolock _l(mUidLock);
+
+    return mTopUidState != IMPORTANCE_UNKNOWN && mTopUidState == getProcState_l(uid);
+}
+
+std::unordered_set<uid_t> TranscodingUidPolicy::getTopUids() const {
+    Mutex::Autolock _l(mUidLock);
+
+    if (mTopUidState == IMPORTANCE_UNKNOWN) {
+        return std::unordered_set<uid_t>();
+    }
+
+    return mStateUidMap.at(mTopUidState);
+}
+
+void TranscodingUidPolicy::onUidStateChanged(uid_t uid, int32_t procState) {
+    ALOGV("onUidStateChanged: uid %u, procState %d", uid, procState);
+
+    bool topUidSetChanged = false;
+    bool isUidGone = false;
+    std::unordered_set<uid_t> topUids;
+    {
+        Mutex::Autolock _l(mUidLock);
+        auto it = mUidStateMap.find(uid);
+        if (it != mUidStateMap.end() && it->second != procState) {
+            isUidGone = (procState == AACTIVITYMANAGER_IMPORTANCE_GONE);
+
+            topUids = mStateUidMap[mTopUidState];
+
+            // Move uid to the new procState.
+            mStateUidMap[it->second].erase(uid);
+            mStateUidMap[procState].insert(uid);
+            it->second = procState;
+
+            updateTopUid_l();
+            if (topUids != mStateUidMap[mTopUidState]) {
+                // Make a copy of the uid set for callback.
+                topUids = mStateUidMap[mTopUidState];
+                topUidSetChanged = true;
+            }
+        }
+    }
+
+    ALOGV("topUidSetChanged: %d, isUidGone %d", topUidSetChanged, isUidGone);
+
+    if (topUidSetChanged) {
+        auto callback = mUidPolicyCallback.lock();
+        if (callback != nullptr) {
+            callback->onTopUidsChanged(topUids);
+        }
+    }
+    if (isUidGone) {
+        auto callback = mUidPolicyCallback.lock();
+        if (callback != nullptr) {
+            callback->onUidGone(uid);
+        }
+    }
+}
+
+void TranscodingUidPolicy::updateTopUid_l() {
+    mTopUidState = IMPORTANCE_UNKNOWN;
+
+    // Find the lowest uid state (ignoring IMPORTANCE_UNKNOWN) with some monitored uids.
+    for (auto stateIt = mStateUidMap.begin(); stateIt != mStateUidMap.end(); stateIt++) {
+        if (stateIt->first != IMPORTANCE_UNKNOWN && !stateIt->second.empty()) {
+            mTopUidState = stateIt->first;
+            break;
+        }
+    }
+
+    ALOGV("%s: top uid state is %d", __FUNCTION__, mTopUidState);
+}
+
+int32_t TranscodingUidPolicy::getProcState_l(uid_t uid) {
+    auto it = mUidStateMap.find(uid);
+    if (it != mUidStateMap.end()) {
+        return it->second;
+    }
+    return IMPORTANCE_UNKNOWN;
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
index 07b6c1a..ad2358e 100644
--- a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
+++ b/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
@@ -16,9 +16,10 @@
 
 package android.media;
 
-import android.media.TranscodingJobParcel;
+import android.media.ITranscodingClient;
+import android.media.ITranscodingClientCallback;
+import android.media.TranscodingSessionParcel;
 import android.media.TranscodingRequestParcel;
-import android.media.ITranscodingServiceClient;
 
 /**
  * Binder interface for MediaTranscodingService.
@@ -48,64 +49,25 @@
     /**
      * Register the client with the MediaTranscodingService.
      *
-     * Client must call this function to register itself with the service in order to perform
-     * transcoding. This function will return a unique positive Id assigned by the service.
-     * Client should save this Id and use it for all the transaction with the service.
+     * Client must call this function to register itself with the service in
+     * order to perform transcoding tasks. This function will return an
+     * ITranscodingClient interface object. The client should save and use it
+     * for all future transactions with the service.
      *
-     * @param client interface for the MediaTranscodingService to call the client.
+     * @param callback client interface for the MediaTranscodingService to call
+     *        the client.
+     * @param clientName name of the client.
      * @param opPackageName op package name of the client.
-     * @param clientUid user id of the client.
-     * @param clientPid process id of the client.
-     * @return a unique positive Id assigned to the client by the service, -1  means failed to
-     * register.
+     * @return an ITranscodingClient interface object, with nullptr indicating
+     *         failure to register.
      */
-    int registerClient(in ITranscodingServiceClient client,
-                       in String opPackageName,
-                       in int clientUid,
-                       in int clientPid);
-
-    /**
-    * Unregister the client with the MediaTranscodingService.
-    *
-    * Client will not be able to perform any more transcoding after unregister.
-    *
-    * @param clientId assigned Id of the client.
-    * @return true if succeeds, false otherwise.
-    */
-    boolean unregisterClient(in int clientId);
+    ITranscodingClient registerClient(
+            in ITranscodingClientCallback callback,
+            in String clientName,
+            in String opPackageName);
 
     /**
     * Returns the number of clients. This is used for debugging.
     */
     int getNumOfClients();
-
-    /**
-     * Submits a transcoding request to MediaTranscodingService.
-     *
-     * @param clientId assigned Id of the client.
-     * @param request a TranscodingRequest contains transcoding configuration.
-     * @param job(output variable) a TranscodingJob generated by the MediaTranscodingService.
-     * @return a unique positive jobId generated by the MediaTranscodingService, -1 means failure.
-     */
-    int submitRequest(in int clientId,
-                      in TranscodingRequestParcel request,
-                      out TranscodingJobParcel job);
-
-    /**
-     * Cancels a transcoding job.
-     *
-     * @param clientId assigned id of the client.
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean cancelJob(in int clientId, in int jobId);
-
-    /**
-     * Queries the job detail associated with a jobId.
-     *
-     * @param jobId a TranscodingJob generated by the MediaTranscodingService.
-     * @param job(output variable) the TranscodingJob associated with the jobId.
-     * @return true if succeeds, false otherwise.
-     */
-    boolean getJobWithId(in int jobId, out TranscodingJobParcel job);
 }
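
A minimal client-side sketch of the registration flow described above, assuming the NDK AIDL-generated bindings for these interfaces; the "media.transcoding" lookup string and the helper name are illustrative assumptions, not part of this change.

#include <aidl/android/media/IMediaTranscodingService.h>
#include <android/binder_manager.h>

using ::aidl::android::media::IMediaTranscodingService;
using ::aidl::android::media::ITranscodingClient;
using ::aidl::android::media::ITranscodingClientCallback;

// Registers with the service and returns the per-client interface, or nullptr on failure.
std::shared_ptr<ITranscodingClient> registerWithService(
        const std::shared_ptr<ITranscodingClientCallback>& callback) {
    // "media.transcoding" is an assumed service name used for illustration only.
    ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
    std::shared_ptr<IMediaTranscodingService> service =
            IMediaTranscodingService::fromBinder(binder);
    if (service == nullptr) {
        return nullptr;
    }
    std::shared_ptr<ITranscodingClient> client;
    // registerClient() now hands back an ITranscodingClient instead of an integer client id.
    ::ndk::ScopedAStatus status =
            service->registerClient(callback, "ExampleClient", "com.example.app", &client);
    return status.isOk() ? client : nullptr;
}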
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
new file mode 100644
index 0000000..9ef9052
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
@@ -0,0 +1,89 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingSessionParcel;
+import android.media.TranscodingRequestParcel;
+
+/**
+ * ITranscodingClient
+ *
+ * Interface for a client to communicate with MediaTranscodingService.
+ *
+ * {@hide}
+ */
+interface ITranscodingClient {
+    /**
+     * Submits a transcoding request to MediaTranscodingService.
+     *
+     * @param request a TranscodingRequest containing the transcoding configuration.
+     * @param session (output variable) a TranscodingSession generated by the MediaTranscodingService.
+     * @return true on success, false otherwise.
+     */
+    boolean submitRequest(in TranscodingRequestParcel request,
+                          out TranscodingSessionParcel session);
+
+    /**
+     * Cancels a transcoding session.
+     *
+     * @param sessionId the id of a TranscodingSession generated by the MediaTranscodingService.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean cancelSession(in int sessionId);
+
+    /**
+     * Queries the session detail associated with a sessionId.
+     *
+     * @param sessionId the id of a TranscodingSession generated by the MediaTranscodingService.
+     * @param session (output variable) the TranscodingSession associated with the sessionId.
+     * @return true if succeeds, false otherwise.
+     */
+    boolean getSessionWithId(in int sessionId, out TranscodingSessionParcel session);
+
+    /**
+     * Add an additional client uid requesting a session.
+     *
+     * @param sessionId the session id to which to add the additional client uid.
+     * @param clientUid the additional client uid to be added.
+     * @return false if the session doesn't exist or the client is already requesting the
+     * session, true otherwise.
+     */
+    boolean addClientUid(in int sessionId, int clientUid);
+
+    /**
+     * Retrieves the (unsorted) list of all clients requesting a session.
+     *
+     * Note that if a session was submitted with offline priority (
+     * TranscodingSessionPriority::kUnspecified), it initially will not be considered requested
+     * by any particular client, because the client could go away any time after the submission.
+     * However, additional uids can be added via addClientUid() after the submission, which
+     * essentially makes the request a real-time request instead of an offline request.
+     *
+     * @param sessionId the session id for which to retrieve the client uid list.
+     * @return the list of client uids requesting the session, or null if the session
+     * doesn't exist.
+     */
+    @nullable
+    int[] getClientUids(in int sessionId);
+
+    /**
+    * Unregister the client with the MediaTranscodingService.
+    *
+    * The client will not be able to perform any more transcoding after unregistering.
+    */
+    void unregister();
+}
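
A hedged usage sketch for the ITranscodingClient methods above, assuming the NDK AIDL-generated C++ signatures (an AIDL boolean return becomes a trailing bool* out-parameter); the file paths are placeholders.

#include <aidl/android/media/ITranscodingClient.h>
#include <aidl/android/media/TranscodingRequestParcel.h>
#include <aidl/android/media/TranscodingSessionParcel.h>

using ::aidl::android::media::ITranscodingClient;
using ::aidl::android::media::TranscodingRequestParcel;
using ::aidl::android::media::TranscodingSessionParcel;

// Submits a request, then queries the session back by its id.
bool submitAndQuery(const std::shared_ptr<ITranscodingClient>& client) {
    TranscodingRequestParcel request;
    request.sourceFilePath = "/data/local/tmp/input.mp4";        // placeholder paths
    request.destinationFilePath = "/data/local/tmp/output.mp4";

    TranscodingSessionParcel session;
    bool ok = false;
    if (!client->submitRequest(request, &session, &ok).isOk() || !ok) {
        return false;
    }

    // The session can later be looked up by the id the service assigned.
    TranscodingSessionParcel queried;
    return client->getSessionWithId(session.sessionId, &queried, &ok).isOk() && ok;
}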
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
new file mode 100644
index 0000000..d7d9b6f
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
@@ -0,0 +1,107 @@
+/**
+ * Copyright (c) 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingErrorCode;
+import android.media.TranscodingSessionParcel;
+import android.media.TranscodingResultParcel;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * ITranscodingClientCallback
+ *
+ * Interface for the MediaTranscodingService to communicate with the client.
+ *
+ * {@hide}
+ */
+interface ITranscodingClientCallback {
+    /**
+    * Called to open a raw file descriptor to access data under a URI.
+    *
+    * @param fileUri The URI of the file.
+    * @param mode The file mode to use. Must be one of ("r", "w", "rw").
+    * @return ParcelFileDescriptor if the file is opened successfully, null otherwise.
+    */
+    ParcelFileDescriptor openFileDescriptor(in @utf8InCpp String fileUri,
+                                            in @utf8InCpp String mode);
+
+    /**
+    * Called when the transcoding associated with the sessionId has started.
+    * This will only be called if the client requested updates on the session's status.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingStarted(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId is paused.
+    * This will only be called if the client requested updates on the session's status.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingPaused(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId is resumed.
+    * This will only be called if the client requested updates on the session's status.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    */
+    oneway void onTranscodingResumed(in int sessionId);
+
+    /**
+    * Called when the transcoding associated with the sessionId finished.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    * @param result contains the transcoded file stats and other transcoding metrics if requested.
+    */
+    oneway void onTranscodingFinished(in int sessionId, in TranscodingResultParcel result);
+
+    /**
+    * Called when the transcoding associated with the sessionId failed.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    * @param errorCode error code that indicates the error.
+    */
+    oneway void onTranscodingFailed(in int sessionId, in TranscodingErrorCode errorCode);
+
+    /**
+    * Called when the await number of the session associated with the sessionId gets updated,
+    * i.e. the number of sessions ahead of it in the session queue changes.
+    *
+    * <p> This will only be called if the client set requestUpdate to true in the TranscodingRequest
+    * submitted to the MediaTranscodingService.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    * @param oldAwaitNumber previous number of sessions ahead of current session.
+    * @param newAwaitNumber updated number of sessions ahead of current session.
+    */
+    oneway void onAwaitNumberOfSessionsChanged(in int sessionId,
+                                           in int oldAwaitNumber,
+                                           in int newAwaitNumber);
+
+    /**
+    * Called when there is an update on the progress of the TranscodingSession.
+    *
+    * <p> This will only be called if the client set requestUpdate to true in the TranscodingRequest
+    * submitted to the MediaTranscodingService.
+    *
+    * @param sessionId sessionId assigned by the MediaTranscodingService upon receiving request.
+    * @param progress an integer ranging from 0 to 100, inclusive.
+    */
+    oneway void onProgressUpdate(in int sessionId, in int progress);
+}
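
A sketch of what a client-side implementation of this callback could look like, assuming the NDK AIDL backend generates a BnTranscodingClientCallback stub; the class stays abstract until the remaining callbacks are also overridden, and the class name is a made-up example.

#include <aidl/android/media/BnTranscodingClientCallback.h>

// Partial callback implementation; only two methods are shown, the rest are analogous.
class ExampleClientCallback : public ::aidl::android::media::BnTranscodingClientCallback {
    ::ndk::ScopedAStatus openFileDescriptor(const std::string& /*fileUri*/,
                                            const std::string& /*mode*/,
                                            ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
        // A real client would open the URI here and hand the fd back to the service.
        return ::ndk::ScopedAStatus::ok();
    }

    ::ndk::ScopedAStatus onProgressUpdate(int32_t /*sessionId*/, int32_t /*progress*/) override {
        // Update UI or bookkeeping with the 0~100 progress value here.
        return ::ndk::ScopedAStatus::ok();
    }

    // onTranscodingStarted/Paused/Resumed/Finished/Failed and
    // onAwaitNumberOfSessionsChanged must also be overridden; omitted for brevity.
};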
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl b/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
deleted file mode 100644
index e23c833..0000000
--- a/media/libmediatranscoding/aidl/android/media/ITranscodingServiceClient.aidl
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Copyright (c) 2019, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.TranscodingErrorCode;
-import android.media.TranscodingJobParcel;
-import android.media.TranscodingResultParcel;
-
-/**
- * ITranscodingServiceClient interface for the MediaTranscodingervice to communicate with the
- * client.
- *
- * {@hide}
- */
-//TODO(hkuang): Implement the interface.
-interface ITranscodingServiceClient {
-    /**
-     * Retrieves the name of the client.
-     */
-    @utf8InCpp String getName();
-
-    /**
-    * Called when the transcoding associated with the jobId finished.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param result contains the transcoded file stats and other transcoding metrics if requested.
-    */
-    oneway void onTranscodingFinished(in int jobId, in TranscodingResultParcel result);
-
-    /**
-    * Called when the transcoding associated with the jobId failed.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param errorCode error code that indicates the error.
-    */
-    oneway void onTranscodingFailed(in int jobId, in TranscodingErrorCode errorCode);
-
-    /**
-    * Called when the transcoding configuration associated with the jobId gets updated, i.e. wait
-    * number in the job queue.
-    *
-    * <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
-    * submitted to the MediaTranscodingService.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param oldAwaitNumber previous number of jobs ahead of current job.
-    * @param newAwaitNumber updated number of jobs ahead of current job.
-    */
-    oneway void onAwaitNumberOfJobsChanged(in int jobId,
-                                           in int oldAwaitNumber,
-                                           in int newAwaitNumber);
-
-    /**
-    * Called when there is an update on the progress of the TranscodingJob.
-    *
-    * <p> This will only be called if client set requestUpdate to be true in the TranscodingRequest
-    * submitted to the MediaTranscodingService.
-    *
-    * @param jobId jobId assigned by the MediaTranscodingService upon receiving request.
-    * @param progress an integer number ranging from 0 ~ 100 inclusive.
-    */
-    oneway void onProgressUpdate(in int jobId, in int progress);
-}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 7f47fdc..fdd86c7 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,11 +23,20 @@
  */
 @Backing(type = "int")
 enum TranscodingErrorCode {
-    kUnknown = 0,
-    kUnsupported = 1,
-    kDecoderError = 2,
-    kEncoderError = 3,
-    kExtractorError = 4,
-    kMuxerError = 5,
-    kInvalidBitstream = 6
+    // Errors exposed to client side.
+    kNoError = 0,
+    kDroppedByService = 1,
+    kServiceUnavailable = 2,
+
+    // Other private errors.
+    kPrivateErrorFirst     = 1000,
+    kUnknown               = kPrivateErrorFirst + 0,
+    kMalformed             = kPrivateErrorFirst + 1,
+    kUnsupported           = kPrivateErrorFirst + 2,
+    kInvalidParameter      = kPrivateErrorFirst + 3,
+    kInvalidOperation      = kPrivateErrorFirst + 4,
+    kErrorIO               = kPrivateErrorFirst + 5,
+    kInsufficientResources = kPrivateErrorFirst + 6,
+    kWatchdogTimeout       = kPrivateErrorFirst + 7,
+    kUidGoneCancelled      = kPrivateErrorFirst + 8,
 }
\ No newline at end of file
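
An illustrative helper (not part of this change) showing how the split above can be used: every code at or beyond kPrivateErrorFirst is internal to the service and should not be surfaced to clients.

#include <aidl/android/media/TranscodingErrorCode.h>

using ::aidl::android::media::TranscodingErrorCode;

// True for the error codes that are meant to be exposed to the client side.
static inline bool isClientVisibleError(TranscodingErrorCode code) {
    return static_cast<int32_t>(code) <
           static_cast<int32_t>(TranscodingErrorCode::kPrivateErrorFirst);
}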
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
deleted file mode 100644
index d912c38..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobParcel.aidl
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.TranscodingRequestParcel;
-
-/**
- * TranscodingJob is generated by the MediaTranscodingService upon receiving a TranscodingRequest.
- * It contains all the necessary configuration generated by the MediaTranscodingService for the
- * TranscodingRequest.
- *
- * {@hide}
- */
-//TODO(hkuang): Implement the parcelable.
-parcelable TranscodingJobParcel {
-    /**
-     * A unique positive Id generated by the MediaTranscodingService.
-     */
-    int jobId;
-
-    /**
-     * The request associated with the TranscodingJob.
-     */
-    TranscodingRequestParcel request;
-
-    /**
-    * Current number of jobs ahead of this job. The service schedules the job based on the priority
-    * passed from the client. Client could specify whether to receive updates when the
-    * awaitNumberOfJobs changes through setting requestProgressUpdate in the TranscodingRequest.
-    */
-    int awaitNumberOfJobs;
-}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
deleted file mode 100644
index 1a5d81a..0000000
--- a/media/libmediatranscoding/aidl/android/media/TranscodingJobPriority.aidl
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * Priority of a transcoding job.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum TranscodingJobPriority {
-    // TODO(hkuang): define what each priority level actually mean.
-    kUnspecified = 0,
-    kLow = 1,
-    /**
-     * 2 ~ 20 is reserved for future use.
-     */
-    kNormal = 21,
-    /**
-     * 22 ~ 30 is reserved for future use.
-     */
-    kHigh = 31,
-}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
index 7b7986d..71f82b8 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
@@ -16,8 +16,11 @@
 
 package android.media;
 
-import android.media.TranscodingJobPriority;
+import android.os.ParcelFileDescriptor;
+import android.media.TranscodingSessionPriority;
+import android.media.TranscodingTestConfig;
 import android.media.TranscodingType;
+import android.media.TranscodingVideoTrackFormat;
 
 /**
  * TranscodingRequest contains the desired configuration for the transcoding.
@@ -27,9 +30,48 @@
 //TODO(hkuang): Implement the parcelable.
 parcelable TranscodingRequestParcel {
     /**
-     * Name of file to be transcoded.
+     * The absolute file path of the source file.
      */
-    @utf8InCpp String fileName;
+    @utf8InCpp String sourceFilePath;
+
+    /*
+     * The file descriptor of the sourceFilePath. If the source fd is provided, the transcoding
+     * service will use this fd instead of calling back to the client side to open the
+     * sourceFilePath. It is the client's responsibility to ensure sourceFd is opened from sourceFilePath.
+     */
+    @nullable ParcelFileDescriptor sourceFd;
+
+    /**
+     * The absolute file path of the destination file.
+     */
+    @utf8InCpp String destinationFilePath;
+
+    /*
+     * The file descriptor of the destinationFilePath. If the destination fd is provided, the
+     * transcoding service will use this fd instead of calling back to the client side to open the
+     * destinationFilePath. It is the client's responsibility to ensure destinationFd is opened
+     * from destinationFilePath.
+     */
+    @nullable ParcelFileDescriptor destinationFd;
+
+    /**
+     * The UID of the client that this transcoding request is for. Only a privileged caller may
+     * set this uid, as only privileged callers can transcode on behalf of other clients.
+     * -1 means not available.
+     */
+    int clientUid = -1;
+
+    /**
+     * The PID of the client that this transcoding request is for. Only a privileged caller may
+     * set this pid, as only privileged callers can transcode on behalf of other clients.
+     * -1 means not available.
+     */
+    int clientPid = -1;
+
+    /**
+     * The package name of the client that this transcoding request is for.
+     */
+    @utf8InCpp String clientPackageName;
 
     /**
      * Type of the transcoding.
@@ -37,22 +79,44 @@
     TranscodingType transcodingType;
 
     /**
-     * Input source file descriptor.
+     * Requested video track format for the transcoding.
+     * Note that the transcoding service will try to fulfill the requested format as much as
+     * possible, subject to hardware and software limitations. The final video track format
+     * will be available in the TranscodingSessionParcel when the session is finished.
      */
-    ParcelFileDescriptor inFd;
-
-    /**
-     * Output transcoded file descriptor.
-     */
-    ParcelFileDescriptor outFd;
+    @nullable TranscodingVideoTrackFormat requestedVideoTrackFormat;
 
     /**
      * Priority of this transcoding. Service will schedule the transcoding based on the priority.
      */
-    TranscodingJobPriority priority;
+    TranscodingSessionPriority priority;
 
     /**
-     * Whether to receive update on progress and change of awaitNumJobs.
+     * Whether to receive updates on progress and on changes of awaitNumberOfSessions.
+     * Defaults to false.
      */
-    boolean requestUpdate;
+    boolean requestProgressUpdate = false;
+
+    /**
+     * Whether to receive updates on the session's start/stop/pause/resume events.
+     * Defaults to false.
+     */
+    boolean requestSessionEventUpdate = false;
+
+    /**
+     * Whether this request is for testing.
+     */
+    boolean isForTesting = false;
+
+    /**
+     * Test configuration. This will be available only when isForTesting is set to true.
+     */
+    @nullable TranscodingTestConfig testConfig;
+
+    /**
+     * Whether to collect the stats of the transcoding.
+     * If this is enabled, the TranscodingSessionStats will be returned in the
+     * TranscodingResultParcel when transcoding finishes.
+     */
+    boolean enableStats = false;
 }
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
index 65c49e7..7826e25 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.TranscodingSessionStats;
+
 /**
  * Result of the transcoding.
  *
@@ -24,9 +26,9 @@
 //TODO(hkuang): Implement the parcelable.
 parcelable TranscodingResultParcel {
     /**
-     * The jobId associated with the TranscodingResult.
+     * The sessionId associated with the TranscodingResult.
      */
-    int jobId;
+    int sessionId;
 
     /**
      * Actual bitrate of the transcoded video in bits per second. This will only present for video
@@ -34,5 +36,9 @@
      */
     int actualBitrateBps;
 
-    // TODO(hkuang): Add more fields.
+    /**
+     * Stats of the transcoding session. This will only be available when the client requests
+     * the stats in the TranscodingRequestParcel.
+     */
+    @nullable TranscodingSessionStats sessionStats;
 }
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
new file mode 100644
index 0000000..3a4a500
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingRequestParcel;
+import android.media.TranscodingVideoTrackFormat;
+
+/**
+ * TranscodingSession is generated by the MediaTranscodingService upon receiving a
+ * TranscodingRequest. It contains all the necessary configuration generated by the
+ * MediaTranscodingService for the TranscodingRequest.
+ *
+ * {@hide}
+ */
+//TODO(hkuang): Implement the parcelable.
+parcelable TranscodingSessionParcel {
+    /**
+     * A unique positive Id generated by the MediaTranscodingService.
+     */
+    int sessionId;
+
+    /**
+     * The request associated with the TranscodingSession.
+     */
+    TranscodingRequestParcel request;
+
+    /**
+     * Output video track's format. This will only be available for video transcoding, and only
+     * when the session is finished.
+     */
+    @nullable TranscodingVideoTrackFormat videoTrackFormat;
+
+    /**
+    * Current number of sessions ahead of this session. The service schedules the session based on
+    * the priority passed from the client. Client could specify whether to receive updates when the
+    * awaitNumberOfSessions changes through setting requestProgressUpdate in the TranscodingRequest.
+    */
+    int awaitNumberOfSessions;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
new file mode 100644
index 0000000..f001484
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Priority of a transcoding session.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum TranscodingSessionPriority {
+    // TODO(hkuang): define what each priority level actually mean.
+    kUnspecified = 0,
+    kLow = 1,
+    /**
+     * 2 ~ 20 is reserved for future use.
+     */
+    kNormal = 21,
+    /**
+     * 22 ~ 30 is reserved for future use.
+     */
+    kHigh = 31,
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
new file mode 100644
index 0000000..b3e7eea
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingSessionStats encapsulates the stats of a TranscodingSession.
+ *
+ * {@hide}
+ */
+parcelable TranscodingSessionStats {
+    /**
+     * System time of when the session was created.
+     */
+    long sessionCreatedTimeUs;
+
+    /**
+     * System time of when the session finished.
+     */
+    long sessionFinishedTimeUs;
+
+    /**
+     * Total time spent on transcoding, excluding the time in pause.
+     */
+    long totalProcessingTimeUs;
+
+    /**
+     * Total time spent on handling the session, including the time in pause.
+     * totalTimeUs is the same as sessionFinishedTimeUs - sessionCreatedTimeUs.
+     */
+    long totalTimeUs;
+}
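
A small illustrative consistency check (not part of this change) for the relationship spelled out in the comments above; the helper name is hypothetical.

#include <aidl/android/media/TranscodingSessionStats.h>

using ::aidl::android::media::TranscodingSessionStats;

// totalTimeUs covers the whole wall-clock span; processing time excludes pauses.
static inline bool statsLookConsistent(const TranscodingSessionStats& stats) {
    return stats.totalTimeUs == stats.sessionFinishedTimeUs - stats.sessionCreatedTimeUs &&
           stats.totalProcessingTimeUs <= stats.totalTimeUs;
}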
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
new file mode 100644
index 0000000..6727974
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TranscodingTestConfig contains the test configuration used in testing.
+ *
+ * {@hide}
+ */
+parcelable TranscodingTestConfig {
+    /**
+     * Passthrough mode used for testing. The transcoding service will assume the destination
+     * path already contains the transcoded result of the source file and return it to the
+     * client directly.
+     */
+    boolean passThroughMode = false;
+
+    /**
+     * Minimum processing time of the session in milliseconds. The service will return the session
+     * result no earlier than processingTotalTimeMs after it starts processing the session. Note
+     * that if the service uses the real MediaTranscoder to do the transcoding, the time spent may
+     * be longer than this.
+     */
+    int processingTotalTimeMs = 0;
+}
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
new file mode 100644
index 0000000..8ed241a
--- /dev/null
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
@@ -0,0 +1,84 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.TranscodingVideoCodecType;
+
+/**
+ * TranscodingVideoTrackFormat contains the video track format of a video.
+ *
+ * TODO(hkuang): Switch to PersistableBundle when b/156428735 is fixed or after we remove
+ * aidl_interface
+ *
+ * Note that TranscodingVideoTrackFormat is used in TranscodingRequestParcel for the client to
+ * specify the desired transcoded video format, and is also used in TranscodingSessionParcel for the
+ * service to notify the client of the final video format used for transcoding.
+ * When used as input in TranscodingRequestParcel, the client only needs to specify the configs
+ * they want to change, e.g. codec or resolution, and all the missing configs will be extracted
+ * from the source video and applied to the destination video.
+ * When used as output in TranscodingSessionParcel, all the configs will be populated to indicate
+ * the final encoder configs used for transcoding.
+ *
+ * {@hide}
+ */
+parcelable TranscodingVideoTrackFormat {
+    /**
+     * Video Codec type.
+     */
+    TranscodingVideoCodecType codecType; // TranscodingVideoCodecType::kUnspecified;
+
+    /**
+     * Width of the video in pixels. -1 means unavailable.
+     */
+    int width = -1;
+
+    /**
+     * Height of the video in pixels. -1 means unavailable.
+     */
+    int height = -1;
+
+    /**
+     * Bitrate in bits per second. -1 means unavailable.
+     */
+    int bitrateBps = -1;
+
+    /**
+     * Codec profile. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int profile = -1;
+
+    /**
+     * Codec level. This must be the same constant as used in MediaCodecInfo.CodecProfileLevel.
+     * -1 means unavailable.
+     */
+    int level = -1;
+
+    /**
+     * Decoder operating rate. This is used to work around the fact that vendors do not boost the
+     * hardware to maximum speed in the transcoding use case. This operating rate will be applied
+     * to the decoder inside MediaTranscoder. -1 means unavailable.
+     */
+    int decoderOperatingRate = -1;
+
+    /**
+     * Encoder operating rate. This is used to work around the fact that vendors do not boost the
+     * hardware to maximum speed in the transcoding use case. This operating rate will be applied
+     * to the encoder inside MediaTranscoder. -1 means unavailable.
+     */
+    int encoderOperatingRate = -1;
+}
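Illustrative only (not part of this change): a client-side sketch of filling in just the fields a request wants to override, assuming the NDK AIDL backend's generated header path and a TranscodingVideoCodecType::kHevc enumerator; all other fields keep their -1 / unspecified defaults so the service derives them from the source.

#include <aidl/android/media/TranscodingVideoCodecType.h>
#include <aidl/android/media/TranscodingVideoTrackFormat.h>

using ::aidl::android::media::TranscodingVideoCodecType;
using ::aidl::android::media::TranscodingVideoTrackFormat;

TranscodingVideoTrackFormat makeHevcFormat() {
    TranscodingVideoTrackFormat format;  // unset fields stay at their -1 / unspecified defaults
    format.codecType = TranscodingVideoCodecType::kHevc;
    format.bitrateBps = 8 * 1000 * 1000;  // request an 8 Mbps target bitrate
    // width/height/profile/level are left at -1 so they are extracted from the source video.
    return format;
}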
diff --git a/media/libmediatranscoding/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..388e2ea
--- /dev/null
+++ b/media/libmediatranscoding/build_and_run_all_unit_tests.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Script to run all transcoding-related tests from subfolders.
+# Run script from this folder.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit 1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount && adb sync
+SYNC_FINISHED=true
+
+# Run the transcoding service tests.
+pushd tests
+. build_and_run_all_unit_tests.sh
+popd
+
+# Run the transcoder tests.
+pushd transcoder/tests/
+. build_and_run_all_unit_tests.sh
+popd
+
diff --git a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
index 0e8dcfd..5ba1ee2 100644
--- a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
+++ b/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
@@ -26,7 +26,7 @@
 namespace android {
 
 /*
- * AdjustableMaxPriorityQueue is a custom max priority queue that helps managing jobs for
+ * AdjustableMaxPriorityQueue is a custom max priority queue that helps manage sessions for
  * MediaTranscodingService.
  *
  * AdjustableMaxPriorityQueue is a wrapper template around the STL's *_heap() functions.
@@ -38,7 +38,7 @@
  */
 template <class T, class Comparator = std::less<T>>
 class AdjustableMaxPriorityQueue {
-   public:
+public:
     typedef typename std::vector<T>::iterator iterator;
     typedef typename std::vector<T>::const_iterator const_iterator;
 
@@ -104,7 +104,7 @@
     /* Return the backbone storage of this PriorityQueue. Mainly used for debugging. */
     const std::vector<T>& getStorage() const { return mHeap; };
 
-   private:
+private:
     std::vector<T> mHeap;
 
     /* Implementation shared by both public push() methods. */
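Illustrative sketch (not part of this change) of the adjust-and-rebuild pattern the comparator-based heap supports; it mirrors the usage in the unit test changes later in this patch, and assumes push() accepts an rvalue as that test does.

#include <media/AdjustableMaxPriorityQueue.h>

namespace {

struct Item {
    int32_t priority;
};

struct ItemComp {
    bool operator()(const Item& lhs, const Item& rhs) const { return lhs.priority < rhs.priority; }
};

void adjustableQueueExample() {
    android::AdjustableMaxPriorityQueue<Item, ItemComp> queue;
    queue.push(Item{1});
    queue.push(Item{3});

    const Item& top = queue.top();      // Item{3}: the highest-priority element comes first.
    (void)top;
    Item popped = queue.consume_top();  // Removes and returns the current top.
    (void)popped;

    // After an element's priority is changed in place (e.g. through a raw pointer kept by the
    // caller, as the unit test does), rebuild() restores the heap property.
    queue.rebuild();
}

}  // namespace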
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
new file mode 100644
index 0000000..9311e2e
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+#define ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Interface for a client to call the controller to schedule or retrieve
+// the status of a session.
+class ControllerClientInterface {
+public:
+    /**
+     * Submits one request to the controller.
+     *
+     * Returns true on success and false on failure. This call will fail if a session identified
+     * by <clientId, sessionId> already exists.
+     */
+    virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid,
+                        uid_t clientUid, const TranscodingRequestParcel& request,
+                        const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+
+    /**
+     * Cancels a session identified by <clientId, sessionId>.
+     *
+     * If sessionId is negative (<0), all of the client's sessions with a specified priority
+     * (i.e., not TranscodingSessionPriority::kUnspecified) will be cancelled. Otherwise, only
+     * the single session <clientId, sessionId> will be cancelled.
+     *
+     * Returns false if a single session is being cancelled but it doesn't exist. Returns
+     * true otherwise.
+     */
+    virtual bool cancel(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+    /**
+     * Retrieves information about a session.
+     *
+     * Returns true and fills in the request if the session exists; returns false otherwise.
+     */
+    virtual bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                            TranscodingRequestParcel* request) = 0;
+
+    /**
+     * Adds an additional client uid requesting the session identified by <clientId, sessionId>.
+     *
+     * Returns false if the session doesn't exist, or the client is already requesting the
+     * session. Returns true otherwise.
+     */
+    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid);
+
+    /**
+     * Retrieves the (unsorted) list of all clients requesting the session identified by
+     * <clientId, sessionId>.
+     *
+     * Note that if a session was submitted with offline priority (
+     * TranscodingSessionPriority::kUnspecified), it initially will not be considered requested
+     * by any particular client, because the client could go away any time after the submission.
+     * However, additional uids could be added via addClientUid() after the submission, which
+     * essentially makes the request a real-time request instead of an offline request.
+     *
+     * Returns false if the session doesn't exist. Returns true otherwise.
+     */
+    virtual bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                               std::vector<int32_t>* out_clientUids);
+
+protected:
+    virtual ~ControllerClientInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_CONTROLLER_CLIENT_INTERFACE_H
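Illustrative sketch (not part of this change) of how a client-facing component might drive the controller through this interface; the client id, request, and callback are placeholders supplied by the caller.

#include <media/ControllerClientInterface.h>

namespace android {

bool submitThenCancelAll(const std::shared_ptr<ControllerClientInterface>& controller,
                         ClientIdType exampleClientId, uid_t uid,
                         const TranscodingRequestParcel& request,
                         const std::weak_ptr<ITranscodingClientCallback>& callback) {
    // Each <clientId, sessionId> pair must be unique; submit() fails if it already exists.
    if (!controller->submit(exampleClientId, /*sessionId=*/0, /*callingUid=*/uid,
                            /*clientUid=*/uid, request, callback)) {
        return false;
    }
    // A negative sessionId cancels all of this client's sessions that were submitted with a
    // specified (non-offline) priority.
    return controller->cancel(exampleClientId, /*sessionId=*/-1);
}

}  // namespace android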
diff --git a/media/libmediatranscoding/include/media/ResourcePolicyInterface.h b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
new file mode 100644
index 0000000..ecce252
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ResourcePolicyInterface.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
+#include <memory>
+namespace android {
+
+class ResourcePolicyCallbackInterface;
+
+// Interface for the SessionController to control the resource status updates.
+class ResourcePolicyInterface {
+public:
+    // Set the associated callback interface to send the events when resource
+    // status changes. (Setting it to nullptr stops the updates.)
+    virtual void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) = 0;
+    virtual void setPidResourceLost(pid_t pid) = 0;
+
+protected:
+    virtual ~ResourcePolicyInterface() = default;
+};
+
+// Interface for notifying the SessionController of a change in resource status.
+class ResourcePolicyCallbackInterface {
+public:
+    // Called when codec resources become available. The controller may use this
+    // as a signal to attempt to restart transcoding sessions that were previously
+    // paused due to temporary resource loss.
+    virtual void onResourceAvailable() = 0;
+
+protected:
+    virtual ~ResourcePolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_RESOURCE_POLICY_INTERFACE_H
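Illustrative stub (not part of this change): the minimal shape of a resource policy, showing how the callback hand-off is expected to work. NoOpResourcePolicy and notifyAvailable() are hypothetical names; a real policy would invoke the callback when reclaimed codec resources become available again.

#include <sys/types.h>

#include <media/ResourcePolicyInterface.h>

namespace android {

class NoOpResourcePolicy : public ResourcePolicyInterface {
public:
    void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) override {
        mCallback = cb;  // Held weakly so the policy does not extend the controller's lifetime.
    }
    void setPidResourceLost(pid_t /*pid*/) override {}

    // Hypothetical hook: fan a "resources available" event out to the controller.
    void notifyAvailable() {
        if (auto cb = mCallback.lock()) {
            cb->onResourceAvailable();
        }
    }

private:
    std::weak_ptr<ResourcePolicyCallbackInterface> mCallback;
};

}  // namespace android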
diff --git a/media/libmediatranscoding/include/media/ThermalPolicyInterface.h b/media/libmediatranscoding/include/media/ThermalPolicyInterface.h
new file mode 100644
index 0000000..1cc0cc1
--- /dev/null
+++ b/media/libmediatranscoding/include/media/ThermalPolicyInterface.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_THERMAL_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_THERMAL_POLICY_INTERFACE_H
+#include <memory>
+
+namespace android {
+
+class ThermalPolicyCallbackInterface;
+
+// Interface for the SessionController to control the thermal policy.
+class ThermalPolicyInterface {
+public:
+    // Set the associated callback interface to send the events when the thermal
+    // status changes.
+    virtual void setCallback(const std::shared_ptr<ThermalPolicyCallbackInterface>& cb) = 0;
+
+    // Get the current thermal throttling status. Returns true if throttling is on,
+    // false otherwise.
+    virtual bool getThrottlingStatus() = 0;
+
+protected:
+    virtual ~ThermalPolicyInterface() = default;
+};
+
+// Interface for notifying the SessionController of thermal throttling status.
+class ThermalPolicyCallbackInterface {
+public:
+    // Called when the session controller should start or stop thermal throttling.
+    virtual void onThrottlingStarted() = 0;
+    virtual void onThrottlingStopped() = 0;
+
+protected:
+    virtual ~ThermalPolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_THERMAL_POLICY_INTERFACE_H
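Illustrative stub (not part of this change): a fake thermal policy of the kind a test might use. FakeThermalPolicy and setThrottling() are hypothetical names; setThrottling() mimics a thermal status change and drives the controller through the callback interface.

#include <media/ThermalPolicyInterface.h>

namespace android {

class FakeThermalPolicy : public ThermalPolicyInterface {
public:
    void setCallback(const std::shared_ptr<ThermalPolicyCallbackInterface>& cb) override {
        mCallback = cb;
    }
    bool getThrottlingStatus() override { return mThrottling; }

    // Hypothetical test hook: flip the throttling state and notify the controller.
    void setThrottling(bool throttling) {
        mThrottling = throttling;
        if (auto cb = mCallback.lock()) {
            if (throttling) {
                cb->onThrottlingStarted();
            } else {
                cb->onThrottlingStopped();
            }
        }
    }

private:
    std::weak_ptr<ThermalPolicyCallbackInterface> mCallback;
    bool mThrottling = false;
};

}  // namespace android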
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
new file mode 100644
index 0000000..3b0bd3b
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+#define ANDROID_MEDIA_TRANSCODER_INTERFACE_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingErrorCode.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <media/TranscodingDefs.h>
+
+namespace android {
+
+using ::aidl::android::media::ITranscodingClientCallback;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingRequestParcel;
+class TranscoderCallbackInterface;
+
+// Interface for the controller to call the transcoder to take actions.
+class TranscoderInterface {
+public:
+    virtual void start(ClientIdType clientId, SessionIdType sessionId,
+                       const TranscodingRequestParcel& request, uid_t callingUid,
+                       const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    virtual void pause(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void resume(ClientIdType clientId, SessionIdType sessionId,
+                        const TranscodingRequestParcel& request, uid_t callingUid,
+                        const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
+    // Stop the specified session. If abandon is true, the transcoder wrapper will be discarded
+    // after the session stops.
+    virtual void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) = 0;
+
+protected:
+    virtual ~TranscoderInterface() = default;
+};
+
+// Interface for the transcoder to notify the controller of the status of
+// the currently running session, or temporary loss of transcoding resources.
+class TranscoderCallbackInterface {
+public:
+    // TODO(chz): determine what parameters are needed here.
+    virtual void onStarted(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onPaused(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onResumed(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onFinish(ClientIdType clientId, SessionIdType sessionId) = 0;
+    virtual void onError(ClientIdType clientId, SessionIdType sessionId,
+                         TranscodingErrorCode err) = 0;
+    virtual void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                                  int32_t progress) = 0;
+    virtual void onHeartBeat(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+    // Called when transcoding becomes temporarily inaccessible due to resource loss.
+    // If there is any session currently running, it will be paused. When the resource contention
+    // is resolved, the controller should call TranscoderInterface's start() or resume() to either
+    // start a new session or resume a paused session.
+    virtual void onResourceLost(ClientIdType clientId, SessionIdType sessionId) = 0;
+
+protected:
+    virtual ~TranscoderCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_INTERFACE_H
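Illustrative sketch (not part of this change): a transcoder stand-in that completes every session immediately, showing which callbacks pair with each TranscoderInterface call. ImmediateFinishTranscoder is a hypothetical name, not an existing class in this patch.

#include <media/TranscoderInterface.h>

namespace android {

class ImmediateFinishTranscoder : public TranscoderInterface {
public:
    explicit ImmediateFinishTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb)
          : mCallback(cb) {}

    void start(ClientIdType clientId, SessionIdType sessionId,
               const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
               const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
        if (auto cb = mCallback.lock()) {
            cb->onStarted(clientId, sessionId);
            cb->onFinish(clientId, sessionId);  // Report immediate completion.
        }
    }
    void pause(ClientIdType clientId, SessionIdType sessionId) override {
        if (auto cb = mCallback.lock()) cb->onPaused(clientId, sessionId);
    }
    void resume(ClientIdType clientId, SessionIdType sessionId,
                const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
        if (auto cb = mCallback.lock()) cb->onResumed(clientId, sessionId);
    }
    void stop(ClientIdType /*clientId*/, SessionIdType /*sessionId*/, bool /*abandon*/) override {}

private:
    std::weak_ptr<TranscoderCallbackInterface> mCallback;
};

}  // namespace android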
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
new file mode 100644
index 0000000..d3d4c86
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_TRANSCODER_WRAPPER_H
+#define ANDROID_TRANSCODER_WRAPPER_H
+
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <media/TranscoderInterface.h>
+#include <media/TranscodingLogger.h>
+
+#include <chrono>
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+class MediaTranscoder;
+class Parcelable;
+
+/*
+ * Wrapper class around MediaTranscoder.
+ * Implements TranscoderInterface for TranscodingSessionController to use.
+ */
+class TranscoderWrapper : public TranscoderInterface,
+                          public std::enable_shared_from_this<TranscoderWrapper> {
+public:
+    TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+                      const std::shared_ptr<TranscodingLogger>& logger,
+                      int64_t heartBeatIntervalUs);
+    ~TranscoderWrapper();
+
+    // TranscoderInterface
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& request, uid_t callingUid,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void pause(ClientIdType clientId, SessionIdType sessionId) override;
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& request, uid_t callingUid,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
+    // ~TranscoderInterface
+
+private:
+    class CallbackImpl;
+    struct Event {
+        enum Type {
+            NoEvent,
+            Start,
+            Pause,
+            Resume,
+            Stop,
+            Finish,
+            Error,
+            Progress,
+            HeartBeat,
+            Abandon
+        } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+        std::function<void()> runnable;
+        int32_t arg;
+    };
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+
+    std::shared_ptr<CallbackImpl> mTranscoderCb;
+    std::shared_ptr<MediaTranscoder> mTranscoder;
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::shared_ptr<TranscodingLogger> mLogger;
+    std::shared_ptr<AMediaFormat> mSrcFormat;
+    std::shared_ptr<AMediaFormat> mDstFormat;
+    int64_t mHeartBeatIntervalUs;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue;  // GUARDED_BY(mLock);
+    std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
+    ClientIdType mCurrentClientId;
+    SessionIdType mCurrentSessionId;
+    uid_t mCurrentCallingUid;
+    std::chrono::steady_clock::time_point mTranscodeStartTime;
+
+    // Whether the looper has been created.
+    bool mLooperReady;
+
+    static std::string toString(const Event& event);
+    void onFinish(ClientIdType clientId, SessionIdType sessionId);
+    void onError(ClientIdType clientId, SessionIdType sessionId, media_status_t status);
+    void onProgress(ClientIdType clientId, SessionIdType sessionId, int32_t progress);
+    void onHeartBeat(ClientIdType clientId, SessionIdType sessionId);
+
+    media_status_t handleStart(ClientIdType clientId, SessionIdType sessionId,
+                               const TranscodingRequestParcel& request, uid_t callingUid,
+                               const std::shared_ptr<ITranscodingClientCallback>& callback);
+    media_status_t handlePause(ClientIdType clientId, SessionIdType sessionId);
+    media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
+                                const TranscodingRequestParcel& request, uid_t callingUid,
+                                const std::shared_ptr<ITranscodingClientCallback>& callback);
+    media_status_t setupTranscoder(
+            ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+            uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& callback,
+            TranscodingLogger::SessionEndedReason* failureReason /* nonnull */,
+            const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+    void cleanup();
+    void logSessionEnded(const TranscodingLogger::SessionEndedReason& reason, int error);
+    void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
+    void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                    const std::function<void()> runnable, int32_t arg = 0);
+    void threadLoop();
+};
+
+}  // namespace android
+#endif  // ANDROID_TRANSCODER_WRAPPER_H
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/libmediatranscoding/include/media/TranscodingClientManager.h
index eec120a..be55c78 100644
--- a/media/libmediatranscoding/include/media/TranscodingClientManager.h
+++ b/media/libmediatranscoding/include/media/TranscodingClientManager.h
@@ -17,73 +17,77 @@
 #ifndef ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 #define ANDROID_MEDIA_TRANSCODING_CLIENT_MANAGER_H
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <android/binder_ibinder.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
 #include <sys/types.h>
 #include <utils/Condition.h>
-#include <utils/RefBase.h>
 #include <utils/String8.h>
 #include <utils/Vector.h>
 
+#include <map>
 #include <mutex>
 #include <unordered_map>
+#include <unordered_set>
+
+#include "ControllerClientInterface.h"
 
 namespace android {
 
-using ::aidl::android::media::ITranscodingServiceClient;
-
-class MediaTranscodingService;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 
 /*
  * TranscodingClientManager manages all the transcoding clients across different processes.
  *
- * TranscodingClientManager is a global singleton that could only acquired by
- * MediaTranscodingService. It manages all the clients's registration/unregistration and clients'
- * information. It also bookkeeps all the clients' information. It also monitors to the death of the
+ * TranscodingClientManager manages all clients' registration/unregistration and bookkeeps the
+ * clients' information. It also monitors the death of the
  * clients. Upon client's death, it will remove the client from it.
  *
  * TODO(hkuang): Hook up with ResourceManager for resource management.
  * TODO(hkuang): Hook up with MediaMetrics to log all the transactions.
  */
-class TranscodingClientManager {
-   public:
+class TranscodingClientManager : public std::enable_shared_from_this<TranscodingClientManager> {
+public:
     virtual ~TranscodingClientManager();
 
     /**
-     * ClientInfo contains a single client's information.
-     */
-    struct ClientInfo {
-        /* The remote client that this ClientInfo is associated with. */
-        std::shared_ptr<ITranscodingServiceClient> mClient;
-        /* A unique positive Id assigned to the client by the service. */
-        int32_t mClientId;
-        /* Process id of the client */
-        int32_t mClientPid;
-        /* User id of the client. */
-        int32_t mClientUid;
-        /* Package name of the client. */
-        std::string mClientOpPackageName;
-
-        ClientInfo(const std::shared_ptr<ITranscodingServiceClient>& client, int64_t clientId,
-                   int32_t pid, int32_t uid, const std::string& opPackageName)
-            : mClient(client),
-              mClientId(clientId),
-              mClientPid(pid),
-              mClientUid(uid),
-              mClientOpPackageName(opPackageName) {}
-    };
-
-    /**
      * Adds a new client to the manager.
      *
-     * The client must have valid clientId, pid, uid and opPackageName, otherwise, this will return
-     * a non-zero errorcode. If the client has already been added, it will also return non-zero
-     * errorcode.
+     * The client must have a valid callback, pid, uid, clientName and opPackageName.
+     * Otherwise, this will return a non-zero errorcode. If the client callback has
+     * already been added, it will also return a non-zero errorcode.
      *
-     * @param client to be added to the manager.
+     * @param callback client callback for the service to call this client.
+     * @param clientName client's name.
+     * @param opPackageName client's package name.
+     * @param client output holding the ITranscodingClient interface for the client
+     *        to use for subsequent communications with the service.
      * @return 0 if client is added successfully, non-zero errorcode otherwise.
      */
-    status_t addClient(std::unique_ptr<ClientInfo> client);
+    status_t addClient(const std::shared_ptr<ITranscodingClientCallback>& callback,
+                       const std::string& clientName, const std::string& opPackageName,
+                       std::shared_ptr<ITranscodingClient>* client);
+
+    /**
+     * Gets the number of clients.
+     */
+    size_t getNumOfClients() const;
+
+    /**
+     * Dump all the client information to the fd.
+     */
+    void dumpAllClients(int fd, const Vector<String16>& args);
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingClientManagerTest;
+    struct ClientImpl;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingClientManager(const std::shared_ptr<ControllerClientInterface>& controller);
+
+    // Checks if a user is trusted (and allowed to submit sessions on behalf of other uids)
+    bool isTrustedCaller(pid_t pid, uid_t uid);
 
     /**
      * Removes an existing client from the manager.
@@ -93,39 +97,24 @@
      * @param clientId id of the client to be removed..
      * @return 0 if client is removed successfully, non-zero errorcode otherwise.
      */
-    status_t removeClient(int32_t clientId);
-
-    /**
-     * Gets the number of clients.
-     */
-    size_t getNumOfClients() const;
-
-    /**
-     * Checks if a client with clientId is already registered.
-     */
-    bool isClientIdRegistered(int32_t clientId) const;
-
-    /**
-     * Dump all the client information to the fd.
-     */
-    void dumpAllClients(int fd, const Vector<String16>& args);
-
-   private:
-    friend class MediaTranscodingService;
-    friend class TranscodingClientManagerTest;
-
-    /** Get the singleton instance of the TranscodingClientManager. */
-    static TranscodingClientManager& getInstance();
-
-    TranscodingClientManager();
+    status_t removeClient(ClientIdType clientId);
 
     static void BinderDiedCallback(void* cookie);
 
     mutable std::mutex mLock;
-    std::unordered_map<int32_t, std::unique_ptr<ClientInfo>> mClientIdToClientInfoMap
+    std::unordered_map<ClientIdType, std::shared_ptr<ClientImpl>> mClientIdToClientMap
             GUARDED_BY(mLock);
+    std::unordered_set<uintptr_t> mRegisteredCallbacks GUARDED_BY(mLock);
 
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    std::shared_ptr<ControllerClientInterface> mSessionController;
+    std::unordered_set<uid_t> mTrustedUids;
+
+    static std::atomic<ClientIdType> sCookieCounter;
+    static std::mutex sCookie2ClientLock;
+    static std::map<ClientIdType, std::shared_ptr<ClientImpl>> sCookie2Client
+            GUARDED_BY(sCookie2ClientLock);
 };
 
 }  // namespace android
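Illustrative sketch (not part of this change) of the registration flow the new addClient() signature implies. The manager instance would come from MediaTranscodingService (construction is restricted to the service and tests); the client name and package name are placeholders.

#include <media/TranscodingClientManager.h>

namespace android {

std::shared_ptr<ITranscodingClient> registerExampleClient(
        TranscodingClientManager& manager,
        const std::shared_ptr<ITranscodingClientCallback>& callback) {
    std::shared_ptr<ITranscodingClient> client;
    status_t err = manager.addClient(callback, /*clientName=*/"example_client",
                                     /*opPackageName=*/"com.example.app", &client);
    if (err != OK) {
        return nullptr;  // Invalid arguments, or the callback was already registered.
    }
    return client;  // Subsequent requests go through this ITranscodingClient.
}

}  // namespace android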
diff --git a/media/libmediatranscoding/include/media/TranscodingDefs.h b/media/libmediatranscoding/include/media/TranscodingDefs.h
new file mode 100644
index 0000000..55e5ad5
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingDefs.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_DEFS_H
+#define ANDROID_MEDIA_TRANSCODING_DEFS_H
+
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+
+// Transcoding uses some APIs available on API31+.
+#define __TRANSCODING_MIN_API__ 31
+
+namespace android {
+
+using ClientIdType = uintptr_t;
+using SessionIdType = int32_t;
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_DEFS_H
diff --git a/media/libmediatranscoding/include/media/TranscodingLogger.h b/media/libmediatranscoding/include/media/TranscodingLogger.h
new file mode 100644
index 0000000..dc24551
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingLogger.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_LOGGER_H
+#define ANDROID_MEDIA_TRANSCODING_LOGGER_H
+
+#include <media/NdkMediaFormat.h>
+#include <utils/Condition.h>
+
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/** Class for logging transcoding events. */
+class TranscodingLogger {
+public:
+    /** The maximum number of atoms pushed to statsd per day. */
+    static constexpr int kMaxAtomsPerDay = 50;
+
+    /** The maximum number of successful transcoding atoms pushed to statsd per day. */
+    static constexpr int kMaxSuccessfulAtomsPerDay = 35;
+
+    /** Reason transcoding session ended. Maps to MediaTranscodingSessionEnded atom's Reason. */
+    enum SessionEndedReason {
+        UNKNOWN = 0,
+        FINISHED,
+        ERROR,
+        PAUSED,
+        CANCELLED,
+        START_FAILED,
+        RESUME_FAILED,
+        CREATE_FAILED,
+        CONFIG_SRC_FAILED,
+        CONFIG_DST_FAILED,
+        CONFIG_TRACK_FAILED,
+        OPEN_SRC_FD_FAILED,
+        OPEN_DST_FD_FAILED,
+        NO_TRACKS,
+    };
+
+    TranscodingLogger();
+    ~TranscodingLogger() = default;
+
+    /**
+     * Logs a transcoding session ended event (MediaTranscodingSessionEnded atom).
+     * @param reason Reason for the transcoding session to end.
+     * @param callingUid UID of the caller connecting to the transcoding service.
+     * @param status Status (error code) of the transcoding session.
+     * @param duration Duration of the transcoding session.
+     * @param srcFormat The source video track format.
+     * @param dstFormat The destination video track format.
+     */
+    void logSessionEnded(enum SessionEndedReason reason, uid_t callingUid, int status,
+                         std::chrono::microseconds duration, AMediaFormat* srcFormat,
+                         AMediaFormat* dstFormat);
+
+private:
+    friend class TranscodingLoggerTest;
+
+    // Function prototype for writing out the session ended atom.
+    using SessionEndedAtomWriter = std::function<int(
+            int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, char const*, int32_t,
+            int32_t, int32_t, int32_t, bool arg12, int32_t, int32_t, char const*, bool)>;
+
+    std::mutex mLock;
+    std::queue<std::pair<std::chrono::steady_clock::time_point, int>> mLastLoggedAtoms
+            GUARDED_BY(mLock);
+    uint32_t mSuccessfulCount = 0;
+    SessionEndedAtomWriter mSessionEndedAtomWriter;
+
+    void logSessionEnded(const std::chrono::steady_clock::time_point& now,
+                         enum SessionEndedReason reason, uid_t callingUid, int status,
+                         std::chrono::microseconds duration, AMediaFormat* srcFormat,
+                         AMediaFormat* dstFormat);
+    bool shouldLogAtom(const std::chrono::steady_clock::time_point& now, int status);
+    // Used for testing to validate what gets sent to statsd.
+    void setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_LOGGER_H
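Illustrative sketch (not part of this change): how a caller such as the transcoder wrapper might report a successfully finished session. The status value 0 is assumed to mean success here; the source and destination formats may be null if unavailable.

#include <media/TranscodingLogger.h>

namespace android {

void logFinishedSession(const std::shared_ptr<TranscodingLogger>& logger, uid_t callingUid,
                        std::chrono::microseconds duration, AMediaFormat* srcFormat,
                        AMediaFormat* dstFormat) {
    logger->logSessionEnded(TranscodingLogger::FINISHED, callingUid,
                            /*status=*/0, duration, srcFormat, dstFormat);
}

}  // namespace android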
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/libmediatranscoding/include/media/TranscodingRequest.h
new file mode 100644
index 0000000..b66ccf8
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingRequest.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_REQUEST_H
+#define ANDROID_MEDIA_TRANSCODING_REQUEST_H
+
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <android/binder_parcel.h>
+
+namespace android {
+
+using ::aidl::android::media::TranscodingRequestParcel;
+
+// Helper class for duplicating a TranscodingRequestParcel
+class TranscodingRequest : public TranscodingRequestParcel {
+public:
+    TranscodingRequest() = default;
+    TranscodingRequest(const TranscodingRequestParcel& parcel) { setTo(parcel); }
+    TranscodingRequest(const TranscodingRequest& request) { setTo(request); }
+    TranscodingRequest& operator=(const TranscodingRequest& request) {
+        setTo(request);
+        return *this;
+    }
+
+private:
+    void setTo(const TranscodingRequestParcel& parcel) {
+        if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+            AParcel* p = AParcel_create();
+            parcel.writeToParcel(p);
+            AParcel_setDataPosition(p, 0);
+            readFromParcel(p);
+            AParcel_delete(p);
+        } else {
+            sourceFilePath = parcel.sourceFilePath;
+            sourceFd = ndk::ScopedFileDescriptor(dup(parcel.sourceFd.get()));
+            destinationFilePath = parcel.destinationFilePath;
+            destinationFd = ndk::ScopedFileDescriptor(dup(parcel.destinationFd.get()));
+            clientUid = parcel.clientUid;
+            clientPid = parcel.clientPid;
+            clientPackageName = parcel.clientPackageName;
+            transcodingType = parcel.transcodingType;
+            requestedVideoTrackFormat = parcel.requestedVideoTrackFormat;
+            priority = parcel.priority;
+            requestProgressUpdate = parcel.requestProgressUpdate;
+            requestSessionEventUpdate = parcel.requestSessionEventUpdate;
+            isForTesting = parcel.isForTesting;
+            testConfig = parcel.testConfig;
+        }
+    }
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_REQUEST_H
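A brief illustrative usage (not part of this change): the helper lets the controller keep its own copy of an incoming request, independent of the binder-owned parcelable (deep-copied via an AParcel round trip on API 31+, or a field-by-field copy with dup'ed file descriptors otherwise). storeRequest() is a hypothetical caller.

#include <media/TranscodingRequest.h>

namespace android {

void storeRequest(const TranscodingRequestParcel& incoming) {
    TranscodingRequest copy(incoming);
    // ... stash `copy` in a session map; `incoming` may now be released by the binder runtime.
    (void)copy;
}

}  // namespace android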
diff --git a/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
new file mode 100644
index 0000000..ee232e7
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
+
+#include <android/binder_auto_utils.h>
+#include <media/ResourcePolicyInterface.h>
+#include <utils/Condition.h>
+
+#include <mutex>
+namespace aidl {
+namespace android {
+namespace media {
+class IResourceObserverService;
+}
+}  // namespace android
+}  // namespace aidl
+
+namespace android {
+
+using ::aidl::android::media::IResourceObserverService;
+
+class TranscodingResourcePolicy : public ResourcePolicyInterface {
+public:
+    explicit TranscodingResourcePolicy();
+    ~TranscodingResourcePolicy();
+
+    void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& cb) override;
+    void setPidResourceLost(pid_t pid) override;
+
+private:
+    struct ResourceObserver;
+    mutable std::mutex mRegisteredLock;
+    bool mRegistered GUARDED_BY(mRegisteredLock);
+    std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
+    std::shared_ptr<ResourceObserver> mObserver;
+
+    mutable std::mutex mCallbackLock;
+    std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
+            GUARDED_BY(mCallbackLock);
+    pid_t mResourceLostPid GUARDED_BY(mCallbackLock);
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    static void BinderDiedCallback(void* cookie);
+
+    void registerSelf();
+    void unregisterSelf();
+    void onResourceAvailable(pid_t pid);
+};  // class TranscodingResourcePolicy
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_RESOURCE_POLICY_H
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/libmediatranscoding/include/media/TranscodingSessionController.h
new file mode 100644
index 0000000..2657889
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingSessionController.h
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+#define ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
+
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <media/ControllerClientInterface.h>
+#include <media/ResourcePolicyInterface.h>
+#include <media/ThermalPolicyInterface.h>
+#include <media/TranscoderInterface.h>
+#include <media/TranscodingRequest.h>
+#include <media/UidPolicyInterface.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+#include <chrono>
+#include <functional>
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
+
+class TranscodingSessionController
+      : public UidPolicyCallbackInterface,
+        public ControllerClientInterface,
+        public TranscoderCallbackInterface,
+        public ResourcePolicyCallbackInterface,
+        public ThermalPolicyCallbackInterface,
+        public std::enable_shared_from_this<TranscodingSessionController> {
+public:
+    virtual ~TranscodingSessionController();
+
+    // ControllerClientInterface
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
+                const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
+    bool cancel(ClientIdType clientId, SessionIdType sessionId) override;
+    bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                    TranscodingRequestParcel* request) override;
+    bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) override;
+    bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                       std::vector<int32_t>* out_clientUids) override;
+    // ~ControllerClientInterface
+
+    // TranscoderCallbackInterface
+    void onStarted(ClientIdType clientId, SessionIdType sessionId) override;
+    void onPaused(ClientIdType clientId, SessionIdType sessionId) override;
+    void onResumed(ClientIdType clientId, SessionIdType sessionId) override;
+    void onFinish(ClientIdType clientId, SessionIdType sessionId) override;
+    void onError(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) override;
+    void onProgressUpdate(ClientIdType clientId, SessionIdType sessionId,
+                          int32_t progress) override;
+    void onHeartBeat(ClientIdType clientId, SessionIdType sessionId) override;
+    void onResourceLost(ClientIdType clientId, SessionIdType sessionId) override;
+    // ~TranscoderCallbackInterface
+
+    // UidPolicyCallbackInterface
+    void onTopUidsChanged(const std::unordered_set<uid_t>& uids) override;
+    void onUidGone(uid_t goneUid) override;
+    // ~UidPolicyCallbackInterface
+
+    // ResourcePolicyCallbackInterface
+    void onResourceAvailable() override;
+    // ~ResourcePolicyCallbackInterface
+
+    // ThermalPolicyCallbackInterface
+    void onThrottlingStarted() override;
+    void onThrottlingStopped() override;
+    // ~ThermalPolicyCallbackInterface
+
+    /**
+     * Dump all the session information to the fd.
+     */
+    void dumpAllSessions(int fd, const Vector<String16>& args);
+
+private:
+    friend class MediaTranscodingService;
+    friend class TranscodingSessionControllerTest;
+
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+    using SessionQueueType = std::list<SessionKeyType>;
+    using TranscoderFactoryType = std::function<std::shared_ptr<TranscoderInterface>(
+            const std::shared_ptr<TranscoderCallbackInterface>&)>;
+
+    struct ControllerConfig {
+        // Watchdog timeout.
+        int64_t watchdogTimeoutUs = 3000000LL;
+        // Threshold of time between finish/start below which a back-to-back start is counted.
+        int32_t pacerBurstThresholdMs = 1000;
+        // Maximum allowed back-to-back start count.
+        int32_t pacerBurstCountQuota = 10;
+        // Maximum allowed back-to-back running time.
+        int32_t pacerBurstTimeQuotaSeconds = 120;  // 2-min
+    };
+
+    struct Session {
+        enum State {
+            INVALID = -1,
+            NOT_STARTED = 0,
+            RUNNING,
+            PAUSED,
+            // The following states will not appear in the live sessions map, but could
+            // appear in the past sessions map for logging purposes.
+            FINISHED,
+            CANCELED,
+            ERROR,
+            DROPPED_BY_PACER,
+        };
+        SessionKeyType key;
+        uid_t callingUid;
+        std::unordered_set<uid_t> allClientUids;
+        int32_t lastProgress = 0;
+        int32_t pauseCount = 0;
+        std::chrono::time_point<std::chrono::steady_clock> stateEnterTime;
+        std::chrono::microseconds waitingTime{0};
+        std::chrono::microseconds runningTime{0};
+        std::chrono::microseconds pausedTime{0};
+
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+
+        // Must use setState to change state.
+        void setState(Session::State state);
+        State getState() const { return state; }
+        bool isRunning() { return state == RUNNING; }
+
+    private:
+        State state = INVALID;
+    };
+
+    struct Watchdog;
+    struct Pacer;
+
+    ControllerConfig mConfig;
+
+    // TODO(chz): call transcoder without global lock.
+    // Use mLock for all entrypoints for now.
+    mutable std::mutex mLock;
+
+    std::map<SessionKeyType, Session> mSessionMap;
+
+    // uid->SessionQueue map (uid == -1: offline queue)
+    std::map<uid_t, SessionQueueType> mSessionQueues;
+
+    // uids, with the head being the most-recently-top app, 2nd item is the
+    // previous top app, etc.
+    std::list<uid_t> mUidSortedList;
+    std::list<uid_t>::iterator mOfflineUidIterator;
+    std::map<uid_t, std::string> mUidPackageNames;
+
+    TranscoderFactoryType mTranscoderFactory;
+    std::shared_ptr<TranscoderInterface> mTranscoder;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+    std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+    std::shared_ptr<ThermalPolicyInterface> mThermalPolicy;
+
+    Session* mCurrentSession;
+    bool mResourceLost;
+    bool mThermalThrottling;
+    std::list<Session> mSessionHistory;
+    std::shared_ptr<Watchdog> mWatchdog;
+    std::shared_ptr<Pacer> mPacer;
+
+    // Only allow MediaTranscodingService and unit tests to instantiate.
+    TranscodingSessionController(const TranscoderFactoryType& transcoderFactory,
+                                 const std::shared_ptr<UidPolicyInterface>& uidPolicy,
+                                 const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
+                                 const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+                                 const ControllerConfig* config = nullptr);
+
+    void dumpSession_l(const Session& session, String8& result, bool closedSession = false);
+    Session* getTopSession_l();
+    void updateCurrentSession_l();
+    void addUidToSession_l(uid_t uid, const SessionKeyType& sessionKey);
+    void removeSession_l(const SessionKeyType& sessionKey, Session::State finalState,
+                         const std::shared_ptr<std::function<bool(uid_t uid)>>& keepUid = nullptr);
+    void moveUidsToTop_l(const std::unordered_set<uid_t>& uids, bool preserveTopUid);
+    void setSessionState_l(Session* session, Session::State state);
+    void notifyClient(ClientIdType clientId, SessionIdType sessionId, const char* reason,
+                      std::function<void(const SessionKeyType&)> func);
+    // Internal state verifier (debug only)
+    void validateState_l();
+
+    static String8 sessionToString(const SessionKeyType& sessionKey);
+    static const char* sessionStateToString(const Session::State sessionState);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_SESSION_CONTROLLER_H
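Illustrative sketch (not part of this change): the shape of a factory the service could pass as the controller's TranscoderFactoryType, wiring TranscoderWrapper in as the TranscoderInterface implementation. The controller's constructor is private (only MediaTranscodingService and tests may create it), so only the factory is shown; the 1-second heartbeat interval is an arbitrary value for illustration.

#include <functional>
#include <memory>

#include <media/TranscoderWrapper.h>

namespace android {

// Matches TranscoderFactoryType:
//   std::shared_ptr<TranscoderInterface>(const std::shared_ptr<TranscoderCallbackInterface>&)
std::function<std::shared_ptr<TranscoderInterface>(
        const std::shared_ptr<TranscoderCallbackInterface>&)>
makeWrapperFactory(const std::shared_ptr<TranscodingLogger>& logger) {
    const int64_t heartBeatIntervalUs = 1'000'000;  // assumed value, for illustration only
    return [logger, heartBeatIntervalUs](
                   const std::shared_ptr<TranscoderCallbackInterface>& cb)
                   -> std::shared_ptr<TranscoderInterface> {
        // TranscoderWrapper implements TranscoderInterface on top of MediaTranscoder.
        return std::make_shared<TranscoderWrapper>(cb, logger, heartBeatIntervalUs);
    };
}

}  // namespace android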
diff --git a/media/libmediatranscoding/include/media/TranscodingThermalPolicy.h b/media/libmediatranscoding/include/media/TranscodingThermalPolicy.h
new file mode 100644
index 0000000..81c6eac
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingThermalPolicy.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_THERMAL_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_THERMAL_POLICY_H
+
+#include <android/thermal.h>
+#include <media/ThermalPolicyInterface.h>
+#include <utils/Condition.h>
+
+#include <mutex>
+
+namespace android {
+
+class TranscodingThermalPolicy : public ThermalPolicyInterface {
+public:
+    explicit TranscodingThermalPolicy();
+    ~TranscodingThermalPolicy();
+
+    void setCallback(const std::shared_ptr<ThermalPolicyCallbackInterface>& cb) override;
+    bool getThrottlingStatus() override;
+
+private:
+    mutable std::mutex mRegisteredLock;
+    bool mRegistered GUARDED_BY(mRegisteredLock);
+
+    mutable std::mutex mCallbackLock;
+    std::weak_ptr<ThermalPolicyCallbackInterface> mThermalPolicyCallback GUARDED_BY(mCallbackLock);
+
+    AThermalManager* mThermalManager;
+    bool mIsThrottling;
+
+    static void onStatusChange(void* data, AThermalStatus status);
+    void onStatusChange(AThermalStatus status);
+    void registerSelf();
+    void unregisterSelf();
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_THERMAL_POLICY_H
diff --git a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
new file mode 100644
index 0000000..4dde5a6
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingUidPolicy.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+#define ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
+
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <media/UidPolicyInterface.h>
+#include <sys/types.h>
+#include <utils/Condition.h>
+
+#include <map>
+#include <mutex>
+#include <unordered_map>
+#include <unordered_set>
+
+struct AActivityManager_UidImportanceListener;
+
+namespace android {
+
+// Observer for the UID lifecycle; provides information about a uid's app
+// priority to the session controller.
+class TranscodingUidPolicy : public UidPolicyInterface {
+public:
+    explicit TranscodingUidPolicy();
+    ~TranscodingUidPolicy();
+
+    // UidPolicyInterface
+    bool isUidOnTop(uid_t uid) override;
+    void registerMonitorUid(uid_t uid) override;
+    void unregisterMonitorUid(uid_t uid) override;
+    std::unordered_set<uid_t> getTopUids() const override;
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override;
+    // ~UidPolicyInterface
+
+private:
+    void onUidStateChanged(uid_t uid, int32_t procState);
+    void registerSelf();
+    void unregisterSelf();
+    int32_t getProcState_l(uid_t uid) NO_THREAD_SAFETY_ANALYSIS;
+    void updateTopUid_l() NO_THREAD_SAFETY_ANALYSIS;
+
+    static void OnUidImportance(uid_t uid, int32_t uidImportance, void* cookie);
+
+    struct ResourceManagerClient;
+    mutable Mutex mUidLock;
+    AActivityManager_UidImportanceListener* mUidObserver;
+
+    bool mRegistered GUARDED_BY(mUidLock);
+    int32_t mTopUidState GUARDED_BY(mUidLock);
+    std::unordered_map<uid_t, int32_t> mUidStateMap GUARDED_BY(mUidLock);
+    std::map<int32_t, std::unordered_set<uid_t>> mStateUidMap GUARDED_BY(mUidLock);
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};  // class TranscodingUidPolicy
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODING_UID_POLICY_H
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/libmediatranscoding/include/media/UidPolicyInterface.h
new file mode 100644
index 0000000..445a2ff
--- /dev/null
+++ b/media/libmediatranscoding/include/media/UidPolicyInterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+#define ANDROID_MEDIA_UID_POLICY_INTERFACE_H
+
+#include <unordered_set>
+
+namespace android {
+
+class UidPolicyCallbackInterface;
+
+// Interface for the controller to query a uid's info.
+class UidPolicyInterface {
+public:
+    // Instruct the uid policy to start monitoring a uid.
+    virtual void registerMonitorUid(uid_t uid) = 0;
+    // Instruct the uid policy to stop monitoring a uid.
+    virtual void unregisterMonitorUid(uid_t uid) = 0;
+    // Whether a uid is among the set of uids that's currently top priority.
+    virtual bool isUidOnTop(uid_t uid) = 0;
+    // Retrieves the set of uids that's currently top priority.
+    virtual std::unordered_set<uid_t> getTopUids() const = 0;
+    // Set the associated callback interface to send the events when uid states change.
+    virtual void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) = 0;
+
+protected:
+    virtual ~UidPolicyInterface() = default;
+};
+
+// Interface for notifying the controller of a change in uid states.
+class UidPolicyCallbackInterface {
+public:
+    // Called when the set of uids that's top priority among the uids of interest
+    // has changed. The receiver of this callback should adjust accordingly.
+    virtual void onTopUidsChanged(const std::unordered_set<uid_t>& uids) = 0;
+
+    // Called when a uid is gone.
+    virtual void onUidGone(uid_t goneUid) = 0;
+
+protected:
+    virtual ~UidPolicyCallbackInterface() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_UID_POLICY_INTERFACE_H
diff --git a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
index d58af4e..a35ca53 100644
--- a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
+++ b/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
@@ -36,7 +36,7 @@
 namespace android {
 
 class IntUniquePtrComp {
-   public:
+public:
     bool operator()(const std::unique_ptr<int>& lhs, const std::unique_ptr<int>& rhs) const {
         return *lhs < *rhs;
     }
@@ -223,19 +223,19 @@
 }
 
 // Test the heap property and make sure it is the same as std::priority_queue.
-TEST(AdjustableMaxPriorityQueueTest, TranscodingJobTest) {
-    // Test data structure that mimics the Transcoding job.
-    struct TranscodingJob {
+TEST(AdjustableMaxPriorityQueueTest, TranscodingSessionTest) {
+    // Test data structure that mimics the Transcoding session.
+    struct TranscodingSession {
         int32_t priority;
         int64_t createTimeUs;
     };
 
-    // The job is arranging according to priority with highest priority comes first.
-    // For the job with the same priority, the job with early createTime will come first.
-    class TranscodingJobComp {
-       public:
-        bool operator()(const std::unique_ptr<TranscodingJob>& lhs,
-                        const std::unique_ptr<TranscodingJob>& rhs) const {
+    // The sessions are arranged according to priority, with the highest priority coming first.
+    // For sessions with the same priority, the session with the earlier createTime comes first.
+    class TranscodingSessionComp {
+    public:
+        bool operator()(const std::unique_ptr<TranscodingSession>& lhs,
+                        const std::unique_ptr<TranscodingSession>& rhs) const {
             if (lhs->priority != rhs->priority) {
                 return lhs->priority < rhs->priority;
             }
@@ -244,46 +244,47 @@
     };
 
     // Map to save each value's position in the heap.
-    std::unordered_map<int, TranscodingJob*> jobIdToJobMap;
+    std::unordered_map<int, TranscodingSession*> sessionIdToSessionMap;
 
-    TranscodingJob testJobs[] = {
-            {1 /*priority*/, 66 /*createTimeUs*/},  // First job,
-            {2 /*priority*/, 67 /*createTimeUs*/},  // Second job,
-            {2 /*priority*/, 66 /*createTimeUs*/},  // Third job,
-            {3 /*priority*/, 68 /*createTimeUs*/},  // Fourth job.
+    TranscodingSession testSessions[] = {
+            {1 /*priority*/, 66 /*createTimeUs*/},  // First session,
+            {2 /*priority*/, 67 /*createTimeUs*/},  // Second session,
+            {2 /*priority*/, 66 /*createTimeUs*/},  // Third session,
+            {3 /*priority*/, 68 /*createTimeUs*/},  // Fourth session.
     };
 
-    AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingJob>, TranscodingJobComp> jobQueue;
+    AdjustableMaxPriorityQueue<std::unique_ptr<TranscodingSession>, TranscodingSessionComp>
+            sessionQueue;
 
-    // Pushes all the jobs into the heap.
-    for (int jobId = 0; jobId < 4; ++jobId) {
-        auto newJob = std::make_unique<TranscodingJob>(testJobs[jobId]);
-        jobIdToJobMap[jobId] = newJob.get();
-        EXPECT_TRUE(jobQueue.push(std::move(newJob)));
+    // Pushes all the sessions into the heap.
+    for (int sessionId = 0; sessionId < 4; ++sessionId) {
+        auto newSession = std::make_unique<TranscodingSession>(testSessions[sessionId]);
+        sessionIdToSessionMap[sessionId] = newSession.get();
+        EXPECT_TRUE(sessionQueue.push(std::move(newSession)));
     }
 
-    // Check the job queue size.
-    EXPECT_EQ(4, jobQueue.size());
+    // Check the session queue size.
+    EXPECT_EQ(4, sessionQueue.size());
 
-    // Check the top and it should be Forth job: (3, 68)
-    const std::unique_ptr<TranscodingJob>& topJob = jobQueue.top();
-    EXPECT_EQ(3, topJob->priority);
-    EXPECT_EQ(68, topJob->createTimeUs);
+    // Check the top; it should be the Fourth session: (3, 68)
+    const std::unique_ptr<TranscodingSession>& topSession = sessionQueue.top();
+    EXPECT_EQ(3, topSession->priority);
+    EXPECT_EQ(68, topSession->createTimeUs);
 
     // Consume the top.
-    std::unique_ptr<TranscodingJob> consumeJob = jobQueue.consume_top();
+    std::unique_ptr<TranscodingSession> consumeSession = sessionQueue.consume_top();
 
-    // Check the top and it should be Third Job (2, 66)
-    const std::unique_ptr<TranscodingJob>& topJob2 = jobQueue.top();
-    EXPECT_EQ(2, topJob2->priority);
-    EXPECT_EQ(66, topJob2->createTimeUs);
+    // Check the top; it should be the Third session: (2, 66)
+    const std::unique_ptr<TranscodingSession>& topSession2 = sessionQueue.top();
+    EXPECT_EQ(2, topSession2->priority);
+    EXPECT_EQ(66, topSession2->createTimeUs);
 
-    // Change the Second job's priority to 4 from (2, 67) -> (4, 67). It should becomes top of the
-    // queue.
-    jobIdToJobMap[1]->priority = 4;
-    jobQueue.rebuild();
-    const std::unique_ptr<TranscodingJob>& topJob3 = jobQueue.top();
-    EXPECT_EQ(4, topJob3->priority);
-    EXPECT_EQ(67, topJob3->createTimeUs);
+    // Change the Second session's priority from 2 to 4, i.e. (2, 67) -> (4, 67). It should
+    // become the top of the queue.
+    sessionIdToSessionMap[1]->priority = 4;
+    sessionQueue.rebuild();
+    const std::unique_ptr<TranscodingSession>& topSession3 = sessionQueue.top();
+    EXPECT_EQ(4, topSession3->priority);
+    EXPECT_EQ(67, topSession3->createTimeUs);
 }
 }  // namespace android
\ No newline at end of file
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 8191b00..603611a 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -1,4 +1,19 @@
 // Build the unit tests for libmediatranscoding.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "test_assets",
+    path: "assets",
+    srcs: ["assets/**/*"],
+}
+
 cc_defaults {
     name: "libmediatranscoding_test_defaults",
 
@@ -8,15 +23,16 @@
     ],
 
     shared_libs: [
+        "libandroid",
         "libbinder_ndk",
         "libcutils",
         "liblog",
         "libutils",
-        "libmediatranscoding"
     ],
 
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
+        "libmediatranscoding",
     ],
 
     cflags: [
@@ -38,6 +54,16 @@
 }
 
 //
+// TranscodingSessionController unit test
+//
+cc_test {
+    name: "TranscodingSessionController_tests",
+    defaults: ["libmediatranscoding_test_defaults"],
+
+    srcs: ["TranscodingSessionController_tests.cpp"],
+}
+
+//
 // AdjustableMaxPriorityQueue unit test
 //
 cc_test {
@@ -45,4 +71,16 @@
     defaults: ["libmediatranscoding_test_defaults"],
 
     srcs: ["AdjustableMaxPriorityQueue_tests.cpp"],
-}
\ No newline at end of file
+}
+
+//
+// TranscodingLogger unit test
+//
+cc_test {
+    name: "TranscodingLogger_tests",
+    defaults: ["libmediatranscoding_test_defaults"],
+    shared_libs: ["libmediandk", "libstatssocket#30"],
+    static_libs: ["libmediatranscoder", "libstatslog_media"],
+
+    srcs: ["TranscodingLogger_tests.cpp"],
+}
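+
+// Note: each cc_test module above builds a standalone unit test binary; in a typical
+// AOSP setup these can be run individually by module name, e.g. atest TranscodingLogger_tests
+// (illustrative invocation, not verified here).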
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index 5d2419d..9233410 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -19,262 +19,647 @@
 // #define LOG_NDEBUG 0
 #define LOG_TAG "TranscodingClientManagerTest"
 
-#include <aidl/android/media/BnTranscodingServiceClient.h>
+#include <aidl/android/media/BnTranscodingClientCallback.h>
 #include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
 #include <android-base/logging.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
 #include <gtest/gtest.h>
+#include <media/ControllerClientInterface.h>
 #include <media/TranscodingClientManager.h>
+#include <media/TranscodingRequest.h>
 #include <utils/Log.h>
 
+#include <list>
+
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
+using ::aidl::android::media::BnTranscodingClientCallback;
+using ::aidl::android::media::IMediaTranscodingService;
+using ::aidl::android::media::TranscodingErrorCode;
+using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingResultParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+using ::aidl::android::media::TranscodingSessionPriority;
 
-constexpr int32_t kInvalidClientId = -1;
-constexpr int32_t kInvalidClientPid = -1;
-constexpr int32_t kInvalidClientUid = -1;
-constexpr const char* kInvalidClientOpPackageName = "";
+constexpr pid_t kInvalidClientPid = -5;
+constexpr pid_t kInvalidClientUid = -10;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientPackage = "";
 
-constexpr int32_t kClientId = 1;
-constexpr int32_t kClientPid = 2;
-constexpr int32_t kClientUid = 3;
-constexpr const char* kClientOpPackageName = "TestClient";
+constexpr const char* kClientName = "TestClientName";
+constexpr const char* kClientPackage = "TestClientPackage";
+constexpr uid_t OFFLINE_UID = -1;
 
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
+#define SESSION(n) (n)
 
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback() { ALOGI("TestClientCallback Created"); }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback destroyed"); };
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
         return Status::ok();
     }
 
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+    Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_sessionId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_sessionId, in_result.sessionId);
+        mEventQueue.push_back(Finished(in_sessionId));
         return Status::ok();
     }
 
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
+    Status onTranscodingFailed(int32_t in_sessionId,
+                               TranscodingErrorCode /*in_errorCode */) override {
+        mEventQueue.push_back(Failed(in_sessionId));
         return Status::ok();
     }
 
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
         return Status::ok();
     }
 
-    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
         return Status::ok();
     }
 
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
+    struct Event {
+        enum {
+            NoEvent,
+            Finished,
+            Failed,
+        } type;
+        SessionIdType sessionId;
+    };
 
-   private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-    TestClient(const TestClient&) = delete;
-    TestClient& operator=(const TestClient&) = delete;
+    static constexpr Event NoEvent = {Event::NoEvent, 0};
+#define DECLARE_EVENT(action) \
+    static Event action(SessionIdType sessionId) { return {Event::action, sessionId}; }
+
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
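+
+    // For reference, DECLARE_EVENT(Finished) expands to:
+    //     static Event Finished(SessionIdType sessionId) { return {Event::Finished, sessionId}; }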
+
+    const Event& popEvent() {
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
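+// Equality on Event lets the tests compare popEvent() results directly with EXPECT_EQ,
+// e.g. EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(sessionId)).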
+bool operator==(const TestClientCallback::Event& lhs, const TestClientCallback::Event& rhs) {
+    return lhs.type == rhs.type && lhs.sessionId == rhs.sessionId;
+}
+
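+// TestController is an in-memory stand-in for the real session controller: it records
+// submitted sessions keyed by (clientId, sessionId) and lets the tests drive client
+// callbacks through finishLastSession() and abortLastSession().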
+struct TestController : public ControllerClientInterface {
+    TestController() { ALOGI("TestController Created"); }
+
+    virtual ~TestController() { ALOGI("TestController Destroyed"); }
+
+    bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*callingUid*/,
+                uid_t clientUid, const TranscodingRequestParcel& request,
+                const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+        if (mSessions.count(sessionKey) > 0) {
+            return false;
+        }
+
+        // This is the secret name we'll check, to test error propagation from
+        // the controller back to the client.
+        if (request.sourceFilePath == "bad_source_file") {
+            return false;
+        }
+
+        if (request.priority == TranscodingSessionPriority::kUnspecified) {
+            clientUid = OFFLINE_UID;
+        }
+
+        mSessions[sessionKey].request = request;
+        mSessions[sessionKey].callback = clientCallback;
+        mSessions[sessionKey].allClientUids.insert(clientUid);
+
+        mLastSession = sessionKey;
+        return true;
+    }
+
+    bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        if (mSessions[sessionKey].allClientUids.count(clientUid) > 0) {
+            return false;
+        }
+        mSessions[sessionKey].allClientUids.insert(clientUid);
+        return true;
+    }
+
+    bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
+                       std::vector<int32_t>* out_clientUids) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        out_clientUids->clear();
+        for (uid_t uid : mSessions[sessionKey].allClientUids) {
+            if (uid != OFFLINE_UID) {
+                out_clientUids->push_back(uid);
+            }
+        }
+        return true;
+    }
+
+    bool cancel(ClientIdType clientId, SessionIdType sessionId) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+        mSessions.erase(sessionKey);
+        return true;
+    }
+
+    bool getSession(ClientIdType clientId, SessionIdType sessionId,
+                    TranscodingRequestParcel* request) override {
+        SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
+        if (mSessions.count(sessionKey) == 0) {
+            return false;
+        }
+
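+        // Copy the stored request back into the caller's parcel via the TranscodingRequest
+        // wrapper (assumed here to derive from TranscodingRequestParcel and provide the
+        // assignment used below).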
+        *(TranscodingRequest*)request = mSessions[sessionKey].request;
+        return true;
+    }
+
+    void finishLastSession() {
+        auto it = mSessions.find(mLastSession);
+        if (it == mSessions.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFinished(
+                        mLastSession.second,
+                        TranscodingResultParcel({mLastSession.second, 0, std::nullopt}));
+            }
+        }
+        mSessions.erase(it);
+    }
+
+    void abortLastSession() {
+        auto it = mSessions.find(mLastSession);
+        if (it == mSessions.end()) {
+            return;
+        }
+        {
+            auto clientCallback = it->second.callback.lock();
+            if (clientCallback != nullptr) {
+                clientCallback->onTranscodingFailed(mLastSession.second,
+                                                    TranscodingErrorCode::kUnknown);
+            }
+        }
+        mSessions.erase(it);
+    }
+
+    struct Session {
+        TranscodingRequest request;
+        std::weak_ptr<ITranscodingClientCallback> callback;
+        std::unordered_set<uid_t> allClientUids;
+    };
+
+    typedef std::pair<ClientIdType, SessionIdType> SessionKeyType;
+    std::map<SessionKeyType, Session> mSessions;
+    SessionKeyType mLastSession;
 };
 
 class TranscodingClientManagerTest : public ::testing::Test {
-   public:
-    TranscodingClientManagerTest() : mClientManager(TranscodingClientManager::getInstance()) {
+public:
+    TranscodingClientManagerTest()
+          : mController(new TestController()),
+            mClientManager(new TranscodingClientManager(mController)) {
         ALOGD("TranscodingClientManagerTest created");
     }
 
     void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-
-        mTestClient = ::ndk::SharedRefBase::make<TestClient>(mService);
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>();
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>();
     }
 
-    void TearDown() override {
-        ALOGI("TranscodingClientManagerTest tear down");
-        mService = nullptr;
-    }
+    void TearDown() override { ALOGI("TranscodingClientManagerTest tear down"); }
 
     ~TranscodingClientManagerTest() { ALOGD("TranscodingClientManagerTest destroyed"); }
 
-    TranscodingClientManager& mClientManager;
-    std::shared_ptr<ITranscodingServiceClient> mTestClient = nullptr;
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
+    void addMultipleClients() {
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &mClient1),
+                OK);
+        EXPECT_NE(mClient1, nullptr);
+
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback2, kClientName, kClientPackage, &mClient2),
+                OK);
+        EXPECT_NE(mClient2, nullptr);
+
+        EXPECT_EQ(
+                mClientManager->addClient(mClientCallback3, kClientName, kClientPackage, &mClient3),
+                OK);
+        EXPECT_NE(mClient3, nullptr);
+
+        EXPECT_EQ(mClientManager->getNumOfClients(), 3);
+    }
+
+    void unregisterMultipleClients() {
+        EXPECT_TRUE(mClient1->unregister().isOk());
+        EXPECT_TRUE(mClient2->unregister().isOk());
+        EXPECT_TRUE(mClient3->unregister().isOk());
+        EXPECT_EQ(mClientManager->getNumOfClients(), 0);
+    }
+
+    std::shared_ptr<TestController> mController;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
+    std::shared_ptr<ITranscodingClient> mClient1;
+    std::shared_ptr<ITranscodingClient> mClient2;
+    std::shared_ptr<ITranscodingClient> mClient3;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
 };
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kInvalidClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientCallback) {
+    // Add a client with null callback and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(nullptr, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
+//
+//TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
+//    // Add a client with invalid Pid and expect failure.
+//    std::shared_ptr<ITranscodingClient> client;
+//    status_t err = mClientManager->addClient(mClientCallback1,
+//                                             kClientName, kClientPackage, &client);
+//    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
+//}
 
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Pid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kInvalidClientPid, kClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
-}
-
-TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid Uid.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kInvalidClientUid, kClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientName) {
+    // Add a client with invalid name and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kInvalidClientName, kClientPackage,
+                                             &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kInvalidClientOpPackageName);
-
-    // Add the client to the manager and expect failure.
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    // Add a client with an invalid package name and expect failure.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err = mClientManager->addClient(mClientCallback1, kClientName, kInvalidClientPackage,
+                                             &client);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ILLEGAL_ARGUMENT);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingValidClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    // Add a valid client, should succeed.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 1);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingDupliacteClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    std::shared_ptr<ITranscodingClient> dupClient;
+    err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
+    EXPECT_EQ(err, IMediaTranscodingService::ERROR_ALREADY_EXISTS);
+    EXPECT_EQ(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 
-    err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err != OK);
+    err = mClientManager->addClient(mClientCallback1, "dupClient", "dupPackage", &dupClient);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(dupClient.get(), nullptr);
+    EXPECT_EQ(mClientManager->getNumOfClients(), 1);
 
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
+    status = dupClient->unregister();
+    EXPECT_TRUE(status.isOk());
+    EXPECT_EQ(mClientManager->getNumOfClients(), 0);
 }
 
 TEST_F(TranscodingClientManagerTest, TestAddingMultipleClient) {
-    std::shared_ptr<ITranscodingServiceClient> client1 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo1 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client1, kClientId, kClientPid, kClientUid, kClientOpPackageName);
-
-    status_t err = mClientManager.addClient(std::move(clientInfo1));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client2 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo2 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client2, kClientId + 1, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo2));
-    EXPECT_TRUE(err == OK);
-
-    std::shared_ptr<ITranscodingServiceClient> client3 =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-
-    // Create a client with invalid packagename.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo3 =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client3, kClientId + 2, kClientPid, kClientUid, kClientOpPackageName);
-
-    err = mClientManager.addClient(std::move(clientInfo3));
-    EXPECT_TRUE(err == OK);
-
-    size_t numOfClients = mClientManager.getNumOfClients();
-    EXPECT_EQ(numOfClients, 3);
-
-    err = mClientManager.removeClient(kClientId);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 1);
-    EXPECT_TRUE(err == OK);
-
-    err = mClientManager.removeClient(kClientId + 2);
-    EXPECT_TRUE(err == OK);
+    addMultipleClients();
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestRemovingNonExistClient) {
-    status_t err = mClientManager.removeClient(kInvalidClientId);
-    EXPECT_TRUE(err != OK);
+TEST_F(TranscodingClientManagerTest, TestSubmitCancelGetSessions) {
+    addMultipleClients();
 
-    err = mClientManager.removeClient(1000 /* clientId */);
-    EXPECT_TRUE(err != OK);
+    // Test sessionId assignment.
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_desintaion_file_0";
+    TranscodingSessionParcel session;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_desintaion_file_1";
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_desintaion_file_2";
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(2));
+
+    // Test that submitting a bad request (no valid sourceFilePath) fails.
+    TranscodingRequestParcel badRequest;
+    badRequest.sourceFilePath = "bad_source_file";
+    badRequest.destinationFilePath = "bad_destination_file";
+    EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test submit with bad pid/uid.
+    badRequest.sourceFilePath = "test_source_file_3";
+    badRequest.destinationFilePath = "test_destination_file_3";
+    badRequest.clientPid = kInvalidClientPid;
+    badRequest.clientUid = kInvalidClientUid;
+    EXPECT_TRUE(mClient1->submitRequest(badRequest, &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get sessions by id.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+    EXPECT_EQ(session.sessionId, SESSION(2));
+    EXPECT_EQ(session.request.sourceFilePath, "test_source_file_2");
+    EXPECT_TRUE(result);
+
+    // Test get sessions by invalid id fails.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(100), &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test that cancelling a non-existent session fails.
+    EXPECT_TRUE(mClient2->cancelSession(SESSION(100), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test cancel valid sessionId in arbitrary order.
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(2), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(0), &result).isOk());
+    EXPECT_TRUE(result);
+
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Test cancel session again fails.
+    EXPECT_TRUE(mClient1->cancelSession(SESSION(1), &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test get session after cancel fails.
+    EXPECT_TRUE(mClient1->getSessionWithId(SESSION(2), &session, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Test sessionId independence for each client.
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    unregisterMultipleClients();
 }
 
-TEST_F(TranscodingClientManagerTest, TestCheckClientWithClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
+TEST_F(TranscodingClientManagerTest, TestClientCallback) {
+    addMultipleClients();
 
-    std::unique_ptr<TranscodingClientManager::ClientInfo> clientInfo =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    client, kClientId, kClientPid, kClientUid, kClientOpPackageName);
+    TranscodingRequestParcel request;
+    request.sourceFilePath = "test_source_file_name";
+    request.destinationFilePath = "test_destination_file_name";
+    TranscodingSessionParcel session;
+    bool result;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
 
-    status_t err = mClientManager.addClient(std::move(clientInfo));
-    EXPECT_TRUE(err == OK);
+    mController->finishLastSession();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Finished(session.sessionId));
 
-    bool res = mClientManager.isClientIdRegistered(kClientId);
-    EXPECT_TRUE(res);
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
 
-    res = mClientManager.isClientIdRegistered(kInvalidClientId);
-    EXPECT_FALSE(res);
+    mController->abortLastSession();
+    EXPECT_EQ(mClientCallback1->popEvent(), TestClientCallback::Failed(session.sessionId));
+
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(2));
+
+    EXPECT_TRUE(mClient2->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    mController->finishLastSession();
+    EXPECT_EQ(mClientCallback2->popEvent(), TestClientCallback::Finished(session.sessionId));
+
+    unregisterMultipleClients();
 }
 
-}  // namespace android
\ No newline at end of file
+TEST_F(TranscodingClientManagerTest, TestUseAfterUnregister) {
+    // Add a client.
+    std::shared_ptr<ITranscodingClient> client;
+    status_t err =
+            mClientManager->addClient(mClientCallback1, kClientName, kClientPackage, &client);
+    EXPECT_EQ(err, OK);
+    EXPECT_NE(client.get(), nullptr);
+
+    // Submit 2 requests, 1 offline and 1 realtime.
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    bool result;
+
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_destination_file_0";
+    request.priority = TranscodingSessionPriority::kUnspecified;
+    EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+    EXPECT_EQ(session.sessionId, SESSION(0));
+
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_destination_file_1";
+    request.priority = TranscodingSessionPriority::kNormal;
+    EXPECT_TRUE(client->submitRequest(request, &session, &result).isOk() && result);
+    EXPECT_EQ(session.sessionId, SESSION(1));
+
+    // Unregister client, should succeed.
+    Status status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test submit new request after unregister, should fail with ERROR_DISCONNECTED.
+    request.sourceFilePath = "test_source_file_2";
+    request.destinationFilePath = "test_destination_file_2";
+    request.priority = TranscodingSessionPriority::kNormal;
+    status = client->submitRequest(request, &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test cancel sessions after unregister, should fail with ERROR_DISCONNECTED
+    // regardless of realtime or offline session, or whether the sessionId is valid.
+    status = client->cancelSession(SESSION(0), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(SESSION(1), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(SESSION(2), &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    // Test get sessions, should fail with ERROR_DISCONNECTED regardless of realtime
+    // or offline session, or whether the sessionId is valid.
+    status = client->getSessionWithId(SESSION(0), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getSessionWithId(SESSION(1), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->getSessionWithId(SESSION(2), &session, &result);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(TranscodingClientManagerTest, TestAddGetClientUidsInvalidArgs) {
+    addMultipleClients();
+
+    bool result;
+    std::optional<std::vector<int32_t>> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Adding/getting client uids with an invalid or non-existent session id fails.
+    EXPECT_TRUE(mClient1->addClientUid(-1, ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+    EXPECT_TRUE(mClient1->getClientUids(-1, &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_EQ(clientUids, std::nullopt);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(TranscodingClientManagerTest, TestAddGetClientUids) {
+    addMultipleClients();
+
+    bool result;
+    std::optional<std::vector<int32_t>> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Submit one real-time session.
+    request.sourceFilePath = "test_source_file_0";
+    request.destinationFilePath = "test_desintaion_file_0";
+    request.priority = TranscodingSessionPriority::kNormal;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Should have own uid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(0), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
+
+    // Adding invalid client uid should fail.
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), kInvalidClientUid, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Adding own uid again should fail.
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(0), ownUid, &result).isOk());
+    EXPECT_FALSE(result);
+
+    // Submit one offline session.
+    request.sourceFilePath = "test_source_file_1";
+    request.destinationFilePath = "test_desintaion_file_1";
+    request.priority = TranscodingSessionPriority::kUnspecified;
+    EXPECT_TRUE(mClient1->submitRequest(request, &session, &result).isOk());
+    EXPECT_TRUE(result);
+
+    // Should not have own uid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 0);
+
+    // Add own uid (with IMediaTranscodingService::USE_CALLING_UID) again, should succeed.
+    EXPECT_TRUE(
+            mClient1->addClientUid(SESSION(1), IMediaTranscodingService::USE_CALLING_UID, &result)
+                    .isOk());
+    EXPECT_TRUE(result);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    EXPECT_EQ(clientUids->size(), 1);
+    EXPECT_EQ((*clientUids)[0], ownUid);
+
+    // Add more uids, should succeed.
+    int32_t kFakeUid = ::getuid() ^ 0x1;
+    EXPECT_TRUE(mClient1->addClientUid(SESSION(1), kFakeUid, &result).isOk());
+    EXPECT_TRUE(result);
+    EXPECT_TRUE(mClient1->getClientUids(SESSION(1), &clientUids).isOk());
+    EXPECT_NE(clientUids, std::nullopt);
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids->begin(), clientUids->end());
+    EXPECT_EQ(uidSet.size(), 2);
+    EXPECT_EQ(uidSet.count(ownUid), 1);
+    EXPECT_EQ(uidSet.count(kFakeUid), 1);
+
+    unregisterMultipleClients();
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
new file mode 100644
index 0000000..39e5cd4
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingLogger
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLoggerTest"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <chrono>
+
+namespace android {
+
+using Reason = TranscodingLogger::SessionEndedReason;
+
+// Data structure corresponding to MediaTranscodingEnded atom.
+struct SessionEndedAtom {
+    SessionEndedAtom(int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+                     int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight,
+                     char const* srcMime, int32_t srcProfile, int32_t srcLevel, int32_t srcFps,
+                     int32_t srcDurationMs, bool srcIsHdr, int32_t dstWidth, int32_t dstHeight,
+                     char const* dstMime, bool dstIsHdr)
+          : atomCode(atomCode),
+            reason(reason),
+            callingUid(callingUid),
+            status(status),
+            transcoderFps(transcoderFps),
+            srcWidth(srcWidth),
+            srcHeight(srcHeight),
+            srcMime(srcMime),
+            srcProfile(srcProfile),
+            srcLevel(srcLevel),
+            srcFps(srcFps),
+            srcDurationMs(srcDurationMs),
+            srcIsHdr(srcIsHdr),
+            dstWidth(dstWidth),
+            dstHeight(dstHeight),
+            dstMime(dstMime),
+            dstIsHdr(dstIsHdr) {}
+
+    int32_t atomCode;
+    int32_t reason;
+    int32_t callingUid;
+    int32_t status;
+    int32_t transcoderFps;
+    int32_t srcWidth;
+    int32_t srcHeight;
+    std::string srcMime;
+    int32_t srcProfile;
+    int32_t srcLevel;
+    int32_t srcFps;
+    int32_t srcDurationMs;
+    bool srcIsHdr;
+    int32_t dstWidth;
+    int32_t dstHeight;
+    std::string dstMime;
+    bool dstIsHdr;
+};
+
+// Default configuration values.
+static constexpr int32_t kDefaultCallingUid = 1;
+static constexpr std::chrono::microseconds kDefaultTranscodeDuration = std::chrono::seconds{2};
+
+static constexpr int32_t kDefaultSrcWidth = 1920;
+static constexpr int32_t kDefaultSrcHeight = 1080;
+static const std::string kDefaultSrcMime{AMEDIA_MIMETYPE_VIDEO_HEVC};
+static constexpr int32_t kDefaultSrcProfile = 1;    // HEVC Main
+static constexpr int32_t kDefaultSrcLevel = 65536;  // HEVCMainTierLevel51
+static constexpr int32_t kDefaultSrcFps = 30;
+static constexpr int32_t kDefaultSrcFrameCount = 120;
+static constexpr int64_t kDefaultSrcDurationUs = 1000000 * kDefaultSrcFrameCount / kDefaultSrcFps;
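+// With the defaults above this is 1000000 * 120 / 30 = 4,000,000 us (4 s of source video).
+// validateLatestAtom() below checks the logged duration against this value in milliseconds,
+// and the 2 s kDefaultTranscodeDuration gives an expected transcoder rate of 120 / 2 = 60 fps.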
+
+static constexpr int32_t kDefaultDstWidth = 1280;
+static constexpr int32_t kDefaultDstHeight = 720;
+static const std::string kDefaultDstMime{AMEDIA_MIMETYPE_VIDEO_AVC};
+
+// Util for creating a default source video format.
+static AMediaFormat* CreateSrcFormat() {
+    AMediaFormat* fmt = AMediaFormat_new();
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultSrcWidth);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultSrcHeight);
+    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultSrcMime.c_str());
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_PROFILE, kDefaultSrcProfile);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_LEVEL, kDefaultSrcLevel);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_RATE, kDefaultSrcFps);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_COUNT, kDefaultSrcFrameCount);
+    AMediaFormat_setInt64(fmt, AMEDIAFORMAT_KEY_DURATION, kDefaultSrcDurationUs);
+    return fmt;
+}
+
+// Util for creating a default destination video format.
+static AMediaFormat* CreateDstFormat() {
+    AMediaFormat* fmt = AMediaFormat_new();
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultDstWidth);
+    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultDstHeight);
+    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultDstMime.c_str());
+    return fmt;
+}
+
+class TranscodingLoggerTest : public ::testing::Test {
+public:
+    TranscodingLoggerTest() { ALOGI("TranscodingLoggerTest created"); }
+
+    void SetUp() override {
+        ALOGI("TranscodingLoggerTest set up");
+        mLogger.reset(new TranscodingLogger());
+        mLoggedAtoms.clear();
+        mSrcFormat.reset();
+        mDstFormat.reset();
+
+        // Set a custom atom writer that saves all data, so the test can validate it afterwards.
+        mLogger->setSessionEndedAtomWriter(
+                [=](int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+                    int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight, char const* srcMime,
+                    int32_t srcProfile, int32_t srcLevel, int32_t srcFps, int32_t srcDurationMs,
+                    bool srcIsHdr, int32_t dstWidth, int32_t dstHeight, char const* dstMime,
+                    bool dstIsHdr) -> int {
+                    mLoggedAtoms.emplace_back(atomCode, reason, callingUid, status, transcoderFps,
+                                              srcWidth, srcHeight, srcMime, srcProfile, srcLevel,
+                                              srcFps, srcDurationMs, srcIsHdr, dstWidth, dstHeight,
+                                              dstMime, dstIsHdr);
+                    return 0;
+                });
+    }
+
+    void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status,
+                    AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+        mLogger->logSessionEnded(time, reason, kDefaultCallingUid, status,
+                                 kDefaultTranscodeDuration, srcFormat, dstFormat);
+    }
+
+    void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status) {
+        if (!mSrcFormat) {
+            mSrcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+        }
+        if (!mDstFormat) {
+            mDstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+        }
+        logSession(time, reason, status, mSrcFormat.get(), mDstFormat.get());
+    }
+
+    void logSessionFinished(const std::chrono::steady_clock::time_point& time) {
+        logSession(time, Reason::FINISHED, 0);
+    }
+
+    void logSessionFailed(const std::chrono::steady_clock::time_point& time) {
+        logSession(time, Reason::ERROR, AMEDIA_ERROR_UNKNOWN);
+    }
+
+    int logCount() const { return mLoggedAtoms.size(); }
+
+    void validateLatestAtom(Reason reason, int status, bool passthrough = false) {
+        const SessionEndedAtom& atom = mLoggedAtoms.back();
+
+        EXPECT_EQ(atom.atomCode, android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED);
+        EXPECT_EQ(atom.reason, static_cast<int>(reason));
+        EXPECT_EQ(atom.callingUid, kDefaultCallingUid);
+        EXPECT_EQ(atom.status, status);
+        EXPECT_EQ(atom.srcWidth, kDefaultSrcWidth);
+        EXPECT_EQ(atom.srcHeight, kDefaultSrcHeight);
+        EXPECT_EQ(atom.srcMime, kDefaultSrcMime);
+        EXPECT_EQ(atom.srcProfile, kDefaultSrcProfile);
+        EXPECT_EQ(atom.srcLevel, kDefaultSrcLevel);
+        EXPECT_EQ(atom.srcFps, kDefaultSrcFps);
+        EXPECT_EQ(atom.srcDurationMs, kDefaultSrcDurationUs / 1000);
+        EXPECT_FALSE(atom.srcIsHdr);
+        EXPECT_EQ(atom.dstWidth, passthrough ? kDefaultSrcWidth : kDefaultDstWidth);
+        EXPECT_EQ(atom.dstHeight, passthrough ? kDefaultSrcHeight : kDefaultDstHeight);
+        EXPECT_EQ(atom.dstMime, passthrough ? "passthrough" : kDefaultDstMime);
+        EXPECT_FALSE(atom.dstIsHdr);
+
+        // Transcoder frame rate is only present on successful sessions.
+        if (status == AMEDIA_OK) {
+            std::chrono::duration<double> seconds{kDefaultTranscodeDuration};
+            const int32_t transcoderFps =
+                    static_cast<int32_t>(kDefaultSrcFrameCount / seconds.count());
+            EXPECT_EQ(atom.transcoderFps, transcoderFps);
+        } else {
+            EXPECT_EQ(atom.transcoderFps, -1);
+        }
+    }
+
+    void TearDown() override { ALOGI("TranscodingLoggerTest tear down"); }
+    ~TranscodingLoggerTest() { ALOGD("TranscodingLoggerTest destroyed"); }
+
+    std::shared_ptr<TranscodingLogger> mLogger;
+    std::vector<SessionEndedAtom> mLoggedAtoms;
+
+    std::shared_ptr<AMediaFormat> mSrcFormat;
+    std::shared_ptr<AMediaFormat> mDstFormat;
+};
+
+TEST_F(TranscodingLoggerTest, TestDailyLogQuota) {
+    ALOGD("TestDailyLogQuota");
+    auto start = std::chrono::steady_clock::now();
+
+    EXPECT_LT(TranscodingLogger::kMaxSuccessfulAtomsPerDay, TranscodingLogger::kMaxAtomsPerDay);
+
+    // 1. Check that the first kMaxSuccessfulAtomsPerDay successful atoms are logged.
+    for (int i = 0; i < TranscodingLogger::kMaxSuccessfulAtomsPerDay; ++i) {
+        logSessionFinished(start + std::chrono::seconds{i});
+        EXPECT_EQ(logCount(), i + 1);
+    }
+
+    // 2. Check that subsequent successful atoms within the same 24h interval are not logged.
+    for (int i = 1; i < 24; ++i) {
+        logSessionFinished(start + std::chrono::hours{i});
+        EXPECT_EQ(logCount(), TranscodingLogger::kMaxSuccessfulAtomsPerDay);
+    }
+
+    // 3. Check that failed atoms are logged up to kMaxAtomsPerDay.
+    for (int i = TranscodingLogger::kMaxSuccessfulAtomsPerDay;
+         i < TranscodingLogger::kMaxAtomsPerDay; ++i) {
+        logSessionFailed(start + std::chrono::seconds{i});
+        EXPECT_EQ(logCount(), i + 1);
+    }
+
+    // 4. Check that subsequent failed atoms within the same 24h interval are not logged.
+    for (int i = 1; i < 24; ++i) {
+        logSessionFailed(start + std::chrono::hours{i});
+        EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay);
+    }
+
+    // 5. Check that failed and successful atoms are logged again after 24h.
+    logSessionFinished(start + std::chrono::hours{24});
+    EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 1);
+
+    logSessionFailed(start + std::chrono::hours{24} + std::chrono::seconds{1});
+    EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 2);
+}
+
+TEST_F(TranscodingLoggerTest, TestNullFormats) {
+    ALOGD("TestNullFormats");
+    auto srcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+    auto dstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+    auto now = std::chrono::steady_clock::now();
+
+    // Source format null, should not log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, dstFormat.get());
+    EXPECT_EQ(logCount(), 0);
+
+    // Both formats null, should not log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, nullptr /*dstFormat*/);
+    EXPECT_EQ(logCount(), 0);
+
+    // Destination format null (passthrough mode), should log.
+    logSession(now, Reason::FINISHED, AMEDIA_OK, srcFormat.get(), nullptr /*dstFormat*/);
+    EXPECT_EQ(logCount(), 1);
+    validateLatestAtom(Reason::FINISHED, AMEDIA_OK, true /*passthrough*/);
+}
+
+TEST_F(TranscodingLoggerTest, TestAtomContentCorrectness) {
+    ALOGD("TestAtomContentCorrectness");
+    auto now = std::chrono::steady_clock::now();
+
+    // Log and validate a failure.
+    logSession(now, Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+    EXPECT_EQ(logCount(), 1);
+    validateLatestAtom(Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+
+    // Log and validate a success.
+    logSession(now, Reason::FINISHED, AMEDIA_OK);
+    EXPECT_EQ(logCount(), 2);
+    validateLatestAtom(Reason::FINISHED, AMEDIA_OK);
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
new file mode 100644
index 0000000..ef9c4f8
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -0,0 +1,1347 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingSessionController
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingSessionControllerTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gtest/gtest.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingSessionController.h>
+#include <utils/Log.h>
+
+#include <unordered_set>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::TranscodingRequestParcel;
+
+constexpr ClientIdType kClientId = 1000;
+constexpr SessionIdType kClientSessionId = 0;
+constexpr uid_t kClientUid = 5000;
+constexpr pid_t kClientPid = 10000;
+constexpr uid_t kInvalidUid = (uid_t)-1;
+constexpr pid_t kInvalidPid = (pid_t)-1;
+
+#define CLIENT(n) (kClientId + (n))
+#define SESSION(n) (kClientSessionId + (n))
+#define UID(n) (kClientUid + (n))
+#define PID(n) (kClientPid + (n))
+
+class TestUidPolicy : public UidPolicyInterface {
+public:
+    TestUidPolicy() = default;
+    virtual ~TestUidPolicy() = default;
+
+    // UidPolicyInterface
+    void registerMonitorUid(uid_t /*uid*/) override {}
+    void unregisterMonitorUid(uid_t /*uid*/) override {}
+    bool isUidOnTop(uid_t uid) override { return mTopUids.count(uid) > 0; }
+    std::unordered_set<uid_t> getTopUids() const override { return mTopUids; }
+    void setCallback(const std::shared_ptr<UidPolicyCallbackInterface>& cb) override {
+        mUidPolicyCallback = cb;
+    }
+    void setTop(uid_t uid) {
+        std::unordered_set<uid_t> uids = {uid};
+        setTop(uids);
+    }
+    void setTop(const std::unordered_set<uid_t>& uids) {
+        mTopUids = uids;
+        auto uidPolicyCb = mUidPolicyCallback.lock();
+        if (uidPolicyCb != nullptr) {
+            uidPolicyCb->onTopUidsChanged(mTopUids);
+        }
+    }
+
+    std::unordered_set<uid_t> mTopUids;
+    std::weak_ptr<UidPolicyCallbackInterface> mUidPolicyCallback;
+};
+
+class TestResourcePolicy : public ResourcePolicyInterface {
+public:
+    TestResourcePolicy() { reset(); }
+    virtual ~TestResourcePolicy() = default;
+
+    // ResourcePolicyInterface
+    void setCallback(const std::shared_ptr<ResourcePolicyCallbackInterface>& /*cb*/) override {}
+    void setPidResourceLost(pid_t pid) override { mResourceLostPid = pid; }
+    // ~ResourcePolicyInterface
+
+    pid_t getPid() {
+        pid_t result = mResourceLostPid;
+        reset();
+        return result;
+    }
+
+private:
+    void reset() { mResourceLostPid = kInvalidPid; }
+    pid_t mResourceLostPid;
+};
+
+class TestThermalPolicy : public ThermalPolicyInterface {
+public:
+    TestThermalPolicy() = default;
+    virtual ~TestThermalPolicy() = default;
+
+    // ThermalPolicyInterface
+    void setCallback(const std::shared_ptr<ThermalPolicyCallbackInterface>& /*cb*/) override {}
+    bool getThrottlingStatus() { return false; }
+    // ~ThermalPolicyInterface
+
+private:
+};
+
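+// TestTranscoder is a fake transcoder that records start/pause/resume/stop calls as
+// events which the tests pop with popEvent() and compare against expected values.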
+class TestTranscoder : public TranscoderInterface {
+public:
+    TestTranscoder() : mGeneration(0) {}
+    virtual ~TestTranscoder() {}
+
+    // TranscoderInterface
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
+               const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        append(Start(clientId, sessionId));
+    }
+    void pause(ClientIdType clientId, SessionIdType sessionId) override {
+        append(Pause(clientId, sessionId));
+    }
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
+                const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
+        append(Resume(clientId, sessionId));
+    }
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) override {
+        append(abandon ? Abandon(clientId, sessionId) : Stop(clientId, sessionId));
+    }
+
+    void onFinished(ClientIdType clientId, SessionIdType sessionId) {
+        append(Finished(clientId, sessionId));
+    }
+
+    void onFailed(ClientIdType clientId, SessionIdType sessionId, TranscodingErrorCode err) {
+        append(Failed(clientId, sessionId), err);
+    }
+
+    void onCreated() {
+        std::scoped_lock lock{mLock};
+        mGeneration++;
+    }
+
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed, Abandon } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
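+// Declares a static factory that builds an Event of the given type for a client/session pair.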
+#define DECLARE_EVENT(action)                                             \
+    static Event action(ClientIdType clientId, SessionIdType sessionId) { \
+        return {Event::action, clientId, sessionId};                      \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Stop);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+    DECLARE_EVENT(Abandon);
+
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        // Errors are kept in a separate queue; non-error events do not affect it, and
+        // getLastError() consumes one recorded error at a time.
+        if (err != TranscodingErrorCode::kNoError) {
+            mLastErrorQueue.push_back(err);
+        }
+        mCondition.notify_one();
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& popEvent(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    TranscodingErrorCode getLastError() {
+        std::scoped_lock lock{mLock};
+        if (mLastErrorQueue.empty()) {
+            return TranscodingErrorCode::kNoError;
+        }
+        TranscodingErrorCode err = mLastErrorQueue.front();
+        mLastErrorQueue.pop_front();
+        return err;
+    }
+
+    int32_t getGeneration() {
+        std::scoped_lock lock{mLock};
+        return mGeneration;
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    std::list<TranscodingErrorCode> mLastErrorQueue;
+    int32_t mGeneration;
+};
+
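+// Events compare equal on type, client id and session id; error codes are tracked separately.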
+bool operator==(const TestTranscoder::Event& lhs, const TestTranscoder::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
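+// Client callback stub: forwards finish and failure notifications to the owning TestTranscoder
+// so the test can observe them as Finished/Failed events.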
+struct TestClientCallback : public BnTranscodingClientCallback {
+    TestClientCallback(TestTranscoder* owner, ClientIdType clientId, uid_t clientUid)
+          : mOwner(owner), mClientId(clientId), mClientUid(clientUid) {
+        ALOGD("TestClient Created");
+    }
+
+    ClientIdType clientId() const { return mClientId; }
+    uid_t clientUid() const { return mClientUid; }
+
+    Status openFileDescriptor(const std::string& /*in_fileUri*/, const std::string& /*in_mode*/,
+                              ::ndk::ScopedFileDescriptor* /*_aidl_return*/) override {
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingPaused(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingResumed(int32_t /*in_sessionId*/) override { return Status::ok(); }
+
+    Status onTranscodingFinished(int32_t in_sessionId,
+                                 const TranscodingResultParcel& in_result) override {
+        EXPECT_EQ(in_sessionId, in_result.sessionId);
+        ALOGD("TestClientCallback: received onTranscodingFinished");
+        mOwner->onFinished(mClientId, in_sessionId);
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_sessionId, TranscodingErrorCode in_errorCode) override {
+        mOwner->onFailed(mClientId, in_sessionId, in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t /* in_progress */) override {
+        return Status::ok();
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClient destroyed"); }
+
+private:
+    TestTranscoder* mOwner;
+    ClientIdType mClientId;
+    uid_t mClientUid;
+    TestClientCallback(const TestClientCallback&) = delete;
+    TestClientCallback& operator=(const TestClientCallback&) = delete;
+};
+
+class TranscodingSessionControllerTest : public ::testing::Test {
+public:
+    TranscodingSessionControllerTest() { ALOGI("TranscodingSessionControllerTest created"); }
+    ~TranscodingSessionControllerTest() { ALOGD("TranscodingSessionControllerTest destroyed"); }
+
+    void SetUp() override {
+        ALOGI("TranscodingSessionControllerTest set up");
+        mTranscoder.reset(new TestTranscoder());
+        mUidPolicy.reset(new TestUidPolicy());
+        mResourcePolicy.reset(new TestResourcePolicy());
+        mThermalPolicy.reset(new TestThermalPolicy());
+        // Override default burst params with shorter values for testing.
+        TranscodingSessionController::ControllerConfig config = {
+                .pacerBurstThresholdMs = 500,
+                .pacerBurstCountQuota = 10,
+                .pacerBurstTimeQuotaSeconds = 3,
+        };
+        mController.reset(new TranscodingSessionController(
+                [this](const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) {
+                    // Require that the SessionController has dropped all of its references to
+                    // the transcoder object by the time it invokes this creation callback.
+                    EXPECT_EQ(mTranscoder.use_count(), 1);
+                    mTranscoder->onCreated();
+                    return mTranscoder;
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+        mUidPolicy->setCallback(mController);
+
+        // Set priority only, ignore other fields for now.
+        mOfflineRequest.priority = TranscodingSessionPriority::kUnspecified;
+        mRealtimeRequest.priority = TranscodingSessionPriority::kHigh;
+        mClientCallback0 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(0), UID(0));
+        mClientCallback1 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(1), UID(1));
+        mClientCallback2 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(2), UID(2));
+        mClientCallback3 = ::ndk::SharedRefBase::make<TestClientCallback>(mTranscoder.get(),
+                                                                          CLIENT(3), UID(3));
+    }
+
+    void TearDown() override { ALOGI("TranscodingSessionControllerTest tear down"); }
+
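+    // Waits past the ~3-second watchdog timeout and verifies that the running session is
+    // abandoned and then failed with kWatchdogTimeout, and that the controller created a
+    // fresh transcoder instance (generation bumped, only controller and test hold references).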
+    void expectTimeout(int64_t clientId, int32_t sessionId, int32_t generation) {
+        EXPECT_EQ(mTranscoder->popEvent(2900000), TestTranscoder::NoEvent);
+        EXPECT_EQ(mTranscoder->popEvent(200000), TestTranscoder::Abandon(clientId, sessionId));
+        EXPECT_EQ(mTranscoder->popEvent(100000), TestTranscoder::Failed(clientId, sessionId));
+        EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kWatchdogTimeout);
+        // Should have created new transcoder.
+        EXPECT_EQ(mTranscoder->getGeneration(), generation);
+        EXPECT_EQ(mTranscoder.use_count(), 2);
+    }
+
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        false /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithPause(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        true /*pauseLastSuccessSession*/, true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithMultipleUids(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                                         const std::shared_ptr<TestClientCallback>& client,
+                                         const std::vector<int>& additionalClientUids) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, client,
+                        additionalClientUids, false /*pauseLastSuccessSession*/,
+                        true /*useRealCallingUid*/);
+    }
+
+    void testPacerHelperWithSelfUid(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        testPacerHelper(numSubmits, sessionDurationMs, expectedSuccess, mClientCallback0, {},
+                        false /*pauseLastSuccessSession*/, false /*useRealCallingUid*/);
+    }
+
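+    // Core pacer helper: submits numSubmits back-to-back sessions (each running for
+    // sessionDurationMs) and verifies that the first expectedSuccess of them are started
+    // (and finished or errored) while the remainder are dropped by the pacer with
+    // kDroppedByService.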
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
+                         const std::shared_ptr<TestClientCallback>& client,
+                         const std::vector<int>& additionalClientUids, bool pauseLastSuccessSession,
+                         bool useRealCallingUid) {
+        uid_t callingUid = useRealCallingUid ? ::getuid() : client->clientUid();
+        for (int i = 0; i < numSubmits; i++) {
+            mController->submit(client->clientId(), SESSION(i), callingUid, client->clientUid(),
+                                mRealtimeRequest, client);
+            for (int additionalUid : additionalClientUids) {
+                mController->addClientUid(client->clientId(), SESSION(i), additionalUid);
+            }
+        }
+        for (int i = 0; i < expectedSuccess; i++) {
+            EXPECT_EQ(mTranscoder->popEvent(),
+                      TestTranscoder::Start(client->clientId(), SESSION(i)));
+            if ((i == expectedSuccess - 1) && pauseLastSuccessSession) {
+                // Insert a 3-second pause into the last successfully running session.
+                mController->onThrottlingStarted();
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Pause(client->clientId(), SESSION(i)));
+                sleep(3);
+                mController->onThrottlingStopped();
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Resume(client->clientId(), SESSION(i)));
+            }
+            usleep(sessionDurationMs * 1000);
+            // Test half of Finish and half of Error, both should be counted as burst runs.
+            if (i & 1) {
+                mController->onFinish(client->clientId(), SESSION(i));
+                EXPECT_EQ(mTranscoder->popEvent(),
+                          TestTranscoder::Finished(client->clientId(), SESSION(i)));
+            } else {
+                mController->onError(client->clientId(), SESSION(i),
+                                     TranscodingErrorCode::kUnknown);
+                EXPECT_EQ(mTranscoder->popEvent(100000),
+                          TestTranscoder::Failed(client->clientId(), SESSION(i)));
+                EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+            }
+        }
+        for (int i = expectedSuccess; i < numSubmits; i++) {
+            EXPECT_EQ(mTranscoder->popEvent(),
+                      TestTranscoder::Failed(client->clientId(), SESSION(i)));
+            EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kDroppedByService);
+        }
+    }
+
+    std::shared_ptr<TestTranscoder> mTranscoder;
+    std::shared_ptr<TestUidPolicy> mUidPolicy;
+    std::shared_ptr<TestResourcePolicy> mResourcePolicy;
+    std::shared_ptr<TestThermalPolicy> mThermalPolicy;
+    std::shared_ptr<TranscodingSessionController> mController;
+    TranscodingRequestParcel mOfflineRequest;
+    TranscodingRequestParcel mRealtimeRequest;
+    std::shared_ptr<TestClientCallback> mClientCallback0;
+    std::shared_ptr<TestClientCallback> mClientCallback1;
+    std::shared_ptr<TestClientCallback> mClientCallback2;
+    std::shared_ptr<TestClientCallback> mClientCallback3;
+};
+
+TEST_F(TranscodingSessionControllerTest, TestSubmitSession) {
+    ALOGD("TestSubmitSession");
+
+    // Start with UID(1) on top.
+    mUidPolicy->setTop(UID(1));
+
+    // Submit offline session to CLIENT(0) in UID(0).
+    // Should start immediately (because this is the only session).
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
+
+    // Submit real-time session to CLIENT(0).
+    // Should pause the offline session and start the new session, even if UID(0) is not on top.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session to CLIENT(0), should be queued after the previous session.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(1) in the same UID(0); should be queued after the
+    // previous session.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in UID(1).
+    // Should pause previous session and start new session, because UID(1) is
+    // (or has recently been) the top UID.
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Submit offline session, shouldn't generate any event.
+    mController->submit(CLIENT(2), SESSION(1), UID(2), UID(1), mOfflineRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) to top.
+    mUidPolicy->setTop(UID(0));
+    // Should pause current session, and resume last session in UID(0).
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(1)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelSession) {
+    ALOGD("TestCancelSession");
+
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel queued real-time session.
+    // Cancel real-time session SESSION(1), should be cancelled.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(1)));
+
+    // Cancel queued offline session.
+    // Cancel offline session SESSION(2), should be cancelled.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
+
+    // Submit offline session SESSION(3), shouldn't cause any event.
+    mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Cancel running real-time session SESSION(0).
+    // - Should be stopped first then cancelled.
+    // - Should also start offline session SESSION(3) because the real-time queue is empty.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
+
+    // Submit real-time session SESSION(4), offline SESSION(3) should pause and SESSION(4)
+    // should start.
+    mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
+
+    // Cancel paused SESSION(3). SESSION(3) should be stopped.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(3)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelSessionWithMultipleUids) {
+    ALOGD("TestCancelSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+
+    // Add UID(1) to the offline SESSION(2), SESSION(2) should start and SESSION(0) should pause.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Add UID(1) to SESSION(1) as well.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Cancel SESSION(2), should be cancelled and SESSION(1) should start.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Cancel SESSION(1), should be cancelled and SESSION(0) should resume.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(1)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestCancelAllSessionsForClient) {
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    std::vector<int32_t> clientUids;
+    // Make some more uids blocked on the sessions.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 2);
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 2);
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+
+    // Cancel all sessions for CLIENT(0) with -1.
+    // Expect SESSION(0) and SESSION(1) to be gone.
+    // Expect SESSION(2) to remain (kept only for offline) with an empty client uid list,
+    // and to start.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), -1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFinishSession) {
+    ALOGD("TestFinishSession");
+
+    // Start with unspecified top UID.
+    // Finish without any sessions submitted, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should pause offline session and start immediately.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish when the session never started, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+    // new session.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Simulate Finish that arrived late, after pause issued by controller.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mController->onFinish(CLIENT(0), SESSION(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(1)));
+
+    // Finish running real-time session, should start next real-time session in queue.
+    mController->onFinish(CLIENT(1), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Finish running real-time session, should resume next session (offline session) in queue.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Finish running offline session.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+
+    // Duplicate finish for last session, should be ignored.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFinishSessionWithMultipleUids) {
+    ALOGD("TestFinishSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Start with unspecified top uid.
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(2)));
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // SESSION(0) should pause, SESSION(1) should start.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Finish SESSION(1), SESSION(2) (next in line for UID(1)) should start.
+    mController->onFinish(CLIENT(0), SESSION(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+
+    // Finish SESSION(2), SESSION(0) should resume.
+    mController->onFinish(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFailSession) {
+    ALOGD("TestFailSession");
+
+    // Start with unspecified top UID.
+    // Fail without any sessions submitted, should be ignored.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit offline session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should pause offline session and start immediately.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Fail when the session never started, should be ignored.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
+    // new session.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Simulate Fail that arrived late, after pause issued by controller.
+    // Should still be propagated to client, but shouldn't trigger any new start.
+    mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+
+    // Fail running real-time session, should start next real-time session in queue.
+    mController->onError(CLIENT(1), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+
+    // Fail running real-time session, should resume next session (offline session) in queue.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Fail running offline session, and test error code propagation.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+
+    // Duplicate fail for last session, should be ignored.
+    mController->onError(CLIENT(0), SESSION(0), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestFailSessionWithMultipleUids) {
+    ALOGD("TestFailSessionWithMultipleUids");
+    std::vector<int32_t> clientUids;
+
+    // Start with unspecified top uid.
+    // Submit real-time session SESSION(0), should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit real-time session SESSION(1), should not start.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit real-time session SESSION(2), should not start.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // UID(1) moves to top.
+    mUidPolicy->setTop(UID(1));
+    // SESSION(0) should pause, SESSION(1) should start.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    // Add UID(1) and UID(2) to SESSION(2).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(1)));
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(2), UID(2)));
+
+    // Fail SESSION(1), SESSION(2) (next in line for UID(1)) should start.
+    mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+
+    // Fail SESSION(2), SESSION(0) should resume.
+    mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kInvalidOperation);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(2), &clientUids));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidChanged) {
+    ALOGD("TestTopUidChanged");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in different uid UID(1).
+    // Should pause previous session and start new session.
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish session, next real-time session should resume.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Finish session, offline session should start.
+    mController->onFinish(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidChangedMultipleUids) {
+    ALOGD("TestTopUidChangedMultipleUids");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Add UID(1) to SESSION(0), SESSION(0) should continue to run
+    // (no pause&resume of the same session).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) back to top, SESSION(0) should continue to run
+    // (no pause&resume of the same session).
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Add UID(2) to the offline session, it should be started.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(1), SESSION(0), UID(2)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+
+    // Add UID(3) to SESSION(0).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Bring UID(3) to top, SESSION(0) should resume.
+    mUidPolicy->setTop(UID(3));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(1), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Now make UID(2) also blocked on CLIENT(0), SESSION(0).
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), UID(2)));
+
+    // Bring UID(2) back to top; CLIENT(0)'s SESSION(0) should continue to run (even though it
+    // was added to UID(2)'s queue later than CLIENT(1)'s SESSION(0)).
+    mUidPolicy->setTop(UID(2));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTopUidSetChanged) {
+    ALOGD("TestTopUidSetChanged");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(0), UID(1) to top set.
+    // UID(0) should continue to run.
+    mUidPolicy->setTop({UID(0), UID(1)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in different uid UID(1).
+    // CLIENT(0)'s session should pause and CLIENT(2)'s session should start.
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Remove UID(0) from top set, and only leave UID(1) in the set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Set UID(1), UID(2) to top set.
+    // UID(1) should continue to run.
+    mUidPolicy->setTop({UID(1), UID(2)});
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Bring UID(0) back to top.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Bring invalid uid to top.
+    mUidPolicy->setTop(kInvalidUid);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Finish session, next real-time session from UID(1) should resume, even though
+    // UID(1) is no longer on top.
+    mController->onFinish(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Finish session, offline session should start.
+    mController->onFinish(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(2), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestUidGone) {
+    ALOGD("TestUidGone");
+
+    mUidPolicy->setTop(UID(0));
+    // Start with UID(0) on top.
+    // Submit real-time sessions to CLIENT(0), session should start immediately.
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+
+    // Submit offline session to CLIENT(1), should not start.
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    EXPECT_TRUE(mController->addClientUid(CLIENT(1), SESSION(0), UID(1)));
+
+    // Bring UID(1) to top; CLIENT(0)'s SESSION(1) should start and SESSION(0) should pause.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+    // Tell the controller that UID(0) is gone; the paused SESSION(0) should be cancelled.
+    mController->onUidGone(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    std::vector<int32_t> clientUids;
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+    EXPECT_EQ(clientUids[0], UID(1));
+
+    // Tell the controller that UID(1) is gone too.
+    mController->onUidGone(UID(1));
+    // CLIENT(1)'s SESSION(0) should start, CLIENT(0)'s SESSION(1) should be cancelled.
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+    EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
+    // CLIENT(1) SESSION(0) should not have any client uids as it's only kept for offline.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(1), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestAddGetClientUids) {
+    ALOGD("TestAddGetClientUids");
+
+    // Add/get client uids with non-existent session, should fail.
+    std::vector<int32_t> clientUids;
+    uid_t ownUid = ::getuid();
+    EXPECT_FALSE(mController->addClientUid(CLIENT(0), SESSION(0), ownUid));
+    EXPECT_FALSE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+
+    // Submit a real-time request.
+    EXPECT_TRUE(mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest,
+                                    mClientCallback0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Should have own uid in client uids.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    EXPECT_EQ(clientUids.size(), 1);
+    EXPECT_EQ(clientUids[0], UID(0));
+
+    // Add UID(0) again should fail.
+    EXPECT_FALSE(mController->addClientUid(CLIENT(0), SESSION(0), UID(0)));
+
+    // Add own uid should succeed.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(0), ownUid));
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(0), &clientUids));
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids.begin(), clientUids.end());
+    EXPECT_EQ(uidSet.size(), 2);
+    EXPECT_EQ(uidSet.count(UID(0)), 1);
+    EXPECT_EQ(uidSet.count(ownUid), 1);
+
+    // Submit an offline request.
+    EXPECT_TRUE(mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mOfflineRequest,
+                                    mClientCallback0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Should not have own uid in client uids.
+    EXPECT_TRUE(mController->getClientUids(CLIENT(0), SESSION(1), &clientUids));
+    EXPECT_EQ(clientUids.size(), 0);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    // Add UID(1) to offline session, offline session should start and SESSION(0) should pause.
+    EXPECT_TRUE(mController->addClientUid(CLIENT(0), SESSION(1), UID(1)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+}
+
+/* Test resource lost without thermal throttling */
+TEST_F(TranscodingSessionControllerTest, TestResourceLost) {
+    ALOGD("TestResourceLost");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mRealtimeRequest.clientPid = PID(0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mOfflineRequest.clientPid = PID(0);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in different uid UID(1).
+    // Should pause previous session and start new session.
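+// Convenience macros for deriving distinct client/session/uid/pid values from the base constants.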
+    mRealtimeRequest.clientPid = PID(1);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
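+// Fake UID policy: tests call setTop() to control which uids are considered "on top",
+// which in turn notifies the controller through the registered callback.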
+    // Test 0: No call into ResourcePolicy if resource lost is from a non-running
+    // or non-existent session.
+    mController->onResourceLost(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), kInvalidPid);
+    mController->onResourceLost(CLIENT(3), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), kInvalidPid);
+
+    // Test 1: No queue change during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(2) should resume.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 2: Change of queue order during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(0) back to top, should have no resume due to no resource.
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(0) should resume.
+    mController->onResourceAvailable();
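+// Fake resource policy: records the pid reported by the controller as having lost resources.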
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Test 3: Cancellation of the paused top session during resource loss.
+    mController->onResourceLost(CLIENT(0), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Cancel the paused top session during resource lost.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    // Signal resource available, CLIENT(2)'s session should resume.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 4: Adding new queue during resource loss.
+    // Signal resource lost.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+
+    // Submit real-time session to CLIENT(3) in UID(2), session shouldn't start due to no resource.
+    mRealtimeRequest.clientPid = PID(2);
+    mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Signal resource available, CLIENT(3)'s session should start.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), SESSION(0)));
+}
+
+/* Test thermal throttling without resource lost */
+TEST_F(TranscodingSessionControllerTest, TestThermalCallback) {
+    ALOGD("TestThermalCallback");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mRealtimeRequest.clientPid = PID(0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mOfflineRequest.clientPid = PID(0);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in different uid UID(1).
+    // Should pause previous session and start new session.
+    mRealtimeRequest.clientPid = PID(1);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Test 0: Basic case, no queue change during throttling, top session should pause/resume
+    // with throttling.
+    mController->onThrottlingStarted();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 1: Change of queue order during thermal throttling, when throttling stops,
+    // new top session should resume.
+    mController->onThrottlingStarted();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    mUidPolicy->setTop(UID(0));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
+
+    // Test 2: Cancel session during throttling, when throttling stops, new top
+    // session should resume.
+    mController->onThrottlingStarted();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    // Cancel the paused top session during throttling.
+    EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Stop(CLIENT(0), SESSION(0)));
+    // Throttling stops, CLIENT(2)'s session should resume.
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 3: Add new queue during throttling, when throttling stops, new top
+    // session should resume.
+    mController->onThrottlingStarted();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    // Move UID(2) to top.
+    mUidPolicy->setTop(UID(2));
+    // Submit real-time session to CLIENT(3) in UID(2), session shouldn't start during throttling.
+    mRealtimeRequest.clientPid = PID(2);
+    mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Throttling stops, CLIENT(3)'s session should start.
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(3), SESSION(0)));
+}
+
+/* Test resource lost and thermal throttling happening simultaneously */
+TEST_F(TranscodingSessionControllerTest, TestResourceLostAndThermalCallback) {
+    ALOGD("TestResourceLostAndThermalCallback");
+
+    // Start with unspecified top UID.
+    // Submit real-time session to CLIENT(0), session should start immediately.
+    mRealtimeRequest.clientPid = PID(0);
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Submit offline session to CLIENT(1) in UID(0), should not start.
+    mOfflineRequest.clientPid = PID(0);
+    mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Move UID(1) to top.
+    mUidPolicy->setTop(UID(1));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+
+    // Submit real-time session to CLIENT(2) in different uid UID(1).
+    // Should pause previous session and start new session.
+    mRealtimeRequest.clientPid = PID(1);
+    mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
+
+    // Test 0: Resource lost during throttling.
+    // Throttling starts, top session should pause.
+    mController->onThrottlingStarted();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(2), SESSION(0)));
+    // Signal resource lost, this should get ignored because the session is now paused.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), kInvalidPid);
+    // Signal resource available, CLIENT(2) shouldn't resume.
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    // Throttling ends, top session should resume.
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 1: Throttling during resource lost.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+    mController->onThrottlingStarted();
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+
+    // Test 2: Interleaving resource lost and throttling.
+    mController->onResourceLost(CLIENT(2), SESSION(0));
+    EXPECT_EQ(mResourcePolicy->getPid(), PID(1));
+    mController->onThrottlingStarted();
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
+    mController->onThrottlingStopped();
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogNoHeartbeat) {
+    ALOGD("TestTranscoderWatchdogTimeout");
+
+    // Submit session to CLIENT(0) in UID(0).
+    // Should start immediately (because this is the only session).
+    mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
+
+    // Test 1: If no keep-alive is sent at all, the session times out after 3 seconds.
+    expectTimeout(CLIENT(0), SESSION(0), 2);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogHeartbeat) {
+    // Test 2: No timeout as long as keep-alives keep coming; timeout after they stop.
+    mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
+    for (int i = 0; i < 5; i++) {
+        EXPECT_EQ(mTranscoder->popEvent(1000000), TestTranscoder::NoEvent);
+        mController->onHeartBeat(CLIENT(0), SESSION(1));
+    }
+    expectTimeout(CLIENT(0), SESSION(1), 2);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogDuringPause) {
+    int expectedGen = 2;
+
+    // Test 3a: No timeout for a paused session even if no keep-alive is sent.
+    mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
+    // Trigger a pause by sending a resource lost.
+    mController->onResourceLost(CLIENT(0), SESSION(2));
+    EXPECT_EQ(mTranscoder->popEvent(3100000), TestTranscoder::NoEvent);
+    mController->onResourceAvailable();
+    EXPECT_EQ(mTranscoder->popEvent(100000), TestTranscoder::Resume(CLIENT(0), SESSION(2)));
+    expectTimeout(CLIENT(0), SESSION(2), expectedGen++);
+
+    // Test 3b: Timeout is reset after the session is paused and resumed.
+    mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
+    // Let the session run almost to timeout, to test timeout reset after pause.
+    EXPECT_EQ(mTranscoder->popEvent(2900000), TestTranscoder::NoEvent);
+    // Trigger a pause by submitting a higher-priority request.
+    mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
+    // Finish the higher-priority session, lower-priority session should resume,
+    // and the timeout should reset to full value.
+    mController->onFinish(CLIENT(0), SESSION(4));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(4)));
+    EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(3)));
+    expectTimeout(CLIENT(0), SESSION(3), expectedGen++);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverCountOnly) {
+    ALOGD("TestTranscoderPacerOverCountOnly");
+    testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverTimeOnly) {
+    ALOGD("TestTranscoderPacerOverTimeOnly");
+    testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverQuota) {
+    ALOGD("TestTranscoderPacerOverQuota");
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerWithPause) {
+    ALOGD("TestTranscoderPacerDuringPause");
+    testPacerHelperWithPause(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+}
+
+/*
+ * Test the case where multiple client uids request the same session. Session should only
+ * be dropped when all clients are over quota.
+ */
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerMultipleUids) {
+    ALOGD("TestTranscoderPacerMultipleUids");
+    // First, run mClientCallback0 until it runs out of quota.
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback0, {});
+    // Make UID(0) block on Client1's sessions too; Client1's quota should not be affected.
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    10 /*expectedSuccess*/, mClientCallback1, {UID(0)});
+    // Make UID(10) block on Client2's sessions. We expect to see 11 successes (instead of 10)
+    // because addClientUid() is called after the submit, so the first session is already started
+    // by the time UID(10) is added. UID(10) allows the 11th session to run; after that, both
+    // UID(10) and UID(2) are out of quota.
+    testPacerHelperWithMultipleUids(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                                    11 /*expectedSuccess*/, mClientCallback2, {UID(10)});
+}
+
+/*
+ * Use same uid for clientUid and callingUid, should not be limited by quota.
+ */
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerSelfUid) {
+    ALOGD("TestTranscoderPacerSelfUid");
+    testPacerHelperWithSelfUid(12 /*numSubmits*/, 400 /*sessionDurationMs*/,
+                               12 /*expectedSuccess*/);
+}
+
+}  // namespace android
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4
new file mode 100644
index 0000000..b652dd9
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
new file mode 100644
index 0000000..80d1ec3
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
new file mode 100644
index 0000000..ef7e1b7
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
new file mode 100644
index 0000000..df42a15
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
new file mode 100644
index 0000000..7794b99
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
new file mode 100644
index 0000000..b50d8e4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
new file mode 100644
index 0000000..92dda3b
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
new file mode 100644
index 0000000..2fe37bd
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4 b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
new file mode 100644
index 0000000..17150d4
--- /dev/null
+++ b/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index d8e4830..3cbf1dd 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -3,24 +3,36 @@
 # Run tests in this directory.
 #
 
-if [ -z "$ANDROID_BUILD_TOP" ]; then
-    echo "Android build environment not set"
-    exit -1
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
 fi
 
-# ensure we have mm
-. $ANDROID_BUILD_TOP/build/envsetup.sh
-
-mm
-
-echo "waiting for device"
-
-adb root && adb wait-for-device remount && adb sync
-
 echo "========================================"
 
 echo "testing TranscodingClientManager"
-adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+#adb shell /data/nativetest64/TranscodingClientManager_tests/TranscodingClientManager_tests
+adb shell /data/nativetest/TranscodingClientManager_tests/TranscodingClientManager_tests
 
 echo "testing AdjustableMaxPriorityQueue"
-adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+#adb shell /data/nativetest64/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+adb shell /data/nativetest/AdjustableMaxPriorityQueue_tests/AdjustableMaxPriorityQueue_tests
+
+echo "testing TranscodingSessionController"
+#adb shell /data/nativetest64/TranscodingSessionController_tests/TranscodingSessionController_tests
+adb shell /data/nativetest/TranscodingSessionController_tests/TranscodingSessionController_tests
+
+echo "testing TranscodingLogger"
+#adb shell /data/nativetest64/TranscodingLogger_tests/TranscodingLogger_tests
+adb shell /data/nativetest/TranscodingLogger_tests/TranscodingLogger_tests
diff --git a/media/libmediatranscoding/tests/push_assets.sh b/media/libmediatranscoding/tests/push_assets.sh
new file mode 100755
index 0000000..cc71514
--- /dev/null
+++ b/media/libmediatranscoding/tests/push_assets.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Pushes the assets to /data/local/tmp on the device.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount
+fi
+
+echo "Copying files to device"
+
+adb shell mkdir -p /data/local/tmp/TranscodingTestAssets
+
+FILES=$ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/assets/TranscodingTestAssets/*
+for file in $FILES
+do
+    adb push --sync $file /data/local/tmp/TranscodingTestAssets
+done
+
+echo "Copy done"
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
new file mode 100644
index 0000000..ccb2ec2
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "mediatranscoder_defaults",
+
+    srcs: [
+        "MediaSampleQueue.cpp",
+        "MediaSampleReaderNDK.cpp",
+        "MediaSampleWriter.cpp",
+        "MediaTrackTranscoder.cpp",
+        "MediaTranscoder.cpp",
+        "NdkCommon.cpp",
+        "PassthroughTrackTranscoder.cpp",
+        "VideoTrackTranscoder.cpp",
+    ],
+
+    min_sdk_version: "29",
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
+
+    //header_libs: [ "libarect_headers", "libarect_headers_for_ndk" ],
+    static_libs: [ "libarect" ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libmediandk",
+        "libnativewindow",
+        "libutils",
+        "libbinder_ndk",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wformat",
+        "-Wno-error=deprecated-declarations",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+cc_library {
+    name: "libmediatranscoder",
+    defaults: ["mediatranscoder_defaults"],
+}
diff --git a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
new file mode 100644
index 0000000..b085c98
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueue"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleQueue.h>
+
+namespace android {
+
+bool MediaSampleQueue::enqueue(const std::shared_ptr<MediaSample>& sample) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    if (!mAborted) {
+        mSampleQueue.push(sample);
+        mCondition.notify_one();
+    }
+    return mAborted;
+}
+
+// Unfortunately std::unique_lock is incompatible with -Wthread-safety
+bool MediaSampleQueue::dequeue(std::shared_ptr<MediaSample>* sample) NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock<std::mutex> lock(mMutex);
+    while (mSampleQueue.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (!mAborted) {
+        if (sample != nullptr) {
+            *sample = mSampleQueue.front();
+        }
+        mSampleQueue.pop();
+    }
+    return mAborted;
+}
+
+bool MediaSampleQueue::isEmpty() {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    return mSampleQueue.empty();
+}
+
+void MediaSampleQueue::abort() {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    // Clear the queue and notify consumers.
+    std::queue<std::shared_ptr<MediaSample>> empty = {};
+    std::swap(mSampleQueue, empty);
+    mAborted = true;
+    mCondition.notify_all();
+}
+}  // namespace android
\ No newline at end of file
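
A minimal usage sketch of the MediaSampleQueue API above (not part of the patch): a producer enqueues samples while a consumer drains them, and abort() shuts both sides down. It assumes the media/MediaSampleQueue.h header added by this change and an Android build environment; enqueue() and dequeue() both return true once the queue has been aborted.

    #include <media/MediaSampleQueue.h>

    #include <memory>
    #include <thread>

    using android::MediaSample;
    using android::MediaSampleQueue;

    void sampleQueueSketch() {
        MediaSampleQueue queue;

        // Consumer: dequeue() blocks until a sample arrives or abort() is called,
        // and returns true once the queue has been aborted.
        std::thread consumer([&queue] {
            std::shared_ptr<MediaSample> sample;
            while (!queue.dequeue(&sample)) {
                // Process `sample` here.
            }
        });

        // Producer: enqueue() returns true if the queue was already aborted,
        // in which case the sample is dropped.
        for (int i = 0; i < 10; ++i) {
            if (queue.enqueue(std::make_shared<MediaSample>())) {
                break;
            }
        }

        // Shut down: clears any pending samples and wakes the consumer.
        queue.abort();
        consumer.join();
    }
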
diff --git a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
new file mode 100644
index 0000000..8d36e31
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
@@ -0,0 +1,480 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReader"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleReaderNDK.h>
+
+#include <algorithm>
+#include <cmath>
+
+namespace android {
+
+// Check that the extractor sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_SYNC_SAMPLE == AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC,
+              "Sample flag mismatch: SYNC_SAMPLE");
+
+// static
+std::shared_ptr<MediaSampleReader> MediaSampleReaderNDK::createFromFd(int fd, size_t offset,
+                                                                      size_t size) {
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    if (extractor == nullptr) {
+        LOG(ERROR) << "Unable to allocate AMediaExtractor";
+        return nullptr;
+    }
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_setDataSourceFd returned error: " << status;
+        AMediaExtractor_delete(extractor);
+        return nullptr;
+    }
+
+    auto sampleReader = std::shared_ptr<MediaSampleReaderNDK>(new MediaSampleReaderNDK(extractor));
+    return sampleReader;
+}
+
+MediaSampleReaderNDK::MediaSampleReaderNDK(AMediaExtractor* extractor)
+      : mExtractor(extractor), mTrackCount(AMediaExtractor_getTrackCount(mExtractor)) {
+    if (mTrackCount > 0) {
+        mTrackCursors.resize(mTrackCount);
+    }
+}
+
+MediaSampleReaderNDK::~MediaSampleReaderNDK() {
+    if (mExtractor != nullptr) {
+        AMediaExtractor_delete(mExtractor);
+    }
+}
+
+void MediaSampleReaderNDK::advanceTrack_l(int trackIndex) {
+    if (!mEnforceSequentialAccess) {
+        // Note: Positioning the extractor before advancing the track is needed for two reasons:
+        // 1. To enable multiple advances without explicitly letting the extractor catch up.
+        // 2. To prevent the extractor from being farther than "next".
+        (void)moveToTrack_l(trackIndex);
+    }
+
+    SampleCursor& cursor = mTrackCursors[trackIndex];
+    cursor.previous = cursor.current;
+    cursor.current = cursor.next;
+    cursor.next.reset();
+
+    if (mEnforceSequentialAccess && trackIndex == mExtractorTrackIndex) {
+        while (advanceExtractor_l()) {
+            SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+            if (cursor.current.isSet && cursor.current.index == mExtractorSampleIndex) {
+                if (mExtractorTrackIndex != trackIndex) {
+                    mTrackSignals[mExtractorTrackIndex].notify_all();
+                }
+                break;
+            }
+        }
+    }
+    return;
+}
+
+bool MediaSampleReaderNDK::advanceExtractor_l() {
+    // Reset the "next" sample time whenever the extractor advances past a sample that is current,
+    // to ensure that "next" is appropriately updated when the extractor advances over the next
+    // sample of that track.
+    if (mTrackCursors[mExtractorTrackIndex].current.isSet &&
+        mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex) {
+        mTrackCursors[mExtractorTrackIndex].next.reset();
+    }
+
+    // Update the extractor's sample index even if this track reaches EOS, so that the other tracks
+    // are not given an incorrect extractor position.
+    mExtractorSampleIndex++;
+    if (!AMediaExtractor_advance(mExtractor)) {
+        LOG(DEBUG) << "  EOS in advanceExtractor_l";
+        mEosReached = true;
+        for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+            it->second.notify_all();
+        }
+        return false;
+    }
+
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+
+    SampleCursor& cursor = mTrackCursors[mExtractorTrackIndex];
+    if (mExtractorSampleIndex > cursor.previous.index) {
+        if (!cursor.current.isSet) {
+            cursor.current.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        } else if (!cursor.next.isSet && mExtractorSampleIndex > cursor.current.index) {
+            cursor.next.set(mExtractorSampleIndex, AMediaExtractor_getSampleTime(mExtractor));
+        }
+    }
+
+    return true;
+}
+
+media_status_t MediaSampleReaderNDK::seekExtractorBackwards_l(int64_t targetTimeUs,
+                                                              int targetTrackIndex,
+                                                              uint64_t targetSampleIndex) {
+    if (targetSampleIndex > mExtractorSampleIndex) {
+        LOG(ERROR) << "Error: Forward seek is not supported";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    // AMediaExtractor supports reading negative timestamps but does not support seeking to them.
+    const int64_t seekToTimeUs = std::max(targetTimeUs, (int64_t)0);
+    media_status_t status =
+            AMediaExtractor_seekTo(mExtractor, seekToTimeUs, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to seek to " << seekToTimeUs << ", target " << targetTimeUs;
+        return status;
+    }
+
+    mEosReached = false;
+    mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+
+    while (sampleTimeUs != targetTimeUs || mExtractorTrackIndex != targetTrackIndex) {
+        if (!AMediaExtractor_advance(mExtractor)) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+        mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+        sampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+    }
+    mExtractorSampleIndex = targetSampleIndex;
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::moveToSample_l(SamplePosition& pos, int trackIndex) {
+    // Seek backwards if the extractor is ahead of the sample.
+    if (pos.isSet && mExtractorSampleIndex > pos.index) {
+        media_status_t status = seekExtractorBackwards_l(pos.timeStampUs, trackIndex, pos.index);
+        if (status != AMEDIA_OK) return status;
+    }
+
+    // Advance until extractor points to the sample.
+    while (!(pos.isSet && pos.index == mExtractorSampleIndex)) {
+        if (!advanceExtractor_l()) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::moveToTrack_l(int trackIndex) {
+    return moveToSample_l(mTrackCursors[trackIndex].current, trackIndex);
+}
+
+media_status_t MediaSampleReaderNDK::waitForTrack_l(int trackIndex,
+                                                    std::unique_lock<std::mutex>& lockHeld) {
+    while (trackIndex != mExtractorTrackIndex && !mEosReached && mEnforceSequentialAccess) {
+        mTrackSignals[trackIndex].wait(lockHeld);
+    }
+
+    if (mEosReached) {
+        return AMEDIA_ERROR_END_OF_STREAM;
+    }
+
+    if (!mEnforceSequentialAccess) {
+        return moveToTrack_l(trackIndex);
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::primeExtractorForTrack_l(
+        int trackIndex, std::unique_lock<std::mutex>& lockHeld) {
+    if (mExtractorTrackIndex < 0) {
+        mExtractorTrackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+        if (mExtractorTrackIndex < 0) {
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+        mTrackCursors[mExtractorTrackIndex].current.set(mExtractorSampleIndex,
+                                                        AMediaExtractor_getSampleTime(mExtractor));
+    }
+
+    if (mEnforceSequentialAccess) {
+        return waitForTrack_l(trackIndex, lockHeld);
+    } else {
+        return moveToTrack_l(trackIndex);
+    }
+}
+
+media_status_t MediaSampleReaderNDK::selectTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+        LOG(ERROR) << "TrackIndex " << trackIndex << " already selected";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mExtractorTrackIndex >= 0) {
+        LOG(ERROR) << "Tracks must be selected before sample reading begins.";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+        return status;
+    }
+
+    mTrackSignals.emplace(std::piecewise_construct, std::forward_as_tuple(trackIndex),
+                          std::forward_as_tuple());
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::unselectTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mExtractorTrackIndex >= 0) {
+        LOG(ERROR) << "unselectTrack must be called before sample reading begins.";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    auto it = mTrackSignals.find(trackIndex);
+    if (it == mTrackSignals.end()) {
+        LOG(ERROR) << "TrackIndex " << trackIndex << " is not selected";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    mTrackSignals.erase(it);
+
+    media_status_t status = AMediaExtractor_unselectTrack(mExtractor, trackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "AMediaExtractor_selectTrack returned error: " << status;
+        return status;
+    }
+
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::setEnforceSequentialAccess(bool enforce) {
+    LOG(DEBUG) << "setEnforceSequentialAccess( " << enforce << " )";
+
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (mEnforceSequentialAccess && !enforce) {
+        // If switching from enforcing to not enforcing sequential access, there may be waiting
+        // threads that need to be woken up.
+        for (auto it = mTrackSignals.begin(); it != mTrackSignals.end(); ++it) {
+            it->second.notify_all();
+        }
+    } else if (!mEnforceSequentialAccess && enforce && mExtractorTrackIndex >= 0) {
+        // If switching from not enforcing to enforcing sequential access, the extractor needs to
+        // be positioned at the track that is farthest behind so that it won't get stuck waiting.
+        struct {
+            SamplePosition* pos = nullptr;
+            int trackIndex = -1;
+        } earliestSample;
+
+        for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+            SamplePosition& lastKnownTrackPosition = mTrackCursors[trackIndex].current.isSet
+                                                             ? mTrackCursors[trackIndex].current
+                                                             : mTrackCursors[trackIndex].previous;
+
+            if (lastKnownTrackPosition.isSet) {
+                if (earliestSample.pos == nullptr ||
+                    earliestSample.pos->index > lastKnownTrackPosition.index) {
+                    earliestSample.pos = &lastKnownTrackPosition;
+                    earliestSample.trackIndex = trackIndex;
+                }
+            }
+        }
+
+        if (earliestSample.pos == nullptr) {
+            LOG(ERROR) << "No known sample position found";
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+
+        media_status_t status = moveToSample_l(*earliestSample.pos, earliestSample.trackIndex);
+        if (status != AMEDIA_OK) return status;
+
+        while (!(mTrackCursors[mExtractorTrackIndex].current.isSet &&
+                 mTrackCursors[mExtractorTrackIndex].current.index == mExtractorSampleIndex)) {
+            if (!advanceExtractor_l()) {
+                return AMEDIA_ERROR_END_OF_STREAM;
+            }
+        }
+    }
+
+    mEnforceSequentialAccess = enforce;
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) {
+    std::scoped_lock lock(mExtractorMutex);
+    media_status_t status = AMEDIA_OK;
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track is not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (bitrate == nullptr) {
+        LOG(ERROR) << "bitrate pointer is NULL.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (mExtractorTrackIndex >= 0) {
+        LOG(ERROR) << "getEstimatedBitrateForTrack must be called before sample reading begins.";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    // Sample the track.
+    static constexpr int64_t kSamplingDurationUs = 10 * 1000 * 1000;  // 10 seconds
+    size_t lastSampleSize = 0;
+    size_t totalSampleSize = 0;
+    int64_t firstSampleTimeUs = 0;
+    int64_t lastSampleTimeUs = 0;
+
+    do {
+        if (AMediaExtractor_getSampleTrackIndex(mExtractor) == trackIndex) {
+            lastSampleTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+            if (totalSampleSize == 0) {
+                firstSampleTimeUs = lastSampleTimeUs;
+            }
+
+            lastSampleSize = AMediaExtractor_getSampleSize(mExtractor);
+            totalSampleSize += lastSampleSize;
+        }
+    } while ((lastSampleTimeUs - firstSampleTimeUs) < kSamplingDurationUs &&
+             AMediaExtractor_advance(mExtractor));
+
+    // Reset the extractor to the beginning.
+    status = AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to reset extractor: " << status;
+        return status;
+    }
+
+    int64_t durationUs = 0;
+    const int64_t sampledDurationUs = lastSampleTimeUs - firstSampleTimeUs;
+
+    if (sampledDurationUs < kSamplingDurationUs) {
+        // Track is shorter than the sampling duration so use the full track duration to get better
+        // accuracy (i.e. don't skip the last sample).
+        AMediaFormat* trackFormat = getTrackFormat(trackIndex);
+        if (!AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+            durationUs = 0;
+        }
+        AMediaFormat_delete(trackFormat);
+    }
+
+    if (durationUs == 0) {
+        // The sampled duration does not account for the last sample's duration so its size should
+        // not be included either.
+        totalSampleSize -= lastSampleSize;
+        durationUs = sampledDurationUs;
+    }
+
+    if (totalSampleSize == 0 || durationUs <= 0) {
+        LOG(ERROR) << "Unable to estimate track bitrate";
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    *bitrate = roundf((float)totalSampleSize * 8 * 1000000 / durationUs);
+    return AMEDIA_OK;
+}
+
+media_status_t MediaSampleReaderNDK::getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) {
+    std::unique_lock<std::mutex> lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (info == nullptr) {
+        LOG(ERROR) << "MediaSampleInfo pointer is NULL.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
+    if (status == AMEDIA_OK) {
+        info->presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+        info->flags = AMediaExtractor_getSampleFlags(mExtractor);
+        info->size = AMediaExtractor_getSampleSize(mExtractor);
+    } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+        info->presentationTimeUs = 0;
+        info->flags = SAMPLE_FLAG_END_OF_STREAM;
+        info->size = 0;
+        LOG(DEBUG) << "  getSampleInfoForTrack #" << trackIndex << ": End Of Stream";
+    } else {
+        LOG(ERROR) << "  getSampleInfoForTrack #" << trackIndex << ": Error " << status;
+    }
+
+    return status;
+}
+
+media_status_t MediaSampleReaderNDK::readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                            size_t bufferSize) {
+    std::unique_lock<std::mutex> lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) == mTrackSignals.end()) {
+        LOG(ERROR) << "Track not selected.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    } else if (buffer == nullptr) {
+        LOG(ERROR) << "buffer pointer is NULL";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = primeExtractorForTrack_l(trackIndex, lock);
+    if (status != AMEDIA_OK) {
+        return status;
+    }
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+    if (bufferSize < sampleSize) {
+        LOG(ERROR) << "Buffer is too small for sample, " << bufferSize << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer, bufferSize);
+    if (bytesRead < sampleSize) {
+        LOG(ERROR) << "Unable to read full sample, " << bytesRead << " vs " << sampleSize;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    advanceTrack_l(trackIndex);
+
+    return AMEDIA_OK;
+}
+
+void MediaSampleReaderNDK::advanceTrack(int trackIndex) {
+    std::scoped_lock lock(mExtractorMutex);
+
+    if (mTrackSignals.find(trackIndex) != mTrackSignals.end()) {
+        advanceTrack_l(trackIndex);
+    } else {
+        LOG(ERROR) << "Trying to advance a track that is not selected (#" << trackIndex << ")";
+    }
+}
+
+AMediaFormat* MediaSampleReaderNDK::getFileFormat() {
+    return AMediaExtractor_getFileFormat(mExtractor);
+}
+
+size_t MediaSampleReaderNDK::getTrackCount() const {
+    return mTrackCount;
+}
+
+AMediaFormat* MediaSampleReaderNDK::getTrackFormat(int trackIndex) {
+    if (trackIndex < 0 || trackIndex >= mTrackCount) {
+        LOG(ERROR) << "Invalid trackIndex " << trackIndex << " for trackCount " << mTrackCount;
+        return AMediaFormat_new();
+    }
+
+    return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+}
+
+}  // namespace android
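
A minimal sketch of reading one track through the MediaSampleReaderNDK API above (not part of the patch). It assumes an Android build with the headers added by this change; note that readSampleDataForTrack() advances the track cursor itself, so advanceTrack() is only needed when a sample is skipped without being read.

    #include <fcntl.h>
    #include <unistd.h>

    #include <vector>

    #include <media/MediaSampleReaderNDK.h>

    using namespace android;

    bool readFirstTrack(const char* path) {
        int fd = open(path, O_RDONLY);
        if (fd < 0) return false;
        const off_t fileSize = lseek(fd, 0, SEEK_END);

        auto reader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
        if (reader == nullptr || reader->getTrackCount() == 0 ||
            reader->selectTrack(0) != AMEDIA_OK) {
            close(fd);
            return false;
        }

        MediaSampleInfo info;
        std::vector<uint8_t> buffer;
        // getSampleInfoForTrack() returns AMEDIA_ERROR_END_OF_STREAM (and sets the EOS flag)
        // once the track is exhausted, which ends the loop.
        while (reader->getSampleInfoForTrack(0, &info) == AMEDIA_OK) {
            buffer.resize(info.size);
            if (reader->readSampleDataForTrack(0, buffer.data(), buffer.size()) != AMEDIA_OK) {
                break;
            }
            // `buffer` now holds one sample of `info.size` bytes with timestamp
            // `info.presentationTimeUs`.
        }

        close(fd);
        return true;
    }
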
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
new file mode 100644
index 0000000..e931cc1
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -0,0 +1,351 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriter"
+
+#include <android-base/logging.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkCommon.h>
+#include <media/NdkMediaMuxer.h>
+#include <sys/prctl.h>
+#include <utils/AndroidThreads.h>
+
+namespace android {
+
+class DefaultMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MediaSampleWriterMuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        // If the track format has a rotation, AMediaMuxer_setOrientationHint must be called to
+        // set it; the muxer does not honor rotation specified on the track format.
+        const char* mime;
+        if (AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime) &&
+            strncmp(mime, "video/", 6) == 0) {
+            int32_t rotation;
+            if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+                (rotation != 0)) {
+                AMediaMuxer_setOrientationHint(mMuxer, rotation);
+            }
+        }
+
+        return AMediaMuxer_addTrack(mMuxer, trackFormat);
+    }
+    media_status_t start() override { return AMediaMuxer_start(mMuxer); }
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        return AMediaMuxer_writeSampleData(mMuxer, trackIndex, data, info);
+    }
+    media_status_t stop() override { return AMediaMuxer_stop(mMuxer); }
+    // ~MediaSampleWriterMuxerInterface
+
+    static std::shared_ptr<DefaultMuxer> create(int fd) {
+        AMediaMuxer* ndkMuxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+        if (ndkMuxer == nullptr) {
+            LOG(ERROR) << "Unable to create AMediaMuxer";
+            return nullptr;
+        }
+
+        return std::make_shared<DefaultMuxer>(ndkMuxer);
+    }
+
+    ~DefaultMuxer() {
+        if (mMuxer != nullptr) {
+            AMediaMuxer_delete(mMuxer);
+        }
+    }
+
+    DefaultMuxer(AMediaMuxer* muxer) : mMuxer(muxer) {}
+    DefaultMuxer() = delete;
+
+private:
+    AMediaMuxer* mMuxer;
+};
+
+// static
+std::shared_ptr<MediaSampleWriter> MediaSampleWriter::Create() {
+    return std::shared_ptr<MediaSampleWriter>(new MediaSampleWriter());
+}
+
+MediaSampleWriter::~MediaSampleWriter() {
+    if (mState == STARTED) {
+        stop();
+    }
+}
+
+bool MediaSampleWriter::init(int fd, const std::weak_ptr<CallbackInterface>& callbacks,
+                             int64_t heartBeatIntervalUs) {
+    return init(DefaultMuxer::create(fd), callbacks, heartBeatIntervalUs);
+}
+
+bool MediaSampleWriter::init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer,
+                             const std::weak_ptr<CallbackInterface>& callbacks,
+                             int64_t heartBeatIntervalUs) {
+    if (callbacks.lock() == nullptr) {
+        LOG(ERROR) << "Callback object cannot be null";
+        return false;
+    } else if (muxer == nullptr) {
+        LOG(ERROR) << "Muxer cannot be null";
+        return false;
+    }
+
+    std::scoped_lock lock(mMutex);
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Sample writer is already initialized";
+        return false;
+    }
+
+    mState = INITIALIZED;
+    mMuxer = muxer;
+    mCallbacks = callbacks;
+    mHeartBeatIntervalUs = heartBeatIntervalUs;
+    return true;
+}
+
+MediaSampleWriter::MediaSampleConsumerFunction MediaSampleWriter::addTrack(
+        const std::shared_ptr<AMediaFormat>& trackFormat) {
+    if (trackFormat == nullptr) {
+        LOG(ERROR) << "Track format must be non-null";
+        return nullptr;
+    }
+
+    std::scoped_lock lock(mMutex);
+    if (mState != INITIALIZED) {
+        LOG(ERROR) << "Muxer needs to be initialized when adding tracks.";
+        return nullptr;
+    }
+
+    AMediaFormat* trackFormatCopy = AMediaFormat_new();
+    AMediaFormat_copy(trackFormatCopy, trackFormat.get());
+    // Request muxer to use background priorities by default.
+    AMediaFormatUtils::SetDefaultFormatValueInt32(TBD_AMEDIACODEC_PARAMETER_KEY_BACKGROUND_MODE,
+                                                  trackFormatCopy, 1 /* true */);
+
+    ssize_t trackIndexOrError = mMuxer->addTrack(trackFormatCopy);
+    AMediaFormat_delete(trackFormatCopy);
+    if (trackIndexOrError < 0) {
+        LOG(ERROR) << "Failed to add media track to muxer: " << trackIndexOrError;
+        return nullptr;
+    }
+    const size_t trackIndex = static_cast<size_t>(trackIndexOrError);
+
+    int64_t durationUs;
+    if (!AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs)) {
+        durationUs = 0;
+    }
+
+    mTracks.emplace(trackIndex, durationUs);
+    std::shared_ptr<MediaSampleWriter> thisWriter = shared_from_this();
+
+    return [self = shared_from_this(), trackIndex](const std::shared_ptr<MediaSample>& sample) {
+        self->addSampleToTrack(trackIndex, sample);
+    };
+}
+
+void MediaSampleWriter::addSampleToTrack(size_t trackIndex,
+                                         const std::shared_ptr<MediaSample>& sample) {
+    if (sample == nullptr) return;
+
+    bool wasEmpty;
+    {
+        std::scoped_lock lock(mMutex);
+        wasEmpty = mSampleQueue.empty();
+        mSampleQueue.push(std::make_pair(trackIndex, sample));
+    }
+
+    if (wasEmpty) {
+        mSampleSignal.notify_one();
+    }
+}
+
+bool MediaSampleWriter::start() {
+    std::scoped_lock lock(mMutex);
+
+    if (mTracks.size() == 0) {
+        LOG(ERROR) << "No tracks to write.";
+        return false;
+    } else if (mState != INITIALIZED) {
+        LOG(ERROR) << "Sample writer is not initialized";
+        return false;
+    }
+
+    mState = STARTED;
+    std::thread([this] {
+        androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
+        prctl(PR_SET_NAME, (unsigned long)"SampleWriterTrd", 0, 0, 0);
+
+        bool wasStopped = false;
+        media_status_t status = writeSamples(&wasStopped);
+        if (auto callbacks = mCallbacks.lock()) {
+            if (wasStopped && status == AMEDIA_OK) {
+                callbacks->onStopped(this);
+            } else {
+                callbacks->onFinished(this, status);
+            }
+        }
+    }).detach();
+    return true;
+}
+
+void MediaSampleWriter::stop() {
+    {
+        std::scoped_lock lock(mMutex);
+        if (mState != STARTED) {
+            LOG(ERROR) << "Sample writer is not started.";
+            return;
+        }
+        mState = STOPPED;
+    }
+
+    mSampleSignal.notify_all();
+}
+
+media_status_t MediaSampleWriter::writeSamples(bool* wasStopped) {
+    media_status_t muxerStatus = mMuxer->start();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error starting muxer: " << muxerStatus;
+        return muxerStatus;
+    }
+
+    media_status_t writeStatus = runWriterLoop(wasStopped);
+    if (writeStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error writing samples: " << writeStatus;
+    }
+
+    muxerStatus = mMuxer->stop();
+    if (muxerStatus != AMEDIA_OK) {
+        LOG(ERROR) << "Error stopping muxer: " << muxerStatus;
+    }
+
+    return writeStatus != AMEDIA_OK ? writeStatus : muxerStatus;
+}
+
+media_status_t MediaSampleWriter::runWriterLoop(bool* wasStopped) NO_THREAD_SAFETY_ANALYSIS {
+    AMediaCodecBufferInfo bufferInfo;
+    int32_t lastProgressUpdate = 0;
+    bool progressSinceLastReport = false;
+    int trackEosCount = 0;
+
+    // Set the "primary" track that will be used to determine progress to the track with longest
+    // duration.
+    int primaryTrackIndex = -1;
+    int64_t longestDurationUs = 0;
+    for (auto it = mTracks.begin(); it != mTracks.end(); ++it) {
+        if (it->second.mDurationUs > longestDurationUs) {
+            primaryTrackIndex = it->first;
+            longestDurationUs = it->second.mDurationUs;
+        }
+    }
+
+    std::chrono::microseconds updateInterval(mHeartBeatIntervalUs);
+    std::chrono::steady_clock::time_point nextUpdateTime =
+            std::chrono::steady_clock::now() + updateInterval;
+
+    while (true) {
+        if (trackEosCount >= mTracks.size()) {
+            break;
+        }
+
+        size_t trackIndex;
+        std::shared_ptr<MediaSample> sample;
+        {
+            std::unique_lock lock(mMutex);
+            while (mSampleQueue.empty() && mState == STARTED) {
+                if (mHeartBeatIntervalUs <= 0) {
+                    mSampleSignal.wait(lock);
+                    continue;
+                }
+
+                if (mSampleSignal.wait_until(lock, nextUpdateTime) == std::cv_status::timeout) {
+                    // Send a heartbeat if there has been any progress since the last update.
+                    if (progressSinceLastReport) {
+                        if (auto callbacks = mCallbacks.lock()) {
+                            callbacks->onHeartBeat(this);
+                        }
+                        progressSinceLastReport = false;
+                    }
+                    nextUpdateTime += updateInterval;
+                }
+            }
+
+            if (mState == STOPPED) {
+                *wasStopped = true;
+                return AMEDIA_OK;
+            }
+
+            auto& topEntry = mSampleQueue.top();
+            trackIndex = topEntry.first;
+            sample = topEntry.second;
+            mSampleQueue.pop();
+        }
+
+        TrackRecord& track = mTracks[trackIndex];
+
+        if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+            if (track.mReachedEos) {
+                continue;
+            }
+
+            // Track reached end of stream.
+            track.mReachedEos = true;
+            trackEosCount++;
+
+            // Preserve source track duration by setting the appropriate timestamp on the
+            // empty End-Of-Stream sample.
+            if (track.mDurationUs > 0 && track.mFirstSampleTimeSet) {
+                sample->info.presentationTimeUs = track.mDurationUs + track.mFirstSampleTimeUs;
+            }
+        }
+
+        track.mPrevSampleTimeUs = sample->info.presentationTimeUs;
+        if (!track.mFirstSampleTimeSet) {
+            // Record the first sample's timestamp in order to translate duration to EOS
+            // time for tracks that do not start at 0.
+            track.mFirstSampleTimeUs = sample->info.presentationTimeUs;
+            track.mFirstSampleTimeSet = true;
+        }
+
+        bufferInfo.offset = sample->dataOffset;
+        bufferInfo.size = sample->info.size;
+        bufferInfo.flags = sample->info.flags;
+        bufferInfo.presentationTimeUs = sample->info.presentationTimeUs;
+
+        media_status_t status = mMuxer->writeSampleData(trackIndex, sample->buffer, &bufferInfo);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "writeSampleData returned " << status;
+            return status;
+        }
+        sample.reset();
+
+        // TODO(lnilsson): Add option to toggle progress reporting on/off.
+        if (trackIndex == primaryTrackIndex) {
+            const int64_t elapsed = track.mPrevSampleTimeUs - track.mFirstSampleTimeUs;
+            int32_t progress = (elapsed * 100) / track.mDurationUs;
+            progress = std::clamp(progress, 0, 100);
+
+            if (progress > lastProgressUpdate) {
+                if (auto callbacks = mCallbacks.lock()) {
+                    callbacks->onProgressUpdate(this, progress);
+                }
+                lastProgressUpdate = progress;
+            }
+        }
+        progressSinceLastReport = true;
+    }
+
+    return AMEDIA_OK;
+}
+}  // namespace android
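
A minimal sketch of wiring one track into the MediaSampleWriter above (not part of the patch). The `callbacks` object is assumed to be some implementation of the CallbackInterface referenced by init() (declared in MediaSampleWriter.h, not shown in this diff), and the heartbeat interval value is illustrative only.

    #include <memory>

    #include <media/MediaSampleWriter.h>

    using namespace android;

    bool writeEosOnlyTrack(int dstFd, const std::shared_ptr<AMediaFormat>& trackFormat,
                           const std::shared_ptr<MediaSampleWriter::CallbackInterface>& callbacks) {
        auto writer = MediaSampleWriter::Create();

        // A heartbeat interval <= 0 disables the periodic onHeartBeat() callbacks
        // (see runWriterLoop() above).
        if (!writer->init(dstFd, callbacks, 1000000 /* heartBeatIntervalUs, illustrative */)) {
            return false;
        }

        // addTrack() returns a consumer function; every sample pushed through it is queued
        // for muxing on the writer's own thread.
        auto addSample = writer->addTrack(trackFormat);
        if (addSample == nullptr || !writer->start()) {
            return false;
        }

        // An end-of-stream sample terminates the track; the writer finishes once every
        // added track has reached EOS.
        auto eos = std::make_shared<MediaSample>();
        eos->info.flags = SAMPLE_FLAG_END_OF_STREAM;
        addSample(eos);
        return true;
    }
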
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
new file mode 100644
index 0000000..f01a948
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <utils/AndroidThreads.h>
+
+namespace android {
+
+media_status_t MediaTrackTranscoder::configure(
+        const std::shared_ptr<MediaSampleReader>& mediaSampleReader, int trackIndex,
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != UNINITIALIZED) {
+        LOG(ERROR) << "Configure can only be called once";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    if (mediaSampleReader == nullptr) {
+        LOG(ERROR) << "MediaSampleReader is null";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    if (trackIndex < 0 || trackIndex >= mediaSampleReader->getTrackCount()) {
+        LOG(ERROR) << "TrackIndex is invalid " << trackIndex;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    mMediaSampleReader = mediaSampleReader;
+    mTrackIndex = trackIndex;
+
+    mSourceFormat = std::shared_ptr<AMediaFormat>(mMediaSampleReader->getTrackFormat(mTrackIndex),
+                                                  &AMediaFormat_delete);
+    if (mSourceFormat == nullptr) {
+        LOG(ERROR) << "Unable to get format for track #" << mTrackIndex;
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    media_status_t status = configureDestinationFormat(destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "configure failed with error " << status;
+        return status;
+    }
+
+    mState = CONFIGURED;
+    return AMEDIA_OK;
+}
+
+bool MediaTrackTranscoder::start() {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState != CONFIGURED) {
+        LOG(ERROR) << "TrackTranscoder must be configured before started";
+        return false;
+    }
+    mState = STARTED;
+
+    std::thread([this] {
+        androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
+        bool stopped = false;
+        media_status_t status = runTranscodeLoop(&stopped);
+
+        // Output an EOS sample if the transcoder was stopped.
+        if (stopped) {
+            auto sample = std::make_shared<MediaSample>();
+            sample->info.flags = SAMPLE_FLAG_END_OF_STREAM;
+            onOutputSampleAvailable(sample);
+        }
+
+        // Notify the client.
+        if (auto callbacks = mTranscoderCallback.lock()) {
+            if (stopped) {
+                callbacks->onTrackStopped(this);
+            } else if (status == AMEDIA_OK) {
+                callbacks->onTrackFinished(this);
+            } else {
+                callbacks->onTrackError(this, status);
+            }
+        }
+    }).detach();
+
+    return true;
+}
+
+void MediaTrackTranscoder::stop(bool stopOnSyncSample) {
+    std::scoped_lock lock{mStateMutex};
+
+    if (mState == STARTED || (mStopRequest == STOP_ON_SYNC && !stopOnSyncSample)) {
+        mStopRequest = stopOnSyncSample ? STOP_ON_SYNC : STOP_NOW;
+        abortTranscodeLoop();
+        mState = STOPPED;
+    } else {
+        LOG(WARNING) << "TrackTranscoder must be started before stopped";
+    }
+}
+
+void MediaTrackTranscoder::notifyTrackFormatAvailable() {
+    if (auto callbacks = mTranscoderCallback.lock()) {
+        callbacks->onTrackFormatAvailable(this);
+    }
+}
+
+void MediaTrackTranscoder::onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample) {
+    std::scoped_lock lock{mSampleMutex};
+    if (mSampleConsumer == nullptr) {
+        mSampleQueue.enqueue(sample);
+    } else {
+        mSampleConsumer(sample);
+    }
+}
+
+void MediaTrackTranscoder::setSampleConsumer(
+        const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer) {
+    std::scoped_lock lock{mSampleMutex};
+    mSampleConsumer = sampleConsumer;
+
+    std::shared_ptr<MediaSample> sample;
+    while (!mSampleQueue.isEmpty() && !mSampleQueue.dequeue(&sample)) {
+        mSampleConsumer(sample);
+    }
+}
+
+}  // namespace android
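
A minimal sketch of driving one MediaTrackTranscoder above and routing its output into a MediaSampleWriter consumer (not part of the patch). Construction of a concrete transcoder (for example VideoTrackTranscoder or PassthroughTrackTranscoder) and of its MediaTrackTranscoderCallback is outside this section, so both are taken as given here.

    #include <memory>

    #include <media/MediaSampleWriter.h>
    #include <media/MediaTrackTranscoder.h>

    using namespace android;

    bool runOneTrack(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
                     const std::shared_ptr<MediaSampleReader>& reader, int trackIndex,
                     const std::shared_ptr<AMediaFormat>& dstFormat,
                     const MediaSampleWriter::MediaSampleConsumerFunction& writerConsumer) {
        // configure() may only be called once, and must precede start().
        if (transcoder->configure(reader, trackIndex, dstFormat) != AMEDIA_OK) {
            return false;
        }

        // Samples produced before a consumer is attached are buffered in the transcoder's
        // internal queue and flushed when setSampleConsumer() is called.
        transcoder->setSampleConsumer(writerConsumer);

        // start() spawns the transcoding thread; completion, stop and errors are reported
        // through the MediaTrackTranscoderCallback supplied at construction time.
        if (!transcoder->start()) {
            return false;
        }

        // To cancel later: transcoder->stop(false /* stopOnSyncSample */);
        return true;
    }
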
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
new file mode 100644
index 0000000..e20f7ab
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -0,0 +1,477 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoder"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+#include <sys/prctl.h>
+#include <unistd.h>
+
+namespace android {
+
+static std::shared_ptr<AMediaFormat> createVideoTrackFormat(AMediaFormat* srcFormat,
+                                                            AMediaFormat* options) {
+    if (srcFormat == nullptr || options == nullptr) {
+        LOG(ERROR) << "Cannot merge null formats";
+        return nullptr;
+    }
+
+    // ------- Define parameters to copy from the source track format -------
+    std::vector<AMediaFormatUtils::EntryCopier> srcParamsToCopy{
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_FRAME_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+    };
+
+    // If the destination codec is the same as the source codec, we can preserve profile and level
+    // from the source track as default values. Otherwise leave them unspecified.
+    const char *srcMime, *dstMime;
+    AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &srcMime);
+    if (!AMediaFormat_getString(options, AMEDIAFORMAT_KEY_MIME, &dstMime) ||
+        strcmp(srcMime, dstMime) == 0) {
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, String));
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, String));
+    }
+
+    // ------- Define parameters to copy from the caller's options -------
+    static const std::vector<AMediaFormatUtils::EntryCopier> kSupportedOptions{
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_DURATION, Int64),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_BIT_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_FRAME_RATE, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+            ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+    };
+
+    // ------- Copy parameters from source and options to the destination -------
+    auto trackFormat = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormatUtils::CopyFormatEntries(srcFormat, trackFormat.get(), srcParamsToCopy);
+    AMediaFormatUtils::CopyFormatEntries(options, trackFormat.get(), kSupportedOptions);
+    return trackFormat;
+}
+
+void MediaTranscoder::onThreadFinished(const void* thread, media_status_t threadStatus,
+                                       bool threadStopped) {
+    LOG(DEBUG) << "Thread " << thread << " finished with status " << threadStatus << " stopped "
+               << threadStopped;
+
+    // Stop all threads if one reports an error.
+    if (threadStatus != AMEDIA_OK) {
+        requestStop(false /* stopOnSync */);
+    }
+
+    std::scoped_lock lock{mThreadStateMutex};
+
+    // Record the change.
+    mThreadStates[thread] = DONE;
+    if (threadStatus != AMEDIA_OK && mTranscoderStatus == AMEDIA_OK) {
+        mTranscoderStatus = threadStatus;
+    }
+
+    mTranscoderStopped |= threadStopped;
+
+    // Check if all threads are done. Note that if all transcoders have stopped but the sample
+    // writer has not yet started, it never will.
+    bool transcodersDone = true;
+    ThreadState sampleWriterState = PENDING;
+    for (const auto& it : mThreadStates) {
+        LOG(DEBUG) << "  Thread " << it.first << " state" << it.second;
+        if (it.first == static_cast<const void*>(mSampleWriter.get())) {
+            sampleWriterState = it.second;
+        } else {
+            transcodersDone &= (it.second == DONE);
+        }
+    }
+    if (!transcodersDone || sampleWriterState == RUNNING) {
+        return;
+    }
+
+    // All done. Send callback asynchronously and wake up threads waiting in cancel/pause.
+    mThreadsDone = true;
+    if (!mCallbackSent) {
+        std::thread asyncNotificationThread{[this, self = shared_from_this(),
+                                             status = mTranscoderStatus,
+                                             stopped = mTranscoderStopped] {
+            prctl(PR_SET_NAME, (unsigned long)"TranscodCallbk", 0, 0, 0);
+
+            // If the transcoder was stopped, a caller is waiting in stop() or pause(),
+            // in which case we don't send a callback.
+            if (status != AMEDIA_OK) {
+                mCallbacks->onError(this, status);
+            } else if (!stopped) {
+                mCallbacks->onFinished(this);
+            }
+            mThreadsDoneSignal.notify_all();
+        }};
+        asyncNotificationThread.detach();
+        mCallbackSent = true;
+    }
+}
+
+void MediaTranscoder::onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " format available.";
+
+    std::scoped_lock lock{mTracksAddedMutex};
+    const void* sampleWriterPtr = static_cast<const void*>(mSampleWriter.get());
+
+    // Ignore duplicate format change.
+    if (mTracksAdded.count(transcoder) > 0) {
+        return;
+    }
+
+    // Add track to the writer.
+    auto consumer = mSampleWriter->addTrack(transcoder->getOutputFormat());
+    if (consumer == nullptr) {
+        LOG(ERROR) << "Unable to add track to sample writer.";
+        onThreadFinished(sampleWriterPtr, AMEDIA_ERROR_UNKNOWN, false /* stopped */);
+        return;
+    }
+
+    // The sample writer is not yet started, so notify the caller that progress is still being made.
+    if (mHeartBeatIntervalUs > 0) {
+        mCallbacks->onHeartBeat(this);
+    }
+
+    MediaTrackTranscoder* mutableTranscoder = const_cast<MediaTrackTranscoder*>(transcoder);
+    mutableTranscoder->setSampleConsumer(consumer);
+
+    mTracksAdded.insert(transcoder);
+    bool errorStarting = false;
+    if (mTracksAdded.size() == mTrackTranscoders.size()) {
+        // Enable sequential access mode on the sample reader to achieve optimal read performance.
+        // This has to wait until all tracks have delivered their output formats and the sample
+        // writer is started. Otherwise the tracks would not get their output sample queues
+        // drained, and the transcoder could hang because one track runs out of buffers and
+        // blocks the other tracks from reading source samples before they can output their formats.
+
+        std::scoped_lock lock{mThreadStateMutex};
+        // Don't start the sample writer if a stop has already been requested.
+        if (!mSampleWriterStopped) {
+            if (!mCancelled) {
+                mSampleReader->setEnforceSequentialAccess(true);
+            }
+            LOG(DEBUG) << "Starting sample writer.";
+            errorStarting = !mSampleWriter->start();
+            if (!errorStarting) {
+                mThreadStates[sampleWriterPtr] = RUNNING;
+            }
+        }
+    }
+
+    if (errorStarting) {
+        LOG(ERROR) << "Unable to start sample writer.";
+        onThreadFinished(sampleWriterPtr, AMEDIA_ERROR_UNKNOWN, false /* stopped */);
+    }
+}
+
+void MediaTranscoder::onTrackFinished(const MediaTrackTranscoder* transcoder) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " finished";
+    onThreadFinished(static_cast<const void*>(transcoder), AMEDIA_OK, false /* stopped */);
+}
+
+void MediaTranscoder::onTrackStopped(const MediaTrackTranscoder* transcoder) {
+    LOG(DEBUG) << "TrackTranscoder " << transcoder << " stopped";
+    onThreadFinished(static_cast<const void*>(transcoder), AMEDIA_OK, true /* stopped */);
+}
+
+void MediaTranscoder::onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status) {
+    LOG(ERROR) << "TrackTranscoder " << transcoder << " returned error " << status;
+    onThreadFinished(static_cast<const void*>(transcoder), status, false /* stopped */);
+}
+
+void MediaTranscoder::onFinished(const MediaSampleWriter* writer, media_status_t status) {
+    LOG(status == AMEDIA_OK ? DEBUG : ERROR) << "Sample writer finished with status " << status;
+    onThreadFinished(static_cast<const void*>(writer), status, false /* stopped */);
+}
+
+void MediaTranscoder::onStopped(const MediaSampleWriter* writer) {
+    LOG(DEBUG) << "Sample writer " << writer << " stopped";
+    onThreadFinished(static_cast<const void*>(writer), AMEDIA_OK, true /* stopped */);
+}
+
+void MediaTranscoder::onProgressUpdate(const MediaSampleWriter* writer __unused, int32_t progress) {
+    // Dispatch the progress update to the client.
+    mCallbacks->onProgressUpdate(this, progress);
+}
+
+void MediaTranscoder::onHeartBeat(const MediaSampleWriter* writer __unused) {
+    // Signal heart-beat to the client.
+    mCallbacks->onHeartBeat(this);
+}
+
+MediaTranscoder::MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks,
+                                 int64_t heartBeatIntervalUs, pid_t pid, uid_t uid)
+      : mCallbacks(callbacks), mHeartBeatIntervalUs(heartBeatIntervalUs), mPid(pid), mUid(uid) {}
+
+std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
+        const std::shared_ptr<CallbackInterface>& callbacks, int64_t heartBeatIntervalUs, pid_t pid,
+        uid_t uid, const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
+    if (pausedState != nullptr) {
+        LOG(INFO) << "Initializing from paused state.";
+    }
+    if (callbacks == nullptr) {
+        LOG(ERROR) << "Callbacks cannot be null";
+        return nullptr;
+    }
+
+    return std::shared_ptr<MediaTranscoder>(
+            new MediaTranscoder(callbacks, heartBeatIntervalUs, pid, uid));
+}
+
+media_status_t MediaTranscoder::configureSource(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid source fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
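+    // Determine the source size by seeking to the end of the file, then rewind so the sample
+    // reader starts parsing from the beginning.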
+    const size_t fileSize = lseek(fd, 0, SEEK_END);
+    lseek(fd, 0, SEEK_SET);
+
+    mSampleReader = MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Unable to parse source fd: " << fd;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    const size_t trackCount = mSampleReader->getTrackCount();
+    for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+        AMediaFormat* trackFormat = mSampleReader->getTrackFormat(static_cast<int>(trackIndex));
+        if (trackFormat == nullptr) {
+            LOG(ERROR) << "Track #" << trackIndex << " has no format";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        mSourceTrackFormats.emplace_back(trackFormat, &AMediaFormat_delete);
+    }
+
+    return AMEDIA_OK;
+}
+
+std::vector<std::shared_ptr<AMediaFormat>> MediaTranscoder::getTrackFormats() const {
+    // Return a deep copy of the formats to avoid the caller modifying our internal formats.
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats;
+    for (const std::shared_ptr<AMediaFormat>& sourceFormat : mSourceTrackFormats) {
+        AMediaFormat* copy = AMediaFormat_new();
+        AMediaFormat_copy(copy, sourceFormat.get());
+        trackFormats.emplace_back(copy, &AMediaFormat_delete);
+    }
+    return trackFormats;
+}
+
+media_status_t MediaTranscoder::configureTrackFormat(size_t trackIndex,
+                                                     AMediaFormat* destinationOptions) {
+    if (mSampleReader == nullptr) {
+        LOG(ERROR) << "Source must be configured before tracks";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (trackIndex >= mSourceTrackFormats.size()) {
+        LOG(ERROR) << "Track index " << trackIndex
+                   << " is out of bounds. Track count: " << mSourceTrackFormats.size();
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    std::shared_ptr<MediaTrackTranscoder> transcoder;
+    std::shared_ptr<AMediaFormat> trackFormat;
+
+    if (destinationOptions == nullptr) {
+        transcoder = std::make_shared<PassthroughTrackTranscoder>(shared_from_this());
+    } else {
+        AMediaFormat* srcTrackFormat = mSourceTrackFormats[trackIndex].get();
+
+        const char* srcMime = nullptr;
+        if (!AMediaFormat_getString(srcTrackFormat, AMEDIAFORMAT_KEY_MIME, &srcMime)) {
+            LOG(ERROR) << "Source track #" << trackIndex << " has no mime type";
+            return AMEDIA_ERROR_MALFORMED;
+        }
+
+        if (strncmp(srcMime, "video/", 6) != 0) {
+            LOG(ERROR) << "Only video tracks are supported for transcoding. Unable to configure "
+                          "track #"
+                       << trackIndex << " with mime " << srcMime;
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+
+        const char* dstMime = nullptr;
+        if (AMediaFormat_getString(destinationOptions, AMEDIAFORMAT_KEY_MIME, &dstMime)) {
+            if (strncmp(dstMime, "video/", 6) != 0) {
+                LOG(ERROR) << "Unable to convert media types for track #" << trackIndex << ", from "
+                           << srcMime << " to " << dstMime;
+                return AMEDIA_ERROR_UNSUPPORTED;
+            }
+        }
+
+        transcoder = VideoTrackTranscoder::create(shared_from_this(), mPid, mUid);
+
+        trackFormat = createVideoTrackFormat(srcTrackFormat, destinationOptions);
+        if (trackFormat == nullptr) {
+            LOG(ERROR) << "Unable to create video track format";
+            return AMEDIA_ERROR_UNKNOWN;
+        }
+    }
+
+    media_status_t status = mSampleReader->selectTrack(trackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to select track " << trackIndex;
+        return status;
+    }
+
+    status = transcoder->configure(mSampleReader, trackIndex, trackFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Configure track transcoder for track #" << trackIndex << " returned error "
+                   << status;
+        mSampleReader->unselectTrack(trackIndex);
+        return status;
+    }
+
+    std::scoped_lock lock{mThreadStateMutex};
+    mThreadStates[static_cast<const void*>(transcoder.get())] = PENDING;
+
+    mTrackTranscoders.emplace_back(std::move(transcoder));
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::configureDestination(int fd) {
+    if (fd < 0) {
+        LOG(ERROR) << "Invalid destination fd: " << fd;
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (mSampleWriter != nullptr) {
+        LOG(ERROR) << "Destination is already configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    mSampleWriter = MediaSampleWriter::Create();
+    const bool initOk = mSampleWriter->init(fd, shared_from_this(), mHeartBeatIntervalUs);
+
+    if (!initOk) {
+        LOG(ERROR) << "Unable to initialize sample writer with destination fd: " << fd;
+        mSampleWriter.reset();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
+    std::scoped_lock lock{mThreadStateMutex};
+    mThreadStates[static_cast<const void*>(mSampleWriter.get())] = PENDING;
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::start() {
+    if (mTrackTranscoders.size() < 1) {
+        LOG(ERROR) << "Unable to start, no tracks are configured.";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    } else if (mSampleWriter == nullptr) {
+        LOG(ERROR) << "Unable to start, destination is not configured";
+        return AMEDIA_ERROR_INVALID_OPERATION;
+    }
+
+    // Start transcoders
+    bool started = true;
+    {
+        std::scoped_lock lock{mThreadStateMutex};
+        for (auto& transcoder : mTrackTranscoders) {
+            if (!(started = transcoder->start())) {
+                break;
+            }
+            mThreadStates[static_cast<const void*>(transcoder.get())] = RUNNING;
+        }
+    }
+    if (!started) {
+        LOG(ERROR) << "Unable to start track transcoder.";
+        cancel();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::requestStop(bool stopOnSync) {
+    std::scoped_lock lock{mThreadStateMutex};
+    if (mCancelled) {
+        LOG(DEBUG) << "MediaTranscoder already cancelled";
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    if (!stopOnSync) {
+        mSampleWriterStopped = true;
+        mSampleWriter->stop();
+    }
+
+    mSampleReader->setEnforceSequentialAccess(false);
+    for (auto& transcoder : mTrackTranscoders) {
+        transcoder->stop(stopOnSync);
+    }
+
+    mCancelled = true;
+    return AMEDIA_OK;
+}
+
+void MediaTranscoder::waitForThreads() NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock lock{mThreadStateMutex};
+    while (!mThreadsDone) {
+        mThreadsDoneSignal.wait(lock);
+    }
+}
+
+media_status_t MediaTranscoder::pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState) {
+    media_status_t status = requestStop(true /* stopOnSync */);
+    if (status != AMEDIA_OK) {
+        return status;
+    }
+
+    waitForThreads();
+
+    // TODO: write internal states to parcel.
+    *pausedState = std::shared_ptr<::ndk::ScopedAParcel>(new ::ndk::ScopedAParcel());
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::cancel() {
+    media_status_t status = requestStop(false /* stopOnSync */);
+    if (status != AMEDIA_OK) {
+        return status;
+    }
+
+    waitForThreads();
+
+    // TODO: Release transcoders?
+    return AMEDIA_OK;
+}
+
+media_status_t MediaTranscoder::resume() {
+    // TODO: restore internal states from parcel.
+    return start();
+}
+
+}  // namespace android
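For reference, here is a minimal, hedged sketch of how a client might drive the API implemented in this file: create the transcoder with a callback object, configure the source, the per-track options and the destination, then start. MediaTranscoder.h is not part of this excerpt, so the CallbackInterface method list and the helper names (TranscoderCallbacks, transcodeFile) below are assumptions inferred from the calls made above, not authoritative declarations.

#include <media/MediaTranscoder.h>
#include <media/NdkMediaFormat.h>

#include <cstring>
#include <memory>
#include <vector>

using namespace android;

// Assumed shape of MediaTranscoder::CallbackInterface, inferred from the callback invocations
// in MediaTranscoder.cpp (onFinished, onError, onProgressUpdate, onHeartBeat).
class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
public:
    void onFinished(const MediaTranscoder* transcoder) { /* signal completion */ }
    void onError(const MediaTranscoder* transcoder, media_status_t error) { /* surface error */ }
    void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) { /* 0..100 */ }
    void onHeartBeat(const MediaTranscoder* transcoder) { /* transcoder is still alive */ }
};

media_status_t transcodeFile(int srcFd, int dstFd, pid_t pid, uid_t uid) {
    auto callbacks = std::make_shared<TranscoderCallbacks>();
    auto transcoder = MediaTranscoder::create(callbacks, -1 /* heartBeatIntervalUs, disabled */,
                                              pid, uid, nullptr /* pausedState */);

    media_status_t status = transcoder->configureSource(srcFd);
    if (status != AMEDIA_OK) return status;

    // Re-encode video tracks to AVC at 8 Mbps; pass every other track through (nullptr options).
    auto trackFormats = transcoder->getTrackFormats();
    for (size_t i = 0; i < trackFormats.size(); ++i) {
        const char* mime = nullptr;
        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);

        if (mime != nullptr && strncmp(mime, "video/", 6) == 0) {
            auto options = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
            AMediaFormat_setString(options.get(), AMEDIAFORMAT_KEY_MIME, "video/avc");
            AMediaFormat_setInt32(options.get(), AMEDIAFORMAT_KEY_BIT_RATE, 8 * 1000 * 1000);
            status = transcoder->configureTrackFormat(i, options.get());
        } else {
            status = transcoder->configureTrackFormat(i, nullptr /* passthrough */);
        }
        if (status != AMEDIA_OK) return status;
    }

    status = transcoder->configureDestination(dstFd);
    if (status != AMEDIA_OK) return status;

    // start() returns immediately; completion or failure arrives through the callbacks.
    return transcoder->start();
}

Note that start() only launches the track transcoder threads; as onTrackFormatAvailable() above shows, the sample writer is started once every configured track has reported its output format.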
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
new file mode 100644
index 0000000..e133cd6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkCommon"
+
+#include <android-base/logging.h>
+#include <media/NdkCommon.h>
+
+#include <cstdio>
+#include <cstring>
+#include <utility>
+
+/* TODO(b/153592281)
+ * Note: constants used by the native media tests but not available in the media NDK API.
+ */
+const char* AMEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+const char* AMEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char* AMEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char* AMEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char* AMEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
+const char* AMEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+const char* AMEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
+
+/* TODO(b/153592281) */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES = "max-bframes";
+
+/* TODO(lnilsson): Finalize value or adopt AMediaFormat key once available. */
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST = "color-transfer-request";
+const char* TBD_AMEDIACODEC_PARAMETER_KEY_BACKGROUND_MODE = "android._background-mode";
+
+namespace AMediaFormatUtils {
+
+#define DEFINE_FORMAT_VALUE_COPY_FUNC(_type, _typeName)                                      \
+    bool CopyFormatEntry##_typeName(const char* key, AMediaFormat* from, AMediaFormat* to) { \
+        _type value;                                                                         \
+        if (AMediaFormat_get##_typeName(from, key, &value)) {                                \
+            AMediaFormat_set##_typeName(to, key, value);                                     \
+            return true;                                                                     \
+        }                                                                                    \
+        return false;                                                                        \
+    }
+
+DEFINE_FORMAT_VALUE_COPY_FUNC(const char*, String);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_COPY_FUNC(int32_t, Int32);
+DEFINE_FORMAT_VALUE_COPY_FUNC(float, Float);
+
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to,
+                       const std::vector<EntryCopier>& entries) {
+    if (from == nullptr || to == nullptr) {
+        LOG(ERROR) << "Cannot copy null formats";
+        return;
+    } else if (entries.empty()) {
+        LOG(WARNING) << "No entries to copy";
+        return;
+    }
+
+    for (auto& entry : entries) {
+        if (!entry.copy(entry.key, from, to) && entry.copy2 != nullptr) {
+            entry.copy2(entry.key, from, to);
+        }
+    }
+}
+
+#define DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(_type, _typeName)                                  \
+    bool SetDefaultFormatValue##_typeName(const char* key, AMediaFormat* format, _type value) { \
+        _type tmp;                                                                              \
+        if (!AMediaFormat_get##_typeName(format, key, &tmp)) {                                  \
+            AMediaFormat_set##_typeName(format, key, value);                                    \
+            return true;                                                                        \
+        }                                                                                       \
+        return false;                                                                           \
+    }
+
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(float, Float);
+DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(int32_t, Int32);
+
+// Determines whether a track format describes HDR video content or not. The
+// logic is based on isHdr() in libstagefright/Utils.cpp.
+bool VideoIsHdr(AMediaFormat* format) {
+    // If VUI signals HDR content, this internal flag is set by the extractor.
+    int32_t isHdr;
+    if (AMediaFormat_getInt32(format, "android._is-hdr", &isHdr)) {
+        return isHdr;
+    }
+
+    // If the container supplied HDR static info without the color transfer set, assume HDR.
+    const char* hdrInfo;
+    int32_t transfer;
+    if ((AMediaFormat_getString(format, AMEDIAFORMAT_KEY_HDR_STATIC_INFO, &hdrInfo) ||
+         AMediaFormat_getString(format, "hdr10-plus-info", &hdrInfo)) &&
+        !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+        return true;
+    }
+
+    // Otherwise, check if an HDR transfer function is set.
+    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+        return transfer == COLOR_TRANSFER_ST2084 || transfer == COLOR_TRANSFER_HLG;
+    }
+
+    return false;
+}
+}  // namespace AMediaFormatUtils
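As a compact illustration of the helpers above, the sketch below combines CopyFormatEntries, SetDefaultFormatValueInt32 and VideoIsHdr. The EntryCopier struct and the ENTRY_COPIER/ENTRY_COPIER2 macros are declared in NdkCommon.h, which is not shown in this excerpt, so their exact shape is assumed from the call sites elsewhere in this change; buildDestinationFormat is a hypothetical helper name.

#include <media/NdkCommon.h>
#include <media/NdkMediaFormat.h>

#include <vector>

using namespace AMediaFormatUtils;

// Copy a fixed set of entries from the source format, then fill in defaults for anything the
// source did not provide.
void buildDestinationFormat(AMediaFormat* source, AMediaFormat* destination) {
    static const std::vector<EntryCopier> kEntries{
            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
            ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
    };
    CopyFormatEntries(source, destination, kEntries);

    // Only applied if the copied entries did not already set a frame rate.
    SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, destination, 30);

    if (VideoIsHdr(source)) {
        // The caller could, for example, request an HDR-to-SDR conversion here, as
        // VideoTrackTranscoder::configureDestinationFormat() does.
    }
}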
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
new file mode 100644
index 0000000..3335d6c
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <sys/prctl.h>
+
+namespace android {
+
+PassthroughTrackTranscoder::BufferPool::~BufferPool() {
+    for (auto it = mAddressSizeMap.begin(); it != mAddressSizeMap.end(); ++it) {
+        delete[] it->first;
+    }
+}
+
+uint8_t* PassthroughTrackTranscoder::BufferPool::getBufferWithSize(size_t minimumBufferSize)
+        NO_THREAD_SAFETY_ANALYSIS {
+    std::unique_lock lock(mMutex);
+
+    // Wait if the maximum number of buffers is allocated but none are free.
+    while (mAddressSizeMap.size() >= mMaxBufferCount && mFreeBufferMap.empty() && !mAborted) {
+        mCondition.wait(lock);
+    }
+
+    if (mAborted) {
+        return nullptr;
+    }
+
+    // Check if the free list contains a large enough buffer.
+    auto it = mFreeBufferMap.lower_bound(minimumBufferSize);
+    if (it != mFreeBufferMap.end()) {
+        uint8_t* buffer = it->second;
+        mFreeBufferMap.erase(it);
+        return buffer;
+    }
+
+    // If the maximum buffer count is reached, remove an existing free buffer.
+    if (mAddressSizeMap.size() >= mMaxBufferCount) {
+        auto it = mFreeBufferMap.begin();
+        mAddressSizeMap.erase(it->second);
+        delete[] it->second;
+        mFreeBufferMap.erase(it);
+    }
+
+    // Allocate a new buffer.
+    uint8_t* buffer = new (std::nothrow) uint8_t[minimumBufferSize];
+    if (buffer == nullptr) {
+        LOG(ERROR) << "Unable to allocate new buffer of size: " << minimumBufferSize;
+        return nullptr;
+    }
+
+    // Add the buffer to the tracking set.
+    mAddressSizeMap.emplace(buffer, minimumBufferSize);
+    return buffer;
+}
+
+void PassthroughTrackTranscoder::BufferPool::returnBuffer(uint8_t* buffer) {
+    std::scoped_lock lock(mMutex);
+
+    if (buffer == nullptr || mAddressSizeMap.find(buffer) == mAddressSizeMap.end()) {
+        LOG(WARNING) << "Ignoring untracked buffer " << buffer;
+        return;
+    }
+
+    mFreeBufferMap.emplace(mAddressSizeMap[buffer], buffer);
+    mCondition.notify_one();
+}
+
+void PassthroughTrackTranscoder::BufferPool::abort() {
+    std::scoped_lock lock(mMutex);
+    mAborted = true;
+    mCondition.notify_all();
+}
+
+media_status_t PassthroughTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat __unused) {
+    // Called by MediaTrackTranscoder. Passthrough doesn't care about the destination format,
+    // so just return OK.
+    return AMEDIA_OK;
+}
+
+media_status_t PassthroughTrackTranscoder::runTranscodeLoop(bool* stopped) {
+    prctl(PR_SET_NAME, (unsigned long)"PassthruThread", 0, 0, 0);
+
+    MediaSampleInfo info;
+    std::shared_ptr<MediaSample> sample;
+    bool eosReached = false;
+
+    // Notify the track format as soon as we start. It's the same as the source format.
+    notifyTrackFormatAvailable();
+
+    MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+            [bufferPool = mBufferPool](MediaSample* sample) {
+                bufferPool->returnBuffer(const_cast<uint8_t*>(sample->buffer));
+            };
+
+    // Move samples until EOS is reached or transcoding is stopped.
+    while (mStopRequest != STOP_NOW && !eosReached) {
+        media_status_t status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &info);
+
+        if (status == AMEDIA_OK) {
+            uint8_t* buffer = mBufferPool->getBufferWithSize(info.size);
+            if (buffer == nullptr) {
+                if (mStopRequest == STOP_NOW) {
+                    break;
+                }
+
+                LOG(ERROR) << "Unable to get buffer from pool";
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            sample = MediaSample::createWithReleaseCallback(
+                    buffer, 0 /* offset */, 0 /* bufferId */, bufferReleaseCallback);
+
+            status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, buffer, info.size);
+            if (status != AMEDIA_OK) {
+                LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+                return status;
+            }
+
+        } else if (status == AMEDIA_ERROR_END_OF_STREAM) {
+            sample = std::make_shared<MediaSample>();
+            eosReached = true;
+        } else {
+            LOG(ERROR) << "Unable to get next sample info. Aborting transcode.";
+            return status;
+        }
+
+        sample->info = info;
+        onOutputSampleAvailable(sample);
+
+        if (mStopRequest == STOP_ON_SYNC && (info.flags & SAMPLE_FLAG_SYNC_SAMPLE)) {
+            break;
+        }
+    }
+
+    if (mStopRequest != NONE && !eosReached) {
+        *stopped = true;
+    }
+    return AMEDIA_OK;
+}
+
+void PassthroughTrackTranscoder::abortTranscodeLoop() {
+    if (mStopRequest == STOP_NOW) {
+        mBufferPool->abort();
+    }
+}
+
+std::shared_ptr<AMediaFormat> PassthroughTrackTranscoder::getOutputFormat() const {
+    return mSourceFormat;
+}
+}  // namespace android
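The BufferPool above bounds memory use by capping the number of live buffers and blocking the passthrough thread until one is returned or the pool is aborted. The sketch below illustrates only the recycle half of that pattern with a hypothetical, unbounded ToyBufferPool (the real pool's constructor and members live in PassthroughTrackTranscoder.h, not shown here); MediaSample::createWithReleaseCallback and OnSampleReleasedCallback are used as in runTranscodeLoop() above, and the <media/MediaSample.h> include path is an assumption.

#include <media/MediaSample.h>

#include <cstdint>
#include <memory>
#include <mutex>
#include <utility>
#include <vector>

// Hypothetical, unbounded toy pool for illustration only. The real BufferPool additionally caps
// the number of live buffers and blocks getBufferWithSize() until a buffer is returned or the
// pool is aborted.
class ToyBufferPool {
public:
    ~ToyBufferPool() {
        for (auto& entry : mFreeBuffers) delete[] entry.first;
    }
    uint8_t* get(size_t size) {
        std::scoped_lock lock(mMutex);
        for (auto it = mFreeBuffers.begin(); it != mFreeBuffers.end(); ++it) {
            if (it->second >= size) {
                uint8_t* buffer = it->first;
                mFreeBuffers.erase(it);
                return buffer;
            }
        }
        return new uint8_t[size];
    }
    void put(uint8_t* buffer, size_t size) {
        std::scoped_lock lock(mMutex);
        mFreeBuffers.emplace_back(buffer, size);
    }

private:
    std::mutex mMutex;
    std::vector<std::pair<uint8_t*, size_t>> mFreeBuffers;
};

// Wrap a payload buffer so that releasing the MediaSample recycles it, mirroring the
// bufferReleaseCallback set up in PassthroughTrackTranscoder::runTranscodeLoop() above.
std::shared_ptr<MediaSample> makeRecyclableSample(const std::shared_ptr<ToyBufferPool>& pool,
                                                  size_t size) {
    uint8_t* buffer = pool->get(size);
    MediaSample::OnSampleReleasedCallback onReleased = [pool, size](MediaSample* sample) {
        pool->put(const_cast<uint8_t*>(sample->buffer), size);
    };
    return MediaSample::createWithReleaseCallback(buffer, 0 /* offset */, 0 /* bufferId */,
                                                  onReleased);
}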
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
new file mode 100644
index 0000000..7272a74
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -0,0 +1,649 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoder"
+
+#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <media/NdkCommon.h>
+#include <media/VideoTrackTranscoder.h>
+#include <sys/prctl.h>
+
+using namespace AMediaFormatUtils;
+
+namespace android {
+
+// Check that the codec sample flags have the expected NDK meaning.
+static_assert(SAMPLE_FLAG_CODEC_CONFIG == AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG,
+              "Sample flag mismatch: CODEC_CONFIG");
+static_assert(SAMPLE_FLAG_END_OF_STREAM == AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM,
+              "Sample flag mismatch: END_OF_STREAM");
+static_assert(SAMPLE_FLAG_PARTIAL_FRAME == AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME,
+              "Sample flag mismatch: PARTIAL_FRAME");
+
+// Color format defined by surface. (See MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface.)
+static constexpr int32_t kColorFormatSurface = 0x7f000789;
+// Default key frame interval in seconds.
+static constexpr float kDefaultKeyFrameIntervalSeconds = 1.0f;
+// Default codec operating rate, overridable through the
+// debug.media.transcoding.codec_max_operating_rate_* system properties read below.
+static int32_t kDefaultCodecOperatingRate720P = base::GetIntProperty(
+        "debug.media.transcoding.codec_max_operating_rate_720P", /*default*/ 480);
+static int32_t kDefaultCodecOperatingRate1080P = base::GetIntProperty(
+        "debug.media.transcoding.codec_max_operating_rate_1080P", /*default*/ 240);
+// Default codec priority.
+static constexpr int32_t kDefaultCodecPriority = 1;
+// Default bitrate, in case source estimation fails.
+static constexpr int32_t kDefaultBitrateMbps = 10 * 1000 * 1000;
+// Default frame rate.
+static constexpr int32_t kDefaultFrameRate = 30;
+
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
+    {
+        std::scoped_lock lock(mMutex);
+        if (mAborted) {
+            return;
+        }
+
+        if (front) {
+            mQueue.push_front(value);
+        } else {
+            mQueue.push_back(value);
+        }
+    }
+    mCondition.notify_one();
+}
+
+template <typename T>
+T VideoTrackTranscoder::BlockingQueue<T>::pop() {
+    std::unique_lock lock(mMutex);
+    while (mQueue.empty()) {
+        mCondition.wait(lock);
+    }
+    T value = mQueue.front();
+    mQueue.pop_front();
+    return value;
+}
+
+// Note: Do not call if another thread might be waiting in pop.
+template <typename T>
+void VideoTrackTranscoder::BlockingQueue<T>::abort() {
+    std::scoped_lock lock(mMutex);
+    mAborted = true;
+    mQueue.clear();
+}
+
+// The CodecWrapper class is used to let AMediaCodec instances outlive the transcoder object itself
+// by giving the codec a weak pointer to the transcoder. Codecs wrapped in this object are kept
+// alive by the transcoder and the codec's outstanding buffers. Once the transcoder stops and all
+// output buffers have been released by downstream components, the codec will also be released.
+class VideoTrackTranscoder::CodecWrapper {
+public:
+    CodecWrapper(AMediaCodec* codec, const std::weak_ptr<VideoTrackTranscoder>& transcoder)
+          : mCodec(codec), mTranscoder(transcoder), mCodecStarted(false) {}
+    ~CodecWrapper() {
+        if (mCodecStarted) {
+            AMediaCodec_stop(mCodec);
+        }
+        AMediaCodec_delete(mCodec);
+    }
+
+    AMediaCodec* getCodec() { return mCodec; }
+    std::shared_ptr<VideoTrackTranscoder> getTranscoder() const { return mTranscoder.lock(); };
+    void setStarted() { mCodecStarted = true; }
+
+private:
+    AMediaCodec* mCodec;
+    std::weak_ptr<VideoTrackTranscoder> mTranscoder;
+    bool mCodecStarted;
+};
+
+// Dispatch responses to codec callbacks onto the message queue.
+struct AsyncCodecCallbackDispatch {
+    static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            if (codec == transcoder->mDecoder) {
+                transcoder->mCodecMessageQueue.push(
+                        [transcoder, index] { transcoder->enqueueInputSample(index); });
+            }
+        }
+    }
+
+    static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfoPtr) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        AMediaCodecBufferInfo bufferInfo = *bufferInfoPtr;
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push([transcoder, index, codec, bufferInfo] {
+                if (codec == transcoder->mDecoder) {
+                    transcoder->transferBuffer(index, bufferInfo);
+                } else if (codec == transcoder->mEncoder->getCodec()) {
+                    transcoder->dequeueOutputSample(index, bufferInfo);
+                }
+            });
+        }
+    }
+
+    static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            const bool isDecoder = codec == transcoder->mDecoder;
+            const char* kCodecName = (isDecoder ? "Decoder" : "Encoder");
+            LOG(INFO) << kCodecName << " format changed: " << AMediaFormat_toString(format);
+            transcoder->mCodecMessageQueue.push([transcoder, format, isDecoder] {
+                transcoder->updateTrackFormat(format, isDecoder);
+            });
+        }
+    }
+
+    static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t error,
+                             int32_t actionCode, const char* detail) {
+        LOG(ERROR) << "Error from codec " << codec << ", userdata " << userdata << ", error "
+                   << error << ", action " << actionCode << ", detail " << detail;
+        VideoTrackTranscoder::CodecWrapper* wrapper =
+                static_cast<VideoTrackTranscoder::CodecWrapper*>(userdata);
+        if (auto transcoder = wrapper->getTranscoder()) {
+            transcoder->mCodecMessageQueue.push(
+                    [transcoder, error] { transcoder->mStatus = error; }, true);
+        }
+    }
+};
+
+// static
+std::shared_ptr<VideoTrackTranscoder> VideoTrackTranscoder::create(
+        const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback, pid_t pid,
+        uid_t uid) {
+    return std::shared_ptr<VideoTrackTranscoder>(
+            new VideoTrackTranscoder(transcoderCallback, pid, uid));
+}
+
+VideoTrackTranscoder::~VideoTrackTranscoder() {
+    if (mDecoder != nullptr) {
+        AMediaCodec_delete(mDecoder);
+    }
+
+    if (mSurface != nullptr) {
+        ANativeWindow_release(mSurface);
+    }
+}
+
+// Look up the default operating rate based on resolution.
+static int32_t getDefaultOperatingRate(AMediaFormat* encoderFormat) {
+    int32_t width, height;
+    if (AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_WIDTH, &width) && (width > 0) &&
+        AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_HEIGHT, &height) && (height > 0)) {
+        if ((width == 1280 && height == 720) || (width == 720 && height == 1280)) {
+            return kDefaultCodecOperatingRate720P;
+        } else if ((width == 1920 && height == 1080) || (width == 1080 && height == 1920)) {
+            return kDefaultCodecOperatingRate1080P;
+        } else {
+            LOG(WARNING) << "Could not find default operating rate: " << width << " " << height;
+            // Don't set operating rate if the correct dimensions are not found.
+        }
+    } else {
+        LOG(ERROR) << "Failed to get default operating rate due to missing resolution";
+    }
+    return -1;
+}
+
+// Creates and configures the codecs.
+media_status_t VideoTrackTranscoder::configureDestinationFormat(
+        const std::shared_ptr<AMediaFormat>& destinationFormat) {
+    media_status_t status = AMEDIA_OK;
+
+    if (destinationFormat == nullptr) {
+        LOG(ERROR) << "Destination format is null, use passthrough transcoder";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    AMediaFormat* encoderFormat = AMediaFormat_new();
+    if (!encoderFormat || AMediaFormat_copy(encoderFormat, destinationFormat.get()) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy destination format";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (!AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, &mConfiguredBitrate)) {
+        status = mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &mConfiguredBitrate);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to estimate bitrate. Using default " << kDefaultBitrateMbps;
+            mConfiguredBitrate = kDefaultBitrateMbps;
+        }
+
+        LOG(INFO) << "Configuring bitrate " << mConfiguredBitrate;
+        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_BIT_RATE, mConfiguredBitrate);
+    }
+
+    SetDefaultFormatValueFloat(AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, encoderFormat,
+                               kDefaultKeyFrameIntervalSeconds);
+
+    int32_t operatingRate = getDefaultOperatingRate(encoderFormat);
+
+    if (operatingRate != -1) {
+        float tmpf;
+        int32_t tmpi;
+        if (!AMediaFormat_getFloat(encoderFormat, AMEDIAFORMAT_KEY_OPERATING_RATE, &tmpf) &&
+            !AMediaFormat_getInt32(encoderFormat, AMEDIAFORMAT_KEY_OPERATING_RATE, &tmpi)) {
+            AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_OPERATING_RATE, operatingRate);
+        }
+    }
+
+    SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_PRIORITY, encoderFormat, kDefaultCodecPriority);
+    SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, encoderFormat, kDefaultFrameRate);
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
+
+    // Always encode without rotation. The rotation is transferred directly to the
+    // MediaSampleWriter track format, and MediaSampleWriter will call AMediaMuxer_setOrientationHint.
+    AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_ROTATION, 0);
+
+    // Request encoder to use background priorities by default.
+    SetDefaultFormatValueInt32(TBD_AMEDIACODEC_PARAMETER_KEY_BACKGROUND_MODE, encoderFormat,
+                               1 /* true */);
+
+    mDestinationFormat = std::shared_ptr<AMediaFormat>(encoderFormat, &AMediaFormat_delete);
+
+    // Create and configure the encoder.
+    const char* destinationMime = nullptr;
+    bool ok = AMediaFormat_getString(mDestinationFormat.get(), AMEDIAFORMAT_KEY_MIME,
+                                     &destinationMime);
+    if (!ok) {
+        LOG(ERROR) << "Destination MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
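+    // Prefer the *ForClient codec factories, which create the codec on behalf of the given
+    // pid/uid, when running on API __TRANSCODING_MIN_API__ (31) or newer; otherwise fall back
+    // to the regular factories.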
+#define __TRANSCODING_MIN_API__ 31
+
+    AMediaCodec* encoder;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        encoder = AMediaCodec_createEncoderByTypeForClient(destinationMime, mPid, mUid);
+    } else {
+        encoder = AMediaCodec_createEncoderByType(destinationMime);
+    }
+    if (encoder == nullptr) {
+        LOG(ERROR) << "Unable to create encoder for type " << destinationMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+    mEncoder = std::make_shared<CodecWrapper>(encoder, shared_from_this());
+
+    LOG(INFO) << "Configuring encoder with: " << AMediaFormat_toString(mDestinationFormat.get());
+    status = AMediaCodec_configure(mEncoder->getCodec(), mDestinationFormat.get(),
+                                   NULL /* surface */, NULL /* crypto */,
+                                   AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video encoder: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_createInputSurface(mEncoder->getCodec(), &mSurface);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to create an encoder input surface: %d" << status;
+        return status;
+    }
+
+    // Create and configure the decoder.
+    const char* sourceMime = nullptr;
+    ok = AMediaFormat_getString(mSourceFormat.get(), AMEDIAFORMAT_KEY_MIME, &sourceMime);
+    if (!ok) {
+        LOG(ERROR) << "Source MIME type is required for transcoding.";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        mDecoder = AMediaCodec_createDecoderByTypeForClient(sourceMime, mPid, mUid);
+    } else {
+        mDecoder = AMediaCodec_createDecoderByType(sourceMime);
+    }
+    if (mDecoder == nullptr) {
+        LOG(ERROR) << "Unable to create decoder for type " << sourceMime;
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    auto decoderFormat = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    if (!decoderFormat ||
+        AMediaFormat_copy(decoderFormat.get(), mSourceFormat.get()) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy source format";
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    // Request decoder to convert HDR content to SDR.
+    const bool sourceIsHdr = VideoIsHdr(mSourceFormat.get());
+    if (sourceIsHdr) {
+        AMediaFormat_setInt32(decoderFormat.get(),
+                              TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                              COLOR_TRANSFER_SDR_VIDEO);
+    }
+
+    // Prevent decoder from overwriting frames that the encoder has not yet consumed.
+    AMediaFormat_setInt32(decoderFormat.get(), TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP, 0);
+
+    // Copy over configurations that apply to both encoder and decoder.
+    static const std::vector<EntryCopier> kEncoderEntriesToCopy{
+            ENTRY_COPIER2(AMEDIAFORMAT_KEY_OPERATING_RATE, Float, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_PRIORITY, Int32),
+            ENTRY_COPIER(TBD_AMEDIACODEC_PARAMETER_KEY_BACKGROUND_MODE, Int32),
+    };
+    CopyFormatEntries(mDestinationFormat.get(), decoderFormat.get(), kEncoderEntriesToCopy);
+
+    LOG(INFO) << "Configuring decoder with: " << AMediaFormat_toString(decoderFormat.get());
+    status = AMediaCodec_configure(mDecoder, decoderFormat.get(), mSurface, NULL /* crypto */,
+                                   0 /* flags */);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to configure video decoder: " << status;
+        return status;
+    }
+
+    if (sourceIsHdr) {
+        bool supported = false;
+        AMediaFormat* inputFormat = AMediaCodec_getInputFormat(mDecoder);
+
+        if (inputFormat != nullptr) {
+            int32_t transferFunc;
+            supported = AMediaFormat_getInt32(inputFormat,
+                                              TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                                              &transferFunc) &&
+                        transferFunc == COLOR_TRANSFER_SDR_VIDEO;
+            AMediaFormat_delete(inputFormat);
+        }
+
+        if (!supported) {
+            LOG(ERROR) << "HDR to SDR conversion unsupported by the codec";
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+    }
+
+    // Configure codecs to run in async mode.
+    AMediaCodecOnAsyncNotifyCallback asyncCodecCallbacks = {
+            .onAsyncInputAvailable = AsyncCodecCallbackDispatch::onAsyncInputAvailable,
+            .onAsyncOutputAvailable = AsyncCodecCallbackDispatch::onAsyncOutputAvailable,
+            .onAsyncFormatChanged = AsyncCodecCallbackDispatch::onAsyncFormatChanged,
+            .onAsyncError = AsyncCodecCallbackDispatch::onAsyncError};
+
+    // Note: The decoder does not need its own wrapper because its lifetime is tied to the
+    // transcoder. But the same callbacks are reused for decoder and encoder so we pass the encoder
+    // wrapper as userdata here but never read the codec from it in the callback.
+    status = AMediaCodec_setAsyncNotifyCallback(mDecoder, asyncCodecCallbacks, mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set decoder to async mode: " << status;
+        return status;
+    }
+
+    status = AMediaCodec_setAsyncNotifyCallback(mEncoder->getCodec(), asyncCodecCallbacks,
+                                                mEncoder.get());
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to set encoder to async mode: " << status;
+        return status;
+    }
+
+    return AMEDIA_OK;
+}
+
+void VideoTrackTranscoder::enqueueInputSample(int32_t bufferIndex) {
+    media_status_t status = AMEDIA_OK;
+
+    if (mEosFromSource) {
+        return;
+    }
+
+    status = mMediaSampleReader->getSampleInfoForTrack(mTrackIndex, &mSampleInfo);
+    if (status != AMEDIA_OK && status != AMEDIA_ERROR_END_OF_STREAM) {
+        LOG(ERROR) << "Error getting next sample info: " << status;
+        mStatus = status;
+        return;
+    }
+    const bool endOfStream = (status == AMEDIA_ERROR_END_OF_STREAM);
+
+    if (!endOfStream) {
+        size_t bufferSize = 0;
+        uint8_t* sourceBuffer = AMediaCodec_getInputBuffer(mDecoder, bufferIndex, &bufferSize);
+        if (sourceBuffer == nullptr) {
+            LOG(ERROR) << "Decoder returned a NULL input buffer.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        } else if (bufferSize < mSampleInfo.size) {
+            LOG(ERROR) << "Decoder returned an input buffer that is smaller than the sample.";
+            mStatus = AMEDIA_ERROR_UNKNOWN;
+            return;
+        }
+
+        status = mMediaSampleReader->readSampleDataForTrack(mTrackIndex, sourceBuffer,
+                                                            mSampleInfo.size);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to read next sample data. Aborting transcode.";
+            mStatus = status;
+            return;
+        }
+
+        if (mSampleInfo.size) {
+            ++mInputFrameCount;
+        }
+    } else {
+        LOG(DEBUG) << "EOS from source.";
+        mEosFromSource = true;
+    }
+
+    status = AMediaCodec_queueInputBuffer(mDecoder, bufferIndex, 0, mSampleInfo.size,
+                                          mSampleInfo.presentationTimeUs, mSampleInfo.flags);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to queue input buffer for decode: " << status;
+        mStatus = status;
+        return;
+    }
+}
+
+void VideoTrackTranscoder::transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
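+        // Rendering a non-empty output buffer pushes the decoded frame onto mSurface, which is
+        // the encoder's input surface created in configureDestinationFormat().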
+        bool needsRender = bufferInfo.size > 0;
+        AMediaCodec_releaseOutputBuffer(mDecoder, bufferIndex, needsRender);
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from decoder.";
+        media_status_t status = AMediaCodec_signalEndOfInputStream(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "SignalEOS on encoder returned error: " << status;
+            mStatus = status;
+        }
+    }
+}
+
+void VideoTrackTranscoder::dequeueOutputSample(int32_t bufferIndex,
+                                               AMediaCodecBufferInfo bufferInfo) {
+    if (bufferIndex >= 0) {
+        size_t sampleSize = 0;
+        uint8_t* buffer =
+                AMediaCodec_getOutputBuffer(mEncoder->getCodec(), bufferIndex, &sampleSize);
+
+        MediaSample::OnSampleReleasedCallback bufferReleaseCallback =
+                [encoder = mEncoder](MediaSample* sample) {
+                    AMediaCodec_releaseOutputBuffer(encoder->getCodec(), sample->bufferId,
+                                                    false /* render */);
+                };
+
+        std::shared_ptr<MediaSample> sample = MediaSample::createWithReleaseCallback(
+                buffer, bufferInfo.offset, bufferIndex, bufferReleaseCallback);
+        sample->info.size = bufferInfo.size;
+        sample->info.flags = bufferInfo.flags;
+        sample->info.presentationTimeUs = bufferInfo.presentationTimeUs;
+
+        if (bufferInfo.size > 0 && (bufferInfo.flags & SAMPLE_FLAG_CODEC_CONFIG) == 0) {
+            ++mOutputFrameCount;
+        }
+        onOutputSampleAvailable(sample);
+
+        mLastSampleWasSync = sample->info.flags & SAMPLE_FLAG_SYNC_SAMPLE;
+    }
+
+    if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+        LOG(DEBUG) << "EOS from encoder.";
+        mEosFromEncoder = true;
+
+        if (mInputFrameCount != mOutputFrameCount) {
+            LOG(WARNING) << "Input / Output frame count mismatch: " << mInputFrameCount << " vs "
+                         << mOutputFrameCount;
+            if (mInputFrameCount > 0 && mOutputFrameCount == 0) {
+                LOG(ERROR) << "Encoder did not produce any output frames.";
+                mStatus = AMEDIA_ERROR_UNKNOWN;
+            }
+        }
+    }
+}
+
+void VideoTrackTranscoder::updateTrackFormat(AMediaFormat* outputFormat, bool fromDecoder) {
+    if (fromDecoder) {
+        static const std::vector<AMediaFormatUtils::EntryCopier> kValuesToCopy{
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_RANGE, Int32),
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_STANDARD, Int32),
+                ENTRY_COPIER(AMEDIAFORMAT_KEY_COLOR_TRANSFER, Int32),
+        };
+        AMediaFormat* params = AMediaFormat_new();
+        if (params != nullptr) {
+            AMediaFormatUtils::CopyFormatEntries(outputFormat, params, kValuesToCopy);
+            if (AMediaCodec_setParameters(mEncoder->getCodec(), params) != AMEDIA_OK) {
+                LOG(WARNING) << "Unable to update encoder with color information";
+            }
+            AMediaFormat_delete(params);
+        }
+        return;
+    }
+
+    if (mActualOutputFormat != nullptr) {
+        LOG(WARNING) << "Ignoring duplicate format change.";
+        return;
+    }
+
+    AMediaFormat* formatCopy = AMediaFormat_new();
+    if (!formatCopy || AMediaFormat_copy(formatCopy, outputFormat) != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to copy outputFormat";
+        AMediaFormat_delete(formatCopy);
+        mStatus = AMEDIA_ERROR_INVALID_PARAMETER;
+        return;
+    }
+
+    // Generate the actual track format for the muxer based on the encoder output format,
+    // since much of the vital information (e.g. CSD) comes in the encoder format.
+    // Transfer necessary fields from the user-configured track format (derived from the
+    // source track format and the user's transcoding request) where needed.
+
+    // Transfer SAR settings:
+    // If mDestinationFormat has SAR set, it means the original source has SAR specified
+    // at container level. This is supposed to override any SAR settings in the bitstream,
+    // thus should always be transferred to the container of the transcoded file.
+    int32_t sarWidth, sarHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_WIDTH, &sarWidth) &&
+        (sarWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_SAR_HEIGHT, &sarHeight) &&
+        (sarHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_WIDTH, sarWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_SAR_HEIGHT, sarHeight);
+    }
+    // Transfer DAR settings.
+    int32_t displayWidth, displayHeight;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_WIDTH, &displayWidth) &&
+        (displayWidth > 0) &&
+        AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                              &displayHeight) &&
+        (displayHeight > 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_WIDTH, displayWidth);
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, displayHeight);
+    }
+
+    // Transfer rotation settings.
+    // Note that muxer itself doesn't take rotation from the track format. It requires
+    // AMediaMuxer_setOrientationHint to set the rotation. Here we pass the rotation to
+    // MediaSampleWriter using the track format. MediaSampleWriter will then call
+    // AMediaMuxer_setOrientationHint as needed.
+    int32_t rotation;
+    if (AMediaFormat_getInt32(mSourceFormat.get(), AMEDIAFORMAT_KEY_ROTATION, &rotation) &&
+        (rotation != 0)) {
+        AMediaFormat_setInt32(formatCopy, AMEDIAFORMAT_KEY_ROTATION, rotation);
+    }
+
+    // Transfer track duration.
+    // Preserve the source track duration by sending it to MediaSampleWriter.
+    int64_t durationUs;
+    if (AMediaFormat_getInt64(mSourceFormat.get(), AMEDIAFORMAT_KEY_DURATION, &durationUs) &&
+        durationUs > 0) {
+        AMediaFormat_setInt64(formatCopy, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
+
+    // TODO: transfer other fields as required.
+
+    mActualOutputFormat = std::shared_ptr<AMediaFormat>(formatCopy, &AMediaFormat_delete);
+    LOG(INFO) << "Actual output format: " << AMediaFormat_toString(formatCopy);
+
+    notifyTrackFormatAvailable();
+}
+
+media_status_t VideoTrackTranscoder::runTranscodeLoop(bool* stopped) {
+    prctl(PR_SET_NAME, (unsigned long)"VideTranscodTrd", 0, 0, 0);
+
+    // Push the decoder and encoder start calls as two messages, so that they are subject to the
+    // stop request as well. If the session is cancelled (or paused) immediately after start,
+    // we don't need to waste time starting and then stopping the codecs.
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mDecoder);
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video decoder: " << status;
+            mStatus = status;
+        }
+    });
+
+    mCodecMessageQueue.push([this] {
+        media_status_t status = AMediaCodec_start(mEncoder->getCodec());
+        if (status != AMEDIA_OK) {
+            LOG(ERROR) << "Unable to start video encoder: " << status;
+            mStatus = status;
+        }
+        mEncoder->setStarted();
+    });
+
+    // Process codec events until EOS is reached, transcoding is stopped or an error occurs.
+    while (mStopRequest != STOP_NOW && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        std::function<void()> message = mCodecMessageQueue.pop();
+        message();
+
+        if (mStopRequest == STOP_ON_SYNC && mLastSampleWasSync) {
+            break;
+        }
+    }
+
+    mCodecMessageQueue.abort();
+    AMediaCodec_stop(mDecoder);
+
+    // Signal if transcoding was stopped before it finished.
+    if (mStopRequest != NONE && !mEosFromEncoder && mStatus == AMEDIA_OK) {
+        *stopped = true;
+    }
+
+    return mStatus;
+}
+
+void VideoTrackTranscoder::abortTranscodeLoop() {
+    if (mStopRequest == STOP_NOW) {
+        // Wake up transcoder thread.
+        mCodecMessageQueue.push([] {}, true /* front */);
+    }
+}
+
+std::shared_ptr<AMediaFormat> VideoTrackTranscoder::getOutputFormat() const {
+    return mActualOutputFormat;
+}
+
+}  // namespace android
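The CodecWrapper comment above describes the lifetime trick used throughout this file: each output sample's release callback captures a shared_ptr to the wrapper, so the encoder outlives the transcoder until the last downstream consumer releases its buffers. A generic sketch of that idiom, with hypothetical Codec/Sample stand-ins rather than real NDK types, might look like this:

#include <functional>
#include <memory>
#include <utility>

// Hypothetical stand-ins, for illustration only.
struct Codec {
    ~Codec() { /* AMediaCodec_stop() and AMediaCodec_delete() would go here */ }
};

struct Sample {
    Sample(int bufferId, std::function<void(int)> onRelease)
          : mBufferId(bufferId), mOnRelease(std::move(onRelease)) {}
    ~Sample() { mOnRelease(mBufferId); }  // mirrors MediaSample's release callback

    int mBufferId;
    std::function<void(int)> mOnRelease;
};

std::shared_ptr<Sample> dequeueSample(const std::shared_ptr<Codec>& codec, int bufferId) {
    // The release lambda captures the codec wrapper by shared_ptr, just like
    // dequeueOutputSample() captures mEncoder above. Even if the owner drops its reference,
    // the codec stays alive until the last outstanding sample is destroyed.
    return std::make_shared<Sample>(bufferId, [codec](int id) {
        // AMediaCodec_releaseOutputBuffer(...) would be called here.
        (void)codec;
        (void)id;
    });
}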
diff --git a/media/libmediatranscoding/transcoder/benchmark/Android.bp b/media/libmediatranscoding/transcoder/benchmark/Android.bp
new file mode 100644
index 0000000..459f0ae
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/Android.bp
@@ -0,0 +1,41 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "benchmarkdefaults",
+    shared_libs: [
+        "libmediandk",
+        "libbase",
+        "libbinder_ndk",
+        "libbinder",
+        "libutils",
+        "libnativewindow",
+    ],
+    static_libs: ["libmediatranscoder", "libgoogle-benchmark"],
+    test_config_template: "AndroidTestTemplate.xml",
+    test_suites: ["device-tests", "TranscoderBenchmarks"],
+}
+
+cc_test {
+    name: "MediaTranscoderBenchmark",
+    srcs: ["MediaTranscoderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+    name: "MediaSampleReaderBenchmark",
+    srcs: ["MediaSampleReaderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
+
+cc_test {
+    name: "MediaTrackTranscoderBenchmark",
+    srcs: ["MediaTrackTranscoderBenchmark.cpp"],
+    defaults: ["benchmarkdefaults"],
+}
diff --git a/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
new file mode 100644
index 0000000..683f07b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Benchmark configuration for {MODULE}">
+    <option name="test-suite-tag" value="TranscoderBenchmarks" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push-file" key="{MODULE}" value="/data/local/tmp/{MODULE}" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libmediatranscoding/transcoder/benchmark/TranscodingBenchmark-1.2.zip?unzip=true"
+            value="/data/local/tmp/TranscodingBenchmark/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GoogleBenchmarkTest" >
+        <option name="native-benchmark-device-path" value="/data/local/tmp" />
+        <option name="benchmark-module-name" value="{MODULE}" />
+    </test>
+</configuration>
+
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
new file mode 100644
index 0000000..f0b9304
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * MediaSampleReader benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaSampleReaderBenchmark/MediaSampleReaderBenchmark
+ */
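+
+/*
+ * To run a single benchmark during development, the standard Google Benchmark filter flag can be
+ * used (illustrative example; the flag belongs to the benchmark framework, not this file):
+ *
+ *      $ adb shell /data/nativetest64/MediaSampleReaderBenchmark/MediaSampleReaderBenchmark \
+ *            --benchmark_filter=BM_MediaSampleReader_Video
+ */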
+
+#define LOG_TAG "MediaSampleReaderBenchmark"
+
+#include <android-base/logging.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <unistd.h>
+
+#include <thread>
+
+using namespace android;
+
+static void ReadMediaSamples(benchmark::State& state, const std::string& srcFileName,
+                             bool readAudio, bool sequentialAccess = false) {
+    // Asset directory.
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    int srcFd = 0;
+    std::string srcPath = kAssetDirectory + srcFileName;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        state.SkipWithError("Unable to open source file");
+        return;
+    }
+
+    const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+    lseek(srcFd, 0, SEEK_SET);
+
+    for (auto _ : state) {
+        auto sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0, fileSize);
+        if (sampleReader->setEnforceSequentialAccess(sequentialAccess) != AMEDIA_OK) {
+            state.SkipWithError("setEnforceSequentialAccess failed");
+            return;
+        }
+
+        // Select tracks.
+        std::vector<int> trackIndices;
+        for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+            const char* mime = nullptr;
+
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                int32_t frameCount;
+                if (AMediaFormat_getInt32(trackFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+                    state.counters["VideoFrameRate"] =
+                            benchmark::Counter(frameCount, benchmark::Counter::kIsRate);
+                }
+            } else if (!readAudio && strncmp(mime, "audio/", 6) == 0) {
+                continue;
+            }
+
+            trackIndices.push_back(trackIndex);
+            sampleReader->selectTrack(trackIndex);
+        }
+
+        // Start threads.
+        std::vector<std::thread> trackThreads;
+        for (auto trackIndex : trackIndices) {
+            trackThreads.emplace_back([trackIndex, sampleReader, &state] {
+                LOG(INFO) << "Track " << trackIndex << " started";
+                MediaSampleInfo info;
+
+                size_t bufferSize = 0;
+                std::unique_ptr<uint8_t[]> buffer;
+
+                while (true) {
+                    media_status_t status = sampleReader->getSampleInfoForTrack(trackIndex, &info);
+                    if (status == AMEDIA_ERROR_END_OF_STREAM) {
+                        break;
+                    }
+
+                    if (info.size > bufferSize) {
+                        bufferSize = info.size;
+                        buffer.reset(new uint8_t[bufferSize]);
+                    }
+
+                    status = sampleReader->readSampleDataForTrack(trackIndex, buffer.get(),
+                                                                  bufferSize);
+                    if (status != AMEDIA_OK) {
+                        state.SkipWithError("Error reading sample data");
+                        break;
+                    }
+                }
+
+                LOG(INFO) << "Track " << trackIndex << " finished";
+            });
+        }
+
+        // Join threads.
+        for (auto& thread : trackThreads) {
+            thread.join();
+        }
+    }
+
+    close(srcFd);
+}
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+static void BM_MediaSampleReader_AudioVideo_Parallel(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     true /* readAudio */);
+}
+
+static void BM_MediaSampleReader_AudioVideo_Sequential(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     true /* readAudio */, true /* sequentialAccess */);
+}
+
+static void BM_MediaSampleReader_Video(benchmark::State& state) {
+    ReadMediaSamples(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                     false /* readAudio */);
+}
+
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Parallel);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_AudioVideo_Sequential);
+TRANSCODER_BENCHMARK(BM_MediaSampleReader_Video);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
new file mode 100644
index 0000000..d6ed2c6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
@@ -0,0 +1,456 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media track transcoder benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaTrackTranscoderBenchmark/MediaTrackTranscoderBenchmark
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderBenchmark"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <benchmark/benchmark.h>
+#include <fcntl.h>
+#include <media/MediaSampleReader.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkCommon.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+using namespace android;
+
+typedef enum {
+    kVideo,
+    kAudio,
+} MediaType;
+
+class TrackTranscoderCallbacks : public MediaTrackTranscoderCallback {
+public:
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) override {}
+
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) override {
+        std::unique_lock lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
+        std::unique_lock lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder __unused,
+                              media_status_t status) override {
+        std::unique_lock lock(mMutex);
+        mFinished = true;
+        mStatus = status;
+        mCondition.notify_all();
+    }
+
+    void waitForTranscodingFinished() {
+        std::unique_lock lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+/**
+ * MockSampleReader holds a ringbuffer of the first samples in the provided source track. Samples
+ * are returned to the caller from the ringbuffer in a round-robin fashion with increasing
+ * timestamps. The number of samples returned before EOS matches the number of frames in the source
+ * track.
+ */
+class MockSampleReader : public MediaSampleReader {
+public:
+    static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size) {
+        AMediaExtractor* extractor = AMediaExtractor_new();
+        media_status_t status = AMediaExtractor_setDataSourceFd(extractor, fd, offset, size);
+        if (status != AMEDIA_OK) return nullptr;
+
+        auto sampleReader = std::shared_ptr<MockSampleReader>(new MockSampleReader(extractor));
+        return sampleReader;
+    }
+
+    AMediaFormat* getFileFormat() override { return AMediaExtractor_getFileFormat(mExtractor); }
+
+    size_t getTrackCount() const override { return AMediaExtractor_getTrackCount(mExtractor); }
+
+    AMediaFormat* getTrackFormat(int trackIndex) override {
+        return AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+    }
+
+    media_status_t selectTrack(int trackIndex) override {
+        if (mSelectedTrack >= 0) return AMEDIA_ERROR_UNSUPPORTED;
+        mSelectedTrack = trackIndex;
+
+        media_status_t status = AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        if (status != AMEDIA_OK) return status;
+
+        // Get the sample count.
+        AMediaFormat* format = getTrackFormat(trackIndex);
+        const bool haveSampleCount =
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_COUNT, &mSampleCount);
+        AMediaFormat_delete(format);
+
+        if (!haveSampleCount) {
+            LOG(ERROR) << "No sample count in track format.";
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+
+        // Buffer samples.
+        const int32_t targetBufferCount = 60;
+        std::unique_ptr<uint8_t[]> buffer;
+        MediaSampleInfo info;
+        while (true) {
+            info.presentationTimeUs = AMediaExtractor_getSampleTime(mExtractor);
+            info.flags = AMediaExtractor_getSampleFlags(mExtractor);
+            info.size = AMediaExtractor_getSampleSize(mExtractor);
+
+            // Finish buffering either after reading all of the samples in the track, or after
+            // completing the GOP that satisfies the target buffer count.
+            if (mSamples.size() == mSampleCount ||
+                (mSamples.size() >= targetBufferCount && info.flags & SAMPLE_FLAG_SYNC_SAMPLE)) {
+                break;
+            }
+
+            buffer.reset(new uint8_t[info.size]);
+
+            ssize_t bytesRead = AMediaExtractor_readSampleData(mExtractor, buffer.get(), info.size);
+            if (bytesRead != info.size) {
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            mSamples.emplace_back(std::move(buffer), info);
+
+            AMediaExtractor_advance(mExtractor);
+        }
+
+        mFirstPtsUs = mSamples[0].second.presentationTimeUs;
+        mPtsDiff = mSamples[1].second.presentationTimeUs - mSamples[0].second.presentationTimeUs;
+
+        return AMEDIA_OK;
+    }
+
+    media_status_t unselectTrack(int trackIndex __unused) override {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    media_status_t setEnforceSequentialAccess(bool enforce __unused) override { return AMEDIA_OK; }
+
+    media_status_t getEstimatedBitrateForTrack(int trackIndex __unused,
+                                               int32_t* bitrate __unused) override {
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override {
+        if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+        if (mCurrentSampleIndex >= mSampleCount) {
+            info->presentationTimeUs = 0;
+            info->size = 0;
+            info->flags = SAMPLE_FLAG_END_OF_STREAM;
+            return AMEDIA_ERROR_END_OF_STREAM;
+        }
+
+        *info = mSamples[mCurrentSampleIndex % mSamples.size()].second;
+        info->presentationTimeUs = mFirstPtsUs + mCurrentSampleIndex * mPtsDiff;
+        return AMEDIA_OK;
+    }
+
+    media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                          size_t bufferSize) override {
+        if (trackIndex != mSelectedTrack) return AMEDIA_ERROR_INVALID_PARAMETER;
+
+        if (mCurrentSampleIndex >= mSampleCount) return AMEDIA_ERROR_END_OF_STREAM;
+
+        auto& p = mSamples[mCurrentSampleIndex % mSamples.size()];
+
+        if (bufferSize < p.second.size) return AMEDIA_ERROR_INVALID_PARAMETER;
+        memcpy(buffer, p.first.get(), p.second.size);
+
+        advanceTrack(trackIndex);
+        return AMEDIA_OK;
+    }
+
+    void advanceTrack(int trackIndex) {
+        if (trackIndex != mSelectedTrack) return;
+        ++mCurrentSampleIndex;
+    }
+
+    virtual ~MockSampleReader() override { AMediaExtractor_delete(mExtractor); }
+
+private:
+    MockSampleReader(AMediaExtractor* extractor) : mExtractor(extractor) {}
+    AMediaExtractor* mExtractor = nullptr;
+    int32_t mSampleCount = 0;
+    std::vector<std::pair<std::unique_ptr<uint8_t[]>, MediaSampleInfo>> mSamples;
+    int mSelectedTrack = -1;
+    int32_t mCurrentSampleIndex = 0;
+    int64_t mFirstPtsUs = 0;
+    int64_t mPtsDiff = 0;
+};
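+
+// A minimal usage sketch of MockSampleReader (illustrative only; the benchmarks below instead
+// wire the reader up through GetSampleReader and ConfigureSampleReader):
+//
+//   auto reader = MockSampleReader::createFromFd(srcFd, 0 /* offset */, fileSize);
+//   reader->selectTrack(0 /* trackIndex */);
+//   MediaSampleInfo info;
+//   while (reader->getSampleInfoForTrack(0, &info) != AMEDIA_ERROR_END_OF_STREAM) {
+//       // 'buffer' is a caller-allocated byte buffer of at least info.size bytes. Buffered
+//       // samples are replayed round-robin with monotonically increasing timestamps.
+//       reader->readSampleDataForTrack(0, buffer.get(), info.size);
+//   }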
+
+static std::shared_ptr<AMediaFormat> GetDefaultTrackFormat(MediaType mediaType,
+                                                           AMediaFormat* sourceFormat) {
+    // Default video config.
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+    static constexpr float kVideoFrameRate = 30.0f;             // 30 fps
+
+    AMediaFormat* format = nullptr;
+
+    if (mediaType == kVideo) {
+        format = AMediaFormat_new();
+        AMediaFormat_copy(format, sourceFormat);
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+        AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_FRAME_RATE, kVideoFrameRate);
+    }
+    // nothing for audio.
+
+    return std::shared_ptr<AMediaFormat>(format, &AMediaFormat_delete);
+}
+
+/** Gets a MediaSampleReader for the source file */
+static std::shared_ptr<MediaSampleReader> GetSampleReader(const std::string& srcFileName,
+                                                          bool mock) {
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    int srcFd = 0;
+    std::string srcPath = kAssetDirectory + srcFileName;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        return nullptr;
+    }
+
+    const size_t fileSize = lseek(srcFd, 0, SEEK_END);
+    lseek(srcFd, 0, SEEK_SET);
+
+    std::shared_ptr<MediaSampleReader> sampleReader;
+
+    if (mock) {
+        sampleReader = MockSampleReader::createFromFd(srcFd, 0 /* offset */, fileSize);
+    } else {
+        sampleReader = MediaSampleReaderNDK::createFromFd(srcFd, 0 /* offset */, fileSize);
+    }
+
+    if (srcFd > 0) close(srcFd);
+    return sampleReader;
+}
+
+/**
+ * Configures a MediaTrackTranscoder with an empty sample consumer so that the samples are returned
+ * to the transcoder immediately.
+ */
+static void ConfigureEmptySampleConsumer(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                         uint32_t& sampleCount) {
+    transcoder->setSampleConsumer([&sampleCount](const std::shared_ptr<MediaSample>& sample) {
+        if (!(sample->info.flags & SAMPLE_FLAG_CODEC_CONFIG) && sample->info.size > 0) {
+            ++sampleCount;
+        }
+    });
+}
+
+/**
+ * Callback to edit track format for transcoding.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+/**
+ * Configures a MediaTrackTranscoder with the provided MediaSampleReader, reading from the first
+ * track that matches the specified media type.
+ */
+static bool ConfigureSampleReader(const std::shared_ptr<MediaTrackTranscoder>& transcoder,
+                                  const std::shared_ptr<MediaSampleReader>& sampleReader,
+                                  MediaType mediaType,
+                                  const TrackFormatEditCallback& formatEditor) {
+    int srcTrackIndex = -1;
+    std::shared_ptr<AMediaFormat> srcTrackFormat = nullptr;
+
+    for (int trackIndex = 0; trackIndex < sampleReader->getTrackCount(); ++trackIndex) {
+        AMediaFormat* trackFormat = sampleReader->getTrackFormat(trackIndex);
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if ((mediaType == kVideo && strncmp(mime, "video/", 6) == 0) ||
+            (mediaType == kAudio && strncmp(mime, "audio/", 6) == 0)) {
+            srcTrackIndex = trackIndex;
+            srcTrackFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+            break;
+        }
+        AMediaFormat_delete(trackFormat);
+    }
+
+    if (srcTrackIndex == -1) {
+        LOG(ERROR) << "No matching source track found";
+        return false;
+    }
+
+    media_status_t status = sampleReader->selectTrack(srcTrackIndex);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "Unable to select track";
+        return false;
+    }
+
+    auto destinationFormat = GetDefaultTrackFormat(mediaType, srcTrackFormat.get());
+    if (formatEditor != nullptr) {
+        formatEditor(destinationFormat.get());
+    }
+    status = transcoder->configure(sampleReader, srcTrackIndex, destinationFormat);
+    if (status != AMEDIA_OK) {
+        LOG(ERROR) << "transcoder configure returned " << status;
+        return false;
+    }
+
+    return true;
+}
+
+static void BenchmarkTranscoder(benchmark::State& state, const std::string& srcFileName,
+                                bool mockReader, MediaType mediaType,
+                                const TrackFormatEditCallback& formatEditor = nullptr) {
+    static pthread_once_t once = PTHREAD_ONCE_INIT;
+    pthread_once(&once, ABinderProcess_startThreadPool);
+
+    for (auto _ : state) {
+        std::shared_ptr<TrackTranscoderCallbacks> callbacks =
+                std::make_shared<TrackTranscoderCallbacks>();
+        std::shared_ptr<MediaTrackTranscoder> transcoder;
+
+        if (mediaType == kVideo) {
+            transcoder = VideoTrackTranscoder::create(callbacks);
+        } else {
+            transcoder = std::make_shared<PassthroughTrackTranscoder>(callbacks);
+        }
+
+        std::shared_ptr<MediaSampleReader> sampleReader = GetSampleReader(srcFileName, mockReader);
+        if (sampleReader == nullptr) {
+            state.SkipWithError("Unable to create sample reader");
+            return;
+        }
+
+        if (!ConfigureSampleReader(transcoder, sampleReader, mediaType, formatEditor)) {
+            state.SkipWithError("Unable to configure the transcoder");
+            return;
+        }
+
+        uint32_t sampleCount = 0;
+        ConfigureEmptySampleConsumer(transcoder, sampleCount);
+
+        if (!transcoder->start()) {
+            state.SkipWithError("Unable to start the transcoder");
+            return;
+        }
+
+        callbacks->waitForTranscodingFinished();
+        transcoder->stop();
+
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder failed with error");
+            return;
+        }
+
+        LOG(DEBUG) << "Number of samples received: " << sampleCount;
+        state.counters["FrameRate"] = benchmark::Counter(sampleCount, benchmark::Counter::kIsRate);
+    }
+}
+
+static void BenchmarkTranscoderWithOperatingRate(benchmark::State& state,
+                                                 const std::string& srcFile, bool mockReader,
+                                                 MediaType mediaType) {
+    TrackFormatEditCallback editor;
+    const int32_t operatingRate = state.range(0);
+    const int32_t priority = state.range(1);
+
+    if (operatingRate >= 0 && priority >= 0) {
+        editor = [operatingRate, priority](AMediaFormat* format) {
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, operatingRate);
+            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, priority);
+        };
+    }
+    BenchmarkTranscoder(state, srcFile, mockReader, mediaType, editor);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+static void BM_VideoTranscode_AVC2AVC(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_AVC2AVC_NoExtractor(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, true /* mockReader */, kVideo);
+}
+
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+static void BM_VideoTranscode_HEVC2AVC(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, false /* mockReader */, kVideo);
+}
+
+static void BM_VideoTranscode_HEVC2AVC_NoExtractor(benchmark::State& state) {
+    const char* srcFile = "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4";
+    BenchmarkTranscoderWithOperatingRate(state, srcFile, true /* mockReader */, kVideo);
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+// Benchmark registration for testing different operating rate and priority combinations.
+#define TRANSCODER_OPERATING_RATE_BENCHMARK(func)  \
+    TRANSCODER_BENCHMARK(func)                     \
+            ->Args({-1, -1}) /* <-- Use default */ \
+            ->Args({240, 0})                       \
+            ->Args({INT32_MAX, 0})                 \
+            ->Args({240, 1})                       \
+            ->Args({INT32_MAX, 1})
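+
+// Each Args({operatingRate, priority}) pair above maps to state.range(0) and state.range(1) in
+// BenchmarkTranscoderWithOperatingRate. As an illustrative (hypothetical) addition,
+// ->Args({60, 0}) would register an extra run with a 60 fps operating rate and priority 0.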
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_AVC2AVC_NoExtractor);
+
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC);
+TRANSCODER_OPERATING_RATE_BENCHMARK(BM_VideoTranscode_HEVC2AVC_NoExtractor);
+
+BENCHMARK_MAIN();
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
new file mode 100644
index 0000000..8f8ad4e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
@@ -0,0 +1,648 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Native media transcoder library benchmark tests.
+ *
+ * How to run the benchmark:
+ *
+ * 1. Download the media assets from http://go/transcodingbenchmark and push the directory
+ *    ("TranscodingBenchmark") to /data/local/tmp.
+ *
+ * 2. Compile the benchmark and sync to device:
+ *      $ mm -j72 && adb sync
+ *
+ * 3. Run:
+ *      $ adb shell /data/nativetest64/MediaTranscoderBenchmark/MediaTranscoderBenchmark
+ */
+
+#include <benchmark/benchmark.h>
+#include <binder/ProcessState.h>
+#include <fcntl.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+#include <iostream>
+
+using namespace android;
+
+const std::string PARAM_VIDEO_FRAME_RATE = "VideoFrameRate";
+
+class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress __unused) override {}
+
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {}
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    bool waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            if (mCondition.wait_for(lock, std::chrono::minutes(5)) == std::cv_status::timeout) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    media_status_t mStatus = AMEDIA_OK;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+};
+
+static AMediaFormat* CreateDefaultVideoFormat() {
+    // Default bitrate
+    static constexpr int32_t kVideoBitRate = 20 * 1000 * 1000;  // 20 Mbps
+
+    AMediaFormat* videoFormat = AMediaFormat_new();
+    AMediaFormat_setInt32(videoFormat, AMEDIAFORMAT_KEY_BIT_RATE, kVideoBitRate);
+    AMediaFormat_setString(videoFormat, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+    return videoFormat;
+}
+
+/**
+ * Callback to configure tracks for transcoding.
+ * @param mime The source track mime type.
+ * @param dstFormat The destination format if the track should be transcoded or nullptr if the track
+ * should be passed through.
+ * @return True if the track should be included in the output file.
+ */
+using TrackSelectionCallback = std::function<bool(const char* mime, AMediaFormat** dstFormat)>;
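+
+// A minimal illustrative callback (hypothetical; the benchmarks below build their own) that keeps
+// every track and passes it through without transcoding:
+//
+//   TrackSelectionCallback passthroughAllTracks = [](const char* /* mime */,
+//                                                    AMediaFormat** dstFormat) -> bool {
+//       *dstFormat = nullptr;  // nullptr requests passthrough for this track.
+//       return true;           // Include the track in the output file.
+//   };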
+
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName,
+                               TrackSelectionCallback trackSelectionCallback) {
+    // Write-only, create file if non-existent.
+    static constexpr int kDstOpenFlags = O_WRONLY | O_CREAT;
+    // User R+W permission.
+    static constexpr int kDstFileMode = S_IRUSR | S_IWUSR;
+    // Asset directory
+    static const std::string kAssetDirectory = "/data/local/tmp/TranscodingBenchmark/";
+
+    // Transcoding configuration params to be logged
+    int64_t trackDurationUs = 0;
+    int32_t width = 0;
+    int32_t height = 0;
+    std::string sourceMime = "NA";
+    std::string targetMime = "NA";
+    bool includeAudio = false;
+    bool transcodeVideo = false;
+    int32_t targetBitrate = 0;
+
+    int srcFd = 0;
+    int dstFd = 0;
+
+    std::string srcPath = kAssetDirectory + srcFileName;
+    std::string dstPath = kAssetDirectory + dstFileName;
+
+    media_status_t status = AMEDIA_OK;
+
+    if ((srcFd = open(srcPath.c_str(), O_RDONLY)) < 0) {
+        state.SkipWithError("Unable to open source file");
+        goto exit;
+    }
+    if ((dstFd = open(dstPath.c_str(), kDstOpenFlags, kDstFileMode)) < 0) {
+        state.SkipWithError("Unable to open destination file");
+        goto exit;
+    }
+
+    for (auto _ : state) {
+        auto callbacks = std::make_shared<TranscoderCallbacks>();
+        auto transcoder = MediaTranscoder::create(callbacks);
+
+        status = transcoder->configureSource(srcFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder source");
+            goto exit;
+        }
+
+        status = transcoder->configureDestination(dstFd);
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to configure transcoder destination");
+            goto exit;
+        }
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* srcFormat = trackFormats[i].get();
+            AMediaFormat* dstFormat = nullptr;
+
+            const char* mime = nullptr;
+            if (!AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &mime)) {
+                state.SkipWithError("Source track format does not have MIME type");
+                goto exit;
+            }
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                int32_t frameCount;
+                if (AMediaFormat_getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT, &frameCount)) {
+                    state.counters[PARAM_VIDEO_FRAME_RATE] = benchmark::Counter(
+                            frameCount, benchmark::Counter::kIsIterationInvariantRate);
+                }
+                if (!AMediaFormat_getInt32(srcFormat, AMEDIAFORMAT_KEY_WIDTH, &width)) {
+                    state.SkipWithError("Video source track format does not have width");
+                    goto exit;
+                }
+                if (!AMediaFormat_getInt32(srcFormat, AMEDIAFORMAT_KEY_HEIGHT, &height)) {
+                    state.SkipWithError("Video source track format does not have height");
+                    goto exit;
+                }
+                AMediaFormat_getInt64(srcFormat, AMEDIAFORMAT_KEY_DURATION, &trackDurationUs);
+                sourceMime = mime;
+            }
+
+            if (trackSelectionCallback(mime, &dstFormat)) {
+                status = transcoder->configureTrackFormat(i, dstFormat);
+                if (strncmp(mime, "video/", 6) == 0 && dstFormat != nullptr) {
+                    const char* mime = nullptr;
+                    if (AMediaFormat_getString(dstFormat, AMEDIAFORMAT_KEY_MIME, &mime)) {
+                        targetMime = mime;
+                    }
+                    AMediaFormat_getInt32(dstFormat, AMEDIAFORMAT_KEY_BIT_RATE, &targetBitrate);
+                    transcodeVideo = true;
+                } else if (strncmp(mime, "audio/", 6) == 0) {
+                    includeAudio = true;
+                }
+            }
+
+            if (dstFormat != nullptr) {
+                AMediaFormat_delete(dstFormat);
+            }
+            if (status != AMEDIA_OK) {
+                state.SkipWithError("Unable to configure track");
+                goto exit;
+            }
+        }
+
+        status = transcoder->start();
+        if (status != AMEDIA_OK) {
+            state.SkipWithError("Unable to start transcoder");
+            goto exit;
+        }
+
+        if (!callbacks->waitForTranscodingFinished()) {
+            transcoder->cancel();
+            state.SkipWithError("Transcoder timed out");
+            goto exit;
+        }
+        if (callbacks->mStatus != AMEDIA_OK) {
+            state.SkipWithError("Transcoder error when running");
+            goto exit;
+        }
+    }
+
+    // Set transcoding configuration params in benchmark label
+    state.SetLabel(srcFileName + "," +
+                   std::to_string(width) + "x" + std::to_string(height) + "," +
+                   sourceMime + "," +
+                   std::to_string(trackDurationUs/1000) + "," +
+                   (includeAudio ? "Yes" : "No") + "," +
+                   (transcodeVideo ? "Yes" : "No") + "," +
+                   targetMime + "," +
+                   std::to_string(targetBitrate)
+                   );
+
+exit:
+    if (srcFd > 0) close(srcFd);
+    if (dstFd > 0) close(dstFd);
+}
+
+/**
+ * Callback to edit track format for transcoding.
+ * @param dstFormat The default track format for the track type.
+ */
+using TrackFormatEditCallback = std::function<void(AMediaFormat* dstFormat)>;
+
+static void TranscodeMediaFile(benchmark::State& state, const std::string& srcFileName,
+                               const std::string& dstFileName, bool includeAudio,
+                               bool transcodeVideo,
+                               const TrackFormatEditCallback& videoFormatEditor = nullptr) {
+    TranscodeMediaFile(state, srcFileName, dstFileName,
+                       [=](const char* mime, AMediaFormat** dstFormatOut) -> bool {
+                           *dstFormatOut = nullptr;
+                           if (strncmp(mime, "video/", 6) == 0 && transcodeVideo) {
+                               *dstFormatOut = CreateDefaultVideoFormat();
+                               if (videoFormatEditor != nullptr) {
+                                   videoFormatEditor(*dstFormatOut);
+                               }
+                           } else if (strncmp(mime, "audio/", 6) == 0 && !includeAudio) {
+                               return false;
+                           }
+                           return true;
+                       });
+}
+
+static void SetMaxOperatingRate(AMediaFormat* format) {
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, INT32_MAX);
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, 1);
+}
+
+//-------------------------------- AVC to AVC Benchmarks -------------------------------------------
+
+static void BM_TranscodeAvc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeAvc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeAvc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3648frame_h264_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3648frame_h264_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+//-------------------------------- HEVC to AVC Benchmarks ------------------------------------------
+
+static void BM_TranscodeHevc2AvcAudioVideo2AudioVideo(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcVideo2Video(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcAV2AVMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeHevc2AvcV2VMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3863frame_hevc_4Mbps_30fps.mp4",
+                       "video_1920x1080_3863frame_hevc_4Mbps_30fps_transcoded_V.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+static void BM_TranscodeHevc2AvcAV2AV720P(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */);
+}
+
+static void BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1280x720_3863frame_hevc_16Mbps_30fps_aac.mp4",
+                       "video_1280x720_3863frame_hevc_16Mbps_30fps_aac_transcoded_AV.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */, SetMaxOperatingRate);
+}
+
+//-------------------------------- Passthrough Benchmarks ------------------------------------------
+
+static void BM_TranscodeAudioVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps_aac.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_aac_passthrough_AV.mp4",
+                       true /* includeAudio */, false /* transcodeVideo */);
+}
+static void BM_TranscodeVideoPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "video_1920x1080_3648frame_h264_22Mbps_30fps.mp4",
+                       "video_1920x1080_3648frame_h264_22Mbps_30fps_passthrough_AV.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+//---------------------------- Codecs, Resolutions, Bitrate  ---------------------------------------
+static void SetMimeBitrate(AMediaFormat* format, std::string mime, int32_t bitrate) {
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime.c_str());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+}
+
+static void BM_1920x1080_Avc22Mbps2Avc12Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_22Mbps.mp4",
+                       "tx_bm_1920_1080_30fps_h264_22Mbps_transcoded_h264_12Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 12000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_Avc15Mbps2Avc8Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_transcoded_h264_8Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 8000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_Avc15Mbps2AvcPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_passthrough_V.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1920x1080_Avc15MbpsAac2Avc8Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps_aac.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_aac_transcoded_h264_8Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 8000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_Avc15MbpsAac2Avc8MbpsAac(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps_aac.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_aac_transcoded_h264_8Mbps_aac.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 8000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_Avc15MbpsAac2AvcPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps_aac.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_aac_passthrough_V.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1920x1080_Avc15MbpsAac2AvcAacPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_h264_15Mbps_aac.mp4",
+                       "tx_bm_1920_1080_30fps_h264_15Mbps_aac_passthrough_AV.mp4",
+                       true /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1920x1080_Hevc17Mbps2Hevc8Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_hevc_17Mbps.mp4",
+                       "tx_bm_1920_1080_30fps_hevc_17Mbps_transcoded_hevc_8Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/hevc", bitrate = 8000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_Hevc17Mbps2Avc12Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_30fps_hevc_17Mbps.mp4",
+                       "tx_bm_1920_1080_30fps_hevc_17Mbps_transcoded_h264_12Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 12000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1920x1080_60fps_Hevc28Mbps2Avc15Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1920_1080_60fps_hevc_28Mbps.mp4",
+                       "tx_bm_1920_1080_60fps_hevc_28Mbps_transcoded_h264_15Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 15000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1280x720_Avc10Mbps2Avc4Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_transcoded_h264_4Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 4000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1280x720_Avc10Mbps2AvcPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_passthrough_V.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1280x720_Avc10MbpsAac2Avc4Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps_aac.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_aac_transcoded_h264_4Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 4000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1280x720_Avc10MbpsAac2Avc4MbpsAac(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps_aac.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_aac_transcoded_h264_4Mbps_aac.mp4",
+                       true /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 4000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1280x720_Avc10MbpsAac2AvcPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps_aac.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_aac_passthrough_V.mp4",
+                       false /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1280x720_Avc10MbpsAac2AvcAacPassthrough(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_h264_10Mbps_aac.mp4",
+                       "tx_bm_1280_720_30fps_h264_10Mbps_aac_passthrough_AV.mp4",
+                       true /* includeAudio */, false /* transcodeVideo */);
+}
+
+static void BM_1280x720_Hevc8Mbps2Avc4Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1280_720_30fps_hevc_8Mbps.mp4",
+                       "tx_bm_1280_720_30fps_hevc_8Mbps_transcoded_h264_4Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 4000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_1080x1920_Avc15Mbps2Avc8Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_1080_1920_30fps_h264_15Mbps.mp4",
+                       "tx_bm_1080_1920_30fps_h264_15Mbps_transcoded_h264_8Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 8000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_720x1280_Avc10Mbps2Avc4Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_720_1280_30fps_h264_10Mbps.mp4",
+                       "tx_bm_720_1280_30fps_h264_10Mbps_transcoded_h264_4Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 4000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+static void BM_3840x2160_Hevc42Mbps2Avc20Mbps(benchmark::State& state) {
+    TranscodeMediaFile(state, "tx_bm_3840_2160_30fps_hevc_42Mbps.mp4",
+                       "tx_bm_3840_2160_30fps_hevc_42Mbps_transcoded_h264_4Mbps.mp4",
+                       false /* includeAudio */, true /* transcodeVideo */,
+                       [mime = "video/avc", bitrate = 20000000](AMediaFormat* dstFormat) {
+                           SetMimeBitrate(dstFormat, mime, bitrate);
+                       });
+}
+
+//-------------------------------- Benchmark Registration ------------------------------------------
+
+// Benchmark registration wrapper for transcoding.
+#define TRANSCODER_BENCHMARK(func) \
+    BENCHMARK(func)->UseRealTime()->MeasureProcessCPUTime()->Unit(benchmark::kMillisecond)
+
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeAvc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAudioVideo2AudioVideo);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcVideo2Video);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AVMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcV2VMaxOperatingRate);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720P);
+TRANSCODER_BENCHMARK(BM_TranscodeHevc2AvcAV2AV720PMaxOperatingRate);
+
+TRANSCODER_BENCHMARK(BM_TranscodeAudioVideoPassthrough);
+TRANSCODER_BENCHMARK(BM_TranscodeVideoPassthrough);
+
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc22Mbps2Avc12Mbps);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15Mbps2Avc8Mbps);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15Mbps2AvcPassthrough);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15MbpsAac2Avc8Mbps);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15MbpsAac2Avc8MbpsAac);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15MbpsAac2AvcPassthrough);
+TRANSCODER_BENCHMARK(BM_1920x1080_Avc15MbpsAac2AvcAacPassthrough);
+TRANSCODER_BENCHMARK(BM_1920x1080_Hevc17Mbps2Hevc8Mbps);
+TRANSCODER_BENCHMARK(BM_1920x1080_Hevc17Mbps2Avc12Mbps);
+TRANSCODER_BENCHMARK(BM_1920x1080_60fps_Hevc28Mbps2Avc15Mbps);
+
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10Mbps2Avc4Mbps);
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10Mbps2AvcPassthrough);
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10MbpsAac2Avc4Mbps);
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10MbpsAac2Avc4MbpsAac);
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10MbpsAac2AvcPassthrough);
+TRANSCODER_BENCHMARK(BM_1280x720_Avc10MbpsAac2AvcAacPassthrough);
+TRANSCODER_BENCHMARK(BM_1280x720_Hevc8Mbps2Avc4Mbps);
+
+TRANSCODER_BENCHMARK(BM_1080x1920_Avc15Mbps2Avc8Mbps);
+TRANSCODER_BENCHMARK(BM_720x1280_Avc10Mbps2Avc4Mbps);
+
+TRANSCODER_BENCHMARK(BM_3840x2160_Hevc42Mbps2Avc20Mbps);
+
+class CustomCsvReporter : public benchmark::BenchmarkReporter {
+public:
+    CustomCsvReporter() : mPrintedHeader(false) {}
+    virtual bool ReportContext(const Context& context);
+    virtual void ReportRuns(const std::vector<Run>& reports);
+
+private:
+    void PrintRunData(const Run& report);
+
+    bool mPrintedHeader;
+    std::vector<std::string> mHeaders = {
+        "File",          "Resolution",     "SourceMime", "VideoTrackDuration(ms)",
+        "IncludeAudio",  "TranscodeVideo", "TargetMime", "TargetBirate(bps)",
+        "real_time(ms)", "cpu_time(ms)",   PARAM_VIDEO_FRAME_RATE
+    };
+};
+
+bool CustomCsvReporter::ReportContext(const Context& context __unused) {
+    return true;
+}
+
+void CustomCsvReporter::ReportRuns(const std::vector<Run>& reports) {
+    std::ostream& Out = GetOutputStream();
+
+    if (!mPrintedHeader) {
+        // print the header
+        for (auto header = mHeaders.begin(); header != mHeaders.end();) {
+            Out << *header++;
+            if (header != mHeaders.end()) Out << ",";
+        }
+        Out << "\n";
+        mPrintedHeader = true;
+    }
+
+    // print results for each run
+    for (const auto& run : reports) {
+        PrintRunData(run);
+    }
+}
+
+void CustomCsvReporter::PrintRunData(const Run& run) {
+    if (run.error_occurred) {
+        return;
+    }
+    std::ostream& Out = GetOutputStream();
+    // Log the transcoding params reported through label
+    Out << run.report_label << ",";
+    Out << run.GetAdjustedRealTime() << ",";
+    Out << run.GetAdjustedCPUTime() << ",";
+    auto frameRate = run.counters.find(PARAM_VIDEO_FRAME_RATE);
+    if (frameRate == run.counters.end()) {
+        Out << "NA"
+            << ",";
+    } else {
+        Out << frameRate->second << ",";
+    }
+    Out << '\n';
+}
+
+int main(int argc, char** argv) {
+    android::ProcessState::self()->startThreadPool();
+    std::unique_ptr<benchmark::BenchmarkReporter> fileReporter;
+    for (int i = 1; i < argc; ++i) {
+        if (std::string(argv[i]).find("--benchmark_out") != std::string::npos) {
+            fileReporter.reset(new CustomCsvReporter);
+            break;
+        }
+    }
+    ::benchmark::Initialize(&argc, argv);
+    if (::benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
+    ::benchmark::RunSpecifiedBenchmarks(nullptr, fileReporter.get());
+}
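+
+// Example invocation that exercises the CSV reporter above (paths are illustrative;
+// --benchmark_out is a standard Google Benchmark flag):
+//
+//   $ adb shell /data/nativetest64/MediaTranscoderBenchmark/MediaTranscoderBenchmark \
+//         --benchmark_out=/data/local/tmp/transcoder_benchmarks.csv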
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSample.h b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
new file mode 100644
index 0000000..8a239a6
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSample.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_H
+#define ANDROID_MEDIA_SAMPLE_H
+
+#include <cstdint>
+#include <functional>
+#include <memory>
+
+namespace android {
+
+/**
+ * Media sample flags.
+ * These flags purposely match the media NDK's buffer and extractor flags with one exception. The
+ * NDK extractor's flag for encrypted samples (AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED) is equal to 2,
+ * i.e. the same as SAMPLE_FLAG_CODEC_CONFIG below and NDK's AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG.
+ * Sample producers based on the NDK's extractor are responsible for handling those values.
+ * Note that currently the media transcoder does not support encrypted samples.
+ */
+enum : uint32_t {
+    SAMPLE_FLAG_SYNC_SAMPLE = 1,
+    SAMPLE_FLAG_CODEC_CONFIG = 2,
+    SAMPLE_FLAG_END_OF_STREAM = 4,
+    SAMPLE_FLAG_PARTIAL_FRAME = 8,
+};
+
+/**
+ * MediaSampleInfo is an object that carries information about a compressed media sample without
+ * holding any sample data.
+ */
+struct MediaSampleInfo {
+    /** The sample's presentation timestamp in microseconds. */
+    int64_t presentationTimeUs = 0;
+
+    /** The size of the compressed sample data in bytes. */
+    size_t size = 0;
+
+    /** Sample flags. */
+    uint32_t flags = 0;
+};
+
+/**
+ * MediaSample holds a compressed media sample in memory.
+ */
+struct MediaSample {
+    /**
+     * Callback to notify that a media sample is about to be released, giving the creator a chance
+     * to reclaim the data buffer backing the sample. Once this callback returns, the media sample
+     * instance *will* be released so it cannot be used outside of the callback. To enable the
+     * callback, create the media sample with {@link #createWithReleaseCallback}.
+     * @param sample The sample to be released.
+     */
+    using OnSampleReleasedCallback = std::function<void(MediaSample* sample)>;
+
+    /**
+     * Creates a new media sample instance with a registered release callback. The release callback
+     * will get called right before the media sample is released giving the creator a chance to
+     * reclaim the buffer.
+     * @param buffer Byte buffer containing the sample's compressed data.
+     * @param dataOffset Offset, in bytes, to the sample's compressed data inside the buffer.
+     * @param bufferId Buffer identifier that can be used to identify the buffer on release.
+     * @param releaseCallback The sample release callback.
+     * @return A new media sample instance.
+     */
+    static std::shared_ptr<MediaSample> createWithReleaseCallback(
+            uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+            OnSampleReleasedCallback releaseCallback) {
+        MediaSample* sample = new MediaSample(buffer, dataOffset, bufferId, releaseCallback);
+        return std::shared_ptr<MediaSample>(
+                sample, std::bind(&MediaSample::releaseSample, std::placeholders::_1));
+    }
+
+    /**
+     * Byte buffer containing the sample's compressed data. The media sample instance does not take
+     * ownership of the buffer and will not automatically release the memory, but the caller can
+     * register a release callback by creating the media sample with
+     * {@link #createWithReleaseCallback}.
+     */
+    const uint8_t* buffer = nullptr;
+
+    /** Offset, in bytes, to the sample's compressed data inside the buffer. */
+    size_t dataOffset = 0;
+
+    /**
+     * Buffer identifier. This identifier is likely only meaningful to the sample data producer and
+     * can be used for reclaiming the buffer once a consumer is done processing it.
+     */
+    uint32_t bufferId = 0xBAADF00D;
+
+    /** Media sample information. */
+    MediaSampleInfo info;
+
+    MediaSample() = default;
+
+private:
+    MediaSample(uint8_t* buffer, size_t dataOffset, uint32_t bufferId,
+                OnSampleReleasedCallback releaseCallback)
+          : buffer(buffer),
+            dataOffset(dataOffset),
+            bufferId(bufferId),
+            mReleaseCallback(releaseCallback){};
+
+    static void releaseSample(MediaSample* sample) {
+        if (sample->mReleaseCallback != nullptr) {
+            sample->mReleaseCallback(sample);
+        }
+        delete sample;
+    }
+
+    // Do not allow copying to prevent dangling pointers in the copied object after the original is
+    // released.
+    MediaSample(const MediaSample&) = delete;
+    MediaSample& operator=(const MediaSample&) = delete;
+
+    const OnSampleReleasedCallback mReleaseCallback = nullptr;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_H
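+
+// Illustrative usage sketch (not part of the header above): creating a MediaSample whose backing
+// buffer is reclaimed through the release callback. The buffer array, sizes and names below are
+// hypothetical values chosen for this example.
+#include <media/MediaSample.h>
+
+namespace example {
+
+inline void createAndReleaseSample() {
+    static uint8_t gBuffers[4][1024];       // Hypothetical fixed set of data buffers.
+    static bool gBufferInUse[4] = {false};  // Tracks which buffers are currently lent out.
+
+    const uint32_t bufferId = 0;
+    gBufferInUse[bufferId] = true;
+
+    auto sample = android::MediaSample::createWithReleaseCallback(
+            gBuffers[bufferId], 0 /* dataOffset */, bufferId,
+            [](android::MediaSample* releasedSample) {
+                // Called right before the sample is deleted; safe to reclaim the buffer here.
+                gBufferInUse[releasedSample->bufferId] = false;
+            });
+
+    sample->info.presentationTimeUs = 33333;  // Example timestamp.
+    sample->info.size = 512;                  // Example payload size.
+    sample->info.flags = android::SAMPLE_FLAG_SYNC_SAMPLE;
+
+    // Dropping the last shared_ptr reference triggers the release callback above.
+    sample.reset();
+}
+
+}  // namespace example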
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
new file mode 100644
index 0000000..c6cf1a4
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_QUEUE_H
+#define ANDROID_MEDIA_SAMPLE_QUEUE_H
+
+#include <media/MediaSample.h>
+#include <utils/Mutex.h>
+
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/**
+ * MediaSampleQueue asynchronously connects a producer and a consumer of media samples.
+ * Media samples flow through the queue in FIFO order. If the queue is empty, the consumer will be
+ * blocked until a new media sample is added or until the producer aborts the queue operation.
+ */
+class MediaSampleQueue {
+public:
+    /**
+     * Enqueues a media sample at the end of the queue and notifies potentially waiting consumers.
+     * If the queue has previously been aborted this method does nothing.
+     * @param sample The media sample to enqueue.
+     * @return True if the queue has been aborted.
+     */
+    bool enqueue(const std::shared_ptr<MediaSample>& sample);
+
+    /**
+     * Removes the next media sample from the queue and returns it. If the queue has previously been
+     * aborted this method returns null. Note that this method will block while the queue is empty.
+     * @param[out] sample The next media sample in the queue.
+     * @return True if the queue has been aborted.
+     */
+    bool dequeue(std::shared_ptr<MediaSample>* sample /* nonnull */);
+
+    /**
+     * Checks if the queue currently holds any media samples.
+     * @return True if the queue is empty or has been aborted. False otherwise.
+     */
+    bool isEmpty();
+
+    /**
+     * Aborts the queue operation. This clears the queue and notifies waiting consumers. After the
+     * queue has been aborted it is not possible to enqueue more samples, and dequeue will return
+     * null.
+     */
+    void abort();
+
+private:
+    std::queue<std::shared_ptr<MediaSample>> mSampleQueue GUARDED_BY(mMutex);
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mAborted GUARDED_BY(mMutex) = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_QUEUE_H
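+
+// A minimal usage sketch (illustrative only, not part of the header above): one producer thread
+// enqueuing samples while a consumer thread drains the queue until it is aborted. Names are
+// hypothetical.
+#include <media/MediaSample.h>
+#include <media/MediaSampleQueue.h>
+
+#include <thread>
+
+namespace example {
+
+inline void runProducerConsumer() {
+    android::MediaSampleQueue queue;
+
+    std::thread consumer([&queue] {
+        std::shared_ptr<android::MediaSample> sample;
+        // dequeue() blocks while the queue is empty and returns true once the queue is aborted.
+        while (!queue.dequeue(&sample)) {
+            // Process the sample here. Releasing the reference returns the buffer upstream.
+            sample.reset();
+        }
+    });
+
+    std::thread producer([&queue] {
+        for (int i = 0; i < 10; ++i) {
+            queue.enqueue(std::make_shared<android::MediaSample>());
+        }
+        // abort() clears any remaining samples, wakes the consumer and turns further
+        // enqueue/dequeue calls into no-ops.
+        queue.abort();
+    });
+
+    producer.join();
+    consumer.join();
+}
+
+}  // namespace example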
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
new file mode 100644
index 0000000..5c7eeac
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_H
+#define ANDROID_MEDIA_SAMPLE_READER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+
+/**
+ * MediaSampleReader is an interface for reading media samples from a container. MediaSampleReader
+ * allows for reading samples from multiple tracks on individual threads independently of each other
+ * while preserving the order of samples. Due to poor non-sequential access performance of the
+ * underlying extractor, MediaSampleReader can optionally enforce sequential sample access by
+ * blocking requests for tracks that the underlying extractor does not currently point to. Waiting
+ * threads are serviced once the reader advances to a sample from the specified track. Due to this
+ * it is important to read samples and advance the reader from all selected tracks to avoid hanging
+ * other tracks. MediaSampleReader implementations are thread safe and sample access should be done
+ * on one thread per selected track.
+ */
+class MediaSampleReader {
+public:
+    /**
+     * Returns the file format of the media container as an AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @return The file media format.
+     */
+    virtual AMediaFormat* getFileFormat() = 0;
+
+    /**
+     * Returns the number of tracks in the media container.
+     * @return The number of tracks.
+     */
+    virtual size_t getTrackCount() const = 0;
+
+    /**
+     * Returns the media format of a specific track as an AMediaFormat.
+     * The caller is responsible for releasing the format when finished with it using
+     * AMediaFormat_delete().
+     * @param trackIndex The track index (zero-based).
+     * @return The track media format.
+     */
+    virtual AMediaFormat* getTrackFormat(int trackIndex) = 0;
+
+    /**
+     * Select a track for sample access. Tracks must be selected in order for sample information and
+     * sample data to be available for that track. Samples for each selected track must be accessed
+     * on its own thread to avoid blocking other tracks.
+     * @param trackIndex The track to select.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t selectTrack(int trackIndex) = 0;
+
+    /**
+     * Undo a track selection.
+     * @param trackIndex The track to un-select.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t unselectTrack(int trackIndex) = 0;
+
+    /**
+     * Toggles sequential access enforcement on or off. When the reader enforces sequential access,
+     * calls to read sample information will block unless the underlying extractor points to the
+     * specified track.
+     * @param enforce True to enforce sequential access.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t setEnforceSequentialAccess(bool enforce) = 0;
+
+    /**
+     * Estimates the bitrate of a source track by sampling sample sizes. The bitrate is returned in
+     * megabits per second (Mbps). This method will fail if the track only contains a single sample
+     * and does not have an associated duration.
+     * @param trackIndex The source track index.
+     * @param bitrate Output param for the bitrate.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate);
+
+    /**
+     * Returns the sample information for the current sample in the specified track. Note that this
+     * method will block until the reader advances to a sample belonging to the requested track if
+     * the reader is in sequential access mode.
+     * @param trackIndex The track index (zero-based).
+     * @param info Pointer to a MediaSampleInfo object where the sample information is written.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds or the
+     * info pointer is NULL. Other AMEDIA_ERROR_* return values may not be recoverable.
+     */
+    virtual media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) = 0;
+
+    /**
+     * Returns the sample data for the current sample in the specified track into the supplied
+     * buffer. Note that this method will block until the reader advances to a sample belonging to
+     * the requested track if the reader is in sequential access mode. Upon successful return this
+     * method will also advance the specified track to the next sample.
+     * @param trackIndex The track index (zero-based).
+     * @param buffer The buffer to write the sample's data to.
+     * @param bufferSize The size of the supplied buffer.
+     * @return AMEDIA_OK on success, AMEDIA_ERROR_END_OF_STREAM if there are no more samples to read
+     * from the track and AMEDIA_ERROR_INVALID_PARAMETER if trackIndex is out of bounds, if the
+     * buffer pointer is NULL or if bufferSize is too small for the sample. Other AMEDIA_ERROR_*
+     * return values may not be recoverable.
+     */
+    virtual media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                                  size_t bufferSize) = 0;
+
+    /**
+     * Advance the specified track to the next sample. If the reader is in sequential access mode
+     * and the current sample belongs to the specified track, the reader will also advance to the
+     * next sample and wake up any threads waiting on the new track.
+     * @param trackIndex The track index (zero-based).
+     */
+    virtual void advanceTrack(int trackIndex) = 0;
+
+    /** Destructor. */
+    virtual ~MediaSampleReader() = default;
+
+    /** Constructor. */
+    MediaSampleReader() = default;
+
+private:
+    MediaSampleReader(const MediaSampleReader&) = delete;
+    MediaSampleReader& operator=(const MediaSampleReader&) = delete;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_H
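+
+// Illustrative usage sketch (not part of the header above): draining one selected track of an
+// already created MediaSampleReader. With several selected tracks, each track should be drained on
+// its own thread. Names are hypothetical.
+#include <media/MediaSampleReader.h>
+
+#include <memory>
+#include <vector>
+
+namespace example {
+
+inline media_status_t drainTrack(const std::shared_ptr<android::MediaSampleReader>& reader,
+                                 int trackIndex) {
+    media_status_t status = reader->selectTrack(trackIndex);
+    if (status != AMEDIA_OK) return status;
+
+    std::vector<uint8_t> buffer;
+    android::MediaSampleInfo info;
+
+    while ((status = reader->getSampleInfoForTrack(trackIndex, &info)) == AMEDIA_OK) {
+        buffer.resize(info.size);
+        status = reader->readSampleDataForTrack(trackIndex, buffer.data(), buffer.size());
+        if (status != AMEDIA_OK) break;
+        // readSampleDataForTrack() advances the track on success, so no explicit advanceTrack()
+        // call is needed here.
+    }
+
+    // AMEDIA_ERROR_END_OF_STREAM simply means the whole track was read.
+    return status == AMEDIA_ERROR_END_OF_STREAM ? AMEDIA_OK : status;
+}
+
+}  // namespace example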
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
new file mode 100644
index 0000000..30cc37f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_READER_NDK_H
+#define ANDROID_MEDIA_SAMPLE_READER_NDK_H
+
+#include <media/MediaSampleReader.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <vector>
+
+namespace android {
+
+/**
+ * MediaSampleReaderNDK is a concrete implementation of the MediaSampleReader interface based on the
+ * media NDK extractor.
+ */
+class MediaSampleReaderNDK : public MediaSampleReader {
+public:
+    /**
+     * Creates a new MediaSampleReaderNDK instance wrapped in a shared pointer.
+     * @param fd Source file descriptor. The caller is responsible for closing the fd and it is safe
+     *           to do so when this method returns.
+     * @param offset Source data offset.
+     * @param size Source data size.
+     * @return A shared pointer referencing the new MediaSampleReaderNDK instance on success, or an
+     *         empty shared pointer if an error occurred.
+     */
+    static std::shared_ptr<MediaSampleReader> createFromFd(int fd, size_t offset, size_t size);
+
+    AMediaFormat* getFileFormat() override;
+    size_t getTrackCount() const override;
+    AMediaFormat* getTrackFormat(int trackIndex) override;
+    media_status_t selectTrack(int trackIndex) override;
+    media_status_t unselectTrack(int trackIndex) override;
+    media_status_t setEnforceSequentialAccess(bool enforce) override;
+    media_status_t getEstimatedBitrateForTrack(int trackIndex, int32_t* bitrate) override;
+    media_status_t getSampleInfoForTrack(int trackIndex, MediaSampleInfo* info) override;
+    media_status_t readSampleDataForTrack(int trackIndex, uint8_t* buffer,
+                                          size_t bufferSize) override;
+    void advanceTrack(int trackIndex) override;
+
+    virtual ~MediaSampleReaderNDK() override;
+
+private:
+    /**
+     * SamplePosition describes the position of a single sample in the media file using its
+     * timestamp and index in the file.
+     */
+    struct SamplePosition {
+        uint64_t index = 0;
+        int64_t timeStampUs = 0;
+        bool isSet = false;
+
+        void set(uint64_t sampleIndex, int64_t sampleTimeUs) {
+            index = sampleIndex;
+            timeStampUs = sampleTimeUs;
+            isSet = true;
+        }
+
+        void reset() { isSet = false; }
+    };
+
+    /**
+     * SampleCursor keeps track of the sample position for a specific track. When the track is
+     * advanced, previous is set to current, current to next and next is reset. As the extractor
+     * advances over the combined timeline of tracks, it updates current and next for the track it
+     * points to if they are not already set.
+     */
+    struct SampleCursor {
+        SamplePosition previous;
+        SamplePosition current;
+        SamplePosition next;
+    };
+
+    /**
+     * Creates a new MediaSampleReaderNDK object from an AMediaExtractor. The extractor needs to be
+     * initialized with a valid data source before attempting to create a MediaSampleReaderNDK.
+     * @param extractor The initialized media extractor.
+     */
+    MediaSampleReaderNDK(AMediaExtractor* extractor);
+
+    /** Advances the track to next sample. */
+    void advanceTrack_l(int trackIndex);
+
+    /** Advances the extractor to next sample. */
+    bool advanceExtractor_l();
+
+    /** Moves the extractor backwards to the specified sample. */
+    media_status_t seekExtractorBackwards_l(int64_t targetTimeUs, int targetTrackIndex,
+                                            uint64_t targetSampleIndex);
+
+    /** Moves the extractor to the specified sample. */
+    media_status_t moveToSample_l(SamplePosition& pos, int trackIndex);
+
+    /** Moves the extractor to the next sample of the specified track. */
+    media_status_t moveToTrack_l(int trackIndex);
+
+    /** In sequential mode, waits for the extractor to reach the next sample for the track. */
+    media_status_t waitForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+    /**
+     * Ensures the extractor is ready for the next sample of the track regardless of access mode.
+     */
+    media_status_t primeExtractorForTrack_l(int trackIndex, std::unique_lock<std::mutex>& lockHeld);
+
+    AMediaExtractor* mExtractor = nullptr;
+    std::mutex mExtractorMutex;
+    const size_t mTrackCount;
+
+    int mExtractorTrackIndex = -1;
+    uint64_t mExtractorSampleIndex = 0;
+
+    bool mEosReached = false;
+    bool mEnforceSequentialAccess = false;
+
+    // Maps selected track indices to condition variables for sequential sample access control.
+    std::map<int, std::condition_variable> mTrackSignals;
+
+    // Samples cursor for each track in the file.
+    std::vector<SampleCursor> mTrackCursors;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_READER_NDK_H
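+
+// Illustrative usage sketch (not part of the header above): creating a sample reader for an entire
+// file. The file path is a hypothetical example.
+#include <media/MediaSampleReaderNDK.h>
+
+#include <fcntl.h>
+#include <unistd.h>
+
+namespace example {
+
+inline std::shared_ptr<android::MediaSampleReader> openReader() {
+    const int fd = open("/data/local/tmp/input.mp4", O_RDONLY);  // Hypothetical path.
+    if (fd < 0) return nullptr;
+
+    const off_t fileSize = lseek(fd, 0, SEEK_END);
+    std::shared_ptr<android::MediaSampleReader> reader;
+    if (fileSize >= 0) {
+        reader = android::MediaSampleReaderNDK::createFromFd(fd, 0 /* offset */, fileSize);
+    }
+
+    // The reader keeps its own reference to the source, so it is safe to close the fd here.
+    close(fd);
+    return reader;
+}
+
+}  // namespace example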
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
new file mode 100644
index 0000000..23a234b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SAMPLE_WRITER_H
+#define ANDROID_MEDIA_SAMPLE_WRITER_H
+
+#include <media/MediaSample.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <condition_variable>
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Muxer interface used by MediaSampleWriter.
+ * Methods in this interface are guaranteed to be called sequentially by MediaSampleWriter.
+ */
+class MediaSampleWriterMuxerInterface {
+public:
+    /**
+     * Adds a new track to the muxer.
+     * @param trackFormat Format of the new track.
+     * @return A non-negative track index on success, or a negative number on failure.
+     */
+    virtual ssize_t addTrack(AMediaFormat* trackFormat) = 0;
+
+    /** Starts the muxer. */
+    virtual media_status_t start() = 0;
+    /**
+     * Writes sample data to a previously added track.
+     * @param trackIndex Index of the track the sample data belongs to.
+     * @param data The sample data.
+     * @param info The sample information.
+     * @return AMEDIA_OK on success.
+     */
+    virtual media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                           const AMediaCodecBufferInfo* info) = 0;
+
+    /** Stops the muxer. */
+    virtual media_status_t stop() = 0;
+    virtual ~MediaSampleWriterMuxerInterface() = default;
+};
+
+/**
+ * MediaSampleWriter is a wrapper around a muxer. The sample writer puts samples on a queue that
+ * is serviced by an internal thread to minimize blocking time for clients. MediaSampleWriter also
+ * provides progress reporting. The default muxer interface implementation is based
+ * directly on AMediaMuxer.
+ */
+class MediaSampleWriter : public std::enable_shared_from_this<MediaSampleWriter> {
+public:
+    /** Function prototype for delivering media samples to the writer. */
+    using MediaSampleConsumerFunction =
+            std::function<void(const std::shared_ptr<MediaSample>& sample)>;
+
+    /** Callback interface. */
+    class CallbackInterface {
+    public:
+        /**
+         * Sample writer finished. The finished callback is only called after the sample writer has
+         * been successfully started.
+         */
+        virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) = 0;
+
+        /** Sample writer was stopped before it was finished. */
+        virtual void onStopped(const MediaSampleWriter* writer) = 0;
+
+        /** Sample writer progress update in percent. */
+        virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) = 0;
+
+        /** Sample writer heart-beat signal. */
+        virtual void onHeartBeat(const MediaSampleWriter* writer) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    static std::shared_ptr<MediaSampleWriter> Create();
+
+    /**
+     * Initializes the sample writer with its default muxer implementation. MediaSampleWriter needs
+     * to be initialized before tracks are added and can only be initialized once.
+     * @param fd An open file descriptor to write to. The caller is responsible for closing this
+     *        file descriptor and it is safe to do so once this method returns.
+     * @param callbacks Client callback object that gets called by the sample writer.
+     * @param heartBeatIntervalUs Interval (in microseconds) at which the sample writer should send
+     *        a heart-beat through onHeartBeat() to indicate that it is making progress. A value
+     *        <= 0 indicates that the heartbeat is not required.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(int fd, const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */,
+              int64_t heartBeatIntervalUs = -1);
+
+    /**
+     * Initializes the sample writer with a custom muxer interface implementation.
+     * @param muxer The custom muxer interface implementation.
+     * @param callbacks Client callback object that gets called by the sample writer.
+     * @param heartBeatIntervalUs Interval (in microseconds) at which the sample writer should send
+     *        a heart-beat through onHeartBeat() to indicate that it is making progress.
+     * @return True if the writer was successfully initialized.
+     */
+    bool init(const std::shared_ptr<MediaSampleWriterMuxerInterface>& muxer /* nonnull */,
+              const std::weak_ptr<CallbackInterface>& callbacks /* nonnull */,
+              int64_t heartBeatIntervalUs = -1);
+
+    /**
+     * Adds a new track to the sample writer. Tracks must be added after the sample writer has been
+     * initialized and before it is started.
+     * @param trackFormat The format of the track to add.
+     * @return A sample consumer to add samples to if the track was successfully added, or nullptr
+     * if the track could not be added.
+     */
+    MediaSampleConsumerFunction addTrack(
+            const std::shared_ptr<AMediaFormat>& trackFormat /* nonnull */);
+
+    /**
+     * Starts the sample writer. The sample writer will start processing samples and writing them to
+     * its muxer on an internal thread. MediaSampleWriter can only be started once.
+     * @return True if the sample writer was successfully started.
+     */
+    bool start();
+
+    /**
+     * Stops the sample writer. If the sample writer is not yet finished, its operation will be
+     * aborted and the onStopped callback will fire. If the sample writer has already finished and
+     * the onFinished callback has fired, the writer has already stopped automatically and there is
+     * no need to call stop manually. Once the sample writer has been stopped it cannot be
+     * restarted. This method is asynchronous and will not wait for the sample writer to stop before
+     * returning.
+     */
+    void stop();
+
+    /** Destructor. */
+    ~MediaSampleWriter();
+
+private:
+    struct TrackRecord {
+        TrackRecord(int64_t durationUs)
+              : mDurationUs(durationUs),
+                mFirstSampleTimeUs(0),
+                mPrevSampleTimeUs(INT64_MIN),
+                mFirstSampleTimeSet(false),
+                mReachedEos(false){};
+
+        TrackRecord() : TrackRecord(0){};
+
+        int64_t mDurationUs;
+        int64_t mFirstSampleTimeUs;
+        int64_t mPrevSampleTimeUs;
+        bool mFirstSampleTimeSet;
+        bool mReachedEos;
+    };
+
+    // Track index and sample.
+    using SampleEntry = std::pair<size_t, std::shared_ptr<MediaSample>>;
+
+    struct SampleComparator {
+        // Return true if lhs should come after rhs in the sample queue.
+        bool operator()(const SampleEntry& lhs, const SampleEntry& rhs) {
+            const bool lhsEos = lhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+            const bool rhsEos = rhs.second->info.flags & SAMPLE_FLAG_END_OF_STREAM;
+
+            if (lhsEos && !rhsEos) {
+                return true;
+            } else if (!lhsEos && rhsEos) {
+                return false;
+            } else if (lhsEos && rhsEos) {
+                return lhs.first > rhs.first;
+            }
+
+            return lhs.second->info.presentationTimeUs > rhs.second->info.presentationTimeUs;
+        }
+    };
+
+    std::weak_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleWriterMuxerInterface> mMuxer;
+    int64_t mHeartBeatIntervalUs;
+
+    std::mutex mMutex;  // Protects sample queue and state.
+    std::condition_variable mSampleSignal;
+    std::unordered_map<size_t, TrackRecord> mTracks;
+    std::priority_queue<SampleEntry, std::vector<SampleEntry>, SampleComparator> mSampleQueue
+            GUARDED_BY(mMutex);
+
+    enum : int {
+        UNINITIALIZED,
+        INITIALIZED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mMutex);
+
+    MediaSampleWriter() : mState(UNINITIALIZED){};
+    void addSampleToTrack(size_t trackIndex, const std::shared_ptr<MediaSample>& sample);
+    media_status_t writeSamples(bool* wasStopped);
+    media_status_t runWriterLoop(bool* wasStopped);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_SAMPLE_WRITER_H
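+
+// Illustrative usage sketch (not part of the header above): wiring a MediaSampleWriter to an
+// output file descriptor and adding a single track. The callback implementation and names are
+// hypothetical; a real client keeps the callback object alive for as long as the writer runs.
+#include <media/MediaSampleWriter.h>
+
+namespace example {
+
+class WriterCallbacks : public android::MediaSampleWriter::CallbackInterface {
+public:
+    void onFinished(const android::MediaSampleWriter*, media_status_t) override {}
+    void onStopped(const android::MediaSampleWriter*) override {}
+    void onProgressUpdate(const android::MediaSampleWriter*, int32_t) override {}
+    void onHeartBeat(const android::MediaSampleWriter*) override {}
+};
+
+inline std::shared_ptr<android::MediaSampleWriter> startWriter(
+        int outputFd, const std::shared_ptr<WriterCallbacks>& callbacks,
+        const std::shared_ptr<AMediaFormat>& trackFormat,
+        android::MediaSampleWriter::MediaSampleConsumerFunction* outSampleConsumer) {
+    auto writer = android::MediaSampleWriter::Create();
+    if (!writer->init(outputFd, callbacks)) {
+        return nullptr;
+    }
+
+    // addTrack() hands back the consumer function that transcoded samples are delivered to,
+    // typically by passing it to MediaTrackTranscoder::setSampleConsumer().
+    *outSampleConsumer = writer->addTrack(trackFormat);
+    if (*outSampleConsumer == nullptr) {
+        return nullptr;
+    }
+
+    return writer->start() ? writer : nullptr;
+}
+
+}  // namespace example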
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
new file mode 100644
index 0000000..724b919
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_H
+
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleReader.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <thread>
+
+namespace android {
+
+class MediaTrackTranscoderCallback;
+
+/**
+ * Base class for all track transcoders. MediaTrackTranscoder operates asynchronously on an internal
+ * thread and communicates through a MediaTrackTranscoderCallback instance. Transcoded samples are
+ * enqueued on the MediaTrackTranscoder's output queue. Samples need to be dequeued from the output
+ * queue or the transcoder will run out of buffers and stall. Once the consumer is done with a
+ * transcoded sample it is the consumer's responsibility to release all references to that sample
+ * as soon as possible in order to return the buffer to the transcoder. MediaTrackTranscoder
+ * is an abstract class and instances are created through one of the concrete subclasses.
+ *
+ * The base class MediaTrackTranscoder is responsible for thread and state management and guarantees
+ * that operations {configure, start, stop} are sent to the derived class in correct order.
+ * MediaTrackTranscoder is also responsible for delivering callback notifications once the
+ * transcoder has been successfully started.
+ */
+class MediaTrackTranscoder {
+public:
+    /**
+     * Configures the track transcoder with an input MediaSampleReader and a destination format.
+     * A track transcoder has to be configured before it is started.
+     * @param mediaSampleReader The MediaSampleReader to read input samples from.
+     * @param trackIndex The index of the track to transcode in mediaSampleReader.
+     * @param destinationFormat The destination format.
+     * @return AMEDIA_OK if the track transcoder was successfully configured.
+     */
+    media_status_t configure(const std::shared_ptr<MediaSampleReader>& mediaSampleReader,
+                             int trackIndex,
+                             const std::shared_ptr<AMediaFormat>& destinationFormat);
+
+    /**
+     * Starts the track transcoder. After the track transcoder is successfully started it will run
+     * until a callback signals that transcoding has ended. Start should only be called once.
+     * @return True if the track transcoder started, or false if it had already been started.
+     */
+    bool start();
+
+    /**
+     * Stops the track transcoder. Once the transcoding has been stopped it cannot be restarted
+     * again. It is safe to call stop multiple times. Stop is an asynchronous operation. Once the
+     * track transcoder has stopped the onTrackStopped callback will get called, unless the
+     * transcoding finished or encountered an error before it could be stopped in which case the
+     * callbacks corresponding to those events will be called instead.
+     * @param stopOnSyncSample Request the transcoder to stop after emitting a sync sample.
+     */
+    void stop(bool stopOnSyncSample = false);
+
+    /**
+     * Set the sample consumer function. The MediaTrackTranscoder will deliver transcoded samples to
+     * this function. If the MediaTrackTranscoder is started before a consumer is set the transcoder
+     * will buffer a limited number of samples internally before stalling. Once a consumer has been
+     * set the internally buffered samples will be delivered to the consumer.
+     * @param sampleConsumer The sample consumer function.
+     */
+    void setSampleConsumer(const MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer);
+
+    /**
+     * Retrieves the track transcoder's final output format. The output format is available after
+     * the track transcoder has been successfully configured.
+     */
+      */
+    virtual std::shared_ptr<AMediaFormat> getOutputFormat() const = 0;
+
+    virtual ~MediaTrackTranscoder() = default;
+
+protected:
+    MediaTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : mTranscoderCallback(transcoderCallback){};
+
+    // Called by subclasses when the actual track format becomes available.
+    void notifyTrackFormatAvailable();
+
+    // Called by subclasses when a transcoded sample is available. Samples must not hold a strong
+    // reference to the track transcoder in order to avoid retain cycles through the track
+    // transcoder's sample queue.
+    void onOutputSampleAvailable(const std::shared_ptr<MediaSample>& sample);
+
+    // configureDestinationFormat needs to be implemented by subclasses, and gets called on an
+    // external thread before start.
+    virtual media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) = 0;
+
+    // runTranscodeLoop needs to be implemented by subclasses, and gets called on
+    // MediaTrackTranscoder's internal thread when the track transcoder is started.
+    virtual media_status_t runTranscodeLoop(bool* stopped) = 0;
+
+    // abortTranscodeLoop needs to be implemented by subclasses, and should request transcoding to
+    // be aborted as soon as possible. It should be safe to call abortTranscodeLoop multiple times.
+    virtual void abortTranscodeLoop() = 0;
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+
+    enum StopRequest {
+        NONE,
+        STOP_NOW,
+        STOP_ON_SYNC,
+    };
+    std::atomic<StopRequest> mStopRequest = NONE;
+
+private:
+    std::mutex mSampleMutex;
+    // SampleQueue for buffering output samples before a sample consumer has been set.
+    MediaSampleQueue mSampleQueue GUARDED_BY(mSampleMutex);
+    MediaSampleWriter::MediaSampleConsumerFunction mSampleConsumer GUARDED_BY(mSampleMutex);
+    const std::weak_ptr<MediaTrackTranscoderCallback> mTranscoderCallback;
+    std::mutex mStateMutex;
+    enum {
+        UNINITIALIZED,
+        CONFIGURED,
+        STARTED,
+        STOPPED,
+    } mState GUARDED_BY(mStateMutex) = UNINITIALIZED;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_H
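+
+// Illustrative usage sketch (not part of the header above): driving the configure/start lifecycle
+// of an already created track transcoder (for example a VideoTrackTranscoder or a
+// PassthroughTrackTranscoder). The function name and parameters are hypothetical.
+#include <media/MediaTrackTranscoder.h>
+
+namespace example {
+
+inline bool configureAndStart(
+        const std::shared_ptr<android::MediaTrackTranscoder>& transcoder,
+        const std::shared_ptr<android::MediaSampleReader>& reader, int trackIndex,
+        const std::shared_ptr<AMediaFormat>& destinationFormat,
+        const android::MediaSampleWriter::MediaSampleConsumerFunction& sampleConsumer) {
+    // configure() must be called before start().
+    if (transcoder->configure(reader, trackIndex, destinationFormat) != AMEDIA_OK) {
+        return false;
+    }
+
+    // Transcoded samples are delivered to the consumer, typically the function returned by
+    // MediaSampleWriter::addTrack(). Setting it before start() avoids internal buffering.
+    transcoder->setSampleConsumer(sampleConsumer);
+
+    return transcoder->start();
+}
+
+}  // namespace example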
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
new file mode 100644
index 0000000..7b62d46
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+#define ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
+
+#include <media/NdkMediaError.h>
+
+namespace android {
+
+class MediaTrackTranscoder;
+
+/** Callback interface for MediaTrackTranscoder. */
+class MediaTrackTranscoderCallback {
+public:
+    /**
+     * Called when the MediaTrackTranscoder's actual track format becomes available.
+     * @param transcoder The MediaTrackTranscoder whose track format becomes available.
+     */
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder);
+    /**
+     * Called when the MediaTrackTranscoder instance has finished transcoding all media samples
+     * successfully.
+     * @param transcoder The MediaTrackTranscoder that finished the transcoding.
+     */
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder);
+
+    /**
+     * Called when the MediaTrackTranscoder instance was explicitly stopped before it was finished.
+     * @param transcoder The MediaTrackTranscoder that was stopped.
+     */
+    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder);
+
+    /**
+     * Called when the MediaTrackTranscoder instance encountered an error it could not recover from.
+     * @param transcoder The MediaTrackTranscoder that encountered the error.
+     * @param status The non-zero error code describing the encountered error.
+     */
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder, media_status_t status);
+
+protected:
+    virtual ~MediaTrackTranscoderCallback() = default;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRACK_TRANSCODER_CALLBACK_H
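+
+// Illustrative usage sketch (not part of the header above): a callback implementation that records
+// the outcome of a single track transcoder. The class name and the use of std::promise are
+// hypothetical; only one of the terminal callbacks is expected to fire per transcoder.
+#include <media/MediaTrackTranscoderCallback.h>
+
+#include <future>
+
+namespace example {
+
+class TrackCallback : public android::MediaTrackTranscoderCallback {
+public:
+    // Blocks until the track transcoder finishes, is stopped or reports an error.
+    media_status_t waitForResult() { return mResult.get_future().get(); }
+
+    void onTrackFormatAvailable(const android::MediaTrackTranscoder*) override {}
+    void onTrackFinished(const android::MediaTrackTranscoder*) override {
+        mResult.set_value(AMEDIA_OK);
+    }
+    void onTrackStopped(const android::MediaTrackTranscoder*) override {
+        mResult.set_value(AMEDIA_OK);
+    }
+    void onTrackError(const android::MediaTrackTranscoder*, media_status_t status) override {
+        mResult.set_value(status);
+    }
+
+private:
+    std::promise<media_status_t> mResult;
+};
+
+}  // namespace example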
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
new file mode 100644
index 0000000..8776dc9
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODER_H
+#define ANDROID_MEDIA_TRANSCODER_H
+
+#include <android/binder_auto_utils.h>
+#include <media/MediaSampleWriter.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/NdkMediaCodecPlatform.h>
+#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Mutex.h>
+
+#include <atomic>
+#include <memory>
+#include <mutex>
+#include <unordered_set>
+
+namespace android {
+
+class MediaSampleReader;
+
+class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
+                        public MediaTrackTranscoderCallback,
+                        public MediaSampleWriter::CallbackInterface {
+public:
+    /** Callbacks from transcoder to client. */
+    class CallbackInterface {
+    public:
+        /** Transcoder finished successfully. */
+        virtual void onFinished(const MediaTranscoder* transcoder) = 0;
+
+        /** Transcoder encountered an unrecoverable error. */
+        virtual void onError(const MediaTranscoder* transcoder, media_status_t error) = 0;
+
+        /** Transcoder progress update reported in percent from 0 to 100. */
+        virtual void onProgressUpdate(const MediaTranscoder* transcoder, int32_t progress) = 0;
+
+        /** Transcoder heart-beat signal. */
+        virtual void onHeartBeat(const MediaTranscoder* transcoder) = 0;
+
+        /**
+         * Transcoder lost codec resources and paused operations. The client can resume transcoding
+         * again when resources are available by either:
+         *   1) Calling resume on the same MediaTranscoder instance.
+         *   2) Creating a new MediaTranscoder instance with the paused state and then calling
+         *      resume.
+         */
+        virtual void onCodecResourceLost(
+                const MediaTranscoder* transcoder,
+                const std::shared_ptr<ndk::ScopedAParcel>& pausedState) = 0;
+
+        virtual ~CallbackInterface() = default;
+    };
+
+    /**
+     * Creates a new MediaTranscoder instance. If the supplied paused state is valid, the transcoder
+     * will be initialized with the paused state and be ready to be resumed right away. It is not
+     * possible to change any configurations on a paused transcoder.
+     */
+    static std::shared_ptr<MediaTranscoder> create(
+            const std::shared_ptr<CallbackInterface>& callbacks, int64_t heartBeatIntervalUs = -1,
+            pid_t pid = AMEDIACODEC_CALLING_PID, uid_t uid = AMEDIACODEC_CALLING_UID,
+            const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
+
+    /** Configures the source from a file descriptor. */
+    media_status_t configureSource(int fd);
+
+    /** Gets the media formats of all tracks in the file. */
+    std::vector<std::shared_ptr<AMediaFormat>> getTrackFormats() const;
+
+    /**
+     * Configures transcoding of a track. Tracks that are not configured will not be present in the
+     * final transcoded file, i.e. tracks will be dropped by default. Passing nullptr for
+     * trackFormat means the track will be copied unchanged ("passthrough") to the destination.
+     * Track configurations must be done after the source has been configured.
+     * Note: trackFormat is not modified but cannot be const.
+     */
+    media_status_t configureTrackFormat(size_t trackIndex, AMediaFormat* trackFormat);
+
+    /** Configures destination from fd. */
+    media_status_t configureDestination(int fd);
+
+    /** Starts transcoding. No configurations can be made once the transcoder has started. */
+    media_status_t start();
+
+    /**
+     * Pauses transcoding and finalizes the partial transcoded file to disk. Pause is a synchronous
+     * operation and will wait until all internal components are done. Once this method returns it
+     * is safe to release the transcoder instance. No callback will be called if the transcoder was
+     * paused successfully. But if the transcoding finishes or encounters an error during pause,
+     * the corresponding callback will be called.
+     */
+    media_status_t pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState);
+
+    /** Resumes a paused transcoding. */
+    media_status_t resume();
+
+    /**
+     * Cancels the transcoding. Once canceled the transcoding cannot be restarted. The client
+     * will be responsible for cleaning up the abandoned file. Cancel is a synchronous operation and
+     * will wait until all internal components are done. Once this method returns it is safe to
+     * release the transcoder instance. Normally no callback will be called when the transcoder is
+     * cancelled. But if the transcoding finishes or encounters an error during cancel, the
+     * corresponding callback will be called.
+     */
+    media_status_t cancel();
+
+    virtual ~MediaTranscoder() = default;
+
+private:
+    MediaTranscoder(const std::shared_ptr<CallbackInterface>& callbacks,
+                    int64_t heartBeatIntervalUs, pid_t pid, uid_t uid);
+
+    // MediaTrackTranscoderCallback
+    virtual void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackFinished(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder) override;
+    virtual void onTrackError(const MediaTrackTranscoder* transcoder,
+                              media_status_t status) override;
+    // ~MediaTrackTranscoderCallback
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer, media_status_t status) override;
+    virtual void onStopped(const MediaSampleWriter* writer) override;
+    virtual void onProgressUpdate(const MediaSampleWriter* writer, int32_t progress) override;
+    virtual void onHeartBeat(const MediaSampleWriter* writer) override;
+    // ~MediaSampleWriter::CallbackInterface
+
+    void onThreadFinished(const void* thread, media_status_t threadStatus, bool threadStopped);
+    media_status_t requestStop(bool stopOnSync);
+    void waitForThreads();
+
+    std::shared_ptr<CallbackInterface> mCallbacks;
+    std::shared_ptr<MediaSampleReader> mSampleReader;
+    std::shared_ptr<MediaSampleWriter> mSampleWriter;
+    std::vector<std::shared_ptr<AMediaFormat>> mSourceTrackFormats;
+    std::vector<std::shared_ptr<MediaTrackTranscoder>> mTrackTranscoders;
+    std::mutex mTracksAddedMutex;
+    std::unordered_set<const MediaTrackTranscoder*> mTracksAdded GUARDED_BY(mTracksAddedMutex);
+    int64_t mHeartBeatIntervalUs;
+    pid_t mPid;
+    uid_t mUid;
+
+    enum ThreadState {
+        PENDING = 0,  // Not yet started.
+        RUNNING,      // Currently running.
+        DONE,         // Done running (can be finished, stopped or error).
+    };
+    std::mutex mThreadStateMutex;
+    std::condition_variable mThreadsDoneSignal;
+    std::unordered_map<const void*, ThreadState> mThreadStates GUARDED_BY(mThreadStateMutex);
+    media_status_t mTranscoderStatus GUARDED_BY(mThreadStateMutex) = AMEDIA_OK;
+    bool mTranscoderStopped GUARDED_BY(mThreadStateMutex) = false;
+    bool mThreadsDone GUARDED_BY(mThreadStateMutex) = false;
+    bool mCallbackSent GUARDED_BY(mThreadStateMutex) = false;
+    bool mSampleWriterStopped GUARDED_BY(mThreadStateMutex) = false;
+
+    std::atomic_bool mCancelled = false;
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_TRANSCODER_H
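+
+// Illustrative usage sketch (not part of the header above): remuxing every track of a source file
+// unchanged (passthrough). The callback implementation and names are hypothetical; a format other
+// than nullptr in configureTrackFormat() would request re-encoding of that track instead.
+#include <media/MediaTranscoder.h>
+
+namespace example {
+
+class TranscoderCallbacks : public android::MediaTranscoder::CallbackInterface {
+public:
+    void onFinished(const android::MediaTranscoder*) override {}
+    void onError(const android::MediaTranscoder*, media_status_t) override {}
+    void onProgressUpdate(const android::MediaTranscoder*, int32_t) override {}
+    void onHeartBeat(const android::MediaTranscoder*) override {}
+    void onCodecResourceLost(const android::MediaTranscoder*,
+                             const std::shared_ptr<ndk::ScopedAParcel>&) override {}
+};
+
+inline std::shared_ptr<android::MediaTranscoder> startPassthroughSession(int sourceFd,
+                                                                         int destinationFd) {
+    auto callbacks = std::make_shared<TranscoderCallbacks>();
+    auto transcoder = android::MediaTranscoder::create(callbacks);
+    if (transcoder == nullptr) return nullptr;
+
+    if (transcoder->configureSource(sourceFd) != AMEDIA_OK) return nullptr;
+
+    // Unconfigured tracks are dropped; nullptr selects passthrough for a track.
+    const auto trackFormats = transcoder->getTrackFormats();
+    for (size_t i = 0; i < trackFormats.size(); ++i) {
+        if (transcoder->configureTrackFormat(i, nullptr /* passthrough */) != AMEDIA_OK) {
+            return nullptr;
+        }
+    }
+
+    if (transcoder->configureDestination(destinationFd) != AMEDIA_OK) return nullptr;
+    if (transcoder->start() != AMEDIA_OK) return nullptr;
+
+    // The caller keeps the returned instance (and the callbacks it holds) alive until
+    // onFinished()/onError() is delivered.
+    return transcoder;
+}
+
+}  // namespace example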
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
new file mode 100644
index 0000000..2441922
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+#define ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
+
+#include <media/NdkMediaFormat.h>
+
+#include <vector>
+
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP8;
+extern const char* AMEDIA_MIMETYPE_VIDEO_VP9;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AV1;
+extern const char* AMEDIA_MIMETYPE_VIDEO_AVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_HEVC;
+extern const char* AMEDIA_MIMETYPE_VIDEO_MPEG4;
+extern const char* AMEDIA_MIMETYPE_VIDEO_H263;
+
+// TODO(b/146420990)
+// TODO: make MediaTranscoder use the consts from this header.
+typedef enum {
+    OUTPUT_FORMAT_START = 0,
+    OUTPUT_FORMAT_MPEG_4 = OUTPUT_FORMAT_START,
+    OUTPUT_FORMAT_WEBM = OUTPUT_FORMAT_START + 1,
+    OUTPUT_FORMAT_THREE_GPP = OUTPUT_FORMAT_START + 2,
+    OUTPUT_FORMAT_HEIF = OUTPUT_FORMAT_START + 3,
+    OUTPUT_FORMAT_OGG = OUTPUT_FORMAT_START + 4,
+    OUTPUT_FORMAT_LIST_END = OUTPUT_FORMAT_START + 4,
+} MuxerFormat;
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecConstants.h (are these going to be deprecated)
+static constexpr int COLOR_FormatYUV420SemiPlanar = 21;
+static constexpr int COLOR_FormatYUV420Flexible = 0x7F420888;
+static constexpr int COLOR_FormatSurface = 0x7f000789;
+
+// Color transfer functions defined by MediaCodecConstants.h but not in NDK
+static constexpr int32_t COLOR_TRANSFER_HLG = 7;
+static constexpr int32_t COLOR_TRANSFER_LINEAR = 1;
+static constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
+static constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
+
+// constants not defined in NDK
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_ALLOW_FRAME_DROP;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_REQUEST_SYNC_FRAME;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_VIDEO_BITRATE;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_MAX_B_FRAMES;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST;
+extern const char* TBD_AMEDIACODEC_PARAMETER_KEY_BACKGROUND_MODE;
+static constexpr int TBD_AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 0x1;
+
+static constexpr int kBitrateModeConstant = 2;
+
+namespace AMediaFormatUtils {
+
+typedef struct {
+    const char* key;
+    bool (*copy)(const char* key, AMediaFormat* from, AMediaFormat* to);
+    bool (*copy2)(const char* key, AMediaFormat* from, AMediaFormat* to);
+} EntryCopier;
+
+#define ENTRY_COPIER(keyName, typeName) \
+    { keyName, AMediaFormatUtils::CopyFormatEntry##typeName, nullptr }
+#define ENTRY_COPIER2(keyName, typeName, typeName2)            \
+    {                                                          \
+        keyName, AMediaFormatUtils::CopyFormatEntry##typeName, \
+                AMediaFormatUtils::CopyFormatEntry##typeName2  \
+    }
+
+bool CopyFormatEntryString(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt64(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryInt32(const char* key, AMediaFormat* from, AMediaFormat* to);
+bool CopyFormatEntryFloat(const char* key, AMediaFormat* from, AMediaFormat* to);
+
+void CopyFormatEntries(AMediaFormat* from, AMediaFormat* to,
+                       const std::vector<EntryCopier>& entries);
+
+bool SetDefaultFormatValueFloat(const char* key, AMediaFormat* format, float value);
+bool SetDefaultFormatValueInt32(const char* key, AMediaFormat* format, int32_t value);
+
+bool VideoIsHdr(AMediaFormat* format);
+
+}  // namespace AMediaFormatUtils
+#endif  // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
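+
+// A minimal usage sketch (illustrative only, not part of the header above): copying a fixed set of
+// entries from a source format to a destination format with the copier table. The keys are
+// standard NDK format keys; the function name and fallback value are hypothetical.
+#include <media/NdkCommon.h>
+
+namespace example {
+
+inline void copyBasicVideoEntries(AMediaFormat* from, AMediaFormat* to) {
+    static const std::vector<AMediaFormatUtils::EntryCopier> kCopiers = {
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_MIME, String),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_WIDTH, Int32),
+            ENTRY_COPIER(AMEDIAFORMAT_KEY_HEIGHT, Int32),
+            ENTRY_COPIER2(AMEDIAFORMAT_KEY_FRAME_RATE, Int32, Float),
+    };
+
+    AMediaFormatUtils::CopyFormatEntries(from, to, kCopiers);
+
+    // Fall back to an example frame rate when the source format did not carry one.
+    AMediaFormatUtils::SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, to, 30);
+}
+
+}  // namespace example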
diff --git a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
new file mode 100644
index 0000000..c074831
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+#define ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
+
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <map>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+/**
+ * Track transcoder for passthrough mode. Passthrough mode copies sample data from a track unchanged
+ * from source file to destination file. This track transcoder uses an internal pool of buffers.
+ * When the maximum number of buffers is allocated and all of them are waiting on the output queue,
+ * the transcoder will stall until samples are dequeued from the output queue and released.
+ */
+class PassthroughTrackTranscoder : public MediaTrackTranscoder {
+public:
+    /** Maximum number of buffers to be allocated at a given time. */
+    static constexpr int kMaxBufferCountDefault = 16;
+
+    PassthroughTrackTranscoder(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback)
+          : MediaTrackTranscoder(transcoderCallback),
+            mBufferPool(std::make_shared<BufferPool>(kMaxBufferCountDefault)){};
+    virtual ~PassthroughTrackTranscoder() override = default;
+
+private:
+    friend class BufferPoolTests;
+
+    /** Class to pool and reuse buffers. */
+    class BufferPool {
+    public:
+        explicit BufferPool(int maxBufferCount) : mMaxBufferCount(maxBufferCount){};
+        ~BufferPool();
+
+        /**
+         * Retrieve a buffer from the pool. Buffers are allocated on demand. This method will block
+         * if the maximum number of buffers is reached and there are no free buffers available.
+         * @param minimumBufferSize The minimum size of the buffer.
+         * @return The buffer or nullptr if allocation failed or the pool was aborted.
+         */
+        uint8_t* getBufferWithSize(size_t minimumBufferSize);
+
+        /**
+         * Return a buffer to the pool.
+         * @param buffer The buffer to return.
+         */
+        void returnBuffer(uint8_t* buffer);
+
+        /** Wakes up threads waiting on buffers and prevents new buffers from being returned. */
+        void abort();
+
+    private:
+        // Maximum number of active buffers at a time.
+        const int mMaxBufferCount;
+
+        // Map containing all tracked buffers.
+        std::unordered_map<uint8_t*, size_t> mAddressSizeMap GUARDED_BY(mMutex);
+
+        // Map containing the currently free buffers.
+        std::multimap<size_t, uint8_t*> mFreeBufferMap GUARDED_BY(mMutex);
+
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        bool mAborted GUARDED_BY(mMutex) = false;
+    };
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop(bool* stopped) override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    std::shared_ptr<BufferPool> mBufferPool;
+};
+
+}  // namespace android
+#endif  // ANDROID_PASSTHROUGH_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
new file mode 100644
index 0000000..3e72882
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_VIDEO_TRACK_TRANSCODER_H
+#define ANDROID_VIDEO_TRACK_TRANSCODER_H
+
+#include <android/native_window.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/NdkMediaCodecPlatform.h>
+#include <media/NdkMediaFormat.h>
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+
+namespace android {
+
+/**
+ * Track transcoder for video tracks. VideoTrackTranscoder internally uses two AMediaCodec
+ * instances from the Media NDK, a decoder and an encoder. The two codecs run in asynchronous
+ * mode and share uncompressed buffers through a native surface (ANativeWindow). Codec callback
+ * events are placed on a message queue and serviced in order on the transcoding thread managed
+ * by MediaTrackTranscoder.
+ */
+class VideoTrackTranscoder : public std::enable_shared_from_this<VideoTrackTranscoder>,
+                             public MediaTrackTranscoder {
+public:
+    static std::shared_ptr<VideoTrackTranscoder> create(
+            const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback,
+            pid_t pid = AMEDIACODEC_CALLING_PID, uid_t uid = AMEDIACODEC_CALLING_UID);
+
+    virtual ~VideoTrackTranscoder() override;
+
+private:
+    friend struct AsyncCodecCallbackDispatch;
+    friend class VideoTrackTranscoderTests;
+
+    // Minimal blocking queue used as a message queue by VideoTrackTranscoder.
+    template <typename T>
+    class BlockingQueue {
+    public:
+        void push(T const& value, bool front = false);
+        T pop();
+        void abort();
+
+    private:
+        std::mutex mMutex;
+        std::condition_variable mCondition;
+        std::deque<T> mQueue;
+        bool mAborted = false;
+    };
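+
+    // Illustrative sketch (not the actual implementation) of the message-queue pattern
+    // described in the class comment: codec callbacks push closures onto mCodecMessageQueue,
+    // and runTranscodeLoop() pops and runs them in order on the transcoding thread,
+    // for example:
+    //
+    //   mCodecMessageQueue.push([this, bufferIndex] { enqueueInputSample(bufferIndex); });
+    //   ...
+    //   std::function<void()> message = mCodecMessageQueue.pop();
+    //   message();
+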
+    class CodecWrapper;
+
+    VideoTrackTranscoder(const std::weak_ptr<MediaTrackTranscoderCallback>& transcoderCallback,
+                         pid_t pid, uid_t uid)
+          : MediaTrackTranscoder(transcoderCallback), mPid(pid), mUid(uid){};
+
+    // MediaTrackTranscoder
+    media_status_t runTranscodeLoop(bool* stopped) override;
+    void abortTranscodeLoop() override;
+    media_status_t configureDestinationFormat(
+            const std::shared_ptr<AMediaFormat>& destinationFormat) override;
+    std::shared_ptr<AMediaFormat> getOutputFormat() const override;
+    // ~MediaTrackTranscoder
+
+    // Enqueues an input sample with the decoder.
+    void enqueueInputSample(int32_t bufferIndex);
+
+    // Moves a decoded buffer from the decoder's output to the encoder's input.
+    void transferBuffer(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Dequeues an encoded buffer from the encoder and adds it to the output queue.
+    void dequeueOutputSample(int32_t bufferIndex, AMediaCodecBufferInfo bufferInfo);
+
+    // Updates the video track's actual format based on encoder and decoder output format.
+    void updateTrackFormat(AMediaFormat* outputFormat, bool fromDecoder);
+
+    AMediaCodec* mDecoder = nullptr;
+    std::shared_ptr<CodecWrapper> mEncoder;
+    ANativeWindow* mSurface = nullptr;
+    bool mEosFromSource = false;
+    bool mEosFromEncoder = false;
+    bool mLastSampleWasSync = false;
+    media_status_t mStatus = AMEDIA_OK;
+    MediaSampleInfo mSampleInfo;
+    BlockingQueue<std::function<void()>> mCodecMessageQueue;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+    std::shared_ptr<AMediaFormat> mActualOutputFormat;
+    pid_t mPid;
+    uid_t mUid;
+    uint64_t mInputFrameCount = 0;
+    uint64_t mOutputFrameCount = 0;
+    int32_t mConfiguredBitrate = 0;
+};
+
+}  // namespace android
+#endif  // ANDROID_VIDEO_TRACK_TRANSCODER_H
diff --git a/media/libmediatranscoding/transcoder/setloglevel.sh b/media/libmediatranscoding/transcoder/setloglevel.sh
new file mode 100755
index 0000000..b0f0a2e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/setloglevel.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+if [ $# -ne 1 ]
+then
+    echo "Usage 1: $0 <loglevel>"
+    echo "  Set all transcoder log tags to <loglevel>"
+    echo "Usage 2: $0 -l"
+    echo "  List all transcoder log tags and exit"
+    exit 1
+fi
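+
+# Example: "./setloglevel.sh VERBOSE" enables verbose logging for all transcoder tags
+# listed below, and "./setloglevel.sh -l" prints the tags without changing anything.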
+
+# List all log tags
+declare -a tags=(
+  MediaTranscoder MediaTrackTranscoder VideoTrackTranscoder PassthroughTrackTranscoder
+  MediaSampleWriter MediaSampleReader MediaSampleQueue MediaTranscoderTests
+  MediaTrackTranscoderTests VideoTrackTranscoderTests PassthroughTrackTranscoderTests
+  MediaSampleWriterTests MediaSampleReaderNDKTests MediaSampleQueueTests HdrTranscodeTests)
+
+if [ "$1" == "-l" ]; then
+  echo "Transcoder log tags:"
+  for tag in "${tags[@]}"; do echo -n "$tag "; done
+  echo
+  exit 0
+fi
+
+level=$1
+echo "Setting transcoder log level to $level"
+
+# Set log level for all tags
+for tag in "${tags[@]}"
+do
+    adb shell setprop "log.tag.${tag}" "$level"
+done
+
+# Pick up new settings
+adb shell stop && adb shell start
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/libmediatranscoding/transcoder/tests/Android.bp
new file mode 100644
index 0000000..11b19c9
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/Android.bp
@@ -0,0 +1,104 @@
+// Unit tests for libmediatranscoder.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "testdefaults",
+
+    header_libs: [
+        "libbase_headers",
+        "libmedia_headers",
+    ],
+
+    static_libs: [
+        "libmediatranscoder",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcrypto",
+        "libcutils",
+        "libmediandk",
+        "libnativewindow",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    data: [":test_assets"],
+    test_config_template: "AndroidTestTemplate.xml",
+    test_suites: ["device-tests", "TranscoderTests"],
+}
+
+// MediaSampleReaderNDK unit test
+cc_test {
+    name: "MediaSampleReaderNDKTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleReaderNDKTests.cpp"],
+}
+
+// MediaSampleQueue unit test
+cc_test {
+    name: "MediaSampleQueueTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleQueueTests.cpp"],
+}
+
+// MediaTrackTranscoder unit test
+cc_test {
+    name: "MediaTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTrackTranscoderTests.cpp"],
+}
+
+// VideoTrackTranscoder unit test
+cc_test {
+    name: "VideoTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["VideoTrackTranscoderTests.cpp"],
+}
+
+// PassthroughTrackTranscoder unit test
+cc_test {
+    name: "PassthroughTrackTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["PassthroughTrackTranscoderTests.cpp"],
+}
+
+// MediaSampleWriter unit test
+cc_test {
+    name: "MediaSampleWriterTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaSampleWriterTests.cpp"],
+}
+
+// HDR Transcode unit test
+cc_test {
+    name: "HdrTranscodeTests",
+    defaults: ["testdefaults"],
+    srcs: ["HdrTranscodeTests.cpp"],
+}
+
+// MediaTranscoder unit test
+cc_test {
+    name: "MediaTranscoderTests",
+    defaults: ["testdefaults"],
+    srcs: ["MediaTranscoderTests.cpp"],
+}
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
new file mode 100644
index 0000000..c3a0ced
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Unit test configuration for {MODULE}">
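+    <!-- {MODULE} is expanded to the name of the test module that references this file
+         via test_config_template in Android.bp (e.g. MediaSampleReaderNDKTests). -->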
+    <option name="test-suite-tag" value="TranscoderTests" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push-file" key="TranscodingTestAssets" value="/data/local/tmp/TranscodingTestAssets" />
+        <option name="push-file" key="{MODULE}" value="/data/local/tmp/{MODULE}" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="{MODULE}" />
+        <option name="native-test-timeout" value="30m" />
+    </test>
+</configuration>
+
diff --git a/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp b/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
new file mode 100644
index 0000000..3a2882b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for HDR to SDR transcoding.
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "HdrTranscodeTests"
+
+#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+// Debug property to load the sample HDR plugin.
+static const std::string kLoadSamplePluginProperty{"debug.codec2.force-sample-plugin"};
+
+// SDR color standard, from MediaFormat.
+static constexpr int COLOR_STANDARD_BT709 = 1;
+
+class HdrTranscodeTests : public ::testing::Test {
+public:
+    HdrTranscodeTests() { LOG(DEBUG) << "HdrTranscodeTests created"; }
+    ~HdrTranscodeTests() { LOG(DEBUG) << "HdrTranscodeTests destroyed"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "HdrTranscodeTests set up";
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+        ABinderProcess_startThreadPool();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "HdrTranscodeTests tear down";
+        mCallbacks.reset();
+    }
+
+    media_status_t transcode(const char* srcFile, const char* dstFile, const char* dstMime) {
+        std::string srcPath = mSrcDir + srcFile;
+        std::string dstPath = mDstDir + dstFile;
+
+        auto transcoder = MediaTranscoder::create(mCallbacks, -1 /*heartBeatIntervalUs*/);
+        EXPECT_NE(transcoder, nullptr);
+
+        const int srcFd = open(srcPath.c_str(), O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+        close(srcFd);
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        EXPECT_GT(trackFormats.size(), 0);
+
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            std::shared_ptr<AMediaFormat> format;
+            const char* mime = nullptr;
+
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                format = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+                AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, dstMime);
+            }
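+            // For non-video tracks the format stays null here; those tracks are expected to
+            // be passed through unchanged by the transcoder.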
+
+            media_status_t status = transcoder->configureTrackFormat(i, format.get());
+            if (status != AMEDIA_OK) {
+                return status;
+            }
+        }
+
+        const int dstFd = open(dstPath.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+        close(dstFd);
+
+        media_status_t startStatus = transcoder->start();
+        EXPECT_EQ(startStatus, AMEDIA_OK);
+        if (startStatus != AMEDIA_OK) {
+            return startStatus;
+        }
+
+        mCallbacks->waitForTranscodingFinished();
+        return mCallbacks->mStatus;
+    }
+
+    media_status_t validateOutput(const char* dstFile) {
+        std::string path = mDstDir + dstFile;
+
+        auto format = TranscoderTestUtils::GetVideoFormat(path);
+        EXPECT_NE(format.get(), nullptr);
+
+        int32_t value;
+        EXPECT_TRUE(AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_STANDARD, &value));
+        EXPECT_EQ(value, COLOR_STANDARD_BT709);
+
+        EXPECT_TRUE(AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_TRANSFER, &value));
+        EXPECT_EQ(value, COLOR_TRANSFER_SDR_VIDEO);
+
+        // TODO(lnilsson): Validate decoded pixels as well. Either by comparing similarity against a
+        //  known good "golden master" corresponding SDR video, or by looking at the histogram.
+        return AMEDIA_OK;
+    }
+
+    bool hdrToSdrConversionSupported(const char* hdrFile) {
+        std::string srcPath = mSrcDir + hdrFile;
+
+        std::string mime;
+        auto format = TranscoderTestUtils::GetVideoFormat(srcPath, &mime);
+        EXPECT_NE(format.get(), nullptr);
+
+        AMediaCodec* decoder = AMediaCodec_createDecoderByType(mime.c_str());
+        EXPECT_NE(decoder, nullptr);
+
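+        // Probe for HDR -> SDR support: request an SDR color transfer from the decoder and
+        // check below whether the request is reflected in the codec's input format.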
+        AMediaFormat_setInt32(format.get(), TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                              COLOR_TRANSFER_SDR_VIDEO);
+
+        EXPECT_EQ(AMediaCodec_configure(decoder, format.get(), nullptr /*surface*/,
+                                        nullptr /*crypto*/, 0 /*flags*/),
+                  AMEDIA_OK);
+
+        AMediaFormat* inputFormat = AMediaCodec_getInputFormat(decoder);
+        EXPECT_NE(inputFormat, nullptr);
+
+        int32_t transferFunc;
+        bool conversionSupported =
+                AMediaFormat_getInt32(inputFormat,
+                                      TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
+                                      &transferFunc) &&
+                transferFunc == COLOR_TRANSFER_SDR_VIDEO;
+
+        AMediaFormat_delete(inputFormat);
+        AMediaCodec_delete(decoder);
+
+        return conversionSupported;
+    }
+
+    std::shared_ptr<TestTranscoderCallbacks> mCallbacks;
+    const std::string mSrcDir{"/data/local/tmp/TranscodingTestAssets/"};
+    const std::string mDstDir{"/data/local/tmp/"};
+};
+
+TEST_F(HdrTranscodeTests, TestHdrSamplePluginTranscode) {
+    const char* hdrFile = "video_1280x720_hevc_hdr10_static_3mbps.mp4";
+    const char* dstFile = "video_1280x720_hevc_hdr10_static_3mbps_transcoded.mp4";
+
+    EXPECT_TRUE(android::base::SetProperty(kLoadSamplePluginProperty, "true"));
+
+    if (hdrToSdrConversionSupported(hdrFile)) {
+        LOG(INFO) << "HDR -> SDR supported, validating output..";
+        EXPECT_EQ(transcode(hdrFile, dstFile, AMEDIA_MIMETYPE_VIDEO_AVC), AMEDIA_OK);
+        EXPECT_EQ(validateOutput(dstFile), AMEDIA_OK);
+    } else {
+        LOG(INFO) << "HDR -> SDR *not* supported";
+        EXPECT_EQ(transcode(hdrFile, dstFile, AMEDIA_MIMETYPE_VIDEO_AVC), AMEDIA_ERROR_UNSUPPORTED);
+    }
+
+    EXPECT_TRUE(android::base::SetProperty(kLoadSamplePluginProperty, "false"));
+}
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
new file mode 100644
index 0000000..6357e4d
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleQueue
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleQueueTests"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+
+#include <thread>
+
+namespace android {
+
+/** Duration to use when delaying threads to order operations. */
+static constexpr int64_t kThreadDelayDurationMs = 100;
+
+class MediaSampleQueueTests : public ::testing::Test {
+public:
+    MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests created"; }
+    ~MediaSampleQueueTests() { LOG(DEBUG) << "MediaSampleQueueTests destroyed"; }
+};
+
+static std::shared_ptr<MediaSample> newSample(uint32_t id) {
+    return MediaSample::createWithReleaseCallback(nullptr /* buffer */, 0 /* offset */, id,
+                                                  nullptr /* callback */);
+}
+
+TEST_F(MediaSampleQueueTests, TestSequentialDequeueOrder) {
+    LOG(DEBUG) << "TestSequentialDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    // Enqueue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+        EXPECT_FALSE(sampleQueue.isEmpty());
+    }
+
+    // Dequeue loop.
+    for (int i = 0; i < kNumSamples; ++i) {
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+    }
+    EXPECT_TRUE(sampleQueue.isEmpty());
+}
+
+TEST_F(MediaSampleQueueTests, TestInterleavedDequeueOrder) {
+    LOG(DEBUG) << "TestInterleavedDequeueOrder Starts";
+
+    static constexpr int kNumSamples = 4;
+    MediaSampleQueue sampleQueue;
+
+    // Enqueue and dequeue.
+    for (int i = 0; i < kNumSamples; ++i) {
+        sampleQueue.enqueue(newSample(i));
+        EXPECT_FALSE(sampleQueue.isEmpty());
+
+        std::shared_ptr<MediaSample> sample;
+        bool aborted = sampleQueue.dequeue(&sample);
+        EXPECT_NE(sample, nullptr);
+        EXPECT_EQ(sample->bufferId, i);
+        EXPECT_FALSE(aborted);
+        EXPECT_TRUE(sampleQueue.isEmpty());
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingDequeue) {
+    LOG(DEBUG) << "TestBlockingDequeue Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread enqueueThread([&sampleQueue] {
+        // Note: This is inherently a bit racy. No amount of sleep can guarantee that the main
+        // thread is blocked on the sample queue by the time this thread calls enqueue. It very
+        // likely will be, though, and even if it is not the test still passes; it just does not
+        // exercise the blocking path.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.enqueue(newSample(1));
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_NE(sample, nullptr);
+    EXPECT_EQ(sample->bufferId, 1);
+    EXPECT_FALSE(aborted);
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    enqueueThread.join();
+}
+
+TEST_F(MediaSampleQueueTests, TestDequeueBufferRelease) {
+    LOG(DEBUG) << "TestDequeueBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        {
+            std::shared_ptr<MediaSample> sample;
+            bool aborted = sampleQueue.dequeue(&sample);
+            EXPECT_NE(sample, nullptr);
+            EXPECT_EQ(sample->bufferId, i);
+            EXPECT_FALSE(bufferReleased[i]);
+            EXPECT_FALSE(aborted);
+        }
+
+        for (int j = 0; j < kNumSamples; ++j) {
+            EXPECT_EQ(bufferReleased[j], j <= i);
+        }
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestAbortBufferRelease) {
+    LOG(DEBUG) << "TestAbortBufferRelease Starts";
+
+    static constexpr int kNumSamples = 4;
+    std::vector<bool> bufferReleased(kNumSamples, false);
+
+    MediaSample::OnSampleReleasedCallback callback = [&bufferReleased](MediaSample* sample) {
+        bufferReleased[sample->bufferId] = true;
+    };
+
+    MediaSampleQueue sampleQueue;
+    for (int i = 0; i < kNumSamples; ++i) {
+        bool aborted = sampleQueue.enqueue(
+                MediaSample::createWithReleaseCallback(nullptr, 0, i, callback));
+        EXPECT_FALSE(aborted);
+    }
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_FALSE(bufferReleased[i]);
+    }
+
+    EXPECT_FALSE(sampleQueue.isEmpty());
+    sampleQueue.abort();
+    EXPECT_TRUE(sampleQueue.isEmpty());
+
+    for (int i = 0; i < kNumSamples; ++i) {
+        EXPECT_TRUE(bufferReleased[i]);
+    }
+}
+
+TEST_F(MediaSampleQueueTests, TestNonEmptyAbort) {
+    LOG(DEBUG) << "TestNonEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    bool aborted = sampleQueue.enqueue(newSample(1));
+    EXPECT_FALSE(aborted);
+
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestEmptyAbort) {
+    LOG(DEBUG) << "TestEmptyAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+    sampleQueue.abort();
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    aborted = sampleQueue.enqueue(sample);
+    EXPECT_TRUE(aborted);
+}
+
+TEST_F(MediaSampleQueueTests, TestBlockingAbort) {
+    LOG(DEBUG) << "TestBlockingAbort Starts";
+
+    MediaSampleQueue sampleQueue;
+
+    std::thread abortingThread([&sampleQueue] {
+        // Note: This is inherently a bit racy. No amount of sleep can guarantee that the main
+        // thread is blocked on the sample queue by the time this thread calls abort. It very
+        // likely will be, though, and even if it is not the test still passes; it just does not
+        // exercise the blocking path.
+        std::this_thread::sleep_for(std::chrono::milliseconds(kThreadDelayDurationMs));
+        sampleQueue.abort();
+    });
+
+    std::shared_ptr<MediaSample> sample;
+    bool aborted = sampleQueue.dequeue(&sample);
+    EXPECT_TRUE(aborted);
+    EXPECT_EQ(sample, nullptr);
+
+    abortingThread.join();
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
new file mode 100644
index 0000000..3a499e5
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
@@ -0,0 +1,433 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleReaderNDK
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleReaderNDKTests"
+
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <openssl/md5.h>
+#include <utils/Timers.h>
+
+#include <cmath>
+#include <mutex>
+#include <thread>
+
+// TODO(b/153453392): Test more asset types (frame reordering?).
+
+namespace android {
+
+#define SEC_TO_USEC(s) ((s)*1000 * 1000)
+
+/** Helper class for comparing sample data using checksums. */
+class Sample {
+public:
+    Sample(uint32_t flags, int64_t timestamp, size_t size, const uint8_t* buffer)
+          : mFlags{flags}, mTimestamp{timestamp}, mSize{size} {
+        initChecksum(buffer);
+    }
+
+    Sample(AMediaExtractor* extractor) {
+        mFlags = AMediaExtractor_getSampleFlags(extractor);
+        mTimestamp = AMediaExtractor_getSampleTime(extractor);
+        mSize = static_cast<size_t>(AMediaExtractor_getSampleSize(extractor));
+
+        auto buffer = std::make_unique<uint8_t[]>(mSize);
+        AMediaExtractor_readSampleData(extractor, buffer.get(), mSize);
+
+        initChecksum(buffer.get());
+    }
+
+    void initChecksum(const uint8_t* buffer) {
+        MD5_CTX md5Ctx;
+        MD5_Init(&md5Ctx);
+        MD5_Update(&md5Ctx, buffer, mSize);
+        MD5_Final(mChecksum, &md5Ctx);
+    }
+
+    bool operator==(const Sample& rhs) const {
+        return mSize == rhs.mSize && mFlags == rhs.mFlags && mTimestamp == rhs.mTimestamp &&
+               memcmp(mChecksum, rhs.mChecksum, MD5_DIGEST_LENGTH) == 0;
+    }
+
+    uint32_t mFlags;
+    int64_t mTimestamp;
+    size_t mSize;
+    uint8_t mChecksum[MD5_DIGEST_LENGTH];
+};
+
+/** Constant for selecting all samples. */
+static constexpr int SAMPLE_COUNT_ALL = -1;
+
+/**
+ * Utility class to test different sample access patterns combined with sequential or parallel
+ * sample access modes.
+ */
+class SampleAccessTester {
+public:
+    SampleAccessTester(int sourceFd, size_t fileSize) {
+        mSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0, fileSize);
+        EXPECT_TRUE(mSampleReader);
+
+        mTrackCount = mSampleReader->getTrackCount();
+
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            EXPECT_EQ(mSampleReader->selectTrack(trackIndex), AMEDIA_OK);
+        }
+
+        mSamples.resize(mTrackCount);
+        mTrackThreads.resize(mTrackCount);
+    }
+
+    void getSampleInfo(int trackIndex) {
+        MediaSampleInfo info;
+        media_status_t status = mSampleReader->getSampleInfoForTrack(trackIndex, &info);
+        EXPECT_EQ(status, AMEDIA_OK);
+    }
+
+    void readSamplesAsync(int trackIndex, int sampleCount) {
+        mTrackThreads[trackIndex] = std::thread{[this, trackIndex, sampleCount] {
+            int samplesRead = 0;
+            MediaSampleInfo info;
+            while (samplesRead < sampleCount || sampleCount == SAMPLE_COUNT_ALL) {
+                media_status_t status = mSampleReader->getSampleInfoForTrack(trackIndex, &info);
+                if (status != AMEDIA_OK) {
+                    EXPECT_EQ(status, AMEDIA_ERROR_END_OF_STREAM);
+                    EXPECT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0);
+                    break;
+                }
+                ASSERT_TRUE((info.flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+
+                auto buffer = std::make_unique<uint8_t[]>(info.size);
+                status = mSampleReader->readSampleDataForTrack(trackIndex, buffer.get(), info.size);
+                EXPECT_EQ(status, AMEDIA_OK);
+
+                mSampleMutex.lock();
+                const uint8_t* bufferPtr = buffer.get();
+                mSamples[trackIndex].emplace_back(info.flags, info.presentationTimeUs, info.size,
+                                                  bufferPtr);
+                mSampleMutex.unlock();
+                ++samplesRead;
+            }
+        }};
+    }
+
+    void readSamplesAsync(int sampleCount) {
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            readSamplesAsync(trackIndex, sampleCount);
+        }
+    }
+
+    void waitForTrack(int trackIndex) {
+        ASSERT_TRUE(mTrackThreads[trackIndex].joinable());
+        mTrackThreads[trackIndex].join();
+    }
+
+    void waitForTracks() {
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            waitForTrack(trackIndex);
+        }
+    }
+
+    void setEnforceSequentialAccess(bool enforce) {
+        media_status_t status = mSampleReader->setEnforceSequentialAccess(enforce);
+        EXPECT_EQ(status, AMEDIA_OK);
+    }
+
+    std::vector<std::vector<Sample>>& getSamples() { return mSamples; }
+
+    std::shared_ptr<MediaSampleReader> mSampleReader;
+    size_t mTrackCount;
+    std::mutex mSampleMutex;
+    std::vector<std::thread> mTrackThreads;
+    std::vector<std::vector<Sample>> mSamples;
+};
+
+class MediaSampleReaderNDKTests : public ::testing::Test {
+public:
+    MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests set up";
+
+        // Need to start a thread pool to prevent AMediaExtractor binder calls from starving
+        // (b/155663561).
+        ABinderProcess_startThreadPool();
+
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+    }
+
+    void initExtractorSamples() {
+        if (mExtractorSamples.size() == mTrackCount) return;
+
+        // Save sample information, per track, as reported by the extractor.
+        mExtractorSamples.resize(mTrackCount);
+        do {
+            const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+            mExtractorSamples[trackIndex].emplace_back(mExtractor);
+        } while (AMediaExtractor_advance(mExtractor));
+
+        AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+    }
+
+    std::vector<int32_t> getTrackBitrates() {
+        size_t totalSize[mTrackCount];
+        memset(totalSize, 0, sizeof(totalSize));
+
+        do {
+            const int trackIndex = AMediaExtractor_getSampleTrackIndex(mExtractor);
+            totalSize[trackIndex] += AMediaExtractor_getSampleSize(mExtractor);
+        } while (AMediaExtractor_advance(mExtractor));
+
+        AMediaExtractor_seekTo(mExtractor, 0, AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+
+        std::vector<int32_t> bitrates;
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            int64_t durationUs;
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            EXPECT_NE(trackFormat, nullptr);
+            EXPECT_TRUE(AMediaFormat_getInt64(trackFormat, AMEDIAFORMAT_KEY_DURATION, &durationUs));
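+            // bits per second = total bytes * 8 / (durationUs / 1,000,000).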
+            bitrates.push_back(roundf((float)totalSize[trackIndex] * 8 * 1000000 / durationUs));
+        }
+
+        return bitrates;
+    }
+
+    void compareSamples(std::vector<std::vector<Sample>>& readerSamples) {
+        initExtractorSamples();
+        EXPECT_EQ(readerSamples.size(), mTrackCount);
+
+        for (int trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            LOG(DEBUG) << "Track " << trackIndex << ", comparing "
+                       << readerSamples[trackIndex].size() << " samples.";
+            EXPECT_EQ(readerSamples[trackIndex].size(), mExtractorSamples[trackIndex].size());
+            for (size_t sampleIndex = 0; sampleIndex < readerSamples[trackIndex].size();
+                 sampleIndex++) {
+                EXPECT_EQ(readerSamples[trackIndex][sampleIndex],
+                          mExtractorSamples[trackIndex][sampleIndex]);
+            }
+        }
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleReaderNDKTests tear down";
+        AMediaExtractor_delete(mExtractor);
+        close(mSourceFd);
+    }
+
+    ~MediaSampleReaderNDKTests() { LOG(DEBUG) << "MediaSampleReaderNDKTests destroyed"; }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount;
+    int mSourceFd;
+    size_t mFileSize;
+    std::vector<std::vector<Sample>> mExtractorSamples;
+};
+
+/** Reads all samples from all tracks in parallel. */
+TEST_F(MediaSampleReaderNDKTests, TestParallelSampleAccess) {
+    LOG(DEBUG) << "TestParallelSampleAccess Starts";
+
+    SampleAccessTester tester{mSourceFd, mFileSize};
+    tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+    tester.waitForTracks();
+    compareSamples(tester.getSamples());
+}
+
+/** Reads all samples except the last in each track, before finishing. */
+TEST_F(MediaSampleReaderNDKTests, TestLastSampleBeforeEOS) {
+    LOG(DEBUG) << "TestLastSampleBeforeEOS Starts";
+    initExtractorSamples();
+
+    {  // Natural track order
+        SampleAccessTester tester{mSourceFd, mFileSize};
+        for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+            tester.readSamplesAsync(trackIndex, mExtractorSamples[trackIndex].size() - 1);
+        }
+        tester.waitForTracks();
+        for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+            tester.readSamplesAsync(trackIndex, SAMPLE_COUNT_ALL);
+            tester.waitForTrack(trackIndex);
+        }
+        compareSamples(tester.getSamples());
+    }
+
+    {  // Reverse track order
+        SampleAccessTester tester{mSourceFd, mFileSize};
+        for (int trackIndex = mTrackCount - 1; trackIndex >= 0; --trackIndex) {
+            tester.readSamplesAsync(trackIndex, mExtractorSamples[trackIndex].size() - 1);
+        }
+        tester.waitForTracks();
+        for (int trackIndex = mTrackCount - 1; trackIndex >= 0; --trackIndex) {
+            tester.readSamplesAsync(trackIndex, SAMPLE_COUNT_ALL);
+            tester.waitForTrack(trackIndex);
+        }
+        compareSamples(tester.getSamples());
+    }
+}
+
+/** Reads all samples from all tracks sequentially. */
+TEST_F(MediaSampleReaderNDKTests, TestSequentialSampleAccess) {
+    LOG(DEBUG) << "TestSequentialSampleAccess Starts";
+
+    SampleAccessTester tester{mSourceFd, mFileSize};
+    tester.setEnforceSequentialAccess(true);
+    tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+    tester.waitForTracks();
+    compareSamples(tester.getSamples());
+}
+
+/** Reads all samples from one track in parallel mode before switching to sequential mode. */
+TEST_F(MediaSampleReaderNDKTests, TestMixedSampleAccessTrackEOS) {
+    LOG(DEBUG) << "TestMixedSampleAccessTrackEOS Starts";
+
+    for (int readSampleInfoFlag = 0; readSampleInfoFlag <= 1; readSampleInfoFlag++) {
+        for (int trackIndToEOS = 0; trackIndToEOS < mTrackCount; ++trackIndToEOS) {
+            LOG(DEBUG) << "Testing EOS of track " << trackIndToEOS;
+
+            SampleAccessTester tester{mSourceFd, mFileSize};
+
+            // If the flag is set, read sample info from a different track before draining the track
+            // under test to force the reader to save the extractor position.
+            if (readSampleInfoFlag) {
+                tester.getSampleInfo((trackIndToEOS + 1) % mTrackCount);
+            }
+
+            // Read all samples from one track before enabling sequential access
+            tester.readSamplesAsync(trackIndToEOS, SAMPLE_COUNT_ALL);
+            tester.waitForTrack(trackIndToEOS);
+            tester.setEnforceSequentialAccess(true);
+
+            for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+                if (trackIndex == trackIndToEOS) continue;
+
+                tester.readSamplesAsync(trackIndex, SAMPLE_COUNT_ALL);
+                tester.waitForTrack(trackIndex);
+            }
+
+            compareSamples(tester.getSamples());
+        }
+    }
+}
+
+/**
+ * Reads different combinations of sample counts from all tracks in parallel mode before switching
+ * to sequential mode and reading the rest of the samples.
+ */
+TEST_F(MediaSampleReaderNDKTests, TestMixedSampleAccess) {
+    LOG(DEBUG) << "TestMixedSampleAccess Starts";
+    initExtractorSamples();
+
+    for (int trackIndToTest = 0; trackIndToTest < mTrackCount; ++trackIndToTest) {
+        for (int sampleCount = 0; sampleCount <= (mExtractorSamples[trackIndToTest].size() + 1);
+             ++sampleCount) {
+            SampleAccessTester tester{mSourceFd, mFileSize};
+
+            for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+                if (trackIndex == trackIndToTest) {
+                    tester.readSamplesAsync(trackIndex, sampleCount);
+                } else {
+                    tester.readSamplesAsync(trackIndex, mExtractorSamples[trackIndex].size() / 2);
+                }
+            }
+
+            tester.waitForTracks();
+            tester.setEnforceSequentialAccess(true);
+
+            tester.readSamplesAsync(SAMPLE_COUNT_ALL);
+            tester.waitForTracks();
+
+            compareSamples(tester.getSamples());
+        }
+    }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestEstimatedBitrateAccuracy) {
+    // Just put a somewhat reasonable upper bound on the estimated bitrate expected in our test
+    // assets. This is mostly to make sure the estimation is not way off.
+    static constexpr int32_t kMaxEstimatedBitrate = 100 * 1000 * 1000;  // 100 Mbps
+
+    auto sampleReader = MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mFileSize);
+    ASSERT_TRUE(sampleReader);
+
+    std::vector<int32_t> actualTrackBitrates = getTrackBitrates();
+    for (int trackIndex = 0; trackIndex < mTrackCount; ++trackIndex) {
+        EXPECT_EQ(sampleReader->selectTrack(trackIndex), AMEDIA_OK);
+
+        int32_t bitrate;
+        EXPECT_EQ(sampleReader->getEstimatedBitrateForTrack(trackIndex, &bitrate), AMEDIA_OK);
+        EXPECT_GT(bitrate, 0);
+        EXPECT_LT(bitrate, kMaxEstimatedBitrate);
+
+        // Note: The asset currently used in this test is shorter than the sampling duration that
+        // the sample reader uses to estimate the bitrate, so for now the estimate should be
+        // exact. If/when a longer asset is used, a reasonable delta needs to be defined.
+        EXPECT_EQ(bitrate, actualTrackBitrates[trackIndex]);
+    }
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidFd) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(0, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+
+    sampleReader = MediaSampleReaderNDK::createFromFd(-1, 0, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestZeroSize) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, 0);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+TEST_F(MediaSampleReaderNDKTests, TestInvalidOffset) {
+    std::shared_ptr<MediaSampleReader> sampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, mFileSize, mFileSize);
+    ASSERT_TRUE(sampleReader == nullptr);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
new file mode 100644
index 0000000..0a8a06e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
@@ -0,0 +1,601 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaSampleWriter
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaSampleWriterTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleQueue.h>
+#include <media/MediaSampleWriter.h>
+#include <media/NdkMediaExtractor.h>
+
+#include <condition_variable>
+#include <list>
+#include <mutex>
+
+namespace android {
+
+/** Muxer interface to enable MediaSampleWriter testing. */
+class TestMuxer : public MediaSampleWriterMuxerInterface {
+public:
+    // MuxerInterface
+    ssize_t addTrack(AMediaFormat* trackFormat) override {
+        mEventQueue.push_back(AddTrack(trackFormat));
+        return mTrackCount++;
+    }
+    media_status_t start() override {
+        mEventQueue.push_back(Start());
+        return AMEDIA_OK;
+    }
+
+    media_status_t writeSampleData(size_t trackIndex, const uint8_t* data,
+                                   const AMediaCodecBufferInfo* info) override {
+        mEventQueue.push_back(WriteSample(trackIndex, data, info));
+        return AMEDIA_OK;
+    }
+    media_status_t stop() override {
+        mEventQueue.push_back(Stop());
+        return AMEDIA_OK;
+    }
+    // ~MuxerInterface
+
+    struct Event {
+        enum { NoEvent, AddTrack, Start, WriteSample, Stop } type = NoEvent;
+        const AMediaFormat* format = nullptr;
+        size_t trackIndex = 0;
+        const uint8_t* data = nullptr;
+        AMediaCodecBufferInfo info{};
+    };
+
+    static constexpr Event NoEvent = {Event::NoEvent, nullptr, 0, nullptr, {}};
+
+    static Event AddTrack(const AMediaFormat* format) {
+        return {.type = Event::AddTrack, .format = format};
+    }
+
+    static Event Start() { return {.type = Event::Start}; }
+    static Event Stop() { return {.type = Event::Stop}; }
+
+    static Event WriteSample(size_t trackIndex, const uint8_t* data,
+                             const AMediaCodecBufferInfo* info) {
+        return {.type = Event::WriteSample, .trackIndex = trackIndex, .data = data, .info = *info};
+    }
+
+    static Event WriteSampleWithPts(size_t trackIndex, int64_t pts) {
+        return {.type = Event::WriteSample, .trackIndex = trackIndex, .info = {0, 0, pts, 0}};
+    }
+
+    void pushEvent(const Event& e) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mEventQueue.push_back(e);
+        mCondition.notify_one();
+    }
+
+    const Event& popEvent(bool wait = false) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (wait && mEventQueue.empty()) {
+            mCondition.wait_for(lock, std::chrono::milliseconds(200));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+        return mPoppedEvent;
+    }
+
+private:
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    ssize_t mTrackCount = 0;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+};
+
+bool operator==(const AMediaCodecBufferInfo& lhs, const AMediaCodecBufferInfo& rhs) {
+    return lhs.offset == rhs.offset && lhs.size == rhs.size &&
+           lhs.presentationTimeUs == rhs.presentationTimeUs && lhs.flags == rhs.flags;
+}
+
+bool operator==(const TestMuxer::Event& lhs, const TestMuxer::Event& rhs) {
+    // Don't test format pointer equality since the writer can make a copy.
+    return lhs.type == rhs.type /*&& lhs.format == rhs.format*/ &&
+           lhs.trackIndex == rhs.trackIndex && lhs.data == rhs.data && lhs.info == rhs.info;
+}
+
+/** Represents a media source file. */
+class TestMediaSource {
+public:
+    void init() {
+        static const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        media_status_t status = AMediaExtractor_setDataSourceFd(mExtractor, sourceFd, 0, fileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+        close(sourceFd);
+
+        mTrackCount = AMediaExtractor_getTrackCount(mExtractor);
+        ASSERT_GT(mTrackCount, 1);
+        for (size_t trackIndex = 0; trackIndex < mTrackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mVideoTrackIndex = trackIndex;
+            } else if (strncmp(mime, "audio/", 6) == 0) {
+                mAudioTrackIndex = trackIndex;
+            }
+
+            mTrackFormats.push_back(
+                    std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete));
+
+            AMediaExtractor_selectTrack(mExtractor, trackIndex);
+        }
+        EXPECT_GE(mVideoTrackIndex, 0);
+        EXPECT_GE(mAudioTrackIndex, 0);
+    }
+
+    void reset() const {
+        media_status_t status = AMediaExtractor_seekTo(mExtractor, 0 /* seekPosUs */,
+                                                       AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC);
+        ASSERT_EQ(status, AMEDIA_OK);
+    }
+
+    AMediaExtractor* mExtractor = nullptr;
+    size_t mTrackCount = 0;
+    std::vector<std::shared_ptr<AMediaFormat>> mTrackFormats;
+    int mVideoTrackIndex = -1;
+    int mAudioTrackIndex = -1;
+};
+
+class TestCallbacks : public MediaSampleWriter::CallbackInterface {
+public:
+    bool hasFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        return mFinished;
+    }
+
+    // MediaSampleWriter::CallbackInterface
+    virtual void onFinished(const MediaSampleWriter* writer __unused,
+                            media_status_t status) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mStatus = status;
+        mCondition.notify_all();
+    }
+
+    virtual void onStopped(const MediaSampleWriter* writer __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mStopped = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaSampleWriter* writer __unused,
+                                  int32_t progress) override {
+        EXPECT_GT(progress, mLastProgress);
+        EXPECT_GE(progress, 0);
+        EXPECT_LE(progress, 100);
+
+        mLastProgress = progress;
+        mProgressUpdateCount++;
+    }
+
+    virtual void onHeartBeat(const MediaSampleWriter* writer __unused) override {}
+    // ~MediaSampleWriter::CallbackInterface
+
+    void waitForWritingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished && !mStopped) {
+            mCondition.wait(lock);
+        }
+    }
+
+    uint32_t getProgressUpdateCount() const { return mProgressUpdateCount; }
+    bool wasStopped() const { return mStopped; }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    bool mStopped = false;
+    media_status_t mStatus = AMEDIA_OK;
+    int32_t mLastProgress = -1;
+    uint32_t mProgressUpdateCount = 0;
+};
+
+class MediaSampleWriterTests : public ::testing::Test {
+public:
+    MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests created"; }
+    ~MediaSampleWriterTests() { LOG(DEBUG) << "MediaSampleWriterTests destroyed"; }
+
+    static const TestMediaSource& getMediaSource() {
+        static TestMediaSource sMediaSource;
+        static std::once_flag sOnceToken;
+
+        std::call_once(sOnceToken, [] { sMediaSource.init(); });
+
+        sMediaSource.reset();
+        return sMediaSource;
+    }
+
+    static std::shared_ptr<MediaSample> newSample(int64_t ptsUs, uint32_t flags, size_t size,
+                                                  size_t offset, const uint8_t* buffer) {
+        auto sample = std::make_shared<MediaSample>();
+        sample->info.presentationTimeUs = ptsUs;
+        sample->info.flags = flags;
+        sample->info.size = size;
+        sample->dataOffset = offset;
+        sample->buffer = buffer;
+        return sample;
+    }
+
+    static std::shared_ptr<MediaSample> newSampleEos() {
+        return newSample(0, SAMPLE_FLAG_END_OF_STREAM, 0, 0, nullptr);
+    }
+
+    static std::shared_ptr<MediaSample> newSampleWithPts(int64_t ptsUs) {
+        static uint32_t sampleCount = 0;
+
+        // Use sampleCount to get a unique mock sample.
+        uint32_t sampleId = ++sampleCount;
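+        // The buffer pointer is a fake, unique value; TestMuxer only records the pointer
+        // and never dereferences it, so this is safe for these tests.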
+        return newSample(ptsUs, 0, sampleId, sampleId, reinterpret_cast<const uint8_t*>(sampleId));
+    }
+
+    static std::shared_ptr<MediaSample> newSampleWithPtsOnly(int64_t ptsUs) {
+        return newSample(ptsUs, 0, 0, 0, nullptr);
+    }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaSampleWriterTests set up";
+        mTestMuxer = std::make_shared<TestMuxer>();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaSampleWriterTests tear down";
+        mTestMuxer.reset();
+    }
+
+protected:
+    std::shared_ptr<TestMuxer> mTestMuxer;
+    std::shared_ptr<TestCallbacks> mTestCallbacks = std::make_shared<TestCallbacks>();
+};
+
+TEST_F(MediaSampleWriterTests, TestAddTrackWithoutInit) {
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutInit) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutTracks) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+    EXPECT_FALSE(writer->start());
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestAddInvalidTrack) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    EXPECT_EQ(writer->addTrack(nullptr), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestDoubleStartStop) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+    std::shared_ptr<TestCallbacks> callbacks = std::make_shared<TestCallbacks>();
+    EXPECT_TRUE(writer->init(mTestMuxer, callbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    ASSERT_TRUE(writer->start());
+    EXPECT_FALSE(writer->start());
+
+    writer->stop();
+    writer->stop();
+    callbacks->waitForWritingFinished();
+    EXPECT_TRUE(callbacks->wasStopped());
+}
+
+TEST_F(MediaSampleWriterTests, TestStopWithoutStart) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_NE(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(mediaSource.mTrackFormats[0].get()));
+
+    writer->stop();
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::NoEvent);
+}
+
+TEST_F(MediaSampleWriterTests, TestStartWithoutCallback) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+
+    std::weak_ptr<MediaSampleWriter::CallbackInterface> unassignedWp;
+    EXPECT_FALSE(writer->init(mTestMuxer, unassignedWp));
+
+    std::shared_ptr<MediaSampleWriter::CallbackInterface> unassignedSp;
+    EXPECT_FALSE(writer->init(mTestMuxer, unassignedSp));
+
+    const TestMediaSource& mediaSource = getMediaSource();
+    EXPECT_EQ(writer->addTrack(mediaSource.mTrackFormats[0]), nullptr);
+    ASSERT_FALSE(writer->start());
+}
+
+TEST_F(MediaSampleWriterTests, TestProgressUpdate) {
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    std::shared_ptr<AMediaFormat> videoFormat =
+            std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
+    AMediaFormat_copy(videoFormat.get(),
+                      mediaSource.mTrackFormats[mediaSource.mVideoTrackIndex].get());
+
+    AMediaFormat_setInt64(videoFormat.get(), AMEDIAFORMAT_KEY_DURATION, 100);
+    auto sampleConsumer = writer->addTrack(videoFormat);
+    EXPECT_NE(sampleConsumer, nullptr);
+    ASSERT_TRUE(writer->start());
+
+    for (int64_t pts = 0; pts < 100; ++pts) {
+        sampleConsumer(newSampleWithPts(pts));
+    }
+    sampleConsumer(newSampleEos());
+    mTestCallbacks->waitForWritingFinished();
+
+    EXPECT_EQ(mTestCallbacks->getProgressUpdateCount(), 100);
+}
+
+TEST_F(MediaSampleWriterTests, TestInterleaving) {
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(mTestMuxer, mTestCallbacks));
+
+    // Use two tracks for this test.
+    static constexpr int kNumTracks = 2;
+    MediaSampleWriter::MediaSampleConsumerFunction sampleConsumers[kNumTracks];
+    std::vector<std::pair<std::shared_ptr<MediaSample>, size_t>> addedSamples;
+    const TestMediaSource& mediaSource = getMediaSource();
+
+    for (int trackIdx = 0; trackIdx < kNumTracks; ++trackIdx) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIdx % mediaSource.mTrackCount];
+        sampleConsumers[trackIdx] = writer->addTrack(trackFormat);
+        EXPECT_NE(sampleConsumers[trackIdx], nullptr);
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::AddTrack(trackFormat.get()));
+    }
+
+    // Create samples in the expected interleaved order for easy verification.
+    auto addSampleToTrackWithPts = [&addedSamples, &sampleConsumers](int trackIndex, int64_t pts) {
+        auto sample = newSampleWithPts(pts);
+        sampleConsumers[trackIndex](sample);
+        addedSamples.emplace_back(sample, trackIndex);
+    };
+
+    addSampleToTrackWithPts(0, 0);
+    addSampleToTrackWithPts(1, 4);
+
+    addSampleToTrackWithPts(0, 1);
+    addSampleToTrackWithPts(0, 2);
+    addSampleToTrackWithPts(0, 3);
+    addSampleToTrackWithPts(0, 10);
+
+    addSampleToTrackWithPts(1, 5);
+    addSampleToTrackWithPts(1, 6);
+    addSampleToTrackWithPts(1, 11);
+
+    addSampleToTrackWithPts(0, 12);
+    addSampleToTrackWithPts(1, 13);
+
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        sampleConsumers[trackIndex](newSampleEos());
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer->start());
+
+    // Wait for writer to complete.
+    mTestCallbacks->waitForWritingFinished();
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Start());
+
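+    // The writer is expected to write samples in presentation-time order across both tracks, so
+    // sort the enqueued samples by pts to build the expected write order.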
+    std::sort(addedSamples.begin(), addedSamples.end(),
+              [](const std::pair<std::shared_ptr<MediaSample>, size_t>& left,
+                 const std::pair<std::shared_ptr<MediaSample>, size_t>& right) {
+                  return left.first->info.presentationTimeUs < right.first->info.presentationTimeUs;
+              });
+
+    // Verify sample order.
+    for (auto entry : addedSamples) {
+        auto sample = entry.first;
+        auto trackIndex = entry.second;
+
+        const TestMuxer::Event& event = mTestMuxer->popEvent();
+        EXPECT_EQ(event.type, TestMuxer::Event::WriteSample);
+        EXPECT_EQ(event.trackIndex, trackIndex);
+        EXPECT_EQ(event.data, sample->buffer);
+        EXPECT_EQ(event.info.offset, sample->dataOffset);
+        EXPECT_EQ(event.info.size, sample->info.size);
+        EXPECT_EQ(event.info.presentationTimeUs, sample->info.presentationTimeUs);
+        EXPECT_EQ(event.info.flags, sample->info.flags);
+    }
+
+    // Verify EOS samples.
+    for (int trackIndex = 0; trackIndex < kNumTracks; ++trackIndex) {
+        auto trackFormat = mediaSource.mTrackFormats[trackIndex % mediaSource.mTrackCount];
+        int64_t duration = 0;
+        AMediaFormat_getInt64(trackFormat.get(), AMEDIAFORMAT_KEY_DURATION, &duration);
+
+        // EOS timestamp = first sample timestamp + duration.
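+        // (Track 1's first sample was enqueued with pts 4 above, while track 0 starts at pts 0.)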
+        const int64_t endTime = duration + (trackIndex == 1 ? 4 : 0);
+        const AMediaCodecBufferInfo info = {0, 0, endTime, AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM};
+
+        EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::WriteSample(trackIndex, nullptr, &info));
+    }
+
+    EXPECT_EQ(mTestMuxer->popEvent(), TestMuxer::Stop());
+    EXPECT_TRUE(mTestCallbacks->hasFinished());
+}
+
+// Convenience function that reads a sample from an AMediaExtractor and returns it as a MediaSample.
+static std::shared_ptr<MediaSample> readSampleAndAdvance(AMediaExtractor* extractor,
+                                                         size_t* trackIndexOut) {
+    int trackIndex = AMediaExtractor_getSampleTrackIndex(extractor);
+    if (trackIndex < 0) {
+        return nullptr;
+    }
+
+    if (trackIndexOut != nullptr) {
+        *trackIndexOut = trackIndex;
+    }
+
+    ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+    int64_t sampleTimeUs = AMediaExtractor_getSampleTime(extractor);
+    uint32_t flags = AMediaExtractor_getSampleFlags(extractor);
+
+    size_t bufferSize = static_cast<size_t>(sampleSize);
+    uint8_t* buffer = new uint8_t[bufferSize];
+
+    ssize_t dataRead = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
+    EXPECT_EQ(dataRead, sampleSize);
+
+    auto sample = MediaSample::createWithReleaseCallback(
+            buffer, 0 /* offset */, 0 /* id */, [buffer](MediaSample*) { delete[] buffer; });
+    sample->info.size = bufferSize;
+    sample->info.presentationTimeUs = sampleTimeUs;
+    sample->info.flags = flags;
+
+    (void)AMediaExtractor_advance(extractor);
+    return sample;
+}
+
+TEST_F(MediaSampleWriterTests, TestDefaultMuxer) {
+    // Write samples straight from an extractor and validate output file.
+    static const char* destinationPath =
+            "/data/local/tmp/MediaSampleWriterTests_TestDefaultMuxer_output.MP4";
+    const int destinationFd =
+            open(destinationPath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR | S_IROTH);
+    ASSERT_GT(destinationFd, 0);
+
+    // Initialize writer.
+    std::shared_ptr<MediaSampleWriter> writer = MediaSampleWriter::Create();
+    EXPECT_TRUE(writer->init(destinationFd, mTestCallbacks));
+    close(destinationFd);
+
+    // Add tracks.
+    const TestMediaSource& mediaSource = getMediaSource();
+    std::vector<MediaSampleWriter::MediaSampleConsumerFunction> sampleConsumers;
+
+    for (size_t trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        auto consumer = writer->addTrack(mediaSource.mTrackFormats[trackIndex]);
+        sampleConsumers.push_back(consumer);
+    }
+
+    // Start the writer.
+    ASSERT_TRUE(writer->start());
+
+    // Enqueue all samples, followed by an end-of-stream sample on each track.
+    std::shared_ptr<MediaSample> sample;
+    size_t trackIndex;
+    while ((sample = readSampleAndAdvance(mediaSource.mExtractor, &trackIndex)) != nullptr) {
+        sampleConsumers[trackIndex](sample);
+    }
+    for (trackIndex = 0; trackIndex < mediaSource.mTrackCount; trackIndex++) {
+        sampleConsumers[trackIndex](newSampleEos());
+    }
+
+    // Wait for writer.
+    mTestCallbacks->waitForWritingFinished();
+
+    // Compare output file with source.
+    mediaSource.reset();
+
+    AMediaExtractor* extractor = AMediaExtractor_new();
+    ASSERT_NE(extractor, nullptr);
+
+    int sourceFd = open(destinationPath, O_RDONLY);
+    ASSERT_GT(sourceFd, 0);
+
+    off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+    lseek(sourceFd, 0, SEEK_SET);
+
+    media_status_t status = AMediaExtractor_setDataSourceFd(extractor, sourceFd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK);
+    close(sourceFd);
+
+    size_t trackCount = AMediaExtractor_getTrackCount(extractor);
+    EXPECT_EQ(trackCount, mediaSource.mTrackCount);
+
+    for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+        AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(extractor, trackIndex);
+        ASSERT_NE(trackFormat, nullptr);
+
+        AMediaExtractor_selectTrack(extractor, trackIndex);
+    }
+
+    // Compare samples.
+    std::shared_ptr<MediaSample> sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+    std::shared_ptr<MediaSample> sample2 = readSampleAndAdvance(extractor, nullptr);
+
+    while (sample1 != nullptr && sample2 != nullptr) {
+        EXPECT_EQ(sample1->info.presentationTimeUs, sample2->info.presentationTimeUs);
+        EXPECT_EQ(sample1->info.size, sample2->info.size);
+        EXPECT_EQ(sample1->info.flags, sample2->info.flags);
+
+        EXPECT_EQ(memcmp(sample1->buffer, sample2->buffer, sample1->info.size), 0);
+
+        sample1 = readSampleAndAdvance(mediaSource.mExtractor, nullptr);
+        sample2 = readSampleAndAdvance(extractor, nullptr);
+    }
+    EXPECT_EQ(sample1, nullptr);
+    EXPECT_EQ(sample2, nullptr);
+
+    AMediaExtractor_delete(extractor);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
new file mode 100644
index 0000000..791e983
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
@@ -0,0 +1,335 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <media/VideoTrackTranscoder.h>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+/** TrackTranscoder types to test. */
+enum TrackTranscoderType {
+    VIDEO,
+    PASSTHROUGH,
+};
+
+class MediaTrackTranscoderTests : public ::testing::TestWithParam<TrackTranscoderType> {
+public:
+    MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTrackTranscoderTests set up";
+
+        // Need to start a thread pool to prevent AMediaExtractor binder calls from starving
+        // (b/155663561).
+        ABinderProcess_startThreadPool();
+
+        mCallback = std::make_shared<TestTrackTranscoderCallback>();
+
+        switch (GetParam()) {
+        case VIDEO:
+            mTranscoder = VideoTrackTranscoder::create(mCallback);
+            break;
+        case PASSTHROUGH:
+            mTranscoder = std::make_shared<PassthroughTrackTranscoder>(mCallback);
+            break;
+        }
+        ASSERT_NE(mTranscoder, nullptr);
+
+        initSampleReader("/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4");
+    }
+
+    void initSampleReader(const char* sourcePath) {
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const size_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0 /* offset */, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (GetParam() == VIDEO && strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            } else if (GetParam() == PASSTHROUGH && strncmp(mime, "audio/", 6) == 0) {
+                // TODO(lnilsson): Test metadata track passthrough after hkuang@ provides sample.
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+        EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    }
+
+    // Registers a sample consumer that drains the transcoder's output samples, optionally saving
+    // the first numSamplesToSave samples.
+    void drainOutputSamples(int numSamplesToSave = 0) {
+        mTranscoder->setSampleConsumer(
+                [this, numSamplesToSave](const std::shared_ptr<MediaSample>& sample) {
+                    ASSERT_NE(sample, nullptr);
+
+                    mGotEndOfStream = (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) != 0;
+
+                    if (mSavedSamples.size() < numSamplesToSave) {
+                        mSavedSamples.push_back(sample);
+                    }
+
+                    if (mSavedSamples.size() == numSamplesToSave || mGotEndOfStream) {
+                        mSamplesSavedSemaphore.signal();
+                    }
+                });
+    }
+
+    void TearDown() override { LOG(DEBUG) << "MediaTrackTranscoderTests tear down"; }
+
+    ~MediaTrackTranscoderTests() { LOG(DEBUG) << "MediaTrackTranscoderTests destroyed"; }
+
+protected:
+    std::shared_ptr<MediaTrackTranscoder> mTranscoder;
+    std::shared_ptr<TestTrackTranscoderCallback> mCallback;
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+
+    std::vector<std::shared_ptr<MediaSample>> mSavedSamples;
+    OneShotSemaphore mSamplesSavedSemaphore;
+    bool mGotEndOfStream = false;
+};
+
+TEST_P(MediaTrackTranscoderTests, WaitNormalOperation) {
+    LOG(DEBUG) << "Testing WaitNormalOperation";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mCallback->transcodingFinished());
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, StopNormalOperation) {
+    LOG(DEBUG) << "Testing StopNormalOperation";
+
+    // Use a longer test asset to make sure that transcoding can be stopped.
+    initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    mCallback->waitUntilTrackFormatAvailable();
+    mTranscoder->stop();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mCallback->transcodingWasStopped());
+}
+
+TEST_P(MediaTrackTranscoderTests, StartWithoutConfigure) {
+    LOG(DEBUG) << "Testing StartWithoutConfigure";
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, StopWithoutStart) {
+    LOG(DEBUG) << "Testing StopWithoutStart";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    mTranscoder->stop();
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleStartStop) {
+    LOG(DEBUG) << "Testing DoubleStartStop";
+
+    // Use a longer test asset to make sure that transcoding can be stopped.
+    initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    EXPECT_FALSE(mTranscoder->start());
+    mTranscoder->stop();
+    mTranscoder->stop();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_TRUE(mCallback->transcodingWasStopped());
+}
+
+TEST_P(MediaTrackTranscoderTests, DoubleConfigure) {
+    LOG(DEBUG) << "Testing DoubleConfigure";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_ERROR_UNSUPPORTED);
+}
+
+TEST_P(MediaTrackTranscoderTests, ConfigureAfterFail) {
+    LOG(DEBUG) << "Testing ConfigureAfterFail";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterStop) {
+    LOG(DEBUG) << "Testing RestartAfterStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    EXPECT_TRUE(mTranscoder->start());
+    mTranscoder->stop();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    EXPECT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, RestartAfterFinish) {
+    LOG(DEBUG) << "Testing RestartAfterFinish";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    mTranscoder->stop();
+    EXPECT_FALSE(mTranscoder->start());
+    EXPECT_TRUE(mGotEndOfStream);
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderRelease) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderRelease";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples(1 /* numSamplesToSave */);
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+    mTranscoder->stop();
+    EXPECT_TRUE(mGotEndOfStream);
+
+    mTranscoder.reset();
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    mSavedSamples.clear();
+}
+
+TEST_P(MediaTrackTranscoderTests, HoldSampleAfterTranscoderStop) {
+    LOG(DEBUG) << "Testing HoldSampleAfterTranscoderStop";
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(mTranscoder->start());
+    drainOutputSamples(1 /* numSamplesToSave */);
+    mSamplesSavedSemaphore.wait();
+    mTranscoder->stop();
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(20));
+    mSavedSamples.clear();
+}
+
+TEST_P(MediaTrackTranscoderTests, NullSampleReader) {
+    LOG(DEBUG) << "Testing NullSampleReader";
+    std::shared_ptr<MediaSampleReader> nullSampleReader;
+    EXPECT_NE(mTranscoder->configure(nullSampleReader, mTrackIndex, mDestinationFormat), AMEDIA_OK);
+    ASSERT_FALSE(mTranscoder->start());
+}
+
+TEST_P(MediaTrackTranscoderTests, InvalidTrackIndex) {
+    LOG(DEBUG) << "Testing InvalidTrackIndex";
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, -1, mDestinationFormat), AMEDIA_OK);
+    EXPECT_NE(mTranscoder->configure(mMediaSampleReader, mMediaSampleReader->getTrackCount(),
+                                     mDestinationFormat),
+              AMEDIA_OK);
+}
+
+TEST_P(MediaTrackTranscoderTests, StopOnSync) {
+    LOG(DEBUG) << "Testing StopOnSync";
+
+    // Use a longer test asset to make sure there is a GOP to finish.
+    initSampleReader("/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4");
+
+    EXPECT_EQ(mTranscoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+
+    bool lastSampleWasEos = false;
+    bool lastRealSampleWasSync = false;
+    OneShotSemaphore samplesReceivedSemaphore;
+    uint32_t sampleCount = 0;
+
+    mTranscoder->setSampleConsumer([&](const std::shared_ptr<MediaSample>& sample) {
+        ASSERT_NE(sample, nullptr);
+
+        if ((lastSampleWasEos = sample->info.flags & SAMPLE_FLAG_END_OF_STREAM)) {
+            samplesReceivedSemaphore.signal();
+            return;
+        }
+        lastRealSampleWasSync = sample->info.flags & SAMPLE_FLAG_SYNC_SAMPLE;
+
+        if (++sampleCount >= 10) {  // Wait for a few samples before stopping.
+            samplesReceivedSemaphore.signal();
+        }
+    });
+
+    ASSERT_TRUE(mTranscoder->start());
+    samplesReceivedSemaphore.wait();
+    mTranscoder->stop(true /* stopOnSync */);
+    EXPECT_EQ(mCallback->waitUntilFinished(), AMEDIA_OK);
+
+    EXPECT_TRUE(lastSampleWasEos);
+    EXPECT_TRUE(lastRealSampleWasSync);
+    EXPECT_TRUE(mCallback->transcodingWasStopped());
+}
+
+}  // namespace android
+
+using namespace android;
+
+INSTANTIATE_TEST_SUITE_P(MediaTrackTranscoderTestsAll, MediaTrackTranscoderTests,
+                         ::testing::Values(VIDEO, PASSTHROUGH));
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
new file mode 100644
index 0000000..b7c7bd8
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -0,0 +1,412 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscoderTests"
+
+#include <android-base/logging.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+#define DEFINE_FORMAT_VALUE_EQUAL_FUNC(_type, _typeName)                                  \
+    static bool equal##_typeName(const char* key, AMediaFormat* src, AMediaFormat* dst) { \
+        _type srcVal, dstVal;                                                             \
+        bool srcPresent = AMediaFormat_get##_typeName(src, key, &srcVal);                 \
+        bool dstPresent = AMediaFormat_get##_typeName(dst, key, &dstVal);                 \
+        return (srcPresent == dstPresent) && (!srcPresent || (srcVal == dstVal));         \
+    }
+
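+// These expand to equalInt64() and equalInt32(), used by kFieldsToPreserve below to compare a
+// single format field between the source and the transcoded output.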
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int64_t, Int64);
+DEFINE_FORMAT_VALUE_EQUAL_FUNC(int32_t, Int32);
+
+struct FormatVerifierEntry {
+    const char* key;
+    std::function<bool(const char*, AMediaFormat*, AMediaFormat*)> equal;
+};
+
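+// Format fields that are expected to be preserved from the source video track in the transcoded
+// output.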
+static const FormatVerifierEntry kFieldsToPreserve[] = {
+        {AMEDIAFORMAT_KEY_DURATION, equalInt64},       {AMEDIAFORMAT_KEY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_HEIGHT, equalInt32},         {AMEDIAFORMAT_KEY_FRAME_RATE, equalInt32},
+        {AMEDIAFORMAT_KEY_FRAME_COUNT, equalInt32},    {AMEDIAFORMAT_KEY_DISPLAY_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_DISPLAY_HEIGHT, equalInt32}, {AMEDIAFORMAT_KEY_SAR_WIDTH, equalInt32},
+        {AMEDIAFORMAT_KEY_SAR_HEIGHT, equalInt32},     {AMEDIAFORMAT_KEY_ROTATION, equalInt32},
+};
+
+// Write-only, create file if non-existent, don't overwrite existing file.
+static constexpr int kOpenFlags = O_WRONLY | O_CREAT | O_EXCL;
+// User R+W permission.
+static constexpr int kFileMode = S_IRUSR | S_IWUSR;
+
+class MediaTranscoderTests : public ::testing::Test {
+public:
+    MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests created"; }
+    ~MediaTranscoderTests() { LOG(DEBUG) << "MediaTranscoderTests destroyed"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "MediaTranscoderTests set up";
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+        ABinderProcess_startThreadPool();
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "MediaTranscoderTests tear down";
+        mCallbacks.reset();
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
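+    // Returns the size difference between path2 and path1 as a percentage of path1's size,
+    // optionally as an absolute value.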
+    float getFileSizeDiffPercent(const char* path1, const char* path2, bool absolute = false) {
+        struct stat s1, s2;
+        EXPECT_EQ(stat(path1, &s1), 0);
+        EXPECT_EQ(stat(path2, &s2), 0);
+
+        int64_t diff = s2.st_size - s1.st_size;
+        if (absolute && diff < 0) diff = -diff;
+
+        return (float)diff * 100.0f / s1.st_size;
+    }
+
+    typedef enum {
+        kRunToCompletion,
+        kCheckHeartBeat,
+        kCancelAfterProgress,
+        kCancelAfterStart,
+        kPauseAfterProgress,
+        kPauseAfterStart,
+    } TranscodeExecutionControl;
+
+    using FormatConfigurationCallback = std::function<AMediaFormat*(AMediaFormat*)>;
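+    // Runs a transcoding session from srcPath to destPath, configuring each track with the format
+    // returned by formatCallback, then exercises the transcoder according to executionControl
+    // (run to completion, cancel, pause or verify heart-beats) and returns the final status.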
+    media_status_t transcodeHelper(const char* srcPath, const char* destPath,
+                                   FormatConfigurationCallback formatCallback,
+                                   TranscodeExecutionControl executionControl = kRunToCompletion,
+                                   int64_t heartBeatIntervalUs = -1) {
+        auto transcoder = MediaTranscoder::create(mCallbacks, heartBeatIntervalUs);
+        EXPECT_NE(transcoder, nullptr);
+
+        const int srcFd = open(srcPath, O_RDONLY);
+        EXPECT_EQ(transcoder->configureSource(srcFd), AMEDIA_OK);
+
+        std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+        EXPECT_GT(trackFormats.size(), 0);
+
+        for (int i = 0; i < trackFormats.size(); ++i) {
+            AMediaFormat* format = formatCallback(trackFormats[i].get());
+            EXPECT_EQ(transcoder->configureTrackFormat(i, format), AMEDIA_OK);
+
+            // Save original video track format for verification.
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+            if (strncmp(mime, "video/", 6) == 0) {
+                mSourceVideoFormat = trackFormats[i];
+            }
+
+            if (format != nullptr) {
+                AMediaFormat_delete(format);
+            }
+        }
+        deleteFile(destPath);
+        const int dstFd = open(destPath, kOpenFlags, kFileMode);
+        EXPECT_EQ(transcoder->configureDestination(dstFd), AMEDIA_OK);
+
+        media_status_t startStatus = transcoder->start();
+        EXPECT_EQ(startStatus, AMEDIA_OK);
+
+        if (startStatus == AMEDIA_OK) {
+            std::shared_ptr<ndk::ScopedAParcel> pausedState;
+
+            switch (executionControl) {
+            case kCancelAfterProgress:
+                mCallbacks->waitForProgressMade();
+                FALLTHROUGH_INTENDED;
+            case kCancelAfterStart:
+                transcoder->cancel();
+                break;
+            case kPauseAfterProgress:
+                mCallbacks->waitForProgressMade();
+                FALLTHROUGH_INTENDED;
+            case kPauseAfterStart:
+                transcoder->pause(&pausedState);
+                break;
+            case kCheckHeartBeat: {
+                mCallbacks->waitForProgressMade();
+                auto startTime = std::chrono::system_clock::now();
+                mCallbacks->waitForTranscodingFinished();
+                auto finishTime = std::chrono::system_clock::now();
+                int32_t expectedCount =
+                        (finishTime - startTime) / std::chrono::microseconds(heartBeatIntervalUs);
+                // Relax the expected count by 1 in case the last heart-beat just missed the
+                // window; other than that the count should be exact.
+                EXPECT_GE(mCallbacks->mHeartBeatCount, expectedCount - 1);
+                break;
+            }
+            case kRunToCompletion:
+            default:
+                mCallbacks->waitForTranscodingFinished();
+                break;
+            }
+        }
+        close(srcFd);
+        close(dstFd);
+
+        return mCallbacks->mStatus;
+    }
+
+    void testTranscodeVideo(const char* srcPath, const char* destPath, const char* dstMime,
+                            int32_t bitrate = 0) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath,
+                                  [dstMime, bitrate](AMediaFormat* sourceFormat) {
+                                      AMediaFormat* format = nullptr;
+                                      const char* mime = nullptr;
+                                      AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME,
+                                                             &mime);
+
+                                      if (strncmp(mime, "video/", 6) == 0 &&
+                                          (bitrate > 0 || dstMime != nullptr)) {
+                                          format = AMediaFormat_new();
+
+                                          if (bitrate > 0) {
+                                              AMediaFormat_setInt32(
+                                                      format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
+                                          }
+
+                                          if (dstMime != nullptr) {
+                                              AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME,
+                                                                     dstMime);
+                                          }
+                                      }
+                                      return format;
+                                  }),
+                  AMEDIA_OK);
+
+        if (dstMime != nullptr) {
+            std::vector<FormatVerifierEntry> extraVerifiers = {
+                    {AMEDIAFORMAT_KEY_MIME,
+                     [dstMime](const char* key, AMediaFormat* src __unused, AMediaFormat* dst) {
+                         const char* mime = nullptr;
+                         AMediaFormat_getString(dst, key, &mime);
+                         return !strcmp(mime, dstMime);
+                     }},
+            };
+            verifyOutputFormat(destPath, &extraVerifiers);
+        } else {
+            verifyOutputFormat(destPath);
+        }
+    }
+
+    void verifyOutputFormat(const char* destPath,
+                            const std::vector<FormatVerifierEntry>* extraVerifiers = nullptr) {
+        int dstFd = open(destPath, O_RDONLY);
+        EXPECT_GT(dstFd, 0);
+        ssize_t fileSize = lseek(dstFd, 0, SEEK_END);
+        lseek(dstFd, 0, SEEK_SET);
+
+        std::shared_ptr<MediaSampleReader> sampleReader =
+                MediaSampleReaderNDK::createFromFd(dstFd, 0, fileSize);
+        ASSERT_NE(sampleReader, nullptr);
+
+        std::shared_ptr<AMediaFormat> videoFormat;
+        const size_t trackCount = sampleReader->getTrackCount();
+        for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
+            AMediaFormat* trackFormat = sampleReader->getTrackFormat(static_cast<int>(trackIndex));
+            if (trackFormat != nullptr) {
+                const char* mime = nullptr;
+                AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+                if (strncmp(mime, "video/", 6) == 0) {
+                    LOG(INFO) << "Track # " << trackIndex << ": "
+                              << AMediaFormat_toString(trackFormat);
+                    videoFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                    break;
+                }
+            }
+        }
+
+        EXPECT_NE(videoFormat, nullptr);
+        if (videoFormat != nullptr) {
+            LOG(INFO) << "source video format: " << AMediaFormat_toString(mSourceVideoFormat.get());
+            LOG(INFO) << "transcoded video format: " << AMediaFormat_toString(videoFormat.get());
+
+            for (int i = 0; i < (sizeof(kFieldsToPreserve) / sizeof(kFieldsToPreserve[0])); ++i) {
+                EXPECT_TRUE(kFieldsToPreserve[i].equal(kFieldsToPreserve[i].key,
+                                                       mSourceVideoFormat.get(), videoFormat.get()))
+                        << "Failed at key " << kFieldsToPreserve[i].key;
+            }
+
+            if (extraVerifiers != nullptr) {
+                for (int i = 0; i < extraVerifiers->size(); ++i) {
+                    const FormatVerifierEntry& entry = (*extraVerifiers)[i];
+                    EXPECT_TRUE(
+                            entry.equal(entry.key, mSourceVideoFormat.get(), videoFormat.get()));
+                }
+            }
+        }
+
+        close(dstFd);
+    }
+
+    std::shared_ptr<TestTranscoderCallbacks> mCallbacks;
+    std::shared_ptr<AMediaFormat> mSourceVideoFormat;
+};
+
+TEST_F(MediaTranscoderTests, TestPassthrough) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Passthrough.MP4";
+    testTranscodeVideo(srcPath, destPath, nullptr);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_AvcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_AvcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Basic) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Basic.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_HevcToAvc_Rotation) {
+    const char* srcPath =
+            "/data/local/tmp/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_VideoTranscode_HevcToAvc_Rotation.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+TEST_F(MediaTranscoderTests, TestVideoTranscode_4K) {
+#if defined(__i386__) || defined(__x86_64__)
+    LOG(WARNING) << "Skipping 4K test on x86 as SW encoder does not support 4K.";
+    return;
+#else
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_4K.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+#endif
+}
+
+TEST_F(MediaTranscoderTests, TestPreserveBitrate) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_PreserveBitrate.MP4";
+    testTranscodeVideo(srcPath, destPath, AMEDIA_MIMETYPE_VIDEO_AVC);
+
+    // Require at most a 25% difference in file size.
+    // TODO(b/174678336): Find a better test asset to tighten the threshold.
+    EXPECT_LT(getFileSizeDiffPercent(srcPath, destPath, true /* absolute*/), 25);
+}
+
+TEST_F(MediaTranscoderTests, TestCustomBitrate) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+    const char* destPath1 = "/data/local/tmp/MediaTranscoder_CustomBitrate_2Mbps.MP4";
+    const char* destPath2 = "/data/local/tmp/MediaTranscoder_CustomBitrate_8Mbps.MP4";
+    testTranscodeVideo(srcPath, destPath1, AMEDIA_MIMETYPE_VIDEO_AVC, 2 * 1000 * 1000);
+    mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+    testTranscodeVideo(srcPath, destPath2, AMEDIA_MIMETYPE_VIDEO_AVC, 8 * 1000 * 1000);
+
+    // The source asset is very short and already heavily compressed, so don't expect the requested
+    // bitrates to be matched exactly. However, the 8 Mbps output should at least be larger than the
+    // 2 Mbps output.
+    // TODO(b/174678336): Find a better test asset to tighten the threshold.
+    EXPECT_GT(getFileSizeDiffPercent(destPath1, destPath2), 10);
+}
+
+static AMediaFormat* getAVCVideoFormat(AMediaFormat* sourceFormat) {
+    AMediaFormat* format = nullptr;
+    const char* mime = nullptr;
+    AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+
+    if (strncmp(mime, "video/", 6) == 0) {
+        format = AMediaFormat_new();
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+    }
+
+    return format;
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterProgress) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+    for (int i = 0; i < 20; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterProgress),
+                  AMEDIA_OK);
+        EXPECT_FALSE(mCallbacks->mFinished);
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+    }
+}
+
+TEST_F(MediaTranscoderTests, TestCancelAfterStart) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Cancel.MP4";
+
+    for (int i = 0; i < 20; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCancelAfterStart),
+                  AMEDIA_OK);
+        EXPECT_FALSE(mCallbacks->mFinished);
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+    }
+}
+
+TEST_F(MediaTranscoderTests, TestPauseAfterProgress) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Pause.MP4";
+
+    for (int i = 0; i < 20; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterProgress),
+                  AMEDIA_OK);
+        EXPECT_FALSE(mCallbacks->mFinished);
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+    }
+}
+
+TEST_F(MediaTranscoderTests, TestPauseAfterStart) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_Pause.MP4";
+
+    for (int i = 0; i < 20; ++i) {
+        EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kPauseAfterStart),
+                  AMEDIA_OK);
+        EXPECT_FALSE(mCallbacks->mFinished);
+        mCallbacks = std::make_shared<TestTranscoderCallbacks>();
+    }
+}
+
+TEST_F(MediaTranscoderTests, TestHeartBeat) {
+    const char* srcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+    const char* destPath = "/data/local/tmp/MediaTranscoder_HeartBeat.MP4";
+
+    // Use a shorter interval of 500ms than the default 1000ms to get more heart-beats during the test.
+    const int64_t heartBeatIntervalUs = 500000LL;
+    EXPECT_EQ(transcodeHelper(srcPath, destPath, getAVCVideoFormat, kCheckHeartBeat,
+                              heartBeatIntervalUs),
+              AMEDIA_OK);
+    EXPECT_TRUE(mCallbacks->mFinished);
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
new file mode 100644
index 0000000..fdbf535
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for PassthroughTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "PassthroughTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/PassthroughTrackTranscoder.h>
+#include <openssl/md5.h>
+
+#include <vector>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+class PassthroughTrackTranscoderTests : public ::testing::Test {
+public:
+    PassthroughTrackTranscoderTests() { LOG(DEBUG) << "PassthroughTrackTranscoderTests created"; }
+
+    void SetUp() override { LOG(DEBUG) << "PassthroughTrackTranscoderTests set up"; }
+
+    void initSourceAndExtractor() {
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        mExtractor = AMediaExtractor_new();
+        ASSERT_NE(mExtractor, nullptr);
+
+        mSourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(mSourceFd, 0);
+
+        mSourceFileSize = lseek(mSourceFd, 0, SEEK_END);
+        lseek(mSourceFd, 0, SEEK_SET);
+
+        media_status_t status =
+                AMediaExtractor_setDataSourceFd(mExtractor, mSourceFd, 0, mSourceFileSize);
+        ASSERT_EQ(status, AMEDIA_OK);
+
+        const size_t trackCount = AMediaExtractor_getTrackCount(mExtractor);
+        for (size_t trackIndex = 0; trackIndex < trackCount; trackIndex++) {
+            AMediaFormat* trackFormat = AMediaExtractor_getTrackFormat(mExtractor, trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "audio/", 6) == 0) {
+                mTrackIndex = trackIndex;
+                AMediaExtractor_selectTrack(mExtractor, trackIndex);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests tear down";
+        if (mExtractor != nullptr) {
+            AMediaExtractor_delete(mExtractor);
+            mExtractor = nullptr;
+        }
+        if (mSourceFd > 0) {
+            close(mSourceFd);
+            mSourceFd = -1;
+        }
+    }
+
+    ~PassthroughTrackTranscoderTests() {
+        LOG(DEBUG) << "PassthroughTrackTranscoderTests destroyed";
+    }
+
+    int mSourceFd = -1;
+    size_t mSourceFileSize;
+    int mTrackIndex;
+    AMediaExtractor* mExtractor = nullptr;
+};
+
+/** Helper class for comparing sample data using checksums. */
+class SampleID {
+public:
+    SampleID(const uint8_t* sampleData, ssize_t sampleSize) : mSize{sampleSize} {
+        MD5_CTX md5Ctx;
+        MD5_Init(&md5Ctx);
+        MD5_Update(&md5Ctx, sampleData, sampleSize);
+        MD5_Final(mChecksum, &md5Ctx);
+    }
+
+    bool operator==(const SampleID& rhs) const {
+        return mSize == rhs.mSize && memcmp(mChecksum, rhs.mChecksum, MD5_DIGEST_LENGTH) == 0;
+    }
+
+    uint8_t mChecksum[MD5_DIGEST_LENGTH];
+    ssize_t mSize;
+};
+
+/**
+ * Tests that the output samples of PassthroughTrackTranscoder are identical to the source samples
+ * and in correct order.
+ */
+TEST_F(PassthroughTrackTranscoderTests, SampleEquality) {
+    LOG(DEBUG) << "Testing SampleEquality";
+
+    ssize_t bufferSize = 1024;
+    auto buffer = std::make_unique<uint8_t[]>(bufferSize);
+
+    initSourceAndExtractor();
+
+    // Loop through all samples of a track and store size and checksums.
+    std::vector<SampleID> sampleChecksums;
+
+    int64_t sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    while (sampleTime != -1) {
+        if (AMediaExtractor_getSampleTrackIndex(mExtractor) == mTrackIndex) {
+            ssize_t sampleSize = AMediaExtractor_getSampleSize(mExtractor);
+            if (bufferSize < sampleSize) {
+                bufferSize = sampleSize;
+                buffer = std::make_unique<uint8_t[]>(bufferSize);
+            }
+
+            ssize_t bytesRead =
+                    AMediaExtractor_readSampleData(mExtractor, buffer.get(), bufferSize);
+            ASSERT_EQ(bytesRead, sampleSize);
+
+            SampleID sampleId{buffer.get(), sampleSize};
+            sampleChecksums.push_back(sampleId);
+        }
+
+        AMediaExtractor_advance(mExtractor);
+        sampleTime = AMediaExtractor_getSampleTime(mExtractor);
+    }
+
+    // Create and start the transcoder.
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    PassthroughTrackTranscoder transcoder{callback};
+
+    std::shared_ptr<MediaSampleReader> mediaSampleReader =
+            MediaSampleReaderNDK::createFromFd(mSourceFd, 0, mSourceFileSize);
+    EXPECT_NE(mediaSampleReader, nullptr);
+
+    EXPECT_EQ(mediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder.configure(mediaSampleReader, mTrackIndex, nullptr /* destinationFormat */),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder.start());
+
+    // Pull transcoder's output samples and compare against input checksums.
+    bool eos = false;
+    uint64_t sampleCount = 0;
+    transcoder.setSampleConsumer(
+            [&sampleCount, &sampleChecksums, &eos](const std::shared_ptr<MediaSample>& sample) {
+                ASSERT_NE(sample, nullptr);
+                EXPECT_FALSE(eos);
+
+                if (sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                    eos = true;
+                } else {
+                    SampleID sampleId{sample->buffer, static_cast<ssize_t>(sample->info.size)};
+                    EXPECT_TRUE(sampleId == sampleChecksums[sampleCount]);
+                    ++sampleCount;
+                }
+            });
+
+    callback->waitUntilFinished();
+    EXPECT_EQ(sampleCount, sampleChecksums.size());
+}
+
+/** Class for testing PassthroughTrackTranscoder's built-in buffer pool. */
+class BufferPoolTests : public ::testing::Test {
+public:
+    static constexpr int kMaxBuffers = 5;
+
+    void SetUp() override {
+        LOG(DEBUG) << "BufferPoolTests set up";
+        mBufferPool = std::make_shared<PassthroughTrackTranscoder::BufferPool>(kMaxBuffers);
+    }
+
+    void TearDown() override {
+        LOG(DEBUG) << "BufferPoolTests tear down";
+        mBufferPool.reset();
+    }
+
+    std::shared_ptr<PassthroughTrackTranscoder::BufferPool> mBufferPool;
+};
+
+TEST_F(BufferPoolTests, BufferReuse) {
+    LOG(DEBUG) << "Testing BufferReuse";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    mBufferPool->returnBuffer(buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer2);
+    EXPECT_EQ(buffer3, buffer1);
+
+    mBufferPool->returnBuffer(buffer2);
+
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_NE(buffer4, buffer1);
+    EXPECT_EQ(buffer4, buffer2);
+}
+
+TEST_F(BufferPoolTests, SmallestAvailableBuffer) {
+    LOG(DEBUG) << "Testing SmallestAvailableBuffer";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(15);
+    EXPECT_NE(buffer2, nullptr);
+    EXPECT_NE(buffer2, buffer1);
+
+    uint8_t* buffer3 = mBufferPool->getBufferWithSize(20);
+    EXPECT_NE(buffer3, nullptr);
+    EXPECT_NE(buffer3, buffer1);
+    EXPECT_NE(buffer3, buffer2);
+
+    mBufferPool->returnBuffer(buffer1);
+    mBufferPool->returnBuffer(buffer2);
+    mBufferPool->returnBuffer(buffer3);
+
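+    // With buffers of size 10, 15 and 20 back in the pool, a request for 11 bytes should be served
+    // by the smallest buffer that fits (15), and the next identical request by the next smallest
+    // (20).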
+    uint8_t* buffer4 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer4, nullptr);
+    EXPECT_EQ(buffer4, buffer2);
+
+    uint8_t* buffer5 = mBufferPool->getBufferWithSize(11);
+    EXPECT_NE(buffer5, nullptr);
+    EXPECT_EQ(buffer5, buffer3);
+}
+
+TEST_F(BufferPoolTests, AddAfterAbort) {
+    LOG(DEBUG) << "Testing AddAfterAbort";
+
+    uint8_t* buffer1 = mBufferPool->getBufferWithSize(10);
+    EXPECT_NE(buffer1, nullptr);
+    mBufferPool->returnBuffer(buffer1);
+
+    mBufferPool->abort();
+    uint8_t* buffer2 = mBufferPool->getBufferWithSize(10);
+    EXPECT_EQ(buffer2, nullptr);
+}
+
+TEST_F(BufferPoolTests, MaximumBuffers) {
+    LOG(DEBUG) << "Testing MaximumBuffers";
+
+    static constexpr size_t kBufferBaseSize = 10;
+    std::unordered_map<uint8_t*, size_t> addressSizeMap;
+
+    // Get kMaxBuffers * 2 new buffers with increasing size.
+    // (Note: Once kMaxBuffers have been allocated, the pool deletes old buffers to accommodate new
+    // ones, freeing the deleted buffers for reuse by the system's heap allocator. So we cannot test
+    // that each new pointer is unique here.)
+    for (int i = 0; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+        addressSizeMap[buffer] = size;
+        mBufferPool->returnBuffer(buffer);
+    }
+
+    // Verify that the pool now contains the kMaxBuffers largest buffers allocated above and that
+    // the buffer of matching size is returned.
+    for (int i = kMaxBuffers; i < kMaxBuffers * 2; i++) {
+        size_t size = kBufferBaseSize + i;
+        uint8_t* buffer = mBufferPool->getBufferWithSize(size);
+        EXPECT_NE(buffer, nullptr);
+
+        auto it = addressSizeMap.find(buffer);
+        ASSERT_NE(it, addressSizeMap.end());
+        EXPECT_EQ(it->second, size);
+        mBufferPool->returnBuffer(buffer);
+    }
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/README.md b/media/libmediatranscoding/transcoder/tests/README.md
new file mode 100644
index 0000000..59417b0
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/README.md
@@ -0,0 +1,14 @@
+## Transcoder Testing ##
+---
+#### Transcoder unit tests:
+To run all transcoder unit tests, run the supplied script from this folder:
+
+```
+./build_and_run_all_unit_tests.sh
+```
+
+To run individual unit test modules, use atest:
+
+```
+atest MediaSampleReaderNDK
+```
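+
+To run a single test case within a module, a gtest filter can be passed through atest. The module
+and filter below are only examples; exact argument passing may vary between atest versions:
+
+```
+atest MediaTrackTranscoderTests -- --gtest_filter="*StopOnSync*"
+```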
diff --git a/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h b/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
new file mode 100644
index 0000000..35fe25b
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/MediaTrackTranscoder.h>
+#include <media/MediaTrackTranscoderCallback.h>
+#include <media/MediaTranscoder.h>
+#include <unistd.h>
+
+#include <condition_variable>
+#include <memory>
+#include <mutex>
+#include <string>
+
+namespace android {
+
+//
+// This file contains transcoding test utilities.
+//
+
+namespace TranscoderTestUtils {
+
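+// Returns the format of the first video track in the file at |path|, or nullptr if the file has
+// no video track. The video MIME type is optionally returned through mimeOut.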
+std::shared_ptr<AMediaFormat> GetVideoFormat(const std::string& path,
+                                             std::string* mimeOut = nullptr) {
+    int fd = open(path.c_str(), O_RDONLY);
+    EXPECT_GT(fd, 0);
+    ssize_t fileSize = lseek(fd, 0, SEEK_END);
+    lseek(fd, 0, SEEK_SET);
+
+    auto sampleReader = MediaSampleReaderNDK::createFromFd(fd, 0, fileSize);
+    EXPECT_NE(sampleReader, nullptr);
+
+    for (size_t i = 0; i < sampleReader->getTrackCount(); ++i) {
+        AMediaFormat* format = sampleReader->getTrackFormat(i);
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+        if (strncmp(mime, "video/", 6) == 0) {
+            if (mimeOut != nullptr) {
+                mimeOut->assign(mime);
+            }
+            return std::shared_ptr<AMediaFormat>(format, &AMediaFormat_delete);
+        }
+
+        AMediaFormat_delete(format);
+    }
+    return nullptr;
+}
+
+}  // namespace TranscoderTestUtils
+
+class TrackTranscoderTestUtils {
+public:
+    static std::shared_ptr<AMediaFormat> getDefaultVideoDestinationFormat(
+            AMediaFormat* sourceFormat, bool includeBitrate = true) {
+        // Default video destination format setup.
+        static constexpr float kFrameRate = 30.0f;
+        static constexpr int32_t kBitRate = 2 * 1000 * 1000;
+
+        AMediaFormat* destinationFormat = AMediaFormat_new();
+        AMediaFormat_copy(destinationFormat, sourceFormat);
+        AMediaFormat_setFloat(destinationFormat, AMEDIAFORMAT_KEY_FRAME_RATE, kFrameRate);
+        if (includeBitrate) {
+            AMediaFormat_setInt32(destinationFormat, AMEDIAFORMAT_KEY_BIT_RATE, kBitRate);
+        }
+
+        return std::shared_ptr<AMediaFormat>(destinationFormat, &AMediaFormat_delete);
+    }
+};
+
+class TestTrackTranscoderCallback : public MediaTrackTranscoderCallback {
+public:
+    TestTrackTranscoderCallback() = default;
+    ~TestTrackTranscoderCallback() = default;
+
+    // MediaTrackTranscoderCallback
+    void onTrackFormatAvailable(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTrackFormatAvailable = true;
+        mTrackFormatAvailableCondition.notify_all();
+    }
+
+    void onTrackFinished(const MediaTrackTranscoder* transcoder __unused) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mTranscodingFinishedCondition.notify_all();
+    }
+
+    virtual void onTrackStopped(const MediaTrackTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mTranscodingStopped = true;
+        mTranscodingFinishedCondition.notify_all();
+    }
+
+    void onTrackError(const MediaTrackTranscoder* transcoder __unused, media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mTranscodingFinished = true;
+        mStatus = status;
+        mTranscodingFinishedCondition.notify_all();
+    }
+    // ~MediaTrackTranscoderCallback
+
+    media_status_t waitUntilFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTranscodingFinished) {
+            mTranscodingFinishedCondition.wait(lock);
+        }
+        return mStatus;
+    }
+
+    void waitUntilTrackFormatAvailable() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mTrackFormatAvailable) {
+            mTrackFormatAvailableCondition.wait(lock);
+        }
+    }
+
+    bool transcodingWasStopped() const { return mTranscodingFinished && mTranscodingStopped; }
+    bool transcodingFinished() const {
+        return mTranscodingFinished && !mTranscodingStopped && mStatus == AMEDIA_OK;
+    }
+
+private:
+    media_status_t mStatus = AMEDIA_OK;
+    std::mutex mMutex;
+    std::condition_variable mTranscodingFinishedCondition;
+    std::condition_variable mTrackFormatAvailableCondition;
+    bool mTranscodingFinished = false;
+    bool mTranscodingStopped = false;
+    bool mTrackFormatAvailable = false;
+};
+
+class TestTranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder __unused,
+                         media_status_t error) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        EXPECT_NE(error, AMEDIA_OK);
+        EXPECT_FALSE(mFinished);
+        mFinished = true;
+        mStatus = error;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder __unused,
+                                  int32_t progress) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        if (progress > 0 && !mProgressMade) {
+            mProgressMade = true;
+            mCondition.notify_all();
+        }
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* transcoder __unused) override {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mHeartBeatCount++;
+    }
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
+                                     const std::shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             __unused) override {}
+
+    void waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void waitForProgressMade() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mProgressMade && !mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+    media_status_t mStatus = AMEDIA_OK;
+    bool mFinished = false;
+    int32_t mHeartBeatCount = 0;
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mProgressMade = false;
+};
+
+class OneShotSemaphore {
+public:
+    void wait() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mSignaled) {
+            mCondition.wait(lock);
+        }
+    }
+
+    void signal() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mSignaled = true;
+        mCondition.notify_all();
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mSignaled = false;
+};
+
+}  // namespace android
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
new file mode 100644
index 0000000..88c3fd3
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for VideoTrackTranscoder
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoTrackTranscoderTests"
+
+#include <android-base/logging.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <media/MediaSampleReaderNDK.h>
+#include <media/NdkCommon.h>
+#include <media/VideoTrackTranscoder.h>
+#include <utils/Timers.h>
+
+#include "TranscoderTestUtils.h"
+
+namespace android {
+
+// TODO(b/155304421): Implement more advanced video specific tests:
+//  - Codec conversions (HEVC -> AVC).
+//  - Bitrate validation.
+//  - Output frame validation through PSNR.
+
+class VideoTrackTranscoderTests : public ::testing::Test {
+public:
+    VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests created"; }
+
+    void SetUp() override {
+        LOG(DEBUG) << "VideoTrackTranscoderTests set up";
+        const char* sourcePath =
+                "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+
+        const int sourceFd = open(sourcePath, O_RDONLY);
+        ASSERT_GT(sourceFd, 0);
+
+        const off_t fileSize = lseek(sourceFd, 0, SEEK_END);
+        lseek(sourceFd, 0, SEEK_SET);
+
+        mMediaSampleReader = MediaSampleReaderNDK::createFromFd(sourceFd, 0, fileSize);
+        ASSERT_NE(mMediaSampleReader, nullptr);
+        close(sourceFd);
+
+        for (size_t trackIndex = 0; trackIndex < mMediaSampleReader->getTrackCount();
+             ++trackIndex) {
+            AMediaFormat* trackFormat = mMediaSampleReader->getTrackFormat(trackIndex);
+            ASSERT_NE(trackFormat, nullptr);
+
+            const char* mime = nullptr;
+            AMediaFormat_getString(trackFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+            ASSERT_NE(mime, nullptr);
+
+            if (strncmp(mime, "video/", 6) == 0) {
+                mTrackIndex = trackIndex;
+
+                mSourceFormat = std::shared_ptr<AMediaFormat>(trackFormat, &AMediaFormat_delete);
+                ASSERT_NE(mSourceFormat, nullptr);
+
+                mDestinationFormat =
+                        TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(trackFormat);
+                ASSERT_NE(mDestinationFormat, nullptr);
+                break;
+            }
+
+            AMediaFormat_delete(trackFormat);
+        }
+
+        ASSERT_NE(mSourceFormat, nullptr);
+    }
+
+    void TearDown() override { LOG(DEBUG) << "VideoTrackTranscoderTests tear down"; }
+
+    ~VideoTrackTranscoderTests() { LOG(DEBUG) << "VideoTrackTranscoderTests destroyed"; }
+
+    static int32_t getConfiguredBitrate(const std::shared_ptr<VideoTrackTranscoder>& transcoder) {
+        return transcoder->mConfiguredBitrate;
+    }
+
+    std::shared_ptr<MediaSampleReader> mMediaSampleReader;
+    int mTrackIndex;
+    std::shared_ptr<AMediaFormat> mSourceFormat;
+    std::shared_ptr<AMediaFormat> mDestinationFormat;
+};
+
+TEST_F(VideoTrackTranscoderTests, SampleSoundness) {
+    LOG(DEBUG) << "Testing SampleSoundness";
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    bool eos = false;
+    uint64_t sampleCount = 0;
+    transcoder->setSampleConsumer([&sampleCount, &eos](const std::shared_ptr<MediaSample>& sample) {
+        ASSERT_NE(sample, nullptr);
+        const uint32_t flags = sample->info.flags;
+
+        if (sampleCount == 0) {
+            // Expect first sample to be a codec config.
+            EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) != 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_PARTIAL_FRAME) == 0);
+        } else if (sampleCount == 1) {
+            // Expect second sample to be a sync sample.
+            EXPECT_TRUE((flags & SAMPLE_FLAG_CODEC_CONFIG) == 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_SYNC_SAMPLE) != 0);
+            EXPECT_TRUE((flags & SAMPLE_FLAG_END_OF_STREAM) == 0);
+        }
+
+        if (!(flags & SAMPLE_FLAG_END_OF_STREAM)) {
+            // Expect a valid buffer unless it is EOS.
+            EXPECT_NE(sample->buffer, nullptr);
+            EXPECT_NE(sample->bufferId, 0xBAADF00D);
+            EXPECT_GT(sample->info.size, 0);
+        } else {
+            EXPECT_FALSE(eos);
+            eos = true;
+        }
+
+        ++sampleCount;
+    });
+
+    EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+}
+
+TEST_F(VideoTrackTranscoderTests, PreserveBitrate) {
+    LOG(DEBUG) << "Testing PreserveBitrate";
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    auto destFormat = TrackTranscoderTestUtils::getDefaultVideoDestinationFormat(
+            mSourceFormat.get(), false /* includeBitrate*/);
+    EXPECT_NE(destFormat, nullptr);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+
+    int32_t srcBitrate;
+    EXPECT_EQ(mMediaSampleReader->getEstimatedBitrateForTrack(mTrackIndex, &srcBitrate), AMEDIA_OK);
+
+    ASSERT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, destFormat), AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    callback->waitUntilTrackFormatAvailable();
+    transcoder->stop();
+    EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+
+    int32_t outBitrate = getConfiguredBitrate(transcoder);
+    ASSERT_GT(outBitrate, 0);
+
+    EXPECT_EQ(srcBitrate, outBitrate);
+}
+
+// VideoTrackTranscoder needs a valid destination format.
+TEST_F(VideoTrackTranscoderTests, NullDestinationFormat) {
+    LOG(DEBUG) << "Testing NullDestinationFormat";
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    std::shared_ptr<AMediaFormat> nullFormat;
+
+    auto transcoder = VideoTrackTranscoder::create(callback);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, 0 /* trackIndex */, nullFormat),
+              AMEDIA_ERROR_INVALID_PARAMETER);
+}
+
+TEST_F(VideoTrackTranscoderTests, LingeringEncoder) {
+    OneShotSemaphore semaphore;
+    auto callback = std::make_shared<TestTrackTranscoderCallback>();
+    auto transcoder = VideoTrackTranscoder::create(callback);
+
+    EXPECT_EQ(mMediaSampleReader->selectTrack(mTrackIndex), AMEDIA_OK);
+    EXPECT_EQ(transcoder->configure(mMediaSampleReader, mTrackIndex, mDestinationFormat),
+              AMEDIA_OK);
+    ASSERT_TRUE(transcoder->start());
+
+    std::vector<std::shared_ptr<MediaSample>> samples;
+    transcoder->setSampleConsumer(
+            [&samples, &semaphore](const std::shared_ptr<MediaSample>& sample) {
+                if (samples.size() >= 4) return;
+
+                ASSERT_NE(sample, nullptr);
+                samples.push_back(sample);
+
+                if (samples.size() == 4 || sample->info.flags & SAMPLE_FLAG_END_OF_STREAM) {
+                    semaphore.signal();
+                }
+            });
+
+    // Wait for the encoder to output samples before stopping and releasing the transcoder.
+    semaphore.wait();
+
+    transcoder->stop();
+    EXPECT_EQ(callback->waitUntilFinished(), AMEDIA_OK);
+    transcoder.reset();
+
+    // Return buffers to the codec so that it can resume processing, but keep one buffer to avoid
+    // the codec being released.
+    samples.resize(1);
+
+    // Wait for async codec events.
+    std::this_thread::sleep_for(std::chrono::seconds(1));
+}
+
+}  // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..bba2bc5
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# Exit on compilation error.
+set -e
+
+#
+# Run tests in this directory.
+#
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
+fi
+
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/push_assets.sh
+
+echo "========================================"
+
+# Don't exit if a test fails.
+set +e
+
+echo "testing MediaSampleReaderNDK"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleReaderNDKTests/MediaSampleReaderNDKTests
+
+echo "testing MediaSampleQueue"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleQueueTests/MediaSampleQueueTests
+
+echo "testing MediaTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTrackTranscoderTests/MediaTrackTranscoderTests
+
+echo "testing VideoTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/VideoTrackTranscoderTests/VideoTrackTranscoderTests
+
+echo "testing PassthroughTrackTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/PassthroughTrackTranscoderTests/PassthroughTrackTranscoderTests
+
+echo "testing MediaSampleWriter"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaSampleWriterTests/MediaSampleWriterTests
+
+echo "testing MediaTranscoder"
+adb shell ASAN_OPTIONS=detect_container_overflow=0 /data/nativetest64/MediaTranscoderTests/MediaTranscoderTests
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp b/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..69b2827
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
@@ -0,0 +1,55 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "media_transcoder_fuzzer",
+    srcs: [
+        "media_transcoder_fuzzer.cpp",
+    ],
+    static_libs: [
+        "liblog",
+        "libmediatranscoder",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libmediandk",
+        "libnativewindow",
+        "libutils",
+    ],
+    header_libs: [
+        "libcodec2_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/README.md b/media/libmediatranscoding/transcoder/tests/fuzzer/README.md
new file mode 100644
index 0000000..d376a20
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/fuzzer/README.md
@@ -0,0 +1,59 @@
+# Fuzzer for libmediatranscoder
+
+## Plugin Design Considerations
+The fuzzer plugin for libmediatranscoder is designed based on an understanding of the
+transcoder and aims to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+The transcoder supports the following parameters:
+1. Destination Mime Type (parameter name: `dstMime`)
+2. AVC Profile (parameter name: `profile`)
+3. HEVC Profile (parameter name: `profile`)
+4. AVC Level (parameter name: `level`)
+5. HEVC Level (parameter name: `level`)
+6. Bitrate (parameter name: `bitrate`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `dstMime` | 0. `AMEDIA_MIMETYPE_VIDEO_AVC` 1. `AMEDIA_MIMETYPE_VIDEO_HEVC` | Bit 0 (LSB) of 1st byte of data |
+| `profile` for AVC | 0. `PROFILE_AVC_BASELINE` 1. `PROFILE_AVC_CONSTRAINED_BASELINE` 2. `PROFILE_AVC_MAIN`| All bits of 2nd byte of data modulus 3 |
+| `profile` for HEVC | 0. `PROFILE_HEVC_MAIN` 1. `PROFILE_HEVC_MAIN_STILL` | All bits of 2nd byte of data modulus 2 |
+| `level` for AVC | 0. `LEVEL_AVC_1` 1. `LEVEL_AVC_1B` 2. `LEVEL_AVC_1_1` 3. `LEVEL_AVC_1_2` 4. `LEVEL_AVC_1_3` 5. `LEVEL_AVC_2` 6. `LEVEL_AVC_2_1` 7. `LEVEL_AVC_2_2` 8. `LEVEL_AVC_3` 9. `LEVEL_AVC_3_1` 10. `LEVEL_AVC_3_2` 11. `LEVEL_AVC_4` 12. `LEVEL_AVC_4_1` 13. `LEVEL_AVC_4_2` 14. `LEVEL_AVC_5`| All bits of 3rd byte of data modulus 15 |
+| `level` for HEVC | 0. `LEVEL_HEVC_MAIN_1` 1. `LEVEL_HEVC_MAIN_2` 2. `LEVEL_HEVC_MAIN_2_1` 3. `LEVEL_HEVC_MAIN_3` 4. `LEVEL_HEVC_MAIN_3_1` 5. `LEVEL_HEVC_MAIN_4` 6. `LEVEL_HEVC_MAIN_4_1` 7. `LEVEL_HEVC_MAIN_5` 8. `LEVEL_HEVC_MAIN_5_1` 9. `LEVEL_HEVC_MAIN_5_2` | All bits of 3rd byte of data modulus 10 |
+| `bitrate` | In the range `0` to `500000000` | All bits of 4th and 5th byte of data |
+
+This also ensures that the plugin is always deterministic for any given input.
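+
+As a rough illustration, the mapping in the table above can be sketched in code as follows. This
+is a hypothetical helper (the names `FuzzVideoConfig` and `mapInputToConfig` are not part of the
+plugin); the actual mapping lives in `media_transcoder_fuzzer.cpp` in this change.
+```
+#include <cstdint>
+
+// Hypothetical sketch of how the first five input bytes select the transcoder configuration.
+struct FuzzVideoConfig {
+    bool useHevc;        // dstMime: AVC (false) or HEVC (true)
+    uint8_t profileIdx;  // index into the per-codec profile table
+    uint8_t levelIdx;    // index into the per-codec level table
+    uint64_t bitrate;    // reduced modulo 500000000
+};
+
+static FuzzVideoConfig mapInputToConfig(const uint8_t* data) {
+    FuzzVideoConfig cfg;
+    cfg.useHevc = (data[0] & 0x01) != 0;               // bit 0 (LSB) of 1st byte
+    cfg.profileIdx = data[1] % (cfg.useHevc ? 2 : 3);  // 2nd byte modulo profile table size
+    cfg.levelIdx = data[2] % (cfg.useHevc ? 10 : 15);  // 3rd byte modulo level table size
+    cfg.bitrate = ((uint64_t(data[3]) << 8 | data[4]) * 1000) % 500000000;  // 4th and 5th bytes
+    return cfg;
+}
+```
+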
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the transcoder.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the media_transcoder_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) media_transcoder_fuzzer
+```
+#### Steps to run
+Create a directory CORPUS_DIR on the device
+```
+  $ adb shell mkdir CORPUS_DIR
+```
+To run on the device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/media_transcoder_fuzzer/media_transcoder_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp b/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
new file mode 100644
index 0000000..ec36c0f
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
@@ -0,0 +1,211 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <C2Config.h>
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+#include <stdio.h>
+
+#define UNUSED_PARAM __attribute__((unused))
+#define SRC_FILE "sourceTranscodingFile"
+#define DEST_FILE "destTranscodingFile"
+
+using namespace std;
+using namespace android;
+
+const char* kMimeType[] = {AMEDIA_MIMETYPE_VIDEO_AVC, AMEDIA_MIMETYPE_VIDEO_HEVC};
+const C2Config::profile_t kAvcProfile[] = {C2Config::PROFILE_AVC_BASELINE,
+                                           C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
+                                           C2Config::PROFILE_AVC_MAIN};
+const C2Config::level_t kAvcLevel[] = {
+        C2Config::LEVEL_AVC_1,   C2Config::LEVEL_AVC_1B,  C2Config::LEVEL_AVC_1_1,
+        C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
+        C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2, C2Config::LEVEL_AVC_3,
+        C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
+        C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2, C2Config::LEVEL_AVC_5,
+};
+const C2Config::profile_t kHevcProfile[] = {C2Config::PROFILE_HEVC_MAIN,
+                                            C2Config::PROFILE_HEVC_MAIN_STILL};
+const C2Config::level_t kHevcLevel[] = {
+        C2Config::LEVEL_HEVC_MAIN_1,   C2Config::LEVEL_HEVC_MAIN_2,   C2Config::LEVEL_HEVC_MAIN_2_1,
+        C2Config::LEVEL_HEVC_MAIN_3,   C2Config::LEVEL_HEVC_MAIN_3_1, C2Config::LEVEL_HEVC_MAIN_4,
+        C2Config::LEVEL_HEVC_MAIN_4_1, C2Config::LEVEL_HEVC_MAIN_5,   C2Config::LEVEL_HEVC_MAIN_5_1,
+        C2Config::LEVEL_HEVC_MAIN_5_2};
+const size_t kNumAvcProfile = size(kAvcProfile);
+const size_t kNumAvcLevel = size(kAvcLevel);
+const size_t kNumHevcProfile = size(kHevcProfile);
+const size_t kNumHevcLevel = size(kHevcLevel);
+const size_t kMaxBitrate = 500000000;
+
+enum {
+    IDX_MIME_TYPE = 0,
+    IDX_PROFILE,
+    IDX_LEVEL,
+    IDX_BITRATE_BYTE_1,
+    IDX_BITRATE_BYTE_2,
+    IDX_LAST
+};
+
+class TestCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    virtual void onFinished(const MediaTranscoder* transcoder UNUSED_PARAM) override {
+        unique_lock<mutex> lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onError(const MediaTranscoder* transcoder UNUSED_PARAM,
+                         media_status_t error UNUSED_PARAM) override {
+        unique_lock<mutex> lock(mMutex);
+        mFinished = true;
+        mCondition.notify_all();
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* transcoder UNUSED_PARAM,
+                                  int32_t progress) override {
+        unique_lock<mutex> lock(mMutex);
+        if (progress > 0 && !mProgressMade) {
+            mProgressMade = true;
+            mCondition.notify_all();
+        }
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* transcoder UNUSED_PARAM) override {}
+
+    virtual void onCodecResourceLost(const MediaTranscoder* transcoder UNUSED_PARAM,
+                                     const shared_ptr<ndk::ScopedAParcel>& pausedState
+                                             UNUSED_PARAM) override {}
+
+    void waitForTranscodingFinished() {
+        unique_lock<mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+    }
+
+private:
+    mutex mMutex;
+    condition_variable mCondition;
+    bool mFinished = false;
+    bool mProgressMade = false;
+};
+
+class MediaTranscoderFuzzer {
+public:
+    void init();
+    void invokeTranscoder(const uint8_t* data, size_t size);
+    void deInit();
+
+private:
+    AMediaFormat* getFormat(AMediaFormat* sourceFormat) {
+        AMediaFormat* format = nullptr;
+        const char* mime = nullptr;
+        AMediaFormat_getString(sourceFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+        if (mime != nullptr) {
+            if (strncmp(mime, "video/", 6) == 0 && (mDestMime != nullptr)) {
+                format = AMediaFormat_new();
+                AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mDestMime);
+                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PROFILE, mProfile);
+                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_LEVEL, mLevel);
+                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, mBitrate);
+                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_OPERATING_RATE, INT32_MAX);
+                AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PRIORITY, 1);
+            }
+        }
+        return format;
+    }
+
+    shared_ptr<TestCallbacks> mCallbacks;
+    int mSrcFd = 0;
+    int mDestFd = 0;
+    const char* mDestMime;
+    C2Config::profile_t mProfile;
+    C2Config::level_t mLevel;
+    uint64_t mBitrate = 0;
+};
+
+void MediaTranscoderFuzzer::init() {
+    mCallbacks = make_shared<TestCallbacks>();
+    ABinderProcess_startThreadPool();
+}
+
+void MediaTranscoderFuzzer::deInit() {
+    mCallbacks.reset();
+    if (mSrcFd) {
+        close(mSrcFd);
+    }
+    if (mDestFd) {
+        close(mDestFd);
+    }
+}
+
+void MediaTranscoderFuzzer::invokeTranscoder(const uint8_t* data, size_t size) {
+    auto transcoder = MediaTranscoder::create(mCallbacks);
+    if (transcoder == nullptr) {
+        return;
+    }
+
+    mDestMime = kMimeType[data[IDX_MIME_TYPE] & 0x01];
+    mBitrate = (((data[IDX_BITRATE_BYTE_1] << 8) | data[IDX_BITRATE_BYTE_2]) * 1000) % kMaxBitrate;
+    if (mDestMime == AMEDIA_MIMETYPE_VIDEO_AVC) {
+        mProfile = kAvcProfile[data[IDX_PROFILE] % kNumAvcProfile];
+        mLevel = kAvcLevel[data[IDX_LEVEL] % kNumAvcLevel];
+    } else {
+        mProfile = kHevcProfile[data[IDX_PROFILE] % kNumHevcProfile];
+        mLevel = kHevcLevel[data[IDX_LEVEL] % kNumHevcLevel];
+    }
+
+    data += IDX_LAST;
+    size -= IDX_LAST;
+
+    mSrcFd = memfd_create(SRC_FILE, MFD_ALLOW_SEALING);
+    write(mSrcFd, data, size);
+
+    transcoder->configureSource(mSrcFd);
+    vector<shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* format = getFormat(trackFormats[i].get());
+        transcoder->configureTrackFormat(i, format);
+
+        if (format != nullptr) {
+            AMediaFormat_delete(format);
+        }
+    }
+    mDestFd = memfd_create(DEST_FILE, MFD_ALLOW_SEALING);
+    transcoder->configureDestination(mDestFd);
+    if (transcoder->start() == AMEDIA_OK) {
+        mCallbacks->waitForTranscodingFinished();
+        transcoder->cancel();
+    }
+    close(mSrcFd);
+    close(mDestFd);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (size < IDX_LAST + 1) {
+        return 0;
+    }
+    MediaTranscoderFuzzer transcoderFuzzer;
+    transcoderFuzzer.init();
+    transcoderFuzzer.invokeTranscoder(data, size);
+    transcoderFuzzer.deInit();
+    return 0;
+}
diff --git a/media/libmediatranscoding/transcoder/tools/Android.bp b/media/libmediatranscoding/transcoder/tools/Android.bp
new file mode 100644
index 0000000..ba30d34
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tools/Android.bp
@@ -0,0 +1,33 @@
+package {
+    // See: http://go/android-license-faq
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_binary {
+    name: "transcode",
+    srcs: ["Transcode.cpp"],
+
+    shared_libs: [
+        "libmediandk",
+        "libmediatranscoder",
+    ],
+
+    header_libs: [
+        "libbase_headers",
+    ],
+
+    compile_multilib: "32",
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libmediatranscoding/transcoder/tools/Transcode.cpp b/media/libmediatranscoding/transcoder/tools/Transcode.cpp
new file mode 100644
index 0000000..1f5649e
--- /dev/null
+++ b/media/libmediatranscoding/transcoder/tools/Transcode.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/macros.h>
+#include <fcntl.h>
+#include <getopt.h>
+#include <media/MediaTranscoder.h>
+#include <media/NdkCommon.h>
+
+using namespace android;
+
+#define ERR_MSG(fmt, ...) fprintf(stderr, "Error: " fmt "\n", ##__VA_ARGS__)
+
+class TranscoderCallbacks : public MediaTranscoder::CallbackInterface {
+public:
+    media_status_t waitForTranscodingFinished() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        while (!mFinished) {
+            mCondition.wait(lock);
+        }
+        return mStatus;
+    }
+
+private:
+    virtual void onFinished(const MediaTranscoder* /*transcoder*/) override {
+        notifyTranscoderFinished(AMEDIA_OK);
+    }
+
+    virtual void onError(const MediaTranscoder* /*transcoder*/, media_status_t error) override {
+        ERR_MSG("Transcoder failed with error %d", error);
+        notifyTranscoderFinished(error);
+    }
+
+    virtual void onProgressUpdate(const MediaTranscoder* /*transcoder*/,
+                                  int32_t /*progress*/) override {}
+
+    virtual void onCodecResourceLost(
+            const MediaTranscoder* /*transcoder*/,
+            const std::shared_ptr<ndk::ScopedAParcel>& /*pausedState*/) override {
+        ERR_MSG("Transcoder lost codec resource while transcoding");
+        notifyTranscoderFinished(AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE);
+    }
+
+    virtual void onHeartBeat(const MediaTranscoder* /*transcoder*/) override {}
+
+    void notifyTranscoderFinished(media_status_t status) {
+        std::unique_lock<std::mutex> lock(mMutex);
+        mFinished = true;
+        mStatus = status;
+        mCondition.notify_all();
+    }
+
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    bool mFinished = false;
+    media_status_t mStatus = AMEDIA_OK;
+};
+
+struct TranscodeConfig {
+    std::string srcFile;
+    std::string dstFile;
+
+    std::string dstCodec{AMEDIA_MIMETYPE_VIDEO_AVC};
+    int32_t bitrate = -1;
+};
+
+static int transcode(const struct TranscodeConfig& config) {
+    auto callbacks = std::make_shared<TranscoderCallbacks>();
+    auto transcoder = MediaTranscoder::create(callbacks, -1 /*heartBeatIntervalUs*/);
+
+    const int srcFd = open(config.srcFile.c_str(), O_RDONLY);
+    if (srcFd <= 0) {
+        ERR_MSG("Unable to open source file %s", config.srcFile.c_str());
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    media_status_t status = transcoder->configureSource(srcFd);
+    close(srcFd);
+    if (status != AMEDIA_OK) {
+        ERR_MSG("configureSource returned error %d", status);
+        return status;
+    }
+
+    std::vector<std::shared_ptr<AMediaFormat>> trackFormats = transcoder->getTrackFormats();
+    if (trackFormats.size() <= 0) {
+        ERR_MSG("No tracks found in source file");
+        return AMEDIA_ERROR_MALFORMED;
+    }
+
+    for (int i = 0; i < trackFormats.size(); ++i) {
+        AMediaFormat* dstFormat = nullptr;
+
+        const char* mime = nullptr;
+        AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
+
+        if (strncmp(mime, "video/", 6) == 0) {
+            dstFormat = AMediaFormat_new();
+            AMediaFormat_setString(dstFormat, AMEDIAFORMAT_KEY_MIME, config.dstCodec.c_str());
+
+            if (config.bitrate > 0) {
+                AMediaFormat_setInt32(dstFormat, AMEDIAFORMAT_KEY_BIT_RATE, config.bitrate);
+            }
+        }
+
+        status = transcoder->configureTrackFormat(i, dstFormat);
+
+        if (dstFormat != nullptr) {
+            AMediaFormat_delete(dstFormat);
+        }
+
+        if (status != AMEDIA_OK) {
+            ERR_MSG("configureTrack returned error %d", status);
+            return status;
+        }
+    }
+
+    // Note: Overwrites existing file.
+    const int dstFd = open(config.dstFile.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
+    if (dstFd <= 0) {
+        ERR_MSG("Unable to open destination file %s", config.dstFile.c_str());
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    status = transcoder->configureDestination(dstFd);
+    close(dstFd);
+    if (status != AMEDIA_OK) {
+        ERR_MSG("configureDestination returned error %d", status);
+        return status;
+    }
+
+    status = transcoder->start();
+    if (status != AMEDIA_OK) {
+        ERR_MSG("start returned error %d", status);
+        return status;
+    }
+
+    return callbacks->waitForTranscodingFinished();
+}
+
+// Options.
+static const struct option kLongOpts[] = {{"help", no_argument, nullptr, 'h'},
+                                          {"codec", required_argument, nullptr, 'c'},
+                                          {"bitrate", required_argument, nullptr, 'b'},
+                                          {0, 0, 0, 0}};
+static const char kShortOpts[] = "hc:b:";
+
+static void printUsageAndExit() {
+    const char* usage =
+            "  -h / --help    : Print this usage message and exit.\n"
+            "  -c / --codec   : Specify output video codec type using MediaFormat codec mime "
+            "type.\n"
+            "                     Defaults to \"video/avc\".\n"
+            "  -b / --bitrate : Specify output video bitrate in bits per second.\n"
+            "                     Defaults to estimating and preserving the original bitrate.\n"
+            "";
+
+    printf("Usage: %s [-h] [-c CODEC] <srcfile> <dstfile>\n%s", getprogname(), usage);
+    exit(-1);
+}
+
+int main(int argc, char** argv) {
+    int c;
+    TranscodeConfig config;
+
+    while ((c = getopt_long(argc, argv, kShortOpts, kLongOpts, nullptr)) >= 0) {
+        switch (c) {
+        case 'c':
+            config.dstCodec.assign(optarg);
+            break;
+
+        case 'b':
+            config.bitrate = atoi(optarg);
+            if (config.bitrate <= 0) {
+                ERR_MSG("Bitrate must an integer larger than zero.");
+                printUsageAndExit();
+            }
+            break;
+
+        case '?':
+            FALLTHROUGH_INTENDED;
+        case 'h':
+            FALLTHROUGH_INTENDED;
+        default:
+            printUsageAndExit();
+            break;
+        }
+    }
+
+    if (optind > (argc - 2)) {
+        ERR_MSG("Source and destination file not specified");
+        printUsageAndExit();
+    }
+    config.srcFile.assign(argv[optind++]);
+    config.dstFile.assign(argv[optind]);
+
+    return transcode(config);
+}
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 04ddcff..e9422cc 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_defaults {
     name: "libnbaio_mono_defaults",
     srcs: [
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 1054b68..ca98b28 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -46,13 +46,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_in_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_in_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 8564899..581867f 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -44,13 +44,11 @@
         status_t result;
         result = mStream->getBufferSize(&mStreamBufferSizeBytes);
         if (result != OK) return result;
-        audio_format_t streamFormat;
-        uint32_t sampleRate;
-        audio_channel_mask_t channelMask;
-        result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+        result = mStream->getAudioProperties(&config);
         if (result != OK) return result;
-        mFormat = Format_from_SR_C(sampleRate,
-                audio_channel_count_from_out_mask(channelMask), streamFormat);
+        mFormat = Format_from_SR_C(config.sample_rate,
+                audio_channel_count_from_out_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
     }
     return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnbaio/include/media/nbaio/Pipe.h b/media/libnbaio/include/media/nbaio/Pipe.h
index 0431976..54dc08f 100644
--- a/media/libnbaio/include/media/nbaio/Pipe.h
+++ b/media/libnbaio/include/media/nbaio/Pipe.h
@@ -23,7 +23,7 @@
 namespace android {
 
 // Pipe is multi-thread safe for readers (see PipeReader), but safe for only a single writer thread.
-// It cannot UNDERRUN on write, unless we allow designation of a master reader that provides the
+// It cannot UNDERRUN on write, unless we allow designation of a primary reader that provides the
 // time-base. Readers can be added and removed dynamically, and it's OK to have no readers.
 class Pipe : public NBAIO_Sink {
 
diff --git a/media/libnblog/Android.bp b/media/libnblog/Android.bp
index 1188320..8cfece6 100644
--- a/media/libnblog/Android.bp
+++ b/media/libnblog/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
 
     name: "libnblog",
diff --git a/media/libnblog/ReportPerformance.cpp b/media/libnblog/ReportPerformance.cpp
index aa678ba..4c78b01 100644
--- a/media/libnblog/ReportPerformance.cpp
+++ b/media/libnblog/ReportPerformance.cpp
@@ -92,8 +92,8 @@
         (*dataJson)["threadNum"] = item.first;
         root.append(*dataJson);
     }
-    Json::StyledWriter writer;
-    std::string rootStr = writer.write(root);
+    Json::StreamWriterBuilder factory;
+    std::string rootStr = Json::writeString(factory, root);
     write(fd, rootStr.c_str(), rootStr.size());
 }
 
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
new file mode 100644
index 0000000..6e48078
--- /dev/null
+++ b/media/libshmem/Android.bp
@@ -0,0 +1,79 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+aidl_interface {
+    name: "shared-file-region-aidl",
+    unstable: true,
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/SharedFileRegion.aidl",
+    ],
+}
+
+cc_library {
+    name: "libshmemcompat",
+    export_include_dirs: ["include"],
+    srcs: ["ShmemCompat.cpp"],
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    shared_libs: [
+        "libbinder",
+        "libshmemutil",
+        "libutils",
+        "shared-file-region-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "libbinder",
+        "libutils",
+        "shared-file-region-aidl-cpp",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_library {
+    name: "libshmemutil",
+    export_include_dirs: ["include"],
+    srcs: ["ShmemUtil.cpp"],
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    shared_libs: [
+        "shared-file-region-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "shared-file-region-aidl-cpp",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_test {
+    name: "shmemTest",
+    srcs: ["ShmemTest.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libshmemcompat",
+        "libshmemutil",
+        "libutils",
+        "shared-file-region-aidl-cpp",
+    ],
+    test_suites: ["device-tests"],
+}
diff --git a/media/libshmem/OWNERS b/media/libshmem/OWNERS
new file mode 100644
index 0000000..29fa2f5
--- /dev/null
+++ b/media/libshmem/OWNERS
@@ -0,0 +1,3 @@
+ytai@google.com
+mnaganov@google.com
+elaurent@google.com
diff --git a/media/libshmem/README.md b/media/libshmem/README.md
new file mode 100644
index 0000000..c25fa7f
--- /dev/null
+++ b/media/libshmem/README.md
@@ -0,0 +1,6 @@
+# libshmem
+
+This library provides facilities for sharing memory across processes over (stable) AIDL. The main
+feature is the definition of the `android.media.SharedFileRegion` AIDL type, which represents a block of
+memory that can be shared between processes. In addition, a few utilities are provided to facilitate
+the use of shared memory and to integrate with legacy code that uses older facilities.
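+
+As a rough usage sketch (a hypothetical snippet based on the `ShmemCompat` helpers added in this
+change, not an official sample), an `IMemory` block can be round-tripped through the parcelable:
+```
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/ShmemCompat.h>
+
+using android::IMemory;
+using android::MemoryBase;
+using android::MemoryHeapBase;
+using android::sp;
+using android::media::SharedFileRegion;
+
+bool roundTripExample() {
+    // Allocate a small writable heap and wrap it in an IMemory.
+    sp<MemoryHeapBase> heap = new MemoryHeapBase(128 /* size */);
+    sp<IMemory> mem = sp<MemoryBase>::make(heap, 0 /* offset */, 128 /* size */);
+
+    // Convert to the AIDL-friendly SharedFileRegion, e.g. before sending it over binder...
+    SharedFileRegion shmem;
+    if (!android::media::convertIMemoryToSharedFileRegion(mem, &shmem)) return false;
+
+    // ...and back to an IMemory on the receiving side.
+    sp<IMemory> received;
+    return android::media::convertSharedFileRegionToIMemory(shmem, &received);
+}
+```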
\ No newline at end of file
diff --git a/media/libshmem/ShmemCompat.cpp b/media/libshmem/ShmemCompat.cpp
new file mode 100644
index 0000000..246cb24
--- /dev/null
+++ b/media/libshmem/ShmemCompat.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemCompat.h"
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+                                      sp<IMemory>* result) {
+    assert(result != nullptr);
+
+    if (!validateSharedFileRegion(shmem)) {
+        return false;
+    }
+
+    // Heap offset and size must be page aligned.
+    const size_t pageSize = getpagesize();
+    const size_t pageMask = ~(pageSize - 1);
+
+    // OK if this wraps.
+    const uint64_t endOffset = static_cast<uint64_t>(shmem.offset) +
+            static_cast<uint64_t>(shmem.size);
+
+    // Round down to page boundary.
+    const uint64_t heapStartOffset = shmem.offset & pageMask;
+    // Round up to page boundary.
+    const uint64_t heapEndOffset = (endOffset + pageSize - 1) & pageMask;
+    const uint64_t heapSize = heapEndOffset - heapStartOffset;
+
+    if (heapStartOffset > std::numeric_limits<size_t>::max() ||
+        heapSize > std::numeric_limits<size_t>::max()) {
+        return false;
+    }
+
+    uint32_t flags = !shmem.writeable ? IMemoryHeap::READ_ONLY : 0;
+
+    const sp<MemoryHeapBase> heap =
+            new MemoryHeapBase(shmem.fd.get(), heapSize, flags, heapStartOffset);
+    *result = sp<MemoryBase>::make(heap,
+                                   shmem.offset - heapStartOffset,
+                                   shmem.size);
+    return true;
+}
+
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                      SharedFileRegion* result) {
+    assert(mem != nullptr);
+    assert(result != nullptr);
+
+    *result = SharedFileRegion();
+
+    ssize_t offset;
+    size_t size;
+
+    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+    if (size > 0) {
+        if (heap == nullptr) {
+            return false;
+        }
+        // Make sure the offset and size do not overflow from int64 boundaries.
+        if (size > std::numeric_limits<int64_t>::max() ||
+                offset > std::numeric_limits<int64_t>::max() ||
+                heap->getOffset() > std::numeric_limits<int64_t>::max() ||
+                static_cast<uint64_t>(heap->getOffset()) +
+                static_cast<uint64_t>(offset)
+                        > std::numeric_limits<int64_t>::max()) {
+            return false;
+        }
+
+        const int fd = fcntl(heap->getHeapID(), F_DUPFD_CLOEXEC, 0);
+        if (fd < 0) {
+            return false;
+        }
+        result->fd.reset(base::unique_fd(fd));
+        result->size = size;
+        result->offset = heap->getOffset() + offset;
+        result->writeable = (heap->getFlags() & IMemoryHeap::READ_ONLY) == 0;
+    }
+    return true;
+}
+
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+                                              sp<IMemory>* result) {
+    assert(result != nullptr);
+
+    if (!shmem.has_value()) {
+        result->clear();
+        return true;
+    }
+
+    return convertSharedFileRegionToIMemory(shmem.value(), result);
+}
+
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                              std::optional<SharedFileRegion>* result) {
+    assert(result != nullptr);
+
+    if (mem == nullptr) {
+        result->reset();
+        return true;
+    }
+
+    result->emplace();
+    return convertIMemoryToSharedFileRegion(mem, &result->value());
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/ShmemTest.cpp b/media/libshmem/ShmemTest.cpp
new file mode 100644
index 0000000..874f34c
--- /dev/null
+++ b/media/libshmem/ShmemTest.cpp
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <gtest/gtest.h>
+
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include "cutils/ashmem.h"
+#include "media/ShmemCompat.h"
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+namespace {
+
+// Creates a SharedFileRegion instance.
+SharedFileRegion makeSharedFileRegion(int64_t offset, int64_t size) {
+    SharedFileRegion shmem;
+    shmem.offset = offset;
+    shmem.size = size;
+    int fd = ashmem_create_region("", size + offset);
+    assert(fd >= 0);
+    shmem.fd = os::ParcelFileDescriptor(base::unique_fd(fd));
+    return shmem;
+}
+
+// Creates a SharedFileRegion instance with an invalid FD.
+SharedFileRegion makeInvalidSharedFileRegion(int64_t offset, int64_t size) {
+    SharedFileRegion shmem;
+    shmem.offset = offset;
+    shmem.size = size;
+    return shmem;
+}
+
+sp<IMemory> makeIMemory(const std::vector<uint8_t>& content, bool writeable = true) {
+    constexpr size_t kOffset = 19;
+
+    sp<MemoryHeapBase> heap = new MemoryHeapBase(content.size(),
+                                                 !writeable ? IMemoryHeap::READ_ONLY : 0);
+    sp<IMemory> result = sp<MemoryBase>::make(heap, kOffset, content.size());
+    memcpy(result->unsecurePointer(), content.data(), content.size());
+    return result;
+}
+
+TEST(ShmemTest, Validate) {
+    EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(0, 0)));
+    EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(1, 2)));
+    EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(-1, 2)));
+    EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(2, -1)));
+    EXPECT_FALSE(validateSharedFileRegion(makeInvalidSharedFileRegion(1, 2)));
+}
+
+TEST(ShmemTest, Conversion) {
+    sp<IMemory> reconstructed;
+    {
+        SharedFileRegion shmem;
+        sp<IMemory> imem = makeIMemory({6, 5, 3});
+        ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_EQ(3, shmem.size);
+        ASSERT_GE(shmem.fd.get(), 0);
+        ASSERT_TRUE(shmem.writeable);
+        ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(3, reconstructed->size());
+    ASSERT_EQ(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY,  0);
+    const uint8_t* p =
+            reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+    EXPECT_EQ(6, p[0]);
+    EXPECT_EQ(5, p[1]);
+    EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, ConversionReadOnly) {
+    sp<IMemory> reconstructed;
+    {
+        SharedFileRegion shmem;
+        sp<IMemory> imem = makeIMemory({6, 5, 3}, false);
+        ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_EQ(3, shmem.size);
+        ASSERT_GE(shmem.fd.get(), 0);
+        ASSERT_FALSE(shmem.writeable);
+        ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(3, reconstructed->size());
+    ASSERT_NE(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY,  0);
+    const uint8_t* p =
+            reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+    EXPECT_EQ(6, p[0]);
+    EXPECT_EQ(5, p[1]);
+    EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, NullConversion) {
+    sp<IMemory> reconstructed;
+    {
+        std::optional<SharedFileRegion> shmem;
+        sp<IMemory> imem;
+        ASSERT_TRUE(convertNullableIMemoryToSharedFileRegion(imem, &shmem));
+        ASSERT_FALSE(shmem.has_value());
+        ASSERT_TRUE(convertNullableSharedFileRegionToIMemory(shmem, &reconstructed));
+    }
+    ASSERT_EQ(nullptr, reconstructed);
+}
+
+}  // namespace
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/ShmemUtil.cpp b/media/libshmem/ShmemUtil.cpp
new file mode 100644
index 0000000..e075346
--- /dev/null
+++ b/media/libshmem/ShmemUtil.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "media/ShmemUtil.h"
+
+namespace android {
+namespace media {
+
+bool validateSharedFileRegion(const SharedFileRegion& shmem) {
+    // FD must be valid.
+    if (shmem.fd.get() < 0) {
+        return false;
+    }
+
+    // Size and offset must be non-negative.
+    if (shmem.size < 0 || shmem.offset < 0) {
+        return false;
+    }
+
+    uint64_t size = shmem.size;
+    uint64_t offset = shmem.offset;
+
+    // Must not wrap.
+    if (offset > offset + size) {
+        return false;
+    }
+
+    return true;
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/aidl/android/media/SharedFileRegion.aidl b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
new file mode 100644
index 0000000..199b647
--- /dev/null
+++ b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * A shared file region.
+ *
+ * This type contains the required information to share a region of a file between processes over
+ * AIDL.
+ * An instance of this type represents a valid FD. For representing a null SharedFileRegion, use a
+ * @nullable SharedFileRegion.
+ * Primarily, this is intended for shared memory blocks.
+ *
+ * @hide
+ */
+parcelable SharedFileRegion {
+    /** File descriptor of the region. Must be valid. */
+    ParcelFileDescriptor fd;
+    /** Offset, in bytes, of the start of the region within the file. Must be non-negative. */
+    long offset;
+    /** Size, in bytes, of the memory region. Must be non-negative. */
+    long size;
+    /** Whether the region is writeable. */
+    boolean writeable;
+}
diff --git a/media/libshmem/include/media/ShmemCompat.h b/media/libshmem/include/media/ShmemCompat.h
new file mode 100644
index 0000000..ba59f25
--- /dev/null
+++ b/media/libshmem/include/media/ShmemCompat.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for interfacing between legacy code that is using IMemory and new
+// code that is using android.os.SharedFileRegion.
+
+#include <optional>
+
+#include "android/media/SharedFileRegion.h"
+#include "binder/IMemory.h"
+#include "utils/StrongPointer.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Converts a SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
+                                      sp<IMemory>* result);
+
+/**
+ * Converts a nullable SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null. The pointee is assigned to
+ *               null if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+                                              sp<IMemory>* result);
+
+/**
+ * Converts an IMemory instance to SharedFileRegion.
+ * @param mem The IMemory instance. May not be null.
+ * @param result The resulting SharedFileRegion instance.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                      SharedFileRegion* result);
+
+/**
+ * Converts a nullable IMemory instance to a nullable SharedFileRegion.
+ * @param mem The IMemory instance. May be null.
+ * @param result The resulting SharedFileRegion instance. May not be null. The pointee is set
+ *               to an empty optional if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ *         failure usually means corrupt data).
+ */
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+                                              std::optional<SharedFileRegion>* result);
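+
+// Illustrative usage sketch (assumes a caller-provided SharedFileRegion named `shmem`):
+//
+//     sp<IMemory> mem;
+//     if (!convertSharedFileRegionToIMemory(shmem, &mem)) {
+//         // Conversion failed; the parcelable is likely malformed.
+//     }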
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libshmem/include/media/ShmemUtil.h b/media/libshmem/include/media/ShmemUtil.h
new file mode 100644
index 0000000..3a7a5a5
--- /dev/null
+++ b/media/libshmem/include/media/ShmemUtil.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// This module contains utilities for working with android.os.SharedFileRegion.
+
+#include "android/media/SharedFileRegion.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Checks whether a SharedFileRegion instance is valid (all fields have reasonable values).
+ */
+bool validateSharedFileRegion(const SharedFileRegion& shmem);
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 63ab654..1aa1848 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -74,6 +74,12 @@
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
 };
 
+namespace {
+
+constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
+
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -279,6 +285,13 @@
 
     void postFillThisBuffer(BufferInfo *info);
 
+    void maybePostExtraOutputMetadataBufferRequest() {
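+        // Keep at most one extra-output-metadata request in flight so the looper
+        // is not flooded with duplicate kWhatSubmitExtraOutputMetadataBuffer messages.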
+        if (!mPendingExtraOutputMetadataBufferRequest) {
+            (new AMessage(kWhatSubmitExtraOutputMetadataBuffer, mCodec))->post();
+            mPendingExtraOutputMetadataBufferRequest = true;
+        }
+    }
+
 private:
     // Handles an OMX message. Returns true iff message was handled.
     bool onOMXMessage(const sp<AMessage> &msg);
@@ -302,6 +315,8 @@
 
     void getMoreInputDataIfPossible();
 
+    bool mPendingExtraOutputMetadataBufferRequest;
+
     DISALLOW_EVIL_CONSTRUCTORS(BaseState);
 };
 
@@ -555,6 +570,8 @@
       mShutdownInProgress(false),
       mExplicitShutdown(false),
       mIsLegacyVP9Decoder(false),
+      mIsStreamCorruptFree(false),
+      mIsLowLatency(false),
       mEncoderDelay(0),
       mEncoderPadding(0),
       mRotationDegrees(0),
@@ -887,7 +904,7 @@
 
             sp<DataConverter> converter = mConverter[portIndex];
             if (converter != NULL) {
-                // here we assume sane conversions of max 4:1, so result fits in int32
+                // here we assume conversions of max 4:1, so result fits in int32
                 if (portIndex == kPortIndexInput) {
                     conversionBufferSize = converter->sourceSize(bufSize);
                 } else {
@@ -1454,6 +1471,10 @@
     mCallback->onOutputFramesRendered(done);
 }
 
+void ACodec::onFirstTunnelFrameReady() {
+    mCallback->onFirstTunnelFrameReady();
+}
+
 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
     ANativeWindowBuffer *buf;
     CHECK(mNativeWindow.get() != NULL);
@@ -2237,6 +2258,12 @@
             }
             err = setupG711Codec(encoder, sampleRate, numChannels);
         }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_OPUS)) {
+        int32_t numChannels = 1, sampleRate = 48000;
+        if (msg->findInt32("channel-count", &numChannels) &&
+            msg->findInt32("sample-rate", &sampleRate)) {
+            err = setupOpusCodec(encoder, sampleRate, numChannels);
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
         // numChannels needs to be set to properly communicate PCM values.
         int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
@@ -2323,6 +2350,12 @@
         mChannelMaskPresent = false;
     }
 
+    int32_t isCorruptFree = 0;
+    if (msg->findInt32("corrupt-free", &isCorruptFree)) {
+        mIsStreamCorruptFree = (isCorruptFree == 1);
+        ALOGV("corrupt-free=[%d]", mIsStreamCorruptFree);
+    }
+
     int32_t maxInputSize;
     if (msg->findInt32("max-input-size", &maxInputSize)) {
         err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
@@ -2409,6 +2442,7 @@
     if (err != OK) {
         ALOGE("decoder can not set low-latency to %d (err %d)", lowLatency, err);
     }
+    mIsLowLatency = (lowLatency && err == OK);
     return err;
 }
 
@@ -2436,6 +2470,30 @@
     return err;
 }
 
+status_t ACodec::setTunnelPeek(int32_t tunnelPeek) {
+    if (mIsEncoder) {
+        ALOGE("encoder does not support %s", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+    if (!mTunneled) {
+        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_KEY);
+        return BAD_VALUE;
+    }
+
+    OMX_CONFIG_BOOLEANTYPE config;
+    InitOMXParams(&config);
+    config.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
+    status_t err = mOMXNode->setConfig(
+            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
+            &config, sizeof(config));
+    if (err != OK) {
+        ALOGE("decoder cannot set %s to %d (err %d)",
+              TUNNEL_PEEK_KEY, tunnelPeek, err);
+    }
+
+    return err;
+}
+
 status_t ACodec::setAudioPresentation(int32_t presentationId, int32_t programId) {
     OMX_AUDIO_CONFIG_ANDROID_AUDIOPRESENTATION config;
     InitOMXParams(&config);
@@ -2602,15 +2660,15 @@
     unsigned int numLayers = 0;
     unsigned int numBLayers = 0;
     int tags;
-    char dummy;
+    char tmp;
     OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
         OMX_VIDEO_AndroidTemporalLayeringPatternNone;
-    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
             && numLayers > 0) {
         pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
     } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
-                    &numLayers, &dummy, &numBLayers, &dummy))
-            && (tags == 1 || (tags == 3 && dummy == '+'))
+                    &numLayers, &tmp, &numBLayers, &tmp))
+            && (tags == 1 || (tags == 3 && tmp == '+'))
             && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
         numLayers += numBLayers;
         pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
@@ -3110,6 +3168,26 @@
             kPortIndexInput, sampleRate, numChannels);
 }
 
+status_t ACodec::setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels) {
+    if (encoder) {
+        return INVALID_OPERATION;
+    }
+    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexInput;
+    status_t err = mOMXNode->getParameter(
+            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+    if (err != OK) {
+        ALOGE("setupOpusCodec(): Error %d getting OMX_IndexParamAudioAndroidOpus parameter", err);
+        return err;
+    }
+    def.nSampleRate = sampleRate;
+    def.nChannels = numChannels;
+    err = mOMXNode->setParameter(
+           (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, &def, sizeof(def));
+    return err;
+}
+
 status_t ACodec::setupFlacCodec(
         bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
         AudioEncoding encoding) {
@@ -4127,6 +4205,29 @@
         ALOGI("setupVideoEncoder succeeded");
     }
 
+    // Video should be encoded upright because the RTP protocol can carry
+    // rotation information only when CVO is supported. Applying the rotation
+    // here covers the non-CVO case in video streaming scenarios.
+    int32_t rotation = 0;
+    if (msg->findInt32("rotation-degrees", &rotation)) {
+        OMX_CONFIG_ROTATIONTYPE config;
+        InitOMXParams(&config);
+        config.nPortIndex = kPortIndexOutput;
+        status_t err = mOMXNode->getConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+        if (err != OK) {
+            ALOGW("Failed to getConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+        config.nRotation = rotation;
+        err = mOMXNode->setConfig(
+                (OMX_INDEXTYPE)OMX_IndexConfigCommonRotate, &config, sizeof(config));
+
+        ALOGD("Applying encoder-rotation=[%d] to video encoder.", config.nRotation);
+        if (err != OK) {
+            ALOGW("Failed to setConfig of OMX_IndexConfigCommonRotate(err %d)", err);
+        }
+    }
+
     return err;
 }
 
@@ -4757,15 +4858,15 @@
         unsigned int numLayers = 0;
         unsigned int numBLayers = 0;
         int tags;
-        char dummy;
-        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
+        char tmp;
+        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
                 && numLayers > 0) {
             pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
             tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
             tsLayers = numLayers;
         } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
-                        &numLayers, &dummy, &numBLayers, &dummy))
-                && (tags == 1 || (tags == 3 && dummy == '+'))
+                        &numLayers, &tmp, &numBLayers, &tmp))
+                && (tags == 1 || (tags == 3 && tmp == '+'))
                 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
             pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
             // VPX does not have a concept of B-frames, so just count all layers
@@ -5283,6 +5384,34 @@
                     if (mChannelMaskPresent) {
                         notify->setInt32("channel-mask", mChannelMask);
                     }
+
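+                    // For AAC decoders, surface the component's current DRC
+                    // presentation parameters in the reported output format.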
+                    if (!mIsEncoder && portIndex == kPortIndexOutput) {
+                        AString mime;
+                        if (mConfigFormat->findString("mime", &mime)
+                                && !strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime.c_str())) {
+
+                            OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE presentation;
+                            InitOMXParams(&presentation);
+                            err = mOMXNode->getParameter(
+                                    (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+                                    &presentation, sizeof(presentation));
+                            if (err != OK) {
+                                return err;
+                            }
+                            notify->setInt32("aac-encoded-target-level",
+                                             presentation.nEncodedTargetLevel);
+                            notify->setInt32("aac-drc-cut-level", presentation.nDrcCut);
+                            notify->setInt32("aac-drc-boost-level", presentation.nDrcBoost);
+                            notify->setInt32("aac-drc-heavy-compression",
+                                             presentation.nHeavyCompression);
+                            notify->setInt32("aac-target-ref-level",
+                                             presentation.nTargetReferenceLevel);
+                            notify->setInt32("aac-drc-effect-type", presentation.nDrcEffectType);
+                            notify->setInt32("aac-drc-album-mode", presentation.nDrcAlbumMode);
+                            notify->setInt32("aac-drc-output-loudness",
+                                             presentation.nDrcOutputLoudness);
+                        }
+                    }
                     break;
                 }
 
@@ -5588,15 +5717,18 @@
     int32_t range, standard, transfer;
     convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
 
+    int32_t dsRange, dsStandard, dsTransfer;
+    getColorConfigFromDataSpace(dataSpace, &dsRange, &dsStandard, &dsTransfer);
+
     // if some aspects are unspecified, use dataspace fields
     if (range == 0) {
-        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+        range = dsRange;
     }
     if (standard == 0) {
-        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+        standard = dsStandard;
     }
     if (transfer == 0) {
-        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+        transfer = dsTransfer;
     }
 
     mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
@@ -5722,7 +5854,8 @@
 
 ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
     : AState(parentState),
-      mCodec(codec) {
+      mCodec(codec),
+      mPendingExtraOutputMetadataBufferRequest(false) {
 }
 
 ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
@@ -5766,17 +5899,19 @@
 
         case ACodec::kWhatSetSurface:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             sp<RefBase> obj;
             CHECK(msg->findObject("surface", &obj));
 
             status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
 
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", err);
-            response->postReply(replyID);
+            sp<AReplyToken> replyID;
+            if (msg->senderAwaitsResponse(&replyID)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", err);
+                response->postReply(replyID);
+            } else if (err != OK) {
+                mCodec->signalError(OMX_ErrorUndefined, err);
+            }
             break;
         }
 
@@ -5823,6 +5958,21 @@
             break;
         }
 
+        case kWhatSubmitExtraOutputMetadataBuffer: {
+            mPendingExtraOutputMetadataBufferRequest = false;
+            if (getPortMode(kPortIndexOutput) == RESUBMIT_BUFFERS && mCodec->mIsLowLatency) {
+                // Decoders often need more than one output buffer to be
+                // submitted before processing a single input buffer.
+                // For low latency codecs, we don't want to wait for more input
+                // to be queued to get those output buffers submitted.
+                if (mCodec->submitOutputMetadataBuffer() == OK
+                        && mCodec->mMetadataBuffersToSubmit > 0) {
+                    maybePostExtraOutputMetadataBufferRequest();
+                }
+            }
+            break;
+        }
+
         default:
             return false;
     }
@@ -5974,6 +6124,12 @@
         return false;
     }
 
+    if (mCodec->mIsStreamCorruptFree && data1 == (OMX_U32)OMX_ErrorStreamCorrupt) {
+        ALOGV("[%s] handle OMX_ErrorStreamCorrupt as a normal operation",
+                mCodec->mComponentName.c_str());
+        return true;
+    }
+
     ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);
 
     // verify OMX component sends back an error we expect.
@@ -6081,6 +6237,13 @@
         return;
     }
 
+    int32_t cvo;
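+    // A "cvo" entry carries the video orientation signalled over RTP (CVO);
+    // apply it to the native window as a rotation before the buffer is rendered.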
+    if (mCodec->mNativeWindow != NULL && buffer != NULL &&
+            buffer->meta()->findInt32("cvo", &cvo)) {
+        ALOGV("cvo(%d) found in buffer #%u", cvo, bufferID);
+        setNativeWindowRotation(mCodec->mNativeWindow.get(), cvo);
+    }
+
     info->mStatus = BufferInfo::OWNED_BY_US;
     info->mData = buffer;
 
@@ -6179,7 +6342,12 @@
                             (outputMode == FREE_BUFFERS ? "FREE" :
                              outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                     if (outputMode == RESUBMIT_BUFFERS) {
-                        mCodec->submitOutputMetadataBuffer();
+                        status_t err = mCodec->submitOutputMetadataBuffer();
+                        if (mCodec->mIsLowLatency
+                                && err == OK
+                                && mCodec->mMetadataBuffersToSubmit > 0) {
+                            maybePostExtraOutputMetadataBufferRequest();
+                        }
                     }
                 }
                 info->checkReadFence("onInputBufferFilled");
@@ -6729,6 +6897,7 @@
     ALOGV("onAllocateComponent");
 
     CHECK(mCodec->mOMXNode == NULL);
+    mCodec->mFatalError = false;
 
     sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
     notify->setInt32("generation", mCodec->mNodeGeneration + 1);
@@ -6947,10 +7116,9 @@
         return err;
     }
 
-    using hardware::media::omx::V1_0::utils::TWOmxNode;
     err = statusFromBinderStatus(
             mCodec->mGraphicBufferSource->configure(
-                    new TWOmxNode(mCodec->mOMXNode),
+                    mCodec->mOMXNode->getHalInterface<IOmxNode>(),
                     static_cast<hardware::graphics::common::V1_0::Dataspace>(dataSpace)));
     if (err != OK) {
         ALOGE("[%s] Unable to configure for node (err %d)",
@@ -7325,6 +7493,9 @@
                 break;
         }
     }
+    if (mCodec->mIsLowLatency) {
+        maybePostExtraOutputMetadataBufferRequest();
+    }
 
     // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
     mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
@@ -7631,8 +7802,8 @@
         mInputFormat->setInt64("android._stop-time-offset-us", stopTimeOffsetUs);
     }
 
-    int32_t dummy;
-    if (params->findInt32("request-sync", &dummy)) {
+    int32_t tmp;
+    if (params->findInt32("request-sync", &tmp)) {
         status_t err = requestIDRFrame();
 
         if (err != OK) {
@@ -7704,6 +7875,67 @@
     // Ignore errors as failure is expected for codecs that aren't video encoders.
     (void)configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
 
+    AString mime;
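+    // For AAC decoders, allow the DRC presentation parameters (target level,
+    // boost/cut, effect type, album mode, ...) to be updated at runtime.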
+    if (!mIsEncoder
+            && (mConfigFormat->findString("mime", &mime))
+            && !strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime.c_str())) {
+        OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE presentation;
+        InitOMXParams(&presentation);
+        mOMXNode->getParameter(
+                    (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+                    &presentation, sizeof(presentation));
+        int32_t value32 = 0;
+        bool updated = false;
+        if (params->findInt32("aac-pcm-limiter-enable", &value32)) {
+            presentation.nPCMLimiterEnable = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-encoded-target-level", &value32)) {
+            presentation.nEncodedTargetLevel = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-drc-cut-level", &value32)) {
+            presentation.nDrcCut = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-drc-boost-level", &value32)) {
+            presentation.nDrcBoost = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-drc-heavy-compression", &value32)) {
+            presentation.nHeavyCompression = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-target-ref-level", &value32)) {
+            presentation.nTargetReferenceLevel = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-drc-effect-type", &value32)) {
+            presentation.nDrcEffectType = value32;
+            updated = true;
+        }
+        if (params->findInt32("aac-drc-album-mode", &value32)) {
+            presentation.nDrcAlbumMode = value32;
+            updated = true;
+        }
+        if (!params->findInt32("aac-drc-output-loudness", &value32)) {
+            presentation.nDrcOutputLoudness = value32;
+            updated = true;
+        }
+        if (updated) {
+            mOMXNode->setParameter((OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacDrcPresentation,
+                &presentation, sizeof(presentation));
+        }
+    }
+
+    int32_t tunnelPeek = 0;
+    if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
+        status_t err = setTunnelPeek(tunnelPeek);
+        if (err != OK) {
+            return err;
+        }
+    }
+
     return setVendorParameters(params);
 }
 
@@ -8169,6 +8401,12 @@
             return true;
         }
 
+        case OMX_EventOnFirstTunnelFrameReady:
+        {
+            mCodec->onFirstTunnelFrameReady();
+            return true;
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
@@ -8207,17 +8445,38 @@
             FALLTHROUGH_INTENDED;
         }
         case kWhatResume:
-        case kWhatSetParameters:
         {
-            if (msg->what() == kWhatResume) {
-                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
-            }
+            ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
 
             mCodec->deferMessage(msg);
             handled = true;
             break;
         }
 
+        case kWhatSetParameters:
+        {
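+            // hdr10-plus-info can be applied immediately in this state; strip it
+            // from the message and defer only the remaining parameters.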
+            sp<AMessage> params;
+            CHECK(msg->findMessage("params", &params));
+
+            sp<ABuffer> hdr10PlusInfo;
+            if (params->findBuffer("hdr10-plus-info", &hdr10PlusInfo)) {
+                if (hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+                    (void)mCodec->setHdr10PlusInfo(hdr10PlusInfo);
+                }
+                params->removeEntryAt(params->findEntryByName("hdr10-plus-info"));
+
+                if (params->countEntries() == 0) {
+                    msg->removeEntryAt(msg->findEntryByName("params"));
+                }
+            }
+
+            if (msg->countEntries() > 0) {
+                mCodec->deferMessage(msg);
+            }
+            handled = true;
+            break;
+        }
+
         case kWhatForceStateTransition:
         {
             int32_t generation = 0;
@@ -8228,6 +8487,23 @@
             break;
         }
 
+        case kWhatSetSurface:
+        {
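+            // Defer the actual surface switch until this state is exited, but
+            // reply OK right away so the caller is not blocked.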
+            ALOGV("[%s] Deferring setSurface", mCodec->mComponentName.c_str());
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            mCodec->deferMessage(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", OK);
+            response->postReply(replyID);
+
+            handled = true;
+            break;
+        }
+
         case kWhatCheckIfStuck:
         {
             int32_t generation = 0;
@@ -8328,6 +8604,15 @@
             return false;
         }
 
+        case OMX_EventConfigUpdate:
+        {
+            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+            mCodec->onConfigUpdate((OMX_INDEXTYPE)data2);
+
+            return true;
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 3bccb7b..a052a70 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_headers {
     name: "libstagefright_headers",
     export_include_dirs: ["include"],
@@ -8,6 +25,12 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_static {
@@ -34,6 +57,12 @@
         "libstagefright_foundation",
         "libutils"
     ],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_static {
@@ -58,9 +87,18 @@
     },
 
     header_libs: [
+        "libaudioclient_headers",
         "libstagefright_foundation_headers",
+        "media_ndk_headers",
     ],
-    shared_libs: ["libmediandk"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
     export_include_dirs: ["include"],
 }
 
@@ -132,8 +170,10 @@
 
     header_libs: [
         "libaudioclient_headers",
-        "libmedia_headers",
+        "libbase_headers",
+        "libmedia_datasource_headers",
         "media_ndk_headers",
+        "media_plugin_headers",
     ],
 
     cflags: [
@@ -150,6 +190,18 @@
             "signed-integer-overflow",
         ],
     },
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+        linux: {
+            cflags: [
+                "-DDISABLE_AUDIO_SYSTEM_OFFLOAD",
+            ],
+        }
+    },
 }
 
 cc_library_shared {
@@ -176,7 +228,7 @@
     ],
 
     static_libs: [
-        "librenderengine",
+        "librenderfright",
     ],
 
     export_include_dirs: [
@@ -222,6 +274,7 @@
         "MPEG2TSWriter.cpp",
         "MPEG4Writer.cpp",
         "MediaAdapter.cpp",
+        "MediaAppender.cpp",
         "MediaClock.cpp",
         "MediaCodec.cpp",
         "MediaCodecList.cpp",
@@ -285,6 +338,8 @@
         "android.hardware.cas.native@1.0",
         "android.hardware.drm@1.0",
         "android.hardware.media.omx@1.0",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     static_libs: [
@@ -297,6 +352,9 @@
         "libogg",
         "libwebm",
         "libstagefright_id3",
+        "framework-permission-aidl-cpp",
+        "libmediandk_format",
+        "libmedia_ndkformatpriv",
     ],
 
     header_libs:[
@@ -304,6 +362,7 @@
         "libnativeloader-headers",
         "libstagefright_xmlparser_headers",
         "media_ndk_headers",
+        "libmediaformatshaper_headers",
     ],
 
     export_shared_lib_headers: [
@@ -311,6 +370,7 @@
         "libhidlmemory",
         "libmedia",
         "android.hidl.allocator@1.0",
+        "framework-permission-aidl-cpp",
     ],
 
     export_include_dirs: [
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 4bc861e..b6acdc8 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -21,6 +21,8 @@
 #define LOG_TAG "AudioSource"
 #include <utils/Log.h>
 
+#include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -32,6 +34,8 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 static void AudioRecordCallbackFunction(int event, void *user, void *info) {
     AudioSource *source = (AudioSource *) user;
     switch (event) {
@@ -50,74 +54,101 @@
 }
 
 AudioSource::AudioSource(
+    const audio_attributes_t *attr, const AttributionSourceState& attributionSource,
+    uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
+    audio_port_handle_t selectedDeviceId,
+    audio_microphone_direction_t selectedMicDirection,
+    float selectedMicFieldDimension)
+{
+  set(attr, attributionSource, sampleRate, channelCount, outSampleRate, selectedDeviceId,
+      selectedMicDirection, selectedMicFieldDimension);
+}
+
+AudioSource::AudioSource(
         const audio_attributes_t *attr, const String16 &opPackageName,
         uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
         uid_t uid, pid_t pid, audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float selectedMicFieldDimension)
-    : mStarted(false),
-      mSampleRate(sampleRate),
-      mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
-      mTrackMaxAmplitude(false),
-      mStartTimeUs(0),
-      mStopSystemTimeUs(-1),
-      mLastFrameTimestampUs(0),
-      mMaxAmplitude(0),
-      mPrevSampleTimeUs(0),
-      mInitialReadTimeUs(0),
-      mNumFramesReceived(0),
-      mNumFramesSkipped(0),
-      mNumFramesLost(0),
-      mNumClientOwnedBuffers(0),
-      mNoMoreFramesToRead(false) {
-    ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
-            sampleRate, outSampleRate, channelCount);
-    CHECK(channelCount == 1 || channelCount == 2);
-    CHECK(sampleRate > 0);
+{
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = VALUE_OR_FATAL(legacy2aidl_String16_string(opPackageName));
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+    attributionSource.token = sp<BBinder>::make();
+    set(attr, attributionSource, sampleRate, channelCount, outSampleRate, selectedDeviceId,
+        selectedMicDirection, selectedMicFieldDimension);
+}
 
-    size_t minFrameCount;
-    status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
-                                           sampleRate,
-                                           AUDIO_FORMAT_PCM_16_BIT,
-                                           audio_channel_in_mask_from_count(channelCount));
-    if (status == OK) {
-        // make sure that the AudioRecord callback never returns more than the maximum
-        // buffer size
-        uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
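+// Common initialization shared by both AudioSource constructors.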
+void AudioSource::set(
+        const audio_attributes_t *attr, const AttributionSourceState& attributionSource,
+        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate,
+        audio_port_handle_t selectedDeviceId,
+        audio_microphone_direction_t selectedMicDirection,
+        float selectedMicFieldDimension)
+{
+  mStarted = false;
+  mSampleRate = sampleRate;
+  mOutSampleRate = outSampleRate > 0 ? outSampleRate : sampleRate;
+  mTrackMaxAmplitude = false;
+  mStartTimeUs = 0;
+  mStopSystemTimeUs = -1;
+  mLastFrameTimestampUs = 0;
+  mMaxAmplitude = 0;
+  mPrevSampleTimeUs = 0;
+  mInitialReadTimeUs = 0;
+  mNumFramesReceived = 0;
+  mNumFramesSkipped = 0;
+  mNumFramesLost = 0;
+  mNumClientOwnedBuffers = 0;
+  mNoMoreFramesToRead = false;
+  ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
+        sampleRate, outSampleRate, channelCount);
+  CHECK(channelCount == 1 || channelCount == 2);
+  CHECK(sampleRate > 0);
 
-        // make sure that the AudioRecord total buffer size is large enough
-        size_t bufCount = 2;
-        while ((bufCount * frameCount) < minFrameCount) {
-            bufCount++;
-        }
+  size_t minFrameCount;
+  status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
+                                                  sampleRate,
+                                                  AUDIO_FORMAT_PCM_16_BIT,
+                                                  audio_channel_in_mask_from_count(channelCount));
+  if (status == OK) {
+    // make sure that the AudioRecord callback never returns more than the maximum
+    // buffer size
+    uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
 
-        mRecord = new AudioRecord(
-                    AUDIO_SOURCE_DEFAULT, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
-                    audio_channel_in_mask_from_count(channelCount),
-                    opPackageName,
-                    (size_t) (bufCount * frameCount),
-                    AudioRecordCallbackFunction,
-                    this,
-                    frameCount /*notificationFrames*/,
-                    AUDIO_SESSION_ALLOCATE,
-                    AudioRecord::TRANSFER_DEFAULT,
-                    AUDIO_INPUT_FLAG_NONE,
-                    uid,
-                    pid,
-                    attr,
-                    selectedDeviceId,
-                    selectedMicDirection,
-                    selectedMicFieldDimension);
-        // Set caller name so it can be logged in destructor.
-        // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
-        mRecord->setCallerName("media");
-        mInitCheck = mRecord->initCheck();
-        if (mInitCheck != OK) {
-            mRecord.clear();
-        }
-    } else {
-        mInitCheck = status;
+    // make sure that the AudioRecord total buffer size is large enough
+    size_t bufCount = 2;
+    while ((bufCount * frameCount) < minFrameCount) {
+      bufCount++;
     }
+
+    mRecord = new AudioRecord(
+        AUDIO_SOURCE_DEFAULT, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
+        audio_channel_in_mask_from_count(channelCount),
+        attributionSource,
+        (size_t) (bufCount * frameCount),
+        AudioRecordCallbackFunction,
+        this,
+        frameCount /*notificationFrames*/,
+        AUDIO_SESSION_ALLOCATE,
+        AudioRecord::TRANSFER_DEFAULT,
+        AUDIO_INPUT_FLAG_NONE,
+        attr,
+        selectedDeviceId,
+        selectedMicDirection,
+        selectedMicFieldDimension);
+    // Set caller name so it can be logged in destructor.
+    // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_MEDIA
+    mRecord->setCallerName("media");
+    mInitCheck = mRecord->initCheck();
+    if (mInitCheck != OK) {
+      mRecord.clear();
+    }
+  } else {
+    mInitCheck = status;
+  }
 }
 
 AudioSource::~AudioSource() {
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..95afa62 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -46,88 +46,6 @@
 
 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
 
-struct CameraSourceListener : public CameraListener {
-    explicit CameraSourceListener(const sp<CameraSource> &source);
-
-    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
-    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                          camera_frame_metadata_t *metadata);
-
-    virtual void postDataTimestamp(
-            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
-
-    virtual void postRecordingFrameHandleTimestampBatch(
-                const std::vector<nsecs_t>& timestamps,
-                const std::vector<native_handle_t*>& handles);
-
-protected:
-    virtual ~CameraSourceListener();
-
-private:
-    wp<CameraSource> mSource;
-
-    CameraSourceListener(const CameraSourceListener &);
-    CameraSourceListener &operator=(const CameraSourceListener &);
-};
-
-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
-    : mSource(source) {
-}
-
-CameraSourceListener::~CameraSourceListener() {
-}
-
-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
-    UNUSED_UNLESS_VERBOSE(msgType);
-    UNUSED_UNLESS_VERBOSE(ext1);
-    UNUSED_UNLESS_VERBOSE(ext2);
-    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
-}
-
-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
-                                    camera_frame_metadata_t * /* metadata */) {
-    ALOGV("postData(%d, ptr:%p, size:%zu)",
-         msgType, dataPtr->unsecurePointer(), dataPtr->size());
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallback(msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postDataTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != NULL) {
-        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
-    }
-}
-
-void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
-        const std::vector<nsecs_t>& timestamps,
-        const std::vector<native_handle_t*>& handles) {
-    sp<CameraSource> source = mSource.promote();
-    if (source.get() != nullptr) {
-        int n = timestamps.size();
-        std::vector<nsecs_t> modifiedTimestamps(n);
-        for (int i = 0; i < n; i++) {
-            modifiedTimestamps[i] = timestamps[i] / 1000;
-        }
-        source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
-    }
-}
-
 static int32_t getColorFormat(const char* colorFormat) {
     if (!colorFormat) {
         ALOGE("Invalid color format");
@@ -169,16 +87,6 @@
     return -1;
 }
 
-CameraSource *CameraSource::Create(const String16 &clientName) {
-    Size size;
-    size.width = -1;
-    size.height = -1;
-
-    sp<hardware::ICamera> camera;
-    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
-            Camera::USE_CALLING_PID, size, -1, NULL, false);
-}
-
 // static
 CameraSource *CameraSource::CreateFromCamera(
     const sp<hardware::ICamera>& camera,
@@ -189,12 +97,10 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers) {
+    const sp<IGraphicBufferProducer>& surface) {
 
     CameraSource *source = new CameraSource(camera, proxy, cameraId,
-            clientName, clientUid, clientPid, videoSize, frameRate, surface,
-            storeMetaDataInVideoBuffers);
+            clientName, clientUid, clientPid, videoSize, frameRate, surface);
     return source;
 }
 
@@ -207,8 +113,7 @@
     pid_t clientPid,
     Size videoSize,
     int32_t frameRate,
-    const sp<IGraphicBufferProducer>& surface,
-    bool storeMetaDataInVideoBuffers)
+    const sp<IGraphicBufferProducer>& surface)
     : mCameraFlags(0),
       mNumInputBuffers(0),
       mVideoFrameRate(-1),
@@ -231,8 +136,7 @@
 
     mInitCheck = init(camera, proxy, cameraId,
                     clientName, clientUid, clientPid,
-                    videoSize, frameRate,
-                    storeMetaDataInVideoBuffers);
+                    videoSize, frameRate);
     if (mInitCheck != OK) releaseCamera();
 }
 
@@ -245,7 +149,8 @@
     int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
-        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
+        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
@@ -531,15 +436,13 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
 
     ALOGV("init");
     status_t err = OK;
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                               videoSize, frameRate,
-                               storeMetaDataInVideoBuffers);
+                               videoSize, frameRate);
     IPCThreadState::self()->restoreCallingIdentity(token);
     return err;
 }
@@ -626,8 +529,7 @@
         uid_t clientUid,
         pid_t clientPid,
         Size videoSize,
-        int32_t frameRate,
-        bool storeMetaDataInVideoBuffers) {
+        int32_t frameRate) {
     ALOGV("initWithCameraAccess");
     status_t err = OK;
 
@@ -667,24 +569,12 @@
         CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
     }
 
-    // By default, store real data in video buffers.
-    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
-    if (storeMetaDataInVideoBuffers) {
-        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
-        } else if (OK == mCamera->setVideoBufferMode(
-                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
-            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
-        }
-    }
-
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
-        if (err != OK) {
-            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
-                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
-            return err;
-        }
+    // Use buffer queue to receive video buffers from camera
+    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+    if (err != OK) {
+        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
+                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
+        return err;
     }
 
     int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
@@ -724,54 +614,26 @@
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     status_t err;
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Initialize buffer queue.
-        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
-                (android_dataspace_t)mEncoderDataSpace,
-                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
-        if (err != OK) {
-            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
-                    strerror(-err), err);
-            return err;
-        }
-    } else {
-        if (mNumInputBuffers > 0) {
-            err = mCamera->sendCommand(
-                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
-
-            // This could happen for CameraHAL1 clients; thus the failure is
-            // not a fatal error
-            if (err != OK) {
-                ALOGW("Failed to set video buffer count to %d due to %d",
-                    mNumInputBuffers, err);
-            }
-        }
-
-        err = mCamera->sendCommand(
-            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
-
-        // This could happen for CameraHAL1 clients; thus the failure is
-        // not a fatal error
-        if (err != OK) {
-            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
-                    mEncoderFormat, mEncoderDataSpace, err);
-        }
-
-        // Create memory heap to store buffers as VideoNativeMetadata.
-        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
+    // Initialize buffer queue.
+    err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
+            (android_dataspace_t)mEncoderDataSpace,
+            mNumInputBuffers > 0 ? mNumInputBuffers : 1);
+    if (err != OK) {
+        ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
+                strerror(-err), err);
+        return err;
     }
 
+    // Start data flow
     err = OK;
     if (mCameraFlags & FLAGS_HOT_CAMERA) {
         mCamera->unlock();
         mCamera.clear();
-        if ((err = mCameraRecordingProxy->startRecording(
-                new ProxyListener(this))) != OK) {
+        if ((err = mCameraRecordingProxy->startRecording()) != OK) {
             ALOGE("Failed to start recording, received error: %s (%d)",
                     strerror(-err), err);
         }
     } else {
-        mCamera->setListener(new CameraSourceListener(this));
         mCamera->startRecording();
         if (!mCamera->recordingEnabled()) {
             err = -EINVAL;
@@ -797,7 +659,7 @@
     mStartTimeUs = 0;
     mNumInputBuffers = 0;
     mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
+    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
 
     if (meta) {
         int64_t startTimeUs;
@@ -817,6 +679,7 @@
         }
         if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
             ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
+            mBufferDataSpace = mEncoderDataSpace;
         }
     }
 
@@ -836,7 +699,6 @@
         }
     } else {
         if (mCamera != 0) {
-            mCamera->setListener(NULL);
             mCamera->stopRecording();
         }
     }
@@ -935,97 +797,31 @@
 void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
     ALOGV("releaseRecordingFrame");
 
-    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
-        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
-        ssize_t offset;
-        size_t size;
-        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
-                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
-            return;
-        }
-
-        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
-                (uint8_t*)heap->getBase() + offset);
-
-        // Find the corresponding buffer item for the native window buffer.
-        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
-        if (index == NAME_NOT_FOUND) {
-            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
-            return;
-        }
-
-        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
-        mReceivedBufferItemMap.removeItemsAt(index);
-        mVideoBufferConsumer->releaseBuffer(buffer);
-        mMemoryBases.push_back(frame);
-        mMemoryBaseAvailableCond.signal();
-    } else {
-        native_handle_t* handle = nullptr;
-
-        // Check if frame contains a VideoNativeHandleMetadata.
-        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
-          // TODO: Using unsecurePointer() has some associated security pitfalls
-          //       (see declaration for details).
-          //       Either document why it is safe in this case or address the
-          //       issue (e.g. by copying).
-           VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(frame->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        if (handle != nullptr) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
-            if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
-                ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)",
-		     __FUNCTION__, heap->getHeapID(), mMemoryHeapBase->getHeapID());
-                return;
-            }
-            uint32_t batchSize = 0;
-            {
-                Mutex::Autolock autoLock(mBatchLock);
-                if (mInflightBatchSizes.size() > 0) {
-                    batchSize = mInflightBatchSizes[0];
-                }
-            }
-            if (batchSize == 0) { // return buffers one by one
-                // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
-                releaseRecordingFrameHandle(handle);
-                mMemoryBases.push_back(frame);
-                mMemoryBaseAvailableCond.signal();
-            } else { // Group buffers in batch then return
-                Mutex::Autolock autoLock(mBatchLock);
-                mInflightReturnedHandles.push_back(handle);
-                mInflightReturnedMemorys.push_back(frame);
-                if (mInflightReturnedHandles.size() == batchSize) {
-                    releaseRecordingFrameHandleBatch(mInflightReturnedHandles);
-
-                    mInflightBatchSizes.pop_front();
-                    mInflightReturnedHandles.clear();
-                    for (const auto& mem : mInflightReturnedMemorys) {
-                        mMemoryBases.push_back(mem);
-                        mMemoryBaseAvailableCond.signal();
-                    }
-                    mInflightReturnedMemorys.clear();
-                }
-            }
-
-        } else if (mCameraRecordingProxy != nullptr) {
-            // mCamera is created by application. Return the frame back to camera via camera
-            // recording proxy.
-            mCameraRecordingProxy->releaseRecordingFrame(frame);
-        } else if (mCamera != nullptr) {
-            // mCamera is created by CameraSource. Return the frame directly back to camera.
-            int64_t token = IPCThreadState::self()->clearCallingIdentity();
-            mCamera->releaseRecordingFrame(frame);
-            IPCThreadState::self()->restoreCallingIdentity(token);
-        }
+    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
+    ssize_t offset;
+    size_t size;
+    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
+    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
+        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
+                heap->getHeapID(), mMemoryHeapBase->getHeapID());
+        return;
     }
+
+    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+        (uint8_t*)heap->getBase() + offset);
+
+    // Find the corresponding buffer item for the native window buffer.
+    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
+    if (index == NAME_NOT_FOUND) {
+        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
+        return;
+    }
+
+    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
+    mReceivedBufferItemMap.removeItemsAt(index);
+    mVideoBufferConsumer->releaseBuffer(buffer);
+    mMemoryBases.push_back(frame);
+    mMemoryBaseAvailableCond.signal();
 }
 
 void CameraSource::releaseQueuedFrames() {
@@ -1114,6 +910,11 @@
         (*buffer)->setObserver(this);
         (*buffer)->add_ref();
         (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
+        if (mBufferDataSpace != mEncoderDataSpace) {
+            ALOGD("Data space updated to %x", mBufferDataSpace);
+            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
+            mEncoderDataSpace = mBufferDataSpace;
+        }
     }
     return OK;
 }
@@ -1181,152 +982,6 @@
     return false;
 }
 
-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
-        int32_t msgType __unused, const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseOneRecordingFrame(data);
-        return;
-    }
-
-    ++mNumFramesReceived;
-
-    CHECK(data != NULL && data->size() > 0);
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
-        mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandle(handle);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        native_handle_close(handle);
-        native_handle_delete(handle);
-    }
-}
-
-void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    if (mCameraRecordingProxy != nullptr) {
-        mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
-    } else if (mCamera != nullptr) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        mCamera->releaseRecordingFrameHandleBatch(handles);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    }
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle) {
-    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-    Mutex::Autolock autoLock(mLock);
-    if (handle == nullptr) return;
-
-    if (shouldSkipFrameLocked(timestampUs)) {
-        releaseRecordingFrameHandle(handle);
-        return;
-    }
-
-    while (mMemoryBases.empty()) {
-        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                TIMED_OUT) {
-            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-            releaseRecordingFrameHandle(handle);
-            return;
-        }
-    }
-
-    ++mNumFramesReceived;
-
-    sp<IMemory> data = *mMemoryBases.begin();
-    mMemoryBases.erase(mMemoryBases.begin());
-
-    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-    metadata->eType = kMetadataBufferTypeNativeHandleSource;
-    metadata->pHandle = handle;
-
-    mFramesReceived.push_back(data);
-    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-    mFrameTimes.push_back(timeUs);
-    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-    mFrameAvailableCondition.signal();
-}
-
-void CameraSource::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    size_t n = timestampsUs.size();
-    if (n != handles.size()) {
-        ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
-                __FUNCTION__, timestampsUs.size(), handles.size());
-    }
-
-    Mutex::Autolock autoLock(mLock);
-    int batchSize = 0;
-    for (size_t i = 0; i < n; i++) {
-        int64_t timestampUs = timestampsUs[i];
-        native_handle_t* handle = handles[i];
-
-        ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
-        if (handle == nullptr) continue;
-
-        if (shouldSkipFrameLocked(timestampUs)) {
-            releaseRecordingFrameHandle(handle);
-            continue;
-        }
-
-        while (mMemoryBases.empty()) {
-            if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
-                    TIMED_OUT) {
-                ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
-                releaseRecordingFrameHandle(handle);
-                continue;
-            }
-        }
-        ++batchSize;
-        ++mNumFramesReceived;
-        sp<IMemory> data = *mMemoryBases.begin();
-        mMemoryBases.erase(mMemoryBases.begin());
-
-        // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
-        // TODO: Using unsecurePointer() has some associated security pitfalls
-        //       (see declaration for details).
-        //       Either document why it is safe in this case or address the
-        //       issue (e.g. by copying).
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-
-        mFramesReceived.push_back(data);
-        int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
-        mFrameTimes.push_back(timeUs);
-        ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
-
-    }
-    if (batchSize > 0) {
-        Mutex::Autolock autoLock(mBatchLock);
-        mInflightBatchSizes.push_back(batchSize);
-    }
-    for (int i = 0; i < batchSize; i++) {
-        mFrameAvailableCondition.signal();
-    }
-}
-
 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
         const sp<CameraSource>& cameraSource) {
     mConsumer = consumer;
@@ -1391,6 +1046,7 @@
     // Find an available memory slot to store the buffer as VideoNativeMetadata.
     sp<IMemory> data = *mMemoryBases.begin();
     mMemoryBases.erase(mMemoryBases.begin());
+    mBufferDataSpace = buffer.mDataSpace;
 
     ssize_t offset;
     size_t size;
@@ -1417,41 +1073,7 @@
 MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
     ALOGV("metaDataStoredInVideoBuffers");
 
-    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
-    // buffer queue.
-    switch (mVideoBufferMode) {
-        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
-            return kMetadataBufferTypeNativeHandleSource;
-        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
-            return kMetadataBufferTypeANWBuffer;
-        default:
-            return kMetadataBufferTypeInvalid;
-    }
-}
-
-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
-    mSource = source;
-}
-
-void CameraSource::ProxyListener::dataCallbackTimestamp(
-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
-    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
-        native_handle_t* handle) {
-    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
-}
-
-void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    int n = timestampsUs.size();
-    std::vector<nsecs_t> modifiedTimestamps(n);
-    for (int i = 0; i < n; i++) {
-        modifiedTimestamps[i] = timestampsUs[i] / 1000;
-    }
-    mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+    return kMetadataBufferTypeANWBuffer;
 }
 
 void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index e0a6eb3..50a512f 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -45,15 +45,13 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers) {
+        int64_t timeBetweenFrameCaptureUs) {
 
     CameraSourceTimeLapse *source = new
             CameraSourceTimeLapse(camera, proxy, cameraId,
                 clientName, clientUid, clientPid,
                 videoSize, videoFrameRate, surface,
-                timeBetweenFrameCaptureUs,
-                storeMetaDataInVideoBuffers);
+                timeBetweenFrameCaptureUs);
 
     if (source != NULL) {
         if (source->initCheck() != OK) {
@@ -74,11 +72,9 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers)
+        int64_t timeBetweenFrameCaptureUs)
       : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
-                videoSize, videoFrameRate, surface,
-                storeMetaDataInVideoBuffers),
+                videoSize, videoFrameRate, surface),
       mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
       mLastTimeLapseFrameRealTimestampUs(0),
       mSkipCurrentFrame(false) {
@@ -173,12 +169,6 @@
     ALOGV("signalBufferReturned");
     Mutex::Autolock autoLock(mQuickStopLock);
     if (mQuickStop && (buffer == mLastReadBufferCopy)) {
-        if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
-            native_handle_t* handle = (
-                (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
         buffer->setObserver(NULL);
         buffer->release();
         mLastReadBufferCopy = NULL;
@@ -191,8 +181,7 @@
 void createMediaBufferCopy(
         const MediaBufferBase& sourceBuffer,
         int64_t frameTime,
-        MediaBufferBase **newBuffer,
-        int32_t videoBufferMode) {
+        MediaBufferBase **newBuffer) {
 
     ALOGV("createMediaBufferCopy");
     size_t sourceSize = sourceBuffer.size();
@@ -203,19 +192,13 @@
 
     (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
 
-    if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
-        ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
-            native_handle_clone(
-                ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
-    }
 }
 
 void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
     ALOGV("fillLastReadBufferCopy");
     int64_t frameTime;
     CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
-    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
-        metaDataStoredInVideoBuffers());
+    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
     mLastReadBufferCopy->add_ref();
     mLastReadBufferCopy->setObserver(this);
 }
@@ -240,19 +223,6 @@
     }
 }
 
-sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
-        const sp<IMemory> &source_data) {
-
-    ALOGV("createIMemoryCopy");
-    size_t source_size = source_data->size();
-    void* source_pointer = source_data->unsecurePointer();
-
-    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
-    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
-    memcpy(newMemory->unsecurePointer(), source_pointer, source_size);
-    return newMemory;
-}
-
 bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
     ALOGV("skipCurrentFrame");
     if (mSkipCurrentFrame) {
@@ -318,31 +288,6 @@
     return false;
 }
 
-void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data) {
-    ALOGV("dataCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle) {
-    ALOGV("recordingFrameHandleCallbackTimestamp");
-    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
-    CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
-}
-
-void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
-        const std::vector<int64_t>& timestampsUs,
-        const std::vector<native_handle_t*>& handles) {
-    ALOGV("recordingFrameHandleCallbackTimestampBatch");
-    int n = timestampsUs.size();
-    for (int i = 0; i < n; i++) {
-        // Don't do batching for CameraSourceTimeLapse for now
-        recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
-    }
-}
-
 void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
     ALOGV("processBufferQueueFrame");
     int64_t timestampUs = buffer.mTimestamp / 1000;
diff --git a/media/libstagefright/FrameCaptureProcessor.cpp b/media/libstagefright/FrameCaptureProcessor.cpp
index 96c1195..8cd7f82 100644
--- a/media/libstagefright/FrameCaptureProcessor.cpp
+++ b/media/libstagefright/FrameCaptureProcessor.cpp
@@ -164,14 +164,15 @@
 
     if (err != OK) {
         ALOGE("drawLayers returned err %d", err);
-        return err;
+    } else {
+        err = fence->wait(500);
+        if (err != OK) {
+            ALOGW("wait for fence returned err %d", err);
+            err = OK;
+        }
     }
-
-    err = fence->wait(500);
-    if (err != OK) {
-        ALOGW("wait for fence returned err %d", err);
-    }
-    return OK;
+    mRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    return err;
 }
 
 void FrameCaptureProcessor::onMessageReceived(const sp<AMessage> &msg) {
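
The reworked capture path above keeps the drawLayers() failure, treats a fence-wait timeout as non-fatal, and now calls cleanupPostRender() on every exit. A minimal stand-alone sketch of that control-flow shape, using stand-in types (StubRenderEngine/StubFence are placeholders here, not the real RenderEngine or Fence classes):

    #include <cstdio>

    // Stand-ins for the real renderengine::RenderEngine and Fence types.
    struct StubRenderEngine { void cleanupPostRender() { std::puts("cleanupPostRender"); } };
    struct StubFence { int wait(int /*timeoutMs*/) { return 0; } };

    // Mirrors the tail of the capture path shown above: keep the draw error if
    // there is one, downgrade a fence timeout to success, and always clean up
    // before returning.
    int captureTail(StubRenderEngine& re, StubFence& fence, int drawErr) {
        int err = drawErr;
        if (err != 0) {
            std::printf("drawLayers returned err %d\n", err);
        } else {
            err = fence.wait(500);
            if (err != 0) {
                std::printf("wait for fence returned err %d\n", err);
                err = 0;  // a slow fence is logged but not treated as a failure
            }
        }
        re.cleanupPostRender();  // runs on both paths now
        return err;
    }

    int main() {
        StubRenderEngine re;
        StubFence fence;
        return captureTail(re, fence, /*drawErr=*/0);
    }
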
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 734f5bb..efd4070 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -19,6 +19,7 @@
 
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
@@ -43,7 +44,7 @@
 namespace android {
 
 static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
-static const size_t kRetryCount = 50; // must be >0
+static const size_t kRetryCount = 100; // must be >0
 static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
@@ -66,6 +67,12 @@
     if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
             && trackMeta->findInt32(kKeySARHeight, &sarHeight)
             && sarHeight != 0) {
+        int32_t multVal;
+        if (width < 0 || sarWidth < 0 ||
+            __builtin_mul_overflow(width, sarWidth, &multVal)) {
+            ALOGE("displayWidth overflow %dx%d", width, sarWidth);
+            return NULL;
+        }
         displayWidth = (width * sarWidth) / sarHeight;
         displayHeight = height;
     } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
@@ -86,6 +93,16 @@
         rotationAngle = 0;
     }
 
+    if (!metaOnly) {
+        int32_t multVal;
+        if (width < 0 || height < 0 || dstBpp < 0 ||
+            __builtin_mul_overflow(dstBpp, width, &multVal) ||
+            __builtin_mul_overflow(multVal, height, &multVal)) {
+            ALOGE("Frame size overflow %dx%d bpp %d", width, height, dstBpp);
+            return NULL;
+        }
+    }
+
     VideoFrame frame(width, height, displayWidth, displayHeight,
             tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
 
@@ -96,7 +113,7 @@
         return NULL;
     }
     sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
-    if (frameMem == NULL) {
+    if (frameMem == NULL || frameMem->unsecurePointer() == NULL) {
         ALOGE("not enough memory for VideoFrame size=%zu", size);
         return NULL;
     }
@@ -120,15 +137,23 @@
             false /*allocRotated*/, true /*metaOnly*/);
 }
 
+bool isAvif(const sp<MetaData> &trackMeta) {
+    const char *mime;
+    return trackMeta->findCString(kKeyMIMEType, &mime)
+        && (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
+            || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF));
+}
+
 bool findThumbnailInfo(
         const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
         uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
     uint32_t dummyType;
     const void *dummyData;
     size_t dummySize;
+    int codecConfigKey = isAvif(trackMeta) ? kKeyThumbnailAV1C : kKeyThumbnailHVCC;
     return trackMeta->findInt32(kKeyThumbnailWidth, width)
         && trackMeta->findInt32(kKeyThumbnailHeight, height)
-        && trackMeta->findData(kKeyThumbnailHVCC,
+        && trackMeta->findData(codecConfigKey,
                 type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
 }
 
@@ -456,7 +481,8 @@
         const sp<IMediaSource> &source)
     : FrameDecoder(componentName, trackMeta, source),
       mFrame(NULL),
-      mIsAvcOrHevc(false),
+      mIsAvc(false),
+      mIsHevc(false),
       mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
       mTargetTimeUs(-1LL),
       mDefaultSampleDurationUs(0) {
@@ -479,8 +505,8 @@
         return NULL;
     }
 
-    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
-            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+    mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
 
     if (frameTimeUs < 0) {
         int64_t thumbNailTime = -1ll;
@@ -543,8 +569,10 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (mIsAvcOrHevc && !isSeekingClosest
-            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
+    if (!isSeekingClosest
+            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
+                || (mIsHevc
+                    && IsIDR(codecBuffer->data(), codecBuffer->size())))) {
         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -616,6 +644,10 @@
                 0,
                 dstBpp(),
                 mCaptureLayer != nullptr /*allocRotated*/);
+        if (frameMem == nullptr) {
+            return NO_MEMORY;
+        }
+
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
 
         setFrame(frameMem);
@@ -712,7 +744,7 @@
 
 ////////////////////////////////////////////////////////////////////////
 
-ImageDecoder::ImageDecoder(
+MediaImageDecoder::MediaImageDecoder(
         const AString &componentName,
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source)
@@ -728,7 +760,7 @@
       mTargetTiles(0) {
 }
 
-sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
+sp<AMessage> MediaImageDecoder::onGetFormatAndSeekOptions(
         int64_t frameTimeUs, int /*seekMode*/,
         MediaSource::ReadOptions *options, sp<Surface> * /*window*/) {
     sp<MetaData> overrideMeta;
@@ -748,7 +780,10 @@
         overrideMeta->remove(kKeyDisplayHeight);
         overrideMeta->setInt32(kKeyWidth, mWidth);
         overrideMeta->setInt32(kKeyHeight, mHeight);
-        overrideMeta->setData(kKeyHVCC, type, data, size);
+        // The AV1 codec configuration data is passed via CSD0 to the AV1
+        // decoder.
+        const int codecConfigKey = isAvif(trackMeta()) ? kKeyOpaqueCSD0 : kKeyHVCC;
+        overrideMeta->setData(codecConfigKey, type, data, size);
         options->setSeekTo(-1);
     } else {
         CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
@@ -801,7 +836,7 @@
     return videoFormat;
 }
 
-status_t ImageDecoder::onExtractRect(FrameRect *rect) {
+status_t MediaImageDecoder::onExtractRect(FrameRect *rect) {
     // TODO:
     // This callback is for verifying whether we can decode the rect,
     // and if so, set up the internal variables for decoding.
@@ -840,7 +875,7 @@
     return OK;
 }
 
-status_t ImageDecoder::onOutputReceived(
+status_t MediaImageDecoder::onOutputReceived(
         const sp<MediaCodecBuffer> &videoFrameBuffer,
         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
     if (outputFormat == NULL) {
@@ -855,6 +890,11 @@
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
                 trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
+
+        if (frameMem == nullptr) {
+            return NO_MEMORY;
+        }
+
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
 
         setFrame(frameMem);
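
The checks added above use the compiler builtin __builtin_mul_overflow to reject dimension products (width x sarWidth for the display width, and dstBpp x width x height for the frame allocation) that would overflow int32_t before any buffer is sized. A small stand-alone illustration of that guard; checkedFrameBytes is our helper name for the sketch, not a FrameDecoder function:

    #include <cstdint>
    #include <cstdio>

    // Returns true only if dstBpp * width * height fits in int32_t, mirroring
    // the guard added to the frame allocation path above.
    static bool checkedFrameBytes(int32_t width, int32_t height, int32_t dstBpp,
                                  int32_t* outBytes) {
        if (width < 0 || height < 0 || dstBpp < 0) return false;
        int32_t tmp;
        if (__builtin_mul_overflow(dstBpp, width, &tmp) ||
            __builtin_mul_overflow(tmp, height, &tmp)) {
            return false;                     // product overflows int32_t
        }
        *outBytes = tmp;
        return true;
    }

    int main() {
        int32_t bytes;
        std::printf("4096x2160x4 -> %s\n",
                    checkedFrameBytes(4096, 2160, 4, &bytes) ? "ok" : "overflow");
        std::printf("65536x65536x4 -> %s\n",
                    checkedFrameBytes(65536, 65536, 4, &bytes) ? "ok" : "overflow");
        return 0;
    }
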
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index b347453..5f9c20e 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -30,9 +30,14 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 
+#define UNUSED_PARAM __attribute__((unused))
+
 namespace android {
 
-static const uint8_t kHevcNalUnitTypes[5] = {
+static const uint8_t kHevcNalUnitTypes[8] = {
+    kHevcNalUnitTypeCodedSliceIdr,
+    kHevcNalUnitTypeCodedSliceIdrNoLP,
+    kHevcNalUnitTypeCodedSliceCra,
     kHevcNalUnitTypeVps,
     kHevcNalUnitTypeSps,
     kHevcNalUnitTypePps,
@@ -375,8 +380,56 @@
     return reader.overRead() ? ERROR_MALFORMED : OK;
 }
 
+void HevcParameterSets::FindHEVCDimensions(const sp<ABuffer> &SpsBuffer, int32_t *width, int32_t *height)
+{
+    ALOGD("FindHEVCDimensions");
+    // See Rec. ITU-T H.265 v3 (04/2015) Chapter 7.3.2.2 for reference
+    ABitReader reader(SpsBuffer->data() + 1, SpsBuffer->size() - 1);
+    // Skip sps_video_parameter_set_id
+    reader.skipBits(4);
+    uint8_t maxSubLayersMinus1 = reader.getBitsWithFallback(3, 0);
+    // Skip sps_temporal_id_nesting_flag;
+    reader.skipBits(1);
+    // Skip general profile
+    reader.skipBits(96);
+    if (maxSubLayersMinus1 > 0) {
+        bool subLayerProfilePresentFlag[8];
+        bool subLayerLevelPresentFlag[8];
+        for (int i = 0; i < maxSubLayersMinus1; ++i) {
+            subLayerProfilePresentFlag[i] = reader.getBitsWithFallback(1, 0);
+            subLayerLevelPresentFlag[i] = reader.getBitsWithFallback(1, 0);
+        }
+        // Skip reserved
+        reader.skipBits(2 * (8 - maxSubLayersMinus1));
+        for (int i = 0; i < maxSubLayersMinus1; ++i) {
+            if (subLayerProfilePresentFlag[i]) {
+                // Skip profile
+                reader.skipBits(88);
+            }
+            if (subLayerLevelPresentFlag[i]) {
+                // Skip sub_layer_level_idc[i]
+                reader.skipBits(8);
+            }
+        }
+    }
+    // Skip sps_seq_parameter_set_id
+    skipUE(&reader);
+    uint8_t chromaFormatIdc = parseUEWithFallback(&reader, 0);
+    if (chromaFormatIdc == 3) {
+        // Skip separate_colour_plane_flag
+        reader.skipBits(1);
+    }
+    skipUE(&reader);
+    skipUE(&reader);
+
+    // pic_width_in_luma_samples
+    *width = parseUEWithFallback(&reader, 0);
+    // pic_height_in_luma_samples
+    *height = parseUEWithFallback(&reader, 0);
+}
+
 status_t HevcParameterSets::parsePps(
-        const uint8_t* data __unused, size_t size __unused) {
+        const uint8_t* data UNUSED_PARAM, size_t size UNUSED_PARAM) {
     return OK;
 }
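
FindHEVCDimensions() above reaches pic_width_in_luma_samples and pic_height_in_luma_samples by skipping or parsing a series of ue(v) fields, i.e. unsigned Exp-Golomb codes, via skipUE()/parseUEWithFallback(). For reference, ue(v) decoding itself is small; the following is a generic sketch over a toy bit reader, not the ABitReader API:

    #include <cstdint>
    #include <cstddef>
    #include <cstdio>

    // Minimal MSB-first bit reader over a byte buffer (illustrative only).
    struct BitReader {
        const uint8_t* data;
        size_t size;   // in bytes
        size_t pos;    // in bits
        uint32_t getBit() {
            if (pos >= size * 8) return 0;   // fall back to 0 past the end
            uint32_t b = (data[pos >> 3] >> (7 - (pos & 7))) & 1;
            ++pos;
            return b;
        }
    };

    // Exp-Golomb ue(v): count leading zero bits up to the first 1, then read
    // that many suffix bits and add them to (2^leadingZeros - 1).
    static uint32_t parseUE(BitReader& br) {
        int leadingZeros = 0;
        while (leadingZeros < 31 && br.getBit() == 0) ++leadingZeros;
        uint32_t value = (1u << leadingZeros) - 1;
        for (int i = leadingZeros - 1; i >= 0; --i) {
            value += br.getBit() << i;
        }
        return value;
    }

    int main() {
        const uint8_t bits[] = { 0x28 };       // 00101000: codeword "00101" = 4
        BitReader br{bits, sizeof(bits), 0};
        std::printf("ue(v) = %u\n", parseUE(br));
        return 0;
    }
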
 
@@ -489,4 +542,26 @@
     return OK;
 }
 
+bool HevcParameterSets::IsHevcIDR(const uint8_t *data, size_t size) {
+    bool foundIDR = false;
+    const uint8_t *nalStart;
+    size_t nalSize;
+    while (!foundIDR && getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+        if (nalSize == 0) {
+            ALOGE("Encountered zero-length HEVC NAL");
+            return false;
+        }
+
+        uint8_t nalType = (nalStart[0] & 0x7E) >> 1;
+        switch(nalType) {
+            case kHevcNalUnitTypeCodedSliceIdr:
+            case kHevcNalUnitTypeCodedSliceIdrNoLP:
+            case kHevcNalUnitTypeCodedSliceCra:
+                foundIDR = true;
+                break;
+        }
+    }
+
+    return foundIDR;
+}
 }  // namespace android
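
IsHevcIDR() above extracts the HEVC NAL unit type from the first header byte as (byte & 0x7E) >> 1 (the bit above it is forbidden_zero_bit) and treats IDR_W_RADL, IDR_N_LP and also CRA slices as sync points. A tiny stand-alone version of just that extraction; the type values come from ITU-T H.265 Table 7-1 and are inlined here for the sketch rather than taken from HevcUtils.h:

    #include <cstdint>
    #include <cstdio>

    // HEVC NAL unit types relevant to random access (ITU-T H.265, Table 7-1).
    enum : uint8_t {
        kIdrWRadl = 19,  // IDR_W_RADL
        kIdrNLp   = 20,  // IDR_N_LP
        kCra      = 21,  // CRA_NUT
    };

    // nal_unit_type lives in bits 6..1 of the first NAL header byte.
    static inline uint8_t hevcNalType(uint8_t firstByte) {
        return (firstByte & 0x7E) >> 1;
    }

    static bool isHevcSyncNal(uint8_t firstByte) {
        uint8_t t = hevcNalType(firstByte);
        return t == kIdrWRadl || t == kIdrNLp || t == kCra;
    }

    int main() {
        // 0x26 = 0b00100110 -> nal_unit_type 19 (IDR_W_RADL)
        std::printf("0x26 -> type %u, sync=%d\n",
                    (unsigned)hevcNalType(0x26), isHevcSyncNal(0x26));
        return 0;
    }
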
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 8f7d4bf..7c7fcac 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -519,12 +519,13 @@
     mSendNotify = false;
     mWriteSeekErr = false;
     mFallocateErr = false;
-
     // Reset following variables for all the sessions and they will be
     // initialized in start(MetaData *param).
     mIsRealTimeRecording = true;
+    mIsBackgroundMode = false;
     mUse4ByteNalLength = true;
     mOffset = 0;
+    mMaxOffsetAppend = 0;
     mPreAllocateFileEndOffset = 0;
     mMdatOffset = 0;
     mMdatEndOffset = 0;
@@ -542,6 +543,7 @@
     mNumGrids = 0;
     mNextItemId = kItemIdBase;
     mHasRefs = false;
+    mResetStatus = OK;
     mPreAllocFirstTime = true;
     mPrevAllTracksTotalMetaDataSizeEstimate = 0;
 
@@ -661,6 +663,14 @@
     sp<MetaData> meta = source->getFormat();
     meta->findCString(kKeyMIMEType, &mime);
 
+
+    // Background mode for media transcoding. If either the audio or the video track signals
+    // background mode, all the threads are set to run at background priority.
+    int32_t isBackgroundMode;
+    if (meta && meta->findInt32(kKeyBackgroundMode, &isBackgroundMode)) {
+        mIsBackgroundMode |= isBackgroundMode;
+    }
+
     if (Track::getFourCCForMime(mime) == NULL) {
         ALOGE("Unsupported mime '%s'", mime);
         return ERROR_UNSUPPORTED;
@@ -991,6 +1001,19 @@
         seekOrPostError(mFd, mFreeBoxOffset, SEEK_SET);
         writeInt32(mInMemoryCacheSize);
         write("free", 4);
+        if (mInMemoryCacheSize >= 8) {
+            off64_t bufSize = mInMemoryCacheSize - 8;
+            char* zeroBuffer = new (std::nothrow) char[bufSize];
+            if (zeroBuffer) {
+                std::fill_n(zeroBuffer, bufSize, '0');
+                writeOrPostError(mFd, zeroBuffer, bufSize);
+                delete [] zeroBuffer;
+            } else {
+                ALOGW("freebox in file isn't initialized to 0");
+            }
+        } else {
+            ALOGW("freebox size is less than 8:%" PRId64, mInMemoryCacheSize);
+        }
         mMdatOffset = mFreeBoxOffset + mInMemoryCacheSize;
     } else {
         mMdatOffset = mOffset;
@@ -1027,6 +1050,11 @@
     return OK;
 }
 
+status_t MPEG4Writer::stop() {
+    // If reset was in progress, wait for it to complete.
+    return reset(true, true);
+}
+
 status_t MPEG4Writer::pause() {
     ALOGW("MPEG4Writer: pause is not supported");
     return ERROR_UNSUPPORTED;
@@ -1131,11 +1159,24 @@
     if (!truncatePreAllocation()) {
         if (err == OK) { err = ERROR_IO; }
     }
+
+    // TODO(b/174770856) remove this measurement (and perhaps the fsync)
+    nsecs_t sync_started = systemTime(SYSTEM_TIME_REALTIME);
     if (fsync(mFd) != 0) {
         ALOGW("(ignored)fsync err:%s(%d)", std::strerror(errno), errno);
         // Don't bubble up fsync error, b/157291505.
         // if (err == OK) { err = ERROR_IO; }
     }
+    nsecs_t sync_finished = systemTime(SYSTEM_TIME_REALTIME);
+    nsecs_t sync_elapsed_ns = sync_finished - sync_started;
+    int64_t filesize = -1;
+    struct stat statbuf;
+    if (fstat(mFd, &statbuf) == 0) {
+        filesize = statbuf.st_size;
+    }
+    ALOGD("final fsync() takes %" PRId64 " ms, file size %" PRId64,
+          sync_elapsed_ns / 1000000, (int64_t) filesize);
+
     if (close(mFd) != 0) {
         ALOGE("close err:%s(%d)", std::strerror(errno), errno);
         if (err == OK) { err = ERROR_IO; }
@@ -1159,8 +1200,12 @@
     return err;
 }
 
-void MPEG4Writer::finishCurrentSession() {
-    reset(false /* stopSource */);
+status_t MPEG4Writer::finishCurrentSession() {
+    ALOGV("finishCurrentSession");
+    /* Don't wait if a reset is already in progress; that avoids a deadlock,
+     * as finishCurrentSession() is called from the control looper thread.
+     */
+    return reset(false, false);
 }
 
 status_t MPEG4Writer::switchFd() {
@@ -1182,11 +1227,32 @@
     return err;
 }
 
-status_t MPEG4Writer::reset(bool stopSource) {
+status_t MPEG4Writer::reset(bool stopSource, bool waitForAnyPreviousCallToComplete) {
     ALOGD("reset()");
-    std::lock_guard<std::mutex> l(mResetMutex);
+    std::unique_lock<std::mutex> lk(mResetMutex, std::defer_lock);
+    if (waitForAnyPreviousCallToComplete) {
+        /* stop=>reset from the client needs the return value of the reset call, hence wait here
+         * if a reset was already in progress.
+         */
+        lk.lock();
+    } else if (!lk.try_lock()) {
+        /* An internal reset from the control looper thread shouldn't wait for any reset
+         * already in progress.
+         */
+        return INVALID_OPERATION;
+    }
+
+    if (mResetStatus != OK) {
+        /* No need to proceed if a previous reset finished with an error.
+         * If there was no error before, proceeding with reset would be harmless, as
+         * the call would return from the mInitCheck condition below.
+         */
+        return mResetStatus;
+    }
+
     if (mInitCheck != OK) {
-        return OK;
+        mResetStatus = OK;
+        return mResetStatus;
     } else {
         if (!mWriterThreadStarted ||
             !mStarted) {
@@ -1198,7 +1264,8 @@
             if (writerErr != OK) {
                 retErr = writerErr;
             }
-            return retErr;
+            mResetStatus = retErr;
+            return mResetStatus;
         }
     }
 
@@ -1245,7 +1312,8 @@
     if (err != OK && err != ERROR_MALFORMED) {
         // Ignoring release() return value as there was an "err" already.
         release();
-        return err;
+        mResetStatus = err;
+        return mResetStatus;
     }
 
     // Fix up the size of the 'mdat' chunk.
@@ -1303,7 +1371,8 @@
     if (err == OK) {
         err = errRelease;
     }
-    return err;
+    mResetStatus = err;
+    return mResetStatus;
 }
 
 /*
@@ -1494,6 +1563,26 @@
         MediaBuffer *buffer, bool usePrefix,
         uint32_t tiffHdrOffset, size_t *bytesWritten) {
     off64_t old_offset = mOffset;
+    int64_t offset;
+    ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
+    if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
+        ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+        if (old_offset == offset) {
+            mOffset += buffer->range_length();
+        } else {
+            ALOGV("offset and old_offset are not equal! diff:%lld", (long long)offset - old_offset);
+            mOffset = offset + buffer->range_length();
+            // mOffset += buffer->range_length() + offset - old_offset;
+        }
+        *bytesWritten = buffer->range_length();
+        ALOGV("mOffset:%lld, mMaxOffsetAppend:%lld, bytesWritten:%lld", (long long)mOffset,
+                  (long long)mMaxOffsetAppend, (long long)*bytesWritten);
+        mMaxOffsetAppend = std::max(mOffset, mMaxOffsetAppend);
+        seekOrPostError(mFd, mMaxOffsetAppend, SEEK_SET);
+        return offset;
+    }
+
+    ALOGV("mOffset:%lld, mMaxOffsetAppend:%lld", (long long)mOffset, (long long)mMaxOffsetAppend);
 
     if (usePrefix) {
         addMultipleLengthPrefixedSamples_l(buffer);
@@ -1510,6 +1599,10 @@
         mOffset += buffer->range_length();
     }
     *bytesWritten = mOffset - old_offset;
+
+    ALOGV("mOffset:%lld, old_offset:%lld, bytesWritten:%lld", (long long)mOffset,
+          (long long)old_offset, (long long)*bytesWritten);
+
     return old_offset;
 }
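
For samples that arrive with kKeySampleFileOffset set (the append path fed by MediaAppender), addSample_l() above does not write any payload: it records the sample at its existing offset, advances mOffset past it, keeps the high-water mark in mMaxOffsetAppend, and seeks there so that subsequently written samples land after the pre-existing data. A simplified, stand-alone model of that bookkeeping, with plain integers instead of MediaBuffer and no real I/O:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Simplified model of the append-path offset bookkeeping in addSample_l().
    struct AppendOffsets {
        int64_t mOffset = 0;           // next write position for newly muxed data
        int64_t mMaxOffsetAppend = 0;  // end of data that already exists in the file

        // Sample already present in the file: record it without rewriting bytes.
        int64_t addExistingSample(int64_t fileOffset, int64_t length) {
            mOffset = fileOffset + length;
            mMaxOffsetAppend = std::max(mOffset, mMaxOffsetAppend);
            // (the real code also lseek()s to mMaxOffsetAppend here)
            return fileOffset;         // chunk/sample tables point at the old bytes
        }

        // New sample: write it at the current offset (file I/O elided).
        int64_t addNewSample(int64_t length) {
            int64_t writtenAt = mOffset;
            mOffset += length;
            return writtenAt;
        }
    };

    int main() {
        AppendOffsets off;
        off.addExistingSample(/*fileOffset=*/4096, /*length=*/512);  // old sample ends at 4608
        int64_t at = off.addNewSample(/*length=*/256);               // new data starts at 4608
        std::printf("new sample at %lld, high-water mark %lld\n",
                    (long long)at, (long long)off.mMaxOffsetAppend);
        return 0;
    }
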
 
@@ -1522,6 +1615,7 @@
         (const uint8_t *)buffer->data() + buffer->range_offset();
 
     if (!memcmp(ptr, "\x00\x00\x00\x01", 4)) {
+        ALOGV("stripping start code");
         buffer->set_range(
                 buffer->range_offset() + 4, buffer->range_length() - 4);
     }
@@ -1552,8 +1646,10 @@
 }
 
 void MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+    ALOGV("alp:buffer->range_length:%lld", (long long)buffer->range_length());
     size_t length = buffer->range_length();
     if (mUse4ByteNalLength) {
+        ALOGV("mUse4ByteNalLength");
         uint8_t x[4];
         x[0] = length >> 24;
         x[1] = (length >> 16) & 0xff;
@@ -1563,6 +1659,7 @@
         writeOrPostError(mFd, (const uint8_t*)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 4;
     } else {
+        ALOGV("mUse2ByteNalLength");
         CHECK_LT(length, 65536u);
 
         uint8_t x[2];
@@ -2221,7 +2318,11 @@
     if (mLooper == nullptr) {
         mLooper = new ALooper;
         mLooper->setName("MP4WtrCtrlHlpLooper");
-        err = mLooper->start();
+        if (mIsBackgroundMode) {
+            err = mLooper->start(false, false, ANDROID_PRIORITY_BACKGROUND);
+        } else {
+            err = mLooper->start();
+        }
         mReflector = new AHandlerReflector<MPEG4Writer>(this);
         mLooper->registerHandler(mReflector);
     }
@@ -2454,31 +2555,27 @@
             int fd = mNextFd;
             mNextFd = -1;
             mLock.unlock();
-            finishCurrentSession();
-            initInternal(fd, false /*isFirstSession*/);
-            start(mStartMeta.get());
-            mSwitchPending = false;
-            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+            if (finishCurrentSession() == OK) {
+                initInternal(fd, false /*isFirstSession*/);
+                status_t status = start(mStartMeta.get());
+                mSwitchPending = false;
+                if (status == OK)  {
+                    notify(MEDIA_RECORDER_EVENT_INFO,
+                           MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED, 0);
+                }
+            }
             break;
         }
-        // ::write() or lseek64() wasn't a success, file could be malformed
+        /* ::write() or lseek64() wasn't a success; the file could be malformed.
+         * Or fallocate() failed. reset() and notify the client in both cases.
+         */
+        case kWhatFallocateError: // fallthrough
         case kWhatIOError: {
-            ALOGE("kWhatIOError");
             int32_t err;
             CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
-            break;
-        }
-        // fallocate() failed, hence stop() and notify app.
-        case kWhatFallocateError: {
-            ALOGE("kWhatFallocateError");
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-            // Stop tracks' threads and main writer thread.
-            stop();
-            //TODO: introduce a suitable MEDIA_RECORDER_ERROR_* instead MEDIA_RECORDER_ERROR_UNKNOWN?
+            // If a reset is already in progress, don't wait for it to complete, to avoid deadlock.
+            reset(true, false);
+            //TODO: a new MEDIA_RECORDER_ERROR_**** instead of MEDIA_RECORDER_ERROR_UNKNOWN?
             notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_RECORDER_ERROR_UNKNOWN, err);
             break;
         }
@@ -2486,7 +2583,7 @@
          * Responding with other options could be added later if required.
          */
         case kWhatNoIOErrorSoFar: {
-            ALOGD("kWhatNoIOErrorSoFar");
+            ALOGV("kWhatNoIOErrorSoFar");
             sp<AMessage> response = new AMessage;
             response->setInt32("err", OK);
             sp<AReplyToken> replyID;
@@ -2694,6 +2791,11 @@
 
     prctl(PR_SET_NAME, (unsigned long)"MPEG4Writer", 0, 0, 0);
 
+    if (mIsBackgroundMode) {
+        // Background priority for media transcoding.
+        androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
+    }
+
     Mutex::Autolock autoLock(mLock);
     while (!mDone) {
         Chunk chunk;
@@ -2719,6 +2821,9 @@
     }
 
     writeAllChunks();
+    ALOGV("threadFunc mOffset:%lld, mMaxOffsetAppend:%lld", (long long)mOffset,
+          (long long)mMaxOffsetAppend);
+    mOffset = std::max(mOffset, mMaxOffsetAppend);
 }
 
 status_t MPEG4Writer::startWriterThread() {
@@ -2774,7 +2879,7 @@
         // even if the file is well-formed and the primary picture is correct.
 
         // Reserve item ids for samples + grid
-        size_t numItemsToReserve = mNumTiles + (mNumTiles > 1);
+        size_t numItemsToReserve = mNumTiles + (mNumTiles > 0);
         status_t err = mOwner->reserveItemId_l(numItemsToReserve, &mItemIdBase);
         if (err != OK) {
             return err;
@@ -3280,6 +3385,7 @@
     uint32_t lastSamplesPerChunk = 0;
     int64_t lastSampleDurationUs = -1;      // Duration calculated from EOS buffer and its timestamp
     int64_t lastSampleDurationTicks = -1;   // Timescale based ticks
+    int64_t sampleFileOffset = -1;
 
     if (mIsAudio) {
         prctl(PR_SET_NAME, (unsigned long)"MP4WtrAudTrkThread", 0, 0, 0);
@@ -3291,6 +3397,9 @@
 
     if (mOwner->isRealTimeRecording()) {
         androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
+    } else if (mOwner->isBackgroundMode()) {
+        // Background priority for media transcoding.
+        androidSetThreadPriority(0 /* tid (0 = current) */, ANDROID_PRIORITY_BACKGROUND);
     }
 
     sp<MetaData> meta_data;
@@ -3299,6 +3408,7 @@
     MediaBufferBase *buffer;
     const char *trackName = getTrackType();
     while (!mDone && (err = mSource->read(&buffer)) == OK) {
+        ALOGV("read:buffer->range_length:%lld", (long long)buffer->range_length());
         int32_t isEOS = false;
         if (buffer->range_length() == 0) {
             if (buffer->meta_data().findInt32(kKeyIsEndOfStream, &isEOS) && isEOS) {
@@ -3405,6 +3515,14 @@
                 continue;
             }
         }
+        if (!buffer->meta_data().findInt64(kKeySampleFileOffset, &sampleFileOffset)) {
+            sampleFileOffset = -1;
+        }
+        int64_t lastSample = -1;
+        if (!buffer->meta_data().findInt64(kKeyLastSampleIndexInChunk, &lastSample)) {
+            lastSample = -1;
+        }
+        ALOGV("sampleFileOffset:%lld", (long long)sampleFileOffset);
 
         /*
          * Reserve space in the file for the current sample + to be written MOOV box. If reservation
@@ -3412,7 +3530,7 @@
          * write MOOV box successfully as space for the same was reserved in the prior call.
          * Release the current buffer/sample here.
          */
-        if (!mOwner->preAllocate(buffer->range_length())) {
+        if (sampleFileOffset == -1 && !mOwner->preAllocate(buffer->range_length())) {
             buffer->release();
             buffer = nullptr;
             break;
@@ -3423,9 +3541,14 @@
         // Make a deep copy of the MediaBuffer and Metadata and release
         // the original as soon as we can
         MediaBuffer *copy = new MediaBuffer(buffer->range_length());
-        memcpy(copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(),
-                buffer->range_length());
+        if (sampleFileOffset != -1) {
+            copy->meta_data().setInt64(kKeySampleFileOffset, sampleFileOffset);
+        } else {
+            memcpy(copy->data(), (uint8_t*)buffer->data() + buffer->range_offset(),
+                   buffer->range_length());
+        }
         copy->set_range(0, buffer->range_length());
+
         meta_data = new MetaData(buffer->meta_data());
         buffer->release();
         buffer = NULL;
@@ -3433,14 +3556,16 @@
             copy->meta_data().setInt32(kKeyExifTiffOffset, tiffHdrOffset);
         }
         bool usePrefix = this->usePrefix() && !isExif;
-
-        if (usePrefix) StripStartcode(copy);
-
+        if (sampleFileOffset == -1 && usePrefix) {
+            StripStartcode(copy);
+        }
         size_t sampleSize = copy->range_length();
-        if (usePrefix) {
+        if (sampleFileOffset == -1 && usePrefix) {
             if (mOwner->useNalLengthFour()) {
+                ALOGV("nallength4");
                 sampleSize += 4;
             } else {
+                ALOGV("nallength2");
                 sampleSize += 2;
             }
         }
@@ -3735,7 +3860,8 @@
                 chunkTimestampUs = timestampUs;
             } else {
                 int64_t chunkDurationUs = timestampUs - chunkTimestampUs;
-                if (chunkDurationUs > interleaveDurationUs) {
+                if (chunkDurationUs > interleaveDurationUs || lastSample > 1) {
+                    ALOGV("lastSample:%lld", (long long)lastSample);
                     if (chunkDurationUs > mMaxChunkDurationUs) {
                         mMaxChunkDurationUs = chunkDurationUs;
                     }
@@ -3971,6 +4097,10 @@
     return mIsRealTimeRecording;
 }
 
+bool MPEG4Writer::isBackgroundMode() const {
+    return mIsBackgroundMode;
+}
+
 bool MPEG4Writer::useNalLengthFour() {
     return mUse4ByteNalLength;
 }
@@ -4217,13 +4347,20 @@
 void MPEG4Writer::Track::writeColrBox() {
     ColorAspects aspects;
     memset(&aspects, 0, sizeof(aspects));
+    // Color metadata may have changed.
+    sp<MetaData> meta = mSource->getFormat();
     // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (mMeta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | mMeta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | mMeta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | mMeta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
+    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
+            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
+            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
+            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
         int32_t primaries, transfer, coeffs;
         bool fullRange;
+        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+                asString(aspects.mPrimaries),
+                asString(aspects.mTransfer),
+                asString(aspects.mMatrixCoeffs),
+                asString(aspects.mRange));
         ColorUtils::convertCodecColorAspectsToIsoAspects(
                 aspects, &primaries, &transfer, &coeffs, &fullRange);
         mOwner->beginBox("colr");
@@ -4233,6 +4370,8 @@
         mOwner->writeInt16(coeffs);
         mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
         mOwner->endBox(); // colr
+    } else {
+        ALOGV("no color information");
     }
 }
 
@@ -4715,10 +4854,18 @@
 
 // This is useful if the pixel is not square
 void MPEG4Writer::Track::writePaspBox() {
-    mOwner->beginBox("pasp");
-    mOwner->writeInt32(1 << 16);  // hspacing
-    mOwner->writeInt32(1 << 16);  // vspacing
-    mOwner->endBox();  // pasp
+    // Do not write 'pasp' box unless the track format specifies it.
+    // According to ISO/IEC 14496-12 (ISO base media file format), 'pasp' box
+    // is optional. If present, it overrides the SAR from the video CSD. Only
+    // set it if the track format specifically requests that.
+    int32_t hSpacing, vSpacing;
+    if (mMeta->findInt32(kKeySARWidth, &hSpacing) && (hSpacing > 0)
+            && mMeta->findInt32(kKeySARHeight, &vSpacing) && (vSpacing > 0)) {
+        mOwner->beginBox("pasp");
+        mOwner->writeInt32(hSpacing);  // hspacing
+        mOwner->writeInt32(vSpacing);  // vspacing
+        mOwner->endBox();  // pasp
+    }
 }
 
 int64_t MPEG4Writer::Track::getStartTimeOffsetTimeUs() const {
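
reset() above now takes two flags: stop() passes waitForAnyPreviousCallToComplete=true and blocks on mResetMutex, while internal callers on the control looper use try_lock() and return INVALID_OPERATION rather than risk a deadlock, and a failed reset is latched in mResetStatus so later callers see the same error. A compact sketch of that gating pattern with standard library primitives; status codes and class names here are simplified stand-ins:

    #include <cstdio>
    #include <mutex>

    // Sketch of the reset() gating pattern: blocking callers wait for any
    // in-flight reset, non-blocking callers bail out, and a failed reset's
    // status is latched for everyone who asks later.
    class ResetGate {
    public:
        static constexpr int kOk = 0;
        static constexpr int kInvalidOperation = -1;

        int reset(bool waitForPrevious) {
            std::unique_lock<std::mutex> lk(mMutex, std::defer_lock);
            if (waitForPrevious) {
                lk.lock();                 // stop() path: wait for a prior reset to finish
            } else if (!lk.try_lock()) {
                return kInvalidOperation;  // looper path: never wait, never deadlock
            }
            if (mStatus != kOk) {
                return mStatus;            // a previous reset already failed; report that
            }
            mStatus = doResetLocked();
            return mStatus;
        }

    private:
        int doResetLocked() {
            std::puts("performing reset");
            return kOk;
        }

        std::mutex mMutex;
        int mStatus = kOk;
    };

    int main() {
        ResetGate gate;
        std::printf("first reset -> %d\n", gate.reset(/*waitForPrevious=*/true));
        std::printf("second reset -> %d\n", gate.reset(/*waitForPrevious=*/false));
        return 0;
    }
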
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index f1b6e8c..5a2a910 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -114,6 +114,13 @@
         return -EINVAL;
     }
 
+    /* As mAdapterLock is unlocked while waiting for signalBufferReturned,
+     * a new buffer for the same track could be pushed from another thread
+     * in the client process; mBufferGatingMutex holds that buffer back
+     * until the previous one has been processed.
+     */
+    std::unique_lock<std::mutex> lk(mBufferGatingMutex);
+
     Mutex::Autolock autoLock(mAdapterLock);
     if (!mStarted) {
         ALOGE("pushBuffer called before start");
diff --git a/media/libstagefright/MediaAppender.cpp b/media/libstagefright/MediaAppender.cpp
new file mode 100644
index 0000000..5d80b30
--- /dev/null
+++ b/media/libstagefright/MediaAppender.cpp
@@ -0,0 +1,425 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaAppender"
+
+#include <media/stagefright/MediaAppender.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <utils/Log.h>
+// TODO : check if this works for NDK apps without JVM
+// #include <media/ndk/NdkJavaVMHelperPriv.h>
+
+namespace android {
+
+struct MediaAppender::sampleDataInfo {
+    size_t size;
+    int64_t time;
+    size_t exTrackIndex;
+    sp<MetaData> meta;
+};
+
+sp<MediaAppender> MediaAppender::create(int fd, AppendMode mode) {
+    if (fd < 0) {
+        ALOGE("invalid file descriptor");
+        return nullptr;
+    }
+    if (!(mode >= APPEND_MODE_FIRST && mode <= APPEND_MODE_LAST)) {
+        ALOGE("invalid mode %d", mode);
+        return nullptr;
+    }
+    sp<MediaAppender> ma = new (std::nothrow) MediaAppender(fd, mode);
+    if (ma->init() != OK) {
+        return nullptr;
+    }
+    return ma;
+}
+
+// TODO: inject mediamuxer and mediaextractor objects.
+// TODO: @format is not required as an input if we can sniff the file and find the format of
+//       the existing content.
+// TODO: Code it to the interface (MediaAppender), and have a separate MediaAppender NDK
+MediaAppender::MediaAppender(int fd, AppendMode mode)
+    : mFd(fd),
+      mMode(mode),
+      // TODO : check if this works for NDK apps without JVM
+      // mExtractor(new NuMediaExtractor(NdkJavaVMHelper::getJNIEnv() != nullptr
+      //           ? NuMediaExtractor::EntryPoint::NDK_WITH_JVM
+      //           : NuMediaExtractor::EntryPoint::NDK_NO_JVM)),
+      mExtractor(new (std::nothrow) NuMediaExtractor(NuMediaExtractor::EntryPoint::NDK_WITH_JVM)),
+      mTrackCount(0),
+      mState(UNINITIALIZED) {
+    ALOGV("MediaAppender::MediaAppender mode:%d", mode);
+}
+
+status_t MediaAppender::init() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::init");
+    status_t status = mExtractor->setDataSource(mFd, 0, lseek(mFd, 0, SEEK_END));
+    if (status != OK) {
+        ALOGE("extractor_setDataSource failed, status :%d", status);
+        return status;
+    }
+
+    if (strcmp("MPEG4Extractor", mExtractor->getName()) == 0) {
+        mFormat = MediaMuxer::OUTPUT_FORMAT_MPEG_4;
+    } else {
+        ALOGE("Unsupported format, extractor name:%s", mExtractor->getName());
+        return ERROR_UNSUPPORTED;
+    }
+
+    mTrackCount = mExtractor->countTracks();
+    ALOGV("mTrackCount:%zu", mTrackCount);
+    if (mTrackCount == 0) {
+        ALOGE("no tracks are present");
+        return ERROR_MALFORMED;
+    }
+    size_t exTrackIndex = 0;
+    ssize_t audioTrackIndex = -1, videoTrackIndex = -1;
+    bool audioSyncSampleTimeSet = false;
+
+    while (exTrackIndex < mTrackCount) {
+        sp<AMessage> fmt;
+        status = mExtractor->getTrackFormat(exTrackIndex, &fmt, 0);
+        if (status != OK) {
+            ALOGE("getTrackFormat failed for trackIndex:%zu, status:%d", exTrackIndex, status);
+            return status;
+        }
+        AString mime;
+        if (fmt->findString("mime", &mime)) {
+            if (!strncasecmp(mime.c_str(), "video/", 6)) {
+                ALOGV("VideoTrack");
+                if (videoTrackIndex != -1) {
+                    ALOGE("Not more than one video track is supported");
+                    return ERROR_UNSUPPORTED;
+                }
+                videoTrackIndex = exTrackIndex;
+            } else if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+                ALOGV("AudioTrack");
+                if (audioTrackIndex != -1) {
+                    ALOGE("Not more than one audio track is supported");
+                }
+                audioTrackIndex = exTrackIndex;
+            } else {
+                ALOGV("Neither Video nor Audio track");
+            }
+        }
+        mFmtIndexMap.emplace(exTrackIndex, fmt);
+        mSampleCountVect.emplace_back(0);
+        mMaxTimestampVect.emplace_back(0);
+        mLastSyncSampleTimeVect.emplace_back(0);
+        status = mExtractor->selectTrack(exTrackIndex);
+        if (status != OK) {
+            ALOGE("selectTrack failed for trackIndex:%zu, status:%d", exTrackIndex, status);
+            return status;
+        }
+        ++exTrackIndex;
+    }
+
+    ALOGV("AudioTrackIndex:%zu, VideoTrackIndex:%zu", audioTrackIndex, videoTrackIndex);
+
+    do {
+        sampleDataInfo tmpSDI;
+        // TODO: read info into members of the struct sampleDataInfo directly
+        size_t sampleSize;
+        status = mExtractor->getSampleSize(&sampleSize);
+        if (status != OK) {
+            ALOGE("getSampleSize failed, status:%d", status);
+            return status;
+        }
+        mSampleSizeVect.emplace_back(sampleSize);
+        tmpSDI.size = sampleSize;
+        int64_t sampleTime = 0;
+        status = mExtractor->getSampleTime(&sampleTime);
+        if (status != OK) {
+            ALOGE("getSampleTime failed, status:%d", status);
+            return status;
+        }
+        mSampleTimeVect.emplace_back(sampleTime);
+        tmpSDI.time = sampleTime;
+        status = mExtractor->getSampleTrackIndex(&exTrackIndex);
+        if (status != OK) {
+            ALOGE("getSampleTrackIndex failed, status:%d", status);
+            return status;
+        }
+        mSampleIndexVect.emplace_back(exTrackIndex);
+        tmpSDI.exTrackIndex = exTrackIndex;
+        ++mSampleCountVect[exTrackIndex];
+        mMaxTimestampVect[exTrackIndex] = std::max(mMaxTimestampVect[exTrackIndex], sampleTime);
+        sp<MetaData> sampleMeta;
+        status = mExtractor->getSampleMeta(&sampleMeta);
+        if (status != OK) {
+            ALOGE("getSampleMeta failed, status:%d", status);
+            return status;
+        }
+        mSampleMetaVect.emplace_back(sampleMeta);
+        int32_t val = 0;
+        if (sampleMeta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+            mLastSyncSampleTimeVect[exTrackIndex] = sampleTime;
+        }
+        tmpSDI.meta = sampleMeta;
+        mSDI.emplace_back(tmpSDI);
+    } while (mExtractor->advance() == OK);
+
+    mExtractor.clear();
+
+    std::sort(mSDI.begin(), mSDI.end(), [](sampleDataInfo& a, sampleDataInfo& b) {
+        int64_t aOffset, bOffset;
+        a.meta->findInt64(kKeySampleFileOffset, &aOffset);
+        b.meta->findInt64(kKeySampleFileOffset, &bOffset);
+        return aOffset < bOffset;
+    });
+    for (int64_t syncSampleTime : mLastSyncSampleTimeVect) {
+        ALOGV("before ignoring frames, mLastSyncSampleTimeVect:%lld", (long long)syncSampleTime);
+    }
+    ALOGV("mMode:%u", mMode);
+    if (mMode == APPEND_MODE_IGNORE_LAST_VIDEO_GOP && videoTrackIndex != -1 ) {
+        ALOGV("Video track is present");
+        bool lastVideoIframe = false;
+        size_t lastVideoIframeOffset = 0;
+        int64_t lastVideoSampleTime = -1;
+        for (auto rItr = mSDI.rbegin(); rItr != mSDI.rend(); ++rItr) {
+            if (rItr->exTrackIndex != videoTrackIndex) {
+                continue;
+            }
+            if (lastVideoSampleTime == -1) {
+                lastVideoSampleTime = rItr->time;
+            }
+            int64_t offset = 0;
+            if (!rItr->meta->findInt64(kKeySampleFileOffset, &offset) || offset == 0) {
+                ALOGE("Missing offset");
+                return ERROR_MALFORMED;
+            }
+            ALOGV("offset:%lld", (long long)offset);
+            int32_t val = 0;
+            if (rItr->meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+                ALOGV("sampleTime:%lld", (long long)rItr->time);
+                ALOGV("lastVideoSampleTime:%lld", (long long)lastVideoSampleTime);
+                if (lastVideoIframe == false && (lastVideoSampleTime - rItr->time) >
+                                1000000/* Track interleaving duration in MPEG4Writer*/) {
+                    ALOGV("lastVideoIframe got chosen");
+                    lastVideoIframe = true;
+                    mLastSyncSampleTimeVect[videoTrackIndex] = rItr->time;
+                    lastVideoIframeOffset = offset;
+                    ALOGV("lastVideoIframeOffset:%lld", (long long)offset);
+                    break;
+                }
+            }
+        }
+        if (lastVideoIframe == false) {
+            ALOGV("Need to rewrite all samples");
+            mLastSyncSampleTimeVect[videoTrackIndex] = 0;
+            lastVideoIframeOffset = 0;
+        }
+        unsigned int framesIgnoredCount = 0;
+        for (auto itr = mSDI.begin(); itr != mSDI.end();) {
+            int64_t offset = 0;
+            ALOGV("trackIndex:%zu, %" PRId64 "", itr->exTrackIndex, itr->time);
+            if (itr->meta->findInt64(kKeySampleFileOffset, &offset) &&
+                                        offset >= lastVideoIframeOffset) {
+                ALOGV("offset:%lld", (long long)offset);
+                if (!audioSyncSampleTimeSet && audioTrackIndex != -1 &&
+                                            audioTrackIndex == itr->exTrackIndex) {
+                    mLastSyncSampleTimeVect[audioTrackIndex] = itr->time;
+                    audioSyncSampleTimeSet = true;
+                }
+                itr = mSDI.erase(itr);
+                ++framesIgnoredCount;
+            } else {
+                ++itr;
+            }
+        }
+        ALOGV("framesIgnoredCount:%u", framesIgnoredCount);
+    }
+
+    if (mMode == APPEND_MODE_IGNORE_LAST_VIDEO_GOP && videoTrackIndex == -1 &&
+                            audioTrackIndex != -1) {
+        ALOGV("Only AudioTrack is present");
+        for (auto rItr = mSDI.rbegin(); rItr != mSDI.rend();  ++rItr) {
+            int32_t val = 0;
+            if (rItr->meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+                    mLastSyncSampleTimeVect[audioTrackIndex] = rItr->time;
+                    break;
+            }
+        }
+        unsigned int framesIgnoredCount = 0;
+        for (auto itr = mSDI.begin(); itr != mSDI.end();) {
+            if (itr->time >= mLastSyncSampleTimeVect[audioTrackIndex]) {
+                itr = mSDI.erase(itr);
+                ++framesIgnoredCount;
+            } else {
+                ++itr;
+            }
+        }
+        ALOGV("framesIgnoredCount :%u", framesIgnoredCount);
+    }
+
+    for (size_t i = 0; i < mLastSyncSampleTimeVect.size(); ++i) {
+        ALOGV("mLastSyncSampleTimeVect[%zu]:%lld", i, (long long)mLastSyncSampleTimeVect[i]);
+        mFmtIndexMap[i]->setInt64(
+                "sample-time-before-append" /*AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND*/,
+                mLastSyncSampleTimeVect[i]);
+    }
+    for (size_t i = 0; i < mMaxTimestampVect.size(); ++i) {
+        ALOGV("mMaxTimestamp[%zu]:%lld", i, (long long)mMaxTimestampVect[i]);
+    }
+    for (size_t i = 0; i < mSampleCountVect.size(); ++i) {
+        ALOGV("SampleCountVect[%zu]:%zu", i, mSampleCountVect[i]);
+    }
+    mState = INITIALIZED;
+    return OK;
+}
+
+MediaAppender::~MediaAppender() {
+    ALOGV("MediaAppender::~MediaAppender");
+    mMuxer.clear();
+    mExtractor.clear();
+}
+
+status_t MediaAppender::start() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::start");
+    if (mState != INITIALIZED) {
+        ALOGE("MediaAppender::start() is called in invalid state %d", mState);
+        return INVALID_OPERATION;
+    }
+    mMuxer = new (std::nothrow) MediaMuxer(mFd, mFormat);
+    for (const auto& n : mFmtIndexMap) {
+        ssize_t muxIndex = mMuxer->addTrack(n.second);
+        if (muxIndex < 0) {
+            ALOGE("addTrack failed");
+            return UNKNOWN_ERROR;
+        }
+        mTrackIndexMap.emplace(n.first, muxIndex);
+    }
+    ALOGV("trackIndexmap size:%zu", mTrackIndexMap.size());
+
+    status_t status = mMuxer->start();
+    if (status != OK) {
+        ALOGE("muxer start failed:%d", status);
+        return status;
+    }
+
+    ALOGV("Sorting samples based on their offsets");
+    for (int i = 0; i < mSDI.size(); ++i) {
+        ALOGV("i:%d", i + 1);
+        /* TODO : Allocate a single allocation of the max size, and reuse it across ABuffers if
+         * using new ABuffer(void *, size_t).
+         */
+        sp<ABuffer> data = new (std::nothrow) ABuffer(mSDI[i].size);
+        if (data == nullptr) {
+            ALOGE("memory allocation failed");
+            return NO_MEMORY;
+        }
+        data->setRange(0, mSDI[i].size);
+        int32_t val = 0;
+        int sampleFlags = 0;
+        if (mSDI[i].meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+            sampleFlags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+        }
+
+        int64_t val64;
+        if (mSDI[i].meta->findInt64(kKeySampleFileOffset, &val64)) {
+            ALOGV("SampleFileOffset Found :%zu:%lld:%lld", mSDI[i].exTrackIndex,
+                  (long long)mSampleCountVect[mSDI[i].exTrackIndex], (long long)val64);
+            sp<AMessage> bufMeta = data->meta();
+            bufMeta->setInt64("sample-file-offset" /*AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND*/,
+                              val64);
+        }
+        if (mSDI[i].meta->findInt64(kKeyLastSampleIndexInChunk, &val64)) {
+            ALOGV("kKeyLastSampleIndexInChunk Found %lld:%lld",
+                  (long long)mSampleCountVect[mSDI[i].exTrackIndex], (long long)val64);
+            sp<AMessage> bufMeta = data->meta();
+            bufMeta->setInt64(
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    val64);
+        }
+        status = mMuxer->writeSampleData(data, mTrackIndexMap[mSDI[i].exTrackIndex], mSDI[i].time,
+                                         sampleFlags);
+        if (status != OK) {
+            ALOGE("muxer writeSampleData failed:%d", status);
+            return status;
+        }
+    }
+    mState = STARTED;
+    return OK;
+}
+
+status_t MediaAppender::stop() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::stop");
+    if (mState == STARTED) {
+        status_t status = mMuxer->stop();
+        if (status != OK) {
+            mState = ERROR;
+        } else {
+            mState = STOPPED;
+        }
+        return status;
+    } else {
+        ALOGE("stop() is called in invalid state %d", mState);
+        return INVALID_OPERATION;
+    }
+}
+
+ssize_t MediaAppender::getTrackCount() {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::getTrackCount");
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackCount() is called in invalid state %d", mState);
+        return -1;
+    }
+    return mTrackCount;
+}
+
+sp<AMessage> MediaAppender::getTrackFormat(size_t idx) {
+    std::scoped_lock lock(mMutex);
+    ALOGV("MediaAppender::getTrackFormat");
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackFormat() is called in invalid state %d", mState);
+        return nullptr;
+    }
+    if (idx < 0 || idx >= mTrackCount) {
+        ALOGE("getTrackFormat() idx is out of range");
+        return nullptr;
+    }
+    return mFmtIndexMap[idx];
+}
+
+status_t MediaAppender::writeSampleData(const sp<ABuffer>& buffer, size_t trackIndex,
+                                        int64_t timeUs, uint32_t flags) {
+    std::scoped_lock lock(mMutex);
+    ALOGV("writeSampleData:trackIndex:%zu, time:%" PRId64 "", trackIndex, timeUs);
+    return mMuxer->writeSampleData(buffer, trackIndex, timeUs, flags);
+}
+
+status_t MediaAppender::setOrientationHint([[maybe_unused]] int degrees) {
+    ALOGE("setOrientationHint not supported. Has to be called prior to start on initial muxer");
+    return ERROR_UNSUPPORTED;
+};
+
+status_t MediaAppender::setLocation([[maybe_unused]] int latit, [[maybe_unused]] int longit) {
+    ALOGE("setLocation not supported. Has to be called prior to start on initial muxer");
+    return ERROR_UNSUPPORTED;
+}
+
+ssize_t MediaAppender::addTrack([[maybe_unused]] const sp<AMessage> &format) {
+    ALOGE("addTrack not supported");
+    return ERROR_UNSUPPORTED;
+}
+
+}  // namespace android
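
MediaAppender::init() above walks every sample with NuMediaExtractor, records size, time, track and metadata per sample, then sorts them by kKeySampleFileOffset so they can be re-fed to MediaMuxer in on-disk order; in IGNORE_LAST_VIDEO_GOP mode it also finds the last video sync sample that is at least one interleave period older than the final video sample and drops everything at or beyond its offset. A compact illustration of that selection over plain structs; this is not the NuMediaExtractor/MediaMuxer API, and the struct and function names are ours:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Minimal stand-in for the per-sample bookkeeping in MediaAppender::init().
    struct SampleInfo {
        int64_t offset;     // kKeySampleFileOffset
        int64_t timeUs;
        bool    isSync;     // kKeyIsSyncFrame
        bool    isVideo;
    };

    // Returns the samples to keep, in file-offset order, dropping the last
    // (possibly partial) video GOP. kInterleaveUs mirrors the 1 s track
    // interleaving duration used by MPEG4Writer.
    std::vector<SampleInfo> keepUpToLastStableGop(std::vector<SampleInfo> samples) {
        constexpr int64_t kInterleaveUs = 1000000;
        std::sort(samples.begin(), samples.end(),
                  [](const SampleInfo& a, const SampleInfo& b) { return a.offset < b.offset; });

        int64_t lastVideoTimeUs = -1;
        int64_t cutOffset = 0;                     // 0 => rewrite everything
        for (auto it = samples.rbegin(); it != samples.rend(); ++it) {
            if (!it->isVideo) continue;
            if (lastVideoTimeUs == -1) lastVideoTimeUs = it->timeUs;
            if (it->isSync && lastVideoTimeUs - it->timeUs > kInterleaveUs) {
                cutOffset = it->offset;            // last I-frame safely behind the tail
                break;
            }
        }
        std::vector<SampleInfo> kept;
        for (const auto& s : samples) {
            if (s.offset < cutOffset) kept.push_back(s);
        }
        return kept;
    }

    int main() {
        std::vector<SampleInfo> samples = {
            {0,    0,       true,  true},   // first video I-frame
            {1000, 0,       true,  false},  // audio
            {2000, 2000000, true,  true},   // I-frame more than 1 s before the tail
            {3000, 3500000, false, true},   // trailing P-frame (incomplete GOP tail)
        };
        auto kept = keepUpToLastStableGop(samples);
        std::printf("kept %zu of %zu samples\n", kept.size(), samples.size());
        return 0;
    }
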
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 94c0b84..c03236a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,13 +19,20 @@
 #define LOG_TAG "MediaCodec"
 #include <utils/Log.h>
 
+#include <set>
+#include <stdlib.h>
+
 #include <inttypes.h>
 #include <stdlib.h>
+#include <dlfcn.h>
 
 #include <C2Buffer.h>
 
 #include "include/SoftwareRenderer.h"
+#include "PlaybackDurationAccumulator.h"
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
 
@@ -33,7 +40,9 @@
 #include <aidl/android/media/IResourceManagerService.h>
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
+#include <android/dlext.h>
 #include <binder/IMemory.h>
+#include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
 #include <cutils/properties.h>
 #include <gui/BufferQueue.h>
@@ -45,6 +54,10 @@
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetricsItem.h>
 #include <media/MediaResource.h>
+#include <media/NdkMediaErrorPriv.h>
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/formatshaper/FormatShaper.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -59,12 +72,14 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaFilter.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
 #include <media/stagefright/SurfaceUtils.h>
+#include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
 #include <utils/Singleton.h>
 
@@ -81,6 +96,7 @@
 // NB: these are matched with public Java API constants defined
 // in frameworks/base/media/java/android/media/MediaCodec.java
 // These must be kept synchronized with the constants there.
+static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
 static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
 static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
@@ -91,6 +107,27 @@
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
 static const char *kCodecHeight = "android.media.mediacodec.height";   /* 0..n */
 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
+static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
+static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
+static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
+static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
+static const char *kCodecPriority = "android.media.mediacodec.priority";
+
+// Min/Max QP before shaping
+static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
+static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
+static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
+static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
+static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
+static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
+
+// Min/Max QP after shaping
+static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
+static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
+static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
+static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
+static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
+static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
 
 // NB: These are not yet exposed as public Java API constants.
 static const char *kCodecCrypto = "android.media.mediacodec.crypto";   /* 0,1 */
@@ -98,6 +135,7 @@
 static const char *kCodecLevel = "android.media.mediacodec.level";  /* 0..n */
 static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
 static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
+static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
 static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";  /* 0..n */
 static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
 static const char *kCodecError = "android.media.mediacodec.errcode";
@@ -115,6 +153,13 @@
 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
 static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
+static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
+static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
+static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
+static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
+static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
+static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
+static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
 
 // the kCodecRecent* fields appear only in getMetrics() results
 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
@@ -122,6 +167,12 @@
 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
+static const char *kCodecPlaybackDurationSec =
+        "android.media.mediacodec.playback-duration-sec"; /* in sec */
+
+/* -1: shaper disabled
+   >=0: number of fields changed */
+static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
 
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
@@ -201,6 +252,10 @@
     // implements DeathRecipient
     static void BinderDiedCallback(void* cookie);
     void binderDied();
+    static Mutex sLockCookies;
+    static std::set<void*> sCookies;
+    static void addCookie(void* cookie);
+    static void removeCookie(void* cookie);
 
     void addResource(const MediaResourceParcel &resource);
     void removeResource(const MediaResourceParcel &resource);
@@ -227,8 +282,15 @@
 }
 
 MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
+
+    // remove the cookie, so any in-flight death notification will get dropped
+    // by our handler.
+    removeCookie(this);
+
+    Mutex::Autolock _l(mLock);
     if (mService != nullptr) {
         AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+        mService = nullptr;
     }
 }
 
@@ -240,13 +302,39 @@
         return;
     }
 
+    // Kill clients pending removal.
+    mService->reclaimResourcesFromClientsPendingRemoval(mPid);
+
+    // so our handler will process the death notifications
+    addCookie(this);
+
+    // after this, require mLock whenever using mService
     AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
 }
 
 //static
+Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
+std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
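+// sCookies tracks the ResourceManagerServiceProxy instances that are still alive.
+// BinderDiedCallback only dispatches to cookies still present in this set, so a death
+// notification that races with the proxy destructor (which calls removeCookie) is
+// dropped instead of dereferencing a dangling pointer.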
+
+//static
+void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
+    Mutex::Autolock _l(sLockCookies);
+    sCookies.insert(cookie);
+}
+
+//static
+void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
+    Mutex::Autolock _l(sLockCookies);
+    sCookies.erase(cookie);
+}
+
+//static
 void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
-    auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
-    thiz->binderDied();
+    Mutex::Autolock _l(sLockCookies);
+    if (sCookies.find(cookie) != sCookies.end()) {
+        auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
+        thiz->binderDied();
+    }
 }
 
 void MediaCodec::ResourceManagerServiceProxy::binderDied() {
@@ -318,11 +406,24 @@
         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
         mSurface = new Surface(mProducer, false /* controlledByApp */);
         struct ConsumerListener : public BnConsumerListener {
-            void onFrameAvailable(const BufferItem&) override {}
+            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
+                mConsumer = consumer;
+            }
+            void onFrameAvailable(const BufferItem&) override {
+                BufferItem buffer;
+                // consume buffer
+                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
+                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
+                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
+                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+                }
+            }
+
+            wp<IGraphicBufferConsumer> mConsumer;
             void onBuffersReleased() override {}
             void onSidebandStreamChanged() override {}
         };
-        sp<ConsumerListener> listener{new ConsumerListener};
+        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
         mConsumer->consumerConnect(listener, false);
         mConsumer->setConsumerName(String8{"MediaCodec.release"});
         mConsumer->setConsumerUsageBits(usage);
@@ -358,6 +459,7 @@
     kWhatSignaledInputEOS    = 'seos',
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
+    kWhatFirstTunnelFrameReady = 'ftfR',
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -420,6 +522,7 @@
     virtual void onSignaledInputEOS(status_t err) override;
     virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
     virtual void onOutputBuffersChanged() override;
+    virtual void onFirstTunnelFrameReady() override;
 private:
     const sp<AMessage> mNotify;
 };
@@ -540,6 +643,12 @@
     notify->post();
 }
 
+void CodecCallback::onFirstTunnelFrameReady() {
+    sp<AMessage> notify(mNotify->dup());
+    notify->setInt32("what", kWhatFirstTunnelFrameReady);
+    notify->post();
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -548,12 +657,20 @@
 sp<MediaCodec> MediaCodec::CreateByType(
         const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
         uid_t uid) {
+    sp<AMessage> format;
+    return CreateByType(looper, mime, encoder, err, pid, uid, format);
+}
+
+sp<MediaCodec> MediaCodec::CreateByType(
+        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
+        uid_t uid, sp<AMessage> format) {
     Vector<AString> matchingCodecs;
 
     MediaCodecList::findMatchingCodecs(
             mime.c_str(),
             encoder,
             0,
+            format,
             &matchingCodecs);
 
     if (err != NULL) {
@@ -615,7 +732,10 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
-MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid)
+MediaCodec::MediaCodec(
+        const sp<ALooper> &looper, pid_t pid, uid_t uid,
+        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
     : mState(UNINITIALIZED),
       mReleasedByResourceManager(false),
       mLooper(looper),
@@ -632,15 +752,27 @@
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
       mDequeueOutputReplyID(0),
+      mTunneledInputWidth(0),
+      mTunneledInputHeight(0),
+      mTunneled(false),
+      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
+      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
+      mIsSurfaceToScreen(false),
       mLatencyUnknown(0),
+      mBytesEncoded(0),
+      mEarliestEncodedPtsUs(INT64_MAX),
+      mLatestEncodedPtsUs(INT64_MIN),
+      mFramesEncoded(0),
       mNumLowLatencyEnables(0),
       mNumLowLatencyDisables(0),
       mIsLowLatencyModeOn(false),
       mIndexOfFirstFrameWhenLowLatencyOn(-1),
-      mInputBufferCounter(0) {
+      mInputBufferCounter(0),
+      mGetCodecBase(getCodecBase),
+      mGetCodecInfo(getCodecInfo) {
     if (uid == kNoUid) {
         mUid = AIBinder_getCallingUid();
     } else {
@@ -648,6 +780,33 @@
     }
     mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
             ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
+    if (!mGetCodecBase) {
+        mGetCodecBase = [](const AString &name, const char *owner) {
+            return GetCodecBase(name, owner);
+        };
+    }
+    if (!mGetCodecInfo) {
+        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+            *info = nullptr;
+            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
+            if (!mcl) {
+                return NO_INIT;  // if called from Java should raise IOException
+            }
+            AString tmp = name;
+            if (tmp.endsWith(".secure")) {
+                tmp.erase(tmp.size() - 7, 7);
+            }
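+            // try the exact name first, then fall back to the name with any
+            // ".secure" suffix stripped.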
+            for (const AString &codecName : { name, tmp }) {
+                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
+                if (codecIdx < 0) {
+                    continue;
+                }
+                *info = mcl->getCodecInfo(codecIdx);
+                return OK;
+            }
+            return NAME_NOT_FOUND;
+        };
+    }
 
     initMediametrics();
 }
@@ -708,12 +867,30 @@
     if (mLatencyUnknown > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
     }
+    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
+    if (playbackDurationSec > 0) {
+        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
+    }
     if (mLifetimeStartNs > 0) {
         nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
         lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
         mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
     }
 
+    if (mBytesEncoded) {
+        Mutex::Autolock al(mOutputStatsLock);
+
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
+        int64_t duration = 0;
+        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
+            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
+        }
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
+        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
+    }
+
     {
         Mutex::Autolock al(mLatencyLock);
         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
@@ -792,6 +969,87 @@
     }
 }
 
+constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
+    switch (state) {
+        case TunnelPeekState::kEnabledNoBuffer:
+            return "EnabledNoBuffer";
+        case TunnelPeekState::kDisabledNoBuffer:
+            return "DisabledNoBuffer";
+        case TunnelPeekState::kBufferDecoded:
+            return "BufferDecoded";
+        case TunnelPeekState::kBufferRendered:
+            return "BufferRendered";
+        case TunnelPeekState::kDisabledQueued:
+            return "DisabledQueued";
+        case TunnelPeekState::kEnabledQueued:
+            return "EnabledQueued";
+        default:
+            return default_string;
+    }
+}
+
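+// Advances the TunnelPeekState state machine from the "tunnel-peek" parameter:
+// tunnel-peek=0 moves Enabled{NoBuffer,Queued} to the matching Disabled state;
+// a non-zero value moves Disabled{NoBuffer,Queued} back to Enabled, and if a buffer
+// has already been decoded it sets "android._trigger-tunnel-peek" so that frame is
+// rendered. All other transitions are ignored.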
+void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
+    int32_t tunnelPeek = 0;
+    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
+        return;
+    }
+
+    TunnelPeekState previousState = mTunnelPeekState;
+    if (tunnelPeek == 0) {
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kEnabledNoBuffer:
+                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
+                break;
+            case TunnelPeekState::kEnabledQueued:
+                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
+                break;
+            default:
+                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+                return;
+        }
+    } else {
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kDisabledNoBuffer:
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                break;
+            case TunnelPeekState::kDisabledQueued:
+                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
+                break;
+            case TunnelPeekState::kBufferDecoded:
+                msg->setInt32("android._trigger-tunnel-peek", 1);
+                mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                break;
+            default:
+                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
+                return;
+        }
+    }
+
+    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
+}
+
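+// Accumulates on-screen playback time from kWhatOutputFramesRendered notifications:
+// every "<index>-system-nano" render timestamp carried in the message is fed to the
+// PlaybackDurationAccumulator. Frames that do not go to the screen
+// (mIsSurfaceToScreen == false) are not counted.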
+void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+    int what = 0;
+    msg->findInt32("what", &what);
+    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
+        static bool logged = false;
+        if (!logged) {
+            logged = true;
+            ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
+        }
+        return;
+    }
+    // Playback duration only counts if the buffers are going to the screen.
+    if (!mIsSurfaceToScreen) {
+        return;
+    }
+    int64_t renderTimeNs;
+    size_t index = 0;
+    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
+        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
+    }
+}
+
 bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
 {
     if (nbuckets <= 0 || width <= 0) {
@@ -883,7 +1141,7 @@
 }
 
 // when we send a buffer to the codec;
-void MediaCodec::statsBufferSent(int64_t presentationUs) {
+void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     // only enqueue if we have a legitimate time
     if (presentationUs <= 0) {
@@ -897,6 +1155,11 @@
         });
     }
 
+    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+        mBytesInput += buffer->size();
+        mFramesInput++;
+    }
+
     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
     BufferFlightTiming_t startdata = { presentationUs, nowNs };
 
@@ -917,10 +1180,34 @@
 }
 
 // when we get a buffer back from the codec
-void MediaCodec::statsBufferReceived(int64_t presentationUs) {
+void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
 
     CHECK_NE(mState, UNINITIALIZED);
 
+    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+        int32_t flags = 0;
+        (void) buffer->meta()->findInt32("flags", &flags);
+
+        // some of these frames, we don't want to count
+        // standalone EOS.... has an invalid timestamp
+        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
+            mBytesEncoded += buffer->size();
+            mFramesEncoded++;
+
+            Mutex::Autolock al(mOutputStatsLock);
+            int64_t timeUs = 0;
+            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
+                if (timeUs > mLatestEncodedPtsUs) {
+                    mLatestEncodedPtsUs = timeUs;
+                }
+                // can't chain as an else-if or this never triggers
+                if (timeUs < mEarliestEncodedPtsUs) {
+                    mEarliestEncodedPtsUs = timeUs;
+                }
+            }
+        }
+    }
+
     // mutex access to mBuffersInFlight and other stats
     Mutex::Autolock al(mLatencyLock);
 
@@ -976,7 +1263,7 @@
         return;
     }
 
-    // nowNs start our calculations
+    // now start our calculations
     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
     int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
 
@@ -1009,6 +1296,12 @@
     return err;
 }
 
+void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+    PostReplyWithError(replyID, err);
+}
+
 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
     int32_t finalErr = err;
     if (mReleasedByResourceManager) {
@@ -1085,40 +1378,30 @@
     bool secureCodec = false;
     const char *owner = "";
     if (!name.startsWith("android.filter.")) {
-        AString tmp = name;
-        if (tmp.endsWith(".secure")) {
-            secureCodec = true;
-            tmp.erase(tmp.size() - 7, 7);
-        }
-        const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
-        if (mcl == NULL) {
+        status_t err = mGetCodecInfo(name, &mCodecInfo);
+        if (err != OK) {
             mCodec = NULL;  // remove the codec.
-            return NO_INIT; // if called from Java should raise IOException
-        }
-        for (const AString &codecName : { name, tmp }) {
-            ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
-            if (codecIdx < 0) {
-                continue;
-            }
-            mCodecInfo = mcl->getCodecInfo(codecIdx);
-            Vector<AString> mediaTypes;
-            mCodecInfo->getSupportedMediaTypes(&mediaTypes);
-            for (size_t i = 0; i < mediaTypes.size(); i++) {
-                if (mediaTypes[i].startsWith("video/")) {
-                    mIsVideo = true;
-                    break;
-                }
-            }
-            break;
+            return err;
         }
         if (mCodecInfo == nullptr) {
+            ALOGE("Getting codec info with name '%s' failed", name.c_str());
             return NAME_NOT_FOUND;
         }
+        secureCodec = name.endsWith(".secure");
+        Vector<AString> mediaTypes;
+        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
+        for (size_t i = 0; i < mediaTypes.size(); ++i) {
+            if (mediaTypes[i].startsWith("video/")) {
+                mIsVideo = true;
+                break;
+            }
+        }
         owner = mCodecInfo->getOwnerName();
     }
 
-    mCodec = GetCodecBase(name, owner);
+    mCodec = mGetCodecBase(name, owner);
     if (mCodec == NULL) {
+        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
         return NAME_NOT_FOUND;
     }
 
@@ -1197,6 +1480,21 @@
     return msg->post();
 }
 
+status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+    msg->setMessage("first-tunnel-frame-ready", notify);
+    return msg->post();
+}
+
+/*
+ * MediaFormat Shaping forward declarations
+ * including the property name we use for control.
+ */
+static int enableMediaFormatShapingDefault = 1;
+static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse);
+
 status_t MediaCodec::configure(
         const sp<AMessage> &format,
         const sp<Surface> &nativeWindow,
@@ -1227,6 +1525,8 @@
     }
 
     if (mIsVideo) {
+        // TODO: validity check log-session-id: it should be a 32-character hex string.
+        format->findString("log-session-id", &mLogSessionId);
         format->findInt32("width", &mVideoWidth);
         format->findInt32("height", &mVideoHeight);
         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
@@ -1234,6 +1534,7 @@
         }
 
         if (mMetricsHandle != 0) {
+            mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
             mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
             mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
             mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
@@ -1245,6 +1546,26 @@
             if (format->findInt32("max-height", &maxHeight)) {
                 mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
             }
+            int32_t colorFormat = -1;
+            if (format->findInt32("color-format", &colorFormat)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
+            }
+            float frameRate = -1.0;
+            if (format->findFloat("frame-rate", &frameRate)) {
+                mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+            }
+            float captureRate = -1.0;
+            if (format->findFloat("capture-rate", &captureRate)) {
+                mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+            }
+            float operatingRate = -1.0;
+            if (format->findFloat("operating-rate", &operatingRate)) {
+                mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+            }
+            int32_t priority = -1;
+            if (format->findInt32("priority", &priority)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+            }
         }
 
         // Prevent possible integer overflow in downstream code.
@@ -1253,6 +1574,61 @@
             ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
             return BAD_VALUE;
         }
+
+    } else {
+        if (mMetricsHandle != 0) {
+            int32_t channelCount;
+            if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+            }
+            int32_t sampleRate;
+            if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+            }
+        }
+    }
+
+    if (flags & CONFIGURE_FLAG_ENCODE) {
+        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
+                                                 enableMediaFormatShapingDefault);
+        if (!enableShaping) {
+            ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
+            if (mMetricsHandle != 0) {
+                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
+            }
+        } else {
+            (void) shapeMediaFormat(format, flags);
+            // XXX: do we want to do this regardless of shaping enablement?
+            mapFormat(mComponentName, format, nullptr, false);
+        }
+    }
+
+    // push min/max QP to MediaMetrics after shaping
+    if (mIsVideo && mMetricsHandle != 0) {
+        int32_t qpIMin = -1;
+        if (format->findInt32("video-qp-i-min", &qpIMin)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
+        }
+        int32_t qpIMax = -1;
+        if (format->findInt32("video-qp-i-max", &qpIMax)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
+        }
+        int32_t qpPMin = -1;
+        if (format->findInt32("video-qp-p-min", &qpPMin)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
+        }
+        int32_t qpPMax = -1;
+        if (format->findInt32("video-qp-p-max", &qpPMax)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
+        }
+        int32_t qpBMin = -1;
+        if (format->findInt32("video-qp-b-min", &qpBMin)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
+        }
+        int32_t qpBMax = -1;
+        if (format->findInt32("video-qp-b-max", &qpBMax)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
+        }
     }
 
     updateLowLatency(format);
@@ -1277,6 +1653,8 @@
     // save msg for reset
     mConfigureMsg = msg;
 
+    sp<AMessage> callback = mCallback;
+
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
@@ -1284,16 +1662,12 @@
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
     for (int i = 0; i <= kMaxRetry; ++i) {
-        if (i > 0) {
-            // Don't try to reclaim resource for the first time.
-            if (!mResourceManagerProxy->reclaimResource(resources)) {
-                break;
-            }
-        }
-
         sp<AMessage> response;
         err = PostAndAwaitResponse(msg, &response);
         if (err != OK && err != INVALID_OPERATION) {
+            if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
+                break;
+            }
             // MediaCodec now set state to UNINITIALIZED upon any fatal error.
             // To maintain backward-compatibility, do a reset() to put codec
             // back into INITIALIZED state.
@@ -1301,7 +1675,18 @@
             // the configure failure is due to wrong state.
 
             ALOGE("configure failed with err 0x%08x, resetting...", err);
-            reset();
+            status_t err2 = reset();
+            if (err2 != OK) {
+                ALOGE("retrying configure: failed to reset codec (%08x)", err2);
+                break;
+            }
+            if (callback != nullptr) {
+                err2 = setCallback(callback);
+                if (err2 != OK) {
+                    ALOGE("retrying configure: failed to set callback (%08x)", err2);
+                    break;
+                }
+            }
         }
         if (!isResourceError(err)) {
             break;
@@ -1311,6 +1696,465 @@
     return err;
 }
 
+// Media Format Shaping support
+//
+
+static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+static bool sIsHandheld = true;
+
+static bool connectFormatShaper() {
+    static std::once_flag sCheckOnce;
+
+    ALOGV("connectFormatShaper...");
+
+    std::call_once(sCheckOnce, [&](){
+
+        void *libHandle = NULL;
+        nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
+
+        // prefer any copy in the mainline module
+        //
+        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
+        AString libraryName = "libmediaformatshaper.so";
+
+        if (mediaNs != NULL) {
+            static const android_dlextinfo dlextinfo = {
+                .flags = ANDROID_DLEXT_USE_NAMESPACE,
+                .library_namespace = mediaNs,
+            };
+
+            AString libraryMainline = "/apex/com.android.media/";
+#if __LP64__
+            libraryMainline.append("lib64/");
+#else
+            libraryMainline.append("lib/");
+#endif
+            libraryMainline.append(libraryName);
+
+            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
+                                                 &dlextinfo);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
+                      libraryMainline.c_str());
+            }
+        } else {
+            ALOGV("connectFormatShaper: couldn't find media namespace.");
+        }
+
+        // fall back to the system partition, if present.
+        //
+        if (sShaperOps == NULL) {
+
+            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
+
+            if (libHandle != NULL) {
+                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
+                                dlsym(libHandle, "shaper_ops");
+            } else {
+                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
+            }
+        }
+
+        if (sShaperOps != nullptr
+            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
+            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
+                  sShaperOps->version);
+            sShaperOps = nullptr;
+        }
+
+        if (sShaperOps != nullptr) {
+            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
+        }
+
+        nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
+        ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
+              (loading_finished - loading_started)/1000);
+
+
+        // we also want to know whether this is a handheld device
+        // start with assumption that the device is handheld.
+        sIsHandheld = true;
+        sp<IServiceManager> serviceMgr = defaultServiceManager();
+        sp<content::pm::IPackageManagerNative> packageMgr;
+        if (serviceMgr.get() != nullptr) {
+            sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+            packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+        }
+        // if we didn't get serviceMgr, we'll leave packageMgr as default null
+        if (packageMgr != nullptr) {
+
+            // MUST have these
+            static const String16 featuresNeeded[] = {
+                String16("android.hardware.touchscreen")
+            };
+            // these must be present to be a handheld
+            for (::android::String16 required : featuresNeeded) {
+                bool hasFeature = false;
+                binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
+                if (!status.isOk()) {
+                    ALOGE("%s: hasSystemFeature failed: %s",
+                        __func__, status.exceptionMessage().c_str());
+                    continue;
+                }
+                ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
+                if (!hasFeature) {
+                    ALOGV("... which means we are not handheld");
+                    sIsHandheld = false;
+                    break;
+                }
+            }
+
+            // MUST NOT have these
+            static const String16 featuresDisallowed[] = {
+                String16("android.hardware.type.automotive"),
+                String16("android.hardware.type.television"),
+                String16("android.hardware.type.watch")
+            };
+            // any of these present -- we aren't a handheld
+            for (::android::String16 forbidden : featuresDisallowed) {
+                bool hasFeature = false;
+                binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
+                if (!status.isOk()) {
+                    ALOGE("%s: hasSystemFeature failed: %s",
+                        __func__, status.exceptionMessage().c_str());
+                    continue;
+                }
+                ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
+                if (hasFeature) {
+                    ALOGV("... which means we are not handheld");
+                    sIsHandheld = false;
+                    break;
+                }
+            }
+        }
+
+    });
+
+    return true;
+}
+
+
+#if 0
+// a construct to force the above dlopen() to run very early.
+// goal: keep the dlopen() off the critical path of latency-sensitive apps.
+// if this is disabled, cold start of those apps is slower by the time it takes to dlopen().
+// TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
+//
+static bool forceEarlyLoadingShaper = connectFormatShaper();
+#endif
+
+// parse the codec's properties: mapping, whether it meets min quality, etc
+// and pass them into the video quality code
+//
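+// For illustration (entry names below are hypothetical): a details entry
+// "feature-somefeature" -> 1 becomes setFeature("somefeature", 1),
+// "tuning-somekey" -> "value" becomes setTuning("somekey", "value"), and
+// "mapping-kind-key" -> "target" becomes setMap("kind", "key", "target").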
+static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
+                                  sp<MediaCodecInfo> codecInfo, AString mediaType) {
+
+    sp<MediaCodecInfo::Capabilities> capabilities =
+                    codecInfo->getCapabilitiesFor(mediaType.c_str());
+    if (capabilities == nullptr) {
+        ALOGI("no capabilities as part of the codec?");
+    } else {
+        const sp<AMessage> &details = capabilities->getDetails();
+        AString mapTarget;
+        int count = details->countEntries();
+        for (int ix = 0; ix < count; ix++) {
+            AMessage::Type entryType;
+            const char *mapSrc = details->getEntryNameAt(ix, &entryType);
+            // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
+            //
+            static const char *featurePrefix = "feature-";
+            static const int featurePrefixLen = strlen(featurePrefix);
+            static const char *tuningPrefix = "tuning-";
+            static const int tuningPrefixLen = strlen(tuningPrefix);
+            static const char *mappingPrefix = "mapping-";
+            static const int mappingPrefixLen = strlen(mappingPrefix);
+
+            if (mapSrc == NULL) {
+                continue;
+            } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
+                int32_t intValue;
+                if (details->findInt32(mapSrc, &intValue)) {
+                    ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
+                    (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
+                                                   intValue);
+                }
+                continue;
+            } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
+                AString value;
+                if (details->findString(mapSrc, &value)) {
+                    ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
+                    (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
+                                                   value.c_str());
+                }
+                continue;
+            } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
+                AString target;
+                if (details->findString(mapSrc, &target)) {
+                    ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
+                          target.c_str());
+                    // key is really "kind-key"
+                    // separate that, so setMap() sees the triple  kind, key, value
+                    const char *kind = &mapSrc[mappingPrefixLen];
+                    const char *sep = strchr(kind, '-');
+                    if (sep != NULL) {
+                        const char *key = sep + 1;
+                        std::string xkind = std::string(kind, sep - kind);
+                        (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
+                                                   key, target.c_str());
+                    }
+                }
+            }
+        }
+    }
+
+    // we also carry in the codec description whether we are on a handheld device.
+    // this info is eventually used by both the Codec and the C2 machinery to inform
+    // the underlying codec whether to do any shaping.
+    //
+    if (sIsHandheld) {
+        // set if we are indeed a handheld device (or, in the future, any eligible device);
+        // left unset on devices that aren't eligible for minimum quality enforcement.
+        (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
+        // strictly speaking this is a tuning, but tunings are strings while features store ints
+        (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
+    }
+}
+
+status_t MediaCodec::setupFormatShaper(AString mediaType) {
+    ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
+          mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
+
+    // someone might have beaten us to it.
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle != nullptr) {
+        ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
+        return OK;
+    }
+
+    // we get to build & register one
+    shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGW("unable to create a shaper for cocodec %s mediaType %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
+
+    shaperHandle = sShaperOps->registerShaper(shaperHandle,
+                                              mComponentName.c_str(), mediaType.c_str());
+
+    nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
+    ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
+          mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
+
+    return OK;
+}
+
+
+// Format Shaping
+//      Mapping and Manipulation of encoding parameters
+//
+//      All of these decisions are pushed into the shaper instead of being made here in
+//      MediaCodec. This includes decisions based on whether the codec implements minimum
+//      quality bars itself or needs to be shaped outside of the codec.
+//      This keeps all of those decisions in one place.
+//      It also means that we push some extra decision information into the shaper (whether
+//      this is a handheld device or one that is otherwise eligible for minimum quality
+//      manipulation, which generational quality target is in force, etc.). This allows those
+//      values to be cached in the per-codec structures that are built once per process
+//      instead of once per codec instantiation.
+//
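+// The resulting flow in shapeMediaFormat() below: connectFormatShaper() loads the
+// shaper library once per process, setupFormatShaper() builds and registers a
+// per-(codec, mediaType) shaper node from the codec's capabilities, shapeFormat()
+// computes an updated format, and the resulting deltas are folded back into the
+// caller's format via extend().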
+
+status_t MediaCodec::shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags) {
+    ALOGV("shapeMediaFormat entry");
+
+    if (!(flags & CONFIGURE_FLAG_ENCODE)) {
+        ALOGW("shapeMediaFormat: not encoder");
+        return OK;
+    }
+    if (mCodecInfo == NULL) {
+        ALOGW("shapeMediaFormat: no codecinfo");
+        return OK;
+    }
+
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("shapeMediaFormat: no mediaType information");
+        return OK;
+    }
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+    connectFormatShaper();
+    if (sShaperOps == nullptr) {
+        ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
+        return OK;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr)  {
+        setupFormatShaper(mediaType);
+        shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
+    }
+    if (shaperHandle == nullptr) {
+        ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
+              mComponentName.c_str(), mediaType.c_str());
+        return OK;
+    }
+
+    // run the shaper
+    //
+
+    ALOGV("Shaping input: %s", format->debugString(0).c_str());
+
+    sp<AMessage> updatedFormat = format->dup();
+    AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
+
+    int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
+    if (result == 0) {
+        AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
+
+        sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
+        size_t changeCount = deltas->countEntries();
+        ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
+        if (mMetricsHandle != 0) {
+            mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+        }
+        if (changeCount > 0) {
+            if (mMetricsHandle != 0) {
+                // save some old properties before we fold in the new ones
+                int32_t bitrate;
+                if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+                }
+                int32_t qpIMin = -1;
+                if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
+                }
+                int32_t qpIMax = -1;
+                if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
+                }
+                int32_t qpPMin = -1;
+                if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
+                }
+                int32_t qpPMax = -1;
+                if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
+                }
+                int32_t qpBMin = -1;
+                if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
+                }
+                int32_t qpBMax = -1;
+                if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
+                }
+            }
+            // NB: for any field in both format and deltas, the deltas copy wins
+            format->extend(deltas);
+        }
+    }
+
+    AMediaFormat_delete(updatedNdkFormat);
+    return OK;
+}
+
+static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
+                      bool reverse) {
+    AString mediaType;
+    if (!format->findString("mime", &mediaType)) {
+        ALOGW("mapFormat: no mediaType information");
+        return;
+    }
+    ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
+          mediaType.c_str(), kind ? kind : "<all>", reverse);
+
+    // make sure we have the function entry points for the shaper library
+    //
+
+#if 0
+    // let's play the faster "only do mapping if we've already loaded the library
+    connectFormatShaper();
+#endif
+    if (sShaperOps == nullptr) {
+        ALOGV("mapFormat: no MediaFormatShaper hooks available");
+        return;
+    }
+
+    // find the shaper information for this codec+mediaType pair
+    //
+    mediaformatshaper::shaperHandle_t shaperHandle;
+    shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
+    if (shaperHandle == nullptr) {
+        ALOGV("mapFormat: no shaper handle");
+        return;
+    }
+
+    const char **mappings;
+    if (reverse)
+        mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
+    else
+        mappings = sShaperOps->getMappings(shaperHandle, kind);
+
+    if (mappings == nullptr) {
+        ALOGV("no mappings returned");
+        return;
+    }
+
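+    // mappings is a flat, NULL-terminated array of (from, to) name pairs allocated by
+    // the shaper library; matching entries in the format are renamed in place and the
+    // strings (and the array itself) are freed once the renaming pass is done.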
+    ALOGV("Pre-mapping: %s",  format->debugString(2).c_str());
+    // do the mapping
+    //
+    int entries = format->countEntries();
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+
+        size_t ix = format->findEntryByName(mappings[i]);
+        if (ix < entries) {
+            ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
+            status_t status = format->setEntryNameAt(ix, mappings[i+1]);
+            if (status != OK) {
+                ALOGW("Unable to map from '%s' to '%s': status %d",
+                      mappings[i], mappings[i+1], status);
+            }
+        }
+    }
+    ALOGV("Post-mapping: %s",  format->debugString(2).c_str());
+
+
+    // reclaim the mapping memory
+    for (int i = 0; ; i += 2) {
+        if (mappings[i] == nullptr) {
+            break;
+        }
+        free((void*)mappings[i]);
+        free((void*)mappings[i + 1]);
+    }
+    free(mappings);
+    mappings = nullptr;
+}
+
+//
+// end of Format Shaping hooks within MediaCodec
+//
+
 status_t MediaCodec::releaseCrypto()
 {
     ALOGV("releaseCrypto");
@@ -1410,6 +2254,8 @@
 status_t MediaCodec::start() {
     sp<AMessage> msg = new AMessage(kWhatStart, this);
 
+    sp<AMessage> callback;
+
     status_t err;
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
@@ -1434,6 +2280,20 @@
                 ALOGE("retrying start: failed to configure codec");
                 break;
             }
+            if (callback != nullptr) {
+                err = setCallback(callback);
+                if (err != OK) {
+                    ALOGE("retrying start: failed to set callback");
+                    break;
+                }
+                ALOGD("succeed to set callback for reclaim");
+            }
+        }
+
+        // Keep callback message after the first iteration if necessary.
+        if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
+            callback = mCallback;
+            ALOGD("keep callback message for reclaim");
         }
 
         sp<AMessage> response;
@@ -1512,7 +2372,6 @@
     mStickyError = OK;
 
     // reset state not reset by setState(UNINITIALIZED)
-    mReplyID = 0;
     mDequeueInputReplyID = 0;
     mDequeueOutputReplyID = 0;
     mDequeueInputTimeoutGeneration = 0;
@@ -1906,6 +2765,22 @@
     return OK;
 }
 
+status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
+    return mCodec->querySupportedParameters(names);
+}
+
+status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
+    return mCodec->describeParameter(name, desc);
+}
+
+status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->subscribeToParameters(names);
+}
+
+status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
+    return mCodec->unsubscribeFromParameters(names);
+}
+
 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
     msg->setMessage("notify", notify);
@@ -2044,20 +2919,25 @@
     } else if (mFlags & kFlagOutputBuffersChanged) {
         PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
         mFlags &= ~kFlagOutputBuffersChanged;
-    } else if (mFlags & kFlagOutputFormatChanged) {
-        PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
-        mFlags &= ~kFlagOutputFormatChanged;
     } else {
         sp<AMessage> response = new AMessage;
-        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
-
-        if (index < 0) {
-            CHECK_EQ(index, -EAGAIN);
+        BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
+        if (!info) {
             return false;
         }
 
-        const sp<MediaCodecBuffer> &buffer =
-            mPortBuffers[kPortIndexOutput][index].mData;
+        // In synchronous mode, output format change should be handled
+        // at dequeue to put the event at the correct order.
+
+        const sp<MediaCodecBuffer> &buffer = info->mData;
+        handleOutputFormatChangeIfNeeded(buffer);
+        if (mFlags & kFlagOutputFormatChanged) {
+            PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
+            mFlags &= ~kFlagOutputFormatChanged;
+            return true;
+        }
+
+        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
 
         response->setSize("index", index);
         response->setSize("offset", buffer->offset());
@@ -2066,14 +2946,15 @@
         int64_t timeUs;
         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
-        statsBufferReceived(timeUs);
-
         response->setInt64("timeUs", timeUs);
 
         int32_t flags;
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         response->setInt32("flags", flags);
+
+        statsBufferReceived(timeUs, buffer);
+
         response->postReply(replyID);
     }
 
@@ -2094,14 +2975,16 @@
                     CHECK(msg->findInt32("err", &err));
                     CHECK(msg->findInt32("actionCode", &actionCode));
 
-                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d",
-                            err, actionCode, mState);
+                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
+                            err, actionCode, mState, stateString(mState).c_str());
                     if (err == DEAD_OBJECT) {
                         mFlags |= kFlagSawMediaServerDie;
                         mFlags &= ~kFlagIsComponentAllocated;
                     }
 
                     bool sendErrorResponse = true;
+                    std::string origin{"kWhatError:"};
+                    origin += stateString(mState);
 
                     switch (mState) {
                         case INITIALIZING:
@@ -2149,18 +3032,27 @@
                         case STOPPING:
                         {
                             if (mFlags & kFlagSawMediaServerDie) {
+                                bool postPendingReplies = true;
+                                if (mState == RELEASING && !mReplyID) {
+                                    ALOGD("Releasing asynchronously, so nothing to reply here.");
+                                    postPendingReplies = false;
+                                }
                                 // MediaServer died, there definitely won't
                                 // be a shutdown complete notification after
                                 // all.
 
-                                // note that we're directly going from
+                                // note that we may be directly going from
                                 // STOPPING->UNINITIALIZED, instead of the
                                 // usual STOPPING->INITIALIZED state.
                                 setState(UNINITIALIZED);
                                 if (mState == RELEASING) {
                                     mComponentName.clear();
                                 }
-                                (new AMessage)->postReply(mReplyID);
+                                if (postPendingReplies) {
+                                    postPendingRepliesAndDeferredMessages(origin + ":dead");
+                                }
+                                sendErrorResponse = false;
+                            } else if (!mReplyID) {
                                 sendErrorResponse = false;
                             }
                             break;
@@ -2186,7 +3078,7 @@
                         case FLUSHED:
                         case STARTED:
                         {
-                            sendErrorResponse = false;
+                            sendErrorResponse = (mReplyID != nullptr);
 
                             setStickyError(err);
                             postActivityNotificationIfPossible();
@@ -2216,7 +3108,7 @@
 
                         default:
                         {
-                            sendErrorResponse = false;
+                            sendErrorResponse = (mReplyID != nullptr);
 
                             setStickyError(err);
                             postActivityNotificationIfPossible();
@@ -2243,7 +3135,15 @@
                     }
 
                     if (sendErrorResponse) {
-                        PostReplyWithError(mReplyID, err);
+                        // TRICKY: replicate PostReplyWithError logic for
+                        //         err code override
+                        int32_t finalErr = err;
+                        if (mReleasedByResourceManager) {
+                            // override the err code if MediaCodec has been
+                            // released by ResourceManager.
+                            finalErr = DEAD_OBJECT;
+                        }
+                        postPendingRepliesAndDeferredMessages(origin, finalErr);
                     }
                     break;
                 }
@@ -2253,8 +3153,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED) {
                         // In case a kWhatError or kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("allocate interrupted by error or release, current state %d",
-                              mState);
+                        ALOGW("allocate interrupted by error or release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     CHECK_EQ(mState, INITIALIZING);
@@ -2291,7 +3191,7 @@
                                 MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
                     }
 
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
                     break;
                 }
 
@@ -2300,8 +3200,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
                         // In case a kWhatError or kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("configure interrupted by error or release, current state %d",
-                              mState);
+                        ALOGW("configure interrupted by error or release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     CHECK_EQ(mState, CONFIGURING);
@@ -2316,7 +3216,7 @@
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
                         // meaningful and confusing for an encoder in a transcoder scenario
-                        mInputFormat->setInt32("allow-frame-drop", mAllowFrameDroppingBySurface);
+                        mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
                     }
                     sp<AMessage> interestingFormat =
                             (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
@@ -2330,7 +3230,7 @@
                         mFlags |= kFlagUsesSoftwareRenderer;
                     }
                     setState(CONFIGURED);
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
 
                     // augment our media metrics info, now that we know more things
                     // such as what the codec extracted from any CSD passed in.
@@ -2375,6 +3275,12 @@
 
                 case kWhatInputSurfaceCreated:
                 {
+                    if (mState != CONFIGURED) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatInputSurfaceCreated message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to initiateCreateInputSurface()
                     status_t err = NO_ERROR;
                     sp<AMessage> response = new AMessage;
@@ -2393,12 +3299,18 @@
                     } else {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
                     break;
                 }
 
                 case kWhatInputSurfaceAccepted:
                 {
+                    if (mState != CONFIGURED) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatInputSurfaceAccepted message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to initiateSetInputSurface()
                     status_t err = NO_ERROR;
                     sp<AMessage> response = new AMessage();
@@ -2409,19 +3321,25 @@
                     } else {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
                     break;
                 }
 
                 case kWhatSignaledInputEOS:
                 {
+                    if (!isExecuting()) {
+                        // state transitioned unexpectedly; we should have replied already.
+                        ALOGD("received kWhatSignaledInputEOS message in state %s",
+                                stateString(mState).c_str());
+                        break;
+                    }
                     // response to signalEndOfInputStream()
                     sp<AMessage> response = new AMessage;
                     status_t err;
                     if (msg->findInt32("err", &err)) {
                         response->setInt32("err", err);
                     }
-                    response->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
                     break;
                 }
 
@@ -2430,7 +3348,8 @@
                     if (mState == RELEASING || mState == UNINITIALIZED) {
                         // In case a kWhatRelease message came in and replied,
                         // we log a warning and ignore.
-                        ALOGW("start interrupted by release, current state %d", mState);
+                        ALOGW("start interrupted by release, current state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
 
@@ -2440,7 +3359,7 @@
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
                     setState(STARTED);
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
                     break;
                 }
 
@@ -2453,9 +3372,18 @@
 
                 case kWhatOutputFramesRendered:
                 {
-                    // ignore these in all states except running, and check that we have a
-                    // notification set
-                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+                    // ignore these in all states except running
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                          asString(previousState),
+                          asString(TunnelPeekState::kBufferRendered));
+                    updatePlaybackDuration(msg);
+                    // check that we have a notification set
+                    if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
                         notify->setMessage("data", msg);
                         notify->post();
@@ -2463,6 +3391,48 @@
                     break;
                 }
 
+                case kWhatFirstTunnelFrameReady:
+                {
+                    if (mState != STARTED) {
+                        break;
+                    }
+                    TunnelPeekState previousState = mTunnelPeekState;
+                    switch (mTunnelPeekState) {
+                        case TunnelPeekState::kDisabledNoBuffer:
+                        case TunnelPeekState::kDisabledQueued:
+                            mTunnelPeekState = TunnelPeekState::kBufferDecoded;
+                            ALOGV("First tunnel frame ready");
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(previousState),
+                                  asString(mTunnelPeekState));
+                            break;
+                        case TunnelPeekState::kEnabledNoBuffer:
+                        case TunnelPeekState::kEnabledQueued:
+                            {
+                                sp<AMessage> parameters = new AMessage();
+                                parameters->setInt32("android._trigger-tunnel-peek", 1);
+                                mCodec->signalSetParameters(parameters);
+                            }
+                            mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                            ALOGV("First tunnel frame ready");
+                            ALOGV("TunnelPeekState: %s -> %s",
+                                  asString(previousState),
+                                  asString(mTunnelPeekState));
+                            break;
+                        default:
+                            ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
+                                  asString(mTunnelPeekState));
+                            break;
+                    }
+
+                    if (mOnFirstTunnelFrameReadyNotification != nullptr) {
+                        sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
+                        notify->setMessage("data", msg);
+                        notify->post();
+                    }
+                    break;
+                }
+
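The kWhatFirstTunnelFrameReady handler above is one piece of a small tunnel-peek state machine that this change also touches in kWhatStart, kWhatFlush, onQueueInputBuffer and kWhatOutputFramesRendered. The sketch below collects those transitions in one place for reference; the state names mirror the code, while TunnelEvent and transition() are illustrative names invented for this sketch and are not part of MediaCodec.

// Illustrative only: the TunnelPeekState transitions exercised in this change,
// gathered into one function.
enum class TunnelPeekState {
    kDisabledNoBuffer, kEnabledNoBuffer,
    kDisabledQueued,   kEnabledQueued,
    kBufferDecoded,    kBufferRendered,
};

enum class TunnelEvent { kStartOrFlush, kInputBufferQueued, kFirstFrameReady, kFrameRendered };

static TunnelPeekState transition(TunnelPeekState s, TunnelEvent e) {
    switch (e) {
        case TunnelEvent::kStartOrFlush:        // kWhatStart / kWhatFlush
            return TunnelPeekState::kEnabledNoBuffer;
        case TunnelEvent::kInputBufferQueued:   // onQueueInputBuffer marks the first frame
            if (s == TunnelPeekState::kEnabledNoBuffer)  return TunnelPeekState::kEnabledQueued;
            if (s == TunnelPeekState::kDisabledNoBuffer) return TunnelPeekState::kDisabledQueued;
            return s;
        case TunnelEvent::kFirstFrameReady:     // kWhatFirstTunnelFrameReady
            if (s == TunnelPeekState::kDisabledNoBuffer || s == TunnelPeekState::kDisabledQueued)
                return TunnelPeekState::kBufferDecoded;
            if (s == TunnelPeekState::kEnabledNoBuffer || s == TunnelPeekState::kEnabledQueued)
                return TunnelPeekState::kBufferRendered;   // peek is triggered in this case
            return s;
        case TunnelEvent::kFrameRendered:       // kWhatOutputFramesRendered
            return TunnelPeekState::kBufferRendered;
    }
    return s;
}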
                 case kWhatFillThisBuffer:
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -2541,107 +3511,13 @@
                         break;
                     }
 
-                    sp<RefBase> obj;
-                    CHECK(msg->findObject("buffer", &obj));
-                    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-
-                    if (mOutputFormat != buffer->format()) {
-                        if (mFlags & kFlagUseBlockModel) {
-                            sp<AMessage> diff1 = mOutputFormat->changesFrom(buffer->format());
-                            sp<AMessage> diff2 = buffer->format()->changesFrom(mOutputFormat);
-                            std::set<std::string> keys;
-                            size_t numEntries = diff1->countEntries();
-                            AMessage::Type type;
-                            for (size_t i = 0; i < numEntries; ++i) {
-                                keys.emplace(diff1->getEntryNameAt(i, &type));
-                            }
-                            numEntries = diff2->countEntries();
-                            for (size_t i = 0; i < numEntries; ++i) {
-                                keys.emplace(diff2->getEntryNameAt(i, &type));
-                            }
-                            sp<WrapperObject<std::set<std::string>>> changedKeys{
-                                new WrapperObject<std::set<std::string>>{std::move(keys)}};
-                            buffer->meta()->setObject("changedKeys", changedKeys);
-                        }
-                        mOutputFormat = buffer->format();
-                        ALOGV("[%s] output format changed to: %s",
-                                mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
-
-                        if (mSoftRenderer == NULL &&
-                                mSurface != NULL &&
-                                (mFlags & kFlagUsesSoftwareRenderer)) {
-                            AString mime;
-                            CHECK(mOutputFormat->findString("mime", &mime));
-
-                            // TODO: propagate color aspects to software renderer to allow better
-                            // color conversion to RGB. For now, just mark dataspace for YUV
-                            // rendering.
-                            int32_t dataSpace;
-                            if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
-                                ALOGD("[%s] setting dataspace on output surface to #%x",
-                                        mComponentName.c_str(), dataSpace);
-                                int err = native_window_set_buffers_data_space(
-                                        mSurface.get(), (android_dataspace)dataSpace);
-                                ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
-                            }
-                            if (mOutputFormat->contains("hdr-static-info")) {
-                                HDRStaticInfo info;
-                                if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
-                                    setNativeWindowHdrMetadata(mSurface.get(), &info);
-                                }
-                            }
-
-                            sp<ABuffer> hdr10PlusInfo;
-                            if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
-                                    && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
-                                native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
-                                        hdr10PlusInfo->size(), hdr10PlusInfo->data());
-                            }
-
-                            if (mime.startsWithIgnoreCase("video/")) {
-                                mSurface->setDequeueTimeout(-1);
-                                mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
-                            }
-                        }
-
-                        requestCpuBoostIfNeeded();
-
-                        if (mFlags & kFlagIsEncoder) {
-                            // Before we announce the format change we should
-                            // collect codec specific data and amend the output
-                            // format as necessary.
-                            int32_t flags = 0;
-                            (void) buffer->meta()->findInt32("flags", &flags);
-                            if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)) {
-                                status_t err =
-                                    amendOutputFormatWithCodecSpecificData(buffer);
-
-                                if (err != OK) {
-                                    ALOGE("Codec spit out malformed codec "
-                                          "specific data!");
-                                }
-                            }
-                        }
-                        if (mFlags & kFlagIsAsync) {
-                            onOutputFormatChanged();
-                        } else {
-                            mFlags |= kFlagOutputFormatChanged;
-                            postActivityNotificationIfPossible();
-                        }
-
-                        // Notify mCrypto of video resolution changes
-                        if (mCrypto != NULL) {
-                            int32_t left, top, right, bottom, width, height;
-                            if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
-                                mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
-                            } else if (mOutputFormat->findInt32("width", &width)
-                                    && mOutputFormat->findInt32("height", &height)) {
-                                mCrypto->notifyResolution(width, height);
-                            }
-                        }
-                    }
-
                     if (mFlags & kFlagIsAsync) {
+                        sp<RefBase> obj;
+                        CHECK(msg->findObject("buffer", &obj));
+                        sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+                        // In asynchronous mode, output format change is processed immediately.
+                        handleOutputFormatChangeIfNeeded(buffer);
                         onOutputBufferAvailable();
                     } else if (mFlags & kFlagDequeueOutputPending) {
                         CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
@@ -2666,18 +3542,26 @@
                 case kWhatStopCompleted:
                 {
                     if (mState != STOPPING) {
-                        ALOGW("Received kWhatStopCompleted in state %d", mState);
+                        ALOGW("Received kWhatStopCompleted in state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     setState(INITIALIZED);
-                    (new AMessage)->postReply(mReplyID);
+                    if (mReplyID) {
+                        postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
+                    } else {
+                        ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
+                              "but the operation completed anyway. (last reply origin=%s)",
+                              mLastReplyOrigin.c_str());
+                    }
                     break;
                 }
 
                 case kWhatReleaseCompleted:
                 {
                     if (mState != RELEASING) {
-                        ALOGW("Received kWhatReleaseCompleted in state %d", mState);
+                        ALOGW("Received kWhatReleaseCompleted in state %d/%s",
+                              mState, stateString(mState).c_str());
                         break;
                     }
                     setState(UNINITIALIZED);
@@ -2694,7 +3578,7 @@
                     mReleaseSurface.reset();
 
                     if (mReplyID != nullptr) {
-                        (new AMessage)->postReply(mReplyID);
+                        postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
                     }
                     if (mAsyncReleaseCompleteNotification != nullptr) {
                         flushMediametrics();
@@ -2707,8 +3591,8 @@
                 case kWhatFlushCompleted:
                 {
                     if (mState != FLUSHING) {
-                        ALOGW("received FlushCompleted message in state %d",
-                                mState);
+                        ALOGW("received FlushCompleted message in state %d/%s",
+                                mState, stateString(mState).c_str());
                         break;
                     }
 
@@ -2719,7 +3603,7 @@
                         mCodec->signalResume();
                     }
 
-                    (new AMessage)->postReply(mReplyID);
+                    postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
                     break;
                 }
 
@@ -2731,14 +3615,18 @@
 
         case kWhatInit:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState != UNINITIALIZED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             setState(INITIALIZING);
 
@@ -2763,6 +3651,9 @@
             if (msg->findMessage("on-frame-rendered", &notify)) {
                 mOnFrameRenderedNotification = notify;
             }
+            if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
+                mOnFirstTunnelFrameReadyNotification = notify;
+            }
             break;
         }
 
@@ -2800,14 +3691,18 @@
 
         case kWhatConfigure:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState != INITIALIZED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             sp<RefBase> obj;
             CHECK(msg->findObject("surface", &obj));
 
@@ -2820,7 +3715,7 @@
             }
 
             if (obj != NULL) {
-                if (!format->findInt32("allow-frame-drop", &mAllowFrameDroppingBySurface)) {
+                if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
                     // allow frame dropping by surface by default
                     mAllowFrameDroppingBySurface = true;
                 }
@@ -2880,6 +3775,19 @@
 
             extractCSD(format);
 
+            int32_t tunneled;
+            if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                ALOGI("Configuring TUNNELED video playback.");
+                mTunneled = true;
+            } else {
+                mTunneled = false;
+            }
+
+            int32_t background = 0;
+            if (format->findInt32("android._background-mode", &background) && background) {
+                androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
+            }
+
             mCodec->initiateConfigureComponent(format);
             break;
         }
@@ -2945,15 +3853,19 @@
         case kWhatCreateInputSurface:
         case kWhatSetInputSurface:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             // Must be configured, but can't have been started yet.
             if (mState != CONFIGURED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             if (msg->what() == kWhatCreateInputSurface) {
                 mCodec->initiateCreateInputSurface();
@@ -2968,9 +3880,6 @@
         }
         case kWhatStart:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (mState == FLUSHED) {
                 setState(STARTED);
                 if (mHavePendingInputBuffers) {
@@ -2978,13 +3887,25 @@
                     mHavePendingInputBuffers = false;
                 }
                 mCodec->signalResume();
-                PostReplyWithError(replyID, OK);
+                PostReplyWithError(msg, OK);
                 break;
             } else if (mState != CONFIGURED) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
+
             mReplyID = replyID;
             setState(STARTING);
 
@@ -2992,15 +3913,42 @@
             break;
         }
 
-        case kWhatStop:
+        case kWhatStop: {
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            [[fallthrough]];
+        }
         case kWhatRelease:
         {
             State targetState =
                 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
 
+            if ((mState == RELEASING && targetState == UNINITIALIZED)
+                    || (mState == STOPPING && targetState == INITIALIZED)) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
+            sp<AMessage> asyncNotify;
+            (void)msg->findMessage("async", &asyncNotify);
+            // post asyncNotify if going out of scope.
+            struct AsyncNotifyPost {
+                AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
+                ~AsyncNotifyPost() {
+                    if (mAsyncNotify) {
+                        mAsyncNotify->post();
+                    }
+                }
+                void clear() { mAsyncNotify.clear(); }
+            private:
+                sp<AMessage> mAsyncNotify;
+            } asyncNotifyPost{asyncNotify};
+
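AsyncNotifyPost above is a scope guard: unless clear() is called, the asynchronous release notification is posted on whichever path leaves the handler. A generic standalone version of the same idea looks roughly like this (ScopeGuard is an illustrative name, not MediaCodec code):

// Illustrative only: a "run on scope exit unless dismissed" guard.
#include <functional>
#include <utility>

class ScopeGuard {
public:
    explicit ScopeGuard(std::function<void()> onExit) : mOnExit(std::move(onExit)) {}
    ~ScopeGuard() { if (mOnExit) mOnExit(); }   // fires on every exit path...
    void clear() { mOnExit = nullptr; }         // ...unless explicitly dismissed
    ScopeGuard(const ScopeGuard &) = delete;
    ScopeGuard &operator=(const ScopeGuard &) = delete;
private:
    std::function<void()> mOnExit;
};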
             // already stopped/released
             if (mState == UNINITIALIZED && mReleasedByResourceManager) {
                 sp<AMessage> response = new AMessage;
@@ -3012,7 +3960,13 @@
             int32_t reclaimed = 0;
             msg->findInt32("reclaimed", &reclaimed);
             if (reclaimed) {
-                mReleasedByResourceManager = true;
+                if (!mReleasedByResourceManager) {
+                    // notify the async client
+                    if (mFlags & kFlagIsAsync) {
+                        onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    }
+                    mReleasedByResourceManager = true;
+                }
 
                 int32_t force = 0;
                 msg->findInt32("force", &force);
@@ -3024,10 +3978,6 @@
                     response->setInt32("err", WOULD_BLOCK);
                     response->postReply(replyID);
 
-                    // notify the async client
-                    if (mFlags & kFlagIsAsync) {
-                        onError(DEAD_OBJECT, ACTION_CODE_FATAL);
-                    }
                     break;
                 }
             }
@@ -3058,18 +4008,26 @@
                 break;
             }
 
-            // If we're flushing, stopping, configuring or starting  but
+            // If we're flushing, configuring or starting  but
             // received a release request, post the reply for the pending call
             // first, and consider it done. The reply token will be replaced
             // after this, and we'll no longer be able to reply.
-            if (mState == FLUSHING || mState == STOPPING
-                    || mState == CONFIGURING || mState == STARTING) {
-                (new AMessage)->postReply(mReplyID);
+            if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
+                // mReply is always set if in these states.
+                postPendingRepliesAndDeferredMessages(
+                        std::string("kWhatRelease:") + stateString(mState));
+            }
+            // If we're stopping but received a release request, post the reply
+            // for the pending call if necessary. Note that the reply may already
+            // have been posted due to an error.
+            if (mState == STOPPING && mReplyID) {
+                postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
             }
 
             if (mFlags & kFlagSawMediaServerDie) {
                 // It's dead, Jim. Don't expect initiateShutdown to yield
                 // any useful results now...
+                // Any pending reply would have been handled at kWhatError.
                 setState(UNINITIALIZED);
                 if (targetState == UNINITIALIZED) {
                     mComponentName.clear();
@@ -3083,12 +4041,12 @@
             // reply now with an error to unblock the client, client can
             // release after the failure (instead of ANR).
             if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
+                // Any pending reply would have been handled at kWhatError.
                 PostReplyWithError(replyID, getStickyError());
                 break;
             }
 
-            sp<AMessage> asyncNotify;
-            if (msg->findMessage("async", &asyncNotify) && asyncNotify != nullptr) {
+            if (asyncNotify != nullptr) {
                 if (mSurface != NULL) {
                     if (!mReleaseSurface) {
                         uint64_t usage = 0;
@@ -3112,6 +4070,12 @@
                 }
             }
 
+            if (mReplyID) {
+                // State transition replies are handled above, so this reply
+                // would not be related to state transition. As we are
+                // shutting down the component, just fail the operation.
+                postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
+            }
             mReplyID = replyID;
             setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
 
@@ -3126,8 +4090,8 @@
 
             if (asyncNotify != nullptr) {
                 mResourceManagerProxy->markClientForPendingRemoval();
-                (new AMessage)->postReply(mReplyID);
-                mReplyID = 0;
+                postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                asyncNotifyPost.clear();
                 mAsyncReleaseCompleteNotification = asyncNotify;
             }
 
@@ -3298,17 +4262,21 @@
 
         case kWhatSignalEndOfInputStream:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (!isExecuting() || !mHaveInputSurface) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
-                PostReplyWithError(replyID, getStickyError());
+                PostReplyWithError(msg, getStickyError());
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             mCodec->signalEndOfInputStream();
             break;
@@ -3350,23 +4318,32 @@
 
         case kWhatFlush:
         {
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
-
             if (!isExecuting()) {
-                PostReplyWithError(replyID, INVALID_OPERATION);
+                PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
-                PostReplyWithError(replyID, getStickyError());
+                PostReplyWithError(msg, getStickyError());
                 break;
             }
 
+            if (mReplyID) {
+                mDeferredMessages.push_back(msg);
+                break;
+            }
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             mReplyID = replyID;
             // TODO: skip flushing if already FLUSHED
             setState(FLUSHING);
 
             mCodec->signalFlush();
             returnBuffersToCodec();
+            TunnelPeekState previousState = mTunnelPeekState;
+            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+            ALOGV("TunnelPeekState: %s -> %s",
+                  asString(previousState),
+                  asString(TunnelPeekState::kEnabledNoBuffer));
             break;
         }
 
@@ -3474,6 +4451,108 @@
     }
 }
 
+void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
+    sp<AMessage> format = buffer->format();
+    if (mOutputFormat == format) {
+        return;
+    }
+    if (mFlags & kFlagUseBlockModel) {
+        sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
+        sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
+        std::set<std::string> keys;
+        size_t numEntries = diff1->countEntries();
+        AMessage::Type type;
+        for (size_t i = 0; i < numEntries; ++i) {
+            keys.emplace(diff1->getEntryNameAt(i, &type));
+        }
+        numEntries = diff2->countEntries();
+        for (size_t i = 0; i < numEntries; ++i) {
+            keys.emplace(diff2->getEntryNameAt(i, &type));
+        }
+        sp<WrapperObject<std::set<std::string>>> changedKeys{
+            new WrapperObject<std::set<std::string>>{std::move(keys)}};
+        buffer->meta()->setObject("changedKeys", changedKeys);
+    }
+    mOutputFormat = format;
+    mapFormat(mComponentName, format, nullptr, true);
+    ALOGV("[%s] output format changed to: %s",
+            mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
+
+    if (mSoftRenderer == NULL &&
+            mSurface != NULL &&
+            (mFlags & kFlagUsesSoftwareRenderer)) {
+        AString mime;
+        CHECK(mOutputFormat->findString("mime", &mime));
+
+        // TODO: propagate color aspects to software renderer to allow better
+        // color conversion to RGB. For now, just mark dataspace for YUV
+        // rendering.
+        int32_t dataSpace;
+        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
+            ALOGD("[%s] setting dataspace on output surface to #%x",
+                    mComponentName.c_str(), dataSpace);
+            int err = native_window_set_buffers_data_space(
+                    mSurface.get(), (android_dataspace)dataSpace);
+            ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
+        }
+        if (mOutputFormat->contains("hdr-static-info")) {
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
+                setNativeWindowHdrMetadata(mSurface.get(), &info);
+            }
+        }
+
+        sp<ABuffer> hdr10PlusInfo;
+        if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+                && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+            native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
+                    hdr10PlusInfo->size(), hdr10PlusInfo->data());
+        }
+
+        if (mime.startsWithIgnoreCase("video/")) {
+            mSurface->setDequeueTimeout(-1);
+            mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
+        }
+    }
+
+    requestCpuBoostIfNeeded();
+
+    if (mFlags & kFlagIsEncoder) {
+        // Before we announce the format change we should
+        // collect codec specific data and amend the output
+        // format as necessary.
+        int32_t flags = 0;
+        (void) buffer->meta()->findInt32("flags", &flags);
+        if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
+                && !mOwnerName.startsWith("codec2::")) {
+            status_t err =
+                amendOutputFormatWithCodecSpecificData(buffer);
+
+            if (err != OK) {
+                ALOGE("Codec spit out malformed codec "
+                      "specific data!");
+            }
+        }
+    }
+    if (mFlags & kFlagIsAsync) {
+        onOutputFormatChanged();
+    } else {
+        mFlags |= kFlagOutputFormatChanged;
+        postActivityNotificationIfPossible();
+    }
+
+    // Notify mCrypto of video resolution changes
+    if (mCrypto != NULL) {
+        int32_t left, top, right, bottom, width, height;
+        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+            mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
+        } else if (mOutputFormat->findInt32("width", &width)
+                && mOutputFormat->findInt32("height", &height)) {
+            mCrypto->notifyResolution(width, height);
+        }
+    }
+}
+
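handleOutputFormatChangeIfNeeded() relies on AMessage::changesFrom() to gather, in both directions, the keys that differ between the old and the new output format before publishing them as "changedKeys". A minimal standalone sketch of that symmetric comparison, with std::map standing in for AMessage (changedKeys() below is an illustrative helper, not MediaCodec code):

// Illustrative only: computing the set of keys that changed between two formats,
// roughly analogous to the diff1/diff2 logic above.
#include <map>
#include <set>
#include <string>

static std::set<std::string> changedKeys(const std::map<std::string, int> &oldFormat,
                                         const std::map<std::string, int> &newFormat) {
    std::set<std::string> keys;
    for (const auto &entry : oldFormat) {
        auto it = newFormat.find(entry.first);
        if (it == newFormat.end() || it->second != entry.second) {
            keys.insert(entry.first);   // removed or changed in the new format
        }
    }
    for (const auto &entry : newFormat) {
        auto it = oldFormat.find(entry.first);
        if (it == oldFormat.end() || it->second != entry.second) {
            keys.insert(entry.first);   // added or changed relative to the old format
        }
    }
    return keys;
}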
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
 
@@ -3807,11 +4886,44 @@
         buffer->meta()->setInt32("csd", true);
     }
 
+    if (mTunneled) {
+        TunnelPeekState previousState = mTunnelPeekState;
+        switch (mTunnelPeekState) {
+            case TunnelPeekState::kEnabledNoBuffer:
+                buffer->meta()->setInt32("tunnel-first-frame", 1);
+                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(mTunnelPeekState));
+                break;
+            case TunnelPeekState::kDisabledNoBuffer:
+                buffer->meta()->setInt32("tunnel-first-frame", 1);
+                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(mTunnelPeekState));
+                break;
+            default:
+                break;
+        }
+    }
+
     status_t err = OK;
     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
         AString *errorDetailMsg;
         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
-
+        // Notify mCrypto of video resolution changes
+        if (mTunneled && mCrypto != NULL) {
+            int32_t width, height;
+            if (mInputFormat->findInt32("width", &width) &&
+                mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
+                if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
+                    mTunneledInputWidth = width;
+                    mTunneledInputHeight = height;
+                    mCrypto->notifyResolution(width, height);
+                }
+            }
+        }
         err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
                 (mFlags & kFlagIsSecure),
@@ -3840,7 +4952,7 @@
         info->mOwnedByClient = false;
         info->mData.clear();
 
-        statsBufferSent(timeUs);
+        statsBufferSent(timeUs, buffer);
     }
 
     return err;
@@ -3931,7 +5043,15 @@
                 }
             }
         }
-        mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
+        status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
+
+        if (err == NO_INIT) {
+            ALOGE("rendering to non-initilized(obsolete) surface");
+            return err;
+        }
+        if (err != OK) {
+            ALOGI("rendring output error %d", err);
+        }
     } else {
         mBufferChannel->discardBuffer(buffer);
     }
@@ -3939,19 +5059,31 @@
     return OK;
 }
 
-ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
     List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
 
     if (availBuffers->empty()) {
+        return nullptr;
+    }
+
+    return &mPortBuffers[portIndex][*availBuffers->begin()];
+}
+
+ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    BufferInfo *info = peekNextPortBuffer(portIndex);
+    if (!info) {
         return -EAGAIN;
     }
 
+    List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
     size_t index = *availBuffers->begin();
+    CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
 
-    BufferInfo *info = &mPortBuffers[portIndex][index];
     CHECK(!info->mOwnedByClient);
     {
         Mutex::Autolock al(mBufferLock);
@@ -3985,6 +5117,10 @@
             return ALREADY_EXISTS;
         }
 
+        // in case we don't connect, ensure that we don't signal the surface is
+        // connected to the screen
+        mIsSurfaceToScreen = false;
+
         err = nativeWindowConnect(surface.get(), "connectToSurface");
         if (err == OK) {
             // Require a fresh set of buffers after each connect by using a unique generation
@@ -4010,6 +5146,10 @@
             if (!mAllowFrameDroppingBySurface) {
                 disableLegacyBufferDropPostQ(surface);
             }
+            // keep track whether or not the buffers of the connected surface go to the screen
+            int result = 0;
+            surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
+            mIsSurfaceToScreen = result != 0;
         }
     }
     // do not return ALREADY_EXISTS unless surfaces are the same
@@ -4027,6 +5167,7 @@
         }
         // assume disconnected even on error
         mSurface.clear();
+        mIsSurfaceToScreen = false;
     }
     return err;
 }
@@ -4071,13 +5212,13 @@
 
         msg->setInt64("timeUs", timeUs);
 
-        statsBufferReceived(timeUs);
-
         int32_t flags;
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
         msg->setInt32("flags", flags);
 
+        statsBufferReceived(timeUs, buffer);
+
         msg->post();
     }
 }
@@ -4144,6 +5285,8 @@
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
     updateLowLatency(params);
+    mapFormat(mComponentName, params, nullptr, false);
+    updateTunnelPeek(params);
     mCodec->signalSetParameters(params);
 
     return OK;
@@ -4193,6 +5336,33 @@
     return OK;
 }
 
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+        std::string origin, status_t err /* = OK */) {
+    sp<AMessage> response{new AMessage};
+    if (err != OK) {
+        response->setInt32("err", err);
+    }
+    postPendingRepliesAndDeferredMessages(origin, response);
+}
+
+void MediaCodec::postPendingRepliesAndDeferredMessages(
+        std::string origin, const sp<AMessage> &response) {
+    LOG_ALWAYS_FATAL_IF(
+            !mReplyID,
+            "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
+            origin.c_str(),
+            mLastReplyOrigin.c_str());
+    mLastReplyOrigin = origin;
+    response->postReply(mReplyID);
+    mReplyID.clear();
+    ALOGV_IF(!mDeferredMessages.empty(),
+            "posting %zu deferred messages", mDeferredMessages.size());
+    for (sp<AMessage> msg : mDeferredMessages) {
+        msg->post();
+    }
+    mDeferredMessages.clear();
+}
+
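postPendingRepliesAndDeferredMessages() is one half of a pattern used throughout this change: while mReplyID is outstanding, newly arriving control messages (kWhatInit, kWhatConfigure, kWhatStart, kWhatStop, kWhatFlush, kWhatSignalEndOfInputStream, ...) are pushed onto mDeferredMessages, and they are re-posted once the pending reply goes out. A minimal standalone sketch of that defer-and-replay idea, with std::function standing in for AMessage (DeferringHandler is an illustrative name, not MediaCodec code):

// Illustrative only: defer incoming requests while a reply is pending,
// then replay them once the reply has been posted.
#include <deque>
#include <functional>
#include <utility>

class DeferringHandler {
public:
    // Called for each incoming control request.
    void handle(std::function<void()> request) {
        if (mReplyPending) {
            mDeferred.push_back(std::move(request));   // like mDeferredMessages.push_back(msg)
            return;
        }
        mReplyPending = true;
        request();   // kicks off an async operation; its completion calls postPendingReply()
    }

    // Called when the in-flight operation completes (successfully or with an error).
    void postPendingReply() {
        mReplyPending = false;
        std::deque<std::function<void()>> deferred;
        deferred.swap(mDeferred);                      // detach before re-entering handle()
        for (auto &request : deferred) {
            handle(std::move(request));                // like re-posting the deferred messages
        }
    }

private:
    bool mReplyPending = false;
    std::deque<std::function<void()>> mDeferred;
};

The real code re-posts the deferred AMessages to the looper rather than handling them inline, which keeps them ordered with any other work already queued.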
 std::string MediaCodec::stateString(State state) {
     const char *rval = NULL;
     char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index ac54fa1..6243828 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -44,6 +44,7 @@
 #include <cutils/properties.h>
 
 #include <algorithm>
+#include <regex>
 
 namespace android {
 
@@ -209,9 +210,12 @@
             ALOGD("ignored a null builder");
             continue;
         }
-        mInitCheck = builder->buildMediaCodecList(&writer);
-        if (mInitCheck != OK) {
-            break;
+        auto currentCheck = builder->buildMediaCodecList(&writer);
+        if (currentCheck != OK) {
+            ALOGD("ignored failed builder");
+            continue;
+        } else {
+            mInitCheck = currentCheck;
         }
     }
     writer.writeGlobalSettings(mGlobalSettings);
@@ -345,6 +349,14 @@
 void MediaCodecList::findMatchingCodecs(
         const char *mime, bool encoder, uint32_t flags,
         Vector<AString> *matches) {
+    sp<AMessage> format;        // initializes as clear/null
+    findMatchingCodecs(mime, encoder, flags, format, matches);
+}
+
+//static
+void MediaCodecList::findMatchingCodecs(
+        const char *mime, bool encoder, uint32_t flags, sp<AMessage> format,
+        Vector<AString> *matches) {
     matches->clear();
 
     const sp<IMediaCodecList> list = getInstance();
@@ -365,14 +377,22 @@
 
         const sp<MediaCodecInfo> info = list->getCodecInfo(matchIndex);
         CHECK(info != nullptr);
+
         AString componentName = info->getCodecName();
 
+        if (!codecHandlesFormat(mime, info, format)) {
+            ALOGV("skipping codec '%s' which doesn't satisfy format %s",
+                  componentName.c_str(), format->debugString(2).c_str());
+            continue;
+        }
+
         if ((flags & kHardwareCodecsOnly) && isSoftwareCodec(componentName)) {
             ALOGV("skipping SW codec '%s'", componentName.c_str());
-        } else {
-            matches->push(componentName);
-            ALOGV("matching '%s'", componentName.c_str());
+            continue;
         }
+
+        matches->push(componentName);
+        ALOGV("matching '%s'", componentName.c_str());
     }
 
     if (flags & kPreferSoftwareCodecs ||
@@ -381,4 +401,118 @@
     }
 }
 
+/*static*/
+bool MediaCodecList::codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info,
+                                        sp<AMessage> format) {
+
+    if (format == nullptr) {
+        ALOGD("codecHandlesFormat: no format, so no extra checks");
+        return true;
+    }
+
+    sp<MediaCodecInfo::Capabilities> capabilities = info->getCapabilitiesFor(mime);
+
+    // ... no capabilities listed means 'handle it all'
+    if (capabilities == nullptr) {
+        ALOGD("codecHandlesFormat: no capabilities for refinement");
+        return true;
+    }
+
+    const sp<AMessage> &details = capabilities->getDetails();
+
+    // if parsing the capabilities fails, ignore this particular codec
+    // currently video-centric evaluation
+    //
+    // TODO: we would like this to handle the same set of properties as
+    // MediaCodecInfo::isFormatSupported(); not yet handled here:
+    // profile, level, bitrate, features
+
+    bool isVideo = false;
+    if (strncmp(mime, "video/", 6) == 0) {
+        isVideo = true;
+    }
+
+    if (isVideo) {
+        int width = -1;
+        int height = -1;
+
+        if (format->findInt32("height", &height) && format->findInt32("width", &width)) {
+
+            // is it within the supported size range of the codec?
+            AString sizeRange;
+            AString minSize, maxSize;
+            AString minWidth, minHeight;
+            AString maxWidth, maxHeight;
+            if (!details->findString("size-range", &sizeRange)
+                || !splitString(sizeRange, "-", &minSize, &maxSize)) {
+                ALOGW("Unable to parse size-range from codec info");
+                return false;
+            }
+            if (!splitString(minSize, "x", &minWidth, &minHeight)) {
+                if (!splitString(minSize, "*", &minWidth, &minHeight)) {
+                    ALOGW("Unable to parse size-range/min-size from codec info");
+                    return false;
+                }
+            }
+            if (!splitString(maxSize, "x", &maxWidth, &maxHeight)) {
+                if (!splitString(maxSize, "*", &maxWidth, &maxHeight)) {
+                    ALOGW("Unable to fully parse size-range/max-size from codec info");
+                    return false;
+                }
+            }
+
+            // strtol() returns 0 if unable to parse a number, which works for our later tests
+            int minW = strtol(minWidth.c_str(), NULL, 10);
+            int minH = strtol(minHeight.c_str(), NULL, 10);
+            int maxW = strtol(maxWidth.c_str(), NULL, 10);
+            int maxH = strtol(maxHeight.c_str(), NULL, 10);
+
+            if (minW == 0 || minH == 0 || maxW == 0 || maxH == 0) {
+                ALOGW("Unable to parse values from size-range from codec info");
+                return false;
+            }
+
+            // finally, comparison time
+            if (width < minW || width > maxW || height < minH || height > maxH) {
+                ALOGV("format %dx%d outside of allowed %dx%d-%dx%d",
+                      width, height, minW, minH, maxW, maxH);
+                // at this point, it's a rejection, UNLESS
+                // the codec allows swapping width and height
+                int32_t swappable;
+                if (!details->findInt32("feature-can-swap-width-height", &swappable)
+                    || swappable == 0) {
+                    return false;
+                }
+                // NB: deliberate comparison of height vs width limits (and width vs height)
+                if (height < minW || height > maxW || width < minH || width > maxH) {
+                    return false;
+                }
+            }
+
+            // @ 'alignment' [e.g. "2x2" which tells us that both dimensions must be even]
+            // no alignment == we're ok with anything
+            AString alignment, alignWidth, alignHeight;
+            if (details->findString("alignment", &alignment)) {
+                if (splitString(alignment, "x", &alignWidth, &alignHeight) ||
+                    splitString(alignment, "*", &alignWidth, &alignHeight)) {
+                    int wAlign = strtol(alignWidth.c_str(), NULL, 10);
+                    int hAlign = strtol(alignHeight.c_str(), NULL, 10);
+                    // strtol() returns 0 if failing to parse, treat as "no restriction"
+                    if (wAlign > 0 && hAlign > 0) {
+                         if ((width % wAlign) != 0 || (height % hAlign) != 0) {
+                            ALOGV("format dimensions %dx%d not aligned to %dx%d",
+                                 width, height, wAlign, hAlign);
+                            return false;
+                         }
+                    }
+                }
+            }
+        }
+    }
+
+    // haven't found a reason to discard this one
+    return true;
+}
+
 }  // namespace android
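codecHandlesFormat() above rejects a codec when the requested width and height fall outside its advertised "size-range" (for example "64x64-4096x4096"), with an escape hatch when feature-can-swap-width-height is set. A minimal standalone sketch of the basic range check, using sscanf instead of AString splitting (sizeInRange() is an illustrative helper, not part of MediaCodecList):

// Illustrative only: checking a requested resolution against a "size-range"
// capability string. The real parser above also accepts '*' as the dimension
// separator and considers the width/height swap feature.
#include <cstdio>

static bool sizeInRange(const char *sizeRange, int width, int height) {
    int minW = 0, minH = 0, maxW = 0, maxH = 0;
    if (std::sscanf(sizeRange, "%dx%d-%dx%d", &minW, &minH, &maxW, &maxH) != 4) {
        return false;   // unparseable range: reject, matching the conservative choice above
    }
    return width >= minW && width <= maxW && height >= minH && height <= maxH;
}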
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 1395c27..0107c32 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -30,6 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
@@ -168,9 +169,7 @@
 }
 
 status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
-    sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, this);
-    msg->setInt64("stop-time-us", stopTimeUs);
-    return postSynchronouslyAndReturnError(msg);
+    return mSource->setStopTimeUs(stopTimeUs);
 }
 
 status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
@@ -188,19 +187,11 @@
 }
 
 void MediaCodecSource::Puller::stop() {
-    bool interrupt = false;
-    {
-        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
-        // stop.
-        Mutexed<Queue>::Locked queue(mQueue);
-        queue->mPulling = false;
-        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
-        queue->flush(); // flush any unprocessed pulled buffers
-    }
-
-    if (interrupt) {
-        interruptSource();
-    }
+    // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
+    // stop.
+    Mutexed<Queue>::Locked queue(mQueue);
+    queue->mPulling = false;
+    queue->flush(); // flush any unprocessed pulled buffers
 }
 
 void MediaCodecSource::Puller::interruptSource() {
@@ -435,6 +426,30 @@
     buffer->release();
 }
 
+status_t MediaCodecSource::setEncodingBitrate(int32_t bitRate) {
+    ALOGV("setEncodingBitrate (%d)", bitRate);
+
+    if (mEncoder == NULL) {
+        ALOGW("setEncodingBitrate (%d) : mEncoder is null", bitRate);
+        return BAD_VALUE;
+    }
+
+    sp<AMessage> params = new AMessage;
+    params->setInt32("video-bitrate", bitRate);
+
+    return mEncoder->setParameters(params);
+}
+
+status_t MediaCodecSource::requestIDRFrame() {
+    if (mEncoder == NULL) {
+        ALOGW("requestIDRFrame : mEncoder is null");
+        return BAD_VALUE;
+    } else {
+        mEncoder->requestIDRFrame();
+        return OK;
+    }
+}
+
 MediaCodecSource::MediaCodecSource(
         const sp<ALooper> &looper,
         const sp<AMessage> &outputFormat,
@@ -660,9 +675,9 @@
     if (mStopping && reachedEOS) {
         ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
         if (mPuller != NULL) {
-            mPuller->stopSource();
+            mPuller->interruptSource();
         }
-        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
+        ALOGI("source (%s) stopped", mIsVideo ? "video" : "audio");
         // posting reply to everyone that's waiting
         List<sp<AReplyToken>>::iterator it;
         for (it = mStopReplyIDQueue.begin();
@@ -690,6 +705,9 @@
 status_t MediaCodecSource::feedEncoderInputBuffers() {
     MediaBufferBase* mbuf = NULL;
     while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
+        if (!mEncoder) {
+            return BAD_VALUE;
+        }
         size_t bufferIndex = *mAvailEncoderInputIndices.begin();
         mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
 
@@ -744,6 +762,23 @@
             memcpy(inbuf->data(), mbuf->data(), size);
 
             if (mIsVideo) {
+                int32_t ds = 0;
+                if (mbuf->meta_data().findInt32(kKeyColorSpace, &ds)
+                        && ds != HAL_DATASPACE_UNKNOWN) {
+                    android_dataspace dataspace = static_cast<android_dataspace>(ds);
+                    ColorUtils::convertDataSpaceToV0(dataspace);
+                    ALOGD("Updating dataspace to %x", dataspace);
+                    int32_t standard, transfer, range;
+                    ColorUtils::getColorConfigFromDataSpace(
+                            dataspace, &range, &standard, &transfer);
+                    sp<AMessage> msg = new AMessage;
+                    msg->setInt32(KEY_COLOR_STANDARD, standard);
+                    msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+                    msg->setInt32(KEY_COLOR_RANGE, range);
+                    msg->setInt32("android._dataspace", dataspace);
+                    mEncoder->setParameters(msg);
+                }
+
                 // video encoder will release MediaBuffer when done
                 // with underlying data.
                 inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
@@ -851,7 +886,7 @@
     {
         int32_t eos = 0;
         if (msg->findInt32("eos", &eos) && eos) {
-            ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
+            ALOGI("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
             signalEOS();
             break;
         }
@@ -1069,12 +1104,7 @@
         if (generation != mGeneration) {
              break;
         }
-
-        if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
-            ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
-            mPuller->interruptSource();
-            ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
-        }
+        ALOGD("source (%s) stopping stalled", mIsVideo ? "video" : "audio");
         signalEOS();
         break;
     }
@@ -1106,7 +1136,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         }
 
         sp<AMessage> response = new AMessage;
@@ -1126,7 +1156,7 @@
         if (mFlags & FLAG_USE_SURFACE_INPUT) {
             sp<AMessage> params = new AMessage;
             params->setInt64("stop-time-us", stopTimeUs);
-            err = mEncoder->setParameters(params);
+            err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
         } else {
             err = mPuller->setStopTimeUs(stopTimeUs);
         }
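
The two new public methods above (setEncodingBitrate() and requestIDRFrame()) simply
forward to MediaCodec::setParameters() after a null check. A minimal caller-side sketch,
assuming looper, outputFormat and cameraSource are already set up elsewhere:

    sp<MediaCodecSource> encoderSource =
            MediaCodecSource::Create(looper, outputFormat, cameraSource);
    if (encoderSource != nullptr && encoderSource->start() == OK) {
        // Adjust the video bitrate mid-stream; returns BAD_VALUE once the encoder is gone.
        if (encoderSource->setEncodingBitrate(2000000 /* bps */) != OK) {
            ALOGW("bitrate update rejected");
        }
        // Ask the encoder for an IDR (sync) frame at the next opportunity.
        encoderSource->requestIDRFrame();
    }
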
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index c6e753d..2520e2a 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -59,7 +59,7 @@
             sp<IMediaExtractor> ex;
             mediaExService->makeExtractor(
                     CreateIDataSourceFromDataSource(source),
-                    mime ? std::make_unique<std::string>(mime) : nullptr,
+                    mime ? std::optional<std::string>(mime) : std::nullopt,
                     &ex);
             return ex;
         } else {
@@ -188,11 +188,11 @@
     // sanity check struct version, uuid, name
     if (plugin->def.def_version != EXTRACTORDEF_VERSION_NDK_V1 &&
             plugin->def.def_version != EXTRACTORDEF_VERSION_NDK_V2) {
-        ALOGE("don't understand extractor format %u, ignoring.", plugin->def.def_version);
+        ALOGW("don't understand extractor format %u, ignoring.", plugin->def.def_version);
         return;
     }
     if (memcmp(&plugin->def.extractor_uuid, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", 16) == 0) {
-        ALOGE("invalid UUID, ignoring");
+        ALOGW("invalid UUID, ignoring");
         return;
     }
     if (plugin->def.extractor_name == NULL || strlen(plugin->def.extractor_name) == 0) {
@@ -244,13 +244,18 @@
             void *libHandle = android_dlopen_ext(
                     libPath.string(),
                     RTLD_NOW | RTLD_LOCAL, dlextinfo);
-            CHECK(libHandle != nullptr)
-                    << "couldn't dlopen(" << libPath.string() << ") " << strerror(errno);
+            if (libHandle == nullptr) {
+                ALOGI("dlopen(%s) reported error %s", libPath.string(), strerror(errno));
+                continue;
+            }
 
             GetExtractorDef getDef =
                 (GetExtractorDef) dlsym(libHandle, "GETEXTRACTORDEF");
-            CHECK(getDef != nullptr)
-                    << libPath.string() << " does not contain sniffer";
+            if (getDef == nullptr) {
+                ALOGI("no sniffer found in %s", libPath.string());
+                dlclose(libHandle);
+                continue;
+            }
 
             ALOGV("registering sniffer for %s", libPath.string());
             RegisterExtractor(
@@ -258,7 +263,7 @@
         }
         closedir(libDir);
     } else {
-        ALOGE("couldn't opendir(%s)", libDirPath);
+        ALOGI("plugin directory not present (%s)", libDirPath);
     }
 }
 
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index cab4ebd..a946f71 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -76,6 +76,7 @@
     mFileMeta.clear();
     mWriter.clear();
     mTrackList.clear();
+    mFormatList.clear();
 }
 
 ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
@@ -92,7 +93,9 @@
     }
 
     sp<MetaData> trackMeta = new MetaData;
-    convertMessageToMetaData(format, trackMeta);
+    if (convertMessageToMetaData(format, trackMeta) != OK) {
+        return BAD_VALUE;
+    }
 
     sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
     status_t result = mWriter->addSource(newTrack);
@@ -107,6 +110,8 @@
             ALOGW("addTrack() setCaptureRate failed :%d", result);
         }
     }
+
+    mFormatList.add(format);
     return mTrackList.add(newTrack);
 }
 
@@ -175,16 +180,23 @@
 
 status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                                      int64_t timeUs, uint32_t flags) {
-    Mutex::Autolock autoLock(mMuxerLock);
-
     if (buffer.get() == NULL) {
         ALOGE("WriteSampleData() get an NULL buffer.");
         return -EINVAL;
     }
-
-    if (mState != STARTED) {
-        ALOGE("WriteSampleData() is called in invalid state %d", mState);
-        return INVALID_OPERATION;
+    {
+        /* MediaMuxer's writeSampleData() handles input from multiple tracks, so
+         * the scope of mMuxerLock is limited to this inner block. That way the
+         * current track's buffer does not have to wait for the completion of a
+         * previously submitted buffer from the same or another track. It is the
+         * responsibility of each individual track's MediaAdapter object to gate
+         * its own buffers.
+         */
+        Mutex::Autolock autoLock(mMuxerLock);
+        if (mState != STARTED) {
+            ALOGE("WriteSampleData() is called in invalid state %d", mState);
+            return INVALID_OPERATION;
+        }
     }
 
     if (trackIndex >= mTrackList.size()) {
@@ -215,9 +227,42 @@
         ALOGV("BUFFER_FLAG_EOS");
     }
 
+    sp<AMessage> bufMeta = buffer->meta();
+    int64_t val64;
+    if (bufMeta->findInt64("sample-file-offset", &val64)) {
+        sampleMetaData.setInt64(kKeySampleFileOffset, val64);
+    }
+    if (bufMeta->findInt64(
+                "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                &val64)) {
+        sampleMetaData.setInt64(kKeyLastSampleIndexInChunk, val64);
+    }
+
     sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
     // This pushBuffer will wait until the mediaBuffer is consumed.
     return currentTrack->pushBuffer(mediaBuffer);
 }
 
+ssize_t MediaMuxer::getTrackCount() {
+    Mutex::Autolock autoLock(mMuxerLock);
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackCount() must be called either in INITIALIZED or STARTED state");
+        return -1;
+    }
+    return mTrackList.size();
+}
+
+sp<AMessage> MediaMuxer::getTrackFormat(size_t idx) {
+    Mutex::Autolock autoLock(mMuxerLock);
+    if (mState != INITIALIZED && mState != STARTED) {
+        ALOGE("getTrackFormat() must be called either in INITIALIZED or STARTED state");
+        return nullptr;
+    }
+    if (idx >= mFormatList.size()) {
+        ALOGE("getTrackFormat() idx is out of range");
+        return nullptr;
+    }
+    return mFormatList[idx];
+}
+
 }  // namespace android
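
A usage sketch for the new MediaMuxer accessors, assuming fd and videoFormat exist:
getTrackCount() and getTrackFormat() return the formats previously passed to addTrack(),
and addTrack() itself now rejects formats that convertMessageToMetaData() cannot convert.

    sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    if (muxer->addTrack(videoFormat) < 0) {
        ALOGE("addTrack() rejected the format (missing required keys?)");
    }
    for (ssize_t i = 0; i < muxer->getTrackCount(); ++i) {
        sp<AMessage> fmt = muxer->getTrackFormat(i);
        if (fmt != nullptr) {
            ALOGV("track %zd format: %s", i, fmt->debugString().c_str());
        }
    }
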
diff --git a/media/libstagefright/MediaTrack.cpp b/media/libstagefright/MediaTrack.cpp
index 24ba38a..2447f5e 100644
--- a/media/libstagefright/MediaTrack.cpp
+++ b/media/libstagefright/MediaTrack.cpp
@@ -133,6 +133,14 @@
         if (format->mFormat->findInt64("target-time", &val64)) {
             meta.setInt64(kKeyTargetTime, val64);
         }
+        if (format->mFormat->findInt64("sample-file-offset", &val64)) {
+            meta.setInt64(kKeySampleFileOffset, val64);
+        }
+        if (format->mFormat->findInt64(
+                    "last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                    &val64)) {
+            meta.setInt64(kKeyLastSampleIndexInChunk, val64);
+        }
         int32_t val32;
         if (format->mFormat->findInt32("is-sync-frame", &val32)) {
             meta.setInt32(kKeyIsSyncFrame, val32);
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 050d7c2..6893324 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -50,8 +50,9 @@
       mSampleTimeUs(timeUs) {
 }
 
-NuMediaExtractor::NuMediaExtractor()
-    : mTotalBitrate(-1LL),
+NuMediaExtractor::NuMediaExtractor(EntryPoint entryPoint)
+    : mEntryPoint(entryPoint),
+      mTotalBitrate(-1LL),
       mDurationUs(-1LL) {
 }
 
@@ -93,6 +94,7 @@
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
     }
+    setEntryPointToRemoteMediaExtractor();
 
     status_t err = OK;
     if (!mCasToken.empty()) {
@@ -134,6 +136,7 @@
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
     }
+    setEntryPointToRemoteMediaExtractor();
 
     if (!mCasToken.empty()) {
         err = mImpl->setMediaCas(mCasToken);
@@ -168,6 +171,7 @@
     if (mImpl == NULL) {
         return ERROR_UNSUPPORTED;
     }
+    setEntryPointToRemoteMediaExtractor();
 
     if (!mCasToken.empty()) {
         err = mImpl->setMediaCas(mCasToken);
@@ -185,6 +189,11 @@
     return err;
 }
 
+const char* NuMediaExtractor::getName() const {
+    Mutex::Autolock autoLock(mLock);
+    return mImpl == nullptr ? nullptr : mImpl->name().string();
+}
+
 static String8 arrayToString(const std::vector<uint8_t> &array) {
     String8 result;
     for (size_t i = 0; i < array.size(); i++) {
@@ -298,8 +307,16 @@
 
     sp<MetaData> meta = mImpl->getMetaData();
 
+    if (meta == nullptr) {
+        // extractor did not publish file metadata
+        return -EINVAL;
+    }
+
     const char *mime;
-    CHECK(meta->findCString(kKeyMIMEType, &mime));
+    if (!meta->findCString(kKeyMIMEType, &mime)) {
+        // a missing mime type is treated as invalid
+        return -EINVAL;
+    }
     *format = new AMessage();
     (*format)->setString("mime", mime);
 
@@ -312,6 +329,27 @@
         (*format)->setBuffer("pssh", buf);
     }
 
+    // Copy over the slow-motion related metadata
+    const void *slomoMarkers;
+    size_t slomoMarkersSize;
+    if (meta->findData(kKeySlowMotionMarkers, &type, &slomoMarkers, &slomoMarkersSize)
+            && slomoMarkersSize > 0) {
+        sp<ABuffer> buf = new ABuffer(slomoMarkersSize);
+        memcpy(buf->data(), slomoMarkers, slomoMarkersSize);
+        (*format)->setBuffer("slow-motion-markers", buf);
+    }
+
+    int32_t temporalLayerCount;
+    if (meta->findInt32(kKeyTemporalLayerCount, &temporalLayerCount)
+            && temporalLayerCount > 0) {
+        (*format)->setInt32("temporal-layer-count", temporalLayerCount);
+    }
+
+    float captureFps;
+    if (meta->findFloat(kKeyCaptureFramerate, &captureFps) && captureFps > 0.0f) {
+        (*format)->setFloat("capture-rate", captureFps);
+    }
+
     return OK;
 }
 
@@ -324,6 +362,11 @@
 
     sp<MetaData> meta = mImpl->getMetaData();
 
+    if (meta == nullptr) {
+        // extractor did not publish file metadata
+        return -EINVAL;
+    }
+
     int64_t exifOffset, exifSize;
     if (meta->findInt64(kKeyExifOffset, &exifOffset)
      && meta->findInt64(kKeyExifSize, &exifSize)) {
@@ -468,6 +511,16 @@
     }
 }
 
+void NuMediaExtractor::setEntryPointToRemoteMediaExtractor() {
+    if (mImpl == NULL) {
+        return;
+    }
+    status_t err = mImpl->setEntryPoint(mEntryPoint);
+    if (err != OK) {
+        ALOGW("Failed to set entry point with error %d.", err);
+    }
+}
+
 ssize_t NuMediaExtractor::fetchAllTrackSamples(
         int64_t seekTimeUs, MediaSource::ReadOptions::SeekMode mode) {
     TrackInfo *minInfo = NULL;
@@ -845,4 +898,15 @@
     return ERROR_UNSUPPORTED;
 }
 
+status_t NuMediaExtractor::setLogSessionId(const String8& logSessionId) {
+    if (mImpl == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
+    status_t status = mImpl->setLogSessionId(logSessionId);
+    if (status != OK) {
+        ALOGW("Failed to set log session id: %d.", status);
+    }
+    return status;
+}
+
 }  // namespace android
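
The entry-point plumbing above lets RemoteMediaExtractor attribute extractor sessions in
its metrics. A hedged sketch of the intended call pattern; the EntryPoint scoping is
assumed from the switch in RemoteMediaExtractor::setEntryPoint, and fd, fileSize and the
session id are placeholders:

    sp<NuMediaExtractor> extractor =
            new NuMediaExtractor(NuMediaExtractor::EntryPoint::NDK_NO_JVM);
    if (extractor->setDataSource(fd, 0 /* offset */, fileSize) == OK) {
        // The entry point is forwarded to the remote extractor during setDataSource().
        const char *name = extractor->getName();
        ALOGV("using extractor '%s'", name != nullptr ? name : "<unknown>");
        extractor->setLogSessionId(String8("0123456789abcdef"));  // placeholder id
    }
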
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
new file mode 100644
index 0000000..0cc2294
--- /dev/null
+++ b/media/libstagefright/OWNERS
@@ -0,0 +1,11 @@
+set noparent
+chz@google.com
+essick@google.com
+lajos@google.com
+marcone@google.com
+taklee@google.com
+wonsik@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/PlaybackDurationAccumulator.h
new file mode 100644
index 0000000..cb5f0c4
--- /dev/null
+++ b/media/libstagefright/PlaybackDurationAccumulator.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PLAYBACK_DURATION_ACCUMULATOR_H_
+#define PLAYBACK_DURATION_ACCUMULATOR_H_
+
+namespace android {
+
+// Accumulates playback duration by processing render times of individual frames and by ignoring
+// frames rendered during inactive playbacks such as seeking, pausing, or re-buffering.
+class PlaybackDurationAccumulator {
+private:
+    // The maximum delta between render times before the playback is considered
+    // inactive (stalled) rather than continuously playing.
+    static const int64_t MAX_PRESENTATION_DURATION_NS = 500 * 1000 * 1000;
+
+public:
+    PlaybackDurationAccumulator() {
+        mPlaybackDurationNs = 0;
+        mPreviousRenderTimeNs = 0;
+    }
+
+    // Process a render time expressed in nanoseconds.
+    void processRenderTime(int64_t newRenderTimeNs) {
+        // If we detect wrap-around or out of order frames, just ignore the duration for this
+        // and the next frame.
+        if (newRenderTimeNs < mPreviousRenderTimeNs) {
+            mPreviousRenderTimeNs = 0;
+        }
+        if (mPreviousRenderTimeNs > 0) {
+            int64_t presentationDurationNs = newRenderTimeNs - mPreviousRenderTimeNs;
+            if (presentationDurationNs < MAX_PRESENTATION_DURATION_NS) {
+                mPlaybackDurationNs += presentationDurationNs;
+            }
+        }
+        mPreviousRenderTimeNs = newRenderTimeNs;
+    }
+
+    int64_t getDurationInSeconds() {
+        return mPlaybackDurationNs / 1000 / 1000 / 1000; // Nanoseconds to seconds.
+    }
+
+private:
+    // The playback duration accumulated so far.
+    int64_t mPlaybackDurationNs;
+    // The previous render time used to compute the next presentation duration.
+    int64_t mPreviousRenderTimeNs;
+};
+
+}  // namespace android
+
+#endif  // PLAYBACK_DURATION_ACCUMULATOR_H_
+
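
A small usage sketch for the accumulator above: deltas larger than the 500 ms threshold
(seek, pause, re-buffering) are treated as stalls and are not counted, so only contiguous
playback contributes to the reported duration.

    PlaybackDurationAccumulator accumulator;
    accumulator.processRenderTime(100000000);   // first frame, nothing accumulated yet
    accumulator.processRenderTime(200000000);   // +100 ms of playback
    accumulator.processRenderTime(2000000000);  // 1.8 s gap -> treated as a stall, ignored
    accumulator.processRenderTime(2100000000);  // +100 ms of playback
    // 200 ms accumulated in total, so the truncating conversion reports 0 seconds.
    int64_t playedSeconds = accumulator.getDurationInSeconds();
    ALOGV("accumulated %lld s of playback", (long long)playedSeconds);
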
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 25e43c2..baa2ca1 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -40,6 +40,16 @@
 static const char *kExtractorMime = "android.media.mediaextractor.mime";
 static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
 
+// The following are not available in frameworks/base/media/java/android/media/MediaExtractor.java
+// because they are not applicable or useful to that API.
+static const char *kExtractorEntryPoint = "android.media.mediaextractor.entry";
+static const char *kExtractorLogSessionId = "android.media.mediaextractor.logSessionId";
+
+static const char *kEntryPointSdk = "sdk";
+static const char *kEntryPointWithJvm = "ndk-with-jvm";
+static const char *kEntryPointNoJvm = "ndk-no-jvm";
+static const char *kEntryPointOther = "other";
+
 RemoteMediaExtractor::RemoteMediaExtractor(
         MediaExtractor *extractor,
         const sp<DataSource> &source,
@@ -74,6 +84,9 @@
             }
             // what else is interesting and not already available?
         }
+        // By default, we set the entry point to be "other". Clients of this
+        // class will override this value by calling setEntryPoint.
+        mMetricsItem->setCString(kExtractorEntryPoint, kEntryPointOther);
     }
 }
 
@@ -143,6 +156,33 @@
     return String8(mExtractor->name());
 }
 
+status_t RemoteMediaExtractor::setEntryPoint(EntryPoint entryPoint) {
+    const char* entryPointString;
+    switch (entryPoint) {
+        case EntryPoint::SDK:
+            entryPointString = kEntryPointSdk;
+            break;
+        case EntryPoint::NDK_WITH_JVM:
+            entryPointString = kEntryPointWithJvm;
+            break;
+        case EntryPoint::NDK_NO_JVM:
+            entryPointString = kEntryPointNoJvm;
+            break;
+        case EntryPoint::OTHER:
+            entryPointString = kEntryPointOther;
+            break;
+        default:
+            return BAD_VALUE;
+    }
+    mMetricsItem->setCString(kExtractorEntryPoint, entryPointString);
+    return OK;
+}
+
+status_t RemoteMediaExtractor::setLogSessionId(const String8& logSessionId) {
+    mMetricsItem->setCString(kExtractorLogSessionId, logSessionId.c_str());
+    return OK;
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 // static
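
For reference, a hedged sketch of how a caller is expected to use the two new setters;
the wrap() arguments (extractor, source, plugin) and the IMediaExtractor-level EntryPoint
scoping are assumed from the surrounding code rather than spelled out in this change:

    sp<IMediaExtractor> remote = RemoteMediaExtractor::wrap(extractor, source, plugin);
    if (remote != nullptr) {
        // Overrides the default "other" recorded by the constructor above.
        remote->setEntryPoint(IMediaExtractor::EntryPoint::SDK);
        // Ties this extractor session to a player/recorder log session in the metrics.
        remote->setLogSessionId(String8("0123456789abcdef"));  // placeholder id
    }
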
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 4c94baa..1f569ef 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -132,37 +132,63 @@
 }
 
 void setNativeWindowHdrMetadata(ANativeWindow *nativeWindow, HDRStaticInfo *info) {
-    struct android_smpte2086_metadata smpte2086_meta = {
-            .displayPrimaryRed = {
-                    info->sType1.mR.x * 0.00002f,
-                    info->sType1.mR.y * 0.00002f
-            },
-            .displayPrimaryGreen = {
-                    info->sType1.mG.x * 0.00002f,
-                    info->sType1.mG.y * 0.00002f
-            },
-            .displayPrimaryBlue = {
-                    info->sType1.mB.x * 0.00002f,
-                    info->sType1.mB.y * 0.00002f
-            },
-            .whitePoint = {
-                    info->sType1.mW.x * 0.00002f,
-                    info->sType1.mW.y * 0.00002f
-            },
-            .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
-            .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
-    };
+    // If mastering max and min luminance fields are 0, do not use them.
+    // It indicates the value may not be present in the stream.
+    if ((float)info->sType1.mMaxDisplayLuminance > 0.0f &&
+        (info->sType1.mMinDisplayLuminance * 0.0001f) > 0.0f) {
+        struct android_smpte2086_metadata smpte2086_meta = {
+                .displayPrimaryRed = {
+                        info->sType1.mR.x * 0.00002f,
+                        info->sType1.mR.y * 0.00002f
+                },
+                .displayPrimaryGreen = {
+                        info->sType1.mG.x * 0.00002f,
+                        info->sType1.mG.y * 0.00002f
+                },
+                .displayPrimaryBlue = {
+                        info->sType1.mB.x * 0.00002f,
+                        info->sType1.mB.y * 0.00002f
+                },
+                .whitePoint = {
+                        info->sType1.mW.x * 0.00002f,
+                        info->sType1.mW.y * 0.00002f
+                },
+                .maxLuminance = (float) info->sType1.mMaxDisplayLuminance,
+                .minLuminance = info->sType1.mMinDisplayLuminance * 0.0001f
+        };
 
-    int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
-    ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+        int err = native_window_set_buffers_smpte2086_metadata(nativeWindow, &smpte2086_meta);
+        ALOGW_IF(err != 0, "failed to set smpte2086 metadata on surface (%d)", err);
+    }
 
-    struct android_cta861_3_metadata cta861_meta = {
-            .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
-            .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
-    };
+    // If the content light level fields are 0, do not use them; a zero value
+    // indicates the field may not be present in the stream.
+    if ((float)info->sType1.mMaxContentLightLevel > 0.0f &&
+        (float)info->sType1.mMaxFrameAverageLightLevel > 0.0f) {
+        struct android_cta861_3_metadata cta861_meta = {
+                .maxContentLightLevel = (float) info->sType1.mMaxContentLightLevel,
+                .maxFrameAverageLightLevel = (float) info->sType1.mMaxFrameAverageLightLevel
+        };
 
-    err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
-    ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+        int err = native_window_set_buffers_cta861_3_metadata(nativeWindow, &cta861_meta);
+        ALOGW_IF(err != 0, "failed to set cta861_3 metadata on surface (%d)", err);
+    }
+}
+
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation) {
+
+    int transform = 0;
+    if ((rotation % 90) == 0) {
+        switch ((rotation / 90) & 3) {
+            case 1:  transform = HAL_TRANSFORM_ROT_90;  break;
+            case 2:  transform = HAL_TRANSFORM_ROT_180; break;
+            case 3:  transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0;                     break;
+        }
+    }
+
+    return native_window_set_buffers_transform(nativeWindow, transform);
 }
 
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
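
A usage sketch for the new setNativeWindowRotation() helper: only multiples of 90 degrees
map to a HAL transform, anything else falls back to the identity transform. nativeWindow
is assumed to be a valid output surface and the rotation a track-level property such as
kKeyRotation.

    int32_t rotationDegrees = 270;  // e.g. read from the track's kKeyRotation
    status_t err = setNativeWindowRotation(nativeWindow, rotationDegrees);
    if (err != OK) {
        ALOGW("failed to set rotation %d on surface (%d)", rotationDegrees, err);
    }
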
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 8b36ea5..7ce2968 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,5 +1,17 @@
 {
-  "presubmit": [
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    // writerTest fails about 5 of its 66 test cases
+    // { "name": "writerTest" },
+
+    { "name": "HEVCUtilsUnitTest" },
+    { "name": "ExtractorFactoryTest" }
+
+  ],
+
+  "presubmit-large": [
     {
       "name": "CtsMediaTestCases",
       "options": [
@@ -19,6 +31,14 @@
       ]
     }
   ],
+  "presubmit": [
+    {
+      "name": "mediacodecTest"
+    },
+    {
+      "name": "CtsMediaTranscodingTestCases"
+    }
+  ],
   "postsubmit": [
     {
        "name": "BatteryChecker_test"
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index a1e4d43..4c18f87 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -47,6 +47,16 @@
 #include <media/AudioParameter.h>
 #include <system/audio.h>
 
+// TODO: Remove these defines once mainline media is built against NDK >= 31.
+// The mp4 extractor is part of mainline and builds against NDK 29 as of
+// writing. These keys are available only from NDK 31:
+#define AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION \
+  "mpegh-profile-level-indication"
+#define AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT \
+  "mpegh-reference-channel-layout"
+#define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
+  "mpegh-compatible-sets"
+
 namespace android {
 
 static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
@@ -725,14 +735,19 @@
     }
 };
 
-static std::vector<std::pair<const char *, uint32_t>> int64Mappings {
+static std::vector<std::pair<const char*, uint32_t>> int64Mappings {
     {
-        { "exif-offset", kKeyExifOffset },
-        { "exif-size", kKeyExifSize },
-        { "target-time", kKeyTargetTime },
-        { "thumbnail-time", kKeyThumbnailTime },
-        { "timeUs", kKeyTime },
-        { "durationUs", kKeyDuration },
+        { "exif-offset", kKeyExifOffset},
+        { "exif-size", kKeyExifSize},
+        { "xmp-offset", kKeyXmpOffset},
+        { "xmp-size", kKeyXmpSize},
+        { "target-time", kKeyTargetTime},
+        { "thumbnail-time", kKeyThumbnailTime},
+        { "timeUs", kKeyTime},
+        { "durationUs", kKeyDuration},
+        { "sample-file-offset", kKeySampleFileOffset},
+        { "last-sample-index-in-chunk", kKeyLastSampleIndexInChunk},
+        { "sample-time-before-append", kKeySampleTimeBeforeAppend},
     }
 };
 
@@ -769,6 +784,8 @@
         { "sei", kKeySEI },
         { "text-format-data", kKeyTextFormatData },
         { "thumbnail-csd-hevc", kKeyThumbnailHVCC },
+        { "slow-motion-markers", kKeySlowMotionMarkers },
+        { "thumbnail-csd-av1c", kKeyThumbnailAV1C },
     }
 };
 
@@ -1078,6 +1095,25 @@
             msg->setInt32("is-adts", isADTS);
         }
 
+        int32_t mpeghProfileLevelIndication;
+        if (meta->findInt32(kKeyMpeghProfileLevelIndication, &mpeghProfileLevelIndication)) {
+            msg->setInt32(AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                    mpeghProfileLevelIndication);
+        }
+        int32_t mpeghReferenceChannelLayout;
+        if (meta->findInt32(kKeyMpeghReferenceChannelLayout, &mpeghReferenceChannelLayout)) {
+            msg->setInt32(AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                    mpeghReferenceChannelLayout);
+        }
+        if (meta->findData(kKeyMpeghCompatibleSets, &type, &data, &size)) {
+            sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+            if (buffer.get() == NULL || buffer->base() == NULL) {
+                return NO_MEMORY;
+            }
+            msg->setBuffer(AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS, buffer);
+            memcpy(buffer->data(), data, size);
+        }
+
         int32_t aacProfile = -1;
         if (meta->findInt32(kKeyAACAOT, &aacProfile)) {
             msg->setInt32("aac-profile", aacProfile);
@@ -1135,7 +1171,7 @@
         // assertion, let's be lenient for now...
         // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
 
-        size_t lengthSize __unused = 1 + (ptr[4] & 3);
+        // lengthSize can be derived as 1 + (ptr[4] & 3) if it is ever needed
 
         // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
         // violates it...
@@ -1663,13 +1699,16 @@
         meta->setInt32(kKeyColorMatrix, colorAspects.mMatrixCoeffs);
     }
 }
-
-void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
+/* Converts key and value pairs in AMessage format to MetaData format.
+ * Also checks for the presence of required keys.
+ */
+status_t convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
     AString mime;
     if (msg->findString("mime", &mime)) {
         meta->setCString(kKeyMIMEType, mime.c_str());
     } else {
-        ALOGW("did not find mime type");
+        ALOGV("did not find mime type");
+        return BAD_VALUE;
     }
 
     convertMessageToMetaDataFromMappings(msg, meta);
@@ -1697,6 +1736,12 @@
         meta->setInt32(kKeyIsSyncFrame, 1);
     }
 
+    // Mode for media transcoding.
+    int32_t isBackgroundMode;
+    if (msg->findInt32("android._background-mode", &isBackgroundMode) && isBackgroundMode != 0) {
+        meta->setInt32(isBackgroundMode, 1);
+    }
+
     int32_t avgBitrate = 0;
     int32_t maxBitrate;
     if (msg->findInt32("bitrate", &avgBitrate) && avgBitrate > 0) {
@@ -1719,6 +1764,7 @@
             meta->setInt32(kKeyHeight, height);
         } else {
             ALOGV("did not find width and/or height");
+            return BAD_VALUE;
         }
 
         int32_t sarWidth, sarHeight;
@@ -1803,14 +1849,14 @@
             }
         }
     } else if (mime.startsWith("audio/")) {
-        int32_t numChannels;
-        if (msg->findInt32("channel-count", &numChannels)) {
-            meta->setInt32(kKeyChannelCount, numChannels);
+        int32_t numChannels, sampleRate;
+        if (!msg->findInt32("channel-count", &numChannels) ||
+                !msg->findInt32("sample-rate", &sampleRate)) {
+            ALOGV("did not find channel-count and/or sample-rate");
+            return BAD_VALUE;
         }
-        int32_t sampleRate;
-        if (msg->findInt32("sample-rate", &sampleRate)) {
-            meta->setInt32(kKeySampleRate, sampleRate);
-        }
+        meta->setInt32(kKeyChannelCount, numChannels);
+        meta->setInt32(kKeySampleRate, sampleRate);
         int32_t bitsPerSample;
         if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
             meta->setInt32(kKeyBitsPerSample, bitsPerSample);
@@ -1833,6 +1879,23 @@
             meta->setInt32(kKeyIsADTS, isADTS);
         }
 
+        int32_t mpeghProfileLevelIndication = -1;
+        if (msg->findInt32(AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                &mpeghProfileLevelIndication)) {
+            meta->setInt32(kKeyMpeghProfileLevelIndication, mpeghProfileLevelIndication);
+        }
+        int32_t mpeghReferenceChannelLayout = -1;
+        if (msg->findInt32(AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                &mpeghReferenceChannelLayout)) {
+            meta->setInt32(kKeyMpeghReferenceChannelLayout, mpeghReferenceChannelLayout);
+        }
+        sp<ABuffer> mpeghCompatibleSets;
+        if (msg->findBuffer(AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+                &mpeghCompatibleSets)) {
+            meta->setData(kKeyMpeghCompatibleSets, kTypeHCOS,
+                    mpeghCompatibleSets->data(), mpeghCompatibleSets->size());
+        }
+
         int32_t aacProfile = -1;
         if (msg->findInt32("aac-profile", &aacProfile)) {
             meta->setInt32(kKeyAACAOT, aacProfile);
@@ -1900,7 +1963,8 @@
             std::vector<uint8_t> hvcc(csd0size + 1024);
             size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
             meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
-        } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+        } else if (mime == MEDIA_MIMETYPE_VIDEO_AV1 ||
+                   mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
             if (msg->findBuffer("csd-2", &csd2)) {
@@ -1925,7 +1989,8 @@
                     }
                 }
             } else {
-                ALOGW("We need csd-2!!. %s", msg->debugString().c_str());
+                ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
+                return BAD_VALUE;
             }
         } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
             meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
@@ -1991,6 +2056,7 @@
     ALOGI("converted %s to:", msg->debugString(0).c_str());
     meta->dumpToLog();
 #endif
+    return OK;
 }
 
 status_t sendMetaDataToHal(sp<MediaPlayerBase::AudioSink>& sink,
@@ -2136,8 +2202,10 @@
     }
     info->sample_rate = srate;
 
-    int32_t cmask = 0;
-    if (!meta->findInt32(kKeyChannelMask, &cmask) || cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
+    int32_t rawChannelMask;
+    audio_channel_mask_t cmask = meta->findInt32(kKeyChannelMask, &rawChannelMask) ?
+            static_cast<audio_channel_mask_t>(rawChannelMask) : CHANNEL_MASK_USE_CHANNEL_ORDER;
+    if (cmask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
         ALOGV("track of type '%s' does not publish channel mask", mime);
 
         // Try a channel count instead
@@ -2156,7 +2224,7 @@
     }
     info->duration_us = duration;
 
-    int32_t brate = -1;
+    int32_t brate = 0;
     if (!meta->findInt32(kKeyBitRate, &brate)) {
         ALOGV("track of type '%s' does not publish bitrate", mime);
     }
@@ -2178,7 +2246,11 @@
     }
     // Check if offload is possible for given format, stream type, sample rate,
     // bit rate, duration, video and streaming
-    return AudioSystem::isOffloadSupported(info);
+#ifdef DISABLE_AUDIO_SYSTEM_OFFLOAD
+    return false;
+#else
+    return AudioSystem::getOffloadSupport(info) != AUDIO_OFFLOAD_NOT_SUPPORTED;
+#endif
 }
 
 HLSTime::HLSTime(const sp<AMessage>& meta) :
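
Since convertMessageToMetaData() now returns a status instead of silently producing
incomplete metadata, callers (such as MediaMuxer::addTrack() earlier in this change) are
expected to check it. A minimal audio sketch, assuming the caller builds the format:

    sp<AMessage> format = new AMessage;
    format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
    format->setInt32("channel-count", 2);    // required for audio/* formats
    format->setInt32("sample-rate", 48000);  // required for audio/* formats
    sp<MetaData> meta = new MetaData;
    if (convertMessageToMetaData(format, meta) != OK) {
        ALOGE("format is missing a required key (mime, channel-count, sample-rate, ...)");
    }
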
diff --git a/media/libstagefright/bqhelper/Android.bp b/media/libstagefright/bqhelper/Android.bp
index 8698d33..0e2b472 100644
--- a/media/libstagefright/bqhelper/Android.bp
+++ b/media/libstagefright/bqhelper/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_defaults {
     name: "libstagefright_bufferqueue-defaults",
     double_loadable: true,
@@ -101,6 +110,7 @@
         "//apex_available:platform",
     ],
     vendor_available: false,
+    min_sdk_version: "29",
     static_libs: [
         "libgui_bufferqueue_static",
     ],
diff --git a/media/libstagefright/bqhelper/TEST_MAPPING b/media/libstagefright/bqhelper/TEST_MAPPING
new file mode 100644
index 0000000..c7f2fd8
--- /dev/null
+++ b/media/libstagefright/bqhelper/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/libstagefright/bqhelper
+{
+  "presubmit": [
+    { "name": "FrameDropper_test"}
+  ]
+}
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
index 4e83059..7e2909a 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
+++ b/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
@@ -30,6 +30,8 @@
     FrameDropper();
 
     // maxFrameRate required to be positive.
+    // A negative maxFrameRate disables dropping: shouldDrop() then always returns false.
+    // maxFrameRate == 0 is invalid and is rejected.
     status_t setMaxFrameRate(float maxFrameRate);
 
     // Returns false if max frame rate has not been set via setMaxFrameRate.
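
A short sketch of the setMaxFrameRate() semantics documented above and exercised by the
updated FrameDropper_test below: a negative rate disables dropping, zero is rejected, and
a positive rate enables time-based dropping.

    sp<FrameDropper> dropper = new FrameDropper;
    CHECK_EQ(OK, dropper->setMaxFrameRate(-1.0f));  // negative: dropping disabled
    CHECK_NE(OK, dropper->setMaxFrameRate(0.0f));   // zero: invalid, rejected
    CHECK_EQ(OK, dropper->setMaxFrameRate(30.0f));  // positive: drop frames above ~30 fps
    bool drop = dropper->shouldDrop(33333 /* timeUs of the next frame */);
    ALOGV("shouldDrop returned %d", drop);
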
diff --git a/media/libstagefright/bqhelper/tests/Android.bp b/media/libstagefright/bqhelper/tests/Android.bp
index 2fbc3d6..95953ee 100644
--- a/media/libstagefright/bqhelper/tests/Android.bp
+++ b/media/libstagefright/bqhelper/tests/Android.bp
@@ -1,15 +1,27 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_test {
     name: "FrameDropper_test",
-
+    test_suites: [
+        "device-tests",
+        "mts",
+    ],
     srcs: ["FrameDropper_test.cpp"],
-
     shared_libs: [
         "libstagefright_bufferqueue_helper",
         "libutils",
     ],
-
     cflags: [
         "-Werror",
         "-Wall",
     ],
+    compile_multilib: "first",
+
 }
diff --git a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
index 55ae77c..b18067f 100644
--- a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
+++ b/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
@@ -110,7 +110,7 @@
 };
 
 TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) {
-    EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0));
+    EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(-1.0));
     EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0));
 }
 
diff --git a/media/libstagefright/codecs/aacdec/Android.bp b/media/libstagefright/codecs/aacdec/Android.bp
index 46b3b8f..5ab49a7 100644
--- a/media/libstagefright/codecs/aacdec/Android.bp
+++ b/media/libstagefright/codecs/aacdec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_aacdec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_aacdec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_aacdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
index 168d140..157cab6 100644
--- a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -217,7 +217,7 @@
         }
         else { // handle other used encoder target levels
 
-            // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+            // Validation check: DRC presentation mode is only specified for max. 5.1 channels
             if (mStreamNrAACChan > 6) {
                 drcPresMode = 0;
             }
@@ -308,7 +308,7 @@
             } // switch()
         } // if (mEncoderTarget  == GPM_ENCODER_TARGET_LEVEL)
 
-        // sanity again
+        // validation check again
         if (newHeavy == 1) {
             newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
             newAttFactor = 127;
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 2aeddd7..92ec94f 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -38,6 +38,7 @@
 #define DRC_DEFAULT_MOBILE_DRC_HEAVY 1   /* switch for heavy compression for mobile conf */
 #define DRC_DEFAULT_MOBILE_DRC_EFFECT 3  /* MPEG-D DRC effect type; 3 => Limited playback range */
 #define DRC_DEFAULT_MOBILE_DRC_ALBUM 0  /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
+#define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS -1 /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
 #define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
 #define MAX_CHANNEL_COUNT            8  /* maximum number of audio channels that can be decoded */
 // names of properties that can be used to override the default DRC settings
@@ -230,6 +231,15 @@
     // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
     aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
 
+    mDrcCompressMode = DRC_DEFAULT_MOBILE_DRC_HEAVY;
+    mDrcTargetRefLevel = DRC_DEFAULT_MOBILE_REF_LEVEL;
+    mDrcEncTargetLevel = DRC_DEFAULT_MOBILE_ENC_LEVEL;
+    mDrcBoostFactor = DRC_DEFAULT_MOBILE_DRC_BOOST;
+    mDrcAttenuationFactor = DRC_DEFAULT_MOBILE_DRC_CUT;
+    mDrcEffectType = DRC_DEFAULT_MOBILE_DRC_EFFECT;
+    mDrcAlbumMode = DRC_DEFAULT_MOBILE_DRC_ALBUM;
+    mDrcOutputLoudness = DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS;
+
     return status;
 }
 
@@ -358,6 +368,27 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexParamAudioAndroidAacDrcPresentation:
+        {
+             OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
+                    (OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
+
+            ALOGD("get OMX_IndexParamAudioAndroidAacDrcPresentation");
+
+            if (!isValidOMXParam(aacPresParams)) {
+                return OMX_ErrorBadParameter;
+            }
+            aacPresParams->nDrcEffectType = mDrcEffectType;
+            aacPresParams->nDrcAlbumMode = mDrcAlbumMode;
+            aacPresParams->nDrcBoost = mDrcBoostFactor;
+            aacPresParams->nDrcCut = mDrcAttenuationFactor;
+            aacPresParams->nHeavyCompression = mDrcCompressMode;
+            aacPresParams->nTargetReferenceLevel = mDrcTargetRefLevel;
+            aacPresParams->nEncodedTargetLevel = mDrcEncTargetLevel;
+            aacPresParams->nDrcOutputLoudness = mDrcOutputLoudness;
+            return OMX_ErrorNone;
+        }
+
         default:
             return SimpleSoftOMXComponent::internalGetParameter(index, params);
     }
@@ -464,11 +495,13 @@
             if (aacPresParams->nDrcEffectType >= -1) {
                 ALOGV("set nDrcEffectType=%d", aacPresParams->nDrcEffectType);
                 aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, aacPresParams->nDrcEffectType);
+                mDrcEffectType = aacPresParams->nDrcEffectType;
             }
             if (aacPresParams->nDrcAlbumMode >= -1) {
                 ALOGV("set nDrcAlbumMode=%d", aacPresParams->nDrcAlbumMode);
                 aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_ALBUM_MODE,
                         aacPresParams->nDrcAlbumMode);
+                mDrcAlbumMode = aacPresParams->nDrcAlbumMode;
             }
             bool updateDrcWrapper = false;
             if (aacPresParams->nDrcBoost >= 0) {
@@ -476,34 +509,42 @@
                 mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
                         aacPresParams->nDrcBoost);
                 updateDrcWrapper = true;
+                mDrcBoostFactor = aacPresParams->nDrcBoost;
             }
             if (aacPresParams->nDrcCut >= 0) {
                 ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
                 mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
                 updateDrcWrapper = true;
+                mDrcAttenuationFactor = aacPresParams->nDrcCut;
             }
             if (aacPresParams->nHeavyCompression >= 0) {
                 ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
                 mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
                         aacPresParams->nHeavyCompression);
                 updateDrcWrapper = true;
+                mDrcCompressMode = aacPresParams->nHeavyCompression;
             }
             if (aacPresParams->nTargetReferenceLevel >= -1) {
                 ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
                 mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
                         aacPresParams->nTargetReferenceLevel);
                 updateDrcWrapper = true;
+                mDrcTargetRefLevel = aacPresParams->nTargetReferenceLevel;
             }
             if (aacPresParams->nEncodedTargetLevel >= 0) {
                 ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
                 mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
                         aacPresParams->nEncodedTargetLevel);
                 updateDrcWrapper = true;
+                mDrcEncTargetLevel = aacPresParams->nEncodedTargetLevel;
             }
             if (aacPresParams->nPCMLimiterEnable >= 0) {
                 aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE,
                         (aacPresParams->nPCMLimiterEnable != 0));
             }
+            if (aacPresParams->nDrcOutputLoudness != DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS) {
+                mDrcOutputLoudness = aacPresParams->nDrcOutputLoudness;
+            }
             if (updateDrcWrapper) {
                 mDrcWrap.update();
             }
@@ -854,6 +895,11 @@
                     // fall through
                 }
 
+                if (mDrcOutputLoudness != mStreamInfo->outputLoudness) {
+                    ALOGD("update Loudness, before = %d, now = %d", mDrcOutputLoudness, mStreamInfo->outputLoudness);
+                    mDrcOutputLoudness = mStreamInfo->outputLoudness;
+                }
+
                 /*
                  * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
                  * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
@@ -1201,6 +1247,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 5bee710..9f98aa1 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -85,6 +85,17 @@
     int32_t mOutputDelayRingBufferWritePos;
     int32_t mOutputDelayRingBufferReadPos;
     int32_t mOutputDelayRingBufferFilled;
+
+    //drc
+    int32_t mDrcCompressMode;
+    int32_t mDrcTargetRefLevel;
+    int32_t mDrcEncTargetLevel;
+    int32_t mDrcBoostFactor;
+    int32_t mDrcAttenuationFactor;
+    int32_t mDrcEffectType;
+    int32_t mDrcAlbumMode;
+    int32_t mDrcOutputLoudness;
+
     bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferSamplesAvailable();
diff --git a/media/libstagefright/codecs/aacenc/Android.bp b/media/libstagefright/codecs/aacenc/Android.bp
index bf789c4..793125f 100644
--- a/media/libstagefright/codecs/aacenc/Android.bp
+++ b/media/libstagefright/codecs/aacenc/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_aacenc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_aacenc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_aacenc",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
index 6e437cf..90421b9 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
@@ -732,6 +732,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/amrnb/common/Android.bp b/media/libstagefright/codecs/amrnb/common/Android.bp
deleted file mode 100644
index 59a791d..0000000
--- a/media/libstagefright/codecs/amrnb/common/Android.bp
+++ /dev/null
@@ -1,91 +0,0 @@
-cc_library {
-    name: "libstagefright_amrnb_common",
-    vendor_available: true,
-    host_supported: true,
-    min_sdk_version: "29",
-
-    srcs: [
-        "src/add.cpp",
-        "src/az_lsp.cpp",
-        "src/bitno_tab.cpp",
-        "src/bitreorder_tab.cpp",
-        "src/bits2prm.cpp",
-        "src/c2_9pf_tab.cpp",
-        "src/copy.cpp",
-        "src/div_32.cpp",
-        "src/div_s.cpp",
-        "src/extract_h.cpp",
-        "src/extract_l.cpp",
-        "src/gains_tbl.cpp",
-        "src/gc_pred.cpp",
-        "src/gmed_n.cpp",
-        "src/grid_tbl.cpp",
-        "src/gray_tbl.cpp",
-        "src/int_lpc.cpp",
-        "src/inv_sqrt.cpp",
-        "src/inv_sqrt_tbl.cpp",
-        "src/l_abs.cpp",
-        "src/l_deposit_h.cpp",
-        "src/l_deposit_l.cpp",
-        "src/l_shr_r.cpp",
-        "src/log2.cpp",
-        "src/log2_norm.cpp",
-        "src/log2_tbl.cpp",
-        "src/lsfwt.cpp",
-        "src/lsp.cpp",
-        "src/lsp_az.cpp",
-        "src/lsp_lsf.cpp",
-        "src/lsp_lsf_tbl.cpp",
-        "src/lsp_tab.cpp",
-        "src/mult_r.cpp",
-        "src/norm_l.cpp",
-        "src/norm_s.cpp",
-        "src/ph_disp_tab.cpp",
-        "src/pow2.cpp",
-        "src/pow2_tbl.cpp",
-        "src/pred_lt.cpp",
-        "src/q_plsf.cpp",
-        "src/q_plsf_3.cpp",
-        "src/q_plsf_3_tbl.cpp",
-        "src/q_plsf_5.cpp",
-        "src/q_plsf_5_tbl.cpp",
-        "src/qua_gain_tbl.cpp",
-        "src/reorder.cpp",
-        "src/residu.cpp",
-        "src/round.cpp",
-        "src/set_zero.cpp",
-        "src/shr.cpp",
-        "src/shr_r.cpp",
-        "src/sqrt_l.cpp",
-        "src/sqrt_l_tbl.cpp",
-        "src/sub.cpp",
-        "src/syn_filt.cpp",
-        "src/vad1.cpp",
-        "src/weight_a.cpp",
-        "src/window_tab.cpp",
-    ],
-
-    export_include_dirs: ["include"],
-
-    cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-DOSCL_IMPORT_REF=",
-        "-DOSCL_EXPORT_REF=",
-
-        "-Werror",
-    ],
-
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-
-    //addressing b/25409744
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //        "unsigned-integer-overflow",
-    //    ],
-    //},
-}
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.bp b/media/libstagefright/codecs/amrnb/dec/Android.bp
index b8e00b3..2c0954d 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.bp
+++ b/media/libstagefright/codecs/amrnb/dec/Android.bp
@@ -1,86 +1,20 @@
-cc_library_static {
-    name: "libstagefright_amrnbdec",
-    vendor_available: true,
-    host_supported: true,
-    min_sdk_version: "29",
-
-    srcs: [
-        "src/a_refl.cpp",
-        "src/agc.cpp",
-        "src/amrdecode.cpp",
-        "src/b_cn_cod.cpp",
-        "src/bgnscd.cpp",
-        "src/c_g_aver.cpp",
-        "src/d1035pf.cpp",
-        "src/d2_11pf.cpp",
-        "src/d2_9pf.cpp",
-        "src/d3_14pf.cpp",
-        "src/d4_17pf.cpp",
-        "src/d8_31pf.cpp",
-        "src/d_gain_c.cpp",
-        "src/d_gain_p.cpp",
-        "src/d_plsf.cpp",
-        "src/d_plsf_3.cpp",
-        "src/d_plsf_5.cpp",
-        "src/dec_amr.cpp",
-        "src/dec_gain.cpp",
-        "src/dec_input_format_tab.cpp",
-        "src/dec_lag3.cpp",
-        "src/dec_lag6.cpp",
-        "src/dtx_dec.cpp",
-        "src/ec_gains.cpp",
-        "src/ex_ctrl.cpp",
-        "src/if2_to_ets.cpp",
-        "src/int_lsf.cpp",
-        "src/lsp_avg.cpp",
-        "src/ph_disp.cpp",
-        "src/post_pro.cpp",
-        "src/preemph.cpp",
-        "src/pstfilt.cpp",
-        "src/qgain475_tab.cpp",
-        "src/sp_dec.cpp",
-        "src/wmf_to_ets.cpp",
-    ],
-
-    export_include_dirs: ["src"],
-
-    cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-DOSCL_IMPORT_REF=",
-
-        "-Werror",
-    ],
-
-    version_script: "exports.lds",
-
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-
-    shared_libs: [
-        "libstagefright_amrnb_common",
-        "liblog",
-    ],
-
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
-
 //###############################################################################
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_amrdec",
     defaults: ["libstagefright_softomx-defaults"],
 
     srcs: ["SoftAMR.cpp"],
 
-    local_include_dirs: ["src"],
-
     cflags: [
         "-DOSCL_IMPORT_REF=",
     ],
@@ -103,39 +37,3 @@
         "libstagefright_amrnb_common",
     ],
 }
-
-//###############################################################################
-cc_test {
-    name: "libstagefright_amrnbdec_test",
-    gtest: false,
-    host_supported: true,
-
-    srcs: ["test/amrnbdec_test.cpp"],
-
-    cflags: ["-Wall", "-Werror"],
-
-    local_include_dirs: ["src"],
-
-    static_libs: [
-        "libstagefright_amrnbdec",
-        "libsndfile",
-    ],
-
-    shared_libs: [
-        "libstagefright_amrnb_common",
-        "libaudioutils",
-        "liblog",
-    ],
-
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-}
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
index cdfc03a..01da3f8 100644
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
@@ -576,6 +576,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/amrnb/dec/test/Android.bp b/media/libstagefright/codecs/amrnb/dec/test/Android.bp
deleted file mode 100644
index 7a95cfa..0000000
--- a/media/libstagefright/codecs/amrnb/dec/test/Android.bp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "AmrnbDecoderTest",
-    gtest: true,
-
-    srcs: [
-        "AmrnbDecoderTest.cpp",
-    ],
-
-    static_libs: [
-        "libstagefright_amrnb_common",
-        "libstagefright_amrnbdec",
-        "libaudioutils",
-        "libsndfile",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/amrnb/dec/test/README.md b/media/libstagefright/codecs/amrnb/dec/test/README.md
deleted file mode 100644
index 62e13ae..0000000
--- a/media/libstagefright/codecs/amrnb/dec/test/README.md
+++ /dev/null
@@ -1,34 +0,0 @@
-## Media Testing ##
----
-#### AMR-NB Decoder :
-The Amr-Nb Decoder Test Suite validates the amrnb decoder available in libstagefright.
-
-Run the following steps to build the test suite:
-```
-m AmrnbDecoderTest
-```
-
-The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
-
-The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
-
-To test 64-bit binary push binaries from nativetest64.
-```
-adb push ${OUT}/data/nativetest64/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
-```
-
-To test 32-bit binary push binaries from nativetest.
-```
-adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
-```
-
-The resource file for the tests is taken from [here](https://drive.google.com/drive/folders/13cM4tAaVFrmr-zGFqaAzFBbKs75pnm9b). Push these files into device for testing.
-Download amr-nb folder and push all the files in this folder to /data/local/tmp/ on the device.
-```
-adb push amr-nb/. /data/local/tmp/
-```
-
-usage: AmrnbDecoderTest -P \<path_to_folder\>
-```
-adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/
-```
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.bp b/media/libstagefright/codecs/amrnb/enc/Android.bp
index 73a1d4b..6bf2d39 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.bp
+++ b/media/libstagefright/codecs/amrnb/enc/Android.bp
@@ -1,98 +1,21 @@
-cc_library_static {
-    name: "libstagefright_amrnbenc",
-    vendor_available: true,
-    min_sdk_version: "29",
-
-    srcs: [
-        "src/amrencode.cpp",
-        "src/autocorr.cpp",
-        "src/c1035pf.cpp",
-        "src/c2_11pf.cpp",
-        "src/c2_9pf.cpp",
-        "src/c3_14pf.cpp",
-        "src/c4_17pf.cpp",
-        "src/c8_31pf.cpp",
-        "src/calc_cor.cpp",
-        "src/calc_en.cpp",
-        "src/cbsearch.cpp",
-        "src/cl_ltp.cpp",
-        "src/cod_amr.cpp",
-        "src/convolve.cpp",
-        "src/cor_h.cpp",
-        "src/cor_h_x.cpp",
-        "src/cor_h_x2.cpp",
-        "src/corrwght_tab.cpp",
-        "src/dtx_enc.cpp",
-        "src/enc_lag3.cpp",
-        "src/enc_lag6.cpp",
-        "src/enc_output_format_tab.cpp",
-        "src/ets_to_if2.cpp",
-        "src/ets_to_wmf.cpp",
-        "src/g_adapt.cpp",
-        "src/g_code.cpp",
-        "src/g_pitch.cpp",
-        "src/gain_q.cpp",
-        "src/hp_max.cpp",
-        "src/inter_36.cpp",
-        "src/inter_36_tab.cpp",
-        "src/l_comp.cpp",
-        "src/l_extract.cpp",
-        "src/l_negate.cpp",
-        "src/lag_wind.cpp",
-        "src/lag_wind_tab.cpp",
-        "src/levinson.cpp",
-        "src/lpc.cpp",
-        "src/ol_ltp.cpp",
-        "src/p_ol_wgh.cpp",
-        "src/pitch_fr.cpp",
-        "src/pitch_ol.cpp",
-        "src/pre_big.cpp",
-        "src/pre_proc.cpp",
-        "src/prm2bits.cpp",
-        "src/q_gain_c.cpp",
-        "src/q_gain_p.cpp",
-        "src/qgain475.cpp",
-        "src/qgain795.cpp",
-        "src/qua_gain.cpp",
-        "src/s10_8pf.cpp",
-        "src/set_sign.cpp",
-        "src/sid_sync.cpp",
-        "src/sp_enc.cpp",
-        "src/spreproc.cpp",
-        "src/spstproc.cpp",
-        "src/ton_stab.cpp",
-    ],
-
-    header_libs: ["libstagefright_headers"],
-    export_include_dirs: ["src"],
-
-    cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-Werror",
-    ],
-
-    version_script: "exports.lds",
-
-    //addressing b/25409744
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-
-    shared_libs: ["libstagefright_amrnb_common"],
-}
 
 //###############################################################################
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_amrnbenc",
     defaults: ["libstagefright_softomx-defaults"],
 
     srcs: ["SoftAMRNBEncoder.cpp"],
 
-    local_include_dirs: ["src"],
-
     //addressing b/25409744
     //sanitize: {
     //    misc_undefined: [
@@ -106,27 +29,3 @@
         "libstagefright_amrnb_common",
     ],
 }
-
-//###############################################################################
-
-cc_test {
-    name: "libstagefright_amrnbenc_test",
-    gtest: false,
-
-    srcs: ["test/amrnb_enc_test.cpp"],
-
-    cflags: ["-Wall", "-Werror"],
-
-    local_include_dirs: ["src"],
-
-    static_libs: ["libstagefright_amrnbenc"],
-
-    shared_libs: ["libstagefright_amrnb_common"],
-
-    //addressing b/25409744
-    //sanitize: {
-    //    misc_undefined: [
-    //        "signed-integer-overflow",
-    //    ],
-    //},
-}
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
index 85ab64e..a1f6686 100644
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
@@ -421,6 +421,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
deleted file mode 100644
index b4fd867..0000000
--- a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
+++ /dev/null
@@ -1,282 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Pathname: ./audio/gsm-amr/c/src/cor_h_x2.c
-
-     Date: 11/07/2001
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created a separate file for cor_h_x2 function.
-
- Description: Fixed typecasting issue with TI C compiler and defined one
-              local variable per line. Updated copyright year.
-
- Description: Added #define for log2(32) = 5.
-
- Description: Added call to round() and L_shl() functions in the last FOR
-              loop to make code bit-exact.
-
- Description: Added pOverflow as a variable that's passed in for the EPOC
-              modifications.
-
- Description: Changed round function name to pv_round to avoid conflict with
-              round function in C standard library.
-
- Description: Using intrinsics from fxp_arithmetic.h .
-
- Description: Replacing fxp_arithmetic.h with basic_op.h.
-
- Description:
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "typedef.h"
-#include "cnst.h"
-#include "cor_h_x.h"
-#include "cor_h_x2.h" // BX
-#include "basic_op.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-#define LOG2_OF_32  5
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: cor_h_x2
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    h = vector containing the impulse response of the weighted synthesis
-        filter; vector contents are of type Word16; vector length is
-        2 * L_SUBFR
-    x = target signal vector; vector contents are of type Word16; vector
-        length is L_SUBFR
-    dn = vector containing the correlation between the target and the
-         impulse response; vector contents are of type Word16; vector
-         length is L_CODE
-    sf = scaling factor of type Word16 ; 2 when mode is MR122, 1 for all
-         other modes
-    nb_track = number of ACB tracks (Word16)
-    step = step size between pulses in one track (Word16)
-    pOverflow = pointer to overflow (Flag)
-
- Outputs:
-    dn contents are the newly calculated correlation values
-    pOverflow = 1 if the math functions called by cor_h_x2 result in overflow
-    else zero.
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function computes the correlation between the target signal (x) and the
- impulse response (h).
-
- The correlation is given by: d[n] = sum_{i=n}^{L-1} x[i] h[i-n],
- where: n=0,...,L-1
-
- d[n] is normalized such that the sum of 5 maxima of d[n] corresponding to
- each position track does not saturate.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- cor_h.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-The original ETSI reference code uses a global flag Overflow. However, in the
-actual implementation a pointer to the overflow flag is passed in.
-
-void cor_h_x2 (
-    Word16 h[],    // (i): impulse response of weighted synthesis filter
-    Word16 x[],    // (i): target
-    Word16 dn[],   // (o): correlation between target and h[]
-    Word16 sf,     // (i): scaling factor: 2 for 12.2, 1 for others
-    Word16 nb_track,// (i): the number of ACB tracks
-    Word16 step    // (i): step size from one pulse position to the next
-                           in one track
-)
-{
-    Word16 i, j, k;
-    Word32 s, y32[L_CODE], max, tot;
-
-    // first keep the result on 32 bits and find absolute maximum
-
-    tot = 5;
-
-    for (k = 0; k < nb_track; k++)
-    {
-        max = 0;
-        for (i = k; i < L_CODE; i += step)
-        {
-            s = 0;
-            for (j = i; j < L_CODE; j++)
-                s = L_mac (s, x[j], h[j - i]);
-
-            y32[i] = s;
-
-            s = L_abs (s);
-            if (L_sub (s, max) > (Word32) 0L)
-                max = s;
-        }
-        tot = L_add (tot, L_shr (max, 1));
-    }
-
-    j = sub (norm_l (tot), sf);
-
-    for (i = 0; i < L_CODE; i++)
-    {
-        dn[i] = pv_round (L_shl (y32[i], j));
-    }
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-void cor_h_x2(
-    Word16 h[],    /* (i): impulse response of weighted synthesis filter */
-    Word16 x[],    /* (i): target                                        */
-    Word16 dn[],   /* (o): correlation between target and h[]            */
-    Word16 sf,     /* (i): scaling factor: 2 for 12.2, 1 for others      */
-    Word16 nb_track,/* (i): the number of ACB tracks                     */
-    Word16 step,   /* (i): step size from one pulse position to the next
-                           in one track                                  */
-    Flag *pOverflow
-)
-{
-    Word16 i;
-    Word16 j;
-    Word16 k;
-    Word32 s;
-    Word32 y32[L_CODE];
-    Word32 max;
-    Word32 tot;
-
-
-    /* first keep the result on 32 bits and find absolute maximum */
-    tot = LOG2_OF_32;
-    for (k = 0; k < nb_track; k++)
-    {
-        max = 0;
-        for (i = k; i < L_CODE; i += step)
-        {
-            s = 0;
-
-            for (j = i; j < L_CODE; j++)
-            {
-                s = amrnb_fxp_mac_16_by_16bb((Word32)x[j], (Word32)h[j-i], s);
-            }
-
-            s = s << 1;
-            y32[i] = s;
-            s = L_abs(s);
-
-            if (s > max)
-            {
-                max = s;
-            }
-        }
-        tot = (tot + (max >> 1));
-    }
-
-    j = sub(norm_l(tot), sf, pOverflow);
-
-    for (i = 0; i < L_CODE; i++)
-    {
-        dn[i] = pv_round(L_shl(y32[i], j, pOverflow), pOverflow);
-    }
-
-    return;
-}
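For readers of the removed file above: the correlation it computes, d[n] = sum_{i=n}^{L-1} x[i]*h[i-n], is easier to follow without the Word16/Word32 scaling. Below is a minimal floating-point sketch; the function name is made up, and the per-track normalization that cor_h_x2 performs is deliberately omitted.

```
// Illustrative floating-point version of the correlation computed by cor_h_x2:
// d[n] = sum_{i=n}^{L-1} x[i] * h[i - n], for n = 0 .. L-1.
// The removed fixed-point code additionally normalizes d[] per track so that
// the sum of the per-track maxima does not saturate 16-bit arithmetic.
#include <cstddef>
#include <vector>

std::vector<double> correlate_target_with_impulse(const std::vector<double> &x,
                                                  const std::vector<double> &h) {
    const std::size_t L = x.size();      // assumes h has at least L samples
    std::vector<double> d(L, 0.0);
    for (std::size_t n = 0; n < L; ++n) {
        double s = 0.0;
        for (std::size_t i = n; i < L; ++i) {
            s += x[i] * h[i - n];
        }
        d[n] = s;
    }
    return d;
}
```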
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.cpp b/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.cpp
deleted file mode 100644
index 5a846fa..0000000
--- a/media/libstagefright/codecs/amrnb/enc/src/pitch_fr.cpp
+++ /dev/null
@@ -1,1609 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Pathname: ./audio/gsm-amr/c/src/pitch_fr.c
- Functions:
-
-
-     Date: 02/04/2002
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Added pOverflow as a passed-in value to searchFrac and made
-              other simple syntax fixes to the code. Removed the include of
-              stdio.h.
-
- Description: *lag-- decrements the pointer.  (*lag)-- decrements what is
- pointed to.  The latter is what the coder intended, but the former is
- the coding instruction that was used.
-
- Description: A common problem -- a comparison != 0 was inadvertently replaced
- by a comparison == 0.
-
-
- Description:  For Norm_Corr() and getRange()
-              1. Eliminated unused include files.
-              2. Replaced array addressing by pointers
-              3. Eliminated math operations that unnecessarily checked for
-                 saturation, in some cases by shifting before adding and
-                 in other cases by evaluating the operands
-              4. Unrolled loops to speed up processing, using decrement loops
-              5. Replaced extract_l() call with equivalent code
-              6. Modified scaling threshold and grouped all shifts (avoiding
-                 successive shifts)
-
- Description:  Replaced OSCL mem type functions and eliminated include
-               files that now are chosen by OSCL definitions
-
- Description:  Replaced "int" and/or "char" with OSCL defined types.
-
- Description: Removed compiler warnings.
-
- Description:
-------------------------------------------------------------------------------
- MODULE DESCRIPTION
-
-      File             : pitch_fr.c
-      Purpose          : Find the pitch period with 1/3 or 1/6 subsample
-                       : resolution (closed loop).
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include <stdlib.h>
-
-#include "pitch_fr.h"
-#include "oper_32b.h"
-#include "cnst.h"
-#include "enc_lag3.h"
-#include "enc_lag6.h"
-#include "inter_36.h"
-#include "inv_sqrt.h"
-#include "convolve.h"
-
-#include "basic_op.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL VARIABLE DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*
- * mode dependent parameters used in Pitch_fr()
- * Note: order of MRxx in 'enum Mode' is important!
- */
-static const struct
-{
-    Word16 max_frac_lag;     /* lag up to which fractional lags are used    */
-    Word16 flag3;            /* enable 1/3 instead of 1/6 fract. resolution */
-    Word16 first_frac;       /* first fractional to check                   */
-    Word16 last_frac;        /* last fractional to check                    */
-    Word16 delta_int_low;    /* integer lag below TO to start search from   */
-    Word16 delta_int_range;  /* integer range around T0                     */
-    Word16 delta_frc_low;    /* fractional below T0                         */
-    Word16 delta_frc_range;  /* fractional range around T0                  */
-    Word16 pit_min;          /* minimum pitch                               */
-} mode_dep_parm[N_MODES] =
-{
-    /* MR475 */  { 84,  1, -2,  2,  5, 10,  5,  9, PIT_MIN },
-    /* MR515 */  { 84,  1, -2,  2,  5, 10,  5,  9, PIT_MIN },
-    /* MR59  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
-    /* MR67  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
-    /* MR74  */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
-    /* MR795 */  { 84,  1, -2,  2,  3,  6, 10, 19, PIT_MIN },
-    /* MR102 */  { 84,  1, -2,  2,  3,  6,  5,  9, PIT_MIN },
-    /* MR122 */  { 94,  0, -3,  3,  3,  6,  5,  9, PIT_MIN_MR122 }
-};
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: Norm_Corr
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    exc[] = pointer to buffer of type Word16
-    xn[]  = pointer to buffer of type Word16
-    h[]   = pointer to buffer of type Word16
-    L_subfr = length of sub frame (Word16)
-    t_min  = the minimum table value of type Word16
-    t_max = the maximum table value of type Word16
-    corr_norm[] = pointer to buffer of type Word16
-
- Outputs:
-    pOverflow = 1 if the math functions called result in overflow else zero.
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-  FUNCTION:   Norm_Corr()
-
-  PURPOSE: Find the normalized correlation between the target vector
-           and the filtered past excitation.
-
-  DESCRIPTION:
-     The normalized correlation is given by the correlation between the
-     target and filtered past excitation divided by the square root of
-     the energy of filtered excitation.
-                   corr[k] = <x[], y_k[]>/sqrt(y_k[],y_k[])
-     where x[] is the target vector and y_k[] is the filtered past
-     excitation at delay k.
-
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-static void Norm_Corr (Word16 exc[], Word16 xn[], Word16 h[], Word16 L_subfr,
-                       Word16 t_min, Word16 t_max, Word16 corr_norm[])
-{
-    Word16 i, j, k;
-    Word16 corr_h, corr_l, norm_h, norm_l;
-    Word32 s;
-
-    // Usually dynamic allocation of (L_subfr)
-    Word16 excf[L_SUBFR];
-    Word16 scaling, h_fac, *s_excf, scaled_excf[L_SUBFR];
-
-    k = -t_min;
-
-    // compute the filtered excitation for the first delay t_min
-
-    Convolve (&exc[k], h, excf, L_subfr);
-
-    // scale "excf[]" to avoid overflow
-
-    for (j = 0; j < L_subfr; j++) {
-        scaled_excf[j] = shr (excf[j], 2);
-    }
-
-    // Compute 1/sqrt(energy of excf[])
-
-    s = 0;
-    for (j = 0; j < L_subfr; j++) {
-        s = L_mac (s, excf[j], excf[j]);
-    }
-    if (L_sub (s, 67108864L) <= 0) {            // if (s <= 2^26)
-        s_excf = excf;
-        h_fac = 15 - 12;
-        scaling = 0;
-    }
-    else {
-        // "excf[]" is divided by 2
-        s_excf = scaled_excf;
-        h_fac = 15 - 12 - 2;
-        scaling = 2;
-    }
-
-    // loop for every possible period
-
-    for (i = t_min; i <= t_max; i++) {
-        // Compute 1/sqrt(energy of excf[])
-
-        s = 0;
-        for (j = 0; j < L_subfr; j++) {
-            s = L_mac (s, s_excf[j], s_excf[j]);
-        }
-
-        s = Inv_sqrt (s);
-        L_Extract (s, &norm_h, &norm_l);
-
-        // Compute correlation between xn[] and excf[]
-
-        s = 0;
-        for (j = 0; j < L_subfr; j++) {
-            s = L_mac (s, xn[j], s_excf[j]);
-        }
-        L_Extract (s, &corr_h, &corr_l);
-
-        // Normalize correlation = correlation * (1/sqrt(energy))
-
-        s = Mpy_32 (corr_h, corr_l, norm_h, norm_l);
-
-        corr_norm[i] = extract_h (L_shl (s, 16));
-
-            // modify the filtered excitation excf[] for the next iteration
-
-        if (sub (i, t_max) != 0) {
-            k--;
-            for (j = L_subfr - 1; j > 0; j--) {
-                s = L_mult (exc[k], h[j]);
-                s = L_shl (s, h_fac);
-                s_excf[j] = add (extract_h (s), s_excf[j - 1]);
-            }
-            s_excf[0] = shr (exc[k], scaling);
-        }
-    }
-    return;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-static void Norm_Corr(Word16 exc[],
-                      Word16 xn[],
-                      Word16 h[],
-                      Word16 L_subfr,
-                      Word16 t_min,
-                      Word16 t_max,
-                      Word16 corr_norm[],
-                      Flag *pOverflow)
-{
-    Word16 i;
-    Word16 j;
-    Word16 k;
-    Word16 corr_h;
-    Word16 corr_l;
-    Word16 norm_h;
-    Word16 norm_l;
-    Word32 s;
-    Word32 s2;
-    Word16 excf[L_SUBFR];
-    Word16 scaling;
-    Word16 h_fac;
-    Word16 *s_excf;
-    Word16 scaled_excf[L_SUBFR];
-    Word16 *p_s_excf;
-    Word16 *p_excf;
-    Word16  temp;
-    Word16 *p_x;
-    Word16 *p_h;
-
-    k = -t_min;
-
-    /* compute the filtered excitation for the first delay t_min */
-
-    Convolve(&exc[k], h, excf, L_subfr);
-
-    /* scale "excf[]" to avoid overflow */
-    s = 0;
-    p_s_excf = scaled_excf;
-    p_excf   = excf;
-
-    for (j = (L_subfr >> 1); j != 0; j--)
-    {
-        temp = *(p_excf++);
-        *(p_s_excf++) = temp >> 2;
-        s += (Word32) temp * temp;
-        temp = *(p_excf++);
-        *(p_s_excf++) = temp >> 2;
-        s += (Word32) temp * temp;
-    }
-
-
-    if (s <= (67108864L >> 1))
-    {
-        s_excf = excf;
-        h_fac = 12;
-        scaling = 0;
-    }
-    else
-    {
-        /* "excf[]" is divided by 2 */
-        s_excf = scaled_excf;
-        h_fac = 14;
-        scaling = 2;
-    }
-
-    /* loop for every possible period */
-
-    for (i = t_min; i <= t_max; i++)
-    {
-        /* Compute 1/sqrt(energy of excf[]) */
-
-        s   = s2 = 0;
-        p_x      = xn;
-        p_s_excf = s_excf;
-        j        = L_subfr >> 1;
-
-        while (j--)
-        {
-            s  += (Word32) * (p_x++) * *(p_s_excf);
-            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
-            p_s_excf++;
-            s  += (Word32) * (p_x++) * *(p_s_excf);
-            s2 += ((Word32)(*(p_s_excf)) * (*(p_s_excf)));
-            p_s_excf++;
-        }
-
-        s2     = s2 << 1;
-        s2     = Inv_sqrt(s2, pOverflow);
-        norm_h = (Word16)(s2 >> 16);
-        norm_l = (Word16)((s2 >> 1) - (norm_h << 15));
-        corr_h = (Word16)(s >> 15);
-        corr_l = (Word16)((s) - (corr_h << 15));
-
-        /* Normalize correlation = correlation * (1/sqrt(energy)) */
-
-        s = Mpy_32(corr_h, corr_l, norm_h, norm_l, pOverflow);
-
-        corr_norm[i] = (Word16) s ;
-
-        /* modify the filtered excitation excf[] for the next iteration */
-        if (i != t_max)
-        {
-            k--;
-            temp = exc[k];
-            p_s_excf = &s_excf[L_subfr - 1];
-            p_h = &h[L_subfr - 1];
-
-            p_excf = &s_excf[L_subfr - 2];
-            for (j = (L_subfr - 1) >> 1; j != 0; j--)
-            {
-                s = ((Word32) temp * *(p_h--)) >> h_fac;
-                *(p_s_excf--) = (Word16) s  + *(p_excf--);
-                s = ((Word32) temp * *(p_h--)) >> h_fac;
-                *(p_s_excf--) = (Word16) s  + *(p_excf--);
-            }
-
-            s = ((Word32) temp * *(p_h)) >> h_fac;
-            *(p_s_excf--) = (Word16) s  + *(p_excf);
-
-            *(p_s_excf) = temp >> scaling;
-        }
-
-    }
-    return;
-}
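As a companion to the fixed-point Norm_Corr above, here is a hedged floating-point sketch of what it computes, corr[k] = <x, y_k> / sqrt(<y_k, y_k>). For clarity the filtered excitation is re-convolved from scratch for every delay instead of being updated incrementally, and all names and the `exc_offset` convention are illustrative assumptions.

```
// Illustrative-only sketch: normalized correlation between the target xn[]
// and the past excitation filtered by h[], for each delay k in [t_min, t_max].
// The caller must supply at least t_max samples of history before exc_offset.
#include <cmath>
#include <vector>

std::vector<double> norm_corr_sketch(const std::vector<double> &exc,  // excitation history
                                     const std::vector<double> &xn,   // target, length L_subfr
                                     const std::vector<double> &h,    // impulse response, length L_subfr
                                     int exc_offset,                  // index of "time 0" inside exc
                                     int t_min, int t_max) {
    const int L = static_cast<int>(xn.size());
    std::vector<double> corr(t_max - t_min + 1, 0.0);
    for (int k = t_min; k <= t_max; ++k) {
        // y[j] = sum_{i=0}^{j} exc[-k + i] * h[j - i]  (plain convolution)
        std::vector<double> y(L, 0.0);
        for (int j = 0; j < L; ++j) {
            double s = 0.0;
            for (int i = 0; i <= j; ++i) {
                s += exc[exc_offset - k + i] * h[j - i];
            }
            y[j] = s;
        }
        double num = 0.0, energy = 0.0;
        for (int j = 0; j < L; ++j) {
            num    += xn[j] * y[j];
            energy += y[j] * y[j];
        }
        corr[k - t_min] = (energy > 0.0) ? num / std::sqrt(energy) : 0.0;
    }
    return corr;
}
```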
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: searchFrac
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    lag = pointer to integer pitch of type Word16
-    frac = pointer to starting point of search fractional pitch of type Word16
-    last_frac = endpoint of search  of type Word16
-    corr[] = pointer to normalized correlation of type Word16
-    flag3 = subsample resolution (3: =1 / 6: =0) of type Word16
-
- Outputs:
-    None
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-   FUNCTION:   searchFrac()
-
-   PURPOSE: Find fractional pitch
-
-   DESCRIPTION:
-      The function interpolates the normalized correlation at the
-      fractional positions around lag T0. The position at which the
-      interpolation function reaches its maximum is the fractional pitch.
-      Starting point of the search is frac, end point is last_frac.
-      frac is overwritten with the fractional pitch.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-static void searchFrac (
-    Word16 *lag,       // i/o : integer pitch
-    Word16 *frac,      // i/o : start point of search -
-                               fractional pitch
-    Word16 last_frac,  // i   : endpoint of search
-    Word16 corr[],     // i   : normalized correlation
-    Word16 flag3       // i   : subsample resolution
-                                (3: =1 / 6: =0)
-)
-{
-    Word16 i;
-    Word16 max;
-    Word16 corr_int;
-
-    // Test the fractions around T0 and choose the one which maximizes
-    // the interpolated normalized correlation.
-
-    max = Interpol_3or6 (&corr[*lag], *frac, flag3); // function result
-
-    for (i = add (*frac, 1); i <= last_frac; i++) {
-        corr_int = Interpol_3or6 (&corr[*lag], i, flag3);
-        if (sub (corr_int, max) > 0) {
-            max = corr_int;
-            *frac = i;
-        }
-    }
-
-    if (flag3 == 0) {
-        // Limit the fraction value in the interval [-2,-1,0,1,2,3]
-
-        if (sub (*frac, -3) == 0) {
-            *frac = 3;
-            *lag = sub (*lag, 1);
-        }
-    }
-    else {
-        // limit the fraction value between -1 and 1
-
-        if (sub (*frac, -2) == 0) {
-            *frac = 1;
-            *lag = sub (*lag, 1);
-        }
-        if (sub (*frac, 2) == 0) {
-            *frac = -1;
-            *lag = add (*lag, 1);
-        }
-    }
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-static void searchFrac(
-    Word16 *lag,       /* i/o : integer pitch           */
-    Word16 *frac,      /* i/o : start point of search -
-                                fractional pitch        */
-    Word16 last_frac,  /* i   : endpoint of search      */
-    Word16 corr[],     /* i   : normalized correlation  */
-    Word16 flag3,      /* i   : subsample resolution
-                                (3: =1 / 6: =0)         */
-    Flag   *pOverflow
-)
-{
-    Word16 i;
-    Word16 max;
-    Word16 corr_int;
-
-    /* Test the fractions around T0 and choose the one which maximizes   */
-    /* the interpolated normalized correlation.                          */
-
-    max = Interpol_3or6(&corr[*lag], *frac, flag3, pOverflow);
-    /* function result */
-
-    for (i = *frac + 1; i <= last_frac; i++)
-    {
-        corr_int = Interpol_3or6(&corr[*lag], i, flag3, pOverflow);
-        if (corr_int > max)
-        {
-            max = corr_int;
-            *frac = i;
-        }
-    }
-
-    if (flag3 == 0)
-    {
-        /* Limit the fraction value in the interval [-2,-1,0,1,2,3] */
-
-        if (*frac == -3)
-        {
-            *frac = 3;
-            (*lag)--;
-        }
-    }
-    else
-    {
-        /* limit the fraction value between -1 and 1 */
-
-        if (*frac == -2)
-        {
-            *frac = 1;
-            (*lag)--;
-        }
-        if (*frac == 2)
-        {
-            *frac = -1;
-            (*lag)++;
-        }
-    }
-}
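A compact, self-contained mirror of the removed searchFrac logic: the Interpol_3or6 call is abstracted behind a caller-supplied `interp` function (an assumption made for this sketch); the lag/fraction wrap-around follows the code above.

```
// Sketch of the fractional-pitch search: evaluate an interpolation of the
// normalized correlation at fractions frac..last_frac around the chosen
// integer lag and keep the fraction that maximizes it.
#include <functional>

void search_frac_sketch(int *lag, int *frac, int last_frac, int flag3,
                        const std::function<double(int lag, int frac)> &interp) {
    double best = interp(*lag, *frac);
    for (int f = *frac + 1; f <= last_frac; ++f) {
        double v = interp(*lag, f);
        if (v > best) { best = v; *frac = f; }
    }
    if (flag3 == 0) {              // 1/6 resolution: keep frac in [-2 .. 3]
        if (*frac == -3) { *frac = 3; --(*lag); }
    } else {                       // 1/3 resolution: keep frac in [-1 .. 1]
        if (*frac == -2) { *frac = 1;  --(*lag); }
        if (*frac ==  2) { *frac = -1; ++(*lag); }
    }
}
```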
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: getRange
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    T0 = integer pitch of type Word16
-    delta_low = search start offset of type Word16
-    delta_range = search range of type Word16
-    pitmin = minimum pitch of type Word16
-    pitmax = maximum pitch of type Word16
-    t0_min = search range minimum of type Word16
-    t0_max = search range maximum of type Word16
-
- Outputs:
-    pOverflow = 1 if the math functions called result in overflow else zero.
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-   FUNCTION:   getRange()
-
-   PURPOSE: Sets range around open-loop pitch or integer pitch of last subframe
-
-   DESCRIPTION:
-      Takes integer pitch T0 and calculates a range around it with
-        t0_min = T0-delta_low  and t0_max = (T0-delta_low) + delta_range
-      t0_min and t0_max are bounded by pitmin and pitmax
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-static void getRange (
-    Word16 T0,           // i : integer pitch
-    Word16 delta_low,    // i : search start offset
-    Word16 delta_range,  // i : search range
-    Word16 pitmin,       // i : minimum pitch
-    Word16 pitmax,       // i : maximum pitch
-    Word16 *t0_min,      // o : search range minimum
-    Word16 *t0_max)      // o : search range maximum
-{
-    *t0_min = sub(T0, delta_low);
-    if (sub(*t0_min, pitmin) < 0) {
-        *t0_min = pitmin;
-    }
-    *t0_max = add(*t0_min, delta_range);
-    if (sub(*t0_max, pitmax) > 0) {
-        *t0_max = pitmax;
-        *t0_min = sub(*t0_max, delta_range);
-    }
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-static void getRange(
-    Word16 T0,           /* i : integer pitch          */
-    Word16 delta_low,    /* i : search start offset    */
-    Word16 delta_range,  /* i : search range           */
-    Word16 pitmin,       /* i : minimum pitch          */
-    Word16 pitmax,       /* i : maximum pitch          */
-    Word16 *t0_min,      /* o : search range minimum   */
-    Word16 *t0_max,      /* o : search range maximum   */
-    Flag   *pOverflow)
-{
-
-    Word16 temp;
-    OSCL_UNUSED_ARG(pOverflow);
-
-    temp = *t0_min;
-    temp = T0 - delta_low;
-    if (temp < pitmin)
-    {
-        temp = pitmin;
-    }
-    *t0_min = temp;
-
-    temp +=  delta_range;
-    if (temp > pitmax)
-    {
-        temp = pitmax;
-        *t0_min = pitmax - delta_range;
-    }
-    *t0_max = temp;
-
-}
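The clamping getRange performs is simple enough to show with worked numbers. The constants in the comments below are example values chosen for illustration, not the actual PIT_MIN/PIT_MAX definitions.

```
// Worked example of getRange()'s clamping (example values only):
//   T0 = 30, delta_low = 5, delta_range = 9, pitmin = 20, pitmax = 143
//   t0_min = 30 - 5 = 25            (>= pitmin, unchanged)
//   t0_max = 25 + 9 = 34            (<= pitmax, unchanged)
//   If instead T0 = 21:  t0_min = 16 -> clamped to 20, t0_max = 29.
//   If T0 = 142: t0_min = 137, t0_max = 146 -> clamped to 143, t0_min = 134.
#include <utility>

std::pair<int, int> get_range_sketch(int T0, int delta_low, int delta_range,
                                     int pitmin, int pitmax) {
    int t0_min = T0 - delta_low;
    if (t0_min < pitmin) t0_min = pitmin;
    int t0_max = t0_min + delta_range;
    if (t0_max > pitmax) {
        t0_max = pitmax;
        t0_min = t0_max - delta_range;
    }
    return {t0_min, t0_max};
}
```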
-
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: Pitch_fr_init
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    state = pointer to a pointer of structure type Pitch_fr_State.
-
- Outputs:
-    None
-
- Returns:
-    Returns a zero if successful and -1 if not successful.
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-  Function:   Pitch_fr_init
-  Purpose:    Allocates state memory and initializes state memory
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-int Pitch_fr_init (Pitch_frState **state)
-{
-    Pitch_frState* s;
-
-    if (state == (Pitch_frState **) NULL){
-        // fprintf(stderr, "Pitch_fr_init: invalid parameter\n");
-        return -1;
-    }
-    *state = NULL;
-
-    // allocate memory
-    if ((s= (Pitch_frState *) malloc(sizeof(Pitch_frState))) == NULL){
-        // fprintf(stderr, "Pitch_fr_init: can not malloc state structure\n");
-        return -1;
-    }
-
-    Pitch_fr_reset(s);
-    *state = s;
-
-    return 0;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-Word16 Pitch_fr_init(Pitch_frState **state)
-{
-    Pitch_frState* s;
-
-    if (state == (Pitch_frState **) NULL)
-    {
-        /* fprintf(stderr, "Pitch_fr_init: invalid parameter\n"); */
-        return -1;
-    }
-    *state = NULL;
-
-    /* allocate memory */
-    if ((s = (Pitch_frState *) malloc(sizeof(Pitch_frState))) == NULL)
-    {
-        /* fprintf(stderr, "Pitch_fr_init: can not malloc state structure\n"); */
-        return -1;
-    }
-
-    Pitch_fr_reset(s);
-    *state = s;
-
-    return 0;
-}
-
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: Pitch_fr_reset
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    state = pointer to a pointer of structure type Pitch_fr_State.
-
- Outputs:
-    None
-
- Returns:
-    Returns a zero if successful and -1 if not successful.
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-  Function:   Pitch_fr_reset
-  Purpose:    Initializes state memory to zero
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-int Pitch_fr_reset (Pitch_frState *state)
-{
-
-    if (state == (Pitch_frState *) NULL){
-        // fprintf(stderr, "Pitch_fr_reset: invalid parameter\n");
-        return -1;
-    }
-
-    state->T0_prev_subframe = 0;
-
-    return 0;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-Word16 Pitch_fr_reset(Pitch_frState *state)
-{
-
-    if (state == (Pitch_frState *) NULL)
-    {
-        /* fprintf(stderr, "Pitch_fr_reset: invalid parameter\n"); */
-        return -1;
-    }
-
-    state->T0_prev_subframe = 0;
-
-    return 0;
-}
-
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: Pitch_fr_exit
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    state = pointer to a pointer of structure type Pitch_fr_State.
-
- Outputs:
-    None
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-  Function:   Pitch_fr_exit
-  Purpose:    The memory for state is freed.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-void Pitch_fr_exit (Pitch_frState **state)
-{
-    if (state == NULL || *state == NULL)
-        return;
-
-    // deallocate memory
-    free(*state);
-    *state = NULL;
-
-    return;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-void Pitch_fr_exit(Pitch_frState **state)
-{
-    if (state == NULL || *state == NULL)
-        return;
-
-    /* deallocate memory */
-    free(*state);
-    *state = NULL;
-
-    return;
-}
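The three state helpers above pair up as allocate, reset, free. The following self-contained mirror of that lifecycle uses stand-in names (`DemoState`, `demo_*`); it is not the real Pitch_frState API, only an illustration of how init/reset/exit are intended to be called.

```
// Self-contained mirror of the Pitch_fr_init / Pitch_fr_reset / Pitch_fr_exit
// lifecycle (the real Pitch_frState carries T0_prev_subframe, the integer
// pitch of the previous subframe).
#include <cstdlib>

struct DemoState { int T0_prev_subframe; };

static int demo_reset(DemoState *s) {
    if (s == nullptr) return -1;
    s->T0_prev_subframe = 0;
    return 0;
}

static int demo_init(DemoState **state) {
    if (state == nullptr) return -1;
    *state = nullptr;
    DemoState *s = static_cast<DemoState *>(malloc(sizeof(DemoState)));
    if (s == nullptr) return -1;
    demo_reset(s);
    *state = s;
    return 0;
}

static void demo_exit(DemoState **state) {
    if (state == nullptr || *state == nullptr) return;
    free(*state);
    *state = nullptr;
}

int main() {
    DemoState *st = nullptr;
    if (demo_init(&st) != 0) return 1;  // allocate + reset once per encoder instance
    // ... one closed-loop pitch search per subframe would go here ...
    demo_exit(&st);                     // free when the encoder is destroyed
    return 0;
}
```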
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: Pitch_fr
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    st = pointer to stat structure of type Pitch_frState
-    mode = codec mode of type enum Mode
-    T_op[] = pointer to open loop pitch lags of type Word16
-    exc[] = pointer to excitation buffer of type Word16
-    xn[] = pointer to target vector of type Word16
-    h[] = pointer to impulse response of synthesis and weighting filters
-          of type Word16
-    L_subfr = length of subframe of type Word16
-    i_subfr = subframe offset of type Word16
-
- Outputs:
-    pit_frac = pointer to pitch period (fractional) of type Word16
-    resu3 = pointer to subsample resolution of type Word16
-    ana_index = pointer to index of encoding of type Word16
-
- Returns:
-    None
-
- Global Variables Used:
-    None
-
- Local Variables Needed:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-   FUNCTION:   Pitch_fr()
-
-   PURPOSE: Find the pitch period with 1/3 or 1/6 subsample resolution
-            (closed loop).
-
-   DESCRIPTION:
-         - find the normalized correlation between the target and filtered
-           past excitation in the search range.
-         - select the delay with maximum normalized correlation.
-         - interpolate the normalized correlation at fractions -3/6 to 3/6
-           with step 1/6 around the chosen delay.
-         - The fraction which gives the maximum interpolated value is chosen.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None
-
-------------------------------------------------------------------------------
- REFERENCES
-
- pitch_fr.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-Word16 Pitch_fr (        // o   : pitch period (integer)
-    Pitch_frState *st,   // i/o : State struct
-    enum Mode mode,      // i   : codec mode
-    Word16 T_op[],       // i   : open loop pitch lags
-    Word16 exc[],        // i   : excitation buffer                      Q0
-    Word16 xn[],         // i   : target vector                          Q0
-    Word16 h[],          // i   : impulse response of synthesis and
-                                  weighting filters                     Q12
-    Word16 L_subfr,      // i   : Length of subframe
-    Word16 i_subfr,      // i   : subframe offset
-    Word16 *pit_frac,    // o   : pitch period (fractional)
-    Word16 *resu3,       // o   : subsample resolution 1/3 (=1) or 1/6 (=0)
-    Word16 *ana_index    // o   : index of encoding
-)
-{
-    Word16 i;
-    Word16 t_min, t_max;
-    Word16 t0_min, t0_max;
-    Word16 max, lag, frac;
-    Word16 tmp_lag;
-    Word16 *corr;
-    Word16 corr_v[40];    // Total length = t0_max-t0_min+1+2*L_INTER_SRCH
-
-    Word16 max_frac_lag;
-    Word16 flag3, flag4;
-    Word16 last_frac;
-    Word16 delta_int_low, delta_int_range;
-    Word16 delta_frc_low, delta_frc_range;
-    Word16 pit_min;
-    Word16 frame_offset;
-    Word16 delta_search;
-
-    //-----------------------------------------------------------------------
-     //                      set mode specific variables
-     //----------------------------------------------------------------------
-
-    max_frac_lag    = mode_dep_parm[mode].max_frac_lag;
-    flag3           = mode_dep_parm[mode].flag3;
-    frac            = mode_dep_parm[mode].first_frac;
-    last_frac       = mode_dep_parm[mode].last_frac;
-    delta_int_low   = mode_dep_parm[mode].delta_int_low;
-    delta_int_range = mode_dep_parm[mode].delta_int_range;
-
-    delta_frc_low   = mode_dep_parm[mode].delta_frc_low;
-    delta_frc_range = mode_dep_parm[mode].delta_frc_range;
-    pit_min         = mode_dep_parm[mode].pit_min;
-
-    //-----------------------------------------------------------------------
-    //                 decide upon full or differential search
-    //-----------------------------------------------------------------------
-
-    delta_search = 1;
-
-    if ((i_subfr == 0) || (sub(i_subfr,L_FRAME_BY2) == 0)) {
-
-        // Subframe 1 and 3
-
-        if (((sub((Word16)mode, (Word16)MR475) != 0) && (sub((Word16)mode,
-            (Word16)MR515) != 0)) ||
-            (sub(i_subfr,L_FRAME_BY2) != 0)) {
-
-            // set t0_min, t0_max for full search
-            // this is *not* done for mode MR475, MR515 in subframe 3
-
-            delta_search = 0; // no differential search
-
-            // calculate index into T_op which contains the open-loop
-            // pitch estimations for the 2 big subframes
-
-            frame_offset = 1;
-            if (i_subfr == 0)
-                frame_offset = 0;
-
-            // get T_op from the corresponding half frame and
-            // set t0_min, t0_max
-
-            getRange (T_op[frame_offset], delta_int_low, delta_int_range,
-                      pit_min, PIT_MAX, &t0_min, &t0_max);
-        }
-        else {
-
-            // mode MR475, MR515 and third subframe: delta search as well
-            getRange (st->T0_prev_subframe, delta_frc_low, delta_frc_range,
-                      pit_min, PIT_MAX, &t0_min, &t0_max);
-        }
-    }
-    else {
-
-        // for Subframe 2 and 4
-        // get range around T0 of previous subframe for delta search
-
-        getRange (st->T0_prev_subframe, delta_frc_low, delta_frc_range,
-                  pit_min, PIT_MAX, &t0_min, &t0_max);
-    }
-
-    //-----------------------------------------------------------------------
-                Find interval to compute normalized correlation
-     -----------------------------------------------------------------------
-
-    t_min = sub (t0_min, L_INTER_SRCH);
-    t_max = add (t0_max, L_INTER_SRCH);
-
-    corr = &corr_v[-t_min];
-
-    //-----------------------------------------------------------------------
-      Compute normalized correlation between target and filtered excitation
-     -----------------------------------------------------------------------
-
-    Norm_Corr (exc, xn, h, L_subfr, t_min, t_max, corr);
-
-    //-----------------------------------------------------------------------
-                                Find integer pitch
-     -----------------------------------------------------------------------
-
-    max = corr[t0_min];
-    lag = t0_min;
-
-    for (i = t0_min + 1; i <= t0_max; i++) {
-        if (sub (corr[i], max) >= 0) {
-            max = corr[i];
-            lag = i;
-        }
-    }
-
-    //-----------------------------------------------------------------------
-                             Find fractional pitch
-     -----------------------------------------------------------------------
-    if ((delta_search == 0) && (sub (lag, max_frac_lag) > 0)) {
-
-        // full search and integer pitch greater than max_frac_lag
-        // fractional search is not needed, set fractional to zero
-
-        frac = 0;
-    }
-    else {
-
-        // if differential search AND mode MR475 OR MR515 OR MR59 OR MR67
-        // then search fractional with 4 bits resolution
-
-       if ((delta_search != 0) &&
-           ((sub ((Word16)mode, (Word16)MR475) == 0) ||
-            (sub ((Word16)mode, (Word16)MR515) == 0) ||
-            (sub ((Word16)mode, (Word16)MR59) == 0) ||
-            (sub ((Word16)mode, (Word16)MR67) == 0))) {
-
-          // modify frac or last_frac according to position of last
-          // integer pitch: either search around integer pitch,
-          // or only on left or right side
-
-          tmp_lag = st->T0_prev_subframe;
-          if ( sub( sub(tmp_lag, t0_min), 5) > 0)
-             tmp_lag = add (t0_min, 5);
-          if ( sub( sub(t0_max, tmp_lag), 4) > 0)
-               tmp_lag = sub (t0_max, 4);
-
-          if ((sub (lag, tmp_lag) == 0) ||
-              (sub (lag, sub(tmp_lag, 1)) == 0)) {
-
-             // normal search in fractions around T0
-
-             searchFrac (&lag, &frac, last_frac, corr, flag3);
-
-          }
-          else if (sub (lag, sub (tmp_lag, 2)) == 0) {
-             // limit search around T0 to the right side
-             frac = 0;
-             searchFrac (&lag, &frac, last_frac, corr, flag3);
-          }
-          else if (sub (lag, add(tmp_lag, 1)) == 0) {
-             // limit search around T0 to the left side
-             last_frac = 0;
-             searchFrac (&lag, &frac, last_frac, corr, flag3);
-          }
-          else {
-             // no fractional search
-             frac = 0;
-            }
-       }
-       else
-          // test the fractions around T0
-          searchFrac (&lag, &frac, last_frac, corr, flag3);
-    }
-
-    //-----------------------------------------------------------------------
-     //                           encode pitch
-     //-----------------------------------------------------------------------
-
-    if (flag3 != 0) {
-       // flag4 indicates encoding with 4 bit resolution;
-       // this is needed for mode MR475, MR515 and MR59
-
-       flag4 = 0;
-       if ( (sub ((Word16)mode, (Word16)MR475) == 0) ||
-            (sub ((Word16)mode, (Word16)MR515) == 0) ||
-            (sub ((Word16)mode, (Word16)MR59) == 0) ||
-            (sub ((Word16)mode, (Word16)MR67) == 0) ) {
-          flag4 = 1;
-       }
-
-       // encode with 1/3 subsample resolution
-
-       *ana_index = Enc_lag3(lag, frac, st->T0_prev_subframe,
-                             t0_min, t0_max, delta_search, flag4);
-       // function result
-
-    }
-    else
-    {
-       // encode with 1/6 subsample resolution
-
-       *ana_index = Enc_lag6(lag, frac, t0_min, delta_search);
-       // function result
-    }
-
-     //-----------------------------------------------------------------------
-     //                          update state variables
-     //-----------------------------------------------------------------------
-
-    st->T0_prev_subframe = lag;
-
-     //-----------------------------------------------------------------------
-     //                      update output variables
-     //-----------------------------------------------------------------------
-
-    *resu3    = flag3;
-
-    *pit_frac = frac;
-
-    return (lag);
-}
-
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor,
- the resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
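Before the implementation that follows, a sketch of its integer-lag selection step: take the argmax of the normalized correlation over [t0_min, t0_max], resolving ties toward the larger lag as the removed code does, before the fractional refinement and Enc_lag3/Enc_lag6 encoding. All names here are illustrative.

```
// Illustrative sketch of the integer-pitch selection inside Pitch_fr().
#include <vector>

struct PitchResult { int lag; int frac; };

PitchResult pick_integer_pitch_sketch(const std::vector<double> &corr,  // corr[i] = value at lag t0_min + i
                                      int t0_min, int t0_max) {
    PitchResult r{t0_min, 0};
    double best = corr[0];
    for (int lag = t0_min + 1; lag <= t0_max; ++lag) {
        if (corr[lag - t0_min] >= best) {   // ">=" keeps the larger lag on ties
            best = corr[lag - t0_min];
            r.lag = lag;
        }
    }
    // The removed code then refines the fraction with searchFrac() unless a
    // full search found an integer lag above max_frac_lag, and finally encodes
    // the (lag, frac) pair with Enc_lag3() or Enc_lag6().
    return r;
}
```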
-Word16 Pitch_fr(         /* o   : pitch period (integer)                    */
-    Pitch_frState *st,   /* i/o : State struct                              */
-    enum Mode mode,      /* i   : codec mode                                */
-    Word16 T_op[],       /* i   : open loop pitch lags                      */
-    Word16 exc[],        /* i   : excitation buffer                      Q0 */
-    Word16 xn[],         /* i   : target vector                          Q0 */
-    Word16 h[],          /* i   : impulse response of synthesis and
-                                  weighting filters                     Q12 */
-    Word16 L_subfr,      /* i   : Length of subframe                        */
-    Word16 i_subfr,      /* i   : subframe offset                           */
-    Word16 *pit_frac,    /* o   : pitch period (fractional)                 */
-    Word16 *resu3,       /* o   : subsample resolution 1/3 (=1) or 1/6 (=0) */
-    Word16 *ana_index,   /* o   : index of encoding                         */
-    Flag   *pOverflow
-)
-{
-    Word16 i;
-    Word16 t_min;
-    Word16 t_max;
-    Word16 t0_min = 0;
-    Word16 t0_max;
-    Word16 max;
-    Word16 lag;
-    Word16 frac;
-    Word16 tmp_lag;
-    Word16 *corr;
-    Word16 corr_v[40];    /* Total length = t0_max-t0_min+1+2*L_INTER_SRCH */
-
-    Word16 max_frac_lag;
-    Word16 flag3;
-    Word16 flag4;
-    Word16 last_frac;
-    Word16 delta_int_low;
-    Word16 delta_int_range;
-    Word16 delta_frc_low;
-    Word16 delta_frc_range;
-    Word16 pit_min;
-    Word16 frame_offset;
-    Word16 delta_search;
-
-    /*-----------------------------------------------------------------------*
-     *                      set mode specific variables                      *
-     *-----------------------------------------------------------------------*/
-
-    max_frac_lag    = mode_dep_parm[mode].max_frac_lag;
-    flag3           = mode_dep_parm[mode].flag3;
-    frac            = mode_dep_parm[mode].first_frac;
-    last_frac       = mode_dep_parm[mode].last_frac;
-    delta_int_low   = mode_dep_parm[mode].delta_int_low;
-    delta_int_range = mode_dep_parm[mode].delta_int_range;
-
-    delta_frc_low   = mode_dep_parm[mode].delta_frc_low;
-    delta_frc_range = mode_dep_parm[mode].delta_frc_range;
-    pit_min         = mode_dep_parm[mode].pit_min;
-
-    /*-----------------------------------------------------------------------*
-     *                 decide upon full or differential search               *
-     *-----------------------------------------------------------------------*/
-
-    delta_search = 1;
-
-    if ((i_subfr == 0) || (i_subfr == L_FRAME_BY2))
-    {
-
-        /* Subframe 1 and 3 */
-
-        if (((mode != MR475) && (mode != MR515)) || (i_subfr != L_FRAME_BY2))
-        {
-
-            /* set t0_min, t0_max for full search */
-            /* this is *not* done for mode MR475, MR515 in subframe 3 */
-
-            delta_search = 0; /* no differential search */
-
-            /* calculate index into T_op which contains the open-loop */
-            /* pitch estimations for the 2 big subframes */
-
-            frame_offset = 1;
-            if (i_subfr == 0)
-                frame_offset = 0;
-
-            /* get T_op from the corresponding half frame and */
-            /* set t0_min, t0_max */
-
-            getRange(T_op[frame_offset], delta_int_low, delta_int_range,
-                     pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
-        }
-        else
-        {
-
-            /* mode MR475, MR515 and subframe 3: delta search as well */
-            getRange(st->T0_prev_subframe, delta_frc_low, delta_frc_range,
-                     pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
-        }
-    }
-    else
-    {
-
-        /* for Subframe 2 and 4 */
-        /* get range around T0 of previous subframe for delta search */
-
-        getRange(st->T0_prev_subframe, delta_frc_low, delta_frc_range,
-                 pit_min, PIT_MAX, &t0_min, &t0_max, pOverflow);
-    }
-
-    /*-----------------------------------------------------------------------*
-     *           Find interval to compute normalized correlation             *
-     *-----------------------------------------------------------------------*/
-
-    t_min = sub(t0_min, L_INTER_SRCH, pOverflow);
-    t_max = add(t0_max, L_INTER_SRCH, pOverflow);
-
-    corr = &corr_v[-t_min];
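-    /* corr_v[] is addressed through corr with a negative offset so that  */
-    /* corr[t], t_min <= t <= t_max, maps onto corr_v[t - t_min]          */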
-
-    /*-----------------------------------------------------------------------*
-     * Compute normalized correlation between target and filtered excitation *
-     *-----------------------------------------------------------------------*/
-
-    Norm_Corr(exc, xn, h, L_subfr, t_min, t_max, corr, pOverflow);
-
-    /*-----------------------------------------------------------------------*
-     *                           Find integer pitch                          *
-     *-----------------------------------------------------------------------*/
-
-    max = corr[t0_min];
-    lag = t0_min;
-
-    for (i = t0_min + 1; i <= t0_max; i++)
-    {
-        if (corr[i] >= max)
-        {
-            max = corr[i];
-            lag = i;
-        }
-    }
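-    /* note: on ties (">=") the search keeps the larger lag */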
-
-    /*-----------------------------------------------------------------------*
-     *                        Find fractional pitch                          *
-     *-----------------------------------------------------------------------*/
-    if ((delta_search == 0) && (lag > max_frac_lag))
-    {
-
-        /* full search and integer pitch greater than max_frac_lag */
-        /* fractional search is not needed, set fractional to zero */
-
-        frac = 0;
-    }
-    else
-    {
-
-        /* if differential search AND mode MR475 OR MR515 OR MR59 OR MR67   */
-        /* then search fractional with 4 bits resolution           */
-
-        if ((delta_search != 0) &&
-                ((mode == MR475) || (mode == MR515) ||
-                 (mode == MR59) || (mode == MR67)))
-        {
-
-            /* modify frac or last_frac according to position of last */
-            /* integer pitch: either search around integer pitch, */
-            /* or only on left or right side */
-
-            tmp_lag = st->T0_prev_subframe;
-            if (sub(sub(tmp_lag, t0_min, pOverflow), 5, pOverflow) > 0)
-                tmp_lag = add(t0_min, 5, pOverflow);
-            if (sub(sub(t0_max, tmp_lag, pOverflow), 4, pOverflow) > 0)
-                tmp_lag = sub(t0_max, 4, pOverflow);
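-            /* tmp_lag is now clamped so that tmp_lag - t0_min <= 5 */
-            /* and t0_max - tmp_lag <= 4                            */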
-
-            if ((lag == tmp_lag) || (lag == (tmp_lag - 1)))
-            {
-
-                /* normal search in fractions around T0 */
-
-                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow);
-
-            }
-            else if (lag == (tmp_lag - 2))
-            {
-                /* limit search around T0 to the right side */
-                frac = 0;
-                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow);
-            }
-            else if (lag == (tmp_lag + 1))
-            {
-                /* limit search around T0 to the left side */
-                last_frac = 0;
-                searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow);
-            }
-            else
-            {
-                /* no fractional search */
-                frac = 0;
-            }
-        }
-        else
-            /* test the fractions around T0 */
-            searchFrac(&lag, &frac, last_frac, corr, flag3, pOverflow);
-    }
-
-    /*-----------------------------------------------------------------------*
-     *                           encode pitch                                *
-     *-----------------------------------------------------------------------*/
-
-    if (flag3 != 0)
-    {
-        /* flag4 indicates encoding with 4 bit resolution;         */
-        /* this is needed for mode MR475, MR515 and MR59           */
-
-        flag4 = 0;
-        if ((mode == MR475) || (mode == MR515) ||
-                (mode == MR59) || (mode == MR67))
-        {
-            flag4 = 1;
-        }
-
-        /* encode with 1/3 subsample resolution */
-
-        *ana_index = Enc_lag3(lag, frac, st->T0_prev_subframe,
-                              t0_min, t0_max, delta_search, flag4, pOverflow);
-        /* function result */
-
-    }
-    else
-    {
-        /* encode with 1/6 subsample resolution */
-
-        *ana_index = Enc_lag6(lag, frac, t0_min, delta_search, pOverflow);
-        /* function result */
-    }
-
-    /*-----------------------------------------------------------------------*
-     *                          update state variables                       *
-     *-----------------------------------------------------------------------*/
-
-    st->T0_prev_subframe = lag;
-
-    /*-----------------------------------------------------------------------*
-     *                      update output variables                          *
-     *-----------------------------------------------------------------------*/
-
-    *resu3    = flag3;
-
-    *pit_frac = frac;
-
-    return (lag);
-}
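-
-/* Taken together, the returned integer lag, *pit_frac and *resu3 describe a
-   closed-loop pitch delay of (floating-point view, for illustration only):
-
-    T = lag + pit_frac / (resu3 ? 3.0 : 6.0);
-*/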
-
diff --git a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp b/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
deleted file mode 100644
index f8da589..0000000
--- a/media/libstagefright/codecs/amrnb/enc/src/qgain475.cpp
+++ /dev/null
@@ -1,1445 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Pathname: ./audio/gsm-amr/c/src/qgain475.c
- Functions: MR475_quant_store_results
-           MR475_update_unq_pred
-           MR475_gain_quant
-
-------------------------------------------------------------------------------
- MODULE DESCRIPTION
-
- These modules handle the quantization of pitch and codebook gains for MR475.
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "qgain475.h"
-#include "typedef.h"
-#include "basic_op.h"
-#include "mode.h"
-#include "cnst.h"
-#include "pow2.h"
-#include "log2.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-#define MR475_VQ_SIZE 256
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL VARIABLE DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/* The table contains the following data:
- *
- *    g_pitch(0)        (Q14) // for sub-
- *    g_fac(0)          (Q12) // frame 0 and 2
- *    g_pitch(1)        (Q14) // for sub-
- *    g_fac(1)          (Q12) // frame 1 and 3
- *
- */
-static const Word16 table_gain_MR475[MR475_VQ_SIZE*4] =
-{
-    /*g_pit(0), g_fac(0),      g_pit(1), g_fac(1) */
-    812,          128,           542,      140,
-    2873,         1135,          2266,     3402,
-    2067,          563,         12677,      647,
-    4132,         1798,          5601,     5285,
-    7689,          374,          3735,      441,
-    10912,         2638,         11807,     2494,
-    20490,          797,          5218,      675,
-    6724,         8354,          5282,     1696,
-    1488,          428,          5882,      452,
-    5332,         4072,          3583,     1268,
-    2469,          901,         15894,     1005,
-    14982,         3271,         10331,     4858,
-    3635,         2021,          2596,      835,
-    12360,         4892,         12206,     1704,
-    13432,         1604,          9118,     2341,
-    3968,         1538,          5479,     9936,
-    3795,          417,          1359,      414,
-    3640,         1569,          7995,     3541,
-    11405,          645,          8552,      635,
-    4056,         1377,         16608,     6124,
-    11420,          700,          2007,      607,
-    12415,         1578,         11119,     4654,
-    13680,         1708,         11990,     1229,
-    7996,         7297,         13231,     5715,
-    2428,         1159,          2073,     1941,
-    6218,         6121,          3546,     1804,
-    8925,         1802,          8679,     1580,
-    13935,         3576,         13313,     6237,
-    6142,         1130,          5994,     1734,
-    14141,         4662,         11271,     3321,
-    12226,         1551,         13931,     3015,
-    5081,        10464,          9444,     6706,
-    1689,          683,          1436,     1306,
-    7212,         3933,          4082,     2713,
-    7793,          704,         15070,      802,
-    6299,         5212,          4337,     5357,
-    6676,          541,          6062,      626,
-    13651,         3700,         11498,     2408,
-    16156,          716,         12177,      751,
-    8065,        11489,          6314,     2256,
-    4466,          496,          7293,      523,
-    10213,         3833,          8394,     3037,
-    8403,          966,         14228,     1880,
-    8703,         5409,         16395,     4863,
-    7420,         1979,          6089,     1230,
-    9371,         4398,         14558,     3363,
-    13559,         2873,         13163,     1465,
-    5534,         1678,         13138,    14771,
-    7338,          600,          1318,      548,
-    4252,         3539,         10044,     2364,
-    10587,          622,         13088,      669,
-    14126,         3526,          5039,     9784,
-    15338,          619,          3115,      590,
-    16442,         3013,         15542,     4168,
-    15537,         1611,         15405,     1228,
-    16023,         9299,          7534,     4976,
-    1990,         1213,         11447,     1157,
-    12512,         5519,          9475,     2644,
-    7716,         2034,         13280,     2239,
-    16011,         5093,          8066,     6761,
-    10083,         1413,          5002,     2347,
-    12523,         5975,         15126,     2899,
-    18264,         2289,         15827,     2527,
-    16265,        10254,         14651,    11319,
-    1797,          337,          3115,      397,
-    3510,         2928,          4592,     2670,
-    7519,          628,         11415,      656,
-    5946,         2435,          6544,     7367,
-    8238,          829,          4000,      863,
-    10032,         2492,         16057,     3551,
-    18204,         1054,          6103,     1454,
-    5884,         7900,         18752,     3468,
-    1864,          544,          9198,      683,
-    11623,         4160,          4594,     1644,
-    3158,         1157,         15953,     2560,
-    12349,         3733,         17420,     5260,
-    6106,         2004,          2917,     1742,
-    16467,         5257,         16787,     1680,
-    17205,         1759,          4773,     3231,
-    7386,         6035,         14342,    10012,
-    4035,          442,          4194,      458,
-    9214,         2242,          7427,     4217,
-    12860,          801,         11186,      825,
-    12648,         2084,         12956,     6554,
-    9505,          996,          6629,      985,
-    10537,         2502,         15289,     5006,
-    12602,         2055,         15484,     1653,
-    16194,         6921,         14231,     5790,
-    2626,          828,          5615,     1686,
-    13663,         5778,          3668,     1554,
-    11313,         2633,          9770,     1459,
-    14003,         4733,         15897,     6291,
-    6278,         1870,          7910,     2285,
-    16978,         4571,         16576,     3849,
-    15248,         2311,         16023,     3244,
-    14459,        17808,         11847,     2763,
-    1981,         1407,          1400,      876,
-    4335,         3547,          4391,     4210,
-    5405,          680,         17461,      781,
-    6501,         5118,          8091,     7677,
-    7355,          794,          8333,     1182,
-    15041,         3160,         14928,     3039,
-    20421,          880,         14545,      852,
-    12337,        14708,          6904,     1920,
-    4225,          933,          8218,     1087,
-    10659,         4084,         10082,     4533,
-    2735,          840,         20657,     1081,
-    16711,         5966,         15873,     4578,
-    10871,         2574,          3773,     1166,
-    14519,         4044,         20699,     2627,
-    15219,         2734,         15274,     2186,
-    6257,         3226,         13125,    19480,
-    7196,          930,          2462,     1618,
-    4515,         3092,         13852,     4277,
-    10460,          833,         17339,      810,
-    16891,         2289,         15546,     8217,
-    13603,         1684,          3197,     1834,
-    15948,         2820,         15812,     5327,
-    17006,         2438,         16788,     1326,
-    15671,         8156,         11726,     8556,
-    3762,         2053,          9563,     1317,
-    13561,         6790,         12227,     1936,
-    8180,         3550,         13287,     1778,
-    16299,         6599,         16291,     7758,
-    8521,         2551,          7225,     2645,
-    18269,         7489,         16885,     2248,
-    17882,         2884,         17265,     3328,
-    9417,        20162,         11042,     8320,
-    1286,          620,          1431,      583,
-    5993,         2289,          3978,     3626,
-    5144,          752,         13409,      830,
-    5553,         2860,         11764,     5908,
-    10737,          560,          5446,      564,
-    13321,         3008,         11946,     3683,
-    19887,          798,          9825,      728,
-    13663,         8748,          7391,     3053,
-    2515,          778,          6050,      833,
-    6469,         5074,          8305,     2463,
-    6141,         1865,         15308,     1262,
-    14408,         4547,         13663,     4515,
-    3137,         2983,          2479,     1259,
-    15088,         4647,         15382,     2607,
-    14492,         2392,         12462,     2537,
-    7539,         2949,         12909,    12060,
-    5468,          684,          3141,      722,
-    5081,         1274,         12732,     4200,
-    15302,          681,          7819,      592,
-    6534,         2021,         16478,     8737,
-    13364,          882,          5397,      899,
-    14656,         2178,         14741,     4227,
-    14270,         1298,         13929,     2029,
-    15477,         7482,         15815,     4572,
-    2521,         2013,          5062,     1804,
-    5159,         6582,          7130,     3597,
-    10920,         1611,         11729,     1708,
-    16903,         3455,         16268,     6640,
-    9306,         1007,          9369,     2106,
-    19182,         5037,         12441,     4269,
-    15919,         1332,         15357,     3512,
-    11898,        14141,         16101,     6854,
-    2010,          737,          3779,      861,
-    11454,         2880,          3564,     3540,
-    9057,         1241,         12391,      896,
-    8546,         4629,         11561,     5776,
-    8129,          589,          8218,      588,
-    18728,         3755,         12973,     3149,
-    15729,          758,         16634,      754,
-    15222,        11138,         15871,     2208,
-    4673,          610,         10218,      678,
-    15257,         4146,          5729,     3327,
-    8377,         1670,         19862,     2321,
-    15450,         5511,         14054,     5481,
-    5728,         2888,          7580,     1346,
-    14384,         5325,         16236,     3950,
-    15118,         3744,         15306,     1435,
-    14597,         4070,         12301,    15696,
-    7617,         1699,          2170,      884,
-    4459,         4567,         18094,     3306,
-    12742,          815,         14926,      907,
-    15016,         4281,         15518,     8368,
-    17994,         1087,          2358,      865,
-    16281,         3787,         15679,     4596,
-    16356,         1534,         16584,     2210,
-    16833,         9697,         15929,     4513,
-    3277,         1085,          9643,     2187,
-    11973,         6068,          9199,     4462,
-    8955,         1629,         10289,     3062,
-    16481,         5155,         15466,     7066,
-    13678,         2543,          5273,     2277,
-    16746,         6213,         16655,     3408,
-    20304,         3363,         18688,     1985,
-    14172,        12867,         15154,    15703,
-    4473,         1020,          1681,      886,
-    4311,         4301,          8952,     3657,
-    5893,         1147,         11647,     1452,
-    15886,         2227,          4582,     6644,
-    6929,         1205,          6220,      799,
-    12415,         3409,         15968,     3877,
-    19859,         2109,          9689,     2141,
-    14742,         8830,         14480,     2599,
-    1817,         1238,          7771,      813,
-    19079,         4410,          5554,     2064,
-    3687,         2844,         17435,     2256,
-    16697,         4486,         16199,     5388,
-    8028,         2763,          3405,     2119,
-    17426,         5477,         13698,     2786,
-    19879,         2720,          9098,     3880,
-    18172,         4833,         17336,    12207,
-    5116,          996,          4935,      988,
-    9888,         3081,          6014,     5371,
-    15881,         1667,          8405,     1183,
-    15087,         2366,         19777,     7002,
-    11963,         1562,          7279,     1128,
-    16859,         1532,         15762,     5381,
-    14708,         2065,         20105,     2155,
-    17158,         8245,         17911,     6318,
-    5467,         1504,          4100,     2574,
-    17421,         6810,          5673,     2888,
-    16636,         3382,          8975,     1831,
-    20159,         4737,         19550,     7294,
-    6658,         2781,         11472,     3321,
-    19397,         5054,         18878,     4722,
-    16439,         2373,         20430,     4386,
-    11353,        26526,         11593,     3068,
-    2866,         1566,          5108,     1070,
-    9614,         4915,          4939,     3536,
-    7541,          878,         20717,      851,
-    6938,         4395,         16799,     7733,
-    10137,         1019,          9845,      964,
-    15494,         3955,         15459,     3430,
-    18863,          982,         20120,      963,
-    16876,        12887,         14334,     4200,
-    6599,         1220,          9222,      814,
-    16942,         5134,          5661,     4898,
-    5488,         1798,         20258,     3962,
-    17005,         6178,         17929,     5929,
-    9365,         3420,          7474,     1971,
-    19537,         5177,         19003,     3006,
-    16454,         3788,         16070,     2367,
-    8664,         2743,          9445,    26358,
-    10856,         1287,          3555,     1009,
-    5606,         3622,         19453,     5512,
-    12453,          797,         20634,      911,
-    15427,         3066,         17037,    10275,
-    18883,         2633,          3913,     1268,
-    19519,         3371,         18052,     5230,
-    19291,         1678,         19508,     3172,
-    18072,        10754,         16625,     6845,
-    3134,         2298,         10869,     2437,
-    15580,         6913,         12597,     3381,
-    11116,         3297,         16762,     2424,
-    18853,         6715,         17171,     9887,
-    12743,         2605,          8937,     3140,
-    19033,         7764,         18347,     3880,
-    20475,         3682,         19602,     3380,
-    13044,        19373,         10526,    23124
-};
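-
-/* Each quantizer entry occupies four consecutive table values; a sketch of
-   how a quantization index selects them (mirrors the accesses made in
-   MR475_gain_quant below; the variable names used here are illustrative only):
-
-    const Word16 *p = &table_gain_MR475[index << 2];
-    Word16 pit_gain_sf0 = p[0];   // pitch gain, subframe 0 (or 2),  Q14
-    Word16 g_fac_sf0    = p[1];   // gain correction factor,         Q12
-    Word16 pit_gain_sf1 = p[2];   // pitch gain, subframe 1 (or 3),  Q14
-    Word16 g_fac_sf1    = p[3];   // gain correction factor,         Q12
-*/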
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_quant_store_results
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    pred_st = pointer to structure of type gc_predState
-    p = pointer to selected quantizer table entry (const Word16)
-    gcode0 = predicted CB gain (Word16)
-    exp_gcode0 = exponent of predicted CB gain (Word16)
-    gain_pit = pointer to Pitch gain (Word16)
-    gain_cod = pointer to Code gain (Word16)
-
- Outputs:
-    pred_st points to the updated structure of type gc_predState
-    gain_pit points to Pitch gain
-    gain_cod points to Code gain
-    pOverflow points to overflow indicator (Flag)
-
- Returns:
-    None.
-
- Global Variables Used:
-    None.
-
- Local Variables Needed:
-    None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function calculates the final fixed codebook gain and the predictor
- update values, and updates the gain predictor.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-static void MR475_quant_store_results(
-
-    gc_predState *pred_st, // i/o: gain predictor state struct
-    const Word16 *p,       // i  : pointer to selected quantizer table entry
-    Word16 gcode0,         // i  : predicted CB gain,     Q(14 - exp_gcode0)
-    Word16 exp_gcode0,     // i  : exponent of predicted CB gain,        Q0
-    Word16 *gain_pit,      // o  : Pitch gain,                           Q14
-    Word16 *gain_cod       // o  : Code gain,                            Q1
-)
-{
-
-    Word16 g_code, exp, frac, tmp;
-    Word32 L_tmp;
-
-    Word16 qua_ener_MR122; // o  : quantized energy error, MR122 version Q10
-    Word16 qua_ener;       // o  : quantized energy error,               Q10
-
-    // Read the quantized gains
-    *gain_pit = *p++;
-    g_code = *p++;
-
-    //------------------------------------------------------------------
-    //  calculate final fixed codebook gain:
-    //  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    //
-    //   gc = gc0 * g
-    //------------------------------------------------------------------
-
-    L_tmp = L_mult(g_code, gcode0);
-    L_tmp = L_shr(L_tmp, sub(10, exp_gcode0));
-    *gain_cod = extract_h(L_tmp);
-
-    //------------------------------------------------------------------
-    //  calculate predictor update values and update gain predictor:
-    //  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    //
-    //   qua_ener_MR122 = log2(g)
-    //   qua_ener       = 20*log10(g)
-    //------------------------------------------------------------------
-
-    Log2 (L_deposit_l (g_code), &exp, &frac); // Log2(x Q12) = log2(x) + 12
-    exp = sub(exp, 12);
-
-    tmp = shr_r (frac, 5);
-    qua_ener_MR122 = add (tmp, shl (exp, 10));
-
-    L_tmp = Mpy_32_16(exp, frac, 24660); // 24660 Q12 ~= 6.0206 = 20*log10(2)
-    qua_ener = pv_round (L_shl (L_tmp, 13)); // Q12 * Q0 = Q13 -> Q10
-
-    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor, the
- resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-static void MR475_quant_store_results(
-    gc_predState *pred_st, /* i/o: gain predictor state struct               */
-    const Word16 *p,       /* i  : pointer to selected quantizer table entry */
-    Word16 gcode0,         /* i  : predicted CB gain,     Q(14 - exp_gcode0) */
-    Word16 exp_gcode0,     /* i  : exponent of predicted CB gain,        Q0  */
-    Word16 *gain_pit,      /* o  : Pitch gain,                           Q14 */
-    Word16 *gain_cod,      /* o  : Code gain,                            Q1  */
-    Flag   *pOverflow      /* o  : overflow indicator                        */
-)
-{
-    Word16 g_code;
-    Word16 exp;
-    Word16 frac;
-    Word16 tmp;
-    Word32 L_tmp;
-
-    Word16 qua_ener_MR122; /* o  : quantized energy error, MR122 version Q10 */
-    Word16 qua_ener;       /* o  : quantized energy error,               Q10 */
-
-
-    /* Read the quantized gains */
-    *gain_pit = *p++;
-    g_code = *p++;
-
-    /*------------------------------------------------------------------*
-     *  calculate final fixed codebook gain:                            *
-     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~                            *
-     *                                                                  *
-     *   gc = gc0 * g                                                   *
-     *------------------------------------------------------------------*/
-
-    L_tmp = ((Word32) g_code * gcode0) << 1;
-    tmp   = 10 - exp_gcode0;
-    L_tmp = L_shr(L_tmp, tmp, pOverflow);
-    *gain_cod = (Word16)(L_tmp >> 16);
-
-    /*------------------------------------------------------------------*
-     *  calculate predictor update values and update gain predictor:    *
-     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~    *
-     *                                                                  *
-     *   qua_ener_MR122 = log2(g)                                       *
-     *   qua_ener       = 20*log10(g)                                   *
-     *------------------------------------------------------------------*/
-
-    /* Log2(x Q12) = log2(x) + 12 */
-    Log2((Word32) g_code, &exp, &frac, pOverflow);
-    exp -= 12;
-
-    tmp = shr_r(frac, 5, pOverflow);
-    qua_ener_MR122 = exp << 10;
-    qua_ener_MR122 = tmp + qua_ener_MR122;
-
-    /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
-    L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
-    L_tmp = L_tmp << 13;
-
-    /* Q12 * Q0 = Q13 -> Q10 */
-    qua_ener = (Word16)((L_tmp + (Word32) 0x00008000L) >> 16);
-
-    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-
-    return;
-}
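-
-/* In floating point, the computation above reduces to the following
-   (illustrative sketch only, using the Q formats noted in the comments;
-   log2/log10/pow as in <math.h>):
-
-    double g   = g_code / 4096.0;                       // correction factor, Q12
-    double gc0 = gcode0 * pow(2.0, exp_gcode0 - 14);    // predicted CB gain
-    double gc  = gc0 * g;                               // final fixed codebook gain
-    double qua_ener_MR122 = log2(g);                    // stored as Q10 above
-    double qua_ener       = 20.0 * log10(g);            // stored as Q10 above
-*/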
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_update_unq_pred
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    pred_st = pointer to structure of type gc_predState
-    exp_gcode0 = predicted CB gain (exponent MSW) (Word16)
-    frac_gcode0 = predicted CB gain (exponent LSW) (Word16)
-    cod_gain_exp = optimum codebook gain (exponent) (Word16)
-    cod_gain_frac = optimum codebook gain (fraction) (Word16)
-
- Outputs:
-    pred_st points to the updated structure of type gc_predState
-    pOverflow points to overflow indicator (Flag)
-
- Returns:
-    None.
-
- Global Variables Used:
-    None.
-
- Local Variables Needed:
-    None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This module uses the optimum codebook gain and updates the "unquantized"
- gain predictor with the (bounded) prediction error.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-void
-MR475_update_unq_pred(
-    gc_predState *pred_st, // i/o: gain predictor state struct
-    Word16 exp_gcode0,     // i  : predicted CB gain (exponent MSW),  Q0
-    Word16 frac_gcode0,    // i  : predicted CB gain (exponent LSW),  Q15
-    Word16 cod_gain_exp,   // i  : optimum codebook gain (exponent),  Q0
-    Word16 cod_gain_frac   // i  : optimum codebook gain (fraction),  Q15
-)
-{
-    Word16 tmp, exp, frac;
-    Word16 qua_ener, qua_ener_MR122;
-    Word32 L_tmp;
-
-    // calculate prediction error factor (given optimum CB gain gcu):
-    //   predErrFact = gcu / gcode0
-    //   (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
-    //    -> limit qua_ener*)
-    //
-    // calculate prediction error (log):
-    //
-    //   qua_ener_MR122 = log2(predErrFact)
-    //   qua_ener       = 20*log10(predErrFact)
-
-    if (cod_gain_frac <= 0)
-    {
-        // if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT
-        // -> set qua_ener(_MR122) directly
-        qua_ener = MIN_QUA_ENER;
-        qua_ener_MR122 = MIN_QUA_ENER_MR122;
-    }
-    else
-    {
-        // convert gcode0 from DPF to standard fraction/exponent format
-        // with normalized frac, i.e. 16384 <= frac <= 32767
-        // Note: exponent correction (exp=exp-14) is done after div_s
-        frac_gcode0 = extract_l (Pow2 (14, frac_gcode0));
-
-        // make sure cod_gain_frac < frac_gcode0  for div_s
-        if (sub(cod_gain_frac, frac_gcode0) >= 0)
-        {
-            cod_gain_frac = shr (cod_gain_frac, 1);
-            cod_gain_exp = add (cod_gain_exp, 1);
-        }
-
-        // predErrFact
-        //   = gcu / gcode0
-        //   = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
-        //   = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
-        //   = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
-
-        frac = div_s (cod_gain_frac, frac_gcode0);
-        tmp = sub (sub (cod_gain_exp, exp_gcode0), 1);
-
-        Log2 (L_deposit_l (frac), &exp, &frac);
-        exp = add (exp, tmp);
-
-        // calculate prediction error (log2, Q10)
-        qua_ener_MR122 = shr_r (frac, 5);
-        qua_ener_MR122 = add (qua_ener_MR122, shl (exp, 10));
-
-        if (sub(qua_ener_MR122, MIN_QUA_ENER_MR122) < 0)
-        {
-            qua_ener = MIN_QUA_ENER;
-            qua_ener_MR122 = MIN_QUA_ENER_MR122;
-        }
-        else if (sub(qua_ener_MR122, MAX_QUA_ENER_MR122) > 0)
-        {
-            qua_ener = MAX_QUA_ENER;
-            qua_ener_MR122 = MAX_QUA_ENER_MR122;
-        }
-        else
-        {
-            // calculate prediction error (20*log10, Q10)
-            L_tmp = Mpy_32_16(exp, frac, 24660);
-            // 24660 Q12 ~= 6.0206 = 20*log10(2)
-            qua_ener = pv_round (L_shl (L_tmp, 13));
-            // Q12 * Q0 = Q13 -> Q26 -> Q10
-        }
-    }
-
-    // update MA predictor memory
-    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor, the
- resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-void MR475_update_unq_pred(
-    gc_predState *pred_st, /* i/o: gain predictor state struct            */
-    Word16 exp_gcode0,     /* i  : predicted CB gain (exponent MSW),  Q0  */
-    Word16 frac_gcode0,    /* i  : predicted CB gain (exponent LSW),  Q15 */
-    Word16 cod_gain_exp,   /* i  : optimum codebook gain (exponent),  Q0  */
-    Word16 cod_gain_frac,  /* i  : optimum codebook gain (fraction),  Q15 */
-    Flag   *pOverflow      /* o  : overflow indicator                     */
-)
-{
-    Word16 tmp;
-    Word16 exp;
-    Word16 frac;
-    Word16 qua_ener;
-    Word16 qua_ener_MR122;
-    Word32 L_tmp;
-
-    /* calculate prediction error factor (given optimum CB gain gcu):
-     *
-     *   predErrFact = gcu / gcode0
-     *   (limit to MIN_PRED_ERR_FACT <= predErrFact <= MAX_PRED_ERR_FACT
-     *    -> limit qua_ener*)
-     *
-     * calculate prediction error (log):
-     *
-     *   qua_ener_MR122 = log2(predErrFact)
-     *   qua_ener       = 20*log10(predErrFact)
-     *
-     */
-
-    if (cod_gain_frac <= 0)
-    {
-        /* if gcu <= 0 -> predErrFact = 0 < MIN_PRED_ERR_FACT */
-        /* -> set qua_ener(_MR122) directly                   */
-        qua_ener = MIN_QUA_ENER;
-        qua_ener_MR122 = MIN_QUA_ENER_MR122;
-    }
-    else
-    {
-        /* convert gcode0 from DPF to standard fraction/exponent format */
-        /* with normalized frac, i.e. 16384 <= frac <= 32767            */
-        /* Note: exponent correction (exp=exp-14) is done after div_s   */
-        frac_gcode0 = (Word16)(Pow2(14, frac_gcode0, pOverflow));
-
-        /* make sure cod_gain_frac < frac_gcode0  for div_s */
-        if (cod_gain_frac >= frac_gcode0)
-        {
-            cod_gain_frac >>= 1;
-            cod_gain_exp += 1;
-        }
-
-        /*
-          predErrFact
-             = gcu / gcode0
-             = cod_gain_frac/frac_gcode0 * 2^(cod_gain_exp-(exp_gcode0-14))
-             = div_s (c_g_f, frac_gcode0)*2^-15 * 2^(c_g_e-exp_gcode0+14)
-             = div_s * 2^(cod_gain_exp-exp_gcode0 - 1)
-        */
-        frac = div_s(cod_gain_frac, frac_gcode0);
-        tmp = cod_gain_exp - exp_gcode0;
-        tmp -= 1;
-
-        Log2((Word32) frac, &exp, &frac, pOverflow);
-        exp += tmp;
-
-        /* calculate prediction error (log2, Q10) */
-        qua_ener_MR122 = shr_r(frac, 5, pOverflow);
-        tmp = exp << 10;
-        qua_ener_MR122 += tmp;
-
-        if (qua_ener_MR122 > MAX_QUA_ENER_MR122)
-        {
-            qua_ener = MAX_QUA_ENER;
-            qua_ener_MR122 = MAX_QUA_ENER_MR122;
-        }
-        else
-        {
-            /* calculate prediction error (20*log10, Q10) */
-            L_tmp = Mpy_32_16(exp, frac, 24660, pOverflow);
-            /* 24660 Q12 ~= 6.0206 = 20*log10(2) */
-            L_tmp =  L_shl(L_tmp, 13, pOverflow);
-            qua_ener = pv_round(L_tmp, pOverflow);
-
-            /* Q12 * Q0 = Q13 -> Q26 -> Q10     */
-        }
-    }
-
-    /* update MA predictor memory */
-    gc_pred_update(pred_st, qua_ener_MR122, qua_ener);
-
-
-    return;
-}
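-
-/* The same update in floating point (illustrative sketch only; gcu, gcode0
-   and the MIN_/MAX_ limits are the quantities referenced in the comments
-   above):
-
-    double predErrFact    = gcu / gcode0;               // optimum / predicted gain
-    double qua_ener_MR122 = log2(predErrFact);          // then clamped to the
-    double qua_ener       = 20.0 * log10(predErrFact);  // [MIN_..., MAX_...] range
-*/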
-
-/****************************************************************************/
-
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: MR475_gain_quant
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    pred_st = pointer to structure of type gc_predState
-    sf0_exp_gcode0 = predicted CB gain (exponent) (Word16)
-    sf0_frac_gcode0 = predicted CB gain (fraction) (Word16)
-    sf0_exp_coeff = energy coeff. (exponent part) (Word16)
-    sf0_frac_coeff = energy coeff. (fraction part) (Word16)
-    sf0_exp_target_en = exponent of target energy (Word16)
-    sf0_frac_target_en = fraction of target energy (Word16)
-    sf1_code_nosharp = innovative codebook vector  (Word16)
-    sf1_exp_gcode0 = predicted CB gain (exponent) (Word16)
-    sf1_frac_gcode0 = predicted CB gain (fraction) (Word16)
-    sf1_exp_coeff = energy coeff. (exponent part) (Word16)
-    sf1_frac_coeff = energy coeff. (fraction part) (Word16)
-    sf1_exp_target_en = exponent of target energy (Word16)
-    sf1_frac_target_en = fraction of target energy (Word16)
-    gp_limit = pitch gain limit (Word16)
-    sf0_gain_pit = pointer to Pitch gain (Word16)
-    sf0_gain_cod = pointer to Code gain (Word16)
-    sf1_gain_pit = pointer to Pitch gain (Word16)
-    sf1_gain_cod = pointer to Code gain (Word16)
-
- Outputs:
-    pred_st points to the updated structure of type gc_predState
-    sf0_gain_pit points to Pitch gain
-    sf0_gain_cod points to Code gain
-    sf1_gain_pit points to Pitch gain
-    sf1_gain_cod points to Code gain
-
- Returns:
-    index = index of quantization
-
- Global Variables Used:
-    None.
-
- Local Variables Needed:
-    None.
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This module provides quantization of pitch and codebook gains for two
- subframes using the predicted codebook gain.
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- qgain475.c, UMTS GSM AMR speech codec, R99 - Version 3.2.0, March 2, 2001
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-Word16
-MR475_gain_quant(              // o  : index of quantization.
-    gc_predState *pred_st,     // i/o: gain predictor state struct
-
-                               // data from subframe 0 (or 2)
-    Word16 sf0_exp_gcode0,     // i  : predicted CB gain (exponent),      Q0
-    Word16 sf0_frac_gcode0,    // i  : predicted CB gain (fraction),      Q15
-    Word16 sf0_exp_coeff[],    // i  : energy coeff. (5), exponent part,  Q0
-    Word16 sf0_frac_coeff[],   // i  : energy coeff. (5), fraction part,  Q15
-                               //      (frac_coeff and exp_coeff computed in
-                               //       calc_filt_energies())
-    Word16 sf0_exp_target_en,  // i  : exponent of target energy,         Q0
-    Word16 sf0_frac_target_en, // i  : fraction of target energy,         Q15
-
-                               // data from subframe 1 (or 3)
-    Word16 sf1_code_nosharp[], // i  : innovative codebook vector (L_SUBFR)
-                               //      (without pitch sharpening)
-    Word16 sf1_exp_gcode0,     // i  : predicted CB gain (exponent),      Q0
-    Word16 sf1_frac_gcode0,    // i  : predicted CB gain (fraction),      Q15
-    Word16 sf1_exp_coeff[],    // i  : energy coeff. (5), exponent part,  Q0
-    Word16 sf1_frac_coeff[],   // i  : energy coeff. (5), fraction part,  Q15
-                               //      (frac_coeff and exp_coeff computed in
-                               //       calc_filt_energies())
-    Word16 sf1_exp_target_en,  // i  : exponent of target energy,         Q0
-    Word16 sf1_frac_target_en, // i  : fraction of target energy,         Q15
-
-    Word16 gp_limit,           // i  : pitch gain limit
-
-    Word16 *sf0_gain_pit,      // o  : Pitch gain,                        Q14
-    Word16 *sf0_gain_cod,      // o  : Code gain,                         Q1
-
-    Word16 *sf1_gain_pit,      // o  : Pitch gain,                        Q14
-    Word16 *sf1_gain_cod       // o  : Code gain,                         Q1
-)
-{
-    const Word16 *p;
-    Word16 i, index = 0;
-    Word16 tmp;
-    Word16 exp;
-    Word16 sf0_gcode0, sf1_gcode0;
-    Word16 g_pitch, g2_pitch, g_code, g2_code, g_pit_cod;
-    Word16 coeff[10], coeff_lo[10], exp_max[10];  // 0..4: sf0; 5..9: sf1
-    Word32 L_tmp, dist_min;
-
-    //-------------------------------------------------------------------
-    //  predicted codebook gain
-    //  ~~~~~~~~~~~~~~~~~~~~~~~
-    //  gc0     = 2^(exp_gcode0 + frac_gcode0)
-    //
-    //  gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0)
-    //-------------------------------------------------------------------
-
-    sf0_gcode0 = extract_l(Pow2(14, sf0_frac_gcode0));
-    sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
-
-    // For each subframe, the error energy (sum) to be minimized consists
-    // of five terms, t[0..4].
-    //
-    //                      t[0] =    gp^2  * <y1 y1>
-    //                      t[1] = -2*gp    * <xn y1>
-    //                      t[2] =    gc^2  * <y2 y2>
-    //                      t[3] = -2*gc    * <xn y2>
-    //                      t[4] =  2*gp*gc * <y1 y2>
-    //
-
-    // sf 0
-    // determine the scaling exponent for g_code: ec = ec0 - 11
-    exp = sub(sf0_exp_gcode0, 11);
-
-    // calculate exp_max[i] = s[i]-1
-    exp_max[0] = sub(sf0_exp_coeff[0], 13);
-    exp_max[1] = sub(sf0_exp_coeff[1], 14);
-    exp_max[2] = add(sf0_exp_coeff[2], add(15, shl(exp, 1)));
-    exp_max[3] = add(sf0_exp_coeff[3], exp);
-    exp_max[4] = add(sf0_exp_coeff[4], add(1, exp));
-
-    // sf 1
-    // determine the scaling exponent for g_code: ec = ec0 - 11
-    exp = sub(sf1_exp_gcode0, 11);
-
-    // calculate exp_max[i] = s[i]-1
-    exp_max[5] = sub(sf1_exp_coeff[0], 13);
-    exp_max[6] = sub(sf1_exp_coeff[1], 14);
-    exp_max[7] = add(sf1_exp_coeff[2], add(15, shl(exp, 1)));
-    exp_max[8] = add(sf1_exp_coeff[3], exp);
-    exp_max[9] = add(sf1_exp_coeff[4], add(1, exp));
-
-    //-------------------------------------------------------------------
-    //  Gain search equalisation:
-    //  ~~~~~~~~~~~~~~~~~~~~~~~~~
-    //  The MSE for the two subframes is weighted differently if there
-    //  is a big difference in the corresponding target energies
-    //-------------------------------------------------------------------
-
-    // make the target energy exponents the same by de-normalizing the
-    // fraction of the smaller one. This is necessary to be able to compare
-    // them
-
-    exp = sf0_exp_target_en - sf1_exp_target_en;
-    if (exp > 0)
-    {
-        sf1_frac_target_en = shr (sf1_frac_target_en, exp);
-    }
-    else
-    {
-        sf0_frac_target_en = shl (sf0_frac_target_en, exp);
-    }
-
-    // assume no change of exponents
-    exp = 0;
-
-    // test for target energy difference; set exp to +1 or -1 to scale
-    // up/down coefficients for sf 1
-
-    tmp = shr_r (sf1_frac_target_en, 1);   // tmp = ceil(0.5*en(sf1))
-    if (sub (tmp, sf0_frac_target_en) > 0) // tmp > en(sf0)?
-    {
-        // target_energy(sf1) > 2*target_energy(sf0)
-        //   -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
-        exp = 1;
-    }
-    else
-    {
-        tmp = shr (add (sf0_frac_target_en, 3), 2); // tmp=ceil(0.25*en(sf0))
-        if (sub (tmp, sf1_frac_target_en) > 0)      // tmp > en(sf1)?
-        {
-            // target_energy(sf1) < 0.25*target_energy(sf0)
-            //   -> scale down MSE(sf0) by 0.5 by subtracting 1 from
-            //      coefficients 0..4
-            exp = -1;
-        }
-    }
-
-    for (i = 0; i < 5; i++)
-    {
-        exp_max[i] = add (exp_max[i], exp);
-    }
-
-    //-------------------------------------------------------------------
-    //  Find maximum exponent:
-    //  ~~~~~~~~~~~~~~~~~~~~~~
-    //
-    //  For the sum operation, all terms must have the same scaling;
-    //  that scaling should be low enough to prevent overflow. Therefore,
-    //  the maximum scale is determined and all coefficients are
-    //  re-scaled:
-    //
-    //    exp = max(exp_max[i]) + 1;
-    //    e = exp_max[i] - exp;         e <= 0!
-    //    c[i] = c[i]*2^e
-    //-------------------------------------------------------------------
-
-    exp = exp_max[0];
-    for (i = 1; i < 10; i++)
-    {
-        if (sub(exp_max[i], exp) > 0)
-        {
-            exp = exp_max[i];
-        }
-    }
-    exp = add(exp, 1);      // To avoid overflow
-
-    p = &sf0_frac_coeff[0];
-    for (i = 0; i < 5; i++) {
-        tmp = sub(exp, exp_max[i]);
-        L_tmp = L_deposit_h(*p++);
-        L_tmp = L_shr(L_tmp, tmp);
-        L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
-    }
-    p = &sf1_frac_coeff[0];
-    for (; i < 10; i++) {
-        tmp = sub(exp, exp_max[i]);
-        L_tmp = L_deposit_h(*p++);
-        L_tmp = L_shr(L_tmp, tmp);
-        L_Extract(L_tmp, &coeff[i], &coeff_lo[i]);
-    }
-
-    //-------------------------------------------------------------------
-    //  Codebook search:
-    //  ~~~~~~~~~~~~~~~~
-    //
-    //  For each pair (g_pitch, g_fac) in the table calculate the
-    //  terms t[0..4] and sum them up; the result is the mean squared
-    //  error for the quantized gains from the table. The index for the
-    //  minimum MSE is stored and finally used to retrieve the quantized
-    //  gains.
-    //-------------------------------------------------------------------
-
-    // start with "infinite" MSE
-    dist_min = MAX_32;
-
-    p = &table_gain_MR475[0];
-
-    for (i = 0; i < MR475_VQ_SIZE; i++)
-    {
-        // subframe 0 (and 2) calculations
-        g_pitch = *p++;
-        g_code = *p++;
-
-        g_code = mult(g_code, sf0_gcode0);
-        g2_pitch = mult(g_pitch, g_pitch);
-        g2_code = mult(g_code, g_code);
-        g_pit_cod = mult(g_code, g_pitch);
-
-        L_tmp = Mpy_32_16(       coeff[0], coeff_lo[0], g2_pitch);
-        L_tmp = Mac_32_16(L_tmp, coeff[1], coeff_lo[1], g_pitch);
-        L_tmp = Mac_32_16(L_tmp, coeff[2], coeff_lo[2], g2_code);
-        L_tmp = Mac_32_16(L_tmp, coeff[3], coeff_lo[3], g_code);
-        L_tmp = Mac_32_16(L_tmp, coeff[4], coeff_lo[4], g_pit_cod);
-
-        tmp = sub (g_pitch, gp_limit);
-
-        // subframe 1 (and 3) calculations
-        g_pitch = *p++;
-        g_code = *p++;
-
-        if (tmp <= 0 && sub(g_pitch, gp_limit) <= 0)
-        {
-            g_code = mult(g_code, sf1_gcode0);
-            g2_pitch = mult(g_pitch, g_pitch);
-            g2_code = mult(g_code, g_code);
-            g_pit_cod = mult(g_code, g_pitch);
-
-            L_tmp = Mac_32_16(L_tmp, coeff[5], coeff_lo[5], g2_pitch);
-            L_tmp = Mac_32_16(L_tmp, coeff[6], coeff_lo[6], g_pitch);
-            L_tmp = Mac_32_16(L_tmp, coeff[7], coeff_lo[7], g2_code);
-            L_tmp = Mac_32_16(L_tmp, coeff[8], coeff_lo[8], g_code);
-            L_tmp = Mac_32_16(L_tmp, coeff[9], coeff_lo[9], g_pit_cod);
-
-            // store table index if MSE for this index is lower
-            // than the minimum MSE seen so far
-            if (L_sub(L_tmp, dist_min) < (Word32) 0)
-            {
-                dist_min = L_tmp;
-                index = i;
-            }
-        }
-    }
-
-    //------------------------------------------------------------------
-    //  read quantized gains and update MA predictor memories
-    //  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    //------------------------------------------------------------------
-
-    // for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
-    // as those calculated from the "real" predictor using quantized gains
-    tmp = shl(index, 2);
-    MR475_quant_store_results(pred_st,
-                              &table_gain_MR475[tmp],
-                              sf0_gcode0,
-                              sf0_exp_gcode0,
-                              sf0_gain_pit,
-                              sf0_gain_cod);
-
-    // calculate new predicted gain for subframe 1 (this time using
-    // the real, quantized gains)
-    gc_pred(pred_st, MR475, sf1_code_nosharp,
-            &sf1_exp_gcode0, &sf1_frac_gcode0,
-            &sf0_exp_gcode0, &sf0_gcode0); // last two args are dummy
-    sf1_gcode0 = extract_l(Pow2(14, sf1_frac_gcode0));
-
-    tmp = add (tmp, 2);
-    MR475_quant_store_results(pred_st,
-                              &table_gain_MR475[tmp],
-                              sf1_gcode0,
-                              sf1_exp_gcode0,
-                              sf1_gain_pit,
-                              sf1_gain_cod);
-
-    return index;
-}
-
-------------------------------------------------------------------------------
- RESOURCES USED [optional]
-
- When the code is written for a specific target processor, the
- resources used should be documented below.
-
- HEAP MEMORY USED: x bytes
-
- STACK MEMORY USED: x bytes
-
- CLOCK CYCLES: (cycle count equation for this function) + (variable
-                used to represent cycle count for each subroutine
-                called)
-     where: (cycle count variable) = cycle count for [subroutine
-                                     name]
-
-------------------------------------------------------------------------------
- CAUTION [optional]
- [State any special notes, constraints or cautions for users of this function]
-
-------------------------------------------------------------------------------
-*/
-
-Word16 MR475_gain_quant(       /* o  : index of quantization.                 */
-    gc_predState *pred_st,     /* i/o: gain predictor state struct            */
-
-    /* data from subframe 0 (or 2) */
-    Word16 sf0_exp_gcode0,     /* i  : predicted CB gain (exponent),      Q0  */
-    Word16 sf0_frac_gcode0,    /* i  : predicted CB gain (fraction),      Q15 */
-    Word16 sf0_exp_coeff[],    /* i  : energy coeff. (5), exponent part,  Q0  */
-    Word16 sf0_frac_coeff[],   /* i  : energy coeff. (5), fraction part,  Q15 */
-    /*      (frac_coeff and exp_coeff computed in  */
-    /*       calc_filt_energies())                 */
-    Word16 sf0_exp_target_en,  /* i  : exponent of target energy,         Q0  */
-    Word16 sf0_frac_target_en, /* i  : fraction of target energy,         Q15 */
-
-    /* data from subframe 1 (or 3) */
-    Word16 sf1_code_nosharp[], /* i  : innovative codebook vector (L_SUBFR)   */
-    /*      (without pitch sharpening)             */
-    Word16 sf1_exp_gcode0,     /* i  : predicted CB gain (exponent),      Q0  */
-    Word16 sf1_frac_gcode0,    /* i  : predicted CB gain (fraction),      Q15 */
-    Word16 sf1_exp_coeff[],    /* i  : energy coeff. (5), exponent part,  Q0  */
-    Word16 sf1_frac_coeff[],   /* i  : energy coeff. (5), fraction part,  Q15 */
-    /*      (frac_coeff and exp_coeff computed in  */
-    /*       calc_filt_energies())                 */
-    Word16 sf1_exp_target_en,  /* i  : exponent of target energy,         Q0  */
-    Word16 sf1_frac_target_en, /* i  : fraction of target energy,         Q15 */
-
-    Word16 gp_limit,           /* i  : pitch gain limit                       */
-
-    Word16 *sf0_gain_pit,      /* o  : Pitch gain,                        Q14 */
-    Word16 *sf0_gain_cod,      /* o  : Code gain,                         Q1  */
-
-    Word16 *sf1_gain_pit,      /* o  : Pitch gain,                        Q14 */
-    Word16 *sf1_gain_cod,      /* o  : Code gain,                         Q1  */
-    Flag   *pOverflow          /* o  : overflow indicator                     */
-)
-{
-    const Word16 *p;
-    Word16 i;
-    Word16 index = 0;
-    Word16 tmp;
-    Word16 exp;
-    Word16 sf0_gcode0;
-    Word16 sf1_gcode0;
-    Word16 g_pitch;
-    Word16 g2_pitch;
-    Word16 g_code;
-    Word16 g2_code;
-    Word16 g_pit_cod;
-    Word16 coeff[10];
-    Word16 coeff_lo[10];
-    Word16 exp_max[10];  /* 0..4: sf0; 5..9: sf1 */
-    Word32 L_tmp;
-    Word32 dist_min;
-
-    /*-------------------------------------------------------------------*
-     *  predicted codebook gain                                          *
-     *  ~~~~~~~~~~~~~~~~~~~~~~~                                          *
-     *  gc0     = 2^(exp_gcode0 + frac_gcode0)                         *
-     *                                                                   *
-     *  gcode0 (Q14) = 2^14*2^frac_gcode0 = gc0 * 2^(14-exp_gcode0)      *
-     *-------------------------------------------------------------------*/
-
-    sf0_gcode0 = (Word16)(Pow2(14, sf0_frac_gcode0, pOverflow));
-    sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
-
-    /*
-     * For each subframe, the error energy (sum) to be minimized consists
-     * of five terms, t[0..4].
-     *
-     *                      t[0] =    gp^2  * <y1 y1>
-     *                      t[1] = -2*gp    * <xn y1>
-     *                      t[2] =    gc^2  * <y2 y2>
-     *                      t[3] = -2*gc    * <xn y2>
-     *                      t[4] =  2*gp*gc * <y1 y2>
-     *
-     */
-
-    /* sf 0 */
-    /* determine the scaling exponent for g_code: ec = ec0 - 11 */
-    exp = sf0_exp_gcode0 - 11;
-
-    /* calculate exp_max[i] = s[i]-1 */
-    exp_max[0] = (sf0_exp_coeff[0] - 13);
-    exp_max[1] = (sf0_exp_coeff[1] - 14);
-    exp_max[2] = (sf0_exp_coeff[2] + (15 + (exp << 1)));
-    exp_max[3] = (sf0_exp_coeff[3] + exp);
-    exp_max[4] = (sf0_exp_coeff[4] + (1 + exp));
-
-    /* sf 1 */
-    /* determine the scaling exponent for g_code: ec = ec0 - 11 */
-    exp = sf1_exp_gcode0 - 11;
-
-    /* calculate exp_max[i] = s[i]-1 */
-    exp_max[5] = (sf1_exp_coeff[0] - 13);
-    exp_max[6] = (sf1_exp_coeff[1] - 14);
-    exp_max[7] = (sf1_exp_coeff[2] + (15 + (exp << 1)));
-    exp_max[8] = (sf1_exp_coeff[3] + exp);
-    exp_max[9] = (sf1_exp_coeff[4] + (1 + exp));
-
-    /*-------------------------------------------------------------------*
-     *  Gain search equalisation:                                        *
-     *  ~~~~~~~~~~~~~~~~~~~~~~~~~                                        *
-     *  The MSE for the two subframes is weighted differently if there   *
-     *  is a big difference in the corresponding target energies         *
-     *-------------------------------------------------------------------*/
-
-    /* make the target energy exponents the same by de-normalizing the
-       fraction of the smaller one. This is necessary to be able to compare
-       them
-     */
-    exp = sf0_exp_target_en - sf1_exp_target_en;
-    if (exp > 0)
-    {
-        sf1_frac_target_en >>= exp;
-    }
-    else
-    {
-        sf0_frac_target_en >>= (-exp);
-    }
-
-    /* assume no change of exponents */
-    exp = 0;
-
-    /* test for target energy difference; set exp to +1 or -1 to scale
-     * up/down coefficients for sf 1
-     */
-    tmp = shr_r(sf1_frac_target_en, 1, pOverflow);  /* tmp = ceil(0.5*en(sf1)) */
-
-    if (tmp > sf0_frac_target_en)          /* tmp > en(sf0)? */
-    {
-        /*
-         * target_energy(sf1) > 2*target_energy(sf0)
-         *   -> scale up MSE(sf0) by 2 by adding 1 to exponents 0..4
-         */
-        exp = 1;
-    }
-    else
-    {
-        tmp = ((sf0_frac_target_en + 3) >> 2); /* tmp=ceil(0.25*en(sf0)) */
-
-        if (tmp > sf1_frac_target_en)      /* tmp > en(sf1)? */
-        {
-            /*
-             * target_energy(sf1) < 0.25*target_energy(sf0)
-             *   -> scale down MSE(sf0) by 0.5 by subtracting 1 from
-             *      exponents 0..4
-             */
-            exp = -1;
-        }
-    }
-
-    for (i = 0; i < 5; i++)
-    {
-        exp_max[i] += exp;
-    }
-
-    /*-------------------------------------------------------------------*
-     *  Find maximum exponent:                                           *
-     *  ~~~~~~~~~~~~~~~~~~~~~~                                           *
-     *                                                                   *
-     *  For the sum operation, all terms must have the same scaling;     *
-     *  that scaling should be low enough to prevent overflow. There-    *
-     *  fore, the maximum scale is determined and all coefficients are   *
-     *  re-scaled:                                                       *
-     *                                                                   *
-     *    exp = max(exp_max[i]) + 1;                                     *
-     *    e = exp_max[i]-exp;         e <= 0!                            *
-     *    c[i] = c[i]*2^e                                                *
-     *-------------------------------------------------------------------*/
-
-    exp = exp_max[0];
-    for (i = 9; i > 0; i--)
-    {
-        if (exp_max[i] > exp)
-        {
-            exp = exp_max[i];
-        }
-    }
-    exp++;      /* To avoid overflow */
-
-    p = &sf0_frac_coeff[0];
-    for (i = 0; i < 5; i++)
-    {
-        tmp = (exp - exp_max[i]);
-        L_tmp = ((Word32)(*p++) << 16);
-        L_tmp = L_shr(L_tmp, tmp, pOverflow);
-        coeff[i] = (Word16)(L_tmp >> 16);
-        coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
-    }
-    p = &sf1_frac_coeff[0];
-    for (; i < 10; i++)
-    {
-        tmp = exp - exp_max[i];
-        L_tmp = ((Word32)(*p++) << 16);
-        L_tmp = L_shr(L_tmp, tmp, pOverflow);
-        coeff[i] = (Word16)(L_tmp >> 16);
-        coeff_lo[i] = (Word16)((L_tmp >> 1) - ((L_tmp >> 16) << 15));
-    }
-
-
-    /*-------------------------------------------------------------------*
-     *  Codebook search:                                                 *
-     *  ~~~~~~~~~~~~~~~~                                                 *
-     *                                                                   *
-     *  For each pair (g_pitch, g_fac) in the table calculate the        *
-     *  terms t[0..4] and sum them up; the result is the mean squared    *
-     *  error for the quantized gains from the table. The index for the  *
-     *  minimum MSE is stored and finally used to retrieve the quantized *
-     *  gains                                                            *
-     *-------------------------------------------------------------------*/
-
-    /* start with "infinite" MSE */
-    dist_min = MAX_32;
-
-    p = &table_gain_MR475[0];
-
-    for (i = 0; i < MR475_VQ_SIZE; i++)
-    {
-        /* subframe 0 (and 2) calculations */
-        g_pitch = *p++;
-        g_code = *p++;
-
-        /* apply the predicted gain, then form the squared and cross terms */
-        g_code    = (Word16)(((Word32) g_code * sf0_gcode0) >> 15);
-        g2_pitch  = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
-        g2_code   = (Word16)(((Word32) g_code * g_code) >> 15);
-        g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
-
-
-        L_tmp = Mpy_32_16(coeff[0], coeff_lo[0], g2_pitch, pOverflow) +
-                Mpy_32_16(coeff[1], coeff_lo[1], g_pitch, pOverflow) +
-                Mpy_32_16(coeff[2], coeff_lo[2], g2_code, pOverflow) +
-                Mpy_32_16(coeff[3], coeff_lo[3], g_code, pOverflow) +
-                Mpy_32_16(coeff[4], coeff_lo[4], g_pit_cod, pOverflow);
-
-        tmp = (g_pitch - gp_limit);
-
-        /* subframe 1 (and 3) calculations */
-        g_pitch = *p++;
-        g_code = *p++;
-
-        if ((tmp <= 0) && (g_pitch <= gp_limit))
-        {
-            g_code = (Word16)(((Word32) g_code * sf1_gcode0) >> 15);
-            g2_pitch  = (Word16)(((Word32) g_pitch * g_pitch) >> 15);
-            g2_code   = (Word16)(((Word32) g_code * g_code) >> 15);
-            g_pit_cod = (Word16)(((Word32) g_code * g_pitch) >> 15);
-
-            L_tmp += (Mpy_32_16(coeff[5], coeff_lo[5], g2_pitch, pOverflow) +
-                      Mpy_32_16(coeff[6], coeff_lo[6], g_pitch, pOverflow) +
-                      Mpy_32_16(coeff[7], coeff_lo[7], g2_code, pOverflow) +
-                      Mpy_32_16(coeff[8], coeff_lo[8], g_code, pOverflow) +
-                      Mpy_32_16(coeff[9], coeff_lo[9], g_pit_cod, pOverflow));
-
-            /* store table index if MSE for this index is lower
-               than the minimum MSE seen so far */
-            if (L_tmp < dist_min)
-            {
-                dist_min = L_tmp;
-                index = i;
-            }
-        }
-    }
-
-    /*------------------------------------------------------------------*
-     *  read quantized gains and update MA predictor memories           *
-     *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~           *
-     *------------------------------------------------------------------*/
-
-    /* for subframe 0, the pre-calculated gcode0/exp_gcode0 are the same
-       as those calculated from the "real" predictor using quantized gains */
-    tmp = index << 2;
-    MR475_quant_store_results(pred_st,
-                              &table_gain_MR475[tmp],
-                              sf0_gcode0,
-                              sf0_exp_gcode0,
-                              sf0_gain_pit,
-                              sf0_gain_cod,
-                              pOverflow);
-
-    /* calculate new predicted gain for subframe 1 (this time using
-       the real, quantized gains)                                   */
-    gc_pred(pred_st, MR475, sf1_code_nosharp,
-            &sf1_exp_gcode0, &sf1_frac_gcode0,
-            &sf0_exp_gcode0, &sf0_gcode0, /* dummy args */
-            pOverflow);
-
-    sf1_gcode0 = (Word16)(Pow2(14, sf1_frac_gcode0, pOverflow));
-
-    tmp += 2;
-    MR475_quant_store_results(
-        pred_st,
-        &table_gain_MR475[tmp],
-        sf1_gcode0,
-        sf1_exp_gcode0,
-        sf1_gain_pit,
-        sf1_gain_cod,
-        pOverflow);
-
-    return(index);
-}
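
For reference, the joint gain search deleted above minimizes, over all codebook entries, the
weighted error E(gp, gc) = c0*gp^2 + c1*gp + c2*gc^2 + c3*gc + c4*gp*gc summed over both
subframes, where gc is the tabulated factor scaled by the predicted gain gcode0 and the five
coefficients fold in the +/-2 factors from the term definitions in the code above. The sketch
below restates that search in plain floating point; the real code works in split hi/lo Q-format
fixed point, and the four-values-per-entry table layout and all names here are illustrative only.

/* Illustrative floating-point sketch of the joint gain search performed by
 * MR475_gain_quant().  The production code uses Q-format fixed point with
 * split hi/lo 32-bit coefficients; here everything is a plain double and the
 * table is assumed to hold (gp_sf0, gc_factor_sf0, gp_sf1, gc_factor_sf1)
 * per entry, mirroring how the loop above consumes table_gain_MR475. */
#include <float.h>
#include <stddef.h>

typedef struct {
    double c[5];    /* weights for the gp^2, gp, gc^2, gc and gp*gc terms */
    double gcode0;  /* predicted codebook gain for this subframe */
} SubframeTerms;

static double subframe_mse(const SubframeTerms *s, double gp, double gc_factor)
{
    double gc = gc_factor * s->gcode0;
    return s->c[0] * gp * gp + s->c[1] * gp
         + s->c[2] * gc * gc + s->c[3] * gc
         + s->c[4] * gp * gc;
}

/* Returns the entry index minimizing the summed MSE of both subframes while
 * honoring the pitch-gain limit, as the fixed-point loop above does. */
static int joint_gain_search(const double *table, size_t entries,
                             const SubframeTerms *sf0, const SubframeTerms *sf1,
                             double gp_limit)
{
    double dist_min = DBL_MAX;
    int index = 0;

    for (size_t i = 0; i < entries; i++) {
        const double *q = &table[4 * i];
        if (q[0] > gp_limit || q[2] > gp_limit)
            continue;   /* both pitch gains must respect the limit */
        double d = subframe_mse(sf0, q[0], q[1]) + subframe_mse(sf1, q[2], q[3]);
        if (d < dist_min) {
            dist_min = d;
            index = (int)i;
        }
    }
    return index;
}
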
diff --git a/media/libstagefright/codecs/amrnb/enc/test/Android.bp b/media/libstagefright/codecs/amrnb/enc/test/Android.bp
deleted file mode 100644
index e8982fe..0000000
--- a/media/libstagefright/codecs/amrnb/enc/test/Android.bp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "AmrnbEncoderTest",
-    gtest: true,
-
-    srcs: [
-        "AmrnbEncoderTest.cpp",
-    ],
-
-    static_libs: [
-        "libstagefright_amrnb_common",
-        "libstagefright_amrnbenc",
-        "libaudioutils",
-        "libsndfile",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp b/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
deleted file mode 100644
index 54de1cc..0000000
--- a/media/libstagefright/codecs/amrnb/fuzzer/Android.bp
+++ /dev/null
@@ -1,37 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
-    name: "amrnb_dec_fuzzer",
-    host_supported: true,
-    srcs: [
-        "amrnb_dec_fuzzer.cpp",
-    ],
-    static_libs: [
-        "libstagefright_amrnbdec",
-        "libstagefright_amrnb_common",
-        "liblog",
-    ],
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
diff --git a/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp b/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
deleted file mode 100644
index d4e7e5c..0000000
--- a/media/libstagefright/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
+++ /dev/null
@@ -1,94 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-#include <string.h>
-#include <algorithm>
-#include "gsmamr_dec.h"
-
-// Constants for AMR-NB
-constexpr int32_t kSamplesPerFrame = L_FRAME;
-constexpr int32_t kBitsPerSample = 16;
-constexpr int32_t kOutputBufferSize = kSamplesPerFrame * kBitsPerSample / 8;
-const bitstream_format kBitStreamFormats[2] = {MIME_IETF, IF2};
-const int32_t kLocalWmfDecBytesPerFrame[8] = {12, 13, 15, 17, 19, 20, 26, 31};
-const int32_t kLocalIf2DecBytesPerFrame[8] = {13, 14, 16, 18, 19, 21, 26, 31};
-
-class Codec {
- public:
-  Codec() = default;
-  ~Codec() { deInitDecoder(); }
-  int16_t initDecoder();
-  void deInitDecoder();
-  void decodeFrames(const uint8_t *data, size_t size);
-
- private:
-  void *mAmrHandle = nullptr;
-};
-
-int16_t Codec::initDecoder() { return GSMInitDecode(&mAmrHandle, (Word8 *)"AMRNBDecoder"); }
-
-void Codec::deInitDecoder() { GSMDecodeFrameExit(&mAmrHandle); }
-
-void Codec::decodeFrames(const uint8_t *data, size_t size) {
-  while (size > 0) {
-    uint8_t mode = *data;
-    bool bit = mode & 0x01;
-    bitstream_format bitstreamFormat = kBitStreamFormats[bit];
-    int32_t frameSize = 0;
-    /* Find frame type */
-    Frame_Type_3GPP frameType = static_cast<Frame_Type_3GPP>((mode >> 3) & 0x07);
-    ++data;
-    --size;
-    if (bit) {
-      frameSize = kLocalIf2DecBytesPerFrame[frameType];
-    } else {
-      frameSize = kLocalWmfDecBytesPerFrame[frameType];
-    }
-    int16_t outputBuf[kOutputBufferSize];
-    uint8_t *inputBuf = new uint8_t[frameSize];
-    if (!inputBuf) {
-      return;
-    }
-    int32_t minSize = std::min((int32_t)size, frameSize);
-    memcpy(inputBuf, data, minSize);
-    AMRDecode(mAmrHandle, frameType, inputBuf, outputBuf, bitstreamFormat);
-    /* AMRDecode() decodes minSize number of bytes if decode is successful.
-     * AMRDecode() returns -1 if decode fails.
-     * Even if no bytes are decoded, increment by minSize to ensure fuzzer proceeds
-     * to feed next data */
-    data += minSize;
-    size -= minSize;
-    delete[] inputBuf;
-  }
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  if (size < 2) {
-    return 0;
-  }
-  Codec *codec = new Codec();
-  if (!codec) {
-    return 0;
-  }
-  if (codec->initDecoder() == 0) {
-    codec->decodeFrames(data, size);
-  }
-  delete codec;
-  return 0;
-}
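
For reference, the deleted fuzzer derives each frame's layout from a single leading mode byte:
bit 0 selects WMF/MIME-IETF versus IF2 framing, and bits 3..5 select the 3GPP frame type, which
indexes the per-format byte counts. A minimal standalone sketch of that header parsing follows;
the constant and type names below are illustrative, not part of the decoder API.

/* Sketch of the per-frame header convention used by the fuzzer above:
 * one mode byte selects framing and frame type and thus the payload size. */
#include <stdint.h>
#include <stdio.h>

static const int32_t kWmfBytesPerFrame[8] = {12, 13, 15, 17, 19, 20, 26, 31};
static const int32_t kIf2BytesPerFrame[8] = {13, 14, 16, 18, 19, 21, 26, 31};

typedef struct {
    int     use_if2;     /* 0: WMF/MIME-IETF framing, 1: IF2 framing */
    int     frame_type;  /* 3GPP frame type 0..7 */
    int32_t frame_bytes; /* payload bytes consumed for this frame */
} FrameHeader;

static FrameHeader parse_mode_byte(uint8_t mode)
{
    FrameHeader h;
    h.use_if2     = mode & 0x01;          /* bit 0 picks the framing */
    h.frame_type  = (mode >> 3) & 0x07;   /* bits 3..5 pick the frame type */
    h.frame_bytes = h.use_if2 ? kIf2BytesPerFrame[h.frame_type]
                              : kWmfBytesPerFrame[h.frame_type];
    return h;
}

int main(void)
{
    FrameHeader h = parse_mode_byte(0x19); /* IF2 framing, frame type 3 -> 18 bytes */
    printf("if2=%d type=%d bytes=%d\n", h.use_if2, h.frame_type, (int)h.frame_bytes);
    return 0;
}
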
diff --git a/media/libstagefright/codecs/amrwb/Android.bp b/media/libstagefright/codecs/amrwb/Android.bp
deleted file mode 100644
index 204cbe3..0000000
--- a/media/libstagefright/codecs/amrwb/Android.bp
+++ /dev/null
@@ -1,104 +0,0 @@
-cc_library_static {
-    name: "libstagefright_amrwbdec",
-    vendor_available: true,
-    host_supported: true,
-    min_sdk_version: "29",
-
-    srcs: [
-        "src/agc2_amr_wb.cpp",
-        "src/band_pass_6k_7k.cpp",
-        "src/dec_acelp_2p_in_64.cpp",
-        "src/dec_acelp_4p_in_64.cpp",
-        "src/dec_alg_codebook.cpp",
-        "src/dec_gain2_amr_wb.cpp",
-        "src/deemphasis_32.cpp",
-        "src/dtx_decoder_amr_wb.cpp",
-        "src/get_amr_wb_bits.cpp",
-        "src/highpass_400hz_at_12k8.cpp",
-        "src/highpass_50hz_at_12k8.cpp",
-        "src/homing_amr_wb_dec.cpp",
-        "src/interpolate_isp.cpp",
-        "src/isf_extrapolation.cpp",
-        "src/isp_az.cpp",
-        "src/isp_isf.cpp",
-        "src/lagconceal.cpp",
-        "src/low_pass_filt_7k.cpp",
-        "src/median5.cpp",
-        "src/mime_io.cpp",
-        "src/noise_gen_amrwb.cpp",
-        "src/normalize_amr_wb.cpp",
-        "src/oversamp_12k8_to_16k.cpp",
-        "src/phase_dispersion.cpp",
-        "src/pit_shrp.cpp",
-        "src/pred_lt4.cpp",
-        "src/preemph_amrwb_dec.cpp",
-        "src/pvamrwb_math_op.cpp",
-        "src/pvamrwbdecoder.cpp",
-        "src/q_gain2_tab.cpp",
-        "src/qisf_ns.cpp",
-        "src/qisf_ns_tab.cpp",
-        "src/qpisf_2s.cpp",
-        "src/qpisf_2s_tab.cpp",
-        "src/scale_signal.cpp",
-        "src/synthesis_amr_wb.cpp",
-        "src/voice_factor.cpp",
-        "src/wb_syn_filt.cpp",
-        "src/weight_amrwb_lpc.cpp",
-    ],
-
-    export_include_dirs: [
-        "src",
-        "include",
-    ],
-
-    cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-DOSCL_IMPORT_REF=",
-
-        "-Werror",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-    },
-
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
-
-//###############################################################################
-cc_test {
-    name: "libstagefright_amrwbdec_test",
-    gtest: false,
-    host_supported: true,
-
-    srcs: ["test/amrwbdec_test.cpp"],
-
-    cflags: ["-Wall", "-Werror"],
-
-    static_libs: [
-        "libstagefright_amrwbdec",
-        "libsndfile",
-    ],
-
-    local_include_dirs: ["src"],
-
-    shared_libs: ["libaudioutils"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-    },
-
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
diff --git a/media/libstagefright/codecs/amrwb/NOTICE b/media/libstagefright/codecs/amrwb/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/amrwb/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp b/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
deleted file mode 100644
index 46f77e3..0000000
--- a/media/libstagefright/codecs/amrwb/fuzzer/Android.bp
+++ /dev/null
@@ -1,35 +0,0 @@
-/******************************************************************************
- *
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *****************************************************************************
- * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
- */
-
-cc_fuzz {
-    name: "amrwb_dec_fuzzer",
-    host_supported: true,
-    srcs: [
-        "amrwb_dec_fuzzer.cpp",
-    ],
-    static_libs: [
-        "libstagefright_amrwbdec",
-    ],
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
diff --git a/media/libstagefright/codecs/amrwb/patent_disclaimer.txt b/media/libstagefright/codecs/amrwb/patent_disclaimer.txt
deleted file mode 100644
index b4bf11d..0000000
--- a/media/libstagefright/codecs/amrwb/patent_disclaimer.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-
-THIS IS NOT A GRANT OF PATENT RIGHTS.
-
-Google makes no representation or warranty that the codecs for which
-source code is made available hereunder are unencumbered by
-third-party patents.  Those intending to use this source code in
-hardware or software products are advised that implementations of
-these codecs, including in open source software or shareware, may
-require patent licenses from the relevant patent holders.
diff --git a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp b/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
deleted file mode 100644
index e1af6d4..0000000
--- a/media/libstagefright/codecs/amrwb/src/wb_syn_filt.cpp
+++ /dev/null
@@ -1,307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.173
-    ANSI-C code for the Adaptive Multi-Rate - Wideband (AMR-WB) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2007, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-------------------------------------------------------------------------------
-
-
-
- Filename: wb_syn_filt.cpp
-
-     Date: 05/08/2004
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-wb_syn_filt
-
-     int16 a[],               (i) Q12 : a[m+1] prediction coefficients
-     int16 m,                 (i)     : order of LP filter
-     int16 x[],               (i)     : input signal
-     int16 y[],               (o)     : output signal
-     int16 lg,                (i)     : size of filtering
-     int16 mem[],             (i/o)   : memory associated with this filtering.
-     int16 update,            (i)     : 0=no update, 1=update of memory.
-     int16 y_buf[]            (i/o)   : scratch buffer (m past samples followed by lg new ones)
-
-Syn_filt_32
-
-     int16 a[],              (i) Q12 : a[m+1] prediction coefficients
-     int16 m,                (i)     : order of LP filter
-     int16 exc[],            (i) Qnew: excitation (exc[i] >> Qnew)
-     int16 Qnew,             (i)     : exc scaling = 0(min) to 8(max)
-     int16 sig_hi[],         (o) /16 : synthesis high
-     int16 sig_lo[],         (o) /16 : synthesis low
-     int16 lg                (i)     : size of filtering
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-    Do the synthesis filtering 1/A(z), in 16-bit and 32-bit versions
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-
-#include "pv_amr_wb_type_defs.h"
-#include "pvamrwbdecoder_mem_funcs.h"
-#include "pvamrwbdecoder_basic_op.h"
-#include "pvamrwb_math_op.h"
-#include "pvamrwbdecoder_cnst.h"
-#include "pvamrwbdecoder_acelp.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-void wb_syn_filt(
-    int16 a[],       /* (i) Q12 : a[m+1] prediction coefficients           */
-    int16 m,         /* (i)     : order of LP filter                       */
-    int16 x[],       /* (i)     : input signal                             */
-    int16 y[],       /* (o)     : output signal                            */
-    int16 lg,        /* (i)     : size of filtering                        */
-    int16 mem[],     /* (i/o)   : memory associated with this filtering.   */
-    int16 update,    /* (i)     : 0=no update, 1=update of memory.         */
-    int16 y_buf[]
-)
-{
-
-    int16 i, j;
-    int32 L_tmp1;
-    int32 L_tmp2;
-    int32 L_tmp3;
-    int32 L_tmp4;
-    int16 *yy;
-
-    /* copy initial filter states into synthesis buffer */
-    pv_memcpy(y_buf, mem, m*sizeof(*yy));
-
-    yy = &y_buf[m];
-
-    /* Do the filtering. */
-
-    for (i = 0; i < lg >> 2; i++)
-    {
-        L_tmp1 = -((int32)x[(i<<2)] << 11);
-        L_tmp2 = -((int32)x[(i<<2)+1] << 11);
-        L_tmp3 = -((int32)x[(i<<2)+2] << 11);
-        L_tmp4 = -((int32)x[(i<<2)+3] << 11);
-
-        /* a[] uses Q12 and abs(a) <= 1 */
-
-        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -3], a[3], L_tmp1);
-        L_tmp2  = fxp_mac_16by16(yy[(i<<2) -2], a[3], L_tmp2);
-        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -2], a[2], L_tmp1);
-        L_tmp2  = fxp_mac_16by16(yy[(i<<2) -1], a[2], L_tmp2);
-        L_tmp1  = fxp_mac_16by16(yy[(i<<2) -1], a[1], L_tmp1);
-
-        for (j = 4; j < m; j += 2)
-        {
-            L_tmp1  = fxp_mac_16by16(yy[(i<<2)-1  - j], a[j+1], L_tmp1);
-            L_tmp2  = fxp_mac_16by16(yy[(i<<2)    - j], a[j+1], L_tmp2);
-            L_tmp1  = fxp_mac_16by16(yy[(i<<2)    - j], a[j  ], L_tmp1);
-            L_tmp2  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j  ], L_tmp2);
-            L_tmp3  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j+1], L_tmp3);
-            L_tmp4  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j+1], L_tmp4);
-            L_tmp3  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j  ], L_tmp3);
-            L_tmp4  = fxp_mac_16by16(yy[(i<<2)+3  - j], a[j  ], L_tmp4);
-        }
-
-        L_tmp1  = fxp_mac_16by16(yy[(i<<2)    - j], a[j], L_tmp1);
-        L_tmp2  = fxp_mac_16by16(yy[(i<<2)+1  - j], a[j], L_tmp2);
-        L_tmp3  = fxp_mac_16by16(yy[(i<<2)+2  - j], a[j], L_tmp3);
-        L_tmp4  = fxp_mac_16by16(yy[(i<<2)+3  - j], a[j], L_tmp4);
-
-        L_tmp1 = shl_int32(L_tmp1, 4);
-
-        y[(i<<2)] = yy[(i<<2)] = amr_wb_round(-L_tmp1);
-
-        L_tmp2  = fxp_mac_16by16(yy[(i<<2)], a[1], L_tmp2);
-
-        L_tmp2 = shl_int32(L_tmp2, 4);
-
-        y[(i<<2)+1] = yy[(i<<2)+1] = amr_wb_round(-L_tmp2);
-
-        L_tmp3  = fxp_mac_16by16(yy[(i<<2) - 1], a[3], L_tmp3);
-        L_tmp4  = fxp_mac_16by16(yy[(i<<2)], a[3], L_tmp4);
-        L_tmp3  = fxp_mac_16by16(yy[(i<<2)], a[2], L_tmp3);
-        L_tmp4  = fxp_mac_16by16(yy[(i<<2) + 1], a[2], L_tmp4);
-        L_tmp3  = fxp_mac_16by16(yy[(i<<2) + 1], a[1], L_tmp3);
-
-        L_tmp3 = shl_int32(L_tmp3, 4);
-
-        y[(i<<2)+2] = yy[(i<<2)+2] = amr_wb_round(-L_tmp3);
-
-        L_tmp4  = fxp_mac_16by16(yy[(i<<2)+2], a[1], L_tmp4);
-
-        L_tmp4 = shl_int32(L_tmp4, 4);
-
-        y[(i<<2)+3] = yy[(i<<2)+3] = amr_wb_round(-L_tmp4);
-    }
-
-
-    /* Update memory if required */
-
-    if (update)
-    {
-        pv_memcpy(mem, &y[lg - m], m*sizeof(*y));
-    }
-
-    return;
-}
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-void Syn_filt_32(
-    int16 a[],              /* (i) Q12 : a[m+1] prediction coefficients */
-    int16 m,                /* (i)     : order of LP filter             */
-    int16 exc[],            /* (i) Qnew: excitation (exc[i] >> Qnew)    */
-    int16 Qnew,             /* (i)     : exc scaling = 0(min) to 8(max) */
-    int16 sig_hi[],         /* (o) /16 : synthesis high                 */
-    int16 sig_lo[],         /* (o) /16 : synthesis low                  */
-    int16 lg                /* (i)     : size of filtering              */
-)
-{
-    int16 i, k, a0;
-    int32 L_tmp1;
-    int32 L_tmp2;
-    int32 L_tmp3;
-    int32 L_tmp4;
-
-    a0 = 9 - Qnew;        /* input / 16 and >>Qnew */
-
-    /* Do the filtering. */
-
-    for (i = 0; i < lg >> 1; i++)
-    {
-
-        L_tmp3 = 0;
-        L_tmp4 = 0;
-
-        L_tmp1 = fxp_mul_16by16(sig_lo[(i<<1) - 1], a[1]);
-        L_tmp2 = fxp_mul_16by16(sig_hi[(i<<1) - 1], a[1]);
-
-        for (k = 2; k < m; k += 2)
-        {
-
-            L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)-1 - k], a[k+1], L_tmp1);
-            L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)-1 - k], a[k+1], L_tmp2);
-            L_tmp1 = fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k  ], L_tmp1);
-            L_tmp2 = fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k  ], L_tmp2);
-            L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k+1], L_tmp3);
-            L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k+1], L_tmp4);
-            L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k  ], L_tmp3);
-            L_tmp4 = fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k  ], L_tmp4);
-        }
-
-        L_tmp1 = -fxp_mac_16by16(sig_lo[(i<<1)   - k], a[k], L_tmp1);
-        L_tmp3 =  fxp_mac_16by16(sig_lo[(i<<1)+1 - k], a[k], L_tmp3);
-        L_tmp2 =  fxp_mac_16by16(sig_hi[(i<<1)   - k], a[k], L_tmp2);
-        L_tmp4 =  fxp_mac_16by16(sig_hi[(i<<1)+1 - k], a[k], L_tmp4);
-
-
-
-        L_tmp1 >>= 11;      /* -4 : sig_lo[i] << 4 */
-
-        L_tmp1 += (int32)exc[(i<<1)] << a0;
-
-        L_tmp1 -= (L_tmp2 << 1);
-        /* sig_hi = bit16 to bit31 of synthesis */
-        L_tmp1 = shl_int32(L_tmp1, 3);           /* ai in Q12 */
-
-        sig_hi[(i<<1)] = (int16)(L_tmp1 >> 16);
-
-        L_tmp4 = fxp_mac_16by16((int16)(L_tmp1 >> 16), a[1], L_tmp4);
-
-        /* sig_lo = bit4 to bit15 of synthesis */
-        /* L_tmp1 >>= 4 : sig_lo[i] >> 4 */
-        sig_lo[(i<<1)] = (int16)((L_tmp1 >> 4) - ((L_tmp1 >> 16) << 12));
-
-        L_tmp3 = fxp_mac_16by16(sig_lo[(i<<1)], a[1], L_tmp3);
-        L_tmp3 = -L_tmp3 >> 11;
-
-        L_tmp3 += (int32)exc[(i<<1)+1] << a0;
-
-        L_tmp3 -= (L_tmp4 << 1);
-        /* sig_hi = bit16 to bit31 of synthesis */
-        L_tmp3 = shl_int32(L_tmp3, 3);           /* ai in Q12 */
-        sig_hi[(i<<1)+1] = (int16)(L_tmp3 >> 16);
-
-        /* sig_lo = bit4 to bit15 of synthesis */
-        /* L_tmp3 >>= 4 : sig_lo[i] >> 4 */
-        sig_lo[(i<<1)+1] = (int16)((L_tmp3 >> 4) - (sig_hi[(i<<1)+1] << 12));
-    }
-
-}
-
-
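
For reference, wb_syn_filt() above implements the synthesis recursion
y[n] = x[n] - sum_{k=1..m} (a[k]/a[0]) * y[n-k], unrolled four samples at a time in Q12 fixed
point, and Syn_filt_32() does the same on a 32-bit synthesis signal kept as sig_hi/sig_lo
halves. A minimal floating-point reference sketch of the plain recursion follows, assuming
coefficients normalized so that a[0] == 1.0 and a caller-provided scratch buffer.

/* Floating-point reference for the 1/A(z) synthesis filtering above.
 * y_buf must hold at least m + lg doubles, mirroring the original API. */
#include <string.h>

void syn_filt_ref(const double *a, int m,       /* a[0..m], normalized so a[0] == 1.0 */
                  const double *x, double *y,   /* input / output, length lg          */
                  int lg,
                  double *mem, int update,      /* mem[0..m-1]: past outputs          */
                  double *y_buf)                /* scratch: m past + lg new samples   */
{
    /* seed the scratch buffer with the filter memory, then filter in place */
    memcpy(y_buf, mem, (size_t)m * sizeof(double));
    double *yy = y_buf + m;

    for (int n = 0; n < lg; n++) {
        double acc = x[n];
        for (int k = 1; k <= m; k++)
            acc -= a[k] * yy[n - k];            /* y[n] = x[n] - sum a[k]*y[n-k] */
        yy[n] = y[n] = acc;
    }

    if (update)                                 /* carry the last m outputs forward */
        memcpy(mem, &y[lg - m], (size_t)m * sizeof(double));
}
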
diff --git a/media/libstagefright/codecs/amrwb/test/Android.bp b/media/libstagefright/codecs/amrwb/test/Android.bp
deleted file mode 100644
index 968215a..0000000
--- a/media/libstagefright/codecs/amrwb/test/Android.bp
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "AmrwbDecoderTest",
-    gtest: true,
-
-    srcs: [
-        "AmrwbDecoderTest.cpp",
-    ],
-
-    static_libs: [
-        "libstagefright_amrwbdec",
-        "libsndfile",
-        "libaudioutils",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/amrwbenc/Android.bp b/media/libstagefright/codecs/amrwbenc/Android.bp
index 64f302c..00e7bc9 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.bp
+++ b/media/libstagefright/codecs/amrwbenc/Android.bp
@@ -1,147 +1,13 @@
-cc_library_static {
-    name: "libstagefright_amrwbenc",
-    vendor_available: true,
-    min_sdk_version: "29",
 
-    srcs: [
-        "src/autocorr.c",
-        "src/az_isp.c",
-        "src/bits.c",
-        "src/c2t64fx.c",
-        "src/c4t64fx.c",
-        "src/convolve.c",
-        "src/cor_h_x.c",
-        "src/decim54.c",
-        "src/deemph.c",
-        "src/dtx.c",
-        "src/g_pitch.c",
-        "src/gpclip.c",
-        "src/homing.c",
-        "src/hp400.c",
-        "src/hp50.c",
-        "src/hp6k.c",
-        "src/hp_wsp.c",
-        "src/int_lpc.c",
-        "src/isp_az.c",
-        "src/isp_isf.c",
-        "src/lag_wind.c",
-        "src/levinson.c",
-        "src/log2.c",
-        "src/lp_dec2.c",
-        "src/math_op.c",
-        "src/oper_32b.c",
-        "src/p_med_ol.c",
-        "src/pit_shrp.c",
-        "src/pitch_f4.c",
-        "src/pred_lt4.c",
-        "src/preemph.c",
-        "src/q_gain2.c",
-        "src/q_pulse.c",
-        "src/qisf_ns.c",
-        "src/qpisf_2s.c",
-        "src/random.c",
-        "src/residu.c",
-        "src/scale.c",
-        "src/stream.c",
-        "src/syn_filt.c",
-        "src/updt_tar.c",
-        "src/util.c",
-        "src/voAMRWBEnc.c",
-        "src/voicefac.c",
-        "src/wb_vad.c",
-        "src/weight_a.c",
-        "src/mem_align.c",
-    ],
-
-    arch: {
-        arm: {
-            srcs: [
-                "src/asm/ARMV5E/convolve_opt.s",
-                "src/asm/ARMV5E/cor_h_vec_opt.s",
-                "src/asm/ARMV5E/Deemph_32_opt.s",
-                "src/asm/ARMV5E/Dot_p_opt.s",
-                "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                "src/asm/ARMV5E/Norm_Corr_opt.s",
-                "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                "src/asm/ARMV5E/residu_asm_opt.s",
-                "src/asm/ARMV5E/scale_sig_opt.s",
-                "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                "src/asm/ARMV5E/syn_filt_opt.s",
-            ],
-
-            cflags: [
-                "-DARM",
-                "-DASM_OPT",
-            ],
-            local_include_dirs: ["src/asm/ARMV5E"],
-
-            instruction_set: "arm",
-
-            neon: {
-                exclude_srcs: [
-                    "src/asm/ARMV5E/convolve_opt.s",
-                    "src/asm/ARMV5E/cor_h_vec_opt.s",
-                    "src/asm/ARMV5E/Deemph_32_opt.s",
-                    "src/asm/ARMV5E/Dot_p_opt.s",
-                    "src/asm/ARMV5E/Filt_6k_7k_opt.s",
-                    "src/asm/ARMV5E/Norm_Corr_opt.s",
-                    "src/asm/ARMV5E/pred_lt4_1_opt.s",
-                    "src/asm/ARMV5E/residu_asm_opt.s",
-                    "src/asm/ARMV5E/scale_sig_opt.s",
-                    "src/asm/ARMV5E/Syn_filt_32_opt.s",
-                    "src/asm/ARMV5E/syn_filt_opt.s",
-                ],
-
-                srcs: [
-                    "src/asm/ARMV7/convolve_neon.s",
-                    "src/asm/ARMV7/cor_h_vec_neon.s",
-                    "src/asm/ARMV7/Deemph_32_neon.s",
-                    "src/asm/ARMV7/Dot_p_neon.s",
-                    "src/asm/ARMV7/Filt_6k_7k_neon.s",
-                    "src/asm/ARMV7/Norm_Corr_neon.s",
-                    "src/asm/ARMV7/pred_lt4_1_neon.s",
-                    "src/asm/ARMV7/residu_asm_neon.s",
-                    "src/asm/ARMV7/scale_sig_neon.s",
-                    "src/asm/ARMV7/Syn_filt_32_neon.s",
-                    "src/asm/ARMV7/syn_filt_neon.s",
-                ],
-
-                // don't actually generate neon instructions, see bug 26932980
-                cflags: [
-                    "-DARMV7",
-                    "-mfpu=vfpv3",
-                ],
-                local_include_dirs: [
-                    "src/asm/ARMV5E",
-                    "src/asm/ARMV7",
-                ],
-            },
-
-        },
-    },
-
-    include_dirs: [
-        "frameworks/av/include",
-        "frameworks/av/media/libstagefright/include",
-    ],
-
-    local_include_dirs: ["src"],
-    export_include_dirs: ["inc"],
-
-    shared_libs: [
-        "libstagefright_enc_common",
-        "liblog",
-    ],
-
-    cflags: ["-Werror"],
-    sanitize: {
-        cfi: true,
-    },
-
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
 }
 
-//###############################################################################
-
 cc_library_shared {
     name: "libstagefright_soft_amrwbenc",
     defaults: ["libstagefright_softomx-defaults"],
@@ -163,4 +29,3 @@
         "libstagefright_enc_common",
     ],
 }
-
diff --git a/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/media/libstagefright/codecs/amrwbenc/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/media/libstagefright/codecs/amrwbenc/NOTICE b/media/libstagefright/codecs/amrwbenc/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/media/libstagefright/codecs/amrwbenc/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
-   Copyright (c) 2005-2008, The Android Open Source Project
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
deleted file mode 100644
index 9442fc4..0000000
--- a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.bp
+++ /dev/null
@@ -1,28 +0,0 @@
-cc_test {
-    name: "AMRWBEncTest",
-    gtest: false,
-
-    srcs: ["AMRWB_E_SAMPLE.c"],
-
-    cflags: ["-Wall", "-Werror"],
-
-    arch: {
-        arm: {
-            instruction_set: "arm",
-        },
-    },
-
-    shared_libs: [
-        "libdl",
-        "liblog",
-    ],
-
-    static_libs: [
-        "libstagefright_amrwbenc",
-        "libstagefright_enc_common",
-    ],
-
-    sanitize: {
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
index 7fb8a4c..657a5ce 100644
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
+++ b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
@@ -476,6 +476,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
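
Note on this hunk (and the matching ones on the other soft codecs below): it tags the exported createSoftOMXComponent factory with Clang's cfi_canonical_jump_table attribute, which keeps a canonical CFI jump-table entry for the symbol so an address obtained by name and called indirectly still passes the Cross-DSO CFI check. A hypothetical plain-C sketch of that pattern follows; the names and the loader-side code are illustrative only and are not part of this patch.

    #include <dlfcn.h>
    #include <stddef.h>

    /* --- codec library side: the kind of entry point this patch annotates --- */
    __attribute__((cfi_canonical_jump_table))
    void *createExampleComponent(void)
    {
        return NULL;   /* stand-in body for the sketch */
    }

    /* --- loader side: resolve by name, then call indirectly --- */
    typedef void *(*FactoryFn)(void);

    void *load_and_create(const char *libpath, const char *symbol)
    {
        void *lib = dlopen(libpath, RTLD_NOW);
        if (lib == NULL)
            return NULL;
        FactoryFn factory = (FactoryFn)dlsym(lib, symbol);
        /* The library stays loaded for the component's lifetime in this sketch. */
        return (factory != NULL) ? factory() : NULL;   /* CFI-checked indirect call */
    }
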
diff --git a/media/libstagefright/codecs/amrwbenc/src/preemph.c b/media/libstagefright/codecs/amrwbenc/src/preemph.c
deleted file mode 100644
index 70c8650..0000000
--- a/media/libstagefright/codecs/amrwbenc/src/preemph.c
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- ** Copyright 2003-2010, VisualOn, Inc.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- **     http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-/***********************************************************************
-*      File: preemph.c                                                *
-*                                                                     *
-*      Description: Preemphasis: filtering through 1 - g z^-1         *
-*              Preemph2 --> signal is multiplied by 2             *
-*                                                                     *
-************************************************************************/
-
-#include "typedef.h"
-#include "basic_op.h"
-
-void Preemph(
-        Word16 x[],                           /* (i/o)   : input signal overwritten by the output */
-        Word16 mu,                            /* (i) Q15 : preemphasis coefficient                */
-        Word16 lg,                            /* (i)     : length of filtering                    */
-        Word16 * mem                          /* (i/o)   : memory (x[-1])                         */
-        )
-{
-    Word16 temp;
-    Word32 i, L_tmp;
-
-    temp = x[lg - 1];
-
-    for (i = lg - 1; i > 0; i--)
-    {
-        L_tmp = L_deposit_h(x[i]);
-        L_tmp -= (x[i - 1] * mu)<<1;
-        x[i] = (L_tmp + 0x8000)>>16;
-    }
-
-    L_tmp = L_deposit_h(x[0]);
-    L_tmp -= ((*mem) * mu)<<1;
-    x[0] = (L_tmp + 0x8000)>>16;
-
-    *mem = temp;
-
-    return;
-}
-
-
-void Preemph2(
-        Word16 x[],                           /* (i/o)   : input signal overwritten by the output */
-        Word16 mu,                            /* (i) Q15 : preemphasis coefficient                */
-        Word16 lg,                            /* (i)     : length of filtering                    */
-        Word16 * mem                          /* (i/o)   : memory (x[-1])                         */
-         )
-{
-    Word16 temp;
-    Word32 i, L_tmp;
-
-    temp = x[lg - 1];
-
-    for (i = (Word16) (lg - 1); i > 0; i--)
-    {
-        L_tmp = L_deposit_h(x[i]);
-        L_tmp -= (x[i - 1] * mu)<<1; // only called with mu == 22282, so this won't overflow
-        if (L_tmp > INT32_MAX / 2) {
-            L_tmp = INT32_MAX / 2;
-        }
-        L_tmp = (L_tmp << 1);
-        if (L_tmp > INT32_MAX - 0x8000) {
-            L_tmp = INT32_MAX - 0x8000;
-        }
-        x[i] = (L_tmp + 0x8000)>>16;
-    }
-
-    L_tmp = L_deposit_h(x[0]);
-    L_tmp -= ((*mem) * mu)<<1;
-    if (L_tmp > INT32_MAX / 2) {
-        L_tmp = INT32_MAX / 2;
-    }
-    L_tmp = (L_tmp << 1);
-    if (L_tmp > INT32_MAX - 0x8000) {
-        L_tmp = INT32_MAX - 0x8000;
-    }
-    x[0] = (L_tmp + 0x8000)>>16;
-
-    *mem = temp;
-
-    return;
-}
-
-
-
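
For reference while reviewing the deletion above: Preemph() applies the one-tap pre-emphasis y[n] = x[n] - mu*x[n-1] in Q15 fixed point, walking the buffer backwards so each x[n-1] it reads is still an unfiltered input, with *mem carrying x[-1] between calls; Preemph2() is the same filter followed by a saturating doubling. A floating-point sketch of the core recursion (illustrative only, with a name of my choosing; it is not the removed fixed-point code):

    #include <stddef.h>

    /* Floating-point equivalent of the deleted Preemph(): y[n] = x[n] - mu * x[n-1],
     * computed in place from the last sample backwards so every x[n-1] read is
     * still an input sample.  *mem holds x[-1] across calls and is updated to the
     * last input sample for the next block.  Assumes lg >= 1. */
    static void preemphasis(float x[], float mu, size_t lg, float *mem)
    {
        float last = x[lg - 1];              /* becomes the next call's x[-1] */
        for (size_t n = lg - 1; n > 0; n--) {
            x[n] -= mu * x[n - 1];
        }
        x[0] -= mu * (*mem);
        *mem = last;
    }

In the fixed-point code the coefficient is a Q15 value (the encoder only passes mu == 22282, roughly 0.68), accumulation is done in 32 bits with rounding via the +0x8000 term, and Preemph2() clamps against INT32_MAX before its extra left shift so the doubling saturates instead of overflowing.
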
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
deleted file mode 100644
index fe0bdda..0000000
--- a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- ** Copyright 2003-2010, VisualOn, Inc.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- **     http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-/***********************************************************************
-*      File: q_pulse.c                                                 *
-*                                                                      *
-*      Description: Coding and decoding of algebraic codebook          *
-*                                                                      *
-************************************************************************/
-
-#include <stdio.h>
-#include "typedef.h"
-#include "basic_op.h"
-#include "q_pulse.h"
-
-#define NB_POS 16                          /* pos in track, mask for sign bit */
-
-Word32 quant_1p_N1(                        /* (o) return N+1 bits             */
-        Word16 pos,                        /* (i) position of the pulse       */
-        Word16 N)                          /* (i) number of bits for position */
-{
-    Word16 mask;
-    Word32 index;
-
-    mask = (1 << N) - 1;              /* mask = ((1<<N)-1); */
-    /*-------------------------------------------------------*
-     * Quantization of 1 pulse with N+1 bits:                *
-     *-------------------------------------------------------*/
-    index = L_deposit_l((Word16) (pos & mask));
-    if ((pos & NB_POS) != 0)
-    {
-        index = vo_L_add(index, L_deposit_l(1 << N));   /* index += 1 << N; */
-    }
-    return (index);
-}
-
-
-Word32 quant_2p_2N1(                       /* (o) return (2*N)+1 bits         */
-        Word16 pos1,                          /* (i) position of the pulse 1     */
-        Word16 pos2,                          /* (i) position of the pulse 2     */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 mask, tmp;
-    Word32 index;
-    mask = (1 << N) - 1;              /* mask = ((1<<N)-1); */
-    /*-------------------------------------------------------*
-     * Quantization of 2 pulses with 2*N+1 bits:             *
-     *-------------------------------------------------------*/
-    if (((pos2 ^ pos1) & NB_POS) == 0)
-    {
-        /* sign of 1st pulse == sign of 2nd pulse */
-        if(pos1 <= pos2)          /* ((pos1 - pos2) <= 0) */
-        {
-            /* index = ((pos1 & mask) << N) + (pos2 & mask); */
-            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
-        } else
-        {
-            /* ((pos2 & mask) << N) + (pos1 & mask); */
-            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
-        }
-        if ((pos1 & NB_POS) != 0)
-        {
-            tmp = (N << 1);
-            index = vo_L_add(index, (1L << tmp));       /* index += 1 << (2*N); */
-        }
-    } else
-    {
-        /* sign of 1st pulse != sign of 2nd pulse */
-        if (vo_sub((Word16) (pos1 & mask), (Word16) (pos2 & mask)) <= 0)
-        {
-            /* index = ((pos2 & mask) << N) + (pos1 & mask); */
-            index = L_deposit_l(add1((((Word16) (pos2 & mask)) << N), ((Word16) (pos1 & mask))));
-            if ((pos2 & NB_POS) != 0)
-            {
-                tmp = (N << 1);           /* index += 1 << (2*N); */
-                index = vo_L_add(index, (1L << tmp));
-            }
-        } else
-        {
-            /* index = ((pos1 & mask) << N) + (pos2 & mask);     */
-            index = L_deposit_l(add1((((Word16) (pos1 & mask)) << N), ((Word16) (pos2 & mask))));
-            if ((pos1 & NB_POS) != 0)
-            {
-                tmp = (N << 1);
-                index = vo_L_add(index, (1 << tmp));    /* index += 1 << (2*N); */
-            }
-        }
-    }
-    return (index);
-}
-
-
-Word32 quant_3p_3N1(                       /* (o) return (3*N)+1 bits         */
-        Word16 pos1,                          /* (i) position of the pulse 1     */
-        Word16 pos2,                          /* (i) position of the pulse 2     */
-        Word16 pos3,                          /* (i) position of the pulse 3     */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 nb_pos;
-    Word32 index;
-
-    nb_pos =(1 <<(N - 1));            /* nb_pos = (1<<(N-1)); */
-    /*-------------------------------------------------------*
-     * Quantization of 3 pulses with 3*N+1 bits:             *
-     *-------------------------------------------------------*/
-    if (((pos1 ^ pos2) & nb_pos) == 0)
-    {
-        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
-        /* index += (pos1 & nb_pos) << N; */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
-        /* index += quant_1p_N1(pos3, N) << (2*N); */
-        index = vo_L_add(index, (quant_1p_N1(pos3, N)<<(N << 1)));
-
-    } else if (((pos1 ^ pos3) & nb_pos) == 0)
-    {
-        index = quant_2p_2N1(pos1, pos3, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos3, (N-1)); */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
-        /* index += (pos1 & nb_pos) << N; */
-        index = vo_L_add(index, (quant_1p_N1(pos2, N) << (N << 1)));
-        /* index += quant_1p_N1(pos2, N) <<
-         * (2*N); */
-    } else
-    {
-        index = quant_2p_2N1(pos2, pos3, (N - 1));    /* index = quant_2p_2N1(pos2, pos3, (N-1)); */
-        /* index += (pos2 & nb_pos) << N;            */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
-        /* index += quant_1p_N1(pos1, N) << (2*N);   */
-        index = vo_L_add(index, (quant_1p_N1(pos1, N) << (N << 1)));
-    }
-    return (index);
-}
-
-
-Word32 quant_4p_4N1(                       /* (o) return (4*N)+1 bits         */
-        Word16 pos1,                          /* (i) position of the pulse 1     */
-        Word16 pos2,                          /* (i) position of the pulse 2     */
-        Word16 pos3,                          /* (i) position of the pulse 3     */
-        Word16 pos4,                          /* (i) position of the pulse 4     */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 nb_pos;
-    Word32 index;
-
-    nb_pos = 1 << (N - 1);            /* nb_pos = (1<<(N-1));  */
-    /*-------------------------------------------------------*
-     * Quantization of 4 pulses with 4*N+1 bits:             *
-     *-------------------------------------------------------*/
-    if (((pos1 ^ pos2) & nb_pos) == 0)
-    {
-        index = quant_2p_2N1(pos1, pos2, sub(N, 1));    /* index = quant_2p_2N1(pos1, pos2, (N-1)); */
-        /* index += (pos1 & nb_pos) << N;    */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
-        /* index += quant_2p_2N1(pos3, pos4, N) << (2*N); */
-        index = vo_L_add(index, (quant_2p_2N1(pos3, pos4, N) << (N << 1)));
-    } else if (((pos1 ^ pos3) & nb_pos) == 0)
-    {
-        index = quant_2p_2N1(pos1, pos3, (N - 1));
-        /* index += (pos1 & nb_pos) << N; */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos1 & nb_pos)) << N));
-        /* index += quant_2p_2N1(pos2, pos4, N) << (2*N); */
-        index = vo_L_add(index, (quant_2p_2N1(pos2, pos4, N) << (N << 1)));
-    } else
-    {
-        index = quant_2p_2N1(pos2, pos3, (N - 1));
-        /* index += (pos2 & nb_pos) << N; */
-        index = vo_L_add(index, (L_deposit_l((Word16) (pos2 & nb_pos)) << N));
-        /* index += quant_2p_2N1(pos1, pos4, N) << (2*N); */
-        index = vo_L_add(index, (quant_2p_2N1(pos1, pos4, N) << (N << 1)));
-    }
-    return (index);
-}
-
-
-Word32 quant_4p_4N(                        /* (o) return 4*N bits             */
-        Word16 pos[],                         /* (i) position of the pulse 1..4  */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 nb_pos, mask __unused, n_1, tmp;
-    Word16 posA[4], posB[4];
-    Word32 i, j, k, index;
-
-    n_1 = (Word16) (N - 1);
-    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
-    mask = vo_sub((1 << N), 1);              /* mask = ((1<<N)-1); */
-
-    i = 0;
-    j = 0;
-    for (k = 0; k < 4; k++)
-    {
-        if ((pos[k] & nb_pos) == 0)
-        {
-            posA[i++] = pos[k];
-        } else
-        {
-            posB[j++] = pos[k];
-        }
-    }
-
-    switch (i)
-    {
-        case 0:
-            tmp = vo_sub((N << 2), 3);           /* index = 1 << ((4*N)-3); */
-            index = (1L << tmp);
-            /* index += quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1); */
-            index = vo_L_add(index, quant_4p_4N1(posB[0], posB[1], posB[2], posB[3], n_1));
-            break;
-        case 1:
-            /* index = quant_1p_N1(posA[0], n_1) << ((3*n_1)+1); */
-            tmp = add1((Word16)((vo_L_mult(3, n_1) >> 1)), 1);
-            index = L_shl(quant_1p_N1(posA[0], n_1), tmp);
-            /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
-            index = vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
-            break;
-        case 2:
-            tmp = ((n_1 << 1) + 1);         /* index = quant_2p_2N1(posA[0], posA[1], n_1) << ((2*n_1)+1); */
-            index = L_shl(quant_2p_2N1(posA[0], posA[1], n_1), tmp);
-            /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
-            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));
-            break;
-        case 3:
-            /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << N; */
-            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), N);
-            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));        /* index += quant_1p_N1(posB[0], n_1); */
-            break;
-        case 4:
-            index = quant_4p_4N1(posA[0], posA[1], posA[2], posA[3], n_1);
-            break;
-        default:
-            index = 0;
-            fprintf(stderr, "Error in function quant_4p_4N\n");
-    }
-    tmp = ((N << 2) - 2);               /* index += (i & 3) << ((4*N)-2); */
-    index = vo_L_add(index, L_shl((L_deposit_l(i) & (3L)), tmp));
-
-    return (index);
-}
-
-
-
-Word32 quant_5p_5N(                        /* (o) return 5*N bits             */
-        Word16 pos[],                         /* (i) position of the pulse 1..5  */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 nb_pos, n_1, tmp;
-    Word16 posA[5], posB[5];
-    Word32 i, j, k, index, tmp2;
-
-    n_1 = (Word16) (N - 1);
-    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
-
-    i = 0;
-    j = 0;
-    for (k = 0; k < 5; k++)
-    {
-        if ((pos[k] & nb_pos) == 0)
-        {
-            posA[i++] = pos[k];
-        } else
-        {
-            posB[j++] = pos[k];
-        }
-    }
-
-    switch (i)
-    {
-        case 0:
-            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* ((5*N)-1)) */
-            index = L_shl(1L, tmp);   /* index = 1 << ((5*N)-1); */
-            tmp = add1((N << 1), 1);  /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);*/
-            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
-            index = vo_L_add(index, tmp2);
-            index = vo_L_add(index, quant_2p_2N1(posB[3], posB[4], N));        /* index += quant_2p_2N1(posB[3], posB[4], N); */
-            break;
-        case 1:
-            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* index = 1 << ((5*N)-1); */
-            index = L_shl(1L, tmp);
-            tmp = add1((N << 1), 1);   /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) <<((2*N)+1);  */
-            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
-            index = vo_L_add(index, tmp2);
-            index = vo_L_add(index, quant_2p_2N1(posB[3], posA[0], N));        /* index += quant_2p_2N1(posB[3], posA[0], N); */
-            break;
-        case 2:
-            tmp = vo_sub((Word16)((vo_L_mult(5, N) >> 1)), 1);        /* ((5*N)-1)) */
-            index = L_shl(1L, tmp);            /* index = 1 << ((5*N)-1); */
-            tmp = add1((N << 1), 1);           /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1) << ((2*N)+1);  */
-            tmp2 = L_shl(quant_3p_3N1(posB[0], posB[1], posB[2], n_1), tmp);
-            index = vo_L_add(index, tmp2);
-            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], N));        /* index += quant_2p_2N1(posA[0], posA[1], N); */
-            break;
-        case 3:
-            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
-            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
-            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], N));        /* index += quant_2p_2N1(posB[0], posB[1], N); */
-            break;
-        case 4:
-            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
-            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
-            index = vo_L_add(index, quant_2p_2N1(posA[3], posB[0], N));        /* index += quant_2p_2N1(posA[3], posB[0], N); */
-            break;
-        case 5:
-            tmp = add1((N << 1), 1);           /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((2*N)+1);  */
-            index = L_shl(quant_3p_3N1(posA[0], posA[1], posA[2], n_1), tmp);
-            index = vo_L_add(index, quant_2p_2N1(posA[3], posA[4], N));        /* index += quant_2p_2N1(posA[3], posA[4], N); */
-            break;
-        default:
-            index = 0;
-            fprintf(stderr, "Error in function quant_5p_5N\n");
-    }
-
-    return (index);
-}
-
-
-Word32 quant_6p_6N_2(                      /* (o) return (6*N)-2 bits         */
-        Word16 pos[],                         /* (i) position of the pulse 1..6  */
-        Word16 N)                             /* (i) number of bits for position */
-{
-    Word16 nb_pos, n_1;
-    Word16 posA[6], posB[6];
-    Word32 i, j, k, index;
-
-    /* !!  N and n_1 are constants -> no need to compute them with the Basic Operator macros */
-    n_1 = (Word16) (N - 1);
-    nb_pos = (1 << n_1);                  /* nb_pos = (1<<n_1); */
-
-    i = 0;
-    j = 0;
-    for (k = 0; k < 6; k++)
-    {
-        if ((pos[k] & nb_pos) == 0)
-        {
-            posA[i++] = pos[k];
-        } else
-        {
-            posB[j++] = pos[k];
-        }
-    }
-
-    switch (i)
-    {
-        case 0:
-            index = (1 << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
-            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
-            index = vo_L_add(index, quant_1p_N1(posB[5], n_1));        /* index += quant_1p_N1(posB[5], n_1); */
-            break;
-        case 1:
-            index = (1L << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
-            index = vo_L_add(index, (quant_5p_5N(posB, n_1) << N)); /* index += quant_5p_5N(posB, n_1) << N; */
-            index = vo_L_add(index, quant_1p_N1(posA[0], n_1));        /* index += quant_1p_N1(posA[0], n_1); */
-            break;
-        case 2:
-            index = (1L << (Word16) (6 * N - 5));        /* index = 1 << ((6*N)-5); */
-            /* index += quant_4p_4N(posB, n_1) << ((2*n_1)+1); */
-            index = vo_L_add(index, (quant_4p_4N(posB, n_1) << (Word16) (2 * n_1 + 1)));
-            index = vo_L_add(index, quant_2p_2N1(posA[0], posA[1], n_1));      /* index += quant_2p_2N1(posA[0], posA[1], n_1); */
-            break;
-        case 3:
-            index = (quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << (Word16) (3 * n_1 + 1));
-                                              /* index = quant_3p_3N1(posA[0], posA[1], posA[2], n_1) << ((3*n_1)+1); */
-            index =vo_L_add(index, quant_3p_3N1(posB[0], posB[1], posB[2], n_1));
-                                             /* index += quant_3p_3N1(posB[0], posB[1], posB[2], n_1); */
-            break;
-        case 4:
-            i = 2;
-            index = (quant_4p_4N(posA, n_1) << (Word16) (2 * n_1 + 1));  /* index = quant_4p_4N(posA, n_1) << ((2*n_1)+1); */
-            index = vo_L_add(index, quant_2p_2N1(posB[0], posB[1], n_1));      /* index += quant_2p_2N1(posB[0], posB[1], n_1); */
-            break;
-        case 5:
-            i = 1;
-            index = (quant_5p_5N(posA, n_1) << N);       /* index = quant_5p_5N(posA, n_1) << N; */
-            index = vo_L_add(index, quant_1p_N1(posB[0], n_1));        /* index += quant_1p_N1(posB[0], n_1); */
-            break;
-        case 6:
-            i = 0;
-            index = (quant_5p_5N(posA, n_1) << N);       /* index = quant_5p_5N(posA, n_1) << N; */
-            index = vo_L_add(index, quant_1p_N1(posA[5], n_1));        /* index += quant_1p_N1(posA[5], n_1); */
-            break;
-        default:
-            index = 0;
-            fprintf(stderr, "Error in function quant_6p_6N_2\n");
-    }
-    index = vo_L_add(index, ((L_deposit_l(i) & 3L) << (Word16) (6 * N - 4)));   /* index += (i & 3) << ((6*N)-4); */
-
-    return (index);
-}
-
-
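
The file removed above packs ACELP algebraic-codebook pulses into fixed-width indices. For a single pulse, quant_1p_N1() puts the track position in the low N bits and the sign in bit N (the NB_POS mask), producing an (N+1)-bit code; with N = 4, a pulse value of 19 (position 3, sign bit 16 set) encodes as (19 & 15) + (1 << 4) = 19. A plain-C restatement of that packing, without the Basic Operator macros (the function name below is mine):

    #include <stdint.h>

    #define NB_POS 16   /* bit 4 of a pulse value carries its sign, as in q_pulse.c */

    /* Plain-C restatement of quant_1p_N1(): low N bits = position in the track,
     * bit N = sign, giving an (N+1)-bit index. */
    static int32_t quant_one_pulse(int16_t pos, int16_t N)
    {
        int32_t index = pos & ((1 << N) - 1);    /* position bits */
        if (pos & NB_POS) {
            index += (int32_t)1 << N;            /* sign bit on top */
        }
        return index;
    }

The multi-pulse encoders (quant_2p_2N1() through quant_6p_6N_2()) build on this: roughly, they order the pulses so the sign information can be recovered, concatenate the smaller indices with shifts, and add selector bits at the top, which is what the vo_L_add()/L_shl() arithmetic in the deleted code is doing.
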
diff --git a/media/libstagefright/codecs/amrwbenc/test/Android.bp b/media/libstagefright/codecs/amrwbenc/test/Android.bp
deleted file mode 100644
index 7042bc5..0000000
--- a/media/libstagefright/codecs/amrwbenc/test/Android.bp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "AmrwbEncoderTest",
-    gtest: true,
-
-    srcs: [
-        "AmrwbEncoderTest.cpp",
-    ],
-
-    static_libs: [
-        "libstagefright_enc_common",
-        "libstagefright_amrwbenc",
-        "libaudioutils",
-        "libsndfile",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/avcdec/Android.bp b/media/libstagefright/codecs/avcdec/Android.bp
index 0bb6bb0..1c2f9be 100644
--- a/media/libstagefright/codecs/avcdec/Android.bp
+++ b/media/libstagefright/codecs/avcdec/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_avcdec",
     defaults: ["libstagefright_softomx-defaults"],
@@ -16,6 +25,9 @@
             "signed-integer-overflow",
         ],
         cfi: true,
+        config: {
+            cfi_assembly_support: true,
+        },
     },
 
     ldflags: ["-Wl,-Bsymbolic"],
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 5a4b2f8..3891f23 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -724,6 +724,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
         OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/avcenc/Android.bp b/media/libstagefright/codecs/avcenc/Android.bp
index 980261c..586088c 100644
--- a/media/libstagefright/codecs/avcenc/Android.bp
+++ b/media/libstagefright/codecs/avcenc/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_avcenc",
     defaults: ["libstagefright_softomx-defaults"],
@@ -10,6 +19,9 @@
             "signed-integer-overflow",
         ],
         cfi: true,
+        config: {
+            cfi_assembly_support: true,
+        },
     },
 
     cflags: [
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 9db6465..01174c9 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -1507,6 +1507,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 260a60a..08691e7 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_common_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_common_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libstagefright_enc_common",
     vendor_available: true,
@@ -14,4 +33,11 @@
     export_include_dirs: ["include"],
 
     cflags: ["-Werror"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libstagefright/codecs/flac/dec/Android.bp b/media/libstagefright/codecs/flac/dec/Android.bp
index 4064751..d06e0b7 100644
--- a/media/libstagefright/codecs/flac/dec/Android.bp
+++ b/media/libstagefright/codecs/flac/dec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_flac_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_flac_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_flacdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
index 842a7ce..d6448d3 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
@@ -491,6 +491,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index f35bce1..59a4675 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_flac_enc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_flac_enc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_flacenc",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 078c8e3..24216a2 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -592,6 +592,7 @@
 }  // namespace android
 
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/g711/dec/Android.bp b/media/libstagefright/codecs/g711/dec/Android.bp
index f5357f4..1dc34c3 100644
--- a/media/libstagefright/codecs/g711/dec/Android.bp
+++ b/media/libstagefright/codecs/g711/dec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_g711_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_g711_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_g711dec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
index 877cb5a..fe91510 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.cpp
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
@@ -382,6 +382,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/gsm/dec/Android.bp b/media/libstagefright/codecs/gsm/dec/Android.bp
index 5672d89..efa2f83 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.bp
+++ b/media/libstagefright/codecs/gsm/dec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_gsm_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_gsm_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_gsmdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
index d777229..330cb8a 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
@@ -354,6 +354,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index ef86915..d5885a6 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -20,9 +20,7 @@
 
 #include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 
-extern "C" {
 #include "gsm.h"
-}
 
 namespace android {
 
diff --git a/media/libstagefright/codecs/hevcdec/Android.bp b/media/libstagefright/codecs/hevcdec/Android.bp
index ec436ce..2c4091b 100644
--- a/media/libstagefright/codecs/hevcdec/Android.bp
+++ b/media/libstagefright/codecs/hevcdec/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_hevcdec",
     defaults: ["libstagefright_softomx-defaults"],
@@ -17,6 +26,9 @@
             "signed-integer-overflow",
         ],
         cfi: true,
+        config: {
+            cfi_assembly_support: true,
+        },
     },
 
     // We need this because the current asm generates the following link error:
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index f6ae1f4..176da47 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -713,6 +713,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(const char *name,
         const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
         OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
index 4303565..725c79c 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.bp
@@ -1,80 +1,19 @@
-cc_library_static {
-    name: "libstagefright_m4vh263dec",
-    vendor_available: true,
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media.swcodec",
-    ],
-    min_sdk_version: "29",
-    shared_libs: ["liblog"],
 
-    srcs: [
-        "src/adaptive_smooth_no_mmx.cpp",
-        "src/bitstream.cpp",
-        "src/block_idct.cpp",
-        "src/cal_dc_scaler.cpp",
-        "src/chvr_filter.cpp",
-        "src/chv_filter.cpp",
-        "src/combined_decode.cpp",
-        "src/conceal.cpp",
-        "src/datapart_decode.cpp",
-        "src/dcac_prediction.cpp",
-        "src/dec_pred_intra_dc.cpp",
-        "src/deringing_chroma.cpp",
-        "src/deringing_luma.cpp",
-        "src/find_min_max.cpp",
-        "src/get_pred_adv_b_add.cpp",
-        "src/get_pred_outside.cpp",
-        "src/idct.cpp",
-        "src/idct_vca.cpp",
-        "src/mb_motion_comp.cpp",
-        "src/mb_utils.cpp",
-        "src/packet_util.cpp",
-        "src/post_filter.cpp",
-        "src/post_proc_semaphore.cpp",
-        "src/pp_semaphore_chroma_inter.cpp",
-        "src/pp_semaphore_luma.cpp",
-        "src/pvdec_api.cpp",
-        "src/scaling_tab.cpp",
-        "src/vlc_decode.cpp",
-        "src/vlc_dequant.cpp",
-        "src/vlc_tab.cpp",
-        "src/vop.cpp",
-        "src/zigzag_tab.cpp",
-    ],
-
-    header_libs: [
-        "media_plugin_headers",
-        "libstagefright_headers"
-    ],
-
-    local_include_dirs: ["src"],
-    export_include_dirs: ["include"],
-
-    cflags: [
-        "-Werror",
-    ],
-
-    version_script: "exports.lds",
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
 }
 
-//###############################################################################
-
 cc_library_shared {
     name: "libstagefright_soft_mpeg4dec",
     defaults: ["libstagefright_softomx-defaults"],
 
     srcs: ["SoftMPEG4.cpp"],
 
-    local_include_dirs: ["src"],
-
     cflags: [
     ],
 
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index 60750d9..a4b3e2f 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -412,6 +412,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h b/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
deleted file mode 100644
index 1f404ce..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/include/mp4dec_api.h
+++ /dev/null
@@ -1,188 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef _MP4DEC_API_H_
-#define _MP4DEC_API_H_
-
-#include "m4vh263_decoder_pv_types.h"
-
-// #define PV_TOLERATE_VOL_ERRORS
-#define PV_MEMORY_POOL
-
-#ifndef _PV_TYPES_
-#define _PV_TYPES_
-
-typedef uint Bool;
-
-#define PV_CODEC_INIT  0
-#define PV_CODEC_STOP  1
-#endif
-
-#define PV_TRUE  1
-#define PV_FALSE 0
-
-#ifndef OSCL_IMPORT_REF
-#define OSCL_IMPORT_REF /* empty */
-#endif
-#ifndef OSCL_EXPORT_REF
-#define OSCL_EXPORT_REF /* empty */
-#endif
-
-/* flag for post-processing  4/25/00 */
-
-#ifdef DEC_NOPOSTPROC
-#undef PV_POSTPROC_ON   /* enable compilation of post-processing code */
-#else
-#define PV_POSTPROC_ON
-#endif
-
-#define PV_NO_POST_PROC 0
-#define PV_DEBLOCK 1
-#define PV_DERING  2
-
-
-
-#include "visual_header.h" // struct VolInfo is defined
-
-
-/**@name Structure and Data Types
- * These type definitions specify the input / output from the PVMessage
- * library.
- */
-
-/*@{*/
-/* The application has to allocate space for this structure */
-typedef struct tagOutputFrame
-{
-    uint8       *data;          /* pointer to output YUV buffer */
-    uint32      timeStamp;      /* time stamp */
-} OutputFrame;
-
-typedef struct tagApplicationData
-{
-    int layer;          /* current video layer */
-    void *object;       /* some optional data field */
-} applicationData;
-
-/* Application controls, this structure shall be allocated */
-/*    and initialized in the application.                 */
-typedef struct tagvideoDecControls
-{
-    /* The following function pointer is copied to BitstreamDecVideo structure  */
-    /*    upon initialization and never used again. */
-    int (*readBitstreamData)(uint8 *buf, int nbytes_required, void *appData);
-    applicationData appData;
-
-    uint8 *outputFrame;
-    void *videoDecoderData;     /* this is an internal pointer that is only used */
-    /* in the decoder library.   */
-#ifdef PV_MEMORY_POOL
-    int32 size;
-#endif
-    int nLayers;
-    /* pointers to VOL data for frame-based decoding. */
-    uint8 *volbuf[2];           /* maximum of 2 layers for now */
-    int32 volbuf_size[2];
-
-} VideoDecControls;
-
-typedef enum
-{
-    H263_MODE = 0, MPEG4_MODE, UNKNOWN_MODE
-} MP4DecodingMode;
-
-typedef enum
-{
-    MP4_I_FRAME, MP4_P_FRAME, MP4_B_FRAME, MP4_BAD_FRAME
-} MP4FrameType;
-
-typedef struct tagVopHeaderInfo
-{
-    int     currLayer;
-    uint32  timestamp;
-    MP4FrameType    frameType;
-    int     refSelCode;
-    int16       quantizer;
-} VopHeaderInfo;
-
-/*--------------------------------------------------------------------------*
- * VideoRefCopyInfo:
- * OMAP DSP specific typedef structure, to support the user (ARM) copying
- * of a Reference Frame into the Video Decoder.
- *--------------------------------------------------------------------------*/
-typedef struct tagVideoRefCopyInfoPtr
-{
-    uint8   *yChan;             /* The Y component frame the user can copy a new reference to */
-    uint8   *uChan;             /* The U component frame the user can copy a new reference to */
-    uint8   *vChan;             /* The V component frame the user can copy a new reference to */
-    uint8   *currentVop;        /* The Vop for video the user can copy a new reference to */
-} VideoRefCopyInfoPtr;
-
-typedef struct tagVideoRefCopyInfoData
-{
-    int16   width;              /* Width */
-    int16   height;             /* Height */
-    int16   realWidth;          /* Non-padded width, not necessarily a multiple of 16. */
-    int16   realHeight;         /* Non-padded height, not necessarily a multiple of 16. */
-} VideoRefCopyInfoData;
-
-typedef struct tagVideoRefCopyInfo
-{
-    VideoRefCopyInfoData data;
-    VideoRefCopyInfoPtr ptrs;
-} VideoRefCopyInfo;
-
-/*@}*/
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-
-    OSCL_IMPORT_REF Bool    PVInitVideoDecoder(VideoDecControls *decCtrl, uint8 *volbuf[], int32 *volbuf_size, int nLayers, int width, int height, MP4DecodingMode mode);
-    Bool    PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, int nLayers);
-    OSCL_IMPORT_REF Bool    PVCleanUpVideoDecoder(VideoDecControls *decCtrl);
-    Bool    PVResetVideoDecoder(VideoDecControls *decCtrl);
-    OSCL_IMPORT_REF void    PVSetReferenceYUV(VideoDecControls *decCtrl, uint8 *refYUV);
-    Bool    PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp);
-    Bool    PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp);
-    OSCL_IMPORT_REF Bool    PVDecodeVideoFrame(VideoDecControls *decCtrl, uint8 *bitstream[], uint32 *timestamp, int32 *buffer_size, uint use_ext_timestamp[], uint8* currYUV);
-    Bool    PVDecodeVopHeader(VideoDecControls *decCtrl, uint8 *buffer[], uint32 timestamp[], int32 buffer_size[], VopHeaderInfo *header_info, uint use_ext_timestamp[], uint8 *currYUV);
-    Bool    PVDecodeVopBody(VideoDecControls *decCtrl, int32 buffer_size[]);
-    void    PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV);
-    OSCL_IMPORT_REF void    PVGetVideoDimensions(VideoDecControls *decCtrl, int32 *display_width, int32 *display_height);
-    OSCL_IMPORT_REF void    PVGetBufferDimensions(VideoDecControls *decCtrl, int32 *buf_width, int32 *buf_height);
-    OSCL_IMPORT_REF void    PVSetPostProcType(VideoDecControls *decCtrl, int mode);
-    uint32  PVGetVideoTimeStamp(VideoDecControls *decoderControl);
-    int     PVGetDecBitrate(VideoDecControls *decCtrl);
-    int     PVGetDecFramerate(VideoDecControls *decCtrl);
-    uint8   *PVGetDecOutputFrame(VideoDecControls *decCtrl);
-    int     PVGetLayerID(VideoDecControls *decCtrl);
-    int32   PVGetDecMemoryUsage(VideoDecControls *decCtrl);
-    OSCL_IMPORT_REF MP4DecodingMode PVGetDecBitstreamMode(VideoDecControls *decCtrl);
-    Bool    PVExtractVolHeader(uint8 *video_buffer, uint8 *vol_header, int32 *vol_header_size);
-    int32   PVLocateFrameHeader(uint8 *video_buffer, int32 vop_size);
-    int32   PVLocateH263FrameHeader(uint8 *video_buffer, int32 vop_size);
-    Bool    PVGetVolInfo(VideoDecControls *decCtrl, VolInfo *pVolInfo); // BX 6/24/04
-    Bool    IsIntraFrame(VideoDecControls *decoderControl);
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* _MP4DEC_API_H_ */
-
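
The header removed above is the public API of the MPEG-4/H.263 software decoder, and its declarations already imply the expected call order: fill in a VideoDecControls, initialize from the VOL header(s), decode VOPs one at a time, then clean up. A rough usage sketch against those declarations follows; the framing, the fixed init size, and the error handling are simplified placeholders, not how SoftMPEG4 actually drives the library.

    #include <string.h>
    #include "mp4dec_api.h"   /* the header deleted above */

    /* Decode a single, already-framed VOP.  vol/vol_size describe the VOL header,
     * bitstream/size describe one access unit, yuv_out receives the frame. */
    static int decode_one_frame(uint8 *vol, int32 vol_size,
                                uint8 *bitstream, int32 size,
                                uint8 *yuv_out)
    {
        VideoDecControls ctrl;
        memset(&ctrl, 0, sizeof(ctrl));

        uint8 *volbuf[1] = { vol };
        if (PVInitVideoDecoder(&ctrl, volbuf, &vol_size, /*nLayers*/ 1,
                               /*width*/ 352, /*height*/ 288,   /* placeholder size */
                               MPEG4_MODE) != PV_TRUE) {
            return -1;
        }

        uint32 timestamp = 0;
        uint use_ext_timestamp[1] = { 0 };
        uint8 *buffers[1] = { bitstream };
        Bool ok = PVDecodeVideoFrame(&ctrl, buffers, &timestamp, &size,
                                     use_ext_timestamp, yuv_out);

        int32 disp_w = 0, disp_h = 0;
        PVGetVideoDimensions(&ctrl, &disp_w, &disp_h);   /* actual display size */

        PVCleanUpVideoDecoder(&ctrl);
        return (ok == PV_TRUE) ? 0 : -1;
    }
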
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp
deleted file mode 100644
index e2761eb..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp
+++ /dev/null
@@ -1,421 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-
- Description: Separated modules into one function per file and put into
-    new template.
-
- Description: Optimizing C code and adding comments.  Also changing variable
-    names to make them more meaningful.
-
- Who:                   Date:
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-
-    Rec_Y = pointer to 0th position in buffer containing luminance values
-        of type uint8.
-    y_start = value of y coordinate of type int that specifies the first
-        row of pixels to be used in the filter algorithm.
-    x_start = value of x coordinate of type int that specifies the first
-        column of pixels to be used in the filter algorithm.
-    y_blk_start = value of the y coordinate of type int that specifies the
-        row of pixels which contains the start of a block. The row
-        specified by y_blk_start+BLK_SIZE is the last row of pixels
-        that are used in the filter algorithm.
-    x_blk_start = value of the x coordinate of type int that specifies the
-        column of pixels which contains the start of a block.  The
-        column specified by x_blk_start+BLK_SIZE is the last column of
-        pixels that are used in the filter algorithm.
-    thr = value of type int that is compared to the elements in Rec_Y to
-        determine if a particular value in Rec_Y will be modified by
-        the filter or not
-    width = value of type int that specifies the width of the display
-        in pixels (or pels, equivalently).
-    max_diff = value of type int that specifies the value that may be added
-        or subtracted from the pixel in Rec_Y that is being filtered
-        if the filter algorithm decides to change that particular
-        pixel's luminance value.
-
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    Buffer pointed to by Rec_Y is modified with the filtered
-    luminance values.
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function implements a motion compensated noise filter using adaptive
- weighted averaging of luminance values.  *Rec_Y contains the luminance values
- that are being filtered.
-
- The picture below depicts a 3x3 group of pixel luminance values.  The "u", "c",
- and "l" stand for "upper", "center" and "lower", respectively.  The location
- of pelc0 is specified by x_start and y_start in the 1-D array "Rec_Y" as
- follows (assuming x_start=0):
-
- location of pelc0 = [(y_start+1) * width] + x_start
-
- Moving up or down 1 row (moving from pelu2 to pelc2, for example) is done by
- incrementing or decrementing "width" elements within Rec_Y.
-
- The coordinates of the upper left hand corner of a block (not the group of
- 9 pixels depicted in the figure below) are specified by
- (y_blk_start, x_blk_start).  The width and height of the block are both BLKSIZE.
- (y_start,x_start) may be specified independently of (y_blk_start, x_blk_start).
-
-    (y_start,x_start)
- -----------|--------------------------
-    |   X    |        |        |
-    |  pelu0 |  pelu1 |  pelu2 |
-    |        |        |        |
- --------------------------------------
-    |        |        |        |
-    |  pelc0 |  pelc1 |  pelc2 |
-    |        |        |        |
- --------------------------------------
-    |        |        |        |
-    |  pell0 |  pell1 |  pell2 |
-    |        |        |        |
- --------------------------------------
-
- The filtering of the luminance values is achieved by comparing the 9
- luminance values to a threshold value ("thr") and then changing the
- luminance value of pelc1 if all of the values are above or all of the values
- are below the threshold.  The amount that the luminance value is changed
- depends on a weighted sum of the 9 luminance values. The position of Pelc1
- is then advanced to the right by one (as well as all of the surrounding pixels)
- and the same calculation is performed again for the luminance value of the new
- Pelc1. This continues row-wise until pixels in the last row of the block are
- filtered.
-
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- None.
-
-------------------------------------------------------------------------------
- REFERENCES
-
- ..\corelibs\decoder\common\src\post_proc.c
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
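- A minimal sketch only of the filtering described above (not the exact
- fixed-point implementation below); it assumes BLKSIZE is the block edge
- length in pixels and that INDEX(pel, thr) evaluates to 1 when pel is at or
- above thr and to 0 otherwise:
-
-    for each row y of the block
-        for each column x of the row
-            cnt = number of the 9 pixels around (y, x) whose INDEX() is 1
-            if (cnt == 0 || cnt == 9)          -- all 9 on the same side of thr
-                avg = weighted average of the 9 pixels, center weighted most
-                clamp avg to within +/- max_diff of the center pixel
-                center pixel = avg
-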
-------------------------------------------------------------------------------
- RESOURCES USED
-   When the code is written for a specific target processor, the
-     resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-#include    "mp4def.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void AdaptiveSmooth_NoMMX(
-    uint8 *Rec_Y,       /* i/o  */
-    int y_start,        /* i    */
-    int x_start,        /* i    */
-    int y_blk_start,    /* i    */
-    int x_blk_start,    /* i    */
-    int thr,        /* i    */
-    int width,      /* i    */
-    int max_diff        /* i    */
-)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int  sign_v[15];
-    int sum_v[15];
-    int *sum_V_ptr;
-    int *sign_V_ptr;
-    uint8 pelu;
-    uint8 pelc;
-    uint8 pell;
-    uint8 *pelp;
-    uint8 oldrow[15];
-    int  sum;
-    int sum1;
-    uint8 *Rec_Y_ptr;
-    int32  addr_v;
-    int row_cntr;
-    int col_cntr;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /*  first row
-    */
-    addr_v = (int32)(y_start + 1) * width;  /* y coord of 1st element in the row  /
-                     /containing pelc pixel /     */
-    Rec_Y_ptr = &Rec_Y[addr_v + x_start];  /* initializing pointer to
-                           /  pelc0 position  */
-    sum_V_ptr = &sum_v[0];  /* initializing pointer to 0th element of array
-                /   that will contain weighted sums of pixel
-                /   luminance values */
-    sign_V_ptr = &sign_v[0];  /*  initializing pointer to 0th element of
-                  /   array that will contain sums that indicate
-                  /    how many of the 9 pixels are above or below
-                  /    the threshold value (thr)    */
-    pelp = &oldrow[0];  /* initializing pointer to the 0th element of array
-                /    that will contain current values of pelc that
-                /   are saved and used as values of pelu when the
-                /   next row of pixels are filtered */
-
-    pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu0 to pelu  */
-    *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc0 to pelc and
-                     /  storing this value in pelp which
-                     /   will be used as value of pelu0 when
-                     /  next row is filtered */
-    pell = *(Rec_Y_ptr + width);  /* assigning value of pell0 to pell */
-    Rec_Y_ptr++; /* advancing pointer from pelc0 to pelc1 */
-    *sum_V_ptr++ = pelu + (pelc << 1) + pell;  /* weighted sum of pelu0,
-                         /  pelc0 and pell0  */
-    /* sum of 0's and 1's (0 if pixel value is below thr, 1 if value
-    /is above thr)  */
-    *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) + INDEX(pell, thr);
-
-
-    pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu1 to pelu */
-    *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc1 to pelc and
-                     /  storing this value in pelp which
-                     /  will be used as the value of pelu1 when
-                     /  next row is filtered */
-    pell = *(Rec_Y_ptr + width);  /* assigning value of pell1 to pell */
-    Rec_Y_ptr++;  /* advancing pointer from pelc1 to pelc2 */
-    *sum_V_ptr++ = pelu + (pelc << 1) + pell; /* weighted sum of pelu1,
-                        / pelc1 and pell1  */
-    /* sum of 0's and 1's (0 if pixel value is below thr, 1 if value
-    /is above thr)  */
-    *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) + INDEX(pell, thr);
-
-    /* The loop below performs the filtering for the first row of
-    /   pixels in the region.  It steps across the remaining pixels in
-    /   the row and alters the luminance value of pelc1 if necessary,
-    /   depending on the luminance values of the adjacent pixels*/
-
-    for (col_cntr = (x_blk_start + BLKSIZE - 1) - x_start; col_cntr > 0; col_cntr--)
-    {
-        pelu = *(Rec_Y_ptr - width);  /* assigning value of pelu2 to
-                        /   pelu */
-        *pelp++ = pelc = *Rec_Y_ptr; /* assigning value of pelc2 to pelc
-                         / and storing this value in pelp
-                         / which will be used   as value of pelu2
-                         / when next row is filtered */
-        pell = *(Rec_Y_ptr + width); /* assigning value of pell2 to pell */
-
-        /* weighted sum of pelu1, pelc1 and pell1  */
-        *sum_V_ptr = pelu + (pelc << 1) + pell;
-        /* sum of 0's and 1's (0 if pixel value is below thr,
-        /1 if value is above thr)  */
-        *sign_V_ptr = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                      INDEX(pell, thr);
-        /* the value of sum1 indicates how many of the 9 pixels'
-        /luminance values are above or equal to thr */
-        sum1 = *(sign_V_ptr - 2) + *(sign_V_ptr - 1) + *sign_V_ptr;
-
-        /* alter the luminance value of pelc1 if all 9 luminance values
-        /are above or equal to thr or if all 9 values are below thr */
-        if (sum1 == 0 || sum1 == 9)
-        {
-            /* sum is a weighted average of the 9 pixel luminance
-            /values   */
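-            /* i.e. a [1 2 1; 2 4 2; 1 2 1]/16 kernel; the +8 rounds */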
-            sum = (*(sum_V_ptr - 2) + (*(sum_V_ptr - 1) << 1) +
-                   *sum_V_ptr + 8) >> 4;
-
-            Rec_Y_ptr--;  /* move pointer back to pelc1  */
-            /* If luminance value of pelc1 is larger than
-            / sum by more than max_diff, then subtract max_diff
-            / from luminance value of pelc1*/
-            if ((int)(*Rec_Y_ptr - sum) > max_diff)
-            {
-                sum = *Rec_Y_ptr - max_diff;
-            }
-            /* If luminance value of pelc1 is smaller than
-            / sum by more than max_diff, then add max_diff
-            / to luminance value of pelc1*/
-            else if ((int)(*Rec_Y_ptr - sum) < -max_diff)
-            {
-                sum = *Rec_Y_ptr + max_diff;
-            }
-            *Rec_Y_ptr++ = sum; /* assign value of sum to pelc1
-                         and advance pointer to pelc2 */
-        }
-        Rec_Y_ptr++; /* advance pointer to new value of pelc2
-                 /   old pelc2 is now treated as pelc1*/
-        sum_V_ptr++; /* pointer is advanced so next weighted sum may
-                 /  be saved */
-        sign_V_ptr++; /* pointer is advanced so next sum of 0's and
-                  / 1's may be saved  */
-    }
-
-    /* The nested loops below perform the filtering for the remaining rows */
-
-    addr_v = (y_start + 2) * width;  /* advance addr_v to the next row
-                     /   (corresponding to pell0)*/
-    /* The outer loop steps through the rows.   */
-    for (row_cntr = (y_blk_start + BLKSIZE) - (y_start + 2); row_cntr > 0; row_cntr--)
-    {
-        Rec_Y_ptr = &Rec_Y[addr_v + x_start]; /* advance pointer to
-            /the old pell0, which has become the new pelc0 */
-        addr_v += width;  /* move addr_v down 1 row */
-        sum_V_ptr = &sum_v[0];  /* re-initializing pointer */
-        sign_V_ptr = &sign_v[0];  /* re-initializing pointer */
-        pelp = &oldrow[0]; /* re-initializing pointer */
-
-        pelu = *pelp; /* setting pelu0 to old value of pelc0 */
-        *pelp++ = pelc = *Rec_Y_ptr;
-        pell = *(Rec_Y_ptr + width);
-        Rec_Y_ptr++;
-        *sum_V_ptr++ = pelu + (pelc << 1) + pell;
-        *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                        INDEX(pell, thr);
-
-        pelu = *pelp; /* setting pelu1 to old value of pelc1 */
-        *pelp++ = pelc = *Rec_Y_ptr;
-        pell = *(Rec_Y_ptr + width);
-        Rec_Y_ptr++;
-        *sum_V_ptr++ = pelu + (pelc << 1) + pell;
-        *sign_V_ptr++ = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                        INDEX(pell, thr);
-        /* The inner loop steps through the columns */
-        for (col_cntr = (x_blk_start + BLKSIZE - 1) - x_start; col_cntr > 0; col_cntr--)
-        {
-            pelu = *pelp; /* setting pelu2 to old value of pelc2 */
-            *pelp++ = pelc = *Rec_Y_ptr;
-            pell = *(Rec_Y_ptr + width);
-
-            *sum_V_ptr = pelu + (pelc << 1) + pell;
-            *sign_V_ptr = INDEX(pelu, thr) + INDEX(pelc, thr) +
-                          INDEX(pell, thr);
-
-            sum1 = *(sign_V_ptr - 2) + *(sign_V_ptr - 1) + *sign_V_ptr;
-            /* the "if" statement below is the same as the one in
-            / the first loop */
-            if (sum1 == 0 || sum1 == 9)
-            {
-                sum = (*(sum_V_ptr - 2) + (*(sum_V_ptr - 1) << 1) +
-                       *sum_V_ptr + 8) >> 4;
-
-                Rec_Y_ptr--;
-                if ((int)(*Rec_Y_ptr - sum) > max_diff)
-                {
-                    sum = *Rec_Y_ptr - max_diff;
-                }
-                else if ((int)(*Rec_Y_ptr - sum) < -max_diff)
-                {
-                    sum = *Rec_Y_ptr + max_diff;
-                }
-                *Rec_Y_ptr++ = (uint8) sum;
-            }
-            Rec_Y_ptr++;
-            sum_V_ptr++;
-            sign_V_ptr++;
-        }
-    }
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
deleted file mode 100644
index 3d10086..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/block_idct.cpp
+++ /dev/null
@@ -1,912 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    [input_variable_name] = [description of the input to module, its type
-                 definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
-    [local_store_name] = [description of the local store, its type
-                  definition, and length (when applicable)]
-    [local_buffer_name] = [description of the local buffer, its type
-                   definition, and length (when applicable)]
-    [local_ptr_name] = [description of the local pointer, its type
-                definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
-    [global_store_name] = [description of the global store, its type
-                   definition, and length (when applicable)]
-    [global_buffer_name] = [description of the global buffer, its type
-                definition, and length (when applicable)]
-    [global_ptr_name] = [description of the global pointer, its type
-                 definition, and length (when applicable)]
-
- Outputs:
-    [return_variable_name] = [description of data/pointer returned
-                  by module, its type definition, and length
-                  (when applicable)]
-
- Pointers and Buffers Modified:
-    [variable_bfr_ptr] points to the [describe where the
-      variable_bfr_ptr points to, its type definition, and length
-      (when applicable)]
-    [variable_bfr] contents are [describe the new contents of
-      variable_bfr]
-
- Local Stores Modified:
-    [local_store_name] = [describe new contents, its type
-                  definition, and length (when applicable)]
-
- Global Stores Modified:
-    [global_store_name] = [describe new contents, its type
-                   definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-------------------------------------------------------------------------------
- REFERENCES
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
- RESOURCES USED
-   When the code is written for a specific target processor, the
-     resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "mp4dec_lib.h"
-#include "idct.h"
-#include "motion_comp.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-/* private prototypes */
-static void idctrow(int16 *blk, uint8 *pred, uint8 *dst, int width);
-static void idctrow_intra(int16 *blk, PIXEL *, int width);
-static void idctcol(int16 *blk);
-
-#ifdef FAST_IDCT
-// mapping from nz_coefs to functions to be used
-
-
-// ARM4 does not allow global data that is not constant, hence
-// an array of function pointers cannot be considered an array of constants
-// (the actual addresses are only known when the dll is loaded).
-// So instead of arrays of function pointers, we store here
-// arrays of rows or columns and then call the idct function
-// corresponding to that row/column number:
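-//
-// For example (reading the tables and their call sites below): a block with
-// nz_coefs == 3 applies idctcolVCA[2] = {idctcol2, idctcol1, idctcol0,
-// idctcol0} to the first four columns and uses idctrowVCA[2] (or
-// idctrowVCA_intra[2]) for the row pass.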
-
-
-static void (*const idctcolVCA[10][4])(int16*) =
-{
-    {&idctcol1, &idctcol0, &idctcol0, &idctcol0},
-    {&idctcol1, &idctcol1, &idctcol0, &idctcol0},
-    {&idctcol2, &idctcol1, &idctcol0, &idctcol0},
-    {&idctcol3, &idctcol1, &idctcol0, &idctcol0},
-    {&idctcol3, &idctcol2, &idctcol0, &idctcol0},
-    {&idctcol3, &idctcol2, &idctcol1, &idctcol0},
-    {&idctcol3, &idctcol2, &idctcol1, &idctcol1},
-    {&idctcol3, &idctcol2, &idctcol2, &idctcol1},
-    {&idctcol3, &idctcol3, &idctcol2, &idctcol1},
-    {&idctcol4, &idctcol3, &idctcol2, &idctcol1}
-};
-
-
-static void (*const idctrowVCA[10])(int16*, uint8*, uint8*, int) =
-{
-    &idctrow1,
-    &idctrow2,
-    &idctrow2,
-    &idctrow2,
-    &idctrow2,
-    &idctrow3,
-    &idctrow4,
-    &idctrow4,
-    &idctrow4,
-    &idctrow4
-};
-
-
-static void (*const idctcolVCA2[16])(int16*) =
-{
-    &idctcol0, &idctcol4, &idctcol3, &idctcol4,
-    &idctcol2, &idctcol4, &idctcol3, &idctcol4,
-    &idctcol1, &idctcol4, &idctcol3, &idctcol4,
-    &idctcol2, &idctcol4, &idctcol3, &idctcol4
-};
-
-static void (*const idctrowVCA2[8])(int16*, uint8*, uint8*, int) =
-{
-    &idctrow1, &idctrow4, &idctrow3, &idctrow4,
-    &idctrow2, &idctrow4, &idctrow3, &idctrow4
-};
-
-static void (*const idctrowVCA_intra[10])(int16*, PIXEL *, int) =
-{
-    &idctrow1_intra,
-    &idctrow2_intra,
-    &idctrow2_intra,
-    &idctrow2_intra,
-    &idctrow2_intra,
-    &idctrow3_intra,
-    &idctrow4_intra,
-    &idctrow4_intra,
-    &idctrow4_intra,
-    &idctrow4_intra
-};
-
-static void (*const idctrowVCA2_intra[8])(int16*, PIXEL *, int) =
-{
-    &idctrow1_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra,
-    &idctrow2_intra, &idctrow4_intra, &idctrow3_intra, &idctrow4_intra
-};
-#endif
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void MBlockIDCT(VideoDecData *video)
-{
-    Vop *currVop = video->currVop;
-    MacroBlock *mblock = video->mblock;
-    PIXEL *c_comp;
-    PIXEL *cu_comp;
-    PIXEL *cv_comp;
-    int x_pos = video->mbnum_col;
-    int y_pos = video->mbnum_row;
-    int width, width_uv;
-    int32 offset;
-    width = video->width;
-    width_uv = width >> 1;
-    offset = (int32)(y_pos << 4) * width + (x_pos << 4);
-
-    c_comp  = currVop->yChan + offset;
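-    /* The chroma planes are half width and half height, so the co-located
-       8x8 chroma block starts at 4*y_pos*width + 8*x_pos, i.e.
-       (offset >> 2) + (x_pos << 2). */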
-    cu_comp = currVop->uChan + (offset >> 2) + (x_pos << 2);
-    cv_comp = currVop->vChan + (offset >> 2) + (x_pos << 2);
-
-    BlockIDCT_intra(mblock, c_comp, 0, width);
-    BlockIDCT_intra(mblock, c_comp + 8, 1, width);
-    BlockIDCT_intra(mblock, c_comp + (width << 3), 2, width);
-    BlockIDCT_intra(mblock, c_comp + (width << 3) + 8, 3, width);
-    BlockIDCT_intra(mblock, cu_comp, 4, width_uv);
-    BlockIDCT_intra(mblock, cv_comp, 5, width_uv);
-}
-
-
-void BlockIDCT_intra(
-    MacroBlock *mblock, PIXEL *c_comp, int comp, int width)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int16 *coeff_in = mblock->block[comp];
-#ifdef INTEGER_IDCT
-#ifdef FAST_IDCT  /* VCA IDCT using nzcoefs and bitmaps*/
-    int i, bmapr;
-    int nz_coefs = mblock->no_coeff[comp];
-    uint8 *bitmapcol = mblock->bitmapcol[comp];
-    uint8 bitmaprow = mblock->bitmaprow[comp];
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    if (nz_coefs <= 10)
-    {
-        bmapr = (nz_coefs - 1);
-
-        (*(idctcolVCA[bmapr]))(coeff_in);
-        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
-        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
-        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
-
-        (*idctrowVCA_intra[nz_coefs-1])(coeff_in, c_comp, width);
-    }
-    else
-    {
-        i = 8;
-        while (i--)
-        {
-            bmapr = (int)bitmapcol[i];
-            if (bmapr)
-            {
-                if ((bmapr&0xf) == 0)         /*  07/18/01 */
-                {
-                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
-                }
-                else
-                {
-                    idctcol(coeff_in + i);
-                }
-            }
-        }
-        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
-        {
-            bitmaprow >>= 4;
-            (*(idctrowVCA2_intra[(int)bitmaprow]))(coeff_in, c_comp, width);
-        }
-        else
-        {
-            idctrow_intra(coeff_in, c_comp, width);
-        }
-    }
-#else
-    void idct_intra(int *block, uint8 *comp, int width);
-    idct_intra(coeff_in, c_comp, width);
-#endif
-#else
-    void idctref_intra(int *block, uint8 *comp, int width);
-    idctref_intra(coeff_in, c_comp, width);
-#endif
-
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-
-/*  08/04/05, no residue, just copy from pred to output */
-void Copy_Blk_to_Vop(uint8 *dst, uint8 *pred, int width)
-{
-    /* copy 4 bytes at a time */
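-    /* Each 8-pixel row is copied as two 4-byte words: dst advances by
-       (width - 4) + 4 = width per row, pred by 12 + 4 = 16 (its pitch). */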
-    width -= 4;
-    *((uint32*)dst) = *((uint32*)pred);
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-    *((uint32*)(dst += width)) = *((uint32*)(pred += 12));
-    *((uint32*)(dst += 4)) = *((uint32*)(pred += 4));
-
-    return ;
-}
-
-/*  08/04/05 compute IDCT and add prediction at the end  */
-void BlockIDCT(
-    uint8 *dst,  /* destination */
-    uint8 *pred, /* prediction block, pitch 16 */
-    int16   *coeff_in,  /* DCT data, size 64 */
-    int width, /* width of dst */
-    int nz_coefs,
-    uint8 *bitmapcol,
-    uint8 bitmaprow
-)
-{
-#ifdef INTEGER_IDCT
-#ifdef FAST_IDCT  /* VCA IDCT using nzcoefs and bitmaps*/
-    int i, bmapr;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    if (nz_coefs <= 10)
-    {
-        bmapr = (nz_coefs - 1);
-        (*(idctcolVCA[bmapr]))(coeff_in);
-        (*(idctcolVCA[bmapr][1]))(coeff_in + 1);
-        (*(idctcolVCA[bmapr][2]))(coeff_in + 2);
-        (*(idctcolVCA[bmapr][3]))(coeff_in + 3);
-
-        (*idctrowVCA[nz_coefs-1])(coeff_in, pred, dst, width);
-        return ;
-    }
-    else
-    {
-        i = 8;
-
-        while (i--)
-        {
-            bmapr = (int)bitmapcol[i];
-            if (bmapr)
-            {
-                if ((bmapr&0xf) == 0)         /*  07/18/01 */
-                {
-                    (*(idctcolVCA2[bmapr>>4]))(coeff_in + i);
-                }
-                else
-                {
-                    idctcol(coeff_in + i);
-                }
-            }
-        }
-        if ((bitmapcol[4] | bitmapcol[5] | bitmapcol[6] | bitmapcol[7]) == 0)
-        {
-            (*(idctrowVCA2[bitmaprow>>4]))(coeff_in, pred, dst, width);
-        }
-        else
-        {
-            idctrow(coeff_in, pred, dst, width);
-        }
-        return ;
-    }
-#else // FAST_IDCT
-    void idct(int *block, uint8 *pred, uint8 *dst, int width);
-    idct(coeff_in, pred, dst, width);
-    return;
-#endif // FAST_IDCT
-#else // INTEGER_IDCT
-    void idctref(int *block, uint8 *pred, uint8 *dst, int width);
-    idctref(coeff_in, pred, dst, width);
-    return;
-#endif // INTEGER_IDCT
-
-}
-/*----------------------------------------------------------------------------
-;  End Function: block_idct
-----------------------------------------------------------------------------*/
-
-
-/****************************************************************************/
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: idctrow
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS FOR idctrow
-
- Inputs:
-    [input_variable_name] = [description of the input to module, its type
-                 definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
-    [local_store_name] = [description of the local store, its type
-                  definition, and length (when applicable)]
-    [local_buffer_name] = [description of the local buffer, its type
-                   definition, and length (when applicable)]
-    [local_ptr_name] = [description of the local pointer, its type
-                definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
-    [global_store_name] = [description of the global store, its type
-                   definition, and length (when applicable)]
-    [global_buffer_name] = [description of the global buffer, its type
-                definition, and length (when applicable)]
-    [global_ptr_name] = [description of the global pointer, its type
-                 definition, and length (when applicable)]
-
- Outputs:
-    [return_variable_name] = [description of data/pointer returned
-                  by module, its type definition, and length
-                  (when applicable)]
-
- Pointers and Buffers Modified:
-    [variable_bfr_ptr] points to the [describe where the
-      variable_bfr_ptr points to, its type definition, and length
-      (when applicable)]
-    [variable_bfr] contents are [describe the new contents of
-      variable_bfr]
-
- Local Stores Modified:
-    [local_store_name] = [describe new contents, its type
-                  definition, and length (when applicable)]
-
- Global Stores Modified:
-    [global_store_name] = [describe new contents, its type
-                   definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION FOR idctrow
-
-------------------------------------------------------------------------------
- REQUIREMENTS FOR idctrow
-
-------------------------------------------------------------------------------
- REFERENCES FOR idctrow
-
-------------------------------------------------------------------------------
- PSEUDO-CODE FOR idctrow
-
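- A naive floating-point reference of the row transform quoted in the code
- below (the fixed-point scaling, rounding, clipping and the prediction add
- performed by the real code are omitted; src holds the 8 input coefficients;
- cos, sqrt and M_PI come from <math.h>):
-
-     double tmp[8];
-     int k, l;
-     for (k = 0; k < 8; k++)
-     {
-         tmp[k] = 0.0;
-         for (l = 0; l < 8; l++)
-         {
-             double c = (l == 0) ? 128.0 : 128.0 * sqrt(2.0);
-             tmp[k] += c * src[l] * cos(M_PI / 8.0 * (k + 0.5) * l);
-         }
-     }
-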
-------------------------------------------------------------------------------
- RESOURCES USED FOR idctrow
-   When the code is written for a specific target processor, the
-     resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; Function Code FOR idctrow
-----------------------------------------------------------------------------*/
-void idctrow(
-    int16 *blk, uint8 *pred, uint8 *dst, int width
-)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-    int i = 8;
-    uint32 pred_word, dst_word;
-    int res, res2;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* row (horizontal) IDCT
-    *
-    *            7
-    *   dst[k] = sum c[l] * src[l] * cos( (pi/8) * ( k + 1/2 ) * l )
-    *           l=0
-    *
-    *   where: c[0]    = 128
-    *          c[1..7] = 128*sqrt(2) */
-
-    /* preset the offset, such that we can take advantage pre-offset addressing mode   */
-    width -= 4;
-    dst -= width;
-    pred -= 12;
-    blk -= 8;
-
-    while (i--)
-    {
-        x1 = (int32)blk[12] << 8;
-        blk[12] = 0;
-        x2 = blk[14];
-        blk[14] = 0;
-        x3 = blk[10];
-        blk[10] = 0;
-        x4 = blk[9];
-        blk[9] = 0;
-        x5 = blk[15];
-        blk[15] = 0;
-        x6 = blk[13];
-        blk[13] = 0;
-        x7 = blk[11];
-        blk[11] = 0;
-        x0 = ((*(blk += 8)) << 8) + 8192;
-        blk[0] = 0;   /* for proper rounding in the fourth stage */
-
-        /* first stage */
-        x8 = W7 * (x4 + x5) + 4;
-        x4 = (x8 + (W1 - W7) * x4) >> 3;
-        x5 = (x8 - (W1 + W7) * x5) >> 3;
-        x8 = W3 * (x6 + x7) + 4;
-        x6 = (x8 - (W3 - W5) * x6) >> 3;
-        x7 = (x8 - (W3 + W5) * x7) >> 3;
-
-        /* second stage */
-        x8 = x0 + x1;
-        x0 -= x1;
-        x1 = W6 * (x3 + x2) + 4;
-        x2 = (x1 - (W2 + W6) * x2) >> 3;
-        x3 = (x1 + (W2 - W6) * x3) >> 3;
-        x1 = x4 + x6;
-        x4 -= x6;
-        x6 = x5 + x7;
-        x5 -= x7;
-
-        /* third stage */
-        x7 = x8 + x3;
-        x8 -= x3;
-        x3 = x0 + x2;
-        x0 -= x2;
-        x2 = (181 * (x4 + x5) + 128) >> 8;
-        x4 = (181 * (x4 - x5) + 128) >> 8;
-
-        /* fourth stage */
-        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-
-        res = (x7 + x1) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x3 + x2) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x0 + x4) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x8 + x6) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
-        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-
-        res = (x8 - x6) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x0 - x4) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x3 - x2) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x7 - x1) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
-    }
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-
-__attribute__((no_sanitize("signed-integer-overflow")))
-void idctrow_intra(
-    int16 *blk, PIXEL *comp, int width
-)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
-    int i = 8;
-    int offset = width;
-    int32 word;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* row (horizontal) IDCT
-    *
-    *            7
-    *   dst[k] = sum c[l] * src[l] * cos( (pi/8) * ( k + 1/2 ) * l )
-    *           l=0
-    *
-    *   where: c[0]    = 128
-    *          c[1..7] = 128*sqrt(2) */
-    while (i--)
-    {
-        x1 = (int32)blk[4] << 8;
-        blk[4] = 0;
-        x2 = blk[6];
-        blk[6] = 0;
-        x3 = blk[2];
-        blk[2] = 0;
-        x4 = blk[1];
-        blk[1] = 0;
-        x5 = blk[7];
-        blk[7] = 0;
-        x6 = blk[5];
-        blk[5] = 0;
-        x7 = blk[3];
-        blk[3] = 0;
-#ifndef FAST_IDCT
-        /* shortcut */  /* covered by idctrow1  01/9/2001 */
-        if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
-        {
-            blk[0] = blk[1] = blk[2] = blk[3] = blk[4] = blk[5] = blk[6] = blk[7] = (blk[0] + 32) >> 6;
-            return;
-        }
-#endif
-        x0 = ((int32)blk[0] << 8) + 8192;
-        blk[0] = 0;  /* for proper rounding in the fourth stage */
-
-        /* first stage */
-        x8 = W7 * (x4 + x5) + 4;
-        x4 = (x8 + (W1 - W7) * x4) >> 3;
-        x5 = (x8 - (W1 + W7) * x5) >> 3;
-        x8 = W3 * (x6 + x7) + 4;
-        x6 = (x8 - (W3 - W5) * x6) >> 3;
-        x7 = (x8 - (W3 + W5) * x7) >> 3;
-
-        /* second stage */
-        x8 = x0 + x1;
-        x0 -= x1;
-        x1 = W6 * (x3 + x2) + 4;
-        x2 = (x1 - (W2 + W6) * x2) >> 3;
-        x3 = (x1 + (W2 - W6) * x3) >> 3;
-        x1 = x4 + x6;
-        x4 -= x6;
-        x6 = x5 + x7;
-        x5 -= x7;
-
-        /* third stage */
-        x7 = x8 + x3;
-        x8 -= x3;
-        x3 = x0 + x2;
-        x0 -= x2;
-        x2 = (181 * (x4 + x5) + 128) >> 8;
-        x4 = (181 * (x4 - x5) + 128) >> 8;
-
-        /* fourth stage */
-        word = ((x7 + x1) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x3 + x2) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-        temp = ((x0 + x4) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x8 + x6) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp)) = word;
-
-        word = ((x8 - x6) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x0 - x4) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-        temp = ((x3 - x2) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x7 - x1) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp + 4)) = word;
-        comp += offset;
-
-        blk += B_SIZE;
-    }
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-
-/*----------------------------------------------------------------------------
-; End Function: idctrow
-----------------------------------------------------------------------------*/
-
-
-/****************************************************************************/
-
-/*
-------------------------------------------------------------------------------
- FUNCTION NAME: idctcol
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS FOR idctcol
-
- Inputs:
-    [input_variable_name] = [description of the input to module, its type
-                 definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
-    [local_store_name] = [description of the local store, its type
-                  definition, and length (when applicable)]
-    [local_buffer_name] = [description of the local buffer, its type
-                   definition, and length (when applicable)]
-    [local_ptr_name] = [description of the local pointer, its type
-                definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
-    [global_store_name] = [description of the global store, its type
-                   definition, and length (when applicable)]
-    [global_buffer_name] = [description of the global buffer, its type
-                definition, and length (when applicable)]
-    [global_ptr_name] = [description of the global pointer, its type
-                 definition, and length (when applicable)]
-
- Outputs:
-    [return_variable_name] = [description of data/pointer returned
-                  by module, its type definition, and length
-                  (when applicable)]
-
- Pointers and Buffers Modified:
-    [variable_bfr_ptr] points to the [describe where the
-      variable_bfr_ptr points to, its type definition, and length
-      (when applicable)]
-    [variable_bfr] contents are [describe the new contents of
-      variable_bfr]
-
- Local Stores Modified:
-    [local_store_name] = [describe new contents, its type
-                  definition, and length (when applicable)]
-
- Global Stores Modified:
-    [global_store_name] = [describe new contents, its type
-                   definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION FOR idctcol
-
-------------------------------------------------------------------------------
- REQUIREMENTS FOR idctcol
-
-------------------------------------------------------------------------------
- REFERENCES FOR idctcol
-
-------------------------------------------------------------------------------
- PSEUDO-CODE FOR idctcol
-
-------------------------------------------------------------------------------
- RESOURCES USED FOR idctcol
-   When the code is written for a specific target processor, the
-     resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; Function Code FOR idctcol
-----------------------------------------------------------------------------*/
-void idctcol(
-    int16 *blk
-)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* column (vertical) IDCT
-    *
-    *              7
-    *   dst[8*k] = sum c[l] * src[8*l] * cos( (pi/8) * ( k + 1/2 ) * l )
-    *             l=0
-    *
-    *   where: c[0]    = 1/1024
-    *          c[1..7] = (1/1024)*sqrt(2) */
-    x1 = (int32)blk[32] << 11;
-    x2 = blk[48];
-    x3 = blk[16];
-    x4 = blk[8];
-    x5 = blk[56];
-    x6 = blk[40];
-    x7 = blk[24];
-#ifndef FAST_IDCT
-    /* shortcut */        /* covered by idctcolumn1  01/9/2001 */
-    if (!(x1 | x2 | x3 | x4 | x5 | x6 | x7))
-    {
-        blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56]
-                                              = blk[0] << 3;
-        return;
-    }
-#endif
-
-    x0 = ((int32)blk[0] << 11) + 128;
-
-    /* first stage */
-    x8 = W7 * (x4 + x5);
-    x4 = x8 + (W1 - W7) * x4;
-    x5 = x8 - (W1 + W7) * x5;
-    x8 = W3 * (x6 + x7);
-    x6 = x8 - (W3 - W5) * x6;
-    x7 = x8 - (W3 + W5) * x7;
-
-    /* second stage */
-    x8 = x0 + x1;
-    x0 -= x1;
-    x1 = W6 * (x3 + x2);
-    x2 = x1 - (W2 + W6) * x2;
-    x3 = x1 + (W2 - W6) * x3;
-    x1 = x4 + x6;
-    x4 -= x6;
-    x6 = x5 + x7;
-    x5 -= x7;
-
-    /* third stage */
-    x7 = x8 + x3;
-    x8 -= x3;
-    x3 = x0 + x2;
-    x0 -= x2;
-    x2 = (181 * (x4 + x5) + 128) >> 8;
-    x4 = (181 * (x4 - x5) + 128) >> 8;
-
-    /* fourth stage */
-    blk[0]    = (x7 + x1) >> 8;
-    blk[8] = (x3 + x2) >> 8;
-    blk[16] = (x0 + x4) >> 8;
-    blk[24] = (x8 + x6) >> 8;
-    blk[32] = (x8 - x6) >> 8;
-    blk[40] = (x0 - x4) >> 8;
-    blk[48] = (x3 - x2) >> 8;
-    blk[56] = (x7 - x1) >> 8;
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-/*----------------------------------------------------------------------------
-;  End Function: idctcol
-----------------------------------------------------------------------------*/
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp
deleted file mode 100644
index 6593b48..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/chv_filter.cpp
+++ /dev/null
@@ -1,654 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    [input_variable_name] = [description of the input to module, its type
-                 definition, and length (when applicable)]
-
- Local Stores/Buffers/Pointers Needed:
-    [local_store_name] = [description of the local store, its type
-                  definition, and length (when applicable)]
-    [local_buffer_name] = [description of the local buffer, its type
-                   definition, and length (when applicable)]
-    [local_ptr_name] = [description of the local pointer, its type
-                definition, and length (when applicable)]
-
- Global Stores/Buffers/Pointers Needed:
-    [global_store_name] = [description of the global store, its type
-                   definition, and length (when applicable)]
-    [global_buffer_name] = [description of the global buffer, its type
-                definition, and length (when applicable)]
-    [global_ptr_name] = [description of the global pointer, its type
-                 definition, and length (when applicable)]
-
- Outputs:
-    [return_variable_name] = [description of data/pointer returned
-                  by module, its type definition, and length
-                  (when applicable)]
-
- Pointers and Buffers Modified:
-    [variable_bfr_ptr] points to the [describe where the
-      variable_bfr_ptr points to, its type definition, and length
-      (when applicable)]
-    [variable_bfr] contents are [describe the new contents of
-      variable_bfr]
-
- Local Stores Modified:
-    [local_store_name] = [describe new contents, its type
-                  definition, and length (when applicable)]
-
- Global Stores Modified:
-    [global_store_name] = [describe new contents, its type
-                   definition, and length (when applicable)]
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
-   For fast Deblock filtering
-   Newer version (macroblock based processing)
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
- [List requirements to be satisfied by this module.]
-
-------------------------------------------------------------------------------
- REFERENCES
-
- [List all references used in designing this module.]
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
- RESOURCES USED
-   When the code is written for a specific target processor, the
-     resources used should be documented below.
-
- STACK USAGE: [stack count for this module] + [variable to represent
-          stack usage for each subroutine called]
-
-     where: [stack usage variable] = stack usage for [subroutine
-         name] (see [filename].ext)
-
- DATA MEMORY USED: x words
-
- PROGRAM MEMORY USED: x words
-
- CLOCK CYCLES: [cycle count equation for this module] + [variable
-           used to represent cycle count for each subroutine
-           called]
-
-     where: [cycle count variable] = cycle count for [subroutine
-        name] (see [filename].ext)
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#define OSCL_DISABLE_WARNING_CONV_POSSIBLE_LOSS_OF_DATA
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-//#define FILTER_LEN_8
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-
-----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-
-/*************************************************************************
-    Function prototype : void CombinedHorzVertFilter(   uint8 *rec,
-                                                        int width,
-                                                        int height,
-                                                        int16 *QP_store,
-                                                        int chr,
-                                                        uint8 *pp_mod)
-    Parameters  :
-        rec     :   pointer to the decoded frame buffer.
-        width   :   width of decoded frame.
-        height  :   height of decoded frame
-        QP_store:   pointer to the array of QP corresponding to the decoded frame.
-                    It has only one value for each MB.
-        chr     :   luma or color indication
-                    == 0 luma
-                    == 1 color
-        pp_mod  :   The semaphore used for deblocking
-
-    Remark      :   The function does the deblocking on decoded frames.
-                    First, based on the semaphore info, it chooses between hard and soft filtering.
-                    To differentiate real and fake edges, it then checks the difference against QP to
-                    decide whether to do the filtering or not.
-
-*************************************************************************/
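-
-/* Informal sketch of the horizontal pass described in the remark above (the
-   vertical pass works the same way across vertical block edges).  Pixel
-   labels follow the comments in the code: the hard filter looks at six
-   pixels A..F with the block edge between C and D, the soft filter at four
-   pixels A..D with the edge between B and C.
-
-       if both blocks sharing the edge are flagged in pp_mod (bit 0x02):
-           hard filter:  if (|D - C| < 2*QP) then C = D = (C + D)/2 and
-                         B,E and A,F are pulled toward each other by
-                         smaller steps
-       else:
-           soft filter:  if (|C - B| < QP) then B = C = (B + C)/2 and
-                         A,D are pulled toward each other by a smaller step
-*/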
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void CombinedHorzVertFilter(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int br, bc, mbr, mbc;
-    int QP = 1;
-    uint8 *ptr, *ptr_e;
-    int pp_w, pp_h;
-    int brwidth;
-
-    int jVal0, jVal1, jVal2;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    for (mbr = 0; mbr < pp_h; mbr += 2)         /* row of blocks */
-    {
-        brwidth = mbr * pp_w;               /* number of blocks above current block row */
-        for (mbc = 0; mbc < pp_w; mbc += 2)     /* col of blocks */
-        {
-            if (!chr)
-                QP = QP_store[(brwidth>>2) + (mbc>>1)]; /* QP is per MB based value */
-
-            /********* for each block **************/
-            /****************** Horiz. Filtering ********************/
-            for (br = mbr + 1; br < mbr + 3; br++)  /* 2x2 blocks */
-            {
-                brwidth += pp_w;                    /* number of blocks above & left current block row */
-                /* profiling on ARM920T shows that keeping these two boundary checks separate is faster than combining them */
-                if (br < pp_h)                  /* boundary : don't do it on the lowest row block */
-                    for (bc = mbc; bc < mbc + 2; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)              /* boundary : don't do it on the most right col block */
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + 8;        /* pointer to where the loop ends */
-
-                            if (((pp_mod[jVal0]&0x02)) && ((pp_mod[jVal0-pp_w]&0x02)))
-                            {
-                                /* Horiz Hard filter */
-                                do
-                                {
-                                    jVal0 = *(ptr - width);     /* C */
-                                    jVal1 = *ptr;               /* D */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1)))) /* (D-C) compared with 2QP */
-                                    {
-                                        /* differentiate between real and fake edge */
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (D+C)/2 */
-                                        *(ptr - width) = (uint8)(jVal0);    /*  C */
-                                        *ptr = (uint8)(jVal0);          /*  D */
-
-                                        jVal0 = *(ptr - (width << 1));      /* B */
-                                        jVal1 = *(ptr + width);         /* E */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 3) >> 2);
-                                            jVal1 -= ((jVal2 + 3) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((3 - jVal2) >> 2);
-                                            jVal1 += ((3 - jVal2) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-
-                                        jVal0 = *(ptr - (width << 1) - width);  /* A */
-                                        jVal1 = *(ptr + (width << 1));      /* F */
-                                        jVal2 = jVal1 - jVal0;              /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                    }/* a3_0 > 2QP */
-                                }
-                                while (++ptr < ptr_e);
-                            }
-                            else   /* Horiz soft filter*/
-                            {
-                                do
-                                {
-                                    jVal0 = *(ptr - width); /* B */
-                                    jVal1 = *ptr;           /* C */
-                                    jVal2 = jVal1 - jVal0;  /* C-B */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP)))) /* (C-B) compared with QP */
-                                    {
-
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (B+C)/2 cannot overflow; floor() */
-                                        *(ptr - width) = (uint8)(jVal0);    /* B = (B+C)/2 */
-                                        *ptr = (uint8)jVal0;            /* C = (B+C)/2 */
-
-                                        jVal0 = *(ptr - (width << 1));      /* A */
-                                        jVal1 = *(ptr + width);         /* D */
-                                        jVal2 = jVal1 - jVal0;          /* D-A */
-
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /* A */
-                                            *(ptr + width) = (uint8)jVal1;          /* D */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /* A */
-                                            *(ptr + width) = (uint8)jVal1;          /* D */
-                                        }
-                                    }
-                                }
-                                while (++ptr < ptr_e);
-                            } /* Soft filter*/
-                        }/* boundary checking*/
-                    }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-            /****************** Vert. Filtering ********************/
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                if (br < pp_h)
-                    for (bc = mbc + 1; bc < mbc + 3; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + (width << 3);
-
-                            if (((pp_mod[jVal0-1]&0x01)) && ((pp_mod[jVal0]&0x01)))
-                            {
-                                /* Vert Hard filter */
-                                do
-                                {
-                                    jVal1 = *ptr;       /* D */
-                                    jVal0 = *(ptr - 1); /* C */
-                                    jVal2 = jVal1 - jVal0;  /* D-C */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1))))
-                                    {
-                                        jVal1 = (jVal0 + jVal1) >> 1;   /* (C+D)/2 */
-                                        *ptr        =   jVal1;
-                                        *(ptr - 1)  =   jVal1;
-
-                                        jVal1 = *(ptr + 1);     /* E */
-                                        jVal0 = *(ptr - 2);     /* B */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 3) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 += ((jVal2 + 3) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((3 - jVal2) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 -= ((3 - jVal2) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-
-                                        jVal1 = *(ptr + 2);     /* F */
-                                        jVal0 = *(ptr - 3);     /* A */
-
-                                        jVal2 = jVal1 - jVal0;          /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 + 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 -= ((jVal2 - 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 - 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                    }   /* end of vert hard filtering */
-                                }
-                                while ((ptr += width) < ptr_e);
-                            }
-                            else   /* Vert soft filter*/
-                            {
-                                do
-                                {
-                                    jVal1 = *ptr;               /* C */
-                                    jVal0 = *(ptr - 1);         /* B */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP))))
-                                    {
-
-                                        jVal1 = (jVal0 + jVal1 + 1) >> 1;
-                                        *ptr = jVal1;           /* C */
-                                        *(ptr - 1) = jVal1;     /* B */
-
-                                        jVal1 = *(ptr + 1);     /* D */
-                                        jVal0 = *(ptr - 2);     /* A */
-                                        jVal2 = (jVal1 - jVal0);        /* D- A */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= (((jVal2) + 7) >> 3);      /* D -= (D-A)/8 */
-                                            jVal0 += (((jVal2) + 7) >> 3);      /* A += (D-A)/8 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((7 - (jVal2)) >> 3);      /* D -= (D-A)/8 */
-                                            jVal0 -= ((7 - (jVal2)) >> 3);      /* A += (D-A)/8 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                    }
-                                }
-                                while ((ptr += width) < ptr_e);
-                            } /* Soft filter*/
-                        } /* boundary*/
-                    } /*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
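/* Editor's note: the hard-filter branch above is easier to follow in
 * isolation.  The sketch below is not part of the original sources; the
 * helper name and its stride/QP parameters are assumptions.  It applies the
 * same update to one column of six pixels A..F straddling a horizontal block
 * edge, with C just above the edge and D just below.  The soft-filter branch
 * differs only in using a threshold of QP instead of 2*QP and nudging the
 * next-outer pixel pair by roughly one eighth of their difference. */
#include <stdint.h>
#include <stdlib.h>

static void hard_filter_column(uint8_t *d, int stride, int qp)
{
    int c = d[-stride], dv = d[0];
    int dc = dv - c;
    if (dc == 0 || abs(dc) >= 2 * qp)      /* only smooth small (fake) edges */
        return;

    int avg = (c + dv) >> 1;               /* C = D = (C + D) / 2 */
    d[-stride] = d[0] = (uint8_t)avg;

    int b = d[-2 * stride], e = d[stride];
    int eb = e - b;
    int adj = (abs(eb) + 3) >> 2;          /* pull B and E together by ~(E-B)/4 */
    if (eb > 0)      { b += adj; e -= adj; }
    else if (eb < 0) { b -= adj; e += adj; }
    d[-2 * stride] = (uint8_t)b;
    d[stride]      = (uint8_t)e;

    int a = d[-3 * stride], f = d[2 * stride];
    int fa = f - a;
    adj = (abs(fa) + 7) >> 3;              /* pull A and F together by ~(F-A)/8 */
    if (fa > 0)      { a += adj; f -= adj; }
    else if (fa < 0) { a -= adj; f += adj; }
    d[-3 * stride] = (uint8_t)a;
    d[2 * stride]  = (uint8_t)f;
}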
-void CombinedHorzVertFilter_NoSoftDeblocking(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int br, bc, mbr, mbc;
-    int QP = 1;
-    uint8 *ptr, *ptr_e;
-    int pp_w, pp_h;
-    int brwidth;
-
-    int jVal0, jVal1, jVal2;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    for (mbr = 0; mbr < pp_h; mbr += 2)         /* row of blocks */
-    {
-        brwidth = mbr * pp_w;               /* number of blocks above current block row */
-        for (mbc = 0; mbc < pp_w; mbc += 2)     /* col of blocks */
-        {
-            if (!chr)
-                QP = QP_store[(brwidth>>2) + (mbc>>1)]; /* QP is per MB based value */
-
-            /********* for each block **************/
-            /****************** Horiz. Filtering ********************/
-            for (br = mbr + 1; br < mbr + 3; br++)  /* 2x2 blocks */
-            {
-                brwidth += pp_w;                    /* number of blocks above & left current block row */
-                /* profiling on ARM920T shows that keeping these two boundary checks separate is faster than combining them */
-                if (br < pp_h)                  /* boundary : don't do it on the bottom row of blocks */
-                    for (bc = mbc; bc < mbc + 2; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)              /* boundary : don't do it on the rightmost column of blocks */
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + 8;        /* pointer to where the loop ends */
-
-                            if (((pp_mod[jVal0]&0x02)) && ((pp_mod[jVal0-pp_w]&0x02)))
-                            {
-                                /* Horiz Hard filter */
-                                do
-                                {
-                                    jVal0 = *(ptr - width);     /* C */
-                                    jVal1 = *ptr;               /* D */
-                                    jVal2 = jVal1 - jVal0;
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1)))) /* (D-C) compared with 2QP */
-                                    {
-                                        /* differentiate between real and fake edge */
-                                        jVal0 = ((jVal0 + jVal1) >> 1);     /* (D+C)/2 */
-                                        *(ptr - width) = (uint8)(jVal0);    /*  C */
-                                        *ptr = (uint8)(jVal0);          /*  D */
-
-                                        jVal0 = *(ptr - (width << 1));      /* B */
-                                        jVal1 = *(ptr + width);         /* E */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 3) >> 2);
-                                            jVal1 -= ((jVal2 + 3) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((3 - jVal2) >> 2);
-                                            jVal1 += ((3 - jVal2) >> 2);
-                                            *(ptr - (width << 1)) = (uint8)jVal0;       /*  store B */
-                                            *(ptr + width) = (uint8)jVal1;          /* store E */
-                                        }
-
-                                        jVal0 = *(ptr - (width << 1) - width);  /* A */
-                                        jVal1 = *(ptr + (width << 1));      /* F */
-                                        jVal2 = jVal1 - jVal0;              /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal0 += ((jVal2 + 7) >> 3);
-                                            jVal1 -= ((jVal2 + 7) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal0 -= ((7 - jVal2) >> 3);
-                                            jVal1 += ((7 - jVal2) >> 3);
-                                            *(ptr - (width << 1) - width) = (uint8)(jVal0);
-                                            *(ptr + (width << 1)) = (uint8)(jVal1);
-                                        }
-                                    }/* a3_0 > 2QP */
-                                }
-                                while (++ptr < ptr_e);
-                            }
-
-                        }/* boundary checking*/
-                    }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-            /****************** Vert. Filtering ********************/
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                if (br < pp_h)
-                    for (bc = mbc + 1; bc < mbc + 3; bc++)
-                    {
-                        /****** check boundary for deblocking ************/
-                        if (bc < pp_w)
-                        {
-                            ptr = rec + (brwidth << 6) + (bc << 3);
-                            jVal0 = brwidth + bc;
-                            if (chr)    QP = QP_store[jVal0];
-
-                            ptr_e = ptr + (width << 3);
-
-                            if (((pp_mod[jVal0-1]&0x01)) && ((pp_mod[jVal0]&0x01)))
-                            {
-                                /* Vert Hard filter */
-                                do
-                                {
-                                    jVal1 = *ptr;       /* D */
-                                    jVal0 = *(ptr - 1); /* C */
-                                    jVal2 = jVal1 - jVal0;  /* D-C */
-
-                                    if (((jVal2 > 0) && (jVal2 < (QP << 1)))
-                                            || ((jVal2 < 0) && (jVal2 > -(QP << 1))))
-                                    {
-                                        jVal1 = (jVal0 + jVal1) >> 1;   /* (C+D)/2 */
-                                        *ptr        =   jVal1;
-                                        *(ptr - 1)  =   jVal1;
-
-                                        jVal1 = *(ptr + 1);     /* E */
-                                        jVal0 = *(ptr - 2);     /* B */
-                                        jVal2 = jVal1 - jVal0;      /* E-B */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 3) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 += ((jVal2 + 3) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 += ((3 - jVal2) >> 2);        /* E = E -(E-B)/4 */
-                                            jVal0 -= ((3 - jVal2) >> 2);        /* B = B +(E-B)/4 */
-                                            *(ptr + 1) = jVal1;
-                                            *(ptr - 2) = jVal0;
-                                        }
-
-                                        jVal1 = *(ptr + 2);     /* F */
-                                        jVal0 = *(ptr - 3);     /* A */
-
-                                        jVal2 = jVal1 - jVal0;          /* (F-A) */
-
-                                        if (jVal2 > 0)
-                                        {
-                                            jVal1 -= ((jVal2 + 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 + 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                        else if (jVal2)
-                                        {
-                                            jVal1 -= ((jVal2 - 7) >> 3);    /* F -= (F-A)/8 */
-                                            jVal0 += ((jVal2 - 7) >> 3);    /* A += (F-A)/8 */
-                                            *(ptr + 2) = jVal1;
-                                            *(ptr - 3) = jVal0;
-                                        }
-                                    }   /* end of vert hard filtering */
-                                }
-                                while ((ptr += width) < ptr_e);
-                            }
-
-                        } /* boundary*/
-                    } /*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
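Editor's note: the address and QP bookkeeping above is identical in both
filter variants, so it may help to see it written out once.  The helpers
below are an illustrative sketch only (the names and the standalone form are
assumptions, not code from these sources): blocks are 8x8, pp_w is the frame
width in blocks, and brwidth is br * pp_w, the number of blocks above the
current block row.

#include <stdint.h>

/* top-left pixel of block (br, bc), given brwidth = br * pp_w */
static inline uint8_t *block_ptr(uint8_t *rec, int brwidth, int bc)
{
    /* brwidth * 64 == (br * 8) rows of 'width' pixels; bc * 8 is the column offset */
    return rec + (brwidth << 6) + (bc << 3);
}

/* Luma uses one QP per 16x16 macroblock (looked up once per MB in the outer
 * loops, with brwidth = mbr * pp_w); chroma uses one QP per 8x8 block. */
static inline int block_qp(const int16_t *QP_store, int chr,
                           int brwidth, int mbc, int bc)
{
    return chr ? QP_store[brwidth + bc]
               : QP_store[(brwidth >> 2) + (mbc >> 1)];
}

The shift-based arithmetic adds nothing on modern compilers, but it mirrors
the deleted code, whose comments mention profiling on ARM920T-class cores.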
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp
deleted file mode 100644
index 795cf71..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/chvr_filter.cpp
+++ /dev/null
@@ -1,565 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void CombinedHorzVertRingFilter(
-    uint8 *rec,
-    int width,
-    int height,
-    int16 *QP_store,
-    int chr,
-    uint8 *pp_mod)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int index, counter;
-    int br, bc, incr, mbr, mbc;
-    int QP = 1;
-    int v[5];
-    uint8 *ptr, *ptr_c, *ptr_n;
-    int w1, w2, w3, w4;
-    int pp_w, pp_h, brwidth;
-    int sum, delta;
-    int a3_0, a3_1, a3_2, A3_0;
-    /* for Deringing Threshold approach (MPEG4)*/
-    int max_diff, thres, v0, h0, min_blk, max_blk;
-    int cnthflag;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* Calculate the width and height of the area in blocks (divide by 8) */
-    pp_w = (width >> 3);
-    pp_h = (height >> 3);
-
-    /* Set up various values needed for updating pointers into rec */
-    w1 = width;             /* Offset to next row in pixels */
-    w2 = width << 1;        /* Offset to two rows in pixels */
-    w3 = w1 + w2;           /* Offset to three rows in pixels */
-    w4 = w2 << 1;           /* Offset to four rows in pixels */
-    incr = width - BLKSIZE; /* Offset to next row after processing block */
-
-    /* Work down the area by two block rows per step */
-    for (mbr = 0; mbr < pp_h; mbr += 2)
-    {
-        /* brwidth contains the block number of the leftmost block
-         * of the current row */
-        brwidth = mbr * pp_w;
-
-        /* Work across each row by two block columns per step */
-        for (mbc = 0; mbc < pp_w; mbc += 2)
-        {
-            /* if the data is luminance info, get the correct
-                    * quantization parameter. One parameter per macroblock */
-            if (!chr)
-            {
-                /* brwidth/4 is the macroblock number and mbc/2 is the macroblock col number*/
-                QP = QP_store[(brwidth>>2) + (mbc>>1)];
-            }
-
-            /****************** Horiz. Filtering ********************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr + 1; br < mbr + 3; br++)    /* br is the row counter in blocks */
-            {
-                /* Set brwidth to the first (leftmost) block number of the next row */
-                /* brwidth is used as an index when counting blocks */
-                brwidth += pp_w;
-
-                /* Loop over two columns of blocks in the row */
-                for (bc = mbc; bc < mbc + 2; bc++)    /* bc is the column counter in blocks */
-                {
-                    /****** check boundary for deblocking ************/
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the ptr to the first pixel of the first block of the second row
-                        * brwidth * 64 is the pixel row offset
-                        * bc * 8 is the pixel column offset */
-                        ptr = rec + (brwidth << 6) + (bc << 3);
-
-                        /* Set the index to the current block of the second row counting in blocks */
-                        index = brwidth + bc;
-
-                        /* if the data is chrominance info, get the correct
-                         * quantization parameter. One parameter per block. */
-                        if (chr)
-                        {
-                            QP = QP_store[index];
-                        }
-
-                        /* Execute hard horizontal filter if semaphore for horizontal deblocking
-                          * is set for the current block and block immediately above it */
-                        if (((pp_mod[index]&0x02) != 0) && ((pp_mod[index-pp_w]&0x02) != 0))
-                        {   /* Hard filter */
-
-                            /* Set HorzHflag (bit 4) in the pp_mod location */
-                            pp_mod[index-pp_w] |= 0x10; /*  4/26/00 reuse pp_mod for HorzHflag*/
-
-                            /* Filter across the 8 pixels of the block */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *ptr - *(ptr - w1);
-
-                                /* if the magnitude of the difference is greater than the KThH threshold
-                                 * and within the quantization parameter, apply hard filter */
-                                if ((a3_0 > KThH || a3_0 < -KThH) && a3_0<QP && a3_0> -QP)
-                                {
-                                    ptr_c = ptr - w3;   /* Points to pixel three rows above */
-                                    ptr_n = ptr + w1;   /* Points to pixel one row below */
-                                    v[0] = (int)(*(ptr_c - w3));
-                                    v[1] = (int)(*(ptr_c - w2));
-                                    v[2] = (int)(*(ptr_c - w1));
-                                    v[3] = (int)(*ptr_c);
-                                    v[4] = (int)(*(ptr_c + w1));
-
-                                    sum = v[0]
-                                          + v[1]
-                                          + v[2]
-                                          + *ptr_c            /* current pixel */
-                                          + v[4]
-                                          + (*(ptr_c + w2))
-                                          + (*(ptr_c + w3));
-
-                                    delta = (sum + *ptr_c + 4) >> 3;   /* Average pixel values with rounding */
-                                    *(ptr_c) = (uint8) delta;
-
-                                    /* Move pointer down one row of pixels (points to pixel two rows
-                                     * above current pixel) */
-                                    ptr_c += w1;
-
-                                    for (counter = 0; counter < 5; counter++)
-                                    {
-                                        /* Subtract off the topmost pixel of the window and add in the pixel below */
-                                        sum = sum - v[counter] + *ptr_n;
-                                        /* Average the pixel values with rounding */
-                                        delta = (sum + *ptr_c + 4) >> 3;
-                                        *ptr_c = (uint8)(delta);
-
-                                        /* Increment pointers to next pixel row */
-                                        ptr_c += w1;
-                                        ptr_n += w1;
-                                    }
-                                }
-                                /* Increment pointer to next pixel */
-                                ++ptr;
-                            } /* index*/
-                        }
-                        else
-                        { /* soft filter*/
-
-                            /* Clear HorzHflag (bit 4) in the pp_mod location */
-                            pp_mod[index-pp_w] &= 0xef; /* reset 1110,1111 */
-
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *(ptr) - *(ptr - w1);
-
-                                /* if the magnitude of the difference is greater than the KTh threshold,
-                                 * apply soft filter */
-                                if ((a3_0 > KTh || a3_0 < -KTh))
-                                {
-
-                                    /* Sum of weighted differences */
-                                    a3_0 += ((*(ptr - w2) - *(ptr + w1)) << 1) + (a3_0 << 2);
-
-                                    /* Check if sum is less than the quantization parameter */
-                                    if (PV_ABS(a3_0) < (QP << 3))
-                                    {
-                                        a3_1 = *(ptr - w2) - *(ptr - w3);
-                                        a3_1 += ((*(ptr - w4) - *(ptr - w1)) << 1) + (a3_1 << 2);
-
-                                        a3_2  = *(ptr + w2) - *(ptr + w1);
-                                        a3_2 += ((*(ptr) - *(ptr + w3)) << 1) + (a3_2 << 2);
-
-                                        A3_0 = PV_ABS(a3_0) - PV_MIN(PV_ABS(a3_1), PV_ABS(a3_2));
-
-                                        if (A3_0 > 0)
-                                        {
-                                            A3_0 += A3_0 << 2;
-                                            A3_0 = (A3_0 + 32) >> 6;
-                                            if (a3_0 > 0)
-                                            {
-                                                A3_0 = -A3_0;
-                                            }
-
-                                            delta = (*(ptr - w1) - *(ptr)) >> 1;
-                                            if (delta >= 0)
-                                            {
-                                                if (delta >= A3_0)
-                                                {
-                                                    delta = PV_MAX(A3_0, 0);
-                                                }
-                                            }
-                                            else
-                                            {
-                                                if (A3_0 > 0)
-                                                {
-                                                    delta = 0;
-                                                }
-                                                else
-                                                {
-                                                    delta = PV_MAX(A3_0, delta);
-                                                }
-                                            }
-
-                                            *(ptr - w1) = (uint8)(*(ptr - w1) - delta);
-                                            *(ptr) = (uint8)(*(ptr) + delta);
-                                        }
-                                    } /*threshold*/
-                                }
-                                /* Increment pointer to next pixel */
-                                ++ptr;
-                            } /*index*/
-                        } /* Soft filter*/
-                    }/* boundary checking*/
-                }/*bc*/
-            }/*br*/
-            brwidth -= (pp_w << 1);
-
-
-            /****************** Vert. Filtering *********************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr; br < mbr + 2; br++)      /* br is the row counter in blocks */
-            {
-                for (bc = mbc + 1; bc < mbc + 3; bc++)  /* bc is the column counter in blocks */
-                {
-                    /****** check boundary for deblocking ************/
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the ptr to the first pixel of the first block of the second row
-                        * brwidth * 64 is the pixel row offset
-                        * bc * 8 is the pixel column offset */
-                        ptr = rec + (brwidth << 6) + (bc << 3);
-
-                        /* Set the index to the current block of the second row counting in blocks */
-                        index = brwidth + bc;
-
-                        /* if the data is chrominance info, get the correct
-                         * quantization parameter. One parameter per block. */
-                        if (chr)
-                        {
-                            QP = QP_store[index];
-                        }
-
-                        /* Execute hard vertical filter if semaphore for vertical deblocking
-                          * is set for the current block and block immediately left of it */
-                        if (((pp_mod[index-1]&0x01) != 0) && ((pp_mod[index]&0x01) != 0))
-                        {   /* Hard filter */
-
-                            /* Set VertHflag (bit 5) in the pp_mod location of previous block*/
-                            pp_mod[index-1] |= 0x20; /*  4/26/00 reuse pp_mod for VertHflag*/
-
-                            /* Filter across the 8 pixels of the block */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel
-                                 * and the pixel to the left of it */
-                                a3_0 = *ptr - *(ptr - 1);
-
-                                /* if the magnitude of the difference is greater than the KThH threshold
-                                 * and within the quantization parameter, apply hard filter */
-                                if ((a3_0 > KThH || a3_0 < -KThH) && a3_0<QP && a3_0> -QP)
-                                {
-                                    ptr_c = ptr - 3;
-                                    ptr_n = ptr + 1;
-                                    v[0] = (int)(*(ptr_c - 3));
-                                    v[1] = (int)(*(ptr_c - 2));
-                                    v[2] = (int)(*(ptr_c - 1));
-                                    v[3] = (int)(*ptr_c);
-                                    v[4] = (int)(*(ptr_c + 1));
-
-                                    sum = v[0]
-                                          + v[1]
-                                          + v[2]
-                                          + *ptr_c
-                                          + v[4]
-                                          + (*(ptr_c + 2))
-                                          + (*(ptr_c + 3));
-
-                                    delta = (sum + *ptr_c + 4) >> 3;
-                                    *(ptr_c) = (uint8) delta;
-
-                                    /* Move pointer one pixel to the right */
-                                    ptr_c += 1;
-                                    for (counter = 0; counter < 5; counter++)
-                                    {
-                                        /* Subtract off the leftmost pixel of the window and add in the pixel to the right */
-                                        sum = sum - v[counter] + *ptr_n;
-                                        /* Average the pixel values with rounding */
-                                        delta = (sum + *ptr_c + 4) >> 3;
-                                        *ptr_c = (uint8)(delta);
-
-                                        /* Increment pointers to next pixel */
-                                        ptr_c += 1;
-                                        ptr_n += 1;
-                                    }
-                                }
-                                /* Increment pointers to next pixel row */
-                                ptr += w1;
-                            } /* index*/
-                        }
-                        else
-                        { /* soft filter*/
-
-                            /* Clear VertHflag (bit 5) in the pp_mod location */
-                            pp_mod[index-1] &= 0xdf; /* reset 1101,1111 */
-                            for (index = BLKSIZE; index > 0; index--)
-                            {
-                                /* Difference between the current pixel and the pixel above it */
-                                a3_0 = *(ptr) - *(ptr - 1);
-
-                                /* if the magnitude of the difference is greater than the KTh threshold,
-                                 * apply soft filter */
-                                if ((a3_0 > KTh || a3_0 < -KTh))
-                                {
-
-                                    /* Sum of weighted differences */
-                                    a3_0 += ((*(ptr - 2) - *(ptr + 1)) << 1) + (a3_0 << 2);
-
-                                    /* Check if sum is less than the quantization parameter */
-                                    if (PV_ABS(a3_0) < (QP << 3))
-                                    {
-                                        a3_1 = *(ptr - 2) - *(ptr - 3);
-                                        a3_1 += ((*(ptr - 4) - *(ptr - 1)) << 1) + (a3_1 << 2);
-
-                                        a3_2  = *(ptr + 2) - *(ptr + 1);
-                                        a3_2 += ((*(ptr) - *(ptr + 3)) << 1) + (a3_2 << 2);
-
-                                        A3_0 = PV_ABS(a3_0) - PV_MIN(PV_ABS(a3_1), PV_ABS(a3_2));
-
-                                        if (A3_0 > 0)
-                                        {
-                                            A3_0 += A3_0 << 2;
-                                            A3_0 = (A3_0 + 32) >> 6;
-                                            if (a3_0 > 0)
-                                            {
-                                                A3_0 = -A3_0;
-                                            }
-
-                                            delta = (*(ptr - 1) - *(ptr)) >> 1;
-                                            if (delta >= 0)
-                                            {
-                                                if (delta >= A3_0)
-                                                {
-                                                    delta = PV_MAX(A3_0, 0);
-                                                }
-                                            }
-                                            else
-                                            {
-                                                if (A3_0 > 0)
-                                                {
-                                                    delta = 0;
-                                                }
-                                                else
-                                                {
-                                                    delta = PV_MAX(A3_0, delta);
-                                                }
-                                            }
-
-                                            *(ptr - 1) = (uint8)(*(ptr - 1) - delta);
-                                            *(ptr) = (uint8)(*(ptr) + delta);
-                                        }
-                                    } /*threshold*/
-                                }
-                                ptr += w1;
-                            } /*index*/
-                        } /* Soft filter*/
-                    } /* boundary*/
-                } /*bc*/
-                /* Increment pointer to next row of pixels */
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-
-            /****************** Deringing ***************************/
-            /* Process four blocks for the filtering        */
-            /********************************************************/
-            /* Loop over two rows of blocks */
-            for (br = mbr; br < mbr + 2; br++)
-            {
-                /* Loop over two columns of blocks in the row */
-                for (bc = mbc; bc < mbc + 2; bc++)
-                {
-                    /* Execute if the row and column counters are within the area */
-                    if (br < pp_h && bc < pp_w)
-                    {
-                        /* Set the index to the current block */
-                        index = brwidth + bc;
-
-                        /* Execute deringing if semaphore for deringing (bit-3 of pp_mod)
-                         * is set for the current block */
-                        if ((pp_mod[index]&0x04) != 0)
-                        {
-                            /* Don't process deringing if on an edge block */
-                            if (br > 0 && bc > 0 && br < pp_h - 1 && bc < pp_w - 1)
-                            {
-                                /* cnthflag = number of hard-filter flags set among the current
-                                 * block, the block above, and the previous block */
-                                cnthflag = ((pp_mod[index] & 0x10) +
-                                            (pp_mod[index-pp_w] & 0x10) +
-                                            ((pp_mod[index-1] >> 1) & 0x10) +
-                                            ((pp_mod[index] >> 1) & 0x10)) >> 4; /* 4/26/00*/
-
-                                /* Do the deringing if decision flags indicate it's necessary */
-                                if (cnthflag < 3)
-                                {
-                                    /* if the data is chrominance info, get the correct
-                                     * quantization parameter. One parameter per block. */
-                                    if (chr)
-                                    {
-                                        QP = QP_store[index];
-                                    }
-
-                                    /* Set the maximum amount a pixel value may be changed,
-                                     * based on the quantization parameter */
-                                    max_diff = (QP >> 2) + 4;
-
-                                    /* Set pointer to first pixel of current block */
-                                    ptr = rec + (brwidth << 6) + (bc << 3);
-
-                                    /* Find minimum and maximum value of pixel block */
-                                    FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                                    /* threshold determination */
-                                    thres = (max_blk + min_blk + 1) >> 1;
-
-                                    /* If the pixel range is greater than or equal to DERING_THR, smooth the region */
-                                    if ((max_blk - min_blk) >= DERING_THR) /*smooth 8x8 region*/
-#ifndef NoMMX
-                                    {
-                                        /* smooth all pixels in the block*/
-                                        DeringAdaptiveSmoothMMX(ptr, width, thres, max_diff);
-                                    }
-#else
-                                    {
-                                        /* Setup the starting point of the region to smooth */
-                                        v0 = (br << 3) - 1;
-                                        h0 = (bc << 3) - 1;
-
-                                        /*smooth 8x8 region*/
-                                        AdaptiveSmooth_NoMMX(rec, v0, h0, v0 + 1, h0 + 1, thres, width, max_diff);
-                                    }
-#endif
-                                }/*cnthflag*/
-                            } /*dering br==1 or bc==1 (boundary block)*/
-                            else    /* Process the boundary blocks */
-                            {
-                                /* Decide whether to perform deringing based on the semaphore flags
-                                 * of the neighboring blocks in each case. A certain number of
-                                 * hard filtering flags have to be set in order to signal the need
-                                 * for smoothing */
-                                if (br > 0 && br < pp_h - 1)
-                                {
-                                    if (bc > 0)
-                                    {
-                                        cnthflag = ((pp_mod[index-pp_w] & 0x10) +
-                                                    (pp_mod[index] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10)) >> 4;
-                                    }
-                                    else
-                                    {
-                                        cnthflag = ((pp_mod[index] & 0x10) +
-                                                    (pp_mod[index-pp_w] & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                }
-                                else if (bc > 0 && bc < pp_w - 1)
-                                {
-                                    if (br > 0)
-                                    {
-                                        cnthflag = ((pp_mod[index-pp_w] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                    else
-                                    {
-                                        cnthflag = ((pp_mod[index] & 0x10) +
-                                                    ((pp_mod[index-1] >> 1) & 0x10) +
-                                                    ((pp_mod[index] >> 1) & 0x10)) >> 4;
-                                    }
-                                }
-                                else /* at the corner do default*/
-                                {
-                                    cnthflag = 0;
-                                }
-
-                                /* Do the deringing if decision flags indicate it's necessary */
-                                if (cnthflag < 2)
-                                {
-
-                                    /* if the data is chrominance info, get the correct
-                                     * quantization parameter. One parameter per block. */
-                                    if (chr)
-                                    {
-                                        QP = QP_store[index];
-                                    }
-
-                                    /* Set the maximum amount a pixel value may be changed,
-                                     * based on the quantization parameter */
-                                    max_diff = (QP >> 2) + 4;
-
-                                    /* Set pointer to first pixel of current block */
-                                    ptr = rec + (brwidth << 6) + (bc << 3);
-
-                                    /* Find minimum and maximum value of pixel block */
-                                    FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                                    /* threshold determination */
-                                    thres = (max_blk + min_blk + 1) >> 1;
-
-                                    /* Setup the starting point of the region to smooth
-                                     * This is going to be a 4x4 region */
-                                    v0 = (br << 3) + 1;
-                                    h0 = (bc << 3) + 1;
-
-                                    /* If the pixel range is greater than or equal to DERING_THR, smooth the region */
-                                    if ((max_blk - min_blk) >= DERING_THR)
-                                    {
-                                        /* Smooth 4x4 region */
-                                        AdaptiveSmooth_NoMMX(rec, v0, h0, v0 - 3, h0 - 3, thres, width, max_diff);
-                                    }
-                                }/*cnthflag*/
-                            } /* br==0, bc==0*/
-                        }  /* dering*/
-                    } /*boundary condition*/
-                }/*bc*/
-                brwidth += pp_w;
-            }/*br*/
-            brwidth -= (pp_w << 1);
-        }/*mbc*/
-        brwidth += (pp_w << 1);
-    }/*mbr*/
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return ;
-}
-#endif
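Editor's note: the deringing path above reduces to a short decision per 8x8
block.  The sketch below is illustrative only; the function name and the
'smooth' callback are assumptions standing in for FindMaxMin and
AdaptiveSmooth_NoMMX / DeringAdaptiveSmoothMMX.  It finds the block's
extremes and, only when their spread reaches DERING_THR, smooths around the
mid-level threshold while limiting each pixel change to (QP >> 2) + 4.

#include <stdint.h>

static void dering_block(uint8_t *blk, int width, int qp, int dering_thr,
                         void (*smooth)(uint8_t *blk, int width,
                                        int thres, int max_diff))
{
    int mn = 255, mx = 0;
    for (int r = 0; r < 8; r++)            /* scan the 8x8 block */
        for (int c = 0; c < 8; c++) {
            int p = blk[r * width + c];
            if (p < mn) mn = p;
            if (p > mx) mx = p;
        }

    if (mx - mn >= dering_thr) {           /* enough ringing to bother */
        int thres    = (mx + mn + 1) >> 1; /* mid-level threshold */
        int max_diff = (qp >> 2) + 4;      /* clamp on per-pixel change */
        smooth(blk, width, thres, max_diff);
    }
}

Per the comments in the deleted code, the boundary-block branch applies the
same test but restricts the smoothed area to an inner 4x4 region.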
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
deleted file mode 100644
index 6499233..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/combined_decode.cpp
+++ /dev/null
@@ -1,840 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "mp4dec_lib.h" /* video decoder function prototypes */
-#include "vlc_decode.h"
-#include "bitstream.h"
-#include "scaling.h"
-#include "mbtype_mode.h"
-
-#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
-/* ======================================================================== */
-/*  Function : DecodeFrameCombinedMode()                                    */
-/*  Purpose  : Decode a frame of MPEG4 bitstream in combined mode.          */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Modified :                                                              */
-/*                                                                          */
-/*      03/30/2000 : Cleaned up and optimized the code.             */
-/*      03/31/2000 : Added proper handling of MB stuffing.          */
-/*      04/13/2000 : Rewrote this combined mode path completely     */
-/*                           so that it handles "Combined Mode With Error   */
-/*                           Resilience."  Now the code resembles the       */
-/*                           pseudo codes in MPEG-4 standard better.        */
-/*      10/13/2000 : Add fast VLC+dequant                           */
-/*      04/13/2001 : fix MB_stuffing                               */
-/*      08/07/2001 : remove MBzero                                  */
-/* ======================================================================== */
-PV_STATUS DecodeFrameCombinedMode(VideoDecData *video)
-{
-    PV_STATUS status;
-    int mbnum;
-    Vop *currVop = video->currVop;
-    BitstreamDecVideo *stream = video->bitstream;
-    int shortVideoHeader = video->shortVideoHeader;
-    int16 QP, *QPMB = video->QPMB;
-    uint8 *Mode = video->headerInfo.Mode;
-    int nTotalMB = video->nTotalMB;
-    int nMBPerRow = video->nMBPerRow;
-    int slice_counter;
-    uint32 tmpvar, long_zero_bits;
-    uint code;
-    int valid_stuffing;
-    int resync_marker_length;
-    int stuffing_length;
-
-    /* add this for error resilient, 05/18/2000 */
-    int32 startPacket;
-    int mb_start;
-    /* copy and pad to prev_Vop for INTER coding */
-    switch (currVop->predictionType)
-    {
-        case I_VOP :
-//      oscl_memset(Mode, MODE_INTRA, sizeof(uint8)*nTotalMB);
-            resync_marker_length = 17;
-            stuffing_length = 9;
-            break;
-        case P_VOP :
-            oscl_memset(video->motX, 0, sizeof(MOT)*4*nTotalMB);
-            oscl_memset(video->motY, 0, sizeof(MOT)*4*nTotalMB);
-//      oscl_memset(Mode, MODE_INTER, sizeof(uint8)*nTotalMB);
-            resync_marker_length = 16 + currVop->fcodeForward;
-            stuffing_length = 10;
-            break;
-        default :
-            mp4dec_log("DecodeFrameCombinedMode(): Vop type not supported.\n");
-            return PV_FAIL;
-    }
-#ifdef PV_ANNEX_IJKT_SUPPORT
-    if (video->shortVideoHeader)
-    {
-        if (video->advanced_INTRA)
-        {
-            if (video->modified_quant)
-            {
-                video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexIT;
-                video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader_AnnexT;
-            }
-            else
-            {
-                video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexI;
-                video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader;
-            }
-        }
-        else
-        {
-            if (video->modified_quant)
-            {
-                video->vlcDecCoeffInter = video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader_AnnexT;
-            }
-            else
-            {
-                video->vlcDecCoeffInter = video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader;
-            }
-        }
-    }
-
-#endif
-
-    /** Initialize sliceNo ***/
-    mbnum = slice_counter = 0;
-//  oscl_memset(video->sliceNo, 0, sizeof(uint8)*nTotalMB);
-    QP = video->currVop->quantizer;
-
-    do
-    {
-        /* This section is equivalent to motion_shape_texture() */
-        /*    in the MPEG-4 standard.     04/13/2000          */
-        mb_start = mbnum;
-        video->usePrevQP = 0;             /*  04/27/01 */
-        startPacket = getPointer(stream);
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-        if (video->modified_quant)
-        {
-            video->QP_CHR = MQ_chroma_QP_table[QP];
-        }
-        else
-        {
-            video->QP_CHR = QP;     /* ANNEX_T */
-        }
-#endif
-        /* remove any stuffing bits */
-        BitstreamShowBits16(stream, stuffing_length, &code);
-        while (code == 1)
-        {
-            PV_BitstreamFlushBits(stream, stuffing_length);
-            BitstreamShowBits16(stream, stuffing_length, &code);
-        }
-
-        do
-        {
-            /* we need video->mbnum in lower level functions */
-            video->mbnum = mbnum;
-            video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);
-            video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-            /* assign slice number for each macroblocks */
-            video->sliceNo[mbnum] = (uint8) slice_counter;
-
-            /* decode COD, MCBPC, ACpred_flag, CPBY and DQUANT */
-            /* We have to discard stuffed MB header */
-            status = GetMBheader(video, &QP);
-
-            if (status != PV_SUCCESS)
-            {
-                VideoDecoderErrorDetected(video);
-                video->mbnum = mb_start;
-                movePointerTo(stream, (startPacket & -8));
-                break;
-            }
-
-            /* Store the QP value for later use in AC prediction */
-            QPMB[mbnum] = QP;
-
-            if (Mode[mbnum] != MODE_SKIPPED)
-            {
-                /* decode the DCT coefficients for the MB */
-                status = GetMBData(video);
-                if (status != PV_SUCCESS)
-                {
-                    VideoDecoderErrorDetected(video);
-                    video->mbnum = mb_start;
-                    movePointerTo(stream, (startPacket & -8));
-                    break;
-                }
-            }
-            else /* MODE_SKIPPED */
-            {
-                SkippedMBMotionComp(video); /*  08/04/05 */
-            }
-            // Motion compensation done; the prediction is in video->mblock->pred_block
-            mbnum++;
-
-            /* remove any stuffing bits */
-            BitstreamShowBits16(stream, stuffing_length, &code);
-            while (code == 1)
-            {
-                PV_BitstreamFlushBits(stream, stuffing_length);
-                BitstreamShowBits16(stream, stuffing_length, &code);
-            }
-
-            /* have we reached the end of the video packet or vop? */
-            if (shortVideoHeader)
-            {
-#ifdef PV_ANNEX_IJKT_SUPPORT
-                if (!video->slice_structure)
-                {
-#endif
-                    if (mbnum >= (int)(video->mbnum_row + 1)*video->nMBinGOB)   /*  10/11/01 */
-                    {
-                        if (mbnum >= nTotalMB) return PV_SUCCESS;
-                        status = BitstreamShowBits32(stream, GOB_RESYNC_MARKER_LENGTH, &tmpvar);
-
-                        if (tmpvar == GOB_RESYNC_MARKER)
-                        {
-                            break;
-                        }
-                        else
-                        {
-                            status = PV_BitstreamShowBitsByteAlign(stream, GOB_RESYNC_MARKER_LENGTH, &tmpvar);
-                            if (tmpvar == GOB_RESYNC_MARKER) break;
-                        }
-                    }
-#ifdef PV_ANNEX_IJKT_SUPPORT
-                }
-                else
-                {
-
-                    if (mbnum >= nTotalMB)  /* in case no valid stuffing  06/23/01 */
-                    {
-                        valid_stuffing = validStuffing_h263(stream);
-                        if (valid_stuffing == 0)
-                        {
-                            VideoDecoderErrorDetected(video);
-                            ConcealPacket(video, mb_start, nTotalMB, slice_counter);
-                        }
-                        return PV_SUCCESS;
-                    }
-                    /* ANNEX_K */
-                    PV_BitstreamShowBitsByteAlignNoForceStuffing(stream, 17, &tmpvar);
-                    if (tmpvar == RESYNC_MARKER)
-                    {
-                        valid_stuffing = validStuffing_h263(stream);
-                        if (valid_stuffing)
-                            break; /*  06/21/01 */
-                    }
-
-                }
-#endif
-            }
-            else
-            {
-                if (mbnum >= nTotalMB)  /* in case no valid stuffing  06/23/01 */
-                {
-                    /*  11/01/2002 if we are at the end of the frame and there is garbage data
-                    after it (i.e. no next start code), treat it as an error unless the stuffing is valid */
-                    valid_stuffing = validStuffing(stream);
-                    if (valid_stuffing == 0)
-                    {
-                        /* end 11/01/2002 */
-                        VideoDecoderErrorDetected(video);
-                        ConcealPacket(video, mb_start, nTotalMB, slice_counter);
-                    }
-                    PV_BitstreamByteAlign(stream);
-                    return PV_SUCCESS;
-                }
-
-                status = PV_BitstreamShowBitsByteAlign(stream, 23, &tmpvar); /* this call is valid for f_code < 8 */
-                long_zero_bits = !tmpvar;
-
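-                /* a 23-bit window covers the longest resync marker (16 + fcode bits,
-                   fcode <= 7); compare its top resync_marker_length bits against
-                   RESYNC_MARKER, and also accept an all-zero window, which can occur
-                   with zero padding at the end of the stream */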
-                if ((tmpvar >> (23 - resync_marker_length)) == RESYNC_MARKER || long_zero_bits)
-                {
-                    valid_stuffing = validStuffing(stream);
-                    if (valid_stuffing)
-                        break; /*  06/21/01 */
-                }
-
-            }
-        }
-        while (TRUE);
-
-        if (shortVideoHeader)
-        { /* We need to check newgob to refresh quantizer */
-#ifdef PV_ANNEX_IJKT_SUPPORT
-            if (!video->slice_structure)
-            {
-#endif
-                while ((status = PV_GobHeader(video)) == PV_FAIL)
-                {
-                    if ((status = quickSearchGOBHeader(stream)) != PV_SUCCESS)
-                    {
-                        break;
-                    }
-                }
-
-                mbnum = currVop->gobNumber * video->nMBinGOB;
-#ifdef PV_ANNEX_IJKT_SUPPORT
-            }
-            else
-            {
-                while ((status = PV_H263SliceHeader(video, &mbnum)) == PV_FAIL)
-                {
-                    if ((status = quickSearchH263SliceHeader(stream)) != PV_SUCCESS)
-                    {
-                        break;
-                    }
-                }
-            }
-
-#endif
-        }
-        else
-        {
-            while ((status = PV_ReadVideoPacketHeader(video, &mbnum)) == PV_FAIL)
-            {
-                if ((status = quickSearchVideoPacketHeader(stream, resync_marker_length)) != PV_SUCCESS)
-                {
-                    break;
-                }
-            }
-        }
-
-        if (status == PV_END_OF_VOP)
-        {
-            mbnum = nTotalMB;
-        }
-
-        if (mbnum > video->mbnum + 1)
-        {
-            ConcealPacket(video, video->mbnum, mbnum, slice_counter);
-        }
-        QP = video->currVop->quantizer;
-        slice_counter++;
-        if (mbnum >= nTotalMB) break;
-
-    }
-    while (TRUE);
-    return PV_SUCCESS;
-}
-
-
-/* ============================================================================ */
-/*  Function : GetMBHeader()                                                    */
-/*  Purpose  : Decode MB header, not_coded, mcbpc, ac_pred_flag, cbpy, dquant.  */
-/*  In/out   :                                                                  */
-/*  Return   :                                                                  */
-/*  Modified :                                                                  */
-/*                                                                              */
-/*      3/29/00 : Changed the returned value and optimized the code.    */
-/*      4/01/01 : new ACDC prediction structure                         */
-/* ============================================================================ */
-PV_STATUS GetMBheader(VideoDecData *video, int16 *QP)
-{
-    BitstreamDecVideo *stream = video->bitstream;
-    int mbnum = video->mbnum;
-    uint8 *Mode = video->headerInfo.Mode;
-    int x_pos = video->mbnum_col;
-    typeDCStore *DC = video->predDC + mbnum;
-    typeDCACStore *DCAC_row = video->predDCAC_row + x_pos;
-    typeDCACStore *DCAC_col = video->predDCAC_col;
-    const static int16  DQ_tab[4] = { -1, -2, 1, 2};
-
-    int CBPY, CBPC;
-    int MBtype, VopType;
-    int MCBPC;
-    uint DQUANT;
-    int comp;
-    Bool mb_coded;
-
-    VopType = video->currVop->predictionType;
-    mb_coded = ((VopType == I_VOP) ? TRUE : !BitstreamRead1Bits_INLINE(stream));
-
-    if (!mb_coded)
-    {
-        /* skipped macroblock */
-        Mode[mbnum] = MODE_SKIPPED;
-        //oscl_memset(DCAC_row, 0, sizeof(typeDCACStore));   /*  SKIPPED_ACDC */
-        //oscl_memset(DCAC_col, 0, sizeof(typeDCACStore));
-        ZERO_OUT_64BYTES(DCAC_row);
-        ZERO_OUT_64BYTES(DCAC_col); /*  08/12/05 */
-
-        for (comp = 0; comp < 6; comp++)
-        {
-            (*DC)[comp] = mid_gray;
-        }
-    }
-    else
-    {
-        /* coded macroblock */
-        if (VopType == I_VOP)
-        {
-            MCBPC = PV_VlcDecMCBPC_com_intra(stream);
-        }
-        else
-        {
-#ifdef PV_ANNEX_IJKT_SUPPORT
-            if (!video->deblocking)
-            {
-                MCBPC = PV_VlcDecMCBPC_com_inter(stream);
-            }
-            else
-            {
-                MCBPC = PV_VlcDecMCBPC_com_inter_H263(stream);
-            }
-#else
-            MCBPC = PV_VlcDecMCBPC_com_inter(stream);
-#endif
-        }
-
-        if (VLC_ERROR_DETECTED(MCBPC))
-        {
-            return PV_FAIL;
-        }
-
-        Mode[mbnum] = (uint8)(MBtype = MBtype_mode[MCBPC & 7]);
-        CBPC = (MCBPC >> 4) & 3;
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-        if (MBtype & INTRA_MASK)
-        {
-            if (!video->shortVideoHeader)
-            {
-                video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits(stream);
-            }
-            else
-            {
-                if (video->advanced_INTRA)
-                {
-                    if (!BitstreamRead1Bits(stream))
-                    {
-                        video->acPredFlag[mbnum] = 0;
-                    }
-                    else
-                    {
-                        video->acPredFlag[mbnum] = 1;
-                        if (BitstreamRead1Bits(stream))
-                        {
-                            video->mblock->direction = 0;
-                        }
-                        else
-                        {
-                            video->mblock->direction = 1;
-                        }
-                    }
-                }
-                else
-                {
-                    video->acPredFlag[mbnum] = 0;
-                }
-            }
-        }
-#else
-        if ((MBtype & INTRA_MASK) && !video->shortVideoHeader)
-        {
-            video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits_INLINE(stream);
-        }
-        else
-        {
-            video->acPredFlag[mbnum] = 0;
-        }
-#endif
-        CBPY = PV_VlcDecCBPY(stream, MBtype & INTRA_MASK); /* INTRA || INTRA_Q */
-        if (CBPY < 0)
-        {
-            return PV_FAIL;
-        }
-
-        // GW 04/23/99
-        video->headerInfo.CBP[mbnum] = (uint8)(CBPY << 2 | (CBPC & 3));
-#ifdef PV_ANNEX_IJKT_SUPPORT
-        if (MBtype & Q_MASK)
-        {
-            if (!video->modified_quant)
-            {
-                DQUANT = BitstreamReadBits16(stream, 2);
-                *QP += DQ_tab[DQUANT];
-
-                if (*QP < 1) *QP = 1;
-                else if (*QP > 31) *QP = 31;
-                video->QP_CHR = *QP;  /* ANNEX_T */
-            }
-            else
-            {
-                if (BitstreamRead1Bits(stream))
-                {
-                    if (BitstreamRead1Bits(stream))
-                    {
-                        *QP += DQ_tab_Annex_T_11[*QP];
-                    }
-                    else
-                    {
-                        *QP += DQ_tab_Annex_T_10[*QP];
-                    }
-                    if (*QP < 1) *QP = 1;
-                    else if (*QP > 31) *QP = 31;
-                }
-                else
-                {
-                    *QP = (int16)BitstreamReadBits16(stream, 5);
-                }
-                video->QP_CHR =  MQ_chroma_QP_table[*QP];
-            }
-        }
-#else
-        if (MBtype & Q_MASK)
-        {
-            DQUANT = BitstreamReadBits16(stream, 2);
-            *QP += DQ_tab[DQUANT];
-
-            if (*QP < 1) *QP = 1;
-            else if (*QP > 31) *QP = 31;
-        }
-#endif
-    }
-    return PV_SUCCESS;
-}
-
-
-
-
-
-/***********************************************************CommentBegin******
-*       3/10/00  : initial modification to the
-*                new PV-Decoder Lib format.
-*       4/2/2000 : Cleanup and error-handling modification.  This
-*                   function has been divided into several sub-functions for
-*                   better coding style and maintenance reasons.  I also
-*                   greatly shrank the code size here.
-*       9/18/2000 : VlcDecode+Dequant optimization *
-*       4/01/2001 : new ACDC prediction structure
-*       3/29/2002 : removed GetIntraMB and GetInterMB
-***********************************************************CommentEnd********/
-PV_STATUS GetMBData(VideoDecData *video)
-{
-    BitstreamDecVideo *stream = video->bitstream;
-    int mbnum = video->mbnum;
-    MacroBlock *mblock = video->mblock;
-    int16 *dataBlock;
-    PIXEL *c_comp;
-    uint mode = video->headerInfo.Mode[mbnum];
-    uint CBP = video->headerInfo.CBP[mbnum];
-    typeDCStore *DC = video->predDC + mbnum;
-    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
-    int16 QP = video->QPMB[mbnum];
-    int16 QP_tmp = QP;
-    int width = video->width;
-    int  comp;
-    int  switched;
-    int ncoeffs[6] = {0, 0, 0, 0, 0, 0};
-    int *no_coeff = mblock->no_coeff;
-    int16 DC_coeff;
-    PV_STATUS status;
-
-#ifdef PV_POSTPROC_ON
-    /* post-processing */
-    uint8 *pp_mod[6];
-    int TotalMB = video->nTotalMB;
-    int MB_in_width = video->nMBPerRow;
-#endif
-    int y_pos = video->mbnum_row;
-    int x_pos = video->mbnum_col;
-    int32 offset = (int32)(y_pos << 4) * width + (x_pos << 4);
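-    /* offset addresses the macroblock's top-left luma pixel: (y_pos * 16) * width +
-       x_pos * 16.  The chroma blocks below use (offset >> 2) + (x_pos << 2), which
-       equals (y_pos * 8) * (width / 2) + x_pos * 8 on the half-resolution planes. */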
-
-    /* Decode each of the six 8-by-8 blocks: comp 0 ~ 3 are luminance blocks, 4 ~ 5 */
-    /*  are chrominance blocks.   04/03/2000.                          */
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        /** post-processing ***/
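-        /* pp_mod[0..3] index the per-8x8 luma semaphore map (2x2 entries per MB);
-           pp_mod[4] and pp_mod[5] index the Cb and Cr maps stored after the
-           4 * nTotalMB luma entries */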
-        pp_mod[0] = video->pstprcTypCur + (y_pos << 1) * (MB_in_width << 1) + (x_pos << 1);
-        pp_mod[1] = pp_mod[0] + 1;
-        pp_mod[2] = pp_mod[0] + (MB_in_width << 1);
-        pp_mod[3] = pp_mod[2] + 1;
-        pp_mod[4] = video->pstprcTypCur + (TotalMB << 2) + mbnum;
-        pp_mod[5] = pp_mod[4] + TotalMB;
-    }
-#endif
-
-    /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
-
-    if (mode & INTRA_MASK) /* MODE_INTRA || MODE_INTRA_Q */
-    {
-        switched = 0;
-        if (intra_dc_vlc_thr)
-        {
-            if (video->usePrevQP)
-                QP_tmp = video->QPMB[mbnum-1];   /* running QP  04/26/01 */
-
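-            /* intra_dc_vlc_thr selects when the intra DC coefficient is coded with
-               the AC VLC instead of the DC VLC: 0 = never, 7 = always, and 1..6 map
-               to a running-QP threshold of 2 * intra_dc_vlc_thr + 11 (13..23) */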
-            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
-        }
-
-        mblock->DCScalarLum = cal_dc_scaler(QP, LUMINANCE_DC_TYPE);   /*  3/01/01 */
-        mblock->DCScalarChr = cal_dc_scaler(QP, CHROMINANCE_DC_TYPE);
-
-        for (comp = 0; comp < 6; comp++)
-        {
-            dataBlock = mblock->block[comp];    /* 10/20/2000 */
-
-            if (video->shortVideoHeader)
-            {
-#ifdef PV_ANNEX_IJKT_SUPPORT
-                if (!video->advanced_INTRA)
-                {
-#endif
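-                    /* short video header (H.263) INTRADC is an 8-bit fixed-length
-                       code: 0 and 128 are forbidden values, and 255 is the escape
-                       used to represent level 128 */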
-                    DC_coeff = (int16) BitstreamReadBits16_INLINE(stream, 8);
-
-                    if ((DC_coeff & 0x7f) == 0) /* 128 & 0  */
-                    {
-                        /* currently we will only signal FAIL for 128. We will ignore the 0 case  */
-                        if (DC_coeff == 128)
-                        {
-                            return PV_FAIL;
-                        }
-                        else
-                        {
-                            VideoDecoderErrorDetected(video);
-                        }
-                    }
-                    if (DC_coeff == 255)
-                    {
-                        DC_coeff = 128;
-                    }
-                    dataBlock[0] = (int16) DC_coeff;
-#ifdef PV_ANNEX_IJKT_SUPPORT
-                }
-#endif
-                ncoeffs[comp] = VlcDequantH263IntraBlock_SH(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-
-            }
-            else
-            {
-                if (switched == 0)
-                {
-                    status = PV_DecodePredictedIntraDC(comp, stream, &DC_coeff);
-                    if (status != PV_SUCCESS) return PV_FAIL;
-
-                    dataBlock[0] = (int16) DC_coeff;
-                }
-                ncoeffs[comp] = VlcDequantH263IntraBlock(video, comp,
-                                switched, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-            }
-
-            if (VLC_ERROR_DETECTED(ncoeffs[comp]))
-            {
-                if (switched)
-                    return PV_FAIL;
-                else
-                {
-                    ncoeffs[comp] = 1;
-                    oscl_memset((dataBlock + 1), 0, sizeof(int16)*63);
-                }
-            }
-            no_coeff[comp] = ncoeffs[comp];
-
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8) PostProcSemaphore(dataBlock);
-#endif
-        }
-        MBlockIDCT(video);
-    }
-    else      /* INTER modes */
-    {   /*  moved it here Aug 15, 2005 */
-        /* decode the motion vector (if there are any) */
-        status = PV_GetMBvectors(video, mode);
-        if (status != PV_SUCCESS)
-        {
-            return status;
-        }
-
-
-        MBMotionComp(video, CBP);
-        c_comp  = video->currVop->yChan + offset;
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-        for (comp = 0; comp < 4; comp++)
-        {
-            (*DC)[comp] = mid_gray;
-            if (CBP & (1 << (5 - comp)))
-            {
-                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-                if (VLC_ERROR_DETECTED(ncoeffs[comp])) return PV_FAIL;
-
-                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
-                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
-
-#ifdef PV_POSTPROC_ON
-                /* for inter just test for ringing */
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
-            }
-            else
-            {
-                /* no IDCT for all zeros blocks  03/28/2002 */
-                /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = 0;
-#endif
-            }
-        }
-
-        video->QPMB[mbnum] = video->QP_CHR;     /* ANNEX_T */
-
-
-
-        (*DC)[4] = mid_gray;
-        if (CBP & 2)
-        {
-            ncoeffs[4] = VlcDequantH263InterBlock(video, 4, mblock->bitmapcol[4], &mblock->bitmaprow[4]);
-            if (VLC_ERROR_DETECTED(ncoeffs[4])) return PV_FAIL;
-
-            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
-                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
-
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = 0;
-#endif
-        }
-        (*DC)[5] = mid_gray;
-        if (CBP & 1)
-        {
-            ncoeffs[5] = VlcDequantH263InterBlock(video, 5, mblock->bitmapcol[5], &mblock->bitmaprow[5]);
-            if (VLC_ERROR_DETECTED(ncoeffs[5])) return PV_FAIL;
-
-            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
-                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
-
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = 0;
-#endif
-        }
-        video->QPMB[mbnum] = QP;  /* restore the QP values  ANNEX_T*/
-#else
-        for (comp = 0; comp < 4; comp++)
-        {
-            (*DC)[comp] = mid_gray;
-            if (CBP & (1 << (5 - comp)))
-            {
-                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-                if (VLC_ERROR_DETECTED(ncoeffs[comp])) return PV_FAIL;
-
-                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
-                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
-
-#ifdef PV_POSTPROC_ON
-                /* for inter just test for ringing */
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
-            }
-            else
-            {
-                /* no IDCT for all zeros blocks  03/28/2002 */
-                /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-                if (video->postFilterType != PV_NO_POST_PROC)
-                    *pp_mod[comp] = 0;
-#endif
-            }
-        }
-
-        (*DC)[4] = mid_gray;
-        if (CBP & 2)
-        {
-            ncoeffs[4] = VlcDequantH263InterBlock(video, 4, mblock->bitmapcol[4], &mblock->bitmaprow[4]);
-            if (VLC_ERROR_DETECTED(ncoeffs[4])) return PV_FAIL;
-
-            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
-                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
-
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[4] = 0;
-#endif
-        }
-        (*DC)[5] = mid_gray;
-        if (CBP & 1)
-        {
-            ncoeffs[5] = VlcDequantH263InterBlock(video, 5, mblock->bitmapcol[5], &mblock->bitmaprow[5]);
-            if (VLC_ERROR_DETECTED(ncoeffs[5])) return PV_FAIL;
-
-            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
-                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
-
-#ifdef PV_POSTPROC_ON
-            /* for inter just test for ringing */
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
-        }
-        else
-        {
-            /* no IDCT for all zeros blocks  03/28/2002 */
-            /*              BlockIDCT();                */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[5] = 0;
-#endif
-        }   /* end else: all-zero chroma-V block */
-#endif  // PV_ANNEX_IJKT_SUPPORT
-
-    }
-
-    video->usePrevQP = 1;          /* should be set after decoding the first Coded  04/27/01 */
-    return PV_SUCCESS;
-}
-
-
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
deleted file mode 100644
index 00db04b..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/datapart_decode.cpp
+++ /dev/null
@@ -1,794 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "mp4dec_lib.h"
-#include "vlc_decode.h"
-#include "bitstream.h"
-#include "scaling.h"
-#include "mbtype_mode.h"
-#include "idct.h"
-
-#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
-/* ======================================================================== */
-/*  Function : DecodeFrameDataPartMode()                                    */
-/*  Purpose  : Decode a frame of an MPEG-4 bitstream in data partitioning mode. */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Modified :                                                              */
-/*                                                                          */
-/*      04/25/2000 : Rewrite the data partitioning path completely  */
-/*                           according to the pseudocode in the MPEG-4      */
-/*                           standard.                                      */
-/*  Modified : 09/18/2000 add fast VlcDecode+Dequant                    */
-/*             04/17/2001 cleanup                                       */
-/* ======================================================================== */
-PV_STATUS DecodeFrameDataPartMode(VideoDecData *video)
-{
-    PV_STATUS status;
-    Vop *currVop = video->currVop;
-    BitstreamDecVideo *stream = video->bitstream;
-
-    int nMBPerRow = video->nMBPerRow;
-
-    int vopType = currVop->predictionType;
-    int mbnum;
-    int nTotalMB = video->nTotalMB;
-    int slice_counter;
-    int resync_marker_length;
-
-    /* copy and pad to prev_Vop for INTER coding */
-    switch (vopType)
-    {
-        case I_VOP :
-//      oscl_memset(Mode, MODE_INTRA, sizeof(uint8)*nTotalMB);
-            resync_marker_length = 17;
-            break;
-        case P_VOP :
-            oscl_memset(video->motX, 0, sizeof(MOT)*4*nTotalMB);
-            oscl_memset(video->motY, 0, sizeof(MOT)*4*nTotalMB);
-//      oscl_memset(Mode, MODE_INTER, sizeof(uint8)*nTotalMB);
-            resync_marker_length = 16 + currVop->fcodeForward;
-            break;
-        default :
-            mp4dec_log("DecodeFrameDataPartMode(): Vop type not supported.\n");
-            return PV_FAIL;
-    }
-
-    /** Initialize sliceNo ***/
-    mbnum = slice_counter = 0;
-//  oscl_memset(video->sliceNo, 0, sizeof(uint8)*nTotalMB);
-
-    do
-    {
-        /* This section is equivalent to motion_shape_texture() */
-        /* in the MPEG-4 standard.            04/13/2000      */
-        video->mbnum = mbnum;
-        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-
-        switch (vopType)
-        {
-            case I_VOP :
-                status = DecodeDataPart_I_VideoPacket(video, slice_counter);
-                break;
-
-            case P_VOP :
-                status = DecodeDataPart_P_VideoPacket(video, slice_counter);
-                break;
-
-            default :
-                mp4dec_log("DecodeFrameDataPartMode(): Vop type not supported.\n");
-                return PV_FAIL;
-        }
-
-        while ((status = PV_ReadVideoPacketHeader(video, &mbnum)) == PV_FAIL)
-        {
-            if ((status = quickSearchVideoPacketHeader(stream, resync_marker_length)) != PV_SUCCESS)
-            {
-                break;
-            }
-        }
-
-        if (status == PV_END_OF_VOP)
-        {
-            mbnum = nTotalMB;
-        }
-
-        if (mbnum > video->mbnum + 1)
-        {
-            ConcealPacket(video, video->mbnum, mbnum, slice_counter);
-        }
-        slice_counter++;
-        if (mbnum >= nTotalMB)
-        {
-            break;
-        }
-
-
-    }
-    while (TRUE);
-
-    return PV_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/*  Function : DecodeDataPart_I_VideoPacket()                               */
-/*  Date     : 04/25/2000                                                   */
-/*  Purpose  : Decode Data Partitioned Mode Video Packet in I-VOP           */
-/*  In/out   :                                                              */
-/*  Return   : PV_SUCCESS on success, PV_FAIL on failure.                   */
-/*  Modified : 09/18/2000 add fast VlcDecode+Dequant                    */
-/*             04/01/2001 fixed MB_stuffing, removed unnecessary code   */
-/* ======================================================================== */
-PV_STATUS DecodeDataPart_I_VideoPacket(VideoDecData *video, int slice_counter)
-{
-    PV_STATUS status;
-    uint8 *Mode = video->headerInfo.Mode;
-    BitstreamDecVideo *stream = video->bitstream;
-    int  nTotalMB = video->nTotalMB;
-    int  mbnum, mb_start, mb_end;
-    int16 QP, *QPMB = video->QPMB;
-    int  MBtype, MCBPC, CBPY;
-    uint32 tmpvar;
-    uint code;
-    int nMBPerRow = video->nMBPerRow;
-    Bool valid_stuffing;
-    int32 startSecondPart, startFirstPart = getPointer(stream);
-
-    /* decode the first partition */
-    QP = video->currVop->quantizer;
-    mb_start = mbnum = video->mbnum;
-    video->usePrevQP = 0;         /*  04/27/01 */
-
-
-    BitstreamShowBits16(stream, 9, &code);
-    while (code == 1)
-    {
-        PV_BitstreamFlushBits(stream, 9);
-        BitstreamShowBits16(stream, 9, &code);
-    }
-
-    do
-    {
-        /* decode COD, MCBPC, ACpred_flag, CBPY and DQUANT */
-        MCBPC = PV_VlcDecMCBPC_com_intra(stream);
-
-        if (!VLC_ERROR_DETECTED(MCBPC))
-        {
-            Mode[mbnum] = (uint8)(MBtype = MBtype_mode[MCBPC & 7]);
-            video->headerInfo.CBP[mbnum] = (uint8)((MCBPC >> 4) & 3);
-            status = GetMBheaderDataPart_DQUANT_DC(video, &QP);
-            video->usePrevQP = 1;        /* set it after the first coded MB      04/27/01 */
-        }
-        else
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            video->mbnum = mb_start;
-            movePointerTo(stream, startFirstPart);
-            return PV_FAIL;
-        }
-
-        video->sliceNo[mbnum] = (uint8) slice_counter;
-        QPMB[mbnum] = QP;
-        video->mbnum = ++mbnum;
-
-        BitstreamShowBits16(stream, 9, &code);
-        while (code == 1)
-        {
-            PV_BitstreamFlushBits(stream, 9);
-            BitstreamShowBits16(stream, 9, &code);
-        }
-        /* have we reached the end of the video packet or vop? */
-        status = BitstreamShowBits32(stream, DC_MARKER_LENGTH, &tmpvar);
-
-    }
-    while (tmpvar != DC_MARKER && video->mbnum < nTotalMB);
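-    /* In a data-partitioned I-VOP packet the first partition (MCBPC, DQUANT and the
-       predicted DC values decoded above) ends with the dc_marker; ac_pred_flag,
-       CBPY and the DCT coefficients follow in the second partition. */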
-
-    if (tmpvar == DC_MARKER)
-    {
-        PV_BitstreamFlushBits(stream, DC_MARKER_LENGTH);
-    }
-    else
-    {
-        status = quickSearchDCM(stream);
-        if (status == PV_SUCCESS)
-        {
-            /* the only way to end up here is in the last packet, with stuffing at
-            the end of the first partition */
-            PV_BitstreamFlushBits(stream, DC_MARKER_LENGTH);
-        }
-        else
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            movePointerTo(stream, startFirstPart);
-            video->mbnum = mb_start;
-            /* concealment will be taken care of in the upper layer */
-            return PV_FAIL;
-        }
-    }
-
-    /* decode the second partition */
-    startSecondPart = getPointer(stream);
-
-    mb_end = video->mbnum;
-
-    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
-    {
-        MBtype = Mode[mbnum];
-        /* No skipped mode in I-packets  3/1/2001    */
-        video->mbnum = mbnum;
-
-        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-        /* ac_pred_flag is always present in data partitioned mode  04/10/01 */
-        video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits(stream);
-
-        CBPY = PV_VlcDecCBPY(stream, MBtype & INTRA_MASK); /* MODE_INTRA || MODE_INTRA_Q */
-        if (CBPY < 0)
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            movePointerTo(stream, startSecondPart); /*  */
-            /* Conceal packet,  05/15/2000 */
-            ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
-            return PV_FAIL;
-        }
-
-        video->headerInfo.CBP[mbnum] |= (uint8)(CBPY << 2);
-    }
-
-    video->usePrevQP = 0;
-
-    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
-    {
-        video->mbnum = mbnum;
-
-        video->mbnum_row = PV_GET_ROW(mbnum , nMBPerRow);  /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-        /* No skipped mode in I-packets  3/1/2001    */
-        /* decode the DCT coefficients for the MB */
-        status = GetMBData_DataPart(video);
-        if (status != PV_SUCCESS)
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            movePointerTo(stream, startSecondPart); /*  */
-            /* Conceal packet,  05/15/2000 */
-            ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
-            return status;
-        }
-        video->usePrevQP = 1;           /*  04/27/01 should be set after decoding first MB */
-    }
-
-    valid_stuffing = validStuffing(stream);
-    if (!valid_stuffing)
-    {
-        VideoDecoderErrorDetected(video);
-        movePointerTo(stream, startSecondPart);
-        ConcealTexture_I(video, startFirstPart, mb_start, mb_end, slice_counter);
-        return PV_FAIL;
-    }
-    return PV_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/*  Function : DecodeDataPart_P_VideoPacket()                               */
-/*  Date     : 04/25/2000                                                   */
-/*  Purpose  : Decode Data Partitioned Mode Video Packet in P-VOP           */
-/*  In/out   :                                                              */
-/*  Return   : PV_SUCCESS on success, PV_FAIL on failure.                   */
-/*  Modified :   09/18/2000,  fast VlcDecode+Dequant                        */
-/*              04/13/2001,  fixed MB_stuffing, new ACDC pred structure,  */
-/*                              cleanup                                     */
-/*              08/07/2001,  remove MBzero                              */
-/* ======================================================================== */
-PV_STATUS DecodeDataPart_P_VideoPacket(VideoDecData *video, int slice_counter)
-{
-    PV_STATUS status;
-    uint8 *Mode = video->headerInfo.Mode;
-    BitstreamDecVideo *stream = video->bitstream;
-    int nTotalMB = video->nTotalMB;
-    int mbnum, mb_start, mb_end;
-    int16 QP, *QPMB = video->QPMB;
-    int MBtype, CBPY;
-    Bool valid_stuffing;
-    int intra_MB;
-    uint32 tmpvar;
-    uint code;
-    int32  startFirstPart, startSecondPart;
-    int nMBPerRow = video->nMBPerRow;
-    uint8 *pbyte;
-    /* decode the first partition */
-    startFirstPart = getPointer(stream);
-    mb_start = video->mbnum;
-    video->usePrevQP = 0;            /*  04/27/01 */
-
-    BitstreamShowBits16(stream, 10, &code);
-    while (code == 1)
-    {
-        PV_BitstreamFlushBits(stream, 10);
-        BitstreamShowBits16(stream, 10, &code);
-    }
-
-    do
-    {
-        /* decode COD, MCBPC, ACpred_flag, CBPY and DQUANT */
-        /* We have to discard stuffed MB header */
-
-        status = GetMBheaderDataPart_P(video);
-
-        if (status != PV_SUCCESS)
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            movePointerTo(stream, startFirstPart);
-            video->mbnum = mb_start;
-            return PV_FAIL;
-        }
-
-        /* we must update slice_counter before motion vector decoding.   */
-        video->sliceNo[video->mbnum] = (uint8) slice_counter;
-
-        if (Mode[video->mbnum] & INTER_MASK) /* INTER || INTER_Q || INTER_4V */
-        {
-            /* decode the motion vector (if there are any) */
-            status = PV_GetMBvectors(video, Mode[video->mbnum]);
-            if (status != PV_SUCCESS)
-            {
-                /* Report the error to the application.   06/20/2000 */
-                VideoDecoderErrorDetected(video);
-                movePointerTo(stream, startFirstPart);
-                video->mbnum = mb_start;
-                return PV_FAIL;
-            }
-        }
-        video->mbnum++;
-
-        video->mbnum_row = PV_GET_ROW(video->mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = video->mbnum - video->mbnum_row * nMBPerRow;
-
-        BitstreamShowBits16(stream, 10, &code);
-        while (code == 1)
-        {
-            PV_BitstreamFlushBits(stream, 10);
-            BitstreamShowBits16(stream, 10, &code);
-        }
-        /* have we reached the end of the video packet or vop? */
-        status = BitstreamShowBits32(stream, MOTION_MARKER_COMB_LENGTH, &tmpvar);
-        /*      if (status != PV_SUCCESS && status != PV_END_OF_BUFFER) return status;  */
-    }
-    while (tmpvar != MOTION_MARKER_COMB && video->mbnum < nTotalMB);
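-    /* In a data-partitioned P-VOP packet the first partition (not_coded, MCBPC and
-       the motion vectors decoded above) ends with the motion_marker; ac_pred_flag,
-       CBPY, DQUANT and the DCT coefficients follow in the second partition. */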
-
-    if (tmpvar == MOTION_MARKER_COMB)
-    {
-        PV_BitstreamFlushBits(stream, MOTION_MARKER_COMB_LENGTH);
-    }
-    else
-    {
-        status = quickSearchMotionMarker(stream);
-        if (status == PV_SUCCESS)
-        {
-            /* the only way to end up here is in the last packet, with stuffing at
-            the end of the first partition */
-            PV_BitstreamFlushBits(stream, MOTION_MARKER_COMB_LENGTH);
-        }
-        else
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            movePointerTo(stream, startFirstPart);
-            video->mbnum = mb_start;
-            /* concealment will be taken care of in the upper layer  */
-            return PV_FAIL;
-        }
-    }
-
-    /* decode the second partition */
-    startSecondPart = getPointer(stream);
-    QP = video->currVop->quantizer;
-
-    mb_end = video->mbnum;
-
-    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
-    {
-        MBtype = Mode[mbnum];
-
-        if (MBtype == MODE_SKIPPED)
-        {
-            QPMB[mbnum] = QP; /*  03/01/01 */
-            continue;
-        }
-        intra_MB = (MBtype & INTRA_MASK); /* (MBtype == MODE_INTRA || MBtype == MODE_INTRA_Q) */
-        video->mbnum = mbnum;
-        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);   /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-
-        /* ac_pred_flag is always present in data partitioned mode    04/10/01 */
-        if (intra_MB)
-        {
-            video->acPredFlag[mbnum] = (uint8) BitstreamRead1Bits_INLINE(stream);
-        }
-
-        CBPY = PV_VlcDecCBPY(stream, intra_MB);
-        if (CBPY < 0)
-        {
-            /* Report the error to the application.   06/20/2000 */
-            VideoDecoderErrorDetected(video);
-            /* Conceal second partition,  5/15/2000 */
-            movePointerTo(stream, startSecondPart);
-            ConcealTexture_P(video, mb_start, mb_end, slice_counter);
-            return PV_FAIL;
-        }
-
-        video->headerInfo.CBP[mbnum] |= (uint8)(CBPY << 2);
-        if (intra_MB || MBtype == MODE_INTER_Q)                     /*  04/26/01 */
-        {
-            status = GetMBheaderDataPart_DQUANT_DC(video, &QP);
-            if (status != PV_SUCCESS) return status;
-        }
-        video->usePrevQP = 1;        /*  04/27/01 */
-        QPMB[mbnum] = QP;
-    }
-
-    video->usePrevQP = 0;  /*  04/27/01 */
-
-    for (mbnum = mb_start; mbnum < mb_end; mbnum++)
-    {
-        video->mbnum = mbnum;
-        video->mbnum_row = PV_GET_ROW(mbnum, nMBPerRow);  /*  This is needed if mbnum is read from the packet header */
-        video->mbnum_col = mbnum - video->mbnum_row * nMBPerRow;
-
-
-        if (Mode[mbnum] != MODE_SKIPPED)
-        {
-            /* decode the DCT coefficients for the MB */
-            status = GetMBData_DataPart(video);
-            if (status != PV_SUCCESS)
-            {
-                /* Report the error to the application.   06/20/2000 */
-                VideoDecoderErrorDetected(video);
-
-                /* Conceal second partition,  5/15/2000 */
-                movePointerTo(stream, startSecondPart);
-                ConcealTexture_P(video, mb_start, mb_end, slice_counter);
-                return status;
-            }
-            video->usePrevQP = 1;  /*  04/27/01 */
-        }
-        else
-        {   // SKIPPED
-
-            /* Motion compensation and put it into video->mblock->pred_block */
-            SkippedMBMotionComp(video);
-
-            //oscl_memset(video->predDCAC_row + video->mbnum_col, 0, sizeof(typeDCACStore)); /*  SKIPPED_ACDC */
-            //oscl_memset(video->predDCAC_col, 0, sizeof(typeDCACStore));
-            /*  08/08/2005 */
-            pbyte = (uint8*)(video->predDCAC_row + video->mbnum_col);
-            ZERO_OUT_64BYTES(pbyte);
-            pbyte = (uint8*)(video->predDCAC_col);
-            ZERO_OUT_64BYTES(pbyte);
-
-        }
-    }
-
-    valid_stuffing = validStuffing(stream);   /*  */
-    if (!valid_stuffing)
-    {
-        VideoDecoderErrorDetected(video);
-        movePointerTo(stream, startSecondPart); /*  */
-        ConcealTexture_P(video, mb_start, mb_end, slice_counter);
-
-        return PV_FAIL;
-    }
-    return PV_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/*  Function : GetMBheaderDataPart_DQUANT_DC()                              */
-/*  Date     : 04/26/2000                                                   */
-/*  Purpose  : Decode DQUANT and DC in Data Partitioned Mode for both       */
-/*             I-VOP and P-VOP.                                             */
-/*  In/out   :                                                              */
-/*  Return   : PV_SUCCESS on success, PV_FAIL on failure.                   */
-/*  Modified : 02/13/2001 new ACDC prediction structure,        */
-/*                                       cleanup                            */
-/* ======================================================================== */
-PV_STATUS GetMBheaderDataPart_DQUANT_DC(VideoDecData *video, int16 *QP)
-{
-    PV_STATUS status = PV_SUCCESS;
-    BitstreamDecVideo *stream = video->bitstream;
-    int mbnum = video->mbnum;
-    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
-    uint8 *Mode = video->headerInfo.Mode;
-    int  MBtype = Mode[mbnum];
-    typeDCStore *DC = video->predDC + mbnum;
-    int  comp;
-    Bool switched;
-    uint  DQUANT;
-    int16 QP_tmp;
-
-    const static int  DQ_tab[4] = { -1, -2, 1, 2};
-
-    if (MBtype & Q_MASK)             /* INTRA_Q || INTER_Q */
-    {
-        DQUANT = BitstreamReadBits16(stream, 2);
-        *QP += DQ_tab[DQUANT];
-
-        if (*QP < 1) *QP = 1;
-        else if (*QP > 31) *QP = 31;
-    }
-    if (MBtype & INTRA_MASK)  /* INTRA || INTRA_Q */ /* no switch, code DC separately */
-    {
-        QP_tmp = *QP;                      /* running QP  04/26/01*/
-        switched = 0;
-        if (intra_dc_vlc_thr)                 /*  04/27/01 */
-        {
-            if (video->usePrevQP)
-                QP_tmp = video->QPMB[mbnum-1];
-            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
-        }
-        if (!switched)
-        {
-            for (comp = 0; comp < 6; comp++)
-            {
-                status = PV_DecodePredictedIntraDC(comp, stream, (*DC + comp));   /*  03/01/01 */
-                if (status != PV_SUCCESS) return PV_FAIL;
-            }
-        }
-        else
-        {
-            for (comp = 0; comp < 6; comp++)
-            {
-                (*DC)[comp] = 0;   /*  04/26/01 needed for switched case*/
-            }
-        }
-    }
-    return status;
-}
-
-
-/***********************************************************CommentBegin******
-*       04/25/2000 : Initial modification to the new PV Lib format.
-*       04/17/2001 : new ACDC pred structure
-***********************************************************CommentEnd********/
-PV_STATUS GetMBheaderDataPart_P(VideoDecData *video)
-{
-    BitstreamDecVideo *stream = video->bitstream;
-    int mbnum = video->mbnum;
-    uint8 *Mode = video->headerInfo.Mode;
-    typeDCStore *DC = video->predDC + mbnum;
-    uint no_dct_flag;
-    int comp;
-    int MCBPC;
-
-    no_dct_flag = BitstreamRead1Bits_INLINE(stream);
-
-    if (no_dct_flag)
-    {
-        /* skipped macroblock */
-        Mode[mbnum] = MODE_SKIPPED;
-
-        for (comp = 0; comp < 6; comp++)
-        {
-            (*DC)[comp] = mid_gray;
-            /*  ACDC REMOVE AC coefs are set in DecodeDataPart_P */
-        }
-    }
-    else
-    {
-        /* coded macroblock */
-        MCBPC = PV_VlcDecMCBPC_com_inter(stream);
-
-        if (VLC_ERROR_DETECTED(MCBPC))
-        {
-            return PV_FAIL;
-        }
-
-        Mode[mbnum] = (uint8)MBtype_mode[MCBPC & 7];
-        video->headerInfo.CBP[mbnum] = (uint8)((MCBPC >> 4) & 3);
-    }
-
-    return PV_SUCCESS;
-}
-
-
-/***********************************************************CommentBegin******
-*       04/17/01  new ACDC pred structure, reorganized code, cleanup
-***********************************************************CommentEnd********/
-PV_STATUS GetMBData_DataPart(VideoDecData *video)
-{
-    int mbnum = video->mbnum;
-    int16 *dataBlock;
-    MacroBlock *mblock = video->mblock;
-    int QP = video->QPMB[mbnum];
-    int32 offset;
-    PIXEL *c_comp;
-    int width = video->width;
-    int intra_dc_vlc_thr = video->currVop->intraDCVlcThr;
-    uint CBP = video->headerInfo.CBP[mbnum];
-    uint8 mode = video->headerInfo.Mode[mbnum];
-    int x_pos = video->mbnum_col;
-    typeDCStore *DC = video->predDC + mbnum;
-    int  ncoeffs[6], *no_coeff = mblock->no_coeff;
-    int  comp;
-    Bool  switched;
-    int QP_tmp = QP;
-
-    int y_pos = video->mbnum_row;
-#ifdef PV_POSTPROC_ON
-    uint8 *pp_mod[6];
-    int TotalMB = video->nTotalMB;
-    int MB_in_width = video->nMBPerRow;
-#endif
-
-
-
-    /*****
-    *     Decoding of the 6 blocks (depending on transparent pattern)
-    *****/
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        /** post-processing ***/
-        pp_mod[0] = video->pstprcTypCur + (y_pos << 1) * (MB_in_width << 1) + (x_pos << 1);
-        pp_mod[1] = pp_mod[0] + 1;
-        pp_mod[2] = pp_mod[0] + (MB_in_width << 1);
-        pp_mod[3] = pp_mod[2] + 1;
-        pp_mod[4] = video->pstprcTypCur + (TotalMB << 2) + mbnum;
-        pp_mod[5] = pp_mod[4] + TotalMB;
-    }
-#endif
-
-    /*  oscl_memset(mblock->block, 0, sizeof(typeMBStore));    Aug 9,2005 */
-
-    if (mode & INTRA_MASK) /* MODE_INTRA || mode == MODE_INTRA_Q */
-    {
-        switched = 0;
-        if (intra_dc_vlc_thr)
-        {
-            if (video->usePrevQP)
-                QP_tmp = video->QPMB[mbnum-1];   /* running QP  04/26/01 */
-
-            switched = (intra_dc_vlc_thr == 7 || QP_tmp >= intra_dc_vlc_thr * 2 + 11);
-        }
-
-        mblock->DCScalarLum = cal_dc_scaler(QP, LUMINANCE_DC_TYPE);     /*   ACDC 03/01/01 */
-        mblock->DCScalarChr = cal_dc_scaler(QP, CHROMINANCE_DC_TYPE);
-
-        for (comp = 0; comp < 6; comp++)
-        {
-            dataBlock = mblock->block[comp];    /*, 10/20/2000 */
-
-            dataBlock[0] = (*DC)[comp];
-
-            ncoeffs[comp] = VlcDequantH263IntraBlock(video, comp,
-                            switched, mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-
-            if (VLC_ERROR_DETECTED(ncoeffs[comp]))         /*  */
-            {
-                if (switched)
-                    return PV_FAIL;
-                else
-                {
-                    ncoeffs[comp] = 1;
-                    oscl_memset((dataBlock + 1), 0, sizeof(int16)*63);
-                }
-            }
-            no_coeff[comp] = ncoeffs[comp];
-            /*  modified to new semaphore for post-proc */
-            // Future work: can be combined into the dequant function
-            // @todo Deblocking Semaphore for INTRA block
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8) PostProcSemaphore(dataBlock);
-#endif
-        }
-        MBlockIDCT(video);
-    }
-    else /* MODE INTER*/
-    {
-
-        MBMotionComp(video, CBP);
-        offset = (int32)(y_pos << 4) * width + (x_pos << 4);
-        c_comp  = video->currVop->yChan + offset;
-
-
-        for (comp = 0; comp < 4; comp++)
-        {
-            (*DC)[comp] = mid_gray;
-
-            if (CBP & (1 << (5 - comp)))
-            {
-                ncoeffs[comp] = VlcDequantH263InterBlock(video, comp,
-                                mblock->bitmapcol[comp], &mblock->bitmaprow[comp]);
-                if (VLC_ERROR_DETECTED(ncoeffs[comp]))
-                    return PV_FAIL;
-
-
-                BlockIDCT(c_comp + (comp&2)*(width << 2) + 8*(comp&1), mblock->pred_block + (comp&2)*64 + 8*(comp&1), mblock->block[comp], width, ncoeffs[comp],
-                          mblock->bitmapcol[comp], mblock->bitmaprow[comp]);
-
-            }
-            else
-            {
-                ncoeffs[comp] = 0;
-            }
-
-            /*  @todo Deblocking Semaphore for INTRA block, for inter just test for ringing  */
-#ifdef PV_POSTPROC_ON
-            if (video->postFilterType != PV_NO_POST_PROC)
-                *pp_mod[comp] = (uint8)((ncoeffs[comp] > 3) ? 4 : 0);
-#endif
-        }
-
-        (*DC)[4] = mid_gray;
-        if (CBP & 2)
-        {
-            ncoeffs[4] = VlcDequantH263InterBlock(video, 4,
-                                                  mblock->bitmapcol[4], &mblock->bitmaprow[4]);
-            if (VLC_ERROR_DETECTED(ncoeffs[4]))
-                return PV_FAIL;
-
-            BlockIDCT(video->currVop->uChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 256, mblock->block[4], width >> 1, ncoeffs[4],
-                      mblock->bitmapcol[4], mblock->bitmaprow[4]);
-
-        }
-        else
-        {
-            ncoeffs[4] = 0;
-        }
-#ifdef PV_POSTPROC_ON
-        if (video->postFilterType != PV_NO_POST_PROC)
-            *pp_mod[4] = (uint8)((ncoeffs[4] > 3) ? 4 : 0);
-#endif
-        (*DC)[5] = mid_gray;
-        if (CBP & 1)
-        {
-            ncoeffs[5] = VlcDequantH263InterBlock(video, 5,
-                                                  mblock->bitmapcol[5], &mblock->bitmaprow[5]);
-            if (VLC_ERROR_DETECTED(ncoeffs[5]))
-                return PV_FAIL;
-
-            BlockIDCT(video->currVop->vChan + (offset >> 2) + (x_pos << 2), mblock->pred_block + 264, mblock->block[5], width >> 1, ncoeffs[5],
-                      mblock->bitmapcol[5], mblock->bitmaprow[5]);
-
-        }
-        else
-        {
-            ncoeffs[5] = 0;
-        }
-#ifdef PV_POSTPROC_ON
-        if (video->postFilterType != PV_NO_POST_PROC)
-            *pp_mod[5] = (uint8)((ncoeffs[5] > 3) ? 4 : 0);
-#endif
-
-
-
-
-        /* Motion compensation and put it into video->mblock->pred_block */
-    }
-    return PV_SUCCESS;
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp
deleted file mode 100644
index ce779b0..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_chroma.cpp
+++ /dev/null
@@ -1,215 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void Deringing_Chroma(
-    uint8 *Rec_C,
-    int width,
-    int height,
-    int16 *QP_store,
-    int,
-    uint8 *pp_mod
-)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int thres;
-    int v_blk, h_blk;
-    int max_diff;
-    int v_pel, h_pel;
-    int max_blk, min_blk;
-    int v0, h0;
-    uint8 *ptr;
-    int sum, sum1, incr;
-    int32 addr_v;
-    int sign_v[10], sum_v[10];
-    int *ptr2, *ptr3;
-    uint8 pelu, pelc, pell;
-    incr = width - BLKSIZE;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
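-    /* For each 8x8 chroma block (the first row and first column of blocks are always
-       processed, the rest only when the ringing flag 0x4 is set in pp_mod), find the
-       block's min and max, use their midpoint as a threshold, and smooth pixels whose
-       3x3 neighbourhood lies entirely on one side of that threshold with a separable
-       1-2-1 filter, limiting the change per pixel to max_diff (QP / 4 + 4). */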
-    /* chrominance */
-    /* Do the first line (7 pixels at a time => Don't use MMX)*/
-    for (h_blk = 0; h_blk < width; h_blk += BLKSIZE)
-    {
-        max_diff = (QP_store[h_blk>>3] >> 2) + 4;
-        ptr = &Rec_C[h_blk];
-        max_blk = min_blk = *ptr;
-        FindMaxMin(ptr, &min_blk, &max_blk, width);
-        h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-        if (max_blk - min_blk >= 4)
-        {
-            thres = (max_blk + min_blk + 1) >> 1;
-
-
-            for (v_pel = 1; v_pel < BLKSIZE - 1; v_pel++)
-            {
-                addr_v = (int32)v_pel * width;
-                ptr = &Rec_C[addr_v + h0 - 1];
-                ptr2 = &sum_v[0];
-                ptr3 = &sign_v[0];
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                for (h_pel = h0; h_pel < h_blk + BLKSIZE - 1; h_pel++)
-                {
-                    pelu = *(ptr - width);
-                    pelc = *ptr;
-                    pell = *(ptr + width);
-
-                    *ptr2 = pelu + (pelc << 1) + pell;
-                    *ptr3 = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                    sum1 = *(ptr3 - 2) + *(ptr3 - 1) + *ptr3;
-                    if (sum1 == 0 || sum1 == 9)
-                    {
-                        sum = (*(ptr2 - 2) + (*(ptr2 - 1) << 1) + *ptr2 + 8) >> 4;
-
-                        ptr--;
-                        if (PV_ABS(*ptr - sum) > max_diff)
-                        {
-                            if (sum > *ptr)
-                                sum = *ptr + max_diff;
-                            else
-                                sum = *ptr - max_diff;
-                        }
-                        *ptr++ = (uint8) sum;
-                    }
-                    ptr++;
-                    ptr2++;
-                    ptr3++;
-                }
-            }
-        }
-    }
-
-    for (v_blk = BLKSIZE; v_blk < height; v_blk += BLKSIZE)
-    {
-        v0 = v_blk - 1;
-        /* Do the first block (pixels=7 => No MMX) */
-        max_diff = (QP_store[((((int32)v_blk*width)>>3))>>3] >> 2) + 4;
-        ptr = &Rec_C[(int32)v_blk * width];
-        max_blk = min_blk = *ptr;
-        FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-        if (max_blk - min_blk >= 4)
-        {
-            thres = (max_blk + min_blk + 1) >> 1;
-
-            for (v_pel = v0; v_pel < v_blk + BLKSIZE - 1; v_pel++)
-            {
-                addr_v = v_pel * width;
-                ptr = &Rec_C[addr_v];
-                ptr2 = &sum_v[0];
-                ptr3 = &sign_v[0];
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                pelu = *(ptr - width);
-                pelc = *ptr;
-                pell = *(ptr + width);
-                ptr++;
-                *ptr2++ = pelu + (pelc << 1) + pell;
-                *ptr3++ = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                for (h_pel = 1; h_pel < BLKSIZE - 1; h_pel++)
-                {
-                    pelu = *(ptr - width);
-                    pelc = *ptr;
-                    pell = *(ptr + width);
-
-                    *ptr2 = pelu + (pelc << 1) + pell;
-                    *ptr3 = INDEX(pelu, thres) + INDEX(pelc, thres) + INDEX(pell, thres);
-
-                    sum1 = *(ptr3 - 2) + *(ptr3 - 1) + *ptr3;
-                    if (sum1 == 0 || sum1 == 9)
-                    {
-                        sum = (*(ptr2 - 2) + (*(ptr2 - 1) << 1) + *ptr2 + 8) >> 4;
-
-                        ptr--;
-                        if (PV_ABS(*ptr - sum) > max_diff)
-                        {
-                            if (sum > *ptr)
-                                sum = *ptr + max_diff;
-                            else
-                                sum = *ptr - max_diff;
-                        }
-                        *ptr++ = (uint8) sum;
-                    }
-                    ptr++;
-                    ptr2++;
-                    ptr3++;
-                }
-            }
-        }
-
-
-        /* Do the rest in MMX */
-        for (h_blk = BLKSIZE; h_blk < width; h_blk += BLKSIZE)
-        {
-            if ((pp_mod[(v_blk/8)*(width/8)+h_blk/8]&0x4) != 0)
-            {
-                max_diff = (QP_store[((((int32)v_blk*width)>>3)+h_blk)>>3] >> 2) + 4;
-                ptr = &Rec_C[(int32)v_blk * width + h_blk];
-                max_blk = min_blk = *ptr;
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                h0 = h_blk - 1;
-
-                if (max_blk - min_blk >= 4)
-                {
-                    thres = (max_blk + min_blk + 1) >> 1;
-#ifdef NoMMX
-                    AdaptiveSmooth_NoMMX(Rec_C, v0, h0, v_blk, h_blk, thres, width, max_diff);
-#else
-                    DeringAdaptiveSmoothMMX(&Rec_C[(int32)v0*width+h0], width, thres, max_diff);
-#endif
-                }
-            }
-        }
-    } /* macroblock level */
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp
deleted file mode 100644
index b5574b4..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/deringing_luma.cpp
+++ /dev/null
@@ -1,231 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-#ifdef PV_POSTPROC_ON
-
-void Deringing_Luma(
-    uint8 *Rec_Y,
-    int width,
-    int height,
-    int16 *QP_store,
-    int,
-    uint8 *pp_mod)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int thres[4], range[4], max_range_blk, max_thres_blk;
-    int MB_V, MB_H, BLK_V, BLK_H;
-    int v_blk, h_blk;
-    int max_diff;
-    int max_blk, min_blk;
-    int v0, h0;
-    uint8 *ptr;
-    int thr, blks, incr;
-    int mb_indx, blk_indx;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    incr = width - BLKSIZE;
-
-    /* Dering the first line of macro blocks */
-    for (MB_H = 0; MB_H < width; MB_H += MBSIZE)
-    {
-        max_diff = (QP_store[(MB_H)>>4] >> 2) + 4;
-
-        /* threshold determination */
-        max_range_blk = max_thres_blk = 0;
-        blks = 0;
-
-        for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-        {
-            for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-            {
-                ptr = &Rec_Y[(int32)(BLK_V) * width + MB_H + BLK_H];
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-
-                thres[blks] = (max_blk + min_blk + 1) >> 1;
-                range[blks] = max_blk - min_blk;
-
-                if (range[blks] >= max_range_blk)
-                {
-                    max_range_blk = range[blks];
-                    max_thres_blk = thres[blks];
-                }
-                blks++;
-            }
-        }
-
-        blks = 0;
-        for (v_blk = 0; v_blk < MBSIZE; v_blk += BLKSIZE)
-        {
-            v0 = ((v_blk - 1) >= 1) ? (v_blk - 1) : 1;
-            for (h_blk = MB_H; h_blk < MB_H + MBSIZE; h_blk += BLKSIZE)
-            {
-                h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-                /* threshold rearrangement for flat region adjacent to non-flat region */
-                if (range[blks]<32 && max_range_blk >= 64)
-                    thres[blks] = max_thres_blk;
-
-                /* threshold rearrangement for deblocking
-                (blockiness annoying at DC dominant region) */
-                if (max_range_blk >= 16)
-                {
-                    /* adaptive smoothing */
-                    thr = thres[blks];
-
-                    AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                         thr, width, max_diff);
-                }
-                blks++;
-            } /* block level (Luminance) */
-        }
-    } /* macroblock level */
-
-
-    /* Do the rest of the macro-block-lines */
-    for (MB_V = MBSIZE; MB_V < height; MB_V += MBSIZE)
-    {
-        /* First macro-block */
-        max_diff = (QP_store[((((int32)MB_V*width)>>4))>>4] >> 2) + 4;
-        /* threshold determination */
-        max_range_blk = max_thres_blk = 0;
-        blks = 0;
-        for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-        {
-            for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-            {
-                ptr = &Rec_Y[(int32)(MB_V + BLK_V) * width + BLK_H];
-                FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                thres[blks] = (max_blk + min_blk + 1) >> 1;
-                range[blks] = max_blk - min_blk;
-
-                if (range[blks] >= max_range_blk)
-                {
-                    max_range_blk = range[blks];
-                    max_thres_blk = thres[blks];
-                }
-                blks++;
-            }
-        }
-
-        blks = 0;
-        for (v_blk = MB_V; v_blk < MB_V + MBSIZE; v_blk += BLKSIZE)
-        {
-            v0 = v_blk - 1;
-            for (h_blk = 0; h_blk < MBSIZE; h_blk += BLKSIZE)
-            {
-                h0 = ((h_blk - 1) >= 1) ? (h_blk - 1) : 1;
-
-                /* threshold rearrangement for flat region adjacent to non-flat region */
-                if (range[blks]<32 && max_range_blk >= 64)
-                    thres[blks] = max_thres_blk;
-
-                /* threshold rearrangement for deblocking
-                (blockiness annoying at DC dominant region) */
-                if (max_range_blk >= 16)
-                {
-                    /* adaptive smoothing */
-                    thr = thres[blks];
-
-                    AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                         thr, width, max_diff);
-                }
-                blks++;
-            }
-        } /* block level (Luminance) */
-
-        /* Rest of the macro-blocks */
-        for (MB_H = MBSIZE; MB_H < width; MB_H += MBSIZE)
-        {
-            max_diff = (QP_store[((((int32)MB_V*width)>>4)+MB_H)>>4] >> 2) + 4;
-
-            /* threshold determination */
-            max_range_blk = max_thres_blk = 0;
-            blks = 0;
-
-            mb_indx = (MB_V / 8) * (width / 8) + MB_H / 8;
-            for (BLK_V = 0; BLK_V < MBSIZE; BLK_V += BLKSIZE)
-            {
-                for (BLK_H = 0; BLK_H < MBSIZE; BLK_H += BLKSIZE)
-                {
-                    blk_indx = mb_indx + (BLK_V / 8) * width / 8 + BLK_H / 8;
-                    /* Update based on pp_mod only */
-                    if ((pp_mod[blk_indx]&0x4) != 0)
-                    {
-                        ptr = &Rec_Y[(int32)(MB_V + BLK_V) * width + MB_H + BLK_H];
-                        FindMaxMin(ptr, &min_blk, &max_blk, incr);
-                        thres[blks] = (max_blk + min_blk + 1) >> 1;
-                        range[blks] = max_blk - min_blk;
-
-                        if (range[blks] >= max_range_blk)
-                        {
-                            max_range_blk = range[blks];
-                            max_thres_blk = thres[blks];
-                        }
-                    }
-                    blks++;
-                }
-            }
-
-            blks = 0;
-            for (v_blk = MB_V; v_blk < MB_V + MBSIZE; v_blk += BLKSIZE)
-            {
-                v0 = v_blk - 1;
-                mb_indx = (v_blk / 8) * (width / 8);
-                for (h_blk = MB_H; h_blk < MB_H + MBSIZE; h_blk += BLKSIZE)
-                {
-                    h0 = h_blk - 1;
-                    blk_indx = mb_indx + h_blk / 8;
-                    if ((pp_mod[blk_indx]&0x4) != 0)
-                    {
-                        /* threshold rearrangement for flat region adjacent to non-flat region */
-                        if (range[blks]<32 && max_range_blk >= 64)
-                            thres[blks] = max_thres_blk;
-
-                        /* threshold rearrangement for deblocking
-                        (blockiness annoying at DC dominant region) */
-                        if (max_range_blk >= 16)
-                        {
-                            /* adaptive smoothing */
-                            thr = thres[blks];
-#ifdef NoMMX
-                            AdaptiveSmooth_NoMMX(Rec_Y, v0, h0, v_blk, h_blk,
-                                                 thr, width, max_diff);
-#else
-                            DeringAdaptiveSmoothMMX(&Rec_Y[v0*width+h0],
-                                                    width, thr, max_diff);
-#endif
-                        }
-                    }
-                    blks++;
-                }
-            } /* block level (Luminance) */
-        } /* macroblock level */
-    } /* macroblock level */
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
deleted file mode 100644
index 1ac88a1..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
+++ /dev/null
@@ -1,176 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    input_ptr = pointer to the buffer containing values of type UChar
-            in a 2D block of data.
-    min_ptr = pointer to the minimum value of type Int to be found in a
-          square block of size BLKSIZE contained in 2D block of data.
-    max_ptr = pointer to the maximum value of type Int to be found in a
-          square block of size BLKSIZE contained in 2D block of data.
-    incr = value of type Int giving the amount by which input_ptr is
-          advanced after each row (the buffer width minus BLKSIZE).
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    min_ptr points to the found minimum value in the square block of
-    size BLKSIZE contained in 2D block of data.
-
-    max_ptr points to the found maximum value in the square block of
-    size BLKSIZE contained in 2D block of data.
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function finds the maximum and the minimum values in a square block of
- data of size BLKSIZE * BLKSIZE. The data is contained in the buffer which
- represents a 2D block of data that is larger than BLKSIZE * BLKSIZE.
- This is illustrated below.
-
-    mem loc x + 00h -> o o o o o o o o o o o o o o o o
-    mem loc x + 10h -> o o o o o X X X X X X X X o o o
-    mem loc x + 20h -> o o o o o X X X X X X X X o o o
-    mem loc x + 30h -> o o o o o X X X X X X X X o o o
-    mem loc x + 40h -> o o o o o X X X X X X X X o o o
-    mem loc x + 50h -> o o o o o X X X X X X X X o o o
-    mem loc x + 60h -> o o o o o X X X X X X X X o o o
-    mem loc x + 70h -> o o o o o X X X X X X X X o o o
-    mem loc x + 80h -> o o o o o X X X X X X X X o o o
-    mem loc x + 90h -> o o o o o o o o o o o o o o o o
-    mem loc x + A0h -> o o o o o o o o o o o o o o o o
-    mem loc x + B0h -> o o o o o o o o o o o o o o o o
-
-For illustration purposes, the diagram assumes that BLKSIZE is equal to 8
-but this is not a requirement. In this diagram, the buffer starts at
-location x, but the input pointer, input_ptr, passed into this function
-points to the first row of data to be searched, which is at x + 15h. The
-value of incr passed into this function represents the amount input_ptr
-needs to be incremented to point to the next row of data.
-
-This function compares each value in a row to the current maximum and
-minimum. After each row, input_ptr is incremented to point to the next row.
-This is repeated until all rows have been processed. When the search is
-complete the location pointed to by min_ptr contains the minimum value
-found and the location pointed to by max_ptr contains the maximum value found.
-
-------------------------------------------------------------------------------
-*/
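
For readers skimming the diff, the scan described above amounts to the following minimal standalone sketch (not part of the original sources; the helper name find_block_min_max is hypothetical, and it indexes with the full row stride rather than advancing a pointer by incr = width - BLKSIZE as FindMaxMin below does):

    #include <stdint.h>

    #define BLKSIZE 8   /* matches the assumption in the diagram above */

    /* Scan a BLKSIZE x BLKSIZE block embedded in a wider 2D buffer and
       report its minimum and maximum pixel values. */
    static void find_block_min_max(const uint8_t *block, int stride,
                                   int *min_out, int *max_out)
    {
        int min_val = block[0];
        int max_val = block[0];
        for (int row = 0; row < BLKSIZE; row++)
        {
            for (int col = 0; col < BLKSIZE; col++)
            {
                int pel = block[row * stride + col];
                if (pel > max_val)
                    max_val = pel;
                else if (pel < min_val)
                    min_val = pel;
            }
        }
        *min_out = min_val;
        *max_out = max_val;
    }
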
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "post_proc.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void  FindMaxMin(
-    uint8 *input_ptr,
-    int *min_ptr,
-    int *max_ptr,
-    int incr)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    uint    i, j;
-    int min, max;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    max = min = *input_ptr;
-    /*  incr = incr - BLKSIZE; */   /*  09/06/2001, already passed in as width - BLKSIZE */
-
-    for (i = BLKSIZE; i > 0; i--)
-    {
-        for (j = BLKSIZE; j > 0; j--)
-        {
-            if (*input_ptr > max)
-            {
-                max = *input_ptr;
-            }
-            else if (*input_ptr < min)
-            {
-                min = *input_ptr;
-            }
-            input_ptr += 1;
-        }
-
-        /* set pointer to the beginning of the next row*/
-        input_ptr += incr;
-    }
-
-    *max_ptr = max;
-    *min_ptr = min;
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
deleted file mode 100644
index f35ce4f..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
+++ /dev/null
@@ -1,660 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "mp4def.h"
-#include "idct.h"
-#include "motion_comp.h"
-
-#ifdef FAST_IDCT
-
-/****************************************************************
-*       vca_idct.c : created 6/1/99 for several options
-*                     of hard-coded reduced idct function (using nz_coefs)
-******************************************************************/
-
-/*****************************************************/
-//pretested version
-void idctrow0(int16 *, uint8 *, uint8 *, int)
-{
-    return ;
-}
-void idctcol0(int16 *)
-{
-    return ;
-}
-
-void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
-    /* shortcut */
-    int tmp;
-    int i = 8;
-    uint32 pred_word, dst_word;
-    int res, res2;
-
-    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
-    width -= 4;
-    dst -= width;
-    pred -= 12;
-    blk -= 8;
-
-    while (i--)
-    {
-        tmp = (*(blk += 8) + 32) >> 6;
-        *blk = 0;
-
-        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-        res = tmp + (pred_word & 0xFF);
-        CLIP_RESULT(res);
-        res2 = tmp + ((pred_word >> 8) & 0xFF);
-        CLIP_RESULT(res2);
-        dst_word = (res2 << 8) | res;
-        res = tmp + ((pred_word >> 16) & 0xFF);
-        CLIP_RESULT(res);
-        dst_word |= (res << 16);
-        res = tmp + ((pred_word >> 24) & 0xFF);
-        CLIP_RESULT(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
-        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-        res = tmp + (pred_word & 0xFF);
-        CLIP_RESULT(res);
-        res2 = tmp + ((pred_word >> 8) & 0xFF);
-        CLIP_RESULT(res2);
-        dst_word = (res2 << 8) | res;
-        res = tmp + ((pred_word >> 16) & 0xFF);
-        CLIP_RESULT(res);
-        dst_word |= (res << 16);
-        res = tmp + ((pred_word >> 24) & 0xFF);
-        CLIP_RESULT(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
-    }
-    return;
-}
-
-void idctcol1(int16 *blk)
-{ /* shortcut */
-    blk[0] = blk[8] = blk[16] = blk[24] = blk[32] = blk[40] = blk[48] = blk[56] =
-                                              blk[0] << 3;
-    return;
-}
-
-void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
-    int32 x0, x1, x2, x4, x5;
-    int i = 8;
-    uint32 pred_word, dst_word;
-    int res, res2;
-
-    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
-    width -= 4;
-    dst -= width;
-    pred -= 12;
-    blk -= 8;
-
-    while (i--)
-    {
-        /* shortcut */
-        x4 = blk[9];
-        blk[9] = 0;
-        x0 = ((*(blk += 8)) << 8) + 8192;
-        *blk = 0;  /* for proper rounding in the fourth stage */
-
-        /* first stage */
-        x5 = (W7 * x4 + 4) >> 3;
-        x4 = (W1 * x4 + 4) >> 3;
-
-        /* third stage */
-        x2 = (181 * (x4 + x5) + 128) >> 8;
-        x1 = (181 * (x4 - x5) + 128) >> 8;
-
-        /* fourth stage */
-        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-        res = (x0 + x4) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x0 + x2) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x0 + x1) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x0 + x5) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
-        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-        res = (x0 - x5) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x0 - x1) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x0 - x2) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x0 - x4) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
-    }
-    return ;
-}
-
-void idctcol2(int16 *blk)
-{
-    int32 x0, x1, x3, x5, x7;//, x8;
-
-    x1 = blk[8];
-    x0 = ((int32)blk[0] << 11) + 128;
-    /* both upper and lower*/
-
-    x7 = W7 * x1;
-    x1 = W1 * x1;
-
-    x3 = x7;
-    x5 = (181 * (x1 - x7) + 128) >> 8;
-    x7 = (181 * (x1 + x7) + 128) >> 8;
-
-    blk[0] = (x0 + x1) >> 8;
-    blk[8] = (x0 + x7) >> 8;
-    blk[16] = (x0 + x5) >> 8;
-    blk[24] = (x0 + x3) >> 8;
-    blk[56] = (x0 - x1) >> 8;
-    blk[48] = (x0 - x7) >> 8;
-    blk[40] = (x0 - x5) >> 8;
-    blk[32] = (x0 - x3) >> 8;
-
-    return ;
-}
-
-void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-    int i = 8;
-    uint32 pred_word, dst_word;
-    int res, res2;
-
-    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
-    width -= 4;
-    dst -= width;
-    pred -= 12;
-    blk -= 8;
-
-    while (i--)
-    {
-        x2 = blk[10];
-        blk[10] = 0;
-        x1 = blk[9];
-        blk[9] = 0;
-        x0 = ((*(blk += 8)) << 8) + 8192;
-        *blk = 0;   /* for proper rounding in the fourth stage */
-        /* both upper and lower*/
-        /* both x2orx6 and x0orx4 */
-
-        x4 = x0;
-        x6 = (W6 * x2 + 4) >> 3;
-        x2 = (W2 * x2 + 4) >> 3;
-        x8 = x0 - x2;
-        x0 += x2;
-        x2 = x8;
-        x8 = x4 - x6;
-        x4 += x6;
-        x6 = x8;
-
-        x7 = (W7 * x1 + 4) >> 3;
-        x1 = (W1 * x1 + 4) >> 3;
-        x3 = x7;
-        x5 = (181 * (x1 - x7) + 128) >> 8;
-        x7 = (181 * (x1 + x7) + 128) >> 8;
-
-        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-        res = (x0 + x1) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x4 + x7) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x6 + x5) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x2 + x3) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
-        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-        res = (x2 - x3) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x6 - x5) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x4 - x7) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x0 - x1) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
-    }
-
-    return ;
-}
-
-void idctcol3(int16 *blk)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-
-    x2 = blk[16];
-    x1 = blk[8];
-    x0 = ((int32)blk[0] << 11) + 128;
-
-    x4 = x0;
-    x6 = W6 * x2;
-    x2 = W2 * x2;
-    x8 = x0 - x2;
-    x0 += x2;
-    x2 = x8;
-    x8 = x4 - x6;
-    x4 += x6;
-    x6 = x8;
-
-    x7 = W7 * x1;
-    x1 = W1 * x1;
-    x3 = x7;
-    x5 = (181 * (x1 - x7) + 128) >> 8;
-    x7 = (181 * (x1 + x7) + 128) >> 8;
-
-    blk[0] = (x0 + x1) >> 8;
-    blk[8] = (x4 + x7) >> 8;
-    blk[16] = (x6 + x5) >> 8;
-    blk[24] = (x2 + x3) >> 8;
-    blk[56] = (x0 - x1) >> 8;
-    blk[48] = (x4 - x7) >> 8;
-    blk[40] = (x6 - x5) >> 8;
-    blk[32] = (x2 - x3) >> 8;
-
-    return;
-}
-
-
-void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-    int i = 8;
-    uint32 pred_word, dst_word;
-    int res, res2;
-
-    /* preset the offset, such that we can take advantage of pre-offset addressing mode */
-    width -= 4;
-    dst -= width;
-    pred -= 12;
-    blk -= 8;
-
-    while (i--)
-    {
-        x2 = blk[10];
-        blk[10] = 0;
-        x1 = blk[9];
-        blk[9] = 0;
-        x3 = blk[11];
-        blk[11] = 0;
-        x0 = ((*(blk += 8)) << 8) + 8192;
-        *blk = 0;    /* for proper rounding in the fourth stage */
-
-        x4 = x0;
-        x6 = (W6 * x2 + 4) >> 3;
-        x2 = (W2 * x2 + 4) >> 3;
-        x8 = x0 - x2;
-        x0 += x2;
-        x2 = x8;
-        x8 = x4 - x6;
-        x4 += x6;
-        x6 = x8;
-
-        x7 = (W7 * x1 + 4) >> 3;
-        x1 = (W1 * x1 + 4) >> 3;
-        x5 = (W3 * x3 + 4) >> 3;
-        x3 = (- W5 * x3 + 4) >> 3;
-        x8 = x1 - x5;
-        x1 += x5;
-        x5 = x8;
-        x8 = x7 - x3;
-        x3 += x7;
-        x7 = (181 * (x5 + x8) + 128) >> 8;
-        x5 = (181 * (x5 - x8) + 128) >> 8;
-
-        pred_word = *((uint32*)(pred += 12)); /* read 4 bytes from pred */
-        res = (x0 + x1) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x4 + x7) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x6 + x5) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x2 + x3) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += width)) = dst_word; /* save 4 bytes to dst */
-
-        pred_word = *((uint32*)(pred += 4)); /* read 4 bytes from pred */
-        res = (x2 - x3) >> 14;
-        ADD_AND_CLIP1(res);
-        res2 = (x6 - x5) >> 14;
-        ADD_AND_CLIP2(res2);
-        dst_word = (res2 << 8) | res;
-        res = (x4 - x7) >> 14;
-        ADD_AND_CLIP3(res);
-        dst_word |= (res << 16);
-        res = (x0 - x1) >> 14;
-        ADD_AND_CLIP4(res);
-        dst_word |= (res << 24);
-        *((uint32*)(dst += 4)) = dst_word; /* save 4 bytes to dst */
-    }
-    return ;
-}
-
-void idctcol4(int16 *blk)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
-    x2 = blk[16];
-    x1 = blk[8];
-    x3 = blk[24];
-    x0 = ((int32)blk[0] << 11) + 128;
-
-    x4 = x0;
-    x6 = W6 * x2;
-    x2 = W2 * x2;
-    x8 = x0 - x2;
-    x0 += x2;
-    x2 = x8;
-    x8 = x4 - x6;
-    x4 += x6;
-    x6 = x8;
-
-    x7 = W7 * x1;
-    x1 = W1 * x1;
-    x5 = W3 * x3;
-    x3 = -W5 * x3;
-    x8 = x1 - x5;
-    x1 += x5;
-    x5 = x8;
-    x8 = x7 - x3;
-    x3 += x7;
-    x7 = (181 * (x5 + x8) + 128) >> 8;
-    x5 = (181 * (x5 - x8) + 128) >> 8;
-
-
-    blk[0] = (x0 + x1) >> 8;
-    blk[8] = (x4 + x7) >> 8;
-    blk[16] = (x6 + x5) >> 8;
-    blk[24] = (x2 + x3) >> 8;
-    blk[56] = (x0 - x1) >> 8;
-    blk[48] = (x4 - x7) >> 8;
-    blk[40] = (x6 - x5) >> 8;
-    blk[32] = (x2 - x3) >> 8;
-
-    return ;
-}
-
-void idctrow0_intra(int16 *, PIXEL *, int)
-{
-    return ;
-}
-
-void idctrow1_intra(int16 *blk, PIXEL *comp, int width)
-{
-    /* shortcut */
-    int32 tmp;
-    int i = 8;
-    int offset = width;
-    uint32 word;
-
-    comp -= offset;
-    while (i--)
-    {
-        tmp = ((blk[0] + 32) >> 6);
-        blk[0] = 0;
-        CLIP_RESULT(tmp)
-
-        word = (tmp << 8) | tmp;
-        word = (word << 16) | word;
-
-        *((uint32*)(comp += offset)) = word;
-        *((uint32*)(comp + 4)) = word;
-
-
-
-
-        blk += B_SIZE;
-    }
-    return;
-}
-
-void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
-{
-    int32 x0, x1, x2, x4, x5, temp;
-    int i = 8;
-    int offset = width;
-    int32 word;
-
-    comp -= offset;
-    while (i--)
-    {
-        /* shortcut */
-        x4 = blk[1];
-        blk[1] = 0;
-        x0 = ((int32)blk[0] << 8) + 8192;
-        blk[0] = 0;   /* for proper rounding in the fourth stage */
-
-        /* first stage */
-        x5 = (W7 * x4 + 4) >> 3;
-        x4 = (W1 * x4 + 4) >> 3;
-
-        /* third stage */
-        x2 = (181 * (x4 + x5) + 128) >> 8;
-        x1 = (181 * (x4 - x5) + 128) >> 8;
-
-        /* fourth stage */
-        word = ((x0 + x4) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x0 + x2) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-        temp = ((x0 + x1) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-        temp = ((x0 + x5) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp += offset)) = word;
-
-        word = ((x0 - x5) >> 14);
-        CLIP_RESULT(word)
-        temp = ((x0 - x1) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-        temp = ((x0 - x2) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-        temp = ((x0 - x4) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp + 4)) = word;
-
-        blk += B_SIZE;
-    }
-    return ;
-}
-
-void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
-    int i = 8;
-    int offset = width;
-    int32 word;
-
-    comp -= offset;
-
-    while (i--)
-    {
-        x2 = blk[2];
-        blk[2] = 0;
-        x1 = blk[1];
-        blk[1] = 0;
-        x0 = ((int32)blk[0] << 8) + 8192;
-        blk[0] = 0;/* for proper rounding in the fourth stage */
-        /* both upper and lower*/
-        /* both x2orx6 and x0orx4 */
-
-        x4 = x0;
-        x6 = (W6 * x2 + 4) >> 3;
-        x2 = (W2 * x2 + 4) >> 3;
-        x8 = x0 - x2;
-        x0 += x2;
-        x2 = x8;
-        x8 = x4 - x6;
-        x4 += x6;
-        x6 = x8;
-
-        x7 = (W7 * x1 + 4) >> 3;
-        x1 = (W1 * x1 + 4) >> 3;
-        x3 = x7;
-        x5 = (181 * (x1 - x7) + 128) >> 8;
-        x7 = (181 * (x1 + x7) + 128) >> 8;
-
-        word = ((x0 + x1) >> 14);
-        CLIP_RESULT(word)
-        temp = ((x4 + x7) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-
-        temp = ((x6 + x5) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x2 + x3) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp += offset)) = word;
-
-        word = ((x2 - x3) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x6 - x5) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-        temp = ((x4 - x7) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x0 - x1) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp + 4)) = word;
-
-        blk += B_SIZE;
-    }
-    return ;
-}
-
-void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
-{
-    int32 x0, x1, x2, x3, x4, x5, x6, x7, x8, temp;
-    int i = 8;
-    int offset = width;
-    int32 word;
-
-    comp -= offset;
-
-    while (i--)
-    {
-        x2 = blk[2];
-        blk[2] = 0;
-        x1 = blk[1];
-        blk[1] = 0;
-        x3 = blk[3];
-        blk[3] = 0;
-        x0 = ((int32)blk[0] << 8) + 8192;
-        blk[0] = 0;/* for proper rounding in the fourth stage */
-
-        x4 = x0;
-        x6 = (W6 * x2 + 4) >> 3;
-        x2 = (W2 * x2 + 4) >> 3;
-        x8 = x0 - x2;
-        x0 += x2;
-        x2 = x8;
-        x8 = x4 - x6;
-        x4 += x6;
-        x6 = x8;
-
-        x7 = (W7 * x1 + 4) >> 3;
-        x1 = (W1 * x1 + 4) >> 3;
-        x5 = (W3 * x3 + 4) >> 3;
-        x3 = (- W5 * x3 + 4) >> 3;
-        x8 = x1 - x5;
-        x1 += x5;
-        x5 = x8;
-        x8 = x7 - x3;
-        x3 += x7;
-        x7 = (181 * (x5 + x8) + 128) >> 8;
-        x5 = (181 * (x5 - x8) + 128) >> 8;
-
-        word = ((x0 + x1) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x4 + x7) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-
-        temp = ((x6 + x5) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x2 + x3) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp += offset)) = word;
-
-        word = ((x2 - x3) >> 14);
-        CLIP_RESULT(word)
-
-        temp = ((x6 - x5) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 8);
-
-        temp = ((x4 - x7) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 16);
-
-        temp = ((x0 - x1) >> 14);
-        CLIP_RESULT(temp)
-        word = word | (temp << 24);
-        *((int32*)(comp + 4)) = word;
-
-        blk += B_SIZE;
-    }
-
-    return ;
-}
-
-#endif
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
deleted file mode 100644
index 877723d..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
+++ /dev/null
@@ -1,639 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#define LOG_TAG "m4v_h263"
-#include <log/log.h>
-
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    video = pointer to structure of type VideoDecData
-
- Local Stores/Buffers/Pointers Needed:
-    roundtab16 = rounding table
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    video->currVop->yChan contents are the newly calculated luminance
-      data
-    video->currVop->uChan contents are the newly calculated chrominance
-      b data
-    video->currVop->vChan contents are the newly calculated chrominance
-      r data
-    video->pstprcTypCur contents are the updated semaphore propagation
-      values
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function performs high level motion compensation on the luminance and
- chrominance data. It sets up all the parameters required by the functions
- that perform luminance and chrominance prediction and it initializes the
- pointer to the post processing semaphores of a given block. It also checks
- the motion compensation mode in order to determine which luminance or
- chrominance prediction functions to call and determines how the post
- processing semaphores are updated.
-
-*/
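
Before the implementation, one detail of the setup described above is worth spelling out: when a macroblock carries four motion vectors, MBMotionComp reduces them to a single chrominance vector per component by summing the four half-pel values and rounding through the roundtab16 table defined later in this file. A minimal sketch of that reduction follows (the function name, the example table name, and the inlined PV_ABS/PV_SIGN equivalents are illustrative, not from the original sources):

    /* Example rounding table with the same contents as roundtab16 below. */
    static const int kRoundTab16[16] =
        {0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2};

    /* Reduce four per-block motion vector components to one chroma
       component, mirroring the xsum/dx and dy computation in
       MBMotionComp: effectively divide the four-vector sum by 8,
       rounding its low four bits through the table. */
    static int chroma_mv_component(int mv0, int mv1, int mv2, int mv3)
    {
        int sum  = mv0 + mv1 + mv2 + mv3;
        int mag  = (sum < 0) ? -sum : sum;       /* PV_ABS(sum)  */
        int sign = (sum > 0) - (sum < 0);        /* PV_SIGN(sum) */
        return sign * (kRoundTab16[mag & 0xF] + ((mag >> 4) << 1));
    }
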
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "mp4dec_lib.h"
-#include "motion_comp.h"
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-/* 09/29/2000 bring this from mp4def.h */
-// const static int roundtab4[] = {0,1,1,1};
-// const static int roundtab8[] = {0,0,1,1,1,1,1,2};
-/*** 10/30 for TPS */
-// const static int roundtab12[] = {0,0,0,1,1,1,1,1,1,1,2,2};
-/* 10/30 for TPS ***/
-const static int roundtab16[] = {0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2};
-
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-/** modified 3 August 2005 to do prediction and put the results in
-video->mblock->pred_block, without adding the residue */
-
-void  MBMotionComp(
-    VideoDecData *video,
-    int CBP
-)
-{
-
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    /* Previous Video Object Plane */
-    Vop *prev = video->prevVop;
-
-    /* Current Macroblock (MB) in the VOP */
-    int mbnum = video->mbnum;
-
-    /* Number of MB per data row */
-    int MB_in_width = video->nMBPerRow;
-    int ypos, xpos;
-    PIXEL *c_comp, *c_prev;
-    PIXEL *cu_comp, *cu_prev;
-    PIXEL *cv_comp, *cv_prev;
-    int height, width, pred_width;
-    int imv, mvwidth;
-    int32 offset;
-    uint8 mode;
-    uint8 *pred_block, *pred;
-
-    /* Motion vector (dx,dy) in half-pel resolution */
-    int dx, dy;
-
-    MOT px[4], py[4];
-    int xpred, ypred;
-    int xsum;
-    int round1;
-#ifdef PV_POSTPROC_ON // 2/14/2001      
-    /* Total number of pixels in the VOL */
-    int32 size = (int32) video->nTotalMB << 8;
-    uint8 *pp_dec_y, *pp_dec_u;
-    int ll[4];
-    int tmp = 0;
-    uint8 msk_deblock = 0;
-#endif
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* Set rounding type */
-    /* change from array to single 09/29/2000 */
-    round1 = (int)(1 - video->currVop->roundingType);
-
-    /* width of luminance data in pixels (x axis) */
-    width = video->width;
-
-    /* height of luminance data in pixels (y axis) */
-    height = video->height;
-
-    /* number of blocks per row */
-    mvwidth = MB_in_width << 1;
-
-    /* starting y position in current MB; origin of MB */
-    ypos = video->mbnum_row << 4 ;
-    /* starting x position in current MB; origin of MB */
-    xpos = video->mbnum_col << 4 ;
-
-    /* offset to (x,y) position in current luminance MB */
-    /* in pixel resolution                              */
-    /* ypos*width -> row, +x -> column */
-    offset = (int32)ypos * width + xpos;
-
-    /* get mode for current MB */
-    mode = video->headerInfo.Mode[mbnum];
-
-    /* block index */
-    /* imv = (xpos/8) + ((ypos/8) * mvwidth) */
-    imv = (offset >> 6) - (xpos >> 6) + (xpos >> 3);
-    if (mode & INTER_1VMASK)
-    {
-        dx = px[0] = px[1] = px[2] = px[3] = video->motX[imv];
-        dy = py[0] = py[1] = py[2] = py[3] = video->motY[imv];
-        if ((dx & 3) == 0)
-        {
-            dx = dx >> 1;
-        }
-        else
-        {
-            /* x component of MV is or'ed for rounding (?) */
-            dx = (dx >> 1) | 1;
-        }
-
-        /* y component of motion vector; divide by 2 to   */
-        /* convert to full-pel resolution.                 */
-        if ((dy & 3) == 0)
-        {
-            dy = dy >> 1;
-        }
-        else
-        {
-            /* y component of MV is or'ed for rounding (?) */
-            dy = (dy >> 1) | 1;
-        }
-    }
-    else
-    {
-        px[0] = video->motX[imv];
-        px[1] = video->motX[imv+1];
-        px[2] = video->motX[imv+mvwidth];
-        px[3] = video->motX[imv+mvwidth+1];
-        xsum = px[0] + px[1] + px[2] + px[3];
-        dx = PV_SIGN(xsum) * (roundtab16[(PV_ABS(xsum)) & 0xF] +
-                              (((PV_ABS(xsum)) >> 4) << 1));
-        py[0] = video->motY[imv];
-        py[1] = video->motY[imv+1];
-        py[2] = video->motY[imv+mvwidth];
-        py[3] = video->motY[imv+mvwidth+1];
-        xsum = py[0] + py[1] + py[2] + py[3];
-        dy = PV_SIGN(xsum) * (roundtab16[(PV_ABS(xsum)) & 0xF] +
-                              (((PV_ABS(xsum)) >> 4) << 1));
-    }
-
-    /* Pointer to previous luminance frame */
-    c_prev  = prev->yChan;
-    if (!c_prev) {
-        ALOGE("b/35269635");
-        android_errorWriteLog(0x534e4554, "35269635");
-        return;
-    }
-
-    pred_block = video->mblock->pred_block;
-
-    /* some blocks have no residue or INTER4V */
-    /*if (mode == MODE_INTER4V)   05/08/15 */
-    /* Motion Compensation for an 8x8 block within a MB */
-    /* (4 MV per MB) */
-
-
-
-    /* Call function that performs luminance prediction */
-    /*      luminance_pred_mode_inter4v(xpos, ypos, px, py, c_prev,
-                    video->mblock->pred_block, width, height,
-                    round1, mvwidth, &xsum, &ysum);*/
-    c_comp = video->currVop->yChan + offset;
-
-
-    xpred = (int)((xpos << 1) + px[0]);
-    ypred = (int)((ypos << 1) + py[0]);
-
-    if ((CBP >> 5)&1)
-    {
-        pred = pred_block;
-        pred_width = 16;
-    }
-    else
-    {
-        pred = c_comp;
-        pred_width = width;
-    }
-
-    /* check whether the MV points outside the frame */
-    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
-            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
-    {   /*****************************/
-        /* (x,y) is inside the frame */
-        /*****************************/
-        ;
-        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-    }
-    else
-    {   /******************************/
-        /* (x,y) is outside the frame */
-        /******************************/
-        GetPredOutside(xpred, ypred, c_prev,
-                       pred, width, height, round1, pred_width);
-    }
-
-
-    /* Compute prediction values over current luminance MB */
-    /* (blocks 1); add motion vector prior to input;       */
-    /* add 8 to x_pos to advance to next block         */
-    xpred = (int)(((xpos + B_SIZE) << 1) + px[1]);
-    ypred = (int)((ypos << 1) + py[1]);
-
-    if ((CBP >> 4)&1)
-    {
-        pred = pred_block + 8;
-        pred_width = 16;
-    }
-    else
-    {
-        pred = c_comp + 8;
-        pred_width = width;
-    }
-
-    /* check whether the MV points outside the frame */
-    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
-            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
-    {   /*****************************/
-        /* (x,y) is inside the frame */
-        /*****************************/
-        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-    }
-    else
-    {   /******************************/
-        /* (x,y) is outside the frame */
-        /******************************/
-        GetPredOutside(xpred, ypred, c_prev,
-                       pred, width, height, round1, pred_width);
-    }
-
-
-
-    /* Compute prediction values over current luminance MB */
-    /* (blocks 2); add motion vector prior to input        */
-    /* add 8 to y_pos to advance to block on next row      */
-    xpred = (int)((xpos << 1) + px[2]);
-    ypred = (int)(((ypos + B_SIZE) << 1) + py[2]);
-
-    if ((CBP >> 3)&1)
-    {
-        pred = pred_block + 128;
-        pred_width = 16;
-    }
-    else
-    {
-        pred = c_comp + (width << 3);
-        pred_width = width;
-    }
-
-    /* check whether the MV points outside the frame */
-    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
-            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
-    {   /*****************************/
-        /* (x,y) is inside the frame */
-        /*****************************/
-        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-    }
-    else
-    {   /******************************/
-        /* (x,y) is outside the frame */
-        /******************************/
-        GetPredOutside(xpred, ypred, c_prev,
-                       pred, width, height, round1, pred_width);
-    }
-
-
-
-    /* Compute prediction values over current luminance MB */
-    /* (blocks 3); add motion vector prior to input;       */
-    /* add 8 to x_pos and y_pos to advance to next block   */
-    /* on next row                         */
-    xpred = (int)(((xpos + B_SIZE) << 1) + px[3]);
-    ypred = (int)(((ypos + B_SIZE) << 1) + py[3]);
-
-    if ((CBP >> 2)&1)
-    {
-        pred = pred_block + 136;
-        pred_width = 16;
-    }
-    else
-    {
-        pred = c_comp + (width << 3) + 8;
-        pred_width = width;
-    }
-
-    /* check whether the MV points outside the frame */
-    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) &&
-            ypred >= 0 && ypred <= ((height << 1) - (2*B_SIZE)))
-    {   /*****************************/
-        /* (x,y) is inside the frame */
-        /*****************************/
-        GetPredAdvBTable[ypred&1][xpred&1](c_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-    }
-    else
-    {   /******************************/
-        /* (x,y) is outside the frame */
-        /******************************/
-        GetPredOutside(xpred, ypred, c_prev,
-                       pred, width, height, round1, pred_width);
-    }
-    /* Call function to set de-blocking and de-ringing */
-    /*   semaphores for luminance                      */
-
-#ifdef PV_POSTPROC_ON
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        if (mode&INTER_1VMASK)
-        {
-            pp_dec_y = video->pstprcTypCur + imv;
-            ll[0] = 1;
-            ll[1] = mvwidth - 1;
-            ll[2] = 1;
-            ll[3] = -mvwidth - 1;
-            msk_deblock = pp_semaphore_luma(xpred, ypred, pp_dec_y,
-                                            video->pstprcTypPrv, ll, &tmp, px[0], py[0], mvwidth,
-                                            width, height);
-
-            pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                       ((imv + (xpos >> 3)) >> 2);
-
-            pp_semaphore_chroma_inter(xpred, ypred, pp_dec_u,
-                                      video->pstprcTypPrv, dx, dy, mvwidth, height, size,
-                                      tmp, msk_deblock);
-        }
-        else
-        {
-            /* Post-processing mode (MBM_INTER8) */
-            /* (deblocking and deringing) */
-            pp_dec_y = video->pstprcTypCur + imv;
-            *pp_dec_y = 4;
-            *(pp_dec_y + 1) = 4;
-            *(pp_dec_y + mvwidth) = 4;
-            *(pp_dec_y + mvwidth + 1) = 4;
-            pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                       ((imv + (xpos >> 3)) >> 2);
-            *pp_dec_u = 4;
-            pp_dec_u[size>>8] = 4;
-        }
-    }
-#endif
-
-
-    /* xpred and ypred calculation for Chrominance is */
-    /* in full-pel resolution.                        */
-
-    /* Chrominance */
-    /* width of chrominance data in pixels (x axis) */
-    width >>= 1;
-
-    /* height of chrominance data in pixels (y axis) */
-    height >>= 1;
-
-    /* Pointer to previous chrominance b frame */
-    cu_prev = prev->uChan;
-
-    /* Pointer to previous chrominance r frame */
-    cv_prev = prev->vChan;
-
-    /* x position in prediction data offset by motion vector */
-    /* xpred calculation for Chrominance is in full-pel      */
-    /* resolution.                                           */
-    xpred = xpos + dx;
-
-    /* y position in prediction data offset by motion vector */
-    /* ypred calculation for Chrominance is in full-pel      */
-    /* resolution.                                           */
-    ypred = ypos + dy;
-
-    cu_comp = video->currVop->uChan + (offset >> 2) + (xpos >> 2);
-    cv_comp = video->currVop->vChan + (offset >> 2) + (xpos >> 2);
-
-    /* Call function that performs chrominance prediction */
-    /*      chrominance_pred(xpred, ypred, cu_prev, cv_prev,
-            pred_block, width_uv, height_uv,
-            round1);*/
-    if (xpred >= 0 && xpred <= ((width << 1) - (2*B_SIZE)) && ypred >= 0 &&
-            ypred <= ((height << 1) - (2*B_SIZE)))
-    {
-        /*****************************/
-        /* (x,y) is inside the frame */
-        /*****************************/
-        if ((CBP >> 1)&1)
-        {
-            pred = pred_block + 256;
-            pred_width = 16;
-        }
-        else
-        {
-            pred = cu_comp;
-            pred_width = width;
-        }
-
-        /* Compute prediction for Chrominance b (block[4]) */
-        GetPredAdvBTable[ypred&1][xpred&1](cu_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-
-        if (CBP&1)
-        {
-            pred = pred_block + 264;
-            pred_width = 16;
-        }
-        else
-        {
-            pred = cv_comp;
-            pred_width = width;
-        }
-        /* Compute prediction for Chrominance r (block[5]) */
-        GetPredAdvBTable[ypred&1][xpred&1](cv_prev + (xpred >> 1) + ((ypred >> 1)*width),
-                                           pred, width, (pred_width << 1) | round1);
-
-        return ;
-    }
-    else
-    {
-        /******************************/
-        /* (x,y) is outside the frame */
-        /******************************/
-        if ((CBP >> 1)&1)
-        {
-            pred = pred_block + 256;
-            pred_width = 16;
-        }
-        else
-        {
-            pred = cu_comp;
-            pred_width = width;
-        }
-
-        /* Compute prediction for Chrominance b (block[4]) */
-        GetPredOutside(xpred, ypred,    cu_prev,
-                       pred, width, height, round1, pred_width);
-
-        if (CBP&1)
-        {
-            pred = pred_block + 264;
-            pred_width = 16;
-        }
-        else
-        {
-            pred = cv_comp;
-            pred_width = width;
-        }
-
-        /* Compute prediction for Chrominance r (block[5]) */
-        GetPredOutside(xpred, ypred,    cv_prev,
-                       pred, width, height, round1, pred_width);
-
-        return ;
-    }
-
-}
-
-/*** special function for skipped macroblock,  Aug 15, 2005 */
-void  SkippedMBMotionComp(
-    VideoDecData *video
-)
-{
-    Vop *prev = video->prevVop;
-    Vop *comp;
-    int ypos, xpos;
-    PIXEL *c_comp, *c_prev;
-    PIXEL *cu_comp, *cu_prev;
-    PIXEL *cv_comp, *cv_prev;
-    int width, width_uv;
-    int32 offset;
-#ifdef PV_POSTPROC_ON // 2/14/2001      
-    int imv;
-    int32 size = (int32) video->nTotalMB << 8;
-    uint8 *pp_dec_y, *pp_dec_u;
-    uint8 *pp_prev1;
-    int mvwidth = video->nMBPerRow << 1;
-#endif
-
-    width = video->width;
-    width_uv  = width >> 1;
-    ypos = video->mbnum_row << 4 ;
-    xpos = video->mbnum_col << 4 ;
-    offset = (int32)ypos * width + xpos;
-
-
-    /* zero motion compensation for previous frame */
-    /*mby*width + mbx;*/
-    c_prev  = prev->yChan;
-    if (!c_prev) {
-        ALOGE("b/35269635");
-        android_errorWriteLog(0x534e4554, "35269635");
-        return;
-    }
-    c_prev += offset;
-
-    /*by*width_uv + bx;*/
-    cu_prev = prev->uChan + (offset >> 2) + (xpos >> 2);
-    /*by*width_uv + bx;*/
-    cv_prev = prev->vChan + (offset >> 2) + (xpos >> 2);
-
-    comp = video->currVop;
-
-    c_comp  = comp->yChan + offset;
-    cu_comp = comp->uChan + (offset >> 2) + (xpos >> 2);
-    cv_comp = comp->vChan + (offset >> 2) + (xpos >> 2);
-
-
-    /* Copy previous reconstructed frame into the current frame */
-    PutSKIPPED_MB(c_comp,  c_prev, width);
-    PutSKIPPED_B(cu_comp, cu_prev, width_uv);
-    PutSKIPPED_B(cv_comp, cv_prev, width_uv);
-
-    /*  10/24/2000 post_processing semaphore generation */
-#ifdef PV_POSTPROC_ON // 2/14/2001
-    if (video->postFilterType != PV_NO_POST_PROC)
-    {
-        imv = (offset >> 6) - (xpos >> 6) + (xpos >> 3);
-        /* Post-processing mode (copy previous MB) */
-        pp_prev1 = video->pstprcTypPrv + imv;
-        pp_dec_y = video->pstprcTypCur + imv;
-        *pp_dec_y = *pp_prev1;
-        *(pp_dec_y + 1) = *(pp_prev1 + 1);
-        *(pp_dec_y + mvwidth) = *(pp_prev1 + mvwidth);
-        *(pp_dec_y + mvwidth + 1) = *(pp_prev1 + mvwidth + 1);
-
-        /* chrominance */
-        /*4*MB_in_width*MB_in_height*/
-        pp_prev1 = video->pstprcTypPrv + (size >> 6) +
-                   ((imv + (xpos >> 3)) >> 2);
-        pp_dec_u = video->pstprcTypCur + (size >> 6) +
-                   ((imv + (xpos >> 3)) >> 2);
-        *pp_dec_u = *pp_prev1;
-        pp_dec_u[size>>8] = pp_prev1[size>>8];
-    }
-#endif
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-
-    return;
-}
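With a zero motion vector and no residual, the skipped-macroblock path above reduces to a co-located copy from the previous frame: a 16x16 luminance block plus the two 8x8 chrominance blocks. A minimal sketch of that copy follows, assuming plain uint8_t planes with identical strides for the current and previous frames; the deleted PutSKIPPED_MB / PutSKIPPED_B operate on the decoder's PIXEL buffers and are followed by the post-processing semaphore bookkeeping shown above.

#include <stdint.h>
#include <string.h>

/* Copy one size x size block between two frames that share the same stride. */
static void copy_block(uint8_t *dst, const uint8_t *src, int stride, int size)
{
    for (int row = 0; row < size; row++)
        memcpy(dst + row * stride, src + row * stride, (size_t)size);
}

/* Zero-MV, zero-residual macroblock: 16x16 luma plus the two 8x8 chroma
 * blocks are taken unchanged from the co-located previous-frame pixels. */
static void skipped_mb_copy(uint8_t *cur_y, const uint8_t *prev_y, int width,
                            uint8_t *cur_u, const uint8_t *prev_u,
                            uint8_t *cur_v, const uint8_t *prev_v, int width_uv)
{
    copy_block(cur_y, prev_y, width, 16);      /* luminance macroblock */
    copy_block(cur_u, prev_u, width_uv, 8);    /* chrominance b block  */
    copy_block(cur_v, prev_v, width_uv, 8);    /* chrominance r block  */
}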
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h b/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
deleted file mode 100644
index 9cd4edc..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/mp4dec_lib.h
+++ /dev/null
@@ -1,334 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef _MP4DECLIB_H_
-#define _MP4DECLIB_H_
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include "mp4def.h" /* typedef */
-#include "mp4lib_int.h" /* main video structure */
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL VARIABLES REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; SIMPLE TYPEDEF'S
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; ENUMERATED TYPEDEF'S
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; STRUCTURES TYPEDEF'S
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; GLOBAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif /* __cplusplus */
-
-    /* defined in pvdec_api.c, these functions are not supposed to be   */
-    /* exposed to programmers outside PacketVideo.  08/15/2000.    */
-    uint VideoDecoderErrorDetected(VideoDecData *video);
-
-#ifdef ENABLE_LOG
-    void m4vdec_dprintf(char *format, ...);
-#define mp4dec_log(message) m4vdec_dprintf(message)
-#else
-#define mp4dec_log(message)
-#endif
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in frame_buffer.c */
-    PV_STATUS FillFrameBufferNew(BitstreamDecVideo *stream);
-    PV_STATUS FillFrameBuffer(BitstreamDecVideo *stream, int short_header);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in dc_ac_pred.c */
-    int cal_dc_scaler(int QP, int type);
-    PV_STATUS PV_DecodePredictedIntraDC(int compnum, BitstreamDecVideo *stream,
-                                        int16 *IntraDC_delta);
-
-    void    doDCACPrediction(VideoDecData *video, int comp, int16 *q_block,
-                             int *direction);
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-    void    doDCACPrediction_I(VideoDecData *video, int comp, int16 *q_block);
-#endif
-    /*--------------------------------------------------------------------------*/
-    /* defined in block_idct.c */
-    void MBlockIDCTAdd(VideoDecData *video, int nz_coefs[]);
-
-    void BlockIDCT(uint8 *dst, uint8 *pred, int16 *blk, int width, int nzcoefs,
-                   uint8 *bitmapcol, uint8 bitmaprow);
-
-    void MBlockIDCT(VideoDecData *video);
-    void BlockIDCT_intra(MacroBlock *mblock, PIXEL *c_comp, int comp, int width_offset);
-    /*--------------------------------------------------------------------------*/
-    /* defined in combined_decode.c */
-    PV_STATUS DecodeFrameCombinedMode(VideoDecData *video);
-    PV_STATUS GetMBheader(VideoDecData *video, int16 *QP);
-    PV_STATUS GetMBData(VideoDecData *video);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in datapart_decode.c */
-    PV_STATUS DecodeFrameDataPartMode(VideoDecData *video);
-    PV_STATUS GetMBheaderDataPart_DQUANT_DC(VideoDecData *video, int16 *QP);
-    PV_STATUS GetMBheaderDataPart_P(VideoDecData *video);
-    PV_STATUS DecodeDataPart_I_VideoPacket(VideoDecData *video, int slice_counter);
-    PV_STATUS DecodeDataPart_P_VideoPacket(VideoDecData *video, int slice_counter);
-    PV_STATUS GetMBData_DataPart(VideoDecData *video);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in packet_util.c */
-    PV_STATUS PV_ReadVideoPacketHeader(VideoDecData *video, int *next_MB);
-    PV_STATUS RecoverPacketError(BitstreamDecVideo *stream, int marker_length, int32 *nextVop);
-    PV_STATUS RecoverGOBError(BitstreamDecVideo *stream, int marker_length, int32 *vopPos);
-    PV_STATUS PV_GobHeader(VideoDecData *video);
-#ifdef PV_ANNEX_IJKT_SUPPORT
-    PV_STATUS PV_H263SliceHeader(VideoDecData *videoInt, int *next_MB);
-#endif
-    /*--------------------------------------------------------------------------*/
-    /* defined in motion_comp.c */
-    void MBMotionComp(VideoDecData *video, int CBP);
-    void  SkippedMBMotionComp(VideoDecData *video);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in chrominance_pred.c */
-    void chrominance_pred(
-        int xpred,          /* i */
-        int ypred,          /* i */
-        uint8 *cu_prev,     /* i */
-        uint8 *cv_prev,     /* i */
-        uint8 *pred_block,  /* i */
-        int width_uv,       /* i */
-        int height_uv,      /* i */
-        int round1
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in luminance_pred_mode_inter.c */
-    void luminance_pred_mode_inter(
-        int xpred,          /* i */
-        int ypred,          /* i */
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,  /* i */
-        int width,          /* i */
-        int height,         /* i */
-        int round1
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in luminance_pred_mode_inter4v.c */
-    void luminance_pred_mode_inter4v(
-        int xpos,           /* i */
-        int ypos,           /* i */
-        MOT *px,            /* i */
-        MOT *py,            /* i */
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,  /* i */
-        int width,          /* i */
-        int height,         /* i */
-        int round1,         /* i */
-        int mvwidth,            /* i */
-        int *xsum_ptr,          /* i/o */
-        int *ysum_ptr           /* i/o */
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in pp_semaphore_chroma_inter.c */
-#ifdef PV_POSTPROC_ON
-    void pp_semaphore_chroma_inter(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_u,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int height,     /* i */
-        int32   size,       /* i */
-        int mv_loc,     /* i */
-        uint8   msk_deblock /* i */
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in pp_semaphore_luma.c */
-    uint8 pp_semaphore_luma(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_y,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int *ll,        /* i */
-        int *mv_loc,    /* i/o */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int width,      /* i */
-        int height      /* i */
-    );
-#endif
-    /*--------------------------------------------------------------------------*/
-    /* defined in get_pred_adv_mb_add.c */
-    int GetPredAdvancedMB(
-        int xpos,
-        int ypos,
-        uint8 *c_prev,
-        uint8 *pred_block,
-        int width,
-        int rnd1
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in get_pred_adv_b_add.c */
-    int GetPredAdvancedBy0x0(
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,      /* i */
-        int width,      /* i */
-        int pred_width_rnd /* i */
-    );
-
-    int GetPredAdvancedBy0x1(
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,      /* i */
-        int width,      /* i */
-        int pred_width_rnd /* i */
-    );
-
-    int GetPredAdvancedBy1x0(
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,      /* i */
-        int width,      /* i */
-        int pred_width_rnd /* i */
-    );
-
-    int GetPredAdvancedBy1x1(
-        uint8 *c_prev,      /* i */
-        uint8 *pred_block,      /* i */
-        int width,      /* i */
-        int pred_width_rnd /* i */
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in get_pred_outside.c */
-    int GetPredOutside(
-        int xpos,
-        int ypos,
-        uint8 *c_prev,
-        uint8 *pred_block,
-        int width,
-        int height,
-        int rnd1,
-        int pred_width
-    );
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in find_pmvsErrRes.c */
-    void mv_prediction(VideoDecData *video, int block, MOT *mvx, MOT *mvy);
-
-    /*--------------------------------------------------------------------------*/
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in mb_utils.c */
-    void Copy_MB_into_Vop(uint8 *comp, int yChan[][NCOEFF_BLOCK], int width);
-    void Copy_B_into_Vop(uint8 *comp, int cChan[], int width);
-    void PutSKIPPED_MB(uint8 *comp, uint8 *c_prev, int width);
-    void PutSKIPPED_B(uint8 *comp, uint8 *c_prev, int width);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in vop.c */
-    PV_STATUS DecodeGOVHeader(BitstreamDecVideo *stream, uint32 *time_base);
-    PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer);
-    PV_STATUS DecodeVOPHeader(VideoDecData *video, Vop *currVop, Bool use_ext_tiemstamp);
-    PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop);
-    PV_STATUS PV_DecodeVop(VideoDecData *video);
-    uint32 CalcVopDisplayTime(Vol *currVol, Vop *currVop, int shortVideoHeader);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in post_proc.c */
-#ifdef PV_ANNEX_IJKT_SUPPORT
-    void H263_Deblock(uint8 *rec,   int width, int height, int16 *QP_store, uint8 *mode, int chr, int T);
-#endif
-    int  PostProcSemaphore(int16 *q_block);
-    void PostFilter(VideoDecData *video, int filter_type, uint8 *output);
-    void FindMaxMin(uint8 *ptr, int *min, int *max, int incr);
-    void DeringAdaptiveSmoothMMX(uint8 *img, int incr, int thres, int mxdf);
-    void AdaptiveSmooth_NoMMX(uint8 *Rec_Y, int v0, int h0, int v_blk, int h_blk,
-                              int thr, int width, int max_diff);
-    void Deringing_Luma(uint8 *Rec_Y, int width, int height, int16 *QP_store,
-                        int Combined, uint8 *pp_mod);
-    void Deringing_Chroma(uint8 *Rec_C, int width, int height, int16 *QP_store,
-                          int Combined, uint8 *pp_mod);
-    void CombinedHorzVertFilter(uint8 *rec, int width, int height, int16 *QP_store,
-                                int chr, uint8 *pp_mod);
-    void CombinedHorzVertFilter_NoSoftDeblocking(uint8 *rec, int width, int height, int16 *QP_store,
-            int chr, uint8 *pp_mod);
-    void CombinedHorzVertRingFilter(uint8 *rec, int width, int height,
-                                    int16 *QP_store, int chr, uint8 *pp_mod);
-
-    /*--------------------------------------------------------------------------*/
-    /* defined in conceal.c */
-    void ConcealTexture_I(VideoDecData *video, int32 startFirstPartition, int mb_start, int mb_stop,
-                          int slice_counter);
-    void ConcealTexture_P(VideoDecData *video, int mb_start, int mb_stop,
-                          int slice_counter);
-    void ConcealPacket(VideoDecData *video, int mb_start, int mb_stop,
-                       int slice_counter);
-    void CopyVopMB(Vop *curr, uint8 *prev, int mbnum, int width, int height);
-
-    /* define in vlc_dequant.c ,  09/18/2000*/
-#ifdef PV_SUPPORT_MAIN_PROFILE
-    int VlcDequantMpegIntraBlock(void *video, int comp, int switched,
-                                 uint8 *bitmapcol, uint8 *bitmaprow);
-    int VlcDequantMpegInterBlock(void *video, int comp,
-                                 uint8 *bitmapcol, uint8 *bitmaprow);
-#endif
-    int VlcDequantH263IntraBlock(VideoDecData *video, int comp, int switched,
-                                 uint8 *bitmapcol, uint8 *bitmaprow);
-    int VlcDequantH263IntraBlock_SH(VideoDecData *video, int comp,
-                                    uint8 *bitmapcol, uint8 *bitmaprow);
-    int VlcDequantH263InterBlock(VideoDecData *video, int comp,
-                                 uint8 *bitmapcol, uint8 *bitmaprow);
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-/*----------------------------------------------------------------------------
-; END
-----------------------------------------------------------------------------*/
-#endif
-
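The four GetPredAdvancedBy* prototypes above are reached through a 2x2 function-pointer table indexed by the half-pel parity bits, as in the GetPredAdvBTable[ypred&1][xpred&1] calls earlier in this diff. A sketch of such a table follows; the row/column assignment of the four variants is assumed from their names rather than taken from this header.

typedef int (*GetPredAdvBFunc)(uint8 *c_prev, uint8 *pred_block,
                               int width, int pred_width_rnd);

/* [ypred & 1][xpred & 1]: row = vertical half-pel bit, column = horizontal
   half-pel bit (assumed mapping). */
static const GetPredAdvBFunc GetPredAdvBTable[2][2] =
{
    { &GetPredAdvancedBy0x0, &GetPredAdvancedBy0x1 },
    { &GetPredAdvancedBy1x0, &GetPredAdvancedBy1x1 },
};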
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
deleted file mode 100644
index b36050c..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/post_filter.cpp
+++ /dev/null
@@ -1,585 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#include    "mp4dec_lib.h"
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-#include    "motion_comp.h"
-#include "mbtype_mode.h"
-const static int STRENGTH_tab[] = {0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12};
-#endif
-
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-void PostFilter(
-    VideoDecData *video,
-    int filter_type,
-    uint8 *output)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    uint8 *pp_mod;
-    int16 *QP_store;
-    int combined_with_deblock_filter;
-    int nTotalMB = video->nTotalMB;
-    int width, height;
-    int32 size;
-    int softDeblocking;
-    uint8 *decodedFrame = video->videoDecControls->outputFrame;
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    width = video->width;
-    height = video->height;
-    size = (int32)width * height;
-
-    oscl_memcpy(output, decodedFrame, size);
-    oscl_memcpy(output + size, decodedFrame + size, (size >> 2));
-    oscl_memcpy(output + size + (size >> 2), decodedFrame + size + (size >> 2), (size >> 2));
-
-    if (filter_type == 0)
-        return;
-
-    /* The softDecoding cutoff corresponds to ~93000 bps for QCIF 15fps clip  */
-    if (PVGetDecBitrate(video->videoDecControls) > (100*video->frameRate*(size >> 12)))  // MC_sofDeblock
-        softDeblocking = FALSE;
-    else
-        softDeblocking = TRUE;
-
-    combined_with_deblock_filter = filter_type & PV_DEBLOCK;
-    QP_store = video->QPMB;
-
-    /* Luma */
-    pp_mod = video->pstprcTypCur;
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, width, height, QP_store, 0, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, width, height,
-                                       QP_store, 0, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, width, height,
-                                                        QP_store, 0, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Luma(output, width, height, QP_store,
-                           combined_with_deblock_filter, pp_mod);
-
-        }
-    }
-
-    /* Chroma */
-
-    pp_mod += (nTotalMB << 2);
-    output += size;
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, (int)(width >> 1), (int)(height >> 1), QP_store, (int) 1, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, (int)(width >> 1),
-                                       (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, (int)(width >> 1),
-                                                        (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Chroma(output, (int)(width >> 1),
-                             (int)(height >> 1), QP_store,
-                             combined_with_deblock_filter, pp_mod);
-        }
-    }
-
-    pp_mod += nTotalMB;
-    output += (size >> 2);
-
-    if ((filter_type & PV_DEBLOCK) && (filter_type & PV_DERING))
-    {
-        CombinedHorzVertRingFilter(output, (int)(width >> 1), (int)(height >> 1), QP_store, (int) 1, pp_mod);
-    }
-    else
-    {
-        if (filter_type & PV_DEBLOCK)
-        {
-            if (softDeblocking)
-            {
-                CombinedHorzVertFilter(output, (int)(width >> 1),
-                                       (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-            else
-            {
-                CombinedHorzVertFilter_NoSoftDeblocking(output, (int)(width >> 1),
-                                                        (int)(height >> 1), QP_store, (int) 1, pp_mod);
-            }
-        }
-        if (filter_type & PV_DERING)
-        {
-            Deringing_Chroma(output, (int)(width >> 1),
-                             (int)(height >> 1), QP_store,
-                             combined_with_deblock_filter, pp_mod);
-        }
-    }
-
-    /*  swap current pp_mod to prev_frame pp_mod */
-    pp_mod = video->pstprcTypCur;
-    video->pstprcTypCur = video->pstprcTypPrv;
-    video->pstprcTypPrv = pp_mod;
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return;
-}
-#endif
-
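The three oscl_memcpy calls at the top of PostFilter rely on a contiguous YUV 4:2:0 layout: a luminance plane of width*height bytes followed by two chrominance planes of a quarter of that size each. A small sketch of deriving the plane pointers under that assumption (the helper name is illustrative):

#include <stdint.h>

/* Contiguous YUV 4:2:0 frame: Y plane, then Cb and Cr at a quarter of the
 * luma size each -- the same offsets used by the copies above. */
static void yuv420_planes(uint8_t *frame, int width, int height,
                          uint8_t **y, uint8_t **u, uint8_t **v)
{
    int32_t size = (int32_t)width * height;   /* luma plane size in bytes */
    *y = frame;
    *u = frame + size;
    *v = frame + size + (size >> 2);
}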
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-void H263_Deblock(uint8 *rec,
-                  int width,
-                  int height,
-                  int16 *QP_store,
-                  uint8 *mode,
-                  int chr, int annex_T)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int i, j, k;
-    uint8 *rec_y;
-    int tmpvar;
-    int mbnum, strength, A_D, d1_2, d1, d2, A, B, C, D, b_size;
-    int d, offset, nMBPerRow, nMBPerCol, width2 = (width << 1);
-    /* MAKE SURE I-VOP INTRA MACROBLOCKS ARE SET TO NON-SKIPPED MODE*/
-    mbnum = 0;
-
-    if (chr)
-    {
-        nMBPerRow = width >> 3;
-        nMBPerCol = height >> 3;
-        b_size = 8;
-    }
-    else
-    {
-        nMBPerRow = width >> 4;
-        nMBPerCol = height >> 4;
-        b_size = 16;
-    }
-
-
-    /********************************* VERTICAL FILTERING ****************************/
-    /* vertical filtering of mid sections; no need to check neighboring QPs, etc. */
-    if (!chr)
-    {
-        rec_y = rec + (width << 3);
-        for (i = 0; i < (height >> 4); i++)
-        {
-            for (j = 0; j < (width >> 4); j++)
-            {
-                if (mode[mbnum] != MODE_SKIPPED)
-                {
-                    k = 16;
-                    strength = STRENGTH_tab[QP_store[mbnum]];
-                    while (k--)
-                    {
-                        A =  *(rec_y - width2);
-                        D = *(rec_y + width);
-                        A_D = A - D;
-                        C = *rec_y;
-                        B = *(rec_y - width);
-                        d = (((C - B) << 2) + A_D);
-
-                        if (d < 0)
-                        {
-                            d1 = -(-d >> 3);
-                            if (d1 < -(strength << 1))
-                            {
-                                d1 = 0;
-                            }
-                            else if (d1 < -strength)
-                            {
-                                d1 = -d1 - (strength << 1);
-                            }
-                            d1_2 = -d1 >> 1;
-                        }
-                        else
-                        {
-                            d1 = d >> 3;
-                            if (d1 > (strength << 1))
-                            {
-                                d1 = 0;
-                            }
-                            else if (d1 > strength)
-                            {
-                                d1 = (strength << 1) - d1;
-                            }
-                            d1_2 = d1 >> 1;
-                        }
-
-                        if (A_D < 0)
-                        {
-                            d2 = -(-A_D >> 2);
-                            if (d2 < -d1_2)
-                            {
-                                d2 = -d1_2;
-                            }
-                        }
-                        else
-                        {
-                            d2 = A_D >> 2;
-                            if (d2 > d1_2)
-                            {
-                                d2 = d1_2;
-                            }
-                        }
-
-                        *(rec_y - width2) = A - d2;
-                        tmpvar = B + d1;
-                        CLIP_RESULT(tmpvar)
-                        *(rec_y - width) = tmpvar;
-                        tmpvar = C - d1;
-                        CLIP_RESULT(tmpvar)
-                        *rec_y = tmpvar;
-                        *(rec_y + width) = D + d2;
-                        rec_y++;
-                    }
-                }
-                else
-                {
-                    rec_y += b_size;
-                }
-                mbnum++;
-            }
-            rec_y += (15 * width);
-
-        }
-    }
-
-    /* VERTICAL boundary blocks */
-
-
-    rec_y = rec + width * b_size;
-
-    mbnum = nMBPerRow;
-    for (i = 0; i < nMBPerCol - 1; i++)
-    {
-        for (j = 0; j < nMBPerRow; j++)
-        {
-            if (mode[mbnum] != MODE_SKIPPED || mode[mbnum - nMBPerRow] != MODE_SKIPPED)
-            {
-                k = b_size;
-                if (mode[mbnum] != MODE_SKIPPED)
-                {
-                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum]] : QP_store[mbnum])];
-                }
-                else
-                {
-                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum - nMBPerRow]] : QP_store[mbnum - nMBPerRow])];
-                }
-
-                while (k--)
-                {
-                    A =  *(rec_y - width2);
-                    D =  *(rec_y + width);
-                    A_D = A - D;
-                    C = *rec_y;
-                    B = *(rec_y - width);
-                    d = (((C - B) << 2) + A_D);
-
-                    if (d < 0)
-                    {
-                        d1 = -(-d >> 3);
-                        if (d1 < -(strength << 1))
-                        {
-                            d1 = 0;
-                        }
-                        else if (d1 < -strength)
-                        {
-                            d1 = -d1 - (strength << 1);
-                        }
-                        d1_2 = -d1 >> 1;
-                    }
-                    else
-                    {
-                        d1 = d >> 3;
-                        if (d1 > (strength << 1))
-                        {
-                            d1 = 0;
-                        }
-                        else if (d1 > strength)
-                        {
-                            d1 = (strength << 1) - d1;
-                        }
-                        d1_2 = d1 >> 1;
-                    }
-
-                    if (A_D < 0)
-                    {
-                        d2 = -(-A_D >> 2);
-                        if (d2 < -d1_2)
-                        {
-                            d2 = -d1_2;
-                        }
-                    }
-                    else
-                    {
-                        d2 = A_D >> 2;
-                        if (d2 > d1_2)
-                        {
-                            d2 = d1_2;
-                        }
-                    }
-
-                    *(rec_y - width2) = A - d2;
-                    tmpvar = B + d1;
-                    CLIP_RESULT(tmpvar)
-                    *(rec_y - width) = tmpvar;
-                    tmpvar = C - d1;
-                    CLIP_RESULT(tmpvar)
-                    *rec_y = tmpvar;
-                    *(rec_y + width) = D + d2;
-                    rec_y++;
-                }
-            }
-            else
-            {
-                rec_y += b_size;
-            }
-            mbnum++;
-        }
-        rec_y += ((b_size - 1) * width);
-
-    }
-
-
-    /***************************HORIZONTAL FILTERING ********************************************/
-    mbnum = 0;
-    /* HORIZONTAL INNER */
-    if (!chr)
-    {
-        rec_y = rec + 8;
-        offset = width * b_size - b_size;
-
-        for (i = 0; i < nMBPerCol; i++)
-        {
-            for (j = 0; j < nMBPerRow; j++)
-            {
-                if (mode[mbnum] != MODE_SKIPPED)
-                {
-                    k = 16;
-                    strength = STRENGTH_tab[QP_store[mbnum]];
-                    while (k--)
-                    {
-                        A =  *(rec_y - 2);
-                        D =  *(rec_y + 1);
-                        A_D = A - D;
-                        C = *rec_y;
-                        B = *(rec_y - 1);
-                        d = (((C - B) << 2) + A_D);
-
-                        if (d < 0)
-                        {
-                            d1 = -(-d >> 3);
-                            if (d1 < -(strength << 1))
-                            {
-                                d1 = 0;
-                            }
-                            else if (d1 < -strength)
-                            {
-                                d1 = -d1 - (strength << 1);
-                            }
-                            d1_2 = -d1 >> 1;
-                        }
-                        else
-                        {
-                            d1 = d >> 3;
-                            if (d1 > (strength << 1))
-                            {
-                                d1 = 0;
-                            }
-                            else if (d1 > strength)
-                            {
-                                d1 = (strength << 1) - d1;
-                            }
-                            d1_2 = d1 >> 1;
-                        }
-
-                        if (A_D < 0)
-                        {
-                            d2 = -(-A_D >> 2);
-                            if (d2 < -d1_2)
-                            {
-                                d2 = -d1_2;
-                            }
-                        }
-                        else
-                        {
-                            d2 = A_D >> 2;
-                            if (d2 > d1_2)
-                            {
-                                d2 = d1_2;
-                            }
-                        }
-
-                        *(rec_y - 2) = A - d2;
-                        tmpvar = B + d1;
-                        CLIP_RESULT(tmpvar)
-                        *(rec_y - 1) = tmpvar;
-                        tmpvar = C - d1;
-                        CLIP_RESULT(tmpvar)
-                        *rec_y = tmpvar;
-                        *(rec_y + 1) = D + d2;
-                        rec_y += width;
-                    }
-                    rec_y -= offset;
-                }
-                else
-                {
-                    rec_y += b_size;
-                }
-                mbnum++;
-            }
-            rec_y += (15 * width);
-
-        }
-    }
-
-
-
-    /* HORIZONTAL EDGE */
-    rec_y = rec + b_size;
-    offset = width * b_size - b_size;
-    mbnum = 1;
-    for (i = 0; i < nMBPerCol; i++)
-    {
-        for (j = 0; j < nMBPerRow - 1; j++)
-        {
-            if (mode[mbnum] != MODE_SKIPPED || mode[mbnum-1] != MODE_SKIPPED)
-            {
-                k = b_size;
-                if (mode[mbnum] != MODE_SKIPPED)
-                {
-                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum]] : QP_store[mbnum])];
-                }
-                else
-                {
-                    strength = STRENGTH_tab[(annex_T ?  MQ_chroma_QP_table[QP_store[mbnum - 1]] : QP_store[mbnum - 1])];
-                }
-
-                while (k--)
-                {
-                    A =  *(rec_y - 2);
-                    D =  *(rec_y + 1);
-                    A_D = A - D;
-                    C = *rec_y;
-                    B = *(rec_y - 1);
-                    d = (((C - B) << 2) + A_D);
-
-                    if (d < 0)
-                    {
-                        d1 = -(-d >> 3);
-                        if (d1 < -(strength << 1))
-                        {
-                            d1 = 0;
-                        }
-                        else if (d1 < -strength)
-                        {
-                            d1 = -d1 - (strength << 1);
-                        }
-                        d1_2 = -d1 >> 1;
-                    }
-                    else
-                    {
-                        d1 = d >> 3;
-                        if (d1 > (strength << 1))
-                        {
-                            d1 = 0;
-                        }
-                        else if (d1 > strength)
-                        {
-                            d1 = (strength << 1) - d1;
-                        }
-                        d1_2 = d1 >> 1;
-                    }
-
-                    if (A_D < 0)
-                    {
-                        d2 = -(-A_D >> 2);
-                        if (d2 < -d1_2)
-                        {
-                            d2 = -d1_2;
-                        }
-                    }
-                    else
-                    {
-                        d2 = A_D >> 2;
-                        if (d2 > d1_2)
-                        {
-                            d2 = d1_2;
-                        }
-                    }
-
-                    *(rec_y - 2) = A - d2;
-                    tmpvar = B + d1;
-                    CLIP_RESULT(tmpvar)
-                    *(rec_y - 1) = tmpvar;
-                    tmpvar = C - d1;
-                    CLIP_RESULT(tmpvar)
-                    *rec_y = tmpvar;
-                    *(rec_y + 1) = D + d2;
-                    rec_y += width;
-                }
-                rec_y -= offset;
-            }
-            else
-            {
-                rec_y += b_size;
-            }
-            mbnum++;
-        }
-        rec_y += ((width * (b_size - 1)) + b_size);
-        mbnum++;
-    }
-
-    return;
-}
-#endif
-
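Each of the four filtering passes in H263_Deblock above applies the same update to four consecutive pixels A, B, C, D across a block edge, with the edge lying between B and C. That per-edge operation, factored out as a standalone sketch: strength comes from STRENGTH_tab[QP], the sign handling reproduces the deleted code's truncation toward zero, and only the two inner pixels are clipped, mirroring CLIP_RESULT.

/* A,B,C,D are four consecutive pixels across a block edge (edge between B
 * and C); strength = STRENGTH_tab[QP].  Only B and C are clipped to [0,255],
 * as CLIP_RESULT does in the deleted code. */
static void deblock_edge4(int *A, int *B, int *C, int *D, int strength)
{
    int A_D = *A - *D;
    int d   = ((*C - *B) << 2) + A_D;
    int d1, d1_2, d2, t;

    /* up-down ramp of d/8: rises to +/-strength, falls back to zero by
       +/-2*strength, zero beyond that */
    d1 = (d < 0) ? -((-d) >> 3) : (d >> 3);        /* d/8, truncated toward 0 */
    if (d1 < -(strength << 1) || d1 > (strength << 1))
        d1 = 0;
    else if (d1 < -strength)
        d1 = -d1 - (strength << 1);
    else if (d1 > strength)
        d1 = (strength << 1) - d1;
    d1_2 = (d1 < 0) ? ((-d1) >> 1) : (d1 >> 1);

    /* weaker correction for the outer pixels, limited to half of |d1| */
    d2 = (A_D < 0) ? -((-A_D) >> 2) : (A_D >> 2);
    if (d2 < -d1_2) d2 = -d1_2;
    if (d2 >  d1_2) d2 =  d1_2;

    *A -= d2;
    t = *B + d1;  *B = (t < 0) ? 0 : ((t > 255) ? 255 : t);
    t = *C - d1;  *C = (t < 0) ? 0 : ((t > 255) ? 255 : t);
    *D += d2;
}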
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp
deleted file mode 100644
index 3abc6be..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/post_proc_semaphore.cpp
+++ /dev/null
@@ -1,247 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    q_block = pointer to buffer of inverse quantized DCT coefficients of type
-              int for intra-VOP mode or buffer of residual data of type int
-              for inter-VOP mode
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    postmode = post processing semaphore with the vertical deblocking,
-               horizontal deblocking, and deringing bits set up accordingly
-
- Pointers and Buffers Modified:
-    None
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function sets up the postmode semaphore based on the contents of the
- buffer pointed to by q_block. The function starts out with the assumption
- that all entries of q_block, except for the first entry (q_block[0]), are
- zero. This case can induce horizontal and vertical blocking artifacts,
- therefore, both horizontal and vertical deblocking bits are enabled.
-
- The following conditions are tested when setting up the horizontal/vertical
- deblocking and deringing bits:
- 1. When only the elements of the top row of the B_SIZE x B_SIZE block
-    (q_block[n], n = 0,..., B_SIZE-1) are non-zero, vertical blocking artifacts
-    may result, therefore, only the vertical deblocking bit is enabled.
-    Otherwise, the vertical deblocking bit is disabled.
- 2. When only the elements of the far left column of the B_SIZE x B_SIZE block
-    (q_block[n*B_SIZE], n = 0, ..., B_SIZE-1) are non-zero, horizontal blocking
-    artifacts may result, therefore, only the horizontal deblocking bit is
-    enabled. Otherwise, the horizontal deblocking bit is disabled.
- 3. If any non-zero elements exist in positions other than q_block[0],
-    q_block[1], or q_block[B_SIZE], the deringing bit is enabled. Otherwise,
-    it is disabled.
-
- The 3 least significant bits of postmode defines vertical or horizontal
- deblocking and deringing.
-
- The valid values are shown below:
- -------------------------------------------------------
- |           Type                 | Enabled | Disabled |
- -------------------------------------------------------
- | Vertical Deblocking (Bit #0)   |    1    |     0    |
- -------------------------------------------------------
- | Horizontal Deblocking (Bit #1) |    1    |     0    |
- -------------------------------------------------------
- | Deringing (Bit #2)             |    1    |     0    |
- -------------------------------------------------------
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_lib.h"
-#include    "mp4def.h"
-#include    "post_proc.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-int PostProcSemaphore(
-    int16 *q_block)
-{
-    /*----------------------------------------------------------------------------
-    ; Define all local variables
-    ----------------------------------------------------------------------------*/
-    int i, j;
-
-    /* Set default value to vertical and horizontal deblocking enabled */
-    /* Initial assumption is that only q_block[0] element is non-zero, */
-    /* therefore, vertical and horizontal deblocking bits are set to 1 */
-    int postmode = 0x3;
-
-    /*----------------------------------------------------------------------------
-    ; Function body here
-    ----------------------------------------------------------------------------*/
-    /* Vertical deblocking bit is enabled when only the elements of the top */
-    /* row of the B_SIZE x B_SIZE block, i.e., q_block[n], n = 0,...,       */
-    /* B_SIZE-1, are non-zero. Since the initial assumption is that all     */
-    /* elements except q_block[0] are zero, we need to check the remaining  */
-    /* elements in the top row to determine if all or some are non-zero.    */
-    if (q_block[1] != 0)
-    {
-        /* At this point, q_block[0] and q_block[1] are non-zero, while */
-        /* q_block[n], n = 2,..., B_SIZE-1, are zero. Therefore, we     */
-        /* need to disable vertical deblocking                          */
-        postmode &= 0xE;
-    }
-
-    for (i = 2; i < B_SIZE; i++)
-    {
-        if (q_block[i])
-        {
-            /* Check if q_block[n], n = 2,..., B_SIZE-1, are non-zero.*/
-            /* If any of them turn out to be non-zero, we need to     */
-            /* disable vertical deblocking.                           */
-            postmode &= 0xE;
-
-            /* Deringing is enabled if any nonzero elements exist in */
-            /* positions other than q_block[0], q_block[1] or        */
-            /* q_block[B_SIZE].                                      */
-            postmode |= 0x4;
-
-            break;
-        }
-    }
-
-    /* Horizontal deblocking bit is enabled when only the elements of */
-    /* the far left column, i.e., q_block[n*B_SIZE], n = 0, ...,      */
-    /* B_SIZE-1, are non-zero. Since the initial assumption is that   */
-    /* all elements except q_block[0] are zero, we need to check the  */
-    /* remaining elements in the far left column to determine if all  */
-    /* or some are non-zero.                                          */
-    if (q_block[B_SIZE])
-    {
-        /* At this point, only q_block[0] and q_block[B_SIZE] are non-zero, */
-        /* while q_block[n*B_SIZE], n = 2, 3,..., B_SIZE-1, are zero.       */
-        /* Therefore, we need to disable horizontal deblocking.             */
-        postmode &= 0xD;
-    }
-
-    for (i = 16; i < NCOEFF_BLOCK; i += B_SIZE)
-    {
-        if (q_block[i])
-        {
-            /* Check if q_block[n], n = 2*B_SIZE,...,(B_SIZE-1)*B_SIZE,  */
-            /* are non-zero. If any of them turn out to be non-zero,     */
-            /* we need to disable horizontal deblocking.                 */
-            postmode &= 0xD;
-
-            /* Deringing is enabled if any nonzero elements exist in */
-            /* positions other than q_block[0], q_block[1] or        */
-            /* q_block[B_SIZE].                                      */
-            postmode |= 0x4;
-
-            break;
-        }
-    }
-
-    /* At this point, only the first row and far left column elements  */
-    /* have been tested. If the deringing bit is still not set at this */
-    /* point, check the rest of q_block to determine if the elements   */
-    /* are non-zero. If any element besides q_block[0], q_block[1],    */
-    /* or q_block[B_SIZE] is non-zero, the deringing bit must be set.  */
-    if ((postmode & 0x4) == 0)
-    {
-        for (i = 1; i < B_SIZE; i++)
-        {
-            for (j = 1; j < B_SIZE; j++)
-            {
-                if (q_block[(i<<3)+j])
-                {
-                    /* At this point, q_block[0] and another q_block */
-                    /* element are non-zero, therefore, we need to   */
-                    /* disable vertical and horizontal deblocking    */
-                    postmode &= 0xC;
-
-                    /* Deringing is enabled if any nonzero elements exist in */
-                    /* positions other than q_block[0], q_block[1] or        */
-                    /* q_block[B_SIZE].                                      */
-                    postmode |= 0x4;
-
-                    /* Set outer FOR loop count to B_SIZE to get out of */
-                    /* outer FOR loop                                   */
-                    i = B_SIZE;
-
-                    /* Get out of inner FOR loop */
-                    break;
-                }
-            }
-        }
-    }
-
-    /*----------------------------------------------------------------------------
-    ; Return nothing or data or data pointer
-    ----------------------------------------------------------------------------*/
-    return (postmode);
-}
-
-#endif
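The 3-bit value PostProcSemaphore returns encodes the three decisions listed in the table at the top of the file. Spelled out as named constants with a small decode helper (the macro and function names are illustrative, not part of the deleted sources):

#include <stdio.h>

#define PP_VERT_DEBLOCK  0x1   /* bit 0: vertical deblocking   */
#define PP_HORZ_DEBLOCK  0x2   /* bit 1: horizontal deblocking */
#define PP_DERING        0x4   /* bit 2: deringing             */

static void print_postmode(int postmode)
{
    printf("vertical deblocking:   %s\n", (postmode & PP_VERT_DEBLOCK) ? "on" : "off");
    printf("horizontal deblocking: %s\n", (postmode & PP_HORZ_DEBLOCK) ? "on" : "off");
    printf("deringing:             %s\n", (postmode & PP_DERING)       ? "on" : "off");
}

The default 0x3 produced for a DC-only block therefore means both deblocking directions with no deringing, while a block with non-zero coefficients outside the first row and column collapses to 0x4, deringing only.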
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp
deleted file mode 100644
index 7c20222..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp
+++ /dev/null
@@ -1,262 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    xpred = x-axis coordinate of the block used for prediction (int)
-    ypred = y-axis coordinate of the block used for prediction (int)
-    pp_dec_u = pointer to the post processing semaphore for chrominance
-               (uint8)
-    pstprcTypPrv = pointer to the previous frame's post processing type
-                   (uint8)
-    dx = horizontal component of the motion vector (int)
-    dy = vertical component of the motion vector (int)
-    mvwidth = number of blocks per row in the luminance VOP (int)
-    height = luminance VOP height in pixels (int)
-    size = total number of pixel in the current luminance VOP (int)
-    mv_loc = flag indicating location of the motion compensated
-         (x,y) position with respect to the luminance MB (int);
-         0 -> inside MB, 1 -> outside MB
-    msk_deblock = flag indicating whether to perform deblocking
-              (msk_deblock = 0) or not (msk_deblock = 1) (uint8)
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    None
-
- Pointers and Buffers Modified:
-    pp_dec_u contents are the updated semaphore propagation data
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This function performs post processing semaphore propagation processing
- after chrominance prediction in interframe processing mode.
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_api.h"
-#include    "mp4def.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-    /*----------------------------------------------------------------------------
-    ; FUNCTION CODE
-    ----------------------------------------------------------------------------*/
-    void pp_semaphore_chroma_inter(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_u,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int height,     /* i */
-        int32   size,       /* i */
-        int mv_loc,     /* i */
-        uint8   msk_deblock /* i */
-    )
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        int mmvy, mmvx, nmvy, nmvx;
-        uint8 *pp_prev1, *pp_prev2, *pp_prev3, *pp_prev4;
-
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-
-        /* 09/28/2000, modify semaphore propagation to */
-        /* accommodate smart indexing */
-        mmvx = xpred >> 4;  /* block x coor */
-        nmvx = mmvx;
-
-        mmvy = ypred >> 4;  /* block y coor */
-        nmvy = mmvy;
-
-        /* Check if MV is outside the frame */
-        if (mv_loc == 1)
-        {
-            /* Perform boundary check */
-            if (nmvx < 0)
-            {
-                nmvx = 0;
-            }
-            else if (nmvx > mvwidth - 1)
-            {
-                nmvx = mvwidth - 1;
-            }
-
-            if (nmvy < 0)
-            {
-                nmvy = 0;
-            }
-            else if (nmvy > (height >> 4) - 1)
-            {
-                nmvy = (height >> 4) - 1;
-            }
-        }
-
-        /* Calculate pointer to first chrominance b semaphores in       */
-        /* pstprcTypPrv, i.e., first chrominance b semaphore is in      */
-        /* (pstprcTypPrv + (size>>6)).                  */
-        /* Since total number of chrominance blocks per row in a VOP    */
-        /* is half of the total number of luminance blocks per row in a */
-        /* VOP, we use (mvwidth >> 1) when calculating the row offset.  */
-        pp_prev1 = pstprcTypPrv + (size >> 6) + nmvx + nmvy * (mvwidth >> 1) ;
-
-        /* Check if MV is a multiple of 16 */
-        /*  1/5/01, make sure it doesn't go out of bound */
-        if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 4) - 1))
-        {   /* dy is not a multiple of 16 */
-
-            /* pp_prev3 is the block below pp_prev1 block */
-            pp_prev3 = pp_prev1 + (mvwidth >> 1);
-        }
-        else
-        {   /* dy is a multiple of 16 */
-            pp_prev3 = pp_prev1;
-        }
-
-        /*  1/5/01, make sure it doesn't go out of bound */
-        if (((dx&0xF) != 0) && (mmvx + 1 < (mvwidth >> 1) - 1))
-        {   /* dx is not a multiple of 16 */
-
-            /* pp_prev2 is the block to the right of pp_prev1 block */
-            pp_prev2 = pp_prev1 + 1;
-
-            /* pp_prev4 is the block to the right of the block */
-            /* below pp_prev1 block                */
-            pp_prev4 = pp_prev3 + 1;
-        }
-        else
-        {   /* dx is a multiple of 16 */
-
-            pp_prev2 = pp_prev1;
-            pp_prev4 = pp_prev3;
-        }
-
-        /* Advance offset to location of first Chrominance R semaphore in */
-        /* pstprcTypPrv. Since the number of pixels in a Chrominance VOP  */
-        /* is (number of pixels in Luminance VOP/4), and there are 64     */
-        /* pixels in an 8x8 Chrominance block, the offset can be      */
-        /* calculated as:                         */
-        /*  mv_loc = (number of pixels in Luminance VOP/(4*64))   */
-        /*         = size/256 = size>>8               */
-        mv_loc = (size >> 8);
-
-        /*  11/3/00, change the propagation for deblocking */
-        if (msk_deblock == 0)
-        {
-
-            /* Deblocking semaphore propagation for Chrominance */
-            /* b semaphores                     */
-            *(pp_dec_u) = 0;
-
-            /* Advance offset to point to Chrominance r semaphores */
-            pp_dec_u += mv_loc;
-
-            /* Deblocking semaphore propagation for Chrominance */
-            /* r semaphores                     */
-            *(pp_dec_u) = 0;
-        }
-        else
-        {
-            /* Deringing semaphore propagation for Chrominance B block */
-            if ((*(pp_dec_u)&4) == 0)
-            {
-                *(pp_dec_u) |= ((*(pp_prev1) | *(pp_prev2) |
-                                 *(pp_prev3) | *(pp_prev4)) & 0x4);
-            }
-
-            /* Advance offset to point to Chrominance r semaphores */
-            pp_dec_u += mv_loc;
-            pp_prev1 += mv_loc;
-            pp_prev2 += mv_loc;
-            pp_prev3 += mv_loc;
-            pp_prev4 += mv_loc;
-
-            /* Deringing semaphore propagation for Chrominance R */
-            if ((*(pp_dec_u)&4) == 0)
-            {
-                *(pp_dec_u) |= ((*(pp_prev1) | *(pp_prev2) |
-                                 *(pp_prev3) | *(pp_prev4)) & 0x4);
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return;
-    }
-#ifdef __cplusplus
-}
-#endif
-
-#endif
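The pointer arithmetic above reduces to picking up to four neighboring 8x8 semaphore entries from the previous frame: the anchor block plus a right, lower, and diagonal neighbor whenever the motion vector is not a multiple of 16 in that direction. A simplified sketch of just that index selection, assuming mvwidth_uv = mvwidth >> 1 chroma blocks per row and omitting the frame-edge clamping and bound checks that the deleted code performs:

/* Indices of the (up to) four previous-frame 8x8 semaphore entries that a
 * motion-compensated chroma block can overlap, relative to the chrominance b
 * semaphore base.  mvwidth_uv is assumed to be mvwidth >> 1, the number of
 * chroma blocks per row. */
static void chroma_semaphore_sources(int xpred, int ypred, int dx, int dy,
                                     int mvwidth_uv, int idx[4])
{
    int bx = xpred >> 4;              /* anchor block column              */
    int by = ypred >> 4;              /* anchor block row                 */
    int right = ((dx & 0xF) != 0);    /* MV not on a block boundary in x  */
    int down  = ((dy & 0xF) != 0);    /* MV not on a block boundary in y  */

    idx[0] = by * mvwidth_uv + bx;                 /* pp_prev1 */
    idx[1] = idx[0] + right;                       /* pp_prev2 */
    idx[2] = idx[0] + down * mvwidth_uv;           /* pp_prev3 */
    idx[3] = idx[2] + right;                       /* pp_prev4 */
}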
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp
deleted file mode 100644
index b3a1ebd..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pp_semaphore_luma.cpp
+++ /dev/null
@@ -1,378 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
- Inputs:
-    xpred = x-axis coordinate of the MB used for prediction (int)
-    ypred = y-axis coordinate of the MB used for prediction (int)
-    pp_dec_y = pointer to the post processing semaphore for current
-           luminance frame (uint8)
-    pstprcTypPrv = pointer to the previous frame's post processing type
-                   (uint8)
-    ll = pointer to the buffer (int)
-    mv_loc = flag indicating location of the motion compensated
-         (x,y) position with respect to the luminance MB (int);
-         0 -> inside MB, 1 -> outside MB
-    dx = horizontal component of the motion vector (int)
-    dy = vertical component of the motion vector (int)
-    mvwidth = number of blocks per row (int)
-    width = luminance VOP width in pixels (int)
-    height = luminance VOP height in pixels (int)
-
- Local Stores/Buffers/Pointers Needed:
-    None
-
- Global Stores/Buffers/Pointers Needed:
-    None
-
- Outputs:
-    msk_deblock = flag that indicates whether deblocking is to be
-              performed (msk_deblock = 0) or not (msk_deblock =
-              1) (uint8)
-
- Pointers and Buffers Modified:
-    pp_dec_y contents are the updated semaphore propagation data
-
- Local Stores Modified:
-    None
-
- Global Stores Modified:
-    None
-
-------------------------------------------------------------------------------
- FUNCTION DESCRIPTION
-
- This functions performs post processing semaphore propagation processing
- after luminance prediction.
-
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "mp4dec_api.h"
-#include    "mp4def.h"
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-#ifdef PV_POSTPROC_ON
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-    /*----------------------------------------------------------------------------
-    ; FUNCTION CODE
-    ----------------------------------------------------------------------------*/
-    uint8 pp_semaphore_luma(
-        int xpred,      /* i */
-        int ypred,      /* i */
-        uint8   *pp_dec_y,  /* i/o */
-        uint8   *pstprcTypPrv,  /* i */
-        int *ll,        /* i */
-        int *mv_loc,    /* i/o */
-        int dx,     /* i */
-        int dy,     /* i */
-        int mvwidth,    /* i */
-        int width,      /* i */
-        int height      /* i */
-    )
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        int kk, mmvy, mmvx, nmvx, nmvy;
-        uint8   *pp_prev1, *pp_prev2, *pp_prev3, *pp_prev4;
-        uint8   msk_deblock = 0;        /*  11/3/00 */
-
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-        /* Interframe Processing - 1 MV per MB */
-
-        /* check whether the MV points outside the frame */
-        if (xpred >= 0 && xpred <= ((width << 1) - (2*MB_SIZE)) && ypred >= 0 &&
-                ypred <= ((height << 1) - (2*MB_SIZE)))
-        {   /*****************************/
-            /* (x,y) is inside the frame */
-            /*****************************/
-
-            /*  10/24/2000 post_processing semaphore */
-            /* generation */
-
-            /*  10/23/2000 no boundary checking*/
-            *mv_loc = 0;
-
-            /* Calculate block x coordinate. Divide by 16 is for  */
-            /* converting half-pixel resolution to block          */
-            mmvx = xpred >> 4;
-
-            /* Calculate block y coordinate. Divide by 16 is for */
-            /* converting half-pixel resolution to block         */
-            mmvy = ypred >> 4;
-
-            /* Find post processing semaphore location for block */
-            /* used for prediction, i.e.,                */
-            /* pp_prev1 = &pstprcTypPrv[mmvy*mvwidth][mmvx]      */
-            pp_prev1 = pstprcTypPrv + mmvx + mmvy * mvwidth;
-
-            /* Check if MV is a multiple of 16 */
-            if ((dx&0xF) != 0)
-            {   /* dx is not a multiple of 16 */
-
-                /* pp_prev2 is the block to the right of */
-                /* pp_prev1 block            */
-                pp_prev2 = pp_prev1 + 1;
-
-                if ((dy&0xF) != 0)
-                {   /* dy is not a multiple of 16 */
-
-                    /* pp_prev3 is the block below */
-                    /* pp_prev1 block          */
-                    pp_prev3 = pp_prev1 + mvwidth;
-                }
-                else
-                {   /* dy is a multiple of 16 */
-
-                    pp_prev3 = pp_prev1;
-                }
-
-                /* pp_prev4 is the block to the right of */
-                /* pp_prev3 block.           */
-                pp_prev4 = pp_prev3 + 1;
-            }
-            else
-            {   /* dx is a multiple of 16 */
-
-                pp_prev2 = pp_prev1;
-
-                if ((dy&0xF) != 0)
-                {   /* dy is not a multiple of 16 */
-
-                    /* pp_prev3 is the block below */
-                    /* pp_prev1 block.         */
-                    pp_prev3 = pp_prev1 + mvwidth;
-                }
-                else
-                {   /* dy is a multiple of 16 */
-
-                    pp_prev3 = pp_prev1;
-                    msk_deblock = 0x3;
-                }
-
-                pp_prev4 = pp_prev3;
-            }
-
-            /* Perform post processing semaphore propagation for each */
-            /* of the 4 blocks in a MB.               */
-            for (kk = 0; kk < 4; kk++)
-            {
-                /* Deringing semaphore propagation */
-                if ((*(pp_dec_y) & 4) == 0)
-                {
-                    *(pp_dec_y) |= ((*(pp_prev1) | *(pp_prev2) |
-                                     *(pp_prev3) | *(pp_prev4)) & 0x4);
-                }
-                /* Deblocking semaphore propagation */
-                /*  11/3/00, change the propagation for deblocking */
-                if (msk_deblock == 0)
-                {
-                    *(pp_dec_y) = 0;
-                }
-
-                pp_dec_y += ll[kk];
-                pp_prev1 += ll[kk];
-                pp_prev2 += ll[kk];
-                pp_prev3 += ll[kk];
-                pp_prev4 += ll[kk];
-            }
-
-        }
-        else
-        {   /******************************/
-            /* (x,y) is outside the frame */
-            /******************************/
-
-            /*  10/24/2000 post_processing semaphore */
-            /* generation */
-
-            /*  10/23/2000 boundary checking*/
-            *mv_loc = 1;
-
-            /* Perform post processing semaphore propagation for each */
-            /* of the 4 blocks in a MB.               */
-            for (kk = 0; kk < 4; kk++)
-            {
-                /* Calculate block x coordinate and round (?).  */
-                /* Divide by 16 is for converting half-pixel    */
-                /* resolution to block.             */
-                mmvx = (xpred + ((kk & 1) << 3)) >> 4;
-                nmvx = mmvx;
-
-                /* Calculate block y coordinate and round (?).  */
-                /* Divide by 16 is for converting half-pixel    */
-                /* resolution to block.             */
-                mmvy = (ypred + ((kk & 2) << 2)) >> 4;
-                nmvy = mmvy;
-
-                /* Perform boundary checking */
-                if (nmvx < 0)
-                {
-                    nmvx = 0;
-                }
-                else if (nmvx > mvwidth - 1)
-                {
-                    nmvx = mvwidth - 1;
-                }
-
-                if (nmvy < 0)
-                {
-                    nmvy = 0;
-                }
-                else if (nmvy > (height >> 3) - 1)
-                {
-                    nmvy = (height >> 3) - 1;
-                }
-
-                /* Find post processing semaphore location for block */
-                /* used for prediction, i.e.,                */
-                /* pp_prev1 = &pstprcTypPrv[nmvy*mvwidth][nmvx]      */
-                pp_prev1 = pstprcTypPrv + nmvx + nmvy * mvwidth;
-
-                /* Check if x component of MV is a multiple of 16    */
-                /* and check if block x coordinate is out of bounds  */
-                if (((dx&0xF) != 0) && (mmvx + 1 < mvwidth - 1))
-                {   /* dx is not a multiple of 16 and the block */
-                    /* x coordinate is within the bounds        */
-
-                    /* pp_prev2 is the block to the right of */
-                    /* pp_prev1 block            */
-                    pp_prev2 = pp_prev1 + 1;
-
-                    /* Check if y component of MV is a multiple */
-                    /* of 16 and check if block y coordinate is */
-                    /* out of bounds                */
-                    if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 3) - 1))
-                    {   /* dy is not a multiple of 16 and */
-                        /* the block y coordinate is      */
-                        /* within the bounds              */
-
-                        /* pp_prev3 is the block below */
-                        /* pp_prev1 block          */
-                        pp_prev3 = pp_prev1 + mvwidth;
-
-                        /* all prediction are from different blocks */
-                        msk_deblock = 0x3;
-                    }
-                    else
-                    {   /* dy is a multiple of 16 or the block */
-                        /* y coordinate is out of bounds       */
-
-                        pp_prev3 = pp_prev1;
-                    }
-
-                    /* pp_prev4 is the block to the right of */
-                    /* pp_prev3 block.           */
-                    pp_prev4 = pp_prev3 + 1;
-                }
-                else
-                {   /* dx is a multiple of 16 or the block x */
-                    /* coordinate is out of bounds           */
-
-                    pp_prev2 = pp_prev1;
-
-                    /* Check if y component of MV is a multiple */
-                    /* of 16 and check if block y coordinate is */
-                    /* out of bounds                */
-                    if (((dy&0xF) != 0) && (mmvy + 1 < (height >> 3) - 1))
-                    {   /* dy is not a multiple of 16 and */
-                        /* the block y coordinate is      */
-                        /* within the bounds              */
-
-                        /* pp_prev3 is the block below */
-                        /* pp_prev1 block.         */
-                        pp_prev3 = pp_prev1 + mvwidth;
-                    }
-                    else
-                    {   /* dy is a multiple of 16 or the block */
-                        /* y coordinate is out of bounds       */
-
-                        pp_prev3 = pp_prev1;
-                    }
-
-                    pp_prev4 = pp_prev3;
-                }
-
-                /* Deringing semaphore propagation */
-                if ((*(pp_dec_y)&4) == 0)
-                {
-                    *(pp_dec_y) |= ((*(pp_prev1) |
-                                     *(pp_prev2) | *(pp_prev3) |
-                                     *(pp_prev4)) & 0x4);
-                }
-                /* Deblocking semaphore propagation */
-                /*  11/3/00, change the propagation */
-                /* for deblocking */
-                if (msk_deblock == 0)
-                {
-                    *(pp_dec_y) = 0;
-                }
-
-                pp_dec_y += ll[kk];
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return (msk_deblock);
-    }
-#ifdef __cplusplus
-}
-#endif
-#endif
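
The out-of-frame branch of pp_semaphore_luma() above converts the half-pel prediction coordinates to 8x8-block indices with a shift by 4 and then clamps them to the semaphore map. A minimal sketch of that conversion and clamping, using hypothetical helper names rather than the deleted API:

/* Half-pel x coordinate -> clamped 8x8-block column index (hypothetical helper). */
static int clamp_block_x(int xpred_halfpel, int mvwidth)
{
    int bx = xpred_halfpel >> 4;          /* 16 half-pels == one 8-pel block */
    if (bx < 0)            bx = 0;
    if (bx > mvwidth - 1)  bx = mvwidth - 1;
    return bx;
}

/* Half-pel y coordinate -> clamped 8x8-block row index (hypothetical helper). */
static int clamp_block_y(int ypred_halfpel, int height)
{
    int by  = ypred_halfpel >> 4;
    int max = (height >> 3) - 1;          /* number of 8x8 block rows minus one */
    if (by < 0)    by = 0;
    if (by > max)  by = max;
    return by;
}
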
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
deleted file mode 100644
index 9c0fcfa..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
+++ /dev/null
@@ -1,1781 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#define LOG_TAG "pvdec_api"
-#include <log/log.h>
-#include "mp4dec_lib.h"
-#include "vlc_decode.h"
-#include "bitstream.h"
-
-#ifndef INT32_MAX
-#define INT32_MAX 0x7fffffff
-#endif
-
-#ifndef SIZE_MAX
-#define SIZE_MAX ((size_t) -1)
-#endif
-
-#define OSCL_DISABLE_WARNING_CONDITIONAL_IS_CONSTANT
-
-#ifdef DEC_INTERNAL_MEMORY_OPT
-#define QCIF_MBS 99
-#define QCIF_BS (4*QCIF_MBS)
-#define QCIF_MB_ROWS 11
-extern uint8                IMEM_sliceNo[QCIF_MBS];
-extern uint8                IMEM_acPredFlag[QCIF_MBS];
-extern uint8                IMEM_headerInfo_Mode[QCIF_MBS];
-extern uint8                IMEM_headerInfo_CBP[QCIF_MBS];
-extern int                  IMEM_headerInfo_QPMB[QCIF_MBS];
-extern MacroBlock           IMEM_mblock;
-extern MOT                  IMEM_motX[QCIF_BS];
-extern MOT                  IMEM_motY[QCIF_BS];
-extern BitstreamDecVideo    IMEM_BitstreamDecVideo[4];
-extern typeDCStore          IMEM_predDC[QCIF_MBS];
-extern typeDCACStore        IMEM_predDCAC_col[QCIF_MB_ROWS+1];
-
-extern VideoDecData         IMEM_VideoDecData[1];
-extern Vop                  IMEM_currVop[1];
-extern Vop                  IMEM_prevVop[1];
-extern PIXEL                IMEM_currVop_yChan[QCIF_MBS*128*3];
-extern PIXEL                IMEM_prevVop_yChan[QCIF_MBS*128*3];
-extern uint8                IMEM_pstprcTypCur[6*QCIF_MBS];
-extern uint8                IMEM_pstprcTypPrv[6*QCIF_MBS];
-
-
-extern Vop                  IMEM_vopHEADER[2];
-extern Vol                  IMEM_VOL[2];
-extern Vop                  IMEM_vopHeader[2][1];
-extern Vol                  IMEM_vol[2][1];
-
-#endif
-
-/* ======================================================================== */
-/*  Function : PVInitVideoDecoder()                                         */
-/*  Date     : 04/11/2000, 08/29/2000                                       */
-/*  Purpose  : Initialization of the MPEG-4 video decoder library.          */
-/*             The return type is Bool instead of PV_STATUS because         */
-/*             we don't want to expose PV_STATUS to (outside) programmers   */
-/*             that use our decoder library SDK.                            */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVInitVideoDecoder(VideoDecControls *decCtrl, uint8 *volbuf[],
-                                        int32 *volbuf_size, int nLayers, int width, int height, MP4DecodingMode mode)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    Bool status = PV_TRUE;
-    int idx;
-    BitstreamDecVideo *stream;
-
-
-    oscl_memset(decCtrl, 0, sizeof(VideoDecControls)); /* fix a size bug.   03/28/2001 */
-    decCtrl->nLayers = nLayers;
-    for (idx = 0; idx < nLayers; idx++)
-    {
-        decCtrl->volbuf[idx] = volbuf[idx];
-        decCtrl->volbuf_size[idx] = volbuf_size[idx];
-    }
-
-    /* memory allocation & initialization */
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    video = IMEM_VideoDecData;
-#else
-    video = (VideoDecData *) oscl_malloc(sizeof(VideoDecData));
-#endif
-    if (video != NULL)
-    {
-        oscl_memset(video, 0, sizeof(VideoDecData));
-        video->memoryUsage = sizeof(VideoDecData);
-        video->numberOfLayers = nLayers;
-#ifdef DEC_INTERNAL_MEMORY_OPT
-        video->vol = (Vol **) IMEM_VOL;
-#else
-        if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
-            status = PV_FALSE;
-            oscl_free(video);
-            goto fail;
-        }
-
-        video->vol = (Vol **) oscl_malloc(nLayers * sizeof(Vol *));
-#endif
-        if (video->vol == NULL) status = PV_FALSE;
-        video->memoryUsage += nLayers * sizeof(Vol *);
-
-        /* be sure not to leak any previous state */
-        PVCleanUpVideoDecoder(decCtrl);
-        /* we need to setup this pointer for the application to */
-        /*    pass it around.                                   */
-        decCtrl->videoDecoderData = (void *) video;
-        video->videoDecControls = decCtrl;  /* yes. we have a cyclic */
-        /* reference here :)    */
-
-        /* Allocating Vop space, this has to change when we add */
-        /*    spatial scalability to the decoder                */
-#ifdef DEC_INTERNAL_MEMORY_OPT
-        video->currVop = IMEM_currVop;
-        if (video->currVop == NULL) status = PV_FALSE;
-        else oscl_memset(video->currVop, 0, sizeof(Vop));
-        video->prevVop = IMEM_prevVop;
-        if (video->prevVop == NULL) status = PV_FALSE;
-        else oscl_memset(video->prevVop, 0, sizeof(Vop));
-        video->memoryUsage += (sizeof(Vop) * 2);
-        video->vopHeader = (Vop **) IMEM_vopHEADER;
-#else
-
-        video->currVop = (Vop *) oscl_malloc(sizeof(Vop));
-        if (video->currVop == NULL) status = PV_FALSE;
-        else oscl_memset(video->currVop, 0, sizeof(Vop));
-        video->prevVop = (Vop *) oscl_malloc(sizeof(Vop));
-        if (video->prevVop == NULL) status = PV_FALSE;
-        else oscl_memset(video->prevVop, 0, sizeof(Vop));
-        video->memoryUsage += (sizeof(Vop) * 2);
-
-        if ((size_t)nLayers > SIZE_MAX / sizeof(Vop *)) {
-            status = PV_FALSE;
-            goto fail;
-        }
-
-        video->vopHeader = (Vop **) oscl_malloc(sizeof(Vop *) * nLayers);
-#endif
-        if (video->vopHeader == NULL) status = PV_FALSE;
-        else oscl_memset(video->vopHeader, 0, sizeof(Vop *)*nLayers);
-        video->memoryUsage += (sizeof(Vop *) * nLayers);
-
-        video->initialized = PV_FALSE;
-        /* Decode the header to get all information to allocate data */
-        if (status == PV_TRUE)
-        {
-            /* initialize decoded frame counter.   04/24/2001 */
-            video->frame_idx = -1;
-
-
-            for (idx = 0; idx < nLayers; idx++)
-            {
-
-#ifdef DEC_INTERNAL_MEMORY_OPT
-                video->vopHeader[idx] = IMEM_vopHeader[idx];
-#else
-                video->vopHeader[idx] = (Vop *) oscl_malloc(sizeof(Vop));
-#endif
-                if (video->vopHeader[idx] == NULL)
-                {
-                    status = PV_FALSE;
-                    break;
-                }
-                else
-                {
-                    oscl_memset(video->vopHeader[idx], 0, sizeof(Vop));
-                    video->vopHeader[idx]->timeStamp = 0;
-                    video->memoryUsage += (sizeof(Vop));
-                }
-#ifdef DEC_INTERNAL_MEMORY_OPT
-                video->vol[idx] = IMEM_vol[idx];
-                video->memoryUsage += sizeof(Vol);
-                if (video->vol[idx] == NULL) status = PV_FALSE;
-                else oscl_memset(video->vol[idx], 0, sizeof(Vol));
-                stream = IMEM_BitstreamDecVideo;
-#else
-                video->vol[idx] = (Vol *) oscl_malloc(sizeof(Vol));
-                if (video->vol[idx] == NULL)
-                {
-                    status = PV_FALSE;
-                    break;
-                }
-                else
-                {
-                    video->memoryUsage += sizeof(Vol);
-                    oscl_memset(video->vol[idx], 0, sizeof(Vol));
-                }
-
-                stream = (BitstreamDecVideo *) oscl_malloc(sizeof(BitstreamDecVideo));
-#endif
-                video->memoryUsage += sizeof(BitstreamDecVideo);
-                if (stream == NULL)
-                {
-                    status = PV_FALSE;
-                    break;
-                }
-                else
-                {
-                    int32 buffer_size;
-                    oscl_memset(stream, 0, sizeof(BitstreamDecVideo));
-                    if ((buffer_size = BitstreamOpen(stream, idx)) < 0)
-                    {
-                        mp4dec_log("InitVideoDecoder(): Can't allocate bitstream buffer.\n");
-                        status = PV_FALSE;
-                        break;
-                    }
-                    video->memoryUsage += buffer_size;
-                    video->vol[idx]->bitstream = stream;
-                    video->vol[idx]->volID = idx;
-                    video->vol[idx]->timeInc_offset = 0;  /*  11/12/01 */
-                    video->vlcDecCoeffIntra = &VlcDecTCOEFShortHeader;
-                    video->vlcDecCoeffInter = &VlcDecTCOEFShortHeader;
-                    if (mode == MPEG4_MODE)
-                    {
-                        /* Set up VOL header bitstream for frame-based decoding.  08/30/2000 */
-                        BitstreamReset(stream, decCtrl->volbuf[idx], decCtrl->volbuf_size[idx]);
-
-                        switch (DecodeVOLHeader(video, idx))
-                        {
-                            case PV_SUCCESS :
-                                if (status == PV_TRUE)
-                                    status = PV_TRUE;   /* if an earlier layer was bad, keep PV_FALSE so we still return PV_FAIL even though this layer decoded OK */
-                                else
-                                    status = PV_FALSE;
-                                break;
-#ifdef PV_TOLERATE_VOL_ERRORS
-                            case PV_BAD_VOLHEADER:
-                                status = PV_TRUE;
-                                break;
-#endif
-                            default :
-                                status = PV_FALSE;
-                                break;
-                        }
-
-                    }
-                    else
-                    {
-                        video->shortVideoHeader = PV_TRUE;
-                    }
-
-                    if (video->shortVideoHeader == PV_TRUE)
-                    {
-                        mode = H263_MODE;
-                        /* Set max width and height.  In H.263 mode, we use    */
-                        /*  volbuf_size[0] to pass in width and volbuf_size[1] */
-                        /*  to pass in height.                    04/23/2001 */
-                        video->prevVop->temporalRef = 0; /*  11/12/01 */
-                        /* Compute some convenience variables:   04/23/2001 */
-                        video->vol[idx]->quantType = 0;
-                        video->vol[idx]->quantPrecision = 5;
-                        video->vol[idx]->errorResDisable = 1;
-                        video->vol[idx]->dataPartitioning = 0;
-                        video->vol[idx]->useReverseVLC = 0;
-                        video->intra_acdcPredDisable = 1;
-                        video->vol[idx]->scalability = 0;
-
-                        video->displayWidth = width;
-                        video->displayHeight = height;
-                        video->width = (width + 15) & -16;
-                        video->height = (height + 15) & -16;
-                        video->size = (int32)video->width * video->height;
-
-#ifdef PV_ANNEX_IJKT_SUPPORT
-                        video->modified_quant = 0;
-                        video->advanced_INTRA = 0;
-                        video->deblocking = 0;
-                        video->slice_structure = 0;
-#endif
-                    }
-
-                }
-            }
-
-        }
-        if (status != PV_FALSE)
-        {
-            status = PVAllocVideoData(decCtrl, width, height, nLayers);
-            video->initialized = PV_TRUE;
-        }
-    }
-    else
-    {
-        status = PV_FALSE;
-    }
-
-fail:
-    if (status == PV_FALSE) PVCleanUpVideoDecoder(decCtrl);
-
-    return status;
-}
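
A pattern worth noting in the deleted PVInitVideoDecoder() above is the overflow guard placed before every multiplied allocation size (for example the SIZE_MAX / sizeof(Vol *) check). A generic sketch of that guard, using standard malloc instead of oscl_malloc and a hypothetical helper name:

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical helper: allocate count elements of elem_size bytes, refusing
 * any request whose total byte count would overflow size_t. */
static void *checked_array_alloc(size_t count, size_t elem_size)
{
    if (elem_size != 0 && count > SIZE_MAX / elem_size)
    {
        return NULL;                      /* count * elem_size would wrap */
    }
    return malloc(count * elem_size);
}
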
-
-Bool PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, int nLayers)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    Bool status = PV_TRUE;
-    int nTotalMB;
-    int nMBPerRow;
-    int32 size;
-
-    if (video->shortVideoHeader == PV_TRUE)
-    {
-        video->displayWidth = width;
-        video->displayHeight = height;
-        video->width = (width + 15) & -16;
-        video->height = (height + 15) & -16;
-
-        video->nMBPerRow =
-            video->nMBinGOB  = video->width / MB_SIZE;
-        video->nMBPerCol =
-            video->nGOBinVop = video->height / MB_SIZE;
-        video->nTotalMB =
-            video->nMBPerRow * video->nMBPerCol;
-    }
-
-    if (((uint64_t)video->width * video->height) > (uint64_t)INT32_MAX / sizeof(PIXEL)) {
-        return PV_FALSE;
-    }
-
-    size = (int32)sizeof(PIXEL) * video->width * video->height;
-#ifdef PV_MEMORY_POOL
-    decCtrl->size = size;
-#else
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    video->currVop->yChan = IMEM_currVop_yChan; /* Allocate memory for all VOP OKA 3/2/1*/
-    if (video->currVop->yChan == NULL) status = PV_FALSE;
-    else {
-        video->currVop->uChan = video->currVop->yChan + size;
-        video->currVop->vChan = video->currVop->uChan + (size >> 2);
-    }
-
-    video->prevVop->yChan = IMEM_prevVop_yChan; /* Allocate memory for all VOP OKA 3/2/1*/
-    if (video->prevVop->yChan == NULL) status = PV_FALSE;
-    else {
-        video->prevVop->uChan = video->prevVop->yChan + size;
-        video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
-    }
-#else
-    if (size > INT32_MAX / 3) {
-        return PV_FALSE;
-    }
-    video->currVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
-    if (video->currVop->yChan == NULL) status = PV_FALSE;
-    else {
-        video->currVop->uChan = video->currVop->yChan + size;
-        video->currVop->vChan = video->currVop->uChan + (size >> 2);
-    }
-    video->prevVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
-    if (video->prevVop->yChan == NULL) status = PV_FALSE;
-    else {
-        video->prevVop->uChan = video->prevVop->yChan + size;
-        video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
-    }
-#endif
-    video->memoryUsage += (size * 3);
-#endif   // MEMORY_POOL
-    /* Note that baseVop, enhcVop is only used to hold enhancement */
-    /*    layer header information.                  05/04/2000  */
-    if (nLayers > 1)
-    {
-        video->prevEnhcVop = (Vop *) oscl_malloc(sizeof(Vop));
-        video->memoryUsage += (sizeof(Vop));
-        if (video->prevEnhcVop == NULL)
-        {
-            status = PV_FALSE;
-        }
-        else
-        {
-            oscl_memset(video->prevEnhcVop, 0, sizeof(Vop));
-#ifndef PV_MEMORY_POOL
-            if (size > INT32_MAX / 3) {
-                return PV_FALSE;
-            }
-
-            video->prevEnhcVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
-            if (video->prevEnhcVop->yChan == NULL) status = PV_FALSE;
-            else {
-                video->prevEnhcVop->uChan = video->prevEnhcVop->yChan + size;
-                video->prevEnhcVop->vChan = video->prevEnhcVop->uChan + (size >> 2);
-            }
-            video->memoryUsage += (3 * size / 2);
-#endif
-        }
-    }
-
-    /* Allocating space for slices, AC prediction flag, and */
-    /*    AC/DC prediction storage */
-    nTotalMB = video->nTotalMB;
-    nMBPerRow = video->nMBPerRow;
-
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    video->sliceNo = (uint8 *)(IMEM_sliceNo);
-    if (video->sliceNo == NULL) status = PV_FALSE;
-    video->memoryUsage += nTotalMB;
-    video->acPredFlag = (uint8 *)(IMEM_acPredFlag);
-    if (video->acPredFlag == NULL) status = PV_FALSE;
-    video->memoryUsage += (nTotalMB);
-    video->predDC = (typeDCStore *)(IMEM_predDC);
-    if (video->predDC == NULL) status = PV_FALSE;
-    video->memoryUsage += (nTotalMB * sizeof(typeDCStore));
-    video->predDCAC_col = (typeDCACStore *)(IMEM_predDCAC_col);
-    if (video->predDCAC_col == NULL) status = PV_FALSE;
-    video->memoryUsage += ((nMBPerRow + 1) * sizeof(typeDCACStore));
-    video->predDCAC_row = video->predDCAC_col + 1;
-    video->headerInfo.Mode = (uint8 *)(IMEM_headerInfo_Mode);
-    if (video->headerInfo.Mode == NULL) status = PV_FALSE;
-    video->memoryUsage += nTotalMB;
-    video->headerInfo.CBP = (uint8 *)(IMEM_headerInfo_CBP);
-    if (video->headerInfo.CBP == NULL) status = PV_FALSE;
-    video->memoryUsage += nTotalMB;
-    video->QPMB = (int *)(IMEM_headerInfo_QPMB);
-    if (video->QPMB == NULL) status = PV_FALSE;
-    video->memoryUsage += (nTotalMB * sizeof(int));
-    video->mblock = &IMEM_mblock;
-    if (video->mblock == NULL) status = PV_FALSE;
-    oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
-
-    video->memoryUsage += sizeof(MacroBlock);
-    video->motX = (MOT *)(IMEM_motX);
-    if (video->motX == NULL) status = PV_FALSE;
-    video->motY = (MOT *)(IMEM_motY);
-    if (video->motY == NULL) status = PV_FALSE;
-    video->memoryUsage += (sizeof(MOT) * 8 * nTotalMB);
-#else
-    video->sliceNo = (uint8 *) oscl_malloc(nTotalMB);
-    if (video->sliceNo == NULL) status = PV_FALSE;
-    else oscl_memset(video->sliceNo, 0, nTotalMB);
-    video->memoryUsage += nTotalMB;
-
-    video->acPredFlag = (uint8 *) oscl_malloc(nTotalMB * sizeof(uint8));
-    if (video->acPredFlag == NULL) status = PV_FALSE;
-    else oscl_memset(video->acPredFlag, 0, nTotalMB * sizeof(uint8));
-    video->memoryUsage += (nTotalMB);
-
-    if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
-        return PV_FALSE;
-    }
-    video->predDC = (typeDCStore *) oscl_malloc(nTotalMB * sizeof(typeDCStore));
-    if (video->predDC == NULL) status = PV_FALSE;
-    else oscl_memset(video->predDC, 0, nTotalMB * sizeof(typeDCStore));
-    video->memoryUsage += (nTotalMB * sizeof(typeDCStore));
-
-    if (nMBPerRow > INT32_MAX - 1
-            || (size_t)(nMBPerRow + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
-        return PV_FALSE;
-    }
-    video->predDCAC_col = (typeDCACStore *) oscl_malloc((nMBPerRow + 1) * sizeof(typeDCACStore));
-    if (video->predDCAC_col == NULL) status = PV_FALSE;
-    else oscl_memset(video->predDCAC_col, 0, (nMBPerRow + 1) * sizeof(typeDCACStore));
-    video->memoryUsage += ((nMBPerRow + 1) * sizeof(typeDCACStore));
-
-    /* element zero will be used for storing vertical (col) AC coefficients */
-    /*  the rest will be used for storing horizontal (row) AC coefficients  */
-    video->predDCAC_row = video->predDCAC_col + 1;        /*  ACDC */
-
-    /* Allocating HeaderInfo structure & Quantizer array */
-    video->headerInfo.Mode = (uint8 *) oscl_malloc(nTotalMB);
-    if (video->headerInfo.Mode == NULL) status = PV_FALSE;
-    else oscl_memset(video->headerInfo.Mode, 0, nTotalMB);
-    video->memoryUsage += nTotalMB;
-    video->headerInfo.CBP = (uint8 *) oscl_malloc(nTotalMB);
-    if (video->headerInfo.CBP == NULL) status = PV_FALSE;
-    else oscl_memset (video->headerInfo.CBP, 0, nTotalMB);
-    video->memoryUsage += nTotalMB;
-
-    if ((size_t)nTotalMB > SIZE_MAX / sizeof(int16)) {
-        return PV_FALSE;
-    }
-    video->QPMB = (int16 *) oscl_malloc(nTotalMB * sizeof(int16));
-    if (video->QPMB == NULL) status = PV_FALSE;
-    else memset(video->QPMB, 0x0, nTotalMB * sizeof(int16));
-    video->memoryUsage += (nTotalMB * sizeof(int));
-
-    /* Allocating macroblock space */
-    video->mblock = (MacroBlock *) oscl_malloc(sizeof(MacroBlock));
-    if (video->mblock == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
-
-        video->memoryUsage += sizeof(MacroBlock);
-    }
-    /* Allocating motion vector space */
-    if ((size_t)nTotalMB > SIZE_MAX / (sizeof(MOT) * 4)) {
-        return PV_FALSE;
-    }
-    video->motX = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
-    if (video->motX == NULL) status = PV_FALSE;
-    else memset(video->motX, 0, sizeof(MOT) * 4 * nTotalMB);
-    video->motY = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
-    if (video->motY == NULL) status = PV_FALSE;
-    else memset(video->motY, 0, sizeof(MOT) * 4 * nTotalMB);
-    video->memoryUsage += (sizeof(MOT) * 8 * nTotalMB);
-#endif
-
-#ifdef PV_POSTPROC_ON
-    /* Allocating space for post-processing Mode */
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    video->pstprcTypCur = IMEM_pstprcTypCur;
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypCur == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypCur, 0, 4*nTotalMB + 2*nTotalMB);
-    }
-
-    video->pstprcTypPrv = IMEM_pstprcTypPrv;
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypPrv == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypPrv, 0, nTotalMB*6);
-    }
-
-#else
-    if (nTotalMB > INT32_MAX / 6) {
-        return PV_FALSE;
-    }
-    video->pstprcTypCur = (uint8 *) oscl_malloc(nTotalMB * 6);
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypCur == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypCur, 0, 4*nTotalMB + 2*nTotalMB);
-    }
-
-    video->pstprcTypPrv = (uint8 *) oscl_malloc(nTotalMB * 6);
-    video->memoryUsage += (nTotalMB * 6);
-    if (video->pstprcTypPrv == NULL)
-    {
-        status = PV_FALSE;
-    }
-    else
-    {
-        oscl_memset(video->pstprcTypPrv, 0, nTotalMB*6);
-    }
-
-#endif
-
-#endif
-
-    /* initialize the decoder library */
-    video->prevVop->predictionType = I_VOP;
-    video->prevVop->timeStamp = 0;
-#ifndef PV_MEMORY_POOL
-    oscl_memset(video->prevVop->yChan, 16, sizeof(uint8)*size);     /*  10/31/01 */
-    oscl_memset(video->prevVop->uChan, 128, sizeof(uint8)*size / 2);
-
-    oscl_memset(video->currVop->yChan, 0, sizeof(uint8)*size*3 / 2);
-    if (nLayers > 1)
-    {
-        oscl_memset(video->prevEnhcVop->yChan, 0, sizeof(uint8)*size*3 / 2);
-        video->prevEnhcVop->timeStamp = 0;
-    }
-    video->concealFrame = video->prevVop->yChan;               /*  07/07/2001 */
-    decCtrl->outputFrame = video->prevVop->yChan;              /*  06/19/2002 */
-#endif
-
-    /* always start from base layer */
-    video->currLayer = 0;
-    return status;
-}
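
Two details of PVAllocVideoData() above are easy to miss: the frame dimensions are rounded up to multiples of 16 with (dim + 15) & -16 so they cover whole macroblocks, and a single buffer of size * 3 / 2 bytes holds the Y, U and V planes of a 4:2:0 frame, with U at offset size and V at offset size + size/4. A small sketch of both ideas, with illustrative names only:

/* Round a dimension up to the next multiple of 16 (one macroblock). */
static int round_up_to_mb(int dim)
{
    return (dim + 15) & ~15;              /* same result as (dim + 15) & -16 */
}

typedef struct {
    unsigned char *y, *u, *v;             /* illustrative plane pointers */
} Yuv420Planes;

/* Attach plane pointers to one contiguous 4:2:0 buffer of luma_size*3/2 bytes. */
static int yuv420_attach(Yuv420Planes *p, unsigned char *buf, int luma_size)
{
    if (buf == NULL || luma_size <= 0) return -1;
    p->y = buf;
    p->u = buf + luma_size;                     /* chroma U: luma_size/4 bytes */
    p->v = buf + luma_size + (luma_size >> 2);  /* chroma V: luma_size/4 bytes */
    return 0;
}
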
-
-/* ======================================================================== */
-/*  Function : PVResetVideoDecoder()                                        */
-/*  Date     : 01/14/2002                                                   */
-/*  Purpose  : Reset video timestamps                                       */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVResetVideoDecoder(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    int idx;
-
-    for (idx = 0; idx < decCtrl->nLayers; idx++)
-    {
-        video->vopHeader[idx]->timeStamp = 0;
-    }
-    video->prevVop->timeStamp = 0;
-    if (decCtrl->nLayers > 1)
-        video->prevEnhcVop->timeStamp = 0;
-
-    oscl_memset(video->mblock->block, 0, sizeof(int16)*6*NCOEFF_BLOCK); //  Aug 23,2005
-
-    return PV_TRUE;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVCleanUpVideoDecoder()                                      */
-/*  Date     : 04/11/2000, 08/29/2000                                       */
-/*  Purpose  : Cleanup of the MPEG-4 video decoder library.                 */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVCleanUpVideoDecoder(VideoDecControls *decCtrl)
-{
-    int idx;
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-#ifdef DEC_INTERNAL_MEMORY_OPT
-    if (video)
-    {
-#ifdef PV_POSTPROC_ON
-        video->pstprcTypCur = NULL;
-        video->pstprcTypPrv = NULL;
-#endif
-
-        video->acPredFlag       = NULL;
-        video->sliceNo          = NULL;
-        video->motX             = NULL;
-        video->motY             = NULL;
-        video->mblock           = NULL;
-        video->QPMB             = NULL;
-        video->predDC           = NULL;
-        video->predDCAC_row     = NULL;
-        video->predDCAC_col     = NULL;
-        video->headerInfo.Mode  = NULL;
-        video->headerInfo.CBP   = NULL;
-        if (video->numberOfLayers > 1)
-        {
-            if (video->prevEnhcVop)
-            {
-                video->prevEnhcVop->uChan = NULL;
-                video->prevEnhcVop->vChan = NULL;
-                if (video->prevEnhcVop->yChan) oscl_free(video->prevEnhcVop->yChan);
-                oscl_free(video->prevEnhcVop);
-            }
-        }
-        if (video->currVop)
-        {
-            video->currVop->uChan = NULL;
-            video->currVop->vChan = NULL;
-            if (video->currVop->yChan)
-                video->currVop->yChan = NULL;
-            video->currVop = NULL;
-        }
-        if (video->prevVop)
-        {
-            video->prevVop->uChan = NULL;
-            video->prevVop->vChan = NULL;
-            if (video->prevVop->yChan)
-                video->prevVop->yChan = NULL;
-            video->prevVop = NULL;
-        }
-
-        if (video->vol)
-        {
-            for (idx = 0; idx < video->numberOfLayers; idx++)
-            {
-                if (video->vol[idx])
-                {
-                    BitstreamClose(video->vol[idx]->bitstream);
-                    video->vol[idx]->bitstream = NULL;
-                    video->vol[idx] = NULL;
-                }
-                video->vopHeader[idx] = NULL;
-
-            }
-            video->vol = NULL;
-            video->vopHeader = NULL;
-        }
-
-        video = NULL;
-        decCtrl->videoDecoderData = NULL;
-    }
-
-#else
-
-    if (video)
-    {
-#ifdef PV_POSTPROC_ON
-        if (video->pstprcTypCur) oscl_free(video->pstprcTypCur);
-        if (video->pstprcTypPrv) oscl_free(video->pstprcTypPrv);
-#endif
-        if (video->predDC) oscl_free(video->predDC);
-        video->predDCAC_row = NULL;
-        if (video->predDCAC_col) oscl_free(video->predDCAC_col);
-        if (video->motX) oscl_free(video->motX);
-        if (video->motY) oscl_free(video->motY);
-        if (video->mblock) oscl_free(video->mblock);
-        if (video->QPMB) oscl_free(video->QPMB);
-        if (video->headerInfo.Mode) oscl_free(video->headerInfo.Mode);
-        if (video->headerInfo.CBP) oscl_free(video->headerInfo.CBP);
-        if (video->sliceNo) oscl_free(video->sliceNo);
-        if (video->acPredFlag) oscl_free(video->acPredFlag);
-
-        if (video->numberOfLayers > 1)
-        {
-            if (video->prevEnhcVop)
-            {
-                video->prevEnhcVop->uChan = NULL;
-                video->prevEnhcVop->vChan = NULL;
-                if (video->prevEnhcVop->yChan) oscl_free(video->prevEnhcVop->yChan);
-                oscl_free(video->prevEnhcVop);
-            }
-        }
-        if (video->currVop)
-        {
-
-#ifndef PV_MEMORY_POOL
-            video->currVop->uChan = NULL;
-            video->currVop->vChan = NULL;
-            if (video->currVop->yChan)
-                oscl_free(video->currVop->yChan);
-#endif
-            oscl_free(video->currVop);
-        }
-        if (video->prevVop)
-        {
-#ifndef PV_MEMORY_POOL
-            video->prevVop->uChan = NULL;
-            video->prevVop->vChan = NULL;
-            if (video->prevVop->yChan)
-                oscl_free(video->prevVop->yChan);
-#endif
-            oscl_free(video->prevVop);
-        }
-
-        if (video->vol)
-        {
-            for (idx = 0; idx < video->numberOfLayers; idx++)
-            {
-                if (video->vol[idx])
-                {
-                    if (video->vol[idx]->bitstream)
-                    {
-                        BitstreamClose(video->vol[idx]->bitstream);
-                        oscl_free(video->vol[idx]->bitstream);
-                    }
-                    oscl_free(video->vol[idx]);
-                }
-
-            }
-            oscl_free(video->vol);
-        }
-
-        for (idx = 0; idx < video->numberOfLayers; idx++)
-        {
-            if (video->vopHeader[idx]) oscl_free(video->vopHeader[idx]);
-        }
-
-        if (video->vopHeader) oscl_free(video->vopHeader);
-
-        oscl_free(video);
-        decCtrl->videoDecoderData = NULL;
-    }
-#endif
-    return PV_TRUE;
-}
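
The teardown in PVCleanUpVideoDecoder() above frees only pointers that are non-NULL and finally clears decCtrl->videoDecoderData, so calling it again (as PVInitVideoDecoder() does on failure) is harmless. A condensed sketch of that pattern, with made-up structure fields:

#include <stdlib.h>

typedef struct {
    int  *qp_per_mb;        /* example of an owned buffer      */
    void *decoder_data;     /* back-pointer held by the caller */
} ExampleCtrl;

/* Free owned buffers if present, then clear the back-pointer so a second
 * cleanup call becomes a no-op (illustrative only). */
static void example_cleanup(ExampleCtrl *ctrl)
{
    if (ctrl == NULL || ctrl->decoder_data == NULL) return;

    if (ctrl->qp_per_mb) { free(ctrl->qp_per_mb); ctrl->qp_per_mb = NULL; }
    free(ctrl->decoder_data);
    ctrl->decoder_data = NULL;
}
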
-/* ======================================================================== */
-/*  Function : PVGetVideoDimensions()                                       */
-/*  Date     : 040505                                                       */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : the display_width and display_height of                      */
-/*          the frame in the current layer.                                 */
-/*  Note     : This is not a macro or inline function because we do         */
-/*              not want to expose our internal data structure.             */
-/*  Modified :                                                              */
-/* ======================================================================== */
-OSCL_EXPORT_REF void PVGetVideoDimensions(VideoDecControls *decCtrl, int32 *display_width, int32 *display_height)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    *display_width = video->displayWidth;
-    *display_height = video->displayHeight;
-}
-
-OSCL_EXPORT_REF void PVGetBufferDimensions(VideoDecControls *decCtrl, int32 *width, int32 *height) {
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    *width = video->width;
-    *height = video->height;
-}
-
-/* ======================================================================== */
-/*  Function : PVGetVideoTimeStamp()                                        */
-/*  Date     : 04/27/2000, 08/29/2000                                       */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : current time stamp in millisecond.                           */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-uint32 PVGetVideoTimeStamp(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    return video->currTimestamp;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVSetPostProcType()                                          */
-/*  Date     : 07/07/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : Set post-processing filter type.                             */
-/*  Note     :                                                              */
-/*  Modified : . 08/29/2000 changes the name for consistency.               */
-/* ======================================================================== */
-OSCL_EXPORT_REF void PVSetPostProcType(VideoDecControls *decCtrl, int mode)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    video->postFilterType = mode;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVGetDecBitrate()                                            */
-/*  Date     : 08/23/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns the average bits per second.           */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int PVGetDecBitrate(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    int     idx;
-    int32   sum = 0;
-
-    for (idx = 0; idx < BITRATE_AVERAGE_WINDOW; idx++)
-    {
-        sum += video->nBitsPerVop[idx];
-    }
-    sum = (sum * video->frameRate) / (10 * BITRATE_AVERAGE_WINDOW);
-    return (int) sum;
-}
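
Since PVGetDecFramerate() below reports frames per ten seconds, the division by (10 * BITRATE_AVERAGE_WINDOW) in PVGetDecBitrate() converts the summed per-VOP bit counts into bits per second. A worked example with made-up numbers:

#include <stdio.h>

int main(void)
{
    const int window = 4;                         /* stand-in for BITRATE_AVERAGE_WINDOW */
    const int bits_per_vop[4] = { 8000, 12000, 10000, 10000 };
    const int frame_rate_per_10s = 150;           /* i.e. 15 frames per second */
    long sum = 0;

    for (int i = 0; i < window; i++)
        sum += bits_per_vop[i];

    /* 40000 bits over 4 VOPs at 15 fps -> 40000 * 150 / (10 * 4) = 150000 bps */
    long bps = sum * frame_rate_per_10s / (10 * window);
    printf("average bitrate: %ld bps\n", bps);
    return 0;
}
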
-
-
-/* ======================================================================== */
-/*  Function : PVGetDecFramerate()                                          */
-/*  Date     : 08/23/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns the average frame per 10 second.       */
-/*  Note     : The fps can be calculated by PVGetDecFramerate()/10          */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int PVGetDecFramerate(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-
-    return video->frameRate;
-}
-
-/* ======================================================================== */
-/*  Function : PVGetOutputFrame()                                           */
-/*  Date     : 05/07/2001                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns the pointer to the output frame        */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-uint8 *PVGetDecOutputFrame(VideoDecControls *decCtrl)
-{
-    return decCtrl->outputFrame;
-}
-
-/* ======================================================================== */
-/*  Function : PVGetLayerID()                                               */
-/*  Date     : 07/09/2001                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns decoded frame layer id (BASE/ENHANCE)  */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int PVGetLayerID(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    return video->currLayer;
-}
-/* ======================================================================== */
-/*  Function : PVGetDecMemoryUsage()                                        */
-/*  Date     : 08/23/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns the amount of memory used.             */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int32 PVGetDecMemoryUsage(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    return video->memoryUsage;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVGetDecBitstreamMode()                                      */
-/*  Date     : 08/23/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function returns the decoding mode of the baselayer     */
-/*              bitstream.                                                  */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-OSCL_EXPORT_REF MP4DecodingMode PVGetDecBitstreamMode(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    if (video->shortVideoHeader)
-    {
-        return H263_MODE;
-    }
-    else
-    {
-        return MPEG4_MODE;
-    }
-}
-
-
-/* ======================================================================== */
-/*  Function : PVExtractVolHeader()                                         */
-/*  Date     : 08/29/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : Extract vol header of the bitstream from buffer[].           */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVExtractVolHeader(uint8 *video_buffer, uint8 *vol_header, int32 *vol_header_size)
-{
-    int idx = -1;
-    uint8 start_code_prefix[] = { 0x00, 0x00, 0x01 };
-    uint8 h263_prefix[] = { 0x00, 0x00, 0x80 };
-
-    if (oscl_memcmp(h263_prefix, video_buffer, 3) == 0) /* we have short header stream */
-    {
-        oscl_memcpy(vol_header, video_buffer, 32);
-        *vol_header_size = 32;
-        return TRUE;
-    }
-    else
-    {
-        if (oscl_memcmp(start_code_prefix, video_buffer, 3) ||
-                (video_buffer[3] != 0xb0 && video_buffer[3] >= 0x20)) return FALSE;
-
-        do
-        {
-            idx++;
-            while (oscl_memcmp(start_code_prefix, video_buffer + idx, 3))
-            {
-                idx++;
-                if (idx + 3 >= *vol_header_size) goto quit;
-            }
-        }
-        while (video_buffer[idx+3] != 0xb3 && video_buffer[idx+3] != 0xb6);
-
-        oscl_memcpy(vol_header, video_buffer, idx);
-        *vol_header_size = idx;
-        return TRUE;
-    }
-
-quit:
-    oscl_memcpy(vol_header, video_buffer, *vol_header_size);
-    return FALSE;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVLocateFrameHeader()                                        */
-/*  Date     : 04/8/2005                                                    */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : Return the offset to the first SC in the buffer              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int32 PVLocateFrameHeader(uint8 *ptr, int32 size)
-{
-    int count = 0;
-    int32 i = size;
-
-    if (size < 1)
-    {
-        return 0;
-    }
-    while (i--)
-    {
-        if ((count > 1) && (*ptr == 0x01))
-        {
-            i += 2;
-            break;
-        }
-
-        if (*ptr++)
-            count = 0;
-        else
-            count++;
-    }
-    return (size - (i + 1));
-}
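
PVLocateFrameHeader() above scans for an MPEG-4 start code by counting consecutive zero bytes and stopping at the first 0x00 0x00 0x01 sequence; the somewhat opaque return expression works out to the offset of that sequence, or to size when none is found. A forward-indexed sketch intended to match that behaviour (treat it as illustrative, not a drop-in replacement):

/* Hypothetical re-expression of the start-code scan above. */
static int find_start_code(const unsigned char *ptr, int size)
{
    int zeros = 0;

    if (size < 1) return 0;

    for (int i = 0; i < size; i++)
    {
        if (zeros > 1 && ptr[i] == 0x01)
        {
            return i - 2;                 /* offset of the 0x00 two bytes before the 0x01 */
        }
        zeros = (ptr[i] == 0x00) ? zeros + 1 : 0;
    }
    return size;                          /* no start code in the buffer */
}
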
-
-
-/* ======================================================================== */
-/*  Function : PVLocateH263FrameHeader()                                    */
-/*  Date     : 04/8/2005                                                    */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : Return the offset to the first SC in the buffer              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-int32 PVLocateH263FrameHeader(uint8 *ptr, int32 size)
-{
-    int count = 0;
-    int32 i = size;
-
-    if (size < 1)
-    {
-        return 0;
-    }
-
-    while (i--)
-    {
-        if ((count > 1) && ((*ptr & 0xFC) == 0x80))
-        {
-            i += 2;
-            break;
-        }
-
-        if (*ptr++)
-            count = 0;
-        else
-            count++;
-    }
-    return (size - (i + 1));
-}
-
-
-/* ======================================================================== */
-/*  Function : PVDecodeVideoFrame()                                         */
-/*  Date     : 08/29/2000                                                   */
-/*  Purpose  : Decode one video frame and return a YUV-12 image.            */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified : 04/17/2001 removed PV_EOS, PV_END_OF_BUFFER              */
-/*           : 08/22/2002 break up into 2 functions PVDecodeVopHeader and */
-/*                          PVDecodeVopBody                                 */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVDecodeVideoFrame(VideoDecControls *decCtrl, uint8 *buffer[],
-                                        uint32 timestamp[], int32 buffer_size[], uint use_ext_timestamp[], uint8 *currYUV)
-{
-    PV_STATUS status = PV_FAIL;
-    VopHeaderInfo header_info;
-
-    status = (PV_STATUS)PVDecodeVopHeader(decCtrl, buffer, timestamp, buffer_size, &header_info, use_ext_timestamp, currYUV);
-    if (status != PV_TRUE)
-        return PV_FALSE;
-
-    if (PVDecodeVopBody(decCtrl, buffer_size) != PV_TRUE)
-    {
-        return PV_FALSE;
-    }
-
-    return PV_TRUE;
-}
-
-/* ======================================================================== */
-/*  Function : PVDecodeVopHeader()                                          */
-/*  Date     : 08/22/2002                                                   */
-/*  Purpose  : Determine target layer and decode vop header, modified from  */
-/*              original PVDecodeVideoFrame.                                */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVDecodeVopHeader(VideoDecControls *decCtrl, uint8 *buffer[],
-                       uint32 timestamp[], int32 buffer_size[], VopHeaderInfo *header_info, uint use_ext_timestamp [], uint8 *currYUV)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    Vol *currVol;
-    Vop *currVop = video->currVop;
-    Vop **vopHeader = video->vopHeader;
-    BitstreamDecVideo *stream;
-
-    int target_layer;
-
-#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
-    PV_STATUS status = PV_FAIL;
-    int idx;
-    int32 display_time;
-
-    /* decide which frame to decode next */
-    if (decCtrl->nLayers > 1)
-    {
-        display_time = target_layer = -1;
-        for (idx = 0; idx < decCtrl->nLayers; idx++)
-        {
-            /* do we have data for this layer? */
-            if (buffer_size[idx] <= 0)
-            {
-                timestamp[idx] = -1;
-                continue;
-            }
-
-            /* did the application provide a timestamp for this vop? */
-            if (timestamp[idx] < 0)
-            {
-                if (vopHeader[idx]->timeStamp < 0)
-                {
-                    /* decode the timestamp in the bitstream */
-                    video->currLayer = idx;
-                    stream = video->vol[idx]->bitstream;
-                    BitstreamReset(stream, buffer[idx], buffer_size[idx]);
-
-                    while ((status = DecodeVOPHeader(video, vopHeader[idx], FALSE)) != PV_SUCCESS)
-                    {
-                        /* Try to find a VOP header in the buffer.   08/30/2000. */
-                        if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
-                        {
-                            /* if we don't have data for enhancement layer, */
-                            /*    don't just stop.   09/07/2000.          */
-                            buffer_size[idx] = 0;
-                            break;
-                        }
-                    }
-                    if (status == PV_SUCCESS)
-                    {
-                        vopHeader[idx]->timeStamp =
-                            timestamp[idx] = CalcVopDisplayTime(video->vol[idx], vopHeader[idx], video->shortVideoHeader);
-                        if (idx == 0) vopHeader[idx]->refSelectCode = 1;
-                    }
-                }
-                else
-                {
-                    /* We've decoded this vop header in the previous run already. */
-                    timestamp[idx] = vopHeader[idx]->timeStamp;
-                }
-            }
-
-            /* Use timestamps to select the next VOP to be decoded */
-            if (timestamp[idx] >= 0 && (display_time < 0 || display_time > timestamp[idx]))
-            {
-                display_time = timestamp[idx];
-                target_layer = idx;
-            }
-            else if (display_time == timestamp[idx])
-            {
-                /* we have to handle either SNR or spatial scalability here. */
-            }
-        }
-        if (target_layer < 0) return PV_FALSE;
-
-        /* set up for decoding the target layer */
-        video->currLayer = target_layer;
-        currVol = video->vol[target_layer];
-        video->bitstream = stream = currVol->bitstream;
-
-        /* We need to decode the vop header if external timestamp   */
-        /*    is provided.    10/04/2000                            */
-        if (vopHeader[target_layer]->timeStamp < 0)
-        {
-            stream = video->vol[target_layer]->bitstream;
-            BitstreamReset(stream, buffer[target_layer], buffer_size[target_layer]);
-
-            while (DecodeVOPHeader(video, vopHeader[target_layer], TRUE) != PV_SUCCESS)
-            {
-                /* Try to find a VOP header in the buffer.   08/30/2000. */
-                if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
-                {
-                    /* if we don't have data for enhancement layer, */
-                    /*    don't just stop.   09/07/2000.          */
-                    buffer_size[target_layer] = 0;
-                    break;
-                }
-            }
-            video->vol[target_layer]->timeInc_offset = vopHeader[target_layer]->timeInc;
-            video->vol[target_layer]->moduloTimeBase = timestamp[target_layer];
-            vopHeader[target_layer]->timeStamp = timestamp[target_layer];
-            if (target_layer == 0) vopHeader[target_layer]->refSelectCode = 1;
-        }
-    }
-    else /* base layer only decoding */
-    {
-#endif
-        video->currLayer = target_layer = 0;
-        currVol = video->vol[0];
-        video->bitstream = stream = currVol->bitstream;
-        if (buffer_size[0] <= 0) return PV_FALSE;
-        BitstreamReset(stream, buffer[0], buffer_size[0]);
-
-        if (video->shortVideoHeader)
-        {
-            while (DecodeShortHeader(video, vopHeader[0]) != PV_SUCCESS)
-            {
-                if (PVSearchNextH263Frame(stream) != PV_SUCCESS)
-                {
-                    /* There is no vop header in the buffer,    */
-                    /*   clean bitstream buffer.     2/5/2001   */
-                    buffer_size[0] = 0;
-                    if (video->initialized == PV_FALSE)
-                    {
-                        video->displayWidth = video->width = 0;
-                        video->displayHeight = video->height = 0;
-                    }
-                    return PV_FALSE;
-                }
-            }
-
-            if (use_ext_timestamp[0])
-            {
-                /* MTB for H263 is absolute TR */
-                /* following line is equivalent to  round((timestamp[0]*30)/1001);   11/13/2001 */
-                video->vol[0]->moduloTimeBase = 30 * ((timestamp[0] + 17) / 1001) + (30 * ((timestamp[0] + 17) % 1001) / 1001);
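-                /* Worked example of the rounding above: timestamp[0] = 1001
-                   gives 30*(1018/1001) + (30*(1018%1001))/1001 = 30 + 0 = 30,
-                   i.e. round((1001*30)/1001); the +17 (~1001/60) supplies the
-                   rounding bias. */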
-                vopHeader[0]->timeStamp = timestamp[0];
-            }
-            else
-                vopHeader[0]->timeStamp = CalcVopDisplayTime(currVol, vopHeader[0], video->shortVideoHeader);
-        }
-        else
-        {
-            while (DecodeVOPHeader(video, vopHeader[0], FALSE) != PV_SUCCESS)
-            {
-                /* Try to find a VOP header in the buffer.   08/30/2000. */
-                if (PVSearchNextM4VFrame(stream) != PV_SUCCESS)
-                {
-                    /* There is no vop header in the buffer,    */
-                    /*   clean bitstream buffer.     2/5/2001   */
-                    buffer_size[0] = 0;
-                    return PV_FALSE;
-                }
-            }
-
-            if (use_ext_timestamp[0])
-            {
-                video->vol[0]->timeInc_offset = vopHeader[0]->timeInc;
-                video->vol[0]->moduloTimeBase = timestamp[0];  /*  11/12/2001 */
-                vopHeader[0]->timeStamp = timestamp[0];
-            }
-            else
-            {
-                vopHeader[0]->timeStamp = CalcVopDisplayTime(currVol, vopHeader[0], video->shortVideoHeader);
-            }
-        }
-
-        /* set up some base-layer only parameters */
-        vopHeader[0]->refSelectCode = 1;
-#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
-    }
-#endif
-    timestamp[target_layer] = video->currTimestamp = vopHeader[target_layer]->timeStamp;
-#ifdef PV_MEMORY_POOL
-    vopHeader[target_layer]->yChan = (PIXEL *)currYUV;
-    vopHeader[target_layer]->uChan = (PIXEL *)currYUV + decCtrl->size;
-    vopHeader[target_layer]->vChan = (PIXEL *)(vopHeader[target_layer]->uChan) + (decCtrl->size >> 2);
-#else
-    vopHeader[target_layer]->yChan = currVop->yChan;
-    vopHeader[target_layer]->uChan = currVop->uChan;
-    vopHeader[target_layer]->vChan = currVop->vChan;
-#endif
-    oscl_memcpy(currVop, vopHeader[target_layer], sizeof(Vop));
-
-#ifdef PV_SUPPORT_TEMPORAL_SCALABILITY
-    vopHeader[target_layer]->timeStamp = -1;
-#endif
-    /* put header info into the structure */
-    header_info->currLayer = target_layer;
-    header_info->timestamp = video->currTimestamp;
-    header_info->frameType = (MP4FrameType)currVop->predictionType;
-    header_info->refSelCode = vopHeader[target_layer]->refSelectCode;
-    header_info->quantizer = currVop->quantizer;
-    /***************************************/
-
-    return PV_TRUE;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVDecodeVopBody()                                            */
-/*  Date     : 08/22/2002                                                   */
-/*  Purpose  : Decode vop body after the header is decoded, modified from   */
-/*              original PVDecodeVideoFrame.                                */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVDecodeVopBody(VideoDecControls *decCtrl, int32 buffer_size[])
-{
-    PV_STATUS status = PV_FAIL;
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    int target_layer = video->currLayer;
-    Vol *currVol = video->vol[target_layer];
-    Vop *currVop = video->currVop;
-    Vop *prevVop = video->prevVop;
-    Vop *tempVopPtr;
-    int bytes_consumed = 0; /* Record how many bytes we used in the buffer.   04/24/2001 */
-
-    int idx;
-
-    if (currVop->vopCoded == 0)                  /*  07/03/2001 */
-    {
-        PV_BitstreamByteAlign(currVol->bitstream);
-        /* We should always clear up bitstream buffer.   10/10/2000 */
-        bytes_consumed = (getPointer(currVol->bitstream) + 7) >> 3;
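-        /* getPointer() reports the bitstream position in bits; adding 7 and
-           shifting right by 3 rounds up to whole bytes,
-           e.g. 17 bits -> (17+7)>>3 = 3 bytes. */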
-
-        if (bytes_consumed > currVol->bitstream->data_end_pos)
-        {
-            bytes_consumed = currVol->bitstream->data_end_pos;
-        }
-
-        if (bytes_consumed < buffer_size[target_layer])
-        {
-            /* If we only consume part of the bits in the buffer, take those */
-            /*  out.     04/24/2001 */
-            /*          oscl_memcpy(buffer[target_layer], buffer[target_layer]+bytes_consumed,
-                            (buffer_size[target_layer]-=bytes_consumed)); */
-            buffer_size[target_layer] -= bytes_consumed;
-        }
-        else
-        {
-            buffer_size[target_layer] = 0;
-        }
-#ifdef PV_MEMORY_POOL
-
-        if (target_layer)
-        {
-            if (video->prevEnhcVop->timeStamp > video->prevVop->timeStamp)
-            {
-                video->prevVop = video->prevEnhcVop;
-            }
-        }
-
-        if (!video->prevVop->yChan) {
-            ALOGE("b/35269635");
-            android_errorWriteLog(0x534e4554, "35269635");
-            return PV_FALSE;
-        }
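-        /* Not-coded VOP: repeat the previous frame by copying its full planar
-           YUV contents (size luma bytes plus two quarter-size chroma planes,
-           i.e. 3*size/2 bytes) into the current output buffer. */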
-        oscl_memcpy(currVop->yChan, video->prevVop->yChan, (decCtrl->size*3) / 2);
-
-        video->prevVop = prevVop;
-
-        video->concealFrame = currVop->yChan;       /*  07/07/2001 */
-
-        video->vop_coding_type = currVop->predictionType; /*  07/09/01 */
-
-        decCtrl->outputFrame = currVop->yChan;
-
-        /* Swap VOP pointers.  No enhc. frame oscl_memcpy() anymore!   04/24/2001 */
-        if (target_layer)
-        {
-            tempVopPtr = video->prevEnhcVop;
-            video->prevEnhcVop = video->currVop;
-            video->currVop = tempVopPtr;
-        }
-        else
-        {
-            tempVopPtr = video->prevVop;
-            video->prevVop = video->currVop;
-            video->currVop = tempVopPtr;
-        }
-#else
-        if (target_layer)       /* this is necessary to avoid flashback problems   06/21/2002*/
-        {
-            video->prevEnhcVop->timeStamp = currVop->timeStamp;
-        }
-        else
-        {
-            video->prevVop->timeStamp = currVop->timeStamp;
-        }
-#endif
-        video->vop_coding_type = currVop->predictionType; /*  07/09/01 */
-        /* the following is necessary to avoid displaying a not-coded I-VOP at the beginning of a session
-        or after random positioning  07/03/02*/
-        if (currVop->predictionType == I_VOP)
-        {
-            video->vop_coding_type = P_VOP;
-        }
-
-
-        return PV_TRUE;
-    }
-    /* ======================================================= */
-    /*  Decode vop body (if there is no error in the header!)  */
-    /* ======================================================= */
-
-    /* first, we need to select a reference frame */
-    if (decCtrl->nLayers > 1)
-    {
-        if (currVop->predictionType == I_VOP)
-        {
-            /* do nothing here */
-        }
-        else if (currVop->predictionType == P_VOP)
-        {
-            switch (currVop->refSelectCode)
-            {
-                case 0 : /* most recently decoded enhancement vop */
-                    /* Setup video->prevVop before we call PV_DecodeVop().   04/24/2001 */
-                    if (video->prevEnhcVop->timeStamp >= video->prevVop->timeStamp)
-                        video->prevVop = video->prevEnhcVop;
-                    break;
-
-                case 1 : /* most recently displayed base-layer vop */
-                    if (target_layer)
-                    {
-                        if (video->prevEnhcVop->timeStamp > video->prevVop->timeStamp)
-                            video->prevVop = video->prevEnhcVop;
-                    }
-                    break;
-
-                case 2 : /* next base-layer vop in display order */
-                    break;
-
-                case 3 : /* temporally coincident base-layer vop (no MV's) */
-                    break;
-            }
-        }
-        else /* we have a B-Vop */
-        {
-            mp4dec_log("DecodeVideoFrame(): B-VOP not supported.\n");
-        }
-    }
-
-    /* This is for the calculation of the frame rate and bitrate. */
-    idx = ++video->frame_idx % BITRATE_AVERAGE_WINDOW;
-
-    /* Calculate bitrate for this layer.   08/23/2000 */
-    status = PV_DecodeVop(video);
-    video->nBitsPerVop[idx] = getPointer(currVol->bitstream);
-    video->prevTimestamp[idx] = currVop->timeStamp;
-
-    /* restore video->prevVop after PV_DecodeVop().   04/24/2001 */
-//  if (currVop->refSelectCode == 0) video->prevVop = prevVop;
-    video->prevVop = prevVop;
-
-    /* Estimate the frame rate.   08/23/2000 */
-    video->duration = video->prevTimestamp[idx];
-    video->duration -= video->prevTimestamp[(++idx)%BITRATE_AVERAGE_WINDOW];
-    if (video->duration > 0)
-    { /* Only update framerate when the timestamp is right */
-        video->frameRate = (int)(FRAMERATE_SCALE) / video->duration;
-    }
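-    /* prevTimestamp[] is a circular window of the last BITRATE_AVERAGE_WINDOW
-       timestamps, so the duration above spans BITRATE_AVERAGE_WINDOW - 1 frame
-       intervals (newest timestamp minus the oldest still in the window). */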
-
-    /* We should always clear up bitstream buffer.   10/10/2000 */
-    bytes_consumed = (getPointer(currVol->bitstream) + 7) >> 3; /*  11/4/03 */
-
-    if (bytes_consumed > currVol->bitstream->data_end_pos)
-    {
-        bytes_consumed = currVol->bitstream->data_end_pos;
-    }
-
-    if (bytes_consumed < buffer_size[target_layer])
-    {
-        /* If we only consume part of the bits in the buffer, take those */
-        /*  out.     04/24/2001 */
-        /*      oscl_memcpy(buffer[target_layer], buffer[target_layer]+bytes_consumed,
-                    (buffer_size[target_layer]-=bytes_consumed)); */
-        buffer_size[target_layer] -= bytes_consumed;
-    }
-    else
-    {
-        buffer_size[target_layer] = 0;
-    }
-    switch (status)
-    {
-        case PV_FAIL :
-            return PV_FALSE;        /* this will take care of concealment if we lose whole frame  */
-
-        case PV_END_OF_VOP :
-            /* we may want to differentiate PV_END_OF_VOP and PV_SUCCESS */
-            /*    in the future.     05/10/2000                      */
-
-        case PV_SUCCESS :
-            /* Nothing is wrong :). */
-
-
-            video->concealFrame = video->currVop->yChan;       /*  07/07/2001 */
-
-            video->vop_coding_type = video->currVop->predictionType; /*  07/09/01 */
-
-            decCtrl->outputFrame = video->currVop->yChan;
-
-            /* Swap VOP pointers.  No enhc. frame oscl_memcpy() anymore!   04/24/2001 */
-            if (target_layer)
-            {
-                tempVopPtr = video->prevEnhcVop;
-                video->prevEnhcVop = video->currVop;
-                video->currVop = tempVopPtr;
-            }
-            else
-            {
-                tempVopPtr = video->prevVop;
-                video->prevVop = video->currVop;
-                video->currVop = tempVopPtr;
-            }
-            break;
-
-        default :
-            /* This will never happen */
-            break;
-    }
-
-    return PV_TRUE;
-}
-
-#ifdef PV_MEMORY_POOL
-OSCL_EXPORT_REF void PVSetReferenceYUV(VideoDecControls *decCtrl, uint8 *YUV)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    video->prevVop->yChan = (PIXEL *)YUV;
-    video->prevVop->uChan = (PIXEL *)YUV + video->size;
-    video->prevVop->vChan = (PIXEL *)video->prevVop->uChan + (decCtrl->size >> 2);
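-    /* Prime the reference frame with nominal black: in video-range YCbCr,
-       Y = 16 with Cb = Cr = 128 is black, giving the decoder a deterministic
-       reference before the first frame is decoded. */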
-    oscl_memset(video->prevVop->yChan, 16, sizeof(uint8)*decCtrl->size);     /*  10/31/01 */
-    oscl_memset(video->prevVop->uChan, 128, sizeof(uint8)*decCtrl->size / 2);
-    video->concealFrame = video->prevVop->yChan;               /*  07/07/2001 */
-    decCtrl->outputFrame = video->prevVop->yChan;              /*  06/19/2002 */
-}
-#endif
-
-
-/* ======================================================================== */
-/*  Function : VideoDecoderErrorDetected()                                  */
-/*  Date     : 06/20/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : This function will be called every time an error in the      */
-/*              bitstream is detected.                                      */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-uint VideoDecoderErrorDetected(VideoDecData *)
-{
-    /* This is only used for trapping bitstream errors for debugging */
-    return 0;
-}
-
-#ifdef ENABLE_LOG
-#include <stdio.h>
-#include <stdarg.h>
-/* ======================================================================== */
-/*  Function : m4vdec_dprintf()                                             */
-/*  Date     : 08/15/2000                                                   */
-/*  Purpose  : This is a function that logs messages in the mpeg4 video     */
-/*             decoder.  We can call the standard PacketVideo PVMessage     */
-/*             from inside this function if necessary.                      */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     : To turn on the logging, LOG_MP4DEC_MESSAGE must be defined   */
-/*              when compiling this file (only this file).                  */
-/*  Modified :                                                              */
-/* ======================================================================== */
-void m4vdec_dprintf(char *format, ...)
-{
-    FILE *log_fp;
-    va_list args;
-    va_start(args, format);
-
-    /* open the log file */
-    log_fp = fopen("\\mp4dec_log.txt", "a+");
-    if (log_fp == NULL) return;
-    /* output the message */
-    vfprintf(log_fp, format, args);
-    fclose(log_fp);
-
-    va_end(args);
-}
-#endif
-
-
-/* ======================================================================== */
-/*  Function : IsIntraFrame()                                               */
-/*  Date     : 05/29/2000                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : TRUE if the most recently decoded frame is an Intra frame.   */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool IsIntraFrame(VideoDecControls *decCtrl)
-{
-    VideoDecData *video = (VideoDecData *)decCtrl->videoDecoderData;
-    return (video->vop_coding_type == I_VOP);
-}
-
-/* ======================================================================== */
-/*  Function : PVDecPostProcess()                                           */
-/*  Date     : 01/09/2002                                                   */
-/*  Purpose  : PostProcess one video frame and return a YUV-12 image.       */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-void PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV)
-{
-    uint8 *outputBuffer;
-#ifdef PV_POSTPROC_ON
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    int32 tmpvar;
-    if (outputYUV)
-    {
-        outputBuffer = outputYUV;
-    }
-    else
-    {
-        if (video->postFilterType)
-        {
-            outputBuffer = video->currVop->yChan;
-        }
-        else
-        {
-            outputBuffer = decCtrl->outputFrame;
-        }
-    }
-
-    if (video->postFilterType)
-    {
-        /* Post-processing,  */
-        PostFilter(video, video->postFilterType, outputBuffer);
-    }
-    else
-    {
-        if (outputYUV)
-        {
-            /* Copy decoded frame to the output buffer. */
-            tmpvar = (int32)video->width * video->height;
-            oscl_memcpy(outputBuffer, decCtrl->outputFrame, tmpvar*3 / 2);           /*  3/3/01 */
-        }
-    }
-#else
-    outputBuffer = decCtrl->outputFrame;
-    (void)outputYUV;    /* suppress unused-parameter warning when PV_POSTPROC_ON is off */
-#endif
-    decCtrl->outputFrame = outputBuffer;
-    return;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV,  */
-/*                              uint32 timestamp)                           */
-/*  Date     : 07/22/2003                                                   */
-/*  Purpose  : Get YUV reference frame from external source.                */
-/*  In/out   : YUV 4:2:0 frame containing the new reference frame with the  */
-/*   : same dimensions as the original (it need not be a multiple of 16).   */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    Vop *prevVop = video->prevVop;
-    int width = video->width;
-    uint8 *dstPtr, *orgPtr, *dstPtr2, *orgPtr2;
-    int32 size = (int32)width * video->height;
-
-
-    /* set new parameters */
-    prevVop->timeStamp = timestamp;
-    prevVop->predictionType = I_VOP;
-
-    dstPtr = prevVop->yChan;
-    orgPtr = refYUV;
-    oscl_memcpy(dstPtr, orgPtr, size);
-    dstPtr = prevVop->uChan;
-    dstPtr2 = prevVop->vChan;
-    orgPtr = refYUV + size;
-    orgPtr2 = orgPtr + (size >> 2);
-    oscl_memcpy(dstPtr, orgPtr, (size >> 2));
-    oscl_memcpy(dstPtr2, orgPtr2, (size >> 2));
-
-    video->concealFrame = video->prevVop->yChan;
-    video->vop_coding_type = I_VOP;
-    decCtrl->outputFrame = video->prevVop->yChan;
-
-    return PV_TRUE;
-}
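-
-/* As the copies above imply, refYUV is expected to be a contiguous planar
-   frame: width*height luma bytes followed by the two quarter-size chroma
-   planes (width*height*3/2 bytes in total).  PVDecSetEnhReference() below
-   assumes the same layout. */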
-
-/* ======================================================================== */
-/*  Function : PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV,   */
-/*                              uint32 timestamp)                           */
-/*  Date     : 07/23/2003                                                   */
-/*  Purpose  : Get YUV enhance reference frame from external source.        */
-/*  In/out   : YUV 4:2:0 frame containing the new reference frame with the  */
-/*   : same dimensions as the original (it need not be a multiple of 16).   */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified :                                                              */
-/* ======================================================================== */
-Bool PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 timestamp)
-{
-    VideoDecData *video = (VideoDecData *) decCtrl->videoDecoderData;
-    Vop *prevEnhcVop = video->prevEnhcVop;
-    uint8 *dstPtr, *orgPtr, *dstPtr2, *orgPtr2;
-    int32 size = (int32) video->width * video->height;
-
-    if (video->numberOfLayers <= 1)
-        return PV_FALSE;
-
-
-    /* set new parameters */
-    prevEnhcVop->timeStamp = timestamp;
-    prevEnhcVop->predictionType = I_VOP;
-
-    dstPtr = prevEnhcVop->yChan;
-    orgPtr = refYUV;
-    oscl_memcpy(dstPtr, orgPtr, size);
-    dstPtr = prevEnhcVop->uChan;
-    dstPtr2 = prevEnhcVop->vChan;
-    orgPtr = refYUV + size;
-    orgPtr2 = orgPtr + (size >> 2);
-    oscl_memcpy(dstPtr, orgPtr, (size >> 2));
-    oscl_memcpy(dstPtr2, orgPtr2, (size >> 2));
-    video->concealFrame = video->prevEnhcVop->yChan;
-    video->vop_coding_type = I_VOP;
-    decCtrl->outputFrame = video->prevEnhcVop->yChan;
-
-    return PV_TRUE;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVGetVolInfo()                                               */
-/*  Date     : 08/06/2003                                                   */
-/*  Purpose  : Get the VOL info (base layer only).                          */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Note     :                                                              */
-/*  Modified : 06/24/2004                                                   */
-/* ======================================================================== */
-Bool PVGetVolInfo(VideoDecControls *decCtrl, VolInfo *pVolInfo)
-{
-    Vol *currVol;
-
-    if (pVolInfo == NULL || decCtrl == NULL || decCtrl->videoDecoderData == NULL ||
-            ((VideoDecData *)decCtrl->videoDecoderData)->vol[0] == NULL) return PV_FALSE;
-
-    currVol = ((VideoDecData *)(decCtrl->videoDecoderData))->vol[0];
-
-    // get the VOL info
-    pVolInfo->shortVideoHeader = (int32)((VideoDecData *)(decCtrl->videoDecoderData))->shortVideoHeader;
-    pVolInfo->dataPartitioning = (int32)currVol->dataPartitioning;
-    pVolInfo->errorResDisable  = (int32)currVol->errorResDisable;
-    pVolInfo->useReverseVLC    = (int32)currVol->useReverseVLC;
-    pVolInfo->scalability      = (int32)currVol->scalability;
-    pVolInfo->nbitsTimeIncRes  = (int32)currVol->nbitsTimeIncRes;
-    pVolInfo->profile_level_id = (int32)currVol->profile_level_id;
-
-    return PV_TRUE;
-}
-
-
-
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp b/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
deleted file mode 100644
index 655491a..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/test/Android.bp
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "Mpeg4H263DecoderTest",
-    gtest: true,
-
-    srcs: [
-        "Mpeg4H263DecoderTest.cpp",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    static_libs: [
-        "libstagefright_m4vh263dec",
-        "libstagefright_foundation",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
deleted file mode 100755
index f572b0c..0000000
--- a/media/libstagefright/codecs/m4v_h263/dec/test/AndroidTest.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2020 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Test module config for Mpeg4H263 Decoder unit tests">
-    <option name="test-suite-tag" value="Mpeg4H263DecoderTest" />
-    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="true" />
-        <option name="push" value="Mpeg4H263DecoderTest->/data/local/tmp/Mpeg4H263DecoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/codecs/m4v_h263/dec/test/Mpeg4H263Decoder-1.1.zip?unzip=true"
-            value="/data/local/tmp/Mpeg4H263DecoderTestRes/" />
-    </target_preparer>
-
-    <test class="com.android.tradefed.testtype.GTest" >
-        <option name="native-test-device-path" value="/data/local/tmp" />
-        <option name="module-name" value="Mpeg4H263DecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg4H263DecoderTestRes/" />
-    </test>
-</configuration>
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
index b8bc24e..d10e40d 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.bp
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.bp
@@ -1,59 +1,20 @@
-cc_library_static {
-    name: "libstagefright_m4vh263enc",
-    vendor_available: true,
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media.swcodec",
-    ],
-    min_sdk_version: "29",
-
-    srcs: [
-        "src/bitstream_io.cpp",
-        "src/combined_encode.cpp", "src/datapart_encode.cpp",
-        "src/dct.cpp",
-        "src/findhalfpel.cpp",
-        "src/fastcodemb.cpp",
-        "src/fastidct.cpp",
-        "src/fastquant.cpp",
-        "src/me_utils.cpp",
-        "src/mp4enc_api.cpp",
-        "src/rate_control.cpp",
-        "src/motion_est.cpp",
-        "src/motion_comp.cpp",
-        "src/sad.cpp",
-        "src/sad_halfpel.cpp",
-        "src/vlc_encode.cpp",
-        "src/vop.cpp",
-    ],
-
-    cflags: [
-        "-DBX_RC",
-        "-Werror",
-    ],
-
-    version_script: "exports.lds",
-
-    local_include_dirs: ["src"],
-    export_include_dirs: ["include"],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
-
 //###############################################################################
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_mpeg4enc",
     defaults: ["libstagefright_softomx-defaults"],
 
     srcs: ["SoftMPEG4Encoder.cpp"],
 
-    local_include_dirs: ["src"],
-
     cflags: [
         "-DBX_RC",
     ],
@@ -67,29 +28,3 @@
         cfi: true,
     },
 }
-
-//###############################################################################
-
-cc_test {
-    name: "libstagefright_m4vh263enc_test",
-    gtest: false,
-
-    srcs: ["test/m4v_h263_enc_test.cpp"],
-
-    local_include_dirs: ["src"],
-
-    cflags: [
-        "-DBX_RC",
-        "-Wall",
-        "-Werror",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: ["libstagefright_m4vh263enc"],
-}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index fa7db81..fb6c4e2 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -528,6 +528,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/motion_est.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/motion_est.cpp
deleted file mode 100644
index 997b78d..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/src/motion_est.cpp
+++ /dev/null
@@ -1,1741 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "mp4def.h"
-#include "mp4enc_lib.h"
-#include "mp4lib_int.h"
-#include "m4venc_oscl.h"
-
-//#define PRINT_MV
-#define MIN_GOP 1   /* minimum size of GOP,  1/23/01, need to be tested */
-
-#define CANDIDATE_DISTANCE  0 /* how far apart candidates must be to count as distinct */
-/* shouldn't be more than 3 */
-
-#define ZERO_MV_PREF    0 /* 0: bias (0,0)MV before full-pel search, lowest complexity*/
-/* 1: bias (0,0)MV after full-pel search, before half-pel, highest comp */
-/* 2: bias (0,0)MV after half-pel, high comp, better PSNR */
-
-#define RASTER_REFRESH  /* instead of random INTRA refresh, do raster scan,  2/26/01 */
-
-#ifdef RASTER_REFRESH
-#define TARGET_REFRESH_PER_REGION 4 /* no. of MBs per frame to be INTRA refreshed */
-#else
-#define TARGET_REFRESH_PER_REGION 1 /* no. of MBs per region to be INTRA refreshed */
-#endif
-
-#define ALL_CAND_EQUAL  10  /*  any number greater than 5 will work */
-
-#define NumPixelMB  256     /*  number of pixels used in SAD calculation */
-
-#define DEF_8X8_WIN 3   /* search region for 8x8 MVs around the 16x16 MV */
-#define MB_Nb  256
-
-#define PREF_NULL_VEC 129   /* for zero vector bias */
-#define PREF_16_VEC 129     /* 1MV bias versus 4MVs*/
-#define PREF_INTRA  512     /* bias for INTRA coding */
-
-const static Int tab_exclude[9][9] =  // [last_loc][curr_loc]
-{
-    {0, 0, 0, 0, 0, 0, 0, 0, 0},
-    {0, 0, 0, 0, 1, 1, 1, 0, 0},
-    {0, 0, 0, 0, 1, 1, 1, 1, 1},
-    {0, 0, 0, 0, 0, 0, 1, 1, 1},
-    {0, 1, 1, 0, 0, 0, 1, 1, 1},
-    {0, 1, 1, 0, 0, 0, 0, 0, 1},
-    {0, 1, 1, 1, 1, 0, 0, 0, 1},
-    {0, 0, 1, 1, 1, 0, 0, 0, 0},
-    {0, 0, 1, 1, 1, 1, 1, 0, 0}
-}; //to decide whether to continue or compute
-
-const static Int refine_next[8][2] =    /* [curr_k][increment] */
-{
-    {0, 0}, {2, 0}, {1, 1}, {0, 2}, { -1, 1}, { -2, 0}, { -1, -1}, {0, -2}
-};
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    void MBMotionSearch(VideoEncData *video, UChar *cur, UChar *best_cand[],
-    Int i0, Int j0, Int type_pred, Int fullsearch, Int *hp_guess);
-
-    Int  fullsearch(VideoEncData *video, Vol *currVol, UChar *ref, UChar *cur,
-                    Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh);
-    Int fullsearchBlk(VideoEncData *video, Vol *currVol, UChar *cent, UChar *cur,
-                      Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh, Int range);
-    void CandidateSelection(Int *mvx, Int *mvy, Int *num_can, Int imb, Int jmb,
-                            VideoEncData *video, Int type_pred);
-    void RasterIntraUpdate(UChar *intraArray, UChar *Mode, Int totalMB, Int numRefresh);
-    void ResetIntraUpdate(UChar *intraArray, Int totalMB);
-    void ResetIntraUpdateRegion(UChar *intraArray, Int start_i, Int rwidth,
-                                Int start_j, Int rheight, Int mbwidth, Int mbheight);
-
-    void MoveNeighborSAD(Int dn[], Int new_loc);
-    Int FindMin(Int dn[]);
-    void PrepareCurMB(VideoEncData *video, UChar *cur);
-
-#ifdef __cplusplus
-}
-#endif
-
-/***************************************/
-/*  2/28/01, for HYPOTHESIS TESTING */
-#ifdef HTFM     /* defined in mp4def.h */
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-    void CalcThreshold(double pf, double exp_lamda[], Int nrmlz_th[]);
-    void    HTFMPrepareCurMB(VideoEncData *video, HTFM_Stat *htfm_stat, UChar *cur);
-#ifdef __cplusplus
-}
-#endif
-
-
-#define HTFM_Pf  0.25   /* 3/2/1, probability of false alarm, can be varied from 0 to 0.5 */
-/***************************************/
-#endif
-
-#ifdef _SAD_STAT
-ULong num_MB = 0;
-ULong num_HP_MB = 0;
-ULong num_Blk = 0;
-ULong num_HP_Blk = 0;
-ULong num_cand = 0;
-ULong num_better_hp = 0;
-ULong i_dist_from_guess = 0;
-ULong j_dist_from_guess = 0;
-ULong num_hp_not_zero = 0;
-#endif
-
-
-
-/*==================================================================
-    Function:   MotionEstimation
-    Date:       10/3/2000
-    Purpose:    Go through all macroblocks for motion search and
-                perform scene change detection.
-====================================================================*/
-
-void MotionEstimation(VideoEncData *video)
-{
-    UChar use_4mv = video->encParams->MV8x8_Enabled;
-    Vol *currVol = video->vol[video->currLayer];
-    Vop *currVop = video->currVop;
-    VideoEncFrameIO *currFrame = video->input;
-    Int i, j, comp;
-    Int mbwidth = currVol->nMBPerRow;
-    Int mbheight = currVol->nMBPerCol;
-    Int totalMB = currVol->nTotalMB;
-    Int width = currFrame->pitch;
-    UChar *mode_mb, *Mode = video->headerInfo.Mode;
-    MOT *mot_mb, **mot = video->mot;
-    UChar *intraArray = video->intraArray;
-    Int FS_en = video->encParams->FullSearch_Enabled;
-    void (*ComputeMBSum)(UChar *, Int, MOT *) = video->functionPointer->ComputeMBSum;
-    void (*ChooseMode)(UChar*, UChar*, Int, Int) = video->functionPointer->ChooseMode;
-
-    Int numIntra, start_i, numLoop, incr_i;
-    Int mbnum, offset;
-    UChar *cur, *best_cand[5];
-    Int sad8 = 0, sad16 = 0;
-    Int totalSAD = 0;   /* average SAD for rate control */
-    Int skip_halfpel_4mv;
-    Int f_code_p, f_code_n, max_mag = 0, min_mag = 0;
-    Int type_pred;
-    Int xh[5] = {0, 0, 0, 0, 0};
-    Int yh[5] = {0, 0, 0, 0, 0}; /* half-pel */
-    UChar hp_mem4MV[17*17*4];
-
-#ifdef HTFM
-    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
-    Int collect = 0;
-    HTFM_Stat htfm_stat;
-    double newvar[16];
-    double exp_lamda[15];
-    /*********************************/
-#endif
-    Int hp_guess = 0;
-#ifdef PRINT_MV
-    FILE *fp_debug;
-#endif
-
-//  FILE *fstat;
-//  static int frame_num = 0;
-
-    offset = 0;
-
-    if (video->currVop->predictionType == I_VOP)
-    {   /* compute the SAV */
-        mbnum = 0;
-        cur = currFrame->yChan;
-
-        for (j = 0; j < mbheight; j++)
-        {
-            for (i = 0; i < mbwidth; i++)
-            {
-                video->mbnum = mbnum;
-                mot_mb = mot[mbnum];
-
-                (*ComputeMBSum)(cur + (i << 4), width, mot_mb);
-
-                totalSAD += mot_mb[0].sad;
-
-                mbnum++;
-            }
-            cur += (width << 4);
-        }
-
-        video->sumMAD = (float)totalSAD / (float)NumPixelMB;
-
-        ResetIntraUpdate(intraArray, totalMB);
-
-        return  ;
-    }
-
-    /* 09/20/05 */
-    if (video->prevBaseVop->padded == 0 && !video->encParams->H263_Enabled)
-    {
-        PaddingEdge(video->prevBaseVop);
-        video->prevBaseVop->padded = 1;
-    }
-
-    /* Random INTRA update */
-    /*  suggest to do it in CodeMB */
-    /*  2/21/2001 */
-    //if(video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2)
-    if (video->currLayer == 0 && video->encParams->Refresh)
-    {
-        RasterIntraUpdate(intraArray, Mode, totalMB, video->encParams->Refresh);
-    }
-
-    video->sad_extra_info = NULL;
-
-#ifdef HTFM
-    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
-    InitHTFM(video, &htfm_stat, newvar, &collect);
-    /*********************************/
-#endif
-
-    if ((video->encParams->SceneChange_Det == 1) /*&& video->currLayer==0 */
-            && ((video->encParams->LayerFrameRate[0] < 5.0) || (video->numVopsInGOP > MIN_GOP)))
-        /* do not try to detect a new scene if low frame rate and too close to previous I-frame */
-    {
-        incr_i = 2;
-        numLoop = 2;
-        start_i = 1;
-        type_pred = 0; /* for initial candidate selection */
-    }
-    else
-    {
-        incr_i = 1;
-        numLoop = 1;
-        start_i = 0;
-        type_pred = 2;
-    }
-
-    /* First pass, loop thru half the macroblock */
-    /* determine scene change */
-    /* Second pass, for the rest of macroblocks */
-    numIntra = 0;
-    while (numLoop--)
-    {
-        for (j = 0; j < mbheight; j++)
-        {
-            if (incr_i > 1)
-                start_i = (start_i == 0 ? 1 : 0) ; /* toggle 0 and 1 */
-
-            offset = width * (j << 4) + (start_i << 4);
-
-            mbnum = j * mbwidth + start_i;
-
-            for (i = start_i; i < mbwidth; i += incr_i)
-            {
-                video->mbnum = mbnum;
-                mot_mb = mot[mbnum];
-                mode_mb = Mode + mbnum;
-
-                cur = currFrame->yChan + offset;
-
-
-                if (*mode_mb != MODE_INTRA)
-                {
-#if defined(HTFM)
-                    HTFMPrepareCurMB(video, &htfm_stat, cur);
-#else
-                    PrepareCurMB(video, cur);
-#endif
-                    /************************************************************/
-                    /******** full-pel 1MV and 4MVs search **********************/
-
-#ifdef _SAD_STAT
-                    num_MB++;
-#endif
-                    MBMotionSearch(video, cur, best_cand, i << 4, j << 4, type_pred,
-                                   FS_en, &hp_guess);
-
-#ifdef PRINT_MV
-                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                    fprintf(fp_debug, "#%d (%d,%d,%d) : ", mbnum, mot_mb[0].x, mot_mb[0].y, mot_mb[0].sad);
-                    fprintf(fp_debug, "(%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : ==>\n",
-                            mot_mb[1].x, mot_mb[1].y, mot_mb[1].sad,
-                            mot_mb[2].x, mot_mb[2].y, mot_mb[2].sad,
-                            mot_mb[3].x, mot_mb[3].y, mot_mb[3].sad,
-                            mot_mb[4].x, mot_mb[4].y, mot_mb[4].sad);
-                    fclose(fp_debug);
-#endif
-                    sad16 = mot_mb[0].sad;
-#ifdef NO_INTER4V
-                    sad8 = sad16;
-#else
-                    sad8 = mot_mb[1].sad + mot_mb[2].sad + mot_mb[3].sad + mot_mb[4].sad;
-#endif
-
-                    /* choose between INTRA or INTER */
-                    (*ChooseMode)(mode_mb, cur, width, ((sad8 < sad16) ? sad8 : sad16));
-                }
-                else    /* INTRA update, use for prediction 3/23/01 */
-                {
-                    mot_mb[0].x = mot_mb[0].y = 0;
-                }
-
-                if (*mode_mb == MODE_INTRA)
-                {
-                    numIntra++ ;
-
-                    /* compute SAV for rate control and fast DCT, 11/28/00 */
-                    (*ComputeMBSum)(cur, width, mot_mb);
-
-                    /* leave mot_mb[0] as it is for fast motion search */
-                    /* set the 4 MVs to zeros */
-                    for (comp = 1; comp <= 4; comp++)
-                    {
-                        mot_mb[comp].x = 0;
-                        mot_mb[comp].y = 0;
-                    }
-#ifdef PRINT_MV
-                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                    fprintf(fp_debug, "\n");
-                    fclose(fp_debug);
-#endif
-                }
-                else /* *mode_mb = MODE_INTER;*/
-                {
-                    if (video->encParams->HalfPel_Enabled)
-                    {
-#ifdef _SAD_STAT
-                        num_HP_MB++;
-#endif
-                        /* find half-pel resolution motion vector */
-                        FindHalfPelMB(video, cur, mot_mb, best_cand[0],
-                                      i << 4, j << 4, xh, yh, hp_guess);
-#ifdef PRINT_MV
-                        fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                        fprintf(fp_debug, "(%d,%d), %d\n", mot_mb[0].x, mot_mb[0].y, mot_mb[0].sad);
-                        fclose(fp_debug);
-#endif
-                        skip_halfpel_4mv = ((sad16 - mot_mb[0].sad) <= (MB_Nb >> 1) + 1);
-                        sad16 = mot_mb[0].sad;
-
-#ifndef NO_INTER4V
-                        if (use_4mv && !skip_halfpel_4mv)
-                        {
-                            /* Also decide 1MV or 4MV !!!!!!!!*/
-                            sad8 = FindHalfPelBlk(video, cur, mot_mb, sad16,
-                                                  best_cand, mode_mb, i << 4, j << 4, xh, yh, hp_mem4MV);
-
-#ifdef PRINT_MV
-                            fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                            fprintf(fp_debug, " (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) : (%d,%d,%d) \n",
-                                    mot_mb[1].x, mot_mb[1].y, mot_mb[1].sad,
-                                    mot_mb[2].x, mot_mb[2].y, mot_mb[2].sad,
-                                    mot_mb[3].x, mot_mb[3].y, mot_mb[3].sad,
-                                    mot_mb[4].x, mot_mb[4].y, mot_mb[4].sad);
-                            fclose(fp_debug);
-#endif
-                        }
-#endif /* NO_INTER4V */
-                    }
-                    else    /* HalfPel_Enabled ==0  */
-                    {
-#ifndef NO_INTER4V
-                        //if(sad16 < sad8-PREF_16_VEC)
-                        if (sad16 - PREF_16_VEC > sad8)
-                        {
-                            *mode_mb = MODE_INTER4V;
-                        }
-#endif
-                    }
-#if (ZERO_MV_PREF==2)   /* use mot_mb[7].sad as d0 computed in MBMotionSearch*/
-                    /******************************************************/
-                    if (mot_mb[7].sad - PREF_NULL_VEC < sad16 && mot_mb[7].sad - PREF_NULL_VEC < sad8)
-                    {
-                        mot_mb[0].sad = mot_mb[7].sad - PREF_NULL_VEC;
-                        mot_mb[0].x = mot_mb[0].y = 0;
-                        *mode_mb = MODE_INTER;
-                    }
-                    /******************************************************/
-#endif
-                    if (*mode_mb == MODE_INTER)
-                    {
-                        if (mot_mb[0].x == 0 && mot_mb[0].y == 0)   /* use zero vector */
-                            mot_mb[0].sad += PREF_NULL_VEC; /* add back the bias */
-
-                        mot_mb[1].sad = mot_mb[2].sad = mot_mb[3].sad = mot_mb[4].sad = (mot_mb[0].sad + 2) >> 2;
-                        mot_mb[1].x = mot_mb[2].x = mot_mb[3].x = mot_mb[4].x = mot_mb[0].x;
-                        mot_mb[1].y = mot_mb[2].y = mot_mb[3].y = mot_mb[4].y = mot_mb[0].y;
-
-                    }
-                }
-
-                /* find maximum magnitude */
-                /* compute average SAD for rate control, 11/28/00 */
-                if (*mode_mb == MODE_INTER)
-                {
-#ifdef PRINT_MV
-                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                    fprintf(fp_debug, "%d MODE_INTER\n", mbnum);
-                    fclose(fp_debug);
-#endif
-                    totalSAD += mot_mb[0].sad;
-                    if (mot_mb[0].x > max_mag)
-                        max_mag = mot_mb[0].x;
-                    if (mot_mb[0].y > max_mag)
-                        max_mag = mot_mb[0].y;
-                    if (mot_mb[0].x < min_mag)
-                        min_mag = mot_mb[0].x;
-                    if (mot_mb[0].y < min_mag)
-                        min_mag = mot_mb[0].y;
-                }
-                else if (*mode_mb == MODE_INTER4V)
-                {
-#ifdef PRINT_MV
-                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                    fprintf(fp_debug, "%d MODE_INTER4V\n", mbnum);
-                    fclose(fp_debug);
-#endif
-                    totalSAD += sad8;
-                    for (comp = 1; comp <= 4; comp++)
-                    {
-                        if (mot_mb[comp].x > max_mag)
-                            max_mag = mot_mb[comp].x;
-                        if (mot_mb[comp].y > max_mag)
-                            max_mag = mot_mb[comp].y;
-                        if (mot_mb[comp].x < min_mag)
-                            min_mag = mot_mb[comp].x;
-                        if (mot_mb[comp].y < min_mag)
-                            min_mag = mot_mb[comp].y;
-                    }
-                }
-                else    /* MODE_INTRA */
-                {
-#ifdef PRINT_MV
-                    fp_debug = fopen("c:\\bitstream\\mv1_debug.txt", "a");
-                    fprintf(fp_debug, "%d MODE_INTRA\n", mbnum);
-                    fclose(fp_debug);
-#endif
-                    totalSAD += mot_mb[0].sad;
-                }
-                mbnum += incr_i;
-                offset += (incr_i << 4);
-
-            }
-        }
-
-        if (incr_i > 1 && numLoop) /* scene change on and first loop */
-        {
-            //if(numIntra > ((totalMB>>3)<<1) + (totalMB>>3)) /* 75% of 50%MBs */
-            if (numIntra > (0.30*(totalMB / 2.0))) /* more than 30% of the 50% of MBs searched, i.e. 15% of all MBs */
-            {
-                /******** scene change detected *******************/
-                currVop->predictionType = I_VOP;
-                M4VENC_MEMSET(Mode, MODE_INTRA, sizeof(UChar)*totalMB); /* set this for MB level coding*/
-                currVop->quantizer = video->encParams->InitQuantIvop[video->currLayer];
-
-                /* compute the SAV for rate control & fast DCT */
-                totalSAD = 0;
-                offset = 0;
-                mbnum = 0;
-                cur = currFrame->yChan;
-
-                for (j = 0; j < mbheight; j++)
-                {
-                    for (i = 0; i < mbwidth; i++)
-                    {
-                        video->mbnum = mbnum;
-                        mot_mb = mot[mbnum];
-
-
-                        (*ComputeMBSum)(cur + (i << 4), width, mot_mb);
-                        totalSAD += mot_mb[0].sad;
-
-                        mbnum++;
-                    }
-                    cur += (width << 4);
-                }
-
-                video->sumMAD = (float)totalSAD / (float)NumPixelMB;
-                ResetIntraUpdate(intraArray, totalMB);
-                /* video->numVopsInGOP=0; 3/13/01 move it to vop.c*/
-
-                return ;
-            }
-        }
-        /******** no scene change, continue motion search **********************/
-        start_i = 0;
-        type_pred++; /* second pass */
-    }
-
-    video->sumMAD = (float)totalSAD / (float)NumPixelMB;    /* avg SAD */
-
-    /* find f_code , 10/27/2000 */
-    f_code_p = 1;
-    while ((max_mag >> (4 + f_code_p)) > 0)
-        f_code_p++;
-
-    f_code_n = 1;
-    min_mag *= -1;
-    while ((min_mag - 1) >> (4 + f_code_n) > 0)
-        f_code_n++;
-
-    currVop->fcodeForward = (f_code_p > f_code_n ? f_code_p : f_code_n);
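-    /* Arithmetic example for the f_code loops above: max_mag = 40 gives
-       40>>5 = 1 (f_code_p becomes 2) and then 40>>6 = 0, i.e. the smallest f
-       with max_mag < (16 << f); the negative side applies the same test to
-       (|min_mag| - 1). */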
-
-#ifdef HTFM
-    /***** HYPOTHESIS TESTING ********/  /* 2/28/01 */
-    if (collect)
-    {
-        collect = 0;
-        UpdateHTFM(video, newvar, exp_lamda, &htfm_stat);
-    }
-    /*********************************/
-#endif
-
-    return ;
-}
-
-
-#ifdef HTFM
-void InitHTFM(VideoEncData *video, HTFM_Stat *htfm_stat, double *newvar, Int *collect)
-{
-    Int i;
-    Int lx = video->currVop->width; /* current-frame stride (width, no padding) */
-    Int lx2 = lx << 1;
-    Int lx3 = lx2 + lx;
-    Int rx = video->currVop->pitch;
-    Int rx2 = rx << 1;
-    Int rx3 = rx2 + rx;
-
-    Int *offset, *offset2;
-
-    /* 4/11/01, collect data every 30 frames, doesn't have to be base layer */
-    if (((Int)video->numVopsInGOP) % 30 == 1)
-    {
-
-        *collect = 1;
-
-        htfm_stat->countbreak = 0;
-        htfm_stat->abs_dif_mad_avg = 0;
-
-        for (i = 0; i < 16; i++)
-        {
-            newvar[i] = 0.0;
-        }
-//      video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING_HTFM_Collect;
-        video->functionPointer->SAD_Macroblock = &SAD_MB_HTFM_Collect;
-        video->functionPointer->SAD_MB_HalfPel[0] = NULL;
-        video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HP_HTFM_Collectxh;
-        video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HP_HTFM_Collectyh;
-        video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HP_HTFM_Collectxhyh;
-        video->sad_extra_info = (void*)(htfm_stat);
-        offset = htfm_stat->offsetArray;
-        offset2 = htfm_stat->offsetRef;
-    }
-    else
-    {
-//      video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING_HTFM;
-        video->functionPointer->SAD_Macroblock = &SAD_MB_HTFM;
-        video->functionPointer->SAD_MB_HalfPel[0] = NULL;
-        video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HP_HTFMxh;
-        video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HP_HTFMyh;
-        video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HP_HTFMxhyh;
-        video->sad_extra_info = (void*)(video->nrmlz_th);
-        offset = video->nrmlz_th + 16;
-        offset2 = video->nrmlz_th + 32;
-    }
-
-    offset[0] = 0;
-    offset[1] = lx2 + 2;
-    offset[2] = 2;
-    offset[3] = lx2;
-    offset[4] = lx + 1;
-    offset[5] = lx3 + 3;
-    offset[6] = lx + 3;
-    offset[7] = lx3 + 1;
-    offset[8] = lx;
-    offset[9] = lx3 + 2;
-    offset[10] = lx3 ;
-    offset[11] = lx + 2 ;
-    offset[12] = 1;
-    offset[13] = lx2 + 3;
-    offset[14] = lx2 + 1;
-    offset[15] = 3;
-
-    offset2[0] = 0;
-    offset2[1] = rx2 + 2;
-    offset2[2] = 2;
-    offset2[3] = rx2;
-    offset2[4] = rx + 1;
-    offset2[5] = rx3 + 3;
-    offset2[6] = rx + 3;
-    offset2[7] = rx3 + 1;
-    offset2[8] = rx;
-    offset2[9] = rx3 + 2;
-    offset2[10] = rx3 ;
-    offset2[11] = rx + 2 ;
-    offset2[12] = 1;
-    offset2[13] = rx2 + 3;
-    offset2[14] = rx2 + 1;
-    offset2[15] = 3;
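-
-    /*
-     * The two offset tables above appear to enumerate the 16 phases of a 4x4
-     * subsampling lattice inside the 16x16 macroblock, in a dithered order,
-     * so that each HTFM partial-SAD stage adds one interleaved subset of
-     * pixels: offset[] uses the current-frame stride lx, offset2[] the padded
-     * reference stride rx.
-     */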
-
-    return ;
-}
-
-void UpdateHTFM(VideoEncData *video, double *newvar, double *exp_lamda, HTFM_Stat *htfm_stat)
-{
-    if (htfm_stat->countbreak == 0)
-        htfm_stat->countbreak = 1;
-
-    newvar[0] = (double)(htfm_stat->abs_dif_mad_avg) / (htfm_stat->countbreak * 16.);
-
-    if (newvar[0] < 0.001)
-    {
-        newvar[0] = 0.001; /* to prevent floating overflow */
-    }
-    exp_lamda[0] =  1 / (newvar[0] * 1.4142136);
-    exp_lamda[1] = exp_lamda[0] * 1.5825;
-    exp_lamda[2] = exp_lamda[0] * 2.1750;
-    exp_lamda[3] = exp_lamda[0] * 3.5065;
-    exp_lamda[4] = exp_lamda[0] * 3.1436;
-    exp_lamda[5] = exp_lamda[0] * 3.5315;
-    exp_lamda[6] = exp_lamda[0] * 3.7449;
-    exp_lamda[7] = exp_lamda[0] * 4.5854;
-    exp_lamda[8] = exp_lamda[0] * 4.6191;
-    exp_lamda[9] = exp_lamda[0] * 5.4041;
-    exp_lamda[10] = exp_lamda[0] * 6.5974;
-    exp_lamda[11] = exp_lamda[0] * 10.5341;
-    exp_lamda[12] = exp_lamda[0] * 10.0719;
-    exp_lamda[13] = exp_lamda[0] * 12.0516;
-    exp_lamda[14] = exp_lamda[0] * 15.4552;
-
-    CalcThreshold(HTFM_Pf, exp_lamda, video->nrmlz_th);
-    return ;
-}
-
-
-void CalcThreshold(double pf, double exp_lamda[], Int nrmlz_th[])
-{
-    Int i;
-    double temp[15];
-    //  printf("\nLamda: ");
-
-    /* parametric pre-modelling */
-    for (i = 0; i < 15; i++)
-    {
-        //    printf("%g ",exp_lamda[i]);
-        if (pf < 0.5)
-            temp[i] = 1 / exp_lamda[i] * M4VENC_LOG(2 * pf);
-        else
-            temp[i] = -1 / exp_lamda[i] * M4VENC_LOG(2 * (1 - pf));
-    }
-
-    nrmlz_th[15] = 0;
-    for (i = 0; i < 15; i++)        /* scale by the number of pixels */
-        nrmlz_th[i] = (Int)(temp[i] * ((i + 1) << 4) + 0.5);
-
-    return ;
-}
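-
-/*
- * Side note (illustrative, not from the original source): the if/else inside
- * CalcThreshold() above is exactly the quantile function of a zero-mean
- * Laplace distribution with rate lambda, density (lambda/2)*exp(-lambda*|x|).
- * A standalone restatement; LaplaceQuantile() is an illustrative name only:
- */
-static double LaplaceQuantile(double p, double lambda)
-{
-    /* CDF: F(x) = 0.5*exp(lambda*x) for x < 0, and 1 - 0.5*exp(-lambda*x) for x >= 0 */
-    if (p < 0.5)
-        return M4VENC_LOG(2.0 * p) / lambda;            /* negative branch */
-    else
-        return -M4VENC_LOG(2.0 * (1.0 - p)) / lambda;   /* non-negative branch */
-}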
-
-void    HTFMPrepareCurMB(VideoEncData *video, HTFM_Stat *htfm_stat, UChar *cur)
-{
-    void* tmp = (void*)(video->currYMB);
-    ULong *htfmMB = (ULong*)tmp;
-    UChar *ptr, byte;
-    Int *offset;
-    Int i;
-    ULong word;
-    Int width = video->currVop->width;
-
-    if (((Int)video->numVopsInGOP) % 30 == 1)
-    {
-        offset = htfm_stat->offsetArray;
-    }
-    else
-    {
-        offset = video->nrmlz_th + 16;
-    }
-
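-    /*
-     * Gather the 16 interleaved 4x4-subsampled phases of the current MB: for
-     * each phase, pack every 4th pixel of every 4th row into four 32-bit
-     * words (16 pixels per phase), which appears to be the order the HTFM
-     * partial-SAD routines consume the MB in.
-     */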
-    for (i = 0; i < 16; i++)
-    {
-        ptr = cur + offset[i];
-        word = ptr[0];
-        byte = ptr[4];
-        word |= (byte << 8);
-        byte = ptr[8];
-        word |= (byte << 16);
-        byte = ptr[12];
-        word |= (byte << 24);
-        *htfmMB++ = word;
-
-        word = *(ptr += (width << 2));
-        byte = ptr[4];
-        word |= (byte << 8);
-        byte = ptr[8];
-        word |= (byte << 16);
-        byte = ptr[12];
-        word |= (byte << 24);
-        *htfmMB++ = word;
-
-        word = *(ptr += (width << 2));
-        byte = ptr[4];
-        word |= (byte << 8);
-        byte = ptr[8];
-        word |= (byte << 16);
-        byte = ptr[12];
-        word |= (byte << 24);
-        *htfmMB++ = word;
-
-        word = *(ptr += (width << 2));
-        byte = ptr[4];
-        word |= (byte << 8);
-        byte = ptr[8];
-        word |= (byte << 16);
-        byte = ptr[12];
-        word |= (byte << 24);
-        *htfmMB++ = word;
-    }
-
-    return ;
-}
-
-
-#endif
-
-void    PrepareCurMB(VideoEncData *video, UChar *cur)
-{
-    void* tmp = (void*)(video->currYMB);
-    ULong *currYMB = (ULong*)tmp;
-    Int i;
-    Int width = video->currVop->width;
-
-    cur -= width;
-
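-    /*
-     * Copy the 16x16 current MB into the compact currYMB buffer, one row
-     * (four 32-bit loads) at a time.  The word loads rely on 4-byte alignment
-     * of each source row, which should hold here since width is a multiple of
-     * 16 and the MB offsets are multiples of 16 (assuming the frame buffer
-     * itself is at least 4-byte aligned).
-     */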
-    for (i = 0; i < 16; i++)
-    {
-        *currYMB++ = *((ULong*)(cur += width));
-        *currYMB++ = *((ULong*)(cur + 4));
-        *currYMB++ = *((ULong*)(cur + 8));
-        *currYMB++ = *((ULong*)(cur + 12));
-    }
-
-    return ;
-}
-
-
-/*==================================================================
-    Function:   MBMotionSearch
-    Date:       09/06/2000
-    Purpose:    Perform motion estimation for a macroblock.
-                Find the 1MV and the 4MVs in half-pel resolution,
-                using the ST1 algorithm of Chalidabhongse and Kuo,
-                CSVT, March '98.
-
-==================================================================*/
-
-void MBMotionSearch(VideoEncData *video, UChar *cur, UChar *best_cand[],
-                    Int i0, Int j0, Int type_pred, Int FS_en, Int *hp_guess)
-{
-    Vol *currVol = video->vol[video->currLayer];
-    UChar *ref, *cand, *ncand = NULL, *cur8;
-    void *extra_info = video->sad_extra_info;
-    Int mbnum = video->mbnum;
-    Int width = video->currVop->width; /* 6/12/01, must be multiple of 16 */
-    Int height = video->currVop->height;
-    MOT **mot = video->mot;
-    UChar use_4mv = video->encParams->MV8x8_Enabled;
-    UChar h263_mode = video->encParams->H263_Enabled;
-    Int(*SAD_Macroblock)(UChar*, UChar*, Int, void*) = video->functionPointer->SAD_Macroblock;
-    Int(*SAD_Block)(UChar*, UChar*, Int, Int, void*) = video->functionPointer->SAD_Block;
-    VideoEncParams *encParams = video->encParams;
-    Int range = encParams->SearchRange;
-
-    Int lx = video->currVop->pitch; /* padding */
-    Int comp;
-    Int i, j, imin, jmin, ilow, ihigh, jlow, jhigh, iorg, jorg;
-    Int d, dmin, dn[9];
-#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
-    Int d0;
-#endif
-    Int k;
-    Int mvx[5], mvy[5], imin0, jmin0;
-    Int num_can, center_again;
-    Int last_loc, new_loc = 0;
-    Int step, max_step = range >> 1;
-    Int next;
-
-    ref = video->forwardRefVop->yChan; /* origin of actual frame */
-
-    cur = video->currYMB; /* use smaller memory space for current MB */
-
-    /*  find limit of the search (adjusting search range)*/
-
-    if (!h263_mode)
-    {
-        ilow = i0 - range;
-        if (ilow < -15)
-            ilow = -15;
-        ihigh = i0 + range - 1;
-        if (ihigh > width - 1)
-            ihigh = width - 1;
-        jlow = j0 - range;
-        if (jlow < -15)
-            jlow = -15;
-        jhigh = j0 + range - 1;
-        if (jhigh > height - 1)
-            jhigh = height - 1;
-    }
-    else
-    {
-        ilow = i0 - range;
-        if (ilow < 0)
-            ilow = 0;
-        ihigh = i0 + range - 1;
-        if (ihigh > width - 16)
-            ihigh = width - 16;
-        jlow = j0 - range;
-        if (jlow < 0)
-            jlow = 0;
-        jhigh = j0 + range - 1;
-        if (jhigh > height - 16)
-            jhigh = height - 16;
-    }
-
-    imin = i0;
-    jmin = j0; /* needed for fullsearch */
-    ncand = ref + imin + jmin * lx;
-
-    /* for first row of MB, fullsearch can be used */
-    if (FS_en)
-    {
-        *hp_guess = 0; /* no guess for fast half-pel */
-
-        dmin =  fullsearch(video, currVol, ref, cur, &imin, &jmin, ilow, ihigh, jlow, jhigh);
-
-        ncand = ref + imin + jmin * lx;
-
-        mot[mbnum][0].sad = dmin;
-        mot[mbnum][0].x = (imin - i0) << 1;
-        mot[mbnum][0].y = (jmin - j0) << 1;
-        imin0 = imin << 1;  /* 16x16 MV in half-pel resolution */
-        jmin0 = jmin << 1;
-        best_cand[0] = ncand;
-    }
-    else
-    {   /* 4/7/01, modified this test so that full search on the top row is applied only up to the (0,3) MB */
-        /*            up to 30% complexity saving at essentially the same quality */
-        if (video->forwardRefVop->predictionType == I_VOP && j0 == 0 && i0 <= 64 && type_pred != 1)
-        {
-            *hp_guess = 0; /* no guess for fast half-pel */
-            dmin =  fullsearch(video, currVol, ref, cur, &imin, &jmin, ilow, ihigh, jlow, jhigh);
-            ncand = ref + imin + jmin * lx;
-        }
-        else
-        {
-            /************** initialize candidate **************************/
-            /* find initial motion vector */
-            CandidateSelection(mvx, mvy, &num_can, i0 >> 4, j0 >> 4, video, type_pred);
-
-            dmin = 65535;
-
-            /* check if all are equal */
-            if (num_can == ALL_CAND_EQUAL)
-            {
-                i = i0 + mvx[0];
-                j = j0 + mvy[0];
-
-                if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-                {
-                    cand = ref + i + j * lx;
-
-                    d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
-
-                    if (d < dmin)
-                    {
-                        dmin = d;
-                        imin = i;
-                        jmin = j;
-                        ncand = cand;
-                    }
-                }
-            }
-            else
-            {
-                /************** evaluate unique candidates **********************/
-                for (k = 0; k < num_can; k++)
-                {
-                    i = i0 + mvx[k];
-                    j = j0 + mvy[k];
-
-                    if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-                    {
-                        cand = ref + i + j * lx;
-                        d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
-
-                        if (d < dmin)
-                        {
-                            dmin = d;
-                            imin = i;
-                            jmin = j;
-                            ncand = cand;
-                        }
-                        else if ((d == dmin) && PV_ABS(mvx[k]) + PV_ABS(mvy[k]) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
-                        {
-                            dmin = d;
-                            imin = i;
-                            jmin = j;
-                            ncand = cand;
-                        }
-                    }
-                }
-            }
-            if (num_can == 0 || dmin == 65535) /* no candidate selected */
-            {
-                ncand = ref + i0 + j0 * lx; /* use (0,0) MV as initial value */
-                mot[mbnum][7].sad = dmin = (*SAD_Macroblock)(ncand, cur, (65535 << 16) | lx, extra_info);
-#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
-                d0 = dmin;
-#endif
-                imin = i0;
-                jmin = j0;
-            }
-
-#if (ZERO_MV_PREF==0)  /*  COMPUTE ZERO VECTOR FIRST !!!!!*/
-            dmin -= PREF_NULL_VEC;
-#endif
-
-            /******************* local refinement ***************************/
-            center_again = 0;
-            last_loc = new_loc = 0;
-            //          ncand = ref + jmin*lx + imin;  /* center of the search */
-            step = 0;
-            dn[0] = dmin;
-            while (!center_again && step <= max_step)
-            {
-
-                MoveNeighborSAD(dn, last_loc);
-
-                center_again = 1;
-                i = imin;
-                j = jmin - 1;
-                cand = ref + i + j * lx;
-
-                /*  starting from [0,-1] */
-                /* spiral check one step at a time*/
-                for (k = 2; k <= 8; k += 2)
-                {
-                    if (!tab_exclude[last_loc][k]) /* exclude last step computation */
-                    {       /* not already computed */
-                        if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-                        {
-                            d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, extra_info);
-                            dn[k] = d; /* keep it for half pel use */
-
-                            if (d < dmin)
-                            {
-                                ncand = cand;
-                                dmin = d;
-                                imin = i;
-                                jmin = j;
-                                center_again = 0;
-                                new_loc = k;
-                            }
-                            else if ((d == dmin) && PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
-                            {
-                                ncand = cand;
-                                imin = i;
-                                jmin = j;
-                                center_again = 0;
-                                new_loc = k;
-                            }
-                        }
-                    }
-                    if (k == 8)  /* end side search*/
-                    {
-                        if (!center_again)
-                        {
-                            k = -1; /* start diagonal search */
-                            cand -= lx;
-                            j--;
-                        }
-                    }
-                    else
-                    {
-                        next = refine_next[k][0];
-                        i += next;
-                        cand += next;
-                        next = refine_next[k][1];
-                        j += next;
-                        cand += lx * next;
-                    }
-                }
-                last_loc = new_loc;
-                step ++;
-            }
-            if (!center_again)
-                MoveNeighborSAD(dn, last_loc);
-
-            *hp_guess = FindMin(dn);
-
-        }
-
-#if (ZERO_MV_PREF==1)   /* compute (0,0) MV at the end */
-        if (d0 - PREF_NULL_VEC < dmin)
-        {
-            ncand = ref + i0 + j0 * lx;
-            dmin = d0;
-            imin = i0;
-            jmin = j0;
-        }
-#endif
-        mot[mbnum][0].sad = dmin;
-        mot[mbnum][0].x = (imin - i0) << 1;
-        mot[mbnum][0].y = (jmin - j0) << 1;
-        imin0 = imin << 1;  /* 16x16 MV in half-pel resolution */
-        jmin0 = jmin << 1;
-        best_cand[0] = ncand;
-    }
-    /* imin and jmin is the best 1 MV */
-#ifndef NO_INTER4V
-    /*******************  Find 4 motion vectors ****************************/
-    if (use_4mv && !h263_mode)
-    {
-#ifdef _SAD_STAT
-        num_Blk += 4;
-#endif
-        /* starting from the best 1MV */
-        //offset = imin + jmin*lx;
-        iorg = i0;
-        jorg = j0;
-
-        for (comp = 0; comp < 4; comp++)
-        {
-            i0 = iorg + ((comp & 1) << 3);
-            j0 = jorg + ((comp & 2) << 2);
-
-            imin = (imin0 >> 1) + ((comp & 1) << 3);    /* starting point from 16x16 MV */
-            jmin = (jmin0 >> 1) + ((comp & 2) << 2);
-            ncand = ref + imin + jmin * lx;
-
-            cur8 = cur + ((comp & 1) << 3) + (((comp & 2) << 2) << 4) ; /* 11/30/05, smaller cache */
-
-            /*  find limit of the search (adjusting search range)*/
-            ilow = i0 - range;
-            ihigh = i0 + range - 1 ;/* 4/9/01 */
-            if (ilow < -15)
-                ilow = -15;
-            if (ihigh > width - 1)
-                ihigh = width - 1;
-            jlow = j0 - range;
-            jhigh = j0 + range - 1 ;/* 4/9/01 */
-            if (jlow < -15)
-                jlow = -15;
-            if (jhigh > height - 1)
-                jhigh = height - 1;
-
-            SAD_Block = video->functionPointer->SAD_Block;
-
-            if (FS_en)  /* fullsearch enable, center around 16x16 MV */
-            {
-                dmin =  fullsearchBlk(video, currVol, ncand, cur8, &imin, &jmin, ilow, ihigh, jlow, jhigh, range);
-                ncand = ref + imin + jmin * lx;
-
-                mot[mbnum][comp+1].sad = dmin;
-                mot[mbnum][comp+1].x = (imin - i0) << 1;
-                mot[mbnum][comp+1].y = (jmin - j0) << 1;
-                best_cand[comp+1] = ncand;
-            }
-            else    /* no fullsearch, do local search */
-            {
-                /* starting point from 16x16 */
-                dmin = (*SAD_Block)(ncand, cur8, 65536, lx, extra_info);
-
-                /******************* local refinement ***************************/
-                center_again = 0;
-                last_loc = 0;
-
-                while (!center_again)
-                {
-                    center_again = 1;
-                    i = imin;
-                    j = jmin - 1;
-                    cand = ref + i + j * lx;
-
-                    /*  starting from [0,-1] */
-                    /* spiral check one step at a time*/
-                    for (k = 2; k <= 8; k += 2)
-                    {
-                        if (!tab_exclude[last_loc][k]) /* exclude last step computation */
-                        {       /* not already computed */
-                            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-                            {
-                                d = (*SAD_Block)(cand, cur8, dmin, lx, extra_info);
-
-                                if (d < dmin)
-                                {
-                                    ncand = cand;
-                                    dmin = d;
-                                    imin = i;
-                                    jmin = j;
-                                    center_again = 0;
-                                    new_loc = k;
-                                }
-                                else if ((d == dmin) &&
-                                         PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - imin) + PV_ABS(j0 - jmin))
-                                {
-                                    ncand = cand;
-                                    imin = i;
-                                    jmin = j;
-                                    center_again = 0;
-                                    new_loc = k;
-                                }
-                            }
-                        }
-                        if (k == 8)  /* end side search*/
-                        {
-                            if (!center_again)
-                            {
-                                k = -1; /* start diagonal search */
-                                if (j <= height - 1 && j > 0)   cand -= lx;
-                                j--;
-                            }
-                        }
-                        else
-                        {
-                            next = refine_next[k][0];
-                            cand += next;
-                            i += next;
-                            next = refine_next[k][1];
-                            cand += lx * next;
-                            j += next;
-                        }
-                    }
-                    last_loc = new_loc;
-                }
-                mot[mbnum][comp+1].sad = dmin;
-                mot[mbnum][comp+1].x = (imin - i0) << 1;
-                mot[mbnum][comp+1].y = (jmin - j0) << 1;
-                best_cand[comp+1] = ncand;
-            }
-            /********************************************/
-        }
-    }
-    else
-#endif  /* NO_INTER4V */
-    {
-        mot[mbnum][1].sad = mot[mbnum][2].sad = mot[mbnum][3].sad = mot[mbnum][4].sad = (dmin + 2) >> 2;
-        mot[mbnum][1].x = mot[mbnum][2].x = mot[mbnum][3].x = mot[mbnum][4].x = mot[mbnum][0].x;
-        mot[mbnum][1].y = mot[mbnum][2].y = mot[mbnum][3].y = mot[mbnum][4].y = mot[mbnum][0].y;
-        best_cand[1] = best_cand[2] = best_cand[3] = best_cand[4] = ncand;
-
-    }
-    return ;
-}
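-
-/*
- * Illustrative helper (not part of the original source): MBMotionSearch()
- * above stores its integer-pel search result in half-pel units relative to
- * the block origin, i.e. mot[mbnum][k].x = (imin - i0) << 1.  A standalone
- * restatement of that convention; the function name is illustrative only:
- */
-static void IntegerPelToHalfPelMV(Int imin, Int jmin, Int i0, Int j0,
-                                  Int *mvx_hp, Int *mvy_hp)
-{
-    *mvx_hp = (imin - i0) << 1;   /* e.g. a +3 integer-pel shift is +6 half-pel units */
-    *mvy_hp = (jmin - j0) << 1;
-}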
-
-
-/*===============================================================================
-    Function:   fullsearch
-    Date:       09/16/2000
-    Purpose:    Perform full-search motion estimation over the search region
-                in a spiral-outward manner.
-    Input/Output:   VideoEncData, current Vol, previous Vop, pointer to the upper-left corner of
-                current VOP, current coord (also output), boundaries.
-===============================================================================*/
-
-Int fullsearch(VideoEncData *video, Vol *currVol, UChar *prev, UChar *cur,
-               Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh)
-{
-    Int range = video->encParams->SearchRange;
-    UChar *cand;
-    Int i, j, k, l;
-    Int d, dmin;
-    Int i0 = *imin; /* current position */
-    Int j0 = *jmin;
-    Int(*SAD_Macroblock)(UChar*, UChar*, Int, void*) = video->functionPointer->SAD_Macroblock;
-    void *extra_info = video->sad_extra_info;
-//  UChar h263_mode = video->encParams->H263_Enabled;
-    Int lx = video->currVop->pitch; /* with padding */
-
-    Int offset = i0 + j0 * lx;
-
-    OSCL_UNUSED_ARG(currVol);
-
-    cand = prev + offset;
-
-    dmin  = (*SAD_Macroblock)(cand, cur, (65535 << 16) | lx, (void*)extra_info) - PREF_NULL_VEC;
-
-    /* perform spiral search */
-    for (k = 1; k <= range; k++)
-    {
-
-        i = i0 - k;
-        j = j0 - k;
-
-        cand = prev + i + j * lx;
-
-        for (l = 0; l < 8*k; l++)
-        {
-            /* no need for boundary checking again */
-            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-            {
-                d = (*SAD_Macroblock)(cand, cur, (dmin << 16) | lx, (void*)extra_info);
-
-                if (d < dmin)
-                {
-                    dmin = d;
-                    *imin = i;
-                    *jmin = j;
-                }
-                else if ((d == dmin) && PV_ABS(i0 - i) + PV_ABS(j0 - j) < PV_ABS(i0 - *imin) + PV_ABS(j0 - *jmin))
-                {
-                    dmin = d;
-                    *imin = i;
-                    *jmin = j;
-                }
-            }
-
-            if (l < (k << 1))
-            {
-                i++;
-                cand++;
-            }
-            else if (l < (k << 2))
-            {
-                j++;
-                cand += lx;
-            }
-            else if (l < ((k << 2) + (k << 1)))
-            {
-                i--;
-                cand--;
-            }
-            else
-            {
-                j--;
-                cand -= lx;
-            }
-        }
-    }
-
-    return dmin;
-}
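-
-/*
- * Illustrative sketch (not from the original source) of the ring-traversal
- * order used by the spiral loops in fullsearch() above and fullsearchBlk()
- * below: ring k starts at the top-left corner (x0 - k, y0 - k) and walks its
- * 8*k positions -- 2k right, 2k down, 2k left, 2k up.  visit() is a
- * placeholder; names are illustrative only.
- */
-static void VisitSpiralRing(Int x0, Int y0, Int k, void (*visit)(Int x, Int y))
-{
-    Int x = x0 - k, y = y0 - k, l;
-
-    for (l = 0; l < 8 * k; l++)
-    {
-        visit(x, y);               /* in the encoder this is the SAD evaluation */
-        if (l < 2 * k)      x++;   /* top edge, moving right   */
-        else if (l < 4 * k) y++;   /* right edge, moving down  */
-        else if (l < 6 * k) x--;   /* bottom edge, moving left */
-        else                y--;   /* left edge, moving up     */
-    }
-}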
-
-#ifndef NO_INTER4V
-/*===============================================================================
-    Function:   fullsearchBlk
-    Date:       01/9/2001
-    Purpose:    Perform full-search motion estimation of an 8x8 block over the
-                search region in a spiral-outward manner centered at the 16x16 MV.
-    Input/Output:   VideoEncData, MB coordinate, pointer to the initial MV on the
-                reference, pointer to the coordinates of the current block, search range.
-===============================================================================*/
-Int fullsearchBlk(VideoEncData *video, Vol *currVol, UChar *cent, UChar *cur,
-                  Int *imin, Int *jmin, Int ilow, Int ihigh, Int jlow, Int jhigh, Int range)
-{
-    UChar *cand, *ref;
-    Int i, j, k, l, istart, jstart;
-    Int d, dmin;
-    Int lx = video->currVop->pitch; /* with padding */
-    Int(*SAD_Block)(UChar*, UChar*, Int, Int, void*) = video->functionPointer->SAD_Block;
-    void *extra_info = video->sad_extra_info;
-
-    OSCL_UNUSED_ARG(currVol);
-
-    /* starting point centered at 16x16 MV */
-    ref = cent;
-    istart = *imin;
-    jstart = *jmin;
-
-    dmin = (*SAD_Block)(ref, cur, 65536, lx, (void*)extra_info);
-
-    cand = ref;
-    /* perform spiral search */
-    for (k = 1; k <= range; k++)
-    {
-
-        i = istart - k;
-        j = jstart - k;
-        cand -= (lx + 1);  /* candidate region */
-
-        for (l = 0; l < 8*k; l++)
-        {
-            /* no need for boundary checking again */
-            if (i >= ilow && i <= ihigh && j >= jlow && j <= jhigh)
-            {
-                d = (*SAD_Block)(cand, cur, dmin, lx, (void*)extra_info);
-
-                if (d < dmin)
-                {
-                    dmin = d;
-                    *imin = i;
-                    *jmin = j;
-                }
-                else if ((d == dmin) &&
-                         PV_ABS(istart - i) + PV_ABS(jstart - j) < PV_ABS(istart - *imin) + PV_ABS(jstart - *jmin))
-                {
-                    dmin = d;
-                    *imin = i;
-                    *jmin = j;
-                }
-            }
-
-            if (l < (k << 1))
-            {
-                i++;
-                cand++;
-            }
-            else if (l < (k << 2))
-            {
-                j++;
-                cand += lx;
-            }
-            else if (l < ((k << 2) + (k << 1)))
-            {
-                i--;
-                cand--;
-            }
-            else
-            {
-                j--;
-                cand -= lx;
-            }
-        }
-    }
-
-    return dmin;
-}
-#endif /* NO_INTER4V */
-
-/*===============================================================================
-    Function:   CandidateSelection
-    Date:       09/16/2000
-    Purpose:    Fill up the list of candidates using the spatio-temporal correlation
-                among neighboring blocks.
-    Input/Output:   type_pred = 0: first pass, 1: second pass, or no SCD
-    Modified:    09/23/01, remove redundant candidates before returning.
-===============================================================================*/
-
-void CandidateSelection(Int *mvx, Int *mvy, Int *num_can, Int imb, Int jmb,
-                        VideoEncData *video, Int type_pred)
-{
-    MOT **mot = video->mot;
-    MOT *pmot;
-    Int mbnum = video->mbnum;
-    Vol *currVol = video->vol[video->currLayer];
-    Int mbwidth = currVol->nMBPerRow;
-    Int mbheight = currVol->nMBPerCol;
-    Int i, j, same, num1;
-
-    *num_can = 0;
-
-    if (video->forwardRefVop->predictionType == P_VOP)
-    {
-        /* Spatio-Temporal Candidate (five candidates) */
-        if (type_pred == 0) /* first pass */
-        {
-            pmot = &mot[mbnum][0]; /* same coordinate previous frame */
-            mvx[(*num_can)] = (pmot->x) >> 1;
-            mvy[(*num_can)++] = (pmot->y) >> 1;
-            if (imb >= (mbwidth >> 1) && imb > 0)  /*left neighbor previous frame */
-            {
-                pmot = &mot[mbnum-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            else if (imb + 1 < mbwidth)   /*right neighbor previous frame */
-            {
-                pmot = &mot[mbnum+1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-
-            if (jmb < mbheight - 1)  /*bottom neighbor previous frame */
-            {
-                pmot = &mot[mbnum+mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            else if (jmb > 0)   /*upper neighbor previous frame */
-            {
-                pmot = &mot[mbnum-mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-
-            if (imb > 0 && jmb > 0)  /* upper-left neighbor current frame*/
-            {
-                pmot = &mot[mbnum-mbwidth-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb > 0 && imb < mbwidth - 1)  /* upper-right neighbor, current frame (bound is the row width) */
-            {
-                pmot = &mot[mbnum-mbwidth+1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-        }
-        else    /* second pass */
-            /* original ST1 algorithm */
-        {
-            pmot = &mot[mbnum][0]; /* same coordinate previous frame */
-            mvx[(*num_can)] = (pmot->x) >> 1;
-            mvy[(*num_can)++] = (pmot->y) >> 1;
-
-            if (imb > 0)  /*left neighbor current frame */
-            {
-                pmot = &mot[mbnum-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb > 0)  /*upper neighbor current frame */
-            {
-                pmot = &mot[mbnum-mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (imb < mbwidth - 1)  /*right neighbor previous frame */
-            {
-                pmot = &mot[mbnum+1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb < mbheight - 1)  /*bottom neighbor previous frame */
-            {
-                pmot = &mot[mbnum+mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-        }
-    }
-    else  /* only Spatial Candidate (four candidates)*/
-    {
-        if (type_pred == 0) /*first pass*/
-        {
-            if (imb > 1)  /* neighbor two blocks away to the left */
-            {
-                pmot = &mot[mbnum-2][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (imb > 0 && jmb > 0)  /* upper-left neighbor */
-            {
-                pmot = &mot[mbnum-mbwidth-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb > 0 && imb < mbwidth - 1)  /* upper-right neighbor (bound is the row width) */
-            {
-                pmot = &mot[mbnum-mbwidth+1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-        }
-//#ifdef SCENE_CHANGE_DETECTION
-        /* second pass (ST2 algorithm)*/
-        else if (type_pred == 1) /* 4/7/01 */
-        {
-            if (imb > 0)  /*left neighbor current frame */
-            {
-                pmot = &mot[mbnum-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb > 0)  /*upper neighbor current frame */
-            {
-                pmot = &mot[mbnum-mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (imb < mbwidth - 1)  /*right neighbor current frame */
-            {
-                pmot = &mot[mbnum+1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-            if (jmb < mbheight - 1)  /*bottom neighbor current frame */
-            {
-                pmot = &mot[mbnum+mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-            }
-        }
-//#else
-        else /* original ST1 algorithm */
-        {
-            if (imb > 0)  /*left neighbor current frame */
-            {
-                pmot = &mot[mbnum-1][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-
-                if (jmb > 0)  /*upper-left neighbor current frame */
-                {
-                    pmot = &mot[mbnum-mbwidth-1][0];
-                    mvx[(*num_can)] = (pmot->x) >> 1;
-                    mvy[(*num_can)++] = (pmot->y) >> 1;
-                }
-
-            }
-            if (jmb > 0)  /*upper neighbor current frame */
-            {
-                pmot = &mot[mbnum-mbwidth][0];
-                mvx[(*num_can)] = (pmot->x) >> 1;
-                mvy[(*num_can)++] = (pmot->y) >> 1;
-
-                if (imb < mbwidth - 1)  /* upper-right neighbor, current frame (bound is the row width) */
-                {
-                    pmot = &mot[mbnum-mbwidth+1][0];
-                    mvx[(*num_can)] = (pmot->x) >> 1;
-                    mvy[(*num_can)++] = (pmot->y) >> 1;
-                }
-            }
-        }
-//#endif
-    }
-
-    /* 3/23/01, remove redundant candidates (k-means-like pruning) */
-    num1 = *num_can;
-    *num_can = 1;
-    for (i = 1; i < num1; i++)
-    {
-        same = 0;
-        j = 0;
-        while (!same && j < *num_can)
-        {
-#if (CANDIDATE_DISTANCE==0)
-            if (mvx[i] == mvx[j] && mvy[i] == mvy[j])
-#else
-            // modified k-means, 3/24/01; the distance threshold shouldn't be greater than 3
-            if (PV_ABS(mvx[i] - mvx[j]) + PV_ABS(mvy[i] - mvy[j]) < CANDIDATE_DISTANCE)
-#endif
-                same = 1;
-            j++;
-        }
-        if (!same)
-        {
-            mvx[*num_can] = mvx[i];
-            mvy[*num_can] = mvy[i];
-            (*num_can)++;
-        }
-    }
-
-#ifdef _SAD_STAT
-    num_cand += (*num_can);
-#endif
-
-    if (num1 == 5 && *num_can == 1)
-        *num_can = ALL_CAND_EQUAL; /* all are equal */
-
-    return ;
-}
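-
-/*
- * Illustrative sketch (not from the original source) of the duplicate-pruning
- * step at the end of CandidateSelection() above, mirroring its
- * CANDIDATE_DISTANCE > 0 branch: a candidate is kept only if its L1 distance
- * to every already-kept candidate is at least min_dist.  PruneCandidates() is
- * an illustrative name only.
- */
-static Int PruneCandidates(Int *mvx, Int *mvy, Int n, Int min_dist)
-{
-    Int i, j, kept = (n > 0) ? 1 : 0;   /* the first candidate is always kept */
-
-    for (i = 1; i < n; i++)
-    {
-        Int same = 0;
-        for (j = 0; j < kept && !same; j++)
-        {
-            if (PV_ABS(mvx[i] - mvx[j]) + PV_ABS(mvy[i] - mvy[j]) < min_dist)
-                same = 1;
-        }
-        if (!same)
-        {
-            mvx[kept] = mvx[i];
-            mvy[kept] = mvy[i];
-            kept++;
-        }
-    }
-    return kept;   /* unique candidates are left at the front of the arrays */
-}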
-
-/*===========================================================================
-    Function:   RasterIntraUpdate
-    Date:       2/26/01
-    Purpose:    Assign INTRA-update macroblocks in raster-scan order.
-                numRefresh macroblocks are updated per call (the count is programmable).
-===========================================================================*/
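-/* e.g. with totalMB = 99 (QCIF) and numRefresh = 3, every MB is forced intra
-   once over a cycle of 33 calls (typically 33 coded P-frames). */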
-void RasterIntraUpdate(UChar *intraArray, UChar *Mode, Int totalMB, Int numRefresh)
-{
-    Int indx, i;
-
-    /* find the first MB that has not been refreshed yet */
-    indx = 0;
-    while (indx < totalMB && intraArray[indx] == 1) /* check the bound before reading the flag */
-        indx++;
-
-    /* add more  */
-    for (i = 0; i < numRefresh && indx < totalMB; i++)
-    {
-        Mode[indx] = MODE_INTRA;
-        intraArray[indx++] = 1;
-    }
-
-    /* if we reached the end of the frame, reset and wrap around */
-    if (indx >= totalMB - 1)
-    {
-        ResetIntraUpdate(intraArray, totalMB);
-        indx = 0;
-        while (i < numRefresh && indx < totalMB)
-        {
-            intraArray[indx] = 1;
-            Mode[indx++] = MODE_INTRA;
-            i++;
-        }
-    }
-
-    return ;
-}
-
-/*===========================================================================
-    Function:   ResetIntraUpdate
-    Date:       11/28/00
-    Purpose:    Reset already intra updated flags to all zero
-===========================================================================*/
-
-void ResetIntraUpdate(UChar *intraArray, Int totalMB)
-{
-    M4VENC_MEMSET(intraArray, 0, sizeof(UChar)*totalMB);
-    return ;
-}
-
-/*===========================================================================
-    Function:   ResetIntraUpdateRegion
-    Date:       12/1/00
-    Purpose:    Reset already intra updated flags in one region to all zero
-===========================================================================*/
-void ResetIntraUpdateRegion(UChar *intraArray, Int start_i, Int rwidth,
-                            Int start_j, Int rheight, Int mbwidth, Int mbheight)
-{
-    Int indx, j;
-
-    if (start_i + rwidth >= mbwidth)
-        rwidth = mbwidth - start_i;
-    if (start_j + rheight >= mbheight)
-        rheight = mbheight - start_j;
-
-    for (j = start_j; j < start_j + rheight; j++)
-    {
-        indx = j * mbwidth;
-        M4VENC_MEMSET(intraArray + indx + start_i, 0, sizeof(UChar)*rwidth);
-    }
-
-    return ;
-}
-
-/*************************************************************
-    Function:   MoveNeighborSAD
-    Date:       3/27/01
-    Purpose:    Re-map the neighboring SAD values after the search center has shifted
-*************************************************************/
-
-void MoveNeighborSAD(Int dn[], Int new_loc)
-{
-    Int tmp[9];
-    Int i;
-
-    /* snapshot the 3x3 SAD neighborhood, then mark every position as unknown */
-    for (i = 0; i < 9; i++)
-    {
-        tmp[i] = dn[i];
-        dn[i] = 65536;
-    }
-
-    switch (new_loc)
-    {
-        case 0:
-            break;
-        case 1:
-            dn[4] = tmp[2];
-            dn[5] = tmp[0];
-            dn[6] = tmp[8];
-            break;
-        case 2:
-            dn[4] = tmp[3];
-            dn[5] = tmp[4];
-            dn[6] = tmp[0];
-            dn[7] = tmp[8];
-            dn[8] = tmp[1];
-            break;
-        case 3:
-            dn[6] = tmp[4];
-            dn[7] = tmp[0];
-            dn[8] = tmp[2];
-            break;
-        case 4:
-            dn[1] = tmp[2];
-            dn[2] = tmp[3];
-            dn[6] = tmp[5];
-            dn[7] = tmp[6];
-            dn[8] = tmp[0];
-            break;
-        case 5:
-            dn[1] = tmp[0];
-            dn[2] = tmp[4];
-            dn[8] = tmp[6];
-            break;
-        case 6:
-            dn[1] = tmp[8];
-            dn[2] = tmp[0];
-            dn[3] = tmp[4];
-            dn[4] = tmp[5];
-            dn[8] = tmp[7];
-            break;
-        case 7:
-            dn[2] = tmp[8];
-            dn[3] = tmp[0];
-            dn[4] = tmp[6];
-            break;
-        case 8:
-            dn[2] = tmp[1];
-            dn[3] = tmp[2];
-            dn[4] = tmp[0];
-            dn[5] = tmp[6];
-            dn[6] = tmp[7];
-            break;
-    }
-    dn[0] = tmp[new_loc];
-
-    return ;
-}
-
-/* 3/28/01, find the index of the minimum among dn[1..8] */
-
-Int FindMin(Int dn[])
-{
-    Int min, i;
-    Int dmin;
-
-    dmin = dn[1];
-    min = 1;
-    for (i = 2; i < 9; i++)
-    {
-        if (dn[i] < dmin)
-        {
-            dmin = dn[i];
-            min = i;
-        }
-    }
-
-    return min;
-}
-
-
-
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
deleted file mode 100644
index 7ab8f45..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ /dev/null
@@ -1,3307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#include "mp4enc_lib.h"
-#include "bitstream_io.h"
-#include "rate_control.h"
-#include "m4venc_oscl.h"
-
-#ifndef INT32_MAX
-#define INT32_MAX 0x7fffffff
-#endif
-
-#ifndef SIZE_MAX
-#define SIZE_MAX ((size_t) -1)
-#endif
-
-/* Inverse normal zigzag */
-const static Int zigzag_i[NCOEFF_BLOCK] =
-{
-    0, 1, 8, 16, 9, 2, 3, 10,
-    17, 24, 32, 25, 18, 11, 4, 5,
-    12, 19, 26, 33, 40, 48, 41, 34,
-    27, 20, 13, 6, 7, 14, 21, 28,
-    35, 42, 49, 56, 57, 50, 43, 36,
-    29, 22, 15, 23, 30, 37, 44, 51,
-    58, 59, 52, 45, 38, 31, 39, 46,
-    53, 60, 61, 54, 47, 55, 62, 63
-};
-
-/* INTRA */
-const static Int mpeg_iqmat_def[NCOEFF_BLOCK] =
-    {  8, 17, 18, 19, 21, 23, 25, 27,
-       17, 18, 19, 21, 23, 25, 27, 28,
-       20, 21, 22, 23, 24, 26, 28, 30,
-       21, 22, 23, 24, 26, 28, 30, 32,
-       22, 23, 24, 26, 28, 30, 32, 35,
-       23, 24, 26, 28, 30, 32, 35, 38,
-       25, 26, 28, 30, 32, 35, 38, 41,
-       27, 28, 30, 32, 35, 38, 41, 45
-    };
-
-/* INTER */
-const static Int mpeg_nqmat_def[64]  =
-    { 16, 17, 18, 19, 20, 21, 22, 23,
-      17, 18, 19, 20, 21, 22, 23, 24,
-      18, 19, 20, 21, 22, 23, 24, 25,
-      19, 20, 21, 22, 23, 24, 26, 27,
-      20, 21, 22, 23, 25, 26, 27, 28,
-      21, 22, 23, 24, 26, 27, 28, 30,
-      22, 23, 24, 26, 27, 28, 30, 31,
-      23, 24, 25, 27, 28, 30, 31, 33
-    };
-
-/* Profiles and levels */
-/* Simple profile(level 0-3) and Core profile (level 1-2) */
-/* {SPL0, SPL1, SPL2, SPL3, CPL1, CPL2, CPL2, CPL2} , SPL0: Simple Profile@Level0, CPL1: Core Profile@Level1, the last two are redundant for easy table manipulation */
-const static Int profile_level_code[8] =
-{
-    0x08, 0x01, 0x02, 0x03, 0x21, 0x22, 0x22, 0x22
-};
-
-const static Int profile_level_max_bitrate[8] =
-{
-    64000, 64000, 128000, 384000, 384000, 2000000, 2000000, 2000000
-};
-
-const static Int profile_level_max_packet_size[8] =
-{
-    2048, 2048, 4096, 8192, 4096, 8192, 8192, 8192
-};
-
-const static Int profile_level_max_mbsPerSec[8] =
-{
-    1485, 1485, 5940, 11880, 5940, 23760, 23760, 23760
-};
-
-const static Int profile_level_max_VBV_size[8] =
-{
-    163840, 163840, 655360, 655360, 262144, 1310720, 1310720, 1310720
-};
-
-
-/* Simple scalable profile (level 0-2) and Core scalable profile (level 1-3) */
-/* {SSPL0, SSPL1, SSPL2, SSPL2, CSPL1, CSPL2, CSPL3, CSPL3} , SSPL0: Simple Scalable Profile@Level0, CSPL1: Core Scalable Profile@Level1, the fourth is redundant for easy table manipulation */
-
-const static Int scalable_profile_level_code[8] =
-{
-    0x10, 0x11, 0x12, 0x12, 0xA1, 0xA2, 0xA3, 0xA3
-};
-
-const static Int scalable_profile_level_max_bitrate[8] =
-{
-    128000, 128000, 256000, 256000, 768000, 1500000, 4000000, 4000000
-};
-
-/* in bits */
-const static Int scalable_profile_level_max_packet_size[8] =
-{
-    2048, 2048, 4096, 4096, 4096, 4096, 16384, 16384
-};
-
-const static Int scalable_profile_level_max_mbsPerSec[8] =
-{
-    1485, 7425, 23760, 23760, 14850, 29700, 120960, 120960
-};
-
-const static Int scalable_profile_level_max_VBV_size[8] =
-{
-    163840, 655360, 655360, 655360, 1048576, 1310720, 1310720, 1310720
-};
-
-
-/* H263 profile 0 @ level 10-70 */
-const static Int   h263Level[8] = {0, 10, 20, 30, 40, 50, 60, 70};
-const static float rBR_bound[8] = {0, 1, 2, 6, 32, 64, 128, 256};
-const static float max_h263_framerate[2] = {(float)30000 / (float)2002,
-        (float)30000 / (float)1001
-                                           };
-const static Int   max_h263_width[2]  = {176, 352};
-const static Int   max_h263_height[2] = {144, 288};
-
-/* 6/2/2001, newly added functions to make PVEncodeVop more readable. */
-Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime);
-void DetermineVopType(VideoEncData *video, Int currLayer);
-Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status);
-Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized);
-
-#ifdef PRINT_RC_INFO
-extern FILE *facct;
-extern int tiTotalNumBitsGenerated;
-extern int iStuffBits;
-#endif
-
-#ifdef PRINT_EC
-extern FILE *fec;
-#endif
-
-
-/* ======================================================================== */
-/*  Function : PVGetDefaultEncOption()                                      */
-/*  Date     : 12/12/2005                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetDefaultEncOption(VideoEncOptions *encOption, Int encUseCase)
-{
-    VideoEncOptions defaultUseCase = {H263_MODE, profile_level_max_packet_size[SIMPLE_PROFILE_LEVEL0] >> 3,
-                                      SIMPLE_PROFILE_LEVEL0, PV_OFF, 0, 1, 1000, 33, {144, 144}, {176, 176}, {15, 30}, {64000, 128000},
-                                      {10, 10}, {12, 12}, {0, 0}, CBR_1, 0.0, PV_OFF, -1, 0, PV_OFF, 16, PV_OFF, 0, PV_ON
-                                     };
-
-    OSCL_UNUSED_ARG(encUseCase); // unused for now; later we can add more default settings and use this
-    // argument to select the right one.
-    /* in the future we can create more meaningful use-cases */
-    if (encOption == NULL)
-    {
-        return PV_FALSE;
-    }
-
-    M4VENC_MEMCPY(encOption, &defaultUseCase, sizeof(VideoEncOptions));
-
-    return PV_TRUE;
-}
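-
-/*
- * Hedged usage sketch (not from the original tree): fetch the defaults,
- * adjust a few fields, then hand the options to PVInitVideoEncoder().  The
- * field names follow the options used elsewhere in this file; the function
- * name is illustrative and error handling is deliberately minimal.
- */
-static Bool InitQcifEncoderExample(VideoEncControls *encCtrl)
-{
-    VideoEncOptions opt;
-
-    if (PVGetDefaultEncOption(&opt, 0) != PV_TRUE)
-        return PV_FALSE;
-
-    opt.encWidth[0]  = 176;     /* QCIF luma dimensions */
-    opt.encHeight[0] = 144;
-    opt.bitRate[0]   = 64000;   /* 64 kbps */
-
-    return PVInitVideoEncoder(encCtrl, &opt);
-}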
-
-/* ======================================================================== */
-/*  Function : PVInitVideoEncoder()                                         */
-/*  Date     : 08/22/2000                                                   */
-/*  Purpose  : Initialization of MP4 Encoder and VO bitstream               */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :  5/21/01, allocate only yChan and assign uChan & vChan   */
-/*              12/12/05, add encoding option as input argument         */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool    PVInitVideoEncoder(VideoEncControls *encoderControl, VideoEncOptions *encOption)
-{
-
-    Bool        status = PV_TRUE;
-    Int         nLayers, idx, i, j;
-    Int         max = 0, max_width = 0, max_height = 0, pitch, offset;
-    Int         size = 0, nTotalMB = 0;
-    VideoEncData *video;
-    Vol         *pVol;
-    VideoEncParams  *pEncParams;
-    Int         temp_w, temp_h, mbsPerSec;
-
-    /******************************************/
-    /*      this part use to be PVSetEncode() */
-    Int profile_table_index, *profile_level_table;
-    Int profile_level = encOption->profile_level;
-    Int PacketSize = encOption->packetSize << 3;
-    Int timeInc, timeIncRes;
-    float profile_max_framerate;
-    VideoEncParams *encParams;
-
-    if (encoderControl->videoEncoderData) /* this has been called */
-    {
-        if (encoderControl->videoEncoderInit) /* check if PVInitVideoEncoder() has been called  */
-        {
-            PVCleanUpVideoEncoder(encoderControl);
-            encoderControl->videoEncoderInit = 0;
-        }
-
-        M4VENC_FREE(encoderControl->videoEncoderData);
-        encoderControl->videoEncoderData = NULL;
-    }
-    encoderControl->videoEncoderInit = 0;   /* reset this value */
-
-    video = (VideoEncData *)M4VENC_MALLOC(sizeof(VideoEncData)); /* allocate memory for encData */
-
-    if (video == NULL)
-        return PV_FALSE;
-
-    M4VENC_MEMSET(video, 0, sizeof(VideoEncData));
-
-    encoderControl->videoEncoderData = (void *) video;         /* set up pointer in VideoEncData structure */
-
-    video->encParams = (VideoEncParams *)M4VENC_MALLOC(sizeof(VideoEncParams));
-    if (video->encParams == NULL)
-        goto CLEAN_UP;
-
-    M4VENC_MEMSET(video->encParams, 0, sizeof(VideoEncParams));
-
-    encParams = video->encParams;
-    encParams->nLayers = encOption->numLayers;
-
-    /* Check whether the input packet size is valid (note: this is done before any memory allocation in order to avoid memory leaks) */
-    if ((Int)profile_level < (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0))  /* non-scalable profile */
-    {
-        profile_level_table = (Int *)profile_level_max_packet_size;
-        profile_table_index = (Int)profile_level;
-        if (encParams->nLayers != 1)
-        {
-            goto CLEAN_UP;
-        }
-
-        encParams->LayerMaxMbsPerSec[0] = profile_level_max_mbsPerSec[profile_table_index];
-
-    }
-    else   /* scalable profile */
-    {
-        profile_level_table = (Int *)scalable_profile_level_max_packet_size;
-        profile_table_index = (Int)profile_level - (Int)(SIMPLE_SCALABLE_PROFILE_LEVEL0);
-        if (encParams->nLayers < 2)
-        {
-            goto CLEAN_UP;
-        }
-        for (i = 0; i < encParams->nLayers; i++)
-        {
-            encParams->LayerMaxMbsPerSec[i] = scalable_profile_level_max_mbsPerSec[profile_table_index];
-        }
-
-    }
-
-    /* cannot have zero size packet with these modes */
-    if (PacketSize == 0)
-    {
-        if (encOption->encMode == DATA_PARTITIONING_MODE)
-        {
-            goto CLEAN_UP;
-        }
-        if (encOption->encMode == COMBINE_MODE_WITH_ERR_RES)
-        {
-            encOption->encMode = COMBINE_MODE_NO_ERR_RES;
-        }
-    }
-
-    if (encOption->gobHeaderInterval == 0)
-    {
-        if (encOption->encMode == H263_MODE_WITH_ERR_RES)
-        {
-            encOption->encMode = H263_MODE;
-        }
-
-        if (encOption->encMode == SHORT_HEADER_WITH_ERR_RES)
-        {
-            encOption->encMode = SHORT_HEADER;
-        }
-    }
-
-    if (PacketSize > profile_level_table[profile_table_index])
-        goto CLEAN_UP;
-
-    /* Initial Defaults for all Modes */
-
-    encParams->SequenceStartCode = 1;
-    encParams->GOV_Enabled = 0;
-    encParams->RoundingType = 0;
-    encParams->IntraDCVlcThr = PV_MAX(PV_MIN(encOption->intraDCVlcTh, 7), 0);
-    encParams->ACDCPrediction = ((encOption->useACPred == PV_ON) ? TRUE : FALSE);
-    encParams->RC_Type = encOption->rcType;
-    encParams->Refresh = encOption->numIntraMB;
-    encParams->ResyncMarkerDisable = 0; /* Enable Resync Marker */
-
-    for (i = 0; i < encOption->numLayers; i++)
-    {
-#ifdef NO_MPEG_QUANT
-        encParams->QuantType[i] = 0;
-#else
-        encParams->QuantType[i] = encOption->quantType[i];      /* H263 */
-#endif
-        if (encOption->pQuant[i] >= 1 && encOption->pQuant[i] <= 31)
-        {
-            encParams->InitQuantPvop[i] = encOption->pQuant[i];
-        }
-        else
-        {
-            goto CLEAN_UP;
-        }
-        if (encOption->iQuant[i] >= 1 && encOption->iQuant[i] <= 31)
-        {
-            encParams->InitQuantIvop[i] = encOption->iQuant[i];
-        }
-        else
-        {
-            goto CLEAN_UP;
-        }
-    }
-
-    encParams->HalfPel_Enabled = 1;
-    encParams->SearchRange = encOption->searchRange; /* 4/16/2001 */
-    encParams->FullSearch_Enabled = 0;
-#ifdef NO_INTER4V
-    encParams->MV8x8_Enabled = 0;
-#else
-    encParams->MV8x8_Enabled = 0;   /* disabled for now; would otherwise come from encOption->mv8x8Enable */
-#endif
-    encParams->H263_Enabled = 0;
-    encParams->GOB_Header_Interval = 0; // need to be reset to 0
-    encParams->IntraPeriod = encOption->intraPeriod;    /* intra update period (default from the options) */
-    encParams->SceneChange_Det = encOption->sceneDetect;
-    encParams->FineFrameSkip_Enabled = 0;
-    encParams->NoFrameSkip_Enabled = encOption->noFrameSkipped;
-    encParams->NoPreSkip_Enabled = encOption->noFrameSkipped;
-    encParams->GetVolHeader[0] = 0;
-    encParams->GetVolHeader[1] = 0;
-    encParams->ResyncPacketsize = encOption->packetSize << 3;
-    encParams->LayerMaxBitRate[0] = 0;
-    encParams->LayerMaxBitRate[1] = 0;
-    encParams->LayerMaxFrameRate[0] = (float)0.0;
-    encParams->LayerMaxFrameRate[1] = (float)0.0;
-    encParams->VBV_delay = encOption->vbvDelay;  /* VBV buffer delay in seconds; defaults are applied below when 0 */
-
-    switch (encOption->encMode)
-    {
-
-        case SHORT_HEADER:
-        case SHORT_HEADER_WITH_ERR_RES:
-
-            /* From Table 6-26 */
-            encParams->nLayers = 1;
-            encParams->QuantType[0] = 0;    /*H263 */
-            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
-            encParams->DataPartitioning = 0; /* Combined Mode */
-            encParams->ReversibleVLC = 0;   /* Disable RVLC */
-            encParams->RoundingType = 0;
-            encParams->IntraDCVlcThr = 7;   /* use_intra_dc_vlc = 0 */
-            encParams->MV8x8_Enabled = 0;
-
-            encParams->GOB_Header_Interval = encOption->gobHeaderInterval;
-            encParams->H263_Enabled = 2;
-            encParams->GOV_Enabled = 0;
-            encParams->TimeIncrementRes = 30000;        /* timeIncrementRes for H263 */
-            break;
-
-        case H263_MODE:
-        case H263_MODE_WITH_ERR_RES:
-
-            /* From Table 6-26 */
-            encParams->nLayers = 1;
-            encParams->QuantType[0] = 0;    /*H263 */
-            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
-            encParams->DataPartitioning = 0; /* Combined Mode */
-            encParams->ReversibleVLC = 0;   /* Disable RVLC */
-            encParams->RoundingType = 0;
-            encParams->IntraDCVlcThr = 7;   /* use_intra_dc_vlc = 0 */
-            encParams->MV8x8_Enabled = 0;
-
-            encParams->H263_Enabled = 1;
-            encParams->GOV_Enabled = 0;
-            encParams->TimeIncrementRes = 30000;        /* timeIncrementRes for H263 */
-
-            break;
-#ifndef H263_ONLY
-        case DATA_PARTITIONING_MODE:
-
-            encParams->DataPartitioning = 1;        /* Base Layer Data Partitioning */
-            encParams->ResyncMarkerDisable = 0; /* Resync Marker */
-#ifdef NO_RVLC
-            encParams->ReversibleVLC = 0;
-#else
-            encParams->ReversibleVLC = (encOption->rvlcEnable == PV_ON); /* RVLC when Data Partitioning */
-#endif
-            encParams->ResyncPacketsize = PacketSize;
-            break;
-
-        case COMBINE_MODE_WITH_ERR_RES:
-
-            encParams->DataPartitioning = 0;        /* Combined Mode */
-            encParams->ResyncMarkerDisable = 0; /* Resync Marker */
-            encParams->ReversibleVLC = 0;           /* No RVLC */
-            encParams->ResyncPacketsize = PacketSize;
-            break;
-
-        case COMBINE_MODE_NO_ERR_RES:
-
-            encParams->DataPartitioning = 0;        /* Combined Mode */
-            encParams->ResyncMarkerDisable = 1; /* Disable Resync Marker */
-            encParams->ReversibleVLC = 0;           /* No RVLC */
-            break;
-#endif
-        default:
-            goto CLEAN_UP;
-    }
-    /* Set the constraints (maximum values) according to the input profile and level */
-    /* Note that profile_table_index is already figured out above */
-
-    /* base layer */
-    encParams->profile_table_index    = profile_table_index; /* Used to limit the profile and level in SetProfile_BufferSize() */
-
-    /* check timeIncRes */
-    timeIncRes = encOption->timeIncRes;
-    timeInc = encOption->tickPerSrc;
-
-    if ((timeIncRes >= 1) && (timeIncRes <= 65536) && (timeInc < timeIncRes) && (timeInc != 0))
-    {
-        if (!encParams->H263_Enabled)
-        {
-            encParams->TimeIncrementRes = timeIncRes;
-        }
-        else
-        {
-            encParams->TimeIncrementRes = 30000;
-//          video->FrameRate = 30000/(float)1001; /* fix it to 29.97 fps */
-        }
-        video->FrameRate = timeIncRes / ((float)timeInc);
-    }
-    else
-    {
-        goto CLEAN_UP;
-    }
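-    /* e.g. timeIncRes = 30000 with tickPerSrc = 1001 gives FrameRate = 29.97 fps,
-       which matches the second entry of the max_h263_framerate table above. */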
-
-    /* check frame dimension */
-    if (encParams->H263_Enabled)
-    {
-        switch (encOption->encWidth[0])
-        {
-            case 128:
-                if (encOption->encHeight[0] != 96) /* source_format = 1 */
-                    goto CLEAN_UP;
-                break;
-            case 176:
-                if (encOption->encHeight[0] != 144) /* source_format = 2 */
-                    goto CLEAN_UP;
-                break;
-            case 352:
-                if (encOption->encHeight[0] != 288) /* source_format = 3 */
-                    goto CLEAN_UP;
-                break;
-
-            case 704:
-                if (encOption->encHeight[0] != 576) /* source_format = 4 */
-                    goto CLEAN_UP;
-                break;
-            case 1408:
-                if (encOption->encHeight[0] != 1152) /* source_format = 5 */
-                    goto CLEAN_UP;
-                break;
-
-            default:
-                goto CLEAN_UP;
-        }
-    }
-    for (i = 0; i < encParams->nLayers; i++)
-    {
-        encParams->LayerHeight[i] = encOption->encHeight[i];
-        encParams->LayerWidth[i] = encOption->encWidth[i];
-    }
-
-    /* check frame rate */
-    for (i = 0; i < encParams->nLayers; i++)
-    {
-        encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
-    }
-
-    if (encParams->nLayers > 1)
-    {
-        if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
-                encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
-            goto CLEAN_UP;
-    }
-    /* set max frame rate */
-    for (i = 0; i < encParams->nLayers; i++)
-    {
-
-        /* Make sure the maximum framerate is consistent with the given profile and level */
-        nTotalMB = ((encParams->LayerWidth[i] + 15) / 16) * ((encParams->LayerHeight[i] + 15) / 16);
-
-        if (nTotalMB > 0)
-            profile_max_framerate = (float)encParams->LayerMaxMbsPerSec[i] / (float)nTotalMB;
-
-        else
-            profile_max_framerate = (float)30.0;
-
-        encParams->LayerMaxFrameRate[i] = PV_MIN(profile_max_framerate, encParams->LayerFrameRate[i]);
-    }
-
-    /* check bit rate */
-    /* set max bit rate */
-    for (i = 0; i < encParams->nLayers; i++)
-    {
-        encParams->LayerBitRate[i] = encOption->bitRate[i];
-        encParams->LayerMaxBitRate[i] = encOption->bitRate[i];
-    }
-    if (encParams->nLayers > 1)
-    {
-        if (encOption->bitRate[0] == encOption->bitRate[1] ||
-                encOption->bitRate[0] == 0 || encOption->bitRate[1] == 0) /* 7/31/03 */
-            goto CLEAN_UP;
-    }
-    /* check rate control and vbv delay*/
-    encParams->RC_Type = encOption->rcType;
-
-    if (encOption->vbvDelay == 0.0) /* set to default */
-    {
-        switch (encOption->rcType)
-        {
-            case CBR_1:
-            case CBR_2:
-                encParams->VBV_delay = (float)2.0; /* default 2sec VBV buffer size */
-                break;
-
-            case CBR_LOWDELAY:
-                encParams->VBV_delay = (float)0.5; /* default 0.5sec VBV buffer size */
-                break;
-
-            case VBR_1:
-            case VBR_2:
-                encParams->VBV_delay = (float)10.0; /* default 10sec VBV buffer size */
-                break;
-            default:
-                break;
-        }
-    }
-    else /* force this value */
-    {
-        encParams->VBV_delay = encOption->vbvDelay;
-    }
-
-    /* check search range */
-    if (encParams->H263_Enabled && encOption->searchRange > 16)
-    {
-        encParams->SearchRange = 16; /* 4/16/2001 */
-    }
-
-    /*****************************************/
-    /* checking for conflict between options */
-    /*****************************************/
-
-    if (video->encParams->RC_Type == CBR_1 || video->encParams->RC_Type == CBR_2 || video->encParams->RC_Type == CBR_LOWDELAY)  /* if CBR */
-    {
-#ifdef _PRINT_STAT
-        if (video->encParams->NoFrameSkip_Enabled == PV_ON ||
-                video->encParams->NoPreSkip_Enabled == PV_ON) /* don't allow frame skip*/
-            printf("WARNING!!!! CBR with NoFrameSkip\n");
-#endif
-    }
-    else if (video->encParams->RC_Type == CONSTANT_Q)   /* constant_Q */
-    {
-        video->encParams->NoFrameSkip_Enabled = PV_ON;  /* no frame skip */
-        video->encParams->NoPreSkip_Enabled = PV_ON;    /* no frame skip */
-#ifdef _PRINT_STAT
-        printf("Turn on NoFrameSkip\n");
-#endif
-    }
-
-    if (video->encParams->NoFrameSkip_Enabled == PV_ON) /* if no frame skip */
-    {
-        video->encParams->FineFrameSkip_Enabled = PV_OFF;
-#ifdef _PRINT_STAT
-        printf("NoFrameSkip !!! may violate VBV_BUFFER constraint.\n");
-        printf("Turn off FineFrameSkip\n");
-#endif
-    }
-
-    /******************************************/
-    /******************************************/
-
-    nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
-
-    /* Find the maximum width*height for memory allocation of the VOPs */
-    for (idx = 0; idx < nLayers; idx++)
-    {
-        temp_w = video->encParams->LayerWidth[idx];
-        temp_h = video->encParams->LayerHeight[idx];
-
-        if ((temp_w*temp_h) > max)
-        {
-            max = temp_w * temp_h;
-            max_width = ((temp_w + 15) >> 4) << 4;
-            max_height = ((temp_h + 15) >> 4) << 4;
-            if (((uint64_t)max_width * max_height) > (uint64_t)INT32_MAX
-                    || temp_w > INT32_MAX - 15 || temp_h > INT32_MAX - 15) {
-                goto CLEAN_UP;
-            }
-            nTotalMB = ((max_width * max_height) >> 8);
-        }
-
-        /* Check if the video size and framerate (MBsPerSec) are valid */
-        mbsPerSec = (Int)(nTotalMB * video->encParams->LayerFrameRate[idx]);
-        if (mbsPerSec > video->encParams->LayerMaxMbsPerSec[idx]) status = PV_FALSE;
-    }
-
-    /****************************************************/
-    /* Set Profile and Video Buffer Size for each layer */
-    /****************************************************/
-    if (video->encParams->RC_Type == CBR_LOWDELAY) video->encParams->VBV_delay = 0.5; /* For CBR_LOWDELAY, we set 0.5sec buffer */
-    status = SetProfile_BufferSize(video, video->encParams->VBV_delay, 1);
-    if (status != PV_TRUE)
-        goto CLEAN_UP;
-
-    /****************************************/
-    /* memory allocation and initialization */
-    /****************************************/
-
-    if (video == NULL) goto CLEAN_UP;
-
-    /* cyclic reference for passing through both structures */
-    video->videoEncControls = encoderControl;
-
-    //video->currLayer = 0; /* Set current Layer to 0 */
-    //video->currFrameNo = 0; /* Set current frame Number to 0 */
-    video->nextModTime = 0;
-    video->nextEncIVop = 0; /* Sets up very first frame to be I-VOP! */
-    video->numVopsInGOP = 0; /* counter for Vops in Gop, 2/8/01 */
-
-    //video->frameRate = video->encParams->LayerFrameRate[0]; /* Set current layer frame rate */
-
-    video->QPMB = (UChar *) M4VENC_MALLOC(nTotalMB * sizeof(UChar)); /* Memory for MB quantizers */
-    if (video->QPMB == NULL) goto CLEAN_UP;
-
-
-    video->headerInfo.Mode = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB); /* Memory for MB Modes */
-    if (video->headerInfo.Mode == NULL) goto CLEAN_UP;
-    video->headerInfo.CBP = (UChar *) M4VENC_MALLOC(sizeof(UChar) * nTotalMB);   /* Memory for CBP (Y and C) of each MB */
-    if (video->headerInfo.CBP == NULL) goto CLEAN_UP;
-
-    /* Allocating motion vector space and interpolation memory*/
-
-    if ((size_t)nTotalMB > SIZE_MAX / sizeof(MOT *)) {
-        goto CLEAN_UP;
-    }
-    video->mot = (MOT **)M4VENC_MALLOC(sizeof(MOT *) * nTotalMB);
-    if (video->mot == NULL) goto CLEAN_UP;
-
-    for (idx = 0; idx < nTotalMB; idx++)
-    {
-        video->mot[idx] = (MOT *)M4VENC_MALLOC(sizeof(MOT) * 8);
-        if (video->mot[idx] == NULL)
-        {
-            goto CLEAN_UP;
-        }
-    }
-
-    video->intraArray = (UChar *)M4VENC_MALLOC(sizeof(UChar) * nTotalMB);
-    if (video->intraArray == NULL) goto CLEAN_UP;
-
-    video->sliceNo = (UChar *) M4VENC_MALLOC(nTotalMB); /* Memory for Slice Numbers */
-    if (video->sliceNo == NULL) goto CLEAN_UP;
-    /* Allocating space for predDCAC[][8][16]. Note that I intentionally */
-    /*    increase the dimension of predDCAC from [][6][15] to [][8][16] */
-    /*    so that compilers can generate faster code for indexing the    */
-    /*    data inside (by using << instead of *).         04/14/2000. */
-    /* 5/29/01, use  decoder lib ACDC prediction memory scheme.  */
-    if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
-        goto CLEAN_UP;
-    }
-    video->predDC = (typeDCStore *) M4VENC_MALLOC(nTotalMB * sizeof(typeDCStore));
-    if (video->predDC == NULL) goto CLEAN_UP;
-
-    if (!video->encParams->H263_Enabled)
-    {
-        if ((size_t)((max_width >> 4) + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
-            goto CLEAN_UP;
-        }
-        video->predDCAC_col = (typeDCACStore *) M4VENC_MALLOC(((max_width >> 4) + 1) * sizeof(typeDCACStore));
-        if (video->predDCAC_col == NULL) goto CLEAN_UP;
-
-        /* element zero will be used for storing vertical (col) AC coefficients */
-        /*  the rest will be used for storing horizontal (row) AC coefficients  */
-        video->predDCAC_row = video->predDCAC_col + 1;        /*  ACDC */
-
-        if ((size_t)nTotalMB > SIZE_MAX / sizeof(Int)) {
-            goto CLEAN_UP;
-        }
-        video->acPredFlag = (Int *) M4VENC_MALLOC(nTotalMB * sizeof(Int)); /* Memory for acPredFlag */
-        if (video->acPredFlag == NULL) goto CLEAN_UP;
-    }
-
-    video->outputMB = (MacroBlock *) M4VENC_MALLOC(sizeof(MacroBlock)); /* Allocating macroblock space */
-    if (video->outputMB == NULL) goto CLEAN_UP;
-    M4VENC_MEMSET(video->outputMB->block[0], 0, (sizeof(Short) << 6)*6);
-
-    M4VENC_MEMSET(video->dataBlock, 0, sizeof(Short) << 7);
-    /* Allocate (2*packetsize) working bitstreams */
-
-    video->bitstream1 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 1*/
-    if (video->bitstream1 == NULL) goto CLEAN_UP;
-    video->bitstream2 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 2*/
-    if (video->bitstream2 == NULL) goto CLEAN_UP;
-    video->bitstream3 = BitStreamCreateEnc(2 * 4096); /*allocate working stream 3*/
-    if (video->bitstream3 == NULL) goto CLEAN_UP;
-
-    /* allocate overrun buffer */
-    // this buffer is used when user's buffer is too small to hold one frame.
-    // It is not needed for slice-based encoding.
-    if (nLayers == 1)
-    {
-        video->oBSize = encParams->BufferSize[0] >> 3;
-    }
-    else
-    {
-        video->oBSize = PV_MAX((encParams->BufferSize[0] >> 3), (encParams->BufferSize[1] >> 3));
-    }
-
-    if (video->oBSize > DEFAULT_OVERRUN_BUFFER_SIZE || encParams->RC_Type == CONSTANT_Q) // set limit
-    {
-        video->oBSize = DEFAULT_OVERRUN_BUFFER_SIZE;
-    }
-    video->overrunBuffer = (UChar*) M4VENC_MALLOC(sizeof(UChar) * video->oBSize);
-    if (video->overrunBuffer == NULL) goto CLEAN_UP;
-
-
-    video->currVop = (Vop *) M4VENC_MALLOC(sizeof(Vop)); /* Memory for Current VOP */
-    if (video->currVop == NULL) goto CLEAN_UP;
-
-    /* add padding, 09/19/05 */
-    if (video->encParams->H263_Enabled) /* make it conditional  11/28/05 */
-    {
-        pitch = max_width;
-        offset = 0;
-    }
-    else
-    {
-        pitch = max_width + 32;
-        offset = (pitch << 4) + 16;
-        max_height += 32;
-    }
-    if (((uint64_t)pitch * max_height) > (uint64_t)INT32_MAX) {
-        goto CLEAN_UP;
-    }
-    size = pitch * max_height;
-
-    if (size > INT32_MAX - (size >> 1)
-            || (size_t)(size + (size >> 1)) > SIZE_MAX / sizeof(PIXEL)) {
-        goto CLEAN_UP;
-    }
-    video->currVop->allChan = video->currVop->yChan = (PIXEL *)M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for currVop Y */
-    if (video->currVop->yChan == NULL) goto CLEAN_UP;
-    video->currVop->uChan = video->currVop->yChan + size;/* Memory for currVop U */
-    video->currVop->vChan = video->currVop->uChan + (size >> 2);/* Memory for currVop V */
-
-    /* shift for the offset */
-    if (offset)
-    {
-        video->currVop->yChan += offset; /* offset to the origin.*/
-        video->currVop->uChan += (offset >> 2) + 4;
-        video->currVop->vChan += (offset >> 2) + 4;
-    }
-
-    video->forwardRefVop = video->currVop;      /*  Initialize forwardRefVop */
-    video->backwardRefVop = video->currVop;     /*  Initialize backwardRefVop */
-
-    video->prevBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));         /* Memory for Previous Base Vop */
-    if (video->prevBaseVop == NULL) goto CLEAN_UP;
-    video->prevBaseVop->allChan = video->prevBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for prevBaseVop Y */
-    if (video->prevBaseVop->yChan == NULL) goto CLEAN_UP;
-    video->prevBaseVop->uChan = video->prevBaseVop->yChan + size; /* Memory for prevBaseVop U */
-    video->prevBaseVop->vChan = video->prevBaseVop->uChan + (size >> 2); /* Memory for prevBaseVop V */
-
-    if (offset)
-    {
-        video->prevBaseVop->yChan += offset; /* offset to the origin.*/
-        video->prevBaseVop->uChan += (offset >> 2) + 4;
-        video->prevBaseVop->vChan += (offset >> 2) + 4;
-    }
-
-
-    if (0) /* If B Frames */
-    {
-        video->nextBaseVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));         /* Memory for Next Base Vop */
-        if (video->nextBaseVop == NULL) goto CLEAN_UP;
-        video->nextBaseVop->allChan = video->nextBaseVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for nextBaseVop Y */
-        if (video->nextBaseVop->yChan == NULL) goto CLEAN_UP;
-        video->nextBaseVop->uChan = video->nextBaseVop->yChan + size; /* Memory for nextBaseVop U */
-        video->nextBaseVop->vChan = video->nextBaseVop->uChan + (size >> 2); /* Memory for nextBaseVop V */
-
-        if (offset)
-        {
-            video->nextBaseVop->yChan += offset; /* offset to the origin.*/
-            video->nextBaseVop->uChan += (offset >> 2) + 4;
-            video->nextBaseVop->vChan += (offset >> 2) + 4;
-        }
-    }
-
-    if (nLayers > 1)   /* If enhancement layers */
-    {
-        video->prevEnhanceVop = (Vop *) M4VENC_MALLOC(sizeof(Vop));      /* Memory for Previous Enhancement Vop */
-        if (video->prevEnhanceVop == NULL) goto CLEAN_UP;
-        video->prevEnhanceVop->allChan = video->prevEnhanceVop->yChan = (PIXEL *) M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for Previous Enhancement Y */
-        if (video->prevEnhanceVop->yChan == NULL) goto CLEAN_UP;
-        video->prevEnhanceVop->uChan = video->prevEnhanceVop->yChan + size; /* Memory for Previous Enhancement U */
-        video->prevEnhanceVop->vChan = video->prevEnhanceVop->uChan + (size >> 2); /* Memory for Previous Enhancement V */
-
-        if (offset)
-        {
-            video->prevEnhanceVop->yChan += offset; /* offset to the origin.*/
-            video->prevEnhanceVop->uChan += (offset >> 2) + 4;
-            video->prevEnhanceVop->vChan += (offset >> 2) + 4;
-        }
-    }
-
-    video->numberOfLayers = nLayers; /* Number of Layers */
-    video->sumMAD = 0;
-
-
-    /* 04/09/01, for Vops used in multipass processing */
-    for (idx = 0; idx < nLayers; idx++)
-    {
-        video->pMP[idx] = (MultiPass *)M4VENC_MALLOC(sizeof(MultiPass));
-        if (video->pMP[idx] == NULL)    goto CLEAN_UP;
-        M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
-
-        video->pMP[idx]->encoded_frames = -1; /* forget about the very first I frame */
-
-
-        /* RDInfo **pRDSamples */
-        video->pMP[idx]->pRDSamples = (RDInfo **)M4VENC_MALLOC(30 * sizeof(RDInfo *));
-        if (video->pMP[idx]->pRDSamples == NULL)    goto CLEAN_UP;
-        for (i = 0; i < 30; i++)
-        {
-            video->pMP[idx]->pRDSamples[i] = (RDInfo *)M4VENC_MALLOC(32 * sizeof(RDInfo));
-            if (video->pMP[idx]->pRDSamples[i] == NULL) goto CLEAN_UP;
-            for (j = 0; j < 32; j++)    M4VENC_MEMSET(&(video->pMP[idx]->pRDSamples[i][j]), 0, sizeof(RDInfo));
-        }
-        video->pMP[idx]->frameRange = (Int)(video->encParams->LayerFrameRate[idx] * 1.0); /* 1.0s time frame*/
-        video->pMP[idx]->frameRange = PV_MAX(video->pMP[idx]->frameRange, 5);
-        video->pMP[idx]->frameRange = PV_MIN(video->pMP[idx]->frameRange, 30);
-
-        video->pMP[idx]->framePos = -1;
-
-    }
-    /* /// End /////////////////////////////////////// */
-
-
-    if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
-        goto CLEAN_UP;
-    }
-    video->vol = (Vol **)M4VENC_MALLOC(nLayers * sizeof(Vol *)); /* Memory for VOL pointers */
-
-    /* Memory allocation and Initialization of Vols and writing of headers */
-    if (video->vol == NULL) goto CLEAN_UP;
-
-    for (idx = 0; idx < nLayers; idx++)
-    {
-        video->volInitialize[idx] = 1;
-        video->refTick[idx] = 0;
-        video->relLayerCodeTime[idx] = 1000;
-        video->vol[idx] = (Vol *)M4VENC_MALLOC(sizeof(Vol));
-        if (video->vol[idx] == NULL)  goto CLEAN_UP;
-
-        pVol = video->vol[idx];
-        pEncParams = video->encParams;
-
-        M4VENC_MEMSET(video->vol[idx], 0, sizeof(Vol));
-        /* Initialize some VOL parameters */
-        pVol->volID = idx;  /* Set VOL ID */
-        pVol->shortVideoHeader = pEncParams->H263_Enabled; /*Short Header */
-        pVol->GOVStart = pEncParams->GOV_Enabled; /* GOV Header */
-        pVol->timeIncrementResolution = video->encParams->TimeIncrementRes;
-        pVol->nbitsTimeIncRes = 1;
-        while (pVol->timeIncrementResolution > (1 << pVol->nbitsTimeIncRes))
-        {
-            pVol->nbitsTimeIncRes++;
-        }
-
-        /* timing stuff */
-        pVol->timeIncrement = 0;
-        pVol->moduloTimeBase = 0;
-        pVol->fixedVopRate = 0; /* No fixed VOP rate */
-        pVol->stream = (BitstreamEncVideo *)M4VENC_MALLOC(sizeof(BitstreamEncVideo)); /* allocate BitstreamEncVideo Instance */
-        if (pVol->stream == NULL)  goto CLEAN_UP;
-
-        pVol->width = pEncParams->LayerWidth[idx];      /* Layer Width */
-        pVol->height = pEncParams->LayerHeight[idx];    /* Layer Height */
-        //  pVol->intra_acdcPredDisable = pEncParams->ACDCPrediction; /* ACDC Prediction */
-        pVol->ResyncMarkerDisable = pEncParams->ResyncMarkerDisable; /* Resync Marker Mode */
-        pVol->dataPartitioning = pEncParams->DataPartitioning; /* Data Partitioning */
-        pVol->useReverseVLC = pEncParams->ReversibleVLC; /* RVLC */
-        if (idx > 0) /* Scalability layers */
-        {
-            pVol->ResyncMarkerDisable = 1;
-            pVol->dataPartitioning = 0;
-            pVol->useReverseVLC = 0; /*  No RVLC */
-        }
-        pVol->quantType = pEncParams->QuantType[idx];           /* Quantizer Type */
-
-        /* no need to init Quant Matrices */
-
-        pVol->scalability = 0;  /* Vol Scalability */
-        if (idx > 0)
-            pVol->scalability = 1; /* Multiple layers => Scalability */
-
-        /* Initialize Vol to Temporal scalability.  It can change during encoding */
-        pVol->scalType = 1;
-        /* Initialize reference Vol ID to the base layer = 0 */
-        pVol->refVolID = 0;
-        /* Initialize layer resolution to same as the reference */
-        pVol->refSampDir = 0;
-        pVol->horSamp_m = 1;
-        pVol->horSamp_n = 1;
-        pVol->verSamp_m = 1;
-        pVol->verSamp_n = 1;
-        pVol->enhancementType = 0; /* We always enhance the entire region */
-
-        pVol->nMBPerRow = (pVol->width + 15) / 16;
-        pVol->nMBPerCol = (pVol->height + 15) / 16;
-        pVol->nTotalMB = pVol->nMBPerRow * pVol->nMBPerCol;
-
-        if (pVol->nTotalMB >= 1)
-            pVol->nBitsForMBID = 1;
-        if (pVol->nTotalMB >= 3)
-            pVol->nBitsForMBID = 2;
-        if (pVol->nTotalMB >= 5)
-            pVol->nBitsForMBID = 3;
-        if (pVol->nTotalMB >= 9)
-            pVol->nBitsForMBID = 4;
-        if (pVol->nTotalMB >= 17)
-            pVol->nBitsForMBID = 5;
-        if (pVol->nTotalMB >= 33)
-            pVol->nBitsForMBID = 6;
-        if (pVol->nTotalMB >= 65)
-            pVol->nBitsForMBID = 7;
-        if (pVol->nTotalMB >= 129)
-            pVol->nBitsForMBID = 8;
-        if (pVol->nTotalMB >= 257)
-            pVol->nBitsForMBID = 9;
-        if (pVol->nTotalMB >= 513)
-            pVol->nBitsForMBID = 10;
-        if (pVol->nTotalMB >= 1025)
-            pVol->nBitsForMBID = 11;
-        if (pVol->nTotalMB >= 2049)
-            pVol->nBitsForMBID = 12;
-        if (pVol->nTotalMB >= 4097)
-            pVol->nBitsForMBID = 13;
-        if (pVol->nTotalMB >= 8193)
-            pVol->nBitsForMBID = 14;
-        if (pVol->nTotalMB >= 16385)
-            pVol->nBitsForMBID = 15;
-        if (pVol->nTotalMB >= 32769)
-            pVol->nBitsForMBID = 16;
-        if (pVol->nTotalMB >= 65537)
-            pVol->nBitsForMBID = 17;
-        if (pVol->nTotalMB >= 131073)
-            pVol->nBitsForMBID = 18;
-
-        if (pVol->shortVideoHeader)
-        {
-            switch (pVol->width)
-            {
-                case 128:
-                    if (pVol->height == 96)  /* source_format = 1 */
-                    {
-                        pVol->nGOBinVop = 6;
-                        pVol->nMBinGOB = 8;
-                    }
-                    else
-                        status = PV_FALSE;
-                    break;
-
-                case 176:
-                    if (pVol->height == 144)  /* source_format = 2 */
-                    {
-                        pVol->nGOBinVop = 9;
-                        pVol->nMBinGOB = 11;
-                    }
-                    else
-                        status = PV_FALSE;
-                    break;
-                case 352:
-                    if (pVol->height == 288)  /* source_format = 3 */
-                    {
-                        pVol->nGOBinVop = 18;
-                        pVol->nMBinGOB = 22;
-                    }
-                    else
-                        status = PV_FALSE;
-                    break;
-
-                case 704:
-                    if (pVol->height == 576)  /* source_format = 4 */
-                    {
-                        pVol->nGOBinVop = 18;
-                        pVol->nMBinGOB = 88;
-                    }
-                    else
-                        status = PV_FALSE;
-                    break;
-                case 1408:
-                    if (pVol->height == 1152)  /* source_format = 5 */
-                    {
-                        pVol->nGOBinVop = 18;
-                        pVol->nMBinGOB = 352;
-                    }
-                    else
-                        status = PV_FALSE;
-                    break;
-
-                default:
-                    status = PV_FALSE;
-                    break;
-            }
-        }
-    }
-
-    /***************************************************/
-    /* allocate and initialize rate control parameters */
-    /***************************************************/
-
-    /* BEGIN INITIALIZATION OF ANNEX L RATE CONTROL */
-    if (video->encParams->RC_Type != CONSTANT_Q)
-    {
-        for (idx = 0; idx < nLayers; idx++) /* 12/25/00 */
-        {
-            video->rc[idx] =
-                (rateControl *)M4VENC_MALLOC(sizeof(rateControl));
-
-            if (video->rc[idx] == NULL) goto CLEAN_UP;
-
-            M4VENC_MEMSET(video->rc[idx], 0, sizeof(rateControl));
-        }
-        if (PV_SUCCESS != RC_Initialize(video))
-        {
-            goto CLEAN_UP;
-        }
-        /* initialization for 2-pass rate control */
-    }
-    /* END INITIALIZATION OF ANNEX L RATE CONTROL */
-
-    /********** assign platform dependent functions ***********************/
-    /* 1/23/01 */
-    /* This must be done at run-time, not at compile time */
-    video->functionPointer = (FuncPtr*) M4VENC_MALLOC(sizeof(FuncPtr));
-    if (video->functionPointer == NULL) goto CLEAN_UP;
-
-    video->functionPointer->ComputeMBSum = &ComputeMBSum_C;
-    video->functionPointer->SAD_MB_HalfPel[0] = NULL;
-    video->functionPointer->SAD_MB_HalfPel[1] = &SAD_MB_HalfPel_Cxh;
-    video->functionPointer->SAD_MB_HalfPel[2] = &SAD_MB_HalfPel_Cyh;
-    video->functionPointer->SAD_MB_HalfPel[3] = &SAD_MB_HalfPel_Cxhyh;
-
-#ifndef NO_INTER4V
-    video->functionPointer->SAD_Blk_HalfPel = &SAD_Blk_HalfPel_C;
-    video->functionPointer->SAD_Block = &SAD_Block_C;
-#endif
-    video->functionPointer->SAD_Macroblock = &SAD_Macroblock_C;
-    video->functionPointer->ChooseMode = &ChooseMode_C;
-    video->functionPointer->GetHalfPelMBRegion = &GetHalfPelMBRegion_C;
-//  video->functionPointer->SAD_MB_PADDING = &SAD_MB_PADDING; /* 4/21/01 */
-
-
-    encoderControl->videoEncoderInit = 1;  /* init done! */
-
-    return PV_TRUE;
-
-CLEAN_UP:
-    PVCleanUpVideoEncoder(encoderControl);
-
-    return PV_FALSE;
-}
-
-
-/* ======================================================================== */
-/*  Function : PVCleanUpVideoEncoder()                                      */
-/*  Date     : 08/22/2000                                                   */
-/*  Purpose  : Deallocates allocated memory from InitVideoEncoder()         */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified : 5/21/01, free only yChan in Vop                          */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool    PVCleanUpVideoEncoder(VideoEncControls *encoderControl)
-{
-    Int idx, i;
-    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
-    int nTotalMB;
-    int max_width, offset;
-
-#ifdef PRINT_RC_INFO
-    if (facct != NULL)
-    {
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "TOTAL NUM BITS GENERATED %d\n", tiTotalNumBitsGenerated);
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "TOTAL NUMBER OF FRAMES CODED %d\n",
-                video->encParams->rc[0]->totalFrameNumber);
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "Average BitRate %d\n",
-                (tiTotalNumBitsGenerated / (90 / 30)));
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "TOTAL NUMBER OF STUFF BITS %d\n", (iStuffBits + 10740));
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "TOTAL NUMBER OF BITS TO NETWORK %d\n", (35800*90 / 30));;
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "SUM OF STUFF BITS AND GENERATED BITS %d\n",
-                (tiTotalNumBitsGenerated + iStuffBits + 10740));
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fprintf(facct, "UNACCOUNTED DIFFERENCE %d\n",
-                ((35800*90 / 30) - (tiTotalNumBitsGenerated + iStuffBits + 10740)));
-        fprintf(facct, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n");
-        fclose(facct);
-    }
-#endif
-
-#ifdef PRINT_EC
-    fclose(fec);
-#endif
-
-    if (video != NULL)
-    {
-
-        if (video->QPMB) M4VENC_FREE(video->QPMB);
-        if (video->headerInfo.Mode)M4VENC_FREE(video->headerInfo.Mode);
-        if (video->headerInfo.CBP)M4VENC_FREE(video->headerInfo.CBP);
-
-
-        if (video->mot)
-        {
-            nTotalMB = video->vol[0]->nTotalMB;
-            for (idx = 1; idx < video->currLayer; idx++)
-                if (video->vol[idx]->nTotalMB > nTotalMB)
-                    nTotalMB = video->vol[idx]->nTotalMB;
-            for (idx = 0; idx < nTotalMB; idx++)
-            {
-                if (video->mot[idx])
-                    M4VENC_FREE(video->mot[idx]);
-            }
-            M4VENC_FREE(video->mot);
-        }
-
-        if (video->intraArray) M4VENC_FREE(video->intraArray);
-
-        if (video->sliceNo)M4VENC_FREE(video->sliceNo);
-        if (video->acPredFlag)M4VENC_FREE(video->acPredFlag);
-//      if(video->predDCAC)M4VENC_FREE(video->predDCAC);
-        if (video->predDC) M4VENC_FREE(video->predDC);
-        video->predDCAC_row = NULL;
-        if (video->predDCAC_col) M4VENC_FREE(video->predDCAC_col);
-        if (video->outputMB)M4VENC_FREE(video->outputMB);
-
-        if (video->bitstream1)BitstreamCloseEnc(video->bitstream1);
-        if (video->bitstream2)BitstreamCloseEnc(video->bitstream2);
-        if (video->bitstream3)BitstreamCloseEnc(video->bitstream3);
-
-        if (video->overrunBuffer) M4VENC_FREE(video->overrunBuffer);
-
-        max_width = video->encParams->LayerWidth[0];
-        max_width = (((max_width + 15) >> 4) << 4); /* 09/19/05 */
-        if (video->encParams->H263_Enabled)
-        {
-            offset = 0;
-        }
-        else
-        {
-            offset = ((max_width + 32) << 4) + 16;
-        }
-
-        if (video->currVop)
-        {
-            if (video->currVop->allChan)
-            {
-                M4VENC_FREE(video->currVop->allChan);
-            }
-            M4VENC_FREE(video->currVop);
-        }
-
-        if (video->nextBaseVop)
-        {
-            if (video->nextBaseVop->allChan)
-            {
-                M4VENC_FREE(video->nextBaseVop->allChan);
-            }
-            M4VENC_FREE(video->nextBaseVop);
-        }
-
-        if (video->prevBaseVop)
-        {
-            if (video->prevBaseVop->allChan)
-            {
-                M4VENC_FREE(video->prevBaseVop->allChan);
-            }
-            M4VENC_FREE(video->prevBaseVop);
-        }
-        if (video->prevEnhanceVop)
-        {
-            if (video->prevEnhanceVop->allChan)
-            {
-                M4VENC_FREE(video->prevEnhanceVop->allChan);
-            }
-            M4VENC_FREE(video->prevEnhanceVop);
-        }
-
-        /* 04/09/01, for Vops used in multipass processing */
-        for (idx = 0; idx < video->encParams->nLayers; idx++)
-        {
-            if (video->pMP[idx])
-            {
-                if (video->pMP[idx]->pRDSamples)
-                {
-                    for (i = 0; i < 30; i++)
-                    {
-                        if (video->pMP[idx]->pRDSamples[i])
-                            M4VENC_FREE(video->pMP[idx]->pRDSamples[i]);
-                    }
-                    M4VENC_FREE(video->pMP[idx]->pRDSamples);
-                }
-
-                M4VENC_MEMSET(video->pMP[idx], 0, sizeof(MultiPass));
-                M4VENC_FREE(video->pMP[idx]);
-            }
-        }
-        /* //  End /////////////////////////////////////// */
-
-        if (video->vol)
-        {
-            for (idx = 0; idx < video->encParams->nLayers; idx++)
-            {
-                if (video->vol[idx])
-                {
-                    if (video->vol[idx]->stream)
-                        M4VENC_FREE(video->vol[idx]->stream);
-                    M4VENC_FREE(video->vol[idx]);
-                }
-            }
-            M4VENC_FREE(video->vol);
-        }
-
-        /***************************************************/
-        /* stop rate control parameters */
-        /***************************************************/
-
-        /* ANNEX L RATE CONTROL */
-        if (video->encParams->RC_Type != CONSTANT_Q)
-        {
-            RC_Cleanup(video->rc, video->encParams->nLayers);
-
-            for (idx = 0; idx < video->encParams->nLayers; idx++)
-            {
-                if (video->rc[idx])
-                    M4VENC_FREE(video->rc[idx]);
-            }
-        }
-
-        if (video->functionPointer) M4VENC_FREE(video->functionPointer);
-
-        /* If application has called PVCleanUpVideoEncoder then we deallocate */
-        /* If PVInitVideoEncoder calls it, then we DO NOT deallocate */
-        if (video->encParams)
-        {
-            M4VENC_FREE(video->encParams);
-        }
-
-        M4VENC_FREE(video);
-        encoderControl->videoEncoderData = NULL; /* video */
-    }
-
-    encoderControl->videoEncoderInit = 0;
-
-    return PV_TRUE;
-}
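
PVCleanUpVideoEncoder() is also the failure path of the initializer above (the CLEAN_UP label), so it tolerates a partially built VideoEncData and can be called unconditionally. A minimal caller-side sketch of the init/teardown pairing follows; the header name, the VideoEncOptions type name and the exact PVInitVideoEncoder() signature are assumptions, since none of them appear in this hunk.

    #include "mp4enc_api.h"   /* assumed public header for this API */

    /* Sketch only: PVInitVideoEncoder(ctrl, opts) is assumed to be the matching
       initializer; on failure it has already run the CLEAN_UP path (which calls
       PVCleanUpVideoEncoder()), so the caller never needs a second teardown. */
    static Bool run_encoder_session(VideoEncControls *ctrl, VideoEncOptions *opts)
    {
        if (PVInitVideoEncoder(ctrl, opts) != PV_TRUE)
            return PV_FALSE;

        /* ... encode frames here, see PVEncodeVideoFrame() below ... */

        return PVCleanUpVideoEncoder(ctrl);   /* always returns PV_TRUE above */
    }
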
-
-/* ======================================================================== */
-/*  Function : PVGetVolHeader()                                             */
-/*  Date     : 7/17/2001,                                                   */
-/*  Purpose  :                                                              */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetVolHeader(VideoEncControls *encCtrl, UChar *volHeader, Int *size, Int layer)
-{
-    VideoEncData    *encData;
-    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-
-    encData->currLayer = layer; /* Set Layer */
-    /*pv_status = */
-    EncodeVOS_Start(encCtrl); /* Encode VOL Header */
-
-    encData->encParams->GetVolHeader[layer] = 1; /* Set usage flag: Needed to support old method*/
-
-    /* Copy bitstream to buffer and set the size */
-
-    if (*size > encData->bitstream1->byteCount)
-    {
-        *size = encData->bitstream1->byteCount;
-        M4VENC_MEMCPY(volHeader, encData->bitstream1->bitstreamBuffer, *size);
-    }
-    else
-        return PV_FALSE;
-
-    /* Reset bitstream1 buffer parameters */
-    BitstreamEncReset(encData->bitstream1);
-
-    return PV_TRUE;
-}
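
The in/out contract of *size above is easy to misuse: the caller passes the capacity of volHeader, the call fails unless the generated header is strictly smaller than that capacity, and on success *size is rewritten with the number of bytes copied. A minimal usage sketch under those assumptions; the header name is assumed, as in the earlier sketch:

    #include "mp4enc_api.h"   /* assumed public header for this API */

    /* Sketch only: returns the header length in bytes, or 0 if the buffer was
       too small or the encoder is not initialized. */
    static Int fetch_vol_header(VideoEncControls *encCtrl, UChar *dst, Int capacity)
    {
        Int headerSize = capacity;     /* in: capacity, out: bytes copied */

        if (PVGetVolHeader(encCtrl, dst, &headerSize, 0 /* base layer */) != PV_TRUE)
            return 0;

        return headerSize;             /* dst[0..headerSize-1] holds the VOL header */
    }
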
-
-/* ======================================================================== */
-/*  Function : PVGetOverrunBuffer()                                         */
-/*  Purpose  : Get the overrun buffer                                       */
-/*  In/out   :                                                              */
-/*  Return   : Pointer to overrun buffer.                                   */
-/*  Modified :                                                              */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF UChar* PVGetOverrunBuffer(VideoEncControls *encCtrl)
-{
-    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
-    Int currLayer = video->currLayer;
-    Vol *currVol = video->vol[currLayer];
-
-    if (currVol->stream->bitstreamBuffer != video->overrunBuffer) // not used
-    {
-        return NULL;
-    }
-
-    return video->overrunBuffer;
-}
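
As the check above shows, the function returns NULL when the last frame stayed in the caller-supplied buffer, and returns the internal overrun buffer when the encoder had to spill into it. A sketch of how a caller might pick the pointer to read back after an encode call; bstream is assumed to be the buffer that was passed to that call:

    #include "mp4enc_api.h"   /* assumed public header for this API */

    /* Sketch only: the byte count returned by the encode call applies to
       whichever pointer this helper returns. */
    static UChar *coded_bits(VideoEncControls *encCtrl, UChar *bstream)
    {
        UChar *overrun = PVGetOverrunBuffer(encCtrl);

        return (overrun != NULL) ? overrun   /* frame spilled into the overrun buffer */
                                 : bstream;  /* frame fit in the caller's buffer */
    }
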
-
-
-
-
-/* ======================================================================== */
-/*  Function : PVEncodeVideoFrame()                                         */
-/*  Date     : 08/22/2000                                                   */
-/*  Purpose  : Encode video frame and return bitstream                      */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*  02.14.2001                                      */
-/*              Finishing new timestamp 32-bit input                        */
-/*              Applications need to take care of wrap-around               */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeVideoFrame(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, VideoEncFrameIO *vid_out,
-                                        ULong *nextModTime, UChar *bstream, Int *size, Int *nLayer)
-{
-    Bool status = PV_TRUE;
-    PV_STATUS pv_status;
-    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
-    VideoEncParams *encParams = video->encParams;
-    Vol *currVol;
-    Vop *tempForwRefVop = NULL;
-    Int tempRefSelCode = 0;
-    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
-    Int width_16, height_16;
-    Int width, height;
-    Vop *temp;
-    Int encodeVop = 0;
-    void  PaddingEdge(Vop *padVop);
-    Int currLayer = -1;
-    //Int nLayers = encParams->nLayers;
-
-    ULong modTime = vid_in->timestamp;
-
-#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
-    Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
-    static Int rand_idx = 0;
-#endif
-
-    /*******************************************************/
-    /* Determine Next Vop to encode, if any, and nLayer    */
-    /*******************************************************/
-    //i = nLayers-1;
-
-    if (video->volInitialize[0]) /* first vol to code */
-    {
-        video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
-    }
-
-    encodeVop = DetermineCodingLayer(video, nLayer, modTime);
-    currLayer = *nLayer;
-    if ((currLayer < 0) || (currLayer > encParams->nLayers - 1))
-        return PV_FALSE;
-
-    /******************************************/
-    /* If post-skipping is still in effect --- return */
-    /******************************************/
-
-    if (!encodeVop) /* skip enh layer, no base layer coded --- return */
-    {
-#ifdef _PRINT_STAT
-        printf("No frame coded. Continue to next frame.");
-#endif
-        /* expected next code time, convert back to millisec */
-        *nextModTime = video->nextModTime;
-
-#ifdef ALLOW_VOP_NOT_CODED
-        if (video->vol[0]->shortVideoHeader) /* Short Video Header = 1 */
-        {
-            *size = 0;
-            *nLayer = -1;
-        }
-        else
-        {
-            *nLayer = 0;
-            EncodeVopNotCoded(video, bstream, size, modTime);
-            *size = video->vol[0]->stream->byteCount;
-        }
-#else
-        *size = 0;
-        *nLayer = -1;
-#endif
-        return status;
-    }
-
-
-//ENCODE_VOP_AGAIN:  /* 12/30/00 */
-
-    /**************************************************************/
-    /* Initialize Vol stream structure with application bitstream */
-    /**************************************************************/
-
-    currVol = video->vol[currLayer];
-    currVol->stream->bitstreamBuffer = bstream;
-    currVol->stream->bufferSize = *size;
-    BitstreamEncReset(currVol->stream);
-    BitstreamSetOverrunBuffer(currVol->stream, video->overrunBuffer, video->oBSize, video);
-
-    /***********************************************************/
-    /* Encode VOS and VOL Headers on first call for each layer */
-    /***********************************************************/
-
-    if (video->volInitialize[currLayer])
-    {
-        video->currVop->timeInc = 0;
-        video->prevBaseVop->timeInc = 0;
-        if (!video->encParams->GetVolHeader[currLayer])
-            pv_status = EncodeVOS_Start(encCtrl);
-    }
-
-    /***************************************************/
-    /* Copy Input Video Frame to Internal Video Buffer */
-    /***************************************************/
-    /* Determine Width and Height of Vop Layer */
-
-    width = encParams->LayerWidth[currLayer];   /* Get input width */
-    height = encParams->LayerHeight[currLayer]; /* Get input height */
-    /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
-
-    width_16 = ((width + 15) / 16) * 16;            /* Round up to nearest multiple of 16 */
-    height_16 = ((height + 15) / 16) * 16;          /* Round up to nearest multiple of 16 */
-
-    video->input = vid_in;  /* point to the frame input */
-
-    /*//  End ////////////////////////////// */
-
-
-    /**************************************/
-    /* Determine VOP Type                 */
-    /* 6/2/2001, separate function      */
-    /**************************************/
-    DetermineVopType(video, currLayer);
-
-    /****************************/
-    /*    Initialize VOP        */
-    /****************************/
-    video->currVop->volID = currVol->volID;
-    video->currVop->width = width_16;
-    video->currVop->height = height_16;
-    if (video->encParams->H263_Enabled) /*  11/28/05 */
-    {
-        video->currVop->pitch = width_16;
-    }
-    else
-    {
-        video->currVop->pitch = width_16 + 32;
-    }
-    video->currVop->timeInc = currVol->timeIncrement;
-    video->currVop->vopCoded = 1;
-    video->currVop->roundingType = 0;
-    video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
-
-    if (currLayer == 0
-#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
-            || random_val[rand_idx] || video->volInitialize[currLayer]
-#endif
-       )
-    {
-        tempForwRefVop = video->forwardRefVop; /* keep initial state */
-        if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
-
-        video->forwardRefVop = video->prevBaseVop;
-        video->forwardRefVop->refSelectCode = 1;
-    }
-#ifdef RANDOM_REFSELCODE
-    else
-    {
-        tempForwRefVop = video->forwardRefVop; /* keep initial state */
-        if (tempForwRefVop != NULL) tempRefSelCode = tempForwRefVop->refSelectCode;
-
-        video->forwardRefVop = video->prevEnhanceVop;
-        video->forwardRefVop->refSelectCode = 0;
-    }
-    rand_idx++;
-    rand_idx %= 30;
-#endif
-
-    video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
-    video->currVop->gobNumber = 0;
-    video->currVop->gobFrameID = video->currVop->predictionType;
-    video->currVop->temporalRef = (modTime * 30 / 1001) % 256;
-
-    video->currVop->temporalInterval = 0;
-
-    if (video->currVop->predictionType == I_VOP)
-        video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
-    else
-        video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
-
-
-    /****************/
-    /* Encode Vop */
-    /****************/
-    video->slice_coding = 0;
-
-    pv_status = EncodeVop(video);
-#ifdef _PRINT_STAT
-    if (video->currVop->predictionType == I_VOP)
-        printf(" I-VOP ");
-    else
-        printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
-#endif
-
-    /************************************/
-    /* Update Skip Next Frame           */
-    /************************************/
-    *nLayer = UpdateSkipNextFrame(video, nextModTime, size, pv_status);
-    if (*nLayer == -1) /* skip current frame */
-    {
-        /* make sure that pointers are restored to the previous state */
-        if (currLayer == 0)
-        {
-            video->forwardRefVop = tempForwRefVop; /* For P-Vop base only */
-            video->forwardRefVop->refSelectCode = tempRefSelCode;
-        }
-
-        return status;
-    }
-
-    /* If I-VOP was encoded, reset IntraPeriod */
-    if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
-        video->nextEncIVop = encParams->IntraPeriod;
-
-    /* Set HintTrack Information */
-    if (currLayer != -1)
-    {
-        if (currVol->prevModuloTimeBase)
-            video->hintTrackInfo.MTB = 1;
-        else
-            video->hintTrackInfo.MTB = 0;
-        video->hintTrackInfo.LayerID = (UChar)currVol->volID;
-        video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
-        video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
-    }
-
-    /************************************************/
-    /* Determine nLayer and timeInc for next encode */
-    /* 12/27/00 always go by the highest layer*/
-    /************************************************/
-
-    /**********************************************************/
-    /* Copy Reconstructed Buffer to Output Video Frame Buffer */
-    /**********************************************************/
-    vid_out->yChan = video->currVop->yChan;
-    vid_out->uChan = video->currVop->uChan;
-    vid_out->vChan = video->currVop->vChan;
-    if (video->encParams->H263_Enabled)
-    {
-        vid_out->height = video->currVop->height; /* padded height */
-        vid_out->pitch = video->currVop->width; /* padded width */
-    }
-    else
-    {
-        vid_out->height = video->currVop->height + 32; /* padded height */
-        vid_out->pitch = video->currVop->width + 32; /* padded width */
-    }
-    //video_out->timestamp = video->modTime;
-    vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
-
-    /*// End /////////////////////// */
-
-    /***********************************/
-    /* Update Output bstream byte count */
-    /***********************************/
-
-    *size = currVol->stream->byteCount;
-
-    /****************************************/
-    /* Swap Vop Pointers for Base Layer     */
-    /****************************************/
-    if (currLayer == 0)
-    {
-        temp = video->prevBaseVop;
-        video->prevBaseVop = video->currVop;
-        video->prevBaseVop->padded = 0; /* not padded */
-        video->currVop  = temp;
-        video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
-        video->forwardRefVop->refSelectCode = 1;
-    }
-    else
-    {
-        temp = video->prevEnhanceVop;
-        video->prevEnhanceVop = video->currVop;
-        video->prevEnhanceVop->padded = 0; /* not padded */
-        video->currVop = temp;
-        video->forwardRefVop = video->prevEnhanceVop;
-        video->forwardRefVop->refSelectCode = 0;
-    }
-
-    /****************************************/
-    /* Modify the initialize flag at the end. */
-    /****************************************/
-    if (video->volInitialize[currLayer])
-        video->volInitialize[currLayer] = 0;
-
-    return status;
-}
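
Pulling the pieces above together: *size carries the output-buffer capacity in and the coded byte count out, *nLayer comes back as -1 when the frame was skipped, and *nextModTime tells the caller when to feed the next frame. A per-frame sketch under those assumptions; the YUV plane pointers and the millisecond timestamp come from the caller, and any further VideoEncFrameIO fields required by the real header (e.g. input dimensions) are omitted here:

    #include "mp4enc_api.h"   /* assumed public header for this API */

    /* Sketch only.  Returns the coded byte count, 0 if the frame was skipped,
       or -1 on failure. */
    static Int encode_frame(VideoEncControls *encCtrl,
                            UChar *srcY, UChar *srcU, UChar *srcV,
                            ULong timestampMs, UChar *bstream, Int capacity)
    {
        VideoEncFrameIO in, out;       /* out receives the reconstructed frame */
        ULong nextModTime;
        Int size = capacity;           /* in: capacity, out: bytes coded */
        Int nLayer;

        in.yChan = srcY;
        in.uChan = srcU;
        in.vChan = srcV;
        in.timestamp = timestampMs;    /* 32-bit ms; wrap-around is the caller's job */

        if (PVEncodeVideoFrame(encCtrl, &in, &out, &nextModTime,
                               bstream, &size, &nLayer) != PV_TRUE)
            return -1;                 /* hard failure */

        if (nLayer < 0)
            return 0;                  /* skipped; try again at or after nextModTime */

        return size;                   /* bytes written for layer nLayer */
    }
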
-
-#ifndef NO_SLICE_ENCODE
-/* ======================================================================== */
-/*  Function : PVEncodeFrameSet()                                           */
-/*  Date     : 04/18/2000                                                   */
-/*  Purpose  : Enter a video frame and perform front-end time check plus ME */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeFrameSet(VideoEncControls *encCtrl, VideoEncFrameIO *vid_in, ULong *nextModTime, Int *nLayer)
-{
-    Bool status = PV_TRUE;
-    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
-    VideoEncParams *encParams = video->encParams;
-    Vol *currVol;
-    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
-    Int width_16, height_16;
-    Int width, height;
-    Int encodeVop = 0;
-    void  PaddingEdge(Vop *padVop);
-    Int currLayer = -1;
-    //Int nLayers = encParams->nLayers;
-
-    ULong   modTime = vid_in->timestamp;
-
-#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
-    Int random_val[30] = {0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0};
-    static Int rand_idx = 0;
-#endif
-    /*******************************************************/
-    /* Determine Next Vop to encode, if any, and nLayer    */
-    /*******************************************************/
-
-    video->modTime = modTime;
-
-    //i = nLayers-1;
-
-    if (video->volInitialize[0]) /* first vol to code */
-    {
-        video->nextModTime = video->modTimeRef = ((modTime) - ((modTime) % 1000));
-    }
-
-
-    encodeVop = DetermineCodingLayer(video, nLayer, modTime);
-
-    currLayer = *nLayer;
-
-    /******************************************/
-    /* If post-skipping is still in effect --- return */
-    /******************************************/
-
-    if (!encodeVop) /* skip enh layer, no base layer coded --- return */
-    {
-#ifdef _PRINT_STAT
-        printf("No frame coded. Continue to next frame.");
-#endif
-        *nLayer = -1;
-
-        /* expected next code time, convert back to millisec */
-        *nextModTime = video->nextModTime;
-        return status;
-    }
-
-    /**************************************************************/
-    /* Initialize Vol stream structure with application bitstream */
-    /**************************************************************/
-
-    currVol = video->vol[currLayer];
-    currVol->stream->bufferSize = 0;
-    BitstreamEncReset(currVol->stream);
-
-    /***********************************************************/
-    /* Encode VOS and VOL Headers on first call for each layer */
-    /***********************************************************/
-
-    if (video->volInitialize[currLayer])
-    {
-        video->currVop->timeInc = 0;
-        video->prevBaseVop->timeInc = 0;
-    }
-
-    /***************************************************/
-    /* Copy Input Video Frame to Internal Video Buffer */
-    /***************************************************/
-    /* Determine Width and Height of Vop Layer */
-
-    width = encParams->LayerWidth[currLayer];   /* Get input width */
-    height = encParams->LayerHeight[currLayer]; /* Get input height */
-    /* Round Up to nearest multiple of 16 : MPEG-4 Standard */
-
-    width_16 = ((width + 15) / 16) * 16;            /* Round up to nearest multiple of 16 */
-    height_16 = ((height + 15) / 16) * 16;          /* Round up to nearest multiple of 16 */
-
-    video->input = vid_in;  /* point to the frame input */
-
-    /*//  End ////////////////////////////// */
-
-
-    /**************************************/
-    /* Determine VOP Type                 */
-    /* 6/2/2001, separate function      */
-    /**************************************/
-    DetermineVopType(video, currLayer);
-
-    /****************************/
-    /*    Initialize VOP        */
-    /****************************/
-    video->currVop->volID = currVol->volID;
-    video->currVop->width = width_16;
-    video->currVop->height = height_16;
-    if (video->encParams->H263_Enabled) /*  11/28/05 */
-    {
-        video->currVop->pitch = width_16;
-    }
-    else
-    {
-        video->currVop->pitch = width_16 + 32;
-    }
-    video->currVop->timeInc = currVol->timeIncrement;
-    video->currVop->vopCoded = 1;
-    video->currVop->roundingType = 0;
-    video->currVop->intraDCVlcThr = encParams->IntraDCVlcThr;
-
-    if (currLayer == 0
-#ifdef RANDOM_REFSELCODE   /* add random selection of reference Vop */
-            || random_val[rand_idx] || video->volInitialize[currLayer]
-#endif
-       )
-    {
-        video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
-        if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
-
-        video->forwardRefVop = video->prevBaseVop;
-        video->forwardRefVop->refSelectCode = 1;
-    }
-#ifdef RANDOM_REFSELCODE
-    else
-    {
-        video->tempForwRefVop = video->forwardRefVop; /* keep initial state */
-        if (video->tempForwRefVop != NULL) video->tempRefSelCode = video->tempForwRefVop->refSelectCode;
-
-        video->forwardRefVop = video->prevEnhanceVop;
-        video->forwardRefVop->refSelectCode = 0;
-    }
-    rand_idx++;
-    rand_idx %= 30;
-#endif
-
-    video->currVop->refSelectCode = video->forwardRefVop->refSelectCode;
-    video->currVop->gobNumber = 0;
-    video->currVop->gobFrameID = video->currVop->predictionType;
-    video->currVop->temporalRef = ((modTime) * 30 / 1001) % 256;
-
-    video->currVop->temporalInterval = 0;
-
-    if (video->currVop->predictionType == I_VOP)
-        video->currVop->quantizer = encParams->InitQuantIvop[currLayer];
-    else
-        video->currVop->quantizer = encParams->InitQuantPvop[currLayer];
-
-    /****************/
-    /* Encode Vop   */
-    /****************/
-    video->slice_coding = 1;
-
-    /*pv_status =*/
-    EncodeVop(video);
-
-#ifdef _PRINT_STAT
-    if (video->currVop->predictionType == I_VOP)
-        printf(" I-VOP ");
-    else
-        printf(" P-VOP (ref.%d)", video->forwardRefVop->refSelectCode);
-#endif
-
-    /* Set HintTrack Information */
-    if (currVol->prevModuloTimeBase)
-        video->hintTrackInfo.MTB = 1;
-    else
-        video->hintTrackInfo.MTB = 0;
-
-    video->hintTrackInfo.LayerID = (UChar)currVol->volID;
-    video->hintTrackInfo.CodeType = (UChar)video->currVop->predictionType;
-    video->hintTrackInfo.RefSelCode = (UChar)video->currVop->refSelectCode;
-
-    return status;
-}
-#endif /* NO_SLICE_ENCODE */
-
-#ifndef NO_SLICE_ENCODE
-/* ======================================================================== */
-/*  Function : PVEncodeSlice()                                              */
-/*  Date     : 04/18/2002                                                   */
-/*  Purpose  : Encode one packet and return bitstream                       */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVEncodeSlice(VideoEncControls *encCtrl, UChar *bstream, Int *size,
-                                   Int *endofFrame, VideoEncFrameIO *vid_out, ULong *nextModTime)
-{
-    PV_STATUS pv_status;
-    VideoEncData *video = (VideoEncData *)encCtrl->videoEncoderData;
-    VideoEncParams *encParams = video->encParams;
-    Vol *currVol;
-    PV_STATUS   EncodeVOS_Start(VideoEncControls *encCtrl);
-    Vop *temp;
-    void  PaddingEdge(Vop *padVop);
-    Int currLayer = video->currLayer;
-    Int pre_skip;
-    Int pre_size;
-    /**************************************************************/
-    /* Initialize Vol stream structure with application bitstream */
-    /**************************************************************/
-
-    currVol = video->vol[currLayer];
-    currVol->stream->bitstreamBuffer = bstream;
-    pre_size = currVol->stream->byteCount;
-    currVol->stream->bufferSize = pre_size + (*size);
-
-    /***********************************************************/
-    /* Encode VOS and VOL Headers on first call for each layer */
-    /***********************************************************/
-
-    if (video->volInitialize[currLayer])
-    {
-        if (!video->encParams->GetVolHeader[currLayer])
-            pv_status = EncodeVOS_Start(encCtrl);
-    }
-
-    /****************/
-    /* Encode Slice */
-    /****************/
-    pv_status = EncodeSlice(video);
-
-    *endofFrame = 0;
-
-    if (video->mbnum >= currVol->nTotalMB && !video->end_of_buf)
-    {
-        *endofFrame = 1;
-
-        /************************************/
-        /* Update Skip Next Frame           */
-        /************************************/
-        pre_skip = UpdateSkipNextFrame(video, nextModTime, size, pv_status); /* modified so that no frames are pre-skipped */
-
-        if (pre_skip == -1) /* error */
-        {
-            *endofFrame = -1;
-            /* make sure that pointers are restored to the previous state */
-            if (currLayer == 0)
-            {
-                video->forwardRefVop = video->tempForwRefVop; /* For P-Vop base only */
-                video->forwardRefVop->refSelectCode = video->tempRefSelCode;
-            }
-
-            return pv_status;
-        }
-
-        /* If I-VOP was encoded, reset IntraPeriod */
-        if ((currLayer == 0) && (encParams->IntraPeriod > 0) && (video->currVop->predictionType == I_VOP))
-            video->nextEncIVop = encParams->IntraPeriod;
-
-        /**********************************************************/
-        /* Copy Reconstructed Buffer to Output Video Frame Buffer */
-        /**********************************************************/
-        vid_out->yChan = video->currVop->yChan;
-        vid_out->uChan = video->currVop->uChan;
-        vid_out->vChan = video->currVop->vChan;
-        if (video->encParams->H263_Enabled)
-        {
-            vid_out->height = video->currVop->height; /* padded height */
-            vid_out->pitch = video->currVop->width; /* padded width */
-        }
-        else
-        {
-            vid_out->height = video->currVop->height + 32; /* padded height */
-            vid_out->pitch = video->currVop->width + 32; /* padded width */
-        }
-        //vid_out->timestamp = video->modTime;
-        vid_out->timestamp = (ULong)(((video->prevFrameNum[currLayer] * 1000) / encParams->LayerFrameRate[currLayer]) + video->modTimeRef + 0.5);
-
-        /*// End /////////////////////// */
-
-        /****************************************/
-        /* Swap Vop Pointers for Base Layer     */
-        /****************************************/
-
-        if (currLayer == 0)
-        {
-            temp = video->prevBaseVop;
-            video->prevBaseVop = video->currVop;
-            video->prevBaseVop->padded = 0; /* not padded */
-            video->currVop = temp;
-            video->forwardRefVop = video->prevBaseVop; /* For P-Vop base only */
-            video->forwardRefVop->refSelectCode = 1;
-        }
-        else
-        {
-            temp = video->prevEnhanceVop;
-            video->prevEnhanceVop = video->currVop;
-            video->prevEnhanceVop->padded = 0; /* not padded */
-            video->currVop = temp;
-            video->forwardRefVop = video->prevEnhanceVop;
-            video->forwardRefVop->refSelectCode = 0;
-        }
-    }
-
-    /************************************/
-    /* Update output bstream byte count */
-    /************************************/
-
-    *size = currVol->stream->byteCount - pre_size;
-
-    /******************************************/
-    /* Modify the initialize flag at the end. */
-    /******************************************/
-    if (video->volInitialize[currLayer])
-        video->volInitialize[currLayer] = 0;
-
-    return pv_status;
-}
-#endif /* NO_SLICE_ENCODE */
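-
-/* Illustrative sketch, not part of the original file: the output timestamp
-   written into vid_out in PVEncodeSlice()/PVEncodeVideoFrame is derived from
-   the frame counter, the layer frame rate and the reference time.  A
-   standalone version of that formula: */
-static ULong FrameTimestampMs(UInt frameNum, float layerFrameRate, ULong modTimeRef)
-{
-    /* e.g. frame 30 at 15 fps with modTimeRef 0 -> 2000 ms */
-    return (ULong)(((frameNum * 1000) / layerFrameRate) + modTimeRef + 0.5);
-}
-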
-
-
-/* ======================================================================== */
-/*  Function : PVGetH263ProfileLevelID()                                    */
-/*  Date     : 02/05/2003                                                   */
-/*  Purpose  : Get H.263 Profile ID and level ID for profile 0              */
-/*  In/out   : profileID is always set to 0; levelID receives the level     */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*  Note     : h263Level[8], rBR_bound[8], max_h263_framerate[2]            */
-/*             max_h263_width[2], max_h263_height[2] are global             */
-/*                                                                          */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVGetH263ProfileLevelID(VideoEncControls *encCtrl, Int *profileID, Int *levelID)
-{
-    VideoEncData *encData;
-    Int width, height;
-    float bitrate_r, framerate;
-
-
-    /* For this version, we only support H.263 profile 0 */
-    *profileID = 0;
-
-    *levelID = 0;
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    if (!encData->encParams->H263_Enabled) return PV_FALSE;
-
-
-    /* get image width, height, bitrate and framerate */
-    width     = encData->encParams->LayerWidth[0];
-    height    = encData->encParams->LayerHeight[0];
-    bitrate_r = (float)(encData->encParams->LayerBitRate[0]) / (float)64000.0;
-    framerate = encData->encParams->LayerFrameRate[0];
-    if (!width || !height || !(bitrate_r > 0 && framerate > 0)) return PV_FALSE;
-
-    /* This is the most frequent case : level 10 */
-    if (bitrate_r <= rBR_bound[1] && framerate <= max_h263_framerate[0] &&
-            (width <= max_h263_width[0] && height <= max_h263_height[0]))
-    {
-        *levelID = h263Level[1];
-        return PV_TRUE;
-    }
-    else if (bitrate_r > rBR_bound[4] ||
-             (width > max_h263_width[1] || height > max_h263_height[1]) ||
-             framerate > max_h263_framerate[1])    /* check the highest level 70 */
-    {
-        *levelID = h263Level[7];
-        return PV_TRUE;
-    }
-    else   /* search level 20, 30, 40 */
-    {
-
-        /* pick out level 20 */
-        if (bitrate_r <= rBR_bound[2] &&
-                ((width <= max_h263_width[0] && height <= max_h263_height[0] && framerate <= max_h263_framerate[1]) ||
-                 (width <= max_h263_width[1] && height <= max_h263_height[1] && framerate <= max_h263_framerate[0])))
-        {
-            *levelID = h263Level[2];
-            return PV_TRUE;
-        }
-        else   /* width, height and framerate are ok, now choose level 30 or 40 */
-        {
-            *levelID = (bitrate_r <= rBR_bound[3] ? h263Level[3] : h263Level[4]);
-            return PV_TRUE;
-        }
-    }
-}
-
-/* ======================================================================== */
-/*  Function : PVGetMPEG4ProfileLevelID()                                   */
-/*  Date     : 26/06/2008                                                   */
-/*  Purpose  : Get the MPEG-4 profile/level index after initialization      */
-/*  In/out   : profile_level according to interface                         */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool PVGetMPEG4ProfileLevelID(VideoEncControls *encCtrl, Int *profile_level, Int nLayer)
-{
-    VideoEncData* video;
-    Int i;
-
-    video = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (nLayer == 0)
-    {
-        for (i = 0; i < 8; i++)
-        {
-            if (video->encParams->ProfileLevel[0] == profile_level_code[i])
-            {
-                break;
-            }
-        }
-        *profile_level = i;
-    }
-    else
-    {
-        for (i = 0; i < 8; i++)
-        {
-            if (video->encParams->ProfileLevel[0] == scalable_profile_level_code[i])
-            {
-                break;
-            }
-        }
-        *profile_level = i + SIMPLE_SCALABLE_PROFILE_LEVEL0;
-    }
-
-    return PV_TRUE;
-}
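-
-/* Illustrative sketch, not part of the original file: one way a caller might
-   use the two query functions above once encCtrl has been initialized
-   elsewhere.  The helper name is made up for illustration. */
-static Int QueryBaseLayerLevel_Example(VideoEncControls *encCtrl)
-{
-    Int profileID = 0, levelID = 0, profile_level = 0;
-
-    /* H.263 / short header mode: profileID is always 0 and levelID comes
-       from the h263Level[] table */
-    if (PVGetH263ProfileLevelID(encCtrl, &profileID, &levelID) == PV_TRUE)
-        return levelID;
-
-    /* otherwise report the MPEG-4 profile/level index of the base layer */
-    PVGetMPEG4ProfileLevelID(encCtrl, &profile_level, 0 /* nLayer */);
-    return profile_level;
-}
-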
-
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVUpdateEncFrameRate                                         */
-/*  Date     : 04/08/2002                                                   */
-/*  Purpose  : Update target frame rates of the encoded base and            */
-/*             enhancement layer (if any) while encoding is ongoing         */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateEncFrameRate(VideoEncControls *encCtrl, float *frameRate)
-{
-    VideoEncData    *encData;
-    Int i;// nTotalMB, mbPerSec;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    /* Update the framerates for all the layers */
-    for (i = 0; i < encData->encParams->nLayers; i++)
-    {
-
-        /* New check: encoding framerate should be consistent with the given profile and level */
-        //nTotalMB = (((encData->encParams->LayerWidth[i]+15)/16)*16)*(((encData->encParams->LayerHeight[i]+15)/16)*16)/(16*16);
-        //mbPerSec = (Int)(nTotalMB * frameRate[i]);
-        //if(mbPerSec > encData->encParams->LayerMaxMbsPerSec[i]) return PV_FALSE;
-        if (frameRate[i] > encData->encParams->LayerMaxFrameRate[i]) return PV_FALSE; /* set by users or profile */
-
-        encData->encParams->LayerFrameRate[i] = frameRate[i];
-    }
-
-    return RC_UpdateBXRCParams((void*) encData);
-
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVUpdateBitRate                                              */
-/*  Date     : 04/08/2002                                                   */
-/*  Purpose  : Update target bit rates of the encoded base and              */
-/*             enhancement layer (if any) while encoding is ongoing         */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateBitRate(VideoEncControls *encCtrl, Int *bitRate)
-{
-    VideoEncData    *encData;
-    Int i;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    /* Update the bitrates for all the layers */
-    for (i = 0; i < encData->encParams->nLayers; i++)
-    {
-        if (bitRate[i] > encData->encParams->LayerMaxBitRate[i]) /* set by users or profile */
-        {
-            return PV_FALSE;
-        }
-        encData->encParams->LayerBitRate[i] = bitRate[i];
-    }
-
-    return RC_UpdateBXRCParams((void*) encData);
-
-}
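-
-/* Illustrative sketch, not part of the original file: mid-stream
-   reconfiguration using the two update calls above.  The rate values are
-   hypothetical; they must not exceed LayerMaxBitRate / LayerMaxFrameRate,
-   otherwise the calls return PV_FALSE. */
-static Bool Reconfigure_Example(VideoEncControls *encCtrl)
-{
-    Int   newBitRate[2]   = { 64000, 128000 };  /* bits/s, base + enhancement */
-    float newFrameRate[2] = { 15.0f, 30.0f };   /* frames/s, base + enhancement */
-
-    if (PVUpdateBitRate(encCtrl, newBitRate) == PV_FALSE)
-        return PV_FALSE;                        /* exceeds the profile/user cap */
-
-    return PVUpdateEncFrameRate(encCtrl, newFrameRate);
-}
-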
-#endif
-#ifndef LIMITED_API
-/* ============================================================================ */
-/*  Function : PVUpdateVBVDelay()                                                   */
-/*  Date     : 4/23/2004                                                        */
-/*  Purpose  : Update VBV buffer size(in delay)                                 */
-/*  In/out   :                                                                  */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                        */
-/*  Modified :                                                                  */
-/*                                                                              */
-/* ============================================================================ */
-
-Bool PVUpdateVBVDelay(VideoEncControls *encCtrl, float delay)
-{
-
-    VideoEncData    *encData;
-    Int total_bitrate, max_buffer_size;
-    int index;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    /* Check whether the input delay is valid based on the given profile */
-    total_bitrate   = (encData->encParams->nLayers == 1 ? encData->encParams->LayerBitRate[0] :
-                       encData->encParams->LayerBitRate[1]);
-    index = encData->encParams->profile_table_index;
-    max_buffer_size = (encData->encParams->nLayers == 1 ? profile_level_max_VBV_size[index] :
-                       scalable_profile_level_max_VBV_size[index]);
-
-    if (total_bitrate*delay > (float)max_buffer_size)
-        return PV_FALSE;
-
-    encData->encParams->VBV_delay = delay;
-    return PV_TRUE;
-
-}
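-
-/* Illustrative sketch, not part of the original file: the feasibility test
-   applied by PVUpdateVBVDelay() above.  A delay is accepted only when
-   bitrate * delay fits into the profile/level VBV buffer (maxVBVBits comes
-   from the profile tables). */
-static Bool VBVDelayFits_Example(Int totalBitRate, float delay, Int maxVBVBits)
-{
-    /* e.g. 64000 bit/s with a 2.0 s delay needs 128000 bits of VBV space */
-    return ((float)totalBitRate * delay <= (float)maxVBVBits) ? PV_TRUE : PV_FALSE;
-}
-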
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVUpdateIFrameInterval()                                         */
-/*  Date     : 04/10/2002                                                   */
-/*  Purpose  : updates the INTRA frame refresh interval while encoding      */
-/*             is ongoing                                                   */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVUpdateIFrameInterval(VideoEncControls *encCtrl, Int aIFramePeriod)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    encData->encParams->IntraPeriod = aIFramePeriod;
-    return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVUpdateNumIntraMBRefresh()                                  */
-/*  Date     : 08/05/2003                                                   */
-/*  Purpose  : Update the number of intra-refresh macroblocks per frame     */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-OSCL_EXPORT_REF Bool    PVUpdateNumIntraMBRefresh(VideoEncControls *encCtrl, Int numMB)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-
-    encData->encParams->Refresh = numMB;
-
-    return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVIFrameRequest()                                            */
-/*  Date     : 04/10/2002                                                   */
-/*  Purpose  : encodes the next base frame as an I-Vop                      */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVIFrameRequest(VideoEncControls *encCtrl)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    encData->nextEncIVop = 1;
-    return PV_TRUE;
-}
-#endif
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVGetEncMemoryUsage()                                        */
-/*  Date     : 10/17/2000                                                   */
-/*  Purpose  : Return the encoder's estimated memory usage                  */
-/*  In/out   :                                                              */
-/*  Return   : Memory usage on success, PV_FALSE if failed.                 */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Int PVGetEncMemoryUsage(VideoEncControls *encCtrl)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-    return encData->encParams->MemoryUsage;
-}
-#endif
-
-/* ======================================================================== */
-/*  Function : PVGetHintTrack()                                             */
-/*  Date     : 1/17/2001,                                                   */
-/*  Purpose  : Copy hint-track info (MTB, layer ID, code type, ref select)  */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetHintTrack(VideoEncControls *encCtrl, MP4HintTrack *info)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-    info->MTB = encData->hintTrackInfo.MTB;
-    info->LayerID = encData->hintTrackInfo.LayerID;
-    info->CodeType = encData->hintTrackInfo.CodeType;
-    info->RefSelCode = encData->hintTrackInfo.RefSelCode;
-
-    return PV_TRUE;
-}
-
-/* ======================================================================== */
-/*  Function : PVGetMaxVideoFrameSize()                                     */
-/*  Date     : 7/17/2001,                                                   */
-/*  Purpose  : Return the maximum video frame size in bytes (at least 4000) */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetMaxVideoFrameSize(VideoEncControls *encCtrl, Int *maxVideoFrameSize)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-
-
-    *maxVideoFrameSize = encData->encParams->BufferSize[0];
-
-    if (encData->encParams->nLayers == 2)
-        if (*maxVideoFrameSize < encData->encParams->BufferSize[1])
-            *maxVideoFrameSize = encData->encParams->BufferSize[1];
-    *maxVideoFrameSize >>= 3;   /* Convert to Bytes */
-
-    if (*maxVideoFrameSize <= 4000)
-        *maxVideoFrameSize = 4000;
-
-    return PV_TRUE;
-}
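-
-/* Illustrative sketch, not part of the original file: the value returned by
-   PVGetMaxVideoFrameSize() above is the largest per-layer VBV buffer,
-   converted from bits to bytes and clamped to a 4000-byte floor. */
-static Int MaxVideoFrameSizeBytes_Example(Int bufferSizeBits0, Int bufferSizeBits1, Int nLayers)
-{
-    Int bits = bufferSizeBits0;
-
-    if (nLayers == 2 && bufferSizeBits1 > bits)
-        bits = bufferSizeBits1;
-
-    /* e.g. a 163840-bit buffer becomes 20480 bytes; anything below 4000
-       bytes is reported as 4000 */
-    return ((bits >> 3) <= 4000) ? 4000 : (bits >> 3);
-}
-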
-#ifndef LIMITED_API
-/* ======================================================================== */
-/*  Function : PVGetVBVSize()                                               */
-/*  Date     : 4/15/2002                                                    */
-/*  Purpose  : Return the total VBV buffer size (in bits) over all layers   */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF Bool PVGetVBVSize(VideoEncControls *encCtrl, Int *VBVSize)
-{
-    VideoEncData    *encData;
-
-    encData = (VideoEncData *)encCtrl->videoEncoderData;
-
-    if (encData == NULL)
-        return PV_FALSE;
-    if (encData->encParams == NULL)
-        return PV_FALSE;
-
-    *VBVSize = encData->encParams->BufferSize[0];
-    if (encData->encParams->nLayers == 2)
-        *VBVSize += encData->encParams->BufferSize[1];
-
-    return PV_TRUE;
-
-}
-#endif
-/* ======================================================================== */
-/*  Function : EncodeVOS_Start()                                            */
-/*  Date     : 08/22/2000                                                   */
-/*  Purpose  : Encode the VOS, VO and VOL headers (no-op for short header)  */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-PV_STATUS EncodeVOS_Start(VideoEncControls *encoderControl)
-{
-
-    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
-    Vol         *currVol = video->vol[video->currLayer];
-    PV_STATUS status = PV_SUCCESS;
-    //int profile_level=0x01;
-    BitstreamEncVideo *stream = video->bitstream1;
-    int i, j;
-
-    /********************************/
-    /* Check for short_video_header */
-    /********************************/
-    if (currVol->shortVideoHeader == 1)
-        return status;
-    else
-    {
-        /* M4V path: the short video header case has already returned above */
-
-        /**************************/
-        /* VisualObjectSequence ()*/
-        /**************************/
-        status = BitstreamPutGT16Bits(stream, 32, SESSION_START_CODE);
-        /*  Determine profile_level */
-        status = BitstreamPutBits(stream, 8, video->encParams->ProfileLevel[video->currLayer]);
-
-        /******************/
-        /* VisualObject() */
-        /******************/
-
-        status = BitstreamPutGT16Bits(stream, 32, VISUAL_OBJECT_START_CODE);
-        status = BitstreamPut1Bits(stream, 0x00); /* visual object identifier */
-        status = BitstreamPutBits(stream, 4, 0x01); /* visual object Type == "video ID" */
-        status = BitstreamPut1Bits(stream, 0x00); /* no video signal type */
-
-        /*temp   = */
-        BitstreamMpeg4ByteAlignStuffing(stream);
-
-
-        status = BitstreamPutGT16Bits(stream, 27, VO_START_CODE);/* byte align: should be 2 bits */
-        status = BitstreamPutBits(stream, 5, 0x00);/*  Video ID = 0  */
-
-
-
-        /**********************/
-        /* VideoObjectLayer() */
-        /**********************/
-        if (currVol->shortVideoHeader == 0)
-        { /* M4V  else Short Video Header */
-            status = BitstreamPutGT16Bits(stream, VOL_START_CODE_LENGTH, VOL_START_CODE);
-            status = BitstreamPutBits(stream, 4, currVol->volID);/*  video_object_layer_id */
-            status = BitstreamPut1Bits(stream, 0x00);/*  Random Access = 0  */
-
-            if (video->currLayer == 0)
-                status = BitstreamPutBits(stream, 8, 0x01);/* Video Object Type Indication = 1  ... Simple Object Type */
-            else
-                status = BitstreamPutBits(stream, 8, 0x02);/* Video Object Type Indication = 2  ... Simple Scalable Object Type */
-
-            status = BitstreamPut1Bits(stream, 0x00);/*  is_object_layer_identifier = 0 */
-
-
-            status = BitstreamPutBits(stream, 4, 0x01); /* aspect_ratio_info = 1 ... 1:1(Square) */
-            status = BitstreamPut1Bits(stream, 0x00);/* vol_control_parameters = 0 */
-            status = BitstreamPutBits(stream, 2, 0x00);/* video_object_layer_shape = 00 ... rectangular */
-            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
-            status = BitstreamPutGT8Bits(stream, 16, currVol->timeIncrementResolution);/* vop_time_increment_resolution */
-            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
-            status = BitstreamPut1Bits(stream, currVol->fixedVopRate);/* fixed_vop_rate = 0 */
-
-            /* For Rectangular VO layer shape */
-            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
-            status = BitstreamPutGT8Bits(stream, 13, currVol->width);/* video_object_layer_width */
-            status = BitstreamPut1Bits(stream, 0x01);/* marker bit */
-            status = BitstreamPutGT8Bits(stream, 13, currVol->height);/* video_object_layer_height */
-            status = BitstreamPut1Bits(stream, 0x01);/*marker bit */
-
-            status = BitstreamPut1Bits(stream, 0x00);/*interlaced = 0 */
-            status = BitstreamPut1Bits(stream, 0x01);/* obmc_disable = 1 */
-            status = BitstreamPut1Bits(stream, 0x00);/* sprite_enable = 0 */
-            status = BitstreamPut1Bits(stream, 0x00);/* not_8_bit = 0 */
-            status = BitstreamPut1Bits(stream, currVol->quantType);/*   quant_type */
-
-            if (currVol->quantType)
-            {
-                status = BitstreamPut1Bits(stream, currVol->loadIntraQuantMat); /* Intra quant matrix */
-                if (currVol->loadIntraQuantMat)
-                {
-                    for (j = 63; j >= 1; j--)
-                        if (currVol->iqmat[*(zigzag_i+j)] != currVol->iqmat[*(zigzag_i+j-1)])
-                            break;
-                    if ((j == 1) && (currVol->iqmat[*(zigzag_i+j)] == currVol->iqmat[*(zigzag_i+j-1)]))
-                        j = 0;
-                    for (i = 0; i < j + 1; i++)
-                        BitstreamPutBits(stream, 8, currVol->iqmat[*(zigzag_i+i)]);
-                    if (j < 63)
-                        BitstreamPutBits(stream, 8, 0);
-                }
-                else
-                {
-                    for (j = 0; j < 64; j++)
-                        currVol->iqmat[j] = mpeg_iqmat_def[j];
-
-                }
-                status = BitstreamPut1Bits(stream, currVol->loadNonIntraQuantMat); /* Non-Intra quant matrix */
-                if (currVol->loadNonIntraQuantMat)
-                {
-                    for (j = 63; j >= 1; j--)
-                        if (currVol->niqmat[*(zigzag_i+j)] != currVol->niqmat[*(zigzag_i+j-1)])
-                            break;
-                    if ((j == 1) && (currVol->niqmat[*(zigzag_i+j)] == currVol->niqmat[*(zigzag_i+j-1)]))
-                        j = 0;
-                    for (i = 0; i < j + 1; i++)
-                        BitstreamPutBits(stream, 8, currVol->niqmat[*(zigzag_i+i)]);
-                    if (j < 63)
-                        BitstreamPutBits(stream, 8, 0);
-                }
-                else
-                {
-                    for (j = 0; j < 64; j++)
-                        currVol->niqmat[j] = mpeg_nqmat_def[j];
-                }
-            }
-
-            status = BitstreamPut1Bits(stream, 0x01);   /* complexity_estimation_disable = 1 */
-            status = BitstreamPut1Bits(stream, currVol->ResyncMarkerDisable);/* Resync_marker_disable */
-            status = BitstreamPut1Bits(stream, currVol->dataPartitioning);/* Data partitioned */
-
-            if (currVol->dataPartitioning)
-                status = BitstreamPut1Bits(stream, currVol->useReverseVLC); /* Reversible_vlc */
-
-
-            if (currVol->scalability) /* Scalability*/
-            {
-
-                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 1 */
-                status = BitstreamPut1Bits(stream, currVol->scalType);/* hierarchy_type ... Spatial = 0 and Temporal = 1 */
-                status = BitstreamPutBits(stream, 4, currVol->refVolID);/* ref_layer_id  */
-                status = BitstreamPut1Bits(stream, currVol->refSampDir);/* ref_layer_sampling_direc*/
-                status = BitstreamPutBits(stream, 5, currVol->horSamp_n);/*hor_sampling_factor_n*/
-                status = BitstreamPutBits(stream, 5, currVol->horSamp_m);/*hor_sampling_factor_m*/
-                status = BitstreamPutBits(stream, 5, currVol->verSamp_n);/*vert_sampling_factor_n*/
-                status = BitstreamPutBits(stream, 5, currVol->verSamp_m);/*vert_sampling_factor_m*/
-                status = BitstreamPut1Bits(stream, currVol->enhancementType);/* enhancement_type*/
-            }
-            else /* No Scalability */
-                status = BitstreamPut1Bits(stream, currVol->scalability);/* Scalability = 0 */
-
-            /*temp = */
-            BitstreamMpeg4ByteAlignStuffing(stream); /* Byte align Headers for VOP */
-        }
-    }
-
-    return status;
-}
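-
-/* Illustrative sketch, not part of the original file: the matrix-trimming
-   loops in EncodeVOS_Start() above only transmit the leading run of the
-   zigzag-ordered quant matrix up to the last position whose value differs
-   from its predecessor; when fewer than 64 entries are sent, a single 0 byte
-   terminates the list. */
-static Int LastChangedZigzagIndex_Example(const Int *mat, const Int *zigzag)
-{
-    Int j;
-
-    for (j = 63; j >= 1; j--)
-        if (mat[zigzag[j]] != mat[zigzag[j-1]])
-            break;                       /* j = last position that differs */
-
-    if (j == 1 && mat[zigzag[1]] == mat[zigzag[0]])
-        j = 0;                           /* the whole matrix is one repeated value */
-
-    return j;                            /* entries 0..j are coded, then a 0 byte if j < 63 */
-}
-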
-
-/* ======================================================================== */
-/*  Function : VOS_End()                                                    */
-/*  Date     : 08/22/2000                                                   */
-/*  Purpose  : Visual Object Sequence End                                   */
-/*  In/out   :                                                              */
-/*  Return   : PV_TRUE if succeeded, PV_FALSE if failed.                    */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-PV_STATUS VOS_End(VideoEncControls *encoderControl)
-{
-    PV_STATUS status = PV_SUCCESS;
-    VideoEncData *video = (VideoEncData *)encoderControl->videoEncoderData;
-    Vol         *currVol = video->vol[video->currLayer];
-    BitstreamEncVideo *stream = currVol->stream;
-
-
-    status = BitstreamPutGT16Bits(stream, 32, SESSION_END_CODE); /* 32-bit code: (stream, nbits, value), as for SESSION_START_CODE */
-
-    return status;
-}
-
-/* ======================================================================== */
-/*  Function : DetermineCodingLayer                                         */
-/*  Date     : 06/02/2001                                                   */
-/*  Purpose  : Find layer to code based on current mod time, assuming that
-               it's time to encode the enhancement layer.                   */
-/*  In/out   :                                                              */
-/*  Return   : Number of layer to code.                                     */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime)
-{
-    Vol **vol = video->vol;
-    VideoEncParams *encParams = video->encParams;
-    Int numLayers = encParams->nLayers;
-    UInt modTimeRef = video->modTimeRef;
-    float *LayerFrameRate = encParams->LayerFrameRate;
-    UInt frameNum[4], frameTick;
-    ULong frameModTime, nextFrmModTime;
-#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
-    float frameInterval;
-    Int delta;                  /* used below under the same #ifdef */
-#endif
-    float srcFrameInterval;
-    Int frameInc;
-    Int i, extra_skip;
-    Int encodeVop = 0;
-
-    i = numLayers - 1;
-
-    if (modTime - video->nextModTime > ((ULong)(-1)) >> 1) /* next time wrapped around */
-        return 0; /* not time to code it yet */
-
-    video->relLayerCodeTime[i] -= 1000;
-    video->nextEncIVop--;  /* number of Vops in highest layer resolution. */
-    video->numVopsInGOP++;
-
-    /* from this point frameModTime and nextFrmModTime are internal */
-
-    frameNum[i] = (UInt)((modTime - modTimeRef) * LayerFrameRate[i] + 500) / 1000;
-    if (video->volInitialize[i])
-    {
-        video->prevFrameNum[i] = frameNum[i] - 1;
-    }
-    else if (frameNum[i] <= video->prevFrameNum[i])
-    {
-        return 0; /* do not encode this frame */
-    }
-
-    /**** this part computes expected next frame *******/
-    frameModTime = (ULong)(((frameNum[i] * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
-    nextFrmModTime = (ULong)((((frameNum[i] + 1) * 1000) / LayerFrameRate[i]) + modTimeRef + 0.5); /* rec. time */
-
-    srcFrameInterval = 1000 / video->FrameRate;
-
-    video->nextModTime = nextFrmModTime - (ULong)(srcFrameInterval / 2.) - 1; /* between current and next frame */
-
-#ifdef REDUCE_FRAME_VARIANCE    /* To limit how close 2 frames can be */
-    frameInterval = 1000 / LayerFrameRate[i]; /* next rec. time */
-    delta = (Int)(frameInterval / 4); /* empirical number */
-    if (video->nextModTime - modTime  < (ULong)delta) /* need to move nextModTime further. */
-    {
-        video->nextModTime += ((delta - video->nextModTime + modTime)); /* empirical formula  */
-    }
-#endif
-    /****************************************************/
-
-    /* map frame no.to tick from modTimeRef */
-    /*frameTick = (frameNum[i]*vol[i]->timeIncrementResolution) ;
-    frameTick = (UInt)((frameTick + (encParams->LayerFrameRate[i]/2))/encParams->LayerFrameRate[i]);*/
-    /*  11/16/01, change frameTick to be the closest tick from the actual modTime */
-    /*  12/12/02, add (double) to prevent large number wrap-around */
-    frameTick = (Int)(((double)(modTime - modTimeRef) * vol[i]->timeIncrementResolution + 500) / 1000);
-
-    /* find timeIncrement to be put in the bitstream */
-    /* refTick is second boundary reference. */
-    vol[i]->timeIncrement = frameTick - video->refTick[i];
-
-
-    vol[i]->moduloTimeBase = 0;
-    while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
-    {
-        vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
-        vol[i]->moduloTimeBase++;
-        /* do not update refTick and modTimeRef yet, do it after encoding!! */
-    }
-
-    if (video->relLayerCodeTime[i] <= 0)    /* no skipping */
-    {
-        encodeVop = 1;
-        video->currLayer = *nLayer = i;
-        video->relLayerCodeTime[i] += 1000;
-
-        /* takes care of more dropped frame than expected */
-        extra_skip = -1;
-        frameInc = (frameNum[i] - video->prevFrameNum[i]);
-        extra_skip += frameInc;
-
-        if (extra_skip > 0)
-        {   /* update rc->Nr, rc->B, (rc->Rr)*/
-            video->nextEncIVop -= extra_skip;
-            video->numVopsInGOP += extra_skip;
-            if (encParams->RC_Type != CONSTANT_Q)
-            {
-                RC_UpdateBuffer(video, i, extra_skip);
-            }
-        }
-
-    }
-    /* update frame no. */
-    video->prevFrameNum[i] = frameNum[i];
-
-    /* go through all lower layer */
-    for (i = (numLayers - 2); i >= 0; i--)
-    {
-
-        video->relLayerCodeTime[i] -= 1000;
-
-        /* find timeIncrement to be put in the bitstream */
-        vol[i]->timeIncrement = frameTick - video->refTick[i];
-
-        if (video->relLayerCodeTime[i] <= 0) /* time to encode base */
-        {
-            /* 12/27/00 */
-            encodeVop = 1;
-            video->currLayer = *nLayer = i;
-            video->relLayerCodeTime[i] +=
-                (Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]);
-
-            vol[i]->moduloTimeBase = 0;
-            while (vol[i]->timeIncrement >= vol[i]->timeIncrementResolution)
-            {
-                vol[i]->timeIncrement -= vol[i]->timeIncrementResolution;
-                vol[i]->moduloTimeBase++;
-                /* do not update refTick and modTimeRef yet, do it after encoding!! */
-            }
-
-            /* takes care of more dropped frame than expected */
-            frameNum[i] = (UInt)((frameModTime - modTimeRef) * encParams->LayerFrameRate[i] + 500) / 1000;
-            if (video->volInitialize[i])
-                video->prevFrameNum[i] = frameNum[i] - 1;
-
-            extra_skip = -1;
-            frameInc = (frameNum[i] - video->prevFrameNum[i]);
-            extra_skip += frameInc;
-
-            if (extra_skip > 0)
-            {   /* update rc->Nr, rc->B, (rc->Rr)*/
-                if (encParams->RC_Type != CONSTANT_Q)
-                {
-                    RC_UpdateBuffer(video, i, extra_skip);
-                }
-            }
-            /* update frame no. */
-            video->prevFrameNum[i] = frameNum[i];
-        }
-    }
-
-#ifdef _PRINT_STAT
-    if (encodeVop)
-        printf(" TI: %d ", vol[*nLayer]->timeIncrement);
-#endif
-
-    return encodeVop;
-}
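-
-/* Illustrative sketch, not part of the original file: the tick bookkeeping in
-   DetermineCodingLayer() above maps the capture time to a tick count at the
-   VOL resolution and then splits it into vop_time_increment plus a
-   modulo_time_base count. */
-static void SplitTimeIncrement_Example(ULong modTime, ULong modTimeRef, Int resolution,
-                                       Int refTick, Int *timeIncrement, Int *moduloTimeBase)
-{
-    /* e.g. 1250 ms after the reference at 30 ticks/s gives frameTick 38;
-       with refTick 30 that yields timeIncrement 8 and moduloTimeBase 0 */
-    Int frameTick = (Int)(((double)(modTime - modTimeRef) * resolution + 500) / 1000);
-
-    *timeIncrement  = frameTick - refTick;
-    *moduloTimeBase = 0;
-    while (*timeIncrement >= resolution)
-    {
-        *timeIncrement -= resolution;
-        (*moduloTimeBase)++;
-    }
-}
-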
-
-/* ======================================================================== */
-/*  Function : DetermineVopType                                             */
-/*  Date     : 06/02/2001                                                   */
-/*  Purpose  : Decide whether the current VOP is coded as an I-VOP or P-VOP */
-/*  In/out   :                                                              */
-/*  Return   : void.                                                        */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-void DetermineVopType(VideoEncData *video, Int currLayer)
-{
-    VideoEncParams *encParams = video->encParams;
-//  Vol *currVol = video->vol[currLayer];
-
-    if (encParams->IntraPeriod == 0) /* I-VOPs only */
-    {
-        if (video->currLayer > 0)
-            video->currVop->predictionType = P_VOP;
-        else
-        {
-            video->currVop->predictionType = I_VOP;
-            if (video->numVopsInGOP >= 132)
-                video->numVopsInGOP = 0;
-        }
-    }
-    else if (encParams->IntraPeriod == -1)  /* IPPPPP... */
-    {
-
-        /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
-        if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
-            video->currVop->predictionType = P_VOP;
-
-        if (video->currLayer == 0)
-        {
-            if (/*video->numVopsInGOP>=132 || */video->volInitialize[currLayer])
-            {
-                video->currVop->predictionType = I_VOP;
-                video->numVopsInGOP = 0; /* force INTRA update every 132 base frames*/
-                video->nextEncIVop = 1;
-            }
-            else if (video->nextEncIVop == 0 || video->currVop->predictionType == I_VOP)
-            {
-                video->numVopsInGOP = 0;
-                video->nextEncIVop = 1;
-            }
-        }
-    }
-    else   /* IntraPeriod>0 : IPPPPPIPPPPPI... */
-    {
-
-        /* maintain frame type if previous frame is pre-skipped, 06/02/2001 */
-        if (encParams->RC_Type == CONSTANT_Q || video->rc[currLayer]->skip_next_frame != -1)
-            video->currVop->predictionType = P_VOP;
-
-        if (currLayer == 0)
-        {
-            if (video->nextEncIVop <= 0 || video->currVop->predictionType == I_VOP)
-            {
-                video->nextEncIVop = encParams->IntraPeriod;
-                video->currVop->predictionType = I_VOP;
-                video->numVopsInGOP = 0;
-            }
-        }
-    }
-
-    return ;
-}
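-
-/* Illustrative sketch, not part of the original file: a simplified view of
-   the refresh policy chosen by DetermineVopType() above, ignoring the
-   rate-control pre-skip and first-frame special cases. */
-static Int PredictionTypeFor_Example(Int intraPeriod, Int nextEncIVop, Int isBaseLayer)
-{
-    if (!isBaseLayer)
-        return P_VOP;                    /* enhancement layers are always predicted */
-
-    if (intraPeriod == 0)
-        return I_VOP;                    /* I-VOPs only */
-
-    if (intraPeriod == -1)
-        return P_VOP;                    /* IPPPP... after the first frame */
-
-    return (nextEncIVop <= 0) ? I_VOP : P_VOP;   /* periodic I-VOP refresh */
-}
-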
-
-/* ======================================================================== */
-/*  Function : UpdateSkipNextFrame                                          */
-/*  Date     : 06/02/2001                                                   */
-/*  Purpose  : Based on the rate-control frame-skipping decision, update
-                the timing-related parameters.                              */
-/*  In/out   :                                                              */
-/*  Return   : Current coded layer.                                         */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, PV_STATUS status)
-{
-    Int currLayer = video->currLayer;
-    Int nLayer = currLayer;
-    VideoEncParams *encParams = video->encParams;
-    Int numLayers = encParams->nLayers;
-    Vol *currVol = video->vol[currLayer];
-    Vol **vol = video->vol;
-    Int num_skip, extra_skip;
-    Int i;
-    UInt newRefTick, deltaModTime;
-    UInt temp;
-
-    if (encParams->RC_Type != CONSTANT_Q)
-    {
-        if (video->volInitialize[0] && currLayer == 0)  /* always encode the first frame */
-        {
-            RC_ResetSkipNextFrame(video, currLayer);
-            //return currLayer;  09/15/05
-        }
-        else
-        {
-            if (RC_GetSkipNextFrame(video, currLayer) < 0 || status == PV_END_OF_BUF)   /* Skip Current Frame */
-            {
-
-#ifdef _PRINT_STAT
-                printf("Skip current frame");
-#endif
-                currVol->moduloTimeBase = currVol->prevModuloTimeBase;
-
-                /*********************/
-                /* prepare to return */
-                /*********************/
-                *size = 0;  /* Set Bitstream buffer to zero */
-
-                /* Determine nLayer and modTime for next encode */
-
-                *modTime = video->nextModTime;
-                nLayer = -1;
-
-                return nLayer; /* return immediately without updating RefTick & modTimeRef */
-                /* If I-VOP was attempted, then ensure next base is I-VOP */
-                /*if((encParams->IntraPeriod>0) && (video->currVop->predictionType == I_VOP))
-                video->nextEncIVop = 0; commented out by 06/05/01 */
-
-            }
-            else if ((num_skip = RC_GetSkipNextFrame(video, currLayer)) > 0)
-            {
-
-#ifdef _PRINT_STAT
-                printf("Skip next %d frames", num_skip);
-#endif
-                /* to keep the Nr of enh layer the same */
-                /* adjust relLayerCodeTime only, do not adjust layerCodeTime[numLayers-1] */
-                extra_skip = 0;
-                for (i = 0; i < currLayer; i++)
-                {
-                    if (video->relLayerCodeTime[i] <= 1000)
-                    {
-                        extra_skip = 1;
-                        break;
-                    }
-                }
-
-                for (i = currLayer; i < numLayers; i++)
-                {
-                    video->relLayerCodeTime[i] += (num_skip + extra_skip) *
-                                                  ((Int)((1000.0 * encParams->LayerFrameRate[numLayers-1]) / encParams->LayerFrameRate[i]));
-                }
-            }
-        }/* first frame */
-    }
-    /*****  current frame is encoded, now update refTick ******/
-
-    video->refTick[currLayer] += vol[currLayer]->prevModuloTimeBase * vol[currLayer]->timeIncrementResolution;
-
-    /* Reset layerCodeTime every I-VOP to prevent overflow */
-    if (currLayer == 0)
-    {
-        /*  12/12/02, fix for weird target frame rates of 9.99 fps or 3.33 fps */
-        if (((encParams->IntraPeriod != 0) /*&& (video->currVop->predictionType==I_VOP)*/) ||
-                ((encParams->IntraPeriod == 0) && (video->numVopsInGOP == 0)))
-        {
-            newRefTick = video->refTick[0];
-
-            for (i = 1; i < numLayers; i++)
-            {
-                if (video->refTick[i] < newRefTick)
-                    newRefTick = video->refTick[i];
-            }
-
-            /* check to make sure that the update is integer multiple of frame number */
-            /* how many msec elapsed from last modTimeRef */
-            deltaModTime = (newRefTick / vol[0]->timeIncrementResolution) * 1000;
-
-            for (i = numLayers - 1; i >= 0; i--)
-            {
-                temp = (UInt)(deltaModTime * encParams->LayerFrameRate[i]); /* 12/12/02 */
-                if (temp % 1000)
-                    newRefTick = 0;
-
-            }
-            if (newRefTick > 0)
-            {
-                video->modTimeRef += deltaModTime;
-                for (i = numLayers - 1; i >= 0; i--)
-                {
-                    video->prevFrameNum[i] -= (UInt)(deltaModTime * encParams->LayerFrameRate[i]) / 1000;
-                    video->refTick[i] -= newRefTick;
-                }
-            }
-        }
-    }
-
-    *modTime =  video->nextModTime;
-
-    return nLayer;
-}
-
-
-#ifndef ORIGINAL_VERSION
-
-/* ======================================================================== */
-/*  Function : SetProfile_BufferSize                                        */
-/*  Date     : 04/08/2002                                                   */
-/*  Purpose  : Set profile and video buffer size, copied from Jim's code    */
-/*             in PVInitVideoEncoder(.), since we have different places     */
-/*             to reset profile and video buffer size                       */
-/*  In/out   :                                                              */
-/*  Return   :                                                              */
-/*  Modified :                                                              */
-/*                                                                          */
-/* ======================================================================== */
-
-Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitialized)
-{
-    Int i, j, start, end;
-//  Int BaseMBsPerSec = 0, EnhMBsPerSec = 0;
-    Int nTotalMB = 0;
-    Int idx, temp_w, temp_h, max = 0, max_width, max_height;
-
-    Int nLayers = video->encParams->nLayers; /* Number of Layers to be encoded */
-
-    Int total_bitrate = 0, base_bitrate;
-    Int total_packet_size = 0, base_packet_size;
-    Int total_MBsPerSec = 0, base_MBsPerSec;
-    Int total_VBV_size = 0, base_VBV_size, enhance_VBV_size = 0;
-    float total_framerate, base_framerate;
-    float upper_bound_ratio;
-    Int bFound = 0;
-    Int k = 0, width16, height16, index;
-    Int lowest_level;
-
-#define MIN_BUFF    16000 /* 16k minimum buffer size */
-#define BUFF_CONST  2.0    /* 2000ms */
-#define UPPER_BOUND_RATIO 8.54 /* upper_bound = 1.4*(1.1+bound/10)*bitrate/framerate */
-
-#define QCIF_WIDTH  176
-#define QCIF_HEIGHT 144
-
-    index = video->encParams->profile_table_index;
-
-    /* Calculate "nTotalMB" */
-    /* Find the maximum width*height for memory allocation of the VOPs */
-    for (idx = 0; idx < nLayers; idx++)
-    {
-        temp_w = video->encParams->LayerWidth[idx];
-        temp_h = video->encParams->LayerHeight[idx];
-
-        if ((temp_w*temp_h) > max)
-        {
-            max = temp_w * temp_h;
-            max_width = temp_w;
-            max_height = temp_h;
-            nTotalMB = ((max_width + 15) >> 4) * ((max_height + 15) >> 4);
-        }
-    }
-    upper_bound_ratio = (video->encParams->RC_Type == CBR_LOWDELAY ? (float)5.0 : (float)UPPER_BOUND_RATIO);
-
-
-    /* Get the basic information: bitrate, packet_size, MBs/s and VBV_size */
-    base_bitrate        = video->encParams->LayerBitRate[0];
-    if (video->encParams->LayerMaxBitRate[0] != 0) /* video->encParams->LayerMaxBitRate[0] == 0 means it has not been set */
-    {
-        base_bitrate    = PV_MAX(base_bitrate, video->encParams->LayerMaxBitRate[0]);
-    }
-    else /* if the max is not set, set it to the specified profile/level */
-    {
-        video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[index];
-    }
-
-    base_framerate      = video->encParams->LayerFrameRate[0];
-    if (video->encParams->LayerMaxFrameRate[0] != 0)
-    {
-        base_framerate  = PV_MAX(base_framerate, video->encParams->LayerMaxFrameRate[0]);
-    }
-    else /* if the max is not set, set it to the specified profile/level */
-    {
-        video->encParams->LayerMaxFrameRate[0] = (float)profile_level_max_mbsPerSec[index] / nTotalMB;
-    }
-
-    base_packet_size    = video->encParams->ResyncPacketsize;
-    base_MBsPerSec      = (Int)(base_framerate * nTotalMB);
-    base_VBV_size       = PV_MAX((Int)(base_bitrate * delay),
-                                 (Int)(upper_bound_ratio * base_bitrate / base_framerate));
-    base_VBV_size       = PV_MAX(base_VBV_size, MIN_BUFF);
-
-    /* if the buffer is larger than maximum buffer size, we'll clip it */
-    if (base_VBV_size > profile_level_max_VBV_size[5])
-        base_VBV_size = profile_level_max_VBV_size[5];
-
-
-    /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
-    if (nLayers == 1 && base_VBV_size > profile_level_max_VBV_size[index])
-        return PV_FALSE;
-
-
-    if (nLayers == 2)
-    {
-        total_bitrate       = video->encParams->LayerBitRate[1];
-        if (video->encParams->LayerMaxBitRate[1] != 0)
-        {
-            total_bitrate   = PV_MIN(total_bitrate, video->encParams->LayerMaxBitRate[1]);
-        }
-        else /* if the max is not set, set it to the specified profile/level */
-        {
-            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[index];
-        }
-
-        total_framerate     = video->encParams->LayerFrameRate[1];
-        if (video->encParams->LayerMaxFrameRate[1] != 0)
-        {
-            total_framerate     = PV_MIN(total_framerate, video->encParams->LayerMaxFrameRate[1]);
-        }
-        else /* if the max is not set, set it to the specified profile/level */
-        {
-            video->encParams->LayerMaxFrameRate[1] = (float)scalable_profile_level_max_mbsPerSec[index] / nTotalMB;
-        }
-
-        total_packet_size   = video->encParams->ResyncPacketsize;
-        total_MBsPerSec     = (Int)(total_framerate * nTotalMB);
-
-        enhance_VBV_size    = PV_MAX((Int)((total_bitrate - base_bitrate) * delay),
-                                     (Int)(upper_bound_ratio * (total_bitrate - base_bitrate) / (total_framerate - base_framerate)));
-        enhance_VBV_size    = PV_MAX(enhance_VBV_size, MIN_BUFF);
-
-        total_VBV_size      = base_VBV_size + enhance_VBV_size;
-
-        /* if the buffer is larger than maximum buffer size, we'll clip it */
-        if (total_VBV_size > scalable_profile_level_max_VBV_size[6])
-        {
-            total_VBV_size = scalable_profile_level_max_VBV_size[6];
-            enhance_VBV_size = total_VBV_size - base_VBV_size;
-        }
-
-        /* Check if the buffer exceeds the maximum buffer size given the maximum profile and level */
-        if (total_VBV_size > scalable_profile_level_max_VBV_size[index])
-            return PV_FALSE;
-    }
-
-
-    if (!bInitialized) /* already initialized earlier: profile @ level is known, only refresh the buffer sizes */
-    {
-        video->encParams->BufferSize[0] = base_VBV_size;
-        if (nLayers > 1)
-            video->encParams->BufferSize[1] = enhance_VBV_size;
-
-        return PV_TRUE;
-    }
-
-
-    /* Profile @ level determination */
-    if (nLayers == 1)
-    {
-        /* BASE ONLY : Simple Profile(SP) Or Core Profile(CP) */
-        if (base_bitrate     > profile_level_max_bitrate[index]     ||
-                base_packet_size > profile_level_max_packet_size[index] ||
-                base_MBsPerSec   > profile_level_max_mbsPerSec[index]   ||
-                base_VBV_size    > profile_level_max_VBV_size[index])
-
-            return PV_FALSE; /* Beyond the bound of Core Profile @ Level2 */
-
-        /* For H263/Short header, determine k*16384 */
-        width16  = ((video->encParams->LayerWidth[0] + 15) >> 4) << 4;
-        height16 = ((video->encParams->LayerHeight[0] + 15) >> 4) << 4;
-        if (video->encParams->H263_Enabled)
-        {
-            k = 4;
-            if (width16  == 2*QCIF_WIDTH && height16 == 2*QCIF_HEIGHT)  /* CIF */
-                k = 16;
-
-            else if (width16  == 4*QCIF_WIDTH && height16 == 4*QCIF_HEIGHT)  /* 4CIF */
-                k = 32;
-
-            else if (width16  == 8*QCIF_WIDTH && height16 == 8*QCIF_HEIGHT)  /* 16CIF */
-                k = 64;
-
-            video->encParams->maxFrameSize  = k * 16384;
-
-            /* Make sure the buffer size is limited to the top profile and level: the Core profile and level 2 */
-            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[5]*1001.0 / 30000.0))
-                base_VBV_size = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[5] * 1001.0 / 30000.0);
-
-            if (base_VBV_size > (Int)(k*16384 + 4*(float)profile_level_max_bitrate[index]*1001.0 / 30000.0))
-                return PV_FALSE;
-        }
-
-        /* Search the appropriate profile@level index */
-        if (!video->encParams->H263_Enabled &&
-                (video->encParams->IntraDCVlcThr != 0 || video->encParams->SearchRange > 16))
-        {
-            lowest_level = 1; /* cannot allow SPL0 */
-        }
-        else
-        {
-            lowest_level = 0; /* SPL0 */
-        }
-
-        for (i = lowest_level; i <= index; i++)
-        {
-            if (i != 4 && /* skip Core Profile@Level1 because the parameters in it are smaller than those in Simple Profile@Level3 */
-                    base_bitrate     <= profile_level_max_bitrate[i]     &&
-                    base_packet_size <= profile_level_max_packet_size[i] &&
-                    base_MBsPerSec   <= profile_level_max_mbsPerSec[i]   &&
-                    base_VBV_size    <= (video->encParams->H263_Enabled ? (Int)(k*16384 + 4*(float)profile_level_max_bitrate[i]*1001.0 / 30000.0) :
-                                         profile_level_max_VBV_size[i]))
-                break;
-        }
-        if (i > index) return PV_FALSE; /* Nothing found!! */
-
-        /* Found out the actual profile @ level : index "i" */
-        if (i == 0)
-        {
-            /* For Simple Profile @ Level 0, we need to do one more check: image size <= QCIF */
-            if (width16 > QCIF_WIDTH || height16 > QCIF_HEIGHT)
-                i = 1; /* image size > QCIF, then set SP level1 */
-        }
-
-        video->encParams->ProfileLevel[0] = profile_level_code[i];
-        video->encParams->BufferSize[0]   = base_VBV_size;
-
-        if (video->encParams->LayerMaxBitRate[0] == 0)
-            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[i];
-
-        if (video->encParams->LayerMaxFrameRate[0] == 0)
-            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[i] / nTotalMB);
-
-        /* For H263/Short header, one special constraint for VBV buffer size */
-        if (video->encParams->H263_Enabled)
-            video->encParams->BufferSize[0] = (Int)(k * 16384 + 4 * (float)profile_level_max_bitrate[i] * 1001.0 / 30000.0);
-
-    }
-    else
-    {
-        /* SCALABLE MODE: Simple Scalable Profile (SSP) or Core Scalable Profile (CSP) */
-
-        if (total_bitrate       > scalable_profile_level_max_bitrate[index]     ||
-                total_packet_size   > scalable_profile_level_max_packet_size[index] ||
-                total_MBsPerSec     > scalable_profile_level_max_mbsPerSec[index]   ||
-                total_VBV_size      > scalable_profile_level_max_VBV_size[index])
-
-            return PV_FALSE; /* Beyond given profile and level */
-
-        /* One-time check: Simple Scalable Profile or Core Scalable Profile */
-        if (total_bitrate       <= scalable_profile_level_max_bitrate[2]        &&
-                total_packet_size   <= scalable_profile_level_max_packet_size[2]    &&
-                total_MBsPerSec     <= scalable_profile_level_max_mbsPerSec[2]      &&
-                total_VBV_size      <= scalable_profile_level_max_VBV_size[2])
-
-        {
-            start = 0;
-            end = index;
-        }
-
-        else
-        {
-            start = 4;
-            end = index;
-        }
-
-
-        /* Search the scalable profile */
-        for (i = start; i <= end; i++)
-        {
-            if (total_bitrate       <= scalable_profile_level_max_bitrate[i]     &&
-                    total_packet_size   <= scalable_profile_level_max_packet_size[i] &&
-                    total_MBsPerSec     <= scalable_profile_level_max_mbsPerSec[i]   &&
-                    total_VBV_size      <= scalable_profile_level_max_VBV_size[i])
-
-                break;
-        }
-        if (i > end) return PV_FALSE;
-
-        /* Search the base profile */
-        if (i == 0)
-        {
-            j = 0;
-            bFound = 1;
-        }
-        else        bFound = 0;
-
-        for (j = start; !bFound && j <= i; j++)
-        {
-            if (base_bitrate        <= profile_level_max_bitrate[j]      &&
-                    base_packet_size    <= profile_level_max_packet_size[j]  &&
-                    base_MBsPerSec      <= profile_level_max_mbsPerSec[j]    &&
-                    base_VBV_size       <= profile_level_max_VBV_size[j])
-
-            {
-                bFound = 1;
-                break;
-            }
-        }
-
-        if (!bFound) // && start == 4)
-            return PV_FALSE; /* mis-match in the profiles between base layer and enhancement layer */
-
-        /* j for base layer, i for enhancement layer */
-        video->encParams->ProfileLevel[0] = profile_level_code[j];
-        video->encParams->ProfileLevel[1] = scalable_profile_level_code[i];
-        video->encParams->BufferSize[0]   = base_VBV_size;
-        video->encParams->BufferSize[1]   = enhance_VBV_size;
-
-        if (video->encParams->LayerMaxBitRate[0] == 0)
-            video->encParams->LayerMaxBitRate[0] = profile_level_max_bitrate[j];
-
-        if (video->encParams->LayerMaxBitRate[1] == 0)
-            video->encParams->LayerMaxBitRate[1] = scalable_profile_level_max_bitrate[i];
-
-        if (video->encParams->LayerMaxFrameRate[0] == 0)
-            video->encParams->LayerMaxFrameRate[0] = PV_MIN(30, (float)profile_level_max_mbsPerSec[j] / nTotalMB);
-
-        if (video->encParams->LayerMaxFrameRate[1] == 0)
-            video->encParams->LayerMaxFrameRate[1] = PV_MIN(30, (float)scalable_profile_level_max_mbsPerSec[i] / nTotalMB);
-
-
-    } /* end of: if(nLayers == 1) */
-
-
-    if (!video->encParams->H263_Enabled && (video->encParams->ProfileLevel[0] == 0x08)) /* SPL0 restriction*/
-    {
-        /* PV only allows frame-based rate control, no QP change from one MB to another
-        if(video->encParams->ACDCPrediction == TRUE && MB-based rate control)
-         return PV_FALSE */
-    }
-
-    return PV_TRUE;
-}
-
-#endif /* #ifndef ORIGINAL_VERSION */
-
-
-
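The verification removed above selects the lowest MPEG-4 profile/level whose limits cover the requested bitrate, packet size, macroblocks per second, and VBV buffer size, and rejects the configuration when every level is exceeded. A minimal sketch of that table-search pattern, using hypothetical limit values rather than the encoder's real tables:

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical per-level limits, analogous to profile_level_max_bitrate[] and friends.
struct LevelLimits {
    const char *name;
    int32_t maxBitrate;    // bits per second
    int32_t maxMBsPerSec;  // macroblocks per second
    int32_t maxVbvSize;    // VBV buffer size in bits
};

static const LevelLimits kLevels[] = {
    {"SP L1",  64000,  1485, 163840},   // illustrative numbers only
    {"SP L2", 128000,  5940, 655360},
    {"SP L3", 384000, 11880, 655360},
};

// Return the index of the first (lowest) level whose limits cover the request,
// or -1 when the request exceeds every level, mirroring the PV_FALSE path above.
static int pickLevel(int32_t bitrate, int32_t mbsPerSec, int32_t vbvSize) {
    for (size_t i = 0; i < sizeof(kLevels) / sizeof(kLevels[0]); ++i) {
        if (bitrate   <= kLevels[i].maxBitrate &&
            mbsPerSec <= kLevels[i].maxMBsPerSec &&
            vbvSize   <= kLevels[i].maxVbvSize) {
            return static_cast<int>(i);
        }
    }
    return -1;
}

int main() {
    int idx = pickLevel(100000, 3000, 200000);
    printf("selected level: %s\n", idx >= 0 ? kLevels[idx].name : "none");
    return 0;
}
```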
diff --git a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp b/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
deleted file mode 100644
index b9a8117..0000000
--- a/media/libstagefright/codecs/m4v_h263/enc/test/Android.bp
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "Mpeg4H263EncoderTest",
-    gtest: true,
-
-    srcs : [ "Mpeg4H263EncoderTest.cpp" ],
-
-    shared_libs: [
-        "libutils",
-        "liblog",
-    ],
-
-    static_libs: [
-        "libstagefright_m4vh263enc",
-    ],
-
-    cflags: [
-        "-DOSCL_IMPORT_REF=",
-        "-Wall",
-        "-Werror",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-            "unsigned-integer-overflow",
-        ],
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/codecs/mp3dec/Android.bp b/media/libstagefright/codecs/mp3dec/Android.bp
index 96106f1..b669c84 100644
--- a/media/libstagefright/codecs/mp3dec/Android.bp
+++ b/media/libstagefright/codecs/mp3dec/Android.bp
@@ -1,93 +1,19 @@
-cc_library_static {
-    name: "libstagefright_mp3dec",
-    vendor_available: true,
-    min_sdk_version: "29",
 
-    srcs: [
-        "src/pvmp3_normalize.cpp",
-        "src/pvmp3_alias_reduction.cpp",
-        "src/pvmp3_crc.cpp",
-        "src/pvmp3_decode_header.cpp",
-        "src/pvmp3_decode_huff_cw.cpp",
-        "src/pvmp3_getbits.cpp",
-        "src/pvmp3_dequantize_sample.cpp",
-        "src/pvmp3_framedecoder.cpp",
-        "src/pvmp3_get_main_data_size.cpp",
-        "src/pvmp3_get_side_info.cpp",
-        "src/pvmp3_get_scale_factors.cpp",
-        "src/pvmp3_mpeg2_get_scale_data.cpp",
-        "src/pvmp3_mpeg2_get_scale_factors.cpp",
-        "src/pvmp3_mpeg2_stereo_proc.cpp",
-        "src/pvmp3_huffman_decoding.cpp",
-        "src/pvmp3_huffman_parsing.cpp",
-        "src/pvmp3_tables.cpp",
-        "src/pvmp3_imdct_synth.cpp",
-        "src/pvmp3_mdct_6.cpp",
-        "src/pvmp3_dct_6.cpp",
-        "src/pvmp3_poly_phase_synthesis.cpp",
-        "src/pvmp3_equalizer.cpp",
-        "src/pvmp3_seek_synch.cpp",
-        "src/pvmp3_stereo_proc.cpp",
-        "src/pvmp3_reorder.cpp",
-
-        "src/pvmp3_polyphase_filter_window.cpp",
-        "src/pvmp3_mdct_18.cpp",
-        "src/pvmp3_dct_9.cpp",
-        "src/pvmp3_dct_16.cpp",
-    ],
-
-    arch: {
-        arm: {
-            exclude_srcs: [
-                "src/pvmp3_polyphase_filter_window.cpp",
-                "src/pvmp3_mdct_18.cpp",
-                "src/pvmp3_dct_9.cpp",
-                "src/pvmp3_dct_16.cpp",
-            ],
-            srcs: [
-                "src/asm/pvmp3_polyphase_filter_window_gcc.s",
-                "src/asm/pvmp3_mdct_18_gcc.s",
-                "src/asm/pvmp3_dct_9_gcc.s",
-                "src/asm/pvmp3_dct_16_gcc.s",
-            ],
-
-            instruction_set: "arm",
-        },
-    },
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    include_dirs: ["frameworks/av/media/libstagefright/include"],
-
-    export_include_dirs: [
-        "include",
-        "src",
-    ],
-
-    cflags: [
-        "-DOSCL_UNUSED_ARG(x)=(void)(x)",
-        "-Werror",
-    ],
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
 }
 
-//###############################################################################
-
 cc_library_shared {
     name: "libstagefright_soft_mp3dec",
     defaults: ["libstagefright_softomx-defaults"],
 
     srcs: ["SoftMP3.cpp"],
 
-    local_include_dirs: [
-        "src",
-        "include",
-    ],
-
     version_script: "exports.lds",
 
     sanitize: {
@@ -99,35 +25,3 @@
 
     static_libs: ["libstagefright_mp3dec"],
 }
-
-//###############################################################################
-cc_test {
-    name: "libstagefright_mp3dec_test",
-    gtest: false,
-
-    srcs: [
-        "test/mp3dec_test.cpp",
-        "test/mp3reader.cpp",
-    ],
-
-    cflags: ["-Wall", "-Werror"],
-
-    local_include_dirs: [
-        "src",
-        "include",
-    ],
-
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    static_libs: [
-        "libstagefright_mp3dec",
-        "libsndfile",
-    ],
-
-    shared_libs: ["libaudioutils"],
-}
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index b5d32ed..07bb45a 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -23,7 +23,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaDefs.h>
 
-#include "include/pvmp3decoder_api.h"
+#include <pvmp3decoder_api.h>
 
 namespace android {
 
@@ -498,6 +498,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
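The `__attribute__((cfi_canonical_jump_table))` annotation added to `createSoftOMXComponent` here, and to the other soft codecs below, asks Clang's Control Flow Integrity to treat the function's jump-table entry as its canonical address, so an indirect call through a pointer obtained elsewhere (for example via dlsym, as the OMX plugin loader does) still passes the CFI check. A standalone sketch of the pattern, with illustrative names:

```cpp
// Build with e.g.: clang++ -flto=thin -fvisibility=hidden -fsanitize=cfi sketch.cpp
// Without CFI the attribute has no effect; with CFI it keeps the exported
// factory's address canonical so CFI-checked indirect calls resolve correctly.
struct Component {
    int id;
};

__attribute__((cfi_canonical_jump_table))
Component *createComponent() {                  // analogous to createSoftOMXComponent
    static Component c{1};
    return &c;
}

int main() {
    Component *(*factory)() = createComponent;  // indirect call through a function pointer
    return factory()->id == 1 ? 0 : 1;
}
```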
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
deleted file mode 100644
index 15d2feb..0000000
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ /dev/null
@@ -1,835 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/*
-------------------------------------------------------------------------------
-
-   PacketVideo Corp.
-   MP3 Decoder Library
-
-   Filename: pvmp3_framedecoder.cpp
-
-   Functions:
-    pvmp3_framedecoder
-    pvmp3_InitDecoder
-    pvmp3_resetDecoder
-
-    Date: 09/21/2007
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
-
- Description:
-
-------------------------------------------------------------------------------
- INPUT AND OUTPUT DEFINITIONS
-
-Input
-    pExt = pointer to the external interface structure. See the file
-           pvmp3decoder_api.h for a description of each field.
-           Data type: pointer to a tPVMP3DecoderExternal
-           structure.
-
-    pMem = void pointer used to hide the internal implementation of the library.
-           It is cast back to a tmp3dec_file structure. This structure
-           contains information that needs to persist between calls to
-           this function, or is too big to be placed on the stack, even
-           though the data is only needed during execution of this function.
-           Data type: void pointer, internally a pointer to a tmp3dec_file
-           structure.
-
-
- Outputs:
-     status = ERROR condition. See the ERROR_CODE structure.
-
- Pointers and Buffers Modified:
-    pMem contents are modified.
-    pExt: (more detail in the file pvmp3decoder_api.h)
-    inputBufferUsedLength - number of array elements used up by the stream.
-    samplingRate - sampling rate in samples per sec
-    bitRate - bit rate in bits per second, varies frame to frame.
-
-
-
-------------------------------------------------------------------------------
- FUNCTIONS DESCRIPTION
-
-    pvmp3_framedecoder
-        frame decoder library driver
-    pvmp3_InitDecoder
-        Decoder Initialization
-    pvmp3_resetDecoder
-        Reset Decoder
-
-------------------------------------------------------------------------------
- REQUIREMENTS
-
-
-------------------------------------------------------------------------------
- REFERENCES
-
- [1] ISO MPEG Audio Subgroup Software Simulation Group (1996)
-     ISO 13818-3 MPEG-2 Audio Decoder - Lower Sampling Frequency Extension
-
-------------------------------------------------------------------------------
- PSEUDO-CODE
-
-------------------------------------------------------------------------------
-*/
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-
-#include "pvmp3_framedecoder.h"
-#include "pvmp3_dec_defs.h"
-#include "pvmp3_poly_phase_synthesis.h"
-#include "pvmp3_tables.h"
-#include "pvmp3_imdct_synth.h"
-#include "pvmp3_alias_reduction.h"
-#include "pvmp3_reorder.h"
-#include "pvmp3_dequantize_sample.h"
-#include "pvmp3_stereo_proc.h"
-#include "pvmp3_mpeg2_stereo_proc.h"
-#include "pvmp3_get_side_info.h"
-#include "pvmp3_get_scale_factors.h"
-#include "pvmp3_mpeg2_get_scale_factors.h"
-#include "pvmp3_decode_header.h"
-#include "pvmp3_get_main_data_size.h"
-#include "s_tmp3dec_file.h"
-#include "pvmp3_getbits.h"
-#include "mp3_mem_funcs.h"
-
-
-/*----------------------------------------------------------------------------
-; MACROS
-; Define module specific macros here
-----------------------------------------------------------------------------*/
-
-
-/*----------------------------------------------------------------------------
-; DEFINES
-; Include all pre-processor statements here. Include conditional
-; compile variables also.
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL FUNCTION DEFINITIONS
-; Function Prototype declaration
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; LOCAL STORE/BUFFER/POINTER DEFINITIONS
-; Variable declaration - defined here and used outside this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL FUNCTION REFERENCES
-; Declare functions defined elsewhere and referenced in this module
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
-; Declare variables used in this module but defined elsewhere
-----------------------------------------------------------------------------*/
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
-                              void              *pMem)
-{
-
-    ERROR_CODE        errorCode  = NO_DECODING_ERROR;
-
-    int32   crc_error_count = 0;
-    uint32  sent_crc = 0;
-    uint32  computed_crc = 0;
-
-    tmp3dec_chan   *pChVars[CHAN];
-    tmp3dec_file   *pVars = (tmp3dec_file *)pMem;
-
-    mp3Header info_data;
-    mp3Header *info = &info_data;
-
-    pVars->inputStream.pBuffer  = pExt->pInputBuffer;
-
-
-    pVars->inputStream.usedBits  = pExt->inputBufferUsedLength << 3;
-    pVars->inputStream.inputBufferCurrentLength  = pExt->inputBufferCurrentLength;
-
-
-    errorCode = pvmp3_decode_header(&pVars->inputStream,
-                                    info,
-                                    &computed_crc);
-
-    if (errorCode != NO_DECODING_ERROR)
-    {
-        pExt->outputFrameSize = 0;
-        return errorCode;
-    }
-
-    pVars->num_channels = (info->mode == MPG_MD_MONO) ? 1 : 2;
-    pExt->num_channels = pVars->num_channels;
-
-    int32 outputFrameSize = (info->version_x == MPEG_1) ?
-                            2 * SUBBANDS_NUMBER * FILTERBANK_BANDS :
-                            SUBBANDS_NUMBER * FILTERBANK_BANDS;
-
-    outputFrameSize = (info->mode == MPG_MD_MONO) ? outputFrameSize : outputFrameSize << 1;
-
-
-    /*
-     *  Check if output buffer has enough room to hold output PCM
-     */
-    if (pExt->outputFrameSize >= outputFrameSize)
-    {
-        pExt->outputFrameSize = outputFrameSize;
-    }
-    else
-    {
-        pExt->outputFrameSize = 0;
-        return OUTPUT_BUFFER_TOO_SMALL;
-    }
-
-
-    pChVars[ LEFT] = &pVars->perChan[ LEFT];
-    pChVars[RIGHT] = &pVars->perChan[RIGHT];
-
-
-
-
-    if (info->error_protection)
-    {
-        if (!bitsAvailable(&pVars->inputStream, 16))
-        {
-            return SIDE_INFO_ERROR;
-        }
-
-        /*
-         *  Get crc content
-         */
-        sent_crc = getUpTo17bits(&pVars->inputStream, 16);
-    }
-
-
-    if (info->layer_description == 3)
-    {
-        int32 gr;
-        int32 ch;
-        uint32 main_data_end;
-        int32 bytes_to_discard;
-        int16 *ptrOutBuffer = pExt->pOutputBuffer;
-
-        /*
-         * Side information must be extracted from the bitstream and stored for use
-         * during the decoding of the associated frame
-         */
-
-        errorCode = pvmp3_get_side_info(&pVars->inputStream,
-                                        &pVars->sideInfo,
-                                        info,
-                                        &computed_crc);
-
-        if (errorCode != NO_DECODING_ERROR)
-        {
-            pExt->outputFrameSize = 0;
-            return errorCode;
-        }
-
-        /*
-         *  If a CRC was sent, check that it matches the one computed while parsing
-         *  the data; the check is skipped when CRC checking is disabled
-         */
-        if (info->error_protection)
-        {
-            if ((computed_crc != sent_crc) && pExt->crcEnabled)
-            {
-                crc_error_count++;
-            }
-        }
-
-        /*
-         * Main data (scalefactors, Huffman-coded data, etc.) is not necessarily located
-         * adjacent to the side info. The beginning of the main data is located using the
-         * "main_data_begin" field of the current frame. The length does not include the
-         * header and side info.
-         * "main_data_begin" points to the first bit of main data of a frame. It is a negative
-         * offset in bytes from the first byte of the sync word.
-         * main_data_begin = 0  <===> main data starts right after the side info.
-         */
-
-        int32 temp = pvmp3_get_main_data_size(info, pVars);
-
-
-        /*
-         *  Check if available data holds a full frame, if not flag an error
-         */
-
-        if ((uint32)pVars->predicted_frame_size > pVars->inputStream.inputBufferCurrentLength)
-        {
-            pExt->outputFrameSize = 0;
-            return NO_ENOUGH_MAIN_DATA_ERROR;
-        }
-
-        /*
-         *  Fill in internal circular buffer
-         */
-        fillMainDataBuf(pVars, temp);
-
-
-        main_data_end = pVars->mainDataStream.usedBits >> 3; /* in bytes */
-        if ((main_data_end << 3) < pVars->mainDataStream.usedBits)
-        {
-            main_data_end++;
-            pVars->mainDataStream.usedBits = main_data_end << 3;
-        }
-
-
-        // force signed computation; buffer sizes and offsets are all going to be
-        // well within the constraints of 32-bit signed math.
-        bytes_to_discard = pVars->frame_start
-                           - ((int32)pVars->sideInfo.main_data_begin)
-                           - ((int32)main_data_end);
-
-
-        if (main_data_end > BUFSIZE)   /* check overflow on the buffer */
-        {
-            pVars->frame_start -= BUFSIZE;
-
-            pVars->mainDataStream.usedBits -= (BUFSIZE << 3);
-        }
-
-        pVars->frame_start += temp;
-
-
-        if (bytes_to_discard < 0 || crc_error_count)
-        {
-            /*
-             *  Not enough data to decode, so we should avoid reading this
-             *  data (getting/ignoring side info and scale data).
-             *  Main data could be located in the previous frame, so an unaccounted
-             *  frame can cause incorrect processing.
-             *  Just run the polyphase filter to "clean" the history buffer.
-             */
-            errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
-
-            /*
-             *  Clear the input to these filters
-             */
-
-            pv_memset((void*)pChVars[RIGHT]->work_buf_int32,
-                      0,
-                      SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[RIGHT]->work_buf_int32[0]));
-
-            pv_memset((void*)pChVars[LEFT]->work_buf_int32,
-                      0,
-                      SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[LEFT]->work_buf_int32[0]));
-
-            /*  clear circular buffers, to avoid any glitch */
-            pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
-                      0,
-                      480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
-            pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
-                      0,
-                      480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
-
-            pChVars[ LEFT]->used_freq_lines = 575;
-            pChVars[RIGHT]->used_freq_lines = 575;
-
-        }
-        else
-        {
-            pVars->mainDataStream.usedBits += (bytes_to_discard << 3);
-        }
-
-        /*
-         *  if (fr_ps->header->version_x == MPEG_1), use 2 granules, otherwise just 1
-         */
-        for (gr = 0; gr < (1 + !(info->version_x)); gr++)
-        {
-            if (errorCode != NO_ENOUGH_MAIN_DATA_ERROR)
-            {
-                for (ch = 0; ch < pVars->num_channels; ch++)
-                {
-                    int32 part2_start = pVars->mainDataStream.usedBits;
-
-                    if (info->version_x == MPEG_1)
-                    {
-
-                        pvmp3_get_scale_factors(&pVars->scaleFactors[ch],
-                                                &pVars->sideInfo,
-                                                gr,
-                                                ch,
-                                                &pVars->mainDataStream);
-                    }
-                    else
-                    {
-                        int32 * tmp = pVars->Scratch_mem;
-                        pvmp3_mpeg2_get_scale_factors(&pVars->scaleFactors[ch],
-                                                      &pVars->sideInfo,
-                                                      gr,
-                                                      ch,
-                                                      info,
-                                                      (uint32 *)tmp,
-                                                      &pVars->mainDataStream);
-                    }
-
-                    pChVars[ch]->used_freq_lines = pvmp3_huffman_parsing(pChVars[ch]->work_buf_int32,
-                                                   &pVars->sideInfo.ch[ch].gran[gr],
-                                                   pVars,
-                                                   part2_start,
-                                                   info);
-
-
-                    pvmp3_dequantize_sample(pChVars[ch]->work_buf_int32,
-                                            &pVars->scaleFactors[ch],
-                                            &pVars->sideInfo.ch[ch].gran[gr],
-                                            pChVars[ch]->used_freq_lines,
-                                            info);
-
-
-
-
-                }   /* for (ch=0; ch<stereo; ch++)  */
-
-                if (pVars->num_channels == 2)
-                {
-
-                    int32 used_freq_lines = (pChVars[ LEFT]->used_freq_lines  >
-                                             pChVars[RIGHT]->used_freq_lines) ?
-                                            pChVars[ LEFT]->used_freq_lines  :
-                                            pChVars[RIGHT]->used_freq_lines;
-
-                    pChVars[ LEFT]->used_freq_lines = used_freq_lines;
-                    pChVars[RIGHT]->used_freq_lines = used_freq_lines;
-
-                    if (info->version_x == MPEG_1)
-                    {
-                        pvmp3_stereo_proc(pChVars[ LEFT]->work_buf_int32,
-                                          pChVars[RIGHT]->work_buf_int32,
-                                          &pVars->scaleFactors[RIGHT],
-                                          &pVars->sideInfo.ch[LEFT].gran[gr],
-                                          used_freq_lines,
-                                          info);
-                    }
-                    else
-                    {
-                        int32 * tmp = pVars->Scratch_mem;
-                        pvmp3_mpeg2_stereo_proc(pChVars[ LEFT]->work_buf_int32,
-                                                pChVars[RIGHT]->work_buf_int32,
-                                                &pVars->scaleFactors[RIGHT],
-                                                &pVars->sideInfo.ch[ LEFT].gran[gr],
-                                                &pVars->sideInfo.ch[RIGHT].gran[gr],
-                                                (uint32 *)tmp,
-                                                used_freq_lines,
-                                                info);
-                    }
-                }
-
-            } /* if ( errorCode != NO_ENOUGH_MAIN_DATA_ERROR) */
-
-            for (ch = 0; ch < pVars->num_channels; ch++)
-            {
-
-                pvmp3_reorder(pChVars[ch]->work_buf_int32,
-                              &pVars->sideInfo.ch[ch].gran[gr],
-                              &pChVars[ ch]->used_freq_lines,
-                              info,
-                              pVars->Scratch_mem);
-
-                pvmp3_alias_reduction(pChVars[ch]->work_buf_int32,
-                                      &pVars->sideInfo.ch[ch].gran[gr],
-                                      &pChVars[ ch]->used_freq_lines,
-                                      info);
-
-
-                /*
-                 *   IMDCT
-                 */
-                /* set mxposition
-                 * In case of mixed blocks, # of bands with long
-                 * blocks (2 or 4) else 0
-                 */
-                uint16 mixedBlocksLongBlocks = 0; /*  0 = long or short, 2=mixed, 4=mixed 2.5@8000 */
-                if (pVars->sideInfo.ch[ch].gran[gr].mixed_block_flag &&
-                        pVars->sideInfo.ch[ch].gran[gr].window_switching_flag)
-                {
-                    if ((info->version_x == MPEG_2_5) && (info->sampling_frequency == 2))
-                    {
-                        mixedBlocksLongBlocks = 4; /* mpeg2.5 @ 8 KHz */
-                    }
-                    else
-                    {
-                        mixedBlocksLongBlocks = 2;
-                    }
-                }
-
-                pvmp3_imdct_synth(pChVars[ch]->work_buf_int32,
-                                  pChVars[ch]->overlap,
-                                  pVars->sideInfo.ch[ch].gran[gr].block_type,
-                                  mixedBlocksLongBlocks,
-                                  pChVars[ ch]->used_freq_lines,
-                                  pVars->Scratch_mem);
-
-
-                /*
-                 *   Polyphase synthesis
-                 */
-
-                pvmp3_poly_phase_synthesis(pChVars[ch],
-                                           pVars->num_channels,
-                                           pExt->equalizerType,
-                                           &ptrOutBuffer[ch]);
-
-
-            }/* end ch loop */
-
-            ptrOutBuffer += pVars->num_channels * SUBBANDS_NUMBER * FILTERBANK_BANDS;
-        }  /*   for (gr=0;gr<Max_gr;gr++)  */
-
-        /* skip ancillary data */
-        if (info->bitrate_index > 0)
-        { /* if not free-format */
-
-            int32 ancillary_data_lenght = pVars->predicted_frame_size << 3;
-
-            ancillary_data_lenght  -= pVars->inputStream.usedBits;
-
-            /* skip ancillary data */
-            if (ancillary_data_lenght > 0)
-            {
-                pVars->inputStream.usedBits += ancillary_data_lenght;
-            }
-
-        }
-
-        /*
-         *  This overrides a possible NO_ENOUGH_MAIN_DATA_ERROR
-         */
-        errorCode = NO_DECODING_ERROR;
-
-    }
-    else
-    {
-        /*
-         * The info in the header indicates an unsupported layer; more data
-         * will not fix this, so this is a bad frame.
-         */
-
-        pExt->outputFrameSize = 0;
-        return UNSUPPORTED_LAYER;
-    }
-
-    pExt->inputBufferUsedLength = pVars->inputStream.usedBits >> 3;
-    pExt->totalNumberOfBitsUsed += pVars->inputStream.usedBits;
-    pExt->version = info->version_x;
-    pExt->samplingRate = mp3_s_freq[info->version_x][info->sampling_frequency];
-    pExt->bitRate = mp3_bitrate[pExt->version][info->bitrate_index];
-
-
-    /*
-     *  Always verify buffer overrun condition
-     */
-
-    if (pExt->inputBufferUsedLength > pExt->inputBufferCurrentLength)
-    {
-        pExt->outputFrameSize = 0;
-        errorCode = NO_ENOUGH_MAIN_DATA_ERROR;
-    }
-
-    return errorCode;
-
-}
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-__inline void fillDataBuf(tmp3Bits *pMainData,
-                          uint32 val)       /* val to write into the buffer */
-{
-    pMainData->pBuffer[module(pMainData->offset++, BUFSIZE)] = (uint8)val;
-}
-
-
-void fillMainDataBuf(void  *pMem, int32 temp)
-{
-    tmp3dec_file   *pVars = (tmp3dec_file *)pMem;
-
-
-    int32 offset = (pVars->inputStream.usedBits) >> INBUF_ARRAY_INDEX_SHIFT;
-
-    /*
-     *  Check if input circular buffer boundaries need to be enforced
-     */
-    if ((offset + temp) < BUFSIZE)
-    {
-        uint8 * ptr = pVars->inputStream.pBuffer + offset;
-
-        offset = pVars->mainDataStream.offset;
-
-        /*
-         *  Check if main data circular buffer boundaries need to be enforced
-         */
-        if ((offset + temp) < BUFSIZE)
-        {
-            pv_memcpy((pVars->mainDataStream.pBuffer + offset), ptr, temp*sizeof(uint8));
-            pVars->mainDataStream.offset += temp;
-        }
-        else
-        {
-            int32 tmp1 = *(ptr++);
-            for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--)  /* read main data. */
-            {
-                int32 tmp2 = *(ptr++);
-                fillDataBuf(&pVars->mainDataStream, tmp1);
-                fillDataBuf(&pVars->mainDataStream, tmp2);
-                tmp1 = *(ptr++);
-            }
-
-            if (temp&1)
-            {
-                fillDataBuf(&pVars->mainDataStream, tmp1);
-            }
-
-            /* adjust circular buffer counter */
-            pVars->mainDataStream.offset = module(pVars->mainDataStream.offset, BUFSIZE);
-        }
-    }
-    else
-    {
-        for (int32 nBytes = temp >> 1; nBytes != 0; nBytes--)  /* read main data. */
-        {
-            fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++  , BUFSIZE)));
-            fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset++  , BUFSIZE)));
-        }
-        if (temp&1)
-        {
-            fillDataBuf(&pVars->mainDataStream, *(pVars->inputStream.pBuffer + module(offset  , BUFSIZE)));
-        }
-    }
-
-
-    pVars->inputStream.usedBits += (temp) << INBUF_ARRAY_INDEX_SHIFT;
-}
-
-
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-uint32 pvmp3_decoderMemRequirements(void)
-{
-    uint32 size;
-
-    size = (uint32) sizeof(tmp3dec_file);
-    return (size);
-}
-
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-#include "pvmp3_decode_huff_cw.h"
-
-void pvmp3_InitDecoder(tPVMP3DecoderExternal *pExt,
-                       void  *pMem)
-{
-
-    tmp3dec_file      *pVars;
-    huffcodetab       *pHuff;
-
-    pVars = (tmp3dec_file *)pMem;
-    memset(pVars, 0, sizeof(*pVars));
-
-    pExt->totalNumberOfBitsUsed = 0;
-    pExt->inputBufferCurrentLength = 0;
-    pExt->inputBufferUsedLength    = 0;
-
-    pVars->inputStream.pBuffer = pExt->pInputBuffer;
-
-    /*
-     *  Initialize huffman decoding table
-     */
-
-    pHuff = pVars->ht;
-    pHuff[0].linbits = 0;
-    pHuff[0].pdec_huff_tab = pvmp3_decode_huff_cw_tab0;
-    pHuff[1].linbits = 0;
-    pHuff[1].pdec_huff_tab = pvmp3_decode_huff_cw_tab1;
-    pHuff[2].linbits = 0;
-    pHuff[2].pdec_huff_tab = pvmp3_decode_huff_cw_tab2;
-    pHuff[3].linbits = 0;
-    pHuff[3].pdec_huff_tab = pvmp3_decode_huff_cw_tab3;
-    pHuff[4].linbits = 0;
-    pHuff[4].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 4 is not used */
-    pHuff[5].linbits = 4;
-    pHuff[5].pdec_huff_tab = pvmp3_decode_huff_cw_tab5;
-    pHuff[6].linbits = 0;
-    pHuff[6].pdec_huff_tab = pvmp3_decode_huff_cw_tab6;
-    pHuff[7].linbits = 0;
-    pHuff[7].pdec_huff_tab = pvmp3_decode_huff_cw_tab7;
-    pHuff[8].linbits = 0;
-    pHuff[8].pdec_huff_tab = pvmp3_decode_huff_cw_tab8;
-    pHuff[9].linbits = 0;
-    pHuff[9].pdec_huff_tab = pvmp3_decode_huff_cw_tab9;
-    pHuff[10].linbits = 0;
-    pHuff[10].pdec_huff_tab = pvmp3_decode_huff_cw_tab10;
-    pHuff[11].linbits = 0;
-    pHuff[11].pdec_huff_tab = pvmp3_decode_huff_cw_tab11;
-    pHuff[12].linbits = 0;
-    pHuff[12].pdec_huff_tab = pvmp3_decode_huff_cw_tab12;
-    pHuff[13].linbits = 0;
-    pHuff[13].pdec_huff_tab = pvmp3_decode_huff_cw_tab13;
-    pHuff[14].linbits = 0;
-    pHuff[14].pdec_huff_tab = pvmp3_decode_huff_cw_tab0; /* tbl 14 is not used */
-    pHuff[15].linbits = 0;
-    pHuff[15].pdec_huff_tab = pvmp3_decode_huff_cw_tab15;
-    pHuff[16].linbits = 1;
-    pHuff[16].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[17].linbits = 2;
-    pHuff[17].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[18].linbits = 3;
-    pHuff[18].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[19].linbits = 4;
-    pHuff[19].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[20].linbits = 6;
-    pHuff[20].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[21].linbits = 8;
-    pHuff[21].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[22].linbits = 10;
-    pHuff[22].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[23].linbits = 13;
-    pHuff[23].pdec_huff_tab = pvmp3_decode_huff_cw_tab16;
-    pHuff[24].linbits = 4;
-    pHuff[24].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[25].linbits = 5;
-    pHuff[25].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[26].linbits = 6;
-    pHuff[26].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[27].linbits = 7;
-    pHuff[27].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[28].linbits = 8;
-    pHuff[28].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[29].linbits = 9;
-    pHuff[29].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[30].linbits = 11;
-    pHuff[30].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[31].linbits = 13;
-    pHuff[31].pdec_huff_tab = pvmp3_decode_huff_cw_tab24;
-    pHuff[32].linbits = 0;
-    pHuff[32].pdec_huff_tab = pvmp3_decode_huff_cw_tab32;
-    pHuff[33].linbits = 0;
-    pHuff[33].pdec_huff_tab = pvmp3_decode_huff_cw_tab33;
-
-    /*
-     *  Initialize polysynthesis circular buffer mechanism
-     */
-    /* clear buffers */
-
-    pvmp3_resetDecoder(pMem);
-
-}
-
-
-/*----------------------------------------------------------------------------
-; FUNCTION CODE
-----------------------------------------------------------------------------*/
-
-
-void pvmp3_resetDecoder(void  *pMem)
-{
-
-    tmp3dec_file      *pVars;
-    tmp3dec_chan      *pChVars[CHAN];
-
-    pVars = (tmp3dec_file *)pMem;
-    pChVars[ LEFT] = &pVars->perChan[ LEFT];
-    pChVars[RIGHT] = &pVars->perChan[RIGHT];
-
-    pVars->frame_start = 0;
-
-    pVars->mainDataStream.offset = 0;
-
-    pVars->mainDataStream.pBuffer =  pVars->mainDataBuffer;
-    pVars->mainDataStream.usedBits = 0;
-
-
-    pVars->inputStream.usedBits = 0; // in bits
-
-
-    pChVars[ LEFT]->used_freq_lines = 575;
-    pChVars[RIGHT]->used_freq_lines = 575;
-
-
-    /*
-     *  Initialize polysynthesis circular buffer mechanism
-     */
-
-    pv_memset((void*)&pChVars[ LEFT]->circ_buffer[576],
-              0,
-              480*sizeof(pChVars[ LEFT]->circ_buffer[0]));
-    pv_memset((void*)&pChVars[RIGHT]->circ_buffer[576],
-              0,
-              480*sizeof(pChVars[RIGHT]->circ_buffer[0]));
-
-
-    pv_memset((void*)pChVars[ LEFT]->overlap,
-              0,
-              SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ LEFT]->overlap[0]));
-
-
-    pv_memset((void*)pChVars[ RIGHT]->overlap,
-              0,
-              SUBBANDS_NUMBER*FILTERBANK_BANDS*sizeof(pChVars[ RIGHT]->overlap[0]));
-
-
-
-
-
-    /*
-     *  Clear all the structures
-     */
-
-
-    pv_memset((void*)&pVars->scaleFactors[RIGHT],
-              0,
-              sizeof(mp3ScaleFactors));
-
-    pv_memset((void*)&pVars->scaleFactors[LEFT],
-              0,
-              sizeof(mp3ScaleFactors));
-
-    pv_memset((void*)&pVars->sideInfo,
-              0,
-              sizeof(mp3SideInfo));
-
-    pv_memset((void*)&pVars->sideInfo,
-              0,
-              sizeof(mp3SideInfo));
-
-}
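Taken together with the unit test removed further down, the deleted decoder source documents the library's external API: size the opaque state with pvmp3_decoderMemRequirements(), initialize it with pvmp3_InitDecoder(), then call pvmp3_framedecoder() once per frame through a filled-in tPVMP3DecoderExternal. A condensed sketch of that driver loop, assuming the caller already has a complete MP3 frame in a buffer and with error handling trimmed:

```cpp
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "pvmp3decoder_api.h"  // tPVMP3DecoderExternal and the pvmp3_* entry points

// Decode one frame already sitting in inputBuf/inputLen into outputBuf
// (capacity given in int16 samples across all channels).
static ERROR_CODE decodeOneFrame(void *decoderState, tPVMP3DecoderExternal *config,
                                 uint8_t *inputBuf, uint32_t inputLen,
                                 int16_t *outputBuf, uint32_t outputCapacity) {
    config->pInputBuffer = inputBuf;
    config->inputBufferCurrentLength = inputLen;
    config->inputBufferUsedLength = 0;
    config->pOutputBuffer = outputBuf;
    config->outputFrameSize = outputCapacity;
    return pvmp3_framedecoder(config, decoderState);
}

int main() {
    tPVMP3DecoderExternal config;
    memset(&config, 0, sizeof(config));
    config.equalizerType = flat;   // same defaults the removed test used
    config.crcEnabled = false;

    void *decoderState = malloc(pvmp3_decoderMemRequirements());
    pvmp3_InitDecoder(&config, decoderState);

    // ... feed frames through decodeOneFrame(); on NO_DECODING_ERROR the fields
    // config.samplingRate, config.num_channels and config.outputFrameSize
    // describe the decoded PCM ...

    pvmp3_resetDecoder(decoderState);  // reuse the same state for a new stream
    free(decoderState);
    return 0;
}
```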
diff --git a/media/libstagefright/codecs/mp3dec/test/Android.bp b/media/libstagefright/codecs/mp3dec/test/Android.bp
deleted file mode 100644
index 0ff8b12..0000000
--- a/media/libstagefright/codecs/mp3dec/test/Android.bp
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-cc_test {
-    name: "Mp3DecoderTest",
-    gtest: true,
-
-    srcs: [
-        "mp3reader.cpp",
-        "Mp3DecoderTest.cpp",
-    ],
-
-    static_libs: [
-        "libstagefright_mp3dec",
-        "libsndfile",
-        "libaudioutils",
-    ],
-
-    shared_libs: [
-        "liblog",
-    ],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-
-    sanitize: {
-        cfi: true,
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-}
diff --git a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml b/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
deleted file mode 100644
index 233f9bb..0000000
--- a/media/libstagefright/codecs/mp3dec/test/AndroidTest.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2020 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Test module config for Mp3 Decoder unit test">
-    <option name="test-suite-tag" value="Mp3DecoderTest" />
-    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="true" />
-        <option name="push" value="Mp3DecoderTest->/data/local/tmp/Mp3DecoderTest" />
-        <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mp3dec/test/Mp3DecoderTest-1.1.zip?unzip=true"
-            value="/data/local/tmp/Mp3DecoderTestRes/" />
-    </target_preparer>
-
-    <test class="com.android.tradefed.testtype.GTest" >
-        <option name="native-test-device-path" value="/data/local/tmp" />
-        <option name="module-name" value="Mp3DecoderTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/Mp3DecoderTestRes/" />
-    </test>
-</configuration>
diff --git a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
deleted file mode 100644
index 0784c0c..0000000
--- a/media/libstagefright/codecs/mp3dec/test/Mp3DecoderTest.cpp
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Mp3DecoderTest"
-
-#include <utils/Log.h>
-
-#include <audio_utils/sndfile.h>
-#include <stdio.h>
-
-#include "mp3reader.h"
-#include "pvmp3decoder_api.h"
-
-#include "Mp3DecoderTestEnvironment.h"
-
-#define OUTPUT_FILE "/data/local/tmp/mp3Decode.out"
-
-constexpr int32_t kInputBufferSize = 1024 * 10;
-constexpr int32_t kOutputBufferSize = 4608 * 2;
-constexpr int32_t kMaxCount = 10;
-constexpr int32_t kNumFrameReset = 150;
-
-static Mp3DecoderTestEnvironment *gEnv = nullptr;
-
-class Mp3DecoderTest : public ::testing::TestWithParam<string> {
-  public:
-    Mp3DecoderTest() : mConfig(nullptr) {}
-
-    ~Mp3DecoderTest() {
-        if (mConfig) {
-            delete mConfig;
-            mConfig = nullptr;
-        }
-    }
-
-    virtual void SetUp() override {
-        mConfig = new tPVMP3DecoderExternal{};
-        ASSERT_NE(mConfig, nullptr) << "Failed to initialize config. No Memory available";
-        mConfig->equalizerType = flat;
-        mConfig->crcEnabled = false;
-    }
-
-    tPVMP3DecoderExternal *mConfig;
-    Mp3Reader mMp3Reader;
-
-    ERROR_CODE DecodeFrames(void *decoderbuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
-                            int32_t frameCount = INT32_MAX);
-    SNDFILE *openOutputFile(SF_INFO *sfInfo);
-};
-
-ERROR_CODE Mp3DecoderTest::DecodeFrames(void *decoderBuf, SNDFILE *outFileHandle, SF_INFO sfInfo,
-                                        int32_t frameCount) {
-    uint8_t inputBuf[kInputBufferSize];
-    int16_t outputBuf[kOutputBufferSize];
-    uint32_t bytesRead;
-    ERROR_CODE decoderErr;
-    while (frameCount > 0) {
-        bool success = mMp3Reader.getFrame(inputBuf, &bytesRead);
-        if (!success) {
-            break;
-        }
-        mConfig->inputBufferCurrentLength = bytesRead;
-        mConfig->inputBufferMaxLength = 0;
-        mConfig->inputBufferUsedLength = 0;
-        mConfig->pInputBuffer = inputBuf;
-        mConfig->pOutputBuffer = outputBuf;
-        mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
-        decoderErr = pvmp3_framedecoder(mConfig, decoderBuf);
-        if (decoderErr != NO_DECODING_ERROR) break;
-        sf_writef_short(outFileHandle, outputBuf, mConfig->outputFrameSize / sfInfo.channels);
-        frameCount--;
-    }
-    return decoderErr;
-}
-
-SNDFILE *Mp3DecoderTest::openOutputFile(SF_INFO *sfInfo) {
-    memset(sfInfo, 0, sizeof(SF_INFO));
-    sfInfo->channels = mMp3Reader.getNumChannels();
-    sfInfo->format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
-    sfInfo->samplerate = mMp3Reader.getSampleRate();
-    SNDFILE *outFileHandle = sf_open(OUTPUT_FILE, SFM_WRITE, sfInfo);
-    return outFileHandle;
-}
-
-TEST_F(Mp3DecoderTest, MultiCreateMp3DecoderTest) {
-    size_t memRequirements = pvmp3_decoderMemRequirements();
-    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
-    ASSERT_NE(decoderBuf, nullptr)
-            << "Failed to allocate decoder memory of size " << memRequirements;
-    for (int count = 0; count < kMaxCount; count++) {
-        pvmp3_InitDecoder(mConfig, decoderBuf);
-        ALOGV("Decoder created successfully");
-    }
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
-}
-
-TEST_P(Mp3DecoderTest, DecodeTest) {
-    size_t memRequirements = pvmp3_decoderMemRequirements();
-    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
-    ASSERT_NE(decoderBuf, nullptr)
-            << "Failed to allocate decoder memory of size " << memRequirements;
-
-    pvmp3_InitDecoder(mConfig, decoderBuf);
-    ALOGV("Decoder created successfully");
-    string inputFile = gEnv->getRes() + GetParam();
-    bool status = mMp3Reader.init(inputFile.c_str());
-    ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
-
-    // Open the output file.
-    SF_INFO sfInfo;
-    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
-    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
-
-    ERROR_CODE decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
-    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
-    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
-    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
-    mMp3Reader.close();
-    sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
-}
-
-TEST_P(Mp3DecoderTest, ResetDecoderTest) {
-    size_t memRequirements = pvmp3_decoderMemRequirements();
-    ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
-    ASSERT_NE(decoderBuf, nullptr)
-            << "Failed to allocate decoder memory of size " << memRequirements;
-
-    pvmp3_InitDecoder(mConfig, decoderBuf);
-    ALOGV("Decoder created successfully.");
-    string inputFile = gEnv->getRes() + GetParam();
-    bool status = mMp3Reader.init(inputFile.c_str());
-    ASSERT_TRUE(status) << "Unable to initialize the mp3Reader";
-
-    // Open the output file.
-    SF_INFO sfInfo;
-    SNDFILE *outFileHandle = openOutputFile(&sfInfo);
-    ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
-
-    ERROR_CODE decoderErr;
-    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo, kNumFrameReset);
-    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
-    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
-    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
-    pvmp3_resetDecoder(decoderBuf);
-    // Decode the same file.
-    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
-    ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
-    ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
-    ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
-
-    mMp3Reader.close();
-    sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
-}
-
-INSTANTIATE_TEST_SUITE_P(Mp3DecoderTestAll, Mp3DecoderTest,
-                         ::testing::Values(("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3"),
-                                           ("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3"),
-                                           ("bug_136053885.mp3"),
-                                           ("bbb_mp3_stereo_192kbps_48000hz.mp3")));
-
-int main(int argc, char **argv) {
-    gEnv = new Mp3DecoderTestEnvironment();
-    ::testing::AddGlobalTestEnvironment(gEnv);
-    ::testing::InitGoogleTest(&argc, argv);
-    int status = gEnv->initFromOptions(argc, argv);
-    if (status == 0) {
-        status = RUN_ALL_TESTS();
-        ALOGV("Test result = %d\n", status);
-    }
-    return status;
-}
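For reference, the removed test wrote its decoded output through the audio_utils sndfile wrapper; the shape of that output path, reduced to a self-contained sketch that is independent of the decoder (the output path below is illustrative):

```cpp
#include <audio_utils/sndfile.h>  // same wrapper the removed test used
#include <string.h>

#include <vector>

// Write interleaved 16-bit PCM to a WAV file; numFrames counts samples per channel.
static bool writeWav(const char *path, const short *pcm, sf_count_t numFrames,
                     int channels, int sampleRate) {
    SF_INFO info;
    memset(&info, 0, sizeof(info));
    info.channels = channels;
    info.samplerate = sampleRate;
    info.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;

    SNDFILE *handle = sf_open(path, SFM_WRITE, &info);
    if (handle == nullptr) return false;
    sf_writef_short(handle, pcm, numFrames);
    sf_close(handle);
    return true;
}

int main() {
    std::vector<short> silence(2 * 48000, 0);  // one second of stereo silence
    return writeWav("/data/local/tmp/out.wav", silence.data(), 48000, 2, 48000) ? 0 : 1;
}
```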
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.bp b/media/libstagefright/codecs/mpeg2dec/Android.bp
index e849410..abd1379 100644
--- a/media/libstagefright/codecs/mpeg2dec/Android.bp
+++ b/media/libstagefright/codecs/mpeg2dec/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_mpeg2dec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 9d5f342..9f8001f 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -864,6 +864,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
         OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/on2/dec/Android.bp b/media/libstagefright/codecs/on2/dec/Android.bp
index 82bb8d1..ba6dc2a 100644
--- a/media/libstagefright/codecs/on2/dec/Android.bp
+++ b/media/libstagefright/codecs/on2/dec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_on2_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_on2_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_vpxdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 0f2ff17..bffc23a 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -355,6 +355,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/on2/enc/Android.bp b/media/libstagefright/codecs/on2/enc/Android.bp
index 705e554..e85ff98 100644
--- a/media/libstagefright/codecs/on2/enc/Android.bp
+++ b/media/libstagefright/codecs/on2/enc/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_on2_enc_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_on2_enc_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_vpxenc",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index d0cb071..e9b4341 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -775,6 +775,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/opus/dec/Android.bp b/media/libstagefright/codecs/opus/dec/Android.bp
index 71a2a0d..3d8af69 100644
--- a/media/libstagefright/codecs/opus/dec/Android.bp
+++ b/media/libstagefright/codecs/opus/dec/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_opusdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index 4f61aa8..dcd8dda 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -58,6 +58,8 @@
       mInputBufferCount(0),
       mDecoder(NULL),
       mHeader(NULL),
+      mNumChannels(1),
+      mSamplingRate(kRate),
       mCodecDelay(0),
       mSeekPreRoll(0),
       mAnchorTimeUs(0),
@@ -169,11 +171,11 @@
             }
 
             opusParams->nAudioBandWidth = 0;
-            opusParams->nSampleRate = kRate;
+            opusParams->nSampleRate = mSamplingRate;
             opusParams->nBitRate = 0;
 
             if (!isConfigured()) {
-                opusParams->nChannels = 1;
+                opusParams->nChannels = mNumChannels;
             } else {
                 opusParams->nChannels = mHeader->channels;
             }
@@ -274,7 +276,8 @@
             if (opusParams->nPortIndex != 0) {
                 return OMX_ErrorUndefined;
             }
-
+            mNumChannels = opusParams->nChannels;
+            mSamplingRate = opusParams->nSampleRate;
             return OMX_ErrorNone;
         }
 
@@ -496,6 +499,8 @@
                                    *(reinterpret_cast<int64_t*>(inHeader->pBuffer +
                                                                 inHeader->nOffset)),
                                    kRate);
+                mSamplingRate = kRate;
+                mNumChannels = mHeader->channels;
                 notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
                 mOutputPortSettingsChange = AWAITING_DISABLED;
             }
@@ -661,6 +666,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
index 91cafa1..00058c8 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.h
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -70,6 +70,8 @@
     OpusMSDecoder *mDecoder;
     OpusHeader *mHeader;
 
+    int32_t mNumChannels;
+    int32_t mSamplingRate;
     int64_t mCodecDelay;
     int64_t mSeekPreRoll;
     int64_t mSamplesToDiscard;
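The SoftOpus change above caches the channel count and sample rate supplied through the Opus set-parameter call and reports them until the Opus header has been parsed, at which point the header's channel count takes over. A minimal, illustrative sketch of that caching pattern (the class and method names are hypothetical, not the component's API):

```cpp
#include <cstdint>
#include <cstdio>

// Remember the values the client configured and report them until the
// bitstream header provides the authoritative ones.
class AudioPortConfig {
  public:
    void setParams(int32_t channels, int32_t sampleRate) {       // like the set-parameter path
        mNumChannels = channels;
        mSamplingRate = sampleRate;
    }
    void onHeaderParsed(int32_t channels, int32_t sampleRate) {  // like the Opus header path
        mNumChannels = channels;
        mSamplingRate = sampleRate;
        mConfigured = true;
    }
    void getParams(int32_t *channels, int32_t *sampleRate) const {
        *channels = mNumChannels;
        *sampleRate = mSamplingRate;
    }
    bool isConfigured() const { return mConfigured; }

  private:
    int32_t mNumChannels = 1;       // defaults mirror the initializers added above
    int32_t mSamplingRate = 48000;  // Opus output rate (kRate) is 48 kHz
    bool mConfigured = false;
};

int main() {
    AudioPortConfig port;
    port.setParams(2, 48000);       // client configures the port before any header arrives
    int32_t ch, rate;
    port.getParams(&ch, &rate);
    printf("pre-header: %d ch @ %d Hz\n", ch, rate);
    return 0;
}
```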
diff --git a/media/libstagefright/codecs/raw/Android.bp b/media/libstagefright/codecs/raw/Android.bp
index fcc7a0a..3673786 100644
--- a/media/libstagefright/codecs/raw/Android.bp
+++ b/media/libstagefright/codecs/raw/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_raw_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_raw_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_rawdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/raw/SoftRaw.cpp b/media/libstagefright/codecs/raw/SoftRaw.cpp
index 0e31804..82dd171 100644
--- a/media/libstagefright/codecs/raw/SoftRaw.cpp
+++ b/media/libstagefright/codecs/raw/SoftRaw.cpp
@@ -273,6 +273,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.bp b/media/libstagefright/codecs/vorbis/dec/Android.bp
index 3efb952..7764294 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.bp
+++ b/media/libstagefright/codecs/vorbis/dec/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_codecs_vorbis_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_codecs_vorbis_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_vorbisdec",
     defaults: ["libstagefright_softomx-defaults"],
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 08e20cc..3daed10 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -636,6 +636,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent *createSoftOMXComponent(
         const char *name, const OMX_CALLBACKTYPE *callbacks,
         OMX_PTR appData, OMX_COMPONENTTYPE **component) {
diff --git a/media/libstagefright/codecs/xaacdec/Android.bp b/media/libstagefright/codecs/xaacdec/Android.bp
index 5385dbc..1d03c16 100644
--- a/media/libstagefright/codecs/xaacdec/Android.bp
+++ b/media/libstagefright/codecs/xaacdec/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_soft_xaacdec",
     defaults: ["libstagefright_softomx-defaults"],
@@ -14,6 +23,9 @@
         // integer_overflow: true,
         misc_undefined: [ "signed-integer-overflow", "unsigned-integer-overflow", ],
         cfi: true,
+        config: {
+            cfi_assembly_support: true,
+        },
     },
 
     static_libs: ["libxaacdec"],
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
index 87e8fd4..a478642 100644
--- a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
+++ b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
@@ -1693,6 +1693,7 @@
 
 }  // namespace android
 
+__attribute__((cfi_canonical_jump_table))
 android::SoftOMXComponent* createSoftOMXComponent(const char* name,
                                                   const OMX_CALLBACKTYPE* callbacks,
                                                   OMX_PTR appData, OMX_COMPONENTTYPE** component) {
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 6b08b08..06cebd3 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_colorconversion_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_colorconversion_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libstagefright_color_conversion",
 
diff --git a/media/libstagefright/data/Android.bp b/media/libstagefright/data/Android.bp
index 616b4b3..6da2ccb 100644
--- a/media/libstagefright/data/Android.bp
+++ b/media/libstagefright/data/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 prebuilt_etc {
     name: "mediaswcodec.xml",
     src: "media_codecs_sw.xml",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dd2eed3..a4e3425 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -183,7 +183,7 @@
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="!slow-cpu">
-            <Limit name="size" min="2x2" max="1920x1080" />
+            <Limit name="size" min="2x2" max="2048x2048" />
             <Limit name="alignment" value="2x2" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
@@ -237,7 +237,7 @@
             <Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
             <Limit name="bitrate" range="500-512000" />
             <Limit name="complexity" range="0-10"  default="5" />
-            <Feature name="bitrate-modes" value="CBR" />
+            <Feature name="bitrate-modes" value="CBR,VBR" />
         </MediaCodec>
         <MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
             <Alias name="OMX.google.h263.encoder" />
@@ -274,6 +274,9 @@
                 <Limit name="bitrate" range="1-2000000" />
             </Variant>
             <Feature name="intra-refresh" />
+            <!-- Video Quality control -->
+            <!-- supports QP bounding with standard keys -->
+            <Feature name="qp-bounds" />
         </MediaCodec>
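The new "qp-bounds" feature advertises that the software AVC encoder honors QP clamping requested through the standard format keys. A minimal client-side sketch through the NDK follows; the "video-qp-min"/"video-qp-max" key strings are assumed standard MediaFormat keys and are not part of this patch.

    #include <media/NdkMediaFormat.h>

    // Hedged sketch: request QP clamping on an AVC encoder format.
    // The "video-qp-min"/"video-qp-max" key strings are assumed, not
    // introduced by this change.
    static AMediaFormat *makeAvcFormatWithQpBounds() {
        AMediaFormat *fmt = AMediaFormat_new();
        AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, 1280);
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, 720);
        AMediaFormat_setInt32(fmt, "video-qp-min", 10);
        AMediaFormat_setInt32(fmt, "video-qp-max", 40);
        return fmt;  // caller releases with AMediaFormat_delete()
    }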
         <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.encoder" />
diff --git a/media/libstagefright/filters/Android.bp b/media/libstagefright/filters/Android.bp
index 88f30c4..acc9e87 100644
--- a/media/libstagefright/filters/Android.bp
+++ b/media/libstagefright/filters/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_static {
     name: "libstagefright_mediafilter",
 
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 32b2075..665aae1 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -1,7 +1,27 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_flac_dec_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_flac_dec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libstagefright_flacdec",
     vendor_available: true,
     min_sdk_version: "29",
+    host_supported: true,
 
     srcs: [
         "FLACDecoder.cpp",
@@ -20,12 +40,12 @@
     },
 
     shared_libs: [
-        "libaudioutils",
         "liblog",
     ],
 
     static_libs: [
         "libFLAC",
+        "libaudioutils", // needed for 'float_from_i32'
     ],
 
     export_static_lib_headers: [
@@ -33,6 +53,13 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
     ],
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index cef0bc6..f5e9532 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -433,7 +433,7 @@
             if (mBuffer == nullptr) {
                 mBufferDataSize = 0;
                 mBufferLen = 0;
-                ALOGE("decodeOneFrame: failed to allocate memory for input buffer");
+                ALOGE("addDataToBuffer: failed to allocate memory for input buffer");
                 return NO_MEMORY;
             }
             mBufferLen = mBufferDataSize + inBufferLen;
diff --git a/media/libstagefright/flac/dec/test/Android.bp b/media/libstagefright/flac/dec/test/Android.bp
index 70ca80a..a4c2735 100644
--- a/media/libstagefright/flac/dec/test/Android.bp
+++ b/media/libstagefright/flac/dec/test/Android.bp
@@ -14,6 +14,17 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_flac_dec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_flac_dec_license",
+    ],
+}
+
 cc_test {
     name: "FlacDecoderTest",
     gtest: true,
diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/libstagefright/foundation/ABuffer.cpp
index c8965d9..c79384c 100644
--- a/media/libstagefright/foundation/ABuffer.cpp
+++ b/media/libstagefright/foundation/ABuffer.cpp
@@ -67,7 +67,7 @@
 
 void ABuffer::setRange(size_t offset, size_t size) {
     CHECK_LE(offset, mCapacity);
-    CHECK_LE(offset + size, mCapacity);
+    CHECK_LE(size, mCapacity - offset);
 
     mRangeOffset = offset;
     mRangeLength = size;
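The rewritten CHECK avoids unsigned wrap-around: with size_t arithmetic, offset + size can overflow and slip past the old comparison, while size <= mCapacity - offset is wrap-free because the preceding CHECK already guarantees offset <= mCapacity. A small self-contained illustration of the difference:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
        const size_t capacity = 100, offset = 10;
        const size_t size = SIZE_MAX - 5;      // hostile size value
        // Old form: offset + size wraps to 4, so the bogus range "passes".
        assert(offset + size <= capacity);
        // New form: rejects the same input without any overflow.
        assert(!(size <= capacity - offset));
        return 0;
    }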
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 8a7c3eb..0a4e598 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -166,7 +166,7 @@
         }
         s.append("\n");
     }
-    write(fd, s.string(), s.size());
+    (void)write(fd, s.string(), s.size());
 }
 
 }  // namespace android
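Casting the result of write() to void makes the intentional discard explicit and silences -Wunused-result, which can fire because some C libraries declare write() with warn_unused_result. The pattern in isolation:

    #include <unistd.h>

    // Hedged sketch: best-effort debug dump where a short write is not actionable.
    static void dumpLine(int fd, const char *text, size_t len) {
        (void)write(fd, text, len);
    }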
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 7752bda..c2114b3 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -33,7 +33,7 @@
 
 #include <media/stagefright/foundation/hexdump.h>
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -54,13 +54,11 @@
 
 AMessage::AMessage(void)
     : mWhat(0),
-      mTarget(0),
-      mNumItems(0) {
+      mTarget(0) {
 }
 
 AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler)
-    : mWhat(what),
-      mNumItems(0) {
+    : mWhat(what) {
     setTarget(handler);
 }
 
@@ -89,13 +87,13 @@
 }
 
 void AMessage::clear() {
-    for (size_t i = 0; i < mNumItems; ++i) {
-        Item *item = &mItems[i];
-        delete[] item->mName;
-        item->mName = NULL;
-        freeItemValue(item);
+    // Each Item owns its name and value; release both before clearing the vector.
+    for (Item &item : mItems) {
+        delete[] item.mName;
+        item.mName = NULL;
+        freeItemValue(&item);
     }
-    mNumItems = 0;
+    mItems.clear();
 }
 
 void AMessage::freeItemValue(Item *item) {
@@ -157,7 +155,7 @@
     size_t memchecks = 0;
 #endif
     size_t i = 0;
-    for (; i < mNumItems; i++) {
+    for (; i < mItems.size(); i++) {
         if (len != mItems[i].mNameLength) {
             continue;
         }
@@ -172,7 +170,7 @@
     {
         Mutex::Autolock _l(gLock);
         ++gFindItemCalls;
-        gAverageNumItems += mNumItems;
+        gAverageNumItems += mItems.size();
         gAverageNumMemChecks += memchecks;
         gAverageNumChecks += i;
         reportStats();
@@ -188,20 +186,26 @@
     memcpy((void*)mName, name, len + 1);
 }
 
+AMessage::Item::Item(const char *name, size_t len)
+    : mType(kTypeInt32) {
+    // mName and mNameLength are initialized by setName
+    setName(name, len);
+}
+
 AMessage::Item *AMessage::allocateItem(const char *name) {
     size_t len = strlen(name);
     size_t i = findItemIndex(name, len);
     Item *item;
 
-    if (i < mNumItems) {
+    if (i < mItems.size()) {
         item = &mItems[i];
         freeItemValue(item);
     } else {
-        CHECK(mNumItems < kMaxNumItems);
-        i = mNumItems++;
+        CHECK(mItems.size() < kMaxNumItems);
+        i = mItems.size();
+        // place a 'blank' item at the end - this is of type kTypeInt32
+        mItems.emplace_back(name, len);
         item = &mItems[i];
-        item->mType = kTypeInt32;
-        item->setName(name, len);
     }
 
     return item;
@@ -210,7 +214,7 @@
 const AMessage::Item *AMessage::findItem(
         const char *name, Type type) const {
     size_t i = findItemIndex(name, strlen(name));
-    if (i < mNumItems) {
+    if (i < mItems.size()) {
         const Item *item = &mItems[i];
         return item->mType == type ? item : NULL;
 
@@ -220,7 +224,7 @@
 
 bool AMessage::findAsFloat(const char *name, float *value) const {
     size_t i = findItemIndex(name, strlen(name));
-    if (i < mNumItems) {
+    if (i < mItems.size()) {
         const Item *item = &mItems[i];
         switch (item->mType) {
             case kTypeFloat:
@@ -247,7 +251,7 @@
 
 bool AMessage::findAsInt64(const char *name, int64_t *value) const {
     size_t i = findItemIndex(name, strlen(name));
-    if (i < mNumItems) {
+    if (i < mItems.size()) {
         const Item *item = &mItems[i];
         switch (item->mType) {
             case kTypeInt64:
@@ -265,15 +269,16 @@
 
 bool AMessage::contains(const char *name) const {
     size_t i = findItemIndex(name, strlen(name));
-    return i < mNumItems;
+    return i < mItems.size();
 }
 
 #define BASIC_TYPE(NAME,FIELDNAME,TYPENAME)                             \
 void AMessage::set##NAME(const char *name, TYPENAME value) {            \
     Item *item = allocateItem(name);                                    \
-                                                                        \
-    item->mType = kType##NAME;                                          \
-    item->u.FIELDNAME = value;                                          \
+    if (item) {                                                         \
+        item->mType = kType##NAME;                                      \
+        item->u.FIELDNAME = value;                                      \
+    }                                                                   \
 }                                                                       \
                                                                         \
 /* NOLINT added to avoid incorrect warning/fix from clang.tidy */       \
@@ -298,8 +303,10 @@
 void AMessage::setString(
         const char *name, const char *s, ssize_t len) {
     Item *item = allocateItem(name);
-    item->mType = kTypeString;
-    item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+    if (item) {
+        item->mType = kTypeString;
+        item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+    }
 }
 
 void AMessage::setString(
@@ -310,10 +317,12 @@
 void AMessage::setObjectInternal(
         const char *name, const sp<RefBase> &obj, Type type) {
     Item *item = allocateItem(name);
-    item->mType = type;
+    if (item) {
+        item->mType = type;
 
-    if (obj != NULL) { obj->incStrong(this); }
-    item->u.refValue = obj.get();
+        if (obj != NULL) { obj->incStrong(this); }
+        item->u.refValue = obj.get();
+    }
 }
 
 void AMessage::setObject(const char *name, const sp<RefBase> &obj) {
@@ -326,22 +335,26 @@
 
 void AMessage::setMessage(const char *name, const sp<AMessage> &obj) {
     Item *item = allocateItem(name);
-    item->mType = kTypeMessage;
+    if (item) {
+        item->mType = kTypeMessage;
 
-    if (obj != NULL) { obj->incStrong(this); }
-    item->u.refValue = obj.get();
+        if (obj != NULL) { obj->incStrong(this); }
+        item->u.refValue = obj.get();
+    }
 }
 
 void AMessage::setRect(
         const char *name,
         int32_t left, int32_t top, int32_t right, int32_t bottom) {
     Item *item = allocateItem(name);
-    item->mType = kTypeRect;
+    if (item) {
+        item->mType = kTypeRect;
 
-    item->u.rectValue.mLeft = left;
-    item->u.rectValue.mTop = top;
-    item->u.rectValue.mRight = right;
-    item->u.rectValue.mBottom = bottom;
+        item->u.rectValue.mLeft = left;
+        item->u.rectValue.mTop = top;
+        item->u.rectValue.mRight = right;
+        item->u.rectValue.mBottom = bottom;
+    }
 }
 
 bool AMessage::findString(const char *name, AString *value) const {
@@ -466,18 +479,18 @@
 
 sp<AMessage> AMessage::dup() const {
     sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());
-    msg->mNumItems = mNumItems;
+    msg->mItems = mItems;
 
 #ifdef DUMP_STATS
     {
         Mutex::Autolock _l(gLock);
         ++gDupCalls;
-        gAverageDupItems += mNumItems;
+        gAverageDupItems += mItems.size();
         reportStats();
     }
 #endif
 
-    for (size_t i = 0; i < mNumItems; ++i) {
+    for (size_t i = 0; i < mItems.size(); ++i) {
         const Item *from = &mItems[i];
         Item *to = &msg->mItems[i];
 
@@ -560,7 +573,7 @@
     }
     s.append(") = {\n");
 
-    for (size_t i = 0; i < mNumItems; ++i) {
+    for (size_t i = 0; i < mItems.size(); ++i) {
         const Item &item = mItems[i];
 
         switch (item.mType) {
@@ -646,26 +659,27 @@
     return s;
 }
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
     int32_t what = parcel.readInt32();
     sp<AMessage> msg = new AMessage();
     msg->setWhat(what);
 
-    msg->mNumItems = static_cast<size_t>(parcel.readInt32());
-    if (msg->mNumItems > kMaxNumItems) {
+    size_t numItems = static_cast<size_t>(parcel.readInt32());
+    if (numItems > kMaxNumItems) {
         ALOGE("Too large number of items clipped.");
-        msg->mNumItems = kMaxNumItems;
+        numItems = kMaxNumItems;
     }
+    msg->mItems.resize(numItems);
 
-    for (size_t i = 0; i < msg->mNumItems; ++i) {
+    for (size_t i = 0; i < msg->mItems.size(); ++i) {
         Item *item = &msg->mItems[i];
 
         const char *name = parcel.readCString();
         if (name == NULL) {
             ALOGE("Failed reading name for an item. Parsing aborted.");
-            msg->mNumItems = i;
+            msg->mItems.resize(i);
             break;
         }
 
@@ -709,7 +723,7 @@
                 if (stringValue == NULL) {
                     ALOGE("Failed reading string value from a parcel. "
                         "Parsing aborted.");
-                    msg->mNumItems = i;
+                    msg->mItems.resize(i);
                     continue;
                     // The loop will terminate subsequently.
                 } else {
@@ -754,11 +768,9 @@
 
 void AMessage::writeToParcel(Parcel *parcel) const {
     parcel->writeInt32(static_cast<int32_t>(mWhat));
-    parcel->writeInt32(static_cast<int32_t>(mNumItems));
+    parcel->writeInt32(static_cast<int32_t>(mItems.size()));
 
-    for (size_t i = 0; i < mNumItems; ++i) {
-        const Item &item = mItems[i];
-
+    for (const Item &item : mItems) {
         parcel->writeCString(item.mName);
         parcel->writeInt32(static_cast<int32_t>(item.mType));
 
@@ -813,7 +825,7 @@
         }
     }
 }
-#endif  // __ANDROID_VNDK__
+#endif  // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 sp<AMessage> AMessage::changesFrom(const sp<const AMessage> &other, bool deep) const {
     if (other == NULL) {
@@ -828,8 +840,7 @@
         diff->setTarget(mHandler.promote());
     }
 
-    for (size_t i = 0; i < mNumItems; ++i) {
-        const Item &item = mItems[i];
+    for (const Item &item : mItems) {
         const Item *oitem = other->findItem(item.mName, item.mType);
         switch (item.mType) {
             case kTypeInt32:
@@ -936,11 +947,11 @@
 }
 
 size_t AMessage::countEntries() const {
-    return mNumItems;
+    return mItems.size();
 }
 
 const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
-    if (index >= mNumItems) {
+    if (index >= mItems.size()) {
         *type = kTypeInt32;
 
         return NULL;
@@ -953,7 +964,7 @@
 
 AMessage::ItemData AMessage::getEntryAt(size_t index) const {
     ItemData it;
-    if (index < mNumItems) {
+    if (index < mItems.size()) {
         switch (mItems[index].mType) {
             case kTypeInt32:    it.set(mItems[index].u.int32Value); break;
             case kTypeInt64:    it.set(mItems[index].u.int64Value); break;
@@ -986,7 +997,7 @@
 }
 
 status_t AMessage::setEntryNameAt(size_t index, const char *name) {
-    if (index >= mNumItems) {
+    if (index >= mItems.size()) {
         return BAD_INDEX;
     }
     if (name == nullptr) {
@@ -996,7 +1007,7 @@
         return OK; // name has not changed
     }
     size_t len = strlen(name);
-    if (findItemIndex(name, len) < mNumItems) {
+    if (findItemIndex(name, len) < mItems.size()) {
         return ALREADY_EXISTS;
     }
     delete[] mItems[index].mName;
@@ -1011,7 +1022,7 @@
     sp<AMessage> msgValue;
     sp<ABuffer> bufValue;
 
-    if (index >= mNumItems) {
+    if (index >= mItems.size()) {
         return BAD_INDEX;
     }
     if (!item.used()) {
@@ -1060,29 +1071,41 @@
 }
 
 status_t AMessage::removeEntryAt(size_t index) {
-    if (index >= mNumItems) {
+    if (index >= mItems.size()) {
         return BAD_INDEX;
     }
     // delete entry data and objects
-    --mNumItems;
     delete[] mItems[index].mName;
     mItems[index].mName = nullptr;
     freeItemValue(&mItems[index]);
 
     // swap entry with last entry and clear last entry's data
-    if (index < mNumItems) {
-        mItems[index] = mItems[mNumItems];
-        mItems[mNumItems].mName = nullptr;
-        mItems[mNumItems].mType = kTypeInt32;
+    size_t lastIndex = mItems.size() - 1;
+    if (index < lastIndex) {
+        mItems[index] = mItems[lastIndex];
+        mItems[lastIndex].mName = nullptr;
+        mItems[lastIndex].mType = kTypeInt32;
     }
+    mItems.pop_back();
     return OK;
 }
 
+status_t AMessage::removeEntryByName(const char *name) {
+    if (name == nullptr) {
+        return BAD_VALUE;
+    }
+    size_t index = findEntryByName(name);
+    if (index >= mItems.size()) {
+        return BAD_INDEX;
+    }
+    return removeEntryAt(index);
+}
+
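A usage sketch for the new removeEntryByName(), matching the error codes documented in AMessage.h and exercised by the AMessage_test added later in this patch:

    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/Errors.h>

    // Hedged sketch: drop an optional key; BAD_INDEX means the key was absent,
    // BAD_VALUE means the name was null.
    static void dropOptionalKey(const android::sp<android::AMessage> &msg) {
        if (msg->removeEntryByName("csd-0") != android::OK) {
            // nothing to remove (or null name); safe to ignore here
        }
    }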
 void AMessage::setItem(const char *name, const ItemData &item) {
     if (item.used()) {
         Item *it = allocateItem(name);
         if (it != nullptr) {
-            setEntryAt(it - mItems, item);
+            setEntryAt(it - &mItems[0], item);
         }
     }
 }
@@ -1097,11 +1120,11 @@
         return;
     }
 
-    for (size_t ix = 0; ix < other->mNumItems; ++ix) {
+    for (size_t ix = 0; ix < other->mItems.size(); ++ix) {
         Item *it = allocateItem(other->mItems[ix].mName);
         if (it != nullptr) {
             ItemData data = other->getEntryAt(ix);
-            setEntryAt(it - mItems, data);
+            setEntryAt(it - &mItems[0], data);
         }
     }
 }
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index 4bd186c..b1ed077 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -27,7 +27,7 @@
 #include "ADebug.h"
 #include "AString.h"
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -365,7 +365,7 @@
     return !strcasecmp(mData + mSize - suffixLen, suffix);
 }
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 AString AString::FromParcel(const Parcel &parcel) {
     size_t size = static_cast<size_t>(parcel.readInt32());
@@ -380,17 +380,21 @@
     }
     return err;
 }
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 AString AStringPrintf(const char *format, ...) {
     va_list ap;
     va_start(ap, format);
 
     char *buffer;
-    vasprintf(&buffer, format, ap);
+    int bufferSize = vasprintf(&buffer, format, ap);
 
     va_end(ap);
 
+    if (bufferSize < 0) {
+        return AString();
+    }
+
     AString result(buffer);
 
     free(buffer);
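Checking the vasprintf() return value matters because the output pointer is left undefined on failure; returning an empty AString is the safe fallback. The same defensive pattern in isolation (vasprintf is a GNU/BSD extension that bionic provides):

    #include <cstdarg>
    #include <cstdio>
    #include <cstdlib>
    #include <string>

    // Hedged sketch: only touch `buffer` when vasprintf reports success.
    static std::string formatString(const char *fmt, ...) {
        va_list ap;
        va_start(ap, fmt);
        char *buffer = nullptr;
        int len = vasprintf(&buffer, fmt, ap);
        va_end(ap);
        if (len < 0) {
            return std::string();   // allocation or encoding failure
        }
        std::string result(buffer, static_cast<size_t>(len));
        free(buffer);
        return result;
    }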
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index f440e00..dd2c66f 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_foundation_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_foundation_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_headers {
     name: "libstagefright_foundation_headers",
     export_include_dirs: ["include"],
@@ -12,6 +31,7 @@
     vndk: {
         enabled: true,
     },
+    host_supported: true,
     double_loadable: true,
     include_dirs: [
         "frameworks/av/include",
@@ -25,7 +45,9 @@
     ],
 
     header_libs: [
-        "libhardware_headers",
+        // libbinder_headers is only needed for the vendor variant that removes
+        // libbinder, but the vendor target below does not allow adding header_libs.
+        "libbinder_headers",
         "libstagefright_foundation_headers",
         "media_ndk_headers",
         "media_plugin_headers",
@@ -63,6 +85,7 @@
         "AudioPresentationInfo.cpp",
         "ByteUtils.cpp",
         "ColorUtils.cpp",
+        "ColorUtils_fill.cpp",
         "FoundationUtils.cpp",
         "MediaBuffer.cpp",
         "MediaBufferBase.cpp",
@@ -79,6 +102,7 @@
 
     target: {
         vendor: {
+            // TODO: add libbinder_headers here instead of above when it becomes supported
             exclude_shared_libs: [
                 "libbinder",
             ],
@@ -86,6 +110,14 @@
                 "-DNO_IMEMORY",
             ],
         },
+        apex: {
+            exclude_shared_libs: [
+                "libbinder",
+            ],
+        },
+        darwin: {
+            enabled: false,
+        },
     },
 
     clang: true,
@@ -117,3 +149,66 @@
         "-DNO_IMEMORY",
     ],
 }
+
+// This is linked into extractors in the media mainline module, so it must
+// build against NDK API level 29 in order to run on Android Q (API 29) and later.
+cc_library_static {
+    name: "libstagefright_foundation_colorutils_ndk",
+    host_supported: true,
+    vendor_available: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    shared_libs: [
+        "liblog",
+        "libutils",             // for sp<>
+        // This code calls into libmediandk, but its callers already load that
+        // library, so it is intentionally not listed here.
+        // "libmediandk",
+    ],
+
+    header_libs: [
+        // libbinder_headers is only needed for the vendor variant that removes
+        // libbinder, but the vendor target below does not allow adding header_libs.
+        "libbinder_headers",
+        "libstagefright_foundation_headers",
+        "media_ndk_headers",
+        "media_plugin_headers",
+    ],
+
+    local_include_dirs: [
+        "include/media/stagefright/foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    srcs: [
+        "ColorUtils_ndk.cpp",
+        "ColorUtils_fill.cpp",
+    ],
+
+    clang: true,
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+
+}
+
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..fa722b5 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -613,6 +613,35 @@
 }
 
 // static
+void ColorUtils::getColorConfigFromDataSpace(
+        const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer) {
+    uint32_t gfxRange =
+        (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+    uint32_t gfxStandard =
+        (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+    uint32_t gfxTransfer =
+        (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+
+    // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+    CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
+    CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
+    CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
+    // TRICKY: use & to ensure all three mappings are completed
+    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
+            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+        ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
+              "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
+              gfxRange, gfxStandard, gfxTransfer,
+              cuRange,    asString(cuRange),
+              cuStandard, asString(cuStandard),
+              cuTransfer, asString(cuTransfer));
+    }
+    *range    = cuRange;
+    *standard = cuStandard;
+    *transfer = cuTransfer;
+}
+
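A usage sketch for the new helper, which splits an android_dataspace into the three values that codec formats carry under "color-range", "color-standard" and "color-transfer" (the same keys read by getColorConfigFromFormat below):

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/ColorUtils.h>

    // Hedged sketch: copy a dataspace's color attributes into a format message.
    static void applyDataSpace(const android::sp<android::AMessage> &format,
                               android_dataspace dataspace) {
        int32_t range = 0, standard = 0, transfer = 0;
        android::ColorUtils::getColorConfigFromDataSpace(
                dataspace, &range, &standard, &transfer);
        format->setInt32("color-range", range);
        format->setInt32("color-standard", standard);
        format->setInt32("color-transfer", transfer);
    }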
+// static
 void ColorUtils::getColorConfigFromFormat(
         const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
     if (!format->findInt32("color-range", range)) {
@@ -693,13 +722,6 @@
             transfer, asString((ColorTransfer)transfer));
 }
 
-
-// static
-void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
-        const HDRStaticInfo &info, AMediaFormat *format) {
-    setHDRStaticInfoIntoFormat(info, format->mFormat);
-}
-
 // static
 void ColorUtils::setHDRStaticInfoIntoFormat(
         const HDRStaticInfo &info, sp<AMessage> &format) {
@@ -707,48 +729,7 @@
 
     // Convert the data in infoBuffer to little endian format as defined by CTA-861-3
     uint8_t *data = infoBuffer->data();
-    // Static_Metadata_Descriptor_ID
-    data[0] = info.mID;
-
-    // display primary 0
-    data[1] = LO_UINT16(info.sType1.mR.x);
-    data[2] = HI_UINT16(info.sType1.mR.x);
-    data[3] = LO_UINT16(info.sType1.mR.y);
-    data[4] = HI_UINT16(info.sType1.mR.y);
-
-    // display primary 1
-    data[5] = LO_UINT16(info.sType1.mG.x);
-    data[6] = HI_UINT16(info.sType1.mG.x);
-    data[7] = LO_UINT16(info.sType1.mG.y);
-    data[8] = HI_UINT16(info.sType1.mG.y);
-
-    // display primary 2
-    data[9] = LO_UINT16(info.sType1.mB.x);
-    data[10] = HI_UINT16(info.sType1.mB.x);
-    data[11] = LO_UINT16(info.sType1.mB.y);
-    data[12] = HI_UINT16(info.sType1.mB.y);
-
-    // white point
-    data[13] = LO_UINT16(info.sType1.mW.x);
-    data[14] = HI_UINT16(info.sType1.mW.x);
-    data[15] = LO_UINT16(info.sType1.mW.y);
-    data[16] = HI_UINT16(info.sType1.mW.y);
-
-    // MaxDisplayLuminance
-    data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
-    data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
-
-    // MinDisplayLuminance
-    data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
-    data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
-
-    // MaxContentLightLevel
-    data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
-    data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
-
-    // MaxFrameAverageLightLevel
-    data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
-    data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+    fillHdrStaticInfoBuffer(info, data);
 
     format->setBuffer("hdr-static-info", infoBuffer);
 }
diff --git a/media/libstagefright/foundation/ColorUtils_fill.cpp b/media/libstagefright/foundation/ColorUtils_fill.cpp
new file mode 100644
index 0000000..f07493e
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_fill.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+namespace android {
+
+// shortcut names for brevity in the following tables
+typedef ColorAspects CA;
+typedef ColorUtils CU;
+
+#define HI_UINT16(a) (((a) >> 8) & 0xFF)
+#define LO_UINT16(a) ((a) & 0xFF)
+
+// static
+void ColorUtils::fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data) {
+    // Static_Metadata_Descriptor_ID
+    data[0] = info.mID;
+
+    // display primary 0
+    data[1] = LO_UINT16(info.sType1.mR.x);
+    data[2] = HI_UINT16(info.sType1.mR.x);
+    data[3] = LO_UINT16(info.sType1.mR.y);
+    data[4] = HI_UINT16(info.sType1.mR.y);
+
+    // display primary 1
+    data[5] = LO_UINT16(info.sType1.mG.x);
+    data[6] = HI_UINT16(info.sType1.mG.x);
+    data[7] = LO_UINT16(info.sType1.mG.y);
+    data[8] = HI_UINT16(info.sType1.mG.y);
+
+    // display primary 2
+    data[9] = LO_UINT16(info.sType1.mB.x);
+    data[10] = HI_UINT16(info.sType1.mB.x);
+    data[11] = LO_UINT16(info.sType1.mB.y);
+    data[12] = HI_UINT16(info.sType1.mB.y);
+
+    // white point
+    data[13] = LO_UINT16(info.sType1.mW.x);
+    data[14] = HI_UINT16(info.sType1.mW.x);
+    data[15] = LO_UINT16(info.sType1.mW.y);
+    data[16] = HI_UINT16(info.sType1.mW.y);
+
+    // MaxDisplayLuminance
+    data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
+    data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
+
+    // MinDisplayLuminance
+    data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
+    data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
+
+    // MaxContentLightLevel
+    data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
+    data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
+
+    // MaxFrameAverageLightLevel
+    data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+    data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+}
+
+
+}  // namespace android
+
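The serialized layout is the 25-byte static metadata descriptor defined by CTA-861-3 (descriptor ID followed by little-endian primaries, white point, luminance range and content light levels), which is why callers size their buffers to 25 bytes. A hedged usage sketch:

    #include <cstdint>
    #include <media/stagefright/foundation/ColorUtils.h>

    // Hedged sketch: serialize HDR static info into the 25-byte descriptor
    // that "hdr-static-info" buffers carry.
    static void serializeHdrInfo(const android::HDRStaticInfo &info,
                                 uint8_t (&payload)[25]) {
        android::ColorUtils::fillHdrStaticInfoBuffer(info, payload);
    }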
diff --git a/media/libstagefright/foundation/ColorUtils_ndk.cpp b/media/libstagefright/foundation/ColorUtils_ndk.cpp
new file mode 100644
index 0000000..3ed2425
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_ndk.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
+        const HDRStaticInfo &info, AMediaFormat *format) {
+    uint8_t *data = (uint8_t *) malloc(25);  // fillHdrStaticInfoBuffer writes 25 bytes
+    if (data != NULL) {
+        fillHdrStaticInfoBuffer(info, data);
+        AMediaFormat_setBuffer(format, "hdr-static-info", data, 25);
+        free(data);
+    }
+}
+
+}  // namespace android
+
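Splitting the AMediaFormat-facing entry point into its own file keeps the NdkMediaFormat dependency isolated; the static colorutils_ndk library added in Android.bp compiles it together with ColorUtils_fill.cpp. A hedged usage sketch:

    #include <media/NdkMediaFormat.h>
    #include <media/stagefright/foundation/ColorUtils.h>

    // Hedged sketch: attach HDR static metadata to an NDK format; the 25-byte
    // buffer is copied into the format under the "hdr-static-info" key, as above.
    static void tagFormatWithHdr(AMediaFormat *format,
                                 const android::HDRStaticInfo &info) {
        android::ColorUtils::setHDRStaticInfoIntoAMediaFormat(info, format);
    }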
diff --git a/media/libstagefright/foundation/MediaBuffer.cpp b/media/libstagefright/foundation/MediaBuffer.cpp
index 8e245dc..68df21f 100644
--- a/media/libstagefright/foundation/MediaBuffer.cpp
+++ b/media/libstagefright/foundation/MediaBuffer.cpp
@@ -51,12 +51,12 @@
       mRangeLength(size),
       mOwnsData(true),
       mMetaData(new MetaDataBase) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
     if (size < kSharedMemThreshold
             || std::atomic_load_explicit(&mUseSharedMemory, std::memory_order_seq_cst) == 0) {
 #endif
         mData = malloc(size);
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
     } else {
         ALOGV("creating memoryDealer");
         size_t newSize = 0;
diff --git a/media/libstagefright/foundation/MediaBufferGroup.cpp b/media/libstagefright/foundation/MediaBufferGroup.cpp
index 3c25047..fc98f28 100644
--- a/media/libstagefright/foundation/MediaBufferGroup.cpp
+++ b/media/libstagefright/foundation/MediaBufferGroup.cpp
@@ -62,7 +62,7 @@
         mInternal->mGrowthLimit = buffers;
     }
 
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
     if (buffer_size >= kSharedMemoryThreshold) {
         ALOGD("creating MemoryDealer");
         // Using a single MemoryDealer is efficient for a group of shared memory objects.
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index a08fed1..ada5d81 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -20,6 +20,7 @@
 
 const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
 const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC = "image/vnd.android.heic";
+const char *MEDIA_MIMETYPE_IMAGE_AVIF = "image/avif";
 
 const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
 const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
@@ -57,6 +58,8 @@
 const char *MEDIA_MIMETYPE_AUDIO_EAC3 = "audio/eac3";
 const char *MEDIA_MIMETYPE_AUDIO_EAC3_JOC = "audio/eac3-joc";
 const char *MEDIA_MIMETYPE_AUDIO_AC4 = "audio/ac4";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1 = "audio/mha1";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1 = "audio/mhm1";
 const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED = "audio/scrambled";
 const char *MEDIA_MIMETYPE_AUDIO_ALAC = "audio/alac";
 const char *MEDIA_MIMETYPE_AUDIO_WMA = "audio/x-ms-wma";
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index 8174597..7f48cfd 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaData.h>
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -48,7 +48,7 @@
 MetaData::~MetaData() {
 }
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 /* static */
 sp<MetaData> MetaData::createFromParcel(const Parcel &parcel) {
 
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/libstagefright/foundation/MetaDataBase.cpp
index 4b439c6..3f050ea 100644
--- a/media/libstagefright/foundation/MetaDataBase.cpp
+++ b/media/libstagefright/foundation/MetaDataBase.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaDataBase.h>
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -452,7 +452,7 @@
     }
 }
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 status_t MetaDataBase::writeToParcel(Parcel &parcel) {
     status_t ret;
     size_t numItems = mInternalData->mItems.size();
@@ -532,7 +532,7 @@
     ALOGW("no metadata in parcel");
     return UNKNOWN_ERROR;
 }
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 }  // namespace android
 
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index 784e802..30d0ae6 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -146,6 +146,10 @@
 int WriteOpusHeader(const OpusHeader &header, int input_sample_rate,
                     uint8_t* output, size_t output_size) {
     // See https://wiki.xiph.org/OggOpus#ID_Header.
+    if (header.channels < 1 || header.channels > kMaxChannels) {
+        ALOGE("Invalid channel count: %d", header.channels);
+        return -1;
+    }
     const size_t total_size = kOpusHeaderStreamMapOffset + header.channels;
     if (output_size < total_size) {
         ALOGE("Output buffer too small for header.");
diff --git a/media/libstagefright/foundation/TEST_MAPPING b/media/libstagefright/foundation/TEST_MAPPING
index 3301c4b..a70c352 100644
--- a/media/libstagefright/foundation/TEST_MAPPING
+++ b/media/libstagefright/foundation/TEST_MAPPING
@@ -1,5 +1,14 @@
+// mappings for frameworks/av/media/libstagefright/foundation
 {
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "OpusHeaderTest" }
+  ],
+
   "presubmit": [
-    { "name": "sf_foundation_test" }
+    { "name": "sf_foundation_test" },
+    { "name": "MetaDataBaseUnitTest"}
   ]
 }
diff --git a/media/libstagefright/foundation/avc_utils.cpp b/media/libstagefright/foundation/avc_utils.cpp
index f53d2c9..9d6887c 100644
--- a/media/libstagefright/foundation/avc_utils.cpp
+++ b/media/libstagefright/foundation/avc_utils.cpp
@@ -559,11 +559,9 @@
     CHECK_NE(video_object_type_indication,
              0x21u /* Fine Granularity Scalable */);
 
-    unsigned video_object_layer_verid __unused;
-    unsigned video_object_layer_priority __unused;
     if (br.getBits(1)) {
-        video_object_layer_verid = br.getBits(4);
-        video_object_layer_priority = br.getBits(3);
+        br.skipBits(4); // video_object_layer_verid
+        br.skipBits(3); // video_object_layer_priority
     }
     unsigned aspect_ratio_info = br.getBits(4);
     if (aspect_ratio_info == 0x0f /* extended PAR */) {
@@ -622,7 +620,7 @@
     unsigned video_object_layer_height = br.getBits(13);
     CHECK(br.getBits(1));  // marker_bit
 
-    unsigned interlaced __unused = br.getBits(1);
+    br.skipBits(1); // interlaced
 
     *width = video_object_layer_width;
     *height = video_object_layer_height;
@@ -668,7 +666,7 @@
         return false;
     }
 
-    unsigned protection __unused = (header >> 16) & 1;
+    // the protection bit, (header >> 16) & 1, is not needed here
 
     unsigned bitrate_index = (header >> 12) & 0x0f;
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
index ab17a02..e4b99bf 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
@@ -148,7 +148,8 @@
     static char *GetDebugName(const char *name);
 
     inline static bool isExperimentEnabled(
-            const char *name __unused /* nonnull */, bool allow __unused = true) {
+            const char *name __attribute__((unused)) /* nonnull */,
+            bool allow __attribute__((unused)) = true) {
 #ifdef ENABLE_STAGEFRIGHT_EXPERIMENTS
         if (!strcmp(name, "legacy-adaptive")) {
             return getExperimentFlag(allow, name, 2, 1); // every other day
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
index b5d6666..960212a 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
@@ -24,6 +24,8 @@
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
 
+#include <vector>
+
 namespace android {
 
 struct ABuffer;
@@ -63,7 +65,7 @@
     AMessage();
     AMessage(uint32_t what, const sp<const AHandler> &handler);
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
     // Construct an AMessage from a parcel.
     // nestingAllowed determines how many levels AMessage can be nested inside
     // AMessage. The default value here is arbitrarily set to 255.
@@ -88,13 +90,14 @@
     // All items in the AMessage must have types that are recognized by
     // FromParcel(); otherwise, TRESPASS error will occur.
     void writeToParcel(Parcel *parcel) const;
-#endif
+#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
     void setWhat(uint32_t what);
     uint32_t what() const;
 
     void setTarget(const sp<const AHandler> &handler);
 
+    // removes all items
     void clear();
 
     void setInt32(const char *name, int32_t value);
@@ -261,6 +264,17 @@
      */
     status_t removeEntryAt(size_t index);
 
+    /**
+     * Removes an entry based on name.
+     *
+     * \param name  name of the entry
+     *
+     * \retval OK the entry was removed successfully
+     * \retval BAD_VALUE name is invalid (null)
+     * \retval BAD_INDEX name not found
+     */
+    status_t removeEntryByName(const char *name);
+
 protected:
     virtual ~AMessage();
 
@@ -291,16 +305,39 @@
         size_t      mNameLength;
         Type mType;
         void setName(const char *name, size_t len);
+        Item() : mName(nullptr), mNameLength(0), mType(kTypeInt32) { }
+        Item(const char *name, size_t length);
     };
 
     enum {
-        kMaxNumItems = 64
+        kMaxNumItems = 256
     };
-    Item mItems[kMaxNumItems];
-    size_t mNumItems;
+    std::vector<Item> mItems;
 
+    /**
+     * Allocates an item with the given key |name|. If the key already exists, the corresponding
+     * item value is freed. Otherwise a new item is added.
+     *
+     * This method currently asserts if the number of elements would exceed the max number of
+     * elements allowed (kMaxNumItems). This is a security precaution to avoid arbitrarily large
+     * AMessage structures.
+     *
+     * @todo(b/192153245) Either revisit this security precaution, or change the behavior to
+     *      silently ignore keys added after the max number of elements are reached.
+     *
+     * @note All previously returned Item* pointers are deemed invalid after this call. (E.g. from
+     *       allocateItem or findItem)
+     *
+     * @param name the key for the requested item.
+     *
+     * @return Item* a pointer to the item.
+     */
     Item *allocateItem(const char *name);
+
+    /** Frees the value for the item. */
     void freeItemValue(Item *item);
+
+    /** Finds an item with given key |name| and |type|. Returns nullptr if item is not found. */
     const Item *findItem(const char *name, Type type) const;
 
     void setObjectInternal(
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
index deef0d4..517774b 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
@@ -89,7 +89,7 @@
 
     void tolower();
 
-#ifndef __ANDROID_VNDK__
+#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
     static AString FromParcel(const Parcel &parcel);
     status_t writeToParcel(Parcel *parcel) const;
 #endif
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
index af6b357..3b646dc 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
@@ -63,7 +63,7 @@
 
 template<class T>
 void ENSURE_UNSIGNED_TYPE() {
-    T TYPE_MUST_BE_UNSIGNED[(T)-1 < 0 ? -1 : 0] __unused;
+    T TYPE_MUST_BE_UNSIGNED[(T)-1 < 0 ? -1 : 0] __attribute__((unused));
 }
 
 // needle is in range [hayStart, hayStart + haySize)
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index cd0af2b..a2b6c4f 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -156,6 +156,10 @@
     // suited to blending. This requires implicit color space conversion on part of the device.
     static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
 
+    // Returns the platform color config values for the given |dataspace|.
+    static void getColorConfigFromDataSpace(
+            const android_dataspace &dataspace,
+            int32_t *range, int32_t *standard, int32_t *transfer);
+
     // converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
     static bool convertDataSpaceToV0(android_dataspace &dataSpace);
 
@@ -187,6 +191,8 @@
     static void setHDRStaticInfoIntoFormat(const HDRStaticInfo &info, sp<AMessage> &format);
     // writes |info| into format.
     static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
+    // (internal) used by the setHDRStaticInfoInto* routines
+    static void fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data);
 };
 
 inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 1f9e636..f5cecef 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -22,6 +22,7 @@
 
 extern const char *MEDIA_MIMETYPE_IMAGE_JPEG;
 extern const char *MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC;
+extern const char *MEDIA_MIMETYPE_IMAGE_AVIF;
 
 extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
 extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
@@ -59,6 +60,8 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_EAC3;
 extern const char *MEDIA_MIMETYPE_AUDIO_EAC3_JOC;
 extern const char *MEDIA_MIMETYPE_AUDIO_AC4;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
 extern const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED;
 extern const char *MEDIA_MIMETYPE_AUDIO_ALAC;
 extern const char *MEDIA_MIMETYPE_AUDIO_WMA;
@@ -89,6 +92,8 @@
     kAudioEncodingPcm16bit = 2,
     kAudioEncodingPcm8bit = 3,
     kAudioEncodingPcmFloat = 4,
+    kAudioEncodingPcm24bitPacked = 21,
+    kAudioEncodingPcm32bit = 22,
 };
 
 }  // namespace android
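The two new values extend the PCM encodings a track can advertise. A small hedged helper makes the sample widths explicit; it assumes the enclosing enum in MediaDefs.h is named AudioEncoding, which this hunk does not show.

    #include <cstddef>
    #include <media/stagefright/foundation/MediaDefs.h>

    // Hedged sketch: bytes per sample for the PCM AudioEncoding values,
    // including the newly added packed 24-bit and 32-bit encodings.
    static size_t bytesPerSample(android::AudioEncoding encoding) {
        switch (encoding) {
            case android::kAudioEncodingPcm8bit:        return 1;
            case android::kAudioEncodingPcm16bit:       return 2;
            case android::kAudioEncodingPcm24bitPacked: return 3;
            case android::kAudioEncodingPcm32bit:       return 4;
            case android::kAudioEncodingPcmFloat:       return 4;
            default:                                    return 0;  // unknown encoding
        }
    }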
diff --git a/media/libstagefright/foundation/tests/AMessage_test.cpp b/media/libstagefright/foundation/tests/AMessage_test.cpp
new file mode 100644
index 0000000..2b11326
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AMessage_test.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AData_test"
+
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+
+using namespace android;
+
+class AMessageTest : public ::testing::Test {
+};
+
+
+TEST(AMessage_tests, item_manipulation) {
+  sp<AMessage> m1 = new AMessage();
+
+  m1->setInt32("value", 2);
+  m1->setInt32("bar", 3);
+
+  int32_t i32;
+  EXPECT_TRUE(m1->findInt32("value", &i32));
+  EXPECT_EQ(2, i32);
+
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+
+  m1->setInt64("big", INT64_MAX);
+  m1->setInt64("smaller", INT64_MAX - 2);
+  m1->setInt64("smallest", 257);
+
+  int64_t i64;
+  EXPECT_TRUE(m1->findInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+
+  EXPECT_TRUE(m1->findInt64("smaller", &i64));
+  EXPECT_EQ(INT64_MAX - 2, i64);
+
+  m1->setSize("size1", 257);
+  m1->setSize("size2", 1023);
+
+  size_t sizing;
+  EXPECT_TRUE(m1->findSize("size2", &sizing));
+  EXPECT_EQ(1023, sizing);
+  EXPECT_TRUE(m1->findSize("size1", &sizing));
+  EXPECT_EQ(257, sizing);
+
+  m1->setDouble("precise", 10.5);
+  m1->setDouble("small", 0.125);
+
+  double d;
+  EXPECT_TRUE(m1->findDouble("precise", &d));
+  EXPECT_EQ(10.5, d);
+
+  EXPECT_TRUE(m1->findDouble("small", &d));
+  EXPECT_EQ(0.125, d);
+
+  // should be unchanged from the top of the test
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+  EXPECT_FALSE(m1->findInt32("nonesuch", &i32));
+  EXPECT_FALSE(m1->findInt64("nonesuch2", &i64));
+  // types disagree, not found
+  EXPECT_FALSE(m1->findInt32("big", &i32));
+  EXPECT_FALSE(m1->findInt32("precise", &i32));
+
+  // integral types should come back true
+  EXPECT_TRUE(m1->findAsInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+  EXPECT_TRUE(m1->findAsInt64("bar", &i64));
+  EXPECT_EQ(3, i64);
+  EXPECT_FALSE(m1->findAsInt64("precise", &i64));
+
+  // recovers ints, size, and floating point values
+  float value;
+  EXPECT_TRUE(m1->findAsFloat("value", &value));
+  EXPECT_EQ(2, value);
+  EXPECT_TRUE(m1->findAsFloat("smallest", &value));
+  EXPECT_EQ(257, value);
+  EXPECT_TRUE(m1->findAsFloat("size2", &value));
+  EXPECT_EQ(1023, value);
+  EXPECT_TRUE(m1->findAsFloat("precise", &value));
+  EXPECT_EQ(10.5, value);
+  EXPECT_TRUE(m1->findAsFloat("small", &value));
+  EXPECT_EQ(0.125, value);
+
+
+  // need to handle still:
+  // strings
+  // Object
+  // Buffer
+  // Message (nested)
+  //
+
+  // removal
+  m1->setInt32("shortlived", 2);
+  m1->setInt32("alittlelonger", 2);
+  EXPECT_EQ(OK, m1->removeEntryByName("shortlived"));
+  EXPECT_EQ(BAD_VALUE, m1->removeEntryByName(nullptr));
+  EXPECT_EQ(BAD_INDEX, m1->removeEntryByName("themythicalnonesuch"));
+  EXPECT_FALSE(m1->findInt32("shortlived", &i32));
+  EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
+
+  EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
+
+}
+
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
new file mode 100644
index 0000000..b28a7bc
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __AVC_UTILS_TEST_ENVIRONMENT_H__
+#define __AVC_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class AVCUtilsTestEnvironment : public ::testing::Environment {
+  public:
+    AVCUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int AVCUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __AVC_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
new file mode 100644
index 0000000..77a8599
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
@@ -0,0 +1,411 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AVCUtilsUnitTest"
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include "media/stagefright/foundation/ABitReader.h"
+#include "media/stagefright/foundation/avc_utils.h"
+
+#include "AVCUtilsTestEnvironment.h"
+
+constexpr size_t kSmallBufferSize = 2;
+constexpr uint8_t kSPSmask = 0x1f;
+constexpr uint8_t kSPSStartCode = 0x07;
+constexpr uint8_t kConfigVersion = 0x01;
+
+using namespace android;
+
+static AVCUtilsTestEnvironment *gEnv = nullptr;
+
+class MpegAudioUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*audioHeader*/ uint32_t, /*frameSize*/ int32_t, /*sampleRate*/ int32_t,
+                    /*numChannels*/ int32_t, /*bitRate*/ int32_t, /*numSamples*/ int32_t>> {};
+
+class VOLDimensionTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*volWidth*/ int32_t, /*volHeight*/ int32_t>> {};
+
+class AVCUtils {
+  public:
+    bool SetUpAVCUtils(string fileName, string infoFileName) {
+        mInputFile = gEnv->getRes() + fileName;
+        mInputFileStream.open(mInputFile, ifstream::in);
+        if (!mInputFileStream.is_open()) return false;
+
+        mInfoFile = gEnv->getRes() + infoFileName;
+        mInfoFileStream.open(mInfoFile, ifstream::in);
+        if (!mInputFileStream.is_open()) return false;
+        return true;
+    }
+
+    ~AVCUtils() {
+        if (mInputFileStream.is_open()) mInputFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    string mInputFile;
+    string mInfoFile;
+
+    ifstream mInputFileStream;
+    ifstream mInfoFileStream;
+};
+
+class AVCDimensionTest
+    : public AVCUtils,
+      public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*infoFileName*/ string,
+                    /*avcWidth*/ size_t, /*avcHeight*/ size_t, /*numberOfNALUnits*/ int32_t>> {
+  public:
+    virtual void SetUp() override {
+        tuple<string, string, size_t, size_t, size_t> params = GetParam();
+        string fileName = get<0>(params);
+        string infoFileName = get<1>(params);
+        AVCUtils::SetUpAVCUtils(fileName, infoFileName);
+
+        mFrameWidth = get<2>(params);
+        mFrameHeight = get<3>(params);
+        mNalUnitsExpected = get<4>(params);
+    }
+
+    size_t mFrameWidth;
+    size_t mFrameHeight;
+    int32_t mNalUnitsExpected;
+};
+
+class AvccBoxTest : public AVCDimensionTest {
+  public:
+    virtual void SetUp() override { AVCDimensionTest::SetUp(); }
+};
+
+class AVCFrameTest
+    : public AVCUtils,
+      public ::testing::TestWithParam<pair</*fileName*/ string, /*infoFileName*/ string>> {
+  public:
+    virtual void SetUp() override {
+        string fileName = GetParam().first;
+        string infoFileName = GetParam().second;
+        AVCUtils::SetUpAVCUtils(fileName, infoFileName);
+    }
+};
+
+TEST_P(MpegAudioUnitTest, AudioProfileTest) {
+    tuple<uint32_t, size_t, int, int, int, int> params = GetParam();
+    uint32_t header = get<0>(params);
+
+    size_t audioFrameSize = get<1>(params);
+    int audioSampleRate = get<2>(params);
+    int audioNumChannels = get<3>(params);
+    int audioBitRate = get<4>(params);
+    int audioNumSamples = get<5>(params);
+
+    size_t frameSize = 0;
+    int sampleRate = 0;
+    int numChannels = 0;
+    int bitRate = 0;
+    int numSamples = 0;
+
+    bool status = GetMPEGAudioFrameSize(header, &frameSize, &sampleRate, &numChannels, &bitRate,
+                                        &numSamples);
+    ASSERT_TRUE(status) << "Failed to get Audio properties";
+
+    ASSERT_EQ(frameSize, audioFrameSize) << "Wrong frame size found";
+
+    ASSERT_EQ(sampleRate, audioSampleRate) << "Wrong sample rate found";
+
+    ASSERT_EQ(numChannels, audioNumChannels) << "Wrong number of channels found";
+
+    ASSERT_EQ(bitRate, audioBitRate) << "Wrong bit rate found";
+
+    ASSERT_EQ(numSamples, audioNumSamples) << "Wrong number of samples found";
+}
+
+TEST_P(VOLDimensionTest, DimensionTest) {
+    tuple<string, int32_t, int32_t> params = GetParam();
+    string inputFile = gEnv->getRes() + get<0>(params);
+    ifstream inputFileStream;
+    inputFileStream.open(inputFile, ifstream::in);
+    ASSERT_TRUE(inputFileStream.is_open()) << "Failed to open: " << inputFile;
+
+    struct stat buf;
+    int8_t err = stat(inputFile.c_str(), &buf);
+    ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+    size_t fileSize = buf.st_size;
+    ASSERT_NE(fileSize, 0) << "Invalid file size found";
+
+    const uint8_t *volBuffer = new uint8_t[fileSize];
+    ASSERT_NE(volBuffer, nullptr) << "Failed to allocate VOL buffer of size: " << fileSize;
+
+    inputFileStream.read((char *)(volBuffer), fileSize);
+    ASSERT_EQ(inputFileStream.gcount(), fileSize)
+            << "Failed to read complete file, bytes read: " << inputFileStream.gcount();
+
+    int32_t width = get<1>(params);
+    int32_t height = get<2>(params);
+    int32_t volWidth = -1;
+    int32_t volHeight = -1;
+
+    bool status = ExtractDimensionsFromVOLHeader(volBuffer, fileSize, &volWidth, &volHeight);
+    ASSERT_TRUE(status)
+            << "Failed to get VOL dimensions from function: ExtractDimensionsFromVOLHeader()";
+
+    ASSERT_EQ(volWidth, width) << "Expected width: " << width << " Found: " << volWidth;
+
+    ASSERT_EQ(volHeight, height) << "Expected height: " << height << " Found: " << volHeight;
+
+    delete[] volBuffer;
+}
+
+TEST_P(AVCDimensionTest, DimensionTest) {
+    int32_t numNalUnits = 0;
+    int32_t avcWidth = -1;
+    int32_t avcHeight = -1;
+    string line;
+    string type;
+    size_t chunkLength;
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        const uint8_t *data = new uint8_t[chunkLength];
+        ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
+        // getNextNALUnit() advances 'data', so keep the original pointer for delete[].
+        const uint8_t *dataOriginal = data;
+
+        const uint8_t *nalStart;
+        size_t nalSize;
+
+        mInputFileStream.read((char *)data, chunkLength);
+        ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+        size_t smallBufferSize = kSmallBufferSize;
+        const uint8_t *sanityData = new uint8_t[smallBufferSize];
+        // Keep the original pointer: getNextNALUnit() may advance 'sanityData'.
+        const uint8_t *sanityDataOriginal = sanityData;
+        memcpy((void *)sanityData, (void *)data, smallBufferSize);
+
+        status_t result = getNextNALUnit(&sanityData, &smallBufferSize, &nalStart, &nalSize, true);
+        ASSERT_EQ(result, -EAGAIN) << "Invalid result found when a wrong NAL unit is passed";
+        delete[] sanityDataOriginal;
+
+        while (!getNextNALUnit(&data, &chunkLength, &nalStart, &nalSize, true)) {
+            numNalUnits++;
+            // Check if it's an SPS
+            if ((nalStart[0] & kSPSmask) != kSPSStartCode) continue;
+            ASSERT_TRUE(nalSize > 0) << "NAL unit size must be greater than 0";
+
+            sp<ABuffer> spsBuffer = new ABuffer(nalSize);
+            ASSERT_NE(spsBuffer, nullptr) << "ABuffer returned null for size: " << nalSize;
+
+            memcpy(spsBuffer->data(), nalStart, nalSize);
+            FindAVCDimensions(spsBuffer, &avcWidth, &avcHeight);
+            spsBuffer.clear();
+            ASSERT_EQ(avcWidth, mFrameWidth)
+                    << "Expected width: " << mFrameWidth << " Found: " << avcWidth;
+
+            ASSERT_EQ(avcHeight, mFrameHeight)
+                    << "Expected height: " << mFrameHeight << " Found: " << avcHeight;
+        }
+        delete[] dataOriginal;
+    }
+    if (mNalUnitsExpected < 0) {
+        ASSERT_GT(numNalUnits, 0) << "Failed to find an NAL Unit";
+    } else {
+        ASSERT_EQ(numNalUnits, mNalUnitsExpected)
+                << "Expected number of NAL units: " << mNalUnitsExpected
+                << " found: " << numNalUnits;
+    }
+}
+
+TEST_P(AvccBoxTest, AvccBoxValidationTest) {
+    int32_t avcWidth = -1;
+    int32_t avcHeight = -1;
+    int32_t accessUnitLength = 0;
+    int32_t profile = -1;
+    int32_t level = -1;
+    string line;
+    string type;
+    size_t chunkLength;
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+
+        if (type.compare("SPS") && type.compare("PPS")) continue;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        accessUnitLength += chunkLength;
+
+        if (!type.compare("SPS")) {
+            const uint8_t *data = new uint8_t[chunkLength];
+            ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
+            // getNextNALUnit() advances 'data', so keep the original pointer for delete[].
+            const uint8_t *dataOriginal = data;
+
+            const uint8_t *nalStart;
+            size_t nalSize;
+
+            mInputFileStream.read((char *)data, (uint32_t)chunkLength);
+            ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                    << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+            while (!getNextNALUnit(&data, &chunkLength, &nalStart, &nalSize, true)) {
+                // Check if it's an SPS
+                ASSERT_TRUE(nalSize > 0 && (nalStart[0] & kSPSmask) == kSPSStartCode)
+                        << "Failed to get SPS";
+
+                ASSERT_GE(nalSize, 4) << "SPS size must be greater than or equal to 4";
+
+                profile = nalStart[1];
+                level = nalStart[3];
+            }
+            delete[] dataOriginal;
+        }
+    }
+    const uint8_t *accessUnitData = new uint8_t[accessUnitLength];
+    ASSERT_NE(accessUnitData, nullptr) << "Failed to create a buffer of size: " << accessUnitLength;
+
+    mInputFileStream.seekg(0, ios::beg);
+    mInputFileStream.read((char *)accessUnitData, accessUnitLength);
+    ASSERT_EQ(mInputFileStream.gcount(), accessUnitLength)
+            << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+    sp<ABuffer> accessUnit = new ABuffer(accessUnitLength);
+    ASSERT_NE(accessUnit, nullptr)
+            << "Failed to create an android data buffer of size: " << accessUnitLength;
+
+    memcpy(accessUnit->data(), accessUnitData, accessUnitLength);
+    sp<ABuffer> csdDataBuffer = MakeAVCCodecSpecificData(accessUnit, &avcWidth, &avcHeight);
+    ASSERT_NE(csdDataBuffer, nullptr) << "No data returned from MakeAVCCodecSpecificData()";
+
+    ASSERT_EQ(avcWidth, mFrameWidth) << "Expected width: " << mFrameWidth << " Found: " << avcWidth;
+
+    ASSERT_EQ(avcHeight, mFrameHeight)
+            << "Expected height: " << mFrameHeight << " Found: " << avcHeight;
+
+    uint8_t *csdData = csdDataBuffer->data();
+    ASSERT_EQ(*csdData, kConfigVersion) << "Invalid configuration version";
+
+    ASSERT_GE(csdDataBuffer->size(), 4) << "CSD data size must be greater than or equal to 4";
+
+    ASSERT_EQ(*(csdData + 1), profile)
+            << "Expected AVC profile: " << profile << " found: " << *(csdData + 1);
+
+    ASSERT_EQ(*(csdData + 3), level)
+            << "Expected AVC level: " << level << " found: " << *(csdData + 3);
+    csdDataBuffer.clear();
+    delete[] accessUnitData;
+    accessUnit.clear();
+}
+
+TEST_P(AVCFrameTest, FrameTest) {
+    string line;
+    string type;
+    size_t chunkLength;
+    int32_t frameLayerID;
+    while (getline(mInfoFileStream, line)) {
+        uint32_t layerID = 0;
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength >> frameLayerID;
+        ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
+
+        char *data = new char[chunkLength];
+        ASSERT_NE(data, nullptr) << "Failed to allocate a data buffer of size: " << chunkLength;
+
+        mInputFileStream.read(data, chunkLength);
+        ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
+
+        if (!type.compare("IDR")) {
+            bool isIDR = IsIDR((uint8_t *)data, chunkLength);
+            ASSERT_TRUE(isIDR);
+
+            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
+        } else if (!type.compare("P") || !type.compare("B")) {
+            sp<ABuffer> accessUnit = new ABuffer(chunkLength);
+            ASSERT_NE(accessUnit, nullptr) << "Unable to create access Unit";
+
+            memcpy(accessUnit->data(), data, chunkLength);
+            bool isReferenceFrame = IsAVCReferenceFrame(accessUnit);
+            ASSERT_TRUE(isReferenceFrame);
+
+            accessUnit.clear();
+            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
+        }
+        delete[] data;
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, MpegAudioUnitTest,
+                         ::testing::Values(make_tuple(0xFFFB9204, 418, 44100, 2, 128, 1152),
+                                           make_tuple(0xFFFB7604, 289, 48000, 2, 96, 1152),
+                                           make_tuple(0xFFFE5604, 164, 48000, 2, 160, 384)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        AVCUtilsTestAll, AVCDimensionTest,
+        ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                     "crowd_8x8p50f32_200kbps_bp.info", 8, 8, 11),
+                          make_tuple("crowd_640x360p24f300_1000kbps_bp.h264",
+                                     "crowd_640x360p24f300_1000kbps_bp.info", 640, 360, 11),
+                          make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                     "crowd_1280x720p30f300_5000kbps_bp.info", 1280, 720, 12),
+                          make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                     "crowd_1920x1080p50f300_12000kbps_bp.info", 1920, 1080, 14),
+                          make_tuple("crowd_3840x2160p60f300_68000kbps_bp.h264",
+                                     "crowd_3840x2160p60f300_68000kbps_bp.info", 3840, 2160, 14)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        AVCUtilsTestAll, AvccBoxTest,
+        ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                     "crowd_8x8p50f32_200kbps_bp.info", 8, 8, 11),
+                          make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                     "crowd_1280x720p30f300_5000kbps_bp.info", 1280, 720, 12),
+                          make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                     "crowd_1920x1080p50f300_12000kbps_bp.info", 1920, 1080, 14)));
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, VOLDimensionTest,
+                         ::testing::Values(make_tuple("volData_720_480", 720, 480),
+                                           make_tuple("volData_1280_720", 1280, 720),
+                                           make_tuple("volData_1920_1080", 1920, 1080)));
+
+// Info File contains the type, length and layer ID for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(AVCUtilsTestAll, AVCFrameTest,
+                         ::testing::Values(make_tuple("crowd_8x8p50f32_200kbps_bp.h264",
+                                                      "crowd_8x8p50f32_200kbps_bp.info"),
+                                           make_tuple("crowd_640x360p24f300_1000kbps_bp.h264",
+                                                      "crowd_640x360p24f300_1000kbps_bp.info"),
+                                           make_tuple("crowd_1280x720p30f300_5000kbps_bp.h264",
+                                                      "crowd_1280x720p30f300_5000kbps_bp.info"),
+                                           make_tuple("crowd_1920x1080p50f300_12000kbps_bp.h264",
+                                                      "crowd_1920x1080p50f300_12000kbps_bp.info"),
+                                           make_tuple("crowd_3840x2160p60f300_68000kbps_bp.h264",
+                                                      "crowd_3840x2160p60f300_68000kbps_bp.info")));
+
+int main(int argc, char **argv) {
+    gEnv = new AVCUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/foundation/tests/AVCUtils/Android.bp b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
new file mode 100644
index 0000000..594da56
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_foundation_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_foundation_license",
+    ],
+}
+
+cc_test {
+    name: "AVCUtilsUnitTest",
+    gtest: true,
+
+    srcs: [
+        "AVCUtilsUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright/foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml b/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml
new file mode 100644
index 0000000..6a088a8
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for AVC Utils unit tests">
+    <option name="test-suite-tag" value="AVCUtilsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/AVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="AVCUtilsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/AVCUtilsUnitTest/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/foundation/tests/AVCUtils/README.md b/media/libstagefright/foundation/tests/AVCUtils/README.md
new file mode 100644
index 0000000..609d72e
--- /dev/null
+++ b/media/libstagefright/foundation/tests/AVCUtils/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### AVCUtils Test
+The AVC Utility Unit Test Suite validates the avc_utils library available in libstagefright/foundation.
+
+Run the following steps to build the test suite:
+```
+m AVCUtilsUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/AVCUtilsUnitTest/AVCUtilsUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/AVCUtilsUnitTest/AVCUtilsUnitTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.zip). Download, unzip, and push these files to the device for testing.
+
+```
+adb push AVCUtilsUnitTest /data/local/tmp/
+```
+
+usage: AVCUtilsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/AVCUtilsUnitTest -P /data/local/tmp/AVCUtilsUnitTest/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest AVCUtilsUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/libstagefright/foundation/tests/Android.bp
index f2157c9..e50742e 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/libstagefright/foundation/tests/Android.bp
@@ -1,3 +1,14 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_foundation_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_foundation_license",
+    ],
+}
+
 cc_test {
     name: "sf_foundation_test",
     test_suites: ["device-tests"],
@@ -19,9 +30,39 @@
 
     srcs: [
         "AData_test.cpp",
+        "AMessage_test.cpp",
         "Base64_test.cpp",
         "Flagged_test.cpp",
         "TypeTraits_test.cpp",
         "Utils_test.cpp",
     ],
 }
+
+cc_test {
+    name: "MetaDataBaseUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "MetaDataBaseUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp b/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
new file mode 100644
index 0000000..0aed4d2
--- /dev/null
+++ b/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
@@ -0,0 +1,288 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <fstream>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+
+constexpr int32_t kWidth1 = 1920;
+constexpr int32_t kHeight1 = 1080;
+constexpr int32_t kWidth2 = 1280;
+constexpr int32_t kHeight2 = 920;
+constexpr int32_t kWidth3 = 720;
+constexpr int32_t kHeight3 = 480;
+constexpr int32_t kProfile = 1;
+constexpr int32_t kLevel = 1;
+constexpr int32_t kPlatformValue = 1;
+
+// Rectangle margins
+constexpr int32_t kLeft = 100;
+constexpr int32_t kTop = 100;
+constexpr int32_t kRight = 100;
+constexpr int32_t kBottom = 100;
+
+constexpr int64_t kDurationUs = 60000000;
+
+constexpr float kCaptureRate = 30.0;
+
+namespace android {
+
+class MetaDataBaseUnitTest : public ::testing::Test {};
+
+TEST_F(MetaDataBaseUnitTest, CreateMetaDataBaseTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // Testing the copy constructor
+    MetaDataBase *metaDataCopy = new MetaDataBase(*metaData);
+    ASSERT_NE(metaDataCopy, nullptr) << "Failed to create a copy of the meta data";
+
+    delete metaDataCopy;
+    delete metaData;
+}
+
+TEST_F(MetaDataBaseUnitTest, SetAndFindDataTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // Setting the different key-value pair type for first time, overwrite
+    // expected to be false
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoProfile, kProfile);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoProfile, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoLevel, kLevel);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoLevel, overwrite is expected to be false";
+
+    status = metaData->setInt64(kKeyDuration, kDurationUs);
+    ASSERT_FALSE(status) << "Initializing kKeyDuration, overwrite is expected to be false";
+
+    status = metaData->setFloat(kKeyCaptureFramerate, kCaptureRate);
+    ASSERT_FALSE(status) << "Initializing kKeyCaptureFramerate, overwrite is expected to be false";
+
+    const int32_t *platform = &kPlatformValue;
+    status = metaData->setPointer(kKeyPlatformPrivate, (void *)platform);
+    ASSERT_FALSE(status) << "Initializing kKeyPlatformPrivate, overwrite is expected to be false";
+
+    status = metaData->setRect(kKeyCropRect, kLeft, kTop, kRight, kBottom);
+    ASSERT_FALSE(status) << "Initializing kKeyCropRect, overwrite is expected to be false";
+
+    // Dump to log for reference
+    metaData->dumpToLog();
+
+    // Find the data which was set
+    const char *mime;
+    status = metaData->findCString(kKeyMIMEType, &mime);
+    ASSERT_TRUE(status) << "kKeyMIMEType key does not exist in metadata";
+    ASSERT_STREQ(mime, MEDIA_MIMETYPE_VIDEO_AVC) << "Incorrect mime type returned";
+
+    int32_t width, height, profile, level;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_TRUE(status) << "kKeyWidth key does not exist in metadata";
+    ASSERT_EQ(width, kWidth1) << "Incorrect value of width returned";
+
+    status = metaData->findInt32(kKeyHeight, &height);
+    ASSERT_TRUE(status) << "kKeyHeight key does not exist in metadata";
+    ASSERT_EQ(height, kHeight1) << "Incorrect value of height returned";
+
+    status = metaData->findInt32(kKeyVideoProfile, &profile);
+    ASSERT_TRUE(status) << "kKeyVideoProfile key does not exist in metadata";
+    ASSERT_EQ(profile, kProfile) << "Incorrect value of profile returned";
+
+    status = metaData->findInt32(kKeyVideoLevel, &level);
+    ASSERT_TRUE(status) << "kKeyVideoLevel key does not exist in metadata";
+    ASSERT_EQ(level, kLevel) << "Incorrect value of level returned";
+
+    int64_t duration;
+    status = metaData->findInt64(kKeyDuration, &duration);
+    ASSERT_TRUE(status) << "kKeyDuration key does not exist in metadata";
+    ASSERT_EQ(duration, kDurationUs) << "Incorrect value of duration returned";
+
+    float frameRate;
+    status = metaData->findFloat(kKeyCaptureFramerate, &frameRate);
+    ASSERT_TRUE(status) << "kKeyCaptureFramerate key does not exist in metadata";
+    ASSERT_EQ(frameRate, kCaptureRate) << "Incorrect value of captureFrameRate returned";
+
+    int32_t top, bottom, left, right;
+    status = metaData->findRect(kKeyCropRect, &left, &top, &right, &bottom);
+    ASSERT_TRUE(status) << "kKeyCropRect key does not exist in metadata";
+    ASSERT_EQ(left, kLeft) << "Incorrect value of left margin returned";
+    ASSERT_EQ(top, kTop) << "Incorrect value of top margin returned";
+    ASSERT_EQ(right, kRight) << "Incorrect value of right margin returned";
+    ASSERT_EQ(bottom, kBottom) << "Incorrect value of bottom margin returned";
+
+    void *platformValue;
+    status = metaData->findPointer(kKeyPlatformPrivate, &platformValue);
+    ASSERT_TRUE(status) << "kKeyPlatformPrivate key does not exist in metadata";
+    ASSERT_EQ(platformValue, &kPlatformValue) << "Incorrect value of pointer returned";
+
+    // Check for the key which is not added to metadata
+    int32_t angle;
+    status = metaData->findInt32(kKeyRotation, &angle);
+    ASSERT_FALSE(status) << "Value for an invalid key is returned when the key is not set";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, OverWriteFunctionalityTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    // set/set/read to check first overwrite operation
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+    // Overwrite the value
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_TRUE(status) << "Setting kKeyMIMEType again, overwrite is expected to be true";
+    // Check the value
+    const char *mime;
+    status = metaData->findCString(kKeyMIMEType, &mime);
+    ASSERT_TRUE(status) << "kKeyMIMEType key does not exist in metadata";
+    ASSERT_STREQ(mime, MEDIA_MIMETYPE_VIDEO_HEVC) << "Mime value is not overwritten";
+
+    // set/set/set/read to check second overwrite operation
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    // Overwrite the value
+    status = metaData->setInt32(kKeyWidth, kWidth2);
+    ASSERT_TRUE(status) << "Setting kKeyWidth again, overwrite is expected to be true";
+    status = metaData->setInt32(kKeyHeight, kHeight2);
+    ASSERT_TRUE(status) << "Setting kKeyHeight again, overwrite is expected to be true";
+    // Overwrite the value again
+    status = metaData->setInt32(kKeyWidth, kWidth3);
+    ASSERT_TRUE(status) << "Setting kKeyWidth again, overwrite is expected to be true";
+    status = metaData->setInt32(kKeyHeight, kHeight3);
+    ASSERT_TRUE(status) << "Setting kKeyHeight again, overwrite is expected to be true";
+    // Check the value
+    int32_t width, height;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_TRUE(status) << "kKeyWidth key does not exist in metadata";
+    ASSERT_EQ(width, kWidth3) << "Value of width is not overwritten";
+
+    status = metaData->findInt32(kKeyHeight, &height);
+    ASSERT_TRUE(status) << "kKeyHeight key does not exist in metadata";
+    ASSERT_EQ(height, kHeight3) << "Value of height is not overwritten";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, RemoveKeyTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+    // Query the key
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_TRUE(status) << "MetaData does not have the mime key";
+
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_TRUE(status) << "Failed to remove the kKeyMIMEType key";
+
+    // Query the key
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_FALSE(status) << "MetaData has mime key after removing it, expected to be false";
+
+    // Remove the non-existent key
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_FALSE(status) << "Removed a non-existent key";
+
+    // Check overwriting the removed key
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_FALSE(status) << "Overwrite should be false since the key was removed";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+
+    // Clear whole metadata
+    metaData->clear();
+
+    // Check finding key after clearing the metadata
+    int32_t width;
+    status = metaData->findInt32(kKeyWidth, &width);
+    ASSERT_FALSE(status) << "MetaData found kKeyWidth key after clearing all the items in it, "
+                            "expected to be false";
+
+    // Query the key
+    status = metaData->hasData(kKeyWidth);
+    ASSERT_FALSE(status)
+            << "MetaData has width key after clearing all the items in it, expected to be false";
+
+    status = metaData->hasData(kKeyMIMEType);
+    ASSERT_FALSE(status)
+            << "MetaData has mime key after clearing all the items in it, expected to be false";
+
+    // Check removing key after clearing the metadata
+    status = metaData->remove(kKeyMIMEType);
+    ASSERT_FALSE(status) << "Removed the key, after clearing the metadata";
+
+    // Checking set after clearing the metadata
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Overwrite should be false since the metadata was cleared";
+
+    status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+    ASSERT_FALSE(status) << "Overwrite should be false since the metadata was cleared";
+
+    delete (metaData);
+}
+
+TEST_F(MetaDataBaseUnitTest, ConvertToStringTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    String8 info = metaData->toString();
+    ASSERT_EQ(info.length(), 0) << "Empty MetaData length is non-zero: " << info.length();
+
+    bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+    ASSERT_FALSE(status) << "Initializing kKeyMIMEType, overwrite is expected to be false";
+
+    status = metaData->setInt32(kKeyWidth, kWidth1);
+    ASSERT_FALSE(status) << "Initializing kKeyWidth, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyHeight, kHeight1);
+    ASSERT_FALSE(status) << "Initializing kKeyHeight, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoProfile, kProfile);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoProfile, overwrite is expected to be false";
+    status = metaData->setInt32(kKeyVideoLevel, kLevel);
+    ASSERT_FALSE(status) << "Initializing kKeyVideoLevel, overwrite is expected to be false";
+
+    info = metaData->toString();
+    ASSERT_GT(info.length(), 0) << "MetaData contains no information";
+
+    // Dump to log for reference
+    metaData->dumpToLog();
+
+    // Clear whole metadata
+    metaData->clear();
+
+    info = metaData->toString();
+    ASSERT_EQ(info.length(), 0) << "MetaData length is non-zero after clearing it: "
+                                << info.length();
+
+    delete (metaData);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/foundation/tests/OpusHeader/Android.bp b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
new file mode 100644
index 0000000..fa2b40e
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/Android.bp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_foundation_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_foundation_license",
+    ],
+}
+
+cc_test {
+    name: "OpusHeaderTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "OpusHeaderTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
diff --git a/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml b/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml
new file mode 100644
index 0000000..afee16a
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for opus header unit tests">
+    <option name="test-suite-tag" value="OpusHeaderTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="OpusHeaderTest->/data/local/tmp/OpusHeaderTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip?unzip=true"
+            value="/data/local/tmp/OpusHeaderTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="OpusHeaderTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/OpusHeaderTestRes/" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp
new file mode 100644
index 0000000..e39c915
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp
@@ -0,0 +1,342 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OpusHeaderTest"
+#include <utils/Log.h>
+
+#include <fstream>
+#include <stdio.h>
+#include <string.h>
+
+#include <media/stagefright/foundation/OpusHeader.h>
+
+#include "OpusHeaderTestEnvironment.h"
+
+using namespace android;
+
+#define OUTPUT_FILE_NAME "/data/local/tmp/OpusOutput"
+
+// Opus in WebM is a well-known, yet under-documented, format. The codec private data
+// of the track is an Opus Ogg header (https://tools.ietf.org/html/rfc7845#section-5.1)
+// channel mapping offset in opus header
+constexpr size_t kOpusHeaderStreamMapOffset = 21;
+constexpr size_t kMaxOpusHeaderSize = 100;
+// AOPUSHDR + AOPUSHDRLength +
+// (8 + 8 ) +
+// Header(csd) + num_streams + num_coupled + 1
+// (19 + 1 + 1 + 1) +
+// AOPUSDLY + AOPUSDLYLength + DELAY + AOPUSPRL + AOPUSPRLLength + PRL
+// (8 + 8 + 8 + 8 + 8 + 8)
+// = 86
+constexpr size_t kOpusHeaderChannelMapOffset = 86;
+constexpr uint32_t kOpusSampleRate = 48000;
+constexpr uint64_t kOpusSeekPrerollNs = 80000000;
+constexpr int64_t kNsecPerSec = 1000000000ll;
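+
+// A compile-time sanity check sketch: the field sizes listed in the comment
+// above kOpusHeaderChannelMapOffset should add up to 86.
+static_assert((8 + 8) + (19 + 1 + 1 + 1) + (8 + 8 + 8 + 8 + 8 + 8) == kOpusHeaderChannelMapOffset,
+              "kOpusHeaderChannelMapOffset does not match the documented header layout");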
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+constexpr int kMaxChannels = 8;
+constexpr uint8_t kOpusChannelMap[kMaxChannels][kMaxChannels] = {
+        {0},
+        {0, 1},
+        {0, 2, 1},
+        {0, 1, 2, 3},
+        {0, 4, 1, 2, 3},
+        {0, 4, 1, 2, 3, 5},
+        {0, 4, 1, 2, 3, 5, 6},
+        {0, 6, 1, 2, 3, 4, 5, 7},
+};
+
+static OpusHeaderTestEnvironment *gEnv = nullptr;
+
+class OpusHeaderTest {
+  public:
+    OpusHeaderTest() : mInputBuffer(nullptr) {}
+
+    ~OpusHeaderTest() {
+        if (mEleStream.is_open()) mEleStream.close();
+        if (mInputBuffer) {
+            free(mInputBuffer);
+            mInputBuffer = nullptr;
+        }
+    }
+    ifstream mEleStream;
+    uint8_t *mInputBuffer;
+};
+
+class OpusHeaderParseTest : public OpusHeaderTest,
+                            public ::testing::TestWithParam<
+                                    tuple<string /* InputFileName */, int32_t /* ChannelCount */,
+                                          bool /* isHeaderValid */, bool /* isCodecDelayValid */,
+                                          bool /* isSeekPreRollValid */, bool /* isInputValid */>> {
+};
+
+class OpusHeaderWriteTest
+    : public OpusHeaderTest,
+      public ::testing::TestWithParam<tuple<int32_t /* ChannelCount */, int32_t /* skipSamples */,
+                                            string /* referenceFile */>> {};
+
+TEST_P(OpusHeaderWriteTest, WriteTest) {
+    tuple<int32_t, int32_t, string> params = GetParam();
+    OpusHeader writtenHeader;
+    memset(&writtenHeader, 0, sizeof(writtenHeader));
+    int32_t channels = get<0>(params);
+    writtenHeader.channels = channels;
+    writtenHeader.num_streams = channels;
+    writtenHeader.channel_mapping = ((channels > 8) ? 255 : (channels > 2));
+    int32_t skipSamples = get<1>(params);
+    string referenceFileName = gEnv->getRes() + get<2>(params);
+    writtenHeader.skip_samples = skipSamples;
+    uint64_t codecDelayNs = skipSamples * kNsecPerSec / kOpusSampleRate;
+    uint8_t headerData[kMaxOpusHeaderSize];
+    int32_t headerSize = WriteOpusHeaders(writtenHeader, kOpusSampleRate, headerData,
+                                          sizeof(headerData), codecDelayNs, kOpusSeekPrerollNs);
+    ASSERT_GT(headerSize, 0) << "failed to generate Opus header";
+    ASSERT_LE(headerSize, kMaxOpusHeaderSize)
+            << "Invalid header written. Header size can't exceed kMaxOpusHeaderSize";
+
+    ofstream ostrm;
+    ostrm.open(OUTPUT_FILE_NAME, ofstream::binary);
+    ASSERT_TRUE(ostrm.is_open()) << "Failed to open output file " << OUTPUT_FILE_NAME;
+    ostrm.write(reinterpret_cast<char *>(headerData), headerSize);
+    ostrm.close();
+
+    mEleStream.open(referenceFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open referenceFileName " << get<2>(params);
+
+    struct stat buf;
+    int32_t statStatus = stat(referenceFileName.c_str(), &buf);
+    ASSERT_EQ(statStatus, 0) << "Unable to get file properties";
+
+    size_t fileSize = buf.st_size;
+    mInputBuffer = (uint8_t *)malloc(fileSize);
+    ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory. Malloc failed for size " << fileSize;
+
+    mEleStream.read(reinterpret_cast<char *>(mInputBuffer), fileSize);
+    ASSERT_EQ(mEleStream.gcount(), fileSize)
+            << "Failed to read the complete reference file, bytes read: " << mEleStream.gcount();
+
+    ASSERT_EQ(fileSize, headerSize)
+            << "Mismatch in size between header generated and reference header";
+    int32_t match = memcmp(reinterpret_cast<char *>(mInputBuffer),
+                           reinterpret_cast<char *>(headerData), fileSize);
+    ASSERT_EQ(match, 0) << "Opus header does not match reference file: " << referenceFileName;
+
+    size_t opusHeadSize = 0;
+    size_t codecDelayBufSize = 0;
+    size_t seekPreRollBufSize = 0;
+    void *opusHeadBuf = nullptr;
+    void *codecDelayBuf = nullptr;
+    void *seekPreRollBuf = nullptr;
+    bool status = GetOpusHeaderBuffers(headerData, headerSize, &opusHeadBuf, &opusHeadSize,
+                                       &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                       &seekPreRollBufSize);
+    ASSERT_TRUE(status) << "Encountered error in GetOpusHeaderBuffers";
+
+    uint64_t value = *((uint64_t *)codecDelayBuf);
+    ASSERT_EQ(value, codecDelayNs);
+
+    value = *((uint64_t *)seekPreRollBuf);
+    ASSERT_EQ(value, kOpusSeekPrerollNs);
+
+    OpusHeader parsedHeader;
+    status = ParseOpusHeader((uint8_t *)opusHeadBuf, opusHeadSize, &parsedHeader);
+    ASSERT_TRUE(status) << "Encountered error while Parsing Opus Header.";
+
+    ASSERT_EQ(writtenHeader.channels, parsedHeader.channels)
+            << "Invalid header generated. Mismatch between channel counts";
+
+    ASSERT_EQ(writtenHeader.skip_samples, parsedHeader.skip_samples)
+            << "Mismatch between no of skipSamples written "
+               "and no of skipSamples got after parsing";
+
+    ASSERT_EQ(writtenHeader.channel_mapping, parsedHeader.channel_mapping)
+            << "Mismatch between channelMapping written "
+               "and channelMapping got after parsing";
+
+    if (parsedHeader.channel_mapping) {
+        ASSERT_GT(parsedHeader.channels, 2);
+        ASSERT_EQ(writtenHeader.num_streams, parsedHeader.num_streams)
+                << "Invalid header generated. Mismatch between the stream counts";
+
+        ASSERT_EQ(writtenHeader.num_coupled, parsedHeader.num_coupled)
+                << "Invalid header generated. Mismatch between the coupled stream counts";
+
+        ASSERT_EQ(parsedHeader.num_coupled + parsedHeader.num_streams, parsedHeader.channels);
+
+        ASSERT_LE(parsedHeader.num_coupled, parsedHeader.num_streams)
+                << "Invalid header generated. Number of coupled streams cannot be greater than "
+                   "number "
+                   "of streams.";
+
+        ASSERT_EQ(headerSize, kOpusHeaderChannelMapOffset + writtenHeader.channels)
+                << "Invalid header written. Header size should be equal to 86 + "
+                   "writtenHeader.channels";
+
+        uint8_t mappedChannelNumber;
+        for (int32_t channelNumber = 0; channelNumber < channels; channelNumber++) {
+            mappedChannelNumber = *(reinterpret_cast<uint8_t *>(opusHeadBuf) +
+                                    kOpusHeaderStreamMapOffset + channelNumber);
+            ASSERT_LT(mappedChannelNumber, channels) << "Invalid header generated. Channel mapping "
+                                                        "cannot be greater than channel count.";
+
+            ASSERT_EQ(mappedChannelNumber, kOpusChannelMap[channels - 1][channelNumber])
+                    << "Invalid header generated. Channel mapping is not as per specification.";
+        }
+    } else {
+        ASSERT_LE(parsedHeader.channels, 2);
+    }
+}
+
+TEST_P(OpusHeaderParseTest, ParseTest) {
+    tuple<string, int32_t, bool, bool, bool, bool> params = GetParam();
+    string inputFileName = gEnv->getRes() + get<0>(params);
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << get<0>(params);
+    bool isHeaderValid = get<2>(params);
+    bool isCodecDelayValid = get<3>(params);
+    bool isSeekPreRollValid = get<4>(params);
+    bool isInputValid = get<5>(params);
+
+    struct stat buf;
+    int32_t statStatus = stat(inputFileName.c_str(), &buf);
+    ASSERT_EQ(statStatus, 0) << "Unable to get file properties for: " << inputFileName;
+
+    size_t fileSize = buf.st_size;
+    mInputBuffer = (uint8_t *)malloc(fileSize);
+    ASSERT_NE(mInputBuffer, nullptr) << "Insufficient memory. Malloc failed for size " << fileSize;
+
+    mEleStream.read(reinterpret_cast<char *>(mInputBuffer), fileSize);
+    ASSERT_EQ(mEleStream.gcount(), fileSize)
+            << "Failed to read the complete input file, bytes read: " << mEleStream.gcount();
+
+    OpusHeader header;
+    size_t opusHeadSize = 0;
+    size_t codecDelayBufSize = 0;
+    size_t seekPreRollBufSize = 0;
+    void *opusHeadBuf = nullptr;
+    void *codecDelayBuf = nullptr;
+    void *seekPreRollBuf = nullptr;
+    bool status = GetOpusHeaderBuffers(mInputBuffer, fileSize, &opusHeadBuf, &opusHeadSize,
+                                       &codecDelayBuf, &codecDelayBufSize, &seekPreRollBuf,
+                                       &seekPreRollBufSize);
+    if (!isHeaderValid) {
+        ASSERT_EQ(opusHeadBuf, nullptr);
+    } else {
+        ASSERT_NE(opusHeadBuf, nullptr);
+    }
+    if (!isCodecDelayValid) {
+        ASSERT_EQ(codecDelayBuf, nullptr);
+    } else {
+        ASSERT_NE(codecDelayBuf, nullptr);
+    }
+    if (!isSeekPreRollValid) {
+        ASSERT_EQ(seekPreRollBuf, nullptr);
+    } else {
+        ASSERT_NE(seekPreRollBuf, nullptr);
+    }
+    if (!status) {
+        ASSERT_FALSE(isInputValid) << "GetOpusHeaderBuffers failed";
+        return;
+    }
+
+    status = ParseOpusHeader((uint8_t *)opusHeadBuf, opusHeadSize, &header);
+
+    if (status) {
+        ASSERT_TRUE(isInputValid) << "Parse opus header didn't fail for invalid input";
+    } else {
+        ASSERT_FALSE(isInputValid);
+        return;
+    }
+
+    int32_t channels = get<1>(params);
+    ASSERT_EQ(header.channels, channels) << "Parser returned invalid channel count";
+    ASSERT_LE(header.channels, kMaxChannels);
+
+    ASSERT_LE(header.num_coupled, header.num_streams)
+            << "Invalid header generated. Number of coupled streams cannot be greater than number "
+               "of streams.";
+
+    ASSERT_EQ(header.num_coupled + header.num_streams, header.channels);
+
+    if (header.channel_mapping) {
+        uint8_t mappedChannelNumber;
+        for (int32_t channelNumber = 0; channelNumber < channels; channelNumber++) {
+            mappedChannelNumber = *(reinterpret_cast<uint8_t *>(opusHeadBuf) +
+                                    kOpusHeaderStreamMapOffset + channelNumber);
+            ASSERT_LT(mappedChannelNumber, channels)
+                    << "Invalid header. Channel mapping cannot be greater than channel count.";
+
+            ASSERT_EQ(mappedChannelNumber, kOpusChannelMap[channels - 1][channelNumber])
+                    << "Invalid header generated. Channel mapping "
+                       "is not as per specification.";
+        }
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        OpusHeaderTestAll, OpusHeaderWriteTest,
+        ::testing::Values(make_tuple(1, 312, "output_channels_1skipSamples_312.opus"),
+                          make_tuple(2, 312, "output_channels_2skipSamples_312.opus"),
+                          make_tuple(5, 312, "output_channels_5skipSamples_312.opus"),
+                          make_tuple(6, 312, "output_channels_6skipSamples_312.opus"),
+                          make_tuple(1, 0, "output_channels_1skipSamples_0.opus"),
+                          make_tuple(2, 0, "output_channels_2skipSamples_0.opus"),
+                          make_tuple(5, 0, "output_channels_5skipSamples_0.opus"),
+                          make_tuple(6, 0, "output_channels_6skipSamples_0.opus"),
+                          make_tuple(1, 624, "output_channels_1skipSamples_624.opus"),
+                          make_tuple(2, 624, "output_channels_2skipSamples_624.opus"),
+                          make_tuple(5, 624, "output_channels_5skipSamples_624.opus"),
+                          make_tuple(6, 624, "output_channels_6skipSamples_624.opus")));
+
+INSTANTIATE_TEST_SUITE_P(
+        OpusHeaderTestAll, OpusHeaderParseTest,
+        ::testing::Values(
+                make_tuple("2ch_valid_size83B.opus", 2, true, true, true, true),
+                make_tuple("3ch_valid_size88B.opus", 3, true, true, true, true),
+                make_tuple("5ch_valid.opus", 5, true, false, false, true),
+                make_tuple("6ch_valid.opus", 6, true, false, false, true),
+                make_tuple("1ch_valid.opus", 1, true, false, false, true),
+                make_tuple("2ch_valid.opus", 2, true, false, false, true),
+                make_tuple("3ch_invalid_size.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_streams.opus", 3, true, true, true, false),
+                make_tuple("5ch_invalid_channelmapping.opus", 5, true, false, false, false),
+                make_tuple("5ch_invalid_coupledstreams.opus", 5, true, false, false, false),
+                make_tuple("6ch_invalid_channelmapping.opus", 6, true, false, false, false),
+                make_tuple("9ch_invalid_channels.opus", 9, true, true, true, false),
+                make_tuple("2ch_invalid_header.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_headerlength_16.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_headerlength_256.opus", 2, false, false, false, false),
+                make_tuple("2ch_invalid_size.opus", 2, false, false, false, false),
+                make_tuple("3ch_invalid_channelmapping_0.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_coupledstreams.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_headerlength.opus", 3, true, true, true, false),
+                make_tuple("3ch_invalid_headerSize1.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_headerSize2.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_headerSize3.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_nodelay.opus", 3, false, false, false, false),
+                make_tuple("3ch_invalid_nopreroll.opus", 3, false, false, false, false)));
+
+int main(int argc, char **argv) {
+    gEnv = new OpusHeaderTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGD("Opus Header Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
new file mode 100644
index 0000000..d0163c3
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __OPUS_HEADER_TEST_ENVIRONMENT_H__
+#define __OPUS_HEADER_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class OpusHeaderTestEnvironment : public ::testing::Environment {
+  public:
+    OpusHeaderTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int OpusHeaderTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __OPUS_HEADER_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/foundation/tests/OpusHeader/README.md b/media/libstagefright/foundation/tests/OpusHeader/README.md
new file mode 100644
index 0000000..860c827
--- /dev/null
+++ b/media/libstagefright/foundation/tests/OpusHeader/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### Opus Header
+The OpusHeader Test Suite validates the Opus header writer and parser available in libstagefright.
+
+Run the following command to build the test suite:
+```
+m OpusHeaderTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/OpusHeaderTest/OpusHeaderTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/OpusHeaderTest/OpusHeaderTest /data/local/tmp/
+```
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/OpusHeader/OpusHeader.zip). Download, unzip, and push these files to the device for testing.
+
+```
+adb push OpusHeader /data/local/tmp/
+```
+
+usage: OpusHeaderTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/OpusHeaderTest -P /data/local/tmp/OpusHeader/
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest OpusHeaderTest -- --enable-module-dynamic-download=true
+```
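+
+A single parameterized test suite can also be selected using gtest's standard --gtest_filter option (illustrative example; any gtest filter expression works), e.g. to run only the header-writer cases:
+```
+adb shell /data/local/tmp/OpusHeaderTest -P /data/local/tmp/OpusHeader/ --gtest_filter="OpusHeaderTestAll/OpusHeaderWriteTest.*"
+```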
diff --git a/media/libstagefright/foundation/tests/TypeTraits_test.cpp b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
index 1e2049d..d5383d1 100644
--- a/media/libstagefright/foundation/tests/TypeTraits_test.cpp
+++ b/media/libstagefright/foundation/tests/TypeTraits_test.cpp
@@ -30,7 +30,7 @@
     enum IA : int32_t { };
 };
 
-// =========== basic sanity tests for type-support templates
+// =========== basic tests for type-support templates
 TEST_F(TypeTraitsTest, StaticTests) {
 
     // ============ is_integral_or_enum
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
new file mode 100644
index 0000000..0fea0d5
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_foundation_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_foundation_license",
+    ],
+}
+
+cc_test {
+    name: "ColorUtilsTest",
+    gtest: true,
+
+    srcs: [
+        "ColorUtilsTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmediandk",
+    ],
+
+    static_libs: [
+        "libstagefright_foundation_colorutils_ndk",
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
new file mode 100644
index 0000000..0d802b4
--- /dev/null
+++ b/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
@@ -0,0 +1,773 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtilsTest"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <stdio.h>
+
+#include <media/NdkMediaFormat.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+const size_t kHDRBufferSize = 25;
+const uint16_t kHDRInfoTestValue1 = 420;
+const uint16_t kHDRInfoTestValue2 = 42069;
+
+using namespace android;
+
+typedef ColorAspects CA;
+
+class ColorRangeTest : public ::testing::TestWithParam</* ColorRange */ CA::Range> {
+  public:
+    ColorRangeTest() { mRange = GetParam(); };
+
+    CA::Range mRange;
+};
+
+class ColorTransferTest : public ::testing::TestWithParam</* ColorTransfer */ CA::Transfer> {
+  public:
+    ColorTransferTest() { mTransfer = GetParam(); };
+
+    CA::Transfer mTransfer;
+};
+
+class ColorStandardTest : public ::testing::TestWithParam<std::pair<
+                                  /* Primaries */ CA::Primaries,
+                                  /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    ColorStandardTest() {
+        mPrimaries = GetParam().first;
+        mMatrixCoeffs = GetParam().second;
+    };
+
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class IsoToPlatformAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                         /* Primaries */ CA::Primaries,
+                                         /* Transfer */ CA::Transfer,
+                                         /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                         /* Standard */ int32_t,
+                                         /* Transfer */ int32_t>> {
+  public:
+    IsoToPlatformAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mPlatformStandard = std::get<3>(GetParam());
+        mPlatformTransfer = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    int32_t mPlatformStandard;
+    int32_t mPlatformTransfer;
+};
+
+class ColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                 /* Primaries */ CA::Primaries,
+                                 /* ColorTransfer */ CA::Transfer,
+                                 /* MatrixCoeffs */ CA::MatrixCoeffs,
+                                 /* ColorRange */ CA::Range,
+                                 /* ColorStandard */ CA::Standard>> {
+  public:
+    ColorAspectsTest() {
+        mPrimaries = std::get<0>(GetParam());
+        mTransfer = std::get<1>(GetParam());
+        mMatrixCoeffs = std::get<2>(GetParam());
+        mRange = std::get<3>(GetParam());
+        mStandard = std::get<4>(GetParam());
+    };
+
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    CA::Range mRange;
+    CA::Standard mStandard;
+};
+
+class DefaultColorAspectsTest : public ::testing::TestWithParam<std::tuple<
+                                        /* Width */ int32_t,
+                                        /* Height */ int32_t,
+                                        /* Primaries */ CA::Primaries,
+                                        /* MatrixCoeffs */ CA::MatrixCoeffs>> {
+  public:
+    DefaultColorAspectsTest() {
+        mWidth = std::get<0>(GetParam());
+        mHeight = std::get<1>(GetParam());
+        mPrimaries = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+    };
+
+    int32_t mWidth;
+    int32_t mHeight;
+    CA::Primaries mPrimaries;
+    CA::MatrixCoeffs mMatrixCoeffs;
+};
+
+class DataSpaceTest : public ::testing::TestWithParam<std::tuple<
+                              /* ColorRange */ CA::Range,
+                              /* Primaries */ CA::Primaries,
+                              /* ColorTransfer */ CA::Transfer,
+                              /* MatrixCoeffs */ CA::MatrixCoeffs,
+                              /* v0_android_dataspace */ android_dataspace,
+                              /* android_dataspace */ android_dataspace>> {
+  public:
+    DataSpaceTest() {
+        mRange = std::get<0>(GetParam());
+        mPrimaries = std::get<1>(GetParam());
+        mTransfer = std::get<2>(GetParam());
+        mMatrixCoeffs = std::get<3>(GetParam());
+        mDataSpaceV0 = std::get<4>(GetParam());
+        mDataSpace = std::get<5>(GetParam());
+    };
+
+    CA::Range mRange;
+    CA::Primaries mPrimaries;
+    CA::Transfer mTransfer;
+    CA::MatrixCoeffs mMatrixCoeffs;
+    android_dataspace mDataSpaceV0;
+    android_dataspace mDataSpace;
+};
+
+TEST_P(ColorRangeTest, WrapColorRangeTest) {
+    int32_t range = ColorUtils::wrapColorAspectsIntoColorRange(mRange);
+    CA::Range unwrappedRange;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorRange(range, &unwrappedRange);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorRange failed";
+    EXPECT_EQ(unwrappedRange, mRange) << "Returned ColorRange doesn't match";
+    ALOGV("toString test: Range: %s", asString(mRange, "default"));
+}
+
+TEST_P(ColorTransferTest, WrapColorTransferTest) {
+    int32_t transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(mTransfer);
+    CA::Transfer unwrappedTransfer;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorTransfer(transfer, &unwrappedTransfer);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorTransfer failed";
+    EXPECT_EQ(unwrappedTransfer, mTransfer) << "Returned ColorTransfer doesn't match";
+    ALOGV("toString test: Transfer: %s", asString(mTransfer, "default"));
+}
+
+TEST_P(ColorStandardTest, WrapColorStandardTest) {
+    int32_t standard = ColorUtils::wrapColorAspectsIntoColorStandard(mPrimaries, mMatrixCoeffs);
+    CA::Primaries unwrappedPrimaries;
+    CA::MatrixCoeffs unwrappedMatrixCoeffs;
+    status_t status = ColorUtils::unwrapColorAspectsFromColorStandard(standard, &unwrappedPrimaries,
+                                                                      &unwrappedMatrixCoeffs);
+    ASSERT_EQ(status, OK) << "unwrapping ColorAspects from ColorStandard failed";
+    EXPECT_EQ(unwrappedPrimaries, mPrimaries) << "Returned primaries doesn't match";
+    EXPECT_EQ(unwrappedMatrixCoeffs, mMatrixCoeffs) << "Returned matrixCoeffs doesn't match";
+}
+
+TEST_P(ColorAspectsTest, PlatformAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(aspects, &range,
+                                                                            &standard, &transfer);
+    ASSERT_EQ(status, OK) << "Conversion of ColorAspects to PlatformAspects failed";
+
+    CA returnedAspects;
+    status = ColorUtils::convertPlatformColorAspectsToCodecAspects(range, standard, transfer,
+                                                                   returnedAspects);
+    ASSERT_EQ(status, OK) << "Conversion of PlatformAspects to ColorAspects failed";
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between PlatformAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, IsoAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t primaries = -1;
+    int32_t colorTransfer = -1;
+    int32_t matrixCoeffs = -1;
+    bool fullRange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &primaries, &colorTransfer,
+                                                     &matrixCoeffs, &fullRange);
+
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(primaries, colorTransfer, matrixCoeffs,
+                                                     fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange)
+            << "range mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries)
+            << "primaries mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer)
+            << "transfer mismatch for conversion between IsoAspects";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs)
+            << "matrixCoeffs mismatch for conversion between IsoAspects";
+}
+
+TEST_P(IsoToPlatformAspectsTest, IsoAspectsToPlatformAspectsTest) {
+    CA aspects;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    int32_t isoPrimaries = -1;
+    int32_t isoTransfer = -1;
+    int32_t isoMatrixCoeffs = -1;
+    bool fullrange = false;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &isoPrimaries, &isoTransfer,
+                                                     &isoMatrixCoeffs, &fullrange);
+
+    int32_t range = -1;
+    int32_t standard = -1;
+    int32_t transfer = -1;
+    ColorUtils::convertIsoColorAspectsToPlatformAspects(isoPrimaries, isoTransfer, isoMatrixCoeffs,
+                                                        fullrange, &range, &standard, &transfer);
+    if (fullrange) {
+        EXPECT_EQ(range, ColorUtils::kColorRangeFull)
+                << "range incorrect converting to PlatformAspects";
+    }
+    EXPECT_EQ(standard, mPlatformStandard) << "standard incorrect converting to PlatformAspects";
+    EXPECT_EQ(transfer, mPlatformTransfer) << "transfer incorrect converting to PlatformAspects";
+}
+
+TEST_P(ColorAspectsTest, PackColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    uint32_t packedColorAspects = ColorUtils::packToU32(aspects);
+
+    CA unpackedAspects = ColorUtils::unpackToColorAspects(packedColorAspects);
+    EXPECT_EQ(unpackedAspects.mRange, mRange) << "range mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mPrimaries, mPrimaries) << "primaries mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mTransfer, mTransfer) << "transfer mismatch after unpacking";
+    EXPECT_EQ(unpackedAspects.mMatrixCoeffs, mMatrixCoeffs)
+            << "matrixCoeffs mismatch after unpacking";
+    ALOGV("toString test: Standard: %s", asString(mStandard, "default"));
+}
+
+TEST_P(DefaultColorAspectsTest, DefaultColorAspectsTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
+
+    ColorUtils::setDefaultCodecColorAspectsIfNeeded(aspects, mWidth, mHeight);
+    EXPECT_EQ(aspects.mRange, CA::RangeLimited) << "range not set to default";
+    EXPECT_EQ(aspects.mPrimaries, mPrimaries) << "primaries not set to default";
+    EXPECT_EQ(aspects.mMatrixCoeffs, mMatrixCoeffs) << "matrixCoeffs not set to default";
+    EXPECT_EQ(aspects.mTransfer, CA::TransferSMPTE170M) << "transfer not set to default";
+}
+
+TEST_P(DataSpaceTest, DataSpaceTest) {
+    CA aspects;
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, false);
+    EXPECT_EQ(dataSpace, mDataSpace) << "Returned incorrect dataspace";
+
+    bool status = ColorUtils::convertDataSpaceToV0(dataSpace);
+    ASSERT_TRUE(status) << "Returned v0 dataspace is not aspect-only";
+    EXPECT_EQ(dataSpace, mDataSpaceV0) << "Returned incorrect v0 dataspace";
+}
+
+TEST(ColorUtilsUnitTest, AspectsChangedTest) {
+    CA origAspects;
+    origAspects.mRange = CA::Range::RangeFull;
+    origAspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    origAspects.mTransfer = CA::Transfer::TransferLinear;
+    origAspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    CA aspects;
+    aspects.mRange = CA::Range::RangeFull;
+    aspects.mPrimaries = CA::Primaries::PrimariesBT709_5;
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixBT709_5;
+
+    bool status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_FALSE(status) << "ColorAspects comparison check failed";
+
+    aspects.mRange = CA::Range::RangeLimited;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mRange, CA::Range::RangeUnspecified) << "range should have been unspecified";
+    aspects.mRange = CA::Range::RangeFull;
+
+    aspects.mTransfer = CA::Transfer::TransferSRGB;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mTransfer, CA::Transfer::TransferUnspecified)
+            << "transfer should have been unspecified";
+    aspects.mTransfer = CA::Transfer::TransferLinear;
+
+    aspects.mPrimaries = CA::Primaries::PrimariesBT2020;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+
+    aspects.mMatrixCoeffs = CA::MatrixCoeffs::MatrixSMPTE240M;
+    status = ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects, true);
+    ASSERT_TRUE(status) << "ColorAspects comparison check failed";
+    EXPECT_EQ(aspects.mPrimaries, CA::Primaries::PrimariesUnspecified)
+            << "primaries should have been unspecified";
+    EXPECT_EQ(aspects.mMatrixCoeffs, CA::MatrixCoeffs::MatrixUnspecified)
+            << "matrixCoeffs should have been unspecified";
+}
+
+TEST(ColorUtilsUnitTest, ColorConfigFromFormatTest) {
+    int range = -1;
+    int standard = -1;
+    int transfer = -1;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range | standard | transfer, 0) << "color config didn't default to 0";
+
+    format->setInt32(KEY_COLOR_RANGE, CA::Range::RangeFull);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorConfigFromFormat(format, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    sp<AMessage> copyFormat = new AMessage();
+    ASSERT_NE(copyFormat, nullptr) << "failed to create AMessage";
+    ColorUtils::copyColorConfig(format, copyFormat);
+    bool status = copyFormat->findInt32(KEY_COLOR_RANGE, &range);
+    ASSERT_TRUE(status) << "ColorConfig range entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_STANDARD, &standard);
+    ASSERT_TRUE(status) << "ColorConfig standard entry missing";
+    status = copyFormat->findInt32(KEY_COLOR_TRANSFER, &transfer);
+    ASSERT_TRUE(status) << "ColorConfig transfer entry missing";
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+
+    range = standard = transfer = -1;
+    ColorUtils::getColorConfigFromFormat(copyFormat, &range, &standard, &transfer);
+    EXPECT_EQ(range, CA::Range::RangeFull) << "range mismatch";
+    EXPECT_EQ(standard, CA::Standard::StandardBT709) << "standard mismatch";
+    EXPECT_EQ(transfer, CA::Transfer::TransferLinear) << "transfer mismatch";
+}
+
+TEST_P(ColorAspectsTest, FormatTest) {
+    CA aspects;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+    ColorUtils::setColorAspectsIntoFormat(aspects, format, true);
+
+    CA returnedAspects;
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+
+    aspects.mRange = mRange;
+    aspects.mPrimaries = mPrimaries;
+    aspects.mTransfer = mTransfer;
+    aspects.mMatrixCoeffs = mMatrixCoeffs;
+    ColorUtils::setColorAspectsIntoFormat(aspects, format);
+
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::getColorAspectsFromFormat(format, returnedAspects);
+    EXPECT_EQ(returnedAspects.mRange, aspects.mRange) << "range mismatch";
+    EXPECT_EQ(returnedAspects.mPrimaries, aspects.mPrimaries) << "primaries mismatch";
+    EXPECT_EQ(returnedAspects.mTransfer, aspects.mTransfer) << "transfer mismatch";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, aspects.mMatrixCoeffs) << "matrixCoeffs mismatch";
+}
+
+TEST(ColorUtilsUnitTest, HDRStaticInfoTest) {
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    HDRStaticInfo returnedInfoHDR;
+    bool status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_FALSE(status) << "HDR info should be absent in empty format";
+
+    HDRStaticInfo infoHDR;
+    infoHDR.sType1.mMaxDisplayLuminance = kHDRInfoTestValue2;
+    infoHDR.sType1.mMinDisplayLuminance = kHDRInfoTestValue1;
+    infoHDR.sType1.mMaxContentLightLevel = kHDRInfoTestValue2;
+    infoHDR.sType1.mMaxFrameAverageLightLevel = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mR.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mG.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mG.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mB.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mB.y = kHDRInfoTestValue2;
+    infoHDR.sType1.mW.x = kHDRInfoTestValue1;
+    infoHDR.sType1.mW.y = kHDRInfoTestValue2;
+    ColorUtils::setHDRStaticInfoIntoFormat(infoHDR, format);
+
+    status = ColorUtils::getHDRStaticInfoFromFormat(format, &returnedInfoHDR);
+    ASSERT_TRUE(status) << "Failed to get HDR info from format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+
+    AMediaFormat *mediaFormat = AMediaFormat_new();
+    ASSERT_NE(mediaFormat, nullptr) << "Unable to create AMediaFormat";
+    ColorUtils::setHDRStaticInfoIntoAMediaFormat(infoHDR, mediaFormat);
+    memset(&returnedInfoHDR, 0, sizeof(returnedInfoHDR));
+    status = ColorUtils::getHDRStaticInfoFromFormat(mediaFormat->mFormat, &returnedInfoHDR);
+    AMediaFormat_delete(mediaFormat);
+    ASSERT_TRUE(status) << "Failed to get HDR info from media format";
+    ASSERT_EQ(0, memcmp(&returnedInfoHDR, &infoHDR, sizeof(infoHDR))) << " HDRStaticInfo mismatch";
+}
+
+TEST(ColorUtilsUnitTest, SanityTest) {
+    CA::Primaries unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    CA::MatrixCoeffs unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixOther + 1);
+    int32_t colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, CA::MatrixUnspecified);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesOther, unmappedMatrixCoeffs);
+    EXPECT_EQ(colorStandard, ColorUtils::kColorStandardUnspecified)
+            << "Standard unspecified expected";
+    colorStandard = ColorUtils::wrapColorAspectsIntoColorStandard(CA::PrimariesBT601_6_525,
+                                                                  CA::MatrixBT2020);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "Standard greater than extended start expected";
+    unmappedPrimaries = (CA::Primaries)(CA::Primaries::PrimariesBT2020 + 1);
+    unmappedMatrixCoeffs = (CA::MatrixCoeffs)(CA::MatrixBT2020Constant + 1);
+    colorStandard =
+            ColorUtils::wrapColorAspectsIntoColorStandard(unmappedPrimaries, unmappedMatrixCoeffs);
+    EXPECT_GE(colorStandard, ColorUtils::kColorStandardExtendedStart)
+            << "Standard greater than extended start expected";
+
+    CA aspects;
+    int32_t colorRange = -1;
+    colorStandard = -1;
+    int32_t colorTransfer = -1;
+    aspects.mPrimaries = (CA::Primaries)(CA::Primaries::PrimariesOther + 1);
+    status_t status = ColorUtils::convertCodecColorAspectsToPlatformAspects(
+            aspects, &colorRange, &colorStandard, &colorTransfer);
+    EXPECT_NE(status, OK) << "invalid colorAspects value accepted";
+
+    int32_t colorPrimaries = -1;
+    colorTransfer = -1;
+    int32_t colorMatrixCoeffs = -1;
+    bool fullRange = false;
+    aspects.mPrimaries = CA::PrimariesOther;
+    aspects.mTransfer = CA::TransferOther;
+    aspects.mMatrixCoeffs = CA::MatrixOther;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(aspects, &colorPrimaries, &colorTransfer,
+                                                     &colorMatrixCoeffs, &fullRange);
+    CA returnedAspects;
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    // invalid values, other value equals 0xFF
+    colorPrimaries = CA::PrimariesOther;
+    colorTransfer = CA::TransferOther;
+    colorMatrixCoeffs = CA::MatrixOther;
+    fullRange = false;
+    memset(&returnedAspects, 0, sizeof(returnedAspects));
+    ColorUtils::convertIsoColorAspectsToCodecAspects(colorPrimaries, colorTransfer,
+                                                     colorMatrixCoeffs, fullRange, returnedAspects);
+    EXPECT_EQ(returnedAspects.mPrimaries, CA::PrimariesUnspecified)
+            << "expected unspecified Primaries";
+    EXPECT_EQ(returnedAspects.mTransfer, CA::TransferUnspecified)
+            << "expected unspecified Transfer";
+    EXPECT_EQ(returnedAspects.mMatrixCoeffs, CA::MatrixUnspecified)
+            << "expected unspecified MatrixCoeffs";
+
+    CA::Primaries primaries = CA::PrimariesUnspecified;
+    CA::MatrixCoeffs matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(ColorUtils::kColorStandardVendorStart,
+                                                             &primaries, &matrixCoeffs);
+    EXPECT_EQ(status, OK) << "unwrapping aspects from color standard failed";
+
+    primaries = CA::PrimariesUnspecified;
+    matrixCoeffs = CA::MatrixUnspecified;
+    status = ColorUtils::unwrapColorAspectsFromColorStandard(
+            ColorUtils::kColorStandardVendorStart * 4, &primaries, &matrixCoeffs);
+    EXPECT_NE(status, OK) << "unwrapping aspects from color standard failed";
+
+    colorRange = ColorUtils::wrapColorAspectsIntoColorRange((CA::Range)(CA::RangeOther + 1));
+    EXPECT_EQ(colorRange, ColorUtils::kColorRangeUnspecified) << "expected unspecified color range";
+
+    CA::Range range;
+    status = ColorUtils::unwrapColorAspectsFromColorRange(
+            ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1, &range);
+    EXPECT_NE(status, OK) << "invalid range value accepted";
+    EXPECT_EQ(range, CA::RangeOther) << "returned unexpected range value";
+
+    colorTransfer =
+            ColorUtils::wrapColorAspectsIntoColorTransfer((CA::Transfer)(CA::TransferOther + 1));
+    EXPECT_EQ(colorTransfer, ColorUtils::kColorTransferUnspecified)
+            << "expected unspecified color transfer";
+
+    CA::Transfer transfer;
+    status = ColorUtils::unwrapColorAspectsFromColorTransfer(
+            ColorUtils::kColorTransferVendorStart + CA::TransferOther + 1, &transfer);
+    EXPECT_NE(status, OK) << "invalid transfer value accepted";
+    EXPECT_EQ(transfer, CA::TransferOther) << "expected other color transfer";
+}
+
+TEST(ColorUtilsUnitTest, HDRInfoSanityTest) {
+    HDRStaticInfo hdrInfo;
+    sp<AMessage> format = new AMessage();
+    ASSERT_NE(format, nullptr) << "failed to create AMessage";
+
+    bool boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "HDRStaticInfo should not be present";
+
+    sp<ABuffer> invalidSizeHDRInfoBuffer = new ABuffer(kHDRBufferSize - 1);
+    ASSERT_NE(invalidSizeHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidSizeHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    sp<ABuffer> invalidHDRInfoBuffer = new ABuffer(kHDRBufferSize);
+    ASSERT_NE(invalidHDRInfoBuffer, nullptr) << "failed to create ABuffer";
+    uint8_t *data = invalidHDRInfoBuffer->data();
+    *data = HDRStaticInfo::kType1 + 1;
+    format->setBuffer(KEY_HDR_STATIC_INFO, invalidHDRInfoBuffer);
+    memset(&hdrInfo, 0, sizeof(hdrInfo));
+    boolStatus = ColorUtils::getHDRStaticInfoFromFormat(format, &hdrInfo);
+    EXPECT_FALSE(boolStatus) << "incorrect HDRStaticInfo buffer accepted";
+
+    CA aspects;
+    format->setInt32(KEY_COLOR_RANGE, ColorUtils::kColorRangeVendorStart + CA::RangeOther + 1);
+    format->setInt32(KEY_COLOR_STANDARD, CA::Standard::StandardBT709);
+    format->setInt32(KEY_COLOR_TRANSFER, CA::Transfer::TransferLinear);
+    ColorUtils::getColorAspectsFromFormat(format, aspects);
+    EXPECT_EQ(aspects.mRange, CA::RangeOther) << "unexpected range";
+}
+
+TEST(ColorUtilsUnitTest, DataSpaceSanityTest) {
+    CA aspects;
+    aspects.mRange = CA::RangeUnspecified;
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixUnspecified;
+    aspects.mTransfer = CA::TransferUnspecified;
+    android_dataspace dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_EQ(dataSpace, 0) << "expected invalid dataspace";
+    aspects.mPrimaries = CA::PrimariesUnspecified;
+    aspects.mMatrixCoeffs = CA::MatrixBT2020Constant;
+    dataSpace = ColorUtils::getDataSpaceForColorAspects(aspects, true);
+    EXPECT_NE(dataSpace, 0) << "unexpected value";
+}
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorRangeTest,
+                         ::testing::Values(
+                                 // ColorRange
+                                 CA::Range::RangeLimited, CA::Range::RangeFull,
+                                 CA::Range::RangeUnspecified, CA::Range::RangeOther));
+
+INSTANTIATE_TEST_SUITE_P(ColorUtilsUnitTest, ColorTransferTest,
+                         ::testing::Values(
+                                 // ColorTransfer
+                                 CA::Transfer::TransferUnspecified, CA::Transfer::TransferLinear,
+                                 CA::Transfer::TransferSRGB, CA::Transfer::TransferSMPTE170M,
+                                 CA::Transfer::TransferGamma22, CA::Transfer::TransferGamma28,
+                                 CA::Transfer::TransferST2084, CA::Transfer::TransferHLG,
+                                 CA::Transfer::TransferSMPTE240M, CA::Transfer::TransferXvYCC,
+                                 CA::Transfer::TransferBT1361, CA::Transfer::TransferST428,
+                                 CA::Transfer::TransferOther));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorStandardTest,
+        ::testing::Values(
+                // Primaries, MatrixCoeffs
+                std::make_pair(CA::Primaries::PrimariesUnspecified,
+                               CA::MatrixCoeffs::MatrixUnspecified),
+                std::make_pair(CA::Primaries::PrimariesBT709_5,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_625,
+                               CA::MatrixCoeffs::MatrixBT709_5),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixBT601_6),
+                std::make_pair(CA::Primaries::PrimariesBT601_6_525,
+                               CA::MatrixCoeffs::MatrixSMPTE240M),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020),
+                std::make_pair(CA::Primaries::PrimariesBT2020,
+                               CA::MatrixCoeffs::MatrixBT2020Constant),
+                std::make_pair(CA::Primaries::PrimariesBT470_6M,
+                               CA::MatrixCoeffs::MatrixBT470_6M),
+                std::make_pair(CA::Primaries::PrimariesGenericFilm,
+                               CA::MatrixCoeffs::MatrixBT2020)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, ColorAspectsTest,
+        ::testing::Values(
+                // Primaries, ColorTransfer, MatrixCoeffs, ColorRange, ColorStandard
+                std::make_tuple(CA::Primaries::PrimariesUnspecified,
+                                CA::Transfer::TransferUnspecified,
+                                CA::MatrixCoeffs::MatrixUnspecified, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT709_5, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT709_5, CA::Range::RangeFull,
+                                CA::Standard::StandardBT709),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625, CA::Transfer::TransferSRGB,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                CA::Range::RangeFull, CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma22,
+                                CA::MatrixCoeffs::MatrixBT601_6, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesBT601_6_525, CA::Transfer::TransferGamma28,
+                                CA::MatrixCoeffs::MatrixSMPTE240M, CA::Range::RangeFull,
+                                CA::Standard::StandardBT470M),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferST2084,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT2020, CA::Transfer::TransferHLG,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_525),
+                std::make_tuple(CA::Primaries::PrimariesBT470_6M, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT470_6M, CA::Range::RangeFull,
+                                CA::Standard::StandardUnspecified),
+                std::make_tuple(CA::Primaries::PrimariesGenericFilm, CA::Transfer::TransferLinear,
+                                CA::MatrixCoeffs::MatrixBT2020, CA::Range::RangeFull,
+                                CA::Standard::StandardBT601_625)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, IsoToPlatformAspectsTest,
+        ::testing::Values(
+                // Primaries, Transfer, MatrixCoeffs, Standard, Transfer
+                std::make_tuple(CA::PrimariesUnspecified, CA::TransferUnspecified,
+                                CA::MatrixUnspecified, ColorUtils::kColorStandardUnspecified,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesBT709_5, CA::TransferLinear, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT709, ColorUtils::kColorTransferLinear),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSRGB, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_625,
+                                ColorUtils::kColorTransferSRGB),
+                std::make_tuple(CA::PrimariesBT601_6_625, CA::TransferSMPTE170M, CA::MatrixBT709_5,
+                                ColorUtils::kColorStandardBT601_625_Unadjusted,
+                                ColorUtils::kColorTransferSMPTE_170M),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma22, CA::MatrixBT601_6,
+                                ColorUtils::kColorStandardBT601_525,
+                                ColorUtils::kColorTransferGamma22),
+                std::make_tuple(CA::PrimariesBT601_6_525, CA::TransferGamma28, CA::MatrixSMPTE240M,
+                                ColorUtils::kColorStandardBT601_525_Unadjusted,
+                                ColorUtils::kColorTransferGamma28),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferST2084, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardBT2020, ColorUtils::kColorTransferST2084),
+                std::make_tuple(CA::PrimariesBT2020, CA::TransferHLG, CA::MatrixBT2020Constant,
+                                ColorUtils::kColorStandardBT2020Constant,
+                                ColorUtils::kColorTransferHLG),
+                std::make_tuple(CA::PrimariesBT470_6M, CA::TransferUnspecified, CA::MatrixBT470_6M,
+                                ColorUtils::kColorStandardBT470M,
+                                ColorUtils::kColorTransferUnspecified),
+                std::make_tuple(CA::PrimariesGenericFilm, CA::TransferLinear, CA::MatrixBT2020,
+                                ColorUtils::kColorStandardFilm, ColorUtils::kColorTransferLinear)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DefaultColorAspectsTest,
+        ::testing::Values(
+                // Width, Height, Primaries, MatrixCoeffs
+                std::make_tuple(3840, 3840, CA::PrimariesBT2020, CA::MatrixBT2020),
+                std::make_tuple(720, 576, CA::PrimariesBT601_6_625, CA::MatrixBT601_6),
+                std::make_tuple(480, 360, CA::PrimariesBT601_6_525, CA::MatrixBT601_6),
+                std::make_tuple(480, 1920, CA::PrimariesBT709_5, CA::MatrixBT709_5)));
+
+INSTANTIATE_TEST_SUITE_P(
+        ColorUtilsUnitTest, DataSpaceTest,
+        ::testing::Values(
+                // ColorRange, Primaries, ColorTransfer, MatrixCoeffs, v0_android_dataspace,
+                // android_dataspace
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSRGB, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB, HAL_DATASPACE_SRGB),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferLinear, CA::MatrixCoeffs::MatrixBT709_5,
+                                HAL_DATASPACE_V0_SRGB_LINEAR, HAL_DATASPACE_SRGB_LINEAR),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeFull, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_JFIF, HAL_DATASPACE_JFIF),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT601_6,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixSMPTE240M,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT2020,
+                                HAL_DATASPACE_V0_BT709, HAL_DATASPACE_BT709),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT709_5,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_625, HAL_DATASPACE_BT601_625),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_625,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M, CA::MatrixCoeffs::MatrixBT470_6M,
+                                HAL_DATASPACE_V0_BT601_525, HAL_DATASPACE_BT601_525),
+                std::make_tuple(CA::Range::RangeLimited, CA::Primaries::PrimariesBT601_6_525,
+                                CA::Transfer::TransferSMPTE170M,
+                                CA::MatrixCoeffs::MatrixBT2020Constant, HAL_DATASPACE_V0_BT601_525,
+                                HAL_DATASPACE_BT601_525)));
diff --git a/media/libstagefright/http/Android.bp b/media/libstagefright/http/Android.bp
index 8655caf..f4d6d99 100644
--- a/media/libstagefright/http/Android.bp
+++ b/media/libstagefright/http/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_shared {
     name: "libstagefright_http_support",
 
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 12e7ca6..0b0acbf 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_httplive_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_httplive_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library {
     name: "libstagefright_httplive",
 
diff --git a/media/libstagefright/id3/Android.bp b/media/libstagefright/id3/Android.bp
index db37fe9..3f5ba47 100644
--- a/media/libstagefright/id3/Android.bp
+++ b/media/libstagefright/id3/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_id3_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_id3_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libstagefright_id3",
     min_sdk_version: "29",
@@ -5,7 +24,9 @@
     srcs: ["ID3.cpp"],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
         "media_ndk_headers",
     ],
 
@@ -19,6 +40,12 @@
         ],
         cfi: true,
     },
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 //###############################################################################
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index e97f6eb..1f3cad9 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -234,10 +234,20 @@
     }
 
     // first handle global unsynchronization
+    bool hasGlobalUnsync = false;
     if (header.flags & 0x80) {
-        ALOGV("removing unsynchronization");
+        ALOGV("has Global unsynchronization");
+        hasGlobalUnsync = true;
+        // We have to wait to apply global unsynchronization to V2.4 frames:
+        // if we apply it now, the length information within any V2.4 frames goes bad.
+        // Removing unsynchronization shrinks the buffer, but the lengths stored within
+        // the frames (in syncsafe format) still reflect the "pre-shrinking" totals.
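+        // (For illustration: a V2.4 syncsafe length uses 7 bits per byte, so the
+        // bytes 0x00 0x00 0x02 0x01 decode to (0x02 << 7) | 0x01 = 257.)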
 
-        removeUnsynchronization();
+        // We can (and should) apply the non-V2.4 unsynchronization removal now.
+        if (header.version_major != 4) {
+            ALOGV("Apply global unsync for non V2.4 frames");
+            removeUnsynchronization();
+        }
     }
 
     // handle extended header, if present
@@ -327,9 +337,10 @@
     // Handle any v2.4 per-frame unsynchronization
     // The id3 spec isn't clear about what should happen if the global
     // unsynchronization flag is combined with per-frame unsynchronization,
-    // or whether that's even allowed, so this code assumes id3 writing
-    // tools do the right thing and not apply double-unsynchronization,
-    // but will honor the flags if they are set.
+    // or whether that's even allowed. We choose a "no more than one unsynchronization"
+    // semantic; the V2_4 unsynchronizer gets a copy of the global flag so it can handle
+    // this possible ambiguity.
+    //
     if (header.version_major == 4) {
         void *copy = malloc(size);
         if (copy == NULL) {
@@ -341,12 +352,12 @@
 
         memcpy(copy, mData, size);
 
-        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */);
+        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */, hasGlobalUnsync);
         if (!success) {
             memcpy(mData, copy, size);
             mSize = size;
 
-            success = removeUnsynchronizationV2_4(true /* iTunesHack */);
+            success = removeUnsynchronizationV2_4(true /* iTunesHack */, hasGlobalUnsync);
 
             if (success) {
                 ALOGV("Had to apply the iTunes hack to parse this ID3 tag");
@@ -365,7 +376,6 @@
     }
 
 
-
     if (header.version_major == 2) {
         mVersion = ID3_V2_2;
     } else if (header.version_major == 3) {
@@ -407,7 +417,7 @@
     }
 }
 
-bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) {
+bool ID3::removeUnsynchronizationV2_4(bool iTunesHack, bool hasGlobalUnsync) {
     size_t oldSize = mSize;
 
     size_t offset = mFirstFrameOffset;
@@ -443,7 +453,11 @@
             flags &= ~1;
         }
 
-        if ((flags & 2) && (dataSize >= 2)) {
+        ALOGV("hasglobal %d  flags&2 %d", hasGlobalUnsync, flags&2);
+        if (hasGlobalUnsync && !(flags & 2)) {
+            ALOGV("OOPS: global unsync set, but per-frame NOT set; removing unsync anyway");
+        }
+        if ((hasGlobalUnsync || (flags & 2)) && (dataSize >= 2)) {
             // This frame has "unsynchronization", so we have to replace occurrences
             // of 0xff 0x00 with just 0xff in order to get the real data.
 
@@ -470,7 +484,6 @@
                 ALOGE("b/34618607 (%zu %zu %zu %zu)", readOffset, writeOffset, oldSize, mSize);
                 android_errorWriteLog(0x534e4554, "34618607");
             }
-
         }
         flags &= ~2;
         if (flags != prevFlags || iTunesHack) {
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
new file mode 100644
index 0000000..d82d26e
--- /dev/null
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -0,0 +1,24 @@
+// frameworks/av/media/libstagefright/id3
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "ID3Test" }
+  ],
+
+  "presubmit-large": [
+    // this doesn't seem to run any tests.
+    // but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
+    // does run the 32 and 64 bit tests, but not the instant tests
+    // but all I know is that with 'atest', it's not running
+    {
+      "name": "CtsMediaTestCases",
+      "options": [
+          {
+            "include-filter": "android.media.cts.MediaMetadataRetrieverTest"
+          }
+      ]
+    }
+  ]
+}
diff --git a/media/libstagefright/id3/test/Android.bp b/media/libstagefright/id3/test/Android.bp
index 9d26eec..52cdfa5 100644
--- a/media/libstagefright/id3/test/Android.bp
+++ b/media/libstagefright/id3/test/Android.bp
@@ -14,8 +14,20 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_id3_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_id3_license",
+    ],
+}
+
 cc_test {
     name: "ID3Test",
+    test_suites: ["device-tests"],
     gtest: true,
 
     srcs: ["ID3Test.cpp"],
diff --git a/media/libstagefright/id3/test/AndroidTest.xml b/media/libstagefright/id3/test/AndroidTest.xml
index 6c6697d..50f9253 100644
--- a/media/libstagefright/id3/test/AndroidTest.xml
+++ b/media/libstagefright/id3/test/AndroidTest.xml
@@ -19,7 +19,7 @@
         <option name="cleanup" value="true" />
         <option name="push" value="ID3Test->/data/local/tmp/ID3Test" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test.zip?unzip=true"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/id3/test/ID3Test-1.2.zip?unzip=true"
             value="/data/local/tmp/ID3TestRes/" />
     </target_preparer>
 
diff --git a/media/libstagefright/id3/test/ID3Test.cpp b/media/libstagefright/id3/test/ID3Test.cpp
index cd5cd9e..a0a84ec 100644
--- a/media/libstagefright/id3/test/ID3Test.cpp
+++ b/media/libstagefright/id3/test/ID3Test.cpp
@@ -29,6 +29,7 @@
 
 #include "ID3TestEnvironment.h"
 
+
 using namespace android;
 
 static ID3TestEnvironment *gEnv = nullptr;
@@ -41,6 +42,7 @@
 
 TEST_P(ID3tagTest, TagTest) {
     string path = gEnv->getRes() + GetParam();
+    ALOGV(" =====   TagTest for %s", path.c_str());
     sp<FileSource> file = new FileSource(path.c_str());
     ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
     DataSourceHelper helper(file->wrap());
@@ -51,7 +53,7 @@
     while (!it.done()) {
         String8 id;
         it.getID(&id);
-        ASSERT_GT(id.length(), 0) << "No ID tag found! \n";
+        ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
         ALOGV("Found ID tag: %s\n", String8(id).c_str());
         it.next();
     }
@@ -60,19 +62,21 @@
 TEST_P(ID3versionTest, VersionTest) {
     int versionNumber = GetParam().second;
     string path = gEnv->getRes() + GetParam().first;
+    ALOGV(" =====   VersionTest for %s", path.c_str());
     sp<android::FileSource> file = new FileSource(path.c_str());
     ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
 
     DataSourceHelper helper(file->wrap());
     ID3 tag(&helper);
     ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
-    ASSERT_TRUE(tag.version() >= versionNumber)
-            << "Expected version: " << tag.version() << " Found version: " << versionNumber;
+    ASSERT_EQ(tag.version(), versionNumber)
+            << "Found version: " << tag.version() << " Expected version: " << versionNumber;
 }
 
 TEST_P(ID3textTagTest, TextTagTest) {
     int numTextFrames = GetParam().second;
     string path = gEnv->getRes() + GetParam().first;
+    ALOGV(" =====   TextTagTest for %s", path.c_str());
     sp<android::FileSource> file = new FileSource(path.c_str());
     ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
 
@@ -81,17 +85,34 @@
     ASSERT_TRUE(tag.isValid()) << "No valid ID3 tag found for " << path.c_str() << "\n";
     int countTextFrames = 0;
     ID3::Iterator it(tag, nullptr);
-    while (!it.done()) {
-        String8 id;
-        it.getID(&id);
-        ASSERT_GT(id.length(), 0);
-        if (id[0] == 'T') {
-            String8 text;
-            countTextFrames++;
-            it.getString(&text);
-            ALOGV("Found text frame %s : %s \n", id.string(), text.string());
+    if (tag.version() != ID3::ID3_V1 && tag.version() != ID3::ID3_V1_1) {
+        while (!it.done()) {
+            String8 id;
+            it.getID(&id);
+            ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
+            if (id[0] == 'T') {
+                String8 text;
+                countTextFrames++;
+                it.getString(&text);
+                ALOGV("Found text frame %s : %s \n", id.string(), text.string());
+            }
+            it.next();
         }
-        it.next();
+    } else {
+        while (!it.done()) {
+            String8 id;
+            String8 text;
+            it.getID(&id);
+            ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
+            it.getString(&text);
+            // if the tag has a value
+            if (strcmp(text.string(), "")) {
+                countTextFrames++;
+                ALOGV("ID: %s\n", id.c_str());
+                ALOGV("Text string: %s\n", text.string());
+            }
+            it.next();
+        }
     }
     ASSERT_EQ(countTextFrames, numTextFrames)
             << "Expected " << numTextFrames << " text frames, found " << countTextFrames;
@@ -100,6 +121,7 @@
 TEST_P(ID3albumArtTest, AlbumArtTest) {
     bool albumArtPresent = GetParam().second;
     string path = gEnv->getRes() + GetParam().first;
+    ALOGV(" =====   AlbumArt for %s", path.c_str());
     sp<android::FileSource> file = new FileSource(path.c_str());
     ASSERT_EQ(file->initCheck(), (status_t)OK) << "File initialization failed! \n";
 
@@ -114,10 +136,11 @@
         if (data) {
             ALOGV("Found album art: size = %zu mime = %s \n", dataSize, mime.string());
         }
-        ASSERT_NE(data, nullptr) << "Expected album art, found none!" << path;
+        ASSERT_NE(data, nullptr) << "Expected album art, found none! " << path;
     } else {
         ASSERT_EQ(data, nullptr) << "Found album art when expected none!";
     }
+
 #if (LOG_NDEBUG == 0)
     hexdump(data, dataSize > 128 ? 128 : dataSize);
 #endif
@@ -137,7 +160,7 @@
     while (!it.done()) {
         String8 id;
         it.getID(&id);
-        ASSERT_GT(id.length(), 0);
+        ASSERT_GT(id.length(), 0) << "Found an ID3 tag of 0 size";
         // Check if the tag is an "APIC/PIC" tag.
         if (String8(id) == "APIC" || String8(id) == "PIC") {
             count++;
@@ -150,7 +173,7 @@
                 hexdump(data, dataSize > 128 ? 128 : dataSize);
 #endif
             }
-            ASSERT_NE(data, nullptr) << "Expected album art, found none!" << path;
+            ASSERT_NE(data, nullptr) << "Expected album art, found none! " << path;
         }
         it.next();
     }
@@ -158,57 +181,90 @@
                                   << " album arts! \n";
 }
 
+// we have a test asset whose album art is larger than the 3 MB cap that we
+// intentionally enforce in the ID3 parsing routine.
+// Rather than have those cases fail on every run, they are gated behind the
+// TEST_LARGE #ifdef so that the test suite passes by default; switch the
+// #undef below to '#define TEST_LARGE 1' to exercise them.
+#undef  TEST_LARGE
+
+
+// it appears that bbb_2sec_v24_unsynchronizedAllFrames.mp3 is not a legal file,
+// so it has been removed from the list of files to be tested.
+//
 INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3tagTest,
-                         ::testing::Values("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3",
-                                           "bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3"));
+                         ::testing::Values("bbb_1sec_v23.mp3",
+                                           "bbb_1sec_1_image.mp3",
+                                           "bbb_1sec_2_image.mp3",
+                                           "bbb_2sec_v24.mp3",
+                                           "bbb_2sec_1_image.mp3",
+                                           "bbb_2sec_2_image.mp3",
+                                           "bbb_2sec_largeSize.mp3",
+                                           "bbb_1sec_v23_3tags.mp3",
+                                           "bbb_1sec_v1_5tags.mp3",
+                                           "bbb_2sec_v24_unsynchronizedOneFrame.mp3",
+                                           "idv24_unsynchronized.mp3"));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3versionTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 4),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 4)));
+        ::testing::Values(make_pair("bbb_1sec_v23.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_1_image.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_2_image.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_2sec_v24.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_1_image.mp3", ID3::ID3_V2_4),
+                          make_pair("bbb_2sec_2_image.mp3", ID3::ID3_V2_4),
+#if TEST_LARGE
+                          make_pair("bbb_2sec_largeSize.mp3", ID3::ID3_V2_4), // FAIL
+#endif
+                          make_pair("bbb_1sec_v23_3tags.mp3", ID3::ID3_V2_3),
+                          make_pair("bbb_1sec_v1_5tags.mp3", ID3::ID3_V1_1),
+                          make_pair("bbb_1sec_v1_3tags.mp3", ID3::ID3_V1_1),
+                          make_pair("bbb_2sec_v24_unsynchronizedOneFrame.mp3", ID3::ID3_V2_4),
+                          make_pair("idv24_unsynchronized.mp3", ID3::ID3_V2_4)));
 
 INSTANTIATE_TEST_SUITE_P(
         id3TestAll, ID3textTagTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_moreTextFrames.mp3", 5)));
+        ::testing::Values(
+                make_pair("bbb_1sec_v23.mp3", 1),
+                make_pair("bbb_1sec_1_image.mp3", 1),
+                make_pair("bbb_1sec_2_image.mp3", 1),
+                make_pair("bbb_2sec_v24.mp3", 1),
+                make_pair("bbb_2sec_1_image.mp3", 1),
+                make_pair("bbb_2sec_2_image.mp3", 1),
+#if TEST_LARGE
+                make_pair("bbb_2sec_largeSize.mp3", 1), // FAIL
+#endif
+                make_pair("bbb_1sec_v23_3tags.mp3", 3),
+                make_pair("bbb_1sec_v1_5tags.mp3", 5),
+                make_pair("bbb_1sec_v1_3tags.mp3", 3),
+                make_pair("bbb_2sec_v24_unsynchronizedOneFrame.mp3", 3)
+                ));
 
-INSTANTIATE_TEST_SUITE_P(
-        id3TestAll, ID3albumArtTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", false),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", false),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", true),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", true)));
+INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3albumArtTest,
+                         ::testing::Values(make_pair("bbb_1sec_v23.mp3", false),
+                                           make_pair("bbb_1sec_1_image.mp3", true),
+                                           make_pair("bbb_1sec_2_image.mp3", true),
+                                           make_pair("bbb_2sec_v24.mp3", false),
+                                           make_pair("bbb_2sec_1_image.mp3", true),
+                                           make_pair("bbb_2sec_2_image.mp3", true),
+#if TEST_LARGE
+                                           make_pair("bbb_2sec_largeSize.mp3", true), // FAIL
+#endif
+                                           make_pair("bbb_1sec_v1_5tags.mp3", false),
+                                           make_pair("idv24_unsynchronized.mp3", true)
+                                           ));
 
-INSTANTIATE_TEST_SUITE_P(
-        id3TestAll, ID3multiAlbumArtTest,
-        ::testing::Values(make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec.mp3", 0),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins.mp3", 0),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_1_image.mp3", 1),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_30sec_2_image.mp3", 2),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_2_image.mp3", 2),
-                          make_pair("bbb_44100hz_2ch_128kbps_mp3_5mins_largeSize.mp3", 3)));
+INSTANTIATE_TEST_SUITE_P(id3TestAll, ID3multiAlbumArtTest,
+                         ::testing::Values(make_pair("bbb_1sec_v23.mp3", 0),
+                                           make_pair("bbb_2sec_v24.mp3", 0),
+#if TEST_LARGE
+                                           make_pair("bbb_2sec_largeSize.mp3", 3), // FAIL
+#endif
+                                           make_pair("bbb_1sec_1_image.mp3", 1),
+                                           make_pair("bbb_2sec_1_image.mp3", 1),
+                                           make_pair("bbb_1sec_2_image.mp3", 2),
+                                           make_pair("bbb_2sec_2_image.mp3", 2)
+                                           ));
 
 int main(int argc, char **argv) {
     gEnv = new ID3TestEnvironment();
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index 19ae0e3..d59e4f5 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -135,7 +135,8 @@
 private:
     sp<FrameCaptureLayer> mCaptureLayer;
     VideoFrame *mFrame;
-    bool mIsAvcOrHevc;
+    bool mIsAvc;
+    bool mIsHevc;
     MediaSource::ReadOptions::SeekMode mSeekMode;
     int64_t mTargetTimeUs;
     List<int64_t> mSampleDurations;
@@ -145,8 +146,8 @@
     status_t captureSurface();
 };
 
-struct ImageDecoder : public FrameDecoder {
-    ImageDecoder(
+struct MediaImageDecoder : public FrameDecoder {
+    MediaImageDecoder(
             const AString &componentName,
             const sp<MetaData> &trackMeta,
             const sp<IMediaSource> &source);
diff --git a/media/libstagefright/include/HevcUtils.h b/media/libstagefright/include/HevcUtils.h
index 0f59631..6a4a168 100644
--- a/media/libstagefright/include/HevcUtils.h
+++ b/media/libstagefright/include/HevcUtils.h
@@ -30,6 +30,10 @@
 namespace android {
 
 enum {
+    kHevcNalUnitTypeCodedSliceIdr = 19,
+    kHevcNalUnitTypeCodedSliceIdrNoLP = 20,
+    kHevcNalUnitTypeCodedSliceCra = 21,
+
     kHevcNalUnitTypeVps = 32,
     kHevcNalUnitTypeSps = 33,
     kHevcNalUnitTypePps = 34,
@@ -90,8 +94,11 @@
     // Note that this method does not write the start code.
     bool write(size_t index, uint8_t* dest, size_t size);
     status_t makeHvcc(uint8_t *hvcc, size_t *hvccSize, size_t nalSizeLength);
+    void FindHEVCDimensions(
+            const sp<ABuffer> &SpsBuffer, int32_t *width, int32_t *height);
 
     Info getInfo() const { return mInfo; }
+    static bool IsHevcIDR(const uint8_t *data, size_t size);
 
 private:
     status_t parseVps(const uint8_t* data, size_t size);
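
The HevcUtils.h additions above declare NAL unit type constants for IDR and CRA
slices plus a static IsHevcIDR() helper. As background, an HEVC NAL unit header
is two bytes and nal_unit_type occupies bits 1..6 of the first byte, so the type
is (data[0] >> 1) & 0x3f. A hedged sketch of a single-NAL check follows; the
real helper also has to walk the length-prefixed NAL units inside a sample, and
counting CRA slices as sync points is an assumption made for illustration.

    #include <cstddef>
    #include <cstdint>

    // nal_unit_type is bits 1..6 of the first byte of the HEVC NAL unit header.
    static inline uint8_t hevcNalType(const uint8_t *nal) {
        return (nal[0] >> 1) & 0x3f;
    }

    // True for IDR_W_RADL (19), IDR_N_LP (20) and CRA_NUT (21) slices.
    static inline bool isHevcRandomAccessNal(const uint8_t *nal, size_t size) {
        if (nal == nullptr || size < 2) {
            return false;  // a NAL unit header is two bytes
        }
        const uint8_t type = hevcNalType(nal);
        return type == 19 || type == 20 || type == 21;
    }
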
diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h
index 0be5896..bd0d27c 100644
--- a/media/libstagefright/include/ID3.h
+++ b/media/libstagefright/include/ID3.h
@@ -91,7 +91,7 @@
     bool parseV1(DataSourceBase *source);
     bool parseV2(DataSourceBase *source, off64_t offset);
     void removeUnsynchronization();
-    bool removeUnsynchronizationV2_4(bool iTunesHack);
+    bool removeUnsynchronizationV2_4(bool iTunesHack, bool hasGlobalUnsync);
 
     static bool ParseSyncsafeInteger(const uint8_t encoded[4], size_t *x);
 
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 83e92b9..c84cc10 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -31,6 +31,7 @@
 #include <OMX_Audio.h>
 #include <hardware/gralloc.h>
 #include <nativebase/nativebase.h>
+#include <android/hardware/graphics/common/1.2/types.h>
 #include <android/hidl/allocator/1.0/IAllocator.h>
 #include <android/hidl/memory/1.0/IMemory.h>
 
@@ -54,6 +55,7 @@
 struct DescribeColorFormat2Params;
 struct DataConverter;
 
+using android::hardware::graphics::common::V1_2::BufferUsage;
 typedef hidl::allocator::V1_0::IAllocator TAllocator;
 typedef hidl::memory::V1_0::IMemory TMemory;
 
@@ -147,6 +149,7 @@
         kWhatReleaseCodecInstance    = 'relC',
         kWhatForceStateTransition    = 'fstt',
         kWhatCheckIfStuck            = 'Cstk',
+        kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
     };
 
     enum {
@@ -164,7 +167,8 @@
     enum {
         kVideoGrallocUsage = (GRALLOC_USAGE_HW_TEXTURE
                             | GRALLOC_USAGE_HW_COMPOSER
-                            | GRALLOC_USAGE_EXTERNAL_DISP),
+                            | GRALLOC_USAGE_EXTERNAL_DISP)
+                            | static_cast<uint64_t>(BufferUsage::VIDEO_DECODER),
     };
 
     struct BufferInfo {
@@ -272,6 +276,8 @@
     bool mShutdownInProgress;
     bool mExplicitShutdown;
     bool mIsLegacyVP9Decoder;
+    bool mIsStreamCorruptFree;
+    bool mIsLowLatency;
 
     // If "mKeepComponentAllocated" we only transition back to Loaded state
     // and do not release the component instance.
@@ -499,6 +505,7 @@
     status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
     status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
 
+    status_t setupOpusCodec(bool encoder, int32_t sampleRate, int32_t numChannels);
     status_t setupFlacCodec(
             bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
             AudioEncoding encoding);
@@ -511,6 +518,7 @@
     status_t setLowLatency(int32_t lowLatency);
     status_t setLatency(uint32_t latency);
     status_t getLatency(uint32_t *latency);
+    status_t setTunnelPeek(int32_t tunnelPeek);
     status_t setAudioPresentation(int32_t presentationId, int32_t programId);
     status_t setOperatingRate(float rateFloat, bool isVideo);
     status_t getIntraRefreshPeriod(uint32_t *intraRefreshPeriod);
@@ -571,6 +579,8 @@
     void notifyOfRenderedFrames(
             bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
+    void onFirstTunnelFrameReady();
+
     // Pass |expectedFormat| to print a warning if the format differs from it.
     // Using sp<> instead of const sp<>& because expectedFormat is likely the current mOutputFormat
     // which will get updated inside.
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 451aa57..43d50f1 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -31,22 +31,35 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 class AudioRecord;
 
 struct AudioSource : public MediaSource, public MediaBufferObserver {
     // Note that the "channels" parameter _is_ the number of channels,
     // _not_ a bitmask of audio_channels_t constants.
     AudioSource(
-            const audio_attributes_t *attr,
-            const String16 &opPackageName,
-            uint32_t sampleRate,
-            uint32_t channels,
-            uint32_t outSampleRate = 0,
-            uid_t uid = -1,
-            pid_t pid = -1,
-            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
-            audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
+        const audio_attributes_t *attr,
+        const AttributionSourceState& attributionSource,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
+
+    // Legacy constructor kept for vendor dependencies
+    AudioSource(
+        const audio_attributes_t *attr,
+        const String16 &opPackageName,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        uid_t uid = -1,
+        pid_t pid = -1,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
 
     status_t initCheck() const;
 
@@ -131,6 +144,16 @@
 
     AudioSource(const AudioSource &);
     AudioSource &operator=(const AudioSource &);
+
+    void set(
+        const audio_attributes_t *attr,
+        const AttributionSourceState& attributionSource,
+        uint32_t sampleRate,
+        uint32_t channels,
+        uint32_t outSampleRate = 0,
+        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+        audio_microphone_direction_t selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+        float selectedMicFieldDimension = MIC_FIELD_DIMENSION_NORMAL);
 };
 
 }  // namespace android
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..e8770ed 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -23,7 +23,6 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <camera/android/hardware/ICamera.h>
 #include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
 #include <camera/CameraParameters.h>
 #include <gui/BufferItemConsumer.h>
 #include <utils/List.h>
@@ -40,17 +39,6 @@
 class CameraSource : public MediaSource, public MediaBufferObserver {
 public:
     /**
-     * Factory method to create a new CameraSource using the current
-     * settings (such as video size, frame rate, color format, etc)
-     * from the default camera.
-     *
-     * @param clientName The package/process name of the client application.
-     *    This is used for permissions checking.
-     * @return NULL on error.
-     */
-    static CameraSource *Create(const String16 &clientName);
-
-    /**
      * Factory method to create a new CameraSource.
      *
      * @param camera the video input frame data source. If it is NULL,
@@ -89,8 +77,7 @@
                                           pid_t clientPid,
                                           Size videoSize,
                                           int32_t frameRate,
-                                          const sp<IGraphicBufferProducer>& surface,
-                                          bool storeMetaDataInVideoBuffers = true);
+                                          const sp<IGraphicBufferProducer>& surface);
 
     virtual ~CameraSource();
 
@@ -132,26 +119,6 @@
 protected:
 
     /**
-     * The class for listening to BnCameraRecordingProxyListener. This is used to receive video
-     * buffers in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA
-     * mode. When a frame is available, CameraSource::dataCallbackTimestamp() will be called.
-     */
-    class ProxyListener: public BnCameraRecordingProxyListener {
-    public:
-        ProxyListener(const sp<CameraSource>& source);
-        virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-                const sp<IMemory> &data);
-        virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-                native_handle_t* handle);
-        virtual void recordingFrameHandleCallbackTimestampBatch(
-                const std::vector<int64_t>& timestampsUs,
-                const std::vector<native_handle_t*>& handles);
-
-    private:
-        sp<CameraSource> mSource;
-    };
-
-    /**
      * The class for listening to BufferQueue's onFrameAvailable. This is used to receive video
      * buffers in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode. When a frame is available,
      * CameraSource::processBufferQueueFrame() will be called.
@@ -192,6 +159,7 @@
     int32_t  mColorFormat;
     int32_t  mEncoderFormat;
     int32_t  mEncoderDataSpace;
+    int32_t  mBufferDataSpace;
     status_t mInitCheck;
 
     sp<Camera>   mCamera;
@@ -213,32 +181,15 @@
     CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                  int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
                  Size videoSize, int32_t frameRate,
-                 const sp<IGraphicBufferProducer>& surface,
-                 bool storeMetaDataInVideoBuffers);
+                 const sp<IGraphicBufferProducer>& surface);
 
     virtual status_t startCameraRecording();
     virtual void releaseRecordingFrame(const sp<IMemory>& frame);
-    virtual void releaseRecordingFrameHandle(native_handle_t* handle);
-    // stagefright recorder not using this for now
-    virtual void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles);
 
     // Returns true if need to skip the current frame.
     // Called from dataCallbackTimestamp.
     virtual bool skipCurrentFrame(int64_t /*timestampUs*/) {return false;}
 
-    // Callback called when still camera raw data is available.
-    virtual void dataCallback(int32_t /*msgType*/, const sp<IMemory>& /*data*/) {}
-
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in BufferQueueListener.
     virtual void processBufferQueueFrame(BufferItem& buffer);
 
@@ -261,9 +212,6 @@
     int64_t mGlitchDurationThresholdUs;
     bool mCollectStats;
 
-    // The mode video buffers are received from camera. One of VIDEO_BUFFER_MODE_*.
-    int32_t mVideoBufferMode;
-
     static const uint32_t kDefaultVideoBufferCount = 32;
 
     /**
@@ -297,12 +245,12 @@
 
     status_t init(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     status_t initWithCameraAccess(
                   const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
                   int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
-                  Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers);
+                  Size videoSize, int32_t frameRate);
 
     // Initialize the buffer queue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     status_t initBufferQueue(uint32_t width, uint32_t height, uint32_t format,
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 533e33b..3c311cf 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -45,8 +45,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     virtual ~CameraSourceTimeLapse();
 
@@ -122,8 +121,7 @@
         Size videoSize,
         int32_t videoFrameRate,
         const sp<IGraphicBufferProducer>& surface,
-        int64_t timeBetweenTimeLapseFrameCaptureUs,
-        bool storeMetaDataInVideoBuffers = true);
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
 
     // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
     // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
@@ -137,33 +135,6 @@
     // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
     virtual bool skipCurrentFrame(int64_t timestampUs);
 
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::dataCallbackTimestamp()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV and
-    // VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode.
-    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
-            const sp<IMemory> &data);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
-            native_handle_t* handle);
-
-    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
-    // timestamp and set mSkipCurrentFrame.
-    // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
-    // CameraSource::recordingFrameHandleCallbackTimestampBatch()
-    // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
-    // the metadata is VideoNativeHandleMetadata.
-    virtual void recordingFrameHandleCallbackTimestampBatch(
-            const std::vector<int64_t>& timestampsUs,
-            const std::vector<native_handle_t*>& handles);
-
     // Process a buffer item received in CameraSource::BufferQueueListener.
     // This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
     virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -187,9 +158,6 @@
     // Wrapper to enter threadTimeLapseEntry()
     static void *ThreadTimeLapseWrapper(void *me);
 
-    // Creates a copy of source_data into a new memory of final type MemoryBase.
-    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
-
     CameraSourceTimeLapse(const CameraSourceTimeLapse &);
     CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
 };
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index dd6df90..efb2f86 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,11 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 
+struct CodecParameterDescriptor {
+    std::string name;
+    AMessage::Type type;
+};
+
 struct CodecBase : public AHandler, /* static */ ColorUtils {
     /**
      * This interface defines events firing from CodecBase back to MediaCodec.
@@ -173,6 +178,10 @@
          * Notify MediaCodec that output buffers are changed.
          */
         virtual void onOutputBuffersChanged() = 0;
+        /**
+         * Notify MediaCodec that the first tunnel frame is ready.
+         */
+        virtual void onFirstTunnelFrameReady() = 0;
     };
 
     /**
@@ -233,6 +242,64 @@
     virtual void signalSetParameters(const sp<AMessage> &msg) = 0;
     virtual void signalEndOfInputStream() = 0;
 
+    /**
+     * Query supported parameters from this instance, and fill |names| with the
+     * names of the parameters.
+     *
+     * \param names string vector to fill with supported parameters.
+     * \return OK if successful;
+     *         BAD_VALUE if |names| is null;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Fill |desc| with description of the parameter with |name|.
+     *
+     * \param name name of the parameter to describe
+     * \param desc pointer to CodecParameterDescriptor to be filled
+     * \return OK if successful;
+     *         BAD_VALUE if |desc| is null;
+     *         NAME_NOT_FOUND if |name| is not recognized by the component;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t describeParameter(
+            [[maybe_unused]] const std::string &name,
+            [[maybe_unused]] CodecParameterDescriptor *desc) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Subscribe to parameters in |names| and get output format change event
+     * when they change.
+     * Unrecognized / already subscribed parameters are ignored.
+     *
+     * \param names names of parameters to subscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t subscribeToParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+    /**
+     * Unsubscribe from parameters in |names| and no longer get
+     * output format change event when they change.
+     * Unrecognized / already unsubscribed parameters are ignored.
+     *
+     * \param names names of parameters to unsubscribe
+     * \return OK if successful;
+     *         INVALID_OPERATION if already released;
+     *         ERROR_UNSUPPORTED if not supported.
+     */
+    virtual status_t unsubscribeFromParameters(
+            [[maybe_unused]] const std::vector<std::string> &names) {
+        return ERROR_UNSUPPORTED;
+    }
+
     typedef CodecBase *(*CreateCodecFunc)(void);
     typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
 
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 501cf2c..7c3eca6 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -46,7 +46,7 @@
 
     // Returns INVALID_OPERATION if there is no source or track.
     virtual status_t start(MetaData *param = NULL);
-    virtual status_t stop() { return reset(); }
+    virtual status_t stop();
     virtual status_t pause();
     virtual bool reachedEOS();
     virtual status_t dump(int fd, const Vector<String16>& args);
@@ -97,6 +97,7 @@
     sp<MetaData> mStartMeta;
     status_t mInitCheck;
     bool mIsRealTimeRecording;
+    bool mIsBackgroundMode;
     bool mUse4ByteNalLength;
     bool mIsFileSizeLimitExplicitlyRequested;
     bool mPaused;
@@ -106,6 +107,7 @@
     off64_t mOffset;
     off64_t mPreAllocateFileEndOffset;  //End of file offset during preallocation.
     off64_t mMdatOffset;
+    off64_t mMaxOffsetAppend; // File offset written up to while appending.
     off64_t mMdatEndOffset;  // End offset of mdat atom.
     uint8_t *mInMemoryCache;
     off64_t mInMemoryCacheOffset;
@@ -126,6 +128,7 @@
     bool mWriteSeekErr;
     bool mFallocateErr;
     bool mPreAllocationEnabled;
+    status_t mResetStatus;
     // Queue to hold top long write durations
     std::priority_queue<std::chrono::microseconds, std::vector<std::chrono::microseconds>,
                         std::greater<std::chrono::microseconds>> mWriteDurationPQ;
@@ -135,7 +138,9 @@
     sp<AHandlerReflector<MPEG4Writer> > mReflector;
 
     Mutex mLock;
+    // Serialize reset calls from client of MPEG4Writer and MP4WtrCtrlHlpLooper.
     std::mutex mResetMutex;
+    // Serialize preallocation calls from different track threads.
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
@@ -271,6 +276,10 @@
     // By default, real time recording is on.
     bool isRealTimeRecording() const;
 
+    // Return whether the writer is used in background mode for media
+    // transcoding.
+    bool isBackgroundMode() const;
+
     void lock();
     void unlock();
 
@@ -304,7 +313,7 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
-    void finishCurrentSession();
+    status_t finishCurrentSession();
 
     void addDeviceMeta();
     void writeHdlr(const char *handlerType);
@@ -337,7 +346,7 @@
     void sendSessionSummary();
     status_t release();
     status_t switchFd();
-    status_t reset(bool stopSource = true);
+    status_t reset(bool stopSource = true, bool waitForAnyPreviousCallToComplete = true);
 
     static uint32_t getMpeg4Time();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaAdapter.h b/media/libstagefright/include/media/stagefright/MediaAdapter.h
index 177a9e9..c7d7765 100644
--- a/media/libstagefright/include/media/stagefright/MediaAdapter.h
+++ b/media/libstagefright/include/media/stagefright/MediaAdapter.h
@@ -58,6 +58,7 @@
 
 private:
     Mutex mAdapterLock;
+    std::mutex mBufferGatingMutex;
     // Make sure the read() wait for the incoming buffer.
     Condition mBufferReadCond;
     // Make sure the pushBuffer() wait for the current buffer consumed.
diff --git a/media/libstagefright/include/media/stagefright/MediaAppender.h b/media/libstagefright/include/media/stagefright/MediaAppender.h
new file mode 100644
index 0000000..c2f6f10
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaAppender.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_APPENDER_H
+#define ANDROID_MEDIA_APPENDER_H
+
+#include <media/stagefright/MediaMuxer.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <stack>
+
+namespace android {
+
+struct MediaAppender : public MediaMuxerBase {
+public:
+    enum AppendMode {
+        APPEND_MODE_FIRST = 0,
+        APPEND_MODE_IGNORE_LAST_VIDEO_GOP = APPEND_MODE_FIRST,
+        APPEND_MODE_ADD_TO_EXISTING_DATA = 1,
+        APPEND_MODE_LAST = APPEND_MODE_ADD_TO_EXISTING_DATA,
+    };
+
+    static sp<MediaAppender> create(int fd, AppendMode mode);
+
+    virtual ~MediaAppender();
+
+    status_t init();
+
+    status_t start();
+
+    status_t stop();
+
+    status_t writeSampleData(const sp<ABuffer>& buffer, size_t trackIndex, int64_t timeUs,
+                             uint32_t flags);
+
+    status_t setOrientationHint(int degrees);
+
+    status_t setLocation(int latitude, int longitude);
+
+    ssize_t addTrack(const sp<AMessage> &format);
+
+    ssize_t getTrackCount();
+
+    sp<AMessage> getTrackFormat(size_t idx);
+
+private:
+    MediaAppender(int fd, AppendMode mode);
+
+    int mFd;
+    MediaMuxer::OutputFormat mFormat;
+    AppendMode mMode;
+    sp<NuMediaExtractor> mExtractor;
+    sp<MediaMuxer> mMuxer;
+    size_t mTrackCount;
+    // Map track index given by extractor to the ones received from muxer.
+    std::map<size_t, ssize_t> mTrackIndexMap;
+    // Count of the samples in each track, indexed by extractor track ids.
+    std::vector<size_t> mSampleCountVect;
+    // Extractor track index of samples.
+    std::vector<size_t> mSampleIndexVect;
+    // Track format indexed by extractor track ids.
+    std::map<size_t, sp<AMessage>> mFmtIndexMap;
+    // Size of samples.
+    std::vector<size_t> mSampleSizeVect;
+    // Presentation time stamp of samples.
+    std::vector<int64_t> mSampleTimeVect;
+    // Timestamp of last sample of tracks.
+    std::vector<int64_t> mMaxTimestampVect;
+    // Metadata of samples.
+    std::vector<sp<MetaData>> mSampleMetaVect;
+    std::mutex mMutex;
+    // Timestamp of the last sync sample of tracks.
+    std::vector<int64_t> mLastSyncSampleTimeVect;
+
+    struct sampleDataInfo;
+    std::vector<sampleDataInfo> mSDI;
+
+    enum : int {
+        UNINITIALIZED,
+        INITIALIZED,
+        STARTED,
+        STOPPED,
+        ERROR,
+    } mState GUARDED_BY(mMutex);
+};
+
+}  // namespace android
+#endif  // ANDROID_MEDIA_APPENDER_H
\ No newline at end of file
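
MediaAppender is a new wrapper that re-opens an existing recording, recovers its
tracks via NuMediaExtractor, and muxes additional samples onto the end. A rough
usage sketch under those assumptions (error handling trimmed, and the source of
new samples left as a comment because it is application-specific):

    #include <fcntl.h>
    #include <unistd.h>
    #include <utils/Errors.h>
    #include <media/stagefright/MediaAppender.h>

    using namespace android;

    status_t appendToRecording(const char *path) {
        int fd = open(path, O_RDWR);
        if (fd < 0) {
            return UNKNOWN_ERROR;
        }
        sp<MediaAppender> appender =
                MediaAppender::create(fd, MediaAppender::APPEND_MODE_ADD_TO_EXISTING_DATA);
        if (appender == nullptr || appender->init() != OK) {
            close(fd);
            return UNKNOWN_ERROR;
        }
        // init() recovers the existing tracks and their formats
        // (getTrackCount() / getTrackFormat() can be used to inspect them).
        appender->start();
        // For each new sample:
        //   appender->writeSampleData(buffer, trackIndex, timeUs, flags);
        status_t err = appender->stop();
        close(fd);
        return err;
    }
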
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index 9145b63..2c03f27 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -46,7 +46,7 @@
     explicit MediaBuffer(size_t size);
 
     explicit MediaBuffer(const sp<ABuffer> &buffer);
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
     MediaBuffer(const sp<IMemory> &mem) :
          // TODO: Using unsecurePointer() has some associated security pitfalls
          //       (see declaration for details).
@@ -97,7 +97,7 @@
     }
 
     virtual int remoteRefcount() const {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
          // TODO: Using unsecurePointer() has some associated security pitfalls
          //       (see declaration for details).
          //       Either document why it is safe in this case or address the
@@ -114,7 +114,7 @@
 
     // returns old value
     int addRemoteRefcount(int32_t value) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
           // TODO: Using unsecurePointer() has some associated security pitfalls
          //       (see declaration for details).
          //       Either document why it is safe in this case or address the
@@ -132,7 +132,7 @@
     }
 
     static bool isDeadObject(const sp<IMemory> &memory) {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
          // TODO: Using unsecurePointer() has some associated security pitfalls
          //       (see declaration for details).
          //       Either document why it is safe in this case or address the
@@ -235,7 +235,7 @@
     };
 
     inline SharedControl *getSharedControl() const {
-#ifndef NO_IMEMORY
+#if !defined(NO_IMEMORY) && !defined(__ANDROID_APEX__)
          // TODO: Using unsecurePointer() has some associated security pitfalls
          //       (see declaration for details).
          //       Either document why it is safe in this case or address the
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f7e6c27..d372140 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -50,6 +50,7 @@
 struct BatteryChecker;
 class BufferChannelBase;
 struct CodecBase;
+struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
 class MediaCodecBuffer;
@@ -57,6 +58,7 @@
 struct PersistentSurface;
 class SoftwareRenderer;
 class Surface;
+class PlaybackDurationAccumulator;
 namespace hardware {
 namespace cas {
 namespace native {
@@ -81,6 +83,13 @@
         BUFFER_FLAG_MUXER_DATA    = 16,
     };
 
+    enum CVODegree {
+        CVO_DEGREE_0   = 0,
+        CVO_DEGREE_90  = 90,
+        CVO_DEGREE_180 = 180,
+        CVO_DEGREE_270 = 270,
+    };
+
     enum {
         CB_INPUT_AVAILABLE = 1,
         CB_OUTPUT_AVAILABLE = 2,
@@ -96,6 +105,10 @@
             const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err = NULL,
             pid_t pid = kNoPid, uid_t uid = kNoUid);
 
+    static sp<MediaCodec> CreateByType(
+            const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err,
+            pid_t pid, uid_t uid, sp<AMessage> format);
+
     static sp<MediaCodec> CreateByComponentName(
             const sp<ALooper> &looper, const AString &name, status_t *err = NULL,
             pid_t pid = kNoPid, uid_t uid = kNoUid);
@@ -121,6 +134,8 @@
 
     status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
 
+    status_t setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify);
+
     status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
 
     status_t setInputSurface(const sp<PersistentSurface> &surface);
@@ -239,6 +254,11 @@
 
     status_t setParameters(const sp<AMessage> &params);
 
+    status_t querySupportedVendorParameters(std::vector<std::string> *names);
+    status_t describeParameter(const std::string &name, CodecParameterDescriptor *desc);
+    status_t subscribeToVendorParameters(const std::vector<std::string> &names);
+    status_t unsubscribeFromVendorParameters(const std::vector<std::string> &names);
+
     // Create a MediaCodec notification message from a list of rendered or dropped render infos
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
@@ -354,6 +374,30 @@
         bool mOwnedByClient;
     };
 
+    // This type is used to track the tunnel mode video peek state machine:
+    //
+    // DisabledNoBuffer -> EnabledNoBuffer  when tunnel-peek = true
+    // DisabledQueued   -> EnabledQueued    when tunnel-peek = true
+    // DisabledNoBuffer -> DisabledQueued   when first frame queued
+    // EnabledNoBuffer  -> DisabledNoBuffer when tunnel-peek = false
+    // EnabledQueued    -> DisabledQueued   when tunnel-peek = false
+    // EnabledNoBuffer  -> EnabledQueued    when first frame queued
+    // DisabledNoBuffer -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // DisabledQueued   -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledNoBuffer  -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // EnabledQueued    -> BufferDecoded    when kWhatFirstTunnelFrameReady
+    // BufferDecoded    -> BufferRendered   when kWhatFrameRendered
+    // <all states>     -> EnabledNoBuffer  when flush
+    // <all states>     -> EnabledNoBuffer  when stop then configure then start
+    enum struct TunnelPeekState {
+        kDisabledNoBuffer,
+        kEnabledNoBuffer,
+        kDisabledQueued,
+        kEnabledQueued,
+        kBufferDecoded,
+        kBufferRendered,
+    };
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -366,7 +410,10 @@
     AString mOwnerName;
     sp<MediaCodecInfo> mCodecInfo;
     sp<AReplyToken> mReplyID;
+    std::string mLastReplyOrigin;
+    std::vector<sp<AMessage>> mDeferredMessages;
     uint32_t mFlags;
+    int64_t mPresentationTimeUs = 0;
     status_t mStickyError;
     sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
@@ -378,16 +425,21 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
+    void updateTunnelPeek(const sp<AMessage> &msg);
+    void updatePlaybackDuration(const sp<AMessage> &msg);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
     sp<AMessage> mCallback;
     sp<AMessage> mOnFrameRenderedNotification;
     sp<AMessage> mAsyncReleaseCompleteNotification;
+    sp<AMessage> mOnFirstTunnelFrameReadyNotification;
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
     bool mIsVideo;
+    AString mLogSessionId;
     int32_t mVideoWidth;
     int32_t mVideoHeight;
     int32_t mRotationDegrees;
@@ -399,6 +451,17 @@
     // configure parameter
     sp<AMessage> mConfigureMsg;
 
+    // rewrites the format description during configure() for encoding.
+    // format and flags as they exist within configure()
+    // the (possibly) updated format is returned in place.
+    status_t shapeMediaFormat(
+            const sp<AMessage> &format,
+            uint32_t flags);
+
+    // populate the format shaper library with information for this codec encoding
+    // for the indicated media type
+    status_t setupFormatShaper(AString mediaType);
+
     // Used only to synchronize asynchronous getBufferAndFormat
     // across all the other (synchronous) buffer state change
     // operations, such as de/queueIn/OutputBuffer, start and
@@ -416,6 +479,11 @@
 
     sp<ICrypto> mCrypto;
 
+    int32_t mTunneledInputWidth;
+    int32_t mTunneledInputHeight;
+    bool mTunneled;
+    TunnelPeekState mTunnelPeekState;
+
     sp<IDescrambler> mDescrambler;
 
     List<sp<ABuffer> > mCSD;
@@ -428,13 +496,20 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid);
+    PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
+    bool mIsSurfaceToScreen;
+
+    MediaCodec(
+            const sp<ALooper> &looper, pid_t pid, uid_t uid,
+            std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase = nullptr,
+            std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo = nullptr);
 
     static sp<CodecBase> GetCodecBase(const AString &name, const char *owner = nullptr);
 
     static status_t PostAndAwaitResponse(
             const sp<AMessage> &msg, sp<AMessage> *response);
 
+    void PostReplyWithError(const sp<AMessage> &msg, int32_t err);
     void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
 
     status_t init(const AString &name);
@@ -445,6 +520,7 @@
     size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
     status_t onQueueInputBuffer(const sp<AMessage> &msg);
     status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
+    BufferInfo *peekNextPortBuffer(int32_t portIndex);
     ssize_t dequeuePortBuffer(int32_t portIndex);
 
     status_t getBufferAndFormat(
@@ -476,6 +552,7 @@
     status_t onSetParameters(const sp<AMessage> &params);
 
     status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
+    void handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer);
     bool isExecuting() const;
 
     uint64_t getGraphicBufferSize();
@@ -484,6 +561,9 @@
     bool hasPendingBuffer(int portIndex);
     bool hasPendingBuffer();
 
+    void postPendingRepliesAndDeferredMessages(std::string origin, status_t err = OK);
+    void postPendingRepliesAndDeferredMessages(std::string origin, const sp<AMessage> &response);
+
     /* called to get the last codec error when the sticky flag is set.
      * if no such codec error is found, returns UNKNOWN_ERROR.
      */
@@ -506,6 +586,15 @@
     std::deque<BufferFlightTiming_t> mBuffersInFlight;
     Mutex mLatencyLock;
     int64_t mLatencyUnknown;    // buffers for which we couldn't calculate latency
+
+    Mutex mOutputStatsLock;
+    int64_t mBytesEncoded = 0;
+    int64_t mEarliestEncodedPtsUs = INT64_MAX;
+    int64_t mLatestEncodedPtsUs = INT64_MIN;
+    int64_t mFramesEncoded = 0;
+    int64_t mBytesInput = 0;
+    int64_t mFramesInput = 0;
+
     int64_t mNumLowLatencyEnables;  // how many times low latency mode is enabled
     int64_t mNumLowLatencyDisables;  // how many times low latency mode is disabled
     bool mIsLowLatencyModeOn;  // is low latency mode on currently
@@ -521,8 +610,8 @@
 
     sp<BatteryChecker> mBatteryChecker;
 
-    void statsBufferSent(int64_t presentationUs);
-    void statsBufferReceived(int64_t presentationUs);
+    void statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
+    void statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
 
     enum {
         // the default shape of our latency histogram buckets
@@ -569,6 +658,10 @@
 
     Histogram mLatencyHist;
 
+    std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
+    std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
+    friend class MediaTestHelper;
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
 
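
The TunnelPeekState enum documented above is a small state machine keyed on the
"tunnel-peek" parameter, first-frame-queued, first-frame-ready and
frame-rendered events. A standalone sketch of just the parameter-toggle
transitions from that table follows; the enum is redeclared locally because the
real one is private to MediaCodec, and states not mentioned by the table
(BufferDecoded, BufferRendered) are left unchanged here as an assumption.

    enum class TunnelPeekState {
        kDisabledNoBuffer,
        kEnabledNoBuffer,
        kDisabledQueued,
        kEnabledQueued,
        kBufferDecoded,
        kBufferRendered,
    };

    // Transition taken when the "tunnel-peek" parameter is set to true/false.
    TunnelPeekState onTunnelPeekToggled(TunnelPeekState s, bool enabled) {
        switch (s) {
            case TunnelPeekState::kDisabledNoBuffer:
                return enabled ? TunnelPeekState::kEnabledNoBuffer : s;
            case TunnelPeekState::kDisabledQueued:
                return enabled ? TunnelPeekState::kEnabledQueued : s;
            case TunnelPeekState::kEnabledNoBuffer:
                return enabled ? s : TunnelPeekState::kDisabledNoBuffer;
            case TunnelPeekState::kEnabledQueued:
                return enabled ? s : TunnelPeekState::kDisabledQueued;
            default:
                return s;  // decoded/rendered: toggle has no documented effect
        }
    }
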
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 178d334..4237e8c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -557,12 +557,14 @@
 }
 
 constexpr int32_t BITRATE_MODE_CBR = 2;
+constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 constexpr int32_t BITRATE_MODE_CQ = 0;
 constexpr int32_t BITRATE_MODE_VBR = 1;
 
 inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
     switch (i) {
         case BITRATE_MODE_CBR:  return "CBR";
+        case BITRATE_MODE_CBR_FD: return "CBR_FD";
         case BITRATE_MODE_CQ:   return "CQ";
         case BITRATE_MODE_VBR:  return "VBR";
         default:                return def;
@@ -619,6 +621,7 @@
 constexpr int32_t COLOR_FormatYUV422SemiPlanar        = 24;
 constexpr int32_t COLOR_FormatYUV444Flexible          = 0x7F444888;
 constexpr int32_t COLOR_FormatYUV444Interleaved       = 29;
+constexpr int32_t COLOR_FormatYUVP010                 = 54;
 constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar   = 0x7fa30c00;
 constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
 
@@ -683,6 +686,7 @@
 constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
 constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
 constexpr char FEATURE_PartialFrame[] = "partial-frame";
+constexpr char FEATURE_QpBounds[] = "qp-bounds";
 constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
 constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
 
@@ -744,6 +748,7 @@
 constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
 constexpr char KEY_AAC_PROFILE[] = "aac-profile";
 constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
+constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
 constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
 constexpr char KEY_BIT_RATE[] = "bitrate";
 constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
@@ -751,7 +756,7 @@
 constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
 constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
 constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count";
+constexpr char KEY_CHANNEL_COUNT[] = "channel-count";   // value N, equivalent to the range 1..N
 constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
 constexpr char KEY_COLOR_FORMAT[] = "color-format";
 constexpr char KEY_COLOR_RANGE[] = "color-range";
@@ -792,7 +797,7 @@
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
-constexpr char KEY_PREPEND_HEADERS_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
+constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
 constexpr char KEY_PRIORITY[] = "priority";
 constexpr char KEY_PROFILE[] = "profile";
 constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
@@ -806,6 +811,14 @@
 constexpr char KEY_TILE_HEIGHT[] = "tile-height";
 constexpr char KEY_TILE_WIDTH[] = "tile-width";
 constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
 constexpr char KEY_WIDTH[] = "width";
 
 // from MediaCodec.java
@@ -847,9 +860,9 @@
 constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
 constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
 constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
+constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
 constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
 
 }
 
 #endif  // MEDIA_CODEC_CONSTANTS_H_
-
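The new KEY_VIDEO_QP_* constants mirror the framework-side QP-bound keys. As a hedged illustration only (the key semantics are inferred from the string names and the values are arbitrary, not taken from this patch), an encoder format might bound the quantization parameters like this:

```
// Illustrative sketch: clamp encoder QP using the new keys (values are arbitrary).
sp<AMessage> format = new AMessage;
format->setString("mime", "video/avc");
format->setInt32(KEY_BIT_RATE, 2000000);
format->setInt32(KEY_VIDEO_QP_MIN, 10);   // assumed lower bound
format->setInt32(KEY_VIDEO_QP_MAX, 40);   // assumed upper bound
```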
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index e681d25..3cf455c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -75,6 +75,16 @@
             uint32_t flags,
             Vector<AString> *matchingCodecs);
 
+    // Overload that takes an optional format to further refine the matching codecs.
+    static void findMatchingCodecs(
+            const char *mime,
+            bool createEncoder,
+            uint32_t flags,
+            sp<AMessage> format,
+            Vector<AString> *matchingCodecs);
+
+    static bool codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info, sp<AMessage> format);
+
     static bool isSoftwareCodec(const AString &componentName);
 
 private:
@@ -88,7 +98,7 @@
     static sp<IMediaCodecList> sCodecList;
     static sp<IMediaCodecList> sRemoteList;
 
-    status_t mInitCheck;
+    status_t mInitCheck{NO_INIT};
 
     sp<AMessage> mGlobalSettings;
     std::vector<sp<MediaCodecInfo> > mCodecInfos;
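The new findMatchingCodecs overload above accepts an optional format so callers can narrow the candidate list to codecs that can actually handle it. A minimal caller sketch, assuming an AVC decoder search for a 4K stream (the mime string, format keys, and values here are illustrative assumptions, not defined by this patch):

```
// Hypothetical usage sketch, not part of the patch.
sp<AMessage> format = new AMessage;
format->setString("mime", "video/avc");
format->setInt32("width", 3840);
format->setInt32("height", 2160);

Vector<AString> matchingCodecs;
MediaCodecList::findMatchingCodecs(
        "video/avc",
        false /* createEncoder */,
        0 /* flags */,
        format,
        &matchingCodecs);
```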
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
index f53b23e..bf85d7e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
@@ -19,7 +19,6 @@
 #define MEDIA_CODEC_LIST_WRITER_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaCodecListWriter.h>
 #include <media/MediaCodecInfo.h>
 
 #include <utils/Errors.h>
@@ -65,6 +64,7 @@
     std::vector<sp<MediaCodecInfo>> mCodecInfos;
 
     friend struct MediaCodecList;
+    friend class MediaTestHelper;
 };
 
 /**
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecSource.h b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
index 2f98af1..0f7b535 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecSource.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecSource.h
@@ -64,12 +64,15 @@
 
     // MediaBufferObserver
     virtual void signalBufferReturned(MediaBufferBase *buffer);
+    virtual status_t setEncodingBitrate(int32_t bitRate /* bps */);
 
     // for AHandlerReflector
     void onMessageReceived(const sp<AMessage> &msg);
 
 
 
+    status_t requestIDRFrame();
+
 protected:
     virtual ~MediaCodecSource();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index 5418f10..d1df2ca 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -75,7 +75,32 @@
     ERROR_DRM_RESOURCE_CONTENTION            = DRM_ERROR_BASE - 14,
     ERROR_DRM_SESSION_LOST_STATE             = DRM_ERROR_BASE - 15,
     ERROR_DRM_INVALID_STATE                  = DRM_ERROR_BASE - 16,
-    ERROR_DRM_LAST_USED_ERRORCODE            = DRM_ERROR_BASE - 16,
+
+    // New in S / drm@1.4:
+    ERROR_DRM_CERTIFICATE_MALFORMED          = DRM_ERROR_BASE - 17,
+    ERROR_DRM_CERTIFICATE_MISSING            = DRM_ERROR_BASE - 18,
+    ERROR_DRM_CRYPTO_LIBRARY                 = DRM_ERROR_BASE - 19,
+    ERROR_DRM_GENERIC_OEM                    = DRM_ERROR_BASE - 20,
+    ERROR_DRM_GENERIC_PLUGIN                 = DRM_ERROR_BASE - 21,
+    ERROR_DRM_INIT_DATA                      = DRM_ERROR_BASE - 22,
+    ERROR_DRM_KEY_NOT_LOADED                 = DRM_ERROR_BASE - 23,
+    ERROR_DRM_LICENSE_PARSE                  = DRM_ERROR_BASE - 24,
+    ERROR_DRM_LICENSE_POLICY                 = DRM_ERROR_BASE - 25,
+    ERROR_DRM_LICENSE_RELEASE                = DRM_ERROR_BASE - 26,
+    ERROR_DRM_LICENSE_REQUEST_REJECTED       = DRM_ERROR_BASE - 27,
+    ERROR_DRM_LICENSE_RESTORE                = DRM_ERROR_BASE - 28,
+    ERROR_DRM_LICENSE_STATE                  = DRM_ERROR_BASE - 29,
+    ERROR_DRM_MEDIA_FRAMEWORK                = DRM_ERROR_BASE - 30,
+    ERROR_DRM_PROVISIONING_CERTIFICATE       = DRM_ERROR_BASE - 31,
+    ERROR_DRM_PROVISIONING_CONFIG            = DRM_ERROR_BASE - 32,
+    ERROR_DRM_PROVISIONING_PARSE             = DRM_ERROR_BASE - 33,
+    ERROR_DRM_PROVISIONING_REQUEST_REJECTED  = DRM_ERROR_BASE - 34,
+    ERROR_DRM_PROVISIONING_RETRY             = DRM_ERROR_BASE - 35,
+    ERROR_DRM_SECURE_STOP_RELEASE            = DRM_ERROR_BASE - 36,
+    ERROR_DRM_STORAGE_READ                   = DRM_ERROR_BASE - 37,
+    ERROR_DRM_STORAGE_WRITE                  = DRM_ERROR_BASE - 38,
+    ERROR_DRM_ZERO_SUBSAMPLES                = DRM_ERROR_BASE - 39,
+    ERROR_DRM_LAST_USED_ERRORCODE            = ERROR_DRM_ZERO_SUBSAMPLES,
 
     ERROR_DRM_VENDOR_MAX                     = DRM_ERROR_BASE - 500,
     ERROR_DRM_VENDOR_MIN                     = DRM_ERROR_BASE - 999,
@@ -138,6 +163,57 @@
             || (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
 }
 
+static inline std::string StrCryptoError(status_t err) {
+#define STATUS_CASE(STATUS) \
+    case STATUS:            \
+        return #STATUS
+
+    switch (err) {
+        STATUS_CASE(ERROR_DRM_UNKNOWN);
+        STATUS_CASE(ERROR_DRM_NO_LICENSE);
+        STATUS_CASE(ERROR_DRM_LICENSE_EXPIRED);
+        STATUS_CASE(ERROR_DRM_SESSION_NOT_OPENED);
+        STATUS_CASE(ERROR_DRM_DECRYPT_UNIT_NOT_INITIALIZED);
+        STATUS_CASE(ERROR_DRM_DECRYPT);
+        STATUS_CASE(ERROR_DRM_CANNOT_HANDLE);
+        STATUS_CASE(ERROR_DRM_TAMPER_DETECTED);
+        STATUS_CASE(ERROR_DRM_NOT_PROVISIONED);
+        STATUS_CASE(ERROR_DRM_DEVICE_REVOKED);
+        STATUS_CASE(ERROR_DRM_RESOURCE_BUSY);
+        STATUS_CASE(ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION);
+        STATUS_CASE(ERROR_DRM_INSUFFICIENT_SECURITY);
+        STATUS_CASE(ERROR_DRM_FRAME_TOO_LARGE);
+        STATUS_CASE(ERROR_DRM_RESOURCE_CONTENTION);
+        STATUS_CASE(ERROR_DRM_SESSION_LOST_STATE);
+        STATUS_CASE(ERROR_DRM_INVALID_STATE);
+        STATUS_CASE(ERROR_DRM_CERTIFICATE_MALFORMED);
+        STATUS_CASE(ERROR_DRM_CERTIFICATE_MISSING);
+        STATUS_CASE(ERROR_DRM_CRYPTO_LIBRARY);
+        STATUS_CASE(ERROR_DRM_GENERIC_OEM);
+        STATUS_CASE(ERROR_DRM_GENERIC_PLUGIN);
+        STATUS_CASE(ERROR_DRM_INIT_DATA);
+        STATUS_CASE(ERROR_DRM_KEY_NOT_LOADED);
+        STATUS_CASE(ERROR_DRM_LICENSE_PARSE);
+        STATUS_CASE(ERROR_DRM_LICENSE_POLICY);
+        STATUS_CASE(ERROR_DRM_LICENSE_RELEASE);
+        STATUS_CASE(ERROR_DRM_LICENSE_REQUEST_REJECTED);
+        STATUS_CASE(ERROR_DRM_LICENSE_RESTORE);
+        STATUS_CASE(ERROR_DRM_LICENSE_STATE);
+        STATUS_CASE(ERROR_DRM_MEDIA_FRAMEWORK);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_CERTIFICATE);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_CONFIG);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_PARSE);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_REQUEST_REJECTED);
+        STATUS_CASE(ERROR_DRM_PROVISIONING_RETRY);
+        STATUS_CASE(ERROR_DRM_SECURE_STOP_RELEASE);
+        STATUS_CASE(ERROR_DRM_STORAGE_READ);
+        STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
+        STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
+#undef STATUS_CASE
+    }
+    return statusToString(err);
+}
+
 }  // namespace android
 
 #endif  // MEDIA_ERRORS_H_
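StrCryptoError() maps a DRM status_t to its symbolic name and falls back to statusToString() for anything outside the table. An illustrative logging sketch (someDrmOperation() is a hypothetical stand-in, not an API from this patch):

```
// Illustrative only: surface a readable DRM error name in a log message.
status_t err = someDrmOperation();  // hypothetical call returning a DRM status
if (err != OK) {
    ALOGE("DRM operation failed: %s (%d)", StrCryptoError(err).c_str(), err);
}
```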
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index a1b9465..e97a65e 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -22,7 +22,12 @@
 #include <utils/Vector.h>
 #include <utils/threads.h>
 
+#include <map>
+#include <mutex>
+#include <vector>
+
 #include "media/stagefright/foundation/ABase.h"
+#include "MediaMuxerBase.h"
 
 namespace android {
 
@@ -33,6 +38,7 @@
 struct MediaSource;
 class MetaData;
 struct MediaWriter;
+struct NuMediaExtractor;
 
 // MediaMuxer is used to mux multiple tracks into a video. Currently, we only
 // support a mp4 file as the output.
@@ -40,19 +46,8 @@
 // Constructor -> addTrack+ -> start -> writeSampleData+ -> stop
 // If muxing operation need to be cancelled, the app is responsible for
 // deleting the output file after stop.
-struct MediaMuxer : public RefBase {
+struct MediaMuxer : public MediaMuxerBase {
 public:
-    // Please update media/java/android/media/MediaMuxer.java if the
-    // OutputFormat is updated.
-    enum OutputFormat {
-        OUTPUT_FORMAT_MPEG_4      = 0,
-        OUTPUT_FORMAT_WEBM        = 1,
-        OUTPUT_FORMAT_THREE_GPP   = 2,
-        OUTPUT_FORMAT_HEIF        = 3,
-        OUTPUT_FORMAT_OGG         = 4,
-        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
-    };
-
     // Construct the muxer with the file descriptor. Note that the MediaMuxer
     // will close this file at stop().
     MediaMuxer(int fd, OutputFormat format);
@@ -117,10 +112,25 @@
     status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
                              int64_t timeUs, uint32_t flags) ;
 
+    /**
+     * Gets the number of tracks added successfully. Should be called in the
+     * INITIALIZED (after construction) or STARTED (after start()) state.
+     * @return the number of tracks, or -1 if called in the wrong state.
+     */
+    ssize_t getTrackCount();
+
+    /**
+     * Gets the format of a track by its index.
+     * @param idx index of the track whose format is wanted.
+     * @return smart pointer to an AMessage containing the format details.
+     */
+    sp<AMessage> getTrackFormat(size_t idx);
+
 private:
     const OutputFormat mFormat;
     sp<MediaWriter> mWriter;
     Vector< sp<MediaAdapter> > mTrackList;  // Each track has its MediaAdapter.
+    Vector< sp<AMessage> > mFormatList; // Format of each track.
     sp<MetaData> mFileMeta;  // Metadata for the whole file.
     Mutex mMuxerLock;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxerBase.h b/media/libstagefright/include/media/stagefright/MediaMuxerBase.h
new file mode 100644
index 0000000..f02d510
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaMuxerBase.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_MUXER_BASE_H_
+#define MEDIA_MUXER_BASE_H_
+
+#include <utils/RefBase.h>
+#include "media/stagefright/foundation/ABase.h"
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+// MediaMuxer is used to mux multiple tracks into a video. Currently, only
+// an MP4 file is supported as the output.
+// The expected calling order of the functions is:
+// Constructor -> addTrack+ -> start -> writeSampleData+ -> stop
+// If the muxing operation needs to be cancelled, the app is responsible for
+// deleting the output file after stop().
+struct MediaMuxerBase : public RefBase {
+public:
+    // Please update media/java/android/media/MediaMuxer.java if the
+    // OutputFormat is updated.
+    enum OutputFormat {
+        OUTPUT_FORMAT_MPEG_4      = 0,
+        OUTPUT_FORMAT_WEBM        = 1,
+        OUTPUT_FORMAT_THREE_GPP   = 2,
+        OUTPUT_FORMAT_HEIF        = 3,
+        OUTPUT_FORMAT_OGG         = 4,
+        OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
+    };
+
+    // Default constructor. Concrete muxers (e.g. MediaMuxer) take the output
+    // file descriptor and are responsible for closing it at stop().
+    MediaMuxerBase() {};
+
+    virtual ~MediaMuxerBase() {};
+
+    /**
+     * Add a track with its format information. This should be
+     * called before start().
+     * @param format the track's format.
+     * @return the track's index, or a negative number on error.
+     */
+    virtual ssize_t addTrack(const sp<AMessage> &format) = 0;
+
+    /**
+     * Start muxing. Make sure all the tracks have been added before
+     * calling this.
+     */
+    virtual status_t start() = 0;
+
+    /**
+     * Set the orientation hint.
+     * @param degrees The rotation degrees. It has to be either 0,
+     *                90, 180 or 270.
+     * @return OK if no error.
+     */
+    virtual status_t setOrientationHint(int degrees) = 0;
+
+    /**
+     * Set the location.
+     * @param latitude The latitude in degrees x 1000. Its value must be in the range
+     * [-900000, 900000].
+     * @param longitude The longitude in degrees x 1000. Its value must be in the range
+     * [-1800000, 1800000].
+     * @return OK if no error.
+     */
+    virtual status_t setLocation(int latitude, int longitude) = 0;
+
+    /**
+     * Stop muxing.
+     * This method is a blocking call. Depending on how much data
+     * is buffered internally, stopping the muxer may take a long
+     * time, so launching this call from the UI thread is not
+     * recommended.
+     * @return OK if no error.
+     */
+    virtual status_t stop() = 0;
+
+    /**
+     * Send a sample buffer for muxing.
+     * The buffer can be reused once this method returns. Typically,
+     * this function won't block for very long, so there is no
+     * need to call it from a separate thread just to push
+     * a buffer.
+     * @param buffer the incoming sample buffer.
+     * @param trackIndex the buffer's track index number.
+     * @param timeUs the buffer's timestamp.
+     * @param flags the only supported flag for now is
+     *              MediaCodec::BUFFER_FLAG_SYNCFRAME.
+     * @return OK if no error.
+     */
+    virtual status_t writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
+                             int64_t timeUs, uint32_t flags) = 0;
+
+    /**
+     * Gets the number of tracks added successfully. Should be called in the
+     * INITIALIZED (after construction) or STARTED (after start()) state.
+     * @return the number of tracks, or -1 if called in the wrong state.
+     */
+    virtual ssize_t getTrackCount() = 0;
+
+    /**
+     * Gets the format of a track by its index.
+     * @param idx index of the track whose format is wanted.
+     * @return smart pointer to an AMessage containing the format details.
+     */
+    virtual sp<AMessage> getTrackFormat(size_t idx) = 0;
+
+private:
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaMuxerBase);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_MUXER_BASE_H_
+
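MediaMuxerBase pins down the calling contract MediaMuxer already documented: Constructor -> addTrack+ -> start -> writeSampleData+ -> stop. A minimal sketch of that sequence against the concrete MediaMuxer, assuming the caller already has an open fd, a trackFormat AMessage, an ABuffer buffer, and a timeUs timestamp (all illustrative names):

```
// Sketch of the documented call order; error handling omitted for brevity.
sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
ssize_t trackIndex = muxer->addTrack(trackFormat);  // must precede start()
muxer->start();
muxer->writeSampleData(buffer, trackIndex, timeUs, 0 /* flags */);
muxer->stop();  // blocking; avoid calling from the UI thread
```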
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 1f4fbcb..9f20185 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -54,6 +54,11 @@
     virtual void setStartTimeOffsetMs(int /*ms*/) {}
     virtual int32_t getStartTimeOffsetMs() const { return 0; }
     virtual status_t setNextFd(int /*fd*/) { return INVALID_OPERATION; }
+    virtual void updateCVODegrees(int32_t /*cvoDegrees*/) {}
+    virtual void updatePayloadType(int32_t /*payloadType*/) {}
+    virtual void updateSocketNetwork(int64_t /*socketNetwork*/) {}
+    virtual uint32_t getSequenceNum() { return 0; }
+    virtual uint64_t getAccumulativeBytes() { return 0; }
 
 protected:
     virtual ~MediaWriter() {}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 64eb8b4..c80012e 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -62,6 +62,7 @@
     kKeyDVCC              = 'dvcc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
+    kKeyThumbnailAV1C     = 'tav1',  // raw data
     kKeyD263              = 'd263',  // raw data
     kKeyOpusHeader        = 'ohdr',  // raw data
     kKeyOpusCodecDelay    = 'ocod',  // uint64_t (codec delay in ns)
@@ -126,6 +127,8 @@
     kKeyTrackTimeStatus   = 'tktm',  // int64_t
 
     kKeyRealTimeRecording = 'rtrc',  // bool (int32_t)
+    kKeyBackgroundMode    = 'bkmd',  // bool (int32_t)
+
     kKeyNumBuffers        = 'nbbf',  // int32_t
 
     // Ogg files can be tagged to be automatically looping...
@@ -152,6 +155,10 @@
     kKeyIsADTS            = 'adts',  // bool (int32_t)
     kKeyAACAOT            = 'aaot',  // int32_t
 
+    kKeyMpeghProfileLevelIndication = 'hpli', // int32_t
+    kKeyMpeghReferenceChannelLayout = 'hrcl', // int32_t
+    kKeyMpeghCompatibleSets         = 'hcos', // raw data
+
     // If a MediaBuffer's data represents (at least partially) encrypted
     // data, the following fields aid in decryption.
     // The data can be thought of as pairs of plain and encrypted data
@@ -224,6 +231,8 @@
     kKeyExifSize         = 'exsz', // int64_t, Exif data size
     kKeyExifTiffOffset   = 'thdr', // int32_t, if > 0, buffer contains exif data block with
                                    // tiff hdr at specified offset
+    kKeyXmpOffset        = 'xmof', // int64_t, XMP data offset
+    kKeyXmpSize          = 'xmsz', // int64_t, XMP data size
     kKeyPcmBigEndian     = 'pcmb', // bool (int32_t)
 
     // Key for ALAC Magic Cookie
@@ -247,6 +256,25 @@
 
     // Treat empty track as malformed for MediaRecorder.
     kKeyEmptyTrackMalFormed = 'nemt', // bool (int32_t)
+
+    kKeyVps              = 'sVps', // int32_t, indicates that a buffer has vps.
+    kKeySps              = 'sSps', // int32_t, indicates that a buffer has sps.
+    kKeyPps              = 'sPps', // int32_t, indicates that a buffer has pps.
+    kKeySelfID           = 'sfid', // int32_t, source ID to identify itself on RTP protocol.
+    kKeyPayloadType      = 'pTyp', // int32_t, SDP negotiated payload type.
+    kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
+    kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
+    kKeyRtpDscp          = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+    kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
+
+    // Slow-motion markers
+    kKeySlowMotionMarkers = 'slmo', // raw data, byte array following spec for
+                                    // MediaFormat#KEY_SLOW_MOTION_MARKERS
+
+    kKeySampleFileOffset = 'sfof', // int64_t, sample's offset in a media file.
+    kKeyLastSampleIndexInChunk = 'lsic',  // int64_t, index of last sample in a chunk.
+    kKeySampleTimeBeforeAppend = 'lsba', // int64_t, timestamp of last sample of a track.
+
 };
 
 enum {
@@ -256,6 +284,7 @@
     kTypeAV1C        = 'av1c',
     kTypeDVCC        = 'dvcc',
     kTypeD263        = 'd263',
+    kTypeHCOS        = 'hcos',
 };
 
 enum {
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 227cead..d17a480 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -47,12 +47,14 @@
         SAMPLE_FLAG_ENCRYPTED   = 2,
     };
 
+    typedef IMediaExtractor::EntryPoint EntryPoint;
+
     // identical to IMediaExtractor::GetTrackMetaDataFlags
     enum GetTrackFormatFlags {
         kIncludeExtensiveMetaData = 1, // reads sample table and possibly stream headers
     };
 
-    NuMediaExtractor();
+    explicit NuMediaExtractor(EntryPoint entryPoint);
 
     status_t setDataSource(
             const sp<MediaHTTPService> &httpService,
@@ -98,6 +100,10 @@
 
     status_t getAudioPresentations(size_t trackIdx, AudioPresentationCollection *presentations);
 
+    status_t setLogSessionId(const String8& logSessionId);
+
+    const char* getName() const;
+
 protected:
     virtual ~NuMediaExtractor();
 
@@ -128,6 +134,8 @@
         uint32_t mTrackFlags;  // bitmask of "TrackFlags"
     };
 
+    const EntryPoint mEntryPoint;
+
     mutable Mutex mLock;
 
     sp<DataSource> mDataSource;
@@ -139,6 +147,8 @@
     int64_t mTotalBitrate;  // in bits/sec
     int64_t mDurationUs;
 
+    void setEntryPointToRemoteMediaExtractor();
+
     ssize_t fetchAllTrackSamples(
             int64_t seekTimeUs = -1ll,
             MediaSource::ReadOptions::SeekMode mode =
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/libstagefright/include/media/stagefright/ProcessInfo.h
index 0be1a52..b8a3c10 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfo.h
@@ -20,6 +20,9 @@
 
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/ProcessInfoInterface.h>
+#include <map>
+#include <mutex>
+#include <utils/Condition.h>
 
 namespace android {
 
@@ -28,11 +31,20 @@
 
     virtual bool getPriority(int pid, int* priority);
     virtual bool isValidPid(int pid);
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+    virtual void removeProcessInfoOverride(int pid);
 
 protected:
     virtual ~ProcessInfo();
 
 private:
+    struct ProcessInfoOverride {
+        int procState;
+        int oomScore;
+    };
+    std::mutex mOverrideLock;
+    std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
+
     DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
 };
 
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
index b39112a..9260181 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
@@ -24,6 +24,8 @@
 struct ProcessInfoInterface : public RefBase {
     virtual bool getPriority(int pid, int* priority) = 0;
     virtual bool isValidPid(int pid) = 0;
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
+    virtual void removeProcessInfoOverride(int pid);
 
 protected:
     virtual ~ProcessInfoInterface() {}
diff --git a/media/libstagefright/include/media/stagefright/RemoteDataSource.h b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
index d82be8a..d605cda 100644
--- a/media/libstagefright/include/media/stagefright/RemoteDataSource.h
+++ b/media/libstagefright/include/media/stagefright/RemoteDataSource.h
@@ -41,6 +41,11 @@
         close();
     }
     virtual sp<IMemory> getIMemory() {
+        Mutex::Autolock lock(mLock);
+        if (mMemory.get() == nullptr) {
+            ALOGE("getIMemory() failed, mMemory is nullptr");
+            return nullptr;
+        }
         return mMemory;
     }
     virtual ssize_t readAt(off64_t offset, size_t size) {
@@ -48,19 +53,35 @@
         if (size > kBufferSize) {
             size = kBufferSize;
         }
+
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("readAt() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->readAt(offset, mMemory->unsecurePointer(), size);
     }
     virtual status_t getSize(off64_t *size) {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return INVALID_OPERATION;
+        }
         return mSource->getSize(size);
     }
     virtual void close() {
         // Protect strong pointer assignments. This also can be called from the binder
         // clean-up procedure which is running on a separate thread.
-        Mutex::Autolock lock(mCloseLock);
+        Mutex::Autolock lock(mLock);
         mSource = nullptr;
         mMemory = nullptr;
     }
     virtual uint32_t getFlags() {
+        Mutex::Autolock lock(mLock);
+        if (mSource.get() == nullptr) {
+            ALOGE("getSize() failed, mSource is nullptr");
+            return 0;
+        }
         return mSource->flags();
     }
     virtual String8 toString()  {
@@ -75,9 +96,10 @@
     sp<IMemory> mMemory;
     sp<DataSource> mSource;
     String8 mName;
-    Mutex mCloseLock;
+    Mutex mLock;
 
     explicit RemoteDataSource(const sp<DataSource> &source) {
+        Mutex::Autolock lock(mLock);
         mSource = source;
         sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "RemoteDataSource");
         mMemory = memoryDealer->allocate(kBufferSize);
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
index 2ce7bc7..9ad90d7 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -42,6 +42,8 @@
     virtual uint32_t flags() const;
     virtual status_t setMediaCas(const HInterfaceToken &casToken);
     virtual String8 name();
+    virtual status_t setEntryPoint(EntryPoint entryPoint);
+    virtual status_t setLogSessionId(const String8& logSessionId);
 
 private:
     MediaExtractor *mExtractor;
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index ae55c65..35b3fa2 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -38,6 +38,8 @@
         int width, int height, int format, int rotation, int usage, bool reconnect);
 void setNativeWindowHdrMetadata(
         ANativeWindow *nativeWindow /* nonnull */, HDRStaticInfo *info /* nonnull */);
+status_t setNativeWindowRotation(
+        ANativeWindow *nativeWindow /* nonnull */, int rotation);
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 2b9b759..1673120 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -33,7 +33,7 @@
         const MetaDataBase *meta, sp<AMessage> *format);
 status_t convertMetaDataToMessage(
         const sp<MetaData> &meta, sp<AMessage> *format);
-void convertMessageToMetaData(
+status_t convertMessageToMetaData(
         const sp<AMessage> &format, sp<MetaData> &meta);
 
 // Returns a pointer to the next NAL start code in buffer of size |length| starting at |data|, or
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index fbb2d0c..a970224 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -1,12 +1,30 @@
-cc_library_static {
-    name: "libstagefright_mpeg2support",
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_mpeg2ts_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_mpeg2ts_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_defaults {
+    name: "libstagefright_mpeg2support_defaults",
 
     srcs: [
         "AnotherPacketSource.cpp",
         "ATSParser.cpp",
         "CasManager.cpp",
         "ESQueue.cpp",
-        "HlsSampleDecryptor.cpp",
     ],
 
     include_dirs: [
@@ -28,7 +46,6 @@
     },
 
     shared_libs: [
-        "libcrypto",
         "libhidlmemory",
         "android.hardware.cas.native@1.0",
         "android.hidl.memory@1.0",
@@ -36,9 +53,10 @@
     ],
 
     header_libs: [
-        "libmedia_headers",
+        "libmedia_datasource_headers",
         "libaudioclient_headers",
         "media_ndk_headers",
+        "libstagefright_foundation_headers",
     ],
 
     export_include_dirs: ["."],
@@ -48,4 +66,39 @@
     ],
 
     min_sdk_version: "29",
+
+    host_supported: true,
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+
+cc_library_static {
+    name: "libstagefright_mpeg2support",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    cflags: [
+        "-DENABLE_CRYPTO",
+    ],
+    shared_libs: [
+        "libcrypto",
+    ],
+    srcs: [
+        "HlsSampleDecryptor.cpp",
+    ],
+}
+
+cc_library_static {
+    name: "libstagefright_mpeg2support_nocrypto",
+    defaults: [
+        "libstagefright_mpeg2support_defaults",
+    ],
+    apex_available: [
+        "com.android.media",
+    ],
 }
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index ea5d2de..192ba77 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -36,7 +36,7 @@
 #include <inttypes.h>
 #include <netinet/in.h>
 
-#ifndef __ANDROID_APEX__
+#ifdef ENABLE_CRYPTO
 #include "HlsSampleDecryptor.h"
 #endif
 
@@ -55,10 +55,10 @@
     // Create the decryptor anyway since we don't know the use-case unless key is provided
     // Won't decrypt if key info not available (e.g., scanner/extractor just parsing ts files)
     mSampleDecryptor = isSampleEncrypted() ?
-#ifdef __ANDROID_APEX__
-        new SampleDecryptor
-#else
+#ifdef ENABLE_CRYPTO
         new HlsSampleDecryptor
+#else
+        new SampleDecryptor
 #endif
         : NULL;
 }
@@ -172,29 +172,26 @@
         return 0;
     }
 
-    unsigned bsmod __unused = bits.getBits(3);
+    bits.skipBits(3); // bsmod
     unsigned acmod = bits.getBits(3);
-    unsigned cmixlev __unused = 0;
-    unsigned surmixlev __unused = 0;
-    unsigned dsurmod __unused = 0;
 
     if ((acmod & 1) > 0 && acmod != 1) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        cmixlev = bits.getBits(2);
+        bits.skipBits(2); // cmixlev
     }
     if ((acmod & 4) > 0) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        surmixlev = bits.getBits(2);
+        bits.skipBits(2); // surmixlev
     }
     if (acmod == 2) {
         if (bits.numBitsLeft() < 2) {
             return 0;
         }
-        dsurmod = bits.getBits(2);
+        bits.skipBits(2); // dsurmod
     }
 
     if (bits.numBitsLeft() < 1) {
@@ -269,7 +266,7 @@
         samplingRate = samplingRateTable2[fscod2];
     } else {
         samplingRate = samplingRateTable[fscod];
-        unsigned numblkscod __unused = bits.getBits(2);
+        bits.skipBits(2); // numblkscod
     }
 
     unsigned acmod = bits.getBits(3);
@@ -1087,7 +1084,7 @@
     }
     unsigned numAUs = bits.getBits(8);
     bits.skipBits(8);
-    unsigned quantization_word_length __unused = bits.getBits(2);
+    bits.skipBits(2); // quantization_word_length
     unsigned audio_sampling_frequency = bits.getBits(3);
     unsigned num_channels = bits.getBits(3);
 
diff --git a/media/libstagefright/mpeg2ts/TEST_MAPPING b/media/libstagefright/mpeg2ts/TEST_MAPPING
new file mode 100644
index 0000000..9f4bbdf
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/TEST_MAPPING
@@ -0,0 +1,9 @@
+// frameworks/av/media/libstagefright/mpeg2ts
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "Mpeg2tsUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/mpeg2ts/test/Android.bp b/media/libstagefright/mpeg2ts/test/Android.bp
new file mode 100644
index 0000000..464b039
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Android.bp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_mpeg2ts_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_mpeg2ts_license",
+    ],
+}
+
+cc_test {
+    name: "Mpeg2tsUnitTest",
+    gtest: true,
+    test_suites: ["device-tests"],
+
+    srcs: [
+        "Mpeg2tsUnitTest.cpp"
+    ],
+
+    shared_libs: [
+        "android.hardware.cas@1.0",
+        "android.hardware.cas.native@1.0",
+        "android.hidl.token@1.0-utils",
+        "android.hidl.allocator@1.0",
+        "libcrypto",
+        "libhidlbase",
+        "libhidlmemory",
+        "liblog",
+        "libmedia",
+        "libbinder",
+        "libbinder_ndk",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libdatasource",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_metadatautils",
+        "libstagefright_mpeg2support",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/extractors/",
+        "frameworks/av/media/libstagefright/",
+    ],
+
+    header_libs: [
+        "libmedia_headers",
+        "libaudioclient_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/mpeg2ts/test/AndroidTest.xml b/media/libstagefright/mpeg2ts/test/AndroidTest.xml
new file mode 100644
index 0000000..ac1294d
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for Mpeg2ts unit tests">
+    <option name="test-suite-tag" value="Mpeg2tsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="Mpeg2tsUnitTest->/data/local/tmp/Mpeg2tsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/Mpeg2tsUnitTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="Mpeg2tsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/Mpeg2tsUnitTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
new file mode 100644
index 0000000..79c233b
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Mpeg2tsUnitTest"
+
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <sys/stat.h>
+
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include "mpeg2ts/ATSParser.h"
+#include "mpeg2ts/AnotherPacketSource.h"
+
+#include "Mpeg2tsUnitTestEnvironment.h"
+
+constexpr size_t kTSPacketSize = 188;
+constexpr uint16_t kPIDMask = 0x1FFF;
+// Max value of PID which is also used for Null packets
+constexpr uint16_t kPIDMaxValue = 8191;
+constexpr uint8_t kTSSyncByte = 0x47;
+constexpr uint8_t kVideoPresent = 0x01;
+constexpr uint8_t kAudioPresent = 0x02;
+constexpr uint8_t kMetaDataPresent = 0x04;
+
+static Mpeg2tsUnitTestEnvironment *gEnv = nullptr;
+
+using namespace android;
+
+class Mpeg2tsUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*sourceType*/ char, /*numSource*/ uint16_t>> {
+  public:
+    Mpeg2tsUnitTest()
+        : mInputBuffer(nullptr), mSource(nullptr), mFpInput(nullptr), mParser(nullptr) {}
+
+    ~Mpeg2tsUnitTest() {
+        if (mInputBuffer) free(mInputBuffer);
+        if (mFpInput) fclose(mFpInput);
+        mSource.clear();
+    }
+
+    void SetUp() override {
+        mOffset = 0;
+        mNumDataSource = 0;
+        tuple<string, char, uint16_t> params = GetParam();
+        char sourceType = get<1>(params);
+        /* mSourceType = 0b x x x x x M A V
+                                     /  |  \
+                            metaData  audio  video */
+        mMediaType = (sourceType & 0x07);
+        mNumDataSource = get<2>(params);
+        string inputFile = gEnv->getRes() + get<0>(params);
+        mFpInput = fopen(inputFile.c_str(), "rb");
+        ASSERT_NE(mFpInput, nullptr) << "Failed to open file: " << inputFile;
+
+        struct stat buf;
+        int8_t err = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+        long fileSize = buf.st_size;
+        mTotalPackets = fileSize / kTSPacketSize;
+        int32_t fd = fileno(mFpInput);
+        ASSERT_GE(fd, 0) << "Failed to get the integer file descriptor";
+
+        mSource = new FileSource(dup(fd), 0, buf.st_size);
+        ASSERT_NE(mSource, nullptr) << "Failed to get the data source!";
+
+        mParser = new ATSParser();
+        ASSERT_NE(mParser, nullptr) << "Unable to create ATS parser!";
+        mInputBuffer = (uint8_t *)malloc(kTSPacketSize);
+        ASSERT_NE(mInputBuffer, nullptr) << "Failed to allocate memory for TS packet!";
+    }
+
+    uint64_t mOffset;
+    uint64_t mTotalPackets;
+    uint16_t mNumDataSource;
+
+    int8_t mMediaType;
+
+    uint8_t *mInputBuffer;
+    string mInputFile;
+    sp<DataSource> mSource;
+    FILE *mFpInput;
+    ATSParser *mParser;
+};
+
+TEST_P(Mpeg2tsUnitTest, MediaInfoTest) {
+    bool videoFound = false;
+    bool audioFound = false;
+    bool metaDataFound = false;
+    bool syncPointPresent = false;
+
+    int16_t totalDataSource = 0;
+    int32_t val32 = 0;
+    uint8_t numDataSource = 0;
+    uint8_t packet[kTSPacketSize];
+    ssize_t numBytesRead = -1;
+
+    ATSParser::SyncEvent event(mOffset);
+    static const ATSParser::SourceType mediaType[] = {ATSParser::VIDEO, ATSParser::AUDIO,
+                                                      ATSParser::META, ATSParser::NUM_SOURCE_TYPES};
+    const uint32_t nMediaTypes = sizeof(mediaType) / sizeof(mediaType[0]);
+
+    while ((numBytesRead = mSource->readAt(mOffset, packet, kTSPacketSize)) == kTSPacketSize) {
+        ASSERT_TRUE(packet[0] == kTSSyncByte) << "Sync byte error!";
+
+        // pid is 13 bits
+        uint16_t pid = ((packet[1] << 8) | packet[2]) & kPIDMask;
+        ASSERT_TRUE(pid <= kPIDMaxValue) << "Invalid PID: " << pid;
+
+        status_t err = mParser->feedTSPacket(packet, kTSPacketSize, &event);
+        ASSERT_EQ(err, (status_t)OK) << "Unable to feed TS packet!";
+
+        mOffset += numBytesRead;
+        for (int i = 0; i < nMediaTypes; i++) {
+            if (mParser->hasSource(mediaType[i])) {
+                switch (mediaType[i]) {
+                    case ATSParser::VIDEO:
+                        videoFound = true;
+                        break;
+                    case ATSParser::AUDIO:
+                        audioFound = true;
+                        break;
+                    case ATSParser::META:
+                        metaDataFound = true;
+                        break;
+                    case ATSParser::NUM_SOURCE_TYPES:
+                        numDataSource = 3;
+                        break;
+                    default:
+                        break;
+                }
+            }
+        }
+        if (videoFound && audioFound && metaDataFound && (numDataSource == 3)) break;
+    }
+
+    for (int i = 0; i < nMediaTypes; i++) {
+        ATSParser::SourceType currentMediaType = mediaType[i];
+        if (mParser->hasSource(currentMediaType)) {
+            if (event.hasReturnedData()) {
+                syncPointPresent = true;
+                sp<AnotherPacketSource> syncPacketSource = event.getMediaSource();
+                ASSERT_NE(syncPacketSource, nullptr)
+                        << "Cannot get sync source for media type: " << currentMediaType;
+
+                status_t err = syncPacketSource->start();
+                ASSERT_EQ(err, (status_t)OK) << "Error returned while starting!";
+
+                sp<MetaData> format = syncPacketSource->getFormat();
+                ASSERT_NE(format, nullptr) << "Unable to get the format of the source packet!";
+
+                MediaBufferBase *buf;
+                syncPacketSource->read(&buf, nullptr);
+                ASSERT_NE(buf, nullptr) << "Failed to read sync packet source data";
+
+                MetaDataBase &inMeta = buf->meta_data();
+                bool status = inMeta.findInt32(kKeyIsSyncFrame, &val32);
+                ASSERT_EQ(status, true) << "Sync frame key is not set";
+
+                status = inMeta.findInt32(kKeyCryptoMode, &val32);
+                ASSERT_EQ(status, false) << "Invalid packet, found scrambled packets!";
+
+                err = syncPacketSource->stop();
+                ASSERT_EQ(err, (status_t)OK) << "Error returned while stopping!";
+            }
+            sp<AnotherPacketSource> packetSource = mParser->getSource(currentMediaType);
+            ASSERT_NE(packetSource, nullptr)
+                    << "Cannot get source for media type: " << currentMediaType;
+
+            status_t err = packetSource->start();
+            ASSERT_EQ(err, (status_t)OK) << "Error returned while starting!";
+            sp<MetaData> format = packetSource->getFormat();
+            ASSERT_NE(format, nullptr) << "Unable to get the format of the packet!";
+
+            err = packetSource->stop();
+            ASSERT_EQ(err, (status_t)OK) << "Error returned while stopping!";
+        }
+    }
+
+    ASSERT_EQ(videoFound, bool(mMediaType & kVideoPresent)) << "No Video packets found!";
+    ASSERT_EQ(audioFound, bool(mMediaType & kAudioPresent)) << "No Audio packets found!";
+    ASSERT_EQ(metaDataFound, bool(mMediaType & kMetaDataPresent)) << "No meta data found!";
+
+    if (videoFound || audioFound) {
+        ASSERT_TRUE(syncPointPresent) << "No sync points found for audio/video";
+    }
+
+    if (videoFound) totalDataSource += 1;
+    if (audioFound) totalDataSource += 1;
+    if (metaDataFound) totalDataSource += 1;
+
+    ASSERT_TRUE(totalDataSource == mNumDataSource)
+            << "Expected " << mNumDataSource << " data sources, found " << totalDataSource;
+    if (numDataSource == 3) {
+        ASSERT_EQ(numDataSource, mNumDataSource)
+                << "Expected " << mNumDataSource << " data sources, found " << totalDataSource;
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        infoTest, Mpeg2tsUnitTest,
+        ::testing::Values(make_tuple("crowd_1920x1080_25fps_6700kbps_h264.ts", 0x01, 1),
+                          make_tuple("segment000001.ts", 0x03, 2),
+                          make_tuple("bbb_44100hz_2ch_128kbps_mp3_5mins.ts", 0x02, 1)));
+
+int32_t main(int argc, char **argv) {
+    gEnv = new Mpeg2tsUnitTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    uint8_t status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Mpeg2tsUnit Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
new file mode 100644
index 0000000..9e41db7
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
+#define __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class Mpeg2tsUnitTestEnvironment : public ::testing::Environment {
+  public:
+    Mpeg2tsUnitTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int Mpeg2tsUnitTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __MPEG2TS_UNIT_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/mpeg2ts/test/README.md b/media/libstagefright/mpeg2ts/test/README.md
new file mode 100644
index 0000000..237ce72
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/test/README.md
@@ -0,0 +1,38 @@
+## Media Testing ##
+---
+#### Mpeg2TS Unit Test :
+The Mpeg2TS Unit Test Suite validates the functionality of the libraries present in Mpeg2TS.
+
+Run the following steps to build the test suite:
+```
+mmm frameworks/av/media/libstagefright/mpeg2ts/test/
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64:
+
+adb push ${OUT}/data/nativetest64/Mpeg2tsUnitTest/Mpeg2tsUnitTest /data/local/tmp/
+
+To test the 32-bit binary, push it from nativetest:
+
+adb push ${OUT}/data/nativetest/Mpeg2tsUnitTest/Mpeg2tsUnitTest /data/local/tmp/
+
+The resource files for the tests are taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.zip).
+Download, unzip and push these files to the device for testing.
+
+```
+adb push Mpeg2tsUnitTestRes/. /data/local/tmp/
+```
+
+usage: Mpeg2tsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/Mpeg2tsUnitTest -P /data/local/tmp/Mpeg2tsUnitTestRes/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest Mpeg2tsUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/omx/1.0/Omx.cpp b/media/libstagefright/omx/1.0/Omx.cpp
index eef9ce3..eb15039 100644
--- a/media/libstagefright/omx/1.0/Omx.cpp
+++ b/media/libstagefright/omx/1.0/Omx.cpp
@@ -22,7 +22,7 @@
 #include <media/openmax/OMX_AsString.h>
 
 #include <media/stagefright/omx/OMXUtils.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
 #include <media/stagefright/omx/OmxGraphicBufferSource.h>
 
 #include <media/stagefright/omx/1.0/WOmxNode.h>
@@ -41,21 +41,21 @@
 constexpr size_t kMaxNodeInstances = (1 << 16);
 
 Omx::Omx() :
-    mMaster(new OMXMaster()),
+    mStore(new OMXStore()),
     mParser() {
     (void)mParser.parseXmlFilesInSearchDirs();
     (void)mParser.parseXmlPath(mParser.defaultProfilingResultsXmlPath);
 }
 
 Omx::~Omx() {
-    delete mMaster;
+    delete mStore;
 }
 
 Return<void> Omx::listNodes(listNodes_cb _hidl_cb) {
     std::list<::android::IOMX::ComponentInfo> list;
     char componentName[256];
     for (OMX_U32 index = 0;
-            mMaster->enumerateComponents(
+            mStore->enumerateComponents(
             componentName, sizeof(componentName), index) == OMX_ErrorNone;
             ++index) {
         list.push_back(::android::IOMX::ComponentInfo());
@@ -63,7 +63,7 @@
         info.mName = componentName;
         ::android::Vector<::android::String8> roles;
         OMX_ERRORTYPE err =
-                mMaster->getRolesOfComponent(componentName, &roles);
+                mStore->getRolesOfComponent(componentName, &roles);
         if (err == OMX_ErrorNone) {
             for (OMX_U32 i = 0; i < roles.size(); ++i) {
                 info.mRoles.push_back(roles[i]);
@@ -101,7 +101,7 @@
                 this, new LWOmxObserver(observer), name.c_str());
 
         OMX_COMPONENTTYPE *handle;
-        OMX_ERRORTYPE err = mMaster->makeComponentInstance(
+        OMX_ERRORTYPE err = mStore->makeComponentInstance(
                 name.c_str(), &OMXNodeInstance::kCallbacks,
                 instance.get(), &handle);
 
@@ -208,7 +208,7 @@
 
     OMX_ERRORTYPE err = OMX_ErrorNone;
     if (instance->handle() != NULL) {
-        err = mMaster->destroyComponentInstance(
+        err = mStore->destroyComponentInstance(
                 static_cast<OMX_COMPONENTTYPE*>(instance->handle()));
     }
     return StatusFromOMXError(err);
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 67f478e..b5c1166 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -54,6 +54,24 @@
         });
     }
 
+    if (!nodes.empty()) {
+        auto anyNode = nodes.cbegin();
+        std::string::const_iterator first = anyNode->cbegin();
+        std::string::const_iterator last = anyNode->cend();
+        for (const std::string &name : nodes) {
+            std::string::const_iterator it1 = first;
+            for (std::string::const_iterator it2 = name.cbegin();
+                    it1 != last && it2 != name.cend() && tolower(*it1) == tolower(*it2);
+                    ++it1, ++it2) {
+            }
+            last = it1;
+        }
+        mPrefix = std::string(first, last);
+        LOG(INFO) << "omx common prefix: '" << mPrefix.c_str() << "'";
+    } else {
+        LOG(INFO) << "omx common prefix: no nodes";
+    }
+
     MediaCodecsXmlParser parser;
     parser.parseXmlFilesInSearchDirs(xmlNames, searchDirs);
     if (profilingResultsXmlPath != nullptr) {
@@ -112,8 +130,6 @@
         mRoleList[i] = std::move(role);
         ++i;
     }
-
-    mPrefix = parser.getCommonPrefix();
 }
 
 OmxStore::~OmxStore() {
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index 7d217eb..f7bf3ba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -67,7 +67,7 @@
             int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
         Message tMsg;
         tMsg.type = Message::Type::EVENT;
-        tMsg.fence = native_handle_create(0, 0);
+        tMsg.fence.setTo(native_handle_create(0, 0), /* shouldOwn = */ true);
         tMsg.data.eventData.event = uint32_t(OMX_EventDataSpaceChanged);
         tMsg.data.eventData.data1 = dataSpace;
         tMsg.data.eventData.data2 = aspects;
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 78b4f19..54c5697 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_omx_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_omx_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
@@ -7,7 +26,7 @@
     double_loadable: true,
 
     srcs: [
-        "OMXMaster.cpp",
+        "OMXStore.cpp",
         "OMXNodeInstance.cpp",
         "OMXUtils.cpp",
         "OmxGraphicBufferSource.cpp",
@@ -227,4 +246,3 @@
     },
     cflags: ["-Wall", "-Werror"],
 }
-
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
deleted file mode 100644
index 094b1f5..0000000
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "OMXMaster"
-#include <android-base/properties.h>
-#include <utils/Log.h>
-
-#include <media/stagefright/omx/OMXMaster.h>
-#include <media/stagefright/omx/SoftOMXPlugin.h>
-#include <media/stagefright/foundation/ADebug.h>
-
-#include <vndksupport/linker.h>
-
-#include <dlfcn.h>
-#include <fcntl.h>
-
-namespace android {
-
-OMXMaster::OMXMaster() {
-
-    pid_t pid = getpid();
-    char filename[20];
-    snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
-    int fd = open(filename, O_RDONLY);
-    if (fd < 0) {
-      ALOGW("couldn't determine process name");
-      strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
-    } else {
-      ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
-      if (len < 2) {
-        ALOGW("couldn't determine process name");
-        strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
-      } else {
-        // the name is newline terminated, so erase the newline
-        mProcessName[len - 1] = 0;
-      }
-      close(fd);
-    }
-
-    addVendorPlugin();
-    addPlatformPlugin();
-}
-
-OMXMaster::~OMXMaster() {
-    clearPlugins();
-}
-
-void OMXMaster::addVendorPlugin() {
-    addPlugin("libstagefrighthw.so");
-}
-
-void OMXMaster::addPlatformPlugin() {
-    addPlugin("libstagefright_softomx_plugin.so");
-}
-
-void OMXMaster::addPlugin(const char *libname) {
-    if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
-        return;
-    }
-
-    void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
-
-    if (libHandle == NULL) {
-        return;
-    }
-
-    typedef OMXPluginBase *(*CreateOMXPluginFunc)();
-    CreateOMXPluginFunc createOMXPlugin =
-        (CreateOMXPluginFunc)dlsym(
-                libHandle, "createOMXPlugin");
-    if (!createOMXPlugin)
-        createOMXPlugin = (CreateOMXPluginFunc)dlsym(
-                libHandle, "_ZN7android15createOMXPluginEv");
-
-    OMXPluginBase *plugin = nullptr;
-    if (createOMXPlugin) {
-        plugin = (*createOMXPlugin)();
-    }
-
-    if (plugin) {
-        mPlugins.push_back({ plugin, libHandle });
-        addPlugin(plugin);
-    } else {
-        android_unload_sphal_library(libHandle);
-    }
-}
-
-void OMXMaster::addPlugin(OMXPluginBase *plugin) {
-    Mutex::Autolock autoLock(mLock);
-
-    OMX_U32 index = 0;
-
-    char name[128];
-    OMX_ERRORTYPE err;
-    while ((err = plugin->enumerateComponents(
-                    name, sizeof(name), index++)) == OMX_ErrorNone) {
-        String8 name8(name);
-
-        if (mPluginByComponentName.indexOfKey(name8) >= 0) {
-            ALOGE("A component of name '%s' already exists, ignoring this one.",
-                 name8.string());
-
-            continue;
-        }
-
-        mPluginByComponentName.add(name8, plugin);
-    }
-
-    if (err != OMX_ErrorNoMore) {
-        ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
-             "components", err, mPluginByComponentName.size());
-    }
-}
-
-void OMXMaster::clearPlugins() {
-    Mutex::Autolock autoLock(mLock);
-
-    mPluginByComponentName.clear();
-    mPluginByInstance.clear();
-
-    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
-    for (const Plugin &plugin : mPlugins) {
-        DestroyOMXPluginFunc destroyOMXPlugin =
-            (DestroyOMXPluginFunc)dlsym(
-                    plugin.mLibHandle, "destroyOMXPlugin");
-        if (destroyOMXPlugin)
-            destroyOMXPlugin(plugin.mOmx);
-        else
-            delete plugin.mOmx;
-
-        android_unload_sphal_library(plugin.mLibHandle);
-    }
-
-    mPlugins.clear();
-}
-
-OMX_ERRORTYPE OMXMaster::makeComponentInstance(
-        const char *name,
-        const OMX_CALLBACKTYPE *callbacks,
-        OMX_PTR appData,
-        OMX_COMPONENTTYPE **component) {
-    ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
-    Mutex::Autolock autoLock(mLock);
-
-    *component = NULL;
-
-    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
-    if (index < 0) {
-        return OMX_ErrorInvalidComponentName;
-    }
-
-    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
-    OMX_ERRORTYPE err =
-        plugin->makeComponentInstance(name, callbacks, appData, component);
-
-    if (err != OMX_ErrorNone) {
-        return err;
-    }
-
-    mPluginByInstance.add(*component, plugin);
-
-    return err;
-}
-
-OMX_ERRORTYPE OMXMaster::destroyComponentInstance(
-        OMX_COMPONENTTYPE *component) {
-    Mutex::Autolock autoLock(mLock);
-
-    ssize_t index = mPluginByInstance.indexOfKey(component);
-
-    if (index < 0) {
-        return OMX_ErrorBadParameter;
-    }
-
-    OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
-    mPluginByInstance.removeItemsAt(index);
-
-    return plugin->destroyComponentInstance(component);
-}
-
-OMX_ERRORTYPE OMXMaster::enumerateComponents(
-        OMX_STRING name,
-        size_t size,
-        OMX_U32 index) {
-    Mutex::Autolock autoLock(mLock);
-
-    size_t numComponents = mPluginByComponentName.size();
-
-    if (index >= numComponents) {
-        return OMX_ErrorNoMore;
-    }
-
-    const String8 &name8 = mPluginByComponentName.keyAt(index);
-
-    CHECK(size >= 1 + name8.size());
-    strcpy(name, name8.string());
-
-    return OMX_ErrorNone;
-}
-
-OMX_ERRORTYPE OMXMaster::getRolesOfComponent(
-        const char *name,
-        Vector<String8> *roles) {
-    Mutex::Autolock autoLock(mLock);
-
-    roles->clear();
-
-    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
-
-    if (index < 0) {
-        return OMX_ErrorInvalidComponentName;
-    }
-
-    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
-    return plugin->getRolesOfComponent(name, roles);
-}
-
-}  // namespace android
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index ac42373..bebd516 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -22,7 +22,7 @@
 #include <inttypes.h>
 
 #include <media/stagefright/omx/OMXNodeInstance.h>
-#include <media/stagefright/omx/OMXMaster.h>
+#include <media/stagefright/omx/OMXStore.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <android/IOMXBufferSource.h>
 
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
new file mode 100644
index 0000000..e8fee42
--- /dev/null
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "OMXStore"
+#include <android-base/properties.h>
+#include <utils/Log.h>
+
+#include <media/stagefright/omx/OMXStore.h>
+#include <media/stagefright/omx/SoftOMXPlugin.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <vndksupport/linker.h>
+
+#include <dlfcn.h>
+#include <fcntl.h>
+
+namespace android {
+
+OMXStore::OMXStore() {
+
+    pid_t pid = getpid();
+    char filename[20];
+    snprintf(filename, sizeof(filename), "/proc/%d/comm", pid);
+    int fd = open(filename, O_RDONLY);
+    if (fd < 0) {
+      ALOGW("couldn't determine process name");
+      strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+    } else {
+      ssize_t len = read(fd, mProcessName, sizeof(mProcessName));
+      if (len < 2) {
+        ALOGW("couldn't determine process name");
+        strlcpy(mProcessName, "<unknown>", sizeof(mProcessName));
+      } else {
+        // the name is newline terminated, so erase the newline
+        mProcessName[len - 1] = 0;
+      }
+      close(fd);
+    }
+
+    addVendorPlugin();
+    addPlatformPlugin();
+}
+
+OMXStore::~OMXStore() {
+    clearPlugins();
+}
+
+void OMXStore::addVendorPlugin() {
+    addPlugin("libstagefrighthw.so");
+}
+
+void OMXStore::addPlatformPlugin() {
+    addPlugin("libstagefright_softomx_plugin.so");
+}
+
+void OMXStore::addPlugin(const char *libname) {
+    if (::android::base::GetIntProperty("vendor.media.omx", int64_t(1)) == 0) {
+        return;
+    }
+
+    void *libHandle = android_load_sphal_library(libname, RTLD_NOW);
+
+    if (libHandle == NULL) {
+        return;
+    }
+
+    typedef OMXPluginBase *(*CreateOMXPluginFunc)();
+    CreateOMXPluginFunc createOMXPlugin =
+        (CreateOMXPluginFunc)dlsym(
+                libHandle, "createOMXPlugin");
+    if (!createOMXPlugin)
+        createOMXPlugin = (CreateOMXPluginFunc)dlsym(
+                libHandle, "_ZN7android15createOMXPluginEv");
+
+    OMXPluginBase *plugin = nullptr;
+    if (createOMXPlugin) {
+        plugin = (*createOMXPlugin)();
+    }
+
+    if (plugin) {
+        mPlugins.push_back({ plugin, libHandle });
+        addPlugin(plugin);
+    } else {
+        android_unload_sphal_library(libHandle);
+    }
+}
+
+void OMXStore::addPlugin(OMXPluginBase *plugin) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_U32 index = 0;
+
+    char name[128];
+    OMX_ERRORTYPE err;
+    while ((err = plugin->enumerateComponents(
+                    name, sizeof(name), index++)) == OMX_ErrorNone) {
+        String8 name8(name);
+
+        if (mPluginByComponentName.indexOfKey(name8) >= 0) {
+            ALOGE("A component of name '%s' already exists, ignoring this one.",
+                 name8.string());
+
+            continue;
+        }
+
+        mPluginByComponentName.add(name8, plugin);
+    }
+
+    if (err != OMX_ErrorNoMore) {
+        ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
+             "components", err, mPluginByComponentName.size());
+    }
+}
+
+void OMXStore::clearPlugins() {
+    Mutex::Autolock autoLock(mLock);
+
+    mPluginByComponentName.clear();
+    mPluginByInstance.clear();
+
+    typedef void (*DestroyOMXPluginFunc)(OMXPluginBase*);
+    for (const Plugin &plugin : mPlugins) {
+        DestroyOMXPluginFunc destroyOMXPlugin =
+            (DestroyOMXPluginFunc)dlsym(
+                    plugin.mLibHandle, "destroyOMXPlugin");
+        if (destroyOMXPlugin)
+            destroyOMXPlugin(plugin.mOmx);
+        else
+            delete plugin.mOmx;
+
+        android_unload_sphal_library(plugin.mLibHandle);
+    }
+
+    mPlugins.clear();
+}
+
+OMX_ERRORTYPE OMXStore::makeComponentInstance(
+        const char *name,
+        const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData,
+        OMX_COMPONENTTYPE **component) {
+    ALOGI("makeComponentInstance(%s) in %s process", name, mProcessName);
+    Mutex::Autolock autoLock(mLock);
+
+    *component = NULL;
+
+    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+    if (index < 0) {
+        return OMX_ErrorInvalidComponentName;
+    }
+
+    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+    OMX_ERRORTYPE err =
+        plugin->makeComponentInstance(name, callbacks, appData, component);
+
+    if (err != OMX_ErrorNone) {
+        return err;
+    }
+
+    mPluginByInstance.add(*component, plugin);
+
+    return err;
+}
+
+OMX_ERRORTYPE OMXStore::destroyComponentInstance(
+        OMX_COMPONENTTYPE *component) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mPluginByInstance.indexOfKey(component);
+
+    if (index < 0) {
+        return OMX_ErrorBadParameter;
+    }
+
+    OMXPluginBase *plugin = mPluginByInstance.valueAt(index);
+    mPluginByInstance.removeItemsAt(index);
+
+    return plugin->destroyComponentInstance(component);
+}
+
+OMX_ERRORTYPE OMXStore::enumerateComponents(
+        OMX_STRING name,
+        size_t size,
+        OMX_U32 index) {
+    Mutex::Autolock autoLock(mLock);
+
+    size_t numComponents = mPluginByComponentName.size();
+
+    if (index >= numComponents) {
+        return OMX_ErrorNoMore;
+    }
+
+    const String8 &name8 = mPluginByComponentName.keyAt(index);
+
+    CHECK(size >= 1 + name8.size());
+    strcpy(name, name8.string());
+
+    return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMXStore::getRolesOfComponent(
+        const char *name,
+        Vector<String8> *roles) {
+    Mutex::Autolock autoLock(mLock);
+
+    roles->clear();
+
+    ssize_t index = mPluginByComponentName.indexOfKey(String8(name));
+
+    if (index < 0) {
+        return OMX_ErrorInvalidComponentName;
+    }
+
+    OMXPluginBase *plugin = mPluginByComponentName.valueAt(index);
+    return plugin->getRolesOfComponent(name, roles);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 1b8493a..49b2dec 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -172,6 +172,8 @@
             "audio_decoder.ac4", "audio_encoder.ac4" },
         { MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
             "image_decoder.heic", "image_encoder.heic" },
+        { MEDIA_MIMETYPE_IMAGE_AVIF,
+            "image_decoder.avif", "image_encoder.avif" },
     };
 
     static const size_t kNumMimeToRole =
@@ -354,7 +356,7 @@
     DescribeColorFormat2Params describeParams;
     InitOMXParams(&describeParams);
     describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
-    // reasonable dummy values
+    // reasonable initial values (that will be overwritten)
     describeParams.nFrameWidth = 128;
     describeParams.nFrameHeight = 128;
     describeParams.nStride = 128;
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index ddb459f..44415aa 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -17,6 +17,10 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SimpleSoftOMXComponent"
 #include <utils/Log.h>
+#include <OMX_Core.h>
+#include <OMX_Audio.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AudioExt.h>
 
 #include <media/stagefright/omx/SimpleSoftOMXComponent.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -74,7 +78,7 @@
 
     OMX_U32 portIndex;
 
-    switch (index) {
+    switch ((int)index) {
         case OMX_IndexParamPortDefinition:
         {
             const OMX_PARAM_PORTDEFINITIONTYPE *portDefs =
@@ -108,6 +112,19 @@
             break;
         }
 
+        case OMX_IndexParamAudioAndroidAacDrcPresentation:
+        {
+            if (mState == OMX_StateInvalid) {
+                return false;
+            }
+            const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *aacPresParams =
+                            (const OMX_AUDIO_PARAM_ANDROID_AACDRCPRESENTATIONTYPE *)params;
+            if (!isValidOMXParam(aacPresParams)) {
+                return false;
+            }
+            return true;
+        }
+
         default:
             return false;
     }
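The new case above accepts OMX_IndexParamAudioAndroidAacDrcPresentation only after isValidOMXParam() agrees that the caller-supplied structure is usable; isValidOMXParam() is defined elsewhere in libstagefright_omx. As a rough illustration of the kind of check such a helper performs (an assumption, not the actual implementation), a size guard might look like this:

// Hypothetical guard: an OMX parameter struct is only safe to read if the
// caller-declared nSize covers at least the whole structure.
template <typename T>
static bool paramSizeLooksValid(const T *params) {
    return params != nullptr && params->nSize >= sizeof(T);
}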
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
index 5a46b26..84ae511 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Omx.h
@@ -27,7 +27,7 @@
 
 namespace android {
 
-struct OMXMaster;
+struct OMXStore;
 struct OMXNodeInstance;
 
 namespace hardware {
@@ -51,7 +51,7 @@
 using ::android::sp;
 using ::android::wp;
 
-using ::android::OMXMaster;
+using ::android::OMXStore;
 using ::android::OMXNodeInstance;
 
 struct Omx : public IOmx, public hidl_death_recipient {
@@ -73,7 +73,7 @@
     status_t freeNode(sp<OMXNodeInstance> const& instance);
 
 protected:
-    OMXMaster* mMaster;
+    OMXStore* mStore;
     Mutex mLock;
     KeyedVector<wp<IBase>, sp<OMXNodeInstance> > mLiveNodes;
     KeyedVector<OMXNodeInstance*, wp<IBase> > mNode2Observer;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
deleted file mode 100644
index 93eaef1..0000000
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXMaster.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_MASTER_H_
-
-#define OMX_MASTER_H_
-
-#include <media/hardware/OMXPluginBase.h>
-
-#include <utils/threads.h>
-#include <utils/KeyedVector.h>
-#include <utils/List.h>
-#include <utils/String8.h>
-
-namespace android {
-
-struct OMXMaster : public OMXPluginBase {
-    OMXMaster();
-    virtual ~OMXMaster();
-
-    virtual OMX_ERRORTYPE makeComponentInstance(
-            const char *name,
-            const OMX_CALLBACKTYPE *callbacks,
-            OMX_PTR appData,
-            OMX_COMPONENTTYPE **component);
-
-    virtual OMX_ERRORTYPE destroyComponentInstance(
-            OMX_COMPONENTTYPE *component);
-
-    virtual OMX_ERRORTYPE enumerateComponents(
-            OMX_STRING name,
-            size_t size,
-            OMX_U32 index);
-
-    virtual OMX_ERRORTYPE getRolesOfComponent(
-            const char *name,
-            Vector<String8> *roles);
-
-private:
-    char mProcessName[16];
-    Mutex mLock;
-    struct Plugin {
-        OMXPluginBase *mOmx;
-        void *mLibHandle;
-    };
-    List<Plugin> mPlugins;
-    KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
-    KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
-
-    void addVendorPlugin();
-    void addPlatformPlugin();
-    void addPlugin(const char *libname);
-    void addPlugin(OMXPluginBase *plugin);
-    void clearPlugins();
-
-    OMXMaster(const OMXMaster &);
-    OMXMaster &operator=(const OMXMaster &);
-};
-
-}  // namespace android
-
-#endif  // OMX_MASTER_H_
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
index a761ef6..5f32c9e 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXNodeInstance.h
@@ -33,7 +33,7 @@
 class GraphicBuffer;
 class IOMXBufferSource;
 class IOMXObserver;
-struct OMXMaster;
+struct OMXStore;
 class OMXBuffer;
 using IHidlMemory = hidl::memory::V1_0::IMemory;
 using hardware::media::omx::V1_0::implementation::Omx;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
new file mode 100644
index 0000000..5d6c3ed
--- /dev/null
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OMXStore.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMX_STORE_H_
+
+#define OMX_STORE_H_
+
+#include <media/hardware/OMXPluginBase.h>
+
+#include <utils/threads.h>
+#include <utils/KeyedVector.h>
+#include <utils/List.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct OMXStore : public OMXPluginBase {
+    OMXStore();
+    virtual ~OMXStore();
+
+    virtual OMX_ERRORTYPE makeComponentInstance(
+            const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component);
+
+    virtual OMX_ERRORTYPE destroyComponentInstance(
+            OMX_COMPONENTTYPE *component);
+
+    virtual OMX_ERRORTYPE enumerateComponents(
+            OMX_STRING name,
+            size_t size,
+            OMX_U32 index);
+
+    virtual OMX_ERRORTYPE getRolesOfComponent(
+            const char *name,
+            Vector<String8> *roles);
+
+private:
+    char mProcessName[16];
+    Mutex mLock;
+    struct Plugin {
+        OMXPluginBase *mOmx;
+        void *mLibHandle;
+    };
+    List<Plugin> mPlugins;
+    KeyedVector<String8, OMXPluginBase *> mPluginByComponentName;
+    KeyedVector<OMX_COMPONENTTYPE *, OMXPluginBase *> mPluginByInstance;
+
+    void addVendorPlugin();
+    void addPlatformPlugin();
+    void addPlugin(const char *libname);
+    void addPlugin(OMXPluginBase *plugin);
+    void clearPlugins();
+
+    OMXStore(const OMXStore &);
+    OMXStore &operator=(const OMXStore &);
+};
+
+}  // namespace android
+
+#endif  // OMX_STORE_H_
diff --git a/media/libstagefright/omx/tests/Android.bp b/media/libstagefright/omx/tests/Android.bp
index fefb3bb..83cc80e 100644
--- a/media/libstagefright/omx/tests/Android.bp
+++ b/media/libstagefright/omx/tests/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_omx_tests_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_omx_tests_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_test {
     name: "omx_tests",
     gtest: false,
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
new file mode 100644
index 0000000..9a7bad9
--- /dev/null
+++ b/media/libstagefright/renderfright/Android.bp
@@ -0,0 +1,119 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_defaults {
+    name: "renderfright_defaults",
+    cflags: [
+        "-DLOG_TAG=\"renderfright\"",
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+        "-Wunused",
+        "-Wunreachable-code",
+    ],
+}
+
+cc_defaults {
+    name: "librenderfright_defaults",
+    defaults: ["renderfright_defaults"],
+    cflags: [
+        "-DGL_GLEXT_PROTOTYPES",
+        "-DEGL_EGLEXT_PROTOTYPES",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libEGL",
+        "libGLESv1_CM",
+        "libGLESv2",
+        "liblog",
+        "libnativewindow",
+        "libprocessgroup",
+        "libsync",
+        "libui",
+        "libutils",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+filegroup {
+    name: "librenderfright_sources",
+    srcs: [
+        "Description.cpp",
+        "Mesh.cpp",
+        "RenderEngine.cpp",
+        "Texture.cpp",
+    ],
+}
+
+filegroup {
+    name: "librenderfright_gl_sources",
+    srcs: [
+        "gl/GLESRenderEngine.cpp",
+        "gl/GLExtensions.cpp",
+        "gl/GLFramebuffer.cpp",
+        "gl/GLImage.cpp",
+        "gl/GLShadowTexture.cpp",
+        "gl/GLShadowVertexGenerator.cpp",
+        "gl/GLSkiaShadowPort.cpp",
+        "gl/GLVertexBuffer.cpp",
+        "gl/ImageManager.cpp",
+        "gl/Program.cpp",
+        "gl/ProgramCache.cpp",
+        "gl/filters/BlurFilter.cpp",
+        "gl/filters/GenericProgram.cpp",
+    ],
+}
+
+filegroup {
+    name: "librenderfright_threaded_sources",
+    srcs: [
+        "threaded/RenderEngineThreaded.cpp",
+    ],
+}
+
+cc_library_static {
+    name: "librenderfright",
+    defaults: ["librenderfright_defaults"],
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
+    double_loadable: true,
+    clang: true,
+    cflags: [
+        "-fvisibility=hidden",
+        "-Werror=format",
+    ],
+    srcs: [
+        ":librenderfright_sources",
+        ":librenderfright_gl_sources",
+        ":librenderfright_threaded_sources",
+    ],
+    lto: {
+        thin: true,
+    },
+}
+
+cc_library_static {
+    name: "librenderfright_mocks",
+    defaults: ["librenderfright_defaults"],
+    srcs: [
+        "mock/Framebuffer.cpp",
+        "mock/Image.cpp",
+        "mock/RenderEngine.cpp",
+    ],
+    static_libs: [
+        "libgtest",
+        "libgmock",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
diff --git a/media/libstagefright/renderfright/Description.cpp b/media/libstagefright/renderfright/Description.cpp
new file mode 100644
index 0000000..b9cea10
--- /dev/null
+++ b/media/libstagefright/renderfright/Description.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/private/Description.h>
+
+#include <stdint.h>
+
+#include <utils/TypeHelpers.h>
+
+namespace android {
+namespace renderengine {
+
+Description::TransferFunction Description::dataSpaceToTransferFunction(ui::Dataspace dataSpace) {
+    ui::Dataspace transfer = static_cast<ui::Dataspace>(dataSpace & ui::Dataspace::TRANSFER_MASK);
+    switch (transfer) {
+        case ui::Dataspace::TRANSFER_ST2084:
+            return Description::TransferFunction::ST2084;
+        case ui::Dataspace::TRANSFER_HLG:
+            return Description::TransferFunction::HLG;
+        case ui::Dataspace::TRANSFER_LINEAR:
+            return Description::TransferFunction::LINEAR;
+        default:
+            return Description::TransferFunction::SRGB;
+    }
+}
+
+bool Description::hasInputTransformMatrix() const {
+    const mat4 identity;
+    return inputTransformMatrix != identity;
+}
+
+bool Description::hasOutputTransformMatrix() const {
+    const mat4 identity;
+    return outputTransformMatrix != identity;
+}
+
+bool Description::hasColorMatrix() const {
+    const mat4 identity;
+    return colorMatrix != identity;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Mesh.cpp b/media/libstagefright/renderfright/Mesh.cpp
new file mode 100644
index 0000000..ed2f45f
--- /dev/null
+++ b/media/libstagefright/renderfright/Mesh.cpp
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace renderengine {
+
+Mesh::Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+           size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize,
+           size_t indexCount)
+      : mVertexCount(vertexCount),
+        mVertexSize(vertexSize),
+        mTexCoordsSize(texCoordSize),
+        mCropCoordsSize(cropCoordsSize),
+        mShadowColorSize(shadowColorSize),
+        mShadowParamsSize(shadowParamsSize),
+        mPrimitive(primitive),
+        mIndexCount(indexCount) {
+    if (vertexCount == 0) {
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mStride = 0;
+        return;
+    }
+    size_t stride = vertexSize + texCoordSize + cropCoordsSize + shadowColorSize + shadowParamsSize;
+    size_t remainder = (stride * vertexCount) / vertexCount;
+    // Since all of the input parameters are unsigned, if stride is less than
+    // vertexSize the addition must have overflowed. remainder will be equal
+    // to stride as long as stride * vertexCount doesn't overflow.
+    if ((stride < vertexSize) || (remainder != stride)) {
+        ALOGE("Overflow in Mesh(..., %zu, %zu, %zu, %zu, %zu, %zu)", vertexCount, vertexSize,
+              texCoordSize, cropCoordsSize, shadowColorSize, shadowParamsSize);
+        mVertices.resize(1);
+        mVertices[0] = 0.0f;
+        mVertexCount = 0;
+        mVertexSize = 0;
+        mTexCoordsSize = 0;
+        mCropCoordsSize = 0;
+        mShadowColorSize = 0;
+        mShadowParamsSize = 0;
+        mStride = 0;
+        return;
+    }
+
+    mVertices.resize(stride * vertexCount);
+    mStride = stride;
+    mIndices.resize(indexCount);
+}
+
+Mesh::Primitive Mesh::getPrimitive() const {
+    return mPrimitive;
+}
+
+float const* Mesh::getPositions() const {
+    return mVertices.data();
+}
+float* Mesh::getPositions() {
+    return mVertices.data();
+}
+
+float const* Mesh::getTexCoords() const {
+    return mVertices.data() + mVertexSize;
+}
+float* Mesh::getTexCoords() {
+    return mVertices.data() + mVertexSize;
+}
+
+float const* Mesh::getCropCoords() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+float* Mesh::getCropCoords() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize;
+}
+
+float const* Mesh::getShadowColor() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+float* Mesh::getShadowColor() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize;
+}
+
+float const* Mesh::getShadowParams() const {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+float* Mesh::getShadowParams() {
+    return mVertices.data() + mVertexSize + mTexCoordsSize + mCropCoordsSize + mShadowColorSize;
+}
+
+uint16_t const* Mesh::getIndices() const {
+    return mIndices.data();
+}
+
+uint16_t* Mesh::getIndices() {
+    return mIndices.data();
+}
+
+size_t Mesh::getVertexCount() const {
+    return mVertexCount;
+}
+
+size_t Mesh::getVertexSize() const {
+    return mVertexSize;
+}
+
+size_t Mesh::getTexCoordsSize() const {
+    return mTexCoordsSize;
+}
+
+size_t Mesh::getShadowColorSize() const {
+    return mShadowColorSize;
+}
+
+size_t Mesh::getShadowParamsSize() const {
+    return mShadowParamsSize;
+}
+
+size_t Mesh::getByteStride() const {
+    return mStride * sizeof(float);
+}
+
+size_t Mesh::getStride() const {
+    return mStride;
+}
+
+size_t Mesh::getIndexCount() const {
+    return mIndexCount;
+}
+
+} // namespace renderengine
+} // namespace android
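The Mesh constructor above guards against unsigned overflow in the per-vertex stride and in the total buffer size: the sum wrapped if stride ends up smaller than vertexSize, and the product wrapped if (stride * vertexCount) / vertexCount no longer equals stride. A small standalone restatement of that guard (a hypothetical helper carrying the same reasoning, not code from the patch):

#include <cstddef>

// True when vertexSize + extras and the resulting stride * vertexCount both
// fit in size_t, so stride * vertexCount is a trustworthy buffer size.
static bool strideFits(std::size_t vertexSize, std::size_t extras,
                       std::size_t vertexCount) {
    const std::size_t stride = vertexSize + extras;
    if (stride < vertexSize) {
        return false;  // the addition wrapped around
    }
    if (vertexCount != 0 && (stride * vertexCount) / vertexCount != stride) {
        return false;  // the multiplication wrapped around
    }
    return true;
}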
diff --git a/media/libstagefright/renderfright/RenderEngine.cpp b/media/libstagefright/renderfright/RenderEngine.cpp
new file mode 100644
index 0000000..e96ffdb
--- /dev/null
+++ b/media/libstagefright/renderfright/RenderEngine.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/RenderEngine.h>
+
+#include <cutils/properties.h>
+#include <log/log.h>
+#include "gl/GLESRenderEngine.h"
+#include "threaded/RenderEngineThreaded.h"
+
+namespace android {
+namespace renderengine {
+
+std::unique_ptr<RenderEngine> RenderEngine::create(const RenderEngineCreationArgs& args) {
+    RenderEngineType renderEngineType = args.renderEngineType;
+
+    // Keep the ability to override by PROPERTIES:
+    char prop[PROPERTY_VALUE_MAX];
+    property_get(PROPERTY_DEBUG_RENDERENGINE_BACKEND, prop, "");
+    if (strcmp(prop, "gles") == 0) {
+        renderEngineType = RenderEngineType::GLES;
+    }
+    if (strcmp(prop, "threaded") == 0) {
+        renderEngineType = RenderEngineType::THREADED;
+    }
+
+    switch (renderEngineType) {
+        case RenderEngineType::THREADED:
+            ALOGD("Threaded RenderEngine with GLES Backend");
+            return renderengine::threaded::RenderEngineThreaded::create(
+                    [args]() { return android::renderengine::gl::GLESRenderEngine::create(args); });
+        case RenderEngineType::GLES:
+        default:
+            ALOGD("RenderEngine with GLES Backend");
+            return renderengine::gl::GLESRenderEngine::create(args);
+    }
+}
+
+RenderEngine::~RenderEngine() = default;
+
+// static
+SyncFeatures &SyncFeatures::GetInstance() {
+    static SyncFeatures syncFeatures;
+    return syncFeatures;
+}
+
+bool SyncFeatures::useNativeFenceSync() const { return mHasNativeFenceSync; }
+bool SyncFeatures::useFenceSync() const { return mHasFenceSync; }
+bool SyncFeatures::useWaitSync() const { return mHasWaitSync; }
+
+SyncFeatures::SyncFeatures()
+    : mHasNativeFenceSync(false),
+      mHasFenceSync(false),
+      mHasWaitSync(false) {
+    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    // This can only be called after EGL has been initialized; otherwise the
+    // check below will abort.
+    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
+    LOG_ALWAYS_FATAL_IF(exts == nullptr, "eglQueryString failed");
+    if (strstr(exts, "EGL_ANDROID_native_fence_sync")) {
+        // This makes GLConsumer use the EGL_ANDROID_native_fence_sync
+        // extension to create Android native fences to signal when all
+        // GLES reads for a given buffer have completed.
+        mHasNativeFenceSync = true;
+    }
+    if (strstr(exts, "EGL_KHR_fence_sync")) {
+        mHasFenceSync = true;
+    }
+    if (strstr(exts, "EGL_KHR_wait_sync")) {
+        mHasWaitSync = true;
+    }
+}
+
+namespace impl {
+
+RenderEngine::RenderEngine(const RenderEngineCreationArgs& args) : mArgs(args) {}
+
+RenderEngine::~RenderEngine() = default;
+
+bool RenderEngine::useNativeFenceSync() const {
+    return SyncFeatures::GetInstance().useNativeFenceSync();
+}
+
+bool RenderEngine::useWaitSync() const {
+    return SyncFeatures::GetInstance().useWaitSync();
+}
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/Texture.cpp b/media/libstagefright/renderfright/Texture.cpp
new file mode 100644
index 0000000..154cde8
--- /dev/null
+++ b/media/libstagefright/renderfright/Texture.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Texture.h>
+
+namespace android {
+namespace renderengine {
+
+Texture::Texture()
+      : mTextureName(0), mTextureTarget(TEXTURE_2D), mWidth(0), mHeight(0), mFiltering(false) {}
+
+Texture::Texture(Target textureTarget, uint32_t textureName)
+      : mTextureName(textureName),
+        mTextureTarget(textureTarget),
+        mWidth(0),
+        mHeight(0),
+        mFiltering(false) {}
+
+void Texture::init(Target textureTarget, uint32_t textureName) {
+    mTextureName = textureName;
+    mTextureTarget = textureTarget;
+}
+
+Texture::~Texture() {}
+
+void Texture::setMatrix(float const* matrix) {
+    mTextureMatrix = mat4(matrix);
+}
+
+void Texture::setFiltering(bool enabled) {
+    mFiltering = enabled;
+}
+
+void Texture::setDimensions(size_t width, size_t height) {
+    mWidth = width;
+    mHeight = height;
+}
+
+uint32_t Texture::getTextureName() const {
+    return mTextureName;
+}
+
+uint32_t Texture::getTextureTarget() const {
+    return mTextureTarget;
+}
+
+const mat4& Texture::getMatrix() const {
+    return mTextureMatrix;
+}
+
+bool Texture::getFiltering() const {
+    return mFiltering;
+}
+
+size_t Texture::getWidth() const {
+    return mWidth;
+}
+
+size_t Texture::getHeight() const {
+    return mHeight;
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
new file mode 100644
index 0000000..04da9a5
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.cpp
@@ -0,0 +1,1766 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <sched.h>
+#include <cmath>
+#include <fstream>
+#include <sstream>
+#include <unordered_set>
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <android-base/stringprintf.h>
+#include <cutils/compiler.h>
+#include <cutils/properties.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/Texture.h>
+#include <renderengine/private/Description.h>
+#include <sync/sync.h>
+#include <ui/ColorSpace.h>
+#include <ui/DebugUtils.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <utils/KeyedVector.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+#include "GLFramebuffer.h"
+#include "GLImage.h"
+#include "GLShadowVertexGenerator.h"
+#include "Program.h"
+#include "ProgramCache.h"
+#include "filters/BlurFilter.h"
+
+bool checkGlError(const char* op, int lineNumber) {
+    bool errorFound = false;
+    GLint error = glGetError();
+    while (error != GL_NO_ERROR) {
+        errorFound = true;
+        ALOGV("after %s() (line # %d) glError (0x%x)\n", op, lineNumber, error);
+        error = glGetError();
+    }
+    return errorFound;
+}
+
+static constexpr bool outputDebugPPMs = false;
+
+void writePPM(const char* basename, GLuint width, GLuint height) {
+    ALOGV("writePPM #%s: %d x %d", basename, width, height);
+
+    std::vector<GLubyte> pixels(width * height * 4);
+    std::vector<GLubyte> outBuffer(width * height * 3);
+
+    // TODO(courtneygo): We can now have float formats, need
+    // to remove this code or update to support.
+    // Make returned pixels fit in uint32_t, one byte per component
+    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
+    if (checkGlError(__FUNCTION__, __LINE__)) {
+        return;
+    }
+
+    std::string filename(basename);
+    filename.append(".ppm");
+    std::ofstream file(filename.c_str(), std::ios::binary);
+    if (!file.is_open()) {
+        ALOGE("Unable to open file: %s", filename.c_str());
+        ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+              "surfaceflinger to write debug images");
+        return;
+    }
+
+    file << "P6\n";
+    file << width << "\n";
+    file << height << "\n";
+    file << 255 << "\n";
+
+    auto ptr = reinterpret_cast<char*>(pixels.data());
+    auto outPtr = reinterpret_cast<char*>(outBuffer.data());
+    for (int y = height - 1; y >= 0; y--) {
+        char* data = ptr + y * width * sizeof(uint32_t);
+
+        for (GLuint x = 0; x < width; x++) {
+            // Only copy R, G and B components
+            outPtr[0] = data[0];
+            outPtr[1] = data[1];
+            outPtr[2] = data[2];
+            data += sizeof(uint32_t);
+            outPtr += 3;
+        }
+    }
+    file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+}
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+using base::StringAppendF;
+using ui::Dataspace;
+
+static status_t selectConfigForAttribute(EGLDisplay dpy, EGLint const* attrs, EGLint attribute,
+                                         EGLint wanted, EGLConfig* outConfig) {
+    EGLint numConfigs = -1, n = 0;
+    eglGetConfigs(dpy, nullptr, 0, &numConfigs);
+    std::vector<EGLConfig> configs(numConfigs, EGL_NO_CONFIG_KHR);
+    eglChooseConfig(dpy, attrs, configs.data(), configs.size(), &n);
+    configs.resize(n);
+
+    if (!configs.empty()) {
+        if (attribute != EGL_NONE) {
+            for (EGLConfig config : configs) {
+                EGLint value = 0;
+                eglGetConfigAttrib(dpy, config, attribute, &value);
+                if (wanted == value) {
+                    *outConfig = config;
+                    return NO_ERROR;
+                }
+            }
+        } else {
+            // just pick the first one
+            *outConfig = configs[0];
+            return NO_ERROR;
+        }
+    }
+
+    return NAME_NOT_FOUND;
+}
+
+static status_t selectEGLConfig(EGLDisplay display, EGLint format, EGLint renderableType,
+                                EGLConfig* config) {
+    // select our EGLConfig. It must support EGL_RECORDABLE_ANDROID if
+    // it is to be used with WIFI displays
+    status_t err;
+    EGLint wantedAttribute;
+    EGLint wantedAttributeValue;
+
+    std::vector<EGLint> attribs;
+    if (renderableType) {
+        const ui::PixelFormat pixelFormat = static_cast<ui::PixelFormat>(format);
+        const bool is1010102 = pixelFormat == ui::PixelFormat::RGBA_1010102;
+
+        // Default to 8 bits per channel.
+        const EGLint tmpAttribs[] = {
+                EGL_RENDERABLE_TYPE,
+                renderableType,
+                EGL_RECORDABLE_ANDROID,
+                EGL_TRUE,
+                EGL_SURFACE_TYPE,
+                EGL_WINDOW_BIT | EGL_PBUFFER_BIT,
+                EGL_FRAMEBUFFER_TARGET_ANDROID,
+                EGL_TRUE,
+                EGL_RED_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_GREEN_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_BLUE_SIZE,
+                is1010102 ? 10 : 8,
+                EGL_ALPHA_SIZE,
+                is1010102 ? 2 : 8,
+                EGL_NONE,
+        };
+        std::copy(tmpAttribs, tmpAttribs + (sizeof(tmpAttribs) / sizeof(EGLint)),
+                  std::back_inserter(attribs));
+        wantedAttribute = EGL_NONE;
+        wantedAttributeValue = EGL_NONE;
+    } else {
+        // if no renderable type specified, fallback to a simplified query
+        wantedAttribute = EGL_NATIVE_VISUAL_ID;
+        wantedAttributeValue = format;
+    }
+
+    err = selectConfigForAttribute(display, attribs.data(), wantedAttribute, wantedAttributeValue,
+                                   config);
+    if (err == NO_ERROR) {
+        EGLint caveat;
+        if (eglGetConfigAttrib(display, *config, EGL_CONFIG_CAVEAT, &caveat))
+            ALOGW_IF(caveat == EGL_SLOW_CONFIG, "EGL_SLOW_CONFIG selected!");
+    }
+
+    return err;
+}
+
+std::unique_ptr<GLESRenderEngine> GLESRenderEngine::create(const RenderEngineCreationArgs& args) {
+    // initialize EGL for the default display
+    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    if (!eglInitialize(display, nullptr, nullptr)) {
+        LOG_ALWAYS_FATAL("failed to initialize EGL");
+    }
+
+    const auto eglVersion = eglQueryString(display, EGL_VERSION);
+    if (!eglVersion) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_VERSION) failed");
+    }
+
+    const auto eglExtensions = eglQueryString(display, EGL_EXTENSIONS);
+    if (!eglExtensions) {
+        checkGlError(__FUNCTION__, __LINE__);
+        LOG_ALWAYS_FATAL("eglQueryString(EGL_EXTENSIONS) failed");
+    }
+
+    GLExtensions& extensions = GLExtensions::getInstance();
+    extensions.initWithEGLStrings(eglVersion, eglExtensions);
+
+    // The code assumes that ES2 or later is available if this extension is
+    // supported.
+    EGLConfig config = EGL_NO_CONFIG;
+    if (!extensions.hasNoConfigContext()) {
+        config = chooseEglConfig(display, args.pixelFormat, /*logConfig*/ true);
+    }
+
+    bool useContextPriority =
+            extensions.hasContextPriority() && args.contextPriority == ContextPriority::HIGH;
+    EGLContext protectedContext = EGL_NO_CONTEXT;
+    if (args.enableProtectedContext && extensions.hasProtectedContent()) {
+        protectedContext = createEglContext(display, config, nullptr, useContextPriority,
+                                            Protection::PROTECTED);
+        ALOGE_IF(protectedContext == EGL_NO_CONTEXT, "Can't create protected context");
+    }
+
+    EGLContext ctxt = createEglContext(display, config, protectedContext, useContextPriority,
+                                       Protection::UNPROTECTED);
+
+    // if we can't create a GL context, we can only abort.
+    LOG_ALWAYS_FATAL_IF(ctxt == EGL_NO_CONTEXT, "EGLContext creation failed");
+
+    EGLSurface stub = EGL_NO_SURFACE;
+    if (!extensions.hasSurfacelessContext()) {
+        stub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                           Protection::UNPROTECTED);
+        LOG_ALWAYS_FATAL_IF(stub == EGL_NO_SURFACE, "can't create stub pbuffer");
+    }
+    EGLBoolean success = eglMakeCurrent(display, stub, stub, ctxt);
+    LOG_ALWAYS_FATAL_IF(!success, "can't make stub pbuffer current");
+    extensions.initWithGLStrings(glGetString(GL_VENDOR), glGetString(GL_RENDERER),
+                                 glGetString(GL_VERSION), glGetString(GL_EXTENSIONS));
+
+    EGLSurface protectedStub = EGL_NO_SURFACE;
+    if (protectedContext != EGL_NO_CONTEXT && !extensions.hasSurfacelessContext()) {
+        protectedStub = createStubEglPbufferSurface(display, config, args.pixelFormat,
+                                                    Protection::PROTECTED);
+        ALOGE_IF(protectedStub == EGL_NO_SURFACE, "can't create protected stub pbuffer");
+    }
+
+    // now figure out what version of GL we actually got
+    GlesVersion version = parseGlesVersion(extensions.getVersion());
+
+    LOG_ALWAYS_FATAL_IF(args.supportsBackgroundBlur && version < GLES_VERSION_3_0,
+        "Blurs require OpenGL ES 3.0. Please unset ro.surface_flinger.supports_background_blur");
+
+    // initialize the renderer while GL is current
+    std::unique_ptr<GLESRenderEngine> engine;
+    switch (version) {
+        case GLES_VERSION_1_0:
+        case GLES_VERSION_1_1:
+            LOG_ALWAYS_FATAL("SurfaceFlinger requires OpenGL ES 2.0 minimum to run.");
+            break;
+        case GLES_VERSION_2_0:
+        case GLES_VERSION_3_0:
+            engine = std::make_unique<GLESRenderEngine>(args, display, config, ctxt, stub,
+                                                        protectedContext, protectedStub);
+            break;
+    }
+
+    ALOGI("OpenGL ES informations:");
+    ALOGI("vendor    : %s", extensions.getVendor());
+    ALOGI("renderer  : %s", extensions.getRenderer());
+    ALOGI("version   : %s", extensions.getVersion());
+    ALOGI("extensions: %s", extensions.getExtensions());
+    ALOGI("GL_MAX_TEXTURE_SIZE = %zu", engine->getMaxTextureSize());
+    ALOGI("GL_MAX_VIEWPORT_DIMS = %zu", engine->getMaxViewportDims());
+
+    return engine;
+}
+
+EGLConfig GLESRenderEngine::chooseEglConfig(EGLDisplay display, int format, bool logConfig) {
+    status_t err;
+    EGLConfig config;
+
+    // First try to get an ES3 config
+    err = selectEGLConfig(display, format, EGL_OPENGL_ES3_BIT, &config);
+    if (err != NO_ERROR) {
+        // If ES3 fails, try to get an ES2 config
+        err = selectEGLConfig(display, format, EGL_OPENGL_ES2_BIT, &config);
+        if (err != NO_ERROR) {
+            // If ES2 still doesn't work (probably because we're on the emulator),
+            // try a simplified query.
+            ALOGW("no suitable EGLConfig found, trying a simpler query");
+            err = selectEGLConfig(display, format, 0, &config);
+            if (err != NO_ERROR) {
+                // this EGL is too lame for android
+                LOG_ALWAYS_FATAL("no suitable EGLConfig found, giving up");
+            }
+        }
+    }
+
+    if (logConfig) {
+        // print some debugging info
+        EGLint r, g, b, a;
+        eglGetConfigAttrib(display, config, EGL_RED_SIZE, &r);
+        eglGetConfigAttrib(display, config, EGL_GREEN_SIZE, &g);
+        eglGetConfigAttrib(display, config, EGL_BLUE_SIZE, &b);
+        eglGetConfigAttrib(display, config, EGL_ALPHA_SIZE, &a);
+        ALOGI("EGL information:");
+        ALOGI("vendor    : %s", eglQueryString(display, EGL_VENDOR));
+        ALOGI("version   : %s", eglQueryString(display, EGL_VERSION));
+        ALOGI("extensions: %s", eglQueryString(display, EGL_EXTENSIONS));
+        ALOGI("Client API: %s", eglQueryString(display, EGL_CLIENT_APIS) ?: "Not Supported");
+        ALOGI("EGLSurface: %d-%d-%d-%d, config=%p", r, g, b, a, config);
+    }
+
+    return config;
+}
+
+GLESRenderEngine::GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display,
+                                   EGLConfig config, EGLContext ctxt, EGLSurface stub,
+                                   EGLContext protectedContext, EGLSurface protectedStub)
+      : renderengine::impl::RenderEngine(args),
+        mEGLDisplay(display),
+        mEGLConfig(config),
+        mEGLContext(ctxt),
+        mStubSurface(stub),
+        mProtectedEGLContext(protectedContext),
+        mProtectedStubSurface(protectedStub),
+        mVpWidth(0),
+        mVpHeight(0),
+        mFramebufferImageCacheSize(args.imageCacheSize),
+        mUseColorManagement(args.useColorManagement) {
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &mMaxTextureSize);
+    glGetIntegerv(GL_MAX_VIEWPORT_DIMS, mMaxViewportDims);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+    glPixelStorei(GL_PACK_ALIGNMENT, 4);
+
+    // Initialize protected EGL Context.
+    if (mProtectedEGLContext != EGL_NO_CONTEXT) {
+        EGLBoolean success = eglMakeCurrent(display, mProtectedStubSurface, mProtectedStubSurface,
+                                            mProtectedEGLContext);
+        ALOGE_IF(!success, "can't make protected context current");
+        glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+        glPixelStorei(GL_PACK_ALIGNMENT, 4);
+        success = eglMakeCurrent(display, mStubSurface, mStubSurface, mEGLContext);
+        LOG_ALWAYS_FATAL_IF(!success, "can't make default context current");
+    }
+
+    // mColorBlindnessCorrection = M;
+
+    if (mUseColorManagement) {
+        const ColorSpace srgb(ColorSpace::sRGB());
+        const ColorSpace displayP3(ColorSpace::DisplayP3());
+        const ColorSpace bt2020(ColorSpace::BT2020());
+
+        // no chromatic adaptation needed since all color spaces use D65 for their white points.
+        mSrgbToXyz = mat4(srgb.getRGBtoXYZ());
+        mDisplayP3ToXyz = mat4(displayP3.getRGBtoXYZ());
+        mBt2020ToXyz = mat4(bt2020.getRGBtoXYZ());
+        mXyzToSrgb = mat4(srgb.getXYZtoRGB());
+        mXyzToDisplayP3 = mat4(displayP3.getXYZtoRGB());
+        mXyzToBt2020 = mat4(bt2020.getXYZtoRGB());
+
+        // Compute sRGB to Display P3 and BT2020 transform matrix.
+        // NOTE: For now, we are limiting output wide color space support to
+        // Display-P3 and BT2020 only.
+        mSrgbToDisplayP3 = mXyzToDisplayP3 * mSrgbToXyz;
+        mSrgbToBt2020 = mXyzToBt2020 * mSrgbToXyz;
+
+        // Compute Display P3 to sRGB and BT2020 transform matrix.
+        mDisplayP3ToSrgb = mXyzToSrgb * mDisplayP3ToXyz;
+        mDisplayP3ToBt2020 = mXyzToBt2020 * mDisplayP3ToXyz;
+
+        // Compute BT2020 to sRGB and Display P3 transform matrix
+        mBt2020ToSrgb = mXyzToSrgb * mBt2020ToXyz;
+        mBt2020ToDisplayP3 = mXyzToDisplayP3 * mBt2020ToXyz;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("debug.egl.traceGpuCompletion", value, "0");
+    if (atoi(value)) {
+        mTraceGpuCompletion = true;
+        mFlushTracer = std::make_unique<FlushTracer>(this);
+    }
+
+    if (args.supportsBackgroundBlur) {
+        mBlurFilter = new BlurFilter(*this);
+        checkErrors("BlurFilter creation");
+    }
+
+    mImageManager = std::make_unique<ImageManager>(this);
+    mImageManager->initThread(args.realtime);
+    mDrawingBuffer = createFramebuffer();
+    sp<GraphicBuffer> buf =
+            new GraphicBuffer(1, 1, PIXEL_FORMAT_RGBA_8888, 1,
+                              GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE, "placeholder");
+
+    const status_t err = buf->initCheck();
+    if (err != OK) {
+        ALOGE("Error allocating placeholder buffer: %d", err);
+        return;
+    }
+    mPlaceholderBuffer = buf.get();
+    EGLint attributes[] = {
+            EGL_NONE,
+    };
+    mPlaceholderImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                          mPlaceholderBuffer, attributes);
+    ALOGE_IF(mPlaceholderImage == EGL_NO_IMAGE_KHR, "Failed to create placeholder image: %#x",
+             eglGetError());
+}
+
+GLESRenderEngine::~GLESRenderEngine() {
+    // Destroy the image manager first.
+    mImageManager = nullptr;
+    std::lock_guard<std::mutex> lock(mRenderingMutex);
+    unbindFrameBuffer(mDrawingBuffer.get());
+    mDrawingBuffer = nullptr;
+    while (!mFramebufferImageCache.empty()) {
+        EGLImageKHR expired = mFramebufferImageCache.front().second;
+        mFramebufferImageCache.pop_front();
+        eglDestroyImageKHR(mEGLDisplay, expired);
+    }
+    eglDestroyImageKHR(mEGLDisplay, mPlaceholderImage);
+    mImageCache.clear();
+    eglMakeCurrent(mEGLDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+    eglTerminate(mEGLDisplay);
+}
+
+std::unique_ptr<Framebuffer> GLESRenderEngine::createFramebuffer() {
+    return std::make_unique<GLFramebuffer>(*this);
+}
+
+std::unique_ptr<Image> GLESRenderEngine::createImage() {
+    return std::make_unique<GLImage>(*this);
+}
+
+Framebuffer* GLESRenderEngine::getFramebufferForDrawing() {
+    return mDrawingBuffer.get();
+}
+
+void GLESRenderEngine::primeCache() const {
+    ProgramCache::getInstance().primeCache(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
+                                           mArgs.useColorManagement,
+                                           mArgs.precacheToneMapperShaderOnly);
+}
+
+base::unique_fd GLESRenderEngine::flush() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasNativeFenceSync()) {
+        return base::unique_fd();
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL native fence sync: %#x", eglGetError());
+        return base::unique_fd();
+    }
+
+    // native fence fd will not be populated until flush() is done.
+    glFlush();
+
+    // get the fence fd
+    base::unique_fd fenceFd(eglDupNativeFenceFDANDROID(mEGLDisplay, sync));
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (fenceFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
+        ALOGW("failed to dup EGL native fence sync: %#x", eglGetError());
+    }
+
+    // Only trace if we have a valid fence, as current usage falls back to
+    // calling finish() if the fence fd is invalid.
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer) && fenceFd.get() >= 0) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    return fenceFd;
+}
+
+bool GLESRenderEngine::finish() {
+    ATRACE_CALL();
+    if (!GLExtensions::getInstance().hasFenceSync()) {
+        ALOGW("no synchronization support");
+        return false;
+    }
+
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGW("failed to create EGL fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    if (CC_UNLIKELY(mTraceGpuCompletion && mFlushTracer)) {
+        mFlushTracer->queueSync(eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_FENCE_KHR, nullptr));
+    }
+
+    return waitSync(sync, EGL_SYNC_FLUSH_COMMANDS_BIT_KHR);
+}
+
+bool GLESRenderEngine::waitSync(EGLSyncKHR sync, EGLint flags) {
+    EGLint result = eglClientWaitSyncKHR(mEGLDisplay, sync, flags, 2000000000 /*2 sec*/);
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (result != EGL_CONDITION_SATISFIED_KHR) {
+        if (result == EGL_TIMEOUT_EXPIRED_KHR) {
+            ALOGW("fence wait timed out");
+        } else {
+            ALOGW("error waiting on EGL fence: %#x", error);
+        }
+        return false;
+    }
+
+    return true;
+}
+
+bool GLESRenderEngine::waitFence(base::unique_fd fenceFd) {
+    if (!GLExtensions::getInstance().hasNativeFenceSync() ||
+        !GLExtensions::getInstance().hasWaitSync()) {
+        return false;
+    }
+
+    // release the fd and transfer the ownership to EGLSync
+    EGLint attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fenceFd.release(), EGL_NONE};
+    EGLSyncKHR sync = eglCreateSyncKHR(mEGLDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
+    if (sync == EGL_NO_SYNC_KHR) {
+        ALOGE("failed to create EGL native fence sync: %#x", eglGetError());
+        return false;
+    }
+
+    // XXX: The spec draft is inconsistent as to whether this should return an
+    // EGLint or void.  Ignore the return value for now, as it's not strictly
+    // needed.
+    eglWaitSyncKHR(mEGLDisplay, sync, 0);
+    EGLint error = eglGetError();
+    eglDestroySyncKHR(mEGLDisplay, sync);
+    if (error != EGL_SUCCESS) {
+        ALOGE("failed to wait for EGL native fence sync: %#x", error);
+        return false;
+    }
+
+    return true;
+}
+
+void GLESRenderEngine::clearWithColor(float red, float green, float blue, float alpha) {
+    ATRACE_CALL();
+    glDisable(GL_BLEND);
+    glClearColor(red, green, blue, alpha);
+    glClear(GL_COLOR_BUFFER_BIT);
+}
+
+void GLESRenderEngine::fillRegionWithColor(const Region& region, float red, float green, float blue,
+                                           float alpha) {
+    size_t c;
+    Rect const* r = region.getArray(&c);
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLES)
+                        .setVertices(c * 6 /* count */, 2 /* size */)
+                        .build();
+    Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
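+    // Expand each rect into two triangles (six vertices): (left, top), (left, bottom),
+    // (right, bottom) and (left, top), (right, bottom), (right, top).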
+    for (size_t i = 0; i < c; i++, r++) {
+        position[i * 6 + 0].x = r->left;
+        position[i * 6 + 0].y = r->top;
+        position[i * 6 + 1].x = r->left;
+        position[i * 6 + 1].y = r->bottom;
+        position[i * 6 + 2].x = r->right;
+        position[i * 6 + 2].y = r->bottom;
+        position[i * 6 + 3].x = r->left;
+        position[i * 6 + 3].y = r->top;
+        position[i * 6 + 4].x = r->right;
+        position[i * 6 + 4].y = r->bottom;
+        position[i * 6 + 5].x = r->right;
+        position[i * 6 + 5].y = r->top;
+    }
+    setupFillWithColor(red, green, blue, alpha);
+    drawMesh(mesh);
+}
+
+void GLESRenderEngine::setScissor(const Rect& region) {
+    glScissor(region.left, region.top, region.getWidth(), region.getHeight());
+    glEnable(GL_SCISSOR_TEST);
+}
+
+void GLESRenderEngine::disableScissor() {
+    glDisable(GL_SCISSOR_TEST);
+}
+
+void GLESRenderEngine::genTextures(size_t count, uint32_t* names) {
+    glGenTextures(count, names);
+}
+
+void GLESRenderEngine::deleteTextures(size_t count, uint32_t const* names) {
+    for (size_t i = 0; i < count; ++i) {
+        mTextureView.erase(names[i]);
+    }
+    glDeleteTextures(count, names);
+}
+
+void GLESRenderEngine::bindExternalTextureImage(uint32_t texName, const Image& image) {
+    ATRACE_CALL();
+    const GLImage& glImage = static_cast<const GLImage&>(image);
+    const GLenum target = GL_TEXTURE_EXTERNAL_OES;
+
+    glBindTexture(target, texName);
+    if (glImage.getEGLImage() != EGL_NO_IMAGE_KHR) {
+        glEGLImageTargetTexture2DOES(target, static_cast<GLeglImageOES>(glImage.getEGLImage()));
+    }
+}
+
+status_t GLESRenderEngine::bindExternalTextureBuffer(uint32_t texName,
+                                                     const sp<GraphicBuffer>& buffer,
+                                                     const sp<Fence>& bufferFence) {
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
+    ATRACE_CALL();
+
+    bool found = false;
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+        found = (cachedImage != mImageCache.end());
+    }
+
+    // If we couldn't find the image in the cache at this time, then either
+    // SurfaceFlinger messed up registering the buffer ahead of time or we got
+    // backed up creating other EGLImages.
+    if (!found) {
+        status_t cacheResult = mImageManager->cache(buffer);
+        if (cacheResult != NO_ERROR) {
+            return cacheResult;
+        }
+    }
+
+    // Whether or not we needed to cache, re-check mImageCache to make sure that
+    // there's an EGLImage. The current threading model guarantees that we don't
+    // destroy a cached image until it's really not needed anymore (i.e. this
+    // function should not be called), so the only possibility is that something
+    // terrible went wrong and we should just bind something and move on.
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        auto cachedImage = mImageCache.find(buffer->getId());
+
+        if (cachedImage == mImageCache.end()) {
+            // We failed creating the image if we got here, so bail out.
+            ALOGE("Failed to create an EGLImage when rendering");
+            bindExternalTextureImage(texName, *createImage());
+            return NO_INIT;
+        }
+
+        bindExternalTextureImage(texName, *cachedImage->second);
+        mTextureView.insert_or_assign(texName, buffer->getId());
+    }
+
+    // Wait for the new buffer to be ready.
+    if (bufferFence != nullptr && bufferFence->isValid()) {
+        if (GLExtensions::getInstance().hasWaitSync()) {
+            base::unique_fd fenceFd(bufferFence->dup());
+            if (fenceFd == -1) {
+                ALOGE("error dup'ing fence fd: %d", errno);
+                return -errno;
+            }
+            if (!waitFence(std::move(fenceFd))) {
+                ALOGE("failed to wait on fence fd");
+                return UNKNOWN_ERROR;
+            }
+        } else {
+            status_t err = bufferFence->waitForever("RenderEngine::bindExternalTextureBuffer");
+            if (err != NO_ERROR) {
+                ALOGE("error waiting for fence: %d", err);
+                return err;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+    mImageManager->cacheAsync(buffer, nullptr);
+}
+
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::cacheExternalTextureBufferForTesting(
+        const sp<GraphicBuffer>& buffer) {
+    auto barrier = std::make_shared<ImageManager::Barrier>();
+    mImageManager->cacheAsync(buffer, barrier);
+    return barrier;
+}
+
+status_t GLESRenderEngine::cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer) {
+    if (buffer == nullptr) {
+        return BAD_VALUE;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        if (mImageCache.count(buffer->getId()) > 0) {
+            // If there's already a cached image for this buffer, return early.
+            return NO_ERROR;
+        }
+    }
+    ATRACE_CALL();
+
+    // Create the image without holding a lock so that we don't block anything.
+    std::unique_ptr<Image> newImage = createImage();
+
+    bool created = newImage->setNativeWindowBuffer(buffer->getNativeBuffer(),
+                                                   buffer->getUsage() & GRALLOC_USAGE_PROTECTED);
+    if (!created) {
+        ALOGE("Failed to create image. size=%ux%u st=%u usage=%#" PRIx64 " fmt=%d",
+              buffer->getWidth(), buffer->getHeight(), buffer->getStride(), buffer->getUsage(),
+              buffer->getPixelFormat());
+        return NO_INIT;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        if (mImageCache.count(buffer->getId()) > 0) {
+            // In theory it's possible for another thread to recache the image,
+            // so bail out if another thread won.
+            return NO_ERROR;
+        }
+        mImageCache.insert(std::make_pair(buffer->getId(), std::move(newImage)));
+    }
+
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::unbindExternalTextureBuffer(uint64_t bufferId) {
+    mImageManager->releaseAsync(bufferId, nullptr);
+}
+
+std::shared_ptr<ImageManager::Barrier> GLESRenderEngine::unbindExternalTextureBufferForTesting(
+        uint64_t bufferId) {
+    auto barrier = std::make_shared<ImageManager::Barrier>();
+    mImageManager->releaseAsync(bufferId, barrier);
+    return barrier;
+}
+
+void GLESRenderEngine::unbindExternalTextureBufferInternal(uint64_t bufferId) {
+    std::unique_ptr<Image> image;
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        const auto& cachedImage = mImageCache.find(bufferId);
+
+        if (cachedImage != mImageCache.end()) {
+            ALOGV("Destroying image for buffer: %" PRIu64, bufferId);
+            // Move the image out of the cache first, so that we can destroy it
+            // without holding the cache's lock.
+            image = std::move(cachedImage->second);
+            mImageCache.erase(bufferId);
+            return;
+        }
+    }
+    ALOGV("Failed to find image for buffer: %" PRIu64, bufferId);
+}
+
+FloatRect GLESRenderEngine::setupLayerCropping(const LayerSettings& layer, Mesh& mesh) {
+    // Translate the crop window by the rounded corners rect coordinates, so that all
+    // values are in the layer's coordinate space.
+    FloatRect cropWin = layer.geometry.boundaries;
+    const FloatRect& roundedCornersCrop = layer.geometry.roundedCornersCrop;
+    cropWin.left -= roundedCornersCrop.left;
+    cropWin.right -= roundedCornersCrop.left;
+    cropWin.top -= roundedCornersCrop.top;
+    cropWin.bottom -= roundedCornersCrop.top;
+    Mesh::VertexArray<vec2> cropCoords(mesh.getCropCoordArray<vec2>());
+    cropCoords[0] = vec2(cropWin.left, cropWin.top);
+    cropCoords[1] = vec2(cropWin.left, cropWin.top + cropWin.getHeight());
+    cropCoords[2] = vec2(cropWin.right, cropWin.top + cropWin.getHeight());
+    cropCoords[3] = vec2(cropWin.right, cropWin.top);
+
+    setupCornerRadiusCropSize(roundedCornersCrop.getWidth(), roundedCornersCrop.getHeight());
+    return cropWin;
+}
+
+void GLESRenderEngine::handleRoundedCorners(const DisplaySettings& display,
+                                            const LayerSettings& layer, const Mesh& mesh) {
+    // We essentially separate the layer into 3 parts, turning on blending only for the top
+    // and bottom rectangles and turning it off for the middle rectangle.
+    FloatRect bounds = layer.geometry.roundedCornersCrop;
+
+    // Explicitly compute the transform from the clip rectangle to the physical
+    // display. Normally, this is done in glViewport but we explicitly compute
+    // it here so that we can get the scissor bounds correct.
+    const Rect& source = display.clip;
+    const Rect& destination = display.physicalDisplay;
+    // Here we compute the following transform:
+    // 1. Translate the top left corner of the source clip to (0, 0)
+    // 2. Rotate the clip rectangle about the origin in accordance with the
+    // orientation flag
+    // 3. Translate the top left corner back to the origin.
+    // 4. Scale the clip rectangle to the destination rectangle dimensions
+    // 5. Translate the top left corner to the destination rectangle's top left
+    // corner.
+    const mat4 translateSource = mat4::translate(vec4(-source.left, -source.top, 0, 1));
+    mat4 rotation;
+    int displacementX = 0;
+    int displacementY = 0;
+    float destinationWidth = static_cast<float>(destination.getWidth());
+    float destinationHeight = static_cast<float>(destination.getHeight());
+    float sourceWidth = static_cast<float>(source.getWidth());
+    float sourceHeight = static_cast<float>(source.getHeight());
+    const float rot90InRadians = 2.0f * static_cast<float>(M_PI) / 4.0f;
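+    // displacementX/displacementY shift the rotated clip back into non-negative coordinates;
+    // e.g. a 90 degree rotation about the origin sends x into negative values, so we shift
+    // right by the source height.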
+    switch (display.orientation) {
+        case ui::Transform::ROT_90:
+            rotation = mat4::rotate(rot90InRadians, vec3(0, 0, 1));
+            displacementX = source.getHeight();
+            std::swap(sourceHeight, sourceWidth);
+            break;
+        case ui::Transform::ROT_180:
+            rotation = mat4::rotate(rot90InRadians * 2.0f, vec3(0, 0, 1));
+            displacementY = source.getHeight();
+            displacementX = source.getWidth();
+            break;
+        case ui::Transform::ROT_270:
+            rotation = mat4::rotate(rot90InRadians * 3.0f, vec3(0, 0, 1));
+            displacementY = source.getWidth();
+            std::swap(sourceHeight, sourceWidth);
+            break;
+        default:
+            break;
+    }
+
+    const mat4 intermediateTranslation = mat4::translate(vec4(displacementX, displacementY, 0, 1));
+    const mat4 scale = mat4::scale(
+            vec4(destinationWidth / sourceWidth, destinationHeight / sourceHeight, 1, 1));
+    const mat4 translateDestination =
+            mat4::translate(vec4(destination.left, destination.top, 0, 1));
+    const mat4 globalTransform =
+            translateDestination * scale * intermediateTranslation * rotation * translateSource;
+
+    const mat4 transformMatrix = globalTransform * layer.geometry.positionTransform;
+    const vec4 leftTopCoordinate(bounds.left, bounds.top, 1.0, 1.0);
+    const vec4 rightBottomCoordinate(bounds.right, bounds.bottom, 1.0, 1.0);
+    const vec4 leftTopCoordinateInBuffer = transformMatrix * leftTopCoordinate;
+    const vec4 rightBottomCoordinateInBuffer = transformMatrix * rightBottomCoordinate;
+    bounds = FloatRect(std::min(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
+                       std::min(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]),
+                       std::max(leftTopCoordinateInBuffer[0], rightBottomCoordinateInBuffer[0]),
+                       std::max(leftTopCoordinateInBuffer[1], rightBottomCoordinateInBuffer[1]));
+
+    // Finally, we cut the layer into 3 parts, with top and bottom parts having rounded corners
+    // and the middle part without rounded corners.
+    const int32_t radius = ceil(layer.geometry.roundedCornersRadius);
+    const Rect topRect(bounds.left, bounds.top, bounds.right, bounds.top + radius);
+    setScissor(topRect);
+    drawMesh(mesh);
+    const Rect bottomRect(bounds.left, bounds.bottom - radius, bounds.right, bounds.bottom);
+    setScissor(bottomRect);
+    drawMesh(mesh);
+
+    // The middle part of the layer can turn off blending.
+    if (topRect.bottom < bottomRect.top) {
+        const Rect middleRect(bounds.left, bounds.top + radius, bounds.right,
+                              bounds.bottom - radius);
+        setScissor(middleRect);
+        mState.cornerRadius = 0.0;
+        disableBlending();
+        drawMesh(mesh);
+    }
+    disableScissor();
+}
+
+status_t GLESRenderEngine::bindFrameBuffer(Framebuffer* framebuffer) {
+    ATRACE_CALL();
+    GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(framebuffer);
+    EGLImageKHR eglImage = glFramebuffer->getEGLImage();
+    uint32_t textureName = glFramebuffer->getTextureName();
+    uint32_t framebufferName = glFramebuffer->getFramebufferName();
+
+    // Bind the texture and turn our EGLImage into a texture
+    glBindTexture(GL_TEXTURE_2D, textureName);
+    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)eglImage);
+
+    // Bind the Framebuffer to render into
+    glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureName, 0);
+
+    uint32_t glStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+    ALOGE_IF(glStatus != GL_FRAMEBUFFER_COMPLETE_OES, "glCheckFramebufferStatusOES error %d",
+             glStatus);
+
+    return glStatus == GL_FRAMEBUFFER_COMPLETE_OES ? NO_ERROR : BAD_VALUE;
+}
+
+void GLESRenderEngine::unbindFrameBuffer(Framebuffer* /*framebuffer*/) {
+    ATRACE_CALL();
+
+    // back to main framebuffer
+    glBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+bool GLESRenderEngine::cleanupPostRender(CleanupMode mode) {
+    ATRACE_CALL();
+
+    if (mPriorResourcesCleaned ||
+        (mLastDrawFence != nullptr && mLastDrawFence->getStatus() != Fence::Status::Signaled)) {
+        // If we don't have a prior frame needing cleanup, or the prior frame's fence has
+        // not signaled yet, then don't do anything.
+        return false;
+    }
+
+    // This is a bit of a band-aid fix for FrameCaptureProcessor, as we should not
+    // keep memory around when it is no longer needed.
+    if (mode == CleanupMode::CLEAN_ALL) {
+        // TODO: SurfaceFlinger memory utilization may benefit from resetting
+        // texture bindings as well. Assess whether it does, and whether rebinding the same
+        // image data to the same texture avoids a performance regression; if both hold,
+        // this mode's behavior can be tweaked.
+        if (mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+            for (auto [textureName, bufferId] : mTextureView) {
+                if (bufferId && mPlaceholderImage != EGL_NO_IMAGE_KHR) {
+                    glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureName);
+                    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES,
+                                                 static_cast<GLeglImageOES>(mPlaceholderImage));
+                    mTextureView[textureName] = std::nullopt;
+                    checkErrors();
+                }
+            }
+        }
+        {
+            std::lock_guard<std::mutex> lock(mRenderingMutex);
+            mImageCache.clear();
+        }
+    }
+
+    // Bind the texture to placeholder so that backing image data can be freed.
+    GLFramebuffer* glFramebuffer = static_cast<GLFramebuffer*>(getFramebufferForDrawing());
+    glFramebuffer->allocateBuffers(1, 1, mPlaceholderDrawBuffer);
+    // Release the cached fence here, so that repeated calls to this method can no-op
+    // instead of churning through reallocations.
+    mLastDrawFence = nullptr;
+    mPriorResourcesCleaned = true;
+    return true;
+}
+
+void GLESRenderEngine::checkErrors() const {
+    checkErrors(nullptr);
+}
+
+void GLESRenderEngine::checkErrors(const char* tag) const {
+    do {
+        // there could be more than one error flag
+        GLenum error = glGetError();
+        if (error == GL_NO_ERROR) break;
+        if (tag == nullptr) {
+            ALOGE("GL error 0x%04x", int(error));
+        } else {
+            ALOGE("GL error: %s -> 0x%04x", tag, int(error));
+        }
+    } while (true);
+}
+
+bool GLESRenderEngine::supportsProtectedContent() const {
+    return mProtectedEGLContext != EGL_NO_CONTEXT;
+}
+
+bool GLESRenderEngine::useProtectedContext(bool useProtectedContext) {
+    if (useProtectedContext == mInProtectedContext) {
+        return true;
+    }
+    if (useProtectedContext && mProtectedEGLContext == EGL_NO_CONTEXT) {
+        return false;
+    }
+    const EGLSurface surface = useProtectedContext ? mProtectedStubSurface : mStubSurface;
+    const EGLContext context = useProtectedContext ? mProtectedEGLContext : mEGLContext;
+    const bool success = eglMakeCurrent(mEGLDisplay, surface, surface, context) == EGL_TRUE;
+    if (success) {
+        mInProtectedContext = useProtectedContext;
+    }
+    return success;
+}
+EGLImageKHR GLESRenderEngine::createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer,
+                                                             bool isProtected,
+                                                             bool useFramebufferCache) {
+    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(nativeBuffer);
+    if (useFramebufferCache) {
+        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+        for (const auto& image : mFramebufferImageCache) {
+            if (image.first == graphicBuffer->getId()) {
+                return image.second;
+            }
+        }
+    }
+    EGLint attributes[] = {
+            isProtected ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
+            isProtected ? EGL_TRUE : EGL_NONE,
+            EGL_NONE,
+    };
+    EGLImageKHR image = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                          nativeBuffer, attributes);
+    if (useFramebufferCache) {
+        if (image != EGL_NO_IMAGE_KHR) {
+            std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+            if (mFramebufferImageCache.size() >= mFramebufferImageCacheSize) {
+                EGLImageKHR expired = mFramebufferImageCache.front().second;
+                mFramebufferImageCache.pop_front();
+                eglDestroyImageKHR(mEGLDisplay, expired);
+            }
+            mFramebufferImageCache.push_back({graphicBuffer->getId(), image});
+        }
+    }
+
+    return image;
+}
+
+status_t GLESRenderEngine::drawLayers(const DisplaySettings& display,
+                                      const std::vector<const LayerSettings*>& layers,
+                                      const sp<GraphicBuffer>& buffer,
+                                      const bool useFramebufferCache, base::unique_fd&& bufferFence,
+                                      base::unique_fd* drawFence) {
+    ATRACE_CALL();
+    if (layers.empty()) {
+        ALOGV("Drawing empty layer stack");
+        return NO_ERROR;
+    }
+
+    if (bufferFence.get() >= 0) {
+        // Duplicate the fence for passing to waitFence.
+        base::unique_fd bufferFenceDup(dup(bufferFence.get()));
+        if (bufferFenceDup < 0 || !waitFence(std::move(bufferFenceDup))) {
+            ATRACE_NAME("Waiting before draw");
+            sync_wait(bufferFence.get(), -1);
+        }
+    }
+
+    if (buffer == nullptr) {
+        ALOGE("No output buffer provided. Aborting GPU composition.");
+        return BAD_VALUE;
+    }
+
+    std::unique_ptr<BindNativeBufferAsFramebuffer> fbo;
+    // Gather the layers that requested blur; we'll need them to decide when to render to an
+    // offscreen buffer, and when to render to the native buffer.
+    std::deque<const LayerSettings*> blurLayers;
+    if (CC_LIKELY(mBlurFilter != nullptr)) {
+        for (auto layer : layers) {
+            if (layer->backgroundBlurRadius > 0) {
+                blurLayers.push_back(layer);
+            }
+        }
+    }
+    const auto blurLayersSize = blurLayers.size();
+
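+    // If no layer needs blur, render directly into the output buffer. Otherwise, start by
+    // rendering into the blur filter's intermediate target and only bind the output buffer
+    // once the last blur layer has been reached (see the layer loop below).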
+    if (blurLayersSize == 0) {
+        fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
+                                                              buffer.get()->getNativeBuffer(),
+                                                              useFramebufferCache);
+        if (fbo->getStatus() != NO_ERROR) {
+            ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
+                  buffer->handle);
+            checkErrors();
+            return fbo->getStatus();
+        }
+        setViewportAndProjection(display.physicalDisplay, display.clip);
+    } else {
+        setViewportAndProjection(display.physicalDisplay, display.clip);
+        auto status =
+                mBlurFilter->setAsDrawTarget(display, blurLayers.front()->backgroundBlurRadius);
+        if (status != NO_ERROR) {
+            ALOGE("Failed to prepare blur filter! Aborting GPU composition for buffer (%p).",
+                  buffer->handle);
+            checkErrors();
+            return status;
+        }
+    }
+
+    // Clear the entire buffer; otherwise, ghost images could persist when we reuse
+    // buffers.
+    // We also require a fully transparent framebuffer for overlays. This is
+    // probably not the most efficient approach on all GPUs, since we could filter out
+    // opaque layers.
+    clearWithColor(0.0, 0.0, 0.0, 0.0);
+
+    setOutputDataSpace(display.outputDataspace);
+    setDisplayMaxLuminance(display.maxLuminance);
+
+    const mat4 projectionMatrix =
+            ui::Transform(display.orientation).asMatrix4() * mState.projectionMatrix;
+    if (!display.clearRegion.isEmpty()) {
+        glDisable(GL_BLEND);
+        fillRegionWithColor(display.clearRegion, 0.0, 0.0, 0.0, 1.0);
+    }
+
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLE_FAN)
+                        .setVertices(4 /* count */, 2 /* size */)
+                        .setTexCoords(2 /* size */)
+                        .setCropCoords(2 /* size */)
+                        .build();
+    for (auto const layer : layers) {
+        if (blurLayers.size() > 0 && blurLayers.front() == layer) {
+            blurLayers.pop_front();
+
+            auto status = mBlurFilter->prepare();
+            if (status != NO_ERROR) {
+                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't render first blur pass");
+                return status;
+            }
+
+            if (blurLayers.size() == 0) {
+                // Done blurring, time to bind the native FBO and render our blur onto it.
+                fbo = std::make_unique<BindNativeBufferAsFramebuffer>(*this,
+                                                                      buffer.get()
+                                                                              ->getNativeBuffer(),
+                                                                      useFramebufferCache);
+                status = fbo->getStatus();
+                setViewportAndProjection(display.physicalDisplay, display.clip);
+            } else {
+                // There's still something else to blur, so let's keep rendering to our FBO
+                // instead of to the display.
+                status = mBlurFilter->setAsDrawTarget(display,
+                                                      blurLayers.front()->backgroundBlurRadius);
+            }
+            if (status != NO_ERROR) {
+                ALOGE("Failed to bind framebuffer! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't bind native framebuffer");
+                return status;
+            }
+
+            status = mBlurFilter->render(blurLayersSize > 1);
+            if (status != NO_ERROR) {
+                ALOGE("Failed to render blur effect! Aborting GPU composition for buffer (%p).",
+                      buffer->handle);
+                checkErrors("Can't render blur filter");
+                return status;
+            }
+        }
+
+        mState.maxMasteringLuminance = layer->source.buffer.maxMasteringLuminance;
+        mState.maxContentLuminance = layer->source.buffer.maxContentLuminance;
+        mState.projectionMatrix = projectionMatrix * layer->geometry.positionTransform;
+
+        const FloatRect bounds = layer->geometry.boundaries;
+        Mesh::VertexArray<vec2> position(mesh.getPositionArray<vec2>());
+        position[0] = vec2(bounds.left, bounds.top);
+        position[1] = vec2(bounds.left, bounds.bottom);
+        position[2] = vec2(bounds.right, bounds.bottom);
+        position[3] = vec2(bounds.right, bounds.top);
+
+        setupLayerCropping(*layer, mesh);
+        setColorTransform(display.colorTransform * layer->colorTransform);
+
+        bool usePremultipliedAlpha = true;
+        bool disableTexture = true;
+        bool isOpaque = false;
+        if (layer->source.buffer.buffer != nullptr) {
+            disableTexture = false;
+            isOpaque = layer->source.buffer.isOpaque;
+
+            sp<GraphicBuffer> gBuf = layer->source.buffer.buffer;
+            bindExternalTextureBuffer(layer->source.buffer.textureName, gBuf,
+                                      layer->source.buffer.fence);
+
+            usePremultipliedAlpha = layer->source.buffer.usePremultipliedAlpha;
+            Texture texture(Texture::TEXTURE_EXTERNAL, layer->source.buffer.textureName);
+            mat4 texMatrix = layer->source.buffer.textureTransform;
+
+            texture.setMatrix(texMatrix.asArray());
+            texture.setFiltering(layer->source.buffer.useTextureFiltering);
+
+            texture.setDimensions(gBuf->getWidth(), gBuf->getHeight());
+            setSourceY410BT2020(layer->source.buffer.isY410BT2020);
+
+            renderengine::Mesh::VertexArray<vec2> texCoords(mesh.getTexCoordArray<vec2>());
+            texCoords[0] = vec2(0.0, 0.0);
+            texCoords[1] = vec2(0.0, 1.0);
+            texCoords[2] = vec2(1.0, 1.0);
+            texCoords[3] = vec2(1.0, 0.0);
+            setupLayerTexturing(texture);
+        }
+
+        const half3 solidColor = layer->source.solidColor;
+        const half4 color = half4(solidColor.r, solidColor.g, solidColor.b, layer->alpha);
+        // Buffer sources will have a black solid color ignored in the shader,
+        // so in that scenario the solid color passed here is arbitrary.
+        setupLayerBlending(usePremultipliedAlpha, isOpaque, disableTexture, color,
+                           layer->geometry.roundedCornersRadius);
+        if (layer->disableBlending) {
+            glDisable(GL_BLEND);
+        }
+        setSourceDataSpace(layer->sourceDataspace);
+
+        if (layer->shadow.length > 0.0f) {
+            handleShadow(layer->geometry.boundaries, layer->geometry.roundedCornersRadius,
+                         layer->shadow);
+        }
+        // We only want special handling for rounded corners when rounded corners are the
+        // only reason the layer needs blending; otherwise, we handle it the usual way,
+        // since blending has to be enabled anyway.
+        else if (layer->geometry.roundedCornersRadius > 0.0 && color.a >= 1.0f && isOpaque) {
+            handleRoundedCorners(display, *layer, mesh);
+        } else {
+            drawMesh(mesh);
+        }
+
+        // Cleanup if there's a buffer source
+        if (layer->source.buffer.buffer != nullptr) {
+            disableBlending();
+            setSourceY410BT2020(false);
+            disableTexturing();
+        }
+    }
+
+    if (drawFence != nullptr) {
+        *drawFence = flush();
+    }
+    // If flush failed or we don't support native fences, we need to force the
+    // gl command stream to be executed.
+    if (drawFence == nullptr || drawFence->get() < 0) {
+        bool success = finish();
+        if (!success) {
+            ALOGE("Failed to flush RenderEngine commands");
+            checkErrors();
+            // Chances are, something illegal happened (either the caller passed
+            // us bad parameters, or we messed up our shader generation).
+            return INVALID_OPERATION;
+        }
+        mLastDrawFence = nullptr;
+    } else {
+        // The caller takes ownership of drawFence, so we need to duplicate the
+        // fd here.
+        mLastDrawFence = new Fence(dup(drawFence->get()));
+    }
+    mPriorResourcesCleaned = false;
+
+    checkErrors();
+    return NO_ERROR;
+}
+
+void GLESRenderEngine::setViewportAndProjection(Rect viewport, Rect clip) {
+    ATRACE_CALL();
+    mVpWidth = viewport.getWidth();
+    mVpHeight = viewport.getHeight();
+
+    // We pass the top left corner instead of the bottom left corner,
+    // because we're rendering off-screen first.
+    glViewport(viewport.left, viewport.top, mVpWidth, mVpHeight);
+
+    mState.projectionMatrix = mat4::ortho(clip.left, clip.right, clip.top, clip.bottom, 0, 1);
+}
+
+void GLESRenderEngine::setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+                                          const half4& color, float cornerRadius) {
+    mState.isPremultipliedAlpha = premultipliedAlpha;
+    mState.isOpaque = opaque;
+    mState.color = color;
+    mState.cornerRadius = cornerRadius;
+
+    if (disableTexture) {
+        mState.textureEnabled = false;
+    }
+
+    if (color.a < 1.0f || !opaque || cornerRadius > 0.0f) {
+        glEnable(GL_BLEND);
+        glBlendFunc(premultipliedAlpha ? GL_ONE : GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+    } else {
+        glDisable(GL_BLEND);
+    }
+}
+
+void GLESRenderEngine::setSourceY410BT2020(bool enable) {
+    mState.isY410BT2020 = enable;
+}
+
+void GLESRenderEngine::setSourceDataSpace(Dataspace source) {
+    mDataSpace = source;
+}
+
+void GLESRenderEngine::setOutputDataSpace(Dataspace dataspace) {
+    mOutputDataSpace = dataspace;
+}
+
+void GLESRenderEngine::setDisplayMaxLuminance(const float maxLuminance) {
+    mState.displayMaxLuminance = maxLuminance;
+}
+
+void GLESRenderEngine::setupLayerTexturing(const Texture& texture) {
+    GLuint target = texture.getTextureTarget();
+    glBindTexture(target, texture.getTextureName());
+    GLenum filter = GL_NEAREST;
+    if (texture.getFiltering()) {
+        filter = GL_LINEAR;
+    }
+    glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri(target, GL_TEXTURE_MAG_FILTER, filter);
+    glTexParameteri(target, GL_TEXTURE_MIN_FILTER, filter);
+
+    mState.texture = texture;
+    mState.textureEnabled = true;
+}
+
+void GLESRenderEngine::setColorTransform(const mat4& colorTransform) {
+    mState.colorMatrix = colorTransform;
+}
+
+void GLESRenderEngine::disableTexturing() {
+    mState.textureEnabled = false;
+}
+
+void GLESRenderEngine::disableBlending() {
+    glDisable(GL_BLEND);
+}
+
+void GLESRenderEngine::setupFillWithColor(float r, float g, float b, float a) {
+    mState.isPremultipliedAlpha = true;
+    mState.isOpaque = false;
+    mState.color = half4(r, g, b, a);
+    mState.textureEnabled = false;
+    glDisable(GL_BLEND);
+}
+
+void GLESRenderEngine::setupCornerRadiusCropSize(float width, float height) {
+    mState.cropSize = half2(width, height);
+}
+
+void GLESRenderEngine::drawMesh(const Mesh& mesh) {
+    ATRACE_CALL();
+    if (mesh.getTexCoordsSize()) {
+        glEnableVertexAttribArray(Program::texCoords);
+        glVertexAttribPointer(Program::texCoords, mesh.getTexCoordsSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getTexCoords());
+    }
+
+    glVertexAttribPointer(Program::position, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
+                          mesh.getByteStride(), mesh.getPositions());
+
+    if (mState.cornerRadius > 0.0f) {
+        glEnableVertexAttribArray(Program::cropCoords);
+        glVertexAttribPointer(Program::cropCoords, mesh.getVertexSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getCropCoords());
+    }
+
+    if (mState.drawShadows) {
+        glEnableVertexAttribArray(Program::shadowColor);
+        glVertexAttribPointer(Program::shadowColor, mesh.getShadowColorSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getShadowColor());
+
+        glEnableVertexAttribArray(Program::shadowParams);
+        glVertexAttribPointer(Program::shadowParams, mesh.getShadowParamsSize(), GL_FLOAT, GL_FALSE,
+                              mesh.getByteStride(), mesh.getShadowParams());
+    }
+
+    Description managedState = mState;
+    // By default, DISPLAY_P3 is the only supported wide color output. However,
+    // when HDR content is present, the hardware composer may be able to handle the
+    // BT2020 data space; in that case, the output data space is set to
+    // BT2020_HLG or BT2020_PQ respectively. In the GPU fallback path we need
+    // to respect this and convert non-HDR content to the HDR format.
+    if (mUseColorManagement) {
+        Dataspace inputStandard = static_cast<Dataspace>(mDataSpace & Dataspace::STANDARD_MASK);
+        Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
+        Dataspace outputStandard =
+                static_cast<Dataspace>(mOutputDataSpace & Dataspace::STANDARD_MASK);
+        Dataspace outputTransfer =
+                static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
+        bool needsXYZConversion = needsXYZTransformMatrix();
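+        // For example, sRGB content composited to a Display P3 output (both SDR) takes the
+        // direct matrix path below, while sRGB content composited to a BT2020_PQ output has
+        // an HDR endpoint with a mismatched transfer function and goes through XYZ.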
+
+        // NOTE: if the input dataspace's standard is not STANDARD_DCI_P3 or
+        // STANDARD_BT2020, it will be treated as STANDARD_BT709.
+        if (inputStandard != Dataspace::STANDARD_DCI_P3 &&
+            inputStandard != Dataspace::STANDARD_BT2020) {
+            inputStandard = Dataspace::STANDARD_BT709;
+        }
+
+        if (needsXYZConversion) {
+            // The supported input color spaces are standard RGB, Display P3 and BT2020.
+            switch (inputStandard) {
+                case Dataspace::STANDARD_DCI_P3:
+                    managedState.inputTransformMatrix = mDisplayP3ToXyz;
+                    break;
+                case Dataspace::STANDARD_BT2020:
+                    managedState.inputTransformMatrix = mBt2020ToXyz;
+                    break;
+                default:
+                    managedState.inputTransformMatrix = mSrgbToXyz;
+                    break;
+            }
+
+            // The supported output color spaces are BT2020, Display P3 and standard RGB.
+            switch (outputStandard) {
+                case Dataspace::STANDARD_BT2020:
+                    managedState.outputTransformMatrix = mXyzToBt2020;
+                    break;
+                case Dataspace::STANDARD_DCI_P3:
+                    managedState.outputTransformMatrix = mXyzToDisplayP3;
+                    break;
+                default:
+                    managedState.outputTransformMatrix = mXyzToSrgb;
+                    break;
+            }
+        } else if (inputStandard != outputStandard) {
+            // At this point, the input and output data spaces could both be
+            // HDR data spaces, but since they match each other, we do nothing in this case.
+            // In addition to the case above, the input data space could be
+            // - scRGB linear
+            // - scRGB non-linear
+            // - sRGB
+            // - Display P3
+            // - BT2020
+            // The output data spaces could be
+            // - sRGB
+            // - Display P3
+            // - BT2020
+            switch (outputStandard) {
+                case Dataspace::STANDARD_BT2020:
+                    if (inputStandard == Dataspace::STANDARD_BT709) {
+                        managedState.outputTransformMatrix = mSrgbToBt2020;
+                    } else if (inputStandard == Dataspace::STANDARD_DCI_P3) {
+                        managedState.outputTransformMatrix = mDisplayP3ToBt2020;
+                    }
+                    break;
+                case Dataspace::STANDARD_DCI_P3:
+                    if (inputStandard == Dataspace::STANDARD_BT709) {
+                        managedState.outputTransformMatrix = mSrgbToDisplayP3;
+                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
+                        managedState.outputTransformMatrix = mBt2020ToDisplayP3;
+                    }
+                    break;
+                default:
+                    if (inputStandard == Dataspace::STANDARD_DCI_P3) {
+                        managedState.outputTransformMatrix = mDisplayP3ToSrgb;
+                    } else if (inputStandard == Dataspace::STANDARD_BT2020) {
+                        managedState.outputTransformMatrix = mBt2020ToSrgb;
+                    }
+                    break;
+            }
+        }
+
+        // we need to convert the RGB value to linear space and convert it back when:
+        // - there is a color matrix that is not an identity matrix, or
+        // - there is an output transform matrix that is not an identity matrix, or
+        // - the input transfer function doesn't match the output transfer function.
+        if (managedState.hasColorMatrix() || managedState.hasOutputTransformMatrix() ||
+            inputTransfer != outputTransfer) {
+            managedState.inputTransferFunction =
+                    Description::dataSpaceToTransferFunction(inputTransfer);
+            managedState.outputTransferFunction =
+                    Description::dataSpaceToTransferFunction(outputTransfer);
+        }
+    }
+
+    ProgramCache::getInstance().useProgram(mInProtectedContext ? mProtectedEGLContext : mEGLContext,
+                                           managedState);
+
+    if (mState.drawShadows) {
+        glDrawElements(mesh.getPrimitive(), mesh.getIndexCount(), GL_UNSIGNED_SHORT,
+                       mesh.getIndices());
+    } else {
+        glDrawArrays(mesh.getPrimitive(), 0, mesh.getVertexCount());
+    }
+
+    if (mUseColorManagement && outputDebugPPMs) {
+        static uint64_t managedColorFrameCount = 0;
+        std::ostringstream out;
+        out << "/data/texture_out" << managedColorFrameCount++;
+        writePPM(out.str().c_str(), mVpWidth, mVpHeight);
+    }
+
+    if (mesh.getTexCoordsSize()) {
+        glDisableVertexAttribArray(Program::texCoords);
+    }
+
+    if (mState.cornerRadius > 0.0f) {
+        glDisableVertexAttribArray(Program::cropCoords);
+    }
+
+    if (mState.drawShadows) {
+        glDisableVertexAttribArray(Program::shadowColor);
+        glDisableVertexAttribArray(Program::shadowParams);
+    }
+}
+
+size_t GLESRenderEngine::getMaxTextureSize() const {
+    return mMaxTextureSize;
+}
+
+size_t GLESRenderEngine::getMaxViewportDims() const {
+    return mMaxViewportDims[0] < mMaxViewportDims[1] ? mMaxViewportDims[0] : mMaxViewportDims[1];
+}
+
+void GLESRenderEngine::dump(std::string& result) {
+    const GLExtensions& extensions = GLExtensions::getInstance();
+    ProgramCache& cache = ProgramCache::getInstance();
+
+    StringAppendF(&result, "EGL implementation : %s\n", extensions.getEGLVersion());
+    StringAppendF(&result, "%s\n", extensions.getEGLExtensions());
+    StringAppendF(&result, "GLES: %s, %s, %s\n", extensions.getVendor(), extensions.getRenderer(),
+                  extensions.getVersion());
+    StringAppendF(&result, "%s\n", extensions.getExtensions());
+    StringAppendF(&result, "RenderEngine supports protected context: %d\n",
+                  supportsProtectedContent());
+    StringAppendF(&result, "RenderEngine is in protected context: %d\n", mInProtectedContext);
+    StringAppendF(&result, "RenderEngine program cache size for unprotected context: %zu\n",
+                  cache.getSize(mEGLContext));
+    StringAppendF(&result, "RenderEngine program cache size for protected context: %zu\n",
+                  cache.getSize(mProtectedEGLContext));
+    StringAppendF(&result, "RenderEngine last dataspace conversion: (%s) to (%s)\n",
+                  dataspaceDetails(static_cast<android_dataspace>(mDataSpace)).c_str(),
+                  dataspaceDetails(static_cast<android_dataspace>(mOutputDataSpace)).c_str());
+    {
+        std::lock_guard<std::mutex> lock(mRenderingMutex);
+        StringAppendF(&result, "RenderEngine image cache size: %zu\n", mImageCache.size());
+        StringAppendF(&result, "Dumping buffer ids...\n");
+        for (const auto& [id, unused] : mImageCache) {
+            StringAppendF(&result, "0x%" PRIx64 "\n", id);
+        }
+    }
+    {
+        std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+        StringAppendF(&result, "RenderEngine framebuffer image cache size: %zu\n",
+                      mFramebufferImageCache.size());
+        StringAppendF(&result, "Dumping buffer ids...\n");
+        for (const auto& [id, unused] : mFramebufferImageCache) {
+            StringAppendF(&result, "0x%" PRIx64 "\n", id);
+        }
+    }
+}
+
+GLESRenderEngine::GlesVersion GLESRenderEngine::parseGlesVersion(const char* str) {
+    int major, minor;
+    if (sscanf(str, "OpenGL ES-CM %d.%d", &major, &minor) != 2) {
+        if (sscanf(str, "OpenGL ES %d.%d", &major, &minor) != 2) {
+            ALOGW("Unable to parse GL_VERSION string: \"%s\"", str);
+            return GLES_VERSION_1_0;
+        }
+    }
+
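+    // Map to the closest enum we know about; e.g. an "OpenGL ES 3.2" version string is
+    // reported as GLES_VERSION_3_0.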
+    if (major == 1 && minor == 0) return GLES_VERSION_1_0;
+    if (major == 1 && minor >= 1) return GLES_VERSION_1_1;
+    if (major == 2 && minor >= 0) return GLES_VERSION_2_0;
+    if (major == 3 && minor >= 0) return GLES_VERSION_3_0;
+
+    ALOGW("Unrecognized OpenGL ES version: %d.%d", major, minor);
+    return GLES_VERSION_1_0;
+}
+
+EGLContext GLESRenderEngine::createEglContext(EGLDisplay display, EGLConfig config,
+                                              EGLContext shareContext, bool useContextPriority,
+                                              Protection protection) {
+    EGLint renderableType = 0;
+    if (config == EGL_NO_CONFIG) {
+        renderableType = EGL_OPENGL_ES3_BIT;
+    } else if (!eglGetConfigAttrib(display, config, EGL_RENDERABLE_TYPE, &renderableType)) {
+        LOG_ALWAYS_FATAL("can't query EGLConfig RENDERABLE_TYPE");
+    }
+    EGLint contextClientVersion = 0;
+    if (renderableType & EGL_OPENGL_ES3_BIT) {
+        contextClientVersion = 3;
+    } else if (renderableType & EGL_OPENGL_ES2_BIT) {
+        contextClientVersion = 2;
+    } else if (renderableType & EGL_OPENGL_ES_BIT) {
+        contextClientVersion = 1;
+    } else {
+        LOG_ALWAYS_FATAL("no supported EGL_RENDERABLE_TYPEs");
+    }
+
+    std::vector<EGLint> contextAttributes;
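+    // Worst case: client version (2) + context priority (2) + protected content (2) + EGL_NONE.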
+    contextAttributes.reserve(7);
+    contextAttributes.push_back(EGL_CONTEXT_CLIENT_VERSION);
+    contextAttributes.push_back(contextClientVersion);
+    if (useContextPriority) {
+        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_LEVEL_IMG);
+        contextAttributes.push_back(EGL_CONTEXT_PRIORITY_HIGH_IMG);
+    }
+    if (protection == Protection::PROTECTED) {
+        contextAttributes.push_back(EGL_PROTECTED_CONTENT_EXT);
+        contextAttributes.push_back(EGL_TRUE);
+    }
+    contextAttributes.push_back(EGL_NONE);
+
+    EGLContext context = eglCreateContext(display, config, shareContext, contextAttributes.data());
+
+    if (contextClientVersion == 3 && context == EGL_NO_CONTEXT) {
+        // eglGetConfigAttrib indicated we could create a GLES 3 context, but creation
+        // failed; return the resulting EGL_NO_CONTEXT so that the caller can abort.
+        if (config != EGL_NO_CONFIG) {
+            return context;
+        }
+        // If |config| is EGL_NO_CONFIG, we speculatively tried to create a GLES 3 context,
+        // so we should try to fall back to GLES 2.
+        contextAttributes[1] = 2;
+        context = eglCreateContext(display, config, shareContext, contextAttributes.data());
+    }
+
+    return context;
+}
+
+EGLSurface GLESRenderEngine::createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+                                                         int hwcFormat, Protection protection) {
+    EGLConfig stubConfig = config;
+    if (stubConfig == EGL_NO_CONFIG) {
+        stubConfig = chooseEglConfig(display, hwcFormat, /*logConfig*/ true);
+    }
+    std::vector<EGLint> attributes;
+    attributes.reserve(7);
+    attributes.push_back(EGL_WIDTH);
+    attributes.push_back(1);
+    attributes.push_back(EGL_HEIGHT);
+    attributes.push_back(1);
+    if (protection == Protection::PROTECTED) {
+        attributes.push_back(EGL_PROTECTED_CONTENT_EXT);
+        attributes.push_back(EGL_TRUE);
+    }
+    attributes.push_back(EGL_NONE);
+
+    return eglCreatePbufferSurface(display, stubConfig, attributes.data());
+}
+
+bool GLESRenderEngine::isHdrDataSpace(const Dataspace dataSpace) const {
+    const Dataspace standard = static_cast<Dataspace>(dataSpace & Dataspace::STANDARD_MASK);
+    const Dataspace transfer = static_cast<Dataspace>(dataSpace & Dataspace::TRANSFER_MASK);
+    return standard == Dataspace::STANDARD_BT2020 &&
+            (transfer == Dataspace::TRANSFER_ST2084 || transfer == Dataspace::TRANSFER_HLG);
+}
+
+// For convenience, we want to convert the input color space to XYZ color space first,
+// and then convert from XYZ color space to the output color space when
+// - SDR and HDR contents are mixed, so either SDR content is converted to HDR or
+//   HDR content is tone-mapped to SDR; or
+// - HDR PQ and HLG contents are presented at the same time, and we want to convert
+//   HLG content to PQ content.
+// In either case above, we need to operate on the Y value in XYZ color space. Thus, when
+// either the input or the output data space is an HDR data space, and the input transfer
+// function doesn't match the output transfer function, we enable an intermediate transform
+// to XYZ color space.
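+// For example, mixing BT2020 PQ content with a BT2020 HLG output (or vice versa) takes this
+// path, as does rendering sRGB content to a PQ output target.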
+bool GLESRenderEngine::needsXYZTransformMatrix() const {
+    const bool isInputHdrDataSpace = isHdrDataSpace(mDataSpace);
+    const bool isOutputHdrDataSpace = isHdrDataSpace(mOutputDataSpace);
+    const Dataspace inputTransfer = static_cast<Dataspace>(mDataSpace & Dataspace::TRANSFER_MASK);
+    const Dataspace outputTransfer =
+            static_cast<Dataspace>(mOutputDataSpace & Dataspace::TRANSFER_MASK);
+
+    return (isInputHdrDataSpace || isOutputHdrDataSpace) && inputTransfer != outputTransfer;
+}
+
+bool GLESRenderEngine::isImageCachedForTesting(uint64_t bufferId) {
+    std::lock_guard<std::mutex> lock(mRenderingMutex);
+    const auto& cachedImage = mImageCache.find(bufferId);
+    return cachedImage != mImageCache.end();
+}
+
+bool GLESRenderEngine::isTextureNameKnownForTesting(uint32_t texName) {
+    const auto& entry = mTextureView.find(texName);
+    return entry != mTextureView.end();
+}
+
+std::optional<uint64_t> GLESRenderEngine::getBufferIdForTextureNameForTesting(uint32_t texName) {
+    const auto& entry = mTextureView.find(texName);
+    return entry != mTextureView.end() ? entry->second : std::nullopt;
+}
+
+bool GLESRenderEngine::isFramebufferImageCachedForTesting(uint64_t bufferId) {
+    std::lock_guard<std::mutex> lock(mFramebufferImageCacheMutex);
+    return std::any_of(mFramebufferImageCache.cbegin(), mFramebufferImageCache.cend(),
+                       [=](std::pair<uint64_t, EGLImageKHR> image) {
+                           return image.first == bufferId;
+                       });
+}
+
+// FlushTracer implementation. This is a debug-only helper, enabled via the
+// debug.egl.traceGpuCompletion property, that waits on queued GPU fences from a dedicated
+// thread so that GPU completion time shows up in traces.
+GLESRenderEngine::FlushTracer::FlushTracer(GLESRenderEngine* engine) : mEngine(engine) {
+    mThread = std::thread(&GLESRenderEngine::FlushTracer::loop, this);
+}
+
+GLESRenderEngine::FlushTracer::~FlushTracer() {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mRunning = false;
+    }
+    mCondition.notify_all();
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void GLESRenderEngine::FlushTracer::queueSync(EGLSyncKHR sync) {
+    std::lock_guard<std::mutex> lock(mMutex);
+    char name[64];
+    const uint64_t frameNum = mFramesQueued++;
+    snprintf(name, sizeof(name), "Queueing sync for frame: %lu",
+             static_cast<unsigned long>(frameNum));
+    ATRACE_NAME(name);
+    mQueue.push({sync, frameNum});
+    ATRACE_INT("GPU Frames Outstanding", mQueue.size());
+    mCondition.notify_one();
+}
+
+void GLESRenderEngine::FlushTracer::loop() {
+    while (mRunning) {
+        QueueEntry entry;
+        {
+            std::lock_guard<std::mutex> lock(mMutex);
+
+            mCondition.wait(mMutex,
+                            [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+
+            if (!mRunning) {
+                // if mRunning is false, then FlushTracer is being destroyed, so
+                // bail out now.
+                break;
+            }
+            entry = mQueue.front();
+            mQueue.pop();
+        }
+        {
+            char name[64];
+            snprintf(name, sizeof(name), "waiting for frame %lu",
+                     static_cast<unsigned long>(entry.mFrameNum));
+            ATRACE_NAME(name);
+            mEngine->waitSync(entry.mSync, 0);
+        }
+    }
+}
+
+void GLESRenderEngine::handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+                                    const ShadowSettings& settings) {
+    ATRACE_CALL();
+    const float casterZ = settings.length / 2.0f;
+    const GLShadowVertexGenerator shadows(casterRect, casterCornerRadius, casterZ,
+                                          settings.casterIsTranslucent, settings.ambientColor,
+                                          settings.spotColor, settings.lightPos,
+                                          settings.lightRadius);
+
+    // setup mesh for both shadows
+    Mesh mesh = Mesh::Builder()
+                        .setPrimitive(Mesh::TRIANGLES)
+                        .setVertices(shadows.getVertexCount(), 2 /* size */)
+                        .setShadowAttrs()
+                        .setIndices(shadows.getIndexCount())
+                        .build();
+
+    Mesh::VertexArray<vec2> position = mesh.getPositionArray<vec2>();
+    Mesh::VertexArray<vec4> shadowColor = mesh.getShadowColorArray<vec4>();
+    Mesh::VertexArray<vec3> shadowParams = mesh.getShadowParamsArray<vec3>();
+    shadows.fillVertices(position, shadowColor, shadowParams);
+    shadows.fillIndices(mesh.getIndicesArray());
+
+    mState.cornerRadius = 0.0f;
+    mState.drawShadows = true;
+    setupLayerTexturing(mShadowTexture.getTexture());
+    drawMesh(mesh);
+    mState.drawShadows = false;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLESRenderEngine.h b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
new file mode 100644
index 0000000..2c6eae2
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLESRenderEngine.h
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_GLESRENDERENGINE_H_
+#define SF_GLESRENDERENGINE_H_
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <android-base/thread_annotations.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/private/Description.h>
+#include <sys/types.h>
+#include "GLShadowTexture.h"
+#include "ImageManager.h"
+
+#define EGL_NO_CONFIG ((EGLConfig)0)
+
+namespace android {
+
+namespace renderengine {
+
+class Mesh;
+class Texture;
+
+namespace gl {
+
+class GLImage;
+class BlurFilter;
+
+class GLESRenderEngine : public impl::RenderEngine {
+public:
+    static std::unique_ptr<GLESRenderEngine> create(const RenderEngineCreationArgs& args);
+
+    GLESRenderEngine(const RenderEngineCreationArgs& args, EGLDisplay display, EGLConfig config,
+                     EGLContext ctxt, EGLSurface stub, EGLContext protectedContext,
+                     EGLSurface protectedStub);
+    ~GLESRenderEngine() override EXCLUDES(mRenderingMutex);
+
+    void primeCache() const override;
+    void genTextures(size_t count, uint32_t* names) override;
+    void deleteTextures(size_t count, uint32_t const* names) override;
+    void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+    status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                       const sp<Fence>& fence) EXCLUDES(mRenderingMutex);
+    void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) EXCLUDES(mRenderingMutex);
+    void unbindExternalTextureBuffer(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+    status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+    void unbindFrameBuffer(Framebuffer* framebuffer) override;
+
+    bool isProtected() const override { return mInProtectedContext; }
+    bool supportsProtectedContent() const override;
+    bool useProtectedContext(bool useProtectedContext) override;
+    status_t drawLayers(const DisplaySettings& display,
+                        const std::vector<const LayerSettings*>& layers,
+                        const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                        base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+    bool cleanupPostRender(CleanupMode mode) override;
+
+    EGLDisplay getEGLDisplay() const { return mEGLDisplay; }
+    // Creates an output image for rendering to
+    EGLImageKHR createFramebufferImageIfNeeded(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                               bool useFramebufferCache)
+            EXCLUDES(mFramebufferImageCacheMutex);
+
+    // Test-only methods
+    // Returns true iff mImageCache contains an image keyed by bufferId
+    bool isImageCachedForTesting(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+    // Returns true iff texName was previously generated by RenderEngine and was
+    // not destroyed.
+    bool isTextureNameKnownForTesting(uint32_t texName);
+    // Returns the buffer ID of the content bound to texName, or nullopt if no
+    // such mapping exists.
+    std::optional<uint64_t> getBufferIdForTextureNameForTesting(uint32_t texName);
+    // Returns true iff mFramebufferImageCache contains an image keyed by bufferId
+    bool isFramebufferImageCachedForTesting(uint64_t bufferId)
+            EXCLUDES(mFramebufferImageCacheMutex);
+    // These are wrappers around public methods above, but exposing Barrier
+    // objects so that tests can block.
+    std::shared_ptr<ImageManager::Barrier> cacheExternalTextureBufferForTesting(
+            const sp<GraphicBuffer>& buffer);
+    std::shared_ptr<ImageManager::Barrier> unbindExternalTextureBufferForTesting(uint64_t bufferId);
+
+protected:
+    Framebuffer* getFramebufferForDrawing() override;
+    void dump(std::string& result) override EXCLUDES(mRenderingMutex)
+            EXCLUDES(mFramebufferImageCacheMutex);
+    size_t getMaxTextureSize() const override;
+    size_t getMaxViewportDims() const override;
+
+private:
+    enum GlesVersion {
+        GLES_VERSION_1_0 = 0x10000,
+        GLES_VERSION_1_1 = 0x10001,
+        GLES_VERSION_2_0 = 0x20000,
+        GLES_VERSION_3_0 = 0x30000,
+    };
+
+    static EGLConfig chooseEglConfig(EGLDisplay display, int format, bool logConfig);
+    static GlesVersion parseGlesVersion(const char* str);
+    static EGLContext createEglContext(EGLDisplay display, EGLConfig config,
+                                       EGLContext shareContext, bool useContextPriority,
+                                       Protection protection);
+    static EGLSurface createStubEglPbufferSurface(EGLDisplay display, EGLConfig config,
+                                                  int hwcFormat, Protection protection);
+    std::unique_ptr<Framebuffer> createFramebuffer();
+    std::unique_ptr<Image> createImage();
+    void checkErrors() const;
+    void checkErrors(const char* tag) const;
+    void setScissor(const Rect& region);
+    void disableScissor();
+    bool waitSync(EGLSyncKHR sync, EGLint flags);
+    status_t cacheExternalTextureBufferInternal(const sp<GraphicBuffer>& buffer)
+            EXCLUDES(mRenderingMutex);
+    void unbindExternalTextureBufferInternal(uint64_t bufferId) EXCLUDES(mRenderingMutex);
+
+    // A dataspace is considered an HDR dataspace if it uses the BT2020 color space
+    // with a PQ or HLG transfer function.
+    bool isHdrDataSpace(const ui::Dataspace dataSpace) const;
+    bool needsXYZTransformMatrix() const;
+    // Defines the viewport, and sets the projection matrix to the projection
+    // defined by the clip.
+    void setViewportAndProjection(Rect viewport, Rect clip);
+    // Evicts stale images from the buffer cache.
+    void evictImages(const std::vector<LayerSettings>& layers);
+    // Computes the cropping window for the layer and sets up cropping
+    // coordinates for the mesh.
+    FloatRect setupLayerCropping(const LayerSettings& layer, Mesh& mesh);
+
+    // We use special handling for rounded corners when it's possible to turn off blending
+    // for the majority of the layer. The rounded corners need blending enabled so that
+    // we can set the alpha value correctly; however, only the corners need this, and since
+    // blending is an expensive operation, we want to turn it off whenever it's not necessary.
+    void handleRoundedCorners(const DisplaySettings& display, const LayerSettings& layer,
+                              const Mesh& mesh);
+    base::unique_fd flush();
+    bool finish();
+    bool waitFence(base::unique_fd fenceFd);
+    void clearWithColor(float red, float green, float blue, float alpha);
+    void fillRegionWithColor(const Region& region, float red, float green, float blue, float alpha);
+    void handleShadow(const FloatRect& casterRect, float casterCornerRadius,
+                      const ShadowSettings& shadowSettings);
+    void setupLayerBlending(bool premultipliedAlpha, bool opaque, bool disableTexture,
+                            const half4& color, float cornerRadius);
+    void setupLayerTexturing(const Texture& texture);
+    void setupFillWithColor(float r, float g, float b, float a);
+    void setColorTransform(const mat4& colorTransform);
+    void disableTexturing();
+    void disableBlending();
+    void setupCornerRadiusCropSize(float width, float height);
+
+    // HDR and color management related functions and state
+    void setSourceY410BT2020(bool enable);
+    void setSourceDataSpace(ui::Dataspace source);
+    void setOutputDataSpace(ui::Dataspace dataspace);
+    void setDisplayMaxLuminance(const float maxLuminance);
+
+    // drawing
+    void drawMesh(const Mesh& mesh);
+
+    EGLDisplay mEGLDisplay;
+    EGLConfig mEGLConfig;
+    EGLContext mEGLContext;
+    EGLSurface mStubSurface;
+    EGLContext mProtectedEGLContext;
+    EGLSurface mProtectedStubSurface;
+    GLint mMaxViewportDims[2];
+    GLint mMaxTextureSize;
+    GLuint mVpWidth;
+    GLuint mVpHeight;
+    Description mState;
+    GLShadowTexture mShadowTexture;
+
+    mat4 mSrgbToXyz;
+    mat4 mDisplayP3ToXyz;
+    mat4 mBt2020ToXyz;
+    mat4 mXyzToSrgb;
+    mat4 mXyzToDisplayP3;
+    mat4 mXyzToBt2020;
+    mat4 mSrgbToDisplayP3;
+    mat4 mSrgbToBt2020;
+    mat4 mDisplayP3ToSrgb;
+    mat4 mDisplayP3ToBt2020;
+    mat4 mBt2020ToSrgb;
+    mat4 mBt2020ToDisplayP3;
+
+    bool mInProtectedContext = false;
+    // If set to true, then enables tracing flush() and finish() to systrace.
+    bool mTraceGpuCompletion = false;
+    // Maximum size of mFramebufferImageCache. If more images would be cached, then
+    // (approximately) the least recently used buffer should be kicked out (see the
+    // sketch below the cache declaration).
+    uint32_t mFramebufferImageCacheSize = 0;
+
+    // Cache of output images, keyed by corresponding GraphicBuffer ID.
+    std::deque<std::pair<uint64_t, EGLImageKHR>> mFramebufferImageCache
+            GUARDED_BY(mFramebufferImageCacheMutex);
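+    // Rough sketch of the eviction described above; the real logic lives in the .cpp
+    // (presumably in createFramebufferImageIfNeeded()) and may differ. Assuming the
+    // deque is kept oldest-first:
+    //
+    //   if (mFramebufferImageCache.size() >= mFramebufferImageCacheSize) {
+    //       EGLImageKHR expired = mFramebufferImageCache.front().second;
+    //       mFramebufferImageCache.pop_front();
+    //       eglDestroyImageKHR(mEGLDisplay, expired);
+    //   }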
+    // The only reason why we have this mutex is so that we don't segfault when
+    // dumping info.
+    std::mutex mFramebufferImageCacheMutex;
+
+    // Current dataspace of layer being rendered
+    ui::Dataspace mDataSpace = ui::Dataspace::UNKNOWN;
+
+    // Current output dataspace of the render engine
+    ui::Dataspace mOutputDataSpace = ui::Dataspace::UNKNOWN;
+
+    // Whether the device supports color management. Currently, color management
+    // supports the sRGB and Display-P3 color spaces.
+    const bool mUseColorManagement = false;
+
+    // Cache of GL images that we'll store per GraphicBuffer ID
+    std::unordered_map<uint64_t, std::unique_ptr<Image>> mImageCache GUARDED_BY(mRenderingMutex);
+    std::unordered_map<uint32_t, std::optional<uint64_t>> mTextureView;
+
+    // Mutex guarding rendering operations, so that:
+    // 1. GL operations aren't interleaved, and
+    // 2. Internal state related to rendering that is potentially modified by
+    // multiple threads is guaranteed thread-safe.
+    std::mutex mRenderingMutex;
+
+    std::unique_ptr<Framebuffer> mDrawingBuffer;
+    // this is a 1x1 RGB buffer, but over-allocate in case a driver wants more
+    // memory or if it needs to satisfy alignment requirements. In this case:
+    // assume that each channel requires 4 bytes, and add 3 additional bytes to
+    // ensure that we align on a word. Allocating 16 bytes will provide a
+    // guarantee that we don't clobber memory.
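+    // (Back-of-the-envelope: 3 channels * 4 bytes = 12 bytes, plus 3 bytes of padding,
+    // rounds up to 16 bytes, i.e. the 4 * sizeof(uint32_t) below.)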
+    uint32_t mPlaceholderDrawBuffer[4];
+    // Placeholder buffer and image, similar to mPlaceholderDrawBuffer, but
+    // instead these are intended for cleaning up texture memory with the
+    // GL_TEXTURE_EXTERNAL_OES target.
+    ANativeWindowBuffer* mPlaceholderBuffer = nullptr;
+    EGLImage mPlaceholderImage = EGL_NO_IMAGE_KHR;
+    sp<Fence> mLastDrawFence;
+    // Store a separate boolean checking if prior resources were cleaned up, as
+    // devices that don't support native sync fences can't rely on a last draw
+    // fence that doesn't exist.
+    bool mPriorResourcesCleaned = true;
+
+    // Blur effect processor, only instantiated when a layer requests it.
+    BlurFilter* mBlurFilter = nullptr;
+
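+    // Consumer thread used for GPU-completion tracing (enabled via mTraceGpuCompletion):
+    // syncs handed to queueSync() are waited on one at a time in loop() (see the
+    // implementation in GLESRenderEngine.cpp), which emits a "waiting for frame N"
+    // trace section per queued sync.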
+    class FlushTracer {
+    public:
+        FlushTracer(GLESRenderEngine* engine);
+        ~FlushTracer();
+        void queueSync(EGLSyncKHR sync) EXCLUDES(mMutex);
+
+        struct QueueEntry {
+            EGLSyncKHR mSync = nullptr;
+            uint64_t mFrameNum = 0;
+        };
+
+    private:
+        void loop();
+        GLESRenderEngine* const mEngine;
+        std::thread mThread;
+        std::condition_variable_any mCondition;
+        std::mutex mMutex;
+        std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+        uint64_t mFramesQueued GUARDED_BY(mMutex) = 0;
+        bool mRunning = true;
+    };
+    friend class FlushTracer;
+    friend class ImageManager;
+    friend class GLFramebuffer;
+    friend class BlurFilter;
+    friend class GenericProgram;
+    std::unique_ptr<FlushTracer> mFlushTracer;
+    std::unique_ptr<ImageManager> mImageManager = std::make_unique<ImageManager>(this);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_GLESRENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.cpp b/media/libstagefright/renderfright/gl/GLExtensions.cpp
new file mode 100644
index 0000000..2924b0e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GLExtensions.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::GLExtensions)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+namespace {
+
+class ExtensionSet {
+public:
+    ExtensionSet(const char* extensions) {
+        char const* curr = extensions;
+        char const* head = curr;
+        do {
+            head = strchr(curr, ' ');
+            size_t len = head ? head - curr : strlen(curr);
+            if (len > 0) {
+                mExtensions.emplace(curr, len);
+            }
+            curr = head + 1;
+        } while (head);
+    }
+
+    bool hasExtension(const char* extension) const { return mExtensions.count(extension) > 0; }
+
+private:
+    std::unordered_set<std::string> mExtensions;
+};
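+
+// For illustration, with hypothetical extension strings: the constructor splits the
+// space-separated list into tokens, so hasExtension() is a plain set lookup.
+//
+//   ExtensionSet set("GL_EXT_protected_textures GL_OES_EGL_image");
+//   set.hasExtension("GL_EXT_protected_textures");  // true
+//   set.hasExtension("GL_KHR_debug");               // false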
+
+} // anonymous namespace
+
+void GLExtensions::initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer,
+                                     GLubyte const* version, GLubyte const* extensions) {
+    mVendor = (char const*)vendor;
+    mRenderer = (char const*)renderer;
+    mVersion = (char const*)version;
+    mExtensions = (char const*)extensions;
+
+    ExtensionSet extensionSet(mExtensions.c_str());
+    if (extensionSet.hasExtension("GL_EXT_protected_textures")) {
+        mHasProtectedTexture = true;
+    }
+}
+
+char const* GLExtensions::getVendor() const {
+    return mVendor.string();
+}
+
+char const* GLExtensions::getRenderer() const {
+    return mRenderer.string();
+}
+
+char const* GLExtensions::getVersion() const {
+    return mVersion.string();
+}
+
+char const* GLExtensions::getExtensions() const {
+    return mExtensions.string();
+}
+
+void GLExtensions::initWithEGLStrings(char const* eglVersion, char const* eglExtensions) {
+    mEGLVersion = eglVersion;
+    mEGLExtensions = eglExtensions;
+
+    ExtensionSet extensionSet(eglExtensions);
+
+    // EGL_ANDROIDX_no_config_context is an experimental extension with no
+    // written specification. It will be replaced by something more formal.
+    // SurfaceFlinger is using it to allow a single EGLContext to render to
+    // both a 16-bit primary display framebuffer and a 32-bit virtual display
+    // framebuffer.
+    //
+    // EGL_KHR_no_config_context is an official extension that allows creating a
+    // context that works with any surface of a display.
+    if (extensionSet.hasExtension("EGL_ANDROIDX_no_config_context") ||
+        extensionSet.hasExtension("EGL_KHR_no_config_context")) {
+        mHasNoConfigContext = true;
+    }
+
+    if (extensionSet.hasExtension("EGL_ANDROID_native_fence_sync")) {
+        mHasNativeFenceSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_fence_sync")) {
+        mHasFenceSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_wait_sync")) {
+        mHasWaitSync = true;
+    }
+    if (extensionSet.hasExtension("EGL_EXT_protected_content")) {
+        mHasProtectedContent = true;
+    }
+    if (extensionSet.hasExtension("EGL_IMG_context_priority")) {
+        mHasContextPriority = true;
+    }
+    if (extensionSet.hasExtension("EGL_KHR_surfaceless_context")) {
+        mHasSurfacelessContext = true;
+    }
+}
+
+char const* GLExtensions::getEGLVersion() const {
+    return mEGLVersion.string();
+}
+
+char const* GLExtensions::getEGLExtensions() const {
+    return mEGLExtensions.string();
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLExtensions.h b/media/libstagefright/renderfright/gl/GLExtensions.h
new file mode 100644
index 0000000..ef00009
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLExtensions.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SF_GLEXTENSION_H
+#define ANDROID_SF_GLEXTENSION_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <utils/Singleton.h>
+#include <utils/String8.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLExtensions : public Singleton<GLExtensions> {
+public:
+    bool hasNoConfigContext() const { return mHasNoConfigContext; }
+    bool hasNativeFenceSync() const { return mHasNativeFenceSync; }
+    bool hasFenceSync() const { return mHasFenceSync; }
+    bool hasWaitSync() const { return mHasWaitSync; }
+    bool hasProtectedContent() const { return mHasProtectedContent; }
+    bool hasContextPriority() const { return mHasContextPriority; }
+    bool hasSurfacelessContext() const { return mHasSurfacelessContext; }
+    bool hasProtectedTexture() const { return mHasProtectedTexture; }
+
+    void initWithGLStrings(GLubyte const* vendor, GLubyte const* renderer, GLubyte const* version,
+                           GLubyte const* extensions);
+    char const* getVendor() const;
+    char const* getRenderer() const;
+    char const* getVersion() const;
+    char const* getExtensions() const;
+
+    void initWithEGLStrings(char const* eglVersion, char const* eglExtensions);
+    char const* getEGLVersion() const;
+    char const* getEGLExtensions() const;
+
+protected:
+    GLExtensions() = default;
+
+private:
+    friend class Singleton<GLExtensions>;
+
+    bool mHasNoConfigContext = false;
+    bool mHasNativeFenceSync = false;
+    bool mHasFenceSync = false;
+    bool mHasWaitSync = false;
+    bool mHasProtectedContent = false;
+    bool mHasContextPriority = false;
+    bool mHasSurfacelessContext = false;
+    bool mHasProtectedTexture = false;
+
+    String8 mVendor;
+    String8 mRenderer;
+    String8 mVersion;
+    String8 mExtensions;
+    String8 mEGLVersion;
+    String8 mEGLExtensions;
+
+    GLExtensions(const GLExtensions&);
+    GLExtensions& operator=(const GLExtensions&);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif // ANDROID_SF_GLEXTENSION_H
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.cpp b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
new file mode 100644
index 0000000..d6582b3
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLFramebuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLFramebuffer::GLFramebuffer(GLESRenderEngine& engine)
+      : mEngine(engine), mEGLDisplay(engine.getEGLDisplay()), mEGLImage(EGL_NO_IMAGE_KHR) {
+    glGenTextures(1, &mTextureName);
+    glGenFramebuffers(1, &mFramebufferName);
+}
+
+GLFramebuffer::~GLFramebuffer() {
+    glDeleteFramebuffers(1, &mFramebufferName);
+    glDeleteTextures(1, &mTextureName);
+}
+
+bool GLFramebuffer::setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                          const bool useFramebufferCache) {
+    ATRACE_CALL();
+    if (mEGLImage != EGL_NO_IMAGE_KHR) {
+        if (!usingFramebufferCache) {
+            eglDestroyImageKHR(mEGLDisplay, mEGLImage);
+        }
+        mEGLImage = EGL_NO_IMAGE_KHR;
+        mBufferWidth = 0;
+        mBufferHeight = 0;
+    }
+
+    if (nativeBuffer) {
+        mEGLImage = mEngine.createFramebufferImageIfNeeded(nativeBuffer, isProtected,
+                                                           useFramebufferCache);
+        if (mEGLImage == EGL_NO_IMAGE_KHR) {
+            return false;
+        }
+        usingFramebufferCache = useFramebufferCache;
+        mBufferWidth = nativeBuffer->width;
+        mBufferHeight = nativeBuffer->height;
+    }
+    return true;
+}
+
+void GLFramebuffer::allocateBuffers(uint32_t width, uint32_t height, void* data) {
+    ATRACE_CALL();
+
+    glBindTexture(GL_TEXTURE_2D, mTextureName);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
+
+    mBufferHeight = height;
+    mBufferWidth = width;
+    mEngine.checkErrors("Allocating Fbo texture");
+
+    bind();
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mTextureName, 0);
+    mStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+    unbind();
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    if (mStatus != GL_FRAMEBUFFER_COMPLETE) {
+        ALOGE("Frame buffer is not complete. Error %d", mStatus);
+    }
+}
+
+void GLFramebuffer::bind() const {
+    glBindFramebuffer(GL_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsReadBuffer() const {
+    glBindFramebuffer(GL_READ_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::bindAsDrawBuffer() const {
+    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mFramebufferName);
+}
+
+void GLFramebuffer::unbind() const {
+    glBindFramebuffer(GL_FRAMEBUFFER, 0);
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLFramebuffer.h b/media/libstagefright/renderfright/gl/GLFramebuffer.h
new file mode 100644
index 0000000..6757695
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLFramebuffer.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+#include <renderengine/Framebuffer.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLFramebuffer : public renderengine::Framebuffer {
+public:
+    explicit GLFramebuffer(GLESRenderEngine& engine);
+    explicit GLFramebuffer(GLESRenderEngine& engine, bool multiTarget);
+    ~GLFramebuffer() override;
+
+    bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                               const bool useFramebufferCache) override;
+    void allocateBuffers(uint32_t width, uint32_t height, void* data = nullptr);
+    EGLImageKHR getEGLImage() const { return mEGLImage; }
+    uint32_t getTextureName() const { return mTextureName; }
+    uint32_t getFramebufferName() const { return mFramebufferName; }
+    int32_t getBufferHeight() const { return mBufferHeight; }
+    int32_t getBufferWidth() const { return mBufferWidth; }
+    GLenum getStatus() const { return mStatus; }
+    void bind() const;
+    void bindAsReadBuffer() const;
+    void bindAsDrawBuffer() const;
+    void unbind() const;
+
+private:
+    GLESRenderEngine& mEngine;
+    EGLDisplay mEGLDisplay;
+    EGLImageKHR mEGLImage;
+    bool usingFramebufferCache = false;
+    GLenum mStatus = GL_FRAMEBUFFER_UNSUPPORTED;
+    uint32_t mTextureName, mFramebufferName;
+
+    int32_t mBufferHeight = 0;
+    int32_t mBufferWidth = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.cpp b/media/libstagefright/renderfright/gl/GLImage.cpp
new file mode 100644
index 0000000..77e648e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLImage.h"
+
+#include <vector>
+
+#include <log/log.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "GLExtensions.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+static std::vector<EGLint> buildAttributeList(bool isProtected) {
+    std::vector<EGLint> attrs;
+    attrs.reserve(16);
+
+    attrs.push_back(EGL_IMAGE_PRESERVED_KHR);
+    attrs.push_back(EGL_TRUE);
+
+    if (isProtected && GLExtensions::getInstance().hasProtectedContent()) {
+        attrs.push_back(EGL_PROTECTED_CONTENT_EXT);
+        attrs.push_back(EGL_TRUE);
+    }
+
+    attrs.push_back(EGL_NONE);
+
+    return attrs;
+}
+
+GLImage::GLImage(const GLESRenderEngine& engine) : mEGLDisplay(engine.getEGLDisplay()) {}
+
+GLImage::~GLImage() {
+    setNativeWindowBuffer(nullptr, false);
+}
+
+bool GLImage::setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) {
+    ATRACE_CALL();
+    if (mEGLImage != EGL_NO_IMAGE_KHR) {
+        if (!eglDestroyImageKHR(mEGLDisplay, mEGLImage)) {
+            ALOGE("failed to destroy image: %#x", eglGetError());
+        }
+        mEGLImage = EGL_NO_IMAGE_KHR;
+    }
+
+    if (buffer) {
+        std::vector<EGLint> attrs = buildAttributeList(isProtected);
+        mEGLImage = eglCreateImageKHR(mEGLDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
+                                      static_cast<EGLClientBuffer>(buffer), attrs.data());
+        if (mEGLImage == EGL_NO_IMAGE_KHR) {
+            ALOGE("failed to create EGLImage: %#x", eglGetError());
+            return false;
+        }
+        mProtected = isProtected;
+    }
+
+    return true;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLImage.h b/media/libstagefright/renderfright/gl/GLImage.h
new file mode 100644
index 0000000..59d6ce3
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLImage.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <android-base/macros.h>
+#include <renderengine/Image.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLImage : public renderengine::Image {
+public:
+    explicit GLImage(const GLESRenderEngine& engine);
+    ~GLImage() override;
+
+    bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) override;
+
+    EGLImageKHR getEGLImage() const { return mEGLImage; }
+    bool isProtected() const { return mProtected; }
+
+private:
+    EGLDisplay mEGLDisplay;
+    EGLImageKHR mEGLImage = EGL_NO_IMAGE_KHR;
+    bool mProtected = false;
+
+    DISALLOW_COPY_AND_ASSIGN(GLImage);
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.cpp b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
new file mode 100644
index 0000000..2423a34
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <GLES3/gl3.h>
+
+#include "GLShadowTexture.h"
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLShadowTexture::GLShadowTexture() {
+    fillShadowTextureData(mTextureData, SHADOW_TEXTURE_WIDTH);
+
+    glGenTextures(1, &mName);
+    glBindTexture(GL_TEXTURE_2D, mName);
+    glTexImage2D(GL_TEXTURE_2D, 0 /* base image level */, GL_ALPHA, SHADOW_TEXTURE_WIDTH,
+                 SHADOW_TEXTURE_HEIGHT, 0 /* border */, GL_ALPHA, GL_UNSIGNED_BYTE, mTextureData);
+    mTexture.init(Texture::TEXTURE_2D, mName);
+    mTexture.setFiltering(true);
+    mTexture.setDimensions(SHADOW_TEXTURE_WIDTH, 1);
+}
+
+GLShadowTexture::~GLShadowTexture() {
+    glDeleteTextures(1, &mName);
+}
+
+const Texture& GLShadowTexture::getTexture() {
+    return mTexture;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowTexture.h b/media/libstagefright/renderfright/gl/GLShadowTexture.h
new file mode 100644
index 0000000..250a9d7
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowTexture.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <renderengine/Texture.h>
+#include <cstdint>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLShadowTexture {
+public:
+    GLShadowTexture();
+    ~GLShadowTexture();
+
+    const Texture& getTexture();
+
+private:
+    static constexpr int SHADOW_TEXTURE_WIDTH = 128;
+    static constexpr int SHADOW_TEXTURE_HEIGHT = 1;
+
+    GLuint mName;
+    Texture mTexture;
+    uint8_t mTextureData[SHADOW_TEXTURE_WIDTH];
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
new file mode 100644
index 0000000..3181f9b
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/Mesh.h>
+
+#include <math/vec4.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include "GLShadowVertexGenerator.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLShadowVertexGenerator::GLShadowVertexGenerator(const FloatRect& casterRect,
+                                                 float casterCornerRadius, float casterZ,
+                                                 bool casterIsTranslucent, const vec4& ambientColor,
+                                                 const vec4& spotColor, const vec3& lightPosition,
+                                                 float lightRadius) {
+    mDrawAmbientShadow = ambientColor.a > 0.f;
+    mDrawSpotShadow = spotColor.a > 0.f;
+
+    // Generate geometries and find number of vertices to generate
+    if (mDrawAmbientShadow) {
+        mAmbientShadowGeometry = getAmbientShadowGeometry(casterRect, casterCornerRadius, casterZ,
+                                                          casterIsTranslucent, ambientColor);
+        mAmbientShadowVertexCount = getVertexCountForGeometry(*mAmbientShadowGeometry.get());
+        mAmbientShadowIndexCount = getIndexCountForGeometry(*mAmbientShadowGeometry.get());
+    } else {
+        mAmbientShadowVertexCount = 0;
+        mAmbientShadowIndexCount = 0;
+    }
+
+    if (mDrawSpotShadow) {
+        mSpotShadowGeometry =
+                getSpotShadowGeometry(casterRect, casterCornerRadius, casterZ, casterIsTranslucent,
+                                      spotColor, lightPosition, lightRadius);
+        mSpotShadowVertexCount = getVertexCountForGeometry(*mSpotShadowGeometry.get());
+        mSpotShadowIndexCount = getIndexCountForGeometry(*mSpotShadowGeometry.get());
+    } else {
+        mSpotShadowVertexCount = 0;
+        mSpotShadowIndexCount = 0;
+    }
+}
+
+size_t GLShadowVertexGenerator::getVertexCount() const {
+    return mAmbientShadowVertexCount + mSpotShadowVertexCount;
+}
+
+size_t GLShadowVertexGenerator::getIndexCount() const {
+    return mAmbientShadowIndexCount + mSpotShadowIndexCount;
+}
+
+void GLShadowVertexGenerator::fillVertices(Mesh::VertexArray<vec2>& position,
+                                           Mesh::VertexArray<vec4>& color,
+                                           Mesh::VertexArray<vec3>& params) const {
+    if (mDrawAmbientShadow) {
+        fillVerticesForGeometry(*mAmbientShadowGeometry.get(), mAmbientShadowVertexCount, position,
+                                color, params);
+    }
+    if (mDrawSpotShadow) {
+        fillVerticesForGeometry(*mSpotShadowGeometry.get(), mSpotShadowVertexCount,
+                                Mesh::VertexArray<vec2>(position, mAmbientShadowVertexCount),
+                                Mesh::VertexArray<vec4>(color, mAmbientShadowVertexCount),
+                                Mesh::VertexArray<vec3>(params, mAmbientShadowVertexCount));
+    }
+}
+
+void GLShadowVertexGenerator::fillIndices(uint16_t* indices) const {
+    if (mDrawAmbientShadow) {
+        fillIndicesForGeometry(*mAmbientShadowGeometry.get(), mAmbientShadowIndexCount,
+                               0 /* starting vertex offset */, indices);
+    }
+    if (mDrawSpotShadow) {
+        fillIndicesForGeometry(*mSpotShadowGeometry.get(), mSpotShadowIndexCount,
+                               mAmbientShadowVertexCount /* starting vertex offset */,
+                               &(indices[mAmbientShadowIndexCount]));
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
new file mode 100644
index 0000000..112f976
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLShadowVertexGenerator.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include <math/vec4.h>
+#include <ui/Rect.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+
+class Mesh;
+
+namespace gl {
+
+/**
+ * Generates the GL attributes required to draw spot and/or ambient shadows.
+ *
+ * Each shadow can have a different color. This class generates three vertex attributes for
+ * each shadow: position, color, and shadow params (offset and distance). These can be sent
+ * using a single glDrawElements call.
+ */
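+// Typical usage, sketched after GLESRenderEngine::handleShadow() in this library
+// (illustrative, not normative; variable names are placeholders):
+//
+//   GLShadowVertexGenerator shadows(casterRect, cornerRadius, casterZ, isTranslucent,
+//                                   ambientColor, spotColor, lightPos, lightRadius);
+//   Mesh mesh = Mesh::Builder()
+//                       .setPrimitive(Mesh::TRIANGLES)
+//                       .setVertices(shadows.getVertexCount(), 2 /* size */)
+//                       .setShadowAttrs()
+//                       .setIndices(shadows.getIndexCount())
+//                       .build();
+//   Mesh::VertexArray<vec2> position = mesh.getPositionArray<vec2>();
+//   Mesh::VertexArray<vec4> color = mesh.getShadowColorArray<vec4>();
+//   Mesh::VertexArray<vec3> params = mesh.getShadowParamsArray<vec3>();
+//   shadows.fillVertices(position, color, params);
+//   shadows.fillIndices(mesh.getIndicesArray());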
+class GLShadowVertexGenerator {
+public:
+    GLShadowVertexGenerator(const FloatRect& casterRect, float casterCornerRadius, float casterZ,
+                            bool casterIsTranslucent, const vec4& ambientColor,
+                            const vec4& spotColor, const vec3& lightPosition, float lightRadius);
+    ~GLShadowVertexGenerator() = default;
+
+    size_t getVertexCount() const;
+    size_t getIndexCount() const;
+    void fillVertices(Mesh::VertexArray<vec2>& position, Mesh::VertexArray<vec4>& color,
+                      Mesh::VertexArray<vec3>& params) const;
+    void fillIndices(uint16_t* indices) const;
+
+private:
+    bool mDrawAmbientShadow;
+    std::unique_ptr<Geometry> mAmbientShadowGeometry;
+    int mAmbientShadowVertexCount = 0;
+    int mAmbientShadowIndexCount = 0;
+
+    bool mDrawSpotShadow;
+    std::unique_ptr<Geometry> mSpotShadowGeometry;
+    int mSpotShadowVertexCount = 0;
+    int mSpotShadowIndexCount = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
new file mode 100644
index 0000000..da8b435
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.cpp
@@ -0,0 +1,656 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+
+#include <math/vec4.h>
+
+#include <renderengine/Mesh.h>
+
+#include <ui/Rect.h>
+#include <ui/Transform.h>
+
+#include <utils/Log.h>
+
+#include "GLSkiaShadowPort.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code have been ported from the Skia
+ * shadow fast-path OpenGL implementation to draw shadows around rects and rounded rects,
+ * including circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - The falloff function is implemented in the vertex shader rather than in a shadow texture
+ * - Removed the transformations applied to the caster rect, since the caster will be in
+ *   local coordinate space and will be transformed by the vertex shader.
+ */
+
+static inline float divide_and_pin(float numer, float denom, float min, float max) {
+    if (denom == 0.0f) return min;
+    return std::clamp(numer / denom, min, max);
+}
+
+static constexpr auto SK_ScalarSqrt2 = 1.41421356f;
+static constexpr auto kAmbientHeightFactor = 1.0f / 128.0f;
+static constexpr auto kAmbientGeomFactor = 64.0f;
+// Assuming that we have a light height of 600 for the spot shadow,
+// the spot values will reach their maximum at a height of approximately 292.3077.
+// We'll round up to 300 to keep it simple.
+static constexpr auto kMaxAmbientRadius = 300 * kAmbientHeightFactor * kAmbientGeomFactor;
+
+inline float AmbientBlurRadius(float height) {
+    return std::min(height * kAmbientHeightFactor * kAmbientGeomFactor, kMaxAmbientRadius);
+}
+inline float AmbientRecipAlpha(float height) {
+    return 1.0f + std::max(height * kAmbientHeightFactor, 0.0f);
+}
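+
+// Worked example with the constants above: kAmbientHeightFactor * kAmbientGeomFactor
+// = 64/128 = 0.5 and kMaxAmbientRadius = 300 * 0.5 = 150, so a caster at height 128
+// gets AmbientBlurRadius(128) = min(64, 150) = 64 and AmbientRecipAlpha(128) = 2.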
+
+//////////////////////////////////////////////////////////////////////////////
+// Circle Data
+//
+// We have two possible cases for geometry for a circle:
+
+// In the case of a normal fill, we draw geometry for the circle as an octagon.
+static const uint16_t gFillCircleIndices[] = {
+        // enter the octagon
+        // clang-format off
+         0, 1, 8, 1, 2, 8,
+         2, 3, 8, 3, 4, 8,
+         4, 5, 8, 5, 6, 8,
+         6, 7, 8, 7, 0, 8,
+        // clang-format on
+};
+
+// For stroked circles, we use two nested octagons.
+static const uint16_t gStrokeCircleIndices[] = {
+        // enter the octagon
+        // clang-format off
+         0, 1,  9, 0,  9,  8,
+         1, 2, 10, 1, 10,  9,
+         2, 3, 11, 2, 11, 10,
+         3, 4, 12, 3, 12, 11,
+         4, 5, 13, 4, 13, 12,
+         5, 6, 14, 5, 14, 13,
+         6, 7, 15, 6, 15, 14,
+         7, 0,  8, 7,  8, 15,
+        // clang-format on
+};
+
+#define SK_ARRAY_COUNT(a) (sizeof(a) / sizeof((a)[0]))
+static const int kIndicesPerFillCircle = SK_ARRAY_COUNT(gFillCircleIndices);
+static const int kIndicesPerStrokeCircle = SK_ARRAY_COUNT(gStrokeCircleIndices);
+static const int kVertsPerStrokeCircle = 16;
+static const int kVertsPerFillCircle = 9;
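+// For reference: a filled circle uses the 8 octagon points plus the center (9 verts,
+// 8 triangles = 24 indices); a stroked circle uses two nested octagons (16 verts,
+// 16 triangles = 48 indices).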
+
+static int circle_type_to_vert_count(bool stroked) {
+    return stroked ? kVertsPerStrokeCircle : kVertsPerFillCircle;
+}
+
+static int circle_type_to_index_count(bool stroked) {
+    return stroked ? kIndicesPerStrokeCircle : kIndicesPerFillCircle;
+}
+
+static const uint16_t* circle_type_to_indices(bool stroked) {
+    return stroked ? gStrokeCircleIndices : gFillCircleIndices;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// RoundRect Data
+//
+// The geometry for a shadow roundrect is similar to a 9-patch:
+//    ____________
+//   |_|________|_|
+//   | |        | |
+//   | |        | |
+//   | |        | |
+//   |_|________|_|
+//   |_|________|_|
+//
+// However, each corner is rendered as a fan rather than a simple quad, as below. (The diagram
+// shows the upper part of the upper left corner. The bottom triangle would similarly be split
+// into two triangles.)
+//    ________
+//   |\  \   |
+//   |  \ \  |
+//   |    \\ |
+//   |      \|
+//   --------
+//
+// The center of the fan handles the curve of the corner. For roundrects where the stroke width
+// is greater than the corner radius, the outer triangles blend from the curve to the straight
+// sides. Otherwise these triangles will be degenerate.
+//
+// In the case where the stroke width is greater than the corner radius and the
+// blur radius (overstroke), we add additional geometry to mark out the rectangle in the center.
+// This rectangle extends the coverage values of the center edges of the 9-patch.
+//    ____________
+//   |_|________|_|
+//   | |\ ____ /| |
+//   | | |    | | |
+//   | | |____| | |
+//   |_|/______\|_|
+//   |_|________|_|
+//
+// For filled rrects we reuse the stroke geometry but add an additional quad to the center.
+
+static const uint16_t gRRectIndices[] = {
+        // clang-format off
+     // overstroke quads
+     // we place this at the beginning so that we can skip these indices when rendering as filled
+     0, 6, 25, 0, 25, 24,
+     6, 18, 27, 6, 27, 25,
+     18, 12, 26, 18, 26, 27,
+     12, 0, 24, 12, 24, 26,
+
+     // corners
+     0, 1, 2, 0, 2, 3, 0, 3, 4, 0, 4, 5,
+     6, 11, 10, 6, 10, 9, 6, 9, 8, 6, 8, 7,
+     12, 17, 16, 12, 16, 15, 12, 15, 14, 12, 14, 13,
+     18, 19, 20, 18, 20, 21, 18, 21, 22, 18, 22, 23,
+
+     // edges
+     0, 5, 11, 0, 11, 6,
+     6, 7, 19, 6, 19, 18,
+     18, 23, 17, 18, 17, 12,
+     12, 13, 1, 12, 1, 0,
+
+     // fill quad
+     // we place this at the end so that we can skip these indices when rendering as stroked
+     0, 6, 18, 0, 18, 12,
+        // clang-format on
+};
+
+// overstroke count
+static const int kIndicesPerOverstrokeRRect = SK_ARRAY_COUNT(gRRectIndices) - 6;
+// simple stroke count skips overstroke indices
+static const int kIndicesPerStrokeRRect = kIndicesPerOverstrokeRRect - 6 * 4;
+// fill count adds final quad to stroke count
+static const int kIndicesPerFillRRect = kIndicesPerStrokeRRect + 6;
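+// Counting the table above (24 overstroke + 48 corner + 24 edge + 6 fill = 102 entries),
+// the three counts work out to 96, 72 and 78 indices respectively.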
+static const int kVertsPerStrokeRRect = 24;
+static const int kVertsPerOverstrokeRRect = 28;
+static const int kVertsPerFillRRect = 24;
+
+static int rrect_type_to_vert_count(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+            return kVertsPerFillRRect;
+        case kStroke_RRectType:
+            return kVertsPerStrokeRRect;
+        case kOverstroke_RRectType:
+            return kVertsPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+static int rrect_type_to_index_count(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+            return kIndicesPerFillRRect;
+        case kStroke_RRectType:
+            return kIndicesPerStrokeRRect;
+        case kOverstroke_RRectType:
+            return kIndicesPerOverstrokeRRect;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return -1;
+}
+
+static const uint16_t* rrect_type_to_indices(RRectType type) {
+    switch (type) {
+        case kFill_RRectType:
+        case kStroke_RRectType:
+            return gRRectIndices + 6 * 4;
+        case kOverstroke_RRectType:
+            return gRRectIndices;
+    }
+    ALOGE("Invalid rect type: %d", type);
+    return nullptr;
+}
+
+static void fillInCircleVerts(const Geometry& args, bool isStroked,
+                              Mesh::VertexArray<vec2>& position,
+                              Mesh::VertexArray<vec4>& shadowColor,
+                              Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+    float innerRadius = args.fInnerRadius;
+    float blurRadius = args.fBlurRadius;
+    float distanceCorrection = outerRadius / blurRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    // The inner radius in the vertex data must be specified in normalized space.
+    innerRadius = innerRadius / outerRadius;
+
+    vec2 center = vec2(bounds.getWidth() / 2.0f, bounds.getHeight() / 2.0f);
+    float halfWidth = 0.5f * bounds.getWidth();
+    float octOffset = 0.41421356237f; // sqrt(2) - 1
+    int vertexCount = 0;
+
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, -halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, -1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-octOffset * halfWidth, halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-octOffset, 1, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, octOffset, distanceCorrection);
+    vertexCount++;
+
+    position[vertexCount] = center + vec2(-halfWidth, -octOffset * halfWidth);
+    shadowColor[vertexCount] = color;
+    shadowParams[vertexCount] = vec3(-1, -octOffset, distanceCorrection);
+    vertexCount++;
+
+    if (isStroked) {
+        // compute the inner ring
+
+        // cosine and sine of pi/8
+        float c = 0.923579533f;
+        float s = 0.382683432f;
+        float r = args.fInnerRadius;
+
+        position[vertexCount] = center + vec2(-s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, -c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, -c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-s * r, c * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-s * innerRadius, c * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, s * innerRadius, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = center + vec2(-c * r, -s * r);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(-c * innerRadius, -s * innerRadius, distanceCorrection);
+        vertexCount++;
+    } else {
+        // filled
+        position[vertexCount] = center;
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+static void fillInRRectVerts(const Geometry& args, Mesh::VertexArray<vec2>& position,
+                             Mesh::VertexArray<vec4>& shadowColor,
+                             Mesh::VertexArray<vec3>& shadowParams) {
+    vec4 color = args.fColor;
+    float outerRadius = args.fOuterRadius;
+
+    const FloatRect& bounds = args.fDevBounds;
+
+    float umbraInset = args.fUmbraInset;
+    float minDim = 0.5f * std::min(bounds.getWidth(), bounds.getHeight());
+    if (umbraInset > minDim) {
+        umbraInset = minDim;
+    }
+
+    float xInner[4] = {bounds.left + umbraInset, bounds.right - umbraInset,
+                       bounds.left + umbraInset, bounds.right - umbraInset};
+    float xMid[4] = {bounds.left + outerRadius, bounds.right - outerRadius,
+                     bounds.left + outerRadius, bounds.right - outerRadius};
+    float xOuter[4] = {bounds.left, bounds.right, bounds.left, bounds.right};
+    float yInner[4] = {bounds.top + umbraInset, bounds.top + umbraInset, bounds.bottom - umbraInset,
+                       bounds.bottom - umbraInset};
+    float yMid[4] = {bounds.top + outerRadius, bounds.top + outerRadius,
+                     bounds.bottom - outerRadius, bounds.bottom - outerRadius};
+    float yOuter[4] = {bounds.top, bounds.top, bounds.bottom, bounds.bottom};
+
+    float blurRadius = args.fBlurRadius;
+
+    // In the case where we have to inset more for the umbra, our two triangles in the
+    // corner get skewed to a diamond rather than a square. To correct for that,
+    // we also skew the vectors we send to the shader that help define the circle.
+    // By doing so, we end up with a quarter circle in the corner rather than the
+    // elliptical curve.
+
+    // This is a bit magical, but it gives us the correct results at extrema:
+    //   a) umbraInset == outerRadius produces an orthogonal vector
+    //   b) outerRadius == 0 produces a diagonal vector
+    // And visually the corner looks correct.
+    vec2 outerVec = vec2(outerRadius - umbraInset, -outerRadius - umbraInset);
+    outerVec = normalize(outerVec);
+    // We want the circle edge to fall fractionally along the diagonal at
+    //      (sqrt(2)*(umbraInset - outerRadius) + outerRadius)/sqrt(2)*umbraInset
+    //
+    // Setting the components of the diagonal offset to the following value will give us that.
+    float diagVal = umbraInset / (SK_ScalarSqrt2 * (outerRadius - umbraInset) - outerRadius);
+    vec2 diagVec = vec2(diagVal, diagVal);
+    float distanceCorrection = umbraInset / blurRadius;
+
+    int vertexCount = 0;
+    // build corner by corner
+    for (int i = 0; i < 4; ++i) {
+        // inner point
+        position[vertexCount] = vec2(xInner[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // outer points
+        position[vertexCount] = vec2(xOuter[i], yInner[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yMid[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xOuter[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(diagVec.x, diagVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xMid[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(outerVec.x, outerVec.y, distanceCorrection);
+        vertexCount++;
+
+        position[vertexCount] = vec2(xInner[i], yOuter[i]);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, -1, distanceCorrection);
+        vertexCount++;
+    }
+
+    // Add the additional vertices for overstroked rrects.
+    // Effectively this is an additional stroked rrect, with its
+    // parameters equal to those in the center of the 9-patch. This will
+    // give constant values across this inner ring.
+    if (kOverstroke_RRectType == args.fType) {
+        float inset = umbraInset + args.fInnerRadius;
+
+        // TL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // TR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.top + inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BL
+        position[vertexCount] = vec2(bounds.left + inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+
+        // BR
+        position[vertexCount] = vec2(bounds.right - inset, bounds.bottom - inset);
+        shadowColor[vertexCount] = color;
+        shadowParams[vertexCount] = vec3(0, 0, distanceCorrection);
+        vertexCount++;
+    }
+}
+
+int getVertexCountForGeometry(const Geometry& shadowGeometry) {
+    if (shadowGeometry.fIsCircle) {
+        return circle_type_to_vert_count(shadowGeometry.fType);
+    }
+
+    return rrect_type_to_vert_count(shadowGeometry.fType);
+}
+
+int getIndexCountForGeometry(const Geometry& shadowGeometry) {
+    if (shadowGeometry.fIsCircle) {
+        return circle_type_to_index_count(kStroke_RRectType == shadowGeometry.fType);
+    }
+
+    return rrect_type_to_index_count(shadowGeometry.fType);
+}
+
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int /* vertexCount */,
+                             Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+                             Mesh::VertexArray<vec3> shadowParams) {
+    if (shadowGeometry.fIsCircle) {
+        fillInCircleVerts(shadowGeometry, shadowGeometry.fIsStroked, position, shadowColor,
+                          shadowParams);
+    } else {
+        fillInRRectVerts(shadowGeometry, position, shadowColor, shadowParams);
+    }
+}
+
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+                            int startingVertexOffset, uint16_t* indices) {
+    if (shadowGeometry.fIsCircle) {
+        const uint16_t* primIndices = circle_type_to_indices(shadowGeometry.fIsStroked);
+        for (int i = 0; i < indexCount; ++i) {
+            indices[i] = primIndices[i] + startingVertexOffset;
+        }
+    } else {
+        const uint16_t* primIndices = rrect_type_to_indices(shadowGeometry.fType);
+        for (int i = 0; i < indexCount; ++i) {
+            indices[i] = primIndices[i] + startingVertexOffset;
+        }
+    }
+}
+
+inline void GetSpotParams(float occluderZ, float lightX, float lightY, float lightZ,
+                          float lightRadius, float& blurRadius, float& scale, vec2& translate) {
+    float zRatio = divide_and_pin(occluderZ, lightZ - occluderZ, 0.0f, 0.95f);
+    blurRadius = lightRadius * zRatio;
+    scale = divide_and_pin(lightZ, lightZ - occluderZ, 1.0f, 1.95f);
+    translate.x = -zRatio * lightX;
+    translate.y = -zRatio * lightY;
+}
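+
+// Illustrative numbers only (assuming divide_and_pin clamps the quotient to the given
+// range): with occluderZ = 100, lightZ = 600, lightRadius = 800 and the light at
+// (200, 0), zRatio = 100 / 500 = 0.2, so blurRadius = 160, scale = 600 / 500 = 1.2 and
+// translate = (-40, 0).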
+
+static std::unique_ptr<Geometry> getShadowGeometry(const vec4& color, const FloatRect& devRect,
+                                                   float devRadius, float blurRadius,
+                                                   float insetWidth) {
+    // An insetWidth > 1/2 rect width or height indicates a simple fill.
+    const bool isCircle = ((devRadius >= devRect.getWidth()) && (devRadius >= devRect.getHeight()));
+
+    FloatRect bounds = devRect;
+    float innerRadius = 0.0f;
+    float outerRadius = devRadius;
+    float umbraInset;
+
+    RRectType type = kFill_RRectType;
+    if (isCircle) {
+        umbraInset = 0;
+    } else {
+        umbraInset = std::max(outerRadius, blurRadius);
+    }
+
+    // If stroke is greater than width or height, this is still a fill,
+    // otherwise we compute stroke params.
+    if (isCircle) {
+        innerRadius = devRadius - insetWidth;
+        type = innerRadius > 0 ? kStroke_RRectType : kFill_RRectType;
+    } else {
+        if (insetWidth <= 0.5f * std::min(devRect.getWidth(), devRect.getHeight())) {
+            // We don't worry about a real inner radius, we just need to know if we
+            // need to create overstroke vertices.
+            innerRadius = std::max(insetWidth - umbraInset, 0.0f);
+            type = innerRadius > 0 ? kOverstroke_RRectType : kStroke_RRectType;
+        }
+    }
+    const bool isStroked = (kStroke_RRectType == type);
+    return std::make_unique<Geometry>(Geometry{color, outerRadius, umbraInset, innerRadius,
+                                               blurRadius, bounds, type, isCircle, isStroked});
+}
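+
+// Worked example (illustrative numbers, not asserted anywhere): for a 100x60 devRect
+// with devRadius = 10, blurRadius = 20 and insetWidth = 20, isCircle is false,
+// umbraInset = max(10, 20) = 20, and since 20 <= 0.5 * min(100, 60) the inner radius is
+// max(20 - 20, 0) = 0, so the geometry is built as kStroke_RRectType.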
+
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+                                                   float casterCornerRadius, float casterZ,
+                                                   bool casterIsTranslucent,
+                                                   const vec4& ambientColor) {
+    float devSpaceInsetWidth = AmbientBlurRadius(casterZ);
+    const float umbraRecipAlpha = AmbientRecipAlpha(casterZ);
+    const float devSpaceAmbientBlur = devSpaceInsetWidth * umbraRecipAlpha;
+
+    // Outset the shadow rrect to the border of the penumbra
+    float ambientPathOutset = devSpaceInsetWidth;
+    FloatRect outsetRect(casterRect);
+    outsetRect.left -= ambientPathOutset;
+    outsetRect.top -= ambientPathOutset;
+    outsetRect.right += ambientPathOutset;
+    outsetRect.bottom += ambientPathOutset;
+
+    float outsetRad = casterCornerRadius + ambientPathOutset;
+    if (casterIsTranslucent) {
+        // set a large inset to force a fill
+        devSpaceInsetWidth = outsetRect.getWidth();
+    }
+
+    return getShadowGeometry(ambientColor, outsetRect, std::abs(outsetRad), devSpaceAmbientBlur,
+                             std::abs(devSpaceInsetWidth));
+}
+
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+                                                float casterCornerRadius, float casterZ,
+                                                bool casterIsTranslucent, const vec4& spotColor,
+                                                const vec3& lightPosition, float lightRadius) {
+    float devSpaceSpotBlur;
+    float spotScale;
+    vec2 spotOffset;
+    GetSpotParams(casterZ, lightPosition.x, lightPosition.y, lightPosition.z, lightRadius,
+                  devSpaceSpotBlur, spotScale, spotOffset);
+    // handle scale of radius due to CTM
+    const float srcSpaceSpotBlur = devSpaceSpotBlur;
+
+    // Adjust translate for the effect of the scale.
+    spotOffset.x += spotScale;
+    spotOffset.y += spotScale;
+
+    // Compute the transformed shadow rect
+    ui::Transform shadowTransform;
+    shadowTransform.set(spotOffset.x, spotOffset.y);
+    shadowTransform.set(spotScale, 0, 0, spotScale);
+    FloatRect spotShadowRect = shadowTransform.transform(casterRect);
+    float spotShadowRadius = casterCornerRadius * spotScale;
+
+    // Compute the insetWidth
+    float blurOutset = srcSpaceSpotBlur;
+    float insetWidth = blurOutset;
+    if (casterIsTranslucent) {
+        // If transparent, just do a fill
+        insetWidth += spotShadowRect.getWidth();
+    } else {
+        // For shadows, instead of using a stroke we specify an inset from the penumbra
+        // border. We want to extend this inset area so that it meets up with the caster
+        // geometry. The inset geometry will by default already be inset by the blur width.
+        //
+        // We compare the min and max corners inset by the radius between the original
+        // rrect and the shadow rrect. The distance between the two plus the difference
+        // between the scaled radius and the original radius gives the distance from the
+        // transformed shadow shape to the original shape in that corner. The max
+        // of these gives the maximum distance we need to cover.
+        //
+        // Since we are outsetting by 1/2 the blur distance, we just add the maxOffset to
+        // that to get the full insetWidth.
+        float maxOffset;
+        if (casterCornerRadius <= 0.f) {
+            // Manhattan distance works better for rects
+            maxOffset = std::max(std::max(std::abs(spotShadowRect.left - casterRect.left),
+                                          std::abs(spotShadowRect.top - casterRect.top)),
+                                 std::max(std::abs(spotShadowRect.right - casterRect.right),
+                                          std::abs(spotShadowRect.bottom - casterRect.bottom)));
+        } else {
+            float dr = spotShadowRadius - casterCornerRadius;
+            vec2 upperLeftOffset = vec2(spotShadowRect.left - casterRect.left + dr,
+                                        spotShadowRect.top - casterRect.top + dr);
+            vec2 lowerRightOffset = vec2(spotShadowRect.right - casterRect.right - dr,
+                                         spotShadowRect.bottom - casterRect.bottom - dr);
+            maxOffset = sqrt(std::max(dot(upperLeftOffset, upperLeftOffset),
+                                      dot(lowerRightOffset, lowerRightOffset))) +
+                    dr;
+        }
+        insetWidth += std::max(blurOutset, maxOffset);
+    }
+
+    // Outset the shadow rrect to the border of the penumbra
+    spotShadowRadius += blurOutset;
+    spotShadowRect.left -= blurOutset;
+    spotShadowRect.top -= blurOutset;
+    spotShadowRect.right += blurOutset;
+    spotShadowRect.bottom += blurOutset;
+
+    return getShadowGeometry(spotColor, spotShadowRect, std::abs(spotShadowRadius),
+                             2.0f * devSpaceSpotBlur, std::abs(insetWidth));
+}
+
+void fillShadowTextureData(uint8_t* data, size_t shadowTextureWidth) {
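+    // Illustrative endpoint values (not asserted by this code): at i == 0, d == 1 and
+    // exp(-4) - 0.018 is ~0, so the outermost texel is ~0; at i == shadowTextureWidth - 1,
+    // d == 0 and exp(0) - 0.018 is ~0.982, so the innermost texel is ~250 out of 255.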
+    for (int i = 0; i < shadowTextureWidth; i++) {
+        const float d = 1 - i / ((shadowTextureWidth * 1.0f) - 1.0f);
+        data[i] = static_cast<uint8_t>((exp(-4.0f * d * d) - 0.018f) * 255);
+    }
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
new file mode 100644
index 0000000..912c8bb
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLSkiaShadowPort.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <math/vec4.h>
+#include <renderengine/Mesh.h>
+#include <ui/Rect.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * The shadow geometry logic and vertex generation code have been ported from the Skia
+ * shadow fast path OpenGL implementation to draw shadows around rects and rounded rects,
+ * including circles.
+ *
+ * path: skia/src/gpu/GrRenderTargetContext.cpp GrRenderTargetContext::drawFastShadow
+ *
+ * Modifications made:
+ * - Switched to using std lib math functions
+ * - The falloff function is implemented in the vertex shader rather than in a shadow texture
+ * - Removed transformations applied on the caster rect since the caster will be in local
+ *   coordinate space and will be transformed by the vertex shader.
+ */
+
+enum RRectType {
+    kFill_RRectType,
+    kStroke_RRectType,
+    kOverstroke_RRectType,
+};
+
+struct Geometry {
+    vec4 fColor;
+    float fOuterRadius;
+    float fUmbraInset;
+    float fInnerRadius;
+    float fBlurRadius;
+    FloatRect fDevBounds;
+    RRectType fType;
+    bool fIsCircle;
+    bool fIsStroked;
+};
+
+std::unique_ptr<Geometry> getSpotShadowGeometry(const FloatRect& casterRect,
+                                                float casterCornerRadius, float casterZ,
+                                                bool casterIsTranslucent, const vec4& spotColor,
+                                                const vec3& lightPosition, float lightRadius);
+
+std::unique_ptr<Geometry> getAmbientShadowGeometry(const FloatRect& casterRect,
+                                                   float casterCornerRadius, float casterZ,
+                                                   bool casterIsTranslucent,
+                                                   const vec4& ambientColor);
+
+int getVertexCountForGeometry(const Geometry& shadowGeometry);
+
+int getIndexCountForGeometry(const Geometry& shadowGeometry);
+
+void fillVerticesForGeometry(const Geometry& shadowGeometry, int vertexCount,
+                             Mesh::VertexArray<vec2> position, Mesh::VertexArray<vec4> shadowColor,
+                             Mesh::VertexArray<vec3> shadowParams);
+
+void fillIndicesForGeometry(const Geometry& shadowGeometry, int indexCount,
+                            int startingVertexOffset, uint16_t* indices);
+
+/**
+ * Maps shadow geometry 'alpha' varying (1 for darkest, 0 for transparent) to
+ * darkness at that spot. Values are determined by an exponential falloff
+ * function provided by UX.
+ *
+ * The texture is used for quick lookup in the shadow shader.
+ *
+ * textureData - buffer that is filled with the shadow texture data; it must be
+ *               at least textureWidth bytes in size
+ *
+ * textureWidth - width of the texture, height is always 1
+ */
+void fillShadowTextureData(uint8_t* textureData, size_t textureWidth);
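+
+// A minimal usage sketch (the width of 64 below is an arbitrary illustration, not a
+// value required by this API):
+//     std::vector<uint8_t> falloff(64);
+//     fillShadowTextureData(falloff.data(), falloff.size());
+//     // upload 'falloff' as a 64x1 single-channel texture sampled by the shadow shader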
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
new file mode 100644
index 0000000..e50c471
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "GLVertexBuffer.h"
+
+#include <GLES/gl.h>
+#include <GLES2/gl2.h>
+#include <nativebase/nativebase.h>
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GLVertexBuffer::GLVertexBuffer() {
+    glGenBuffers(1, &mBufferName);
+}
+
+GLVertexBuffer::~GLVertexBuffer() {
+    glDeleteBuffers(1, &mBufferName);
+}
+
+void GLVertexBuffer::allocateBuffers(const GLfloat data[], const GLuint size) {
+    ATRACE_CALL();
+    bind();
+    glBufferData(GL_ARRAY_BUFFER, size * sizeof(GLfloat), data, GL_STATIC_DRAW);
+    unbind();
+}
+
+void GLVertexBuffer::bind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, mBufferName);
+}
+
+void GLVertexBuffer::unbind() const {
+    glBindBuffer(GL_ARRAY_BUFFER, 0);
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/GLVertexBuffer.h b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
new file mode 100644
index 0000000..c0fd0c1
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/GLVertexBuffer.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES2/gl2.h>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class GLVertexBuffer {
+public:
+    explicit GLVertexBuffer();
+    ~GLVertexBuffer();
+
+    void allocateBuffers(const GLfloat data[], const GLuint size);
+    uint32_t getBufferName() const { return mBufferName; }
+    void bind() const;
+    void unbind() const;
+
+private:
+    uint32_t mBufferName;
+};
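+
+// Usage sketch (illustrative only; assumes a current GLES context):
+//     GLVertexBuffer vbo;
+//     const GLfloat quad[] = {0.f, 0.f, 1.f, 0.f, 1.f, 1.f, 0.f, 1.f};
+//     vbo.allocateBuffers(quad, 8 /* number of GLfloats, not bytes */);
+//     vbo.bind();
+//     // ... set up vertex attribute pointers and draw ...
+//     vbo.unbind();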
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.cpp b/media/libstagefright/renderfright/gl/ImageManager.cpp
new file mode 100644
index 0000000..5b0cf52
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "RenderEngine"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include <pthread.h>
+
+#include <processgroup/sched_policy.h>
+#include <utils/Trace.h>
+#include "GLESRenderEngine.h"
+#include "ImageManager.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+ImageManager::ImageManager(GLESRenderEngine* engine) : mEngine(engine) {}
+
+void ImageManager::initThread(bool realtime) {
+    mThread = std::thread([this]() { threadMain(); });
+    pthread_setname_np(mThread.native_handle(), "ImageManager");
+    if (realtime) {
+        // Use SCHED_FIFO to minimize jitter
+        struct sched_param param = {0};
+        param.sched_priority = 2;
+        if (pthread_setschedparam(mThread.native_handle(), SCHED_FIFO, &param) != 0) {
+            ALOGE("Couldn't set SCHED_FIFO for ImageManager");
+        }
+    }
+}
+
+ImageManager::~ImageManager() {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mRunning = false;
+    }
+    mCondition.notify_all();
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+void ImageManager::cacheAsync(const sp<GraphicBuffer>& buffer,
+                              const std::shared_ptr<Barrier>& barrier) {
+    if (buffer == nullptr) {
+        {
+            std::lock_guard<std::mutex> lock(barrier->mutex);
+            barrier->isOpen = true;
+            barrier->result = BAD_VALUE;
+        }
+        barrier->condition.notify_one();
+        return;
+    }
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Insert, buffer, buffer->getId(), barrier};
+    queueOperation(std::move(entry));
+}
+
+status_t ImageManager::cache(const sp<GraphicBuffer>& buffer) {
+    ATRACE_CALL();
+    auto barrier = std::make_shared<Barrier>();
+    cacheAsync(buffer, barrier);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    barrier->condition.wait(barrier->mutex,
+                            [&]() REQUIRES(barrier->mutex) { return barrier->isOpen; });
+    return barrier->result;
+}
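+
+// Illustrative caller-side sketch for the async release path (cache() above is the
+// canonical example of waiting on a Barrier):
+//     auto barrier = std::make_shared<ImageManager::Barrier>();
+//     imageManager->releaseAsync(bufferId, barrier);
+//     std::lock_guard<std::mutex> lock(barrier->mutex);
+//     barrier->condition.wait(barrier->mutex, [&]() { return barrier->isOpen; });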
+
+void ImageManager::releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) {
+    ATRACE_CALL();
+    QueueEntry entry = {QueueEntry::Operation::Delete, nullptr, bufferId, barrier};
+    queueOperation(std::move(entry));
+}
+
+void ImageManager::queueOperation(const QueueEntry&& entry) {
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        mQueue.emplace(entry);
+        ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+    }
+    mCondition.notify_one();
+}
+
+void ImageManager::threadMain() {
+    set_sched_policy(0, SP_FOREGROUND);
+    bool run;
+    {
+        std::lock_guard<std::mutex> lock(mMutex);
+        run = mRunning;
+    }
+    while (run) {
+        QueueEntry entry;
+        {
+            std::lock_guard<std::mutex> lock(mMutex);
+            mCondition.wait(mMutex,
+                            [&]() REQUIRES(mMutex) { return !mQueue.empty() || !mRunning; });
+            run = mRunning;
+
+            if (!mRunning) {
+                // if mRunning is false, then ImageManager is being destroyed, so
+                // bail out now.
+                break;
+            }
+
+            entry = mQueue.front();
+            mQueue.pop();
+            ATRACE_INT("ImageManagerQueueDepth", mQueue.size());
+        }
+
+        status_t result = NO_ERROR;
+        switch (entry.op) {
+            case QueueEntry::Operation::Delete:
+                mEngine->unbindExternalTextureBufferInternal(entry.bufferId);
+                break;
+            case QueueEntry::Operation::Insert:
+                result = mEngine->cacheExternalTextureBufferInternal(entry.buffer);
+                break;
+        }
+        if (entry.barrier != nullptr) {
+            {
+                std::lock_guard<std::mutex> entryLock(entry.barrier->mutex);
+                entry.barrier->result = result;
+                entry.barrier->isOpen = true;
+            }
+            entry.barrier->condition.notify_one();
+        }
+    }
+
+    ALOGD("Reached end of threadMain, terminating ImageManager thread!");
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ImageManager.h b/media/libstagefright/renderfright/gl/ImageManager.h
new file mode 100644
index 0000000..6be8e3c
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ImageManager.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GLESRenderEngine;
+
+class ImageManager {
+public:
+    struct Barrier {
+        std::mutex mutex;
+        std::condition_variable_any condition;
+        bool isOpen GUARDED_BY(mutex) = false;
+        status_t result GUARDED_BY(mutex) = NO_ERROR;
+    };
+    ImageManager(GLESRenderEngine* engine);
+    ~ImageManager();
+    // Starts the background thread for the ImageManager
+    // We need this to guarantee that the class is fully-constructed before the
+    // thread begins running.
+    void initThread(bool realtime);
+    void cacheAsync(const sp<GraphicBuffer>& buffer, const std::shared_ptr<Barrier>& barrier)
+            EXCLUDES(mMutex);
+    status_t cache(const sp<GraphicBuffer>& buffer);
+    void releaseAsync(uint64_t bufferId, const std::shared_ptr<Barrier>& barrier) EXCLUDES(mMutex);
+
+private:
+    struct QueueEntry {
+        enum class Operation { Delete, Insert };
+
+        Operation op = Operation::Delete;
+        sp<GraphicBuffer> buffer = nullptr;
+        uint64_t bufferId = 0;
+        std::shared_ptr<Barrier> barrier = nullptr;
+    };
+
+    void queueOperation(const QueueEntry&& entry);
+    void threadMain();
+    GLESRenderEngine* const mEngine;
+    std::thread mThread;
+    std::condition_variable_any mCondition;
+    std::mutex mMutex;
+    std::queue<QueueEntry> mQueue GUARDED_BY(mMutex);
+
+    bool mRunning GUARDED_BY(mMutex) = true;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.cpp b/media/libstagefright/renderfright/gl/Program.cpp
new file mode 100644
index 0000000..f4fbf35
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Program.h"
+
+#include <stdint.h>
+
+#include <log/log.h>
+#include <math/mat4.h>
+#include <utils/String8.h>
+#include "ProgramCache.h"
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+Program::Program(const ProgramCache::Key& /*needs*/, const char* vertex, const char* fragment)
+      : mInitialized(false) {
+    GLuint vertexId = buildShader(vertex, GL_VERTEX_SHADER);
+    GLuint fragmentId = buildShader(fragment, GL_FRAGMENT_SHADER);
+    GLuint programId = glCreateProgram();
+    glAttachShader(programId, vertexId);
+    glAttachShader(programId, fragmentId);
+    glBindAttribLocation(programId, position, "position");
+    glBindAttribLocation(programId, texCoords, "texCoords");
+    glBindAttribLocation(programId, cropCoords, "cropCoords");
+    glBindAttribLocation(programId, shadowColor, "shadowColor");
+    glBindAttribLocation(programId, shadowParams, "shadowParams");
+    glLinkProgram(programId);
+
+    GLint status;
+    glGetProgramiv(programId, GL_LINK_STATUS, &status);
+    if (status != GL_TRUE) {
+        ALOGE("Error while linking shaders:");
+        GLint infoLen = 0;
+        glGetProgramiv(programId, GL_INFO_LOG_LENGTH, &infoLen);
+        if (infoLen > 1) {
+            GLchar log[infoLen];
+            glGetProgramInfoLog(programId, infoLen, 0, &log[0]);
+            ALOGE("%s", log);
+        }
+        glDetachShader(programId, vertexId);
+        glDetachShader(programId, fragmentId);
+        glDeleteShader(vertexId);
+        glDeleteShader(fragmentId);
+        glDeleteProgram(programId);
+    } else {
+        mProgram = programId;
+        mVertexShader = vertexId;
+        mFragmentShader = fragmentId;
+        mInitialized = true;
+        mProjectionMatrixLoc = glGetUniformLocation(programId, "projection");
+        mTextureMatrixLoc = glGetUniformLocation(programId, "texture");
+        mSamplerLoc = glGetUniformLocation(programId, "sampler");
+        mColorLoc = glGetUniformLocation(programId, "color");
+        mDisplayMaxLuminanceLoc = glGetUniformLocation(programId, "displayMaxLuminance");
+        mMaxMasteringLuminanceLoc = glGetUniformLocation(programId, "maxMasteringLuminance");
+        mMaxContentLuminanceLoc = glGetUniformLocation(programId, "maxContentLuminance");
+        mInputTransformMatrixLoc = glGetUniformLocation(programId, "inputTransformMatrix");
+        mOutputTransformMatrixLoc = glGetUniformLocation(programId, "outputTransformMatrix");
+        mCornerRadiusLoc = glGetUniformLocation(programId, "cornerRadius");
+        mCropCenterLoc = glGetUniformLocation(programId, "cropCenter");
+
+        // set-up the default values for our uniforms
+        glUseProgram(programId);
+        glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, mat4().asArray());
+        glEnableVertexAttribArray(0);
+    }
+}
+
+bool Program::isValid() const {
+    return mInitialized;
+}
+
+void Program::use() {
+    glUseProgram(mProgram);
+}
+
+GLuint Program::getAttrib(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetAttribLocation(mProgram, name);
+}
+
+GLint Program::getUniform(const char* name) const {
+    // TODO: maybe use a local cache
+    return glGetUniformLocation(mProgram, name);
+}
+
+GLuint Program::buildShader(const char* source, GLenum type) {
+    GLuint shader = glCreateShader(type);
+    glShaderSource(shader, 1, &source, 0);
+    glCompileShader(shader);
+    GLint status;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
+    if (status != GL_TRUE) {
+        // Some drivers return wrong values for GL_INFO_LOG_LENGTH,
+        // so use a fixed size instead.
+        GLchar log[512];
+        glGetShaderInfoLog(shader, sizeof(log), 0, log);
+        ALOGE("Error while compiling shader: \n%s\n%s", source, log);
+        glDeleteShader(shader);
+        return 0;
+    }
+    return shader;
+}
+
+void Program::setUniforms(const Description& desc) {
+    // TODO: we should have a mechanism here to not always reset uniforms that
+    // didn't change for this program.
+
+    if (mSamplerLoc >= 0) {
+        glUniform1i(mSamplerLoc, 0);
+        glUniformMatrix4fv(mTextureMatrixLoc, 1, GL_FALSE, desc.texture.getMatrix().asArray());
+    }
+    if (mColorLoc >= 0) {
+        const float color[4] = {desc.color.r, desc.color.g, desc.color.b, desc.color.a};
+        glUniform4fv(mColorLoc, 1, color);
+    }
+    if (mInputTransformMatrixLoc >= 0) {
+        mat4 inputTransformMatrix = desc.inputTransformMatrix;
+        glUniformMatrix4fv(mInputTransformMatrixLoc, 1, GL_FALSE, inputTransformMatrix.asArray());
+    }
+    if (mOutputTransformMatrixLoc >= 0) {
+        // The output transform matrix and color matrix can be combined as one matrix
+        // that is applied right before applying OETF.
+        mat4 outputTransformMatrix = desc.colorMatrix * desc.outputTransformMatrix;
+        glUniformMatrix4fv(mOutputTransformMatrixLoc, 1, GL_FALSE, outputTransformMatrix.asArray());
+    }
+    if (mDisplayMaxLuminanceLoc >= 0) {
+        glUniform1f(mDisplayMaxLuminanceLoc, desc.displayMaxLuminance);
+    }
+    if (mMaxMasteringLuminanceLoc >= 0) {
+        glUniform1f(mMaxMasteringLuminanceLoc, desc.maxMasteringLuminance);
+    }
+    if (mMaxContentLuminanceLoc >= 0) {
+        glUniform1f(mMaxContentLuminanceLoc, desc.maxContentLuminance);
+    }
+    if (mCornerRadiusLoc >= 0) {
+        glUniform1f(mCornerRadiusLoc, desc.cornerRadius);
+    }
+    if (mCropCenterLoc >= 0) {
+        glUniform2f(mCropCenterLoc, desc.cropSize.x / 2.0f, desc.cropSize.y / 2.0f);
+    }
+    // these uniforms are always present
+    glUniformMatrix4fv(mProjectionMatrixLoc, 1, GL_FALSE, desc.projectionMatrix.asArray());
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/Program.h b/media/libstagefright/renderfright/gl/Program.h
new file mode 100644
index 0000000..fc3755e
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/Program.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAM_H
+#define SF_RENDER_ENGINE_PROGRAM_H
+
+#include <stdint.h>
+
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include "ProgramCache.h"
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+namespace gl {
+
+/*
+ * Abstracts a GLSL program comprising a vertex and fragment shader
+ */
+class Program {
+public:
+    // known locations for position and texture coordinates
+    enum {
+        /* position of each vertex for vertex shader */
+        position = 0,
+
+        /* UV coordinates for texture mapping */
+        texCoords = 1,
+
+        /* Crop coordinates, in pixels */
+        cropCoords = 2,
+
+        /* Shadow color */
+        shadowColor = 3,
+
+        /* Shadow params */
+        shadowParams = 4,
+    };
+
+    Program(const ProgramCache::Key& needs, const char* vertex, const char* fragment);
+    ~Program() = default;
+
+    /* whether this object is usable */
+    bool isValid() const;
+
+    /* Binds this program to the GLES context */
+    void use();
+
+    /* Returns the location of the specified attribute */
+    GLuint getAttrib(const char* name) const;
+
+    /* Returns the location of the specified uniform */
+    GLint getUniform(const char* name) const;
+
+    /* set-up uniforms from the description */
+    void setUniforms(const Description& desc);
+
+private:
+    GLuint buildShader(const char* source, GLenum type);
+
+    // whether the initialization succeeded
+    bool mInitialized;
+
+    // Name of the OpenGL program and shaders
+    GLuint mProgram;
+    GLuint mVertexShader;
+    GLuint mFragmentShader;
+
+    /* location of the projection matrix uniform */
+    GLint mProjectionMatrixLoc;
+
+    /* location of the texture matrix uniform */
+    GLint mTextureMatrixLoc;
+
+    /* location of the sampler uniform */
+    GLint mSamplerLoc;
+
+    /* location of the color uniform */
+    GLint mColorLoc;
+
+    /* location of display luminance uniform */
+    GLint mDisplayMaxLuminanceLoc;
+    /* location of max mastering luminance uniform */
+    GLint mMaxMasteringLuminanceLoc;
+    /* location of max content luminance uniform */
+    GLint mMaxContentLuminanceLoc;
+
+    /* location of transform matrix */
+    GLint mInputTransformMatrixLoc;
+    GLint mOutputTransformMatrixLoc;
+
+    /* location of corner radius uniform */
+    GLint mCornerRadiusLoc;
+
+    /* location of surface crop origin uniform, for rounded corner clipping */
+    GLint mCropCenterLoc;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAM_H */
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
new file mode 100644
index 0000000..3ae35ec
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -0,0 +1,800 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "ProgramCache.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <log/log.h>
+#include <renderengine/private/Description.h>
+#include <utils/String8.h>
+#include <utils/Trace.h>
+#include "Program.h"
+
+ANDROID_SINGLETON_STATIC_INSTANCE(android::renderengine::gl::ProgramCache)
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/*
+ * A simple formatter class that automatically appends a newline to each
+ * streamed string and manages the indentation.
+ */
+
+class Formatter;
+static Formatter& indent(Formatter& f);
+static Formatter& dedent(Formatter& f);
+
+class Formatter {
+    String8 mString;
+    int mIndent;
+    typedef Formatter& (*FormaterManipFunc)(Formatter&);
+    friend Formatter& indent(Formatter& f);
+    friend Formatter& dedent(Formatter& f);
+
+public:
+    Formatter() : mIndent(0) {}
+
+    String8 getString() const { return mString; }
+
+    friend Formatter& operator<<(Formatter& out, const char* in) {
+        for (int i = 0; i < out.mIndent; i++) {
+            out.mString.append("    ");
+        }
+        out.mString.append(in);
+        out.mString.append("\n");
+        return out;
+    }
+    friend inline Formatter& operator<<(Formatter& out, const String8& in) {
+        return operator<<(out, in.string());
+    }
+    friend inline Formatter& operator<<(Formatter& to, FormaterManipFunc func) {
+        return (*func)(to);
+    }
+};
+Formatter& indent(Formatter& f) {
+    f.mIndent++;
+    return f;
+}
+Formatter& dedent(Formatter& f) {
+    f.mIndent--;
+    return f;
+}
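+
+// Illustrative use of the Formatter (each "<<" of a string emits one line, indented by
+// four spaces per indent level):
+//     Formatter fs;
+//     fs << "void main() {" << indent << "gl_FragColor = vec4(1.0);" << dedent << "}";
+//     // fs.getString() == "void main() {\n    gl_FragColor = vec4(1.0);\n}\n"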
+
+void ProgramCache::primeCache(
+        EGLContext context, bool useColorManagement, bool toneMapperShaderOnly) {
+    auto& cache = mCaches[context];
+    uint32_t shaderCount = 0;
+
+    if (toneMapperShaderOnly) {
+        Key shaderKey;
+        // base settings used by HDR->SDR tonemap only
+        shaderKey.set(Key::BLEND_MASK | Key::INPUT_TRANSFORM_MATRIX_MASK |
+                      Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::OUTPUT_TF_MASK |
+                      Key::OPACITY_MASK | Key::ALPHA_MASK |
+                      Key::ROUNDED_CORNERS_MASK | Key::TEXTURE_MASK,
+                      Key::BLEND_NORMAL | Key::INPUT_TRANSFORM_MATRIX_ON |
+                      Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::OUTPUT_TF_SRGB |
+                      Key::OPACITY_OPAQUE | Key::ALPHA_EQ_ONE |
+                      Key::ROUNDED_CORNERS_OFF | Key::TEXTURE_EXT);
+        for (int i = 0; i < 4; i++) {
+            // Cache input transfer for HLG & ST2084
+            shaderKey.set(Key::INPUT_TF_MASK, (i & 1) ?
+                    Key::INPUT_TF_HLG : Key::INPUT_TF_ST2084);
+
+            // Cache Y410 input on or off
+            shaderKey.set(Key::Y410_BT2020_MASK, (i & 2) ?
+                    Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+        return;
+    }
+
+    uint32_t keyMask = Key::BLEND_MASK | Key::OPACITY_MASK | Key::ALPHA_MASK | Key::TEXTURE_MASK
+        | Key::ROUNDED_CORNERS_MASK;
+    // Prime the cache for all combinations of the above masks,
+    // leaving off the experimental color matrix mask options.
+
+    nsecs_t timeBefore = systemTime();
+    for (uint32_t keyVal = 0; keyVal <= keyMask; keyVal++) {
+        Key shaderKey;
+        shaderKey.set(keyMask, keyVal);
+        uint32_t tex = shaderKey.getTextureTarget();
+        if (tex != Key::TEXTURE_OFF && tex != Key::TEXTURE_EXT && tex != Key::TEXTURE_2D) {
+            continue;
+        }
+        if (cache.count(shaderKey) == 0) {
+            cache.emplace(shaderKey, generateProgram(shaderKey));
+            shaderCount++;
+        }
+    }
+
+    // Prime for sRGB->P3 conversion
+    if (useColorManagement) {
+        Key shaderKey;
+        shaderKey.set(Key::BLEND_MASK | Key::OUTPUT_TRANSFORM_MATRIX_MASK | Key::INPUT_TF_MASK |
+                              Key::OUTPUT_TF_MASK,
+                      Key::BLEND_PREMULT | Key::OUTPUT_TRANSFORM_MATRIX_ON | Key::INPUT_TF_SRGB |
+                              Key::OUTPUT_TF_SRGB);
+        for (int i = 0; i < 16; i++) {
+            shaderKey.set(Key::OPACITY_MASK,
+                          (i & 1) ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT);
+            shaderKey.set(Key::ALPHA_MASK, (i & 2) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE);
+
+            // Cache rounded corners
+            shaderKey.set(Key::ROUNDED_CORNERS_MASK,
+                          (i & 4) ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF);
+
+            // Cache texture off option for window transition
+            shaderKey.set(Key::TEXTURE_MASK, (i & 8) ? Key::TEXTURE_EXT : Key::TEXTURE_OFF);
+            if (cache.count(shaderKey) == 0) {
+                cache.emplace(shaderKey, generateProgram(shaderKey));
+                shaderCount++;
+            }
+        }
+    }
+
+    nsecs_t timeAfter = systemTime();
+    float compileTimeMs = static_cast<float>(timeAfter - timeBefore) / 1.0E6;
+    ALOGD("shader cache generated - %u shaders in %f ms\n", shaderCount, compileTimeMs);
+}
+
+ProgramCache::Key ProgramCache::computeKey(const Description& description) {
+    Key needs;
+    needs.set(Key::TEXTURE_MASK,
+              !description.textureEnabled
+                      ? Key::TEXTURE_OFF
+                      : description.texture.getTextureTarget() == GL_TEXTURE_EXTERNAL_OES
+                              ? Key::TEXTURE_EXT
+                              : description.texture.getTextureTarget() == GL_TEXTURE_2D
+                                      ? Key::TEXTURE_2D
+                                      : Key::TEXTURE_OFF)
+            .set(Key::ALPHA_MASK, (description.color.a < 1) ? Key::ALPHA_LT_ONE : Key::ALPHA_EQ_ONE)
+            .set(Key::BLEND_MASK,
+                 description.isPremultipliedAlpha ? Key::BLEND_PREMULT : Key::BLEND_NORMAL)
+            .set(Key::OPACITY_MASK,
+                 description.isOpaque ? Key::OPACITY_OPAQUE : Key::OPACITY_TRANSLUCENT)
+            .set(Key::Key::INPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasInputTransformMatrix() ? Key::INPUT_TRANSFORM_MATRIX_ON
+                                                       : Key::INPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::Key::OUTPUT_TRANSFORM_MATRIX_MASK,
+                 description.hasOutputTransformMatrix() || description.hasColorMatrix()
+                         ? Key::OUTPUT_TRANSFORM_MATRIX_ON
+                         : Key::OUTPUT_TRANSFORM_MATRIX_OFF)
+            .set(Key::ROUNDED_CORNERS_MASK,
+                 description.cornerRadius > 0 ? Key::ROUNDED_CORNERS_ON : Key::ROUNDED_CORNERS_OFF)
+            .set(Key::SHADOW_MASK, description.drawShadows ? Key::SHADOW_ON : Key::SHADOW_OFF);
+    needs.set(Key::Y410_BT2020_MASK,
+              description.isY410BT2020 ? Key::Y410_BT2020_ON : Key::Y410_BT2020_OFF);
+
+    if (needs.hasTransformMatrix() ||
+        (description.inputTransferFunction != description.outputTransferFunction)) {
+        switch (description.inputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::INPUT_TF_MASK, Key::INPUT_TF_HLG);
+                break;
+        }
+
+        switch (description.outputTransferFunction) {
+            case Description::TransferFunction::LINEAR:
+            default:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_LINEAR);
+                break;
+            case Description::TransferFunction::SRGB:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_SRGB);
+                break;
+            case Description::TransferFunction::ST2084:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_ST2084);
+                break;
+            case Description::TransferFunction::HLG:
+                needs.set(Key::OUTPUT_TF_MASK, Key::OUTPUT_TF_HLG);
+                break;
+        }
+    }
+
+    return needs;
+}
+
+// Generate EOTF that converts signal values to relative display light,
+// both normalized to [0, 1].
+void ProgramCache::generateEOTF(Formatter& fs, const Key& needs) {
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float EOTF_sRGB(float srgb) {
+                    return srgb <= 0.04045 ? srgb / 12.92 : pow((srgb + 0.055) / 1.055, 2.4);
+                }
+
+                vec3 EOTF_sRGB(const vec3 srgb) {
+                    return vec3(EOTF_sRGB(srgb.r), EOTF_sRGB(srgb.g), EOTF_sRGB(srgb.b));
+                }
+
+                vec3 EOTF(const vec3 srgb) {
+                    return sign(srgb.rgb) * EOTF_sRGB(abs(srgb.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 EOTF(const highp vec3 color) {
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(clamp(color, 0.0, 1.0), 1.0 / vec3(m2));
+                    tmp = max(tmp - c1, 0.0) / (c2 - c3 * tmp);
+                    return pow(tmp, 1.0 / vec3(m1));
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float EOTF_channel(const highp float channel) {
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 0.5 ? channel * channel / 3.0 :
+                            (exp((channel - c) / a) + b) / 12.0;
+                }
+
+                vec3 EOTF(const highp vec3 color) {
+                    return vec3(EOTF_channel(color.r), EOTF_channel(color.g),
+                            EOTF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                vec3 EOTF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+void ProgramCache::generateToneMappingProcess(Formatter& fs, const Key& needs) {
+    // Convert relative light to absolute light.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::INPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    // The formula is:
+                    // alpha * pow(Y, gamma - 1.0) * color + beta;
+                    // where alpha is 1000.0, gamma is 1.2, beta is 0.0.
+                    return color * 1000.0 * pow(color.y, 0.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 ScaleLuminance(highp vec3 color) {
+                    return color * displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // Tone map absolute light to display luminance range.
+    switch (needs.getInputTF()) {
+        case Key::INPUT_TF_ST2084:
+        case Key::INPUT_TF_HLG:
+            switch (needs.getOutputTF()) {
+                case Key::OUTPUT_TF_HLG:
+                    // Right now when mixed PQ and HLG contents are presented,
+                    // HLG content will always be converted to PQ. However, for
+                    // completeness, we simply clamp the value to [0.0, 1000.0].
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return clamp(color, 0.0, 1000.0);
+                        }
+                    )__SHADER__";
+                    break;
+                case Key::OUTPUT_TF_ST2084:
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            return color;
+                        }
+                    )__SHADER__";
+                    break;
+                default:
+                    fs << R"__SHADER__(
+                        highp vec3 ToneMap(highp vec3 color) {
+                            float maxMasteringLumi = maxMasteringLuminance;
+                            float maxContentLumi = maxContentLuminance;
+                            float maxInLumi = min(maxMasteringLumi, maxContentLumi);
+                            float maxOutLumi = displayMaxLuminance;
+
+                            float nits = color.y;
+
+                            // clamp to max input luminance
+                            nits = clamp(nits, 0.0, maxInLumi);
+
+                            // scale [0.0, maxInLumi] to [0.0, maxOutLumi]
+                            if (maxInLumi <= maxOutLumi) {
+                                return color * (maxOutLumi / maxInLumi);
+                            } else {
+                                // three control points
+                                const float x0 = 10.0;
+                                const float y0 = 17.0;
+                                float x1 = maxOutLumi * 0.75;
+                                float y1 = x1;
+                                float x2 = x1 + (maxInLumi - x1) / 2.0;
+                                float y2 = y1 + (maxOutLumi - y1) * 0.75;
+
+                                // horizontal distances between the last three control points
+                                float h12 = x2 - x1;
+                                float h23 = maxInLumi - x2;
+                                // tangents at the last three control points
+                                float m1 = (y2 - y1) / h12;
+                                float m3 = (maxOutLumi - y2) / h23;
+                                float m2 = (m1 + m3) / 2.0;
+
+                                if (nits < x0) {
+                                    // scale [0.0, x0] to [0.0, y0] linearly
+                                    float slope = y0 / x0;
+                                    return color * slope;
+                                } else if (nits < x1) {
+                                    // scale [x0, x1] to [y0, y1] linearly
+                                    float slope = (y1 - y0) / (x1 - x0);
+                                    nits = y0 + (nits - x0) * slope;
+                                } else if (nits < x2) {
+                                    // scale [x1, x2] to [y1, y2] using Hermite interp
+                                    float t = (nits - x1) / h12;
+                                    nits = (y1 * (1.0 + 2.0 * t) + h12 * m1 * t) * (1.0 - t) * (1.0 - t) +
+                                            (y2 * (3.0 - 2.0 * t) + h12 * m2 * (t - 1.0)) * t * t;
+                                } else {
+                                    // scale [x2, maxInLumi] to [y2, maxOutLumi] using Hermite interp
+                                    float t = (nits - x2) / h23;
+                                    nits = (y2 * (1.0 + 2.0 * t) + h23 * m2 * t) * (1.0 - t) * (1.0 - t) +
+                                            (maxOutLumi * (3.0 - 2.0 * t) + h23 * m3 * (t - 1.0)) * t * t;
+                                }
+                            }
+
+                            // color.y is greater than x0 and is thus non-zero
+                            return color * (nits / color.y);
+                        }
+                    )__SHADER__";
+                    break;
+            }
+            break;
+        default:
+            // inverse tone map; the output luminance can be up to maxOutLumi.
+            fs << R"__SHADER__(
+                highp vec3 ToneMap(highp vec3 color) {
+                    const float maxOutLumi = 3000.0;
+
+                    const float x0 = 5.0;
+                    const float y0 = 2.5;
+                    float x1 = displayMaxLuminance * 0.7;
+                    float y1 = maxOutLumi * 0.15;
+                    float x2 = displayMaxLuminance * 0.9;
+                    float y2 = maxOutLumi * 0.45;
+                    float x3 = displayMaxLuminance;
+                    float y3 = maxOutLumi;
+
+                    float c1 = y1 / 3.0;
+                    float c2 = y2 / 2.0;
+                    float c3 = y3 / 1.5;
+
+                    float nits = color.y;
+
+                    float scale;
+                    if (nits <= x0) {
+                        // scale [0.0, x0] to [0.0, y0] linearly
+                        const float slope = y0 / x0;
+                        return color * slope;
+                    } else if (nits <= x1) {
+                        // scale [x0, x1] to [y0, y1] using a curve
+                        float t = (nits - x0) / (x1 - x0);
+                        nits = (1.0 - t) * (1.0 - t) * y0 + 2.0 * (1.0 - t) * t * c1 + t * t * y1;
+                    } else if (nits <= x2) {
+                        // scale [x1, x2] to [y1, y2] using a curve
+                        float t = (nits - x1) / (x2 - x1);
+                        nits = (1.0 - t) * (1.0 - t) * y1 + 2.0 * (1.0 - t) * t * c2 + t * t * y2;
+                    } else {
+                        // scale [x2, x3] to [y2, y3] using a curve
+                        float t = (nits - x2) / (x3 - x2);
+                        nits = (1.0 - t) * (1.0 - t) * y2 + 2.0 * (1.0 - t) * t * c3 + t * t * y3;
+                    }
+
+                    // color.y is greater than x0 and is thus non-zero
+                    return color * (nits / color.y);
+                }
+            )__SHADER__";
+            break;
+    }
+
+    // convert absolute light to relative light.
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 10000.0;
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / 1000.0 * pow(color.y / 1000.0, -0.2 / 1.2);
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                highp vec3 NormalizeLuminance(highp vec3 color) {
+                    return color / displayMaxLuminance;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+// Generate OOTF that modifies the relative scene light to relative display light.
+void ProgramCache::generateOOTF(Formatter& fs, const ProgramCache::Key& needs) {
+    if (!needs.needsToneMapping()) {
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return color;
+            }
+        )__SHADER__";
+    } else {
+        generateToneMappingProcess(fs, needs);
+        fs << R"__SHADER__(
+            highp vec3 OOTF(const highp vec3 color) {
+                return NormalizeLuminance(ToneMap(ScaleLuminance(color)));
+            }
+        )__SHADER__";
+    }
+}
+
+// Generate OETF that converts relative display light to signal values,
+// both normalized to [0, 1]
+void ProgramCache::generateOETF(Formatter& fs, const Key& needs) {
+    switch (needs.getOutputTF()) {
+        case Key::OUTPUT_TF_SRGB:
+            fs << R"__SHADER__(
+                float OETF_sRGB(const float linear) {
+                    return linear <= 0.0031308 ?
+                            linear * 12.92 : (pow(linear, 1.0 / 2.4) * 1.055) - 0.055;
+                }
+
+                vec3 OETF_sRGB(const vec3 linear) {
+                    return vec3(OETF_sRGB(linear.r), OETF_sRGB(linear.g), OETF_sRGB(linear.b));
+                }
+
+                vec3 OETF(const vec3 linear) {
+                    return sign(linear.rgb) * OETF_sRGB(abs(linear.rgb));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_ST2084:
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    const highp float m1 = (2610.0 / 4096.0) / 4.0;
+                    const highp float m2 = (2523.0 / 4096.0) * 128.0;
+                    const highp float c1 = (3424.0 / 4096.0);
+                    const highp float c2 = (2413.0 / 4096.0) * 32.0;
+                    const highp float c3 = (2392.0 / 4096.0) * 32.0;
+
+                    highp vec3 tmp = pow(linear, vec3(m1));
+                    tmp = (c1 + c2 * tmp) / (1.0 + c3 * tmp);
+                    return pow(tmp, vec3(m2));
+                }
+            )__SHADER__";
+            break;
+        case Key::OUTPUT_TF_HLG:
+            fs << R"__SHADER__(
+                highp float OETF_channel(const highp float channel) {
+                    const highp float a = 0.17883277;
+                    const highp float b = 0.28466892;
+                    const highp float c = 0.55991073;
+                    return channel <= 1.0 / 12.0 ? sqrt(3.0 * channel) :
+                            a * log(12.0 * channel - b) + c;
+                }
+
+                vec3 OETF(const highp vec3 color) {
+                    return vec3(OETF_channel(color.r), OETF_channel(color.g),
+                            OETF_channel(color.b));
+                }
+            )__SHADER__";
+            break;
+        default:
+            fs << R"__SHADER__(
+                vec3 OETF(const vec3 linear) {
+                    return linear;
+                }
+            )__SHADER__";
+            break;
+    }
+}
+
+String8 ProgramCache::generateVertexShader(const Key& needs) {
+    Formatter vs;
+    if (needs.hasTextureCoords()) {
+        vs << "attribute vec4 texCoords;"
+           << "varying vec2 outTexCoords;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "attribute lowp vec4 cropCoords;";
+        vs << "varying lowp vec2 outCropCoords;";
+    }
+    if (needs.drawShadows()) {
+        vs << "attribute lowp vec4 shadowColor;";
+        vs << "varying lowp vec4 outShadowColor;";
+        vs << "attribute lowp vec4 shadowParams;";
+        vs << "varying lowp vec3 outShadowParams;";
+    }
+    vs << "attribute vec4 position;"
+       << "uniform mat4 projection;"
+       << "uniform mat4 texture;"
+       << "void main(void) {" << indent << "gl_Position = projection * position;";
+    if (needs.hasTextureCoords()) {
+        vs << "outTexCoords = (texture * texCoords).st;";
+    }
+    if (needs.hasRoundedCorners()) {
+        vs << "outCropCoords = cropCoords.st;";
+    }
+    if (needs.drawShadows()) {
+        vs << "outShadowColor = shadowColor;";
+        vs << "outShadowParams = shadowParams.xyz;";
+    }
+    vs << dedent << "}";
+    return vs.getString();
+}
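+
+// For reference, a minimal textured key (TEXTURE_2D, no rounded corners, no
+// shadows) yields a vertex shader along these lines:
+//
+//     attribute vec4 texCoords;
+//     varying vec2 outTexCoords;
+//     attribute vec4 position;
+//     uniform mat4 projection;
+//     uniform mat4 texture;
+//     void main(void) {
+//         gl_Position = projection * position;
+//         outTexCoords = (texture * texCoords).st;
+//     }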
+
+String8 ProgramCache::generateFragmentShader(const Key& needs) {
+    Formatter fs;
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "#extension GL_OES_EGL_image_external : require";
+    }
+
+    // a default float precision qualifier is required in OpenGL ES fragment shaders
+    fs << "precision mediump float;";
+
+    if (needs.getTextureTarget() == Key::TEXTURE_EXT) {
+        fs << "uniform samplerExternalOES sampler;";
+    } else if (needs.getTextureTarget() == Key::TEXTURE_2D) {
+        fs << "uniform sampler2D sampler;";
+    }
+
+    if (needs.hasTextureCoords()) {
+        fs << "varying vec2 outTexCoords;";
+    }
+
+    if (needs.hasRoundedCorners()) {
+        // Rounded corners implementation using a signed distance function.
+        fs << R"__SHADER__(
+            uniform float cornerRadius;
+            uniform vec2 cropCenter;
+            varying vec2 outCropCoords;
+
+            /**
+             * This function takes the current crop coordinates and calculates an alpha value based
+             * on the corner radius and distance from the crop center.
+             */
+            float applyCornerRadius(vec2 cropCoords)
+            {
+                vec2 position = cropCoords - cropCenter;
+                // Scale down the dist vector here, as otherwise large corner
+                // radii can cause floating point issues when computing the norm
+                vec2 dist = (abs(position) - cropCenter + vec2(cornerRadius)) / 16.0;
+                // Once we've found the norm, then scale back up.
+                float plane = length(max(dist, vec2(0.0))) * 16.0;
+                return 1.0 - clamp(plane - cornerRadius, 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.drawShadows()) {
+        fs << R"__SHADER__(
+            varying lowp vec4 outShadowColor;
+            varying lowp vec3 outShadowParams;
+
+            /**
+             * Returns the shadow color.
+             */
+            vec4 getShadowColor()
+            {
+                lowp float d = length(outShadowParams.xy);
+                vec2 uv = vec2(outShadowParams.z * (1.0 - d), 0.5);
+                lowp float factor = texture2D(sampler, uv).a;
+                return outShadowColor * factor;
+            }
+            )__SHADER__";
+    }
+
+    if (needs.getTextureTarget() == Key::TEXTURE_OFF || needs.hasAlpha()) {
+        fs << "uniform vec4 color;";
+    }
+
+    if (needs.isY410BT2020()) {
+        fs << R"__SHADER__(
+            vec3 convertY410BT2020(const vec3 color) {
+                const vec3 offset = vec3(0.0625, 0.5, 0.5);
+                const mat3 transform = mat3(
+                    vec3(1.1678,  1.1678, 1.1678),
+                    vec3(   0.0, -0.1878, 2.1481),
+                    vec3(1.6836, -0.6523,   0.0));
+                // Y is in G, U is in R, and V is in B
+                return clamp(transform * (color.grb - offset), 0.0, 1.0);
+            }
+            )__SHADER__";
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        if (needs.needsToneMapping()) {
+            fs << "uniform float displayMaxLuminance;";
+            fs << "uniform float maxMasteringLuminance;";
+            fs << "uniform float maxContentLuminance;";
+        }
+
+        if (needs.hasInputTransformMatrix()) {
+            fs << "uniform mat4 inputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 InputTransform(const highp vec3 color) {
+                    return color;
+                }
+            )__SHADER__";
+        }
+
+        // the transformation from a wider colorspace to a narrower one can
+        // result in >1.0 or <0.0 pixel values
+        if (needs.hasOutputTransformMatrix()) {
+            fs << "uniform mat4 outputTransformMatrix;";
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(vec3(outputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                }
+            )__SHADER__";
+        } else {
+            fs << R"__SHADER__(
+                highp vec3 OutputTransform(const highp vec3 color) {
+                    return clamp(color, 0.0, 1.0);
+                }
+            )__SHADER__";
+        }
+
+        generateEOTF(fs, needs);
+        generateOOTF(fs, needs);
+        generateOETF(fs, needs);
+    }
+
+    fs << "void main(void) {" << indent;
+    if (needs.drawShadows()) {
+        fs << "gl_FragColor = getShadowColor();";
+    } else {
+        if (needs.isTexturing()) {
+            fs << "gl_FragColor = texture2D(sampler, outTexCoords);";
+            if (needs.isY410BT2020()) {
+                fs << "gl_FragColor.rgb = convertY410BT2020(gl_FragColor.rgb);";
+            }
+        } else {
+            fs << "gl_FragColor.rgb = color.rgb;";
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.isOpaque()) {
+            fs << "gl_FragColor.a = 1.0;";
+        }
+        if (needs.hasAlpha()) {
+            // modulate the current alpha value with the alpha that was set
+            if (needs.isPremultiplied()) {
+                // ... and the color too if we're premultiplied
+                fs << "gl_FragColor *= color.a;";
+            } else {
+                fs << "gl_FragColor.a *= color.a;";
+            }
+        }
+    }
+
+    if (needs.hasTransformMatrix() || (needs.getInputTF() != needs.getOutputTF())) {
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // un-premultiply if needed before linearization
+            // avoid divide by 0 by adding 0.5/256 to the alpha channel
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb / (gl_FragColor.a + 0.0019);";
+        }
+        fs << "gl_FragColor.rgb = "
+              "OETF(OutputTransform(OOTF(InputTransform(EOTF(gl_FragColor.rgb)))));";
+        if (!needs.isOpaque() && needs.isPremultiplied()) {
+            // and re-premultiply if needed after gamma correction
+            fs << "gl_FragColor.rgb = gl_FragColor.rgb * (gl_FragColor.a + 0.0019);";
+        }
+    }
+
+    if (needs.hasRoundedCorners()) {
+        if (needs.isPremultiplied()) {
+            fs << "gl_FragColor *= vec4(applyCornerRadius(outCropCoords));";
+        } else {
+            fs << "gl_FragColor.a *= applyCornerRadius(outCropCoords);";
+        }
+    }
+
+    fs << dedent << "}";
+    return fs.getString();
+}
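+
+// The generated fragment shader applies, in order: texture sampling (or the
+// solid color), optional Y410/BT2020 conversion, optional un-premultiply,
+// OETF(OutputTransform(OOTF(InputTransform(EOTF(rgb))))), re-premultiply, and
+// finally the rounded-corner alpha mask.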
+
+std::unique_ptr<Program> ProgramCache::generateProgram(const Key& needs) {
+    ATRACE_CALL();
+
+    // vertex shader
+    String8 vs = generateVertexShader(needs);
+
+    // fragment shader
+    String8 fs = generateFragmentShader(needs);
+
+    return std::make_unique<Program>(needs, vs.string(), fs.string());
+}
+
+void ProgramCache::useProgram(EGLContext context, const Description& description) {
+    // generate the key for the shader based on the description
+    Key needs(computeKey(description));
+
+    // look-up the program in the cache
+    auto& cache = mCaches[context];
+    auto it = cache.find(needs);
+    if (it == cache.end()) {
+        // we didn't find our program, so generate one...
+        nsecs_t time = systemTime();
+        it = cache.emplace(needs, generateProgram(needs)).first;
+        time = systemTime() - time;
+
+        ALOGV(">>> generated new program for context %p: needs=%08X, time=%u ms (%zu programs)",
+              context, needs.mKey, uint32_t(ns2ms(time)), cache.size());
+    }
+
+    // here we have a suitable program for this description
+    std::unique_ptr<Program>& program = it->second;
+    if (program->isValid()) {
+        program->use();
+        program->setUniforms(description);
+    }
+}
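+
+// Typical call site (sketch; the context and description are supplied by the
+// render engine):
+//
+//     ProgramCache::getInstance().useProgram(eglGetCurrentContext(), description);
+//
+// This computes a Key from the Description, generates and caches a Program on
+// a miss, and binds the program with its uniforms on every call.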
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.h b/media/libstagefright/renderfright/gl/ProgramCache.h
new file mode 100644
index 0000000..901e631
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/ProgramCache.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_PROGRAMCACHE_H
+#define SF_RENDER_ENGINE_PROGRAMCACHE_H
+
+#include <memory>
+#include <unordered_map>
+
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <renderengine/private/Description.h>
+#include <utils/Singleton.h>
+#include <utils/TypeHelpers.h>
+
+namespace android {
+
+class String8;
+
+namespace renderengine {
+
+struct Description;
+
+namespace gl {
+
+class Formatter;
+class Program;
+
+/*
+ * This class generates GLSL programs suitable to handle a given
+ * Description. It's responsible for figuring out what to
+ * generate from a Description.
+ * It also maintains a cache of these Programs.
+ */
+class ProgramCache : public Singleton<ProgramCache> {
+public:
+    /*
+     * Key is used to retrieve a Program in the cache.
+     * A Key is generated from a Description.
+     */
+    class Key {
+        friend class ProgramCache;
+        typedef uint32_t key_t;
+        key_t mKey;
+
+    public:
+        enum {
+            BLEND_SHIFT = 0,
+            BLEND_MASK = 1 << BLEND_SHIFT,
+            BLEND_PREMULT = 1 << BLEND_SHIFT,
+            BLEND_NORMAL = 0 << BLEND_SHIFT,
+
+            OPACITY_SHIFT = 1,
+            OPACITY_MASK = 1 << OPACITY_SHIFT,
+            OPACITY_OPAQUE = 1 << OPACITY_SHIFT,
+            OPACITY_TRANSLUCENT = 0 << OPACITY_SHIFT,
+
+            ALPHA_SHIFT = 2,
+            ALPHA_MASK = 1 << ALPHA_SHIFT,
+            ALPHA_LT_ONE = 1 << ALPHA_SHIFT,
+            ALPHA_EQ_ONE = 0 << ALPHA_SHIFT,
+
+            TEXTURE_SHIFT = 3,
+            TEXTURE_MASK = 3 << TEXTURE_SHIFT,
+            TEXTURE_OFF = 0 << TEXTURE_SHIFT,
+            TEXTURE_EXT = 1 << TEXTURE_SHIFT,
+            TEXTURE_2D = 2 << TEXTURE_SHIFT,
+
+            ROUNDED_CORNERS_SHIFT = 5,
+            ROUNDED_CORNERS_MASK = 1 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_OFF = 0 << ROUNDED_CORNERS_SHIFT,
+            ROUNDED_CORNERS_ON = 1 << ROUNDED_CORNERS_SHIFT,
+
+            INPUT_TRANSFORM_MATRIX_SHIFT = 6,
+            INPUT_TRANSFORM_MATRIX_MASK = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_OFF = 0 << INPUT_TRANSFORM_MATRIX_SHIFT,
+            INPUT_TRANSFORM_MATRIX_ON = 1 << INPUT_TRANSFORM_MATRIX_SHIFT,
+
+            OUTPUT_TRANSFORM_MATRIX_SHIFT = 7,
+            OUTPUT_TRANSFORM_MATRIX_MASK = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_OFF = 0 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+            OUTPUT_TRANSFORM_MATRIX_ON = 1 << OUTPUT_TRANSFORM_MATRIX_SHIFT,
+
+            INPUT_TF_SHIFT = 8,
+            INPUT_TF_MASK = 3 << INPUT_TF_SHIFT,
+            INPUT_TF_LINEAR = 0 << INPUT_TF_SHIFT,
+            INPUT_TF_SRGB = 1 << INPUT_TF_SHIFT,
+            INPUT_TF_ST2084 = 2 << INPUT_TF_SHIFT,
+            INPUT_TF_HLG = 3 << INPUT_TF_SHIFT,
+
+            OUTPUT_TF_SHIFT = 10,
+            OUTPUT_TF_MASK = 3 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_LINEAR = 0 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_SRGB = 1 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_ST2084 = 2 << OUTPUT_TF_SHIFT,
+            OUTPUT_TF_HLG = 3 << OUTPUT_TF_SHIFT,
+
+            Y410_BT2020_SHIFT = 12,
+            Y410_BT2020_MASK = 1 << Y410_BT2020_SHIFT,
+            Y410_BT2020_OFF = 0 << Y410_BT2020_SHIFT,
+            Y410_BT2020_ON = 1 << Y410_BT2020_SHIFT,
+
+            SHADOW_SHIFT = 13,
+            SHADOW_MASK = 1 << SHADOW_SHIFT,
+            SHADOW_OFF = 0 << SHADOW_SHIFT,
+            SHADOW_ON = 1 << SHADOW_SHIFT,
+        };
+
+        inline Key() : mKey(0) {}
+        inline Key(const Key& rhs) : mKey(rhs.mKey) {}
+
+        inline Key& set(key_t mask, key_t value) {
+            mKey = (mKey & ~mask) | value;
+            return *this;
+        }
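+
+        // Example (illustrative): a premultiplied, translucent, 2D-textured
+        // layer with sRGB input and output would be keyed as
+        //     Key().set(BLEND_MASK, BLEND_PREMULT)
+        //          .set(OPACITY_MASK, OPACITY_TRANSLUCENT)
+        //          .set(TEXTURE_MASK, TEXTURE_2D)
+        //          .set(INPUT_TF_MASK, INPUT_TF_SRGB)
+        //          .set(OUTPUT_TF_MASK, OUTPUT_TF_SRGB);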
+
+        inline bool isTexturing() const { return (mKey & TEXTURE_MASK) != TEXTURE_OFF; }
+        inline bool hasTextureCoords() const { return isTexturing() && !drawShadows(); }
+        inline int getTextureTarget() const { return (mKey & TEXTURE_MASK); }
+        inline bool isPremultiplied() const { return (mKey & BLEND_MASK) == BLEND_PREMULT; }
+        inline bool isOpaque() const { return (mKey & OPACITY_MASK) == OPACITY_OPAQUE; }
+        inline bool hasAlpha() const { return (mKey & ALPHA_MASK) == ALPHA_LT_ONE; }
+        inline bool hasRoundedCorners() const {
+            return (mKey & ROUNDED_CORNERS_MASK) == ROUNDED_CORNERS_ON;
+        }
+        inline bool drawShadows() const { return (mKey & SHADOW_MASK) == SHADOW_ON; }
+        inline bool hasInputTransformMatrix() const {
+            return (mKey & INPUT_TRANSFORM_MATRIX_MASK) == INPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasOutputTransformMatrix() const {
+            return (mKey & OUTPUT_TRANSFORM_MATRIX_MASK) == OUTPUT_TRANSFORM_MATRIX_ON;
+        }
+        inline bool hasTransformMatrix() const {
+            return hasInputTransformMatrix() || hasOutputTransformMatrix();
+        }
+        inline int getInputTF() const { return (mKey & INPUT_TF_MASK); }
+        inline int getOutputTF() const { return (mKey & OUTPUT_TF_MASK); }
+
+        // When HDR and non-HDR contents are mixed, or different types of HDR contents are
+        // mixed, we run a tone mapping process to map the input content to the output
+        // content. Currently, the following conversions are handled:
+        // * SDR -> HLG
+        // * SDR -> PQ
+        // * HLG -> PQ
+        inline bool needsToneMapping() const {
+            int inputTF = getInputTF();
+            int outputTF = getOutputTF();
+
+            // Return false when converting from SDR to SDR.
+            if (inputTF == Key::INPUT_TF_SRGB && outputTF == Key::OUTPUT_TF_LINEAR) {
+                return false;
+            }
+            if (inputTF == Key::INPUT_TF_LINEAR && outputTF == Key::OUTPUT_TF_SRGB) {
+                return false;
+            }
+
+            inputTF >>= Key::INPUT_TF_SHIFT;
+            outputTF >>= Key::OUTPUT_TF_SHIFT;
+            return inputTF != outputTF;
+        }
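+
+        // For example, INPUT_TF_SRGB with OUTPUT_TF_ST2084 needs tone mapping,
+        // while INPUT_TF_SRGB with OUTPUT_TF_LINEAR does not (SDR to SDR).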
+        inline bool isY410BT2020() const { return (mKey & Y410_BT2020_MASK) == Y410_BT2020_ON; }
+
+        // for use by std::unordered_map
+
+        bool operator==(const Key& other) const { return mKey == other.mKey; }
+
+        struct Hash {
+            size_t operator()(const Key& key) const { return static_cast<size_t>(key.mKey); }
+        };
+    };
+
+    ProgramCache() = default;
+    ~ProgramCache() = default;
+
+    // Generate shaders to populate the cache
+    void primeCache(const EGLContext context, bool useColorManagement, bool toneMapperShaderOnly);
+
+    size_t getSize(const EGLContext context) { return mCaches[context].size(); }
+
+    // useProgram looks up a suitable program in the cache or generates one
+    // if none can be found.
+    void useProgram(const EGLContext context, const Description& description);
+
+private:
+    // compute a cache Key from a Description
+    static Key computeKey(const Description& description);
+    // Generate the EOTF based on the Key.
+    static void generateEOTF(Formatter& fs, const Key& needs);
+    // Generate the tone mapping methods needed by the OOTF.
+    static void generateToneMappingProcess(Formatter& fs, const Key& needs);
+    // Generate the OOTF based on the Key.
+    static void generateOOTF(Formatter& fs, const Key& needs);
+    // Generate the OETF based on the Key.
+    static void generateOETF(Formatter& fs, const Key& needs);
+    // generates a program from the Key
+    static std::unique_ptr<Program> generateProgram(const Key& needs);
+    // generates the vertex shader from the Key
+    static String8 generateVertexShader(const Key& needs);
+    // generates the fragment shader from the Key
+    static String8 generateFragmentShader(const Key& needs);
+
+    // Key/Value map used for caching Programs. Currently the cache
+    // is never shrunk (and the GL program objects are never deleted).
+    std::unordered_map<EGLContext, std::unordered_map<Key, std::unique_ptr<Program>, Key::Hash>>
+            mCaches;
+};
+
+} // namespace gl
+} // namespace renderengine
+
+ANDROID_BASIC_TYPES_TRAITS(renderengine::gl::ProgramCache::Key)
+
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_PROGRAMCACHE_H */
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
new file mode 100644
index 0000000..19f18c0
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.cpp
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "BlurFilter.h"
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES3/gl3.h>
+#include <GLES3/gl3ext.h>
+#include <ui/GraphicTypes.h>
+#include <cstdint>
+
+#include <utils/Trace.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+BlurFilter::BlurFilter(GLESRenderEngine& engine)
+      : mEngine(engine),
+        mCompositionFbo(engine),
+        mPingFbo(engine),
+        mPongFbo(engine),
+        mMixProgram(engine),
+        mBlurProgram(engine) {
+    mMixProgram.compile(getVertexShader(), getMixFragShader());
+    mMPosLoc = mMixProgram.getAttributeLocation("aPosition");
+    mMUvLoc = mMixProgram.getAttributeLocation("aUV");
+    mMTextureLoc = mMixProgram.getUniformLocation("uTexture");
+    mMCompositionTextureLoc = mMixProgram.getUniformLocation("uCompositionTexture");
+    mMMixLoc = mMixProgram.getUniformLocation("uMix");
+
+    mBlurProgram.compile(getVertexShader(), getFragmentShader());
+    mBPosLoc = mBlurProgram.getAttributeLocation("aPosition");
+    mBUvLoc = mBlurProgram.getAttributeLocation("aUV");
+    mBTextureLoc = mBlurProgram.getUniformLocation("uTexture");
+    mBOffsetLoc = mBlurProgram.getUniformLocation("uOffset");
+
+    static constexpr auto size = 2.0f;
+    static constexpr auto translation = 1.0f;
+    const GLfloat vboData[] = {
+        // Vertex data
+        translation - size, -translation - size,
+        translation - size, -translation + size,
+        translation + size, -translation + size,
+        // UV data
+        0.0f, 0.0f - translation,
+        0.0f, size - translation,
+        size, size - translation
+    };
+    mMeshBuffer.allocateBuffers(vboData, 12 /* size */);
+}
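+
+// The mesh above is a single oversized triangle covering the whole viewport:
+// the first six floats are clip-space positions and the last six are the
+// matching UVs, which is why drawMesh() reads the UV attribute at an offset of
+// 6 * sizeof(GLfloat).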
+
+status_t BlurFilter::setAsDrawTarget(const DisplaySettings& display, uint32_t radius) {
+    ATRACE_NAME("BlurFilter::setAsDrawTarget");
+    mRadius = radius;
+    mDisplayX = display.physicalDisplay.left;
+    mDisplayY = display.physicalDisplay.top;
+
+    if (mDisplayWidth < display.physicalDisplay.width() ||
+        mDisplayHeight < display.physicalDisplay.height()) {
+        ATRACE_NAME("BlurFilter::allocatingTextures");
+
+        mDisplayWidth = display.physicalDisplay.width();
+        mDisplayHeight = display.physicalDisplay.height();
+        mCompositionFbo.allocateBuffers(mDisplayWidth, mDisplayHeight);
+
+        const uint32_t fboWidth = floorf(mDisplayWidth * kFboScale);
+        const uint32_t fboHeight = floorf(mDisplayHeight * kFboScale);
+        mPingFbo.allocateBuffers(fboWidth, fboHeight);
+        mPongFbo.allocateBuffers(fboWidth, fboHeight);
+
+        if (mPingFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid ping buffer");
+            return mPingFbo.getStatus();
+        }
+        if (mPongFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid pong buffer");
+            return mPongFbo.getStatus();
+        }
+        if (mCompositionFbo.getStatus() != GL_FRAMEBUFFER_COMPLETE) {
+            ALOGE("Invalid composition buffer");
+            return mCompositionFbo.getStatus();
+        }
+        if (!mBlurProgram.isValid()) {
+            ALOGE("Invalid shader");
+            return GL_INVALID_OPERATION;
+        }
+    }
+
+    mCompositionFbo.bind();
+    glViewport(0, 0, mCompositionFbo.getBufferWidth(), mCompositionFbo.getBufferHeight());
+    return NO_ERROR;
+}
+
+void BlurFilter::drawMesh(GLuint uv, GLuint position) {
+
+    glEnableVertexAttribArray(uv);
+    glEnableVertexAttribArray(position);
+    mMeshBuffer.bind();
+    glVertexAttribPointer(position, 2 /* size */, GL_FLOAT, GL_FALSE,
+                          2 * sizeof(GLfloat) /* stride */, 0 /* offset */);
+    glVertexAttribPointer(uv, 2 /* size */, GL_FLOAT, GL_FALSE, 0 /* stride */,
+                          (GLvoid*)(6 * sizeof(GLfloat)) /* offset */);
+    mMeshBuffer.unbind();
+
+    // draw mesh
+    glDrawArrays(GL_TRIANGLES, 0 /* first */, 3 /* count */);
+}
+
+status_t BlurFilter::prepare() {
+    ATRACE_NAME("BlurFilter::prepare");
+
+    // Kawase blur is an approximation of a Gaussian blur, but it behaves differently.
+    // A radius transformation is required to map between the two, and also to
+    // introduce non-integer steps, which are necessary to smoothly interpolate
+    // large radii.
+    const auto radius = mRadius / 6.0f;
+
+    // Calculate how many passes we'll do, based on the radius.
+    // Too many passes will make the operation expensive.
+    const auto passes = min(kMaxPasses, (uint32_t)ceil(radius));
+
+    const float radiusByPasses = radius / (float)passes;
+    const float stepX = radiusByPasses / (float)mCompositionFbo.getBufferWidth();
+    const float stepY = radiusByPasses / (float)mCompositionFbo.getBufferHeight();
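+    // Worked example (illustrative): with mRadius = 30 the effective radius is
+    // 30 / 6 = 5, so passes = min(4, ceil(5)) = 4 and radiusByPasses = 1.25,
+    // which stepX/stepY then normalize by the FBO dimensions.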
+
+    // Let's start by downsampling and blurring the composited frame simultaneously.
+    mBlurProgram.useProgram();
+    glActiveTexture(GL_TEXTURE0);
+    glUniform1i(mBTextureLoc, 0);
+    glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+    glUniform2f(mBOffsetLoc, stepX, stepY);
+    glViewport(0, 0, mPingFbo.getBufferWidth(), mPingFbo.getBufferHeight());
+    mPingFbo.bind();
+    drawMesh(mBUvLoc, mBPosLoc);
+
+    // And now we'll ping pong between our textures, to accumulate the result of various offsets.
+    GLFramebuffer* read = &mPingFbo;
+    GLFramebuffer* draw = &mPongFbo;
+    glViewport(0, 0, draw->getBufferWidth(), draw->getBufferHeight());
+    for (auto i = 1; i < passes; i++) {
+        ATRACE_NAME("BlurFilter::renderPass");
+        draw->bind();
+
+        glBindTexture(GL_TEXTURE_2D, read->getTextureName());
+        glUniform2f(mBOffsetLoc, stepX * i, stepY * i);
+
+        drawMesh(mBUvLoc, mBPosLoc);
+
+        // Swap buffers for next iteration
+        auto tmp = draw;
+        draw = read;
+        read = tmp;
+    }
+    mLastDrawTarget = read;
+
+    return NO_ERROR;
+}
+
+status_t BlurFilter::render(bool multiPass) {
+    ATRACE_NAME("BlurFilter::render");
+
+    // Now let's scale our blur up. It will be interpolated with the larger composited
+    // texture for the first frames, to hide downscaling artifacts.
+    GLfloat mix = fmin(1.0, mRadius / kMaxCrossFadeRadius);
+
+    // When doing multiple passes, we cannot try to read mCompositionFbo, given that we'll
+    // be writing onto it. Let's disable the crossfade, otherwise we'd need 1 extra frame buffer,
+    // as large as the screen size.
+    if (mix >= 1 || multiPass) {
+        mLastDrawTarget->bindAsReadBuffer();
+        glBlitFramebuffer(0, 0, mLastDrawTarget->getBufferWidth(),
+                          mLastDrawTarget->getBufferHeight(), mDisplayX, mDisplayY, mDisplayWidth,
+                          mDisplayHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
+        return NO_ERROR;
+    }
+
+    mMixProgram.useProgram();
+    glUniform1f(mMMixLoc, mix);
+    glActiveTexture(GL_TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, mLastDrawTarget->getTextureName());
+    glUniform1i(mMTextureLoc, 0);
+    glActiveTexture(GL_TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, mCompositionFbo.getTextureName());
+    glUniform1i(mMCompositionTextureLoc, 1);
+
+    drawMesh(mMUvLoc, mMPosLoc);
+
+    glUseProgram(0);
+    glActiveTexture(GL_TEXTURE0);
+    mEngine.checkErrors("Drawing blur mesh");
+    return NO_ERROR;
+}
+
+string BlurFilter::getVertexShader() const {
+    return R"SHADER(#version 310 es
+        precision mediump float;
+
+        in vec2 aPosition;
+        in highp vec2 aUV;
+        out highp vec2 vUV;
+
+        void main() {
+            vUV = aUV;
+            gl_Position = vec4(aPosition, 0.0, 1.0);
+        }
+    )SHADER";
+}
+
+string BlurFilter::getFragmentShader() const {
+    return R"SHADER(#version 310 es
+        precision mediump float;
+
+        uniform sampler2D uTexture;
+        uniform vec2 uOffset;
+
+        in highp vec2 vUV;
+        out vec4 fragColor;
+
+        void main() {
+            fragColor  = texture(uTexture, vUV, 0.0);
+            fragColor += texture(uTexture, vUV + vec2( uOffset.x,  uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2( uOffset.x, -uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2(-uOffset.x,  uOffset.y), 0.0);
+            fragColor += texture(uTexture, vUV + vec2(-uOffset.x, -uOffset.y), 0.0);
+
+            fragColor = vec4(fragColor.rgb * 0.2, 1.0);
+        }
+    )SHADER";
+}
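+
+// The fragment shader above performs one Kawase step: it averages the centre
+// sample and the four diagonal samples at +/-uOffset (hence the 0.2 weight)
+// and forces alpha to 1.0.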
+
+string BlurFilter::getMixFragShader() const {
+    string shader = R"SHADER(#version 310 es
+        precision mediump float;
+
+        in highp vec2 vUV;
+        out vec4 fragColor;
+
+        uniform sampler2D uCompositionTexture;
+        uniform sampler2D uTexture;
+        uniform float uMix;
+
+        void main() {
+            vec4 blurred = texture(uTexture, vUV);
+            vec4 composition = texture(uCompositionTexture, vUV);
+            fragColor = mix(composition, blurred, uMix);
+        }
+    )SHADER";
+    return shader;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/BlurFilter.h b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
new file mode 100644
index 0000000..593a8fd
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/BlurFilter.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+#include "../GLVertexBuffer.h"
+#include "GenericProgram.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+/**
+ * This is an implementation of a Kawase blur, as described here:
+ * https://community.arm.com/cfs-file/__key/communityserver-blogs-components-weblogfiles/
+ * 00-00-00-20-66/siggraph2015_2D00_mmg_2D00_marius_2D00_notes.pdf
+ */
+class BlurFilter {
+public:
+    // Downsample FBO to improve performance
+    static constexpr float kFboScale = 0.25f;
+    // Maximum number of render passes
+    static constexpr uint32_t kMaxPasses = 4;
+    // To avoid downscaling artifacts, we interpolate the blurred fbo with the full composited
+    // image, up to this radius.
+    static constexpr float kMaxCrossFadeRadius = 30.0f;
+
+    explicit BlurFilter(GLESRenderEngine& engine);
+    virtual ~BlurFilter() {}
+
+    // Set up render targets, redirecting output to offscreen texture.
+    status_t setAsDrawTarget(const DisplaySettings&, uint32_t radius);
+    // Execute blur passes, rendering to offscreen texture.
+    status_t prepare();
+    // Render blur to the bound framebuffer (screen).
+    status_t render(bool multiPass);
+
+private:
+    uint32_t mRadius;
+    void drawMesh(GLuint uv, GLuint position);
+    string getVertexShader() const;
+    string getFragmentShader() const;
+    string getMixFragShader() const;
+
+    GLESRenderEngine& mEngine;
+    // Frame buffer holding the composited background.
+    GLFramebuffer mCompositionFbo;
+    // Frame buffers holding the blur passes.
+    GLFramebuffer mPingFbo;
+    GLFramebuffer mPongFbo;
+    uint32_t mDisplayWidth = 0;
+    uint32_t mDisplayHeight = 0;
+    uint32_t mDisplayX = 0;
+    uint32_t mDisplayY = 0;
+    // Buffer holding the final blur pass.
+    GLFramebuffer* mLastDrawTarget;
+
+    // VBO containing vertex and uv data of a fullscreen triangle.
+    GLVertexBuffer mMeshBuffer;
+
+    GenericProgram mMixProgram;
+    GLuint mMPosLoc;
+    GLuint mMUvLoc;
+    GLuint mMMixLoc;
+    GLuint mMTextureLoc;
+    GLuint mMCompositionTextureLoc;
+
+    GenericProgram mBlurProgram;
+    GLuint mBPosLoc;
+    GLuint mBUvLoc;
+    GLuint mBTextureLoc;
+    GLuint mBOffsetLoc;
+};
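+
+// Expected usage, per the method comments above: call setAsDrawTarget() so the
+// scene is composited into the offscreen texture, then prepare() to run the
+// downscaled blur passes, and finally render() with the destination
+// framebuffer bound to draw (and optionally crossfade) the result.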
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
new file mode 100644
index 0000000..bb35889
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GenericProgram.h"
+
+#include <GLES/gl.h>
+#include <GLES/glext.h>
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+GenericProgram::GenericProgram(GLESRenderEngine& engine) : mEngine(engine) {}
+
+GenericProgram::~GenericProgram() {
+    if (mVertexShaderHandle != 0) {
+        if (mProgramHandle != 0) {
+            glDetachShader(mProgramHandle, mVertexShaderHandle);
+        }
+        glDeleteShader(mVertexShaderHandle);
+    }
+
+    if (mFragmentShaderHandle != 0) {
+        if (mProgramHandle != 0) {
+            glDetachShader(mProgramHandle, mFragmentShaderHandle);
+        }
+        glDeleteShader(mFragmentShaderHandle);
+    }
+
+    if (mProgramHandle != 0) {
+        glDeleteProgram(mProgramHandle);
+    }
+}
+
+void GenericProgram::compile(string vertexShader, string fragmentShader) {
+    mVertexShaderHandle = compileShader(GL_VERTEX_SHADER, vertexShader);
+    mFragmentShaderHandle = compileShader(GL_FRAGMENT_SHADER, fragmentShader);
+    if (mVertexShaderHandle == 0 || mFragmentShaderHandle == 0) {
+        ALOGE("Aborting program creation.");
+        return;
+    }
+    mProgramHandle = createAndLink(mVertexShaderHandle, mFragmentShaderHandle);
+    mEngine.checkErrors("Linking program");
+}
+
+void GenericProgram::useProgram() const {
+    glUseProgram(mProgramHandle);
+}
+
+GLuint GenericProgram::compileShader(GLuint type, string src) const {
+    const GLuint shader = glCreateShader(type);
+    if (shader == 0) {
+        mEngine.checkErrors("Creating shader");
+        return 0;
+    }
+    const GLchar* charSrc = (const GLchar*)src.c_str();
+    glShaderSource(shader, 1, &charSrc, nullptr);
+    glCompileShader(shader);
+
+    GLint isCompiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &isCompiled);
+    if (isCompiled == GL_FALSE) {
+        GLint maxLength = 0;
+        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &maxLength);
+        string errorLog;
+        // resize (not reserve) so the string owns writable storage for the log
+        errorLog.resize(maxLength);
+        glGetShaderInfoLog(shader, maxLength, &maxLength, errorLog.data());
+        glDeleteShader(shader);
+        ALOGE("Error compiling shader: %s", errorLog.c_str());
+        return 0;
+    }
+    return shader;
+}
+
+GLuint GenericProgram::createAndLink(GLuint vertexShader, GLuint fragmentShader) const {
+    const GLuint program = glCreateProgram();
+    mEngine.checkErrors("Creating program");
+
+    glAttachShader(program, vertexShader);
+    glAttachShader(program, fragmentShader);
+    glLinkProgram(program);
+    mEngine.checkErrors("Linking program");
+    return program;
+}
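+
+// Note that createAndLink() only funnels GL errors through checkErrors(); it
+// does not query GL_LINK_STATUS, so isValid() below merely reflects whether a
+// program object was created.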
+
+GLuint GenericProgram::getUniformLocation(const string name) const {
+    if (mProgramHandle == 0) {
+        ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+        return -1;
+    }
+    return glGetUniformLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+GLuint GenericProgram::getAttributeLocation(const string name) const {
+    if (mProgramHandle == 0) {
+        ALOGE("Can't get location of %s on an invalid program.", name.c_str());
+        return -1;
+    }
+    return glGetAttribLocation(mProgramHandle, (const GLchar*)name.c_str());
+}
+
+bool GenericProgram::isValid() const {
+    return mProgramHandle != 0;
+}
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/gl/filters/GenericProgram.h b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
new file mode 100644
index 0000000..6da2a5a
--- /dev/null
+++ b/media/libstagefright/renderfright/gl/filters/GenericProgram.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <ui/GraphicTypes.h>
+#include "../GLESRenderEngine.h"
+#include "../GLFramebuffer.h"
+
+using namespace std;
+
+namespace android {
+namespace renderengine {
+namespace gl {
+
+class GenericProgram {
+public:
+    explicit GenericProgram(GLESRenderEngine& renderEngine);
+    ~GenericProgram();
+    void compile(string vertexShader, string fragmentShader);
+    bool isValid() const;
+    void useProgram() const;
+    GLuint getAttributeLocation(const string name) const;
+    GLuint getUniformLocation(const string name) const;
+
+private:
+    GLuint compileShader(GLuint type, const string src) const;
+    GLuint createAndLink(GLuint vertexShader, GLuint fragmentShader) const;
+
+    GLESRenderEngine& mEngine;
+    GLuint mVertexShaderHandle = 0;
+    GLuint mFragmentShaderHandle = 0;
+    GLuint mProgramHandle = 0;
+};
+
+} // namespace gl
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
new file mode 100644
index 0000000..ca16d2c
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/DisplaySettings.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// DisplaySettings contains the settings that are applicable when drawing all
+// layers for a given display.
+struct DisplaySettings {
+    // Rectangle describing the physical display. We will project from the
+    // logical clip onto this rectangle.
+    Rect physicalDisplay = Rect::INVALID_RECT;
+
+    // Rectangle bounded by the x,y- clipping planes in the logical display, so
+    // that the orthographic projection matrix can be computed. When
+    // constructing this matrix, the z-coordinate bounds are assumed to be at
+    // z=0 and z=1.
+    Rect clip = Rect::INVALID_RECT;
+
+    // Maximum luminance pulled from the display's HDR capabilities.
+    float maxLuminance = 1.0f;
+
+    // Output dataspace that will be populated if wide color gamut is used, or
+    // ui::Dataspace::UNKNOWN otherwise.
+    ui::Dataspace outputDataspace = ui::Dataspace::UNKNOWN;
+
+    // Additional color transform to apply in linear space after transforming
+    // to the output dataspace.
+    mat4 colorTransform = mat4();
+
+    // Region that will be cleared to (0, 0, 0, 1) prior to rendering.
+    // This is specified in layer-stack space.
+    Region clearRegion = Region::INVALID_REGION;
+
+    // An additional orientation flag to be applied after clipping the output.
+    // By way of example, this may be used for supporting fullscreen screenshot
+    // capture of a device in landscape while the buffer is in portrait
+    // orientation.
+    uint32_t orientation = ui::Transform::ROT_0;
+};
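+
+// Example (illustrative values only): a 1080p SDR output with no extra
+// rotation could be described as
+//     DisplaySettings settings;
+//     settings.physicalDisplay = Rect(0, 0, 1920, 1080);
+//     settings.clip = Rect(0, 0, 1920, 1080);
+//     settings.maxLuminance = 500.0f;
+//     settings.outputDataspace = ui::Dataspace::SRGB;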
+
+static inline bool operator==(const DisplaySettings& lhs, const DisplaySettings& rhs) {
+    return lhs.physicalDisplay == rhs.physicalDisplay && lhs.clip == rhs.clip &&
+            lhs.maxLuminance == rhs.maxLuminance && lhs.outputDataspace == rhs.outputDataspace &&
+            lhs.colorTransform == rhs.colorTransform &&
+            lhs.clearRegion.hasSameRects(rhs.clearRegion) && lhs.orientation == rhs.orientation;
+}
+
+// Defining PrintTo helps with Google Tests.
+static inline void PrintTo(const DisplaySettings& settings, ::std::ostream* os) {
+    *os << "DisplaySettings {";
+    *os << "\n    .physicalDisplay = ";
+    PrintTo(settings.physicalDisplay, os);
+    *os << "\n    .clip = ";
+    PrintTo(settings.clip, os);
+    *os << "\n    .maxLuminance = " << settings.maxLuminance;
+    *os << "\n    .outputDataspace = ";
+    PrintTo(settings.outputDataspace, os);
+    *os << "\n    .colorTransform = " << settings.colorTransform;
+    *os << "\n    .clearRegion = ";
+    PrintTo(settings.clearRegion, os);
+    *os << "\n    .orientation = " << settings.orientation;
+    *os << "\n}";
+}
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
new file mode 100644
index 0000000..6511127
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Framebuffer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+class Framebuffer {
+public:
+    virtual ~Framebuffer() = default;
+
+    virtual bool setNativeWindowBuffer(ANativeWindowBuffer* nativeBuffer, bool isProtected,
+                                       const bool useFramebufferCache) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Image.h b/media/libstagefright/renderfright/include/renderengine/Image.h
new file mode 100644
index 0000000..3bb4731
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Image.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+struct ANativeWindowBuffer;
+
+namespace android {
+namespace renderengine {
+
+class Image {
+public:
+    virtual ~Image() = default;
+    virtual bool setNativeWindowBuffer(ANativeWindowBuffer* buffer, bool isProtected) = 0;
+};
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/LayerSettings.h b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
new file mode 100644
index 0000000..95e9367
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/LayerSettings.h
@@ -0,0 +1,258 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <iosfwd>
+
+#include <math/mat4.h>
+#include <math/vec3.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/FloatRect.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
+#include <ui/Transform.h>
+
+namespace android {
+namespace renderengine {
+
+// Metadata describing the input buffer to render from.
+struct Buffer {
+    // Buffer containing the image that we will render.
+    // If buffer == nullptr, then the rest of the fields in this struct will be
+    // ignored.
+    sp<GraphicBuffer> buffer = nullptr;
+
+    // Fence that will fire when the buffer is ready to be bound.
+    sp<Fence> fence = nullptr;
+
+    // Texture identifier to bind the external texture to.
+    // TODO(alecmouri): This is GL-specific...make the type backend-agnostic.
+    uint32_t textureName = 0;
+
+    // Whether to use filtering when rendering the texture.
+    bool useTextureFiltering = false;
+
+    // Transform matrix to apply to texture coordinates.
+    mat4 textureTransform = mat4();
+
+    // Whether to use pre-multiplied alpha.
+    bool usePremultipliedAlpha = true;
+
+    // Override flag indicating that the alpha of each pixel in the buffer *must*
+    // be treated as 1.0. LayerSettings::alpha is still applied if isOpaque==true;
+    // this flag only overrides the alpha channel of the buffer.
+    bool isOpaque = false;
+
+    // HDR color-space setting for Y410.
+    bool isY410BT2020 = false;
+    float maxMasteringLuminance = 0.0;
+    float maxContentLuminance = 0.0;
+};
+
+// Metadata describing the layer geometry.
+struct Geometry {
+    // Boundaries of the layer.
+    FloatRect boundaries = FloatRect();
+
+    // Transform matrix to apply to mesh coordinates.
+    mat4 positionTransform = mat4();
+
+    // Radius of rounded corners, if greater than 0. Otherwise, this layer's
+    // corners are not rounded.
+    // Having corner radius will force GPU composition on the layer and its children, drawing it
+    // with a special shader. The shader will receive the radius and the crop rectangle as input,
+    // modifying the opacity of the destination texture, multiplying it by a number between 0 and 1.
+    // We query Layer#getRoundedCornerState() to retrieve the radius as well as the rounded crop
+    // rectangle to figure out how to apply the radius for this layer. The crop rectangle will be
+    // in local layer coordinate space, so we have to take the layer transform into account when
+    // walking up the tree.
+    float roundedCornersRadius = 0.0;
+
+    // Rectangle within which corners will be rounded.
+    FloatRect roundedCornersCrop = FloatRect();
+};
+
+// Descriptor of the source pixels for this layer.
+struct PixelSource {
+    // Source buffer
+    Buffer buffer = Buffer();
+
+    // The solid color with which to fill the layer.
+    // This should only be populated if we don't render from an application
+    // buffer.
+    half3 solidColor = half3(0.0f, 0.0f, 0.0f);
+};
+
+/*
+ * Contains the configuration for the shadows drawn by single layer. Shadow follows
+ * material design guidelines.
+ */
+struct ShadowSettings {
+    // Color of the ambient shadow. The alpha is premultiplied.
+    vec4 ambientColor = vec4();
+
+    // Color of the spot shadow. The alpha is premultiplied. The position of the spot shadow
+    // depends on the light position.
+    vec4 spotColor = vec4();
+
+    // Position of the light source used to cast the spot shadow.
+    vec3 lightPos = vec3();
+
+    // Radius of the spot light source. A smaller radius produces sharper edges;
+    // a larger radius produces softer shadows.
+    float lightRadius = 0.f;
+
+    // Length of the cast shadow. If length is <= 0.f no shadows will be drawn.
+    float length = 0.f;
+
+    // If true, the casting layer is translucent and the shadow needs to fill the bounds.
+    // Otherwise the shadow will only be drawn around the edges of the casting layer.
+    bool casterIsTranslucent = false;
+};
+
+// The settings that RenderEngine requires for correctly rendering a Layer.
+struct LayerSettings {
+    // Geometry information
+    Geometry geometry = Geometry();
+
+    // Source pixels for this layer.
+    PixelSource source = PixelSource();
+
+    // Alpha option to blend with the source pixels
+    half alpha = half(0.0);
+
+    // Color space describing how the source pixels should be interpreted.
+    ui::Dataspace sourceDataspace = ui::Dataspace::UNKNOWN;
+
+    // Additional layer-specific color transform to be applied before the global
+    // transform.
+    mat4 colorTransform = mat4();
+
+    // True if blending will be forced to be disabled.
+    bool disableBlending = false;
+
+    ShadowSettings shadow;
+
+    int backgroundBlurRadius = 0;
+};
+
+// Keep in sync with custom comparison function in
+// compositionengine/impl/ClientCompositionRequestCache.cpp
+static inline bool operator==(const Buffer& lhs, const Buffer& rhs) {
+    return lhs.buffer == rhs.buffer && lhs.fence == rhs.fence &&
+            lhs.textureName == rhs.textureName &&
+            lhs.useTextureFiltering == rhs.useTextureFiltering &&
+            lhs.textureTransform == rhs.textureTransform &&
+            lhs.usePremultipliedAlpha == rhs.usePremultipliedAlpha &&
+            lhs.isOpaque == rhs.isOpaque && lhs.isY410BT2020 == rhs.isY410BT2020 &&
+            lhs.maxMasteringLuminance == rhs.maxMasteringLuminance &&
+            lhs.maxContentLuminance == rhs.maxContentLuminance;
+}
+
+static inline bool operator==(const Geometry& lhs, const Geometry& rhs) {
+    return lhs.boundaries == rhs.boundaries && lhs.positionTransform == rhs.positionTransform &&
+            lhs.roundedCornersRadius == rhs.roundedCornersRadius &&
+            lhs.roundedCornersCrop == rhs.roundedCornersCrop;
+}
+
+static inline bool operator==(const PixelSource& lhs, const PixelSource& rhs) {
+    return lhs.buffer == rhs.buffer && lhs.solidColor == rhs.solidColor;
+}
+
+static inline bool operator==(const ShadowSettings& lhs, const ShadowSettings& rhs) {
+    return lhs.ambientColor == rhs.ambientColor && lhs.spotColor == rhs.spotColor &&
+            lhs.lightPos == rhs.lightPos && lhs.lightRadius == rhs.lightRadius &&
+            lhs.length == rhs.length && lhs.casterIsTranslucent == rhs.casterIsTranslucent;
+}
+
+static inline bool operator==(const LayerSettings& lhs, const LayerSettings& rhs) {
+    return lhs.geometry == rhs.geometry && lhs.source == rhs.source && lhs.alpha == rhs.alpha &&
+            lhs.sourceDataspace == rhs.sourceDataspace &&
+            lhs.colorTransform == rhs.colorTransform &&
+            lhs.disableBlending == rhs.disableBlending && lhs.shadow == rhs.shadow &&
+            lhs.backgroundBlurRadius == rhs.backgroundBlurRadius;
+}
+
+// Defining PrintTo helps with Google Tests.
+
+static inline void PrintTo(const Buffer& settings, ::std::ostream* os) {
+    *os << "Buffer {";
+    *os << "\n    .buffer = " << settings.buffer.get();
+    *os << "\n    .fence = " << settings.fence.get();
+    *os << "\n    .textureName = " << settings.textureName;
+    *os << "\n    .useTextureFiltering = " << settings.useTextureFiltering;
+    *os << "\n    .textureTransform = " << settings.textureTransform;
+    *os << "\n    .usePremultipliedAlpha = " << settings.usePremultipliedAlpha;
+    *os << "\n    .isOpaque = " << settings.isOpaque;
+    *os << "\n    .isY410BT2020 = " << settings.isY410BT2020;
+    *os << "\n    .maxMasteringLuminance = " << settings.maxMasteringLuminance;
+    *os << "\n    .maxContentLuminance = " << settings.maxContentLuminance;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const Geometry& settings, ::std::ostream* os) {
+    *os << "Geometry {";
+    *os << "\n    .boundaries = ";
+    PrintTo(settings.boundaries, os);
+    *os << "\n    .positionTransform = " << settings.positionTransform;
+    *os << "\n    .roundedCornersRadius = " << settings.roundedCornersRadius;
+    *os << "\n    .roundedCornersCrop = ";
+    PrintTo(settings.roundedCornersCrop, os);
+    *os << "\n}";
+}
+
+static inline void PrintTo(const PixelSource& settings, ::std::ostream* os) {
+    *os << "PixelSource {";
+    *os << "\n    .buffer = ";
+    PrintTo(settings.buffer, os);
+    *os << "\n    .solidColor = " << settings.solidColor;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const ShadowSettings& settings, ::std::ostream* os) {
+    *os << "ShadowSettings {";
+    *os << "\n    .ambientColor = " << settings.ambientColor;
+    *os << "\n    .spotColor = " << settings.spotColor;
+    *os << "\n    .lightPos = " << settings.lightPos;
+    *os << "\n    .lightRadius = " << settings.lightRadius;
+    *os << "\n    .length = " << settings.length;
+    *os << "\n    .casterIsTranslucent = " << settings.casterIsTranslucent;
+    *os << "\n}";
+}
+
+static inline void PrintTo(const LayerSettings& settings, ::std::ostream* os) {
+    *os << "LayerSettings {";
+    *os << "\n    .geometry = ";
+    PrintTo(settings.geometry, os);
+    *os << "\n    .source = ";
+    PrintTo(settings.source, os);
+    *os << "\n    .alpha = " << settings.alpha;
+    *os << "\n    .sourceDataspace = ";
+    PrintTo(settings.sourceDataspace, os);
+    *os << "\n    .colorTransform = " << settings.colorTransform;
+    *os << "\n    .disableBlending = " << settings.disableBlending;
+    *os << "\n    .backgroundBlurRadius = " << settings.backgroundBlurRadius;
+    *os << "\n    .shadow = ";
+    PrintTo(settings.shadow, os);
+    *os << "\n}";
+}
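+
+// A minimal sketch of how these printers get used (not part of the original
+// header): Google Test finds PrintTo via argument-dependent lookup, so a
+// failing assertion on these types prints the fields above rather than raw
+// bytes. The values below are illustrative only.
+//
+//   renderengine::LayerSettings expected;
+//   renderengine::LayerSettings actual;
+//   expected.alpha = 1.0f;
+//   actual.alpha = 0.5f;
+//   EXPECT_EQ(expected, actual);  // failure message is formatted via PrintTo(LayerSettings)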
+
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/Mesh.h b/media/libstagefright/renderfright/include/renderengine/Mesh.h
new file mode 100644
index 0000000..167f13f
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Mesh.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_MESH_H
+#define SF_RENDER_ENGINE_MESH_H
+
+#include <vector>
+
+#include <stdint.h>
+
+namespace android {
+namespace renderengine {
+
+class Mesh {
+public:
+    class Builder;
+
+    enum Primitive {
+        TRIANGLES = 0x0004,      // GL_TRIANGLES
+        TRIANGLE_STRIP = 0x0005, // GL_TRIANGLE_STRIP
+        TRIANGLE_FAN = 0x0006    // GL_TRIANGLE_FAN
+    };
+
+    ~Mesh() = default;
+
+    /*
+     * VertexArray handles the stride automatically.
+     */
+    template <typename TYPE>
+    class VertexArray {
+        friend class Mesh;
+        float* mData;
+        size_t mStride;
+        size_t mOffset = 0;
+        VertexArray(float* data, size_t stride) : mData(data), mStride(stride) {}
+
+    public:
+        // Returns a vertex array at an offset so it's easier to append attributes from
+        // multiple sources.
+        VertexArray(VertexArray<TYPE>& other, size_t offset)
+              : mData(other.mData), mStride(other.mStride), mOffset(offset) {}
+
+        TYPE& operator[](size_t index) {
+            return *reinterpret_cast<TYPE*>(&mData[(index + mOffset) * mStride]);
+        }
+        TYPE const& operator[](size_t index) const {
+            return *reinterpret_cast<TYPE const*>(&mData[(index + mOffset) * mStride]);
+        }
+    };
+
+    template <typename TYPE>
+    VertexArray<TYPE> getPositionArray() {
+        return VertexArray<TYPE>(getPositions(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getTexCoordArray() {
+        return VertexArray<TYPE>(getTexCoords(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getCropCoordArray() {
+        return VertexArray<TYPE>(getCropCoords(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getShadowColorArray() {
+        return VertexArray<TYPE>(getShadowColor(), mStride);
+    }
+
+    template <typename TYPE>
+    VertexArray<TYPE> getShadowParamsArray() {
+        return VertexArray<TYPE>(getShadowParams(), mStride);
+    }
+
+    uint16_t* getIndicesArray() { return getIndices(); }
+
+    Primitive getPrimitive() const;
+
+    // returns a pointer to the vertex positions
+    float const* getPositions() const;
+
+    // returns a pointer to the vertex texture coordinates
+    float const* getTexCoords() const;
+
+    // returns a pointer to the vertex crop coordinates
+    float const* getCropCoords() const;
+
+    // returns a pointer to the shadow colors
+    float const* getShadowColor() const;
+
+    // returns a pointer to the shadow params
+    float const* getShadowParams() const;
+
+    // returns a pointer to indices
+    uint16_t const* getIndices() const;
+
+    // number of vertices in this mesh
+    size_t getVertexCount() const;
+
+    // dimension of vertices
+    size_t getVertexSize() const;
+
+    // dimension of texture coordinates
+    size_t getTexCoordsSize() const;
+
+    size_t getShadowParamsSize() const;
+
+    size_t getShadowColorSize() const;
+
+    size_t getIndexCount() const;
+
+    // return stride in bytes
+    size_t getByteStride() const;
+
+    // return stride in floats
+    size_t getStride() const;
+
+private:
+    Mesh(Primitive primitive, size_t vertexCount, size_t vertexSize, size_t texCoordSize,
+         size_t cropCoordsSize, size_t shadowColorSize, size_t shadowParamsSize, size_t indexCount);
+    Mesh(const Mesh&);
+    Mesh& operator=(const Mesh&);
+    Mesh const& operator=(const Mesh&) const;
+
+    float* getPositions();
+    float* getTexCoords();
+    float* getCropCoords();
+    float* getShadowColor();
+    float* getShadowParams();
+    uint16_t* getIndices();
+
+    std::vector<float> mVertices;
+    size_t mVertexCount;
+    size_t mVertexSize;
+    size_t mTexCoordsSize;
+    size_t mCropCoordsSize;
+    size_t mShadowColorSize;
+    size_t mShadowParamsSize;
+    size_t mStride;
+    Primitive mPrimitive;
+    std::vector<uint16_t> mIndices;
+    size_t mIndexCount;
+};
+
+class Mesh::Builder {
+public:
+    Builder& setPrimitive(Primitive primitive) {
+        mPrimitive = primitive;
+        return *this;
+    };
+    Builder& setVertices(size_t vertexCount, size_t vertexSize) {
+        mVertexCount = vertexCount;
+        mVertexSize = vertexSize;
+        return *this;
+    };
+    Builder& setTexCoords(size_t texCoordsSize) {
+        mTexCoordsSize = texCoordsSize;
+        return *this;
+    };
+    Builder& setCropCoords(size_t cropCoordsSize) {
+        mCropCoordsSize = cropCoordsSize;
+        return *this;
+    };
+    Builder& setShadowAttrs() {
+        mShadowParamsSize = 3;
+        mShadowColorSize = 4;
+        return *this;
+    };
+    Builder& setIndices(size_t indexCount) {
+        mIndexCount = indexCount;
+        return *this;
+    };
+    Mesh build() const {
+        return Mesh{mPrimitive,      mVertexCount,     mVertexSize,       mTexCoordsSize,
+                    mCropCoordsSize, mShadowColorSize, mShadowParamsSize, mIndexCount};
+    }
+
+private:
+    size_t mVertexCount = 0;
+    size_t mVertexSize = 0;
+    size_t mTexCoordsSize = 0;
+    size_t mCropCoordsSize = 0;
+    size_t mShadowColorSize = 0;
+    size_t mShadowParamsSize = 0;
+    size_t mIndexCount = 0;
+    Primitive mPrimitive;
+};
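+
+// Illustrative usage only (not part of the original header); vec2 from
+// math/vec2.h and the 2D position layout are assumptions. A quad could be
+// assembled through the Builder and filled via the typed VertexArray views:
+//
+//   Mesh mesh = Mesh::Builder()
+//                       .setPrimitive(Mesh::TRIANGLE_FAN)
+//                       .setVertices(4 /* count */, 2 /* x, y per vertex */)
+//                       .setTexCoords(2 /* u, v per vertex */)
+//                       .build();
+//   Mesh::VertexArray<vec2> position = mesh.getPositionArray<vec2>();
+//   position[0] = vec2(0, 0);
+//   position[1] = vec2(0, 1);
+//   position[2] = vec2(1, 1);
+//   position[3] = vec2(1, 0);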
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_MESH_H */
diff --git a/media/libstagefright/renderfright/include/renderengine/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
new file mode 100644
index 0000000..373d07b
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/RenderEngine.h
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDERENGINE_H_
+#define SF_RENDERENGINE_H_
+
+#include <stdint.h>
+#include <sys/types.h>
+#include <memory>
+
+#include <android-base/unique_fd.h>
+#include <math/mat4.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/Framebuffer.h>
+#include <renderengine/Image.h>
+#include <renderengine/LayerSettings.h>
+#include <ui/GraphicTypes.h>
+#include <ui/Transform.h>
+
+/**
+ * Allows setting the RenderEngine backend to GLES (default) or Vulkan (NOT yet supported).
+ */
+#define PROPERTY_DEBUG_RENDERENGINE_BACKEND "debug.stagefright.renderengine.backend"
+
+struct ANativeWindowBuffer;
+
+namespace android {
+
+class Rect;
+class Region;
+
+namespace renderengine {
+
+class BindNativeBufferAsFramebuffer;
+class Image;
+class Mesh;
+class Texture;
+struct RenderEngineCreationArgs;
+
+namespace threaded {
+class RenderEngineThreaded;
+}
+
+namespace impl {
+class RenderEngine;
+}
+
+enum class Protection {
+    UNPROTECTED = 1,
+    PROTECTED = 2,
+};
+
+class RenderEngine {
+public:
+    enum class ContextPriority {
+        LOW = 1,
+        MEDIUM = 2,
+        HIGH = 3,
+    };
+
+    enum class RenderEngineType {
+        GLES = 1,
+        THREADED = 2,
+    };
+
+    static std::unique_ptr<RenderEngine> create(const RenderEngineCreationArgs& args);
+
+    virtual ~RenderEngine() = 0;
+
+    // ----- BEGIN DEPRECATED INTERFACE -----
+    // This interface remains in use until a suitable replacement is built, but
+    // should be considered deprecated, except for a few methods that may still
+    // be needed to support legacy behavior.
+    virtual void primeCache() const = 0;
+
+    // dump the extension strings. always call the base class.
+    virtual void dump(std::string& result) = 0;
+
+    virtual bool useNativeFenceSync() const = 0;
+    virtual bool useWaitSync() const = 0;
+    virtual void genTextures(size_t count, uint32_t* names) = 0;
+    virtual void deleteTextures(size_t count, uint32_t const* names) = 0;
+    virtual void bindExternalTextureImage(uint32_t texName, const Image& image) = 0;
+    // Legacy public method used by devices that don't support native fence
+    // synchronization in their GPU driver, as this method provides implicit
+    // synchronization for latching buffers.
+    virtual status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                               const sp<Fence>& fence) = 0;
+    // Caches Image resources for this buffer, but does not bind the buffer to
+    // a particular texture.
+    // Note that work is deferred to an additional thread, i.e. this call
+    // is made asynchronously, but the caller can expect that cache/unbind calls
+    // are performed in a manner that's conflict serializable, i.e. unbinding
+    // a buffer should never occur before binding the buffer if the caller
+    // called {bind, cache}ExternalTextureBuffer before calling unbind.
+    virtual void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) = 0;
+    // Removes internal resources referenced by the bufferId. This method should be
+    // invoked when the caller will no longer hold a reference to a GraphicBuffer
+    // and needs to clean up its resources.
+    // Note that work is deferred to an additional thread, i.e. this call
+    // is made asynchronously, but the caller can expect that cache/unbind calls
+    // are performed in a manner that's conflict serializable, i.e. unbinding
+    // a buffer should never occur before binding the buffer if the caller
+    // called {bind, cache}ExternalTextureBuffer before calling unbind.
+    virtual void unbindExternalTextureBuffer(uint64_t bufferId) = 0;
+    // A native buffer must be bound before calling setViewportAndProjection.
+    // Returns NO_ERROR when the bind succeeds, or NO_MEMORY when there is no memory for allocation.
+    virtual status_t bindFrameBuffer(Framebuffer* framebuffer) = 0;
+    virtual void unbindFrameBuffer(Framebuffer* framebuffer) = 0;
+
+    enum class CleanupMode {
+        CLEAN_OUTPUT_RESOURCES,
+        CLEAN_ALL,
+    };
+    // Clean-up method that should be called on the main thread after the
+    // drawFence returned by drawLayers fires. This method will free up
+    // resources used by the most recently drawn frame. If the frame is still
+    // being drawn, then this call is silently ignored.
+    //
+    // If mode is CLEAN_OUTPUT_RESOURCES, then only resources related to the
+    // output framebuffer are cleaned up, including the sibling texture.
+    //
+    // If mode is CLEAN_ALL, then we also cleanup resources related to any input
+    // buffers.
+    //
+    // Returns true if resources were cleaned up, and false if we didn't need to
+    // do any work.
+    virtual bool cleanupPostRender(CleanupMode mode = CleanupMode::CLEAN_OUTPUT_RESOURCES) = 0;
+
+    // queries
+    virtual size_t getMaxTextureSize() const = 0;
+    virtual size_t getMaxViewportDims() const = 0;
+
+    // ----- END DEPRECATED INTERFACE -----
+
+    // ----- BEGIN NEW INTERFACE -----
+
+    virtual bool isProtected() const = 0;
+    virtual bool supportsProtectedContent() const = 0;
+    virtual bool useProtectedContext(bool useProtectedContext) = 0;
+
+    // Renders layers for a particular display via GPU composition. This method
+    // should be called for every display that needs to be rendered via the GPU.
+    // @param display The display-wide settings that should be applied prior to
+    // drawing any layers.
+    //
+    // Assumptions when calling this method:
+    // 1. There is exactly one caller - i.e. multi-threading is not supported.
+    // 2. Additional threads may be calling the {bind,cache}ExternalTexture
+    // methods above. But the main thread is responsible for holding resources
+    // such that Image destruction does not occur while this method is called.
+    //
+    // TODO(b/136806342): This behavior should ideally be fixed since the above
+    // two assumptions are brittle, as conditional thread safety may be
+    // insufficient when maximizing rendering performance in the future.
+    //
+    // @param layers The layers to draw onto the display, in Z-order.
+    // @param buffer The buffer which will be drawn to. This buffer will be
+    // ready once drawFence fires.
+    // @param useFramebufferCache True if the framebuffer cache should be used.
+    // If an implementation does not cache output framebuffers, then this
+    // parameter does nothing.
+    // @param bufferFence Fence signalling that the buffer is ready to be drawn
+    // to.
+    // @param drawFence A pointer to a fence, which will fire when the buffer
+    // has been drawn to and is ready to be examined. The fence will be
+    // initialized by this method. The caller will be responsible for owning the
+    // fence.
+    // @return An error code indicating whether drawing was successful. For
+    // now, this always returns NO_ERROR.
+    virtual status_t drawLayers(const DisplaySettings& display,
+                                const std::vector<const LayerSettings*>& layers,
+                                const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                                base::unique_fd&& bufferFence, base::unique_fd* drawFence) = 0;
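+    //
+    // A minimal calling sketch (illustrative only; the display settings,
+    // layer list, output buffer and acquire fence are assumed to be prepared
+    // by the caller):
+    //
+    //   base::unique_fd drawFence;
+    //   status_t err = renderEngine->drawLayers(displaySettings, layers, outputBuffer,
+    //                                           true /* useFramebufferCache */,
+    //                                           std::move(bufferFence), &drawFence);
+    //   // Once drawFence signals, the output buffer may be read and
+    //   // cleanupPostRender() may be called on the main thread.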
+
+protected:
+    // Gets a framebuffer to render to. This framebuffer may or may not be
+    // cached depending on the implementation.
+    //
+    // Note that this method does not transfer ownership, so the caller must not
+    // live longer than RenderEngine.
+    virtual Framebuffer* getFramebufferForDrawing() = 0;
+    friend class BindNativeBufferAsFramebuffer;
+    friend class threaded::RenderEngineThreaded;
+};
+
+struct RenderEngineCreationArgs {
+    int pixelFormat;
+    uint32_t imageCacheSize;
+    bool useColorManagement;
+    bool enableProtectedContext;
+    bool precacheToneMapperShaderOnly;
+    bool supportsBackgroundBlur;
+    RenderEngine::ContextPriority contextPriority;
+    RenderEngine::RenderEngineType renderEngineType;
+    bool realtime;
+
+    struct Builder;
+
+private:
+    // must be created by Builder via constructor with full argument list
+    RenderEngineCreationArgs(int _pixelFormat, uint32_t _imageCacheSize, bool _useColorManagement,
+                             bool _enableProtectedContext, bool _precacheToneMapperShaderOnly,
+                             bool _supportsBackgroundBlur,
+                             RenderEngine::ContextPriority _contextPriority,
+                             RenderEngine::RenderEngineType _renderEngineType,
+                             bool _realtime)
+          : pixelFormat(_pixelFormat),
+            imageCacheSize(_imageCacheSize),
+            useColorManagement(_useColorManagement),
+            enableProtectedContext(_enableProtectedContext),
+            precacheToneMapperShaderOnly(_precacheToneMapperShaderOnly),
+            supportsBackgroundBlur(_supportsBackgroundBlur),
+            contextPriority(_contextPriority),
+            renderEngineType(_renderEngineType),
+            realtime(_realtime) {}
+    RenderEngineCreationArgs() = delete;
+};
+
+struct RenderEngineCreationArgs::Builder {
+    Builder() {}
+
+    Builder& setPixelFormat(int pixelFormat) {
+        this->pixelFormat = pixelFormat;
+        return *this;
+    }
+    Builder& setImageCacheSize(uint32_t imageCacheSize) {
+        this->imageCacheSize = imageCacheSize;
+        return *this;
+    }
+    Builder& setUseColorManagerment(bool useColorManagement) {
+        this->useColorManagement = useColorManagement;
+        return *this;
+    }
+    Builder& setEnableProtectedContext(bool enableProtectedContext) {
+        this->enableProtectedContext = enableProtectedContext;
+        return *this;
+    }
+    Builder& setPrecacheToneMapperShaderOnly(bool precacheToneMapperShaderOnly) {
+        this->precacheToneMapperShaderOnly = precacheToneMapperShaderOnly;
+        return *this;
+    }
+    Builder& setSupportsBackgroundBlur(bool supportsBackgroundBlur) {
+        this->supportsBackgroundBlur = supportsBackgroundBlur;
+        return *this;
+    }
+    Builder& setContextPriority(RenderEngine::ContextPriority contextPriority) {
+        this->contextPriority = contextPriority;
+        return *this;
+    }
+    Builder& setRenderEngineType(RenderEngine::RenderEngineType renderEngineType) {
+        this->renderEngineType = renderEngineType;
+        return *this;
+    }
+    Builder& setRealtime(bool realtime) {
+        this->realtime = realtime;
+        return *this;
+    }
+    RenderEngineCreationArgs build() const {
+        return RenderEngineCreationArgs(pixelFormat, imageCacheSize, useColorManagement,
+                                        enableProtectedContext, precacheToneMapperShaderOnly,
+                                        supportsBackgroundBlur, contextPriority, renderEngineType,
+                                        realtime);
+    }
+
+private:
+    // 1 means RGBA_8888
+    int pixelFormat = 1;
+    uint32_t imageCacheSize = 0;
+    bool useColorManagement = true;
+    bool enableProtectedContext = false;
+    bool precacheToneMapperShaderOnly = false;
+    bool supportsBackgroundBlur = false;
+    RenderEngine::ContextPriority contextPriority = RenderEngine::ContextPriority::MEDIUM;
+    RenderEngine::RenderEngineType renderEngineType = RenderEngine::RenderEngineType::GLES;
+    bool realtime = true;
+};
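+
+// Typical construction sketch (illustrative only; see tests/RenderEngineTest.cpp
+// in this change for a complete example):
+//
+//   std::unique_ptr<RenderEngine> renderEngine = RenderEngine::create(
+//           RenderEngineCreationArgs::Builder()
+//                   .setPixelFormat(static_cast<int>(ui::PixelFormat::RGBA_8888))
+//                   .setRenderEngineType(RenderEngine::RenderEngineType::GLES)
+//                   .build());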
+
+class BindNativeBufferAsFramebuffer {
+public:
+    BindNativeBufferAsFramebuffer(RenderEngine& engine, ANativeWindowBuffer* buffer,
+                                  const bool useFramebufferCache)
+          : mEngine(engine), mFramebuffer(mEngine.getFramebufferForDrawing()), mStatus(NO_ERROR) {
+        mStatus = mFramebuffer->setNativeWindowBuffer(buffer, mEngine.isProtected(),
+                                                      useFramebufferCache)
+                ? mEngine.bindFrameBuffer(mFramebuffer)
+                : NO_MEMORY;
+    }
+    ~BindNativeBufferAsFramebuffer() {
+        mFramebuffer->setNativeWindowBuffer(nullptr, false, /*arbitrary*/ true);
+        mEngine.unbindFrameBuffer(mFramebuffer);
+    }
+    status_t getStatus() const { return mStatus; }
+
+private:
+    RenderEngine& mEngine;
+    Framebuffer* mFramebuffer;
+    status_t mStatus;
+};
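+
+// RAII usage sketch (illustrative only; the ANativeWindowBuffer is assumed to
+// be provided by the caller):
+//
+//   {
+//       BindNativeBufferAsFramebuffer bind(*renderEngine, windowBuffer,
+//                                          false /* useFramebufferCache */);
+//       if (bind.getStatus() != NO_ERROR) {
+//           return bind.getStatus();
+//       }
+//       // ... issue draw calls against the bound framebuffer ...
+//   } // the destructor unbinds the framebuffer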
+
+class SyncFeatures {
+public:
+    static SyncFeatures &GetInstance();
+    bool useNativeFenceSync() const;
+    bool useFenceSync() const;
+    bool useWaitSync() const;
+
+private:
+    SyncFeatures();
+    bool mHasNativeFenceSync;
+    bool mHasFenceSync;
+    bool mHasWaitSync;
+};
+
+namespace impl {
+
+// impl::RenderEngine contains common implementation that is graphics back-end agnostic.
+class RenderEngine : public renderengine::RenderEngine {
+public:
+    virtual ~RenderEngine() = 0;
+
+    bool useNativeFenceSync() const override;
+    bool useWaitSync() const override;
+
+protected:
+    RenderEngine(const RenderEngineCreationArgs& args);
+    const RenderEngineCreationArgs mArgs;
+};
+
+} // namespace impl
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDERENGINE_H_ */
diff --git a/media/libstagefright/renderfright/include/renderengine/Texture.h b/media/libstagefright/renderfright/include/renderengine/Texture.h
new file mode 100644
index 0000000..c69ace0
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/Texture.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_TEXTURE_H
+#define SF_RENDER_ENGINE_TEXTURE_H
+
+#include <stdint.h>
+
+#include <math/mat4.h>
+
+namespace android {
+namespace renderengine {
+
+class Texture {
+public:
+    enum Target { TEXTURE_2D = 0x0DE1, TEXTURE_EXTERNAL = 0x8D65 };
+
+    Texture();
+    Texture(Target textureTarget, uint32_t textureName);
+    ~Texture();
+
+    void init(Target textureTarget, uint32_t textureName);
+
+    void setMatrix(float const* matrix);
+    void setFiltering(bool enabled);
+    void setDimensions(size_t width, size_t height);
+
+    uint32_t getTextureName() const;
+    uint32_t getTextureTarget() const;
+
+    const mat4& getMatrix() const;
+    bool getFiltering() const;
+    size_t getWidth() const;
+    size_t getHeight() const;
+
+private:
+    uint32_t mTextureName;
+    uint32_t mTextureTarget;
+    size_t mWidth;
+    size_t mHeight;
+    bool mFiltering;
+    mat4 mTextureMatrix;
+};
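+
+// Minimal setup sketch (illustrative only; textureName is assumed to come
+// from RenderEngine::genTextures and buffer from the layer being drawn):
+//
+//   Texture texture(Texture::TEXTURE_EXTERNAL, textureName);
+//   texture.setDimensions(buffer->getWidth(), buffer->getHeight());
+//   texture.setFiltering(true);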
+
+} // namespace renderengine
+} // namespace android
+#endif /* SF_RENDER_ENGINE_TEXTURE_H */
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
new file mode 100644
index 0000000..dfb6a4e
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Framebuffer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Framebuffer : public renderengine::Framebuffer {
+public:
+    Framebuffer();
+    ~Framebuffer() override;
+
+    MOCK_METHOD3(setNativeWindowBuffer, bool(ANativeWindowBuffer*, bool, const bool));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/Image.h b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
new file mode 100644
index 0000000..2b0eed1
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/Image.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class Image : public renderengine::Image {
+public:
+    Image();
+    ~Image() override;
+
+    MOCK_METHOD2(setNativeWindowBuffer, bool(ANativeWindowBuffer* buffer, bool isProtected));
+};
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
new file mode 100644
index 0000000..e03dd58
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/mock/RenderEngine.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <renderengine/DisplaySettings.h>
+#include <renderengine/LayerSettings.h>
+#include <renderengine/Mesh.h>
+#include <renderengine/RenderEngine.h>
+#include <renderengine/Texture.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/Region.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+class RenderEngine : public renderengine::RenderEngine {
+public:
+    RenderEngine();
+    ~RenderEngine() override;
+
+    MOCK_METHOD0(getFramebufferForDrawing, Framebuffer*());
+    MOCK_CONST_METHOD0(primeCache, void());
+    MOCK_METHOD1(dump, void(std::string&));
+    MOCK_CONST_METHOD0(useNativeFenceSync, bool());
+    MOCK_CONST_METHOD0(useWaitSync, bool());
+    MOCK_CONST_METHOD0(isCurrent, bool());
+    MOCK_METHOD2(genTextures, void(size_t, uint32_t*));
+    MOCK_METHOD2(deleteTextures, void(size_t, uint32_t const*));
+    MOCK_METHOD2(bindExternalTextureImage, void(uint32_t, const renderengine::Image&));
+    MOCK_METHOD1(cacheExternalTextureBuffer, void(const sp<GraphicBuffer>&));
+    MOCK_METHOD3(bindExternalTextureBuffer,
+                 status_t(uint32_t, const sp<GraphicBuffer>&, const sp<Fence>&));
+    MOCK_METHOD1(unbindExternalTextureBuffer, void(uint64_t));
+    MOCK_METHOD1(bindFrameBuffer, status_t(renderengine::Framebuffer*));
+    MOCK_METHOD1(unbindFrameBuffer, void(renderengine::Framebuffer*));
+    MOCK_METHOD1(drawMesh, void(const renderengine::Mesh&));
+    MOCK_CONST_METHOD0(getMaxTextureSize, size_t());
+    MOCK_CONST_METHOD0(getMaxViewportDims, size_t());
+    MOCK_CONST_METHOD0(isProtected, bool());
+    MOCK_CONST_METHOD0(supportsProtectedContent, bool());
+    MOCK_METHOD1(useProtectedContext, bool(bool));
+    MOCK_METHOD1(cleanupPostRender, bool(CleanupMode mode));
+    MOCK_METHOD6(drawLayers,
+                 status_t(const DisplaySettings&, const std::vector<const LayerSettings*>&,
+                          const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+                          base::unique_fd*));
+};
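+
+// Sketch of how the mock is typically wired into a test (illustrative only;
+// uses standard Google Mock macros):
+//
+//   mock::RenderEngine renderEngine;
+//   EXPECT_CALL(renderEngine, supportsProtectedContent())
+//           .WillOnce(testing::Return(false));
+//   EXPECT_CALL(renderEngine, drawLayers(testing::_, testing::_, testing::_,
+//                                        testing::_, testing::_, testing::_))
+//           .WillOnce(testing::Return(NO_ERROR));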
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/include/renderengine/private/Description.h b/media/libstagefright/renderfright/include/renderengine/private/Description.h
new file mode 100644
index 0000000..a62161a
--- /dev/null
+++ b/media/libstagefright/renderfright/include/renderengine/private/Description.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SF_RENDER_ENGINE_DESCRIPTION_H_
+#define SF_RENDER_ENGINE_DESCRIPTION_H_
+
+#include <renderengine/Texture.h>
+#include <ui/GraphicTypes.h>
+
+namespace android {
+namespace renderengine {
+
+/*
+ * This structure holds the state of the rendering engine. It is used to
+ * generate a corresponding GLSL program and to set the appropriate uniforms.
+ */
+struct Description {
+    enum class TransferFunction : int {
+        LINEAR,
+        SRGB,
+        ST2084,
+        HLG, // Hybrid Log-Gamma for HDR.
+    };
+
+    static TransferFunction dataSpaceToTransferFunction(ui::Dataspace dataSpace);
+
+    Description() = default;
+    ~Description() = default;
+
+    bool hasInputTransformMatrix() const;
+    bool hasOutputTransformMatrix() const;
+    bool hasColorMatrix() const;
+
+    // whether textures are premultiplied
+    bool isPremultipliedAlpha = false;
+    // whether this layer is marked as opaque
+    bool isOpaque = true;
+
+    // corner radius of the layer
+    float cornerRadius = 0;
+
+    // Size of the rounded rectangle we are cropping to
+    half2 cropSize;
+
+    // Texture this layer uses
+    Texture texture;
+    bool textureEnabled = false;
+
+    // color used when texturing is disabled or when setting alpha.
+    half4 color;
+
+    // true if the sampled pixel values are in Y410/BT2020 rather than RGBA
+    bool isY410BT2020 = false;
+
+    // transfer functions for the input/output
+    TransferFunction inputTransferFunction = TransferFunction::LINEAR;
+    TransferFunction outputTransferFunction = TransferFunction::LINEAR;
+
+    float displayMaxLuminance;
+    float maxMasteringLuminance;
+    float maxContentLuminance;
+
+    // projection matrix
+    mat4 projectionMatrix;
+
+    // The color matrix will be applied in linear space right before OETF.
+    mat4 colorMatrix;
+    mat4 inputTransformMatrix;
+    mat4 outputTransformMatrix;
+
+    // True if this layer will draw a shadow.
+    bool drawShadows = false;
+};
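+
+// Illustrative only: a Description for an opaque, textured sRGB layer might
+// be filled in roughly as follows before the GLSL program is generated (the
+// values are assumptions, not defaults used by any particular backend):
+//
+//   Description desc;
+//   desc.isOpaque = true;
+//   desc.textureEnabled = true;
+//   desc.inputTransferFunction = Description::TransferFunction::SRGB;
+//   desc.outputTransferFunction = Description::TransferFunction::SRGB;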
+
+} // namespace renderengine
+} // namespace android
+
+#endif /* SF_RENDER_ENGINE_DESCRIPTION_H_ */
diff --git a/media/libstagefright/renderfright/mock/Framebuffer.cpp b/media/libstagefright/renderfright/mock/Framebuffer.cpp
new file mode 100644
index 0000000..fbdcaab
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Framebuffer.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Framebuffer.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Framebuffer::Framebuffer() = default;
+Framebuffer::~Framebuffer() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/Image.cpp b/media/libstagefright/renderfright/mock/Image.cpp
new file mode 100644
index 0000000..57f4346
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/Image.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/Image.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+Image::Image() = default;
+Image::~Image() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/mock/RenderEngine.cpp b/media/libstagefright/renderfright/mock/RenderEngine.cpp
new file mode 100644
index 0000000..261636d
--- /dev/null
+++ b/media/libstagefright/renderfright/mock/RenderEngine.cpp
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <renderengine/mock/RenderEngine.h>
+
+namespace android {
+namespace renderengine {
+namespace mock {
+
+// The Google Mock documentation recommends explicit non-header instantiations
+// for better compile time performance.
+RenderEngine::RenderEngine() = default;
+RenderEngine::~RenderEngine() = default;
+
+} // namespace mock
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/tests/Android.bp b/media/libstagefright/renderfright/tests/Android.bp
new file mode 100644
index 0000000..e4b13fb
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/Android.bp
@@ -0,0 +1,50 @@
+// Copyright 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_test {
+    name: "librenderfright_test",
+    defaults: ["surfaceflinger_defaults"],
+    test_suites: ["device-tests"],
+    srcs: [
+        "RenderEngineTest.cpp",
+        "RenderEngineThreadedTest.cpp",
+    ],
+    static_libs: [
+        "libgmock",
+        "librenderfright",
+        "librenderfright_mocks",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libEGL",
+        "libGLESv2",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libprocessgroup",
+        "libsync",
+        "libui",
+        "libutils",
+    ],
+}
diff --git a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
new file mode 100644
index 0000000..2697ff4
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
@@ -0,0 +1,1469 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wconversion"
+
+#include <chrono>
+#include <condition_variable>
+#include <fstream>
+
+#include <cutils/properties.h>
+#include <gtest/gtest.h>
+#include <renderengine/RenderEngine.h>
+#include <sync/sync.h>
+#include <ui/PixelFormat.h>
+#include "../gl/GLESRenderEngine.h"
+#include "../threaded/RenderEngineThreaded.h"
+
+constexpr int DEFAULT_DISPLAY_WIDTH = 128;
+constexpr int DEFAULT_DISPLAY_HEIGHT = 256;
+constexpr int DEFAULT_DISPLAY_OFFSET = 64;
+constexpr bool WRITE_BUFFER_TO_FILE_ON_FAILURE = false;
+
+namespace android {
+
+struct RenderEngineTest : public ::testing::Test {
+    static void SetUpTestSuite() {
+        sRE = renderengine::gl::GLESRenderEngine::create(
+                renderengine::RenderEngineCreationArgs::Builder()
+                        .setPixelFormat(static_cast<int>(ui::PixelFormat::RGBA_8888))
+                        .setImageCacheSize(1)
+                        .setUseColorManagerment(false)
+                        .setEnableProtectedContext(false)
+                        .setPrecacheToneMapperShaderOnly(false)
+                        .setSupportsBackgroundBlur(true)
+                        .setContextPriority(renderengine::RenderEngine::ContextPriority::MEDIUM)
+                        .setRenderEngineType(renderengine::RenderEngine::RenderEngineType::GLES)
+                        .build());
+    }
+
+    static void TearDownTestSuite() {
+        // The ordering here is important - sCurrentBuffer must live longer
+        // than RenderEngine to avoid a null reference on tear-down.
+        sRE = nullptr;
+        sCurrentBuffer = nullptr;
+    }
+
+    static sp<GraphicBuffer> allocateDefaultBuffer() {
+        return new GraphicBuffer(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
+                                 HAL_PIXEL_FORMAT_RGBA_8888, 1,
+                                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+                                         GRALLOC_USAGE_HW_RENDER,
+                                 "output");
+    }
+
+    // Allocates a buffer of the given size (typically 1x1) to fill with a solid color
+    static sp<GraphicBuffer> allocateSourceBuffer(uint32_t width, uint32_t height) {
+        return new GraphicBuffer(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
+                                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
+                                         GRALLOC_USAGE_HW_TEXTURE,
+                                 "input");
+    }
+
+    RenderEngineTest() { mBuffer = allocateDefaultBuffer(); }
+
+    ~RenderEngineTest() {
+        if (WRITE_BUFFER_TO_FILE_ON_FAILURE && ::testing::Test::HasFailure()) {
+            writeBufferToFile("/data/texture_out_");
+        }
+        for (uint32_t texName : mTexNames) {
+            sRE->deleteTextures(1, &texName);
+            EXPECT_FALSE(sRE->isTextureNameKnownForTesting(texName));
+        }
+    }
+
+    void writeBufferToFile(const char* basename) {
+        std::string filename(basename);
+        filename.append(::testing::UnitTest::GetInstance()->current_test_info()->name());
+        filename.append(".ppm");
+        std::ofstream file(filename.c_str(), std::ios::binary);
+        if (!file.is_open()) {
+            ALOGE("Unable to open file: %s", filename.c_str());
+            ALOGE("You may need to do: \"adb shell setenforce 0\" to enable "
+                  "surfaceflinger to write debug images");
+            return;
+        }
+
+        uint8_t* pixels;
+        mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                      reinterpret_cast<void**>(&pixels));
+
+        file << "P6\n";
+        file << mBuffer->getWidth() << "\n";
+        file << mBuffer->getHeight() << "\n";
+        file << 255 << "\n";
+
+        std::vector<uint8_t> outBuffer(mBuffer->getWidth() * mBuffer->getHeight() * 3);
+        auto outPtr = reinterpret_cast<uint8_t*>(outBuffer.data());
+
+        for (uint32_t j = 0; j < mBuffer->getHeight(); j++) {
+            const uint8_t* src = pixels + (mBuffer->getStride() * j) * 4;
+            for (uint32_t i = 0; i < mBuffer->getWidth(); i++) {
+                // Only copy R, G and B components
+                outPtr[0] = src[0];
+                outPtr[1] = src[1];
+                outPtr[2] = src[2];
+                outPtr += 3;
+
+                src += 4;
+            }
+        }
+        file.write(reinterpret_cast<char*>(outBuffer.data()), outBuffer.size());
+        mBuffer->unlock();
+    }
+
+    void expectBufferColor(const Region& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
+        size_t c;
+        Rect const* rect = region.getArray(&c);
+        for (size_t i = 0; i < c; i++, rect++) {
+            expectBufferColor(*rect, r, g, b, a);
+        }
+    }
+
+    void expectBufferColor(const Rect& rect, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+                           uint8_t tolerance = 0) {
+        auto colorCompare = [tolerance](const uint8_t* colorA, const uint8_t* colorB) {
+            auto colorBitCompare = [tolerance](uint8_t a, uint8_t b) {
+                uint8_t tmp = a >= b ? a - b : b - a;
+                return tmp <= tolerance;
+            };
+            return std::equal(colorA, colorA + 4, colorB, colorBitCompare);
+        };
+
+        expectBufferColor(rect, r, g, b, a, colorCompare);
+    }
+
+    void expectBufferColor(const Rect& region, uint8_t r, uint8_t g, uint8_t b, uint8_t a,
+                           std::function<bool(const uint8_t* a, const uint8_t* b)> colorCompare) {
+        uint8_t* pixels;
+        mBuffer->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                      reinterpret_cast<void**>(&pixels));
+        int32_t maxFails = 10;
+        int32_t fails = 0;
+        for (int32_t j = 0; j < region.getHeight(); j++) {
+            const uint8_t* src =
+                    pixels + (mBuffer->getStride() * (region.top + j) + region.left) * 4;
+            for (int32_t i = 0; i < region.getWidth(); i++) {
+                const uint8_t expected[4] = {r, g, b, a};
+                bool equal = colorCompare(src, expected);
+                EXPECT_TRUE(equal)
+                        << "pixel @ (" << region.left + i << ", " << region.top + j << "): "
+                        << "expected (" << static_cast<uint32_t>(r) << ", "
+                        << static_cast<uint32_t>(g) << ", " << static_cast<uint32_t>(b) << ", "
+                        << static_cast<uint32_t>(a) << "), "
+                        << "got (" << static_cast<uint32_t>(src[0]) << ", "
+                        << static_cast<uint32_t>(src[1]) << ", " << static_cast<uint32_t>(src[2])
+                        << ", " << static_cast<uint32_t>(src[3]) << ")";
+                src += 4;
+                if (!equal && ++fails >= maxFails) {
+                    break;
+                }
+            }
+            if (fails >= maxFails) {
+                break;
+            }
+        }
+        mBuffer->unlock();
+    }
+
+    void expectAlpha(const Rect& rect, uint8_t a) {
+        auto colorCompare = [](const uint8_t* colorA, const uint8_t* colorB) {
+            return colorA[3] == colorB[3];
+        };
+        expectBufferColor(rect, 0.0f /* r */, 0.0f /*g */, 0.0f /* b */, a, colorCompare);
+    }
+
+    void expectShadowColor(const renderengine::LayerSettings& castingLayer,
+                           const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+                           const ubyte4& backgroundColor) {
+        const Rect casterRect(castingLayer.geometry.boundaries);
+        Region casterRegion = Region(casterRect);
+        const float casterCornerRadius = castingLayer.geometry.roundedCornersRadius;
+        if (casterCornerRadius > 0.0f) {
+            // ignore the corners if a corner radius is set
+            Rect cornerRect(casterCornerRadius, casterCornerRadius);
+            casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.left, casterRect.top));
+            casterRegion.subtractSelf(
+                    cornerRect.offsetTo(casterRect.right - casterCornerRadius, casterRect.top));
+            casterRegion.subtractSelf(
+                    cornerRect.offsetTo(casterRect.left, casterRect.bottom - casterCornerRadius));
+            casterRegion.subtractSelf(cornerRect.offsetTo(casterRect.right - casterCornerRadius,
+                                                          casterRect.bottom - casterCornerRadius));
+        }
+
+        const float shadowInset = shadow.length * -1.0f;
+        const Rect casterWithShadow =
+                Rect(casterRect).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+        const Region shadowRegion = Region(casterWithShadow).subtractSelf(casterRect);
+        const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+
+        // verify casting layer
+        expectBufferColor(casterRegion, casterColor.r, casterColor.g, casterColor.b, casterColor.a);
+
+        // verify shadows by testing just the alpha since it's difficult to validate the shadow color
+        size_t c;
+        Rect const* r = shadowRegion.getArray(&c);
+        for (size_t i = 0; i < c; i++, r++) {
+            expectAlpha(*r, 255);
+        }
+
+        // verify background
+        expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+                          backgroundColor.a);
+    }
+
+    static renderengine::ShadowSettings getShadowSettings(const vec2& casterPos, float shadowLength,
+                                                          bool casterIsTranslucent) {
+        renderengine::ShadowSettings shadow;
+        shadow.ambientColor = {0.0f, 0.0f, 0.0f, 0.039f};
+        shadow.spotColor = {0.0f, 0.0f, 0.0f, 0.19f};
+        shadow.lightPos = vec3(casterPos.x, casterPos.y, 0);
+        shadow.lightRadius = 0.0f;
+        shadow.length = shadowLength;
+        shadow.casterIsTranslucent = casterIsTranslucent;
+        return shadow;
+    }
+
+    static Rect fullscreenRect() { return Rect(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT); }
+
+    static Rect offsetRect() {
+        return Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+                    DEFAULT_DISPLAY_HEIGHT);
+    }
+
+    static Rect offsetRectAtZero() {
+        return Rect(DEFAULT_DISPLAY_WIDTH - DEFAULT_DISPLAY_OFFSET,
+                    DEFAULT_DISPLAY_HEIGHT - DEFAULT_DISPLAY_OFFSET);
+    }
+
+    void invokeDraw(renderengine::DisplaySettings settings,
+                    std::vector<const renderengine::LayerSettings*> layers,
+                    sp<GraphicBuffer> buffer) {
+        base::unique_fd fence;
+        status_t status =
+                sRE->drawLayers(settings, layers, buffer, true, base::unique_fd(), &fence);
+        sCurrentBuffer = buffer;
+
+        int fd = fence.release();
+        if (fd >= 0) {
+            sync_wait(fd, -1);
+            close(fd);
+        }
+
+        ASSERT_EQ(NO_ERROR, status);
+        if (layers.size() > 0) {
+            ASSERT_TRUE(sRE->isFramebufferImageCachedForTesting(buffer->getId()));
+        }
+    }
+
+    void drawEmptyLayers() {
+        renderengine::DisplaySettings settings;
+        std::vector<const renderengine::LayerSettings*> layers;
+        // Meaningless buffer since we don't do any drawing
+        sp<GraphicBuffer> buffer = new GraphicBuffer();
+        invokeDraw(settings, layers, buffer);
+    }
+
+    template <typename SourceVariant>
+    void fillBuffer(half r, half g, half b, half a);
+
+    template <typename SourceVariant>
+    void fillRedBuffer();
+
+    template <typename SourceVariant>
+    void fillGreenBuffer();
+
+    template <typename SourceVariant>
+    void fillBlueBuffer();
+
+    template <typename SourceVariant>
+    void fillRedTransparentBuffer();
+
+    template <typename SourceVariant>
+    void fillRedOffsetBuffer();
+
+    template <typename SourceVariant>
+    void fillBufferPhysicalOffset();
+
+    template <typename SourceVariant>
+    void fillBufferCheckers(uint32_t rotation);
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate0();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate90();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate180();
+
+    template <typename SourceVariant>
+    void fillBufferCheckersRotate270();
+
+    template <typename SourceVariant>
+    void fillBufferWithLayerTransform();
+
+    template <typename SourceVariant>
+    void fillBufferLayerTransform();
+
+    template <typename SourceVariant>
+    void fillBufferWithColorTransform();
+
+    template <typename SourceVariant>
+    void fillBufferColorTransform();
+
+    template <typename SourceVariant>
+    void fillRedBufferWithRoundedCorners();
+
+    template <typename SourceVariant>
+    void fillBufferWithRoundedCorners();
+
+    template <typename SourceVariant>
+    void fillBufferAndBlurBackground();
+
+    template <typename SourceVariant>
+    void overlayCorners();
+
+    void fillRedBufferTextureTransform();
+
+    void fillBufferTextureTransform();
+
+    void fillRedBufferWithPremultiplyAlpha();
+
+    void fillBufferWithPremultiplyAlpha();
+
+    void fillRedBufferWithoutPremultiplyAlpha();
+
+    void fillBufferWithoutPremultiplyAlpha();
+
+    void fillGreenColorBufferThenClearRegion();
+
+    void clearLeftRegion();
+
+    void clearRegion();
+
+    template <typename SourceVariant>
+    void drawShadow(const renderengine::LayerSettings& castingLayer,
+                    const renderengine::ShadowSettings& shadow, const ubyte4& casterColor,
+                    const ubyte4& backgroundColor);
+
+    // Keep around the same renderengine object to save on initialization time.
+    // For now, exercise the GL backend directly so that some caching specifics
+    // can be tested without changing the interface.
+    static std::unique_ptr<renderengine::gl::GLESRenderEngine> sRE;
+    // Hack to avoid a null-pointer dereference in the EGL driver: the
+    // GraphicBuffer needs to be freed *after* RenderEngine is destroyed, so
+    // that the EGL image is destroyed first.
+    static sp<GraphicBuffer> sCurrentBuffer;
+
+    sp<GraphicBuffer> mBuffer;
+
+    std::vector<uint32_t> mTexNames;
+};
+
+std::unique_ptr<renderengine::gl::GLESRenderEngine> RenderEngineTest::sRE = nullptr;
+sp<GraphicBuffer> RenderEngineTest::sCurrentBuffer = nullptr;
+
+struct ColorSourceVariant {
+    static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+                          RenderEngineTest* /*fixture*/) {
+        layer.source.solidColor = half3(r, g, b);
+    }
+};
+
+struct RelaxOpaqueBufferVariant {
+    static void setOpaqueBit(renderengine::LayerSettings& layer) {
+        layer.source.buffer.isOpaque = false;
+    }
+
+    static uint8_t getAlphaChannel() { return 255; }
+};
+
+struct ForceOpaqueBufferVariant {
+    static void setOpaqueBit(renderengine::LayerSettings& layer) {
+        layer.source.buffer.isOpaque = true;
+    }
+
+    static uint8_t getAlphaChannel() {
+        // The isOpaque bit will override the alpha channel, so this value can
+        // be arbitrary.
+        return 10;
+    }
+};
+
+template <typename OpaquenessVariant>
+struct BufferSourceVariant {
+    static void fillColor(renderengine::LayerSettings& layer, half r, half g, half b,
+                          RenderEngineTest* fixture) {
+        sp<GraphicBuffer> buf = RenderEngineTest::allocateSourceBuffer(1, 1);
+        uint32_t texName;
+        fixture->sRE->genTextures(1, &texName);
+        fixture->mTexNames.push_back(texName);
+
+        uint8_t* pixels;
+        buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                  reinterpret_cast<void**>(&pixels));
+
+        for (uint32_t j = 0; j < buf->getHeight(); j++) {
+            uint8_t* iter = pixels + (buf->getStride() * j) * 4;
+            for (uint32_t i = 0; i < buf->getWidth(); i++) {
+                iter[0] = uint8_t(r * 255);
+                iter[1] = uint8_t(g * 255);
+                iter[2] = uint8_t(b * 255);
+                iter[3] = OpaquenessVariant::getAlphaChannel();
+                iter += 4;
+            }
+        }
+
+        buf->unlock();
+
+        layer.source.buffer.buffer = buf;
+        layer.source.buffer.textureName = texName;
+        OpaquenessVariant::setOpaqueBit(layer);
+    }
+};
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBuffer(half r, half g, half b, half a) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(layer, r, g, b, this);
+    layer.alpha = a;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBuffer() {
+    fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillGreenBuffer() {
+    fillBuffer<SourceVariant>(0.0f, 1.0f, 0.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBlueBuffer() {
+    fillBuffer<SourceVariant>(0.0f, 0.0f, 1.0f, 1.0f);
+    expectBufferColor(fullscreenRect(), 0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedTransparentBuffer() {
+    fillBuffer<SourceVariant>(1.0f, 0.0f, 0.0f, .2f);
+    expectBufferColor(fullscreenRect(), 51, 0, 0, 51);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedOffsetBuffer() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = offsetRect();
+    settings.clip = offsetRectAtZero();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = offsetRectAtZero().toFloatRect();
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferPhysicalOffset() {
+    fillRedOffsetBuffer<SourceVariant>();
+
+    expectBufferColor(Rect(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+    Rect offsetRegionLeft(DEFAULT_DISPLAY_OFFSET, DEFAULT_DISPLAY_HEIGHT);
+    Rect offsetRegionTop(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_OFFSET);
+
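+    // The strips above and to the left of the display offset should be left
+    // untouched.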
+    expectBufferColor(offsetRegionLeft, 0, 0, 0, 0);
+    expectBufferColor(offsetRegionTop, 0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckers(uint32_t orientationFlag) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 2x2
+    settings.clip = Rect(2, 2);
+    settings.orientation = orientationFlag;
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layerOne;
+    Rect rectOne(0, 0, 1, 1);
+    layerOne.geometry.boundaries = rectOne.toFloatRect();
+    SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+    layerOne.alpha = 1.0f;
+
+    renderengine::LayerSettings layerTwo;
+    Rect rectTwo(0, 1, 1, 2);
+    layerTwo.geometry.boundaries = rectTwo.toFloatRect();
+    SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+    layerTwo.alpha = 1.0f;
+
+    renderengine::LayerSettings layerThree;
+    Rect rectThree(1, 0, 2, 1);
+    layerThree.geometry.boundaries = rectThree.toFloatRect();
+    SourceVariant::fillColor(layerThree, 0.0f, 0.0f, 1.0f, this);
+    layerThree.alpha = 1.0f;
+
+    layers.push_back(&layerOne);
+    layers.push_back(&layerTwo);
+    layers.push_back(&layerThree);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate0() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_0);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 255, 0, 0,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 0, 255, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate90() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_90);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 255, 0,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      255, 0, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 255, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate180() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_180);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0,
+                      0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 255, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 255, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferCheckersRotate270() {
+    fillBufferCheckers<SourceVariant>(ui::Transform::ROT_270);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 255,
+                      255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT / 2),
+                      0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT / 2, DEFAULT_DISPLAY_WIDTH / 2,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithLayerTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 2x2
+    settings.clip = Rect(2, 2);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+    // Translate one pixel diagonally
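+    // (the trailing 1, 1, 0, 1 is the translation column of this column-major
+    // mat4)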
+    layer.geometry.positionTransform = mat4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1);
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.source.solidColor = half3(1.0f, 0.0f, 0.0f);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferLayerTransform() {
+    fillBufferWithLayerTransform<SourceVariant>();
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT / 2), 0, 0, 0, 0);
+    expectBufferColor(Rect(0, 0, DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT / 2,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      255, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithColorTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+    SourceVariant::fillColor(layer, 0.5f, 0.25f, 0.125f, this);
+    layer.alpha = 1.0f;
+
+    // construct a fake color matrix
+    // annihilate green and blue channels
+    settings.colorTransform = mat4::scale(vec4(1, 0, 0, 1));
+    // set red channel to red + green
+    layer.colorTransform = mat4(1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
+
+    layer.alpha = 1.0f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferColorTransform() {
+    fillBufferWithColorTransform<SourceVariant>();
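+    // The layer transform folds green into red (0.5 + 0.25 = 0.75) and the
+    // display transform zeroes green and blue, so red comes out at
+    // 0.75 * 255 = ~191.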
+    expectBufferColor(fullscreenRect(), 191, 0, 0, 255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillRedBufferWithRoundedCorners() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    layer.geometry.roundedCornersRadius = 5.0f;
+    layer.geometry.roundedCornersCrop = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0f;
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferWithRoundedCorners() {
+    fillRedBufferWithRoundedCorners<SourceVariant>();
+    // Corners should be ignored...
+    expectBufferColor(Rect(0, 0, 1, 1), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, 0, DEFAULT_DISPLAY_WIDTH, 1), 0, 0, 0, 0);
+    expectBufferColor(Rect(0, DEFAULT_DISPLAY_HEIGHT - 1, 1, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH - 1, DEFAULT_DISPLAY_HEIGHT - 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+    // ...And the non-rounded portion should be red.
+    // Other pixels may be anti-aliased, so let's not check those.
+    expectBufferColor(Rect(5, 5, DEFAULT_DISPLAY_WIDTH - 5, DEFAULT_DISPLAY_HEIGHT - 5), 255, 0, 0,
+                      255);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::fillBufferAndBlurBackground() {
+    char value[PROPERTY_VALUE_MAX];
+    property_get("ro.surface_flinger.supports_background_blur", value, "0");
+    if (!atoi(value)) {
+        // This device doesn't support blurs, no-op.
+        return;
+    }
+
+    auto blurRadius = 50;
+    auto center = DEFAULT_DISPLAY_WIDTH / 2;
+
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings backgroundLayer;
+    backgroundLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    SourceVariant::fillColor(backgroundLayer, 0.0f, 1.0f, 0.0f, this);
+    backgroundLayer.alpha = 1.0f;
+    layers.push_back(&backgroundLayer);
+
+    renderengine::LayerSettings leftLayer;
+    leftLayer.geometry.boundaries =
+            Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT).toFloatRect();
+    SourceVariant::fillColor(leftLayer, 1.0f, 0.0f, 0.0f, this);
+    leftLayer.alpha = 1.0f;
+    layers.push_back(&leftLayer);
+
+    renderengine::LayerSettings blurLayer;
+    blurLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    blurLayer.backgroundBlurRadius = blurRadius;
+    blurLayer.alpha = 0;
+    layers.push_back(&blurLayer);
+
+    invokeDraw(settings, layers, mBuffer);
+
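+    // Near the red/green boundary the blur mixes the two halves, so sample a
+    // thin strip on each side and expect a yellowish blend with a generous
+    // tolerance.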
+    expectBufferColor(Rect(center - 1, center - 5, center, center + 5), 150, 150, 0, 255,
+                      50 /* tolerance */);
+    expectBufferColor(Rect(center, center - 5, center + 1, center + 5), 150, 150, 0, 255,
+                      50 /* tolerance */);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::overlayCorners() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layersFirst;
+
+    renderengine::LayerSettings layerOne;
+    layerOne.geometry.boundaries =
+            FloatRect(0, 0, DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0);
+    SourceVariant::fillColor(layerOne, 1.0f, 0.0f, 0.0f, this);
+    layerOne.alpha = 0.2;
+
+    layersFirst.push_back(&layerOne);
+    invokeDraw(settings, layersFirst, mBuffer);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 51, 0, 0, 51);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+
+    std::vector<const renderengine::LayerSettings*> layersSecond;
+    renderengine::LayerSettings layerTwo;
+    layerTwo.geometry.boundaries =
+            FloatRect(DEFAULT_DISPLAY_WIDTH / 3.0, DEFAULT_DISPLAY_HEIGHT / 3.0,
+                      DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT);
+    SourceVariant::fillColor(layerTwo, 0.0f, 1.0f, 0.0f, this);
+    layerTwo.alpha = 1.0f;
+
+    layersSecond.push_back(&layerTwo);
+    invokeDraw(settings, layersSecond, mBuffer);
+
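+    // The second draw overwrites the whole buffer, so layerOne's corner is gone
+    // and only layerTwo's region should show.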
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3, DEFAULT_DISPLAY_HEIGHT / 3), 0, 0, 0, 0);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 3 + 1, DEFAULT_DISPLAY_HEIGHT / 3 + 1,
+                           DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT),
+                      0, 255, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferTextureTransform() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    // Allocate a checkerboard texture, but transform the texture coordinates
+    // so that only the upper-left (red) quadrant is sampled.
+    sp<GraphicBuffer> buf = allocateSourceBuffer(2, 2);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    // Red top left, Green top right, Blue bottom left, Black bottom right
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    pixels[4] = 0;
+    pixels[5] = 255;
+    pixels[6] = 0;
+    pixels[7] = 255;
+    pixels[8] = 0;
+    pixels[9] = 0;
+    pixels[10] = 255;
+    pixels[11] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    // Transform coordinates to only be inside the red quadrant.
+    layer.source.buffer.textureTransform = mat4::scale(vec4(0.2, 0.2, 1, 1));
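+    // A 0.2 scale keeps every sampled coordinate within the first (red) texel
+    // of the 2x2 texture.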
+    layer.alpha = 1.0f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferTextureTransform() {
+    fillRedBufferTextureTransform();
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+void RenderEngineTest::fillRedBufferWithPremultiplyAlpha() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 1x1
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    layer.source.buffer.usePremultipliedAlpha = true;
+    layer.alpha = 0.5f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithPremultiplyAlpha() {
+    fillRedBufferWithPremultiplyAlpha();
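+    // Opaque red source scaled by the 0.5 layer alpha: roughly 128 in both the
+    // red and alpha channels of the premultiplied output.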
+    expectBufferColor(fullscreenRect(), 128, 0, 0, 128);
+}
+
+void RenderEngineTest::fillRedBufferWithoutPremultiplyAlpha() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 1x1
+    settings.clip = Rect(1, 1);
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    RenderEngineTest::sRE->genTextures(1, &texName);
+    this->mTexNames.push_back(texName);
+
+    uint8_t* pixels;
+    buf->lock(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+              reinterpret_cast<void**>(&pixels));
+    pixels[0] = 255;
+    pixels[1] = 0;
+    pixels[2] = 0;
+    pixels[3] = 255;
+    buf->unlock();
+
+    layer.source.buffer.buffer = buf;
+    layer.source.buffer.textureName = texName;
+    layer.source.buffer.usePremultipliedAlpha = false;
+    layer.alpha = 0.5f;
+    layer.geometry.boundaries = Rect(1, 1).toFloatRect();
+
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::fillBufferWithoutPremultiplyAlpha() {
+    fillRedBufferWithoutPremultiplyAlpha();
+    expectBufferColor(fullscreenRect(), 128, 0, 0, 64, 1);
+}
+
+void RenderEngineTest::clearLeftRegion() {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    // Here logical space is 4x4
+    settings.clip = Rect(4, 4);
+    settings.clearRegion = Region(Rect(2, 4));
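+    // Rect(2, 4) in the 4x4 logical space maps to the left half of the
+    // physical display.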
+    std::vector<const renderengine::LayerSettings*> layers;
+    // Fake layer: with no bounds set it should not render anything.
+    renderengine::LayerSettings layer;
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+}
+
+void RenderEngineTest::clearRegion() {
+    // Reuse mBuffer
+    clearLeftRegion();
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, DEFAULT_DISPLAY_HEIGHT), 0, 0, 0, 255);
+    expectBufferColor(Rect(DEFAULT_DISPLAY_WIDTH / 2, 0, DEFAULT_DISPLAY_WIDTH,
+                           DEFAULT_DISPLAY_HEIGHT),
+                      0, 0, 0, 0);
+}
+
+template <typename SourceVariant>
+void RenderEngineTest::drawShadow(const renderengine::LayerSettings& castingLayer,
+                                  const renderengine::ShadowSettings& shadow,
+                                  const ubyte4& casterColor, const ubyte4& backgroundColor) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    // add background layer
+    renderengine::LayerSettings bgLayer;
+    bgLayer.geometry.boundaries = fullscreenRect().toFloatRect();
+    ColorSourceVariant::fillColor(bgLayer, backgroundColor.r / 255.0f, backgroundColor.g / 255.0f,
+                                  backgroundColor.b / 255.0f, this);
+    bgLayer.alpha = backgroundColor.a / 255.0f;
+    layers.push_back(&bgLayer);
+
+    // add shadow layer
+    renderengine::LayerSettings shadowLayer;
+    shadowLayer.geometry.boundaries = castingLayer.geometry.boundaries;
+    shadowLayer.alpha = castingLayer.alpha;
+    shadowLayer.shadow = shadow;
+    layers.push_back(&shadowLayer);
+
+    // add layer casting the shadow
+    renderengine::LayerSettings layer = castingLayer;
+    SourceVariant::fillColor(layer, casterColor.r / 255.0f, casterColor.g / 255.0f,
+                             casterColor.b / 255.0f, this);
+    layers.push_back(&layer);
+
+    invokeDraw(settings, layers, mBuffer);
+}
+
+TEST_F(RenderEngineTest, drawLayers_noLayersToDraw) {
+    drawEmptyLayers();
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputBuffer) {
+    renderengine::DisplaySettings settings;
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layers.push_back(&layer);
+    base::unique_fd fence;
+    status_t status = sRE->drawLayers(settings, layers, nullptr, true, base::unique_fd(), &fence);
+
+    ASSERT_EQ(BAD_VALUE, status);
+}
+
+TEST_F(RenderEngineTest, drawLayers_nullOutputFence) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    status_t status = sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), nullptr);
+    sCurrentBuffer = mBuffer;
+    ASSERT_EQ(NO_ERROR, status);
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_doesNotCacheFramebuffer) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    status_t status = sRE->drawLayers(settings, layers, mBuffer, false, base::unique_fd(), nullptr);
+    sCurrentBuffer = mBuffer;
+    ASSERT_EQ(NO_ERROR, status);
+    ASSERT_FALSE(sRE->isFramebufferImageCachedForTesting(mBuffer->getId()));
+    expectBufferColor(fullscreenRect(), 255, 0, 0, 255);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_colorSource) {
+    fillRedBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_colorSource) {
+    fillGreenBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_colorSource) {
+    fillBlueBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_colorSource) {
+    fillRedTransparentBuffer<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_colorSource) {
+    fillBufferPhysicalOffset<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_colorSource) {
+    fillBufferCheckersRotate0<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_colorSource) {
+    fillBufferCheckersRotate90<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_colorSource) {
+    fillBufferCheckersRotate180<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_colorSource) {
+    fillBufferCheckersRotate270<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_colorSource) {
+    fillBufferLayerTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_colorSource) {
+    fillBufferColorTransform<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_colorSource) {
+    fillBufferWithRoundedCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_colorSource) {
+    fillBufferAndBlurBackground<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_colorSource) {
+    overlayCorners<ColorSourceVariant>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_opaqueBufferSource) {
+    fillRedBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_opaqueBufferSource) {
+    fillGreenBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_opaqueBufferSource) {
+    fillBlueBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_opaqueBufferSource) {
+    fillRedTransparentBuffer<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_opaqueBufferSource) {
+    fillBufferPhysicalOffset<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_opaqueBufferSource) {
+    fillBufferCheckersRotate0<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_opaqueBufferSource) {
+    fillBufferCheckersRotate90<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_opaqueBufferSource) {
+    fillBufferCheckersRotate180<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_opaqueBufferSource) {
+    fillBufferCheckersRotate270<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_opaqueBufferSource) {
+    fillBufferLayerTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_opaqueBufferSource) {
+    fillBufferColorTransform<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_opaqueBufferSource) {
+    fillBufferWithRoundedCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_opaqueBufferSource) {
+    fillBufferAndBlurBackground<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_opaqueBufferSource) {
+    overlayCorners<BufferSourceVariant<ForceOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedBuffer_bufferSource) {
+    fillRedBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillGreenBuffer_bufferSource) {
+    fillGreenBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBlueBuffer_bufferSource) {
+    fillBlueBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillRedTransparentBuffer_bufferSource) {
+    fillRedTransparentBuffer<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferPhysicalOffset_bufferSource) {
+    fillBufferPhysicalOffset<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate0_bufferSource) {
+    fillBufferCheckersRotate0<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate90_bufferSource) {
+    fillBufferCheckersRotate90<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate180_bufferSource) {
+    fillBufferCheckersRotate180<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferCheckersRotate270_bufferSource) {
+    fillBufferCheckersRotate270<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferLayerTransform_bufferSource) {
+    fillBufferLayerTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferColorTransform_bufferSource) {
+    fillBufferColorTransform<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferRoundedCorners_bufferSource) {
+    fillBufferWithRoundedCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferAndBlurBackground_bufferSource) {
+    fillBufferAndBlurBackground<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_overlayCorners_bufferSource) {
+    overlayCorners<BufferSourceVariant<RelaxOpaqueBufferVariant>>();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBufferTextureTransform) {
+    fillBufferTextureTransform();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_premultipliesAlpha) {
+    fillBufferWithPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillBuffer_withoutPremultiplyingAlpha) {
+    fillBufferWithoutPremultiplyAlpha();
+}
+
+TEST_F(RenderEngineTest, drawLayers_clearRegion) {
+    clearRegion();
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillsBufferAndCachesImages) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+
+    layers.push_back(&layer);
+    invokeDraw(settings, layers, mBuffer);
+    uint64_t bufferId = layer.source.buffer.buffer->getId();
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->unbindExternalTextureBufferForTesting(bufferId);
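+    // The unbind is serviced asynchronously on the ImageManager thread; the
+    // barrier opens once that work completes, so wait (up to 5 seconds) before
+    // checking the cache again.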
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(NO_ERROR, barrier->result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_withNullBuffer) {
+    status_t result = sRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineTest, bindExternalBuffer_cachesImages) {
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint32_t texName;
+    sRE->genTextures(1, &texName);
+    mTexNames.push_back(texName);
+
+    sRE->bindExternalTextureBuffer(texName, buf, nullptr);
+    uint64_t bufferId = buf->getId();
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->unbindExternalTextureBufferForTesting(bufferId);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_EQ(NO_ERROR, barrier->result);
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_withNullBuffer) {
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->cacheExternalTextureBufferForTesting(nullptr);
+    std::lock_guard<std::mutex> lock(barrier->mutex);
+    ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                            [&]() REQUIRES(barrier->mutex) {
+                                                return barrier->isOpen;
+                                            }));
+    EXPECT_TRUE(barrier->isOpen);
+    EXPECT_EQ(BAD_VALUE, barrier->result);
+}
+
+TEST_F(RenderEngineTest, cacheExternalBuffer_cachesImages) {
+    sp<GraphicBuffer> buf = allocateSourceBuffer(1, 1);
+    uint64_t bufferId = buf->getId();
+    std::shared_ptr<renderengine::gl::ImageManager::Barrier> barrier =
+            sRE->cacheExternalTextureBufferForTesting(buf);
+    {
+        std::lock_guard<std::mutex> lock(barrier->mutex);
+        ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                                [&]() REQUIRES(barrier->mutex) {
+                                                    return barrier->isOpen;
+                                                }));
+        EXPECT_EQ(NO_ERROR, barrier->result);
+    }
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    barrier = sRE->unbindExternalTextureBufferForTesting(bufferId);
+    {
+        std::lock_guard<std::mutex> lock(barrier->mutex);
+        ASSERT_TRUE(barrier->condition.wait_for(barrier->mutex, std::chrono::seconds(5),
+                                                [&]() REQUIRES(barrier->mutex) {
+                                                    return barrier->isOpen;
+                                                }));
+        EXPECT_EQ(NO_ERROR, barrier->result);
+    }
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterLayerMinSize) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(1, 1);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
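+    // Offset the caster so the shadow, which extends shadowLength past the
+    // caster bounds, stays fully inside the display.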
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterColorLayer) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<ColorSourceVariant>(castingLayer, settings, casterColor, backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterOpaqueBufferLayer) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_casterWithRoundedCorner) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.geometry.roundedCornersRadius = 3.0f;
+    castingLayer.geometry.roundedCornersCrop = casterBounds.toFloatRect();
+    castingLayer.alpha = 1.0f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              false /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<ForceOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+    expectShadowColor(castingLayer, settings, casterColor, backgroundColor);
+}
+
+TEST_F(RenderEngineTest, drawLayers_fillShadow_translucentCasterWithAlpha) {
+    const ubyte4 casterColor(255, 0, 0, 255);
+    const ubyte4 backgroundColor(255, 255, 255, 255);
+    const float shadowLength = 5.0f;
+    Rect casterBounds(DEFAULT_DISPLAY_WIDTH / 3.0f, DEFAULT_DISPLAY_HEIGHT / 3.0f);
+    casterBounds.offsetBy(shadowLength + 1, shadowLength + 1);
+    renderengine::LayerSettings castingLayer;
+    castingLayer.geometry.boundaries = casterBounds.toFloatRect();
+    castingLayer.alpha = 0.5f;
+    renderengine::ShadowSettings settings =
+            getShadowSettings(vec2(casterBounds.left, casterBounds.top), shadowLength,
+                              true /* casterIsTranslucent */);
+
+    drawShadow<BufferSourceVariant<RelaxOpaqueBufferVariant>>(castingLayer, settings, casterColor,
+                                                              backgroundColor);
+
+    // verify only the background since the shadow will draw behind the caster
+    const float shadowInset = settings.length * -1.0f;
+    const Rect casterWithShadow =
+            Rect(casterBounds).inset(shadowInset, shadowInset, shadowInset, shadowInset);
+    const Region backgroundRegion = Region(fullscreenRect()).subtractSelf(casterWithShadow);
+    expectBufferColor(backgroundRegion, backgroundColor.r, backgroundColor.g, backgroundColor.b,
+                      backgroundColor.a);
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_cleansUpOnce) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    base::unique_fd fenceOne;
+    sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fenceOne);
+    base::unique_fd fenceTwo;
+    sRE->drawLayers(settings, layers, mBuffer, true, std::move(fenceOne), &fenceTwo);
+
+    const int fd = fenceTwo.get();
+    if (fd >= 0) {
+        sync_wait(fd, -1);
+    }
+    // Only cleanup the first time.
+    EXPECT_TRUE(sRE->cleanupPostRender(
+            renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+    EXPECT_FALSE(sRE->cleanupPostRender(
+            renderengine::RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES));
+}
+
+TEST_F(RenderEngineTest, cleanupPostRender_whenCleaningAll_replacesTextureMemory) {
+    renderengine::DisplaySettings settings;
+    settings.physicalDisplay = fullscreenRect();
+    settings.clip = fullscreenRect();
+
+    std::vector<const renderengine::LayerSettings*> layers;
+    renderengine::LayerSettings layer;
+    layer.geometry.boundaries = fullscreenRect().toFloatRect();
+    BufferSourceVariant<ForceOpaqueBufferVariant>::fillColor(layer, 1.0f, 0.0f, 0.0f, this);
+    layer.alpha = 1.0;
+    layers.push_back(&layer);
+
+    base::unique_fd fence;
+    sRE->drawLayers(settings, layers, mBuffer, true, base::unique_fd(), &fence);
+
+    const int fd = fence.get();
+    if (fd >= 0) {
+        sync_wait(fd, -1);
+    }
+
+    uint64_t bufferId = layer.source.buffer.buffer->getId();
+    uint32_t texName = layer.source.buffer.textureName;
+    EXPECT_TRUE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(bufferId, sRE->getBufferIdForTextureNameForTesting(texName));
+
+    EXPECT_TRUE(sRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL));
+
+    // Now check that the image cache and texture-name bookkeeping reflect the
+    // cleanup.
+    EXPECT_FALSE(sRE->isImageCachedForTesting(bufferId));
+    EXPECT_EQ(std::nullopt, sRE->getBufferIdForTextureNameForTesting(bufferId));
+    EXPECT_TRUE(sRE->isTextureNameKnownForTesting(texName));
+}
+
+} // namespace android
+
+// TODO(b/129481165): remove the #pragma below and fix conversion issues
+#pragma clang diagnostic pop // ignored "-Wconversion"
diff --git a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
new file mode 100644
index 0000000..97c7442
--- /dev/null
+++ b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/properties.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <renderengine/mock/RenderEngine.h>
+#include "../threaded/RenderEngineThreaded.h"
+
+namespace android {
+
+using testing::_;
+using testing::Eq;
+using testing::Mock;
+using testing::Return;
+
+struct RenderEngineThreadedTest : public ::testing::Test {
+    ~RenderEngineThreadedTest() {}
+
+    void SetUp() override {
+        mThreadedRE = renderengine::threaded::RenderEngineThreaded::create(
+                [this]() { return std::unique_ptr<renderengine::RenderEngine>(mRenderEngine); });
+    }
+
+    std::unique_ptr<renderengine::threaded::RenderEngineThreaded> mThreadedRE;
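+    // The factory in SetUp() wraps this raw pointer in a unique_ptr, so
+    // mThreadedRE adopts ownership of the mock.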
+    renderengine::mock::RenderEngine* mRenderEngine = new renderengine::mock::RenderEngine();
+};
+
+TEST_F(RenderEngineThreadedTest, dump) {
+    std::string testString = "XYZ";
+    EXPECT_CALL(*mRenderEngine, dump(_));
+    mThreadedRE->dump(testString);
+}
+
+TEST_F(RenderEngineThreadedTest, primeCache) {
+    EXPECT_CALL(*mRenderEngine, primeCache());
+    mThreadedRE->primeCache();
+}
+
+TEST_F(RenderEngineThreadedTest, genTextures) {
+    uint32_t texName;
+    EXPECT_CALL(*mRenderEngine, genTextures(1, &texName));
+    mThreadedRE->genTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, deleteTextures) {
+    uint32_t texName;
+    EXPECT_CALL(*mRenderEngine, deleteTextures(1, &texName));
+    mThreadedRE->deleteTextures(1, &texName);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_nullptrBuffer) {
+    EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, Eq(nullptr), Eq(nullptr)))
+            .WillOnce(Return(BAD_VALUE));
+    status_t result = mThreadedRE->bindExternalTextureBuffer(0, nullptr, nullptr);
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindExternalBuffer_withBuffer) {
+    sp<GraphicBuffer> buf = new GraphicBuffer();
+    EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, buf, Eq(nullptr)))
+            .WillOnce(Return(NO_ERROR));
+    status_t result = mThreadedRE->bindExternalTextureBuffer(0, buf, nullptr);
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_nullptr) {
+    EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(Eq(nullptr)));
+    mThreadedRE->cacheExternalTextureBuffer(nullptr);
+}
+
+TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_withBuffer) {
+    sp<GraphicBuffer> buf = new GraphicBuffer();
+    EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(buf));
+    mThreadedRE->cacheExternalTextureBuffer(buf);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindExternalTextureBuffer) {
+    EXPECT_CALL(*mRenderEngine, unbindExternalTextureBuffer(0x0));
+    mThreadedRE->unbindExternalTextureBuffer(0x0);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsBadValue) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(BAD_VALUE));
+    status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+    ASSERT_EQ(BAD_VALUE, result);
+}
+
+TEST_F(RenderEngineThreadedTest, bindFrameBuffer_returnsNoError) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, bindFrameBuffer(framebuffer.get())).WillOnce(Return(NO_ERROR));
+    status_t result = mThreadedRE->bindFrameBuffer(framebuffer.get());
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+TEST_F(RenderEngineThreadedTest, unbindFrameBuffer) {
+    std::unique_ptr<renderengine::Framebuffer> framebuffer;
+    EXPECT_CALL(*mRenderEngine, unbindFrameBuffer(framebuffer.get()));
+    mThreadedRE->unbindFrameBuffer(framebuffer.get());
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns20) {
+    size_t size = 20;
+    EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+    size_t result = mThreadedRE->getMaxTextureSize();
+    ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxTextureSize_returns0) {
+    size_t size = 0;
+    EXPECT_CALL(*mRenderEngine, getMaxTextureSize()).WillOnce(Return(size));
+    size_t result = mThreadedRE->getMaxTextureSize();
+    ASSERT_EQ(size, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns20) {
+    size_t dims = 20;
+    EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+    size_t result = mThreadedRE->getMaxViewportDims();
+    ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, getMaxViewportDims_returns0) {
+    size_t dims = 0;
+    EXPECT_CALL(*mRenderEngine, getMaxViewportDims()).WillOnce(Return(dims));
+    size_t result = mThreadedRE->getMaxViewportDims();
+    ASSERT_EQ(dims, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(false));
+    bool result = mThreadedRE->isProtected();
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, isProtected_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, isProtected()).WillOnce(Return(true));
+    bool result = mThreadedRE->isProtected();
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(false));
+    bool result = mThreadedRE->supportsProtectedContent();
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, supportsProtectedContent_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, supportsProtectedContent()).WillOnce(Return(true));
+    bool result = mThreadedRE->supportsProtectedContent();
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(false));
+    bool result = mThreadedRE->useProtectedContext(false);
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, useProtectedContext_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine, useProtectedContext(false)).WillOnce(Return(true));
+    bool result = mThreadedRE->useProtectedContext(false);
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsFalse) {
+    EXPECT_CALL(*mRenderEngine,
+                cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+            .WillOnce(Return(false));
+    bool result =
+            mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    ASSERT_EQ(false, result);
+}
+
+TEST_F(RenderEngineThreadedTest, cleanupPostRender_returnsTrue) {
+    EXPECT_CALL(*mRenderEngine,
+                cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL))
+            .WillOnce(Return(true));
+    bool result =
+            mThreadedRE->cleanupPostRender(renderengine::RenderEngine::CleanupMode::CLEAN_ALL);
+    ASSERT_EQ(true, result);
+}
+
+TEST_F(RenderEngineThreadedTest, drawLayers) {
+    renderengine::DisplaySettings settings;
+    std::vector<const renderengine::LayerSettings*> layers;
+    sp<GraphicBuffer> buffer = new GraphicBuffer();
+    base::unique_fd bufferFence;
+    base::unique_fd drawFence;
+
+    EXPECT_CALL(*mRenderEngine, drawLayers)
+            .WillOnce([](const renderengine::DisplaySettings&,
+                         const std::vector<const renderengine::LayerSettings*>&,
+                         const sp<GraphicBuffer>&, const bool, base::unique_fd&&,
+                         base::unique_fd*) -> status_t { return NO_ERROR; });
+
+    status_t result = mThreadedRE->drawLayers(settings, layers, buffer, false,
+                                              std::move(bufferFence), &drawFence);
+    ASSERT_EQ(NO_ERROR, result);
+}
+
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
new file mode 100644
index 0000000..222d3ee
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.cpp
@@ -0,0 +1,402 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+
+#include "RenderEngineThreaded.h"
+
+#include <sched.h>
+#include <chrono>
+#include <future>
+
+#include <android-base/stringprintf.h>
+#include <utils/Trace.h>
+
+#include "gl/GLESRenderEngine.h"
+
+using namespace std::chrono_literals;
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+std::unique_ptr<RenderEngineThreaded> RenderEngineThreaded::create(CreateInstanceFactory factory) {
+    return std::make_unique<RenderEngineThreaded>(std::move(factory));
+}
+
+RenderEngineThreaded::RenderEngineThreaded(CreateInstanceFactory factory) {
+    ATRACE_CALL();
+
+    std::lock_guard lockThread(mThreadMutex);
+    mThread = std::thread(&RenderEngineThreaded::threadMain, this, factory);
+}
+
+RenderEngineThreaded::~RenderEngineThreaded() {
+    {
+        std::lock_guard lock(mThreadMutex);
+        mRunning = false;
+        mCondition.notify_one();
+    }
+
+    if (mThread.joinable()) {
+        mThread.join();
+    }
+}
+
+// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
+void RenderEngineThreaded::threadMain(CreateInstanceFactory factory) NO_THREAD_SAFETY_ANALYSIS {
+    ATRACE_CALL();
+
+    struct sched_param param = {0};
+    param.sched_priority = 2;
+    if (sched_setscheduler(0, SCHED_FIFO, &param) != 0) {
+        ALOGE("Couldn't set SCHED_FIFO");
+    }
+
+    mRenderEngine = factory();
+
+    std::unique_lock<std::mutex> lock(mThreadMutex);
+    pthread_setname_np(pthread_self(), mThreadName);
+
+    while (mRunning) {
+        if (!mFunctionCalls.empty()) {
+            auto task = mFunctionCalls.front();
+            mFunctionCalls.pop();
+            task(*mRenderEngine);
+        }
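+        // Block only when the queue is empty; the predicate re-check lets any
+        // remaining queued work drain before sleeping again.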
+        mCondition.wait(lock, [this]() REQUIRES(mThreadMutex) {
+            return !mRunning || !mFunctionCalls.empty();
+        });
+    }
+}
+
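+// Each call below is forwarded to the render thread with the same pattern:
+// package the work as a lambda that fulfils a promise, enqueue it under
+// mThreadMutex, wake the thread, and block on the matching future so the call
+// remains synchronous for the caller, roughly:
+//
+//     std::promise<R> p;
+//     auto f = p.get_future();
+//     { std::lock_guard lock(mThreadMutex); mFunctionCalls.push(/* lambda */); }
+//     mCondition.notify_one();
+//     return f.get();
+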
+void RenderEngineThreaded::primeCache() const {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::primeCache");
+            instance.primeCache();
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::dump(std::string& result) {
+    std::promise<std::string> resultPromise;
+    std::future<std::string> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &result](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::dump");
+            std::string localResult = result;
+            instance.dump(localResult);
+            resultPromise.set_value(std::move(localResult));
+        });
+    }
+    mCondition.notify_one();
+    // resultFuture.get() returns the dumped string by value; copy it back into
+    // the caller's string.
+    result.assign(resultFuture.get());
+}
+
+bool RenderEngineThreaded::useNativeFenceSync() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+            ATRACE_NAME("REThreaded::useNativeFenceSync");
+            bool returnValue = SyncFeatures::GetInstance().useNativeFenceSync();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useWaitSync() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& /*instance*/) {
+            ATRACE_NAME("REThreaded::useWaitSync");
+            bool returnValue = SyncFeatures::GetInstance().useWaitSync();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::genTextures(size_t count, uint32_t* names) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, count, names](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::genTextures");
+            instance.genTextures(count, names);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::deleteTextures(size_t count, uint32_t const* names) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, count, &names](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::deleteTextures");
+            instance.deleteTextures(count, names);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::bindExternalTextureImage(uint32_t texName, const Image& image) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, texName, &image](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::bindExternalTextureImage");
+                    instance.bindExternalTextureImage(texName, image);
+                    resultPromise.set_value();
+                });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindExternalTextureBuffer(uint32_t texName,
+                                                         const sp<GraphicBuffer>& buffer,
+                                                         const sp<Fence>& fence) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, texName, &buffer, &fence](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::bindExternalTextureBuffer");
+                    status_t status = instance.bindExternalTextureBuffer(texName, buffer, fence);
+                    resultPromise.set_value(status);
+                });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &buffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::cacheExternalTextureBuffer");
+            instance.cacheExternalTextureBuffer(buffer);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+void RenderEngineThreaded::unbindExternalTextureBuffer(uint64_t bufferId) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &bufferId](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::unbindExternalTextureBuffer");
+            instance.unbindExternalTextureBuffer(bufferId);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+status_t RenderEngineThreaded::bindFrameBuffer(Framebuffer* framebuffer) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::bindFrameBuffer");
+            status_t status = instance.bindFrameBuffer(framebuffer);
+            resultPromise.set_value(status);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+void RenderEngineThreaded::unbindFrameBuffer(Framebuffer* framebuffer) {
+    std::promise<void> resultPromise;
+    std::future<void> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &framebuffer](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::unbindFrameBuffer");
+            instance.unbindFrameBuffer(framebuffer);
+            resultPromise.set_value();
+        });
+    }
+    mCondition.notify_one();
+    resultFuture.wait();
+}
+
+size_t RenderEngineThreaded::getMaxTextureSize() const {
+    std::promise<size_t> resultPromise;
+    std::future<size_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getMaxTextureSize");
+            size_t size = instance.getMaxTextureSize();
+            resultPromise.set_value(size);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+size_t RenderEngineThreaded::getMaxViewportDims() const {
+    std::promise<size_t> resultPromise;
+    std::future<size_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getMaxViewportDims");
+            size_t size = instance.getMaxViewportDims();
+            resultPromise.set_value(size);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::isProtected() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::isProtected");
+            bool returnValue = instance.isProtected();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::supportsProtectedContent() const {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::supportsProtectedContent");
+            bool returnValue = instance.supportsProtectedContent();
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::useProtectedContext(bool useProtectedContext) {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push(
+                [&resultPromise, useProtectedContext](renderengine::RenderEngine& instance) {
+                    ATRACE_NAME("REThreaded::useProtectedContext");
+                    bool returnValue = instance.useProtectedContext(useProtectedContext);
+                    resultPromise.set_value(returnValue);
+                });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+Framebuffer* RenderEngineThreaded::getFramebufferForDrawing() {
+    std::promise<Framebuffer*> resultPromise;
+    std::future<Framebuffer*> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::getFramebufferForDrawing");
+            Framebuffer* framebuffer = instance.getFramebufferForDrawing();
+            resultPromise.set_value(framebuffer);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+bool RenderEngineThreaded::cleanupPostRender(CleanupMode mode) {
+    std::promise<bool> resultPromise;
+    std::future<bool> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, mode](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::cleanupPostRender");
+            bool returnValue = instance.cleanupPostRender(mode);
+            resultPromise.set_value(returnValue);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+status_t RenderEngineThreaded::drawLayers(const DisplaySettings& display,
+                                          const std::vector<const LayerSettings*>& layers,
+                                          const sp<GraphicBuffer>& buffer,
+                                          const bool useFramebufferCache,
+                                          base::unique_fd&& bufferFence,
+                                          base::unique_fd* drawFence) {
+    std::promise<status_t> resultPromise;
+    std::future<status_t> resultFuture = resultPromise.get_future();
+    {
+        std::lock_guard lock(mThreadMutex);
+        mFunctionCalls.push([&resultPromise, &display, &layers, &buffer, useFramebufferCache,
+                             &bufferFence, &drawFence](renderengine::RenderEngine& instance) {
+            ATRACE_NAME("REThreaded::drawLayers");
+            status_t status = instance.drawLayers(display, layers, buffer, useFramebufferCache,
+                                                  std::move(bufferFence), drawFence);
+            resultPromise.set_value(status);
+        });
+    }
+    mCondition.notify_one();
+    return resultFuture.get();
+}
+
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
diff --git a/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
new file mode 100644
index 0000000..86a49e9
--- /dev/null
+++ b/media/libstagefright/renderfright/threaded/RenderEngineThreaded.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+#include "renderengine/RenderEngine.h"
+
+namespace android {
+namespace renderengine {
+namespace threaded {
+
+using CreateInstanceFactory = std::function<std::unique_ptr<renderengine::RenderEngine>()>;
+
+/**
+ * This class extends a basic RenderEngine class. It owns a dedicated thread. Each time a function
+ * of this class is called, we create a lambda that is pushed onto a queue; the render engine
+ * thread then executes the queued functions in order.
+ */
+class RenderEngineThreaded : public RenderEngine {
+public:
+    static std::unique_ptr<RenderEngineThreaded> create(CreateInstanceFactory factory);
+
+    RenderEngineThreaded(CreateInstanceFactory factory);
+    ~RenderEngineThreaded() override;
+    void primeCache() const override;
+
+    void dump(std::string& result) override;
+
+    bool useNativeFenceSync() const override;
+    bool useWaitSync() const override;
+    void genTextures(size_t count, uint32_t* names) override;
+    void deleteTextures(size_t count, uint32_t const* names) override;
+    void bindExternalTextureImage(uint32_t texName, const Image& image) override;
+    status_t bindExternalTextureBuffer(uint32_t texName, const sp<GraphicBuffer>& buffer,
+                                       const sp<Fence>& fence) override;
+    void cacheExternalTextureBuffer(const sp<GraphicBuffer>& buffer) override;
+    void unbindExternalTextureBuffer(uint64_t bufferId) override;
+    status_t bindFrameBuffer(Framebuffer* framebuffer) override;
+    void unbindFrameBuffer(Framebuffer* framebuffer) override;
+    size_t getMaxTextureSize() const override;
+    size_t getMaxViewportDims() const override;
+
+    bool isProtected() const override;
+    bool supportsProtectedContent() const override;
+    bool useProtectedContext(bool useProtectedContext) override;
+    bool cleanupPostRender(CleanupMode mode) override;
+
+    status_t drawLayers(const DisplaySettings& display,
+                        const std::vector<const LayerSettings*>& layers,
+                        const sp<GraphicBuffer>& buffer, const bool useFramebufferCache,
+                        base::unique_fd&& bufferFence, base::unique_fd* drawFence) override;
+
+protected:
+    Framebuffer* getFramebufferForDrawing() override;
+
+private:
+    void threadMain(CreateInstanceFactory factory);
+
+    /* ------------------------------------------------------------------------
+     * Threading
+     */
+    const char* const mThreadName = "RenderEngineThread";
+    // Protects the creation and destruction of mThread.
+    mutable std::mutex mThreadMutex;
+    std::thread mThread GUARDED_BY(mThreadMutex);
+    bool mRunning GUARDED_BY(mThreadMutex) = true;
+    mutable std::queue<std::function<void(renderengine::RenderEngine& instance)>> mFunctionCalls
+            GUARDED_BY(mThreadMutex);
+    mutable std::condition_variable mCondition;
+
+    /* ------------------------------------------------------------------------
+     * Render Engine
+     */
+    std::unique_ptr<renderengine::RenderEngine> mRenderEngine;
+};
+} // namespace threaded
+} // namespace renderengine
+} // namespace android
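The promise/future hand-off above repeats for every entry point of RenderEngineThreaded. As a reference for readers unfamiliar with the idiom, here is a minimal standalone sketch of the same dispatch pattern (an invented TaskThread class, not the renderfright code): callers enqueue a lambda that holds a reference to a std::promise, the worker thread drains the queue under the mutex, and each caller blocks on its matching std::future.

    #include <condition_variable>
    #include <cstdio>
    #include <functional>
    #include <future>
    #include <mutex>
    #include <queue>
    #include <thread>

    // Minimal task thread mirroring the dispatch pattern used by RenderEngineThreaded.
    class TaskThread {
    public:
        TaskThread() : mThread([this] { threadMain(); }) {}

        ~TaskThread() {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mRunning = false;       // any tasks still queued are simply dropped in this sketch
            }
            mCondition.notify_one();
            mThread.join();
        }

        // Push a task and block until the worker thread has produced its result.
        bool call(std::function<bool()> fn) {
            std::promise<bool> resultPromise;
            std::future<bool> resultFuture = resultPromise.get_future();
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mTasks.push([&resultPromise, fn] { resultPromise.set_value(fn()); });
            }
            mCondition.notify_one();
            return resultFuture.get();
        }

    private:
        void threadMain() {
            std::unique_lock<std::mutex> lock(mMutex);
            while (mRunning) {
                if (mTasks.empty()) {
                    mCondition.wait(lock);
                    continue;
                }
                std::function<void()> task = std::move(mTasks.front());
                mTasks.pop();
                // Run the task outside the lock so callers can keep enqueueing.
                lock.unlock();
                task();
                lock.lock();
            }
        }

        std::mutex mMutex;
        std::condition_variable mCondition;
        std::queue<std::function<void()>> mTasks;
        bool mRunning = true;
        std::thread mThread;   // declared last so it starts after the other members exist
    };

    int main() {
        TaskThread thread;
        bool value = thread.call([] { return true; });   // executes on the worker thread
        std::printf("worker returned %d\n", value);
        return 0;
    }

Because every call blocks on the future, work is serialized on the internal thread without exposing that thread to callers, which is the same trade-off the threaded render engine makes.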
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 4bc67e8..e1cc5ec 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -25,40 +25,205 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 
+#include <android-base/properties.h>
+
 #include <stdint.h>
 
 namespace android {
 
+const double JITTER_MULTIPLE = 1.5f;
+
 // static
 AAVCAssembler::AAVCAssembler(const sp<AMessage> &notify)
     : mNotifyMsg(notify),
       mAccessUnitRTPTime(0),
       mNextExpectedSeqNoValid(false),
       mNextExpectedSeqNo(0),
-      mAccessUnitDamaged(false) {
+      mAccessUnitDamaged(false),
+      mFirstIFrameProvided(false),
+      mLastIFrameProvidedAtMs(0),
+      mLastRtpTimeJitterDataUs(0),
+      mWidth(0),
+      mHeight(0) {
 }
 
 AAVCAssembler::~AAVCAssembler() {
 }
 
+int32_t AAVCAssembler::addNack(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer>> *queue = source->queue();
+    int32_t nackCount = 0;
+
+    List<sp<ABuffer> >::iterator it = queue->begin();
+
+    if (it == queue->end()) {
+        return nackCount /* 0 */;
+    }
+
+    uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+    // move to the packet after which RTCP:NACK was sent.
+    for (; it != queue->end(); ++it) {
+        int32_t seqNum = (*it)->int32Data();
+        if (seqNum >= source->mHighestNackNumber) {
+            break;
+        }
+    }
+
+    int32_t nackStartAt = -1;
+
+    while (it != queue->end()) {
+        int32_t seqBeforeLast = (*it)->int32Data();
+        // increase iterator.
+        if ((++it) == queue->end()) {
+            break;
+        }
+        int32_t seqLast = (*it)->int32Data();
+
+        if ((seqLast - seqBeforeLast) < 0) {
+            ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+            source->mHighestNackNumber = 0;
+        }
+
+        // missed packet found
+        if (seqLast > (seqBeforeLast + 1) &&
+                // we didn't send RTCP:NACK for this packet yet.
+                (seqLast - 1) > source->mHighestNackNumber) {
+            source->mHighestNackNumber = seqLast - 1;
+            nackStartAt = seqBeforeLast + 1;
+            break;
+        }
+
+    }
+
+    if (nackStartAt != -1) {
+        nackCount = source->mHighestNackNumber - nackStartAt + 1;
+        ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+                nackStartAt, source->mHighestNackNumber);
+
+        uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+        source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+    }
+
+    return nackCount;
+}
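The shift that builds the NACK mask above is terse. The sketch below uses invented sequence numbers and assumes setSeqNumToNACK() treats the value as an RFC 4585 style "bitmask of following lost packets", where bit i marks pivot + 1 + i as lost and the first lost packet is carried separately as the pivot; it is an illustration, not code from this patch.

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t nackStartAt = 100;                                   // first missing seq number (pivot)
        int32_t highestNackNumber = 102;                             // last missing seq number
        int32_t nackCount = highestNackNumber - nackStartAt + 1;     // 3 packets lost
        uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);  // 0x0003
        // bit 0 -> seq 101, bit 1 -> seq 102; seq 100 itself is reported as the pivot.
        std::printf("pivot=%d mask=0x%04x\n", nackStartAt, (unsigned)mask);
        return 0;
    }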
+
 ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
         const sp<ARTPSource> &source) {
     List<sp<ABuffer> > *queue = source->queue();
+    const uint32_t firstRTPTime = source->mFirstRtpTime;
 
     if (queue->empty()) {
         return NOT_ENOUGH_DATA;
     }
 
-    if (mNextExpectedSeqNoValid) {
-        List<sp<ABuffer> >::iterator it = queue->begin();
-        while (it != queue->end()) {
-            if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
-                break;
-            }
+    sp<ABuffer> buffer = *queue->begin();
+    buffer->meta()->setObject("source", source);
 
-            it = queue->erase(it);
+    /**
+     * RFC 3550 calculates the interarrival jitter time over 'ALL packets'.
+     * But that is not useful as an ingredient of the buffering time.
+     * Instead, we calculate the time only across 'NAL units'.
+     */
+    int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+    int64_t nowTimeUs = ALooper::GetNowUs();
+    if (rtpTime != mLastRtpTimeJitterDataUs) {
+        source->putBaseJitterData(rtpTime, nowTimeUs);
+        mLastRtpTimeJitterDataUs = rtpTime;
+    }
+    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
+
+    const int64_t startTimeMs = source->mFirstSysTime / 1000;
+    const int64_t nowTimeMs = nowTimeUs / 1000;
+    const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+    const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+    const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
+    const int64_t clockRate = source->mClockRate;
+
+    int64_t playedTimeMs = nowTimeMs - startTimeMs;
+    int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
+
+    /**
+     * Based on experiences in real commercial network services,
+     * 300 ms is a maximum heuristic jitter buffer time for video RTP service.
+     */
+
+    /**
+     * The base jitter is an expected additional propagation time.
+     * We can drop packets whose delay exceeds this budget.
+     * A shorter value gives a faster response but forces more delayed packets to be dropped.
+     * Expected range: 50 ms ~ 1000 ms (300 ms is a practical upper bound)
+     */
+    const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
+    /**
+     * Dynamic jitter is the variance of the interarrival time, as defined in section 6.4.1 of
+     * RFC 3550. We can regard it as a tolerance on the arrival time of each packet.
+     * Expected range: 0 ms ~ 150 ms (practically not above 300 ms)
+     */
+    const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+    const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+    /* Fundamental jitter time */
+    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
+
+    // Until (T), this assembler waits unconditionally to collect the current NAL unit
+    int64_t expiredTimeRtp = rtpTime + jitterTimeRtp;       // When does this buffer expire? (T)
+    int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
+    bool isExpired = (diffTimeRtp >= 0);                    // It's expired once T has passed
+
+    // From (T), this assembler tries to complete the NAL till (T + try)
+    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+    bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+    // After (T + try), it gives a last chance until (T + try + a), with warning messages.
+    int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE;     // Use Dyn as 'a'
+    bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha));   // The Maginot line
+
+    if (mShowQueue && mShowQueueCnt < 20) {
+        showCurrentQueue(queue);
+        printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
+        mShowQueueCnt++;
+    }
+
+    AAVCAssembler::addNack(source);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isFirstLineBroken) {
+        if (isSecondLineBroken) {
+            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+                    "Seq# %d \t ExpSeq# %d \t"
+                    "JitterMs %d + (%d + %d * %.3f)",
+                    (long long)diffTimeRtp, (long long)totalDiffTimeMs,
+                    buffer->int32Data(), mNextExpectedSeqNo,
+                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+            printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+            printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
+
+            mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
+        } else {
+            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+        }
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        int32_t size = queue->size();
+        int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+        if (cntRemove > 0) {
+            source->noticeAbandonBuffer(cntRemove);
+            ALOGW("delete %d of %d buffers", cntRemove, size);
         }
 
         if (queue->empty()) {
@@ -66,7 +231,7 @@
         }
     }
 
-    sp<ABuffer> buffer = *queue->begin();
+    buffer = *queue->begin();
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
@@ -123,15 +288,72 @@
     }
 }
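To make the deadline bookkeeping in addNALUnit() concrete, here is a hypothetical, self-contained walk-through. It assumes MsToRtp(ms, clockRate) == ms * clockRate / 1000, the conventional conversion for a 90 kHz video clock; the real MsToRtp/RtpToMs helpers belong to the assembler code and are not shown in this hunk, and all numbers below are invented.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Assumed conversion: RTP ticks = milliseconds * clockRate / 1000.
    static int64_t MsToRtp(int64_t ms, int64_t clockRate) { return ms * clockRate / 1000; }

    int main() {
        const int64_t clockRate = 90000;         // 90 kHz RTP video clock
        const int32_t staticJitterTimeMs = 100;  // configured floor
        const int32_t baseJitterTimeMs = 120;    // measured propagation estimate
        const int32_t dynamicJitterTimeMs = 40;  // RFC 3550 interarrival variance

        const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
        const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
        const int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;

        // Suppose playback has advanced 180 ms past this NAL unit's rtpTime.
        const int64_t diffTimeRtp = MsToRtp(180, clockRate) - MsToRtp(baseJbTimeMs, clockRate);

        bool isExpired = diffTimeRtp >= 0;                                       // past T
        bool isFirstLineBroken = diffTimeRtp > MsToRtp(tryJbTimeMs, clockRate);  // past T + try
        bool isSecondLineBroken = diffTimeRtp >
                MsToRtp(tryJbTimeMs, clockRate) +
                (int64_t)(MsToRtp(dynamicJbTimeMs, clockRate) * 1.5);            // past T + try + a

        std::printf("expired=%d firstLine=%d secondLine=%d\n",
                    isExpired, isFirstLineBroken, isSecondLineBroken);
        return 0;
    }

With these example numbers the unit is past T but still inside the "try" window, so the assembler would keep trying to complete the NAL unit rather than resynchronize via pickProperSeq().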
 
+void AAVCAssembler::checkSpsUpdated(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    unsigned nalType = data[0] & 0x1f;
+    if (nalType == 0x7) {
+        int32_t width = 0, height = 0;
+        FindAVCDimensions(buffer, &width, &height);
+        if (width != mWidth || height != mHeight) {
+            mFirstIFrameProvided = false;
+            mWidth = width;
+            mHeight = height;
+            ALOGD("found a new resolution (%u x %u)", mWidth, mHeight);
+        }
+    }
+}
+
+void AAVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = data[0] & 0x1f;
+    if (nalType == 0x5) {
+        mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+        if (!mFirstIFrameProvided) {
+            mFirstIFrameProvided = true;
+
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+            ALOGD("got First I-frame to be decoded. rtpTime=%d, size=%zu", rtpTime, buffer->size());
+        }
+    }
+}
+
+bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    unsigned nalType = data[0] & 0x1f;
+    if (!mFirstIFrameProvided && nalType < 0x5) {
+        return true;
+    }
+
+    return false;
+}
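The three helpers above all key off the H.264 nal_unit_type, which is the low five bits of the first payload byte: 7 marks an SPS (used to detect a resolution change), 5 an IDR slice, and 1 a non-IDR slice. A tiny sketch with made-up payload bytes:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint8_t sps[] = {0x67, 0x42, 0x00, 0x1f};  // 0x67 & 0x1f == 7  (SPS)
        const uint8_t idr[] = {0x65, 0x88, 0x84, 0x00};  // 0x65 & 0x1f == 5  (IDR slice)
        std::printf("sps nalType=%d, idr nalType=%d\n", sps[0] & 0x1f, idr[0] & 0x1f);
        return 0;
    }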
+
 void AAVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
     ALOGV("addSingleNALUnit of size %zu", buffer->size());
 #if !LOG_NDEBUG
     hexdump(buffer->data(), buffer->size());
 #endif
 
+    checkSpsUpdated(buffer);
+    checkIFrameProvided(buffer);
+
     uint32_t rtpTime;
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
 
+    if (dropFramesUntilIframe(buffer)) {
+        sp<ARTPSource> source = nullptr;
+        buffer->meta()->findObject("source", (sp<android::RefBase>*)&source);
+        if (source != nullptr) {
+            ALOGD("Issued FIR to get the I-frame");
+            source->onIssueFIRByAssembler();
+        }
+        ALOGV("Dropping P-frame till I-frame provided. rtpTime %u", rtpTime);
+        return;
+    }
+
     if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
         submitAccessUnit();
     }
@@ -216,6 +438,11 @@
     size_t totalCount = 1;
     bool complete = false;
 
+    uint32_t rtpTimeStartAt;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+    uint32_t startSeqNo = buffer->int32Data();
+    bool pFrame = nalType == 0x1;
+
     if (data[1] & 0x40) {
         // Huh? End bit also set on the first buffer.
 
@@ -224,6 +451,8 @@
         complete = true;
     } else {
         List<sp<ABuffer> >::iterator it = ++queue->begin();
+        int32_t connected = 1;
+        bool snapped = false;
         while (it != queue->end()) {
             ALOGV("sequence length %zu", totalCount);
 
@@ -233,26 +462,32 @@
             size_t size = buffer->size();
 
             if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
-                ALOGV("sequence not complete, expected seqNo %d, got %d",
-                     expectedSeqNo, (uint32_t)buffer->int32Data());
+                ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
+                     expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
+                snapped = true;
 
-                return WRONG_SEQUENCE_NUMBER;
+                if (!pFrame) {
+                    return WRONG_SEQUENCE_NUMBER;
+                }
             }
 
+            if (!snapped) {
+                connected++;
+            }
+
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
             if (size < 2
                     || data[0] != indicator
                     || (data[1] & 0x1f) != nalType
-                    || (data[1] & 0x80)) {
+                    || (data[1] & 0x80)
+                    || rtpTime != rtpTimeStartAt) {
                 ALOGV("Ignoring malformed FU buffer.");
 
                 // Delete the whole start of the FU.
 
-                it = queue->begin();
-                for (size_t i = 0; i <= totalCount; ++i) {
-                    it = queue->erase(it);
-                }
-
                 mNextExpectedSeqNo = expectedSeqNo + 1;
+                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
                 return MALFORMED_PACKET;
             }
@@ -260,9 +495,17 @@
             totalSize += size - 2;
             ++totalCount;
 
-            expectedSeqNo = expectedSeqNo + 1;
+            expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
 
             if (data[1] & 0x40) {
+                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+                            connected, totalCount, 0.5f)) {
+                    mNextExpectedSeqNo = expectedSeqNo;
+                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                    return MALFORMED_PACKET;
+                }
+
                 // This is the last fragment.
                 complete = true;
                 break;
@@ -290,6 +533,8 @@
     unit->data()[0] = (nri << 5) | nalType;
 
     size_t offset = 1;
+    int32_t cvo = -1;
+    sp<ARTPSource> source = nullptr;
     List<sp<ABuffer> >::iterator it = queue->begin();
     for (size_t i = 0; i < totalCount; ++i) {
         const sp<ABuffer> &buffer = *it;
@@ -300,6 +545,9 @@
 #endif
 
         memcpy(unit->data() + offset, buffer->data() + 2, buffer->size() - 2);
+
+        buffer->meta()->findObject("source", (sp<android::RefBase>*)&source);
+        buffer->meta()->findInt32("cvo", &cvo);
         offset += buffer->size() - 2;
 
         it = queue->erase(it);
@@ -307,6 +555,13 @@
 
     unit->setRange(0, totalSize);
 
+    if (cvo >= 0) {
+        unit->meta()->setInt32("cvo", cvo);
+    }
+    if (source != nullptr) {
+        unit->meta()->setObject("source", source);
+    }
+
     addSingleNALUnit(unit);
 
     ALOGV("successfully assembled a NAL unit from fragments.");
@@ -317,7 +572,11 @@
 void AAVCAssembler::submitAccessUnit() {
     CHECK(!mNALUnits.empty());
 
-    ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+    if (android::base::GetBoolProperty("debug.stagefright.fps", false)) {
+        ALOGD("Access unit complete (%zu nal units)", mNALUnits.size());
+    } else {
+        ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+    }
 
     size_t totalSize = 0;
     for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
@@ -327,6 +586,7 @@
 
     sp<ABuffer> accessUnit = new ABuffer(totalSize);
     size_t offset = 0;
+    int32_t cvo = -1;
     for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
          it != mNALUnits.end(); ++it) {
         memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
@@ -335,6 +595,8 @@
         sp<ABuffer> nal = *it;
         memcpy(accessUnit->data() + offset, nal->data(), nal->size());
         offset += nal->size();
+
+        nal->meta()->findInt32("cvo", &cvo);
     }
 
     CopyTimes(accessUnit, *mNALUnits.begin());
@@ -343,6 +605,9 @@
     printf(mAccessUnitDamaged ? "X" : ".");
     fflush(stdout);
 #endif
+    if (cvo >= 0) {
+        accessUnit->meta()->setInt32("cvo", cvo);
+    }
 
     if (mAccessUnitDamaged) {
         accessUnit->meta()->setInt32("damaged", true);
@@ -356,22 +621,68 @@
     msg->post();
 }
 
+int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+        uint32_t first, int64_t play, int64_t jit) {
+    sp<ABuffer> buffer = *(queue->begin());
+    int32_t nextSeqNo = buffer->int32Data();
+
+    Queue::const_iterator it = queue->begin();
+    while (it != queue->end()) {
+        int64_t rtpTime = findRTPTime(first, *it);
+        // if pkt in time exists, that should be the next pivot
+        if (rtpTime + jit >= play) {
+            nextSeqNo = (*it)->int32Data();
+            break;
+        }
+        it++;
+    }
+    return nextSeqNo;
+}
+
+bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+        size_t avail, float goodRatio) {
+    float total = end - start;
+    float valid = connected;
+    float exist = avail;
+    bool isRecycle = (valid / total) >= goodRatio;
+
+    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+            exist, valid, total, isRecycle);
+
+    return isRecycle;
+}
+
+int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+    int32_t initSize = queue->size();
+    Queue::iterator it = queue->begin();
+    while (it != queue->end()) {
+        if ((uint32_t)(*it)->int32Data() >= seq) {
+            break;
+        }
+        it++;
+    }
+    queue->erase(queue->begin(), it);
+    return initSize - queue->size();
+}
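deleteUnitUnderSeq() is the primitive both recovery paths above lean on: it trims everything strictly below a target sequence number off the head of the queue and reports how many buffers were dropped. A minimal illustration, with plain integers standing in for the sequence numbers attached to each ABuffer (hypothetical helper, same contract):

    #include <cstdio>
    #include <list>

    static int deleteUnderSeq(std::list<int>* queue, int seq) {
        int initSize = (int)queue->size();
        std::list<int>::iterator it = queue->begin();
        while (it != queue->end() && *it < seq) {
            ++it;
        }
        queue->erase(queue->begin(), it);
        return initSize - (int)queue->size();
    }

    int main() {
        std::list<int> seqNums = {10, 11, 12, 15, 16};
        int removed = deleteUnderSeq(&seqNums, 15);          // drops 10, 11 and 12
        std::printf("removed=%d, new head=%d\n", removed, seqNums.front());
        return 0;
    }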
+
 ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
         const sp<ARTPSource> &source) {
     AssemblyStatus status = addNALUnit(source);
     if (status == MALFORMED_PACKET) {
-        mAccessUnitDamaged = true;
+        uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+        if (msecsSinceLastIFrame > 1000) {
+            ALOGV("request FIR to get a new I-Frame, time since "
+                    "last I-Frame %llu ms", (unsigned long long)msecsSinceLastIFrame);
+            source->onIssueFIRByAssembler();
+        }
     }
     return status;
 }
 
 void AAVCAssembler::packetLost() {
     CHECK(mNextExpectedSeqNoValid);
-    ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
-
+    ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
     ++mNextExpectedSeqNo;
-
-    mAccessUnitDamaged = true;
 }
 
 void AAVCAssembler::onByeReceived() {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index e19480c..8d19773 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -31,6 +31,7 @@
 struct AAVCAssembler : public ARTPAssembler {
     explicit AAVCAssembler(const sp<AMessage> &notify);
 
+    typedef List<sp<ABuffer> > Queue;
 protected:
     virtual ~AAVCAssembler();
 
@@ -45,8 +46,17 @@
     bool mNextExpectedSeqNoValid;
     uint32_t mNextExpectedSeqNo;
     bool mAccessUnitDamaged;
+    bool mFirstIFrameProvided;
+    uint64_t mLastIFrameProvidedAtMs;
+    int64_t mLastRtpTimeJitterDataUs;
+    int32_t mWidth;
+    int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
 
+    int32_t addNack(const sp<ARTPSource> &source);
+    void checkSpsUpdated(const sp<ABuffer> &buffer);
+    void checkIFrameProvided(const sp<ABuffer> &buffer);
+    bool dropFramesUntilIframe(const sp<ABuffer> &buffer);
     AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
     void addSingleNALUnit(const sp<ABuffer> &buffer);
     AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
@@ -54,6 +64,11 @@
 
     void submitAccessUnit();
 
+    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
+    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+            size_t avail, float goodRatio);
+    int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
+
     DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
 };
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
new file mode 100644
index 0000000..d32e85d
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -0,0 +1,709 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AHEVCAssembler"
+#include <utils/Log.h>
+
+#include "AHEVCAssembler.h"
+
+#include "ARTPSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <include/HevcUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include <stdint.h>
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+#define H265_NALU_AP 0x30
+#define H265_NALU_FU 0x31
+#define H265_NALU_PACI 0x32
+
+
+namespace android {
+
+const double JITTER_MULTIPLE = 1.5f;
+
+// static
+AHEVCAssembler::AHEVCAssembler(const sp<AMessage> &notify)
+    : mNotifyMsg(notify),
+      mAccessUnitRTPTime(0),
+      mNextExpectedSeqNoValid(false),
+      mNextExpectedSeqNo(0),
+      mAccessUnitDamaged(false),
+      mFirstIFrameProvided(false),
+      mLastIFrameProvidedAtMs(0),
+      mLastRtpTimeJitterDataUs(0),
+      mWidth(0),
+      mHeight(0) {
+
+      ALOGV("Constructor");
+}
+
+AHEVCAssembler::~AHEVCAssembler() {
+}
+
+int32_t AHEVCAssembler::addNack(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer>> *queue = source->queue();
+    int32_t nackCount = 0;
+
+    List<sp<ABuffer> >::iterator it = queue->begin();
+
+    if (it == queue->end()) {
+        return nackCount /* 0 */;
+    }
+
+    uint16_t queueHeadSeqNum = (*it)->int32Data();
+
+    // move to the packet after which RTCP:NACK was sent.
+    for (; it != queue->end(); ++it) {
+        int32_t seqNum = (*it)->int32Data();
+        if (seqNum >= source->mHighestNackNumber) {
+            break;
+        }
+    }
+
+    int32_t nackStartAt = -1;
+
+    while (it != queue->end()) {
+        int32_t seqBeforeLast = (*it)->int32Data();
+        // increase iterator.
+        if ((++it) == queue->end()) {
+            break;
+        }
+
+        int32_t seqLast = (*it)->int32Data();
+
+        if ((seqLast - seqBeforeLast) < 0) {
+            ALOGD("addNack: found end of seqNum from(%d) to(%d)", seqBeforeLast, seqLast);
+            source->mHighestNackNumber = 0;
+        }
+
+        // missed packet found
+        if (seqLast > (seqBeforeLast + 1) &&
+            // we didn't send RTCP:NACK for this packet yet.
+            (seqLast - 1) > source->mHighestNackNumber) {
+            source->mHighestNackNumber = seqLast - 1;
+            nackStartAt = seqBeforeLast + 1;
+            break;
+        }
+
+    }
+
+    if (nackStartAt != -1) {
+        nackCount = source->mHighestNackNumber - nackStartAt + 1;
+        ALOGD("addNack: nackCount=%d, nackFrom=%d, nackTo=%d", nackCount,
+            nackStartAt, source->mHighestNackNumber);
+
+        uint16_t mask = (uint16_t)(0xffff) >> (16 - nackCount + 1);
+        source->setSeqNumToNACK(nackStartAt, mask, queueHeadSeqNum);
+    }
+
+    return nackCount;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer> > *queue = source->queue();
+    const uint32_t firstRTPTime = source->mFirstRtpTime;
+
+    if (queue->empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    sp<ABuffer> buffer = *queue->begin();
+    buffer->meta()->setObject("source", source);
+
+    /**
+     * RFC 3550 calculates the interarrival jitter time over 'ALL packets'.
+     * But that is not useful as an ingredient of the buffering time.
+     * Instead, we calculate the time only across 'NAL units'.
+     */
+    int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+    int64_t nowTimeUs = ALooper::GetNowUs();
+    if (rtpTime != mLastRtpTimeJitterDataUs) {
+        source->putBaseJitterData(rtpTime, nowTimeUs);
+        mLastRtpTimeJitterDataUs = rtpTime;
+    }
+    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
+
+    const int64_t startTimeMs = source->mFirstSysTime / 1000;
+    const int64_t nowTimeMs = nowTimeUs / 1000;
+    const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+    const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+    const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
+    const int64_t clockRate = source->mClockRate;
+
+    int64_t playedTimeMs = nowTimeMs - startTimeMs;
+    int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
+
+    /**
+     * Based on experiences in real commercial network services,
+     * 300 ms is a maximum heuristic jitter buffer time for video RTP service.
+     */
+
+    /**
+     * The base jitter is an expected additional propagation time.
+     * We can drop packets whose delay exceeds this budget.
+     * A shorter value gives a faster response but forces more delayed packets to be dropped.
+     * Expected range: 50 ms ~ 1000 ms (300 ms is a practical upper bound)
+     */
+    const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
+    /**
+     * Dynamic jitter is the variance of the interarrival time, as defined in section 6.4.1 of
+     * RFC 3550. We can regard it as a tolerance on the arrival time of each packet.
+     * Expected range: 0 ms ~ 150 ms (practically not above 300 ms)
+     */
+    const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+    const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+    /* Fundamental jitter time */
+    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
+
+    // Until (T), this assembler waits unconditionally to collect the current NAL unit
+    int64_t expiredTimeRtp = rtpTime + jitterTimeRtp;       // When does this buffer expire? (T)
+    int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
+    bool isExpired = (diffTimeRtp >= 0);                    // It's expired once T has passed
+
+    // From (T), this assembler tries to complete the NAL till (T + try)
+    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+    bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+    // After (T + try), it gives a last chance until (T + try + a), with warning messages.
+    int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE;     // Use Dyn as 'a'
+    bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha));   // The Maginot line
+
+    if (mShowQueueCnt < 20) {
+        showCurrentQueue(queue);
+        printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+        printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
+        mShowQueueCnt++;
+    }
+
+    AHEVCAssembler::addNack(source);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isFirstLineBroken) {
+        if (isSecondLineBroken) {
+            int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+            ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+                    "Seq# %d \t ExpSeq# %d \t"
+                    "JitterMs %d + (%d + %d * %.3f)",
+                    (long long)diffTimeRtp, (long long)totalDiffTimeMs,
+                    buffer->int32Data(), mNextExpectedSeqNo,
+                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+            printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+            printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
+
+            mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
+        } else {
+            ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+                    jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+        }
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        int32_t size = queue->size();
+        int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+        if (cntRemove > 0) {
+            source->noticeAbandonBuffer(cntRemove);
+            ALOGW("delete %d of %d buffers", cntRemove, size);
+        }
+
+        if (queue->empty()) {
+            return NOT_ENOUGH_DATA;
+        }
+    }
+
+    buffer = *queue->begin();
+
+    if (!mNextExpectedSeqNoValid) {
+        mNextExpectedSeqNoValid = true;
+        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
+    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number I expected");
+
+        return WRONG_SEQUENCE_NUMBER;
+    }
+
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 1 || (data[0] & 0x80)) {
+        // Corrupt.
+
+        ALOGV("Ignoring corrupt buffer.");
+        queue->erase(queue->begin());
+
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType > 0 && nalType < H265_NALU_AP) {
+        addSingleNALUnit(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return OK;
+    } else if (nalType == H265_NALU_FU) {
+        // FU-A
+        return addFragmentedNALUnit(queue);
+    } else if (nalType == H265_NALU_AP) {
+        // STAP-A
+        bool success = addSingleTimeAggregationPacket(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return success ? OK : MALFORMED_PACKET;
+    } else if (nalType == 0) {
+        ALOGV("Ignoring undefined nal type.");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return OK;
+    } else {
+        ALOGV("Ignoring unsupported buffer (nalType=%d)", nalType);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return MALFORMED_PACKET;
+    }
+}
+
+void AHEVCAssembler::checkSpsUpdated(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    HevcParameterSets paramSets;
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType == H265_NALU_SPS) {
+        int32_t width = 0, height = 0;
+        paramSets.FindHEVCDimensions(buffer, &width, &height);
+        ALOGV("existing resolution (%u x %u)", mWidth, mHeight);
+        if (width != mWidth || height != mHeight) {
+            mFirstIFrameProvided = false;
+            mWidth = width;
+            mHeight = height;
+            ALOGD("found a new resolution (%u x %u)", mWidth, mHeight);
+        }
+    }
+}
+
+void AHEVCAssembler::checkIFrameProvided(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType > 0x0F && nalType < 0x18) {
+        mLastIFrameProvidedAtMs = ALooper::GetNowUs() / 1000;
+        if (!mFirstIFrameProvided) {
+            mFirstIFrameProvided = true;
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+            ALOGD("got First I-frame to be decoded. rtpTime=%d, size=%zu", rtpTime, buffer->size());
+        }
+    }
+}
+
+bool AHEVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        return false;
+    }
+    const uint8_t *data = buffer->data();
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    return !mFirstIFrameProvided && nalType < 0x10;
+}
+
+void AHEVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
+    ALOGV("addSingleNALUnit of size %zu", buffer->size());
+#if !LOG_NDEBUG
+    hexdump(buffer->data(), buffer->size());
+#endif
+    checkSpsUpdated(buffer);
+    checkIFrameProvided(buffer);
+
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+    if (dropFramesUntilIframe(buffer)) {
+        sp<ARTPSource> source = nullptr;
+        buffer->meta()->findObject("source", (sp<android::RefBase>*)&source);
+        if (source != nullptr) {
+            ALOGD("Issued FIR to get the I-frame");
+            source->onIssueFIRByAssembler();
+        }
+        ALOGD("drop P-frames till an I-frame provided. rtpTime %u", rtpTime);
+        return;
+    }
+
+    if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
+        submitAccessUnit();
+    }
+    mAccessUnitRTPTime = rtpTime;
+
+    mNALUnits.push_back(buffer);
+}
+
+bool AHEVCAssembler::addSingleTimeAggregationPacket(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 3) {
+        ALOGV("Discarding too small STAP-A packet.");
+        return false;
+    }
+
+    ++data;
+    --size;
+    while (size >= 2) {
+        size_t nalSize = (data[0] << 8) | data[1];
+
+        if (size < nalSize + 2) {
+            ALOGV("Discarding malformed STAP-A packet.");
+            return false;
+        }
+
+        sp<ABuffer> unit = new ABuffer(nalSize);
+        memcpy(unit->data(), &data[2], nalSize);
+
+        CopyTimes(unit, buffer);
+
+        addSingleNALUnit(unit);
+
+        data += 2 + nalSize;
+        size -= 2 + nalSize;
+    }
+
+    if (size != 0) {
+        ALOGV("Unexpected padding at end of STAP-A packet.");
+    }
+
+    return true;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addFragmentedNALUnit(
+        List<sp<ABuffer> > *queue) {
+    CHECK(!queue->empty());
+
+    sp<ABuffer> buffer = *queue->begin();
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    CHECK(size > 0);
+    /*   The H265 payload header is 16 bits
+        0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       |F|     Type  |  Layer ID | TID |
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    unsigned indicator = (data[0] >> 1);
+
+    CHECK((indicator & H265_NALU_MASK) == H265_NALU_FU);
+
+    if (size < 3) {
+        ALOGV("Ignoring malformed FU buffer (size = %zu)", size);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    if (!(data[2] & 0x80)) {
+        // Start bit not set on the first buffer.
+
+        ALOGV("Start bit not set on first buffer");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    /*  FU HEADER
+        0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+
+       |S|E|   Type    |
+       +-+-+-+-+-+-+-+-+
+     */
+    uint32_t nalType = data[2] & H265_NALU_MASK;
+    uint32_t tid = data[1] & 0x7;
+    ALOGV("nalType =%u, tid =%u", nalType, tid);
+
+    uint32_t expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+    size_t totalSize = size - 3;
+    size_t totalCount = 1;
+    bool complete = false;
+
+    uint32_t rtpTimeStartAt;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
+    uint32_t startSeqNo = buffer->int32Data();
+    bool pFrame = (nalType < 0x10);
+
+    if (data[2] & 0x40) {
+        // Huh? End bit also set on the first buffer.
+
+        ALOGV("Grrr. This isn't fragmented at all.");
+
+        complete = true;
+    } else {
+        List<sp<ABuffer> >::iterator it = ++queue->begin();
+        int32_t connected = 1;
+        bool snapped = false;
+        while (it != queue->end()) {
+            ALOGV("sequence length %zu", totalCount);
+
+            const sp<ABuffer> &buffer = *it;
+
+            const uint8_t *data = buffer->data();
+            size_t size = buffer->size();
+
+            if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
+                ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
+                     expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
+                snapped = true;
+
+                if (!pFrame) {
+                    return WRONG_SEQUENCE_NUMBER;
+                }
+            }
+
+            if (!snapped) {
+                connected++;
+            }
+
+            uint32_t rtpTime;
+            CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+            if (size < 3
+                    || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+                    || (data[2] & H265_NALU_MASK) != nalType
+                    || (data[2] & 0x80)
+                    || rtpTime != rtpTimeStartAt) {
+                ALOGV("Ignoring malformed FU buffer.");
+
+                // Delete the whole start of the FU.
+
+                mNextExpectedSeqNo = expectedSeqNo + 1;
+                deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                return MALFORMED_PACKET;
+            }
+
+            totalSize += size - 3;
+            ++totalCount;
+
+            expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+
+            if (data[2] & 0x40) {
+                if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
+                        connected, totalCount, 0.5f)) {
+                    mNextExpectedSeqNo = expectedSeqNo;
+                    deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
+
+                    return MALFORMED_PACKET;
+                }
+                // This is the last fragment.
+                complete = true;
+                break;
+            }
+
+            ++it;
+        }
+    }
+
+    if (!complete) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    mNextExpectedSeqNo = expectedSeqNo;
+
+    // We found all the fragments that make up the complete NAL unit.
+
+    // Leave room for the header. So far totalSize did not include the
+    // header byte.
+    totalSize += 2;
+
+    sp<ABuffer> unit = new ABuffer(totalSize);
+    CopyTimes(unit, *queue->begin());
+
+    unit->data()[0] = (nalType << 1);
+    unit->data()[1] = tid;
+
+    size_t offset = 2;
+    int32_t cvo = -1;
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    for (size_t i = 0; i < totalCount; ++i) {
+        const sp<ABuffer> &buffer = *it;
+
+        ALOGV("piece #%zu/%zu", i + 1, totalCount);
+#if !LOG_NDEBUG
+        hexdump(buffer->data(), buffer->size());
+#endif
+
+        memcpy(unit->data() + offset, buffer->data() + 3, buffer->size() - 3);
+        buffer->meta()->findInt32("cvo", &cvo);
+        offset += buffer->size() - 3;
+
+        it = queue->erase(it);
+    }
+
+    unit->setRange(0, totalSize);
+
+    if (cvo >= 0) {
+        unit->meta()->setInt32("cvo", cvo);
+    }
+
+    addSingleNALUnit(unit);
+
+    ALOGV("successfully assembled a NAL unit from fragments.");
+
+    return OK;
+}
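For reference, here is how the two headers diagrammed in the function above decompose. The sketch parses an invented three-byte FU start packet using the field layout from RFC 7798: a 16-bit payload header carrying F, Type, LayerId and TID, followed by the one-byte FU header carrying S, E and the real NAL type.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // 0x62 0x01 -> F=0, Type=49 (FU), LayerId=0, TID=1 ; 0x93 -> S=1, E=0, FuType=19
        const uint8_t pkt[] = {0x62, 0x01, 0x93};

        unsigned type    = (pkt[0] >> 1) & 0x3f;                      // H265_NALU_MASK
        unsigned layerId = ((pkt[0] & 0x01) << 5) | (pkt[1] >> 3);
        unsigned tid     = pkt[1] & 0x07;
        unsigned start   = (pkt[2] & 0x80) ? 1u : 0u;
        unsigned end     = (pkt[2] & 0x40) ? 1u : 0u;
        unsigned fuType  = pkt[2] & 0x3f;

        std::printf("type=%u layerId=%u tid=%u S=%u E=%u fuType=%u\n",
                    type, layerId, tid, start, end, fuType);
        return 0;
    }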
+
+void AHEVCAssembler::submitAccessUnit() {
+    CHECK(!mNALUnits.empty());
+
+    ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+
+    size_t totalSize = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        totalSize += 4 + (*it)->size();
+    }
+
+    sp<ABuffer> accessUnit = new ABuffer(totalSize);
+    size_t offset = 0;
+    int32_t cvo = -1;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
+        offset += 4;
+
+        sp<ABuffer> nal = *it;
+        memcpy(accessUnit->data() + offset, nal->data(), nal->size());
+        offset += nal->size();
+        nal->meta()->findInt32("cvo", &cvo);
+    }
+
+    CopyTimes(accessUnit, *mNALUnits.begin());
+
+#if 0
+    printf(mAccessUnitDamaged ? "X" : ".");
+    fflush(stdout);
+#endif
+    if (cvo >= 0) {
+        accessUnit->meta()->setInt32("cvo", cvo);
+    }
+
+    if (mAccessUnitDamaged) {
+        accessUnit->meta()->setInt32("damaged", true);
+    }
+
+    mNALUnits.clear();
+    mAccessUnitDamaged = false;
+
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setBuffer("access-unit", accessUnit);
+    msg->post();
+}
+
+int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+        uint32_t first, int64_t play, int64_t jit) {
+    sp<ABuffer> buffer = *(queue->begin());
+    int32_t nextSeqNo = buffer->int32Data();
+
+    Queue::const_iterator it = queue->begin();
+    while (it != queue->end()) {
+        int64_t rtpTime = findRTPTime(first, *it);
+        // if pkt in time exists, that should be the next pivot
+        if (rtpTime + jit >= play) {
+            nextSeqNo = (*it)->int32Data();
+            break;
+        }
+        it++;
+    }
+    return nextSeqNo;
+}
+
+bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+        size_t avail, float goodRatio) {
+    float total = end - start;
+    float valid = connected;
+    float exist = avail;
+    bool isRecycle = (valid / total) >= goodRatio;
+
+    ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
+            exist, valid, total, isRecycle);
+
+    return isRecycle;
+}
+
+int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
+    int32_t initSize = queue->size();
+    Queue::iterator it = queue->begin();
+    while (it != queue->end()) {
+        if ((uint32_t)(*it)->int32Data() >= seq) {
+            break;
+        }
+        it++;
+    }
+    queue->erase(queue->begin(), it);
+    return initSize - queue->size();
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
+        const sp<ARTPSource> &source) {
+    AssemblyStatus status = addNALUnit(source);
+    if (status == MALFORMED_PACKET) {
+        uint64_t msecsSinceLastIFrame = (ALooper::GetNowUs() / 1000) - mLastIFrameProvidedAtMs;
+        if (msecsSinceLastIFrame > 1000) {
+            ALOGV("request FIR to get a new I-Frame, time after "
+                    "last I-Frame in %llu ms", (unsigned long long)msecsSinceLastIFrame);
+            source->onIssueFIRByAssembler();
+        }
+    }
+    return status;
+}
+
+void AHEVCAssembler::packetLost() {
+    CHECK(mNextExpectedSeqNoValid);
+    ALOGD("packetLost (expected %u)", mNextExpectedSeqNo);
+
+    ++mNextExpectedSeqNo;
+}
+
+void AHEVCAssembler::onByeReceived() {
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setInt32("eos", true);
+    msg->post();
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
new file mode 100644
index 0000000..68777a7
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_HEVC_ASSEMBLER_H_
+
+#define A_HEVC_ASSEMBLER_H_
+
+#include "ARTPAssembler.h"
+
+#include <utils/List.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+struct AHEVCAssembler : public ARTPAssembler {
+    AHEVCAssembler(const sp<AMessage> &notify);
+
+    typedef List<sp<ABuffer> > Queue;
+
+protected:
+    virtual ~AHEVCAssembler();
+
+    virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source);
+    virtual void onByeReceived();
+    virtual void packetLost();
+
+private:
+    sp<AMessage> mNotifyMsg;
+
+    uint32_t mAccessUnitRTPTime;
+    bool mNextExpectedSeqNoValid;
+    uint32_t mNextExpectedSeqNo;
+    bool mAccessUnitDamaged;
+    bool mFirstIFrameProvided;
+    uint64_t mLastIFrameProvidedAtMs;
+    int64_t mLastRtpTimeJitterDataUs;
+    int32_t mWidth;
+    int32_t mHeight;
+    List<sp<ABuffer> > mNALUnits;
+
+    int32_t addNack(const sp<ARTPSource> &source);
+    void checkSpsUpdated(const sp<ABuffer> &buffer);
+    void checkIFrameProvided(const sp<ABuffer> &buffer);
+    bool dropFramesUntilIframe(const sp<ABuffer> &buffer);
+    AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
+    void addSingleNALUnit(const sp<ABuffer> &buffer);
+    AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
+    bool addSingleTimeAggregationPacket(const sp<ABuffer> &buffer);
+
+    void submitAccessUnit();
+
+    int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
+    bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
+             size_t avail, float goodRatio);
+    int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
+};
+
+}  // namespace android
+
+#endif  // A_HEVC_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 574bd7a..169df46 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -454,6 +454,17 @@
 
         mFormat->setInt32(kKeyWidth, width);
         mFormat->setInt32(kKeyHeight, height);
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+        int32_t width, height;
+        if (!sessionDesc->getDimensions(index, PT, &width, &height)) {
+            width = -1;
+            height = -1;
+        }
+
+        mFormat->setInt32(kKeyWidth, width);
+        mFormat->setInt32(kKeyHeight, height);
     } else if (!strncmp(desc.c_str(), "H263-2000/", 10)
             || !strncmp(desc.c_str(), "H263-1998/", 10)) {
         mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
@@ -583,4 +594,15 @@
     return mFormat;
 }
 
+bool APacketSource::isVideo() {
+    bool isVideo = false;
+
+    const char *mime;
+    if (mFormat->findCString(kKeyMIMEType, &mime)) {
+        isVideo = !strncasecmp(mime, "video/", 6);
+    }
+
+    return isVideo;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/APacketSource.h
index 530e537..2b9b5ba 100644
--- a/media/libstagefright/rtsp/APacketSource.h
+++ b/media/libstagefright/rtsp/APacketSource.h
@@ -33,6 +33,8 @@
 
     virtual sp<MetaData> getFormat();
 
+    bool isVideo();
+
 protected:
     virtual ~APacketSource();
 
diff --git a/media/libstagefright/rtsp/ARTPAssembler.cpp b/media/libstagefright/rtsp/ARTPAssembler.cpp
index befc226..52aa3a0 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.cpp
+++ b/media/libstagefright/rtsp/ARTPAssembler.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "ARTPAssembler"
 #include "ARTPAssembler.h"
 
 #include <media/stagefright/foundation/ABuffer.h>
@@ -21,12 +22,16 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#include <android-base/properties.h>
+
 #include <stdint.h>
 
 namespace android {
 
 ARTPAssembler::ARTPAssembler()
-    : mFirstFailureTimeUs(-1) {
+    : mShowQueueCnt(0),
+      mFirstFailureTimeUs(-1) {
+    mShowQueue = android::base::GetBoolProperty("debug.stagefright.rtp", false);
 }
 
 void ARTPAssembler::onPacketReceived(const sp<ARTPSource> &source) {
@@ -141,4 +146,15 @@
     return accessUnit;
 }
 
+void ARTPAssembler::showCurrentQueue(List<sp<ABuffer> > *queue) {
+    AString temp("Queue elem size : ");
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    while (it != queue->end()) {
+        temp.append((*it)->size());
+        temp.append("  \t");
+        it++;
+    }
+    ALOGD("%s",temp.c_str());
+};
+
 }  // namespace android
diff --git a/media/libstagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/ARTPAssembler.h
index 4082d4c..f959c40 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/ARTPAssembler.h
@@ -19,6 +19,9 @@
 #define A_RTP_ASSEMBLER_H_
 
 #include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
 
@@ -56,12 +59,52 @@
     static sp<ABuffer> MakeCompoundFromPackets(
             const List<sp<ABuffer> > &frames);
 
+    void showCurrentQueue(List<sp<ABuffer> > *queue);
+
+    bool mShowQueue;
+    int32_t mShowQueueCnt;
+
+    // Utility functions
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
 private:
     int64_t mFirstFailureTimeUs;
 
     DISALLOW_EVIL_CONSTRUCTORS(ARTPAssembler);
 };
 
+inline int64_t ARTPAssembler::findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+    /* Keep rtpTime declared as int64_t when adding, subtracting, or multiplying it:
+       the value can be near UINT32_MAX, so 32-bit arithmetic may overflow. */
+    int64_t rtpTime = 0;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+    int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+    return rtpTime | overflowMask;
+}
+
+inline int64_t ARTPAssembler::MsToRtp(int64_t ms, int64_t clockRate) {
+    return ms * clockRate / 1000;
+}
+
+inline int64_t ARTPAssembler::RtpToMs(int64_t rtp, int64_t clockRate) {
+    return rtp * 1000 / clockRate;
+}
+
+inline void ARTPAssembler::printNowTimeMs(int64_t start, int64_t now, int64_t play) {
+    ALOGD("start=%lld, now=%lld, played=%lld",
+            (long long)start, (long long)now, (long long)play);
+}
+
+inline void ARTPAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+    ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+            (long long)rtp, (long long)play, (long long)exp, isExp);
+}
+
 }  // namespace android
 
 #endif  // A_RTP_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 6a4706d..a4da433 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -30,6 +30,8 @@
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/hexdump.h>
 
+#include <android/multinetwork.h>
+
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
@@ -53,6 +55,7 @@
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
 
 struct ARTPConnection::StreamInfo {
+    bool isIPv6;
     int mRTPSocket;
     int mRTCPSocket;
     sp<ASessionDescription> mSessionDesc;
@@ -63,14 +66,23 @@
     int64_t mNumRTCPPacketsReceived;
     int64_t mNumRTPPacketsReceived;
     struct sockaddr_in mRemoteRTCPAddr;
+    struct sockaddr_in6 mRemoteRTCPAddr6;
 
     bool mIsInjected;
+
+    // Records the time of the most recent poll on this stream
+    int64_t mLastPollTimeUs;
+    // RTCP Extension for CVO
+    int mCVOExtMap; // will be set to 0 if cvo is not negotiated in sdp
 };
 
 ARTPConnection::ARTPConnection(uint32_t flags)
     : mFlags(flags),
       mPollEventPending(false),
-      mLastReceiverReportTimeUs(-1) {
+      mLastReceiverReportTimeUs(-1),
+      mLastBitrateReportTimeUs(-1),
+      mTargetBitrate(-1),
+      mStaticJitterTimeMs(kStaticJitterTimeMs) {
 }
 
 ARTPConnection::~ARTPConnection() {
@@ -92,6 +104,11 @@
     msg->post();
 }
 
+void ARTPConnection::seekStream() {
+    sp<AMessage> msg = new AMessage(kWhatSeekStream, this);
+    msg->post();
+}
+
 void ARTPConnection::removeStream(int rtpSocket, int rtcpSocket) {
     sp<AMessage> msg = new AMessage(kWhatRemoveStream, this);
     msg->setInt32("rtp-socket", rtpSocket);
@@ -121,7 +138,7 @@
     unsigned start = (unsigned)((rand()* 1000LL)/RAND_MAX) + 15550;
     start &= ~1;
 
-    for (unsigned port = start; port < 65536; port += 2) {
+    for (unsigned port = start; port < 65535; port += 2) {
         struct sockaddr_in addr;
         memset(addr.sin_zero, 0, sizeof(addr.sin_zero));
         addr.sin_family = AF_INET;
@@ -139,12 +156,130 @@
                  (const struct sockaddr *)&addr, sizeof(addr)) == 0) {
             *rtpPort = port;
             return;
+        } else {
+            // recreate the RTP socket before trying another port, rather than reusing the same socket
+            close(*rtpSocket);
+
+            *rtpSocket = socket(AF_INET, SOCK_DGRAM, 0);
+            CHECK_GE(*rtpSocket, 0);
+            bumpSocketBufferSize(*rtpSocket);
         }
     }
 
     TRESPASS();
 }
 
+// static
+void ARTPConnection::MakeRTPSocketPair(
+        int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+    bool isIPv6 = false;
+    if (strchr(localIp, ':') != NULL)
+        isIPv6 = true;
+
+    *rtpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtpSocket, 0);
+
+    bumpSocketBufferSize(*rtpSocket);
+
+    *rtcpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtcpSocket, 0);
+
+    if (socketNetwork != 0) {
+        ALOGD("trying to bind rtp socket(%d) to network(%llu).",
+                *rtpSocket, (unsigned long long)socketNetwork);
+
+        int result = android_setsocknetwork((net_handle_t)socketNetwork, *rtpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                    result, *rtpSocket, (unsigned long long)socketNetwork);
+        }
+        result = android_setsocknetwork((net_handle_t)socketNetwork, *rtcpSocket);
+        if (result != 0) {
+            ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                    result, *rtcpSocket, (unsigned long long)socketNetwork);
+        }
+    }
+
+    bumpSocketBufferSize(*rtcpSocket);
+
+    struct sockaddr *addr;
+    struct sockaddr_in addr4;
+    struct sockaddr_in6 addr6;
+
+    if (isIPv6) {
+        addr = (struct sockaddr *)&addr6;
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)localPort);
+    } else {
+        addr = (struct sockaddr *)&addr4;
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(localIp);
+        addr4.sin_port = htons((uint16_t)localPort);
+    }
+
+    int sockopt = 1;
+    setsockopt(*rtpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(*rtcpSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    int sizeSockSt = isIPv6 ? sizeof(addr6) : sizeof(addr4);
+
+    if (bind(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully binded. addr=%s:%d", localIp, localPort);
+    } else {
+        ALOGE("failed to bind rtp socket addr=%s:%d err=%s", localIp, localPort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(localPort + 1);
+    else
+        addr4.sin_port = htons(localPort + 1);
+
+    if (bind(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully binded. addr=%s:%d", localIp, localPort + 1);
+    } else {
+        ALOGE("failed to bind rtcp socket addr=%s:%d err=%s", localIp,
+                localPort + 1, strerror(errno));
+    }
+
+    // Reuse the addr variable for the remote address.
+    if (isIPv6) {
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)remotePort);
+    } else {
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(remoteIp);
+        addr4.sin_port = htons((uint16_t)remotePort);
+    }
+    if (connect(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully connected to remote=%s:%d", remoteIp, remotePort);
+    } else {
+        ALOGE("failed to connect rtp socket to remote addr=%s:%d err=%s", remoteIp,
+                remotePort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(remotePort + 1);
+    else
+        addr4.sin_port = htons(remotePort + 1);
+
+    if (connect(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully connected to remote=%s:%d", remoteIp, remotePort + 1);
+    } else {
+        ALOGE("failed to connect rtcp socket addr=%s:%d err=%s", remoteIp,
+                remotePort + 1, strerror(errno));
+        return;
+    }
+}
+
 void ARTPConnection::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatAddStream:
@@ -153,6 +288,12 @@
             break;
         }
 
+        case kWhatSeekStream:
+        {
+            onSeekStream(msg);
+            break;
+        }
+
         case kWhatRemoveStream:
         {
             onRemoveStream(msg);
@@ -204,12 +345,37 @@
     info->mNumRTCPPacketsReceived = 0;
     info->mNumRTPPacketsReceived = 0;
     memset(&info->mRemoteRTCPAddr, 0, sizeof(info->mRemoteRTCPAddr));
+    memset(&info->mRemoteRTCPAddr6, 0, sizeof(info->mRemoteRTCPAddr6));
+
+    sp<ASessionDescription> sessionDesc = info->mSessionDesc;
+    info->mCVOExtMap = 0;
+    for (size_t i = 1; i < sessionDesc->countTracks(); ++i) {
+        int32_t cvoExtMap;
+        if (sessionDesc->getCvoExtMap(i, &cvoExtMap)) {
+            info->mCVOExtMap = cvoExtMap;
+            ALOGI("urn:3gpp:video-orientation(cvo) found as extmap:%d", info->mCVOExtMap);
+        } else {
+            ALOGI("urn:3gpp:video-orientation(cvo) not found :%d", info->mCVOExtMap);
+        }
+    }
 
     if (!injected) {
         postPollEvent();
     }
 }
 
+void ARTPConnection::onSeekStream(const sp<AMessage> &msg) {
+    (void)msg; // unused param as of now.
+    List<StreamInfo>::iterator it = mStreams.begin();
+    while (it != mStreams.end()) {
+        for (size_t i = 0; i < it->mSources.size(); ++i) {
+            sp<ARTPSource> source = it->mSources.valueAt(i);
+            source->timeReset();
+        }
+        ++it;
+    }
+}
+
 void ARTPConnection::onRemoveStream(const sp<AMessage> &msg) {
     int32_t rtpSocket, rtcpSocket;
     CHECK(msg->findInt32("rtp-socket", &rtpSocket));
@@ -275,6 +441,7 @@
         return;
     }
 
+    int64_t nowUs = ALooper::GetNowUs();
     int res = select(maxSocket + 1, &rs, NULL, NULL, &tv);
 
     if (res > 0) {
@@ -284,6 +451,7 @@
                 ++it;
                 continue;
             }
+            it->mLastPollTimeUs = nowUs;
 
             status_t err = OK;
             if (FD_ISSET(it->mRTPSocket, &rs)) {
@@ -295,17 +463,58 @@
 
             if (err == -ECONNRESET) {
                 // socket failure, this stream is dead, Jim.
+                sp<AMessage> notify = it->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 400);
+                notify->setInt32("feedback-type", 1);
+                notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
+                notify->post();
 
                 ALOGW("failed to receive RTP/RTCP datagram.");
                 it = mStreams.erase(it);
                 continue;
             }
 
+            // add NACK and FIR that needs to be sent immediately.
+            sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+            for (size_t i = 0; i < it->mSources.size(); ++i) {
+                buffer->setRange(0, 0);
+                int cnt = it->mSources.valueAt(i)->addNACK(buffer);
+                if (cnt > 0) {
+                    ALOGV("Send NACK for lost %d Packets", cnt);
+                    send(&*it, buffer);
+                }
+
+                buffer->setRange(0, 0);
+                it->mSources.valueAt(i)->addFIR(buffer);
+                if (buffer->size() > 0) {
+                    ALOGD("Send FIR immediately for lost Packets");
+                    send(&*it, buffer);
+                }
+
+                buffer->setRange(0, 0);
+                it->mSources.valueAt(i)->addTMMBR(buffer, mTargetBitrate);
+                mTargetBitrate = -1;
+                if (buffer->size() > 0) {
+                    ALOGV("Sending TMMBR...");
+                    ssize_t n = send(&*it, buffer);
+
+                    if (n != (ssize_t)buffer->size()) {
+                        ALOGW("failed to send RTCP TMMBR (%s).",
+                                n >= 0 ? "connection gone" : strerror(errno));
+
+                        it = mStreams.erase(it);
+                        continue;
+                    }
+                }
+            }
+
             ++it;
         }
     }
 
-    int64_t nowUs = ALooper::GetNowUs();
+    checkRxBitrate(nowUs);
+
     if (mLastReceiverReportTimeUs <= 0
             || mLastReceiverReportTimeUs + 5000000LL <= nowUs) {
         sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
@@ -340,24 +549,16 @@
             if (buffer->size() > 0) {
                 ALOGV("Sending RR...");
 
-                ssize_t n;
-                do {
-                    n = sendto(
-                        s->mRTCPSocket, buffer->data(), buffer->size(), 0,
-                        (const struct sockaddr *)&s->mRemoteRTCPAddr,
-                        sizeof(s->mRemoteRTCPAddr));
-                } while (n < 0 && errno == EINTR);
+                ssize_t n = send(s, buffer);
 
-                if (n <= 0) {
+                if (n != (ssize_t)buffer->size()) {
                     ALOGW("failed to send RTCP receiver report (%s).",
-                         n == 0 ? "connection gone" : strerror(errno));
+                            n >= 0 ? "connection gone" : strerror(errno));
 
                     it = mStreams.erase(it);
                     continue;
                 }
 
-                CHECK_EQ(n, (ssize_t)buffer->size());
-
                 mLastReceiverReportTimeUs = nowUs;
             }
 
@@ -377,9 +578,22 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
+    struct sockaddr *pRemoteRTCPAddr;
+    int sizeSockSt;
+    if (s->isIPv6) {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
+        sizeSockSt = sizeof(struct sockaddr_in6);
+    } else {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
+        sizeSockSt = sizeof(struct sockaddr_in);
+    }
     socklen_t remoteAddrLen =
         (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeof(s->mRemoteRTCPAddr) : 0;
+            ? sizeSockSt : 0;
+
+    if (mFlags & kViLTEConnection) {
+        remoteAddrLen = 0;
+    }
 
     ssize_t nbytes;
     do {
@@ -388,8 +602,9 @@
             buffer->data(),
             buffer->capacity(),
             0,
-            remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
+            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
             remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
     if (nbytes <= 0) {
@@ -410,6 +625,36 @@
     return err;
 }
 
+ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
+        struct sockaddr* pRemoteRTCPAddr;
+        int sizeSockSt;
+
+        /* The isIPv6 flag seems redundant here;
+         * consider removing it to prevent confusion. */
+        if (info->isIPv6) {
+            pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
+            sizeSockSt = sizeof(struct sockaddr_in6);
+        } else {
+            pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr;
+            sizeSockSt = sizeof(struct sockaddr_in);
+        }
+
+        if (mFlags & kViLTEConnection) {
+            ALOGV("ViLTE RTCP");
+            pRemoteRTCPAddr = NULL;
+            sizeSockSt = 0;
+        }
+
+        ssize_t n;
+        do {
+            n = sendto(
+                    info->mRTCPSocket, buffer->data(), buffer->size(), 0,
+                    pRemoteRTCPAddr, sizeSockSt);
+        } while (n < 0 && errno == EINTR);
+
+        return n;
+}
+
 status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
@@ -431,6 +676,11 @@
         return -1;
     }
 
+    if ((data[1] & 0x7f) == 20 /* decimal */) {
+        // Unassigned payload type
+        return -1;
+    }
+
     if (data[0] & 0x20) {
         // Padding present.
 
@@ -454,6 +704,7 @@
         return -1;
     }
 
+    int32_t cvoDegrees = -1;
     if (data[0] & 0x10) {
         // Header eXtension present.
 
@@ -467,13 +718,14 @@
         const uint8_t *extensionData = &data[payloadOffset];
 
         size_t extensionLength =
-            4 * (extensionData[2] << 8 | extensionData[3]);
+            (4 * (extensionData[2] << 8 | extensionData[3])) + 4;
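+        // Note: extensionLength now includes the 4-byte extension header itself.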
 
-        if (size < payloadOffset + 4 + extensionLength) {
+        if (size < payloadOffset + extensionLength) {
             return -1;
         }
 
-        payloadOffset += 4 + extensionLength;
+        parseRTPExt(s, (const uint8_t *)extensionData, extensionLength, &cvoDegrees);
+        payloadOffset += extensionLength;
     }
 
     uint32_t srcId = u32at(&data[8]);
@@ -487,6 +739,9 @@
     meta->setInt32("rtp-time", rtpTime);
     meta->setInt32("PT", data[1] & 0x7f);
     meta->setInt32("M", data[1] >> 7);
+    if (cvoDegrees >= 0) {
+        meta->setInt32("cvo", cvoDegrees);
+    }
 
     buffer->setInt32Data(u16at(&data[2]));
     buffer->setRange(payloadOffset, size - payloadOffset);
@@ -496,11 +751,65 @@
     return OK;
 }
 
+status_t ARTPConnection::parseRTPExt(StreamInfo *s,
+        const uint8_t *extHeader, size_t extLen, int32_t *cvoDegrees) {
+    if (extLen < 4)
+        return -1;
+
+    uint16_t header = (extHeader[0] << 8) | (extHeader[1]);
+    bool isOnebyteHeader = false;
+
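+    // 0xBEDE marks the RFC 5285 one-byte extension header form; 0x1000 is the two-byte form (with zero appbits).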
+    if (header == 0xBEDE) {
+        isOnebyteHeader = true;
+    } else if (header == 0x1000) {
+        ALOGW("parseRTPExt: two-byte header is not implemented yet");
+        return -1;
+    } else {
+        ALOGW("parseRTPExt: can not recognize header");
+        return -1;
+    }
+
+    const uint8_t *extPayload = extHeader + 4;
+    extLen -= 4;
+    size_t offset = 0; // start from the first payload of the RTP extension.
+    // one-byte header parser
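+    // Each element: a 4-bit extmap ID and a 4-bit (length - 1) field, followed by 'length' data bytes.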
+    while (isOnebyteHeader && offset < extLen) {
+        uint8_t extmapId = extPayload[offset] >> 4;
+        uint8_t length = (extPayload[offset] & 0xF) + 1;
+        offset++;
+
+        // padding case
+        if (extmapId == 0)
+            continue;
+
+        uint8_t data[16]; // maximum length value
+        for (uint8_t j = 0; offset + j <= extLen && j < length; j++) {
+            data[j] = extPayload[offset + j];
+        }
+
+        offset += length;
+
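+        // The CVO (urn:3gpp:video-orientation) extension carries its rotation and flip bits in a single byte.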
+        if (extmapId == s->mCVOExtMap) {
+            *cvoDegrees = (int32_t)data[0];
+            return OK;
+        }
+    }
+
+    return BAD_VALUE;
+}
+
 status_t ARTPConnection::parseRTCP(StreamInfo *s, const sp<ABuffer> &buffer) {
     if (s->mNumRTCPPacketsReceived++ == 0) {
         sp<AMessage> notify = s->mNotifyMsg->dup();
         notify->setInt32("first-rtcp", true);
         notify->post();
+
+        ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
+        sp<AMessage> imsNotify = s->mNotifyMsg->dup();
+        imsNotify->setInt32("rtcp-event", 1);
+        imsNotify->setInt32("payload-type", 101);
+        imsNotify->setInt32("feedback-type", 0);
+        imsNotify->post();
     }
 
     const uint8_t *data = buffer->data();
@@ -551,8 +860,12 @@
                 break;
 
             case 205:  // TSFB (transport layer specific feedback)
+                parseTSFB(s, data, headerLength);
+                break;
             case 206:  // PSFB (payload specific feedback)
                 // hexdump(data, headerLength);
+                parsePSFB(s, data, headerLength);
+                ALOGI("RTCP packet type %u of size %zu", (unsigned)data[1], headerLength);
                 break;
 
             case 203:
@@ -589,6 +902,12 @@
 
     sp<ARTPSource> source = findSource(s, id);
 
+    // Report final statistics to be used for RTP data usage accounting.
+    int64_t nowUs = ALooper::GetNowUs();
+    int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+    int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+    source->notifyPktInfo(bitrate, true /* isRegular */);
+
     source->byeReceived();
 
     return OK;
@@ -621,6 +940,144 @@
     return 0;
 }
 
+status_t ARTPConnection::parseTSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    size_t FCISize;
+    switch(msgType) {
+        case 1:     // Generic NACK
+        {
+            FCISize = 4;
+            while (size >= FCISize) {
+                uint16_t PID = u16at(&ptr[0]);  // lost packet RTP number
+                uint16_t BLP = u16at(&ptr[2]);  // Bitmask of following Lost Packets
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                AString list_of_losts;
+                list_of_losts.append(PID);
+                for (int i=0 ; i<16 ; i++) {
+                    bool is_lost = BLP & (0x1 << i);
+                    if (is_lost) {
+                        list_of_losts.append(", ");
+                        list_of_losts.append(PID + i);
+                    }
+                }
+                ALOGI("Opponent losts packet of RTP %s", list_of_losts.c_str());
+            }
+            break;
+        }
+        case 3:     // TMMBR
+        case 4:     // TMMBN
+        {
+            FCISize = 8;
+            while (size >= FCISize) {
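+                // MxTBR FCI (RFC 5104): 6-bit exponent, 17-bit mantissa, 9-bit measured overhead.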
+                uint32_t MxTBR = u32at(&ptr[4]);
+                uint32_t MxTBRExp = MxTBR >> 26;
+                uint32_t MxTBRMantissa = (MxTBR >> 9) & 0x01FFFF;
+                uint32_t overhead = MxTBR & 0x01FF;
+
+                size -= FCISize;
+                ptr += FCISize;
+
+                uint32_t bitRate = (1 << MxTBRExp) * MxTBRMantissa;
+
+                if (msgType == 3)
+                    ALOGI("Op -> UE Req Tx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+                else if (msgType == 4)
+                    ALOGI("OP -> UE Noti Rx bitrate : %d X 2^%d = %d",
+                        MxTBRMantissa, MxTBRExp, bitRate);
+
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 205);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->setInt32("bit-rate", bitRate);
+                notify->post();
+                ALOGI("overhead : %d", overhead);
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported TSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+status_t ARTPConnection::parsePSFB(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    if (size < 12) {
+        // broken packet
+        return -1;
+    }
+
+    uint8_t msgType = data[0] & 0x1f;
+    uint32_t id = u32at(&data[4]);
+
+    const uint8_t *ptr = &data[12];
+    size -= 12;
+
+    using namespace std;
+    switch(msgType) {
+        case 1:     // Picture Loss Indication (PLI)
+        {
+            if (size > 0) {
+                // PLI does not need parameters
+                break;
+            }
+            sp<AMessage> notify = s->mNotifyMsg->dup();
+            notify->setInt32("rtcp-event", 1);
+            notify->setInt32("payload-type", 206);
+            notify->setInt32("feedback-type", msgType);
+            notify->setInt32("sender", id);
+            notify->post();
+            ALOGI("PLI detected.");
+            break;
+        }
+        case 4:     // Full Intra Request (FIR)
+        {
+            if (size < 4) {
+                break;
+            }
+            uint32_t requestedId = u32at(&ptr[0]);
+            if (requestedId == (uint32_t)mSelfID) {
+                sp<AMessage> notify = s->mNotifyMsg->dup();
+                notify->setInt32("rtcp-event", 1);
+                notify->setInt32("payload-type", 206);
+                notify->setInt32("feedback-type", msgType);
+                notify->setInt32("sender", id);
+                notify->post();
+                ALOGI("FIR detected.");
+            }
+            break;
+        }
+        default:
+        {
+            ALOGI("Not supported PSFB type %d", msgType);
+            break;
+        }
+    }
+
+    return 0;
+}
 sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) {
     sp<ARTPSource> source;
     ssize_t index = info->mSources.indexOfKey(srcId);
@@ -630,6 +1087,12 @@
         source = new ARTPSource(
                 srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
 
+        if (mFlags & kViLTEConnection) {
+            source->setPeriodicFIR(false);
+        }
+
+        source->setSelfID(mSelfID);
+        source->setStaticJitterTimeMs(mStaticJitterTimeMs);
         info->mSources.add(srcId, source);
     } else {
         source = info->mSources.valueAt(index);
@@ -645,6 +1108,78 @@
     msg->post();
 }
 
+void ARTPConnection::setSelfID(const uint32_t selfID) {
+    mSelfID = selfID;
+}
+
+void ARTPConnection::setStaticJitterTimeMs(const uint32_t jbTimeMs) {
+    mStaticJitterTimeMs = jbTimeMs;
+}
+
+void ARTPConnection::setTargetBitrate(int32_t targetBitrate) {
+    mTargetBitrate = targetBitrate;
+}
+
+void ARTPConnection::checkRxBitrate(int64_t nowUs) {
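+    // Computes the received bitrate since the last report; notifies sources early on heavy loss,
+    // and posts a regular report roughly once per second.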
+    if (mLastBitrateReportTimeUs <= 0) {
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+    }
+    else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+        mLastEarlyNotifyTimeUs = nowUs;
+
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
+            StreamInfo *s = &*it;
+            if (s->mIsInjected) {
+                ++it;
+                continue;
+            }
+            for (size_t i = 0; i < s->mSources.size(); ++i) {
+                sp<ARTPSource> source = s->mSources.valueAt(i);
+                if (source->isNeedToEarlyNotify()) {
+                    source->notifyPktInfo(bitrate, false /* isRegular */);
+                    mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
+                }
+            }
+            ++it;
+        }
+    }
+    else if (mLastBitrateReportTimeUs + 1000000ll <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
+        ALOGI("Actual Rx bitrate : %d bits/sec", bitrate);
+
+        sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
+            StreamInfo *s = &*it;
+            if (s->mIsInjected) {
+                ++it;
+                continue;
+            }
+
+            if (s->mNumRTCPPacketsReceived == 0) {
+                // We have never received any RTCP packets on this stream,
+                // we don't even know where to send a report.
+                ++it;
+                continue;
+            }
+
+            buffer->setRange(0, 0);
+            for (size_t i = 0; i < s->mSources.size(); ++i) {
+                sp<ARTPSource> source = s->mSources.valueAt(i);
+                source->notifyPktInfo(bitrate, true /* isRegular */);
+            }
+            ++it;
+        }
+        mCumulativeBytes = 0;
+        mLastBitrateReportTimeUs = nowUs;
+        mLastEarlyNotifyTimeUs = nowUs;
+    }
+}
 void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
     int32_t index;
     CHECK(msg->findInt32("index", &index));
@@ -672,4 +1207,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index d5f7c2e..adf9670 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -30,6 +30,7 @@
 struct ARTPConnection : public AHandler {
     enum Flags {
         kRegularlyRequestFIR = 2,
+        kViLTEConnection = 4,
     };
 
     explicit ARTPConnection(uint32_t flags = 0);
@@ -39,16 +40,27 @@
             const sp<ASessionDescription> &sessionDesc, size_t index,
             const sp<AMessage> &notify,
             bool injected);
-
+    void seekStream();
     void removeStream(int rtpSocket, int rtcpSocket);
 
     void injectPacket(int index, const sp<ABuffer> &buffer);
 
+    void setSelfID(const uint32_t selfID);
+    void setStaticJitterTimeMs(const uint32_t jbTimeMs);
+    void setTargetBitrate(int32_t targetBitrate);
+
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
     // next higher port).
     static void MakePortPair(
             int *rtpSocket, int *rtcpSocket, unsigned *rtpPort);
+    // Creates a pair of UDP datagram sockets bound to assigned ip and
+    // ports (the rtpSocket is bound to an even port, the rtcpSocket
+    // to the next higher port).
+    static void MakeRTPSocketPair(
+            int *rtpSocket, int *rtcpSocket,
+            const char *localIp, const char *remoteIp,
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -57,6 +69,7 @@
 private:
     enum {
         kWhatAddStream,
+        kWhatSeekStream,
         kWhatRemoveStream,
         kWhatPollStreams,
         kWhatInjectPacket,
@@ -71,18 +84,33 @@
 
     bool mPollEventPending;
     int64_t mLastReceiverReportTimeUs;
+    int64_t mLastBitrateReportTimeUs;
+    int64_t mLastEarlyNotifyTimeUs;
+
+    int32_t mSelfID;
+    int32_t mTargetBitrate;
+
+    uint32_t mStaticJitterTimeMs;
+
+    int32_t mCumulativeBytes;
 
     void onAddStream(const sp<AMessage> &msg);
+    void onSeekStream(const sp<AMessage> &msg);
     void onRemoveStream(const sp<AMessage> &msg);
     void onPollStreams();
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
+    void checkRxBitrate(int64_t nowUs);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
+    ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
 
     status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
+    status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
     status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
     status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
 
     sp<ARTPSource> findSource(StreamInfo *info, uint32_t id);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index f5f8128..f960482 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -22,6 +22,7 @@
 
 #include "AAMRAssembler.h"
 #include "AAVCAssembler.h"
+#include "AHEVCAssembler.h"
 #include "AH263Assembler.h"
 #include "AMPEG2TSAssembler.h"
 #include "AMPEG4AudioAssembler.h"
@@ -33,23 +34,35 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#include <strings.h>
+
 namespace android {
 
-static const uint32_t kSourceID = 0xdeadbeef;
+static uint32_t kSourceID = 0xdeadbeef;
 
 ARTPSource::ARTPSource(
         uint32_t id,
         const sp<ASessionDescription> &sessionDesc, size_t index,
         const sp<AMessage> &notify)
-    : mID(id),
+    : mFirstSeqNumber(0),
+      mFirstRtpTime(0),
+      mFirstSysTime(0),
+      mClockRate(0),
+      mFirstSsrc(0),
+      mHighestNackNumber(0),
+      mID(id),
       mHighestSeqNumber(0),
       mPrevExpected(0),
       mBaseSeqNumber(0),
       mNumBuffersReceived(0),
       mPrevNumBuffersReceived(0),
+      mPrevExpectedForRR(0),
+      mPrevNumBuffersReceivedForRR(0),
+      mStaticJbTimeMs(kStaticJitterTimeMs),
       mLastNTPTime(0),
       mLastNTPTimeUpdateUs(0),
       mIssueFIRRequests(false),
+      mIssueFIRByAssembler(false),
       mLastFIRRequestUs(-1),
       mNextFIRSeqNo((rand() * 256.0) / RAND_MAX),
       mNotify(notify) {
@@ -61,6 +74,9 @@
     if (!strncmp(desc.c_str(), "H264/", 5)) {
         mAssembler = new AAVCAssembler(notify);
         mIssueFIRRequests = true;
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mAssembler = new AHEVCAssembler(notify);
+        mIssueFIRRequests = true;
     } else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
         mAssembler = new AMPEG4AudioAssembler(notify, params);
     } else if (!strncmp(desc.c_str(), "H263-1998/", 10)
@@ -86,6 +102,11 @@
     if (mAssembler != NULL && !mAssembler->initCheck()) {
         mAssembler.clear();
     }
+
+    int32_t clockRate, numChannels;
+    ASessionDescription::ParseFormatDesc(desc.c_str(), &clockRate, &numChannels);
+    mClockRate = clockRate;
+    mJitterCalc = new JitterCalc(mClockRate);
 }
 
 static uint32_t AbsDiff(uint32_t seq1, uint32_t seq2) {
@@ -109,16 +130,55 @@
     notify->post();
 }
 
+void ARTPSource::timeReset() {
+    mFirstRtpTime = 0;
+    mFirstSysTime = 0;
+    mFirstSsrc = 0;
+    mHighestNackNumber = 0;
+    mHighestSeqNumber = 0;
+    mPrevExpected = 0;
+    mBaseSeqNumber = 0;
+    mNumBuffersReceived = 0;
+    mPrevNumBuffersReceived = 0;
+    mPrevExpectedForRR = 0;
+    mPrevNumBuffersReceivedForRR = 0;
+    mLastNTPTime = 0;
+    mLastNTPTimeUpdateUs = 0;
+    mIssueFIRByAssembler = false;
+    mLastFIRRequestUs = -1;
+}
+
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     uint32_t seqNum = (uint32_t)buffer->int32Data();
 
-    if (mNumBuffersReceived++ == 0) {
+    int32_t ssrc = 0;
+    buffer->meta()->findInt32("ssrc", &ssrc);
+
+    if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
+        uint32_t firstRtpTime;
+        CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
+        mFirstSysTime = ALooper::GetNowUs();
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
+        mFirstRtpTime = firstRtpTime;
+        mFirstSsrc = ssrc;
+        ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
+                mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
+        mJitterCalc->init(mFirstRtpTime, mFirstSysTime, 0, mStaticJbTimeMs * 1000);
+        if (mQueue.size() > 0) {
+            ALOGD("clearing buffers which belonged to previous timeline"
+                    " since a base timeline has been changed.");
+            mQueue.clear();
+        }
         mQueue.push_back(buffer);
         return true;
     }
 
+    if (mFirstSsrc != ssrc) {
+        ALOGW("Discarding a buffer due to unexpected ssrc");
+        return false;
+    }
+
     // Only the lower 16-bit of the sequence numbers are transmitted,
     // derive the high-order bits by choosing the candidate closest
     // to the highest sequence number (extended to 32 bits) received so far.
@@ -181,20 +241,34 @@
 }
 
 void ARTPSource::addFIR(const sp<ABuffer> &buffer) {
-    if (!mIssueFIRRequests) {
+    if (!mIssueFIRRequests && !mIssueFIRByAssembler) {
         return;
     }
 
+    bool send = false;
     int64_t nowUs = ALooper::GetNowUs();
-    if (mLastFIRRequestUs >= 0 && mLastFIRRequestUs + 5000000LL > nowUs) {
-        // Send FIR requests at most every 5 secs.
+    int64_t usecsSinceLastFIR = nowUs - mLastFIRRequestUs;
+    if (mLastFIRRequestUs < 0) {
+        // A first FIR, just send it.
+        send = true;
+    } else if (mIssueFIRByAssembler && (usecsSinceLastFIR > 1000000)) {
+        // A FIR issued by the assembler.
+        // Send it if no FIR has been sent within the last second.
+        send = true;
+    } else if (mIssueFIRRequests && (usecsSinceLastFIR > 5000000)) {
+        // A FIR issued periodically regardless of packet loss.
+        // Send it if no FIR has been sent within the last 5 seconds.
+        send = true;
+    }
+
+    if (!send) {
         return;
     }
 
     mLastFIRRequestUs = nowUs;
 
     if (buffer->size() + 20 > buffer->capacity()) {
-        ALOGW("RTCP buffer too small to accomodate FIR.");
+        ALOGW("RTCP buffer too small to accommodate FIR.");
         return;
     }
 
@@ -203,7 +277,7 @@
     data[0] = 0x80 | 4;
     data[1] = 206;  // PSFB
     data[2] = 0;
-    data[3] = 4;
+    data[3] = 4;    // total (4+1) * sizeof(int32_t) = 20 bytes
     data[4] = kSourceID >> 24;
     data[5] = (kSourceID >> 16) & 0xff;
     data[6] = (kSourceID >> 8) & 0xff;
@@ -225,14 +299,16 @@
     data[18] = 0x00;
     data[19] = 0x00;
 
-    buffer->setRange(buffer->offset(), buffer->size() + 20);
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    mIssueFIRByAssembler = false;
 
     ALOGV("Added FIR request.");
 }
 
 void ARTPSource::addReceiverReport(const sp<ABuffer> &buffer) {
     if (buffer->size() + 32 > buffer->capacity()) {
-        ALOGW("RTCP buffer too small to accomodate RR.");
+        ALOGW("RTCP buffer too small to accommodate RR.");
         return;
     }
 
@@ -240,16 +316,16 @@
 
     // According to appendix A.3 in RFC 3550
     uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
-    int64_t intervalExpected = expected - mPrevExpected;
-    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceived;
+    int64_t intervalExpected = expected - mPrevExpectedForRR;
+    int64_t intervalReceived = mNumBuffersReceived - mPrevNumBuffersReceivedForRR;
     int64_t intervalPacketLost = intervalExpected - intervalReceived;
 
     if (intervalExpected > 0 && intervalPacketLost > 0) {
         fraction = (intervalPacketLost << 8) / intervalExpected;
     }
 
-    mPrevExpected = expected;
-    mPrevNumBuffersReceived = mNumBuffersReceived;
+    mPrevExpectedForRR = expected;
+    mPrevNumBuffersReceivedForRR = mNumBuffersReceived;
     int32_t cumulativePacketLost = (int32_t)expected - mNumBuffersReceived;
 
     uint8_t *data = buffer->data() + buffer->size();
@@ -257,7 +333,7 @@
     data[0] = 0x80 | 1;
     data[1] = 201;  // RR
     data[2] = 0;
-    data[3] = 7;
+    data[3] = 7;    // total (7+1) * sizeof(int32_t) = 32 bytes
     data[4] = kSourceID >> 24;
     data[5] = (kSourceID >> 16) & 0xff;
     data[6] = (kSourceID >> 8) & 0xff;
@@ -279,10 +355,11 @@
     data[18] = (mHighestSeqNumber >> 8) & 0xff;
     data[19] = mHighestSeqNumber & 0xff;
 
-    data[20] = 0x00;  // Interarrival jitter
-    data[21] = 0x00;
-    data[22] = 0x00;
-    data[23] = 0x00;
+    uint32_t jitterTime = 0;
+    data[20] = jitterTime >> 24;    // Interarrival jitter
+    data[21] = (jitterTime >> 16) & 0xff;
+    data[22] = (jitterTime >> 8) & 0xff;
+    data[23] = jitterTime & 0xff;
 
     uint32_t LSR = 0;
     uint32_t DLSR = 0;
@@ -303,9 +380,230 @@
     data[30] = (DLSR >> 8) & 0xff;
     data[31] = DLSR & 0xff;
 
-    buffer->setRange(buffer->offset(), buffer->size() + 32);
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
 }
 
+void ARTPSource::addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate RR.");
+        return;
+    }
+
+    if (targetBitrate <= 0) {
+        return;
+    }
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 3; // TMMBR
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mID >> 24;
+    data[13] = (mID >> 16) & 0xff;
+    data[14] = (mID >> 8) & 0xff;
+    data[15] = mID & 0xff;
+
+    // Find the first bit '1' from left & right side of the value.
+    int32_t leftEnd = 31 - __builtin_clz(targetBitrate);
+    int32_t rightEnd = ffs(targetBitrate) - 1;
+
+    // The mantissa has only 17 bits of space per the RTCP TMMBR specification.
+    if ((leftEnd - rightEnd) > 16) {
+        rightEnd = leftEnd - 16;
+    }
+    int32_t mantissa = targetBitrate >> rightEnd;
+
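+    // Pack the request as 6-bit exponent, 17-bit mantissa, and 9-bit measured overhead (RFC 5104 TMMBR FCI).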
+    data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                             (mantissa & 0x07f80) >> 7;
+    data[18] =                             (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    ALOGI("UE -> Op Req Rx bitrate : %d ", mantissa << rightEnd);
+}
+
+int ARTPSource::addNACK(const sp<ABuffer> &buffer) {
+    constexpr size_t kMaxFCIs = 10; // max number of FCIs
+    if (buffer->size() + (3 + kMaxFCIs) * sizeof(int32_t) > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate NACK.");
+        return -1;
+    }
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 1; // Generic NACK
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 0;        // will be decided later
+    data[4] = kSourceID >> 24;
+    data[5] = (kSourceID >> 16) & 0xff;
+    data[6] = (kSourceID >> 8) & 0xff;
+    data[7] = kSourceID & 0xff;
+
+    data[8] = mID >> 24;
+    data[9] = (mID >> 16) & 0xff;
+    data[10] = (mID >> 8) & 0xff;
+    data[11] = mID & 0xff;
+
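+    // Each Generic NACK FCI (RFC 4585) is a 16-bit PID plus a 16-bit bitmask of the following 16 packets.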
+    List<int> list;
+    List<int>::iterator it;
+    getSeqNumToNACK(list, kMaxFCIs);
+    size_t cnt = 0;
+
+    int *FCI = (int *)(data + 12);
+    for (it = list.begin(); it != list.end() && cnt < kMaxFCIs; it++) {
+        *(FCI + cnt) = *it;
+        cnt++;
+    }
+
+    data[3] = (3 + cnt) - 1;  // total (3 + #ofFCI) * sizeof(int32_t) bytes
+
+    buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+    return cnt;
+}
+
+int ARTPSource::getSeqNumToNACK(List<int>& list, int size) {
+    AutoMutex _l(mMapLock);
+    int cnt = 0;
+
+    std::map<uint16_t, infoNACK>::iterator it;
+    for(it = mNACKMap.begin(); it != mNACKMap.end() && cnt < size; it++) {
+        infoNACK &info_it = it->second;
+        if (info_it.needToNACK) {
+            info_it.needToNACK = false;
+            // convert to network byte order (big-endian) before sending
+            uint32_t FCI;
+            uint8_t *temp = (uint8_t *)&FCI;
+            temp[0] = (info_it.seqNum >> 8) & 0xff;
+            temp[1] = (info_it.seqNum)      & 0xff;
+            temp[2] = (info_it.mask >> 8)   & 0xff;
+            temp[3] = (info_it.mask)        & 0xff;
+
+            list.push_back(FCI);
+            cnt++;
+        }
+    }
+
+    return cnt;
+}
+
+void ARTPSource::setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum) {
+    AutoMutex _l(mMapLock);
+    infoNACK info = {seqNum, mask, nowJitterHeadSeqNum, true};
+    std::map<uint16_t, infoNACK>::iterator it;
+
+    it = mNACKMap.find(seqNum);
+    if (it != mNACKMap.end()) {
+        infoNACK &info_it = it->second;
+        // renew if (mask or head seq) is changed
+        if ((info_it.mask != mask) || (info_it.nowJitterHeadSeqNum != nowJitterHeadSeqNum)) {
+            info_it = info;
+        }
+    } else {
+        mNACKMap[seqNum] = info;
+    }
+
+    // delete NACK entries that are far behind the current jitter buffer's head sequence number
+    it = mNACKMap.begin();
+    while (it != mNACKMap.end()) {
+        infoNACK &info_it = it->second;
+
+        int diff = nowJitterHeadSeqNum - info_it.nowJitterHeadSeqNum;
+        if (diff > 100) {
+            ALOGV("Delete %d pkt from NACK map ", info_it.seqNum);
+            it = mNACKMap.erase(it);
+        } else {
+            it++;
+        }
+    }
+
+}
+
+uint32_t ARTPSource::getSelfID() {
+    return kSourceID;
+}
+
+void ARTPSource::setSelfID(const uint32_t selfID) {
+    kSourceID = selfID;
+}
+
+void ARTPSource::setPeriodicFIR(bool enable) {
+    ALOGD("setPeriodicFIR %d", enable);
+    mIssueFIRRequests = enable;
+}
+
+int32_t ARTPSource::getStaticJitterTimeMs() {
+    return mStaticJbTimeMs;
+}
+
+int32_t ARTPSource::getBaseJitterTimeMs() {
+    return mJitterCalc->getBaseJitterMs();
+}
+
+int32_t ARTPSource::getInterArrivalJitterTimeMs() {
+    return mJitterCalc->getInterArrivalJitterMs();
+}
+
+void ARTPSource::setStaticJitterTimeMs(const uint32_t jbTimeMs) {
+    mStaticJbTimeMs = jbTimeMs;
+}
+
+void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+    mJitterCalc->putBaseData(timeStamp, arrivalTime);
+}
+
+void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+    mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
+}
+
+bool ARTPSource::isNeedToEarlyNotify() {
+    uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+    int32_t intervalExpectedInNow = expected - mPrevExpected;
+    int32_t intervalReceivedInNow = mNumBuffersReceived - mPrevNumBuffersReceived;
+
+    if (intervalExpectedInNow - intervalReceivedInNow > 5)
+        return true;
+    return false;
+}
+
+void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+    int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("rtcp-event", 1);
+    notify->setInt32("payload-type", payloadType);
+    notify->setInt32("feedback-type", 0);
+    // sending target bitrate up to application to share rtp quality.
+    notify->setInt32("bit-rate", bitrate);
+    notify->setInt32("highest-seq-num", mHighestSeqNumber);
+    notify->setInt32("base-seq-num", mBaseSeqNumber);
+    notify->setInt32("prev-expected", mPrevExpected);
+    notify->setInt32("num-buf-recv", mNumBuffersReceived);
+    notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+    notify->post();
+
+    if (isRegular) {
+        uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+        mPrevExpected = expected;
+        mPrevNumBuffersReceived = mNumBuffersReceived;
+    }
+}
+
+void ARTPSource::onIssueFIRByAssembler() {
+    mIssueFIRByAssembler = true;
+}
+
+void ARTPSource::noticeAbandonBuffer(int cnt) {
+    mNumBuffersReceived -= cnt;
+}
 }  // namespace android
-
-
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index f44e83f..2d804d8 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -23,9 +23,16 @@
 #include <media/stagefright/foundation/ABase.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
+#include <utils/Thread.h>
+
+#include <map>
+
+#include "JitterCalculator.h"
 
 namespace android {
 
+const uint32_t kStaticJitterTimeMs = 50;   // 50ms
+
 struct ABuffer;
 struct AMessage;
 struct ARTPAssembler;
@@ -37,7 +44,19 @@
             const sp<ASessionDescription> &sessionDesc, size_t index,
             const sp<AMessage> &notify);
 
+    enum {
+        RTP_FIRST_PACKET = 100,
+        RTCP_FIRST_PACKET = 101,
+        RTP_QUALITY = 102,
+        RTP_QUALITY_EMC = 103,
+        RTCP_TSFB = 205,
+        RTCP_PSFB = 206,
+        RTP_CVO = 300,
+        RTP_AUTODOWN = 400,
+    };
+
     void processRTPPacket(const sp<ABuffer> &buffer);
+    void timeReset();
     void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
     void byeReceived();
 
@@ -45,22 +64,68 @@
 
     void addReceiverReport(const sp<ABuffer> &buffer);
     void addFIR(const sp<ABuffer> &buffer);
+    void addTMMBR(const sp<ABuffer> &buffer, int32_t targetBitrate);
+    int addNACK(const sp<ABuffer> &buffer);
+    void setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum);
+    uint32_t getSelfID();
+    void setSelfID(const uint32_t selfID);
+    void setPeriodicFIR(bool enable);
+
+    int32_t getStaticJitterTimeMs();
+    int32_t getBaseJitterTimeMs();
+    int32_t getInterArrivalJitterTimeMs();
+    void setStaticJitterTimeMs(const uint32_t jbTimeMs);
+    void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
+    void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
+
+    bool isNeedToEarlyNotify();
+    void notifyPktInfo(int32_t bitrate, bool isRegular);
+    // A FIR needs to be sent when packets are missing or the video image is broken.
+    void onIssueFIRByAssembler();
+
+    void noticeAbandonBuffer(int cnt=1);
+
+    int32_t mFirstSeqNumber;
+    uint32_t mFirstRtpTime;
+    int64_t mFirstSysTime;
+    int32_t mClockRate;
+
+    int32_t mFirstSsrc;
+    int32_t mHighestNackNumber;
 
 private:
+
     uint32_t mID;
     uint32_t mHighestSeqNumber;
     uint32_t mPrevExpected;
     uint32_t mBaseSeqNumber;
     int32_t mNumBuffersReceived;
     int32_t mPrevNumBuffersReceived;
+    uint32_t mPrevExpectedForRR;
+    int32_t mPrevNumBuffersReceivedForRR;
 
     List<sp<ABuffer> > mQueue;
     sp<ARTPAssembler> mAssembler;
 
+    int32_t mStaticJbTimeMs;
+    sp<JitterCalc> mJitterCalc;
+
+    typedef struct infoNACK {
+        uint16_t seqNum;
+        uint16_t mask;
+        uint16_t nowJitterHeadSeqNum;
+        bool    needToNACK;
+    } infoNACK;
+
+    Mutex mMapLock;
+    std::map<uint16_t, infoNACK> mNACKMap;
+    int getSeqNumToNACK(List<int>& list, int size);
+
     uint64_t mLastNTPTime;
     int64_t mLastNTPTimeUpdateUs;
 
     bool mIssueFIRRequests;
+    bool mIssueFIRByAssembler;
     int64_t mLastFIRRequestUs;
     uint8_t mNextFIRSeqNo;
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 58d6086..29e263d 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -20,8 +20,6 @@
 
 #include "ARTPWriter.h"
 
-#include <fcntl.h>
-
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -32,13 +30,43 @@
 #include <media/stagefright/MetaData.h>
 #include <utils/ByteOrder.h>
 
+#include <fcntl.h>
+#include <strings.h>
+
 #define PT      97
 #define PT_STR  "97"
 
+#define H264_NALU_MASK 0x1F
+#define H264_NALU_SPS 0x7
+#define H264_NALU_PPS 0x8
+#define H264_NALU_IFRAME 0x5
+#define H264_NALU_PFRAME 0x1
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+
+#define IPV4_HEADER_SIZE 20
+#define IPV6_HEADER_SIZE 40
+#define UDP_HEADER_SIZE 8
+#define TCPIPV4_HEADER_SIZE (IPV4_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIPV6_HEADER_SIZE (IPV6_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIP_HEADER_SIZE TCPIPV4_HEADER_SIZE
+#define RTP_HEADER_SIZE 12
+#define RTP_HEADER_EXT_SIZE 8
+#define RTP_FU_HEADER_SIZE 2
+#define RTP_PAYLOAD_ROOM_SIZE 100 // room reserved for the IPv6 header, ESP, etc.
+
+
 namespace android {
 
 // static const size_t kMaxPacketSize = 65507;  // maximum payload in UDP over IP
-static const size_t kMaxPacketSize = 1500;
+static const size_t kMaxPacketSize = 1280;
+static char kCNAME[255] = "someone@somewhere";
+
+static const size_t kTrafficRecorderMaxEntries = 128;
+static const size_t kTrafficRecorderMaxTimeSpanMs = 2000;
 
 static int UniformRand(int limit) {
     return ((double)rand() * limit) / RAND_MAX;
@@ -48,15 +76,20 @@
     : mFlags(0),
       mFd(dup(fd)),
       mLooper(new ALooper),
-      mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+      mReflector(new AHandlerReflector<ARTPWriter>(this)),
+      mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+              kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
     CHECK_GE(fd, 0);
+    mIsIPv6 = false;
 
     mLooper->setName("rtp writer");
     mLooper->registerHandler(mReflector);
     mLooper->start();
 
-    mSocket = socket(AF_INET, SOCK_DGRAM, 0);
-    CHECK_GE(mSocket, 0);
+    mRTPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
 
     memset(mRTPAddr.sin_zero, 0, sizeof(mRTPAddr.sin_zero));
     mRTPAddr.sin_family = AF_INET;
@@ -72,6 +105,46 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+#if LOG_TO_FILES
+    mRTPFd = open(
+            "/data/misc/rtpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTPFd, 0);
+
+    mRTCPFd = open(
+            "/data/misc/rtcpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTCPFd, 0);
+#endif
+}
+
+ARTPWriter::ARTPWriter(int fd, String8& localIp, int localPort, String8& remoteIp,
+    int remotePort, uint32_t seqNo)
+    : mFlags(0),
+      mFd(dup(fd)),
+      mLooper(new ALooper),
+      mReflector(new AHandlerReflector<ARTPWriter>(this)),
+      mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+              kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
+    CHECK_GE(fd, 0);
+    mIsIPv6 = false;
+
+    mLooper->setName("rtp writer");
+    mLooper->registerHandler(mReflector);
+    mLooper->start();
+
+    makeSocketPairAndBind(localIp, localPort, remoteIp , remotePort);
+    mVPSBuf = NULL;
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+    initState();
+    mSeqNo = seqNo;     // An explicit sequence number must be used to preserve RTP continuity.
 
 #if LOG_TO_FILES
     mRTPFd = open(
@@ -89,6 +162,21 @@
 }
 
 ARTPWriter::~ARTPWriter() {
+    if (mVPSBuf != NULL) {
+        mVPSBuf->release();
+        mVPSBuf = NULL;
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->release();
+        mSPSBuf = NULL;
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->release();
+        mPPSBuf = NULL;
+    }
+
 #if LOG_TO_FILES
     close(mRTCPFd);
     mRTCPFd = -1;
@@ -97,13 +185,39 @@
     mRTPFd = -1;
 #endif
 
-    close(mSocket);
-    mSocket = -1;
+    close(mRTPSocket);
+    mRTPSocket = -1;
+
+    close(mRTCPSocket);
+    mRTCPSocket = -1;
 
     close(mFd);
     mFd = -1;
 }
 
+void ARTPWriter::initState() {
+    if (mSourceID == 0)
+        mSourceID = rand();
+    mPayloadType = 0;
+    if (mSeqNo == 0)
+        mSeqNo = UniformRand(65536);
+    mRTPTimeBase = 0;
+    mNumRTPSent = 0;
+    mNumRTPOctetsSent = 0;
+    mLastRTPTime = 0;
+    mLastNTPTime = 0;
+
+    mOpponentID = 0;
+    mBitrate = 192000;
+
+    mNumSRsSent = 0;
+    mRTPCVOExtMap = -1;
+    mRTPCVODegrees = 0;
+    mRTPSockNetwork = 0;
+
+    mMode = INVALID;
+}
+
 status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
     mSource = source;
     return OK;
@@ -114,28 +228,46 @@
     return (mFlags & kFlagEOS) != 0;
 }
 
-status_t ARTPWriter::start(MetaData * /* params */) {
+status_t ARTPWriter::start(MetaData * params) {
     Mutex::Autolock autoLock(mLock);
     if (mFlags & kFlagStarted) {
         return INVALID_OPERATION;
     }
 
     mFlags &= ~kFlagEOS;
-    mSourceID = rand();
-    mSeqNo = UniformRand(65536);
-    mRTPTimeBase = rand();
-    mNumRTPSent = 0;
-    mNumRTPOctetsSent = 0;
-    mLastRTPTime = 0;
-    mLastNTPTime = 0;
-    mNumSRsSent = 0;
+    initState();
 
     const char *mime;
     CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
 
-    mMode = INVALID;
+    int32_t selfID = 0;
+    if (params->findInt32(kKeySelfID, &selfID))
+        mSourceID = selfID;
+
+    int32_t payloadType = 0;
+    if (params->findInt32(kKeyPayloadType, &payloadType))
+        mPayloadType = payloadType;
+
+    int32_t rtpExtMap = 0;
+    if (params->findInt32(kKeyRtpExtMap, &rtpExtMap))
+        mRTPCVOExtMap = rtpExtMap;
+
+    int32_t rtpCVODegrees = 0;
+    if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
+        mRTPCVODegrees = rtpCVODegrees;
+
+    int32_t dscp = 0;
+    if (params->findInt32(kKeyRtpDscp, &dscp))
+        updateSocketDscp(dscp);
+
+    int64_t sockNetwork = 0;
+    if (params->findInt64(kKeySocketNetwork, &sockNetwork))
+        updateSocketNetwork(sockNetwork);
+
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMode = H264;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        mMode = H265;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
         mMode = H263;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
@@ -187,11 +319,138 @@
     }
 }
 
+static const uint8_t SPCSize = 4;      // Start Prefix Code Size
+static const uint8_t startPrefixCode[SPCSize] = {0, 0, 0, 1};
+static const uint8_t spcKMPidx[SPCSize] = {0, 0, 2, 0};
+static void SpsPpsParser(MediaBufferBase *buffer,
+        MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
+
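+    // Strips any SPS/PPS NAL units from the front of |buffer|: each one is copied
+    // into *spsBuffer / *ppsBuffer (replacing any previous copy) and the buffer
+    // range is advanced past it and the following start prefix. The scan for the
+    // next start prefix uses the KMP-style fallback table spcKMPidx defined above.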
+    while (buffer->range_length() > 0) {
+        const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+        uint8_t nalType = (*NALPtr) & H264_NALU_MASK;
+
+        MediaBufferBase **targetPtr = NULL;
+        if (nalType == H264_NALU_SPS) {
+            targetPtr = spsBuffer;
+        } else if (nalType == H264_NALU_PPS) {
+            targetPtr = ppsBuffer;
+        } else {
+            return;
+        }
+        ALOGV("SPS(7) or PPS(8) found. Type %d", nalType);
+
+        uint32_t bufferSize = buffer->range_length();
+        MediaBufferBase *&target = *targetPtr;
+        uint32_t i = 0, j = 0;
+        bool isBoundFound = false;
+        for (i = 0; i < bufferSize; i++) {
+            while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+                j = spcKMPidx[j - 1];
+            }
+            if (NALPtr[i] == startPrefixCode[j]) {
+                j++;
+                if (j == SPCSize) {
+                    isBoundFound = true;
+                    break;
+                }
+            }
+        }
+
+        uint32_t targetSize;
+        if (target != NULL) {
+            target->release();
+        }
+        // note that targetSize is never 0 as the first byte is never part
+        // of a start prefix
+        if (isBoundFound) {
+            targetSize = i - SPCSize + 1;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+                              buffer->range_length() - targetSize - SPCSize);
+        } else {
+            targetSize = bufferSize;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + bufferSize, 0);
+            return;
+        }
+    }
+}
+
+static void VpsSpsPpsParser(MediaBufferBase *buffer,
+        MediaBufferBase **vpsBuffer, MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer) {
+
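+    // Same extraction as SpsPpsParser above, extended to also capture the H.265 VPS.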
+    while (buffer->range_length() > 0) {
+        const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+        uint8_t nalType = ((*NALPtr) >> 1) & H265_NALU_MASK;
+
+        MediaBufferBase **targetPtr = NULL;
+        if (nalType == H265_NALU_VPS) {
+            targetPtr = vpsBuffer;
+        } else if (nalType == H265_NALU_SPS) {
+            targetPtr = spsBuffer;
+        } else if (nalType == H265_NALU_PPS) {
+            targetPtr = ppsBuffer;
+        } else {
+            return;
+        }
+        ALOGV("VPS(32) SPS(33) or PPS(34) found. Type %d", nalType);
+
+        uint32_t bufferSize = buffer->range_length();
+        MediaBufferBase *&target = *targetPtr;
+        uint32_t i = 0, j = 0;
+        bool isBoundFound = false;
+        for (i = 0; i < bufferSize; i++) {
+            while (j > 0 && NALPtr[i] != startPrefixCode[j]) {
+                j = spcKMPidx[j - 1];
+            }
+            if (NALPtr[i] == startPrefixCode[j]) {
+                j++;
+                if (j == SPCSize) {
+                    isBoundFound = true;
+                    break;
+                }
+            }
+        }
+
+        uint32_t targetSize;
+        if (target != NULL) {
+            target->release();
+        }
+        // note that targetSize is never 0 as the first byte is never part
+        // of a start prefix
+        if (isBoundFound) {
+            targetSize = i - SPCSize + 1;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
+                              buffer->range_length() - targetSize - SPCSize);
+        } else {
+            targetSize = bufferSize;
+            target = MediaBufferBase::Create(targetSize);
+            memcpy(target->data(),
+                   (const uint8_t *)buffer->data() + buffer->range_offset(),
+                   targetSize);
+            buffer->set_range(buffer->range_offset() + bufferSize, 0);
+            return;
+        }
+    }
+}
+
 void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatStart:
         {
-            CHECK_EQ(mSource->start(), (status_t)OK);
+            sp<MetaData> meta = new MetaData();
+            meta->setInt64(kKeyTime, 10ll);
+            CHECK_EQ(mSource->start(meta.get()), (status_t)OK);
 
 #if 0
             if (mMode == H264) {
@@ -264,6 +523,18 @@
     }
 }
 
+void ARTPWriter::setTMMBNInfo(uint32_t opponentID, uint32_t bitrate) {
+    mOpponentID = opponentID;
+    mBitrate = bitrate;
+
+    sp<ABuffer> buffer = new ABuffer(65536);
+    buffer->setRange(0, 0);
+
+    addTMMBN(buffer);
+
+    send(buffer, true /* isRTCP */);
+}
+
 void ARTPWriter::onRead(const sp<AMessage> &msg) {
     MediaBufferBase *mediaBuf;
     status_t err = mSource->read(&mediaBuf);
@@ -281,7 +552,16 @@
 
         if (mMode == H264) {
             StripStartcode(mediaBuf);
-            sendAVCData(mediaBuf);
+            SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf);
+            if (mediaBuf->range_length() > 0) {
+                sendAVCData(mediaBuf);
+            }
+        } else if (mMode == H265) {
+            StripStartcode(mediaBuf);
+            VpsSpsPpsParser(mediaBuf, &mVPSBuf, &mSPSBuf, &mPPSBuf);
+            if (mediaBuf->range_length() > 0) {
+                sendHEVCData(mediaBuf);
+            }
         } else if (mMode == H263) {
             sendH263Data(mediaBuf);
         } else if (mMode == AMR_NB || mMode == AMR_WB) {
@@ -309,12 +589,39 @@
 }
 
 void ARTPWriter::send(const sp<ABuffer> &buffer, bool isRTCP) {
-    ssize_t n = sendto(
-            mSocket, buffer->data(), buffer->size(), 0,
-            (const struct sockaddr *)(isRTCP ? &mRTCPAddr : &mRTPAddr),
-            sizeof(mRTCPAddr));
+    int sizeSockSt;
+    struct sockaddr *remAddr;
 
-    CHECK_EQ(n, (ssize_t)buffer->size());
+    if (mIsIPv6) {
+        sizeSockSt = sizeof(struct sockaddr_in6);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr6;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr6;
+    } else {
+        sizeSockSt = sizeof(struct sockaddr_in);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr;
+    }
+
+    // Uncomment the call below to enable the traffic moderator, which prevents
+    // bursts of instantaneous bandwidth by capping the bits sent per period.
+    // e.g. 6 KByte / 10 ms = 48 Kbit / 10 ms = 4.8 Mbit/s instantaneous limit.
+    // ModerateInstantTraffic(10, 6 * 1024);
+
+    ssize_t n = sendto(isRTCP ? mRTCPSocket : mRTPSocket,
+            buffer->data(), buffer->size(), 0, remAddr, sizeSockSt);
+
+    if (n != (ssize_t)buffer->size()) {
+        ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
+    } else {
+        // Record the current traffic and print the bit count for the last 1 second (1000 ms).
+        mTrafficRec->writeBytes(buffer->size() +
+                (mIsIPv6 ? TCPIPV6_HEADER_SIZE : TCPIPV4_HEADER_SIZE));
+        mTrafficRec->printAccuBitsForLastPeriod(1000, 1000);
+    }
 
 #if LOG_TO_FILES
     int fd = isRTCP ? mRTCPFd : mRTPFd;
@@ -379,7 +686,6 @@
 
     data[offset++] = 1;  // CNAME
 
-    static const char *kCNAME = "someone@somewhere";
     data[offset++] = strlen(kCNAME);
 
     memcpy(&data[offset], kCNAME, strlen(kCNAME));
@@ -416,9 +722,55 @@
     buffer->setRange(buffer->offset(), buffer->size() + offset);
 }
 
+void ARTPWriter::addTMMBN(const sp<ABuffer> &buffer) {
+    if (buffer->size() + 20 > buffer->capacity()) {
+        ALOGW("RTCP buffer too small to accommodate SR.");
+        return;
+    }
+    if (mOpponentID == 0)
+        return;
+
+    uint8_t *data = buffer->data() + buffer->size();
+
+    data[0] = 0x80 | 4; // TMMBN
+    data[1] = 205;      // TSFB
+    data[2] = 0;
+    data[3] = 4;        // total (4+1) * sizeof(int32_t) = 20 bytes
+    data[4] = mSourceID >> 24;
+    data[5] = (mSourceID >> 16) & 0xff;
+    data[6] = (mSourceID >> 8) & 0xff;
+    data[7] = mSourceID & 0xff;
+
+    *(int32_t*)(&data[8]) = 0;  // 4 bytes blank
+
+    data[12] = mOpponentID >> 24;
+    data[13] = (mOpponentID >> 16) & 0xff;
+    data[14] = (mOpponentID >> 8) & 0xff;
+    data[15] = mOpponentID & 0xff;
+
+    // Find the positions of the most significant and least significant set bits.
+    int32_t leftEnd = 31 - __builtin_clz(mBitrate);
+    int32_t rightEnd = ffs(mBitrate) - 1;
+
+    // The mantissa has only 17 bits of space per the RTCP TMMBN specification.
+    if ((leftEnd - rightEnd) > 16) {
+        rightEnd = leftEnd - 16;
+    }
+    int32_t mantissa = mBitrate >> rightEnd;
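+    // e.g. mBitrate = 192000 (0b101110111000000000) gives leftEnd = 17, rightEnd = 9,
+    // so mantissa = 192000 >> 9 = 375 and the signaled rate is 375 << 9 = 192000 bps.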
+
+    data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+    data[17] =                             (mantissa & 0x07f80) >> 7;
+    data[18] =                             (mantissa & 0x0007f) << 1;
+    data[19] = 40;              // 40 bytes overhead;
+
+    buffer->setRange(buffer->offset(), buffer->size() + 20);
+
+    ALOGI("UE -> Op Noti Tx bitrate : %d ", mantissa << rightEnd);
+}
+
 // static
 uint64_t ARTPWriter::GetNowNTP() {
-    uint64_t nowUs = ALooper::GetNowUs();
+    uint64_t nowUs = systemTime(SYSTEM_TIME_REALTIME) / 1000ll;
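+    // The offset added below converts the Unix epoch (1970-01-01) to the NTP epoch
+    // (1900-01-01): 70 years plus 17 leap days, i.e. 2,208,988,800 seconds.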
 
     nowUs += ((70LL * 365 + 17) * 24) * 60 * 60 * 1000000LL;
 
@@ -463,7 +815,7 @@
         sdp.append("m=audio ");
     }
 
-    sdp.append(AStringPrintf("%d", ntohs(mRTPAddr.sin_port)));
+    sdp.append(AStringPrintf("%d", mIsIPv6 ? ntohs(mRTPAddr6.sin6_port) : ntohs(mRTPAddr.sin_port)));
     sdp.append(
           " RTP/AVP " PT_STR "\r\n"
           "b=AS 320000\r\n"
@@ -569,24 +921,93 @@
     send(buffer, true /* isRTCP */);
 }
 
-void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+void ARTPWriter::sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    if ((mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME) {
+        return;
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendAVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendAVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int nalType = ((mediaData[0] >> 1) & H265_NALU_MASK);
+    if (!(nalType >= 16 && nalType <= 21) /*H265_NALU_IFRAME*/) {
+        return;
+    }
+
+    if (mVPSBuf != NULL) {
+        mVPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mVPSBuf->meta_data().setInt32(kKeyVps, 1);
+        sendHEVCData(mVPSBuf);
+    }
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendHEVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendHEVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendHEVCData(MediaBufferBase *mediaBuf) {
     // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
     CHECK_GE(kMaxPacketSize, 12u + 2u);
 
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
 
-    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+    sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
 
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+
+    CHECK(mediaBuf->range_length() > 0);
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
 
+    int32_t isNonVCL = 0;
+    if (mediaBuf->meta_data().findInt32(kKeyVps, &isNonVCL) ||
+            mediaBuf->meta_data().findInt32(kKeySps, &isNonVCL) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &isNonVCL)) {
+        isNonVCL = 1;
+    }
+
     sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-    if (mediaBuf->range_length() + 12 <= buffer->capacity()) {
+    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
         // The data fits into a single packet
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (1 << 7) | PT;  // M-bit
+        if (mRTPCVOExtMap > 0) {
+            data[0] |= 0x10;
+        }
+        if (isNonVCL) {
+            data[1] = mPayloadType;  // Marker bit should not be set in case of Non-VCL
+        } else {
+            data[1] = (1 << 7) | mPayloadType;  // M-bit
+        }
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -598,34 +1019,75 @@
         data[10] = (mSourceID >> 8) & 0xff;
         data[11] = mSourceID & 0xff;
 
-        memcpy(&data[12],
+        int rtpExtIndex = 0;
+        if (mRTPCVOExtMap > 0) {
+            /*
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |       0xBE    |    0xDE       |           length=3            |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |  ID   | L=0   |     data      |  ID   |  L=1  |   data...
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                     ...data   |    0 (pad)    |    0 (pad)    |  ID   | L=3   |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |                          data                                 |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+              In the one-byte header form of extensions, the 16-bit value required
+              by the RTP specification for a header extension, labeled in the RTP
+              specification as "defined by profile", takes the fixed bit pattern
+              0xBEDE (the first version of this specification was written on the
+              feast day of the Venerable Bede).
+            */
+            data[12] = 0xBE;
+            data[13] = 0xDE;
+            // put a length of RTP Extension.
+            data[14] = 0x00;
+            data[15] = 0x01;
+            // put extmap of RTP assigned for CVO.
+            data[16] = (mRTPCVOExtMap << 4) | 0x0;
+            // put image degrees as per CVO specification.
+            data[17] = mRTPCVODegrees;
+            data[18] = 0x0;
+            data[19] = 0x0;
+            rtpExtIndex = 8;
+        }
+
+        memcpy(&data[12 + rtpExtIndex],
                mediaData, mediaBuf->range_length());
 
-        buffer->setRange(0, mediaBuf->range_length() + 12);
+        buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
 
         send(buffer, false /* isRTCP */);
 
         ++mSeqNo;
         ++mNumRTPSent;
-        mNumRTPOctetsSent += buffer->size() - 12;
+        mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
     } else {
         // FU-A
 
-        unsigned nalType = mediaData[0];
-        size_t offset = 1;
+        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 2; // The H265 payload header is 16 bits.
 
         bool firstPacket = true;
         while (offset < mediaBuf->range_length()) {
             size_t size = mediaBuf->range_length() - offset;
             bool lastPacket = true;
-            if (size + 12 + 2 > buffer->capacity()) {
+            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
                 lastPacket = false;
-                size = buffer->capacity() - 12 - 2;
+                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
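+                // With kMaxPacketSize = 1280 this reserves 28 (IPv4/UDP) + 12 (RTP) + 8 (ext)
+                // + 2 (FU) + 100 (room) = 150 bytes, leaving 1130 bytes of payload per fragment.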
             }
 
             uint8_t *data = buffer->data();
             data[0] = 0x80;
-            data[1] = (lastPacket ? (1 << 7) : 0x00) | PT;  // M-bit
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[0] |= 0x10;
+            }
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
             data[2] = (mSeqNo >> 8) & 0xff;
             data[3] = mSeqNo & 0xff;
             data[4] = rtpTime >> 24;
@@ -637,24 +1099,245 @@
             data[10] = (mSourceID >> 8) & 0xff;
             data[11] = mSourceID & 0xff;
 
-            data[12] = 28 | (nalType & 0xe0);
+            int rtpExtIndex = 0;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[12] = 0xBE;
+                data[13] = 0xDE;
+                data[14] = 0x00;
+                data[15] = 0x01;
+                data[16] = (mRTPCVOExtMap << 4) | 0x0;
+                data[17] = mRTPCVODegrees;
+                data[18] = 0x0;
+                data[19] = 0x0;
+                rtpExtIndex = 8;
+            }
+
+            /*  The H265 payload header is 16 bits:
+                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |F|    Type   |  Layer ID | TID |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+            // Clear the Type field in the first byte of the H265 payload header,
+            // keeping only the F bit and the high bit of LayerId.
+            data[12 + rtpExtIndex] = mediaData[0] & 0x81;
+            // Fill in Type = 49 (0x31), the FU packet type.
+            data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | (0x31 << 1);
+            data[13 + rtpExtIndex] = mediaData[1];
+
+            ALOGV("H265 FU header 0x%x %x", data[12 + rtpExtIndex], data[13 + rtpExtIndex]);
 
             CHECK(!firstPacket || !lastPacket);
+            /*
+                FU header
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |S|E|  FuType   |
+                +-+-+-+-+-+-+-+-+
+            */
 
-            data[13] =
+            data[14 + rtpExtIndex] =
                 (firstPacket ? 0x80 : 0x00)
                 | (lastPacket ? 0x40 : 0x00)
-                | (nalType & 0x1f);
+                | (nalType & H265_NALU_MASK);
+            ALOGV("H265 FU indicator 0x%x", data[14]);
 
-            memcpy(&data[14], &mediaData[offset], size);
+            memcpy(&data[15 + rtpExtIndex], &mediaData[offset], size);
 
-            buffer->setRange(0, 14 + size);
+            buffer->setRange(0, 15 + rtpExtIndex + size);
 
             send(buffer, false /* isRTCP */);
 
             ++mSeqNo;
             ++mNumRTPSent;
-            mNumRTPOctetsSent += buffer->size() - 12;
+            mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+
+            firstPacket = false;
+            offset += size;
+        }
+    }
+
+    mLastRTPTime = rtpTime;
+    mLastNTPTime = GetNowNTP();
+}
+
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+    // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
+    CHECK_GE(kMaxPacketSize, 12u + 2u);
+
+    int64_t timeUs;
+    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+
+    CHECK(mediaBuf->range_length() > 0);
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    int32_t sps, pps;
+    bool isSpsPps = false;
+    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+        isSpsPps = true;
+    }
+
+    mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+    if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+            + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
+        // The data fits into a single packet
+        uint8_t *data = buffer->data();
+        data[0] = 0x80;
+        if (mRTPCVOExtMap > 0) {
+            data[0] |= 0x10;
+        }
+        if (isSpsPps) {
+            data[1] = mPayloadType;  // Marker bit should not be set in case of sps/pps
+        } else {
+            data[1] = (1 << 7) | mPayloadType;
+        }
+        data[2] = (mSeqNo >> 8) & 0xff;
+        data[3] = mSeqNo & 0xff;
+        data[4] = rtpTime >> 24;
+        data[5] = (rtpTime >> 16) & 0xff;
+        data[6] = (rtpTime >> 8) & 0xff;
+        data[7] = rtpTime & 0xff;
+        data[8] = mSourceID >> 24;
+        data[9] = (mSourceID >> 16) & 0xff;
+        data[10] = (mSourceID >> 8) & 0xff;
+        data[11] = mSourceID & 0xff;
+
+        int rtpExtIndex = 0;
+        if (mRTPCVOExtMap > 0) {
+            /*
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |       0xBE    |    0xDE       |           length=3            |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |  ID   | L=0   |     data      |  ID   |  L=1  |   data...
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                     ...data   |    0 (pad)    |    0 (pad)    |  ID   | L=3   |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+               |                          data                                 |
+               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+              In the one-byte header form of extensions, the 16-bit value required
+              by the RTP specification for a header extension, labeled in the RTP
+              specification as "defined by profile", takes the fixed bit pattern
+              0xBEDE (the first version of this specification was written on the
+              feast day of the Venerable Bede).
+            */
+            data[12] = 0xBE;
+            data[13] = 0xDE;
+            // put a length of RTP Extension.
+            data[14] = 0x00;
+            data[15] = 0x01;
+            // put extmap of RTP assigned for CVO.
+            data[16] = (mRTPCVOExtMap << 4) | 0x0;
+            // put image degrees as per CVO specification.
+            data[17] = mRTPCVODegrees;
+            data[18] = 0x0;
+            data[19] = 0x0;
+            rtpExtIndex = 8;
+        }
+
+        memcpy(&data[12 + rtpExtIndex],
+               mediaData, mediaBuf->range_length());
+
+        buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
+
+        send(buffer, false /* isRTCP */);
+
+        ++mSeqNo;
+        ++mNumRTPSent;
+        mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+    } else {
+        // FU-A
+
+        unsigned nalType = mediaData[0] & H264_NALU_MASK;
+        ALOGV("H264 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 1;
+
+        bool firstPacket = true;
+        while (offset < mediaBuf->range_length()) {
+            size_t size = mediaBuf->range_length() - offset;
+            bool lastPacket = true;
+            if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+                    RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+                lastPacket = false;
+                size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+            }
+
+            uint8_t *data = buffer->data();
+            data[0] = 0x80;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[0] |= 0x10;
+            }
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType;  // M-bit
+            data[2] = (mSeqNo >> 8) & 0xff;
+            data[3] = mSeqNo & 0xff;
+            data[4] = rtpTime >> 24;
+            data[5] = (rtpTime >> 16) & 0xff;
+            data[6] = (rtpTime >> 8) & 0xff;
+            data[7] = rtpTime & 0xff;
+            data[8] = mSourceID >> 24;
+            data[9] = (mSourceID >> 16) & 0xff;
+            data[10] = (mSourceID >> 8) & 0xff;
+            data[11] = mSourceID & 0xff;
+
+            int rtpExtIndex = 0;
+            if (lastPacket && mRTPCVOExtMap > 0) {
+                data[12] = 0xBE;
+                data[13] = 0xDE;
+                data[14] = 0x00;
+                data[15] = 0x01;
+                data[16] = (mRTPCVOExtMap << 4) | 0x0;
+                data[17] = mRTPCVODegrees;
+                data[18] = 0x0;
+                data[19] = 0x0;
+                rtpExtIndex = 8;
+            }
+
+            /*  The H264 payload header is 8 bits:
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |F|NRI|  Type   |
+                +-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H264 payload header 0x%x", mediaData[0]);
+            // Keep the F and NRI bits from the first byte of the H264 NAL header
+            // and clear the Type field.
+            data[12 + rtpExtIndex] = mediaData[0] & 0xe0;
+            // Fill in Type = 28 (0x1C), the FU-A packet type.
+            data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | 0x1C;
+
+            CHECK(!firstPacket || !lastPacket);
+            /*
+                FU header
+                 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+
+                |S|E|R|  Type   |
+                +-+-+-+-+-+-+-+-+
+            */
+
+            data[13 + rtpExtIndex] =
+                (firstPacket ? 0x80 : 0x00)
+                | (lastPacket ? 0x40 : 0x00)
+                | (nalType & H264_NALU_MASK);
+            ALOGV("H264 FU header 0x%x", data[13]);
+
+            memcpy(&data[14 + rtpExtIndex], &mediaData[offset], size);
+
+            buffer->setRange(0, 14 + rtpExtIndex + size);
+
+            send(buffer, false /* isRTCP */);
+
+            ++mSeqNo;
+            ++mNumRTPSent;
+            mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
 
             firstPacket = false;
             offset += size;
@@ -696,7 +1379,7 @@
 
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (lastPacket ? 0x80 : 0x00) | PT;  // M-bit
+        data[1] = (lastPacket ? 0x80 : 0x00) | mPayloadType;  // M-bit
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -727,6 +1410,58 @@
     mLastNTPTime = GetNowNTP();
 }
 
+void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
+    Mutex::Autolock autoLock(mLock);
+    mRTPCVODegrees = cvoDegrees;
+}
+
+void ARTPWriter::updatePayloadType(int32_t payloadType) {
+    Mutex::Autolock autoLock(mLock);
+    mPayloadType = payloadType;
+}
+
+void ARTPWriter::updateSocketDscp(int32_t dscp) {
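+    // DSCP occupies the upper 6 bits of the IP TOS byte (the low 2 bits are ECN),
+    // hence the shift by 2 below.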
+    mRtpLayer3Dscp = dscp << 2;
+
+    /* mRtpLayer3Dscp is mapped to WMM (Wi-Fi) access categories per the operator's requirement */
+    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
+        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    } else {
+        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
+        if (setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
+                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
+            ALOGE("failed to set dscp on rtcpsock. err=%s", strerror(errno));
+        } else {
+            ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+        }
+    }
+}
+
+void ARTPWriter::updateSocketNetwork(int64_t socketNetwork) {
+    mRTPSockNetwork = (net_handle_t)socketNetwork;
+    ALOGI("trying to bind rtp socket(%d) to network(%llu).",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+
+    int result = android_setsocknetwork(mRTPSockNetwork, mRTPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtp socket(%d) to network(%llu)",
+                result, mRTPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    result = android_setsocknetwork(mRTPSockNetwork, mRTCPSocket);
+    if (result != 0) {
+        ALOGW("failed(%d) to bind rtcp socket(%d) to network(%llu)",
+                result, mRTCPSocket, (unsigned long long)mRTPSockNetwork);
+    }
+    ALOGI("done. bind rtp socket(%d) to network(%llu)",
+                mRTPSocket, (unsigned long long)mRTPSockNetwork);
+}
+
+uint32_t ARTPWriter::getSequenceNum() {
+    return mSeqNo;
+}
+
+uint64_t ARTPWriter::getAccumulativeBytes() {
+    return mTrafficRec->readBytesForTotal();
+}
+
 static size_t getFrameSize(bool isWide, unsigned FT) {
     static const size_t kFrameSizeNB[8] = {
         95, 103, 118, 134, 148, 159, 204, 244
@@ -778,7 +1513,7 @@
     // The data fits into a single packet
     uint8_t *data = buffer->data();
     data[0] = 0x80;
-    data[1] = PT;
+    data[1] = mPayloadType;
     if (mNumRTPSent == 0) {
         // Signal start of talk-spurt.
         data[1] |= 0x80;  // M-bit
@@ -834,5 +1569,91 @@
     mLastNTPTime = GetNowNTP();
 }
 
-}  // namespace android
+void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
+        String8& remoteIp, int remotePort) {
+    static char kSomeone[16] = "someone@";
+    int nameLength = strlen(kSomeone);
+    memcpy(kCNAME, kSomeone, nameLength);
+    memcpy(kCNAME + nameLength, localIp.c_str(), localIp.length() + 1);
 
+    if (localIp.contains(":"))
+        mIsIPv6 = true;
+    else
+        mIsIPv6 = false;
+
+    mRTPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
+
+    int sockopt = 1;
+    setsockopt(mRTPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(mRTCPSocket, SOL_SOCKET, SO_REUSEADDR, (int *)&sockopt, sizeof(sockopt));
+
+    if (mIsIPv6) {
+        memset(&mLocalAddr6, 0, sizeof(mLocalAddr6));
+        memset(&mRTPAddr6, 0, sizeof(mRTPAddr6));
+        memset(&mRTCPAddr6, 0, sizeof(mRTCPAddr6));
+
+        mLocalAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp.string(), &mLocalAddr6.sin6_addr);
+        mLocalAddr6.sin6_port = htons((uint16_t)localPort);
+
+        mRTPAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp.string(), &mRTPAddr6.sin6_addr);
+        mRTPAddr6.sin6_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr6 = mRTPAddr6;
+        mRTCPAddr6.sin6_port = htons((uint16_t)(remotePort + 1));
+    } else {
+        memset(&mLocalAddr, 0, sizeof(mLocalAddr));
+        memset(&mRTPAddr, 0, sizeof(mRTPAddr));
+        memset(&mRTCPAddr, 0, sizeof(mRTCPAddr));
+
+        mLocalAddr.sin_family = AF_INET;
+        mLocalAddr.sin_addr.s_addr = inet_addr(localIp.string());
+        mLocalAddr.sin_port = htons((uint16_t)localPort);
+
+        mRTPAddr.sin_family = AF_INET;
+        mRTPAddr.sin_addr.s_addr = inet_addr(remoteIp.string());
+        mRTPAddr.sin_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr = mRTPAddr;
+        mRTCPAddr.sin_port = htons((uint16_t)(remotePort + 1));
+    }
+
+    struct sockaddr *localAddr = mIsIPv6 ?
+        (struct sockaddr*)&mLocalAddr6 : (struct sockaddr*)&mLocalAddr;
+
+    int sizeSockSt = mIsIPv6 ? sizeof(mLocalAddr6) : sizeof(mLocalAddr);
+
+    if (bind(mRTPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtp %s:%d err=%s", localIp.string(), localPort, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtp %s:%d", localIp.string(), localPort);
+    }
+
+    if (mIsIPv6)
+        mLocalAddr6.sin6_port = htons((uint16_t)(localPort + 1));
+    else
+        mLocalAddr.sin_port = htons((uint16_t)(localPort + 1));
+
+    if (bind(mRTCPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtcp %s:%d err=%s", localIp.string(), localPort + 1, strerror(errno));
+    } else {
+        ALOGD("succeed to bind rtcp %s:%d", localIp.string(), localPort + 1);
+    }
+}
+
+// TODO: Develop a more advanced moderator based on the AS and TMMBR values.
+void ARTPWriter::ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes) {
+    unsigned int bytes =  mTrafficRec->readBytesForLastPeriod(samplePeriod);
+    if (bytes > limitBytes) {
+        ALOGI("Nuclear moderator. #seq = %d \t\t %d bits / 10ms",
+              mSeqNo, bytes * 8);
+        usleep(4000);
+        mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 2f13486..28d6ec5 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -27,6 +27,9 @@
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
+#include <android/multinetwork.h>
+#include "TrafficRecorder.h"
+
 #define LOG_TO_FILES    0
 
 namespace android {
@@ -36,14 +39,24 @@
 
 struct ARTPWriter : public MediaWriter {
     explicit ARTPWriter(int fd);
+    explicit ARTPWriter(int fd, String8& localIp, int localPort,
+                                String8& remoteIp, int remotePort,
+                                uint32_t seqNo);
 
     virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
     virtual status_t start(MetaData *params);
     virtual status_t stop();
     virtual status_t pause();
+    void updateCVODegrees(int32_t cvoDegrees);
+    void updatePayloadType(int32_t payloadType);
+    void updateSocketDscp(int32_t dscp);
+    void updateSocketNetwork(int64_t socketNetwork);
+    uint32_t getSequenceNum();
+    virtual uint64_t getAccumulativeBytes() override;
 
     virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual void setTMMBNInfo(uint32_t opponentID, uint32_t bitrate);
 
 protected:
     virtual ~ARTPWriter();
@@ -76,15 +89,27 @@
     sp<ALooper> mLooper;
     sp<AHandlerReflector<ARTPWriter> > mReflector;
 
-    int mSocket;
+    bool mIsIPv6;
+    int mRTPSocket, mRTCPSocket;
+    struct sockaddr_in mLocalAddr;
     struct sockaddr_in mRTPAddr;
     struct sockaddr_in mRTCPAddr;
+    struct sockaddr_in6 mLocalAddr6;
+    struct sockaddr_in6 mRTPAddr6;
+    struct sockaddr_in6 mRTCPAddr6;
+    int32_t mRtpLayer3Dscp;
+    net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
     AString mSeqParamSet;
     AString mPicParamSet;
 
+    MediaBufferBase *mVPSBuf;
+    MediaBufferBase *mSPSBuf;
+    MediaBufferBase *mPPSBuf;
+
     uint32_t mSourceID;
+    uint32_t mPayloadType;
     uint32_t mSeqNo;
     uint32_t mRTPTimeBase;
     uint32_t mNumRTPSent;
@@ -92,10 +117,18 @@
     uint32_t mLastRTPTime;
     uint64_t mLastNTPTime;
 
+    uint32_t mOpponentID;
+    uint32_t mBitrate;
+    typedef uint64_t Bytes;
+    sp<TrafficRecorder<uint32_t /* Time */, Bytes> > mTrafficRec;
+
     int32_t mNumSRsSent;
+    int32_t mRTPCVOExtMap;
+    int32_t mRTPCVODegrees;
 
     enum {
         INVALID,
+        H265,
         H264,
         H263,
         AMR_NB,
@@ -104,22 +137,29 @@
 
     static uint64_t GetNowNTP();
 
+    void initState();
     void onRead(const sp<AMessage> &msg);
     void onSendSR(const sp<AMessage> &msg);
 
     void addSR(const sp<ABuffer> &buffer);
     void addSDES(const sp<ABuffer> &buffer);
+    void addTMMBN(const sp<ABuffer> &buffer);
 
     void makeH264SPropParamSets(MediaBufferBase *buffer);
     void dumpSessionDesc();
 
     void sendBye();
+    void sendVPSSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendHEVCData(MediaBufferBase *mediaBuf);
     void sendAVCData(MediaBufferBase *mediaBuf);
     void sendH263Data(MediaBufferBase *mediaBuf);
     void sendAMRData(MediaBufferBase *mediaBuf);
 
     void send(const sp<ABuffer> &buffer, bool isRTCP);
+    void makeSocketPairAndBind(String8& localIp, int localPort, String8& remoteIp, int remotePort);
 
+    void ModerateInstantTraffic(uint32_t samplePeriod, uint32_t limitBytes);
     DISALLOW_EVIL_CONSTRUCTORS(ARTPWriter);
 };
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index bb66f4c..c33bf3f 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -329,6 +329,7 @@
     mPass.clear();
     mAuthType = NONE;
     mNonce.clear();
+    mRealm.clear();
 
     mState = DISCONNECTED;
 }
@@ -911,6 +912,14 @@
         CHECK_GE(j, 0);
 
         mNonce.setTo(value, i + 7, j - i - 7);
+
+        i = value.find("realm=");
+        CHECK_GE(i, 0);
+        CHECK_EQ(value.c_str()[i + 6], '\"');
+        j = value.find("\"", i + 7);
+        CHECK_GE(j, 0);
+
+        mRealm.setTo(value, i + 7, j - i - 7);
     }
 
     return true;
@@ -993,7 +1002,7 @@
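+    // Per RFC 2617, A1 = username ":" realm ":" password; the realm now comes from
+    // the server's WWW-Authenticate challenge instead of a hard-coded name.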
     AString A1;
     A1.append(mUser);
     A1.append(":");
-    A1.append("Streaming Server");
+    A1.append(mRealm);
     A1.append(":");
     A1.append(mPass);
 
@@ -1029,6 +1038,9 @@
     fragment.append("\", ");
     fragment.append("response=\"");
     fragment.append(digest);
+    fragment.append("\", ");
+    fragment.append("realm=\"");
+    fragment.append(mRealm);
     fragment.append("\"");
     fragment.append("\r\n");
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 56b604d..7cdd4c0 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -84,6 +84,7 @@
     AString mUser, mPass;
     AuthType mAuthType;
     AString mNonce;
+    AString mRealm;
     int mSocket;
     int32_t mConnectionID;
     int32_t mNextCSeq;
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 2b42040..5b5b4b1 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -27,6 +27,8 @@
 
 namespace android {
 
+constexpr unsigned kDefaultAs = 960; // Default b=AS bandwidth, in kbps.
+
 ASessionDescription::ASessionDescription()
     : mIsValid(false) {
 }
@@ -103,7 +105,7 @@
                     key.setTo(line, 0, colonPos);
 
                     if (key == "a=fmtp" || key == "a=rtpmap"
-                            || key == "a=framesize") {
+                            || key == "a=framesize" || key == "a=extmap") {
                         ssize_t spacePos = line.find(" ", colonPos + 1);
                         if (spacePos < 0) {
                             return false;
@@ -201,6 +203,33 @@
     return true;
 }
 
+bool ASessionDescription::getCvoExtMap(
+        size_t index, int32_t *cvoExtMap) const {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    AString key, value;
+    *cvoExtMap = 0;
+
+    const Attribs &track = mTracks.itemAt(index);
+    for (size_t i = 0; i < track.size(); i++) {
+        value = track.valueAt(i);
+        if (value.size() > 0 && strcmp(value.c_str(), "urn:3gpp:video-orientation") == 0) {
+            key = track.keyAt(i);
+            break;
+        }
+    }
+
+    if (key.size() > 0) {
+        const char *colonPos = strrchr(key.c_str(), ':');
+        colonPos++;
+        *cvoExtMap = atoi(colonPos);
+        return true;
+    }
+
+    return false;
+}
+
 void ASessionDescription::getFormatType(
         size_t index, unsigned long *PT,
         AString *desc, AString *params) const {
@@ -345,5 +374,74 @@
     return *npt2 > *npt1;
 }
 
+// static
+void ASessionDescription::SDPStringFactory(AString &sdp,
+        const char *ip, bool isAudio, unsigned port, unsigned payloadType,
+        unsigned as, const char *codec, const char *fmtp,
+        int32_t width, int32_t height, int32_t cvoExtMap)
+{
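+    // Illustrative output for hypothetical arguments
+    // ("192.0.2.1", isAudio=false, port=5000, payloadType=97, as=1000, codec="H265",
+    //  fmtp=NULL, width=640, height=480, cvoExtMap=1):
+    //   v=0
+    //   a=range:npt=now-
+    //   m=video 5000 RTP/AVP 97
+    //   c=IN IP4 192.0.2.1
+    //   b=AS:1000
+    //   a=rtpmap:97 H265/90000
+    //   a=framesize:97 640-480
+    //   a=extmap:1 urn:3gpp:video-orientation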
+    bool isIPv4 = (AString(ip).find(":") == -1);  // Any ':' indicates an IPv6 literal.
+    sdp.clear();
+    sdp.append("v=0\r\n");
+
+    sdp.append("a=range:npt=now-\r\n");
+
+    sdp.append("m=");
+    sdp.append(isAudio ? "audio " : "video ");
+    sdp.append(port);
+    sdp.append(" RTP/AVP ");
+    sdp.append(payloadType);
+    sdp.append("\r\n");
+
+    sdp.append("c= IN IP");
+    if (isIPv4) {
+        sdp.append("4 ");
+    } else {
+        sdp.append("6 ");
+    }
+    sdp.append(ip);
+    sdp.append("\r\n");
+
+    sdp.append("b=AS:");
+    sdp.append(as > 0 ? as : kDefaultAs);
+    sdp.append("\r\n");
+
+    sdp.append("a=rtpmap:");
+    sdp.append(payloadType);
+    sdp.append(" ");
+    sdp.append(codec);
+    sdp.append("/");
+    sdp.append(isAudio ? "8000" : "90000");
+    sdp.append("\r\n");
+
+    if (fmtp != NULL) {
+        sdp.append("a=fmtp:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(fmtp);
+        sdp.append("\r\n");
+    }
+
+    if (!isAudio && width > 0 && height > 0) {
+        sdp.append("a=framesize:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(width);
+        sdp.append("-");
+        sdp.append(height);
+        sdp.append("\r\n");
+    }
+
+    if (cvoExtMap > 0) {
+        sdp.append("a=extmap:");
+        sdp.append(cvoExtMap);
+        sdp.append(" ");
+        sdp.append("urn:3gpp:video-orientation");
+        sdp.append("\r\n");
+    }
+
+    ALOGV("SDPStringFactory => %s", sdp.c_str());
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/rtsp/ASessionDescription.h b/media/libstagefright/rtsp/ASessionDescription.h
index b462983..91f5442 100644
--- a/media/libstagefright/rtsp/ASessionDescription.h
+++ b/media/libstagefright/rtsp/ASessionDescription.h
@@ -40,6 +40,8 @@
     size_t countTracks() const;
     void getFormat(size_t index, AString *value) const;
 
+    bool getCvoExtMap(size_t index, int32_t *cvoExtMap) const;
+
     void getFormatType(
             size_t index, unsigned long *PT,
             AString *desc, AString *params) const;
@@ -63,6 +65,9 @@
     // i.e. we have a fixed duration, otherwise this is live streaming.
     static bool parseNTPRange(const char *s, float *npt1, float *npt2);
 
+    static void SDPStringFactory(AString &sdp, const char *ip, bool isAudio, unsigned port,
+        unsigned payloadType, unsigned as, const char *codec, const char *fmtp = NULL,
+        int32_t width = 0, int32_t height = 0, int32_t cvoExtMap = 0);
 protected:
     virtual ~ASessionDescription();
 
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index a5a895e..34d1788 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -1,9 +1,29 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_rtsp_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_rtsp_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_defaults {
     name: "libstagefright_rtsp_defaults",
 
     srcs: [
         "AAMRAssembler.cpp",
         "AAVCAssembler.cpp",
+        "AHEVCAssembler.cpp",
         "AH263Assembler.cpp",
         "AMPEG2TSAssembler.cpp",
         "AMPEG4AudioAssembler.cpp",
@@ -16,10 +36,12 @@
         "ARTPWriter.cpp",
         "ARTSPConnection.cpp",
         "ASessionDescription.cpp",
+        "JitterCalculator.cpp",
         "SDPLoader.cpp",
     ],
 
     shared_libs: [
+        "libandroid_net",
         "libcrypto",
         "libdatasource",
         "libmedia",
@@ -28,6 +50,7 @@
     include_dirs: [
         "frameworks/av/media/libstagefright",
         "frameworks/native/include/media/openmax",
+        "frameworks/native/include/android",
     ],
 
     arch: {
diff --git a/media/libstagefright/rtsp/JitterCalculator.cpp b/media/libstagefright/rtsp/JitterCalculator.cpp
new file mode 100644
index 0000000..93b5a83
--- /dev/null
+++ b/media/libstagefright/rtsp/JitterCalculator.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "JitterCalc"
+#include <utils/Log.h>
+
+#include "JitterCalculator.h"
+
+#include <stdlib.h>
+
+namespace android {
+
+JitterCalc::JitterCalc(int32_t clockRate)
+    : mClockRate(clockRate) {
+    init(0, 0, 0, 0);
+}
+
+void JitterCalc::init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter) {
+    mFirstTimeStamp = rtpTime;
+    mLastTimeStamp = rtpTime;
+    mFirstArrivalTimeUs = arrivalTimeUs;
+    mLastArrivalTimeUs = arrivalTimeUs;
+
+    mBaseJitterUs = base;
+    mInterArrivalJitterUs = inter;
+}
+
+void JitterCalc::putBaseData(int64_t rtpTime, int64_t arrivalTimeUs) {
+    // An RTP timestamp wraps around after UINT32_MAX, so account for that here.
+    const int64_t UINT32_MSB = 0x80000000;
+    int64_t overflowMask = (mFirstTimeStamp & UINT32_MSB & ~rtpTime) << 1;
+    int64_t tempRtpTime = overflowMask | rtpTime;
+
+    // The base jitter can be computed in various ways; here it compares the
+    // elapsed arrival time against the schedule implied by the RTP timestamps.
+    int64_t scheduledTimeUs = (tempRtpTime - (int64_t)mFirstTimeStamp) * 1000000ll / mClockRate;
+    int64_t elapsedTimeUs = arrivalTimeUs - mFirstArrivalTimeUs;
+    int64_t correctionTimeUs = elapsedTimeUs - scheduledTimeUs; // additional propagation delay;
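+    // Fold the correction into a 1/16-weight exponential moving average.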
+    mBaseJitterUs = (mBaseJitterUs * 15 + correctionTimeUs) / 16;
+    ALOGV("BaseJitterUs : %lld \t\t correctionTimeUs : %lld",
+            (long long)mBaseJitterUs, (long long)correctionTimeUs);
+}
+
+void JitterCalc::putInterArrivalData(int64_t rtpTime, int64_t arrivalTimeUs) {
+    const int64_t UINT32_MSB = 0x80000000;
+    int64_t tempRtpTime = rtpTime;
+    int64_t tempLastTimeStamp = mLastTimeStamp;
+
+    // An RTP timestamp wraps around after UINT32_MAX, so account for that here.
+    int64_t overflowMask = (mLastTimeStamp ^ rtpTime) & UINT32_MSB;
+    tempRtpTime |= ((overflowMask & ~rtpTime) << 1);
+    tempLastTimeStamp |= ((overflowMask & ~mLastTimeStamp) << 1);
+
+    // Section 6.4.1 of RFC 3550 defines this interarrival jitter value.
+    int64_t diffTimeStampUs = abs(tempRtpTime - tempLastTimeStamp) * 1000000ll / mClockRate;
+    int64_t diffArrivalUs = arrivalTimeUs - mLastArrivalTimeUs; // Cannot be negative
+    ALOGV("diffTimeStampUs %lld \t\t diffArrivalUs %lld",
+            (long long)diffTimeStampUs, (long long)diffArrivalUs);
+
+    int64_t varianceUs = diffArrivalUs - diffTimeStampUs;
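+    // This update is equivalent (up to integer rounding) to RFC 3550's
+    // J(i) = J(i-1) + (|D(i-1,i)| - J(i-1)) / 16.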
+    mInterArrivalJitterUs = (mInterArrivalJitterUs * 15 + abs(varianceUs)) / 16;
+
+    mLastTimeStamp = (uint32_t)rtpTime;
+    mLastArrivalTimeUs = arrivalTimeUs;
+}
+
+int32_t JitterCalc::getBaseJitterMs() {
+    return mBaseJitterUs / 1000;
+}
+
+int32_t JitterCalc::getInterArrivalJitterMs() {
+    return mInterArrivalJitterUs / 1000;
+}
+
+}   // namespace android
+
diff --git a/media/libstagefright/rtsp/JitterCalculator.h b/media/libstagefright/rtsp/JitterCalculator.h
new file mode 100644
index 0000000..ff36f1f
--- /dev/null
+++ b/media/libstagefright/rtsp/JitterCalculator.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_JITTER_CALCULATOR_H_
+
+#define A_JITTER_CALCULATOR_H_
+
+#include <stdint.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class JitterCalc : public RefBase {
+private:
+    // RTP timestamp ticks per second (media clock rate)
+    const int32_t mClockRate;
+
+    uint32_t mFirstTimeStamp;
+    uint32_t mLastTimeStamp;
+    int64_t mFirstArrivalTimeUs;
+    int64_t mLastArrivalTimeUs;
+
+    int32_t mBaseJitterUs;
+    int32_t mInterArrivalJitterUs;
+
+public:
+    JitterCalc(int32_t clockRate);
+
+    void init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter);
+    void putInterArrivalData(int64_t rtpTime, int64_t arrivalTime);
+    void putBaseData(int64_t rtpTime, int64_t arrivalTimeUs);
+    int32_t getBaseJitterMs();
+    int32_t getInterArrivalJitterMs();
+};
+
+}   // namespace android
+
+#endif  // A_JITTER_CALCULATOR_H_
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 7f025a5..988cec7 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -74,7 +74,8 @@
 
 // The allowed maximum number of stale access units at the beginning of
 // a new sequence.
-static int32_t kMaxAllowedStaleAccessUnits = 20;
+static int32_t kMaxAllowedStaleAudioAccessUnits = 20;
+static int32_t kMaxAllowedStaleVideoAccessUnits = 400;
 
 static int64_t kTearDownTimeoutUs = 3000000ll;
 
@@ -108,6 +109,10 @@
     }
 }
 
+static int32_t GetMaxAllowedStaleCount(bool isVideo) {
+    return isVideo ? kMaxAllowedStaleVideoAccessUnits : kMaxAllowedStaleAudioAccessUnits;
+}
+
 struct MyHandler : public AHandler {
     enum {
         kWhatConnected                  = 'conn',
@@ -1032,6 +1037,11 @@
                     break;
                 }
 
+                int32_t rtcpEvent;
+                if (msg->findInt32("rtcp-event", &rtcpEvent)) {
+                    break;
+                }
+
                 ++mNumAccessUnitsReceived;
                 postAccessUnitTimeoutCheck();
 
@@ -1325,6 +1335,8 @@
 
                         ALOGV("rtp-info: %s", response->mHeaders.valueAt(i).c_str());
 
+                        mRTPConn->seekStream();
+
                         ALOGI("seek completed.");
                     }
                 }
@@ -1509,7 +1521,7 @@
             TrackInfo *info = &mTracks.editItemAt(trackIndex);
             info->mFirstSeqNumInSegment = seq;
             info->mNewSegment = true;
-            info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+            info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
 
             CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
 
@@ -1551,6 +1563,7 @@
         int mRTPSocket;
         int mRTCPSocket;
         bool mUsingInterleavedTCP;
+        bool mIsVideo;
         uint32_t mFirstSeqNumInSegment;
         bool mNewSegment;
         int32_t mAllowedStaleAccessUnits;
@@ -1635,9 +1648,10 @@
         info->mURL = trackURL;
         info->mPacketSource = source;
         info->mUsingInterleavedTCP = false;
+        info->mIsVideo = source->isVideo();
         info->mFirstSeqNumInSegment = 0;
         info->mNewSegment = true;
-        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
         info->mRTPSocket = -1;
         info->mRTCPSocket = -1;
         info->mRTPAnchor = 0;
@@ -1833,11 +1847,12 @@
                 // by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
                 // RTSP "PLAY" command should be used to detect the first RTP packet
                 // after seeking.
+                int32_t maxAllowedStaleAccessUnits = GetMaxAllowedStaleCount(track->mIsVideo);
                 if (mSeekable) {
                     if (track->mAllowedStaleAccessUnits > 0) {
                         uint32_t seqNum16 = seqNum & 0xffff;
                         uint32_t firstSeqNumInSegment16 = track->mFirstSeqNumInSegment & 0xffff;
-                        if (seqNum16 > firstSeqNumInSegment16 + kMaxAllowedStaleAccessUnits
+                        if (seqNum16 > firstSeqNumInSegment16 + maxAllowedStaleAccessUnits
                                 || seqNum16 < firstSeqNumInSegment16) {
                             // Not the first rtp packet of the stream after seeking, discarding.
                             track->mAllowedStaleAccessUnits--;
@@ -1852,7 +1867,7 @@
                         mNumAccessUnitsReceived = 0;
                         ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
                              "Still no first rtp packet after %d stale ones",
-                             kMaxAllowedStaleAccessUnits);
+                             maxAllowedStaleAccessUnits);
                         track->mAllowedStaleAccessUnits = -1;
                         return UNKNOWN_ERROR;
                     }
diff --git a/media/libstagefright/rtsp/NetworkUtils.cpp b/media/libstagefright/rtsp/NetworkUtils.cpp
index cc36b78..c053be8 100644
--- a/media/libstagefright/rtsp/NetworkUtils.cpp
+++ b/media/libstagefright/rtsp/NetworkUtils.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <unistd.h>
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NetworkUtils"
 #include <utils/Log.h>
diff --git a/media/libstagefright/rtsp/QualManager.cpp b/media/libstagefright/rtsp/QualManager.cpp
new file mode 100644
index 0000000..37aa326
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.cpp
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "QualManager"
+
+#include <algorithm>
+
+#include <sys/prctl.h>
+#include <utils/Log.h>
+
+#include "QualManager.h"
+
+namespace android {
+
+QualManager::Watcher::Watcher(int32_t timeLimit)
+    : Thread(false), mWatching(false), mSwitch(false),
+      mTimeLimit(timeLimit * 1000000LL)     // timeLimit is in ms; stored as ns
+{
+}
+
+bool QualManager::Watcher::isExpired() const
+{
+    return mSwitch;
+}
+
+void QualManager::Watcher::setup() {
+    AutoMutex _l(mMyLock);
+    if (mWatching == false) {
+        mWatching = true;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::release() {
+    AutoMutex _l(mMyLock);
+    if (mSwitch) {
+        ALOGW("%s DISARMED", name);
+        mSwitch = false;
+    }
+    if (mWatching == true) {
+        ALOGW("%s DISARMED", name);
+        mWatching = false;
+        mMyCond.signal();
+    }
+}
+
+void QualManager::Watcher::exit() {
+    AutoMutex _l(mMyLock);
+    // The order is important to avoid a deadlock.
+    Thread::requestExit();
+    mMyCond.signal();
+}
+
+QualManager::Watcher::~Watcher() {
+    ALOGI("%s thread dead", name);
+}
+
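+// Watchdog loop: sleeps until setup() arms it, then waits for mTimeLimit; if it is still armed
+// when the timer fires, isExpired() reports true until release() is called.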
+bool QualManager::Watcher::threadLoop() {
+    AutoMutex _l(mMyLock);
+#if defined(__linux__)
+    prctl(PR_GET_NAME, name, 0, 0, 0);
+#endif
+    while (!exitPending()) {
+        ALOGW("%s Timer init", name);
+        mMyCond.wait(mMyLock);                      // waits in the non-watching state
+        if (exitPending())
+            return false;
+        ALOGW("%s timer BOOM after %d msec", name, (int)(mTimeLimit / 1000000LL));
+        mMyCond.waitRelative(mMyLock, mTimeLimit);  // waits in the watching state
+        if (mWatching == true) {
+            mSwitch = true;
+            ALOGW("%s BOOM!!!!", name);
+        }
+        mWatching = false;
+    }
+    return false;
+}
+
+
+QualManager::QualManager()
+    : mMinBitrate(-1), mMaxBitrate(-1),
+      mTargetBitrate(512000), mLastTargetBitrate(-1),
+      mLastSetBitrateTime(0), mIsNewTargetBitrate(false)
+{
+    VFPWatcher = new Watcher(3000);     // Very Few Packets watcher
+    VFPWatcher->run("VeryFewPkt");
+    LBRWatcher = new Watcher(10000);    // Low Bit Rate watcher
+    LBRWatcher->run("LowBitRate");
+}
+
+QualManager::~QualManager() {
+    VFPWatcher->exit();
+    LBRWatcher->exit();
+}
+
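+// Returns the clamped target bitrate once after each update, or -1 when there is nothing new.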
+int32_t QualManager::getTargetBitrate() {
+    if (mIsNewTargetBitrate) {
+        mIsNewTargetBitrate = false;
+        mLastTargetBitrate = clampingBitrate(mTargetBitrate);
+        mTargetBitrate = mLastTargetBitrate;
+        return mTargetBitrate;
+    } else {
+        return -1;
+    }
+}
+
+bool QualManager::isNeedToDowngrade() {
+    return LBRWatcher->isExpired();
+}
+
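+// Adapts the target bitrate from the RTCP fraction-lost value (x/256): back off by three steps
+// on more than 5% loss or when very few packets arrive, and step up on roughly 2% loss or less.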
+void QualManager::setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts) {
+    /* Very few packets are arriving; the remote peer may be switching cameras.
+     * If this condition persists, the bitrate should be lowered.
+     */
+    if (isTooLowPkts) {
+        VFPWatcher->setup();
+    } else {
+        VFPWatcher->release();
+    }
+
+    if ((fraction > (256 * 5 / 100) && !isTooLowPkts) || VFPWatcher->isExpired()) {
+        // more than 5% loss, or the very-few-packets watcher expired
+        mTargetBitrate -= mBitrateStep * 3;
+    } else if (fraction <= (256 * 2 /100)) {
+        // roughly 2% loss or less
+        mTargetBitrate += mBitrateStep;
+    }
+
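+    // The raw target may overshoot the configured limits by one step; the value reported by
+    // getTargetBitrate() is clamped to [mMinBitrate, mMaxBitrate].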
+    if (mTargetBitrate > mMaxBitrate) {
+        mTargetBitrate = mMaxBitrate + mBitrateStep;
+    } else if (mTargetBitrate < mMinBitrate) {
+        LBRWatcher->setup();
+        mTargetBitrate = mMinBitrate - mBitrateStep;
+    }
+
+    if (mLastTargetBitrate != clampingBitrate(mTargetBitrate) ||
+        nowUs - mLastSetBitrateTime > 5000000ll) {
+        mIsNewTargetBitrate = true;
+        mLastSetBitrateTime = nowUs;
+    }
+}
+
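+// The adjustment step used by setTargetBitrate() is one eighth of the configured bitrate range.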
+void QualManager::setMinMaxBitrate(int32_t min, int32_t max) {
+    mMinBitrate = min;
+    mMaxBitrate = max;
+    mBitrateStep = (max - min) / 8;
+}
+
+void QualManager::setBitrateData(int32_t bitrate, int64_t /*now*/) {
+    // A measured bitrate at or above the minimum counts as healthy, even if packet loss was reported.
+    if (bitrate >= mMinBitrate && mTargetBitrate >= mMinBitrate) {
+        LBRWatcher->release();
+    } else if (bitrate < mMinBitrate) {
+        LBRWatcher->setup();
+    }
+}
+
+int32_t QualManager::clampingBitrate(int32_t bitrate) {
+    return std::min(std::max(mMinBitrate, bitrate), mMaxBitrate);
+}
+} // namespace android
diff --git a/media/libstagefright/rtsp/QualManager.h b/media/libstagefright/rtsp/QualManager.h
new file mode 100644
index 0000000..a7dc921
--- /dev/null
+++ b/media/libstagefright/rtsp/QualManager.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef QUAL_MANAGER_H_
+
+#define QUAL_MANAGER_H_
+
+#include <stdint.h>
+#include <utils/Thread.h>
+
+namespace android {
+class QualManager {
+public:
+    QualManager();
+    ~QualManager();
+
+    int32_t getTargetBitrate();
+    bool isNeedToDowngrade();
+
+    void setTargetBitrate(uint8_t fraction, int64_t nowUs, bool isTooLowPkts);
+    void setMinMaxBitrate(int32_t min, int32_t max);
+    void setBitrateData(int32_t bitrate, int64_t now);
+private:
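+    // One-shot watchdog thread: setup() arms a timer, release() disarms it, and isExpired()
+    // reports whether the timer ran out while still armed.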
+    class Watcher : public Thread
+    {
+    public:
+        Watcher(int32_t timeLimit);
+
+        void setup();
+        void release();
+        void exit();
+        bool isExpired() const;
+    private:
+        virtual ~Watcher();
+        virtual bool threadLoop();
+
+        char name[32] = {0,};
+
+        Condition mMyCond;
+        Mutex mMyLock;
+
+        bool mWatching;
+        bool mSwitch;
+        const nsecs_t mTimeLimit;
+    };
+    sp<Watcher> VFPWatcher;
+    sp<Watcher> LBRWatcher;
+    int32_t mMinBitrate;
+    int32_t mMaxBitrate;
+    int32_t mBitrateStep;
+
+    int32_t mTargetBitrate;
+    int32_t mLastTargetBitrate;
+    int64_t mLastSetBitrateTime;
+
+    bool mIsNewTargetBitrate;
+
+    int32_t clampingBitrate(int32_t bitrate);
+};
+} //namespace android
+
+#endif  // QUAL_MANAGER_H_
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/TrafficRecorder.h
new file mode 100644
index 0000000..8ba8f90
--- /dev/null
+++ b/media/libstagefright/rtsp/TrafficRecorder.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_TRAFFIC_RECORDER_H_
+
+#define A_TRAFFIC_RECORDER_H_
+
+#include <float.h>      // FLT_MIN
+
+#include <android-base/logging.h>
+#include <utils/Log.h>  // ALOGD/ALOGW
+#include <utils/RefBase.h>
+
+namespace android {
+
+// Circular buffer that records the number of bytes transferred at each clock value.
+template <class Time, class Bytes>
+class TrafficRecorder : public RefBase {
+private:
+    constexpr static size_t kMinNumEntries = 4;
+    constexpr static size_t kMaxNumEntries = 1024;
+
+    size_t mSize;
+    size_t mSizeMask;
+    Time *mTimeArray = NULL;
+    Bytes *mBytesArray = NULL;
+    size_t mHeadIdx;
+    size_t mTailIdx;
+
+    int mLastReadIdx;
+
+    const Time mRecordLimit;
+    Time mClock;
+    Time mLastTimeOfPrint;
+    Bytes mAccuBytes;
+
+public:
+    TrafficRecorder(size_t size, Time recordLimit);
+    virtual ~TrafficRecorder();
+
+    void init();
+    void updateClock(Time now);
+    Bytes readBytesForTotal();
+    Bytes readBytesForLastPeriod(Time period);
+    void writeBytes(Bytes bytes);
+    void printAccuBitsForLastPeriod(Time period, Time unit);
+};
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size, Time recordLimit)
+    : mRecordLimit(recordLimit) {
+    if (size > kMaxNumEntries) {
+        LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMaxNumEntries;
+        size = kMaxNumEntries;
+    } else if (size < kMinNumEntries) {
+        LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMinNumEntries;
+        size = kMinNumEntries;
+    }
+
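+    // Round the entry count up to the next power of two so indices wrap with a simple bit mask.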
+    size_t exp = ((sizeof(size_t) == 8) ?
+                  64 - __builtin_clzl(size - 1) :
+                  32 - __builtin_clz(size - 1));
+    mSize = (1ul << exp);         // size = 2^exp
+    mSizeMask = mSize - 1;
+
+    LOG(VERBOSE) << "TrafficRecorder Init size " << mSize;
+    mTimeArray = new Time[mSize];
+    mBytesArray = new Bytes[mSize];
+
+    init();
+}
+
+template <class Time, class Bytes>
+TrafficRecorder<Time, Bytes>::~TrafficRecorder() {
+    delete[] mTimeArray;
+    delete[] mBytesArray;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::init() {
+    mHeadIdx = 0;
+    mTailIdx = mSizeMask;
+    for (size_t i = 0; i < mSize; i++) {
+        mTimeArray[i] = 0;
+        mBytesArray[i] = 0;
+    }
+    mClock = 0;
+    mLastReadIdx = 0;
+    mLastTimeOfPrint = 0;
+    mAccuBytes = 0;
+}
+
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::updateClock(Time now) {
+    mClock = now;
+}
+
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForTotal() {
+    return mAccuBytes;
+}
+
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
+    // Not enough data
+    if (period > mClock)
+        return 0;
+
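+    // Walk backwards from the newest entry, summing bytes until an entry older than
+    // (mClock - period) is reached.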
+    Bytes bytes = 0;
+    int i = mHeadIdx;
+    while (i != mTailIdx) {
+        LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i]
+                << " \t EndOfPeriod " << mClock - period
+                << "\t\t Bytes:" << mBytesArray[i] << "\t\t Accu: " << bytes;
+        if (mTimeArray[i] < mClock - period) {
+            break;
+        }
+        bytes += mBytesArray[i];
+        i = (i - 1) & mSizeMask;
+    }
+    mLastReadIdx = (i + 1) & mSizeMask;
+
+    return bytes;
+}
+
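+// Records bytes at the current clock value; writes that share a timestamp are merged into one
+// entry, and the tail advances when the buffer is full.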
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::writeBytes(Bytes bytes) {
+    int writeIdx;
+    if (mClock == mTimeArray[mHeadIdx]) {
+        writeIdx = mHeadIdx;
+        mBytesArray[writeIdx] += bytes;
+    } else {
+        writeIdx = (mHeadIdx + 1) & mSizeMask;
+        mTimeArray[writeIdx] = mClock;
+        mBytesArray[writeIdx] = bytes;
+    }
+
+    LOG(VERBOSE) << "WRITE " << writeIdx << " time " << mClock;
+    if (writeIdx == mTailIdx) {
+        mTailIdx = (mTailIdx + 1) & mSizeMask;
+    }
+
+    mHeadIdx = writeIdx;
+    mAccuBytes += bytes;
+}
+
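+// Logs the measured throughput at most once per 'period' and warns when the buffer cannot
+// cover the whole mRecordLimit window.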
+template <class Time, class Bytes>
+void TrafficRecorder<Time, Bytes>::printAccuBitsForLastPeriod(Time period, Time unit) {
+    Time timeSinceLastPrint = mClock - mLastTimeOfPrint;
+    if (timeSinceLastPrint < period)
+        return;
+
+    Bytes sum = readBytesForLastPeriod(period);
+    Time readPeriod = mClock - mTimeArray[mLastReadIdx];
+
+    float numOfUnit = (float)(readPeriod) / (unit + FLT_MIN);
+    ALOGD("Actual Tx period %.3f unit \t %.0f bytes (%.0f Kbits)/Unit",
+          numOfUnit, sum / numOfUnit, sum * 8.f / numOfUnit / 1000.f);
+    mLastTimeOfPrint = mClock;
+
+    if (mClock - mTimeArray[mTailIdx] < mRecordLimit) {
+        // The buffer is too small to record bytes for the whole mRecordLimit period.
+        ALOGW("Traffic recorder size is not enough. mRecordLimit %d", (int)mRecordLimit);
+    }
+}
+
+}  // namespace android
+
+#endif  // A_TRAFFIC_RECORDER_H_
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index a7f94c1..a799a13 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -1,5 +1,24 @@
 // Build the unit tests.
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_tests_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_test {
     name: "MediaCodecListOverrides_test",
 
@@ -9,7 +28,6 @@
         "libmedia",
         "libstagefright",
         "libstagefright_foundation",
-        "libstagefright_omx",
         "libutils",
         "liblog",
     ],
@@ -17,11 +35,8 @@
     include_dirs: [
         "frameworks/av/media/libstagefright",
         "frameworks/av/media/libstagefright/include",
-        "frameworks/native/include/media/openmax",
     ],
 
-    compile_multilib: "32",
-
     cflags: [
         "-Werror",
         "-Wall",
@@ -44,4 +59,4 @@
         "-Werror",
         "-Wall",
     ],
-}
\ No newline at end of file
+}
diff --git a/media/libstagefright/tests/ESDS/Android.bp b/media/libstagefright/tests/ESDS/Android.bp
new file mode 100644
index 0000000..04e9b29
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+cc_test {
+    name: "ESDSTest",
+    gtest: true,
+
+    srcs: [
+        "ESDSTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libdatasource",
+        "liblog",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libstagefright_esds",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/ESDS/AndroidTest.xml b/media/libstagefright/tests/ESDS/AndroidTest.xml
new file mode 100644
index 0000000..a4fbc7f
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for ESDS unit test">
+    <option name="test-suite-tag" value="ESDSTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="ESDSTest->/data/local/tmp/ESDSTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip?unzip=true"
+            value="/data/local/tmp/ESDSTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="ESDSTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/ESDSTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/ESDS/ESDSTest.cpp b/media/libstagefright/tests/ESDS/ESDSTest.cpp
new file mode 100644
index 0000000..101e00c
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTest.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ESDSTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <fstream>
+
+#include <ESDS.h>
+#include <binder/ProcessState.h>
+#include <datasource/FileSource.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MetaData.h>
+
+#include "ESDSTestEnvironment.h"
+
+using namespace android;
+
+static ESDSTestEnvironment *gEnv = nullptr;
+
+struct ESDSParams {
+    const char *inputFile;
+    int32_t objectTypeIndication;
+    const char *codecSpecificInfoData;
+    int32_t codecSpecificInfoDataSize;
+    int32_t bitrateMax;
+    int32_t bitrateAvg;
+};
+
+class ESDSUnitTest : public ::testing::TestWithParam<tuple<
+                             /* InputFile */ const char *,
+                             /* ObjectTypeIndication */ int32_t,
+                             /* CodecSpecificInfoData */ const char *,
+                             /* CodecSpecificInfoDataSize */ int32_t,
+                             /* BitrateMax */ int32_t,
+                             /* BitrateAvg */ int32_t>> {
+  public:
+    ESDSUnitTest() : mESDSData(nullptr) {
+        mESDSParams.inputFile = get<0>(GetParam());
+        mESDSParams.objectTypeIndication = get<1>(GetParam());
+        mESDSParams.codecSpecificInfoData = get<2>(GetParam());
+        mESDSParams.codecSpecificInfoDataSize = get<3>(GetParam());
+        mESDSParams.bitrateMax = get<4>(GetParam());
+        mESDSParams.bitrateAvg = get<5>(GetParam());
+    };
+
+    virtual void TearDown() override {
+        if (mDataSource) mDataSource.clear();
+        if (mInputFp) {
+            fclose(mInputFp);
+            mInputFp = nullptr;
+        }
+    }
+
+    virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
+    const void *mESDSData;
+    size_t mESDSSize;
+    ESDSParams mESDSParams;
+
+  private:
+    void readESDSData() {
+        string inputFile = gEnv->getRes() + mESDSParams.inputFile;
+        mInputFp = fopen(inputFile.c_str(), "rb");
+        ASSERT_NE(mInputFp, nullptr) << "File open failed for file: " << inputFile;
+        int32_t fd = fileno(mInputFp);
+        ASSERT_GE(fd, 0) << "File descriptor invalid for file: " << inputFile;
+
+        struct stat buf;
+        status_t status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file: " << mESDSParams.inputFile;
+        size_t fileSize = buf.st_size;
+
+        mDataSource = new FileSource(dup(fd), 0, fileSize);
+        ASSERT_NE(mDataSource, nullptr) << "Unable to create data source for file: " << inputFile;
+
+        sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(mDataSource);
+        if (extractor == nullptr) {
+            mDataSource.clear();
+            ASSERT_TRUE(false) << "Unable to create extractor for file: " << inputFile;
+        }
+
+        size_t numTracks = extractor->countTracks();
+        ASSERT_GT(numTracks, 0) << "No tracks in file: " << inputFile;
+        ASSERT_TRUE(esdsDataPresent(numTracks, extractor))
+                << "Unable to find esds in any track in file: " << inputFile;
+    }
+
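+    // Scans every track's metadata and keeps the first kKeyESDS blob found.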
+    bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
+        bool foundESDS = false;
+        uint32_t type;
+        for (size_t i = 0; i < numTracks; ++i) {
+            sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+            if (trackMeta != nullptr &&
+                trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+                trackMeta->clear();
+                foundESDS = true;
+                break;
+            }
+        }
+        return foundESDS;
+    }
+
+    FILE *mInputFp;
+    sp<DataSource> mDataSource;
+};
+
+TEST_P(ESDSUnitTest, InvalidDataTest) {
+    void *invalidData = calloc(mESDSSize, 1);
+    ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+    ESDS esds(invalidData, mESDSSize);
+    free(invalidData);
+    ASSERT_NE(esds.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST(ESDSSanityUnitTest, ConstructorSanityTest) {
+    void *invalidData = malloc(1);
+    ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
+    ESDS esds_zero(invalidData, 0);
+    free(invalidData);
+    ASSERT_NE(esds_zero.InitCheck(), OK) << "invalid ESDS data accepted";
+
+    ESDS esds_null(NULL, 0);
+    ASSERT_NE(esds_null.InitCheck(), OK) << "invalid ESDS data accepted";
+}
+
+TEST_P(ESDSUnitTest, CreateAndDestroyTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+}
+
+TEST_P(ESDSUnitTest, ObjectTypeIndicationTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    uint8_t objectTypeIndication;
+    status_t status = esds.getObjectTypeIndication(&objectTypeIndication);
+    ASSERT_EQ(status, OK) << "ESDS objectTypeIndication data invalid";
+    ASSERT_EQ(objectTypeIndication, mESDSParams.objectTypeIndication)
+            << "ESDS objectTypeIndication data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, CodecSpecificInfoTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    status_t status;
+    const void *codecSpecificInfo;
+    size_t codecSpecificInfoSize;
+    status = esds.getCodecSpecificInfo(&codecSpecificInfo, &codecSpecificInfoSize);
+    ASSERT_EQ(status, OK) << "ESDS getCodecSpecificInfo data invalid";
+    ASSERT_EQ(mESDSParams.codecSpecificInfoDataSize, codecSpecificInfoSize)
+            << "CodecSpecificInfo data doesn't match";
+    status = memcmp(codecSpecificInfo, mESDSParams.codecSpecificInfoData, codecSpecificInfoSize);
+    ASSERT_EQ(status, 0) << "CodecSpecificInfo data doesn't match";
+}
+
+TEST_P(ESDSUnitTest, GetBitrateTest) {
+    ESDS esds(mESDSData, mESDSSize);
+    ASSERT_EQ(esds.InitCheck(), OK) << "ESDS data invalid";
+    uint32_t bitrateMax;
+    uint32_t bitrateAvg;
+    status_t status = esds.getBitRate(&bitrateMax, &bitrateAvg);
+    ASSERT_EQ(status, OK) << "ESDS bitRate data invalid";
+    ASSERT_EQ(bitrateMax, mESDSParams.bitrateMax) << "ESDS bitrateMax doesn't match";
+    ASSERT_EQ(bitrateAvg, mESDSParams.bitrateAvg) << "ESDS bitrateAvg doesn't match";
+    ASSERT_LE(bitrateAvg, bitrateMax) << "ESDS bitrateMax is less than bitrateAvg";
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        ESDSUnitTestAll, ESDSUnitTest,
+        ::testing::Values(
+                // InputFile, ObjectTypeIndication, CodecSpecificInfoData,
+                // CodecSpecificInfoDataSize, BitrateMax, BitrateAvg
+                make_tuple("video_176x144_3gp_h263_56kbps_12fps_aac_stereo_128kbps_22050hz.3gp", 64,
+                           "\x13\x90", 2, 131072, 0),
+                make_tuple("video_1280x720_mp4_mpeg2_3000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
+                           97,
+                           "\x00\x00\x01\xB3\x50\x02\xD0\x35\xFF\xFF\xE1\xA0\x00\x00\x01\xB5\x15"
+                           "\x6A\x00\x01\x00\x00",
+                           22, 3415452, 3415452),
+                make_tuple("video_176x144_3gp_h263_56kbps_25fps_aac_mono_24kbps_11025hz.3gp", 64,
+                           "\x15\x08", 2, 24576, 0)));
+
+int main(int argc, char **argv) {
+    // MediaExtractor needs binder thread pool
+    ProcessState::self()->startThreadPool();
+    gEnv = new ESDSTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
new file mode 100644
index 0000000..4ca2303
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __ESDS_TEST_ENVIRONMENT_H__
+#define __ESDS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class ESDSTestEnvironment : public ::testing::Environment {
+  public:
+    ESDSTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int ESDSTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --res: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __ESDS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/ESDS/README.md b/media/libstagefright/tests/ESDS/README.md
new file mode 100644
index 0000000..100fb86
--- /dev/null
+++ b/media/libstagefright/tests/ESDS/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### ESDS Unit Test :
+The ESDS Unit Test Suite validates the ESDS class available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m ESDSTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/ESDSTest/ESDSTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/ESDS/ESDSTestRes-1.0.zip).
+Download, unzip, and push these files to the device for testing.
+
+```
+adb push ESDSTestRes /data/local/tmp/
+```
+
+usage: ESDSTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/ESDSTest -P /data/local/tmp/ESDSTestRes/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest ESDSTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
new file mode 100644
index 0000000..91bf385
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+cc_test {
+    name: "HEVCUtilsUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "HEVCUtilsUnitTest.cpp",
+    ],
+
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/HEVC/AndroidTest.xml b/media/libstagefright/tests/HEVC/AndroidTest.xml
new file mode 100644
index 0000000..ff850a2
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for HEVC Utils unit tests">
+    <option name="test-suite-tag" value="HEVCUtilsUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="HEVCUtilsUnitTest->/data/local/tmp/HEVCUtilsUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true"
+            value="/data/local/tmp/HEVCUtilsUnitTest/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="HEVCUtilsUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/HEVCUtilsUnitTest/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h b/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h
new file mode 100644
index 0000000..e4481e1
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __HEVC_UTILS_TEST_ENVIRONMENT_H__
+#define __HEVC_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class HEVCUtilsTestEnvironment : public ::testing::Environment {
+  public:
+    HEVCUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int HEVCUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __HEVC_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
new file mode 100644
index 0000000..324a042
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HevcUtilityTest"
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include "include/HevcUtils.h"
+
+#include "HEVCUtilsTestEnvironment.h"
+
+using namespace android;
+
+// max size of hvcc box is 2 KB
+constexpr uint32_t kHvccBoxMaxSize = 2048;
+constexpr uint32_t kHvccBoxMinSize = 20;
+constexpr uint32_t kVPSCode = 32;
+constexpr uint32_t kSPSCode = 33;
+constexpr uint32_t kPPSCode = 34;
+constexpr uint32_t kNALSizeLength = 2;
+
+static HEVCUtilsTestEnvironment *gEnv = nullptr;
+
+class HEVCUtilsUnitTest
+    : public ::testing::TestWithParam<
+              tuple</*fileName*/ string, /*infoFileName*/ string, /*numVPSNals*/ size_t,
+                    /*numSPSNals*/ size_t, /*numPPSNals*/ size_t, /*frameRate*/ int16_t,
+                    /*isHdr*/ bool>> {
+  public:
+    ~HEVCUtilsUnitTest() {
+        if (mMediaFileStream.is_open()) mMediaFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    virtual void SetUp() override {
+        tuple<string, string, size_t, size_t, size_t, int16_t, bool> params = GetParam();
+        string inputMediaFile = gEnv->getRes() + get<0>(params);
+        mMediaFileStream.open(inputMediaFile, ifstream::in);
+        ASSERT_TRUE(mMediaFileStream.is_open()) << "Failed to open media file: " << inputMediaFile;
+
+        string inputInfoFile = gEnv->getRes() + get<1>(params);
+        mInfoFileStream.open(inputInfoFile, ifstream::in);
+        ASSERT_TRUE(mInfoFileStream.is_open()) << "Failed to open info file: " << inputInfoFile;
+
+        mNumVPSNals = get<2>(params);
+        mNumSPSNals = get<3>(params);
+        mNumPPSNals = get<4>(params);
+        mFrameRate = get<5>(params);
+        mIsHDR = get<6>(params);
+    }
+
+    size_t mNumVPSNals;
+    size_t mNumSPSNals;
+    size_t mNumPPSNals;
+    int16_t mFrameRate;
+    bool mIsHDR;
+    ifstream mMediaFileStream;
+    ifstream mInfoFileStream;
+};
+
+TEST_P(HEVCUtilsUnitTest, NALUnitTest) {
+    HevcParameterSets hevcParams;
+
+    string line;
+    int32_t index = 0;
+    status_t err;
+    while (getline(mInfoFileStream, line)) {
+        string type;
+        int32_t chunkLength;
+
+        istringstream stringLine(line);
+        stringLine >> type >> chunkLength;
+        ASSERT_GT(chunkLength, 0) << "Length of data chunk must be greater than 0";
+
+        char *data = (char *)malloc(chunkLength);
+        ASSERT_NE(data, nullptr) << "Failed to allocate data buffer of size: " << chunkLength;
+
+        mMediaFileStream.read(data, chunkLength);
+        ASSERT_EQ(mMediaFileStream.gcount(), chunkLength)
+                << "Failed to read complete file, bytes read: " << mMediaFileStream.gcount();
+
+        // A valid startcode consists of at least two 0x00 bytes followed by 0x01.
+        int32_t offset = 0;
+        for (; offset + 2 < chunkLength; ++offset) {
+            if (data[offset + 2] == 0x01 && data[offset + 1] == 0x00 && data[offset] == 0x00) {
+                break;
+            }
+        }
+        offset += 3;
+        ASSERT_LE(offset, chunkLength) << "NAL unit offset must not exceed the chunk length";
+
+        uint8_t *nalUnit = (uint8_t *)(data + offset);
+        size_t nalUnitLength = chunkLength - offset;
+
+        // Add NAL units only if they're of type: VPS/SPS/PPS/SEI
+        if (!((type.compare("VPS") && type.compare("SPS") && type.compare("PPS") &&
+               type.compare("SEI")))) {
+            err = hevcParams.addNalUnit(nalUnit, nalUnitLength);
+            ASSERT_EQ(err, (status_t)OK)
+                    << "Failed to add NAL Unit type: " << type << " Size: " << nalUnitLength;
+
+            size_t sizeNalUnit = hevcParams.getSize(index);
+            ASSERT_EQ(sizeNalUnit, nalUnitLength) << "Invalid size returned for NAL: " << type;
+
+            uint8_t *destination = (uint8_t *)malloc(nalUnitLength);
+            ASSERT_NE(destination, nullptr)
+                    << "Failed to allocate buffer of size: " << nalUnitLength;
+
+            bool status = hevcParams.write(index, destination, nalUnitLength);
+            ASSERT_TRUE(status) << "Unable to write NAL Unit data";
+
+            free(destination);
+            index++;
+        } else {
+            err = hevcParams.addNalUnit(nalUnit, nalUnitLength);
+            ASSERT_NE(err, (status_t)OK) << "Invalid NAL Unit added, type: " << type;
+        }
+        free(data);
+    }
+
+    size_t numNalUnits = hevcParams.getNumNalUnitsOfType(kVPSCode);
+    ASSERT_EQ(numNalUnits, mNumVPSNals) << "Wrong number of VPS NAL Units";
+
+    numNalUnits = hevcParams.getNumNalUnitsOfType(kSPSCode);
+    ASSERT_EQ(numNalUnits, mNumSPSNals) << "Wrong number of SPS NAL Units";
+
+    numNalUnits = hevcParams.getNumNalUnitsOfType(kPPSCode);
+    ASSERT_EQ(numNalUnits, mNumPPSNals) << "Wrong number of PPS NAL Units";
+
+    HevcParameterSets::Info info = hevcParams.getInfo();
+    ASSERT_EQ(info & HevcParameterSets::kInfoIsHdr,
+              (mIsHDR ? HevcParameterSets::kInfoIsHdr : HevcParameterSets::kInfoNone))
+            << "Wrong info about HDR";
+
+    ASSERT_EQ(info & HevcParameterSets::kInfoHasColorDescription,
+              (mIsHDR ? HevcParameterSets::kInfoHasColorDescription : HevcParameterSets::kInfoNone))
+            << "Wrong info about color description";
+
+    // An HEVC stream starts with VPS, SPS, and PPS NAL units in sequence.
+    uint8_t typeNalUnit = hevcParams.getType(0);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypeVps)
+            << "Expected NAL type: 32(VPS), found: " << typeNalUnit;
+
+    typeNalUnit = hevcParams.getType(1);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypeSps)
+            << "Expected NAL type: 33(SPS), found: " << typeNalUnit;
+
+    typeNalUnit = hevcParams.getType(2);
+    ASSERT_EQ(typeNalUnit, kHevcNalUnitTypePps)
+            << "Expected NAL type: 34(PPS), found: " << typeNalUnit;
+
+    size_t hvccBoxSize = kHvccBoxMaxSize;
+    uint8_t *hvcc = (uint8_t *)malloc(kHvccBoxMaxSize);
+    ASSERT_NE(hvcc, nullptr) << "Failed to allocate a hvcc buffer of size: " << kHvccBoxMaxSize;
+
+    err = hevcParams.makeHvcc(hvcc, &hvccBoxSize, kNALSizeLength);
+    ASSERT_EQ(err, (status_t)OK) << "Unable to create hvcc box";
+
+    ASSERT_GT(hvccBoxSize, kHvccBoxMinSize)
+            << "Hvcc box size must be greater than " << kHvccBoxMinSize;
+
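+    // The frame rate stored in the hvcc box is checked only advisorily; a mismatch logs a
+    // warning instead of failing the test.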
+    int16_t frameRate = hvcc[kHvccBoxMinSize - 1] | (hvcc[kHvccBoxMinSize] << 8);
+    if (frameRate != mFrameRate)
+        cout << "[   WARN   ] Expected frame rate: " << mFrameRate << " Found: " << frameRate
+             << endl;
+
+    free(hvcc);
+}
+
+// Info File contains the type and length for each chunk/frame
+INSTANTIATE_TEST_SUITE_P(
+        HEVCUtilsUnitTestAll, HEVCUtilsUnitTest,
+        ::testing::Values(make_tuple("crowd_3840x2160p50f300_32500kbps.hevc",
+                                     "crowd_3840x2160p50f300_32500kbps.info", 1, 1, 1, 50, false),
+                          make_tuple("crowd_1920x1080p24f300_4500kbps.hevc",
+                                     "crowd_1920x1080p24f300_4500kbps.info", 1, 1, 1, 24, false),
+                          make_tuple("crowd_1280x720p24f300_3000kbps.hevc",
+                                     "crowd_1280x720p24f300_3000kbps.info", 1, 1, 1, 24, false),
+                          make_tuple("crowd_640x360p24f300_500kbps.hevc",
+                                     "crowd_640x360p24f300_500kbps.info", 1, 1, 1, 24, false)));
+
+int main(int argc, char **argv) {
+    gEnv = new HEVCUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/HEVC/README.md b/media/libstagefright/tests/HEVC/README.md
new file mode 100644
index 0000000..fa0e99c
--- /dev/null
+++ b/media/libstagefright/tests/HEVC/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### HEVC Utils Test
+The HEVC Utility Unit Test Suite validates the HevcUtils library available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m HEVCUtilsUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/HEVCUtilsUnitTest/HEVCUtilsUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/HEVCUtilsUnitTest/HEVCUtilsUnitTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip). Download, unzip, and push these files to the device for testing.
+
+```
+adb push HEVCUtilsUnitTest /data/local/tmp/
+```
+
+usage: HEVCUtilsUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/HEVCUtilsUnitTest -P /data/local/tmp/HEVCUtilsUnitTest/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest HEVCUtilsUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index e3e61d7..13d5b89 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -14,9 +14,21 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
 cc_test {
     name: "ExtractorFactoryTest",
     gtest: true,
+    test_suites: ["device-tests"],
 
     srcs: [
         "ExtractorFactoryTest.cpp",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
new file mode 100644
index 0000000..0097830
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -0,0 +1,94 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+cc_defaults {
+    name: "libstagefright_fuzzer_defaults",
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+    shared_libs: [
+        "libstagefright",
+        "libstagefright_codecbase",
+        "libbase",
+        "libcutils",
+        "libutils",
+        "libstagefright_foundation",
+        "libmedia",
+        "libaudioclient",
+        "libmedia_omx",
+        "libgui",
+        "libbinder",
+        "liblog",
+        "media_permission-aidl-cpp",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediaclock_fuzzer",
+    srcs: [
+        "MediaClockFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediascanner_fuzzer",
+    srcs: [
+        "StagefrightMediaScannerFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_skipcutbuffer_fuzzer",
+    srcs: [
+        "SkipCutBufferFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_mediamuxer_fuzzer",
+    srcs: [
+        "MediaMuxerFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_frameDecoder_fuzzer",
+    srcs: [
+        "FrameDecoderFuzzer.cpp",
+    ],
+    defaults: ["libstagefright_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "libstagefright_writer_fuzzer",
+    srcs: [
+        "FuzzerMediaUtility.cpp",
+        "WriterFuzzer.cpp",
+    ],
+    dictionary: "dictionaries/formats.dict",
+    defaults: ["libstagefright_fuzzer_defaults"],
+    static_libs: [
+        "libstagefright_webm",
+        "libdatasource",
+        "libstagefright_esds",
+        "libogg",
+    ],
+}
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
new file mode 100644
index 0000000..c251479
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/FrameDecoder.h"
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IMediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AString.h>
+#include "FrameDecoderHelpers.h"
+#include "IMediaSourceFuzzImpl.h"
+
+namespace android {
+
+#define MAX_MEDIA_BUFFER_SIZE 2048
+
+// Fuzzer entry point.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    // Init our wrapper
+    FuzzedDataProvider fdp(data, size);
+
+    std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+    AString componentName(name.c_str());
+    sp<MetaData> trackMeta = generateMetaData(&fdp);
+    sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
+
+    // Image or video Decoder?
+    sp<FrameDecoder> decoder;
+    bool isVideoDecoder = fdp.ConsumeBool();
+    if (isVideoDecoder) {
+        decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+    } else {
+        decoder = new MediaImageDecoder(componentName, trackMeta, source);
+    }
+
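+    // Exercise the decoder API in a data-dependent order until the fuzzed input is exhausted.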
+    while (fdp.remaining_bytes()) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 3)) {
+            case 0:
+                decoder->init(/*frameTimeUs*/ fdp.ConsumeIntegral<int64_t>(),
+                              /*option*/ fdp.ConsumeIntegral<int>(),
+                              /*colorFormat*/ fdp.ConsumeIntegral<int>());
+                break;
+            case 1:
+                decoder->extractFrame();
+                break;
+            case 2: {
+                FrameRect rect;
+                rect.left = fdp.ConsumeIntegral<int32_t>();
+                rect.top = fdp.ConsumeIntegral<int32_t>();
+                rect.right = fdp.ConsumeIntegral<int32_t>();
+                rect.bottom = fdp.ConsumeIntegral<int32_t>();
+                decoder->extractFrame(&rect);
+                break;
+            }
+            case 3: {
+                sp<MetaData> trackMeta = generateMetaData(&fdp);
+                decoder->getMetadataOnly(trackMeta,
+                                         /*colorFormat*/ fdp.ConsumeIntegral<int>(),
+                                         /*thumbnail*/ fdp.ConsumeBool());
+                break;
+            }
+        }
+    }
+
+    generated_mime_types.clear();
+
+    return 0;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
new file mode 100644
index 0000000..228c04a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -0,0 +1,90 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/MetaData.h>
+#include "MediaMimeTypes.h"
+
+#define MAX_METADATA_BUF_SIZE 512
+
+namespace android {
+
+std::vector<std::shared_ptr<char>> generated_mime_types;
+
+sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
+    sp<MetaData> newMeta = new MetaData();
+
+    // random MIME Type
+    const char *mime_type;
+    size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+    // Let there be a chance of a true random string
+    if (index == kMimeTypes.size()) {
+        std::string mime_str = fdp->ConsumeRandomLengthString(64);
+        // Use an array deleter since the buffer is allocated with new[].
+        std::shared_ptr<char> mime_cstr(new char[mime_str.length() + 1], std::default_delete<char[]>());
+        generated_mime_types.push_back(mime_cstr);
+        strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+        mime_type = mime_cstr.get();
+    } else {
+        mime_type = kMimeTypes[index];
+    }
+    newMeta->setCString(kKeyMIMEType, mime_type);
+
+    // Thumbnail time
+    newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+
+    // Values used by allocVideoFrame
+    newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
+    size_t profile_size =
+        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
+    std::vector<uint8_t> profile_bytes =
+        fdp->ConsumeBytes<uint8_t>(profile_size);
+    newMeta->setData(kKeyIccProfile,
+                     fdp->ConsumeIntegral<int32_t>(),
+                     profile_bytes.empty() ? nullptr : profile_bytes.data(),
+                     profile_bytes.size());
+    newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+
+    // Values used by findThumbnailInfo
+    newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
+    size_t thumbnail_size =
+        fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
+    std::vector<uint8_t> thumb_bytes =
+        fdp->ConsumeBytes<uint8_t>(thumbnail_size);
+    newMeta->setData(kKeyThumbnailHVCC,
+                     fdp->ConsumeIntegral<int32_t>(),
+                     thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
+                     thumb_bytes.size());
+
+    // Values used by findGridInfo
+    newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+
+    // A few functions perform a CHECK() that height/width are set
+    newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
+    newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+
+    return newMeta;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
new file mode 100644
index 0000000..810ae95
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FuzzerMediaUtility.h"
+
+#include <media/stagefright/AACWriter.h>
+#include <media/stagefright/AMRWriter.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OggWriter.h>
+
+#include "MediaMimeTypes.h"
+#include "webm/WebmWriter.h"
+
+namespace android {
+std::string genMimeType(FuzzedDataProvider *dataProvider) {
+    uint8_t idx = dataProvider->ConsumeIntegralInRange<uint8_t>(0, kMimeTypes.size() - 1);
+    return std::string(kMimeTypes[idx]);
+}
+
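+// Wraps fuzzer-supplied bytes in a base64 data: URI so DataSourceFactory can
+// build a DataSource from it, then asks MediaExtractorFactory to sniff it.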
+sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
+                                      uint16_t maxDataAmount) {
+    uint32_t dataBlobSize = dataProvider->ConsumeIntegralInRange<uint16_t>(0, maxDataAmount);
+    std::vector<uint8_t> data = dataProvider->ConsumeBytes<uint8_t>(dataBlobSize);
+    // data:[<mediatype>][;base64],<data>
+    std::string uri("data:");
+    uri += mimeType;
+    // Currently libstagefright only accepts base64 uris
+    uri += ";base64,";
+    android::AString out;
+    android::encodeBase64(data.data(), data.size(), &out);
+    uri += out.c_str();
+
+    sp<DataSource> source =
+        DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, uri.c_str());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return MediaExtractorFactory::Create(source);
+}
+
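+// Creates an extractor from fuzzer data and returns the first track whose MIME
+// type matches the randomly chosen one, wrapped in a CallbackMediaSource.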
+sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize) {
+    std::string mime = genMimeType(dataProvider);
+    sp<IMediaExtractor> extractor = genMediaExtractor(dataProvider, mime, maxMediaBlobSize);
+
+    if (extractor == NULL) {
+        return NULL;
+    }
+
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+        const char *trackMime;
+        if (meta != NULL && meta->findCString(kKeyMIMEType, &trackMime) &&
+            !strcasecmp(mime.c_str(), trackMime)) {
+            sp<IMediaSource> track = extractor->getTrack(i);
+            if (track == NULL) {
+                return NULL;
+            }
+            return new CallbackMediaSource(track);
+        }
+    }
+
+    return NULL;
+}
+
+sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> fileMeta) {
+    sp<MediaWriter> writer;
+    switch (writerType) {
+        case OGG:
+            writer = new OggWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+            break;
+        case AAC:
+            writer = new AACWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            break;
+        case AAC_ADTS:
+            writer = new AACWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
+            break;
+        case WEBM:
+            writer = new WebmWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+            break;
+        case MPEG4:
+            writer = new MPEG4Writer(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            break;
+        case AMR_NB:
+            writer = new AMRWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            break;
+        case AMR_WB:
+            writer = new AMRWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            break;
+        case MPEG2TS:
+            writer = new MPEG2TSWriter(fd);
+            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            break;
+        default:
+            return nullptr;
+    }
+    if (writer != nullptr) {
+        fileMeta->setInt32(kKeyRealTimeRecording, false);
+    }
+    return writer;
+}
+}  // namespace android
\ No newline at end of file
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
new file mode 100644
index 0000000..98bfb94
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <datasource/DataSourceFactory.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <android/IMediaExtractor.h>
+#include <media/IMediaHTTPService.h>
+#include <media/mediarecorder.h>
+#include <media/stagefright/CallbackMediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <media/stagefright/MediaWriter.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/base64.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+enum StandardWriters {
+    OGG,
+    AAC,
+    AAC_ADTS,
+    WEBM,
+    MPEG4,
+    AMR_NB,
+    AMR_WB,
+    MPEG2TS,
+    // Allows FuzzedDataProvider to find the end of this enum.
+    kMaxValue = MPEG2TS,
+};
+
+std::string genMimeType(FuzzedDataProvider *dataProvider);
+sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
+                                      uint16_t maxDataAmount);
+sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
+
+sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> fileMeta);
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
new file mode 100644
index 0000000..e769950
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMEDIASOURCEFUZZIMPL_H
+#define IMEDIASOURCEFUZZIMPL_H
+
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
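+// Minimal IMediaSource stub: readMultiple() hands out MediaBufferFuzzImpl
+// buffers filled with fuzzer bytes, and buffer_bases keeps them alive so the
+// raw pointers returned to callers remain valid.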
+class IMediaSourceFuzzImpl : public IMediaSource {
+ public:
+    IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
+        fdp(_fdp),
+        max_buffer_size(_max_buffer_size) {}
+    status_t start(MetaData*) override { return 0; }
+    status_t stop() override { return 0; }
+    sp<MetaData> getFormat() override { return nullptr; }
+    status_t read(MediaBufferBase**,
+        const MediaSource::ReadOptions*) override;
+    status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+        const MediaSource::ReadOptions*) override;
+    bool supportReadMultiple() override { return true; }
+    bool supportNonblockingRead() override { return true; }
+    status_t pause() override { return 0; }
+
+ protected:
+    IBinder* onAsBinder() override { return nullptr; }
+
+ private:
+    FuzzedDataProvider *fdp;
+    std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
+    const size_t max_buffer_size;
+};
+
+// This subclass exists only to expose MediaBuffer's protected destructor.
+class MediaBufferFuzzImpl : public MediaBuffer {
+ public:
+    MediaBufferFuzzImpl(void *data, size_t size) : MediaBuffer(data, size) {}
+    ~MediaBufferFuzzImpl() {}
+};
+
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
+        const MediaSource::ReadOptions *options) {
+    Vector<MediaBufferBase*> buffers;
+    status_t ret = readMultiple(&buffers, 1, options);
+    *buffer = buffers.empty() ? nullptr : buffers[0];
+
+    return ret;
+}
+
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
+        uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
+    uint32_t num_buffers =
+        fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
+    for(uint32_t i = 0; i < num_buffers; i++) {
+        std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
+            fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+
+        std::shared_ptr<MediaBufferBase> mbb(
+            new MediaBufferFuzzImpl(buf.data(), buf.size()));
+
+        buffer_bases.push_back(mbb);
+        buffers->push_back(mbb.get());
+    }
+
+    // STATUS_OK
+    return 0;
+}
+
+} // namespace android
+
+#endif // IMEDIASOURCEFUZZIMPL_H
+
diff --git a/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
new file mode 100644
index 0000000..9b26f0b
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaClockFuzzer.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaClock.h>
+
+namespace android {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    sp<MediaClock> mClock(new MediaClock);
+
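+    // Drive MediaClock's public API with fuzzer-chosen operations; init() is
+    // guarded so it runs at most once per clock instance.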
+    bool registered = false;
+    while (fdp.remaining_bytes() > 0) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 5)) {
+            case 0: {
+                if (registered == false) {
+                    mClock->init();
+                    registered = true;
+                }
+                break;
+            }
+            case 1: {
+                int64_t startingTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->setStartingTimeMedia(startingTimeMediaUs);
+                break;
+            }
+            case 2: {
+                mClock->clearAnchor();
+                break;
+            }
+            case 3: {
+                int64_t anchorTimeRealUs = fdp.ConsumeIntegral<int64_t>();
+                int64_t anchorTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->updateAnchor(anchorTimeMediaUs, anchorTimeRealUs,
+                                     maxTimeMediaUs);
+                break;
+            }
+            case 4: {
+                int64_t maxTimeMediaUs = fdp.ConsumeIntegral<int64_t>();
+                mClock->updateMaxTimeMedia(maxTimeMediaUs);
+                break;
+            }
+            case 5: {
+                wp<AMessage> msg(new AMessage);
+                mClock->setNotificationMessage(msg.promote());
+            }
+        }
+    }
+
+    return 0;
+}
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
new file mode 100644
index 0000000..9f337ac
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FUZZER_MEDIAMIMETYPES_H_
+#define FUZZER_MEDIAMIMETYPES_H_
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+namespace android {
+
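+// MIME type constants from MediaDefs.h; fuzzers index into this list whenever
+// they need a plausible track or container type.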
+static const std::vector<const char*> kMimeTypes {
+    MEDIA_MIMETYPE_IMAGE_JPEG,
+    MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
+    MEDIA_MIMETYPE_VIDEO_VP8,
+    MEDIA_MIMETYPE_VIDEO_VP9,
+    MEDIA_MIMETYPE_VIDEO_AV1,
+    MEDIA_MIMETYPE_VIDEO_AVC,
+    MEDIA_MIMETYPE_VIDEO_HEVC,
+    MEDIA_MIMETYPE_VIDEO_MPEG4,
+    MEDIA_MIMETYPE_VIDEO_H263,
+    MEDIA_MIMETYPE_VIDEO_MPEG2,
+    MEDIA_MIMETYPE_VIDEO_RAW,
+    MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+    MEDIA_MIMETYPE_VIDEO_SCRAMBLED,
+    MEDIA_MIMETYPE_VIDEO_DIVX,
+    MEDIA_MIMETYPE_VIDEO_DIVX3,
+    MEDIA_MIMETYPE_VIDEO_XVID,
+    MEDIA_MIMETYPE_VIDEO_MJPEG,
+    MEDIA_MIMETYPE_AUDIO_AMR_NB,
+    MEDIA_MIMETYPE_AUDIO_AMR_WB,
+    MEDIA_MIMETYPE_AUDIO_MPEG,
+    MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+    MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+    MEDIA_MIMETYPE_AUDIO_MIDI,
+    MEDIA_MIMETYPE_AUDIO_AAC,
+    MEDIA_MIMETYPE_AUDIO_QCELP,
+    MEDIA_MIMETYPE_AUDIO_VORBIS,
+    MEDIA_MIMETYPE_AUDIO_OPUS,
+    MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+    MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+    MEDIA_MIMETYPE_AUDIO_RAW,
+    MEDIA_MIMETYPE_AUDIO_FLAC,
+    MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+    MEDIA_MIMETYPE_AUDIO_MSGSM,
+    MEDIA_MIMETYPE_AUDIO_AC3,
+    MEDIA_MIMETYPE_AUDIO_EAC3,
+    MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+    MEDIA_MIMETYPE_AUDIO_AC4,
+    MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+    MEDIA_MIMETYPE_AUDIO_ALAC,
+    MEDIA_MIMETYPE_AUDIO_WMA,
+    MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+    MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+    MEDIA_MIMETYPE_CONTAINER_MPEG4,
+    MEDIA_MIMETYPE_CONTAINER_WAV,
+    MEDIA_MIMETYPE_CONTAINER_OGG,
+    MEDIA_MIMETYPE_CONTAINER_MATROSKA,
+    MEDIA_MIMETYPE_CONTAINER_MPEG2TS,
+    MEDIA_MIMETYPE_CONTAINER_AVI,
+    MEDIA_MIMETYPE_CONTAINER_MPEG2PS,
+    MEDIA_MIMETYPE_CONTAINER_HEIF,
+    MEDIA_MIMETYPE_TEXT_3GPP,
+    MEDIA_MIMETYPE_TEXT_SUBRIP,
+    MEDIA_MIMETYPE_TEXT_VTT,
+    MEDIA_MIMETYPE_TEXT_CEA_608,
+    MEDIA_MIMETYPE_TEXT_CEA_708,
+    MEDIA_MIMETYPE_DATA_TIMED_ID3
+};
+
+}  // namespace android
+
+#endif  // FUZZER_MEDIAMIMETYPES_H_
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
new file mode 100644
index 0000000..5df3267
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <MediaMuxerFuzzer.h>
+#include <cutils/ashmem.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaMuxer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+namespace android {
+
+// setBuffer and setString are not exercised here: every attempt either
+// segfaulted on a null-pointer read or leaked memory, so that functionality
+// is omitted.
+void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
+  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
+  while (fdp->remaining_bytes() > 0 && count > 0) {
+    uint8_t function_id =
+        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
+    amessage_setvals[function_id](msg, fdp);
+    count--;
+  }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
+  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
+  if (fd < 0)
+    return 0;
+
+  uint8_t *sh_data = static_cast<uint8_t *>(
+      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
+  if (sh_data == MAP_FAILED) {
+    close(fd);
+    return 0;
+  }
+
+  MediaMuxer::OutputFormat format =
+      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+  sp<MediaMuxer> mMuxer(new MediaMuxer(fd, format));
+
+  while (fdp.remaining_bytes() > 1) {
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+    case 0: {
+      // addTrack() is only exercised for the MP4-family formats; it proved
+      // unreliable for the WebM (1) and Ogg (4) output formats during fuzzing.
+      if (format == 1 || format == 4)
+        break;
+
+      sp<AMessage> a_format(new AMessage);
+      createMessage(a_format.get(), &fdp);
+      mMuxer->addTrack(a_format);
+      break;
+    }
+    case 1: {
+      mMuxer->start();
+      break;
+    }
+    case 2: {
+      int degrees = fdp.ConsumeIntegral<int>();
+      mMuxer->setOrientationHint(degrees);
+      break;
+    }
+    case 3: {
+      int latitude = fdp.ConsumeIntegral<int>();
+      int longitude = fdp.ConsumeIntegral<int>();
+      mMuxer->setLocation(latitude, longitude);
+      break;
+    }
+    case 4: {
+      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+
+      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
+      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
+      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
+    }
+    }
+  }
+
+  if (fdp.ConsumeBool())
+    mMuxer->stop();
+
+  munmap(sh_data, data_size);
+  close(fd);
+  return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
new file mode 100644
index 0000000..7d4421d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#pragma once
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+// Mappings vectors are the list of attributes that the MediaMuxer
+// class looks for in the message.
+static std::vector<const char *> floatMappings{
+    "capture-rate",
+    "time-lapse-fps",
+    "frame-rate",
+};
+
+static std::vector<const char *> int64Mappings{
+    "exif-offset",    "exif-size", "target-time",
+    "thumbnail-time", "timeUs",    "durationUs",
+};
+
+static std::vector<const char *> int32Mappings{"loop",
+                                               "time-scale",
+                                               "crypto-mode",
+                                               "crypto-default-iv-size",
+                                               "crypto-encrypted-byte-block",
+                                               "crypto-skip-byte-block",
+                                               "frame-count",
+                                               "max-bitrate",
+                                               "pcm-big-endian",
+                                               "temporal-layer-count",
+                                               "temporal-layer-id",
+                                               "thumbnail-width",
+                                               "thumbnail-height",
+                                               "track-id",
+                                               "valid-samples",
+                                               "color-format",
+                                               "ca-system-id",
+                                               "is-sync-frame",
+                                               "bitrate",
+                                               "width",
+                                               "height",
+                                               "sar-width",
+                                               "sar-height",
+                                               "display-width",
+                                               "display-height",
+                                               "is-default",
+                                               "tile-width",
+                                               "tile-height",
+                                               "grid-rows",
+                                               "grid-cols",
+                                               "rotation-degrees",
+                                               "channel-count",
+                                               "sample-rate",
+                                               "bits-per-sample",
+                                               "channel-mask",
+                                               "encoder-delay",
+                                               "encoder-padding",
+                                               "is-adts",
+                                               "frame-rate",
+                                               "max-height",
+                                               "max-width",
+                                               "max-input-size",
+                                               "haptic-channel-count",
+                                               "pcm-encoding",
+                                               "aac-profile"};
+
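+// Each lambda sets one AMessage field: a "crop" rect, or a float/int64/int32
+// keyed by an entry from the mapping tables above, using fuzzer-supplied
+// values. createMessage() in MediaMuxerFuzzer.cpp applies a random selection
+// of these setters to a single message.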
+static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
+    amessage_setvals = {
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>(),
+                       fdp->ConsumeIntegral<int32_t>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, floatMappings.size() - 1)],
+                        fdp->ConsumeFloatingPoint<float>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, int64Mappings.size() - 1)],
+                        fdp->ConsumeIntegral<int64_t>());
+        },
+        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
+          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
+                            0, int32Mappings.size() - 1)],
+                        fdp->ConsumeIntegral<int32_t>());
+        }};
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
new file mode 100644
index 0000000..1f78e6d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/SkipCutBufferFuzzer.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+  size_t skip = fdp.ConsumeIntegral<size_t>();
+  size_t cut = fdp.ConsumeIntegral<size_t>();
+  size_t num16Channels = fdp.ConsumeIntegral<size_t>();
+  sp<SkipCutBuffer> sBuffer(new SkipCutBuffer(skip, cut, num16Channels));
+
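+  // Submit MediaCodecBuffer, MediaBuffer and ABuffer payloads with
+  // fuzzer-chosen sizes and ranges, interleaved with clear() and size() calls.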
+  while (fdp.remaining_bytes() > 0) {
+    // Cap size to 1024 to limit max amount allocated.
+    size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, 1024);
+    size_t range = fdp.ConsumeIntegralInRange<size_t>(0, buf_size);
+    size_t length = fdp.ConsumeIntegralInRange<size_t>(0, buf_size - range);
+
+    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
+    case 0: {
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+      sp<AMessage> format(new AMessage);
+      sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+      s_buffer->setRange(range, length);
+      sBuffer->submit(s_buffer);
+      break;
+    }
+    case 1: {
+      std::unique_ptr<MediaBufferBase> m_buffer(new MediaBuffer(buf_size));
+      m_buffer->set_range(range, length);
+      sBuffer->submit(reinterpret_cast<MediaBuffer *>(m_buffer.get()));
+      break;
+    }
+    case 2: {
+      sp<ABuffer> a_buffer(new ABuffer(buf_size));
+      sp<AMessage> format(new AMessage);
+      sp<MediaCodecBuffer> s_buffer(new MediaCodecBuffer(format, a_buffer));
+      a_buffer->setRange(range, length);
+      sBuffer->submit(a_buffer);
+      break;
+    }
+    case 3: {
+      sBuffer->clear();
+      break;
+    }
+    case 4: {
+      sBuffer->size();
+    }
+    }
+  }
+  return 0;
+}
+} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
new file mode 100644
index 0000000..c50c951
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/StagefrightMediaScannerFuzzer.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/StagefrightMediaScanner.h>
+
+#include <cstdio>
+
+namespace android {
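+// No-op MediaScannerClient so processFile() has a client to report into.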
+class FuzzMediaScannerClient : public MediaScannerClient {
+ public:
+    virtual status_t scanFile(const char*, long long, long long, bool, bool) {
+        return 0;
+    }
+
+    virtual status_t handleStringTag(const char*, const char*) { return 0; }
+
+    virtual status_t setMimeType(const char*) { return 0; }
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    StagefrightMediaScanner mScanner = StagefrightMediaScanner();
+    // Set a locale up front; without one the fuzzer crashes.
+    mScanner.setLocale("");
+
+    while (fdp.remaining_bytes() > 0) {
+        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 1)) {
+            case 0: {
+                std::string path = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::string mimeType =
+                    fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
+                std::shared_ptr<MediaScannerClient> client(new FuzzMediaScannerClient());
+                mScanner.processFile(path.c_str(), mimeType.c_str(), *client);
+                break;
+            }
+            case 1: {
+                int fd = fdp.ConsumeIntegral<int>();
+                if (fd >= 0 && fd <= 2) fd = 3;
+                mScanner.extractAlbumArt(fd);
+            }
+        }
+    }
+    return 0;
+}
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
new file mode 100644
index 0000000..97d1160
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Authors: corbin.souffrant@leviathansecurity.com
+//          dylan.katz@leviathansecurity.com
+
+#include <android-base/file.h>
+#include <android/content/AttributionSourceState.h>
+#include <ctype.h>
+#include <media/mediarecorder.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <stdlib.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+
+#include <functional>
+#include <string>
+
+#include "FuzzerMediaUtility.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+static constexpr uint16_t kMaxOperations = 5000;
+static constexpr uint8_t kMaxPackageNameLen = 50;
+// For other strings in mpeg we want a higher limit.
+static constexpr uint16_t kMaxMPEGStrLen = 1000;
+static constexpr uint16_t kMaxMediaBlobSize = 1000;
+
+namespace android {
+
+using android::content::AttributionSourceState;
+
+std::string getFourCC(FuzzedDataProvider *fdp) {
+    std::string fourCC = fdp->ConsumeRandomLengthString(4);
+    // Replace any existing nulls
+    for (size_t pos = 0; pos < fourCC.length(); pos++) {
+        if (fourCC.at(pos) == '\0') {
+            fourCC.replace(pos, 1, "a");
+        }
+    }
+
+    // If our string is too short, fill the remainder with "a"s.
+    while (fourCC.length() < 4) {
+        fourCC += 'a';
+    }
+    return fourCC;
+}
+
+typedef std::vector<std::function<void(FuzzedDataProvider*,
+                                    sp<MediaWriter>, sp<MetaData>, int tmpFileFd)>> OperationVec;
+typedef std::vector<std::function<void(FuzzedDataProvider*, MPEG4Writer*)>> MPEG4OperationVec;
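+// Writer-agnostic operations: each lambda exercises one MediaWriter API with
+// fuzzer-derived arguments. The lambdas receive the temporary file's fd and
+// sometimes pass -1 instead to probe error handling.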
+static const OperationVec operations = {
+    [](FuzzedDataProvider*, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->pause();
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
+        bool valid_fd = dataProvider->ConsumeBool();
+        int fd = -1;
+        if (valid_fd) {
+            fd = tmpFd;
+        }
+        // Args don't seem to be used
+        Vector<String16> args;
+        mediaWriter->dump(fd, args);
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
+        bool valid_fd = dataProvider->ConsumeBool();
+        int fd = -1;
+        if (valid_fd) {
+            fd = tmpFd;
+        }
+        mediaWriter->setNextFd(fd);
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setCaptureRate(dataProvider->ConsumeFloatingPoint<float>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setStartTimeOffsetMs(dataProvider->ConsumeIntegral<int>());
+
+        // Likely won't do much, but might as well as do a quick check
+        // while we're here.
+        mediaWriter->getStartTimeOffsetMs();
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
+        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
+    },
+};
+
+static const MPEG4OperationVec mpeg4Operations = {
+    [](FuzzedDataProvider*, MPEG4Writer *mediaWriter) { mediaWriter->notifyApproachingLimit(); },
+    // Lower level write methods.
+    // High-level startBox/endBox/etc are all called elsewhere,
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint8_t val = dataProvider->ConsumeIntegral<uint8_t>();
+        mediaWriter->writeInt8(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint16_t val = dataProvider->ConsumeIntegral<uint16_t>();
+        mediaWriter->writeInt16(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t val = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->writeInt32(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint64_t val = dataProvider->ConsumeIntegral<uint64_t>();
+        mediaWriter->writeInt64(val);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        std::string strVal = dataProvider->ConsumeRandomLengthString(kMaxMPEGStrLen);
+        mediaWriter->writeCString(strVal.c_str());
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        std::string fourCC = getFourCC(dataProvider);
+        mediaWriter->writeFourcc(fourCC.c_str());
+    },
+
+    // Misc setters
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t layers = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->setTemporalLayerCount(layers);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        uint32_t duration = dataProvider->ConsumeIntegral<uint32_t>();
+        mediaWriter->setInterleaveDuration(duration);
+    },
+    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
+        int lat = dataProvider->ConsumeIntegral<int>();
+        int lon = dataProvider->ConsumeIntegral<int>();
+        mediaWriter->setGeoData(lat, lon);
+    },
+};
+
+// Not all writers can always add new sources, so we'll need additional checks.
+void addSource(FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter) {
+    sp<MediaSource> mediaSource = genMediaSource(dataProvider, kMaxMediaBlobSize);
+    if (mediaSource == NULL) {
+        // There's a static check preventing NULLs in addSource.
+        return;
+    }
+    mediaWriter->addSource(mediaSource);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    FuzzedDataProvider dataProvider(data, size);
+    TemporaryFile tf;
+    sp<MetaData> fileMeta = new MetaData;
+    StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
+    sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
+
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
+    attributionSource.uid = dataProvider.ConsumeIntegral<int32_t>();
+    attributionSource.pid = dataProvider.ConsumeIntegral<int32_t>();
+    attributionSource.token = sp<BBinder>::make();
+    sp<MediaRecorder> mr = new MediaRecorder(attributionSource);
+    writer->setListener(mr);
+
+    uint8_t baseOpLen = operations.size();
+    uint8_t totalLen = baseOpLen;
+    uint8_t maxSources;
+    // Different writers support different amounts of sources.
+    switch (writerType) {
+        case StandardWriters::AAC:
+        case StandardWriters::AAC_ADTS:
+        case StandardWriters::AMR_NB:
+        case StandardWriters::AMR_WB:
+        case StandardWriters::OGG:
+            maxSources = 1;
+            break;
+        case StandardWriters::WEBM:
+            maxSources = 2;
+            break;
+        default:
+            maxSources = UINT8_MAX;
+            break;
+    }
+    // Initialize some number of sources and add them to our writer.
+    uint8_t sourceCount = dataProvider.ConsumeIntegralInRange<uint8_t>(0, maxSources);
+    for (uint8_t i = 0; i < sourceCount; i++) {
+        addSource(&dataProvider, writer);
+    }
+
+    // Increase our range if additional operations are implemented.
+    // Currently only MPEG4 has additional public operations on its writer.
+    if (writerType == StandardWriters::MPEG4) {
+        totalLen += mpeg4Operations.size();
+    }
+
+    // Many operations require the writer to be started.
+    writer->start(fileMeta.get());
+    for (size_t ops_run = 0; dataProvider.remaining_bytes() > 0 && ops_run < kMaxOperations - 1;
+            ops_run++) {
+        uint8_t op = dataProvider.ConsumeIntegralInRange<uint8_t>(0, totalLen - 1);
+        if (op < baseOpLen) {
+            operations[op](&dataProvider, writer, fileMeta, tf.fd);
+        } else if (writerType == StandardWriters::MPEG4) {
+            mpeg4Operations[op - baseOpLen](&dataProvider, (MPEG4Writer*)writer.get());
+        } else {
+            // Unreachable: for non-MPEG4 writers totalLen equals baseOpLen,
+            // so op always falls in the base range handled above.
+            operations[op](&dataProvider, writer, fileMeta, tf.fd);
+        }
+    }
+    writer->stop();
+
+    writer.clear();
+    writer = nullptr;
+    return 0;
+}
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/dictionaries/formats.dict b/media/libstagefright/tests/fuzzers/dictionaries/formats.dict
new file mode 100644
index 0000000..4ab22de
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/dictionaries/formats.dict
@@ -0,0 +1,299 @@
+############################################################################################################
+# This file is a combination of these dictionaries:                                                        #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/mp4.dict    #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/webm.dict   #
+# https://github.com/google/fuzzing/blob/0c48531c4d317cea9479b3ec1b0ddb9edc438c3a/dictionaries/ogg.dict    #
+# MPEG4, OGG, and WEBM are all formats used by MediaWriter.                                                #
+############################################################################################################
+# mp4.dict
+# Taken from https://chromium.googlesource.com/chromium/src/+/master/media/test/mp4.dict
+FOURCC_NULL="\x00\x00\x00\x00"
+FOURCC_AC3 ="\x61\x63\x2d\x33"
+FOURCC_EAC3="\x65\x63\x2d\x33"
+FOURCC_AVC1="\x61\x76\x63\x31"
+FOURCC_AVC3="\x61\x76\x63\x33"
+FOURCC_AVCC="\x61\x76\x63\x43"
+FOURCC_BLOC="\x62\x6C\x6F\x63"
+FOURCC_CENC="\x63\x65\x6e\x63"
+FOURCC_CO64="\x63\x6f\x36\x34"
+FOURCC_CTTS="\x63\x74\x74\x73"
+FOURCC_DINF="\x64\x69\x6e\x66"
+FOURCC_EDTS="\x65\x64\x74\x73"
+FOURCC_EMSG="\x65\x6d\x73\x67"
+FOURCC_ELST="\x65\x6c\x73\x74"
+FOURCC_ENCA="\x65\x6e\x63\x61"
+FOURCC_ENCV="\x65\x6e\x63\x76"
+FOURCC_ESDS="\x65\x73\x64\x73"
+FOURCC_FREE="\x66\x72\x65\x65"
+FOURCC_FRMA="\x66\x72\x6d\x61"
+FOURCC_FTYP="\x66\x74\x79\x70"
+FOURCC_HDLR="\x68\x64\x6c\x72"
+FOURCC_HINT="\x68\x69\x6e\x74"
+FOURCC_HVC1="\x68\x76\x63\x31"
+FOURCC_HVCC="\x68\x76\x63\x43"
+FOURCC_IODS="\x69\x6f\x64\x73"
+FOURCC_MDAT="\x6d\x64\x61\x74"
+FOURCC_MDHD="\x6d\x64\x68\x64"
+FOURCC_MDIA="\x6d\x64\x69\x61"
+FOURCC_MECO="\x6d\x65\x63\x6f"
+FOURCC_MEHD="\x6d\x65\x68\x64"
+FOURCC_META="\x6d\x65\x74\x61"
+FOURCC_MFHD="\x6d\x66\x68\x64"
+FOURCC_MFRA="\x6d\x66\x72\x61"
+FOURCC_MINF="\x6d\x69\x6e\x66"
+FOURCC_MOOF="\x6d\x6f\x6f\x66"
+FOURCC_MOOV="\x6d\x6f\x6f\x76"
+FOURCC_MP4A="\x6d\x70\x34\x61"
+FOURCC_MP4V="\x6d\x70\x34\x76"
+FOURCC_MVEX="\x6d\x76\x65\x78"
+FOURCC_MVHD="\x6d\x76\x68\x64"
+FOURCC_PASP="\x70\x61\x73\x70"
+FOURCC_PDIN="\x70\x64\x69\x6e"
+FOURCC_PRFT="\x70\x72\x66\x74"
+FOURCC_PSSH="\x70\x73\x73\x68"
+FOURCC_SAIO="\x73\x61\x69\x6f"
+FOURCC_SAIZ="\x73\x61\x69\x7a"
+FOURCC_SBGP="\x73\x62\x67\x70"
+FOURCC_SCHI="\x73\x63\x68\x69"
+FOURCC_SCHM="\x73\x63\x68\x6d"
+FOURCC_SDTP="\x73\x64\x74\x70"
+FOURCC_SEIG="\x73\x65\x69\x67"
+FOURCC_SENC="\x73\x65\x6e\x63"
+FOURCC_SGPD="\x73\x67\x70\x64"
+FOURCC_SIDX="\x73\x69\x64\x78"
+FOURCC_SINF="\x73\x69\x6e\x66"
+FOURCC_SKIP="\x73\x6b\x69\x70"
+FOURCC_SMHD="\x73\x6d\x68\x64"
+FOURCC_SOUN="\x73\x6f\x75\x6e"
+FOURCC_SSIX="\x73\x73\x69\x78"
+FOURCC_STBL="\x73\x74\x62\x6c"
+FOURCC_STCO="\x73\x74\x63\x6f"
+FOURCC_STSC="\x73\x74\x73\x63"
+FOURCC_STSD="\x73\x74\x73\x64"
+FOURCC_STSS="\x73\x74\x73\x73"
+FOURCC_STSZ="\x73\x74\x73\x7a"
+FOURCC_STTS="\x73\x74\x74\x73"
+FOURCC_STYP="\x73\x74\x79\x70"
+FOURCC_TENC="\x74\x65\x6e\x63"
+FOURCC_TFDT="\x74\x66\x64\x74"
+FOURCC_TFHD="\x74\x66\x68\x64"
+FOURCC_TKHD="\x74\x6b\x68\x64"
+FOURCC_TRAF="\x74\x72\x61\x66"
+FOURCC_TRAK="\x74\x72\x61\x6b"
+FOURCC_TREX="\x74\x72\x65\x78"
+FOURCC_TRUN="\x74\x72\x75\x6e"
+FOURCC_UDTA="\x75\x64\x74\x61"
+FOURCC_UUID="\x75\x75\x69\x64"
+FOURCC_VIDE="\x76\x69\x64\x65"
+FOURCC_VMHD="\x76\x6d\x68\x64"
+FOURCC_WIDE="\x77\x69\x64\x65"
+
+# ogg.dict
+# https://xiph.org/vorbis/doc/Vorbis_I_spec.html
+
+header="OggS"
+
+# Codecs
+"BBCD\x00"
+"\x7fFLAC"
+"\x80theora"
+"\x01vorbis"
+"CELT    "
+"CMML\x00\x00\x00\x00"
+"\x8bJNG\x0d\x0a\x1a\x0a"
+"\x80kate\x00\x00\x00"
+"OggMIDI\x00"
+"\x8aMNG\x0d\x0a\x1a\x0a"
+"PCM     "
+"\x89PNG\x0d\x0a\x1a\x0a"
+"Speex   "
+"YUV4MPEG"
+
+# Metadata
+"TITLE="
+"VERSION="
+"ALBUM="
+"TRACKNUMBER="
+"ARTIST="
+"PERFORMER="
+"COPYRIGHT="
+"LICENSE="
+"ORGANIZATION="
+"DESCRIPTION="
+"GENRE="
+"DATE="
+"LOCATION="
+"CONTACT="
+"ISRC="
+
+# webm.dict
+# Element IDs.
+IdAesSettingsCipherMode = "\x47\xE8"
+IdAlphaMode = "\x53\xC0"
+IdAspectRatioType = "\x54\xB3"
+IdAudio = "\xE1"
+IdBitDepth = "\x62\x64"
+IdBitsPerChannel = "\x55\xB2"
+IdBlock = "\xA1"
+IdBlockAddId = "\xEE"
+IdBlockAdditional = "\xA5"
+IdBlockAdditions = "\x75\xA1"
+IdBlockDuration = "\x9B"
+IdBlockGroup = "\xA0"
+IdBlockMore = "\xA6"
+IdBlockVirtual = "\xA2"
+IdCbSubsamplingHorz = "\x55\xB5"
+IdCbSubsamplingVert = "\x55\xB6"
+IdChannels = "\x9F"
+IdChapCountry = "\x43\x7E"
+IdChapLanguage = "\x43\x7C"
+IdChapString = "\x85"
+IdChapterAtom = "\xB6"
+IdChapterDisplay = "\x80"
+IdChapterStringUID = "\x56\x54"
+IdChapterStringUid = "\x56\x54"
+IdChapterTimeEnd = "\x92"
+IdChapterTimeStart = "\x91"
+IdChapterUID = "\x73\xC4"
+IdChapterUid = "\x73\xC4"
+IdChapters = "\x10\x43\xA7\x70"
+IdChromaSitingHorz = "\x55\xB7"
+IdChromaSitingVert = "\x55\xB8"
+IdChromaSubsamplingHorz = "\x55\xB3"
+IdChromaSubsamplingVert = "\x55\xB4"
+IdCluster = "\x1F\x43\xB6\x75"
+IdCodecDelay = "\x56\xAA"
+IdCodecID = "\x86"
+IdCodecName = "\x25\x86\x88"
+IdCodecPrivate = "\x63\xA2"
+IdColour = "\x55\xB0"
+IdContentEncAESSettings = "\x47\xE7"
+IdContentEncAesSettings = "\x47\xE7"
+IdContentEncAlgo = "\x47\xE1"
+IdContentEncKeyId = "\x47\xE2"
+IdContentEncoding = "\x62\x40"
+IdContentEncodingOrder = "\x50\x31"
+IdContentEncodingScope = "\x50\x32"
+IdContentEncodingType = "\x50\x33"
+IdContentEncodings = "\x6D\x80"
+IdContentEncryption = "\x50\x35"
+IdCueBlockNumber = "\x53\x78"
+IdCueClusterPosition = "\xF1"
+IdCueDuration = "\xB2"
+IdCuePoint = "\xBB"
+IdCueRelativePosition = "\xF0"
+IdCueTime = "\xB3"
+IdCueTrack = "\xF7"
+IdCueTrackPositions = "\xB7"
+IdCues = "\x1C\x53\xBB\x6B"
+IdDateUTC = "\x44\x61"
+IdDateUtc = "\x44\x61"
+IdDefaultDuration = "\x23\xE3\x83"
+IdDiscardPadding = "\x75\xA2"
+IdDisplayHeight = "\x54\xBA"
+IdDisplayUnit = "\x54\xB2"
+IdDisplayWidth = "\x54\xB0"
+IdDocType = "\x42\x82"
+IdDocTypeReadVersion = "\x42\x85"
+IdDocTypeVersion = "\x42\x87"
+IdDuration = "\x44\x89"
+IdEBML = "\x1A\x45\xDF\xA3"
+IdEBMLMaxIDLength = "\x42\xF2"
+IdEBMLMaxSizeLength = "\x42\xF3"
+IdEBMLReadVersion = "\x42\xF7"
+IdEBMLVersion = "\x42\x86"
+IdEbml = "\x1A\x45\xDF\xA3"
+IdEbmlMaxIdLength = "\x42\xF2"
+IdEbmlMaxSizeLength = "\x42\xF3"
+IdEbmlReadVersion = "\x42\xF7"
+IdEbmlVersion = "\x42\x86"
+IdEditionEntry = "\x45\xB9"
+IdFileUsedEndTime = "\x46\x62"
+IdFileUsedStartTime = "\x46\x61"
+IdFlagDefault = "\x88"
+IdFlagEnabled = "\xB9"
+IdFlagForced = "\x55\xAA"
+IdFlagInterlaced = "\x9A"
+IdFlagLacing = "\x9C"
+IdFrameRate = "\x23\x83\xE3"
+IdInfo = "\x15\x49\xA9\x66"
+IdLaceNumber = "\xCC"
+IdLanguage = "\x22\xB5\x9C"
+IdLuminanceMax = "\x55\xD9"
+IdLuminanceMin = "\x55\xDA"
+IdMasteringMetadata = "\x55\xD0"
+IdMatrixCoefficients = "\x55\xB1"
+IdMaxCll = "\x55\xBC"
+IdMaxFall = "\x55\xBD"
+IdMuxingApp = "\x4D\x80"
+IdName = "\x53\x6E"
+IdOutputSamplingFrequency = "\x78\xB5"
+IdPixelCropBottom = "\x54\xAA"
+IdPixelCropLeft = "\x54\xCC"
+IdPixelCropRight = "\x54\xDD"
+IdPixelCropTop = "\x54\xBB"
+IdPixelHeight = "\xBA"
+IdPixelWidth = "\xB0"
+IdPrevSize = "\xAB"
+IdPrimaries = "\x55\xBB"
+IdPrimaryBChromaticityX = "\x55\xD5"
+IdPrimaryBChromaticityY = "\x55\xD6"
+IdPrimaryGChromaticityX = "\x55\xD3"
+IdPrimaryGChromaticityY = "\x55\xD4"
+IdPrimaryRChromaticityX = "\x55\xD1"
+IdPrimaryRChromaticityY = "\x55\xD2"
+IdProjection = "\x76\x70"
+IdProjectionPosePitch = "\x76\x74"
+IdProjectionPoseRoll = "\x76\x75"
+IdProjectionPoseYaw = "\x76\x73"
+IdProjectionPrivate = "\x76\x72"
+IdProjectionType = "\x76\x71"
+IdRange = "\x55\xB9"
+IdReferenceBlock = "\xFB"
+IdSamplingFrequency = "\xB5"
+IdSeek = "\x4D\xBB"
+IdSeekHead = "\x11\x4D\x9B\x74"
+IdSeekID = "\x53\xAB"
+IdSeekPosition = "\x53\xAC"
+IdSeekPreRoll = "\x56\xBB"
+IdSegment = "\x18\x53\x80\x67"
+IdSimpleBlock = "\xA3"
+IdSimpleTag = "\x67\xC8"
+IdSliceDuration = "\xCF"
+IdSlices = "\x8E"
+IdStereoMode = "\x53\xB8"
+IdTag = "\x73\x73"
+IdTagBinary = "\x44\x85"
+IdTagDefault = "\x44\x84"
+IdTagLanguage = "\x44\x7A"
+IdTagName = "\x45\xA3"
+IdTagString = "\x44\x87"
+IdTagTrackUid = "\x63\xC5"
+IdTags = "\x12\x54\xC3\x67"
+IdTargetType = "\x63\xCA"
+IdTargetTypeValue = "\x68\xCA"
+IdTargets = "\x63\xC0"
+IdTimeSlice = "\xE8"
+IdTimecode = "\xE7"
+IdTimecodeScale = "\x2A\xD7\xB1"
+IdTitle = "\x7B\xA9"
+IdTrackEntry = "\xAE"
+IdTrackNumber = "\xD7"
+IdTrackType = "\x83"
+IdTrackUID = "\x73\xC5"
+IdTracks = "\x16\x54\xAE\x6B"
+IdTransferCharacteristics = "\x55\xBA"
+IdTrickMasterTrackSegmentUID = "\xC4"
+IdTrickMasterTrackUID = "\xC7"
+IdTrickTrackFlag = "\xC6"
+IdTrickTrackSegmentUID = "\xC1"
+IdTrickTrackUID = "\xC0"
+IdVideo = "\xE0"
+IdVoid = "\xEC"
+IdWhitePointChromaticityX = "\x55\xD7"
+IdWhitePointChromaticityY = "\x55\xD8"
+IdWritingApp = "\x57\x41"
+
+# Interesting sizes.
+SizeUnknown = "\xFF"
+
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
new file mode 100644
index 0000000..9cdc6d4
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+cc_test {
+    name: "mediacodecTest",
+    gtest: true,
+
+    srcs: [
+        "MediaCodecTest.cpp",
+        "MediaTestHelper.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+    ],
+
+    shared_libs: [
+        "libgui",
+        "libmedia",
+        "libmedia_codeclist",
+        "libmediametrics",
+        "libmediandk",
+        "libstagefright",
+        "libstagefright_codecbase",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+
+    test_suites: [
+        "general-tests",
+    ],
+}
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
new file mode 100644
index 0000000..ac1e9b1
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -0,0 +1,395 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <future>
+#include <thread>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <gui/Surface.h>
+#include <mediadrm/ICrypto.h>
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+#include <media/MediaCodecInfo.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+class MockBufferChannel : public BufferChannelBase {
+public:
+    ~MockBufferChannel() override = default;
+
+    MOCK_METHOD(void, setCrypto, (const sp<ICrypto> &crypto), (override));
+    MOCK_METHOD(void, setDescrambler, (const sp<IDescrambler> &descrambler), (override));
+    MOCK_METHOD(status_t, queueInputBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+    MOCK_METHOD(status_t, queueSecureInputBuffer,
+            (const sp<MediaCodecBuffer> &buffer,
+             bool secure,
+             const uint8_t *key,
+             const uint8_t *iv,
+             CryptoPlugin::Mode mode,
+             CryptoPlugin::Pattern pattern,
+             const CryptoPlugin::SubSample *subSamples,
+             size_t numSubSamples,
+             AString *errorDetailMsg),
+            (override));
+    MOCK_METHOD(status_t, attachBuffer,
+            (const std::shared_ptr<C2Buffer> &c2Buffer, const sp<MediaCodecBuffer> &buffer),
+            (override));
+    MOCK_METHOD(status_t, attachEncryptedBuffer,
+            (const sp<hardware::HidlMemory> &memory,
+             bool secure,
+             const uint8_t *key,
+             const uint8_t *iv,
+             CryptoPlugin::Mode mode,
+             CryptoPlugin::Pattern pattern,
+             size_t offset,
+             const CryptoPlugin::SubSample *subSamples,
+             size_t numSubSamples,
+             const sp<MediaCodecBuffer> &buffer),
+            (override));
+    MOCK_METHOD(status_t, renderOutputBuffer,
+            (const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
+            (override));
+    MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
+    MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+    MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+};
+
+class MockCodec : public CodecBase {
+public:
+    MockCodec(std::function<void(const std::shared_ptr<MockBufferChannel> &)> mock) {
+        mMockBufferChannel = std::make_shared<MockBufferChannel>();
+        mock(mMockBufferChannel);
+    }
+    ~MockCodec() override = default;
+
+    MOCK_METHOD(void, initiateAllocateComponent, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, initiateConfigureComponent, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, initiateCreateInputSurface, (), (override));
+    MOCK_METHOD(void, initiateSetInputSurface, (const sp<PersistentSurface> &surface), (override));
+    MOCK_METHOD(void, initiateStart, (), (override));
+    MOCK_METHOD(void, initiateShutdown, (bool keepComponentAllocated), (override));
+    MOCK_METHOD(void, onMessageReceived, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(status_t, setSurface, (const sp<Surface> &surface), (override));
+    MOCK_METHOD(void, signalFlush, (), (override));
+    MOCK_METHOD(void, signalResume, (), (override));
+    MOCK_METHOD(void, signalRequestIDRFrame, (), (override));
+    MOCK_METHOD(void, signalSetParameters, (const sp<AMessage> &msg), (override));
+    MOCK_METHOD(void, signalEndOfInputStream, (), (override));
+
+    std::shared_ptr<BufferChannelBase> getBufferChannel() override {
+        return mMockBufferChannel;
+    }
+
+    const std::unique_ptr<CodecCallback> &callback() {
+        return mCallback;
+    }
+
+    std::shared_ptr<MockBufferChannel> mMockBufferChannel;
+};
+
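+// Test-only synchronization helper: advance() increments a shared stage value
+// and wakes waiters; wait() blocks (and waitFor() blocks up to a timeout)
+// until the value matches one of the requested stages. The tests below use it
+// to order the client, MediaCodec looper, and component threads.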
+class Counter {
+public:
+    Counter() = default;
+    explicit Counter(int32_t initCount) : mCount(initCount) {}
+    ~Counter() = default;
+
+    int32_t advance() {
+        std::unique_lock<std::mutex> lock(mMutex);
+        ++mCount;
+        mCondition.notify_all();
+        return mCount;
+    }
+
+    template <typename Rep, typename Period, typename ...Args>
+    int32_t waitFor(const std::chrono::duration<Rep, Period> &duration, Args... values) {
+        std::initializer_list<int32_t> list = {values...};
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait_for(
+                lock,
+                duration,
+                [&list, this]{
+                    return std::find(list.begin(), list.end(), mCount) != list.end();
+                });
+        return mCount;
+    }
+
+    template <typename ...Args>
+    int32_t wait(Args... values) {
+        std::initializer_list<int32_t> list = {values...};
+        std::unique_lock<std::mutex> lock(mMutex);
+        mCondition.wait(
+                lock,
+                [&list, this]{
+                    return std::find(list.begin(), list.end(), mCount) != list.end();
+                });
+        return mCount;
+    }
+
+private:
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    int32_t mCount = 0;
+};
+
+}  // namespace android
+
+using namespace android;
+using ::testing::_;
+
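+// Builds a codec list containing a single entry for the given codec name and
+// creates a MediaCodec instance whose CodecBase is supplied by the
+// getCodecBase factory passed in by the test.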
+static sp<MediaCodec> SetupMediaCodec(
+        const AString &owner,
+        const AString &codecName,
+        const AString &mediaType,
+        const sp<ALooper> &looper,
+        std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase) {
+    std::shared_ptr<MediaCodecListWriter> listWriter =
+        MediaTestHelper::CreateCodecListWriter();
+    std::unique_ptr<MediaCodecInfoWriter> infoWriter = listWriter->addMediaCodecInfo();
+    infoWriter->setName(codecName.c_str());
+    infoWriter->setOwner(owner.c_str());
+    infoWriter->addMediaType(mediaType.c_str());
+    std::vector<sp<MediaCodecInfo>> codecInfos;
+    MediaTestHelper::WriteCodecInfos(listWriter, &codecInfos);
+    std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo =
+        [codecInfos](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+            auto it = std::find_if(
+                    codecInfos.begin(), codecInfos.end(),
+                    [&name](const sp<MediaCodecInfo> &info) {
+                        return name.equalsIgnoreCase(info->getCodecName());
+                    });
+
+            *info = (it == codecInfos.end()) ? nullptr : *it;
+            return (*info) ? OK : NAME_NOT_FOUND;
+        };
+
+    looper->start();
+    return MediaTestHelper::CreateCodec(
+            codecName, looper, getCodecBase, getCodecInfo);
+}
+
+TEST(MediaCodecTest, ReclaimReleaseRace) {
+    // Test scenario:
+    //
+    // 1) ResourceManager thread calls reclaim(), message posted to
+    //    MediaCodec looper thread.
+    // 2) MediaCodec looper thread calls initiateShutdown(), shutdown being
+    //    handled at the component thread.
+    // 3) Client thread calls release(), message posted to & handled at
+    //    MediaCodec looper thread.
+    // 4) MediaCodec looper thread may call initiateShutdown().
+    // 5) initiateShutdown() from 2) is handled and an onReleaseCompleted()
+    //    event is posted to the MediaCodec looper thread.
+    // 6) If called, initiateShutdown() from 4) is handled and an
+    //    onReleaseCompleted() event is posted to the MediaCodec looper thread.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    enum {
+        kInit,
+        kShutdownFromReclaimReceived,
+        kReleaseCalled,
+    };
+    Counter counter{kInit};
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec, &counter](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec, &counter](bool) {
+                    int32_t stage = counter.wait(kInit, kReleaseCalled);
+                    if (stage == kInit) {
+                        // Mark that 2) happened, so test can proceed to 3)
+                        counter.advance();
+                    } else if (stage == kReleaseCalled) {
+                        // Handle 6)
+                        mockCodec->callback()->onReleaseCompleted();
+                    }
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+    std::promise<void> reclaimCompleted;
+    std::promise<void> releaseCompleted;
+    Counter threadExitCounter;
+    std::thread([codec, &reclaimCompleted]{
+        // Simulate ResourceManager thread. Proceed with 1)
+        MediaTestHelper::Reclaim(codec, true /* force */);
+        reclaimCompleted.set_value();
+    }).detach();
+    std::thread([codec, &counter, &releaseCompleted]{
+        // Simulate client thread. Wait until 2) is complete
+        (void)counter.wait(kShutdownFromReclaimReceived);
+        // Proceed to 3), and mark that 5) is ready to happen.
+        // NOTE: it's difficult to pinpoint when 4) happens, so we will sleep
+        //       to meet the timing.
+        counter.advance();
+        codec->release();
+        releaseCompleted.set_value();
+    }).detach();
+    std::thread([mockCodec, &counter]{
+        // Simulate component thread. Wait until 3) is complete
+        (void)counter.wait(kReleaseCalled);
+        // We want 4) to complete before moving forward, but it is hard to
+        // wait for this exact event. Just sleep so that the other thread can
+        // proceed and complete 4).
+        std::this_thread::sleep_for(std::chrono::milliseconds(100));
+        // Proceed to 5).
+        mockCodec->callback()->onReleaseCompleted();
+    }).detach();
+    EXPECT_EQ(
+            std::future_status::ready,
+            reclaimCompleted.get_future().wait_for(std::chrono::seconds(5)))
+        << "reclaim timed out";
+    EXPECT_EQ(
+            std::future_status::ready,
+            releaseCompleted.get_future().wait_for(std::chrono::seconds(5)))
+        << "release timed out";
+    looper->stop();
+}
+
+TEST(MediaCodecTest, ErrorWhileStopping) {
+    // Test scenario:
+    //
+    // 1) Client thread calls stop(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) An error occurs, but the shutdown operation is still in progress.
+    // 3) Another error occurs during the shutdown operation.
+    // 4) MediaCodec looper thread handles the error.
+    // 5) Client releases the codec upon the error; previous shutdown is still
+    //    going on.
+    // 6) Component thread completes shutdown and posts onStopCompleted();
+    //    shutdown from release also completes.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateConfigureComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &msg) {
+                    mockCodec->callback()->onComponentConfigured(
+                            msg->dup(), msg->dup());
+                });
+            ON_CALL(*mockCodec, initiateStart())
+                .WillByDefault([mockCodec]() {
+                    mockCodec->callback()->onStartCompleted();
+                });
+            ON_CALL(*mockCodec, initiateShutdown(true))
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // 3)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                });
+            ON_CALL(*mockCodec, initiateShutdown(false))
+                .WillByDefault([mockCodec](bool) {
+                    // Previous stop finished now.
+                    mockCodec->callback()->onStopCompleted();
+                    // Release also finished.
+                    mockCodec->callback()->onReleaseCompleted();
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    codec->configure(new AMessage, nullptr, nullptr, 0);
+    codec->start();
+    // stop() will fail because of the error
+    EXPECT_NE(OK, codec->stop());
+    // Sleep here so that the looper thread can handle all the errors.
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    // Upon receiving the error, the client tries to release the codec.
+    codec->release();
+    looper->stop();
+}
+
+TEST(MediaCodecTest, DeadWhileAsyncReleasing) {
+    // Test scenario:
+    //
+    // 1) Client thread calls release(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) Codec service dies during the shutdown operation.
+    // 3) MediaCodec looper thread handles the death.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    // Codec service has died, no callback.
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    codec->releaseAsync(new AMessage);
+    // Sleep here so that the looper thread can handle the error.
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    looper->stop();
+}
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
new file mode 100644
index 0000000..bbe3c05
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecListWriter.h>
+
+#include "MediaTestHelper.h"
+
+namespace android {
+
+// static
+sp<MediaCodec> MediaTestHelper::CreateCodec(
+        const AString &name,
+        const sp<ALooper> &looper,
+        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo) {
+    sp<MediaCodec> codec = new MediaCodec(
+            looper, MediaCodec::kNoPid, MediaCodec::kNoUid, getCodecBase, getCodecInfo);
+    if (codec->init(name) != OK) {
+        return nullptr;
+    }
+    return codec;
+}
+
+// static
+void MediaTestHelper::Reclaim(const sp<MediaCodec> &codec, bool force) {
+    codec->reclaim(force);
+}
+
+// static
+std::shared_ptr<MediaCodecListWriter> MediaTestHelper::CreateCodecListWriter() {
+    return std::shared_ptr<MediaCodecListWriter>(new MediaCodecListWriter);
+}
+
+// static
+void MediaTestHelper::WriteCodecInfos(
+        const std::shared_ptr<MediaCodecListWriter> &writer,
+        std::vector<sp<MediaCodecInfo>> *codecInfos) {
+    writer->writeCodecInfos(codecInfos);
+}
+
+}  // namespace android
diff --git a/media/libstagefright/tests/mediacodec/MediaTestHelper.h b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
new file mode 100644
index 0000000..f3d6110
--- /dev/null
+++ b/media/libstagefright/tests/mediacodec/MediaTestHelper.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_TEST_HELPER_H_
+
+#define MEDIA_TEST_HELPER_H_
+
+#include <media/stagefright/foundation/AString.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct ALooper;
+struct CodecBase;
+struct MediaCodec;
+struct MediaCodecInfo;
+struct MediaCodecListWriter;
+
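+// Test-only helper that exposes MediaCodec and MediaCodecListWriter
+// construction and control paths to the unit tests.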
+class MediaTestHelper {
+public:
+    // MediaCodec
+    static sp<MediaCodec> CreateCodec(
+            const AString &name,
+            const sp<ALooper> &looper,
+            std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
+            std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo);
+    static void Reclaim(const sp<MediaCodec> &codec, bool force);
+
+    // MediaCodecListWriter
+    static std::shared_ptr<MediaCodecListWriter> CreateCodecListWriter();
+    static void WriteCodecInfos(
+            const std::shared_ptr<MediaCodecListWriter> &writer,
+            std::vector<sp<MediaCodecInfo>> *codecInfos);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_TEST_HELPER_H_
diff --git a/media/libstagefright/tests/metadatautils/Android.bp b/media/libstagefright/tests/metadatautils/Android.bp
new file mode 100644
index 0000000..ecdf89b
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/Android.bp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
+cc_test {
+    name: "MetaDataUtilsTest",
+    gtest: true,
+
+    srcs: [
+        "MetaDataUtilsTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_metadatautils",
+        "libstagefright_esds",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libmediandk",
+        "libstagefright",
+        "libstagefright_foundation",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/tests/metadatautils/AndroidTest.xml b/media/libstagefright/tests/metadatautils/AndroidTest.xml
new file mode 100644
index 0000000..d6497f3
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/AndroidTest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for MetaDataUtils unit test">
+    <option name="test-suite-tag" value="MetaDataUtilsTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="false" />
+        <option name="push" value="MetaDataUtilsTest->/data/local/tmp/MetaDataUtilsTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip?unzip=true"
+            value="/data/local/tmp/MetaDataUtilsTestRes/" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="MetaDataUtilsTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/MetaDataUtilsTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
new file mode 100644
index 0000000..9fd5fdb
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
@@ -0,0 +1,490 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MetaDataUtilsTest"
+#include <utils/Log.h>
+
+#include <fstream>
+#include <string>
+
+#include <ESDS.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/ABitReader.h>
+
+#include "MetaDataUtilsTestEnvironment.h"
+
+constexpr uint8_t kAdtsCsdSize = 7;
+// from AAC specs: https://www.iso.org/standard/43345.html
+constexpr int32_t kSamplingFreq[] = {96000, 88200, 64000, 48000, 44100, 32000,
+                                     24000, 22050, 16000, 12000, 11025, 8000};
+constexpr uint8_t kMaxSamplingFreqIndex = sizeof(kSamplingFreq) / sizeof(kSamplingFreq[0]);
+
+static MetaDataUtilsTestEnvironment *gEnv = nullptr;
+
+using namespace android;
+
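+// Loads the given input file into mInputBuffer; shared by the negative
+// (validate) tests that feed invalid bitstreams to the CSD helpers.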
+class MetaDataValidate {
+  public:
+    MetaDataValidate() : mInputBuffer(nullptr) {}
+
+    ~MetaDataValidate() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+
+    void SetUpMetaDataValidate(string fileName) {
+        struct stat buf;
+        int8_t err = stat(fileName.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get file information for file: " << fileName;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(fileName.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to allocate memory of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    size_t mInputBufferSize;
+    const uint8_t *mInputBuffer;
+};
+
+class AvcCSDTest : public ::testing::TestWithParam<
+                           tuple<string /*inputFile*/, size_t /*avcWidth*/, size_t /*avcHeight*/>> {
+  public:
+    AvcCSDTest() : mInputBuffer(nullptr) {}
+
+    ~AvcCSDTest() {
+        if (mInputBuffer) {
+            delete[] mInputBuffer;
+            mInputBuffer = nullptr;
+        }
+    }
+    virtual void SetUp() override {
+        tuple<string, size_t, size_t> params = GetParam();
+        string inputFile = gEnv->getRes() + get<0>(params);
+        mFrameWidth = get<1>(params);
+        mFrameHeight = get<2>(params);
+
+        struct stat buf;
+        int8_t err = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(err, 0) << "Failed to get information for file: " << inputFile;
+
+        mInputBufferSize = buf.st_size;
+        FILE *inputFilePtr = fopen(inputFile.c_str(), "rb+");
+        ASSERT_NE(inputFilePtr, nullptr) << "Failed to open file: " << inputFile;
+
+        mInputBuffer = new uint8_t[mInputBufferSize];
+        ASSERT_NE(mInputBuffer, nullptr)
+                << "Failed to create a buffer of size: " << mInputBufferSize;
+
+        int32_t numBytes =
+                fread((char *)mInputBuffer, sizeof(uint8_t), mInputBufferSize, inputFilePtr);
+        ASSERT_EQ(numBytes, mInputBufferSize) << numBytes << " of " << mInputBufferSize << " read";
+
+        fclose(inputFilePtr);
+    }
+
+    size_t mFrameWidth;
+    size_t mFrameHeight;
+    size_t mInputBufferSize;
+    const uint8_t *mInputBuffer;
+};
+
+class AvcCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+class AacCSDTest
+    : public ::testing::TestWithParam<tuple<uint32_t /*profile*/, uint32_t /*samplingFreqIndex*/,
+                                            uint32_t /*channelConfig*/>> {
+  public:
+    virtual void SetUp() override {
+        tuple<uint32_t, uint32_t, uint32_t> params = GetParam();
+        mAacProfile = get<0>(params);
+        mAacSamplingFreqIndex = get<1>(params);
+        mAacChannelConfig = get<2>(params);
+    }
+
+    uint32_t mAacProfile;
+    uint32_t mAacSamplingFreqIndex;
+    uint32_t mAacChannelConfig;
+};
+
+class AacADTSTest
+    : public ::testing::TestWithParam<
+              tuple<string /*adtsFile*/, uint32_t /*channelCount*/, uint32_t /*sampleRate*/>> {
+  public:
+    AacADTSTest() : mInputBuffer(nullptr) {}
+
+    virtual void SetUp() override {
+        tuple<string, uint32_t, uint32_t> params = GetParam();
+        string fileName = gEnv->getRes() + get<0>(params);
+        mAacChannelCount = get<1>(params);
+        mAacSampleRate = get<2>(params);
+
+        FILE *filePtr = fopen(fileName.c_str(), "r");
+        ASSERT_NE(filePtr, nullptr) << "Failed to open file: " << fileName;
+
+        mInputBuffer = new uint8_t[kAdtsCsdSize];
+        ASSERT_NE(mInputBuffer, nullptr) << "Failed to allocate a memory of size: " << kAdtsCsdSize;
+
+        int32_t numBytes = fread((void *)mInputBuffer, sizeof(uint8_t), kAdtsCsdSize, filePtr);
+        ASSERT_EQ(numBytes, kAdtsCsdSize)
+                << "Failed to read complete file, bytes read: " << numBytes;
+
+        fclose(filePtr);
+    }
+    int32_t mAacChannelCount;
+    int32_t mAacSampleRate;
+    const uint8_t *mInputBuffer;
+};
+
+class AacCSDValidateTest : public MetaDataValidate,
+                           public ::testing::TestWithParam<string /*inputFile*/> {
+  public:
+    virtual void SetUp() override {
+        string inputFile = gEnv->getRes() + GetParam();
+
+        ASSERT_NO_FATAL_FAILURE(SetUpMetaDataValidate(inputFile));
+    }
+};
+
+class VorbisTest : public ::testing::TestWithParam<pair<string /*fileName*/, string /*infoFile*/>> {
+  public:
+    virtual void SetUp() override {
+        pair<string, string> params = GetParam();
+        string inputMediaFile = gEnv->getRes() + params.first;
+        mInputFileStream.open(inputMediaFile, ifstream::in);
+        ASSERT_TRUE(mInputFileStream.is_open()) << "Failed to open data file: " << inputMediaFile;
+
+        string inputInfoFile = gEnv->getRes() + params.second;
+        mInfoFileStream.open(inputInfoFile, ifstream::in);
+        ASSERT_TRUE(mInfoFileStream.is_open()) << "Failed to open info file: " << inputInfoFile;
+        ASSERT_FALSE(inputInfoFile.empty()) << "Empty info file: " << inputInfoFile;
+    }
+
+    ~VorbisTest() {
+        if (mInputFileStream.is_open()) mInputFileStream.close();
+        if (mInfoFileStream.is_open()) mInfoFileStream.close();
+    }
+
+    ifstream mInputFileStream;
+    ifstream mInfoFileStream;
+};
+
+TEST_P(AvcCSDTest, AvcCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from AMediaFormat";
+
+    int32_t avcWidth = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_WIDTH, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to get avc width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    int32_t avcHeight = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_HEIGHT, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to get avc height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_VIDEO_AVC);
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_TRUE(status) << "Failed to make AVC CSD from MetaDataBase";
+
+    avcWidth = -1;
+    status = metaData->findInt32(kKeyWidth, &avcWidth);
+    ASSERT_TRUE(status) << "Failed to find the width";
+    ASSERT_EQ(avcWidth, mFrameWidth);
+
+    avcHeight = -1;
+    status = metaData->findInt32(kKeyHeight, &avcHeight);
+    ASSERT_TRUE(status) << "Failed to find the height";
+    ASSERT_EQ(avcHeight, mFrameHeight);
+
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_AVC, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from AMediaFormat";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD from AMediaFormat";
+
+    const void *csdMetaDataBaseBuffer = nullptr;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyAVCC, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the CSD from MetaDataBase";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+    ASSERT_EQ(csdMetaDataBaseSize, csdAMediaFormatSize)
+            << "CSD size of MetaData type and AMediaFormat type must be same";
+
+    int32_t result = memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(result, 0) << "CSD from AMediaFormat and MetaDataBase do not match";
+
+    delete metaData;
+    AMediaFormat_delete(csdData);
+}
+
+TEST_P(AvcCSDValidateTest, AvcValidateTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with AMediaFormat succeeds with invalid data";
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
+    ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with MetaDataBase succeeds with invalid data";
+}
+
+TEST_P(AacCSDTest, AacCSDValidationTest) {
+    AMediaFormat *csdData = AMediaFormat_new();
+    ASSERT_NE(csdData, nullptr) << "Failed to create AMedia format";
+
+    ASSERT_GE(mAacSamplingFreqIndex, 0);
+    ASSERT_LT(mAacSamplingFreqIndex, kMaxSamplingFreqIndex);
+    bool status = MakeAACCodecSpecificData(csdData, mAacProfile, mAacSamplingFreqIndex,
+                                           mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from AMediaFormat";
+
+    int32_t sampleRate = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sample rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    int32_t channelCount = -1;
+    status = AMediaFormat_getInt32(csdData, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    const char *mimeType = "";
+    status = AMediaFormat_getString(csdData, AMEDIAFORMAT_KEY_MIME, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get the mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
+
+    status = MakeAACCodecSpecificData(*metaData, mAacProfile, mAacSamplingFreqIndex,
+                                      mAacChannelConfig);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(kSamplingFreq[mAacSamplingFreqIndex], sampleRate);
+
+    channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelConfig);
+
+    mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    void *csdAMediaFormatBuffer = nullptr;
+    size_t csdAMediaFormatSize = 0;
+    status = AMediaFormat_getBuffer(csdData, AMEDIAFORMAT_KEY_CSD_0, &csdAMediaFormatBuffer,
+                                    &csdAMediaFormatSize);
+    ASSERT_TRUE(status) << "Failed to get the AMediaFormat CSD";
+    ASSERT_GT(csdAMediaFormatSize, 0) << "CSD size must be greater than 0";
+    ASSERT_NE(csdAMediaFormatBuffer, nullptr) << "Invalid CSD found";
+
+    const void *csdMetaDataBaseBuffer;
+    size_t csdMetaDataBaseSize = 0;
+    uint32_t mediaType;
+    status = metaData->findData(kKeyESDS, &mediaType, &csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_TRUE(status) << "Failed to get the ESDS data from MetaDataBase";
+    ASSERT_GT(csdMetaDataBaseSize, 0) << "CSD size must be greater than 0";
+
+    ESDS esds(csdMetaDataBaseBuffer, csdMetaDataBaseSize);
+    status_t result = esds.getCodecSpecificInfo(&csdMetaDataBaseBuffer, &csdMetaDataBaseSize);
+    ASSERT_EQ(result, (status_t)OK) << "Failed to get CSD from ESDS data";
+    ASSERT_NE(csdMetaDataBaseBuffer, nullptr) << "Invalid CSD found";
+    ASSERT_EQ(csdAMediaFormatSize, csdMetaDataBaseSize)
+            << "CSD size do not match between AMediaFormat type and MetaDataBase type";
+
+    int32_t memcmpResult =
+            memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
+    ASSERT_EQ(memcmpResult, 0) << "AMediaFormat and MetaDataBase CSDs do not match";
+
+    AMediaFormat_delete(csdData);
+    delete metaData;
+}
+
+TEST_P(AacADTSTest, AacADTSValidationTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_TRUE(status) << "Failed to make AAC CSD from MetaDataBase";
+
+    int32_t sampleRate = -1;
+    status = metaData->findInt32(kKeySampleRate, &sampleRate);
+    ASSERT_TRUE(status) << "Failed to get sampling rate";
+    ASSERT_EQ(sampleRate, mAacSampleRate);
+
+    int32_t channelCount = -1;
+    status = metaData->findInt32(kKeyChannelCount, &channelCount);
+    ASSERT_TRUE(status) << "Failed to get channel count";
+    ASSERT_EQ(channelCount, mAacChannelCount);
+
+    const char *mimeType = "";
+    status = metaData->findCString(kKeyMIMEType, &mimeType);
+    ASSERT_TRUE(status) << "Failed to get mime type";
+    ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
+
+    delete metaData;
+}
+
+TEST_P(AacCSDValidateTest, AacInvalidInputTest) {
+    MetaDataBase *metaData = new MetaDataBase();
+    ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
+
+    bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
+    ASSERT_FALSE(status) << "MakeAACCodecSpecificData succeeds with invalid data";
+}
+
+TEST_P(VorbisTest, VorbisCommentTest) {
+    string line;
+    string tag;
+    string key;
+    string value;
+    size_t commentLength;
+    bool status;
+
+    while (getline(mInfoFileStream, line)) {
+        istringstream stringLine(line);
+        stringLine >> tag >> key >> value >> commentLength;
+        ASSERT_GT(commentLength, 0) << "Vorbis comment size must be greater than 0";
+
+        string comment;
+        string dataLine;
+
+        getline(mInputFileStream, dataLine);
+        istringstream dataStringLine(dataLine);
+        dataStringLine >> comment;
+
+        char *buffer = strndup(comment.c_str(), commentLength);
+        ASSERT_NE(buffer, nullptr) << "Failed to allocate buffer of size: " << commentLength;
+
+        AMediaFormat *fileMeta = AMediaFormat_new();
+        ASSERT_NE(fileMeta, nullptr) << "Failed to create AMedia format";
+
+        parseVorbisComment(fileMeta, buffer, commentLength);
+        free(buffer);
+
+        if (!strcasecmp(tag.c_str(), "ANDROID_HAPTIC")) {
+            int32_t numChannelExpected = stoi(value);
+            int32_t numChannelFound = -1;
+            status = AMediaFormat_getInt32(fileMeta, key.c_str(), &numChannelFound);
+            ASSERT_TRUE(status) << "Failed to get the channel count";
+            ASSERT_EQ(numChannelExpected, numChannelFound);
+        } else if (!strcasecmp(tag.c_str(), "ANDROID_LOOP")) {
+            int32_t loopExpected = !value.compare("true");
+            int32_t loopFound = -1;
+
+            status = AMediaFormat_getInt32(fileMeta, "loop", &loopFound);
+            ASSERT_TRUE(status) << "Failed to get the loop count";
+            ASSERT_EQ(loopExpected, loopFound);
+        } else {
+            const char *tagValue = "";
+            status = AMediaFormat_getString(fileMeta, key.c_str(), &tagValue);
+            ASSERT_TRUE(status) << "Failed to get the tag value";
+            ASSERT_STREQ(value.c_str(), tagValue);
+        }
+        AMediaFormat_delete(fileMeta);
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDTest,
+                         ::testing::Values(make_tuple("sps_pps_userdata.h264", 8, 8),
+                                           make_tuple("sps_userdata_pps.h264", 8, 8),
+                                           make_tuple("sps_pps_sps_pps.h264", 8, 8)));
+
+// TODO(b/158067691): Add invalid test vectors with incomplete PPS or no PPS
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AvcCSDValidateTest,
+                         ::testing::Values("sps_pps_only_startcode.h264",
+                                           "sps_incomplete_pps.h264",
+                                           // TODO(b/158067691) "sps_pps_incomplete.h264",
+                                           "randomdata.h264",
+                                           // TODO(b/158067691) "sps.h264",
+                                           "pps.h264"));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDTest,
+                         ::testing::Values(make_tuple(AACObjectMain, 1, 1)));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacADTSTest,
+                         ::testing::Values(make_tuple("loudsoftaacadts", 1, 44100)));
+
+INSTANTIATE_TEST_SUITE_P(MetaDataUtilsTestAll, AacCSDValidateTest,
+                         ::testing::Values("loudsoftaacadts_invalidheader",
+                                           "loudsoftaacadts_invalidprofile",
+                                           "loudsoftaacadts_invalidchannelconfig"));
+
+// TODO(b/157974508) Add test vector for vorbis thumbnail tag
+// Info file contains TAG, Key, Value and size of the vorbis comment
+INSTANTIATE_TEST_SUITE_P(
+        MetaDataUtilsTestAll, VorbisTest,
+        ::testing::Values(make_pair("vorbiscomment_sintel.dat", "vorbiscomment_sintel.info"),
+                          make_pair("vorbiscomment_album.dat", "vorbiscomment_album.info"),
+                          make_pair("vorbiscomment_loop.dat", "vorbiscomment_loop.info")));
+
+int main(int argc, char **argv) {
+    gEnv = new MetaDataUtilsTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
new file mode 100644
index 0000000..4d642bc
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __METADATA_UTILS_TEST_ENVIRONMENT_H__
+#define __METADATA_UTILS_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class MetaDataUtilsTestEnvironment : public ::testing::Environment {
+  public:
+    MetaDataUtilsTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int MetaDataUtilsTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __METADATA_UTILS_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/tests/metadatautils/README.md b/media/libstagefright/tests/metadatautils/README.md
new file mode 100644
index 0000000..0862a07
--- /dev/null
+++ b/media/libstagefright/tests/metadatautils/README.md
@@ -0,0 +1,39 @@
+## Media Testing ##
+---
+#### MetaDataUtils Test
+The MetaDataUtils Unit Test Suite validates the libstagefright_metadatautils library available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m MetaDataUtilsTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/MetaDataUtilsTest/MetaDataUtilsTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/metadatautils/MetaDataUtilsTestRes-1.0.zip). Download, unzip, and push these files to the device for testing.
+
+```
+adb push MetaDataUtilsTestRes-1.0 /data/local/tmp/
+```
+
+usage: MetaDataUtilsTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/MetaDataUtilsTest -P /data/local/tmp/MetaDataUtilsTestRes-1.0/
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest MetaDataUtilsTest -- --enable-module-dynamic-download=true
+```
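+
+A subset of the tests can be selected with gtest's standard --gtest_filter
+flag (the filter pattern below is only an example):
+
+```
+adb shell /data/local/tmp/MetaDataUtilsTest -P /data/local/tmp/MetaDataUtilsTestRes-1.0/ --gtest_filter="MetaDataUtilsTestAll/AacCSDTest.*"
+```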
diff --git a/media/libstagefright/tests/writer/Android.bp b/media/libstagefright/tests/writer/Android.bp
index 7e169cb..38d5ecc 100644
--- a/media/libstagefright/tests/writer/Android.bp
+++ b/media/libstagefright/tests/writer/Android.bp
@@ -14,6 +14,17 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_tests_license",
+    ],
+}
+
 cc_test {
     name: "writerTest",
     gtest: true,
@@ -28,13 +39,15 @@
         "libcutils",
         "liblog",
         "libutils",
+        "libmedia",
+        "libmediandk",
+        "libstagefright",
     ],
 
     static_libs: [
         "libstagefright_webm",
-        "libdatasource",
-        "libstagefright",
         "libstagefright_foundation",
+        "libdatasource",
         "libstagefright_esds",
         "libogg",
     ],
diff --git a/media/libstagefright/tests/writer/AndroidTest.xml b/media/libstagefright/tests/writer/AndroidTest.xml
index d831555..cc890fe 100644
--- a/media/libstagefright/tests/writer/AndroidTest.xml
+++ b/media/libstagefright/tests/writer/AndroidTest.xml
@@ -19,12 +19,13 @@
         <option name="cleanup" value="true" />
         <option name="push" value="writerTest->/data/local/tmp/writerTest" />
         <option name="push-file"
-            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/Writer.zip?unzip=true"
-            value="/data/local/tmp/writerTestRes/" />
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip?unzip=true"
+            value="/data/local/tmp/WriterTestRes/" />
     </target_preparer>
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="writerTest" />
-        <option name="native-test-flag" value="-P /data/local/tmp/writerTestRes/" />
+        <option name="native-test-flag" value="-P /data/local/tmp/WriterTestRes/" />
+        <option name="native-test-flag" value="-C true" />
     </test>
 </configuration>
diff --git a/media/libstagefright/tests/writer/README.md b/media/libstagefright/tests/writer/README.md
index ae07917..0e54ca7 100644
--- a/media/libstagefright/tests/writer/README.md
+++ b/media/libstagefright/tests/writer/README.md
@@ -19,13 +19,18 @@
 
 adb push ${OUT}/data/nativetest/writerTest/writerTest /data/local/tmp/
 
-The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/writerTestRes.zip).
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/tests/writer/WriterTestRes-1.1.zip).
 Download and extract the folder. Push all the files in this folder to /data/local/tmp/ on the device.
 ```
-adb push writerTestRes /data/local/tmp/
+adb push WriterTestRes-1.1/. /data/local/tmp/WriterTestRes/
 ```
 
-usage: writerTest -P \<path_to_res_folder\>
+usage: writerTest -P \<path_to_res_folder\> -C \<remove_output_file\>
 ```
-adb shell /data/local/tmp/writerTest -P /data/local/tmp/
+adb shell /data/local/tmp/writerTest -P /data/local/tmp/WriterTestRes/ -C true
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest writerTest -- --enable-module-dynamic-download=true
 ```
diff --git a/media/libstagefright/tests/writer/WriterListener.h b/media/libstagefright/tests/writer/WriterListener.h
new file mode 100644
index 0000000..81f0a7c
--- /dev/null
+++ b/media/libstagefright/tests/writer/WriterListener.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WRITER_LISTENER_H_
+#define WRITER_LISTENER_H_
+
+#include <mutex>
+
+#include <media/IMediaRecorderClient.h>
+#include <media/mediarecorder.h>
+
+using namespace android;
+using namespace std;
+
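+// Records whether the writer signaled MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED
+// or MEDIA_RECORDER_INFO_MAX_DURATION_REACHED via the IMediaRecorderClient
+// notify() callback.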
+class WriterListener : public BnMediaRecorderClient {
+  public:
+    WriterListener() : mSignaledSize(false), mSignaledDuration(false) {}
+
+    virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) {
+        ALOGV("msg : %d, ext1 : %d, ext2 : %d", msg, ext1, ext2);
+        if (ext1 == MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
+            mSignaledSize = true;
+        } else if (ext1 == MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
+            mSignaledDuration = true;
+        }
+    }
+
+    volatile bool mSignaledSize;
+    volatile bool mSignaledDuration;
+};
+
+#endif  // WRITER_LISTENER_H_
diff --git a/media/libstagefright/tests/writer/WriterTest.cpp b/media/libstagefright/tests/writer/WriterTest.cpp
index ff063e3..d170e7c 100644
--- a/media/libstagefright/tests/writer/WriterTest.cpp
+++ b/media/libstagefright/tests/writer/WriterTest.cpp
@@ -18,9 +18,13 @@
 #define LOG_TAG "WriterTest"
 #include <utils/Log.h>
 
+#include <binder/ProcessState.h>
+
+#include <inttypes.h>
 #include <fstream>
 #include <iostream>
 
+#include <media/NdkMediaExtractor.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
@@ -39,18 +43,40 @@
 
 #define OUTPUT_FILE_NAME "/data/local/tmp/writer.out"
 
+// Stts values within 0.1 ms (100 us) of each other are fudged by MPEG4Writer
+// to avoid generating too many stts entries.
+constexpr int32_t kMpeg4MuxToleranceTimeUs = 100;
+// Tolerance value for other writers
+constexpr int32_t kMuxToleranceTimeUs = 1;
+
 static WriterTestEnvironment *gEnv = nullptr;
 
-struct configFormat {
-    char mime[128];
-    int32_t width;
-    int32_t height;
-    int32_t sampleRate;
-    int32_t channelCount;
+enum inputId {
+    // audio streams
+    AAC_1,
+    AAC_ADTS_1,
+    AMR_NB_1,
+    AMR_WB_1,
+    FLAC_1,
+    OPUS_1,
+    VORBIS_1,
+    // video streams
+    AV1_1,
+    AVC_1,
+    H263_1,
+    HEVC_1,
+    MPEG4_1,
+    VP8_1,
+    VP9_1,
+    // heif stream
+    HEIC_1,
+    UNUSED_ID,
+    UNKNOWN_ID,
 };
 
 // LookUpTable of clips and metadata for component testing
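+// firstParam/secondParam carry sampleRate/channelCount for audio clips and
+// width/height for video and image clips.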
 static const struct InputData {
+    inputId inpId;
     const char *mime;
     string inputFile;
     string info;
@@ -58,65 +84,73 @@
     int32_t secondParam;
     bool isAudio;
 } kInputData[] = {
-        {MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
-         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC, "bbb_aac_stereo_128kbps_48000hz.aac",
-         "bbb_aac_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
+        {AAC_1, MEDIA_MIMETYPE_AUDIO_AAC, "audio_aac_stereo_8kbps_11025hz.aac",
+         "audio_aac_stereo_8kbps_11025hz.info", 11025, 2, true},
+        {AAC_ADTS_1, MEDIA_MIMETYPE_AUDIO_AAC_ADTS, "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.adts",
          "Mps_2_c2_fr1_Sc1_Dc2_0x03_raw.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+        {AMR_NB_1, MEDIA_MIMETYPE_AUDIO_AMR_NB, "sine_amrnb_1ch_12kbps_8000hz.amrnb",
          "sine_amrnb_1ch_12kbps_8000hz.info", 8000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+        {AMR_WB_1, MEDIA_MIMETYPE_AUDIO_AMR_WB, "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
          "bbb_amrwb_1ch_14kbps_16000hz.info", 16000, 1, true},
-        {MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
-         "bbb_vorbis_stereo_128kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
+        {FLAC_1, MEDIA_MIMETYPE_AUDIO_FLAC, "bbb_flac_stereo_680kbps_48000hz.flac",
          "bbb_flac_stereo_680kbps_48000hz.info", 48000, 2, true},
-        {MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
-         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
-         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_176x144_300kbps_60fps.h264",
-         "bbb_avc_176x144_300kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_176x144_176kbps_60fps.hevc",
-         "bbb_hevc_176x144_176kbps_60fps.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144, false},
-        {MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
+        {OPUS_1, MEDIA_MIMETYPE_AUDIO_OPUS, "bbb_opus_stereo_128kbps_48000hz.opus",
+         "bbb_opus_stereo_128kbps_48000hz.info", 48000, 2, true},
+        {VORBIS_1, MEDIA_MIMETYPE_AUDIO_VORBIS, "bbb_vorbis_1ch_64kbps_16kHz.vorbis",
+         "bbb_vorbis_1ch_64kbps_16kHz.info", 16000, 1, true},
+
+        {AV1_1, MEDIA_MIMETYPE_VIDEO_AV1, "bbb_av1_176_144.av1", "bbb_av1_176_144.info", 176, 144,
+         false},
+        {AVC_1, MEDIA_MIMETYPE_VIDEO_AVC, "bbb_avc_352x288_768kbps_30fps.avc",
+         "bbb_avc_352x288_768kbps_30fps.info", 352, 288, false},
+        {H263_1, MEDIA_MIMETYPE_VIDEO_H263, "bbb_h263_352x288_300kbps_12fps.h263",
          "bbb_h263_352x288_300kbps_12fps.info", 352, 288, false},
-        {MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
+        {HEVC_1, MEDIA_MIMETYPE_VIDEO_HEVC, "bbb_hevc_340x280_768kbps_30fps.hevc",
+         "bbb_hevc_340x280_768kbps_30fps.info", 340, 280, false},
+        {MPEG4_1, MEDIA_MIMETYPE_VIDEO_MPEG4, "bbb_mpeg4_352x288_512kbps_30fps.m4v",
          "bbb_mpeg4_352x288_512kbps_30fps.info", 352, 288, false},
+        {VP8_1, MEDIA_MIMETYPE_VIDEO_VP8, "bbb_vp8_176x144_240kbps_60fps.vp8",
+         "bbb_vp8_176x144_240kbps_60fps.info", 176, 144, false},
+        {VP9_1, MEDIA_MIMETYPE_VIDEO_VP9, "bbb_vp9_176x144_285kbps_60fps.vp9",
+         "bbb_vp9_176x144_285kbps_60fps.info", 176, 144, false},
+
+        {HEIC_1, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, "bbb_hevc_176x144_176kbps_60fps.hevc",
+         "bbb_heic_176x144_176kbps_60fps.info", 176, 144, false},
 };
 
-class WriterTest : public ::testing::TestWithParam<pair<string, int32_t>> {
+class WriterTest {
   public:
-    WriterTest() : mWriter(nullptr), mFileMeta(nullptr), mCurrentTrack(nullptr) {}
+    WriterTest() : mWriter(nullptr), mFileMeta(nullptr) {}
 
     ~WriterTest() {
-        if (mWriter) {
-            mWriter.clear();
-            mWriter = nullptr;
-        }
         if (mFileMeta) {
             mFileMeta.clear();
             mFileMeta = nullptr;
         }
-        if (mCurrentTrack) {
-            mCurrentTrack.clear();
-            mCurrentTrack = nullptr;
+        if (mWriter) {
+            mWriter.clear();
+            mWriter = nullptr;
+        }
+        if (gEnv->cleanUp()) remove(OUTPUT_FILE_NAME);
+
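+        // Release per-track state: buffer lists, media adapters and input streams.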
+        for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+            mBufferInfo[idx].clear();
+            if (mCurrentTrack[idx]) {
+                mCurrentTrack[idx]->stop();
+                mCurrentTrack[idx].clear();
+                mCurrentTrack[idx] = nullptr;
+            }
+            if (mInputStream[idx].is_open()) mInputStream[idx].close();
         }
     }
 
-    virtual void SetUp() override {
-        mNumCsds = 0;
-        mInputFrameId = 0;
+    void setupWriterType(string writerFormat) {
         mWriterName = unknown_comp;
         mDisableTest = false;
-
         static const std::map<std::string, standardWriters> mapWriter = {
                 {"ogg", OGG},     {"aac", AAC},      {"aac_adts", AAC_ADTS}, {"webm", WEBM},
                 {"mpeg4", MPEG4}, {"amrnb", AMR_NB}, {"amrwb", AMR_WB},      {"mpeg2Ts", MPEG2TS}};
         // Find the component type
-        string writerFormat = GetParam().first;
         if (mapWriter.find(writerFormat) != mapWriter.end()) {
             mWriterName = mapWriter.at(writerFormat);
         }
@@ -126,16 +160,19 @@
         }
     }
 
-    virtual void TearDown() override {
-        mBufferInfo.clear();
-        if (mInputStream.is_open()) mInputStream.close();
-    }
-
-    void getInputBufferInfo(string inputFileName, string inputInfo);
+    void getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx = 0);
 
     int32_t createWriter(int32_t fd);
 
-    int32_t addWriterSource(bool isAudio, configFormat params);
+    int32_t addWriterSource(bool isAudio, configFormat params, int32_t idx = 0);
+
+    void setupExtractor(AMediaExtractor *extractor, string inputFileName, int32_t &trackCount);
+
+    void extract(AMediaExtractor *extractor, configFormat &params, vector<BufferInfo> &bufferInfo,
+                 uint8_t *buffer, size_t bufSize, size_t *bytesExtracted, int32_t idx);
+
+    void compareParams(configFormat srcParam, configFormat dstParam, vector<BufferInfo> dstBufInfo,
+                       int32_t index);
 
     enum standardWriters {
         OGG,
@@ -152,32 +189,42 @@
     standardWriters mWriterName;
     sp<MediaWriter> mWriter;
     sp<MetaData> mFileMeta;
-    sp<MediaAdapter> mCurrentTrack;
+    sp<MediaAdapter> mCurrentTrack[kMaxTrackCount]{};
 
     bool mDisableTest;
-    int32_t mNumCsds;
-    int32_t mInputFrameId;
-    ifstream mInputStream;
-    vector<BufferInfo> mBufferInfo;
+    int32_t mNumCsds[kMaxTrackCount]{};
+    int32_t mInputFrameId[kMaxTrackCount]{};
+    ifstream mInputStream[kMaxTrackCount]{};
+    vector<BufferInfo> mBufferInfo[kMaxTrackCount];
 };
 
-void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo) {
+class WriteFunctionalityTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<string /* writerFormat*/, inputId /* inputId0*/,
+                                            inputId /* inputId1*/, float /* BufferInterval*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+void WriterTest::getInputBufferInfo(string inputFileName, string inputInfo, int32_t idx) {
     std::ifstream eleInfo;
     eleInfo.open(inputInfo.c_str());
     ASSERT_EQ(eleInfo.is_open(), true);
     int32_t bytesCount = 0;
     uint32_t flags = 0;
     int64_t timestamp = 0;
+    int32_t numCsds = 0;
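+    // Each line of the .info file describes one input frame as: <size in bytes> <flags> <timestamp in us>.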
     while (1) {
         if (!(eleInfo >> bytesCount)) break;
         eleInfo >> flags;
         eleInfo >> timestamp;
-        mBufferInfo.push_back({bytesCount, flags, timestamp});
-        if (flags == CODEC_CONFIG_FLAG) mNumCsds++;
+        mBufferInfo[idx].push_back({bytesCount, flags, timestamp});
+        if (flags == CODEC_CONFIG_FLAG) numCsds++;
     }
     eleInfo.close();
-    mInputStream.open(inputFileName.c_str(), std::ifstream::binary);
-    ASSERT_EQ(mInputStream.is_open(), true);
+    mNumCsds[idx] = numCsds;
+    mInputStream[idx].open(inputFileName.c_str(), std::ifstream::binary);
+    ASSERT_EQ(mInputStream[idx].is_open(), true);
 }
 
 int32_t WriterTest::createWriter(int32_t fd) {
@@ -223,10 +270,10 @@
     return 0;
 }
 
-int32_t WriterTest::addWriterSource(bool isAudio, configFormat params) {
-    if (mInputFrameId) return -1;
+int32_t WriterTest::addWriterSource(bool isAudio, configFormat params, int32_t idx) {
+    if (mInputFrameId[idx]) return -1;
     sp<AMessage> format = new AMessage;
-    if (mInputStream.is_open()) {
+    if (mInputStream[idx].is_open()) {
         format->setString("mime", params.mime);
         if (isAudio) {
             format->setInt32("channel-count", params.channelCount);
@@ -235,25 +282,34 @@
             format->setInt32("width", params.width);
             format->setInt32("height", params.height);
         }
-
-        int32_t status =
-                writeHeaderBuffers(mInputStream, mBufferInfo, mInputFrameId, format, mNumCsds);
-        if (status != 0) return -1;
+        if (mNumCsds[idx]) {
+            int32_t status = writeHeaderBuffers(mInputStream[idx], mBufferInfo[idx],
+                                                mInputFrameId[idx], format, mNumCsds[idx]);
+            if (status != 0) return -1;
+        }
     }
+
     sp<MetaData> trackMeta = new MetaData;
     convertMessageToMetaData(format, trackMeta);
-    mCurrentTrack = new MediaAdapter(trackMeta);
-    if (mCurrentTrack == nullptr) {
+    mCurrentTrack[idx] = new MediaAdapter(trackMeta);
+    if (mCurrentTrack[idx] == nullptr) {
         ALOGE("MediaAdapter returned nullptr");
         return -1;
     }
-    status_t result = mWriter->addSource(mCurrentTrack);
+    status_t result = mWriter->addSource(mCurrentTrack[idx]);
     return result;
 }
 
 void getFileDetails(string &inputFilePath, string &info, configFormat &params, bool &isAudio,
-                    int32_t streamIndex = 0) {
-    if (streamIndex >= sizeof(kInputData) / sizeof(kInputData[0])) {
+                    inputId inpId) {
+    int32_t inputDataSize = sizeof(kInputData) / sizeof(kInputData[0]);
+    int32_t streamIndex = 0;
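+    // Locate the kInputData entry whose inpId matches the requested input id.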
+    for (; streamIndex < inputDataSize; streamIndex++) {
+        if (inpId == kInputData[streamIndex].inpId) {
+            break;
+        }
+    }
+    if (streamIndex == inputDataSize) {
         return;
     }
     inputFilePath += kInputData[streamIndex].inputFile;
@@ -270,7 +326,147 @@
     return;
 }
 
-TEST_P(WriterTest, CreateWriterTest) {
+void WriterTest::setupExtractor(AMediaExtractor *extractor, string inputFileName,
+                                int32_t &trackCount) {
+    ALOGV("Input file for extractor: %s", inputFileName.c_str());
+
+    int32_t fd = open(inputFileName.c_str(), O_RDONLY);
+    ASSERT_GE(fd, 0) << "Failed to open writer's output file to validate";
+
+    struct stat buf;
+    int32_t status = fstat(fd, &buf);
+    ASSERT_EQ(status, 0) << "Failed to get properties of input file for extractor";
+
+    size_t fileSize = buf.st_size;
+    ALOGV("Size of input file to extractor: %zu", fileSize);
+
+    status = AMediaExtractor_setDataSourceFd(extractor, fd, 0, fileSize);
+    ASSERT_EQ(status, AMEDIA_OK) << "Failed to set data source for extractor";
+
+    trackCount = AMediaExtractor_getTrackCount(extractor);
+    ASSERT_GT(trackCount, 0) << "No tracks reported by extractor";
+    ALOGV("Number of tracks reported by extractor : %d", trackCount);
+    return;
+}
+
+void WriterTest::extract(AMediaExtractor *extractor, configFormat &params,
+                         vector<BufferInfo> &bufferInfo, uint8_t *buffer, size_t bufSize,
+                         size_t *bytesExtracted, int32_t idx) {
+    AMediaExtractor_selectTrack(extractor, idx);
+    AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, idx);
+    ASSERT_NE(format, nullptr) << "Track format is NULL";
+    ALOGI("Track format = %s", AMediaFormat_toString(format));
+
+    const char *mime = nullptr;
+    AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
+    ASSERT_NE(mime, nullptr) << "Track mime is NULL";
+    ALOGI("Track mime = %s", mime);
+    strlcpy(params.mime, mime, kMimeSize);
+
+    if (!strncmp(mime, "audio/", 6)) {
+        ASSERT_TRUE(
+                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &params.channelCount))
+                << "Extractor did not report channel count";
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &params.sampleRate))
+                << "Extractor did not report sample rate";
+    } else if (!strncmp(mime, "video/", 6) || !strncmp(mime, "image/", 6)) {
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &params.width))
+                << "Extractor did not report width";
+        ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &params.height))
+                << "Extractor did not report height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime " << mime;
+    }
+
+    int32_t bufferOffset = 0;
+    // Get CSD data
+    int index = 0;
+    void *csdBuf;
+    while (1) {
+        csdBuf = nullptr;
+        char csdName[16];
+        snprintf(csdName, 16, "csd-%d", index);
+        size_t csdSize = 0;
+        bool csdFound = AMediaFormat_getBuffer(format, csdName, &csdBuf, &csdSize);
+        if (!csdFound || !csdBuf || !csdSize) break;
+
+        bufferInfo.push_back({static_cast<int32_t>(csdSize), CODEC_CONFIG_FLAG, 0});
+        memcpy(buffer + bufferOffset, csdBuf, csdSize);
+        bufferOffset += csdSize;
+        index++;
+    }
+
+    // Get frame data
+    while (1) {
+        ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
+        if (sampleSize < 0) break;
+
+        uint8_t *sampleBuffer = (uint8_t *)malloc(sampleSize);
+        ASSERT_NE(sampleBuffer, nullptr) << "Failed to allocate the buffer of size " << sampleSize;
+
+        int bytesRead = AMediaExtractor_readSampleData(extractor, sampleBuffer, sampleSize);
+        ASSERT_EQ(bytesRead, sampleSize)
+                << "Number of bytes extracted does not match with sample size";
+        int64_t pts = AMediaExtractor_getSampleTime(extractor);
+        uint32_t flag = AMediaExtractor_getSampleFlags(extractor);
+
+        if (!strcmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+            // Removing 4 bytes of AMEDIAFORMAT_KEY_VALID_SAMPLES from sample size
+            bytesRead = bytesRead - 4;
+        }
+
+        ASSERT_LE(bufferOffset + bytesRead, bufSize)
+                << "Size of the buffer is insufficient to store the extracted data";
+        bufferInfo.push_back({bytesRead, flag, pts});
+        memcpy(buffer + bufferOffset, sampleBuffer, bytesRead);
+        bufferOffset += bytesRead;
+
+        AMediaExtractor_advance(extractor);
+        free(sampleBuffer);
+    }
+    *bytesExtracted = bufferOffset;
+    return;
+}
+
+void WriterTest::compareParams(configFormat srcParam, configFormat dstParam,
+                               vector<BufferInfo> dstBufInfo, int32_t index) {
+    ASSERT_STREQ(srcParam.mime, dstParam.mime)
+            << "Extracted mime type does not match with input mime type";
+
+    if (!strncmp(srcParam.mime, "audio/", 6)) {
+        ASSERT_EQ(srcParam.channelCount, dstParam.channelCount)
+                << "Extracted channel count does not match with input channel count";
+        ASSERT_EQ(srcParam.sampleRate, dstParam.sampleRate)
+                << "Extracted sample rate does not match with input sample rate";
+    } else if (!strncmp(srcParam.mime, "video/", 6) || !strncmp(srcParam.mime, "image/", 6)) {
+        ASSERT_EQ(srcParam.width, dstParam.width)
+                << "Extracted width does not match with input width";
+        ASSERT_EQ(srcParam.height, dstParam.height)
+                << "Extracted height does not match with input height";
+    } else {
+        ASSERT_TRUE(false) << "Invalid mime type" << srcParam.mime;
+    }
+
+    int32_t toleranceValueUs = kMuxToleranceTimeUs;
+    if (mWriterName == MPEG4) {
+        toleranceValueUs = kMpeg4MuxToleranceTimeUs;
+    }
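+    // Compare every extracted buffer's size, flags and timestamp with the corresponding input
+    // buffer; timestamps may shift during muxing, so a writer-specific tolerance is allowed.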
+    for (int32_t i = 0; i < dstBufInfo.size(); i++) {
+        ASSERT_EQ(mBufferInfo[index][i].size, dstBufInfo[i].size)
+                << "Input size " << mBufferInfo[index][i].size << " mismatched with extracted size "
+                << dstBufInfo[i].size;
+        ASSERT_EQ(mBufferInfo[index][i].flags, dstBufInfo[i].flags)
+                << "Input flag " << mBufferInfo[index][i].flags
+                << " mismatched with extracted flag " << dstBufInfo[i].flags;
+        ASSERT_LE(abs(mBufferInfo[index][i].timeUs - dstBufInfo[i].timeUs), toleranceValueUs)
+                << "Difference between original timestamp " << mBufferInfo[index][i].timeUs
+                << " and extracted timestamp " << dstBufInfo[i].timeUs
+                << " is greater than tolerance value = " << toleranceValueUs << " microseconds";
+    }
+    return;
+}
+
+TEST_P(WriteFunctionalityTest, CreateWriterTest) {
     if (mDisableTest) return;
     ALOGV("Tests the creation of writers");
 
@@ -281,14 +477,14 @@
 
     // Creating writer within a test scope. Destructor should be called when the test ends
     ASSERT_EQ((status_t)OK, createWriter(fd))
-            << "Failed to create writer for output format:" << GetParam().first;
+            << "Failed to create writer for output format:" << get<0>(GetParam());
 }
 
-TEST_P(WriterTest, WriterTest) {
+TEST_P(WriteFunctionalityTest, WriterTest) {
     if (mDisableTest) return;
     ALOGV("Checks if for a given input, a valid muxed file has been created or not");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -297,35 +493,110 @@
     int32_t status = createWriter(fd);
     ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
 
-    string inputFile = gEnv->getRes();
-    string inputInfo = gEnv->getRes();
-    configFormat param;
-    bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
-    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
 
-    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
-    status = addWriterSource(isAudio, param);
-    ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+
+    size_t fileSize[numTracks];
+    configFormat param[numTracks];
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param[idx], isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+        struct stat buf;
+        status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file:" << inputFile;
+        fileSize[idx] = buf.st_size;
+
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param[idx], idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+    }
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size());
-    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    float interval = get<3>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
 
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
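+    // Send buffers to the writer in chunks of (interval * frame count) per track, alternating
+    // between tracks so that multi-track output is fed from all sources in an interleaved way.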
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
+
+    // Validate the output muxed file created by writer
+    // TODO(b/146423022): Skip validating output for webm writer
+    // TODO(b/146421018): Skip validating output for ogg writer
+    if (mWriterName != OGG && mWriterName != WEBM) {
+        configFormat extractorParams[numTracks];
+        vector<BufferInfo> extractorBufferInfo[numTracks];
+        int32_t trackCount = -1;
+
+        AMediaExtractor *extractor = AMediaExtractor_new();
+        ASSERT_NE(extractor, nullptr) << "Failed to create extractor";
+        ASSERT_NO_FATAL_FAILURE(setupExtractor(extractor, outputFile, trackCount));
+        ASSERT_EQ(trackCount, numTracks)
+                << "Track count reported by extractor does not match the number of input tracks";
+
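+        // For each track, re-read the raw input stream and compare it byte-for-byte against the
+        // data recovered from the muxed output by the extractor.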
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            char *inputBuffer = (char *)malloc(fileSize[idx]);
+            ASSERT_NE(inputBuffer, nullptr)
+                    << "Failed to allocate the buffer of size " << fileSize[idx];
+            mInputStream[idx].seekg(0, mInputStream[idx].beg);
+            mInputStream[idx].read(inputBuffer, fileSize[idx]);
+            ASSERT_EQ(mInputStream[idx].gcount(), fileSize[idx]);
+
+            uint8_t *extractedBuffer = (uint8_t *)malloc(fileSize[idx]);
+            ASSERT_NE(extractedBuffer, nullptr)
+                    << "Failed to allocate the buffer of size " << fileSize[idx];
+            size_t bytesExtracted = 0;
+
+            ASSERT_NO_FATAL_FAILURE(extract(extractor, extractorParams[idx],
+                                            extractorBufferInfo[idx], extractedBuffer,
+                                            fileSize[idx], &bytesExtracted, idx));
+            ASSERT_GT(bytesExtracted, 0) << "Total bytes extracted by extractor cannot be zero";
+
+            ASSERT_NO_FATAL_FAILURE(
+                    compareParams(param[idx], extractorParams[idx], extractorBufferInfo[idx], idx));
+
+            ASSERT_EQ(memcmp(extractedBuffer, (uint8_t *)inputBuffer, bytesExtracted), 0)
+                    << "Extracted bit stream does not match with input bit stream";
+
+            free(inputBuffer);
+            free(extractedBuffer);
+        }
+        AMediaExtractor_delete(extractor);
+    }
 }
 
-TEST_P(WriterTest, PauseWriterTest) {
+TEST_P(WriteFunctionalityTest, PauseWriterTest) {
     if (mDisableTest) return;
     ALOGV("Validates the pause() api of writers");
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     string outputFile = OUTPUT_FILE_NAME;
     int32_t fd =
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
@@ -338,8 +609,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -348,8 +621,8 @@
 
     status = mWriter->start(mFileMeta.get());
     ASSERT_EQ((status_t)OK, status);
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                                 mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     bool isPaused = false;
@@ -359,26 +632,26 @@
         isPaused = true;
     }
     // In the pause state, writers shouldn't write anything. Testing the writers for the same
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, numFramesPaused, isPaused);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, isPaused);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     if (isPaused) {
         status = mWriter->start(mFileMeta.get());
         ASSERT_EQ((status_t)OK, status);
     }
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                                  mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     status = mWriter->stop();
     ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
     close(fd);
 }
 
-TEST_P(WriterTest, MultiStartStopPauseTest) {
+TEST_P(WriteFunctionalityTest, MultiStartStopPauseTest) {
     // TODO: (b/144821804)
     // Enable the test for MPE2TS writer
     if (mDisableTest || mWriterName == standardWriters::MPEG2TS) return;
@@ -389,7 +662,7 @@
             open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
     ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
 
-    string writerFormat = GetParam().first;
+    string writerFormat = get<0>(GetParam());
     int32_t status = createWriter(fd);
     ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for output format:" << writerFormat;
 
@@ -397,8 +670,10 @@
     string inputInfo = gEnv->getRes();
     configFormat param;
     bool isAudio;
-    int32_t inputFileIdx = GetParam().second;
-    getFileDetails(inputFile, inputInfo, param, isAudio, inputFileIdx);
+    inputId inpId = get<1>(GetParam());
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
     ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
 
     ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
@@ -415,8 +690,8 @@
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack, 0,
-                              mBufferInfo.size() / 4);
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size() / 4);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
@@ -425,20 +700,20 @@
     }
 
     mWriter->pause();
-    int32_t numFramesPaused = mBufferInfo.size() / 4;
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, numFramesPaused, true);
+    int32_t numFramesPaused = mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], numFramesPaused, true);
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
     for (int32_t count = 0; count < kMaxCount; count++) {
         mWriter->start(mFileMeta.get());
     }
 
-    status = sendBuffersToWriter(mInputStream, mBufferInfo, mInputFrameId, mCurrentTrack,
-                              mInputFrameId, mBufferInfo.size());
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], mInputFrameId[0], mBufferInfo[0].size());
     ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
 
-    mCurrentTrack->stop();
+    mCurrentTrack[0]->stop();
 
     // first stop should succeed.
     status = mWriter->stop();
@@ -451,19 +726,380 @@
     close(fd);
 }
 
+class WriterValidityTest
+    : public WriterTest,
+      public ::testing::TestWithParam<
+              tuple<string /* writerFormat*/, inputId /* inputId0*/, bool /* addSourceFail*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+TEST_P(WriterValidityTest, InvalidInputTest) {
+    if (mDisableTest) return;
+    ALOGV("Validates writer's behavior for invalid inputs");
+
+    string writerFormat = get<0>(GetParam());
+    inputId inpId = get<1>(GetParam());
+    bool addSourceFailExpected = get<2>(GetParam());
+
+    // Test writers for invalid FD value
+    int32_t fd = -1;
+    int32_t status = createWriter(fd);
+    if (status != OK) {
+        ALOGV("createWriter failed for invalid FD, this is expected behavior");
+        return;
+    }
+
+    // If writer was created for invalid fd, test it further.
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    status = addWriterSource(isAudio, param);
+    if (status != OK) {
+        ASSERT_TRUE(addSourceFailExpected)
+                << "Failed to add source for " << writerFormat << " writer";
+        ALOGV("addWriterSource failed for invalid FD, this is expected behavior");
+        return;
+    }
+
+    // start the writer with valid argument but invalid FD
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_NE((status_t)OK, status) << "Writer did not fail for invalid FD";
+
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size());
+    ASSERT_NE((status_t)OK, status) << "Writer did not report error for invalid FD";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+}
+
+TEST_P(WriterValidityTest, MalFormedDataTest) {
+    if (mDisableTest) return;
+    // Enable test for Ogg writer
+    ASSERT_NE(mWriterName, OGG) << "TODO(b/160105646)";
+    ALOGV("Test writer for malformed inputs");
+
+    string writerFormat = get<0>(GetParam());
+    inputId inpId = get<1>(GetParam());
+    bool addSourceFailExpected = get<2>(GetParam());
+    int32_t fd =
+            open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ(status, (status_t)OK)
+            << "Failed to create writer for " << writerFormat << " output format";
+
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    // Remove CSD data from input
+    mNumCsds[0] = 0;
+    status = addWriterSource(isAudio, param);
+    if (status != OK) {
+        ASSERT_TRUE(addSourceFailExpected)
+                << "Failed to add source for " << writerFormat << " writer";
+        ALOGV("%s writer failed to addSource after removing CSD from input", writerFormat.c_str());
+        return;
+    }
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status) << "Could not start " << writerFormat << "writer";
+
+    // Skip first few frames. These may contain sync frames also.
+    int32_t frameID = mInputFrameId[0] + mBufferInfo[0].size() / 4;
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], frameID, mCurrentTrack[0], 0,
+                                 mBufferInfo[0].size());
+    ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    Vector<String16> args;
+    status = mWriter->dump(fd, args);
+    ASSERT_EQ((status_t)OK, status) << "Failed to dump statistics from writer";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop " << writerFormat << " writer";
+    close(fd);
+}
+
+// This test is specific to MPEG4Writer to test more APIs
+TEST_P(WriteFunctionalityTest, Mpeg4WriterTest) {
+    if (mDisableTest) return;
+    if (mWriterName != standardWriters::MPEG4) return;
+    ALOGV("Test MPEG4 writer specific APIs");
+
+    inputId inpId = get<1>(GetParam());
+    int32_t fd =
+            open(OUTPUT_FILE_NAME, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ(status, (status_t)OK) << "Failed to create writer for mpeg4 output format";
+
+    string inputFile = gEnv->getRes();
+    string inputInfo = gEnv->getRes();
+    configFormat param;
+    bool isAudio;
+    ASSERT_NE(inpId, UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    getFileDetails(inputFile, inputInfo, param, isAudio, inpId);
+    ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+    ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo));
+    status = addWriterSource(isAudio, param);
+    ASSERT_EQ((status_t)OK, status) << "Failed to add source for mpeg4 Writer";
+
+    // signal meta data for the writer
+    sp<MPEG4Writer> mp4writer = static_cast<MPEG4Writer *>(mWriter.get());
+    status = mp4writer->setInterleaveDuration(kDefaultInterleaveDuration);
+    ASSERT_EQ((status_t)OK, status) << "setInterleaveDuration failed";
+
+    status = mp4writer->setGeoData(kDefaultLatitudex10000, kDefaultLongitudex10000);
+    ASSERT_EQ((status_t)OK, status) << "setGeoData failed";
+
+    status = mp4writer->setCaptureRate(kDefaultFPS);
+    ASSERT_EQ((status_t)OK, status) << "setCaptureRate failed";
+
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status) << "Could not start the writer";
+
+    status = sendBuffersToWriter(mInputStream[0], mBufferInfo[0], mInputFrameId[0],
+                                 mCurrentTrack[0], 0, mBufferInfo[0].size());
+    ASSERT_EQ((status_t)OK, status) << "mpeg4 writer failed";
+
+    status = mCurrentTrack[0]->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the track";
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    mp4writer.clear();
+    close(fd);
+}
+
+class ListenerTest
+    : public WriterTest,
+      public ::testing::TestWithParam<tuple<
+              string /* writerFormat*/, inputId /* inputId0*/, inputId /* inputId1*/,
+              float /* FileSizeLimit*/, float /* FileDurationLimit*/, float /* BufferInterval*/>> {
+  public:
+    virtual void SetUp() override { setupWriterType(get<0>(GetParam())); }
+};
+
+TEST_P(ListenerTest, SetMaxFileLimitsTest) {
+    // TODO(b/151892414): Enable test for other writers
+    if (mDisableTest || mWriterName != MPEG4) return;
+    ALOGV("Validates writer when max file limits are set");
+
+    string writerFormat = get<0>(GetParam());
+    string outputFile = OUTPUT_FILE_NAME;
+    int32_t fd =
+            open(outputFile.c_str(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    ASSERT_GE(fd, 0) << "Failed to open output file to dump writer's data";
+
+    int32_t status = createWriter(fd);
+    ASSERT_EQ((status_t)OK, status) << "Failed to create writer for output format:" << writerFormat;
+
+    inputId inpId[] = {get<1>(GetParam()), get<2>(GetParam())};
+    ASSERT_NE(inpId[0], UNUSED_ID) << "Test expects first inputId to be a valid id";
+
+    size_t inputFileSize = 0;
+    int64_t lastFrameTimeStampUs = INT_MAX;
+    int32_t numTracks = 1;
+    if (inpId[1] != UNUSED_ID) {
+        numTracks++;
+    }
+    for (int32_t idx = 0; idx < numTracks; idx++) {
+        string inputFile = gEnv->getRes();
+        string inputInfo = gEnv->getRes();
+        configFormat param;
+        bool isAudio;
+        getFileDetails(inputFile, inputInfo, param, isAudio, inpId[idx]);
+        ASSERT_NE(inputFile.compare(gEnv->getRes()), 0) << "No input file specified";
+
+        ASSERT_NO_FATAL_FAILURE(getInputBufferInfo(inputFile, inputInfo, idx));
+        status = addWriterSource(isAudio, param, idx);
+        ASSERT_EQ((status_t)OK, status) << "Failed to add source for " << writerFormat << "Writer";
+
+        // Read file properties
+        struct stat buf;
+        status = stat(inputFile.c_str(), &buf);
+        ASSERT_EQ(0, status);
+
+        inputFileSize += buf.st_size;
+        if (lastFrameTimeStampUs > mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs) {
+            lastFrameTimeStampUs = mBufferInfo[idx][mBufferInfo[idx].size() - 1].timeUs;
+        }
+    }
+
+    float fileSizeLimit = get<3>(GetParam());
+    float fileDurationLimit = get<4>(GetParam());
+    int64_t maxFileSize = 0;
+    int64_t maxFileDuration = 0;
+    if (fileSizeLimit > 0) {
+        maxFileSize = (int64_t)(fileSizeLimit * inputFileSize);
+        mWriter->setMaxFileSize(maxFileSize);
+    }
+    if (fileDurationLimit > 0) {
+        maxFileDuration = (int64_t)(fileDurationLimit * lastFrameTimeStampUs);
+        mWriter->setMaxFileDuration(maxFileDuration);
+    }
+
+    sp<WriterListener> listener = new WriterListener();
+    ASSERT_NE(listener, nullptr) << "unable to allocate listener";
+
+    mWriter->setListener(listener);
+    status = mWriter->start(mFileMeta.get());
+    ASSERT_EQ((status_t)OK, status);
+
+    float interval = get<5>(GetParam());
+    ASSERT_LE(interval, 1.0f) << "Buffer interval invalid. Should be less than or equal to 1.0";
+
+    size_t range = 0;
+    int32_t loopCount = 0;
+    int32_t offset[kMaxTrackCount]{};
+    while (loopCount < ceil(1.0 / interval)) {
+        for (int32_t idx = 0; idx < numTracks; idx++) {
+            range = mBufferInfo[idx].size() * interval;
+            status = sendBuffersToWriter(mInputStream[idx], mBufferInfo[idx], mInputFrameId[idx],
+                                         mCurrentTrack[idx], offset[idx], range, false, listener);
+            ASSERT_EQ((status_t)OK, status) << writerFormat << " writer failed";
+            offset[idx] += range;
+        }
+        loopCount++;
+    }
+
+    ASSERT_TRUE(mWriter->reachedEOS()) << "EOS not signalled.";
+
+    for (int32_t idx = 0; idx < kMaxTrackCount; idx++) {
+        if (mCurrentTrack[idx]) {
+            mCurrentTrack[idx]->stop();
+        }
+    }
+
+    status = mWriter->stop();
+    ASSERT_EQ((status_t)OK, status) << "Failed to stop the writer";
+    close(fd);
+
+    // A limit callback is expected only for the limit(s) that were actually set.
+    if (maxFileSize > 0 && maxFileDuration > 0) {
+        ASSERT_TRUE(listener->mSignaledSize || listener->mSignaledDuration);
+    } else if (maxFileSize > 0) {
+        ASSERT_TRUE(listener->mSignaledSize);
+    } else if (maxFileDuration > 0) {
+        ASSERT_TRUE(listener->mSignaledDuration);
+    } else {
+        ASSERT_FALSE(listener->mSignaledSize);
+        ASSERT_FALSE(listener->mSignaledDuration);
+    }
+
+    if (maxFileSize > 0) {
+        struct stat buf;
+        status = stat(outputFile.c_str(), &buf);
+        ASSERT_EQ(0, status);
+        ASSERT_LE(buf.st_size, maxFileSize);
+    }
+}
+
+// TODO: (b/150923387)
+// Add WEBM input
+INSTANTIATE_TEST_SUITE_P(ListenerTestAll, ListenerTest,
+                         ::testing::Values(make_tuple("aac", AAC_1, UNUSED_ID, 0.6, 0.7, 1),
+                                           make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 0.2, 0.6, 1),
+                                           make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 0.5, 0.5, 1),
+                                           make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 0.2, 1, 1),
+                                           make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.4, 0.3, 0.25),
+                                           make_tuple("mpeg4", AAC_1, UNUSED_ID, 0.3, 1, 0.5),
+                                           make_tuple("ogg", OPUS_1, UNUSED_ID, 0.7, 0.3, 1)));
+
 // TODO: (b/144476164)
 // Add AAC_ADTS, FLAC, AV1 input
-INSTANTIATE_TEST_SUITE_P(WriterTestAll, WriterTest,
-                         ::testing::Values(make_pair("ogg", 0), make_pair("webm", 0),
-                                           make_pair("aac", 1), make_pair("mpeg4", 1),
-                                           make_pair("amrnb", 3), make_pair("amrwb", 4),
-                                           make_pair("webm", 5), make_pair("webm", 7),
-                                           make_pair("webm", 8), make_pair("mpeg4", 9),
-                                           make_pair("mpeg4", 10), make_pair("mpeg4", 12),
-                                           make_pair("mpeg4", 13), make_pair("mpeg2Ts", 1),
-                                           make_pair("mpeg2Ts", 9)));
+INSTANTIATE_TEST_SUITE_P(
+        WriterTestAll, WriteFunctionalityTest,
+        ::testing::Values(
+                make_tuple("aac", AAC_1, UNUSED_ID, 1),
+
+                make_tuple("amrnb", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("amrwb", AMR_WB_1, UNUSED_ID, 1),
+
+                // TODO(b/144902018): Enable test for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, UNUSED_ID, 1),
+                // make_tuple("mpeg2Ts", AVC_1, UNUSED_ID, 1),
+                // TODO(b/156355857): Add multitrack for mpeg2ts
+                // make_tuple("mpeg2Ts", AAC_1, AVC_1, 0.50),
+                // make_tuple("mpeg2Ts", AVC_1, AAC_1, 0.25),
+
+                make_tuple("mpeg4", AAC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_NB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AMR_WB_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", H263_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEIC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", HEVC_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", MPEG4_1, UNUSED_ID, 1),
+                make_tuple("mpeg4", AAC_1, AVC_1, 0.25),
+                make_tuple("mpeg4", AVC_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_WB_1, AAC_1, 0.75),
+                make_tuple("mpeg4", HEVC_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, AAC_1, 0.75),
+                make_tuple("mpeg4", AMR_NB_1, AMR_WB_1, 0.25),
+                make_tuple("mpeg4", H263_1, AMR_NB_1, 0.50),
+                make_tuple("mpeg4", MPEG4_1, HEVC_1, 0.75),
+
+                make_tuple("ogg", OPUS_1, UNUSED_ID, 1),
+
+                make_tuple("webm", OPUS_1, UNUSED_ID, 1),
+                make_tuple("webm", VORBIS_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, UNUSED_ID, 1),
+                make_tuple("webm", VP9_1, UNUSED_ID, 1),
+                make_tuple("webm", VP8_1, OPUS_1, 0.50),
+                make_tuple("webm", VORBIS_1, VP8_1, 0.25)));
+
+INSTANTIATE_TEST_SUITE_P(
+        WriterValidityTest, WriterValidityTest,
+        ::testing::Values(
+                make_tuple("aac", AAC_1, true),
+
+                make_tuple("amrnb", AMR_NB_1, true),
+                make_tuple("amrwb", AMR_WB_1, true),
+
+                make_tuple("mpeg4", AAC_1, false),
+                make_tuple("mpeg4", AMR_NB_1, false),
+                make_tuple("mpeg4", AVC_1, false),
+                make_tuple("mpeg4", H263_1, false),
+                make_tuple("mpeg4", HEIC_1, false),
+                make_tuple("mpeg4", HEVC_1, false),
+                make_tuple("mpeg4", MPEG4_1, false),
+
+                make_tuple("ogg", OPUS_1, true),
+
+                make_tuple("webm", OPUS_1, false),
+                make_tuple("webm", VORBIS_1, true),
+                make_tuple("webm", VP8_1, false),
+                make_tuple("webm", VP9_1, false)));
 
 int main(int argc, char **argv) {
+    ProcessState::self()->startThreadPool();
     gEnv = new WriterTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libstagefright/tests/writer/WriterTestEnvironment.h b/media/libstagefright/tests/writer/WriterTestEnvironment.h
index 99e686f..7da0a62 100644
--- a/media/libstagefright/tests/writer/WriterTestEnvironment.h
+++ b/media/libstagefright/tests/writer/WriterTestEnvironment.h
@@ -25,7 +25,7 @@
 
 class WriterTestEnvironment : public ::testing::Environment {
   public:
-    WriterTestEnvironment() : res("/data/local/tmp/") {}
+    WriterTestEnvironment() : res("/data/local/tmp/"), deleteOutput(true) {}
 
     // Parses the command line arguments
     int initFromOptions(int argc, char **argv);
@@ -34,16 +34,21 @@
 
     const string getRes() const { return res; }
 
+    bool cleanUp() const { return deleteOutput; }
+
   private:
     string res;
+    bool deleteOutput;
 };
 
 int WriterTestEnvironment::initFromOptions(int argc, char **argv) {
-    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+    static struct option options[] = {{"res", required_argument, 0, 'P'},
+                                      {"cleanUp", optional_argument, 0, 'C'},
+                                      {0, 0, 0, 0}};
 
     while (true) {
         int index = 0;
-        int c = getopt_long(argc, argv, "P:", options, &index);
+        int c = getopt_long(argc, argv, "P:C:", options, &index);
         if (c == -1) {
             break;
         }
@@ -52,6 +57,11 @@
             case 'P':
                 setRes(optarg);
                 break;
+            case 'C':
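+                // Any value other than "false" keeps the default of deleting the output file.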
+                if (!strcmp(optarg, "false")) {
+                    deleteOutput = false;
+                }
+                break;
             default:
                 break;
         }
@@ -62,7 +72,8 @@
                 "unrecognized option: %s\n\n"
                 "usage: %s <gtest options> <test options>\n\n"
                 "test options are:\n\n"
-                "-P, --path: Resource files directory location\n",
+                "-P, --path: Resource files directory location\n"
+                "-C, default:true. Delete output file after test completes\n",
                 argv[optind ?: 1], argv[0]);
         return 2;
     }
diff --git a/media/libstagefright/tests/writer/WriterUtility.cpp b/media/libstagefright/tests/writer/WriterUtility.cpp
index f24ccb6..a3043fe 100644
--- a/media/libstagefright/tests/writer/WriterUtility.cpp
+++ b/media/libstagefright/tests/writer/WriterUtility.cpp
@@ -24,9 +24,16 @@
 
 int32_t sendBuffersToWriter(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                             int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
-                            int32_t range, bool isPaused) {
+                            int32_t range, bool isPaused, sp<WriterListener> listener) {
     while (1) {
         if (inputFrameId >= (int)bufferInfo.size() || inputFrameId >= (offset + range)) break;
+        if (listener != nullptr) {
+            if (listener->mSignaledDuration || listener->mSignaledSize) {
+                ALOGV("Max File limit reached. No more buffers will be sent to the writer");
+                break;
+            }
+        }
+
         int32_t size = bufferInfo[inputFrameId].size;
         char *data = (char *)malloc(size);
         if (!data) {
diff --git a/media/libstagefright/tests/writer/WriterUtility.h b/media/libstagefright/tests/writer/WriterUtility.h
index cdd6246..6b456fb 100644
--- a/media/libstagefright/tests/writer/WriterUtility.h
+++ b/media/libstagefright/tests/writer/WriterUtility.h
@@ -27,13 +27,19 @@
 
 #include <media/stagefright/MediaAdapter.h>
 
-using namespace android;
-using namespace std;
+#include "WriterListener.h"
 
 #define CODEC_CONFIG_FLAG 32
 
+constexpr uint32_t kMaxTrackCount = 2;
 constexpr uint32_t kMaxCSDStrlen = 16;
 constexpr uint32_t kMaxCount = 20;
+constexpr int32_t kMimeSize = 128;
+constexpr int32_t kDefaultInterleaveDuration = 0;
+// Geodata is set according to ISO-6709 standard.
+constexpr int32_t kDefaultLatitudex10000 = 500000;
+constexpr int32_t kDefaultLongitudex10000 = 1000000;
+constexpr float kDefaultFPS = 30.0f;
 
 struct BufferInfo {
     int32_t size;
@@ -41,9 +47,18 @@
     int64_t timeUs;
 };
 
+struct configFormat {
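+    // Audio tracks use sampleRate/channelCount; video and image tracks use width/height.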
+    char mime[kMimeSize];
+    int32_t width;
+    int32_t height;
+    int32_t sampleRate;
+    int32_t channelCount;
+};
+
 int32_t sendBuffersToWriter(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                             int32_t &inputFrameId, sp<MediaAdapter> &currentTrack, int32_t offset,
-                            int32_t range, bool isPaused = false);
+                            int32_t range, bool isPaused = false,
+                            sp<WriterListener> listener = nullptr);
 
 int32_t writeHeaderBuffers(ifstream &inputStream, vector<BufferInfo> &bufferInfo,
                            int32_t &inputFrameId, sp<AMessage> &format, int32_t numCsds);
diff --git a/media/libstagefright/timedtext/Android.bp b/media/libstagefright/timedtext/Android.bp
index 4f4ceb1..6590ef7 100644
--- a/media/libstagefright/timedtext/Android.bp
+++ b/media/libstagefright/timedtext/Android.bp
@@ -1,3 +1,22 @@
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_timedtext_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_libstagefright_timedtext_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libstagefright_timedtext",
 
@@ -22,5 +41,3 @@
 
     shared_libs: ["libmedia"],
 }
-
-
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
new file mode 100644
index 0000000..35a5b11
--- /dev/null
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/libstagefright/timedtext
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "TimedTextUnitTest" }
+  ]
+}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
new file mode 100644
index 0000000..0b632bf
--- /dev/null
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_timedtext_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_timedtext_license",
+    ],
+}
+
+cc_test {
+    name: "TimedTextUnitTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "TimedTextUnitTest.cpp",
+    ],
+
+    static_libs: [
+        "libstagefright_timedtext",
+        "libstagefright_foundation",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libbinder",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
diff --git a/media/libstagefright/timedtext/test/AndroidTest.xml b/media/libstagefright/timedtext/test/AndroidTest.xml
new file mode 100644
index 0000000..3654e23
--- /dev/null
+++ b/media/libstagefright/timedtext/test/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for TimedText unit test">
+    <option name="test-suite-tag" value="TimedTextUnitTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="TimedTextUnitTest->/data/local/tmp/TimedTextUnitTest" />
+        <option name="push-file"
+            key="https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true"
+            value="/data/local/tmp/TimedTextUnitTestRes/" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="TimedTextUnitTest" />
+        <option name="native-test-flag" value="-P /data/local/tmp/TimedTextUnitTestRes/" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/timedtext/test/README.md b/media/libstagefright/timedtext/test/README.md
new file mode 100644
index 0000000..3a774bd
--- /dev/null
+++ b/media/libstagefright/timedtext/test/README.md
@@ -0,0 +1,40 @@
+## Media Testing ##
+---
+#### TimedText Unit Test :
+The TimedText Unit Test Suite validates the TextDescription class available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m TimedTextUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push the binary from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/TimedTextUnitTest/TimedTextUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push the binary from nativetest.
+```
+adb push ${OUT}/data/nativetest/TimedTextUnitTest/TimedTextUnitTest /data/local/tmp/
+```
+
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip).
+Download, unzip and push these files to the device for testing.
+
+```
+adb push TimedTextUnitTestRes/. /data/local/tmp/
+```
+
+usage: TimedTextUnitTest -P \<path_to_folder\>
+```
+adb shell /data/local/tmp/TimedTextUnitTest -P /data/local/tmp/TimedTextUnitTestRes/
+```
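+
+If only a subset of the tests needs to run, the standard gtest filter flag can be appended. The filter pattern below is only an illustration; any test name from TimedTextUnitTest.cpp can be used.
+```
+adb shell /data/local/tmp/TimedTextUnitTest -P /data/local/tmp/TimedTextUnitTestRes/ --gtest_filter="*SRTDescription*"
+```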
+Alternatively, the test can also be run using atest command.
+
+```
+atest TimedTextUnitTest -- --enable-module-dynamic-download=true
+```
diff --git a/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h b/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h
new file mode 100644
index 0000000..52280c1
--- /dev/null
+++ b/media/libstagefright/timedtext/test/TimedTextTestEnvironment.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TIMEDTEXT_TEST_ENVIRONMENT_H__
+#define __TIMEDTEXT_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class TimedTextTestEnvironment : public ::testing::Environment {
+  public:
+    TimedTextTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int TimedTextTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"res", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P':
+                setRes(optarg);
+                break;
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __TIMEDTEXT_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
new file mode 100644
index 0000000..d85ae39
--- /dev/null
+++ b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TimedTextUnitTest"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <fstream>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+
+#include "timedtext/TextDescriptions.h"
+
+#include "TimedTextTestEnvironment.h"
+
+constexpr int32_t kStartTimeMs = 10000;
+
+enum {
+    // These keys must be in sync with the keys in
+    // frameworks/av/media/libstagefright/timedtext/TextDescriptions.h
+    KEY_DISPLAY_FLAGS = 1,
+    KEY_STYLE_FLAGS = 2,
+    KEY_BACKGROUND_COLOR_RGBA = 3,
+    KEY_HIGHLIGHT_COLOR_RGBA = 4,
+    KEY_SCROLL_DELAY = 5,
+    KEY_WRAP_TEXT = 6,
+    KEY_START_TIME = 7,
+    KEY_STRUCT_BLINKING_TEXT_LIST = 8,
+    KEY_STRUCT_FONT_LIST = 9,
+    KEY_STRUCT_HIGHLIGHT_LIST = 10,
+    KEY_STRUCT_HYPER_TEXT_LIST = 11,
+    KEY_STRUCT_KARAOKE_LIST = 12,
+    KEY_STRUCT_STYLE_LIST = 13,
+    KEY_STRUCT_TEXT_POS = 14,
+    KEY_STRUCT_JUSTIFICATION = 15,
+    KEY_STRUCT_TEXT = 16,
+
+    KEY_GLOBAL_SETTING = 101,
+    KEY_LOCAL_SETTING = 102,
+    KEY_START_CHAR = 103,
+    KEY_END_CHAR = 104,
+    KEY_FONT_ID = 105,
+    KEY_FONT_SIZE = 106,
+    KEY_TEXT_COLOR_RGBA = 107,
+};
+
+struct FontInfo {
+    int32_t displayFlag = -1;
+    int32_t horizontalJustification = -1;
+    int32_t verticalJustification = -1;
+    int32_t rgbaBackground = -1;
+    int32_t leftPos = -1;
+    int32_t topPos = -1;
+    int32_t bottomPos = -1;
+    int32_t rightPos = -1;
+    int32_t startchar = -1;
+    int32_t endChar = -1;
+    int32_t fontId = -1;
+    int32_t faceStyle = -1;
+    int32_t fontSize = -1;
+    int32_t rgbaText = -1;
+    int32_t entryCount = -1;
+};
+
+struct FontRecord {
+    int32_t fontID = -1;
+    int32_t fontNameLength = -1;
+    const uint8_t *font = nullptr;
+};
+
+using namespace android;
+
+static TimedTextTestEnvironment *gEnv = nullptr;
+
+class TimedTextUnitTest : public ::testing::TestWithParam</*filename*/ string> {
+  public:
+    TimedTextUnitTest(){};
+
+    ~TimedTextUnitTest() {
+        if (mEleStream) mEleStream.close();
+    }
+
+    virtual void SetUp() override {
+        mInputFileName = gEnv->getRes() + GetParam();
+        mEleStream.open(mInputFileName, ifstream::binary);
+        ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open " << GetParam();
+
+        struct stat buf;
+        status_t status = stat(mInputFileName.c_str(), &buf);
+        ASSERT_EQ(status, 0) << "Failed to get properties of input file: " << GetParam();
+        mFileSize = buf.st_size;
+        ALOGI("Size of the input file %s = %zu", GetParam().c_str(), mFileSize);
+    }
+
+    string mInputFileName;
+    size_t mFileSize;
+    ifstream mEleStream;
+};
+
+class SRTDescriptionTest : public TimedTextUnitTest {
+  public:
+    virtual void SetUp() override { TimedTextUnitTest::SetUp(); }
+};
+
+class Text3GPPDescriptionTest : public TimedTextUnitTest {
+  public:
+    virtual void SetUp() override { TimedTextUnitTest::SetUp(); }
+};
+
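+// The SRT description parcel is expected to contain, in order: KEY_LOCAL_SETTING,
+// KEY_START_TIME with the start time, and KEY_STRUCT_TEXT followed by the text
+// size, the byte-array length, and the raw subtitle data.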
+TEST_P(SRTDescriptionTest, extractSRTDescriptionTest) {
+    char data[mFileSize];
+    mEleStream.read(data, sizeof(data));
+    ASSERT_EQ(mEleStream.gcount(), mFileSize);
+
+    Parcel parcel;
+    int32_t flag = TextDescriptions::OUT_OF_BAND_TEXT_SRT | TextDescriptions::LOCAL_DESCRIPTIONS;
+    status_t status = TextDescriptions::getParcelOfDescriptions((const uint8_t *)data, mFileSize,
+                                                                flag, kStartTimeMs, &parcel);
+    ASSERT_EQ(status, 0) << "getParcelOfDescriptions returned error";
+    ALOGI("Size of the Parcel: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel is empty";
+
+    parcel.setDataPosition(0);
+    int32_t key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_LOCAL_SETTING) << "Parcel has invalid key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_START_TIME) << "Parcel has invalid start time key";
+    ASSERT_EQ(parcel.readInt32(), kStartTimeMs) << "Parcel has invalid timings";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_TEXT) << "Parcel has invalid struct text key";
+    ASSERT_EQ(parcel.readInt32(), mFileSize) << "Parcel has invalid text data";
+    int32_t fileSize = parcel.readInt32();
+    ASSERT_EQ(fileSize, mFileSize) << "Parcel has invalid file size value";
+    uint8_t tmpData[fileSize];
+    status = parcel.read((void *)tmpData, fileSize);
+    ASSERT_EQ(status, 0) << "Failed to read the data from parcel";
+    // To make sure end of parcel is reached
+    ASSERT_EQ(parcel.dataAvail(), 0) << "Parcel has some data left to read";
+}
+
+// This test uses the properties of tx3g box mentioned in 3GPP Timed Text Format
+// Specification#: 26.245 / Section: 5.16(Sample Description Format)
+// https://www.3gpp.org/ftp/Specs/archive/26_series/26.245/
+
+TEST_P(Text3GPPDescriptionTest, Text3GPPGlobalDescriptionTest) {
+    char data[mFileSize];
+    mEleStream.read(data, sizeof(data));
+    ASSERT_EQ(mEleStream.gcount(), mFileSize);
+
+    const uint8_t *tmpData = (const uint8_t *)data;
+    int32_t remaining = mFileSize;
+    FontInfo fontInfo;
+    vector<FontRecord> fontRecordEntries;
+
+    // Skip the 16-byte tx3g sample entry header (box size, box type, reserved bytes
+    // and data reference index)
+    tmpData += 16;
+    remaining -= 16;
+
+    fontInfo.displayFlag = U32_AT(tmpData);
+    ALOGI("Display flag: %d", fontInfo.displayFlag);
+    fontInfo.horizontalJustification = tmpData[4];
+    ALOGI("Horizontal Justification: %d", fontInfo.horizontalJustification);
+    fontInfo.verticalJustification = tmpData[5];
+    ALOGI("Vertical Justification: %d", fontInfo.verticalJustification);
+    fontInfo.rgbaBackground =
+            *(tmpData + 6) << 24 | *(tmpData + 7) << 16 | *(tmpData + 8) << 8 | *(tmpData + 9);
+    ALOGI("rgba value of background: %d", fontInfo.rgbaBackground);
+
+    tmpData += 10;
+    remaining -= 10;
+
+    if (remaining >= 8) {
+        fontInfo.leftPos = U16_AT(tmpData);
+        ALOGI("Left: %d", fontInfo.leftPos);
+        fontInfo.topPos = U16_AT(tmpData + 2);
+        ALOGI("Top: %d", fontInfo.topPos);
+        fontInfo.bottomPos = U16_AT(tmpData + 4);
+        ALOGI("Bottom: %d", fontInfo.bottomPos);
+        fontInfo.rightPos = U16_AT(tmpData + 6);
+        ALOGI("Right: %d", fontInfo.rightPos);
+
+        tmpData += 8;
+        remaining -= 8;
+
+        if (remaining >= 12) {
+            fontInfo.startchar = U16_AT(tmpData);
+            ALOGI("Start character: %d", fontInfo.startchar);
+            fontInfo.endChar = U16_AT(tmpData + 2);
+            ALOGI("End character: %d", fontInfo.endChar);
+            fontInfo.fontId = U16_AT(tmpData + 4);
+            ALOGI("Value of font Identifier: %d", fontInfo.fontId);
+            fontInfo.faceStyle = *(tmpData + 6);
+            ALOGI("Face style flag : %d", fontInfo.faceStyle);
+            fontInfo.fontSize = *(tmpData + 7);
+            ALOGI("Size of the font: %d", fontInfo.fontSize);
+            fontInfo.rgbaText = *(tmpData + 8) << 24 | *(tmpData + 9) << 16 | *(tmpData + 10) << 8 |
+                                *(tmpData + 11);
+            ALOGI("rgba value of the text: %d", fontInfo.rgbaText);
+
+            tmpData += 12;
+            remaining -= 12;
+
+            if (remaining >= 10) {
+                // Skip the ftab box header (size and type) and read the font entry count
+                fontInfo.entryCount = U16_AT(tmpData + 8);
+                ALOGI("Value of entry count: %d", fontInfo.entryCount);
+
+                tmpData += 10;
+                remaining -= 10;
+
+                for (int32_t i = 0; i < fontInfo.entryCount; i++) {
+                    if (remaining < 3) break;
+                    int32_t tempFontID = U16_AT(tmpData);
+                    ALOGI("Font Id: %d", tempFontID);
+                    int32_t tempFontNameLength = *(tmpData + 2);
+                    ALOGI("Length of font name: %d", tempFontNameLength);
+
+                    tmpData += 3;
+                    remaining -= 3;
+
+                    if (remaining < tempFontNameLength) break;
+                    const uint8_t *tmpFont = tmpData;
+                    char *tmpFontName = strndup((const char *)tmpFont, tempFontNameLength);
+                    ASSERT_NE(tmpFontName, nullptr) << "Font Name is null";
+                    ALOGI("FontName = %s", tmpFontName);
+                    free(tmpFontName);
+                    tmpData += tempFontNameLength;
+                    remaining -= tempFontNameLength;
+                    fontRecordEntries.push_back({tempFontID, tempFontNameLength, tmpFont});
+                }
+            }
+        }
+    }
+
+    Parcel parcel;
+    int32_t flag = TextDescriptions::IN_BAND_TEXT_3GPP | TextDescriptions::GLOBAL_DESCRIPTIONS;
+    status_t status = TextDescriptions::getParcelOfDescriptions((const uint8_t *)data, mFileSize,
+                                                                flag, kStartTimeMs, &parcel);
+    ASSERT_EQ(status, 0) << "getParcelOfDescriptions returned error";
+    ALOGI("Size of the Parcel: %zu", parcel.dataSize());
+    ASSERT_GT(parcel.dataSize(), 0) << "Parcel is empty";
+
+    parcel.setDataPosition(0);
+    int32_t key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_GLOBAL_SETTING) << "Parcel has invalid key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_DISPLAY_FLAGS) << "Parcel has invalid DISPLAY FLAGS Key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.displayFlag)
+            << "Parcel has invalid value of display flag";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_JUSTIFICATION) << "Parcel has invalid STRUCT JUSTIFICATION key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.horizontalJustification)
+            << "Parcel has invalid value of Horizontal justification";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.verticalJustification)
+            << "Parcel has invalid value of Vertical justification";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_BACKGROUND_COLOR_RGBA) << "Parcel has invalid BACKGROUND COLOR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rgbaBackground)
+            << "Parcel has invalid rgba background color value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_TEXT_POS) << "Parcel has invalid STRUCT TEXT POSITION key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.leftPos)
+            << "Parcel has invalid left position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.topPos)
+            << "Parcel has invalid top position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.bottomPos)
+            << "Parcel has invalid bottom position value";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rightPos)
+            << "Parcel has invalid right position value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_STYLE_LIST) << "Parcel has invalid STRUCT STYLE LIST key";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_START_CHAR) << "Parcel has invalid START CHAR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.startchar)
+            << "Parcel has invalid value of start character";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_END_CHAR) << "Parcel has invalid END CHAR key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.endChar) << "Parcel has invalid value of end character";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_FONT_ID) << "Parcel has invalid FONT ID key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.fontId) << "Parcel has invalid value of font Id";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STYLE_FLAGS) << "Parcel has invalid STYLE FLAGS key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.faceStyle) << "Parcel has invalid value of style flags";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_FONT_SIZE) << "Parcel has invalid FONT SIZE key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.fontSize) << "Parcel has invalid value of font size";
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_TEXT_COLOR_RGBA) << "Parcel has invalid TEXT COLOR RGBA key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.rgbaText) << "Parcel has invalid rgba text color value";
+
+    if (parcel.dataAvail() == 0) {
+        ALOGV("Completed reading the parcel");
+        return;
+    }
+
+    key = parcel.readInt32();
+    ASSERT_EQ(key, KEY_STRUCT_FONT_LIST) << "Parcel has invalid STRUCT FONT LIST key";
+    ASSERT_EQ(parcel.readInt32(), fontInfo.entryCount) << "Parcel has invalid value of entry count";
+    ASSERT_EQ(fontInfo.entryCount, fontRecordEntries.size())
+            << "Array size does not match expected number of entries";
+    for (int32_t i = 0; i < fontInfo.entryCount; i++) {
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontID)
+                << "Parcel has invalid value of font Id";
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontNameLength)
+                << "Parcel has invalid value of font name length";
+        uint8_t fontName[fontRecordEntries[i].fontNameLength];
+        // Data written with writeByteArray() is stored as a count followed by the actual bytes
+        ASSERT_EQ(parcel.readInt32(), fontRecordEntries[i].fontNameLength);
+        status = parcel.read((void *)fontName, fontRecordEntries[i].fontNameLength);
+        ASSERT_EQ(status, 0) << "Failed to read the font name from parcel";
+        ASSERT_EQ(memcmp(fontName, fontRecordEntries[i].font, fontRecordEntries[i].fontNameLength),
+                  0)
+                << "Parcel has invalid font";
+    }
+    // To make sure end of parcel is reached
+    ASSERT_EQ(parcel.dataAvail(), 0) << "Parcel has some data left to read";
+}
+
+INSTANTIATE_TEST_SUITE_P(TimedTextUnitTestAll, SRTDescriptionTest,
+                         ::testing::Values(("sampleTest1.srt"),
+                                           ("sampleTest2.srt")));
+
+INSTANTIATE_TEST_SUITE_P(TimedTextUnitTestAll, Text3GPPDescriptionTest,
+                         ::testing::Values(("tx3gBox1"),
+                                           ("tx3gBox2")));
+
+int main(int argc, char **argv) {
+    gEnv = new TimedTextTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGV("Test result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 2cebe8f..32a22ba 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_static {
     name: "libstagefright_webm",
 
@@ -31,6 +40,7 @@
         "libstagefright_foundation",
         "libutils",
         "liblog",
+        "framework-permission-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/media/libstagefright/webm/tests/Android.bp b/media/libstagefright/webm/tests/Android.bp
index 5183a49..4443766 100644
--- a/media/libstagefright/webm/tests/Android.bp
+++ b/media/libstagefright/webm/tests/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_test {
     name: "WebmFrameThreadUnitTest",
     gtest: true,
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
new file mode 100644
index 0000000..a33b888
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -0,0 +1,125 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_defaults {
+    name: "writer-fuzzerbase-defaults",
+    local_include_dirs: [
+        "include",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+    static_libs: [
+        "liblog",
+        "libstagefright_foundation",
+        "libstagefright",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "libutils",
+    ],
+}
+
+cc_defaults {
+    name: "writer-fuzzer-defaults",
+    defaults: ["writer-fuzzerbase-defaults"],
+    static_libs: [
+        "libwriterfuzzerbase",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_library_static {
+    name: "libwriterfuzzerbase",
+    defaults: ["writer-fuzzerbase-defaults"],
+    srcs: [
+        "WriterFuzzerBase.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "amrnb_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
+        "amr_writer_fuzzer.cpp",
+    ],
+    cflags: [
+        "-DAMRNB",
+    ],
+}
+
+cc_fuzz {
+    name: "amrwb_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
+        "amr_writer_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name : "mpeg4_writer_fuzzer",
+    defaults : ["writer-fuzzer-defaults"],
+    srcs : [
+        "mpeg4_writer_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_esds",
+    ],
+}
+
+cc_fuzz {
+    name : "ogg_writer_fuzzer",
+    defaults : ["writer-fuzzer-defaults"],
+    srcs : [
+        "ogg_writer_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libogg",
+    ],
+}
+
+cc_fuzz {
+    name : "webm_writer_fuzzer",
+    defaults : ["writer-fuzzer-defaults"],
+    srcs : [
+        "webm_writer_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_webm",
+        "libdatasource",
+    ],
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+    ],
+}
diff --git a/media/libstagefright/writer_fuzzers/README.md b/media/libstagefright/writer_fuzzers/README.md
new file mode 100644
index 0000000..6f95ecc
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/README.md
@@ -0,0 +1,100 @@
+# Fuzzer for writers
+
+## Table of contents
++  [libwriterfuzzerbase](#WriterFuzzerBase)
++  [Amr Writer](#amrWriterFuzzer)
++  [MPEG4 Writer](#mpeg4WriterFuzzer)
++  [OGG Writer](#oggWriterFuzzer)
++  [WEBM Writer](#webmWriterFuzzer)
+
+# <a name="WriterFuzzerBase"></a> Fuzzer for libwriterfuzzerbase
+All the writers share a common API: creating a writer, adding a source for
+each track, and so on. These common APIs have been abstracted into a base class
+called `WriterFuzzerBase` so that code is reused across the fuzzer plugins.
+
+## Plugin Design Considerations
+The fuzzer plugin for writers is designed based on an understanding of the
+writer components and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+Fuzzer for writers supports the following parameters:
+1. Track Mime (parameter name: `mime`)
+2. Channel Count (parameter name: `channel-count`)
+3. Sample Rate (parameter name: `sample-rate`)
+4. Frame Height (parameter name: `height`)
+5. Frame Width (parameter name: `width`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `mime` | 0. `audio/3gpp` 1. `audio/amr-wb` 2. `audio/vorbis` 3. `audio/opus` 4. `audio/mp4a-latm` 5. `audio/mpeg` 6. `audio/mpeg-L1` 7. `audio/mpeg-L2` 8. `audio/midi` 9. `audio/qcelp` 10. `audio/g711-alaw` 11. `audio/g711-mlaw` 12. `audio/flac` 13. `audio/aac-adts` 14. `audio/gsm` 15. `audio/ac3` 16. `audio/eac3` 17. `audio/eac3-joc` 18. `audio/ac4` 19. `audio/scrambled` 20. `audio/alac` 21. `audio/x-ms-wma` 22. `audio/x-adpcm-ms` 23. `audio/x-adpcm-dvi-ima` 24. `video/avc` 25. `video/hevc` 26. `video/mp4v-es` 27. `video/3gpp` 28. `video/x-vnd.on2.vp8` 29. `video/x-vnd.on2.vp9` 30. `video/av01` 31. `video/mpeg2` 32. `video/dolby-vision` 33. `video/scrambled` 34. `video/divx` 35. `video/divx3` 36. `video/xvid` 37. `video/x-motion-jpeg` 38. `text/3gpp-tt` 39. `application/x-subrip` 40. `text/vtt` 41. `text/cea-608` 42. `text/cea-708` 43. `application/x-id3v4` | All the bits of 2nd byte of data for first track and 11th byte of data for second track and 20th byte of data for third track(if present) modulus 44 |
+| `channel-count` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is audio and 12th to 15th bytes of data if second track is audio |
+| `sample-rate` | In the range `1 to INT32_MAX` | All the bits of 7th byte to 10th bytes of data if first track is audio and 16th to 19th bytes of data if second track is audio |
+| `height` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is video and 12th to 15th bytes of data if second track is video |
+| `width` | In the range `0 to INT32_MAX` | All the bits of 7th byte to 10th bytes of data if first track is video and 16th to 19th bytes of data if second track is video |
+
+This also ensures that the plugin is always deterministic for any given input.
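+
+For illustration, a minimal sketch of this byte-to-parameter mapping is shown
+below (hypothetical helper names; this is not the actual `WriterFuzzerBase` code):
+```
+#include <cstdint>
+#include <cstring>
+
+// One input byte selects the track mime from the supported-mime table
+// (44 entries), taken modulo the table size.
+static uint32_t pickMimeIndex(uint8_t configByte, uint32_t mimeTableSize) {
+    return configByte % mimeTableSize;
+}
+
+// Four consecutive input bytes form a 32-bit parameter such as
+// channel-count, sample-rate, height or width.
+static int32_t readConfigValue(const uint8_t *bytes) {
+    int32_t value = 0;
+    std::memcpy(&value, bytes, sizeof(value));
+    return value;
+}
+```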
+
+##### Maximize utilization of input data
+The plugin divides the entire input data into frames based on frame markers.
+If no frame marker is found, the entire input data is treated as a single frame.
+
+This ensures that the plugin tolerates any kind of input (huge, malformed, etc.)
+and thereby increases the chance of identifying vulnerabilities.
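+
+A simplified sketch of this splitting strategy is shown below (the marker
+constant matches the one used by `WriterFuzzerBase`, but this forward-scanning
+helper is illustrative only, not the actual implementation):
+```
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <utility>
+#include <vector>
+
+// Splits the fuzzer input at every occurrence of the frame marker; when no
+// marker is present, the whole buffer is treated as a single frame.
+static std::vector<std::pair<const uint8_t *, size_t>> splitOnMarkers(
+        const uint8_t *data, size_t size) {
+    static constexpr char kMarker[] = "_MARK";
+    static constexpr size_t kMarkerSize = sizeof(kMarker) - 1;
+    std::vector<std::pair<const uint8_t *, size_t>> frames;
+    size_t frameStart = 0;
+    for (size_t i = 0; i + kMarkerSize <= size; ++i) {
+        if (std::memcmp(data + i, kMarker, kMarkerSize) == 0) {
+            if (i > frameStart) {
+                frames.emplace_back(data + frameStart, i - frameStart);
+            }
+            frameStart = i + kMarkerSize;
+        }
+    }
+    // Tail after the last marker, or the entire input when no marker was found.
+    if (frameStart < size || frames.empty()) {
+        frames.emplace_back(data + frameStart, size - frameStart);
+    }
+    return frames;
+}
+```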
+
+# <a name="amrWriterFuzzer"></a> Fuzzer for Amr Writer
+
+## Plugin Design Considerations
+The fuzzer plugin for the AMR writer uses the `WriterFuzzerBase` class and
+implements only `createWriter()` to create the AMR writer.
+
+##### Other considerations
+ * Two fuzzer binaries, `amrnb_writer_fuzzer` and `amrwb_writer_fuzzer`, are generated based on the presence of the `AMRNB` compile-time flag.
+
+# <a name="mpeg4WriterFuzzer"></a> Fuzzer for MPEG4 Writer
+
+## Plugin Design Considerations
+The fuzzer plugin for the MPEG4 writer uses the `WriterFuzzerBase` class and
+implements only `createWriter()` to create the MPEG4 writer.
+
+# <a name="oggWriterFuzzer"></a> Fuzzer for OGG Writer
+
+## Plugin Design Considerations
+The fuzzer plugin for the OGG writer uses the `WriterFuzzerBase` class and
+implements only `createWriter()` to create the OGG writer.
+
+# <a name="webmWriterFuzzer"></a> Fuzzer for WEBM Writer
+
+## Plugin Design Considerations
+The fuzzer plugin for the WEBM writer uses the `WriterFuzzerBase` class and
+implements only `createWriter()` to create the WEBM writer.
+
+## Build
+
+This section describes the steps to build the writer fuzzer binaries.
+
+### Android
+
+`*` = amrnb/amrwb/mpeg4/ogg/webm
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) *_writer_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some media files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/*_writer_fuzzer/*_writer_fuzzer CORPUS_DIR
+```
+
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
new file mode 100644
index 0000000..ee7af70
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -0,0 +1,291 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <utils/Log.h>
+
+#include "WriterFuzzerBase.h"
+
+using namespace android;
+
+/**
+ * Buffer source implementations to parse input file
+ */
+
+uint32_t WriterFuzzerBase::BufferSource::getNumTracks() {
+    uint32_t numTracks = 0;
+    if (mSize > sizeof(uint8_t)) {
+        numTracks = min(mData[0], kMaxTrackCount);
+        mReadIndex += sizeof(uint8_t);
+    }
+    return numTracks;
+}
+
+bool WriterFuzzerBase::BufferSource::searchForMarker(size_t startIndex) {
+    while (true) {
+        if (isMarker()) {
+            return true;
+        }
+        --mReadIndex;
+        if (mReadIndex < startIndex) {
+            break;
+        }
+    }
+    return false;
+}
+
+ConfigFormat WriterFuzzerBase::BufferSource::getConfigFormat(int32_t trackIndex) {
+    return mParams[trackIndex];
+}
+
+int32_t WriterFuzzerBase::BufferSource::getNumCsds(int32_t trackIndex) {
+    return mNumCsds[trackIndex];
+}
+
+vector<FrameData> &WriterFuzzerBase::BufferSource::getFrameList(int32_t trackIndex) {
+    return mFrameList[trackIndex];
+}
+
+void WriterFuzzerBase::BufferSource::getFrameInfo() {
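+    // Scan backwards from the end of the input for frame markers. Each marker is
+    // followed by a 3-byte suffix ('_H_' marks codec-config data; anything else is
+    // a regular frame), a track-index byte and, for regular frames, a flag byte
+    // plus an 8-byte presentation timestamp before the payload. Frames are found
+    // last-to-first and inserted at the front of each track's list to preserve order.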
+    size_t readIndexStart = mReadIndex;
+    if (mSize - mReadIndex > kMarkerSize + kMarkerSuffixSize) {
+        bool isFrameAvailable = true;
+        size_t bytesRemaining = mSize;
+        mReadIndex = mSize - kMarkerSize;
+        while (isFrameAvailable) {
+            isFrameAvailable = searchForMarker(readIndexStart);
+            if (isFrameAvailable) {
+                size_t location = mReadIndex + kMarkerSize;
+                if (location + kMarkerSuffixSize >= bytesRemaining) {
+                    break;
+                }
+                bool isCSD = isCSDMarker(location);
+                location += kMarkerSuffixSize;
+                uint8_t *framePtr = const_cast<uint8_t *>(&mData[location]);
+                size_t frameSize = bytesRemaining - location, bufferSize = 0;
+                uint8_t trackIndex = framePtr[0] % kMaxTrackCount;
+                ++framePtr;
+                uint8_t flags = 0;
+                int64_t pts = 0;
+                if (isCSD && frameSize > 1) {
+                    flags |= kCodecConfigFlag;
+                    pts = 0;
+                    ++mNumCsds[trackIndex];
+                    bufferSize = frameSize - 1;
+                } else if (frameSize > sizeof(uint8_t) + sizeof(int64_t) + 1) {
+                    flags = flagTypes[framePtr[0] % size(flagTypes)];
+                    ++framePtr;
+                    copy(framePtr, framePtr + sizeof(int64_t), reinterpret_cast<uint8_t *>(&pts));
+                    framePtr += sizeof(int64_t);
+                    bufferSize = frameSize - (sizeof(uint8_t) + sizeof(int64_t)) - 1;
+                } else {
+                    break;
+                }
+                mFrameList[trackIndex].insert(mFrameList[trackIndex].begin(),
+                                              FrameData{bufferSize, flags, pts, framePtr});
+                bytesRemaining -= (frameSize + kMarkerSize + kMarkerSuffixSize);
+                --mReadIndex;
+            }
+        }
+    }
+    if (mFrameList[0].empty() && mFrameList[1].empty()) {
+        /**
+         * Scenario where input data does not contain the custom frame markers.
+         * Hence feed the entire data as single frame.
+         */
+        mFrameList[0].emplace_back(FrameData{mSize - readIndexStart, 0, 0, mData + readIndexStart});
+    }
+}
+
+bool WriterFuzzerBase::BufferSource::getTrackInfo(int32_t trackIndex) {
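+    // One byte selects the track mime; for audio tracks the next eight bytes are
+    // read as channel count and sample rate, for video tracks as height and width.
+    // Text/application tracks need only the mime byte.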
+    if (mSize <= mReadIndex + sizeof(uint8_t)) {
+        return false;
+    }
+    size_t mimeTypeIdx = mData[mReadIndex] % kSupportedMimeTypes;
+    char *mime = (char *)supportedMimeTypes[mimeTypeIdx].c_str();
+    mParams[trackIndex].mime = mime;
+    mReadIndex += sizeof(uint8_t);
+
+    if (mSize > mReadIndex + 2 * sizeof(int32_t)) {
+        if (!strncmp(mime, "audio/", 6)) {
+            copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].channelCount));
+            copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].sampleRate));
+        } else if (!strncmp(mime, "video/", 6)) {
+            copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].height));
+            copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+                 reinterpret_cast<char *>(&mParams[trackIndex].width));
+        }
+        mReadIndex += 2 * sizeof(int32_t);
+    } else {
+        if (strncmp(mime, "text/", 5) && strncmp(mime, "application/", 12)) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void writeHeaderBuffers(vector<FrameData> &bufferInfo, sp<AMessage> &format, int32_t numCsds) {
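+    // Attach each codec-config frame to the track format as a "csd-<N>" buffer.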
+    char csdName[kMaxCSDStrlen];
+    for (int csdId = 0; csdId < numCsds; ++csdId) {
+        int32_t flags = bufferInfo[csdId].flags;
+        if (flags == kCodecConfigFlag) {
+            sp<ABuffer> csdBuffer =
+                ABuffer::CreateAsCopy((void *)bufferInfo[csdId].buf, bufferInfo[csdId].size);
+            if (csdBuffer.get() == nullptr || csdBuffer->base() == nullptr) {
+                return;
+            }
+            snprintf(csdName, sizeof(csdName), "csd-%d", csdId);
+            format->setBuffer(csdName, csdBuffer);
+        }
+    }
+}
+
+bool WriterFuzzerBase::createOutputFile() {
+    mFd = memfd_create(mOutputFileName.c_str(), MFD_ALLOW_SEALING);
+    if (mFd == -1) {
+        return false;
+    }
+    return true;
+}
+
+void WriterFuzzerBase::addWriterSource(int32_t trackIndex) {
+    ConfigFormat params = mBufferSource->getConfigFormat(trackIndex);
+    sp<AMessage> format = new AMessage;
+    format->setString("mime", params.mime);
+    if (!strncmp(params.mime, "audio/", 6)) {
+        if (!strncmp(params.mime, "audio/3gpp", 10)) {
+            params.channelCount = 1;
+            params.sampleRate = 8000;
+        } else if (!strncmp(params.mime, "audio/amr-wb", 12)) {
+            params.channelCount = 1;
+            params.sampleRate = 16000;
+        } else {
+            params.sampleRate = max(1, params.sampleRate);
+        }
+        format->setInt32("channel-count", params.channelCount);
+        format->setInt32("sample-rate", params.sampleRate);
+    } else if (!strncmp(params.mime, "video/", 6)) {
+        format->setInt32("width", params.width);
+        format->setInt32("height", params.height);
+    }
+    int32_t numCsds = mBufferSource->getNumCsds(trackIndex);
+    if (numCsds) {
+        vector<FrameData> mFrames = mBufferSource->getFrameList(trackIndex);
+        writeHeaderBuffers(mFrames, format, numCsds);
+    }
+    sp<MetaData> trackMeta = new MetaData;
+    convertMessageToMetaData(format, trackMeta);
+    mCurrentTrack[trackIndex] = new MediaAdapter(trackMeta);
+    mWriter->addSource(mCurrentTrack[trackIndex]);
+}
+
+void WriterFuzzerBase::start() {
+    mFileMeta->setInt32(kKeyRealTimeRecording, false);
+    mWriter->start(mFileMeta.get());
+}
+
+void WriterFuzzerBase::sendBuffersToWriter(sp<MediaAdapter> &currentTrack, int32_t trackIndex,
+                                           int32_t startFrameIndex, int32_t endFrameIndex) {
+    vector<FrameData> bufferInfo = mBufferSource->getFrameList(trackIndex);
+    for (int idx = startFrameIndex; idx < endFrameIndex; ++idx) {
+        sp<ABuffer> buffer = new ABuffer((void *)bufferInfo[idx].buf, bufferInfo[idx].size);
+        MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
+
+        // Released in MediaAdapter::signalBufferReturned().
+        mediaBuffer->add_ref();
+        mediaBuffer->set_range(buffer->offset(), buffer->size());
+        MetaDataBase &sampleMetaData = mediaBuffer->meta_data();
+        sampleMetaData.setInt64(kKeyTime, bufferInfo[idx].timeUs);
+
+        // Just set the kKeyDecodingTime as the presentation time for now.
+        sampleMetaData.setInt64(kKeyDecodingTime, bufferInfo[idx].timeUs);
+        if (bufferInfo[idx].flags == SampleFlag::SYNC_FLAG) {
+            sampleMetaData.setInt32(kKeyIsSyncFrame, true);
+        }
+
+        // This pushBuffer will wait until the mediaBuffer is consumed.
+        currentTrack->pushBuffer(mediaBuffer);
+    }
+}
+
+void WriterFuzzerBase::sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave) {
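+    // Start each track past its codec-config frames and push up to
+    // numBuffersInterleave frames per track per round, alternating across tracks
+    // until every track's frame list is exhausted.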
+    int32_t currentFrameIndex[numTracks], remainingNumFrames[numTracks], numTrackFramesDone;
+    for (int32_t idx = 0; idx < numTracks; ++idx) {
+        currentFrameIndex[idx] = mBufferSource->getNumCsds(idx);
+        remainingNumFrames[idx] = mBufferSource->getFrameList(idx).size() - currentFrameIndex[idx];
+    }
+    do {
+        numTrackFramesDone = numTracks;
+        for (int32_t idx = 0; idx < numTracks; ++idx) {
+            if (remainingNumFrames[idx] > 0) {
+                int32_t numFramesInterleave =
+                    min(remainingNumFrames[idx], static_cast<int32_t>(numBuffersInterleave));
+                sendBuffersToWriter(mCurrentTrack[idx], idx, currentFrameIndex[idx],
+                                    currentFrameIndex[idx] + numFramesInterleave);
+                currentFrameIndex[idx] += numFramesInterleave;
+                remainingNumFrames[idx] -= numFramesInterleave;
+                --numTrackFramesDone;
+            }
+        }
+    } while (numTrackFramesDone < numTracks);
+}
+
+void WriterFuzzerBase::initFileWriterAndProcessData(const uint8_t *data, size_t size) {
+    if (!createOutputFile()) {
+        return;
+    }
+    if (!createWriter()) {
+        return;
+    }
+
+    if (size < 1) {
+        return;
+    }
+    uint8_t numBuffersInterleave = (data[0] == 0 ? 1 : data[0]);
+    ++data;
+    --size;
+
+    mBufferSource = new BufferSource(data, size);
+    if (!mBufferSource) {
+        return;
+    }
+    mNumTracks = mBufferSource->getNumTracks();
+    if (mNumTracks > 0) {
+        for (int32_t idx = 0; idx < mNumTracks; ++idx) {
+            if (!mBufferSource->getTrackInfo(idx)) {
+                if (idx == 0) {
+                    delete mBufferSource;
+                    return;
+                }
+                mNumTracks = idx;
+                break;
+            }
+        }
+        mBufferSource->getFrameInfo();
+        for (int32_t idx = 0; idx < mNumTracks; ++idx) {
+            addWriterSource(idx);
+        }
+        start();
+        sendBuffersInterleave(mNumTracks, numBuffersInterleave);
+        for (int32_t idx = 0; idx < mNumTracks; ++idx) {
+            if (mCurrentTrack[idx]) {
+                mCurrentTrack[idx]->stop();
+            }
+        }
+    }
+    delete mBufferSource;
+    mWriter->stop();
+}
diff --git a/media/libstagefright/writer_fuzzers/amr_writer_fuzzer.cpp b/media/libstagefright/writer_fuzzers/amr_writer_fuzzer.cpp
new file mode 100644
index 0000000..bbb6f9f
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/amr_writer_fuzzer.cpp
@@ -0,0 +1,50 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "WriterFuzzerBase.h"
+
+#include <media/stagefright/AMRWriter.h>
+
+using namespace android;
+
+class AmrWriterFuzzer : public WriterFuzzerBase {
+   public:
+    bool createWriter();
+};
+
+bool AmrWriterFuzzer::createWriter() {
+    mWriter = new AMRWriter(mFd);
+    if (!mWriter) {
+        return false;
+    }
+    mFileMeta = new MetaData;
+#ifdef AMRNB
+    mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+#else
+    mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+#endif
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    AmrWriterFuzzer writerFuzzer;
+    writerFuzzer.initFileWriterAndProcessData(data, size);
+    return 0;
+}
diff --git a/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
new file mode 100644
index 0000000..4315322
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
@@ -0,0 +1,211 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#ifndef __WRITER_FUZZER_BASE_H__
+#define __WRITER_FUZZER_BASE_H__
+
+#include <media/stagefright/MediaAdapter.h>
+#include <media/stagefright/MediaWriter.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <algorithm>
+#include <cstring>
+#include <vector>
+
+using namespace std;
+
+constexpr uint32_t kMimeSize = 128;
+constexpr uint8_t kMaxTrackCount = 3;
+constexpr uint32_t kMaxCSDStrlen = 16;
+constexpr uint32_t kCodecConfigFlag = 32;
+
+namespace android {
+
+struct ConfigFormat {
+    char* mime;
+    int32_t width;
+    int32_t height;
+    int32_t sampleRate;
+    int32_t channelCount;
+};
+
+struct FrameData {
+    size_t size;
+    uint8_t flags;
+    int64_t timeUs;
+    const uint8_t* buf;
+};
+
+static string supportedMimeTypes[] = {"audio/3gpp",
+                                      "audio/amr-wb",
+                                      "audio/vorbis",
+                                      "audio/opus",
+                                      "audio/mp4a-latm",
+                                      "audio/mpeg",
+                                      "audio/mpeg-L1",
+                                      "audio/mpeg-L2",
+                                      "audio/midi",
+                                      "audio/qcelp",
+                                      "audio/g711-alaw",
+                                      "audio/g711-mlaw",
+                                      "audio/flac",
+                                      "audio/aac-adts",
+                                      "audio/gsm",
+                                      "audio/ac3",
+                                      "audio/eac3",
+                                      "audio/eac3-joc",
+                                      "audio/ac4",
+                                      "audio/scrambled",
+                                      "audio/alac",
+                                      "audio/x-ms-wma",
+                                      "audio/x-adpcm-ms",
+                                      "audio/x-adpcm-dvi-ima",
+                                      "video/avc",
+                                      "video/hevc",
+                                      "video/mp4v-es",
+                                      "video/3gpp",
+                                      "video/x-vnd.on2.vp8",
+                                      "video/x-vnd.on2.vp9",
+                                      "video/av01",
+                                      "video/mpeg2",
+                                      "video/dolby-vision",
+                                      "video/scrambled",
+                                      "video/divx",
+                                      "video/divx3",
+                                      "video/xvid",
+                                      "video/x-motion-jpeg",
+                                      "text/3gpp-tt",
+                                      "application/x-subrip",
+                                      "text/vtt",
+                                      "text/cea-608",
+                                      "text/cea-708",
+                                      "application/x-id3v4"};
+
+enum SampleFlag {
+    DEFAULT_FLAG = 0,
+    SYNC_FLAG = 1,
+    ENCRYPTED_FLAG = 2,
+};
+
+static uint8_t flagTypes[] = {SampleFlag::DEFAULT_FLAG, SampleFlag::SYNC_FLAG,
+                              SampleFlag::ENCRYPTED_FLAG};
+
+class WriterFuzzerBase {
+   public:
+    WriterFuzzerBase() = default;
+    virtual ~WriterFuzzerBase() {
+        if (mFileMeta) {
+            mFileMeta.clear();
+            mFileMeta = nullptr;
+        }
+        if (mWriter) {
+            mWriter.clear();
+            mWriter = nullptr;
+        }
+        for (int32_t idx = 0; idx < kMaxTrackCount; ++idx) {
+            if (mCurrentTrack[idx]) {
+                mCurrentTrack[idx]->stop();
+                mCurrentTrack[idx].clear();
+                mCurrentTrack[idx] = nullptr;
+            }
+        }
+        close(mFd);
+    };
+
+    /** Function to create the media writer component.
+     * To be implemented by the derived class.
+     */
+    virtual bool createWriter() = 0;
+
+    /** Parent class functions to be reused by derived class.
+     * These are common for all media writer components.
+     */
+    bool createOutputFile();
+
+    void addWriterSource(int32_t trackIndex);
+
+    void start();
+
+    void sendBuffersToWriter(sp<MediaAdapter>& currentTrack, int32_t trackIndex,
+                             int32_t startFrameIndex, int32_t endFrameIndex);
+
+    void sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave);
+
+    void initFileWriterAndProcessData(const uint8_t* data, size_t size);
+
+   protected:
+    class BufferSource {
+       public:
+        BufferSource(const uint8_t* data, size_t size) : mData(data), mSize(size), mReadIndex(0) {}
+        ~BufferSource() {
+            mData = nullptr;
+            mSize = 0;
+            mReadIndex = 0;
+            for (int32_t idx = 0; idx < kMaxTrackCount; ++idx) {
+                mFrameList[idx].clear();
+            }
+        }
+        uint32_t getNumTracks();
+        bool getTrackInfo(int32_t trackIndex);
+        void getFrameInfo();
+        ConfigFormat getConfigFormat(int32_t trackIndex);
+        int32_t getNumCsds(int32_t trackIndex);
+        vector<FrameData>& getFrameList(int32_t trackIndex);
+
+       private:
+        bool isMarker() { return (memcmp(&mData[mReadIndex], kMarker, kMarkerSize) == 0); }
+
+        bool isCSDMarker(size_t position) {
+            return (memcmp(&mData[position], kCsdMarkerSuffix, kMarkerSuffixSize) == 0);
+        }
+
+        bool searchForMarker(size_t startIndex);
+
+        const uint8_t* mData = nullptr;
+        size_t mSize = 0;
+        size_t mReadIndex = 0;
+        ConfigFormat mParams[kMaxTrackCount] = {};
+        int32_t mNumCsds[kMaxTrackCount] = {0};
+        vector<FrameData> mFrameList[kMaxTrackCount];
+
+        static constexpr int kSupportedMimeTypes = size(supportedMimeTypes);
+        static constexpr uint8_t kMarker[] = "_MARK";
+        static constexpr uint8_t kCsdMarkerSuffix[] = "_H_";
+        static constexpr uint8_t kFrameMarkerSuffix[] = "_F_";
+        // All markers should be 5 bytes long ( sizeof '_MARK' which is 5)
+        static constexpr size_t kMarkerSize = (sizeof(kMarker) - 1);
+        // All marker types should be 3 bytes long ('_H_', '_F_')
+        static constexpr size_t kMarkerSuffixSize = 3;
+    };
+
+    BufferSource* mBufferSource = nullptr;
+    int32_t mFd = -1;
+    uint32_t mNumTracks = 0;
+    string mOutputFileName = "writer.out";
+    sp<MediaWriter> mWriter = nullptr;
+    sp<MetaData> mFileMeta = nullptr;
+    sp<MediaAdapter> mCurrentTrack[kMaxTrackCount] = {};
+};
+
+}  // namespace android
+
+#endif  // __WRITER_FUZZER_BASE_H__
diff --git a/media/libstagefright/writer_fuzzers/mpeg4_writer_fuzzer.cpp b/media/libstagefright/writer_fuzzers/mpeg4_writer_fuzzer.cpp
new file mode 100644
index 0000000..99bd2b6
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/mpeg4_writer_fuzzer.cpp
@@ -0,0 +1,46 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "WriterFuzzerBase.h"
+
+#include <media/stagefright/MPEG4Writer.h>
+
+using namespace android;
+
+class Mpeg4WriterFuzzer : public WriterFuzzerBase {
+   public:
+    bool createWriter();
+};
+
+bool Mpeg4WriterFuzzer::createWriter() {
+    mWriter = new MPEG4Writer(mFd);
+    if (!mWriter) {
+        return false;
+    }
+    mFileMeta = new MetaData;
+    mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    Mpeg4WriterFuzzer writerFuzzer;
+    writerFuzzer.initFileWriterAndProcessData(data, size);
+    return 0;
+}
diff --git a/media/libstagefright/writer_fuzzers/ogg_writer_fuzzer.cpp b/media/libstagefright/writer_fuzzers/ogg_writer_fuzzer.cpp
new file mode 100644
index 0000000..ae9c94c
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/ogg_writer_fuzzer.cpp
@@ -0,0 +1,46 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "WriterFuzzerBase.h"
+
+#include <media/stagefright/OggWriter.h>
+
+using namespace android;
+
+class OGGWriterFuzzer : public WriterFuzzerBase {
+   public:
+    bool createWriter();
+};
+
+bool OGGWriterFuzzer::createWriter() {
+    mWriter = new OggWriter(mFd);
+    if (!mWriter) {
+        return false;
+    }
+    mFileMeta = new MetaData;
+    mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    OGGWriterFuzzer writerFuzzer;
+    writerFuzzer.initFileWriterAndProcessData(data, size);
+    return 0;
+}
diff --git a/media/libstagefright/writer_fuzzers/webm_writer_fuzzer.cpp b/media/libstagefright/writer_fuzzers/webm_writer_fuzzer.cpp
new file mode 100644
index 0000000..0a91b72
--- /dev/null
+++ b/media/libstagefright/writer_fuzzers/webm_writer_fuzzer.cpp
@@ -0,0 +1,46 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include "WriterFuzzerBase.h"
+
+#include <webm/WebmWriter.h>
+
+using namespace android;
+
+class WEBMWriterFuzzer : public WriterFuzzerBase {
+   public:
+    bool createWriter();
+};
+
+bool WEBMWriterFuzzer::createWriter() {
+    mWriter = new WebmWriter(mFd);
+    if (!mWriter) {
+        return false;
+    }
+    mFileMeta = new MetaData;
+    mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    WEBMWriterFuzzer writerFuzzer;
+    writerFuzzer.initFileWriterAndProcessData(data, size);
+    return 0;
+}
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 7ed0e88..055dd80 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_library_headers {
     name: "libstagefright_xmlparser_headers",
     export_include_dirs: ["include"],
@@ -49,4 +58,3 @@
     srcs: ["media_codecs.xsd"],
     package_name: "media.codecs",
 }
-
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index a232150..67c6102 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -146,7 +146,10 @@
         };
     static std::vector<std::string> names = {
             prefixes[0] + variants[0] + ".xml",
-            prefixes[1] + variants[1] + ".xml"
+            prefixes[1] + variants[1] + ".xml",
+
+            // shaping information is not currently variant specific.
+            "media_codecs_shaping.xml"
         };
     return names;
 }
@@ -346,6 +349,8 @@
         status_t addAlias(const char **attrs);
         status_t addFeature(const char **attrs);
         status_t addLimit(const char **attrs);
+        status_t addMapping(const char **attrs);
+        status_t addTuning(const char **attrs);
         status_t addQuirk(const char **attrs, const char *prefix = nullptr);
         status_t addSetting(const char **attrs, const char *prefix = nullptr);
         status_t enterMediaCodec(const char **attrs, bool encoder);
@@ -428,7 +433,7 @@
         if (findFileInDirs(searchDirs, fileName, &path)) {
             err = parseXmlPath(path);
         } else {
-            ALOGD("Cannot find %s", path.c_str());
+            ALOGI("Did not find %s in search path", fileName.c_str());
         }
         res = combineStatus(res, err);
     }
@@ -438,7 +443,7 @@
 status_t MediaCodecsXmlParser::Impl::parseXmlPath(const std::string &path) {
     std::lock_guard<std::mutex> guard(mLock);
     if (!fileExists(path)) {
-        ALOGD("Cannot find %s", path.c_str());
+        ALOGV("Cannot find %s", path.c_str());
         mParsingStatus = combineStatus(mParsingStatus, NAME_NOT_FOUND);
         return NAME_NOT_FOUND;
     }
@@ -493,7 +498,7 @@
       mPath(path),
       mStatus(NO_INIT) {
     // determine href_base
-    std::string::size_type end = path.rfind("/");
+    std::string::size_type end = path.rfind('/');
     if (end != std::string::npos) {
         mHrefBase = path.substr(0, end + 1);
     }
@@ -741,13 +746,19 @@
         {
             // ignore limits and features specified outside of type
             if (!mState->inType()
-                    && (strEq(name, "Limit") || strEq(name, "Feature") || strEq(name, "Variant"))) {
+                    && (strEq(name, "Limit") || strEq(name, "Feature")
+                        || strEq(name, "Variant") || strEq(name, "Mapping")
+                        || strEq(name, "Tuning"))) {
                 PLOGD("ignoring %s specified outside of a Type", name);
                 return;
             } else if (strEq(name, "Limit")) {
                 err = addLimit(attrs);
             } else if (strEq(name, "Feature")) {
                 err = addFeature(attrs);
+            } else if (strEq(name, "Mapping")) {
+                err = addMapping(attrs);
+            } else if (strEq(name, "Tuning")) {
+                err = addTuning(attrs);
             } else if (strEq(name, "Variant") && section != SECTION_VARIANT) {
                 err = limitVariants(attrs);
                 mState->enterSection(err == OK ? SECTION_VARIANT : SECTION_UNKNOWN);
@@ -981,7 +992,9 @@
     TypeMap::iterator typeIt;
     if (codecIt == mData->mCodecMap.end()) { // New codec name
         if (updating) {
-            return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing codec" };
+            std::string msg = "MediaCodec: cannot update non-existing codec: ";
+            msg = msg + name;
+            return { NAME_NOT_FOUND, msg };
         }
         // Create a new codec in mCodecMap
         codecIt = mData->mCodecMap.insert(Codec(name, CodecProperties())).first;
@@ -994,19 +1007,25 @@
         codecIt->second.order = mData->mCodecMap.size();
     } else { // Existing codec name
         if (!updating) {
-            return { ALREADY_EXISTS, "MediaCodec: cannot add existing codec" };
+            std::string msg = "MediaCodec: cannot add existing codec: ";
+            msg = msg + name;
+            return { ALREADY_EXISTS, msg };
         }
         if (type != nullptr) {
             typeIt = codecIt->second.typeMap.find(type);
             if (typeIt == codecIt->second.typeMap.end()) {
-                return { NAME_NOT_FOUND, "MediaCodec: cannot update non-existing type for codec" };
+                std::string msg = "MediaCodec: cannot update non-existing type for codec: ";
+                msg = msg + name;
+                return { NAME_NOT_FOUND, msg };
             }
         } else {
             // This should happen only when the codec has at most one type.
             typeIt = codecIt->second.typeMap.begin();
             if (typeIt == codecIt->second.typeMap.end()
                     || codecIt->second.typeMap.size() != 1) {
-                return { BAD_VALUE, "MediaCodec: cannot update codec without type specified" };
+                std::string msg = "MediaCodec: cannot update codec without type specified: ";
+                msg = msg + name;
+                return { BAD_VALUE, msg };
             }
         }
     }
@@ -1386,6 +1405,92 @@
     return OK;
 }
 
+status_t MediaCodecsXmlParser::Impl::Parser::addMapping(const char **attrs) {
+    CHECK(mState->inType());
+    size_t i = 0;
+    const char *a_name = nullptr;
+    const char *a_value = nullptr;
+    const char *a_kind = nullptr;
+
+    while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Mapping: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "kind")) {
+            a_kind = attrs[++i];
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Mapping: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
+        ++i;
+    }
+
+    // Every mapping must have all 3 fields
+    if (a_name == nullptr) {
+        PLOGD("Mapping with no 'name' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_kind == nullptr) {
+        PLOGD("Mapping with no 'kind' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_value == nullptr) {
+        PLOGD("Mapping with no 'value' attribute");
+        return BAD_VALUE;
+    }
+
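+    // Record the mapping as a per-type detail keyed "mapping-<kind>-<name>".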
+    mState->addDetail(std::string("mapping-") + a_kind + "-" + a_name, a_value);
+    return OK;
+}
+
+status_t MediaCodecsXmlParser::Impl::Parser::addTuning(const char **attrs) {
+    CHECK(mState->inType());
+    size_t i = 0;
+    const char *a_name = nullptr;
+    const char *a_value = nullptr;
+
+    while (attrs[i] != nullptr) {
+        CHECK((i & 1) == 0);
+        if (attrs[i + 1] == nullptr) {
+            PLOGD("Mapping: attribute '%s' is null", attrs[i]);
+            return BAD_VALUE;
+        }
+
+        if (strEq(attrs[i], "name")) {
+            a_name = attrs[++i];
+        } else if (strEq(attrs[i], "value")) {
+            a_value = attrs[++i];
+        } else {
+            PLOGD("Tuning: ignoring unrecognized attribute '%s'", attrs[i]);
+            ++i;
+        }
+        ++i;
+    }
+
+    // Every tuning must have both fields
+    if (a_name == nullptr) {
+        PLOGD("Tuning with no 'name' attribute");
+        return BAD_VALUE;
+    }
+
+    if (a_value == nullptr) {
+        PLOGD("Tuning with no 'value' attribute");
+        return BAD_VALUE;
+    }
+
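+    // Record the tuning as a per-type detail keyed "tuning-<name>".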
+    mState->addDetail(std::string("tuning-") + a_name, a_value);
+    return OK;
+}
+
 status_t MediaCodecsXmlParser::Impl::Parser::addAlias(const char **attrs) {
     CHECK(mState->inCodec());
     size_t i = 0;
@@ -1525,7 +1630,7 @@
                 nodeInfo.attributeList.push_back(Attribute{"rank", rank});
             }
             nodeList->insert(std::make_pair(
-                    std::move(order), std::move(nodeInfo)));
+                    order, std::move(nodeInfo)));
         }
     }
 }
diff --git a/media/libstagefright/xmlparser/TEST_MAPPING b/media/libstagefright/xmlparser/TEST_MAPPING
new file mode 100644
index 0000000..8626d72
--- /dev/null
+++ b/media/libstagefright/xmlparser/TEST_MAPPING
@@ -0,0 +1,6 @@
+// test mapping for frameworks/av/media/libstagefright/xmlparser
+{
+  "presubmit": [
+    { "name": "XMLParserTest" }
+  ]
+}
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index 16c8af8..ecfd85e 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -65,6 +65,16 @@
     method public void set_default(String);
   }
 
+  public class Mapping {
+    ctor public Mapping();
+    method public String getKind();
+    method public String getName();
+    method public String getValue();
+    method public void setKind(String);
+    method public void setName(String);
+    method public void setValue(String);
+  }
+
   public class MediaCodec {
     ctor public MediaCodec();
     method public java.util.List<media.codecs.Alias> getAlias_optional();
@@ -73,9 +83,11 @@
     method public String getEnabled();
     method public java.util.List<media.codecs.Feature> getFeature_optional();
     method public java.util.List<media.codecs.Limit> getLimit_optional();
+    method public java.util.List<media.codecs.Mapping> getMapping_optional();
     method public String getName();
     method public java.util.List<media.codecs.Quirk> getQuirk_optional();
     method public String getRank();
+    method public java.util.List<media.codecs.Tuning> getTuning_optional();
     method public String getType();
     method public java.util.List<media.codecs.Type> getType_optional();
     method public String getUpdate();
@@ -125,6 +137,14 @@
     method public java.util.List<media.codecs.Setting> getVariant_optional();
   }
 
+  public class Tuning {
+    ctor public Tuning();
+    method public String getName();
+    method public String getValue();
+    method public void setName(String);
+    method public void setValue(String);
+  }
+
   public class Type {
     ctor public Type();
     method public java.util.List<media.codecs.Alias> getAlias();
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index 3b5681f..c9a7efc 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -63,6 +63,8 @@
             <xs:element name="Alias" type="Alias" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Limit" type="Limit" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Feature" type="Feature" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Mapping" type="Mapping" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Tuning" type="Tuning" minOccurs="0" maxOccurs="unbounded"/>
             <xs:element name="Variant" type="Variant" minOccurs="0" maxOccurs="unbounded"/>
         </xs:choice>
         <xs:attribute name="name" type="xs:string"/>
@@ -122,6 +124,15 @@
         <xs:attribute name="enabled" type="xs:string"/>
         <xs:attribute name="update" type="xs:string"/>
     </xs:complexType>
+    <xs:complexType name="Mapping">
+        <xs:attribute name="name" type="xs:string"/>
+        <xs:attribute name="kind" type="xs:string"/>
+        <xs:attribute name="value" type="xs:string"/>
+    </xs:complexType>
+    <xs:complexType name="Tuning">
+        <xs:attribute name="name" type="xs:string"/>
+        <xs:attribute name="value" type="xs:string"/>
+    </xs:complexType>
     <xs:complexType name="Include">
         <xs:attribute name="href" type="xs:string"/>
     </xs:complexType>
diff --git a/media/libstagefright/xmlparser/test/Android.bp b/media/libstagefright/xmlparser/test/Android.bp
new file mode 100644
index 0000000..06a52f1
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/Android.bp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_test {
+    name: "XMLParserTest",
+    test_suites: ["device-tests"],
+    gtest: true,
+
+    srcs: [
+        "XMLParserTest.cpp",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libstagefright_xmlparser",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    data: [":xmlparsertest_test_files",],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+filegroup {
+    name: "xmlparsertest_test_files",
+    srcs: [
+        "testdata/media_codecs_unit_test.xml",
+        "testdata/media_codecs_unit_test_caller.xml",
+    ],
+}
diff --git a/media/libstagefright/xmlparser/test/AndroidTest.xml b/media/libstagefright/xmlparser/test/AndroidTest.xml
new file mode 100644
index 0000000..2e11b1b
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for xml parser unit test">
+    <option name="test-suite-tag" value="XMLParserTest" />
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="media_codecs_unit_test.xml->/data/local/tmp/media_codecs_unit_test.xml" />
+        <option name="push" value="media_codecs_unit_test_caller.xml->/data/local/tmp/media_codecs_unit_test_caller.xml" />
+        <option name="push" value="XMLParserTest->/data/local/tmp/XMLParserTest" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="XMLParserTest" />
+    </test>
+</configuration>
diff --git a/media/libstagefright/xmlparser/test/README.md b/media/libstagefright/xmlparser/test/README.md
new file mode 100644
index 0000000..e9363fd
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/README.md
@@ -0,0 +1,33 @@
+## Media Testing ##
+---
+#### XML Parser
+The XMLParser Test Suite validates the XMLParser available in libstagefright.
+
+Run the following steps to build the test suite:
+```
+m XMLParserTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/XMLParserTest/XMLParserTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/XMLParserTest/XMLParserTest /data/local/tmp/
+```
+
+usage: XMLParserTest
+```
+adb shell /data/local/tmp/XMLParserTest
+```
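+
+The test reads media_codecs_unit_test.xml and media_codecs_unit_test_caller.xml from its resource
+directory, which defaults to /data/local/tmp/ and can be overridden with the -P (--path) option
+defined in XMLParserTestEnvironment.h. A minimal sketch, assuming the commands are run from the
+top of the source tree:
+```
+adb push frameworks/av/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml /data/local/tmp/
+adb push frameworks/av/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml /data/local/tmp/
+adb shell /data/local/tmp/XMLParserTest -P /data/local/tmp/
+```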
+Alternatively, the test can also be run using the atest command.
+
+```
+atest XMLParserTest
+```
diff --git a/media/libstagefright/xmlparser/test/XMLParserTest.cpp b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
new file mode 100644
index 0000000..7629d97
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
@@ -0,0 +1,421 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "XMLParserTest"
+
+#include <utils/Log.h>
+
+#include <fstream>
+
+#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+
+#include "XMLParserTestEnvironment.h"
+
+#define XML_FILE_NAME "media_codecs_unit_test_caller.xml"
+
+using namespace android;
+
+static XMLParserTestEnvironment *gEnv = nullptr;
+
+struct CodecProperties {
+    string codecName;
+    MediaCodecsXmlParser::CodecProperties codecProp;
+};
+
+struct RoleProperties {
+    string roleName;
+    string typeName;
+    string codecName;
+    bool isEncoder;
+    size_t order;
+    vector<pair<string, string>> attributeMap;
+};
+
+class XMLParseTest : public ::testing::Test {
+  public:
+    ~XMLParseTest() {
+        if (mEleStream.is_open()) mEleStream.close();
+        mInputDataVector.clear();
+        mInputRoleVector.clear();
+    }
+
+    virtual void SetUp() override { setUpDatabase(); }
+
+    void setUpDatabase();
+
+    void setCodecProperties(string codecName, bool isEncoder, int32_t order, set<string> quirkSet,
+                            set<string> domainSet, set<string> variantSet, string typeName,
+                            vector<pair<string, string>> domain, vector<string> aliases,
+                            string rank);
+
+    void setRoleProperties(string roleName, bool isEncoder, int32_t order, string typeName,
+                           string codecName, vector<pair<string, string>> domain);
+
+    void setServiceAttribute(map<string, string> serviceAttributeNameValuePair);
+
+    void printCodecMap(const MediaCodecsXmlParser::Codec mcodec);
+
+    void checkRoleMap(int32_t index, bool isEncoder, string typeName, string codecName,
+                      vector<pair<string, string>> attrMap);
+
+    bool compareMap(const map<string, string> &lhs, const map<string, string> &rhs);
+
+    ifstream mEleStream;
+    MediaCodecsXmlParser mParser;
+    vector<CodecProperties> mInputDataVector;
+    vector<RoleProperties> mInputRoleVector;
+    map<string, string> mInputServiceAttributeMap;
+};
+
+void XMLParseTest::setUpDatabase() {
+    // The values set below are specific to test vector testdata/media_codecs_unit_test.xml
+    setCodecProperties("test1.decoder", false, 1, {"attribute::disabled", "quirk::quirk1"},
+                       {"telephony"}, {}, "audio/mpeg", {}, {"alias1.decoder"}, "4");
+
+    setCodecProperties("test2.decoder", false, 2, {"quirk::quirk1"}, {}, {}, "audio/3gpp", {}, {},
+                       "");
+
+    setCodecProperties("test3.decoder", false, 3, {}, {}, {}, "audio/amr-wb",
+                       {
+                               pair<string, string>("feature-feature1", "feature1Val"),
+                               pair<string, string>("feature-feature2", "0"),
+                               pair<string, string>("feature-feature3", "0"),
+                       },
+                       {}, "");
+
+    setCodecProperties("test4.decoder", false, 4, {}, {}, {}, "audio/flac",
+                       {pair<string, string>("feature-feature1", "feature1Val")}, {}, "");
+
+    setCodecProperties("test5.decoder", false, 5, {"attribute::attributeQuirk1"}, {}, {},
+                       "audio/g711-mlaw", {}, {}, "");
+
+    setCodecProperties("test6.decoder", false, 6, {}, {}, {"variant1", "variant2"},
+                       "audio/mp4a-latm",
+                       {pair<string, string>("variant1:::variant1Limit1-range",
+                                             "variant1Limit1Min-variant1Limit1Max"),
+                        pair<string, string>("variant1:::variant1Limit2-range",
+                                             "variant1Limit2Low-variant1Limit2High"),
+                        pair<string, string>("variant2:::variant2Limit1", "variant2Limit1Value")},
+                       {}, "");
+
+    setCodecProperties(
+            "test7.decoder", false, 7, {}, {}, {}, "audio/vorbis",
+            {
+                    pair<string, string>("-min-limit1", "limit1Min"),
+                    /*pair<string, string>("limit1-in", "limit1In"),*/
+                    pair<string, string>("limit2-range", "limit2Min-limit2Max"),
+                    pair<string, string>("limit2-scale", "limit2Scale"),
+                    pair<string, string>("limit3-default", "limit3Val3"),
+                    pair<string, string>("limit3-ranges", "limit3Val1,limit3Val2,limit3Val3"),
+            },
+            {}, "");
+
+    setCodecProperties("test8.encoder", true, 8, {}, {}, {}, "audio/opus",
+                       {pair<string, string>("max-limit1", "limit1Max")}, {}, "");
+
+    setCodecProperties("test9.encoder", true, 9, {}, {}, {}, "video/avc",
+           {
+                   pair<string, string>("mapping-sure-before", "after"),
+           },
+           {}, "");
+
+    setCodecProperties("test10.encoder", true, 10, {}, {}, {}, "video/hevc",
+           {
+                   pair<string, string>("mapping-fire-from", "to"),
+           },
+           {}, "");
+    setCodecProperties("test11.encoder", true, 11, {}, {}, {}, "video/av01",
+           {
+                   pair<string, string>("tuning-hungry", "yes"),
+                   pair<string, string>("tuning-pi", "3.1415"),
+           },
+           {}, "");
+
+    setRoleProperties("audio_decoder.mp3", false, 1, "audio/mpeg", "test1.decoder",
+                      {pair<string, string>("attribute::disabled", "present"),
+                       pair<string, string>("rank", "4")});
+
+    setRoleProperties("audio_decoder.amrnb", false, 2, "audio/3gpp", "test2.decoder", {});
+
+    setRoleProperties("audio_decoder.amrwb", false, 3, "audio/amr-wb", "test3.decoder",
+                      {pair<string, string>("feature-feature1", "feature1Val"),
+                       pair<string, string>("feature-feature2", "0"),
+                       pair<string, string>("feature-feature3", "0")});
+
+    setRoleProperties("audio/flac", false, 4, "audio/flac", "test4.decoder",
+                      {pair<string, string>("feature-feature1", "feature1Val")});
+
+    setRoleProperties("audio_decoder.g711mlaw", false, 5, "audio/g711-mlaw", "test5.decoder",
+                      {pair<string, string>("attribute::attributeQuirk1", "present")});
+
+    setRoleProperties("audio_decoder.aac", false, 6, "audio/mp4a-latm", "test6.decoder",
+                      {pair<string, string>("variant1:::variant1Limit1-range",
+                                            "variant1Limit1Min-variant1Limit1Max"),
+                       pair<string, string>("variant1:::variant1Limit2-range",
+                                            "variant1Limit2Low-variant1Limit2High"),
+                       pair<string, string>("variant2:::variant2Limit1", "variant2Limit1Value")});
+
+    setRoleProperties("audio_decoder.vorbis", false, 7, "audio/vorbis", "test7.decoder",
+                      {pair<string, string>("-min-limit1", "limit1Min"),
+                       /*pair<string, string>("limit1-in", "limit1In"),*/
+                       pair<string, string>("limit2-range", "limit2Min-limit2Max"),
+                       pair<string, string>("limit2-scale", "limit2Scale"),
+                       pair<string, string>("limit3-default", "limit3Val3"),
+                       pair<string, string>("limit3-ranges", "limit3Val1,limit3Val2,limit3Val3")});
+
+    setRoleProperties("audio_encoder.opus", true, 8, "audio/opus", "test8.encoder",
+                      {pair<string, string>("max-limit1", "limit1Max")});
+
+    setRoleProperties("video_encoder.avc", true, 9, "video/avc", "test9.encoder",
+                       {pair<string, string>("mapping-sure-before", "after")});
+
+    setRoleProperties("video_encoder.hevc", true, 10, "video/hevc", "test10.encoder",
+                       { pair<string, string>("mapping-fire-from", "to")});
+
+    setRoleProperties("video_encoder.av01", true, 11, "video/av01", "test11.encoder",
+                       {pair<string, string>("tuning-hungry", "yes"),
+                        pair<string, string>("tuning-pi", "3.1415")
+                       });
+
+    setServiceAttribute(
+            {pair<string, string>("domain-telephony", "0"), pair<string, string>("domain-tv", "0"),
+             pair<string, string>("setting2", "0"), pair<string, string>("variant-variant1", "0")});
+}
+
+bool XMLParseTest::compareMap(const map<string, string> &lhs, const map<string, string> &rhs) {
+    return lhs.size() == rhs.size() && equal(lhs.begin(), lhs.end(), rhs.begin());
+}
+
+void XMLParseTest::setCodecProperties(string codecName, bool isEncoder, int32_t order,
+                                      set<string> quirkSet, set<string> domainSet,
+                                      set<string> variantSet, string typeName,
+                                      vector<pair<string, string>> domain, vector<string> aliases,
+                                      string rank) {
+    map<string, string> AttributeMapDB;
+    for (const auto &AttrStr : domain) {
+        AttributeMapDB.insert(AttrStr);
+    }
+    map<string, MediaCodecsXmlParser::AttributeMap> TypeMapDataBase;
+    TypeMapDataBase.insert(
+            pair<string, MediaCodecsXmlParser::AttributeMap>(typeName, AttributeMapDB));
+    CodecProperties codecProperty;
+    codecProperty.codecName = codecName;
+    codecProperty.codecProp.isEncoder = isEncoder;
+    codecProperty.codecProp.order = order;
+    codecProperty.codecProp.quirkSet = quirkSet;
+    codecProperty.codecProp.domainSet = domainSet;
+    codecProperty.codecProp.variantSet = variantSet;
+    codecProperty.codecProp.typeMap = TypeMapDataBase;
+    codecProperty.codecProp.aliases = aliases;
+    codecProperty.codecProp.rank = rank;
+    mInputDataVector.push_back(codecProperty);
+}
+
+void XMLParseTest::setRoleProperties(string roleName, bool isEncoder, int32_t order,
+                                     string typeName, string codecName,
+                                     vector<pair<string, string>> attributeNameValuePair) {
+    struct RoleProperties roleProperty;
+    roleProperty.roleName = roleName;
+    roleProperty.typeName = typeName;
+    roleProperty.codecName = codecName;
+    roleProperty.isEncoder = isEncoder;
+    roleProperty.order = order;
+    roleProperty.attributeMap = attributeNameValuePair;
+    mInputRoleVector.push_back(roleProperty);
+}
+
+void XMLParseTest::setServiceAttribute(map<string, string> serviceAttributeNameValuePair) {
+    for (const auto &serviceAttrStr : serviceAttributeNameValuePair) {
+        mInputServiceAttributeMap.insert(serviceAttrStr);
+    }
+}
+
+void XMLParseTest::printCodecMap(const MediaCodecsXmlParser::Codec mcodec) {
+    const string &name = mcodec.first;
+    ALOGV("codec name = %s\n", name.c_str());
+    const MediaCodecsXmlParser::CodecProperties &properties = mcodec.second;
+    bool isEncoder = properties.isEncoder;
+    ALOGV("isEncoder = %d\n", isEncoder);
+    size_t order = properties.order;
+    ALOGV("order = %zu\n", order);
+    string rank = properties.rank;
+    ALOGV("rank = %s\n", rank.c_str());
+
+    for (auto &itrQuirkSet : properties.quirkSet) {
+        ALOGV("quirkSet= %s", itrQuirkSet.c_str());
+    }
+
+    for (auto &itrDomainSet : properties.domainSet) {
+        ALOGV("domainSet= %s", itrDomainSet.c_str());
+    }
+
+    for (auto &itrVariantSet : properties.variantSet) {
+        ALOGV("variantSet= %s", itrVariantSet.c_str());
+    }
+
+    map<string, MediaCodecsXmlParser::AttributeMap> TypeMap = properties.typeMap;
+    ALOGV("The TypeMap is :");
+
+    for (auto &itrTypeMap : TypeMap) {
+        ALOGV("itrTypeMap->first\t%s\t", itrTypeMap.first.c_str());
+
+        for (auto &itrAttributeMap : itrTypeMap.second) {
+            ALOGV("AttributeMap->first = %s", itrAttributeMap.first.c_str());
+            ALOGV("AttributeMap->second = %s", itrAttributeMap.second.c_str());
+        }
+    }
+}
+
+void XMLParseTest::checkRoleMap(int32_t index, bool isEncoder, string typeName, string codecName,
+                                vector<pair<string, string>> AttributePairMap) {
+    ASSERT_EQ(isEncoder, mInputRoleVector.at(index).isEncoder)
+            << "Invalid RoleMap data. IsEncoder mismatch";
+    ASSERT_EQ(typeName, mInputRoleVector.at(index).typeName)
+            << "Invalid RoleMap data. typeName mismatch";
+    ASSERT_EQ(codecName, mInputRoleVector.at(index).codecName)
+            << "Invalid RoleMap data. codecName mismatch";
+
+    vector<pair<string, string>>::iterator itr_attributeMapDB =
+            (mInputRoleVector.at(index).attributeMap).begin();
+    vector<pair<string, string>>::iterator itr_attributeMap = AttributePairMap.begin();
+    for (; itr_attributeMap != AttributePairMap.end() &&
+           itr_attributeMapDB != mInputRoleVector.at(index).attributeMap.end();
+         ++itr_attributeMap, ++itr_attributeMapDB) {
+        string attributeName = itr_attributeMap->first;
+        string attributeNameDB = itr_attributeMapDB->first;
+        string attributevalue = itr_attributeMap->second;
+        string attributeValueDB = itr_attributeMapDB->second;
+        ASSERT_EQ(attributeName, attributeNameDB)
+                << "Invalid RoleMap data. Attribute name mismatch\t" << attributeName << " != "
+                << "attributeNameDB";
+        ASSERT_EQ(attributevalue, attributeValueDB)
+                << "Invalid RoleMap data. Attribute value mismatch\t" << attributevalue << " != "
+                << "attributeValueDB";
+    }
+}
+
+TEST_F(XMLParseTest, CodecMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+    for (const MediaCodecsXmlParser::Codec &mcodec : mParser.getCodecMap()) {
+        printCodecMap(mcodec);
+        const MediaCodecsXmlParser::CodecProperties &properties = mcodec.second;
+        int32_t index = properties.order - 1;
+        ASSERT_GE(index, 0) << "Invalid order";
+        ASSERT_EQ(mInputDataVector.at(index).codecName, mcodec.first.c_str())
+                << "Invalid CodecMap data. codecName mismatch";
+        ASSERT_EQ(properties.isEncoder, mInputDataVector.at(index).codecProp.isEncoder)
+                << "Invalid CodecMap data. isEncoder mismatch";
+        ASSERT_EQ(properties.order, mInputDataVector.at(index).codecProp.order)
+                << "Invalid CodecMap data. order mismatch";
+
+        set<string> quirkSetDB = mInputDataVector.at(index).codecProp.quirkSet;
+        set<string> quirkSet = properties.quirkSet;
+        set<string> quirkDifference;
+        set_difference(quirkSetDB.begin(), quirkSetDB.end(), quirkSet.begin(), quirkSet.end(),
+                       inserter(quirkDifference, quirkDifference.end()));
+        ASSERT_EQ(quirkDifference.size(), 0) << "CodecMap:quirk mismatch";
+
+        map<string, MediaCodecsXmlParser::AttributeMap> TypeMapDB =
+                mInputDataVector.at(index).codecProp.typeMap;
+        map<string, MediaCodecsXmlParser::AttributeMap> TypeMap = properties.typeMap;
+        map<string, MediaCodecsXmlParser::AttributeMap>::iterator itr_TypeMapDB = TypeMapDB.begin();
+        map<string, MediaCodecsXmlParser::AttributeMap>::iterator itr_TypeMap = TypeMap.begin();
+
+        ASSERT_EQ(TypeMapDB.size(), TypeMap.size())
+                << "Invalid CodecMap data. Typemap size mismatch";
+
+        for (; itr_TypeMap != TypeMap.end() && itr_TypeMapDB != TypeMapDB.end();
+             ++itr_TypeMap, ++itr_TypeMapDB) {
+            ASSERT_EQ(itr_TypeMap->first, itr_TypeMapDB->first)
+                    << "Invalid CodecMap data. type mismatch";
+            bool flag = compareMap(itr_TypeMap->second, itr_TypeMapDB->second);
+            ASSERT_TRUE(flag) << "typeMap mismatch";
+        }
+        ASSERT_EQ(mInputDataVector.at(index).codecProp.rank, properties.rank)
+                << "Invalid CodecMap data. rank mismatch";
+    }
+}
+
+TEST_F(XMLParseTest, RoleMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+
+    for (auto &mRole : mParser.getRoleMap()) {
+        typedef pair<string, string> Attribute;
+        const string &roleName = mRole.first;
+        ALOGV("Role map:name = %s\n", roleName.c_str());
+        const MediaCodecsXmlParser::RoleProperties &properties = mRole.second;
+        string type = properties.type;
+        ALOGV("Role map: type = %s\n", type.c_str());
+
+        bool isEncoder = properties.isEncoder;
+        ALOGV("Role map: isEncoder = %d\n", isEncoder);
+
+        multimap<size_t, MediaCodecsXmlParser::NodeInfo> nodeList = properties.nodeList;
+        multimap<size_t, MediaCodecsXmlParser::NodeInfo>::iterator itr_Node;
+        ALOGV("\nThe multimap nodeList is : \n");
+        for (itr_Node = nodeList.begin(); itr_Node != nodeList.end(); ++itr_Node) {
+            ALOGV("itr_Node->first=ORDER=\t%zu\t", itr_Node->first);
+            int32_t index = itr_Node->first - 1;
+            MediaCodecsXmlParser::NodeInfo nodePtr = itr_Node->second;
+            ALOGV("Role map:itr_Node->second.name = %s\n", nodePtr.name.c_str());
+            vector<Attribute> attrList = nodePtr.attributeList;
+            for (auto attrNameValueList = attrList.begin(); attrNameValueList != attrList.end();
+                 ++attrNameValueList) {
+                ALOGV("Role map:nodePtr.attributeList->first = %s\n",
+                      attrNameValueList->first.c_str());
+                ALOGV("Role map:nodePtr.attributeList->second = %s\n",
+                      attrNameValueList->second.c_str());
+            }
+            checkRoleMap(index, isEncoder, properties.type, nodePtr.name.c_str(), attrList);
+        }
+    }
+}
+
+TEST_F(XMLParseTest, ServiceAttributeMapParseTest) {
+    string inputFileName = gEnv->getRes() + XML_FILE_NAME;
+    mEleStream.open(inputFileName, ifstream::binary);
+    ASSERT_EQ(mEleStream.is_open(), true) << "Failed to open inputfile " << inputFileName;
+
+    mParser.parseXmlPath(inputFileName);
+    const auto serviceAttributeMap = mParser.getServiceAttributeMap();
+    for (const auto &attributePair : serviceAttributeMap) {
+        ALOGV("serviceAttribute.key = %s \t serviceAttribute.value = %s",
+              attributePair.first.c_str(), attributePair.second.c_str());
+    }
+    bool flag = compareMap(mInputServiceAttributeMap, serviceAttributeMap);
+    ASSERT_TRUE(flag) << "ServiceMapParseTest: typeMap mismatch";
+}
+
+int main(int argc, char **argv) {
+    gEnv = new XMLParserTestEnvironment();
+    ::testing::AddGlobalTestEnvironment(gEnv);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = gEnv->initFromOptions(argc, argv);
+    if (status == 0) {
+        status = RUN_ALL_TESTS();
+        ALOGD("XML Parser Test Result = %d\n", status);
+    }
+    return status;
+}
diff --git a/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h b/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h
new file mode 100644
index 0000000..61a09e6
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/XMLParserTestEnvironment.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __XML_PARSER_TEST_ENVIRONMENT_H__
+#define __XML_PARSER_TEST_ENVIRONMENT_H__
+
+#include <gtest/gtest.h>
+
+#include <getopt.h>
+
+using namespace std;
+
+class XMLParserTestEnvironment : public ::testing::Environment {
+  public:
+    XMLParserTestEnvironment() : res("/data/local/tmp/") {}
+
+    // Parses the command line arguments
+    int initFromOptions(int argc, char **argv);
+
+    void setRes(const char *_res) { res = _res; }
+
+    const string getRes() const { return res; }
+
+  private:
+    string res;
+};
+
+int XMLParserTestEnvironment::initFromOptions(int argc, char **argv) {
+    static struct option options[] = {{"path", required_argument, 0, 'P'}, {0, 0, 0, 0}};
+
+    while (true) {
+        int index = 0;
+        int c = getopt_long(argc, argv, "P:", options, &index);
+        if (c == -1) {
+            break;
+        }
+
+        switch (c) {
+            case 'P': {
+                setRes(optarg);
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    if (optind < argc) {
+        fprintf(stderr,
+                "unrecognized option: %s\n\n"
+                "usage: %s <gtest options> <test options>\n\n"
+                "test options are:\n\n"
+                "-P, --path: Resource files directory location\n",
+                argv[optind ?: 1], argv[0]);
+        return 2;
+    }
+    return 0;
+}
+
+#endif  // __XML_PARSER_TEST_ENVIRONMENT_H__
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
new file mode 100644
index 0000000..8cae423
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- REFERENCE : frameworks/av/media/libstagefright/xmlparser/media_codecs.xsd -->
+<Included>
+    <Settings>
+        <Domain name="telephony" enabled="false" />
+        <Domain name="tv" enabled="false" />
+        <Variant name="variant1" enabled="false" />
+        <Setting name="setting1" value="settingValue1" update="true" />
+        <Setting name="setting2" enabled="false" />
+    </Settings>
+    <Decoders>
+        <!-- entry for enabled, domain, rank and update properties -->
+        <MediaCodec name="test1.decoder" type="audio/mpeg" update="false" domain="telephony" enabled="false" rank="4">
+            <Alias name="alias1.decoder" />
+            <Quirk name="quirk1" value="quirk1Value"/>
+        </MediaCodec>
+        <!-- entry for testing Quirk -->
+        <MediaCodec name="test2.decoder" type="audio/3gpp" enabled="true" >
+            <Quirk name="quirk1" value="quirk1Value"/>
+        </MediaCodec>
+        <!-- entry for testing Feature -->
+        <!-- feature2 takes value 0 (a repeated feature name takes the value of the later entry) -->
+        <!-- feature3 reports value 0 since it is optional -->
+        <!-- optional="true" required="true" is not a valid combination. -->
+        <!-- optional="false" required="false" is not a valid combination. -->
+        <MediaCodec name="test3.decoder" type="audio/amr-wb" >
+            <Feature name="feature1" value="feature1Val" />
+            <Feature name="feature2" value="feature2Val"/>
+            <Feature name="feature2" />
+            <Feature name="feature3" optional="true" required="false" />
+        </MediaCodec>
+        <!-- entry for testing Type -->
+        <MediaCodec name="test4.decoder">
+            <Type name="audio/flac">
+                <Feature name="feature1" value="feature1Val" />
+            </Type>
+        </MediaCodec>
+        <!-- entry for testing Attribute -->
+        <MediaCodec name="test5.decoder" type="audio/g711-mlaw" >
+            <Attribute name="attributeQuirk1" />
+        </MediaCodec>
+        <!-- entry for testing Variant -->
+        <MediaCodec name="test6.decoder" type="audio/mp4a-latm" variant="variant1,variant2" >
+            <Variant name="variant1">
+                <Limit name="variant1Limit1" min="variant1Limit1Min" max="variant1Limit1Max" />
+                <Limit name="variant1Limit2" range="variant1Limit2Low-variant1Limit2High" />
+            </Variant>
+            <Variant name="variant2">
+                <Limit name="variant2Limit1" value="variant2Limit1Value" />
+            </Variant>
+        </MediaCodec>
+        <!-- entry for testing Limit -->
+        <!-- 'in' is present in xsd file but not handled in MediaCodecsXmlParser -->
+        <MediaCodec name="test7.decoder" type="audio/vorbis" >
+            <Limit name="limit1" in="limit1In" min="limit1Min"/>
+            <Limit name="limit2" min="limit2Min" max="limit2Max" scale="limit2Scale" />
+            <Limit name="limit3" ranges="limit3Val1,limit3Val2,limit3Val3" default="limit3Val3" />
+        </MediaCodec>
+    </Decoders>
+    <Encoders>
+        <MediaCodec name="test8.encoder" type="audio/opus">
+            <Limit name="limit1" max="limit1Max" />
+        </MediaCodec>
+        <!-- entry for testing Mapping -->
+        <MediaCodec name="test9.encoder" type="video/avc" >
+            <Mapping kind="sure" name="before" value="after"/>
+        </MediaCodec>
+        <MediaCodec name="test10.encoder" type="video/hevc" >
+            <Mapping kind="fire" name="from" value="to"/>
+        </MediaCodec>
+        <!-- entry for testing Tuning -->
+        <MediaCodec name="test11.encoder" type="video/av01" >
+            <Tuning name="hungry" value="yes"/>
+            <Tuning name="pi" value="3.1415"/>
+        </MediaCodec>
+    </Encoders>
+</Included>
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml
new file mode 100644
index 0000000..d864ce9
--- /dev/null
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test_caller.xml
@@ -0,0 +1,4 @@
+<!-- entry for testing Include -->
+<MediaCodecs>
+    <Include href="media_codecs_unit_test.xml" />
+</MediaCodecs>
diff --git a/media/libstagefright/xmlparser/vts/Android.bp b/media/libstagefright/xmlparser/vts/Android.bp
index 132ce82..1e36c8f 100644
--- a/media/libstagefright/xmlparser/vts/Android.bp
+++ b/media/libstagefright/xmlparser/vts/Android.bp
@@ -14,6 +14,15 @@
 // limitations under the License.
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
 cc_test {
     name: "vts_mediaCodecs_validate_test",
     srcs: [
diff --git a/media/libstagefright/xmlparser/vts/Android.mk b/media/libstagefright/xmlparser/vts/Android.mk
deleted file mode 100644
index d5290ba..0000000
--- a/media/libstagefright/xmlparser/vts/Android.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := VtsValidateMediaCodecs
-include test/vts/tools/build/Android.host_config.mk
diff --git a/media/libstagefright/xmlparser/vts/AndroidTest.xml b/media/libstagefright/xmlparser/vts/AndroidTest.xml
deleted file mode 100644
index 97ee107..0000000
--- a/media/libstagefright/xmlparser/vts/AndroidTest.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for VTS VtsValidateMediaCodecs.">
-    <option name="config-descriptor:metadata" key="plan" value="vts-treble" />
-    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.VtsFilePusher">
-        <option name="abort-on-push-failure" value="false"/>
-        <option name="push-group" value="HostDrivenTest.push"/>
-        <option name="push" value="DATA/etc/media_codecs.xsd->/data/local/tmp/media_codecs.xsd"/>
-    </target_preparer>
-    <test class="com.android.tradefed.testtype.VtsMultiDeviceTest">
-        <option name="test-module-name" value="VtsValidateMediaCodecs"/>
-        <option name="binary-test-source" value="_32bit::DATA/nativetest/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
-        <option name="binary-test-source" value="_64bit::DATA/nativetest64/vts_mediaCodecs_validate_test/vts_mediaCodecs_validate_test" />
-        <option name="binary-test-type" value="gtest"/>
-        <option name="test-timeout" value="30s"/>
-    </test>
-</configuration>
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 1a87824..411c206 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -12,8 +12,18 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libwatchdog",
+    host_supported: true,
     srcs: [
         "Watchdog.cpp",
     ],
@@ -29,6 +39,11 @@
         darwin: {
             enabled: false,
         },
+        linux_glibc: {
+            cflags: [
+                "-Dsigev_notify_thread_id=_sigev_un._tid",
+            ],
+        },
     },
     apex_available: ["com.android.media"],
     min_sdk_version: "29",
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index afca7c4..79b192e 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,21 @@
 
+package {
+    default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_mediaserver_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_static {
     name: "libregistermsext",
     srcs: ["register.cpp"],
@@ -17,6 +34,8 @@
     shared_libs: [
         "android.hardware.media.omx@1.0",
         "libandroidicu",
+        "libfmq",
+        "libbase",
         "libbinder",
         "libhidlbase",
         "liblog",
@@ -34,8 +53,13 @@
         "frameworks/av/services/mediaresourcemanager",
     ],
 
-    // back to 32-bit, b/126502613
-    compile_multilib: "32",
+    // By default mediaserver runs as a 32-bit process to save memory,
+    // except on 64-bit-only lunch targets.
+    // ****************************************************************
+    // TO ENABLE 64-BIT MEDIASERVER ON MIXED 32/64-BIT DEVICES, COMMENT
+    // OUT THE FOLLOWING LINE:
+    // ****************************************************************
+    compile_multilib: "prefer32",
 
     init_rc: ["mediaserver.rc"],
 
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 316732b..dc1b9b8 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "mediaserver"
 //#define LOG_NDEBUG 0
 
-#include <aicu/AIcu.h>
+#include <android-base/properties.h>
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
@@ -39,11 +39,16 @@
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm(defaultServiceManager());
     ALOGI("ServiceManager: %p", sm.get());
-    AIcu_initializeIcuOrDie();
     MediaPlayerService::instantiate();
     ResourceManagerService::instantiate();
     registerExtensions();
     ::android::hardware::configureRpcThreadpool(16, false);
+
+    if (!android::base::GetBoolProperty("ro.config.low_ram", false)) {
+        // Start the media.transcoding service if the device is not a
+        // low-RAM device.
+        android::base::SetProperty("ctl.start", "media.transcoding");
+    }
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
     ::android::hardware::joinRpcThreadpool();
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software.xml
index f23ed44..a5b4896 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -2,7 +2,7 @@
     <hal>
         <name>android.hardware.media.c2</name>
         <transport>hwbinder</transport>
-        <version>1.1</version>
+        <version>1.2</version>
         <interface>
             <name>IComponentStore</name>
             <instance>software</instance>
diff --git a/media/mtp/Android.bp b/media/mtp/Android.bp
index 66a3139..97e2a22 100644
--- a/media/mtp/Android.bp
+++ b/media/mtp/Android.bp
@@ -14,6 +14,23 @@
 // limitations under the License.
 //
 
+package {
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_mtp_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libmtp",
     srcs: [
@@ -52,5 +69,5 @@
         "liblog",
         "libusbhost",
     ],
+    header_libs: ["libcutils_headers"],
 }
-
diff --git a/media/mtp/IMtpDatabase.h b/media/mtp/IMtpDatabase.h
index 81fa60c..3b9bbb0 100644
--- a/media/mtp/IMtpDatabase.h
+++ b/media/mtp/IMtpDatabase.h
@@ -39,7 +39,7 @@
     // Called to report success or failure of the SendObject file transfer.
     virtual void                    endSendObject(MtpObjectHandle handle,
                                             bool succeeded) = 0;
-    
+
     // Called to rescan a file, such as after an edit.
     virtual void                    rescanFile(const char* path,
                                             MtpObjectHandle handle,
@@ -91,6 +91,8 @@
                                             int64_t& outFileLength,
                                             MtpObjectFormat& outFormat) = 0;
 
+    virtual int                     openFilePath(const char* path, bool transcode) = 0;
+
     virtual MtpResponseCode         beginDeleteObject(MtpObjectHandle handle) = 0;
     virtual void                    endDeleteObject(MtpObjectHandle handle, bool succeeded) = 0;
 
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index 9665c58..5be8ef5 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -608,6 +608,32 @@
     return NULL;
 }
 
+bool MtpDevice::setDevicePropValueStr(MtpProperty* property) {
+    if (property == nullptr)
+        return false;
+
+    std::lock_guard<std::mutex> lg(mMutex);
+
+    if (property->getDataType() != MTP_TYPE_STR) {
+        return false;
+    }
+
+    mRequest.reset();
+    mRequest.setParameter(1, property->getPropertyCode());
+
+    mData.reset();
+    mData.putString(property->getCurrentValue().str);
+
+    if (sendRequest(MTP_OPERATION_SET_DEVICE_PROP_VALUE) && sendData()) {
+        MtpResponseCode ret = readResponse();
+        if (ret != MTP_RESPONSE_OK) {
+            ALOGW("%s: Response=0x%04X\n", __func__, ret);
+            return false;
+        }
+    }
+    return true;
+}
+
 MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format) {
     std::lock_guard<std::mutex> lg(mMutex);
 
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index 01bc3db..b1b30e4 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -112,6 +112,7 @@
     MtpObjectPropertyList*  getObjectPropsSupported(MtpObjectFormat format);
 
     MtpProperty*            getDevicePropDesc(MtpDeviceProperty code);
+    bool                    setDevicePropValueStr(MtpProperty* property);
     MtpProperty*            getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format);
 
     // Reads value of |property| for |handle|. Returns true on success.
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index bd6a6c6..2ffd775 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -74,6 +74,7 @@
 
 MtpFfsHandle::MtpFfsHandle(int controlFd) {
     mControl.reset(controlFd);
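+    // sys.usb.mtp.batchcancel: when set, cancelEvents() treats the first successful io_cancel
+    // as cancelling the remaining requests in the batch instead of cancelling them one by one.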
+    mBatchCancel = android::base::GetBoolProperty("sys.usb.mtp.batchcancel", false);
 }
 
 MtpFfsHandle::~MtpFfsHandle() {}
@@ -114,11 +115,11 @@
 void MtpFfsHandle::advise(int fd) {
     for (unsigned i = 0; i < NUM_IO_BUFS; i++) {
         if (posix_madvise(mIobuf[i].bufs.data(), MAX_FILE_CHUNK_SIZE,
-                POSIX_MADV_SEQUENTIAL | POSIX_MADV_WILLNEED) < 0)
+                POSIX_MADV_SEQUENTIAL | POSIX_MADV_WILLNEED) != 0)
             PLOG(ERROR) << "Failed to madvise";
     }
     if (posix_fadvise(fd, 0, 0,
-                POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED) < 0)
+                POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED) != 0)
         PLOG(ERROR) << "Failed to fadvise";
 }
 
@@ -370,7 +371,7 @@
 }
 
 int MtpFfsHandle::cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start,
-        unsigned end) {
+        unsigned end, bool is_batch_cancel) {
     // Some manpages for io_cancel are out of date and incorrect.
     // io_cancel will return -EINPROGRESS on success and does
     // not place the event in the given memory. We have to use
@@ -386,6 +387,10 @@
         } else {
             num_events++;
         }
+        if (is_batch_cancel && num_events == 1) {
+            num_events = end - start;
+            break;
+        }
     }
     if (num_events != end - start) {
         ret = -1;
@@ -495,7 +500,8 @@
                 num_events += this_events;
 
                 if (event_ret == -1) {
-                    cancelEvents(mIobuf[i].iocb.data(), ioevs, num_events, mIobuf[i].actual);
+                    cancelEvents(mIobuf[i].iocb.data(), ioevs, num_events, mIobuf[i].actual,
+                            mBatchCancel);
                     return -1;
                 }
                 ret += event_ret;
@@ -512,7 +518,8 @@
                 }
             }
             if (short_packet) {
-                if (cancelEvents(mIobuf[i].iocb.data(), ioevs, short_i, mIobuf[i].actual)) {
+                if (cancelEvents(mIobuf[i].iocb.data(), ioevs, short_i, mIobuf[i].actual,
+                        mBatchCancel)) {
                     write_error = true;
                 }
             }
@@ -613,7 +620,7 @@
                         &num_events) != ret) {
                 error = true;
                 cancelEvents(mIobuf[(i-1)%NUM_IO_BUFS].iocb.data(), ioevs, num_events,
-                        mIobuf[(i-1)%NUM_IO_BUFS].actual);
+                        mIobuf[(i-1)%NUM_IO_BUFS].actual, false);
             }
             has_write = false;
         }
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index fe343f7..e552e03 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -17,6 +17,7 @@
 #ifndef _MTP_FFS_HANDLE_H
 #define _MTP_FFS_HANDLE_H
 
+#include <android-base/properties.h>
 #include <android-base/unique_fd.h>
 #include <linux/aio_abi.h>
 #include <mutex>
@@ -57,6 +58,7 @@
     static int getPacketSize(int ffs_fd);
 
     bool mCanceled;
+    bool mBatchCancel;
 
     android::base::unique_fd mControl;
     // "in" from the host's perspective => sink for mtp server
@@ -76,7 +78,8 @@
     int iobufSubmit(struct io_buffer *buf, int fd, unsigned length, bool read);
 
     // Cancel submitted requests from start to end in the given array. Return 0 or -1.
-    int cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start, unsigned end);
+    int cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start, unsigned end,
+                     bool is_batch_cancel);
 
     // Wait for at minimum the given number of events. Returns the amount of data in the returned
     // events. Increments counter by the number of events returned.
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index 3b298a9..f069a83 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -157,7 +157,7 @@
                             request->endpoint,
                             request->buffer,
                             request->buffer_length,
-                            1000);
+                            5000);
     request->actual_length = result;
     return result;
 }
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index 5c02a0d..98a2ad3 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -240,6 +240,16 @@
         mCurrentValue.str = NULL;
 }
 
+void MtpProperty::setCurrentValue(const char* string) {
+    free(mCurrentValue.str);
+    if (string) {
+        MtpStringBuffer buffer(string);
+        mCurrentValue.str = strdup(buffer);
+    }
+    else
+        mCurrentValue.str = NULL;
+}
+
 void MtpProperty::setCurrentValue(MtpDataPacket& packet) {
     free(mCurrentValue.str);
     mCurrentValue.str = NULL;
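// Editor's illustrative sketch, not part of the patch: the new const char* overload lets
// callers update a string-typed property directly from a UTF-8 C string instead of first
// converting to a uint16_t buffer. The property and value below are arbitrary examples.
#include "MtpProperty.h"

void exampleSetDeviceFriendlyName(android::MtpProperty& friendlyNameProp) {
    // friendlyNameProp is assumed to be a string-typed device property
    // (e.g. constructed with MTP_TYPE_STR); only its current value changes here.
    friendlyNameProp.setCurrentValue("Example MTP Device");
}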
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index bfd5f7f..36d7360 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -91,6 +91,7 @@
 
     void                setDefaultValue(const uint16_t* string);
     void                setCurrentValue(const uint16_t* string);
+    void                setCurrentValue(const char* string);
     void                setCurrentValue(MtpDataPacket& packet);
     const MtpPropertyValue& getCurrentValue() { return mCurrentValue; }
 
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index 8677b90..6fcf119 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -790,11 +790,40 @@
     auto start = std::chrono::steady_clock::now();
 
     const char* filePath = (const char *)pathBuf;
-    mtp_file_range  mfr;
-    mfr.fd = open(filePath, O_RDONLY);
-    if (mfr.fd < 0) {
-        return MTP_RESPONSE_GENERAL_ERROR;
+    mtp_file_range mfr;
+    struct stat sstat;
+    uint64_t finalsize;
+    bool transcode = android::base::GetBoolProperty("sys.fuse.transcode_mtp", false);
+    bool filePathAccess = true;
+    ALOGD("Mtp transcode = %d", transcode);
+
+    // For performance, only attempt a ContentResolver open when transcoding is required.
+    // This holds as long as the device does not transcode by default; if it ever does,
+    // MTP will need a way to opt out of transcoding (for example via a binder call) or a
+    // different strategy.
+    if (transcode) {
+        mfr.fd = mDatabase->openFilePath(filePath, true);
+        if (mfr.fd < 0) {
+            ALOGW("Mtp open via IMtpDatabase failed for %s. Falling back to the file path",
+                  filePath);
+            filePathAccess = true;
+        } else {
+            fstat(mfr.fd, &sstat);  // only stat a valid fd
+            finalsize = sstat.st_size;
+            fileLength = finalsize;
+            filePathAccess = false;
+        }
     }
+
+    if (filePathAccess) {
+        mfr.fd = open(filePath, O_RDONLY);
+        if (mfr.fd < 0) {
+            return MTP_RESPONSE_GENERAL_ERROR;
+        }
+        fstat(mfr.fd, &sstat);
+        finalsize = sstat.st_size;
+    }
+
     mfr.offset = 0;
     mfr.length = fileLength;
     mfr.command = mRequest.getOperationCode();
@@ -815,9 +844,6 @@
 
     auto end = std::chrono::steady_clock::now();
     std::chrono::duration<double> diff = end - start;
-    struct stat sstat;
-    fstat(mfr.fd, &sstat);
-    uint64_t finalsize = sstat.st_size;
     ALOGV("Sent a file over MTP. Time: %f s, Size: %" PRIu64 ", Rate: %f bytes/s",
             diff.count(), finalsize, ((double) finalsize) / diff.count());
     closeObjFd(mfr.fd, filePath);
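// Editor's illustrative sketch, not part of the patch: the contract doSendObject() relies on
// above. IMtpDatabase::openFilePath() is expected to return an open, readable fd (which the
// caller closes) or a negative value on failure, in which case MtpServer falls back to a
// plain open(2) on the path. The real platform database routes the open through a
// ContentResolver so a transcoded variant can be served; the pass-through below ignores
// transcoding entirely.
#include <fcntl.h>

int ExampleDatabase_openFilePath(const char* path, bool transcode) {
    (void)transcode;  // no transcoded variant available in this sketch
    return open(path, O_RDONLY | O_CLOEXEC);
}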
diff --git a/media/mtp/tests/Android.bp b/media/mtp/tests/Android.bp
deleted file mode 100644
index 0750208..0000000
--- a/media/mtp/tests/Android.bp
+++ /dev/null
@@ -1,47 +0,0 @@
-//
-// Copyright (C) 2017 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-cc_test {
-    name: "mtp_ffs_handle_test",
-    test_suites: ["device-tests"],
-    srcs: ["MtpFfsHandle_test.cpp"],
-    shared_libs: [
-        "libbase",
-        "libmtp",
-        "liblog",
-    ],
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-}
-
-cc_test {
-    name: "posix_async_io_test",
-    test_suites: ["device-tests"],
-    srcs: ["PosixAsyncIO_test.cpp"],
-    shared_libs: [
-        "libbase",
-        "libmtp",
-        "liblog",
-    ],
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-}
diff --git a/media/mtp/tests/AndroidTest.xml b/media/mtp/tests/AndroidTest.xml
deleted file mode 100644
index c1f4753..0000000
--- a/media/mtp/tests/AndroidTest.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<configuration description="Config for mtp_ffs_handle_test">
-    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="true" />
-        <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
-    </target_preparer>
-    <option name="test-suite-tag" value="apct" />
-    <test class="com.android.tradefed.testtype.GTest" >
-        <option name="native-test-device-path" value="/data/local/tmp" />
-        <option name="module-name" value="mtp_ffs_handle_test" />
-    </test>
-</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandleTest/Android.bp b/media/mtp/tests/MtpFfsHandleTest/Android.bp
new file mode 100644
index 0000000..ec9c7a4
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/Android.bp
@@ -0,0 +1,40 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_mtp_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
+cc_test {
+    name: "mtp_ffs_handle_test",
+    test_suites: ["device-tests"],
+    srcs: ["MtpFfsHandle_test.cpp"],
+    shared_libs: [
+        "libbase",
+        "libmtp",
+        "liblog",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
diff --git a/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
new file mode 100644
index 0000000..38bab27
--- /dev/null
+++ b/media/mtp/tests/MtpFfsHandleTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for mtp_ffs_handle_test">
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="mtp_ffs_handle_test->/data/local/tmp/mtp_ffs_handle_test" />
+    </target_preparer>
+    <option name="test-suite-tag" value="apct" />
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="mtp_ffs_handle_test" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/MtpFfsHandle_test.cpp b/media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
similarity index 100%
rename from media/mtp/tests/MtpFfsHandle_test.cpp
rename to media/mtp/tests/MtpFfsHandleTest/MtpFfsHandle_test.cpp
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
new file mode 100644
index 0000000..5365f4b
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -0,0 +1,40 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_mtp_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
+cc_fuzz {
+    name: "mtp_fuzzer",
+    srcs: [
+        "mtp_fuzzer.cpp",
+        "MtpMockDatabase.cpp",
+    ],
+    shared_libs: [
+        "libmtp",
+        "libbase",
+        "liblog",
+        "libutils",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-DMTP_DEVICE",
+        "-Wno-unused-parameter",
+    ],
+    dictionary: "mtp_fuzzer.dict",
+    corpus: ["corpus/*"],
+
+    fuzz_config: {
+        cc: ["jameswei@google.com"],
+        componentid: 1344,
+        acknowledgement: [
+            "Grant Hernandez of Google",
+        ],
+    },
+}
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
new file mode 100644
index 0000000..8aafe33
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.cpp
@@ -0,0 +1,320 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include <log/log.h>
+
+#include "MtpDebug.h"
+#include "MtpMockDatabase.h"
+#include "MtpObjectInfo.h"
+
+namespace android {
+
+MtpMockDatabase::MtpMockDatabase() : mLastObjectHandle(0) {}
+
+MtpMockDatabase::~MtpMockDatabase() {
+    for (MtpObjectInfo* i : mObjects) {
+        delete i;
+    }
+    mObjects.clear();
+}
+
+void MtpMockDatabase::addObject(MtpObjectInfo* info) {
+    assert(hasStorage(info->storageID));
+
+    // we take ownership
+    mObjects.push_back(info);
+
+    return;
+}
+
+MtpObjectHandle MtpMockDatabase::allocateObjectHandle() {
+    // this is in sync with our mObjects database
+    return mLastObjectHandle++;
+}
+
+// Called from SendObjectInfo to reserve a database entry for the incoming file.
+MtpObjectHandle MtpMockDatabase::beginSendObject(const char* path, MtpObjectFormat format,
+                                                 MtpObjectHandle parent, MtpStorageID storage) {
+    if (!hasStorage(storage)) {
+        ALOGW("%s: Tried to lookup storageID %u, but doesn't exist\n", __func__, storage);
+        return kInvalidObjectHandle;
+    }
+
+    ALOGD("MockDatabase %s: path=%s oformat=0x%04x parent_handle=%u "
+          "storage_id=%u\n",
+          __func__, path, format, parent, storage);
+
+    return mLastObjectHandle;
+}
+
+// Called to report success or failure of the SendObject file transfer.
+void MtpMockDatabase::endSendObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+// Called to rescan a file, such as after an edit.
+void MtpMockDatabase::rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: path=%s ohandle=%u, oformat=0x%04x\n", __func__, path, handle, format);
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+                                                    MtpObjectHandle parent) {
+    ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+          parent);
+    return nullptr;
+}
+
+int MtpMockDatabase::getNumObjects(MtpStorageID storageID, MtpObjectFormat format,
+                                   MtpObjectHandle parent) {
+    ALOGD("MockDatabase %s: storage_id=%u oformat=0x%04x ohandle=%u\n", __func__, storageID, format,
+          parent);
+    // TODO: return MTP_RESPONSE_OK when it stops segfaulting
+    return 0;
+}
+
+// callee should delete[] the results from these
+// results can be NULL
+MtpObjectFormatList* MtpMockDatabase::getSupportedPlaybackFormats() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+MtpObjectFormatList* MtpMockDatabase::getSupportedCaptureFormats() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+MtpObjectPropertyList* MtpMockDatabase::getSupportedObjectProperties(MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: oformat=0x%04x\n", __func__, format);
+    return nullptr;
+}
+MtpDevicePropertyList* MtpMockDatabase::getSupportedDeviceProperties() {
+    ALOGD("MockDatabase %s\n", __func__);
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyValue(MtpObjectHandle handle,
+                                                        MtpObjectProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+          MtpDebug::getObjectPropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectPropertyValue(MtpObjectHandle handle,
+                                                        MtpObjectProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u property=%s\n", __func__, handle,
+          MtpDebug::getObjectPropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getDevicePropertyValue(MtpDeviceProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::setDevicePropertyValue(MtpDeviceProperty property,
+                                                        MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::resetDeviceProperty(MtpDeviceProperty property) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+                                                       uint32_t property, int groupCode, int depth,
+                                                       MtpDataPacket& packet) {
+    ALOGD("MockDatabase %s: ohandle=%u format=%s property=%s groupCode=%d "
+          "depth=%d\n",
+          __func__, handle, MtpDebug::getFormatCodeName(format),
+          MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpMockDatabase::getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    // used for the root
+    if (handle == kInvalidObjectHandle) {
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    } else {
+        if (mObjects.size() == 0) {
+            return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+        }
+
+        // this is used to let the fuzzer make progress, otherwise
+        // it has to brute-force a 32-bit handle
+        MtpObjectHandle reducedHandle = handle % mObjects.size();
+        MtpObjectInfo* obj = mObjects[reducedHandle];
+
+        // make a copy, but make sure to maintain ownership of string pointers
+        info = *obj;
+
+        // fixup the response handle
+        info.mHandle = handle;
+
+        if (obj->mName) info.mName = strdup(obj->mName);
+        if (obj->mKeywords) info.mKeywords = strdup(obj->mKeywords);
+
+        return MTP_RESPONSE_OK;
+    }
+}
+
+void* MtpMockDatabase::getThumbnail(MtpObjectHandle handle, size_t& outThumbSize) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    size_t allocSize = handle % 0x1000;
+    void* data = calloc(allocSize, sizeof(uint8_t));
+    if (!data) {
+        return nullptr;
+    } else {
+        ALOGD("MockDatabase %s\n", __func__);
+        outThumbSize = allocSize;
+        return data;
+    }
+}
+
+MtpResponseCode MtpMockDatabase::getObjectFilePath(MtpObjectHandle handle,
+                                                   MtpStringBuffer& outFilePath,
+                                                   int64_t& outFileLength,
+                                                   MtpObjectFormat& outFormat) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+
+    if (mObjects.size() == 0) {
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+
+    // this is used to let the fuzzer make progress, otherwise
+    // it has to brute-force a 32-bit handle
+    MtpObjectHandle reducedHandle = handle % mObjects.size();
+    MtpObjectInfo* obj = mObjects[reducedHandle];
+    MtpStorage* storage = mStorage[obj->mStorageID];
+
+    // walk up the tree to build a full path of the object
+    MtpObjectHandle currentHandle = reducedHandle;
+    std::string path = "";
+
+    while (currentHandle != MTP_PARENT_ROOT) {
+        MtpObjectInfo* next = mObjects[currentHandle];
+
+        // prepend the name
+        if (path == "")
+            path = std::string(next->mName);
+        else
+            path = std::string(next->mName) + "/" + path;
+
+        currentHandle = next->mParent;
+    }
+
+    outFilePath.set(storage->getPath());
+    outFilePath.append("/");
+    outFilePath.append(path.c_str());
+
+    outFormat = obj->mFormat;
+
+    ALOGD("MockDatabase %s: get file %s\n", __func__, (const char*)outFilePath);
+
+    struct stat sstat;
+    // this should not happen unless our database view of the filesystem is out of
+    // sync
+    if (stat((const char*)outFilePath, &sstat) < 0) {
+        ALOGE("MockDatabase %s: unable to stat %s\n", __func__, (const char*)outFilePath);
+
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+
+    outFileLength = sstat.st_size;
+
+    return MTP_RESPONSE_OK;
+}
+
+int MtpMockDatabase::openFilePath(const char* path, bool transcode) {
+    ALOGD("MockDatabase %s: filePath=%s transcode=%d\n", __func__, path, transcode);
+    return 0;
+}
+
+MtpResponseCode MtpMockDatabase::beginDeleteObject(MtpObjectHandle handle) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return MTP_RESPONSE_OK;
+}
+void MtpMockDatabase::endDeleteObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+    return;
+}
+
+MtpObjectHandleList* MtpMockDatabase::getObjectReferences(MtpObjectHandle handle) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::setObjectReferences(MtpObjectHandle handle,
+                                                     MtpObjectHandleList* references) {
+    ALOGD("MockDatabase %s: ohandle=%u\n", __func__, handle);
+    return MTP_RESPONSE_OK;
+}
+
+MtpProperty* MtpMockDatabase::getObjectPropertyDesc(MtpObjectProperty property,
+                                                    MtpObjectFormat format) {
+    ALOGD("MockDatabase %s: property=%s format=%s\n", __func__,
+          MtpDebug::getObjectPropCodeName(property), MtpDebug::getFormatCodeName(format));
+
+    return nullptr;
+}
+
+MtpProperty* MtpMockDatabase::getDevicePropertyDesc(MtpDeviceProperty property) {
+    ALOGD("MockDatabase %s: property=%s\n", __func__, MtpDebug::getDevicePropCodeName(property));
+    return nullptr;
+}
+
+MtpResponseCode MtpMockDatabase::beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                                 MtpStorageID newStorage) {
+    ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+          newStorage);
+    return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                                    MtpStorageID oldStorage, MtpStorageID newStorage,
+                                    MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: oldParent=%u newParent=%u oldStorage=%u newStorage=%u "
+          "ohandle=%u succeeded=%d\n",
+          __func__, oldParent, newParent, oldStorage, newStorage, handle, succeeded);
+    return;
+}
+
+MtpResponseCode MtpMockDatabase::beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                                 MtpStorageID newStorage) {
+    ALOGD("MockDatabase %s: ohandle=%u newParent=%u newStorage=%u\n", __func__, handle, newParent,
+          newStorage);
+    return MTP_RESPONSE_OK;
+}
+
+void MtpMockDatabase::endCopyObject(MtpObjectHandle handle, bool succeeded) {
+    ALOGD("MockDatabase %s: ohandle=%u succeeded=%d\n", __func__, handle, succeeded);
+}
+
+}; // namespace android
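// Editor's illustrative sketch, not part of the patch: minimal population of the mock
// database, mirroring how the fuzzer harness (mtp_fuzzer.cpp below) builds its object tree.
// The storage id, path and size are arbitrary example values.
#include <string.h>

#include "MtpMockDatabase.h"
#include "MtpObjectInfo.h"
#include "MtpStorage.h"

void exampleBuildMockDatabase() {
    using namespace android;

    MtpStorage storage(0 /* id */, "/data/local/tmp/mtp" /* path */,
                       "Mock Storage" /* description */, true /* flag as in the harness */,
                       0x100000000L /* max file size */);
    MtpMockDatabase db;
    db.addStorage(&storage);  // the database does not take ownership of the storage

    MtpObjectInfo* info = new MtpObjectInfo(db.allocateObjectHandle());
    info->mStorageID = storage.getStorageID();
    info->mParent = MTP_PARENT_ROOT;
    info->mFormat = MTP_FORMAT_TEXT;
    info->mName = strdup("hello.txt");
    info->mKeywords = strdup("");
    db.addObject(info);  // the database takes ownership of info
}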
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
new file mode 100644
index 0000000..e9e6a64
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockDatabase.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef _MTP_MOCK_DATABASE_H
+#define _MTP_MOCK_DATABASE_H
+
+#include <map>
+
+#include "IMtpDatabase.h"
+#include "MtpStorage.h"
+
+namespace android {
+
+class MtpMockDatabase : public IMtpDatabase {
+    std::map<MtpStorageID, MtpStorage*> mStorage;
+    std::vector<MtpObjectInfo*> mObjects;
+    uint32_t mLastObjectHandle;
+
+public:
+    MtpMockDatabase();
+    virtual ~MtpMockDatabase();
+
+    // MtpFuzzer methods
+    void addStorage(MtpStorage* storage) {
+        // we don't own this
+        mStorage[storage->getStorageID()] = storage;
+    }
+
+    bool hasStorage(MtpStorageID storage) { return mStorage.find(storage) != mStorage.end(); }
+
+    void addObject(MtpObjectInfo* info);
+    MtpObjectHandle allocateObjectHandle();
+
+    // libmtp interface methods
+    // Called from SendObjectInfo to reserve a database entry for the incoming
+    // file.
+    MtpObjectHandle beginSendObject(const char* path, MtpObjectFormat format,
+                                    MtpObjectHandle parent, MtpStorageID storage);
+
+    // Called to report success or failure of the SendObject file transfer.
+    void endSendObject(MtpObjectHandle handle, bool succeeded);
+
+    // Called to rescan a file, such as after an edit.
+    void rescanFile(const char* path, MtpObjectHandle handle, MtpObjectFormat format);
+
+    MtpObjectHandleList* getObjectList(MtpStorageID storageID, MtpObjectFormat format,
+                                       MtpObjectHandle parent);
+
+    int getNumObjects(MtpStorageID storageID, MtpObjectFormat format, MtpObjectHandle parent);
+
+    // callee should delete[] the results from these
+    // results can be NULL
+    MtpObjectFormatList* getSupportedPlaybackFormats();
+    MtpObjectFormatList* getSupportedCaptureFormats();
+    MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format);
+    MtpDevicePropertyList* getSupportedDeviceProperties();
+
+    MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle, MtpObjectProperty property,
+                                           MtpDataPacket& packet);
+
+    MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property, MtpDataPacket& packet);
+
+    MtpResponseCode resetDeviceProperty(MtpDeviceProperty property);
+
+    MtpResponseCode getObjectPropertyList(MtpObjectHandle handle, uint32_t format,
+                                          uint32_t property, int groupCode, int depth,
+                                          MtpDataPacket& packet);
+
+    MtpResponseCode getObjectInfo(MtpObjectHandle handle, MtpObjectInfo& info);
+
+    void* getThumbnail(MtpObjectHandle handle, size_t& outThumbSize);
+
+    MtpResponseCode getObjectFilePath(MtpObjectHandle handle, MtpStringBuffer& outFilePath,
+                                      int64_t& outFileLength, MtpObjectFormat& outFormat);
+
+    int openFilePath(const char* path, bool transcode);
+
+    MtpResponseCode beginDeleteObject(MtpObjectHandle handle);
+    void endDeleteObject(MtpObjectHandle handle, bool succeeded);
+
+    MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle);
+
+    MtpResponseCode setObjectReferences(MtpObjectHandle handle, MtpObjectHandleList* references);
+
+    MtpProperty* getObjectPropertyDesc(MtpObjectProperty property, MtpObjectFormat format);
+
+    MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property);
+
+    MtpResponseCode beginMoveObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+
+    void endMoveObject(MtpObjectHandle oldParent, MtpObjectHandle newParent,
+                       MtpStorageID oldStorage, MtpStorageID newStorage, MtpObjectHandle handle,
+                       bool succeeded);
+
+    MtpResponseCode beginCopyObject(MtpObjectHandle handle, MtpObjectHandle newParent,
+                                    MtpStorageID newStorage);
+    void endCopyObject(MtpObjectHandle handle, bool succeeded);
+};
+
+}; // namespace android
+
+#endif // _MTP_MOCK_DATABASE_H
diff --git a/media/mtp/tests/MtpFuzzer/MtpMockHandle.h b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
new file mode 100644
index 0000000..111485c
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpMockHandle.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_MOCK_HANDLE_H
+#define _MTP_MOCK_HANDLE_H
+
+#include <vector>
+
+typedef std::vector<uint8_t> packet_t;
+
+namespace android {
+class MtpMockHandle : public IMtpHandle {
+private:
+    size_t mPacketNumber;
+    size_t mPacketOffset;
+    std::vector<packet_t> mPackets;
+
+public:
+    MtpMockHandle() : mPacketNumber(0), mPacketOffset(0) {}
+
+    void add_packet(packet_t pkt) { mPackets.push_back(pkt); }
+
+    // Return number of bytes read/written, or -1 and errno is set
+    int read(void *data, size_t len) {
+        if (mPacketNumber >= mPackets.size()) {
+            return 0;
+        } else {
+            int readAmt = 0;
+            packet_t pkt = mPackets[mPacketNumber];
+
+            ALOGD("%s: sz %zu, pkt %zu+%zu/%zu\n", __func__, len, mPacketNumber, mPacketOffset,
+                  pkt.size());
+
+            // copy at most the remaining bytes of the current packet
+            size_t remaining = pkt.size() - mPacketOffset;
+            if (remaining > len) {
+                // packet is bigger than the caller's buffer; return a partial read
+                memcpy(data, pkt.data() + mPacketOffset, len);
+                mPacketOffset += len;
+                readAmt = len;
+            } else {
+                // the rest of the packet fits; consume it and advance to the next packet
+                memcpy(data, pkt.data() + mPacketOffset, remaining);
+                mPacketNumber++;
+                mPacketOffset = 0;
+                readAmt = remaining;
+            }
+
+            return readAmt;
+        }
+    }
+    int write(const void *data, size_t len) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, len);
+        // fake the write
+        return len;
+    }
+
+    // Return 0 if send/receive is successful, or -1 and errno is set
+    int receiveFile(mtp_file_range mfr, bool zero_packet) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendFile(mtp_file_range mfr) {
+        ALOGD("MockHandle %s\n", __func__);
+        return 0;
+    }
+    int sendEvent(mtp_event me) {
+        ALOGD("MockHandle %s: len=%zu\n", __func__, me.length);
+        return 0;
+    }
+
+    // Return 0 if the operation is successful, or -1 otherwise
+    int start(bool ptp) { return 0; }
+
+    void close() {}
+
+    virtual ~MtpMockHandle() {}
+};
+}; // namespace android
+
+#endif // _MTP_MOCK_HANDLE_H
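// Editor's illustrative sketch, not part of the patch: how the mock handle gets its input.
// Each packet_t is one raw MTP container; read() replays them to MtpServer one packet per
// call, splitting a packet only when the caller's buffer is smaller than the packet.
// The bytes below encode an OpenSession command (opcode 0x1002, little-endian, matching the
// fuzzer dictionary) with transaction id 1 and session id 1.
#define LOG_TAG "MtpMockHandleExample"
#include <log/log.h>

#include "IMtpHandle.h"
#include "MtpMockHandle.h"

void exampleQueueOpenSession(android::MtpMockHandle& handle) {
    packet_t openSession = {
            0x10, 0x00, 0x00, 0x00,  // container length = 16 bytes
            0x01, 0x00,              // container type = command block
            0x02, 0x10,              // operation code = OpenSession (0x1002)
            0x01, 0x00, 0x00, 0x00,  // transaction id = 1
            0x01, 0x00, 0x00, 0x00,  // parameter 1: session id = 1
    };
    handle.add_packet(openSession);
}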
diff --git a/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
new file mode 100644
index 0000000..38f8ed2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/1-mtp-open_session.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
new file mode 100644
index 0000000..7759380
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/2-mtp-get_device_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
new file mode 100644
index 0000000..e88410f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/3-mtp-get_object_handles.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
new file mode 100644
index 0000000..e283fb4
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/4-mtp-get_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
new file mode 100644
index 0000000..7627f88
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/5-mtp-send_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
new file mode 100644
index 0000000..f578462
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/unique_fd.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <string>
+
+#define LOG_TAG "MtpFuzzer"
+
+#include "IMtpHandle.h"
+#include "MtpMockDatabase.h"
+#include "MtpMockHandle.h"
+#include "MtpObjectInfo.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpUtils.h"
+
+const char* storage_desc = "Fuzz Storage";
+// prefer tmpfs for file operations to avoid wearing out flash
+const char* storage_path = "/storage/fuzzer/0";
+const char* source_database = "srcdb/";
+
+namespace android {
+class MtpMockServer {
+public:
+    std::unique_ptr<MtpMockHandle> mHandle;
+    std::unique_ptr<MtpStorage> mStorage;
+    std::unique_ptr<MtpMockDatabase> mDatabase;
+    std::unique_ptr<MtpServer> mMtp;
+    int mStorageId;
+
+    MtpMockServer(const char* storage_path) : mStorageId(0) {
+        bool ptp = false;
+        const char* manu = "Google";
+        const char* model = "Pixel 3XL";
+        const char* version = "1.0";
+        const char* serial = "ABDEF1231";
+
+        // This is unused in our harness
+        int controlFd = -1;
+
+        mHandle = std::make_unique<MtpMockHandle>();
+        mStorage = std::make_unique<MtpStorage>(mStorageId, storage_path, storage_desc, true,
+                                                0x200000000L);
+        mDatabase = std::make_unique<MtpMockDatabase>();
+        mDatabase->addStorage(mStorage.get());
+
+        mMtp = std::make_unique<MtpServer>(mDatabase.get(), controlFd, ptp, manu, model, version,
+                                           serial);
+        mMtp->addStorage(mStorage.get());
+
+        // clear the old handle first, so we don't leak memory
+        delete mMtp->mHandle;
+        mMtp->mHandle = mHandle.get();
+    }
+
+    void run() { mMtp->run(); }
+
+    int createDatabaseFromSourceDir(const char* fromPath, const char* toPath,
+                                    MtpObjectHandle parentHandle) {
+        int ret = 0;
+        std::string fromPathStr(fromPath);
+        std::string toPathStr(toPath);
+
+        DIR* dir = opendir(fromPath);
+        if (!dir) {
+            ALOGE("opendir %s failed", fromPath);
+            return -1;
+        }
+        if (fromPathStr[fromPathStr.size() - 1] != '/') fromPathStr += '/';
+        if (toPathStr[toPathStr.size() - 1] != '/') toPathStr += '/';
+
+        struct dirent* entry;
+        while ((entry = readdir(dir))) {
+            const char* name = entry->d_name;
+
+            // ignore "." and ".."
+            if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+                continue;
+            }
+
+            std::string oldFile = fromPathStr + name;
+            std::string newFile = toPathStr + name;
+
+            if (entry->d_type == DT_DIR) {
+                ret += makeFolder(newFile.c_str());
+
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_ASSOCIATION; // folder
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+
+                ret += createDatabaseFromSourceDir(oldFile.c_str(), newFile.c_str(),
+                                                   objectInfo->mHandle);
+            } else {
+                ret += copyFile(oldFile.c_str(), newFile.c_str());
+
+                MtpObjectInfo* objectInfo = new MtpObjectInfo(mDatabase->allocateObjectHandle());
+                objectInfo->mStorageID = mStorage->getStorageID();
+                objectInfo->mParent = parentHandle;
+                objectInfo->mFormat = MTP_FORMAT_TEXT;
+                objectInfo->mName = strdup(name);
+                objectInfo->mKeywords = strdup("");
+
+                mDatabase->addObject(objectInfo);
+            }
+        }
+
+        closedir(dir);
+        return ret;
+    }
+};
+}; // namespace android
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) __attribute__((optnone)) {
+    // reset our storage (from MtpUtils.h)
+    android::deletePath(storage_path);
+    android::makeFolder("/storage/fuzzer");
+    android::makeFolder(storage_path);
+
+    std::unique_ptr<android::MtpMockServer> mtp =
+            std::make_unique<android::MtpMockServer>(storage_path);
+
+    size_t off = 0;
+
+    // Packetize the input stream
+    for (size_t i = 0; i < size; i++) {
+        // A longer delimiter could be used, but this worked in practice
+        if (data[i] == '@') {
+            size_t pktsz = i - off;
+            if (pktsz > 0) {
+                packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
+                // insert into packet buffer
+                mtp->mHandle->add_packet(pkt);
+                off = i;
+            }
+        }
+    }
+
+    mtp->createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
+    mtp->run();
+
+    return 0;
+}
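// Editor's illustrative sketch, not part of the patch: how a corpus seed maps onto the
// packetizer above. Packets are separated by the '@' delimiter byte; because `off` is set
// to the delimiter's index, every packet after the first starts with the '@' byte, and any
// trailing bytes after the last '@' are dropped. openSessionBytes / getDeviceInfoBytes are
// placeholder names for raw MTP containers such as the ones in corpus/.
#include <string>

std::string exampleBuildSeed(const std::string& openSessionBytes,
                             const std::string& getDeviceInfoBytes) {
    std::string seed;
    seed += openSessionBytes;    // first packet: no leading delimiter
    seed += '@';
    seed += getDeviceInfoBytes;  // second packet (will carry the leading '@')
    seed += '@';                 // final delimiter so the last packet is not dropped
    return seed;                 // write this into a corpus/*.pkt file
}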
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
new file mode 100644
index 0000000..4c3f136
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.dict
@@ -0,0 +1,74 @@
+mtp_operation_get_device_info="\x01\x10"
+mtp_operation_open_session="\x02\x10"
+mtp_operation_close_session="\x03\x10"
+mtp_operation_get_storage_ids="\x04\x10"
+mtp_operation_get_storage_info="\x05\x10"
+mtp_operation_get_num_objects="\x06\x10"
+mtp_operation_get_object_handles="\x07\x10"
+mtp_operation_get_object_info="\x08\x10"
+mtp_operation_get_object="\x09\x10"
+mtp_operation_get_thumb="\x0A\x10"
+mtp_operation_delete_object="\x0B\x10"
+mtp_operation_send_object_info="\x0C\x10"
+mtp_operation_send_object="\x0D\x10"
+mtp_operation_initiate_capture="\x0E\x10"
+mtp_operation_format_store="\x0F\x10"
+mtp_operation_reset_device="\x10\x10"
+mtp_operation_self_test="\x11\x10"
+mtp_operation_set_object_protection="\x12\x10"
+mtp_operation_power_down="\x13\x10"
+mtp_operation_get_device_prop_desc="\x14\x10"
+mtp_operation_get_device_prop_value="\x15\x10"
+mtp_operation_set_device_prop_value="\x16\x10"
+mtp_operation_reset_device_prop_value="\x17\x10"
+mtp_operation_terminate_open_capture="\x18\x10"
+mtp_operation_move_object="\x19\x10"
+mtp_operation_copy_object="\x1A\x10"
+mtp_operation_get_partial_object="\x1B\x10"
+mtp_operation_initiate_open_capture="\x1C\x10"
+mtp_operation_get_object_props_supported="\x01\x98"
+mtp_operation_get_object_prop_desc="\x02\x98"
+mtp_operation_get_object_prop_value="\x03\x98"
+mtp_operation_set_object_prop_value="\x04\x98"
+mtp_operation_get_object_prop_list="\x05\x98"
+mtp_operation_set_object_prop_list="\x06\x98"
+mtp_operation_get_interdependent_prop_desc="\x07\x98"
+mtp_operation_send_object_prop_list="\x08\x98"
+mtp_operation_get_object_references="\x10\x98"
+mtp_operation_set_object_references="\x11\x98"
+mtp_operation_skip="\x20\x98"
+mtp_operation_get_partial_object_64="\xC1\x95"
+mtp_operation_send_partial_object="\xC2\x95"
+mtp_operation_truncate_object="\xC3\x95"
+mtp_operation_begin_edit_object="\xC4\x95"
+mtp_operation_end_edit_object="\xC5\x95"
+
+# Association (for example, a folder)
+mtp_format_association="\x01\x30"
+
+# types
+mtp_type_undefined="\x00\x00"
+mtp_type_int8="\x01\x00"
+mtp_type_uint8="\x02\x00"
+mtp_type_int16="\x03\x00"
+mtp_type_uint16="\x04\x00"
+mtp_type_int32="\x05\x00"
+mtp_type_uint32="\x06\x00"
+mtp_type_int64="\x07\x00"
+mtp_type_uint64="\x08\x00"
+mtp_type_int128="\x09\x00"
+mtp_type_uint128="\x0A\x00"
+mtp_type_aint8="\x01\x40"
+mtp_type_auint8="\x02\x40"
+mtp_type_aint16="\x03\x40"
+mtp_type_auint16="\x04\x40"
+mtp_type_aint32="\x05\x40"
+mtp_type_auint32="\x06\x40"
+mtp_type_aint64="\x07\x40"
+mtp_type_auint64="\x08\x40"
+mtp_type_aint128="\x09\x40"
+mtp_type_auint128="\x0A\x40"
+mtp_type_str="\xFF\xFF"
+
+# also used for max size (>4GB)
+mtp_parent_root="\xFF\xFF\xFF\xFF"
diff --git a/media/mtp/tests/PosixAsyncIOTest/Android.bp b/media/mtp/tests/PosixAsyncIOTest/Android.bp
new file mode 100644
index 0000000..09cf6b7
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/Android.bp
@@ -0,0 +1,40 @@
+//
+// Copyright (C) 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_mtp_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_mtp_license"],
+}
+
+cc_test {
+    name: "posix_async_io_test",
+    test_suites: ["device-tests"],
+    srcs: ["PosixAsyncIO_test.cpp"],
+    shared_libs: [
+        "libbase",
+        "libmtp",
+        "liblog",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+}
diff --git a/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
new file mode 100644
index 0000000..cbb10fb
--- /dev/null
+++ b/media/mtp/tests/PosixAsyncIOTest/AndroidTest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for posix_async_io_test">
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="posix_async_io_test->/data/local/tmp/posix_async_io_test" />
+    </target_preparer>
+    <option name="test-suite-tag" value="apct" />
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="posix_async_io_test" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/media/mtp/tests/PosixAsyncIO_test.cpp b/media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
similarity index 100%
rename from media/mtp/tests/PosixAsyncIO_test.cpp
rename to media/mtp/tests/PosixAsyncIOTest/PosixAsyncIO_test.cpp
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 43989bb..8d527e9 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -16,6 +16,37 @@
 // to refer to headers in parent directories and the headers live in
 // frameworks/av/include.
 
+package {
+    default_applicable_licenses: ["frameworks_av_media_ndk_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_media_ndk_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-MIT",
+        "SPDX-license-identifier-Unicode-DFS",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 ndk_library {
     name: "libmediandk",
     symbol_file: "libmediandk.map.txt",
@@ -43,11 +74,20 @@
         "com.android.media.swcodec",
     ],
     min_sdk_version: "29",
-    export_include_dirs: ["include"]
+    export_include_dirs: ["include"],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_library_shared {
     name: "libmediandk",
+    llndk: {
+        symbol_file: "libmediandk.map.txt",
+    },
 
     srcs: [
         "NdkJavaVMHelper.cpp",
@@ -76,9 +116,11 @@
 
     static_libs: [
         "libgrallocusage",
+        "libnativehelper_lazy",
     ],
 
     header_libs: [
+        "jni_headers",
         "libmediadrm_headers",
         "libmediametrics_headers",
     ],
@@ -103,10 +145,14 @@
         "libgui",
         "libui",
         "libmediandk_utils",
-        "libnativehelper",
     ],
 
-    export_include_dirs: ["include"],
+    export_header_lib_headers: ["jni_headers"],
+
+    export_include_dirs: [
+        "include",
+        "include_platform",
+    ],
 
     export_shared_lib_headers: [
         "libgui",
@@ -124,14 +170,6 @@
     },
 }
 
-llndk_library {
-    name: "libmediandk",
-    symbol_file: "libmediandk.map.txt",
-    export_include_dirs: [
-        "include",
-    ],
-}
-
 cc_library {
     name: "libmediandk_utils",
 
@@ -172,6 +210,7 @@
 
 cc_test {
     name: "AImageReaderWindowHandleTest",
+    test_suites: ["device-tests"],
     srcs: ["tests/AImageReaderWindowHandleTest.cpp"],
     shared_libs: [
         "libbinder",
@@ -195,3 +234,45 @@
         "frameworks/av/media/ndk/",
     ],
 }
+
+cc_library_static {
+    name: "libmediandk_format",
+
+    host_supported: true,
+
+    srcs: [
+        "NdkMediaFormat.cpp",
+    ],
+
+    header_libs: [
+        "libstagefright_foundation_headers",
+    ],
+
+    cflags: [
+        "-DEXPORT=__attribute__((visibility(\"default\")))",
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["include"],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+}
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 1145b7b..5fdd45a 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -58,7 +58,7 @@
     if (mIsClosed) {
         return;
     }
-    if (!mReader->mIsClosed) {
+    if (mReader->mIsOpen) {
         mReader->releaseImageLocked(this, releaseFenceFd);
     }
     // Should have been set to nullptr in releaseImageLocked
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 0e8cbcb..05115b9 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -30,6 +30,26 @@
 
 using namespace android;
 
+// Formats not listed in the public API, but still available to AImageReader
+enum AIMAGE_PRIVATE_FORMATS {
+    /**
+     * Unprocessed implementation-dependent raw depth measurements, opaque with
+     * 16-bit samples.
+     */
+    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
+
+    /**
+     * Device specific 10 bits depth RAW image format.
+     *
+     * <p>Unprocessed implementation-dependent raw depth measurements, opaque with 10 bit samples
+     * and device specific bit layout.</p>
+     */
+    AIMAGE_FORMAT_RAW_DEPTH10 = 0x1003,
+};
+
 // TODO: this only supports ImageReader
 struct AImage {
     AImage(AImageReader* reader, int32_t format, uint64_t usage, BufferItem* buffer,
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c0ceb3d..1067e24 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -21,7 +21,6 @@
 
 #include "NdkImagePriv.h"
 #include "NdkImageReaderPriv.h"
-#include <private/media/NdkImage.h>
 
 #include <cutils/atomic.h>
 #include <utils/Log.h>
@@ -73,6 +72,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return true;
         case AIMAGE_FORMAT_PRIVATE:
             // For private format, cpu usage is prohibited.
@@ -103,6 +103,7 @@
         case AIMAGE_FORMAT_Y8:
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
+        case AIMAGE_FORMAT_RAW_DEPTH10:
             return 1;
         case AIMAGE_FORMAT_PRIVATE:
             return 0;
@@ -274,7 +275,7 @@
 
 AImageReader::~AImageReader() {
     Mutex::Autolock _l(mLock);
-    LOG_FATAL_IF("AImageReader not closed before destruction", mIsClosed != true);
+    LOG_FATAL_IF(mIsOpen, "AImageReader not closed before destruction");
 }
 
 media_status_t
@@ -348,16 +349,16 @@
     }
     mHandler = new CallbackHandler(this);
     mCbLooper->registerHandler(mHandler);
-
+    mIsOpen = true;
     return AMEDIA_OK;
 }
 
 void AImageReader::close() {
     Mutex::Autolock _l(mLock);
-    if (mIsClosed) {
+    if (!mIsOpen) {
         return;
     }
-    mIsClosed = true;
+    mIsOpen = false;
     AImageReader_ImageListener nullListener = {nullptr, nullptr};
     setImageListenerLocked(&nullListener);
 
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 0779a71..37c606e 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -166,7 +166,7 @@
     native_handle_t*           mWindowHandle = nullptr;
 
     List<AImage*>              mAcquiredImages;
-    bool                       mIsClosed = false;
+    bool                       mIsOpen = false;
 
     Mutex                      mLock;
 };
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index af21a99..1ae2b44 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -19,7 +19,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NdkMediaCodec"
 
-#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCodecPlatform.h>
 #include <media/NdkMediaError.h>
 #include <media/NdkMediaFormatPriv.h>
 #include "NdkMediaCryptoPriv.h"
@@ -45,6 +45,10 @@
         return AMEDIA_OK;
     } else if (err == -EAGAIN) {
         return (media_status_t) AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+    } else if (err == NO_MEMORY) {
+        return AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE;
+    } else if (err == DEAD_OBJECT) {
+        return AMEDIACODEC_ERROR_RECLAIMED;
     }
     ALOGE("sf error code: %d", err);
     return AMEDIA_ERROR_UNKNOWN;
@@ -255,7 +259,7 @@
                          break;
                      }
                      msg->findString("detail", &detail);
-                     ALOGE("Decoder reported error(0x%x), actionCode(%d), detail(%s)",
+                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
@@ -308,7 +312,11 @@
 
 extern "C" {
 
-static AMediaCodec * createAMediaCodec(const char *name, bool name_is_type, bool encoder) {
+static AMediaCodec * createAMediaCodec(const char *name,
+                                       bool name_is_type,
+                                       bool encoder,
+                                       pid_t pid = android::MediaCodec::kNoPid,
+                                       uid_t uid = android::MediaCodec::kNoUid) {
     AMediaCodec *mData = new AMediaCodec();
     mData->mLooper = new ALooper;
     mData->mLooper->setName("NDK MediaCodec_looper");
@@ -322,9 +330,20 @@
         return NULL;
     }
     if (name_is_type) {
-        mData->mCodec = android::MediaCodec::CreateByType(mData->mLooper, name, encoder);
+        mData->mCodec = android::MediaCodec::CreateByType(
+                mData->mLooper,
+                name,
+                encoder,
+                nullptr /* err */,
+                pid,
+                uid);
     } else {
-        mData->mCodec = android::MediaCodec::CreateByComponentName(mData->mLooper, name);
+        mData->mCodec = android::MediaCodec::CreateByComponentName(
+                mData->mLooper,
+                name,
+                nullptr /* err */,
+                pid,
+                uid);
     }
     if (mData->mCodec == NULL) {  // failed to create codec
         AMediaCodec_delete(mData);
@@ -344,17 +363,38 @@
 
 EXPORT
 AMediaCodec* AMediaCodec_createCodecByName(const char *name) {
-    return createAMediaCodec(name, false, false);
+    return createAMediaCodec(name, false /* name_is_type */, false /* encoder */);
 }
 
 EXPORT
 AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) {
-    return createAMediaCodec(mime_type, true, false);
+    return createAMediaCodec(mime_type, true /* name_is_type */, false /* encoder */);
 }
 
 EXPORT
 AMediaCodec* AMediaCodec_createEncoderByType(const char *name) {
-    return createAMediaCodec(name, true, true);
+    return createAMediaCodec(name, true /* name_is_type */, true /* encoder */);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createCodecByNameForClient(const char *name,
+                                                    pid_t pid,
+                                                    uid_t uid) {
+    return createAMediaCodec(name, false /* name_is_type */, false /* encoder */, pid, uid);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createDecoderByTypeForClient(const char *mime_type,
+                                                      pid_t pid,
+                                                      uid_t uid) {
+    return createAMediaCodec(mime_type, true /* name_is_type */, false /* encoder */, pid, uid);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createEncoderByTypeForClient(const char *name,
+                                                      pid_t pid,
+                                                      uid_t uid) {
+    return createAMediaCodec(name, true /* name_is_type */, true /* encoder */, pid, uid);
 }
 
 EXPORT
@@ -914,5 +954,13 @@
     return AMEDIA_OK;
 }
 
+EXPORT const char* AMEDIACODEC_KEY_HDR10_PLUS_INFO = AMEDIAFORMAT_KEY_HDR10_PLUS_INFO;
+EXPORT const char* AMEDIACODEC_KEY_LOW_LATENCY = AMEDIAFORMAT_KEY_LOW_LATENCY;
+EXPORT const char* AMEDIACODEC_KEY_OFFSET_TIME = "time-offset-us";
+EXPORT const char* AMEDIACODEC_KEY_REQUEST_SYNC_FRAME = "request-sync";
+EXPORT const char* AMEDIACODEC_KEY_SUSPEND = "drop-input-frames";
+EXPORT const char* AMEDIACODEC_KEY_SUSPEND_TIME = "drop-start-time-us";
+EXPORT const char* AMEDIACODEC_KEY_VIDEO_BITRATE = "video-bitrate";
+
 } // extern "C"
 
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 3af9771..6e9945d 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -380,12 +380,15 @@
     }
     Vector<uint8_t> session;
     status_t status = mObj->mDrm->openSession(DrmPlugin::kSecurityLevelMax, session);
-    if (status == OK) {
-        mObj->mIds.push_front(session);
-        List<idvec_t>::iterator iter = mObj->mIds.begin();
-        sessionId->ptr = iter->array();
-        sessionId->length = iter->size();
+    if (status != OK) {
+        sessionId->ptr = NULL;
+        sessionId->length = 0;
+        return translateStatus(status);
     }
+    mObj->mIds.push_front(session);
+    List<idvec_t>::iterator iter = mObj->mIds.begin();
+    sessionId->ptr = iter->array();
+    sessionId->length = iter->size();
     return AMEDIA_OK;
 }
 
@@ -489,6 +492,7 @@
     } else {
         keySetId->ptr = NULL;
         keySetId->length = 0;
+        return translateStatus(status);
     }
     return AMEDIA_OK;
 }
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 0da0740..07fc5de 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -22,6 +22,7 @@
 #include <media/NdkMediaExtractor.h>
 #include <media/NdkMediaErrorPriv.h>
 #include <media/NdkMediaFormatPriv.h>
+#include "NdkJavaVMHelperPriv.h"
 #include "NdkMediaDataSourcePriv.h"
 
 
@@ -63,7 +64,10 @@
 AMediaExtractor* AMediaExtractor_new() {
     ALOGV("ctor");
     AMediaExtractor *mData = new AMediaExtractor();
-    mData->mImpl = new NuMediaExtractor();
+    mData->mImpl = new NuMediaExtractor(
+        NdkJavaVMHelper::getJNIEnv() != nullptr
+                ? NuMediaExtractor::EntryPoint::NDK_WITH_JVM
+                : NuMediaExtractor::EntryPoint::NDK_NO_JVM );
     return mData;
 }
 
@@ -415,6 +419,7 @@
 
 EXPORT
 media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex, AMediaFormat *fmt) {
+    ALOGV("AMediaExtractor_getSampleFormat");
     if (fmt == NULL) {
         return AMEDIA_ERROR_INVALID_PARAMETER;
     }
@@ -424,6 +429,9 @@
     if (err != OK) {
         return translate_error(err);
     }
+#ifdef LOG_NDEBUG
+    sampleMeta->dumpToLog();
+#endif
 
     sp<AMessage> meta;
     AMediaFormat_getFormat(fmt, &meta);
@@ -479,6 +487,19 @@
         meta->setBuffer(AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO, audioPresentationsData);
     }
 
+    int64_t val64;
+    if (sampleMeta->findInt64(kKeySampleFileOffset, &val64)) {
+        meta->setInt64("sample-file-offset", val64);
+        ALOGV("SampleFileOffset Found");
+    }
+    if (sampleMeta->findInt64(kKeyLastSampleIndexInChunk, &val64)) {
+        meta->setInt64("last-sample-index-in-chunk" /*AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK*/,
+                       val64);
+        ALOGV("kKeyLastSampleIndexInChunk Found");
+    }
+
+    ALOGV("AMediaFormat_toString:%s", AMediaFormat_toString(fmt));
+
     return AMEDIA_OK;
 }
 
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index ab0cb63..c1793ce 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -26,9 +26,6 @@
 #include <utils/StrongPointer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <android_util_Binder.h>
-
-#include <jni.h>
 
 using namespace android;
 
@@ -337,6 +334,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_IS_SYNC_FRAME = "is-sync-frame";
 EXPORT const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL = "i-frame-interval";
 EXPORT const char* AMEDIAFORMAT_KEY_LANGUAGE = "language";
+EXPORT const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK = "last-sample-index-in-chunk";
 EXPORT const char* AMEDIAFORMAT_KEY_LATENCY = "latency";
 EXPORT const char* AMEDIAFORMAT_KEY_LEVEL = "level";
 EXPORT const char* AMEDIAFORMAT_KEY_LOCATION = "location";
@@ -362,17 +360,21 @@
 EXPORT const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP = "push-blank-buffers-on-shutdown";
 EXPORT const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER = "repeat-previous-frame-after";
 EXPORT const char* AMEDIAFORMAT_KEY_ROTATION = "rotation-degrees";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET = "sample-file-offset";
 EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_RATE = "sample-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND = "sample-time-before-append";
 EXPORT const char* AMEDIAFORMAT_KEY_SAR_HEIGHT = "sar-height";
 EXPORT const char* AMEDIAFORMAT_KEY_SAR_WIDTH = "sar-width";
 EXPORT const char* AMEDIAFORMAT_KEY_SEI = "sei";
 EXPORT const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT = "slice-height";
+EXPORT const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS = "slow-motion-markers";
 EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
 EXPORT const char* AMEDIAFORMAT_KEY_TARGET_TIME = "target-time";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT = "temporal-layer-count";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID = "temporal-layer-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING = "ts-schema";
 EXPORT const char* AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA = "text-format-data";
+EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C = "thumbnail-csd-av1c";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC = "thumbnail-csd-hevc";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT = "thumbnail-height";
 EXPORT const char* AMEDIAFORMAT_KEY_THUMBNAIL_TIME = "thumbnail-time";
@@ -384,7 +386,17 @@
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
 EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MAX = "video-qp-b-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MIN = "video-qp-b-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MAX = "video-qp-i-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MIN = "video-qp-i-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_MAX = "video-qp-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_MIN = "video-qp-min";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_P_MAX = "video-qp-p-max";
+EXPORT const char* AMEDIAFORMAT_VIDEO_QP_P_MIN = "video-qp-p-min";
 EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
+EXPORT const char* AMEDIAFORMAT_KEY_XMP_OFFSET = "xmp-offset";
+EXPORT const char* AMEDIAFORMAT_KEY_XMP_SIZE = "xmp-size";
 EXPORT const char* AMEDIAFORMAT_KEY_YEAR = "year";
 
 } // extern "C"
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
index d1992bf..1965e62 100644
--- a/media/ndk/NdkMediaMuxer.cpp
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -17,28 +17,24 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NdkMediaMuxer"
 
-
-#include <media/NdkMediaMuxer.h>
+#include <android_util_Binder.h>
+#include <jni.h>
+#include <media/IMediaHTTPService.h>
 #include <media/NdkMediaCodec.h>
 #include <media/NdkMediaErrorPriv.h>
 #include <media/NdkMediaFormatPriv.h>
-
-
-#include <utils/Log.h>
-#include <utils/StrongPointer.h>
+#include <media/NdkMediaMuxer.h>
+#include <media/stagefright/MediaAppender.h>
+#include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaMuxer.h>
-#include <media/IMediaHTTPService.h>
-#include <android_util_Binder.h>
-
-#include <jni.h>
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
 
 using namespace android;
 
 struct AMediaMuxer {
-    sp<MediaMuxer> mImpl;
-
+    sp<MediaMuxerBase> mImpl;
 };
 
 extern "C" {
@@ -46,8 +42,15 @@
 EXPORT
 AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format) {
     ALOGV("ctor");
-    AMediaMuxer *mData = new AMediaMuxer();
-    mData->mImpl = new MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+    AMediaMuxer *mData = new (std::nothrow) AMediaMuxer();
+    if (mData == nullptr) {
+        return nullptr;
+    }
+    mData->mImpl = new (std::nothrow) MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+    if (mData->mImpl == nullptr) {
+        delete mData;
+        return nullptr;
+    }
     return mData;
 }
 
@@ -94,6 +97,34 @@
             muxer->mImpl->writeSampleData(buf, trackIdx, info->presentationTimeUs, info->flags));
 }
 
+EXPORT
+AMediaMuxer* AMediaMuxer_append(int fd, AppendMode mode) {
+    ALOGV("append");
+    AMediaMuxer* mData = new (std::nothrow) AMediaMuxer();
+    if (mData == nullptr) {
+        return nullptr;
+    }
+    mData->mImpl = MediaAppender::create(fd, (android::MediaAppender::AppendMode)mode);
+    if (mData->mImpl == nullptr) {
+        delete mData;
+        return nullptr;
+    }
+    return mData;
+}
+
+EXPORT
+ssize_t AMediaMuxer_getTrackCount(AMediaMuxer* muxer) {
+    return muxer->mImpl->getTrackCount();
+}
+
+EXPORT
+AMediaFormat* AMediaMuxer_getTrackFormat(AMediaMuxer* muxer, size_t idx) {
+    sp<AMessage> format = muxer->mImpl->getTrackFormat(idx);
+    if (format != nullptr) {
+        return AMediaFormat_fromMsg(&format);
+    }
+    return nullptr;
+}
 
 } // extern "C"
 
diff --git a/media/ndk/TEST_MAPPING b/media/ndk/TEST_MAPPING
new file mode 100644
index 0000000..e420812
--- /dev/null
+++ b/media/ndk/TEST_MAPPING
@@ -0,0 +1,7 @@
+// mappings for frameworks/av/media/ndk
+{
+  "presubmit": [
+    { "name": "AImageReaderWindowHandleTest" },
+    { "name": "libmediandk_test" }
+  ]
+}
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 62b8624..71bc6d9 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -50,7 +50,10 @@
  */
 typedef struct AImage AImage;
 
-// Formats not listed here will not be supported by AImageReader
+/**
+ * AImage supported formats: AImageReader only guarantees support for the formats
+ * listed here.
+ */
 enum AIMAGE_FORMATS {
     /**
      * 32 bits RGBA format, 8 bits for each of the four channels.
@@ -559,8 +562,6 @@
     int32_t bottom;
 } AImageCropRect;
 
-#if __ANDROID_API__ >= 24
-
 /**
  * Return the image back to the system and delete the AImage object from memory.
  *
@@ -777,10 +778,6 @@
         const AImage* image, int planeIdx,
         /*out*/uint8_t** data, /*out*/int* dataLength) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 26
-
 /**
  * Return the image back to the system and delete the AImage object from memory asynchronously.
  *
@@ -819,7 +816,7 @@
  * Available since API level 26.
  *
  * @param image the {@link AImage} of interest.
- * @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
+ * @param buffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
  *         handle.
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -829,8 +826,6 @@
  */
 media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer) __INTRODUCED_IN(26);
 
-#endif /* __ANDROID_API__ >= 26 */
-
 __END_DECLS
 
 #endif //_NDK_IMAGE_H
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 600ffc9..4bd7f2a 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -53,8 +53,6 @@
  */
 typedef struct AImageReader AImageReader;
 
-#if __ANDROID_API__ >= 24
-
 /**
  * Create a new reader for images of the desired size and format.
  *
@@ -320,10 +318,6 @@
 media_status_t AImageReader_setImageListener(
         AImageReader* reader, AImageReader_ImageListener* listener) __INTRODUCED_IN(24);
 
-#endif /* __ANDROID_API__ >= 24 */
-
-#if __ANDROID_API__ >= 26
-
 /**
  * AImageReader constructor similar to {@link AImageReader_new} that takes an additional parameter
  * for the consumer usage. All other parameters and the return values are identical to those passed
@@ -334,10 +328,10 @@
  * still acquire images from this {@link AImageReader} and access {@link AHardwareBuffer} via
  * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} gained this way can then
  * be passed back to hardware (such as GPU or hardware encoder if supported) for future processing.
- * For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer} by using
- * {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer} to {@link
- * eglCreateImageKHR} to create an {@link EGLImage} resource type, which may then be bound to a
- * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * For example, you can obtain an EGLClientBuffer from the {@link AHardwareBuffer} by using
+ * eglGetNativeClientBufferANDROID extension and pass that EGLClientBuffer to
+ * eglCreateImageKHR to create an EGLImage resource type, which may then be bound to a
+ * texture via glEGLImageTargetTexture2DOES on supported devices. This can be useful for
  * transporting textures that may be shared cross-process.</p>
  * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
  * created with {@link AIMAGE_FORMAT_PRIVATE} format is more efficient, compared with {@link
@@ -345,7 +339,7 @@
  *
  * <p>Note that not all format and usage flag combinations are supported by the {@link AImageReader},
  * especially if \c format is {@link AIMAGE_FORMAT_PRIVATE}, \c usage must not include either
- * {@link AHARDWAREBUFFER_USAGE_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_READ_OFTEN}</p>
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</p>
  *
  * @param width The default width in pixels of the Images that this reader will produce.
  * @param height The default height in pixels of the Images that this reader will produce.
@@ -364,7 +358,7 @@
  *   <th>Compatible usage flags</th>
  * </tr>
  * <tr>
- *   <td>non-{@link AIMAGE_FORMAT_PRIVATE PRIVATE} formats defined in {@link AImage.h}
+ *   <td>non-{@link AIMAGE_FORMAT_PRIVATE} formats defined in {@link NdkImage.h}
  * </td>
  *   <td>{@link AHARDWAREBUFFER_USAGE_CPU_READ_RARELY} or
  *   {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}</td>
@@ -447,6 +441,10 @@
         AImageReader* reader,
         AHardwareBuffer* buffer);
 
+/**
+ * A listener for the AHardwareBuffer removal event. Use
+ * {@link AImageReader_setBufferRemovedListener} to register the listener object with the AImageReader.
+ */
 typedef struct AImageReader_BufferRemovedListener {
     /// Optional application context passed as the first parameter of the callback.
     void*                      context;
@@ -510,8 +508,6 @@
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
 
-#endif /* __ANDROID_API__ >= 26 */
-
 __END_DECLS
 
 #endif //_NDK_IMAGE_READER_H
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 8fb6a87..519148e 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -114,14 +114,12 @@
         int32_t actionCode,
         const char *detail);
 
-struct AMediaCodecOnAsyncNotifyCallback {
+typedef struct AMediaCodecOnAsyncNotifyCallback {
       AMediaCodecOnAsyncInputAvailable  onAsyncInputAvailable;
       AMediaCodecOnAsyncOutputAvailable onAsyncOutputAvailable;
       AMediaCodecOnAsyncFormatChanged   onAsyncFormatChanged;
       AMediaCodecOnAsyncError           onAsyncError;
-};
-
-#if __ANDROID_API__ >= 21
+} AMediaCodecOnAsyncNotifyCallback;
 
 /**
  * Create codec by name. Use this if you know the exact codec you want to use.
@@ -311,8 +309,6 @@
 media_status_t AMediaCodec_releaseOutputBufferAtTime(
         AMediaCodec *mData, size_t idx, int64_t timestampNs) __INTRODUCED_IN(21);
 
-#if __ANDROID_API__ >= 26
-
 /**
  * Creates a Surface that can be used as the input to encoder, in place of input buffers
  *
@@ -393,10 +389,6 @@
  */
 media_status_t AMediaCodec_signalEndOfInputStream(AMediaCodec *mData) __INTRODUCED_IN(26);
 
-#endif /* __ANDROID_API__ >= 26 */
-
-#if __ANDROID_API__ >= 28
-
 /**
  * Get format of the buffer. The specified buffer index must have been previously obtained from
  * dequeueOutputBuffer.
@@ -482,8 +474,6 @@
  */
 bool AMediaCodecActionCode_isTransient(int32_t actionCode) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
 typedef enum {
     AMEDIACODECRYPTOINFO_MODE_CLEAR = 0,
     AMEDIACODECRYPTOINFO_MODE_AES_CTR = 1,
@@ -580,7 +570,13 @@
  */
 media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo*, size_t *dst) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
+extern const char* AMEDIACODEC_KEY_HDR10_PLUS_INFO __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_LOW_LATENCY __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_OFFSET_TIME __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_REQUEST_SYNC_FRAME __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_SUSPEND __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_SUSPEND_TIME __INTRODUCED_IN(31);
+extern const char* AMEDIACODEC_KEY_VIDEO_BITRATE __INTRODUCED_IN(31);
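+/*
+ * A minimal usage sketch (illustrative only; assumes `codec` is an already configured and
+ * started video encoder). These keys can be set on an AMediaFormat that is then passed to
+ * AMediaCodec_setParameters() to adjust the codec at runtime:
+ *
+ *   AMediaFormat *params = AMediaFormat_new();
+ *   AMediaFormat_setInt32(params, AMEDIACODEC_KEY_VIDEO_BITRATE, 2000000);  // new bitrate in bps
+ *   AMediaFormat_setInt32(params, AMEDIACODEC_KEY_REQUEST_SYNC_FRAME, 0);   // request a sync frame
+ *   AMediaCodec_setParameters(codec, params);
+ *   AMediaFormat_delete(params);
+ */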
 
 __END_DECLS
 
diff --git a/media/ndk/include/media/NdkMediaCrypto.h b/media/ndk/include/media/NdkMediaCrypto.h
index 3fa07c7..590d51d 100644
--- a/media/ndk/include/media/NdkMediaCrypto.h
+++ b/media/ndk/include/media/NdkMediaCrypto.h
@@ -47,8 +47,6 @@
 
 typedef uint8_t AMediaUUID[16];
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Available since API level 21.
  */
@@ -69,8 +67,6 @@
  */
 void AMediaCrypto_delete(AMediaCrypto* crypto) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_CRYPTO_H
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 0577df2..4158a97 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -38,8 +38,6 @@
 struct AMediaDataSource;
 typedef struct AMediaDataSource AMediaDataSource;
 
-#if __ANDROID_API__ >= 28
-
 /*
  * AMediaDataSource's callbacks will be invoked on an implementation-defined thread
  * or thread pool. No guarantees are provided about which thread(s) will be used for
@@ -93,8 +91,6 @@
  */
 AMediaDataSource* AMediaDataSource_new() __INTRODUCED_IN(28);
 
-#if __ANDROID_API__ >= 29
-
 /**
  * Called to get an estimate of the number of bytes that can be read from this data source
  * starting at |offset| without blocking for I/O.
@@ -124,8 +120,6 @@
         int numheaders,
         const char * const *key_values) __INTRODUCED_IN(29);
 
-#endif  /*__ANDROID_API__ >= 29 */
-
 /**
  * Delete a previously created media data source.
  *
@@ -185,10 +179,6 @@
         AMediaDataSource*,
         AMediaDataSourceClose) __INTRODUCED_IN(28);
 
-#endif  /*__ANDROID_API__ >= 28 */
-
-#if __ANDROID_API__ >= 29
-
 /**
  * Close the data source, unblock reads, and release associated resources.
  *
@@ -213,8 +203,6 @@
         AMediaDataSource*,
         AMediaDataSourceGetAvailableSize) __INTRODUCED_IN(29);
 
-#endif  /*__ANDROID_API__ >= 29 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_DATASOURCE_H
diff --git a/media/ndk/include/media/NdkMediaDrm.h b/media/ndk/include/media/NdkMediaDrm.h
index 31f5c7d..849a8f9 100644
--- a/media/ndk/include/media/NdkMediaDrm.h
+++ b/media/ndk/include/media/NdkMediaDrm.h
@@ -165,8 +165,6 @@
         const AMediaDrmSessionId *sessionId, const AMediaDrmKeyStatus *keyStatus,
         size_t numKeys, bool hasNewUsableKey);
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Query if the given scheme identified by its UUID is supported on this device, and
  * whether the drm plugin is able to handle the media container format specified by mimeType.
@@ -576,8 +574,6 @@
         const char *macAlgorithm, uint8_t *keyId, const uint8_t *message, size_t messageSize,
         const uint8_t *signature, size_t signatureSize) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif //_NDK_MEDIA_DRM_H
diff --git a/media/ndk/include/media/NdkMediaError.h b/media/ndk/include/media/NdkMediaError.h
index 2be1d6e..02fdc79 100644
--- a/media/ndk/include/media/NdkMediaError.h
+++ b/media/ndk/include/media/NdkMediaError.h
@@ -40,7 +40,11 @@
 
 __BEGIN_DECLS
 
+/**
+ * Media error codes returned by NDK media functions.
+ */
 typedef enum {
+    /** The requested media operation completed successfully. */
     AMEDIA_OK = 0,
 
     /**
@@ -55,14 +59,34 @@
     AMEDIACODEC_ERROR_RECLAIMED             = 1101,
 
     AMEDIA_ERROR_BASE                  = -10000,
+
+    /** The called media function failed with an unknown error. */
     AMEDIA_ERROR_UNKNOWN               = AMEDIA_ERROR_BASE,
+
+    /** The input media data is corrupt or incomplete. */
     AMEDIA_ERROR_MALFORMED             = AMEDIA_ERROR_BASE - 1,
+
+    /** The required operation or media formats are not supported. */
     AMEDIA_ERROR_UNSUPPORTED           = AMEDIA_ERROR_BASE - 2,
+
+    /** An invalid (or already closed) object is used in the function call. */
     AMEDIA_ERROR_INVALID_OBJECT        = AMEDIA_ERROR_BASE - 3,
+
+    /** At least one of the provided parameters is invalid. */
     AMEDIA_ERROR_INVALID_PARAMETER     = AMEDIA_ERROR_BASE - 4,
+
+    /** The media object is not in the right state for the required operation. */
     AMEDIA_ERROR_INVALID_OPERATION     = AMEDIA_ERROR_BASE - 5,
+
+    /** The media stream ended while processing the requested operation. */
     AMEDIA_ERROR_END_OF_STREAM         = AMEDIA_ERROR_BASE - 6,
+
+    /** An error occurred while the media object was carrying out an I/O operation. */
     AMEDIA_ERROR_IO                    = AMEDIA_ERROR_BASE - 7,
+
+    /** The required operation would block (on I/O or otherwise),
+     *   but blocking is not enabled.
+     */
     AMEDIA_ERROR_WOULD_BLOCK           = AMEDIA_ERROR_BASE - 8,
 
     AMEDIA_DRM_ERROR_BASE              = -20000,
@@ -77,10 +101,20 @@
     AMEDIA_DRM_LICENSE_EXPIRED         = AMEDIA_DRM_ERROR_BASE - 9,
 
     AMEDIA_IMGREADER_ERROR_BASE          = -30000,
+
+    /** There are no more image buffers to read/write image data. */
     AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE = AMEDIA_IMGREADER_ERROR_BASE - 1,
+
+    /** The AImage object has used up the allowed maximum image buffers. */
     AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED = AMEDIA_IMGREADER_ERROR_BASE - 2,
+
+    /** The required image buffer could not be locked to read. */
     AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE   = AMEDIA_IMGREADER_ERROR_BASE - 3,
+
+    /** The media data or buffer could not be unlocked. */
     AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE = AMEDIA_IMGREADER_ERROR_BASE - 4,
+
+    /** The media/buffer needs to be locked to perform the required operation. */
     AMEDIA_IMGREADER_IMAGE_NOT_LOCKED    = AMEDIA_IMGREADER_ERROR_BASE - 5,
 
 } media_status_t;
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index 14319c4..e429820 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -36,6 +36,7 @@
 #ifndef _NDK_MEDIA_EXTRACTOR_H
 #define _NDK_MEDIA_EXTRACTOR_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
@@ -49,8 +50,6 @@
 struct AMediaExtractor;
 typedef struct AMediaExtractor AMediaExtractor;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Create new media extractor.
  *
@@ -81,8 +80,6 @@
 media_status_t AMediaExtractor_setDataSource(AMediaExtractor*,
         const char *location) __INTRODUCED_IN(21);
 
-#if __ANDROID_API__ >= 28
-
 /**
  * Set the custom data source implementation from which the extractor will read.
  *
@@ -91,8 +88,6 @@
 media_status_t AMediaExtractor_setDataSourceCustom(AMediaExtractor*,
         AMediaDataSource *src) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
 /**
  * Return the number of tracks in the previously specified media file
  *
@@ -210,8 +205,6 @@
     AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED = 2,
 };
 
-#if __ANDROID_API__ >= 28
-
 /**
  * Returns the format of the extractor. The caller must free the returned format
  * using AMediaFormat_delete(format).
@@ -265,10 +258,6 @@
 media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex,
         AMediaFormat *fmt) __INTRODUCED_IN(28);
 
-#endif /* __ANDROID_API__ >= 28 */
-
-#endif /* __ANDROID_API__ >= 21 */
-
 __END_DECLS
 
 #endif // _NDK_MEDIA_EXTRACTOR_H
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 49d8b4a..fbd855d 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -36,9 +36,14 @@
 #ifndef _NDK_MEDIA_FORMAT_H
 #define _NDK_MEDIA_FORMAT_H
 
+#include <stdbool.h>
 #include <sys/cdefs.h>
 #include <sys/types.h>
 
+#if !defined(__INTRODUCED_IN)
+#define __INTRODUCED_IN(__api_level) /* nothing */
+#endif
+
 #include "NdkMediaError.h"
 
 __BEGIN_DECLS
@@ -46,8 +51,6 @@
 struct AMediaFormat;
 typedef struct AMediaFormat AMediaFormat;
 
-#if __ANDROID_API__ >= 21
-
 /**
  * Available since API level 21.
  */
@@ -190,9 +193,7 @@
 extern const char* AMEDIAFORMAT_KEY_TRACK_ID __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_TRACK_INDEX __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_WIDTH __INTRODUCED_IN(21);
-#endif /* __ANDROID_API__ >= 21 */
 
-#if __ANDROID_API__ >= 28
 /**
  * Available since API level 28.
  */
@@ -216,9 +217,7 @@
  */
 void AMediaFormat_setRect(AMediaFormat*, const char* name,
         int32_t left, int32_t top, int32_t right, int32_t bottom) __INTRODUCED_IN(28);
-#endif /* __ANDROID_API__ >= 28 */
 
-#if __ANDROID_API__ >= 29
 /**
  * Remove all key/value pairs from the given AMediaFormat.
  *
@@ -292,9 +291,6 @@
 extern const char* AMEDIAFORMAT_KEY_VALID_SAMPLES __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_YEAR __INTRODUCED_IN(29);
 
-#endif /* __ANDROID_API__ >= 29 */
-
-#if __ANDROID_API__ >= 30
 /**
  * An optional key describing the low latency decoding mode. This is an optional parameter
  * that applies only to decoders. If enabled, the decoder doesn't hold input and output
@@ -305,7 +301,24 @@
  * Available since API level 30.
  */
 extern const char* AMEDIAFORMAT_KEY_LOW_LATENCY __INTRODUCED_IN(30);
-#endif /* __ANDROID_API__ >= 30 */
+
+extern const char* AMEDIAFORMAT_KEY_HDR10_PLUS_INFO __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_XMP_OFFSET __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_XMP_SIZE __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND __INTRODUCED_IN(31);
+
+extern const char* AMEDIAFORMAT_VIDEO_QP_B_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_B_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_I_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_I_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_MIN __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_P_MAX __INTRODUCED_IN(31);
+extern const char* AMEDIAFORMAT_VIDEO_QP_P_MIN __INTRODUCED_IN(31);
 
 __END_DECLS
 
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index 9de3fbf..d7eccb8 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -54,7 +54,16 @@
     AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP   = 2,
 } OutputFormat;
 
-#if __ANDROID_API__ >= 21
+typedef enum {
+    /* The last group of pictures (GOP) of the video track can be incomplete, so it is safest
+     * to discard it and rewrite it.  If both audio and video tracks are present in a file,
+     * the audio samples after the last video GOP are discarded as well.
+     * If only an audio track is present, no samples are discarded.
+     */
+    AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP = 0,
+    // Keep all existing samples as they are and only append new samples after them.
+    AMEDIAMUXER_APPEND_TO_EXISTING_DATA = 1,
+} AppendMode;
 
 /**
  * Create new media muxer.
@@ -140,7 +149,45 @@
         size_t trackIdx, const uint8_t *data,
         const AMediaCodecBufferInfo *info) __INTRODUCED_IN(21);
 
-#endif /* __ANDROID_API__ >= 21 */
+/**
+ * Creates a new media muxer for appending data to an existing MPEG4 file.
+ * This is a synchronous API call and could take a while to return if the existing file is large.
+ * Only works for MPEG4 files matching one of the following characteristics:
+ * <ul>
+ *    <li>a single audio track.</li>
+ *    <li>a single video track.</li>
+ *    <li>a single audio and a single video track.</li>
+ * </ul>
+ * @param fd Must be opened with read and write permission. Does not take ownership of
+ * this fd, i.e., the caller is responsible for closing it.
+ * @param mode Specifies how data will be appended; the AppendMode enum describes
+ *             the possible methods for appending.
+ * @return Pointer to AMediaMuxer if the file (fd) already has tracks; otherwise, nullptr.
+ * {@link AMediaMuxer_delete} should be used to free the returned pointer.
+ *
+ * Available since API level 31.
+ */
+AMediaMuxer* AMediaMuxer_append(int fd, AppendMode mode) __INTRODUCED_IN(31);
+
+/**
+ * Returns the number of tracks added in the file passed to {@link AMediaMuxer_new} or
+ * the number of existing tracks in the file passed to {@link AMediaMuxer_append}.
+ * Should be called in INITIALIZED or STARTED state, otherwise returns -1.
+ *
+ * Available since API level 31.
+ */
+ssize_t AMediaMuxer_getTrackCount(AMediaMuxer*) __INTRODUCED_IN(31);
+
+/**
+ * Returns AMediaFormat of the added track with index idx in the file passed to
+ * {@link AMediaMuxer_new} or the AMediaFormat of the existing track with index idx
+ * in the file passed to {@link AMediaMuxer_append}.
+ * Should be called in INITIALIZED or STARTED state, otherwise returns nullptr.
+ * {@link AMediaFormat_delete} should be used to free the returned pointer.
+ *
+ * Available since API level 31.
+ */
+AMediaFormat* AMediaMuxer_getTrackFormat(AMediaMuxer* muxer, size_t idx) __INTRODUCED_IN(31);
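+/*
+ * A minimal usage sketch for the append APIs above (illustrative only; error handling omitted,
+ * assumes <fcntl.h>/<unistd.h> for open()/close(), a placeholder file path, and that the caller
+ * already has new encoded samples plus an AMediaCodecBufferInfo to write):
+ *
+ *   int fd = open("/path/to/existing.mp4", O_RDWR);   // caller keeps ownership of fd
+ *   AMediaMuxer *muxer = AMediaMuxer_append(fd, AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP);
+ *   if (muxer != NULL) {
+ *       ssize_t count = AMediaMuxer_getTrackCount(muxer);
+ *       for (ssize_t i = 0; i < count; ++i) {
+ *           AMediaFormat *fmt = AMediaMuxer_getTrackFormat(muxer, i);
+ *           // inspect the existing track, e.g. via AMediaFormat_toString(fmt)
+ *           AMediaFormat_delete(fmt);
+ *       }
+ *       AMediaMuxer_start(muxer);
+ *       // AMediaMuxer_writeSampleData(muxer, trackIdx, data, &info) for the new samples
+ *       AMediaMuxer_stop(muxer);
+ *       AMediaMuxer_delete(muxer);
+ *   }
+ *   close(fd);
+ */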
 
 __END_DECLS
 
diff --git a/media/ndk/include/private/media/NdkImage.h b/media/ndk/include/private/media/NdkImage.h
deleted file mode 100644
index 4368a56..0000000
--- a/media/ndk/include/private/media/NdkImage.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _PRIVATE_MEDIA_NDKIMAGE_H_
-#define _PRIVATE_MEDIA_NDKIMAGE_H_
-// Formats not listed in the public API, but still available to AImageReader
-enum AIMAGE_PRIVATE_FORMATS {
-    /**
-     * Unprocessed implementation-dependent raw
-     * depth measurements, opaque with 16 bit
-     * samples.
-     *
-     */
-
-    AIMAGE_FORMAT_RAW_DEPTH = 0x1002,
-};
-#endif // _PRIVATE_MEDIA_NDKIMAGE
diff --git a/media/ndk/include_platform/media/NdkMediaCodecPlatform.h b/media/ndk/include_platform/media/NdkMediaCodecPlatform.h
new file mode 100644
index 0000000..6edd712
--- /dev/null
+++ b/media/ndk/include_platform/media/NdkMediaCodecPlatform.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_PLATFORM_H
+#define _NDK_MEDIA_CODEC_PLATFORM_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+
+#include <media/NdkMediaCodec.h>
+
+__BEGIN_DECLS
+
+/**
+ * Special uid and pid values used with AMediaCodec_createCodecByNameForClient,
+ * AMediaCodec_createDecoderByTypeForClient and AMediaCodec_createEncoderByTypeForClient.
+ *
+ * Introduced in API 31.
+ */
+enum {
+    /**
+     * Uid value to indicate using calling uid.
+     */
+    AMEDIACODEC_CALLING_UID = -1,
+    /**
+     * Pid value to indicate using calling pid.
+     */
+    AMEDIACODEC_CALLING_PID = -1,
+};
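+
+/*
+ * A minimal usage sketch for the client-attributed factory functions declared below
+ * (illustrative only; "video/avc", clientPid and clientUid are caller-supplied placeholders).
+ * Without the required privilege this behaves like AMediaCodec_createDecoderByType():
+ *
+ *   AMediaCodec *codec = AMediaCodec_createDecoderByTypeForClient(
+ *           "video/avc", clientPid, clientUid);
+ *   // To attribute the codec to the caller itself, pass the special values:
+ *   //   AMediaCodec_createDecoderByTypeForClient("video/avc",
+ *   //           AMEDIACODEC_CALLING_PID, AMEDIACODEC_CALLING_UID);
+ *   if (codec != NULL) {
+ *       // configure, start and use the codec as with any other AMediaCodec
+ *       AMediaCodec_delete(codec);
+ *   }
+ */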
+
+/**
+ * Create codec by name on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createCodecByName(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createCodecByName().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createCodecByNameForClient(const char *name,
+                                                    pid_t pid,
+                                                    uid_t uid) __INTRODUCED_IN(31);
+
+/**
+ * Create codec by mime type on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createDecoderByType(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createDecoderByType().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createDecoderByTypeForClient(const char *mime_type,
+                                                      pid_t pid,
+                                                      uid_t uid) __INTRODUCED_IN(31);
+
+/**
+ * Create encoder by mime type on behalf of a client.
+ *
+ * The usage is similar to AMediaCodec_createEncoderByType(), except that the codec instance
+ * will be attributed to the client of {uid, pid}, instead of the caller.
+ *
+ * Only certain privileged users are allowed to specify {uid, pid} that's different from the
+ * caller's. Without the privilege, this API will behave the same as
+ * AMediaCodec_createEncoderByType().
+ *
+ * Available since API level 31.
+ */
+AMediaCodec* AMediaCodec_createEncoderByTypeForClient(const char *mime_type,
+                                                      pid_t pid,
+                                                      uid_t uid) __INTRODUCED_IN(31);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_PLATFORM_H
+
+/** @} */
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 29f1da8..7e9e57e 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -27,6 +27,13 @@
     AImage_getPlaneRowStride; # introduced=24
     AImage_getTimestamp; # introduced=24
     AImage_getWidth; # introduced=24
+    AMEDIACODEC_KEY_HDR10_PLUS_INFO; # var introduced=31
+    AMEDIACODEC_KEY_LOW_LATENCY; # var introduced=31
+    AMEDIACODEC_KEY_OFFSET_TIME; # var introduced=31
+    AMEDIACODEC_KEY_REQUEST_SYNC_FRAME; # var introduced=31
+    AMEDIACODEC_KEY_SUSPEND; # var introduced=31
+    AMEDIACODEC_KEY_SUSPEND_TIME; # var introduced=31
+    AMEDIACODEC_KEY_VIDEO_BITRATE; # var introduced=31
     AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR; # var introduced=28
     AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR; # var introduced=28
     AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION; # var introduced=28
@@ -91,6 +98,7 @@
     AMEDIAFORMAT_KEY_GRID_ROWS; # var introduced=28
     AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT; # var introduced=29
     AMEDIAFORMAT_KEY_HDR_STATIC_INFO; # var introduced=28
+    AMEDIAFORMAT_KEY_HDR10_PLUS_INFO; # var introduced=31
     AMEDIAFORMAT_KEY_HEIGHT; # var introduced=21
     AMEDIAFORMAT_KEY_ICC_PROFILE; # var introduced=29
     AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD; # var introduced=28
@@ -101,6 +109,7 @@
     AMEDIAFORMAT_KEY_IS_SYNC_FRAME; # var introduced=29
     AMEDIAFORMAT_KEY_I_FRAME_INTERVAL; # var introduced=21
     AMEDIAFORMAT_KEY_LANGUAGE; # var introduced=21
+    AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK; # var introduced=31
     AMEDIAFORMAT_KEY_LATENCY; # var introduced=28
     AMEDIAFORMAT_KEY_LEVEL; # var introduced=28
     AMEDIAFORMAT_KEY_LOCATION; # var introduced=29
@@ -126,17 +135,21 @@
     AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP; # var introduced=21
     AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER; # var introduced=21
     AMEDIAFORMAT_KEY_ROTATION; # var introduced=28
+    AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET; # var introduced=31
+    AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND; # var introduced=31
     AMEDIAFORMAT_KEY_SAMPLE_RATE; # var introduced=21
     AMEDIAFORMAT_KEY_SAR_HEIGHT; # var introduced=29
     AMEDIAFORMAT_KEY_SAR_WIDTH; # var introduced=29
     AMEDIAFORMAT_KEY_SEI; # var introduced=28
     AMEDIAFORMAT_KEY_SLICE_HEIGHT; # var introduced=28
+    AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS; # var introduced=31
     AMEDIAFORMAT_KEY_STRIDE; # var introduced=21
     AMEDIAFORMAT_KEY_TARGET_TIME; # var introduced=29
     AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT; # var introduced=29
     AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID; # var introduced=28
     AMEDIAFORMAT_KEY_TEMPORAL_LAYERING; # var introduced=28
     AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA; # var introduced=29
+    AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C; # var introduced=31
     AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC; # var introduced=29
     AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT; # var introduced=29
     AMEDIAFORMAT_KEY_THUMBNAIL_TIME; # var introduced=29
@@ -149,6 +162,8 @@
     AMEDIAFORMAT_KEY_TRACK_ID; # var introduced=28
     AMEDIAFORMAT_KEY_VALID_SAMPLES; # var introduced=29
     AMEDIAFORMAT_KEY_WIDTH; # var introduced=21
+    AMEDIAFORMAT_KEY_XMP_OFFSET; # var introduced=31
+    AMEDIAFORMAT_KEY_XMP_SIZE; # var introduced=31
     AMEDIAFORMAT_KEY_YEAR; # var introduced=29
     AMediaCodecActionCode_isRecoverable; # introduced=28
     AMediaCodecActionCode_isTransient; # introduced=28
@@ -163,8 +178,11 @@
     AMediaCodecCryptoInfo_setPattern; # introduced=24
     AMediaCodec_configure;
     AMediaCodec_createCodecByName;
+    AMediaCodec_createCodecByNameForClient; # apex # introduced=31
     AMediaCodec_createDecoderByType;
+    AMediaCodec_createDecoderByTypeForClient; # apex # introduced=31
     AMediaCodec_createEncoderByType;
+    AMediaCodec_createEncoderByTypeForClient; # apex # introduced=31
     AMediaCodec_delete;
     AMediaCodec_dequeueInputBuffer;
     AMediaCodec_dequeueOutputBuffer;
@@ -271,7 +289,10 @@
     AMediaFormat_setString;
     AMediaFormat_toString;
     AMediaMuxer_addTrack;
+    AMediaMuxer_append; # introduced=31
     AMediaMuxer_delete;
+    AMediaMuxer_getTrackCount; # introduced=31
+    AMediaMuxer_getTrackFormat; # introduced=31
     AMediaMuxer_new;
     AMediaMuxer_setLocation;
     AMediaMuxer_setOrientationHint;
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
index 5b65064..27864c2 100644
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
@@ -17,10 +17,10 @@
 #include <gtest/gtest.h>
 #include <media/NdkImageReader.h>
 #include <media/NdkImage.h>
-#include <private/media/NdkImage.h>
 #include <mediautils/AImageReaderUtils.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <NdkImagePriv.h>
 #include <NdkImageReaderPriv.h>
 #include <vndk/hardware_buffer.h>
 #include <memory>
diff --git a/media/ndk/tests/Android.bp b/media/ndk/tests/Android.bp
new file mode 100644
index 0000000..984b3ee
--- /dev/null
+++ b/media/ndk/tests/Android.bp
@@ -0,0 +1,41 @@
+// Copyright (C) 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Headers module is in frameworks/av/Android.bp because modules are not allowed
+// to refer to headers in parent directories and the headers live in
+// frameworks/av/include.
+
+package {
+    default_applicable_licenses: ["frameworks_av_media_ndk_license"],
+}
+
+cc_test {
+    name: "libmediandk_test",
+    test_suites: ["device-tests"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediandk",
+        "libutils",
+    ],
+
+    srcs: [
+        "NdkMediaFormat_test.cpp",
+    ],
+}
diff --git a/media/ndk/tests/NdkMediaFormat_test.cpp b/media/ndk/tests/NdkMediaFormat_test.cpp
new file mode 100644
index 0000000..668d0a4
--- /dev/null
+++ b/media/ndk/tests/NdkMediaFormat_test.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaFormat_test"
+
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include <media/NdkMediaFormat.h>
+
+namespace android {
+
+class NdkMediaFormatTest : public ::testing::Test {
+};
+
+
+TEST(NdkMediaFormat_tests, test_create) {
+
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   AMediaFormat *fmt2 = AMediaFormat_new();
+
+   EXPECT_NE(fmt1, fmt2);
+   EXPECT_NE(fmt1, nullptr);
+   EXPECT_NE(fmt2, nullptr);
+
+   AMediaFormat_delete(fmt1);
+   AMediaFormat_delete(fmt2);
+}
+
+TEST(NdkMediaFormat_tests, test_int32) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   int32_t i32;
+   int64_t i64;
+   AMediaFormat_setInt32(fmt1, "five", 5);
+
+   EXPECT_TRUE(AMediaFormat_getInt32(fmt1, "five", &i32));
+   EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
+   EXPECT_EQ(i32, 5);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_int64) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   int64_t i64;
+   AMediaFormat_setInt64(fmt1, "verylarge", INT64_MAX);
+
+   EXPECT_TRUE(AMediaFormat_getInt64(fmt1, "verylarge", &i64));
+   EXPECT_EQ(i64, INT64_MAX);
+
+   // return unchanged if not found
+   i64 = -1;
+   EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
+   EXPECT_EQ(i64, -1);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_size) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   size_t size = -15;
+   AMediaFormat_setSize(fmt1, "small", 1);
+   AMediaFormat_setSize(fmt1, "medium", 10);
+   AMediaFormat_setSize(fmt1, "large", 100);
+   EXPECT_TRUE(AMediaFormat_getSize(fmt1, "medium", &size));
+   EXPECT_EQ(size, 10);
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_float) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   float f;
+   AMediaFormat_setFloat(fmt1, "boat", 1.5);
+   AMediaFormat_setFloat(fmt1, "ship", 0.5);
+   EXPECT_TRUE(AMediaFormat_getFloat(fmt1, "boat", &f));
+   EXPECT_EQ(f, 1.5);
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_double) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   double d;
+   AMediaFormat_setDouble(fmt1, "trouble", 100.5);
+   AMediaFormat_setDouble(fmt1, "dip", 0.5);
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt1, "trouble", &d));
+   EXPECT_EQ(d, 100.5);
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_string) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   const char *content = "This is my test string";
+   const char *out = nullptr;
+   AMediaFormat_setString(fmt1, "stringtheory", content);
+   EXPECT_TRUE(AMediaFormat_getString(fmt1, "stringtheory", &out));
+   EXPECT_NE(out, nullptr);
+   EXPECT_EQ(strcmp(out,content), 0);
+
+   AMediaFormat_delete(fmt1);
+}
+
+
+TEST(NdkMediaFormat_tests, test_clear) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   int32_t i32;
+   AMediaFormat_setInt32(fmt1, "five", 5);
+   size_t size = -15;
+   AMediaFormat_setSize(fmt1, "medium", 10);
+   float f;
+   AMediaFormat_setFloat(fmt1, "boat", 1.5);
+
+   AMediaFormat_clear(fmt1);
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt1, "five", &i32));
+   EXPECT_FALSE(AMediaFormat_getSize(fmt1, "medium", &size));
+   EXPECT_FALSE(AMediaFormat_getFloat(fmt1, "boat", &f));
+
+   AMediaFormat_delete(fmt1);
+}
+
+TEST(NdkMediaFormat_tests, test_copy) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+   AMediaFormat *fmt2 = AMediaFormat_new();
+
+   double d;
+   int32_t i32;
+
+   // test copy functionality (both formats start out empty)
+   AMediaFormat_setDouble(fmt1, "trouble", 100.5);
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt1, "trouble", &d));
+   EXPECT_FALSE(AMediaFormat_getDouble(fmt2, "trouble", &d));
+
+   EXPECT_EQ(AMEDIA_OK, AMediaFormat_copy(fmt2, fmt1));
+
+   EXPECT_TRUE(AMediaFormat_getDouble(fmt2, "trouble", &d));
+   EXPECT_EQ(d, 100.5);
+
+   AMediaFormat *fmt3 = nullptr;
+   EXPECT_NE(AMEDIA_OK, AMediaFormat_copy(fmt3, fmt1));
+   EXPECT_NE(AMEDIA_OK, AMediaFormat_copy(fmt1, fmt3));
+
+   // we should lose an entry when we copy over it
+   AMediaFormat_setInt32(fmt2, "vanishing", 50);
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt1, "vanishing", &i32));
+   EXPECT_TRUE(AMediaFormat_getInt32(fmt2, "vanishing", &i32));
+   EXPECT_EQ(AMEDIA_OK, AMediaFormat_copy(fmt2, fmt1));
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt2, "vanishing", &i32));
+
+   AMediaFormat_delete(fmt1);
+   AMediaFormat_delete(fmt2);
+}
+
+TEST(NdkMediaFormat_tests, test_buffer) {
+   AMediaFormat *fmt1 = AMediaFormat_new();
+
+   typedef struct blockomem {
+        int leading;
+        int filled[100];
+        int trailing;
+   } block_t;
+   block_t buf = {};
+   buf.leading = 1;
+   buf.trailing = 2;
+   void *data;
+   size_t bsize;
+
+   AMediaFormat_setBuffer(fmt1, "mybuffer", &buf, sizeof(buf));
+   EXPECT_TRUE(AMediaFormat_getBuffer(fmt1, "mybuffer", &data, &bsize));
+   EXPECT_NE(&buf, data);
+   EXPECT_EQ(sizeof(buf), bsize);
+   block_t *bufp = (block_t*) data;
+   EXPECT_EQ(bufp->leading, buf.leading);
+   EXPECT_EQ(bufp->trailing, buf.trailing);
+   EXPECT_EQ(0, memcmp(&buf, data, bsize));
+
+   AMediaFormat_delete(fmt1);
+}
+
+} // namespace android
diff --git a/media/tests/SampleVideoEncoder/README.md b/media/tests/SampleVideoEncoder/README.md
new file mode 100644
index 0000000..2e275c5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/README.md
@@ -0,0 +1,56 @@
+# B-Frames Encoding App
+
+This is a sample Android application for encoding AVC/HEVC streams with B-frames enabled. It uses the MediaRecorder APIs to record B-frame-enabled video from camera2 input and the MediaCodec APIs to encode a reference test vector using an input surface.
+
+This page describes how to get started with the Encoder App and how to run the tests for it.
+
+
+# Getting Started
+
+This app uses the Gradle build system as well as the Soong build system.
+
+To build this project using the Gradle build system, run the following command or use "Import Project" in Android Studio:
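+```
+gradlew build
+```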
+
+To build the app using the Soong build system, run the following command:
+```
+mmm frameworks/av/media/tests/SampleVideoEncoder/
+```
+
+The APK is generated at the following location:
+```
+out/target/product/sargo/testcases/SampleVideoEncoder/arm64/SampleVideoEncoder.apk
+```
+
+Command to install the APK:
+```
+adb install SampleVideoEncoder.apk
+```
+
+Command to launch the app:
+```
+adb shell am start -n "com.android.media.samplevideoencoder/com.android.media.samplevideoencoder.MainActivity"
+```
+
+After installing the app, a TextureView showing the camera preview is displayed on one third of the screen. The app provides checkboxes to select either AVC or HEVC and either hardware or software codecs, an option to choose between the MediaRecorder and MediaCodec APIs, and a 'Start' button to start/stop recording.
+
+# Running Tests
+
+The app also contains a test that exercises the MediaCodec APIs for encoding AVC/HEVC streams with B-frames enabled. The test does not require the application UI.
+
+## Running the tests using atest
+Note that the atest command will install the SampleVideoEncoder app on the device.
+
+Command to run the tests:
+```
+atest SampleVideoEncoder
+```
+
+# Output
+
+The muxed output video is saved in the app data directory at:
+```
+/storage/emulated/0/Android/data/com.android.media.samplevideoencoder/files/
+```
+
+The total numbers of I-frames, P-frames, and B-frames after encoding with the MediaCodec APIs are displayed on the screen.
+The results of the tests can be obtained from the logcat output of the test.
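+
+For example, the frame counts logged by the test (log tag "SampleVideoEncoderTest") can be viewed with:
+```
+adb logcat -s SampleVideoEncoderTest
+```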
diff --git a/media/tests/SampleVideoEncoder/app/Android.bp b/media/tests/SampleVideoEncoder/app/Android.bp
new file mode 100644
index 0000000..58b219b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/Android.bp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+android_test {
+    name: "SampleVideoEncoder",
+
+    manifest: "src/main/AndroidManifest.xml",
+
+    srcs: ["src/**/*.java"],
+
+    sdk_version: "current",
+    min_sdk_version: "24", // N
+
+    resource_dirs: [
+        "src/main/res",
+    ],
+
+    static_libs: [
+        "androidx.annotation_annotation",
+        "androidx.appcompat_appcompat",
+        "androidx-constraintlayout_constraintlayout",
+        "junit",
+        "androidx.test.core",
+        "androidx.test.runner",
+        "hamcrest-library",
+    ],
+
+    javacflags: [
+        "-Xlint:deprecation",
+        "-Xlint:unchecked",
+    ],
+}
diff --git a/media/tests/SampleVideoEncoder/app/AndroidTest.xml b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
new file mode 100644
index 0000000..91f4304
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Runs SampleVideoEncoder Tests">
+    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="cleanup-apks" value="false" />
+        <option name="test-file-name" value="SampleVideoEncoder.apk" />
+    </target_preparer>
+
+    <option name="test-tag" value="SampleVideoEncoder" />
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="com.android.media.samplevideoencoder" />
+        <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
+        <option name="hidden-api-checks" value="false"/>
+    </test>
+</configuration>
diff --git a/media/tests/SampleVideoEncoder/app/build.gradle b/media/tests/SampleVideoEncoder/app/build.gradle
new file mode 100644
index 0000000..cc54981
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/build.gradle
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+apply plugin: 'com.android.application'
+
+android {
+    compileSdkVersion 30
+    buildToolsVersion "30.0.2"
+
+    defaultConfig {
+        applicationId "com.android.media.samplevideoencoder"
+        minSdkVersion 24
+        targetSdkVersion 30
+        versionCode 1
+        versionName "1.0"
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.2.0'
+    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
+    testImplementation 'junit:junit:4.13.1'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
+    androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
new file mode 100644
index 0000000..1ef332e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder.tests;
+
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import android.content.Context;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import com.android.media.samplevideoencoder.MediaCodecSurfaceEncoder;
+import com.android.media.samplevideoencoder.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+@RunWith(Parameterized.class)
+public class SampleVideoEncoderTest {
+    private static final String TAG = SampleVideoEncoderTest.class.getSimpleName();
+    private final Context mContext;
+    private int mMaxBFrames;
+    private int mInputResId;
+    private String mMime;
+    private boolean mIsSoftwareEncoder;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> inputFiles() {
+        return Arrays.asList(new Object[][]{
+                // Parameters: MimeType, isSoftwareEncoder, maxBFrames
+                {MediaFormat.MIMETYPE_VIDEO_AVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_AVC, true, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, false, 1},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, true, 1}});
+    }
+
+    public SampleVideoEncoderTest(String mimeType, boolean isSoftwareEncoder, int maxBFrames) {
+        this.mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
+        this.mInputResId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+        this.mMime = mimeType;
+        this.mIsSoftwareEncoder = isSoftwareEncoder;
+        this.mMaxBFrames = maxBFrames;
+    }
+
+    private String getOutputPath() {
+        File dir = mContext.getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.i(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
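+    /*
+     * Decodes the bundled 1080p HEVC clip, re-encodes it to the requested mime type through
+     * MediaCodecSurfaceEncoder, and asserts that encoding succeeds and that the encoded
+     * stream contains at least one B-frame.
+     */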
+    @Test
+    public void testMediaSurfaceEncoder() throws IOException, InterruptedException {
+        String outputFilePath = getOutputPath();
+        MediaCodecSurfaceEncoder surfaceEncoder =
+                new MediaCodecSurfaceEncoder(mContext, mInputResId, mMime, mIsSoftwareEncoder,
+                        outputFilePath, mMaxBFrames);
+        int encodingStatus = surfaceEncoder.startEncodingSurface();
+        assertThat(encodingStatus, is(equalTo(0)));
+        int[] frameNumArray = surfaceEncoder.getFrameTypes();
+        Log.i(TAG, "Results: I-Frames: " + frameNumArray[0] + "; P-Frames: " + frameNumArray[1] +
+                "\n " + "; B-Frames:" + frameNumArray[2]);
+        assertNotEquals("Encoder mime: " + mMime + " isSoftware: " + mIsSoftwareEncoder +
+                " failed to generate B Frames", frameNumArray[2], 0);
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..b17541d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.media.samplevideoencoder">
+
+    <uses-permission android:name="android.permission.CAMERA"/>
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+
+    <application
+        android:allowBackup="true"
+        android:icon="@mipmap/ic_launcher"
+        android:label="@string/app_name"
+        android:roundIcon="@mipmap/ic_launcher_round"
+        android:supportsRtl="true"
+        android:theme="@style/AppTheme">
+        <activity android:name="com.android.media.samplevideoencoder.MainActivity"
+            android:configChanges="orientation"
+            android:screenOrientation="portrait">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+        android:targetPackage="com.android.media.samplevideoencoder"
+        android:label="SampleVideoEncoder Test"/>
+
+</manifest>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
new file mode 100644
index 0000000..a3ea4c7
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/AutoFitTextureView.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+public class AutoFitTextureView extends TextureView {
+
+    public AutoFitTextureView(Context context) {
+        this(context, null);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
+        super(context, attrs, defStyle);
+    }
+
+    public void setAspectRatio(int width, int height) {
+        if (width < 0 || height < 0) {
+            throw new IllegalArgumentException("Size cannot be negative.");
+        }
+        requestLayout();
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
new file mode 100644
index 0000000..a7a353c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
@@ -0,0 +1,671 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import androidx.appcompat.app.AppCompatActivity;
+import androidx.core.app.ActivityCompat;
+
+import android.Manifest;
+import android.app.Activity;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaRecorder;
+
+import android.os.AsyncTask;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.view.Surface;
+import android.view.View;
+import android.view.TextureView;
+import android.widget.Button;
+import android.widget.CheckBox;
+
+import java.io.File;
+import java.io.IOException;
+
+import android.util.Log;
+import android.util.Size;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import static java.lang.Boolean.FALSE;
+import static java.lang.Boolean.TRUE;
+
+public class MainActivity extends AppCompatActivity
+        implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback {
+
+    private static final String TAG = "SampleVideoEncoder";
+    private static final String[] RECORD_PERMISSIONS =
+            {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
+    private static final int REQUEST_RECORD_PERMISSIONS = 1;
+    private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
+    private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */;
+    private static final int VIDEO_FRAMERATE = 30;
+
+    /**
+     * The constant values assigned to frame types here are internal to this app.
+     * They do not correspond to the actual values defined in the AVC/HEVC specifications.
+     */
+    public static final int FRAME_TYPE_I = 0;
+    public static final int FRAME_TYPE_P = 1;
+    public static final int FRAME_TYPE_B = 2;
+
+    private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private String mOutputVideoPath = null;
+
+    private final boolean mIsFrontCamera = true;
+    private boolean mIsCodecSoftware = false;
+    private boolean mIsMediaRecorder = true;
+    private boolean mIsRecording;
+
+    private AutoFitTextureView mTextureView;
+    private TextView mTextView;
+    private CameraDevice mCameraDevice;
+    private CameraCaptureSession mPreviewSession;
+    private CaptureRequest.Builder mPreviewBuilder;
+    private MediaRecorder mMediaRecorder;
+    private Size mVideoSize;
+    private Size mPreviewSize;
+
+    private Handler mBackgroundHandler;
+    private HandlerThread mBackgroundThread;
+
+    private Button mStartButton;
+
+    private int[] mFrameTypeOccurrences;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_main);
+
+        final RadioGroup radioGroup_mime = findViewById(R.id.radio_group_mime);
+        radioGroup_mime.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                if (checkedId == R.id.avc) {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
+                } else {
+                    mMime = MediaFormat.MIMETYPE_VIDEO_HEVC;
+                }
+            }
+        });
+
+        final RadioGroup radioGroup_codec = findViewById(R.id.radio_group_codec);
+        radioGroup_codec.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
+            @Override
+            public void onCheckedChanged(RadioGroup group, int checkedId) {
+                mIsCodecSoftware = checkedId == R.id.sw;
+            }
+        });
+
+        final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder);
+        final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec);
+        mTextureView = findViewById(R.id.texture);
+        mTextView = findViewById(R.id.textViewResults);
+
+        checkBox_mr.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mc.setChecked(false);
+                    mIsMediaRecorder = TRUE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(false);
+                    }
+                }
+            }
+        });
+        checkBox_mc.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                boolean checked = ((CheckBox) v).isChecked();
+                if (checked) {
+                    checkBox_mr.setChecked(false);
+                    mIsMediaRecorder = FALSE;
+                    for (int i = 0; i < radioGroup_codec.getChildCount(); i++) {
+                        radioGroup_codec.getChildAt(i).setEnabled(true);
+                    }
+                }
+            }
+        });
+        mStartButton = findViewById(R.id.start_button);
+        mStartButton.setOnClickListener(this);
+    }
+
+    @Override
+    public void onClick(View v) {
+        if (v.getId() == R.id.start_button) {
+            mTextView.setText(null);
+            if (mIsMediaRecorder) {
+                if (mIsRecording) {
+                    stopRecordingVideo();
+                } else {
+                    mStartButton.setEnabled(false);
+                    startRecordingVideo();
+                }
+            } else {
+                mStartButton.setEnabled(false);
+                mOutputVideoPath = getVideoPath(MainActivity.this);
+                MediaCodecSurfaceAsync codecAsyncTask = new MediaCodecSurfaceAsync(this);
+                codecAsyncTask.execute(
+                        "Encoding reference test vector with MediaCodec APIs using surface");
+            }
+        }
+    }
+
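+    /*
+     * Runs MediaCodecSurfaceEncoder off the UI thread and posts the resulting frame-type
+     * counts back to the UI in onPostExecute. The activity is held through a WeakReference
+     * so the task does not leak it if encoding outlives the UI.
+     */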
+    private static class MediaCodecSurfaceAsync extends AsyncTask<String, String, Integer> {
+
+        private final WeakReference<MainActivity> activityReference;
+
+        MediaCodecSurfaceAsync(MainActivity context) {
+            activityReference = new WeakReference<>(context);
+        }
+
+        @Override
+        protected Integer doInBackground(String... strings) {
+            MainActivity mainActivity = activityReference.get();
+            int resId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+            int encodingStatus = 1;
+            MediaCodecSurfaceEncoder codecSurfaceEncoder =
+                    new MediaCodecSurfaceEncoder(mainActivity.getApplicationContext(), resId,
+                            mainActivity.mMime, mainActivity.mIsCodecSoftware,
+                            mainActivity.mOutputVideoPath);
+            try {
+                encodingStatus = codecSurfaceEncoder.startEncodingSurface();
+                mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes();
+            } catch (IOException | InterruptedException e) {
+                e.printStackTrace();
+            }
+            return encodingStatus;
+        }
+
+        @Override
+        protected void onPostExecute(Integer encodingStatus) {
+            MainActivity mainActivity = activityReference.get();
+            mainActivity.mStartButton.setEnabled(true);
+            if (encodingStatus == 0) {
+                Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed",
+                        Toast.LENGTH_SHORT).show();
+                mainActivity.mTextView.append("\n Encoded stream contains: ");
+                mainActivity.mTextView.append("\n Number of I-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]);
+                mainActivity.mTextView.append("\n Number of P-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]);
+                mainActivity.mTextView.append("\n Number of B-Frames: " +
+                        mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]);
+            } else {
+                Toast.makeText(mainActivity.getApplicationContext(),
+                        "Error occurred while " + "encoding", Toast.LENGTH_SHORT).show();
+            }
+            mainActivity.mOutputVideoPath = null;
+            super.onPostExecute(encodingStatus);
+        }
+    }
+
+    private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
+            new TextureView.SurfaceTextureListener() {
+
+                @Override
+                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
+                                                      int height) {
+                    openCamera(width, height);
+                }
+
+                @Override
+                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
+                                                        int height) {
+                    configureTransform(width, height);
+                    Log.v(TAG, "Keeping camera preview size fixed");
+                }
+
+                @Override
+                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+                    return true;
+                }
+
+                @Override
+                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+                }
+            };
+
+
+    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
+
+        @Override
+        public void onOpened(CameraDevice cameraDevice) {
+            mCameraDevice = cameraDevice;
+            startPreview();
+            mCameraOpenCloseLock.release();
+        }
+
+        @Override
+        public void onDisconnected(CameraDevice cameraDevice) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+        }
+
+        @Override
+        public void onError(CameraDevice cameraDevice, int error) {
+            mCameraOpenCloseLock.release();
+            cameraDevice.close();
+            mCameraDevice = null;
+            Activity activity = MainActivity.this;
+            activity.finish();
+        }
+    };
+
+    private boolean shouldShowRequestPermissionRationale(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private void requestRecordPermissions() {
+        if (!shouldShowRequestPermissionRationale(RECORD_PERMISSIONS)) {
+            ActivityCompat.requestPermissions(this, RECORD_PERMISSIONS, REQUEST_RECORD_PERMISSIONS);
+        }
+    }
+
+    @Override
+    public void onRequestPermissionsResult(int requestCode, String[] permissions,
+                                           int[] grantResults) {
+        if (requestCode == REQUEST_RECORD_PERMISSIONS) {
+            if (grantResults.length == RECORD_PERMISSIONS.length) {
+                for (int result : grantResults) {
+                    if (result != PackageManager.PERMISSION_GRANTED) {
+                        Log.e(TAG, "Permission is not granted");
+                        break;
+                    }
+                }
+            }
+        } else {
+            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+        }
+    }
+
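+    /*
+     * Opens the camera (the second reported camera id is used when mIsFrontCamera is set),
+     * chooses matching video and preview sizes, and creates the MediaRecorder. Open/close is
+     * serialized with mCameraOpenCloseLock.
+     */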
+    @SuppressWarnings("MissingPermission")
+    private void openCamera(int width, int height) {
+        if (!hasPermissionGranted(RECORD_PERMISSIONS)) {
+            Log.e(TAG, "Camera does not have permission to record video");
+            requestRecordPermissions();
+            return;
+        }
+        final Activity activity = MainActivity.this;
+        if (activity == null || activity.isFinishing()) {
+            Log.e(TAG, "Activity not found");
+            return;
+        }
+        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+        try {
+            Log.v(TAG, "Acquire Camera");
+            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
+                throw new RuntimeException("Timed out waiting to lock camera opening");
+            }
+            Log.d(TAG, "Camera Acquired");
+
+            String cameraId = manager.getCameraIdList()[0];
+            if (mIsFrontCamera) {
+                cameraId = manager.getCameraIdList()[1];
+            }
+
+            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+            StreamConfigurationMap map =
+                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+            mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
+            mPreviewSize =
+                    chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height,
+                            mVideoSize);
+            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
+            configureTransform(width, height);
+            mMediaRecorder = new MediaRecorder();
+            manager.openCamera(cameraId, mStateCallback, null);
+        } catch (InterruptedException | CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void closeCamera() {
+        try {
+            mCameraOpenCloseLock.acquire();
+            closePreviewSession();
+            if (null != mCameraDevice) {
+                mCameraDevice.close();
+                mCameraDevice = null;
+            }
+            if (null != mMediaRecorder) {
+                mMediaRecorder.release();
+                mMediaRecorder = null;
+            }
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Interrupted while trying to lock camera closing.");
+        } finally {
+            mCameraOpenCloseLock.release();
+        }
+    }
+
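+    /*
+     * Picks the first 16:9 size that is no wider than 1920 pixels, falling back to the last
+     * reported size if none matches.
+     */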
+    private static Size chooseVideoSize(Size[] choices) {
+        for (Size size : choices) {
+            if (size.getWidth() == size.getHeight() * 16 / 9 && size.getWidth() <= 1920) {
+                return size;
+            }
+        }
+        Log.e(TAG, "Couldn't find any suitable video size");
+        return choices[choices.length - 1];
+    }
+
+    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
+        List<Size> bigEnough = new ArrayList<>();
+        int w = aspectRatio.getWidth();
+        int h = aspectRatio.getHeight();
+        for (Size option : choices) {
+            if (option.getHeight() == option.getWidth() * h / w && option.getWidth() >= width &&
+                    option.getHeight() >= height) {
+                bigEnough.add(option);
+            }
+        }
+
+        // Pick the smallest of those, assuming we found any
+        if (bigEnough.size() > 0) {
+            return Collections.min(bigEnough, new CompareSizesByArea());
+        } else {
+            Log.e(TAG, "Couldn't find any suitable preview size");
+            return choices[0];
+        }
+    }
+
+    private boolean hasPermissionGranted(String[] recordPermissions) {
+        for (String permission : recordPermissions) {
+            if (ActivityCompat.checkSelfPermission(MainActivity.this, permission) !=
+                    PackageManager.PERMISSION_GRANTED) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        startBackgroundThread();
+        if (mTextureView.isAvailable()) {
+            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
+        } else {
+            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
+        }
+    }
+
+    @Override
+    public void onPause() {
+        closeCamera();
+        stopBackgroundThread();
+        super.onPause();
+    }
+
+    private void startBackgroundThread() {
+        mBackgroundThread = new HandlerThread("CameraBackground");
+        mBackgroundThread.start();
+        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+    }
+
+    private void stopBackgroundThread() {
+        mBackgroundThread.quitSafely();
+        try {
+            mBackgroundThread.join();
+            mBackgroundThread = null;
+            mBackgroundHandler = null;
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
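+    /*
+     * Records with MediaRecorder: builds a TEMPLATE_RECORD request targeting both the preview
+     * surface and the recorder surface, then starts MediaRecorder once the capture session is
+     * configured.
+     */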
+    private void startRecordingVideo() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            Toast.makeText(MainActivity.this, "Cannot start recording.", Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Cannot start recording.");
+            return;
+        }
+        try {
+            closePreviewSession();
+            setUpMediaRecorder();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+            List<Surface> surfaces = new ArrayList<>();
+
+            // Set up Surface for the camera preview
+            Surface previewSurface = new Surface(texture);
+            surfaces.add(previewSurface);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            // Set up Surface for the MediaRecorder
+            Surface recorderSurface = mMediaRecorder.getSurface();
+            surfaces.add(recorderSurface);
+            mPreviewBuilder.addTarget(recorderSurface);
+
+            //Start a capture session
+            mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
+
+                @Override
+                public void onConfigured(CameraCaptureSession session) {
+                    mPreviewSession = session;
+                    updatePreview();
+                    MainActivity.this.runOnUiThread(new Runnable() {
+                        @Override
+                        public void run() {
+                            mIsRecording = true;
+                            mMediaRecorder.start();
+                            mStartButton.setText(R.string.stop);
+                            mStartButton.setEnabled(true);
+                        }
+                    });
+                }
+
+                @Override
+                public void onConfigureFailed(CameraCaptureSession session) {
+                    Log.e(TAG, "Failed to configure. Cannot start Recording");
+                }
+            }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
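+    /*
+     * Configures MediaRecorder for MPEG-4 output at VIDEO_BITRATE and VIDEO_FRAMERATE,
+     * selecting the HEVC or AVC Main profile (on API 26+) to match the chosen mime type.
+     */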
+    private void setUpMediaRecorder() {
+        final Activity activity = MainActivity.this;
+        if (activity == null) {
+            Toast.makeText(MainActivity.this, "Error occurred while setting up the MediaRecorder",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Error occurred while setting up the MediaRecorder");
+            return;
+        }
+        try {
+            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
+            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+        } catch (IllegalStateException e) {
+            e.printStackTrace();
+        }
+        if (mOutputVideoPath == null) {
+            mOutputVideoPath = getVideoPath(MainActivity.this);
+        }
+        mMediaRecorder.setOutputFile(mOutputVideoPath);
+        mMediaRecorder.setVideoEncodingBitRate(VIDEO_BITRATE);
+        mMediaRecorder.setVideoFrameRate(VIDEO_FRAMERATE);
+        mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
+        mMediaRecorder.setOrientationHint(270);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.HEVC);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.HEVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+            }
+        } else {
+            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+                mMediaRecorder.setVideoEncodingProfileLevel(
+                        MediaCodecInfo.CodecProfileLevel.AVCProfileMain,
+                        MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+            }
+        }
+        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
+        try {
+            mMediaRecorder.prepare();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private String getVideoPath(Activity activity) {
+        File dir = activity.getApplicationContext().getExternalFilesDir(null);
+        if (dir == null) {
+            Log.e(TAG, "Cannot get external directory path to save output video");
+            return null;
+        }
+        String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+        Log.d(TAG, "Output video is saved at: " + videoPath);
+        return videoPath;
+    }
+
+    private void closePreviewSession() {
+        if (mPreviewSession != null) {
+            mPreviewSession.close();
+            mPreviewSession = null;
+        }
+    }
+
+    private void stopRecordingVideo() {
+        mIsRecording = false;
+        mStartButton.setText(R.string.start);
+        mMediaRecorder.stop();
+        mMediaRecorder.reset();
+        Toast.makeText(MainActivity.this, "Recording Finished", Toast.LENGTH_SHORT).show();
+        mOutputVideoPath = null;
+        startPreview();
+    }
+
+    private void startPreview() {
+        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
+            return;
+        }
+        try {
+            closePreviewSession();
+            SurfaceTexture texture = mTextureView.getSurfaceTexture();
+            assert texture != null;
+            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+
+            Surface previewSurface = new Surface(texture);
+            mPreviewBuilder.addTarget(previewSurface);
+
+            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
+                    new CameraCaptureSession.StateCallback() {
+
+                        @Override
+                        public void onConfigured(CameraCaptureSession session) {
+                            mPreviewSession = session;
+                            updatePreview();
+                        }
+
+                        @Override
+                        public void onConfigureFailed(CameraCaptureSession session) {
+                            Toast.makeText(MainActivity.this,
+                                    "Configure Failed; Cannot start " + "preview",
+                                    Toast.LENGTH_SHORT).show();
+                            Log.e(TAG, "Configure failed; Cannot start preview");
+                        }
+                    }, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void updatePreview() {
+        if (mCameraDevice == null) {
+            Toast.makeText(MainActivity.this, "Camera not found; Cannot update " + "preview",
+                    Toast.LENGTH_SHORT).show();
+            Log.e(TAG, "Camera not found; Cannot update preview");
+            return;
+        }
+        try {
+            mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+            HandlerThread thread = new HandlerThread("Camera preview");
+            thread.start();
+            mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void configureTransform(int viewWidth, int viewHeight) {
+        Activity activity = MainActivity.this;
+        if (null == mTextureView || null == mPreviewSize || null == activity) {
+            return;
+        }
+        Matrix matrix = new Matrix();
+        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
+        float centerX = viewRect.centerX();
+        float centerY = viewRect.centerY();
+        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+        float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),
+                (float) viewWidth / mPreviewSize.getWidth());
+        matrix.postScale(scale, scale, centerX, centerY);
+        mTextureView.setTransform(matrix);
+    }
+
+    static class CompareSizesByArea implements Comparator<Size> {
+        @Override
+        public int compare(Size lhs, Size rhs) {
+            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
+                    (long) rhs.getWidth() * rhs.getHeight());
+        }
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
new file mode 100644
index 0000000..88ce73b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecBase.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
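+/*
+ * MediaCodec.Callback that queues input/output buffer indices under a lock. getWork() and
+ * getOutput() block on the condition until a buffer arrives or an error is signalled.
+ */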
+class CodecAsyncHandler extends MediaCodec.Callback {
+    private static final String TAG = CodecAsyncHandler.class.getSimpleName();
+    private final Lock mLock = new ReentrantLock();
+    private final Condition mCondition = mLock.newCondition();
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbInputQueue;
+    private final LinkedList<Pair<Integer, MediaCodec.BufferInfo>> mCbOutputQueue;
+    private volatile boolean mSignalledError;
+
+    CodecAsyncHandler() {
+        mCbInputQueue = new LinkedList<>();
+        mCbOutputQueue = new LinkedList<>();
+        mSignalledError = false;
+    }
+
+    void clearQueues() {
+        mLock.lock();
+        mCbInputQueue.clear();
+        mCbOutputQueue.clear();
+        mLock.unlock();
+    }
+
+    void resetContext() {
+        clearQueues();
+        mSignalledError = false;
+    }
+
+    @Override
+    public void onInputBufferAvailable(MediaCodec codec, int bufferIndex) {
+        mLock.lock();
+        mCbInputQueue.add(new Pair<>(bufferIndex, (MediaCodec.BufferInfo) null));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onOutputBufferAvailable(MediaCodec codec, int bufferIndex,
+                                        MediaCodec.BufferInfo info) {
+        mLock.lock();
+        mCbOutputQueue.add(new Pair<>(bufferIndex, info));
+        mCondition.signalAll();
+        mLock.unlock();
+    }
+
+    @Override
+    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+        mLock.lock();
+        mSignalledError = true;
+        mCondition.signalAll();
+        mLock.unlock();
+        Log.e(TAG, "Received media codec error : " + e.getMessage());
+    }
+
+    @Override
+    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+        Log.i(TAG, "Output format changed: " + format.toString());
+    }
+
+    void setCallBack(MediaCodec codec, boolean isCodecInAsyncMode) {
+        if (isCodecInAsyncMode) {
+            codec.setCallback(this);
+        }
+    }
+
+    Pair<Integer, MediaCodec.BufferInfo> getOutput() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                element = mCbOutputQueue.remove(0);
+                break;
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
+    Pair<Integer, MediaCodec.BufferInfo> getWork() throws InterruptedException {
+        Pair<Integer, MediaCodec.BufferInfo> element = null;
+        mLock.lock();
+        while (!mSignalledError) {
+            if (mCbInputQueue.isEmpty() && mCbOutputQueue.isEmpty()) {
+                mCondition.await();
+            } else {
+                if (!mCbOutputQueue.isEmpty()) {
+                    element = mCbOutputQueue.remove(0);
+                    break;
+                }
+                if (!mCbInputQueue.isEmpty()) {
+                    element = mCbInputQueue.remove(0);
+                    break;
+                }
+            }
+        }
+        mLock.unlock();
+        return element;
+    }
+
+    boolean hasSeenError() {
+        return mSignalledError;
+    }
+}
+
+public abstract class MediaCodecBase {
+    static ArrayList<String> selectCodecs(String mime, ArrayList<MediaFormat> formats,
+                                          String[] features, boolean isEncoder,
+                                          boolean isSoftware) {
+
+        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        MediaCodecInfo[] codecInfos = codecList.getCodecInfos();
+        ArrayList<String> listOfCodecs = new ArrayList<>();
+        for (MediaCodecInfo codecInfo : codecInfos) {
+            if (isEncoder) {
+                if (!codecInfo.isEncoder()) continue;
+            } else {
+                if (codecInfo.isEncoder()) continue;
+            }
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && codecInfo.isAlias()) continue;
+            String[] types = codecInfo.getSupportedTypes();
+            for (String type : types) {
+                if (type.equalsIgnoreCase(mime)) {
+                    boolean isOk = true;
+                    MediaCodecInfo.CodecCapabilities codecCapabilities =
+                            codecInfo.getCapabilitiesForType(type);
+                    if (formats != null) {
+                        for (MediaFormat format : formats) {
+                            if (!codecCapabilities.isFormatSupported(format)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    if (features != null) {
+                        for (String feature : features) {
+                            if (!codecCapabilities.isFeatureSupported(feature)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
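+                    // Treat a codec as software if its name contains "software", "android"
+                    // or "google"; all other codec names are treated as hardware.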
+                    if (isSoftware) {
+                        if (codecInfo.getName().contains("software") ||
+                                codecInfo.getName().contains("android") ||
+                                codecInfo.getName().contains("google")) {
+                            if (isOk) listOfCodecs.add(codecInfo.getName());
+                        }
+                    } else {
+                        if (codecInfo.getName().contains("software") ||
+                                codecInfo.getName().contains("android") ||
+                                codecInfo.getName().contains("google")) {
+                            continue;
+                        } else {
+                            if (isOk) listOfCodecs.add(codecInfo.getName());
+                        }
+                    }
+                }
+            }
+        }
+        return listOfCodecs;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
new file mode 100644
index 0000000..011c38c
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
+public class MediaCodecSurfaceEncoder {
+    private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+    private static final boolean DEBUG = false;
+    private static final int VIDEO_BITRATE = 8000000  /*8 Mbps*/;
+    private static final int VIDEO_FRAMERATE = 30;
+    private final Context mActivityContext;
+    private final int mResID;
+    private final int mMaxBFrames;
+    private final String mMime;
+    private final String mOutputPath;
+    private int mTrackID = -1;
+    private int mFrameNum = 0;
+    private int[] mFrameTypeOccurrences = {0, 0, 0};
+
+    private Surface mSurface;
+    private MediaExtractor mExtractor;
+    private MediaCodec mDecoder;
+    private MediaCodec mEncoder;
+    private MediaMuxer mMuxer;
+
+    private final boolean mIsCodecSoftware;
+    private boolean mSawDecInputEOS;
+    private boolean mSawDecOutputEOS;
+    private boolean mSawEncOutputEOS;
+    private int mDecOutputCount;
+    private int mEncOutputCount;
+
+    private final CodecAsyncHandler mAsyncHandleEncoder = new CodecAsyncHandler();
+    private final CodecAsyncHandler mAsyncHandleDecoder = new CodecAsyncHandler();
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath, int maxBFrames) {
+        mActivityContext = context;
+        mResID = resId;
+        mMime = mime;
+        mIsCodecSoftware = isSoftware;
+        mOutputPath = outputPath;
+        mMaxBFrames = maxBFrames;
+    }
+
+    public MediaCodecSurfaceEncoder(Context context, int resId, String mime, boolean isSoftware,
+                                    String outputPath) {
+        // If maxBFrames is not passed as a parameter, MediaFormat.KEY_MAX_B_FRAMES defaults to 1.
+        this(context, resId, mime, isSoftware, outputPath, 1);
+    }
+
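+    /*
+     * Decodes the raw resource onto the encoder's input surface and re-encodes it with each
+     * matching encoder, muxing the output to an MP4 and counting I/P/B frames.
+     * Returns 0 on success, or -1 if no video track or no suitable codec is found.
+     */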
+    public int startEncodingSurface() throws IOException, InterruptedException {
+        MediaFormat decoderFormat = setUpSource();
+        if (decoderFormat == null) {
+            return -1;
+        }
+
+        String decoderMime = decoderFormat.getString(MediaFormat.KEY_MIME);
+        ArrayList<String> listOfDecoders =
+                MediaCodecBase.selectCodecs(decoderMime, null, null, false, mIsCodecSoftware);
+        if (listOfDecoders.isEmpty()) {
+            Log.e(TAG, "No suitable decoder found for mime: " + decoderMime);
+            return -1;
+        }
+        mDecoder = MediaCodec.createByCodecName(listOfDecoders.get(0));
+
+        MediaFormat encoderFormat = setUpEncoderFormat(decoderFormat);
+        ArrayList<String> listOfEncoders =
+                MediaCodecBase.selectCodecs(mMime, null, null, true, mIsCodecSoftware);
+        if (listOfEncoders.isEmpty()) {
+            Log.e(TAG, "No suitable encoder found for mime: " + mMime);
+            return -1;
+        }
+
+        boolean muxOutput = true;
+        for (String encoder : listOfEncoders) {
+            mEncoder = MediaCodec.createByCodecName(encoder);
+            mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+            if (muxOutput) {
+                int muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+                mMuxer = new MediaMuxer(mOutputPath, muxerFormat);
+            }
+            configureCodec(decoderFormat, encoderFormat);
+            mEncoder.start();
+            mDecoder.start();
+            doWork(Integer.MAX_VALUE);
+            queueEOS();
+            waitForAllEncoderOutputs();
+            if (muxOutput) {
+                if (mTrackID != -1) {
+                    mMuxer.stop();
+                    mTrackID = -1;
+                }
+                if (mMuxer != null) {
+                    mMuxer.release();
+                    mMuxer = null;
+                }
+            }
+            mDecoder.reset();
+            mEncoder.reset();
+            mSurface.release();
+            mSurface = null;
+            Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]);
+            Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]);
+            Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]);
+        }
+        mEncoder.release();
+        mDecoder.release();
+        mExtractor.release();
+        return 0;
+    }
+
+    private MediaFormat setUpSource() throws IOException {
+        mExtractor = new MediaExtractor();
+        AssetFileDescriptor fd = mActivityContext.getResources().openRawResourceFd(mResID);
+        mExtractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
+        for (int trackID = 0; trackID < mExtractor.getTrackCount(); trackID++) {
+            MediaFormat format = mExtractor.getTrackFormat(trackID);
+            String mime = format.getString(MediaFormat.KEY_MIME);
+            if (mime.startsWith("video/")) {
+                mExtractor.selectTrack(trackID);
+                format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
+                return format;
+            }
+        }
+        mExtractor.release();
+        return null;
+    }
+
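+    /*
+     * Builds the encoder format: same resolution as the decoded stream, surface color format,
+     * VIDEO_BITRATE at VIDEO_FRAMERATE, Main profile, and KEY_MAX_B_FRAMES on Android Q+.
+     */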
+    private MediaFormat setUpEncoderFormat(MediaFormat decoderFormat) {
+        MediaFormat encoderFormat = new MediaFormat();
+        encoderFormat.setString(MediaFormat.KEY_MIME, mMime);
+        encoderFormat
+                .setInteger(MediaFormat.KEY_WIDTH, decoderFormat.getInteger(MediaFormat.KEY_WIDTH));
+        encoderFormat.setInteger(MediaFormat.KEY_HEIGHT,
+                decoderFormat.getInteger(MediaFormat.KEY_HEIGHT));
+        encoderFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VIDEO_FRAMERATE);
+        encoderFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
+        encoderFormat.setFloat(MediaFormat.KEY_I_FRAME_INTERVAL, 1.0f);
+        encoderFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.HEVCProfileMain);
+            encoderFormat.setInteger(MediaFormat.KEY_LEVEL,
+                    MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel4);
+        } else {
+            encoderFormat.setInteger(MediaFormat.KEY_PROFILE,
+                    MediaCodecInfo.CodecProfileLevel.AVCProfileMain);
+            encoderFormat
+                    .setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel4);
+        }
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+            encoderFormat.setInteger(MediaFormat.KEY_MAX_B_FRAMES, mMaxBFrames);
+        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+            encoderFormat.setInteger(MediaFormat.KEY_LATENCY, 1);
+        }
+        return encoderFormat;
+    }
+
+    private void resetContext() {
+        mAsyncHandleDecoder.resetContext();
+        mAsyncHandleEncoder.resetContext();
+        mSawDecInputEOS = false;
+        mSawDecOutputEOS = false;
+        mSawEncOutputEOS = false;
+        mDecOutputCount = 0;
+        mEncOutputCount = 0;
+        mFrameNum = 0;
+        Arrays.fill(mFrameTypeOccurrences, 0);
+    }
+
+    private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) {
+        resetContext();
+        mAsyncHandleEncoder.setCallBack(mEncoder, true);
+        mEncoder.configure(encFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+        mSurface = mEncoder.createInputSurface();
+        if (!mSurface.isValid()) {
+            Log.e(TAG, "Surface is not valid");
+            return;
+        }
+        mAsyncHandleDecoder.setCallBack(mDecoder, true);
+        mDecoder.configure(decFormat, mSurface, null, 0);
+        Log.d(TAG, "Codec configured");
+        if (DEBUG) {
+            Log.d(TAG, "Encoder Output format: " + mEncoder.getOutputFormat());
+        }
+    }
+
+    private void dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawDecOutputEOS = true;
+        }
+        if (DEBUG) {
+            Log.d(TAG,
+                    "output: id: " + bufferIndex + " flags: " + info.flags + " size: " + info.size +
+                            " timestamp: " + info.presentationTimeUs);
+        }
+        if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+            mDecOutputCount++;
+        }
+        mDecoder.releaseOutputBuffer(bufferIndex, mSurface != null);
+    }
+
+    private void enqueueDecoderInput(int bufferIndex) {
+        ByteBuffer inputBuffer = mDecoder.getInputBuffer(bufferIndex);
+        int size = mExtractor.readSampleData(inputBuffer, 0);
+        if (size < 0) {
+            enqueueDecoderEOS(bufferIndex);
+        } else {
+            long pts = mExtractor.getSampleTime();
+            int extractorFlags = mExtractor.getSampleFlags();
+            int codecFlags = 0;
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+            }
+            if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+            }
+            if (!mExtractor.advance()) {
+                codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+                mSawDecInputEOS = true;
+            }
+            if (DEBUG) {
+                Log.d(TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts +
+                        " flags: " + codecFlags);
+            }
+            mDecoder.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags);
+        }
+    }
+
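+    /*
+     * Drives the decoder from its async callbacks: <index, info> entries are outputs rendered
+     * to the encoder surface, <index, null> entries are inputs filled from the extractor.
+     * Encoder outputs are drained once the decoder runs more than mMaxBFrames frames ahead.
+     */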
+    private void doWork(int frameLimit) throws InterruptedException {
+        int frameCount = 0;
+        while (!hasSeenError() && !mSawDecInputEOS && frameCount < frameLimit) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    // <id, info> corresponds to output callback.
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    // <id, null> corresponds to input callback.
+                    enqueueDecoderInput(bufferID);
+                    frameCount++;
+                }
+            }
+            // check decoder EOS
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            // encoder output
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void queueEOS() throws InterruptedException {
+        while (!mAsyncHandleDecoder.hasSeenError() && !mSawDecInputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
+            if (element != null) {
+                int bufferID = element.first;
+                MediaCodec.BufferInfo info = element.second;
+                if (info != null) {
+                    dequeueDecoderOutput(bufferID, info);
+                } else {
+                    enqueueDecoderEOS(element.first);
+                }
+            }
+        }
+
+        while (!hasSeenError() && !mSawDecOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> decOp = mAsyncHandleDecoder.getOutput();
+            if (decOp != null) dequeueDecoderOutput(decOp.first, decOp.second);
+            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
+            if (mDecOutputCount - mEncOutputCount > mMaxBFrames) {
+                tryEncoderOutput();
+            }
+        }
+    }
+
+    private void tryEncoderOutput() throws InterruptedException {
+        if (!hasSeenError() && !mSawEncOutputEOS) {
+            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleEncoder.getOutput();
+            if (element != null) {
+                dequeueEncoderOutput(element.first, element.second);
+            }
+        }
+    }
+
+    private void waitForAllEncoderOutputs() throws InterruptedException {
+        while (!hasSeenError() && !mSawEncOutputEOS) {
+            tryEncoderOutput();
+        }
+    }
+
+    private void enqueueDecoderEOS(int bufferIndex) {
+        if (!mSawDecInputEOS) {
+            mDecoder.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+            mSawDecInputEOS = true;
+            Log.d(TAG, "Queued End of Stream");
+        }
+    }
+
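+    // Handles one encoder output buffer: records EOS, parses the bitstream to classify the
+    // frame type (I/P/B), optionally writes the sample to the muxer, and releases the buffer.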
+    private void dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if (DEBUG) {
+            Log.d(TAG, "encoder output: id: " + bufferIndex + " flags: " + info.flags + " size: " +
+                    info.size + " timestamp: " + info.presentationTimeUs);
+        }
+        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            mSawEncOutputEOS = true;
+        }
+        if (info.size > 0) {
+            ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex);
+            // Parse the buffer to get the frame type
+            if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]");
+            int frameTypeResult = -1;
+            if (MediaFormat.MIMETYPE_VIDEO_AVC.equals(mMime)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf);
+            } else if (MediaFormat.MIMETYPE_VIDEO_HEVC.equals(mMime)) {
+                frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf);
+            } else {
+                Log.e(TAG, "Mime type " + mMime + " is not supported.");
+                return;
+            }
+            if (frameTypeResult != -1) {
+                mFrameTypeOccurrences[frameTypeResult]++;
+            }
+
+            if (mMuxer != null) {
+                if (mTrackID == -1) {
+                    mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat());
+                    mMuxer.start();
+                }
+                mMuxer.writeSampleData(mTrackID, buf, info);
+            }
+            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+                mEncOutputCount++;
+            }
+        }
+        mEncoder.releaseOutputBuffer(bufferIndex, false);
+    }
+
+    private boolean hasSeenError() {
+        return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError();
+    }
+
+    public int[] getFrameTypes() {
+        return mFrameTypeOccurrences;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
new file mode 100644
index 0000000..efff4fd
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
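+// Utility for locating Annex-B start codes and classifying AVC/HEVC frames (I/P/B) by
+// parsing NAL unit headers and slice headers.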
+public class NalUnitUtil {
+    private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+    private static final boolean DEBUG = false;
+
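+    // Returns the length of the Annex-B start code (3 or 4 bytes) beginning at pos, or 0 if
+    // none is detected (at least four bytes must remain).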
+    public static int findNalUnit(byte[] dataArray, int pos, int limit) {
+        int startOffset = 0;
+        if (limit - pos < 4) {
+            return startOffset;
+        }
+        if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) {
+            startOffset = 3;
+        } else {
+            if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0 &&
+                    dataArray[pos + 3] == 1) {
+                startOffset = 4;
+            }
+        }
+        return startOffset;
+    }
+
+    private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return dataArray[nalUnitOffset] & 0x1F;
+    }
+
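+    // Parses the slice header of an AVC slice NAL unit; slice_type values 0/1/2 map to
+    // P/B/I frames (H.264 Table 7-6). Returns -1 if the type cannot be determined.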
+    private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) {
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden_zero_bit
+        bitArray.readBits(2); // nal_ref_idc
+        bitArray.skipBits(5); // nal_unit_type
+
+        bitArray.readUEV(); // first_mb_in_slice
+        if (!bitArray.canReadUEV()) {
+            return -1;
+        }
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
+    private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+        return (dataArray[nalUnitOffset] & 0x7E) >> 1;
+    }
+
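+    // Parses the slice_segment_header of an HEVC VCL NAL unit; slice_type values 0/1/2 map
+    // to B/P/I frames (H.265 Table 7-7). Returns -1 if this is not the first slice segment
+    // of the picture or the type cannot be determined.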
+    private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit,
+                                            int nalUnitType) {
+        // nal_unit_type values from H.265/HEVC Table 7-1.
+        final int BLA_W_LP = 16;
+        final int RSV_IRAP_VCL23 = 23;
+
+        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+        bitArray.reset(dataArray, offset, limit);
+
+        bitArray.skipBit(); // forbidden_zero_bit
+        bitArray.readBits(6); // nal_unit_type
+        bitArray.readBits(6); // nuh_layer_id
+        bitArray.readBits(3); // nuh_temporal_id_plus1
+
+        // Parse slice_segment_header fields per H.265/HEVC Section 7.3.6.1
+        boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag
+        if (!first_slice_segment) return -1;
+        if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) {
+            bitArray.readBit();  // no_output_of_prior_pics_flag
+        }
+        bitArray.readUEV(); // slice_pic_parameter_set_id
+        // Assume the PPS has num_extra_slice_header_bits equal to 0
+        int sliceType = bitArray.readUEV();
+        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+        if (sliceType == 0) {
+            return FRAME_TYPE_B;
+        } else if (sliceType == 1) {
+            return FRAME_TYPE_P;
+        } else if (sliceType == 2) {
+            return FRAME_TYPE_I;
+        } else {
+            return -1;
+        }
+    }
+
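+    // Scans the encoded buffer for the first AVC slice NAL unit (type 1 or 5) and returns
+    // its frame type (FRAME_TYPE_I/P/B), or -1 if none is found.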
+    public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) {
+        int limit = buf.remaining();
+        byte[] dataArray = new byte[limit];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // SLICE_NAL = 1; IDR_SLICE_NAL = 5
+                if (nalUnitType == 1 || nalUnitType == 5) {
+                    frameType = parseAVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset));
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+
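+    // Scans the encoded buffer for the first HEVC VCL NAL unit (type 0 to 21) and returns
+    // its frame type (FRAME_TYPE_I/P/B), or -1 if none is found.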
+    public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) {
+        int limit = buf.remaining();
+        byte[] dataArray = new byte[limit];
+        buf.get(dataArray);
+        int frameType = -1;
+        for (int pos = 0; pos + 3 < limit; ) {
+            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+            if (startOffset != 0) {
+                int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset));
+                if (DEBUG) {
+                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+                    Log.d(TAG, "NalUnitType = " + nalUnitType);
+                }
+                // Parse only NAL units that carry slice headers (nal_unit_type 0 to 21)
+                if (nalUnitType >= 0 && nalUnitType <= 21) {
+                    frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset),
+                            (limit - pos - startOffset), nalUnitType);
+                    break;
+                }
+                pos += 3;
+            } else {
+                pos++;
+            }
+        }
+        return frameType;
+    }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
new file mode 100644
index 0000000..e4bfaa3
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
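+// Wraps a byte array and provides bit-level reads: single bits, fixed-width fields, and
+// unsigned Exp-Golomb (ue(v)) values, as needed when parsing AVC/HEVC slice headers.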
+public class ParsableBitArray {
+    public byte[] data;
+    private int byteOffset;
+    private int bitOffset;
+    private int byteLimit;
+
+    public ParsableBitArray(byte[] dataArray) {
+        this(dataArray, dataArray.length);
+    }
+
+    public ParsableBitArray(byte[] dataArray, int limit) {
+        this.data = dataArray;
+        byteLimit = limit;
+    }
+
+    public void reset(byte[] data, int offset, int limit) {
+        this.data = data;
+        byteOffset = offset;
+        bitOffset = 0;
+        byteLimit = limit;
+    }
+
+    public void skipBit() {
+        if (++bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+    }
+
+    public void skipBits(int numBits) {
+        int numBytes = numBits / 8;
+        byteOffset += numBytes;
+        bitOffset += numBits - (numBytes * 8);
+        if (bitOffset > 7) {
+            byteOffset++;
+            bitOffset -= 8;
+        }
+    }
+
+    public boolean readBit() {
+        boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0;
+        skipBit();
+        return returnValue;
+    }
+
+    public int readBits(int numBits) {
+        if (numBits == 0) {
+            return 0;
+        }
+        int returnValue = 0;
+        bitOffset += numBits;
+        while (bitOffset > 8) {
+            bitOffset -= 8;
+            returnValue |= (data[byteOffset++] & 0xFF) << bitOffset;
+        }
+        returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset);
+        returnValue &= 0xFFFFFFFF >>> (32 - numBits);
+        if (bitOffset == 8) {
+            bitOffset = 0;
+            byteOffset++;
+        }
+        return returnValue;
+    }
+
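+    // Returns whether a complete unsigned Exp-Golomb (ue(v)) value can be read before
+    // byteLimit; the read position is restored before returning.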
+    public boolean canReadUEV() {
+        int initialByteOffset = byteOffset;
+        int initialBitOffset = bitOffset;
+        int leadingZeros = 0;
+        while (byteOffset < byteLimit && !readBit()) {
+            leadingZeros++;
+        }
+        boolean hitLimit = byteOffset == byteLimit;
+        byteOffset = initialByteOffset;
+        bitOffset = initialBitOffset;
+        return !hitLimit && canReadBits(leadingZeros * 2 + 1);
+    }
+
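+    // Reads an unsigned Exp-Golomb (ue(v)) value: counts leading zero bits, reads the same
+    // number of info bits, and returns (1 << leadingZeros) - 1 + infoBits.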
+    public int readUEV() {
+        int leadingZeros = 0;
+        while (!readBit()) {
+            leadingZeros++;
+        }
+        return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
+    }
+
+    public boolean canReadBits(int numBits) {
+        int oldByteOffset = byteOffset;
+        int numBytes = numBits / 8;
+        int newByteOffset = byteOffset + numBytes;
+        int newBitOffset = bitOffset + numBits - (numBytes * 8);
+        if (newBitOffset > 7) {
+            newByteOffset++;
+            newBitOffset -= 8;
+        }
+        for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) {
+            if (shouldSkipByte(i)) {
+                // Skip the byte and check three bytes ahead.
+                newByteOffset++;
+                i += 2;
+            }
+        }
+        return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0);
+    }
+
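+    // Returns true when the byte at offset is an emulation prevention byte (a 0x03 that
+    // follows two 0x00 bytes) and should be skipped when counting readable bits.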
+    private boolean shouldSkipByte(int offset) {
+        return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 &&
+                data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00);
+    }
+
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..2b068d1
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:aapt="http://schemas.android.com/aapt"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
+        <aapt:attr name="android:fillColor">
+            <gradient
+                android:endX="85.84757"
+                android:endY="92.4963"
+                android:startX="42.9492"
+                android:startY="49.59793"
+                android:type="linear">
+                <item
+                    android:color="#44000000"
+                    android:offset="0.0" />
+                <item
+                    android:color="#00000000"
+                    android:offset="1.0" />
+            </gradient>
+        </aapt:attr>
+    </path>
+    <path
+        android:fillColor="#FFFFFF"
+        android:fillType="nonZero"
+        android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
+        android:strokeWidth="1"
+        android:strokeColor="#00000000" />
+</vector>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..07d5da9
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="utf-8"?>
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportWidth="108"
+    android:viewportHeight="108">
+    <path
+        android:fillColor="#3DDC84"
+        android:pathData="M0,0h108v108h-108z" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M9,0L9,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,0L19,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,0L29,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,0L39,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,0L49,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,0L59,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,0L69,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,0L79,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M89,0L89,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M99,0L99,108"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,9L108,9"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,19L108,19"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,29L108,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,39L108,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,49L108,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,59L108,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,69L108,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,79L108,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,89L108,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,99L108,99"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,29L89,29"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,39L89,39"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,49L89,49"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,59L89,59"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,69L89,69"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,79L89,79"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,19L29,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,19L39,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,19L49,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,19L59,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,19L69,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,19L79,89"
+        android:strokeWidth="0.8"
+        android:strokeColor="#33FFFFFF" />
+</vector>
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..017012d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="utf-8"?>
+<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:id="@+id/container"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:layout_gravity="center"
+    tools:context="com.android.media.samplevideoencoder.MainActivity">
+
+    <com.android.media.samplevideoencoder.AutoFitTextureView
+        android:id="@+id/texture"
+        android:layout_width="wrap_content"
+        android:layout_height="300dp"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentTop="true"
+        android:layout_marginBottom="16dp"
+        android:gravity="center"
+        app:layout_constraintBottom_toTopOf="@+id/checkBox_media_recorder"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toTopOf="parent" />
+
+    <CheckBox
+        android:id="@+id/checkBox_media_recorder"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_recorder"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        android:checked="true"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/texture"/>
+
+    <CheckBox
+        android:id="@+id/checkBox_media_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="90dp"
+        android:fontFamily="sans-serif-medium"
+        android:text="@string/media_codec"
+        android:textAppearance="@style/TextAppearance.AppCompat.Large"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_recorder" />
+
+    <RadioGroup
+        android:id="@+id/radio_group_mime"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="40dp"
+        android:layout_marginTop="10dp"
+        android:orientation="vertical"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintBottom_toTopOf="@+id/frameLayout2"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/avc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:text="@string/avc" />
+
+        <RadioButton
+            android:id="@+id/hevc"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/hevc" />
+    </RadioGroup>
+
+    <RadioGroup
+        android:id="@+id/radio_group_codec"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:layout_marginEnd="40dp"
+        android:orientation="vertical"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/checkBox_media_codec">
+
+        <RadioButton
+            android:id="@+id/hw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:checked="true"
+            android:enabled="false"
+            android:text="@string/hardware" />
+
+        <RadioButton
+            android:id="@+id/sw"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:enabled="false"
+            android:text="@string/software" />
+    </RadioGroup>
+
+    <FrameLayout
+        android:id="@+id/frameLayout2"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_below="@id/radio_group_mime"
+        android:layout_alignParentStart="true"
+        android:layout_alignParentBottom="true"
+        android:layout_marginTop="10dp"
+        android:background="@color/colorPrimary"
+        app:layout_constraintTop_toBottomOf="@+id/radio_group_mime"
+        tools:layout_editor_absoluteX="80dp">
+
+        <Button
+            android:id="@+id/start_button"
+            android:layout_width="108dp"
+            android:layout_height="wrap_content"
+            android:layout_gravity="center"
+            android:gravity="center"
+            android:text="@string/start_button"
+            tools:layout_editor_absoluteX="155dp"
+            tools:layout_editor_absoluteY="455dp" />
+
+    </FrameLayout>
+
+    <TextView
+        android:id="@+id/textViewResults"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="10dp"
+        android:fontFamily="sans-serif-medium"
+        android:textSize="18sp"
+        android:textStyle="normal"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf = "@+id/frameLayout2" />
+
+</androidx.constraintlayout.widget.ConstraintLayout>
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4 b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
new file mode 100644
index 0000000..6204008
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/raw/crowd_1920x1080_25fps_4000kbps_h265.mp4
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..4faecfa
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#6200EE</color>
+    <color name="colorPrimaryDark">#3700B3</color>
+    <color name="colorAccent">#03DAC5</color>
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..f825a7f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/strings.xml
@@ -0,0 +1,13 @@
+<resources>
+    <string name="app_name">SampleVideoEncoder</string>
+    <string name="media_recorder">MediaRecorder</string>
+    <string name="media_codec">MediaCodec</string>
+    <string name="start_button">Start</string>
+    <string name="stop">Stop</string>
+    <string name="start">Start</string>
+    <string name="avc">AVC</string>
+    <string name="hevc">HEVC</string>
+    <string name="hardware">H/W</string>
+    <string name="software">S/W</string>
+
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..fac9291
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/values/styles.xml
@@ -0,0 +1,10 @@
+<resources>
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/build.gradle b/media/tests/SampleVideoEncoder/build.gradle
new file mode 100644
index 0000000..4ca0c7e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/build.gradle
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+buildscript {
+    repositories {
+        google()
+        jcenter()
+    }
+    dependencies {
+        classpath 'com.android.tools.build:gradle:4.1.1'
+    }
+}
+
+allprojects {
+    repositories {
+        google()
+        jcenter()
+    }
+}
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/gradle.properties b/media/tests/SampleVideoEncoder/gradle.properties
new file mode 100644
index 0000000..5ae443b
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle.properties
@@ -0,0 +1,4 @@
+# Project-wide Gradle settings.
+org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..f6b961f
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..a9a12eb
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Wed Dec 16 10:06:45 IST 2020
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
diff --git a/media/tests/SampleVideoEncoder/gradlew b/media/tests/SampleVideoEncoder/gradlew
new file mode 100644
index 0000000..cccdd3d
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+  cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/media/tests/SampleVideoEncoder/gradlew.bat b/media/tests/SampleVideoEncoder/gradlew.bat
new file mode 100644
index 0000000..f955316
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/media/tests/SampleVideoEncoder/settings.gradle b/media/tests/SampleVideoEncoder/settings.gradle
new file mode 100644
index 0000000..4d3c3a5
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/settings.gradle
@@ -0,0 +1,2 @@
+include ':app'
+rootProject.name = "SampleVideoEncoder"
\ No newline at end of file
diff --git a/media/tests/benchmark/Android.bp b/media/tests/benchmark/Android.bp
index de408dd..8503a9c 100644
--- a/media/tests/benchmark/Android.bp
+++ b/media/tests/benchmark/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 subdirs = [
     "src",
     "tests",
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index d80d9a5..2e06da5 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 android_test {
     name: "MediaBenchmarkTest",
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 48e1422..4202732 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -34,6 +34,7 @@
 import com.android.media.benchmark.library.Native;
 import com.android.media.benchmark.library.Stats;
 
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -57,40 +58,87 @@
 public class EncoderTest {
     private static final Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
+    private static final String mFileDirPath = mContext.getFilesDir() + "/";
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
     private static final String mStatsFile =
             mContext.getExternalFilesDir(null) + "/Encoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "EncoderTest";
-    private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final boolean DEBUG = false;
     private static final boolean WRITE_OUTPUT = false;
+    private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final int ENCODE_DEFAULT_FRAME_RATE = 25;
-    private static final int ENCODE_DEFAULT_BIT_RATE = 8000000 /* 8 Mbps */;
-    private static final int ENCODE_MIN_BIT_RATE = 600000 /* 600 Kbps */;
+    private static final int ENCODE_DEFAULT_VIDEO_BIT_RATE = 8000000 /* 8 Mbps */;
+    private static final int ENCODE_MIN_VIDEO_BIT_RATE = 600000 /* 600 Kbps */;
     private static final int ENCODE_DEFAULT_AUDIO_BIT_RATE = 128000 /* 128 Kbps */;
+    private static int mColorFormat = COLOR_FormatYUV420Flexible;
+    private static File mDecodedFileQcif;
+    private static File mDecodedFileFullHd;
+    private static File mDecodedFileAudio;
     private String mInputFile;
+    private String mMime;
+    private int mBitRate;
+    private int mIFrameInterval;
+    private int mWidth;
+    private int mHeight;
+    private int mProfile;
+    private int mLevel;
+    private int mSampleRate;
+    private int mNumChannel;
+    private static final String DECODE_FULLHD_INPUT = "crowd_1920x1080_25fps_4000kbps_h265.mkv";
+    private static final String DECODE_QCIF_INPUT = "crowd_176x144_25fps_6000kbps_mpeg4.mp4";
+    private static final String DECODE_AUDIO_INPUT = "bbb_48000hz_2ch_100kbps_opus_30sec.webm";
+    private static final String DECODE_FULLHD_UNPACKED = "crowd_1920x1080_25fps_4000kbps_h265.yuv";
+    private static final String DECODE_QCIF_UNPACKED = "crowd_176x144_25fps_6000kbps_mpeg4.yuv";
+    private static final String DECODE_AUDIO_UNPACKED = "bbb_48000hz_2ch_100kbps_opus_30sec.raw";
 
     @Parameterized.Parameters
     public static Collection<Object[]> inputFiles() {
         return Arrays.asList(new Object[][]{
                 // Audio Test
-                {"bbb_44100hz_2ch_128kbps_aac_30sec.mp4"},
-                {"bbb_8000hz_1ch_8kbps_amrnb_30sec.3gp"},
-                {"bbb_16000hz_1ch_9kbps_amrwb_30sec.3gp"},
-                {"bbb_44100hz_2ch_600kbps_flac_30sec.mp4"},
-                {"bbb_48000hz_2ch_100kbps_opus_30sec.webm"},
+                // Parameters: Filename, mimeType, bitrate, width, height, iFrameInterval,
+                // profile, level, sampleRate, channelCount
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AAC,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 44100, 2},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AMR_NB,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 8000, 1},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_AMR_WB,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 16000, 1},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_FLAC,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 44100, 2},
+                {DECODE_AUDIO_UNPACKED, MediaFormat.MIMETYPE_AUDIO_OPUS,
+                        ENCODE_DEFAULT_AUDIO_BIT_RATE, -1, -1, -1, -1, -1, 48000, 2},
+
                 // Video Test
-                {"crowd_1920x1080_25fps_4000kbps_vp8.webm"},
-                {"crowd_1920x1080_25fps_6700kbps_h264.ts"},
-                {"crowd_1920x1080_25fps_4000kbps_h265.mkv"},
-                {"crowd_1920x1080_25fps_4000kbps_vp9.webm"},
-                {"crowd_176x144_25fps_6000kbps_mpeg4.mp4"},
-                {"crowd_176x144_25fps_6000kbps_h263.3gp"}});
+                // Parameters: Filename, mimeType, bitrate, width, height, iFrameInterval,
+                // profile, level, sampleRate, channelCount
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_VP8,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_AVC,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_HEVC,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_FULLHD_UNPACKED, MediaFormat.MIMETYPE_VIDEO_VP9,
+                        ENCODE_DEFAULT_VIDEO_BIT_RATE, 1920, 1080, 1, -1, -1, -1, -1},
+                {DECODE_QCIF_UNPACKED, MediaFormat.MIMETYPE_VIDEO_MPEG4, ENCODE_MIN_VIDEO_BIT_RATE,
+                        176, 144, 1, -1, -1, -1, -1},
+                {DECODE_QCIF_UNPACKED, MediaFormat.MIMETYPE_VIDEO_H263, ENCODE_MIN_VIDEO_BIT_RATE,
+                        176, 144, 1, -1, -1, -1, -1}});
     }
 
-    public EncoderTest(String inputFileName) {
-        this.mInputFile = inputFileName;
+    public EncoderTest(String filename, String mime, int bitrate, int width, int height,
+                       int frameInterval, int profile, int level, int samplerate,
+                       int channelCount) {
+        this.mInputFile = filename;
+        this.mMime = mime;
+        this.mBitRate = bitrate;
+        this.mIFrameInterval = frameInterval;
+        this.mWidth = width;
+        this.mHeight = height;
+        this.mProfile = profile;
+        this.mLevel = level;
+        this.mSampleRate = samplerate;
+        this.mNumChannel = channelCount;
     }
 
     @BeforeClass
@@ -101,33 +149,36 @@
         Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
-    @Test(timeout = PER_TEST_TIMEOUT_MS)
-    public void testEncoder() throws Exception {
-        int status;
-        int frameSize;
-        //Parameters for video
-        int width = 0;
-        int height = 0;
-        int profile = 0;
-        int level = 0;
-        int frameRate = 0;
+    @BeforeClass
+    public static void prepareInput() throws IOException {
 
-        //Parameters for audio
-        int bitRate = 0;
-        int sampleRate = 0;
-        int numChannels = 0;
-        File inputFile = new File(mInputFilePath + mInputFile);
-        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
-                inputFile.exists());
+        mDecodedFileFullHd = new File(mFileDirPath + DECODE_FULLHD_UNPACKED);
+        int status = decodeFile(mInputFilePath + DECODE_FULLHD_INPUT, mDecodedFileFullHd);
+        assertEquals("Decoder returned error " + status, 0, status);
+
+        mDecodedFileQcif = new File(mFileDirPath + DECODE_QCIF_UNPACKED);
+        status = decodeFile(mInputFilePath + DECODE_QCIF_INPUT, mDecodedFileQcif);
+        assertEquals("Decoder returned error " + status, 0, status);
+
+        mDecodedFileAudio = new File(mFileDirPath + DECODE_AUDIO_UNPACKED);
+        status = decodeFile(mInputFilePath + DECODE_AUDIO_INPUT, mDecodedFileAudio);
+        assertEquals("Decoder returned error " + status, 0, status);
+    }
+
+    private static int decodeFile(String inputFileName, File outputDecodeFile) throws IOException {
+        int status = -1;
+        File inputFile = new File(inputFileName);
+        assertTrue("Cannot open input file " + inputFileName, inputFile.exists());
         FileInputStream fileInput = new FileInputStream(inputFile);
         FileDescriptor fileDescriptor = fileInput.getFD();
+        FileOutputStream decodeOutputStream = new FileOutputStream(outputDecodeFile);
+
         Extractor extractor = new Extractor();
         int trackCount = extractor.setUpExtractor(fileDescriptor);
-        assertTrue("Extraction failed. No tracks for file: " + mInputFile, (trackCount > 0));
+        assertTrue("Extraction failed. No tracks for the given input file", (trackCount > 0));
         ArrayList<ByteBuffer> inputBuffer = new ArrayList<>();
         ArrayList<MediaCodec.BufferInfo> frameInfo = new ArrayList<>();
         for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-            int colorFormat = COLOR_FormatYUV420Flexible;
             extractor.selectExtractorTrack(currentTrack);
             MediaFormat format = extractor.getFormat(currentTrack);
             // Get samples from extractor
@@ -146,163 +197,135 @@
                             bufInfo.presentationTimeUs + " size = " + bufInfo.size);
                 }
             } while (sampleSize > 0);
-            int tid = android.os.Process.myTid();
-            File decodedFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-            FileOutputStream decodeOutputStream = new FileOutputStream(decodedFile);
             Decoder decoder = new Decoder();
             decoder.setupDecoder(decodeOutputStream);
             status = decoder.decode(inputBuffer, frameInfo, false, format, "");
-            assertEquals("Decoder returned error " + status + " for file: " + mInputFile, 0,
-                    status);
             MediaFormat decoderFormat = decoder.getFormat();
+            if (decoderFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+                mColorFormat = decoderFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+            }
             decoder.deInitCodec();
             extractor.unselectExtractorTrack(currentTrack);
             inputBuffer.clear();
             frameInfo.clear();
-            if (decodeOutputStream != null) {
-                decodeOutputStream.close();
-            }
-            String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
-            assertTrue("No suitable codecs found for file: " + mInputFile + " track : " +
-                    currentTrack + " mime: " + mime, (mediaCodecs.size() > 0));
-            Boolean[] encodeMode = {true, false};
-            /* Encoding the decoder's output */
-            for (Boolean asyncMode : encodeMode) {
-                for (String codecName : mediaCodecs) {
-                    FileOutputStream encodeOutputStream = null;
-                    if (WRITE_OUTPUT) {
-                        File outEncodeFile = new File(mOutputFilePath + "encoder.out");
-                        if (outEncodeFile.exists()) {
-                            assertTrue(" Unable to delete existing file" + outEncodeFile.toString(),
-                                    outEncodeFile.delete());
-                        }
-                        assertTrue("Unable to create file to write encoder output: " +
-                                outEncodeFile.toString(), outEncodeFile.createNewFile());
-                        encodeOutputStream = new FileOutputStream(outEncodeFile);
-                    }
-                    File rawFile = new File(mContext.getFilesDir() + "/decoder_" + tid + ".out");
-                    assertTrue("Cannot open file to write decoded output", rawFile.exists());
-                    if (DEBUG) {
-                        Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
-                    }
-                    FileInputStream eleStream = new FileInputStream(rawFile);
-                    if (mime.startsWith("video/")) {
-                        width = format.getInteger(MediaFormat.KEY_WIDTH);
-                        height = format.getInteger(MediaFormat.KEY_HEIGHT);
-                        if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
-                            frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
-                        } else if (frameRate <= 0) {
-                            frameRate = ENCODE_DEFAULT_FRAME_RATE;
-                        }
-                        if (format.containsKey(MediaFormat.KEY_BIT_RATE)) {
-                            bitRate = format.getInteger(MediaFormat.KEY_BIT_RATE);
-                        } else if (bitRate <= 0) {
-                            if (mime.contains("video/3gpp") || mime.contains("video/mp4v-es")) {
-                                bitRate = ENCODE_MIN_BIT_RATE;
-                            } else {
-                                bitRate = ENCODE_DEFAULT_BIT_RATE;
-                            }
-                        }
-                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                            profile = format.getInteger(MediaFormat.KEY_PROFILE);
-                        }
-                        if (format.containsKey(MediaFormat.KEY_PROFILE)) {
-                            level = format.getInteger(MediaFormat.KEY_LEVEL);
-                        }
-                        if (decoderFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
-                            colorFormat = decoderFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
-                        }
-                    } else {
-                        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
-                        numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
-                        if (decoderFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
-                            bitRate = decoderFormat.getInteger(MediaFormat.KEY_BIT_RATE);
-                        } else {
-                            bitRate = ENCODE_DEFAULT_AUDIO_BIT_RATE;
-                        }
-                    }
-                    /*Setup Encode Format*/
-                    MediaFormat encodeFormat;
-                    if (mime.startsWith("video/")) {
-                        frameSize = width * height * 3 / 2;
-                        encodeFormat = MediaFormat.createVideoFormat(mime, width, height);
-                        encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
-                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                        encodeFormat.setInteger(MediaFormat.KEY_PROFILE, profile);
-                        encodeFormat.setInteger(MediaFormat.KEY_LEVEL, level);
-                        encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
-                        encodeFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, frameSize);
-                        encodeFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
-                    } else {
-                        encodeFormat = MediaFormat.createAudioFormat(mime, sampleRate, numChannels);
-                        encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
-                        frameSize = 4096;
-                    }
-                    Encoder encoder = new Encoder();
-                    encoder.setupEncoder(encodeOutputStream, eleStream);
-                    status = encoder.encode(codecName, encodeFormat, mime, frameRate, sampleRate,
-                            frameSize, asyncMode);
-                    encoder.deInitEncoder();
-                    assertEquals(
-                            codecName + " encoder returned error " + status + " for " + "file:" +
-                                    " " + mInputFile, 0, status);
-                    encoder.dumpStatistics(mInputFile, codecName, (asyncMode ? "async" : "sync"),
-                            extractor.getClipDuration(), mStatsFile);
-                    Log.i(TAG, "Encoding complete for file: " + mInputFile + " with codec: " +
-                            codecName + " for aSyncMode = " + asyncMode);
-                    encoder.resetEncoder();
-                    eleStream.close();
-                    if (encodeOutputStream != null) {
-                        encodeOutputStream.close();
-                    }
-
-                }
-            }
-            //Cleanup temporary input file
-            if (decodedFile.exists()) {
-                assertTrue(" Unable to delete decoded file" + decodedFile.toString(),
-                        decodedFile.delete());
-                Log.i(TAG, "Successfully deleted decoded file");
-            }
         }
         extractor.deinitExtractor();
         fileInput.close();
+        decodeOutputStream.close();
+        return status;
     }
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
-    public void testNativeEncoder() throws Exception {
-        File inputFile = new File(mInputFilePath + mInputFile);
-        assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
-                inputFile.exists());
-        int tid = android.os.Process.myTid();
-        final String mDecodedFile = mContext.getFilesDir() + "/decoder_" + tid + ".out";
-        FileInputStream fileInput = new FileInputStream(inputFile);
-        FileDescriptor fileDescriptor = fileInput.getFD();
-        Extractor extractor = new Extractor();
-        int trackCount = extractor.setUpExtractor(fileDescriptor);
-        assertTrue("Extraction failed. No tracks for file: ", trackCount > 0);
-        for (int currentTrack = 0; currentTrack < trackCount; currentTrack++) {
-            extractor.selectExtractorTrack(currentTrack);
-            MediaFormat format = extractor.getFormat(currentTrack);
-            String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, true);
-            // Encoding the decoder's output
+    public void testEncoder() throws Exception {
+        int status;
+        int frameSize;
+
+        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
+        Boolean[] encodeMode = {true, false};
+        // Encoding the decoded input file
+        for (Boolean asyncMode : encodeMode) {
             for (String codecName : mediaCodecs) {
-                Native nativeEncoder = new Native();
-                int status = nativeEncoder
-                        .Encode(mInputFilePath, mInputFile, mDecodedFile, mStatsFile, codecName);
+                FileOutputStream encodeOutputStream = null;
+                if (WRITE_OUTPUT) {
+                    File outEncodeFile = new File(mOutputFilePath + "encoder.out");
+                    if (outEncodeFile.exists()) {
+                        assertTrue("Unable to delete existing file " + outEncodeFile.toString(),
+                                outEncodeFile.delete());
+                    }
+                    assertTrue("Unable to create file to write encoder output: " +
+                            outEncodeFile.toString(), outEncodeFile.createNewFile());
+                    encodeOutputStream = new FileOutputStream(outEncodeFile);
+                }
+                File rawFile = new File(mFileDirPath + mInputFile);
+                assertTrue("Cannot open decoded input file", rawFile.exists());
+                if (DEBUG) {
+                    Log.i(TAG, "Path of decoded input file: " + rawFile.toString());
+                }
+                FileInputStream eleStream = new FileInputStream(rawFile);
+                // Setup Encode Format
+                MediaFormat encodeFormat;
+                if (mMime.startsWith("video/")) {
+                    frameSize = mWidth * mHeight * 3 / 2;
+                    encodeFormat = MediaFormat.createVideoFormat(mMime, mWidth, mHeight);
+                    encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, ENCODE_DEFAULT_FRAME_RATE);
+                    encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
+                    encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
+                    encodeFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
+                    if (mProfile != -1 && mLevel != -1) {
+                        encodeFormat.setInteger(MediaFormat.KEY_PROFILE, mProfile);
+                        encodeFormat.setInteger(MediaFormat.KEY_LEVEL, mLevel);
+                    }
+                } else {
+                    frameSize = 4096;
+                    encodeFormat = MediaFormat.createAudioFormat(mMime, mSampleRate, mNumChannel);
+                    encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
+                }
+                Encoder encoder = new Encoder();
+                encoder.setupEncoder(encodeOutputStream, eleStream);
+                status = encoder.encode(codecName, encodeFormat, mMime, ENCODE_DEFAULT_FRAME_RATE,
+                        mSampleRate, frameSize, asyncMode);
+                encoder.deInitEncoder();
                 assertEquals(
-                        codecName + " encoder returned error " + status + " for " + "file:" + " " +
-                                mInputFile, 0, status);
+                        codecName + " encoder returned error " + status + " for mime: " +
+                                mMime, 0, status);
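+                // Build a reference name from the encode configuration and estimate the clip
+                // duration from the size of the raw input file.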
+                String inputReference;
+                long durationUs;
+                if (mMime.startsWith("video/")) {
+                    inputReference =
+                            mInputFile + "_" + mWidth + "x" + mHeight + "_" + mBitRate + "bps";
+                    durationUs = (((eleStream.getChannel().size() + frameSize - 1) / frameSize) /
+                            ENCODE_DEFAULT_FRAME_RATE) * 1000000;
+                } else {
+                    inputReference = mInputFile + "_" + mSampleRate + "hz_" + mNumChannel + "ch_" +
+                            mBitRate + "bps";
+                    durationUs =
+                            (eleStream.getChannel().size() / (mSampleRate * mNumChannel)) * 1000000;
+                }
+                encoder.dumpStatistics(inputReference, codecName, (asyncMode ? "async" : "sync"),
+                        durationUs, mStatsFile);
+                Log.i(TAG, "Encoding complete for mime: " + mMime + " with codec: " + codecName +
+                        " for aSyncMode = " + asyncMode);
+                encoder.resetEncoder();
+                eleStream.close();
+                if (encodeOutputStream != null) {
+                    encodeOutputStream.close();
+                }
             }
         }
-        File decodedFile = new File(mDecodedFile);
-        // Cleanup temporary input file
-        if (decodedFile.exists()) {
-            assertTrue("Unable to delete - " + mDecodedFile, decodedFile.delete());
-            Log.i(TAG, "Successfully deleted - " + mDecodedFile);
+    }
+
+    @Test(timeout = PER_TEST_TIMEOUT_MS)
+    public void testNativeEncoder() {
+        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
+        for (String codecName : mediaCodecs) {
+            Native nativeEncoder = new Native();
+            int status = nativeEncoder
+                    .Encode(mFileDirPath, mInputFile, mStatsFile, codecName, mMime, mBitRate,
+                            mColorFormat, mIFrameInterval, mWidth, mHeight, mProfile, mLevel,
+                            mSampleRate, mNumChannel);
+            assertEquals(codecName + " encoder returned error " + status + " for mime: " +
+                    mMime, 0, status);
         }
-        fileInput.close();
+    }
+
+    @AfterClass
+    public static void deleteDecodedFiles() {
+        if (mDecodedFileFullHd.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileFullHd.toString(),
+                    mDecodedFileFullHd.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileFullHd.toString());
+        }
+        if (mDecodedFileQcif.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileQcif.toString(),
+                    mDecodedFileQcif.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileQcif.toString());
+        }
+        if (mDecodedFileAudio.exists()) {
+            assertTrue("Unable to delete decoded file " + mDecodedFileAudio.toString(),
+                    mDecodedFileAudio.delete());
+            Log.i(TAG, "Successfully deleted decoded file " + mDecodedFileAudio.toString());
+        }
     }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index 3e5e4c8..af92424 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test_library {
     name: "libmediabenchmark_jni",
     sdk_version: "current",
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
index 1277c8b..2f658e3 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeEncoder.cpp
@@ -30,189 +30,81 @@
 #include <stdio.h>
 
 constexpr int32_t ENCODE_DEFAULT_FRAME_RATE = 25;
-constexpr int32_t ENCODE_DEFAULT_AUDIO_BIT_RATE = 128000 /* 128 Kbps */;
-constexpr int32_t ENCODE_DEFAULT_BIT_RATE = 8000000 /* 8 Mbps */;
-constexpr int32_t ENCODE_MIN_BIT_RATE = 600000 /* 600 Kbps */;
 
 extern "C" JNIEXPORT int JNICALL Java_com_android_media_benchmark_library_Native_Encode(
-        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jOutFilePath,
-        jstring jStatsFile, jstring jCodecName) {
+        JNIEnv *env, jobject thiz, jstring jFilePath, jstring jFileName, jstring jStatsFile,
+        jstring jCodecName, jstring jMime, jint jBitRate, jint jColorFormat, jint jFrameInterval,
+        jint jWidth, jint jHeight, jint jProfile, jint jLevel, jint jSampleRate,
+        jint jNumChannels) {
+    UNUSED(thiz);
     const char *filePath = env->GetStringUTFChars(jFilePath, nullptr);
     const char *fileName = env->GetStringUTFChars(jFileName, nullptr);
-    string sFilePath = string(filePath) + string(fileName);
-    UNUSED(thiz);
-    FILE *inputFp = fopen(sFilePath.c_str(), "rb");
-    env->ReleaseStringUTFChars(jFileName, fileName);
-    env->ReleaseStringUTFChars(jFilePath, filePath);
-    if (!inputFp) {
-        ALOGE("Unable to open input file for reading");
+    string inputFile = string(filePath) + string(fileName);
+    const char *codecName = env->GetStringUTFChars(jCodecName, nullptr);
+    string sCodecName = string(codecName);
+    const char *mime = env->GetStringUTFChars(jMime, nullptr);
+
+    ifstream eleStream;
+    eleStream.open(inputFile, ifstream::binary | ifstream::ate);
+    if (!eleStream.is_open()) {
+        ALOGE("%s - File failed to open for reading!", fileName);
+        env->ReleaseStringUTFChars(jFileName, fileName);
         return -1;
     }
 
-    Decoder *decoder = new Decoder();
-    Extractor *extractor = decoder->getExtractor();
-    if (!extractor) {
-        ALOGE("Extractor creation failed");
-        return -1;
-    }
+    bool asyncMode[2] = {true, false};
+    for (bool mode : asyncMode) {
+        size_t eleSize = eleStream.tellg();
+        eleStream.seekg(0, ifstream::beg);
 
-    // Read file properties
-    struct stat buf;
-    stat(sFilePath.c_str(), &buf);
-    size_t fileSize = buf.st_size;
-    if (fileSize > kMaxBufferSize) {
-        ALOGE("File size greater than maximum buffer size");
-        return -1;
-    }
-    int32_t fd = fileno(inputFp);
-    int32_t trackCount = extractor->initExtractor(fd, fileSize);
-    if (trackCount <= 0) {
-        ALOGE("initExtractor failed");
-        return -1;
-    }
+        // Set encoder params
+        encParameter encParams;
+        encParams.width = jWidth;
+        encParams.height = jHeight;
+        encParams.bitrate = jBitRate;
+        encParams.iFrameInterval = jFrameInterval;
+        encParams.sampleRate = jSampleRate;
+        encParams.numChannels = jNumChannels;
+        encParams.frameRate = ENCODE_DEFAULT_FRAME_RATE;
+        encParams.colorFormat = jColorFormat;
+        encParams.profile = jProfile;
+        encParams.level = jLevel;
 
-    for (int curTrack = 0; curTrack < trackCount; curTrack++) {
-        int32_t status = extractor->setupTrackFormat(curTrack);
-        if (status != 0) {
-            ALOGE("Track Format invalid");
-            return -1;
-        }
-        uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
-        if (!inputBuffer) {
-            ALOGE("Insufficient memory");
-            return -1;
-        }
-        vector<AMediaCodecBufferInfo> frameInfo;
-        AMediaCodecBufferInfo info;
-        uint32_t inputBufferOffset = 0;
-
-        // Get frame data
-        while (1) {
-            status = extractor->getFrameSample(info);
-            if (status || !info.size) break;
-            // copy the meta data and buffer to be passed to decoder
-            if (inputBufferOffset + info.size > kMaxBufferSize) {
-                ALOGE("Memory allocated not sufficient");
-                free(inputBuffer);
-                return -1;
-            }
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
-            frameInfo.push_back(info);
-            inputBufferOffset += info.size;
-        }
-        string decName = "";
-        const char *outputFilePath = env->GetStringUTFChars(jOutFilePath, nullptr);
-        FILE *outFp = fopen(outputFilePath, "wb");
-        if (outFp == nullptr) {
-            ALOGE("%s - File failed to open for writing!", outputFilePath);
-            free(inputBuffer);
-            return -1;
-        }
-        decoder->setupDecoder();
-        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        Encoder *encoder = new Encoder();
+        encoder->setupEncoder();
+        auto status = encoder->encode(sCodecName, eleStream, eleSize, mode, encParams,
+                                      const_cast<char *>(mime));
         if (status != AMEDIA_OK) {
-            ALOGE("Decode returned error");
-            free(inputBuffer);
+            ALOGE("Encoder returned error");
             return -1;
         }
-
-        AMediaFormat *decoderFormat = decoder->getFormat();
-        AMediaFormat *format = extractor->getFormat();
-        if (inputBuffer) {
-            free(inputBuffer);
-            inputBuffer = nullptr;
+        ALOGV("Encoding complete with codec %s for asyncMode = %d", sCodecName.c_str(), mode);
+        encoder->deInitCodec();
+        const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
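+        // Name the input by its encode configuration and estimate the clip duration from the
+        // size of the raw elementary stream.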
+        string inputReference;
+        int64_t clipDurationUs;
+        if (!strncmp(mime, "video/", 6)) {
+            inputReference = string(fileName) + "_" + to_string(jWidth) + "x" + to_string(jHeight) +
+                             "_" + to_string(jBitRate) + "bps";
+            int32_t frameSize = jWidth * jHeight * 3 / 2;
+            clipDurationUs =
+                    (((eleSize + frameSize - 1) / frameSize) / ENCODE_DEFAULT_FRAME_RATE) * 1000000;
+        } else {
+            inputReference = string(fileName) + "_" + to_string(jSampleRate) + "hz_" +
+                             to_string(jNumChannels) + "ch_" + to_string(jBitRate) + "bps";
+            clipDurationUs = (eleSize / (jSampleRate * jNumChannels)) * 1000000;
         }
-        const char *mime = nullptr;
-        AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime);
-        if (!mime) {
-            ALOGE("Error in AMediaFormat_getString");
-            return -1;
-        }
-        ifstream eleStream;
-        eleStream.open(outputFilePath, ifstream::binary | ifstream::ate);
-        if (!eleStream.is_open()) {
-            ALOGE("%s - File failed to open for reading!", outputFilePath);
-            env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
-            return -1;
-        }
-        const char *codecName = env->GetStringUTFChars(jCodecName, NULL);
-        const char *inputReference = env->GetStringUTFChars(jFileName, nullptr);
-        string sCodecName = string(codecName);
-        string sInputReference = string(inputReference);
-
-        bool asyncMode[2] = {true, false};
-        for (int i = 0; i < 2; i++) {
-            size_t eleSize = eleStream.tellg();
-            eleStream.seekg(0, ifstream::beg);
-
-            // Get encoder params
-            encParameter encParams;
-            if (!strncmp(mime, "video/", 6)) {
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &encParams.width);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &encParams.height);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, &encParams.frameRate);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, &encParams.bitrate);
-                if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
-                    encParams.frameRate = ENCODE_DEFAULT_FRAME_RATE;
-                    if (!strcmp(mime, "video/3gpp") || !strcmp(mime, "video/mp4v-es")) {
-                        encParams.bitrate = ENCODE_MIN_BIT_RATE /* 600 Kbps */;
-                    } else {
-                        encParams.bitrate = ENCODE_DEFAULT_BIT_RATE /* 8 Mbps */;
-                    }
-                }
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
-                AMediaFormat_getInt32(decoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
-                                      &encParams.colorFormat);
-            } else {
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &encParams.sampleRate);
-                AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
-                                      &encParams.numChannels);
-                encParams.bitrate = ENCODE_DEFAULT_AUDIO_BIT_RATE;
-            }
-            Encoder *encoder = new Encoder();
-            encoder->setupEncoder();
-            status = encoder->encode(sCodecName, eleStream, eleSize, asyncMode[i], encParams,
-                                     (char *)mime);
-            if (status != AMEDIA_OK) {
-                ALOGE("Encoder returned error");
-                return -1;
-            }
-            ALOGV("Encoding complete with codec %s for asyncMode = %d", sCodecName.c_str(),
-                  asyncMode[i]);
-            encoder->deInitCodec();
-            const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
-            encoder->dumpStatistics(sInputReference, extractor->getClipDuration(), sCodecName,
-                                    (asyncMode[i] ? "async" : "sync"), statsFile);
-            env->ReleaseStringUTFChars(jStatsFile, statsFile);
-            encoder->resetEncoder();
-            delete encoder;
-            encoder = nullptr;
-        }
-        eleStream.close();
-        if (outFp) {
-            fclose(outFp);
-            outFp = nullptr;
-        }
-        env->ReleaseStringUTFChars(jFileName, inputReference);
-        env->ReleaseStringUTFChars(jCodecName, codecName);
-        env->ReleaseStringUTFChars(jOutFilePath, outputFilePath);
-        if (format) {
-            AMediaFormat_delete(format);
-            format = nullptr;
-        }
-        if (decoderFormat) {
-            AMediaFormat_delete(decoderFormat);
-            decoderFormat = nullptr;
-        }
-        decoder->deInitCodec();
-        decoder->resetDecoder();
+        encoder->dumpStatistics(inputReference, clipDurationUs, sCodecName,
+                                (mode ? "async" : "sync"), statsFile);
+        env->ReleaseStringUTFChars(jStatsFile, statsFile);
+        encoder->resetEncoder();
+        delete encoder;
+        encoder = nullptr;
     }
-    if (inputFp) {
-        fclose(inputFp);
-        inputFp = nullptr;
-    }
-    extractor->deInitExtractor();
-    delete decoder;
+    eleStream.close();
+    env->ReleaseStringUTFChars(jFilePath, filePath);
+    env->ReleaseStringUTFChars(jFileName, fileName);
+    env->ReleaseStringUTFChars(jMime, mime);
+    env->ReleaseStringUTFChars(jCodecName, codecName);
     return 0;
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 45e5574..754cd8e 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -175,10 +175,10 @@
 
                 @Override
                 public void onError(@NonNull MediaCodec mediaCodec, @NonNull CodecException e) {
-                    mediaCodec.stop();
-                    mediaCodec.release();
-                    Log.e(TAG, "CodecError: " + e.toString());
+                    mSignalledError = true;
+                    Log.e(TAG, "Codec Error: " + e.toString());
                     e.printStackTrace();
+                    synchronized (mLock) { mLock.notify(); }
                 }
 
                 @Override
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
index 38b608a..3e3969c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Native.java
@@ -27,6 +27,7 @@
     public native int Decode(String inputFilePath, String inputFileName, String statsFile,
             String codecName, boolean asyncMode);
 
-    public native int Encode(String inputFilePath, String inputFileName, String outputFilePath,
-            String statsFile, String codecName);
+    public native int Encode(String inputFilePath, String inputFileName, String statsFile,
+            String codecName, String mime, int bitRate, int colorFormat, int frameInterval,
+            int width, int height, int profile, int level, int sampleRate, int numChannel);
 }
diff --git a/media/tests/benchmark/README.md b/media/tests/benchmark/README.md
index 05fbe6f..047c289 100644
--- a/media/tests/benchmark/README.md
+++ b/media/tests/benchmark/README.md
@@ -1,7 +1,7 @@
 # Benchmark tests
 
 The Benchmark app analyses the time taken by MediaCodec, MediaExtractor and MediaMuxer for a given set of inputs. It is used to benchmark these modules on Android devices.
-Benchmark results are emitted to logcat.
+Benchmark results are published as a CSV report.
 
 This page describes the steps to run the NDK and SDK layer tests.
 
@@ -10,35 +10,49 @@
 mmm frameworks/av/media/tests/benchmark/
 ```
 
-# NDK
-
-To run the test suite for measuring performance of the native layer, follow the following steps:
-
-The binaries will be created in the following path : $OUT/data/nativetest64/
-
-adb push $OUT/data/nativetest64/* /data/local/tmp/
-
-Eg. adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
-
-To run the binary, follow the commands mentioned below under each module.
-
-The resource file for the tests is taken from [here](https://drive.google.com/open?id=1ghMr17BBJ7n0pqbm7oREiTN_MNemJUqy)
+# Resources
+The resource file for the tests is taken from [here](https://storage.googleapis.com/android_media/frameworks/av/media/tests/benchmark/MediaBenchmark.zip)
 
 Download the MediaBenchmark.zip file, unzip and push it to /data/local/tmp/ on the device.
 
 ```
 unzip MediaBenchmark.zip
-adb push MediaBenchmark /data/local/tmp
+adb push MediaBenchmark /data/local/tmp/MediaBenchmark/res/
 ```
 
+The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
+
+# NDK CLI Tests
+Note: the [Benchmark Application](#BenchmarkApplication) now supports profiling both the SDK and NDK APIs and is the preferred way to benchmark codecs.
+
+To run the test suite for measuring the performance of the native layer, follow the steps below:
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test a 64-bit binary, push the binaries from nativetest64.
+
+adb push $OUT/data/nativetest64/* /data/local/tmp/. For example:
+
+```
+adb push $OUT/data/nativetest64/extractorTest/extractorTest /data/local/tmp/
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+To test a 32-bit binary, push the binaries from nativetest.
+
+adb push $OUT/data/nativetest/* /data/local/tmp/. For example:
+
+```
+adb push $OUT/data/nativetest/extractorTest/extractorTest /data/local/tmp/
+```
+
+To run the binary, follow the commands mentioned below under each module.
+
 ## Extractor
 
 The test extracts elementary stream and benchmarks the extractors available in NDK.
 
-The resource files are assumed to be at /data/local/tmp/MediaBenchmark/res/. You can use a different location, but you have to modify the rest of the instructions to replace /data/local/tmp/MediaBenchmark/res/ with wherever you chose to put the files.
-
-The path to these files on the device is required to be given for the test.
-
 ```
 adb shell /data/local/tmp/extractorTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -47,8 +61,6 @@
 
 The test decodes input stream and benchmarks the decoders available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/decoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -57,8 +69,6 @@
 
 The test muxes elementary stream and benchmarks the muxers available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/muxerTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -67,55 +77,82 @@
 
 The test encodes input stream and benchmarks the encoders available in NDK.
 
-Setup steps are same as extractor.
-
 ```
 adb shell /data/local/tmp/encoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
 
-# SDK
+# <a name="BenchmarkApplication"></a> Benchmark Application
+To run the test suite for measuring the performance of the SDK and NDK APIs, follow the steps below.
+The Benchmark application can be run in two ways.
 
-To run the test suite for measuring performance of the SDK APIs, follow the following steps:
+## Steps to run with atest
+Note that the atest command will install the Benchmark application and push the required test files to the device.
+
+To run all the tests, run the following command:
+```
+atest com.android.media.benchmark.tests -- --enable-module-dynamic-download=true
+```
+
+For running the tests individually, run the following atest commands:
+
+```
+atest com.android.media.benchmark.tests.ExtractorTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.DecoderTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.MuxerTest -- --enable-module-dynamic-download=true
+atest com.android.media.benchmark.tests.EncoderTest -- --enable-module-dynamic-download=true
+```
+
+## Steps to run without atest
 
 The apk will be created at the following path:
-$OUT/testcases/MediaBenchmarkTest/arm64/
 
-To get the resorce files for the test follow instructions given in [NDK](#NDK)
+The 64-bit apk will be created in the following path:
+$OUT/testcases/MediaBenchmarkTest/arm64/
 
 For installing the apk, run the command:
 ```
 adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm64/MediaBenchmarkTest.apk
 ```
 
-For running all the tests, run the command:
+The 32-bit apk will be created in the following path:
+$OUT/testcases/MediaBenchmarkTest/arm/
+
+For installing the apk, run the command:
+```
+adb install -f -r $OUT/testcases/MediaBenchmarkTest/arm/MediaBenchmarkTest.apk
+```
+
+To get the resource files for the test, follow the instructions given in [Resources](#Resources).
+
+To run all the tests, run the following command:
 ```
 adb shell am instrument -w -r -e package com.android.media.benchmark.tests com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Extractor
 
-The test extracts elementary stream and benchmarks the extractors available in SDK.
+The test extracts elementary stream and benchmarks the extractors available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.ExtractorTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Decoder
 
-The test decodes input stream and benchmarks the decoders available in SDK.
+The test decodes input stream and benchmarks the decoders available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.DecoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Muxer
 
-The test muxes elementary stream and benchmarks different writers available in SDK.
+The test muxes elementary stream and benchmarks different writers available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.MuxerTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
 
 ## Encoder
 
-The test encodes input stream and benchmarks the encoders available in SDK.
+The test encodes input stream and benchmarks the encoders available in SDK and NDK.
 ```
 adb shell am instrument -w -r -e class 'com.android.media.benchmark.tests.EncoderTest' com.android.media.benchmark/androidx.test.runner.AndroidJUnitRunner
 ```
@@ -124,24 +161,27 @@
 To run the test suite for measuring the performance of the codec2 layer, follow the steps below:
 
 The 32-bit binaries will be created in the following path : ${OUT}/data/nativetest/
+
 The 64-bit binaries will be created in the following path : ${OUT}/data/nativetest64/
 
 To test 64-bit binary push binaries from nativetest64.
 adb push $(OUT)/data/nativetest64/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest64/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
 
 To test 32-bit binary push binaries from nativetest.
 adb push $(OUT)/data/nativetest/* /data/local/tmp/
-Eg. adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
+adb push $(OUT)/data/nativetest/C2DecoderTest/C2DecoderTest /data/local/tmp/
+```
 
-To get the resource files for the test follow instructions given in [NDK](#NDK)
+To get the resource files for the test, follow the instructions given in [Resources](#Resources).
 
 ## C2 Decoder
 
 The test decodes input stream and benchmarks the codec2 decoders available in device.
 
-Setup steps are same as [extractor](#extractor).
-
 ```
 adb shell /data/local/tmp/C2DecoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
@@ -149,8 +189,95 @@
 
 The test encodes input stream and benchmarks the codec2 encoders available in device.
 
-Setup steps are same as [extractor](#extractor).
-
 ```
 adb shell /data/local/tmp/C2EncoderTest -P /data/local/tmp/MediaBenchmark/res/
 ```
+
+# Analysis
+
+The benchmark results are stored in a CSV file which can be used for analysis. These results are stored in the following format:
+<app directory>/<module_name>.<timestamp>.csv
+
+Note: This timestamp is in nanoseconds and will change based on the current system time.
+
+To find the location of the CSV file, look for the path in the logs. An example log is shown below:
+
+```
+com.android.media.benchmark D/DecoderTest: Saving Benchmark results in: /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv
+```
+
+This file can be pulled from the device using the "adb pull" command:
+```
+adb pull /storage/emulated/0/Android/data/com.android.media.benchmark/files/Decoder.1587732395387.csv ./Decoder.1587732395387.csv
+```
+
+## CSV Columns
+
+The following columns are available in the CSV report (a minimal sketch for loading the report is included after the column list below).
+
+Note: All time values are in nanoseconds.
+
+1. **currentTime**: The time recorded at the creation of the stats. This may be used to estimate the time between consecutive test clips.
+
+2. **fileName**: The file being used as an input for the benchmark test.
+
+3. **operation**: The current operation on the input test vector, i.e. Extract/Mux/Encode/Decode.
+
+4. **NDK/SDK**: The target API, i.e. AMedia (NDK) vs Media (SDK) calls, for the operation being performed.
+
+5. **sync/async**: This is specific to MediaCodec objects (i.e. Encoder and Decoder). It specifies the mode in which the MediaCodec APIs are used. In async mode, callbacks are set and deliver the buffers; in sync mode, the dequeueInputBuffer/dequeueOutputBuffer APIs are polled to queue and dequeue input and output buffers respectively (see the sketch after this list).
+
+6. **setupTime**: The time taken to set up the MediaExtractor/Muxer/Codec instance.
+
+    * MediaCodec: includes setting async/sync mode, configuring with a format, and starting the codec.
+
+    * MediaExtractor: includes AMediaExtractor_new and setDataSource.
+
+    * MediaMuxer: includes creating the object, adding track, and starting the muxer.
+
+7. **destroyTime**: The time taken to stop and close the MediaExtractor/Muxer/Codec instance.
+
+8. **minimumTime**: The minimum time taken to extract/mux/encode/decode a frame.
+
+9. **maximumTime**: The maximum time taken to extract/mux/encode/decode a frame.
+
+10. **averageTime**: Average time taken to extract/mux/encode/decode per frame.
+
+    * MediaCodec: computed as the total time taken to encode/decode all frames divided by the number of frames encoded/decoded.
+
+    * MediaExtractor: computed as the total time taken to extract all frames divided by the number of frames extracted.
+
+    * MediaMuxer: computed as the total time taken to mux all frames divided by the number of frames muxed.
+
+11. **timeToProcess1SecContent**: The time required to process one second's worth of input data.
+
+12. **totalBytesProcessedPerSec**: The number of bytes extracted/muxed/decoded/encoded per second.
+
+13. **timeToFirstFrame**: The time taken to receive the first output frame.
+
+14. **totalSizeInBytes**: The total output size of the operation (in bytes).
+
+15. **totalTime**: The time taken to perform the complete operation (i.e. Extract/Mux/Decode/Encode) for the respective test vector.
+
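+For reference, the following is a minimal, illustrative sketch of the sync-mode polling loop described in the sync/async column above. It is not the Encoder/Decoder implementation used by the benchmark; the class name, method name, and timeout value are placeholders, and the codec is assumed to be already configured and started.
+
+```
+import java.nio.ByteBuffer;
+
+import android.media.MediaCodec;
+
+public class SyncModeSketch {
+    // Feeds a raw input buffer to an already configured and started codec in sync (polling) mode.
+    public static void runSync(MediaCodec codec, byte[] rawInput) {
+        final long kTimeoutUs = 10000;
+        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+        boolean inputDone = false;
+        boolean outputDone = false;
+        int offset = 0;
+        while (!outputDone) {
+            if (!inputDone) {
+                int inIndex = codec.dequeueInputBuffer(kTimeoutUs);
+                if (inIndex >= 0) {
+                    ByteBuffer inBuf = codec.getInputBuffer(inIndex);
+                    int size = Math.min(inBuf.capacity(), rawInput.length - offset);
+                    inBuf.put(rawInput, offset, size);
+                    offset += size;
+                    int flags = (offset >= rawInput.length)
+                            ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+                    // A real benchmark would pass increasing presentation timestamps here.
+                    codec.queueInputBuffer(inIndex, 0, size, 0 /* presentationTimeUs */, flags);
+                    inputDone = (flags != 0);
+                }
+            }
+            int outIndex = codec.dequeueOutputBuffer(info, kTimeoutUs);
+            if (outIndex >= 0) {
+                // Per-frame timing would be recorded here before the buffer is released.
+                codec.releaseOutputBuffer(outIndex, false);
+                outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+            }
+        }
+    }
+}
+```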
+
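+As an example of consuming the report on the host, the sketch below loads a pulled CSV file and prints the componentName and averageTime columns. This is only a sketch: the class name and default file name are placeholders, and it assumes the report begins with a header row naming the columns described above (including componentName).
+
+```
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+public class StatsCsvSketch {
+    public static void main(String[] args) throws IOException {
+        // Path to a report pulled from the device; the default name is just an example.
+        String csvPath = args.length > 0 ? args[0] : "Decoder.1587732395387.csv";
+        try (BufferedReader reader = new BufferedReader(new FileReader(csvPath))) {
+            // The first line is assumed to be the header row written by the benchmark.
+            List<String> header = Arrays.asList(reader.readLine().split("\\s*,\\s*"));
+            int nameIdx = header.indexOf("componentName");
+            int avgIdx = header.indexOf("averageTime");
+            if (nameIdx < 0 || avgIdx < 0) {
+                System.out.println("Unexpected header: " + header);
+                return;
+            }
+            String line;
+            while ((line = reader.readLine()) != null) {
+                if (line.trim().isEmpty()) continue;
+                String[] fields = line.split("\\s*,\\s*");
+                // All time values in the report are in nanoseconds.
+                System.out.println(fields[nameIdx] + " -> averageTime(ns) = " + fields[avgIdx]);
+            }
+        }
+    }
+}
+```
+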
+## Muxer
+1. **componentName**: The format of the output media file. The following muxers are currently supported:
+     * Ogg, Webm, 3gpp, and mp4.
+
+## Decoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+    *   Video: H263, H264, H265, VPx, Mpeg4, Mpeg2, AV1
+    *   Audio: AAC, Flac, Opus, MP3, Vorbis, GSM, AMR-NB/WB
+
+## Encoder
+1. **componentName**: Includes all supported codecs on the device. Aliased components are skipped.
+    *   Video: H263, H264, H265, VPx, Mpeg4
+    *   Audio: AAC, Flac, Opus, AMR-NB/WB
+
+## Common Failures
+On some devices, if a codec isn't supported, some tests may report a failure like "codec not found for".
+
+For example, on mobile devices without support for the mpeg2 decoder, the following failure is observed:
+```
+Unable to create codec by mime: video/mpeg2
+```
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index d4389da..6b54c6a 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libmediabenchmark_common",
     defaults: [
diff --git a/media/tests/benchmark/src/native/decoder/Android.bp b/media/tests/benchmark/src/native/decoder/Android.bp
index 9791c11..601b2f6 100644
--- a/media/tests/benchmark/src/native/decoder/Android.bp
+++ b/media/tests/benchmark/src/native/decoder/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libmediabenchmark_decoder",
     defaults: [
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 20a1468..6539f24 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -53,8 +53,8 @@
     }
 
     int64_t sTime = mStats->getCurTime();
-    mComponent = mClient->CreateComponentByName(compName.c_str(), mListener, &mClient);
-    if (mComponent == nullptr) {
+    if (mClient->CreateComponentByName(compName.c_str(), mListener, &mComponent, &mClient) !=
+        C2_OK) {
         ALOGE("Create component failed for %s", compName.c_str());
         return -1;
     }
@@ -156,9 +156,11 @@
     mStats->setDeInitTime(timeTaken);
 }
 
-void C2Decoder::dumpStatistics(string inputReference, int64_t durationUs) {
+void C2Decoder::dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                               string statsFile) {
     string operation = "c2decode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    string mode = "async";
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 void C2Decoder::resetDecoder() {
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.h b/media/tests/benchmark/src/native/decoder/C2Decoder.h
index 4a3eb96..fb35a66 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.h
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.h
@@ -31,7 +31,8 @@
 
     void deInitCodec();
 
-    void dumpStatistics(string inputReference, int64_t durationUs);
+    void dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                        string statsFile);
 
     void resetDecoder();
 
diff --git a/media/tests/benchmark/src/native/encoder/Android.bp b/media/tests/benchmark/src/native/encoder/Android.bp
index 8de7823..a787068 100644
--- a/media/tests/benchmark/src/native/encoder/Android.bp
+++ b/media/tests/benchmark/src/native/encoder/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libmediabenchmark_encoder",
     defaults: [
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
index 33429ef..ca79881 100644
--- a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
@@ -68,8 +68,8 @@
     }
 
     int64_t sTime = mStats->getCurTime();
-    mComponent = mClient->CreateComponentByName(compName.c_str(), mListener, &mClient);
-    if (mComponent == nullptr) {
+    if (mClient->CreateComponentByName(compName.c_str(), mListener, &mComponent, &mClient) !=
+        C2_OK) {
         ALOGE("Create component failed for %s", compName.c_str());
         return -1;
     }
@@ -251,9 +251,11 @@
     mStats->setDeInitTime(timeTaken);
 }
 
-void C2Encoder::dumpStatistics(string inputReference, int64_t durationUs) {
+void C2Encoder::dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                               string statsFile) {
     string operation = "c2encode";
-    mStats->dumpStatistics(operation, inputReference, durationUs);
+    string mode = "async";
+    mStats->dumpStatistics(operation, inputReference, durationUs, componentName, mode, statsFile);
 }
 
 void C2Encoder::resetEncoder() {
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.h b/media/tests/benchmark/src/native/encoder/C2Encoder.h
index a4ca097..7a021f4 100644
--- a/media/tests/benchmark/src/native/encoder/C2Encoder.h
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.h
@@ -44,7 +44,8 @@
 
     void deInitCodec();
 
-    void dumpStatistics(string inputReference, int64_t durationUs);
+    void dumpStatistics(string inputReference, int64_t durationUs, string componentName,
+                        string statsFile);
 
     void resetEncoder();
 
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index 26fb1b9..15c479d 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -203,13 +203,13 @@
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_WIDTH, mParams.width);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_HEIGHT, mParams.height);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_FRAME_RATE, mParams.frameRate);
+        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, mParams.iFrameInterval);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_BIT_RATE, mParams.bitrate);
-        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 1);
-        if (mParams.profile && mParams.level) {
+        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, mParams.colorFormat);
+        if (mParams.profile != -1 && mParams.level != -1) {
             AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_PROFILE, mParams.profile);
             AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_LEVEL, mParams.level);
         }
-        AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, mParams.colorFormat);
     } else {
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_SAMPLE_RATE, mParams.sampleRate);
         AMediaFormat_setInt32(mFormat, AMEDIAFORMAT_KEY_CHANNEL_COUNT, mParams.numChannels);
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.h b/media/tests/benchmark/src/native/encoder/Encoder.h
index 5ad142b..324317c 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.h
+++ b/media/tests/benchmark/src/native/encoder/Encoder.h
@@ -23,10 +23,11 @@
 #include <queue>
 #include <thread>
 
-#include "media/NdkImage.h"
 #include "BenchmarkCommon.h"
 #include "Stats.h"
 
+// Constant not defined in the NDK API.
+constexpr int32_t COLOR_FormatYUV420Flexible = 0x7F420888;
 
 struct encParameter {
     int32_t bitrate = -1;
@@ -38,9 +39,10 @@
     int32_t width = 0;
     int32_t height = 0;
     int32_t frameRate = -1;
-    int32_t profile = 0;
-    int32_t level = 0;
-    int32_t colorFormat = AIMAGE_FORMAT_YUV_420_888;
+    int32_t iFrameInterval = 0;
+    int32_t profile = -1;
+    int32_t level = -1;
+    int32_t colorFormat = COLOR_FormatYUV420Flexible;
 };
 
 class Encoder : public CallBackHandle {
diff --git a/media/tests/benchmark/src/native/extractor/Android.bp b/media/tests/benchmark/src/native/extractor/Android.bp
index 7ed9476..4946990 100644
--- a/media/tests/benchmark/src/native/extractor/Android.bp
+++ b/media/tests/benchmark/src/native/extractor/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libmediabenchmark_extractor",
     defaults: [
diff --git a/media/tests/benchmark/src/native/muxer/Android.bp b/media/tests/benchmark/src/native/muxer/Android.bp
index f669d4a..84985e4 100644
--- a/media/tests/benchmark/src/native/muxer/Android.bp
+++ b/media/tests/benchmark/src/native/muxer/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libmediabenchmark_muxer",
     defaults: [
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index f46fa4a..0fbd20d 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "extractorTest",
     gtest: true,
diff --git a/media/tests/benchmark/tests/BenchmarkTestEnvironment.h b/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
index ae2eee1..4edb048 100644
--- a/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
+++ b/media/tests/benchmark/tests/BenchmarkTestEnvironment.h
@@ -25,7 +25,9 @@
 
 class BenchmarkTestEnvironment : public ::testing::Environment {
   public:
-    BenchmarkTestEnvironment() : res("/sdcard/media/") {}
+    BenchmarkTestEnvironment()
+        : res("/data/local/tmp/MediaBenchmark/res/"),
+          statsFile("/data/local/tmp/MediaBenchmark/res/stats.csv") {}
 
     // Parses the command line argument
     int initFromOptions(int argc, char **argv);
@@ -34,8 +36,15 @@
 
     const string getRes() const { return res; }
 
+    void setStatsFile(const string module) { statsFile = getRes() + module; }
+
+    const string getStatsFile() const { return statsFile; }
+
+    bool writeStatsHeader();
+
   private:
     string res;
+    string statsFile;
 };
 
 int BenchmarkTestEnvironment::initFromOptions(int argc, char **argv) {
@@ -70,4 +79,26 @@
     return 0;
 }
 
+/**
+ * Writes the stats header to the stats file configured for this environment.
+ *
+ * \return true if the header was written successfully, false otherwise
+ **/
+bool BenchmarkTestEnvironment::writeStatsHeader() {
+    char statsHeader[] =
+        "currentTime, fileName, operation, componentName, NDK/SDK, sync/async, setupTime, "
+        "destroyTime, minimumTime, maximumTime, averageTime, timeToProcess1SecContent, "
+        "totalBytesProcessedPerSec, timeToFirstFrame, totalSizeInBytes, totalTime\n";
+    FILE *fpStats = fopen(statsFile.c_str(), "w");
+    if(!fpStats) {
+        return false;
+    }
+    int32_t numBytes = fwrite(statsHeader, sizeof(char), sizeof(statsHeader), fpStats);
+    fclose(fpStats);
+    if(numBytes != sizeof(statsHeader)) {
+        return false;
+    }
+    return true;
+}
+
 #endif  // __BENCHMARK_TEST_ENVIRONMENT_H__
diff --git a/media/tests/benchmark/tests/C2DecoderTest.cpp b/media/tests/benchmark/tests/C2DecoderTest.cpp
index dedc743..85dcbc1 100644
--- a/media/tests/benchmark/tests/C2DecoderTest.cpp
+++ b/media/tests/benchmark/tests/C2DecoderTest.cpp
@@ -136,7 +136,8 @@
                 mDecoder->deInitCodec();
                 int64_t durationUs = extractor->getClipDuration();
                 ALOGV("codec : %s", codecName.c_str());
-                mDecoder->dumpStatistics(GetParam().first, durationUs);
+                mDecoder->dumpStatistics(GetParam().first, durationUs, codecName,
+                                         gEnv->getStatsFile());
                 mDecoder->resetDecoder();
             }
         }
@@ -178,6 +179,9 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("C2Decoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
         ALOGV("C2 Decoder Test result = %d\n", status);
     }
diff --git a/media/tests/benchmark/tests/C2EncoderTest.cpp b/media/tests/benchmark/tests/C2EncoderTest.cpp
index 98eb17a..b18d856 100644
--- a/media/tests/benchmark/tests/C2EncoderTest.cpp
+++ b/media/tests/benchmark/tests/C2EncoderTest.cpp
@@ -108,7 +108,7 @@
         }
 
         string decName = "";
-        string outputFileName = "decode.out";
+        string outputFileName = "/data/local/tmp/decode.out";
         FILE *outFp = fopen(outputFileName.c_str(), "wb");
         ASSERT_NE(outFp, nullptr) << "Unable to open output file" << outputFileName
                                   << " for dumping decoder's output";
@@ -140,7 +140,8 @@
                 mEncoder->deInitCodec();
                 int64_t durationUs = extractor->getClipDuration();
                 ALOGV("codec : %s", codecName.c_str());
-                mEncoder->dumpStatistics(GetParam().first, durationUs);
+                mEncoder->dumpStatistics(GetParam().first, durationUs, codecName,
+                                         gEnv->getStatsFile());
                 mEncoder->resetEncoder();
             }
         }
@@ -180,6 +181,9 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("C2Encoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
         ALOGV("C2 Encoder Test result = %d\n", status);
     }
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 9f96d3b..81ef02a 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -84,7 +84,8 @@
         decoder->deInitCodec();
         ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
-        decoder->dumpStatistics(inputReference);
+        decoder->dumpStatistics(inputReference, codecName, (asyncMode ? "async" : "sync"),
+                                gEnv->getStatsFile());
         free(inputBuffer);
         decoder->resetDecoder();
     }
@@ -179,8 +180,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Decoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD("Decoder Test result = %d\n", status);
+        ALOGV("Decoder Test result = %d\n", status);
     }
     return status;
 }
\ No newline at end of file
diff --git a/media/tests/benchmark/tests/EncoderTest.cpp b/media/tests/benchmark/tests/EncoderTest.cpp
index dc2a2dd..7e1681d 100644
--- a/media/tests/benchmark/tests/EncoderTest.cpp
+++ b/media/tests/benchmark/tests/EncoderTest.cpp
@@ -23,6 +23,10 @@
 #include "Decoder.h"
 #include "Encoder.h"
 
+constexpr int32_t kEncodeDefaultVideoBitRate = 8000000 /* 8 Mbps */;
+constexpr int32_t kEncodeMinVideoBitRate = 600000 /* 600 Kbps */;
+constexpr int32_t kEncodeDefaultAudioBitRate = 128000 /* 128 Kbps */;
+
 static BenchmarkTestEnvironment *gEnv = nullptr;
 
 class EncoderTest : public ::testing::TestWithParam<tuple<string, string, bool>> {};
@@ -78,7 +82,7 @@
         }
 
         string decName = "";
-        string outputFileName = "decode.out";
+        string outputFileName = "/data/local/tmp/decode.out";
         FILE *outFp = fopen(outputFileName.c_str(), "wb");
         ASSERT_NE(outFp, nullptr) << "Unable to open output file" << outputFileName
                                   << " for dumping decoder's output";
@@ -86,6 +90,7 @@
         decoder->setupDecoder();
         status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
         ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
+        AMediaFormat *decoderFormat = decoder->getFormat();
 
         ifstream eleStream;
         eleStream.open(outputFileName.c_str(), ifstream::binary | ifstream::ate);
@@ -108,11 +113,13 @@
             if (encParams.bitrate <= 0 || encParams.frameRate <= 0) {
                 encParams.frameRate = 25;
                 if (!strcmp(mime, "video/3gpp") || !strcmp(mime, "video/mp4v-es")) {
-                    encParams.bitrate = 600000 /* 600 Kbps */;
+                    encParams.bitrate = kEncodeMinVideoBitRate;
                 } else {
-                    encParams.bitrate = 8000000 /* 8 Mbps */;
+                    encParams.bitrate = kEncodeDefaultVideoBitRate;
                 }
             }
+            AMediaFormat_getInt32(decoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                                  &encParams.colorFormat);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_PROFILE, &encParams.profile);
             AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_LEVEL, &encParams.level);
         } else {
@@ -120,8 +127,7 @@
                                               &encParams.sampleRate));
             ASSERT_TRUE(AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT,
                                               &encParams.numChannels));
-            encParams.bitrate =
-                    encParams.sampleRate * encParams.numChannels * 16 /* bitsPerSample */;
+            encParams.bitrate = kEncodeDefaultAudioBitRate;
         }
 
         encoder->setupEncoder();
@@ -133,7 +139,8 @@
         encoder->deInitCodec();
         ALOGV("codec : %s", codecName.c_str());
         string inputReference = get<0>(params);
-        encoder->dumpStatistics(inputReference, extractor->getClipDuration());
+        encoder->dumpStatistics(inputReference, extractor->getClipDuration(), codecName,
+                                (asyncMode ? "async" : "sync"), gEnv->getStatsFile());
         eleStream.close();
         if (outFp) fclose(outFp);
 
@@ -141,6 +148,10 @@
             AMediaFormat_delete(format);
             format = nullptr;
         }
+        if (decoderFormat) {
+            AMediaFormat_delete(decoderFormat);
+            decoderFormat = nullptr;
+        }
         encoder->resetEncoder();
         decoder->deInitCodec();
         free(inputBuffer);
@@ -214,8 +225,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Encoder.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD("Encoder Test result = %d\n", status);
+        ALOGV("Encoder Test result = %d\n", status);
     }
     return status;
 }
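Note on the change above: the encoder path now queries the decoder's output format and forwards AMEDIAFORMAT_KEY_COLOR_FORMAT into the encoder parameters instead of assuming a fixed color format. A minimal NDK sketch of that query follows; the helper name and fallback parameter are illustrative and not part of the patch, only the AMediaCodec/AMediaFormat calls are real NDK API.

#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>

// Sketch only: read the color format a decoder actually produced so an encoder
// can be configured to match it. `decoder` is assumed to be a configured and
// started AMediaCodec; `fallback` is used if the key is absent.
static int32_t queryDecoderColorFormat(AMediaCodec *decoder, int32_t fallback) {
    AMediaFormat *format = AMediaCodec_getOutputFormat(decoder);
    int32_t colorFormat = fallback;
    if (format != nullptr) {
        AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, &colorFormat);
        AMediaFormat_delete(format);
    }
    return colorFormat;
}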
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index ad8f1e6..d14d15b 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -48,8 +48,7 @@
     ASSERT_EQ(status, AMEDIA_OK) << "Extraction failed \n";
 
     extractObj->deInitExtractor();
-
-    extractObj->dumpStatistics(GetParam().first);
+    extractObj->dumpStatistics(GetParam().first, "", gEnv->getStatsFile());
 
     fclose(inputFp);
     delete extractObj;
@@ -79,8 +78,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Extractor.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGD(" Extractor Test result = %d\n", status);
+        ALOGV("Extractor Test result = %d\n", status);
     }
     return status;
 }
diff --git a/media/tests/benchmark/tests/MuxerTest.cpp b/media/tests/benchmark/tests/MuxerTest.cpp
index fa2635d..991644b 100644
--- a/media/tests/benchmark/tests/MuxerTest.cpp
+++ b/media/tests/benchmark/tests/MuxerTest.cpp
@@ -113,7 +113,7 @@
         ASSERT_EQ(status, 0) << "Mux failed";
 
         muxerObj->deInitMuxer();
-        muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str());
+        muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str(), fmt, gEnv->getStatsFile());
         free(inputBuffer);
         fclose(outputFp);
         muxerObj->resetMuxer();
@@ -151,8 +151,11 @@
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
     if (status == 0) {
+        gEnv->setStatsFile("Muxer.csv");
+        status = gEnv->writeStatsHeader();
+        ALOGV("Stats file = %d\n", status);
         status = RUN_ALL_TESTS();
-        ALOGV("Test result = %d\n", status);
+        ALOGV("Muxer Test result = %d\n", status);
     }
     return status;
 }
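Each benchmark above now follows the same reporting flow: main() points the shared test environment at a per-suite CSV (setStatsFile), writes the header row once (writeStatsHeader), and every dumpStatistics() call appends rows via gEnv->getStatsFile(). A minimal sketch of what such an environment helper could look like; the member signatures, return convention, and column names here are assumptions for illustration, not the actual BenchmarkTestEnvironment code.

#include <fstream>
#include <string>

// Sketch only: mirrors the setStatsFile()/writeStatsHeader()/getStatsFile()
// calls used by the tests above, with assumed signatures and illustrative columns.
class StatsFileEnvironment {
  public:
    void setStatsFile(const std::string &fileName) {
        mStatsFile = mOutputDir + fileName;
    }

    // Assumed convention: 0 on success; the tests above only log the value.
    int writeStatsHeader() {
        std::ofstream out(mStatsFile, std::ios::trunc);
        if (!out.is_open()) return -1;
        out << "inputFile,operation,codecName,mode,setupTimeUs,processingTimeUs\n";
        return 0;
    }

    const std::string &getStatsFile() const { return mStatsFile; }

  private:
    std::string mOutputDir = "/data/local/tmp/";
    std::string mStatsFile;
};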
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index e3f1e44..bfe73d5 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libmediautils",
 
@@ -28,17 +37,24 @@
     ],
     static_libs: [
         "libc_malloc_debug_backtrace",
+        "libbatterystats_aidl",
+        "libprocessinfoservice_aidl",
     ],
     shared_libs: [
+        "libaudioclient_aidl_conversion",
         "libaudioutils", // for clock.h
         "libbinder",
         "libcutils",
         "liblog",
         "libutils",
         "libhidlbase",
+        "libpermission",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
     ],
+    export_static_lib_headers: [
+        "libbatterystats_aidl",
+    ],
 
     logtags: ["EventLogTags.logtags"],
 
@@ -53,6 +69,10 @@
         "libmedia_headers",
     ],
 
+    export_shared_lib_headers: [
+        "libpermission",
+    ],
+
     include_dirs: [
         // For DEBUGGER_SIGNAL
         "system/core/debuggerd/include",
@@ -60,3 +80,10 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "libmediautils_headers",
+    vendor_available: true,  // required for platform/hardware/interfaces
+
+    export_include_dirs: ["include"],
+}
diff --git a/media/utils/EventLogTags.logtags b/media/utils/EventLogTags.logtags
index 67f0ea8..c397f34 100644
--- a/media/utils/EventLogTags.logtags
+++ b/media/utils/EventLogTags.logtags
@@ -31,7 +31,7 @@
 # 6: Percent
 # Default value for data of type int/long is 2 (bytes).
 #
-# See system/core/logcat/event.logtags for the master copy of the tags.
+# See system/core/logcat/event.logtags for the original definition of the tags.
 
 # 61000 - 61199 reserved for audioserver
 
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..e212794 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -21,12 +21,15 @@
 #include <media/stagefright/ProcessInfo.h>
 
 #include <binder/IPCThreadState.h>
-#include <binder/IProcessInfoService.h>
 #include <binder/IServiceManager.h>
 #include <private/android_filesystem_config.h>
+#include <processinfo/IProcessInfoService.h>
 
 namespace android {
 
+static constexpr int32_t INVALID_ADJ = -10000;
+static constexpr int32_t NATIVE_ADJ = -1000;
+
 ProcessInfo::ProcessInfo() {}
 
 bool ProcessInfo::getPriority(int pid, int* priority) {
@@ -35,8 +38,6 @@
 
     size_t length = 1;
     int32_t state;
-    static const int32_t INVALID_ADJ = -10000;
-    static const int32_t NATIVE_ADJ = -1000;
     int32_t score = INVALID_ADJ;
     status_t err = service->getProcessStatesAndOomScoresFromPids(length, &pid, &state, &score);
     if (err != OK) {
@@ -45,8 +46,17 @@
     }
     ALOGV("pid %d state %d score %d", pid, state, score);
     if (score <= NATIVE_ADJ) {
-        ALOGE("pid %d invalid OOM adjustments value %d", pid, score);
-        return false;
+        std::scoped_lock lock{mOverrideLock};
+
+        // If this process is not tracked by ActivityManagerService, look for overrides.
+        auto it = mOverrideMap.find(pid);
+        if (it != mOverrideMap.end()) {
+            ALOGI("pid %d invalid OOM score %d, override to %d", pid, score, it->second.oomScore);
+            score = it->second.oomScore;
+        } else {
+            ALOGE("pid %d invalid OOM score %d", pid, score);
+            return false;
+        }
     }
 
     // Use the OOM adjustment value as the priority. The lower the value, the higher the priority.
@@ -61,6 +71,26 @@
     return (callingPid == getpid()) || (callingPid == pid) || (callingUid == AID_MEDIA);
 }
 
+bool ProcessInfo::overrideProcessInfo(int pid, int procState, int oomScore) {
+    std::scoped_lock lock{mOverrideLock};
+
+    mOverrideMap.erase(pid);
+
+    // Disable the override if oomScore is set to NATIVE_ADJ or below.
+    if (oomScore <= NATIVE_ADJ) {
+        return false;
+    }
+
+    mOverrideMap.emplace(pid, ProcessInfoOverride{procState, oomScore});
+    return true;
+}
+
+void ProcessInfo::removeProcessInfoOverride(int pid) {
+    std::scoped_lock lock{mOverrideLock};
+
+    mOverrideMap.erase(pid);
+}
+
 ProcessInfo::~ProcessInfo() {}
 
 }  // namespace android
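overrideProcessInfo() gives callers a way to supply a priority for processes that ActivityManagerService does not track (anything reported at or below NATIVE_ADJ), and getPriority() now falls back to that entry instead of failing. A short usage sketch follows; the procState/oomScore values are illustrative and not taken from the patch.

#include <media/stagefright/ProcessInfo.h>

// Sketch: pin a priority for a native pid, query it, then remove the override.
static void exampleProcessInfoOverride(int nativePid) {
    android::ProcessInfo info;

    // Returns false (and clears any existing override) if oomScore <= NATIVE_ADJ.
    if (!info.overrideProcessInfo(nativePid, /* procState */ 2, /* oomScore */ 100)) {
        return;
    }

    int priority = 0;
    // If ActivityManagerService reports an invalid score for nativePid,
    // getPriority() now uses the override instead of returning false.
    info.getPriority(nativePid, &priority);

    info.removeProcessInfoOverride(nativePid);
}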
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 87ea084..9c7b863 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -22,6 +22,10 @@
 #include <binder/IServiceManager.h>
 #include <binder/PermissionCache.h>
 #include "mediautils/ServiceUtilities.h"
+#include <system/audio-hal-enums.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionUtil.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <iterator>
 #include <algorithm>
@@ -36,14 +40,16 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 static const String16 sAndroidPermissionRecordAudio("android.permission.RECORD_AUDIO");
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
-        const String16& opPackageName, uid_t uid) {
-    if (opPackageName.size() > 0) {
-        return opPackageName;
+        const std::optional<String16> opPackageName, uid_t uid) {
+    if (opPackageName.has_value() && opPackageName.value().size() > 0) {
+        return opPackageName.value();
     }
     // In some cases the calling code has no access to the package it runs under.
     // For example, code using the wilhelm framework's OpenSL-ES APIs. In this
@@ -56,80 +62,120 @@
     permissionController.getPackagesForUid(uid, packages);
     if (packages.isEmpty()) {
         ALOGE("No packages for uid %d", uid);
-        return opPackageName; // empty string
+        return String16();
     }
     return packages[0];
 }
 
-static bool checkRecordingInternal(const String16& opPackageName, pid_t pid,
-        uid_t uid, bool start) {
+int32_t getOpForSource(audio_source_t source) {
+    switch (source) {
+        case AUDIO_SOURCE_HOTWORD:
+            return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            return AppOpsManager::OP_RECORD_INCOMING_PHONE_AUDIO;
+        case AUDIO_SOURCE_DEFAULT:
+        default:
+            return AppOpsManager::OP_RECORD_AUDIO;
+    }
+}
+
+std::optional<AttributionSourceState> resolveAttributionSource(
+        const AttributionSourceState& callerAttributionSource) {
+    AttributionSourceState nextAttributionSource = callerAttributionSource;
+
+    if (!nextAttributionSource.packageName.has_value()) {
+        nextAttributionSource = AttributionSourceState(nextAttributionSource);
+        PermissionController permissionController;
+        const uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(nextAttributionSource.uid));
+        nextAttributionSource.packageName = VALUE_OR_FATAL(legacy2aidl_String16_string(
+                resolveCallingPackage(permissionController, VALUE_OR_FATAL(
+                aidl2legacy_string_view_String16(nextAttributionSource.packageName.value_or(""))),
+                uid)));
+        if (!nextAttributionSource.packageName.has_value()) {
+            return std::nullopt;
+        }
+    }
+
+    AttributionSourceState myAttributionSource;
+    myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
+    myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
+    myAttributionSource.token = sp<BBinder>::make();
+    myAttributionSource.next.push_back(nextAttributionSource);
+
+    return std::optional<AttributionSourceState>{myAttributionSource};
+}
+
+static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
+        const String16& msg, bool start, audio_source_t source) {
     // Okay to not track in app ops as audio server or media server is us and if
     // device is rooted security model is considered compromised.
     // system_server loses its RECORD_AUDIO permission when a secondary
     // user is active, but it is a core system service so let it through.
     // TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return true;
 
     // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
-    // may open a record track on behalf of a client.  Note that pid may be a tid.
+    // may open a record track on behalf of a client. Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
-    PermissionController permissionController;
-    const bool ok = permissionController.checkPermission(sAndroidPermissionRecordAudio, pid, uid);
-    if (!ok) {
-        ALOGE("Request requires %s", String8(sAndroidPermissionRecordAudio).c_str());
+    const std::optional<AttributionSourceState> resolvedAttributionSource =
+            resolveAttributionSource(attributionSource);
+    if (!resolvedAttributionSource.has_value()) {
         return false;
     }
 
-    String16 resolvedOpPackageName = resolveCallingPackage(
-            permissionController, opPackageName, uid);
-    if (resolvedOpPackageName.size() == 0) {
-        return false;
-    }
+    const int32_t attributedOpCode = getOpForSource(source);
 
-    AppOpsManager appOps;
-    const int32_t op = appOps.permissionToOpCode(sAndroidPermissionRecordAudio);
+    permission::PermissionChecker permissionChecker;
+    bool permitted = false;
     if (start) {
-        if (appOps.startOpNoThrow(op, uid, resolvedOpPackageName, /*startIfModeDefault*/ false)
-                != AppOpsManager::MODE_ALLOWED) {
-            ALOGE("Request denied by app op: %d", op);
-            return false;
-        }
+        permitted = (permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+                sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+                attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
     } else {
-        if (appOps.checkOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
-            ALOGE("Request denied by app op: %d", op);
-            return false;
-        }
+        permitted = (permissionChecker.checkPermissionForPreflightFromDatasource(
+                sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+                attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
     }
 
-    return true;
+    return permitted;
 }
 
-bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
-    return checkRecordingInternal(opPackageName, pid, uid, /*start*/ false);
+bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
+    return checkRecordingInternal(attributionSource, String16(), /*start*/ false, source);
 }
 
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid) {
-     return checkRecordingInternal(opPackageName, pid, uid, /*start*/ true);
+bool startRecording(const AttributionSourceState& attributionSource, const String16& msg,
+        audio_source_t source) {
+    return checkRecordingInternal(attributionSource, msg, /*start*/ true, source);
 }
 
-void finishRecording(const String16& opPackageName, uid_t uid) {
+void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source) {
     // Okay to not track in app ops as audio server is us and if
     // device is rooted security model is considered compromised.
-    if (isAudioServerOrRootUid(uid)) return;
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return;
 
-    PermissionController permissionController;
-    String16 resolvedOpPackageName = resolveCallingPackage(
-            permissionController, opPackageName, uid);
-    if (resolvedOpPackageName.size() == 0) {
+    // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+    // may open a record track on behalf of a client. Note that pid may be a tid.
+    // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+    const std::optional<AttributionSourceState> resolvedAttributionSource =
+            resolveAttributionSource(attributionSource);
+    if (!resolvedAttributionSource.has_value()) {
         return;
     }
 
-    AppOpsManager appOps;
-    const int32_t op = appOps.permissionToOpCode(sAndroidPermissionRecordAudio);
-    appOps.finishOp(op, uid, resolvedOpPackageName);
+    const int32_t attributedOpCode = getOpForSource(source);
+    permission::PermissionChecker permissionChecker;
+    permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
+            resolvedAttributionSource.value());
 }
 
-bool captureAudioOutputAllowed(pid_t pid, uid_t uid) {
+bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT");
     bool ok = PermissionCache::checkPermission(sCaptureAudioOutput, pid, uid);
@@ -137,7 +183,9 @@
     return ok;
 }
 
-bool captureMediaOutputAllowed(pid_t pid, uid_t uid) {
+bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureMediaOutput("android.permission.CAPTURE_MEDIA_OUTPUT");
     bool ok = PermissionCache::checkPermission(sCaptureMediaOutput, pid, uid);
@@ -145,7 +193,19 @@
     return ok;
 }
 
-bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid) {
+bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
+    if (isAudioServerOrRootUid(uid)) return true;
+    static const String16 sCaptureTunerAudioInput("android.permission.CAPTURE_TUNER_AUDIO_INPUT");
+    bool ok = PermissionCache::checkPermission(sCaptureTunerAudioInput, pid, uid);
+    if (!ok) ALOGV("Request requires android.permission.CAPTURE_TUNER_AUDIO_INPUT");
+    return ok;
+}
+
+bool captureVoiceCommunicationOutputAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureVoiceCommOutput(
         "android.permission.CAPTURE_VOICE_COMMUNICATION_OUTPUT");
@@ -154,14 +214,18 @@
     return ok;
 }
 
-bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid) {
+bool captureHotwordAllowed(const AttributionSourceState& attributionSource) {
     // CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
-    bool ok = recordingAllowed(opPackageName, pid, uid);
+    bool ok = recordingAllowed(attributionSource);
 
     if (ok) {
         static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD");
-        // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
-        ok = PermissionCache::checkPermission(sCaptureHotwordAllowed, pid, uid);
+        // Use PermissionChecker, which includes some logic for allowing the isolated
+        // HotwordDetectionService to hold certain permissions.
+        permission::PermissionChecker permissionChecker;
+        ok = (permissionChecker.checkPermissionForPreflight(
+                sCaptureHotwordAllowed, attributionSource, String16(),
+                AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
     }
     if (!ok) ALOGV("android.permission.CAPTURE_AUDIO_HOTWORD");
     return ok;
@@ -178,11 +242,12 @@
 }
 
 bool modifyAudioRoutingAllowed() {
-    return modifyAudioRoutingAllowed(
-        IPCThreadState::self()->getCallingPid(), IPCThreadState::self()->getCallingUid());
+    return modifyAudioRoutingAllowed(getCallingAttributionSource());
 }
 
-bool modifyAudioRoutingAllowed(pid_t pid, uid_t uid) {
+bool modifyAudioRoutingAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
     bool ok = PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
@@ -192,11 +257,12 @@
 }
 
 bool modifyDefaultAudioEffectsAllowed() {
-    return modifyDefaultAudioEffectsAllowed(
-        IPCThreadState::self()->getCallingPid(), IPCThreadState::self()->getCallingUid());
+    return modifyDefaultAudioEffectsAllowed(getCallingAttributionSource());
 }
 
-bool modifyDefaultAudioEffectsAllowed(pid_t pid, uid_t uid) {
+bool modifyDefaultAudioEffectsAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
 
     static const String16 sModifyDefaultAudioEffectsAllowed(
@@ -217,14 +283,18 @@
     return ok;
 }
 
-bool modifyPhoneStateAllowed(pid_t pid, uid_t uid) {
+bool modifyPhoneStateAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid);
     ALOGE_IF(!ok, "Request requires %s", String8(sModifyPhoneState).c_str());
     return ok;
 }
 
 // privileged behavior needed by Dialer, Settings, SetupWizard and CellBroadcastReceiver
-bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid) {
+bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     static const String16 sWriteSecureSettings("android.permission.WRITE_SECURE_SETTINGS");
     bool ok = PermissionCache::checkPermission(sModifyPhoneState, pid, uid)
         || PermissionCache::checkPermission(sWriteSecureSettings, pid, uid)
@@ -234,6 +304,20 @@
     return ok;
 }
 
+AttributionSourceState getCallingAttributionSource() {
+    AttributionSourceState attributionSource = AttributionSourceState();
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(
+            IPCThreadState::self()->getCallingPid()));
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
+            IPCThreadState::self()->getCallingUid()));
+    attributionSource.token = sp<BBinder>::make();
+    return attributionSource;
+}
+
+void purgePermissionCache() {
+    PermissionCache::purgeCache();
+}
+
 status_t checkIMemory(const sp<IMemory>& iMemory)
 {
     if (iMemory == 0) {
@@ -259,7 +343,7 @@
     return NO_ERROR;
 }
 
-sp<content::pm::IPackageManagerNative> MediaPackageManager::retreivePackageManager() {
+sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
     const sp<IServiceManager> sm = defaultServiceManager();
     if (sm == nullptr) {
         ALOGW("%s: failed to retrieve defaultServiceManager", __func__);
@@ -276,27 +360,27 @@
 std::optional<bool> MediaPackageManager::doIsAllowed(uid_t uid) {
     if (mPackageManager == nullptr) {
         /** Cannot fetch the package manager at construction; it may not yet be registered. */
-        mPackageManager = retreivePackageManager();
+        mPackageManager = retrievePackageManager();
         if (mPackageManager == nullptr) {
             ALOGW("%s: Playback capture is denied as package manager is not reachable", __func__);
             return std::nullopt;
         }
     }
 
+    // Retrieve package names for the UID and transform to a std::vector<std::string>.
+    Vector<String16> str16PackageNames;
+    PermissionController{}.getPackagesForUid(uid, str16PackageNames);
     std::vector<std::string> packageNames;
-    auto status = mPackageManager->getNamesForUids({(int32_t)uid}, &packageNames);
-    if (!status.isOk()) {
-        ALOGW("%s: Playback capture is denied for uid %u as the package names could not be "
-              "retrieved from the package manager: %s", __func__, uid, status.toString8().c_str());
-        return std::nullopt;
+    for (const auto& str16PackageName : str16PackageNames) {
+        packageNames.emplace_back(String8(str16PackageName).string());
     }
     if (packageNames.empty()) {
         ALOGW("%s: Playback capture for uid %u is denied as no package name could be retrieved "
-              "from the package manager: %s", __func__, uid, status.toString8().c_str());
+              "from the package manager.", __func__, uid);
         return std::nullopt;
     }
     std::vector<bool> isAllowed;
-    status = mPackageManager->isAudioPlaybackCaptureAllowed(packageNames, &isAllowed);
+    auto status = mPackageManager->isAudioPlaybackCaptureAllowed(packageNames, &isAllowed);
     if (!status.isOk()) {
         ALOGW("%s: Playback capture is denied for uid %u as the manifest property could not be "
               "retrieved from the package manager: %s", __func__, uid, status.toString8().c_str());
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59d74de..819e146 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -39,10 +39,9 @@
     static std::atomic<int> curAudioHalPids = 0;
 
     if (update) {
-        audioHalPids[(curAudioHalPids + 1) % kNumAudioHalPidsVectors] = *pids;
-        curAudioHalPids++;
+        audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
     } else {
-        *pids = audioHalPids[curAudioHalPids];
+        *pids = audioHalPids[curAudioHalPids % kNumAudioHalPidsVectors];
     }
 }
 
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
new file mode 100644
index 0000000..c1698dc
--- /dev/null
+++ b/media/utils/fuzzers/Android.bp
@@ -0,0 +1,62 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libmediautils_fuzzer_defaults",
+    shared_libs: [
+        "libbatterystats_aidl",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+        "libbinder",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wno-c++2a-extensions",
+    ],
+
+    header_libs: [
+        "bionic_libc_platform_headers",
+        "libmedia_headers",
+    ],
+
+    include_dirs: [
+        // For DEBUGGER_SIGNAL
+        "system/core/debuggerd/include",
+    ],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_battery_notifier",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["BatteryNotifierFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_scheduling_policy_service",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["SchedulingPolicyServiceFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_service_utilities",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["ServiceUtilitiesFuzz.cpp"],
+}
+
+cc_fuzz {
+    name: "libmediautils_fuzzer_time_check",
+    defaults: ["libmediautils_fuzzer_defaults"],
+    srcs: ["TimeCheckFuzz.cpp"],
+}
diff --git a/media/utils/fuzzers/BatteryNotifierFuzz.cpp b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
new file mode 100644
index 0000000..00b3cce
--- /dev/null
+++ b/media/utils/fuzzers/BatteryNotifierFuzz.cpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include <utils/String8.h>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/BatteryNotifier.h"
+
+static constexpr int kMaxOperations = 30;
+static constexpr int kMaxStringLength = 500;
+using android::BatteryNotifier;
+
+std::vector<std::function<void(std::string /*flashlight_name*/, std::string /*camera_name*/,
+                               uid_t /*video_id*/, uid_t /*audio_id*/, uid_t /*light_id*/,
+                               uid_t /*camera_id*/)>>
+    operations = {
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetVideo();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetAudio();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetFlashlight();
+        },
+        [](std::string, std::string, uid_t, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteResetCamera();
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartVideo(video_id);
+        },
+        [](std::string, std::string, uid_t video_id, uid_t, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopVideo(video_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStartAudio(audio_id);
+        },
+        [](std::string, std::string, uid_t, uid_t audio_id, uid_t, uid_t) -> void {
+            BatteryNotifier::getInstance().noteStopAudio(audio_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOn(name, light_id);
+        },
+        [](std::string flashlight_name, std::string, uid_t, uid_t, uid_t light_id, uid_t) -> void {
+            android::String8 name(flashlight_name.c_str());
+            BatteryNotifier::getInstance().noteFlashlightOff(name, light_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStartCamera(name, camera_id);
+        },
+        [](std::string, std::string camera_name, uid_t, uid_t, uid_t, uid_t camera_id) -> void {
+            android::String8 name(camera_name.c_str());
+            BatteryNotifier::getInstance().noteStopCamera(name, camera_id);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    std::string camera_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    std::string flashlight_name = data_provider.ConsumeRandomLengthString(kMaxStringLength);
+    uid_t video_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t audio_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t light_id = data_provider.ConsumeIntegral<uid_t>();
+    uid_t camera_id = data_provider.ConsumeIntegral<uid_t>();
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](flashlight_name, camera_name, video_id, audio_id, light_id, camera_id);
+    }
+    return 0;
+}
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
new file mode 100644
index 0000000..130feee
--- /dev/null
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "SchedulingPolicyServiceFuzzer"
+#include <batterystats/IBatteryStats.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <android/log.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include "fuzzer/FuzzedDataProvider.h"
+using android::IBatteryStats;
+using android::IBinder;
+using android::IInterface;
+using android::IServiceManager;
+using android::sp;
+using android::String16;
+using android::defaultServiceManager;
+using android::requestCpusetBoost;
+using android::requestPriority;
+sp<IBatteryStats> getBatteryService() {
+    sp<IBatteryStats> batteryStatService;
+    const sp<IServiceManager> sm(defaultServiceManager());
+    if (sm != nullptr) {
+        const String16 name("batterystats");
+        batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
+        if (batteryStatService == nullptr) {
+            ALOGW("batterystats service unavailable!");
+            return nullptr;
+        }
+    }
+    return batteryStatService;
+}
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    sp<IBatteryStats> batteryStatService = getBatteryService();
+    // There is some state here, but it's mostly focused around thread-safety, so
+    // we won't worry about order.
+    int32_t priority = data_provider.ConsumeIntegral<int32_t>();
+    bool is_for_app = data_provider.ConsumeBool();
+    bool async = data_provider.ConsumeBool();
+    requestPriority(getpid(), gettid(), priority, is_for_app, async);
+    // TODO: Verify and re-enable in AOSP (R).
+    // bool enable = data_provider.ConsumeBool();
+    // We are just using batterystats to avoid the need
+    // to register a new service.
+    // requestCpusetBoost(enable, IInterface::asBinder(batteryStatService));
+    return 0;
+}
+
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
new file mode 100644
index 0000000..6e52512
--- /dev/null
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+
+#include <functional>
+#include <type_traits>
+
+#include <android/content/AttributionSourceState.h>
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/ServiceUtilities.h"
+
+static constexpr int kMaxOperations = 50;
+static constexpr int kMaxStringLen = 256;
+
+using android::content::AttributionSourceState;
+
+const std::vector<std::function<void(FuzzedDataProvider*, android::MediaPackageManager)>>
+    operations = {
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            uid_t uid = data_provider->ConsumeIntegral<uid_t>();
+            pm.allowPlaybackCapture(uid);
+        },
+        [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
+            int spaces = data_provider->ConsumeIntegral<int>();
+
+            // Dump everything into /dev/null
+            int fd = open("/dev/null", O_WRONLY);
+            pm.dump(fd, spaces);
+            close(fd);
+        },
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+    int32_t uid = data_provider.ConsumeIntegral<int32_t>();
+    int32_t pid = data_provider.ConsumeIntegral<int32_t>();
+    audio_source_t source = static_cast<audio_source_t>(data_provider
+        .ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
+
+    std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    std::string msgStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+    android::String16 msgStr16(msgStr.c_str());
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = packageNameStr;
+    attributionSource.uid = uid;
+    attributionSource.pid = pid;
+    attributionSource.token = android::sp<android::BBinder>::make();
+
+    // There is no state here, and order is not significant,
+    // so we can simply call all of the target functions.
+    android::isServiceUid(uid);
+    android::isAudioServerUid(uid);
+    android::isAudioServerOrSystemServerUid(uid);
+    android::isAudioServerOrMediaServerUid(uid);
+    android::recordingAllowed(attributionSource);
+    android::startRecording(attributionSource, msgStr16, source);
+    android::finishRecording(attributionSource, source);
+    android::captureAudioOutputAllowed(attributionSource);
+    android::captureMediaOutputAllowed(attributionSource);
+    android::captureHotwordAllowed(attributionSource);
+    android::modifyPhoneStateAllowed(attributionSource);
+    android::bypassInterruptionPolicyAllowed(attributionSource);
+    android::settingsAllowed();
+    android::modifyAudioRoutingAllowed();
+    android::modifyDefaultAudioEffectsAllowed();
+    android::dumpAllowed();
+
+    // MediaPackageManager does have state, so we need the fuzzer to decide order
+    android::MediaPackageManager packageManager;
+    size_t ops_run = 0;
+    while (data_provider.remaining_bytes() > 0 && ops_run++ < kMaxOperations) {
+        uint8_t op = data_provider.ConsumeIntegralInRange<uint8_t>(0, operations.size() - 1);
+        operations[op](&data_provider, packageManager);
+    }
+
+    return 0;
+}
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
new file mode 100644
index 0000000..eeb6ba6
--- /dev/null
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <chrono>
+#include <thread>
+
+#include "fuzzer/FuzzedDataProvider.h"
+#include "mediautils/TimeCheck.h"
+
+static constexpr int kMaxStringLen = 256;
+
+// While it might be interesting to test long-running
+// jobs, it seems unlikely it'd lead to the types of crashes
+// we're looking for, and would mean a significant increase in fuzzer time.
+// Therefore, we are setting a low cap.
+static constexpr uint32_t kMaxTimeoutMs = 1000;
+static constexpr uint32_t kMinTimeoutMs = 200;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider data_provider(data, size);
+
+    // There are essentially 5 operations that we can access in this class
+    // 1. The time it takes to run this operation. As mentioned above,
+    //    long-running tasks are not good for fuzzing, but there will be
+    //    some change in the run time.
+    uint32_t timeoutMs =
+        data_provider.ConsumeIntegralInRange<uint32_t>(kMinTimeoutMs, kMaxTimeoutMs);
+    uint8_t pid_size = data_provider.ConsumeIntegral<uint8_t>();
+    std::vector<pid_t> pids(pid_size);
+    for (auto& pid : pids) {
+        pid = data_provider.ConsumeIntegral<pid_t>();
+    }
+
+    // 2. We also have setAudioHalPids, which is populated with the pids set
+    // above.
+    android::TimeCheck::setAudioHalPids(pids);
+    std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
+
+    // 3. The constructor, which is fuzzed here:
+    android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+    // We will leave some buffer to avoid sleeping too long
+    uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
+
+    // We want to make sure we can cover the time out functionality.
+    if (sleep_amount_ms) {
+        auto ms = std::chrono::milliseconds(sleep_amount_ms);
+        std::this_thread::sleep_for(ms);
+    }
+
+    // 4. Finally, the destructor on timecheck. These seem to be the only factors
+    // in play.
+    return 0;
+}
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index a4e42ad..3812d7a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -17,7 +17,7 @@
 #ifndef MEDIA_BATTERY_NOTIFIER_H
 #define MEDIA_BATTERY_NOTIFIER_H
 
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
 #include <utils/Singleton.h>
 #include <utils/String8.h>
 
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 212599a..734313c 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -24,6 +24,10 @@
 #include <binder/PermissionController.h>
 #include <cutils/multiuser.h>
 #include <private/android_filesystem_config.h>
+#include <system/audio-hal-enums.h>
+#include <android/content/AttributionSourceState.h>
+#include <binder/PermissionController.h>
+#include <android/permission/PermissionChecker.h>
 
 #include <map>
 #include <optional>
@@ -33,6 +37,8 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // Audio permission utilities
 
 // Used for calls that should originate from system services.
@@ -78,21 +84,30 @@
     }
 }
 
-bool recordingAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
-bool startRecording(const String16& opPackageName, pid_t pid, uid_t uid);
-void finishRecording(const String16& opPackageName, uid_t uid);
-bool captureAudioOutputAllowed(pid_t pid, uid_t uid);
-bool captureMediaOutputAllowed(pid_t pid, uid_t uid);
-bool captureVoiceCommunicationOutputAllowed(pid_t pid, uid_t uid);
-bool captureHotwordAllowed(const String16& opPackageName, pid_t pid, uid_t uid);
+bool recordingAllowed(const AttributionSourceState& attributionSource,
+        audio_source_t source = AUDIO_SOURCE_DEFAULT);
+bool startRecording(const AttributionSourceState& attributionSource,
+    const String16& msg, audio_source_t source);
+void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source);
+std::optional<AttributionSourceState> resolveAttributionSource(
+    const AttributionSourceState& callerAttributionSource);
+bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource);
+bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
+bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
+bool captureVoiceCommunicationOutputAllowed(const AttributionSourceState& attributionSource);
+bool captureHotwordAllowed(const AttributionSourceState& attributionSource);
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
-bool modifyAudioRoutingAllowed(pid_t pid, uid_t uid);
+bool modifyAudioRoutingAllowed(const AttributionSourceState& attributionSource);
 bool modifyDefaultAudioEffectsAllowed();
-bool modifyDefaultAudioEffectsAllowed(pid_t pid, uid_t uid);
+bool modifyDefaultAudioEffectsAllowed(const AttributionSourceState& attributionSource);
 bool dumpAllowed();
-bool modifyPhoneStateAllowed(pid_t pid, uid_t uid);
-bool bypassInterruptionPolicyAllowed(pid_t pid, uid_t uid);
+bool modifyPhoneStateAllowed(const AttributionSourceState& attributionSource);
+bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
+void purgePermissionCache();
+int32_t getOpForSource(audio_source_t source);
+
+AttributionSourceState getCallingAttributionSource();
 
 status_t checkIMemory(const sp<IMemory>& iMemory);
 
@@ -110,7 +125,7 @@
 private:
     static constexpr const char* nativePackageManagerName = "package_native";
     std::optional<bool> doIsAllowed(uid_t uid);
-    sp<content::pm::IPackageManagerNative> retreivePackageManager();
+    sp<content::pm::IPackageManagerNative> retrievePackageManager();
     sp<content::pm::IPackageManagerNative> mPackageManager; // To check apps manifest
     uint_t mPackageManagerErrors = 0;
     struct Package {
diff --git a/media/utils/include/mediautils/Synchronization.h b/media/utils/include/mediautils/Synchronization.h
new file mode 100644
index 0000000..aef4967
--- /dev/null
+++ b/media/utils/include/mediautils/Synchronization.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <mutex>
+#include <utils/RefBase.h>
+
+namespace android::mediautils {
+
+/**
+ * The LockItem class introduces a simple template which mimics atomic<T>
+ * for non-trivially copyable types.  For trivially copyable types,
+ * the LockItem will statically assert that an atomic<T> should be used instead.
+ *
+ * The default lock mutex is std::mutex which is suitable for all but rare cases
+ * e.g. recursive constructors that might be found in tree construction,
+ * setters that might recurse onto the same object.
+ */
+
+template <typename T, typename L = std::mutex, int FLAGS = 0>
+class LockItem {
+protected:
+    mutable L mLock;
+    mutable T mT;
+
+public:
+    enum {
+        // Best practice for smart pointers and complex containers is to move to a temp
+        // and invoke destructor outside of lock.  This reduces time under lock and in
+        // some cases eliminates deadlock.
+        FLAG_DTOR_OUT_OF_LOCK = 1,
+    };
+
+    // Check type, suggest std::atomic if possible.
+    static_assert(!std::is_trivially_copyable_v<T>,
+            "type is trivially copyable, please use std::atomic instead");
+
+    // Allow implicit conversions as expected for some types, e.g. sp -> wp.
+    template <typename... Args>
+    LockItem(Args&&... args) : mT(std::forward<Args>(args)...) {
+    }
+
+    // NOT copy or move / assignable or constructible.
+
+    // Do not enable this because it may lead to confusion because it returns
+    // a copy-value not a reference.
+    // operator T() const { return load(); }
+
+    // any conversion done under lock.
+    template <typename U>
+    void operator=(U&& u) {
+        store(std::forward<U>(u));
+    }
+
+    // returns a copy-value not a reference.
+    T load() const {
+        std::lock_guard lock(mLock);
+        return mT;
+    }
+
+    // any conversion done under lock.
+    template <typename U>
+    void store(U&& u) {
+        if constexpr ((FLAGS & FLAG_DTOR_OUT_OF_LOCK) != 0) {
+             std::unique_lock lock(mLock);
+             T temp = std::move(mT);
+             mT = std::forward<U>(u);
+             lock.unlock();
+        } else {
+            std::lock_guard lock(mLock);
+            mT = std::forward<U>(u);
+        }
+    }
+};
+
+/**
+ * atomic_wp<> and atomic_sp<> are used for concurrent access to Android
+ * sp<> and wp<> smart pointers, including their modifiers.  We
+ * return a copy of the smart pointer with load().
+ *
+ * Historical: The importance of an atomic<std::shared_ptr<T>> class is described
+ * by Herb Sutter in the following ISO document https://isocpp.org/files/papers/N4162.pdf
+ * and is part of C++20.  Lock free versions of atomic smart pointers are available
+ * publicly but usually require specialized smart pointer structs.
+ * See also https://en.cppreference.com/w/cpp/memory/shared_ptr/atomic
+ * and https://en.cppreference.com/w/cpp/memory/shared_ptr/atomic2
+ *
+ * We offer lock based atomic_wp<> and atomic_sp<> objects here. This is useful to
+ * copy the Android smart pointer to a different variable for subsequent local access,
+ * where the change of the original object after copy is acceptable.
+ *
+ * Note: Instead of atomics, it is often preferable to create an explicit visible lock to
+ * ensure complete transaction consistency.  For example, one might want to ensure
+ * that the method called from the smart pointer is also done under lock.
+ * This may not be possible for callbacks due to inverted lock ordering.
+ */
+
+template <typename T>
+using atomic_wp = LockItem<::android::wp<T>>;
+
+template <typename T>
+using atomic_sp = LockItem<
+        ::android::sp<T>, std::mutex, LockItem<::android::sp<T>>::FLAG_DTOR_OUT_OF_LOCK>;
+
+/**
+ * Defers a function to run in the RAII destructor.
+ * A C++ implementation of Go _defer_ https://golangr.com/defer/.
+ */
+class Defer {
+public:
+    template <typename U>
+    explicit Defer(U &&f) : mThunk(std::forward<U>(f)) {}
+    ~Defer() { mThunk(); }
+
+private:
+    const std::function<void()> mThunk;
+};
+
+} // namespace android::mediautils
+
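Besides the gtest coverage added below, a short usage sketch may help: atomic_sp<> hands each reader its own sp<> copy, and Defer runs a cleanup lambda on scope exit. The MyCallback type and the Holder member names are made up for this example and are not part of the patch.

#include <functional>

#include <mediautils/Synchronization.h>
#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

// Illustrative callback type; not part of the patch.
struct MyCallback : public android::RefBase {
    void notify() {}
};

class Holder {
    // Readers and writers on different threads each see a consistent sp<> copy.
    android::mediautils::atomic_sp<MyCallback> mCallback;

  public:
    void setCallback(const android::sp<MyCallback> &cb) { mCallback = cb; }

    void onEvent() {
        // load() returns a copy, so the callback stays alive for this call even
        // if another thread clears mCallback concurrently.
        android::sp<MyCallback> cb = mCallback.load();
        if (cb != nullptr) cb->notify();
    }

    void teardown() {
        // Defer runs its lambda when the scope exits, on every return path.
        android::mediautils::Defer clear([this] { mCallback = nullptr; });
        // ... other teardown work that may return early ...
    }
};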
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
new file mode 100644
index 0000000..6593d56
--- /dev/null
+++ b/media/utils/tests/Android.bp
@@ -0,0 +1,28 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "media_synchronization_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "media_synchronization_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/media_synchronization_tests.cpp b/media/utils/tests/media_synchronization_tests.cpp
new file mode 100644
index 0000000..169768e
--- /dev/null
+++ b/media/utils/tests/media_synchronization_tests.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "media_synchronization_tests"
+
+#include <mediautils/Synchronization.h>
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+// Simple Test Class
+template <typename T>
+class MyObject : public RefBase {
+    T value_;
+  public:
+    MyObject(const T& value) : value_(value) {}
+    MyObject(const MyObject<T>& mo) : value_(mo.get()) {}
+    T get() const { return value_; }
+    void set(const T& value) { value_ = value; }
+};
+
+TEST(media_synchronization_tests, atomic_wp) {
+  sp<MyObject<int>> refobj = new MyObject<int>(20);
+  atomic_wp<MyObject<int>> wpobj = refobj;
+
+  // we can promote.
+  ASSERT_EQ(20, wpobj.load().promote()->get());
+
+  // same underlying object for sp and atomic_wp.
+  ASSERT_EQ(refobj.get(), wpobj.load().promote().get());
+
+  // behavior is consistent with same underlying object.
+  wpobj.load().promote()->set(10);
+  ASSERT_EQ(10, refobj->get());
+  refobj->set(5);
+  ASSERT_EQ(5, wpobj.load().promote()->get());
+
+  // we can clear our weak ptr.
+  wpobj = nullptr;
+  ASSERT_EQ(nullptr, wpobj.load().promote());
+
+  // didn't affect our original obj.
+  ASSERT_NE(nullptr, refobj.get());
+}
+
+TEST(media_synchronization_tests, atomic_sp) {
+  sp<MyObject<int>> refobj = new MyObject<int>(20);
+  atomic_sp<MyObject<int>> spobj = refobj;
+
+  // same underlying object for sp and atomic_sp.
+  ASSERT_EQ(refobj.get(), spobj.load().get());
+
+  // behavior is consistent with same underlying object.
+  ASSERT_EQ(20, spobj.load()->get());
+  spobj.load()->set(10);
+  ASSERT_EQ(10, refobj->get());
+  refobj->set(5);
+  ASSERT_EQ(5, spobj.load()->get());
+
+  // we can clear spobj.
+  spobj = nullptr;
+  ASSERT_EQ(nullptr, spobj.load().get());
+
+  // didn't affect our original obj.
+  ASSERT_NE(nullptr, refobj.get());
+}
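
The tests above exercise mediautils::atomic_wp and atomic_sp, which give thread-safe load/store semantics for Android's wp<>/sp<> smart pointers. Since Synchronization.h itself is not part of this diff, the following is only a minimal standalone sketch of the idea behind such a wrapper, a mutex guarding every load and store; it uses std::shared_ptr instead of android::sp and is not the actual mediautils implementation.

#include <memory>
#include <mutex>

// Hypothetical stand-in: a mutex-guarded "atomic" smart-pointer holder, using
// std::shared_ptr instead of android::sp so the sketch compiles on its own.
template <typename T>
class locked_atomic_ptr {
  public:
    locked_atomic_ptr() = default;
    locked_atomic_ptr(std::shared_ptr<T> p) : ptr_(std::move(p)) {}

    // Returns a copy of the current pointer taken under the lock,
    // analogous to atomic_sp<T>::load() in the tests above.
    std::shared_ptr<T> load() const {
        std::lock_guard<std::mutex> lock(mutex_);
        return ptr_;
    }

    // Replaces the held pointer under the lock, analogous to assignment
    // (including assignment from nullptr) in the tests above.
    locked_atomic_ptr& operator=(std::shared_ptr<T> p) {
        std::lock_guard<std::mutex> lock(mutex_);
        ptr_ = std::move(p);
        return *this;
    }

  private:
    mutable std::mutex mutex_;
    std::shared_ptr<T> ptr_;
};

With a wrapper of this kind, the gLock/promote() pattern removed later in this patch (see the gAudioFlinger change in AudioFlinger.cpp) collapses to a single load() call.
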
diff --git a/services/OWNERS b/services/OWNERS
index 66a4bcb..f0b5e2f 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -5,3 +5,5 @@
 gkasten@google.com
 hunga@google.com
 marcone@google.com
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 3873600..b91f302 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -1,5 +1,24 @@
 
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_audioflinger_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_audioflinger_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libaudioflinger",
 
@@ -35,6 +54,11 @@
     ],
 
     shared_libs: [
+        "audioflinger-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "effect-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiohal",
         "libaudioprocessing",
@@ -50,23 +74,31 @@
         "libmediautils",
         "libnbaio",
         "libnblog",
+        "libpermission",
         "libpowermanager",
         "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
+        "libshmemcompat",
         "libvibrator",
     ],
 
     static_libs: [
         "libcpustats",
         "libsndfile",
+        "libpermission",
     ],
 
     header_libs: [
+        "libaudioclient_headers",
         "libaudiohal_headers",
         "libmedia_headers",
     ],
 
+    export_shared_lib_headers: [
+        "libpermission",
+    ],
+
     cflags: [
         "-DSTATE_QUEUE_INSTANTIATIONS=\"StateQueueInstantiations.cpp\"",
         "-fvisibility=hidden",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f014209..65a163f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -31,6 +31,7 @@
 #include <sys/resource.h>
 #include <thread>
 
+#include <android/media/IAudioPolicyService.h>
 #include <android/os/IExternalVibratorService.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -43,6 +44,7 @@
 #include <media/AudioParameter.h>
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
+#include <mediautils/TimeCheck.h>
 #include <memunreachable/memunreachable.h>
 #include <utils/String16.h>
 #include <utils/threads.h>
@@ -61,12 +63,15 @@
 #include <system/audio_effects/effect_visualizer.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 
 #include <audio_utils/primitives.h>
 
 #include <powermanager/PowerManager.h>
 
 #include <media/IMediaLogService.h>
+#include <media/AidlConversion.h>
+#include <media/AudioValidator.h>
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <mediautils/BatteryNotifier.h>
@@ -97,6 +102,9 @@
 
 namespace android {
 
+using media::IEffectClient;
+using android::content::AttributionSourceState;
+
 static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
 static const char kHardwareLockedString[] = "Hardware lock is taken\n";
 static const char kClientLockedString[] = "Client lock is taken\n";
@@ -111,9 +119,6 @@
 // we define a minimum time during which a global effect is considered enabled.
 static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
 
-Mutex gLock;
-wp<AudioFlinger> gAudioFlinger;
-
 // Keep a strong reference to media.log service around forever.
 // The service is within our parent process so it can never die in a way that we could observe.
 // These two variables are const after initialization.
@@ -158,6 +163,34 @@
     }
 };
 
+// TODO b/182392769: use attribution source util
+/* static */
+AttributionSourceState AudioFlinger::checkAttributionSourcePackage(
+        const AttributionSourceState& attributionSource) {
+    Vector<String16> packages;
+    PermissionController{}.getPackagesForUid(attributionSource.uid, packages);
+
+    AttributionSourceState checkedAttributionSource = attributionSource;
+    if (!attributionSource.packageName.has_value()
+            || attributionSource.packageName.value().size() == 0) {
+        if (!packages.isEmpty()) {
+            checkedAttributionSource.packageName =
+                std::move(legacy2aidl_String16_string(packages[0]).value());
+        }
+    } else {
+        String16 opPackageLegacy = VALUE_OR_FATAL(
+            aidl2legacy_string_view_String16(attributionSource.packageName.value_or("")));
+        if (std::find_if(packages.begin(), packages.end(),
+                [&opPackageLegacy](const auto& package) {
+                return opPackageLegacy == package; }) == packages.end()) {
+            ALOGW("The package name(%s) provided does not correspond to the uid %d",
+                    attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
+            checkedAttributionSource.packageName = std::optional<std::string>();
+        }
+    }
+    return checkedAttributionSource;
+}
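
checkAttributionSourcePackage() above normalizes the caller-supplied AttributionSourceState: a missing or empty package name is filled in from the first package registered for the uid, and a package name that does not belong to the uid is cleared rather than trusted. As an illustration only, here is a hedged standalone model of that decision using simplified hypothetical types (not the real AttributionSourceState or PermissionController):

#include <algorithm>
#include <optional>
#include <string>
#include <vector>

// Hypothetical simplified model of the package-name check above.
struct SimpleAttributionSource {
    int uid = -1;
    std::optional<std::string> packageName;
};

SimpleAttributionSource checkPackage(SimpleAttributionSource source,
                                     const std::vector<std::string>& packagesForUid) {
    if (!source.packageName.has_value() || source.packageName->empty()) {
        // No package supplied: adopt the first package known for this uid, if any.
        if (!packagesForUid.empty()) {
            source.packageName = packagesForUid.front();
        }
    } else if (std::find(packagesForUid.begin(), packagesForUid.end(),
                         *source.packageName) == packagesForUid.end()) {
        // Supplied package does not belong to the uid: drop it rather than trust it.
        source.packageName.reset();
    }
    return source;
}
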
+
 // ----------------------------------------------------------------------------
 
 std::string formatToString(audio_format_t format) {
@@ -168,9 +201,15 @@
 
 // ----------------------------------------------------------------------------
 
+void AudioFlinger::instantiate() {
+    sp<IServiceManager> sm(defaultServiceManager());
+    sm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                   new AudioFlingerServerAdapter(new AudioFlinger()), false,
+                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+}
+
 AudioFlinger::AudioFlinger()
-    : BnAudioFlinger(),
-      mMediaLogNotifier(new AudioFlinger::MediaLogNotifier()),
+    : mMediaLogNotifier(new AudioFlinger::MediaLogNotifier()),
       mPrimaryHardwareDev(NULL),
       mAudioHwDevs(NULL),
       mHardwareStatus(AUDIO_HW_IDLE),
@@ -188,13 +227,30 @@
       mDeviceEffectManager(this),
       mSystemReady(false)
 {
+    // Move the audio session unique ID generator start base as time passes to limit risk of
+    // generating the same ID again after an audioserver restart.
+    // This is important because clients will reuse previously allocated audio session IDs
+    // when reconnecting after an audioserver restart and newly allocated IDs may conflict with
+    // active clients.
+    // Moving the base by 1 for each elapsed second is a good compromise between avoiding overlap
+    // between allocation ranges and not reaching wrap around too soon.
+    timespec ts{};
+    clock_gettime(CLOCK_MONOTONIC, &ts);
+    // zero ID has a special meaning, so start allocation at least at AUDIO_UNIQUE_ID_USE_MAX
+    uint32_t movingBase = (uint32_t)std::max((long)1, ts.tv_sec);
     // unsigned instead of audio_unique_id_use_t, because ++ operator is unavailable for enum
     for (unsigned use = AUDIO_UNIQUE_ID_USE_UNSPECIFIED; use < AUDIO_UNIQUE_ID_USE_MAX; use++) {
-        // zero ID has a special meaning, so unavailable
-        mNextUniqueIds[use] = AUDIO_UNIQUE_ID_USE_MAX;
+        mNextUniqueIds[use] =
+                ((use == AUDIO_UNIQUE_ID_USE_SESSION || use == AUDIO_UNIQUE_ID_USE_CLIENT) ?
+                        movingBase : 1) * AUDIO_UNIQUE_ID_USE_MAX;
     }
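
The constructor change above seeds the session/client ID counters with a base that moves forward by one allocation slot per second of CLOCK_MONOTONIC uptime, so IDs handed out before an audioserver restart are unlikely to collide with IDs allocated right after it. A small standalone sketch of that arithmetic follows; kUniqueIdUseMax is a hypothetical stand-in for AUDIO_UNIQUE_ID_USE_MAX, and the real allocator is not reproduced here.

#include <algorithm>
#include <cstdint>
#include <time.h>

// Hypothetical value standing in for AUDIO_UNIQUE_ID_USE_MAX; the real constant
// comes from the audio unique-id "use" enum.
constexpr uint32_t kUniqueIdUseMax = 8;

// Returns the initial allocation base for session/client IDs: one slot of
// kUniqueIdUseMax per elapsed second of CLOCK_MONOTONIC, and never less than
// one slot so that ID 0 (which has a special meaning) is skipped.
uint32_t initialSessionIdBase() {
    timespec ts{};
    clock_gettime(CLOCK_MONOTONIC, &ts);
    const uint32_t movingBase = (uint32_t)std::max((long)1, (long)ts.tv_sec);
    return movingBase * kUniqueIdUseMax;  // e.g. 3600 s of uptime -> base 28800
}
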
 
+#if 1
+    // FIXME See bug 165702394 and bug 168511485
+    const bool doLog = false;
+#else
     const bool doLog = property_get_bool("ro.test_harness", false);
+#endif
     if (doLog) {
         mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters",
                 MemoryHeapBase::READ_ONLY);
@@ -240,7 +296,7 @@
 
     mMode = AUDIO_MODE_NORMAL;
 
-    gAudioFlinger = this;
+    gAudioFlinger = this;  // we are already refcounted, store into atomic pointer.
 
     mDevicesFactoryHalCallback = new DevicesFactoryHalCallbackImpl;
     mDevicesFactoryHal->setCallbackOnce(mDevicesFactoryHalCallback);
@@ -251,6 +307,42 @@
   return NO_ERROR;
 }
 
+status_t AudioFlinger::setVibratorInfos(
+        const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
+    Mutex::Autolock _l(mLock);
+    mAudioVibratorInfos = vibratorInfos;
+    return NO_ERROR;
+}
+
+status_t AudioFlinger::updateSecondaryOutputs(
+        const TrackSecondaryOutputsMap& trackSecondaryOutputs) {
+    Mutex::Autolock _l(mLock);
+    for (const auto& [trackId, secondaryOutputs] : trackSecondaryOutputs) {
+        size_t i = 0;
+        for (; i < mPlaybackThreads.size(); ++i) {
+            PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+            Mutex::Autolock _tl(thread->mLock);
+            sp<PlaybackThread::Track> track = thread->getTrackById_l(trackId);
+            if (track != nullptr) {
+                ALOGD("%s trackId: %u", __func__, trackId);
+                updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
+                break;
+            }
+        }
+        ALOGW_IF(i >= mPlaybackThreads.size(),
+                 "%s cannot find track with id %u", __func__, trackId);
+    }
+    return NO_ERROR;
+}
+
+// getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
+const media::AudioVibratorInfo* AudioFlinger::getDefaultVibratorInfo_l() {
+    if (mAudioVibratorInfos.empty()) {
+        return nullptr;
+    }
+    return &mAudioVibratorInfos.front();
+}
+
 AudioFlinger::~AudioFlinger()
 {
     while (!mRecordThreads.isEmpty()) {
@@ -297,11 +389,9 @@
                                              sp<MmapStreamInterface>& interface,
                                              audio_port_handle_t *handle)
 {
-    sp<AudioFlinger> af;
-    {
-        Mutex::Autolock _l(gLock);
-        af = gAudioFlinger.promote();
-    }
+    // TODO: Use ServiceManager to get IAudioFlinger instead of by atomic pointer.
+    // This allows moving oboeservice (AAudio) to a separate process in the future.
+    sp<AudioFlinger> af = AudioFlinger::gAudioFlinger.load();  // either nullptr or singleton AF.
     status_t ret = NO_INIT;
     if (af != 0) {
         ret = af->openMmapStream(
@@ -342,7 +432,7 @@
 
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
-                                            &streamType, client.clientPid, client.clientUid,
+                                            &streamType, client.attributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
@@ -353,9 +443,7 @@
         ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
-                                              client.clientPid,
-                                              client.clientUid,
-                                              client.packageName,
+                                              client.attributionSource,
                                               config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
     }
@@ -391,14 +479,19 @@
 /* static */
 int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
-    if (evs != 0) {
+    if (evs != nullptr) {
         int32_t ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
+            ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
             return ret;
         }
     }
-    return AudioMixer::HAPTIC_SCALE_MUTE;
+    ALOGD("%s, start external vibration with intensity as MUTE due to %s",
+            __func__,
+            evs == nullptr ? "external vibration service not found"
+                           : "error when querying intensity");
+    return static_cast<int>(os::HapticScale::MUTE);
 }
 
 /* static */
@@ -684,8 +777,8 @@
 
 sp<NBLog::Writer> AudioFlinger::newWriter_l(size_t size, const char *name)
 {
-    // If there is no memory allocated for logs, return a dummy writer that does nothing.
-    // Similarly if we can't contact the media.log service, also return a dummy writer.
+    // If there is no memory allocated for logs, return a no-op writer that does nothing.
+    // Similarly if we can't contact the media.log service, also return a no-op writer.
     if (mLogMemoryDealer == 0 || sMediaLogService == 0) {
         return new NBLog::Writer();
     }
@@ -711,7 +804,7 @@
             }
         }
         // Even after garbage-collecting all old writers, there is still not enough memory,
-        // so return a dummy writer
+        // so return a no-op writer
         return new NBLog::Writer();
     }
 success:
@@ -739,10 +832,13 @@
 
 // IAudioFlinger interface
 
-sp<IAudioTrack> AudioFlinger::createTrack(const CreateTrackInput& input,
-                                          CreateTrackOutput& output,
-                                          status_t *status)
+status_t AudioFlinger::createTrack(const media::CreateTrackRequest& _input,
+                                   media::CreateTrackResponse& _output)
 {
+    // Local version of VALUE_OR_RETURN, specific to this method's calling conventions.
+    CreateTrackInput input = VALUE_OR_RETURN_STATUS(CreateTrackInput::fromAidl(_input));
+    CreateTrackOutput output;
+
     sp<PlaybackThread::Track> track;
     sp<TrackHandle> trackHandle;
     sp<Client> client;
@@ -751,27 +847,33 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     std::vector<audio_io_handle_t> secondaryOutputs;
 
-    bool updatePid = (input.clientInfo.clientPid == -1);
+    // TODO b/182392553: refactor or make clearer
+    pid_t clientPid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
+    bool updatePid = (clientPid == (pid_t)-1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid = input.clientInfo.clientUid;
+    uid_t clientUid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
+    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
     if (!isAudioServerOrMediaServerUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, clientUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         clientUid = callingUid;
         updatePid = true;
     }
-    pid_t clientPid = input.clientInfo.clientPid;
     const pid_t callingPid = IPCThreadState::self()->getCallingPid();
     if (updatePid) {
-        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
+        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
                  __func__, callingUid, callingPid, clientPid);
         clientPid = callingPid;
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
 
     audio_session_t sessionId = input.sessionId;
@@ -786,7 +888,7 @@
     output.outputId = AUDIO_IO_HANDLE_NONE;
     output.selectedDeviceId = input.selectedDeviceId;
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
-                                            clientPid, clientUid, &input.config, input.flags,
+                                            adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
@@ -845,13 +947,14 @@
         output.frameCount = input.frameCount;
         output.notificationFrameCount = input.notificationFrameCount;
         output.flags = input.flags;
+        output.streamType = streamType;
 
         track = thread->createTrack_l(client, streamType, localAttr, &output.sampleRate,
                                       input.config.format, input.config.channel_mask,
                                       &output.frameCount, &output.notificationFrameCount,
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
-                                      callingPid, input.clientInfo.clientTid, clientUid,
+                                      callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
@@ -865,88 +968,7 @@
             // Connect secondary outputs. Failure on a secondary output must not impede the primary
             // Any secondary output setup failure will lead to a desync between the AP and AF until
             // the track is destroyed.
-            TeePatches teePatches;
-            for (audio_io_handle_t secondaryOutput : secondaryOutputs) {
-                PlaybackThread *secondaryThread = checkPlaybackThread_l(secondaryOutput);
-                if (secondaryThread == NULL) {
-                    ALOGE("no playback thread found for secondary output %d", output.outputId);
-                    continue;
-                }
-
-                size_t sourceFrameCount = thread->frameCount() * output.sampleRate
-                                          / thread->sampleRate();
-                size_t sinkFrameCount = secondaryThread->frameCount() * output.sampleRate
-                                          / secondaryThread->sampleRate();
-                // If the secondary output has just been opened, the first secondaryThread write
-                // will not block as it will fill the empty startup buffer of the HAL,
-                // so a second sink buffer needs to be ready for the immediate next blocking write.
-                // Additionally, have a margin of one main thread buffer as the scheduling jitter
-                // can reorder the writes (eg if thread A&B have the same write intervale,
-                // the scheduler could schedule AB...BA)
-                size_t frameCountToBeReady = 2 * sinkFrameCount + sourceFrameCount;
-                // Total secondary output buffer must be at least as the read frames plus
-                // the margin of a few buffers on both sides in case the
-                // threads scheduling has some jitter.
-                // That value should not impact latency as the secondary track is started before
-                // its buffer is full, see frameCountToBeReady.
-                size_t frameCount = frameCountToBeReady + 2 * (sourceFrameCount + sinkFrameCount);
-                // The frameCount should also not be smaller than the secondary thread min frame
-                // count
-                size_t minFrameCount = AudioSystem::calculateMinFrameCount(
-                            [&] { Mutex::Autolock _l(secondaryThread->mLock);
-                                  return secondaryThread->latency_l(); }(),
-                            secondaryThread->mNormalFrameCount,
-                            secondaryThread->mSampleRate,
-                            output.sampleRate,
-                            input.speed);
-                frameCount = std::max(frameCount, minFrameCount);
-
-                using namespace std::chrono_literals;
-                auto inChannelMask = audio_channel_mask_out_to_in(input.config.channel_mask);
-                sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
-                                                               output.sampleRate,
-                                                               inChannelMask,
-                                                               input.config.format,
-                                                               frameCount,
-                                                               NULL /* buffer */,
-                                                               (size_t)0 /* bufferSize */,
-                                                               AUDIO_INPUT_FLAG_DIRECT,
-                                                               0ns /* timeout */);
-                status_t status = patchRecord->initCheck();
-                if (status != NO_ERROR) {
-                    ALOGE("Secondary output patchRecord init failed: %d", status);
-                    continue;
-                }
-
-                // TODO: We could check compatibility of the secondaryThread with the PatchTrack
-                // for fast usage: thread has fast mixer, sample rate matches, etc.;
-                // for now, we exclude fast tracks by removing the Fast flag.
-                const audio_output_flags_t outputFlags =
-                        (audio_output_flags_t)(output.flags & ~AUDIO_OUTPUT_FLAG_FAST);
-                sp patchTrack = new PlaybackThread::PatchTrack(secondaryThread,
-                                                               streamType,
-                                                               output.sampleRate,
-                                                               input.config.channel_mask,
-                                                               input.config.format,
-                                                               frameCount,
-                                                               patchRecord->buffer(),
-                                                               patchRecord->bufferSize(),
-                                                               outputFlags,
-                                                               0ns /* timeout */,
-                                                               frameCountToBeReady);
-                status = patchTrack->initCheck();
-                if (status != NO_ERROR) {
-                    ALOGE("Secondary output patchTrack init failed: %d", status);
-                    continue;
-                }
-                teePatches.push_back({patchRecord, patchTrack});
-                secondaryThread->addPatchTrack(patchTrack);
-                // In case the downstream patchTrack on the secondaryThread temporarily outlives
-                // our created track, ensure the corresponding patchRecord is still alive.
-                patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
-                patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
-            }
-            track->setTeePatches(std::move(teePatches));
+            updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
         }
 
         // move effect chain to this output thread if an effect on same session was waiting
@@ -998,15 +1020,14 @@
         AudioSystem::moveEffectsToIo(effectIds, effectThreadId);
     }
 
-    // return handle to client
-    trackHandle = new TrackHandle(track);
+    output.audioTrack = new TrackHandle(track);
+    _output = VALUE_OR_FATAL(output.toAidl());
 
 Exit:
     if (lStatus != NO_ERROR && output.outputId != AUDIO_IO_HANDLE_NONE) {
         AudioSystem::releaseOutput(portId);
     }
-    *status = lStatus;
-    return trackHandle;
+    return lStatus;
 }
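
The refactored createTrack() (and the other AIDL entry points in this patch) unwrap conversion results with VALUE_OR_RETURN_STATUS / VALUE_OR_FATAL macros. The sketch below illustrates only the general pattern, returning early from the enclosing function when a conversion fails, using hypothetical names (MyResult, MY_VALUE_OR_RETURN_STATUS) and the GNU statement-expression extension supported by clang/gcc; it is not the real media/AidlConversionUtil.h implementation.

#include <cstdint>
#include <optional>
#include <utility>

using status_t = int32_t;                  // simplified stand-ins for this sketch
constexpr status_t OK_STATUS = 0;
constexpr status_t BAD_VALUE_STATUS = -22;

// Hypothetical stand-in for a ConversionResult-like type: either a value or a status.
template <typename T>
struct MyResult {
    std::optional<T> value;
    status_t status = OK_STATUS;
    bool ok() const { return value.has_value(); }
};

// Hypothetical stand-in for the VALUE_OR_RETURN_STATUS pattern: evaluate a
// conversion and return its error status from the enclosing function on failure.
// Relies on the GNU statement-expression extension.
#define MY_VALUE_OR_RETURN_STATUS(expr)             \
    ({                                              \
        auto _res = (expr);                         \
        if (!_res.ok()) return _res.status;         \
        std::move(*_res.value);                     \
    })

// Example conversion: reject negative ids, as an aidl2legacy_* helper might.
MyResult<uint32_t> toLegacyId(int32_t aidlId) {
    if (aidlId < 0) return {std::nullopt, BAD_VALUE_STATUS};
    return {static_cast<uint32_t>(aidlId), OK_STATUS};
}

status_t handleRequest(int32_t aidlId) {
    const uint32_t id = MY_VALUE_OR_RETURN_STATUS(toLegacyId(aidlId));
    (void)id;  // ... use the converted value ...
    return OK_STATUS;
}
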
 
 uint32_t AudioFlinger::sampleRate(audio_io_handle_t ioHandle) const
@@ -1283,9 +1304,9 @@
     }
 
     // Now set the master mute in each playback thread.  Playback threads
-    // assigned to HALs which do not have master mute support will apply master
-    // mute during the mix operation.  Threads with HALs which do support master
-    // mute will simply ignore the setting.
+    // assigned to HALs which do not have master mute support will apply master mute
+    // during the mix operation.  Threads with HALs which do support master mute
+    // will simply ignore the setting.
     Vector<VolumeInterface *> volumeInterfaces = getAllVolumeInterfaces_l();
     for (size_t i = 0; i < volumeInterfaces.size(); i++) {
         volumeInterfaces[i]->setMasterMute(muted);
@@ -1430,7 +1451,7 @@
 }
 
 
-void AudioFlinger::broacastParametersToRecordThreads_l(const String8& keyValuePairs)
+void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
 {
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
         mRecordThreads.valueAt(i)->setParameters(keyValuePairs);
@@ -1461,6 +1482,20 @@
     }
 }
 
+// Update downstream patches for all playback threads attached to an MSD module
+void AudioFlinger::updateDownStreamPatches_l(const struct audio_patch *patch,
+                                             const std::set<audio_io_handle_t> streams)
+{
+    for (const audio_io_handle_t stream : streams) {
+        PlaybackThread *playbackThread = checkPlaybackThread_l(stream);
+        if (playbackThread == nullptr || !playbackThread->isMsdDevice()) {
+            continue;
+        }
+        playbackThread->setDownStreamPatch(patch);
+        playbackThread->sendIoConfigEvent(AUDIO_OUTPUT_CONFIG_CHANGED);
+    }
+}
+
 // Filter reserved keys from setParameters() before forwarding to audio HAL or acting upon.
 // Some keys are used for audio routing and audio path configuration and should be reserved for use
 // by audio policy and audio flinger for functional, privacy and security reasons.
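
The comment above describes stripping reserved keys from the "key1=value1;key2=value2" parameter string before it reaches the HAL; the filter itself is outside this hunk. For illustration, here is a hedged standalone sketch of such key filtering using plain string handling (a hypothetical helper, not AudioFlinger's actual filter):

#include <set>
#include <sstream>
#include <string>

// Hypothetical helper: drop reserved keys from a "key=value;key=value" string
// before forwarding it further, keeping every other pair untouched.
std::string filterReservedKeys(const std::string& keyValuePairs,
                               const std::set<std::string>& reservedKeys) {
    std::string filtered;
    std::istringstream in(keyValuePairs);
    std::string pair;
    while (std::getline(in, pair, ';')) {
        if (pair.empty()) continue;
        const std::string key = pair.substr(0, pair.find('='));
        if (reservedKeys.count(key) != 0) continue;  // reserved: do not forward
        if (!filtered.empty()) filtered += ';';
        filtered += pair;
    }
    return filtered;
}

// Example: filterReservedKeys("routing=2;volume=5", {"routing"}) yields "volume=5".
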
@@ -1588,7 +1623,7 @@
             int value;
             if ((param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) &&
                     (value != 0)) {
-                broacastParametersToRecordThreads_l(filteredKeyValuePairs);
+                broadcastParametersToRecordThreads_l(filteredKeyValuePairs);
             }
         }
     }
@@ -1765,7 +1800,7 @@
     return BAD_VALUE;
 }
 
-void AudioFlinger::registerClient(const sp<IAudioFlingerClient>& client)
+void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
     Mutex::Autolock _l(mLock);
     if (client == 0) {
@@ -1840,13 +1875,18 @@
 
 void AudioFlinger::ioConfigChanged(audio_io_config_event event,
                                    const sp<AudioIoDescriptor>& ioDesc,
-                                   pid_t pid)
-{
+                                   pid_t pid) {
+    media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
+            legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
+    media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
+            legacy2aidl_audio_io_config_event_AudioIoConfigEvent(event));
+
     Mutex::Autolock _l(mClientLock);
     size_t size = mNotificationClients.size();
     for (size_t i = 0; i < size; i++) {
         if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
-            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(event, ioDesc);
+            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
+                                                                                   descAidl);
         }
     }
 }
@@ -1920,7 +1960,7 @@
 // ----------------------------------------------------------------------------
 
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
-                                                     const sp<IAudioFlingerClient>& client,
+                                                     const sp<media::IAudioFlingerClient>& client,
                                                      pid_t pid,
                                                      uid_t uid)
     : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
@@ -1975,10 +2015,12 @@
 
 // ----------------------------------------------------------------------------
 
-sp<media::IAudioRecord> AudioFlinger::createRecord(const CreateRecordInput& input,
-                                                   CreateRecordOutput& output,
-                                                   status_t *status)
+status_t AudioFlinger::createRecord(const media::CreateRecordRequest& _input,
+                                    media::CreateRecordResponse& _output)
 {
+    CreateRecordInput input = VALUE_OR_RETURN_STATUS(CreateRecordInput::fromAidl(_input));
+    CreateRecordOutput output;
+
     sp<RecordThread::RecordTrack> recordTrack;
     sp<RecordHandle> recordHandle;
     sp<Client> client;
@@ -1990,23 +2032,27 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    bool updatePid = (input.clientInfo.clientPid == -1);
+    // TODO b/182392553: refactor or clean up
+    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
+    bool updatePid = (adjAttributionSource.pid == -1);
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid = input.clientInfo.clientUid;
+    const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+           adjAttributionSource.uid));
     if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
+        ALOGW_IF(currentUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        clientUid = callingUid;
+                __FUNCTION__, callingUid, currentUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
         updatePid = true;
     }
-    pid_t clientPid = input.clientInfo.clientPid;
     const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+            adjAttributionSource.pid));
     if (updatePid) {
-        ALOGW_IF(clientPid != -1 && clientPid != callingPid,
+        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
+                 __func__, callingUid, callingPid, currentPid);
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
 
     // we don't yet support anything other than linear PCM
@@ -2034,7 +2080,7 @@
     output.selectedDeviceId = input.selectedDeviceId;
     output.flags = input.flags;
 
-    client = registerPid(clientPid);
+    client = registerPid(VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid)));
 
     // Not a conventional loop, but a retry loop for at most two iterations total.
     // Try first maybe with FAST flag then try again without FAST flag if that fails.
@@ -2054,9 +2100,7 @@
                                       input.riid,
                                       sessionId,
                                     // FIXME compare to AudioTrack
-                                      clientPid,
-                                      clientUid,
-                                      input.opPackageName,
+                                      adjAttributionSource,
                                       &input.config,
                                       output.flags, &output.selectedDeviceId, &portId);
     if (lStatus != NO_ERROR) {
@@ -2068,8 +2112,8 @@
         Mutex::Autolock _l(mLock);
         RecordThread *thread = checkRecordThread_l(output.inputId);
         if (thread == NULL) {
-            ALOGE("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
-            lStatus = BAD_VALUE;
+            ALOGW("createRecord() checkRecordThread_l failed, input handle %d", output.inputId);
+            lStatus = FAILED_TRANSACTION;
             goto Exit;
         }
 
@@ -2083,10 +2127,9 @@
                                                   input.config.format, input.config.channel_mask,
                                                   &output.frameCount, sessionId,
                                                   &output.notificationFrameCount,
-                                                  callingPid, clientUid, &output.flags,
+                                                  callingPid, adjAttributionSource, &output.flags,
                                                   input.clientInfo.clientTid,
-                                                  &lStatus, portId,
-                                                  input.opPackageName);
+                                                  &lStatus, portId, input.maxSharedAudioHistoryMs);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (recordTrack == 0));
 
         // lStatus == BAD_TYPE means FAST flag was rejected: request a new input from
@@ -2116,8 +2159,8 @@
     output.buffers = recordTrack->getBuffers();
     output.portId = portId;
 
-    // return handle to client
-    recordHandle = new RecordHandle(recordTrack);
+    output.audioRecord = new RecordHandle(recordTrack);
+    _output = VALUE_OR_FATAL(output.toAidl());
 
 Exit:
     if (lStatus != NO_ERROR) {
@@ -2135,8 +2178,7 @@
         }
     }
 
-    *status = lStatus;
-    return recordHandle;
+    return lStatus;
 }
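
Both createTrack() and createRecord() above apply the same caller-identity rule: only audioserver/mediaserver may act on behalf of another uid/pid; any other caller has its claimed uid replaced by the binder calling uid, and a pid of -1 (or a pid claimed by an untrusted caller) is replaced by the calling pid. A simplified standalone model of that adjustment, with hypothetical types rather than AttributionSourceState:

#include <cstdint>

// Hypothetical simplified model of the caller-identity adjustment used by
// createTrack()/createRecord() above.
struct CallerIds {
    int32_t uid;
    int32_t pid;  // -1 means "not supplied, use the calling pid"
};

CallerIds adjustCallerIds(CallerIds requested, int32_t callingUid, int32_t callingPid,
                          bool callerIsAudioOrMediaServer) {
    bool updatePid = (requested.pid == -1);
    if (!callerIsAudioOrMediaServer) {
        // Untrusted caller: ignore the claimed uid and force a pid update.
        requested.uid = callingUid;
        updatePid = true;
    }
    if (updatePid) {
        requested.pid = callingPid;
    }
    return requested;
}
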
 
 
@@ -2308,6 +2350,11 @@
 {
     ALOGV(__func__);
 
+    status_t status = AudioValidator::validateAudioPortConfig(*config);
+    if (status != NO_ERROR) {
+        return status;
+    }
+
     audio_module_handle_t module;
     if (config->type == AUDIO_PORT_TYPE_DEVICE) {
         module = config->ext.device.hw_module;
@@ -2533,7 +2580,11 @@
                       *output, thread.get());
             }
             mPlaybackThreads.add(*output, thread);
-            mPatchPanel.notifyStreamOpened(outHwDev, *output);
+            struct audio_patch patch;
+            mPatchPanel.notifyStreamOpened(outHwDev, *output, &patch);
+            if (thread->isMsdDevice()) {
+                thread->setDownStreamPatch(&patch);
+            }
             return thread;
         }
     }
@@ -2541,20 +2592,28 @@
     return 0;
 }
 
-status_t AudioFlinger::openOutput(audio_module_handle_t module,
-                                  audio_io_handle_t *output,
-                                  audio_config_t *config,
-                                  const sp<DeviceDescriptorBase>& device,
-                                  uint32_t *latencyMs,
-                                  audio_output_flags_t flags)
+status_t AudioFlinger::openOutput(const media::OpenOutputRequest& request,
+                                media::OpenOutputResponse* response)
 {
+    audio_module_handle_t module = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_module_handle_t(request.module));
+    audio_config_t config = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioConfig_audio_config_t(request.config));
+    sp<DeviceDescriptorBase> device = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_DeviceDescriptorBase(request.device));
+    audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+
+    audio_io_handle_t output;
+    uint32_t latencyMs;
+
     ALOGI("openOutput() this %p, module %d Device %s, SamplingRate %d, Format %#08x, "
               "Channels %#x, flags %#x",
               this, module,
               device->toString().c_str(),
-              config->sample_rate,
-              config->format,
-              config->channel_mask,
+              config.sample_rate,
+              config.format,
+              config.channel_mask,
               flags);
 
     audio_devices_t deviceType = device->type();
@@ -2566,11 +2625,11 @@
 
     Mutex::Autolock _l(mLock);
 
-    sp<ThreadBase> thread = openOutput_l(module, output, config, deviceType, address, flags);
+    sp<ThreadBase> thread = openOutput_l(module, &output, &config, deviceType, address, flags);
     if (thread != 0) {
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
             PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
-            *latencyMs = playbackThread->latency();
+            latencyMs = playbackThread->latency();
 
             // notify client processes of the new output creation
             playbackThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
@@ -2590,6 +2649,11 @@
             MmapThread *mmapThread = (MmapThread *)thread.get();
             mmapThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
         }
+        response->output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+        response->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+        response->latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(latencyMs));
+        response->flags = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
         return NO_ERROR;
     }
 
@@ -2742,22 +2806,36 @@
     return NO_ERROR;
 }
 
-status_t AudioFlinger::openInput(audio_module_handle_t module,
-                                          audio_io_handle_t *input,
-                                          audio_config_t *config,
-                                          audio_devices_t *devices,
-                                          const String8& address,
-                                          audio_source_t source,
-                                          audio_input_flags_t flags)
+status_t AudioFlinger::openInput(const media::OpenInputRequest& request,
+                                 media::OpenInputResponse* response)
 {
     Mutex::Autolock _l(mLock);
 
-    if (*devices == AUDIO_DEVICE_NONE) {
+    if (request.device.type == AUDIO_DEVICE_NONE) {
         return BAD_VALUE;
     }
 
+    audio_io_handle_t input = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(request.input));
+    audio_config_t config = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioConfig_audio_config_t(request.config));
+    AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceTypeAddress(request.device));
+
     sp<ThreadBase> thread = openInput_l(
-            module, input, config, *devices, address, source, flags, AUDIO_DEVICE_NONE, String8{});
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(request.module)),
+            &input,
+            &config,
+            device.mType,
+            device.address().c_str(),
+            VALUE_OR_RETURN_STATUS(aidl2legacy_AudioSourceType_audio_source_t(request.source)),
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_input_flags_t_mask(request.flags)),
+            AUDIO_DEVICE_NONE,
+            String8{});
+
+    response->input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
+    response->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+    response->device = request.device;
 
     if (thread != 0) {
         // notify client processes of the new input creation
@@ -2771,7 +2849,7 @@
                                                          audio_io_handle_t *input,
                                                          audio_config_t *config,
                                                          audio_devices_t devices,
-                                                         const String8& address,
+                                                         const char* address,
                                                          audio_source_t source,
                                                          audio_input_flags_t flags,
                                                          audio_devices_t outputDevice,
@@ -2801,7 +2879,7 @@
     sp<DeviceHalInterface> inHwHal = inHwDev->hwDevice();
     sp<StreamInHalInterface> inStream;
     status_t status = inHwHal->openInputStream(
-            *input, devices, &halconfig, flags, address.string(), source,
+            *input, devices, &halconfig, flags, address, source,
             outputDevice, outputDeviceAddress, &inStream);
     ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
            ", Format %#x, Channels %#x, flags %#x, status %d addr %s",
@@ -2811,7 +2889,7 @@
             halconfig.format,
             halconfig.channel_mask,
             flags,
-            status, address.string());
+            status, address);
 
     // If the input could not be opened with the requested parameters and we can handle the
     // conversion internally, try to open again with the proposed parameters.
@@ -2819,13 +2897,13 @@
         audio_is_linear_pcm(config->format) &&
         audio_is_linear_pcm(halconfig.format) &&
         (halconfig.sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * config->sample_rate) &&
-        (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_8) &&
-        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_8)) {
+        (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_LIMIT) &&
+        (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_LIMIT)) {
         // FIXME describe the change proposed by HAL (save old values so we can log them here)
         ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
         inStream.clear();
         status = inHwHal->openInputStream(
-                *input, devices, &halconfig, flags, address.string(), source,
+                *input, devices, &halconfig, flags, address, source,
                 outputDevice, outputDeviceAddress, &inStream);
         // FIXME log this new status; HAL should not propose any further changes
     }
@@ -3141,7 +3219,8 @@
 // dumpToThreadLog_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::dumpToThreadLog_l(const sp<ThreadBase> &thread)
 {
-    audio_utils::FdToString fdToString;
+    constexpr int THREAD_DUMP_TIMEOUT_MS = 2;
+    audio_utils::FdToString fdToString("- ", THREAD_DUMP_TIMEOUT_MS);
     const int fd = fdToString.fd();
     if (fd >= 0) {
         thread->dump(fd, {} /* args */);
@@ -3297,6 +3376,104 @@
     return minThread;
 }
 
+AudioFlinger::ThreadBase *AudioFlinger::hapticPlaybackThread_l() const {
+    for (size_t i = 0; i < mPlaybackThreads.size(); ++i) {
+        PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+        if (thread->hapticChannelMask() != AUDIO_CHANNEL_NONE) {
+            return thread;
+        }
+    }
+    return nullptr;
+}
+
+void AudioFlinger::updateSecondaryOutputsForTrack_l(
+        PlaybackThread::Track* track,
+        PlaybackThread* thread,
+        const std::vector<audio_io_handle_t> &secondaryOutputs) const {
+    TeePatches teePatches;
+    for (audio_io_handle_t secondaryOutput : secondaryOutputs) {
+        PlaybackThread *secondaryThread = checkPlaybackThread_l(secondaryOutput);
+        if (secondaryThread == nullptr) {
+            ALOGE("no playback thread found for secondary output %d", thread->id());
+            continue;
+        }
+
+        size_t sourceFrameCount = thread->frameCount() * track->sampleRate()
+                                  / thread->sampleRate();
+        size_t sinkFrameCount = secondaryThread->frameCount() * track->sampleRate()
+                                  / secondaryThread->sampleRate();
+        // If the secondary output has just been opened, the first secondaryThread write
+        // will not block as it will fill the empty startup buffer of the HAL,
+        // so a second sink buffer needs to be ready for the immediate next blocking write.
+        // Additionally, have a margin of one main thread buffer as the scheduling jitter
+        // can reorder the writes (e.g. if threads A and B have the same write interval,
+        // the scheduler could schedule AB...BA).
+        size_t frameCountToBeReady = 2 * sinkFrameCount + sourceFrameCount;
+        // The total secondary output buffer must be at least as large as the read frames plus
+        // a margin of a few buffers on both sides in case the
+        // thread scheduling has some jitter.
+        // That value should not impact latency, as the secondary track is started before
+        // its buffer is full; see frameCountToBeReady.
+        size_t frameCount = frameCountToBeReady + 2 * (sourceFrameCount + sinkFrameCount);
+        // The frameCount should also not be smaller than the secondary thread's minimum
+        // frame count.
+        size_t minFrameCount = AudioSystem::calculateMinFrameCount(
+                    [&] { Mutex::Autolock _l(secondaryThread->mLock);
+                          return secondaryThread->latency_l(); }(),
+                    secondaryThread->mNormalFrameCount,
+                    secondaryThread->mSampleRate,
+                    track->sampleRate(),
+                    track->getSpeed());
+        frameCount = std::max(frameCount, minFrameCount);
+
+        using namespace std::chrono_literals;
+        auto inChannelMask = audio_channel_mask_out_to_in(track->channelMask());
+        sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
+                                                       track->sampleRate(),
+                                                       inChannelMask,
+                                                       track->format(),
+                                                       frameCount,
+                                                       nullptr /* buffer */,
+                                                       (size_t)0 /* bufferSize */,
+                                                       AUDIO_INPUT_FLAG_DIRECT,
+                                                       0ns /* timeout */);
+        status_t status = patchRecord->initCheck();
+        if (status != NO_ERROR) {
+            ALOGE("Secondary output patchRecord init failed: %d", status);
+            continue;
+        }
+
+        // TODO: We could check compatibility of the secondaryThread with the PatchTrack
+        // for fast usage: thread has fast mixer, sample rate matches, etc.;
+        // for now, we exclude fast tracks by removing the Fast flag.
+        const audio_output_flags_t outputFlags =
+                (audio_output_flags_t)(track->getOutputFlags() & ~AUDIO_OUTPUT_FLAG_FAST);
+        sp patchTrack = new PlaybackThread::PatchTrack(secondaryThread,
+                                                       track->streamType(),
+                                                       track->sampleRate(),
+                                                       track->channelMask(),
+                                                       track->format(),
+                                                       frameCount,
+                                                       patchRecord->buffer(),
+                                                       patchRecord->bufferSize(),
+                                                       outputFlags,
+                                                       0ns /* timeout */,
+                                                       frameCountToBeReady);
+        status = patchTrack->initCheck();
+        if (status != NO_ERROR) {
+            ALOGE("Secondary output patchTrack init failed: %d", status);
+            continue;
+        }
+        teePatches.push_back({patchRecord, patchTrack});
+        secondaryThread->addPatchTrack(patchTrack);
+        // In case the downstream patchTrack on the secondaryThread temporarily outlives
+        // our created track, ensure the corresponding patchRecord is still alive.
+        patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
+        patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
+    }
+    track->setTeePatches(std::move(teePatches));
+}
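
The buffer sizing comments in updateSecondaryOutputsForTrack_l() combine a readiness threshold (two sink buffers plus one source buffer) with additional jitter margin and a minimum-frame-count clamp. The following worked example only illustrates that arithmetic with made-up rates and frame counts, not values from any particular device:

#include <algorithm>
#include <cstddef>
#include <cstdio>

int main() {
    // Hypothetical example values, chosen only to illustrate the formulas above.
    const size_t primaryFrameCount = 960;     // primary thread buffer, frames
    const unsigned primaryRate = 48000;       // primary thread sample rate, Hz
    const size_t secondaryFrameCount = 1024;  // secondary thread buffer, frames
    const unsigned secondaryRate = 44100;     // secondary thread sample rate, Hz
    const unsigned trackRate = 48000;         // track sample rate, Hz

    // Convert both thread buffers into the track's time base.
    const size_t sourceFrameCount = primaryFrameCount * trackRate / primaryRate;        // 960
    const size_t sinkFrameCount = secondaryFrameCount * trackRate / secondaryRate;      // 1114

    // Two sink buffers plus one source buffer must be queued before the first write.
    const size_t frameCountToBeReady = 2 * sinkFrameCount + sourceFrameCount;           // 3188

    // The total buffer adds two more (source + sink) buffers of margin for scheduling jitter.
    size_t frameCount = frameCountToBeReady + 2 * (sourceFrameCount + sinkFrameCount);  // 7336

    // The real code additionally clamps to the secondary thread's minimum frame count.
    const size_t minFrameCount = 4096;  // hypothetical lower bound
    frameCount = std::max(frameCount, minFrameCount);

    std::printf("frameCountToBeReady=%zu frameCount=%zu\n", frameCountToBeReady, frameCount);
    return 0;
}
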
+
 sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
                                     audio_session_t triggerSession,
                                     audio_session_t listenerSession,
@@ -3427,40 +3604,44 @@
     return status;
 }
 
-sp<IEffect> AudioFlinger::createEffect(
-        effect_descriptor_t *pDesc,
-        const sp<IEffectClient>& effectClient,
-        int32_t priority,
-        audio_io_handle_t io,
-        audio_session_t sessionId,
-        const AudioDeviceTypeAddr& device,
-        const String16& opPackageName,
-        pid_t pid,
-        bool probe,
-        status_t *status,
-        int *id,
-        int *enabled)
-{
-    status_t lStatus = NO_ERROR;
-    sp<EffectHandle> handle;
-    effect_descriptor_t desc;
+status_t AudioFlinger::createEffect(const media::CreateEffectRequest& request,
+                                    media::CreateEffectResponse* response) {
+    const sp<IEffectClient>& effectClient = request.client;
+    const int32_t priority = request.priority;
+    const AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceTypeAddress(request.device));
+    AttributionSourceState adjAttributionSource = request.attributionSource;
+    const audio_session_t sessionId = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_session_t(request.sessionId));
+    audio_io_handle_t io = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(request.output));
+    const effect_descriptor_t descIn = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_EffectDescriptor_effect_descriptor_t(request.desc));
+    const bool probe = request.probe;
 
+    sp<EffectHandle> handle;
+    effect_descriptor_t descOut;
+    int enabledOut = 0;
+    int idOut = -1;
+
+    status_t lStatus = NO_ERROR;
+
+    // TODO b/182392553: refactor or make clearer
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (pid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+    adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+    if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != -1 && pid != callingPid,
+        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, callingUid, callingPid, currentPid);
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+        currentPid = callingPid;
     }
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
-            pid, effectClient.get(), priority, sessionId, io, mEffectsFactoryHal.get());
-
-    if (pDesc == NULL) {
-        lStatus = BAD_VALUE;
-        goto Exit;
-    }
+          adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
+          mEffectsFactoryHal.get());
 
     if (mEffectsFactoryHal == 0) {
         ALOGE("%s: no effects factory hal", __func__);
@@ -3488,7 +3669,7 @@
             goto Exit;
         }
     } else if (sessionId == AUDIO_SESSION_DEVICE) {
-        if (!modifyDefaultAudioEffectsAllowed(pid, callingUid)) {
+        if (!modifyDefaultAudioEffectsAllowed(adjAttributionSource)) {
             ALOGE("%s: device effect permission denied for uid %d", __func__, callingUid);
             lStatus = PERMISSION_DENIED;
             goto Exit;
@@ -3517,7 +3698,7 @@
         // otherwise no preference.
         uint32_t preferredType = (sessionId == AUDIO_SESSION_OUTPUT_MIX ?
                                   EFFECT_FLAG_TYPE_AUXILIARY : EFFECT_FLAG_TYPE_MASK);
-        lStatus = getEffectDescriptor(&pDesc->uuid, &pDesc->type, preferredType, &desc);
+        lStatus = getEffectDescriptor(&descIn.uuid, &descIn.type, preferredType, &descOut);
         if (lStatus < 0) {
             ALOGW("createEffect() error %d from getEffectDescriptor", lStatus);
             goto Exit;
@@ -3525,44 +3706,52 @@
 
         // Do not allow auxiliary effects on a session different from 0 (output mix)
         if (sessionId != AUDIO_SESSION_OUTPUT_MIX &&
-             (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
+             (descOut.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
             lStatus = INVALID_OPERATION;
             goto Exit;
         }
 
         // check recording permission for visualizer
-        if ((memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) &&
+        if ((memcmp(&descOut.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) &&
             // TODO: Do we need to start/stop op - i.e. is there recording being performed?
-            !recordingAllowed(opPackageName, pid, callingUid)) {
+            !recordingAllowed(adjAttributionSource)) {
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
 
-        // return effect descriptor
-        *pDesc = desc;
+        const bool hapticPlaybackRequired = EffectModule::isHapticGenerator(&descOut.type);
+        if (hapticPlaybackRequired
+                && (sessionId == AUDIO_SESSION_DEVICE
+                        || sessionId == AUDIO_SESSION_OUTPUT_MIX
+                        || sessionId == AUDIO_SESSION_OUTPUT_STAGE)) {
+            // haptic-generating effect is only valid when the session id is a general session id
+            lStatus = INVALID_OPERATION;
+            goto Exit;
+        }
+
         if (io == AUDIO_IO_HANDLE_NONE && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
             // if the output returned by getOutputForEffect() is removed before we lock the
             // mutex below, the call to checkPlaybackThread_l(io) below will detect it
             // and we will exit safely
-            io = AudioSystem::getOutputForEffect(&desc);
+            io = AudioSystem::getOutputForEffect(&descOut);
             ALOGV("createEffect got output %d", io);
         }
 
         Mutex::Autolock _l(mLock);
 
         if (sessionId == AUDIO_SESSION_DEVICE) {
-            sp<Client> client = registerPid(pid);
+            sp<Client> client = registerPid(currentPid);
             ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
             handle = mDeviceEffectManager.createEffect_l(
-                    &desc, device, client, effectClient, mPatchPanel.patches_l(),
-                    enabled, &lStatus, probe);
+                    &descOut, device, client, effectClient, mPatchPanel.patches_l(),
+                    &enabledOut, &lStatus, probe);
             if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
                 // remove local strong reference to Client with mClientLock held
                 Mutex::Autolock _cl(mClientLock);
                 client.clear();
             } else {
                 // handle must be valid here, but check again to be safe.
-                if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+                if (handle.get() != nullptr) idOut = handle->id();
             }
             goto Register;
         }
@@ -3592,8 +3781,8 @@
             // Detect if the effect is created after an AudioRecord is destroyed.
             if (getOrphanEffectChain_l(sessionId).get() != nullptr) {
                 ALOGE("%s: effect %s with no specified io handle is denied because the AudioRecord"
-                        " for session %d no longer exists",
-                         __func__, desc.name, sessionId);
+                      " for session %d no longer exists",
+                      __func__, descOut.name, sessionId);
                 lStatus = PERMISSION_DENIED;
                 goto Exit;
             }
@@ -3607,17 +3796,27 @@
             if (io == AUDIO_IO_HANDLE_NONE && mPlaybackThreads.size() > 0) {
                 io = mPlaybackThreads.keyAt(0);
             }
-            ALOGV("createEffect() got io %d for effect %s", io, desc.name);
+            ALOGV("createEffect() got io %d for effect %s", io, descOut.name);
         } else if (checkPlaybackThread_l(io) != nullptr) {
             // allow only one effect chain per sessionId on mPlaybackThreads.
             for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
                 const audio_io_handle_t checkIo = mPlaybackThreads.keyAt(i);
-                if (io == checkIo) continue;
+                if (io == checkIo) {
+                    if (hapticPlaybackRequired
+                            && mPlaybackThreads.valueAt(i)
+                                    ->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+                        ALOGE("%s: haptic playback thread is required while the required playback "
+                              "thread(io=%d) doesn't support", __func__, (int)io);
+                        lStatus = BAD_VALUE;
+                        goto Exit;
+                    }
+                    continue;
+                }
                 const uint32_t sessionType =
                         mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId);
                 if ((sessionType & ThreadBase::EFFECT_SESSION) != 0) {
                     ALOGE("%s: effect %s io %d denied because session %d effect exists on io %d",
-                            __func__, desc.name, (int)io, (int)sessionId, (int)checkIo);
+                          __func__, descOut.name, (int) io, (int) sessionId, (int) checkIo);
                     android_errorWriteLog(0x534e4554, "123237974");
                     lStatus = BAD_VALUE;
                     goto Exit;
@@ -3645,19 +3844,38 @@
             }
         }
 
-        sp<Client> client = registerPid(pid);
+        sp<Client> client = registerPid(currentPid);
 
         // create effect on selected output thread
         bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+        ThreadBase *oriThread = nullptr;
+        if (hapticPlaybackRequired && thread->hapticChannelMask() == AUDIO_CHANNEL_NONE) {
+            ThreadBase *hapticThread = hapticPlaybackThread_l();
+            if (hapticThread == nullptr) {
+                ALOGE("%s haptic thread not found while it is required", __func__);
+                lStatus = INVALID_OPERATION;
+                goto Exit;
+            }
+            if (hapticThread != thread) {
+                // Force to use haptic thread for haptic-generating effect.
+                oriThread = thread;
+                thread = hapticThread;
+            }
+        }
         handle = thread->createEffect_l(client, effectClient, priority, sessionId,
-                &desc, enabled, &lStatus, pinned, probe);
+                                        &descOut, &enabledOut, &lStatus, pinned, probe);
         if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
             // remove local strong reference to Client with mClientLock held
             Mutex::Autolock _cl(mClientLock);
             client.clear();
         } else {
             // handle must be valid here, but check again to be safe.
-            if (handle.get() != nullptr && id != nullptr) *id = handle->id();
+            if (handle.get() != nullptr) idOut = handle->id();
+            // Invalidate audio session when haptic playback is created.
+            if (hapticPlaybackRequired && oriThread != nullptr) {
+                // invalidateTracksForAudioSession will trigger locking the thread.
+                oriThread->invalidateTracksForAudioSession(sessionId);
+            }
         }
     }
 
@@ -3675,9 +3893,14 @@
         handle.clear();
     }
 
+    response->id = idOut;
+    response->enabled = enabledOut != 0;
+    response->effect = handle;
+    response->desc = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
+
 Exit:
-    *status = lStatus;
-    return handle;
+    return lStatus;
 }
 
 status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
@@ -3774,7 +3997,7 @@
         // if the move request is not received from audio policy manager, the effect must be
         // re-registered with the new strategy and output
         if (dstChain == 0) {
-            dstChain = effect->callback()->chain().promote();
+            dstChain = effect->getCallback()->chain().promote();
             if (dstChain == 0) {
                 ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get());
                 status = NO_INIT;
@@ -3824,7 +4047,7 @@
             goto Exit;
         }
 
-        dstChain = effect->callback()->chain().promote();
+        dstChain = effect->getCallback()->chain().promote();
         if (dstChain == 0) {
             thread->addEffect_l(effect);
             status = INVALID_OPERATION;
@@ -3926,10 +4149,117 @@
 
 // ----------------------------------------------------------------------------
 
-status_t AudioFlinger::onTransact(
-        uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    return BnAudioFlinger::onTransact(code, data, reply, flags);
+status_t AudioFlinger::onTransactWrapper(TransactionCode code,
+                                         const Parcel& data,
+                                         uint32_t flags,
+                                         const std::function<status_t()>& delegate) {
+    (void) data;
+    (void) flags;
+
+    // make sure transactions reserved to AudioPolicyManager do not come from other processes
+    switch (code) {
+        case TransactionCode::SET_STREAM_VOLUME:
+        case TransactionCode::SET_STREAM_MUTE:
+        case TransactionCode::OPEN_OUTPUT:
+        case TransactionCode::OPEN_DUPLICATE_OUTPUT:
+        case TransactionCode::CLOSE_OUTPUT:
+        case TransactionCode::SUSPEND_OUTPUT:
+        case TransactionCode::RESTORE_OUTPUT:
+        case TransactionCode::OPEN_INPUT:
+        case TransactionCode::CLOSE_INPUT:
+        case TransactionCode::INVALIDATE_STREAM:
+        case TransactionCode::SET_VOICE_VOLUME:
+        case TransactionCode::MOVE_EFFECTS:
+        case TransactionCode::SET_EFFECT_SUSPENDED:
+        case TransactionCode::LOAD_HW_MODULE:
+        case TransactionCode::GET_AUDIO_PORT:
+        case TransactionCode::CREATE_AUDIO_PATCH:
+        case TransactionCode::RELEASE_AUDIO_PATCH:
+        case TransactionCode::LIST_AUDIO_PATCHES:
+        case TransactionCode::SET_AUDIO_PORT_CONFIG:
+        case TransactionCode::SET_RECORD_SILENCED:
+            ALOGW("%s: transaction %d received from PID %d",
+                  __func__, code, IPCThreadState::self()->getCallingPid());
+            // return status only for non-void methods
+            switch (code) {
+                case TransactionCode::SET_RECORD_SILENCED:
+                case TransactionCode::SET_EFFECT_SUSPENDED:
+                    break;
+                default:
+                    return INVALID_OPERATION;
+            }
+            // Fail silently in these cases.
+            return OK;
+        default:
+            break;
+    }
+
+    // make sure the following transactions come from system components
+    switch (code) {
+        case TransactionCode::SET_MASTER_VOLUME:
+        case TransactionCode::SET_MASTER_MUTE:
+        case TransactionCode::MASTER_MUTE:
+        case TransactionCode::SET_MODE:
+        case TransactionCode::SET_MIC_MUTE:
+        case TransactionCode::SET_LOW_RAM_DEVICE:
+        case TransactionCode::SYSTEM_READY:
+        case TransactionCode::SET_AUDIO_HAL_PIDS:
+        case TransactionCode::SET_VIBRATOR_INFOS:
+        case TransactionCode::UPDATE_SECONDARY_OUTPUTS: {
+            if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
+                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
+                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      IPCThreadState::self()->getCallingUid());
+                // return status only for non-void methods
+                switch (code) {
+                    case TransactionCode::SYSTEM_READY:
+                        break;
+                    default:
+                        return INVALID_OPERATION;
+                }
+                // Fail silently in these cases.
+                return OK;
+            }
+        } break;
+        default:
+            break;
+    }
+
+    // List of relevant events that trigger log merging.
+    // Log merging should activate during audio activity of any kind. These are considered the
+    // most relevant events.
+    // TODO: select the items in this list more carefully
+    switch (code) {
+        case TransactionCode::CREATE_TRACK:
+        case TransactionCode::CREATE_RECORD:
+        case TransactionCode::SET_MASTER_VOLUME:
+        case TransactionCode::SET_MASTER_MUTE:
+        case TransactionCode::SET_MIC_MUTE:
+        case TransactionCode::SET_PARAMETERS:
+        case TransactionCode::CREATE_EFFECT:
+        case TransactionCode::SYSTEM_READY: {
+            requestLogMerge();
+            break;
+        }
+        default:
+            break;
+    }
+
+    std::string tag("IAudioFlinger command " +
+                    std::to_string(static_cast<std::underlying_type_t<TransactionCode>>(code)));
+    TimeCheck check(tag.c_str());
+
+    // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
+    //  - AudioFlinger can call into Audio Policy Service with its global mutex held
+    //  - If this is the first time the Audio Policy Service is queried from inside the
+    //    audioserver process, this will trigger Audio Policy Manager initialization.
+    //  - Audio Policy Manager initialization calls into AudioFlinger, which will try to lock
+    //    its global mutex, and a deadlock will occur.
+    if (IPCThreadState::self()->getCallingPid() != getpid()) {
+        AudioSystem::get_audio_policy_service();
+    }
+
+    return delegate();
 }
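
onTransactWrapper() above follows a "filter, then delegate" shape: the raw binder transaction code is checked for caller restrictions and side effects (log merging, TimeCheck) before the generated handler runs. Below is a minimal, self-contained sketch of that shape only; the Code values and the isPrivileged() helper are hypothetical stand-ins, not AudioFlinger APIs.

    #include <functional>

    enum class Code { SetMasterVolume, CreateTrack, SystemReady };

    // Hypothetical policy check; the real code uses isServiceUid()/calling-PID checks instead.
    static bool isPrivileged(int callingUid) { return callingUid < 10000; }

    static int onTransactWrapper(Code code, int callingUid,
                                 const std::function<int()>& delegate) {
        switch (code) {
            case Code::SetMasterVolume:        // reserved for system components
                if (!isPrivileged(callingUid)) {
                    return -1;                 // reject before reaching the delegate
                }
                break;
            default:
                break;
        }
        return delegate();                     // forward to the generated binder handler
    }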
 
 } // namespace android
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 20f561e..fff61f8 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,17 +33,19 @@
 #include <sys/types.h>
 #include <limits.h>
 
+#include <android/media/BnAudioTrack.h>
+#include <android/media/IAudioFlingerClient.h>
 #include <android/media/IAudioTrackCallback.h>
 #include <android/os/BnExternalVibrationController.h>
-#include <android-base/macros.h>
+#include <android/content/AttributionSourceState.h>
 
+
+#include <android-base/macros.h>
 #include <cutils/atomic.h>
 #include <cutils/compiler.h>
-#include <cutils/properties.h>
 
+#include <cutils/properties.h>
 #include <media/IAudioFlinger.h>
-#include <media/IAudioFlingerClient.h>
-#include <media/IAudioTrack.h>
 #include <media/AudioSystem.h>
 #include <media/AudioTrack.h>
 #include <media/MmapStreamInterface.h>
@@ -73,6 +75,7 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/VolumeShaper.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/Synchronization.h>
 
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
@@ -91,15 +94,17 @@
 #include "ThreadMetrics.h"
 #include "TrackMetrics.h"
 
-#include <powermanager/IPowerManager.h>
+#include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
 #include <private/media/AudioEffectShared.h>
 #include <private/media/AudioTrackShared.h>
 
 #include <vibrator/ExternalVibration.h>
+#include <vibrator/ExternalVibrationUtils.h>
 
 #include "android/media/BnAudioRecord.h"
+#include "android/media/BnEffect.h"
 
 namespace android {
 
@@ -121,25 +126,24 @@
 
 #define INCLUDING_FROM_AUDIOFLINGER_H
 
-class AudioFlinger :
-    public BinderService<AudioFlinger>,
-    public BnAudioFlinger
+using android::content::AttributionSourceState;
+
+class AudioFlinger : public AudioFlingerServerAdapter::Delegate
 {
-    friend class BinderService<AudioFlinger>;   // for AudioFlinger()
-
 public:
-    static const char* getServiceName() ANDROID_API { return "media.audio_flinger"; }
+    static void instantiate() ANDROID_API;
 
-    virtual     status_t    dump(int fd, const Vector<String16>& args);
+    static AttributionSourceState checkAttributionSourcePackage(
+        const AttributionSourceState& attributionSource);
+
+    status_t dump(int fd, const Vector<String16>& args) override;
 
     // IAudioFlinger interface, in binder opcode order
-    virtual sp<IAudioTrack> createTrack(const CreateTrackInput& input,
-                                        CreateTrackOutput& output,
-                                        status_t *status);
+    status_t createTrack(const media::CreateTrackRequest& input,
+                         media::CreateTrackResponse& output) override;
 
-    virtual sp<media::IAudioRecord> createRecord(const CreateRecordInput& input,
-                                                 CreateRecordOutput& output,
-                                                 status_t *status);
+    status_t createRecord(const media::CreateRecordRequest& input,
+                          media::CreateRecordResponse& output) override;
 
     virtual     uint32_t    sampleRate(audio_io_handle_t ioHandle) const;
     virtual     audio_format_t format(audio_io_handle_t output) const;
@@ -175,17 +179,13 @@
     virtual     status_t    setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs);
     virtual     String8     getParameters(audio_io_handle_t ioHandle, const String8& keys) const;
 
-    virtual     void        registerClient(const sp<IAudioFlingerClient>& client);
+    virtual     void        registerClient(const sp<media::IAudioFlingerClient>& client);
 
     virtual     size_t      getInputBufferSize(uint32_t sampleRate, audio_format_t format,
                                                audio_channel_mask_t channelMask) const;
 
-    virtual status_t openOutput(audio_module_handle_t module,
-                                audio_io_handle_t *output,
-                                audio_config_t *config,
-                                const sp<DeviceDescriptorBase>& device,
-                                uint32_t *latencyMs,
-                                audio_output_flags_t flags);
+    virtual status_t openOutput(const media::OpenOutputRequest& request,
+                                media::OpenOutputResponse* response);
 
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
                                                   audio_io_handle_t output2);
@@ -196,13 +196,8 @@
 
     virtual status_t restoreOutput(audio_io_handle_t output);
 
-    virtual status_t openInput(audio_module_handle_t module,
-                               audio_io_handle_t *input,
-                               audio_config_t *config,
-                               audio_devices_t *device,
-                               const String8& address,
-                               audio_source_t source,
-                               audio_input_flags_t flags);
+    virtual status_t openInput(const media::OpenInputRequest& request,
+                               media::OpenInputResponse* response);
 
     virtual status_t closeInput(audio_io_handle_t input);
 
@@ -231,19 +226,8 @@
                                          uint32_t preferredTypeFlag,
                                          effect_descriptor_t *descriptor) const;
 
-    virtual sp<IEffect> createEffect(
-                        effect_descriptor_t *pDesc,
-                        const sp<IEffectClient>& effectClient,
-                        int32_t priority,
-                        audio_io_handle_t io,
-                        audio_session_t sessionId,
-                        const AudioDeviceTypeAddr& device,
-                        const String16& opPackageName,
-                        pid_t pid,
-                        bool probe,
-                        status_t *status /*non-NULL*/,
-                        int *id,
-                        int *enabled);
+    virtual status_t createEffect(const media::CreateEffectRequest& request,
+                                  media::CreateEffectResponse* response);
 
     virtual status_t moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
                         audio_io_handle_t dstOutput);
@@ -264,7 +248,7 @@
                                     struct audio_port *ports);
 
     /* Get attributes for a given audio port */
-    virtual status_t getAudioPort(struct audio_port *port);
+    virtual status_t getAudioPort(struct audio_port_v7 *port);
 
     /* Create an audio patch between several source and sink ports */
     virtual status_t createAudioPatch(const struct audio_patch *patch,
@@ -290,11 +274,13 @@
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids);
 
-    virtual     status_t    onTransact(
-                                uint32_t code,
-                                const Parcel& data,
-                                Parcel* reply,
-                                uint32_t flags);
+    virtual status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
+
+    virtual status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs);
+
+    status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
+        const std::function<status_t()>& delegate) override;
 
     // end of IAudioFlinger interface
 
@@ -320,6 +306,11 @@
     status_t removeEffectFromHal(audio_port_handle_t deviceId,
             audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
 
+    void updateDownStreamPatches_l(const struct audio_patch *patch,
+                                   const std::set<audio_io_handle_t> streams);
+
+    const media::AudioVibratorInfo* getDefaultVibratorInfo_l();
+
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -330,6 +321,24 @@
     Mutex               mUnregisteredWritersLock;
 
 public:
+    // Life cycle of gAudioFlinger and AudioFlinger:
+    //
+    // AudioFlinger is created once and survives until audioserver crashes
+    // irrespective of sp<> and wp<> as it is refcounted by ServiceManager and we
+    // don't issue a ServiceManager::tryUnregisterService().
+    //
+    // gAudioFlinger is an atomic pointer set on AudioFlinger::onFirstRef().
+    // After this is set, it is safe to obtain a wp<> or sp<> from it as the
+    // underlying object does not go away.
+    //
+    // Note: For most inner classes, it is acceptable to hold a reference to the outer
+    // AudioFlinger instance as creation requires AudioFlinger to exist in the first place.
+    //
+    // An atomic here ensures that writes to the underlying object have completed
+    // before the pointer is published. Accesses use memory_order_seq_cst.
+    //
+
+    static inline std::atomic<AudioFlinger *> gAudioFlinger = nullptr;
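
A minimal sketch of the publish-once / read-many pattern described in the comment above, using a hypothetical Service type rather than AudioFlinger itself:

    #include <atomic>

    struct Service { void ping() {} };                // stand-in for AudioFlinger

    static std::atomic<Service*> gService = nullptr;  // published once, never cleared

    void onFirstRef(Service* self) {
        gService.store(self);                         // memory_order_seq_cst by default
    }

    void someHelper() {
        if (Service* s = gService.load()) {           // safe: the object outlives the process
            s->ping();
        }
    }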
 
     class SyncEvent;
 
@@ -405,7 +414,7 @@
         case AUDIO_CHANNEL_REPRESENTATION_POSITION: {
             // Haptic channel mask is only applicable for channel position mask.
             const uint32_t channelCount = audio_channel_count_from_out_mask(
-                    channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL);
+                    static_cast<audio_channel_mask_t>(channelMask & ~AUDIO_CHANNEL_HAPTIC_ALL));
             const uint32_t maxChannelCount = kEnableExtendedChannels
                     ? AudioMixer::MAX_NUM_CHANNELS : FCC_2;
             if (channelCount < FCC_2 // mono is not supported at this time
@@ -488,12 +497,12 @@
     class NotificationClient : public IBinder::DeathRecipient {
     public:
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
-                                                const sp<IAudioFlingerClient>& client,
+                                                const sp<media::IAudioFlingerClient>& client,
                                                 pid_t pid,
                                                 uid_t uid);
         virtual             ~NotificationClient();
 
-                sp<IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
+                sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
                 pid_t getPid() const { return mPid; }
                 uid_t getUid() const { return mUid; }
 
@@ -506,7 +515,7 @@
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
         const uid_t             mUid;
-        const sp<IAudioFlingerClient> mAudioFlingerClient;
+        const sp<media::IAudioFlingerClient> mAudioFlingerClient;
     };
 
     // --- MediaLogNotifier ---
@@ -539,6 +548,7 @@
     const sp<MediaLogNotifier> mMediaLogNotifier;
 
     // This is a helper that is called during incoming binder calls.
+    // Requests media.log to start merging log buffers
     void requestLogMerge();
 
     class TrackHandle;
@@ -624,27 +634,38 @@
     }
 
     // server side of the client's IAudioTrack
-    class TrackHandle : public android::BnAudioTrack {
+    class TrackHandle : public android::media::BnAudioTrack {
     public:
         explicit            TrackHandle(const sp<PlaybackThread::Track>& track);
         virtual             ~TrackHandle();
-        virtual sp<IMemory> getCblk() const;
-        virtual status_t    start();
-        virtual void        stop();
-        virtual void        flush();
-        virtual void        pause();
-        virtual status_t    attachAuxEffect(int effectId);
-        virtual status_t    setParameters(const String8& keyValuePairs);
-        virtual status_t    selectPresentation(int presentationId, int programId);
-        virtual media::VolumeShaper::Status applyVolumeShaper(
-                const sp<media::VolumeShaper::Configuration>& configuration,
-                const sp<media::VolumeShaper::Operation>& operation) override;
-        virtual sp<media::VolumeShaper::State> getVolumeShaperState(int id) override;
-        virtual status_t    getTimestamp(AudioTimestamp& timestamp);
-        virtual void        signal(); // signal playback thread for a change in control block
 
-        virtual status_t onTransact(
-            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+        binder::Status getCblk(std::optional<media::SharedFileRegion>* _aidl_return) override;
+        binder::Status start(int32_t* _aidl_return) override;
+        binder::Status stop() override;
+        binder::Status flush() override;
+        binder::Status pause() override;
+        binder::Status attachAuxEffect(int32_t effectId, int32_t* _aidl_return) override;
+        binder::Status setParameters(const std::string& keyValuePairs,
+                                     int32_t* _aidl_return) override;
+        binder::Status selectPresentation(int32_t presentationId, int32_t programId,
+                                          int32_t* _aidl_return) override;
+        binder::Status getTimestamp(media::AudioTimestampInternal* timestamp,
+                                    int32_t* _aidl_return) override;
+        binder::Status signal() override;
+        binder::Status applyVolumeShaper(const media::VolumeShaperConfiguration& configuration,
+                                         const media::VolumeShaperOperation& operation,
+                                         int32_t* _aidl_return) override;
+        binder::Status getVolumeShaperState(
+                int32_t id,
+                std::optional<media::VolumeShaperState>* _aidl_return) override;
+        binder::Status getDualMonoMode(media::AudioDualMonoMode* _aidl_return) override;
+        binder::Status setDualMonoMode(media::AudioDualMonoMode mode) override;
+        binder::Status getAudioDescriptionMixLevel(float* _aidl_return) override;
+        binder::Status setAudioDescriptionMixLevel(float leveldB) override;
+        binder::Status getPlaybackRateParameters(
+                media::AudioPlaybackRate* _aidl_return) override;
+        binder::Status setPlaybackRateParameters(
+                const media::AudioPlaybackRate& playbackRate) override;
 
     private:
         const sp<PlaybackThread::Track> mTrack;
@@ -659,10 +680,12 @@
                 int /*audio_session_t*/ triggerSession);
         virtual binder::Status   stop();
         virtual binder::Status   getActiveMicrophones(
-                std::vector<media::MicrophoneInfo>* activeMicrophones);
+                std::vector<media::MicrophoneInfoData>* activeMicrophones);
         virtual binder::Status   setPreferredMicrophoneDirection(
                 int /*audio_microphone_direction_t*/ direction);
         virtual binder::Status   setPreferredMicrophoneFieldDimension(float zoom);
+        virtual binder::Status   shareAudioHistory(const std::string& sharedAudioPackageName,
+                                                   int64_t sharedAudioStartMs);
 
     private:
         const sp<RecordThread::RecordTrack> mRecordTrack;
@@ -682,6 +705,7 @@
         virtual status_t createMmapBuffer(int32_t minSizeFrames,
                                           struct audio_mmap_buffer_info *info);
         virtual status_t getMmapPosition(struct audio_mmap_position *position);
+        virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNanos);
         virtual status_t start(const AudioClient& client,
                                const audio_attributes_t *attr,
                                audio_port_handle_t *handle);
@@ -704,7 +728,7 @@
                                            audio_io_handle_t *input,
                                            audio_config_t *config,
                                            audio_devices_t device,
-                                           const String8& address,
+                                           const char* address,
                                            audio_source_t source,
                                            audio_input_flags_t flags,
                                            audio_devices_t outputDevice,
@@ -756,6 +780,13 @@
 
               sp<ThreadBase> getEffectThread_l(audio_session_t sessionId, int effectId);
 
+              ThreadBase *hapticPlaybackThread_l() const;
+
+              void updateSecondaryOutputsForTrack_l(
+                      PlaybackThread::Track* track,
+                      PlaybackThread* thread,
+                      const std::vector<audio_io_handle_t>& secondaryOutputs) const;
+
 
                 void        removeClient_l(pid_t pid);
                 void        removeNotificationClient(pid_t pid);
@@ -782,7 +813,7 @@
 
                 std::vector< sp<EffectModule> > purgeStaleEffects_l();
 
-                void broacastParametersToRecordThreads_l(const String8& keyValuePairs);
+                void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
                 void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
                 void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
@@ -962,7 +993,12 @@
     SimpleLog  mAppSetParameterLog;
     SimpleLog  mSystemSetParameterLog;
 
+    std::vector<media::AudioVibratorInfo> mAudioVibratorInfos;
+
     static inline constexpr const char *mMetricsId = AMEDIAMETRICS_KEY_AUDIO_FLINGER;
+
+    // Keep in sync with java definition in media/java/android/media/AudioRecord.java
+    static constexpr int32_t kMaxSharedAudioHistoryMs = 5000;
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp
index dda164c..16b25f6 100644
--- a/services/audioflinger/AudioHwDevice.cpp
+++ b/services/audioflinger/AudioHwDevice.cpp
@@ -98,5 +98,9 @@
     return mHwDevice->supportsAudioPatches(&result) == OK ? result : false;
 }
 
+status_t AudioHwDevice::getAudioPort(struct audio_port_v7 *port) const {
+    return mHwDevice->getAudioPort(port);
+}
+
 
 }; // namespace android
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index 6709d17..fc2c693 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -83,6 +83,8 @@
 
     bool supportsAudioPatches() const;
 
+    status_t getAudioPort(struct audio_port_v7 *port) const;
+
 private:
     const audio_module_handle_t mHandle;
     const char * const          mModuleName;
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index 7e06096..d8565bd 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -173,22 +173,15 @@
     return status;
 }
 
-audio_format_t AudioStreamOut::getFormat() const
+audio_config_base_t AudioStreamOut::getAudioProperties() const
 {
-    audio_format_t result;
-    return stream->getFormat(&result) == OK ? result : AUDIO_FORMAT_INVALID;
-}
-
-uint32_t AudioStreamOut::getSampleRate() const
-{
-    uint32_t result;
-    return stream->getSampleRate(&result) == OK ? result : 0;
-}
-
-audio_channel_mask_t AudioStreamOut::getChannelMask() const
-{
-    audio_channel_mask_t result;
-    return stream->getChannelMask(&result) == OK ? result : AUDIO_CHANNEL_INVALID;
+    audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+    if (stream->getAudioProperties(&result) != OK) {
+        result.sample_rate = 0;
+        result.channel_mask = AUDIO_CHANNEL_INVALID;
+        result.format = AUDIO_FORMAT_INVALID;
+    }
+    return result;
 }
 
 int AudioStreamOut::flush()
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 16fbcf2..565f43a 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -81,22 +81,14 @@
     virtual size_t getFrameSize() const { return mHalFrameSize; }
 
     /**
-     * @return format from the perspective of the application and the AudioFlinger.
+     * @return audio stream configuration: channel mask, format, sample rate:
+     *   - channel mask from the perspective of the application and the AudioFlinger,
+     *     The HAL is in stereo mode when playing multi-channel compressed audio over HDMI;
+     *   - format from the perspective of the application and the AudioFlinger;
+     *   - sample rate from the perspective of the application and the AudioFlinger,
+     *     The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
      */
-    virtual audio_format_t getFormat() const;
-
-    /**
-     * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
-     * @return sample rate from the perspective of the application and the AudioFlinger.
-     */
-    virtual uint32_t getSampleRate() const;
-
-    /**
-     * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
-     * @return channel mask from the perspective of the application and the AudioFlinger.
-     */
-    virtual audio_channel_mask_t getChannelMask() const;
-
+    virtual audio_config_base_t getAudioProperties() const;
 
     virtual status_t flush();
     virtual status_t standby();
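
A rough caller-side sketch of the consolidated getter introduced above. The struct below merely mimics the three audio_config_base_t fields used in this patch; it is not the real type from system/audio.h.

    #include <cstdint>

    struct AudioConfigBase {                   // stand-in for audio_config_base_t
        uint32_t sample_rate;
        uint32_t channel_mask;
        int32_t  format;
    };

    struct StreamOut {                         // stand-in for AudioStreamOut
        AudioConfigBase getAudioProperties() const { return {48000u, 0x3u, 1}; }
    };

    void logStreamConfig(const StreamOut& out) {
        const AudioConfigBase cfg = out.getAudioProperties();  // one call instead of three
        (void)cfg.sample_rate;                 // previously getSampleRate()
        (void)cfg.channel_mask;                // previously getChannelMask()
        (void)cfg.format;                      // previously getFormat()
    }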
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 5ff7215..cecd52b 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -30,6 +30,8 @@
 
 namespace android {
 
+using media::IEffectClient;
+
 void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
         const PatchPanel::Patch& patch) {
     ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
@@ -115,10 +117,19 @@
 
 status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
         const effect_descriptor_t *desc) {
+    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    if (effectsFactory == nullptr) {
+        return BAD_VALUE;
+    }
 
-    if ((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
-        && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC) {
-        ALOGW("%s() non pre/post processing device effect %s", __func__, desc->name);
+    static const float sMinDeviceEffectHalVersion = 6.0;
+    float halVersion = effectsFactory->getHalVersion();
+
+    if (((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
+            && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC)
+            || halVersion < sMinDeviceEffectHalVersion) {
+        ALOGW("%s() non pre/post processing device effect %s or incompatible API version %f",
+                __func__, desc->name, halVersion);
         return BAD_VALUE;
     }
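
A self-contained restatement of the gate above: device effects must be pre- or post-processing types and require an effect HAL of at least version 6.0. The flag constants below are assumptions for illustration, not the real EFFECT_FLAG_* values.

    #include <cstdint>

    constexpr uint32_t kTypeMask = 0xF;        // assumed stand-in for EFFECT_FLAG_TYPE_MASK
    constexpr uint32_t kTypePre  = 0x1;        // assumed stand-in for EFFECT_FLAG_TYPE_PRE_PROC
    constexpr uint32_t kTypePost = 0x2;        // assumed stand-in for EFFECT_FLAG_TYPE_POST_PROC

    static bool deviceEffectAllowed(uint32_t flags, float halVersion) {
        const uint32_t type = flags & kTypeMask;
        return (type == kTypePre || type == kTypePost) && halVersion >= 6.0f;
    }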
 
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 81e6065..a05f5fe 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -33,7 +33,7 @@
     sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
                 const AudioDeviceTypeAddr& device,
                 const sp<AudioFlinger::Client>& client,
-                const sp<IEffectClient>& effectClient,
+                const sp<media::IEffectClient>& effectClient,
                 const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
                 int *enabled,
                 status_t *status,
@@ -165,6 +165,7 @@
     uint32_t  sampleRate() const override { return 0; }
     audio_channel_mask_t channelMask() const override { return AUDIO_CHANNEL_NONE; }
     uint32_t channelCount() const override { return 0; }
+    audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
     size_t    frameCount() const override  { return 0; }
     uint32_t  latency() const override  { return 0; }
 
@@ -182,7 +183,7 @@
     void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
                           bool enabled __unused, bool threadLocked __unused) override {}
     void resetVolume() override {}
-    uint32_t strategy() const override  { return 0; }
+    product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
     int32_t activeTrackCnt() const override { return 0; }
     void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
     void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3dfeb83..b267d88 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -25,6 +25,7 @@
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_visualizer.h>
 #include <audio_utils/channels.h>
@@ -33,6 +34,7 @@
 #include <media/AudioContainers.h>
 #include <media/AudioEffect.h>
 #include <media/AudioDeviceTypeAddr.h>
+#include <media/ShmemCompat.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <mediautils/ServiceUtilities.h>
@@ -58,6 +60,28 @@
 
 namespace android {
 
+using aidl_utils::statusTFromBinderStatus;
+using binder::Status;
+
+namespace {
+
+// Append a POD value into a vector of bytes.
+template<typename T>
+void appendToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+    const uint8_t* ar(reinterpret_cast<const uint8_t*>(&value));
+    buffer->insert(buffer->end(), ar, ar + sizeof(T));
+}
+
+// Write a POD value into a vector of bytes (clears the previous buffer
+// content).
+template<typename T>
+void writeToBuffer(const T& value, std::vector<uint8_t>* buffer) {
+    buffer->clear();
+    appendToBuffer(value, buffer);
+}
+
+}  // namespace
+
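A short usage sketch, assuming the two templates above are in scope; this is how the AIDL reply vectors later in this file carry a single POD status word:

    std::vector<uint8_t> reply;
    writeToBuffer(static_cast<int32_t>(0), &reply);    // reply now holds 4 bytes (e.g. NO_ERROR)
    appendToBuffer(static_cast<int32_t>(42), &reply);  // reply grows to 8 bytes
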
 // ----------------------------------------------------------------------------
 //  EffectBase implementation
 // ----------------------------------------------------------------------------
@@ -128,12 +152,12 @@
     if (fromHandle) {
         if (enabled) {
             if (status != NO_ERROR) {
-                mCallback->checkSuspendOnEffectEnabled(this, false, false /*threadLocked*/);
+                getCallback()->checkSuspendOnEffectEnabled(this, false, false /*threadLocked*/);
             } else {
-                mCallback->onEffectEnable(this);
+                getCallback()->onEffectEnable(this);
             }
         } else {
-            mCallback->onEffectDisable(this);
+            getCallback()->onEffectDisable(this);
         }
     }
     return status;
@@ -214,7 +238,7 @@
     bool doEnable = false;
     bool enabled = false;
     audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
-    uint32_t strategy = PRODUCT_STRATEGY_NONE;
+    product_strategy_t strategy = PRODUCT_STRATEGY_NONE;
 
     {
         Mutex::Autolock _l(mLock);
@@ -223,8 +247,9 @@
             doRegister = true;
             mPolicyRegistered = mHandles.size() > 0;
             if (mPolicyRegistered) {
-                io = mCallback->io();
-                strategy = mCallback->strategy();
+                const auto callback = getCallback();
+                io = callback->io();
+                strategy = callback->strategy();
             }
         }
         // enable effect when registered according to enable state requested by controlling handle
@@ -237,6 +262,12 @@
         }
         registered = mPolicyRegistered;
         enabled = mPolicyEnabled;
+        // The simultaneous release of two EffectHandles on the same EffectModule
+        // may cause this method to be called concurrently, which can deadlock
+        // under some circumstances (b/180941720). Avoid that here.
+        if (!doRegister && !(registered && doEnable)) {
+            return NO_ERROR;
+        }
         mPolicyLock.lock();
     }
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
@@ -292,6 +323,9 @@
         }
     }
 
+    // Prevent calls to process() and other functions on effect interface from now on.
+    // The effect engine will be released by the destructor when the last strong reference on
+    // this object is released which can happen after next process is called.
     if (mHandles.size() == 0 && !mPinned) {
         mState = DESTROYED;
     }
@@ -316,8 +350,9 @@
 // unsafe method called when the effect parent thread has been destroyed
 ssize_t AudioFlinger::EffectBase::disconnectHandle(EffectHandle *handle, bool unpinIfLast)
 {
+    const auto callback = getCallback();
     ALOGV("disconnect() %p handle %p", this, handle);
-    if (mCallback->disconnectEffectHandle(handle, unpinIfLast)) {
+    if (callback->disconnectEffectHandle(handle, unpinIfLast)) {
         return mHandles.size();
     }
 
@@ -325,7 +360,7 @@
     ssize_t numHandles = removeHandle_l(handle);
     if ((numHandles == 0) && (!mPinned || unpinIfLast)) {
         mLock.unlock();
-        mCallback->updateOrphanEffectChains(this);
+        callback->updateOrphanEffectChains(this);
         mLock.lock();
     }
     return numHandles;
@@ -344,7 +379,7 @@
 }
 
 void AudioFlinger::EffectBase::checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) {
-    mCallback->checkSuspendOnEffectEnabled(this, enabled, threadLocked);
+    getCallback()->checkSuspendOnEffectEnabled(this, enabled, threadLocked);
 }
 
 static String8 effectFlagsToString(uint32_t flags) {
@@ -523,7 +558,8 @@
       mStatus(NO_INIT),
       mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
-      mOffloaded(false)
+      mOffloaded(false),
+      mAddedToHal(false)
 #ifdef FLOAT_EFFECT_CHAIN
       , mSupportsFloat(false)
 #endif
@@ -565,20 +601,6 @@
 
 }
 
-ssize_t AudioFlinger::EffectModule::removeHandle_l(EffectHandle *handle)
-{
-    ssize_t status = EffectBase::removeHandle_l(handle);
-
-    // Prevent calls to process() and other functions on effect interface from now on.
-    // The effect engine will be released by the destructor when the last strong reference on
-    // this object is released which can happen after next process is called.
-    if (status == 0 && !mPinned) {
-        mEffectInterface->close();
-    }
-
-    return status;
-}
-
 bool AudioFlinger::EffectModule::updateState() {
     Mutex::Autolock _l(mLock);
 
@@ -816,7 +838,7 @@
                 mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
         // If an insert effect is idle and input buffer is different from output buffer,
         // accumulate input onto output
-        if (mCallback->activeTrackCnt() != 0) {
+        if (getCallback()->activeTrackCnt() != 0) {
             // similar handling with data_bypass above.
             if (mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
                 accumulateInputToOutput();
@@ -841,6 +863,7 @@
     status_t status;
     uint32_t size;
     audio_channel_mask_t channelMask;
+    sp<EffectCallbackInterface> callback;
 
     if (mEffectInterface == 0) {
         status = NO_INIT;
@@ -851,7 +874,8 @@
     // TODO: handle configuration of input (record) SW effects above the HAL,
     // similar to output EFFECT_FLAG_TYPE_INSERT/REPLACE,
     // in which case input channel masks should be used here.
-    channelMask = mCallback->channelMask();
+    callback = getCallback();
+    channelMask = callback->channelMask();
     mConfig.inputCfg.channels = channelMask;
     mConfig.outputCfg.channels = channelMask;
 
@@ -879,6 +903,11 @@
         }
 #endif
     }
+    if (isHapticGenerator()) {
+        audio_channel_mask_t hapticChannelMask = callback->hapticChannelMask();
+        mConfig.inputCfg.channels |= hapticChannelMask;
+        mConfig.outputCfg.channels |= hapticChannelMask;
+    }
     mInChannelCountRequested =
             audio_channel_count_from_out_mask(mConfig.inputCfg.channels);
     mOutChannelCountRequested =
@@ -888,11 +917,11 @@
     mConfig.outputCfg.format = EFFECT_BUFFER_FORMAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
-    if (mCallback->isOffloadOrDirect() && !isOffloaded()) {
+    if (callback->isOffloadOrDirect() && !isOffloaded()) {
         mConfig.inputCfg.samplingRate = DEFAULT_OUTPUT_SAMPLE_RATE;
         ALOGV("Overriding effect input as 48kHz");
     } else {
-        mConfig.inputCfg.samplingRate = mCallback->sampleRate();
+        mConfig.inputCfg.samplingRate = callback->sampleRate();
     }
     mConfig.outputCfg.samplingRate = mConfig.inputCfg.samplingRate;
     mConfig.inputCfg.bufferProvider.cookie = NULL;
@@ -918,11 +947,11 @@
     }
     mConfig.inputCfg.mask = EFFECT_CONFIG_ALL;
     mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
-    mConfig.inputCfg.buffer.frameCount = mCallback->frameCount();
+    mConfig.inputCfg.buffer.frameCount = callback->frameCount();
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
 
     ALOGV("configure() %p chain %p buffer %p framecount %zu",
-          this, mCallback->chain().promote().get(),
+          this, callback->chain().promote().get(),
           mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
 
     status_t cmdStatus;
@@ -938,7 +967,7 @@
 
 #ifdef MULTICHANNEL_EFFECT_CHAIN
     if (status != NO_ERROR &&
-            mCallback->isOutput() &&
+            callback->isOutput() &&
             (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
                     || mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
         // Older effects may require exact STEREO position mask.
@@ -1005,7 +1034,7 @@
             size = sizeof(int);
             *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY;
 
-            uint32_t latency = mCallback->latency();
+            uint32_t latency = callback->latency();
 
             *((int32_t *)p->data + 1)= latency;
             mEffectInterface->command(EFFECT_CMD_SET_PARAM,
@@ -1052,7 +1081,12 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
          (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        (void)mCallback->addEffectToHal(mEffectInterface);
+        if (mAddedToHal) {
+            return;
+        }
+
+        (void)getCallback()->addEffectToHal(mEffectInterface);
+        mAddedToHal = true;
     }
 }
 
@@ -1065,7 +1099,7 @@
         status = start_l();
     }
     if (status == NO_ERROR) {
-        mCallback->resetVolume();
+        getCallback()->resetVolume();
     }
     return status;
 }
@@ -1115,7 +1149,7 @@
         // We have the EffectChain and EffectModule lock, permit a reentrant call to setVolume:
         // resetVolume_l --> setVolume_l --> EffectModule::setVolume
         mSetVolumeReentrantTid = gettid();
-        mCallback->resetVolume();
+        getCallback()->resetVolume();
         mSetVolumeReentrantTid = INVALID_PID;
     }
 
@@ -1148,7 +1182,12 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
              (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        mCallback->removeEffectFromHal(mEffectInterface);
+        if (!mAddedToHal) {
+            return NO_ERROR;
+        }
+
+        getCallback()->removeEffectFromHal(mEffectInterface);
+        mAddedToHal = false;
     }
     return NO_ERROR;
 }
@@ -1160,11 +1199,10 @@
     return remainder == 0 ? 0 : divisor - remainder;
 }
 
-status_t AudioFlinger::EffectModule::command(uint32_t cmdCode,
-                                             uint32_t cmdSize,
-                                             void *pCmdData,
-                                             uint32_t *replySize,
-                                             void *pReplyData)
+status_t AudioFlinger::EffectModule::command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply)
 {
     Mutex::Autolock _l(mLock);
     ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
@@ -1175,63 +1213,68 @@
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
+    if (maxReplySize < 0 || maxReplySize > EFFECT_PARAM_SIZE_MAX) {
+        return -EINVAL;
+    }
+    size_t cmdSize = cmdData.size();
+    const effect_param_t* param = cmdSize >= sizeof(effect_param_t)
+                                  ? reinterpret_cast<const effect_param_t*>(cmdData.data())
+                                  : nullptr;
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (sizeof(effect_param_t) > cmdSize ||
-                    ((effect_param_t *)pCmdData)->psize > cmdSize
-                                                          - sizeof(effect_param_t))) {
+            (param == nullptr || param->psize > cmdSize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "32438594");
         android_errorWriteLog(0x534e4554, "33003822");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (*replySize < sizeof(effect_param_t) ||
-                    ((effect_param_t *)pCmdData)->psize > *replySize - sizeof(effect_param_t))) {
+            (maxReplySize < sizeof(effect_param_t) ||
+                   param->psize > maxReplySize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "29251553");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-        (sizeof(effect_param_t) > *replySize
-          || ((effect_param_t *)pCmdData)->psize > *replySize
-                                                   - sizeof(effect_param_t)
-          || ((effect_param_t *)pCmdData)->vsize > *replySize
-                                                   - sizeof(effect_param_t)
-                                                   - ((effect_param_t *)pCmdData)->psize
-          || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
-                                                   *replySize
-                                                   - sizeof(effect_param_t)
-                                                   - ((effect_param_t *)pCmdData)->psize
-                                                   - ((effect_param_t *)pCmdData)->vsize)) {
+            (sizeof(effect_param_t) > maxReplySize
+                    || param->psize > maxReplySize - sizeof(effect_param_t)
+                    || param->vsize > maxReplySize - sizeof(effect_param_t)
+                            - param->psize
+                    || roundUpDelta(param->psize, (uint32_t) sizeof(int)) >
+                            maxReplySize
+                                    - sizeof(effect_param_t)
+                                    - param->psize
+                                    - param->vsize)) {
         ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: reply size inconsistent");
                      android_errorWriteLog(0x534e4554, "32705438");
         return -EINVAL;
     }
     if ((cmdCode == EFFECT_CMD_SET_PARAM
-            || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED) &&  // DEFERRED not generally used
-        (sizeof(effect_param_t) > cmdSize
-            || ((effect_param_t *)pCmdData)->psize > cmdSize
-                                                     - sizeof(effect_param_t)
-            || ((effect_param_t *)pCmdData)->vsize > cmdSize
-                                                     - sizeof(effect_param_t)
-                                                     - ((effect_param_t *)pCmdData)->psize
-            || roundUpDelta(((effect_param_t *)pCmdData)->psize, (uint32_t)sizeof(int)) >
-                                                     cmdSize
-                                                     - sizeof(effect_param_t)
-                                                     - ((effect_param_t *)pCmdData)->psize
-                                                     - ((effect_param_t *)pCmdData)->vsize)) {
+            || cmdCode == EFFECT_CMD_SET_PARAM_DEFERRED)
+            &&  // DEFERRED not generally used
+                    (param == nullptr
+                            || param->psize > cmdSize - sizeof(effect_param_t)
+                            || param->vsize > cmdSize - sizeof(effect_param_t)
+                                    - param->psize
+                            || roundUpDelta(param->psize,
+                                            (uint32_t) sizeof(int)) >
+                                    cmdSize
+                                            - sizeof(effect_param_t)
+                                            - param->psize
+                                            - param->vsize)) {
         android_errorWriteLog(0x534e4554, "30204301");
         return -EINVAL;
     }
+    uint32_t replySize = maxReplySize;
+    reply->resize(replySize);
     status_t status = mEffectInterface->command(cmdCode,
                                                 cmdSize,
-                                                pCmdData,
-                                                replySize,
-                                                pReplyData);
+                                                const_cast<uint8_t*>(cmdData.data()),
+                                                &replySize,
+                                                reply->data());
+    reply->resize(status == NO_ERROR ? replySize : 0);
     if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) {
-        uint32_t size = (replySize == NULL) ? 0 : *replySize;
         for (size_t i = 1; i < mHandles.size(); i++) {
             EffectHandle *h = mHandles[i];
             if (h != NULL && !h->disconnected()) {
-                h->commandExecuted(cmdCode, cmdSize, pCmdData, size, pReplyData);
+                h->commandExecuted(cmdCode, cmdData, *reply);
             }
         }
     }
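
The checks above defend against malformed effect_param_t payloads whose variable-length psize/vsize fields could point past the end of the command buffer. A self-contained sketch of the invariant being enforced, under the assumption that the blob is a 12-byte header followed by psize parameter bytes, padding to a 4-byte boundary, then vsize value bytes:

    #include <cstddef>
    #include <cstdint>

    static uint32_t pad4(uint32_t n) { return (4 - (n % 4)) % 4; }  // same intent as roundUpDelta(n, 4)

    // True when header + psize + padding + vsize fits inside a buffer of cmdSize bytes.
    static bool fitsInBuffer(uint32_t psize, uint32_t vsize, size_t cmdSize) {
        const size_t header = 3 * sizeof(uint32_t);   // status + psize + vsize fields
        return cmdSize >= header &&
               psize <= cmdSize - header &&
               vsize <= cmdSize - header - psize &&
               pad4(psize) <= cmdSize - header - psize - vsize;
    }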
@@ -1260,7 +1303,7 @@
 
 bool AudioFlinger::EffectModule::isOffloadedOrDirect() const
 {
-    return mCallback->isOffloadOrDirect();
+    return getCallback()->isOffloadOrDirect();
 }
 
 bool AudioFlinger::EffectModule::isVolumeControlEnabled() const
@@ -1304,7 +1347,7 @@
                 || size > mInConversionBuffer->getSize())) {
             mInConversionBuffer.clear();
             ALOGV("%s: allocating mInConversionBuffer %zu", __func__, size);
-            (void)mCallback->allocateHalBuffer(size, &mInConversionBuffer);
+            (void)getCallback()->allocateHalBuffer(size, &mInConversionBuffer);
         }
         if (mInConversionBuffer != nullptr) {
             mInConversionBuffer->setFrameCount(inFrameCount);
@@ -1348,7 +1391,7 @@
                 || size > mOutConversionBuffer->getSize())) {
             mOutConversionBuffer.clear();
             ALOGV("%s: allocating mOutConversionBuffer %zu", __func__, size);
-            (void)mCallback->allocateHalBuffer(size, &mOutConversionBuffer);
+            (void)getCallback()->allocateHalBuffer(size, &mOutConversionBuffer);
         }
         if (mOutConversionBuffer != nullptr) {
             mOutConversionBuffer->setFrameCount(outFrameCount);
@@ -1522,6 +1565,69 @@
     return mOffloaded;
 }
 
+/*static*/
+bool AudioFlinger::EffectModule::isHapticGenerator(const effect_uuid_t *type) {
+    return memcmp(type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0;
+}
+
+bool AudioFlinger::EffectModule::isHapticGenerator() const {
+    return isHapticGenerator(&mDescriptor.type);
+}
+
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+{
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    if (!isHapticGenerator()) {
+        ALOGW("Should not set haptic intensity for effects that are not HapticGenerator");
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = sizeof(int32_t) * 2;
+    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
+    *((int32_t*)param->data + 1) = id;
+    *((int32_t*)param->data + 2) = intensity;
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != 4);
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
+
+status_t AudioFlinger::EffectModule::setVibratorInfo(const media::AudioVibratorInfo* vibratorInfo)
+{
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    if (!isHapticGenerator()) {
+        ALOGW("Should not set vibrator info for effects that are not HapticGenerator");
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(
+            sizeof(effect_param_t) + sizeof(int32_t) + 2 * sizeof(float));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = 2 * sizeof(float);
+    *(int32_t*)param->data = HG_PARAM_VIBRATOR_INFO;
+    float* vibratorInfoPtr = reinterpret_cast<float*>(param->data + sizeof(int32_t));
+    vibratorInfoPtr[0] = vibratorInfo->resonantFrequency;
+    vibratorInfoPtr[1] = vibratorInfo->qFactor;
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != sizeof(status_t));
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
+
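Both haptic setters above pack an effect_param_t whose data[] begins with the parameter id followed by the values; for setHapticIntensity() the layout is roughly data[0..3] = HG_PARAM_HAPTIC_INTENSITY, data[4..7] = id, data[8..11] = intensity. A hedged caller-side sketch follows; `effect`, `trackId` and `intensity` are illustrative locals, not code from this patch.

    // Assumes `effect` is an sp<EffectModule> wrapping a HapticGenerator instance.
    if (effect->isHapticGenerator()) {
        const status_t res = effect->setHapticIntensity(trackId, intensity);
        ALOGW_IF(res != NO_ERROR, "%s: failed to set haptic intensity: %d", __func__, res);
    }
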
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -1600,9 +1706,9 @@
 #define LOG_TAG "AudioFlinger::EffectHandle"
 
 AudioFlinger::EffectHandle::EffectHandle(const sp<EffectBase>& effect,
-                                        const sp<AudioFlinger::Client>& client,
-                                        const sp<IEffectClient>& effectClient,
-                                        int32_t priority)
+                                         const sp<AudioFlinger::Client>& client,
+                                         const sp<media::IEffectClient>& effectClient,
+                                         int32_t priority)
     : BnEffect(),
     mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false)
@@ -1636,20 +1742,24 @@
     return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
 }
 
-status_t AudioFlinger::EffectHandle::enable()
+#define RETURN(code)            \
+  do {                          \
+    *_aidl_return = (code);     \
+    return Status::ok();        \
+  } while (false)
+
+Status AudioFlinger::EffectHandle::enable(int32_t* _aidl_return)
 {
     AutoMutex _l(mLock);
     ALOGV("enable %p", this);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     if (!mHasControl) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     if (mEnabled) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     mEnabled = true;
@@ -1657,54 +1767,55 @@
     status_t status = effect->updatePolicyState();
     if (status != NO_ERROR) {
         mEnabled = false;
-        return status;
+        RETURN(status);
     }
 
     effect->checkSuspendOnEffectEnabled(true, false /*threadLocked*/);
 
     // checkSuspendOnEffectEnabled() can suspend this same effect when enabled
     if (effect->suspended()) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     status = effect->setEnabled(true, true /*fromHandle*/);
     if (status != NO_ERROR) {
         mEnabled = false;
     }
-    return status;
+    RETURN(status);
 }
 
-status_t AudioFlinger::EffectHandle::disable()
+Status AudioFlinger::EffectHandle::disable(int32_t* _aidl_return)
 {
     ALOGV("disable %p", this);
     AutoMutex _l(mLock);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     if (!mHasControl) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     if (!mEnabled) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
     mEnabled = false;
 
     effect->updatePolicyState();
 
     if (effect->suspended()) {
-        return NO_ERROR;
+        RETURN(NO_ERROR);
     }
 
     status_t status = effect->setEnabled(false, true /*fromHandle*/);
-    return status;
+    RETURN(status);
 }
 
-void AudioFlinger::EffectHandle::disconnect()
+Status AudioFlinger::EffectHandle::disconnect()
 {
     ALOGV("%s %p", __FUNCTION__, this);
     disconnect(true);
+    return Status::ok();
 }
 
 void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast)
@@ -1741,11 +1852,16 @@
     }
 }
 
-status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode,
-                                             uint32_t cmdSize,
-                                             void *pCmdData,
-                                             uint32_t *replySize,
-                                             void *pReplyData)
+Status AudioFlinger::EffectHandle::getCblk(media::SharedFileRegion* _aidl_return) {
+    LOG_ALWAYS_FATAL_IF(!convertIMemoryToSharedFileRegion(mCblkMemory, _aidl_return));
+    return Status::ok();
+}
+
+Status AudioFlinger::EffectHandle::command(int32_t cmdCode,
+                       const std::vector<uint8_t>& cmdData,
+                       int32_t maxResponseSize,
+                       std::vector<uint8_t>* response,
+                       int32_t* _aidl_return)
 {
     ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p",
             cmdCode, mHasControl, mEffect.unsafe_get());
@@ -1765,49 +1881,46 @@
                 break;
             }
             android_errorWriteLog(0x534e4554, "62019992");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
     }
 
     if (cmdCode == EFFECT_CMD_ENABLE) {
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
-        return enable();
+        writeToBuffer(NO_ERROR, response);
+        return enable(_aidl_return);
     } else if (cmdCode == EFFECT_CMD_DISABLE) {
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
-        return disable();
+        writeToBuffer(NO_ERROR, response);
+        return disable(_aidl_return);
     }
 
     AutoMutex _l(mLock);
     sp<EffectBase> effect = mEffect.promote();
     if (effect == 0 || mDisconnected) {
-        return DEAD_OBJECT;
+        RETURN(DEAD_OBJECT);
     }
     // only get parameter command is permitted for applications not controlling the effect
     if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) {
-        return INVALID_OPERATION;
+        RETURN(INVALID_OPERATION);
     }
 
     // handle commands that are not forwarded transparently to effect engine
     if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) {
         if (mClient == 0) {
-            return INVALID_OPERATION;
+            RETURN(INVALID_OPERATION);
         }
 
-        if (*replySize < sizeof(int)) {
+        if (maxResponseSize < sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
-        *(int *)pReplyData = NO_ERROR;
-        *replySize = sizeof(int);
+        writeToBuffer(NO_ERROR, response);
 
         // No need to trylock() here as this function is executed in the binder thread serving a
         // particular client process:  no risk to block the whole media server process or mixer
@@ -1820,10 +1933,10 @@
             serverIndex > EFFECT_PARAM_BUFFER_SIZE) {
             mCblk->serverIndex = 0;
             mCblk->clientIndex = 0;
-            return BAD_VALUE;
+            RETURN(BAD_VALUE);
         }
         status_t status = NO_ERROR;
-        effect_param_t *param = NULL;
+        std::vector<uint8_t> param;
         for (uint32_t index = serverIndex; index < clientIndex;) {
             int *p = (int *)(mBuffer + index);
             const int size = *p++;
@@ -1835,23 +1948,16 @@
                 break;
             }
 
-            // copy to local memory in case of client corruption b/32220769
-            auto *newParam = (effect_param_t *)realloc(param, size);
-            if (newParam == NULL) {
-                ALOGW("command(): out of memory");
-                status = NO_MEMORY;
-                break;
-            }
-            param = newParam;
-            memcpy(param, p, size);
+            // copy to local memory in case of client corruption b/32220769
+            param.clear();
+            std::copy(reinterpret_cast<const uint8_t*>(p),
+                      reinterpret_cast<const uint8_t*>(p) + size,
+                      std::back_inserter(param));
 
-            int reply = 0;
-            uint32_t rsize = sizeof(reply);
+            std::vector<uint8_t> replyBuffer;
             status_t ret = effect->command(EFFECT_CMD_SET_PARAM,
-                                            size,
                                             param,
-                                            &rsize,
-                                            &reply);
+                                            sizeof(int),
+                                            &replyBuffer);
+            int reply = replyBuffer.size() >= sizeof(int)
+                    ? *reinterpret_cast<const int*>(replyBuffer.data()) : NO_ERROR;
 
             // verify shared memory: server index shouldn't change; client index can't go back.
             if (serverIndex != mCblk->serverIndex
@@ -1864,21 +1970,24 @@
             // stop at first error encountered
             if (ret != NO_ERROR) {
                 status = ret;
-                *(int *)pReplyData = reply;
+                writeToBuffer(reply, response);
                 break;
             } else if (reply != NO_ERROR) {
-                *(int *)pReplyData = reply;
+                writeToBuffer(reply, response);
                 break;
             }
             index += size;
         }
-        free(param);
         mCblk->serverIndex = 0;
         mCblk->clientIndex = 0;
-        return status;
+        RETURN(status);
     }
 
-    return effect->command(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+    status_t status = effect->command(cmdCode,
+                                      cmdData,
+                                      maxResponseSize,
+                                      response);
+    RETURN(status);
 }
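command() serializes small scalar replies through writeToBuffer(), whose definition lies outside this hunk. Below is a minimal sketch of what such a helper could look like; this is an assumption for illustration, not the patch's actual definition, and the name is changed to make that explicit.

    #include <cstdint>
    #include <type_traits>
    #include <vector>

    // Hypothetical stand-in for writeToBuffer(): append the raw bytes of a
    // trivially copyable value (e.g. a status_t) to the reply vector.
    template <typename T>
    static void writeToBufferSketch(const T& value, std::vector<uint8_t>* buffer) {
        static_assert(std::is_trivially_copyable<T>::value, "raw byte copy only");
        const uint8_t* begin = reinterpret_cast<const uint8_t*>(&value);
        buffer->insert(buffer->end(), begin, begin + sizeof(T));
    }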
 
 void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled)
@@ -1894,13 +2003,11 @@
 }
 
 void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode,
-                                                 uint32_t cmdSize,
-                                                 void *pCmdData,
-                                                 uint32_t replySize,
-                                                 void *pReplyData)
+                         const std::vector<uint8_t>& cmdData,
+                         const std::vector<uint8_t>& replyData)
 {
     if (mEffectClient != 0) {
-        mEffectClient->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+        mEffectClient->commandExecuted(cmdCode, cmdData, replyData);
     }
 }
 
@@ -1913,13 +2020,6 @@
     }
 }
 
-status_t AudioFlinger::EffectHandle::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    return BnEffect::onTransact(code, data, reply, flags);
-}
-
-
 void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
@@ -2385,6 +2485,25 @@
     }
 }
 
+// containsHapticGeneratingEffect_l must be called with ThreadBase::mLock or EffectChain::mLock held
+bool AudioFlinger::EffectChain::containsHapticGeneratingEffect_l()
+{
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        if (mEffects[i]->isHapticGenerator()) {
+            return true;
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+{
+    Mutex::Autolock _l(mLock);
+    for (size_t i = 0; i < mEffects.size(); ++i) {
+        mEffects[i]->setHapticIntensity(id, intensity);
+    }
+}
+
 void AudioFlinger::EffectChain::syncHalEffectsState()
 {
     Mutex::Autolock _l(mLock);
@@ -2715,11 +2834,7 @@
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
-    sp<AudioFlinger> af = mAudioFlinger.promote();
-    if (af == nullptr) {
-        return status;
-    }
-    sp<EffectsFactoryHalInterface> effectsFactory = af->getEffectsFactory();
+    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
     if (effectsFactory != 0) {
         status = effectsFactory->createEffect(pEffectUuid, sessionId, io(), deviceId, effect);
     }
@@ -2728,25 +2843,19 @@
 
 bool AudioFlinger::EffectChain::EffectCallback::updateOrphanEffectChains(
         const sp<AudioFlinger::EffectBase>& effect) {
-    sp<AudioFlinger> af = mAudioFlinger.promote();
-    if (af == nullptr) {
-        return false;
-    }
     // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
-    return af->updateOrphanEffectChains(effect->asEffectModule());
+    return mAudioFlinger.updateOrphanEffectChains(effect->asEffectModule());
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::allocateHalBuffer(
         size_t size, sp<EffectBufferHalInterface>* buffer) {
-    sp<AudioFlinger> af = mAudioFlinger.promote();
-    LOG_ALWAYS_FATAL_IF(af == nullptr, "allocateHalBuffer() could not retrieved audio flinger");
-    return af->mEffectsFactoryHal->allocateBuffer(size, buffer);
+    return mAudioFlinger.mEffectsFactoryHal->allocateBuffer(size, buffer);
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::addEffectToHal(
         sp<EffectHalInterface> effect) {
     status_t result = NO_INIT;
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return result;
     }
@@ -2762,7 +2871,7 @@
 status_t AudioFlinger::EffectChain::EffectCallback::removeEffectFromHal(
         sp<EffectHalInterface> effect) {
     status_t result = NO_INIT;
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return result;
     }
@@ -2776,7 +2885,7 @@
 }
 
 audio_io_handle_t AudioFlinger::EffectChain::EffectCallback::io() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_IO_HANDLE_NONE;
     }
@@ -2784,7 +2893,7 @@
 }
 
 bool AudioFlinger::EffectChain::EffectCallback::isOutput() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return true;
     }
@@ -2792,7 +2901,7 @@
 }
 
 bool AudioFlinger::EffectChain::EffectCallback::isOffload() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return false;
     }
@@ -2800,7 +2909,7 @@
 }
 
 bool AudioFlinger::EffectChain::EffectCallback::isOffloadOrDirect() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return false;
     }
@@ -2808,7 +2917,7 @@
 }
 
 bool AudioFlinger::EffectChain::EffectCallback::isOffloadOrMmap() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return false;
     }
@@ -2816,7 +2925,7 @@
 }
 
 uint32_t AudioFlinger::EffectChain::EffectCallback::sampleRate() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
@@ -2824,7 +2933,7 @@
 }
 
 audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::channelMask() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return AUDIO_CHANNEL_NONE;
     }
@@ -2832,15 +2941,23 @@
 }
 
 uint32_t AudioFlinger::EffectChain::EffectCallback::channelCount() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
     return t->channelCount();
 }
 
+audio_channel_mask_t AudioFlinger::EffectChain::EffectCallback::hapticChannelMask() const {
+    sp<ThreadBase> t = thread().promote();
+    if (t == nullptr) {
+        return AUDIO_CHANNEL_NONE;
+    }
+    return t->hapticChannelMask();
+}
+
 size_t AudioFlinger::EffectChain::EffectCallback::frameCount() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
@@ -2848,7 +2965,7 @@
 }
 
 uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
@@ -2856,7 +2973,7 @@
 }
 
 void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
@@ -2865,13 +2982,13 @@
 
 void AudioFlinger::EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
         const sp<EffectBase>& effect, bool enabled, bool threadLocked) {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
     t->checkSuspendOnEffectEnabled(enabled, effect->sessionId(), threadLocked);
 
-    sp<EffectChain> c = mChain.promote();
+    sp<EffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
     }
@@ -2880,7 +2997,7 @@
 }
 
 void AudioFlinger::EffectChain::EffectCallback::onEffectEnable(const sp<EffectBase>& effect) {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
@@ -2891,7 +3008,7 @@
 void AudioFlinger::EffectChain::EffectCallback::onEffectDisable(const sp<EffectBase>& effect) {
     checkSuspendOnEffectEnabled(effect, false, false /*threadLocked*/);
 
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
     }
@@ -2900,7 +3017,7 @@
 
 bool AudioFlinger::EffectChain::EffectCallback::disconnectEffectHandle(EffectHandle *handle,
                                                       bool unpinIfLast) {
-    sp<ThreadBase> t = mThread.promote();
+    sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return false;
     }
@@ -2909,7 +3026,7 @@
 }
 
 void AudioFlinger::EffectChain::EffectCallback::resetVolume() {
-    sp<EffectChain> c = mChain.promote();
+    sp<EffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
     }
@@ -2917,8 +3034,8 @@
 
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::strategy() const {
-    sp<EffectChain> c = mChain.promote();
+product_strategy_t AudioFlinger::EffectChain::EffectCallback::strategy() const {
+    sp<EffectChain> c = chain().promote();
     if (c == nullptr) {
         return PRODUCT_STRATEGY_NONE;
     }
@@ -2926,7 +3043,7 @@
 }
 
 int32_t AudioFlinger::EffectChain::EffectCallback::activeTrackCnt() const {
-    sp<EffectChain> c = mChain.promote();
+    sp<EffectChain> c = chain().promote();
     if (c == nullptr) {
         return 0;
     }
@@ -2943,10 +3060,14 @@
     Mutex::Autolock _l(mProxyLock);
     if (status == NO_ERROR) {
         for (auto& handle : mEffectHandles) {
+            Status bs;
             if (enabled) {
-                status = handle.second->enable();
+                bs = handle.second->enable(&status);
             } else {
-                status = handle.second->disable();
+                bs = handle.second->disable(&status);
+            }
+            if (!bs.isOk()) {
+                status = statusTFromBinderStatus(bs);
             }
         }
     }
@@ -3005,7 +3126,7 @@
             __func__, port->type, port->ext.device.type,
             port->ext.device.address, port->id, patch.isSoftware());
     if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
-        || port->ext.device.address != mDevice.mAddress) {
+        || port->ext.device.address != mDevice.address()) {
         return NAME_NOT_FOUND;
     }
     status_t status = NAME_NOT_FOUND;
@@ -3054,10 +3175,14 @@
         status = BAD_VALUE;
     }
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
+        Status bs;
         if (isEnabled()) {
-            (*handle)->enable();
+            bs = (*handle)->enable(&status);
         } else {
-            (*handle)->disable();
+            bs = (*handle)->disable(&status);
+        }
+        if (!bs.isOk()) {
+            status = statusTFromBinderStatus(bs);
         }
     }
     return status;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 2826297..a727e04 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -36,6 +36,7 @@
     virtual uint32_t sampleRate() const = 0;
     virtual audio_channel_mask_t channelMask() const = 0;
     virtual uint32_t channelCount() const = 0;
+    virtual audio_channel_mask_t hapticChannelMask() const = 0;
     virtual size_t frameCount() const = 0;
 
     // Non trivial methods usually implemented with help from ThreadBase:
@@ -58,7 +59,7 @@
     virtual bool updateOrphanEffectChains(const sp<EffectBase>& effect) = 0;
 
     // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
-    virtual uint32_t strategy() const = 0;
+    virtual product_strategy_t strategy() const = 0;
     virtual int32_t activeTrackCnt() const = 0;
     virtual void resetVolume() = 0;
 
@@ -132,19 +133,19 @@
     void             setSuspended(bool suspended);
     bool             suspended() const;
 
-    virtual status_t command(uint32_t cmdCode __unused,
-                 uint32_t cmdSize __unused,
-                 void *pCmdData __unused,
-                 uint32_t *replySize __unused,
-                 void *pReplyData __unused) { return NO_ERROR; };
+    virtual status_t command(int32_t __unused,
+                             const std::vector<uint8_t>& __unused,
+                             int32_t __unused,
+                             std::vector<uint8_t>* __unused) { return NO_ERROR; };
 
+    // mCallback is atomic so this can be lock-free.
     void setCallback(const sp<EffectCallbackInterface>& callback) { mCallback = callback; }
-    sp<EffectCallbackInterface>&     callback() { return mCallback; }
+    sp<EffectCallbackInterface> getCallback() const { return mCallback.load(); }
 
     status_t addHandle(EffectHandle *handle);
     ssize_t disconnectHandle(EffectHandle *handle, bool unpinIfLast);
     ssize_t removeHandle(EffectHandle *handle);
-    virtual ssize_t removeHandle_l(EffectHandle *handle);
+    ssize_t removeHandle_l(EffectHandle *handle);
     EffectHandle* controlHandle_l();
     bool purgeHandles();
 
@@ -170,7 +171,7 @@
     DISALLOW_COPY_AND_ASSIGN(EffectBase);
 
 mutable Mutex                 mLock;      // mutex for process, commands and handles list protection
-    sp<EffectCallbackInterface> mCallback; // parent effect chain
+    mediautils::atomic_sp<EffectCallbackInterface> mCallback; // parent effect chain
     const int                 mId;        // this instance unique ID
     const audio_session_t     mSessionId; // audio session ID
     const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine
@@ -213,11 +214,10 @@
 
     void process();
     bool updateState();
-    status_t command(uint32_t cmdCode,
-                     uint32_t cmdSize,
-                     void *pCmdData,
-                     uint32_t *replySize,
-                     void *pReplyData) override;
+    status_t command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) override;
 
     void reset_l();
     status_t configure();
@@ -240,8 +240,6 @@
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
 
-    ssize_t removeHandle_l(EffectHandle *handle) override;
-
     status_t         setDevices(const AudioDeviceTypeAddrVector &devices);
     status_t         setInputDevice(const AudioDeviceTypeAddr &device);
     status_t         setVolume(uint32_t *left, uint32_t *right, bool controller);
@@ -257,6 +255,12 @@
 
     sp<EffectModule> asEffectModule() override { return this; }
 
+    static bool      isHapticGenerator(const effect_uuid_t* type);
+    bool             isHapticGenerator() const;
+
+    status_t         setHapticIntensity(int id, int intensity);
+    status_t         setVibratorInfo(const media::AudioVibratorInfo* vibratorInfo);
+
     void             dump(int fd, const Vector<String16>& args);
 
 private:
@@ -282,6 +286,7 @@
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
+    bool     mAddedToHal;           // effect has been added to the audio HAL
 
 #ifdef FLOAT_EFFECT_CHAIN
     bool    mSupportsFloat;         // effect supports float processing
@@ -316,32 +321,29 @@
 // There is one EffectHandle object for each application controlling (or using)
 // an effect module.
 // The EffectHandle is obtained by calling AudioFlinger::createEffect().
-class EffectHandle: public android::BnEffect {
+class EffectHandle: public android::media::BnEffect {
 public:
 
     EffectHandle(const sp<EffectBase>& effect,
             const sp<AudioFlinger::Client>& client,
-            const sp<IEffectClient>& effectClient,
+            const sp<media::IEffectClient>& effectClient,
             int32_t priority);
     virtual ~EffectHandle();
     virtual status_t initCheck();
 
     // IEffect
-    virtual status_t enable();
-    virtual status_t disable();
-    virtual status_t command(uint32_t cmdCode,
-                             uint32_t cmdSize,
-                             void *pCmdData,
-                             uint32_t *replySize,
-                             void *pReplyData);
-    virtual void disconnect();
-private:
-            void disconnect(bool unpinIfLast);
-public:
-    virtual sp<IMemory> getCblk() const { return mCblkMemory; }
-    virtual status_t onTransact(uint32_t code, const Parcel& data,
-            Parcel* reply, uint32_t flags);
+    android::binder::Status enable(int32_t* _aidl_return) override;
+    android::binder::Status disable(int32_t* _aidl_return) override;
+    android::binder::Status command(int32_t cmdCode,
+                                    const std::vector<uint8_t>& cmdData,
+                                    int32_t maxResponseSize,
+                                    std::vector<uint8_t>* response,
+                                    int32_t* _aidl_return) override;
+    android::binder::Status disconnect() override;
+    android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) override;
 
+private:
+    void disconnect(bool unpinIfLast);
 
     // Give or take control of effect module
     // - hasControl: true if control is given, false if removed
@@ -349,10 +351,8 @@
     // - enabled: state of the effect when control is passed
     void setControl(bool hasControl, bool signal, bool enabled);
     void commandExecuted(uint32_t cmdCode,
-                         uint32_t cmdSize,
-                         void *pCmdData,
-                         uint32_t replySize,
-                         void *pReplyData);
+                         const std::vector<uint8_t>& cmdData,
+                         const std::vector<uint8_t>& replyData);
     void setEnabled(bool enabled);
     bool enabled() const { return mEnabled; }
 
@@ -375,19 +375,20 @@
     friend class AudioFlinger;          // for mEffect, mHasControl, mEnabled
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    Mutex mLock;                        // protects IEffect method calls
-    wp<EffectBase> mEffect;           // pointer to controlled EffectModule
-    sp<IEffectClient> mEffectClient;    // callback interface for client notifications
-    /*const*/ sp<Client> mClient;       // client for shared memory allocation, see disconnect()
-    sp<IMemory>         mCblkMemory;    // shared memory for control block
-    effect_param_cblk_t* mCblk;         // control block for deferred parameter setting via
-                                        // shared memory
-    uint8_t*            mBuffer;        // pointer to parameter area in shared memory
-    int mPriority;                      // client application priority to control the effect
-    bool mHasControl;                   // true if this handle is controlling the effect
-    bool mEnabled;                      // cached enable state: needed when the effect is
-                                        // restored after being suspended
-    bool mDisconnected;                 // Set to true by disconnect()
+    Mutex mLock;                             // protects IEffect method calls
+    const wp<EffectBase> mEffect;            // pointer to controlled EffectModule
+    const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
+    /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
+                                             //   disconnect()
+    sp<IMemory> mCblkMemory;                 // shared memory for control block
+    effect_param_cblk_t* mCblk;              // control block for deferred parameter setting via
+                                             // shared memory
+    uint8_t* mBuffer;                        // pointer to parameter area in shared memory
+    int mPriority;                           // client application priority to control the effect
+    bool mHasControl;                        // true if this handle is controlling the effect
+    bool mEnabled;                           // cached enable state: needed when the effect is
+                                             // restored after being suspended
+    bool mDisconnected;                      // Set to true by disconnect()
 };
 
 // the EffectChain class represents a group of effects associated to one audio session.
@@ -467,8 +468,8 @@
     void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); }
     int32_t activeTrackCnt() const { return android_atomic_acquire_load(&mActiveTrackCnt); }
 
-    uint32_t strategy() const { return mStrategy; }
-    void setStrategy(uint32_t strategy)
+    product_strategy_t strategy() const { return mStrategy; }
+    void setStrategy(product_strategy_t strategy)
             { mStrategy = strategy; }
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
@@ -503,6 +504,10 @@
     // isCompatibleWithThread_l() must be called with thread->mLock held
     bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
 
+    bool containsHapticGeneratingEffect_l();
+
+    void setHapticIntensity_l(int id, int intensity);
+
     sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
     wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
 
@@ -510,14 +515,21 @@
 
 private:
 
+    // For transaction consistency, please consider holding the EffectChain lock before
+    // calling the EffectChain::EffectCallback methods, except for
+    // createEffectHal and allocateHalBuffer.
+    //
+    // This prevents migration of the EffectChain to another PlaybackThread
+    // for the purposes of the EffectCallback.
     class EffectCallback :  public EffectCallbackInterface {
     public:
         // Note: ctors taking a weak pointer to their owner must not promote it
         // during construction (but may keep a reference for later promotion).
         EffectCallback(const wp<EffectChain>& owner,
                        const wp<ThreadBase>& thread)
-            : mChain(owner) {
-            setThread(thread);
+            : mChain(owner)
+            , mThread(thread)
+            , mAudioFlinger(*gAudioFlinger) {
         }
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -534,6 +546,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override;
         size_t frameCount() const override;
         uint32_t latency() const override;
 
@@ -546,25 +559,23 @@
         void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
                               bool enabled, bool threadLocked) override;
         void resetVolume() override;
-        uint32_t strategy() const override;
+        product_strategy_t strategy() const override;
         int32_t activeTrackCnt() const override;
         void onEffectEnable(const sp<EffectBase>& effect) override;
         void onEffectDisable(const sp<EffectBase>& effect) override;
 
         wp<EffectChain> chain() const override { return mChain; }
 
-        wp<ThreadBase> thread() { return mThread; }
+        wp<ThreadBase> thread() const { return mThread.load(); }
 
         void setThread(const wp<ThreadBase>& thread) {
             mThread = thread;
-            sp<ThreadBase> p = thread.promote();
-            mAudioFlinger = p ? p->mAudioFlinger : nullptr;
         }
 
     private:
-        wp<EffectChain> mChain;
-        wp<ThreadBase> mThread;
-        wp<AudioFlinger> mAudioFlinger;
+        const wp<EffectChain> mChain;
+        mediautils::atomic_wp<ThreadBase> mThread;
+        AudioFlinger &mAudioFlinger;  // implementation detail: outer instance always exists.
     };
 
     friend class AudioFlinger;  // for mThread, mEffects
@@ -618,7 +629,7 @@
              uint32_t mRightVolume;      // previous volume on right channel
              uint32_t mNewLeftVolume;       // new volume on left channel
              uint32_t mNewRightVolume;      // new volume on right channel
-             uint32_t mStrategy; // strategy for this effect chain
+             product_strategy_t mStrategy; // strategy for this effect chain
              // mSuspendedEffects lists all effects currently suspended in the chain.
              // Use effect type UUID timelow field as key. There is no real risk of identical
              // timeLow fields among effect type UUIDs.
@@ -685,6 +696,7 @@
         uint32_t sampleRate() const override;
         audio_channel_mask_t channelMask() const override;
         uint32_t channelCount() const override;
+        audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
         size_t frameCount() const override  { return 0; }
         uint32_t latency() const override  { return 0; }
 
@@ -697,7 +709,7 @@
         void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
         void resetVolume() override {}
-        uint32_t strategy() const override  { return 0; }
+        product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
         int32_t activeTrackCnt() const override { return 0; }
         void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
         void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
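EffectBase now keeps its parent-chain callback in a mediautils::atomic_sp (mCallback above), so readers can take a strong reference without holding mLock. Below is a schematic usage sketch, assuming the declarations above are in scope; the function name is illustrative only.

    // Illustrative only: take a lock-free strong reference to the callback and
    // bail out if the effect is being torn down.
    void notifyParentChainSketch(const EffectBase& effect) {
        sp<EffectCallbackInterface> cb = effect.getCallback();  // atomic load, no mLock needed
        if (cb == nullptr) {
            return;
        }
        cb->resetVolume();
    }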
diff --git a/services/audioflinger/FastCapture.cpp b/services/audioflinger/FastCapture.cpp
index d6d6e25..2963202 100644
--- a/services/audioflinger/FastCapture.cpp
+++ b/services/audioflinger/FastCapture.cpp
@@ -107,7 +107,7 @@
             mSampleRate = Format_sampleRate(mFormat);
 #if !LOG_NDEBUG
             unsigned channelCount = Format_channelCount(mFormat);
-            ALOG_ASSERT(channelCount >= 1 && channelCount <= FCC_8);
+            ALOG_ASSERT(channelCount >= 1 && channelCount <= FCC_LIMIT);
 #endif
         }
         dumpState->mSampleRate = mSampleRate;
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 3eacc8c..88d4eaf 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -27,7 +27,6 @@
 
 #include "Configuration.h"
 #include <time.h>
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <system/audio.h>
@@ -40,6 +39,7 @@
 #include <audio_utils/channels.h>
 #include <audio_utils/format.h>
 #include <audio_utils/mono_blend.h>
+#include <cutils/bitops.h>
 #include <media/AudioMixer.h>
 #include "FastMixer.h"
 #include "TypedLogger.h"
@@ -48,6 +48,15 @@
 
 /*static*/ const FastMixerState FastMixer::sInitial;
 
+static audio_channel_mask_t getChannelMaskFromCount(size_t count) {
+    const audio_channel_mask_t mask = audio_channel_out_mask_from_count(count);
+    if (mask == AUDIO_CHANNEL_INVALID) {
+        // some counts have no positional masks. TODO: Update this to return index count?
+        return audio_channel_mask_for_index_assignment_from_count(count);
+    }
+    return mask;
+}
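getChannelMaskFromCount() prefers a positional output mask and falls back to an index-assignment mask when no positional mask exists for the requested count. A small hypothetical usage sketch follows, assuming this file's LOG_TAG and the standard <system/audio.h> helpers.

    // Illustrative only: derive a sink mask from a channel count and log which
    // representation (positional vs. index) was selected.
    static void logSinkMaskSketch(size_t sinkChannelCount) {
        const audio_channel_mask_t mask = getChannelMaskFromCount(sinkChannelCount);
        ALOGV("sink channels=%zu mask=%#x representation=%u",
              sinkChannelCount, (unsigned)mask,
              (unsigned)audio_channel_mask_get_representation(mask));
    }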
+
 FastMixer::FastMixer(audio_io_handle_t parentIoHandle)
     : FastThread("cycle_ms", "load_us"),
     // mFastTrackNames
@@ -79,7 +88,7 @@
     mDummyDumpState = &mDummyFastMixerDumpState;
     // TODO: Add channel mask to NBAIO_Format.
     // We assume that the channel mask must be a valid positional channel mask.
-    mSinkChannelMask = audio_channel_out_mask_from_count(mSinkChannelCount);
+    mSinkChannelMask = getChannelMaskFromCount(mSinkChannelCount);
 
     unsigned i;
     for (i = 0; i < FastMixerState::sMaxFastTracks; ++i) {
@@ -238,7 +247,7 @@
             LOG_ALWAYS_FATAL_IF(mSinkChannelCount > AudioMixer::MAX_NUM_CHANNELS);
 
             if (mSinkChannelMask == AUDIO_CHANNEL_NONE) {
-                mSinkChannelMask = audio_channel_out_mask_from_count(mSinkChannelCount);
+                mSinkChannelMask = getChannelMaskFromCount(mSinkChannelCount);
             }
             mAudioChannelCount = mSinkChannelCount - audio_channel_count_from_out_mask(
                     mSinkChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
@@ -353,7 +362,8 @@
 #endif
         //ALOGD("Eric FastMixer::onWork() mIsWarm");
     } else {
-        dumpState->mTimestampVerifier.discontinuity();
+        dumpState->mTimestampVerifier.discontinuity(
+            dumpState->mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
         // See comment in if block.
 #ifdef FASTMIXER_LOG_HIST_TS
         LOG_AUDIO_STATE();
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index a42e09c..3f20282 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -24,7 +24,6 @@
 #include <cpustats/ThreadCpuUsage.h>
 #endif
 #endif
-#include <utils/Debug.h>
 #include <utils/Log.h>
 #include "FastMixerDumpState.h"
 
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 396c797..857d3de 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -23,6 +23,7 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/nbaio/NBAIO.h>
 #include <media/nblog/NBLog.h>
+#include <vibrator/ExternalVibrationUtils.h>
 #include "FastThreadState.h"
 
 namespace android {
@@ -49,8 +50,7 @@
     audio_format_t          mFormat;         // track format
     int                     mGeneration;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE; // intensity of
-                                                                                     // haptic data
+    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
 };
 
 // Represents a single state of the fast mixer
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index b83f6b5..eb640bb 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -29,8 +29,7 @@
                             audio_channel_mask_t channelMask,
                             audio_session_t sessionId,
                             bool isOut,
-                            uid_t uid,
-                            pid_t pid,
+                            const android::content::AttributionSourceState& attributionSource,
                             pid_t creatorPid,
                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual             ~MmapTrack();
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index d02d9e0..034d161 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
+gkasten@google.com
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
-gkasten@google.com
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index b58fd8b..a381c7d 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -25,6 +25,7 @@
 
 #include "AudioFlinger.h"
 #include <media/AudioParameter.h>
+#include <media/AudioValidator.h>
 #include <media/DeviceDescriptorBase.h>
 #include <media/PatchBuilder.h>
 #include <mediautils/ServiceUtilities.h>
@@ -55,8 +56,12 @@
 }
 
 /* Get supported attributes for a given audio port */
-status_t AudioFlinger::getAudioPort(struct audio_port *port)
-{
+status_t AudioFlinger::getAudioPort(struct audio_port_v7 *port) {
+    status_t status = AudioValidator::validateAudioPort(*port);
+    if (status != NO_ERROR) {
+        return status;
+    }
+
     Mutex::Autolock _l(mLock);
     return mPatchPanel.getAudioPort(port);
 }
@@ -65,6 +70,11 @@
 status_t AudioFlinger::createAudioPatch(const struct audio_patch *patch,
                                    audio_patch_handle_t *handle)
 {
+    status_t status = AudioValidator::validateAudioPatch(*patch);
+    if (status != NO_ERROR) {
+        return status;
+    }
+
     Mutex::Autolock _l(mLock);
     return mPatchPanel.createAudioPatch(patch, handle);
 }
@@ -103,15 +113,28 @@
 }
 
 /* Get supported attributes for a given audio port */
-status_t AudioFlinger::PatchPanel::getAudioPort(struct audio_port *port __unused)
+status_t AudioFlinger::PatchPanel::getAudioPort(struct audio_port_v7 *port)
 {
-    ALOGV(__func__);
-    return NO_ERROR;
+    if (port->type != AUDIO_PORT_TYPE_DEVICE) {
+        // Only query the HAL when the port is a device.
+        // TODO: implement getAudioPort for mix.
+        return INVALID_OPERATION;
+    }
+    AudioHwDevice* hwDevice = findAudioHwDeviceByModule(port->ext.device.hw_module);
+    if (hwDevice == nullptr) {
+        ALOGW("%s cannot find hw module %d", __func__, port->ext.device.hw_module);
+        return BAD_VALUE;
+    }
+    if (!hwDevice->supportsAudioPatches()) {
+        return INVALID_OPERATION;
+    }
+    return hwDevice->getAudioPort(port);
 }
 
 /* Connect a patch between several source and sink ports */
 status_t AudioFlinger::PatchPanel::createAudioPatch(const struct audio_patch *patch,
-                                   audio_patch_handle_t *handle)
+                                   audio_patch_handle_t *handle,
+                                   bool endpointPatch)
 {
     if (handle == NULL || patch == NULL) {
         return BAD_VALUE;
@@ -174,7 +197,7 @@
         }
     }
 
-    Patch newPatch{*patch};
+    Patch newPatch{*patch, endpointPatch};
     audio_module_handle_t insertedModule = AUDIO_MODULE_HANDLE_NONE;
 
     switch (patch->sources[0].type) {
@@ -396,10 +419,15 @@
             }
 
             // remove stale audio patch with same output as source if any
-            for (auto& iter : mPatches) {
-                if (iter.second.mAudioPatch.sources[0].ext.mix.handle == thread->id()) {
-                    erasePatch(iter.first);
-                    break;
+            // Do not remove endpoint patches (involved in a SwBridge), or an
+            // AudioPatch used to route an output involved in an endpoint.
+            if (!endpointPatch) {
+                for (auto& iter : mPatches) {
+                    if (iter.second.mAudioPatch.sources[0].ext.mix.handle == thread->id() &&
+                            !iter.second.mIsEndpointPatch) {
+                        erasePatch(iter.first);
+                        break;
+                    }
                 }
             }
         } break;
@@ -413,10 +441,10 @@
         *handle = (audio_patch_handle_t) mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH);
         newPatch.mHalHandle = halHandle;
         mAudioFlinger.mDeviceEffectManager.createAudioPatch(*handle, newPatch);
-        mPatches.insert(std::make_pair(*handle, std::move(newPatch)));
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
-            addSoftwarePatchToInsertedModules(insertedModule, *handle);
+            addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
         }
+        mPatches.insert(std::make_pair(*handle, std::move(newPatch)));
     } else {
         newPatch.clearConnections(this);
     }
@@ -435,7 +463,8 @@
     status_t status = panel->createAudioPatch(
             PatchBuilder().addSource(mAudioPatch.sources[0]).
                 addSink(mRecord.thread(), { .source = AUDIO_SOURCE_MIC }).patch(),
-            mRecord.handlePtr());
+            mRecord.handlePtr(),
+            true /*endpointPatch*/);
     if (status != NO_ERROR) {
         *mRecord.handlePtr() = AUDIO_PATCH_HANDLE_NONE;
         return status;
@@ -445,7 +474,8 @@
     if (mAudioPatch.num_sinks != 0) {
         status = panel->createAudioPatch(
                 PatchBuilder().addSource(mPlayback.thread()).addSink(mAudioPatch.sinks[0]).patch(),
-                mPlayback.handlePtr());
+                mPlayback.handlePtr(),
+                true /*endpointPatch*/);
         if (status != NO_ERROR) {
             *mPlayback.handlePtr() = AUDIO_PATCH_HANDLE_NONE;
             return status;
@@ -781,10 +811,20 @@
 }
 
 void AudioFlinger::PatchPanel::notifyStreamOpened(
-        AudioHwDevice *audioHwDevice, audio_io_handle_t stream)
+        AudioHwDevice *audioHwDevice, audio_io_handle_t stream, struct audio_patch *patch)
 {
     if (audioHwDevice->isInsert()) {
         mInsertedModules[audioHwDevice->handle()].streams.insert(stream);
+        if (patch != nullptr) {
+            std::vector<SoftwarePatch> swPatches;
+            getDownstreamSoftwarePatches(stream, &swPatches);
+            if (!swPatches.empty()) {
+                auto iter = mPatches.find(swPatches[0].getPatchHandle());
+                if (iter != mPatches.end()) {
+                    *patch = iter->second.mAudioPatch;
+                }
+            }
+        }
     }
 }
 
@@ -813,9 +853,13 @@
 }
 
 void AudioFlinger::PatchPanel::addSoftwarePatchToInsertedModules(
-        audio_module_handle_t module, audio_patch_handle_t handle)
+        audio_module_handle_t module, audio_patch_handle_t handle,
+        const struct audio_patch *patch)
 {
     mInsertedModules[module].sw_patches.insert(handle);
+    if (!mInsertedModules[module].streams.empty()) {
+        mAudioFlinger.updateDownStreamPatches_l(patch, mInsertedModules[module].streams);
+    }
 }
 
 void AudioFlinger::PatchPanel::removeSoftwarePatchFromInsertedModules(
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 89d4eb1..93593a3 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -52,11 +52,12 @@
                                     struct audio_port *ports);
 
     /* Get supported attributes for a given audio port */
-    status_t getAudioPort(struct audio_port *port);
+    status_t getAudioPort(struct audio_port_v7 *port);
 
     /* Create a patch between several source and sink ports */
     status_t createAudioPatch(const struct audio_patch *patch,
-                                       audio_patch_handle_t *handle);
+                              audio_patch_handle_t *handle,
+                              bool endpointPatch = false);
 
     /* Release a patch */
     status_t releaseAudioPatch(audio_patch_handle_t handle);
@@ -71,7 +72,8 @@
             std::vector<SoftwarePatch> *patches) const;
 
     // Notifies patch panel about all opened and closed streams.
-    void notifyStreamOpened(AudioHwDevice *audioHwDevice, audio_io_handle_t stream);
+    void notifyStreamOpened(AudioHwDevice *audioHwDevice, audio_io_handle_t stream,
+                            struct audio_patch *patch);
     void notifyStreamClosed(audio_io_handle_t stream);
 
     void dump(int fd) const;
@@ -160,7 +162,8 @@
 
     class Patch final {
     public:
-        explicit Patch(const struct audio_patch &patch) : mAudioPatch(patch) {}
+        Patch(const struct audio_patch &patch, bool endpointPatch) :
+            mAudioPatch(patch), mIsEndpointPatch(endpointPatch) {}
         Patch() = default;
         ~Patch();
         Patch(const Patch& other) noexcept {
@@ -169,6 +172,7 @@
             mPlayback = other.mPlayback;
             mRecord = other.mRecord;
             mThread = other.mThread;
+            mIsEndpointPatch = other.mIsEndpointPatch;
         }
         Patch(Patch&& other) noexcept { swap(other); }
         Patch& operator=(Patch&& other) noexcept {
@@ -183,6 +187,7 @@
             swap(mPlayback, other.mPlayback);
             swap(mRecord, other.mRecord);
             swap(mThread, other.mThread);
+            swap(mIsEndpointPatch, other.mIsEndpointPatch);
         }
 
         friend void swap(Patch &a, Patch &b) noexcept {
@@ -217,6 +222,7 @@
         Endpoint<RecordThread, RecordThread::PatchRecord> mRecord;
 
         wp<ThreadBase> mThread;
+        bool mIsEndpointPatch = false;
     };
 
     // Call with AudioFlinger mLock held
@@ -226,7 +232,8 @@
     AudioHwDevice* findAudioHwDeviceByModule(audio_module_handle_t module);
     sp<DeviceHalInterface> findHwDeviceByModule(audio_module_handle_t module);
     void addSoftwarePatchToInsertedModules(
-            audio_module_handle_t module, audio_patch_handle_t handle);
+            audio_module_handle_t module, audio_patch_handle_t handle,
+            const struct audio_patch *patch);
     void removeSoftwarePatchFromInsertedModules(audio_patch_handle_t handle);
     void erasePatch(audio_patch_handle_t handle);
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index d8eebf3..0929055 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -26,10 +26,13 @@
     bool hasOpPlayAudio() const;
 
     static sp<OpPlayAudioMonitor> createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType);
+            const AttributionSourceState& attributionSource,
+            const audio_attributes_t& attr, int id,
+            audio_stream_type_t streamType);
 
 private:
-    OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
+    OpPlayAudioMonitor(const AttributionSourceState& attributionSource,
+        audio_usage_t usage, int id);
     void onFirstRef() override;
     static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
 
@@ -49,8 +52,7 @@
     void checkPlayAudioForUsage();
 
     std::atomic_bool mHasOpPlayAudio;
-    Vector<String16> mPackages;
-    const uid_t mUid;
+    const AttributionSourceState mAttributionSource;
     const int32_t mUsage; // on purpose not audio_usage_t because always checked in appOps as int32_t
     const int mId; // for logging purposes only
 };
@@ -71,13 +73,14 @@
                                 const sp<IMemory>& sharedBuffer,
                                 audio_session_t sessionId,
                                 pid_t creatorPid,
-                                uid_t uid,
+                                const AttributionSourceState& attributionSource,
                                 audio_output_flags_t flags,
                                 track_type type,
                                 audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
                                 /** default behaviour is to start when there are as many frames
                                   * ready as possible (aka. Buffer is full). */
-                                size_t frameCountToBeReady = SIZE_MAX);
+                                size_t frameCountToBeReady = SIZE_MAX,
+                                float speed = 1.0f);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -115,6 +118,12 @@
             int         auxEffectId() const { return mAuxEffectId; }
     virtual status_t    getTimestamp(AudioTimestamp& timestamp);
             void        signal();
+            status_t    getDualMonoMode(audio_dual_mono_mode_t* mode);
+            status_t    setDualMonoMode(audio_dual_mono_mode_t mode);
+            status_t    getAudioDescriptionMixLevel(float* leveldB);
+            status_t    setAudioDescriptionMixLevel(float leveldB);
+            status_t    getPlaybackRateParameters(audio_playback_rate_t* playbackRate);
+            status_t    setPlaybackRateParameters(const audio_playback_rate_t& playbackRate);
 
 // implement FastMixerState::VolumeProvider interface
     virtual gain_minifloat_packed_t getVolumeLR();
@@ -138,15 +147,7 @@
     void                setFinalVolume(float volume);
     float               getFinalVolume() const { return mFinalVolume; }
 
-    /** @return true if the track has changed (metadata or volume) since
-     *          the last time this function was called,
-     *          true if this function was never called since the track creation,
-     *          false otherwise.
-     *  Thread safe.
-     */
-    bool            readAndClearHasChanged() { return !mChangeNotified.test_and_set(); }
-
-    using SourceMetadatas = std::vector<playback_track_metadata_t>;
+    using SourceMetadatas = std::vector<playback_track_metadata_v7_t>;
     using MetadataInserter = std::back_insert_iterator<SourceMetadatas>;
     /** Copy the track metadata in the provided iterator. Thread safe. */
     virtual void    copyMetadataTo(MetadataInserter& backInserter) const;
@@ -159,12 +160,12 @@
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
             /** Return at what intensity to play haptics, used in mixer. */
-            AudioMixer::haptic_intensity_t getHapticIntensity() const { return mHapticIntensity; }
+            os::HapticScale getHapticIntensity() const { return mHapticIntensity; }
             /** Set intensity of haptic playback, should be set after querying vibrator service. */
-            void    setHapticIntensity(AudioMixer::haptic_intensity_t hapticIntensity) {
-                if (AudioMixer::isValidHapticIntensity(hapticIntensity)) {
+            void    setHapticIntensity(os::HapticScale hapticIntensity) {
+                if (os::isValidHapticScale(hapticIntensity)) {
                     mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != AudioMixer::HAPTIC_SCALE_MUTE);
+                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
                 }
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
@@ -181,6 +182,9 @@
                    mAudioTrackServerProxy->getUnderrunFrames());
        }
     }
+
+    audio_output_flags_t getOutputFlags() const { return mFlags; }
+    float getSpeed() const { return mSpeed; }
 protected:
     // for numerous
     friend class PlaybackThread;
@@ -209,19 +213,34 @@
     void flushAck();
     bool isResumePending();
     void resumeAck();
+    // For direct or offloaded tracks ensure that the pause state is acknowledged
+    // by the playback thread in case of an immediate flush.
+    bool isPausePending() const { return mPauseHwPending; }
+    void pauseAck();
     void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
             uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
 
     sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
 
+    // presentationComplete checked by frames. (Mixed Tracks).
     // framesWritten is cumulative, never reset, and is shared by all tracks
     // audioHalFrames is derived from output latency
-    // FIXME parameters not needed, could get them from the thread
     bool presentationComplete(int64_t framesWritten, size_t audioHalFrames);
+
+    // presentationComplete checked by time. (Direct Tracks).
+    bool presentationComplete(uint32_t latencyMs);
+
+    void resetPresentationComplete() {
+        mPresentationCompleteFrames = 0;
+        mPresentationCompleteTimeNs = 0;
+    }
+
+    // notifyPresentationComplete is called when presentationComplete() detects
+    // that the track is finished stopping.
+    void notifyPresentationComplete();
+
     void signalClientFlag(int32_t flag);
 
-    /** Set that a metadata has changed and needs to be notified to backend. Thread safe. */
-    void setMetadataHasChanged() { mChangeNotified.clear(); }
 public:
     void triggerEvents(AudioSystem::sync_event_t type);
     virtual void invalidate();
@@ -250,9 +269,6 @@
     int32_t             *mAuxBuffer;
     int                 mAuxEffectId;
     bool                mHasVolumeController;
-    size_t              mPresentationCompleteFrames; // number of frames written to the
-                                    // audio HAL when this track will be fully rendered
-                                    // zero means not monitoring
 
     // access these three variables only when holding thread lock.
     LinearMap<int64_t> mFrameMap;           // track frame to server frame mapping
@@ -265,7 +281,7 @@
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
     // intensity to play haptic data
-    AudioMixer::haptic_intensity_t mHapticIntensity = AudioMixer::HAPTIC_SCALE_MUTE;
+    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
     class AudioVibrationController : public os::BnExternalVibrationController {
     public:
         explicit AudioVibrationController(Track* track) : mTrack(track) {}
@@ -276,8 +292,10 @@
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
-    /** How many frames should be in the buffer before the track is considered ready */
-    const size_t        mFrameCountToBeReady;
+
+    audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
+    float               mAudioDescriptionMixLevel = -std::numeric_limits<float>::infinity();
+    audio_playback_rate_t  mPlaybackRateParameters = AUDIO_PLAYBACK_RATE_INITIALIZER;
 
 private:
     void                interceptBuffer(const AudioBufferProvider::Buffer& buffer);
@@ -286,6 +304,14 @@
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
+    size_t              mPresentationCompleteFrames = 0; // (Used for Mixed tracks)
+                                    // The number of frames written to the
+                                    // audio HAL when this track is considered fully rendered.
+                                    // Zero means not monitoring.
+    int64_t             mPresentationCompleteTimeNs = 0; // (Used for Direct tracks)
+                                    // The time when this track is considered fully rendered.
+                                    // Zero means not monitoring.
+
     // The following fields are only for fast tracks, and should be in a subclass
     int                 mFastIndex; // index within FastMixerState::mFastTracks[];
                                     // either mFastIndex == -1 if not isFastTrack()
@@ -302,10 +328,10 @@
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
+    bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
-    // If the last track change was notified to the client with readAndClearHasChanged
-    std::atomic_flag     mChangeNotified = ATOMIC_FLAG_INIT;
     TeePatches  mTeePatches;
+    const float         mSpeed;
 };  // end of Track
 
 
@@ -324,7 +350,7 @@
                                 audio_format_t format,
                                 audio_channel_mask_t channelMask,
                                 size_t frameCount,
-                                uid_t uid);
+                                const AttributionSourceState& attributionSource);
     virtual             ~OutputTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event =
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index d87239d..e8552c4 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -15,44 +15,12 @@
 ** limitations under the License.
 */
 
+#include <android/content/AttributionSourceState.h>
+
 #ifndef INCLUDING_FROM_AUDIOFLINGER_H
     #error This header file should only be included from AudioFlinger.h
 #endif
 
-// Checks and monitors OP_RECORD_AUDIO
-class OpRecordAudioMonitor : public RefBase {
-public:
-    ~OpRecordAudioMonitor() override;
-    bool hasOpRecordAudio() const;
-
-    static sp<OpRecordAudioMonitor> createIfNeeded
-        (uid_t uid, const audio_attributes_t& attr, const String16& opPackageName);
-
-private:
-    OpRecordAudioMonitor(uid_t uid, const String16& opPackageName);
-    void onFirstRef() override;
-
-    AppOpsManager mAppOpsManager;
-
-    class RecordAudioOpCallback : public BnAppOpsCallback {
-    public:
-        explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
-        void opChanged(int32_t op, const String16& packageName) override;
-
-    private:
-        const wp<OpRecordAudioMonitor> mMonitor;
-    };
-
-    sp<RecordAudioOpCallback> mOpCallback;
-    // called by RecordAudioOpCallback when OP_RECORD_AUDIO is updated in AppOp callback
-    // and in onFirstRef()
-    void checkRecordAudio();
-
-    std::atomic_bool mHasOpRecordAudio;
-    const uid_t mUid;
-    const String16 mPackage;
-};
-
 // record track
 class RecordTrack : public TrackBase {
 public:
@@ -67,11 +35,11 @@
                                 size_t bufferSize,
                                 audio_session_t sessionId,
                                 pid_t creatorPid,
-                                uid_t uid,
+                                const AttributionSourceState& attributionSource,
                                 audio_input_flags_t flags,
                                 track_type type,
-                                const String16& opPackageName,
-                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+                                int32_t startFrames = -1);
     virtual             ~RecordTrack();
     virtual status_t    initCheck() const;
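
The constructor above now takes a single AttributionSourceState instead of separate uid and package-name arguments. As an illustration of why bundling the caller identity simplifies these signatures, here is a sketch using an invented struct, not the real android::content::AttributionSourceState.

#include <cstdint>
#include <string>

// Invented stand-in for an attribution bundle carrying the caller identity.
struct DemoAttribution {
    int32_t uid = -1;
    int32_t pid = -1;
    std::string packageName;
};

class DemoRecordTrack {
public:
    explicit DemoRecordTrack(const DemoAttribution& attr) : mAttr(attr) {}
    int32_t uid() const { return mAttr.uid; }
    const std::string& packageName() const { return mAttr.packageName; }

private:
    const DemoAttribution mAttr;  // one member instead of parallel uid/package fields
};
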
 
@@ -103,12 +71,15 @@
                                 { return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
 
             void        setSilenced(bool silenced) { if (!isPatchTrack()) mSilenced = silenced; }
-            bool        isSilenced() const;
+            bool        isSilenced() const { return mSilenced; }
 
             status_t    getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
 
             status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
             status_t    setPreferredMicrophoneFieldDimension(float zoom);
+            status_t    shareAudioHistory(const std::string& sharedAudioPackageName,
+                                          int64_t sharedAudioStartMs);
+            int32_t     startFrames() { return mStartFrames; }
 
     static  bool        checkServerLatencySupported(
                                 audio_format_t format, audio_input_flags_t flags) {
@@ -147,10 +118,8 @@
 
             bool                               mSilenced;
 
-            // used to enforce OP_RECORD_AUDIO
-            uid_t                              mUid;
-            String16                           mOpPackageName;
-            sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
+            std::string                        mSharedAudioPackageName = {};
+            int32_t                            mStartFrames = -1;
 };
 
 // playback track, used by PatchPanel
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp
index c7aba79..0ce5681 100644
--- a/services/audioflinger/SpdifStreamOut.cpp
+++ b/services/audioflinger/SpdifStreamOut.cpp
@@ -39,7 +39,7 @@
         , mSpdifEncoder(this, format)
         , mApplicationFormat(AUDIO_FORMAT_DEFAULT)
         , mApplicationSampleRate(0)
-        , mApplicationChannelMask(0)
+        , mApplicationChannelMask(AUDIO_CHANNEL_NONE)
 {
 }
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2af27d8..b9cdab8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,6 +29,7 @@
 #include <linux/futex.h>
 #include <sys/stat.h>
 #include <sys/syscall.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -107,6 +108,7 @@
 
 // TODO: Move these macro/inlines to a header file.
 #define max(a, b) ((a) > (b) ? (a) : (b))
+
 template <typename T>
 static inline T min(const T& a, const T& b)
 {
@@ -115,16 +117,33 @@
 
 namespace android {
 
+using media::IEffectClient;
+using content::AttributionSourceState;
+
 // retry counts for buffer fill timeout
 // 50 * ~20msecs = 1 second
 static const int8_t kMaxTrackRetries = 50;
 static const int8_t kMaxTrackStartupRetries = 50;
+
 // allow less retry attempts on direct output thread.
 // direct outputs can be a scarce resource in audio hardware and should
 // be released as quickly as possible.
-static const int8_t kMaxTrackRetriesDirect = 2;
-
-
+// Notes:
+// 1) The retry duration kMaxTrackRetriesDirectMs may be increased
+//    in case the data write is bursty for the AudioTrack.  The application
+//    should endeavor to write at least once every kMaxTrackRetriesDirectMs
+//    to prevent an underrun situation.  If the data is bursty, then
+//    the application can also throttle the data it sends so that delivery is even.
+// 2) For compressed audio data, any data present in the AudioTrack buffer
+//    will be sent, and doing so resets the retry count.  This delivers data as
+//    it arrives, with approximately kDirectMinSleepTimeUs = 10ms checking interval.
+// 3) For linear PCM or proportional PCM, we wait one period for a period's worth
+//    of data to be available, then any remaining data is delivered.
+//    This is required to ensure the last bit of data is delivered before underrun.
+//
+// Sleep time per cycle is kDirectMinSleepTimeUs for compressed tracks
+// or the size of the HAL period for proportional / linear PCM tracks.
+static const int32_t kMaxTrackRetriesDirectMs = 200;
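
A standalone sketch of the retry-budget arithmetic the notes above describe, assuming a 10 ms per-cycle sleep (the compressed-track case); the variable names are local to the sketch, not AudioFlinger members.

#include <cstdint>
#include <cstdio>

int main() {
    const int32_t kMaxTrackRetriesDirectMs = 200;  // wall-clock retry budget from the notes above
    const int32_t activeSleepTimeUs = 10000;       // assumed 10 ms cycle (kDirectMinSleepTimeUs)

    // Number of empty cycles tolerated before the track is considered timed out.
    const int32_t targetRetryCount = kMaxTrackRetriesDirectMs * 1000 / activeSleepTimeUs;
    // At or below this threshold any nonzero amount of PCM data is accepted.
    const int32_t retryThreshold = targetRetryCount > 2 ? targetRetryCount - 1 : 1;

    std::printf("targetRetryCount=%d retryThreshold=%d\n", targetRetryCount, retryThreshold);
    return 0;
}

With these assumed numbers the budget works out to 20 retries with a threshold of 19, i.e. roughly 200 ms of tolerated silence per track before timeout.
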
 
 // don't warn about blocked writes or record buffer overflows more often than this
 static const nsecs_t kWarningThrottleNs = seconds(5);
@@ -621,7 +640,7 @@
     mIoJitterMs.reset();
     mLatencyMs.reset();
     mProcessTimeMs.reset();
-    mTimestampVerifier.discontinuity();
+    mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
 
     sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
     sendConfigEvent_l(configEvent);
@@ -695,6 +714,13 @@
     return sendConfigEvent_l(configEvent);
 }
 
+void AudioFlinger::ThreadBase::sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs)
+{
+    ALOG_ASSERT(type() == RECORD, "sendResizeBufferConfigEvent_l() called on non record thread");
+    sp<ConfigEvent> configEvent =
+            (ConfigEvent *)new ResizeBufferConfigEvent(maxSharedAudioHistoryMs);
+    sendConfigEvent_l(configEvent);
+}
 
 // post condition: mConfigEvents.isEmpty()
 void AudioFlinger::ThreadBase::processConfigEvents_l()
@@ -753,6 +779,11 @@
                     (UpdateOutDevicesConfigEventData *)event->mData.get();
             updateOutDevices(data->mOutDevices);
         } break;
+        case CFG_EVENT_RESIZE_BUFFER: {
+            ResizeBufferConfigEventData *data =
+                    (ResizeBufferConfigEventData *)event->mData.get();
+            resizeInputBuffer_l(data->mMaxSharedAudioHistoryMs);
+        } break;
         default:
             ALOG_ASSERT(false, "processConfigEvents_l() unknown event type %d", event->mType);
             break;
@@ -784,7 +815,7 @@
             if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT) s.append("front-left, ");
             if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT) s.append("front-right, ");
             if (mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) s.append("front-center, ");
-            if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low freq, ");
+            if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low-frequency, ");
             if (mask & AUDIO_CHANNEL_OUT_BACK_LEFT) s.append("back-left, ");
             if (mask & AUDIO_CHANNEL_OUT_BACK_RIGHT) s.append("back-right, ");
             if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) s.append("front-left-of-center, ");
@@ -797,12 +828,16 @@
             if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER) s.append("top-front-center, ");
             if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT) s.append("top-front-right, ");
             if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_LEFT) s.append("top-back-left, ");
-            if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_CENTER) s.append("top-back-center, " );
-            if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT) s.append("top-back-right, " );
-            if (mask & AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT) s.append("top-side-left, " );
-            if (mask & AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT) s.append("top-side-right, " );
-            if (mask & AUDIO_CHANNEL_OUT_HAPTIC_B) s.append("haptic-B, " );
-            if (mask & AUDIO_CHANNEL_OUT_HAPTIC_A) s.append("haptic-A, " );
+            if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_CENTER) s.append("top-back-center, ");
+            if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT) s.append("top-back-right, ");
+            if (mask & AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT) s.append("top-side-left, ");
+            if (mask & AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT) s.append("top-side-right, ");
+            if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT) s.append("bottom-front-left, ");
+            if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER) s.append("bottom-front-center, ");
+            if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT) s.append("bottom-front-right, ");
+            if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) s.append("low-frequency-2, ");
+            if (mask & AUDIO_CHANNEL_OUT_HAPTIC_B) s.append("haptic-B, ");
+            if (mask & AUDIO_CHANNEL_OUT_HAPTIC_A) s.append("haptic-A, ");
             if (mask & ~AUDIO_CHANNEL_OUT_ALL) s.append("unknown,  ");
         } else {
             if (mask & AUDIO_CHANNEL_IN_LEFT) s.append("left, ");
@@ -820,9 +855,9 @@
             if (mask & AUDIO_CHANNEL_IN_BACK_LEFT) s.append("back-left, ");
             if (mask & AUDIO_CHANNEL_IN_BACK_RIGHT) s.append("back-right, ");
             if (mask & AUDIO_CHANNEL_IN_CENTER) s.append("center, ");
-            if (mask & AUDIO_CHANNEL_IN_LOW_FREQUENCY) s.append("low freq, ");
-            if (mask & AUDIO_CHANNEL_IN_TOP_LEFT) s.append("top-left, " );
-            if (mask & AUDIO_CHANNEL_IN_TOP_RIGHT) s.append("top-right, " );
+            if (mask & AUDIO_CHANNEL_IN_LOW_FREQUENCY) s.append("low-frequency, ");
+            if (mask & AUDIO_CHANNEL_IN_TOP_LEFT) s.append("top-left, ");
+            if (mask & AUDIO_CHANNEL_IN_TOP_RIGHT) s.append("top-right, ");
             if (mask & AUDIO_CHANNEL_IN_VOICE_UPLINK) s.append("voice-uplink, ");
             if (mask & AUDIO_CHANNEL_IN_VOICE_DNLINK) s.append("voice-dnlink, ");
             if (mask & ~AUDIO_CHANNEL_IN_ALL) s.append("unknown,  ");
@@ -985,15 +1020,16 @@
     if (mPowerManager != 0) {
         sp<IBinder> binder = new BBinder();
         // Uses AID_AUDIOSERVER for wakelock.  updateWakeLockUids_l() updates with client uids.
-        status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    binder,
+        binder::Status status = mPowerManager->acquireWakeLockAsync(binder,
+                    POWERMANAGER_PARTIAL_WAKE_LOCK,
                     getWakeLockTag(),
                     String16("audioserver"),
-                    true /* FIXME force oneway contrary to .aidl */);
-        if (status == NO_ERROR) {
+                    {} /* workSource */,
+                    {} /* historyTag */);
+        if (status.isOk()) {
             mWakeLockToken = binder;
         }
-        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status);
+        ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status.exceptionCode());
     }
 
     gBoottime.acquire(mWakeLockToken);
@@ -1013,8 +1049,7 @@
     if (mWakeLockToken != 0) {
         ALOGV("releaseWakeLock_l() %s", mThreadName);
         if (mPowerManager != 0) {
-            mPowerManager->releaseWakeLock(mWakeLockToken, 0,
-                    true /* FIXME force oneway contrary to .aidl */);
+            mPowerManager->releaseWakeLockAsync(mWakeLockToken, 0);
         }
         mWakeLockToken.clear();
     }
@@ -1028,7 +1063,7 @@
         if (binder == 0) {
             ALOGW("Thread %s cannot connect to the power manager service", mThreadName);
         } else {
-            mPowerManager = interface_cast<IPowerManager>(binder);
+            mPowerManager = interface_cast<os::IPowerManager>(binder);
             binder->linkToDeath(mDeathRecipient);
         }
     }
@@ -1055,10 +1090,9 @@
     }
     if (mPowerManager != 0) {
         std::vector<int> uidsAsInt(uids.begin(), uids.end()); // powermanager expects uids as ints
-        status_t status = mPowerManager->updateWakeLockUids(
-                mWakeLockToken, uidsAsInt.size(), uidsAsInt.data(),
-                true /* FIXME force oneway contrary to .aidl */);
-        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status);
+        binder::Status status = mPowerManager->updateWakeLockUidsAsync(
+                mWakeLockToken, uidsAsInt);
+        ALOGV("updateWakeLockUids_l() %s status %d", mThreadName, status.exceptionCode());
     }
 }
 
@@ -1075,6 +1109,11 @@
     ALOGE("%s should only be called in RecordThread", __func__);
 }
 
+void AudioFlinger::ThreadBase::resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs __unused)
+{
+    ALOGE("%s should only be called in RecordThread", __func__);
+}
+
 void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused)
 {
     sp<ThreadBase> thread = mThread.promote();
@@ -1243,6 +1282,11 @@
             return BAD_VALUE;
         }
     }
+
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported in RecordThread", __func__);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
@@ -1262,6 +1306,12 @@
         return NO_ERROR;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
+        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
+                __func__);
+        return BAD_VALUE;
+    }
+
     switch (mType) {
     case MIXER: {
 #ifndef MULTICHANNEL_EFFECT_CHAIN
@@ -1424,6 +1474,16 @@
             effect->setMode(mAudioFlinger->getMode());
             effect->setAudioSource(mAudioSource);
         }
+        if (effect->isHapticGenerator()) {
+            // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
+            // for the HapticGenerator.
+            const media::AudioVibratorInfo* defaultVibratorInfo =
+                    mAudioFlinger->getDefaultVibratorInfo_l();
+            if (defaultVibratorInfo != nullptr) {
+                // Only set the vibrator info when it is valid.
+                effect->setVibratorInfo(defaultVibratorInfo);
+            }
+        }
         // create effect handle and connect it to effect module
         handle = new EffectHandle(effect, client, effectClient, priority);
         lStatus = handle->initCheck();
@@ -1578,7 +1638,7 @@
         detachAuxEffect_l(effect->id());
     }
 
-    sp<EffectChain> chain = effect->callback()->chain().promote();
+    sp<EffectChain> chain = effect->getCallback()->chain().promote();
     if (chain != 0) {
         // remove effect chain if removing last effect
         if (chain->removeEffect_l(effect, release) == 0) {
@@ -1738,8 +1798,14 @@
 
 template <typename T>
 bool AudioFlinger::ThreadBase::ActiveTracks<T>::readAndClearHasChanged() {
-    const bool hasChanged = mHasChanged;
+    bool hasChanged = mHasChanged;
     mHasChanged = false;
+
+    for (const sp<T> &track : mActiveTracks) {
+        // Do not short-circuit: all hasChanged states must be reset
+        // because all the metadata is going to be sent.
+        hasChanged |= track->readAndClearHasChanged();
+    }
     return hasChanged;
 }
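
A minimal standalone sketch (invented types) of the aggregation pattern in the hunk above: the loop uses |= rather than a short-circuiting || so that every track's flag is read and cleared even after a change has already been observed.

#include <vector>

struct DemoTrack {
    bool changed = false;
    bool readAndClearHasChanged() { const bool c = changed; changed = false; return c; }
};

bool readAndClearAll(std::vector<DemoTrack>& tracks, bool threadChanged) {
    bool hasChanged = threadChanged;
    for (auto& t : tracks) {
        hasChanged |= t.readAndClearHasChanged();  // always evaluated, so the flag is always cleared
        // hasChanged = hasChanged || t.readAndClearHasChanged();  // would stop clearing once true
    }
    return hasChanged;
}
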
 
@@ -1863,7 +1929,8 @@
         // index 0 is reserved for normal mixer's submix
         mFastTrackAvailMask(((1 << FastMixerState::sMaxFastTracks) - 1) & ~1),
         mHwSupportsPause(false), mHwPaused(false), mFlushPending(false),
-        mLeftVolFloat(-1.0), mRightVolFloat(-1.0)
+        mLeftVolFloat(-1.0), mRightVolFloat(-1.0),
+        mDownStreamPatch{}
 {
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
@@ -1901,9 +1968,8 @@
                                        : AUDIO_DEVICE_NONE));
     }
 
-    // ++ operator does not compile
-    for (audio_stream_type_t stream = AUDIO_STREAM_MIN; stream < AUDIO_STREAM_FOR_POLICY_CNT;
-            stream = (audio_stream_type_t) (stream + 1)) {
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
         mStreamTypes[stream].volume = 0.0f;
         mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
     }
@@ -1926,14 +1992,14 @@
 
 void AudioFlinger::PlaybackThread::onFirstRef()
 {
-    if (mOutput == nullptr || mOutput->stream == nullptr) {
+    if (!isStreamInitialized()) {
         ALOGE("The stream is not open yet"); // This should not happen.
     } else {
         // setEventCallback will need a strong pointer as a parameter. Calling it
         // here instead of constructor of PlaybackThread so that the onFirstRef
         // callback would not be made on an incompletely constructed object.
         if (mOutput->stream->setEventCallback(this) != OK) {
-            ALOGE("Failed to add event callback");
+            ALOGD("Failed to add event callback");
         }
     }
     run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
@@ -2063,8 +2129,8 @@
         audio_session_t sessionId,
         audio_output_flags_t *flags,
         pid_t creatorPid,
+        const AttributionSourceState& attributionSource,
         pid_t tid,
-        uid_t uid,
         status_t *status,
         audio_port_handle_t portId,
         const sp<media::IAudioTrackCallback>& callback)
@@ -2158,8 +2224,8 @@
                 "sampleRate=%u mSampleRate=%u "
                 "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x",
                 sharedBuffer.get(), frameCount, mFrameCount, format, mFormat,
-                audio_is_linear_pcm(format),
-                channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask);
+                audio_is_linear_pcm(format), channelMask, sampleRate,
+                mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask);
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
       }
     }
@@ -2331,11 +2397,11 @@
         // all tracks in same audio session must share the same routing strategy otherwise
         // conflicts will happen when tracks are moved from one output to another by audio policy
         // manager
-        uint32_t strategy = AudioSystem::getStrategyForStream(streamType);
+        product_strategy_t strategy = AudioSystem::getStrategyForStream(streamType);
         for (size_t i = 0; i < mTracks.size(); ++i) {
             sp<Track> t = mTracks[i];
             if (t != 0 && t->isExternalTrack()) {
-                uint32_t actual = AudioSystem::getStrategyForStream(t->streamType());
+                product_strategy_t actual = AudioSystem::getStrategyForStream(t->streamType());
                 if (sessionId == t->sessionId() && strategy != actual) {
                     ALOGE("createTrack_l() mismatched strategy; expected %u but found %u",
                             strategy, actual);
@@ -2345,10 +2411,21 @@
             }
         }
 
+        // Set DIRECT flag if current thread is DirectOutputThread. This can
+        // happen when the playback is rerouted to a direct output thread by
+        // dynamic audio policy.
+        // Do NOT report the flag change back to the client, since the client
+        // doesn't explicitly request a direct flag.
+        audio_output_flags_t trackFlags = *flags;
+        if (mType == DIRECT) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        }
+
         track = new Track(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
-                          sessionId, creatorPid, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
+                          sessionId, creatorPid, attributionSource, trackFlags,
+                          TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/, speed);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2360,7 +2437,7 @@
         {
             Mutex::Autolock _atCbL(mAudioTrackCbLock);
             if (callback.get() != nullptr) {
-                mAudioTrackCallbacks.emplace(callback);
+                mAudioTrackCallbacks.emplace(track, callback);
             }
         }
 
@@ -2527,15 +2604,17 @@
                     track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING;
         }
 
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelMask != AUDIO_CHANNEL_NONE) {
+        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
+        if (mHapticChannelMask != AUDIO_CHANNEL_NONE
+                && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mLock.unlock();
             const int intensity = AudioFlinger::onExternalVibrationStart(
                     track->getExternalVibration());
             mLock.lock();
-            track->setHapticIntensity(static_cast<AudioMixer::haptic_intensity_t>(intensity));
+            track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
             // Haptic playback should be enabled by vibrator service.
             if (track->getHapticPlaybackEnabled()) {
                 // Disable haptic playback of all active track to ensure only
@@ -2544,12 +2623,16 @@
                     t->setHapticPlaybackEnabled(false);
                 }
             }
+
+            // Set haptic intensity for effect
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), intensity);
+            }
         }
 
         track->mResetDone = false;
-        track->mPresentationCompleteFrames = 0;
+        track->resetPresentationComplete();
         mActiveTracks.add(track);
-        sp<EffectChain> chain = getEffectChain_l(track->sessionId());
         if (chain != 0) {
             ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(),
                     track->sessionId());
@@ -2588,6 +2671,10 @@
     mLocalLog.log("removeTrack_l (%p) %s", track.get(), result.string());
 
     mTracks.remove(track);
+    {
+        Mutex::Autolock _atCbL(mAudioTrackCbLock);
+        mAudioTrackCallbacks.erase(track);
+    }
     if (track->isFastTrack()) {
         int index = track->mFastIndex;
         ALOG_ASSERT(0 < index && index < (int)FastMixerState::sMaxFastTracks);
@@ -2614,7 +2701,7 @@
 
 status_t AudioFlinger::DirectOutputThread::selectPresentation(int presentationId, int programId) {
     Mutex::Autolock _l(mLock);
-    if (mOutput == nullptr || mOutput->stream == nullptr) {
+    if (!isStreamInitialized()) {
         return NO_INIT;
     }
     return mOutput->stream->selectPresentation(presentationId, programId);
@@ -2626,12 +2713,16 @@
     ALOGV("PlaybackThread::ioConfigChanged, thread %p, event %d", this, event);
 
     desc->mIoHandle = mId;
+    struct audio_patch patch = mPatch;
+    if (isMsdDevice()) {
+        patch = mDownStreamPatch;
+    }
 
     switch (event) {
     case AUDIO_OUTPUT_OPENED:
     case AUDIO_OUTPUT_REGISTERED:
     case AUDIO_OUTPUT_CONFIG_CHANGED:
-        desc->mPatch = mPatch;
+        desc->mPatch = patch;
         desc->mChannelMask = mChannelMask;
         desc->mSamplingRate = mSampleRate;
         desc->mFormat = mFormat;
@@ -2641,7 +2732,7 @@
         desc->mLatency = latency_l();
         break;
     case AUDIO_CLIENT_STARTED:
-        desc->mPatch = mPatch;
+        desc->mPatch = patch;
         desc->mPortId = portId;
         break;
     case AUDIO_OUTPUT_CLOSED:
@@ -2683,8 +2774,8 @@
                     audio_utils::metadata::byteStringFromData(metadata);
             std::vector metadataVec(metaDataStr.begin(), metaDataStr.end());
             Mutex::Autolock _l(mAudioTrackCbLock);
-            for (const auto& callback : mAudioTrackCallbacks) {
-                callback->onCodecFormatChanged(metadataVec);
+            for (const auto& callbackPair : mAudioTrackCallbacks) {
+                callbackPair.second->onCodecFormatChanged(metadataVec);
             }
     }).detach();
 }
@@ -2708,7 +2799,7 @@
         // the timestamp frame position to reset to 0 for direct and offload threads.
         // (Out of sequence requests are ignored, since the discontinuity would be handled
         // elsewhere, e.g. in flush).
-        mTimestampVerifier.discontinuity();
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
         mDrainSequence &= ~1;
         mWaitWorkCV.signal();
     }
@@ -2717,8 +2808,9 @@
 void AudioFlinger::PlaybackThread::readOutputParameters_l()
 {
     // unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
-    mSampleRate = mOutput->getSampleRate();
-    mChannelMask = mOutput->getChannelMask();
+    const audio_config_base_t audioConfig = mOutput->getAudioProperties();
+    mSampleRate = audioConfig.sample_rate;
+    mChannelMask = audioConfig.channel_mask;
     if (!audio_is_output_channel(mChannelMask)) {
         LOG_ALWAYS_FATAL("HAL channel mask %#x not valid for output", mChannelMask);
     }
@@ -2731,11 +2823,11 @@
     mBalance.setChannelMask(mChannelMask);
 
     // Get actual HAL format.
-    status_t result = mOutput->stream->getFormat(&mHALFormat);
+    status_t result = mOutput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
     LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving output stream format: %d", result);
     // Get format from the shim, which will be different than the HAL format
     // if playing compressed audio over HDMI passthrough.
-    mFormat = mOutput->getFormat();
+    mFormat = audioConfig.format;
     if (!audio_is_valid_format(mFormat)) {
         LOG_ALWAYS_FATAL("HAL format %#x not valid for output", mFormat);
     }
@@ -2862,8 +2954,8 @@
         (void)posix_memalign(&mEffectBuffer, 32, mEffectBufferSize);
     }
 
-    mHapticChannelMask = mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL;
-    mChannelMask &= ~mHapticChannelMask;
+    mHapticChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & AUDIO_CHANNEL_HAPTIC_ALL);
+    mChannelMask = static_cast<audio_channel_mask_t>(mChannelMask & ~mHapticChannelMask);
     mHapticChannelCount = audio_channel_count_from_out_mask(mHapticChannelMask);
     mChannelCount -= mHapticChannelCount;
 
@@ -2906,23 +2998,17 @@
 
 void AudioFlinger::PlaybackThread::updateMetadata_l()
 {
-    if (mOutput == nullptr || mOutput->stream == nullptr ) {
-        return; // That should not happen
-    }
-    bool hasChanged = mActiveTracks.readAndClearHasChanged();
-    for (const sp<Track> &track : mActiveTracks) {
-        // Do not short-circuit as all hasChanged states must be reset
-        // as all the metadata are going to be sent
-        hasChanged |= track->readAndClearHasChanged();
-    }
-    if (!hasChanged) {
+    if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
         return; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
     auto backInserter = std::back_inserter(metadata.tracks);
     for (const sp<Track> &track : mActiveTracks) {
         // No track is invalid as this is called after prepareTrack_l in the same critical section
-        track->copyMetadataTo(backInserter);
+        // Do not forward metadata for PatchTrack with unspecified stream type
+        if (track->streamType() != AUDIO_STREAM_PATCH) {
+            track->copyMetadataTo(backInserter);
+        }
     }
     sendMetadataToBackend_l(metadata);
 }
@@ -2960,7 +3046,7 @@
     }
 }
 
-uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(audio_session_t sessionId)
+product_strategy_t AudioFlinger::PlaybackThread::getStrategyForSession_l(audio_session_t sessionId)
 {
     // session AUDIO_SESSION_OUTPUT_MIX is placed in same strategy as MUSIC stream so that
     // it is moved to correct output by audio policy manager when A2DP is connected or disconnected
@@ -3235,6 +3321,17 @@
     invalidateTracks_l(streamType);
 }
 
+// getTrackById_l must be called while holding the thread lock
+AudioFlinger::PlaybackThread::Track* AudioFlinger::PlaybackThread::getTrackById_l(
+        audio_port_handle_t trackPortId) {
+    for (size_t i = 0; i < mTracks.size(); i++) {
+        if (mTracks[i]->portId() == trackPortId) {
+            return mTracks[i].get();
+        }
+    }
+    return nullptr;
+}
+
 status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp<EffectChain>& chain)
 {
     audio_session_t session = chain->sessionId();
@@ -3397,7 +3494,6 @@
 
     mStandbyTimeNs = systemTime();
     int64_t lastLoopCountWritten = -2; // never matches "previous" loop, when loopCount = 0.
-    int64_t lastFramesWritten = -1;    // track changes in timestamp server frames written
 
     // MIXER
     nsecs_t lastWarning = 0;
@@ -3433,14 +3529,6 @@
 
     checkSilentMode_l();
 
-    // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
-    // TODO: add confirmation checks:
-    // 1) DIRECT threads and linear PCM format really resets to 0?
-    // 2) Is frame count really valid if not linear pcm?
-    // 3) Are all 64 bits of position returned, not just lowest 32 bits?
-    if (mType == OFFLOAD || mType == DIRECT) {
-        mTimestampVerifier.setDiscontinuityMode(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
-    }
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     // loopCount is used for statistics and diagnostics.
@@ -3481,7 +3569,7 @@
                     // latency of 5 seconds).
                     const double minLatency = 0., maxLatency = 5000.;
                     if (latencyMs >= minLatency && latencyMs <= maxLatency) {
-                        ALOGV("new downstream latency %lf ms", latencyMs);
+                        ALOGVV("new downstream latency %lf ms", latencyMs);
                     } else {
                         ALOGD("out of range downstream latency %lf ms", latencyMs);
                         if (latencyMs < minLatency) latencyMs = minLatency;
@@ -3512,135 +3600,8 @@
                 logString = NULL;
             }
 
-            // Collect timestamp statistics for the Playback Thread types that support it.
-            if (mType == MIXER
-                    || mType == DUPLICATING
-                    || mType == DIRECT
-                    || mType == OFFLOAD) { // no indentation
-            // Gather the framesReleased counters for all active tracks,
-            // and associate with the sink frames written out.  We need
-            // this to convert the sink timestamp to the track timestamp.
-            bool kernelLocationUpdate = false;
-            ExtendedTimestamp timestamp; // use private copy to fetch
-            if (mStandby) {
-                mTimestampVerifier.discontinuity();
-            } else if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
-                mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                        mSampleRate);
+            collectTimestamps_l();
 
-                if (isTimestampCorrectionEnabled()) {
-                    ALOGV("TS_BEFORE: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-                    auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
-                    timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mFrames;
-                    timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
-                            = correctedTimestamp.mTimeNs;
-                    ALOGV("TS_AFTER: %d %lld %lld", id(),
-                            (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
-                            (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-
-                    // Note: Downstream latency only added if timestamp correction enabled.
-                    if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
-                        const int64_t newPosition =
-                                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                        // prevent retrograde
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
-                                newPosition,
-                                (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                                        - mSuspendedFrames));
-                    }
-                }
-
-                // We always fetch the timestamp here because often the downstream
-                // sink will block while writing.
-
-                // We keep track of the last valid kernel position in case we are in underrun
-                // and the normal mixer period is the same as the fast mixer period, or there
-                // is some error from the HAL.
-                if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
-                            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
-                }
-
-                if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
-                    kernelLocationUpdate = true;
-                } else {
-                    ALOGVV("getTimestamp error - no valid kernel position");
-                }
-
-                // copy over kernel info
-                mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
-                        + mSuspendedFrames; // add frames discarded when suspended
-                mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
-                        timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-            } else {
-                mTimestampVerifier.error();
-            }
-
-            // mFramesWritten for non-offloaded tracks are contiguous
-            // even after standby() is called. This is useful for the track frame
-            // to sink frame mapping.
-            bool serverLocationUpdate = false;
-            if (mFramesWritten != lastFramesWritten) {
-                serverLocationUpdate = true;
-                lastFramesWritten = mFramesWritten;
-            }
-            // Only update timestamps if there is a meaningful change.
-            // Either the kernel timestamp must be valid or we have written something.
-            if (kernelLocationUpdate || serverLocationUpdate) {
-                if (serverLocationUpdate) {
-                    // use the time before we called the HAL write - it is a bit more accurate
-                    // to when the server last read data than the current time here.
-                    //
-                    // If we haven't written anything, mLastIoBeginNs will be -1
-                    // and we use systemTime().
-                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
-                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
-                            ? systemTime() : mLastIoBeginNs;
-                }
-
-                for (const sp<Track> &t : mActiveTracks) {
-                    if (!t->isFastTrack()) {
-                        t->updateTrackFrameInfo(
-                                t->mAudioTrackServerProxy->framesReleased(),
-                                mFramesWritten,
-                                mSampleRate,
-                                mTimestamp);
-                    }
-                }
-            }
-
-            if (audio_has_proportional_frames(mFormat)) {
-                const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
-                if (latencyMs != 0.) { // note 0. means timestamp is empty.
-                    mLatencyMs.add(latencyMs);
-                }
-            }
-
-            } // if (mType ... ) { // no indentation
-#if 0
-            // logFormat example
-            if (z % 100 == 0) {
-                timespec ts;
-                clock_gettime(CLOCK_MONOTONIC, &ts);
-                LOGT("This is an integer %d, this is a float %f, this is my "
-                    "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
-                LOGT("A deceptive null-terminated string %\0");
-            }
-            ++z;
-#endif
             saveOutputTracks();
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
@@ -3732,9 +3693,15 @@
 
             // Determine which session to pick up haptic data.
             // This must be done under the same lock as prepareTracks_l().
+            // Haptic data from the effect takes priority over haptic data from the track.
             // TODO: Write haptic data directly to sink buffer when mixing.
             if (mHapticChannelCount > 0 && effectChains.size() > 0) {
                 for (const auto& track : mActiveTracks) {
+                    sp<EffectChain> effectChain = getEffectChain_l(track->sessionId());
+                    if (effectChain != nullptr && effectChain->containsHapticGeneratingEffect_l()) {
+                        activeHapticSessionId = track->sessionId();
+                        break;
+                    }
                     if (track->getHapticPlaybackEnabled()) {
                         activeHapticSessionId = track->sessionId();
                         break;
@@ -4080,6 +4047,148 @@
     return false;
 }
 
+void AudioFlinger::PlaybackThread::collectTimestamps_l()
+{
+    // Collect timestamp statistics for the Playback Thread types that support it.
+    if (mType != MIXER
+            && mType != DUPLICATING
+            && mType != DIRECT
+            && mType != OFFLOAD) {
+        return;
+    }
+    if (mStandby) {
+        mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
+        return;
+    } else if (mHwPaused) {
+        mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
+        return;
+    }
+
+    // Gather the framesReleased counters for all active tracks,
+    // and associate with the sink frames written out.  We need
+    // this to convert the sink timestamp to the track timestamp.
+    bool kernelLocationUpdate = false;
+    ExtendedTimestamp timestamp; // use private copy to fetch
+
+    // Always query the HAL timestamp and update the timestamp verifier. In standby or pause,
+    // the HAL may still be draining a small amount of buffered data for fade out.
+    if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
+        mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                mSampleRate);
+
+        if (isTimestampCorrectionEnabled()) {
+            ALOGVV("TS_BEFORE: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+            auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
+            timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mFrames;
+            timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
+                    = correctedTimestamp.mTimeNs;
+            ALOGVV("TS_AFTER: %d %lld %lld", id(),
+                    (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+                    (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+
+            // Note: Downstream latency only added if timestamp correction enabled.
+            if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
+                const int64_t newPosition =
+                        timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                        - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
+                // prevent retrograde
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
+                        newPosition,
+                        (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                                - mSuspendedFrames));
+            }
+        }
+
+        // We always fetch the timestamp here because often the downstream
+        // sink will block while writing.
+
+        // We keep track of the last valid kernel position in case we are in underrun
+        // and the normal mixer period is the same as the fast mixer period, or there
+        // is some error from the HAL.
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+                    mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+        }
+
+        if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+            kernelLocationUpdate = true;
+        } else {
+            ALOGVV("getTimestamp error - no valid kernel position");
+        }
+
+        // copy over kernel info
+        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+                + mSuspendedFrames; // add frames discarded when suspended
+        mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
+                timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+    } else {
+        mTimestampVerifier.error();
+    }
+
+    // mFramesWritten for non-offloaded tracks are contiguous
+    // even after standby() is called. This is useful for the track frame
+    // to sink frame mapping.
+    bool serverLocationUpdate = false;
+    if (mFramesWritten != mLastFramesWritten) {
+        serverLocationUpdate = true;
+        mLastFramesWritten = mFramesWritten;
+    }
+    // Only update timestamps if there is a meaningful change.
+    // Either the kernel timestamp must be valid or we have written something.
+    if (kernelLocationUpdate || serverLocationUpdate) {
+        if (serverLocationUpdate) {
+            // use the time before we called the HAL write - it is a bit more accurate
+            // to when the server last read data than the current time here.
+            //
+            // If we haven't written anything, mLastIoBeginNs will be -1
+            // and we use systemTime().
+            mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+            mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
+                    ? systemTime() : mLastIoBeginNs;
+        }
+
+        for (const sp<Track> &t : mActiveTracks) {
+            if (!t->isFastTrack()) {
+                t->updateTrackFrameInfo(
+                        t->mAudioTrackServerProxy->framesReleased(),
+                        mFramesWritten,
+                        mSampleRate,
+                        mTimestamp);
+            }
+        }
+    }
+
+    if (audio_has_proportional_frames(mFormat)) {
+        const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
+        if (latencyMs != 0.) { // note 0. means timestamp is empty.
+            mLatencyMs.add(latencyMs);
+        }
+    }
+#if 0
+    // logFormat example
+    if (z % 100 == 0) {
+        timespec ts;
+        clock_gettime(CLOCK_MONOTONIC, &ts);
+        LOGT("This is an integer %d, this is a float %f, this is my "
+            "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
+        LOGT("A deceptive null-terminated string %\0");
+    }
+    ++z;
+#endif
+}
+
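
collectTimestamps_l() above subtracts the measured downstream latency from the kernel position and then clamps against the previous position to prevent retrograde motion (the real code also adds back frames discarded while suspended). The same idea in isolation, with invented names:

#include <algorithm>
#include <cstdint>

static int64_t adjustForDownstreamLatency(int64_t kernelFrames,
                                          double downstreamLatencyMs,
                                          uint32_t sampleRate,
                                          int64_t lastReportedFrames) {
    const int64_t latencyFrames =
            static_cast<int64_t>(downstreamLatencyMs * sampleRate * 1e-3);
    // Never report a position earlier than the last one already reported.
    return std::max(kernelFrames - latencyFrames, lastReportedFrames);
}
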
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
 {
@@ -4104,13 +4213,20 @@
             // remove from our tracks vector
             removeTrack_l(track);
         }
-        if ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && mHapticChannelCount > 0) {
+        if (mHapticChannelCount > 0 &&
+                ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
             mLock.unlock();
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             AudioFlinger::onExternalVibrationStop(track->getExternalVibration());
             mLock.lock();
+
+            // When the track is stopped, set the haptic intensity to MUTE
+            // for the HapticGenerator effect.
+            if (chain != nullptr) {
+                chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+            }
         }
     }
 }
@@ -4126,20 +4242,15 @@
         return status;
     }
     if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
-        uint64_t position64;
-        if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
-            timestamp.mPosition = (uint32_t)position64;
-            if (mDownstreamLatencyStatMs.getN() > 0) {
-                const uint32_t positionOffset =
-                    (uint32_t)(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
-                if (positionOffset > timestamp.mPosition) {
-                    timestamp.mPosition = 0;
-                } else {
-                    timestamp.mPosition -= positionOffset;
-                }
-            }
-            return NO_ERROR;
+        collectTimestamps_l();
+        if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] <= 0) {
+            return INVALID_OPERATION;
         }
+        timestamp.mPosition = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+        const int64_t timeNs = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+        timestamp.mTime.tv_sec = timeNs / NANOS_PER_SECOND;
+        timestamp.mTime.tv_nsec = timeNs - (timestamp.mTime.tv_sec * NANOS_PER_SECOND);
+        return NO_ERROR;
     }
     return INVALID_OPERATION;
 }
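
The new getTimestamp path above splits the 64-bit kernel time in nanoseconds into the seconds/nanoseconds pair of a timespec. The same conversion as a self-contained helper; the constant name is local to this sketch (the real code uses NANOS_PER_SECOND).

#include <cstdint>
#include <ctime>

static constexpr int64_t kNanosPerSecond = 1000000000LL;

static timespec nsToTimespec(int64_t timeNs) {
    timespec ts;
    ts.tv_sec  = static_cast<time_t>(timeNs / kNanosPerSecond);
    ts.tv_nsec = static_cast<long>(timeNs - static_cast<int64_t>(ts.tv_sec) * kNanosPerSecond);
    return ts;
}
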
@@ -4199,7 +4310,7 @@
                             "Enumerated device type(%#x) must not be used "
                             "as it does not support audio patches",
                             patch->sinks[i].ext.device.type);
-        type |= patch->sinks[i].ext.device.type;
+        type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
         deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
                 patch->sinks[i].ext.device.address));
     }
@@ -4449,11 +4560,12 @@
         // wrap the source side of the MonoPipe to make it an AudioBufferProvider
         fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe));
         fastTrack->mVolumeProvider = NULL;
-        fastTrack->mChannelMask = mChannelMask | mHapticChannelMask; // mPipeSink channel mask for
-                                                                     // audio to FastMixer
+        fastTrack->mChannelMask = static_cast<audio_channel_mask_t>(
+                mChannelMask | mHapticChannelMask); // mPipeSink channel mask for
+                                                    // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = AudioMixer::HAPTIC_SCALE_NONE;
+        fastTrack->mHapticIntensity = os::HapticScale::NONE;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
         state->mTrackMask = 1;
@@ -4464,7 +4576,8 @@
         // specify sink channel mask when haptic channel mask present as it can not
         // be calculated directly from channel count
         state->mSinkChannelMask = mHapticChannelMask == AUDIO_CHANNEL_NONE
-                ? AUDIO_CHANNEL_NONE : mChannelMask | mHapticChannelMask;
+                ? AUDIO_CHANNEL_NONE
+                : static_cast<audio_channel_mask_t>(mChannelMask | mHapticChannelMask);
         state->mCommand = FastMixerState::COLD_IDLE;
         // already done in constructor initialization list
         //mFastMixerFutex = 0;
@@ -5538,8 +5651,6 @@
                                                        status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AutoPark<FastMixer> park(mFastMixer);
@@ -5611,7 +5722,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 
@@ -5812,8 +5923,15 @@
         sp<Track> l = mActiveTracks.getLatest();
         bool last = l.get() == track;
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause, as an immediate flush will change
+            // the pause state so that it is no longer isPausing().
             if (mHwSupportsPause && last && !mHwPaused) {
                 doHwPause = true;
                 mHwPaused = true;
@@ -5839,11 +5957,19 @@
         // Allow draining the buffer in case the client
         // app does not call stop() and relies on underrun to stop:
         // hence the test on (track->mRetryCount > 1).
-        // If retryCount<=1 then track is about to underrun and be removed.
+        // If track->mRetryCount <= 1 then the track is about to be disabled, paused, or removed,
+        // so we accept any nonzero amount of data delivered by the AudioTrack (which will
+        // reset the retry counter).
         // Do not use a high threshold for compressed audio.
+
+        // The target retry count that we will use is based on the time we wait for retries.
+        const int32_t targetRetryCount = kMaxTrackRetriesDirectMs * 1000 / mActiveSleepTimeUs;
+        // The retry threshold is the count at or below which we accept any amount of PCM
+        // data.  It is slightly smaller than the retry count so we can push small bits of
+        // data without a glitch.
+        const int32_t retryThreshold = targetRetryCount > 2 ? targetRetryCount - 1 : 1;
         uint32_t minFrames;
         if ((track->sharedBuffer() == 0) && !track->isStopping_1() && !track->isPausing()
-            && (track->mRetryCount > 1) && audio_has_proportional_frames(mFormat)) {
+            && (track->mRetryCount > retryThreshold) && audio_has_proportional_frames(mFormat)) {
             minFrames = mNormalFrameCount;
         } else {
             minFrames = 1;
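
Editor's aside, not part of the patch: a quick numeric check of the retry arithmetic added above, using hypothetical values for the retry budget and the active sleep time (the real constants are defined elsewhere in AudioFlinger).

// Illustrative sketch only (editor's example, not part of this patch).
#include <cstdint>
#include <cstdio>

int main() {
    const int32_t kMaxTrackRetriesDirectMs = 200;  // hypothetical retry budget in ms
    const int32_t activeSleepTimeUs = 10000;       // hypothetical: the loop retries every 10 ms
    // Same formula as above: total retry budget divided by the time spent per retry.
    const int32_t targetRetryCount = kMaxTrackRetriesDirectMs * 1000 / activeSleepTimeUs;
    // One step before the track would be disabled, any nonzero PCM delivery is accepted.
    const int32_t retryThreshold = targetRetryCount > 2 ? targetRetryCount - 1 : 1;
    printf("targetRetryCount=%d retryThreshold=%d\n", targetRetryCount, retryThreshold); // 20 19
    return 0;
}
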
@@ -5887,7 +6013,7 @@
                 mPreviousTrack = track;
 
                 // reset retry count
-                track->mRetryCount = kMaxTrackRetriesDirect;
+                track->mRetryCount = targetRetryCount;
                 mActiveTrack = t;
                 mixerStatus = MIXER_TRACKS_READY;
                 if (mHwPaused) {
@@ -5912,16 +6038,8 @@
                     track->isStopping_2() || track->isPaused()) {
                 // We have consumed all the buffers of this track.
                 // Remove it from the list of active tracks.
-                size_t audioHALFrames;
-                if (audio_has_proportional_frames(mFormat)) {
-                    audioHALFrames = (latency_l() * mSampleRate) / 1000;
-                } else {
-                    audioHALFrames = 0;
-                }
-
-                int64_t framesWritten = mBytesWritten / mFrameSize;
                 if (mStandby || !last ||
-                        track->presentationComplete(framesWritten, audioHALFrames) ||
+                        track->presentationComplete(latency_l()) ||
                         track->isPaused() || mHwPaused) {
                     if (track->isStopping_2()) {
                         track->mState = TrackBase::STOPPED;
@@ -5941,15 +6059,17 @@
                     // indicate to client process that the track was disabled because of underrun;
                     // it will then automatically call start() when data is available
                     track->disable();
-                } else if (last) {
+                    // Only do a hardware pause when the track is going to be removed due to
+                    // BUFFER TIMEOUT. Unlike MixerThread, the HAL can be paused for direct output.
                     ALOGW("pause because of UNDERRUN, framesReady = %zu,"
                             "minFrames = %u, mFormat = %#x",
                             framesReady, minFrames, mFormat);
-                    mixerStatus = MIXER_TRACKS_ENABLED;
-                    if (mHwSupportsPause && !mHwPaused && !mStandby) {
+                    if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
                         doHwPause = true;
                         mHwPaused = true;
                     }
+                } else if (last) {
+                    mixerStatus = MIXER_TRACKS_ENABLED;
                 }
             }
         }
@@ -6021,16 +6141,13 @@
         mSleepTimeUs = mIdleSleepTimeUs;
         return;
     }
-    if (mSleepTimeUs == 0) {
-        if (mMixerStatus == MIXER_TRACKS_ENABLED) {
-            mSleepTimeUs = mActiveSleepTimeUs;
-        } else {
-            mSleepTimeUs = mIdleSleepTimeUs;
-        }
-    } else if (mBytesWritten != 0 && audio_has_proportional_frames(mFormat)) {
-        memset(mSinkBuffer, 0, mFrameCount * mFrameSize);
-        mSleepTimeUs = 0;
+    if (mMixerStatus == MIXER_TRACKS_ENABLED) {
+        mSleepTimeUs = mActiveSleepTimeUs;
+    } else {
+        mSleepTimeUs = mIdleSleepTimeUs;
     }
+    // Note: In S or later, we do not write zeroes for
+    // linear or proportional PCM direct tracks in underrun.
 }
 
 void AudioFlinger::DirectOutputThread::threadLoop_exit()
@@ -6072,8 +6189,6 @@
                                                               status_t& status)
 {
     bool reconfig = false;
-    bool a2dpDeviceChanged = false;
-
     status = NO_ERROR;
 
     AudioParameter param = AudioParameter(keyValuePair);
@@ -6108,7 +6223,7 @@
         }
     }
 
-    return reconfig || a2dpDeviceChanged;
+    return reconfig;
 }
 
 uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const
@@ -6165,7 +6280,7 @@
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
-    mTimestampVerifier.discontinuity(); // DIRECT and OFFLOADED flush resets frame count.
+    mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
 }
 
@@ -6357,8 +6472,15 @@
             continue;
         }
 
-        if (track->isPausing()) {
-            track->setPaused();
+        if (track->isPausePending()) {
+            track->pauseAck();
+            // It is possible a track might have been flushed or stopped.
+            // Other operations such as flush pending might occur on the next prepare.
+            if (track->isPausing()) {
+                track->setPaused();
+            }
+            // Always perform the pause here if last, as an immediate flush may have already
+            // changed the state so that it is no longer isPausing().
             if (last) {
                 if (mHwSupportsPause && !mHwPaused) {
                     doHwPause = true;
@@ -6491,23 +6613,17 @@
                 // Drain has completed or we are in standby, signal presentation complete
                 if (!(mDrainSequence & 1) || !last || mStandby) {
                     track->mState = TrackBase::STOPPED;
-                    uint32_t latency = 0;
-                    status_t result = mOutput->stream->getLatency(&latency);
-                    ALOGE_IF(result != OK,
-                            "Error when retrieving output stream latency: %d", result);
-                    size_t audioHALFrames = (latency * mSampleRate) / 1000;
-                    int64_t framesWritten =
-                            mBytesWritten / mOutput->getFrameSize();
-                    track->presentationComplete(framesWritten, audioHALFrames);
+                    track->presentationComplete(latency_l());
                     track->reset();
                     tracksToRemove->add(track);
-                    // DIRECT and OFFLOADED stop resets frame counts.
+                    // OFFLOADED stop resets frame counts.
                     if (!mUseAsyncWrite) {
                         // If we don't get explicit drain notification we must
                         // register discontinuity regardless of whether this is
                         // the previous (!last) or the upcoming (last) track
                         // to avoid skipping the discontinuity.
-                        mTimestampVerifier.discontinuity();
+                        mTimestampVerifier.discontinuity(
+                                mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
                     }
                 }
             } else {
@@ -6755,13 +6871,20 @@
     // from different OutputTracks and their associated MixerThreads (e.g. one may
     // be nearly empty and the other may be dropping data).
 
+    // TODO b/182392769: use attribution source util, move to server edge
+    AttributionSourceState attributionSource = AttributionSourceState();
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
+        IPCThreadState::self()->getCallingUid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(
+      IPCThreadState::self()->getCallingPid()));
+    attributionSource.token = sp<BBinder>::make();
     sp<OutputTrack> outputTrack = new OutputTrack(thread,
                                             this,
                                             mSampleRate,
                                             mFormat,
                                             mChannelMask,
                                             frameCount,
-                                            IPCThreadState::self()->getCallingUid());
+                                            attributionSource);
     status_t status = outputTrack != 0 ? outputTrack->initCheck() : (status_t) NO_MEMORY;
     if (status != NO_ERROR) {
         ALOGE("addOutputTrack() initCheck failed %d", status);
@@ -6881,7 +7004,7 @@
     snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
 
-    if (mInput != nullptr && mInput->audioHwDev != nullptr) {
+    if (mInput->audioHwDev != nullptr) {
         mIsMsdDevice = strcmp(
                 mInput->audioHwDev->moduleName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0;
     }
@@ -7176,19 +7299,29 @@
                     //    the only active track
                     // 2) invalidate this track: this will cause the client to reconnect and possibly
                     //    be invalidated again until unsilenced
+                    bool invalidate = false;
                     if (activeTrack->isSilenced()) {
                         if (size > 1) {
-                            activeTrack->invalidate();
-                            ALOG_ASSERT(fastTrackToRemove == 0);
-                            fastTrackToRemove = activeTrack;
-                            removeTrack_l(activeTrack);
-                            mActiveTracks.remove(activeTrack);
-                            size--;
-                            continue;
+                            invalidate = true;
                         } else {
                             silenceFastCapture = true;
                         }
                     }
+                    // Invalidate fast tracks if access to audio history is required as this is not
+                    // possible with fast tracks. Once the fast track has been invalidated, no new
+                    // fast track will be created until mMaxSharedAudioHistoryMs is cleared.
+                    if (mMaxSharedAudioHistoryMs != 0) {
+                        invalidate = true;
+                    }
+                    if (invalidate) {
+                        activeTrack->invalidate();
+                        ALOG_ASSERT(fastTrackToRemove == 0);
+                        fastTrackToRemove = activeTrack;
+                        removeTrack_l(activeTrack);
+                        mActiveTracks.remove(activeTrack);
+                        size--;
+                        continue;
+                    }
                     fastTrack = activeTrack;
                 }
 
@@ -7312,7 +7445,7 @@
 
             const ssize_t availableToRead = mPipeSource->availableToRead();
             if (availableToRead >= 0) {
-                // PipeSource is the master clock.  It is up to the AudioRecord client to keep up.
+                // PipeSource is the primary clock.  It is up to the AudioRecord client to keep up.
                 LOG_ALWAYS_FATAL_IF((size_t)availableToRead > mPipeFramesP2,
                         "more frames to read than fifo size, %zd > %zu",
                         availableToRead, mPipeFramesP2);
@@ -7365,7 +7498,9 @@
         if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
             int64_t position, time;
             if (mStandby) {
-                mTimestampVerifier.discontinuity();
+                mTimestampVerifier.discontinuity(audio_is_linear_pcm(mFormat) ?
+                    mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS :
+                    mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
             } else if (mSource->getCapturePosition(&position, &time) == NO_ERROR
                     && time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
 
@@ -7373,12 +7508,12 @@
 
                 // Correct timestamps
                 if (isTimestampCorrectionEnabled()) {
-                    ALOGV("TS_BEFORE: %d %lld %lld",
+                    ALOGVV("TS_BEFORE: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                     auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
                     position = correctedTimestamp.mFrames;
                     time = correctedTimestamp.mTimeNs;
-                    ALOGV("TS_AFTER: %d %lld %lld",
+                    ALOGVV("TS_AFTER: %d %lld %lld",
                             id(), (long long)time, (long long)position);
                 }
 
@@ -7428,7 +7563,7 @@
                         (framesRead - part1) * mFrameSize);
             }
         }
-        rear = mRsmpInRear += framesRead;
+        mRsmpInRear = audio_utils::safe_add_overflow(mRsmpInRear, (int32_t)framesRead);
 
         size = activeTracks.size();
 
@@ -7673,12 +7808,12 @@
         audio_session_t sessionId,
         size_t *pNotificationFrameCount,
         pid_t creatorPid,
-        uid_t uid,
+        const AttributionSourceState& attributionSource,
         audio_input_flags_t *flags,
         pid_t tid,
         status_t *status,
         audio_port_handle_t portId,
-        const String16& opPackageName)
+        int32_t maxSharedAudioHistoryMs)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -7687,6 +7822,8 @@
     audio_input_flags_t inputFlags = mInput->flags;
     audio_input_flags_t requestedFlags = *flags;
     uint32_t sampleRate;
+    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            attributionSource);
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
@@ -7700,13 +7837,25 @@
         goto Exit;
     }
 
+    if (maxSharedAudioHistoryMs != 0) {
+        if (!captureHotwordAllowed(checkedAttributionSource)) {
+            lStatus = PERMISSION_DENIED;
+            goto Exit;
+        }
+        if (maxSharedAudioHistoryMs < 0
+                || maxSharedAudioHistoryMs > AudioFlinger::kMaxSharedAudioHistoryMs) {
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
+    }
     if (*pSampleRate == 0) {
         *pSampleRate = mSampleRate;
     }
     sampleRate = *pSampleRate;
 
-    // special case for FAST flag considered OK if fast capture is present
-    if (hasFastCapture()) {
+    // special case for FAST flag considered OK if fast capture is present and access to
+    // audio history is not required
+    if (hasFastCapture() && mMaxSharedAudioHistoryMs == 0) {
         inputFlags = (audio_input_flags_t)(inputFlags | AUDIO_INPUT_FLAG_FAST);
     }
 
@@ -7718,8 +7867,9 @@
         *flags = (audio_input_flags_t)(*flags & inputFlags);
     }
 
-    // client expresses a preference for FAST, but we get the final say
-    if (*flags & AUDIO_INPUT_FLAG_FAST) {
+    // The client expresses a preference for FAST and does not need access to audio history,
+    // but we get the final say.
+    if ((*flags & AUDIO_INPUT_FLAG_FAST) && maxSharedAudioHistoryMs == 0) {
       if (
             // we formerly checked for a callback handler (non-0 tid),
             // but that is no longer required for TRANSFER_OBTAIN mode
@@ -7808,11 +7958,19 @@
 
     { // scope for mLock
         Mutex::Autolock _l(mLock);
+        int32_t startFrames = -1;
+        if (!mSharedAudioPackageName.empty()
+                && mSharedAudioPackageName == checkedAttributionSource.packageName
+                && mSharedAudioSessionId == sessionId
+                && captureHotwordAllowed(checkedAttributionSource)) {
+            startFrames = mSharedAudioStartFrames;
+        }
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
-                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid, uid,
-                      *flags, TrackBase::TYPE_DEFAULT, opPackageName, portId);
+                      nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
+                      checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+                      startFrames);
 
         lStatus = track->initCheck();
         if (lStatus != NO_ERROR) {
@@ -7828,6 +7986,10 @@
             // so ask activity manager to do this on our behalf
             sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp, true /*forApp*/);
         }
+
+        if (maxSharedAudioHistoryMs != 0) {
+            sendResizeBufferConfigEvent_l(maxSharedAudioHistoryMs);
+        }
     }
 
     lStatus = NO_ERROR;
@@ -7869,7 +8031,8 @@
         AutoMutex lock(mLock);
         if (recordTrack->isInvalid()) {
             recordTrack->clearSyncStartEvent();
-            return INVALID_OPERATION;
+            ALOGW("%s track %d: invalidated before startInput", __func__, recordTrack->portId());
+            return DEAD_OBJECT;
         }
         if (mActiveTracks.indexOf(recordTrack) >= 0) {
             if (recordTrack->mState == TrackBase::PAUSING) {
@@ -7899,7 +8062,8 @@
                     recordTrack->mState = TrackBase::STARTING_2;
                     // STARTING_2 forces destroy to call stopInput.
                 }
-                return INVALID_OPERATION;
+                ALOGW("%s track %d: invalidated after startInput", __func__, recordTrack->portId());
+                return DEAD_OBJECT;
             }
             if (recordTrack->mState != TrackBase::STARTING_1) {
                 ALOGW("%s(%d): unsynchronized mState:%d change",
@@ -8018,6 +8182,9 @@
 {
     ALOGV("RecordThread::getActiveMicrophones");
     AutoMutex _l(mLock);
+    if (!isStreamInitialized()) {
+        return NO_INIT;
+    }
     status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
     return status;
 }
@@ -8027,6 +8194,9 @@
 {
     ALOGV("setPreferredMicrophoneDirection(%d)", direction);
     AutoMutex _l(mLock);
+    if (!isStreamInitialized()) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneDirection(direction);
 }
 
@@ -8034,22 +8204,86 @@
 {
     ALOGV("setPreferredMicrophoneFieldDimension(%f)", zoom);
     AutoMutex _l(mLock);
+    if (!isStreamInitialized()) {
+        return NO_INIT;
+    }
     return mInput->stream->setPreferredMicrophoneFieldDimension(zoom);
 }
 
+status_t AudioFlinger::RecordThread::shareAudioHistory(
+        const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
+        int64_t sharedAudioStartMs) {
+    AutoMutex _l(mLock);
+    return shareAudioHistory_l(sharedAudioPackageName, sharedSessionId, sharedAudioStartMs);
+}
+
+status_t AudioFlinger::RecordThread::shareAudioHistory_l(
+        const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
+        int64_t sharedAudioStartMs) {
+
+    if ((hasAudioSession_l(sharedSessionId) & ThreadBase::TRACK_SESSION) == 0) {
+        return BAD_VALUE;
+    }
+
+    if (sharedAudioStartMs < 0
+        || sharedAudioStartMs > INT64_MAX / mSampleRate) {
+        return BAD_VALUE;
+    }
+
+    // The current implementation of the input resampling buffer wraps its indexes around at 32 bits.
+    // As we cannot detect more than one wraparound, only accept values up to the current write
+    // position after one wraparound.
+    // We only assume recent wraparounds on mRsmpInRear, given that it is unlikely that the
+    // requesting app waits several hours after the start time was computed.
+    int64_t sharedAudioStartFrames = sharedAudioStartMs * mSampleRate / 1000;
+    const int32_t sharedOffset = audio_utils::safe_sub_overflow(mRsmpInRear,
+          (int32_t)sharedAudioStartFrames);
+    // Bring the start frame position within the input buffer to match the documented
+    // "best effort" behavior of the API.
+    if (sharedOffset < 0) {
+        sharedAudioStartFrames = mRsmpInRear;
+    } else if (sharedOffset > mRsmpInFrames) {
+        sharedAudioStartFrames =
+                audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
+    }
+
+    mSharedAudioPackageName = sharedAudioPackageName;
+    if (mSharedAudioPackageName.empty()) {
+        resetAudioHistory_l();
+    } else {
+        mSharedAudioSessionId = sharedSessionId;
+        mSharedAudioStartFrames = (int32_t)sharedAudioStartFrames;
+    }
+    return NO_ERROR;
+}
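
Editor's aside, not part of the patch: a self-contained sketch of the 32-bit wraparound clamping described in the comments above, with a local stand-in for audio_utils::safe_sub_overflow and made-up positions.

// Illustrative sketch only (editor's example, not part of this patch).
#include <cstdint>
#include <cstdio>

// Local stand-in for audio_utils::safe_sub_overflow: int32_t subtraction that wraps on overflow.
static int32_t wrappingSub(int32_t a, int32_t b) {
    int32_t r;
    __builtin_sub_overflow(a, b, &r);
    return r;
}

int main() {
    // Hypothetical state: the 32-bit rear pointer wrapped past INT32_MAX a moment ago.
    const int32_t rsmpInRear = INT32_MIN + 100;   // current write position (just wrapped)
    const int32_t rsmpInFrames = 48000;           // buffer holds about 1 s at 48 kHz (assumed)
    int64_t startFrames = INT32_MAX - 50;         // requested start, computed before the wrap

    const int32_t offset = wrappingSub(rsmpInRear, (int32_t)startFrames); // 151 frames behind rear
    if (offset < 0) {
        startFrames = rsmpInRear;                             // start is ahead of rear: clamp to rear
    } else if (offset > rsmpInFrames) {
        startFrames = wrappingSub(rsmpInRear, rsmpInFrames);  // older than the buffer: clamp to oldest
    }
    printf("offset=%d startFrames=%lld\n", offset, (long long)startFrames);
    return 0;
}
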
+
+void AudioFlinger::RecordThread::resetAudioHistory_l() {
+    mSharedAudioSessionId = AUDIO_SESSION_NONE;
+    mSharedAudioStartFrames = -1;
+    mSharedAudioPackageName = "";
+}
+
 void AudioFlinger::RecordThread::updateMetadata_l()
 {
-    if (mInput == nullptr || mInput->stream == nullptr ||
-            !mActiveTracks.readAndClearHasChanged()) {
-        return;
+    if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
+        return; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
     for (const sp<RecordTrack> &track : mActiveTracks) {
+        // Do not forward PatchRecord metadata to audio HAL
+        if (track->isPatchTrack()) {
+            continue;
+        }
         // No track is invalid as this is called after prepareTrack_l in the same critical section
-        metadata.tracks.push_back({
+        record_track_metadata_v7_t trackMetadata;
+        trackMetadata.base = {
                 .source = track->attributes().source,
                 .gain = 1, // capture tracks do not have volumes
-        });
+        };
+        trackMetadata.channel_mask = track->channelMask();
+        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+        metadata.tracks.push_back(trackMetadata);
     }
     mInput->stream->updateSinkMetadata(metadata);
 }
@@ -8059,6 +8293,7 @@
 {
     track->terminate();
     track->mState = TrackBase::STOPPED;
+
     // active tracks are removed by threadLoop()
     if (mActiveTracks.indexOf(track) < 0) {
         removeTrack_l(track);
@@ -8166,8 +8401,23 @@
 {
     sp<ThreadBase> threadBase = mRecordTrack->mThread.promote();
     RecordThread *recordThread = (RecordThread *) threadBase.get();
-    mRsmpInFront = recordThread->mRsmpInRear;
     mRsmpInUnrel = 0;
+    const int32_t rear = recordThread->mRsmpInRear;
+    ssize_t deltaFrames = 0;
+    if (mRecordTrack->startFrames() >= 0) {
+        int32_t startFrames = mRecordTrack->startFrames();
+        // Accept a recent wraparound of mRsmpInRear
+        if (startFrames <= rear) {
+            deltaFrames = rear - startFrames;
+        } else {
+            deltaFrames = (int32_t)((int64_t)rear + UINT32_MAX + 1 - startFrames);
+        }
+        // start frame cannot be further in the past than start of resampling buffer
+        if ((size_t) deltaFrames > recordThread->mRsmpInFrames) {
+            deltaFrames = recordThread->mRsmpInFrames;
+        }
+    }
+    mRsmpInFront = audio_utils::safe_sub_overflow(rear, static_cast<int32_t>(deltaFrames));
 }
 
 void AudioFlinger::RecordThread::ResamplerBufferProvider::sync(
@@ -8221,6 +8471,7 @@
     // FIXME if client not keeping up, discard
     LOG_ALWAYS_FATAL_IF(!(0 <= filled && (size_t) filled <= recordThread->mRsmpInFrames));
     // 'filled' may be non-contiguous, so return only the first contiguous chunk
+
     front &= recordThread->mRsmpInFramesP2 - 1;
     size_t part1 = recordThread->mRsmpInFramesP2 - front;
     if (part1 > (size_t) filled) {
@@ -8317,7 +8568,7 @@
     if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) {
         audio_channel_mask_t mask = (audio_channel_mask_t) value;
         if (!audio_is_input_channel(mask) ||
-                audio_channel_count_from_in_mask(mask) > FCC_8) {
+                audio_channel_count_from_in_mask(mask) > FCC_LIMIT) {
             status = BAD_VALUE;
         } else {
             channelMask = mask;
@@ -8350,13 +8601,11 @@
         }
         if (reconfig) {
             if (status == BAD_VALUE) {
-                uint32_t sRate;
-                audio_channel_mask_t channelMask;
-                audio_format_t format;
-                if (mInput->stream->getAudioProperties(&sRate, &channelMask, &format) == OK &&
-                        audio_is_linear_pcm(format) && audio_is_linear_pcm(reqFormat) &&
-                        sRate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
-                        audio_channel_count_from_in_mask(channelMask) <= FCC_8) {
+                audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+                if (mInput->stream->getAudioProperties(&config) == OK &&
+                        audio_is_linear_pcm(config.format) && audio_is_linear_pcm(reqFormat) &&
+                        config.sample_rate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
+                        audio_channel_count_from_in_mask(config.channel_mask) <= FCC_LIMIT) {
                     status = NO_ERROR;
                 }
             }
@@ -8418,10 +8667,10 @@
     mFormat = mHALFormat;
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
     if (audio_is_linear_pcm(mFormat)) {
-        LOG_ALWAYS_FATAL_IF(mChannelCount > FCC_8, "HAL channel count %d > %d",
-                mChannelCount, FCC_8);
+        LOG_ALWAYS_FATAL_IF(mChannelCount > FCC_LIMIT, "HAL channel count %d > %d",
+                mChannelCount, FCC_LIMIT);
     } else {
-        // Can have more that FCC_8 channels in encoded streams.
+        // Can have more than FCC_LIMIT channels in encoded streams.
         ALOGI("HAL format %#x is not linear pcm", mFormat);
     }
     result = mInput->stream->getFrameSize(&mFrameSize);
@@ -8434,31 +8683,10 @@
     ALOGV("%p RecordThread params: mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mBufferSize=%zu, mFrameCount=%zu",
             this, mChannelCount, mFormat, mFrameSize, mBufferSize, mFrameCount);
-    // This is the formula for calculating the temporary buffer size.
-    // With 7 HAL buffers, we can guarantee ability to down-sample the input by ratio of 6:1 to
-    // 1 full output buffer, regardless of the alignment of the available input.
-    // The value is somewhat arbitrary, and could probably be even larger.
-    // A larger value should allow more old data to be read after a track calls start(),
-    // without increasing latency.
-    //
-    // Note this is independent of the maximum downsampling ratio permitted for capture.
-    mRsmpInFrames = mFrameCount * 7;
-    mRsmpInFramesP2 = roundup(mRsmpInFrames);
-    free(mRsmpInBuffer);
-    mRsmpInBuffer = NULL;
 
-    // TODO optimize audio capture buffer sizes ...
-    // Here we calculate the size of the sliding buffer used as a source
-    // for resampling.  mRsmpInFramesP2 is currently roundup(mFrameCount * 7).
-    // For current HAL frame counts, this is usually 2048 = 40 ms.  It would
-    // be better to have it derived from the pipe depth in the long term.
-    // The current value is higher than necessary.  However it should not add to latency.
-
-    // Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
-    mRsmpInFramesOA = mRsmpInFramesP2 + mFrameCount - 1;
-    (void)posix_memalign(&mRsmpInBuffer, 32, mRsmpInFramesOA * mFrameSize);
-    // if posix_memalign fails, will segv here.
-    memset(mRsmpInBuffer, 0, mRsmpInFramesOA * mFrameSize);
+    // mRsmpInFrames must be 0 before calling resizeInputBuffer_l for the first time
+    mRsmpInFrames = 0;
+    resizeInputBuffer_l(0 /*maxSharedAudioHistoryMs*/);
 
     // AudioRecord mSampleRate and mChannelCount are constant due to AudioRecord API constraints.
     // But if thread's mSampleRate or mChannelCount changes, how will that affect active tracks?
@@ -8554,7 +8782,7 @@
 
     // store new device and send to effects
     mInDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
-    mInDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+    mInDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
     audio_port_handle_t deviceId = patch->sources[0].id;
     for (size_t i = 0; i < mEffectChains.size(); i++) {
         mEffectChains[i]->setInputDevice_l(inDeviceTypeAddr());
@@ -8633,6 +8861,7 @@
 
 void AudioFlinger::RecordThread::updateOutDevices(const DeviceDescriptorBaseVector& outDevices)
 {
+    Mutex::Autolock _l(mLock);
     mOutDevices = outDevices;
     mOutDeviceTypeAddrs = deviceTypeAddrsFromDescriptors(mOutDevices);
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -8640,6 +8869,133 @@
     }
 }
 
+int32_t AudioFlinger::RecordThread::getOldestFront_l()
+{
+    if (mTracks.size() == 0) {
+        return mRsmpInRear;
+    }
+    int32_t oldestFront = mRsmpInRear;
+    int32_t maxFilled = 0;
+    for (size_t i = 0; i < mTracks.size(); i++) {
+        int32_t front = mTracks[i]->mResamplerBufferProvider->getFront();
+        int32_t filled;
+        (void)__builtin_sub_overflow(mRsmpInRear, front, &filled);
+        if (filled > maxFilled) {
+            oldestFront = front;
+            maxFilled = filled;
+        }
+    }
+    if (maxFilled > mRsmpInFrames) {
+        (void)__builtin_sub_overflow(mRsmpInRear, mRsmpInFrames, &oldestFront);
+    }
+    return oldestFront;
+}
+
+void AudioFlinger::RecordThread::updateFronts_l(int32_t offset)
+{
+    if (offset == 0) {
+        return;
+    }
+    for (size_t i = 0; i < mTracks.size(); i++) {
+        int32_t front = mTracks[i]->mResamplerBufferProvider->getFront();
+        front = audio_utils::safe_sub_overflow(front, offset);
+        mTracks[i]->mResamplerBufferProvider->setFront(front);
+    }
+}
+
+void AudioFlinger::RecordThread::resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs)
+{
+    // This is the formula for calculating the temporary buffer size.
+    // With 7 HAL buffers, we can guarantee the ability to down-sample the input by a ratio of
+    // 6:1 into 1 full output buffer, regardless of the alignment of the available input.
+    // The value is somewhat arbitrary, and could probably be even larger.
+    // A larger value should allow more old data to be read after a track calls start(),
+    // without increasing latency.
+    //
+    // Note this is independent of the maximum downsampling ratio permitted for capture.
+    size_t minRsmpInFrames = mFrameCount * 7;
+
+    // maxSharedAudioHistoryMs != 0 indicates a request to possibly make some part of the audio
+    // capture history available to another client using the same session ID:
+    // dimension the resampler input buffer accordingly.
+
+    // Get the oldest client read position: getOldestFront_l() must be called before altering
+    // mRsmpInRear or mRsmpInFrames.
+    int32_t previousFront = getOldestFront_l();
+    size_t previousRsmpInFramesP2 = mRsmpInFramesP2;
+    int32_t previousRear = mRsmpInRear;
+    mRsmpInRear = 0;
+
+    ALOG_ASSERT(maxSharedAudioHistoryMs >= 0
+            && maxSharedAudioHistoryMs <= AudioFlinger::kMaxSharedAudioHistoryMs,
+            "resizeInputBuffer_l() called with invalid max shared history %d",
+            maxSharedAudioHistoryMs);
+    if (maxSharedAudioHistoryMs != 0) {
+        // resizeInputBuffer_l should never be called with a non-zero shared history if the
+        // buffer was not already allocated.
+        ALOG_ASSERT(mRsmpInBuffer != nullptr && mRsmpInFrames != 0,
+                "resizeInputBuffer_l() called with shared history and unallocated buffer");
+        size_t rsmpInFrames = (size_t)maxSharedAudioHistoryMs * mSampleRate / 1000;
+        // never reduce resampler input buffer size
+        if (rsmpInFrames <= mRsmpInFrames) {
+            return;
+        }
+        mRsmpInFrames = rsmpInFrames;
+    }
+    mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
+    // Note: mRsmpInFrames is 0 when this is called with maxSharedAudioHistoryMs equal to 0,
+    // so it is always initialized.
+    if (mRsmpInFrames < minRsmpInFrames) {
+        mRsmpInFrames = minRsmpInFrames;
+    }
+    mRsmpInFramesP2 = roundup(mRsmpInFrames);
+
+    // TODO optimize audio capture buffer sizes ...
+    // Here we calculate the size of the sliding buffer used as a source
+    // for resampling.  mRsmpInFramesP2 is currently roundup(mFrameCount * 7).
+    // For current HAL frame counts, this is usually 2048 = 40 ms.  It would
+    // be better to have it derived from the pipe depth in the long term.
+    // The current value is higher than necessary.  However it should not add to latency.
+
+    // Over-allocate beyond mRsmpInFramesP2 to permit a HAL read past end of buffer
+    mRsmpInFramesOA = mRsmpInFramesP2 + mFrameCount - 1;
+
+    void *rsmpInBuffer;
+    (void)posix_memalign(&rsmpInBuffer, 32, mRsmpInFramesOA * mFrameSize);
+    // if posix_memalign fails, will segv here.
+    memset(rsmpInBuffer, 0, mRsmpInFramesOA * mFrameSize);
+
+    // Copy audio history if any from old buffer before freeing it
+    if (previousRear != 0) {
+        ALOG_ASSERT(mRsmpInBuffer != nullptr,
+                "resizeInputBuffer_l() called with null buffer but frames already read from HAL");
+
+        ssize_t unread = audio_utils::safe_sub_overflow(previousRear, previousFront);
+        previousFront &= previousRsmpInFramesP2 - 1;
+        size_t part1 = previousRsmpInFramesP2 - previousFront;
+        if (part1 > (size_t) unread) {
+            part1 = unread;
+        }
+        if (part1 != 0) {
+            memcpy(rsmpInBuffer, (const uint8_t*)mRsmpInBuffer + previousFront * mFrameSize,
+                   part1 * mFrameSize);
+            mRsmpInRear = part1;
+            part1 = unread - part1;
+            if (part1 != 0) {
+                memcpy((uint8_t*)rsmpInBuffer + mRsmpInRear * mFrameSize,
+                       (const uint8_t*)mRsmpInBuffer, part1 * mFrameSize);
+                mRsmpInRear += part1;
+            }
+        }
+        // Update front for all clients according to new rear
+        updateFronts_l(audio_utils::safe_sub_overflow(previousRear, mRsmpInRear));
+    } else {
+        mRsmpInRear = 0;
+    }
+    free(mRsmpInBuffer);
+    mRsmpInBuffer = rsmpInBuffer;
+}
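
Editor's aside, not part of the patch: the history copy above moves the unread region of the old power-of-two ring into the start of the new buffer in at most two memcpy chunks. A byte-sized toy version of that split, with all sizes hypothetical.

// Illustrative sketch only (editor's example, not part of this patch).
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

int main() {
    const size_t oldFramesP2 = 8;                // old ring capacity, a power of two (assumed)
    const size_t frameSize = 1;                  // one byte per frame keeps the toy readable
    std::vector<uint8_t> oldBuf = {'a','b','c','d','e','f','g','h'};
    std::vector<uint8_t> newBuf(16, 0);          // the resized (larger) buffer

    const int32_t previousFront = 6;             // oldest unread frame counter
    const int32_t previousRear  = 11;            // frames written so far: 5 frames are unread
    const size_t unread = previousRear - previousFront;       // 5
    const size_t front  = previousFront & (oldFramesP2 - 1);  // index 6 inside the old ring
    size_t part1 = oldFramesP2 - front;                       // 2 frames up to the wrap point
    if (part1 > unread) part1 = unread;

    memcpy(newBuf.data(), oldBuf.data() + front * frameSize, part1 * frameSize);   // "gh"
    size_t newRear = part1;
    const size_t part2 = unread - part1;                                           // 3 frames
    memcpy(newBuf.data() + newRear * frameSize, oldBuf.data(), part2 * frameSize); // "abc"
    newRear += part2;

    printf("copied %zu frames: %.5s\n", newRear, (const char *)newBuf.data());     // "ghabc"
    return 0;
}
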
+
 void AudioFlinger::RecordThread::addPatchTrack(const sp<PatchRecord>& record)
 {
     Mutex::Autolock _l(mLock);
@@ -8696,6 +9052,11 @@
     return mThread->getMmapPosition(position);
 }
 
+status_t AudioFlinger::MmapThreadHandle::getExternalPosition(uint64_t *position,
+                                                             int64_t *timeNanos) {
+    return mThread->getExternalPosition(position, timeNanos);
+}
+
 status_t AudioFlinger::MmapThreadHandle::start(const AudioClient& client,
         const audio_attributes_t *attr, audio_port_handle_t *handle)
 
@@ -8731,7 +9092,6 @@
 
 AudioFlinger::MmapThread::~MmapThread()
 {
-    releaseWakeLock_l();
 }
 
 void AudioFlinger::MmapThread::onFirstRef()
@@ -8781,7 +9141,6 @@
         return NO_INIT;
     }
     mStandby = true;
-    acquireWakeLock();
     return mHalStream->createMmapBuffer(minSizeFrames, info);
 }
 
@@ -8812,7 +9171,7 @@
                                          audio_port_handle_t *handle)
 {
     ALOGV("%s clientUid %d mStandby %d mPortId %d *handle %d", __FUNCTION__,
-          client.clientUid, mStandby, mPortId, *handle);
+          client.attributionSource.uid, mStandby, mPortId, *handle);
     if (mHalStream == 0) {
         return NO_INIT;
     }
@@ -8820,8 +9179,12 @@
     status_t ret;
 
     if (*handle == mPortId) {
-        // for the first track, reuse portId and session allocated when the stream was opened
-        return exitStandby();
+        // For the first track, reuse portId and session allocated when the stream was opened.
+        ret = exitStandby();
+        if (ret == NO_ERROR) {
+            acquireWakeLock();
+        }
+        return ret;
     }
 
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
@@ -8840,8 +9203,7 @@
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
-                                            client.clientPid,
-                                            client.clientUid,
+                                            client.attributionSource,
                                             &config,
                                             flags,
                                             &deviceId,
@@ -8858,9 +9220,7 @@
         ret = AudioSystem::getInputForAttr(&mAttr, &io,
                                               RECORD_RIID_INVALID,
                                               mSessionId,
-                                              client.clientPid,
-                                              client.clientUid,
-                                              client.packageName,
+                                              client.attributionSource,
                                               &config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ,
                                               &deviceId,
@@ -8900,16 +9260,16 @@
 
     // Given that MmapThread::mAttr is mutable, should a MmapTrack have attributes ?
     sp<MmapTrack> track = new MmapTrack(this, attr == nullptr ? mAttr : *attr, mSampleRate, mFormat,
-                                        mChannelMask, mSessionId, isOutput(), client.clientUid,
-                                        client.clientPid, IPCThreadState::self()->getCallingPid(),
-                                        portId);
+                                        mChannelMask, mSessionId, isOutput(),
+                                        client.attributionSource,
+                                        IPCThreadState::self()->getCallingPid(), portId);
 
     if (isOutput()) {
         // force volume update when a new track is added
         mHalVolFloat = -1.0f;
     } else if (!track->isSilenced_l()) {
         for (const sp<MmapTrack> &t : mActiveTracks) {
-            if (t->isSilenced_l() && t->uid() != client.clientUid)
+            if (t->isSilenced_l() && t->uid() != client.attributionSource.uid)
                 t->invalidate();
         }
     }
@@ -8942,6 +9302,7 @@
 
     if (handle == mPortId) {
         mHalStream->stop();
+        releaseWakeLock();
         return NO_ERROR;
     }
 
@@ -9184,7 +9545,7 @@
                                 "Enumerated device type(%#x) must not be used "
                                 "as it does not support audio patches",
                                 patch->sinks[i].ext.device.type);
-            type |= patch->sinks[i].ext.device.type;
+            type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
             sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
                     patch->sinks[i].ext.device.address));
         }
@@ -9195,7 +9556,7 @@
         deviceId = patch->sources[0].id;
         numDevices = mPatch.num_sources;
         sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
-        sourceDeviceTypeAddr.mAddress = patch->sources[0].ext.device.address;
+        sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
     }
 
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -9393,6 +9754,11 @@
         return BAD_VALUE;
     }
 
+    if (EffectModule::isHapticGenerator(&desc->type)) {
+        ALOGE("%s(): HapticGenerator is not supported for MmapThread", __func__);
+        return BAD_VALUE;
+    }
+
     return NO_ERROR;
 }
 
@@ -9598,23 +9964,29 @@
                 }
             }
         }
+        for (const sp<MmapTrack> &track : mActiveTracks) {
+            track->setMetadataHasChanged();
+        }
     }
 }
 
 void AudioFlinger::MmapPlaybackThread::updateMetadata_l()
 {
-    if (mOutput == nullptr || mOutput->stream == nullptr ||
-            !mActiveTracks.readAndClearHasChanged()) {
-        return;
+    if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
+        return; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
     for (const sp<MmapTrack> &track : mActiveTracks) {
         // No track is invalid as this is called after prepareTrack_l in the same critical section
-        metadata.tracks.push_back({
+        playback_track_metadata_v7_t trackMetadata;
+        trackMetadata.base = {
                 .usage = track->attributes().usage,
                 .content_type = track->attributes().content_type,
                 .gain = mHalVolFloat, // TODO: propagate from aaudio pre-mix volume
-        });
+        };
+        trackMetadata.channel_mask = track->channelMask();
+        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+        metadata.tracks.push_back(trackMetadata);
     }
     mOutput->stream->updateSourceMetadata(metadata);
 }
@@ -9645,6 +10017,20 @@
     }
 }
 
+status_t AudioFlinger::MmapPlaybackThread::getExternalPosition(uint64_t *position,
+                                                               int64_t *timeNanos)
+{
+    if (mOutput == nullptr) {
+        return NO_INIT;
+    }
+    struct timespec timestamp;
+    status_t status = mOutput->getPresentationPosition(position, &timestamp);
+    if (status == NO_ERROR) {
+        *timeNanos = timestamp.tv_sec * NANOS_PER_SECOND + timestamp.tv_nsec;
+    }
+    return status;
+}
+
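
Editor's aside, not part of the patch: the timestamp conversion in getExternalPosition() above is the usual timespec-to-nanoseconds fold; a standalone version with NANOS_PER_SECOND assumed to be 1e9.

// Illustrative sketch only (editor's example, not part of this patch).
#include <cstdint>
#include <cstdio>
#include <ctime>

int main() {
    const int64_t kNanosPerSecond = 1000000000LL;  // assumed value of NANOS_PER_SECOND
    struct timespec ts;
    ts.tv_sec = 12;
    ts.tv_nsec = 345678901;
    const int64_t timeNanos = (int64_t)ts.tv_sec * kNanosPerSecond + ts.tv_nsec;
    printf("%lld\n", (long long)timeNanos);        // 12345678901
    return 0;
}
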
 void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MmapThread::dumpInternals_l(fd, args);
@@ -9714,17 +10100,20 @@
 
 void AudioFlinger::MmapCaptureThread::updateMetadata_l()
 {
-    if (mInput == nullptr || mInput->stream == nullptr ||
-            !mActiveTracks.readAndClearHasChanged()) {
-        return;
+    if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
+        return; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
     for (const sp<MmapTrack> &track : mActiveTracks) {
         // No track is invalid as this is called after prepareTrack_l in the same critical section
-        metadata.tracks.push_back({
+        record_track_metadata_v7_t trackMetadata;
+        trackMetadata.base = {
                 .source = track->attributes().source,
                 .gain = 1, // capture tracks do not have volumes
-        });
+        };
+        trackMetadata.channel_mask = track->channelMask();
+        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+        metadata.tracks.push_back(trackMetadata);
     }
     mInput->stream->updateSinkMetadata(metadata);
 }
@@ -9749,4 +10138,13 @@
     }
 }
 
+status_t AudioFlinger::MmapCaptureThread::getExternalPosition(
+        uint64_t *position, int64_t *timeNanos)
+{
+    if (mInput == nullptr) {
+        return NO_INIT;
+    }
+    return mInput->getCapturePosition((int64_t*)position, timeNanos);
+}
+
 } // namespace android
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index c1ac2e4..16082a9 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -53,6 +53,7 @@
         CFG_EVENT_CREATE_AUDIO_PATCH,
         CFG_EVENT_RELEASE_AUDIO_PATCH,
         CFG_EVENT_UPDATE_OUT_DEVICE,
+        CFG_EVENT_RESIZE_BUFFER
     };
 
     class ConfigEventData: public RefBase {
@@ -242,6 +243,28 @@
         virtual ~UpdateOutDevicesConfigEvent();
     };
 
+    class ResizeBufferConfigEventData : public ConfigEventData {
+    public:
+        explicit ResizeBufferConfigEventData(int32_t maxSharedAudioHistoryMs) :
+            mMaxSharedAudioHistoryMs(maxSharedAudioHistoryMs) {}
+
+        virtual void dump(char *buffer, size_t size) {
+            snprintf(buffer, size, "mMaxSharedAudioHistoryMs: %d", mMaxSharedAudioHistoryMs);
+        }
+
+        int32_t mMaxSharedAudioHistoryMs;
+    };
+
+    class ResizeBufferConfigEvent : public ConfigEvent {
+    public:
+        explicit ResizeBufferConfigEvent(int32_t maxSharedAudioHistoryMs) :
+            ConfigEvent(CFG_EVENT_RESIZE_BUFFER) {
+            mData = new ResizeBufferConfigEventData(maxSharedAudioHistoryMs);
+        }
+
+        virtual ~ResizeBufferConfigEvent() {}
+    };
+
     class PMDeathRecipient : public IBinder::DeathRecipient {
     public:
         explicit    PMDeathRecipient(const wp<ThreadBase>& thread) : mThread(thread) {}
@@ -272,6 +295,7 @@
                 // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
                 // and returns the [normal mix] buffer's frame count.
     virtual     size_t      frameCount() const = 0;
+    virtual     audio_channel_mask_t hapticChannelMask() const { return AUDIO_CHANNEL_NONE; }
     virtual     uint32_t    latency_l() const { return 0; }
     virtual     void        setVolumeForOutput_l(float left __unused, float right __unused) const {}
 
@@ -305,6 +329,7 @@
                 status_t    sendReleaseAudioPatchConfigEvent(audio_patch_handle_t handle);
                 status_t    sendUpdateOutDeviceConfigEvent(
                                     const DeviceDescriptorBaseVector& outDevices);
+                void        sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs);
                 void        processConfigEvents_l();
     virtual     void        cacheParameters_l() = 0;
     virtual     status_t    createAudioPatch_l(const struct audio_patch *patch,
@@ -313,6 +338,9 @@
     virtual     void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices);
     virtual     void        toAudioPortConfig(struct audio_port_config *config) = 0;
 
+    virtual     void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs);
+
 
                 // see note at declaration of mStandby, mOutDevice and mInDevice
                 bool        standby() const { return mStandby; }
@@ -348,7 +376,7 @@
 
                 sp<EffectHandle> createEffect_l(
                                     const sp<AudioFlinger::Client>& client,
-                                    const sp<IEffectClient>& effectClient,
+                                    const sp<media::IEffectClient>& effectClient,
                                     int32_t priority,
                                     audio_session_t sessionId,
                                     effect_descriptor_t *desc,
@@ -434,8 +462,10 @@
 
                 // the value returned by default implementation is not important as the
                 // strategy is only meaningful for PlaybackThread which implements this method
-                virtual uint32_t getStrategyForSession_l(audio_session_t sessionId __unused)
-                        { return 0; }
+                virtual product_strategy_t getStrategyForSession_l(
+                        audio_session_t sessionId __unused) {
+                    return static_cast<product_strategy_t>(0);
+                }
 
                 // check if some effects must be suspended/restored when an effect is enabled
                 // or disabled
@@ -478,6 +508,27 @@
                 void onEffectEnable(const sp<EffectModule>& effect);
                 void onEffectDisable();
 
+                // invalidateTracksForAudioSession_l must be called with holding mLock.
+    virtual     void invalidateTracksForAudioSession_l(audio_session_t sessionId __unused) const { }
+                // Invalidate all the tracks with the given audio session.
+                void invalidateTracksForAudioSession(audio_session_t sessionId) const {
+                    Mutex::Autolock _l(mLock);
+                    invalidateTracksForAudioSession_l(sessionId);
+                }
+
+                template <typename T>
+                void invalidateTracksForAudioSession_l(audio_session_t sessionId,
+                                                       const T& tracks) const {
+                    for (size_t i = 0; i < tracks.size(); ++i) {
+                        const sp<TrackBase>& track = tracks[i];
+                        if (sessionId == track->sessionId()) {
+                            track->invalidate();
+                        }
+                    }
+                }
+
+    virtual     bool isStreamInitialized() = 0;
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -575,7 +626,7 @@
 
                 static const int        kThreadNameLength = 16; // prctl(PR_SET_NAME) limit
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
-                sp<IPowerManager>       mPowerManager;
+                sp<os::IPowerManager>   mPowerManager;
                 sp<IBinder>             mWakeLockToken;
                 const sp<PMDeathRecipient> mDeathRecipient;
                 // list of suspended effects per session and per type. The first (outer) vector is
@@ -590,6 +641,11 @@
                 ExtendedTimestamp       mTimestamp;
                 TimestampVerifier< // For timestamp statistics.
                         int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+                // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
+                // TODO: add confirmation checks:
+                // 1) DIRECT threads and linear PCM format really resets to 0?
+                // 2) Is frame count really valid if not linear pcm?
+                // 3) Are all 64 bits of position returned, not just lowest 32 bits?
                 // Timestamp corrected device should be a single device.
                 audio_devices_t         mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
 
@@ -687,7 +743,9 @@
                     void            updatePowerState(sp<ThreadBase> thread, bool force = false);
 
                     /** @return true if one or more active tracks were added or removed since the
-                     *          last time this function was called or the vector was created. */
+                     *          last time this function was called or the vector was created,
+                     *          or if the volume of one of the active tracks was changed.
+                     */
                     bool            readAndClearHasChanged();
 
                 private:
@@ -860,8 +918,8 @@
                                 audio_session_t sessionId,
                                 audio_output_flags_t *flags,
                                 pid_t creatorPid,
+                                const AttributionSourceState& attributionSource,
                                 pid_t tid,
-                                uid_t uid,
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
                                 const sp<media::IAudioTrackCallback>& callback);
@@ -903,7 +961,7 @@
                         uint32_t hasAudioSession_l(audio_session_t sessionId) const override {
                             return ThreadBase::hasAudioSession_l(sessionId, mTracks);
                         }
-                virtual uint32_t getStrategyForSession_l(audio_session_t sessionId);
+                virtual product_strategy_t getStrategyForSession_l(audio_session_t sessionId);
 
 
                 virtual status_t setSyncEvent(const sp<SyncEvent>& event);
@@ -939,6 +997,24 @@
                                         && outDeviceTypes().count(mTimestampCorrectedDevice) != 0;
                             }
 
+    virtual     bool        isStreamInitialized() {
+                                return !(mOutput == nullptr || mOutput->stream == nullptr);
+                            }
+
+                audio_channel_mask_t hapticChannelMask() const override {
+                                         return mHapticChannelMask;
+                                     }
+                bool supportsHapticPlayback() const {
+                    return (mHapticChannelMask & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE;
+                }
+
+                void setDownStreamPatch(const struct audio_patch *patch) {
+                    Mutex::Autolock _l(mLock);
+                    mDownStreamPatch = *patch;
+                }
+
+                PlaybackThread::Track* getTrackById_l(audio_port_handle_t trackId);
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1017,6 +1093,8 @@
 
     int64_t                         mBytesWritten;
     int64_t                         mFramesWritten; // not reset on standby
+    int64_t                         mLastFramesWritten = -1; // track changes in timestamp
+                                                             // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
 
     // mHapticChannelMask and mHapticChannelCount will only be valid when the thread support
@@ -1029,6 +1107,14 @@
     // copy rather than the one in AudioFlinger.  This optimization saves a lock.
     bool                            mMasterMute;
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
+
+                auto discontinuityForStandbyOrFlush() const { // call from threadLoop or with lock held.
+                    return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
+                                    || mType == OFFLOAD)
+                            ? mTimestampVerifier.DISCONTINUITY_MODE_ZERO
+                            : mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS;
+                }
+
 protected:
     ActiveTracks<Track>     mActiveTracks;
 
@@ -1061,6 +1147,11 @@
 
                 uint32_t    trackCountForUid_l(uid_t uid) const;
 
+                void        invalidateTracksForAudioSession_l(
+                                    audio_session_t sessionId) const override {
+                                ThreadBase::invalidateTracksForAudioSession_l(sessionId, mTracks);
+                            }
+
 private:
 
     friend class AudioFlinger;      // for numerous
@@ -1075,6 +1166,8 @@
     void        updateMetadata_l() final;
     virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
 
+    void        collectTimestamps_l();
+
     // The Tracks class manages tracks added and removed from the Thread.
     template <typename T>
     class Tracks {
@@ -1186,7 +1279,7 @@
 
     Mutex                                    mAudioTrackCbLock;
     // Record of IAudioTrackCallback
-    std::set<sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
+    std::map<sp<Track>, sp<media::IAudioTrackCallback>> mAudioTrackCallbacks;
 
 private:
     // The HAL output sink is treated as non-blocking, but current implementation is blocking
@@ -1217,6 +1310,10 @@
                 // volumes last sent to audio HAL with stream->setVolume()
                 float mLeftVolFloat;
                 float mRightVolFloat;
+
+                // audio patch used by the downstream software patch.
+                // Only used if ThreadBase::mIsMsdDevice is true.
+                struct audio_patch mDownStreamPatch;
 };
 
 class MixerThread : public PlaybackThread {
@@ -1553,6 +1650,9 @@
         // AudioBufferProvider interface
         virtual status_t    getNextBuffer(AudioBufferProvider::Buffer* buffer);
         virtual void        releaseBuffer(AudioBufferProvider::Buffer* buffer);
+
+                int32_t     getFront() const { return mRsmpInFront; }
+                void        setFront(int32_t front) { mRsmpInFront = front; }
     private:
         RecordTrack * const mRecordTrack;
         size_t              mRsmpInUnrel;   // unreleased frames remaining from
@@ -1598,12 +1698,12 @@
                     audio_session_t sessionId,
                     size_t *pNotificationFrameCount,
                     pid_t creatorPid,
-                    uid_t uid,
+                    const AttributionSourceState& attributionSource,
                     audio_input_flags_t *flags,
                     pid_t tid,
                     status_t *status /*non-NULL*/,
                     audio_port_handle_t portId,
-                    const String16& opPackageName);
+                    int32_t maxSharedAudioHistoryMs);
 
             status_t    start(RecordTrack* recordTrack,
                               AudioSystem::sync_event_t event,
@@ -1627,6 +1727,7 @@
                                            audio_patch_handle_t *handle);
     virtual status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
             void        updateOutDevices(const DeviceDescriptorBaseVector& outDevices) override;
+            void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs) override;
 
             void        addPatchTrack(const sp<PatchRecord>& record);
             void        deletePatchTrack(const sp<PatchRecord>& record);
@@ -1682,6 +1783,18 @@
                                     && inDeviceType() == mTimestampCorrectedDevice;
                         }
 
+            status_t    shareAudioHistory(const std::string& sharedAudioPackageName,
+                                          audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
+                                          int64_t sharedAudioStartMs = -1);
+            status_t    shareAudioHistory_l(const std::string& sharedAudioPackageName,
+                                          audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
+                                          int64_t sharedAudioStartMs = -1);
+            void        resetAudioHistory_l();
+
+    virtual bool        isStreamInitialized() {
+                            return !(mInput == nullptr || mInput->stream == nullptr);
+                        }
+
 protected:
             void        dumpInternals_l(int fd, const Vector<String16>& args) override;
             void        dumpTracks_l(int fd, const Vector<String16>& args) override;
@@ -1695,6 +1808,9 @@
 
             void    checkBtNrec_l();
 
+            int32_t getOldestFront_l();
+            void    updateFronts_l(int32_t offset);
+
             AudioStreamIn                       *mInput;
             Source                              *mSource;
             SortedVector < sp<RecordTrack> >    mTracks;
@@ -1760,6 +1876,11 @@
             int64_t                             mFramesRead = 0;    // continuous running counter.
 
             DeviceDescriptorBaseVector          mOutDevices;
+
+            int32_t                             mMaxSharedAudioHistoryMs = 0;
+            std::string                         mSharedAudioPackageName = {};
+            int32_t                             mSharedAudioStartFrames = -1;
+            audio_session_t                     mSharedAudioSessionId = AUDIO_SESSION_NONE;
 };
 
 class MmapThread : public ThreadBase
@@ -1791,6 +1912,7 @@
                    audio_port_handle_t *handle);
     status_t stop(audio_port_handle_t handle);
     status_t standby();
+    virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNanos) = 0;
 
     // RefBase
     virtual     void        onFirstRef();
@@ -1842,6 +1964,8 @@
     virtual     void        setRecordSilenced(audio_port_handle_t portId __unused,
                                               bool silenced __unused) {}
 
+    virtual     bool        isStreamInitialized() { return false; }
+
  protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
                 void        dumpTracks_l(int fd, const Vector<String16>& args) override;
@@ -1902,6 +2026,12 @@
 
     virtual     void        toAudioPortConfig(struct audio_port_config *config);
 
+                status_t    getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
+    virtual     bool        isStreamInitialized() {
+                                return !(mOutput == nullptr || mOutput->stream == nullptr);
+                            }
+
 protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
 
@@ -1932,6 +2062,12 @@
 
     virtual     void           toAudioPortConfig(struct audio_port_config *config);
 
+                status_t       getExternalPosition(uint64_t *position, int64_t *timeNanos) override;
+
+    virtual     bool           isStreamInitialized() {
+                                   return !(mInput == nullptr || mInput->stream == nullptr);
+                               }
+
 protected:
 
                 AudioStreamIn*  mInput;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 01d5345..92f129c 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -253,6 +253,19 @@
     // Called to tally underrun frames in playback.
     virtual void tallyUnderrunFrames(size_t /* frames */) {}
 
+    audio_channel_mask_t channelMask() const { return mChannelMask; }
+
+    /** @return true if the track has changed (metadata or volume) since
+     *          the last time this function was called, or if this function
+     *          has never been called since the track was created;
+     *          false otherwise.
+     *  Thread safe.
+     */
+    bool readAndClearHasChanged() { return !mChangeNotified.test_and_set(); }
+
+    /** Flag that the metadata has changed and must be notified to the backend. Thread safe. */
+    void setMetadataHasChanged() { mChangeNotified.clear(); }
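+    // Illustrative sketch of the intended use of this flag (assumed usage, names below are
+    // hypothetical): a producer calls setMetadataHasChanged() whenever volume or metadata is
+    // updated, and the playback thread polls each active track once per mixer pass, e.g.
+    //
+    //     bool metadataUpdateNeeded = false;            // hypothetical local
+    //     for (const auto& track : activeTracks) {
+    //         if (track->readAndClearHasChanged()) {
+    //             metadataUpdateNeeded = true;          // re-send metadata to the backend
+    //         }
+    //     }
+    //
+    // std::atomic_flag keeps both operations lock-free and thread safe.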
+
 protected:
     DISALLOW_COPY_AND_ASSIGN(TrackBase);
 
@@ -278,8 +291,6 @@
 
     size_t frameSize() const { return mFrameSize; }
 
-    audio_channel_mask_t channelMask() const { return mChannelMask; }
-
     virtual uint32_t sampleRate() const { return mSampleRate; }
 
     bool isStopped() const {
@@ -391,6 +402,9 @@
     std::atomic<FrameTime> mKernelFrameTime{};     // last frame time on kernel side.
     const pid_t         mCreatorPid;  // can be different from mclient->pid() for instance
                                       // when created by NuPlayer on behalf of a client
+
+    // Whether the last track change has been notified to the client via readAndClearHasChanged().
+    std::atomic_flag    mChangeNotified = ATOMIC_FLAG_INIT;
 };
 
 // PatchProxyBufferProvider interface is implemented by PatchTrack and PatchRecord.
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index af16448..7fb69be 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -67,7 +67,7 @@
         mIntervalStartTimeNs = systemTime();
     }
 
-    void logConstructor(pid_t creatorPid, uid_t creatorUid,
+    void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
             const std::string& traits = {},
             audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT) const {
         // Once this item is logged by the server, the client can add properties.
@@ -78,6 +78,7 @@
             .set(AMEDIAMETRICS_PROP_ALLOWUID, (int32_t)creatorUid)
             .set(AMEDIAMETRICS_PROP_EVENT,
                     AMEDIAMETRICS_PROP_PREFIX_SERVER AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+            .set(AMEDIAMETRICS_PROP_INTERNALTRACKID, internalTrackId)
             .set(AMEDIAMETRICS_PROP_TRAITS, traits);
         // log streamType from the service, since client doesn't know chosen streamType.
         if (streamType != AUDIO_STREAM_DEFAULT) {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 126015f..d2a30b1 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -33,6 +33,7 @@
 
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
+#include <media/AudioValidator.h>
 #include <media/RecordBufferConverter.h>
 #include <mediautils/ServiceUtilities.h>
 #include <audio_utils/minifloat.h>
@@ -52,8 +53,19 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
+// TODO: Remove when this is put into AidlConversionUtil.h
+#define VALUE_OR_RETURN_BINDER_STATUS(x)    \
+    ({                                      \
+       auto _tmp = (x);                     \
+       if (!_tmp.ok()) return ::android::aidl_utils::binderStatusFromStatusT(_tmp.error()); \
+       std::move(_tmp.value());             \
+     })
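+
+// Illustrative usage (sketch only; aidlUid is a placeholder variable): inside a method
+// returning binder::Status, the macro unwraps a ConversionResult<T> or returns early with
+// the corresponding binder status on failure, e.g.
+//     const uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(aidlUid));
+// The statement-expression ({ ... }) syntax is a GNU extension already used in this file.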
+
 namespace android {
 
+using ::android::aidl_utils::binderStatusFromStatusT;
+using binder::Status;
+using content::AttributionSourceState;
 using media::VolumeShaper;
 // ----------------------------------------------------------------------------
 //      TrackBase
@@ -226,6 +238,15 @@
     }
 }
 
+// TODO b/182392769: use attribution source util
+static AttributionSourceState audioServerAttributionSource(pid_t pid) {
+    AttributionSourceState attributionSource{};
+    attributionSource.uid = AID_AUDIOSERVER;
+    attributionSource.pid = pid;
+    attributionSource.token = sp<BBinder>::make();
+    return attributionSource;
+}
+
 status_t AudioFlinger::ThreadBase::TrackBase::initCheck() const
 {
     status_t status;
@@ -319,64 +340,156 @@
     mTrack->destroy();
 }
 
-sp<IMemory> AudioFlinger::TrackHandle::getCblk() const {
-    return mTrack->getCblk();
+Status AudioFlinger::TrackHandle::getCblk(
+        std::optional<media::SharedFileRegion>* _aidl_return) {
+    *_aidl_return = legacy2aidl_NullableIMemory_SharedFileRegion(mTrack->getCblk()).value();
+    return Status::ok();
 }
 
-status_t AudioFlinger::TrackHandle::start() {
-    return mTrack->start();
+Status AudioFlinger::TrackHandle::start(int32_t* _aidl_return) {
+    *_aidl_return = mTrack->start();
+    return Status::ok();
 }
 
-void AudioFlinger::TrackHandle::stop() {
+Status AudioFlinger::TrackHandle::stop() {
     mTrack->stop();
+    return Status::ok();
 }
 
-void AudioFlinger::TrackHandle::flush() {
+Status AudioFlinger::TrackHandle::flush() {
     mTrack->flush();
+    return Status::ok();
 }
 
-void AudioFlinger::TrackHandle::pause() {
+Status AudioFlinger::TrackHandle::pause() {
     mTrack->pause();
+    return Status::ok();
 }
 
-status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId)
+Status AudioFlinger::TrackHandle::attachAuxEffect(int32_t effectId,
+                                                  int32_t* _aidl_return) {
+    *_aidl_return = mTrack->attachAuxEffect(effectId);
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::setParameters(const std::string& keyValuePairs,
+                                                int32_t* _aidl_return) {
+    *_aidl_return = mTrack->setParameters(String8(keyValuePairs.c_str()));
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::selectPresentation(int32_t presentationId, int32_t programId,
+                                                     int32_t* _aidl_return) {
+    *_aidl_return = mTrack->selectPresentation(presentationId, programId);
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::getTimestamp(media::AudioTimestampInternal* timestamp,
+                                               int32_t* _aidl_return) {
+    AudioTimestamp legacy;
+    *_aidl_return = mTrack->getTimestamp(legacy);
+    if (*_aidl_return != OK) {
+        return Status::ok();
+    }
+    *timestamp = legacy2aidl_AudioTimestamp_AudioTimestampInternal(legacy).value();
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::signal() {
+    mTrack->signal();
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::applyVolumeShaper(
+        const media::VolumeShaperConfiguration& configuration,
+        const media::VolumeShaperOperation& operation,
+        int32_t* _aidl_return) {
+    sp<VolumeShaper::Configuration> conf = new VolumeShaper::Configuration();
+    *_aidl_return = conf->readFromParcelable(configuration);
+    if (*_aidl_return != OK) {
+        return Status::ok();
+    }
+
+    sp<VolumeShaper::Operation> op = new VolumeShaper::Operation();
+    *_aidl_return = op->readFromParcelable(operation);
+    if (*_aidl_return != OK) {
+        return Status::ok();
+    }
+
+    *_aidl_return = mTrack->applyVolumeShaper(conf, op);
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::getVolumeShaperState(
+        int32_t id,
+        std::optional<media::VolumeShaperState>* _aidl_return) {
+    sp<VolumeShaper::State> legacy = mTrack->getVolumeShaperState(id);
+    if (legacy == nullptr) {
+        _aidl_return->reset();
+        return Status::ok();
+    }
+    media::VolumeShaperState aidl;
+    legacy->writeToParcelable(&aidl);
+    *_aidl_return = aidl;
+    return Status::ok();
+}
+
+Status AudioFlinger::TrackHandle::getDualMonoMode(media::AudioDualMonoMode* _aidl_return)
 {
-    return mTrack->attachAuxEffect(EffectId);
+    audio_dual_mono_mode_t mode = AUDIO_DUAL_MONO_MODE_OFF;
+    const status_t status = mTrack->getDualMonoMode(&mode)
+            ?: AudioValidator::validateDualMonoMode(mode);
+    if (status == OK) {
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
+    }
+    return binderStatusFromStatusT(status);
 }
 
-status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) {
-    return mTrack->setParameters(keyValuePairs);
-}
-
-status_t AudioFlinger::TrackHandle::selectPresentation(int presentationId, int programId) {
-    return mTrack->selectPresentation(presentationId, programId);
-}
-
-VolumeShaper::Status AudioFlinger::TrackHandle::applyVolumeShaper(
-        const sp<VolumeShaper::Configuration>& configuration,
-        const sp<VolumeShaper::Operation>& operation) {
-    return mTrack->applyVolumeShaper(configuration, operation);
-}
-
-sp<VolumeShaper::State> AudioFlinger::TrackHandle::getVolumeShaperState(int id) {
-    return mTrack->getVolumeShaperState(id);
-}
-
-status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp)
+Status AudioFlinger::TrackHandle::setDualMonoMode(
+        media::AudioDualMonoMode mode)
 {
-    return mTrack->getTimestamp(timestamp);
+    const auto localMonoMode = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(mode));
+    return binderStatusFromStatusT(AudioValidator::validateDualMonoMode(localMonoMode)
+            ?: mTrack->setDualMonoMode(localMonoMode));
 }
 
-
-void AudioFlinger::TrackHandle::signal()
+Status AudioFlinger::TrackHandle::getAudioDescriptionMixLevel(float* _aidl_return)
 {
-    return mTrack->signal();
+    float leveldB = -std::numeric_limits<float>::infinity();
+    const status_t status = mTrack->getAudioDescriptionMixLevel(&leveldB)
+            ?: AudioValidator::validateAudioDescriptionMixLevel(leveldB);
+    if (status == OK) *_aidl_return = leveldB;
+    return binderStatusFromStatusT(status);
 }
 
-status_t AudioFlinger::TrackHandle::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+Status AudioFlinger::TrackHandle::setAudioDescriptionMixLevel(float leveldB)
 {
-    return BnAudioTrack::onTransact(code, data, reply, flags);
+    return binderStatusFromStatusT(AudioValidator::validateAudioDescriptionMixLevel(leveldB)
+             ?: mTrack->setAudioDescriptionMixLevel(leveldB));
+}
+
+Status AudioFlinger::TrackHandle::getPlaybackRateParameters(
+        media::AudioPlaybackRate* _aidl_return)
+{
+    audio_playback_rate_t localPlaybackRate{};
+    status_t status = mTrack->getPlaybackRateParameters(&localPlaybackRate)
+            ?: AudioValidator::validatePlaybackRate(localPlaybackRate);
+    if (status == NO_ERROR) {
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(localPlaybackRate));
+    }
+    return binderStatusFromStatusT(status);
+}
+
+Status AudioFlinger::TrackHandle::setPlaybackRateParameters(
+        const media::AudioPlaybackRate& playbackRate)
+{
+    const audio_playback_rate_t localPlaybackRate = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(playbackRate));
+    return binderStatusFromStatusT(AudioValidator::validatePlaybackRate(localPlaybackRate)
+            ?: mTrack->setPlaybackRateParameters(localPlaybackRate));
 }
 
 // ----------------------------------------------------------------------------
@@ -386,11 +499,13 @@
 // static
 sp<AudioFlinger::PlaybackThread::OpPlayAudioMonitor>
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
+            const AttributionSourceState& attributionSource, const audio_attributes_t& attr, int id,
+            audio_stream_type_t streamType)
 {
+    Vector <String16> packages;
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    getPackagesForUid(uid, packages);
     if (isServiceUid(uid)) {
-        Vector <String16> packages;
-        getPackagesForUid(uid, packages);
         if (packages.isEmpty()) {
             ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
                   id,
@@ -410,12 +525,16 @@
             id, attr.flags);
         return nullptr;
     }
-    return new OpPlayAudioMonitor(uid, attr.usage, id);
+
+    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            attributionSource);
+    return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
-        uid_t uid, audio_usage_t usage, int id)
-        : mHasOpPlayAudio(true), mUid(uid), mUsage((int32_t) usage), mId(id)
+        const AttributionSourceState& attributionSource, audio_usage_t usage, int id)
+        : mHasOpPlayAudio(true), mAttributionSource(attributionSource), mUsage((int32_t) usage),
+        mId(id)
 {
 }
 
@@ -429,11 +548,13 @@
 
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
 {
-    getPackagesForUid(mUid, mPackages);
     checkPlayAudioForUsage();
-    if (!mPackages.isEmpty()) {
+    if (mAttributionSource.packageName.has_value()) {
         mOpCallback = new PlayAudioOpCallback(this);
-        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO, mPackages[0], mOpCallback);
+        mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO,
+            VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                mAttributionSource.packageName.value_or(""))),
+            mOpCallback);
     }
 }
 
@@ -446,18 +567,14 @@
 // - not called from PlayAudioOpCallback because the callback is not installed in this case
 void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::checkPlayAudioForUsage()
 {
-    if (mPackages.isEmpty()) {
+    if (!mAttributionSource.packageName.has_value()) {
         mHasOpPlayAudio.store(false);
     } else {
-        bool hasIt = true;
-        for (const String16& packageName : mPackages) {
-            const int32_t mode = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
-                    mUsage, mUid, packageName);
-            if (mode != AppOpsManager::MODE_ALLOWED) {
-                hasIt = false;
-                break;
-            }
-        }
+        uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAttributionSource.uid));
+        String16 packageName = VALUE_OR_FATAL(
+            aidl2legacy_string_view_String16(mAttributionSource.packageName.value_or("")));
+        bool hasIt = mAppOpsManager.checkAudioOpNoThrow(AppOpsManager::OP_PLAY_AUDIO,
+                    mUsage, uid, packageName) == AppOpsManager::MODE_ALLOWED;
         ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasIt ? "not " : "");
         mHasOpPlayAudio.store(hasIt);
     }
@@ -507,11 +624,12 @@
             const sp<IMemory>& sharedBuffer,
             audio_session_t sessionId,
             pid_t creatorPid,
-            uid_t uid,
+            const AttributionSourceState& attributionSource,
             audio_output_flags_t flags,
             track_type type,
             audio_port_handle_t portId,
-            size_t frameCountToBeReady)
+            size_t frameCountToBeReady,
+            float speed)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -519,7 +637,8 @@
                   //       issue (e.g. by copying).
                   (sharedBuffer != 0) ? sharedBuffer->unsecurePointer() : buffer,
                   (sharedBuffer != 0) ? sharedBuffer->size() : bufferSize,
-                  sessionId, creatorPid, uid, true /*isOut*/,
+                  sessionId, creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid)), true /*isOut*/,
                   (type == TYPE_PATCH) ? ( buffer == NULL ? ALLOC_LOCAL : ALLOC_NONE) : ALLOC_CBLK,
                   type,
                   portId,
@@ -531,12 +650,11 @@
     mMainBuffer(thread->sinkBuffer()),
     mAuxBuffer(NULL),
     mAuxEffectId(0), mHasVolumeController(false),
-    mPresentationCompleteFrames(0),
     mFrameMap(16 /* sink-frame-to-track-frame map memory */),
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
-    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr, id(), streamType)),
+    mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(attributionSource, attr, id(),
+        streamType)),
     // mSinkTimestamp
-    mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
     mCachedVolume(1.0),
     /* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -544,7 +662,8 @@
     mFinalVolume(0.f),
     mResumeToStopping(false),
     mFlushHwPending(false),
-    mFlags(flags)
+    mFlags(flags),
+    mSpeed(speed)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -556,6 +675,7 @@
         return;
     }
 
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     if (!thread->isTrackAllowed_l(channelMask, format, sessionId, uid)) {
         ALOGE("%s(%d): no more tracks available", __func__, mId);
         releaseCblk(); // this makes the track invalid.
@@ -570,6 +690,7 @@
                 mFrameSize, sampleRate);
     }
     mServerProxy = mAudioTrackServerProxy;
+    mServerProxy->setStartThresholdInFrames(frameCountToBeReady); // update the Cblk value
 
     // only allocate a fast track index if we were able to allocate a normal track name
     if (flags & AUDIO_OUTPUT_FLAG_FAST) {
@@ -595,15 +716,20 @@
             + "_" + std::to_string(mId) + "_T");
 #endif
 
-    if (channelMask & AUDIO_CHANNEL_HAPTIC_ALL) {
+    if (thread->supportsHapticPlayback()) {
+        // If the track is attached to a haptic playback thread, it may have a
+        // HapticGenerator effect that produces haptic data on the track. In that case,
+        // an external vibration is always created for every track attached to the
+        // haptic playback thread.
         mAudioVibrationController = new AudioVibrationController(this);
+        std::string packageName = attributionSource.packageName.has_value() ?
+            attributionSource.packageName.value() : "";
         mExternalVibration = new os::ExternalVibration(
-                mUid, "" /* pkg */, mAttr, mAudioVibrationController);
+                mUid, packageName, mAttr, mAudioVibrationController);
     }
 
     // Once this item is logged by the server, the client can add properties.
     const char * const traits = sharedBuffer == 0 ? "" : "static";
-    mTrackMetrics.logConstructor(creatorPid, uid, traits, streamType);
+    mTrackMetrics.logConstructor(creatorPid, uid, id(), traits, streamType);
 }
 
 AudioFlinger::PlaybackThread::Track::~Track()
@@ -897,7 +1023,10 @@
     }
 
     size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
-    size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+    // Note: mServerProxy->getStartThresholdInFrames() is clamped.
+    const size_t startThresholdInFrames = mServerProxy->getStartThresholdInFrames();
+    const size_t framesToBeReady = std::clamp(  // clamp again to validate client values.
+            std::min(startThresholdInFrames, bufferSizeInFrames), size_t(1), mFrameCount);
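+    // Worked example (illustrative values only): with mFrameCount = 960, a client start
+    // threshold of 4000 frames and a buffer of 1920 frames, std::min yields 1920 and the
+    // clamp brings it back to 960, so an out-of-range client value can never delay
+    // readiness beyond one full track buffer.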
 
     if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
         ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
@@ -936,6 +1065,13 @@
         // initial state-stopping. next state-pausing.
         // What if resume is called ?
 
+        if (state == FLUSHED) {
+            // avoid underrun glitches when starting after flush
+            reset();
+        }
+
+        // clear mPauseHwPending because of pause (and possibly flush) during underrun.
+        mPauseHwPending = false;
         if (state == PAUSED || state == PAUSING) {
             if (mResumeToStopping) {
                 // happened we need to resume to STOPPING_1
@@ -1067,6 +1203,9 @@
             mState = PAUSING;
             ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
                     __func__, mId, (int)mThreadIoHandle);
+            if (isOffloadedOrDirect()) {
+                mPauseHwPending = true;
+            }
             playbackThread->broadcast_l();
             break;
 
@@ -1154,6 +1293,11 @@
     mFlushHwPending = false;
 }
 
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+    mPauseHwPending = false;
+}
+
 void AudioFlinger::PlaybackThread::Track::reset()
 {
     // Do not reset twice to avoid discarding data written just after a flush and before
@@ -1251,16 +1395,24 @@
 
 void AudioFlinger::PlaybackThread::Track::copyMetadataTo(MetadataInserter& backInserter) const
 {
-    *backInserter++ = {
+    playback_track_metadata_v7_t metadata;
+    metadata.base = {
             .usage = mAttr.usage,
             .content_type = mAttr.content_type,
             .gain = mFinalVolume,
     };
+    metadata.channel_mask = mChannelMask;
+    strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+    *backInserter++ = metadata;
 }
 
 void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
     forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
     mTeePatches = std::move(teePatches);
+    if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
+            mState == TrackBase::STOPPING_1) {
+        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+    }
 }
 
 status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp)
@@ -1309,6 +1461,7 @@
     mAuxBuffer = buffer;
 }
 
+// presentationComplete verified by frames, used by Mixed tracks.
 bool AudioFlinger::PlaybackThread::Track::presentationComplete(
         int64_t framesWritten, size_t audioHalFrames)
 {
@@ -1327,30 +1480,70 @@
             (long long)mPresentationCompleteFrames, (long long)framesWritten);
     if (mPresentationCompleteFrames == 0) {
         mPresentationCompleteFrames = framesWritten + audioHalFrames;
-        ALOGV("%s(%d): presentationComplete() reset:"
+        ALOGV("%s(%d): set:"
                 " mPresentationCompleteFrames %lld audioHalFrames %zu",
                 __func__, mId,
                 (long long)mPresentationCompleteFrames, audioHalFrames);
     }
 
     bool complete;
-    if (isOffloaded()) {
-        complete = true;
-    } else if (isDirect() || isFastTrack()) { // these do not go through linear map
+    if (isFastTrack()) { // does not go through linear map
         complete = framesWritten >= (int64_t) mPresentationCompleteFrames;
+        ALOGV("%s(%d): %s framesWritten:%lld  mPresentationCompleteFrames:%lld",
+                __func__, mId, (complete ? "complete" : "waiting"),
+                (long long) framesWritten, (long long) mPresentationCompleteFrames);
     } else {  // Normal tracks, OutputTracks, and PatchTracks
         complete = framesWritten >= (int64_t) mPresentationCompleteFrames
                 && mAudioTrackServerProxy->isDrained();
     }
 
     if (complete) {
-        triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
-        mAudioTrackServerProxy->setStreamEndDone();
+        notifyPresentationComplete();
         return true;
     }
     return false;
 }
 
+// presentationComplete checked by time, used by DirectTracks.
+bool AudioFlinger::PlaybackThread::Track::presentationComplete(uint32_t latencyMs)
+{
+    // For Offloaded or Direct tracks.
+
+    // For a direct track, presentation completion is tested on a time basis: once the
+    // deadline is set, we wait out the reported latency (scaled by playback speed).
+
+    // For an offloaded track the HAL + h/w delay is variable, so a HAL drain() is used
+    // to detect when all frames have been played. In this case latencyMs isn't
+    // useful because it doesn't always reflect whether there is data in the h/w
+    // buffers, particularly if a track has been paused and resumed during draining.
+
+    constexpr float MIN_SPEED = 0.125f; // min speed scaling allowed for timely response.
+    if (mPresentationCompleteTimeNs == 0) {
+        mPresentationCompleteTimeNs = systemTime() + latencyMs * 1e6 / fmax(mSpeed, MIN_SPEED);
+        ALOGV("%s(%d): set: latencyMs %u  mPresentationCompleteTimeNs:%lld",
+                __func__, mId, latencyMs, (long long) mPresentationCompleteTimeNs);
+    }
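+    // Arithmetic sketch (assumed values): with latencyMs = 80 and mSpeed = 0.5, the
+    // deadline is systemTime() + 80e6 / 0.5 ns = systemTime() + 160 ms. Clamping the
+    // speed at MIN_SPEED (0.125) bounds the wait at 8x the reported latency.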
+
+    bool complete;
+    if (isOffloaded()) {
+        complete = true;
+    } else { // Direct
+        complete = systemTime() >= mPresentationCompleteTimeNs;
+        ALOGV("%s(%d): %s", __func__, mId, (complete ? "complete" : "waiting"));
+    }
+    if (complete) {
+        notifyPresentationComplete();
+        return true;
+    }
+    return false;
+}
+
+void AudioFlinger::PlaybackThread::Track::notifyPresentationComplete()
+{
+    // This only triggers once. TODO: should we enforce this?
+    triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
+    mAudioTrackServerProxy->setStreamEndDone();
+}
+
 void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type)
 {
     for (size_t i = 0; i < mSyncEvents.size();) {
@@ -1437,6 +1630,108 @@
     }
 }
 
+status_t AudioFlinger::PlaybackThread::Track::getDualMonoMode(audio_dual_mono_mode_t* mode)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            PlaybackThread *t = (PlaybackThread *)thread.get();
+            Mutex::Autolock _l(t->mLock);
+            status = t->mOutput->stream->getDualMonoMode(mode);
+            ALOGD_IF((status == NO_ERROR) && (mDualMonoMode != *mode),
+                    "%s: mode %d inconsistent", __func__, mDualMonoMode);
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::PlaybackThread::Track::setDualMonoMode(audio_dual_mono_mode_t mode)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            auto t = static_cast<PlaybackThread *>(thread.get());
+            Mutex::Autolock lock(t->mLock);
+            status = t->mOutput->stream->setDualMonoMode(mode);
+            if (status == NO_ERROR) {
+                mDualMonoMode = mode;
+            }
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::PlaybackThread::Track::getAudioDescriptionMixLevel(float* leveldB)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            auto t = static_cast<PlaybackThread *>(thread.get());
+            Mutex::Autolock lock(t->mLock);
+            status = t->mOutput->stream->getAudioDescriptionMixLevel(leveldB);
+            ALOGD_IF((status == NO_ERROR) && (mAudioDescriptionMixLevel != *leveldB),
+                    "%s: level %.3f inconsistent", __func__, mAudioDescriptionMixLevel);
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::PlaybackThread::Track::setAudioDescriptionMixLevel(float leveldB)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            auto t = static_cast<PlaybackThread *>(thread.get());
+            Mutex::Autolock lock(t->mLock);
+            status = t->mOutput->stream->setAudioDescriptionMixLevel(leveldB);
+            if (status == NO_ERROR) {
+                mAudioDescriptionMixLevel = leveldB;
+            }
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::PlaybackThread::Track::getPlaybackRateParameters(
+        audio_playback_rate_t* playbackRate)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            auto t = static_cast<PlaybackThread *>(thread.get());
+            Mutex::Autolock lock(t->mLock);
+            status = t->mOutput->stream->getPlaybackRateParameters(playbackRate);
+            ALOGD_IF((status == NO_ERROR) &&
+                    !isAudioPlaybackRateEqual(mPlaybackRateParameters, *playbackRate),
+                    "%s: playbackRate inconsistent", __func__);
+        }
+    }
+    return status;
+}
+
+status_t AudioFlinger::PlaybackThread::Track::setPlaybackRateParameters(
+        const audio_playback_rate_t& playbackRate)
+{
+    status_t status = INVALID_OPERATION;
+    if (isOffloadedOrDirect()) {
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr) {
+            auto t = static_cast<PlaybackThread *>(thread.get());
+            Mutex::Autolock lock(t->mLock);
+            status = t->mOutput->stream->setPlaybackRateParameters(playbackRate);
+            if (status == NO_ERROR) {
+                mPlaybackRateParameters = playbackRate;
+            }
+        }
+    }
+    return status;
+}
+
 //To be called with thread lock held
 bool AudioFlinger::PlaybackThread::Track::isResumePending() {
 
@@ -1589,12 +1884,12 @@
             audio_format_t format,
             audio_channel_mask_t channelMask,
             size_t frameCount,
-            uid_t uid)
+            const AttributionSourceState& attributionSource)
     :   Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
               audio_attributes_t{} /* currently unused for output track */,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, getpid(), uid, AUDIO_OUTPUT_FLAG_NONE,
+              AUDIO_SESSION_NONE, getpid(), attributionSource, AUDIO_OUTPUT_FLAG_NONE,
               TYPE_OUTPUT),
     mActive(false), mSourceThread(sourceThread)
 {
@@ -1824,8 +2119,8 @@
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER, flags, TYPE_PATCH,
-              AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
+              AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true),
                        *playbackThread, timeout)
 {
@@ -1918,6 +2213,25 @@
 {
     mProxy->releaseBuffer(buffer);
     restartIfDisabled();
+
+    // Check if the PatchTrack has enough data to write once in releaseBuffer().
+    // If not, prevent an underrun from occurring by moving the track into FS_FILLING;
+    // this logic avoids glitches when suspending A2DP with AudioPlaybackCapture.
+    // TODO: perhaps underrun avoidance could be a track property checked in isReady() instead.
+    if (mFillingUpStatus == FS_ACTIVE
+            && audio_is_linear_pcm(mFormat)
+            && !isOffloadedOrDirect()) {
+        if (sp<ThreadBase> thread = mThread.promote();
+            thread != 0) {
+            PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+            const size_t frameCount = playbackThread->frameCount() * sampleRate()
+                    / playbackThread->sampleRate();
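+            // Illustrative example (assumed rates): a playback thread of 960 frames at
+            // 48000 Hz feeding a patch track at 16000 Hz needs 960 * 16000 / 48000
+            // = 320 source frames before one sink buffer can be written without underrun.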
+            if (framesReady() < frameCount) {
+                ALOGD("%s(%d) Not enough data, wait for buffer to fill", __func__, mId);
+                mFillingUpStatus = FS_FILLING;
+            }
+        }
+    }
 }
 
 void AudioFlinger::PlaybackThread::PatchTrack::restartIfDisabled()
@@ -1933,110 +2247,6 @@
 // ----------------------------------------------------------------------------
 
 
-// ----------------------------------------------------------------------------
-//      AppOp for audio recording
-// -------------------------------
-
-#undef LOG_TAG
-#define LOG_TAG "AF::OpRecordAudioMonitor"
-
-// static
-sp<AudioFlinger::RecordThread::OpRecordAudioMonitor>
-AudioFlinger::RecordThread::OpRecordAudioMonitor::createIfNeeded(
-            uid_t uid, const audio_attributes_t& attr, const String16& opPackageName)
-{
-    if (isServiceUid(uid)) {
-        ALOGV("not silencing record for service uid:%d pack:%s",
-                uid, String8(opPackageName).string());
-        return nullptr;
-    }
-
-    // Capturing from FM TUNER output is not controlled by OP_RECORD_AUDIO
-    // because it does not affect users privacy as does capturing from an actual microphone.
-    if (attr.source == AUDIO_SOURCE_FM_TUNER) {
-        ALOGV("not muting FM TUNER capture for uid %d", uid);
-        return nullptr;
-    }
-
-    if (opPackageName.size() == 0) {
-        Vector<String16> packages;
-        // no package name, happens with SL ES clients
-        // query package manager to find one
-        PermissionController permissionController;
-        permissionController.getPackagesForUid(uid, packages);
-        if (packages.isEmpty()) {
-            return nullptr;
-        } else {
-            ALOGV("using pack:%s for uid:%d", String8(packages[0]).string(), uid);
-            return new OpRecordAudioMonitor(uid, packages[0]);
-        }
-    }
-
-    return new OpRecordAudioMonitor(uid, opPackageName);
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
-        uid_t uid, const String16& opPackageName)
-        : mHasOpRecordAudio(true), mUid(uid), mPackage(opPackageName)
-{
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
-    if (mOpCallback != 0) {
-        mAppOpsManager.stopWatchingMode(mOpCallback);
-    }
-    mOpCallback.clear();
-}
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::onFirstRef()
-{
-    checkRecordAudio();
-    mOpCallback = new RecordAudioOpCallback(this);
-    ALOGV("start watching OP_RECORD_AUDIO for pack:%s", String8(mPackage).string());
-    mAppOpsManager.startWatchingMode(AppOpsManager::OP_RECORD_AUDIO, mPackage, mOpCallback);
-}
-
-bool AudioFlinger::RecordThread::OpRecordAudioMonitor::hasOpRecordAudio() const {
-    return mHasOpRecordAudio.load();
-}
-
-// Called by RecordAudioOpCallback when OP_RECORD_AUDIO is updated in AppOp callback
-// and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::checkRecordAudio()
-{
-    const int32_t mode = mAppOpsManager.checkOp(AppOpsManager::OP_RECORD_AUDIO,
-            mUid, mPackage);
-    const bool hasIt =  (mode == AppOpsManager::MODE_ALLOWED);
-    // verbose logging only log when appOp changed
-    ALOGI_IF(hasIt != mHasOpRecordAudio.load(),
-            "OP_RECORD_AUDIO missing, %ssilencing record uid%d pack:%s",
-            hasIt ? "un" : "", mUid, String8(mPackage).string());
-    mHasOpRecordAudio.store(hasIt);
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
-        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
-            const String16& packageName) {
-    UNUSED(packageName);
-    if (op != AppOpsManager::OP_RECORD_AUDIO) {
-        return;
-    }
-    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
-    if (monitor != NULL) {
-        monitor->checkRecordAudio();
-    }
-}
-
-
-
 #undef LOG_TAG
 #define LOG_TAG "AF::RecordHandle"
 
@@ -2055,7 +2265,7 @@
 binder::Status AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event,
         int /*audio_session_t*/ triggerSession) {
     ALOGV("%s()", __func__);
-    return binder::Status::fromStatusT(
+    return binderStatusFromStatusT(
         mRecordTrack->start((AudioSystem::sync_event_t)event, (audio_session_t) triggerSession));
 }
 
@@ -2070,22 +2280,33 @@
 }
 
 binder::Status AudioFlinger::RecordHandle::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones) {
+        std::vector<media::MicrophoneInfoData>* activeMicrophones) {
     ALOGV("%s()", __func__);
-    return binder::Status::fromStatusT(
-            mRecordTrack->getActiveMicrophones(activeMicrophones));
+    std::vector<media::MicrophoneInfo> mics;
+    status_t status = mRecordTrack->getActiveMicrophones(&mics);
+    activeMicrophones->resize(mics.size());
+    for (size_t i = 0; status == OK && i < mics.size(); ++i) {
+        status = mics[i].writeToParcelable(&activeMicrophones->at(i));
+    }
+    return binderStatusFromStatusT(status);
 }
 
 binder::Status AudioFlinger::RecordHandle::setPreferredMicrophoneDirection(
         int /*audio_microphone_direction_t*/ direction) {
     ALOGV("%s()", __func__);
-    return binder::Status::fromStatusT(mRecordTrack->setPreferredMicrophoneDirection(
+    return binderStatusFromStatusT(mRecordTrack->setPreferredMicrophoneDirection(
             static_cast<audio_microphone_direction_t>(direction)));
 }
 
 binder::Status AudioFlinger::RecordHandle::setPreferredMicrophoneFieldDimension(float zoom) {
     ALOGV("%s()", __func__);
-    return binder::Status::fromStatusT(mRecordTrack->setPreferredMicrophoneFieldDimension(zoom));
+    return binderStatusFromStatusT(mRecordTrack->setPreferredMicrophoneFieldDimension(zoom));
+}
+
+binder::Status AudioFlinger::RecordHandle::shareAudioHistory(
+        const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) {
+    return binderStatusFromStatusT(
+            mRecordTrack->shareAudioHistory(sharedAudioPackageName, sharedAudioStartMs));
 }
 
 // ----------------------------------------------------------------------------
@@ -2105,14 +2326,16 @@
             size_t bufferSize,
             audio_session_t sessionId,
             pid_t creatorPid,
-            uid_t uid,
+            const AttributionSourceState& attributionSource,
             audio_input_flags_t flags,
             track_type type,
-            const String16& opPackageName,
-            audio_port_handle_t portId)
+            audio_port_handle_t portId,
+            int32_t startFrames)
     :   TrackBase(thread, client, attr, sampleRate, format,
                   channelMask, frameCount, buffer, bufferSize, sessionId,
-                  creatorPid, uid, false /*isOut*/,
+                  creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid)),
+                  false /*isOut*/,
                   (type == TYPE_DEFAULT) ?
                           ((flags & AUDIO_INPUT_FLAG_FAST) ? ALLOC_PIPE : ALLOC_CBLK) :
                           ((buffer == NULL) ? ALLOC_LOCAL : ALLOC_NONE),
@@ -2124,7 +2347,7 @@
         mRecordBufferConverter(NULL),
         mFlags(flags),
         mSilenced(false),
-        mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(uid, attr, opPackageName))
+        mStartFrames(startFrames)
 {
     if (mCblk == NULL) {
         return;
@@ -2165,7 +2388,7 @@
 #endif
 
     // Once this item is logged by the server, the client can add properties.
-    mTrackMetrics.logConstructor(creatorPid, uid);
+    mTrackMetrics.logConstructor(creatorPid, uid(), id());
 }
 
 AudioFlinger::RecordThread::RecordTrack::~RecordTrack()
@@ -2207,7 +2430,8 @@
         RecordThread *recordThread = (RecordThread *)thread.get();
         return recordThread->start(this, event, triggerSession);
     } else {
-        return BAD_VALUE;
+        ALOGW("%s track %d: thread was destroyed", __func__, portId());
+        return DEAD_OBJECT;
     }
 }
 
@@ -2233,6 +2457,9 @@
             Mutex::Autolock _l(thread->mLock);
             RecordThread *recordThread = (RecordThread *) thread.get();
             priorState = mState;
+            if (!mSharedAudioPackageName.empty()) {
+                recordThread->resetAudioHistory_l();
+            }
             recordThread->destroyTrack_l(this); // move mState to STOPPED, terminate
         }
         // APM portid/client management done outside of lock.
@@ -2378,14 +2605,6 @@
     mServerLatencyMs.store(latencyMs);
 }
 
-bool AudioFlinger::RecordThread::RecordTrack::isSilenced() const {
-    if (mSilenced) {
-        return true;
-    }
-    // The monitor is only created for record tracks that can be silenced.
-    return mOpRecordAudioMonitor ? !mOpRecordAudioMonitor->hasOpRecordAudio() : false;
-}
-
 status_t AudioFlinger::RecordThread::RecordTrack::getActiveMicrophones(
         std::vector<media::MicrophoneInfo>* activeMicrophones)
 {
@@ -2419,6 +2638,38 @@
     }
 }
 
+status_t AudioFlinger::RecordThread::RecordTrack::shareAudioHistory(
+        const std::string& sharedAudioPackageName, int64_t sharedAudioStartMs) {
+
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (callingUid != mUid || callingPid != mCreatorPid) {
+        return PERMISSION_DENIED;
+    }
+
+    AttributionSourceState attributionSource{};
+    attributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+    attributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingPid));
+    attributionSource.token = sp<BBinder>::make();
+    if (!captureHotwordAllowed(attributionSource)) {
+        return PERMISSION_DENIED;
+    }
+
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread != 0) {
+        RecordThread *recordThread = (RecordThread *)thread.get();
+        status_t status = recordThread->shareAudioHistory(
+                sharedAudioPackageName, mSessionId, sharedAudioStartMs);
+        if (status == NO_ERROR) {
+            mSharedAudioPackageName = sharedAudioPackageName;
+        }
+        return status;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
 #define LOG_TAG "AF::PatchRecord"
@@ -2435,8 +2686,8 @@
     :   RecordTrack(recordThread, NULL,
                 audio_attributes_t{} /* currently unused for patch track */,
                 sampleRate, format, channelMask, frameCount,
-                buffer, bufferSize, AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER,
-                flags, TYPE_PATCH, String16()),
+                buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
+                audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true),
                        *recordThread, timeout)
 {
@@ -2713,21 +2964,23 @@
         audio_channel_mask_t channelMask,
         audio_session_t sessionId,
         bool isOut,
-        uid_t uid,
-        pid_t pid,
+        const AttributionSourceState& attributionSource,
         pid_t creatorPid,
         audio_port_handle_t portId)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
-                  sessionId, creatorPid, uid, isOut,
+                  sessionId, creatorPid,
+                  VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid)),
+                  isOut,
                   ALLOC_NONE,
                   TYPE_DEFAULT, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
-        mPid(pid), mSilenced(false), mSilencedNotified(false)
+        mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
+            mSilenced(false), mSilencedNotified(false)
 {
     // Once this item is logged by the server, the client can add properties.
-    mTrackMetrics.logConstructor(creatorPid, uid);
+    mTrackMetrics.logConstructor(creatorPid, uid(), id());
 }
 
 AudioFlinger::MmapThread::MmapTrack::~MmapTrack()
diff --git a/services/audioflinger/TypedLogger.h b/services/audioflinger/TypedLogger.h
index 6ef19bf..feb71e3 100644
--- a/services/audioflinger/TypedLogger.h
+++ b/services/audioflinger/TypedLogger.h
@@ -80,7 +80,7 @@
 
 // TODO Permit disabling of logging at compile-time.
 
-// TODO A non-nullptr dummy implementation that is a nop would be faster than checking for nullptr
+// TODO A non-nullptr stub implementation that is a nop would be faster than checking for nullptr
 //      in the case when logging is enabled at compile-time and enabled at runtime, but it might be
 //      slower than nullptr check when logging is enabled at compile-time and disabled at runtime.
 
@@ -129,8 +129,8 @@
 
 namespace android {
 extern "C" {
-// TODO consider adding a thread_local NBLog::Writer tlDummyNBLogWriter and then
-// initialize below tlNBLogWriter to &tlDummyNBLogWriter to remove the need to
+// TODO consider adding a thread_local NBLog::Writer tlStubNBLogWriter and then
+// initialize below tlNBLogWriter to &tlStubNBLogWriter to remove the need to
 // check for nullptr every time. Also reduces the need to add a new logging macro above
 // each time we want to log a new type.
 extern thread_local NBLog::Writer *tlNBLogWriter;
diff --git a/services/audiopolicy/Android.bp b/services/audiopolicy/Android.bp
index a42b89f..e018dd3 100644
--- a/services/audiopolicy/Android.bp
+++ b/services/audiopolicy/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiopolicymanager_interface_headers",
     host_supported: true,
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8d0e5db..2e49e71 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -17,14 +17,18 @@
 #ifndef ANDROID_AUDIOPOLICY_INTERFACE_H
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
+#include <media/AudioCommonTypes.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioSystem.h>
 #include <media/AudioPolicy.h>
 #include <media/DeviceDescriptorBase.h>
+#include <android/content/AttributionSourceState.h>
 #include <utils/String8.h>
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // ----------------------------------------------------------------------------
 
 // The AudioPolicyInterface and AudioPolicyClientInterface classes define the communication interfaces
@@ -122,7 +126,7 @@
                                         audio_io_handle_t *output,
                                         audio_session_t session,
                                         audio_stream_type_t *stream,
-                                        uid_t uid,
+                                        const AttributionSourceState& attributionSource,
                                         const audio_config_t *config,
                                         audio_output_flags_t *flags,
                                         audio_port_handle_t *selectedDeviceId,
@@ -133,15 +137,15 @@
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
     // indicates to the audio policy manager that the output stops being used by corresponding stream.
     virtual status_t stopOutput(audio_port_handle_t portId) = 0;
-    // releases the output.
-    virtual void releaseOutput(audio_port_handle_t portId) = 0;
+    // releases the output, return true if the output descriptor is reopened.
+    virtual bool releaseOutput(audio_port_handle_t portId) = 0;
 
     // request an input appropriate for record from the supplied device with supplied parameters.
     virtual status_t getInputForAttr(const audio_attributes_t *attr,
                                      audio_io_handle_t *input,
                                      audio_unique_id_t riid,
                                      audio_session_t session,
-                                     uid_t uid,
+                                     const AttributionSourceState& attributionSource,
                                      const audio_config_base_t *config,
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
@@ -191,7 +195,7 @@
                                                     int &index) = 0;
 
     // return the strategy corresponding to a given stream type
-    virtual uint32_t getStrategyForStream(audio_stream_type_t stream) = 0;
+    virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) = 0;
 
     // return the enabled output devices for the given stream type
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
@@ -204,7 +208,7 @@
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(const effect_descriptor_t *desc,
                                     audio_io_handle_t io,
-                                    uint32_t strategy,
+                                    product_strategy_t strategy,
                                     int session,
                                     int id) = 0;
     virtual status_t unregisterEffect(int id) = 0;
@@ -220,16 +224,16 @@
     virtual status_t    dump(int fd) = 0;
 
     virtual status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t flags) = 0;
-    virtual bool isOffloadSupported(const audio_offload_info_t& offloadInfo) = 0;
+    virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& offloadInfo) = 0;
     virtual bool isDirectOutputSupported(const audio_config_base_t& config,
                                          const audio_attributes_t& attributes) = 0;
 
     virtual status_t listAudioPorts(audio_port_role_t role,
                                     audio_port_type_t type,
                                     unsigned int *num_ports,
-                                    struct audio_port *ports,
+                                    struct audio_port_v7 *ports,
                                     unsigned int *generation) = 0;
-    virtual status_t getAudioPort(struct audio_port *port) = 0;
+    virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
     virtual status_t createAudioPatch(const struct audio_patch *patch,
                                        audio_patch_handle_t *handle,
                                        uid_t uid) = 0;
@@ -250,12 +254,12 @@
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
 
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+    virtual status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices)
             = 0;
     virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
 
     virtual status_t setUserIdDeviceAffinities(int userId,
-            const Vector<AudioDeviceTypeAddr>& devices) = 0;
+            const AudioDeviceTypeAddrVector& devices) = 0;
     virtual status_t removeUserIdDeviceAffinities(int userId) = 0;
 
     virtual status_t startAudioSource(const struct audio_port_config *source,
@@ -272,8 +276,11 @@
 
     virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                         audio_format_t *surroundFormats,
-                                        bool *surroundFormatsEnabled,
-                                        bool reported) = 0;
+                                        bool *surroundFormatsEnabled) = 0;
+
+    virtual status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                audio_format_t *surroundFormats) = 0;
+
     virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) = 0;
 
     virtual bool     isHapticPlaybackSupported() = 0;
@@ -285,23 +292,47 @@
 
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
 
-    virtual status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                           product_strategy_t &productStrategy) = 0;
+    virtual status_t getProductStrategyFromAudioAttributes(
+            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            bool fallbackOnDefault) = 0;
 
     virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) = 0;
 
-    virtual status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                       volume_group_t &volumeGroup) = 0;
+    virtual status_t getVolumeGroupFromAudioAttributes(
+            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
 
     virtual bool     isCallScreenModeSupported() = 0;
 
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                               device_role_t role,
+                                               const AudioDeviceTypeAddrVector &devices) = 0;
 
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                  device_role_t role) = 0;
 
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) = 0;
+
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                  device_role_t role,
+                                                  AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                    device_role_t role,
+                                                    const AudioDeviceTypeAddrVector &devices) = 0;
+
+    virtual status_t removeDevicesRoleForCapturePreset(
+            audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector& devices) = 0;
+
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role) = 0;
+
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                       device_role_t role,
+                                                       AudioDeviceTypeAddrVector &devices) = 0;
 };
 
 
@@ -416,16 +447,25 @@
                                                 audio_patch_handle_t patchHandle,
                                                 audio_source_t source) = 0;
 
+    virtual void onRoutingUpdated() = 0;
+
     // Used to notify the sound trigger module that an audio capture is about to
     // take place. This should typically result in any active recognition
     // sessions being preempted on modules that do not support sound trigger
     // recognition concurrently with audio capture.
     virtual void setSoundTriggerCaptureState(bool active) = 0;
+
+    virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
+
+    virtual status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
 };
 
-extern "C" AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface);
-extern "C" void destroyAudioPolicyManager(AudioPolicyInterface *interface);
-
+    // These are the signatures of the createAudioPolicyManager/destroyAudioPolicyManager
+    // methods, respectively, that AudioPolicyService expects; they need to be exposed by
+    // libaudiopolicymanagercustom.
+    using CreateAudioPolicyManagerInstance = AudioPolicyInterface* (*)(AudioPolicyClientInterface*);
+    using DestroyAudioPolicyManagerInstance = void (*)(AudioPolicyInterface*);
 
 } // namespace android
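
For reference, a minimal sketch of the entry points a replacement policy library (such as the libaudiopolicymanagercustom mentioned in the comment above) could export to match the CreateAudioPolicyManagerInstance and DestroyAudioPolicyManagerInstance signatures; the include path and the constructor/initialize() sequence mirror the default manager factory and are assumptions here, not part of this change:

#include "managerdefault/AudioPolicyManager.h"  // include path is an assumption

namespace android {

extern "C" AudioPolicyInterface* createAudioPolicyManager(
        AudioPolicyClientInterface *clientInterface) {
    AudioPolicyManager *apm = new AudioPolicyManager(clientInterface);
    // initialize() follows the default factory pattern; a failed init must not leak the manager.
    if (apm->initialize() != NO_ERROR) {
        delete apm;
        return nullptr;
    }
    return apm;
}

extern "C" void destroyAudioPolicyManager(AudioPolicyInterface *interface) {
    delete interface;
}

} // namespace android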
 
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index a8483fa..da9d32f 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,3 +1,2 @@
 jmtrivi@google.com
-krocard@google.com
 mnaganov@google.com
diff --git a/services/audiopolicy/common/Android.bp b/services/audiopolicy/common/Android.bp
index 6e0d2f6..91701ad 100644
--- a/services/audiopolicy/common/Android.bp
+++ b/services/audiopolicy/common/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiopolicycommon",
     header_libs: [
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 7c8ce83..736f8b2 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -126,6 +126,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
             return DEVICE_CATEGORY_HEADSET;
         case AUDIO_DEVICE_OUT_HEARING_AID:
             return DEVICE_CATEGORY_HEARING_AID;
@@ -139,6 +140,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
         case AUDIO_DEVICE_OUT_USB_ACCESSORY:
         case AUDIO_DEVICE_OUT_REMOTE_SUBMIX:
+        case AUDIO_DEVICE_OUT_BLE_SPEAKER:
         default:
             return DEVICE_CATEGORY_SPEAKER;
         }
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 0537365..577f641 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -42,7 +42,7 @@
 
 // For mixed output and inputs, the policy will use max mixer channel count.
 // Do not limit channel count otherwise
-#define MAX_MIXER_CHANNEL_COUNT FCC_8
+#define MAX_MIXER_CHANNEL_COUNT FCC_LIMIT
 
 /**
  * Alias to AUDIO_DEVICE_OUT_DEFAULT defined for clarification when this value is used by volume
@@ -226,6 +226,8 @@
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_ARC) != 0) {
             return AUDIO_DEVICE_OUT_HDMI_ARC;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_HDMI_EARC) != 0) {
+            return AUDIO_DEVICE_OUT_HDMI_EARC;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_AUX_LINE) != 0) {
             return AUDIO_DEVICE_OUT_AUX_LINE;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPDIF) != 0) {
@@ -240,4 +242,4 @@
             return a2dpDevices.empty() ? AUDIO_DEVICE_NONE : a2dpDevices[0];
         }
     }
-}
\ No newline at end of file
+}
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 57f0b5b..227c2d8 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_static {
     name: "libaudiopolicycomponents",
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index 6f47abc..a40f6aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -72,7 +72,7 @@
             const struct audio_port_config *srcConfig = NULL) const;
     virtual sp<AudioPort> getAudioPort() const { return mProfile; }
 
-    void toAudioPort(struct audio_port *port) const;
+    void toAudioPort(struct audio_port_v7 *port) const;
     void setPreemptedSessions(const SortedVector<audio_session_t>& sessions);
     SortedVector<audio_session_t> getPreemptedSessions() const;
     bool hasPreemptedSession(audio_session_t session) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 39d1140..1f9b535 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -182,6 +182,7 @@
      * Active ref count of the client will be incremented/decremented through setActive API
      */
     virtual void setClientActive(const sp<TrackClientDescriptor>& client, bool active);
+    bool isClientActive(const sp<TrackClientDescriptor>& client);
 
     bool isActive(uint32_t inPastMs) const;
     bool isActive(VolumeSource volumeSource = VOLUME_SOURCE_NONE,
@@ -260,7 +261,7 @@
                            const struct audio_port_config *srcConfig = NULL) const;
     virtual sp<AudioPort> getAudioPort() const { return mPolicyAudioPort->asAudioPort(); }
 
-    virtual void toAudioPort(struct audio_port *port) const;
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
 
     audio_module_handle_t getModuleHandle() const;
 
@@ -337,6 +338,8 @@
     bool sharesHwModuleWith(const sp<SwAudioOutputDescriptor>& outputDesc);
     virtual DeviceVector supportedDevices() const;
     virtual bool devicesSupportEncodedFormats(const DeviceTypeSet& deviceTypes);
+    virtual bool containsSingleDeviceSupportingEncodedFormats(
+            const sp<DeviceDescriptor>& device) const;
     virtual uint32_t latency();
     virtual bool isDuplicated() const { return (mOutput1 != NULL && mOutput2 != NULL); }
     virtual bool isFixedVolume(const DeviceTypeSet& deviceTypes);
@@ -357,7 +360,7 @@
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
-    virtual void toAudioPort(struct audio_port *port) const;
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
 
         status_t open(const audio_config_t *config,
                       const DeviceVector &devices,
@@ -395,6 +398,14 @@
     bool supportsAllDevices(const DeviceVector &devices) const;
 
     /**
+     * @brief supportsDevicesForPlayback
+     * @param devices to be checked against
+     * @return true if the devices are a supported combination for playback,
+     *         false otherwise
+     */
+    bool supportsDevicesForPlayback(const DeviceVector &devices) const;
+
+    /**
      * @brief filterSupportedDevices takes a vector of devices and filters them according to the
      * device supported by this output (the profile from which this output derives from)
      * @param devices reference device vector to be filtered
@@ -411,6 +422,7 @@
     sp<SwAudioOutputDescriptor> mOutput2;    // used by duplicated outputs: second output
     uint32_t mDirectOpenCount; // number of clients using this output (direct outputs only)
     audio_session_t mDirectClientSession; // session id of the direct output client
+    bool mPendingReopenToQueryProfiles = false;
 };
 
 // Audio output driven by an input device directly.
@@ -431,7 +443,7 @@
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
-    virtual void toAudioPort(struct audio_port *port) const;
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
 
     const sp<SourceClientDescriptor> mSource;
 
@@ -498,11 +510,6 @@
      */
     bool isA2dpOffloadedOnPrimary() const;
 
-    /**
-     * returns true if A2DP is supported (either via hardware offload or software encoding)
-     */
-    bool isA2dpSupported() const;
-
     sp<SwAudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const;
 
     sp<SwAudioOutputDescriptor> getPrimaryOutput() const;
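
The audio_port → audio_port_v7 migration above changes how ports are reported to clients; a hedged sketch of the usual two-call, generation-checked pattern against the listAudioPorts() signature shown earlier in AudioPolicyInterface (passing a null ports array for the count-only call is an assumption):

#include <vector>
#include <system/audio.h>
#include "AudioPolicyInterface.h"  // for android::AudioPolicyInterface

std::vector<audio_port_v7> listMixPorts(android::AudioPolicyInterface* policy) {
    std::vector<audio_port_v7> ports;
    unsigned int generationBefore = 0, generationAfter = 0;
    unsigned int numPorts = 0;
    do {
        numPorts = 0;
        // First call only queries the count and the current generation.
        policy->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_MIX,
                               &numPorts, nullptr, &generationBefore);
        ports.resize(numPorts);
        // Second call fills the array; retry if the port list changed in between.
        policy->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_MIX,
                               &numPorts, ports.data(), &generationAfter);
    } while (generationBefore != generationAfter);
    ports.resize(numPorts);
    return ports;
}
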
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 395bc70..cf1f64c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -33,6 +33,15 @@
 
 namespace android {
 
+// This class gathers together various bits of AudioPolicyManager
+// configuration, which are usually filled out as a result of parsing
+// the audio_policy_configuration.xml file.
+//
+// Note that AudioPolicyConfig doesn't own some of the data;
+// it simply proxies access to the fields of the AudioPolicyManager
+// class. Be careful about the fields that are references,
+// e.g. 'mOutputDevices'. This also means that it's impossible
+// to implement "deep copying" of this class without re-designing it.
 class AudioPolicyConfig
 {
 public:
@@ -40,14 +49,24 @@
                       DeviceVector &outputDevices,
                       DeviceVector &inputDevices,
                       sp<DeviceDescriptor> &defaultOutputDevice)
-        : mEngineLibraryNameSuffix(kDefaultEngineLibraryNameSuffix),
-          mHwModules(hwModules),
+        : mHwModules(hwModules),
           mOutputDevices(outputDevices),
           mInputDevices(inputDevices),
-          mDefaultOutputDevice(defaultOutputDevice),
-          mIsSpeakerDrcEnabled(false),
-          mIsCallScreenModeSupported(false)
-    {}
+          mDefaultOutputDevice(defaultOutputDevice) {
+        clear();
+    }
+
+    void clear() {
+        mSource = {};
+        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+        mHwModules.clear();
+        mOutputDevices.clear();
+        mInputDevices.clear();
+        mDefaultOutputDevice.clear();
+        mIsSpeakerDrcEnabled = false;
+        mIsCallScreenModeSupported = false;
+        mSurroundFormats.clear();
+    }
 
     const std::string& getSource() const {
         return mSource;
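
Because AudioPolicyConfig only proxies collections owned by the policy manager, clear() wipes those collections in place; a minimal sketch of the construct/parse flow, using the deserializeAudioPolicyFile() entry point declared in Serializer.h further below (the configuration path is only an example):

#include "AudioPolicyConfig.h"
#include "Serializer.h"

using namespace android;

status_t loadPolicyConfig(HwModuleCollection& hwModules,
                          DeviceVector& outputDevices,
                          DeviceVector& inputDevices,
                          sp<DeviceDescriptor>& defaultOutputDevice) {
    // The constructor already calls clear() on the referenced, manager-owned collections.
    AudioPolicyConfig config(hwModules, outputDevices, inputDevices, defaultOutputDevice);
    status_t status = deserializeAudioPolicyFile(
            "/vendor/etc/audio_policy_configuration.xml", &config);
    if (status != NO_ERROR) {
        config.clear();  // reset the partially filled collections before falling back
    }
    return status;
}
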
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index b82305d..c2a20c6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -77,6 +77,7 @@
 
     sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
                                                        const DeviceVector &availableDeviceTypes,
+                                                       uid_t uid,
                                                        sp<AudioPolicyMix> *policyMix) const;
 
     /**
@@ -101,7 +102,7 @@
      *    An example of failure is when there are already rules in place to restrict
      *    a mix to the given uid (i.e. when a MATCH_UID rule was set for it).
      */
-    status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+    status_t setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices);
     status_t removeUidDeviceAffinities(uid_t uid);
     status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
 
@@ -115,7 +116,7 @@
      *    An example of failure is when there are already rules in place to restrict
      *    a mix to the given userId (i.e. when a MATCH_USERID rule was set for it).
      */
-    status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
+    status_t setUserIdDeviceAffinities(int userId, const AudioDeviceTypeAddrVector& devices);
     status_t removeUserIdDeviceAffinities(int userId);
     status_t getDevicesForUserId(int userId, Vector<AudioDeviceTypeAddr>& devices) const;
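
The switch from Vector<AudioDeviceTypeAddr> to AudioDeviceTypeAddrVector above follows the alias from libaudiofoundation (assumed here to be std::vector<AudioDeviceTypeAddr> from media/AudioDeviceTypeAddr.h); a small sketch of building such a vector for a device-affinity call (the Bluetooth address is a placeholder):

#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>

android::AudioDeviceTypeAddrVector speakerAndA2dpDevices() {
    android::AudioDeviceTypeAddrVector devices;
    devices.push_back(android::AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, ""));
    devices.push_back(android::AudioDeviceTypeAddr(
            AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55"));
    return devices;
}
// e.g. policyMixes.setUidDeviceAffinities(uid, speakerAndA2dpDevices());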
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 923310c..74b3405 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -109,10 +109,16 @@
     const std::vector<wp<SwAudioOutputDescriptor>>& getSecondaryOutputs() const {
         return mSecondaryOutputs;
     };
+    void setSecondaryOutputs(std::vector<wp<SwAudioOutputDescriptor>>&& secondaryOutputs) {
+        mSecondaryOutputs = std::move(secondaryOutputs);
+    }
     VolumeSource volumeSource() const { return mVolumeSource; }
     const sp<AudioPolicyMix> getPrimaryMix() const {
         return mPrimaryMix.promote();
     };
+    bool hasLostPrimaryMix() const {
+        return mPrimaryMix.unsafe_get() && !mPrimaryMix.promote();
+    }
 
     void setActive(bool active) override
     {
@@ -140,7 +146,7 @@
     const product_strategy_t mStrategy;
     const VolumeSource mVolumeSource;
     const audio_output_flags_t mFlags;
-    const std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
+    std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
     const wp<AudioPolicyMix> mPrimaryMix;
     /**
      * required for duplicating thread, prevent from removing active client from an output
@@ -195,10 +201,18 @@
 
     ~SourceClientDescriptor() override = default;
 
+    void connect(audio_patch_handle_t patchHandle, const sp<DeviceDescriptor>& sinkDevice) {
+        mPatchHandle = patchHandle;
+        mSinkDevice = sinkDevice;
+    }
+    void disconnect() {
+        mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
+        mSinkDevice = nullptr;
+    }
+    bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
-    void setPatchHandle(audio_patch_handle_t patchHandle) { mPatchHandle = patchHandle; }
-
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
+    sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
     wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
     void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
@@ -210,6 +224,7 @@
  private:
     audio_patch_handle_t mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
     const sp<DeviceDescriptor> mSrcDevice;
+    sp<DeviceDescriptor> mSinkDevice;
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
 };
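
hasLostPrimaryMix() above distinguishes "no mix was ever set" from "the mix was set but has since been destroyed" using the libutils weak-pointer API; a standalone sketch of the same pattern (AudioPolicyMixLike is a stand-in type, not the real class):

#include <utils/RefBase.h>

struct AudioPolicyMixLike : public android::RefBase {};

// True only when the weak reference was set at some point (unsafe_get() != nullptr)
// but the referenced object has since been destroyed (promote() returns null).
bool hasLostMix(const android::wp<AudioPolicyMixLike>& mix) {
    return mix.unsafe_get() != nullptr && mix.promote() == nullptr;
}
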
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index dd1499c..20b4044 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -65,6 +65,9 @@
 
     bool supportsFormat(audio_format_t format);
 
+    void setDynamic() { mIsDynamic = true; }
+    bool isDynamic() const { return mIsDynamic; }
+
     // PolicyAudioPortConfig
     virtual sp<PolicyAudioPort> getPolicyAudioPort() const {
         return static_cast<PolicyAudioPort*>(const_cast<DeviceDescriptor*>(this));
@@ -85,6 +88,7 @@
 
     // AudioPort
     virtual void toAudioPort(struct audio_port *port) const;
+    virtual void toAudioPort(struct audio_port_v7 *port) const;
 
     void importAudioPortAndPickAudioProfile(const sp<PolicyAudioPort>& policyPort,
                                             bool force = false);
@@ -94,9 +98,18 @@
     void dump(String8 *dst, int spaces, int index, bool verbose = true) const;
 
 private:
+    template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
+                                        || std::is_same<T, struct audio_port_v7>::value, int> = 0>
+    void toAudioPortInternal(T* port) const {
+        DeviceDescriptorBase::toAudioPort(port);
+        port->ext.device.hw_module = getModuleHandle();
+    }
+
     std::string mTagName; // Unique human readable identifier for a device port found in conf file.
     FormatVector        mEncodedFormats;
     audio_format_t      mCurrentEncodedFormat;
+    bool                mIsDynamic = false;
+    const std::string   mDeclaredAddress; // Original device address
 };
 
 class DeviceVector : public SortedVector<sp<DeviceDescriptor> >
@@ -146,6 +159,15 @@
     //     4) the combination of all devices is invalid for selection
     sp<DeviceDescriptor> getDeviceForOpening() const;
 
+    // Return the device descriptor that matches the given AudioDeviceTypeAddr
+    sp<DeviceDescriptor> getDeviceFromDeviceTypeAddr(
+            const AudioDeviceTypeAddr& deviceTypeAddr) const;
+
+    // Return the device vector containing the device descriptors whose AudioDeviceTypeAddr
+    // appears in the given AudioDeviceTypeAddrVector
+    DeviceVector getDevicesFromDeviceTypeAddrVec(
+            const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const;
+
     // If there are devices with the given type and the devices to add is not empty,
     // remove all the devices with the given type and add all the devices to add.
     void replaceDevicesByType(audio_devices_t typeToRemove, const DeviceVector &devicesToAdd);
@@ -204,7 +226,9 @@
             add(devices);
             return size();
         }
-        return SortedVector::merge(devices);
+        ssize_t ret = SortedVector::merge(devices);
+        refreshTypes();
+        return ret;
     }
 
     /**
@@ -248,13 +272,21 @@
         return String8("");
     }
 
-    std::string toString() const;
+    const AudioProfileVector& getSupportedProfiles() { return mSupportedProfiles; }
+
+    // Return a string to describe the DeviceVector. The sensitive information will only be
+    // added to the string if `includeSensitiveInfo` is true.
+    std::string toString(bool includeSensitiveInfo = false) const;
 
     void dump(String8 *dst, const String8 &tag, int spaces = 0, bool verbose = true) const;
 
+protected:
+    int     do_compare(const void* lhs, const void* rhs) const;
 private:
     void refreshTypes();
+    void refreshAudioProfiles();
     DeviceTypeSet mDeviceTypes;
+    AudioProfileVector mSupportedProfiles;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c4eab30..59eee52 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -72,6 +72,9 @@
                      audio_io_handle_t dstOutput);
     void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
 
+    audio_io_handle_t getIoForSession(audio_session_t sessionId,
+                                      const effect_uuid_t *effectType = nullptr);
+
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 23f0c9a..9ba745a 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -52,8 +52,12 @@
         devices.merge(mDynamicDevices);
         return devices;
     }
+    std::string getTagForDevice(audio_devices_t device,
+                                const String8 &address = String8(),
+                                audio_format_t codec = AUDIO_FORMAT_DEFAULT);
     void addDynamicDevice(const sp<DeviceDescriptor> &device)
     {
+        device->setDynamic();
         mDynamicDevices.add(device);
     }
 
@@ -131,8 +135,17 @@
 public:
     sp<HwModule> getModuleFromName(const char *name) const;
 
+    /**
+     * @brief getModuleForDeviceType tries to find a device by type / format across all modules
+     * @param device type to consider
+     * @param encodedFormat to consider
+     * @param[out] tagName if not null and a matching device is found, receives the tagName
+     * of the original device from the XML file so that audio route matching rules keep working.
+     * @return a valid module if the considered device is found, nullptr otherwise.
+     */
     sp<HwModule> getModuleForDeviceType(audio_devices_t device,
-                                        audio_format_t encodedFormat) const;
+                                        audio_format_t encodedFormat,
+                                        std::string *tagName = nullptr) const;
 
     sp<HwModule> getModuleForDevice(const sp<DeviceDescriptor> &device,
                                     audio_format_t encodedFormat) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 5f551d5..a74cefa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -112,6 +112,19 @@
     }
 
     /**
+     * @brief getTag
+     * @param deviceTypes to be considered
+     * @return tagName of the first matching device for the considered types, empty string otherwise.
+     */
+    std::string getTag(const DeviceTypeSet& deviceTypes) const
+    {
+        if (supportsDeviceTypes(deviceTypes)) {
+            return mSupportedDevices.getDevicesFromTypes(deviceTypes).itemAt(0)->getTagName();
+        }
+        return {};
+    }
+
+    /**
      * @brief supportsDevice
      * @param device to be checked against
      *        forceCheckOnAddress if true, check on type and address whatever the type, otherwise
@@ -131,7 +144,7 @@
     bool devicesSupportEncodedFormats(DeviceTypeSet deviceTypes) const
     {
         if (deviceTypes.empty()) {
-            return true; // required for isOffloadSupported() check
+            return true; // required for getOffloadSupport() check
         }
         DeviceVector deviceList =
             mSupportedDevices.getDevicesFromTypes(deviceTypes);
@@ -143,6 +156,8 @@
         return false;
     }
 
+    bool containsSingleDeviceSupportingEncodedFormats(const sp<DeviceDescriptor>& device) const;
+
     void clearSupportedDevices() { mSupportedDevices.clear(); }
     void addSupportedDevice(const sp<DeviceDescriptor> &device)
     {
@@ -150,6 +165,12 @@
     }
     void removeSupportedDevice(const sp<DeviceDescriptor> &device)
     {
+        ssize_t ret = mSupportedDevices.indexOf(device);
+        if (ret >= 0 && !mSupportedDevices.itemAt(ret)->isDynamic()) {
+            // Device equality only checks type, address, name and format.
+            // This prevents removing devices that were not added dynamically.
+            return;
+        }
         mSupportedDevices.remove(device);
     }
     void setSupportedDevices(const DeviceVector &devices)
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index d2f6297..ab33b38 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -42,6 +42,11 @@
 
     virtual const std::string getTagName() const = 0;
 
+    bool equals(const sp<PolicyAudioPort> &right) const
+    {
+        return right != 0 && getTagName() == right->getTagName();
+    }
+
     virtual sp<AudioPort> asAudioPort() const = 0;
 
     virtual void setFlags(uint32_t flags)
diff --git a/services/audiopolicy/common/managerdefinitions/include/Serializer.h b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
index 48c4147..b70c595 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Serializer.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
@@ -21,5 +21,9 @@
 namespace android {
 
 status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config);
+// In VTS mode all vendor extensions are ignored. This is done because
+// VTS tests are built using AOSP code and thus can not use vendor overlays
+// of system libraries.
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config);
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index b963121..7016a08 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -92,7 +92,7 @@
     dstConfig->ext.mix.usecase.source = source();
 }
 
-void AudioInputDescriptor::toAudioPort(struct audio_port *port) const
+void AudioInputDescriptor::toAudioPort(struct audio_port_v7 *port) const
 {
     ALOG_ASSERT(mProfile != 0, "toAudioPort() called on input with null profile %d", mIoHandle);
 
@@ -516,7 +516,7 @@
     dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
     dst->appendFormat(" Format: %d\n", mFormat);
     dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices %s\n", mDevice->toString().c_str());
+    dst->appendFormat(" Devices %s\n", mDevice->toString(true /*includeSensitiveInfo*/).c_str());
     mEnabledEffects.dump(dst, 1 /*spaces*/, false /*verbose*/);
     dst->append(" AudioRecord Clients:\n");
     ClientMapHandler<RecordClientDescriptor>::dump(dst);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d6d472b..6b08f7c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -123,6 +123,12 @@
     client->setActive(active);
 }
 
+bool AudioOutputDescriptor::isClientActive(const sp<TrackClientDescriptor>& client)
+{
+    return client != nullptr &&
+            std::find(begin(mActiveClients), end(mActiveClients), client) != end(mActiveClients);
+}
+
 bool AudioOutputDescriptor::isActive(VolumeSource vs, uint32_t inPastMs, nsecs_t sysTime) const
 {
     return (vs == VOLUME_SOURCE_NONE) ?
@@ -209,7 +215,7 @@
     dstConfig->ext.mix.usecase.stream = AUDIO_STREAM_DEFAULT;
 }
 
-void AudioOutputDescriptor::toAudioPort(struct audio_port *port) const
+void AudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
 {
     // Should not be called for duplicated ports, see SwAudioOutputDescriptor::toAudioPortConfig.
     mPolicyAudioPort->asAudioPort()->toAudioPort(port);
@@ -245,7 +251,7 @@
     dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
     dst->appendFormat(" Format: %08x\n", mFormat);
     dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices: %s\n", devices().toString().c_str());
+    dst->appendFormat(" Devices: %s\n", devices().toString(true /*includeSensitiveInfo*/).c_str());
     dst->appendFormat(" Global active count: %u\n", mGlobalActiveCount);
     for (const auto &iter : mRoutingActivities) {
         dst->appendFormat(" Product Strategy id: %d", iter.first);
@@ -338,6 +344,13 @@
     return supportedDevices().containsAllDevices(devices);
 }
 
+bool SwAudioOutputDescriptor::supportsDevicesForPlayback(const DeviceVector &devices) const
+{
+    // Duplicated outputs are not considered.
+    // TODO: need to verify whether the profile supports this device combination for playback.
+    return !isDuplicated() && supportsAllDevices(devices);
+}
+
 DeviceVector SwAudioOutputDescriptor::filterSupportedDevices(const DeviceVector &devices) const
 {
     DeviceVector filteredDevices = supportedDevices();
@@ -354,6 +367,16 @@
     }
 }
 
+bool SwAudioOutputDescriptor::containsSingleDeviceSupportingEncodedFormats(
+        const sp<DeviceDescriptor>& device) const
+{
+    if (isDuplicated()) {
+        return (mOutput1->containsSingleDeviceSupportingEncodedFormats(device) &&
+                mOutput2->containsSingleDeviceSupportingEncodedFormats(device));
+    }
+    return mProfile->containsSingleDeviceSupportingEncodedFormats(device);
+}
+
 uint32_t SwAudioOutputDescriptor::latency()
 {
     if (isDuplicated()) {
@@ -400,8 +423,7 @@
     dstConfig->ext.mix.handle = mIoHandle;
 }
 
-void SwAudioOutputDescriptor::toAudioPort(
-                                                    struct audio_port *port) const
+void SwAudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
 {
     ALOG_ASSERT(!isDuplicated(), "toAudioPort() called on duplicated output %d", mIoHandle);
 
@@ -505,6 +527,9 @@
         lConfig.offload_info.duration_us = -1;
         lConfig.offload_info.has_video = true; // conservative
         lConfig.offload_info.is_streaming = true; // likely
+        lConfig.offload_info.encapsulation_mode = lConfig.offload_info.encapsulation_mode;
+        lConfig.offload_info.content_id = lConfig.offload_info.content_id;
+        lConfig.offload_info.sync_id = lConfig.offload_info.sync_id;
     }
 
     mFlags = (audio_output_flags_t)(mFlags | flags);
@@ -648,8 +673,7 @@
     mSource->srcDevice()->toAudioPortConfig(dstConfig, srcConfig);
 }
 
-void HwAudioOutputDescriptor::toAudioPort(
-                                                    struct audio_port *port) const
+void HwAudioOutputDescriptor::toAudioPort(struct audio_port_v7 *port) const
 {
     mSource->srcDevice()->toAudioPort(port);
 }
@@ -764,11 +788,6 @@
     return false;
 }
 
-bool SwAudioOutputCollection::isA2dpSupported() const
-{
-    return (isA2dpOffloadedOnPrimary() || (getA2dpOutput() != 0));
-}
-
 sp<SwAudioOutputDescriptor> SwAudioOutputCollection::getPrimaryOutput() const
 {
     for (size_t i = 0; i < size(); i++) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b6de4be..b209a88 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -208,24 +208,25 @@
         // Loopback render mixes are created from a public API and thus restricted
         // to non-sensitive audio that has not opted out.
         if (is_mix_loopback_render(mix->mRouteFlags)) {
-            auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
-            if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
-                return MixMatchStatus::NO_MATCH;
-            }
-            if (!mix->mAllowPrivilegedPlaybackCapture &&
-                hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
-                return MixMatchStatus::NO_MATCH;
-            }
-            if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION &&
-                !mix->mVoiceCommunicationCaptureAllowed) {
-                return MixMatchStatus::NO_MATCH;
-            }
             if (!(attributes.usage == AUDIO_USAGE_UNKNOWN ||
                   attributes.usage == AUDIO_USAGE_MEDIA ||
                   attributes.usage == AUDIO_USAGE_GAME ||
                   attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
                 return MixMatchStatus::NO_MATCH;
             }
+            auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
+            if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
+                return MixMatchStatus::NO_MATCH;
+            }
+
+            if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION) {
+                if (!mix->mVoiceCommunicationCaptureAllowed) {
+                    return MixMatchStatus::NO_MATCH;
+                }
+            } else if (!mix->mAllowPrivilegedMediaPlaybackCapture &&
+                hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
+                return MixMatchStatus::NO_MATCH;
+            }
         }
 
         int userId = (int) multiuser_get_user_id(uid);
@@ -390,6 +391,7 @@
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
         audio_source_t inputSource,
         const DeviceVector &availDevices,
+        uid_t uid,
         sp<AudioPolicyMix> *policyMix) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -401,7 +403,11 @@
             if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
                     mix->mCriteria[j].mValue.mSource == inputSource) ||
                (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mSource != inputSource)) {
+                    mix->mCriteria[j].mValue.mSource != inputSource) ||
+               (RULE_MATCH_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid == uid) ||
+               (RULE_EXCLUDE_UID == mix->mCriteria[j].mRule &&
+                    mix->mCriteria[j].mValue.mUid != uid)) {
                 // assuming PolicyMix only for remote submix for input
                 // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX
                 audio_devices_t device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
@@ -463,7 +469,7 @@
 }
 
 status_t AudioPolicyMixCollection::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     // verify feasibility: for each player mix: if it already contains a
     //    "match uid" rule for this uid, return an error
     //    (adding a uid-device affinity would result in contradictory rules)
@@ -565,7 +571,7 @@
 }
 
 status_t AudioPolicyMixCollection::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     // verify feasibility: for each player mix: if it already contains a
     //    "match userId" rule for this userId, return an error
     //    (adding a userId-device affinity would result in contradictory rules)
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
index 2a18f19..866417e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
@@ -39,12 +39,12 @@
 bool AudioRoute::supportsPatch(const sp<PolicyAudioPort> &srcPort,
                                const sp<PolicyAudioPort> &dstPort) const
 {
-    if (mSink == 0 || dstPort == 0 || dstPort != mSink) {
+    if (mSink == 0 || srcPort == 0 || dstPort == 0 || !dstPort->equals(mSink)) {
         return false;
     }
     ALOGV("%s: sinks %s matching", __FUNCTION__, mSink->getTagName().c_str());
     for (const auto &sourcePort : mSources) {
-        if (sourcePort == srcPort) {
+        if (sourcePort->equals(srcPort)) {
             ALOGV("%s: sources %s matching", __FUNCTION__, sourcePort->getTagName().c_str());
             return true;
         }
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index a29e60e..a92d31e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -49,10 +49,13 @@
 {
 }
 
+// Let DeviceDescriptorBase initialize the address since it handles specific cases like
+// legacy remote submix where "0" is used as the default address.
 DeviceDescriptor::DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr,
                                    const std::string &tagName,
                                    const FormatVector &encodedFormats) :
-        DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats)
+        DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats),
+        mDeclaredAddress(DeviceDescriptorBase::address())
 {
     mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
     /* If framework runs against a pre 5.0 Audio HAL, encoded formats are absent from the config.
@@ -75,6 +78,10 @@
 void DeviceDescriptor::detach() {
     mId = AUDIO_PORT_HANDLE_NONE;
     PolicyAudioPort::detach();
+    // The device address may have been overwritten on device connection
+    setAddress(mDeclaredAddress);
+    // Device Port does not have a name unless provided by setDeviceConnectionState
+    setName("");
 }
 
 template<typename T>
@@ -155,8 +162,12 @@
 void DeviceDescriptor::toAudioPort(struct audio_port *port) const
 {
     ALOGV("DeviceDescriptor::toAudioPort() handle %d type %08x", mId, mDeviceTypeAddr.mType);
-    DeviceDescriptorBase::toAudioPort(port);
-    port->ext.device.hw_module = getModuleHandle();
+    toAudioPortInternal(port);
+}
+
+void DeviceDescriptor::toAudioPort(struct audio_port_v7 *port) const {
+    ALOGV("DeviceDescriptor::toAudioPort() v7 handle %d type %08x", mId, mDeviceTypeAddr.mType);
+    toAudioPortInternal(port);
 }
 
 void DeviceDescriptor::importAudioPortAndPickAudioProfile(
@@ -213,6 +224,18 @@
     ALOGV("DeviceVector::refreshTypes() mDeviceTypes %s", dumpDeviceTypes(mDeviceTypes).c_str());
 }
 
+void DeviceVector::refreshAudioProfiles() {
+    if (empty()) {
+        mSupportedProfiles.clear();
+        return;
+    }
+    mSupportedProfiles = itemAt(0)->getAudioProfiles();
+    for (size_t i = 1; i < size(); ++i) {
+        mSupportedProfiles = intersectAudioProfiles(
+                mSupportedProfiles, itemAt(i)->getAudioProfiles());
+    }
+}
+
 ssize_t DeviceVector::indexOf(const sp<DeviceDescriptor>& item) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -227,23 +250,27 @@
 {
     bool added = false;
     for (const auto& device : devices) {
+        ALOG_ASSERT(device != nullptr, "Null pointer found when adding DeviceVector");
         if (indexOf(device) < 0 && SortedVector::add(device) >= 0) {
             added = true;
         }
     }
     if (added) {
         refreshTypes();
+        refreshAudioProfiles();
     }
 }
 
 ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item)
 {
+    ALOG_ASSERT(item != nullptr, "Adding null pointer to DeviceVector");
     ssize_t ret = indexOf(item);
 
     if (ret < 0) {
         ret = SortedVector::add(item);
         if (ret >= 0) {
             refreshTypes();
+            refreshAudioProfiles();
         }
     } else {
         ALOGW("DeviceVector::add device %08x already in", item->type());
@@ -252,6 +279,23 @@
     return ret;
 }
 
+int DeviceVector::do_compare(const void* lhs, const void* rhs) const {
+    const auto ldevice = *reinterpret_cast<const sp<DeviceDescriptor>*>(lhs);
+    const auto rdevice = *reinterpret_cast<const sp<DeviceDescriptor>*>(rhs);
+    int ret = 0;
+
+    // sort by type.
+    ret = compare_type(ldevice->type(), rdevice->type());
+    if (ret != 0)
+        return ret;
+    // for same type higher priority for latest device.
+    ret = compare_type(rdevice->getId(), ldevice->getId());
+    if (ret != 0)
+        return ret;
+    // fallback to default sort using pointer address
+    return SortedVector::do_compare(lhs, rhs);
+}
+
 ssize_t DeviceVector::remove(const sp<DeviceDescriptor>& item)
 {
     ssize_t ret = indexOf(item);
@@ -262,6 +306,7 @@
         ret = SortedVector::removeAt(ret);
         if (ret >= 0) {
             refreshTypes();
+            refreshAudioProfiles();
         }
     }
     return ret;
@@ -375,7 +420,7 @@
     if (isEmpty()) {
         // Return nullptr if this collection is empty.
         return nullptr;
-    } else if (areAllOfSameDeviceType(types(), audio_is_input_device)) {
+    } else if (areAllOfSameDeviceType(types(), audio_call_is_input_device)) {
         // For input case, return the first one when there is only one device.
         return size() > 1 ? nullptr : *begin();
     } else if (areAllOfSameDeviceType(types(), audio_is_output_device)) {
@@ -388,6 +433,24 @@
     return nullptr;
 }
 
+sp<DeviceDescriptor> DeviceVector::getDeviceFromDeviceTypeAddr(
+            const AudioDeviceTypeAddr& deviceTypeAddr) const {
+    return getDevice(deviceTypeAddr.mType, String8(deviceTypeAddr.getAddress()),
+            AUDIO_FORMAT_DEFAULT);
+}
+
+DeviceVector DeviceVector::getDevicesFromDeviceTypeAddrVec(
+        const AudioDeviceTypeAddrVector& deviceTypeAddrVector) const {
+    DeviceVector devices;
+    for (const auto& deviceTypeAddr : deviceTypeAddrVector) {
+        sp<DeviceDescriptor> device = getDeviceFromDeviceTypeAddr(deviceTypeAddr);
+        if (device != nullptr) {
+            devices.add(device);
+        }
+    }
+    return devices;
+}
+
 void DeviceVector::replaceDevicesByType(
         audio_devices_t typeToRemove, const DeviceVector &devicesToAdd) {
     DeviceVector devicesToRemove = getDevicesFromType(typeToRemove);
@@ -408,7 +471,7 @@
     }
 }
 
-std::string DeviceVector::toString() const
+std::string DeviceVector::toString(bool includeSensitiveInfo) const
 {
     if (isEmpty()) {
         return {"AUDIO_DEVICE_NONE"};
@@ -418,7 +481,7 @@
         if (device != *begin()) {
            result += ";";
         }
-        result += device->toString();
+        result += device->toString(includeSensitiveInfo);
     }
     return result + "}";
 }
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 415962a..843f5da 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -202,6 +202,19 @@
     }
 }
 
+audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
+                                                              const effect_uuid_t *effectType)
+{
+    for (size_t i = 0; i < size(); ++i) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mSession == sessionId && (effectType == nullptr ||
+                memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
+            return effect->mIo;
+        }
+    }
+    return AUDIO_IO_HANDLE_NONE;
+}
+
 EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
 {
     EffectDescriptorCollection effects;
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index d31e443..3a143b0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -41,6 +41,14 @@
     }
 }
 
+std::string HwModule::getTagForDevice(audio_devices_t device, const String8 &address,
+                                          audio_format_t codec)
+{
+    DeviceVector declaredDevices = getDeclaredDevices();
+    sp<DeviceDescriptor> deviceDesc = declaredDevices.getDevice(device, address, codec);
+    return deviceDesc ? deviceDesc->getTagName() : std::string{};
+}
+
 status_t HwModule::addOutputProfile(const std::string& name, const audio_config_t *config,
                                     audio_devices_t device, const String8& address)
 {
@@ -49,7 +57,8 @@
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
 
-    sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device, "" /*tagName*/, address.string());
+    sp<DeviceDescriptor> devDesc =
+            new DeviceDescriptor(device, getTagForDevice(device), address.string());
     addDynamicDevice(devDesc);
     // Reciprocally attach the device to the module
     devDesc->attach(this);
@@ -116,7 +125,8 @@
     profile->addAudioProfile(new AudioProfile(config->format, config->channel_mask,
                                               config->sample_rate));
 
-    sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device, "" /*tagName*/, address.string());
+    sp<DeviceDescriptor> devDesc =
+            new DeviceDescriptor(device, getTagForDevice(device), address.string());
     addDynamicDevice(devDesc);
     // Reciprocally attach the device to the module
     devDesc->attach(this);
@@ -271,8 +281,9 @@
     return nullptr;
 }
 
-sp <HwModule> HwModuleCollection::getModuleForDeviceType(audio_devices_t type,
-                                                         audio_format_t encodedFormat) const
+sp<HwModule> HwModuleCollection::getModuleForDeviceType(audio_devices_t type,
+                                                        audio_format_t encodedFormat,
+                                                        std::string *tagName) const
 {
     for (const auto& module : *this) {
         const auto& profiles = audio_is_output_device(type) ?
@@ -284,9 +295,15 @@
                     sp <DeviceDescriptor> deviceDesc =
                             declaredDevices.getDevice(type, String8(), encodedFormat);
                     if (deviceDesc) {
+                        if (tagName != nullptr) {
+                            *tagName = deviceDesc->getTagName();
+                        }
                         return module;
                     }
                 } else {
+                    if (tagName != nullptr) {
+                        *tagName = profile->getTag({type});
+                    }
                     return module;
                 }
             }
@@ -325,15 +342,32 @@
     }
 
     for (const auto& hwModule : *this) {
+        if (!allowToCreate) {
+            auto dynamicDevices = hwModule->getDynamicDevices();
+            auto dynamicDevice = dynamicDevices.getDevice(deviceType, devAddress, encodedFormat);
+            if (dynamicDevice) {
+                return dynamicDevice;
+            }
+        }
         DeviceVector moduleDevices = hwModule->getAllDevices();
         auto moduleDevice = moduleDevices.getDevice(deviceType, devAddress, encodedFormat);
+
+        // Prevent overwriting the moduleDevice address if the connected device does not have the
+        // same address (getDevice with an empty address ignores the address match); use a dynamic device.
+        if (moduleDevice && allowToCreate &&
+                (!moduleDevice->address().empty() &&
+                 (moduleDevice->address().compare(devAddress.c_str()) != 0))) {
+            break;
+        }
         if (moduleDevice) {
             if (encodedFormat != AUDIO_FORMAT_DEFAULT) {
                 moduleDevice->setEncodedFormat(encodedFormat);
             }
             if (allowToCreate) {
                 moduleDevice->attach(hwModule);
+                // Address may be overwritten, restored on detach.
                 moduleDevice->setAddress(devAddress.string());
+                // Name may be overwritten, restored on detach.
                 moduleDevice->setName(name);
             }
             return moduleDevice;
@@ -352,18 +386,19 @@
                                                       const char *name,
                                                       const audio_format_t encodedFormat) const
 {
-    sp<HwModule> hwModule = getModuleForDeviceType(type, encodedFormat);
+    std::string tagName = {};
+    sp<HwModule> hwModule = getModuleForDeviceType(type, encodedFormat, &tagName);
     if (hwModule == 0) {
         ALOGE("%s: could not find HW module for device %04x address %s", __FUNCTION__, type,
               address);
         return nullptr;
     }
 
-    sp<DeviceDescriptor> device = new DeviceDescriptor(type, name, address);
+    sp<DeviceDescriptor> device = new DeviceDescriptor(type, tagName, address);
     device->setName(name);
     device->setEncodedFormat(encodedFormat);
-
-  // Add the device to the list of dynamic devices
+    device->setDynamic();
+    // Add the device to the list of dynamic devices
     hwModule->addDynamicDevice(device);
     // Reciprocally attach the device to the module
     device->attach(hwModule);
@@ -375,7 +410,7 @@
     for (const auto &profile : profiles) {
         // Add the device as supported to all profiles that support the device,
         // at least "weakly" (i.e. by type only)
-        if (profile->supportsDevice(device, false /*matchAdress*/)) {
+        if (profile->supportsDevice(device, false /*matchAddress*/)) {
 
             // @todo what about the audio profile? import the profile from a device of the same type?
             const auto &isoTypeDeviceForProfile =
@@ -406,10 +441,9 @@
 
         device->detach();
         // Only remove from dynamic list, not from declared list!!!
-        if (!hwModule->getDynamicDevices().contains(device)) {
+        if (!hwModule->removeDynamicDevice(device)) {
             return;
         }
-        hwModule->removeDynamicDevice(device);
         ALOGV("%s: removed dynamic device %s from module %s", __FUNCTION__,
               device->toString().c_str(), hwModule->getName());
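
The getTagForDevice()/createDevice() changes above keep the tag name declared in the XML when a device is added dynamically, so that <route> elements matching by tag continue to work; a condensed sketch of that flow (the A2DP address is a placeholder):

#include <system/audio.h>
#include "DeviceDescriptor.h"
#include "HwModule.h"

android::sp<android::DeviceDescriptor> addDynamicA2dpDevice(
        const android::sp<android::HwModule>& hwModule) {
    // Reuse the tag of the declared device of the same type so route matching keeps working.
    std::string tag = hwModule->getTagForDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
    android::sp<android::DeviceDescriptor> device = new android::DeviceDescriptor(
            AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, tag, "00:11:22:33:44:55");
    hwModule->addDynamicDevice(device);  // also marks the device as dynamic
    device->attach(hwModule);            // reciprocally attach the device to the module
    return device;
}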
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index bf1a0f7..09b614d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -17,7 +17,7 @@
 #define LOG_TAG "APM::IOProfile"
 //#define LOG_NDEBUG 0
 
-#include <system/audio-base.h>
+#include <system/audio.h>
 #include "IOProfile.h"
 #include "HwModule.h"
 #include "TypeConverter.h"
@@ -105,6 +105,17 @@
     return true;
 }
 
+bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
+        const sp<DeviceDescriptor>& device) const {
+    if (device == nullptr) {
+        return false;
+    }
+    DeviceVector deviceList = mSupportedDevices.getDevicesFromType(device->type());
+    return std::count_if(deviceList.begin(), deviceList.end(),
+            [&device](sp<DeviceDescriptor> deviceDesc) {
+                return device == deviceDesc && deviceDesc->hasCurrentEncodedFormat(); }) == 1;
+}
+
 void IOProfile::dump(String8 *dst) const
 {
     std::string portStr;
@@ -112,12 +123,11 @@
     dst->append(portStr.c_str());
 
     dst->appendFormat("    - flags: 0x%04x", getFlags());
-    std::string flagsLiteral;
-    if (getRole() == AUDIO_PORT_ROLE_SINK) {
-        InputFlagConverter::maskToString(getFlags(), flagsLiteral);
-    } else if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
-        OutputFlagConverter::maskToString(getFlags(), flagsLiteral);
-    }
+    std::string flagsLiteral =
+            getRole() == AUDIO_PORT_ROLE_SINK ?
+            toString(static_cast<audio_input_flags_t>(getFlags())) :
+            getRole() == AUDIO_PORT_ROLE_SOURCE ?
+            toString(static_cast<audio_output_flags_t>(getFlags())) : "";
     if (!flagsLiteral.empty()) {
         dst->appendFormat(" (%s)", flagsLiteral.c_str());
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 883e713..84ed656 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -20,8 +20,8 @@
 #include <memory>
 #include <string>
 #include <utility>
+#include <variant>
 
-#include <hidl/Status.h>
 #include <libxml/parser.h>
 #include <libxml/xinclude.h>
 #include <media/convert.h>
@@ -36,11 +36,14 @@
 
 namespace {
 
-// TODO(mnaganov): Consider finding an alternative for using HIDL code.
-using hardware::Return;
-using hardware::Status;
 using utilities::convertTo;
 
+static inline bool maybeVendorExtension(const std::string& s) {
+    // Only checks whether the string starts with the "vendor prefix".
+    static const std::string vendorPrefix = "VX_";
+    return s.size() > vendorPrefix.size() && s.substr(0, vendorPrefix.size()) == vendorPrefix;
+}
+
 template<typename E, typename C>
 struct AndroidCollectionTraits {
     typedef sp<E> Element;
@@ -89,7 +92,6 @@
 
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // No children
 };
 
@@ -106,8 +108,6 @@
         static constexpr const char *format = "format";
         static constexpr const char *channelMasks = "channelMasks";
     };
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 struct MixPortTraits : public AndroidCollectionTraits<IOProfile, IOProfileCollection>
@@ -125,7 +125,6 @@
         static constexpr const char *maxActiveCount = "maxActiveCount";
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: GainTraits
 };
 
@@ -147,7 +146,6 @@
         static constexpr const char *encodedFormats = "encodedFormats";
     };
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: GainTraits (optional)
 };
 
@@ -166,8 +164,6 @@
     };
 
     typedef HwModule *PtrSerializingCtx;
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 struct ModuleTraits : public AndroidCollectionTraits<HwModule, HwModuleCollection>
@@ -187,13 +183,14 @@
 
     typedef AudioPolicyConfig *PtrSerializingCtx;
 
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
     // Children: mixPortTraits, devicePortTraits, and routeTraits
     // Need to call deserialize on each child
 };
 
 struct GlobalConfigTraits
 {
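+    // Placeholder element type: this trait does not produce a standalone object; its
+    // deserializer writes directly into the AudioPolicyConfig passed as context.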
+    typedef std::monostate Element;
+
     static constexpr const char *tag = "globalConfiguration";
 
     struct Attributes
@@ -203,14 +200,16 @@
         static constexpr const char *engineLibrarySuffix = "engine_library";
     };
 
-    static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
+    typedef AudioPolicyConfig *PtrSerializingCtx;
 };
 
 struct SurroundSoundTraits
 {
+    typedef std::monostate Element;
+
     static constexpr const char *tag = "surroundSound";
 
-    static status_t deserialize(const xmlNode *root, AudioPolicyConfig *config);
+    typedef AudioPolicyConfig *PtrSerializingCtx;
     // Children: SurroundSoundFormatTraits
 };
 
@@ -224,39 +223,54 @@
         static constexpr const char *name = "name";
         static constexpr const char *subformats = "subformats";
     };
-
-    static Return<Element> deserialize(const xmlNode *cur, PtrSerializingCtx serializingContext);
 };
 
 class PolicySerializer
 {
 public:
-    PolicySerializer() : mVersion{std::to_string(gMajor) + "." + std::to_string(gMinor)}
-    {
-        ALOGV("%s: Version=%s Root=%s", __func__, mVersion.c_str(), rootName);
-    }
-    status_t deserialize(const char *configFile, AudioPolicyConfig *config);
+    status_t deserialize(const char *configFile, AudioPolicyConfig *config,
+            bool ignoreVendorExtensions = false);
+
+    template <class Trait>
+    status_t deserializeCollection(const xmlNode *cur,
+            typename Trait::Collection *collection,
+            typename Trait::PtrSerializingCtx serializingContext);
+    template <class Trait>
+    std::variant<status_t, typename Trait::Element> deserialize(const xmlNode *cur,
+            typename Trait::PtrSerializingCtx serializingContext);
 
 private:
     static constexpr const char *rootName = "audioPolicyConfiguration";
     static constexpr const char *versionAttribute = "version";
-    static constexpr uint32_t gMajor = 1; /**< the major number of the policy xml format version. */
-    static constexpr uint32_t gMinor = 0; /**< the minor number of the policy xml format version. */
 
     typedef AudioPolicyConfig Element;
 
-    const std::string mVersion;
+    bool mIgnoreVendorExtensions = false;
+    std::string mChannelMasksSeparator = ",";
+    std::string mSamplingRatesSeparator = ",";
+    std::string mFlagsSeparator = "|";
 
     // Children: ModulesTraits, VolumeTraits, SurroundSoundTraits (optional)
 };
 
+// Deleter using free() for use with std::unique_ptr<>. See also UniqueCPtr<> below.
+struct FreeDelete {
+    // NOTE: Deleting a const object is valid but free() takes a non-const pointer.
+    void operator()(const void* ptr) const {
+        free(const_cast<void*>(ptr));
+    }
+};
+
+// Alias for std::unique_ptr<> that uses the C function free() to delete objects.
+template <typename T>
+using UniqueCPtr = std::unique_ptr<T, FreeDelete>;
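+// Usage sketch: strings duplicated with strdup()/strndup() must be released with free(),
+// so they are held as, e.g., UniqueCPtr<char> copy{strdup("a,b,c")};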
+
 template <class T>
 constexpr void (*xmlDeleter)(T* t);
 template <>
 constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
-// http://b/111067277 - Add back constexpr when we switch to C++17.
 template <>
-auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
 
 /** @return a unique_ptr with the correct deleter for the libxml2 object. */
 template <class T>
@@ -296,7 +310,7 @@
 }
 
 template <class Trait>
-status_t deserializeCollection(const xmlNode *cur,
+status_t PolicySerializer::deserializeCollection(const xmlNode *cur,
         typename Trait::Collection *collection,
         typename Trait::PtrSerializingCtx serializingContext)
 {
@@ -309,14 +323,17 @@
         }
         for (; child != NULL; child = child->next) {
             if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>(Trait::tag))) {
-                auto element = Trait::deserialize(child, serializingContext);
-                if (element.isOk()) {
-                    status_t status = Trait::addElementToCollection(element, collection);
+                auto maybeElement = deserialize<Trait>(child, serializingContext);
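+                // Convention: index 0 of the variant holds a status_t (an error, or
+                // NO_INIT when a vendor-extension element is being skipped), while
+                // index 1 holds the successfully parsed element.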
+                if (maybeElement.index() == 1) {
+                    status_t status = Trait::addElementToCollection(
+                            std::get<1>(maybeElement), collection);
                     if (status != NO_ERROR) {
                         ALOGE("%s: could not add element to %s collection", __func__,
                             Trait::collectionTag);
                         return status;
                     }
+                } else if (mIgnoreVendorExtensions && std::get<status_t>(maybeElement) == NO_INIT) {
+                    // Skip a vendor extension element.
                 } else {
                     return BAD_VALUE;
                 }
@@ -329,15 +346,18 @@
     return NO_ERROR;
 }
 
-Return<AudioGainTraits::Element> AudioGainTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+std::variant<status_t, AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(
+        const xmlNode *cur, AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = AudioGainTraits::Attributes;
+
     static uint32_t index = 0;
-    Element gain = new AudioGain(index++, true);
+    AudioGainTraits::Element gain = new AudioGain(index++, true);
 
     std::string mode = getXmlAttribute(cur, Attributes::mode);
     if (!mode.empty()) {
-        gain->setMode(GainModeConverter::maskFromString(mode));
+        gain->setMode(GainModeConverter::maskFromString(mode, " "));
     }
 
     std::string channelsLiteral = getXmlAttribute(cur, Attributes::channelMask);
@@ -392,20 +412,28 @@
     if (gain->getMode() != 0) {
         return gain;
     } else {
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
 }
 
-Return<AudioProfileTraits::Element> AudioProfileTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+std::variant<status_t, AudioProfileTraits::Element>
+PolicySerializer::deserialize<AudioProfileTraits>(
+        const xmlNode *cur, AudioProfileTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = AudioProfileTraits::Attributes;
+
     std::string samplingRates = getXmlAttribute(cur, Attributes::samplingRates);
     std::string format = getXmlAttribute(cur, Attributes::format);
     std::string channels = getXmlAttribute(cur, Attributes::channelMasks);
 
-    Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
-            channelMasksFromString(channels, ","),
-            samplingRatesFromString(samplingRates, ","));
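+    // Formats carrying the "VX_" vendor prefix are skipped (not treated as errors) when
+    // vendor extensions are ignored; NO_INIT tells the caller to drop this element.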
+    if (mIgnoreVendorExtensions && maybeVendorExtension(format)) {
+        ALOGI("%s: vendor extension format \"%s\" skipped", __func__, format.c_str());
+        return NO_INIT;
+    }
+    AudioProfileTraits::Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
+            channelMasksFromString(channels, mChannelMasksSeparator.c_str()),
+            samplingRatesFromString(samplingRates, mSamplingRatesSeparator.c_str()));
 
     profile->setDynamicFormat(profile->getFormat() == gDynamicFormat);
     profile->setDynamicChannels(profile->getChannels().empty());
@@ -414,30 +442,33 @@
     return profile;
 }
 
-Return<MixPortTraits::Element> MixPortTraits::deserialize(const xmlNode *child,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+std::variant<status_t, MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(
+        const xmlNode *child, MixPortTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = MixPortTraits::Attributes;
+
     std::string name = getXmlAttribute(child, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::name, name.c_str());
+    ALOGV("%s: %s %s=%s", __func__, MixPortTraits::tag, Attributes::name, name.c_str());
     std::string role = getXmlAttribute(child, Attributes::role);
     if (role.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::role);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: Role=%s", __func__, role.c_str());
     audio_port_role_t portRole = (role == Attributes::roleSource) ?
             AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
 
-    Element mixPort = new IOProfile(name, portRole);
+    MixPortTraits::Element mixPort = new IOProfile(name, portRole);
 
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(child, &profiles, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     if (profiles.empty()) {
         profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -451,10 +482,10 @@
     if (!flags.empty()) {
         // Source role
         if (portRole == AUDIO_PORT_ROLE_SOURCE) {
-            mixPort->setFlags(OutputFlagConverter::maskFromString(flags));
+            mixPort->setFlags(OutputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
         } else {
             // Sink role
-            mixPort->setFlags(InputFlagConverter::maskFromString(flags));
+            mixPort->setFlags(InputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
         }
     }
     std::string maxOpenCount = getXmlAttribute(child, Attributes::maxOpenCount);
@@ -469,43 +500,51 @@
     AudioGainTraits::Collection gains;
     status = deserializeCollection<AudioGainTraits>(child, &gains, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     mixPort->setGains(gains);
 
     return mixPort;
 }
 
-Return<DevicePortTraits::Element> DevicePortTraits::deserialize(const xmlNode *cur,
-        PtrSerializingCtx /*serializingContext*/)
+template<>
+std::variant<status_t, DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
+        const xmlNode *cur, DevicePortTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = DevicePortTraits::Attributes;
+    auto& tag = DevicePortTraits::tag;
+
     std::string name = getXmlAttribute(cur, Attributes::tagName);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::tagName);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::tagName, name.c_str());
     std::string typeName = getXmlAttribute(cur, Attributes::type);
     if (typeName.empty()) {
         ALOGE("%s: no type for %s", __func__, name.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::type, typeName.c_str());
     std::string role = getXmlAttribute(cur, Attributes::role);
     if (role.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::role);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::role, role.c_str());
     audio_port_role_t portRole = (role == Attributes::roleSource) ?
                 AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
 
+    if (mIgnoreVendorExtensions && maybeVendorExtension(typeName)) {
+        ALOGI("%s: vendor extension device type \"%s\" skipped", __func__, typeName.c_str());
+        return NO_INIT;
+    }
     audio_devices_t type = AUDIO_DEVICE_NONE;
-    if (!deviceFromString(typeName, type) ||
+    if (!DeviceConverter::fromString(typeName, type) ||
             (!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
             (!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
         ALOGW("%s: bad type %08x", __func__, type);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     std::string encodedFormatsLiteral = getXmlAttribute(cur, Attributes::encodedFormats);
     ALOGV("%s: %s %s=%s", __func__, tag, Attributes::encodedFormats, encodedFormatsLiteral.c_str());
@@ -514,12 +553,13 @@
         encodedFormats = formatsFromString(encodedFormatsLiteral, " ");
     }
     std::string address = getXmlAttribute(cur, Attributes::address);
-    Element deviceDesc = new DeviceDescriptor(type, name, address, encodedFormats);
+    DevicePortTraits::Element deviceDesc =
+            new DeviceDescriptor(type, name, address, encodedFormats);
 
     AudioProfileTraits::Collection profiles;
     status_t status = deserializeCollection<AudioProfileTraits>(cur, &profiles, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     if (profiles.empty()) {
         profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -532,57 +572,69 @@
     // Deserialize AudioGain children
     status = deserializeCollection<AudioGainTraits>(cur, &deviceDesc->mGains, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     ALOGV("%s: adding device tag %s type %08x address %s", __func__,
           deviceDesc->getName().c_str(), type, deviceDesc->address().c_str());
     return deviceDesc;
 }
 
-Return<RouteTraits::Element> RouteTraits::deserialize(const xmlNode *cur, PtrSerializingCtx ctx)
+template<>
+std::variant<status_t, RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
+        const xmlNode *cur, RouteTraits::PtrSerializingCtx ctx)
 {
+    using Attributes = RouteTraits::Attributes;
+
     std::string type = getXmlAttribute(cur, Attributes::type);
     if (type.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::type);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     audio_route_type_t routeType = (type == Attributes::typeMix) ?
                 AUDIO_ROUTE_MIX : AUDIO_ROUTE_MUX;
 
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::type, type.c_str());
-    Element route = new AudioRoute(routeType);
+    ALOGV("%s: %s %s=%s", __func__, RouteTraits::tag, Attributes::type, type.c_str());
+    RouteTraits::Element route = new AudioRoute(routeType);
 
     std::string sinkAttr = getXmlAttribute(cur, Attributes::sink);
     if (sinkAttr.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::sink);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     // Convert Sink name to port pointer
     sp<PolicyAudioPort> sink = ctx->findPortByTagName(sinkAttr);
-    if (sink == NULL) {
+    if (sink == NULL && !mIgnoreVendorExtensions) {
         ALOGE("%s: no sink found with name=%s", __func__, sinkAttr.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
+    } else if (sink == NULL) {
+        ALOGW("Skipping route to sink \"%s\" as it likely has vendor extension type",
+                sinkAttr.c_str());
+        return NO_INIT;
     }
     route->setSink(sink);
 
     std::string sourcesAttr = getXmlAttribute(cur, Attributes::sources);
     if (sourcesAttr.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::sources);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     // Tokenize and Convert Sources name to port pointer
     PolicyAudioPortVector sources;
-    std::unique_ptr<char[]> sourcesLiteral{strndup(
+    UniqueCPtr<char> sourcesLiteral{strndup(
                 sourcesAttr.c_str(), strlen(sourcesAttr.c_str()))};
     char *devTag = strtok(sourcesLiteral.get(), ",");
     while (devTag != NULL) {
         if (strlen(devTag) != 0) {
             sp<PolicyAudioPort> source = ctx->findPortByTagName(devTag);
-            if (source == NULL) {
+            if (source == NULL && !mIgnoreVendorExtensions) {
                 ALOGE("%s: no source found with name=%s", __func__, devTag);
-                return Status::fromStatusT(BAD_VALUE);
+                return BAD_VALUE;
+            } else if (source == NULL) {
+                ALOGW("Skipping route source \"%s\" as it likely has vendor extension type",
+                        devTag);
+            } else {
+                sources.add(source);
             }
-            sources.add(source);
         }
         devTag = strtok(NULL, ",");
     }
@@ -596,12 +648,20 @@
     return route;
 }
 
-Return<ModuleTraits::Element> ModuleTraits::deserialize(const xmlNode *cur, PtrSerializingCtx ctx)
+template<>
+std::variant<status_t, ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
+        const xmlNode *cur, ModuleTraits::PtrSerializingCtx ctx)
 {
+    using Attributes = ModuleTraits::Attributes;
+    auto& tag = ModuleTraits::tag;
+    auto& childAttachedDevicesTag = ModuleTraits::childAttachedDevicesTag;
+    auto& childAttachedDeviceTag = ModuleTraits::childAttachedDeviceTag;
+    auto& childDefaultOutputDeviceTag = ModuleTraits::childDefaultOutputDeviceTag;
+
     std::string name = getXmlAttribute(cur, Attributes::name);
     if (name.empty()) {
         ALOGE("%s: No %s found", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
     uint32_t versionMajor = 0, versionMinor = 0;
     std::string versionLiteral = getXmlAttribute(cur, Attributes::version);
@@ -611,29 +671,29 @@
               versionMajor, versionMajor);
     }
 
-    ALOGV("%s: %s %s=%s", __func__, tag, Attributes::name, name.c_str());
+    ALOGV("%s: %s %s=%s", __func__, ModuleTraits::tag, Attributes::name, name.c_str());
 
-    Element module = new HwModule(name.c_str(), versionMajor, versionMinor);
+    ModuleTraits::Element module = new HwModule(name.c_str(), versionMajor, versionMinor);
 
-    // Deserialize childrens: Audio Mix Port, Audio Device Ports (Source/Sink), Audio Routes
+    // Deserialize children: Audio Mix Port, Audio Device Ports (Source/Sink), Audio Routes
     MixPortTraits::Collection mixPorts;
     status_t status = deserializeCollection<MixPortTraits>(cur, &mixPorts, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setProfiles(mixPorts);
 
     DevicePortTraits::Collection devicePorts;
     status = deserializeCollection<DevicePortTraits>(cur, &devicePorts, NULL);
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setDeclaredDevices(devicePorts);
 
     RouteTraits::Collection routes;
     status = deserializeCollection<RouteTraits>(cur, &routes, module.get());
     if (status != NO_ERROR) {
-        return Status::fromStatusT(status);
+        return status;
     }
     module->setRoutes(routes);
 
@@ -653,6 +713,12 @@
                         sp<DeviceDescriptor> device = module->getDeclaredDevices().
                                 getDeviceFromTagName(std::string(reinterpret_cast<const char*>(
                                                         attachedDevice.get())));
+                        if (device == nullptr && mIgnoreVendorExtensions) {
+                            ALOGW("Skipped attached device \"%s\" because it likely uses a vendor "
+                                    "extension type",
+                                    reinterpret_cast<const char*>(attachedDevice.get()));
+                            continue;
+                        }
                         ctx->addDevice(device);
                     }
                 }
@@ -678,10 +744,15 @@
     return module;
 }
 
-status_t GlobalConfigTraits::deserialize(const xmlNode *root, AudioPolicyConfig *config)
+template<>
+std::variant<status_t, GlobalConfigTraits::Element>
+PolicySerializer::deserialize<GlobalConfigTraits>(
+        const xmlNode *root, GlobalConfigTraits::PtrSerializingCtx config)
 {
+    using Attributes = GlobalConfigTraits::Attributes;
+
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
-        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(tag))) {
+        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(GlobalConfigTraits::tag))) {
             bool value;
             std::string attr = getXmlAttribute(cur, Attributes::speakerDrcEnabled);
             if (!attr.empty() &&
@@ -703,12 +774,15 @@
     return NO_ERROR;
 }
 
-status_t SurroundSoundTraits::deserialize(const xmlNode *root, AudioPolicyConfig *config)
+template<>
+std::variant<status_t, SurroundSoundTraits::Element>
+PolicySerializer::deserialize<SurroundSoundTraits>(
+        const xmlNode *root, SurroundSoundTraits::PtrSerializingCtx config)
 {
     config->setDefaultSurroundFormats();
 
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
-        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(tag))) {
+        if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(SurroundSoundTraits::tag))) {
             AudioPolicyConfig::SurroundFormats formats;
             status_t status = deserializeCollection<SurroundSoundFormatTraits>(
                     cur, &formats, nullptr);
@@ -721,20 +795,29 @@
     return NO_ERROR;
 }
 
-Return<SurroundSoundFormatTraits::Element> SurroundSoundFormatTraits::deserialize(
-        const xmlNode *cur, PtrSerializingCtx /*serializingContext*/)
+template<>
+std::variant<status_t, SurroundSoundFormatTraits::Element>
+PolicySerializer::deserialize<SurroundSoundFormatTraits>(
+        const xmlNode *cur, SurroundSoundFormatTraits::PtrSerializingCtx /*serializingContext*/)
 {
+    using Attributes = SurroundSoundFormatTraits::Attributes;
+
     std::string formatLiteral = getXmlAttribute(cur, Attributes::name);
     if (formatLiteral.empty()) {
         ALOGE("%s: No %s found for a surround format", __func__, Attributes::name);
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    if (mIgnoreVendorExtensions && maybeVendorExtension(formatLiteral)) {
+        ALOGI("%s: vendor extension format \"%s\" skipped", __func__, formatLiteral.c_str());
+        return NO_INIT;
     }
     audio_format_t format = formatFromString(formatLiteral);
     if (format == AUDIO_FORMAT_DEFAULT) {
         ALOGE("%s: Unrecognized format %s", __func__, formatLiteral.c_str());
-        return Status::fromStatusT(BAD_VALUE);
+        return BAD_VALUE;
     }
-    Element pair = std::make_pair(format, Collection::mapped_type{});
+    SurroundSoundFormatTraits::Element pair = std::make_pair(
+            format, SurroundSoundFormatTraits::Collection::mapped_type{});
 
     std::string subformatsLiteral = getXmlAttribute(cur, Attributes::subformats);
     if (subformatsLiteral.empty()) return pair;
@@ -743,14 +826,16 @@
         auto result = pair.second.insert(subformat);
         if (!result.second) {
             ALOGE("%s: could not add subformat %x to collection", __func__, subformat);
-            return Status::fromStatusT(BAD_VALUE);
+            return BAD_VALUE;
         }
     }
     return pair;
 }
 
-status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config)
+status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config,
+                                       bool ignoreVendorExtensions)
 {
+    mIgnoreVendorExtensions = ignoreVendorExtensions;
     auto doc = make_xmlUnique(xmlParseFile(configFile));
     if (doc == nullptr) {
         ALOGE("%s: Could not parse %s document.", __func__, configFile);
@@ -776,12 +861,14 @@
         ALOGE("%s: No version found in root node %s", __func__, rootName);
         return BAD_VALUE;
     }
-    if (version != mVersion) {
-        ALOGE("%s: Version does not match; expect %s got %s", __func__, mVersion.c_str(),
-              version.c_str());
+    if (version == "7.0") {
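+        // The 7.0 schema separates list values (sampling rates, channel masks, flags)
+        // with spaces; the legacy 1.0 schema used "," for lists and "|" for flags.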
+        mChannelMasksSeparator = mSamplingRatesSeparator = mFlagsSeparator = " ";
+    } else if (version != "1.0") {
+        ALOGE("%s: Version does not match; expected \"1.0\" or \"7.0\" got \"%s\"",
+                __func__, version.c_str());
         return BAD_VALUE;
     }
-    // Lets deserialize children
+    // Let's deserialize children
     // Modules
     ModuleTraits::Collection modules;
     status_t status = deserializeCollection<ModuleTraits>(root, &modules, config);
@@ -791,10 +878,10 @@
     config->setHwModules(modules);
 
     // Global Configuration
-    GlobalConfigTraits::deserialize(root, config);
+    deserialize<GlobalConfigTraits>(root, config);
 
     // Surround configuration
-    SurroundSoundTraits::deserialize(root, config);
+    deserialize<SurroundSoundTraits>(root, config);
 
     return android::OK;
 }
@@ -804,7 +891,17 @@
 status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config)
 {
     PolicySerializer serializer;
-    return serializer.deserialize(fileName, config);
+    status_t status = serializer.deserialize(fileName, config);
+    if (status != OK) config->clear();
+    return status;
+}
+
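+// Same as deserializeAudioPolicyFile(), but elements using vendor extension types or
+// formats ("VX_"-prefixed names) are skipped so that VTS can validate the remainder.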
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config)
+{
+    PolicySerializer serializer;
+    status_t status = serializer.deserialize(fileName, config, true /*ignoreVendorExtensions*/);
+    if (status != OK) config->clear();
+    return status;
 }
 
 } // namespace android
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index f4610bb..671b30a 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -17,6 +17,15 @@
 soong_namespace {
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "a2dp_in_audio_policy_configuration.xml",
     vendor: true,
diff --git a/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..2d323f6
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_audio_policy_configuration_7_0.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- A2dp Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="a2dp output" role="source"/>
+        <mixPort name="a2dp input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="a2dp input"
+               sources="BT A2DP In"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..d59ad70
--- /dev/null
+++ b/services/audiopolicy/config/a2dp_in_audio_policy_configuration_7_0.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Input Audio HAL Audio Policy Configuration file -->
+<module name="a2dp" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="a2dp input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT A2DP In" type="AUDIO_DEVICE_IN_BLUETOOTH_A2DP" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="a2dp input"
+               sources="BT A2DP In"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/audio_policy_configuration.xml b/services/audiopolicy/config/audio_policy_configuration.xml
index b28381b..dcdc035 100644
--- a/services/audiopolicy/config/audio_policy_configuration.xml
+++ b/services/audiopolicy/config/audio_policy_configuration.xml
@@ -91,7 +91,7 @@
                 <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
                 <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
                 </devicePort>
                 <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
diff --git a/services/audiopolicy/config/audio_policy_configuration_7_0.xml b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..9961a00
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_7_0.xml
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <!-- version section contains a “version” tag in the form “major.minor”, e.g. version=”1.0” -->
+
+    <!-- Global configuration Declaration -->
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+
+    <!-- Modules section:
+        There is one section per audio HW module present on the platform.
+        Each module section will contain two mandatory tags for the audio HAL: “halVersion” and “name”.
+        The module names are the same as in the current .conf file:
+                “primary”, “A2DP”, “remote_submix”, “USB”
+        Each module will contain the following sections:
+        “devicePorts”: a list of device descriptors for all input and output devices accessible via this
+        module.
+        This contains both permanently attached devices and removable devices.
+        “mixPorts”: listing all output and input streams exposed by the audio HAL
+        “routes”: list of possible connections between input and output devices or between streams and
+        devices.
+            "route": is defined by the following attributes:
+                -"type": <mux|mix> means all sources are mutually exclusive (mux) or can be mixed (mix)
+                -"sink": the sink involved in this route
+                -"sources": all the sources that can be connected to the sink via this route
+        “attachedDevices”: permanently attached devices.
+        The attachedDevices section is a list of device names. The names correspond to device names
+        defined in the <devicePorts> section.
+        “defaultOutputDevice”: device to be used by default when no policy rule applies
+    -->
+    <modules>
+        <!-- Primary Audio HAL -->
+        <module name="primary" halVersion="3.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+                <item>Built-In Back Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="deep_buffer" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_DEEP_BUFFER">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="compressed_offload" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+                    <profile name="" format="AUDIO_FORMAT_MP3"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC_LC"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                </mixPort>
+                <mixPort name="voice_tx" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </mixPort>
+                <mixPort name="voice_rx" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
+                <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="Speaker" role="sink" type="AUDIO_DEVICE_OUT_SPEAKER" address="">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="gain_1" mode="AUDIO_GAIN_MODE_JOINT"
+                              minValueMB="-8400"
+                              maxValueMB="4000"
+                              defaultValueMB="0"
+                              stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="Wired Headset" type="AUDIO_DEVICE_OUT_WIRED_HEADSET" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="Wired Headphones" type="AUDIO_DEVICE_OUT_WIRED_HEADPHONE" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Headset" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Car Kit" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+                <devicePort tagName="Telephony Tx" type="AUDIO_DEVICE_OUT_TELEPHONY_TX" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_OUT_MONO"/>
+                </devicePort>
+
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Wired Headset Mic" type="AUDIO_DEVICE_IN_WIRED_HEADSET" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </devicePort>
+                <devicePort tagName="Telephony Rx" type="AUDIO_DEVICE_IN_TELEPHONY_RX" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </devicePort>
+            </devicePorts>
+            <!-- route declaration, i.e. list all available sources for a given sink -->
+            <routes>
+                <route type="mix" sink="Earpiece"
+                       sources="primary output,deep_buffer,BT SCO Headset Mic"/>
+                <route type="mix" sink="Speaker"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="Wired Headset"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="Wired Headphones"
+                       sources="primary output,deep_buffer,compressed_offload,BT SCO Headset Mic,Telephony Rx"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic"/>
+                <route type="mix" sink="Telephony Tx"
+                       sources="Built-In Mic,Built-In Back Mic,Wired Headset Mic,BT SCO Headset Mic, voice_tx"/>
+                <route type="mix" sink="voice_rx"
+                       sources="Telephony Rx"/>
+            </routes>
+
+        </module>
+
+        <!-- A2dp Input Audio HAL -->
+        <xi:include href="a2dp_in_audio_policy_configuration_7_0.xml"/>
+
+        <!-- Usb Audio HAL -->
+        <xi:include href="usb_audio_policy_configuration.xml"/>
+
+        <!-- Remote Submix Audio HAL -->
+        <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+        <!-- Bluetooth Audio HAL -->
+        <xi:include href="bluetooth_audio_policy_configuration_7_0.xml"/>
+
+        <!-- MSD Audio HAL (optional) -->
+        <xi:include href="msd_audio_policy_configuration_7_0.xml"/>
+
+    </modules>
+    <!-- End of Modules section -->
+
+    <!-- Volume section:
+        IMPORTANT NOTE: Volume tables have been moved to the engine configuration.
+                        They are kept here for legacy support; the engine falls back on
+                        these files if it does not provide its own.
+     -->
+
+    <xi:include href="audio_policy_volumes.xml"/>
+    <xi:include href="default_volume_tables.xml"/>
+
+    <!-- End of Volume section -->
+
+    <!-- Surround Sound configuration -->
+
+    <xi:include href="surround_sound_configuration_5_0.xml"/>
+
+    <!-- End of Surround Sound configuration -->
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/config/audio_policy_configuration_bluetooth_legacy_hal.xml b/services/audiopolicy/config/audio_policy_configuration_bluetooth_legacy_hal.xml
index b4cc1d3..5f4e5f2 100644
--- a/services/audiopolicy/config/audio_policy_configuration_bluetooth_legacy_hal.xml
+++ b/services/audiopolicy/config/audio_policy_configuration_bluetooth_legacy_hal.xml
@@ -185,6 +185,9 @@
         <!-- Hearing aid Audio HAL -->
         <xi:include href="hearing_aid_audio_policy_configuration.xml"/>
 
+        <!-- Le Audio Audio HAL -->
+        <xi:include href="le_audio_policy_configuration.xml"/>
+
         <!-- MSD Audio HAL (optional) -->
         <xi:include href="msd_audio_policy_configuration.xml"/>
 
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index ce78eb0..98415b7 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -10,6 +10,18 @@
                      samplingRates="24000,16000"
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <!-- Le Audio Audio Ports -->
+        <mixPort name="le audio output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -30,6 +42,13 @@
         </devicePort>
         <!-- Hearing AIDs Audio Ports -->
         <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+        <!-- BLE Audio Ports -->
+        <!-- Note that these device types are not valid in HAL versions < 7. Any device
+             running a pre-V7 HAL and using this file will not pass VTS; use
+             bluetooth_audio_policy_configuration_7_0.xml instead.
+        -->
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -40,5 +59,9 @@
                sources="a2dp output"/>
         <route type="mix" sink="BT Hearing Aid Out"
                sources="hearing aid output"/>
+        <route type="mix" sink="BLE Headset Out"
+               sources="le audio output"/>
+        <route type="mix" sink="BLE Speaker Out"
+               sources="le audio output"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..fbe7571
--- /dev/null
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.0">
+    <mixPorts>
+        <!-- A2DP Audio Ports -->
+        <mixPort name="a2dp output" role="source"/>
+        <!-- Hearing AIDs Audio Ports -->
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000 16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <!-- Le Audio Audio Ports -->
+        <mixPort name="le audio output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <!-- A2DP Audio Ports -->
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <!-- Hearing AIDs Audio Ports -->
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+        <!-- BLE Audio Ports -->
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT Hearing Aid Out"
+               sources="hearing aid output"/>
+        <route type="mix" sink="BLE Headset Out"
+               sources="le audio output"/>
+        <route type="mix" sink="BLE Speaker Out"
+               sources="le audio output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..8c364e4
--- /dev/null
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration_7_0.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
+<module name="hearing_aid" halVersion="2.0">
+    <mixPorts>
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000 16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT Hearing Aid Out" sources="hearing aid output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/le_audio_policy_configuration.xml b/services/audiopolicy/config/le_audio_policy_configuration.xml
new file mode 100644
index 0000000..a3dc72b
--- /dev/null
+++ b/services/audiopolicy/config/le_audio_policy_configuration.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Le Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.1">
+    <mixPorts>
+        <mixPort name="le audio output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BLE Headset Out" sources="le audio output"/>
+        <route type="mix" sink="BLE Speaker Out" sources="le audio output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..f167f0b
--- /dev/null
+++ b/services/audiopolicy/config/msd_audio_policy_configuration_7_0.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2017-2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Multi Stream Decoder Audio Policy Configuration file -->
+<module name="msd" halVersion="2.0">
+    <attachedDevices>
+        <item>MS12 Input</item>
+        <item>MS12 Output</item>
+    </attachedDevices>
+    <mixPorts>
+        <mixPort name="ms12 input" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <mixPort name="ms12 compressed input" role="source"
+                flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+            <profile name="" format="AUDIO_FORMAT_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_AC4"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+        </mixPort>
+        <!-- The HW AV Sync flag is not required, but is recommended -->
+        <mixPort name="ms12 output" role="sink" flags="AUDIO_INPUT_FLAG_HW_AV_SYNC AUDIO_INPUT_FLAG_DIRECT">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_AC3"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="MS12 Input" type="AUDIO_DEVICE_OUT_BUS" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+            <profile name="" format="AUDIO_FORMAT_AC4"
+                     samplingRates="32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_5POINT1 AUDIO_CHANNEL_OUT_7POINT1"/>
+        </devicePort>
+        <devicePort tagName="MS12 Output" type="AUDIO_DEVICE_IN_BUS" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="MS12 Input" sources="ms12 input,ms12 compressed input"/>
+        <route type="mix" sink="ms12 output" sources="MS12 Output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..68a56b2
--- /dev/null
+++ b/services/audiopolicy/config/primary_audio_policy_configuration_7_0.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Default Primary Audio HAL Module Audio Policy Configuration include file -->
+<module name="primary" halVersion="2.0">
+    <attachedDevices>
+        <item>Speaker</item>
+        <item>Built-In Mic</item>
+    </attachedDevices>
+    <defaultOutputDevice>Speaker</defaultOutputDevice>
+    <mixPorts>
+        <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <mixPort name="primary input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+        </devicePort>
+
+        <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+        </devicePort>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="Speaker"
+               sources="primary output"/>
+        <route type="mix" sink="primary input"
+               sources="Built-In Mic"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
old mode 100755
new mode 100644
index a1c69f2..50c5eab
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiopolicyengine_common_headers",
     host_supported: true,
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
old mode 100755
new mode 100644
index 7f339dc..0f8b0a5
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -53,7 +53,7 @@
                                                audio_policy_dev_state_t /*state*/) override;
 
     product_strategy_t getProductStrategyForAttributes(
-            const audio_attributes_t &attr) const override;
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const override;
 
     audio_stream_type_t getStreamTypeForAttributes(const audio_attributes_t &attr) const override;
 
@@ -79,9 +79,11 @@
 
     VolumeGroupVector getVolumeGroups() const override;
 
-    volume_group_t getVolumeGroupForAttributes(const audio_attributes_t &attr) const override;
+    volume_group_t getVolumeGroupForAttributes(
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const override;
 
-    volume_group_t getVolumeGroupForStreamType(audio_stream_type_t stream) const override;
+    volume_group_t getVolumeGroupForStreamType(
+            audio_stream_type_t stream, bool fallbackOnDefault = true) const override;
 
     status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) const override;
 
@@ -93,13 +95,13 @@
 
     void dump(String8 *dst) const override;
 
-    status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device) override;
+    status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
 
-    status_t removePreferredDeviceForStrategy(product_strategy_t strategy) override;
+    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
 
-    status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const override;
+    status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const override;
 
     engineConfig::ParsingResult loadAudioPolicyEngineConfig();
 
@@ -127,11 +129,54 @@
 
     status_t restoreOriginVolumeCurve(audio_stream_type_t stream);
 
+    status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
+
+    status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
+
+    /**
+     * Remove devices role for capture preset. When `forceMatched` is true, all of the devices to
+     * be removed must currently be assigned the given role for the capture preset. Otherwise,
+     * only the devices that are currently assigned the given role for the capture preset are
+     * removed.
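+     *
+     * Illustrative example (device names are hypothetical): if the devices cached as
+     * DEVICE_ROLE_PREFERRED for a preset are {A, B}, removing {A, C} with forceMatched=true
+     * returns BAD_VALUE and leaves the cache untouched, whereas forceMatched=false removes A and
+     * leaves {B}.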
+     */
+    status_t doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices,
+            bool forceMatched = true);
+
+    status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices) override;
+
+    status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role) override;
+
+    status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices) const override;
+
+    DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const override;
+
 private:
+    /**
+     * Get the media devices that are assigned the given role
+     *
+     * @param role the audio devices role
+     * @param availableDevices all currently available devices
+     * @param devices the DeviceVector in which the devices with the given role are returned
+     * @return NO_ERROR if all devices associated with the given role are present in
+     *         availableDevices
+     *         NAME_NOT_FOUND if there is no strategy for media or no devices are associated with
+     *         the given role
+     *         NOT_ENOUGH_DATA if only some of the devices with the given role are present in
+     *         availableDevices
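+     *
+     * This helper backs getActiveMediaDevices(), which resolves the preferred and disabled media
+     * devices against the currently available devices.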
+     */
+    status_t getMediaDevicesForRole(device_role_t role, const DeviceVector& availableDevices,
+            DeviceVector& devices) const;
+
+    void dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const;
+
     AudioPolicyManagerObserver *mApmObserver = nullptr;
 
     ProductStrategyMap mProductStrategies;
-    ProductStrategyPreferredRoutingMap mProductStrategyPreferredDevices;
+    ProductStrategyDevicesRoleMap mProductStrategyDeviceRoleMap;
+    CapturePresetDevicesRoleMap mCapturePresetDevicesRoleMap;
     VolumeGroupMap mVolumeGroups;
     LastRemovableMediaDevices mLastRemovableMediaDevices;
     audio_mode_t mPhoneState = AUDIO_MODE_NORMAL;  /**< current phone state. */
diff --git a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
old mode 100755
new mode 100644
index a3053a4..d7f8b1e
--- a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
+++ b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
@@ -36,6 +36,8 @@
     void setRemovableMediaDevices(sp<DeviceDescriptor> desc, audio_policy_dev_state_t state);
     std::vector<audio_devices_t> getLastRemovableMediaDevices(
             device_out_group_t group = GROUP_NONE) const;
+    sp<DeviceDescriptor> getLastRemovableMediaDevice(
+            const DeviceVector& excludedDevices, device_out_group_t group = GROUP_NONE) const;
 
 private:
     struct DeviceGroupDescriptor {
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 3ebe7d1..2aa2f9a 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -18,17 +18,20 @@
 
 #include "VolumeGroup.h"
 
-#include <system/audio.h>
-#include <utils/RefBase.h>
-#include <HandleGenerator.h>
-#include <string>
-#include <vector>
 #include <map>
-#include <utils/Errors.h>
-#include <utils/String8.h>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <HandleGenerator.h>
 #include <media/AudioAttributes.h>
 #include <media/AudioContainers.h>
+#include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
 
 namespace android {
 
@@ -126,7 +129,8 @@
      * @param attr
      * @return applicable product strategy for the given attribute, default if none applicable.
      */
-    product_strategy_t getProductStrategyForAttributes(const audio_attributes_t &attr) const;
+    product_strategy_t getProductStrategyForAttributes(
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const;
 
     product_strategy_t getProductStrategyForStream(audio_stream_type_t stream) const;
 
@@ -150,9 +154,11 @@
 
     std::string getDeviceAddressForProductStrategy(product_strategy_t strategy) const;
 
-    volume_group_t getVolumeGroupForAttributes(const audio_attributes_t &attr) const;
+    volume_group_t getVolumeGroupForAttributes(
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const;
 
-    volume_group_t getVolumeGroupForStreamType(audio_stream_type_t stream) const;
+    volume_group_t getVolumeGroupForStreamType(
+            audio_stream_type_t stream, bool fallbackOnDefault = true) const;
 
     volume_group_t getDefaultVolumeGroup() const;
 
@@ -164,10 +170,12 @@
     product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
 };
 
-class ProductStrategyPreferredRoutingMap : public std::map<product_strategy_t, AudioDeviceTypeAddr>
-{
-public:
-    void dump(String8 *dst, int spaces = 0) const;
-};
+using ProductStrategyDevicesRoleMap =
+        std::map<std::pair<product_strategy_t, device_role_t>, AudioDeviceTypeAddrVector>;
+
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces);
 
 } // namespace android
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 1bc7fe3..150a9a8 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -17,6 +17,10 @@
 #define LOG_TAG "APM::AudioPolicyEngine/Base"
 //#define LOG_NDEBUG 0
 
+#include <functional>
+#include <string>
+#include <sys/stat.h>
+
 #include "EngineBase.h"
 #include "EngineDefaultConfig.h"
 #include <TypeConverter.h>
@@ -74,9 +78,10 @@
     return NO_ERROR;
 }
 
-product_strategy_t EngineBase::getProductStrategyForAttributes(const audio_attributes_t &attr) const
+product_strategy_t EngineBase::getProductStrategyForAttributes(
+        const audio_attributes_t &attr, bool fallbackOnDefault) const
 {
-    return mProductStrategies.getProductStrategyForAttributes(attr);
+    return mProductStrategies.getProductStrategyForAttributes(attr, fallbackOnDefault);
 }
 
 audio_stream_type_t EngineBase::getStreamTypeForAttributes(const audio_attributes_t &attr) const
@@ -147,10 +152,15 @@
         });
         return iter != end(volumeGroups);
     };
+    auto fileExists = [](const char* path) {
+        struct stat fileStat;
+        return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+    };
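+    // Only attempt parsing when a regular file exists at engineConfig::DEFAULT_PATH; otherwise
+    // fall through to the hardcoded default configuration below without logging a parse error.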
 
-    auto result = engineConfig::parse();
+    auto result = fileExists(engineConfig::DEFAULT_PATH) ?
+            engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
-        ALOGW("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
+        ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
         engineConfig::Config config = gDefaultEngineConfig;
         android::status_t ret = engineConfig::parseLegacyVolumes(config.volumeGroups);
         result = {std::make_unique<engineConfig::Config>(config),
@@ -320,14 +330,16 @@
     return group;
 }
 
-volume_group_t EngineBase::getVolumeGroupForAttributes(const audio_attributes_t &attr) const
+volume_group_t EngineBase::getVolumeGroupForAttributes(
+        const audio_attributes_t &attr, bool fallbackOnDefault) const
 {
-    return mProductStrategies.getVolumeGroupForAttributes(attr);
+    return mProductStrategies.getVolumeGroupForAttributes(attr, fallbackOnDefault);
 }
 
-volume_group_t EngineBase::getVolumeGroupForStreamType(audio_stream_type_t stream) const
+volume_group_t EngineBase::getVolumeGroupForStreamType(
+        audio_stream_type_t stream, bool fallbackOnDefault) const
 {
-    return mProductStrategies.getVolumeGroupForStreamType(stream);
+    return mProductStrategies.getVolumeGroupForStreamType(stream, fallbackOnDefault);
 }
 
 status_t EngineBase::listAudioVolumeGroups(AudioVolumeGroupVector &groups) const
@@ -339,57 +351,292 @@
     return NO_ERROR;
 }
 
-status_t EngineBase::setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+namespace {
+template <typename T>
+status_t setDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const AudioDeviceTypeAddrVector &devices,
+        const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
 
-    mProductStrategyPreferredDevices[strategy] = device;
-    return NO_ERROR;
-}
-
-status_t EngineBase::removePreferredDeviceForStrategy(product_strategy_t strategy)
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s invalid strategy %u", __func__, strategy);
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        tDevicesRoleMap[std::make_pair(t, role)] = devices;
+        // The preferred and disabled device lists are mutually exclusive: once a device is added
+        // to one list, it must be removed from the other one.
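+        // e.g. marking a device as DEVICE_ROLE_PREFERRED for a strategy drops it from that
+        // strategy's DEVICE_ROLE_DISABLED list, and vice versa.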
+        const device_role_t roleToRemove = role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED
+                                                                         : DEVICE_ROLE_PREFERRED;
+        auto it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+            if (it->second.empty()) {
+                tDevicesRoleMap.erase(it);
+            }
+        }
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through: no need to set the device role to none for a strategy.
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
         return BAD_VALUE;
     }
-
-    if (mProductStrategyPreferredDevices.erase(strategy) == 0) {
-        // no preferred device was set
-        return NAME_NOT_FOUND;
-    }
     return NO_ERROR;
 }
 
-status_t EngineBase::getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const
-{
-    // verify strategy exists
-    if (mProductStrategies.find(strategy) == mProductStrategies.end()) {
-        ALOGE("%s unknown strategy %u", __func__, strategy);
+template <typename T>
+status_t removeAllDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
         return BAD_VALUE;
     }
-    // preferred device for this strategy?
-    auto devIt = mProductStrategyPreferredDevices.find(strategy);
-    if (devIt == mProductStrategyPreferredDevices.end()) {
-        ALOGV("%s no preferred device for strategy %u", __func__, strategy);
-        return NAME_NOT_FOUND;
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED:
+        if (tDevicesRoleMap.erase(std::make_pair(t, role)) == 0) {
+            // no preferred/disabled device was set
+            return NAME_NOT_FOUND;
+        }
+        break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as it makes no sense to remove devices with
+        // the role DEVICE_ROLE_NONE
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+template <typename T>
+status_t getDevicesRoleForT(
+        const std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, AudioDeviceTypeAddrVector &devices, const std::string& logStr,
+        std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
+        return BAD_VALUE;
     }
 
-    device = devIt->second;
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+        if (it == tDevicesRoleMap.end()) {
+            ALOGV("%s no device as role %u for %s %u", __func__, role, logStr.c_str(), t);
+            return NAME_NOT_FOUND;
+        }
+
+        devices = it->second;
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as DEVICE_ROLE_NONE is never set
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
     return NO_ERROR;
 }
 
+} // namespace
+
+status_t EngineBase::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return setDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return removeAllDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return getDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return setDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
+}
+
+status_t EngineBase::addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    // verify that the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+        return BAD_VALUE;
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        mCapturePresetDevicesRoleMap[audioSourceRole] = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
+        for (const auto &device : devices) {
+            mCapturePresetDevicesRoleMap[audioSourceRole].push_back(device);
+        }
+        // Preferred and disabled devices are mutually exclusive: remove the newly added devices
+        // from the list associated with the opposite role.
+        doRemoveDevicesRoleForCapturePreset(
+                audioSource,
+                role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED : DEVICE_ROLE_PREFERRED,
+                devices,
+                false /*forceMatched*/);
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through: no need to set the device role to none
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
+    return doRemoveDevicesRoleForCapturePreset(audioSource, role, devices);
+}
+
+status_t EngineBase::doRemoveDevicesRoleForCapturePreset(audio_source_t audioSource,
+        device_role_t role, const AudioDeviceTypeAddrVector& devices, bool forceMatched)
+{
+    // verify that the audio source is valid
+    if (!audio_is_valid_audio_source(audioSource)) {
+        ALOGE("%s unknown audio source %u", __func__, audioSource);
+        return BAD_VALUE;
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        const auto audioSourceRole = std::make_pair(audioSource, role);
+        if (mCapturePresetDevicesRoleMap.find(audioSourceRole) ==
+                mCapturePresetDevicesRoleMap.end()) {
+            return NAME_NOT_FOUND;
+        }
+        AudioDeviceTypeAddrVector remainingDevices = excludeDeviceTypeAddrsFrom(
+                mCapturePresetDevicesRoleMap[audioSourceRole], devices);
+        if (forceMatched && remainingDevices.size() !=
+                mCapturePresetDevicesRoleMap[audioSourceRole].size() - devices.size()) {
+            // Some of the requested devices are not present in the cached record for this role.
+            return BAD_VALUE;
+        }
+        mCapturePresetDevicesRoleMap[audioSourceRole] = remainingDevices;
+        if (mCapturePresetDevicesRoleMap[audioSourceRole].empty()) {
+            // Remove the role when device list is empty
+            mCapturePresetDevicesRoleMap.erase(audioSourceRole);
+        }
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall-through as it makes no sense to remove devices with
+        // the role DEVICE_ROLE_NONE
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t EngineBase::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                      device_role_t role)
+{
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return removeAllDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, "audio source" /*logStr*/, p);
+}
+
+status_t EngineBase::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+        device_role_t role, AudioDeviceTypeAddrVector &devices) const
+{
+    std::function<bool(audio_source_t)> p = [](audio_source_t audioSource) {
+        return audio_is_valid_audio_source(audioSource);
+    };
+    return getDevicesRoleForT(
+            mCapturePresetDevicesRoleMap, audioSource, role, devices, "audio source" /*logStr*/, p);
+}
+
+status_t EngineBase::getMediaDevicesForRole(device_role_t role,
+        const DeviceVector& availableDevices, DeviceVector& devices) const
+{
+    product_strategy_t strategy = getProductStrategyByName("STRATEGY_MEDIA" /*name*/);
+    if (strategy == PRODUCT_STRATEGY_NONE) {
+        strategy = getProductStrategyForStream(AUDIO_STREAM_MUSIC);
+    }
+    if (strategy == PRODUCT_STRATEGY_NONE) {
+        return NAME_NOT_FOUND;
+    }
+    AudioDeviceTypeAddrVector deviceAddrVec;
+    status_t status = getDevicesForRoleAndStrategy(strategy, role, deviceAddrVec);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    devices = availableDevices.getDevicesFromDeviceTypeAddrVec(deviceAddrVec);
+    return deviceAddrVec.size() == devices.size() ? NO_ERROR : NOT_ENOUGH_DATA;
+}
+
+DeviceVector EngineBase::getActiveMediaDevices(const DeviceVector& availableDevices) const
+{
+    // The priority of active devices is as follows:
+    // 1: the available preferred devices for media
+    // 2: the latest connected removable media device that is enabled
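+    // e.g. when no preferred media device is configured, the most recently connected removable
+    // device that is not in the disabled list (such as an A2DP headset) is returned.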
+    DeviceVector activeDevices;
+    if (getMediaDevicesForRole(
+            DEVICE_ROLE_PREFERRED, availableDevices, activeDevices) != NO_ERROR) {
+        activeDevices.clear();
+        DeviceVector disabledDevices;
+        getMediaDevicesForRole(DEVICE_ROLE_DISABLED, availableDevices, disabledDevices);
+        sp<DeviceDescriptor> device =
+                mLastRemovableMediaDevices.getLastRemovableMediaDevice(disabledDevices);
+        if (device != nullptr) {
+            activeDevices.add(device);
+        }
+    }
+    return activeDevices;
+}
+
+void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
+{
+    dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
+    for (const auto& [capturePresetRolePair, devices] : mCapturePresetDevicesRoleMap) {
+        dst->appendFormat("\n%*sCapture preset(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                capturePresetRolePair.first, capturePresetRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
+    }
+    dst->appendFormat("\n");
+}
+
 void EngineBase::dump(String8 *dst) const
 {
     mProductStrategies.dump(dst, 2);
-    mProductStrategyPreferredDevices.dump(dst, 2);
+    dumpProductStrategyDevicesRoleMap(mProductStrategyDeviceRoleMap, dst, 2);
+    dumpCapturePresetDevicesRoleMap(dst, 2);
     mVolumeGroups.dump(dst, 2);
 }
 
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 981582e..d39eff6 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -26,8 +26,8 @@
     {"STRATEGY_PHONE",
      {
          {"phone", AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT, 0,
-            ""}},
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}},
          },
          {"sco", AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_SCO,
@@ -39,10 +39,11 @@
      {
          {"ring", AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
          {"alarm", AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, 0, ""}},
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}},
          }
      },
     },
@@ -58,7 +59,7 @@
      {
          {"", AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -66,15 +67,16 @@
      {
          {"", AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT, 0, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
-               AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
          }
      },
@@ -83,21 +85,25 @@
      {
          {"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
          {"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
           {
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_GAME, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-               AUDIO_SOURCE_DEFAULT, 0, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+               AUDIO_FLAG_NONE, ""}
           },
          },
          {"system", AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-            AUDIO_SOURCE_DEFAULT, 0, ""}}
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -106,7 +112,7 @@
          {"", AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-               AUDIO_SOURCE_DEFAULT, 0, ""}
+               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
          }
      },
@@ -114,7 +120,8 @@
     {"STRATEGY_CALL_ASSISTANT",
      {
          {"", AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     },
@@ -136,14 +143,16 @@
     {"rerouting",
      {
          {"", AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     },
     {"patch",
      {
          {"", AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, 0, ""}}
+          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+            AUDIO_FLAG_NONE, ""}}
          }
      },
     }
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
old mode 100755
new mode 100644
index 87b6aaf..b3f8947
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -55,6 +55,17 @@
     return ret;
 }
 
+sp<DeviceDescriptor> LastRemovableMediaDevices::getLastRemovableMediaDevice(
+        const DeviceVector& excludedDevices, device_out_group_t group) const {
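+    // Return the first device in mMediaDevices that matches the requested group (or any group
+    // when GROUP_NONE is passed) and is not in excludedDevices; nullptr when none qualifies.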
+    for (auto iter = mMediaDevices.begin(); iter != mMediaDevices.end(); ++iter) {
+        if ((group == GROUP_NONE || group == getDeviceOutGroup((iter->desc)->type())) &&
+                !excludedDevices.contains(iter->desc)) {
+            return iter->desc;
+        }
+    }
+    return nullptr;
+}
+
 device_out_group_t LastRemovableMediaDevices::getDeviceOutGroup(audio_devices_t device) const
 {
     switch (device) {
@@ -69,6 +80,11 @@
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
     case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
+    // TODO (b/122931261): remove when the preferred device for the media strategy is used instead
+    //  of AUDIO_POLICY_FORCE_NO_BT_A2DP.
+    case AUDIO_DEVICE_OUT_HEARING_AID:
+    case AUDIO_DEVICE_OUT_BLE_HEADSET:
+    case AUDIO_DEVICE_OUT_BLE_SPEAKER:
         return GROUP_BT_A2DP;
     default:
         return GROUP_NONE;
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 151c7bb..b3d144f 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -169,7 +169,7 @@
 }
 
 product_strategy_t ProductStrategyMap::getProductStrategyForAttributes(
-        const audio_attributes_t &attr) const
+        const audio_attributes_t &attr, bool fallbackOnDefault) const
 {
     for (const auto &iter : *this) {
         if (iter.second->matches(attr)) {
@@ -178,7 +178,7 @@
     }
     ALOGV("%s: No matching product strategy for attributes %s, return default", __FUNCTION__,
           toString(attr).c_str());
-    return getDefault();
+    return fallbackOnDefault ? getDefault() : PRODUCT_STRATEGY_NONE;
 }
 
 audio_attributes_t ProductStrategyMap::getAttributesForStreamType(audio_stream_type_t stream) const
@@ -272,7 +272,8 @@
     return at(psId)->getDeviceAddress();
 }
 
-volume_group_t ProductStrategyMap::getVolumeGroupForAttributes(const audio_attributes_t &attr) const
+volume_group_t ProductStrategyMap::getVolumeGroupForAttributes(
+        const audio_attributes_t &attr, bool fallbackOnDefault) const
 {
     for (const auto &iter : *this) {
         volume_group_t group = iter.second->getVolumeGroupForAttributes(attr);
@@ -280,10 +281,11 @@
             return group;
         }
     }
-    return getDefaultVolumeGroup();
+    return fallbackOnDefault ? getDefaultVolumeGroup() : VOLUME_GROUP_NONE;
 }
 
-volume_group_t ProductStrategyMap::getVolumeGroupForStreamType(audio_stream_type_t stream) const
+volume_group_t ProductStrategyMap::getVolumeGroupForStreamType(
+        audio_stream_type_t stream, bool fallbackOnDefault) const
 {
     for (const auto &iter : *this) {
         volume_group_t group = iter.second->getVolumeGroupForStreamType(stream);
@@ -292,7 +294,7 @@
         }
     }
     ALOGW("%s: no volume group for %s, using default", __func__, toString(stream).c_str());
-    return getDefaultVolumeGroup();
+    return fallbackOnDefault ? getDefaultVolumeGroup() : VOLUME_GROUP_NONE;
 }
 
 volume_group_t ProductStrategyMap::getDefaultVolumeGroup() const
@@ -318,13 +320,15 @@
     }
 }
 
-void ProductStrategyPreferredRoutingMap::dump(android::String8* dst, int spaces) const {
-    dst->appendFormat("\n%*sPreferred devices per product strategy dump:", spaces, "");
-    for (const auto& iter : *this) {
-        dst->appendFormat("\n%*sStrategy %u dev:%08x addr:%s",
-                          spaces + 2, "",
-                          (uint32_t) iter.first,
-                          iter.second.mType, iter.second.mAddress.c_str());
+void dumpProductStrategyDevicesRoleMap(
+        const ProductStrategyDevicesRoleMap& productStrategyDeviceRoleMap,
+        String8 *dst,
+        int spaces) {
+    dst->appendFormat("\n%*sDevice role per product strategy dump:", spaces, "");
+    for (const auto& [strategyRolePair, devices] : productStrategyDeviceRoleMap) {
+        dst->appendFormat("\n%*sStrategy(%u) Device Role(%u) Devices(%s)", spaces + 2, "",
+                strategyRolePair.first, strategyRolePair.second,
+                dumpAudioDeviceTypeAddrVector(devices, true /*includeSensitiveInfo*/).c_str());
     }
     dst->appendFormat("\n");
 }
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index c352578..8aa4b08 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -43,10 +43,24 @@
         indexInUi = volIndexMax;
     }
 
+    // Calculate the new volume index
     size_t nbCurvePoints = mCurvePoints.size();
-    // the volume index in the UI is relative to the min and max volume indices for this stream
-    int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
-    int volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
+
+    int volIdx;
+    if (volIndexMin == volIndexMax) {
+        if (indexInUi == volIndexMin) {
+            volIdx = volIndexMin;
+        } else {
+            // This would result in a divide-by-zero below
+            ALOG_ASSERT(volIndexMin != volIndexMax, "Invalid volume index range & value: 0");
+            return NAN;
+        }
+    } else {
+        // interpolate
+        // the volume index in the UI is relative to the min and max volume indices for this stream
+        int nbSteps = 1 + mCurvePoints[nbCurvePoints - 1].mIndex - mCurvePoints[0].mIndex;
+        volIdx = (nbSteps * (indexInUi - volIndexMin)) / (volIndexMax - volIndexMin);
+    }
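+    // e.g. with curve points spanning indices 0..100 (nbSteps = 101) and a stream index range of
+    // [0, 15], indexInUi = 7 maps to volIdx = (101 * 7) / 15 = 47.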
 
     // Where would this volume index been inserted in the curve point
     size_t indexInUiPosition = mCurvePoints.orderOf(CurvePoint(volIdx, 0));
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index ff840f9..459cc78 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libaudiopolicyengine_config",
     export_include_dirs: ["include"],
diff --git a/services/audiopolicy/engine/config/TEST_MAPPING b/services/audiopolicy/engine/config/TEST_MAPPING
new file mode 100644
index 0000000..06ce111
--- /dev/null
+++ b/services/audiopolicy/engine/config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+       "name": "audiopolicy_engineconfig_tests"
+    }
+  ]
+}
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 5d22c24..c565926 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -111,6 +111,8 @@
  */
 ParsingResult parse(const char* path = DEFAULT_PATH);
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
+// Exposed for testing.
+android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups);
 
 } // namespace engineConfig
 } // namespace android
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 4842cb2..81e803f 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -139,11 +139,24 @@
                                          Collection &collection);
 };
 
-using xmlCharUnique = std::unique_ptr<xmlChar, decltype(xmlFree)>;
+template <class T>
+constexpr void (*xmlDeleter)(T* t);
+template <>
+constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
+template <>
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
+
+/** @return a unique_ptr with the correct deleter for the libxml2 object. */
+template <class T>
+constexpr auto make_xmlUnique(T *t) {
+    // Wrap deleter in lambda to enable empty base optimization
+    auto deleter = [](T *t) { xmlDeleter<T>(t); };
+    return std::unique_ptr<T, decltype(deleter)>{t, deleter};
+}
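+// For example, make_xmlUnique(xmlParseFile(path)) yields a unique_ptr that frees the document
+// with xmlFreeDoc, and make_xmlUnique(xmlGetProp(...)) frees the returned xmlChar string with
+// xmlFree.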
 
 std::string getXmlAttribute(const xmlNode *cur, const char *attribute)
 {
-    xmlCharUnique charPtr(xmlGetProp(cur, reinterpret_cast<const xmlChar *>(attribute)), xmlFree);
+    auto charPtr = make_xmlUnique(xmlGetProp(cur, reinterpret_cast<const xmlChar *>(attribute)));
     if (charPtr == NULL) {
         return "";
     }
@@ -228,7 +241,8 @@
             std::string flags = getXmlAttribute(cur, "value");
 
             ALOGV("%s flags %s",  __FUNCTION__, flags.c_str());
-            attributes.flags = AudioFlagConverter::maskFromString(flags, " ");
+            attributes.flags = static_cast<audio_flags_mask_t>(
+                    AudioFlagConverter::maskFromString(flags, " "));
         }
         if (!xmlStrcmp(cur->name, (const xmlChar *)("Bundle"))) {
             std::string bundleKey = getXmlAttribute(cur, "key");
@@ -440,7 +454,7 @@
     for (const xmlNode *child = referenceName.empty() ?
          root->xmlChildrenNode : ref->xmlChildrenNode; child != NULL; child = child->next) {
         if (!xmlStrcmp(child->name, (const xmlChar *)volumePointTag)) {
-            xmlCharUnique pointXml(xmlNodeListGetString(doc, child->xmlChildrenNode, 1), xmlFree);
+            auto pointXml = make_xmlUnique(xmlNodeListGetString(doc, child->xmlChildrenNode, 1));
             if (pointXml == NULL) {
                 return BAD_VALUE;
             }
@@ -470,14 +484,14 @@
 
     for (const xmlNode *child = root->xmlChildrenNode; child != NULL; child = child->next) {
         if (not xmlStrcmp(child->name, (const xmlChar *)Attributes::name)) {
-            xmlCharUnique nameXml(xmlNodeListGetString(doc, child->xmlChildrenNode, 1), xmlFree);
+            auto nameXml = make_xmlUnique(xmlNodeListGetString(doc, child->xmlChildrenNode, 1));
             if (nameXml == nullptr) {
                 return BAD_VALUE;
             }
             name = reinterpret_cast<const char*>(nameXml.get());
         }
         if (not xmlStrcmp(child->name, (const xmlChar *)Attributes::indexMin)) {
-            xmlCharUnique indexMinXml(xmlNodeListGetString(doc, child->xmlChildrenNode, 1), xmlFree);
+            auto indexMinXml = make_xmlUnique(xmlNodeListGetString(doc, child->xmlChildrenNode, 1));
             if (indexMinXml == nullptr) {
                 return BAD_VALUE;
             }
@@ -487,7 +501,7 @@
             }
         }
         if (not xmlStrcmp(child->name, (const xmlChar *)Attributes::indexMax)) {
-            xmlCharUnique indexMaxXml(xmlNodeListGetString(doc, child->xmlChildrenNode, 1), xmlFree);
+            auto indexMaxXml = make_xmlUnique(xmlNodeListGetString(doc, child->xmlChildrenNode, 1));
             if (indexMaxXml == nullptr) {
                 return BAD_VALUE;
             }
@@ -547,7 +561,7 @@
     for (const xmlNode *child = referenceName.empty() ?
          cur->xmlChildrenNode : ref->xmlChildrenNode; child != NULL; child = child->next) {
         if (!xmlStrcmp(child->name, (const xmlChar *)VolumeTraits::volumePointTag)) {
-            xmlCharUnique pointXml(xmlNodeListGetString(doc, child->xmlChildrenNode, 1), xmlFree);
+            auto pointXml = make_xmlUnique(xmlNodeListGetString(doc, child->xmlChildrenNode, 1));
             if (pointXml == NULL) {
                 return BAD_VALUE;
             }
@@ -588,6 +602,7 @@
             }
         }
     }
+    VolumeGroups tempVolumeGroups = volumeGroups;
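+    // Converted entries are accumulated in tempVolumeGroups and swapped into volumeGroups only at
+    // the end, so a failure while converting the legacy map leaves volumeGroups unchanged (see
+    // the "LegacyVolumeGroupsLoadingIsTransactional" test added below).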
     for (const auto &volumeMapIter : legacyVolumeMap) {
         // In order to let AudioService setting the min and max (compatibility), set Min and Max
         // to -1 except for private streams
@@ -598,8 +613,10 @@
         }
         int indexMin = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 0 : -1;
         int indexMax = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 100 : -1;
-        volumeGroups.push_back({ volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
+        tempVolumeGroups.push_back(
+                { volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
     }
+    std::swap(tempVolumeGroups, volumeGroups);
     return NO_ERROR;
 }
 
@@ -636,19 +653,21 @@
 
 ParsingResult parse(const char* path) {
     XmlErrorHandler errorHandler;
-    xmlDocPtr doc;
-    doc = xmlParseFile(path);
+    auto doc = make_xmlUnique(xmlParseFile(path));
     if (doc == NULL) {
-        ALOGE("%s: Could not parse document %s", __FUNCTION__, path);
+        // It is OK not to find an engine config file at the default location,
+        // as the caller will fall back to the hardcoded default config.
+        if (strncmp(path, DEFAULT_PATH, strlen(DEFAULT_PATH))) {
+            ALOGW("%s: Could not parse document %s", __FUNCTION__, path);
+        }
         return {nullptr, 0};
     }
-    xmlNodePtr cur = xmlDocGetRootElement(doc);
+    xmlNodePtr cur = xmlDocGetRootElement(doc.get());
     if (cur == NULL) {
         ALOGE("%s: Could not parse: empty document %s", __FUNCTION__, path);
-        xmlFreeDoc(doc);
         return {nullptr, 0};
     }
-    if (xmlXIncludeProcess(doc) < 0) {
+    if (xmlXIncludeProcess(doc.get()) < 0) {
         ALOGE("%s: libxml failed to resolve XIncludes on document %s", __FUNCTION__, path);
         return {nullptr, 0};
     }
@@ -661,68 +680,45 @@
     auto config = std::make_unique<Config>();
     config->version = std::stof(version);
     deserializeCollection<ProductStrategyTraits>(
-                doc, cur, config->productStrategies, nbSkippedElements);
+                doc.get(), cur, config->productStrategies, nbSkippedElements);
     deserializeCollection<CriterionTraits>(
-                doc, cur, config->criteria, nbSkippedElements);
+                doc.get(), cur, config->criteria, nbSkippedElements);
     deserializeCollection<CriterionTypeTraits>(
-                doc, cur, config->criterionTypes, nbSkippedElements);
+                doc.get(), cur, config->criterionTypes, nbSkippedElements);
     deserializeCollection<VolumeGroupTraits>(
-                doc, cur, config->volumeGroups, nbSkippedElements);
+                doc.get(), cur, config->volumeGroups, nbSkippedElements);
 
     return {std::move(config), nbSkippedElements};
 }
 
 android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups) {
     XmlErrorHandler errorHandler;
-    xmlDocPtr doc;
-    doc = xmlParseFile(path);
+    auto doc = make_xmlUnique(xmlParseFile(path));
     if (doc == NULL) {
         ALOGE("%s: Could not parse document %s", __FUNCTION__, path);
         return BAD_VALUE;
     }
-    xmlNodePtr cur = xmlDocGetRootElement(doc);
+    xmlNodePtr cur = xmlDocGetRootElement(doc.get());
     if (cur == NULL) {
         ALOGE("%s: Could not parse: empty document %s", __FUNCTION__, path);
-        xmlFreeDoc(doc);
         return BAD_VALUE;
     }
-    if (xmlXIncludeProcess(doc) < 0) {
+    if (xmlXIncludeProcess(doc.get()) < 0) {
         ALOGE("%s: libxml failed to resolve XIncludes on document %s", __FUNCTION__, path);
         return BAD_VALUE;
     }
     size_t nbSkippedElements = 0;
-    return deserializeLegacyVolumeCollection(doc, cur, volumeGroups, nbSkippedElements);
+    return deserializeLegacyVolumeCollection(doc.get(), cur, volumeGroups, nbSkippedElements);
 }
 
-static const int gApmXmlConfigFilePathMaxLength = 128;
-
-static constexpr const char *apmXmlConfigFileName = "audio_policy_configuration.xml";
-static constexpr const char *apmA2dpOffloadDisabledXmlConfigFileName =
-        "audio_policy_configuration_a2dp_offload_disabled.xml";
-
 android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups) {
-    char audioPolicyXmlConfigFile[gApmXmlConfigFilePathMaxLength];
-    std::vector<const char *> fileNames;
-    status_t ret;
-
-    if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
-            property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-        // A2DP offload supported but disabled: try to use special XML file
-        fileNames.push_back(apmA2dpOffloadDisabledXmlConfigFileName);
+    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+            !audioPolicyXmlConfigFile.empty()) {
+        return parseLegacyVolumeFile(audioPolicyXmlConfigFile.c_str(), volumeGroups);
+    } else {
+        ALOGE("No readable audio policy config file found");
+        return BAD_VALUE;
     }
-    fileNames.push_back(apmXmlConfigFileName);
-
-    for (const char* fileName : fileNames) {
-        for (const auto& path : audio_get_configuration_paths()) {
-            snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
-                     "%s/%s", path.c_str(), fileName);
-            ret = parseLegacyVolumeFile(audioPolicyXmlConfigFile, volumeGroups);
-            if (ret == NO_ERROR) {
-                return ret;
-            }
-        }
-    }
-    return BAD_VALUE;
 }
 
 } // namespace engineConfig
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
new file mode 100644
index 0000000..5791f17
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -0,0 +1,34 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "audiopolicy_engineconfig_tests",
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libmedia_helper",
+        "libutils",
+        "libxml2",
+    ],
+    static_libs: [
+        "libaudiopolicyengine_config",
+    ],
+
+    srcs: ["engineconfig_tests.cpp"],
+
+    data: [":audiopolicy_engineconfig_files"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
new file mode 100644
index 0000000..f61e02f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "APM_Test"
+#include <android-base/file.h>
+#include <log/log.h>
+
+#include "EngineConfig.h"
+
+using namespace android;
+
+TEST(EngineConfigTestInit, LegacyVolumeGroupsLoadingIsTransactional) {
+    engineConfig::VolumeGroups groups;
+    ASSERT_TRUE(groups.empty());
+    status_t status = engineConfig::parseLegacyVolumeFile(
+            (base::GetExecutableDirectory() + "/test_invalid_apm_volume_tables.xml").c_str(),
+            groups);
+    ASSERT_NE(NO_ERROR, status);
+    EXPECT_TRUE(groups.empty());
+    status = engineConfig::parseLegacyVolumeFile(
+            (base::GetExecutableDirectory() + "/test_apm_volume_tables.xml").c_str(),
+            groups);
+    ASSERT_EQ(NO_ERROR, status);
+    EXPECT_FALSE(groups.empty());
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
new file mode 100644
index 0000000..9cee978
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -0,0 +1,16 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "audiopolicy_engineconfig_files",
+    srcs: [
+        "test_apm_volume_tables.xml",
+        "test_invalid_apm_volume_tables.xml",
+    ],
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
new file mode 100644
index 0000000..16126b6
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+    <volumes>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+    </volumes>
+    <volumes>
+        <reference name="FULL_SCALE_VOLUME_CURVE">
+            <!-- Full Scale reference Volume Curve -->
+            <point>0,0</point>
+            <point>100,0</point>
+        </reference>
+    </volumes>
+</audioPolicyConfiguration>
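
Each <point> in these curves is an "index,attenuation" pair: the first value is a volume index expressed as a percentage of the index range, the second an attenuation in millibels (1/100 dB), so 100,0 is full scale and 0,-4200 is -42 dB. A minimal interpolation sketch under that reading (the type and function names are illustrative, not part of the engine API):

    #include <cstddef>
    #include <utility>
    #include <vector>

    // One curve point: volume index in percent, attenuation in millibels (1/100 dB).
    using CurvePoint = std::pair<int, int>;

    // Linearly interpolate the attenuation in mB for a given index percentage.
    int attenuationMbForIndex(const std::vector<CurvePoint> &curve, int indexPercent) {
        if (indexPercent <= curve.front().first) return curve.front().second;
        for (std::size_t i = 1; i < curve.size(); ++i) {
            if (indexPercent <= curve[i].first) {
                const auto &[x0, y0] = curve[i - 1];
                const auto &[x1, y1] = curve[i];
                return y0 + (y1 - y0) * (indexPercent - x0) / (x1 - x0);
            }
        }
        return curve.back().second;
    }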
diff --git a/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
new file mode 100644
index 0000000..3ec5d10
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file uses a non-existent audio stream name. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+    <volumes>
+        <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_NON_EXISTING" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_RING" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_ALARM" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2970</point>
+            <point>33,-2010</point>
+            <point>66,-1020</point>
+            <point>100,0</point>
+        </volume>
+        <volume stream="AUDIO_STREAM_NOTIFICATION" deviceCategory="DEVICE_CATEGORY_HEADSET"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_BLUETOOTH_SCO" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_ENFORCED_AUDIBLE" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume stream="AUDIO_STREAM_DTMF" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+                ref="FULL_SCALE_VOLUME_CURVE"/>
+    </volumes>
+    <volumes>
+        <reference name="FULL_SCALE_VOLUME_CURVE">
+            <!-- Full Scale reference Volume Curve -->
+            <point>0,0</point>
+            <point>100,0</point>
+        </reference>
+    </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/interface/Android.bp b/services/audiopolicy/engine/interface/Android.bp
index 2ea42b6..5dd5adb 100644
--- a/services/audiopolicy/engine/interface/Android.bp
+++ b/services/audiopolicy/engine/interface/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiopolicyengine_interface_headers",
     host_supported: true,
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index dfb20b5..518f86e 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <utility>
+
 #include <AudioPolicyManagerObserver.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
@@ -34,6 +36,8 @@
 using DeviceStrategyMap = std::map<product_strategy_t, DeviceVector>;
 using StrategyVector = std::vector<product_strategy_t>;
 using VolumeGroupVector = std::vector<volume_group_t>;
+using CapturePresetDevicesRoleMap =
+        std::map<std::pair<audio_source_t, device_role_t>, AudioDeviceTypeAddrVector>;
 
 /**
  * This interface is dedicated to the policy manager that a Policy Engine shall implement.
@@ -108,11 +112,12 @@
      * Get the strategy selected for a given audio attributes.
      *
      * @param[in] audio attributes to get the selected @product_strategy_t followed by.
-     *
+     * @param fallbackOnDefault if true, will return the fallback strategy if the attributes
+     * are not explicitly assigned to a given strategy.
      * @return @product_strategy_t to be followed.
      */
     virtual product_strategy_t getProductStrategyForAttributes(
-            const audio_attributes_t &attr) const = 0;
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const = 0;
 
     /**
      * @brief getOutputDevicesForAttributes retrieves the devices to be used for given
@@ -168,8 +173,10 @@
      * @param[out] mix to be used if a mix has been installed for the given audio attributes.
      * @return selected input device for the audio attributes, may be null if error.
      */
-    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const = 0;
+    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                             uid_t uid = 0,
+                                                             sp<AudioPolicyMix> *mix = nullptr)
+                                                             const = 0;
 
     /**
      * Get the legacy stream type for a given audio attributes.
@@ -269,19 +276,25 @@
      * @brief getVolumeGroupForAttributes gets the appropriate volume group to be used for a given
      * Audio Attributes.
      * @param attr to be considered
+     * @param fallbackOnDefault if true, will return the fallback volume group if the attributes
+     * are not associated to any volume group.
      * @return volume group associated to the given audio attributes, default group if none
      * applicable, VOLUME_GROUP_NONE if no default group defined.
      */
-    virtual volume_group_t getVolumeGroupForAttributes(const audio_attributes_t &attr) const = 0;
+    virtual volume_group_t getVolumeGroupForAttributes(
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const = 0;
 
     /**
      * @brief getVolumeGroupForStreamType gets the appropriate volume group to be used for a given
      * legacy stream type
      * @param stream type to be considered
+     * @param fallbackOnDefault if true, will return the fallback volume group if the stream type
+     * is not associated to any volume group.
      * @return volume group associated to the given stream type, default group if none applicable,
      * VOLUME_GROUP_NONE if no default group defined.
      */
-    virtual volume_group_t getVolumeGroupForStreamType(audio_stream_type_t stream) const = 0;
+    virtual volume_group_t getVolumeGroupForStreamType(
+            audio_stream_type_t stream, bool fallbackOnDefault = true) const = 0;
 
     /**
      * @brief listAudioVolumeGroups introspection API to get the Audio Volume Groups, aka
@@ -293,37 +306,120 @@
     virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) const = 0;
 
     /**
-     * @brief setPreferredDeviceForStrategy sets the default device to be used for a
-     * strategy when available
+     * @brief setDevicesRoleForStrategy sets the devices role for a strategy when available. To
+     * remove a devices role, removeDevicesRoleForStrategy must be called. When a devices role is
+     * set successfully, previously set devices for the same role and strategy are removed.
      * @param strategy the audio strategy whose routing will be affected
-     * @param device the audio device to route to when available
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NO_ERROR if the preferred device was set
+     * @param role the role of the devices for the strategy. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be set
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NO_ERROR if the role of the devices for the strategy was set
      */
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-            const AudioDeviceTypeAddr &device) = 0;
+    virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
 
     /**
-     * @brief removePreferredDeviceForStrategy removes the preferred device previously set
+     * @brief removeDevicesRoleForStrategy removes the role of device(s) previously set
      * for the given strategy
      * @param strategy the audio strategy whose routing will be affected
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NO_ERROR if the preferred device was removed
+     * @param role the role of the devices for strategy
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NO_ERROR if the devices for this role were removed
      */
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy) = 0;
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role) = 0;
 
     /**
-     * @brief getPreferredDeviceForStrategy queries which device is set as the
-     * preferred device for the given strategy
+     * @brief getDevicesForRoleAndStrategy queries which devices have the specified role for the
+     * specified strategy
      * @param strategy the strategy to query
-     * @param device returns configured as the preferred device if one was set
-     * @return BAD_VALUE if the strategy is invalid,
-     *     or NAME_NOT_FOUND if no preferred device was set
-     *     or NO_ERROR if the device parameter was initialized to the preferred device
+     * @param role the role of the devices to query
+     * @param devices returns list of devices with matching role for the specified strategy.
+     *                DEVICE_ROLE_NONE is invalid as input.
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NAME_NOT_FOUND if no device for the role and strategy was set
+     *     or NO_ERROR if the devices parameter contains a list of devices
      */
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-            AudioDeviceTypeAddr &device) const = 0;
+    virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
+            AudioDeviceTypeAddrVector &devices) const = 0;
 
+    /**
+     * @brief setDevicesRoleForCapturePreset sets the devices role for a capture preset when
+     * available. To remove a devices role, removeDevicesRoleForCapturePreset must be called;
+     * clearDevicesRoleForCapturePreset removes all devices for the given role. When a devices role
+     * is set successfully, previously set devices for the same role and capture preset are removed.
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be set
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the role of the devices for capture preset was set
+     */
+    virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
+
+    /**
+     * @brief addDevicesRoleForCapturePreset adds devices to a role for a capture preset when
+     * available. To remove a devices role, removeDevicesRoleForCapturePreset must be called;
+     * clearDevicesRoleForCapturePreset removes all devices for the given role.
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset. All device roles are defined at
+     *             system/media/audio/include/system/audio_policy.h. DEVICE_ROLE_NONE is invalid
+     *             for setting.
+     * @param devices the audio devices to be added
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the role of the devices for the capture preset was added
+     */
+    virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
+
+    /**
+     * @brief removeDevicesRoleForCapturePreset removes the role of device(s) previously set
+     * for the given capture preset
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset
+     * @param devices the devices to be removed
+     * @return BAD_VALUE if 1) the capture preset is invalid, 2) the role is invalid, or 3) the
+     *     devices to be removed are not all currently set for that role on the capture preset,
+     *     or NO_ERROR if the devices for this role were removed
+     */
+    virtual status_t removeDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role, const AudioDeviceTypeAddrVector& devices) = 0;
+
+    /**
+     * @brief clearDevicesRoleForCapturePreset removes the role of all device(s) previously set
+     * for the given capture preset
+     * @param audioSource the audio capture preset whose routing will be affected
+     * @param role the role of the devices for the capture preset
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NO_ERROR if the devices for this role were removed
+     */
+    virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+            device_role_t role);
+
+    /**
+     * @brief getDevicesForRoleAndCapturePreset queries which devices have the specified role for
+     * the specified capture preset
+     * @param audioSource the capture preset to query
+     * @param role the role of the devices to query
+     * @param devices returns list of devices with matching role for the specified capture preset.
+     *                DEVICE_ROLE_NONE is invalid as input.
+     * @return BAD_VALUE if the capture preset or role is invalid,
+     *     or NAME_NOT_FOUND if no device for the role and capture preset was set
+     *     or NO_ERROR if the devices parameter contains a list of devices
+     */
+    virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+            device_role_t role, AudioDeviceTypeAddrVector &devices) const = 0;
+
+    /**
+     * @brief getActiveMediaDevices returns the devices most likely to be used for media
+     * @param availableDevices all available devices
+     * @return collection of active devices
+     */
+    virtual DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const = 0;
 
     virtual void dump(String8 *dst) const = 0;
 
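
Taken together, the replacements above turn the single preferred-device setter into a set/remove/query triple keyed by device role. A minimal caller-side sketch, assuming an object implementing this interface, assuming DEVICE_ROLE_PREFERRED is among the device_role_t values in system/audio_policy.h, and assuming AudioDeviceTypeAddr can be built from a device type and an address string (the strategy handle and device choice are illustrative):

    void preferSpeakerForStrategy(EngineInterface *engine, product_strategy_t strategy) {
        // DEVICE_ROLE_NONE is documented above as invalid for setting; PREFERRED is assumed here.
        AudioDeviceTypeAddrVector devices = {AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, "")};
        if (engine->setDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED, devices) == NO_ERROR) {
            AudioDeviceTypeAddrVector current;
            // NAME_NOT_FOUND here would mean no devices currently hold this role for the strategy.
            engine->getDevicesForRoleAndStrategy(strategy, DEVICE_ROLE_PREFERRED, current);
        }
        // Undo: clears whatever devices were set for this role on the strategy.
        engine->removeDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED);
    }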
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index 8f522f0..a747822 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_headers {
     name: "libaudiopolicyengineconfigurable_interface_headers",
     host_supported: true,
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index fe3eae0..b3d1f97 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -16,6 +16,15 @@
 
 // Root soong_namespace for common components
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "audio_policy_engine_criteria.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index f913a14..e46b60f 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -22,6 +22,15 @@
     ],
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "audio_policy_engine_configuration.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index fae6b7b..ad6eeb1 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -23,6 +23,15 @@
     ],
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "audio_policy_engine_configuration.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 94d33bd..773a99a 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -22,6 +22,15 @@
     ],
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "audio_policy_engine_configuration.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index 90ebffd..ee62d5e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -16,6 +16,15 @@
 
 // Root soong_namespace for common components
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "PolicyClass.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 82b1b6d..bc72484 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -26,6 +26,15 @@
 //
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "ProductStrategies.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index e4605b2..11da8c7 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -27,6 +27,15 @@
 //
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "ProductStrategies.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 61b54cf..91ffeb5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -26,6 +26,15 @@
 //
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "ProductStrategies.xml",
     vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 9abcb70..cac63fc 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -23,6 +23,15 @@
     ],
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "parameter-framework.policy",
     filename_from_src: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 27172a4..337f358 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -23,6 +23,15 @@
     ],
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "parameter-framework.policy",
     filename_from_src: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index 2685c6d..0398fc7 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libpolicy-subsystem",
     srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
index f91f8d7..f8a6fc0 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
@@ -45,7 +45,7 @@
 
 bool InputSource::sendToHW(string & /*error*/)
 {
-    uint32_t applicableInputDevice;
+    audio_devices_t applicableInputDevice;
     blackboardRead(&applicableInputDevice, sizeof(applicableInputDevice));
     return mPolicyPluginInterface->setDeviceForInputSource(mId, applicableInputDevice);
 }
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
index 244f082..6c8eb65 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
@@ -32,7 +32,7 @@
 
     struct Device
     {
-        uint32_t applicableDevice; /**< applicable device for this strategy. */
+        audio_devices_t applicableDevice; /**< applicable device for this strategy. */
         char deviceAddress[mMaxStringSize]; /**< device address associated with this strategy. */
     } __attribute__((packed));
 
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 6d42fcf..b0c376a 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -310,6 +310,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -328,7 +329,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 3b371d8..d8e2742 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -61,8 +61,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
index aa06ae3..f4645e6 100644
--- a/services/audiopolicy/engineconfigurable/src/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
@@ -51,7 +51,7 @@
         mApplicableDevices = devices;
         return NO_ERROR;
     }
-    devices |= AUDIO_DEVICE_BIT_IN;
+    devices = static_cast<audio_devices_t>(devices | AUDIO_DEVICE_BIT_IN);
     if (!audio_is_input_device(devices)) {
         ALOGE("%s: trying to set an invalid device 0x%X for input source %s",
               __FUNCTION__, devices, getName().c_str());
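
The cast added above is needed because audio_devices_t is handled as a real enum type here: bitwise OR on enum operands yields a plain integer that does not convert back implicitly. A standalone illustration of the pattern (the enum below is a stand-in, not the real audio_devices_t definition):

    #include <cstdint>

    enum device_mask_t : uint32_t { DEVICE_NONE = 0u, DEVICE_BIT_IN = 0x80000000u };

    device_mask_t addInputBit(device_mask_t devices) {
        // devices | DEVICE_BIT_IN promotes to the underlying integer type, so an explicit
        // cast is required to store the result back into the enum-typed value.
        return static_cast<device_mask_t>(devices | DEVICE_BIT_IN);
    }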
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3e47324..40efb3d 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 python_defaults {
     name: "tools_default",
     version: {
@@ -42,8 +51,8 @@
     cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
          "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
          "$(location buildPolicyCriterionTypes.py) " +
-         // @todo update if 1428659 is merged "--androidaudiobaseheader $(location :android_audio_base_header_file) " +
-         " --androidaudiobaseheader system/media/audio/include/system/audio-base.h " +
+         " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+         " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
          "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
          "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
          "--outputfile $(out)",
@@ -51,6 +60,8 @@
         // The commented inputs must be provided to use this genrule_defaults
         // @todo uncomment if 1428659 is merged":android_audio_base_header_file",
         ":audio_policy_engine_criterion_types_template",
+        ":libaudio_system_audio_base",
+        ":libaudio_system_audio_common_base",
         // ":audio_policy_configuration_top_file",
         // ":audio_policy_configuration_files",
     ],
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 9a7fa8f..43b3dd2 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -52,13 +52,19 @@
 def findBitPos(decimal):
     pos = 0
     i = 1
-    while i != decimal:
+    while i < decimal:
         i = i << 1
         pos = pos + 1
         if pos == 32:
             return -1
-    return pos
 
+    # TODO: b/168065706. This is just to fix the build. The problem of devices with
+    # multiple bits set must be addressed more generally in the configurable audio policy
+    # and parameter framework.
+    if i > decimal:
+        logging.info("Device:{} which has multiple bits set is skipped. b/168065706".format(decimal))
+        return -2
+    return pos
 
 def generateXmlStructureFile(componentTypeDict, structureTypesFile, outputFile):
 
@@ -74,10 +80,12 @@
                 if bitparameters_node is not None:
                     ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
                     for key, value in ordered_values.items():
-                        value_node = ET.SubElement(bitparameters_node, "BitParameter")
-                        value_node.set('Name', key)
-                        value_node.set('Size', "1")
-                        value_node.set('Pos', str(findBitPos(value)))
+                        pos = findBitPos(value)
+                        if pos >= 0:
+                            value_node = ET.SubElement(bitparameters_node, "BitParameter")
+                            value_node.set('Name', key)
+                            value_node.set('Size', "1")
+                            value_node.set('Pos', str(pos))
 
                 enum_parameter_node = component_type.find("EnumParameter")
                 if enum_parameter_node is not None:
@@ -118,9 +126,9 @@
     ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
 
     criteria_pattern = re.compile(
-        r"\s*(?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
-        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
-        r"(?P<values>(?:0[xX])?[0-9a-fA-F]+)")
+        r"\s*V\((?P<type>(?:"+'|'.join(component_type_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])[0-9a-fA-F]+|[0-9]+)")
 
     logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
 
@@ -156,13 +164,14 @@
 
             logging.debug("type:{}, literal:{}, values:{}.".format(component_type_name, component_type_literal, component_type_numerical_value))
 
-    # Transform input source in inclusive criterion
-    shift = len(all_component_types['OutputDevicesMask'])
-    if shift > 32:
-        logging.critical("OutputDevicesMask incompatible with criterion representation on 32 bits")
-        logging.info("EXIT ON FAILURE")
-        exit(1)
+    if "stub" not in all_component_types["OutputDevicesMask"]:
+        all_component_types["OutputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub output device mask")
+    if "stub" not in all_component_types["InputDevicesMask"]:
+        all_component_types["InputDevicesMask"]["stub"] = 0x40000000
+        logging.info("added stub input device mask")
 
+    # Transform input source in inclusive criterion
     for component_types in all_component_types:
         values = ','.join('{}:{}'.format(value, key) for key, value in all_component_types[component_types].items())
         logging.info("{}: <{}>".format(component_types, values))
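
With this change findBitPos only reports a position for masks that have exactly one bit set; multi-bit masks return the -2 marker and are skipped when BitParameter nodes are generated. The same single-bit test, expressed as a C++ sketch rather than the script's Python (the helper is illustrative and only loosely mirrors the return conventions above):

    #include <cstdint>

    // Returns the bit position for a single-bit mask, -2 for masks with several bits set
    // (to be skipped, as in the script), and -1 for a zero mask.
    int findBitPos(uint32_t mask) {
        if (mask == 0) return -1;
        if ((mask & (mask - 1)) != 0) return -2;  // more than one bit set
        int pos = 0;
        while ((mask & 1u) == 0) { mask >>= 1; ++pos; }
        return pos;
    }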
diff --git a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
index b8b60c1..76c35c1 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
@@ -55,6 +55,11 @@
                            metavar="ANDROID_AUDIO_BASE_HEADER",
                            type=argparse.FileType('r'),
                            required=True)
+    argparser.add_argument('--androidaudiocommonbaseheader',
+                           help="Android Audio CommonBase C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_COMMON_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
     argparser.add_argument('--audiopolicyconfigurationfile',
                            help="Android Audio Policy Configuration file, Mandatory.",
                            metavar="(AUDIO_POLICY_CONFIGURATION_FILE)",
@@ -176,12 +181,12 @@
 #   -Output devices type
 #   -Input devices type
 #
-def parseAndroidAudioFile(androidaudiobaseheaderFile):
+def parseAndroidAudioFile(androidaudiobaseheaderFile, androidaudiocommonbaseheaderFile):
     #
     # Adaptation table between Android Enumeration prefix and Audio PFW Criterion type names
     #
     criterion_mapping_table = {
-        'AUDIO_MODE' : "AndroidModeType",
+        'HAL_AUDIO_MODE' : "AndroidModeType",
         'AUDIO_DEVICE_OUT' : "OutputDevicesMaskType",
         'AUDIO_DEVICE_IN' : "InputDevicesMaskType"}
 
@@ -195,10 +200,15 @@
     #
     ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
 
+    #
+    # Reaching 32 bit limit for inclusive criterion out devices: removing
+    #
+    ignored_output_device_values = ['BleSpeaker', 'BleHeadset']
+
     criteria_pattern = re.compile(
-        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
-        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
-        r"(?P<values>(?:0[xX])?[0-9a-fA-F]+)")
+        r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
 
     logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
 
@@ -209,27 +219,93 @@
                 androidaudiobaseheaderFile.name, line_number, line))
 
             criterion_name = criterion_mapping_table[match.groupdict()['type']]
-            literal = ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
-            numerical_value = match.groupdict()['values']
+            criterion_literal = \
+                ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
+            criterion_numerical_value = match.groupdict()['values']
 
-            # for AUDIO_DEVICE_IN: need to remove sign bit
+            # for AUDIO_DEVICE_IN: need to remove sign bit / rename default to stub
             if criterion_name == "InputDevicesMaskType":
-                numerical_value = str(int(numerical_value, 0) & ~2147483648)
+                if criterion_literal == "Default":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+                    criterion_numerical_value = str(int(criterion_numerical_value, 0) & ~2147483648)
+
+            if criterion_name == "OutputDevicesMaskType":
+                if criterion_literal == "Default":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                if criterion_literal in ignored_output_device_values:
+                    logging.info("OutputDevicesMaskType skipping {}".format(criterion_literal))
+                    continue
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
 
             # Remove duplicated numerical values
-            if int(numerical_value, 0) in all_criteria[criterion_name].values():
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
                 logging.info("criterion {} duplicated values:".format(criterion_name))
-                logging.info("{}:{}".format(numerical_value, literal))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
                 logging.info("KEEPING LATEST")
                 for key in list(all_criteria[criterion_name]):
-                    if all_criteria[criterion_name][key] == int(numerical_value, 0):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
                         del all_criteria[criterion_name][key]
 
-            all_criteria[criterion_name][literal] = int(numerical_value, 0)
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
 
             logging.debug("type:{},".format(criterion_name))
-            logging.debug("iteral:{},".format(literal))
-            logging.debug("values:{}.".format(numerical_value))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    logging.info("Checking Android Common Header file {}".format(androidaudiocommonbaseheaderFile))
+
+    criteria_pattern = re.compile(
+        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
+
+    for line_number, line in enumerate(androidaudiocommonbaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiocommonbaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = \
+                ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
+            criterion_numerical_value = match.groupdict()['values']
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("criterion {} duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("KEEPING LATEST")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("literal:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
 
     return all_criteria
 
@@ -238,7 +314,8 @@
     logging.root.setLevel(logging.INFO)
     args = parseArgs()
 
-    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader)
+    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader,
+                                         args.androidaudiocommonbaseheader)
 
     address_criteria = parseAndroidAudioPolicyConfigurationFile(args.audiopolicyconfigurationfile)
 
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 301ecc0..3e04b68 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library {
     name: "libaudiopolicyengineconfigurable_pfwwrapper",
     export_include_dirs: ["include"],
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index aaf4158..7f9c0ac 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libaudiopolicyenginedefault",
     srcs: [
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 0bfcaa1..59a704b 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -19,6 +19,15 @@
 soong_namespace {
 }
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 prebuilt_etc {
     name: "audio_policy_engine_configuration.xml",
     vendor: true,
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index a7388da..bc32416 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -65,6 +65,12 @@
     </ProductStrategy>
 
     <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
          <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
             <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
@@ -72,12 +78,6 @@
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
             <Attributes></Attributes>
         </AttributesGroup>
-        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
-            <Attributes>
-                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
-                <Usage value="AUDIO_USAGE_ASSISTANT"/>
-            </Attributes>
-        </AttributesGroup>
         <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
             <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
         </AttributesGroup>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
old mode 100755
new mode 100644
index b14d2bb..ca8e96c
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -142,58 +142,22 @@
     return EngineBase::setForceUse(usage, config);
 }
 
-DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
-                                              DeviceVector availableOutputDevices,
-                                              DeviceVector availableInputDevices,
-                                              const SwAudioOutputCollection &outputs) const
+void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
+                                            DeviceVector& availableOutputDevices,
+                                            const SwAudioOutputCollection &outputs) const
 {
-    DeviceVector devices;
+    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
 
     switch (strategy) {
-
-    case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
-        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
-        break;
-
-    case STRATEGY_SONIFICATION_RESPECTFUL:
-        if (isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
-        } else {
-            bool media_active_locally =
-                    outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
-                                            SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
-                    || outputs.isActiveLocally(
-                        toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
-                        SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+    case STRATEGY_SONIFICATION_RESPECTFUL: {
+        if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
             // routing is same as media without the "remote" device
             availableOutputDevices.remove(availableOutputDevices.getDevicesFromType(
                     AUDIO_DEVICE_OUT_REMOTE_SUBMIX));
-            devices = getDevicesForStrategyInt(STRATEGY_MEDIA,
-                    availableOutputDevices,
-                    availableInputDevices, outputs);
-            // if no media is playing on the device, check for mandatory use of "safe" speaker
-            // when media would have played on speaker, and the safe speaker path is available
-            if (!media_active_locally) {
-                devices.replaceDevicesByType(
-                        AUDIO_DEVICE_OUT_SPEAKER,
-                        availableOutputDevices.getDevicesFromType(
-                                AUDIO_DEVICE_OUT_SPEAKER_SAFE));
-            }
         }
-        break;
-
+        } break;
     case STRATEGY_DTMF:
-        if (!isInCall()) {
-            // when off call, DTMF strategy follows the same rules as MEDIA strategy
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_MEDIA, availableOutputDevices, availableInputDevices, outputs);
-            break;
-        }
-        // when in call, DTMF and PHONE strategies follow the same rules
-        FALLTHROUGH_INTENDED;
-
-    case STRATEGY_PHONE:
+    case STRATEGY_PHONE: {
         // Force use of only devices on primary output if:
         // - in call AND
         //   - cannot route from voice call RX OR
@@ -216,87 +180,102 @@
                     availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID));
 
             if ((availableInputDevices.getDevice(AUDIO_DEVICE_IN_TELEPHONY_RX,
-                    String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
-                    ((availPrimaryInputDevices.getDevice(
-                            txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
-                            (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
+                                                 String8(""), AUDIO_FORMAT_DEFAULT) == nullptr) ||
+                ((availPrimaryInputDevices.getDevice(
+                        txDevice, String8(""), AUDIO_FORMAT_DEFAULT) != nullptr) &&
+                 (primaryOutput->getPolicyAudioPort()->getModuleVersionMajor() < 3))) {
                 availableOutputDevices = availPrimaryOutputDevices;
             }
-        }
-        // for phone strategy, we first consider the forced use and then the available devices by
-        // order of priority
-        switch (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION)) {
-        case AUDIO_POLICY_FORCE_BT_SCO:
-            if (!isInCall() || strategy != STRATEGY_DTMF) {
-                devices = availableOutputDevices.getDevicesFromType(
-                        AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT);
-                if (!devices.isEmpty()) break;
-            }
-            devices = availableOutputDevices.getFirstDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET, AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
-            if (!devices.isEmpty()) break;
-            // if SCO device is requested but no SCO device is available, fall back to default case
-            FALLTHROUGH_INTENDED;
 
-        default:    // FORCE_NONE
-            devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
-            if (!devices.isEmpty()) break;
-            // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
-            if (!isInCall() &&
-                    (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                     outputs.isA2dpSupported()) {
-                devices = availableOutputDevices.getFirstDevicesFromTypes({
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES});
-                if (!devices.isEmpty()) break;
+        }
+        // Do not use A2DP devices when in call but use them when not in call
+        // (e.g. for voice mail playback)
+        if (isInCall()) {
+            availableOutputDevices.remove(availableOutputDevices.getDevicesFromTypes({
+                    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
+                    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, }));
+        }
+        } break;
+    case STRATEGY_ACCESSIBILITY: {
+        // do not route accessibility prompts to a digital output currently configured with a
+        // compressed format, as they would likely not be mixed and would be dropped.
+        for (size_t i = 0; i < outputs.size(); i++) {
+            sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
+            if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
+                availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
+                        AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
+                        AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC}));
             }
-            devices = availableOutputDevices.getFirstDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_WIRED_HEADPHONE, AUDIO_DEVICE_OUT_WIRED_HEADSET,
-                    AUDIO_DEVICE_OUT_LINE, AUDIO_DEVICE_OUT_USB_HEADSET,
-                    AUDIO_DEVICE_OUT_USB_DEVICE});
-            if (!devices.isEmpty()) break;
-            if (!isInCall()) {
-                devices = availableOutputDevices.getFirstDevicesFromTypes({
-                        AUDIO_DEVICE_OUT_USB_ACCESSORY, AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
-                        AUDIO_DEVICE_OUT_AUX_DIGITAL, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET});
-                if (!devices.isEmpty()) break;
-            }
-            devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_EARPIECE);
+        }
+        } break;
+    default:
+        break;
+    }
+}
+
+product_strategy_t Engine::remapStrategyFromContext(product_strategy_t strategy,
+                                                 const SwAudioOutputCollection &outputs) const {
+    auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
+                          mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
+
+    if (isInCall()) {
+        switch (legacyStrategy) {
+        case STRATEGY_ACCESSIBILITY:
+        case STRATEGY_DTMF:
+        case STRATEGY_MEDIA:
+        case STRATEGY_SONIFICATION:
+        case STRATEGY_SONIFICATION_RESPECTFUL:
+            legacyStrategy = STRATEGY_PHONE;
             break;
 
-        case AUDIO_POLICY_FORCE_SPEAKER:
-            // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
-            // A2DP speaker when forcing to speaker output
-            if (!isInCall() &&
-                    (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                     outputs.isA2dpSupported()) {
-                devices = availableOutputDevices.getDevicesFromType(
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER);
-                if (!devices.isEmpty()) break;
-            }
-            if (!isInCall()) {
-                devices = availableOutputDevices.getFirstDevicesFromTypes({
-                        AUDIO_DEVICE_OUT_USB_ACCESSORY, AUDIO_DEVICE_OUT_USB_DEVICE,
-                        AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_AUX_DIGITAL,
-                        AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET});
-                if (!devices.isEmpty()) break;
-            }
-            devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
-            break;
+        default:
+            return strategy;
         }
-    break;
+    } else {
+        switch (legacyStrategy) {
+        case STRATEGY_SONIFICATION_RESPECTFUL:
+        case STRATEGY_SONIFICATION:
+            if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
+                legacyStrategy = STRATEGY_PHONE;
+            }
+            break;
+
+        case STRATEGY_ACCESSIBILITY:
+            if (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
+                    outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM))) {
+                legacyStrategy = STRATEGY_SONIFICATION;
+            }
+            break;
+
+        default:
+            return strategy;
+        }
+    }
+    return getProductStrategyFromLegacy(legacyStrategy);
+}
+
+DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
+                                              DeviceVector availableOutputDevices,
+                                              const SwAudioOutputCollection &outputs) const
+{
+    DeviceVector devices;
+
+    switch (strategy) {
+
+    case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
+        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+        break;
+
+    case STRATEGY_PHONE: {
+        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
+        if (!devices.isEmpty()) break;
+        devices = availableOutputDevices.getFirstDevicesFromTypes(
+                                          getLastRemovableMediaDevices());
+        if (!devices.isEmpty()) break;
+        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_EARPIECE);
+    } break;
 
     case STRATEGY_SONIFICATION:
-
-        // If incall, just select the STRATEGY_PHONE device
-        if (isInCall() ||
-                outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            break;
-        }
-        FALLTHROUGH_INTENDED;
-
     case STRATEGY_ENFORCED_AUDIBLE:
         // strategy STRATEGY_ENFORCED_AUDIBLE uses same routing policy as STRATEGY_SONIFICATION
         // except:
@@ -327,7 +306,8 @@
                 }
             }
             // Use both Bluetooth SCO and phone default output when ringing in normal mode
-            if (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) == AUDIO_POLICY_FORCE_BT_SCO) {
+            if (audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
+                    availableOutputDevices, STRATEGY_PHONE))) {
                 if (strategy == STRATEGY_SONIFICATION) {
                     devices.replaceDevicesByType(
                             AUDIO_DEVICE_OUT_SPEAKER,
@@ -343,32 +323,9 @@
         // The second device used for sonification is the same as the device used by media strategy
         FALLTHROUGH_INTENDED;
 
+    case STRATEGY_DTMF:
     case STRATEGY_ACCESSIBILITY:
-        if (strategy == STRATEGY_ACCESSIBILITY) {
-            // do not route accessibility prompts to a digital output currently configured with a
-            // compressed format as they would likely not be mixed and dropped.
-            for (size_t i = 0; i < outputs.size(); i++) {
-                sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
-                if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
-                    availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
-                            AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
-                            AUDIO_DEVICE_OUT_HDMI_ARC}));
-                }
-            }
-            if (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
-                    outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM))) {
-                return getDevicesForStrategyInt(
-                    STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
-            }
-            if (isInCall()) {
-                return getDevicesForStrategyInt(
-                        STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            }
-        }
-        // For other cases, STRATEGY_ACCESSIBILITY behaves like STRATEGY_MEDIA
-        FALLTHROUGH_INTENDED;
-
-    // FIXME: STRATEGY_REROUTING follow STRATEGY_MEDIA for now
+    case STRATEGY_SONIFICATION_RESPECTFUL:
     case STRATEGY_REROUTING:
     case STRATEGY_MEDIA: {
         DeviceVector devices2;
@@ -381,23 +338,13 @@
                 devices2.add(remoteSubmix);
             }
         }
-        if (isInCall() && (strategy == STRATEGY_MEDIA)) {
-            devices = getDevicesForStrategyInt(
-                    STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs);
-            break;
-        }
-        // FIXME: Find a better solution to prevent routing to BT hearing aid(b/122931261).
-        if ((devices2.isEmpty()) &&
-                (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
-            devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
-        }
+
         if ((devices2.isEmpty()) &&
             (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) == AUDIO_POLICY_FORCE_SPEAKER)) {
             devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
         }
         if (devices2.isEmpty() && (getLastRemovableMediaDevices().size() > 0)) {
-            if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
-                    outputs.isA2dpSupported()) {
+            if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
                 // Get the last connected device of wired and bluetooth a2dp
                 devices2 = availableOutputDevices.getFirstDevicesFromTypes(
                         getLastRemovableMediaDevices());
@@ -423,7 +370,9 @@
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
             devices3 = availableOutputDevices.getDevicesFromTypes({
-                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE});
+                    AUDIO_DEVICE_OUT_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                    AUDIO_DEVICE_OUT_SPDIF, AUDIO_DEVICE_OUT_AUX_LINE,
+                    });
         }
 
         devices2.add(devices3);
@@ -438,9 +387,19 @@
             devices.remove(devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
         }
 
-        // for STRATEGY_SONIFICATION:
+        bool mediaActiveLocally =
+                outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
+                                        SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
+                || outputs.isActiveLocally(
+                    toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
+                    SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+        // - for STRATEGY_SONIFICATION:
         // if SPEAKER was selected, and SPEAKER_SAFE is available, use SPEAKER_SAFE instead
-        if (strategy == STRATEGY_SONIFICATION) {
+        // - for STRATEGY_SONIFICATION_RESPECTFUL:
+        // if no media is playing on the device, check for mandatory use of "safe" speaker
+        // when media would have played on speaker, and the safe speaker path is available
+        if (strategy == STRATEGY_SONIFICATION
+            || (strategy == STRATEGY_SONIFICATION_RESPECTFUL && !mediaActiveLocally)) {
             devices.replaceDevicesByType(
                     AUDIO_DEVICE_OUT_SPEAKER,
                     availableOutputDevices.getDevicesFromType(
@@ -452,22 +411,26 @@
         devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_TELEPHONY_TX);
         break;
 
+    case STRATEGY_NONE:
+        // Happens when internal strategies are processed ("rerouting", "patch"...)
+        break;
+
     default:
-        ALOGW("getDevicesForStrategy() unknown strategy: %d", strategy);
+        ALOGW("%s unknown strategy: %d", __func__, strategy);
         break;
     }
 
     if (devices.isEmpty()) {
-        ALOGV("getDevicesForStrategy() no device found for strategy %d", strategy);
+        ALOGV("%s no device found for strategy %d", __func__, strategy);
         sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
         if (defaultOutputDevice != nullptr) {
             devices.add(defaultOutputDevice);
         }
         ALOGE_IF(devices.isEmpty(),
-                 "getDevicesForStrategy() no default device defined");
+                 "%s no default device defined", __func__);
     }
 
-    ALOGVV("getDevices ForStrategy() strategy %d, device %s",
+    ALOGVV("%s strategy %d, device %s", __func__,
            strategy, dumpDeviceTypes(devices.types()).c_str());
     return devices;
 }
@@ -502,20 +465,24 @@
         }
     }
 
+    audio_devices_t commDeviceType =
+        getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE);
+
     switch (inputSource) {
     case AUDIO_SOURCE_DEFAULT:
     case AUDIO_SOURCE_MIC:
         device = availableDevices.getDevice(
                 AUDIO_DEVICE_IN_BLUETOOTH_A2DP, String8(""), AUDIO_FORMAT_DEFAULT);
         if (device != nullptr) break;
-        if (getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO) {
+        if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
             device = availableDevices.getDevice(
                     AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
 
     case AUDIO_SOURCE_VOICE_COMMUNICATION:
@@ -528,46 +495,64 @@
             availableDevices = availablePrimaryDevices;
         }
 
-        switch (getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION)) {
-        case AUDIO_POLICY_FORCE_BT_SCO:
+        if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
             // if SCO device is requested but no SCO device is available, fall back to default case
             device = availableDevices.getDevice(
                     AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
             if (device != nullptr) {
                 break;
             }
-            FALLTHROUGH_INTENDED;
-
+        }
+        switch (commDeviceType) {
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+            device = availableDevices.getDevice(
+                    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+            break;
+        case AUDIO_DEVICE_OUT_SPEAKER:
+            device = availableDevices.getFirstExistingDevice({
+                    AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
+                    AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
+            break;
         default:    // FORCE_NONE
             device = availableDevices.getFirstExistingDevice({
                     AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                    AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                    AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
+                    AUDIO_DEVICE_IN_BUILTIN_MIC});
             break;
 
-        case AUDIO_POLICY_FORCE_SPEAKER:
-            device = availableDevices.getFirstExistingDevice({
-                    AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC});
-            break;
         }
         break;
 
     case AUDIO_SOURCE_VOICE_RECOGNITION:
     case AUDIO_SOURCE_UNPROCESSED:
-    case AUDIO_SOURCE_HOTWORD:
-        if (inputSource == AUDIO_SOURCE_HOTWORD) {
-            // We should not use primary output criteria for Hotword but rather limit
-            // to devices attached to the same HW module as the build in mic
-            LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
-            availableDevices = availablePrimaryDevices;
+        if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
+            device = availableDevices.getDevice(
+                    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+            if (device != nullptr) break;
         }
-        if (getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO) {
+        // Give BLUETOOTH_BLE higher priority than BUILTIN_MIC because users
+        // sometimes want to do voice search with a BT remote even when
+        // BUILTIN_MIC is available.
+        device = availableDevices.getFirstExistingDevice({
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+
+        break;
+    case AUDIO_SOURCE_HOTWORD:
+        // We should not use primary output criteria for Hotword but rather limit
+        // to devices attached to the same HW module as the built-in mic
+        LOG_ALWAYS_FATAL_IF(availablePrimaryDevices.isEmpty(), "Primary devices not found");
+        availableDevices = availablePrimaryDevices;
+        if (audio_is_bluetooth_out_sco_device(commDeviceType)) {
             device = availableDevices.getDevice(
                     AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
+                AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
     case AUDIO_SOURCE_CAMCORDER:
         // For a device without built-in mic, adding usb device
@@ -584,7 +569,8 @@
     case AUDIO_SOURCE_VOICE_PERFORMANCE:
         device = availableDevices.getFirstExistingDevice({
                 AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BUILTIN_MIC});
+                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
+                AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
     case AUDIO_SOURCE_REMOTE_SUBMIX:
         device = availableDevices.getDevice(
@@ -609,6 +595,7 @@
         ALOGE_IF(device == nullptr,
                  "getDeviceForInputSource() no default device defined");
     }
+
     ALOGV_IF(device != nullptr,
              "getDeviceForInputSource()input source %d, device %08x",
              inputSource, device->type());
@@ -626,34 +613,71 @@
     }
 }
 
-DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
-    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
-
-    // check if this strategy has a preferred device that is available,
-    // if yes, give priority to it
-    AudioDeviceTypeAddr preferredStrategyDevice;
-    const status_t status = getPreferredDeviceForStrategy(strategy, preferredStrategyDevice);
-    if (status == NO_ERROR) {
-        // there is a preferred device, is it available?
-        sp<DeviceDescriptor> preferredAvailableDevDescr = availableOutputDevices.getDevice(
-                preferredStrategyDevice.mType,
-                String8(preferredStrategyDevice.mAddress.c_str()),
-                AUDIO_FORMAT_DEFAULT);
-        if (preferredAvailableDevDescr != nullptr) {
-            ALOGVV("%s using pref device 0x%08x/%s for strategy %u",
-                   __func__, preferredStrategyDevice.mType,
-                   preferredStrategyDevice.mAddress.c_str(), strategy);
-            return DeviceVector(preferredAvailableDevDescr);
+product_strategy_t Engine::getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const {
+    for (const auto& strategyMap : mLegacyStrategyMap) {
+        if (strategyMap.second == legacyStrategy) {
+            return strategyMap.first;
         }
     }
+    return PRODUCT_STRATEGY_NONE;
+}
 
-    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
+audio_devices_t Engine::getPreferredDeviceTypeForLegacyStrategy(
+        const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const {
+    product_strategy_t strategy = getProductStrategyFromLegacy(legacyStrategy);
+    DeviceVector devices = getPreferredAvailableDevicesForProductStrategy(
+            availableOutputDevices, strategy);
+    if (devices.size() > 0) {
+        return devices[0]->type();
+    }
+    return AUDIO_DEVICE_NONE;
+}
+
+DeviceVector Engine::getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector preferredAvailableDevVec = {};
+    AudioDeviceTypeAddrVector preferredStrategyDevices;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
+    if (status == NO_ERROR) {
+        // there is a preferred device, is it available?
+        preferredAvailableDevVec =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+            ALOGVV("%s using pref device %s for strategy %u",
+                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+            return preferredAvailableDevVec;
+        }
+    }
+    return preferredAvailableDevVec;
+}
+
+
+DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
     const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
+
+    // Take context into account to remap product strategy before
+    // checking preferred device for strategy and applying default routing rules
+    strategy = remapStrategyFromContext(strategy, outputs);
+
     auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
                           mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
+
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+
+    filterOutputDevicesForStrategy(legacyStrategy, availableOutputDevices, outputs);
+
+    // check if this strategy has a preferred device that is available,
+    // if yes, give priority to it.
+    DeviceVector preferredAvailableDevVec =
+            getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, strategy);
+    if (!preferredAvailableDevVec.isEmpty()) {
+        return preferredAvailableDevVec;
+    }
+
     return getDevicesForStrategyInt(legacyStrategy,
                                     availableOutputDevices,
-                                    availableInputDevices, outputs);
+                                    outputs);
 }
 
 DeviceVector Engine::getOutputDevicesForAttributes(const audio_attributes_t &attributes,
@@ -689,6 +713,7 @@
 }
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                         uid_t uid,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -708,7 +733,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source, availableInputDevices, mix);
+    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+                                                       availableInputDevices,
+                                                       uid,
+                                                       mix);
     if (device != nullptr) {
         return device;
     }
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index bb9e2df..595e289 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -62,8 +62,10 @@
     DeviceVector getOutputDevicesForStream(audio_stream_type_t stream,
                                            bool fromCache = false) const override;
 
-    sp<DeviceDescriptor> getInputDeviceForAttributes(
-            const audio_attributes_t &attr, sp<AudioPolicyMix> *mix = nullptr) const override;
+    sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
+                                                     uid_t uid = 0,
+                                                     sp<AudioPolicyMix> *mix = nullptr)
+                                                     const override;
 
     void updateDeviceSelectionCache() override;
 
@@ -74,15 +76,27 @@
 
     status_t setDefaultDevice(audio_devices_t device);
 
+    void filterOutputDevicesForStrategy(legacy_strategy strategy,
+                                            DeviceVector& availableOutputDevices,
+                                            const SwAudioOutputCollection &outputs) const;
+
+    product_strategy_t remapStrategyFromContext(product_strategy_t strategy,
+                                            const SwAudioOutputCollection &outputs) const;
+
     DeviceVector getDevicesForStrategyInt(legacy_strategy strategy,
                                           DeviceVector availableOutputDevices,
-                                          DeviceVector availableInputDevices,
                                           const SwAudioOutputCollection &outputs) const;
 
     DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
 
     sp<DeviceDescriptor> getDeviceForInputSource(audio_source_t inputSource) const;
 
+    product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
+    audio_devices_t getPreferredDeviceTypeForLegacyStrategy(
+        const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
+    DeviceVector getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+
     DeviceStrategyMap mDevicesForStrategies;
 
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
new file mode 100644
index 0000000..faf15d6
--- /dev/null
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -0,0 +1,65 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "audiopolicy_fuzzer",
+    srcs: [
+        "audiopolicy_fuzzer.cpp",
+    ],
+    include_dirs: [
+        "frameworks/av/services/audiopolicy",
+    ],
+    shared_libs: [
+        "android.hardware.audio.common-util",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient",
+        "libaudiofoundation",
+        "libbase",
+        "libcutils",
+        "libhidlbase",
+        "libdl",
+        "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libutils",
+        "libxml2",
+        "libbinder",
+        "libaudiopolicy",
+        "libaudiopolicymanagerdefault",
+        "framework-permission-aidl-cpp",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "libaudiopolicycomponents",
+    ],
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+    ],
+    data: [":audiopolicyfuzzer_configuration_files"],
+}
diff --git a/services/audiopolicy/fuzzer/README.md b/services/audiopolicy/fuzzer/README.md
new file mode 100644
index 0000000..08d7213
--- /dev/null
+++ b/services/audiopolicy/fuzzer/README.md
@@ -0,0 +1,63 @@
+# Fuzzer for libaudiopolicy
+
+## Plugin Design Considerations
+The fuzzer plugin for libaudiopolicy is designed based on an
+understanding of the service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AudioPolicy APIs contain the following parameters:
+1. AudioFormats
+2. AudioChannelMasks
+3. AudioOutputFlags
+4. AudioDevices
+5. MixTypes
+6. MixRouteFlags
+7. SampleRates
+8. AudioUsages
+9. AudioContentTypes
+10. AudioSources
+11. AudioFlagMasks
+12. AudioPolicyDeviceStates
+
+| Parameter | Valid Input Values | Configured Value |
+|-----------|--------------------|------------------|
+| `AudioFormat` | 77 values of type `audio_format_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioChannelMask` | 83 values of type `audio_channel_mask_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioOutputFlag` | 16 values of type `audio_output_flags_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioDevice`   | `AUDIO_DEVICE_OUT_AUX_DIGITAL`, `AUDIO_DEVICE_OUT_STUB`, `AUDIO_DEVICE_IN_VOICE_CALL`, `AUDIO_DEVICE_IN_AUX_DIGITAL`, `AUDIO_DEVICE_IN_STUB` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `MixType`   | `MIX_TYPE_PLAYERS`, `MIX_TYPE_RECORDERS` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `MixRouteFlag`   | `MIX_ROUTE_FLAG_RENDER`, `MIX_ROUTE_FLAG_LOOP_BACK`, `MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER`, `MIX_ROUTE_FLAG_ALL` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SampleRate` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `AudioUsage` | `AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST`, `AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT`, `AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED`, `AUDIO_USAGE_NOTIFICATION_EVENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioContentType` | `AUDIO_CONTENT_TYPE_UNKNOWN`, `AUDIO_CONTENT_TYPE_SPEECH`, `AUDIO_CONTENT_TYPE_MUSIC`, `AUDIO_CONTENT_TYPE_MOVIE`, `AUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioSource` | 14 values of type `audio_source_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioFlagMask` | 15 values of type `audio_flags_mask_t` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `AudioPolicyDeviceStates` | `AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE`, `AUDIO_POLICY_DEVICE_STATE_AVAILABLE`, `AUDIO_POLICY_DEVICE_STATE_CNT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
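+
+As a minimal sketch of this selection pattern (assuming only libFuzzer's
+`FuzzedDataProvider`; the helper name `pickFromList` is illustrative, while the
+fuzzer source itself uses a similar `getValueFromVector` template), a parameter
+value can be chosen like this:
+
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include <vector>
+
+// Pick one element from a non-empty list of valid values, using the fuzzer
+// input only to select the index, so the same input always yields the same
+// choice.
+template <typename T>
+T pickFromList(FuzzedDataProvider *fdp, const std::vector<T> &values) {
+    return values[fdp->ConsumeIntegralInRange<size_t>(0, values.size() - 1)];
+}
+```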
+
+## Build
+
+This describes the steps to build the audiopolicy_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) audiopolicy_fuzzer
+```
+
+#### Steps to run
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/audiopolicy_fuzzer/audiopolicy_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
new file mode 100644
index 0000000..7000cd9
--- /dev/null
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -0,0 +1,986 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+#include <stdint.h>
+#include <sys/wait.h>
+#include <unistd.h>
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <Serializer.h>
+#include <android-base/file.h>
+#include <android/content/AttributionSourceState.h>
+#include <libxml/parser.h>
+#include <libxml/xinclude.h>
+#include <media/AudioPolicy.h>
+#include <media/PatchBuilder.h>
+#include <media/RecordingActivityTracker.h>
+
+#include <AudioPolicyInterface.h>
+#include <android_audio_policy_configuration_V7_0-enums.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <tests/AudioPolicyManagerTestClient.h>
+#include <tests/AudioPolicyTestClient.h>
+#include <tests/AudioPolicyTestManager.h>
+#include <xsdc/XsdcSupport.h>
+
+using namespace android;
+
+namespace xsd {
+using namespace ::android::audio::policy::configuration::V7_0;
+}
+
+using content::AttributionSourceState;
+
+static const std::vector<audio_format_t> kAudioFormats = [] {
+    std::vector<audio_format_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
+        audio_format_t audioFormatHal;
+        std::string audioFormat = toString(enumVal);
+        if (audio_format_from_string(audioFormat.c_str(), &audioFormatHal)) {
+            result.push_back(audioFormatHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_channel_mask_t> kAudioChannelOutMasks = [] {
+    std::vector<audio_channel_mask_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        audio_channel_mask_t audioChannelMaskHal;
+        std::string audioChannelMask = toString(enumVal);
+        if (enumVal != xsd::AudioChannelMask::AUDIO_CHANNEL_NONE &&
+            audioChannelMask.find("_IN_") == std::string::npos &&
+            audio_channel_mask_from_string(audioChannelMask.c_str(), &audioChannelMaskHal)) {
+            result.push_back(audioChannelMaskHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_channel_mask_t> kAudioChannelInMasks = [] {
+    std::vector<audio_channel_mask_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+        audio_channel_mask_t audioChannelMaskHal;
+        std::string audioChannelMask = toString(enumVal);
+        if (enumVal != xsd::AudioChannelMask::AUDIO_CHANNEL_NONE &&
+            audioChannelMask.find("_OUT_") == std::string::npos &&
+            audio_channel_mask_from_string(audioChannelMask.c_str(), &audioChannelMaskHal)) {
+            result.push_back(audioChannelMaskHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_output_flags_t> kAudioOutputFlags = [] {
+    std::vector<audio_output_flags_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioInOutFlag>{}) {
+        audio_output_flags_t audioOutputFlagHal;
+        std::string audioOutputFlag = toString(enumVal);
+        if (audioOutputFlag.find("_OUTPUT_") != std::string::npos &&
+            audio_output_flag_from_string(audioOutputFlag.c_str(), &audioOutputFlagHal)) {
+            result.push_back(audioOutputFlagHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_devices_t> kAudioDevices = [] {
+    std::vector<audio_devices_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+        audio_devices_t audioDeviceHal;
+        std::string audioDevice = toString(enumVal);
+        if (audio_device_from_string(audioDevice.c_str(), &audioDeviceHal)) {
+            result.push_back(audioDeviceHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_usage_t> kAudioUsages = [] {
+    std::vector<audio_usage_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioUsage>{}) {
+        audio_usage_t audioUsageHal;
+        std::string audioUsage = toString(enumVal);
+        if (audio_usage_from_string(audioUsage.c_str(), &audioUsageHal)) {
+            result.push_back(audioUsageHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_source_t> kAudioSources = [] {
+    std::vector<audio_source_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
+        audio_source_t audioSourceHal;
+        std::string audioSource = toString(enumVal);
+        if (audio_source_from_string(audioSource.c_str(), &audioSourceHal)) {
+            result.push_back(audioSourceHal);
+        }
+    }
+    return result;
+}();
+
+static const std::vector<audio_content_type_t> kAudioContentTypes = [] {
+    std::vector<audio_content_type_t> result;
+    for (const auto enumVal : xsdc_enum_range<xsd::AudioContentType>{}) {
+        audio_content_type_t audioContentTypeHal;
+        std::string audioContentType = toString(enumVal);
+        if (audio_content_type_from_string(audioContentType.c_str(), &audioContentTypeHal)) {
+            result.push_back(audioContentTypeHal);
+        }
+    }
+    return result;
+}();
+
+std::vector<int> kMixTypes = {MIX_TYPE_PLAYERS, MIX_TYPE_RECORDERS};
+
+std::vector<int> kMixRouteFlags = {MIX_ROUTE_FLAG_RENDER, MIX_ROUTE_FLAG_LOOP_BACK,
+                                   MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER, MIX_ROUTE_FLAG_ALL};
+
+std::vector<audio_flags_mask_t> kAudioFlagMasks = {
+    AUDIO_FLAG_NONE,           AUDIO_FLAG_AUDIBILITY_ENFORCED,
+    AUDIO_FLAG_SECURE,         AUDIO_FLAG_SCO,
+    AUDIO_FLAG_BEACON,         AUDIO_FLAG_HW_AV_SYNC,
+    AUDIO_FLAG_HW_HOTWORD,     AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY,
+    AUDIO_FLAG_BYPASS_MUTE,    AUDIO_FLAG_LOW_LATENCY,
+    AUDIO_FLAG_DEEP_BUFFER,    AUDIO_FLAG_NO_MEDIA_PROJECTION,
+    AUDIO_FLAG_MUTE_HAPTIC,    AUDIO_FLAG_NO_SYSTEM_CAPTURE,
+    AUDIO_FLAG_CAPTURE_PRIVATE};
+
+std::vector<audio_policy_dev_state_t> kAudioPolicyDeviceStates = {
+    AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+    AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+    AUDIO_POLICY_DEVICE_STATE_CNT,
+};
+
+std::vector<uint32_t> kSamplingRates = {8000, 16000, 44100, 48000, 88200, 96000};
+
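+// Return either a value picked from the list of valid values (using the fuzzer
+// input to choose the index) or, when the provider says so, an arbitrary
+// integral value so that out-of-range inputs are exercised as well.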
+template <typename T>
+T getValueFromVector(FuzzedDataProvider *fdp, std::vector<T> arr) {
+    if (fdp->ConsumeBool()) {
+        return arr[fdp->ConsumeIntegralInRange<int32_t>(0, arr.size() - 1)];
+    } else {
+        return (T)fdp->ConsumeIntegral<uint32_t>();
+    }
+}
+
+class AudioPolicyManagerFuzzer {
+   public:
+    explicit AudioPolicyManagerFuzzer(FuzzedDataProvider *fdp);
+    virtual ~AudioPolicyManagerFuzzer() = default;
+    virtual bool initialize();
+    virtual void SetUpManagerConfig();
+    bool getOutputForAttr(audio_port_handle_t *selectedDeviceId, audio_format_t format,
+                          audio_channel_mask_t channelMask, int sampleRate,
+                          audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                          audio_io_handle_t *output = nullptr,
+                          audio_port_handle_t *portId = nullptr, audio_attributes_t attr = {});
+    bool getInputForAttr(const audio_attributes_t &attr, audio_unique_id_t riid,
+                         audio_port_handle_t *selectedDeviceId, audio_format_t format,
+                         audio_channel_mask_t channelMask, int sampleRate,
+                         audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                         audio_port_handle_t *portId = nullptr);
+    bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
+                        const std::string &address, audio_port_v7 *foundPort);
+    static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch *patch);
+    audio_patch createFuzzedPatch();
+    void fuzzPatchCreation();
+    virtual void process();
+
+   protected:
+    std::unique_ptr<AudioPolicyManagerTestClient> mClient{new AudioPolicyManagerTestClient};
+    std::unique_ptr<AudioPolicyTestManager> mManager{new AudioPolicyTestManager(mClient.get())};
+    FuzzedDataProvider *mFdp;
+};
+
+AudioPolicyManagerFuzzer::AudioPolicyManagerFuzzer(FuzzedDataProvider *fdp)
+        : mFdp(fdp) {}
+
+bool AudioPolicyManagerFuzzer::initialize() {
+    if (mFdp->remaining_bytes() < 1) {
+        return false;
+    }
+    // init code
+    SetUpManagerConfig();
+
+    if (mManager->initialize() != NO_ERROR) {
+        return false;
+    }
+    if (mManager->initCheck() != NO_ERROR) {
+        return false;
+    }
+    return true;
+}
+
+void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mManager->getConfig().setDefault(); }
+
+bool AudioPolicyManagerFuzzer::getOutputForAttr(
+    audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
+    int sampleRate, audio_output_flags_t flags, audio_io_handle_t *output,
+    audio_port_handle_t *portId, audio_attributes_t attr) {
+    audio_io_handle_t localOutput;
+    if (!output) output = &localOutput;
+    *output = AUDIO_IO_HANDLE_NONE;
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = sampleRate;
+    config.channel_mask = channelMask;
+    config.format = format;
+    audio_port_handle_t localPortId;
+    if (!portId) portId = &localPortId;
+    *portId = AUDIO_PORT_HANDLE_NONE;
+    AudioPolicyInterface::output_type_t outputType;
+
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource;
+    attributionSource.uid = 0;
+    attributionSource.token = sp<BBinder>::make();
+    if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
+            &config, &flags, selectedDeviceId, portId, {}, &outputType) != OK) {
+        return false;
+    }
+    if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
+        return false;
+    }
+    return true;
+}
+
+bool AudioPolicyManagerFuzzer::getInputForAttr(
+    const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
+    audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
+    audio_input_flags_t flags, audio_port_handle_t *portId) {
+    audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+    audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+    config.sample_rate = sampleRate;
+    config.channel_mask = channelMask;
+    config.format = format;
+    audio_port_handle_t localPortId;
+    if (!portId) portId = &localPortId;
+    *portId = AUDIO_PORT_HANDLE_NONE;
+    AudioPolicyInterface::input_type_t inputType;
+
+    AttributionSourceState attributionSource;
+    attributionSource.uid = 0;
+    attributionSource.token = sp<BBinder>::make();
+    if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
+            &config, flags, selectedDeviceId, &inputType, portId) != OK) {
+        return false;
+    }
+    if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
+        return false;
+    }
+    return true;
+}
+
+bool AudioPolicyManagerFuzzer::findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
+                                              const std::string &address,
+                                              audio_port_v7 *foundPort) {
+    uint32_t numPorts = 0;
+    uint32_t generation1;
+    status_t ret;
+
+    ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+
+    uint32_t generation2;
+    struct audio_port_v7 ports[numPorts];
+    ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation2);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+
+    for (const auto &port : ports) {
+        if (port.role == role && port.ext.device.type == deviceType &&
+            (strncmp(port.ext.device.address, address.c_str(), AUDIO_DEVICE_MAX_ADDRESS_LEN) ==
+             0)) {
+            if (foundPort) *foundPort = port;
+            return true;
+        }
+    }
+    return false;
+}
+
+audio_port_handle_t AudioPolicyManagerFuzzer::getDeviceIdFromPatch(
+    const struct audio_patch *patch) {
+    if (patch->num_sources != 0 && patch->num_sinks != 0) {
+        if (patch->sources[0].type == AUDIO_PORT_TYPE_MIX) {
+            return patch->sinks[0].id;
+        } else {
+            return patch->sources[0].id;
+        }
+    }
+    return AUDIO_PORT_HANDLE_NONE;
+}
+
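+// Build an audio_patch whose id, source/sink counts and port configurations
+// are all derived from fuzzer input; each port configuration is filled with
+// raw fuzzed bytes.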
+audio_patch AudioPolicyManagerFuzzer::createFuzzedPatch() {
+    audio_patch patch{};
+    patch.id = mFdp->ConsumeIntegral<uint32_t>();
+    patch.num_sources = mFdp->ConsumeIntegralInRange(0, AUDIO_PATCH_PORTS_MAX);
+    for (int i = 0; i < patch.num_sources; ++i) {
+        audio_port_config config{};
+        std::vector<uint8_t> bytes = mFdp->ConsumeBytes<uint8_t>(sizeof(config));
+        if (!bytes.empty()) {
+            memcpy(reinterpret_cast<uint8_t *>(&config), bytes.data(), bytes.size());
+        }
+        patch.sources[i] = config;
+    }
+    patch.num_sinks = mFdp->ConsumeIntegralInRange(0, AUDIO_PATCH_PORTS_MAX);
+    for (int i = 0; i < patch.num_sinks; ++i) {
+        audio_port_config config{};
+        std::vector<uint8_t> bytes = mFdp->ConsumeBytes<uint8_t>(sizeof(config));
+        if (!bytes.empty()) {
+            memcpy(reinterpret_cast<uint8_t *>(&config), bytes.data(), bytes.size());
+        }
+        patch.sinks[i] = config;
+    }
+    return patch;
+}
+
+void AudioPolicyManagerFuzzer::fuzzPatchCreation() {
+    if (mFdp->remaining_bytes()) {
+        audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+        uid_t uid = mFdp->ConsumeIntegral<uint32_t>();
+
+        // create a fuzzed patch
+        handle = AUDIO_PATCH_HANDLE_NONE;
+        audio_patch patch = createFuzzedPatch();
+        uid = mFdp->ConsumeIntegral<uint32_t>();
+        if (mManager->createAudioPatch(&patch, &handle, uid) == NO_ERROR) {
+            mManager->releaseAudioPatch(handle, uid);
+        }
+    }
+}
+
+void AudioPolicyManagerFuzzer::process() {
+    if (initialize()) {
+        fuzzPatchCreation();
+    }
+}
+
+class AudioPolicyManagerFuzzerWithConfigurationFile : public AudioPolicyManagerFuzzer {
+   public:
+    explicit AudioPolicyManagerFuzzerWithConfigurationFile(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzer(fdp){};
+
+   protected:
+    void SetUpManagerConfig() override;
+    virtual std::string getConfigFile();
+    void traverseAndFuzzXML(xmlDocPtr pDoc, xmlNodePtr curr);
+    std::string fuzzXML(std::string xmlPath);
+
+    static inline const std::string sExecutableDir = base::GetExecutableDirectory() + "/";
+    static inline const std::string sDefaultConfig =
+            sExecutableDir + "data/test_audio_policy_configuration.xml";
+    static inline const std::string sFuzzedConfig = sExecutableDir + "fuzzed.xml";
+};
+
+std::string AudioPolicyManagerFuzzerWithConfigurationFile::getConfigFile() {
+    return fuzzXML(sDefaultConfig);
+}
+
+void AudioPolicyManagerFuzzerWithConfigurationFile::SetUpManagerConfig() {
+    deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
+}
+
+void AudioPolicyManagerFuzzerWithConfigurationFile::traverseAndFuzzXML(xmlDocPtr pDoc,
+                                                                       xmlNodePtr curr) {
+    if (curr == nullptr) {
+        return;
+    }
+
+    xmlAttr *attribute = curr->properties;
+    while (attribute) {
+        if (!xmlStrcmp(attribute->name, reinterpret_cast<const xmlChar *>("format"))) {
+            const char *newFormat =
+                audio_format_to_string(getValueFromVector<audio_format_t>(mFdp, kAudioFormats));
+            xmlSetProp(curr, attribute->name, reinterpret_cast<const xmlChar *>(newFormat));
+        }
+        if (!xmlStrcmp(attribute->name, reinterpret_cast<const xmlChar *>("flags"))) {
+            std::string newFlag = "";
+            uint16_t numFlags = std::max((uint16_t)1, mFdp->ConsumeIntegral<uint16_t>());
+            for (uint16_t i = 0; i < numFlags; ++i) {
+                newFlag += std::string(audio_output_flag_to_string(
+                    getValueFromVector<audio_output_flags_t>(mFdp, kAudioOutputFlags)));
+                if (i != (numFlags - 1)) {
+                    newFlag += std::string("|");
+                }
+            }
+            xmlSetProp(curr, attribute->name, reinterpret_cast<const xmlChar *>(newFlag.c_str()));
+        }
+        if (!xmlStrcmp(attribute->name, reinterpret_cast<const xmlChar *>("samplingRates"))) {
+            std::string newRate = "";
+            uint16_t numRates = std::max((uint16_t)1, mFdp->ConsumeIntegral<uint16_t>());
+            for (uint16_t i = 0; i < numRates; ++i) {
+                newRate += std::to_string(getValueFromVector<uint32_t>(mFdp, kSamplingRates));
+                if (i != (numRates - 1)) {
+                    newRate += std::string(",");
+                }
+            }
+            xmlSetProp(curr, attribute->name, reinterpret_cast<const xmlChar *>(newRate.c_str()));
+        }
+        if (!xmlStrcmp(attribute->name, reinterpret_cast<const xmlChar *>("channelMasks"))) {
+            int isOutMask = -1;
+            char *value =
+                reinterpret_cast<char *>(xmlNodeListGetString(pDoc, attribute->children, 1));
+            if (std::string(value).find(std::string("_OUT_")) != std::string::npos) {
+                // OUT mask
+                isOutMask = 1;
+            } else if (std::string(value).find(std::string("_IN_")) != std::string::npos) {
+                // IN mask
+                isOutMask = 0;
+            }
+            if (isOutMask != -1) {
+                std::string newMask = "";
+                uint16_t numMasks = std::max((uint16_t)1, mFdp->ConsumeIntegral<uint16_t>());
+                for (uint16_t i = 0; i < numMasks; ++i) {
+                    if (isOutMask) {
+                        newMask += std::string(audio_channel_out_mask_to_string(
+                            getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks)));
+                    } else {
+                        newMask += std::string(audio_channel_in_mask_to_string(
+                            getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks)));
+                    }
+                    if (i != (numMasks - 1)) {
+                        newMask += std::string(",");
+                    }
+                }
+                xmlSetProp(curr, attribute->name,
+                           reinterpret_cast<const xmlChar *>(newMask.c_str()));
+            }
+            xmlFree(value);
+        }
+        attribute = attribute->next;
+    }
+
+    curr = curr->xmlChildrenNode;
+    while (curr != nullptr) {
+        traverseAndFuzzXML(pDoc, curr);
+        curr = curr->next;
+    }
+}
+
+std::string AudioPolicyManagerFuzzerWithConfigurationFile::fuzzXML(std::string xmlPath) {
+    std::string outPath = sFuzzedConfig;
+
+    // Load in the xml file from disk
+    xmlDocPtr pDoc = xmlParseFile(xmlPath.c_str());
+    xmlNodePtr root = xmlDocGetRootElement(pDoc);
+
+    traverseAndFuzzXML(pDoc, root);
+
+    // Save the document back out to disk.
+    xmlSaveFileEnc(outPath.c_str(), pDoc, "UTF-8");
+    xmlFreeDoc(pDoc);
+
+    return outPath;
+}
+
+class AudioPolicyManagerFuzzerMsd : public AudioPolicyManagerFuzzerWithConfigurationFile {
+   public:
+    explicit AudioPolicyManagerFuzzerMsd(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerWithConfigurationFile(fdp) {}
+
+   protected:
+    std::string getConfigFile() override;
+
+    static inline const std::string sMsdConfig =
+            sExecutableDir + "data/test_audio_policy_msd_configuration.xml";
+};
+
+std::string AudioPolicyManagerFuzzerMsd::getConfigFile() { return fuzzXML(sMsdConfig); }
+
+using PolicyMixTuple = std::tuple<audio_usage_t, audio_source_t, uint32_t>;
+
+class AudioPolicyManagerFuzzerDynamicPolicy : public AudioPolicyManagerFuzzerWithConfigurationFile {
+   public:
+    explicit AudioPolicyManagerFuzzerDynamicPolicy(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerWithConfigurationFile(fdp){};
+    ~AudioPolicyManagerFuzzerDynamicPolicy() override;
+    void process() override;
+
+   protected:
+    status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
+                          std::string mixAddress, const audio_config_t &audioConfig,
+                          const std::vector<PolicyMixTuple> &rules);
+    void clearPolicyMix();
+    void registerPolicyMixes();
+    void unregisterPolicyMixes();
+
+    Vector<AudioMix> mAudioMixes;
+    const std::string mMixAddress = "remote_submix_media";
+};
+
+AudioPolicyManagerFuzzerDynamicPolicy::~AudioPolicyManagerFuzzerDynamicPolicy() {
+    clearPolicyMix();
+}
+
+status_t AudioPolicyManagerFuzzerDynamicPolicy::addPolicyMix(
+    int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
+    const audio_config_t &audioConfig, const std::vector<PolicyMixTuple> &rules) {
+    Vector<AudioMixMatchCriterion> myMixMatchCriteria;
+
+    for (const auto &rule : rules) {
+        myMixMatchCriteria.add(
+            AudioMixMatchCriterion(std::get<0>(rule), std::get<1>(rule), std::get<2>(rule)));
+    }
+
+    AudioMix myAudioMix(myMixMatchCriteria, mixType, audioConfig, mixFlag,
+                        String8(mixAddress.c_str()), 0);
+    myAudioMix.mDeviceType = deviceType;
+    // Clear mAudioMixes before adding a new one so we don't re-add already existing mixes.
+    mAudioMixes.clear();
+    mAudioMixes.add(myAudioMix);
+
+    // As policy mix registration may fail in some cases, the caller
+    // needs to check the returned status.
+    status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+    return ret;
+}
+
+void AudioPolicyManagerFuzzerDynamicPolicy::clearPolicyMix() {
+    if (mManager != nullptr) {
+        mManager->unregisterPolicyMixes(mAudioMixes);
+    }
+    mAudioMixes.clear();
+}
+
+void AudioPolicyManagerFuzzerDynamicPolicy::registerPolicyMixes() {
+    const uint32_t numPolicies = mFdp->ConsumeIntegralInRange<uint32_t>(1, MAX_MIXES_PER_POLICY);
+
+    for (int i = 0; i < numPolicies; ++i) {
+        audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+        audioConfig.channel_mask = getValueFromVector<audio_channel_mask_t>(
+            mFdp, mFdp->ConsumeBool() ? kAudioChannelInMasks : kAudioChannelOutMasks);
+        audioConfig.format = getValueFromVector<audio_format_t>(mFdp, kAudioFormats);
+        audioConfig.sample_rate = getValueFromVector<uint32_t>(mFdp, kSamplingRates);
+        addPolicyMix(getValueFromVector<int>(mFdp, kMixTypes),
+                     getValueFromVector<int>(mFdp, kMixRouteFlags),
+                     getValueFromVector<audio_devices_t>(mFdp, kAudioDevices), "", audioConfig,
+                     std::vector<PolicyMixTuple>());
+    }
+}
+
+void AudioPolicyManagerFuzzerDynamicPolicy::unregisterPolicyMixes() {
+    mManager->unregisterPolicyMixes(mAudioMixes);
+}
+
+void AudioPolicyManagerFuzzerDynamicPolicy::process() {
+    if (initialize()) {
+        registerPolicyMixes();
+        fuzzPatchCreation();
+        unregisterPolicyMixes();
+    }
+}
+
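+// Same as the dynamic policy fuzzer, but runs against a primary-only configuration that
+// declares no remote submix module.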
+class AudioPolicyManagerFuzzerDPNoRemoteSubmixModule
+    : public AudioPolicyManagerFuzzerDynamicPolicy {
+   public:
+    explicit AudioPolicyManagerFuzzerDPNoRemoteSubmixModule(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerDynamicPolicy(fdp) {}
+
+   protected:
+    std::string getConfigFile() override;
+
+    static inline const std::string sPrimaryOnlyConfig =
+            sExecutableDir + "data/test_audio_policy_primary_only_configuration.xml";
+};
+
+std::string AudioPolicyManagerFuzzerDPNoRemoteSubmixModule::getConfigFile() {
+    return fuzzXML(sPrimaryOnlyConfig);
+}
+
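+// Registers a playback re-routing mix on the remote submix address, captures from it and then
+// fuzzes playback requests whose attributes may match the registered usage rules.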
+class AudioPolicyManagerFuzzerDPPlaybackReRouting : public AudioPolicyManagerFuzzerDynamicPolicy {
+   public:
+    explicit AudioPolicyManagerFuzzerDPPlaybackReRouting(FuzzedDataProvider *fdp);
+    ~AudioPolicyManagerFuzzerDPPlaybackReRouting() override;
+    void process() override;
+
+   protected:
+    bool initialize() override;
+    void playBackReRouting();
+
+    std::unique_ptr<RecordingActivityTracker> mTracker;
+
+    std::vector<PolicyMixTuple> mUsageRules = {
+        {AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE},
+        {AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE}};
+
+    struct audio_port_v7 mInjectionPort;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_config_t mAudioConfig;
+};
+
+AudioPolicyManagerFuzzerDPPlaybackReRouting::AudioPolicyManagerFuzzerDPPlaybackReRouting(
+        FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerDynamicPolicy(fdp) {
+    const uint32_t numRules = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numRules; ++i) {
+        PolicyMixTuple rule = {getValueFromVector<audio_usage_t>(mFdp, kAudioUsages),
+                               getValueFromVector<audio_source_t>(mFdp, kAudioSources),
+                               RULE_MATCH_ATTRIBUTE_USAGE};
+        mUsageRules.push_back(rule);
+    }
+}
+
+AudioPolicyManagerFuzzerDPPlaybackReRouting::~AudioPolicyManagerFuzzerDPPlaybackReRouting() {
+    // Guard against a failed initialize(), in which case mManager may not exist.
+    if (mManager != nullptr) {
+        mManager->stopInput(mPortId);
+    }
+}
+
+bool AudioPolicyManagerFuzzerDPPlaybackReRouting::initialize() {
+    if (!AudioPolicyManagerFuzzerDynamicPolicy::initialize()) {
+        return false;
+    }
+    mTracker.reset(new RecordingActivityTracker());
+
+    mAudioConfig = AUDIO_CONFIG_INITIALIZER;
+    mAudioConfig.channel_mask =
+        getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks);
+    mAudioConfig.format = getValueFromVector<audio_format_t>(mFdp, kAudioFormats);
+    mAudioConfig.sample_rate = getValueFromVector<uint32_t>(mFdp, kSamplingRates);
+    status_t ret = addPolicyMix(getValueFromVector<int>(mFdp, kMixTypes),
+                                getValueFromVector<int>(mFdp, kMixRouteFlags),
+                                getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
+                                mMixAddress, mAudioConfig, mUsageRules);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+
+    struct audio_port_v7 extractionPort;
+    findDevicePort(AUDIO_PORT_ROLE_SOURCE, getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
+                   mMixAddress, &extractionPort);
+
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_source_t source = getValueFromVector<audio_source_t>(mFdp, kAudioSources);
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source,
+                               AUDIO_FLAG_NONE, ""};
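+    // Pass the mix address to the capture request through the attributes tags ("addr=<address>").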
+    std::string tags = "addr=" + mMixAddress;
+    strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
+    getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, mAudioConfig.format,
+                    mAudioConfig.channel_mask, mAudioConfig.sample_rate, AUDIO_INPUT_FLAG_NONE,
+                    &mPortId);
+
+    ret = mManager->startInput(mPortId);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+    if (!findDevicePort(AUDIO_PORT_ROLE_SINK,
+                        getValueFromVector<audio_devices_t>(mFdp, kAudioDevices), mMixAddress,
+                        &mInjectionPort)) {
+        return false;
+    }
+
+    return true;
+}
+
+void AudioPolicyManagerFuzzerDPPlaybackReRouting::playBackReRouting() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numTestCases; ++i) {
+        audio_attributes_t attr;
+        attr.content_type = getValueFromVector<audio_content_type_t>(mFdp, kAudioContentTypes);
+        attr.usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
+        attr.source = getValueFromVector<audio_source_t>(mFdp, kAudioSources);
+        attr.flags = getValueFromVector<audio_flags_mask_t>(mFdp, kAudioFlagMasks);
+        std::string tags(mFdp->ConsumeBool() ? "" : "addr=remote_submix_media");
+        strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
+
+        audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
+        getOutputForAttr(&playbackRoutedPortId, mAudioConfig.format, mAudioConfig.channel_mask,
+                         mAudioConfig.sample_rate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/,
+                         nullptr /*portId*/, attr);
+    }
+}
+
+void AudioPolicyManagerFuzzerDPPlaybackReRouting::process() {
+    if (initialize()) {
+        playBackReRouting();
+        registerPolicyMixes();
+        fuzzPatchCreation();
+        unregisterPolicyMixes();
+    }
+}
+
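+// Registers a recording injection mix on the remote submix address, plays into it and then
+// fuzzes capture requests whose attributes may match the registered source rules.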
+class AudioPolicyManagerFuzzerDPMixRecordInjection : public AudioPolicyManagerFuzzerDynamicPolicy {
+   public:
+    explicit AudioPolicyManagerFuzzerDPMixRecordInjection(FuzzedDataProvider *fdp);
+    ~AudioPolicyManagerFuzzerDPMixRecordInjection() override;
+    void process() override;
+
+   protected:
+    bool initialize() override;
+    void recordingInjection();
+
+    std::unique_ptr<RecordingActivityTracker> mTracker;
+
+    std::vector<PolicyMixTuple> mSourceRules = {
+        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_CAMCORDER, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
+        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_MIC, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
+        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_VOICE_COMMUNICATION,
+         RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}};
+
+    struct audio_port_v7 mExtractionPort;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_config_t mAudioConfig;
+};
+
+AudioPolicyManagerFuzzerDPMixRecordInjection::AudioPolicyManagerFuzzerDPMixRecordInjection(
+        FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerDynamicPolicy(fdp) {
+    const uint32_t numRules = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numRules; ++i) {
+        PolicyMixTuple rule = {getValueFromVector<audio_usage_t>(mFdp, kAudioUsages),
+                               getValueFromVector<audio_source_t>(mFdp, kAudioSources),
+                               RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET};
+        mSourceRules.push_back(rule);
+    }
+}
+
+AudioPolicyManagerFuzzerDPMixRecordInjection::~AudioPolicyManagerFuzzerDPMixRecordInjection() {
+    // Guard against a failed initialize(), in which case mManager may not exist.
+    if (mManager != nullptr) {
+        mManager->stopOutput(mPortId);
+    }
+}
+
+bool AudioPolicyManagerFuzzerDPMixRecordInjection::initialize() {
+    if (!AudioPolicyManagerFuzzerDynamicPolicy::initialize()) {
+        return false;
+    }
+
+    mTracker.reset(new RecordingActivityTracker());
+
+    mAudioConfig = AUDIO_CONFIG_INITIALIZER;
+    mAudioConfig.channel_mask =
+        getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks);
+    mAudioConfig.format = getValueFromVector<audio_format_t>(mFdp, kAudioFormats);
+    mAudioConfig.sample_rate = getValueFromVector<uint32_t>(mFdp, kSamplingRates);
+    status_t ret = addPolicyMix(getValueFromVector<int>(mFdp, kMixTypes),
+                                getValueFromVector<int>(mFdp, kMixRouteFlags),
+                                getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
+                                mMixAddress, mAudioConfig, mSourceRules);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+
+    struct audio_port_v7 injectionPort;
+    findDevicePort(AUDIO_PORT_ROLE_SINK, getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
+                   mMixAddress, &injectionPort);
+
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_usage_t usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT,
+                               AUDIO_FLAG_NONE, ""};
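+    // Pass the mix address to the playback request through the attributes tags ("addr=<address>").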
+    std::string tags = std::string("addr=") + mMixAddress;
+    strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
+    getOutputForAttr(&selectedDeviceId, mAudioConfig.format, mAudioConfig.channel_mask,
+                     mAudioConfig.sample_rate /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
+                     nullptr /*output*/, &mPortId, attr);
+    ret = mManager->startOutput(mPortId);
+    if (ret != NO_ERROR) {
+        return false;
+    }
+    getDeviceIdFromPatch(mClient->getLastAddedPatch());
+    if (!findDevicePort(AUDIO_PORT_ROLE_SOURCE,
+                        getValueFromVector<audio_devices_t>(mFdp, kAudioDevices), mMixAddress,
+                        &mExtractionPort)) {
+        return false;
+    }
+
+    return true;
+}
+
+void AudioPolicyManagerFuzzerDPMixRecordInjection::recordingInjection() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numTestCases; ++i) {
+        audio_attributes_t attr;
+        attr.content_type = getValueFromVector<audio_content_type_t>(mFdp, kAudioContentTypes);
+        attr.usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
+        attr.source = getValueFromVector<audio_source_t>(mFdp, kAudioSources);
+        attr.flags = getValueFromVector<audio_flags_mask_t>(mFdp, kAudioFlagMasks);
+        std::string tags(mFdp->ConsumeBool() ? "" : "addr=remote_submix_media");
+        strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
+
+        audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
+        audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+        getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, mAudioConfig.format,
+                        mAudioConfig.channel_mask, mAudioConfig.sample_rate, AUDIO_INPUT_FLAG_NONE,
+                        &portId);
+    }
+}
+
+void AudioPolicyManagerFuzzerDPMixRecordInjection::process() {
+    if (initialize()) {
+        recordingInjection();
+        registerPolicyMixes();
+        fuzzPatchCreation();
+        unregisterPolicyMixes();
+    }
+}
+
+using DeviceConnectionTestParams =
+    std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/>;
+
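+// Connects devices with fuzzed type, name and address, then requests outputs or inputs that are
+// explicitly routed to the connected device port.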
+class AudioPolicyManagerFuzzerDeviceConnection
+    : public AudioPolicyManagerFuzzerWithConfigurationFile {
+   public:
+    explicit AudioPolicyManagerFuzzerDeviceConnection(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerWithConfigurationFile(fdp) {}
+    void process() override;
+
+   protected:
+    void setDeviceConnectionState();
+    void explicitlyRoutingAfterConnection();
+};
+
+void AudioPolicyManagerFuzzerDeviceConnection::setDeviceConnectionState() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numTestCases; ++i) {
+        const audio_devices_t type = getValueFromVector<audio_devices_t>(mFdp, kAudioDevices);
+        const std::string name = mFdp->ConsumeRandomLengthString();
+        const std::string address = mFdp->ConsumeRandomLengthString();
+        mManager->setDeviceConnectionState(
+            type, getValueFromVector<audio_policy_dev_state_t>(mFdp, kAudioPolicyDeviceStates),
+            address.c_str(), name.c_str(), getValueFromVector<audio_format_t>(mFdp, kAudioFormats));
+    }
+}
+
+void AudioPolicyManagerFuzzerDeviceConnection::explicitlyRoutingAfterConnection() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (int i = 0; i < numTestCases; ++i) {
+        const audio_devices_t type = getValueFromVector<audio_devices_t>(mFdp, kAudioDevices);
+        const std::string name = mFdp->ConsumeRandomLengthString();
+        const std::string address = mFdp->ConsumeRandomLengthString();
+        mManager->setDeviceConnectionState(
+            type, getValueFromVector<audio_policy_dev_state_t>(mFdp, kAudioPolicyDeviceStates),
+            address.c_str(), name.c_str(), getValueFromVector<audio_format_t>(mFdp, kAudioFormats));
+
+        audio_port_v7 devicePort;
+        const audio_port_role_t role =
+            audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
+        findDevicePort(role, type, address, &devicePort);
+
+        audio_port_handle_t routedPortId = devicePort.id;
+        // Try to start an input or output according to the device type
+        if (audio_is_output_devices(type)) {
+            getOutputForAttr(&routedPortId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+                             getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
+                             getValueFromVector<uint32_t>(mFdp, kSamplingRates),
+                             AUDIO_OUTPUT_FLAG_NONE);
+        } else if (audio_is_input_device(type)) {
+            RecordingActivityTracker tracker;
+            getInputForAttr({}, tracker.getRiid(), &routedPortId,
+                            getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+                            getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks),
+                            getValueFromVector<uint32_t>(mFdp, kSamplingRates),
+                            AUDIO_INPUT_FLAG_NONE);
+        }
+    }
+}
+
+void AudioPolicyManagerFuzzerDeviceConnection::process() {
+    if (initialize()) {
+        setDeviceConnectionState();
+        explicitlyRoutingAfterConnection();
+        fuzzPatchCreation();
+    }
+}
+
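+// Runs against the TV test configuration: connects a fuzzed device, opens an output with the
+// requested flags and inspects the selected output descriptor before disconnecting the device.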
+class AudioPolicyManagerTVFuzzer : public AudioPolicyManagerFuzzerWithConfigurationFile {
+   public:
+    explicit AudioPolicyManagerTVFuzzer(FuzzedDataProvider *fdp)
+        : AudioPolicyManagerFuzzerWithConfigurationFile(fdp) {}
+    void process() override;
+
+   protected:
+    std::string getConfigFile() override;
+    void testHDMIPortSelection(audio_output_flags_t flags);
+
+    static inline const std::string sTvConfig =
+            sExecutableDir + "data/test_tv_apm_configuration.xml";
+};
+
+std::string AudioPolicyManagerTVFuzzer::getConfigFile() { return fuzzXML(sTvConfig); }
+
+void AudioPolicyManagerTVFuzzer::testHDMIPortSelection(audio_output_flags_t flags) {
+    audio_devices_t audioDevice = getValueFromVector<audio_devices_t>(mFdp, kAudioDevices);
+    audio_format_t audioFormat = getValueFromVector<audio_format_t>(mFdp, kAudioFormats);
+    status_t ret = mManager->setDeviceConnectionState(
+        audioDevice, AUDIO_POLICY_DEVICE_STATE_AVAILABLE, "" /*address*/, "" /*name*/, audioFormat);
+    if (ret != NO_ERROR) {
+        return;
+    }
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    getOutputForAttr(&selectedDeviceId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+                     getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
+                     getValueFromVector<uint32_t>(mFdp, kSamplingRates), flags, &output, &portId);
+    sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
+    if (outDesc.get() == nullptr) {
+        return;
+    }
+    audio_port_v7 port = {};
+    outDesc->toAudioPort(&port);
+    mManager->releaseOutput(portId);
+    mManager->setDeviceConnectionState(audioDevice, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                       "" /*address*/, "" /*name*/, audioFormat);
+}
+
+void AudioPolicyManagerTVFuzzer::process() {
+    if (initialize()) {
+        testHDMIPortSelection(getValueFromVector<audio_output_flags_t>(mFdp, kAudioOutputFlags));
+        fuzzPatchCreation();
+    }
+}
+
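+// Fuzzer entry point: instantiates and runs each fuzzer variant in turn until the input data
+// is exhausted.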
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    FuzzedDataProvider fdp(data, size);
+    while (fdp.remaining_bytes() > 0) {
+        AudioPolicyManagerFuzzer audioPolicyManagerFuzzer(&fdp);
+        audioPolicyManagerFuzzer.process();
+
+        AudioPolicyManagerFuzzerMsd audioPolicyManagerFuzzerMsd(&fdp);
+        audioPolicyManagerFuzzerMsd.process();
+
+        AudioPolicyManagerFuzzerWithConfigurationFile audioPolicyManagerFuzzerWithConfigurationFile(
+            &fdp);
+        audioPolicyManagerFuzzerWithConfigurationFile.process();
+
+        AudioPolicyManagerFuzzerDynamicPolicy audioPolicyManagerFuzzerDynamicPolicy(&fdp);
+        audioPolicyManagerFuzzerDynamicPolicy.process();
+
+        AudioPolicyManagerFuzzerDPNoRemoteSubmixModule
+            audioPolicyManagerFuzzerDPNoRemoteSubmixModule(&fdp);
+        audioPolicyManagerFuzzerDPNoRemoteSubmixModule.process();
+
+        AudioPolicyManagerFuzzerDPPlaybackReRouting audioPolicyManagerFuzzerDPPlaybackReRouting(
+            &fdp);
+        audioPolicyManagerFuzzerDPPlaybackReRouting.process();
+
+        AudioPolicyManagerFuzzerDPMixRecordInjection audioPolicyManagerFuzzerDPMixRecordInjection(
+            &fdp);
+        audioPolicyManagerFuzzerDPMixRecordInjection.process();
+
+        AudioPolicyManagerFuzzerDeviceConnection audioPolicyManagerFuzzerDeviceConnection(&fdp);
+        audioPolicyManagerFuzzerDeviceConnection.process();
+
+        AudioPolicyManagerTVFuzzer audioPolicyManagerTVFuzzer(&fdp);
+        audioPolicyManagerTVFuzzer.process();
+    }
+    return 0;
+}
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
new file mode 100644
index 0000000..22ee256
--- /dev/null
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -0,0 +1,36 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "audiopolicyfuzzer_configuration_files",
+    srcs: [
+        "test_audio_policy_configuration.xml",
+        "test_audio_policy_msd_configuration.xml",
+        "test_audio_policy_primary_only_configuration.xml",
+        "test_tv_apm_configuration.xml",
+    ],
+}
diff --git a/services/audiopolicy/fuzzer/resources/test_audio_policy_configuration.xml b/services/audiopolicy/fuzzer/resources/test_audio_policy_configuration.xml
new file mode 100644
index 0000000..7e26c33
--- /dev/null
+++ b/services/audiopolicy/fuzzer/resources/test_audio_policy_configuration.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000,11025,16000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                </devicePort>
+                <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"
+                            role="sink" address="hfp_client_out">
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+                            role="source" address="hfp_client_in">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Hdmi-In Mic"/>
+                <route type="mix" sink="Hdmi"
+                       sources="primary output"/>
+                <route type="mix" sink="BT SCO"
+                       sources="mixport_bt_hfp_output"/>
+                <route type="mix" sink="mixport_bt_hfp_input"
+                       sources="BT SCO Headset Mic"/>
+            </routes>
+        </module>
+
+        <!-- Remote Submix module -->
+        <module name="r_submix" halVersion="2.0">
+            <attachedDevices>
+                <item>Remote Submix In</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="r_submix output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="r_submix input" role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+               <devicePort tagName="Remote Submix Out" type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"  role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+               </devicePort>
+               <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX"  role="source">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Remote Submix Out"
+                       sources="r_submix output"/>
+                <route type="mix" sink="r_submix input"
+                       sources="Remote Submix In"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/fuzzer/resources/test_audio_policy_msd_configuration.xml b/services/audiopolicy/fuzzer/resources/test_audio_policy_msd_configuration.xml
new file mode 100644
index 0000000..5248d79
--- /dev/null
+++ b/services/audiopolicy/fuzzer/resources/test_audio_policy_msd_configuration.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <module name="msd" halVersion="2.0">
+            <attachedDevices>
+                <item>MS12 Input</item>
+                <item>MS12 Output</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="ms12 input" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="ms12 compressed input" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD|AUDIO_OUTPUT_FLAG_NON_BLOCKING">
+                    <profile name="" format="AUDIO_FORMAT_AC3"
+                             samplingRates="32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_E_AC3"
+                             samplingRates="32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                             samplingRates="32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_AC4"
+                             samplingRates="32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+                </mixPort>
+                <!-- The HW AV Sync flag is not required, but is recommended -->
+                <mixPort name="ms12 output" role="sink" flags="AUDIO_INPUT_FLAG_HW_AV_SYNC|AUDIO_INPUT_FLAG_DIRECT">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                    <profile name="" format="AUDIO_FORMAT_AC3"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_E_AC3"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+               <devicePort tagName="MS12 Input" type="AUDIO_DEVICE_OUT_BUS"  role="sink">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                   <profile name="" format="AUDIO_FORMAT_AC3"
+                            samplingRates="32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1"/>
+                   <profile name="" format="AUDIO_FORMAT_E_AC3"
+                            samplingRates="32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                             samplingRates="32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+                   <profile name="" format="AUDIO_FORMAT_AC4"
+                            samplingRates="32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_OUT_MONO|AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_7POINT1"/>
+               </devicePort>
+               <devicePort tagName="MS12 Output" type="AUDIO_DEVICE_IN_BUS"  role="source">
+                   <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="MS12 Input" sources="ms12 input,ms12 compressed input"/>
+                <route type="mix" sink="ms12 output" sources="MS12 Output"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/fuzzer/resources/test_audio_policy_primary_only_configuration.xml b/services/audiopolicy/fuzzer/resources/test_audio_policy_primary_only_configuration.xml
new file mode 100644
index 0000000..15e3773
--- /dev/null
+++ b/services/audiopolicy/fuzzer/resources/test_audio_policy_primary_only_configuration.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/fuzzer/resources/test_tv_apm_configuration.xml b/services/audiopolicy/fuzzer/resources/test_tv_apm_configuration.xml
new file mode 100644
index 0000000..658d3ce
--- /dev/null
+++ b/services/audiopolicy/fuzzer/resources/test_tv_apm_configuration.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="false"/>
+    <modules>
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <!-- Profiles on the HDMI port are explicit for simplicity. In reality they are dynamic -->
+                <!-- Note: ports are intentionally arranged from more specific to less
+                     specific in order to test b/140447125 for HW AV Sync, and similar "explicit matches" -->
+                <mixPort name="tunnel" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="low latency" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="direct" role="source" flags="AUDIO_OUTPUT_FLAG_DIRECT">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+           </mixPorts>
+           <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink" />
+                <devicePort tagName="Out Aux Digital" type="AUDIO_DEVICE_OUT_AUX_DIGITAL" role="sink" />
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker" sources="primary output"/>
+                <route type="mix" sink="Out Aux Digital" sources="primary output,tunnel,direct,low latency"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/manager/Android.mk b/services/audiopolicy/manager/Android.mk
deleted file mode 100644
index cae6cfa..0000000
--- a/services/audiopolicy/manager/Android.mk
+++ /dev/null
@@ -1,30 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyFactory.cpp
-
-LOCAL_SHARED_LIBRARIES := \
-    libaudiopolicymanagerdefault
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_C_INCLUDES += \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_CFLAGS := -Wall -Werror
-
-LOCAL_MODULE:= libaudiopolicymanager
-
-include $(BUILD_SHARED_LIBRARY)
-
-endif #ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1)
diff --git a/services/audiopolicy/manager/AudioPolicyFactory.cpp b/services/audiopolicy/manager/AudioPolicyFactory.cpp
deleted file mode 100644
index 476a1ec..0000000
--- a/services/audiopolicy/manager/AudioPolicyFactory.cpp
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <AudioPolicyManager.h>
-
-namespace android {
-
-extern "C" AudioPolicyInterface* createAudioPolicyManager(
-        AudioPolicyClientInterface *clientInterface)
-{
-    AudioPolicyManager *apm = new AudioPolicyManager(clientInterface);
-    status_t status = apm->initialize();
-    if (status != NO_ERROR) {
-        delete apm;
-        apm = nullptr;
-    }
-    return apm;
-}
-
-extern "C" void destroyAudioPolicyManager(AudioPolicyInterface *interface)
-{
-    delete interface;
-}
-
-} // namespace android
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 577b42f..0165dc8 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_library_shared {
     name: "libaudiopolicymanagerdefault",
 
@@ -25,6 +34,8 @@
         // a dependency on it in the device makefile. There will be no build time
         // conflict with libaudiopolicyenginedefault.
         "libaudiopolicyenginedefault",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c5c13e9..83a4a37 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -29,32 +29,32 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
-#define AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH 128
-#define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
-#define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
-        "audio_policy_configuration_a2dp_offload_disabled.xml"
-#define AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME \
-        "audio_policy_configuration_bluetooth_legacy_hal.xml"
-
 #include <algorithm>
 #include <inttypes.h>
+#include <map>
 #include <math.h>
 #include <set>
 #include <unordered_set>
 #include <vector>
+
+#include <Serializer.h>
+#include <cutils/bitops.h>
 #include <cutils/properties.h>
-#include <utils/Log.h>
 #include <media/AudioParameter.h>
+#include <policy.h>
 #include <private/android_filesystem_config.h>
 #include <system/audio.h>
 #include <system/audio_config.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <utils/Log.h>
+
 #include "AudioPolicyManager.h"
-#include <Serializer.h>
 #include "TypeConverter.h"
-#include <policy.h>
 
 namespace android {
 
+using content::AttributionSourceState;
+
 //FIXME: workaround for truncated touch sounds
 // to be removed when the problem is handled by system UI
 #define TOUCH_SOUND_FIXED_DELAY_MS 100
@@ -64,11 +64,11 @@
 constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
 
 // Compressed formats for MSD module, ordered from most preferred to least preferred.
-static const std::vector<audio_format_t> compressedFormatsOrder = {{
-        AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
+static const std::vector<audio_format_t> msdCompressedFormatsOrder = {{
+        AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
         AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
 // Channel masks for MSD module, 3D > 2D > 1D ordering (most preferred to least preferred).
-static const std::vector<audio_channel_mask_t> surroundChannelMasksOrder = {{
+static const std::vector<audio_channel_mask_t> msdSurroundChannelMasksOrder = {{
         AUDIO_CHANNEL_OUT_3POINT1POINT2, AUDIO_CHANNEL_OUT_3POINT0POINT2,
         AUDIO_CHANNEL_OUT_2POINT1POINT2, AUDIO_CHANNEL_OUT_2POINT0POINT2,
         AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_STEREO }};
@@ -209,6 +209,9 @@
             // Reset active device codec
             device->setEncodedFormat(AUDIO_FORMAT_DEFAULT);
 
+            // remove device from mReportedFormatsMap cache
+            mReportedFormatsMap.erase(device);
+
             } break;
 
         default:
@@ -246,6 +249,7 @@
                     if ((state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) ||
                             (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) &&
                                 (desc->mDirectOpenCount == 0))) {
+                        clearAudioSourcesForOutput(output);
                         closeOutput(output);
                     }
                 }
@@ -260,15 +264,15 @@
         } else {
             checkCloseOutputs();
         }
-
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
-        }
+        (void)updateCallRouting(false /*fromCache*/);
+        std::vector<audio_io_handle_t> outputsToReopen;
         const DeviceVector msdOutDevices = getMsdAudioOutDevices();
+        const DeviceVector activeMediaDevices =
+                mEngine->getActiveMediaDevices(mAvailableOutputDevices);
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-            if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (desc != mPrimaryOutput)) {
+            if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
+                (desc != mPrimaryOutput))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
                 // do not force device change on duplicated output because if device is 0, it will
                 // also force a device 0 for the two outputs it is duplicated to which may override
@@ -280,6 +284,28 @@
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
                 setOutputDevices(desc, newDevices, force, 0);
             }
+            if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
+                    !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
+                    desc->supportsDevicesForPlayback(activeMediaDevices)) {
+                // Reopen the output to query the dynamic profiles when there are no active
+                // clients or when all active clients will be rerouted. Otherwise, set the flag
+                // `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
+                // can be reopened to query dynamic profiles when all clients are inactive.
+                if (areAllActiveTracksRerouted(desc)) {
+                    outputsToReopen.push_back(mOutputs.keyAt(i));
+                } else {
+                    desc->mPendingReopenToQueryProfiles = true;
+                }
+            }
+            if (!desc->supportsDevicesForPlayback(activeMediaDevices)) {
+                // Clear the flag that was previously set for re-querying profiles.
+                desc->mPendingReopenToQueryProfiles = false;
+            }
+        }
+        for (const auto& output : outputsToReopen) {
+            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+            closeOutput(output);
+            openOutputWithProfileAndDevice(desc->mProfile, activeMediaDevices);
         }
 
         if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
@@ -337,6 +363,9 @@
             mAvailableInputDevices.remove(device);
 
             checkInputsForDevice(device, state);
+
+            // remove device from mReportedFormatsMap cache
+            mReportedFormatsMap.erase(device);
         } break;
 
         default:
@@ -352,11 +381,12 @@
         // getDeviceForStrategy() cache
         updateDevicesAndOutputs();
 
-        if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-            DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-            updateCallRouting(newDevices);
+        (void)updateCallRouting(false /*fromCache*/);
+        // Reconnect Audio Source
+        for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
+            auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
+            checkAudioSourceForAttributes(attributes);
         }
-
         if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
             cleanUpForDevice(device);
         }
@@ -441,7 +471,7 @@
     // Case 1: A2DP active device switches from primary to primary
     // module
     // Case 2: A2DP device config changes on primary module.
-    if (audio_is_a2dp_out_device(device)) {
+    if (audio_is_a2dp_out_device(device) && hasPrimaryOutput()) {
         sp<HwModule> module = mHwModules.getModuleForDeviceType(device, encodedFormat);
         audio_module_handle_t primaryHandle = mPrimaryOutput->getModuleHandle();
         if (availablePrimaryOutputDevices().contains(devDesc) &&
@@ -461,7 +491,16 @@
             }
         }
     }
-
+    auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+       sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+       // Mute media strategies and delay the device switch so that the music tail
+       // is not sent into the earpiece or headset.
+       setStrategyMute(musicStrategy, true, desc);
+       setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
+          mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
+                                              nullptr, true /*fromCache*/).types());
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status = setDeviceConnectionState(device,
@@ -507,30 +546,61 @@
     return status;
 }
 
-uint32_t AudioPolicyManager::updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs)
+DeviceVector AudioPolicyManager::selectBestRxSinkDevicesForCall(bool fromCache)
+{
+    DeviceVector rxSinkDevices = mEngine->getOutputDevicesForAttributes(
+                attributes_initializer(AUDIO_USAGE_VOICE_COMMUNICATION), nullptr, fromCache);
+    if (!rxSinkDevices.isEmpty() && mAvailableOutputDevices.contains(rxSinkDevices.itemAt(0))) {
+        auto rxSinkDevice = rxSinkDevices.itemAt(0);
+        auto telephonyRxModule = mHwModules.getModuleForDeviceType(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
+        // retrieve Rx Source device descriptor
+        sp<DeviceDescriptor> rxSourceDevice = mAvailableInputDevices.getDevice(
+                    AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT);
+
+        // RX Telephony and Rx sink devices are declared by Primary Audio HAL
+        if (isPrimaryModule(telephonyRxModule) && (telephonyRxModule->getHalVersionMajor() >= 3) &&
+                telephonyRxModule->supportsPatch(rxSourceDevice, rxSinkDevice)) {
+            ALOGW("%s() device %s using HW Bridge", __func__, rxSinkDevice->toString().c_str());
+            return DeviceVector(rxSinkDevice);
+        }
+    }
+    // Note that despite the fact that getNewOutputDevices() is called on the primary output,
+    // the device returned is not necessarily reachable via this output
+    // (it is filtered later by setOutputDevices())
+    return getNewOutputDevices(mPrimaryOutput, fromCache);
+}
+
+status_t AudioPolicyManager::updateCallRouting(bool fromCache, uint32_t delayMs, uint32_t *waitMs)
+{
+    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
+        DeviceVector rxDevices = selectBestRxSinkDevicesForCall(fromCache);
+        return updateCallRoutingInternal(rxDevices, delayMs, waitMs);
+    }
+    return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::updateCallRoutingInternal(
+        const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs)
 {
     bool createTxPatch = false;
     bool createRxPatch = false;
     uint32_t muteWaitMs = 0;
-
     if(!hasPrimaryOutput() ||
             mPrimaryOutput->devices().onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_STUB)) {
-        return muteWaitMs;
+        return INVALID_OPERATION;
     }
-    ALOG_ASSERT(!rxDevices.isEmpty(), "updateCallRouting() no selected output device");
+    ALOG_ASSERT(!rxDevices.isEmpty(), "%s() no selected output device", __func__);
 
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
-    ALOG_ASSERT(txSourceDevice != 0, "updateCallRouting() input selected device not available");
+    ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
 
-    ALOGV("updateCallRouting device rxDevice %s txDevice %s",
+    ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
 
-    // release existing RX patch if any
-    if (mCallRxPatch != 0) {
-        releaseAudioPatchInternal(mCallRxPatch->getHandle());
-        mCallRxPatch.clear();
-    }
+    disconnectTelephonyRxAudioSource();
     // release TX patch if any
     if (mCallTxPatch != 0) {
         releaseAudioPatchInternal(mCallTxPatch->getHandle());
@@ -556,8 +626,8 @@
             (telephonyRxModule->getHalVersionMajor() >= 3)) {
         if (rxSourceDevice == 0 || txSinkDevice == 0) {
             // RX / TX Telephony device(s) is(are) not currently available
-            ALOGE("updateCallRouting() no telephony Tx and/or RX device");
-            return muteWaitMs;
+            ALOGE("%s() no telephony Tx and/or RX device", __func__);
+            return INVALID_OPERATION;
         }
         // createAudioPatchInternal now supports both HW / SW bridging
         createRxPatch = true;
@@ -578,8 +648,7 @@
     if (!createRxPatch) {
         muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
-        mCallRxPatch = createTelephonyPatch(true /*isRx*/, rxDevices.itemAt(0), delayMs);
-
+        connectTelephonyRxAudioSource();
         // If the TX device is on the primary HW module but RX device is
         // on other HW module, SinkMetaData of telephony input should handle it
         // assuming the device uses audio HAL V5.0 and above
@@ -596,8 +665,10 @@
         }
         mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
     }
-
-    return muteWaitMs;
+    if (waitMs != nullptr) {
+        *waitMs = muteWaitMs;
+    }
+    return NO_ERROR;
 }
 
 sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
@@ -642,6 +713,24 @@
     return false;
 }
 
+void AudioPolicyManager::connectTelephonyRxAudioSource()
+{
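+    // Tear down any previous call RX source, then start a software audio source on the
+    // TELEPHONY_RX input device with the voice call attributes (replacing the explicit
+    // call RX patch used previously).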
+    disconnectTelephonyRxAudioSource();
+    const struct audio_port_config source = {
+        .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
+        .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
+    };
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+    status_t status = startAudioSource(&source, &aa, &mCallRxSourceClientPort, 0/*uid*/);
+    ALOGE_IF(status != NO_ERROR, "%s failed to start Telephony Rx AudioSource", __func__);
+}
+
+void AudioPolicyManager::disconnectTelephonyRxAudioSource()
+{
+    stopAudioSource(mCallRxSourceClientPort);
+    mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+}
+
 void AudioPolicyManager::setPhoneState(audio_mode_t state)
 {
     ALOGV("setPhoneState() state %d", state);
@@ -697,28 +786,22 @@
     }
 
     if (hasPrimaryOutput()) {
-        // Note that despite the fact that getNewOutputDevices() is called on the primary output,
-        // the device returned is not necessarily reachable via this output
-        DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
-        // force routing command to audio hardware when ending call
-        // even if no device change is needed
-        if (isStateInCall(oldState) && rxDevices.isEmpty()) {
-            rxDevices = mPrimaryOutput->devices();
-        }
-
         if (state == AUDIO_MODE_IN_CALL) {
-            updateCallRouting(rxDevices, delayMs);
-        } else if (oldState == AUDIO_MODE_IN_CALL) {
-            if (mCallRxPatch != 0) {
-                releaseAudioPatchInternal(mCallRxPatch->getHandle());
-                mCallRxPatch.clear();
-            }
-            if (mCallTxPatch != 0) {
-                releaseAudioPatchInternal(mCallTxPatch->getHandle());
-                mCallTxPatch.clear();
-            }
-            setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+            (void)updateCallRouting(false /*fromCache*/, delayMs);
         } else {
+            DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
+            // force routing command to audio hardware when ending call
+            // even if no device change is needed
+            if (isStateInCall(oldState) && rxDevices.isEmpty()) {
+                rxDevices = mPrimaryOutput->devices();
+            }
+            if (oldState == AUDIO_MODE_IN_CALL) {
+                disconnectTelephonyRxAudioSource();
+                if (mCallTxPatch != 0) {
+                    releaseAudioPatchInternal(mCallTxPatch->getHandle());
+                    mCallTxPatch.clear();
+                }
+            }
             setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
         }
     }
@@ -780,16 +863,7 @@
     }
 
     updateCallAndOutputRouting(forceVolumeReeval, delayMs);
-
-    for (const auto& activeDesc : mInputs.getActiveInputs()) {
-        auto newDevice = getNewInputDevice(activeDesc);
-        // Force new input selection if the new device can not be reached via current input
-        if (activeDesc->mProfile->getSupportedDevices().contains(newDevice)) {
-            setInputDevice(activeDesc->mIoHandle, newDevice);
-        } else {
-            closeInput(activeDesc->mIoHandle);
-        }
-    }
+    updateInputRouting();
 }
 
 void AudioPolicyManager::setSystemProperty(const char* property, const char* value)
@@ -893,7 +967,8 @@
     // Only honor audibility enforced when required. The client will be
     // forced to reconnect if the forced usage changes.
     if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
-        dstAttr->flags &= ~AUDIO_FLAG_AUDIBILITY_ENFORCED;
+        dstAttr->flags = static_cast<audio_flags_mask_t>(
+                dstAttr->flags & ~AUDIO_FLAG_AUDIBILITY_ENFORCED);
     }
 
     return NO_ERROR;
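
The static_cast additions in this hunk and the next are purely about type safety: once audio_flags_mask_t is treated as a distinct enum type, a bitwise expression on it yields a plain integer that no longer converts back implicitly. A minimal stand-alone illustration (hypothetical enum, not the real Android type):

#include <cstdio>

enum flags_mask_t : unsigned {
    FLAG_NONE                = 0u,
    FLAG_AUDIBILITY_ENFORCED = 1u << 0,
    FLAG_SECURE              = 1u << 1,
};

struct attributes_t {
    flags_mask_t flags;
};

int main() {
    attributes_t attr{static_cast<flags_mask_t>(FLAG_AUDIBILITY_ENFORCED | FLAG_SECURE)};
    // The bitwise expression is an unsigned int, so assigning it back needs an explicit cast.
    attr.flags = static_cast<flags_mask_t>(attr.flags & ~FLAG_AUDIBILITY_ENFORCED);
    std::printf("flags = %#x\n", static_cast<unsigned>(attr.flags));
    return 0;
}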
@@ -925,7 +1000,7 @@
         return status;
     }
     if (auto it = mAllowedCapturePolicies.find(uid); it != end(mAllowedCapturePolicies)) {
-        resultAttr->flags |= it->second;
+        resultAttr->flags = static_cast<audio_flags_mask_t>(resultAttr->flags | it->second);
     }
     *stream = mEngine->getStreamTypeForAttributes(*resultAttr);
 
@@ -1020,11 +1095,9 @@
     *output = AUDIO_IO_HANDLE_NONE;
     if (!msdDevices.isEmpty()) {
         *output = getOutputForDevices(msdDevices, session, *stream, config, flags);
-        sp<DeviceDescriptor> device = outputDevices.isEmpty() ? nullptr : outputDevices.itemAt(0);
-        if (*output != AUDIO_IO_HANDLE_NONE && setMsdPatch(device) == NO_ERROR) {
+        if (*output != AUDIO_IO_HANDLE_NONE && setMsdOutputPatches(&outputDevices) == NO_ERROR) {
             ALOGV("%s() Using MSD devices %s instead of devices %s",
                   __func__, msdDevices.toString().c_str(), outputDevices.toString().c_str());
-            outputDevices = msdDevices;
         } else {
             *output = AUDIO_IO_HANDLE_NONE;
         }
@@ -1038,6 +1111,12 @@
     }
 
     *selectedDeviceId = getFirstDeviceId(outputDevices);
+    for (auto &outputDevice : outputDevices) {
+        if (outputDevice->getId() == getConfig().getDefaultOutputDevice()->getId()) {
+            *selectedDeviceId = outputDevice->getId();
+            break;
+        }
+    }
 
     if (outputDevices.onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
         *outputType = API_OUTPUT_TELEPHONY_TX;
@@ -1054,7 +1133,7 @@
                                               audio_io_handle_t *output,
                                               audio_session_t session,
                                               audio_stream_type_t *stream,
-                                              uid_t uid,
+                                              const AttributionSourceState& attributionSource,
                                               const audio_config_t *config,
                                               audio_output_flags_t *flags,
                                               audio_port_handle_t *selectedDeviceId,
@@ -1066,6 +1145,8 @@
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
         return INVALID_OPERATION;
     }
+    const uid_t uid = VALUE_OR_RETURN_STATUS(
+        aidl2legacy_int32_t_uid_t(attributionSource.uid));
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
     audio_attributes_t resultAttr;
     bool isRequestedDeviceForExclusiveUse = false;
@@ -1165,7 +1246,7 @@
                 (config->channel_mask == desc->getChannelMask()) &&
                 (session == desc->mDirectClientSession)) {
                 desc->mDirectOpenCount++;
-                ALOGI("%s reusing direct output %d for session %d", __func__,
+                ALOGV("%s reusing direct output %d for session %d", __func__,
                     mOutputs.keyAt(i), session);
                 *output = mOutputs.keyAt(i);
                 return NO_ERROR;
@@ -1180,24 +1261,9 @@
     sp<SwAudioOutputDescriptor> outputDesc =
             new SwAudioOutputDescriptor(profile, mpClientInterface);
 
-    String8 address = getFirstDeviceAddress(devices);
-
-    // MSD patch may be using the only output stream that can service this request. Release
-    // MSD patch to prioritize this request over any active output on MSD.
-    AudioPatchCollection msdPatches = getMsdPatches();
-    for (size_t i = 0; i < msdPatches.size(); i++) {
-        const auto& patch = msdPatches[i];
-        for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
-            const struct audio_port_config *sink = &patch->mPatch.sinks[j];
-            if (sink->type == AUDIO_PORT_TYPE_DEVICE &&
-                    devices.containsDeviceWithType(sink->ext.device.type) &&
-                    (address.isEmpty() || strncmp(sink->ext.device.address, address.string(),
-                            AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0)) {
-                releaseAudioPatch(patch->getHandle(), mUidCached);
-                break;
-            }
-        }
-    }
+    // An MSD patch may be using the only output stream that can service this request. Release
+    // all MSD patches to prioritize this request over any active output on MSD.
+    releaseMsdOutputPatches(devices);
 
     status_t status = outputDesc->open(config, devices, stream, flags, output);
 
@@ -1243,7 +1309,8 @@
 
     // Discard haptic channel mask when forcing muting haptic channels.
     audio_channel_mask_t channelMask = forceMutingHaptic
-            ? (config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL) : config->channel_mask;
+            ? static_cast<audio_channel_mask_t>(config->channel_mask & ~AUDIO_CHANNEL_HAPTIC_ALL)
+            : config->channel_mask;
 
     // open a direct output if required by specified parameters
     //force direct flag if offload flag is set: offloading implies a direct output stream
@@ -1299,7 +1366,8 @@
 
         // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(outputs, *flags, config->format, channelMask, config->sample_rate);
+        output = selectOutput(
+                outputs, *flags, config->format, channelMask, config->sample_rate, session);
     }
     ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
             "sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -1319,7 +1387,7 @@
                                                         mAvailableOutputDevices);
 }
 
-const AudioPatchCollection AudioPolicyManager::getMsdPatches() const {
+const AudioPatchCollection AudioPolicyManager::getMsdOutputPatches() const {
     AudioPatchCollection msdPatches;
     sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
     if (msdModule != 0) {
@@ -1337,49 +1405,47 @@
     return msdPatches;
 }
 
-status_t AudioPolicyManager::getBestMsdAudioProfileFor(const sp<DeviceDescriptor> &outputDevice,
-        bool hwAvSync, audio_port_config *sourceConfig, audio_port_config *sinkConfig) const
-{
-    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    if (msdModule == nullptr) {
-        ALOGE("%s() unable to get MSD module", __func__);
-        return NO_INIT;
-    }
-    sp<HwModule> deviceModule = mHwModules.getModuleForDevice(outputDevice, AUDIO_FORMAT_DEFAULT);
-    if (deviceModule == nullptr) {
-        ALOGE("%s() unable to get module for %s", __func__, outputDevice->toString().c_str());
-        return NO_INIT;
-    }
-    const InputProfileCollection &inputProfiles = msdModule->getInputProfiles();
+status_t AudioPolicyManager::getMsdProfiles(bool hwAvSync,
+                                            const InputProfileCollection &inputProfiles,
+                                            const OutputProfileCollection &outputProfiles,
+                                            const sp<DeviceDescriptor> &sourceDevice,
+                                            const sp<DeviceDescriptor> &sinkDevice,
+                                            AudioProfileVector& sourceProfiles,
+                                            AudioProfileVector& sinkProfiles) const {
     if (inputProfiles.isEmpty()) {
-        ALOGE("%s() no input profiles for MSD module", __func__);
+        ALOGE("%s() no input profiles for source module", __func__);
         return NO_INIT;
     }
-    const OutputProfileCollection &outputProfiles = deviceModule->getOutputProfiles();
     if (outputProfiles.isEmpty()) {
-        ALOGE("%s() no output profiles for device %s", __func__, outputDevice->toString().c_str());
+        ALOGE("%s() no output profiles for sink module", __func__);
         return NO_INIT;
     }
-    AudioProfileVector msdProfiles;
-    // Each IOProfile represents a MixPort from audio_policy_configuration.xml
     for (const auto &inProfile : inputProfiles) {
-        if (hwAvSync == ((inProfile->getFlags() & AUDIO_INPUT_FLAG_HW_AV_SYNC) != 0)) {
-            appendAudioProfiles(msdProfiles, inProfile->getAudioProfiles());
+        if (hwAvSync == ((inProfile->getFlags() & AUDIO_INPUT_FLAG_HW_AV_SYNC) != 0) &&
+                inProfile->supportsDevice(sourceDevice)) {
+            appendAudioProfiles(sourceProfiles, inProfile->getAudioProfiles());
         }
     }
-    AudioProfileVector deviceProfiles;
     for (const auto &outProfile : outputProfiles) {
-        if (hwAvSync == ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0)) {
-            appendAudioProfiles(deviceProfiles, outProfile->getAudioProfiles());
+        if (hwAvSync == ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) &&
+                outProfile->supportsDevice(sinkDevice)) {
+            appendAudioProfiles(sinkProfiles, outProfile->getAudioProfiles());
         }
     }
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getBestMsdConfig(bool hwAvSync,
+        const AudioProfileVector &sourceProfiles, const AudioProfileVector &sinkProfiles,
+        audio_port_config *sourceConfig, audio_port_config *sinkConfig) const
+{
     struct audio_config_base bestSinkConfig;
-    status_t result = findBestMatchingOutputConfig(msdProfiles, deviceProfiles,
-            compressedFormatsOrder, surroundChannelMasksOrder, true /*preferHigherSamplingRates*/,
-            bestSinkConfig);
+    status_t result = findBestMatchingOutputConfig(sourceProfiles, sinkProfiles,
+            msdCompressedFormatsOrder, msdSurroundChannelMasksOrder,
+            true /*preferHigherSamplingRates*/, bestSinkConfig);
     if (result != NO_ERROR) {
-        ALOGD("%s() no matching profiles found for device: %s, hwAvSync: %d",
-                __func__, outputDevice->toString().c_str(), hwAvSync);
+        ALOGD("%s() no matching config found for sink, hwAvSync: %d",
+                __func__, hwAvSync);
         return result;
     }
     sinkConfig->sample_rate = bestSinkConfig.sample_rate;
@@ -1390,7 +1456,7 @@
             sinkConfig->flags.output | AUDIO_OUTPUT_FLAG_DIRECT);
     if (audio_is_iec61937_compatible(sinkConfig->format)) {
         // For formats compatible with IEC61937 encapsulation, assume that
-        // the record thread input from MSD is IEC61937 framed (for proportional buffer sizing).
+        // the input is IEC61937 framed (for proportional buffer sizing).
         // Add the AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO flag so downstream HAL can distinguish between
         // raw and IEC61937 framed streams.
         sinkConfig->flags.output = static_cast<audio_output_flags_t>(
@@ -1416,70 +1482,149 @@
     return NO_ERROR;
 }
 
-PatchBuilder AudioPolicyManager::buildMsdPatch(const sp<DeviceDescriptor> &outputDevice) const
+PatchBuilder AudioPolicyManager::buildMsdPatch(bool msdIsSource,
+                                               const sp<DeviceDescriptor> &device) const
 {
     PatchBuilder patchBuilder;
-    patchBuilder.addSource(getMsdAudioInDevice()).addSink(outputDevice);
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    ALOG_ASSERT(msdModule != nullptr, "MSD module not available");
+    sp<HwModule> deviceModule = mHwModules.getModuleForDevice(device, AUDIO_FORMAT_DEFAULT);
+    if (deviceModule == nullptr) {
+        ALOGE("%s() unable to get module for %s", __func__, device->toString().c_str());
+        return patchBuilder;
+    }
+    const InputProfileCollection inputProfiles = msdIsSource ?
+            msdModule->getInputProfiles() : deviceModule->getInputProfiles();
+    const OutputProfileCollection outputProfiles = msdIsSource ?
+            deviceModule->getOutputProfiles() : msdModule->getOutputProfiles();
+
+    const sp<DeviceDescriptor> sourceDevice = msdIsSource ? getMsdAudioInDevice() : device;
+    const sp<DeviceDescriptor> sinkDevice = msdIsSource ?
+            device : getMsdAudioOutDevices().itemAt(0);
+    patchBuilder.addSource(sourceDevice).addSink(sinkDevice);
+
     audio_port_config sourceConfig = patchBuilder.patch()->sources[0];
     audio_port_config sinkConfig = patchBuilder.patch()->sinks[0];
+    AudioProfileVector sourceProfiles;
+    AudioProfileVector sinkProfiles;
     // TODO: Figure out whether MSD module has HW_AV_SYNC flag set in the AP config file.
     // For now, we just forcefully try with HwAvSync first.
-    status_t res = getBestMsdAudioProfileFor(outputDevice, true /*hwAvSync*/,
-            &sourceConfig, &sinkConfig) == NO_ERROR ? NO_ERROR :
-            getBestMsdAudioProfileFor(
-                    outputDevice, false /*hwAvSync*/, &sourceConfig, &sinkConfig);
-    if (res == NO_ERROR) {
-        // Found a matching profile for encoded audio. Re-create PatchBuilder with this config.
-        return (PatchBuilder()).addSource(sourceConfig).addSink(sinkConfig);
+    for (auto hwAvSync : { true, false }) {
+        if (getMsdProfiles(hwAvSync, inputProfiles, outputProfiles, sourceDevice, sinkDevice,
+                sourceProfiles, sinkProfiles) != NO_ERROR) {
+            continue;
+        }
+        if (getBestMsdConfig(hwAvSync, sourceProfiles, sinkProfiles, &sourceConfig,
+                &sinkConfig) == NO_ERROR) {
+            // Found a matching config. Re-create PatchBuilder with this config.
+            return (PatchBuilder()).addSource(sourceConfig).addSink(sinkConfig);
+        }
     }
-    ALOGV("%s() no matching profile found. Fall through to default PCM patch"
+    ALOGV("%s() no matching config found. Fall through to default PCM patch"
             " supporting PCM format conversion.", __func__);
     return patchBuilder;
 }
 
-status_t AudioPolicyManager::setMsdPatch(const sp<DeviceDescriptor> &outputDevice) {
-    sp<DeviceDescriptor> device = outputDevice;
-    if (device == nullptr) {
+status_t AudioPolicyManager::setMsdOutputPatches(const DeviceVector *outputDevices) {
+    DeviceVector devices;
+    if (outputDevices != nullptr && outputDevices->size() > 0) {
+        devices.add(*outputDevices);
+    } else {
         // Use media strategy for unspecified output device. This should only
         // occur on checkForDeviceAndOutputChanges(). Device connection events may
         // therefore invalidate explicit routing requests.
-        DeviceVector devices = mEngine->getOutputDevicesForAttributes(
+        devices = mEngine->getOutputDevicesForAttributes(
                     attributes_initializer(AUDIO_USAGE_MEDIA), nullptr, false /*fromCache*/);
-        LOG_ALWAYS_FATAL_IF(devices.isEmpty(), "no outpudevice to set Msd Patch");
-        device = devices.itemAt(0);
+        LOG_ALWAYS_FATAL_IF(devices.isEmpty(), "no output device to set MSD patch");
     }
-    ALOGV("%s() for device %s", __func__, device->toString().c_str());
-    PatchBuilder patchBuilder = buildMsdPatch(device);
-    const struct audio_patch* patch = patchBuilder.patch();
-    const AudioPatchCollection msdPatches = getMsdPatches();
-    if (!msdPatches.isEmpty()) {
-        LOG_ALWAYS_FATAL_IF(msdPatches.size() > 1,
-                "The current MSD prototype only supports one output patch");
-        sp<AudioPatch> currentPatch = msdPatches.valueAt(0);
-        if (audio_patches_are_equal(&currentPatch->mPatch, patch)) {
-            return NO_ERROR;
+    std::vector<PatchBuilder> patchesToCreate;
+    for (auto i = 0u; i < devices.size(); ++i) {
+        ALOGV("%s() for device %s", __func__, devices[i]->toString().c_str());
+        patchesToCreate.push_back(buildMsdPatch(true /*msdIsSource*/, devices[i]));
+    }
+    // Retain only the MSD patches associated with the requested output devices.
+    // Tear down the others, and create new ones as needed.
+    AudioPatchCollection patchesToRemove = getMsdOutputPatches();
+    for (auto it = patchesToCreate.begin(); it != patchesToCreate.end(); ) {
+        auto retainedPatch = false;
+        for (auto i = 0u; i < patchesToRemove.size(); ++i) {
+            if (audio_patches_are_equal(it->patch(), &patchesToRemove[i]->mPatch)) {
+                patchesToRemove.removeItemsAt(i);
+                retainedPatch = true;
+                break;
+            }
         }
+        if (retainedPatch) {
+            it = patchesToCreate.erase(it);
+            continue;
+        }
+        ++it;
+    }
+    if (patchesToCreate.size() == 0 && patchesToRemove.size() == 0) {
+        return NO_ERROR;
+    }
+    for (auto i = 0u; i < patchesToRemove.size(); ++i) {
+        auto &currentPatch = patchesToRemove.valueAt(i);
         releaseAudioPatch(currentPatch->getHandle(), mUidCached);
     }
-    status_t status = installPatch(__func__, -1 /*index*/, nullptr /*patchHandle*/,
-            patch, 0 /*delayMs*/, mUidCached, nullptr /*patchDescPtr*/);
-    ALOGE_IF(status != NO_ERROR, "%s() error %d creating MSD audio patch", __func__, status);
-    ALOGI_IF(status == NO_ERROR, "%s() Patch created from MSD_IN to "
-           "device:%s (format:%#x channels:%#x samplerate:%d)", __func__,
-             device->toString().c_str(), patch->sources[0].format,
-             patch->sources[0].channel_mask, patch->sources[0].sample_rate);
+    status_t status = NO_ERROR;
+    for (const auto &p : patchesToCreate) {
+        auto currStatus = installPatch(__func__, -1 /*index*/, nullptr /*patchHandle*/,
+                p.patch(), 0 /*delayMs*/, mUidCached, nullptr /*patchDescPtr*/);
+        char message[256];
+        snprintf(message, sizeof(message), "%s() %s: creating MSD patch from device:IN_BUS to "
+            "device:%#x (format:%#x channels:%#x samplerate:%d)", __func__,
+                currStatus == NO_ERROR ? "Success" : "Error",
+                p.patch()->sinks[0].ext.device.type, p.patch()->sources[0].format,
+                p.patch()->sources[0].channel_mask, p.patch()->sources[0].sample_rate);
+        if (currStatus == NO_ERROR) {
+            ALOGD("%s", message);
+        } else {
+            ALOGE("%s", message);
+            if (status == NO_ERROR) {
+                status = currStatus;
+            }
+        }
+    }
     return status;
 }
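
The retain/tear-down/create logic above follows a generic reconciliation pattern; here is a minimal self-contained sketch of it (hypothetical Patch type and equality test, not the AOSP patch API):

#include <algorithm>
#include <cstdio>
#include <string>
#include <vector>

struct Patch { std::string sink; };  // stand-in: a patch identified by its sink device

bool samePatch(const Patch& a, const Patch& b) { return a.sink == b.sink; }

void reconcile(std::vector<Patch> desired, std::vector<Patch> existing) {
    // Keep every desired patch that is already installed; drop it from both lists.
    for (auto it = desired.begin(); it != desired.end();) {
        auto match = std::find_if(existing.begin(), existing.end(),
                                  [&](const Patch& p) { return samePatch(*it, p); });
        if (match != existing.end()) {
            existing.erase(match);
            it = desired.erase(it);
        } else {
            ++it;
        }
    }
    for (const auto& stale : existing)  std::printf("release patch to %s\n", stale.sink.c_str());
    for (const auto& missing : desired) std::printf("create patch to %s\n", missing.sink.c_str());
}

int main() {
    reconcile(/*desired=*/{{"HDMI"}, {"SPDIF"}}, /*existing=*/{{"SPDIF"}, {"SPEAKER"}});
    return 0;
}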
 
+void AudioPolicyManager::releaseMsdOutputPatches(const DeviceVector& devices) {
+    AudioPatchCollection msdPatches = getMsdOutputPatches();
+    for (size_t i = 0; i < msdPatches.size(); i++) {
+        const auto& patch = msdPatches[i];
+        for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
+            const struct audio_port_config *sink = &patch->mPatch.sinks[j];
+            if (sink->type == AUDIO_PORT_TYPE_DEVICE && devices.getDevice(sink->ext.device.type,
+                    String8(sink->ext.device.address), AUDIO_FORMAT_DEFAULT) != nullptr) {
+                releaseAudioPatch(patch->getHandle(), mUidCached);
+                break;
+            }
+        }
+    }
+}
+
 audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
-                                                       audio_output_flags_t flags,
-                                                       audio_format_t format,
-                                                       audio_channel_mask_t channelMask,
-                                                       uint32_t samplingRate)
+                                                   audio_output_flags_t flags,
+                                                   audio_format_t format,
+                                                   audio_channel_mask_t channelMask,
+                                                   uint32_t samplingRate,
+                                                   audio_session_t sessionId)
 {
     LOG_ALWAYS_FATAL_IF(!(format == AUDIO_FORMAT_INVALID || audio_is_linear_pcm(format)),
         "%s called with format %#x", __func__, format);
 
+    // Return the output that the haptic-generating effect is attached to when 1) a session id is
+    // specified, 2) a haptic-generating effect exists for the given session id and 3) the output
+    // that the effect is attached to is among the given outputs.
+    if (sessionId != AUDIO_SESSION_NONE) {
+        audio_io_handle_t hapticGeneratingOutput = mEffects.getIoForSession(
+                sessionId, FX_IID_HAPTICGENERATOR);
+        if (outputs.indexOf(hapticGeneratingOutput) >= 0) {
+            return hapticGeneratingOutput;
+        }
+    }
+
     // Flags disqualifying an output: the match must happen before calling selectOutput()
     static const audio_output_flags_t kExcludedFlags = (audio_output_flags_t)
         (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
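
For the haptic-generating shortcut added at the top of this function, a minimal self-contained sketch of the rule it encodes (hypothetical session/output bookkeeping, not the AOSP effect registry): if the session's HapticGenerator effect is already attached to one of the candidate outputs, pick that output so audio and haptics stay together.

#include <algorithm>
#include <cstdio>
#include <map>
#include <vector>

using io_handle_t = int;
constexpr io_handle_t kIoHandleNone = -1;

io_handle_t selectOutputSketch(const std::vector<io_handle_t>& candidates,
                               const std::map<int, io_handle_t>& hapticEffectIoBySession,
                               int session) {
    auto it = hapticEffectIoBySession.find(session);
    if (it != hapticEffectIoBySession.end() &&
            std::find(candidates.begin(), candidates.end(), it->second) != candidates.end()) {
        return it->second;                 // keep the track on the haptic effect's output
    }
    return candidates.empty() ? kIoHandleNone : candidates.front();  // stand-in fallback
}

int main() {
    std::printf("%d\n", selectOutputSketch({10, 20}, {{7, 20}}, /*session=*/7));  // prints 20
    return 0;
}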
@@ -1765,7 +1910,8 @@
         checkAndSetVolume(curves, client->volumeSource(),
                           curves.getVolumeIndex(outputDesc->devices().types()),
                           outputDesc,
-                          outputDesc->devices().types());
+                          outputDesc->devices().types(), 0 /*delay*/,
+                          outputDesc->useHwGain() /*force*/);
 
         // update the outputs if starting an output with a stream that can affect notification
         // routing
@@ -1914,7 +2060,7 @@
     }
 }
 
-void AudioPolicyManager::releaseOutput(audio_port_handle_t portId)
+bool AudioPolicyManager::releaseOutput(audio_port_handle_t portId)
 {
     ALOGV("%s portId %d", __FUNCTION__, portId);
 
@@ -1927,33 +2073,49 @@
         //
         // Here we just log a warning, instead of a fatal error.
         ALOGW("releaseOutput() no output for client %d", portId);
-        return;
+        return false;
     }
 
     ALOGV("releaseOutput() %d", outputDesc->mIoHandle);
 
+    sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
+    if (outputDesc->isClientActive(client)) {
+        ALOGW("releaseOutput() inactivates portId %d in good faith", portId);
+        stopOutput(portId);
+    }
+
     if (outputDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
         if (outputDesc->mDirectOpenCount <= 0) {
             ALOGW("releaseOutput() invalid open count %d for output %d",
                   outputDesc->mDirectOpenCount, outputDesc->mIoHandle);
-            return;
+            return false;
         }
         if (--outputDesc->mDirectOpenCount == 0) {
             closeOutput(outputDesc->mIoHandle);
             mpClientInterface->onAudioPortListUpdate();
         }
     }
-    // stopOutput() needs to be successfully called before releaseOutput()
-    // otherwise there may be inaccurate stream reference counts.
-    // This is checked in outputDesc->removeClient below.
+
     outputDesc->removeClient(portId);
+    if (outputDesc->mPendingReopenToQueryProfiles && outputDesc->getClientCount() == 0) {
+        // The output is pending a reopen to query dynamic profiles and
+        // has no active clients.
+        closeOutput(outputDesc->mIoHandle);
+        sp<SwAudioOutputDescriptor> newOutputDesc = openOutputWithProfileAndDevice(
+                outputDesc->mProfile, mEngine->getActiveMediaDevices(mAvailableOutputDevices));
+        if (newOutputDesc == nullptr) {
+            ALOGE("%s failed to open output", __func__);
+        }
+        return true;
+    }
+    return false;
 }
 
 status_t AudioPolicyManager::getInputForAttr(const audio_attributes_t *attr,
                                              audio_io_handle_t *input,
                                              audio_unique_id_t riid,
                                              audio_session_t session,
-                                             uid_t uid,
+                                             const AttributionSourceState& attributionSource,
                                              const audio_config_base_t *config,
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
@@ -1972,6 +2134,7 @@
     sp<AudioInputDescriptor> inputDesc;
     sp<RecordClientDescriptor> clientDesc;
     audio_port_handle_t requestedDeviceId = *selectedDeviceId;
+    uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     bool isSoundTrigger;
 
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
@@ -2028,7 +2191,7 @@
         *inputType = API_INPUT_LEGACY;
         device = inputDesc->getDevice();
 
-        ALOGI("%s reusing MMAP input %d for session %d", __FUNCTION__, *input, session);
+        ALOGV("%s reusing MMAP input %d for session %d", __FUNCTION__, *input, session);
         goto exit;
     }
 
@@ -2066,14 +2229,18 @@
         } else {
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-            device = mEngine->getInputDeviceForAttributes(attributes, &policyMix);
+            device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+            ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
+                __FUNCTION__, device->type());
         }
         if (device == nullptr) {
             ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
             status = BAD_VALUE;
             goto error;
         }
-        if (policyMix) {
+        if (device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
+            *inputType = API_INPUT_MIX_CAPTURE;
+        } else if (policyMix) {
             ALOG_ASSERT(policyMix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
             // there is an external policy, but this input is attached to a mix of recorders,
             // meaning it receives audio injected into the framework, so the recorder doesn't
@@ -2182,6 +2349,21 @@
         return input;
     }
 
+    // Reuse an already opened input if a client with the same session ID already exists
+    // on that input
+    for (size_t i = 0; i < mInputs.size(); i++) {
+        sp <AudioInputDescriptor> desc = mInputs.valueAt(i);
+        if (desc->mProfile != profile) {
+            continue;
+        }
+        RecordClientVector clients = desc->clientsList();
+        for (const auto &client : clients) {
+            if (session == client->session()) {
+                return desc->mIoHandle;
+            }
+        }
+    }
+
     if (!profile->canOpenNewIo()) {
         for (size_t i = 0; i < mInputs.size(); ) {
             sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
@@ -2256,7 +2438,7 @@
     sp<AudioInputDescriptor> inputDesc = mInputs.getInputForClient(portId);
     if (inputDesc == 0) {
         ALOGW("%s no input for client %d", __FUNCTION__, portId);
-        return BAD_VALUE;
+        return DEAD_OBJECT;
     }
     audio_io_handle_t input = inputDesc->mIoHandle;
     sp<RecordClientDescriptor> client = inputDesc->getClient(portId);
@@ -2349,12 +2531,14 @@
         ALOGW("%s input %d client %d already stopped", __FUNCTION__, input, client->portId());
         return INVALID_OPERATION;
     }
-
+    auto old_source = inputDesc->source();
     inputDesc->setClientActive(client, false);
 
     inputDesc->stop();
     if (inputDesc->isActive()) {
-        setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
+        auto current_source = inputDesc->source();
+        setInputDevice(input, getNewInputDevice(inputDesc),
+                old_source != current_source /* force */);
     } else {
         sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
         // if input maps to a dynamic policy with an activity listener, notify of state change
@@ -2450,7 +2634,7 @@
             bool close = false;
             for (const auto& client : input->clientsList()) {
                 sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes());
+                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
                 if (!input->supportedDevices().contains(device)) {
                     close = true;
                     break;
@@ -2778,7 +2962,7 @@
 
 status_t AudioPolicyManager::registerEffect(const effect_descriptor_t *desc,
                                 audio_io_handle_t io,
-                                uint32_t strategy,
+                                product_strategy_t strategy,
                                 int session,
                                 int id)
 {
@@ -3083,16 +3267,16 @@
 
 // Returns true if all devices types match the predicate and are supported by one HW module
 bool  AudioPolicyManager::areAllDevicesSupported(
-        const Vector<AudioDeviceTypeAddr>& devices,
+        const AudioDeviceTypeAddrVector& devices,
         std::function<bool(audio_devices_t)> predicate,
         const char *context) {
     for (size_t i = 0; i < devices.size(); i++) {
         sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
-                devices[i].mType, devices[i].mAddress.c_str(), String8(),
+                devices[i].mType, devices[i].getAddress(), String8(),
                 AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
         if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
-            ALOGE("%s: device type %#x address %s not supported or not an output device",
-                    context, devices[i].mType, devices[i].mAddress.c_str());
+            ALOGE("%s: device type %#x address %s not supported or not match predicate",
+                    context, devices[i].mType, devices[i].getAddress());
             return false;
         }
     }
@@ -3100,7 +3284,7 @@
 }
 
 status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+        const AudioDeviceTypeAddrVector& devices) {
     ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
@@ -3132,25 +3316,35 @@
     return res;
 }
 
-status_t AudioPolicyManager::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device) {
-    ALOGV("%s() strategy=%d device=%08x addr=%s", __FUNCTION__,
-            strategy, device.mType, device.mAddress.c_str());
 
-    Vector<AudioDeviceTypeAddr> devices;
-    devices.add(device);
+status_t AudioPolicyManager::setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                       device_role_t role,
+                                                       const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
     }
-    status_t status = mEngine->setPreferredDeviceForStrategy(strategy, device);
+    status_t status = mEngine->setDevicesRoleForStrategy(strategy, role, devices);
     if (status != NO_ERROR) {
-        ALOGW("Engine could not set preferred device %08x %s for strategy %d",
-                device.mType, device.mAddress.c_str(), strategy);
+        ALOGW("Engine could not set preferred devices %s for strategy %d role %d",
+                dumpAudioDeviceTypeAddrVector(devices).c_str(), strategy, role);
         return status;
     }
 
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+
+    bool forceVolumeReeval = false;
+    // FIXME: workaround for truncated touch sounds
+    // to be removed when the problem is handled by system UI
+    uint32_t delayMs = 0;
+    if (strategy == mCommunnicationStrategy) {
+        forceVolumeReeval = true;
+        delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+        updateInputRouting();
+    }
+    updateCallAndOutputRouting(forceVolumeReeval, delayMs);
 
     return NO_ERROR;
 }
@@ -3158,9 +3352,9 @@
 void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
 {
     uint32_t waitMs = 0;
-    if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
-        DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
-        waitMs = updateCallRouting(newDevices, delayMs);
+    if (updateCallRouting(true /*fromCache*/, delayMs, &waitMs) == NO_ERROR) {
+        // Only apply special touch sound delay once
+        delayMs = 0;
     }
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
@@ -3170,6 +3364,8 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
+            // Only apply special touch sound delay once
+            delayMs = 0;
         }
         if (forceVolumeReeval && !newDevices.isEmpty()) {
             applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
@@ -3177,30 +3373,129 @@
     }
 }
 
-status_t AudioPolicyManager::removePreferredDeviceForStrategy(product_strategy_t strategy)
-{
-    ALOGI("%s() strategy=%d", __FUNCTION__, strategy);
+void AudioPolicyManager::updateInputRouting() {
+    for (const auto& activeDesc : mInputs.getActiveInputs()) {
+        // Skip hotword recording inputs: the input device switch
+        // is handled within the sound trigger HAL.
+        if (activeDesc->isSoundTrigger() && activeDesc->source() == AUDIO_SOURCE_HOTWORD) {
+            continue;
+        }
+        auto newDevice = getNewInputDevice(activeDesc);
+        // Force new input selection if the new device can not be reached via current input
+        if (activeDesc->mProfile->getSupportedDevices().contains(newDevice)) {
+            setInputDevice(activeDesc->mIoHandle, newDevice);
+        } else {
+            closeInput(activeDesc->mIoHandle);
+        }
+    }
+}
 
-    status_t status = mEngine->removePreferredDeviceForStrategy(strategy);
+status_t AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                          device_role_t role)
+{
+    ALOGV("%s() strategy=%d role=%d", __func__, strategy, role);
+
+    status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role);
     if (status != NO_ERROR) {
-        ALOGW("Engine could not remove preferred device for strategy %d", strategy);
+        ALOGV("Engine could not remove preferred device for strategy %d status %d",
+                strategy, status);
         return status;
     }
 
     checkForDeviceAndOutputChanges();
-    updateCallAndOutputRouting();
+
+    bool forceVolumeReeval = false;
+    // FIXME: workaround for truncated touch sounds
+    // to be removed when the problem is handled by system UI
+    uint32_t delayMs = 0;
+    if (strategy == mCommunnicationStrategy) {
+        forceVolumeReeval = true;
+        delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+        updateInputRouting();
+    }
+    updateCallAndOutputRouting(forceVolumeReeval, delayMs);
 
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device) {
-    return mEngine->getPreferredDeviceForStrategy(strategy, device);
+status_t AudioPolicyManager::getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                          device_role_t role,
+                                                          AudioDeviceTypeAddrVector &devices) {
+    return mEngine->getDevicesForRoleAndStrategy(strategy, role, devices);
+}
+
+status_t AudioPolicyManager::setDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->setDevicesRoleForCapturePreset(audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not set preferred devices %s for audio source %d role %d",
+            dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+    return status;
+}
+
+status_t AudioPolicyManager::addDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() audioSource=%d role=%d %s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->addDevicesRoleForCapturePreset(audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not add preferred devices %s for audio source %d role %d",
+            dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+
+    updateInputRouting();
+    return status;
+}
+
+status_t AudioPolicyManager::removeDevicesRoleForCapturePreset(
+        audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices)
+{
+    ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+        return BAD_VALUE;
+    }
+
+    status_t status = mEngine->removeDevicesRoleForCapturePreset(
+            audioSource, role, devices);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
+
+    updateInputRouting();
+    return status;
+}
+
+status_t AudioPolicyManager::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                              device_role_t role) {
+    ALOGV("%s() audioSource=%d role=%d", __func__, audioSource, role);
+
+    status_t status = mEngine->clearDevicesRoleForCapturePreset(audioSource, role);
+    ALOGW_IF(status != NO_ERROR,
+            "Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
+
+    updateInputRouting();
+    return status;
+}
+
+status_t AudioPolicyManager::getDevicesForRoleAndCapturePreset(
+        audio_source_t audioSource, device_role_t role, AudioDeviceTypeAddrVector &devices) {
+    return mEngine->getDevicesForRoleAndCapturePreset(audioSource, role, devices);
 }
 
 status_t AudioPolicyManager::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
-    ALOGI("%s() userId=%d num devices %zu", __FUNCTION__, userId, devices.size());\
+        const AudioDeviceTypeAddrVector& devices) {
+    ALOGV("%s() userId=%d num devices %zu", __func__, userId, devices.size());
     if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
         return BAD_VALUE;
     }
@@ -3219,7 +3514,7 @@
 }
 
 status_t AudioPolicyManager::removeUserIdDeviceAffinities(int userId) {
-    ALOGI("%s() userId=%d", __FUNCTION__, userId);
+    ALOGV("%s() userId=%d", __FUNCTION__, userId);
     status_t status = mPolicyMixes.removeUserIdDeviceAffinities(userId);
     if (status != NO_ERROR) {
         ALOGE("%s() Could not remove all device affinities fo userId = %d",
@@ -3259,7 +3554,9 @@
     }
     dst->appendFormat(" TTS output %savailable\n", mTtsOutputAvailable ? "" : "not ");
     dst->appendFormat(" Master mono: %s\n", mMasterMono ? "on" : "off");
+    dst->appendFormat(" Communnication Strategy: %d\n", mCommunnicationStrategy);
     dst->appendFormat(" Config source: %s\n", mConfig.getSource().c_str()); // getConfig not const
+
     mAvailableOutputDevices.dump(dst, String8("Available output"));
     mAvailableInputDevices.dump(dst, String8("Available input"));
     mHwModulesAll.dump(dst);
@@ -3296,38 +3593,38 @@
 // This function checks for the parameters which can be offloaded.
 // This can be enhanced depending on the capability of the DSP and policy
 // of the system.
-bool AudioPolicyManager::isOffloadSupported(const audio_offload_info_t& offloadInfo)
+audio_offload_mode_t AudioPolicyManager::getOffloadSupport(const audio_offload_info_t& offloadInfo)
 {
-    ALOGV("isOffloadSupported: SR=%u, CM=0x%x, Format=0x%x, StreamType=%d,"
+    ALOGV("%s: SR=%u, CM=0x%x, Format=0x%x, StreamType=%d,"
      " BitRate=%u, duration=%" PRId64 " us, has_video=%d",
-     offloadInfo.sample_rate, offloadInfo.channel_mask,
+     __func__, offloadInfo.sample_rate, offloadInfo.channel_mask,
      offloadInfo.format,
      offloadInfo.stream_type, offloadInfo.bit_rate, offloadInfo.duration_us,
      offloadInfo.has_video);
 
     if (mMasterMono) {
-        return false; // no offloading if mono is set.
+        return AUDIO_OFFLOAD_NOT_SUPPORTED; // no offloading if mono is set.
     }
 
     // Check if offload has been disabled
     if (property_get_bool("audio.offload.disable", false /* default_value */)) {
-        ALOGV("offload disabled by audio.offload.disable");
-        return false;
+        ALOGV("%s: offload disabled by audio.offload.disable", __func__);
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
     // Check if stream type is music, then only allow offload as of now.
     if (offloadInfo.stream_type != AUDIO_STREAM_MUSIC)
     {
-        ALOGV("isOffloadSupported: stream_type != MUSIC, returning false");
-        return false;
+        ALOGV("%s: stream_type != MUSIC, returning false", __func__);
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
     //TODO: enable audio offloading with video when ready
     const bool allowOffloadWithVideo =
             property_get_bool("audio.offload.video", false /* default_value */);
     if (offloadInfo.has_video && !allowOffloadWithVideo) {
-        ALOGV("isOffloadSupported: has_video == true, returning false");
-        return false;
+        ALOGV("%s: has_video == true, returning false", __func__);
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
     //If duration is less than minimum value defined in property, return false
@@ -3335,13 +3632,14 @@
             "audio.offload.min.duration.secs", -1 /* default_value */);
     if (min_duration_secs >= 0) {
         if (offloadInfo.duration_us < min_duration_secs * 1000000LL) {
-            ALOGV("Offload denied by duration < audio.offload.min.duration.secs(=%d)",
-                    min_duration_secs);
-            return false;
+            ALOGV("%s: Offload denied by duration < audio.offload.min.duration.secs(=%d)",
+                    __func__, min_duration_secs);
+            return AUDIO_OFFLOAD_NOT_SUPPORTED;
         }
     } else if (offloadInfo.duration_us < OFFLOAD_DEFAULT_MIN_DURATION_SECS * 1000000) {
-        ALOGV("Offload denied by duration < default min(=%u)", OFFLOAD_DEFAULT_MIN_DURATION_SECS);
-        return false;
+        ALOGV("%s: Offload denied by duration < default min(=%u)",
+                __func__, OFFLOAD_DEFAULT_MIN_DURATION_SECS);
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
     // Do not allow offloading if one non offloadable effect is enabled. This prevents from
@@ -3351,7 +3649,7 @@
     // This may prevent offloading in rare situations where effects are left active by apps
     // in the background.
     if (mEffects.isNonOffloadableEffectEnabled()) {
-        return false;
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
     // See if there is a profile to support this.
@@ -3362,8 +3660,16 @@
                                             offloadInfo.channel_mask,
                                             AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD,
                                             true /* directOnly */);
-    ALOGV("isOffloadSupported() profile %sfound", profile != 0 ? "" : "NOT ");
-    return (profile != 0);
+    ALOGV("%s: profile %sfound%s", __func__, profile != nullptr ? "" : "NOT ",
+            (profile != nullptr && (profile->getFlags() & AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD) != 0)
+            ? ", supports gapless" : "");
+    if (profile == nullptr) {
+        return AUDIO_OFFLOAD_NOT_SUPPORTED;
+    }
+    if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD) != 0) {
+        return AUDIO_OFFLOAD_GAPLESS_SUPPORTED;
+    }
+    return AUDIO_OFFLOAD_SUPPORTED;
 }
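
The return type change above turns a yes/no answer into a three-valued one keyed off the matched profile's gapless flag; a compact stand-alone illustration (hypothetical flag bit and types, not the Android definitions):

#include <cstdint>
#include <cstdio>

enum offload_mode_t { OFFLOAD_NOT_SUPPORTED, OFFLOAD_SUPPORTED, OFFLOAD_GAPLESS_SUPPORTED };

constexpr uint32_t FLAG_GAPLESS_OFFLOAD = 1u << 0;  // stand-in flag bit

struct Profile { uint32_t flags; };

offload_mode_t offloadModeForProfile(const Profile* profile) {
    if (profile == nullptr) {
        return OFFLOAD_NOT_SUPPORTED;      // no direct/offload profile matched the request
    }
    return (profile->flags & FLAG_GAPLESS_OFFLOAD) != 0 ? OFFLOAD_GAPLESS_SUPPORTED
                                                        : OFFLOAD_SUPPORTED;
}

int main() {
    Profile gapless{FLAG_GAPLESS_OFFLOAD};
    std::printf("%d %d\n", offloadModeForProfile(nullptr), offloadModeForProfile(&gapless));
    return 0;
}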
 
 bool AudioPolicyManager::isDirectOutputSupported(const audio_config_base_t& config,
@@ -3387,15 +3693,15 @@
 status_t AudioPolicyManager::listAudioPorts(audio_port_role_t role,
                                             audio_port_type_t type,
                                             unsigned int *num_ports,
-                                            struct audio_port *ports,
+                                            struct audio_port_v7 *ports,
                                             unsigned int *generation)
 {
-    if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
-            generation == NULL) {
+    if (num_ports == nullptr || (*num_ports != 0 && ports == nullptr) ||
+            generation == nullptr) {
         return BAD_VALUE;
     }
     ALOGV("listAudioPorts() role %d type %d num_ports %d ports %p", role, type, *num_ports, ports);
-    if (ports == NULL) {
+    if (ports == nullptr) {
         *num_ports = 0;
     }
 
@@ -3453,7 +3759,7 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getAudioPort(struct audio_port *port)
+status_t AudioPolicyManager::getAudioPort(struct audio_port_v7 *port)
 {
     if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
         return BAD_VALUE;
@@ -3665,6 +3971,15 @@
             // be incomplete.
             PatchBuilder patchBuilder;
             audio_port_config sourcePortConfig = {};
+
+            // if first sink is to MSD, establish single MSD patch
+            if (getMsdAudioOutDevices().contains(
+                        mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id))) {
+                ALOGV("%s patching to MSD", __FUNCTION__);
+                patchBuilder = buildMsdPatch(false /*msdIsSource*/, srcDevice);
+                goto installPatch;
+            }
+
             srcDevice->toAudioPortConfig(&sourcePortConfig, &patch->sources[0]);
             patchBuilder.addSource(sourcePortConfig);
 
@@ -3682,6 +3997,41 @@
                 sinkDevice->toAudioPortConfig(&sinkPortConfig, &patch->sinks[i]);
                 patchBuilder.addSink(sinkPortConfig);
 
+                // Whether the bridge is SW or HW, we attach an SwOutput to the AudioSource for
+                // volume management purposes (activity tracking).
+                // In the HW bridge case this is a workaround: the mixPort used is the one declared
+                // in the config XML to reach the sink, so that it can be declared as available.
+                audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+                sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+                if (sourceDesc != nullptr) {
+                    // take care of dynamic routing for SwOutput selection,
+                    audio_attributes_t attributes = sourceDesc->attributes();
+                    audio_stream_type_t stream = sourceDesc->stream();
+                    audio_attributes_t resultAttr;
+                    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                    config.sample_rate = sourceDesc->config().sample_rate;
+                    config.channel_mask = sourceDesc->config().channel_mask;
+                    config.format = sourceDesc->config().format;
+                    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+                    bool isRequestedDeviceForExclusiveUse = false;
+                    output_type_t outputType;
+                    getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
+                                        &stream, sourceDesc->uid(), &config, &flags,
+                                        &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+                                        nullptr, &outputType);
+                    if (output == AUDIO_IO_HANDLE_NONE) {
+                        ALOGV("%s no output for device %s",
+                              __FUNCTION__, sinkDevice->toString().c_str());
+                        return INVALID_OPERATION;
+                    }
+                    outputDesc = mOutputs.valueFor(output);
+                    if (outputDesc->isDuplicated()) {
+                        ALOGE("%s output is duplicated", __func__);
+                        return INVALID_OPERATION;
+                    }
+                    sourceDesc->setSwOutput(outputDesc);
+                }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
                 // - audio HAL version is < 3.0
@@ -3697,49 +4047,25 @@
                     if (patch->num_sinks > 1) {
                         return INVALID_OPERATION;
                     }
-                    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-                    if (sourceDesc != nullptr) {
-                        // take care of dynamic routing for SwOutput selection,
-                        audio_attributes_t attributes = sourceDesc->attributes();
-                        audio_stream_type_t stream = sourceDesc->stream();
-                        audio_attributes_t resultAttr;
-                        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-                        config.sample_rate = sourceDesc->config().sample_rate;
-                        config.channel_mask = sourceDesc->config().channel_mask;
-                        config.format = sourceDesc->config().format;
-                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
-                        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-                        bool isRequestedDeviceForExclusiveUse = false;
-                        output_type_t outputType;
-                        getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
-                                            &stream, sourceDesc->uid(), &config, &flags,
-                                            &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-                                            nullptr, &outputType);
-                        if (output == AUDIO_IO_HANDLE_NONE) {
-                            ALOGV("%s no output for device %s",
-                                  __FUNCTION__, sinkDevice->toString().c_str());
-                            return INVALID_OPERATION;
-                        }
-                    } else {
+                    if (sourceDesc == nullptr) {
                         SortedVector<audio_io_handle_t> outputs =
                                 getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
                         // if the sink device is reachable via an opened output stream, request to
                         // go via this output stream by adding a second source to the patch
                         // description
                         output = selectOutput(outputs);
-                    }
-                    if (output != AUDIO_IO_HANDLE_NONE) {
-                        sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-                        if (outputDesc->isDuplicated()) {
-                            ALOGV("%s output for device %s is duplicated",
-                                  __FUNCTION__, sinkDevice->toString().c_str());
-                            return INVALID_OPERATION;
+                        if (output != AUDIO_IO_HANDLE_NONE) {
+                            outputDesc = mOutputs.valueFor(output);
+                            if (outputDesc->isDuplicated()) {
+                                ALOGV("%s output for device %s is duplicated",
+                                      __FUNCTION__, sinkDevice->toString().c_str());
+                                return INVALID_OPERATION;
+                            }
                         }
+                    }
+                    if (outputDesc != nullptr) {
                         audio_port_config srcMixPortConfig = {};
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, &patch->sources[0]);
-                        if (sourceDesc != nullptr) {
-                            sourceDesc->setSwOutput(outputDesc);
-                        }
                         // for volume control, we may need a valid stream
                         srcMixPortConfig.ext.mix.usecase.stream = sourceDesc != nullptr ?
                                     sourceDesc->stream() : AUDIO_STREAM_PATCH;
@@ -3749,6 +4075,7 @@
             }
             // TODO: check from routing capabilities in config file and other conflicting patches
 
+installPatch:
             status_t status = installPatch(
                         __func__, index, handle, patchBuilder.patch(), delayMs, uid, &patchDesc);
             if (status != NO_ERROR) {
@@ -3830,8 +4157,9 @@
                 sp<SwAudioOutputDescriptor> outputDesc =
                         mOutputs.getOutputFromId(patch->sources[1].id);
                 if (outputDesc == NULL) {
-                    ALOGE("%s output not found for id %d", __func__, patch->sources[0].id);
-                    return BAD_VALUE;
+                    ALOGW("%s output not found for id %d", __func__, patch->sources[0].id);
+                    // releaseOutput has already called closeOutput in the case of a direct output
+                    return NO_ERROR;
                 }
                 if (patchDesc->getHandle() != outputDesc->getPatchHandle()) {
                     // force SwOutput patch removal as AF counter part patch has already gone.
@@ -4097,15 +4425,19 @@
     disconnectAudioSource(sourceDesc);
 
     audio_attributes_t attributes = sourceDesc->attributes();
-    sp<DeviceDescriptor> srcDevice = sourceDesc->srcDevice();
-
+    // The (dynamic) device may have been disconnected and reconnected,
+    // in which case its id has changed.
+    sp<DeviceDescriptor> srcDevice = mAvailableInputDevices.getDevice(
+                sourceDesc->srcDevice()->type(),
+                String8(sourceDesc->srcDevice()->address().c_str()),
+                AUDIO_FORMAT_DEFAULT);
     DeviceVector sinkDevices =
-            mEngine->getOutputDevicesForAttributes(attributes, nullptr, true);
+            mEngine->getOutputDevicesForAttributes(attributes, nullptr, false /*fromCache*/);
     ALOG_ASSERT(!sinkDevices.isEmpty(), "connectAudioSource(): no device found for attributes");
     sp<DeviceDescriptor> sinkDevice = sinkDevices.itemAt(0);
-    ALOG_ASSERT(mAvailableOutputDevices.contains(sinkDevice), "%s: Device %s not available",
-                __FUNCTION__, sinkDevice->toString().c_str());
-
+    if (!mAvailableOutputDevices.contains(sinkDevice)) {
+        ALOGE("%s Device %s not available", __func__, sinkDevice->toString().c_str());
+        return INVALID_OPERATION;
+    }
     PatchBuilder patchBuilder;
     patchBuilder.addSink(sinkDevice).addSource(srcDevice);
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
@@ -4115,7 +4447,7 @@
         ALOGW("%s patch panel could not connect device patch, error %d", __func__, status);
         return INVALID_OPERATION;
     }
-    sourceDesc->setPatchHandle(handle);
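+    // Record both the patch handle and the selected sink device on the source descriptor so
+    // that disconnectAudioSource() can tear the bridge down later.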
+    sourceDesc->connect(handle, sinkDevice);
     // SW Bridge? (@todo: HW bridge, keep track of HwOutput for device selection "reconsideration")
     sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
     if (swOutput != 0) {
@@ -4217,54 +4549,28 @@
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
                                                 audio_format_t *surroundFormats,
-                                                bool *surroundFormatsEnabled,
-                                                bool reported)
+                                                bool *surroundFormatsEnabled)
 {
-    if (numSurroundFormats == NULL || (*numSurroundFormats != 0 &&
-            (surroundFormats == NULL || surroundFormatsEnabled == NULL))) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 &&
+            (surroundFormats == nullptr || surroundFormatsEnabled == nullptr))) {
         return BAD_VALUE;
     }
-    ALOGV("%s() numSurroundFormats %d surroundFormats %p surroundFormatsEnabled %p reported %d",
-            __func__, *numSurroundFormats, surroundFormats, surroundFormatsEnabled, reported);
+    ALOGV("%s() numSurroundFormats %d surroundFormats %p surroundFormatsEnabled %p",
+            __func__, *numSurroundFormats, surroundFormats, surroundFormatsEnabled);
 
     size_t formatsWritten = 0;
     size_t formatsMax = *numSurroundFormats;
-    std::unordered_set<audio_format_t> formats; // Uses primary surround formats only
-    if (reported) {
-        // Return formats from all device profiles that have already been resolved by
-        // checkOutputsForDevice().
-        for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
-            sp<DeviceDescriptor> device = mAvailableOutputDevices[i];
-            FormatVector supportedFormats =
-                    device->getAudioPort()->getAudioProfiles().getSupportedFormats();
-            for (size_t j = 0; j < supportedFormats.size(); j++) {
-                if (mConfig.getSurroundFormats().count(supportedFormats[j]) != 0) {
-                    formats.insert(supportedFormats[j]);
-                } else {
-                    for (const auto& pair : mConfig.getSurroundFormats()) {
-                        if (pair.second.count(supportedFormats[j]) != 0) {
-                            formats.insert(pair.first);
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-    } else {
-        for (const auto& pair : mConfig.getSurroundFormats()) {
-            formats.insert(pair.first);
-        }
-    }
-    *numSurroundFormats = formats.size();
+
+    *numSurroundFormats = mConfig.getSurroundFormats().size();
     audio_policy_forced_cfg_t forceUse = mEngine->getForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND);
-    for (const auto& format: formats) {
+    for (const auto& format: mConfig.getSurroundFormats()) {
         if (formatsWritten < formatsMax) {
-            surroundFormats[formatsWritten] = format;
+            surroundFormats[formatsWritten] = format.first;
             bool formatEnabled = true;
             switch (forceUse) {
                 case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
-                    formatEnabled = mManualSurroundFormats.count(format) != 0;
+                    formatEnabled = mManualSurroundFormats.count(format.first) != 0;
                     break;
                 case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
                     formatEnabled = false;
@@ -4278,6 +4584,60 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                        audio_format_t *surroundFormats) {
+    if (numSurroundFormats == nullptr || (*numSurroundFormats != 0 && surroundFormats == nullptr)) {
+        return BAD_VALUE;
+    }
+    ALOGV("%s() numSurroundFormats %d surroundFormats %p",
+            __func__, *numSurroundFormats, surroundFormats);
+
+    size_t formatsWritten = 0;
+    size_t formatsMax = *numSurroundFormats;
+    std::unordered_set<audio_format_t> formats; // Uses primary surround formats only
+
+    // Return formats from all device profiles that have already been resolved by
+    // checkOutputsForDevice().
+    for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
+        sp<DeviceDescriptor> device = mAvailableOutputDevices[i];
+        audio_devices_t deviceType = device->type();
+        // Enabling/disabling surround formats only applies to HDMI devices, so this function
+        // only returns formats reported by HDMI devices.
+        if (deviceType != AUDIO_DEVICE_OUT_HDMI) {
+            continue;
+        }
+        // Formats reported by sink devices
+        std::unordered_set<audio_format_t> formatset;
+        if (auto it = mReportedFormatsMap.find(device); it != mReportedFormatsMap.end()) {
+            formatset.insert(it->second.begin(), it->second.end());
+        }
+
+        // Formats hard-coded in the audio policy configuration file (if any).
+        FormatVector encodedFormats = device->encodedFormats();
+        formatset.insert(encodedFormats.begin(), encodedFormats.end());
+        // Filter the formats which are supported by the vendor hardware.
+        for (auto it = formatset.begin(); it != formatset.end(); ++it) {
+            if (mConfig.getSurroundFormats().count(*it) != 0) {
+                formats.insert(*it);
+            } else {
+                for (const auto& pair : mConfig.getSurroundFormats()) {
+                    if (pair.second.count(*it) != 0) {
+                        formats.insert(pair.first);
+                        break;
+                    }
+                }
+            }
+        }
+    }
+    *numSurroundFormats = formats.size();
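+    // Only write up to the caller-provided capacity; the full count returned above lets the
+    // caller retry with a larger array if needed.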
+    for (const auto& format: formats) {
+        if (formatsWritten < formatsMax) {
+            surroundFormats[formatsWritten++] = format;
+        }
+    }
+    return NO_ERROR;
+}
+
 status_t AudioPolicyManager::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
 {
     ALOGV("%s() format 0x%X enabled %d", __func__, audioFormat, enabled);
@@ -4398,13 +4758,21 @@
 status_t AudioPolicyManager::disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s port Id %d", __FUNCTION__, sourceDesc->portId());
+    if (!sourceDesc->isConnected()) {
+        ALOGV("%s port Id %d already disconnected", __FUNCTION__, sourceDesc->portId());
+        return NO_ERROR;
+    }
     sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
     if (swOutput != 0) {
         status_t status = stopSource(swOutput, sourceDesc);
         if (status == NO_ERROR) {
             swOutput->stop();
         }
-        releaseOutput(sourceDesc->portId());
+        if (releaseOutput(sourceDesc->portId())) {
+            // The output descriptor is reopened to query dynamic profiles. In that case, there is
+            // no need to release the audio patch here; just return NO_ERROR.
+            return NO_ERROR;
+        }
     } else {
         sp<HwAudioOutputDescriptor> hwOutputDesc = sourceDesc->hwOutput().promote();
         if (hwOutputDesc != 0) {
@@ -4413,7 +4781,9 @@
             ALOGW("%s source has neither SW nor HW output", __FUNCTION__);
         }
     }
-    return releaseAudioPatchInternal(sourceDesc->getPatchHandle());
+    status_t status = releaseAudioPatchInternal(sourceDesc->getPatchHandle());
+    sourceDesc->disconnect();
+    return status;
 }
 
 sp<SourceClientDescriptor> AudioPolicyManager::getSourceForAttributesOnOutput(
@@ -4441,37 +4811,15 @@
 }
 
 static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
-    char audioPolicyXmlConfigFile[AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH];
-    std::vector<const char*> fileNames;
-    status_t ret;
-
-    if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
-        if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false) &&
-            property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-            // Both BluetoothAudio@2.0 and BluetoothA2dp@1.0 (Offlaod) are disabled, and uses
-            // the legacy hardware module for A2DP and hearing aid.
-            fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
-        } else if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
-            // A2DP offload supported but disabled: try to use special XML file
-            fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+            !audioPolicyXmlConfigFile.empty()) {
+        status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
+        if (ret == NO_ERROR) {
+            config.setSource(audioPolicyXmlConfigFile);
         }
-    } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false)) {
-        fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
+        return ret;
     }
-    fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
-
-    for (const char* fileName : fileNames) {
-        for (const auto& path : audio_get_configuration_paths()) {
-            snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
-                     "%s/%s", path.c_str(), fileName);
-            ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile, &config);
-            if (ret == NO_ERROR) {
-                config.setSource(audioPolicyXmlConfigFile);
-                return ret;
-            }
-        }
-    }
-    return ret;
+    return BAD_VALUE;
 }
 
 AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
@@ -4526,6 +4874,9 @@
         return status;
     }
 
+    mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
+        mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
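+    // The communication strategy is used by isScoRequestedForComm() to check whether the
+    // engine currently routes communication to a Bluetooth SCO device.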
+
     // after parsing the config, mOutputDevicesAll and mInputDevicesAll contain all known devices;
     // open all output streams needed to access attached devices
     onNewAudioModulesAvailableInt(nullptr /*newDevices*/);
@@ -4547,10 +4898,7 @@
         }
     }
 
-    if (mPrimaryOutput == 0) {
-        ALOGE("Failed to open primary output");
-        status = NO_INIT;
-    }
+    ALOGW_IF(mPrimaryOutput == nullptr, "The policy configuration does not declare a primary output");
 
     // Silence ALOGV statements
     property_set("log.tag." LOG_TAG, "D");
@@ -4660,7 +5008,7 @@
                     setEngineDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
                 }
             }
-            if (mPrimaryOutput == 0 &&
+            if (mPrimaryOutput == nullptr &&
                     outProfile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
                 mPrimaryOutput = outputDesc;
             }
@@ -4692,7 +5040,8 @@
             const DeviceVector &supportedDevices = inProfile->getSupportedDevices();
             DeviceVector availProfileDevices = supportedDevices.filter(mInputDevicesAll);
             if (availProfileDevices.isEmpty()) {
-                ALOGE("%s: Input device list is empty!", __FUNCTION__);
+                ALOGV("%s: Input device list is empty for profile %s",
+                    __func__, inProfile->getTagName().c_str());
                 continue;
             }
             sp<AudioInputDescriptor> inputDesc =
@@ -4737,6 +5086,10 @@
 
 void AudioPolicyManager::removeOutput(audio_io_handle_t output)
 {
+    if (mPrimaryOutput != 0 && mPrimaryOutput == mOutputs.valueFor(output)) {
+        ALOGV("%s: removing primary output", __func__);
+        mPrimaryOutput = nullptr;
+    }
     mOutputs.removeItem(output);
     selectOutputForMusicEffects();
 }
@@ -4762,7 +5115,15 @@
     }
 
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
-        // first list already open outputs that can be routed to this device
+        // first call getAudioPort to get the supported attributes from the HAL
+        struct audio_port_v7 port = {};
+        device->toAudioPort(&port);
+        status_t status = mpClientInterface->getAudioPort(&port);
+        if (status == NO_ERROR) {
+            device->importAudioPort(port);
+        }
+
+        // then list already open outputs that can be routed to this device
         for (size_t i = 0; i < mOutputs.size(); i++) {
             desc = mOutputs.valueAt(i);
             if (!desc->isDuplicated() && desc->supportsDevice(device)
@@ -4822,83 +5183,8 @@
 
             ALOGV("opening output for device %08x with params %s profile %p name %s",
                   deviceType, address.string(), profile.get(), profile->getName().c_str());
-            desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
-            audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-            status_t status = desc->open(nullptr, DeviceVector(device),
-                                         AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
-
-            if (status == NO_ERROR) {
-                // Here is where the out_set_parameters() for card & device gets called
-                if (!address.isEmpty()) {
-                    char *param = audio_device_address_to_parameter(deviceType, address);
-                    mpClientInterface->setParameters(output, String8(param));
-                    free(param);
-                }
-                updateAudioProfiles(device, output, profile->getAudioProfiles());
-                if (!profile->hasValidAudioProfile()) {
-                    ALOGW("checkOutputsForDevice() missing param");
-                    desc->close();
-                    output = AUDIO_IO_HANDLE_NONE;
-                } else if (profile->hasDynamicAudioProfile()) {
-                    desc->close();
-                    output = AUDIO_IO_HANDLE_NONE;
-                    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
-                    profile->pickAudioProfile(
-                            config.sample_rate, config.channel_mask, config.format);
-                    config.offload_info.sample_rate = config.sample_rate;
-                    config.offload_info.channel_mask = config.channel_mask;
-                    config.offload_info.format = config.format;
-
-                    status_t status = desc->open(&config, DeviceVector(device),
-                                                 AUDIO_STREAM_DEFAULT,
-                                                 AUDIO_OUTPUT_FLAG_NONE, &output);
-                    if (status != NO_ERROR) {
-                        output = AUDIO_IO_HANDLE_NONE;
-                    }
-                }
-
-                if (output != AUDIO_IO_HANDLE_NONE) {
-                    addOutput(output, desc);
-                    if (audio_is_remote_submix_device(deviceType) && address != "0") {
-                        sp<AudioPolicyMix> policyMix;
-                        if (mPolicyMixes.getAudioPolicyMix(deviceType, address, policyMix)
-                                == NO_ERROR) {
-                            policyMix->setOutput(desc);
-                            desc->mPolicyMix = policyMix;
-                        } else {
-                            ALOGW("checkOutputsForDevice() cannot find policy for address %s",
-                                  address.string());
-                        }
-
-                    } else if (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) &&
-                                    hasPrimaryOutput()) {
-                        // no duplicated output for direct outputs and
-                        // outputs used by dynamic policy mixes
-                        audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
-
-                        //TODO: configure audio effect output stage here
-
-                        // open a duplicating output thread for the new output and the primary output
-                        sp<SwAudioOutputDescriptor> dupOutputDesc =
-                                new SwAudioOutputDescriptor(NULL, mpClientInterface);
-                        status_t status = dupOutputDesc->openDuplicating(mPrimaryOutput, desc,
-                                                                         &duplicatedOutput);
-                        if (status == NO_ERROR) {
-                            // add duplicated output descriptor
-                            addOutput(duplicatedOutput, dupOutputDesc);
-                        } else {
-                            ALOGW("checkOutputsForDevice() could not open dup output for %d and %d",
-                                    mPrimaryOutput->mIoHandle, output);
-                            desc->close();
-                            removeOutput(output);
-                            nextAudioPortGeneration();
-                            output = AUDIO_IO_HANDLE_NONE;
-                        }
-                    }
-                }
-            } else {
-                output = AUDIO_IO_HANDLE_NONE;
-            }
+            desc = openOutputWithProfileAndDevice(profile, DeviceVector(device));
+            audio_io_handle_t output = desc == nullptr ? AUDIO_IO_HANDLE_NONE : desc->mIoHandle;
             if (output == AUDIO_IO_HANDLE_NONE) {
                 ALOGW("checkOutputsForDevice() could not open output for device %x", deviceType);
                 profiles.removeAt(profile_index);
@@ -4907,6 +5193,8 @@
                 outputs.add(output);
                 // Load digital format info only for digital devices
                 if (audio_device_is_digital(deviceType)) {
+                    // TODO: when getAudioPort is ready, importing the audio port may no longer
+                    // be needed; picking an audio profile may be enough
                     device->importAudioPortAndPickAudioProfile(profile);
                 }
 
@@ -4931,7 +5219,7 @@
             if (!desc->isDuplicated()) {
                 // exact match on device
                 if (device_distinguishes_on_address(deviceType) && desc->supportsDevice(device)
-                        && desc->devicesSupportEncodedFormats({deviceType})) {
+                        && desc->containsSingleDeviceSupportingEncodedFormats(device)) {
                     outputs.add(mOutputs.keyAt(i));
                 } else if (!mAvailableOutputDevices.containsAtLeastOne(desc->supportedDevices())) {
                     ALOGV("checkOutputsForDevice(): disconnecting adding output %d",
@@ -4944,11 +5232,30 @@
         for (const auto& hwModule : mHwModules) {
             for (size_t j = 0; j < hwModule->getOutputProfiles().size(); j++) {
                 sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
-                if (profile->supportsDevice(device)) {
-                    ALOGV("checkOutputsForDevice(): "
-                            "clearing direct output profile %zu on module %s",
-                            j, hwModule->getName());
-                    profile->clearAudioProfiles();
+                if (!profile->supportsDevice(device)) {
+                    continue;
+                }
+                ALOGV("checkOutputsForDevice(): "
+                        "clearing direct output profile %zu on module %s",
+                        j, hwModule->getName());
+                profile->clearAudioProfiles();
+                if (!profile->hasDynamicAudioProfile()) {
+                    continue;
+                }
+                // When a device is disconnected, if there is an IOProfile that contains dynamic
+                // profiles and supports the disconnected device, call getAudioPort to repopulate
+                // the capabilities of the devices that are supported by the IOProfile.
+                for (const auto& supportedDevice : profile->getSupportedDevices()) {
+                    if (supportedDevice == device ||
+                            !mAvailableOutputDevices.contains(supportedDevice)) {
+                        continue;
+                    }
+                    struct audio_port_v7 port;
+                    supportedDevice->toAudioPort(&port);
+                    status_t status = mpClientInterface->getAudioPort(&port);
+                    if (status == NO_ERROR) {
+                        supportedDevice->importAudioPort(port);
+                    }
                 }
             }
         }
@@ -5150,8 +5457,13 @@
             }
         }
         if (!directOutputOpen) {
-            ALOGV("no direct outputs open, reset MSD patch");
-            setMsdPatch();
+            ALOGV("no direct outputs open, reset MSD patches");
+            // TODO: The MSD patches to be established here may differ from current MSD patches due
+            // to how output devices for patching are resolved. This could be avoided by caching
+            // and reusing the arguments to mEngine->getOutputDevicesForAttributes() when resolving
+            // which output devices to patch to. This may be complicated by the fact that devices
+            // may become unavailable.
+            setMsdOutputPatches();
         }
     }
 }
@@ -5218,8 +5530,16 @@
     if (onOutputsChecked != nullptr && onOutputsChecked()) checkA2dpSuspend();
     updateDevicesAndOutputs();
     if (mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD) != 0) {
-        setMsdPatch();
+        // TODO: The MSD patches to be established here may differ from current MSD patches due to how
+        // output devices for patching are resolved. Nevertheless, AudioTracks affected by device
+        // configuration changes will ultimately be rerouted correctly. We can still avoid
+        // unnecessary rerouting by caching and reusing the arguments to
+        // mEngine->getOutputDevicesForAttributes() when resolving which output devices to patch to.
+        // This may be complicated by the fact that devices may become unavailable.
+        setMsdOutputPatches();
     }
+    // an event that changed routing likely occurred, inform upper layers
+    mpClientInterface->onRoutingUpdated();
 }
 
 bool AudioPolicyManager::followsSameRouting(const audio_attributes_t &lAttr,
@@ -5229,6 +5549,29 @@
             mEngine->getProductStrategyForAttributes(rAttr);
 }
 
+void AudioPolicyManager::checkAudioSourceForAttributes(const audio_attributes_t &attr)
+{
+    for (size_t i = 0; i < mAudioSources.size(); i++)  {
+        sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
+        if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
+                && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
+                && !isCallRxAudioSource(sourceDesc)) {
+            connectAudioSource(sourceDesc);
+        }
+    }
+}
+
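+// Disconnects every connected audio source whose software bridge output matches the given output.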
+void AudioPolicyManager::clearAudioSourcesForOutput(audio_io_handle_t output)
+{
+    for (size_t i = 0; i < mAudioSources.size(); i++)  {
+        sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
+        if (sourceDesc != nullptr && sourceDesc->swOutput().promote() != nullptr
+                && sourceDesc->swOutput().promote()->mIoHandle == output) {
+            disconnectAudioSource(sourceDesc);
+        }
+    }
+}
+
 void AudioPolicyManager::checkOutputForAttributes(const audio_attributes_t &attr)
 {
     auto psId = mEngine->getProductStrategyForAttributes(attr);
@@ -5258,7 +5601,7 @@
             if (status != OK) {
                 continue;
             }
-            if (client->getPrimaryMix() != primaryMix) {
+            if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
                 invalidate = true;
                 if (desc->isStrategyActive(psId)) {
                     maxLatency = desc->latency();
@@ -5289,7 +5632,8 @@
                                                                      client->flags(),
                                                                      client->config().format,
                                                                      client->config().channel_mask,
-                                                                     client->config().sample_rate);
+                                                                     client->config().sample_rate,
+                                                                     client->session());
                     if (newOutput != srcOut) {
                         invalidate = true;
                         break;
@@ -5324,7 +5668,7 @@
                                 newDevices.types());
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != 0){
+            if (source != nullptr && !isCallRxAudioSource(source)) {
                 connectAudioSource(source);
             }
         }
@@ -5347,11 +5691,13 @@
     for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
         auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
         checkOutputForAttributes(attributes);
+        checkAudioSourceForAttributes(attributes);
     }
 }
 
 void AudioPolicyManager::checkSecondaryOutputs() {
     std::set<audio_stream_type_t> streamsToInvalidate;
+    TrackSecondaryOutputsMap trackSecondaryOutputs;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& outputDescriptor = mOutputs[i];
         for (const sp<TrackClientDescriptor>& client : outputDescriptor->getClientIterable()) {
@@ -5368,20 +5714,43 @@
                 }
             }
 
-            if (status != OK ||
-                !std::equal(client->getSecondaryOutputs().begin(),
-                            client->getSecondaryOutputs().end(),
-                            secondaryDescs.begin(), secondaryDescs.end())) {
+            if (status != OK) {
                 streamsToInvalidate.insert(client->stream());
+            } else if (!std::equal(
+                    client->getSecondaryOutputs().begin(),
+                    client->getSecondaryOutputs().end(),
+                    secondaryDescs.begin(), secondaryDescs.end())) {
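+                // The client's secondary outputs have changed: collect the new ones and update
+                // them in place via the policy client interface instead of invalidating the stream.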
+                std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
+                std::vector<audio_io_handle_t> secondaryOutputIds;
+                for (const auto& secondaryDesc : secondaryDescs) {
+                    secondaryOutputIds.push_back(secondaryDesc->mIoHandle);
+                    weakSecondaryDescs.push_back(secondaryDesc);
+                }
+                trackSecondaryOutputs.emplace(client->portId(), secondaryOutputIds);
+                client->setSecondaryOutputs(std::move(weakSecondaryDescs));
             }
         }
     }
+    if (!trackSecondaryOutputs.empty()) {
+        mpClientInterface->updateSecondaryOutputs(trackSecondaryOutputs);
+    }
     for (audio_stream_type_t stream : streamsToInvalidate) {
-        ALOGD("%s Invalidate stream %d due to secondary output change", __func__, stream);
+        ALOGD("%s Invalidate stream %d due to failure getting output for attr", __func__, stream);
         mpClientInterface->invalidateStream(stream);
     }
 }
 
+bool AudioPolicyManager::isScoRequestedForComm() const {
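+    // Returns true when the devices selected by the engine for the communication strategy
+    // include a Bluetooth SCO output device.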
+    AudioDeviceTypeAddrVector devices;
+    mEngine->getDevicesForRoleAndStrategy(mCommunnicationStrategy, DEVICE_ROLE_PREFERRED, devices);
+    for (const auto &device : devices) {
+        if (audio_is_bluetooth_out_sco_device(device.mType)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 void AudioPolicyManager::checkA2dpSuspend()
 {
     audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput();
@@ -5393,23 +5762,21 @@
     bool isScoConnected =
             (mAvailableInputDevices.types().count(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) != 0 ||
              !Intersection(mAvailableOutputDevices.types(), getAudioDeviceOutAllScoSet()).empty());
+    bool isScoRequested = isScoRequestedForComm();
 
     // if suspended, restore A2DP output if:
     //      ((SCO device is NOT connected) ||
-    //       ((forced usage communication is NOT SCO) && (forced usage for record is NOT SCO) &&
+    //       ((SCO is not requested) &&
     //        (phone state is NOT in call) && (phone state is NOT ringing)))
     //
     // if not suspended, suspend A2DP output if:
     //      (SCO device is connected) &&
-    //       ((forced usage for communication is SCO) || (forced usage for record is SCO) ||
+    //       ((SCO is requested) ||
     //       ((phone state is in call) || (phone state is ringing)))
     //
     if (mA2dpSuspended) {
         if (!isScoConnected ||
-             ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) !=
-                     AUDIO_POLICY_FORCE_BT_SCO) &&
-              (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) !=
-                      AUDIO_POLICY_FORCE_BT_SCO) &&
+             (!isScoRequested &&
               (mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) &&
               (mEngine->getPhoneState() != AUDIO_MODE_RINGTONE))) {
 
@@ -5418,10 +5785,7 @@
         }
     } else {
         if (isScoConnected &&
-             ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) ==
-                     AUDIO_POLICY_FORCE_BT_SCO) ||
-              (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) ==
-                      AUDIO_POLICY_FORCE_BT_SCO) ||
+             (isScoRequested ||
               (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) ||
               (mEngine->getPhoneState() == AUDIO_MODE_RINGTONE))) {
 
@@ -5446,6 +5810,12 @@
         }
     }
 
+    // Do not retrieve engine device for outputs through MSD
+    // TODO: support explicit routing requests by resetting MSD patch to engine device.
+    if (outputDesc->devices() == getMsdAudioOutDevices()) {
+        return outputDesc->devices();
+    }
+
     // Honor explicit routing requests only if no client using default routing is active on this
     // input: a specific app can not force routing for other apps by setting a preferred device.
     bool active; // unused
@@ -5465,13 +5835,28 @@
     for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
         StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
         auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
+        auto hasStreamActive = [&](auto stream) {
+            return hasStream(streams, stream) && isStreamActive(stream, 0);
+        };
 
-        if ((hasVoiceStream(streams) &&
-             (isInCall() || mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) &&
-             !isStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE, 0)) ||
-             ((hasStream(streams, AUDIO_STREAM_ALARM) || hasStream(streams, AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
-                mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) ||
-                outputDesc->isStrategyActive(productStrategy)) {
+        auto doGetOutputDevicesForVoice = [&]() {
+            return hasVoiceStream(streams) && (outputDesc == mPrimaryOutput ||
+                outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) &&
+                (isInCall() ||
+                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc));
+        };
+
+        // With low-latency audio playing on the speaker and music on WFD, when the first
+        // low-latency output is stopped, getNewOutputDevices checks each product strategy
+        // from the ordered list, and STRATEGY_SONIFICATION comes before STRATEGY_MEDIA.
+        // If an ALARM, RING or ENFORCED_AUDIBLE stream is supported by the product strategy,
+        // devices are returned for STRATEGY_SONIFICATION without checking whether the
+        // stream is associated with the output descriptor.
+        if (doGetOutputDevicesForVoice() || outputDesc->isStrategyActive(productStrategy) ||
+               ((hasStreamActive(AUDIO_STREAM_ALARM) ||
+                hasStreamActive(AUDIO_STREAM_RING) ||
+                hasStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
+                mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
             // Retrieval of devices for voice DL is done on primary output profile, cannot
             // check the route (would force modifying configuration file for this profile)
             devices = mEngine->getOutputDevicesForAttributes(attr, nullptr, fromCache);
@@ -5507,12 +5892,22 @@
 
     // If we are not in call and no client is active on this input, this methods returns
     // a null sp<>, causing the patch on the input stream to be released.
-    audio_attributes_t attributes = inputDesc->getHighestPriorityAttributes();
+    audio_attributes_t attributes;
+    uid_t uid;
+    sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
+    if (topClient != nullptr) {
+        attributes = topClient->attributes();
+        uid = topClient->uid();
+    } else {
+        attributes = { .source = AUDIO_SOURCE_DEFAULT };
+        uid = 0;
+    }
+
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes);
+        device = mEngine->getInputDeviceForAttributes(attributes, uid);
     }
 
     return device;
@@ -5533,8 +5928,8 @@
     }
     DeviceVector activeDevices;
     DeviceVector devices;
-    for (audio_stream_type_t curStream = AUDIO_STREAM_MIN; curStream < AUDIO_STREAM_PUBLIC_CNT;
-         curStream = (audio_stream_type_t) (curStream + 1)) {
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_PUBLIC_CNT; ++i) {
+        const audio_stream_type_t curStream{static_cast<audio_stream_type_t>(i)};
         if (!streamsMatchForvolume(stream, curStream)) {
             continue;
         }
@@ -5648,7 +6043,7 @@
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
             const uint32_t latency = desc->latency() * 2;
-            if (latency > maxLatency) {
+            if (desc->isActive(latency * 2) && latency > maxLatency) {
                 maxLatency = latency;
             }
         }
@@ -6031,7 +6426,8 @@
     if (!Intersection(deviceTypes,
             {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
              AUDIO_DEVICE_OUT_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
-             AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID}).empty() &&
+             AUDIO_DEVICE_OUT_USB_HEADSET, AUDIO_DEVICE_OUT_HEARING_AID,
+             AUDIO_DEVICE_OUT_BLE_HEADSET}).empty() &&
             ((volumeSource == alarmVolumeSrc ||
               volumeSource == ringVolumeSrc) ||
              (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION)) ||
@@ -6060,8 +6456,9 @@
                 volumeDb = minVolDb;
                 ALOGV("computeVolume limiting volume to %f musicVol %f", minVolDb, musicVolDb);
             }
-            if (!Intersection(deviceTypes, {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES}).empty()) {
+            if (Volume::getDeviceForVolume(deviceTypes) != AUDIO_DEVICE_OUT_SPEAKER
+                    &&  !Intersection(deviceTypes, {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES}).empty()) {
                 // on A2DP, also ensure notification volume is not too low compared to media when
                 // intended to be played
                 if ((volumeDb > -96.0f) &&
@@ -6126,16 +6523,17 @@
     bool isVoiceVolSrc = callVolSrc == volumeSource;
     bool isBtScoVolSrc = btScoVolSrc == volumeSource;
 
-    audio_policy_forced_cfg_t forceUseForComm =
-            mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION);
+    bool isScoRequested = isScoRequestedForComm();
     // do not change in call volume if bluetooth is connected and vice versa
     // if sco and call follow same curves, bypass forceUseForComm
     if ((callVolSrc != btScoVolSrc) &&
-            ((isVoiceVolSrc && forceUseForComm == AUDIO_POLICY_FORCE_BT_SCO) ||
-             (isBtScoVolSrc && forceUseForComm != AUDIO_POLICY_FORCE_BT_SCO))) {
-        ALOGV("%s cannot set volume group %d volume with force use = %d for comm", __func__,
-             volumeSource, forceUseForComm);
-        return INVALID_OPERATION;
+            ((isVoiceVolSrc && isScoRequested) ||
+             (isBtScoVolSrc && !isScoRequested))) {
+        ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
+             volumeSource, isScoRequested ? " " : " not ");
+        // Do not return an error here as AudioService will always set both voice call
+        // and bluetooth SCO volumes due to stream aliasing.
+        return NO_ERROR;
     }
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
@@ -6143,9 +6541,8 @@
 
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
-            // Force VoIP volume to max for bluetooth SCO
-
-            ((isVoiceVolSrc || isBtScoVolSrc) &&
+            // Force VoIP volume to max for bluetooth SCO device except if muted
+            (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
                     isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
         volumeDb = 0.0f;
     }
@@ -6305,9 +6702,10 @@
 {
     for (ssize_t i = (ssize_t)mAudioSources.size() - 1; i >= 0; i--)  {
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
-        if (sourceDesc->srcDevice()->equals(deviceDesc)) {
-            ALOGV("%s releasing audio source %d", __FUNCTION__, sourceDesc->portId());
-            stopAudioSource(sourceDesc->portId());
+        if (sourceDesc->isConnected() && (sourceDesc->srcDevice()->equals(deviceDesc) ||
+                                          sourceDesc->sinkDevice()->equals(deviceDesc))
+                && !isCallRxAudioSource(sourceDesc)) {
+            disconnectAudioSource(sourceDesc);
         }
     }
 
@@ -6321,10 +6719,14 @@
                 release = true;
             }
         }
+        const char *address = deviceDesc->address().c_str();
         for (size_t j = 0; j < patchDesc->mPatch.num_sinks && !release; j++)  {
             const struct audio_port_config *sink = &patchDesc->mPatch.sinks[j];
             if (sink->type == AUDIO_PORT_TYPE_DEVICE &&
-                    sink->ext.device.type == deviceDesc->type()) {
+                    sink->ext.device.type == deviceDesc->type() &&
+                    (strnlen(address, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0
+                     || strncmp(sink->ext.device.address, address,
+                                 AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0)) {
                 release = true;
             }
         }
@@ -6403,7 +6805,7 @@
         for (auto it = channelMasks.begin(); it != channelMasks.end();) {
             audio_channel_mask_t channelMask = *it;
             if (channelMask & ~AUDIO_CHANNEL_OUT_STEREO) {
-                ALOGI("%s: force NEVER, so remove channelMask 0x%08x", __FUNCTION__, channelMask);
+                ALOGV("%s: force NEVER, so remove channelMask 0x%08x", __FUNCTION__, channelMask);
                 it = channelMasks.erase(it);
             } else {
                 ++it;
@@ -6423,7 +6825,7 @@
         // If not then add 5.1 support.
         if (!supports5dot1) {
             channelMasks.insert(AUDIO_CHANNEL_OUT_5POINT1);
-            ALOGI("%s: force MANUAL or ALWAYS, so adding channelMask for 5.1 surround", __func__);
+            ALOGV("%s: force MANUAL or ALWAYS, so adding channelMask for 5.1 surround", __func__);
         }
     }
 }
@@ -6447,6 +6849,7 @@
             return;
         }
         FormatVector formats = formatsFromString(reply.string());
+        mReportedFormatsMap[devDesc] = formats;
         if (device == AUDIO_DEVICE_OUT_HDMI
                 || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
             modifySurroundFormats(devDesc, &formats);
@@ -6546,4 +6949,129 @@
     return status;
 }
 
+bool AudioPolicyManager::areAllActiveTracksRerouted(const sp<SwAudioOutputDescriptor>& output)
+{
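+    // Returns true when no active client on this output would still be routed by the engine
+    // to the devices currently patched on the output (also true if there are no active clients).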
+    const TrackClientVector activeClients = output->getActiveClients();
+    if (activeClients.empty()) {
+        return true;
+    }
+    ssize_t index = mAudioPatches.indexOfKey(output->getPatchHandle());
+    if (index < 0) {
+        ALOGE("%s, no audio patch found while there are active clients on output %d",
+                __func__, output->getId());
+        return false;
+    }
+    sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+    DeviceVector routedDevices;
+    for (int i = 0; i < patchDesc->mPatch.num_sinks; ++i) {
+        sp<DeviceDescriptor> device = mAvailableOutputDevices.getDeviceFromId(
+                patchDesc->mPatch.sinks[i].id);
+        if (device == nullptr) {
+            ALOGE("%s, no audio device found with id(%d)",
+                    __func__, patchDesc->mPatch.sinks[i].id);
+            return false;
+        }
+        routedDevices.add(device);
+    }
+    for (const auto& client : activeClients) {
+        // TODO: b/175343099 only travel the valid client
+        sp<DeviceDescriptor> preferredDevice =
+                mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId());
+        if (mEngine->getOutputDevicesForAttributes(
+                client->attributes(), preferredDevice, false) == routedDevices) {
+            return false;
+        }
+    }
+    return true;
+}
+
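+// Opens an output on the given profile routed to the given devices, handling dynamic audio
+// profiles, remote submix policy mixes, duplication to the primary output and re-assignment of
+// mPrimaryOutput. Returns nullptr if the output could not be opened.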
+sp<SwAudioOutputDescriptor> AudioPolicyManager::openOutputWithProfileAndDevice(
+        const sp<IOProfile>& profile, const DeviceVector& devices)
+{
+    for (const auto& device : devices) {
+        // TODO: This should be checking if the profile supports the device combo.
+        if (!profile->supportsDevice(device)) {
+            return nullptr;
+        }
+    }
+    sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    status_t status = desc->open(nullptr, devices,
+            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+    if (status != NO_ERROR) {
+        return nullptr;
+    }
+
+    // Here is where the out_set_parameters() for card & device gets called
+    sp<DeviceDescriptor> device = devices.getDeviceForOpening();
+    const audio_devices_t deviceType = device->type();
+    const String8 &address = String8(device->address().c_str());
+    if (!address.isEmpty()) {
+        char *param = audio_device_address_to_parameter(deviceType, address.c_str());
+        mpClientInterface->setParameters(output, String8(param));
+        free(param);
+    }
+    updateAudioProfiles(device, output, profile->getAudioProfiles());
+    if (!profile->hasValidAudioProfile()) {
+        ALOGW("%s() missing param", __func__);
+        desc->close();
+        return nullptr;
+    } else if (profile->hasDynamicAudioProfile()) {
+        desc->close();
+        output = AUDIO_IO_HANDLE_NONE;
+        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+        profile->pickAudioProfile(
+                config.sample_rate, config.channel_mask, config.format);
+        config.offload_info.sample_rate = config.sample_rate;
+        config.offload_info.channel_mask = config.channel_mask;
+        config.offload_info.format = config.format;
+
+        status = desc->open(&config, devices,
+                            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+        if (status != NO_ERROR) {
+            return nullptr;
+        }
+    }
+
+    addOutput(output, desc);
+    if (audio_is_remote_submix_device(deviceType) && address != "0") {
+        sp<AudioPolicyMix> policyMix;
+        if (mPolicyMixes.getAudioPolicyMix(deviceType, address, policyMix) == NO_ERROR) {
+            policyMix->setOutput(desc);
+            desc->mPolicyMix = policyMix;
+        } else {
+            ALOGW("%s() cannot find policy for address %s", __func__,
+                    address.string());
+        }
+
+    } else if (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) && hasPrimaryOutput()) {
+        // no duplicated output for direct outputs and
+        // outputs used by dynamic policy mixes
+        audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
+
+        //TODO: configure audio effect output stage here
+
+        // open a duplicating output thread for the new output and the primary output
+        sp<SwAudioOutputDescriptor> dupOutputDesc =
+                new SwAudioOutputDescriptor(nullptr, mpClientInterface);
+        status = dupOutputDesc->openDuplicating(mPrimaryOutput, desc, &duplicatedOutput);
+        if (status == NO_ERROR) {
+            // add duplicated output descriptor
+            addOutput(duplicatedOutput, dupOutputDesc);
+        } else {
+            ALOGW("%s() could not open dup output for %d and %d", __func__,
+                  mPrimaryOutput->mIoHandle, output);
+            desc->close();
+            removeOutput(output);
+            nextAudioPortGeneration();
+            return nullptr;
+        }
+    }
+    if (mPrimaryOutput == nullptr && profile->getFlags() & AUDIO_OUTPUT_FLAG_PRIMARY) {
+        ALOGV("%s(): re-assigning mPrimaryOutput", __func__);
+        mPrimaryOutput = desc;
+    }
+    return desc;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index b588f89..98f96d1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -52,6 +52,8 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // ----------------------------------------------------------------------------
 
 // Attenuation applied to STRATEGY_SONIFICATION streams when a headset is connected: 6dB
@@ -116,7 +118,7 @@
                                   audio_io_handle_t *output,
                                   audio_session_t session,
                                   audio_stream_type_t *stream,
-                                  uid_t uid,
+                                  const AttributionSourceState& attributionSource,
                                   const audio_config_t *config,
                                   audio_output_flags_t *flags,
                                   audio_port_handle_t *selectedDeviceId,
@@ -125,12 +127,12 @@
                                   output_type_t *outputType) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
-        virtual void releaseOutput(audio_port_handle_t portId);
+        virtual bool releaseOutput(audio_port_handle_t portId);
         virtual status_t getInputForAttr(const audio_attributes_t *attr,
                                          audio_io_handle_t *input,
                                          audio_unique_id_t riid,
                                          audio_session_t session,
-                                         uid_t uid,
+                                         const AttributionSourceState& attributionSource,
                                          const audio_config_base_t *config,
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
@@ -180,7 +182,7 @@
                                 const DeviceTypeSet& deviceTypes) const;
 
         // return the strategy corresponding to a given stream type
-        virtual uint32_t getStrategyForStream(audio_stream_type_t stream)
+        virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream)
         {
             return streamToStrategy(stream);
         }
@@ -200,7 +202,7 @@
         virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc = NULL);
         virtual status_t registerEffect(const effect_descriptor_t *desc,
                                         audio_io_handle_t io,
-                                        uint32_t strategy,
+                                        product_strategy_t strategy,
                                         int session,
                                         int id);
         virtual status_t unregisterEffect(int id);
@@ -225,7 +227,7 @@
         status_t dump(int fd) override;
 
         status_t setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) override;
-        virtual bool isOffloadSupported(const audio_offload_info_t& offloadInfo);
+        virtual audio_offload_mode_t getOffloadSupport(const audio_offload_info_t& offloadInfo);
 
         virtual bool isDirectOutputSupported(const audio_config_base_t& config,
                                              const audio_attributes_t& attributes);
@@ -233,9 +235,9 @@
         virtual status_t listAudioPorts(audio_port_role_t role,
                                         audio_port_type_t type,
                                         unsigned int *num_ports,
-                                        struct audio_port *ports,
+                                        struct audio_port_v7 *ports,
                                         unsigned int *generation);
-        virtual status_t getAudioPort(struct audio_port *port);
+        virtual status_t getAudioPort(struct audio_port_v7 *port);
         virtual status_t createAudioPatch(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle,
                                            uid_t uid) {
@@ -263,17 +265,42 @@
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
         virtual status_t setUidDeviceAffinities(uid_t uid,
-                const Vector<AudioDeviceTypeAddr>& devices);
+                const AudioDeviceTypeAddrVector& devices);
         virtual status_t removeUidDeviceAffinities(uid_t uid);
         virtual status_t setUserIdDeviceAffinities(int userId,
-                const Vector<AudioDeviceTypeAddr>& devices);
+                const AudioDeviceTypeAddrVector& devices);
         virtual status_t removeUserIdDeviceAffinities(int userId);
 
-        virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device);
-        virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
-        virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device);
+        virtual status_t setDevicesRoleForStrategy(product_strategy_t strategy,
+                                                   device_role_t role,
+                                                   const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                      device_role_t role);
+
+
+        virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
+                                                      device_role_t role,
+                                                      AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t setDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t addDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                        device_role_t role,
+                                                        const AudioDeviceTypeAddrVector &devices);
+
+        virtual status_t removeDevicesRoleForCapturePreset(
+                audio_source_t audioSource, device_role_t role,
+                const AudioDeviceTypeAddrVector& devices);
+
+        virtual status_t clearDevicesRoleForCapturePreset(audio_source_t audioSource,
+                                                          device_role_t role);
+
+        virtual status_t getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
+                                                           device_role_t role,
+                                                           AudioDeviceTypeAddrVector &devices);
 
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
@@ -288,8 +315,9 @@
 
         virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
                                             audio_format_t *surroundFormats,
-                                            bool *surroundFormatsEnabled,
-                                            bool reported);
+                                            bool *surroundFormatsEnabled);
+        virtual status_t getReportedSurroundFormats(unsigned int *numSurroundFormats,
+                                                    audio_format_t *surroundFormats);
         virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
         virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
@@ -304,11 +332,14 @@
             return mEngine->listAudioProductStrategies(strategies);
         }
 
-        virtual status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                               product_strategy_t &productStrategy)
+        virtual status_t getProductStrategyFromAudioAttributes(
+                const AudioAttributes &aa, product_strategy_t &productStrategy,
+                bool fallbackOnDefault)
         {
-            productStrategy = mEngine->getProductStrategyForAttributes(aa.getAttributes());
-            return productStrategy != PRODUCT_STRATEGY_NONE ? NO_ERROR : BAD_VALUE;
+            productStrategy = mEngine->getProductStrategyForAttributes(
+                    aa.getAttributes(), fallbackOnDefault);
+            return (fallbackOnDefault && productStrategy == PRODUCT_STRATEGY_NONE) ?
+                    BAD_VALUE : NO_ERROR;
         }
 
         virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups)
@@ -316,11 +347,13 @@
             return mEngine->listAudioVolumeGroups(groups);
         }
 
-        virtual status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                           volume_group_t &volumeGroup)
+        virtual status_t getVolumeGroupFromAudioAttributes(
+                const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
         {
-            volumeGroup = mEngine->getVolumeGroupForAttributes(aa.getAttributes());
-            return volumeGroup != VOLUME_GROUP_NONE ? NO_ERROR : BAD_VALUE;
+            volumeGroup = mEngine->getVolumeGroupForAttributes(
+                        aa.getAttributes(), fallbackOnDefault);
+            return (fallbackOnDefault && volumeGroup == VOLUME_GROUP_NONE) ?
+                    BAD_VALUE : NO_ERROR;
         }
 
         bool isCallScreenModeSupported() override;
@@ -531,6 +564,20 @@
          */
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
 
+        bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
+            return mCallRxSourceClientPort != AUDIO_PORT_HANDLE_NONE
+                && source == mAudioSources.valueFor(mCallRxSourceClientPort);
+        }
+
+        void connectTelephonyRxAudioSource();
+
+        void disconnectTelephonyRxAudioSource();
+
+        /**
+         * @brief updates routing for all inputs.
+         */
+        void updateInputRouting();
+
         /**
          * @brief checkOutputForAttributes checks and if necessary changes outputs used for the
          * given audio attributes.
@@ -541,6 +588,16 @@
          */
         void checkOutputForAttributes(const audio_attributes_t &attr);
 
+        /**
+         * @brief checkAudioSourceForAttributes checks whether any AudioSource that follows the
+         * same routing as the given audio attributes is currently not routed, and tries to
+         * connect it. It must be called after checkOutputForAttributes() to handle orphan
+         * AudioSources, i.e. AudioSources not attached to any audio output (e.g. an AudioSource
+         * connected to a direct output that was disconnected, and its output closed, because the
+         * sink device became unavailable).
+         * @param attr the audio attributes to consider
+         */
+        void checkAudioSourceForAttributes(const audio_attributes_t &attr);
+
         bool followsSameRouting(const audio_attributes_t &lAttr,
                                 const audio_attributes_t &rAttr) const;
 
@@ -608,7 +665,8 @@
                                        audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
                                        audio_format_t format = AUDIO_FORMAT_INVALID,
                                        audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE,
-                                       uint32_t samplingRate = 0);
+                                       uint32_t samplingRate = 0,
+                                       audio_session_t sessionId = AUDIO_SESSION_NONE);
         // samplingRate, format, channelMask are in/out and so may be modified
         sp<IOProfile> getInputProfile(const sp<DeviceDescriptor> & device,
                                       uint32_t& samplingRate,
@@ -680,9 +738,22 @@
                     String8(devices.itemAt(0)->address().c_str()) : String8("");
         }
 
-        uint32_t updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs = 0);
+        status_t updateCallRouting(
+                bool fromCache, uint32_t delayMs = 0, uint32_t *waitMs = nullptr);
+        status_t updateCallRoutingInternal(
+                const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs);
         sp<AudioPatch> createTelephonyPatch(bool isRx, const sp<DeviceDescriptor> &device,
                                             uint32_t delayMs);
+        /**
+         * @brief selectBestRxSinkDevicesForCall: if the primary module hosts both the Telephony
+         * Rx and Tx devices, and also declares support for a HW bridge between the Telephony Rx
+         * device and the sink device selected for the Voice Call audio attributes, that sink
+         * device is selected in priority. Otherwise, getNewOutputDevices() is called on the
+         * primary output to select the sink device.
+         * @param fromCache true to retrieve the devices from the engine cache instead of having
+         * the engine re-evaluate all product strategies.
+         * @return vector of selected devices, empty if none is found.
+         */
+        DeviceVector selectBestRxSinkDevicesForCall(bool fromCache);
         bool isDeviceOfModule(const sp<DeviceDescriptor>& devDesc, const char *moduleId) const;
 
         status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
@@ -709,6 +780,7 @@
 
         sp<SourceClientDescriptor> getSourceForAttributesOnOutput(audio_io_handle_t output,
                                                                   const audio_attributes_t &attr);
+        void clearAudioSourcesForOutput(audio_io_handle_t output);
 
         void cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc);
 
@@ -755,7 +827,6 @@
         SoundTriggerSessionCollection mSoundTriggerSessions;
 
         sp<AudioPatch> mCallTxPatch;
-        sp<AudioPatch> mCallRxPatch;
 
         HwAudioOutputCollection mHwOutputs;
         SourceClientCollection mAudioSources;
@@ -787,6 +858,36 @@
         std::unordered_set<audio_format_t> mManualSurroundFormats;
 
         std::unordered_map<uid_t, audio_flags_mask_t> mAllowedCapturePolicies;
+
+        // Maps each device descriptor to the formats reported by that device.
+        std::map<wp<DeviceDescriptor>, FormatVector> mReportedFormatsMap;
+
+        // Cached product strategy ID corresponding to legacy strategy STRATEGY_PHONE
+        product_strategy_t mCommunnicationStrategy;
+
+        // The port handle of the hardware audio source created internally for the Call RX audio
+        // end point.
+        audio_port_handle_t mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+
+        // Support for Multi-Stream Decoder (MSD) module
+        sp<DeviceDescriptor> getMsdAudioInDevice() const;
+        DeviceVector getMsdAudioOutDevices() const;
+        const AudioPatchCollection getMsdOutputPatches() const;
+        status_t getMsdProfiles(bool hwAvSync,
+                const InputProfileCollection &inputProfiles,
+                const OutputProfileCollection &outputProfiles,
+                const sp<DeviceDescriptor> &sourceDevice,
+                const sp<DeviceDescriptor> &sinkDevice,
+                AudioProfileVector &sourceProfiles,
+                AudioProfileVector &sinkProfiles) const;
+        status_t getBestMsdConfig(bool hwAvSync,
+                const AudioProfileVector &sourceProfiles,
+                const AudioProfileVector &sinkProfiles,
+                audio_port_config *sourceConfig,
+                audio_port_config *sinkConfig) const;
+        PatchBuilder buildMsdPatch(bool msdIsSource, const sp<DeviceDescriptor> &device) const;
+        status_t setMsdOutputPatches(const DeviceVector *outputDevices = nullptr);
+        void releaseMsdOutputPatches(const DeviceVector& devices);
 private:
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
@@ -794,17 +895,6 @@
         void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
         void modifySurroundChannelMasks(ChannelMaskSet *channelMasksPtr);
 
-        // Support for Multi-Stream Decoder (MSD) module
-        sp<DeviceDescriptor> getMsdAudioInDevice() const;
-        DeviceVector getMsdAudioOutDevices() const;
-        const AudioPatchCollection getMsdPatches() const;
-        status_t getBestMsdAudioProfileFor(const sp<DeviceDescriptor> &outputDevice,
-                                           bool hwAvSync,
-                                           audio_port_config *sourceConfig,
-                                           audio_port_config *sinkConfig) const;
-        PatchBuilder buildMsdPatch(const sp<DeviceDescriptor> &outputDevice) const;
-        status_t setMsdPatch(const sp<DeviceDescriptor> &outputDevice = nullptr);
-
         // If any, resolve any "dynamic" fields of an Audio Profiles collection
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
                 AudioProfileVector &profiles);
@@ -938,10 +1028,17 @@
                 sp<AudioPatch> *patchDescPtr);
 
         bool areAllDevicesSupported(
-                const Vector<AudioDeviceTypeAddr>& devices,
+                const AudioDeviceTypeAddrVector& devices,
                 std::function<bool(audio_devices_t)> predicate,
                 const char* context);
 
+        bool isScoRequestedForComm() const;
+
+        bool areAllActiveTracksRerouted(const sp<SwAudioOutputDescriptor>& output);
+
+        sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(const sp<IOProfile>& profile,
+                                                                   const DeviceVector& devices);
+
 };
 
 };
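The new fallbackOnDefault parameter threaded through the two lookups above changes when BAD_VALUE is reported. A minimal caller-side sketch, not part of the patch: the free function is hypothetical, and it is assumed to live in namespace android with the relevant policy headers included.

```cpp
// Illustrative only: shows the fallbackOnDefault contract of the two lookups above.
status_t resolveStrategyAndGroup(AudioPolicyManager& manager, const AudioAttributes& attributes) {
    product_strategy_t strategy;
    // With fallbackOnDefault == true, BAD_VALUE is returned only when not even the default
    // product strategy can be resolved for these attributes.
    status_t status = manager.getProductStrategyFromAudioAttributes(
            attributes, strategy, true /*fallbackOnDefault*/);
    if (status != NO_ERROR) {
        return status;
    }

    volume_group_t group;
    // With fallbackOnDefault == false, NO_ERROR is reported even when the result is
    // VOLUME_GROUP_NONE, leaving the "no match" decision to the caller.
    return manager.getVolumeGroupFromAudioAttributes(attributes, group,
                                                     false /*fallbackOnDefault*/);
}
```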
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
new file mode 100644
index 0000000..454c020
--- /dev/null
+++ b/services/audiopolicy/service/Android.bp
@@ -0,0 +1,76 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_shared {
+    name: "libaudiopolicyservice",
+
+    srcs: [
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiopolicy",
+        "libaudiopolicymanagerdefault",
+        "libaudioutils",
+        "libbinder",
+        "libcutils",
+        "libeffectsconfig",
+        "libhardware_legacy",
+        "liblog",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libpermission",
+        "libsensorprivacy",
+        "libutils",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudiopolicycomponents",
+        "framework-permission-aidl-cpp",
+    ],
+
+    header_libs: [
+        "libaudiopolicycommon",
+        "libaudiopolicyengine_interface_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-fvisibility=hidden",
+        "-Werror",
+        "-Wall",
+        "-Wthread-safety",
+    ],
+
+    export_shared_lib_headers: [
+        "libactivitymanager_aidl",
+        "libsensorprivacy",
+        "framework-permission-aidl-cpp",
+    ],
+}
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
deleted file mode 100644
index 680b077..0000000
--- a/services/audiopolicy/service/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-    AudioPolicyService.cpp \
-    AudioPolicyEffects.cpp \
-    AudioPolicyInterfaceImpl.cpp \
-    AudioPolicyClientImpl.cpp \
-    CaptureStateNotifier.cpp
-
-LOCAL_C_INCLUDES := \
-    frameworks/av/services/audioflinger \
-    $(call include-path-for, audio-utils)
-
-LOCAL_HEADER_LIBRARIES := \
-    libaudiopolicycommon \
-    libaudiopolicyengine_interface_headers \
-    libaudiopolicymanager_interface_headers
-
-LOCAL_SHARED_LIBRARIES := \
-    libcutils \
-    libutils \
-    liblog \
-    libbinder \
-    libaudioclient \
-    libaudioutils \
-    libaudiofoundation \
-    libhardware_legacy \
-    libaudiopolicymanager \
-    libmedia_helper \
-    libmediametrics \
-    libmediautils \
-    libeffectsconfig \
-    libsensorprivacy \
-    capture_state_listener-aidl-cpp
-
-LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
-    libsensorprivacy
-
-LOCAL_STATIC_LIBRARIES := \
-    libaudiopolicycomponents
-
-LOCAL_MODULE:= libaudiopolicyservice
-
-LOCAL_CFLAGS += -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror -Wthread-safety
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 9fa7a53..cd53073 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -50,7 +50,22 @@
         ALOGW("%s: could not get AudioFlinger", __func__);
         return PERMISSION_DENIED;
     }
-    return af->openOutput(module, output, config, device, latencyMs, flags);
+
+    media::OpenOutputRequest request;
+    media::OpenOutputResponse response;
+
+    request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*config));
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+
+    status_t status = af->openOutput(request, &response);
+    if (status == OK) {
+        *output = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.output));
+        *config = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioConfig_audio_config_t(response.config));
+        *latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
+    }
+    return status;
 }
 
 audio_io_handle_t AudioPolicyService::AudioPolicyClient::openDuplicateOutput(
@@ -111,7 +126,22 @@
         return PERMISSION_DENIED;
     }
 
-    return af->openInput(module, input, config, device, address, source, flags);
+    AudioDeviceTypeAddr deviceTypeAddr(*device, address.c_str());
+
+    media::OpenInputRequest request;
+    request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
+    request.input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
+    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*config));
+    request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(deviceTypeAddr));
+    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+
+    media::OpenInputResponse response;
+    status_t status = af->openInput(request, &response);
+    if (status == OK) {
+        *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(response.input));
+    }
+    return status;
 }
 
 status_t AudioPolicyService::AudioPolicyClient::closeInput(audio_io_handle_t input)
@@ -236,6 +266,11 @@
     mAudioPolicyService->onAudioVolumeGroupChanged(group, flags);
 }
 
+void AudioPolicyService::AudioPolicyClient::onRoutingUpdated()
+{
+    mAudioPolicyService->onRoutingUpdated();
+}
+
 audio_unique_id_t AudioPolicyService::AudioPolicyClient::newAudioUniqueId(audio_unique_id_use_t use)
 {
     return AudioSystem::newAudioUniqueId(use);
@@ -246,4 +281,24 @@
     mAudioPolicyService->mCaptureStateNotifier.setCaptureState(active);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::getAudioPort(struct audio_port_v7 *port)
+{
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+    return af->getAudioPort(port);
+}
+
+status_t AudioPolicyService::AudioPolicyClient::updateSecondaryOutputs(
+        const TrackSecondaryOutputsMap& trackSecondaryOutputs) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+    return af->updateSecondaryOutputs(trackSecondaryOutputs);
+}
+
 } // namespace android
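openOutput() and openInput() above now marshal their arguments through AIDL request/response parcelables, converting every legacy value with the legacy2aidl_* / aidl2legacy_* helpers and bailing out early via VALUE_OR_RETURN_STATUS. A condensed sketch of that round-trip pattern, reusing only helpers that appear in the hunk above; the wrapper function itself is hypothetical.

```cpp
// Hypothetical helper: converts a legacy handle to its AIDL representation and back, returning
// early with the conversion error if either direction fails, exactly as the calls above do.
status_t roundTripIoHandle(audio_io_handle_t in, audio_io_handle_t* out) {
    // VALUE_OR_RETURN_STATUS unwraps a ConversionResult<T>; on failure it returns the
    // contained status_t to the caller.
    int32_t aidlHandle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(in));
    *out = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(aidlHandle));
    return NO_ERROR;
}
```

The same pattern, with VALUE_OR_RETURN_BINDER_STATUS instead, recurs throughout AudioPolicyInterfaceImpl.cpp below.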
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 1ec0c5e..3f01de9 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -35,6 +35,8 @@
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // ----------------------------------------------------------------------------
 // AudioPolicyEffects Implementation
 // ----------------------------------------------------------------------------
@@ -121,8 +123,11 @@
         Vector <EffectDesc *> effects = mInputSources.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, -1, 0,
-                                                 0, audioSession, input);
+            AttributionSourceState attributionSource;
+            attributionSource.packageName = "android";
+            attributionSource.token = sp<BBinder>::make();
+            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGW("addInputEffects(): failed to create Fx %s on source %d",
@@ -270,8 +275,11 @@
         Vector <EffectDesc *> effects = mOutputStreams.valueAt(index)->mEffects;
         for (size_t i = 0; i < effects.size(); i++) {
             EffectDesc *effect = effects[i];
-            sp<AudioEffect> fx = new AudioEffect(NULL, String16("android"), &effect->mUuid, 0, 0, 0,
-                                                 audioSession, output);
+            AttributionSourceState attributionSource;
+            attributionSource.packageName = "android";
+            attributionSource.token = sp<BBinder>::make();
+            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("addOutputSessionEffects(): failed to create Fx  %s on session %d",
@@ -970,11 +978,14 @@
     for (const auto& deviceEffectsIter : mDeviceEffects) {
         const auto& deviceEffects =  deviceEffectsIter.second;
         for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
-            auto fx = std::make_unique<AudioEffect>(
-                        EFFECT_UUID_NULL, String16("android"), &effectDesc->mUuid, 0, nullptr,
-                        nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
-                        AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
-                                            deviceEffects->getDeviceAddress()});
+            AttributionSourceState attributionSource;
+            attributionSource.packageName = "android";
+            attributionSource.token = sp<BBinder>::make();
+            sp<AudioEffect> fx = new AudioEffect(attributionSource);
+            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
+                    nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
+                    AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
+                                        deviceEffects->getDeviceAddress()});
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("%s(): failed to create Fx %s on port type=%d address=%s", __func__,
@@ -987,7 +998,7 @@
             ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
                   effectDesc->mName, deviceEffects->getDeviceType(),
                   deviceEffects->getDeviceAddress().c_str());
-            deviceEffects->mEffects.push_back(std::move(fx));
+            deviceEffects->mEffects.push_back(fx);
         }
     }
 }
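Each of the three call sites above replaces the removed all-in-one AudioEffect constructor with the same sequence: construct with an AttributionSourceState, configure with set(), then verify initCheck(). A condensed sketch of that sequence; the helper name and placeholder arguments are illustrative, not part of the patch, and the code is assumed to live in namespace android like the surrounding file.

```cpp
// Illustrative only: mirrors the effect-creation sequence used above.
sp<AudioEffect> makeSystemEffect(const effect_uuid_t* uuid, audio_session_t session,
                                 audio_io_handle_t io) {
    content::AttributionSourceState attributionSource;
    attributionSource.packageName = "android";      // effects are created on behalf of the system
    attributionSource.token = sp<BBinder>::make();   // binder token identifying this process
    sp<AudioEffect> fx = new AudioEffect(attributionSource);
    fx->set(nullptr /*type*/, uuid, 0 /*priority*/, nullptr /*callback*/, nullptr /*user*/,
            session, io);
    status_t status = fx->initCheck();
    // As in the code above, ALREADY_EXISTS is treated as success.
    if (status != NO_ERROR && status != ALREADY_EXISTS) {
        return nullptr;
    }
    return fx;
}
```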
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 81c728d..13d5d0c 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -207,7 +207,7 @@
             mDeviceType(device), mDeviceAddress(address) {}
         /*virtual*/ ~DeviceEffects() = default;
 
-        std::vector<std::unique_ptr<AudioEffect>> mEffects;
+        std::vector< sp<AudioEffect> > mEffects;
         audio_devices_t getDeviceType() const { return mDeviceType; }
         std::string getDeviceAddress() const { return mDeviceAddress; }
         const std::unique_ptr<EffectDescVector> mEffectDescriptors;
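mEffects switches from unique_ptr to sp<> because device effects are now created as sp<AudioEffect> in AudioPolicyEffects.cpp (see the push_back change above); copying an sp<> is a reference-count bump rather than a transfer of exclusive ownership. A small sketch of the resulting ownership model, with illustrative names only:

```cpp
// Illustrative only: both the DeviceEffects container and the creating code can hold a strong
// reference to the same AudioEffect once it is stored as sp<>.
void keepDeviceEffect(std::vector<sp<AudioEffect>>& effects, const sp<AudioEffect>& fx) {
    effects.push_back(fx);  // refcount bump; fx stays valid for the caller as well,
                            // unlike the previous unique_ptr + std::move() pattern
}
```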
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 34d07b6..9987252 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -19,11 +19,31 @@
 
 #include "AudioPolicyService.h"
 #include "TypeConverter.h"
-#include <media/MediaMetricsItem.h>
+#include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
+#include <media/AudioValidator.h>
+#include <media/MediaMetricsItem.h>
+#include <media/PolicyAidlConversion.h>
 #include <utils/Log.h>
+#include <android/content/AttributionSourceState.h>
+
+#define VALUE_OR_RETURN_BINDER_STATUS(x) \
+    ({ auto _tmp = (x); \
+       if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
+       std::move(_tmp.value()); })
+
+#define RETURN_IF_BINDER_ERROR(x)      \
+    {                                  \
+        binder::Status _tmp = (x);     \
+        if (!_tmp.isOk()) return _tmp; \
+    }
+
+#define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
+using binder::Status;
+using aidl_utils::binderStatusFromStatusT;
+using content::AttributionSourceState;
 
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
@@ -44,15 +64,16 @@
 }
 
 status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
-     return validateUsage(usage, IPCThreadState::self()->getCallingPid(),
-        IPCThreadState::self()->getCallingUid());
+     return validateUsage(usage, getCallingAttributionSource());
 }
 
-status_t AudioPolicyService::validateUsage(audio_usage_t usage, pid_t pid, uid_t uid) {
+status_t AudioPolicyService::validateUsage(audio_usage_t usage,
+        const AttributionSourceState& attributionSource) {
     if (isSystemUsage(usage)) {
         if (isSupportedSystemUsage(usage)) {
-            if (!modifyAudioRoutingAllowed(pid, uid)) {
-                ALOGE("permission denied: modify audio routing not allowed for uid %d", uid);
+            if (!modifyAudioRoutingAllowed(attributionSource)) {
+                ALOGE(("permission denied: modify audio routing not allowed "
+                       "for attributionSource %s"), attributionSource.toString().c_str());
                 return PERMISSION_DENIED;
             }
         } else {
@@ -74,71 +95,94 @@
     mAudioPolicyManager->onNewAudioModulesAvailable();
 }
 
-status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device,
-                                                  audio_policy_dev_state_t state,
-                                                  const char *device_address,
-                                                  const char *device_name,
-                                                  audio_format_t encodedFormat)
-{
+Status AudioPolicyService::setDeviceConnectionState(
+        const media::AudioDevice& deviceAidl,
+        media::AudioPolicyDeviceState stateAidl,
+        const std::string& deviceNameAidl,
+        media::audio::common::AudioFormat encodedFormatAidl) {
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
+    audio_policy_dev_state_t state = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(stateAidl));
+    audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioFormat_audio_format_t(encodedFormatAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE &&
             state != AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
 
     ALOGV("setDeviceConnectionState()");
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setDeviceConnectionState(device, state,
-                                                         device_address, device_name, encodedFormat);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDeviceConnectionState(device, state,
+                                                          deviceAidl.address.c_str(),
+                                                          deviceNameAidl.c_str(),
+                                                          encodedFormat));
 }
 
-audio_policy_dev_state_t AudioPolicyService::getDeviceConnectionState(
-                                                              audio_devices_t device,
-                                                              const char *device_address)
-{
+Status AudioPolicyService::getDeviceConnectionState(const media::AudioDevice& deviceAidl,
+                                                    media::AudioPolicyDeviceState* _aidl_return) {
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
     if (mAudioPolicyManager == NULL) {
-        return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(
+                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+        return Status::ok();
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->getDeviceConnectionState(device,
-                                                      device_address);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(
+                    mAudioPolicyManager->getDeviceConnectionState(device,
+                                                                  deviceAidl.address.c_str())));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::handleDeviceConfigChange(audio_devices_t device,
-                                                  const char *device_address,
-                                                  const char *device_name,
-                                                  audio_format_t encodedFormat)
-{
+Status AudioPolicyService::handleDeviceConfigChange(
+        const media::AudioDevice& deviceAidl,
+        const std::string& deviceNameAidl,
+        media::audio::common::AudioFormat encodedFormatAidl) {
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
+    audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioFormat_audio_format_t(encodedFormatAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     ALOGV("handleDeviceConfigChange()");
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->handleDeviceConfigChange(device, device_address,
-                                                         device_name, encodedFormat);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->handleDeviceConfigChange(device, deviceAidl.address.c_str(),
+                                                          deviceNameAidl.c_str(), encodedFormat));
 }
 
-status_t AudioPolicyService::setPhoneState(audio_mode_t state, uid_t uid)
+Status AudioPolicyService::setPhoneState(media::AudioMode stateAidl, int32_t uidAidl)
 {
+    audio_mode_t state = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioMode_audio_mode_t(stateAidl));
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(state) >= AUDIO_MODE_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
 
     ALOGV("setPhoneState()");
@@ -154,109 +198,150 @@
     mAudioPolicyManager->setPhoneState(state);
     mPhoneState = state;
     mPhoneStateOwnerUid = uid;
-    return NO_ERROR;
+    updateUidStates_l();
+    return Status::ok();
 }
 
-audio_mode_t AudioPolicyService::getPhoneState()
-{
+Status AudioPolicyService::getPhoneState(media::AudioMode* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    return mPhoneState;
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_mode_t_AudioMode(mPhoneState));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setForceUse(audio_policy_force_use_t usage,
-                                         audio_policy_forced_cfg_t config)
+Status AudioPolicyService::setForceUse(media::AudioPolicyForceUse usageAidl,
+                                       media::AudioPolicyForcedConfig configAidl)
 {
+    audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
+    audio_policy_forced_cfg_t config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(configAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
     if (!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (usage < 0 || usage >= AUDIO_POLICY_FORCE_USE_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
     if (config < 0 || config >= AUDIO_POLICY_FORCE_CFG_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
     ALOGV("setForceUse()");
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     mAudioPolicyManager->setForceUse(usage, config);
-    return NO_ERROR;
+    return Status::ok();
 }
 
-audio_policy_forced_cfg_t AudioPolicyService::getForceUse(audio_policy_force_use_t usage)
-{
+Status AudioPolicyService::getForceUse(media::AudioPolicyForceUse usageAidl,
+                                       media::AudioPolicyForcedConfig* _aidl_return) {
+    audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return AUDIO_POLICY_FORCE_NONE;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (usage < 0 || usage >= AUDIO_POLICY_FORCE_USE_CNT) {
-        return AUDIO_POLICY_FORCE_NONE;
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(AUDIO_POLICY_FORCE_NONE));
+        return Status::ok();
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->getForceUse(usage);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(
+                    mAudioPolicyManager->getForceUse(usage)));
+    return Status::ok();
 }
 
-audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream)
+Status AudioPolicyService::getOutput(media::AudioStreamType streamAidl, int32_t* _aidl_return)
 {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return AUDIO_IO_HANDLE_NONE;
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(AUDIO_IO_HANDLE_NONE));
+        return Status::ok();
     }
     if (mAudioPolicyManager == NULL) {
-        return AUDIO_IO_HANDLE_NONE;
+        return binderStatusFromStatusT(NO_INIT);
     }
     ALOGV("getOutput()");
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getOutput(stream);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(mAudioPolicyManager->getOutput(stream)));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getOutputForAttr(audio_attributes_t *attr,
-                                              audio_io_handle_t *output,
-                                              audio_session_t session,
-                                              audio_stream_type_t *stream,
-                                              pid_t pid,
-                                              uid_t uid,
-                                              const audio_config_t *config,
-                                              audio_output_flags_t flags,
-                                              audio_port_handle_t *selectedDeviceId,
-                                              audio_port_handle_t *portId,
-                                              std::vector<audio_io_handle_t> *secondaryOutputs)
+Status AudioPolicyService::getOutputForAttr(const media::AudioAttributesInternal& attrAidl,
+                                            int32_t sessionAidl,
+                                            const AttributionSourceState& attributionSource,
+                                            const media::AudioConfig& configAidl,
+                                            int32_t flagsAidl,
+                                            int32_t selectedDeviceIdAidl,
+                                            media::GetOutputForAttrResponse* _aidl_return)
 {
+    audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_session_t(sessionAidl));
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioConfig_audio_config_t(configAidl));
+    audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_output_flags_t_mask(flagsAidl));
+    audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
+
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    std::vector<audio_io_handle_t> secondaryOutputs;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
-    status_t result = validateUsage(attr->usage, pid, uid);
-    if (result != NO_ERROR) {
-        return result;
-    }
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, attributionSource)));
 
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
+    // TODO b/182392553: refactor or remove
+    AttributionSourceState adjAttributionSource = attributionSource;
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isAudioServerOrMediaServerUid(callingUid) || uid == (uid_t)-1) {
-        ALOGW_IF(uid != (uid_t)-1 && uid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __func__, callingUid, uid);
-        uid = callingUid;
+    if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
+        int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_uid_t_int32_t(callingUid));
+        ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
+                "%s uid %d tried to pass itself off as %d", __func__,
+                callingUidAidl, attributionSource.uid);
+        adjAttributionSource.uid = callingUidAidl;
     }
-    if (!mPackageManager.allowPlaybackCapture(uid)) {
-        attr->flags |= AUDIO_FLAG_NO_MEDIA_PROJECTION;
+    if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
+        aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+        attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
-    if (((attr->flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(pid, uid)) {
-        attr->flags &= ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE);
+    if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
+            && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+        attr.flags = static_cast<audio_flags_mask_t>(
+                attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
-    result = mAudioPolicyManager->getOutputForAttr(attr, output, session, stream, uid,
-                                                 config,
-                                                 &flags, selectedDeviceId, portId,
-                                                 secondaryOutputs,
-                                                 &outputType);
+    status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
+                                                            &stream,
+                                                            adjAttributionSource,
+                                                            &config,
+                                                            &flags, &selectedDeviceId, &portId,
+                                                            &secondaryOutputs,
+                                                            &outputType);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -265,16 +350,16 @@
         case AudioPolicyInterface::API_OUTPUT_LEGACY:
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
-            if (!modifyPhoneStateAllowed(pid, uid)) {
+            if (!modifyPhoneStateAllowed(adjAttributionSource)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
-                    __func__, uid);
+                    __func__, adjAttributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(pid, uid)) {
+            if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
-                    __func__, uid);
+                    __func__, adjAttributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
@@ -286,11 +371,24 @@
     }
 
     if (result == NO_ERROR) {
-        sp <AudioPlaybackClient> client =
-            new AudioPlaybackClient(*attr, *output, uid, pid, session, *portId, *selectedDeviceId, *stream);
-        mAudioPlaybackClients.add(*portId, client);
+        sp<AudioPlaybackClient> client =
+                new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+                    portId, selectedDeviceId, stream);
+        mAudioPlaybackClients.add(portId, client);
+
+        _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_io_handle_t_int32_t(output));
+        _aidl_return->stream = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
+        _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+        _aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_port_handle_t_int32_t(portId));
+        _aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
+                convertContainer<std::vector<int32_t>>(secondaryOutputs,
+                                                       legacy2aidl_audio_io_handle_t_int32_t));
     }
-    return result;
+    return binderStatusFromStatusT(result);
 }
 
 void AudioPolicyService::getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -308,10 +406,12 @@
     effects = mAudioPolicyEffects;
 }
 
-status_t AudioPolicyService::startOutput(audio_port_handle_t portId)
+Status AudioPolicyService::startOutput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     ALOGV("startOutput()");
     sp<AudioPlaybackClient> client;
@@ -333,17 +433,19 @@
     if (status == NO_ERROR) {
         client->active = true;
     }
-    return status;
+    return binderStatusFromStatusT(status);
 }
 
-status_t AudioPolicyService::stopOutput(audio_port_handle_t portId)
+Status AudioPolicyService::stopOutput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     ALOGV("stopOutput()");
     mOutputCommandThread->stopOutputCommand(portId);
-    return NO_ERROR;
+    return Status::ok();
 }
 
 status_t  AudioPolicyService::doStopOutput(audio_port_handle_t portId)
@@ -371,13 +473,16 @@
     return status;
 }
 
-void AudioPolicyService::releaseOutput(audio_port_handle_t portId)
+Status AudioPolicyService::releaseOutput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
     if (mAudioPolicyManager == NULL) {
-        return;
+        return binderStatusFromStatusT(NO_INIT);
     }
     ALOGV("releaseOutput()");
     mOutputCommandThread->releaseOutputCommand(portId);
+    return Status::ok();
 }
 
 void AudioPolicyService::doReleaseOutput(audio_port_handle_t portId)
@@ -400,28 +505,40 @@
     mAudioPolicyManager->releaseOutput(portId);
 }
 
-status_t AudioPolicyService::getInputForAttr(const audio_attributes_t *attr,
-                                             audio_io_handle_t *input,
-                                             audio_unique_id_t riid,
-                                             audio_session_t session,
-                                             pid_t pid,
-                                             uid_t uid,
-                                             const String16& opPackageName,
-                                             const audio_config_base_t *config,
-                                             audio_input_flags_t flags,
-                                             audio_port_handle_t *selectedDeviceId,
-                                             audio_port_handle_t *portId)
-{
+Status AudioPolicyService::getInputForAttr(const media::AudioAttributesInternal& attrAidl,
+                                           int32_t inputAidl,
+                                           int32_t riidAidl,
+                                           int32_t sessionAidl,
+                                           const AttributionSourceState& attributionSource,
+                                           const media::AudioConfigBase& configAidl,
+                                           int32_t flagsAidl,
+                                           int32_t selectedDeviceIdAidl,
+                                           media::GetInputForAttrResponse* _aidl_return) {
+    audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_io_handle_t input = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(inputAidl));
+    audio_unique_id_t riid = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_unique_id_t(riidAidl));
+    audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_session_t(sessionAidl));
+    audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl));
+    audio_input_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_input_flags_t_mask(flagsAidl));
+    audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
+
+    audio_port_handle_t portId;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
-    status_t result = validateUsage(attr->usage, pid, uid);
-    if (result != NO_ERROR) {
-        return result;
-    }
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
 
-    audio_source_t inputSource = attr->source;
+    audio_source_t inputSource = attr.source;
     if (inputSource == AUDIO_SOURCE_DEFAULT) {
         inputSource = AUDIO_SOURCE_MIC;
     }
@@ -432,48 +549,73 @@
                 && inputSource != AUDIO_SOURCE_HOTWORD
                 && inputSource != AUDIO_SOURCE_FM_TUNER
                 && inputSource != AUDIO_SOURCE_ECHO_REFERENCE)) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    bool updatePid = (pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    // Make sure attribution source represents the current caller
+    AttributionSourceState adjAttributionSource = attributionSource;
+    // TODO b/182392553: refactor or remove
+    bool updatePid = (attributionSource.pid == -1);
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
+            attributionSource.uid));
     if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(uid != (uid_t)-1 && uid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid, uid);
-        uid = callingUid;
+        ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
+                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
+                currentUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
+                callingUid));
         updatePid = true;
     }
 
     if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(pid != (pid_t)-1 && pid != callingPid,
+        const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
+            IPCThreadState::self()->getCallingPid()));
+        ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, pid);
-        pid = callingPid;
+                 __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
+        adjAttributionSource.pid = callingPid;
     }
 
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage,
+            adjAttributionSource)));
+
     // check calling permissions.
-    // Capturing from FM_TUNER source is controlled by captureAudioOutputAllowed() only as this
-    // does not affect users privacy as does capturing from an actual microphone.
-    if (!(recordingAllowed(opPackageName, pid, uid) || attr->source == AUDIO_SOURCE_FM_TUNER)) {
-        ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
-                __func__, uid, pid);
-        return PERMISSION_DENIED;
+    // Capturing from the FM_TUNER source is controlled by captureTunerAudioInputAllowed() and
+    // captureAudioOutputAllowed() (deprecated), as it does not affect the user's privacy the
+    // way capturing from an actual microphone does.
+    if (!(recordingAllowed(adjAttributionSource, attr.source)
+            || attr.source == AUDIO_SOURCE_FM_TUNER)) {
+        ALOGE("%s permission denied: recording not allowed for %s",
+                __func__, adjAttributionSource.toString().c_str());
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(pid, uid);
+    bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
     if ((inputSource == AUDIO_SOURCE_VOICE_UPLINK ||
         inputSource == AUDIO_SOURCE_VOICE_DOWNLINK ||
         inputSource == AUDIO_SOURCE_VOICE_CALL ||
-        inputSource == AUDIO_SOURCE_ECHO_REFERENCE||
-        inputSource == AUDIO_SOURCE_FM_TUNER) &&
-        !canCaptureOutput) {
-        return PERMISSION_DENIED;
+        inputSource == AUDIO_SOURCE_ECHO_REFERENCE)
+        && !canCaptureOutput) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(opPackageName, pid, uid);
+    if (inputSource == AUDIO_SOURCE_FM_TUNER
+        && !captureTunerAudioInputAllowed(adjAttributionSource)
+        && !canCaptureOutput) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+
+    bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+
+    if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
+            && !canCaptureHotword) {
+        ALOGE("%s: permission denied: hotword mode not allowed"
+              " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     sp<AudioPolicyEffects>audioPolicyEffects;
@@ -485,10 +627,11 @@
         {
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
-            status = mAudioPolicyManager->getInputForAttr(attr, input, riid, session, uid,
-                                                         config,
-                                                         flags, selectedDeviceId,
-                                                         &inputType, portId);
+            status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
+                                                          adjAttributionSource, &config,
+                                                          flags, &selectedDeviceId,
+                                                          &inputType, &portId);
+
         }
         audioPolicyEffects = mAudioPolicyEffects;
 
@@ -509,7 +652,7 @@
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!modifyAudioRoutingAllowed(pid, uid)) {
+                if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
                     ALOGE("getInputForAttr() permission denied: modify audio routing not allowed");
                     status = PERMISSION_DENIED;
                 }
@@ -524,29 +667,37 @@
         if (status != NO_ERROR) {
             if (status == PERMISSION_DENIED) {
                 AutoCallerClear acc;
-                mAudioPolicyManager->releaseInput(*portId);
+                mAudioPolicyManager->releaseInput(portId);
             }
-            return status;
+            return binderStatusFromStatusT(status);
         }
 
-        sp<AudioRecordClient> client = new AudioRecordClient(*attr, *input, uid, pid, session, *portId,
-                                                             *selectedDeviceId, opPackageName,
-                                                             canCaptureOutput, canCaptureHotword);
-        mAudioRecordClients.add(*portId, client);
+        sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
+                                                             selectedDeviceId, adjAttributionSource,
+                                                             canCaptureOutput, canCaptureHotword,
+                                                             mOutputCommandThread);
+        mAudioRecordClients.add(portId, client);
     }
 
     if (audioPolicyEffects != 0) {
         // create audio pre processors according to input source
-        status_t status = audioPolicyEffects->addInputEffects(*input, inputSource, session);
+        status_t status = audioPolicyEffects->addInputEffects(input, inputSource, session);
         if (status != NO_ERROR && status != ALREADY_EXISTS) {
-            ALOGW("Failed to add effects on input %d", *input);
+            ALOGW("Failed to add effects on input %d", input);
         }
     }
-    return NO_ERROR;
+
+    _aidl_return->input = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(input));
+    _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+    _aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return Status::ok();
 }
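getInputForAttr() above normalizes the caller-supplied AttributionSourceState before the permission checks: only audioserver/mediaserver may act on behalf of another uid/pid; any other caller gets its binder calling identity written back (getOutputForAttr() does the same for the uid). A hedged condensation of that check follows; the helper is hypothetical, the individual calls all appear in the hunks above, and the plain static_casts stand in for the legacy2aidl_* conversions used in the real code.

```cpp
// Hypothetical condensation of the caller-identity adjustment used above; not part of the patch.
AttributionSourceState adjustCallerIdentity(const AttributionSourceState& requested) {
    AttributionSourceState adjusted = requested;
    bool updatePid = (requested.pid == -1);
    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
    if (!isAudioServerOrMediaServerUid(callingUid)) {
        // Untrusted callers cannot pass themselves off as another uid.
        adjusted.uid = static_cast<int32_t>(callingUid);
        updatePid = true;
    }
    if (updatePid) {
        // Fill in or correct the pid with the binder calling pid.
        adjusted.pid = static_cast<int32_t>(IPCThreadState::self()->getCallingPid());
    }
    return adjusted;
}
```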
 
 std::string AudioPolicyService::getDeviceTypeStrForPortId(audio_port_handle_t portId) {
-    struct audio_port port = {};
+    struct audio_port_v7 port = {};
     port.id = portId;
     status_t status = mAudioPolicyManager->getAudioPort(&port);
     if (status == NO_ERROR && port.type == AUDIO_PORT_TYPE_DEVICE) {
@@ -555,10 +706,13 @@
     return {};
 }
 
-status_t AudioPolicyService::startInput(audio_port_handle_t portId)
+Status AudioPolicyService::startInput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     sp<AudioRecordClient> client;
     {
@@ -566,17 +720,21 @@
 
         ssize_t index = mAudioRecordClients.indexOfKey(portId);
         if (index < 0) {
-            return INVALID_OPERATION;
+            return binderStatusFromStatusT(INVALID_OPERATION);
         }
         client = mAudioRecordClients.valueAt(index);
     }
 
+    std::stringstream msg;
+    msg << "Audio recording on session " << client->session;
+
     // check calling permissions
-    if (!(startRecording(client->opPackageName, client->pid, client->uid)
+    if (!(startRecording(client->attributionSource, String16(msg.str().c_str()),
+        client->attributes.source)
             || client->attributes.source == AUDIO_SOURCE_FM_TUNER)) {
-        ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
-                __func__, client->uid, client->pid);
-        return PERMISSION_DENIED;
+        ALOGE("%s permission denied: recording not allowed for attribution source %s",
+                __func__, client->attributionSource.toString().c_str());
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     Mutex::Autolock _l(mLock);
@@ -619,11 +777,13 @@
             item->setCString(kAudioPolicyRqstSrc,
                              toString(client->attributes.source).c_str());
             item->setInt32(kAudioPolicyRqstSession, client->session);
-            if (client->opPackageName.size() != 0) {
+            if (client->attributionSource.packageName.has_value() &&
+                client->attributionSource.packageName.value().size() != 0) {
                 item->setCString(kAudioPolicyRqstPkg,
-                                 std::string(String8(client->opPackageName).string()).c_str());
+                    client->attributionSource.packageName.value().c_str());
             } else {
-                item->setCString(kAudioPolicyRqstPkg, std::to_string(client->uid).c_str());
+                item->setCString(kAudioPolicyRqstPkg,
+                    std::to_string(client->attributionSource.uid).c_str());
             }
             item->setCString(
                     kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
@@ -639,12 +799,13 @@
                     item->setCString(kAudioPolicyActiveSrc,
                                      toString(other->attributes.source).c_str());
                     item->setInt32(kAudioPolicyActiveSession, other->session);
-                    if (other->opPackageName.size() != 0) {
+                    if (other->attributionSource.packageName.has_value() &&
+                        other->attributionSource.packageName.value().size() != 0) {
                         item->setCString(kAudioPolicyActivePkg,
-                             std::string(String8(other->opPackageName).string()).c_str());
+                            other->attributionSource.packageName.value().c_str());
                     } else {
-                        item->setCString(kAudioPolicyRqstPkg,
-                                         std::to_string(other->uid).c_str());
+                        item->setCString(kAudioPolicyActivePkg, std::to_string(
+                            other->attributionSource.uid).c_str());
                     }
                     item->setCString(kAudioPolicyActiveDevice,
                                      getDeviceTypeStrForPortId(other->deviceId).c_str());
@@ -660,23 +821,26 @@
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
-        finishRecording(client->opPackageName, client->uid);
+        finishRecording(client->attributionSource, client->attributes.source);
     }
 
-    return status;
+    return binderStatusFromStatusT(status);
 }
 
-status_t AudioPolicyService::stopInput(audio_port_handle_t portId)
+Status AudioPolicyService::stopInput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
     Mutex::Autolock _l(mLock);
 
     ssize_t index = mAudioRecordClients.indexOfKey(portId);
     if (index < 0) {
-        return INVALID_OPERATION;
+        return binderStatusFromStatusT(INVALID_OPERATION);
     }
     sp<AudioRecordClient> client = mAudioRecordClients.valueAt(index);
 
@@ -686,15 +850,18 @@
     updateUidStates_l();
 
     // finish the recording app op
-    finishRecording(client->opPackageName, client->uid);
+    finishRecording(client->attributionSource, client->attributes.source);
     AutoCallerClear acc;
-    return mAudioPolicyManager->stopInput(portId);
+    return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
 }
 
-void AudioPolicyService::releaseInput(audio_port_handle_t portId)
+Status AudioPolicyService::releaseInput(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return;
+        return binderStatusFromStatusT(NO_INIT);
     }
     sp<AudioPolicyEffects>audioPolicyEffects;
     sp<AudioRecordClient> client;
@@ -703,7 +870,7 @@
         audioPolicyEffects = mAudioPolicyEffects;
         ssize_t index = mAudioRecordClients.indexOfKey(portId);
         if (index < 0) {
-            return;
+            return Status::ok();
         }
         client = mAudioRecordClients.valueAt(index);
 
@@ -717,7 +884,7 @@
         mAudioRecordClients.removeItem(portId);
     }
     if (client == 0) {
-        return;
+        return Status::ok();
     }
     if (audioPolicyEffects != 0) {
         // release audio processors from the input
@@ -731,239 +898,354 @@
         AutoCallerClear acc;
         mAudioPolicyManager->releaseInput(portId);
     }
+    return Status::ok();
 }
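+// releaseInput() used to return void; as a binder method it now reports a Status.
+// Benign "nothing to release" cases (unknown portId, client already gone) still map to
+// Status::ok(), while a missing policy manager surfaces as NO_INIT. The
+// binderStatusFromStatusT() helper used throughout this file (roughly) turns NO_ERROR
+// into Status::ok() and any other status_t into an error Status, as in stopInput()
+// above:
+//
+//   return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));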
 
-status_t AudioPolicyService::initStreamVolume(audio_stream_type_t stream,
-                                            int indexMin,
-                                            int indexMax)
-{
+Status AudioPolicyService::initStreamVolume(media::AudioStreamType streamAidl,
+                                            int32_t indexMinAidl,
+                                            int32_t indexMaxAidl) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    int indexMin = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexMinAidl));
+    int indexMax = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexMaxAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     mAudioPolicyManager->initStreamVolume(stream, indexMin, indexMax);
-    return NO_ERROR;
+    return binderStatusFromStatusT(NO_ERROR);
 }
 
-status_t AudioPolicyService::setStreamVolumeIndex(audio_stream_type_t stream,
-                                                  int index,
-                                                  audio_devices_t device)
-{
+Status AudioPolicyService::setStreamVolumeIndex(media::AudioStreamType streamAidl,
+                                                int32_t deviceAidl, int32_t indexAidl) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setStreamVolumeIndex(stream,
-                                                    index,
-                                                    device);
+    return binderStatusFromStatusT(mAudioPolicyManager->setStreamVolumeIndex(stream,
+                                                                             index,
+                                                                             device));
 }
 
-status_t AudioPolicyService::getStreamVolumeIndex(audio_stream_type_t stream,
-                                                  int *index,
-                                                  audio_devices_t device)
-{
+Status AudioPolicyService::getStreamVolumeIndex(media::AudioStreamType streamAidl,
+                                                int32_t deviceAidl, int32_t* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+    int index;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getStreamVolumeIndex(stream,
-                                                    index,
-                                                    device);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getStreamVolumeIndex(stream, &index, device)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(index));
+    return Status::ok();
 }
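+// Methods that used to fill a legacy out-parameter (int *index and friends) now declare
+// a local legacy variable, wrap the manager call in
+// RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(...)), and only convert into
+// *_aidl_return once the call has succeeded, so a failed call never writes a partial
+// result. The pattern, as used in getStreamVolumeIndex() above:
+//
+//   int index;
+//   RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+//           mAudioPolicyManager->getStreamVolumeIndex(stream, &index, device)));
+//   *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(index));
+//   return Status::ok();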
 
-status_t AudioPolicyService::setVolumeIndexForAttributes(const audio_attributes_t &attributes,
-                                                         int index, audio_devices_t device)
-{
+Status AudioPolicyService::setVolumeIndexForAttributes(
+        const media::AudioAttributesInternal& attrAidl, int32_t deviceAidl, int32_t indexAidl) {
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "169572641")));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, device);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, device));
 }
 
-status_t AudioPolicyService::getVolumeIndexForAttributes(const audio_attributes_t &attributes,
-                                                         int &index, audio_devices_t device)
-{
+Status AudioPolicyService::getVolumeIndexForAttributes(
+        const media::AudioAttributesInternal& attrAidl, int32_t deviceAidl, int32_t* _aidl_return) {
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+    int index;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "169572641")));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getVolumeIndexForAttributes(attributes, index, device);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getVolumeIndexForAttributes(attributes, index, device)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(index));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getMinVolumeIndexForAttributes(const audio_attributes_t &attributes,
-                                                            int &index)
-{
+Status AudioPolicyService::getMinVolumeIndexForAttributes(
+        const media::AudioAttributesInternal& attrAidl, int32_t* _aidl_return) {
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    int index;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "169572641")));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getMinVolumeIndexForAttributes(attributes, index);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getMinVolumeIndexForAttributes(attributes, index)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(index));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getMaxVolumeIndexForAttributes(const audio_attributes_t &attributes,
-                                                            int &index)
-{
+Status AudioPolicyService::getMaxVolumeIndexForAttributes(
+        const media::AudioAttributesInternal& attrAidl, int32_t* _aidl_return) {
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    int index;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "169572641")));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getMaxVolumeIndexForAttributes(attributes, index);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getMaxVolumeIndexForAttributes(attributes, index)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(index));
+    return Status::ok();
 }
 
-uint32_t AudioPolicyService::getStrategyForStream(audio_stream_type_t stream)
-{
+Status AudioPolicyService::getStrategyForStream(media::AudioStreamType streamAidl,
+                                                int32_t* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return PRODUCT_STRATEGY_NONE;
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+                convertReinterpret<int32_t>(PRODUCT_STRATEGY_NONE));
+        return Status::ok();
     }
     if (mAudioPolicyManager == NULL) {
-        return PRODUCT_STRATEGY_NONE;
+        return binderStatusFromStatusT(NO_INIT);
     }
+
     // DO NOT LOCK, may be called from AudioFlinger with lock held, reaching deadlock
     AutoCallerClear acc;
-    return mAudioPolicyManager->getStrategyForStream(stream);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_product_strategy_t_int32_t(
+                    mAudioPolicyManager->getStrategyForStream(stream)));
+    return Status::ok();
 }
 
 //audio policy: use audio_device_t appropriately
 
-audio_devices_t AudioPolicyService::getDevicesForStream(audio_stream_type_t stream)
-{
+Status AudioPolicyService::getDevicesForStream(media::AudioStreamType streamAidl,
+                                               int32_t* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return AUDIO_DEVICE_NONE;
+        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_devices_t_int32_t(AUDIO_DEVICE_NONE));
+        return Status::ok();
     }
     if (mAudioPolicyManager == NULL) {
-        return AUDIO_DEVICE_NONE;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getDevicesForStream(stream);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_devices_t_int32_t(mAudioPolicyManager->getDevicesForStream(stream)));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getDevicesForAttributes(const AudioAttributes &aa,
-                                                     AudioDeviceTypeAddrVector *devices) const
+Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesEx& attrAidl,
+                                                   std::vector<media::AudioDevice>* _aidl_return)
 {
+    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesEx_AudioAttributes(attrAidl));
+    AudioDeviceTypeAddrVector devices;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), devices);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), &devices)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return Status::ok();
 }
 
-audio_io_handle_t AudioPolicyService::getOutputForEffect(const effect_descriptor_t *desc)
-{
-    // FIXME change return type to status_t, and return NO_INIT here
+Status AudioPolicyService::getOutputForEffect(const media::EffectDescriptor& descAidl,
+                                              int32_t* _aidl_return) {
+    effect_descriptor_t desc = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_EffectDescriptor_effect_descriptor_t(descAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateEffectDescriptor(desc, "73126106")));
+
     if (mAudioPolicyManager == NULL) {
-        return 0;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getOutputForEffect(desc);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(mAudioPolicyManager->getOutputForEffect(&desc)));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::registerEffect(const effect_descriptor_t *desc,
-                                audio_io_handle_t io,
-                                uint32_t strategy,
-                                audio_session_t session,
-                                int id)
-{
+Status AudioPolicyService::registerEffect(const media::EffectDescriptor& descAidl, int32_t ioAidl,
+                                          int32_t strategyAidl, int32_t sessionAidl,
+                                          int32_t idAidl) {
+    effect_descriptor_t desc = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_EffectDescriptor_effect_descriptor_t(descAidl));
+    audio_io_handle_t io = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(ioAidl));
+    product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+    audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_session_t(sessionAidl));
+    int id = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(idAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateEffectDescriptor(desc, "73126106")));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->registerEffect(desc, io, strategy, session, id);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->registerEffect(&desc, io, strategy, session, id));
 }
 
-status_t AudioPolicyService::unregisterEffect(int id)
+Status AudioPolicyService::unregisterEffect(int32_t idAidl)
 {
+    int id = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(idAidl));
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->unregisterEffect(id);
+    return binderStatusFromStatusT(mAudioPolicyManager->unregisterEffect(id));
 }
 
-status_t AudioPolicyService::setEffectEnabled(int id, bool enabled)
+Status AudioPolicyService::setEffectEnabled(int32_t idAidl, bool enabled)
 {
+    int id = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(idAidl));
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setEffectEnabled(id, enabled);
+    return binderStatusFromStatusT(mAudioPolicyManager->setEffectEnabled(id, enabled));
 }
 
-status_t AudioPolicyService::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io)
+Status AudioPolicyService::moveEffectsToIo(const std::vector<int32_t>& idsAidl, int32_t ioAidl)
 {
+    const std::vector<int>& ids = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<int>>(idsAidl, convertReinterpret<int, int32_t>));
+    audio_io_handle_t io = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(ioAidl));
+    if (ids.size() > MAX_ITEMS_PER_LIST) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->moveEffectsToIo(ids, io);
+    return binderStatusFromStatusT(mAudioPolicyManager->moveEffectsToIo(ids, io));
 }
 
-bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
-{
+Status AudioPolicyService::isStreamActive(media::AudioStreamType streamAidl, int32_t inPastMsAidl,
+                                          bool* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    uint32_t inPastMs = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(inPastMsAidl));
+
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return false;
+        *_aidl_return = false;
+        return Status::ok();
     }
     if (mAudioPolicyManager == NULL) {
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isStreamActive(stream, inPastMs);
+    *_aidl_return = mAudioPolicyManager->isStreamActive(stream, inPastMs);
+    return Status::ok();
 }
 
-bool AudioPolicyService::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
-{
+Status AudioPolicyService::isStreamActiveRemotely(media::AudioStreamType streamAidl,
+                                                  int32_t inPastMsAidl,
+                                                  bool* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    uint32_t inPastMs = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(inPastMsAidl));
+
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        return false;
+        *_aidl_return = false;
+        return Status::ok();
     }
     if (mAudioPolicyManager == NULL) {
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isStreamActiveRemotely(stream, inPastMs);
+    *_aidl_return = mAudioPolicyManager->isStreamActiveRemotely(stream, inPastMs);
+    return Status::ok();
 }
 
-bool AudioPolicyService::isSourceActive(audio_source_t source) const
-{
+Status AudioPolicyService::isSourceActive(media::AudioSourceType sourceAidl, bool* _aidl_return) {
+    audio_source_t source = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(sourceAidl));
     if (mAudioPolicyManager == NULL) {
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isSourceActive(source);
+    *_aidl_return = mAudioPolicyManager->isSourceActive(source);
+    return Status::ok();
 }
 
 status_t AudioPolicyService::getAudioPolicyEffects(sp<AudioPolicyEffects>& audioPolicyEffects)
@@ -982,246 +1264,378 @@
     return OK;
 }
 
-status_t AudioPolicyService::queryDefaultPreProcessing(audio_session_t audioSession,
-                                                       effect_descriptor_t *descriptors,
-                                                       uint32_t *count)
-{
-    sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status = getAudioPolicyEffects(audioPolicyEffects);
-    if (status != OK) {
-        *count = 0;
-        return status;
+Status AudioPolicyService::queryDefaultPreProcessing(
+        int32_t audioSessionAidl,
+        media::Int* countAidl,
+        std::vector<media::EffectDescriptor>* _aidl_return) {
+    audio_session_t audioSession = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_session_t(audioSessionAidl));
+    uint32_t count = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(countAidl->value));
+    if (count > AudioEffect::kMaxPreProcessing) {
+        count = AudioEffect::kMaxPreProcessing;
     }
-    return audioPolicyEffects->queryDefaultInputEffects(
-            (audio_session_t)audioSession, descriptors, count);
+    uint32_t countReq = count;
+    std::unique_ptr<effect_descriptor_t[]> descriptors(new effect_descriptor_t[count]);
+
+    sp<AudioPolicyEffects> audioPolicyEffects;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->queryDefaultInputEffects(
+            (audio_session_t) audioSession, descriptors.get(), &count)));
+    countReq = std::min(count, countReq);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(descriptors.get(), descriptors.get() + countReq,
+                         std::back_inserter(*_aidl_return),
+                         legacy2aidl_effect_descriptor_t_EffectDescriptor)));
+    countAidl->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(count));
+    return Status::ok();
 }
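+// Variable-length results (here and in listAudioPorts(), listAudioPatches() and
+// getSurroundFormats() below) share one pattern: read the requested count from the
+// inout media::Int, clamp it to a fixed maximum, allocate a legacy scratch array of
+// that size, let the legacy query report how many items exist in total, then convert
+// at most min(requested, available) entries into the AIDL vector and write the total
+// back so callers can re-query with a larger buffer. Roughly (kMax, T and
+// legacy2aidlElement are placeholders):
+//
+//   uint32_t count = std::min<uint32_t>(countAidl->value, kMax);
+//   uint32_t countReq = count;
+//   std::unique_ptr<T[]> buf(new T[count]);
+//   // ... legacy query fills buf and updates `count` to the total available ...
+//   countReq = std::min(count, countReq);
+//   convertRange(buf.get(), buf.get() + countReq,
+//                std::back_inserter(*_aidl_return), legacy2aidlElement);
+//   countAidl->value = count;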
 
-status_t AudioPolicyService::addSourceDefaultEffect(const effect_uuid_t *type,
-                                                    const String16& opPackageName,
-                                                    const effect_uuid_t *uuid,
-                                                    int32_t priority,
-                                                    audio_source_t source,
-                                                    audio_unique_id_t* id)
-{
+Status AudioPolicyService::addSourceDefaultEffect(const media::AudioUuid& typeAidl,
+                                                  const std::string& opPackageNameAidl,
+                                                  const media::AudioUuid& uuidAidl,
+                                                  int32_t priority,
+                                                  media::AudioSourceType sourceAidl,
+                                                  int32_t* _aidl_return) {
+    effect_uuid_t type = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioUuid_audio_uuid_t(typeAidl));
+    String16 opPackageName = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_string_view_String16(opPackageNameAidl));
+    effect_uuid_t uuid = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioUuid_audio_uuid_t(uuidAidl));
+    audio_source_t source = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(sourceAidl));
+    audio_unique_id_t id;
+
     sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status = getAudioPolicyEffects(audioPolicyEffects);
-    if (status != OK) {
-        return status;
-    }
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
     if (!modifyDefaultAudioEffectsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    return audioPolicyEffects->addSourceDefaultEffect(
-            type, opPackageName, uuid, priority, source, id);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addSourceDefaultEffect(
+            &type, opPackageName, &uuid, priority, source, &id)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_unique_id_t_int32_t(id));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::addStreamDefaultEffect(const effect_uuid_t *type,
-                                                    const String16& opPackageName,
-                                                    const effect_uuid_t *uuid,
-                                                    int32_t priority,
-                                                    audio_usage_t usage,
-                                                    audio_unique_id_t* id)
-{
-    sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status = getAudioPolicyEffects(audioPolicyEffects);
-    if (status != OK) {
-        return status;
-    }
+Status AudioPolicyService::addStreamDefaultEffect(const media::AudioUuid& typeAidl,
+                                                  const std::string& opPackageNameAidl,
+                                                  const media::AudioUuid& uuidAidl,
+                                                  int32_t priority, media::AudioUsage usageAidl,
+                                                  int32_t* _aidl_return) {
+    effect_uuid_t type = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioUuid_audio_uuid_t(typeAidl));
+    String16 opPackageName = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_string_view_String16(opPackageNameAidl));
+    effect_uuid_t uuid = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioUuid_audio_uuid_t(uuidAidl));
+    audio_usage_t usage = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioUsage_audio_usage_t(usageAidl));
+    audio_unique_id_t id;
+
+    sp<AudioPolicyEffects> audioPolicyEffects;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
     if (!modifyDefaultAudioEffectsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    return audioPolicyEffects->addStreamDefaultEffect(
-            type, opPackageName, uuid, priority, usage, id);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addStreamDefaultEffect(
+            &type, opPackageName, &uuid, priority, usage, &id)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_unique_id_t_int32_t(id));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::removeSourceDefaultEffect(audio_unique_id_t id)
+Status AudioPolicyService::removeSourceDefaultEffect(int32_t idAidl)
 {
+    audio_unique_id_t id = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status = getAudioPolicyEffects(audioPolicyEffects);
-    if (status != OK) {
-        return status;
-    }
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
     if (!modifyDefaultAudioEffectsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    return audioPolicyEffects->removeSourceDefaultEffect(id);
+    return binderStatusFromStatusT(audioPolicyEffects->removeSourceDefaultEffect(id));
 }
 
-status_t AudioPolicyService::removeStreamDefaultEffect(audio_unique_id_t id)
+Status AudioPolicyService::removeStreamDefaultEffect(int32_t idAidl)
 {
+    audio_unique_id_t id = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
-    status_t status = getAudioPolicyEffects(audioPolicyEffects);
-    if (status != OK) {
-        return status;
-    }
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
     if (!modifyDefaultAudioEffectsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-    return audioPolicyEffects->removeStreamDefaultEffect(id);
+    return binderStatusFromStatusT(audioPolicyEffects->removeStreamDefaultEffect(id));
 }
 
-status_t AudioPolicyService::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
+Status AudioPolicyService::setSupportedSystemUsages(
+        const std::vector<media::AudioUsage>& systemUsagesAidl) {
+    size_t size = systemUsagesAidl.size();
+    if (size > MAX_ITEMS_PER_LIST) {
+        size = MAX_ITEMS_PER_LIST;
+    }
+    std::vector<audio_usage_t> systemUsages;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(systemUsagesAidl.begin(), systemUsagesAidl.begin() + size,
+                         std::back_inserter(systemUsages), aidl2legacy_AudioUsage_audio_usage_t)));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     bool areAllSystemUsages = std::all_of(begin(systemUsages), end(systemUsages),
         [](audio_usage_t usage) { return isSystemUsage(usage); });
     if (!areAllSystemUsages) {
-        return BAD_VALUE;
+        return binderStatusFromStatusT(BAD_VALUE);
     }
 
     mSupportedSystemUsages = systemUsages;
-    return NO_ERROR;
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
+Status AudioPolicyService::setAllowedCapturePolicy(int32_t uidAidl, int32_t capturePolicyAidl) {
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    audio_flags_mask_t capturePolicy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_flags_mask_t_mask(capturePolicyAidl));
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
         ALOGV("%s() mAudioPolicyManager == NULL", __func__);
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
-    return mAudioPolicyManager->setAllowedCapturePolicy(uid, capturePolicy);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setAllowedCapturePolicy(uid, capturePolicy));
 }
 
-bool AudioPolicyService::isOffloadSupported(const audio_offload_info_t& info)
-{
+Status AudioPolicyService::getOffloadSupport(const media::AudioOffloadInfo& infoAidl,
+                                             media::AudioOffloadMode* _aidl_return) {
+    audio_offload_info_t info = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(infoAidl));
     if (mAudioPolicyManager == NULL) {
         ALOGV("mAudioPolicyManager == NULL");
-        return false;
+        return binderStatusFromStatusT(AUDIO_OFFLOAD_NOT_SUPPORTED);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isOffloadSupported(info);
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_offload_mode_t_AudioOffloadMode(
+            mAudioPolicyManager->getOffloadSupport(info)));
+    return Status::ok();
 }
 
-bool AudioPolicyService::isDirectOutputSupported(const audio_config_base_t& config,
-                                                 const audio_attributes_t& attributes) {
+Status AudioPolicyService::isDirectOutputSupported(
+        const media::AudioConfigBase& configAidl,
+        const media::AudioAttributesInternal& attributesAidl,
+        bool* _aidl_return) {
+    audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl));
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "169572641")));
+
     if (mAudioPolicyManager == NULL) {
         ALOGV("mAudioPolicyManager == NULL");
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
-    status_t result = validateUsage(attributes.usage);
-    if (result != NO_ERROR) {
-        return result;
-    }
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes.usage)));
 
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->isDirectOutputSupported(config, attributes);
+    *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
+    return Status::ok();
 }
 
 
-status_t AudioPolicyService::listAudioPorts(audio_port_role_t role,
-                                            audio_port_type_t type,
-                                            unsigned int *num_ports,
-                                            struct audio_port *ports,
-                                            unsigned int *generation)
-{
+Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
+                                          media::AudioPortType typeAidl, media::Int* count,
+                                          std::vector<media::AudioPort>* portsAidl,
+                                          int32_t* _aidl_return) {
+    audio_port_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPortRole_audio_port_role_t(roleAidl));
+    audio_port_type_t type = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPortType_audio_port_type_t(typeAidl));
+    unsigned int num_ports = VALUE_OR_RETURN_BINDER_STATUS(
+            convertIntegral<unsigned int>(count->value));
+    if (num_ports > MAX_ITEMS_PER_LIST) {
+        num_ports = MAX_ITEMS_PER_LIST;
+    }
+    unsigned int numPortsReq = num_ports;
+    std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
+    unsigned int generation;
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->listAudioPorts(role, type, num_ports, ports, generation);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
+    numPortsReq = std::min(numPortsReq, num_ports);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
+                         legacy2aidl_audio_port_v7_AudioPort)));
+    count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_ports));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getAudioPort(struct audio_port *port)
-{
+Status AudioPolicyService::getAudioPort(const media::AudioPort& portAidl,
+                                        media::AudioPort* _aidl_return) {
+    audio_port_v7 port = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPort_audio_port_v7(portAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPort(port)));
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->getAudioPort(port);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPort(port));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::createAudioPatch(const struct audio_patch *patch,
-        audio_patch_handle_t *handle)
-{
+Status AudioPolicyService::createAudioPatch(const media::AudioPatch& patchAidl, int32_t handleAidl,
+                                            int32_t* _aidl_return) {
+    audio_patch patch = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPatch_audio_patch(patchAidl));
+    audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->createAudioPatch(patch, handle,
-                                                  IPCThreadState::self()->getCallingUid());
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->createAudioPatch(&patch, &handle,
+                                                  IPCThreadState::self()->getCallingUid())));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::releaseAudioPatch(audio_patch_handle_t handle)
+Status AudioPolicyService::releaseAudioPatch(int32_t handleAidl)
 {
+    audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->releaseAudioPatch(handle,
-                                                     IPCThreadState::self()->getCallingUid());
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->releaseAudioPatch(handle,
+                                                   IPCThreadState::self()->getCallingUid()));
 }
 
-status_t AudioPolicyService::listAudioPatches(unsigned int *num_patches,
-        struct audio_patch *patches,
-        unsigned int *generation)
-{
+Status AudioPolicyService::listAudioPatches(media::Int* count,
+                                            std::vector<media::AudioPatch>* patchesAidl,
+                                            int32_t* _aidl_return) {
+    unsigned int num_patches = VALUE_OR_RETURN_BINDER_STATUS(
+            convertIntegral<unsigned int>(count->value));
+    if (num_patches > MAX_ITEMS_PER_LIST) {
+        num_patches = MAX_ITEMS_PER_LIST;
+    }
+    unsigned int numPatchesReq = num_patches;
+    std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
+    unsigned int generation;
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->listAudioPatches(num_patches, patches, generation);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+    numPatchesReq = std::min(numPatchesReq, num_patches);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(patches.get(), patches.get() + numPatchesReq,
+                         std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatch)));
+    count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_patches));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setAudioPortConfig(const struct audio_port_config *config)
+Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfig& configAidl)
 {
+    audio_port_config config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPortConfig_audio_port_config(configAidl));
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->setAudioPortConfig(config);
+    return binderStatusFromStatusT(mAudioPolicyManager->setAudioPortConfig(&config));
 }
 
-status_t AudioPolicyService::acquireSoundTriggerSession(audio_session_t *session,
-                                       audio_io_handle_t *ioHandle,
-                                       audio_devices_t *device)
+Status AudioPolicyService::acquireSoundTriggerSession(media::SoundTriggerSession* _aidl_return)
 {
+    audio_session_t session;
+    audio_io_handle_t ioHandle;
+    audio_devices_t device;
+
+    {
+        Mutex::Autolock _l(mLock);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        AutoCallerClear acc;
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+                mAudioPolicyManager->acquireSoundTriggerSession(&session, &ioHandle, &device)));
+    }
+
+    _aidl_return->session = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_session_t_int32_t(session));
+    _aidl_return->ioHandle = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
+    _aidl_return->device = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_devices_t_int32_t(device));
+    return Status::ok();
+}
+
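+// acquireSoundTriggerSession() above scopes mLock and AutoCallerClear to just the
+// manager call and fills the media::SoundTriggerSession parcelable outside the lock;
+// the legacy2aidl conversions are pure and do not need it. From a client's point of
+// view the three legacy out-parameters are now bundled into one parcelable, e.g.
+// (illustrative only; `aps` stands for an IAudioPolicyService proxy):
+//
+//   media::SoundTriggerSession stSession;
+//   binder::Status status = aps->acquireSoundTriggerSession(&stSession);
+//   // stSession.session, stSession.ioHandle and stSession.device carry the
+//   // values previously returned through the three pointer arguments.
+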
+Status AudioPolicyService::releaseSoundTriggerSession(int32_t sessionAidl)
+{
+    audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_session_t(sessionAidl));
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->acquireSoundTriggerSession(session, ioHandle, device);
+    return binderStatusFromStatusT(mAudioPolicyManager->releaseSoundTriggerSession(session));
 }
 
-status_t AudioPolicyService::releaseSoundTriggerSession(audio_session_t session)
-{
-    Mutex::Autolock _l(mLock);
-    if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+Status AudioPolicyService::registerPolicyMixes(const std::vector<media::AudioMix>& mixesAidl,
+                                               bool registration) {
+    size_t size = mixesAidl.size();
+    if (size > MAX_MIXES_PER_POLICY) {
+        size = MAX_MIXES_PER_POLICY;
     }
-    AutoCallerClear acc;
-    return mAudioPolicyManager->releaseSoundTriggerSession(session);
-}
+    Vector<AudioMix> mixes;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(mixesAidl.begin(), mixesAidl.begin() + size, std::back_inserter(mixes),
+                         aidl2legacy_AudioMix)));
 
-status_t AudioPolicyService::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration)
-{
     Mutex::Autolock _l(mLock);
 
     // loopback|render only need a MediaProjection (checked in caller AudioService.java)
     bool needModifyAudioRouting = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
             return !is_mix_loopback_render(mix.mRouteFlags); });
     if (needModifyAudioRouting && !modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     // If one of the mixes has needCaptureVoiceCommunicationOutput set to true, then we
@@ -1231,304 +1645,556 @@
             return mix.mVoiceCommunicationCaptureAllowed; });
 
     bool needCaptureMediaOutput = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
-            return mix.mAllowPrivilegedPlaybackCapture; });
+            return mix.mAllowPrivilegedMediaPlaybackCapture; });
 
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    const AttributionSourceState attributionSource = getCallingAttributionSource();
 
-    if (needCaptureMediaOutput && !captureMediaOutputAllowed(callingPid, callingUid)) {
-        return PERMISSION_DENIED;
+    if (needCaptureMediaOutput && !captureMediaOutputAllowed(attributionSource)) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (needCaptureVoiceCommunicationOutput &&
-        !captureVoiceCommunicationOutputAllowed(callingPid, callingUid)) {
-        return PERMISSION_DENIED;
+        !captureVoiceCommunicationOutputAllowed(attributionSource)) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
     if (registration) {
-        return mAudioPolicyManager->registerPolicyMixes(mixes);
+        return binderStatusFromStatusT(mAudioPolicyManager->registerPolicyMixes(mixes));
     } else {
-        return mAudioPolicyManager->unregisterPolicyMixes(mixes);
+        return binderStatusFromStatusT(mAudioPolicyManager->unregisterPolicyMixes(mixes));
     }
 }
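+// Permission checks that previously took the raw calling pid/uid now take a single
+// AttributionSourceState obtained via getCallingAttributionSource(), so the calling
+// uid, pid and package name travel together into captureMediaOutputAllowed(),
+// captureVoiceCommunicationOutputAllowed() and the startRecording()/finishRecording()
+// app-op calls earlier in this file. A call site, as in registerPolicyMixes() above:
+//
+//   const AttributionSourceState attributionSource = getCallingAttributionSource();
+//   if (needCaptureMediaOutput && !captureMediaOutputAllowed(attributionSource)) {
+//       return binderStatusFromStatusT(PERMISSION_DENIED);
+//   }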
 
-status_t AudioPolicyService::setUidDeviceAffinities(uid_t uid,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+Status AudioPolicyService::setUidDeviceAffinities(
+        int32_t uidAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->setUidDeviceAffinities(uid, devices);
+    return binderStatusFromStatusT(mAudioPolicyManager->setUidDeviceAffinities(uid, devices));
 }
 
-status_t AudioPolicyService::removeUidDeviceAffinities(uid_t uid) {
+Status AudioPolicyService::removeUidDeviceAffinities(int32_t uidAidl) {
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->removeUidDeviceAffinities(uid);
+    return binderStatusFromStatusT(mAudioPolicyManager->removeUidDeviceAffinities(uid));
 }
 
-status_t AudioPolicyService::setUserIdDeviceAffinities(int userId,
-        const Vector<AudioDeviceTypeAddr>& devices) {
+Status AudioPolicyService::setUserIdDeviceAffinities(
+        int32_t userIdAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->setUserIdDeviceAffinities(userId, devices);
+    return binderStatusFromStatusT(mAudioPolicyManager->setUserIdDeviceAffinities(userId, devices));
 }
 
-status_t AudioPolicyService::removeUserIdDeviceAffinities(int userId) {
+Status AudioPolicyService::removeUserIdDeviceAffinities(int32_t userIdAidl) {
+    int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
+
     Mutex::Autolock _l(mLock);
     if(!modifyAudioRoutingAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->removeUserIdDeviceAffinities(userId);
+    return binderStatusFromStatusT(mAudioPolicyManager->removeUserIdDeviceAffinities(userId));
 }
 
-status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
-                                              const audio_attributes_t *attributes,
-                                              audio_port_handle_t *portId)
-{
+Status AudioPolicyService::startAudioSource(const media::AudioPortConfig& sourceAidl,
+                                            const media::AudioAttributesInternal& attributesAidl,
+                                            int32_t* _aidl_return) {
+    audio_port_config source = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioPortConfig_audio_port_config(sourceAidl));
+    audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
+    audio_port_handle_t portId;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioPortConfig(source)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            AudioValidator::validateAudioAttributes(attributes, "68953950")));
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
 
-    status_t result = validateUsage(attributes->usage);
-    if (result != NO_ERROR) {
-        return result;
-    }
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes.usage)));
 
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     AutoCallerClear acc;
-    return mAudioPolicyManager->startAudioSource(source, attributes, portId, callingUid);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->startAudioSource(&source, &attributes, &portId, callingUid)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::stopAudioSource(audio_port_handle_t portId)
+Status AudioPolicyService::stopAudioSource(int32_t portIdAidl)
 {
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     AutoCallerClear acc;
-    return mAudioPolicyManager->stopAudioSource(portId);
+    return binderStatusFromStatusT(mAudioPolicyManager->stopAudioSource(portId));
 }
 
-status_t AudioPolicyService::setMasterMono(bool mono)
+Status AudioPolicyService::setMasterMono(bool mono)
 {
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     if (!settingsAllowed()) {
-        return PERMISSION_DENIED;
+        return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setMasterMono(mono);
+    return binderStatusFromStatusT(mAudioPolicyManager->setMasterMono(mono));
 }
 
-status_t AudioPolicyService::getMasterMono(bool *mono)
+Status AudioPolicyService::getMasterMono(bool* _aidl_return)
 {
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getMasterMono(mono);
+    return binderStatusFromStatusT(mAudioPolicyManager->getMasterMono(_aidl_return));
 }
 
 
-float AudioPolicyService::getStreamVolumeDB(
-            audio_stream_type_t stream, int index, audio_devices_t device)
-{
+Status AudioPolicyService::getStreamVolumeDB(media::AudioStreamType streamAidl, int32_t indexAidl,
+                                             int32_t deviceAidl, float* _aidl_return) {
+    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
+    int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+
     if (mAudioPolicyManager == NULL) {
-        return NAN;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getStreamVolumeDB(stream, index, device);
+    *_aidl_return = mAudioPolicyManager->getStreamVolumeDB(stream, index, device);
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getSurroundFormats(unsigned int *numSurroundFormats,
-                                                audio_format_t *surroundFormats,
-                                                bool *surroundFormatsEnabled,
-                                                bool reported)
-{
+Status AudioPolicyService::getSurroundFormats(media::Int* count,
+        std::vector<media::audio::common::AudioFormat>* formats,
+        std::vector<bool>* formatsEnabled) {
+    unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
+            convertIntegral<unsigned int>(count->value));
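+    // Clamp the requested count and allocate temporary legacy arrays; the policy manager fills
+    // them and updates numSurroundFormats with the total number of formats available.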
+    if (numSurroundFormats > MAX_ITEMS_PER_LIST) {
+        numSurroundFormats = MAX_ITEMS_PER_LIST;
+    }
+    unsigned int numSurroundFormatsReq = numSurroundFormats;
+    std::unique_ptr<audio_format_t[]> surroundFormats(new audio_format_t[numSurroundFormats]);
+    std::unique_ptr<bool[]> surroundFormatsEnabled(new bool[numSurroundFormats]);
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getSurroundFormats(numSurroundFormats, surroundFormats,
-                                                   surroundFormatsEnabled, reported);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getSurroundFormats(&numSurroundFormats, surroundFormats.get(),
+                                                    surroundFormatsEnabled.get())));
+    numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
+                         std::back_inserter(*formats), legacy2aidl_audio_format_t_AudioFormat)));
+    formatsEnabled->insert(
+            formatsEnabled->begin(),
+            surroundFormatsEnabled.get(),
+            surroundFormatsEnabled.get() + numSurroundFormatsReq);
+    count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(numSurroundFormats));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
-                                        std::vector<audio_format_t> *formats)
-{
+Status AudioPolicyService::getReportedSurroundFormats(
+        media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) {
+    unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
+            convertIntegral<unsigned int>(count->value));
+    if (numSurroundFormats > MAX_ITEMS_PER_LIST) {
+        numSurroundFormats = MAX_ITEMS_PER_LIST;
+    }
+    unsigned int numSurroundFormatsReq = numSurroundFormats;
+    std::unique_ptr<audio_format_t[]> surroundFormats(new audio_format_t[numSurroundFormats]);
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->getHwOffloadEncodingFormatsSupportedForA2DP(formats);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getReportedSurroundFormats(
+                    &numSurroundFormats, surroundFormats.get())));
+    numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
+                         std::back_inserter(*formats), legacy2aidl_audio_format_t_AudioFormat)));
+    count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(numSurroundFormats));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
-{
+Status AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
+        std::vector<media::audio::common::AudioFormat>* _aidl_return) {
+    std::vector<audio_format_t> formats;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->setSurroundFormatEnabled(audioFormat, enabled);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getHwOffloadEncodingFormatsSupportedForA2DP(&formats)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::audio::common::AudioFormat>>(
+                    formats,
+                    legacy2aidl_audio_format_t_AudioFormat));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setAssistantUid(uid_t uid)
+Status AudioPolicyService::setSurroundFormatEnabled(
+        media::audio::common::AudioFormat audioFormatAidl, bool enabled) {
+    audio_format_t audioFormat = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioFormat_audio_format_t(audioFormatAidl));
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setSurroundFormatEnabled(audioFormat, enabled));
+}
+
+Status AudioPolicyService::setAssistantUid(int32_t uidAidl)
 {
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     Mutex::Autolock _l(mLock);
     mUidPolicy->setAssistantUid(uid);
-    return NO_ERROR;
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setA11yServicesUids(const std::vector<uid_t>& uids)
+Status AudioPolicyService::setA11yServicesUids(const std::vector<int32_t>& uidsAidl)
 {
+    size_t size = uidsAidl.size();
+    if (size > MAX_ITEMS_PER_LIST) {
+        size = MAX_ITEMS_PER_LIST;
+    }
+    std::vector<uid_t> uids;
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            convertRange(uidsAidl.begin(),
+                         uidsAidl.begin() + size,
+                         std::back_inserter(uids),
+                         aidl2legacy_int32_t_uid_t)));
     Mutex::Autolock _l(mLock);
     mUidPolicy->setA11yUids(uids);
-    return NO_ERROR;
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setCurrentImeUid(uid_t uid)
+Status AudioPolicyService::setCurrentImeUid(int32_t uidAidl)
 {
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     Mutex::Autolock _l(mLock);
     mUidPolicy->setCurrentImeUid(uid);
-    return NO_ERROR;
+    return Status::ok();
 }
 
-bool AudioPolicyService::isHapticPlaybackSupported()
+Status AudioPolicyService::isHapticPlaybackSupported(bool* _aidl_return)
 {
     if (mAudioPolicyManager == NULL) {
-        ALOGW("%s, mAudioPolicyManager == NULL", __func__);
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isHapticPlaybackSupported();
+    *_aidl_return = mAudioPolicyManager->isHapticPlaybackSupported();
+    return Status::ok();
 }
 
-status_t AudioPolicyService::listAudioProductStrategies(AudioProductStrategyVector &strategies)
-{
+Status AudioPolicyService::listAudioProductStrategies(
+        std::vector<media::AudioProductStrategy>* _aidl_return) {
+    AudioProductStrategyVector strategies;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->listAudioProductStrategies(strategies);
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->listAudioProductStrategies(strategies)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioProductStrategy>>(
+                    strategies,
+                    legacy2aidl_AudioProductStrategy));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getProductStrategyFromAudioAttributes(
-        const AudioAttributes &aa, product_strategy_t &productStrategy)
-{
+Status AudioPolicyService::getProductStrategyFromAudioAttributes(
+        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
+    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+    product_strategy_t productStrategy;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->getProductStrategyFromAudioAttributes(aa, productStrategy);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getProductStrategyFromAudioAttributes(
+                    aa, productStrategy, fallbackOnDefault)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_product_strategy_t_int32_t(productStrategy));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::listAudioVolumeGroups(AudioVolumeGroupVector &groups)
+Status AudioPolicyService::listAudioVolumeGroups(std::vector<media::AudioVolumeGroup>* _aidl_return)
 {
+    AudioVolumeGroupVector groups;
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->listAudioVolumeGroups(groups);
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->listAudioVolumeGroups(groups)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioVolumeGroup>>(groups,
+                                                                   legacy2aidl_AudioVolumeGroup));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                               volume_group_t &volumeGroup)
-{
+Status AudioPolicyService::getVolumeGroupFromAudioAttributes(
+        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
+    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+    volume_group_t volumeGroup;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->getVolumeGroupFromAudioAttributes(aa, volumeGroup);
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(
+                    mAudioPolicyManager->getVolumeGroupFromAudioAttributes(
+                            aa, volumeGroup, fallbackOnDefault)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_volume_group_t_int32_t(volumeGroup));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setRttEnabled(bool enabled)
+Status AudioPolicyService::setRttEnabled(bool enabled)
 {
     Mutex::Autolock _l(mLock);
     mUidPolicy->setRttEnabled(enabled);
-    return NO_ERROR;
+    return Status::ok();
 }
 
-bool AudioPolicyService::isCallScreenModeSupported()
+Status AudioPolicyService::isCallScreenModeSupported(bool* _aidl_return)
 {
     if (mAudioPolicyManager == NULL) {
-        ALOGW("%s, mAudioPolicyManager == NULL", __func__);
-        return false;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    return mAudioPolicyManager->isCallScreenModeSupported();
+    *_aidl_return = mAudioPolicyManager->isCallScreenModeSupported();
+    return Status::ok();
 }
 
-status_t AudioPolicyService::setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device)
-{
+Status AudioPolicyService::setDevicesRoleForStrategy(
+        int32_t strategyAidl,
+        media::DeviceRole roleAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->setPreferredDeviceForStrategy(strategy, device);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDevicesRoleForStrategy(strategy, role, devices));
 }
 
-status_t AudioPolicyService::removePreferredDeviceForStrategy(product_strategy_t strategy)
-{
-    if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+Status AudioPolicyService::removeDevicesRoleForStrategy(int32_t strategyAidl,
+                                                        media::DeviceRole roleAidl) {
+    product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->removePreferredDeviceForStrategy(strategy);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role));
 }
 
-status_t AudioPolicyService::getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device)
-{
+Status AudioPolicyService::getDevicesForRoleAndStrategy(
+        int32_t strategyAidl,
+        media::DeviceRole roleAidl,
+        std::vector<media::AudioDevice>* _aidl_return) {
+    product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices;
+
     if (mAudioPolicyManager == NULL) {
-        return NO_INIT;
+        return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    return mAudioPolicyManager->getPreferredDeviceForStrategy(strategy, device);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getDevicesForRoleAndStrategy(strategy, role, devices)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return Status::ok();
 }
 
-status_t AudioPolicyService::registerSoundTriggerCaptureStateListener(
-    const sp<media::ICaptureStateListener>& listener,
-    bool* result)
-{
-    *result = mCaptureStateNotifier.RegisterListener(listener);
-    return NO_ERROR;
+Status AudioPolicyService::registerSoundTriggerCaptureStateListener(
+        const sp<media::ICaptureStateListener>& listener, bool* _aidl_return) {
+    *_aidl_return = mCaptureStateNotifier.RegisterListener(listener);
+    return Status::ok();
+}
+
+Status AudioPolicyService::setDevicesRoleForCapturePreset(
+        media::AudioSourceType audioSourceAidl,
+        media::DeviceRole roleAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDevicesRoleForCapturePreset(audioSource, role, devices));
+}
+
+Status AudioPolicyService::addDevicesRoleForCapturePreset(
+        media::AudioSourceType audioSourceAidl,
+        media::DeviceRole roleAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->addDevicesRoleForCapturePreset(audioSource, role, devices));
+}
+
+Status AudioPolicyService::removeDevicesRoleForCapturePreset(
+        media::AudioSourceType audioSourceAidl,
+        media::DeviceRole roleAidl,
+        const std::vector<media::AudioDevice>& devicesAidl) {
+    audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
+}
+
+Status AudioPolicyService::clearDevicesRoleForCapturePreset(media::AudioSourceType audioSourceAidl,
+                                                            media::DeviceRole roleAidl) {
+    audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->clearDevicesRoleForCapturePreset(audioSource, role));
+}
+
+Status AudioPolicyService::getDevicesForRoleAndCapturePreset(
+        media::AudioSourceType audioSourceAidl,
+        media::DeviceRole roleAidl,
+        std::vector<media::AudioDevice>* _aidl_return) {
+    audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices;
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioDevice>>(devices,
+                                                              legacy2aidl_AudioDeviceTypeAddress));
+    return Status::ok();
 }
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index e847f9f..4d0e1f1 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -23,6 +23,7 @@
 #define __STDC_LIMIT_MACROS
 #include <stdint.h>
 #include <sys/time.h>
+#include <dlfcn.h>
 
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
@@ -35,18 +36,23 @@
 #include <utils/threads.h>
 #include "AudioPolicyService.h"
 #include <hardware_legacy/power.h>
+#include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioParameter.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/TimeCheck.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
 
 #include <system/audio.h>
 #include <system/audio_policy.h>
+#include <AudioPolicyManager.h>
 
 namespace android {
+using binder::Status;
 
 static const char kDeadlockedString[] = "AudioPolicyService may be deadlocked\n";
 static const char kCmdDeadlockedString[] = "AudioPolicyService command thread may be deadlocked\n";
+static const char kAudioPolicyManagerCustomPath[] = "libaudiopolicymanagercustom.so";
 
 static const int kDumpLockTimeoutNs = 1 * NANOS_PER_SECOND;
 
@@ -56,12 +62,54 @@
 
 // ----------------------------------------------------------------------------
 
+static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
+{
+    AudioPolicyManager *apm = new AudioPolicyManager(clientInterface);
+    status_t status = apm->initialize();
+    if (status != NO_ERROR) {
+        delete apm;
+        apm = nullptr;
+    }
+    return apm;
+}
+
+static void destroyAudioPolicyManager(AudioPolicyInterface *interface)
+{
+    delete interface;
+}
+// ----------------------------------------------------------------------------
+
 AudioPolicyService::AudioPolicyService()
     : BnAudioPolicyService(),
       mAudioPolicyManager(NULL),
       mAudioPolicyClient(NULL),
       mPhoneState(AUDIO_MODE_INVALID),
-      mCaptureStateNotifier(false) {
+      mCaptureStateNotifier(false),
+      mCreateAudioPolicyManager(createAudioPolicyManager),
+      mDestroyAudioPolicyManager(destroyAudioPolicyManager) {
+}
+
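+// Attempts to dlopen a custom policy manager implementation (libaudiopolicymanagercustom.so).
+// If the library is absent, the built-in factory methods set in the constructor remain in use;
+// if it is present but does not export both factory symbols, the service aborts.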
+void AudioPolicyService::loadAudioPolicyManager()
+{
+    mLibraryHandle = dlopen(kAudioPolicyManagerCustomPath, RTLD_NOW);
+    if (mLibraryHandle != nullptr) {
+        ALOGI("%s loading %s", __func__, kAudioPolicyManagerCustomPath);
+        mCreateAudioPolicyManager = reinterpret_cast<CreateAudioPolicyManagerInstance>
+                                            (dlsym(mLibraryHandle, "createAudioPolicyManager"));
+        const char *lastError = dlerror();
+        ALOGW_IF(mCreateAudioPolicyManager == nullptr, "%s createAudioPolicyManager is null %s",
+                    __func__, lastError != nullptr ? lastError : "no error");
+
+        mDestroyAudioPolicyManager = reinterpret_cast<DestroyAudioPolicyManagerInstance>(
+                                        dlsym(mLibraryHandle, "destroyAudioPolicyManager"));
+        lastError = dlerror();
+        ALOGW_IF(mDestroyAudioPolicyManager == nullptr, "%s destroyAudioPolicyManager is null %s",
+                    __func__, lastError != nullptr ? lastError : "no error");
+        if (mCreateAudioPolicyManager == nullptr || mDestroyAudioPolicyManager == nullptr) {
+            unloadAudioPolicyManager();
+            LOG_ALWAYS_FATAL("could not find audiopolicymanager interface methods");
+        }
+    }
 }
 
 void AudioPolicyService::onFirstRef()
@@ -75,7 +123,9 @@
         mOutputCommandThread = new AudioCommandThread(String8("ApmOutput"), this);
 
         mAudioPolicyClient = new AudioPolicyClient(this);
-        mAudioPolicyManager = createAudioPolicyManager(mAudioPolicyClient);
+
+        loadAudioPolicyManager();
+        mAudioPolicyManager = mCreateAudioPolicyManager(mAudioPolicyClient);
     }
     // load audio processing modules
     sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects();
@@ -91,12 +141,25 @@
     sensorPrivacyPolicy->registerSelf();
 }
 
+void AudioPolicyService::unloadAudioPolicyManager()
+{
+    ALOGV("%s ", __func__);
+    if (mLibraryHandle != nullptr) {
+        dlclose(mLibraryHandle);
+    }
+    mLibraryHandle = nullptr;
+    mCreateAudioPolicyManager = nullptr;
+    mDestroyAudioPolicyManager = nullptr;
+}
+
 AudioPolicyService::~AudioPolicyService()
 {
     mAudioCommandThread->exit();
     mOutputCommandThread->exit();
 
-    destroyAudioPolicyManager(mAudioPolicyManager);
+    mDestroyAudioPolicyManager(mAudioPolicyManager);
+    unloadAudioPolicyManager();
+
     delete mAudioPolicyClient;
 
     mNotificationClients.clear();
@@ -111,11 +174,11 @@
 
 // A notification client is always registered by AudioSystem when the client process
 // connects to AudioPolicyService.
-void AudioPolicyService::registerClient(const sp<IAudioPolicyServiceClient>& client)
+Status AudioPolicyService::registerClient(const sp<media::IAudioPolicyServiceClient>& client)
 {
     if (client == 0) {
         ALOGW("%s got NULL client", __FUNCTION__);
-        return;
+        return Status::ok();
     }
     Mutex::Autolock _l(mNotificationClientsLock);
 
@@ -135,9 +198,10 @@
         sp<IBinder> binder = IInterface::asBinder(client);
         binder->linkToDeath(notificationClient);
     }
+    return Status::ok();
 }
 
-void AudioPolicyService::setAudioPortCallbacksEnabled(bool enabled)
+Status AudioPolicyService::setAudioPortCallbacksEnabled(bool enabled)
 {
     Mutex::Autolock _l(mNotificationClientsLock);
 
@@ -146,12 +210,13 @@
     int64_t token = ((int64_t)uid<<32) | pid;
 
     if (mNotificationClients.indexOfKey(token) < 0) {
-        return;
+        return Status::ok();
     }
     mNotificationClients.valueFor(token)->setAudioPortCallbacksEnabled(enabled);
+    return Status::ok();
 }
 
-void AudioPolicyService::setAudioVolumeGroupCallbacksEnabled(bool enabled)
+Status AudioPolicyService::setAudioVolumeGroupCallbacksEnabled(bool enabled)
 {
     Mutex::Autolock _l(mNotificationClientsLock);
 
@@ -160,9 +225,10 @@
     int64_t token = ((int64_t)uid<<32) | pid;
 
     if (mNotificationClients.indexOfKey(token) < 0) {
-        return;
+        return Status::ok();
     }
     mNotificationClients.valueFor(token)->setAudioVolumeGroupCallbacksEnabled(enabled);
+    return Status::ok();
 }
 
 // removeNotificationClient() is called when the client process dies.
@@ -274,6 +340,19 @@
     }
 }
 
+void AudioPolicyService::onRoutingUpdated()
+{
+    mOutputCommandThread->routingChangedCommand();
+}
+
+void AudioPolicyService::doOnRoutingUpdated()
+{
+    Mutex::Autolock _l(mNotificationClientsLock);
+    for (size_t i = 0; i < mNotificationClients.size(); i++) {
+        mNotificationClients.valueAt(i)->onRoutingUpdated();
+    }
+}
+
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
                                                 audio_patch_handle_t *handle,
                                                 int delayMs)
@@ -293,10 +372,11 @@
     return mAudioCommandThread->setAudioPortConfigCommand(config, delayMs);
 }
 
-AudioPolicyService::NotificationClient::NotificationClient(const sp<AudioPolicyService>& service,
-                                                     const sp<IAudioPolicyServiceClient>& client,
-                                                     uid_t uid,
-                                                     pid_t pid)
+AudioPolicyService::NotificationClient::NotificationClient(
+        const sp<AudioPolicyService>& service,
+        const sp<media::IAudioPolicyServiceClient>& client,
+        uid_t uid,
+        pid_t pid)
     : mService(service), mUid(uid), mPid(pid), mAudioPolicyServiceClient(client),
       mAudioPortCallbacksEnabled(false), mAudioVolumeGroupCallbacksEnabled(false)
 {
@@ -342,7 +422,8 @@
         const String8& regId, int32_t state)
 {
     if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
-        mAudioPolicyServiceClient->onDynamicPolicyMixStateUpdate(regId, state);
+        mAudioPolicyServiceClient->onDynamicPolicyMixStateUpdate(
+                legacy2aidl_String8_string(regId).value(), state);
     }
 }
 
@@ -357,8 +438,37 @@
                                             audio_source_t source)
 {
     if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
-        mAudioPolicyServiceClient->onRecordingConfigurationUpdate(event, clientInfo,
-                clientConfig, clientEffects, deviceConfig, effects, patchHandle, source);
+        status_t status = [&]() -> status_t {
+            int32_t eventAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(event));
+            media::RecordClientInfo clientInfoAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_record_client_info_t_RecordClientInfo(*clientInfo));
+            media::AudioConfigBase clientConfigAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_config_base_t_AudioConfigBase(*clientConfig));
+            std::vector<media::EffectDescriptor> clientEffectsAidl = VALUE_OR_RETURN_STATUS(
+                    convertContainer<std::vector<media::EffectDescriptor>>(
+                            clientEffects,
+                            legacy2aidl_effect_descriptor_t_EffectDescriptor));
+            media::AudioConfigBase deviceConfigAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_config_base_t_AudioConfigBase(*deviceConfig));
+            std::vector<media::EffectDescriptor> effectsAidl = VALUE_OR_RETURN_STATUS(
+                    convertContainer<std::vector<media::EffectDescriptor>>(
+                            effects,
+                            legacy2aidl_effect_descriptor_t_EffectDescriptor));
+            int32_t patchHandleAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_patch_handle_t_int32_t(patchHandle));
+            media::AudioSourceType sourceAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_source_t_AudioSourceType(source));
+            return aidl_utils::statusTFromBinderStatus(
+                    mAudioPolicyServiceClient->onRecordingConfigurationUpdate(eventAidl,
+                                                                              clientInfoAidl,
+                                                                              clientConfigAidl,
+                                                                              clientEffectsAidl,
+                                                                              deviceConfigAidl,
+                                                                              effectsAidl,
+                                                                              patchHandleAidl,
+                                                                              sourceAidl));
+        }();
+        ALOGW_IF(status != OK, "onRecordingConfigurationUpdate() failed: %d", status);
     }
 }
 
@@ -372,6 +482,13 @@
     mAudioVolumeGroupCallbacksEnabled = enabled;
 }
 
+void AudioPolicyService::NotificationClient::onRoutingUpdated()
+{
+    if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
+        mAudioPolicyServiceClient->onRoutingUpdated();
+    }
+}
+
 void AudioPolicyService::binderDied(const wp<IBinder>& who) {
     ALOGW("binderDied() %p, calling pid %d", who.unsafe_get(),
             IPCThreadState::self()->getCallingPid());
@@ -453,7 +570,7 @@
     sp<AudioRecordClient> topActive;
     sp<AudioRecordClient> latestActive;
     sp<AudioRecordClient> topSensitiveActive;
-    sp<AudioRecordClient> latestSensitiveActive;
+    sp<AudioRecordClient> latestSensitiveActiveOrComm;
 
     nsecs_t topStartNs = 0;
     nsecs_t latestStartNs = 0;
@@ -467,6 +584,7 @@
     bool rttCallActive = (isInCall || isInCommunication)
             && mUidPolicy->isRttEnabled();
     bool onlyHotwordActive = true;
+    bool isPhoneStateOwnerActive = false;
 
     // if Sensor Privacy is enabled then all recordings should be silenced.
     if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
@@ -476,24 +594,27 @@
 
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
+        uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+                current->attributionSource.uid));
         if (!current->active) {
             continue;
         }
 
-        app_state_t appState = apmStatFromAmState(mUidPolicy->getUidState(current->uid));
+        app_state_t appState = apmStatFromAmState(mUidPolicy->getUidState(currentUid));
         // clients which app is in IDLE state are not eligible for top active or
         // latest active
         if (appState == APP_STATE_IDLE) {
             continue;
         }
 
-        bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
-        // Clients capturing for Accessibility services are not considered
+        bool isAccessibility = mUidPolicy->isA11yUid(currentUid);
+        // Clients capturing for Accessibility services or virtual sources are not considered
         // for top or latest active to avoid masking regular clients started before
-        if (!isAccessibility) {
-            bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
+        if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
+            bool isAssistant = mUidPolicy->isAssistantUid(currentUid);
             bool isPrivacySensitive =
                     (current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
+
             if (appState == APP_STATE_TOP) {
                 if (isPrivacySensitive) {
                     if (current->startTimeNs > topSensitiveStartNs) {
@@ -515,9 +636,17 @@
             if (!(current->attributes.source == AUDIO_SOURCE_HOTWORD
                     || ((isA11yOnTop || rttCallActive) && isAssistant))) {
                 if (isPrivacySensitive) {
-                    if (current->startTimeNs > latestSensitiveStartNs) {
-                        latestSensitiveActive = current;
-                        latestSensitiveStartNs = current->startTimeNs;
+                    // if audio mode is IN_COMMUNICATION, make sure the audio mode owner
+                    // is marked latest sensitive active even if another app qualifies.
+                    if (current->startTimeNs > latestSensitiveStartNs
+                            || (isInCommunication && currentUid == mPhoneStateOwnerUid)) {
+                        if (!isInCommunication || latestSensitiveActiveOrComm == nullptr
+                                || VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+                                    latestSensitiveActiveOrComm->attributionSource.uid))
+                                        != mPhoneStateOwnerUid) {
+                            latestSensitiveActiveOrComm = current;
+                            latestSensitiveStartNs = current->startTimeNs;
+                        }
                     }
                     isSensitiveActive = true;
                 } else {
@@ -531,6 +660,9 @@
         if (current->attributes.source != AUDIO_SOURCE_HOTWORD) {
             onlyHotwordActive = false;
         }
+        if (currentUid == mPhoneStateOwnerUid) {
+            isPhoneStateOwnerActive = true;
+        }
     }
 
     // if no active client with UI on Top, consider latest active as top
@@ -539,8 +671,17 @@
         topStartNs = latestStartNs;
     }
     if (topSensitiveActive == nullptr) {
-        topSensitiveActive = latestSensitiveActive;
+        topSensitiveActive = latestSensitiveActiveOrComm;
         topSensitiveStartNs = latestSensitiveStartNs;
+    } else if (latestSensitiveActiveOrComm != nullptr) {
+        // if audio mode is IN_COMMUNICATION, favor audio mode owner over an app with
+        // foreground UI in case both are capturing with privacy sensitive flag.
+        uid_t latestActiveUid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_uid_t(latestSensitiveActiveOrComm->attributionSource.uid));
+        if (isInCommunication && latestActiveUid == mPhoneStateOwnerUid) {
+            topSensitiveActive = latestSensitiveActiveOrComm;
+            topSensitiveStartNs = latestSensitiveStartNs;
+        }
     }
 
     // If both privacy sensitive and regular capture are active:
@@ -555,24 +696,27 @@
 
     for (size_t i =0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
+        uid_t currentUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+            current->attributionSource.uid));
         if (!current->active) {
             continue;
         }
 
         audio_source_t source = current->attributes.source;
-        bool isTopOrLatestActive = topActive == nullptr ? false : current->uid == topActive->uid;
-        bool isTopOrLatestSensitive = topSensitiveActive == nullptr ?
-                                 false : current->uid == topSensitiveActive->uid;
+        bool isTopOrLatestActive = topActive == nullptr ? false :
+            current->attributionSource.uid == topActive->attributionSource.uid;
+        bool isTopOrLatestSensitive = topSensitiveActive == nullptr ? false :
+            current->attributionSource.uid == topSensitiveActive->attributionSource.uid;
 
         auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
+            uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
+                recordClient->attributionSource.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
-            return !(isInCall && !canCaptureCall);
-//TODO(b/160260850): restore restriction to mode owner once fix for misbehaving apps is merged
-//            bool canCaptureCommunication = recordClient->canCaptureOutput
-//                || recordClient->uid == mPhoneStateOwnerUid
-//                || isServiceUid(mPhoneStateOwnerUid);
-//            return !(isInCall && !canCaptureCall)
-//                && !(isInCommunication && !canCaptureCommunication);
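+            // During a communication session, restrict capture to the audio mode owner and
+            // clients allowed to capture output, unless the mode owner has no active record
+            // client.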
+            bool canCaptureCommunication = recordClient->canCaptureOutput
+                || !isPhoneStateOwnerActive
+                || recordUid == mPhoneStateOwnerUid;
+            return !(isInCall && !canCaptureCall)
+                && !(isInCommunication && !canCaptureCommunication);
         };
 
         // By default allow capture if:
@@ -586,10 +730,13 @@
                     && !(isTopOrLatestSensitive || current->canCaptureOutput))
                 && canCaptureIfInCallOrCommunication(current);
 
-        if (isVirtualSource(source)) {
+        if (!current->hasOp()) {
+            // Never allow capture if app op is denied
+            allowCapture = false;
+        } else if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (mUidPolicy->isAssistantUid(current->uid)) {
+        } else if (mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
             //     An accessibility service is on TOP or a RTT call is active
             //            AND the source is VOICE_RECOGNITION or HOTWORD
@@ -609,7 +756,7 @@
                     allowCapture = true;
                 }
             }
-        } else if (mUidPolicy->isA11yUid(current->uid)) {
+        } else if (mUidPolicy->isA11yUid(currentUid)) {
             // For accessibility service allow capture if:
             //     The assistant is not on TOP
             //         AND there is no active privacy sensitive capture or call
@@ -635,15 +782,15 @@
                     && canCaptureIfInCallOrCommunication(current)) {
                 allowCapture = true;
             }
-        } else if (mUidPolicy->isCurrentImeUid(current->uid)) {
+        } else if (mUidPolicy->isCurrentImeUid(currentUid)) {
             // For current InputMethodService allow capture if:
             //     A RTT call is active AND the source is VOICE_RECOGNITION
             if (rttCallActive && source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                 allowCapture = true;
             }
         }
-        setAppState_l(current->portId,
-                      allowCapture ? apmStatFromAmState(mUidPolicy->getUidState(current->uid)) :
+        setAppState_l(current,
+                      allowCapture ? apmStatFromAmState(mUidPolicy->getUidState(currentUid)) :
                                 APP_STATE_IDLE);
     }
 }
@@ -652,7 +799,7 @@
     for (size_t i = 0; i < mAudioRecordClients.size(); i++) {
         sp<AudioRecordClient> current = mAudioRecordClients[i];
         if (!isVirtualSource(current->attributes.source)) {
-            setAppState_l(current->portId, APP_STATE_IDLE);
+            setAppState_l(current, APP_STATE_IDLE);
         }
     }
 }
@@ -686,17 +833,45 @@
     return false;
 }
 
-void AudioPolicyService::setAppState_l(audio_port_handle_t portId, app_state_t state)
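+// Returns true when recording from the given source should be gated by the client's app op.
+// Capture from FM tuner and echo reference sources is not subject to app op monitoring.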
+/* static */
+bool AudioPolicyService::isAppOpSource(audio_source_t source)
+{
+    switch (source) {
+        case AUDIO_SOURCE_FM_TUNER:
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+            return false;
+        default:
+            break;
+    }
+    return true;
+}
+
+void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
 {
     AutoCallerClear acc;
 
     if (mAudioPolicyManager) {
-        mAudioPolicyManager->setAppState(portId, state);
+        mAudioPolicyManager->setAppState(client->portId, state);
     }
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af) {
         bool silenced = state == APP_STATE_IDLE;
-        af->setRecordSilenced(portId, silenced);
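+        // Only act on silencing state changes: for active clients, update the recording app op
+        // accounting before propagating the new silenced state to AudioFlinger.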
+        if (client->silenced != silenced) {
+            if (client->active) {
+                if (silenced) {
+                    finishRecording(client->attributionSource, client->attributes.source);
+                } else {
+                    std::stringstream msg;
+                    msg << "Audio recording un-silenced on session " << client->session;
+                    if (!startRecording(client->attributionSource, String16(msg.str().c_str()),
+                            client->attributes.source)) {
+                        silenced = true;
+                    }
+                }
+            }
+            af->setRecordSilenced(client->portId, silenced);
+            client->silenced = silenced;
+        }
     }
 }
 
@@ -743,6 +918,93 @@
 
 status_t AudioPolicyService::onTransact(
         uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+    // make sure transactions reserved for AudioFlinger do not come from other processes
+    switch (code) {
+        case TRANSACTION_startOutput:
+        case TRANSACTION_stopOutput:
+        case TRANSACTION_releaseOutput:
+        case TRANSACTION_getInputForAttr:
+        case TRANSACTION_startInput:
+        case TRANSACTION_stopInput:
+        case TRANSACTION_releaseInput:
+        case TRANSACTION_getOutputForEffect:
+        case TRANSACTION_registerEffect:
+        case TRANSACTION_unregisterEffect:
+        case TRANSACTION_setEffectEnabled:
+        case TRANSACTION_getStrategyForStream:
+        case TRANSACTION_getOutputForAttr:
+        case TRANSACTION_moveEffectsToIo:
+            ALOGW("%s: transaction %d received from PID %d",
+                  __func__, code, IPCThreadState::self()->getCallingPid());
+            return INVALID_OPERATION;
+        default:
+            break;
+    }
+
+    // make sure the following transactions come from system components
+    switch (code) {
+        case TRANSACTION_setDeviceConnectionState:
+        case TRANSACTION_handleDeviceConfigChange:
+        case TRANSACTION_setPhoneState:
+//FIXME: Allow setForceUse calls from system apps until a better use case routing API is available
+//      case TRANSACTION_setForceUse:
+        case TRANSACTION_initStreamVolume:
+        case TRANSACTION_setStreamVolumeIndex:
+        case TRANSACTION_setVolumeIndexForAttributes:
+        case TRANSACTION_getStreamVolumeIndex:
+        case TRANSACTION_getVolumeIndexForAttributes:
+        case TRANSACTION_getMinVolumeIndexForAttributes:
+        case TRANSACTION_getMaxVolumeIndexForAttributes:
+        case TRANSACTION_isStreamActive:
+        case TRANSACTION_isStreamActiveRemotely:
+        case TRANSACTION_isSourceActive:
+        case TRANSACTION_getDevicesForStream:
+        case TRANSACTION_registerPolicyMixes:
+        case TRANSACTION_setMasterMono:
+        case TRANSACTION_getSurroundFormats:
+        case TRANSACTION_getReportedSurroundFormats:
+        case TRANSACTION_setSurroundFormatEnabled:
+        case TRANSACTION_setAssistantUid:
+        case TRANSACTION_setA11yServicesUids:
+        case TRANSACTION_setUidDeviceAffinities:
+        case TRANSACTION_removeUidDeviceAffinities:
+        case TRANSACTION_setUserIdDeviceAffinities:
+        case TRANSACTION_removeUserIdDeviceAffinities:
+        case TRANSACTION_getHwOffloadEncodingFormatsSupportedForA2DP:
+        case TRANSACTION_listAudioVolumeGroups:
+        case TRANSACTION_getVolumeGroupFromAudioAttributes:
+        case TRANSACTION_acquireSoundTriggerSession:
+        case TRANSACTION_releaseSoundTriggerSession:
+        case TRANSACTION_setRttEnabled:
+        case TRANSACTION_isCallScreenModeSupported:
+        case TRANSACTION_setDevicesRoleForStrategy:
+        case TRANSACTION_setSupportedSystemUsages:
+        case TRANSACTION_removeDevicesRoleForStrategy:
+        case TRANSACTION_getDevicesForRoleAndStrategy:
+        case TRANSACTION_getDevicesForAttributes:
+        case TRANSACTION_setAllowedCapturePolicy:
+        case TRANSACTION_onNewAudioModulesAvailable:
+        case TRANSACTION_setCurrentImeUid:
+        case TRANSACTION_registerSoundTriggerCaptureStateListener:
+        case TRANSACTION_setDevicesRoleForCapturePreset:
+        case TRANSACTION_addDevicesRoleForCapturePreset:
+        case TRANSACTION_removeDevicesRoleForCapturePreset:
+        case TRANSACTION_clearDevicesRoleForCapturePreset:
+        case TRANSACTION_getDevicesForRoleAndCapturePreset: {
+            if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
+                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
+                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      IPCThreadState::self()->getCallingUid());
+                return INVALID_OPERATION;
+            }
+        } break;
+        default:
+            break;
+    }
+
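+    // Wrap the remaining dispatch in a TimeCheck so commands that take too long are flagged.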
+    std::string tag("IAudioPolicyService command " + std::to_string(code));
+    TimeCheck check(tag.c_str());
+
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
             int in = data.readFileDescriptor();
@@ -789,6 +1051,9 @@
         return handleResetUidState(args, err);
     } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
         return handleGetUidState(args, out, err);
+    } else if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
+        purgePermissionCache();
+        return NO_ERROR;
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
         return NO_ERROR;
@@ -1143,6 +1408,14 @@
     spm.addSensorPrivacyListener(this);
 }
 
+void AudioPolicyService::SensorPrivacyPolicy::registerSelfForMicrophoneOnly(int userId) {
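+    // Unlike registerSelf(), only the per-user microphone sensor privacy toggle is monitored.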
+    SensorPrivacyManager spm;
+    mSensorPrivacyEnabled = spm.isIndividualSensorPrivacyEnabled(userId,
+            SensorPrivacyManager::INDIVIDUAL_SENSOR_MICROPHONE);
+    spm.addIndividualSensorPrivacyListener(userId,
+            SensorPrivacyManager::INDIVIDUAL_SENSOR_MICROPHONE, this);
+}
+
 void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
     SensorPrivacyManager spm;
     spm.removeSensorPrivacyListener(this);
@@ -1161,6 +1434,109 @@
     return binder::Status::ok();
 }
 
+// -----------  AudioPolicyService::OpRecordAudioMonitor implementation ----------
+
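+// OpRecordAudioMonitor tracks the recording app op for a capture client and triggers a UID
+// state re-evaluation on the command thread whenever that op changes.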
+// static
+sp<AudioPolicyService::OpRecordAudioMonitor>
+AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
+            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+            wp<AudioCommandThread> commandThread)
+{
+    if (isAudioServerOrRootUid(attributionSource.uid)) {
+        ALOGV("not silencing record for audio or root source %s",
+                attributionSource.toString().c_str());
+        return nullptr;
+    }
+
+    if (!AudioPolicyService::isAppOpSource(attr.source)) {
+        ALOGD("not monitoring app op for uid %d and source %d",
+                attributionSource.uid, attr.source);
+        return nullptr;
+    }
+
+    if (!attributionSource.packageName.has_value()
+            || attributionSource.packageName.value().size() == 0) {
+        return nullptr;
+    }
+    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
+        const AttributionSourceState& attributionSource, int32_t appOp,
+        wp<AudioCommandThread> commandThread) :
+            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+            mCommandThread(commandThread)
+{
+}
+
+AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+    if (mOpCallback != 0) {
+        mAppOpsManager.stopWatchingMode(mOpCallback);
+    }
+    mOpCallback.clear();
+}
+
+void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
+{
+    checkOp();
+    mOpCallback = new RecordAudioOpCallback(this);
+    ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+        mAttributionSource.packageName.value_or(""))),
+        mOpCallback);
+}
+
+bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
+    return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated, and also once from onFirstRef().
+// Note this method is never called (and never will be) for audio server / root tracks
+// due to the UID check in createIfNeeded(). As a result, for those record tracks, it is:
+// - not called from constructor,
+// - not called from RecordAudioOpCallback because the callback is not installed in this case
+void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                mAttributionSource.packageName.value_or(""))));
+    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+    // Verbose logging: only log when the appOp state changes.
+    ALOGI_IF(hasIt != mHasOp.load(),
+            "App op %d missing, %ssilencing record %s",
+            mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+    mHasOp.store(hasIt);
+
+    if (updateUidStates) {
+        sp<AudioCommandThread> commandThread = mCommandThread.promote();
+        if (commandThread != nullptr) {
+            commandThread->updateUidStatesCommand();
+        }
+    }
+}
+
+AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+            const String16& packageName __unused) {
+    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+    if (monitor != NULL) {
+        if (op != monitor->getOp()) {
+            return;
+        }
+        monitor->checkOp(true);
+    }
+}
+
+
 // -----------  AudioPolicyService::AudioCommandThread implementation ----------
 
 AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
@@ -1366,6 +1742,27 @@
                     svc->doOnNewAudioModulesAvailable();
                     mLock.lock();
                     } break;
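+                // Routing updates and UID state refreshes are dispatched to the service with
+                // the command thread lock released, like the other asynchronous commands above.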
+                case ROUTING_UPDATED: {
+                    ALOGV("AudioCommandThread() processing routing update");
+                    svc = mService.promote();
+                    if (svc == 0) {
+                        break;
+                    }
+                    mLock.unlock();
+                    svc->doOnRoutingUpdated();
+                    mLock.lock();
+                    } break;
+
+                case UPDATE_UID_STATES: {
+                    ALOGV("AudioCommandThread() processing updateUID states");
+                    svc = mService.promote();
+                    if (svc == 0) {
+                        break;
+                    }
+                    mLock.unlock();
+                    svc->updateUidStates();
+                    mLock.lock();
+                    } break;
 
                 default:
                     ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
@@ -1580,6 +1977,14 @@
     sendCommand(command);
 }
 
+void AudioPolicyService::AudioCommandThread::updateUidStatesCommand()
+{
+    sp<AudioCommand> command = new AudioCommand();
+    command->mCommand = UPDATE_UID_STATES;
+    ALOGV("AudioCommandThread() adding update UID states");
+    sendCommand(command);
+}
+
 void AudioPolicyService::AudioCommandThread::updateAudioPatchListCommand()
 {
     sp<AudioCommand>command = new AudioCommand();
@@ -1662,6 +2067,14 @@
     sendCommand(command);
 }
 
+void AudioPolicyService::AudioCommandThread::routingChangedCommand()
+{
+    sp<AudioCommand> command = new AudioCommand();
+    command->mCommand = ROUTING_UPDATED;
+    ALOGV("AudioCommandThread() adding routing update");
+    sendCommand(command);
+}
+
 status_t AudioPolicyService::AudioCommandThread::sendCommand(sp<AudioCommand>& command, int delayMs)
 {
     {
@@ -1825,6 +2238,10 @@
 
         } break;
 
+        case ROUTING_UPDATED: {
+
+        } break;
+
         default:
             break;
         }
@@ -1909,9 +2326,10 @@
     mAudioCommandThread->setEffectSuspendedCommand(effectId, sessionId, suspended);
 }
 
-void AudioPolicyService::onNewAudioModulesAvailable()
+Status AudioPolicyService::onNewAudioModulesAvailable()
 {
     mOutputCommandThread->audioModulesUpdateCommand();
+    return Status::ok();
 }
 
 
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 869a963..3b77ed8 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -1,3 +1,4 @@
+
 /*
  * Copyright (C) 2009 The Android Open Source Project
  *
@@ -17,6 +18,7 @@
 #ifndef ANDROID_AUDIOPOLICYSERVICE_H
 #define ANDROID_AUDIOPOLICYSERVICE_H
 
+#include <android/media/BnAudioPolicyService.h>
 #include <android-base/thread_annotations.h>
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
@@ -25,11 +27,11 @@
 #include <utils/Vector.h>
 #include <utils/SortedVector.h>
 #include <binder/ActivityManager.h>
+#include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
 #include <binder/IUidObserver.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
-#include <media/IAudioPolicyService.h>
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
@@ -38,16 +40,19 @@
 #include "CaptureStateNotifier.h"
 #include <AudioPolicyInterface.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
+#include <android/content/AttributionSourceState.h>
 
 #include <unordered_map>
 
 namespace android {
 
+using content::AttributionSourceState;
+
 // ----------------------------------------------------------------------------
 
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
-    public BnAudioPolicyService,
+    public media::BnAudioPolicyService,
     public IBinder::DeathRecipient
 {
     friend class BinderService<AudioPolicyService>;
@@ -61,108 +66,181 @@
     //
     // BnAudioPolicyService (see AudioPolicyInterface for method descriptions)
     //
-
-    void onNewAudioModulesAvailable() override;
-    virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                              audio_policy_dev_state_t state,
-                                              const char *device_address,
-                                              const char *device_name,
-                                              audio_format_t encodedFormat);
-    virtual audio_policy_dev_state_t getDeviceConnectionState(
-                                                                audio_devices_t device,
-                                                                const char *device_address);
-    virtual status_t handleDeviceConfigChange(audio_devices_t device,
-                                              const char *device_address,
-                                              const char *device_name,
-                                              audio_format_t encodedFormat);
-    virtual status_t setPhoneState(audio_mode_t state, uid_t uid);
-    virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
-    virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
-    virtual audio_io_handle_t getOutput(audio_stream_type_t stream);
-    status_t getOutputForAttr(audio_attributes_t *attr,
-                              audio_io_handle_t *output,
-                              audio_session_t session,
-                              audio_stream_type_t *stream,
-                              pid_t pid,
-                              uid_t uid,
-                              const audio_config_t *config,
-                              audio_output_flags_t flags,
-                              audio_port_handle_t *selectedDeviceId,
-                              audio_port_handle_t *portId,
-                              std::vector<audio_io_handle_t> *secondaryOutputs) override;
-    virtual status_t startOutput(audio_port_handle_t portId);
-    virtual status_t stopOutput(audio_port_handle_t portId);
-    virtual void releaseOutput(audio_port_handle_t portId);
-    virtual status_t getInputForAttr(const audio_attributes_t *attr,
-                                     audio_io_handle_t *input,
-                                     audio_unique_id_t riid,
-                                     audio_session_t session,
-                                     pid_t pid,
-                                     uid_t uid,
-                                     const String16& opPackageName,
-                                     const audio_config_base_t *config,
-                                     audio_input_flags_t flags,
-                                     audio_port_handle_t *selectedDeviceId = NULL,
-                                     audio_port_handle_t *portId = NULL);
-    virtual status_t startInput(audio_port_handle_t portId);
-    virtual status_t stopInput(audio_port_handle_t portId);
-    virtual void releaseInput(audio_port_handle_t portId);
-    virtual status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax);
-    virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
-                                          int index,
-                                          audio_devices_t device);
-    virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
-                                          int *index,
-                                          audio_devices_t device);
-
-    virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                 int index,
-                                                 audio_devices_t device);
-    virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                 int &index,
-                                                 audio_devices_t device);
-    virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                    int &index);
-    virtual status_t getMaxVolumeIndexForAttributes(const audio_attributes_t &attr,
-                                                    int &index);
-
-    virtual uint32_t getStrategyForStream(audio_stream_type_t stream);
-    virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
-    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
-                                             AudioDeviceTypeAddrVector *devices) const;
-
-    virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
-    virtual status_t registerEffect(const effect_descriptor_t *desc,
-                                    audio_io_handle_t io,
-                                    uint32_t strategy,
-                                    audio_session_t session,
-                                    int id);
-    virtual status_t unregisterEffect(int id);
-    virtual status_t setEffectEnabled(int id, bool enabled);
-    status_t moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) override;
-    virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
-    virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
-    virtual bool isSourceActive(audio_source_t source) const;
-
-    virtual status_t queryDefaultPreProcessing(audio_session_t audioSession,
-                                              effect_descriptor_t *descriptors,
-                                              uint32_t *count);
-    virtual status_t addSourceDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_source_t source,
-                                            audio_unique_id_t* id);
-    virtual status_t addStreamDefaultEffect(const effect_uuid_t *type,
-                                            const String16& opPackageName,
-                                            const effect_uuid_t *uuid,
-                                            int32_t priority,
-                                            audio_usage_t usage,
-                                            audio_unique_id_t* id);
-    virtual status_t removeSourceDefaultEffect(audio_unique_id_t id);
-    virtual status_t removeStreamDefaultEffect(audio_unique_id_t id);
+    binder::Status onNewAudioModulesAvailable() override;
+    binder::Status setDeviceConnectionState(
+            const media::AudioDevice& device,
+            media::AudioPolicyDeviceState state,
+            const std::string& deviceName,
+            media::audio::common::AudioFormat encodedFormat) override;
+    binder::Status getDeviceConnectionState(const media::AudioDevice& device,
+                                            media::AudioPolicyDeviceState* _aidl_return) override;
+    binder::Status handleDeviceConfigChange(
+            const media::AudioDevice& device,
+            const std::string& deviceName,
+            media::audio::common::AudioFormat encodedFormat) override;
+    binder::Status setPhoneState(media::AudioMode state, int32_t uid) override;
+    binder::Status setForceUse(media::AudioPolicyForceUse usage,
+                               media::AudioPolicyForcedConfig config) override;
+    binder::Status getForceUse(media::AudioPolicyForceUse usage,
+                               media::AudioPolicyForcedConfig* _aidl_return) override;
+    binder::Status getOutput(media::AudioStreamType stream, int32_t* _aidl_return) override;
+    binder::Status getOutputForAttr(const media::AudioAttributesInternal& attr, int32_t session,
+                                    const AttributionSourceState &attributionSource,
+                                    const media::AudioConfig& config,
+                                    int32_t flags, int32_t selectedDeviceId,
+                                    media::GetOutputForAttrResponse* _aidl_return) override;
+    binder::Status startOutput(int32_t portId) override;
+    binder::Status stopOutput(int32_t portId) override;
+    binder::Status releaseOutput(int32_t portId) override;
+    binder::Status getInputForAttr(const media::AudioAttributesInternal& attr, int32_t input,
+                                   int32_t riid, int32_t session,
+                                   const AttributionSourceState &attributionSource,
+                                   const media::AudioConfigBase& config, int32_t flags,
+                                   int32_t selectedDeviceId,
+                                   media::GetInputForAttrResponse* _aidl_return) override;
+    binder::Status startInput(int32_t portId) override;
+    binder::Status stopInput(int32_t portId) override;
+    binder::Status releaseInput(int32_t portId) override;
+    binder::Status initStreamVolume(media::AudioStreamType stream, int32_t indexMin,
+                                    int32_t indexMax) override;
+    binder::Status setStreamVolumeIndex(media::AudioStreamType stream, int32_t device,
+                                        int32_t index) override;
+    binder::Status getStreamVolumeIndex(media::AudioStreamType stream, int32_t device,
+                                        int32_t* _aidl_return) override;
+    binder::Status setVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+                                               int32_t device, int32_t index) override;
+    binder::Status getVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+                                               int32_t device, int32_t* _aidl_return) override;
+    binder::Status getMaxVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+                                                  int32_t* _aidl_return) override;
+    binder::Status getMinVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
+                                                  int32_t* _aidl_return) override;
+    binder::Status getStrategyForStream(media::AudioStreamType stream,
+                                        int32_t* _aidl_return) override;
+    binder::Status getDevicesForStream(media::AudioStreamType stream,
+                                       int32_t* _aidl_return) override;
+    binder::Status getDevicesForAttributes(const media::AudioAttributesEx& attr,
+                                           std::vector<media::AudioDevice>* _aidl_return) override;
+    binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
+                                      int32_t* _aidl_return) override;
+    binder::Status registerEffect(const media::EffectDescriptor& desc, int32_t io, int32_t strategy,
+                                  int32_t session, int32_t id) override;
+    binder::Status unregisterEffect(int32_t id) override;
+    binder::Status setEffectEnabled(int32_t id, bool enabled) override;
+    binder::Status moveEffectsToIo(const std::vector<int32_t>& ids, int32_t io) override;
+    binder::Status isStreamActive(media::AudioStreamType stream, int32_t inPastMs,
+                                  bool* _aidl_return) override;
+    binder::Status isStreamActiveRemotely(media::AudioStreamType stream, int32_t inPastMs,
+                                          bool* _aidl_return) override;
+    binder::Status isSourceActive(media::AudioSourceType source, bool* _aidl_return) override;
+    binder::Status queryDefaultPreProcessing(
+            int32_t audioSession, media::Int* count,
+            std::vector<media::EffectDescriptor>* _aidl_return) override;
+    binder::Status addSourceDefaultEffect(const media::AudioUuid& type,
+                                          const std::string& opPackageName,
+                                          const media::AudioUuid& uuid, int32_t priority,
+                                          media::AudioSourceType source,
+                                          int32_t* _aidl_return) override;
+    binder::Status addStreamDefaultEffect(const media::AudioUuid& type,
+                                          const std::string& opPackageName,
+                                          const media::AudioUuid& uuid, int32_t priority,
+                                          media::AudioUsage usage, int32_t* _aidl_return) override;
+    binder::Status removeSourceDefaultEffect(int32_t id) override;
+    binder::Status removeStreamDefaultEffect(int32_t id) override;
+    binder::Status setSupportedSystemUsages(
+            const std::vector<media::AudioUsage>& systemUsages) override;
+    binder::Status setAllowedCapturePolicy(int32_t uid, int32_t capturePolicy) override;
+    binder::Status getOffloadSupport(const media::AudioOffloadInfo& info,
+                                     media::AudioOffloadMode* _aidl_return) override;
+    binder::Status isDirectOutputSupported(const media::AudioConfigBase& config,
+                                           const media::AudioAttributesInternal& attributes,
+                                           bool* _aidl_return) override;
+    binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
+                                  media::Int* count, std::vector<media::AudioPort>* ports,
+                                  int32_t* _aidl_return) override;
+    binder::Status getAudioPort(const media::AudioPort& port,
+                                media::AudioPort* _aidl_return) override;
+    binder::Status createAudioPatch(const media::AudioPatch& patch, int32_t handle,
+                                    int32_t* _aidl_return) override;
+    binder::Status releaseAudioPatch(int32_t handle) override;
+    binder::Status listAudioPatches(media::Int* count, std::vector<media::AudioPatch>* patches,
+                                    int32_t* _aidl_return) override;
+    binder::Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+    binder::Status registerClient(const sp<media::IAudioPolicyServiceClient>& client) override;
+    binder::Status setAudioPortCallbacksEnabled(bool enabled) override;
+    binder::Status setAudioVolumeGroupCallbacksEnabled(bool enabled) override;
+    binder::Status acquireSoundTriggerSession(media::SoundTriggerSession* _aidl_return) override;
+    binder::Status releaseSoundTriggerSession(int32_t session) override;
+    binder::Status getPhoneState(media::AudioMode* _aidl_return) override;
+    binder::Status registerPolicyMixes(const std::vector<media::AudioMix>& mixes,
+                                       bool registration) override;
+    binder::Status setUidDeviceAffinities(int32_t uid,
+                                          const std::vector<media::AudioDevice>& devices) override;
+    binder::Status removeUidDeviceAffinities(int32_t uid) override;
+    binder::Status setUserIdDeviceAffinities(
+            int32_t userId,
+            const std::vector<media::AudioDevice>& devices) override;
+    binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
+    binder::Status startAudioSource(const media::AudioPortConfig& source,
+                                    const media::AudioAttributesInternal& attributes,
+                                    int32_t* _aidl_return) override;
+    binder::Status stopAudioSource(int32_t portId) override;
+    binder::Status setMasterMono(bool mono) override;
+    binder::Status getMasterMono(bool* _aidl_return) override;
+    binder::Status getStreamVolumeDB(media::AudioStreamType stream, int32_t index, int32_t device,
+                                     float* _aidl_return) override;
+    binder::Status getSurroundFormats(media::Int* count,
+                                      std::vector<media::audio::common::AudioFormat>* formats,
+                                      std::vector<bool>* formatsEnabled) override;
+    binder::Status getReportedSurroundFormats(
+            media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) override;
+    binder::Status getHwOffloadEncodingFormatsSupportedForA2DP(
+            std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
+    binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
+                                            bool enabled) override;
+    binder::Status setAssistantUid(int32_t uid) override;
+    binder::Status setA11yServicesUids(const std::vector<int32_t>& uids) override;
+    binder::Status setCurrentImeUid(int32_t uid) override;
+    binder::Status isHapticPlaybackSupported(bool* _aidl_return) override;
+    binder::Status listAudioProductStrategies(
+            std::vector<media::AudioProductStrategy>* _aidl_return) override;
+    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesEx& aa,
+                                                         bool fallbackOnDefault,
+                                                         int32_t* _aidl_return) override;
+    binder::Status listAudioVolumeGroups(
+            std::vector<media::AudioVolumeGroup>* _aidl_return) override;
+    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesEx& aa,
+                                                     bool fallbackOnDefault,
+                                                     int32_t* _aidl_return) override;
+    binder::Status setRttEnabled(bool enabled) override;
+    binder::Status isCallScreenModeSupported(bool* _aidl_return) override;
+    binder::Status setDevicesRoleForStrategy(
+            int32_t strategy, media::DeviceRole role,
+            const std::vector<media::AudioDevice>& devices) override;
+    binder::Status removeDevicesRoleForStrategy(int32_t strategy, media::DeviceRole role) override;
+    binder::Status getDevicesForRoleAndStrategy(
+            int32_t strategy, media::DeviceRole role,
+            std::vector<media::AudioDevice>* _aidl_return) override;
+    binder::Status setDevicesRoleForCapturePreset(
+            media::AudioSourceType audioSource,
+            media::DeviceRole role,
+            const std::vector<media::AudioDevice>& devices) override;
+    binder::Status addDevicesRoleForCapturePreset(
+            media::AudioSourceType audioSource,
+            media::DeviceRole role,
+            const std::vector<media::AudioDevice>& devices) override;
+    binder::Status removeDevicesRoleForCapturePreset(
+            media::AudioSourceType audioSource,
+            media::DeviceRole role,
+            const std::vector<media::AudioDevice>& devices) override;
+    binder::Status clearDevicesRoleForCapturePreset(media::AudioSourceType audioSource,
+                                                    media::DeviceRole role) override;
+    binder::Status getDevicesForRoleAndCapturePreset(
+            media::AudioSourceType audioSource,
+            media::DeviceRole role,
+            std::vector<media::AudioDevice>* _aidl_return) override;
+    binder::Status registerSoundTriggerCaptureStateListener(
+            const sp<media::ICaptureStateListener>& listener, bool* _aidl_return) override;
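These declarations replace the hand-rolled status_t interface with the AIDL-generated media::BnAudioPolicyService signatures: results come back through *_aidl_return out-parameters and failures through binder::Status. A hedged sketch of the usual wrapping shape follows; AidlStreamType, LegacyStreamType and aidl2legacy_stream() are placeholders for this sketch, not the real conversion utilities.

#include <cstdint>
#include <binder/Status.h>
#include <utils/Errors.h>

namespace example {

using android::binder::Status;

enum class AidlStreamType : int32_t { MUSIC = 3 };   // stand-in for media::AudioStreamType
using LegacyStreamType = int;                        // stand-in for audio_stream_type_t

// Placeholder converter; the real code relies on generated/legacy conversion helpers.
static android::status_t aidl2legacy_stream(AidlStreamType aidl, LegacyStreamType* legacy) {
    *legacy = static_cast<LegacyStreamType>(aidl);
    return android::OK;
}

class PolicyServiceShim {
public:
    // AIDL-facing method: the result travels through *_aidl_return, errors through Status.
    Status getOutput(AidlStreamType stream, int32_t* _aidl_return) {
        LegacyStreamType legacyStream;
        android::status_t status = aidl2legacy_stream(stream, &legacyStream);
        if (status != android::OK) {
            return Status::fromStatusT(status);      // map status_t into a binder Status
        }
        *_aidl_return = getOutputLegacy(legacyStream);
        return Status::ok();
    }

private:
    int32_t getOutputLegacy(LegacyStreamType /*stream*/) { return 0; }  // legacy body elided
};

}  // namespace example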
 
     virtual     status_t    onTransact(
                                 uint32_t code,
@@ -190,142 +268,53 @@
                                      audio_io_handle_t output,
                                      int delayMs = 0);
     virtual status_t setVoiceVolume(float volume, int delayMs = 0);
-    status_t setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages);
-    status_t setAllowedCapturePolicy(uint_t uid, audio_flags_mask_t capturePolicy) override;
-    virtual bool isOffloadSupported(const audio_offload_info_t &config);
-    virtual bool isDirectOutputSupported(const audio_config_base_t& config,
-                                         const audio_attributes_t& attributes);
 
-    virtual status_t listAudioPorts(audio_port_role_t role,
-                                    audio_port_type_t type,
-                                    unsigned int *num_ports,
-                                    struct audio_port *ports,
-                                    unsigned int *generation);
-    virtual status_t getAudioPort(struct audio_port *port);
-    virtual status_t createAudioPatch(const struct audio_patch *patch,
-                                       audio_patch_handle_t *handle);
-    virtual status_t releaseAudioPatch(audio_patch_handle_t handle);
-    virtual status_t listAudioPatches(unsigned int *num_patches,
-                                      struct audio_patch *patches,
-                                      unsigned int *generation);
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+    void doOnNewAudioModulesAvailable();
+    status_t doStopOutput(audio_port_handle_t portId);
+    void doReleaseOutput(audio_port_handle_t portId);
 
-    virtual void registerClient(const sp<IAudioPolicyServiceClient>& client);
+    status_t clientCreateAudioPatch(const struct audio_patch *patch,
+                              audio_patch_handle_t *handle,
+                              int delayMs);
+    status_t clientReleaseAudioPatch(audio_patch_handle_t handle,
+                                     int delayMs);
+    virtual status_t clientSetAudioPortConfig(const struct audio_port_config *config,
+                                              int delayMs);
 
-    virtual void setAudioPortCallbacksEnabled(bool enabled);
+    void removeNotificationClient(uid_t uid, pid_t pid);
+    void onAudioPortListUpdate();
+    void doOnAudioPortListUpdate();
+    void onAudioPatchListUpdate();
+    void doOnAudioPatchListUpdate();
 
-    virtual void setAudioVolumeGroupCallbacksEnabled(bool enabled);
+    void onDynamicPolicyMixStateUpdate(const String8& regId, int32_t state);
+    void doOnDynamicPolicyMixStateUpdate(const String8& regId, int32_t state);
+    void onRecordingConfigurationUpdate(int event,
+                                        const record_client_info_t *clientInfo,
+                                        const audio_config_base_t *clientConfig,
+                                        std::vector<effect_descriptor_t> clientEffects,
+                                        const audio_config_base_t *deviceConfig,
+                                        std::vector<effect_descriptor_t> effects,
+                                        audio_patch_handle_t patchHandle,
+                                        audio_source_t source);
+    void doOnRecordingConfigurationUpdate(int event,
+                                          const record_client_info_t *clientInfo,
+                                          const audio_config_base_t *clientConfig,
+                                          std::vector<effect_descriptor_t> clientEffects,
+                                          const audio_config_base_t *deviceConfig,
+                                          std::vector<effect_descriptor_t> effects,
+                                          audio_patch_handle_t patchHandle,
+                                          audio_source_t source);
 
-    virtual status_t acquireSoundTriggerSession(audio_session_t *session,
-                                           audio_io_handle_t *ioHandle,
-                                           audio_devices_t *device);
+    void onAudioVolumeGroupChanged(volume_group_t group, int flags);
+    void doOnAudioVolumeGroupChanged(volume_group_t group, int flags);
 
-    virtual status_t releaseSoundTriggerSession(audio_session_t session);
+    void onRoutingUpdated();
+    void doOnRoutingUpdated();
 
-    virtual audio_mode_t getPhoneState();
-
-    virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
-
-    virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
-
-    virtual status_t removeUidDeviceAffinities(uid_t uid);
-
-    virtual status_t setPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   const AudioDeviceTypeAddr &device);
-
-    virtual status_t removePreferredDeviceForStrategy(product_strategy_t strategy);
-
-
-    virtual status_t getPreferredDeviceForStrategy(product_strategy_t strategy,
-                                                   AudioDeviceTypeAddr &device);
-    virtual status_t setUserIdDeviceAffinities(int userId, const Vector<AudioDeviceTypeAddr>& devices);
-
-    virtual status_t removeUserIdDeviceAffinities(int userId);
-
-    virtual status_t startAudioSource(const struct audio_port_config *source,
-                                      const audio_attributes_t *attributes,
-                                      audio_port_handle_t *portId);
-    virtual status_t stopAudioSource(audio_port_handle_t portId);
-
-    virtual status_t setMasterMono(bool mono);
-    virtual status_t getMasterMono(bool *mono);
-
-    virtual float    getStreamVolumeDB(
-                audio_stream_type_t stream, int index, audio_devices_t device);
-
-    virtual status_t getSurroundFormats(unsigned int *numSurroundFormats,
-                                        audio_format_t *surroundFormats,
-                                        bool *surroundFormatsEnabled,
-                                        bool reported);
-    virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                                        std::vector<audio_format_t> *formats);
-    virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
-
-    virtual status_t setAssistantUid(uid_t uid);
-    virtual status_t setA11yServicesUids(const std::vector<uid_t>& uids);
-    virtual status_t setCurrentImeUid(uid_t uid);
-
-    virtual bool     isHapticPlaybackSupported();
-
-    virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
-    virtual status_t getProductStrategyFromAudioAttributes(const AudioAttributes &aa,
-                                                           product_strategy_t &productStrategy);
-
-    virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups);
-
-    virtual status_t getVolumeGroupFromAudioAttributes(const AudioAttributes &aa,
-                                                       volume_group_t &volumeGroup);
-
-    status_t registerSoundTriggerCaptureStateListener(
-        const sp<media::ICaptureStateListener>& listener,
-        bool* result) override;
-
-    virtual status_t setRttEnabled(bool enabled);
-
-            bool isCallScreenModeSupported() override;
-
-            void doOnNewAudioModulesAvailable();
-            status_t doStopOutput(audio_port_handle_t portId);
-            void doReleaseOutput(audio_port_handle_t portId);
-
-            status_t clientCreateAudioPatch(const struct audio_patch *patch,
-                                      audio_patch_handle_t *handle,
-                                      int delayMs);
-            status_t clientReleaseAudioPatch(audio_patch_handle_t handle,
-                                             int delayMs);
-            virtual status_t clientSetAudioPortConfig(const struct audio_port_config *config,
-                                                      int delayMs);
-
-            void removeNotificationClient(uid_t uid, pid_t pid);
-            void onAudioPortListUpdate();
-            void doOnAudioPortListUpdate();
-            void onAudioPatchListUpdate();
-            void doOnAudioPatchListUpdate();
-
-            void onDynamicPolicyMixStateUpdate(const String8& regId, int32_t state);
-            void doOnDynamicPolicyMixStateUpdate(const String8& regId, int32_t state);
-            void onRecordingConfigurationUpdate(int event,
-                                                const record_client_info_t *clientInfo,
-                                                const audio_config_base_t *clientConfig,
-                                                std::vector<effect_descriptor_t> clientEffects,
-                                                const audio_config_base_t *deviceConfig,
-                                                std::vector<effect_descriptor_t> effects,
-                                                audio_patch_handle_t patchHandle,
-                                                audio_source_t source);
-            void doOnRecordingConfigurationUpdate(int event,
-                                                  const record_client_info_t *clientInfo,
-                                                  const audio_config_base_t *clientConfig,
-                                                  std::vector<effect_descriptor_t> clientEffects,
-                                                  const audio_config_base_t *deviceConfig,
-                                                  std::vector<effect_descriptor_t> effects,
-                                                  audio_patch_handle_t patchHandle,
-                                                  audio_source_t source);
-
-            void onAudioVolumeGroupChanged(volume_group_t group, int flags);
-            void doOnAudioVolumeGroupChanged(volume_group_t group, int flags);
-            void setEffectSuspended(int effectId,
-                                    audio_session_t sessionId,
-                                    bool suspended);
+    void setEffectSuspended(int effectId,
+                            audio_session_t sessionId,
+                            bool suspended);
 
 private:
                         AudioPolicyService() ANDROID_API;
@@ -336,8 +325,10 @@
     // Handles binder shell commands
     virtual status_t shellCommand(int in, int out, int err, Vector<String16>& args);
 
+    class AudioRecordClient;
+
     // Sets whether the given UID records only silence
-    virtual void setAppState_l(audio_port_handle_t portId, app_state_t state) REQUIRES(mLock);
+    virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mLock);
 
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(Vector<String16>& args, int err);
@@ -359,7 +350,7 @@
 
     bool isSupportedSystemUsage(audio_usage_t usage);
     status_t validateUsage(audio_usage_t usage);
-    status_t validateUsage(audio_usage_t usage, pid_t pid, uid_t uid);
+    status_t validateUsage(audio_usage_t usage, const AttributionSourceState& attributionSource);
 
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
@@ -368,6 +359,13 @@
 
     static bool isVirtualSource(audio_source_t source);
 
+    /** Returns true if the audio source must be silenced when the corresponding app op is
+     *  denied. Returns false if the audio source does not actually capture from the
+     *  microphone even though it is mapped to app op OP_RECORD_AUDIO rather than to a
+     *  specialized op tracked separately. See getOpForSource().
+     */
+    static bool isAppOpSource(audio_source_t source);
+
     // If recording we need to make sure the UID is allowed to do that. If the UID is idle
     // then it cannot record and gets buffers with zeros - silence. As soon as the UID
     // transitions to an active state we will start reporting buffers with data. This approach
@@ -440,6 +438,7 @@
                     : mService(service) {}
 
             void registerSelf();
+            void registerSelfForMicrophoneOnly(int userId);
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
@@ -475,6 +474,8 @@
             RECORDING_CONFIGURATION_UPDATE,
             SET_EFFECT_SUSPENDED,
             AUDIO_MODULES_UPDATE,
+            ROUTING_UPDATED,
+            UPDATE_UID_STATES
         };
 
         AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -521,6 +522,8 @@
                                                           audio_session_t sessionId,
                                                           bool suspended);
                     void        audioModulesUpdateCommand();
+                    void        routingChangedCommand();
+                    void        updateUidStatesCommand();
                     void        insertCommand_l(AudioCommand *command, int delayMs = 0);
     private:
         class AudioCommandData;
@@ -739,10 +742,17 @@
 
         virtual void onAudioVolumeGroupChanged(volume_group_t group, int flags);
 
+        virtual void onRoutingUpdated();
+
         virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
 
         void setSoundTriggerCaptureState(bool active) override;
 
+        status_t getAudioPort(struct audio_port_v7 *port) override;
+
+        status_t updateSecondaryOutputs(
+                const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -751,7 +761,7 @@
     class NotificationClient : public IBinder::DeathRecipient {
     public:
                             NotificationClient(const sp<AudioPolicyService>& service,
-                                                const sp<IAudioPolicyServiceClient>& client,
+                                                const sp<media::IAudioPolicyServiceClient>& client,
                                                 uid_t uid, pid_t pid);
         virtual             ~NotificationClient();
 
@@ -769,6 +779,7 @@
                                                     std::vector<effect_descriptor_t> effects,
                                                     audio_patch_handle_t patchHandle,
                                                     audio_source_t source);
+                            void      onRoutingUpdated();
                             void      setAudioPortCallbacksEnabled(bool enabled);
                             void setAudioVolumeGroupCallbacksEnabled(bool enabled);
 
@@ -783,66 +794,124 @@
                             NotificationClient(const NotificationClient&);
                             NotificationClient& operator = (const NotificationClient&);
 
-        const wp<AudioPolicyService>        mService;
-        const uid_t                         mUid;
-        const pid_t                         mPid;
-        const sp<IAudioPolicyServiceClient> mAudioPolicyServiceClient;
-              bool                          mAudioPortCallbacksEnabled;
-              bool                          mAudioVolumeGroupCallbacksEnabled;
+        const wp<AudioPolicyService>               mService;
+        const uid_t                                mUid;
+        const pid_t                                mPid;
+        const sp<media::IAudioPolicyServiceClient> mAudioPolicyServiceClient;
+              bool                                 mAudioPortCallbacksEnabled;
+              bool                                 mAudioVolumeGroupCallbacksEnabled;
     };
 
     class AudioClient : public virtual RefBase {
     public:
                 AudioClient(const audio_attributes_t attributes,
-                            const audio_io_handle_t io, uid_t uid, pid_t pid,
+                            const audio_io_handle_t io,
+                            const AttributionSourceState& attributionSource,
                             const audio_session_t session,  audio_port_handle_t portId,
                             const audio_port_handle_t deviceId) :
-                                attributes(attributes), io(io), uid(uid), pid(pid),
-                                session(session), portId(portId), deviceId(deviceId), active(false) {}
+                                attributes(attributes), io(io), attributionSource(
+                                attributionSource), session(session), portId(portId),
+                                deviceId(deviceId), active(false) {}
                 ~AudioClient() override = default;
 
 
         const audio_attributes_t attributes; // source, flags ...
         const audio_io_handle_t io;          // audio HAL stream IO handle
-        const uid_t uid;                     // client UID
-        const pid_t pid;                     // client PID
+        const AttributionSourceState& attributionSource; // client attribution source
         const audio_session_t session;       // audio session ID
         const audio_port_handle_t portId;
         const audio_port_handle_t deviceId;  // selected input device port ID
               bool active;                   // Playback/Capture is active or inactive
     };
 
+    // Checks and monitors app ops for AudioRecordClient
+    class OpRecordAudioMonitor : public RefBase {
+    public:
+        ~OpRecordAudioMonitor() override;
+        bool hasOp() const;
+        int32_t getOp() const { return mAppOp; }
+
+        static sp<OpRecordAudioMonitor> createIfNeeded(
+                const AttributionSourceState& attributionSource,
+                const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
+
+    private:
+        OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+                wp<AudioCommandThread> commandThread);
+
+        void onFirstRef() override;
+
+        AppOpsManager mAppOpsManager;
+
+        class RecordAudioOpCallback : public BnAppOpsCallback {
+        public:
+            explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+            void opChanged(int32_t op, const String16& packageName) override;
+
+        private:
+            const wp<OpRecordAudioMonitor> mMonitor;
+        };
+
+        sp<RecordAudioOpCallback> mOpCallback;
+        // Checks the app op for this OpRecordAudioMonitor. Called by RecordAudioOpCallback
+        // when the app op changes and from onFirstRef().
+        // updateUidStates is true when the silenced state of active AudioRecordClients must
+        // be re-evaluated.
+        void checkOp(bool updateUidStates = false);
+
+        std::atomic_bool mHasOp;
+        const AttributionSourceState mAttributionSource;
+        const int32_t mAppOp;
+        wp<AudioCommandThread> mCommandThread;
+    };
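OpRecordAudioMonitor above registers an app-op callback that holds only a weak reference to the monitor and performs the registration in onFirstRef() rather than in the constructor. The sketch below reproduces that shape with standard C++ types (std::enable_shared_from_this instead of RefBase, a fake in-process app-ops registry instead of AppOpsManager); all names are illustrative.

#include <atomic>
#include <functional>
#include <iostream>
#include <memory>
#include <vector>

// Stand-in for the app-ops service: stores callbacks and lets callers flip the op mode.
struct FakeAppOps {
    bool allowed = true;
    std::vector<std::function<void()>> watchers;
    void setAllowed(bool a) { allowed = a; for (auto& w : watchers) w(); }
};

class OpMonitor : public std::enable_shared_from_this<OpMonitor> {
public:
    static std::shared_ptr<OpMonitor> create(FakeAppOps& ops) {
        auto monitor = std::shared_ptr<OpMonitor>(new OpMonitor(ops));
        monitor->startWatching();  // analogous to onFirstRef(): needs a live shared_ptr
        return monitor;
    }
    bool hasOp() const { return mHasOp.load(); }

private:
    explicit OpMonitor(FakeAppOps& ops) : mOps(ops) { mHasOp = ops.allowed; }

    void startWatching() {
        // The callback captures a weak_ptr (like wp<OpRecordAudioMonitor>) so a late
        // notification cannot resurrect or dereference a destroyed monitor.
        std::weak_ptr<OpMonitor> weak = shared_from_this();
        mOps.watchers.push_back([weak] {
            if (auto self = weak.lock()) self->checkOp();
        });
    }
    void checkOp() { mHasOp = mOps.allowed; }  // re-query and cache, like mHasOp above

    FakeAppOps& mOps;
    std::atomic_bool mHasOp{false};
};

int main() {
    FakeAppOps ops;
    auto monitor = OpMonitor::create(ops);
    std::cout << monitor->hasOp() << "\n";  // 1: op currently allowed
    ops.setAllowed(false);                  // app-op change triggers checkOp()
    std::cout << monitor->hasOp() << "\n";  // 0: the client should now be silenced
}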
+
     // --- AudioRecordClient ---
     // Information about each registered AudioRecord client
     // (between calls to getInputForAttr() and releaseInput())
     class AudioRecordClient : public AudioClient {
     public:
                 AudioRecordClient(const audio_attributes_t attributes,
-                          const audio_io_handle_t io, uid_t uid, pid_t pid,
+                          const audio_io_handle_t io,
                           const audio_session_t session, audio_port_handle_t portId,
-                          const audio_port_handle_t deviceId, const String16& opPackageName,
-                          bool canCaptureOutput, bool canCaptureHotword) :
-                    AudioClient(attributes, io, uid, pid, session, portId, deviceId),
-                    opPackageName(opPackageName), startTimeNs(0),
-                    canCaptureOutput(canCaptureOutput), canCaptureHotword(canCaptureHotword) {}
+                          const audio_port_handle_t deviceId,
+                          const AttributionSourceState& attributionSource,
+                          bool canCaptureOutput, bool canCaptureHotword,
+                          wp<AudioCommandThread> commandThread) :
+                    AudioClient(attributes, io, attributionSource,
+                        session, portId, deviceId), attributionSource(attributionSource),
+                        startTimeNs(0), canCaptureOutput(canCaptureOutput),
+                        canCaptureHotword(canCaptureHotword), silenced(false),
+                        mOpRecordAudioMonitor(
+                                OpRecordAudioMonitor::createIfNeeded(attributionSource,
+                                attributes, commandThread)) {}
                 ~AudioRecordClient() override = default;
 
-        const String16 opPackageName;        // client package name
+        bool hasOp() const {
+            return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+        }
+
+        const AttributionSourceState attributionSource; // attribution source of client
         nsecs_t startTimeNs;
         const bool canCaptureOutput;
         const bool canCaptureHotword;
+        bool silenced;
+
+    private:
+        sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
     };
 
+
     // --- AudioPlaybackClient ---
     // Information about each registered AudioTrack client
     // (between calls to getOutputForAttr() and releaseOutput())
     class AudioPlaybackClient : public AudioClient {
     public:
                 AudioPlaybackClient(const audio_attributes_t attributes,
-                      const audio_io_handle_t io, uid_t uid, pid_t pid,
+                      const audio_io_handle_t io, AttributionSourceState attributionSource,
                             const audio_session_t session, audio_port_handle_t portId,
                             audio_port_handle_t deviceId, audio_stream_type_t stream) :
-                    AudioClient(attributes, io, uid, pid, session, portId, deviceId), stream(stream) {}
+                    AudioClient(attributes, io, attributionSource, session, portId,
+                        deviceId), stream(stream) {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
@@ -872,7 +941,8 @@
 
     // Internal dump utilities.
     status_t dumpPermissionDenial(int fd);
-
+    void loadAudioPolicyManager();
+    void unloadAudioPolicyManager();
 
     mutable Mutex mLock;    // prevents concurrent access to AudioPolicy manager functions changing
                             // device connection state  or routing
@@ -906,6 +976,10 @@
     MediaPackageManager mPackageManager; // To check allowPlaybackCapture
 
     CaptureStateNotifier mCaptureStateNotifier;
+
+    void *mLibraryHandle = nullptr;
+    CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
+    DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
 };
 
 } // namespace android
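The new loadAudioPolicyManager()/unloadAudioPolicyManager() helpers together with mLibraryHandle and the mCreateAudioPolicyManager/mDestroyAudioPolicyManager function pointers suggest the policy manager is now obtained from a dynamically loaded library. Below is a minimal sketch of that kind of loader; the library and symbol names are assumptions, and this is not the actual AOSP loader.

#include <dlfcn.h>
#include <stdio.h>

struct AudioPolicyInterface;   // opaque for the sketch
struct AudioPolicyClient;      // opaque for the sketch

using CreateFn  = AudioPolicyInterface* (*)(AudioPolicyClient*);
using DestroyFn = void (*)(AudioPolicyInterface*);

struct PolicyManagerLoader {
    void*     handle  = nullptr;
    CreateFn  create  = nullptr;
    DestroyFn destroy = nullptr;

    bool load(const char* libName) {
        handle = dlopen(libName, RTLD_NOW);
        if (handle == nullptr) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return false;
        }
        // Factory symbol names are illustrative, not the real exported symbols.
        create  = reinterpret_cast<CreateFn>(dlsym(handle, "createAudioPolicyManager"));
        destroy = reinterpret_cast<DestroyFn>(dlsym(handle, "destroyAudioPolicyManager"));
        if (create == nullptr || destroy == nullptr) {
            unload();
            return false;
        }
        return true;
    }

    void unload() {
        create = nullptr;
        destroy = nullptr;
        if (handle != nullptr) {
            dlclose(handle);
            handle = nullptr;
        }
    }
};

Keeping both factory pointers next to the library handle, as the header above does, makes the unload path symmetric: destroy every instance first, then drop the symbols, then dlclose.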
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index efdb241..b296fb0 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "audiopolicy_tests",
 
@@ -16,9 +25,14 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
+        "libpermission",
+        "libbinder",
     ],
 
-    static_libs: ["libaudiopolicycomponents"],
+    static_libs: [
+        "libaudiopolicycomponents",
+        "libgmock"
+    ],
 
     header_libs: [
         "libaudiopolicycommon",
@@ -42,6 +56,7 @@
 
 cc_test {
     name: "audio_health_tests",
+    require_root: true,
 
     shared_libs: [
         "libaudiofoundation",
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index e1721ea..f7b0565 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -75,6 +75,10 @@
     status_t createAudioPatch(const struct audio_patch *patch,
                               audio_patch_handle_t *handle,
                               int /*delayMs*/) override {
+        auto iter = mActivePatches.find(*handle);
+        if (iter != mActivePatches.end()) {
+            mActivePatches.erase(*handle);
+        }
         *handle = mNextPatchHandle++;
         mActivePatches.insert(std::make_pair(*handle, *patch));
         return NO_ERROR;
@@ -117,6 +121,24 @@
 
     size_t getAudioPortListUpdateCount() const { return mAudioPortListUpdateCount; }
 
+    virtual void addSupportedFormat(audio_format_t /* format */) {}
+
+    void onRoutingUpdated() override {
+        mRoutingUpdatedUpdateCount++;
+    }
+
+    void resetRoutingUpdatedCounter() {
+        mRoutingUpdatedUpdateCount = 0;
+    }
+
+    size_t getRoutingUpdatedCounter() const { return mRoutingUpdatedUpdateCount; }
+
+    status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
+        return NO_ERROR;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -124,6 +146,7 @@
     std::map<audio_patch_handle_t, struct audio_patch> mActivePatches;
     std::set<std::string> mAllowedModuleNames;
     size_t mAudioPortListUpdateCount = 0;
+    size_t mRoutingUpdatedUpdateCount = 0;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
new file mode 100644
index 0000000..a5ad9b1
--- /dev/null
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <set>
+
+#include <system/audio.h>
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include "AudioPolicyTestClient.h"
+
+namespace android {
+
+class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
+public:
+    String8 getParameters(audio_io_handle_t /* ioHandle */, const String8& /* keys */) override {
+        return mAudioParameters.toString();
+    }
+
+    void addSupportedFormat(audio_format_t format) override {
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedFormats),
+                String8(audio_format_to_string(format)));
+        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+        mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
+    }
+
+private:
+    AudioParameter mAudioParameters;
+};
+
+} // namespace android
\ No newline at end of file
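AudioPolicyManagerTestClientForHdmi above feeds supported formats to the manager through an AudioParameter bag returned from getParameters(). As a rough illustration (not the real AudioParameter class), such a bag boils down to key/value pairs serialized as "key=value" entries joined with ';'; the ParamBag type and the key names in the usage note are assumptions for this sketch.

#include <map>
#include <sstream>
#include <string>

class ParamBag {
public:
    void add(const std::string& key, const std::string& value) { mParams[key] = value; }
    void addInt(const std::string& key, int value) { mParams[key] = std::to_string(value); }

    // Serialize as "key1=value1;key2=value2", mimicking the AudioParameter wire format.
    std::string toString() const {
        std::ostringstream out;
        bool first = true;
        for (const auto& [key, value] : mParams) {
            if (!first) out << ";";
            out << key << "=" << value;
            first = false;
        }
        return out.str();
    }

private:
    std::map<std::string, std::string> mParams;
};

// Usage, loosely mirroring addSupportedFormat() above (key names assumed, not verbatim):
//   ParamBag p;
//   p.add("sup_formats", "AUDIO_FORMAT_E_AC3");
//   p.addInt("sup_sampling_rates", 48000);
//   p.toString()  ->  "sup_formats=AUDIO_FORMAT_E_AC3;sup_sampling_rates=48000"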
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index c628e70..1384864 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -83,10 +83,18 @@
                                         std::vector<effect_descriptor_t> effects __unused,
                                         audio_patch_handle_t patchHandle __unused,
                                         audio_source_t source __unused) override { }
+    void onRoutingUpdated() override { }
     void setEffectSuspended(int effectId __unused,
                             audio_session_t sessionId __unused,
                             bool suspended __unused) {}
     void setSoundTriggerCaptureState(bool active __unused) override {};
+    status_t getAudioPort(struct audio_port_v7 *port __unused) override {
+        return INVALID_OPERATION;
+    };
+    status_t updateSecondaryOutputs(
+            const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
+        return NO_INIT;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 8bab020..7f67940 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -29,6 +29,10 @@
     using AudioPolicyManager::getOutputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
+    using AudioPolicyManager::setSurroundFormatEnabled;
+    using AudioPolicyManager::releaseMsdOutputPatches;
+    using AudioPolicyManager::setMsdOutputPatches;
+    using AudioPolicyManager::getAudioPatches;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
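AudioPolicyTestManager gains several more using-declarations here; the pattern is a test-only subclass that re-exposes protected members of the production class so tests can drive them directly. A generic sketch with placeholder names:

// Test peer exposing protected members of a production class via using-declarations.
class Widget {
protected:
    void reload() {}
    int mGeneration = 0;
};

class WidgetTestPeer : public Widget {
public:
    using Widget::reload;                        // promote the protected method to public
    int generation() const { return mGeneration; }
};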
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index b5c67a1..df4389b 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -16,11 +16,13 @@
 
 #define LOG_TAG "AudioPolicy_Boot_Test"
 
+#include <string>
 #include <unordered_set>
 
 #include <gtest/gtest.h>
 
 #include <media/AudioSystem.h>
+#include <media/TypeConverter.h>
 #include <system/audio.h>
 #include <utils/Log.h>
 
@@ -33,7 +35,7 @@
     unsigned int numPorts;
     unsigned int generation1;
     unsigned int generation;
-    struct audio_port *audioPorts = NULL;
+    struct audio_port_v7 *audioPorts = nullptr;
     int attempts = 10;
     do {
         if (attempts-- < 0) {
@@ -42,13 +44,14 @@
         }
         numPorts = 0;
         ASSERT_EQ(NO_ERROR, AudioSystem::listAudioPorts(
-                AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, NULL, &generation1));
+                AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1));
         if (numPorts == 0) {
             free(audioPorts);
             GTEST_FAIL() << "Number of audio ports should not be zero";
         }
 
-        audioPorts = (struct audio_port *)realloc(audioPorts, numPorts * sizeof(struct audio_port));
+        audioPorts = (struct audio_port_v7 *)realloc(
+                audioPorts, numPorts * sizeof(struct audio_port_v7));
         status_t status = AudioSystem::listAudioPorts(
                 AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE, &numPorts, audioPorts, &generation);
         if (status != NO_ERROR) {
@@ -68,9 +71,57 @@
     ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
 
     for (auto desc : manager.getConfig().getInputDevices()) {
-        ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
+        if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
+            std::string deviceType;
+            (void)DeviceConverter::toString(desc->type(), deviceType);
+            ADD_FAILURE() << "Input device \"" << deviceType << "\" not found";
+        }
     }
     for (auto desc : manager.getConfig().getOutputDevices()) {
-        ASSERT_NE(attachedDevices.end(), attachedDevices.find(desc->type()));
+        if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
+            std::string deviceType;
+            (void)DeviceConverter::toString(desc->type(), deviceType);
+            ADD_FAILURE() << "Output device \"" << deviceType << "\" not found";
+        }
+    }
+}
+
+TEST(AudioHealthTest, ConnectSupportedDevice) {
+    AudioPolicyManagerTestClient client;
+    AudioPolicyTestManager manager(&client);
+    manager.loadConfig();
+    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+
+    DeviceVector devices;
+    for (const auto& hwModule : manager.getConfig().getHwModules()) {
+        for (const auto& profile : hwModule->getOutputProfiles()) {
+            devices.merge(profile->getSupportedDevices());
+        }
+        for (const auto& profile : hwModule->getInputProfiles()) {
+            devices.merge(profile->getSupportedDevices());
+        }
+    }
+    for (const auto& device : devices) {
+        if (!audio_is_bluetooth_out_sco_device(device->type()) &&
+            !audio_is_bluetooth_in_sco_device(device->type())) {
+            // There are two reasons for testing only Bluetooth SCO device connections:
+            // 1) It is easier to construct a fake address.
+            // 2) This test runs in presubmit, so it makes sense to keep the test's
+            //    processing time short.
+            continue;
+        }
+        std::string address = "11:22:33:44:55:66";
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+        ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+                device->type(), AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(),
+                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
+        ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
+                device->type(), AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, address.c_str(),
+                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+        ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
     }
 }
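The boot test above sizes the port list with a first listAudioPorts() call, reallocates, fetches the data, and, judging by the two generation values, retries until the list is stable between the two calls. Below is a self-contained sketch of that count/allocate/retry contract; FakePortRegistry and listAllPorts() are hypothetical stand-ins for the real AudioSystem call.

#include <vector>

struct Port { int id; };

// Fake in-process registry standing in for the audio ports known to the policy service.
struct FakePortRegistry {
    std::vector<Port> ports;
    unsigned int generation = 1;

    // Mirrors the contract used above: with out == nullptr only the count is returned;
    // the generation lets callers detect concurrent changes to the port list.
    int list(unsigned int* count, Port* out, unsigned int* gen) {
        if (out != nullptr) {
            for (unsigned int i = 0; i < *count && i < ports.size(); ++i) out[i] = ports[i];
        }
        *count = static_cast<unsigned int>(ports.size());
        *gen = generation;
        return 0;  // NO_ERROR
    }
};

bool listAllPorts(FakePortRegistry& registry, std::vector<Port>* result) {
    for (int attempts = 10; attempts > 0; --attempts) {
        unsigned int count = 0, gen1 = 0, gen2 = 0;
        registry.list(&count, nullptr, &gen1);         // sizing pass
        result->resize(count);
        registry.list(&count, result->data(), &gen2);  // data pass
        result->resize(count);
        if (gen1 == gen2) return true;                 // stable between the two calls
    }
    return false;                                      // registry kept changing; give up
}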
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a0074bc..a16ab7d 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -20,10 +20,12 @@
 #include <unistd.h>
 
 #include <gtest/gtest.h>
+#include <gmock/gmock.h>
 
 #define LOG_TAG "APM_Test"
 #include <Serializer.h>
 #include <android-base/file.h>
+#include <android/content/AttributionSourceState.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
 #include <media/RecordingActivityTracker.h>
@@ -32,10 +34,13 @@
 
 #include "AudioPolicyInterface.h"
 #include "AudioPolicyManagerTestClient.h"
+#include "AudioPolicyManagerTestClientForHdmi.h"
 #include "AudioPolicyTestClient.h"
 #include "AudioPolicyTestManager.h"
 
 using namespace android;
+using testing::UnorderedElementsAre;
+using android::content::AttributionSourceState;
 
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
     AudioPolicyTestClient client;
@@ -56,6 +61,34 @@
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
 
+// Verifies that a failure while loading a config doesn't leave the
+// APM config in a "dirty" state. Since the AudioPolicyConfig object
+// is a proxy for the data hosted by APM, it isn't possible to
+// "deep copy" it, and thus its elements have to be tested
+// individually.
+TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
+    AudioPolicyTestClient client;
+    AudioPolicyTestManager manager(&client);
+    ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
+    ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+    ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+    status_t status = deserializeAudioPolicyFile(
+            (base::GetExecutableDirectory() +
+                    "/test_invalid_audio_policy_configuration.xml").c_str(),
+            &manager.getConfig());
+    ASSERT_NE(NO_ERROR, status);
+    EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
+    EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+    EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+    status = deserializeAudioPolicyFile(
+            (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
+            &manager.getConfig());
+    ASSERT_EQ(NO_ERROR, status);
+    EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
+    EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
+    EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
+}
+
 
 class PatchCountCheck {
   public:
@@ -87,7 +120,7 @@
     void getOutputForAttr(
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
-            int channelMask,
+            audio_channel_mask_t channelMask,
             int sampleRate,
             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
             audio_io_handle_t *output = nullptr,
@@ -98,7 +131,7 @@
             audio_unique_id_t riid,
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
-            int channelMask,
+            audio_channel_mask_t channelMask,
             int sampleRate,
             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
             audio_port_handle_t *portId = nullptr);
@@ -107,15 +140,16 @@
     // Tries to find a device port. If 'foundPort' isn't nullptr,
     // will generate a failure if the port hasn't been found.
     bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
-            const std::string &address, audio_port *foundPort);
+            const std::string &address, audio_port_v7 *foundPort);
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
+    virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
 
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
     std::unique_ptr<AudioPolicyTestManager> mManager;
 };
 
 void AudioPolicyManagerTest::SetUp() {
-    mClient.reset(new AudioPolicyManagerTestClient);
+    mClient.reset(getClient());
     mManager.reset(new AudioPolicyTestManager(mClient.get()));
     SetUpManagerConfig();  // Subclasses may want to customize the config.
     ASSERT_EQ(NO_ERROR, mManager->initialize());
@@ -164,7 +198,7 @@
 void AudioPolicyManagerTest::getOutputForAttr(
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
-        int channelMask,
+        audio_channel_mask_t channelMask,
         int sampleRate,
         audio_output_flags_t flags,
         audio_io_handle_t *output,
@@ -182,8 +216,12 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource = AttributionSourceState();
+    attributionSource.uid = 0;
+    attributionSource.token = sp<BBinder>::make();
     ASSERT_EQ(OK, mManager->getOutputForAttr(
-                    &attr, output, AUDIO_SESSION_NONE, &stream, 0 /*uid*/, &config, &flags,
+                    &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
@@ -194,7 +232,7 @@
         audio_unique_id_t riid,
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
-        int channelMask,
+        audio_channel_mask_t channelMask,
         int sampleRate,
         audio_input_flags_t flags,
         audio_port_handle_t *portId) {
@@ -207,14 +245,18 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::input_type_t inputType;
+    // TODO b/182392769: use attribution source util
+    AttributionSourceState attributionSource = AttributionSourceState();
+    attributionSource.uid = 0;
+    attributionSource.token = sp<BBinder>::make();
     ASSERT_EQ(OK, mManager->getInputForAttr(
-            &attr, &input, riid, AUDIO_SESSION_NONE, 0 /*uid*/, &config, flags,
+            &attr, &input, riid, AUDIO_SESSION_NONE, attributionSource, &config, flags,
             selectedDeviceId, &inputType, portId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
 
 bool AudioPolicyManagerTest::findDevicePort(audio_port_role_t role,
-        audio_devices_t deviceType, const std::string &address, audio_port *foundPort) {
+        audio_devices_t deviceType, const std::string &address, audio_port_v7 *foundPort) {
     uint32_t numPorts = 0;
     uint32_t generation1;
     status_t ret;
@@ -224,7 +266,7 @@
     if (HasFailure()) return false;
 
     uint32_t generation2;
-    struct audio_port ports[numPorts];
+    struct audio_port_v7 ports[numPorts];
     ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation2);
     EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
     EXPECT_EQ(generation1, generation2) << "Generations changed during ports retrieval";
@@ -317,15 +359,46 @@
 
 // TODO: Add patch creation tests that involve already existing patch
 
-class AudioPolicyManagerTestMsd : public AudioPolicyManagerTest {
+enum {
+    MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX = 0,
+    MSD_AUDIO_PATCH_COUNT_NAME_INDEX = 1
+};
+using MsdAudioPatchCountSpecification = std::tuple<size_t, std::string>;
+
+class AudioPolicyManagerTestMsd : public AudioPolicyManagerTest,
+        public ::testing::WithParamInterface<MsdAudioPatchCountSpecification> {
+  public:
+    AudioPolicyManagerTestMsd();
   protected:
     void SetUpManagerConfig() override;
     void TearDown() override;
 
     sp<DeviceDescriptor> mMsdOutputDevice;
     sp<DeviceDescriptor> mMsdInputDevice;
+    sp<DeviceDescriptor> mDefaultOutputDevice;
+
+    const size_t mExpectedAudioPatchCount;
+    sp<DeviceDescriptor> mSpdifDevice;
+
+    sp<DeviceDescriptor> mHdmiInputDevice;
 };
 
+AudioPolicyManagerTestMsd::AudioPolicyManagerTestMsd()
+    : mExpectedAudioPatchCount(std::get<MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX>(
+            GetParam())) {}
+
+INSTANTIATE_TEST_CASE_P(
+        MsdAudioPatchCount,
+        AudioPolicyManagerTestMsd,
+        ::testing::Values(
+                MsdAudioPatchCountSpecification(1u, "single"),
+                MsdAudioPatchCountSpecification(2u, "dual")
+        ),
+        [](const ::testing::TestParamInfo<MsdAudioPatchCountSpecification> &info) {
+                return std::get<MSD_AUDIO_PATCH_COUNT_NAME_INDEX>(info.param); }
+);
+
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     AudioPolicyManagerTest::SetUpManagerConfig();
@@ -335,8 +408,11 @@
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
     sp<AudioProfile> ac3OutputProfile = new AudioProfile(
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000);
+    sp<AudioProfile> iec958OutputProfile = new AudioProfile(
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_OUT_STEREO, 48000);
     mMsdOutputDevice->addAudioProfile(pcmOutputProfile);
     mMsdOutputDevice->addAudioProfile(ac3OutputProfile);
+    mMsdOutputDevice->addAudioProfile(iec958OutputProfile);
     mMsdInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUS);
     // Match output profile from AudioPolicyConfig::setDefault.
     sp<AudioProfile> pcmInputProfile = new AudioProfile(
@@ -345,6 +421,19 @@
     config.addDevice(mMsdOutputDevice);
     config.addDevice(mMsdInputDevice);
 
+    if (mExpectedAudioPatchCount == 2) {
+        // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
+        mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
+        mSpdifDevice->addAudioProfile(pcmOutputProfile);
+        config.addDevice(mSpdifDevice);
+
+        sp<OutputProfile> spdifOutputProfile = new OutputProfile("spdif output");
+        spdifOutputProfile->addAudioProfile(pcmOutputProfile);
+        spdifOutputProfile->addSupportedDevice(mSpdifDevice);
+        config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+                addOutputProfile(spdifOutputProfile);
+    }
+
     sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
     HwModuleCollection modules = config.getHwModules();
     modules.add(msdModule);
@@ -361,6 +450,11 @@
             AUDIO_OUTPUT_FLAG_NON_BLOCKING);
     msdCompressedOutputProfile->addSupportedDevice(mMsdOutputDevice);
     msdModule->addOutputProfile(msdCompressedOutputProfile);
+    sp<OutputProfile> msdIec958OutputProfile = new OutputProfile("msd iec958 input");
+    msdIec958OutputProfile->addAudioProfile(iec958OutputProfile);
+    msdIec958OutputProfile->setFlags(AUDIO_OUTPUT_FLAG_DIRECT);
+    msdIec958OutputProfile->addSupportedDevice(mMsdOutputDevice);
+    msdModule->addOutputProfile(msdIec958OutputProfile);
 
     sp<InputProfile> msdInputProfile = new InputProfile("msd output");
     msdInputProfile->addAudioProfile(pcmInputProfile);
@@ -378,62 +472,104 @@
     primaryEncodedOutputProfile->addSupportedDevice(config.getDefaultOutputDevice());
     config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
             addOutputProfile(primaryEncodedOutputProfile);
+
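+    // Keep a reference to the default output device so tests can verify routing against it.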
+    mDefaultOutputDevice = config.getDefaultOutputDevice();
+    if (mExpectedAudioPatchCount == 2) {
+        mSpdifDevice->addAudioProfile(dtsOutputProfile);
+        primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
+    }
+
+    // Add HDMI input device with IEC60958 profile for HDMI in -> MSD patching.
+    mHdmiInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_HDMI);
+    sp<AudioProfile> iec958InputProfile = new AudioProfile(
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_IN_STEREO, 48000);
+    mHdmiInputDevice->addAudioProfile(iec958InputProfile);
+    config.addDevice(mHdmiInputDevice);
+    sp<InputProfile> hdmiInputProfile = new InputProfile("hdmi input");
+    hdmiInputProfile->addAudioProfile(iec958InputProfile);
+    hdmiInputProfile->setFlags(AUDIO_INPUT_FLAG_DIRECT);
+    hdmiInputProfile->addSupportedDevice(mHdmiInputDevice);
+    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+            addInputProfile(hdmiInputProfile);
 }
 
 void AudioPolicyManagerTestMsd::TearDown() {
     mMsdOutputDevice.clear();
     mMsdInputDevice.clear();
+    mDefaultOutputDevice.clear();
+    mSpdifDevice.clear();
+    mHdmiInputDevice.clear();
     AudioPolicyManagerTest::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestMsd, InitSuccess) {
+TEST_P(AudioPolicyManagerTestMsd, InitSuccess) {
     ASSERT_TRUE(mMsdOutputDevice);
     ASSERT_TRUE(mMsdInputDevice);
+    ASSERT_TRUE(mDefaultOutputDevice);
 }
 
-TEST_F(AudioPolicyManagerTestMsd, Dump) {
+TEST_P(AudioPolicyManagerTestMsd, Dump) {
     dumpToLog();
 }
 
-TEST_F(AudioPolicyManagerTestMsd, PatchCreationOnSetForceUse) {
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationOnSetForceUse) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
             AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdOutputPatches) {
+    const PatchCountCheck patchCount = snapshotPatchCount();
+    DeviceVector devices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    devices.remove(mMsdOutputDevice);
+    ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
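+    // Creating MSD output patches for all remaining devices should add one patch per device.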
+    mManager->setMsdOutputPatches(&devices);
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
+    DeviceVector singleDevice(devices[0]);
+    mManager->releaseMsdOutputPatches(singleDevice);
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
+    mManager->setMsdOutputPatches(&devices);
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    mManager->releaseMsdOutputPatches(devices);
+    ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+}
+
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
-    ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
-    ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
-    ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
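+    // A subsequent PCM request reuses the existing MSD patches, so the patch delta is unchanged.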
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
-    ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
@@ -442,7 +578,7 @@
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
 
-TEST_F(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
+TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
     // Switch between formats that are supported and not supported by MSD.
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -451,10 +587,10 @@
         getOutputForAttr(&selectedDeviceId,
                 AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
                 nullptr /*output*/, &portId);
-        ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
-        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
     }
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -464,7 +600,7 @@
                 AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
                 nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(-1, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
@@ -473,11 +609,39 @@
         audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         getOutputForAttr(&selectedDeviceId,
                 AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
-        ASSERT_EQ(selectedDeviceId, mMsdOutputDevice->getId());
+        ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
 }
 
+TEST_P(AudioPolicyManagerTestMsd, PatchCreationFromHdmiInToMsd) {
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    uid_t uid = 42;
+    const PatchCountCheck patchCount = snapshotPatchCount();
+    ASSERT_FALSE(mManager->getAvailableInputDevices().isEmpty());
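+    // Build a patch from the HDMI input device to the MSD output (bus) device.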
+    PatchBuilder patchBuilder;
+    patchBuilder.
+            addSource(mManager->getAvailableInputDevices().
+                    getDevice(AUDIO_DEVICE_IN_HDMI, String8(""), AUDIO_FORMAT_DEFAULT)).
+            addSink(mManager->getAvailableOutputDevices().
+                    getDevice(AUDIO_DEVICE_OUT_BUS, String8(""), AUDIO_FORMAT_DEFAULT));
+    ASSERT_EQ(NO_ERROR, mManager->createAudioPatch(patchBuilder.patch(), &handle, uid));
+    ASSERT_NE(AUDIO_PATCH_HANDLE_NONE, handle);
+    AudioPatchCollection patches = mManager->getAudioPatches();
+    sp<AudioPatch> patch = patches.valueFor(handle);
+    ASSERT_EQ(1, patch->mPatch.num_sources);
+    ASSERT_EQ(1, patch->mPatch.num_sinks);
+    ASSERT_EQ(AUDIO_PORT_ROLE_SOURCE, patch->mPatch.sources[0].role);
+    ASSERT_EQ(AUDIO_PORT_ROLE_SINK, patch->mPatch.sinks[0].role);
+    ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sources[0].format);
+    ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sinks[0].format);
+    ASSERT_EQ(AUDIO_CHANNEL_IN_STEREO, patch->mPatch.sources[0].channel_mask);
+    ASSERT_EQ(AUDIO_CHANNEL_OUT_STEREO, patch->mPatch.sinks[0].channel_mask);
+    ASSERT_EQ(48000, patch->mPatch.sources[0].sample_rate);
+    ASSERT_EQ(48000, patch->mPatch.sinks[0].sample_rate);
+    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
+}
+
 class AudioPolicyManagerTestWithConfigurationFile : public AudioPolicyManagerTest {
 protected:
     void SetUpManagerConfig() override;
@@ -638,6 +802,188 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+class AudioPolicyManagerTestForHdmi
+        : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    std::string getConfigFile() override { return sTvConfig; }
+    std::map<audio_format_t, bool> getSurroundFormatsHelper();
+    std::vector<audio_format_t> getReportedSurroundFormatsHelper();
+    std::unordered_set<audio_format_t> getFormatsFromPorts();
+    AudioPolicyManagerTestClient* getClient() override {
+        return new AudioPolicyManagerTestClientForHdmi;
+    }
+    void TearDown() override;
+
+    static const std::string sTvConfig;
+};
+
+const std::string AudioPolicyManagerTestForHdmi::sTvConfig =
+        AudioPolicyManagerTestForHdmi::sExecutableDir +
+        "test_settop_box_surround_configuration.xml";
+
+void AudioPolicyManagerTestForHdmi::SetUp() {
+    AudioPolicyManagerTest::SetUp();
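+    // Report E_AC3 as supported by the HAL client and connect an HDMI output so that
+    // surround format queries have a device to report against.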
+    mClient->addSupportedFormat(AUDIO_FORMAT_E_AC3);
+    mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
+}
+
+void AudioPolicyManagerTestForHdmi::TearDown() {
+    mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            "" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT);
+    AudioPolicyManagerTest::TearDown();
+}
+
+std::map<audio_format_t, bool>
+        AudioPolicyManagerTestForHdmi::getSurroundFormatsHelper() {
+    unsigned int numSurroundFormats = 0;
+    std::map<audio_format_t, bool> surroundFormatsMap;
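+    // Query the number of surround formats first, then fetch them into arrays of that size.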
+    status_t ret = mManager->getSurroundFormats(
+            &numSurroundFormats, nullptr /* surroundFormats */,
+            nullptr /* surroundFormatsEnabled */);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsMap;
+    }
+    audio_format_t surroundFormats[numSurroundFormats];
+    memset(surroundFormats, 0, sizeof(audio_format_t) * numSurroundFormats);
+    bool surroundFormatsEnabled[numSurroundFormats];
+    memset(surroundFormatsEnabled, 0, sizeof(bool) * numSurroundFormats);
+    ret = mManager->getSurroundFormats(
+            &numSurroundFormats, surroundFormats, surroundFormatsEnabled);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsMap;
+    }
+    for (int i = 0; i < numSurroundFormats; i++) {
+        surroundFormatsMap[surroundFormats[i]] = surroundFormatsEnabled[i];
+    }
+    return surroundFormatsMap;
+}
+
+std::vector<audio_format_t> AudioPolicyManagerTestForHdmi::getReportedSurroundFormatsHelper() {
+    unsigned int numSurroundFormats = 0;
+    std::vector<audio_format_t> surroundFormatsVector;
+    status_t ret = mManager->getReportedSurroundFormats(
+            &numSurroundFormats, nullptr /* surroundFormats */);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsVector;
+    }
+    audio_format_t surroundFormats[numSurroundFormats];
+    memset(surroundFormats, 0, sizeof(audio_format_t) * numSurroundFormats);
+    ret = mManager->getReportedSurroundFormats(&numSurroundFormats, surroundFormats);
+    EXPECT_EQ(NO_ERROR, ret);
+    if (ret != NO_ERROR) {
+        return surroundFormatsVector;
+    }
+    for (const auto &surroundFormat : surroundFormats) {
+        surroundFormatsVector.push_back(surroundFormat);
+    }
+    return surroundFormatsVector;
+}
+
+std::unordered_set<audio_format_t>
+        AudioPolicyManagerTestForHdmi::getFormatsFromPorts() {
+    uint32_t numPorts = 0;
+    uint32_t generation1;
+    status_t ret;
+    std::unordered_set<audio_format_t> formats;
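+    // listAudioPorts is called twice: once to get the port count, then to fill the array.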
+    ret = mManager->listAudioPorts(
+            AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1);
+    EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
+    if (ret != NO_ERROR) {
+        return formats;
+    }
+    struct audio_port_v7 ports[numPorts];
+    ret = mManager->listAudioPorts(
+            AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation1);
+    EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
+    if (ret != NO_ERROR) {
+        return formats;
+    }
+    for (const auto &port : ports) {
+        for (size_t i = 0; i < port.num_audio_profiles; ++i) {
+            formats.insert(port.audio_profiles[i].format);
+        }
+    }
+    return formats;
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi, GetSurroundFormatsReturnsSupportedFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
+    auto surroundFormats = getSurroundFormatsHelper();
+    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+        GetSurroundFormatsReturnsManipulatedFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+    status_t ret =
+            mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+    ASSERT_EQ(NO_ERROR, ret);
+    auto surroundFormats = getSurroundFormatsHelper();
+    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+
+    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+    ASSERT_EQ(NO_ERROR, ret);
+    surroundFormats = getSurroundFormatsHelper();
+    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_TRUE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+
+    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+    ASSERT_EQ(NO_ERROR, ret);
+    surroundFormats = getSurroundFormatsHelper();
+    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+        ListAudioPortsReturnManipulatedHdmiFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/));
+    auto formats = getFormatsFromPorts();
+    ASSERT_EQ(0, formats.count(AUDIO_FORMAT_E_AC3));
+
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/));
+    formats = getFormatsFromPorts();
+    ASSERT_EQ(1, formats.count(AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+        GetReportedSurroundFormatsReturnsHdmiReportedFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
+    auto surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+}
+
+TEST_F(AudioPolicyManagerTestForHdmi,
+        GetReportedSurroundFormatsReturnsNonManipulatedHdmiReportedFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
+
+    status_t ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+    ASSERT_EQ(NO_ERROR, ret);
+    auto surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+
+    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+    ASSERT_EQ(NO_ERROR, ret);
+    surroundFormats = getReportedSurroundFormatsHelper();
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+}
+
 class AudioPolicyManagerTestDPNoRemoteSubmixModule : public AudioPolicyManagerTestDynamicPolicy {
 protected:
     std::string getConfigFile() override { return sPrimaryOnlyConfig; }
@@ -684,7 +1030,7 @@
             {AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE}
     };
 
-    struct audio_port mInjectionPort;
+    struct audio_port_v7 mInjectionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
 
@@ -701,13 +1047,14 @@
             AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
     ASSERT_EQ(NO_ERROR, ret);
 
-    struct audio_port extractionPort;
+    struct audio_port_v7 extractionPort;
     ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX,
                     mMixAddress, &extractionPort));
 
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_source_t source = AUDIO_SOURCE_REMOTE_SUBMIX;
-    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, 0, ""};
+    audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
     std::string tags = "addr=" + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
     getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
@@ -757,9 +1104,9 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""}
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
                 )
         );
 
@@ -768,47 +1115,47 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"},
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, "addr=remote_submix_media"}
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
                 )
         );
 
@@ -817,41 +1164,41 @@
         AudioPolicyManagerTestDPPlaybackReRouting,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
                                      AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""},
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, 0, ""}
+                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
                 )
         );
 
@@ -869,7 +1216,7 @@
         {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_VOICE_COMMUNICATION, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}
     };
 
-    struct audio_port mExtractionPort;
+    struct audio_port_v7 mExtractionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
 
@@ -886,13 +1233,14 @@
             AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
     ASSERT_EQ(NO_ERROR, ret);
 
-    struct audio_port injectionPort;
+    struct audio_port_v7 injectionPort;
     ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
                     mMixAddress, &injectionPort));
 
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
-    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, 0, ""};
+    audio_attributes_t attr =
+            {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
     std::string tags = std::string("addr=") + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
@@ -941,17 +1289,19 @@
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, 0, ""},
+                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, 0, "addr=remote_submix_media"},
+                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, 0, "addr=remote_submix_media"},
+                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, 0, ""},
+                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, 0, ""},
+                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, 0,
+                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
                                      "addr=remote_submix_media"}
                 )
         );
@@ -962,14 +1312,15 @@
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, 0, ""},
+                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, 0, ""},
+                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, 0,
+                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
                                      "addr=remote_submix_media"},
                 (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, 0, "addr=remote_submix_media"}
+                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
+                                     "addr=remote_submix_media"}
                 )
         );
 
@@ -988,6 +1339,34 @@
     dumpToLog();
 }
 
+TEST_F(AudioPolicyManagerTestDeviceConnection, RoutingUpdate) {
+    mClient->resetRoutingUpdatedCounter();
+    // Connecting a valid output device with valid parameters should trigger a routing update
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            "a", "b", AUDIO_FORMAT_DEFAULT));
+    ASSERT_EQ(1, mClient->getRoutingUpdatedCounter());
+
+    // Disconnecting a connected device should succeed and trigger a routing update
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            "a", "b", AUDIO_FORMAT_DEFAULT));
+    ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
+
+    // Disconnecting a disconnected device should fail and not trigger a routing update
+    ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            "a", "b",  AUDIO_FORMAT_DEFAULT));
+    ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
+
+    // Changing force use should trigger an update
+    auto config = mManager->getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA);
+    auto newConfig = config == AUDIO_POLICY_FORCE_BT_A2DP ?
+            AUDIO_POLICY_FORCE_NONE : AUDIO_POLICY_FORCE_BT_A2DP;
+    mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA, newConfig);
+    ASSERT_EQ(3, mClient->getRoutingUpdatedCounter());
+}
+
 TEST_P(AudioPolicyManagerTestDeviceConnection, SetDeviceConnectionState) {
     const audio_devices_t type = std::get<0>(GetParam());
     const std::string name = std::get<1>(GetParam());
@@ -1033,7 +1412,7 @@
             type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
             address.c_str(), name.c_str(), AUDIO_FORMAT_DEFAULT));
 
-    audio_port devicePort;
+    audio_port_v7 devicePort;
     const audio_port_role_t role = audio_is_output_device(type)
             ? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
     ASSERT_TRUE(findDevicePort(role, type, address, &devicePort));
@@ -1094,7 +1473,7 @@
             flags, &output, &portId);
     sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
     ASSERT_NE(nullptr, outDesc.get());
-    audio_port port = {};
+    audio_port_v7 port = {};
     outDesc->toAudioPort(&port);
     mManager->releaseOutput(portId);
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
@@ -1176,7 +1555,7 @@
             findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", nullptr));
     mClient->swapAllowedModuleNames({"primary", "r_submix"});
     mManager->onNewAudioModulesAvailable();
-    struct audio_port port;
+    struct audio_port_v7 port;
     ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SOURCE, AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", &port));
 }
 
@@ -1188,3 +1567,109 @@
     EXPECT_GT(mClient->getAudioPortListUpdateCount(), prevAudioPortListUpdateCount);
     EXPECT_GT(mManager->getAudioPortGeneration(), prevAudioPortGeneration);
 }
+
+using DevicesRoleForCapturePresetParam = std::tuple<audio_source_t, device_role_t>;
+
+class AudioPolicyManagerDevicesRoleForCapturePresetTest
+        : public AudioPolicyManagerTestWithConfigurationFile,
+          public testing::WithParamInterface<DevicesRoleForCapturePresetParam> {
+protected:
+    // `inputDevice` and `inputDevice2` are the audio device types used when setting the device
+    // role. They must be declared in test_audio_policy_configuration.xml.
+    AudioDeviceTypeAddr inputDevice = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_BUILTIN_MIC, "");
+    AudioDeviceTypeAddr inputDevice2 = AudioDeviceTypeAddr(AUDIO_DEVICE_IN_HDMI, "");
+};
+
+TEST_P(AudioPolicyManagerDevicesRoleForCapturePresetTest, DevicesRoleForCapturePreset) {
+    const audio_source_t audioSource = std::get<0>(GetParam());
+    const device_role_t role = std::get<1>(GetParam());
+
+    // Test invalid device when setting
+    const AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+    const AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+    ASSERT_EQ(BAD_VALUE,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    ASSERT_EQ(BAD_VALUE,
+              mManager->addDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    AudioDeviceTypeAddrVector devices;
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    ASSERT_TRUE(devices.empty());
+    ASSERT_EQ(BAD_VALUE,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+
+    // Without setting, call get/remove/clear must fail
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+
+    // Test set/get devices role
+    const AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice));
+
+    // Test setting will change the previously set devices
+    const AudioDeviceTypeAddrVector inputDevices2 = {inputDevice2};
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(audioSource, role, inputDevices2));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+    // Test add devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->addDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice, inputDevice2));
+
+    // Test remove devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    ASSERT_EQ(NO_ERROR, mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_THAT(devices, UnorderedElementsAre(inputDevice2));
+
+    // Test remove devices that are not set as the device role
+    ASSERT_EQ(BAD_VALUE,
+              mManager->removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+
+    // Test clear devices
+    ASSERT_EQ(NO_ERROR,
+              mManager->clearDevicesRoleForCapturePreset(audioSource, role));
+    devices.clear();
+    ASSERT_EQ(NAME_NOT_FOUND,
+              mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        DevicesRoleForCapturePresetOperation,
+        AudioPolicyManagerDevicesRoleForCapturePresetTest,
+        testing::Values(
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_MIC, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_UPLINK,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_DOWNLINK,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_CALL, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_CAMCORDER, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_RECOGNITION,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_COMMUNICATION,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_REMOTE_SUBMIX,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_UNPROCESSED, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_VOICE_PERFORMANCE,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_ECHO_REFERENCE,
+                                                  DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_FM_TUNER, DEVICE_ROLE_PREFERRED}),
+                DevicesRoleForCapturePresetParam({AUDIO_SOURCE_HOTWORD, DEVICE_ROLE_PREFERRED})
+                )
+        );
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index d9476d9..ff4d568 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -1,8 +1,19 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 filegroup {
     name: "audiopolicytest_configuration_files",
     srcs: [
         "test_audio_policy_configuration.xml",
         "test_audio_policy_primary_only_configuration.xml",
+        "test_invalid_audio_policy_configuration.xml",
         "test_tv_apm_configuration.xml",
+        "test_settop_box_surround_configuration.xml",
     ],
 }
diff --git a/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
new file mode 100644
index 0000000..25641d5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains an unnamed device port in the "r_submix" module section. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary module -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Built-In Mic</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bt_hfp_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000,11025,16000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                </devicePort>
+                <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"
+                            role="sink" address="hfp_client_out">
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+                            role="source" address="hfp_client_in">
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output"/>
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Hdmi-In Mic"/>
+                <route type="mix" sink="Hdmi"
+                       sources="primary output"/>
+                <route type="mix" sink="BT SCO"
+                       sources="mixport_bt_hfp_output"/>
+                <route type="mix" sink="mixport_bt_hfp_input"
+                       sources="BT SCO Headset Mic"/>
+            </routes>
+        </module>
+
+        <!-- Remote Submix module -->
+        <module name="r_submix" halVersion="2.0">
+            <attachedDevices>
+                <item>Remote Submix In</item>
+            </attachedDevices>
+            <mixPorts>
+                <mixPort name="r_submix output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="r_submix input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <!-- This port is missing the "tagName" attribute. -->
+                <devicePort type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Remote Submix Out"
+                       sources="r_submix output"/>
+                <route type="mix" sink="r_submix input"
+                       sources="Remote Submix In"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml b/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml
new file mode 100644
index 0000000..6f7375e
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_settop_box_surround_configuration.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+  ~ Copyright (C) 2020 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="false"/>
+    <modules>
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Stub</item>
+            </attachedDevices>
+            <defaultOutputDevice>Stub</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary pcm" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="multichannel output" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT">
+                    <profile name="" />
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="Stub" type="AUDIO_DEVICE_OUT_STUB" role="sink" />
+                <devicePort tagName="HDMI" type="AUDIO_DEVICE_OUT_HDMI" role="sink" />
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="Stub" sources="primary pcm"/>
+                <route type="mix" sink="HDMI" sources="primary pcm,multichannel output"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 501d922..c28c24b 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -16,6 +16,25 @@
 // libcameraservice
 //
 
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_camera_libcameraservice_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_camera_libcameraservice_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libcameraservice",
 
@@ -30,7 +49,6 @@
         "common/CameraProviderManager.cpp",
         "common/DepthPhotoProcessor.cpp",
         "common/FrameProcessorBase.cpp",
-        "api1/CameraClient.cpp",
         "api1/Camera2Client.cpp",
         "api1/client2/Parameters.cpp",
         "api1/client2/FrameProcessor.cpp",
@@ -46,7 +64,6 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
-        "device1/CameraHardwareInterface.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
@@ -54,7 +71,7 @@
         "device3/Camera3IOStreamBase.cpp",
         "device3/Camera3InputStream.cpp",
         "device3/Camera3OutputStream.cpp",
-        "device3/Camera3DummyStream.cpp",
+        "device3/Camera3FakeStream.cpp",
         "device3/Camera3SharedOutputStream.cpp",
         "device3/StatusTracker.cpp",
         "device3/Camera3BufferManager.cpp",
@@ -65,17 +82,20 @@
         "device3/RotateAndCropMapper.cpp",
         "device3/Camera3OutputStreamInterface.cpp",
         "device3/Camera3OutputUtils.cpp",
+        "device3/Camera3DeviceInjectionMethods.cpp",
         "gui/RingBufferConsumer.cpp",
         "hidl/AidlCameraDeviceCallbacks.cpp",
         "hidl/AidlCameraServiceListener.cpp",
         "hidl/Convert.cpp",
         "hidl/HidlCameraDeviceUser.cpp",
         "hidl/HidlCameraService.cpp",
+        "utils/CameraServiceProxyWrapper.cpp",
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
         "utils/ExifUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
+        "utils/SessionStatsBuilder.cpp",
         "utils/TagMonitor.cpp",
         "utils/LatencyHistogram.cpp",
     ],
@@ -94,6 +114,8 @@
         "libutilscallstack",
         "libutils",
         "libbinder",
+        "libactivitymanager_aidl",
+        "libpermission",
         "libcutils",
         "libmedia",
         "libmediautils",
@@ -117,25 +139,33 @@
         "android.frameworks.cameraservice.common@2.0",
         "android.frameworks.cameraservice.service@2.0",
         "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
         "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.provider@2.7",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6"
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "media_permission-aidl-cpp",
     ],
 
     static_libs: [
+        "libprocessinfoservice_aidl",
         "libbinderthreadstateutils",
+        "media_permission-aidl-cpp",
     ],
 
     export_shared_lib_headers: [
         "libbinder",
+        "libactivitymanager_aidl",
+        "libpermission",
         "libcamera_client",
         "libfmq",
         "libsensorprivacy",
@@ -159,4 +189,3 @@
     ],
 
 }
-
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index e629cdd..ccdd9e5 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -59,9 +59,8 @@
     if (mProviderManager->supportSetTorchMode(cameraId.string())) {
         mFlashControl = new ProviderFlashControl(mProviderManager);
     } else {
-        // Only HAL1 devices do not support setTorchMode
-        mFlashControl =
-                new CameraHardwareInterfaceFlashControl(mProviderManager, mCallbacks);
+        ALOGE("Flashlight control not supported by this device!");
+        return NO_INIT;
     }
 
     return OK;
@@ -309,271 +308,4 @@
 }
 // ProviderFlashControl implementation ends
 
-/////////////////////////////////////////////////////////////////////
-// CameraHardwareInterfaceFlashControl implementation begins
-// Flash control for camera module <= v2.3 and camera HAL v1
-/////////////////////////////////////////////////////////////////////
-
-CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl(
-        sp<CameraProviderManager> manager,
-        CameraProviderManager::StatusListener* callbacks) :
-        mProviderManager(manager),
-        mCallbacks(callbacks),
-        mTorchEnabled(false) {
-}
-
-CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
-    disconnectCameraDevice();
-
-    mSurface.clear();
-    mSurfaceTexture.clear();
-    mProducer.clear();
-    mConsumer.clear();
-
-    if (mTorchEnabled) {
-        if (mCallbacks) {
-            ALOGV("%s: notify the framework that torch was turned off",
-                    __FUNCTION__);
-            mCallbacks->onTorchStatusChanged(mCameraId, TorchModeStatus::AVAILABLE_OFF);
-        }
-    }
-}
-
-status_t CameraHardwareInterfaceFlashControl::setTorchMode(
-        const String8& cameraId, bool enabled) {
-    Mutex::Autolock l(mLock);
-
-    // pre-check
-    status_t res;
-    if (enabled) {
-        bool hasFlash = false;
-        // Check if it has a flash unit and leave camera device open.
-        res = hasFlashUnitLocked(cameraId, &hasFlash, /*keepDeviceOpen*/true);
-        // invalid camera?
-        if (res) {
-            // hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to
-            // another camera device.
-            return res == BAD_INDEX ? BAD_INDEX : -EINVAL;
-        }
-        // no flash unit?
-        if (!hasFlash) {
-            // Disconnect camera device if it has no flash.
-            disconnectCameraDevice();
-            return -ENOSYS;
-        }
-    } else if (mDevice == NULL || cameraId != mCameraId) {
-        // disabling the torch mode of an un-opened or different device.
-        return OK;
-    } else {
-        // disabling the torch mode of currently opened device
-        disconnectCameraDevice();
-        mTorchEnabled = false;
-        mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_OFF);
-        return OK;
-    }
-
-    res = startPreviewAndTorch();
-    if (res) {
-        return res;
-    }
-
-    mTorchEnabled = true;
-    mCallbacks->onTorchStatusChanged(cameraId, TorchModeStatus::AVAILABLE_ON);
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnit(
-        const String8& cameraId, bool *hasFlash) {
-    Mutex::Autolock l(mLock);
-    // Close device after checking if it has a flash unit.
-    return hasFlashUnitLocked(cameraId, hasFlash, /*keepDeviceOpen*/false);
-}
-
-status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked(
-        const String8& cameraId, bool *hasFlash, bool keepDeviceOpen) {
-    bool closeCameraDevice = false;
-
-    if (!hasFlash) {
-        return BAD_VALUE;
-    }
-
-    status_t res;
-    if (mDevice == NULL) {
-        // Connect to camera device to query if it has a flash unit.
-        res = connectCameraDevice(cameraId);
-        if (res) {
-            return res;
-        }
-        // Close camera device only when it is just opened and the caller doesn't want to keep
-        // the camera device open.
-        closeCameraDevice = !keepDeviceOpen;
-    }
-
-    if (cameraId != mCameraId) {
-        return BAD_INDEX;
-    }
-
-    const char *flashMode =
-            mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        *hasFlash = true;
-    } else {
-        *hasFlash = false;
-    }
-
-    if (closeCameraDevice) {
-        res = disconnectCameraDevice();
-        if (res != OK) {
-            ALOGE("%s: Failed to disconnect camera device. %s (%d)", __FUNCTION__,
-                    strerror(-res), res);
-            return res;
-        }
-    }
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::startPreviewAndTorch() {
-    status_t res = OK;
-    res = mDevice->startPreview();
-    if (res) {
-        ALOGE("%s: start preview failed. %s (%d)", __FUNCTION__,
-                strerror(-res), res);
-        return res;
-    }
-
-    mParameters.set(CameraParameters::KEY_FLASH_MODE,
-            CameraParameters::FLASH_MODE_TORCH);
-
-    return mDevice->setParameters(mParameters);
-}
-
-status_t CameraHardwareInterfaceFlashControl::getSmallestSurfaceSize(
-        int32_t *width, int32_t *height) {
-    if (!width || !height) {
-        return BAD_VALUE;
-    }
-
-    int32_t w = INT32_MAX;
-    int32_t h = 1;
-    Vector<Size> sizes;
-
-    mParameters.getSupportedPreviewSizes(sizes);
-    for (size_t i = 0; i < sizes.size(); i++) {
-        Size s = sizes[i];
-        if (w * h > s.width * s.height) {
-            w = s.width;
-            h = s.height;
-        }
-    }
-
-    if (w == INT32_MAX) {
-        return NAME_NOT_FOUND;
-    }
-
-    *width = w;
-    *height = h;
-
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::initializePreviewWindow(
-        const sp<CameraHardwareInterface>& device, int32_t width, int32_t height) {
-    status_t res;
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
-    mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
-            true, true);
-    if (mSurfaceTexture == NULL) {
-        return NO_MEMORY;
-    }
-
-    int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    res = mSurfaceTexture->setDefaultBufferSize(width, height);
-    if (res) {
-        return res;
-    }
-    res = mSurfaceTexture->setDefaultBufferFormat(format);
-    if (res) {
-        return res;
-    }
-
-    mSurface = new Surface(mProducer, /*useAsync*/ true);
-    if (mSurface == NULL) {
-        return NO_MEMORY;
-    }
-
-    res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGE("%s: Unable to connect to native window", __FUNCTION__);
-        return res;
-    }
-
-    return device->setPreviewWindow(mSurface);
-}
-
-status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
-        const String8& cameraId) {
-    sp<CameraHardwareInterface> device =
-            new CameraHardwareInterface(cameraId.string());
-
-    status_t res = device->initialize(mProviderManager);
-    if (res) {
-        ALOGE("%s: initializing camera %s failed", __FUNCTION__,
-                cameraId.string());
-        return res;
-    }
-
-    // need to set __get_memory in set_callbacks().
-    device->setCallbacks(NULL, NULL, NULL, NULL, NULL);
-
-    mParameters = device->getParameters();
-
-    int32_t width, height;
-    res = getSmallestSurfaceSize(&width, &height);
-    if (res) {
-        ALOGE("%s: failed to get smallest surface size for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    res = initializePreviewWindow(device, width, height);
-    if (res) {
-        ALOGE("%s: failed to initialize preview window for camera %s",
-                __FUNCTION__, cameraId.string());
-        return res;
-    }
-
-    mCameraId = cameraId;
-    mDevice = device;
-    return OK;
-}
-
-status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() {
-    if (mDevice == NULL) {
-        return OK;
-    }
-
-    if (mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
-        // There is a flash, turn if off.
-        // (If there isn't one, leave the parameter null)
-        mParameters.set(CameraParameters::KEY_FLASH_MODE,
-                CameraParameters::FLASH_MODE_OFF);
-        mDevice->setParameters(mParameters);
-    }
-    mDevice->stopPreview();
-    status_t res = native_window_api_disconnect(mSurface.get(),
-            NATIVE_WINDOW_API_CAMERA);
-    if (res) {
-        ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-    }
-    mDevice->setPreviewWindow(NULL);
-    mDevice->release();
-    mDevice = NULL;
-
-    return OK;
-}
-// CameraHardwareInterfaceFlashControl implementation ends
-
 }
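
For illustration only: after the removals above, CameraFlashlight no longer falls back to a HAL1 CameraHardwareInterfaceFlashControl, so torch control exists only when the provider supports setTorchMode() and unsupported devices surface NO_INIT. The standalone C++ sketch below uses stand-in names (Status, ProviderManager, createFlashControl are illustrative, not the framework classes) to show the reduced decision and how a caller might treat the error.

    #include <cstdio>

    // Stand-in types for illustration; not the Android framework classes.
    enum Status { OK = 0, NO_INIT = -19 };

    struct ProviderManager {
        bool supportSetTorchMode(const char* /*cameraId*/) const { return false; }
    };

    Status createFlashControl(const ProviderManager& pm, const char* cameraId) {
        if (pm.supportSetTorchMode(cameraId)) {
            return OK;  // the real code allocates a ProviderFlashControl here
        }
        // No HAL1 fallback any more: report the failure and bail out.
        std::fprintf(stderr, "Flashlight control not supported by this device!\n");
        return NO_INIT;
    }

    int main() {
        ProviderManager pm;
        // Callers now treat NO_INIT as "torch unavailable" rather than retrying via HAL1.
        return createFlashControl(pm, "0") == OK ? 0 : 1;
    }
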
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 1baaba2..b97fa5f 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -23,8 +23,6 @@
 #include <utils/SortedVector.h>
 #include "common/CameraProviderManager.h"
 #include "common/CameraDeviceBase.h"
-#include "device1/CameraHardwareInterface.h"
-
 
 namespace android {
 
@@ -124,59 +122,6 @@
         Mutex mLock;
 };
 
-/**
- * Flash control for camera module <= v2.3 and camera HAL v1
- */
-class CameraHardwareInterfaceFlashControl : public FlashControlBase {
-    public:
-        CameraHardwareInterfaceFlashControl(
-                sp<CameraProviderManager> manager,
-                CameraProviderManager::StatusListener* callbacks);
-        virtual ~CameraHardwareInterfaceFlashControl();
-
-        // FlashControlBase
-        status_t setTorchMode(const String8& cameraId, bool enabled);
-        status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-
-    private:
-        // connect to a camera device
-        status_t connectCameraDevice(const String8& cameraId);
-
-        // disconnect and free mDevice
-        status_t disconnectCameraDevice();
-
-        // initialize the preview window
-        status_t initializePreviewWindow(const sp<CameraHardwareInterface>& device,
-                int32_t width, int32_t height);
-
-        // start preview and enable torch
-        status_t startPreviewAndTorch();
-
-        // get the smallest surface
-        status_t getSmallestSurfaceSize(int32_t *width, int32_t *height);
-
-        // protected by mLock
-        // If this function opens camera device in order to check if it has a flash unit, the
-        // camera device will remain open if keepDeviceOpen is true and the camera device will be
-        // closed if keepDeviceOpen is false. If camera device is already open when calling this
-        // function, keepDeviceOpen is ignored.
-        status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
-
-        sp<CameraProviderManager> mProviderManager;
-        CameraProviderManager::StatusListener* mCallbacks;
-        sp<CameraHardwareInterface> mDevice;
-        String8 mCameraId;
-        CameraParameters mParameters;
-        bool mTorchEnabled;
-
-        sp<IGraphicBufferProducer> mProducer;
-        sp<IGraphicBufferConsumer>  mConsumer;
-        sp<GLConsumer> mSurfaceTexture;
-        sp<Surface> mSurface;
-
-        Mutex mLock;
-};
-
 } // namespace android
 
 #endif
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index af1e01d..dc101ff 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -21,6 +21,7 @@
 #include <algorithm>
 #include <climits>
 #include <stdio.h>
+#include <cstdlib>
 #include <cstring>
 #include <ctime>
 #include <string>
@@ -37,11 +38,9 @@
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <binder/PermissionController.h>
-#include <binder/ProcessInfoService.h>
 #include <binder/IResultReceiver.h>
 #include <binderthreadstate/CallerUtils.h>
 #include <cutils/atomic.h>
@@ -57,6 +56,7 @@
 #include <media/IMediaHTTPService.h>
 #include <media/mediaplayer.h>
 #include <mediautils/BatteryNotifier.h>
+#include <processinfo/ProcessInfoService.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -70,12 +70,12 @@
 #include <system/camera.h>
 
 #include "CameraService.h"
-#include "api1/CameraClient.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
 #include "utils/CameraTraces.h"
 #include "utils/TagMonitor.h"
 #include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 namespace {
     const char* kPermissionServiceName = "permission";
@@ -85,13 +85,15 @@
 
 using base::StringPrintf;
 using binder::Status;
+using camera3::SessionConfigurationUtils;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::ICamera;
 using hardware::ICameraClient;
-using hardware::ICameraServiceProxy;
 using hardware::ICameraServiceListener;
 using hardware::camera::common::V1_0::CameraDeviceStatus;
 using hardware::camera::common::V1_0::TorchModeStatus;
+using hardware::camera2::ICameraInjectionCallback;
+using hardware::camera2::ICameraInjectionSession;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
 using hardware::camera2::utils::ConcurrentCameraIdCombination;
 
@@ -128,15 +130,16 @@
         sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
 static const String16 sCameraOpenCloseListenerPermission(
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
+static const String16
+        sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+const char *sFileName = "lastOpenSessionDumpFile";
+static constexpr int32_t kVendorClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
+static constexpr int32_t kVendorClientState = ActivityManager::PROCESS_STATE_PERSISTENT_UI;
 
-// Matches with PERCEPTIBLE_APP_ADJ in ProcessList.java
-static constexpr int32_t kVendorClientScore = 200;
-// Matches with PROCESS_STATE_PERSISTENT_UI in ActivityManager.java
-static constexpr int32_t kVendorClientState = 1;
 const String8 CameraService::kOfflineDevice("offline-");
 
-Mutex CameraService::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraService::sCameraServiceProxy;
+// Set to keep track of logged service error events.
+static std::set<String8> sServiceErrorEventSet;
 
 CameraService::CameraService() :
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
@@ -146,10 +149,15 @@
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
     ALOGI("CameraService started (pid=%d)", getpid());
     mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
+    mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
+    if (mMemFd == -1) {
+        ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
+    }
 }
 
 void CameraService::onFirstRef()
 {
+
     ALOGI("CameraService process starting");
 
     BnCameraService::onFirstRef();
@@ -170,6 +178,7 @@
     mUidPolicy->registerSelf();
     mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
     mSensorPrivacyPolicy->registerSelf();
+    mInjectionStatusListener = new InjectionStatusListener(this);
     mAppOps.setCameraAudioRestriction(mAudioRestriction);
     sp<HidlCameraService> hcs = HidlCameraService::getInstance(this);
     if (hcs->registerAsService() != android::OK) {
@@ -179,7 +188,7 @@
 
     // This needs to be last call in this function, so that it's as close to
     // ServiceManager::addService() as possible.
-    CameraService::pingCameraServiceProxy();
+    CameraServiceProxyWrapper::pingCameraServiceProxy();
     ALOGI("CameraService pinged cameraservice proxy");
 }
 
@@ -196,6 +205,8 @@
             if (res != OK) {
                 ALOGE("%s: Unable to initialize camera provider manager: %s (%d)",
                         __FUNCTION__, strerror(-res), res);
+                logServiceError(String8::format("Unable to initialize camera provider manager"),
+                        ERROR_DISCONNECTED);
                 return res;
             }
         }
@@ -226,36 +237,31 @@
         }
     }
 
+    // Derive primary rear/front cameras, and filter their characteristics.
+    // This needs to be done after all cameras are enumerated and camera ids are sorted.
+    if (SessionConfigurationUtils::IS_PERF_CLASS) {
+        // Assume internal cameras are advertised by the same
+        // provider. If multiple providers are registered at different times,
+        // and each provider contains multiple internal color cameras, the current
+        // logic may filter the characteristics of more than one front/rear color
+        // camera.
+        Mutex::Autolock l(mServiceLock);
+        filterSPerfClassCharacteristicsLocked();
+    }
+
     return OK;
 }
 
-sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
-#ifndef __BRILLO__
-    Mutex::Autolock al(sProxyMutex);
-    if (sCameraServiceProxy == nullptr) {
-        sp<IServiceManager> sm = defaultServiceManager();
-        // Use checkService because cameraserver normally starts before the
-        // system server and the proxy service. So the long timeout that getService
-        // has before giving up is inappropriate.
-        sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
-        if (binder != nullptr) {
-            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
-        }
-    }
-#endif
-    return sCameraServiceProxy;
-}
-
-void CameraService::pingCameraServiceProxy() {
-    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
-    if (proxyBinder == nullptr) return;
-    proxyBinder->pingForUserUpdate();
-}
-
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
+void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+        SystemCameraKind systemCameraKind) {
     Mutex::Autolock lock(mStatusListenerLock);
-
     for (auto& i : mListenerList) {
+        if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
+                i->getListenerUid())) {
+            ALOGV("Skipping torch callback for system-only camera device %s",
+                    cameraId.c_str());
+            continue;
+        }
         i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
     }
 }
@@ -264,6 +270,7 @@
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
     mUidPolicy->unregisterSelf();
     mSensorPrivacyPolicy->unregisterSelf();
+    mInjectionStatusListener->removeListener();
 }
 
 void CameraService::onNewProviderRegistered() {
@@ -312,6 +319,46 @@
     filterAPI1SystemCameraLocked(mNormalDeviceIds);
 }
 
+void CameraService::filterSPerfClassCharacteristicsLocked() {
+    // To claim to be S Performance primary cameras, the cameras must be
+    // backward compatible. So performance class primary camera Ids must be API1
+    // compatible.
+    bool firstRearCameraSeen = false, firstFrontCameraSeen = false;
+    for (const auto& cameraId : mNormalDeviceIdsWithoutSystemCamera) {
+        int facing = -1;
+        int orientation = 0;
+        String8 cameraId8(cameraId.c_str());
+        getDeviceVersion(cameraId8, /*out*/&facing, /*out*/&orientation);
+        if (facing == -1) {
+            ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
+            return;
+        }
+
+        if ((facing == hardware::CAMERA_FACING_BACK && !firstRearCameraSeen) ||
+                (facing == hardware::CAMERA_FACING_FRONT && !firstFrontCameraSeen)) {
+            status_t res = mCameraProviderManager->filterSmallJpegSizes(cameraId);
+            if (res == OK) {
+                mPerfClassPrimaryCameraIds.insert(cameraId);
+            } else {
+                ALOGE("%s: Failed to filter small JPEG sizes for performance class primary "
+                        "camera %s: %s(%d)", __FUNCTION__, cameraId.c_str(), strerror(-res), res);
+                break;
+            }
+
+            if (facing == hardware::CAMERA_FACING_BACK) {
+                firstRearCameraSeen = true;
+            }
+            if (facing == hardware::CAMERA_FACING_FRONT) {
+                firstFrontCameraSeen = true;
+            }
+        }
+
+        if (firstRearCameraSeen && firstFrontCameraSeen) {
+            break;
+        }
+    }
+}
+
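
A minimal sketch of how the perf-class primary camera set assembled above is typically consumed later, for example when deciding whether to override characteristics for a given client. The helper below is hypothetical: the real check lives in SessionConfigurationUtils::targetPerfClassPrimaryCamera, and the API-31 cutoff is an assumption made for illustration, not something stated in this patch.

    #include <set>
    #include <string>

    // Hypothetical stand-in: a camera gets the perf-class override only if it was
    // recorded as a primary rear/front camera during init and the client targets a
    // sufficiently new SDK (31, i.e. Android S, is assumed here).
    static bool overrideForPerfClass(const std::set<std::string>& perfClassPrimaryIds,
                                     const std::string& cameraId, int targetSdkVersion) {
        constexpr int kAssumedPerfClassMinSdk = 31;
        return targetSdkVersion >= kAssumedPerfClassMinSdk &&
               perfClassPrimaryIds.count(cameraId) > 0;
    }

    int main() {
        std::set<std::string> ids = {"0", "1"};
        return overrideForPerfClass(ids, "0", 31) ? 0 : 1;
    }
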
 void CameraService::addStates(const String8 id) {
     std::string cameraId(id.c_str());
     hardware::camera::common::V1_0::CameraResourceCost cost;
@@ -341,7 +388,7 @@
         Mutex::Autolock al(mTorchStatusMutex);
         mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
 
-        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF);
+        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
     }
 
     updateCameraNumAndIds();
@@ -502,12 +549,19 @@
 
 void CameraService::onTorchStatusChanged(const String8& cameraId,
         TorchModeStatus newStatus) {
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
+    if (res != OK) {
+        ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
+                cameraId.string());
+        return;
+    }
     Mutex::Autolock al(mTorchStatusMutex);
-    onTorchStatusChangedLocked(cameraId, newStatus);
+    onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
 void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
-        TorchModeStatus newStatus) {
+        TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
             __FUNCTION__, cameraId.string(), newStatus);
 
@@ -556,8 +610,7 @@
             }
         }
     }
-
-    broadcastTorchModeStatus(cameraId, newStatus);
+    broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
 static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
@@ -606,6 +659,7 @@
     }
 
     if (!mInitialized) {
+        logServiceError(String8::format("Camera subsystem is not available"),ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
@@ -628,6 +682,8 @@
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
                 strerror(-err), err);
+        logServiceError(String8::format("Error retrieving camera info from device %d",cameraId),
+            ERROR_INVALID_OPERATION);
     }
 
     return ret;
@@ -656,7 +712,7 @@
 }
 
 Status CameraService::getCameraCharacteristics(const String16& cameraId,
-        CameraMetadata* cameraInfo) {
+        int targetSdkVersion, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
@@ -665,6 +721,7 @@
 
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+        logServiceError(String8::format("Camera subsystem is not available"),ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");;
     }
@@ -676,12 +733,25 @@
 
     Status ret{};
 
+
+    std::string cameraIdStr = String8(cameraId).string();
+    bool overrideForPerfClass =
+            SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
+                    cameraIdStr, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            String8(cameraId).string(), cameraInfo);
+            cameraIdStr, overrideForPerfClass, cameraInfo);
     if (res != OK) {
-        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
-                "characteristics for device %s: %s (%d)", String8(cameraId).string(),
-                strerror(-res), res);
+        if (res == NAME_NOT_FOUND) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
+                    "characteristics for unknown device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        } else {
+            logServiceError(String8::format("Unable to retrieve camera characteristics for "
+            "device %s.", String8(cameraId).string()),ERROR_INVALID_OPERATION);
+            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+                    "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+                    strerror(-res), res);
+        }
     }
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(String8(cameraId), &deviceKind) != OK) {
@@ -761,7 +831,11 @@
     return Status::ok();
 }
 
-int CameraService::getDeviceVersion(const String8& cameraId, int* facing) {
+void CameraService::clearCachedVariables() {
+    BasicClient::BasicClient::sCameraService = nullptr;
+}
+
+int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -778,6 +852,9 @@
         res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
         if (res != OK) return -1;
         *facing = info.facing;
+        if (orientation) {
+            *orientation = info.orientation;
+        }
     }
 
     return deviceVersion;
@@ -802,74 +879,47 @@
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
         const sp<IInterface>& cameraCb, const String16& packageName,
-        const std::unique_ptr<String16>& featureId, const String8& cameraId, int api1CameraId,
-        int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
-        int deviceVersion, apiLevel effectiveApiLevel,
+        const std::optional<String16>& featureId,  const String8& cameraId,
+        int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+        int servicePid, int deviceVersion, apiLevel effectiveApiLevel, bool overrideForPerfClass,
         /*out*/sp<BasicClient>* client) {
 
-    if (halVersion < 0 || halVersion == deviceVersion) {
-        // Default path: HAL version is unspecified by caller, create CameraClient
-        // based on device version reported by the HAL.
-        switch(deviceVersion) {
-          case CAMERA_DEVICE_API_VERSION_1_0:
-            if (effectiveApiLevel == API_1) {  // Camera1 API route
-                sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                        api1CameraId, facing, clientPid, clientUid,
-                        getpid());
-            } else { // Camera2 API route
-                ALOGW("Camera using old HAL version: %d", deviceVersion);
-                return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
-                        "Camera device \"%s\" HAL version %d does not support camera2 API",
-                        cameraId.string(), deviceVersion);
-            }
+    // Create CameraClient based on device version reported by the HAL.
+    switch(deviceVersion) {
+        case CAMERA_DEVICE_API_VERSION_1_0:
+            ALOGE("Camera using old HAL version: %d", deviceVersion);
+            return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+                    "Camera device \"%s\" HAL version %d no longer supported",
+                    cameraId.string(), deviceVersion);
             break;
-          case CAMERA_DEVICE_API_VERSION_3_0:
-          case CAMERA_DEVICE_API_VERSION_3_1:
-          case CAMERA_DEVICE_API_VERSION_3_2:
-          case CAMERA_DEVICE_API_VERSION_3_3:
-          case CAMERA_DEVICE_API_VERSION_3_4:
-          case CAMERA_DEVICE_API_VERSION_3_5:
-          case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_0:
+        case CAMERA_DEVICE_API_VERSION_3_1:
+        case CAMERA_DEVICE_API_VERSION_3_2:
+        case CAMERA_DEVICE_API_VERSION_3_3:
+        case CAMERA_DEVICE_API_VERSION_3_4:
+        case CAMERA_DEVICE_API_VERSION_3_5:
+        case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_7:
             if (effectiveApiLevel == API_1) { // Camera1 API route
                 sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
                 *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                         cameraId, api1CameraId,
-                        facing, clientPid, clientUid,
-                        servicePid);
+                        facing, sensorOrientation, clientPid, clientUid,
+                        servicePid, overrideForPerfClass);
             } else { // Camera2 API route
                 sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                         static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
                 *client = new CameraDeviceClient(cameraService, tmp, packageName, featureId,
-                        cameraId, facing, clientPid, clientUid, servicePid);
+                        cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+                        overrideForPerfClass);
             }
             break;
-          default:
+        default:
             // Should not be reachable
             ALOGE("Unknown camera device HAL version: %d", deviceVersion);
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                     "Camera device \"%s\" has unknown HAL version %d",
                     cameraId.string(), deviceVersion);
-        }
-    } else {
-        // A particular HAL version is requested by caller. Create CameraClient
-        // based on the requested HAL version.
-        if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 &&
-            halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
-            // Only support higher HAL version device opened as HAL1.0 device.
-            sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-            *client = new CameraClient(cameraService, tmp, packageName, featureId,
-                    api1CameraId, facing, clientPid, clientUid,
-                    servicePid);
-        } else {
-            // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
-            ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
-                    " opened as HAL %x device", halVersion, deviceVersion,
-                    CAMERA_DEVICE_API_VERSION_1_0);
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
-                    cameraId.string(), deviceVersion, halVersion);
-        }
     }
     return Status::ok();
 }
@@ -957,9 +1007,9 @@
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED),
-            internalPackageName, std::unique_ptr<String16>(), uid, USE_CALLING_PID,
-            API_1, /*shimUpdateOnly*/ true, /*out*/ tmp)
+            internalPackageName, {}, uid, USE_CALLING_PID,
+            API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1231,10 +1281,12 @@
 }
 
 void CameraService::finishConnectLocked(const sp<BasicClient>& client,
-        const CameraService::DescriptorPtr& desc) {
+        const CameraService::DescriptorPtr& desc, int oomScoreOffset) {
 
     // Make a descriptor for the incoming client
-    auto clientDescriptor = CameraService::CameraClientManager::makeClientDescriptor(client, desc);
+    auto clientDescriptor =
+            CameraService::CameraClientManager::makeClientDescriptor(client, desc,
+                    oomScoreOffset);
     auto evicted = mActiveClientManager.addAndEvict(clientDescriptor);
 
     logConnected(desc->getKey(), static_cast<int>(desc->getOwnerId()),
@@ -1264,6 +1316,7 @@
 
 status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
+        int oomScoreOffset,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) {
@@ -1315,7 +1368,8 @@
         for (size_t i = 0; i < ownerPids.size() - 1; i++) {
             pidToPriorityMap.emplace(ownerPids[i],
                     resource_policy::ClientPriority(priorityScores[i], states[i],
-                            /* isVendorClient won't get copied over*/ false));
+                            /* isVendorClient won't get copied over*/ false,
+                            /* oomScoreOffset won't get copied over*/ 0));
         }
         mActiveClientManager.updatePriorities(pidToPriorityMap);
 
@@ -1328,13 +1382,16 @@
             return BAD_VALUE;
         }
 
-        // Make descriptor for incoming client
+        int32_t actualScore = priorityScores[priorityScores.size() - 1];
+        int32_t actualState = states[states.size() - 1];
+
+        // Make descriptor for incoming client. We store the oomScoreOffset
+        // since we might need it on a later handleEvictionsLocked call, and
+        // ProcessInfoService would not take it into account.
         clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
                 sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
-                state->getConflicting(),
-                priorityScores[priorityScores.size() - 1],
-                clientPid,
-                states[states.size() - 1]);
+                state->getConflicting(), actualScore, clientPid, actualState,
+                oomScoreOffset);
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
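
The oomScoreOffset stored here only matters when clients later compete for the camera. A hypothetical illustration follows (not the framework's ClientManager; the simple addition is an assumption) of why the offset has to travel with the descriptor: ProcessInfoService reports only the base score, so the offset must be re-applied whenever priorities are compared.

    // Hypothetical sketch: a positive oomScoreOffset worsens the effective score
    // (oom_score_adj convention: higher score == easier to evict).
    struct EffectivePriority {
        int score;
        int state;
    };

    static EffectivePriority effectivePriority(int baseScore, int state, int oomScoreOffset) {
        return EffectivePriority{baseScore + oomScoreOffset, state};
    }

    int main() {
        // A client that asked for oomScoreOffset 100 ranks behind one that did not.
        return effectivePriority(200, 1, 100).score > effectivePriority(200, 1, 0).score ? 0 : 1;
    }
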
 
@@ -1467,6 +1524,7 @@
         const String16& clientPackageName,
         int clientUid,
         int clientPid,
+        int targetSdkVersion,
         /*out*/
         sp<ICamera>* device) {
 
@@ -1476,35 +1534,8 @@
     String8 id = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, std::unique_ptr<String16>(),
-            clientUid, clientPid, API_1, /*shimUpdateOnly*/ false, /*out*/client);
-
-    if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
-                ret.toString8());
-        return ret;
-    }
-
-    *device = client;
-    return ret;
-}
-
-Status CameraService::connectLegacy(
-        const sp<ICameraClient>& cameraClient,
-        int api1CameraId, int halVersion,
-        const String16& clientPackageName,
-        int clientUid,
-        /*out*/
-        sp<ICamera>* device) {
-
-    ATRACE_CALL();
-    String8 id = cameraIdIntToStr(api1CameraId);
-
-    Status ret = Status::ok();
-    sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId, halVersion,
-            clientPackageName, std::unique_ptr<String16>(), clientUid, USE_CALLING_PID, API_1,
-            /*shimUpdateOnly*/ false, /*out*/client);
+            clientPackageName, {}, clientUid, clientPid, API_1,
+            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1545,8 +1576,9 @@
     int cUid = CameraThreadState::getCallingUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, ", __FUNCTION__, cameraId.c_str());
-        return true;
+        // This isn't a known camera ID, so it's not a system camera
+        ALOGV("%s: Unknown camera id %s, ", __FUNCTION__, cameraId.c_str());
+        return false;
     }
 
     // (1) Cameraserver trying to connect, accept.
@@ -1578,8 +1610,8 @@
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const String16& cameraId,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
-        int clientUid,
+        const std::optional<String16>& clientFeatureId,
+        int clientUid, int oomScoreOffset, int targetSdkVersion,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
 
@@ -1588,32 +1620,69 @@
     String8 id = String8(cameraId);
     sp<CameraDeviceClient> client = nullptr;
     String16 clientPackageNameAdj = clientPackageName;
+    int callingPid = CameraThreadState::getCallingPid();
 
     if (getCurrentServingCall() == BinderCallType::HWBINDER) {
         std::string vendorClient =
                 StringPrintf("vendor.client.pid<%d>", CameraThreadState::getCallingPid());
         clientPackageNameAdj = String16(vendorClient.c_str());
     }
+
+    if (oomScoreOffset < 0) {
+        String8 msg =
+                String8::format("Cannot increase the priority of a client %s pid %d for "
+                        "camera id %s", String8(clientPackageNameAdj).string(), callingPid,
+                        id.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
+    // enforce system camera permissions
+    if (oomScoreOffset > 0 &&
+            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+        String8 msg =
+                String8::format("Cannot change the priority of a client %s pid %d for "
+                        "camera id %s without SYSTEM_CAMERA permissions",
+                        String8(clientPackageNameAdj).string(), callingPid, id.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.string());
+    }
+
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
-            /*api1CameraId*/-1,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageNameAdj, clientFeatureId,
-            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, /*out*/client);
+            /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
+            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
+            targetSdkVersion, /*out*/client);
 
     if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageNameAdj),
-                ret.toString8());
+        logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
         return ret;
     }
 
     *device = client;
+    Mutex::Autolock lock(mServiceLock);
+
+    // Clear the previously cached logs and reposition the
+    // file offset to the beginning of the file so new data can be logged.
+    // If either truncate or lseek fails, close the previous file and create a new one.
+    if ((ftruncate(mMemFd, 0) == -1) || (lseek(mMemFd, 0, SEEK_SET) == -1)) {
+        ALOGE("%s: Error while truncating the file: %s", __FUNCTION__, sFileName);
+        // Close the previous memfd.
+        close(mMemFd);
+        // If we failed to wipe the data, create a new file and
+        // assign its descriptor to mMemFd.
+        mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
+        if (mMemFd == -1) {
+            ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
+        }
+    }
     return ret;
 }
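
The mMemFd handling added in this change (created with memfd_create in the constructor, then wiped at the start of each connectDevice) is a small reusable pattern. Below is a standalone sketch under the assumption of a Linux build with glibc >= 2.27 (g++ defines _GNU_SOURCE by default, which the memfd_create declaration needs); the names are illustrative, not the service's members.

    #include <sys/mman.h>   // memfd_create, MFD_ALLOW_SEALING
    #include <unistd.h>     // ftruncate, lseek, write, close
    #include <cstdio>

    // Wipe an in-memory dump file before logging a new session: truncate to zero and
    // rewind. If either step fails, drop the descriptor and start over with a fresh
    // memfd, mirroring the fallback in the hunk above.
    static int resetDumpFile(int fd, const char* name) {
        if (ftruncate(fd, 0) == -1 || lseek(fd, 0, SEEK_SET) == -1) {
            close(fd);
            fd = memfd_create(name, MFD_ALLOW_SEALING);
        }
        return fd;  // -1 if memfd_create also failed
    }

    int main() {
        int fd = memfd_create("lastOpenSessionDumpFile", MFD_ALLOW_SEALING);
        if (fd == -1) { std::perror("memfd_create"); return 1; }
        if (write(fd, "session #1\n", 11) == -1) std::perror("write");
        fd = resetDumpFile(fd, "lastOpenSessionDumpFile");
        return fd == -1 ? 1 : 0;
    }
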
 
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, int halVersion, const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool shimUpdateOnly,
+        int api1CameraId, const String16& clientPackageName,
+        const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
+        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
@@ -1621,12 +1690,16 @@
 
     int originalClientPid = 0;
 
-    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
+    ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
-            (halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
             static_cast<int>(effectiveApiLevel));
 
+    nsecs_t openTimeNs = systemTime();
+
     sp<CLIENT> client = nullptr;
+    int facing = -1;
+    int orientation = 0;
+    bool isNdk = (clientPackageName.size() == 0);
     {
         // Acquire mServiceLock and prevent other clients from connecting
         std::unique_ptr<AutoConditionLock> lock =
@@ -1660,7 +1733,7 @@
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial;
         if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
-                IInterface::asBinder(cameraCb), clientName8, /*out*/&clientTmp,
+                IInterface::asBinder(cameraCb), clientName8, oomScoreOffset, /*out*/&clientTmp,
                 /*out*/&partial)) != NO_ERROR) {
             switch (err) {
                 case -ENODEV:
@@ -1691,8 +1764,7 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
-        int facing = -1;
-        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing);
+        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1700,10 +1772,12 @@
         }
 
         sp<BasicClient> tmp = nullptr;
+        bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                mPerfClassPrimaryCameraIds, cameraId.string(), targetSdkVersion);
         if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, facing,
+                cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
-                halVersion, deviceVersion, effectiveApiLevel,
+                deviceVersion, effectiveApiLevel, overrideForPerfClass,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -1760,6 +1834,16 @@
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+        } else if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(clientPackageName,
+                    orientation, facing)) {
+            client->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+        }
+
+        // Set camera muting behavior
+        if (client->supportsCameraMute()) {
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
+            client->setCameraMute(mOverrideCameraMuteMode || isCameraPrivacyEnabled);
         }
 
         if (shimUpdateOnly) {
@@ -1769,13 +1853,20 @@
             mServiceLock.lock();
         } else {
             // Otherwise, add client to active clients list
-            finishConnectLocked(client, partial);
+            finishConnectLocked(client, partial, oomScoreOffset);
         }
+
+        client->setImageDumpMask(mImageDumpMask);
     } // lock is destroyed, allow further connect calls
 
     // Important: release the mutex here so the client can call back into the service from its
     // destructor (can be at the end of the call)
     device = client;
+
+    int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
+    CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+            effectiveApiLevel, isNdk, openLatencyMs);
+
     return ret;
 }
 
@@ -1809,7 +1900,8 @@
         auto offlineClientDesc = CameraClientManager::makeClientDescriptor(
                 kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
                 /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
-                onlineClientDesc->getOwnerId(), onlinePriority.getState());
+                onlineClientDesc->getOwnerId(), onlinePriority.getState(),
+                /*oomScoreOffset*/ 0);
 
         // Allow only one offline device per camera
         auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -1864,6 +1956,10 @@
     String8 id = String8(cameraId.string());
     int uid = CameraThreadState::getCallingUid();
 
+    if (shouldRejectSystemCameraConnection(id)) {
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
+                " for system only device %s: ", id.string());
+    }
     // verify id is valid.
     auto state = getCameraState(id);
     if (state == nullptr) {
@@ -1944,6 +2040,7 @@
                 errorCode = ERROR_INVALID_OPERATION;
         }
         ALOGE("%s: %s", __FUNCTION__, msg.string());
+        logServiceError(msg,errorCode);
         return STATUS_ERROR(errorCode, msg.string());
     }
 
@@ -2066,7 +2163,50 @@
     return Status::ok();
 }
 
- Status CameraService::getConcurrentCameraIds(
+Status CameraService::notifyDisplayConfigurationChange() {
+    ATRACE_CALL();
+    const int callingPid = CameraThreadState::getCallingPid();
+    const int selfPid = getpid();
+
+    // Permission checks
+    if (callingPid != selfPid) {
+        // Ensure we're being called by system_server, or similar process with
+        // permissions to notify the camera service about system events
+        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+            const int uid = CameraThreadState::getCallingUid();
+            ALOGE("Permission Denial: cannot send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+                    "No permission to send updates to camera service about orientation"
+                    " changes from pid=%d, uid=%d", callingPid, uid);
+        }
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+
+    // Don't do anything if rotate-and-crop override via cmd is active
+    if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return Status::ok();
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+                            basicClient->getPackageName(), basicClient->getCameraOrientation(),
+                            basicClient->getCameraFacing())) {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+                } else {
+                    basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE);
+                }
+            }
+        }
+    }
+
+    return Status::ok();
+}
+
+Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
     if (!concurrentCameraIds) {
@@ -2076,6 +2216,7 @@
 
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+        logServiceError(String8::format("Camera subsystem is not available"), ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
@@ -2111,7 +2252,7 @@
 
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        /*out*/bool* isSupported) {
+        int targetSdkVersion, /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "isSupported is NULL");
@@ -2135,8 +2276,11 @@
 
     status_t res =
             mCameraProviderManager->isConcurrentSessionConfigurationSupported(
-                    cameraIdsAndSessionConfigurations, isSupported);
+                    cameraIdsAndSessionConfigurations, mPerfClassPrimaryCameraIds,
+                    targetSdkVersion, isSupported);
     if (res != OK) {
+        logServiceError(String8::format("Unable to query session configuration support"),
+            ERROR_INVALID_OPERATION);
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to query session configuration "
                 "support %s (%d)", strerror(-res), res);
     }
@@ -2149,10 +2293,15 @@
     return addListenerHelper(listener, cameraStatuses);
 }
 
+binder::Status CameraService::addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            std::vector<hardware::CameraStatus>* cameraStatuses) {
+    return addListenerHelper(listener, cameraStatuses, false, true);
+}
+
 Status CameraService::addListenerHelper(const sp<ICameraServiceListener>& listener,
         /*out*/
         std::vector<hardware::CameraStatus> *cameraStatuses,
-        bool isVendorListener) {
+        bool isVendorListener, bool isProcessLocalTest) {
 
     ATRACE_CALL();
 
@@ -2183,10 +2332,11 @@
         sp<ServiceListener> serviceListener =
                 new ServiceListener(this, listener, clientUid, clientPid, isVendorListener,
                         openCloseCallbackAllowed);
-        auto ret = serviceListener->initialize();
+        auto ret = serviceListener->initialize(isProcessLocalTest);
         if (ret != NO_ERROR) {
             String8 msg = String8::format("Failed to initialize service listener: %s (%d)",
                     strerror(-ret), ret);
+            logServiceError(msg, ERROR_ILLEGAL_ARGUMENT);
             ALOGE("%s: %s", __FUNCTION__, msg.string());
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
@@ -2220,6 +2370,11 @@
                     return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
                             clientUid);}), cameraStatuses->end());
 
+    // cameraStatuses will have non-eligible camera ids removed.
+    std::set<String16> idsChosenForCallback;
+    for (const auto &s : *cameraStatuses) {
+        idsChosenForCallback.insert(String16(s.cameraId));
+    }
 
     /*
      * Immediately signal current torch status to this listener only
@@ -2229,7 +2384,11 @@
         Mutex::Autolock al(mTorchStatusMutex);
         for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
             String16 id = String16(mTorchStatusMap.keyAt(i).string());
-            listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            // The camera id is visible to the client. Fine to send torch
+            // callback.
+            if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
+                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+            }
         }
     }
 
@@ -2330,6 +2489,7 @@
         case CAMERA_DEVICE_API_VERSION_3_4:
         case CAMERA_DEVICE_API_VERSION_3_5:
         case CAMERA_DEVICE_API_VERSION_3_6:
+        case CAMERA_DEVICE_API_VERSION_3_7:
             ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
                     __FUNCTION__, id.string());
             *isSupported = true;
@@ -2362,6 +2522,42 @@
     return Status::ok();
 }
 
+Status CameraService::injectCamera(
+        const String16& packageName, const String16& internalCamId,
+        const String16& externalCamId,
+        const sp<ICameraInjectionCallback>& callback,
+        /*out*/
+        sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession) {
+    ATRACE_CALL();
+
+    if (!checkCallingPermission(sCameraInjectExternalCameraPermission)) {
+        const int pid = CameraThreadState::getCallingPid();
+        const int uid = CameraThreadState::getCallingUid();
+        ALOGE("Permission Denial: can't inject camera pid=%d, uid=%d", pid, uid);
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
+                        "Permission Denial: no permission to inject camera");
+    }
+
+    ALOGV("%s: Package name = %s, Internal camera ID = %s, External camera ID = %s",
+            __FUNCTION__, String8(packageName).string(),
+            String8(internalCamId).string(), String8(externalCamId).string());
+
+    binder::Status ret = binder::Status::ok();
+    // TODO: Implement the injection camera function.
+    // ret = internalInjectCamera(...);
+    // if(!ret.isOk()) {
+    //     mInjectionStatusListener->notifyInjectionError(...);
+    //     return ret;
+    // }
+
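+    // Register the callback so injection errors can later be reported through
+    // notifyInjectionError, and return a session the caller can use to stop injection.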
+    mInjectionStatusListener->addListener(callback);
+    *cameraInjectionSession = new CameraInjectionSession(this);
+
+    return ret;
+}
+
 void CameraService::removeByClient(const BasicClient* client) {
     Mutex::Autolock lock(mServiceLock);
     for (auto& i : mActiveClientManager.getAll()) {
@@ -2525,7 +2721,15 @@
 void CameraService::logEvent(const char* event) {
     String8 curTime = getFormattedCurrentTime();
     Mutex::Autolock l(mLogLock);
-    mEventLog.add(String8::format("%s : %s", curTime.string(), event));
+    String8 msg = String8::format("%s : %s", curTime.string(), event);
+    // For service error events, add the message to the event log only once.
+    if (!msg.contains("SERVICE ERROR")) {
+        mEventLog.add(msg);
+    } else if (sServiceErrorEventSet.find(msg) == sServiceErrorEventSet.end()) {
+        // Error event not added to the dumpsys log before
+        mEventLog.add(msg);
+        sServiceErrorEventSet.insert(msg);
+    }
 }
 
 void CameraService::logDisconnected(const char* cameraId, int clientPid,
@@ -2700,6 +2904,11 @@
     ATRACE_CALL();
 
     LOG1("playSound(%d)", kind);
+    if (kind < 0 || kind >= NUM_SOUNDS) {
+        ALOGE("%s: Invalid sound id requested: %d", __FUNCTION__, kind);
+        return;
+    }
+
     Mutex::Autolock lock(mSoundLock);
     loadSoundLocked(kind);
     sp<MediaPlayer> player = mSoundPlayer[kind];
@@ -2714,15 +2923,15 @@
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
-        int api1CameraId, int cameraFacing,
+        int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, clientFeatureId,
-                cameraIdStr, cameraFacing,
+                cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid),
         mCameraId(api1CameraId)
@@ -2751,29 +2960,24 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
-        const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
-        const String8& cameraIdStr, int cameraFacing,
+        const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
+        const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid):
-        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
-        mClientPackageName(clientPackageName),
+        mDestructionStarted(false),
+        mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
+        mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
-        mRemoteBinder(remoteCallback)
+        mRemoteBinder(remoteCallback),
+        mOpsActive(false),
+        mOpsStreaming(false)
 {
-    if (clientFeatureId) {
-        mClientFeatureId = std::unique_ptr<String16>(new String16(*clientFeatureId));
-    } else {
-        mClientFeatureId = std::unique_ptr<String16>();
-    }
-
     if (sCameraService == nullptr) {
         sCameraService = cameraService;
     }
-    mOpsActive = false;
-    mDestructionStarted = false;
 
     // In some cases the calling code has no access to the package it runs under.
     // For example, NDK camera API.
@@ -2858,6 +3062,13 @@
     return mClientPackageName;
 }
 
+int CameraService::BasicClient::getCameraFacing() const {
+    return mCameraFacing;
+}
+
+int CameraService::BasicClient::getCameraOrientation() const {
+    return mOrientation;
+}
 
 int CameraService::BasicClient::getClientPid() const {
     return mClientPid;
@@ -2901,6 +3112,29 @@
     }
 }
 
+status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
+    if (mode == AppOpsManager::MODE_ERRORED) {
+        ALOGI("Camera %s: Access for \"%s\" has been revoked",
+                mCameraIdStr.string(), String8(mClientPackageName).string());
+        return PERMISSION_DENIED;
+    } else if (!mUidIsTrusted && mode == AppOpsManager::MODE_IGNORED) {
+        // If the calling Uid is trusted (a native service), the AppOpsManager could
+        // return MODE_IGNORED. Do not treat such a case as an error.
+        bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
+                mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                    multiuser_get_user_id(mClientUid));
+        if (!isUidActive || !isCameraPrivacyEnabled) {
+            ALOGI("Camera %s: Access for \"%s\" has been restricted",
+                    mCameraIdStr.string(), String8(mClientPackageName).string());
+            // Return the same error as for device policy manager rejection
+            return -EACCES;
+        }
+    }
+    return OK;
+}
+
 status_t CameraService::BasicClient::startCameraOps() {
     ATRACE_CALL();
 
@@ -2911,26 +3145,16 @@
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
-        int32_t res;
         mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
                 mClientPackageName, mOpsCallback);
-        res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName, /*startIfModeDefault*/ false, mClientFeatureId,
-                String16("start camera ") + String16(mCameraIdStr));
 
-        if (res == AppOpsManager::MODE_ERRORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been revoked",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            return PERMISSION_DENIED;
-        }
-
-        // If the calling Uid is trusted (a native service), the AppOpsManager could
-        // return MODE_IGNORED. Do not treat such case as error.
-        if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
-            // Return the same error as for device policy manager rejection
-            return -EACCES;
+        // Just check for camera access here on open - delay startOp until
+        // camera frames start streaming in startCameraStreamingOps
+        int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName);
+        status_t res = handleAppOpMode(mode);
+        if (res != OK) {
+            return res;
         }
     }
 
@@ -2939,14 +3163,6 @@
     // Transition device availability listeners from PRESENT -> NOT_AVAILABLE
     sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
 
-    int apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1;
-    if (canCastToApiClient(API_2)) {
-        apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2;
-    }
-    // Transition device state to OPEN
-    sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
-            mCameraIdStr, mCameraFacing, mClientPackageName, apiLevel);
-
     sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
 
     // Notify listeners of camera open/close status
@@ -2955,17 +3171,69 @@
     return OK;
 }
 
+status_t CameraService::BasicClient::startCameraStreamingOps() {
+    ATRACE_CALL();
+
+    if (!mOpsActive) {
+        ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (mOpsStreaming) {
+        ALOGV("%s: Streaming already active!", __FUNCTION__);
+        return OK;
+    }
+
+    ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d",
+            __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+
+    if (mAppOpsManager != nullptr) {
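+        // startOp (as opposed to the checkOp done at open time) marks the camera
+        // as actively in use by this package for the duration of streaming.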
+        int32_t mode = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName, /*startIfModeDefault*/ false, mClientFeatureId,
+                String16("start camera ") + String16(mCameraIdStr));
+        status_t res = handleAppOpMode(mode);
+        if (res != OK) {
+            return res;
+        }
+    }
+
+    mOpsStreaming = true;
+
+    return OK;
+}
+
+status_t CameraService::BasicClient::finishCameraStreamingOps() {
+    ATRACE_CALL();
+
+    if (!mOpsActive) {
+        ALOGE("%s: Calling streaming finish when not yet active", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (!mOpsStreaming) {
+        ALOGV("%s: Streaming not active!", __FUNCTION__);
+        return OK;
+    }
+
+    if (mAppOpsManager != nullptr) {
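+        // Balance the startOp issued in startCameraStreamingOps, so AppOps no
+        // longer counts this package as actively using the camera.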
+        mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
+                mClientPackageName, mClientFeatureId);
+        mOpsStreaming = false;
+    }
+
+    return OK;
+}
+
 status_t CameraService::BasicClient::finishCameraOps() {
     ATRACE_CALL();
 
+    if (mOpsStreaming) {
+        // Make sure we've notified everyone about camera stopping
+        finishCameraStreamingOps();
+    }
+
     // Check if startCameraOps succeeded, and if so, finish the camera op
     if (mOpsActive) {
-        // Notify app ops that the camera is available again
-        if (mAppOpsManager != nullptr) {
-            mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
-                    mClientPackageName, mClientFeatureId);
-            mOpsActive = false;
-        }
+        mOpsActive = false;
+
         // This function is called when a client disconnects. This should
         // release the camera, but actually only if it was in a proper
         // functional state, i.e. with status NOT_AVAILABLE
@@ -2975,14 +3243,6 @@
         // Transition to PRESENT if the camera is not in either of the rejected states
         sCameraService->updateStatus(StatusInternal::PRESENT,
                 mCameraIdStr, rejected);
-
-        int apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1;
-        if (canCastToApiClient(API_2)) {
-            apiLevel = hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2;
-        }
-        // Transition device state to CLOSED
-        sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
-                mCameraIdStr, mCameraFacing, mClientPackageName, apiLevel);
     }
     // Always stop watching, even if no camera op is active
     if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
@@ -3018,10 +3278,28 @@
             res == AppOpsManager::MODE_ERRORED ? "ERRORED" :
             "UNKNOWN");
 
-    if (res != AppOpsManager::MODE_ALLOWED) {
+    if (res == AppOpsManager::MODE_ERRORED) {
         ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.string(),
               String8(mClientPackageName).string());
         block();
+    } else if (res == AppOpsManager::MODE_IGNORED) {
+        bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+        bool isCameraPrivacyEnabled =
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
+                        multiuser_get_user_id(mClientUid));
+        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
+                mCameraIdStr.string(), String8(mClientPackageName).string(),
+                mUidIsTrusted, isUidActive);
+        // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
+        // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
+        // errors.
+        if (!mUidIsTrusted && isUidActive && isCameraPrivacyEnabled) {
+            setCameraMute(true);
+        } else if (!mUidIsTrusted && !isUidActive) {
+            block();
+        }
+    } else if (res == AppOpsManager::MODE_ALLOWED) {
+        setCameraMute(sCameraService->mOverrideCameraMuteMode);
     }
 }
 
@@ -3294,6 +3572,7 @@
     if (mRegistered) {
         return;
     }
+    hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
     status_t res = mSpm.linkToDeath(this);
@@ -3316,6 +3595,14 @@
     return mSensorPrivacyEnabled;
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(userid_t userId) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isIndividualSensorPrivacyEnabled(userId,
+        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
@@ -3337,6 +3624,10 @@
     mRegistered = false;
 }
 
+bool CameraService::SensorPrivacyPolicy::hasCameraPrivacyFeature() {
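+    // If the device has no dedicated camera privacy toggle, this returns false
+    // and isCameraPrivacyEnabled() above short-circuits to false.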
+    return mSpm.supportsSensorToggle(SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+}
+
 // ----------------------------------------------------------------------------
 //                  CameraState
 // ----------------------------------------------------------------------------
@@ -3485,21 +3776,83 @@
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
         const String8& key, const sp<BasicClient>& value, int32_t cost,
         const std::set<String8>& conflictingKeys, int32_t score, int32_t ownerId,
-        int32_t state) {
+        int32_t state, int32_t oomScoreOffset) {
 
     bool isVendorClient = getCurrentServingCall() == BinderCallType::HWBINDER;
     int32_t score_adj = isVendorClient ? kVendorClientScore : score;
     int32_t state_adj = isVendorClient ? kVendorClientState: state;
 
     return std::make_shared<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>(
-            key, value, cost, conflictingKeys, score_adj, ownerId, state_adj, isVendorClient);
+            key, value, cost, conflictingKeys, score_adj, ownerId, state_adj, isVendorClient,
+            oomScoreOffset);
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
-        const sp<BasicClient>& value, const CameraService::DescriptorPtr& partial) {
+        const sp<BasicClient>& value, const CameraService::DescriptorPtr& partial,
+        int32_t oomScoreOffset) {
     return makeClientDescriptor(partial->getKey(), value, partial->getCost(),
             partial->getConflicting(), partial->getPriority().getScore(),
-            partial->getOwnerId(), partial->getPriority().getState());
+            partial->getOwnerId(), partial->getPriority().getState(), oomScoreOffset);
+}
+
+// ----------------------------------------------------------------------------
+//                  InjectionStatusListener
+// ----------------------------------------------------------------------------
+
+void CameraService::InjectionStatusListener::addListener(
+        const sp<ICameraInjectionCallback>& callback) {
+    Mutex::Autolock lock(mListenerLock);
+    if (mCameraInjectionCallback) return;
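+    // Link to the callback binder's death so injection can be torn down if the
+    // injecting process dies (see binderDied below).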
+    status_t res = IInterface::asBinder(callback)->linkToDeath(this);
+    if (res == OK) {
+        mCameraInjectionCallback = callback;
+    }
+}
+
+void CameraService::InjectionStatusListener::removeListener() {
+    Mutex::Autolock lock(mListenerLock);
+    if (mCameraInjectionCallback == nullptr) {
+        ALOGW("InjectionStatusListener: mCameraInjectionCallback == nullptr");
+        return;
+    }
+    IInterface::asBinder(mCameraInjectionCallback)->unlinkToDeath(this);
+    mCameraInjectionCallback = nullptr;
+}
+
+void CameraService::InjectionStatusListener::notifyInjectionError(
+        int errorCode) {
+    Mutex::Autolock lock(mListenerLock);
+    if (mCameraInjectionCallback == nullptr) {
+        ALOGW("InjectionStatusListener: mCameraInjectionCallback == nullptr");
+        return;
+    }
+    mCameraInjectionCallback->onInjectionError(errorCode);
+}
+
+void CameraService::InjectionStatusListener::binderDied(
+        const wp<IBinder>& /*who*/) {
+    Mutex::Autolock lock(mListenerLock);
+    ALOGV("InjectionStatusListener: ICameraInjectionCallback has died");
+    auto parent = mParent.promote();
+    if (parent != nullptr) {
+        parent->stopInjectionImpl();
+    }
+}
+
+// ----------------------------------------------------------------------------
+//                  CameraInjectionSession
+// ----------------------------------------------------------------------------
+
+binder::Status CameraService::CameraInjectionSession::stopInjection() {
+    Mutex::Autolock lock(mInjectionSessionLock);
+    auto parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("CameraInjectionSession: Parent is gone");
+        return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_SERVICE,
+                "Camera service encountered error");
+    }
+    parent->stopInjectionImpl();
+    return binder::Status::ok();
 }
 
 // ----------------------------------------------------------------------------
@@ -3520,6 +3873,28 @@
     return locked;
 }
 
+void CameraService::cacheDump() {
+    if (mMemFd != -1) {
+        const Vector<String16> args;
+        ATRACE_CALL();
+        // Acquiring the service lock here avoids a deadlock, since cacheDump
+        // will not be called during the second disconnect.
+        Mutex::Autolock lock(mServiceLock);
+
+        Mutex::Autolock l(mCameraStatesLock);
+        // Start collecting the info for open sessions and store it in temp file.
+        for (const auto& state : mCameraStates) {
+            String8 cameraId = state.first;
+            auto clientDescriptor = mActiveClientManager.get(cameraId);
+            if (clientDescriptor != nullptr) {
+                dprintf(mMemFd, "== Camera device %s dynamic info: ==\n", cameraId.string());
+                // Log the current open session info before device is disconnected.
+                dumpOpenSessionClientLogs(mMemFd, args, cameraId);
+            }
+        }
+    }
+}
+
 status_t CameraService::dump(int fd, const Vector<String16>& args) {
     ATRACE_CALL();
 
@@ -3586,21 +3961,10 @@
 
         auto clientDescriptor = mActiveClientManager.get(cameraId);
         if (clientDescriptor != nullptr) {
-            dprintf(fd, "  Device %s is open. Client instance dump:\n",
-                    cameraId.string());
-            dprintf(fd, "    Client priority score: %d state: %d\n",
-                    clientDescriptor->getPriority().getScore(),
-                    clientDescriptor->getPriority().getState());
-            dprintf(fd, "    Client PID: %d\n", clientDescriptor->getOwnerId());
-
-            auto client = clientDescriptor->getValue();
-            dprintf(fd, "    Client package: %s\n",
-                    String8(client->getPackageName()).string());
-
-            client->dumpClient(fd, args);
+            // log the current open session info
+            dumpOpenSessionClientLogs(fd, args, cameraId);
         } else {
-            dprintf(fd, "  Device %s is closed, no client instance\n",
-                    cameraId.string());
+            dumpClosedSessionClientLogs(fd, cameraId);
         }
 
     }
@@ -3657,9 +4021,55 @@
             }
         }
     }
+
+    bool serviceLocked = tryLock(mServiceLock);
+
+    // Dump info from previous open sessions.
+    // Reposition the offset to beginning of the file before reading
+
+    if ((mMemFd >= 0) && (lseek(mMemFd, 0, SEEK_SET) != -1)) {
+        dprintf(fd, "\n**********Dumpsys from previous open session**********\n");
+        ssize_t size_read;
+        char buf[4096];
+        // Read data from the temp file in small chunks and write it to fd.
+        while ((size_read = read(mMemFd, buf, (sizeof(buf) - 1))) > 0) {
+            write(fd, buf, size_read);
+        }
+        if (size_read == -1) {
+            ALOGE("%s: Error during reading the file: %s", __FUNCTION__, sFileName);
+        }
+        dprintf(fd, "\n**********End of Dumpsys from previous open session**********\n");
+    } else {
+        ALOGE("%s: Error during reading the file: %s", __FUNCTION__, sFileName);
+    }
+
+    if (serviceLocked) mServiceLock.unlock();
     return NO_ERROR;
 }
 
+void CameraService::dumpOpenSessionClientLogs(int fd,
+        const Vector<String16>& args, const String8& cameraId) {
+    auto clientDescriptor = mActiveClientManager.get(cameraId);
+    dprintf(fd, "  Device %s is open. Client instance dump:\n",
+        cameraId.string());
+    dprintf(fd, "    Client priority score: %d state: %d\n",
+        clientDescriptor->getPriority().getScore(),
+        clientDescriptor->getPriority().getState());
+    dprintf(fd, "    Client PID: %d\n", clientDescriptor->getOwnerId());
+
+    auto client = clientDescriptor->getValue();
+    dprintf(fd, "    Client package: %s\n",
+        String8(client->getPackageName()).string());
+
+    client->dumpClient(fd, args);
+}
+
+void CameraService::dumpClosedSessionClientLogs(int fd, const String8& cameraId) {
+    dprintf(fd, "  Device %s is closed, no client instance\n",
+            cameraId.string());
+}
+
 void CameraService::dumpEventLog(int fd) {
     dprintf(fd, "\n== Camera service events log (most recent at top): ==\n");
 
@@ -3739,21 +4149,14 @@
         ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.string());
         return;
     }
-    bool supportsHAL3 = false;
-    // supportsCameraApi also holds mInterfaceMutex, we can't call it in the
-    // HIDL onStatusChanged wrapper call (we'll hold mStatusListenerLock and
-    // mInterfaceMutex together, which can lead to deadlocks)
-    binder::Status sRet =
-            supportsCameraApi(String16(cameraId), hardware::ICameraService::API_VERSION_2,
-                    &supportsHAL3);
-    if (!sRet.isOk()) {
-        ALOGW("%s: Failed to determine if device supports HAL3 %s, supportsCameraApi call failed",
-                __FUNCTION__, cameraId.string());
-        return;
-    }
+
+    // Collect the logical cameras without holding mStatusLock in updateStatus,
+    // as that can lead to a deadlock (b/162192331).
+    auto logicalCameraIds = getLogicalCameras(cameraId);
     // Update the status for this camera state, then send the onStatusChangedCallbacks to each
     // of the listeners with both the mStatusLock and mStatusListenerLock held
-    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind, &supportsHAL3]
+    state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
+                        &logicalCameraIds]
             (const String8& cameraId, StatusInternal status) {
 
             if (status != StatusInternal::ENUMERATING) {
@@ -3767,21 +4170,21 @@
                             TorchModeStatus::AVAILABLE_OFF :
                             TorchModeStatus::NOT_AVAILABLE;
                     if (torchStatus != newTorchStatus) {
-                        onTorchStatusChangedLocked(cameraId, newTorchStatus);
+                        onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
                     }
                 }
             }
 
             Mutex::Autolock lock(mStatusListenerLock);
-
-            notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId, deviceKind);
+            notifyPhysicalCameraStatusLocked(mapToInterface(status), String16(cameraId),
+                    logicalCameraIds, deviceKind);
 
             for (auto& listener : mListenerList) {
                 bool isVendorListener = listener->isVendorListener();
                 if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
                         listener->getListenerPid(), listener->getListenerUid()) ||
-                        (isVendorListener && !supportsHAL3)) {
-                    ALOGV("Skipping discovery callback for system-only camera/HAL1 device %s",
+                        isVendorListener) {
+                    ALOGV("Skipping discovery callback for system-only camera device %s",
                             cameraId.c_str());
                     continue;
                 }
@@ -3857,14 +4260,6 @@
     onStatusUpdatedLocked(cameraId, status);
 }
 
-void CameraService::updateProxyDeviceState(int newState,
-        const String8& cameraId, int facing, const String16& clientName, int apiLevel) {
-    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
-    if (proxyBinder == nullptr) return;
-    String16 id(cameraId);
-    proxyBinder->notifyCameraState(id, newState, facing, clientName, apiLevel);
-}
-
 status_t CameraService::getTorchStatusLocked(
         const String8& cameraId,
         TorchModeStatus *status) const {
@@ -3892,8 +4287,9 @@
     return OK;
 }
 
-void CameraService::notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId,
-        SystemCameraKind deviceKind) {
+std::list<String16> CameraService::getLogicalCameras(
+        const String8& physicalCameraId) {
+    std::list<String16> retList;
     Mutex::Autolock lock(mCameraStatesLock);
     for (const auto& state : mCameraStates) {
         std::vector<std::string> physicalCameraIds;
@@ -3901,26 +4297,39 @@
             // This is not a logical multi-camera.
             continue;
         }
-        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), cameraId.c_str())
+        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
                 == physicalCameraIds.end()) {
             // cameraId is not a physical camera of this logical multi-camera.
             continue;
         }
 
-        String16 id16(state.first), physicalId16(cameraId);
+        retList.emplace_back(String16(state.first));
+    }
+    return retList;
+}
+
+void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
+        const String16& physicalCameraId, const std::list<String16>& logicalCameraIds,
+        SystemCameraKind deviceKind) {
+    // mStatusListenerLock is expected to be locked
+    for (const auto& logicalCameraId : logicalCameraIds) {
         for (auto& listener : mListenerList) {
+            // Note: we check only the deviceKind of the physical camera id,
+            // since logical camera ids and their physical camera ids are
+            // guaranteed to have the same system camera kind.
             if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
                     listener->getListenerPid(), listener->getListenerUid())) {
                 ALOGV("Skipping discovery callback for system-only camera device %s",
-                        cameraId.c_str());
+                        String8(physicalCameraId).c_str());
                 continue;
             }
             listener->getListener()->onPhysicalCameraStatusChanged(status,
-                    id16, physicalId16);
+                    logicalCameraId, physicalCameraId);
         }
     }
 }
 
+
 void CameraService::blockClientsForUid(uid_t uid) {
     const auto clients = mActiveClientManager.getAll();
     for (auto& current : clients) {
@@ -3963,6 +4372,12 @@
         return handleSetRotateAndCrop(args);
     } else if (args.size() >= 1 && args[0] == String16("get-rotate-and-crop")) {
         return handleGetRotateAndCrop(out);
+    } else if (args.size() >= 2 && args[0] == String16("set-image-dump-mask")) {
+        return handleSetImageDumpMask(args);
+    } else if (args.size() >= 1 && args[0] == String16("get-image-dump-mask")) {
+        return handleGetImageDumpMask(out);
+    } else if (args.size() >= 2 && args[0] == String16("set-camera-mute")) {
+        return handleSetCameraMute(args);
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
         return NO_ERROR;
@@ -4062,6 +4477,53 @@
     return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
 }
 
+status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
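+    // Parse and range-check the mask argument; per printHelp, only 0 (off) and
+    // 1 (dump JPEG) are currently accepted.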
+    char *endPtr;
+    errno = 0;
+    String8 maskString8 = String8(args[1]);
+    long maskValue = strtol(maskString8.c_str(), &endPtr, 10);
+
+    if (errno != 0) return BAD_VALUE;
+    if (endPtr != maskString8.c_str() + maskString8.size()) return BAD_VALUE;
+    if (maskValue < 0 || maskValue > 1) return BAD_VALUE;
+
+    Mutex::Autolock lock(mServiceLock);
+
+    mImageDumpMask = maskValue;
+
+    return OK;
+}
+
+status_t CameraService::handleGetImageDumpMask(int out) {
+    Mutex::Autolock lock(mServiceLock);
+
+    return dprintf(out, "Image dump mask: %d\n", mImageDumpMask);
+}
+
+status_t CameraService::handleSetCameraMute(const Vector<String16>& args) {
+    errno = 0;
+    int muteValue = strtol(String8(args[1]), nullptr, 10);
+    if (errno != 0) return BAD_VALUE;
+
+    if (muteValue < 0 || muteValue > 1) return BAD_VALUE;
+    Mutex::Autolock lock(mServiceLock);
+
+    mOverrideCameraMuteMode = (muteValue == 1);
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (basicClient->supportsCameraMute()) {
+                    basicClient->setCameraMute(mOverrideCameraMuteMode);
+                }
+            }
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraService::printHelp(int out) {
     return dprintf(out, "Camera service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
@@ -4070,6 +4532,10 @@
         "  set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
         "      Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
         "  get-rotate-and-crop returns the current override rotate-and-crop value\n"
+        "  set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
+        "      Valid values 0=OFF, 1=ON for JPEG\n"
+        "  get-image-dump-mask returns the current image-dump-mask value\n"
+        "  set-camera-mute <0/1> enable or disable camera muting\n"
         "  help print this message\n");
 }
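+
+// Example shell usage for the commands above (illustrative; assumes the service
+// is registered under the name "media.camera" as in AOSP):
+//   adb shell cmd media.camera set-image-dump-mask 1
+//   adb shell cmd media.camera get-image-dump-mask
+//   adb shell cmd media.camera set-camera-mute 1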
 
@@ -4094,4 +4560,10 @@
     return mode;
 }
 
+void CameraService::stopInjectionImpl() {
+    mInjectionStatusListener->removeListener();
+
+    // TODO: Implement the stop injection function.
+}
+
 }; // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 4321201..9021170 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -20,7 +20,8 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/ICameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraInjectionSession.h>
+#include <android/hardware/camera2/ICameraInjectionCallback.h>
 
 #include <cutils/multiuser.h>
 #include <utils/Vector.h>
@@ -49,8 +50,10 @@
 
 #include <set>
 #include <string>
+#include <list>
 #include <map>
 #include <memory>
+#include <optional>
 #include <utility>
 #include <unordered_map>
 #include <unordered_set>
@@ -69,7 +72,6 @@
     public virtual CameraProviderManager::StatusListener
 {
     friend class BinderService<CameraService>;
-    friend class CameraClient;
     friend class CameraOfflineSessionClient;
 public:
     class Client;
@@ -119,7 +121,7 @@
     virtual binder::Status     getCameraInfo(int cameraId,
             hardware::CameraInfo* cameraInfo);
     virtual binder::Status     getCameraCharacteristics(const String16& cameraId,
-            CameraMetadata* cameraInfo);
+            int targetSdkVersion, CameraMetadata* cameraInfo);
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -129,20 +131,14 @@
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const String16& clientPackageName,
-            int32_t clientUid, int clientPid,
-            /*out*/
-            sp<hardware::ICamera>* device);
-
-    virtual binder::Status     connectLegacy(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, int32_t halVersion,
-            const String16& clientPackageName, int32_t clientUid,
+            int32_t clientUid, int clientPid, int targetSdkVersion,
             /*out*/
             sp<hardware::ICamera>* device);
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
-            const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
-            int32_t clientUid,
+            const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
+            int32_t clientUid, int scoreOffset, int targetSdkVersion,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -158,7 +154,7 @@
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        /*out*/bool* supported);
+        int targetSdkVersion, /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
             int32_t cameraId,
@@ -173,6 +169,8 @@
 
     virtual binder::Status    notifyDeviceStateChange(int64_t newState);
 
+    virtual binder::Status    notifyDisplayConfigurationChange();
+
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual binder::Status    supportsCameraApi(
             const String16& cameraId, int32_t apiVersion,
@@ -184,6 +182,13 @@
             /*out*/
             bool *isSupported);
 
+    virtual binder::Status injectCamera(
+            const String16& packageName, const String16& internalCamId,
+            const String16& externalCamId,
+            const sp<hardware::camera2::ICameraInjectionCallback>& callback,
+            /*out*/
+            sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -194,11 +199,15 @@
 
     binder::Status      addListenerHelper(const sp<hardware::ICameraServiceListener>& listener,
             /*out*/
-            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false);
+            std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false,
+            bool isProcessLocalTest = false);
 
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
 
+    // Stores current open session device info in temp file.
+    void cacheDump();
+
     // Register an offline client for a given active camera id
     status_t addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient);
 
@@ -216,20 +225,24 @@
     void                loadSoundLocked(sound_kind kind);
     void                decreaseSoundRef();
     void                increaseSoundRef();
-    /**
-     * Update the state of a given camera device (open/close/active/idle) with
-     * the camera proxy service in the system service
-     */
-    static void         updateProxyDeviceState(
-            int newState,
-            const String8& cameraId,
-            int facing,
-            const String16& clientName,
-            int apiLevel);
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    int                 getDeviceVersion(const String8& cameraId, int* facing = NULL);
+    int                 getDeviceVersion(const String8& cameraId, int* facing = nullptr,
+            int* orientation = nullptr);
+
+    /////////////////////////////////////////////////////////////////////
+    // Methods to be used in CameraService class tests only
+    //
+    // CameraService class test method only - clear static variables in the
+    // cameraserver process, which otherwise might affect multiple test runs.
+    void                clearCachedVariables();
+
+    // Add test listener, linkToDeath won't be called since this is for process
+    // local testing.
+    binder::Status    addListenerTest(const sp<hardware::ICameraServiceListener>& listener,
+            /*out*/
+            std::vector<hardware::CameraStatus>* cameraStatuses);
 
     /////////////////////////////////////////////////////////////////////
     // Shared utilities
@@ -239,6 +252,7 @@
     // CameraClient functionality
 
     class BasicClient : public virtual RefBase {
+    friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
                 const String8& monitorTags) = 0;
@@ -261,6 +275,12 @@
         // Return the package name for this client
         virtual String16 getPackageName() const;
 
+        // Return the camera facing for this client
+        virtual int getCameraFacing() const;
+
+        // Return the camera orientation for this client
+        virtual int getCameraOrientation() const;
+
         // Notify client about a fatal error
         virtual void notifyError(int32_t errorCode,
                 const CaptureResultExtras& resultExtras) = 0;
@@ -294,13 +314,20 @@
         // Override rotate-and-crop AUTO behavior
         virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
 
+        // Whether the client supports camera muting (black only output)
+        virtual bool supportsCameraMute() = 0;
+
+        // Set/reset camera mute
+        virtual status_t setCameraMute(bool enabled) = 0;
+
     protected:
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
                 const String16& clientPackageName,
-                const std::unique_ptr<String16>& clientFeatureId,
+                const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -317,8 +344,9 @@
         static sp<CameraService>        sCameraService;
         const String8                   mCameraIdStr;
         const int                       mCameraFacing;
+        const int                       mOrientation;
         String16                        mClientPackageName;
-        std::unique_ptr<String16>       mClientFeatureId;
+        std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
         const uid_t                     mClientUid;
         const pid_t                     mServicePid;
@@ -331,9 +359,18 @@
         // - The app-side Binder interface to receive callbacks from us
         sp<IBinder>                     mRemoteBinder;   // immutable after constructor
 
-        // permissions management
+        // Permissions management methods for camera lifecycle
+
+        // Notify rest of system/apps about camera opening, and check appops
         virtual status_t                startCameraOps();
+        // Notify rest of system/apps about camera starting to stream data, and confirm appops
+        virtual status_t                startCameraStreamingOps();
+        // Notify rest of system/apps about camera stopping streaming data
+        virtual status_t                finishCameraStreamingOps();
+        // Notify rest of system/apps about camera closing
         virtual status_t                finishCameraOps();
+        // Handle errors for start/checkOps
+        virtual status_t                handleAppOpMode(int32_t mode);
 
         std::unique_ptr<AppOpsManager>  mAppOpsManager = nullptr;
 
@@ -348,9 +385,12 @@
         }; // class OpsCallback
 
         sp<OpsCallback> mOpsCallback;
-        // Track whether startCameraOps was called successfully, to avoid
-        // finishing what we didn't start.
+        // Track whether the appops check on camera open succeeded, to avoid
+        // finishing what we didn't start.
         bool            mOpsActive;
+        // Track whether startOps was called successfully on start of
+        // camera streaming.
+        bool            mOpsStreaming;
 
         // IAppOpsCallback interface, indirected through opListener
         virtual void opChanged(int32_t op, const String16& packageName);
@@ -390,10 +430,11 @@
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
                 const String16& clientPackageName,
-                const std::unique_ptr<String16>& clientFeatureId,
+                const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
+                int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
                 int servicePid);
@@ -414,6 +455,8 @@
         // Check what API level is used for this client. This is used to determine which
         // superclass this can be cast to.
         virtual bool canCastToApiClient(apiLevel level) const;
+
+        void setImageDumpMask(int /*mask*/) { }
     protected:
         // Initialized in constructor
 
@@ -470,14 +513,14 @@
          */
         static DescriptorPtr makeClientDescriptor(const String8& key, const sp<BasicClient>& value,
                 int32_t cost, const std::set<String8>& conflictingKeys, int32_t score,
-                int32_t ownerId, int32_t state);
+                int32_t ownerId, int32_t state, int oomScoreOffset);
 
         /**
          * Make a ClientDescriptor object wrapping the given BasicClient strong pointer with
          * values intialized from a prior ClientDescriptor.
          */
         static DescriptorPtr makeClientDescriptor(const sp<BasicClient>& value,
-                const CameraService::DescriptorPtr& partial);
+                const CameraService::DescriptorPtr& partial, int oomScoreOffset);
 
     }; // class CameraClientManager
 
@@ -651,6 +694,7 @@
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
+            bool isCameraPrivacyEnabled(userid_t userId);
 
             binder::Status onSensorPrivacyChanged(bool enabled);
 
@@ -663,6 +707,8 @@
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
             bool mRegistered;
+
+            bool hasCameraPrivacyFeature();
     };
 
     sp<UidPolicy> mUidPolicy;
@@ -696,6 +742,7 @@
     // Only call with with mServiceLock held.
     status_t handleEvictionsLocked(const String8& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
+        int scoreOffset,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial);
@@ -727,9 +774,10 @@
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, int halVersion, const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId, int clientUid, int clientPid,
-            apiLevel effectiveApiLevel, bool shimUpdateOnly, /*out*/sp<CLIENT>& device);
+            int api1CameraId, const String16& clientPackageName,
+            const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
+            apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
+            /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -743,6 +791,12 @@
     // Container for managing currently active application-layer clients
     CameraClientManager mActiveClientManager;
 
+    // Adds client logs during open session to the file pointed by fd.
+    void dumpOpenSessionClientLogs(int fd, const Vector<String16>& args, const String8& cameraId);
+
+    // Adds client logs during closed session to the file pointed by fd.
+    void dumpClosedSessionClientLogs(int fd, const String8& cameraId);
+
     // Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock
     std::map<String8, std::shared_ptr<CameraState>> mCameraStates;
 
@@ -788,7 +842,8 @@
      *
      * This method must be called with mServiceLock held.
      */
-    void finishConnectLocked(const sp<BasicClient>& client, const DescriptorPtr& desc);
+    void finishConnectLocked(const sp<BasicClient>& client, const DescriptorPtr& desc,
+            int oomScoreOffset);
 
     /**
      * Returns the underlying camera Id string mapped to a camera id int
@@ -889,6 +944,16 @@
      */
     void updateCameraNumAndIds();
 
+    /**
+     * Filter camera characteristics for S Performance class primary cameras.
+     * mServiceLock should be locked.
+     */
+    void filterSPerfClassCharacteristicsLocked();
+
+    // File descriptor to temp file used for caching previous open
+    // session dumpsys info.
+    int mMemFd;
+
     // Number of camera devices (excluding hidden secure cameras)
     int                 mNumberOfCameras;
     // Number of camera devices (excluding hidden secure cameras and
@@ -897,6 +962,7 @@
 
     std::vector<std::string> mNormalDeviceIds;
     std::vector<std::string> mNormalDeviceIdsWithoutSystemCamera;
+    std::set<std::string> mPerfClassPrimaryCameraIds;
 
     // sounds
     sp<MediaPlayer>     newMediaPlayer(const char *file);
@@ -918,7 +984,10 @@
                       mIsVendorListener(isVendorClient),
                       mOpenCloseCallbackAllowed(openCloseCallbackAllowed) { }
 
-            status_t initialize() {
+            status_t initialize(bool isProcessLocalTest) {
+                if (isProcessLocalTest) {
+                    return OK;
+                }
                 return IInterface::asBinder(mListener)->linkToDeath(this);
             }
 
@@ -994,7 +1063,8 @@
     // handle torch mode status change and invoke callbacks. mTorchStatusMutex
     // should be locked.
     void onTorchStatusChangedLocked(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus newStatus);
+            hardware::camera::common::V1_0::TorchModeStatus newStatus,
+            SystemCameraKind systemCameraKind);
 
     // get a camera's torch status. mTorchStatusMutex should be locked.
     status_t getTorchStatusLocked(const String8 &cameraId,
@@ -1005,8 +1075,13 @@
             hardware::camera::common::V1_0::TorchModeStatus status);
 
     // notify physical camera status when the physical camera is public.
-    void notifyPhysicalCameraStatusLocked(int32_t status, const String8& cameraId,
-            SystemCameraKind deviceKind);
+    // Expects mStatusListenerLock to be locked.
+    void notifyPhysicalCameraStatusLocked(int32_t status, const String16& physicalCameraId,
+            const std::list<String16>& logicalCameraIds, SystemCameraKind deviceKind);
+
+    // get list of logical cameras which are backed by physicalCameraId
+    std::list<String16> getLogicalCameras(const String8& physicalCameraId);
+
 
     // IBinder::DeathRecipient implementation
     virtual void        binderDied(const wp<IBinder> &who);
@@ -1047,6 +1122,15 @@
     // Get the rotate-and-crop AUTO override behavior
     status_t handleGetRotateAndCrop(int out);
 
+    // Set the mask for image dump to disk
+    status_t handleSetImageDumpMask(const Vector<String16>& args);
+
+    // Get the mask for image dump to disk
+    status_t handleGetImageDumpMask(int out);
+
+    // Set the camera mute state
+    status_t handleSetCameraMute(const Vector<String16>& args);
+
     // Prints the shell command help
     status_t printHelp(int out);
 
@@ -1057,9 +1141,9 @@
 
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
-            const std::unique_ptr<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int clientPid, uid_t clientUid, int servicePid, int halVersion,
-            int deviceVersion, apiLevel effectiveApiLevel,
+            const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
+            int deviceVersion, apiLevel effectiveApiLevel, bool overrideForPerfClass,
             /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
@@ -1069,16 +1153,10 @@
     static StatusInternal mapToInternal(hardware::camera::common::V1_0::CameraDeviceStatus status);
     static int32_t mapToInterface(StatusInternal status);
 
-    // Guard mCameraServiceProxy
-    static Mutex sProxyMutex;
-    // Cached interface to the camera service proxy in system service
-    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
-
-    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
-    static void pingCameraServiceProxy();
 
     void broadcastTorchModeStatus(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus status);
+            hardware::camera::common::V1_0::TorchModeStatus status,
+            SystemCameraKind systemCameraKind);
 
     void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
 
@@ -1093,8 +1171,54 @@
     // Aggreated audio restriction mode for all camera clients
     int32_t mAudioRestriction;
 
-    // Current override rotate-and-crop mode
+    // Current override cmd rotate-and-crop mode; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+
+    // Current image dump mask
+    uint8_t mImageDumpMask = 0;
+
+    // Current camera mute mode
+    bool mOverrideCameraMuteMode = false;
+
+    /**
+     * A listener class that implements the IBinder::DeathRecipient interface. It is
+     * used to deliver error states reported by external camera injection, and lets
+     * the camera service stop the injection when the binder signals process death.
+     */
+    class InjectionStatusListener : public virtual IBinder::DeathRecipient {
+        public:
+            InjectionStatusListener(sp<CameraService> parent) : mParent(parent) {}
+
+            void addListener(const sp<hardware::camera2::ICameraInjectionCallback>& callback);
+            void removeListener();
+            void notifyInjectionError(int errorCode);
+
+            // IBinder::DeathRecipient implementation
+            virtual void binderDied(const wp<IBinder>& who);
+
+        private:
+            Mutex mListenerLock;
+            wp<CameraService> mParent;
+            sp<hardware::camera2::ICameraInjectionCallback> mCameraInjectionCallback;
+    };
+
+    sp<InjectionStatusListener> mInjectionStatusListener;
+
+    /**
+     * A class that implements the hardware::camera2::BnCameraInjectionSession interface
+     */
+    class CameraInjectionSession : public hardware::camera2::BnCameraInjectionSession {
+        public:
+            CameraInjectionSession(sp<CameraService> parent) : mParent(parent) {}
+            virtual ~CameraInjectionSession() {}
+            binder::Status stopInjection() override;
+
+        private:
+            Mutex mInjectionSessionLock;
+            wp<CameraService> mParent;
+    };
+
+    void stopInjectionImpl();
 };
 
 } // namespace android
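The InjectionStatusListener declared above follows the standard libbinder
DeathRecipient pattern: the callback's binder is watched with linkToDeath() so
the service can tear down injection if the injecting process dies. Its real
implementation lives in CameraService.cpp and is not part of this hunk; the
sketch below only illustrates the usual wiring, assuming the class declaration
shown above and the usual libbinder/libutils headers, and its bodies are
assumptions rather than the actual patch code.

    // Hedged sketch of the typical DeathRecipient wiring; not taken from this patch.
    void CameraService::InjectionStatusListener::addListener(
            const sp<hardware::camera2::ICameraInjectionCallback>& callback) {
        Mutex::Autolock lock(mListenerLock);
        if (mCameraInjectionCallback != nullptr) {
            ALOGW("%s: injection callback already registered", __FUNCTION__);
            return;
        }
        // Watch the remote process; binderDied() fires if it goes away.
        IInterface::asBinder(callback)->linkToDeath(this);
        mCameraInjectionCallback = callback;
    }

    void CameraService::InjectionStatusListener::removeListener() {
        Mutex::Autolock lock(mListenerLock);
        if (mCameraInjectionCallback == nullptr) {
            return;
        }
        IInterface::asBinder(mCameraInjectionCallback)->unlinkToDeath(this);
        mCameraInjectionCallback.clear();
    }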
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index 6fdac68..ca6cc58 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -1,7 +1,12 @@
 {
   "presubmit": [
     {
-       "name": "cameraservice_test"
+      "name": "cameraservice_test"
+    }
+  ],
+  "imports": [
+    {
+      "path": "frameworks/av/camera"
     }
   ]
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index b043c0b..944b8ab 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -34,6 +34,7 @@
 #include "api1/client2/CallbackProcessor.h"
 #include "api1/client2/ZslProcessor.h"
 #include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
@@ -50,16 +51,18 @@
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid):
+        int servicePid,
+        bool overrideForPerfClass):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                cameraDeviceId, api1CameraId, cameraFacing,
-                clientPid, clientUid, servicePid),
+                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass),
         mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
@@ -75,7 +78,8 @@
 bool Camera2Client::isZslEnabledInStillTemplate() {
     bool zslEnabled = false;
     CameraMetadata stillTemplate;
-    status_t res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE, &stillTemplate);
+    status_t res = mDevice->createDefaultRequest(
+            camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
     if (res == OK) {
         camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
         if (enableZsl.count == 1) {
@@ -396,6 +400,7 @@
 
 binder::Status Camera2Client::disconnect() {
     ATRACE_CALL();
+    nsecs_t startTime = systemTime();
     Mutex::Autolock icl(mBinderSerializationLock);
 
     binder::Status res = binder::Status::ok();
@@ -457,6 +462,9 @@
 
     CameraService::Client::disconnect();
 
+    int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
+    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+
     return res;
 }
 
@@ -929,6 +937,7 @@
         return res;
     }
 
+    mCallbackProcessor->unpauseCallback();
     params.state = Parameters::PREVIEW;
     return OK;
 }
@@ -963,6 +972,7 @@
             FALLTHROUGH_INTENDED;
         case Parameters::RECORD:
         case Parameters::PREVIEW:
+            mCallbackProcessor->pauseCallback();
             syncWithDevice();
             // Due to flush a camera device sync is not a sufficient
             // guarantee that the current client parameters are
@@ -2290,6 +2300,14 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+bool Camera2Client::supportsCameraMute() {
+    return mDevice->supportsCameraMute();
+}
+
+status_t Camera2Client::setCameraMute(bool enabled) {
+    return mDevice->setCameraMute(enabled);
+}
+
 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
     int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
     if (activeRequestId != 0) {
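The disconnect() hunk earlier in this file brackets the teardown with
systemTime() and reports the elapsed time through
CameraServiceProxyWrapper::logClose(). The snippet below is a minimal,
self-contained illustration of that timing idiom using only utils/Timers.h;
measureCloseLatencyMs() and doTeardown() are placeholder names for this
sketch, not symbols from the patch.

    #include <utils/Timers.h>  // systemTime(), ns2ms(), nsecs_t

    // Stand-in for the camera teardown whose latency is being measured.
    static void doTeardown() {}

    static int32_t measureCloseLatencyMs() {
        nsecs_t startTime = systemTime();
        doTeardown();
        // ns2ms() converts nanoseconds to milliseconds, as the patch does before logClose().
        return static_cast<int32_t>(ns2ms(systemTime() - startTime));
    }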
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 03ca44a..64ab8ff 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -87,6 +87,9 @@
     virtual int32_t         getGlobalAudioRestriction();
     virtual status_t        setRotateAndCropOverride(uint8_t rotateAndCrop);
 
+    virtual bool            supportsCameraMute();
+    virtual status_t        setCameraMute(bool enabled);
+
     /**
      * Interface used by CameraService
      */
@@ -94,13 +97,15 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
-            int servicePid);
+            int servicePid,
+            bool overrideForPerfClass);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
deleted file mode 100644
index 892996c..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ /dev/null
@@ -1,1208 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "CameraClient"
-//#define LOG_NDEBUG 0
-
-#include <cutils/atomic.h>
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <media/hardware/HardwareAPI.h>
-
-#include "api1/CameraClient.h"
-#include "device1/CameraHardwareInterface.h"
-#include "CameraService.h"
-#include "utils/CameraThreadState.h"
-
-namespace android {
-
-#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
-#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
-
-CameraClient::CameraClient(const sp<CameraService>& cameraService,
-        const sp<hardware::ICameraClient>& cameraClient,
-        const String16& clientPackageName, const std::unique_ptr<String16>& clientFeatureId,
-        int cameraId, int cameraFacing,
-        int clientPid, int clientUid,
-        int servicePid):
-        Client(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                String8::format("%d", cameraId), cameraId, cameraFacing, clientPid,
-                clientUid, servicePid)
-{
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
-
-    mHardware = NULL;
-    mMsgEnabled = 0;
-    mSurface = 0;
-    mPreviewWindow = 0;
-    mDestructionStarted = false;
-
-    // Callback is disabled by default
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mPlayShutterSound = true;
-    LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId);
-}
-
-status_t CameraClient::initialize(sp<CameraProviderManager> manager,
-        const String8& /*monitorTags*/) {
-    int callingPid = CameraThreadState::getCallingPid();
-    status_t res;
-
-    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);
-
-    // Verify ops permissions
-    res = startCameraOps();
-    if (res != OK) {
-        return res;
-    }
-
-    char camera_device_name[10];
-    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
-
-    mHardware = new CameraHardwareInterface(camera_device_name);
-    res = mHardware->initialize(manager);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        mHardware.clear();
-        return res;
-    }
-
-    mHardware->setCallbacks(notifyCallback,
-            dataCallback,
-            dataCallbackTimestamp,
-            handleCallbackTimestampBatch,
-            (void *)(uintptr_t)mCameraId);
-
-    // Enable zoom, error, focus, and metadata messages by default
-    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
-                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);
-
-    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
-    return OK;
-}
-
-
-// tear down the client
-CameraClient::~CameraClient() {
-    mDestructionStarted = true;
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this);
-
-    disconnect();
-    LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this);
-}
-
-status_t CameraClient::dump(int fd, const Vector<String16>& args) {
-    return BasicClient::dump(fd, args);
-}
-
-status_t CameraClient::dumpClient(int fd, const Vector<String16>& args) {
-    const size_t SIZE = 256;
-    char buffer[SIZE];
-
-    size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n",
-            mCameraId,
-            (getRemoteCallback() != NULL ?
-                    IInterface::asBinder(getRemoteCallback()).get() : NULL),
-            mClientUid);
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    len = snprintf(buffer, SIZE, "Latest set parameters:\n");
-    len = (len > SIZE - 1) ? SIZE - 1 : len;
-    write(fd, buffer, len);
-
-    mLatestSetParameters.dump(fd, args);
-
-    const char *enddump = "\n\n";
-    write(fd, enddump, strlen(enddump));
-
-    sp<CameraHardwareInterface> hardware = mHardware;
-    if (hardware != nullptr) {
-        return hardware->dump(fd, args);
-    }
-    ALOGI("%s: camera device closed already, skip dumping", __FUNCTION__);
-    return OK;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::checkPid() const {
-    int callingPid = CameraThreadState::getCallingPid();
-    if (callingPid == mClientPid) return NO_ERROR;
-
-    ALOGW("attempt to use a locked camera from a different process"
-         " (old pid %d, new pid %d)", mClientPid, callingPid);
-    return EBUSY;
-}
-
-status_t CameraClient::checkPidAndHardware() const {
-    if (mHardware == 0) {
-        ALOGE("attempt to use a camera after disconnect() (pid %d)",
-              CameraThreadState::getCallingPid());
-        return INVALID_OPERATION;
-    }
-    status_t result = checkPid();
-    if (result != NO_ERROR) return result;
-    return NO_ERROR;
-}
-
-status_t CameraClient::lock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("lock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // lock camera to this client if the camera is unlocked
-    if (mClientPid == 0) {
-        mClientPid = callingPid;
-        return NO_ERROR;
-    }
-
-    // returns NO_ERROR if the client already owns the camera, EBUSY otherwise
-    return checkPid();
-}
-
-status_t CameraClient::unlock() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("unlock (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    // allow anyone to use camera (after they lock the camera)
-    status_t result = checkPid();
-    if (result == NO_ERROR) {
-        if (mHardware->recordingEnabled()) {
-            ALOGE("Not allowed to unlock camera during recording.");
-            return INVALID_OPERATION;
-        }
-        mClientPid = 0;
-        LOG1("clear mRemoteCallback (pid %d)", callingPid);
-        // we need to remove the reference to ICameraClient so that when the app
-        // goes away, the reference count goes to 0.
-        mRemoteCallback.clear();
-    }
-    return result;
-}
-
-// connect a new client to the camera
-status_t CameraClient::connect(const sp<hardware::ICameraClient>& client) {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("connect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    if (mClientPid != 0 && checkPid() != NO_ERROR) {
-        ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)",
-                mClientPid, callingPid);
-        return EBUSY;
-    }
-
-    if (mRemoteCallback != 0 &&
-        (IInterface::asBinder(client) == IInterface::asBinder(mRemoteCallback))) {
-        LOG1("Connect to the same client");
-        return NO_ERROR;
-    }
-
-    mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
-    mClientPid = callingPid;
-    mRemoteCallback = client;
-
-    LOG1("connect X (pid %d)", callingPid);
-    return NO_ERROR;
-}
-
-static void disconnectWindow(const sp<ANativeWindow>& window) {
-    if (window != 0) {
-        status_t result = native_window_api_disconnect(window.get(),
-                NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result),
-                    result);
-        }
-    }
-}
-
-binder::Status CameraClient::disconnect() {
-    int callingPid = CameraThreadState::getCallingPid();
-    LOG1("disconnect E (pid %d)", callingPid);
-    Mutex::Autolock lock(mLock);
-
-    binder::Status res = binder::Status::ok();
-    // Allow both client and the cameraserver to disconnect at all times
-    if (callingPid != mClientPid && callingPid != mServicePid) {
-        ALOGW("different client - don't disconnect");
-        return res;
-    }
-
-    // Make sure disconnect() is done once and once only, whether it is called
-    // from the user directly, or called by the destructor.
-    if (mHardware == 0) return res;
-
-    LOG1("hardware teardown");
-    // Before destroying mHardware, we must make sure it's in the
-    // idle state.
-    // Turn off all messages.
-    disableMsgType(CAMERA_MSG_ALL_MSGS);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mHardware->cancelPicture();
-    // Release the hardware resources.
-    mHardware->release();
-
-    // Release the held ANativeWindow resources.
-    if (mPreviewWindow != 0) {
-        disconnectWindow(mPreviewWindow);
-        mPreviewWindow = 0;
-        mHardware->setPreviewWindow(mPreviewWindow);
-    }
-    mHardware.clear();
-
-    CameraService::Client::disconnect();
-
-    LOG1("disconnect X (pid %d)", callingPid);
-
-    return res;
-}
-
-// ----------------------------------------------------------------------------
-
-status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window) {
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    // return if no change in surface.
-    if (binder == mSurface) {
-        return NO_ERROR;
-    }
-
-    if (window != 0) {
-        result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
-        if (result != NO_ERROR) {
-            ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result),
-                    result);
-            return result;
-        }
-    }
-
-    // If preview has been already started, register preview buffers now.
-    if (mHardware->previewEnabled()) {
-        if (window != 0) {
-            mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-            mHardware->setPreviewTransform(mOrientation);
-            result = mHardware->setPreviewWindow(window);
-        }
-    }
-
-    if (result == NO_ERROR) {
-        // Everything has succeeded.  Disconnect the old window and remember the
-        // new window.
-        disconnectWindow(mPreviewWindow);
-        mSurface = binder;
-        mPreviewWindow = window;
-    } else {
-        // Something went wrong after we connected to the new window, so
-        // disconnect here.
-        disconnectWindow(window);
-    }
-
-    return result;
-}
-
-// set the buffer consumer that the preview will use
-status_t CameraClient::setPreviewTarget(
-        const sp<IGraphicBufferProducer>& bufferProducer) {
-    LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(),
-            CameraThreadState::getCallingPid());
-
-    sp<IBinder> binder;
-    sp<ANativeWindow> window;
-    if (bufferProducer != 0) {
-        binder = IInterface::asBinder(bufferProducer);
-        // Using controlledByApp flag to ensure that the buffer queue remains in
-        // async mode for the old camera API, where many applications depend
-        // on that behavior.
-        window = new Surface(bufferProducer, /*controlledByApp*/ true);
-    }
-    return setPreviewWindow(binder, window);
-}
-
-// set the preview callback flag to affect how the received frames from
-// preview are handled.
-void CameraClient::setPreviewCallbackFlag(int callback_flag) {
-    LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-    mPreviewCallbackFlag = callback_flag;
-    if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    } else {
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-}
-
-status_t CameraClient::setPreviewCallbackTarget(
-        const sp<IGraphicBufferProducer>& callbackProducer) {
-    (void)callbackProducer;
-    ALOGE("%s: Unimplemented!", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
-// start preview mode
-status_t CameraClient::startPreview() {
-    LOG1("startPreview (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_PREVIEW_MODE);
-}
-
-// start recording mode
-status_t CameraClient::startRecording() {
-    LOG1("startRecording (pid %d)", CameraThreadState::getCallingPid());
-    return startCameraMode(CAMERA_RECORDING_MODE);
-}
-
-// start preview or recording
-status_t CameraClient::startCameraMode(camera_mode mode) {
-    LOG1("startCameraMode(%d)", mode);
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    switch(mode) {
-        case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                LOG1("mSurface is not set yet.");
-                // still able to start preview in this case.
-            }
-            return startPreviewMode();
-        case CAMERA_RECORDING_MODE:
-            if (mSurface == 0 && mPreviewWindow == 0) {
-                ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
-                return INVALID_OPERATION;
-            }
-            return startRecordingMode();
-        default:
-            return UNKNOWN_ERROR;
-    }
-}
-
-status_t CameraClient::startPreviewMode() {
-    LOG1("startPreviewMode");
-    status_t result = NO_ERROR;
-
-    // if preview has been enabled, nothing needs to be done
-    if (mHardware->previewEnabled()) {
-        return NO_ERROR;
-    }
-
-    if (mPreviewWindow != 0) {
-        mHardware->setPreviewScalingMode(
-            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-        mHardware->setPreviewTransform(mOrientation);
-    }
-    mHardware->setPreviewWindow(mPreviewWindow);
-    result = mHardware->startPreview();
-    if (result == NO_ERROR) {
-        sCameraService->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE,
-            mCameraIdStr, mCameraFacing, mClientPackageName,
-            hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    }
-    return result;
-}
-
-status_t CameraClient::startRecordingMode() {
-    LOG1("startRecordingMode");
-    status_t result = NO_ERROR;
-
-    // if recording has been enabled, nothing needs to be done
-    if (mHardware->recordingEnabled()) {
-        return NO_ERROR;
-    }
-
-    // if preview has not been started, start preview first
-    if (!mHardware->previewEnabled()) {
-        result = startPreviewMode();
-        if (result != NO_ERROR) {
-            return result;
-        }
-    }
-
-    // start recording mode
-    enableMsgType(CAMERA_MSG_VIDEO_FRAME);
-    sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    result = mHardware->startRecording();
-    if (result != NO_ERROR) {
-        ALOGE("mHardware->startRecording() failed with status %d", result);
-    }
-    return result;
-}
-
-// stop preview mode
-void CameraClient::stopPreview() {
-    LOG1("stopPreview (pid %d)", CameraThreadState::getCallingPid());
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-
-
-    disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    mHardware->stopPreview();
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-    mPreviewBuffer.clear();
-}
-
-// stop recording mode
-void CameraClient::stopRecording() {
-    LOG1("stopRecording (pid %d)", CameraThreadState::getCallingPid());
-    {
-        Mutex::Autolock lock(mLock);
-        if (checkPidAndHardware() != NO_ERROR) return;
-
-        disableMsgType(CAMERA_MSG_VIDEO_FRAME);
-        mHardware->stopRecording();
-        sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
-
-        mPreviewBuffer.clear();
-    }
-
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            mAvailableCallbackBuffers.clear();
-        }
-    }
-}
-
-// release a recording frame
-void CameraClient::releaseRecordingFrame(const sp<IMemory>& mem) {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return;
-    if (mem == nullptr) {
-        android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26164272",
-                CameraThreadState::getCallingUid(), nullptr, 0);
-        return;
-    }
-
-    mHardware->releaseRecordingFrame(mem);
-}
-
-void CameraClient::releaseRecordingFrameHandle(native_handle_t *handle) {
-    if (handle == nullptr) return;
-    Mutex::Autolock lock(mLock);
-    sp<IMemory> dataPtr;
-    {
-        Mutex::Autolock l(mAvailableCallbackBuffersLock);
-        if (!mAvailableCallbackBuffers.empty()) {
-            dataPtr = mAvailableCallbackBuffers.back();
-            mAvailableCallbackBuffers.pop_back();
-        }
-    }
-
-    if (dataPtr == nullptr) {
-        ALOGE("%s: %d: No callback buffer available. Dropping a native handle.", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-        ALOGE("%s: %d: Callback buffer size doesn't match VideoNativeHandleMetadata", __FUNCTION__,
-                __LINE__);
-        native_handle_close(handle);
-        native_handle_delete(handle);
-        return;
-    }
-
-    if (mHardware != nullptr) {
-        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-        metadata->eType = kMetadataBufferTypeNativeHandleSource;
-        metadata->pHandle = handle;
-        mHardware->releaseRecordingFrame(dataPtr);
-    }
-}
-
-void CameraClient::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
-    Mutex::Autolock lock(mLock);
-    bool disconnected = (mHardware == nullptr);
-    size_t n = handles.size();
-    std::vector<sp<IMemory>> frames;
-    if (!disconnected) {
-        frames.reserve(n);
-    }
-    bool error = false;
-    for (auto& handle : handles) {
-        sp<IMemory> dataPtr;
-        {
-            Mutex::Autolock l(mAvailableCallbackBuffersLock);
-            if (!mAvailableCallbackBuffers.empty()) {
-                dataPtr = mAvailableCallbackBuffers.back();
-                mAvailableCallbackBuffers.pop_back();
-            }
-        }
-
-        if (dataPtr == nullptr) {
-            ALOGE("%s: %d: No callback buffer available. Dropping frames.", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-            ALOGE("%s: %d: Callback buffer must be VideoNativeHandleMetadata", __FUNCTION__,
-                    __LINE__);
-            error = true;
-            break;
-        }
-
-        if (!disconnected) {
-            VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            metadata->eType = kMetadataBufferTypeNativeHandleSource;
-            metadata->pHandle = handle;
-            frames.push_back(dataPtr);
-        }
-    }
-
-    if (error) {
-        for (auto& handle : handles) {
-            native_handle_close(handle);
-            native_handle_delete(handle);
-        }
-    } else if (!disconnected) {
-        mHardware->releaseRecordingFrameBatch(frames);
-    }
-    return;
-}
-
-status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
-    LOG1("setVideoBufferMode: %d", videoBufferMode);
-    bool enableMetadataInBuffers = false;
-
-    if (videoBufferMode == VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA) {
-        enableMetadataInBuffers = true;
-    } else if (videoBufferMode != VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
-        ALOGE("%s: %d: videoBufferMode %d is not supported.", __FUNCTION__, __LINE__,
-                videoBufferMode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return UNKNOWN_ERROR;
-    }
-
-    return mHardware->storeMetaDataInBuffers(enableMetadataInBuffers);
-}
-
-bool CameraClient::previewEnabled() {
-    LOG1("previewEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->previewEnabled();
-}
-
-bool CameraClient::recordingEnabled() {
-    LOG1("recordingEnabled (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) return false;
-    return mHardware->recordingEnabled();
-}
-
-status_t CameraClient::autoFocus() {
-    LOG1("autoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->autoFocus();
-}
-
-status_t CameraClient::cancelAutoFocus() {
-    LOG1("cancelAutoFocus (pid %d)", CameraThreadState::getCallingPid());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    return mHardware->cancelAutoFocus();
-}
-
-// take a picture - image is returned in callback
-status_t CameraClient::takePicture(int msgType) {
-    LOG1("takePicture (pid %d): 0x%x", CameraThreadState::getCallingPid(), msgType);
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if ((msgType & CAMERA_MSG_RAW_IMAGE) &&
-        (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
-        ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY"
-                " cannot be both enabled");
-        return BAD_VALUE;
-    }
-
-    // We only accept picture related message types
-    // and ignore other types of messages for takePicture().
-    int picMsgType = msgType
-                        & (CAMERA_MSG_SHUTTER |
-                           CAMERA_MSG_POSTVIEW_FRAME |
-                           CAMERA_MSG_RAW_IMAGE |
-                           CAMERA_MSG_RAW_IMAGE_NOTIFY |
-                           CAMERA_MSG_COMPRESSED_IMAGE);
-
-    enableMsgType(picMsgType);
-
-    return mHardware->takePicture();
-}
-
-// set preview/capture parameters - key/value pairs
-status_t CameraClient::setParameters(const String8& params) {
-    LOG1("setParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    mLatestSetParameters = CameraParameters(params);
-    CameraParameters p(params);
-    return mHardware->setParameters(p);
-}
-
-// get preview/capture parameters - key/value pairs
-String8 CameraClient::getParameters() const {
-    Mutex::Autolock lock(mLock);
-    // The camera service can unconditionally get the parameters at all times
-    if (CameraThreadState::getCallingPid() != mServicePid && checkPidAndHardware() != NO_ERROR) {
-        return String8();
-    }
-
-    String8 params(mHardware->getParameters().flatten());
-    LOG1("getParameters (pid %d) (%s)", CameraThreadState::getCallingPid(), params.string());
-    return params;
-}
-
-// enable shutter sound
-status_t CameraClient::enableShutterSound(bool enable) {
-    LOG1("enableShutterSound (pid %d)", CameraThreadState::getCallingPid());
-
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (enable) {
-        mPlayShutterSound = true;
-        return OK;
-    }
-
-    mPlayShutterSound = false;
-    return OK;
-}
-
-status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
-    LOG1("sendCommand (pid %d)", CameraThreadState::getCallingPid());
-    int orientation;
-    Mutex::Autolock lock(mLock);
-    status_t result = checkPidAndHardware();
-    if (result != NO_ERROR) return result;
-
-    if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
-        // Mirror the preview if the camera is front-facing.
-        orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT);
-        if (orientation == -1) return BAD_VALUE;
-
-        if (mOrientation != orientation) {
-            mOrientation = orientation;
-            if (mPreviewWindow != 0) {
-                mHardware->setPreviewTransform(mOrientation);
-            }
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
-        switch (arg1) {
-            case 0:
-                return enableShutterSound(false);
-            case 1:
-                return enableShutterSound(true);
-            default:
-                return BAD_VALUE;
-        }
-        return OK;
-    } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
-        sCameraService->playSound(CameraService::SOUND_RECORDING_START);
-    } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
-        // Silently ignore this command
-        return INVALID_OPERATION;
-    } else if (cmd == CAMERA_CMD_PING) {
-        // If mHardware is 0, checkPidAndHardware will return error.
-        return OK;
-    }
-
-    return mHardware->sendCommand(cmd, arg1, arg2);
-}
-
-// ----------------------------------------------------------------------------
-
-void CameraClient::enableMsgType(int32_t msgType) {
-    android_atomic_or(msgType, &mMsgEnabled);
-    mHardware->enableMsgType(msgType);
-}
-
-void CameraClient::disableMsgType(int32_t msgType) {
-    android_atomic_and(~msgType, &mMsgEnabled);
-    mHardware->disableMsgType(msgType);
-}
-
-#define CHECK_MESSAGE_INTERVAL 10 // 10ms
-bool CameraClient::lockIfMessageWanted(int32_t msgType) {
-    int sleepCount = 0;
-    while (mMsgEnabled & msgType) {
-        if (mLock.tryLock() == NO_ERROR) {
-            if (sleepCount > 0) {
-                LOG1("lockIfMessageWanted(%d): waited for %d ms",
-                    msgType, sleepCount * CHECK_MESSAGE_INTERVAL);
-            }
-
-            // If messages are no longer enabled after acquiring lock, release and drop message
-            if ((mMsgEnabled & msgType) == 0) {
-                mLock.unlock();
-                break;
-            }
-
-            return true;
-        }
-        if (sleepCount++ == 0) {
-            LOG1("lockIfMessageWanted(%d): enter sleep", msgType);
-        }
-        usleep(CHECK_MESSAGE_INTERVAL * 1000);
-    }
-    ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType);
-    return false;
-}
-
-sp<CameraClient> CameraClient::getClientFromCookie(void* user) {
-    String8 cameraId = String8::format("%d", (int)(intptr_t) user);
-    auto clientDescriptor = sCameraService->mActiveClientManager.get(cameraId);
-    if (clientDescriptor != nullptr) {
-        return sp<CameraClient>{
-                static_cast<CameraClient*>(clientDescriptor->getValue().get())};
-    }
-    return sp<CameraClient>{nullptr};
-}
-
-// Callback messages can be dispatched to internal handlers or pass to our
-// client's callback functions, depending on the message type.
-//
-// notifyCallback:
-//      CAMERA_MSG_SHUTTER              handleShutter
-//      (others)                        c->notifyCallback
-// dataCallback:
-//      CAMERA_MSG_PREVIEW_FRAME        handlePreviewData
-//      CAMERA_MSG_POSTVIEW_FRAME       handlePostview
-//      CAMERA_MSG_RAW_IMAGE            handleRawPicture
-//      CAMERA_MSG_COMPRESSED_IMAGE     handleCompressedPicture
-//      (others)                        c->dataCallback
-// dataCallbackTimestamp
-//      (others)                        c->dataCallbackTimestamp
-
-void CameraClient::notifyCallback(int32_t msgType, int32_t ext1,
-        int32_t ext2, void* user) {
-    LOG2("notifyCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    switch (msgType) {
-        case CAMERA_MSG_SHUTTER:
-            // ext1 is the dimension of the yuv picture.
-            client->handleShutter();
-            break;
-        default:
-            client->handleGenericNotify(msgType, ext1, ext2);
-            break;
-    }
-}
-
-void CameraClient::dataCallback(int32_t msgType,
-        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
-    LOG2("dataCallback(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-    if (dataPtr == 0 && metadata == NULL) {
-        ALOGE("Null data returned in data callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
-        case CAMERA_MSG_PREVIEW_FRAME:
-            client->handlePreviewData(msgType, dataPtr, metadata);
-            break;
-        case CAMERA_MSG_POSTVIEW_FRAME:
-            client->handlePostview(dataPtr);
-            break;
-        case CAMERA_MSG_RAW_IMAGE:
-            client->handleRawPicture(dataPtr);
-            break;
-        case CAMERA_MSG_COMPRESSED_IMAGE:
-            client->handleCompressedPicture(dataPtr);
-            break;
-        default:
-            client->handleGenericData(msgType, dataPtr, metadata);
-            break;
-    }
-}
-
-void CameraClient::dataCallbackTimestamp(nsecs_t timestamp,
-        int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
-    LOG2("dataCallbackTimestamp(%d)", msgType);
-
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    if (dataPtr == 0) {
-        ALOGE("Null data returned in data with timestamp callback");
-        client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
-        return;
-    }
-
-    client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
-}
-
-void CameraClient::handleCallbackTimestampBatch(
-        int32_t msgType, const std::vector<HandleTimestampMessage>& msgs, void* user) {
-    LOG2("dataCallbackTimestampBatch");
-    sp<CameraClient> client = getClientFromCookie(user);
-    if (client.get() == nullptr) return;
-    if (!client->lockIfMessageWanted(msgType)) return;
-
-    sp<hardware::ICameraClient> c = client->mRemoteCallback;
-    client->mLock.unlock();
-    if (c != 0 && msgs.size() > 0) {
-        size_t n = msgs.size();
-        std::vector<nsecs_t> timestamps;
-        std::vector<native_handle_t*> handles;
-        timestamps.reserve(n);
-        handles.reserve(n);
-        for (auto& msg : msgs) {
-            native_handle_t* handle = nullptr;
-            if (msg.dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
-                ALOGE("%s: dataPtr does not contain VideoNativeHandleMetadata!", __FUNCTION__);
-                return;
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(msg.dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-
-            if (handle == nullptr) {
-                ALOGE("%s: VideoNativeHandleMetadata type mismatch or null handle passed!",
-                        __FUNCTION__);
-                return;
-            }
-            {
-                Mutex::Autolock l(client->mAvailableCallbackBuffersLock);
-                client->mAvailableCallbackBuffers.push_back(msg.dataPtr);
-            }
-            timestamps.push_back(msg.timestamp);
-            handles.push_back(handle);
-        }
-        c->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
-    }
-}
-
-// snapshot taken callback
-void CameraClient::handleShutter(void) {
-    if (mPlayShutterSound) {
-        sCameraService->playSound(CameraService::SOUND_SHUTTER);
-    }
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    if (c != 0) {
-        mLock.unlock();
-        c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0);
-        if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return;
-    }
-    disableMsgType(CAMERA_MSG_SHUTTER);
-
-    // Shutters only happen in response to takePicture, so mark device as
-    // idle now, until preview is restarted
-    sCameraService->updateProxyDeviceState(
-        hardware::ICameraServiceProxy::CAMERA_STATE_IDLE,
-        mCameraIdStr, mCameraFacing, mClientPackageName,
-        hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1);
-
-    mLock.unlock();
-}
-
-// preview callback - frame buffer update
-void CameraClient::handlePreviewData(int32_t msgType,
-                                              const sp<IMemory>& mem,
-                                              camera_frame_metadata_t *metadata) {
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    // local copy of the callback flags
-    int flags = mPreviewCallbackFlag;
-
-    // is callback enabled?
-    if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
-        // If the enable bit is off, the copy-out and one-shot bits are ignored
-        LOG2("frame callback is disabled");
-        mLock.unlock();
-        return;
-    }
-
-    // hold a strong pointer to the client
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-
-    // clear callback flags if no client or one-shot mode
-    if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
-        LOG2("Disable preview callback");
-        mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
-                                  CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-    }
-
-    if (c != 0) {
-        // Is the received frame copied out or not?
-        if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
-            LOG2("frame is copied");
-            copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
-        } else {
-            LOG2("frame is forwarded");
-            mLock.unlock();
-            c->dataCallback(msgType, mem, metadata);
-        }
-    } else {
-        mLock.unlock();
-    }
-}
-
-// picture callback - postview image ready
-void CameraClient::handlePostview(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL);
-    }
-}
-
-// picture callback - raw image ready
-void CameraClient::handleRawPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_RAW_IMAGE);
-
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL);
-    }
-}
-
-// picture callback - compressed picture ready
-void CameraClient::handleCompressedPicture(const sp<IMemory>& mem) {
-    disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE);
-
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL);
-    }
-}
-
-
-void CameraClient::handleGenericNotify(int32_t msgType,
-    int32_t ext1, int32_t ext2) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->notifyCallback(msgType, ext1, ext2);
-    }
-}
-
-void CameraClient::handleGenericData(int32_t msgType,
-    const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0) {
-        c->dataCallback(msgType, dataPtr, metadata);
-    }
-}
-
-void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp,
-    int32_t msgType, const sp<IMemory>& dataPtr) {
-    sp<hardware::ICameraClient> c = mRemoteCallback;
-    mLock.unlock();
-    if (c != 0 && dataPtr != nullptr) {
-        native_handle_t* handle = nullptr;
-
-        // Check if dataPtr contains a VideoNativeHandleMetadata.
-        if (dataPtr->size() == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata *metadata =
-                (VideoNativeHandleMetadata*)(dataPtr->unsecurePointer());
-            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
-                handle = metadata->pHandle;
-            }
-        }
-
-        // If dataPtr contains a native handle, send it via recordingFrameHandleCallbackTimestamp.
-        if (handle != nullptr) {
-            {
-                Mutex::Autolock l(mAvailableCallbackBuffersLock);
-                mAvailableCallbackBuffers.push_back(dataPtr);
-            }
-            c->recordingFrameHandleCallbackTimestamp(timestamp, handle);
-        } else {
-            c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
-        }
-    }
-}
-
-void CameraClient::copyFrameAndPostCopiedFrame(
-        int32_t msgType, const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap, size_t offset, size_t size,
-        camera_frame_metadata_t *metadata) {
-    LOG2("copyFrameAndPostCopiedFrame");
-    // It is necessary to copy out of pmem before sending this to
-    // the callback. For efficiency, reuse the same MemoryHeapBase
-    // provided it's big enough. Don't allocate the memory or
-    // perform the copy if there's no callback.
-    // hold the preview lock while we grab a reference to the preview buffer
-    sp<MemoryHeapBase> previewBuffer;
-
-    if (mPreviewBuffer == 0) {
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    } else if (size > mPreviewBuffer->virtualSize()) {
-        mPreviewBuffer.clear();
-        mPreviewBuffer = new MemoryHeapBase(size, 0, NULL);
-    }
-    if (mPreviewBuffer == 0) {
-        ALOGE("failed to allocate space for preview buffer");
-        mLock.unlock();
-        return;
-    }
-    previewBuffer = mPreviewBuffer;
-
-    void* previewBufferBase = previewBuffer->base();
-    void* heapBase = heap->base();
-
-    if (heapBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    } else if (previewBufferBase == MAP_FAILED) {
-        ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
-        mLock.unlock();
-        return;
-    }
-
-    memcpy(previewBufferBase, (uint8_t *) heapBase + offset, size);
-
-    sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size);
-    if (frame == 0) {
-        ALOGE("failed to allocate space for frame callback");
-        mLock.unlock();
-        return;
-    }
-
-    mLock.unlock();
-    client->dataCallback(msgType, frame, metadata);
-}
-
-int CameraClient::getOrientation(int degrees, bool mirror) {
-    if (!mirror) {
-        if (degrees == 0) return 0;
-        else if (degrees == 90) return HAL_TRANSFORM_ROT_90;
-        else if (degrees == 180) return HAL_TRANSFORM_ROT_180;
-        else if (degrees == 270) return HAL_TRANSFORM_ROT_270;
-    } else {  // Do mirror (horizontal flip)
-        if (degrees == 0) {           // FLIP_H and ROT_0
-            return HAL_TRANSFORM_FLIP_H;
-        } else if (degrees == 90) {   // FLIP_H and ROT_90
-            return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90;
-        } else if (degrees == 180) {  // FLIP_H and ROT_180
-            return HAL_TRANSFORM_FLIP_V;
-        } else if (degrees == 270) {  // FLIP_H and ROT_270
-            return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90;
-        }
-    }
-    ALOGE("Invalid setDisplayOrientation degrees=%d", degrees);
-    return -1;
-}
-
-status_t CameraClient::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
-    (void)bufferProducer;
-    ALOGE("%s: %d: CameraClient doesn't support setting a video target.", __FUNCTION__, __LINE__);
-    return INVALID_OPERATION;
-}
-
-status_t CameraClient::setAudioRestriction(int mode) {
-    if (!isValidAudioRestriction(mode)) {
-        ALOGE("%s: invalid audio restriction mode %d", __FUNCTION__, mode);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::setAudioRestriction(mode);
-}
-
-int32_t CameraClient::getGlobalAudioRestriction() {
-    Mutex::Autolock lock(mLock);
-    if (checkPidAndHardware() != NO_ERROR) {
-        return INVALID_OPERATION;
-    }
-    return BasicClient::getServiceAudioRestriction();
-}
-
-// API1->Device1 does not support this feature
-status_t CameraClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
-    return OK;
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
deleted file mode 100644
index a7eb960..0000000
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H
-
-#include "CameraService.h"
-
-namespace android {
-
-class MemoryHeapBase;
-class CameraHardwareInterface;
-
-/**
- * Interface between android.hardware.Camera API and Camera HAL device for version
- * CAMERA_DEVICE_API_VERSION_1_0.
- */
-
-class CameraClient : public CameraService::Client
-{
-public:
-    // ICamera interface (see ICamera for details)
-    virtual binder::Status  disconnect();
-    virtual status_t        connect(const sp<hardware::ICameraClient>& client);
-    virtual status_t        lock();
-    virtual status_t        unlock();
-    virtual status_t        setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual void            setPreviewCallbackFlag(int flag);
-    virtual status_t        setPreviewCallbackTarget(
-            const sp<IGraphicBufferProducer>& callbackProducer);
-    virtual status_t        startPreview();
-    virtual void            stopPreview();
-    virtual bool            previewEnabled();
-    virtual status_t        setVideoBufferMode(int32_t videoBufferMode);
-    virtual status_t        startRecording();
-    virtual void            stopRecording();
-    virtual bool            recordingEnabled();
-    virtual void            releaseRecordingFrame(const sp<IMemory>& mem);
-    virtual void            releaseRecordingFrameHandle(native_handle_t *handle);
-    virtual void            releaseRecordingFrameHandleBatch(
-                                    const std::vector<native_handle_t*>& handles);
-    virtual status_t        autoFocus();
-    virtual status_t        cancelAutoFocus();
-    virtual status_t        takePicture(int msgType);
-    virtual status_t        setParameters(const String8& params);
-    virtual String8         getParameters() const;
-    virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-    virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
-    virtual status_t        setAudioRestriction(int mode);
-    virtual int32_t         getGlobalAudioRestriction();
-
-    virtual status_t        setRotateAndCropOverride(uint8_t override);
-
-    // Interface used by CameraService
-    CameraClient(const sp<CameraService>& cameraService,
-            const sp<hardware::ICameraClient>& cameraClient,
-            const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
-            int cameraId,
-            int cameraFacing,
-            int clientPid,
-            int clientUid,
-            int servicePid);
-    ~CameraClient();
-
-    virtual status_t initialize(sp<CameraProviderManager> manager,
-            const String8& monitorTags) override;
-
-    virtual status_t dump(int fd, const Vector<String16>& args);
-
-    virtual status_t dumpClient(int fd, const Vector<String16>& args);
-
-private:
-
-    // check whether the calling process matches mClientPid.
-    status_t                checkPid() const;
-    status_t                checkPidAndHardware() const;  // also check mHardware != 0
-
-    // these are internal functions used to set up preview buffers
-    status_t                registerPreviewBuffers();
-
-    // camera operation mode
-    enum camera_mode {
-        CAMERA_PREVIEW_MODE   = 0,  // frame automatically released
-        CAMERA_RECORDING_MODE = 1,  // frame has to be explicitly released by releaseRecordingFrame()
-    };
-    // these are internal functions used for preview/recording
-    status_t                startCameraMode(camera_mode mode);
-    status_t                startPreviewMode();
-    status_t                startRecordingMode();
-
-    // internal function used by sendCommand to enable/disable shutter sound.
-    status_t                enableShutterSound(bool enable);
-
-    static sp<CameraClient>        getClientFromCookie(void* user);
-
-    // these are static callback functions
-    static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
-    static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata, void* user);
-    static void             dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
-    static void             handleCallbackTimestampBatch(
-                                    int32_t msgType, const std::vector<HandleTimestampMessage>&, void* user);
-    // handlers for messages
-    void                    handleShutter(void);
-    void                    handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
-            camera_frame_metadata_t *metadata);
-    void                    handlePostview(const sp<IMemory>& mem);
-    void                    handleRawPicture(const sp<IMemory>& mem);
-    void                    handleCompressedPicture(const sp<IMemory>& mem);
-    void                    handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2);
-    void                    handleGenericData(int32_t msgType, const sp<IMemory>& dataPtr,
-            camera_frame_metadata_t *metadata);
-    void                    handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
-    void                    copyFrameAndPostCopiedFrame(
-        int32_t msgType,
-        const sp<hardware::ICameraClient>& client,
-        const sp<IMemoryHeap>& heap,
-        size_t offset, size_t size,
-        camera_frame_metadata_t *metadata);
-
-    int                     getOrientation(int orientation, bool mirror);
-
-    status_t                setPreviewWindow(
-        const sp<IBinder>& binder,
-        const sp<ANativeWindow>& window);
-
-
-    // these are initialized in the constructor.
-    sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
-    int                             mPreviewCallbackFlag;
-    int                             mOrientation;     // Current display orientation
-    bool                            mPlayShutterSound;
-    bool                            mLegacyMode; // camera2 api legacy mode?
-
-    // Ensures atomicity among the public methods
-    mutable Mutex                   mLock;
-    // Binder of the IGraphicBufferProducer backing the preview Surface.
-    sp<IBinder>                     mSurface;
-    sp<ANativeWindow>               mPreviewWindow;
-
-    // If the user wants us to return a copy of the preview frame (instead
-    // of the original one), we allocate mPreviewBuffer and reuse it if possible.
-    sp<MemoryHeapBase>              mPreviewBuffer;
-
-    // Debugging information
-    CameraParameters                mLatestSetParameters;
-
-    // mAvailableCallbackBuffers stores the sp<IMemory> buffers that the HAL uses to send
-    // VideoNativeHandleMetadata. They are sent back to the HAL when the camera receives the
-    // native handle from releaseRecordingFrameHandle.
-    Mutex                           mAvailableCallbackBuffersLock;
-    std::vector<sp<IMemory>>        mAvailableCallbackBuffers;
-
-    // We need to avoid the deadlock when the incoming command thread and
-    // the CameraHardwareInterface callback thread both want to grab mLock.
-    // An extra flag is used to tell the callback thread that it should stop
-    // trying to deliver the callback messages if the client is not
-    // interested in them anymore. For example, if the client is calling
-    // stopPreview(), the preview frame messages do not need to be delivered
-    // anymore.
-
-    // These functions take the same parameter as the enableMsgType() and
-    // disableMsgType() functions in CameraHardwareInterface.
-    void                    enableMsgType(int32_t msgType);
-    void                    disableMsgType(int32_t msgType);
-    volatile int32_t        mMsgEnabled;
-
-    // This function keeps trying to grab mLock, or give up if the message
-    // is found to be disabled. It returns true if mLock is grabbed.
-    bool                    lockIfMessageWanted(int32_t msgType);
-};
-
-}
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index a71a732..ee764ec 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -31,12 +31,15 @@
 namespace android {
 namespace camera2 {
 
+using android::camera3::CAMERA_STREAM_ROTATION_0;
+
 CallbackProcessor::CallbackProcessor(sp<Camera2Client> client):
         Thread(false),
         mClient(client),
         mDevice(client->getCameraDevice()),
         mId(client->getCameraId()),
         mCallbackAvailable(false),
+        mCallbackPaused(true),
         mCallbackToApp(false),
         mCallbackStreamId(NO_STREAM) {
 }
@@ -154,8 +157,8 @@
                 callbackFormat, params.previewFormat);
         res = device->createStream(mCallbackWindow,
                 params.previewWidth, params.previewHeight, callbackFormat,
-                HAL_DATASPACE_V0_JFIF, CAMERA3_STREAM_ROTATION_0, &mCallbackStreamId,
-                String8());
+                HAL_DATASPACE_V0_JFIF, CAMERA_STREAM_ROTATION_0, &mCallbackStreamId,
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                     "%s (%d)", __FUNCTION__, mId,
@@ -214,6 +217,14 @@
     return mCallbackStreamId;
 }
 
+void CallbackProcessor::unpauseCallback() {
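+    // Resume processing of newly arriving callback frames.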
+    mCallbackPaused = false;
+}
+
+void CallbackProcessor::pauseCallback() {
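+    // Newly arriving callback frames are discarded until unpauseCallback() is called.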
+    mCallbackPaused = true;
+}
+
 void CallbackProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
 }
 
@@ -232,7 +243,7 @@
 
     do {
         sp<Camera2Client> client = mClient.promote();
-        if (client == 0) {
+        if (client == 0 || mCallbackPaused) {
             res = discardNewCallback();
         } else {
             res = processNewCallback(client);
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 5231688..a336326 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H
 #define ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H
 
+#include <atomic>
+
 #include <utils/Thread.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
@@ -52,6 +54,9 @@
     status_t deleteStream();
     int getStreamId() const;
 
+    void unpauseCallback();
+    void pauseCallback();
+
     void dump(int fd, const Vector<String16>& args) const;
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -67,6 +72,8 @@
         NO_STREAM = -1
     };
 
+    std::atomic<bool> mCallbackPaused;
+
     // True if mCallbackWindow is a remote consumer, false if just the local
     // mCallbackConsumer
     bool mCallbackToApp;
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 0c01a91..4c9b7ed 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -706,7 +706,7 @@
 
     if (mCaptureRequest.entryCount() == 0) {
         res = client->getCameraDevice()->createDefaultRequest(
-                CAMERA2_TEMPLATE_STILL_CAPTURE,
+                camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE,
                 &mCaptureRequest);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create default still image request:"
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index ddfe5e3..eed2654 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -35,6 +35,8 @@
 namespace android {
 namespace camera2 {
 
+using android::camera3::CAMERA_STREAM_ROTATION_0;
+
 JpegProcessor::JpegProcessor(
     sp<Camera2Client> client,
     wp<CaptureSequencer> sequencer):
@@ -148,8 +150,8 @@
         res = device->createStream(mCaptureWindow,
                 params.pictureWidth, params.pictureHeight,
                 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
-                CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId,
-                String8());
+                CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for capture: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index dbc863b..80508e4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,7 +29,7 @@
 
 #include "Parameters.h"
 #include "system/camera.h"
-#include "hardware/camera_common.h"
+#include <android-base/properties.h>
 #include <android/hardware/ICamera.h>
 #include <media/MediaProfiles.h>
 #include <media/mediarecorder.h>
@@ -37,6 +37,8 @@
 namespace android {
 namespace camera2 {
 
+using android::camera3::CAMERA_TEMPLATE_PREVIEW;
+
 Parameters::Parameters(int cameraId,
         int cameraFacing) :
         cameraId(cameraId),
@@ -1246,6 +1248,7 @@
             }
         }
         fastInfo.maxZslSize = maxPrivInputSize;
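+        // Start the ZSL size in use at the maximum; it may later be switched
+        // to the picture size by the ZSL size selection logic.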
+        fastInfo.usedZslSize = maxPrivInputSize;
     } else {
         fastInfo.maxZslSize = {0, 0};
     }
@@ -2046,12 +2049,33 @@
 
     slowJpegMode = false;
     Size pictureSize = { pictureWidth, pictureHeight };
-    int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
-    if (previewFpsRange[1] > 1e9/minFrameDurationNs + FPS_MARGIN) {
+    bool zslFrameRateSupported = false;
+    int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
+    if (previewFpsRange[1] > 1e9/jpegMinFrameDurationNs + FPS_MARGIN) {
         slowJpegMode = true;
     }
-    if (isDeviceZslSupported || slowJpegMode ||
-            property_get_bool("camera.disable_zsl_mode", false)) {
+    if (isZslReprocessPresent) {
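+        // Choose which ZSL stream size to validate against the preview frame
+        // rate: devices first shipping with Android S use the current picture
+        // size unless ro.camera.enableCamera1MaxZsl is set, in which case the
+        // maximum supported ZSL size is kept.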
+        unsigned int firstApiLevel =
+            android::base::GetUintProperty<unsigned int>("ro.product.first_api_level", 0);
+        Size chosenSize;
+        if ((firstApiLevel >= __ANDROID_API_S__) &&
+            !android::base::GetBoolProperty("ro.camera.enableCamera1MaxZsl", false)) {
+            chosenSize = pictureSize;
+        } else {
+            // Follow the old behavior of keeping the max ZSL size as the
+            // input / output ZSL stream size.
+            chosenSize = fastInfo.maxZslSize;
+        }
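+        // Only allow ZSL when the chosen size can sustain the requested
+        // preview frame rate (within FPS_MARGIN).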
+        int64_t zslMinFrameDurationNs = getZslStreamMinFrameDurationNs(chosenSize);
+        if (zslMinFrameDurationNs > 0 &&
+                previewFpsRange[1] <= (1e9/zslMinFrameDurationNs + FPS_MARGIN)) {
+            zslFrameRateSupported = true;
+            fastInfo.usedZslSize = chosenSize;
+        }
+    }
+
+    if (isDeviceZslSupported || slowJpegMode || !zslFrameRateSupported ||
+            android::base::GetBoolProperty("camera.disable_zsl_mode", false)) {
         allowZslMode = false;
     } else {
         allowZslMode = isZslReprocessPresent;
@@ -2468,7 +2492,7 @@
 
         // Use focal length in preview template if it exists
         CameraMetadata previewTemplate;
-        status_t res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW, &previewTemplate);
+        status_t res = device->createDefaultRequest(CAMERA_TEMPLATE_PREVIEW, &previewTemplate);
         if (res != OK) {
             ALOGE("%s: Failed to create default PREVIEW request: %s (%d)",
                     __FUNCTION__, strerror(-res), res);
@@ -2975,7 +2999,7 @@
         const StreamConfiguration &sc = scs[i];
         if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                 sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
-                sc.width <= limit.width && sc.height <= limit.height) {
+                ((sc.width * sc.height) <= (limit.width * limit.height))) {
             int64_t minFrameDuration = getMinFrameDurationNs(
                     {sc.width, sc.height}, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
             if (minFrameDuration > MAX_PREVIEW_RECORD_DURATION_NS) {
@@ -3055,6 +3079,10 @@
     return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
 }
 
+int64_t Parameters::getZslStreamMinFrameDurationNs(Parameters::Size size) {
+    return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+}
+
 int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
     const int STREAM_DURATION_SIZE = 4;
     const int STREAM_FORMAT_OFFSET = 0;
@@ -3253,6 +3281,8 @@
 
 status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
         const {
+    // For external cameras, use FOVs = (-1.0, -1.0) as default values. Calculate
+    // FOVs only if there is sufficient information.
     if (fastInfo.isExternalCamera) {
         if (horizFov != NULL) {
             *horizFov = -1.0;
@@ -3260,16 +3290,29 @@
         if (vertFov != NULL) {
             *vertFov = -1.0;
         }
-        return OK;
     }
 
     camera_metadata_ro_entry_t sensorSize =
             staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
-    if (!sensorSize.count) return NO_INIT;
+    if (!sensorSize.count) {
+        // This is non-fatal for external cameras since default values are used.
+        if (fastInfo.isExternalCamera) {
+            return OK;
+        } else {
+            return NO_INIT;
+        }
+    }
 
     camera_metadata_ro_entry_t pixelArraySize =
             staticInfo(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 2, 2);
-    if (!pixelArraySize.count) return NO_INIT;
+    if (!pixelArraySize.count) {
+        // This is non-fatal for external cameras since default values are used.
+        if (fastInfo.isExternalCamera) {
+            return OK;
+        } else {
+            return NO_INIT;
+        }
+    }
 
     float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
             fastInfo.arrayHeight;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 3a709c9..e2f8d011 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -56,7 +56,7 @@
     int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION
 
     int pictureWidth, pictureHeight;
-    // Store the picture size before they are overriden by video snapshot
+    // Store the picture size before they are overridden by video snapshot
     int pictureWidthLastSet, pictureHeightLastSet;
     bool pictureSizeOverriden;
 
@@ -248,6 +248,7 @@
         bool useFlexibleYuv;
         Size maxJpegSize;
         Size maxZslSize;
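+        // ZSL input/output stream size actually configured; defaults to maxZslSize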
+        Size usedZslSize;
         bool supportsPreferredConfigs;
     } fastInfo;
 
@@ -426,6 +427,11 @@
     // return -1 if input jpeg size cannot be found in supported size list
     int64_t getJpegStreamMinFrameDurationNs(Parameters::Size size);
 
+    // Helper function to get minimum frame duration for an
+    // IMPLEMENTATION_DEFINED stream of size 'size'
+    // return -1 if input size cannot be found in supported size list
+    int64_t getZslStreamMinFrameDurationNs(Parameters::Size size);
+
     // Helper function to get minimum frame duration for a size/format combination
     // return -1 if input size/format combination cannot be found.
     int64_t getMinFrameDurationNs(Parameters::Size size, int format);
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 0786f53..2d3597c 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -40,6 +40,10 @@
 namespace android {
 namespace camera2 {
 
+using android::camera3::CAMERA_STREAM_ROTATION_0;
+using android::camera3::CAMERA_TEMPLATE_PREVIEW;
+using android::camera3::CAMERA_TEMPLATE_ZERO_SHUTTER_LAG;
+
 StreamingProcessor::StreamingProcessor(sp<Camera2Client> client):
         mClient(client),
         mDevice(client->getCameraDevice()),
@@ -113,12 +117,12 @@
             return INVALID_OPERATION;
         }
 
-        // Use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG for ZSL streaming case.
+        // Use CAMERA_TEMPLATE_ZERO_SHUTTER_LAG for ZSL streaming case.
         if (params.useZeroShutterLag() && !params.recordingHint) {
             res = device->createDefaultRequest(
-                    CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG, &mPreviewRequest);
+                    CAMERA_TEMPLATE_ZERO_SHUTTER_LAG, &mPreviewRequest);
         } else {
-            res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
+            res = device->createDefaultRequest(CAMERA_TEMPLATE_PREVIEW,
                     &mPreviewRequest);
         }
 
@@ -194,7 +198,8 @@
         res = device->createStream(mPreviewWindow,
                 params.previewWidth, params.previewHeight,
                 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN,
-                CAMERA3_STREAM_ROTATION_0, &mPreviewStreamId, String8());
+                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8(),
+                std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
                     __FUNCTION__, mId, strerror(-res), res);
@@ -263,7 +268,7 @@
     }
 
     if (mRecordingRequest.entryCount() == 0) {
-        res = device->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
+        res = device->createDefaultRequest(camera_request_template_t::CAMERA_TEMPLATE_VIDEO_RECORD,
                 &mRecordingRequest);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create default recording request:"
@@ -379,8 +384,8 @@
         res = device->createStream(mRecordingWindow,
                 params.videoWidth, params.videoHeight,
                 params.videoFormat, params.videoDataSpace,
-                CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId,
-                String8());
+                CAMERA_STREAM_ROTATION_0, &mRecordingStreamId,
+                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8dc9863..1321e6b 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -42,6 +42,9 @@
 namespace android {
 namespace camera2 {
 
+using android::camera3::CAMERA_STREAM_ROTATION_0;
+using android::camera3::CAMERA_TEMPLATE_STILL_CAPTURE;
+
 namespace {
 struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
     typedef RingBufferConsumer::BufferInfo BufferInfo;
@@ -172,7 +175,7 @@
     mBufferQueueDepth = mFrameListDepth + 1;
 
     mZslQueue.insertAt(0, mBufferQueueDepth);
-    mFrameList.insertAt(0, mFrameListDepth);
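+    // Pre-allocate empty result-metadata slots up to the frame list depth.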
+    mFrameList.resize(mFrameListDepth);
     sp<CaptureSequencer> captureSequencer = mSequencer.promote();
     if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
 }
@@ -208,7 +211,7 @@
     // Corresponding buffer has been cleared. No need to push into mFrameList
     if (timestamp <= mLatestClearedBufferTimestamp) return;
 
-    mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
+    mFrameList[mFrameListHead] = result.mMetadata;
     mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
 }
 
@@ -232,9 +235,9 @@
     }
 
     if (mInputStreamId == NO_STREAM) {
-        res = device->createInputStream(params.fastInfo.maxZslSize.width,
-            params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
-            &mInputStreamId);
+        res = device->createInputStream(params.fastInfo.usedZslSize.width,
+            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+            /*isMultiResolution*/false, &mInputStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create input stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -255,10 +258,10 @@
         mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
         sp<Surface> outSurface = new Surface(producer);
 
-        res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
-            params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
-            HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mZslStreamId,
-            String8());
+        res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
+            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+            HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
+            String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create ZSL stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -348,7 +351,7 @@
     }
 
     CameraMetadata stillTemplate;
-    device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);
+    device->createDefaultRequest(CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
 
     // Find some of the post-processing tags, and assign the value from template to the request.
     // Only check the aberration mode and noise reduction mode for now, as they are very important
@@ -671,7 +674,7 @@
 void ZslProcessor::clearZslResultQueueLocked() {
     mFrameList.clear();
     mFrameListHead = 0;
-    mFrameList.insertAt(0, mFrameListDepth);
+    mFrameList.resize(mFrameListDepth);
 }
 
 void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 1db2403..3186233 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -125,7 +125,7 @@
     static const int32_t kDefaultMaxPipelineDepth = 4;
     size_t mBufferQueueDepth;
     size_t mFrameListDepth;
-    Vector<CameraMetadata> mFrameList;
+    std::vector<CameraMetadata> mFrameList;
     size_t mFrameListHead;
 
     ZslPair mNextPair;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index e35b436..1f3d478 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -21,6 +21,7 @@
 #include <cutils/properties.h>
 #include <utils/CameraThreadState.h>
 #include <utils/Log.h>
+#include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
 #include <gui/Surface.h>
 #include <camera/camera2/CaptureRequest.h>
@@ -30,6 +31,7 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "api2/CameraDeviceClient.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #include <camera_metadata_hidden.h>
 
@@ -49,15 +51,18 @@
 
 namespace android {
 using namespace camera2;
+using camera3::camera_stream_rotation_t::CAMERA_STREAM_ROTATION_0;
+using camera3::SessionConfigurationUtils;
 
 CameraDeviceClientBase::CameraDeviceClientBase(
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
         int servicePid) :
@@ -67,6 +72,7 @@
             clientFeatureId,
             cameraId,
             cameraFacing,
+            sensorOrientation,
             clientPid,
             clientUid,
             servicePid),
@@ -80,18 +86,21 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid) :
+        int servicePid,
+        bool overrideForPerfClass) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, /*API1 camera ID*/ -1,
-                cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
-    mRequestIdCounter(0) {
+    mRequestIdCounter(0),
+    mOverrideForPerfClass(overrideForPerfClass) {
 
     ATRACE_CALL();
     ALOGI("CameraDeviceClient %s: Opened", cameraId.string());
@@ -122,8 +131,8 @@
                                       /*listener*/this,
                                       /*sendPartials*/true);
 
-    auto deviceInfo = mDevice->info();
-    camera_metadata_entry_t physicalKeysEntry = deviceInfo.find(
+    const CameraMetadata &deviceInfo = mDevice->info();
+    camera_metadata_ro_entry_t physicalKeysEntry = deviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS);
     if (physicalKeysEntry.count > 0) {
         mSupportedPhysicalRequestKeys.insert(mSupportedPhysicalRequestKeys.begin(),
@@ -132,6 +141,17 @@
     }
 
     mProviderManager = providerPtr;
+    // Cache physical camera ids corresponding to this device and also the high
+    // resolution sensors in this device + physical camera ids
+    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
+    if (isUltraHighResolutionSensor(mCameraIdStr)) {
+        mHighResolutionSensors.insert(mCameraIdStr.string());
+    }
+    for (auto &physicalId : mPhysicalCameraIds) {
+        if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
+            mHighResolutionSensors.insert(physicalId.c_str());
+        }
+    }
     return OK;
 }
 
@@ -168,7 +188,6 @@
 
     const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
     if (outSurfaceMap->find(streamSurfaceId.streamId()) == outSurfaceMap->end()) {
-        (*outSurfaceMap)[streamSurfaceId.streamId()] = std::vector<size_t>();
         outputStreamIds->push_back(streamSurfaceId.streamId());
     }
     (*outSurfaceMap)[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
@@ -184,6 +203,17 @@
     return binder::Status::ok();
 }
 
+static std::list<int> getIntersection(const std::unordered_set<int> &streamIdsForThisCamera,
+        const Vector<int> &streamIdsForThisRequest) {
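+    // Collect the stream ids from this request that also belong to the given camera.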
+    std::list<int> intersection;
+    for (auto &streamId : streamIdsForThisRequest) {
+        if (streamIdsForThisCamera.find(streamId) != streamIdsForThisCamera.end()) {
+            intersection.emplace_back(streamId);
+        }
+    }
+    return intersection;
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -330,6 +360,24 @@
                         "Request settings are empty");
             }
 
+            // Check whether the physical / logical stream has settings
+            // consistent with the sensor pixel mode(s) it was configured with.
+            // mHighResolutionCameraIdToStreamIdSet only contains ids of high resolution sensors
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
+                std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
+                        outputStreamIds);
+                if (!request.mIsReprocess &&
+                        !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
+                     ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
+                            "consistent with configured streams. Rejecting request.",
+                            __FUNCTION__, it.id.c_str());
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                        "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
+                        "streams configured");
+                }
+            }
+
             String8 physicalId(it.id.c_str());
             if (physicalId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
@@ -470,7 +518,7 @@
 }
 
 binder::Status CameraDeviceClient::endConfigure(int operatingMode,
-        const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+        const hardware::camera2::impl::CameraMetadataNative& sessionParams, int64_t startTimeMs,
         std::vector<int>* offlineStreamIds /*out*/) {
     ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
@@ -492,7 +540,8 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+            mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -545,256 +594,20 @@
         for (const auto& offlineStreamId : *offlineStreamIds) {
             mStreamInfoMap[offlineStreamId].supportsOffline = true;
         }
+
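+        // Measure how long this configuration took and report it to the camera service proxy.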
+        nsecs_t configureEnd = systemTime();
+        int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
+        CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+                false /*internalReconfig*/, configureDurationMs);
     }
 
     return res;
 }
 
-binder::Status CameraDeviceClient::checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType)  {
-    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
-        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
-                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
-    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
-        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
-    }
-
-    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
-            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
-    if (deferredConsumer && !validSurfaceType) {
-        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
-    }
-
-    return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkPhysicalCameraId(
-        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-        const String8 &logicalCameraId) {
-    if (physicalCameraId.size() == 0) {
-        return binder::Status::ok();
-    }
-    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
-        physicalCameraId.string()) == physicalCameraIds.end()) {
-        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
-                logicalCameraId.string(), physicalCameraId.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    return binder::Status::ok();
-}
-
-binder::Status CameraDeviceClient::checkOperatingMode(int operatingMode,
-        const CameraMetadata &staticInfo, const String8 &cameraId) {
-    if (operatingMode < 0) {
-        String8 msg = String8::format(
-            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
-    }
-
-    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
-    if (isConstrainedHighSpeed) {
-        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-        bool isConstrainedHighSpeedSupported = false;
-        for(size_t i = 0; i < entry.count; ++i) {
-            uint8_t capability = entry.data.u8[i];
-            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
-                isConstrainedHighSpeedSupported = true;
-                break;
-            }
-        }
-        if (!isConstrainedHighSpeedSupported) {
-            String8 msg = String8::format(
-                "Camera %s: Try to create a constrained high speed configuration on a device"
-                " that doesn't support it.", cameraId.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                    msg.string());
-        }
-    }
-
-    return binder::Status::ok();
-}
-
-void CameraDeviceClient::mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/) {
-    if (stream == nullptr) {
-        return;
-    }
-
-    stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
-    stream->v3_2.width = streamInfo.width;
-    stream->v3_2.height = streamInfo.height;
-    stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
-    auto u = streamInfo.consumerUsage;
-    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
-    stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
-    stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
-    stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
-    stream->v3_2.id = -1; // Invalid stream id
-    stream->physicalCameraId = std::string(physicalId.string());
-    stream->bufferSize = 0;
-}
-
-binder::Status
-CameraDeviceClient::convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
-        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
-        bool *unsupported) {
-    auto operatingMode = sessionConfiguration.getOperatingMode();
-    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    if (unsupported == nullptr) {
-        String8 msg("unsupported nullptr");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    *unsupported = false;
-    auto ret = Camera3Device::mapToStreamConfigurationMode(
-            static_cast<camera3_stream_configuration_mode_t> (operatingMode),
-            /*out*/ &streamConfiguration.operationMode);
-    if (ret != OK) {
-        String8 msg = String8::format(
-            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
-            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
-    }
-
-    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
-            (sessionConfiguration.getInputHeight() > 0) &&
-            (sessionConfiguration.getInputFormat() > 0);
-    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
-    size_t streamCount = outputConfigs.size();
-    streamCount = isInputValid ? streamCount + 1 : streamCount;
-    streamConfiguration.streams.resize(streamCount);
-    size_t streamIdx = 0;
-    if (isInputValid) {
-        streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
-                hardware::camera::device::V3_2::StreamType::INPUT,
-                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
-                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
-                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
-                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
-                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
-                /*physicalId*/ nullptr, /*bufferSize*/0};
-    }
-
-    for (const auto &it : outputConfigs) {
-        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
-            it.getGraphicBufferProducers();
-        bool deferredConsumer = it.isDeferred();
-        String8 physicalCameraId = String8(it.getPhysicalCameraId());
-        size_t numBufferProducers = bufferProducers.size();
-        bool isStreamInfoValid = false;
-        OutputStreamInfo streamInfo;
-
-        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
-        if (!res.isOk()) {
-            return res;
-        }
-        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
-                logicalCameraId);
-        if (!res.isOk()) {
-            return res;
-        }
-
-        if (deferredConsumer) {
-            streamInfo.width = it.getWidth();
-            streamInfo.height = it.getHeight();
-            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
-            auto surfaceType = it.getSurfaceType();
-            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
-            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
-                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
-            }
-            mapStreamInfo(streamInfo, CAMERA3_STREAM_ROTATION_0, physicalCameraId,
-                    &streamConfiguration.streams[streamIdx++]);
-            isStreamInfoValid = true;
-
-            if (numBufferProducers == 0) {
-                continue;
-            }
-        }
-
-        for (auto& bufferProducer : bufferProducers) {
-            sp<Surface> surface;
-            const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId);
-            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId,
-                    physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo );
-
-            if (!res.isOk())
-                return res;
-
-            if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                if (isDepthCompositeStream || isHeicCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-                    if (ret != OK) {
-                        String8 msg = String8::format(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.string(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.string());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *unsupported = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera3_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera3_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, &streamConfiguration.streams[streamIdx++]);
-                }
-                isStreamInfoValid = true;
-            }
-        }
-    }
-    return binder::Status::ok();
-}
-
 binder::Status CameraDeviceClient::isSessionConfigurationSupported(
         const SessionConfiguration& sessionConfiguration, bool *status /*out*/) {
-    ATRACE_CALL();
 
+    ATRACE_CALL();
     binder::Status res;
     status_t ret = OK;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -806,7 +619,8 @@
     }
 
     auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = checkOperatingMode(operatingMode, mDevice->info(), mCameraIdStr);
+    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
+            mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -816,13 +630,16 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
-    hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+
+    hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
+    camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
+        return mDevice->infoPhysical(id);
+    };
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = convertToHALStreamCombination(sessionConfiguration, mCameraIdStr,
-            mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration, &earlyExit);
+    res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
+            mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
+            mOverrideForPerfClass, &earlyExit);
     if (!res.isOk()) {
         return res;
     }
@@ -945,6 +762,13 @@
                 }
                 mCompositeStreamMap.removeItemsAt(compositeIndex);
             }
+            for (auto &mapIt: mHighResolutionCameraIdToStreamIdSet) {
+                auto &streamSet = mapIt.second;
+                if (streamSet.find(streamId) != streamSet.end()) {
+                    streamSet.erase(streamId);
+                    break;
+                }
+            }
         }
     }
 
@@ -969,8 +793,9 @@
     bool isShared = outputConfiguration.isShared();
     String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
+    bool isMultiResolution = outputConfiguration.isMultiResolution();
 
-    res = checkSurfaceType(numBufferProducers, deferredConsumer,
+    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
     if (!res.isOk()) {
         return res;
@@ -979,9 +804,8 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
-    std::vector<std::string> physicalCameraIds;
-    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId, mCameraIdStr);
+    res = SessionConfigurationUtils::checkPhysicalCameraId(mPhysicalCameraIds,
+            physicalCameraId, mCameraIdStr);
     if (!res.isOk()) {
         return res;
     }
@@ -997,6 +821,8 @@
 
     OutputStreamInfo streamInfo;
     bool isStreamInfoValid = false;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         sp<IBinder> binder = IInterface::asBinder(bufferProducer);
@@ -1009,8 +835,9 @@
         }
 
         sp<Surface> surface;
-        res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
+                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -1023,9 +850,14 @@
         surfaces.push_back(surface);
     }
 
+    // If mOverrideForPerfClass is true, do not fail createStream() for small
+    // JPEG sizes because existing createSurfaceFromGbp() logic will find the
+    // closest possible supported size.
+
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<int> surfaceIds;
-    bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
     bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
     if (isDepthCompositeStream || isHeicCompisiteStream) {
         sp<CompositeStream> compositeStream;
@@ -1037,9 +869,9 @@
 
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
-                static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared);
+                static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
         if (err == OK) {
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
                     compositeStream);
@@ -1047,9 +879,9 @@
     } else {
         err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format, streamInfo.dataSpace,
-                static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
-                &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
-                isShared);
+                static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
+                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
     }
 
     if (err != OK) {
@@ -1077,6 +909,16 @@
         // Set transform flags to ensure preview to be rotated correctly.
         res = setStreamTransformLocked(streamId);
 
+        // Fill in mHighResolutionCameraIdToStreamIdSet map
+        const String8 &cameraIdUsed =
+                physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsedCStr].insert(streamId);
+        }
+
         *newStreamId = streamId;
     }
 
@@ -1113,12 +955,27 @@
     std::vector<sp<Surface>> noSurface;
     std::vector<int> surfaceIds;
     String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());
+    const String8 &cameraIdUsed =
+            physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
+    // Validate the requested sensor pixel modes and compute the overridden set to use.
+    std::unordered_set<int32_t> overriddenSensorPixelModesUsed;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
+    if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+            sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
+            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "sensor pixel modes used not valid for deferred stream");
+    }
+
     err = mDevice->createStream(noSurface, /*hasDeferredConsumer*/true, width,
             height, format, dataSpace,
-            static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
-            &streamId, physicalCameraId, &surfaceIds,
+            static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
+            &streamId, physicalCameraId,
+            overriddenSensorPixelModesUsed,
+            &surfaceIds,
             outputConfiguration.getSurfaceSetID(), isShared,
-            consumerUsage);
+            outputConfiguration.isMultiResolution(), consumerUsage);
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1129,9 +986,9 @@
         // a separate list to track. Once the deferred surface is set, this id will be
         // relocated to mStreamMap.
         mDeferredStreams.push_back(streamId);
-
         mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
-                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage));
+                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
+                        overriddenSensorPixelModesUsed));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -1141,6 +998,13 @@
         res = setStreamTransformLocked(streamId);
 
         *newStreamId = streamId;
+        // Fill in mHighResolutionCameraIdToStreamIdSet
+        const char *cameraIdUsedCStr = cameraIdUsed.string();
+        // Only needed for high resolution sensors
+        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+                mHighResolutionSensors.end()) {
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed.string()].insert(streamId);
+        }
     }
     return res;
 }
@@ -1173,12 +1037,13 @@
 }
 
 binder::Status CameraDeviceClient::createInputStream(
-        int width, int height, int format,
+        int width, int height, int format, bool isMultiResolution,
         /*out*/
         int32_t* newStreamId) {
 
     ATRACE_CALL();
-    ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format);
+    ALOGV("%s (w = %d, h = %d, f = 0x%x, isMultiResolution %d)", __FUNCTION__,
+            width, height, format, isMultiResolution);
 
     binder::Status res;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1197,7 +1062,7 @@
     }
 
     int streamId = -1;
-    status_t err = mDevice->createInputStream(width, height, format, &streamId);
+    status_t err = mDevice->createInputStream(width, height, format, isMultiResolution, &streamId);
     if (err == OK) {
         mInputStream.configured = true;
         mInputStream.width = width;
@@ -1309,12 +1174,15 @@
             newOutputsMap.removeItemsAt(idx);
         }
     }
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
         sp<Surface> surface;
-        res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
-                newOutputsMap.valueAt(i), mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
+                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
         if (!res.isOk())
             return res;
 
@@ -1364,226 +1232,6 @@
     return res;
 }
 
-bool CameraDeviceClient::isPublicFormat(int32_t format)
-{
-    switch(format) {
-        case HAL_PIXEL_FORMAT_RGBA_8888:
-        case HAL_PIXEL_FORMAT_RGBX_8888:
-        case HAL_PIXEL_FORMAT_RGB_888:
-        case HAL_PIXEL_FORMAT_RGB_565:
-        case HAL_PIXEL_FORMAT_BGRA_8888:
-        case HAL_PIXEL_FORMAT_YV12:
-        case HAL_PIXEL_FORMAT_Y8:
-        case HAL_PIXEL_FORMAT_Y16:
-        case HAL_PIXEL_FORMAT_RAW16:
-        case HAL_PIXEL_FORMAT_RAW10:
-        case HAL_PIXEL_FORMAT_RAW12:
-        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
-        case HAL_PIXEL_FORMAT_BLOB:
-        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-        case HAL_PIXEL_FORMAT_YCbCr_420_888:
-        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
-        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
-        case HAL_PIXEL_FORMAT_YCbCr_422_I:
-            return true;
-        default:
-            return false;
-    }
-}
-
-binder::Status CameraDeviceClient::createSurfaceFromGbp(
-        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &cameraId, const CameraMetadata &physicalCameraMetadata) {
-
-    // bufferProducer must be non-null
-    if (gbp == nullptr) {
-        String8 msg = String8::format("Camera %s: Surface is NULL", cameraId.string());
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    // HACK b/10949105
-    // Query consumer usage bits to set async operation mode for
-    // GLConsumer using controlledByApp parameter.
-    bool useAsync = false;
-    uint64_t consumerUsage = 0;
-    status_t err;
-    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
-        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for stream",
-                __FUNCTION__, cameraId.string(), consumerUsage);
-        useAsync = true;
-    }
-
-    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
-                              GRALLOC_USAGE_RENDERSCRIPT;
-    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
-                           GraphicBuffer::USAGE_HW_TEXTURE |
-                           GraphicBuffer::USAGE_HW_COMPOSER;
-    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
-            (consumerUsage & allowedFlags) != 0;
-
-    surface = new Surface(gbp, useAsync);
-    ANativeWindow *anw = surface.get();
-
-    int width, height, format;
-    android_dataspace dataSpace;
-    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
-                 cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
-            reinterpret_cast<int*>(&dataSpace))) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
-                cameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
-    }
-
-    // FIXME: remove this override since the default format should be
-    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
-    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
-            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
-             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
-        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
-                __FUNCTION__, cameraId.string(), format);
-        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    }
-    // Round dimensions to the nearest dimensions available for this format
-    if (flexibleConsumer && isPublicFormat(format) &&
-            !CameraDeviceClient::roundBufferDimensionNearest(width, height,
-            format, dataSpace, physicalCameraMetadata, /*out*/&width, /*out*/&height)) {
-        String8 msg = String8::format("Camera %s: No supported stream configurations with "
-                "format %#x defined, failed to create output stream",
-                cameraId.string(), format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-
-    if (!isStreamInfoValid) {
-        streamInfo.width = width;
-        streamInfo.height = height;
-        streamInfo.format = format;
-        streamInfo.dataSpace = dataSpace;
-        streamInfo.consumerUsage = consumerUsage;
-        return binder::Status::ok();
-    }
-    if (width != streamInfo.width) {
-        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
-                cameraId.string(), width, streamInfo.width);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (height != streamInfo.height) {
-        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
-                 cameraId.string(), height, streamInfo.height);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (format != streamInfo.format) {
-        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
-                 cameraId.string(), format, streamInfo.format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-        if (dataSpace != streamInfo.dataSpace) {
-            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    cameraId.string(), dataSpace, streamInfo.dataSpace);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-        }
-        //At the native side, there isn't a way to check whether 2 surfaces come from the same
-        //surface class type. Use usage flag to approximate the comparison.
-        if (consumerUsage != streamInfo.consumerUsage) {
-            String8 msg = String8::format(
-                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
-                    cameraId.string(), consumerUsage, streamInfo.consumerUsage);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-        }
-    }
-    return binder::Status::ok();
-}
-
-bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height,
-        int32_t format, android_dataspace dataSpace, const CameraMetadata& info,
-        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) {
-
-    camera_metadata_ro_entry streamConfigs =
-            (dataSpace == HAL_DATASPACE_DEPTH) ?
-            info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
-            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
-            info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
-            info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-
-    int32_t bestWidth = -1;
-    int32_t bestHeight = -1;
-
-    // Iterate through listed stream configurations and find the one with the smallest euclidean
-    // distance from the given dimensions for the given format.
-    for (size_t i = 0; i < streamConfigs.count; i += 4) {
-        int32_t fmt = streamConfigs.data.i32[i];
-        int32_t w = streamConfigs.data.i32[i + 1];
-        int32_t h = streamConfigs.data.i32[i + 2];
-
-        // Ignore input/output type for now
-        if (fmt == format) {
-            if (w == width && h == height) {
-                bestWidth = width;
-                bestHeight = height;
-                break;
-            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
-                    CameraDeviceClient::euclidDistSquare(w, h, width, height) <
-                    CameraDeviceClient::euclidDistSquare(bestWidth, bestHeight, width, height))) {
-                bestWidth = w;
-                bestHeight = h;
-            }
-        }
-    }
-
-    if (bestWidth == -1) {
-        // Return false if no configurations for this format were listed
-        return false;
-    }
-
-    // Set the outputs to the closet width/height
-    if (outWidth != NULL) {
-        *outWidth = bestWidth;
-    }
-    if (outHeight != NULL) {
-        *outHeight = bestHeight;
-    }
-
-    // Return true if at least one configuration for this format was listed
-    return true;
-}
-
-int64_t CameraDeviceClient::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
-    int64_t d0 = x0 - x1;
-    int64_t d1 = y0 - y1;
-    return d0 * d0 + d1 * d1;
-}
-
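The helpers deleted above (createSurfaceFromGbp, roundBufferDimensionNearest, euclidDistSquare) move into SessionConfigurationUtils, which the later call-site hunks reference. A minimal standalone sketch of the nearest-size rounding they implement, in plain C++ with no Android types (the 1920 cap mirrors ROUNDING_WIDTH_CAP):

#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

// Standalone sketch of the nearest-size search performed by
// roundBufferDimensionNearest: an exact match wins, otherwise the listed size
// with the smallest squared euclidean distance is chosen, ignoring widths
// above the 1920 cap.
static int64_t distSq(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t dx = x0 - x1, dy = y0 - y1;
    return dx * dx + dy * dy;
}

std::optional<std::pair<int32_t, int32_t>> roundNearest(
        int32_t w, int32_t h,
        const std::vector<std::pair<int32_t, int32_t>>& listed) {
    constexpr int32_t kWidthCap = 1920;  // mirrors ROUNDING_WIDTH_CAP
    std::optional<std::pair<int32_t, int32_t>> best;
    for (const auto& [lw, lh] : listed) {
        if (lw == w && lh == h) return std::pair{w, h};   // exact match
        if (lw > kWidthCap) continue;                     // skip oversized widths
        if (!best || distSq(lw, lh, w, h) < distSq(best->first, best->second, w, h)) {
            best.emplace(lw, lh);
        }
    }
    return best;  // empty when the format lists no usable configuration
}

For example, roundNearest(1280, 960, {{1280, 720}, {1024, 768}}) picks 1280x720, the closer of the two listed sizes.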
 // Create a request object from a template.
 binder::Status CameraDeviceClient::createDefaultRequest(int templateId,
         /*out*/
@@ -1601,9 +1249,12 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    CameraMetadata metadata;
     status_t err;
-    if ( (err = mDevice->createDefaultRequest(templateId, &metadata) ) == OK &&
+    camera_request_template_t tempId = camera_request_template_t::CAMERA_TEMPLATE_COUNT;
+    if (!(res = mapRequestTemplate(templateId, &tempId)).isOk()) return res;
+
+    CameraMetadata metadata;
+    if ( (err = mDevice->createDefaultRequest(tempId, &metadata) ) == OK &&
         request != NULL) {
 
         request->swap(metadata);
@@ -1886,6 +1537,8 @@
     }
 
     std::vector<sp<Surface>> consumerSurfaces;
+    const std::vector<int32_t> &sensorPixelModesUsed =
+            outputConfiguration.getSensorPixelModesUsed();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1896,8 +1549,9 @@
         }
 
         sp<Surface> surface;
-        res = createSurfaceFromGbp(mStreamInfoMap[streamId], true /*isStreamInfoValid*/,
-                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalId));
+        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
+                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed);
 
         if (!res.isOk())
             return res;
@@ -1977,6 +1631,14 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+bool CameraDeviceClient::supportsCameraMute() {
+    return mDevice->supportsCameraMute();
+}
+
+status_t CameraDeviceClient::setCameraMute(bool enabled) {
+    return mDevice->setCameraMute(enabled);
+}
+
 binder::Status CameraDeviceClient::switchToOffline(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::vector<int>& offlineOutputIds,
@@ -2062,7 +1724,8 @@
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
                 offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mClientPid, mClientUid, mServicePid);
+                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
+                mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
@@ -2162,14 +1825,16 @@
     mStreamingRequestId = REQUEST_ID_NONE;
 }
 
-void CameraDeviceClient::notifyIdle() {
+void CameraDeviceClient::notifyIdle(
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
 
     if (remoteCb != 0) {
         remoteCb->onDeviceIdle();
     }
-    Camera2ClientBase::notifyIdle();
+    Camera2ClientBase::notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2205,6 +1870,7 @@
 void CameraDeviceClient::detachDevice() {
     if (mDevice == 0) return;
 
+    nsecs_t startTime = systemTime();
     ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
 
     mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
@@ -2239,6 +1905,9 @@
     mCompositeStreamMap.clear();
 
     Camera2ClientBase::detachDevice();
+
+    int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
+    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
 }
 
 /** Device-related methods */
@@ -2328,4 +1997,91 @@
     return CameraUtils::getRotationTransform(staticInfo, transform);
 }
 
+binder::Status CameraDeviceClient::mapRequestTemplate(int templateId,
+        camera_request_template_t* tempId /*out*/) {
+    binder::Status ret = binder::Status::ok();
+
+    if (tempId == nullptr) {
+        ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Camera %s: Invalid template argument", mCameraIdStr.string());
+        return ret;
+    }
+    switch(templateId) {
+        case ICameraDeviceUser::TEMPLATE_PREVIEW:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_PREVIEW;
+            break;
+        case ICameraDeviceUser::TEMPLATE_RECORD:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_RECORD;
+            break;
+        case ICameraDeviceUser::TEMPLATE_STILL_CAPTURE:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE;
+            break;
+        case ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_SNAPSHOT;
+            break;
+        case ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_ZERO_SHUTTER_LAG;
+            break;
+        case ICameraDeviceUser::TEMPLATE_MANUAL:
+            *tempId = camera_request_template_t::CAMERA_TEMPLATE_MANUAL;
+            break;
+        default:
+            ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    "Camera %s: Template ID %d is invalid or not supported",
+                    mCameraIdStr.string(), templateId);
+            return ret;
+    }
+
+    return ret;
+}
+
+const CameraMetadata &CameraDeviceClient::getStaticInfo(const String8 &cameraId) {
+    if (mDevice->getId() == cameraId) {
+        return mDevice->info();
+    }
+    return mDevice->infoPhysical(cameraId);
+}
+
+bool CameraDeviceClient::isUltraHighResolutionSensor(const String8 &cameraId) {
+    const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
+    return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+}
+
+bool CameraDeviceClient::isSensorPixelModeConsistent(
+        const std::list<int> &streamIdList, const CameraMetadata &settings) {
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
+    // Check whether each stream has max resolution allowed.
+    bool consistent = true;
+    for (auto it : streamIdList) {
+        auto const streamInfoIt = mStreamInfoMap.find(it);
+        if (streamInfoIt == mStreamInfoMap.end()) {
+            ALOGE("%s: stream id %d not created, failing consistency check", __FUNCTION__, it);
+            return false;
+        }
+        consistent =
+                streamInfoIt->second.sensorPixelModesUsed.find(sensorPixelMode) !=
+                        streamInfoIt->second.sensorPixelModesUsed.end();
+        if (!consistent) {
+            ALOGE("sensorPixelMode used %i not consistent with configured modes", sensorPixelMode);
+            for (auto m : streamInfoIt->second.sensorPixelModesUsed) {
+                ALOGE("sensor pixel mode used list: %i", m);
+            }
+            break;
+        }
+    }
+
+    return consistent;
+}
+
 } // namespace android
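isSensorPixelModeConsistent above reduces to a membership check: the pixel mode named in the request settings (DEFAULT when the key is absent) must be among the modes each targeted stream was configured with. A standalone sketch of that rule, assuming plain containers in place of the metadata and stream-info types:

#include <cstdint>
#include <list>
#include <unordered_map>
#include <unordered_set>

// The enum values stand in for the ANDROID_SENSOR_PIXEL_MODE_* constants.
enum PixelMode : int32_t { kDefault = 0, kMaximumResolution = 1 };

bool requestModeConsistent(
        int32_t requestedMode,
        const std::list<int>& streamIds,
        const std::unordered_map<int, std::unordered_set<int32_t>>& configuredModes) {
    for (int id : streamIds) {
        auto it = configuredModes.find(id);
        if (it == configuredModes.end()) return false;           // stream never configured
        if (it->second.count(requestedMode) == 0) return false;  // mode not configured for it
    }
    return true;
}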
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9d3874f..76b3f53 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -28,14 +28,13 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::CompositeStream;
 
 namespace android {
 
-typedef std::function<CameraMetadata (const String8 &)> metadataGetter;
-
 struct CameraDeviceClientBase :
          public CameraService::BasicClient,
          public hardware::camera2::BnCameraDeviceUser
@@ -50,10 +49,11 @@
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int api1CameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
             int servicePid);
@@ -95,6 +95,7 @@
 
     virtual binder::Status endConfigure(int operatingMode,
             const hardware::camera2::impl::CameraMetadataNative& sessionParams,
+            int64_t startTimeMs,
             /*out*/
             std::vector<int>* offlineStreamIds) override;
 
@@ -114,6 +115,7 @@
 
     // Create an input stream of width, height, and format.
     virtual binder::Status createInputStream(int width, int height, int format,
+            bool isMultiResolution,
             /*out*/
             int32_t* newStreamId = NULL) override;
 
@@ -175,12 +177,14 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
+            const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
+            int sensorOrientation,
             int clientPid,
             uid_t clientUid,
-            int servicePid);
+            int servicePid,
+            bool overrideForPerfClass);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -188,6 +192,9 @@
 
     virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    virtual bool          supportsCameraMute();
+    virtual status_t      setCameraMute(bool enabled);
+
     virtual status_t      dump(int fd, const Vector<String16>& args);
 
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
@@ -196,7 +203,8 @@
      * Device listener interface
      */
 
-    virtual void notifyIdle();
+    virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                            const std::vector<hardware::CameraStreamStats>& streamStats);
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras& resultExtras);
     virtual void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp);
@@ -204,16 +212,7 @@
     virtual void notifyRequestQueueEmpty();
     virtual void notifyRepeatingRequestError(long lastFrameNumber);
 
-    // utility function to convert AIDL SessionConfiguration to HIDL
-    // streamConfiguration. Also checks for validity of SessionConfiguration and
-    // returns a non-ok binder::Status if the passed in session configuration
-    // isn't valid.
-    static binder::Status
-    convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-            const String8 &cameraId, const CameraMetadata &deviceInfo,
-            metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-            hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
-            bool *earlyExit);
+    void setImageDumpMask(int mask) { if (mDevice != nullptr) mDevice->setImageDumpMask(mask); }
     /**
      * Interface used by independent components of CameraDeviceClient.
      */
@@ -225,6 +224,13 @@
     // Calculate the ANativeWindow transform from android.sensor.orientation
     status_t              getRotationTransformLocked(/*out*/int32_t* transform);
 
+    bool isUltraHighResolutionSensor(const String8 &cameraId);
+
+    bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
+            const CameraMetadata &settings);
+
+    const CameraMetadata &getStaticInfo(const String8 &cameraId);
+
 private:
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
     // streamId specifies the index of the stream the surface belongs to, and the
@@ -266,18 +272,8 @@
 
     /** Utility members */
     binder::Status checkPidStatus(const char* checkLocation);
-    static binder::Status checkOperatingMode(int operatingMode, const CameraMetadata &staticInfo,
-            const String8 &cameraId);
-    static binder::Status checkSurfaceType(size_t numBufferProducers, bool deferredConsumer,
-            int surfaceType);
-    static void mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3_stream_rotation_t rotation, String8 physicalId,
-            hardware::camera::device::V3_4::Stream *stream /*out*/);
     bool enforceRequestPermissions(CameraMetadata& metadata);
 
-    // Find the square of the euclidean distance between two points
-    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
-
     // Create an output stream with surface deferred for future.
     binder::Status createDeferredSurfaceStreamLocked(
             const hardware::camera2::params::OutputConfiguration &outputConfiguration,
@@ -288,32 +284,14 @@
     // cases.
     binder::Status setStreamTransformLocked(int streamId);
 
-    // Find the closest dimensions for a given format in available stream configurations with
-    // a width <= ROUNDING_WIDTH_CAP
-    static const int32_t ROUNDING_WIDTH_CAP = 1920;
-    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
-            android_dataspace dataSpace, const CameraMetadata& info,
-            /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
-
-    //check if format is not custom format
-    static bool isPublicFormat(int32_t format);
-
-    // Create a Surface from an IGraphicBufferProducer. Returns error if
-    // IGraphicBufferProducer's property doesn't match with streamInfo
-    static binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-            sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp, const String8 &cameraId,
-            const CameraMetadata &physicalCameraMetadata);
-
-
     // Utility method to insert the surface into SurfaceMap
     binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
             /*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
             /*out*/int32_t*  currentStreamId);
 
-    // Check that the physicalCameraId passed in is spported by the camera
-    // device.
-    static binder::Status checkPhysicalCameraId(const std::vector<std::string> &physicalCameraIds,
-            const String8 &physicalCameraId, const String8 &logicalCameraId);
+    // Utility method that maps AIDL request templates.
+    binder::Status mapRequestTemplate(int templateId,
+            camera_request_template_t* tempId /*out*/);
 
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
     KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
@@ -336,6 +314,8 @@
 
     int32_t mRequestIdCounter;
 
+    std::vector<std::string> mPhysicalCameraIds;
+
     // The list of output streams whose surfaces are deferred. We have to track them separately
     // as there are no surfaces available and can not be put into mStreamMap. Once the deferred
     // Surface is configured, the stream id will be moved to mStreamMap.
@@ -344,10 +324,18 @@
     // stream ID -> outputStreamInfo mapping
     std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
 
+    // map high resolution camera id (logical / physical) -> list of stream ids configured
+    std::unordered_map<std::string, std::unordered_set<int>> mHighResolutionCameraIdToStreamIdSet;
+
+    // set of high resolution camera id (logical / physical)
+    std::unordered_set<std::string> mHighResolutionSensors;
+
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 
-    static const int32_t MAX_SURFACES_PER_STREAM = 4;
     sp<CameraProviderManager> mProviderManager;
+
+    // Override the camera characteristics for performance class primary cameras.
+    bool mOverrideForPerfClass;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 237c24b..ef15f2d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -72,6 +72,16 @@
     return OK;
 }
 
+bool CameraOfflineSessionClient::supportsCameraMute() {
+    // Offline mode doesn't support muting
+    return false;
+}
+
+status_t CameraOfflineSessionClient::setCameraMute(bool) {
+    return INVALID_OPERATION;
+}
+
+
 status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
     return BasicClient::dump(fd, args);
 }
@@ -265,10 +275,17 @@
     }
 }
 
-void CameraOfflineSessionClient::notifyIdle() {
+status_t CameraOfflineSessionClient::notifyActive() {
+    return startCameraStreamingOps();
+}
+
+void CameraOfflineSessionClient::notifyIdle(
+        int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
+        const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
     if (mRemoteCallback.get() != nullptr) {
         mRemoteCallback->onDeviceIdle();
     }
+    finishCameraStreamingOps();
 }
 
 void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index b67fcb3..b219a4c 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -48,14 +48,14 @@
             const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
-            const std::unique_ptr<String16>& clientFeatureId,
-            const String8& cameraIdStr, int cameraFacing,
+            const std::optional<String16>& clientFeatureId,
+            const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
                     clientPackageName, clientFeatureId,
-                    cameraIdStr, cameraFacing, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
@@ -76,6 +76,9 @@
 
     status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    bool supportsCameraMute() override;
+    status_t setCameraMute(bool enabled) override;
+
     // permissions management
     status_t startCameraOps() override;
     status_t finishCameraOps() override;
@@ -86,7 +89,9 @@
     // NotificationListener API
     void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
-    void notifyIdle() override;
+    status_t notifyActive() override;
+    void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
     void notifyAutoExposure(uint8_t newState, int triggerId) override;
     void notifyAutoWhitebalance(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index a61dac7..4b840fc 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -46,8 +46,10 @@
 
 status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-        camera3_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
-        std::vector<int> * surfaceIds, int streamSetId, bool isShared) {
+        camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> * surfaceIds,
+        int streamSetId, bool isShared, bool isMultiResolution) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -66,8 +68,14 @@
         return BAD_VALUE;
     }
 
-    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation, id,
-            physicalCameraId, surfaceIds, streamSetId, isShared);
+    if (isMultiResolution) {
+        ALOGE("%s: Multi-resolution output not supported in case of composite streams!",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
 }
 
 status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 5f62d47..600bd28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -43,8 +43,10 @@
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared);
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, bool isMultiResolution);
 
     status_t deleteStream();
 
@@ -54,8 +56,10 @@
     // Create and register all internal camera streams.
     virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) = 0;
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index c6859be..a66a592 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -20,6 +20,7 @@
 
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
@@ -41,17 +42,29 @@
         mDepthBufferAcquired(false),
         mBlobBufferAcquired(false),
         mProducerListener(new ProducerListener()),
-        mMaxJpegSize(-1),
+        mMaxJpegBufferSize(-1),
+        mUHRMaxJpegBufferSize(-1),
         mIsLogicalCamera(false) {
     if (device != nullptr) {
         CameraMetadata staticInfo = device->info();
         auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
         if (entry.count > 0) {
-            mMaxJpegSize = entry.data.i32[0];
+            mMaxJpegBufferSize = entry.data.i32[0];
         } else {
             ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
         }
 
+        mUHRMaxJpegSize =
+                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+                        /*isUltraHighResolution*/true);
+        mDefaultMaxJpegSize =
+                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+                        /*isUltraHighResolution*/false);
+
+        mUHRMaxJpegBufferSize =
+            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+                    mMaxJpegBufferSize);
+
         entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
         if (entry.count == 5) {
             mIntrinsicCalibration.reserve(5);
@@ -78,7 +91,10 @@
             }
         }
 
-        getSupportedDepthSizes(staticInfo, &mSupportedDepthSizes);
+        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
+        if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+            getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
+        }
     }
 }
 
@@ -239,13 +255,22 @@
         jpegSize = inputFrame.jpegBuffer.width;
     }
 
-    size_t maxDepthJpegSize;
-    if (mMaxJpegSize > 0) {
-        maxDepthJpegSize = mMaxJpegSize;
+    size_t maxDepthJpegBufferSize = 0;
+    if (mMaxJpegBufferSize > 0) {
+        // If this is an ultra high resolution sensor and the input frame is
+        // larger than the default-resolution max jpeg size, use the UHR max
+        // jpeg buffer size.
+        if (mUHRMaxJpegSize.width != 0 &&
+                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
+        } else {
+            maxDepthJpegBufferSize = mMaxJpegBufferSize;
+        }
     } else {
-        maxDepthJpegSize = std::max<size_t> (jpegSize,
+        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                 inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
     }
+
     uint8_t jpegQuality = 100;
     auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
     if (entry.count > 0) {
@@ -255,7 +280,7 @@
     // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
     // jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
     // max jpeg size.
-    size_t finalJpegBufferSize = maxDepthJpegSize * 3;
+    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;
 
     if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
             != OK) {
@@ -298,7 +323,7 @@
     depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
     depthPhoto.mJpegQuality = jpegQuality;
     depthPhoto.mIsLogical = mIsLogicalCamera;
-    depthPhoto.mMaxJpegSize = maxDepthJpegSize;
+    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
     // The camera intrinsic calibration layout is as follows:
     // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
     if (mIntrinsicCalibration.size() == 5) {
@@ -341,7 +366,7 @@
         return res;
     }
 
-    size_t finalJpegSize = actualJpegSize + sizeof(struct camera3_jpeg_blob);
+    size_t finalJpegSize = actualJpegSize + sizeof(struct camera_jpeg_blob);
     if (finalJpegSize > finalJpegBufferSize) {
         ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
         outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
@@ -357,9 +382,9 @@
 
     ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
     uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
-        (gb->getWidth() - sizeof(struct camera3_jpeg_blob));
-    struct camera3_jpeg_blob *blob = reinterpret_cast<struct camera3_jpeg_blob*> (header);
-    blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+        (gb->getWidth() - sizeof(struct camera_jpeg_blob));
+    struct camera_jpeg_blob *blob = reinterpret_cast<struct camera_jpeg_blob*> (header);
+    blob->jpeg_blob_id = CAMERA_JPEG_BLOB_ID;
     blob->jpeg_size = actualJpegSize;
     outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
 
@@ -484,17 +509,82 @@
     return false;
 }
 
+static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
+    return containerSet.find(value) != containerSet.end();
+}
+
+status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        size_t *depthWidth, size_t *depthHeight) {
+    if (depthWidth == nullptr || depthHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    bool hasDefaultSensorPixelMode =
+            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+
+    bool hasMaximumResolutionSensorPixelMode =
+        setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+
+    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
+        ALOGE("%s: sensor pixel modes contain neither maximum resolution nor default mode",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (hasDefaultSensorPixelMode) {
+        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
+                &chosenDepthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    if (hasMaximumResolutionSensorPixelMode) {
+        size_t depthWidth = 0, depthHeight = 0;
+        auto ret = getMatchingDepthSize(width, height,
+                depthSizesMaximumResolution, &depthWidth, &depthHeight);
+        if (ret != OK) {
+            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
+            return ret;
+        }
+        // Both matching depth sizes should be the same.
+        if (chosenDepthWidth != 0 && chosenDepthWidth != depthWidth &&
+                chosenDepthHeight != depthHeight) {
+            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
+                    " have matching depth sizes", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        if (chosenDepthWidth == 0) {
+            chosenDepthWidth = depthWidth;
+            chosenDepthHeight = depthHeight;
+        }
+    }
+    *depthWidth = chosenDepthWidth;
+    *depthHeight = chosenDepthHeight;
+    return OK;
+}
+
+
 status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
-        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
     size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(width, height, mSupportedDepthSizes, &depthWidth, &depthHeight);
+    auto ret =
+            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
+                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
+                    &depthHeight);
     if (ret != OK) {
         ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
         return ret;
@@ -515,7 +605,7 @@
     mBlobSurface = new Surface(producer);
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
-            id, physicalCameraId, surfaceIds);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
     if (ret == OK) {
         mBlobStreamId = *id;
         mBlobSurfaceId = (*surfaceIds)[0];
@@ -531,7 +621,8 @@
     mDepthSurface = new Surface(producer);
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
-            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, &depthSurfaceId);
+            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
+            &depthSurfaceId);
     if (ret == OK) {
         mDepthSurfaceId = depthSurfaceId[0];
     } else {
@@ -666,13 +757,11 @@
 status_t DepthCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
         Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
     if (outSurfaceMap->find(mDepthStreamId) == outSurfaceMap->end()) {
-        (*outSurfaceMap)[mDepthStreamId] = std::vector<size_t>();
         outputStreamIds->push_back(mDepthStreamId);
     }
     (*outSurfaceMap)[mDepthStreamId].push_back(mDepthSurfaceId);
 
     if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
-        (*outSurfaceMap)[mBlobStreamId] = std::vector<size_t>();
         outputStreamIds->push_back(mBlobStreamId);
     }
     (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
@@ -751,13 +840,15 @@
     return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
 }
 
-void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch,
+void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
         std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
     if (depthSizes == nullptr) {
         return;
     }
 
-    auto entry = ch.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+    auto entry = ch.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
     if (entry.count > 0) {
         // Depth stream dimensions have four int32_t components
         // (pixelformat, width, height, type)
@@ -781,30 +872,43 @@
     }
 
     std::vector<std::tuple<size_t, size_t>> depthSizes;
-    getSupportedDepthSizes(ch, &depthSizes);
+    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
+    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
     if (depthSizes.empty()) {
         ALOGE("%s: No depth stream configurations present", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    size_t depthWidth, depthHeight;
-    auto ret = getMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes, &depthWidth,
-            &depthHeight);
+    if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
+        if (depthSizesMaximumResolution.empty()) {
+            ALOGE("%s: No depth stream configurations for maximum resolution present",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+    }
+
+    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
+    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
+            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
+            &chosenDepthHeight);
+
     if (ret != OK) {
-        ALOGE("%s: No matching depth stream size found", __FUNCTION__);
+        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
         return ret;
     }
 
     compositeOutput->clear();
     compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
 
+    // Sensor pixel modes should stay the same here. They're already overridden.
     // Jpeg/Blob stream info
     (*compositeOutput)[0].dataSpace = kJpegDataSpace;
     (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
 
     // Depth stream info
-    (*compositeOutput)[1].width = depthWidth;
-    (*compositeOutput)[1].height = depthHeight;
+    (*compositeOutput)[1].width = chosenDepthWidth;
+    (*compositeOutput)[1].height = chosenDepthHeight;
     (*compositeOutput)[1].format = kDepthMapPixelFormat;
     (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
     (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
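checkAndGetMatchingDepthSize resolves a depth size against the default table, the maximum-resolution table, or both, and only succeeds when the lookups agree. A compact sketch of that agreement rule, assuming the per-table lookups have already been done and return optional sizes:

#include <cstddef>
#include <optional>
#include <utility>

using Size = std::pair<size_t, size_t>;

// Sketch of the agreement rule described in checkAndGetMatchingDepthSize: when
// both DEFAULT and MAXIMUM_RESOLUTION pixel modes are in play, the depth size
// matched in each table must be identical; with only one mode, the single
// match is used as-is.
std::optional<Size> resolveDepthSize(std::optional<Size> fromDefault,
                                     std::optional<Size> fromMaxResolution) {
    if (!fromDefault && !fromMaxResolution) {
        return std::nullopt;  // neither mode requested or neither table matched
    }
    if (fromDefault && fromMaxResolution && *fromDefault != *fromMaxResolution) {
        return std::nullopt;  // the two tables disagree, mirrors the BAD_VALUE path
    }
    return fromDefault ? fromDefault : fromMaxResolution;
}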
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index cab52b6..c1c75c1 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -50,8 +50,10 @@
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -86,11 +88,17 @@
     };
 
     // Helper methods
-    static void getSupportedDepthSizes(const CameraMetadata& ch,
+    static void getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
             std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/);
     static status_t getMatchingDepthSize(size_t width, size_t height,
             const std::vector<std::tuple<size_t, size_t>>& supporedDepthSizes,
             size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
+    static status_t checkAndGetMatchingDepthSize(size_t width, size_t height,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
+        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
+
 
     // Dynamic depth processing
     status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
@@ -124,8 +132,14 @@
     sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
     sp<ProducerListener> mProducerListener;
 
-    ssize_t              mMaxJpegSize;
+    ssize_t              mMaxJpegBufferSize;
+    ssize_t              mUHRMaxJpegBufferSize;
+
+    camera3::Size        mDefaultMaxJpegSize;
+    camera3::Size        mUHRMaxJpegSize;
+
     std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
+    std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
     std::vector<float>   mIntrinsicCalibration, mLensDistortion;
     bool                 mIsLogicalCamera;
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a63f402..a73ffb9 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -16,6 +16,7 @@
 
 #define LOG_TAG "Camera3-HeicCompositeStream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
 //#define LOG_NDEBUG 0
 
 #include <linux/memfd.h>
@@ -36,6 +37,7 @@
 
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -65,7 +67,6 @@
         mYuvBufferAcquired(false),
         mProducerListener(new ProducerListener()),
         mDequeuedOutputBufferCnt(0),
-        mLockedAppSegmentBufferCnt(0),
         mCodecOutputCounter(0),
         mQuality(-1),
         mGridTimestampUs(0),
@@ -115,8 +116,10 @@
 
 status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
-        camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/) {
 
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
@@ -142,7 +145,8 @@
     mStaticInfo = device->info();
 
     res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
-            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
+            kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+            sensorPixelModesUsed, surfaceIds);
     if (res == OK) {
         mAppSegmentSurfaceId = (*surfaceIds)[0];
     } else {
@@ -178,7 +182,7 @@
     int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
-            rotation, id, physicalCameraId, &sourceSurfaceId);
+            rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
     if (res == OK) {
         mMainImageSurfaceId = sourceSurfaceId[0];
         mMainImageStreamId = *id;
@@ -510,7 +514,8 @@
 
     sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != nullptr) {
-        mStatusId = statusTracker->addComponent();
+        std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
+        mStatusId = statusTracker->addComponent(name);
     }
 
     run("HeicCompositeStreamProc");
@@ -521,13 +526,11 @@
 status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
         Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
     if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
-        (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
         outputStreamIds->push_back(mAppSegmentStreamId);
     }
     (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
 
     if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
-        (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
         outputStreamIds->push_back(mMainImageStreamId);
     }
     (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
@@ -634,7 +637,6 @@
             mAppSegmentConsumer->unlockBuffer(imgBuffer);
         } else {
             mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
-            mLockedAppSegmentBufferCnt++;
         }
         mInputAppSegmentBuffers.erase(it);
         mAppSegmentFrameNumbers.pop();
@@ -897,10 +899,6 @@
                         strerror(-res), res);
                 return res;
             }
-        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
-            ALOGE("%s: Out-of-order app segment buffers reaches limit %u", __FUNCTION__,
-                    kMaxAcquiredAppSegment);
-            return INVALID_OPERATION;
         }
     }
 
@@ -1038,7 +1036,6 @@
     mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
     inputFrame.appSegmentBuffer.data = nullptr;
     inputFrame.exifError = false;
-    mLockedAppSegmentBufferCnt--;
 
     return OK;
 }
@@ -1384,7 +1381,9 @@
     mOutputWidth = width;
     mOutputHeight = height;
     mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
-    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
+    mMaxHeicBufferSize =
+        ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+        ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
 
     return OK;
 }
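The new mMaxHeicBufferSize rounds the output dimensions up to the 512x512 encoder grid before applying the 3/2 YUV420 factor. A standalone worked example (the 4032x3024 size and the 1 MiB app segment budget are illustrative only):

#include <cstddef>
#include <cstdio>

// ALIGN(x, mask) rounds x up to the next multiple of mask (mask must be 2^n).
constexpr size_t alignUp(size_t x, size_t mask) { return (x + mask - 1) & ~(mask - 1); }

int main() {
    const size_t w = 4032, h = 3024;           // illustrative sensor output size
    const size_t grid = 512;                   // HeicEncoderInfoManager::kGridWidth/kGridHeight
    const size_t appSegmentMaxSize = 1 << 20;  // placeholder for calcAppSegmentMaxSize()

    // alignUp(4032, 512) = 4096 and alignUp(3024, 512) = 3072, so the YUV420
    // portion is 4096 * 3072 * 3 / 2 = 18,874,368 bytes (vs. 18,289,152 unaligned).
    const size_t maxHeicBufferSize =
            alignUp(w, grid) * alignUp(h, grid) * 3 / 2 + appSegmentMaxSize;
    std::printf("mMaxHeicBufferSize = %zu bytes\n", maxHeicBufferSize);
    return 0;
}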
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 33ca69a..1077a1f 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -45,8 +45,10 @@
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared) override;
 
     status_t deleteInternalStreams() override;
 
@@ -253,7 +255,6 @@
 
     // Keep all incoming APP segment Blob buffer pending further processing.
     std::vector<int64_t> mInputAppSegmentBuffers;
-    int32_t           mLockedAppSegmentBufferCnt;
 
     // Keep all incoming HEIC blob buffer pending further processing.
     std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index 58edba2..a65be9c 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -38,6 +38,7 @@
     bool isSizeSupported(int32_t width, int32_t height,
             bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) const;
 
+    // kGridWidth and kGridHeight should be 2^n
     static const auto kGridWidth = 512;
     static const auto kGridHeight = 512;
 private:
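The 2^n note above matters because the bit-mask form of ALIGN only rounds correctly for power-of-two alignments; a quick standalone check:

#include <cstddef>

// Subtracting one from the mask must produce a contiguous run of low bits for
// ~(mask - 1) to clear them; that only holds when mask is a power of two.
constexpr size_t alignUp(size_t x, size_t mask) { return (x + mask - 1) & ~(mask - 1); }

static_assert(alignUp(300, 512) == 512, "power-of-two mask rounds up as expected");
static_assert(alignUp(300, 500) == 524, "non power-of-two mask yields a non-multiple of 500");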
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0a41776..13d044a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,12 +27,15 @@
 #include <gui/Surface.h>
 #include <gui/Surface.h>
 
+#include <camera/CameraSessionStats.h>
+
 #include "common/Camera2ClientBase.h"
 
 #include "api2/CameraDeviceClient.h"
 
 #include "device3/Camera3Device.h"
 #include "utils/CameraThreadState.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 using namespace camera2;
@@ -44,18 +47,21 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         const String16& clientPackageName,
-        const std::unique_ptr<String16>& clientFeatureId,
+        const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
         int cameraFacing,
+        int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid):
+        int servicePid,
+        bool overrideForPerfClass):
         TClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, cameraFacing, clientPid, clientUid, servicePid),
+                cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid, clientUid,
+                servicePid),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
-        mDevice(new Camera3Device(cameraId)),
+        mDevice(new Camera3Device(cameraId, overrideForPerfClass)),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
@@ -190,11 +196,18 @@
 
     ALOGV("Camera %s: Shutting down", TClientBase::mCameraIdStr.string());
 
+    // Before detaching the device, cache the info from current open session.
+    // The disconnected check avoids duplication of info and also prevents
+    // deadlock while acquiring service lock in cacheDump.
+    if (!TClientBase::mDisconnected) {
+        Camera2ClientBase::getCameraService()->cacheDump();
+    }
+
     detachDevice();
 
     CameraService::BasicClient::disconnect();
 
-    ALOGV("Camera %s: Shut down complete complete", TClientBase::mCameraIdStr.string());
+    ALOGV("Camera %s: Shut down complete", TClientBase::mCameraIdStr.string());
 
     return res;
 }
@@ -245,13 +258,34 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyIdle() {
+status_t Camera2ClientBase<TClientBase>::notifyActive() {
+    if (!mDeviceActive) {
+        status_t res = TClientBase::startCameraStreamingOps();
+        if (res != OK) {
+            ALOGE("%s: Camera %s: Error starting camera streaming ops: %d", __FUNCTION__,
+                    TClientBase::mCameraIdStr.string(), res);
+            return res;
+        }
+        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
+    }
+    mDeviceActive = true;
+
+    ALOGV("Camera device is now active");
+    return OK;
+}
+
+template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyIdle(
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     if (mDeviceActive) {
-        getCameraService()->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_IDLE, TClientBase::mCameraIdStr,
-            TClientBase::mCameraFacing, TClientBase::mClientPackageName,
-            ((mApi1CameraId < 0) ? hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2 :
-             hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1));
+        status_t res = TClientBase::finishCameraStreamingOps();
+        if (res != OK) {
+            ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
+                    TClientBase::mCameraIdStr.string(), res);
+        }
+        CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+                requestCount, resultErrorCount, deviceError, streamStats);
     }
     mDeviceActive = false;
 
@@ -264,15 +298,6 @@
     (void)resultExtras;
     (void)timestamp;
 
-    if (!mDeviceActive) {
-        getCameraService()->updateProxyDeviceState(
-            hardware::ICameraServiceProxy::CAMERA_STATE_ACTIVE, TClientBase::mCameraIdStr,
-            TClientBase::mCameraFacing, TClientBase::mClientPackageName,
-            ((mApi1CameraId < 0) ? hardware::ICameraServiceProxy::CAMERA_API_LEVEL_2 :
-             hardware::ICameraServiceProxy::CAMERA_API_LEVEL_1));
-    }
-    mDeviceActive = true;
-
     ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
             __FUNCTION__, resultExtras.requestId, timestamp);
 }
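
The notifyActive()/notifyIdle() changes above follow a transition-guard pattern: streaming ops are started (and the active state logged) only on the idle-to-active edge, and finished (with stats flushed) only on the active-to-idle edge, so repeated notifications in the same state are harmless. A minimal sketch of that pattern with stand-in types, not the AOSP classes:

#include <cstdio>

class StreamingSession {
  public:
    // Mirrors the guarded start of streaming ops; returns 0 on success.
    int notifyActive() {
        if (!mActive) {
            if (int res = startStreamingOps(); res != 0) return res;
            logActive();                              // logged once per transition
        }
        mActive = true;
        return 0;
    }

    void notifyIdle(long requestCount, long resultErrorCount) {
        if (mActive) {
            finishStreamingOps();
            logIdle(requestCount, resultErrorCount);  // stats flushed once per transition
        }
        mActive = false;
    }

  private:
    bool mActive = false;

    int startStreamingOps() { return 0; }             // stands in for app-ops bookkeeping
    void finishStreamingOps() {}
    void logActive() { std::puts("active"); }
    void logIdle(long req, long err) {
        std::printf("idle: %ld requests, %ld result errors\n", req, err);
    }
};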
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 042f5aa..6246f7b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -48,13 +48,15 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       const String16& clientPackageName,
-                      const std::unique_ptr<String16>& clientFeatureId,
+                      const std::optional<String16>& clientFeatureId,
                       const String8& cameraId,
                       int api1CameraId,
                       int cameraFacing,
+                      int sensorOrientation,
                       int clientPid,
                       uid_t clientUid,
-                      int servicePid);
+                      int servicePid,
+                      bool overrideForPerfClass);
     virtual ~Camera2ClientBase();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager, const String8& monitorTags);
@@ -66,7 +68,10 @@
 
     virtual void          notifyError(int32_t errorCode,
                                       const CaptureResultExtras& resultExtras);
-    virtual void          notifyIdle();
+    virtual status_t      notifyActive();  // Returns errors on app ops permission failures
+    virtual void          notifyIdle(int64_t requestCount, int64_t resultErrorCount,
+                                     bool deviceError,
+                                     const std::vector<hardware::CameraStreamStats>& streamStats);
     virtual void          notifyShutter(const CaptureResultExtras& resultExtras,
                                         nsecs_t timestamp);
     virtual void          notifyAutoFocus(uint8_t newState, int triggerId);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index a537ef5..85b0cc2 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -28,7 +28,6 @@
 #include <utils/List.h>
 
 #include "hardware/camera2.h"
-#include "hardware/camera3.h"
 #include "camera/CameraMetadata.h"
 #include "camera/CaptureResult.h"
 #include "gui/IGraphicBufferProducer.h"
@@ -41,6 +40,41 @@
 
 namespace android {
 
+namespace camera3 {
+
+typedef enum camera_request_template {
+    CAMERA_TEMPLATE_PREVIEW = 1,
+    CAMERA_TEMPLATE_STILL_CAPTURE = 2,
+    CAMERA_TEMPLATE_VIDEO_RECORD = 3,
+    CAMERA_TEMPLATE_VIDEO_SNAPSHOT = 4,
+    CAMERA_TEMPLATE_ZERO_SHUTTER_LAG = 5,
+    CAMERA_TEMPLATE_MANUAL = 6,
+    CAMERA_TEMPLATE_COUNT,
+    CAMERA_VENDOR_TEMPLATE_START = 0x40000000
+} camera_request_template_t;
+
+typedef enum camera_stream_configuration_mode {
+    CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
+    CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
+    CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
+} camera_stream_configuration_mode_t;
+
+typedef struct camera_jpeg_blob {
+    uint16_t jpeg_blob_id;
+    uint32_t jpeg_size;
+} camera_jpeg_blob_t;
+
+enum {
+    CAMERA_JPEG_BLOB_ID = 0x00FF,
+    CAMERA_JPEG_APP_SEGMENTS_BLOB_ID = 0x0100,
+};
+
+} // namespace camera3
+
+using camera3::camera_request_template_t;
+using camera3::camera_stream_configuration_mode_t;
+using camera3::camera_stream_rotation_t;
+
 class CameraProviderManager;
 
 // Mapping of output stream index to surface ids
@@ -128,11 +162,13 @@
      */
     virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t>  &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) = 0;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -143,11 +179,13 @@
      */
     virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) = 0;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -155,7 +193,7 @@
      * Return value is the stream ID if non-negative and an error if negative.
      */
     virtual status_t createInputStream(uint32_t width, uint32_t height,
-            int32_t format, /*out*/ int32_t *id) = 0;
+            int32_t format, bool multiResolution, /*out*/ int32_t *id) = 0;
 
     struct StreamInfo {
         uint32_t width;
@@ -225,7 +263,8 @@
      * - INVALID_OPERATION if the device was in the wrong state
      */
     virtual status_t configureStreams(const CameraMetadata& sessionParams,
-            int operatingMode = 0) = 0;
+            int operatingMode =
+            camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) = 0;
 
     /**
      * Retrieve a list of all stream ids that were advertised as capable of
@@ -241,7 +280,7 @@
      * Create a metadata buffer with fields that the HAL device believes are
      * best for the given use case
      */
-    virtual status_t createDefaultRequest(int templateId,
+    virtual status_t createDefaultRequest(camera_request_template_t templateId,
             CameraMetadata *request) = 0;
 
     /**
@@ -364,9 +403,32 @@
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
 
     /**
+     * Whether camera muting (producing black-only output) is supported.
+     *
+     * Calling setCameraMute(true) when this returns false will return an
+     * INVALID_OPERATION error.
+     */
+    virtual bool supportsCameraMute() = 0;
+
+    /**
+     * Mute the camera.
+     *
+     * When muted, black image data is output on all output streams.
+     */
+    virtual status_t setCameraMute(bool enabled) = 0;
+
+    /**
      * Get the status tracker of the camera device
      */
     virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
+
+    /**
+     * Set bitmask for image dump flag
+     */
+    void setImageDumpMask(int mask) { mImageDumpMask = mask; }
+
+protected:
+    int mImageDumpMask = 0;
 };
 
 }; // namespace android
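
The camera_jpeg_blob_t and CAMERA_JPEG_BLOB_ID definitions moved into this header describe the transport trailer that the HAL conventionally writes at the very end of a BLOB (JPEG) output buffer so consumers can recover the actual encoded size. A self-contained sketch of that lookup; illustrative only, with the struct redefined locally and a hypothetical helper name:

#include <cstddef>
#include <cstdint>
#include <cstring>

struct JpegBlobTrailer {                    // mirrors camera_jpeg_blob_t
    uint16_t jpeg_blob_id;
    uint32_t jpeg_size;
};
constexpr uint16_t kJpegBlobId = 0x00FF;    // mirrors CAMERA_JPEG_BLOB_ID

// Returns the encoded JPEG size, or 0 if no valid trailer was found.
size_t getJpegSize(const uint8_t* buffer, size_t bufferSize) {
    if (buffer == nullptr || bufferSize < sizeof(JpegBlobTrailer)) {
        return 0;
    }
    JpegBlobTrailer trailer;
    std::memcpy(&trailer, buffer + bufferSize - sizeof(trailer), sizeof(trailer));
    if (trailer.jpeg_blob_id == kJpegBlobId && trailer.jpeg_size <= bufferSize) {
        return trailer.jpeg_size;
    }
    return 0;
}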
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 1f835a9..54e42a6 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -17,11 +17,14 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERAOFFLINESESSIONBASE_H
 #define ANDROID_SERVERS_CAMERA_CAMERAOFFLINESESSIONBASE_H
 
+#include <vector>
+
 #include <utils/RefBase.h>
 #include <utils/String8.h>
 #include <utils/Timers.h>
 
 #include "camera/CaptureResult.h"
+#include "camera/CameraSessionStats.h"
 #include "FrameProducer.h"
 
 namespace android {
@@ -37,9 +40,11 @@
     // Required for API 1 and 2
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras &resultExtras) = 0;
+    virtual status_t notifyActive() = 0; // May return an error since it checks appops
+    virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+            const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
 
     // Required only for API2
-    virtual void notifyIdle() = 0;
     virtual void notifyShutter(const CaptureResultExtras &resultExtras,
             nsecs_t timestamp) = 0;
     virtual void notifyPrepared(int streamId) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 32d118d..4f2b878 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -20,7 +20,7 @@
 
 #include "CameraProviderManager.h"
 
-#include <android/hardware/camera/device/3.5/ICameraDevice.h>
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
 #include <algorithm>
 #include <chrono>
@@ -28,7 +28,6 @@
 #include <dlfcn.h>
 #include <future>
 #include <inttypes.h>
-#include <hardware/camera_common.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <functional>
@@ -47,9 +46,10 @@
 
 using namespace ::android::hardware::camera;
 using namespace ::android::hardware::camera::common::V1_0;
+using camera3::SessionConfigurationUtils;
 using std::literals::chrono_literals::operator""s;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
-using hardware::camera::provider::V2_6::CameraIdAndStreamCombination;
+using hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -267,7 +267,7 @@
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
-        const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
         bool *status /*out*/) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
@@ -279,9 +279,9 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
-        CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return getCameraCharacteristicsLocked(id, characteristics);
+    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -417,46 +417,6 @@
     return mapToStatusT(status);
 }
 
-status_t CameraProviderManager::openSession(const std::string &id,
-        const sp<device::V1_0::ICameraDeviceCallback>& callback,
-        /*out*/
-        sp<device::V1_0::ICameraDevice> *session) {
-
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
-    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
-
-    auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
-    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
-    if (parentProvider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    const sp<provider::V2_4::ICameraProvider> provider = parentProvider->startProviderInterface();
-    if (provider == nullptr) {
-        return DEAD_OBJECT;
-    }
-    saveRef(DeviceMode::CAMERA, id, provider);
-
-    auto interface = deviceInfo1->startDeviceInterface<
-            CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> status = interface->open(callback);
-    if (!status.isOk()) {
-        removeRef(DeviceMode::CAMERA, id);
-        ALOGE("%s: Transaction error opening a session for camera device %s: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status == Status::OK) {
-        *session = interface;
-    }
-    return mapToStatusT(status);
-}
-
 void CameraProviderManager::saveRef(DeviceMode usageType, const std::string &cameraId,
         sp<provider::V2_4::ICameraProvider> provider) {
     if (!kEnableLazyHal) {
@@ -515,16 +475,17 @@
 hardware::Return<void> CameraProviderManager::onRegistration(
         const hardware::hidl_string& /*fqName*/,
         const hardware::hidl_string& name,
-        bool /*preexisting*/) {
+        bool preexisting) {
+    status_t res = OK;
     std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        addProviderLocked(name);
+        res = addProviderLocked(name, preexisting);
     }
 
     sp<StatusListener> listener = getStatusListener();
-    if (nullptr != listener.get()) {
+    if (nullptr != listener.get() && res == OK) {
         listener->onNewProviderRegistered();
     }
 
@@ -727,9 +688,39 @@
     }
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags() {
-    uint32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
-    uint32_t depthSizesTag = ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
+        bool maxResolution) {
+    const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
+
+    const int32_t scalerSizesTag =
+              SessionConfigurationUtils::getAppropriateModeTag(
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag =
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+    const int32_t scalerStallDurationsTag =
+                 SessionConfigurationUtils::getAppropriateModeTag(
+                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t depthSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t depthStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t depthMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
+    const int32_t dynamicDepthSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t dynamicDepthStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, maxResolution);
+    const int32_t dynamicDepthMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
     std::vector<std::tuple<size_t, size_t>> supportedBlobSizes, supportedDepthSizes,
             supportedDynamicDepthSizes, internalDepthSizes;
@@ -759,7 +750,7 @@
         return BAD_VALUE;
     }
 
-    getSupportedSizes(c, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, HAL_PIXEL_FORMAT_BLOB,
+    getSupportedSizes(c, scalerSizesTag, HAL_PIXEL_FORMAT_BLOB,
             &supportedBlobSizes);
     getSupportedSizes(c, depthSizesTag, HAL_PIXEL_FORMAT_Y16, &supportedDepthSizes);
     if (supportedBlobSizes.empty() || supportedDepthSizes.empty()) {
@@ -786,10 +777,10 @@
     std::vector<int64_t> blobMinDurations, blobStallDurations;
     std::vector<int64_t> dynamicDepthMinDurations, dynamicDepthStallDurations;
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthMinDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobMinDurations);
+    getSupportedDurations(c, depthMinFrameDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+                          &depthMinDurations);
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+                          supportedDynamicDepthSizes, &blobMinDurations);
     if (blobMinDurations.empty() || depthMinDurations.empty() ||
             (depthMinDurations.size() != blobMinDurations.size())) {
         ALOGE("%s: Unexpected number of available depth min durations! %zu vs. %zu",
@@ -797,10 +788,10 @@
         return BAD_VALUE;
     }
 
-    getSupportedDurations(c, ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_Y16, internalDepthSizes, &depthStallDurations);
-    getSupportedDurations(c, ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
-            HAL_PIXEL_FORMAT_BLOB, supportedDynamicDepthSizes, &blobStallDurations);
+    getSupportedDurations(c, depthStallDurationsTag, HAL_PIXEL_FORMAT_Y16, internalDepthSizes,
+            &depthStallDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedDynamicDepthSizes, &blobStallDurations);
     if (blobStallDurations.empty() || depthStallDurations.empty() ||
             (depthStallDurations.size() != blobStallDurations.size())) {
         ALOGE("%s: Unexpected number of available depth stall durations! %zu vs. %zu",
@@ -845,15 +836,14 @@
     supportedChTags.reserve(chTags.count + 3);
     supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
             chTags.data.i32 + chTags.count);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
-    supportedChTags.push_back(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
-            dynamicDepthEntries.data(), dynamicDepthEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS,
-            dynamicDepthMinDurationEntries.data(), dynamicDepthMinDurationEntries.size());
-    c.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS,
-            dynamicDepthStallDurationEntries.data(), dynamicDepthStallDurationEntries.size());
+    supportedChTags.push_back(dynamicDepthSizesTag);
+    supportedChTags.push_back(dynamicDepthMinFrameDurationsTag);
+    supportedChTags.push_back(dynamicDepthStallDurationsTag);
+    c.update(dynamicDepthSizesTag, dynamicDepthEntries.data(), dynamicDepthEntries.size());
+    c.update(dynamicDepthMinFrameDurationsTag, dynamicDepthMinDurationEntries.data(),
+            dynamicDepthMinDurationEntries.size());
+    c.update(dynamicDepthStallDurationsTag, dynamicDepthStallDurationEntries.data(),
+             dynamicDepthStallDurationEntries.size());
     c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
             supportedChTags.size());
 
@@ -1087,7 +1077,24 @@
     return OK;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() {
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags(bool maxResolution) {
+    int32_t scalerStreamSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t scalerMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+
+    int32_t heicStreamSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+    int32_t heicMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, maxResolution);
+    int32_t heicStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, maxResolution);
+
     auto& c = mCameraCharacteristics;
 
     camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
@@ -1116,10 +1123,8 @@
     std::vector<int64_t> heicDurations;
     std::vector<int64_t> heicStallDurations;
 
-    camera_metadata_entry halStreamConfigs =
-            c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    camera_metadata_entry minFrameDurations =
-            c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+    camera_metadata_entry halStreamConfigs = c.find(scalerStreamSizesTag);
+    camera_metadata_entry minFrameDurations = c.find(scalerMinFrameDurationsTag);
 
     status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
             halStreamConfigs, minFrameDurations);
@@ -1129,12 +1134,9 @@
         return res;
     }
 
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS,
-           heicOutputs.data(), heicOutputs.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
-            heicDurations.data(), heicDurations.size());
-    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
-            heicStallDurations.data(), heicStallDurations.size());
+    c.update(heicStreamSizesTag, heicOutputs.data(), heicOutputs.size());
+    c.update(heicMinFrameDurationsTag, heicDurations.data(), heicDurations.size());
+    c.update(heicStallDurationsTag, heicStallDurations.data(), heicStallDurations.size());
 
     return OK;
 }
@@ -1186,6 +1188,17 @@
     return isHiddenPhysicalCameraInternal(cameraId).first;
 }
 
+status_t CameraProviderManager::filterSmallJpegSizes(const std::string& cameraId) {
+    for (auto& provider : mProviders) {
+        for (auto& deviceInfo : provider->mDevices) {
+            if (deviceInfo->mId == cameraId) {
+                return deviceInfo->filterSmallJpegSizes();
+            }
+        }
+    }
+    return NAME_NOT_FOUND;
+}
+
 std::pair<bool, CameraProviderManager::ProviderInfo::DeviceInfo *>
 CameraProviderManager::isHiddenPhysicalCameraInternal(const std::string& cameraId) const {
     auto falseRet = std::make_pair(false, nullptr);
@@ -1201,14 +1214,6 @@
 
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
-            CameraMetadata info;
-            status_t res = deviceInfo->getCameraCharacteristics(&info);
-            if (res != OK) {
-                ALOGE("%s: Failed to getCameraCharacteristics for id %s", __FUNCTION__,
-                        deviceInfo->mId.c_str());
-                return falseRet;
-            }
-
             std::vector<std::string> physicalIds;
             if (deviceInfo->mIsLogicalCamera) {
                 if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
@@ -1230,31 +1235,53 @@
     return falseRet;
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider) {
-    for (const auto& providerInfo : mProviders) {
-        if (providerInfo->mProviderName == newProvider) {
-            ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
-                    newProvider.c_str());
-            return ALREADY_EXISTS;
-        }
-    }
-
+status_t CameraProviderManager::tryToInitializeProviderLocked(
+        const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     sp<provider::V2_4::ICameraProvider> interface;
-    interface = mServiceProxy->tryGetService(newProvider);
+    interface = mServiceProxy->tryGetService(providerName);
 
     if (interface == nullptr) {
-        ALOGE("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
-                newProvider.c_str());
+        // The interface may not be started yet. In that case, this is not a
+        // fatal error.
+        ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
+                providerName.c_str());
         return BAD_VALUE;
     }
 
-    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, this);
-    status_t res = providerInfo->initialize(interface, mDeviceState);
-    if (res != OK) {
-        return res;
+    return providerInfo->initialize(interface, mDeviceState);
+}
+
+status_t CameraProviderManager::addProviderLocked(const std::string& newProvider,
+        bool preexisting) {
+    // Several camera provider instances can be temporarily present.
+    // Defer initialization of a new instance until the older instance is properly removed.
+    auto providerInstance = newProvider + "-" + std::to_string(mProviderInstanceId);
+    bool providerPresent = false;
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->mProviderName == newProvider) {
+            ALOGW("%s: Camera provider HAL with name '%s' already registered",
+                    __FUNCTION__, newProvider.c_str());
+            if (preexisting) {
+                return ALREADY_EXISTS;
+            } else {
+                ALOGW("%s: The new provider instance will get initialized immediately after the"
+                        " currently present instance is removed!", __FUNCTION__);
+                providerPresent = true;
+                break;
+            }
+        }
+    }
+
+    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, providerInstance, this);
+    if (!providerPresent) {
+        status_t res = tryToInitializeProviderLocked(newProvider, providerInfo);
+        if (res != OK) {
+            return res;
+        }
     }
 
     mProviders.push_back(providerInfo);
+    mProviderInstanceId++;
 
     return OK;
 }
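
addProviderLocked() above now keys providers by a unique instance name (provider name plus a monotonically increasing id) and, when an older instance with the same name is still registered, records the new instance but defers its initialization until the old one is removed. A simplified, self-contained sketch of that bookkeeping with stand-in types, not the AOSP classes:

#include <memory>
#include <string>
#include <vector>

struct Provider {
    std::string baseName;
    std::string instanceName;    // e.g. "legacy/0-3"
    bool initialized = false;
};

class ProviderRegistry {
  public:
    // Register a provider; initialization is deferred if an older instance with
    // the same base name is still present.
    std::shared_ptr<Provider> add(const std::string& baseName) {
        auto p = std::make_shared<Provider>();
        p->baseName = baseName;
        p->instanceName = baseName + "-" + std::to_string(mNextInstanceId++);
        bool olderPresent = false;
        for (const auto& existing : mProviders) {
            if (existing->baseName == baseName) { olderPresent = true; break; }
        }
        if (!olderPresent) {
            p->initialized = true;   // stands in for tryToInitializeProviderLocked()
        }
        mProviders.push_back(p);
        return p;
    }

    // Remove by instance name; if a deferred newer instance of the same base
    // name exists, initialize it now.
    void remove(const std::string& instanceName) {
        std::string baseName;
        for (auto it = mProviders.begin(); it != mProviders.end(); ++it) {
            if ((*it)->instanceName == instanceName) {
                baseName = (*it)->baseName;
                mProviders.erase(it);
                break;
            }
        }
        if (baseName.empty()) return;
        for (auto& p : mProviders) {
            if (p->baseName == baseName && !p->initialized) {
                p->initialized = true;
                break;
            }
        }
    }

  private:
    std::vector<std::shared_ptr<Provider>> mProviders;
    int mNextInstanceId = 0;
};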
@@ -1264,12 +1291,14 @@
     std::unique_lock<std::mutex> lock(mInterfaceMutex);
     std::vector<String8> removedDeviceIds;
     status_t res = NAME_NOT_FOUND;
+    std::string removedProviderName;
     for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
-        if ((*it)->mProviderName == provider) {
+        if ((*it)->mProviderInstance == provider) {
             removedDeviceIds.reserve((*it)->mDevices.size());
             for (auto& deviceInfo : (*it)->mDevices) {
                 removedDeviceIds.push_back(String8(deviceInfo->mId.c_str()));
             }
+            removedProviderName = (*it)->mProviderName;
             mProviders.erase(it);
             res = OK;
             break;
@@ -1279,6 +1308,14 @@
         ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
                 provider.c_str());
     } else {
+        // Check if there are any newer instances of the same provider and try to
+        // initialize them.
+        for (const auto& providerInfo : mProviders) {
+            if (providerInfo->mProviderName == removedProviderName) {
+                return tryToInitializeProviderLocked(removedProviderName, providerInfo);
+            }
+        }
+
         // Inform camera service of loss of presence for all the devices from this provider,
         // without lock held for reentrancy
         sp<StatusListener> listener = getStatusListener();
@@ -1287,7 +1324,9 @@
             for (auto& id : removedDeviceIds) {
                 listener->onDeviceStatusChanged(id, CameraDeviceStatus::NOT_PRESENT);
             }
+            lock.lock();
         }
+
     }
     return res;
 }
@@ -1301,8 +1340,10 @@
 
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
+        const std::string &providerInstance,
         CameraProviderManager *manager) :
         mProviderName(providerName),
+        mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
@@ -1342,6 +1383,28 @@
                 mMinorVersion = 5;
             }
         }
+    } else {
+        auto cast2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        if (cast2_7.isOk()) {
+            sp<provider::V2_7::ICameraProvider> interface2_7 = cast2_7;
+            if (interface2_7 != nullptr) {
+                mMinorVersion = 7;
+            }
+        }
+    }
+
+    // cameraDeviceStatusChange callbacks may be called (and cause new devices to be
+    // added) before setCallback returns
+    hardware::Return<Status> status = interface->setCallback(this);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
     }
 
     hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
@@ -1372,7 +1435,6 @@
         return res;
     }
 
-    Status status;
     // Get initial list of camera devices, if any
     std::vector<std::string> devices;
     hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
@@ -1437,26 +1499,43 @@
         }
     }
 
-    // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
-    // before setCallback returns. setCallback must be called after addDevice so that
-    // the physical camera status callback can look up available regular
-    // cameras.
-    hardware::Return<Status> st = interface->setCallback(this);
-    if (!st.isOk()) {
-        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), st.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (st != Status::OK) {
-        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-        return mapToStatusT(st);
-    }
-
     ALOGI("Camera provider %s ready with %zu camera devices",
             mProviderName.c_str(), mDevices.size());
 
-    mInitialized = true;
+    // Process cached status callbacks
+    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
+            std::make_unique<std::vector<CameraStatusInfoT>>();
+    {
+        std::lock_guard<std::mutex> lock(mInitLock);
+
+        for (auto& statusInfo : mCachedStatus) {
+            std::string id, physicalId;
+            status_t res = OK;
+            if (statusInfo.isPhysicalCameraStatus) {
+                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                    statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
+            } else {
+                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
+            }
+            if (res == OK) {
+                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
+                        id.c_str(), physicalId.c_str(), statusInfo.status);
+            }
+        }
+        mCachedStatus.clear();
+
+        mInitialized = true;
+    }
+
+    // The cached status change callbacks cannot be fired directly from this
+    // function: doing so would try to acquire mInterfaceMutex twice on the same
+    // thread and deadlock.
+    if (listener != nullptr) {
+        mInitialStatusCallbackFuture = std::async(std::launch::async,
+                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
+                listener, std::move(cachedStatus));
+    }
+
     return OK;
 }
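
The std::async dispatch above exists because the listener can re-enter the provider manager and try to take mInterfaceMutex again on the same thread, as the code comment notes. A minimal, generic sketch of the defer-and-notify-without-the-lock pattern, using stand-in types rather than the AOSP classes:

#include <functional>
#include <future>
#include <mutex>
#include <utility>
#include <vector>

class DeferredNotifier {
  public:
    // Record an event under the lock; listener callbacks are not fired here
    // because they may re-enter code that takes mMutex again and deadlock.
    void recordEvent(int event) {
        std::lock_guard<std::mutex> lock(mMutex);
        mPending.push_back(event);
    }

    // Notify from a separate task, with no lock held while the listener runs.
    void flushAsync(std::function<void(int)> listener) {
        std::vector<int> pending;
        {
            std::lock_guard<std::mutex> lock(mMutex);
            pending.swap(mPending);
        }
        mFuture = std::async(std::launch::async,
                [listener, pending = std::move(pending)]() {
                    for (int e : pending) listener(e);
                });
    }

  private:
    std::mutex mMutex;
    std::vector<int> mPending;
    std::future<void> mFuture;   // keeps the task alive, like mInitialStatusCallbackFuture
};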
 
@@ -1537,9 +1616,9 @@
     std::unique_ptr<DeviceInfo> deviceInfo;
     switch (major) {
         case 1:
-            deviceInfo = initializeDeviceInfo<DeviceInfo1>(name, mProviderTagid,
-                    id, minor);
-            break;
+            ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:", __FUNCTION__,
+                    name.c_str(), major);
+            return BAD_VALUE;
         case 3:
             deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
                     id, minor);
@@ -1588,7 +1667,7 @@
 
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
-            mProviderName.c_str(),
+            mProviderInstance.c_str(),
             mMinorVersion,
             mIsRemote ? "remote" : "passthrough",
             mDevices.size());
@@ -1620,7 +1699,7 @@
             dprintf(fd, "    Orientation: %d\n", info.orientation);
         }
         CameraMetadata info2;
-        res = device->getCameraCharacteristics(&info2);
+        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -1734,104 +1813,139 @@
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
-    bool initialized = false;
+    std::lock_guard<std::mutex> lock(mInitLock);
+
+    if (!mInitialized) {
+        mCachedStatus.emplace_back(false /*isPhysicalCameraStatus*/,
+                cameraDeviceName.c_str(), std::string().c_str(), newStatus);
+        return hardware::Void();
+    }
+
     {
         std::lock_guard<std::mutex> lock(mLock);
-        bool known = false;
-        for (auto& deviceInfo : mDevices) {
-            if (deviceInfo->mName == cameraDeviceName) {
-                ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
-                        deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
-                deviceInfo->mStatus = newStatus;
-                // TODO: Handle device removal (NOT_PRESENT)
-                id = deviceInfo->mId;
-                known = true;
-                break;
-            }
-        }
-        // Previously unseen device; status must not be NOT_PRESENT
-        if (!known) {
-            if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
-                ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str());
-                return hardware::Void();
-            }
-            addDevice(cameraDeviceName, newStatus, &id);
-        } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
-            removeDevice(id);
+        if (OK != cameraDeviceStatusChangeLocked(&id, cameraDeviceName, newStatus)) {
+            return hardware::Void();
         }
         listener = mManager->getStatusListener();
-        initialized = mInitialized;
-        if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
-            ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
-                      __FUNCTION__, mProviderName.c_str());
-        }
     }
+
     // Call without lock held to allow reentrancy into provider manager
-    // Don't send the callback if providerInfo hasn't been initialized.
-    // CameraService will initialize device status after provider is
-    // initialized
-    if (listener != nullptr && initialized) {
+    if (listener != nullptr) {
         listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
     }
+
     return hardware::Void();
 }
 
+status_t CameraProviderManager::ProviderInfo::cameraDeviceStatusChangeLocked(
+        std::string* id, const hardware::hidl_string& cameraDeviceName,
+        CameraDeviceStatus newStatus) {
+    bool known = false;
+    std::string cameraId;
+    for (auto& deviceInfo : mDevices) {
+        if (deviceInfo->mName == cameraDeviceName) {
+            ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
+                    deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
+            deviceInfo->mStatus = newStatus;
+            // TODO: Handle device removal (NOT_PRESENT)
+            cameraId = deviceInfo->mId;
+            known = true;
+            break;
+        }
+    }
+    // Previously unseen device; status must not be NOT_PRESENT
+    if (!known) {
+        if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+            ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
+                mProviderName.c_str(), cameraDeviceName.c_str());
+            return BAD_VALUE;
+        }
+        addDevice(cameraDeviceName, newStatus, &cameraId);
+    } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+        removeDevice(cameraId);
+    }
+    if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
+        ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
+                  __FUNCTION__, mProviderName.c_str());
+    }
+    *id = cameraId;
+    return OK;
+}
+
 hardware::Return<void> CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChange(
         const hardware::hidl_string& cameraDeviceName,
         const hardware::hidl_string& physicalCameraDeviceName,
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
-    bool initialized = false;
+    std::string physicalId;
+    std::lock_guard<std::mutex> lock(mInitLock);
+
+    if (!mInitialized) {
+        mCachedStatus.emplace_back(true /*isPhysicalCameraStatus*/, cameraDeviceName,
+                physicalCameraDeviceName, newStatus);
+        return hardware::Void();
+    }
+
     {
         std::lock_guard<std::mutex> lock(mLock);
-        bool known = false;
-        for (auto& deviceInfo : mDevices) {
-            if (deviceInfo->mName == cameraDeviceName) {
-                id = deviceInfo->mId;
 
-                if (!deviceInfo->mIsLogicalCamera) {
-                    ALOGE("%s: Invalid combination of camera id %s, physical id %s",
-                            __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
-                    return hardware::Void();
-                }
-                if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
-                        physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
-                    ALOGE("%s: Invalid combination of camera id %s, physical id %s",
-                            __FUNCTION__, id.c_str(), physicalCameraDeviceName.c_str());
-                    return hardware::Void();
-                }
-                ALOGI("Camera device %s physical device %s status is now %s, was %s",
-                        cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
-                        deviceStatusToString(newStatus), deviceStatusToString(
-                        deviceInfo->mPhysicalStatus[physicalCameraDeviceName]));
-                known = true;
-                break;
-            }
-        }
-        // Previously unseen device; status must not be NOT_PRESENT
-        if (!known) {
-            ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(),
-                    physicalCameraDeviceName.c_str());
+        if (OK != physicalCameraDeviceStatusChangeLocked(&id, &physicalId, cameraDeviceName,
+                physicalCameraDeviceName, newStatus)) {
             return hardware::Void();
         }
+
         listener = mManager->getStatusListener();
-        initialized = mInitialized;
     }
     // Call without lock held to allow reentrancy into provider manager
-    // Don't send the callback if providerInfo hasn't been initialized.
-    // CameraService will initialize device status after provider is
-    // initialized
-    if (listener != nullptr && initialized) {
-        String8 physicalId(physicalCameraDeviceName.c_str());
+    if (listener != nullptr) {
         listener->onDeviceStatusChanged(String8(id.c_str()),
-                physicalId, newStatus);
+                String8(physicalId.c_str()), newStatus);
     }
     return hardware::Void();
 }
 
+status_t CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChangeLocked(
+            std::string* id, std::string* physicalId,
+            const hardware::hidl_string& cameraDeviceName,
+            const hardware::hidl_string& physicalCameraDeviceName,
+            CameraDeviceStatus newStatus) {
+    bool known = false;
+    std::string cameraId;
+    for (auto& deviceInfo : mDevices) {
+        if (deviceInfo->mName == cameraDeviceName) {
+            cameraId = deviceInfo->mId;
+            if (!deviceInfo->mIsLogicalCamera) {
+                ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+                        __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+                return BAD_VALUE;
+            }
+            if (std::find(deviceInfo->mPhysicalIds.begin(), deviceInfo->mPhysicalIds.end(),
+                    physicalCameraDeviceName) == deviceInfo->mPhysicalIds.end()) {
+                ALOGE("%s: Invalid combination of camera id %s, physical id %s",
+                        __FUNCTION__, cameraId.c_str(), physicalCameraDeviceName.c_str());
+                return BAD_VALUE;
+            }
+            ALOGI("Camera device %s physical device %s status is now %s",
+                    cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
+                    deviceStatusToString(newStatus));
+            known = true;
+            break;
+        }
+    }
+    // Previously unseen device; status must not be NOT_PRESENT
+    if (!known) {
+        ALOGW("Camera provider %s says an unknown camera device %s-%s is not present. Curious.",
+                mProviderName.c_str(), cameraDeviceName.c_str(),
+                physicalCameraDeviceName.c_str());
+        return BAD_VALUE;
+    }
+
+    *id = cameraId;
+    *physicalId = physicalCameraDeviceName.c_str();
+    return OK;
+}
+
 hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
         const hardware::hidl_string& cameraDeviceName,
         TorchModeStatus newStatus) {
@@ -1869,12 +1983,12 @@
 void CameraProviderManager::ProviderInfo::serviceDied(uint64_t cookie,
         const wp<hidl::base::V1_0::IBase>& who) {
     (void) who;
-    ALOGI("Camera provider '%s' has died; removing it", mProviderName.c_str());
+    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
                 __FUNCTION__, cookie, mId);
     }
-    mManager->removeProvider(mProviderName);
+    mManager->removeProvider(mProviderInstance);
 }
 
 status_t CameraProviderManager::ProviderInfo::setUpVendorTags() {
@@ -1948,44 +2062,88 @@
             // TODO: This might be some other problem
             return INVALID_OPERATION;
         }
-        auto castResult = provider::V2_6::ICameraProvider::castFrom(interface);
-        if (castResult.isOk()) {
-            sp<provider::V2_6::ICameraProvider> interface_2_6 = castResult;
-            if (interface_2_6 != nullptr) {
-                Status callStatus;
-                auto cb =
-                        [&isSupported, &callStatus](Status s, bool supported) {
-                              callStatus = s;
-                              *isSupported = supported; };
+        auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
+        auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        Status callStatus;
+        auto cb =
+                [&isSupported, &callStatus](Status s, bool supported) {
+                      callStatus = s;
+                      *isSupported = supported; };
 
-                auto ret =  interface_2_6->isConcurrentStreamCombinationSupported(
-                            halCameraIdsAndStreamCombinations, cb);
-                if (ret.isOk()) {
-                    switch (callStatus) {
-                        case Status::OK:
-                            // Expected case, do nothing.
-                            res = OK;
-                            break;
-                        case Status::METHOD_NOT_SUPPORTED:
-                            res = INVALID_OPERATION;
-                            break;
-                        default:
-                            ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                      callStatus);
-                            res = UNKNOWN_ERROR;
-                    }
-                } else {
-                    ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
-                    res = UNKNOWN_ERROR;
-                }
-                return res;
+        ::android::hardware::Return<void> ret;
+        sp<provider::V2_7::ICameraProvider> interface_2_7;
+        sp<provider::V2_6::ICameraProvider> interface_2_6;
+        if (mMinorVersion >= 7 && castResult2_7.isOk()) {
+            interface_2_7 = castResult2_7;
+            if (interface_2_7 != nullptr) {
+                ret = interface_2_7->isConcurrentStreamCombinationSupported_2_7(
+                        halCameraIdsAndStreamCombinations, cb);
             }
+        } else if (mMinorVersion == 6 && castResult2_6.isOk()) {
+            interface_2_6 = castResult2_6;
+            if (interface_2_6 != nullptr) {
+                hardware::hidl_vec<provider::V2_6::CameraIdAndStreamCombination>
+                        halCameraIdsAndStreamCombinations_2_6;
+                size_t numStreams = halCameraIdsAndStreamCombinations.size();
+                halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
+                for (size_t i = 0; i < numStreams; i++) {
+                    using namespace camera3;
+                    auto const& combination = halCameraIdsAndStreamCombinations[i];
+                    halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
+                    bool success =
+                            SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                                    halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+                                    combination.streamConfiguration);
+                    if (!success) {
+                        *isSupported = false;
+                        return OK;
+                    }
+                }
+                ret = interface_2_6->isConcurrentStreamCombinationSupported(
+                        halCameraIdsAndStreamCombinations_2_6, cb);
+            }
+        }
+
+        if (interface_2_7 != nullptr || interface_2_6 != nullptr) {
+            if (ret.isOk()) {
+                switch (callStatus) {
+                    case Status::OK:
+                        // Expected case, do nothing.
+                        res = OK;
+                        break;
+                    case Status::METHOD_NOT_SUPPORTED:
+                        res = INVALID_OPERATION;
+                        break;
+                    default:
+                        ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
+                                  callStatus);
+                        res = UNKNOWN_ERROR;
+                }
+            } else {
+                ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
+                res = UNKNOWN_ERROR;
+            }
+            return res;
         }
     }
     // unsupported operation
     return INVALID_OPERATION;
 }
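
The 2.7-then-2.6 dispatch above follows a common interface-version negotiation shape: try the newest cast first, otherwise convert the request down for the older revision and report it as unsupported when the conversion is lossy. A generic sketch of that shape with hypothetical types, not the HIDL interfaces:

#include <optional>
#include <string>
#include <vector>

struct RequestV2 { std::vector<std::string> streams; bool multiResolution = false; };
struct RequestV1 { std::vector<std::string> streams; };   // older revision, fewer fields

struct ProviderV1 {
    virtual ~ProviderV1() = default;
    virtual bool isSupported(const RequestV1&) = 0;
};
struct ProviderV2 : ProviderV1 {
    virtual bool isSupportedV2(const RequestV2&) = 0;
};

// Downgrade a V2 request to V1; fails if it uses features V1 cannot express.
static std::optional<RequestV1> convertToV1(const RequestV2& req) {
    if (req.multiResolution) return std::nullopt;
    return RequestV1{req.streams};
}

bool querySupport(ProviderV1* provider, const RequestV2& req) {
    if (auto* v2 = dynamic_cast<ProviderV2*>(provider)) {
        return v2->isSupportedV2(req);            // newest revision first
    }
    auto v1Req = convertToV1(req);                // fall back with a lossy conversion
    return v1Req ? provider->isSupported(*v1Req) : false;
}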
 
+void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
+        sp<StatusListener> listener,
+        std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
+    for (auto& statusInfo : *cachedStatus) {
+        if (statusInfo.isPhysicalCameraStatus) {
+            listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
+                    String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
+        } else {
+            listener->onDeviceStatusChanged(
+                    String8(statusInfo.cameraId.c_str()), statusInfo.status);
+        }
+    }
+}
+
 template<class DeviceInfoT>
 std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
     CameraProviderManager::ProviderInfo::initializeDeviceInfo(
@@ -2034,35 +2192,6 @@
 }
 
 template<>
-sp<device::V1_0::ICameraDevice>
-CameraProviderManager::ProviderInfo::startDeviceInterface
-        <device::V1_0::ICameraDevice>(const std::string &name) {
-    Status status;
-    sp<device::V1_0::ICameraDevice> cameraInterface;
-    hardware::Return<void> ret;
-    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
-    if (interface == nullptr) {
-        return nullptr;
-    }
-    ret = interface->getCameraDeviceInterface_V1_x(name, [&status, &cameraInterface](
-        Status s, sp<device::V1_0::ICameraDevice> interface) {
-                status = s;
-                cameraInterface = interface;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
-                __FUNCTION__, name.c_str(), ret.description().c_str());
-        return nullptr;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
-                name.c_str(), statusToString(status));
-        return nullptr;
-    }
-    return cameraInterface;
-}
-
-template<>
 sp<device::V3_2::ICameraDevice>
 CameraProviderManager::ProviderInfo::startDeviceInterface
         <device::V3_2::ICameraDevice>(const std::string &name) {
@@ -2115,126 +2244,6 @@
     return mapToStatusT(s);
 }
 
-CameraProviderManager::ProviderInfo::DeviceInfo1::DeviceInfo1(const std::string& name,
-        const metadata_vendor_id_t tagId, const std::string &id,
-        uint16_t minorVersion,
-        const CameraResourceCost& resourceCost,
-        sp<ProviderInfo> parentProvider,
-        const std::vector<std::string>& publicCameraIds,
-        sp<InterfaceT> interface) :
-        DeviceInfo(name, tagId, id, hardware::hidl_version{1, minorVersion},
-                   publicCameraIds, resourceCost, parentProvider) {
-    // Get default parameters and initialize flash unit availability
-    // Requires powering on the camera device
-    hardware::Return<Status> status = interface->open(nullptr);
-    if (!status.isOk()) {
-        ALOGE("%s: Transaction error opening camera device %s to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to open camera device %s to check for a flash unit: %s", __FUNCTION__,
-                id.c_str(), CameraProviderManager::statusToString(status));
-        return;
-    }
-    hardware::Return<void> ret;
-    ret = interface->getParameters([this](const hardware::hidl_string& parms) {
-                mDefaultParameters.unflatten(String8(parms.c_str()));
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera device %s params to check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-        return;
-    }
-    const char *flashMode =
-            mDefaultParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
-    if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
-        mHasFlashUnit = true;
-    }
-
-    status_t res = cacheCameraInfo(interface);
-    if (res != OK) {
-        ALOGE("%s: Could not cache CameraInfo", __FUNCTION__);
-        return;
-    }
-
-    ret = interface->close();
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error closing camera device %s after check for a flash unit: %s",
-                __FUNCTION__, id.c_str(), status.description().c_str());
-    }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo1::~DeviceInfo1() {}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::setTorchMode(bool enabled) {
-    return setTorchModeForDevice<InterfaceT>(enabled);
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::getCameraInfo(
-        hardware::CameraInfo *info) const {
-    if (info == nullptr) return BAD_VALUE;
-    *info = mInfo;
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::cacheCameraInfo(
-        sp<CameraProviderManager::ProviderInfo::DeviceInfo1::InterfaceT> interface) {
-    Status status;
-    device::V1_0::CameraInfo cInfo;
-    hardware::Return<void> ret;
-    ret = interface->getCameraInfo([&status, &cInfo](Status s, device::V1_0::CameraInfo camInfo) {
-                status = s;
-                cInfo = camInfo;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error reading camera info from device %s: %s",
-                __FUNCTION__, mId.c_str(), ret.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        return mapToStatusT(status);
-    }
-
-    switch(cInfo.facing) {
-        case device::V1_0::CameraFacing::BACK:
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-            break;
-        case device::V1_0::CameraFacing::EXTERNAL:
-            // Map external to front for legacy API
-        case device::V1_0::CameraFacing::FRONT:
-            mInfo.facing = hardware::CAMERA_FACING_FRONT;
-            break;
-        default:
-            ALOGW("%s: Device %s: Unknown camera facing: %d",
-                    __FUNCTION__, mId.c_str(), cInfo.facing);
-            mInfo.facing = hardware::CAMERA_FACING_BACK;
-    }
-    mInfo.orientation = cInfo.orientation;
-
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) {
-    native_handle_t* handle = native_handle_create(1,0);
-    handle->data[0] = fd;
-    const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    hardware::Return<Status> s = interface->dumpState(handle);
-    native_handle_delete(handle);
-    if (!s.isOk()) {
-        return INVALID_OPERATION;
-    }
-    return mapToStatusT(s);
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -2294,6 +2303,21 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+
+    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+        status_t status = addDynamicDepthTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
+                    __FUNCTION__, strerror(-status), status);
+        }
+
+        status = deriveHeicTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
+    }
+
     res = addRotateCropTags();
     if (OK != res) {
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
@@ -2464,10 +2488,15 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        CameraMetadata *characteristics) const {
+        bool overrideForPerfClass, CameraMetadata *characteristics) const {
     if (characteristics == nullptr) return BAD_VALUE;
 
-    *characteristics = mCameraCharacteristics;
+    if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
+        *characteristics = *mCameraCharNoPCOverride;
+    } else {
+        *characteristics = mCameraCharacteristics;
+    }
+
     return OK;
 }
 
@@ -2484,7 +2513,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::isSessionConfigurationSupported(
-        const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
         bool *status /*out*/) {
 
     const sp<CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT> interface =
@@ -2492,19 +2521,33 @@
     if (interface == nullptr) {
         return DEAD_OBJECT;
     }
-    auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult;
-    if (interface_3_5 == nullptr) {
-        return INVALID_OPERATION;
-    }
+    auto castResult_3_5 = device::V3_5::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
+    auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
 
     status_t res;
     Status callStatus;
-    auto ret =  interface_3_5->isStreamCombinationSupported(configuration,
+    ::android::hardware::Return<void> ret;
+    auto halCb =
             [&callStatus, &status] (Status s, bool combStatus) {
                 callStatus = s;
                 *status = combStatus;
-            });
+            };
+    if (interface_3_7 != nullptr) {
+        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration, halCb);
+    } else if (interface_3_5 != nullptr) {
+        hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
+        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                configuration_3_4, configuration);
+        if (!success) {
+            *status = false;
+            return OK;
+        }
+        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4, halCb);
+    } else {
+        return INVALID_OPERATION;
+    }
     if (ret.isOk()) {
         switch (callStatus) {
             case Status::OK:
@@ -2526,6 +2569,97 @@
     return res;
 }
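
The version-fallback logic added above is a reusable pattern: cast to the newest HIDL device interface first, and if that cast fails, down-convert the stream configuration and fall back to the older interface, reporting the combination as unsupported when the down-conversion cannot represent it. A minimal standalone C++ sketch of that control flow, using hypothetical DeviceV35/DeviceV37 and ConfigV34/ConfigV37 types rather than the real HIDL classes:

    #include <optional>

    // Hypothetical stand-ins for the HIDL device interfaces and stream
    // configurations; only the control flow mirrors the patch above.
    struct ConfigV37 { int streamCount = 0; bool multiResolution = false; };
    struct ConfigV34 { int streamCount = 0; };

    struct DeviceV35 {
        virtual ~DeviceV35() = default;
        bool isSupported(const ConfigV34& c) const { return c.streamCount <= 4; }
    };
    struct DeviceV37 : DeviceV35 {
        bool isSupported(const ConfigV37& c) const { return c.streamCount <= 8; }
    };

    // Down-conversion fails when the newer config uses features the older
    // interface cannot express (multi-resolution streams, for example).
    static std::optional<ConfigV34> downconvert(const ConfigV37& c) {
        if (c.multiResolution) return std::nullopt;
        return ConfigV34{c.streamCount};
    }

    // Prefer the newest interface; otherwise down-convert and fall back.
    static bool querySupport(DeviceV35* device, const ConfigV37& config) {
        if (auto* v37 = dynamic_cast<DeviceV37*>(device)) {  // analogous to castFrom()
            return v37->isSupported(config);
        }
        auto legacy = downconvert(config);
        if (!legacy) return false;  // unrepresentable -> report unsupported
        return device->isSupported(*legacy);
    }

In the patched code, castFrom() plays the role of the dynamic_cast and convertHALStreamCombinationFromV37ToV34() plays the role of downconvert().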
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::filterSmallJpegSizes() {
+    int32_t thresholdW = SessionConfigurationUtils::PERF_CLASS_JPEG_THRESH_W;
+    int32_t thresholdH = SessionConfigurationUtils::PERF_CLASS_JPEG_THRESH_H;
+
+    if (mCameraCharNoPCOverride != nullptr) return OK;
+
+    mCameraCharNoPCOverride = std::make_unique<CameraMetadata>(mCameraCharacteristics);
+
+    // Remove small JPEG sizes from available stream configurations
+    size_t largeJpegCount = 0;
+    std::vector<int32_t> newStreamConfigs;
+    camera_metadata_entry streamConfigs =
+            mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        if ((streamConfigs.data.i32[i] == HAL_PIXEL_FORMAT_BLOB) && (streamConfigs.data.i32[i+3] ==
+                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
+            if (streamConfigs.data.i32[i+1] < thresholdW ||
+                    streamConfigs.data.i32[i+2] < thresholdH) {
+                continue;
+            } else {
+                largeJpegCount++;
+            }
+        }
+        newStreamConfigs.insert(newStreamConfigs.end(), streamConfigs.data.i32 + i,
+                streamConfigs.data.i32 + i + 4);
+    }
+    if (newStreamConfigs.size() == 0 || largeJpegCount == 0) {
+        return BAD_VALUE;
+    }
+
+    // Remove small JPEG sizes from available min frame durations
+    largeJpegCount = 0;
+    std::vector<int64_t> newMinDurations;
+    camera_metadata_entry minDurations =
+            mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+    for (size_t i = 0; i < minDurations.count; i += 4) {
+        if (minDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
+            if (minDurations.data.i64[i+1] < thresholdW ||
+                    minDurations.data.i64[i+2] < thresholdH) {
+                continue;
+            } else {
+                largeJpegCount++;
+            }
+        }
+        newMinDurations.insert(newMinDurations.end(), minDurations.data.i64 + i,
+                minDurations.data.i64 + i + 4);
+    }
+    if (newMinDurations.size() == 0 || largeJpegCount == 0) {
+        return BAD_VALUE;
+    }
+
+    // Remove small JPEG sizes from available stall durations
+    largeJpegCount = 0;
+    std::vector<int64_t> newStallDurations;
+    camera_metadata_entry stallDurations =
+            mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+    for (size_t i = 0; i < stallDurations.count; i += 4) {
+        if (stallDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
+            if (stallDurations.data.i64[i+1] < thresholdW ||
+                    stallDurations.data.i64[i+2] < thresholdH) {
+                continue;
+            } else {
+                largeJpegCount++;
+            }
+        }
+        newStallDurations.insert(newStallDurations.end(), stallDurations.data.i64 + i,
+                stallDurations.data.i64 + i + 4);
+    }
+    if (newStallDurations.size() == 0 || largeJpegCount == 0) {
+        return BAD_VALUE;
+    }
+
+    mCameraCharacteristics.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+            newStreamConfigs.data(), newStreamConfigs.size());
+    mCameraCharacteristics.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+            newMinDurations.data(), newMinDurations.size());
+    mCameraCharacteristics.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+            newStallDurations.data(), newStallDurations.size());
+
+    // Re-generate metadata tags that have dependencies on BLOB sizes
+    auto res = addDynamicDepthTags();
+    if (OK != res) {
+        ALOGE("%s: Failed to append dynamic depth tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        // Allow filtering of small JPEG sizes to succeed even if dynamic depth
+        // tags fail to generate.
+    }
+
+    return OK;
+}
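
The stream-configuration tags rewritten by filterSmallJpegSizes() above are flat int32/int64 arrays packed in groups of four: (format, width, height, direction) for the stream configurations and (format, width, height, duration) for the duration tags, which is why every loop steps by 4. A standalone sketch of the same filtering rule for the int32 case, with placeholder constants standing in for HAL_PIXEL_FORMAT_BLOB and the OUTPUT direction value:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Sketch only; kFormatBlob/kOutput are illustrative placeholders, not the
    // real HAL constants.
    constexpr int32_t kFormatBlob = 0x21;
    constexpr int32_t kOutput = 0;

    // Returns the filtered entries, or an empty vector when no JPEG output at
    // or above the threshold survives (the caller treats that as an error).
    std::vector<int32_t> filterSmallJpeg(const std::vector<int32_t>& configs,
                                         int32_t thresholdW, int32_t thresholdH) {
        std::vector<int32_t> kept;
        size_t largeJpegCount = 0;
        for (size_t i = 0; i + 3 < configs.size(); i += 4) {
            const bool jpegOutput =
                    configs[i] == kFormatBlob && configs[i + 3] == kOutput;
            if (jpegOutput) {
                if (configs[i + 1] < thresholdW || configs[i + 2] < thresholdH) {
                    continue;  // drop JPEG outputs below the perf-class threshold
                }
                ++largeJpegCount;
            }
            kept.insert(kept.end(), configs.begin() + i, configs.begin() + i + 4);
        }
        if (largeJpegCount == 0) {
            kept.clear();  // signal "no large JPEG remained" to the caller
        }
        return kept;
    }

Before any filtering runs, the patched function snapshots the unfiltered metadata into mCameraCharNoPCOverride, which is what getCameraCharacteristics() returns when the caller does not request the performance-class override.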
+
 status_t CameraProviderManager::ProviderInfo::parseProviderName(const std::string& name,
         std::string *type, uint32_t *id) {
     // Format must be "<type>/<id>"
@@ -2689,9 +2823,11 @@
 
 
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
+    if (mInitialStatusCallbackFuture.valid()) {
+        mInitialStatusCallbackFuture.wait();
+    }
     // Destruction of ProviderInfo is only supposed to happen when the respective
     // CameraProvider interface dies, so do not unregister callbacks.
-
 }
 
 status_t CameraProviderManager::mapToStatusT(const Status& s)  {
@@ -2870,6 +3006,8 @@
 
 status_t CameraProviderManager::convertToHALStreamCombinationAndCameraIdsLocked(
         const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion,
         hardware::hidl_vec<CameraIdAndStreamCombination> *halCameraIdsAndStreamCombinations,
         bool *earlyExit) {
     binder::Status bStatus = binder::Status::ok();
@@ -2877,25 +3015,31 @@
     bool shouldExit = false;
     status_t res = OK;
     for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
-        hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+        const std::string& cameraId = cameraIdAndSessionConfig.mCameraId;
+        hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
         CameraMetadata deviceInfo;
-        res = getCameraCharacteristicsLocked(cameraIdAndSessionConfig.mCameraId, &deviceInfo);
+        bool overrideForPerfClass =
+                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                        perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+        res = getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
         if (res != OK) {
             return res;
         }
-        metadataGetter getMetadata =
-                [this](const String8 &id) {
+        camera3::metadataGetter getMetadata =
+                [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    getCameraCharacteristicsLocked(id.string(), &physicalDeviceInfo);
+                    getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
+                                                   &physicalDeviceInfo);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
-        isLogicalCameraLocked(cameraIdAndSessionConfig.mCameraId, &physicalCameraIds);
+        isLogicalCameraLocked(cameraId, &physicalCameraIds);
         bStatus =
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraIdAndSessionConfig.mCameraId.c_str()), deviceInfo, getMetadata,
-                    physicalCameraIds, streamConfiguration, &shouldExit);
+                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    physicalCameraIds, streamConfiguration,
+                    overrideForPerfClass, &shouldExit);
         if (!bStatus.isOk()) {
             ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
             return INVALID_OPERATION;
@@ -2905,7 +3049,7 @@
             return OK;
         }
         CameraIdAndStreamCombination halCameraIdAndStream;
-        halCameraIdAndStream.cameraId = cameraIdAndSessionConfig.mCameraId;
+        halCameraIdAndStream.cameraId = cameraId;
         halCameraIdAndStream.streamConfiguration = streamConfiguration;
         halCameraIdsAndStreamsV.push_back(halCameraIdAndStream);
     }
@@ -2938,7 +3082,8 @@
 
 status_t CameraProviderManager::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
-        bool *isSupported) {
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion, bool *isSupported) {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     // Check if all the devices are a subset of devices advertised by the
     // same provider through getConcurrentStreamingCameraIds()
@@ -2952,8 +3097,8 @@
             hardware::hidl_vec<CameraIdAndStreamCombination> halCameraIdsAndStreamCombinations;
             bool knowUnsupported = false;
             status_t res = convertToHALStreamCombinationAndCameraIdsLocked(
-                    cameraIdsAndSessionConfigs, &halCameraIdsAndStreamCombinations,
-                    &knowUnsupported);
+                    cameraIdsAndSessionConfigs, perfClassPrimaryCameraIds,
+                    targetSdkVersion, &halCameraIdsAndStreamCombinations, &knowUnsupported);
             if (res != OK) {
                 ALOGE("%s unable to convert session configurations provided to HAL stream"
                       "combinations", __FUNCTION__);
@@ -2975,10 +3120,10 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
-        CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics) const {
     auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3,0}, /*maxVersion*/ {5,0});
     if (deviceInfo != nullptr) {
-        return deviceInfo->getCameraCharacteristics(characteristics);
+        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
     }
 
     // Find hidden physical camera characteristics
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 25d3639..1bdbb44 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -20,8 +20,10 @@
 #include <vector>
 #include <unordered_map>
 #include <unordered_set>
+#include <set>
 #include <string>
 #include <mutex>
+#include <future>
 
 #include <camera/camera2/ConcurrentCamera.h>
 #include <camera/CameraParameters2.h>
@@ -32,7 +34,8 @@
 #include <android/hardware/camera/provider/2.5/ICameraProvider.h>
 #include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
 #include <android/hardware/camera/provider/2.6/ICameraProvider.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
+#include <android/hardware/camera/device/3.7/types.h>
 #include <android/hidl/manager/1.0/IServiceNotification.h>
 #include <camera/VendorTagDescriptor.h>
 
@@ -77,6 +80,16 @@
    HIDDEN_SECURE_CAMERA
 };
 
+#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
+#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
+#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
+#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
+#define CAMERA_DEVICE_API_VERSION_3_3 HARDWARE_DEVICE_API_VERSION(3, 3)
+#define CAMERA_DEVICE_API_VERSION_3_4 HARDWARE_DEVICE_API_VERSION(3, 4)
+#define CAMERA_DEVICE_API_VERSION_3_5 HARDWARE_DEVICE_API_VERSION(3, 5)
+#define CAMERA_DEVICE_API_VERSION_3_6 HARDWARE_DEVICE_API_VERSION(3, 6)
+#define CAMERA_DEVICE_API_VERSION_3_7 HARDWARE_DEVICE_API_VERSION(3, 7)
+
 /**
  * A manager for all camera providers available on an Android device.
  *
@@ -214,19 +227,20 @@
      * not have a v3 or newer HAL version.
      */
     status_t getCameraCharacteristics(const std::string &id,
-            CameraMetadata* characteristics) const;
+            bool overrideForPerfClass, CameraMetadata* characteristics) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
                     &cameraIdsAndSessionConfigs,
-            bool *isSupported);
+            const std::set<std::string>& perfClassPrimaryCameraIds,
+            int targetSdkVersion, bool *isSupported);
 
     std::vector<std::unordered_set<std::string>> getConcurrentCameraIds() const;
     /**
      * Check for device support of specific stream combination.
      */
     status_t isSessionConfigurationSupported(const std::string& id,
-            const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+            const hardware::camera::device::V3_7::StreamConfiguration &configuration,
             bool *status /*out*/) const;
 
     /**
@@ -269,11 +283,6 @@
             /*out*/
             sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
 
-    status_t openSession(const std::string &id,
-            const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
-            /*out*/
-            sp<hardware::camera::device::V1_0::ICameraDevice> *session);
-
     /**
      * Save the ICameraProvider while it is being used by a camera or torch client
      */
@@ -320,6 +329,8 @@
     status_t getSystemCameraKind(const std::string& id, SystemCameraKind *kind) const;
     bool isHiddenPhysicalCamera(const std::string& cameraId) const;
 
+    status_t filterSmallJpegSizes(const std::string& cameraId);
+
     static const float kDepthARTolerance;
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
@@ -358,6 +369,7 @@
             virtual public hardware::hidl_death_recipient
     {
         const std::string mProviderName;
+        const std::string mProviderInstance;
         const metadata_vendor_id_t mProviderTagid;
         int mMinorVersion;
         sp<VendorTagDescriptor> mVendorTagDescriptor;
@@ -372,7 +384,7 @@
 
         sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
 
-        ProviderInfo(const std::string &providerName,
+        ProviderInfo(const std::string &providerName, const std::string &providerInstance,
                 CameraProviderManager *manager);
         ~ProviderInfo();
 
@@ -403,6 +415,15 @@
                 const hardware::hidl_string& physicalCameraDeviceName,
                 hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
 
+        status_t cameraDeviceStatusChangeLocked(
+                std::string* id, const hardware::hidl_string& cameraDeviceName,
+                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+        status_t physicalCameraDeviceStatusChangeLocked(
+                std::string* id, std::string* physicalId,
+                const hardware::hidl_string& cameraDeviceName,
+                const hardware::hidl_string& physicalCameraDeviceName,
+                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
+
         // hidl_death_recipient interface - this locks the parent mInterfaceMutex
         virtual void serviceDied(uint64_t cookie, const wp<hidl::base::V1_0::IBase>& who) override;
 
@@ -425,7 +446,7 @@
          */
         status_t isConcurrentSessionConfigurationSupported(
                 const hardware::hidl_vec<
-                        hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+                        hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
                                 &halCameraIdsAndStreamCombinations,
                 bool *isSupported);
 
@@ -444,8 +465,6 @@
             const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
 
             hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
-            std::map<std::string, hardware::camera::common::V1_0::CameraDeviceStatus>
-                    mPhysicalStatus;
 
             wp<ProviderInfo> mParentProvider;
 
@@ -455,7 +474,9 @@
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
-            virtual status_t getCameraCharacteristics(CameraMetadata *characteristics) const {
+            virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
+                    CameraMetadata *characteristics) const {
+                (void) overrideForPerfClass;
                 (void) characteristics;
                 return INVALID_OPERATION;
             }
@@ -467,10 +488,11 @@
             }
 
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_4::StreamConfiguration &/*configuration*/,
+                    const hardware::camera::device::V3_7::StreamConfiguration &/*configuration*/,
                     bool * /*status*/) {
                 return INVALID_OPERATION;
             }
+            virtual status_t filterSmallJpegSizes() = 0;
 
             template<class InterfaceT>
             sp<InterfaceT> startDeviceInterface();
@@ -513,27 +535,6 @@
         // physical camera IDs.
         std::vector<std::string> mProviderPublicCameraIds;
 
-        // HALv1-specific camera fields, including the actual device interface
-        struct DeviceInfo1 : public DeviceInfo {
-            typedef hardware::camera::device::V1_0::ICameraDevice InterfaceT;
-
-            virtual status_t setTorchMode(bool enabled) override;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
-            //In case of Device1Info assume that we are always API1 compatible
-            virtual bool isAPI1Compatible() const override { return true; }
-            virtual status_t dumpState(int fd) override;
-            DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
-                    const std::string &id, uint16_t minorVersion,
-                    const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
-                    sp<ProviderInfo> parentProvider,
-                    const std::vector<std::string>& publicCameraIds,
-                    sp<InterfaceT> interface);
-            virtual ~DeviceInfo1();
-        private:
-            CameraParameters2 mDefaultParameters;
-            status_t cacheCameraInfo(sp<InterfaceT> interface);
-        };
-
         // HALv3-specific camera fields, including the actual device interface
         struct DeviceInfo3 : public DeviceInfo {
             typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
@@ -543,13 +544,15 @@
             virtual bool isAPI1Compatible() const override;
             virtual status_t dumpState(int fd) override;
             virtual status_t getCameraCharacteristics(
+                    bool overrideForPerfClass,
                     CameraMetadata *characteristics) const override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+                    const hardware::camera::device::V3_7::StreamConfiguration &configuration,
                     bool *status /*out*/)
                     override;
+            virtual status_t filterSmallJpegSizes() override;
 
             DeviceInfo3(const std::string& name, const metadata_vendor_id_t tagId,
                     const std::string &id, uint16_t minorVersion,
@@ -559,23 +562,27 @@
             virtual ~DeviceInfo3();
         private:
             CameraMetadata mCameraCharacteristics;
+            // A copy of mCameraCharacteristics without performance class
+            // override
+            std::unique_ptr<CameraMetadata> mCameraCharNoPCOverride;
             std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
-            status_t addDynamicDepthTags();
-            status_t deriveHeicTags();
+            status_t addDynamicDepthTags(bool maxResolution = false);
+            status_t deriveHeicTags(bool maxResolution = false);
             status_t addRotateCropTags();
             status_t addPreCorrectionActiveArraySize();
 
             static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
                     android_pixel_format_t format,
                     std::vector<std::tuple<size_t, size_t>> *sizes /*out*/);
-            void getSupportedDurations( const CameraMetadata& ch, uint32_t tag,
+            static void getSupportedDurations( const CameraMetadata& ch, uint32_t tag,
                     android_pixel_format_t format,
                     const std::vector<std::tuple<size_t, size_t>>& sizes,
                     std::vector<int64_t> *durations/*out*/);
-            void getSupportedDynamicDepthDurations(const std::vector<int64_t>& depthDurations,
+            static void getSupportedDynamicDepthDurations(
+                    const std::vector<int64_t>& depthDurations,
                     const std::vector<int64_t>& blobDurations,
                     std::vector<int64_t> *dynamicDepthDurations /*out*/);
             static void getSupportedDynamicDepthSizes(
@@ -600,7 +607,27 @@
 
         CameraProviderManager *mManager;
 
+        struct CameraStatusInfoT {
+            bool isPhysicalCameraStatus = false;
+            hardware::hidl_string cameraId;
+            hardware::hidl_string physicalCameraId;
+            hardware::camera::common::V1_0::CameraDeviceStatus status;
+            CameraStatusInfoT(bool isForPhysicalCamera, const hardware::hidl_string& id,
+                    const hardware::hidl_string& physicalId,
+                    hardware::camera::common::V1_0::CameraDeviceStatus s) :
+                    isPhysicalCameraStatus(isForPhysicalCamera), cameraId(id),
+                    physicalCameraId(physicalId), status(s) {}
+        };
+
+        // Lock to synchronize between initialize() and camera status callbacks
+        std::mutex mInitLock;
         bool mInitialized = false;
+        std::vector<CameraStatusInfoT> mCachedStatus;
+        // End of scope for mInitLock
+
+        std::future<void> mInitialStatusCallbackFuture;
+        void notifyInitialStatusChange(sp<StatusListener> listener,
+                std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
 
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
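
The CameraStatusInfoT cache, mInitLock, and mInitialStatusCallbackFuture members added above implement a buffer-then-replay scheme: status callbacks that arrive while the provider is still initializing are recorded under the lock and later forwarded to the status listener from an asynchronous task, which the ProviderInfo destructor (changed earlier in this patch) waits on. A simplified standalone sketch of that pattern, using std::async and placeholder Listener/Provider types rather than the real classes:

    #include <future>
    #include <memory>
    #include <mutex>
    #include <string>
    #include <utility>
    #include <vector>

    struct Listener {
        virtual ~Listener() = default;
        virtual void onStatus(const std::string& cameraId) = 0;
    };

    class Provider {
    public:
        // Called by the HAL; may run before finishInitialize().
        void onStatusCallback(const std::string& cameraId) {
            std::lock_guard<std::mutex> lock(mInitLock);
            if (!mInitialized) {
                mCached.push_back(cameraId);  // buffer early events
                return;
            }
            mListener->onStatus(cameraId);
        }

        void finishInitialize(std::shared_ptr<Listener> listener) {
            std::vector<std::string> cached;
            {
                std::lock_guard<std::mutex> lock(mInitLock);
                mInitialized = true;
                mListener = std::move(listener);
                cached.swap(mCached);
            }
            // Replay off the caller's thread so listener callbacks cannot
            // re-enter locks held during initialization.
            mReplayFuture = std::async(std::launch::async,
                    [l = mListener, cached = std::move(cached)]() {
                        for (const auto& id : cached) {
                            l->onStatus(id);
                        }
                    });
        }

        ~Provider() {
            if (mReplayFuture.valid()) {
                mReplayFuture.wait();  // mirrors ~ProviderInfo() in the patch
            }
        }

    private:
        std::mutex mInitLock;
        bool mInitialized = false;
        std::vector<std::string> mCached;
        std::shared_ptr<Listener> mListener;
        std::future<void> mReplayFuture;
    };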
 
@@ -644,7 +671,10 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
-    status_t addProviderLocked(const std::string& newProvider);
+    status_t addProviderLocked(const std::string& newProvider, bool preexisting = false);
+
+    status_t tryToInitializeProviderLocked(const std::string& providerName,
+            const sp<ProviderInfo>& providerInfo);
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
@@ -653,6 +683,7 @@
 
     bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
 
+    size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
 
     void addProviderToMap(
@@ -668,7 +699,7 @@
     static const char* torchStatusToString(
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
-    status_t getCameraCharacteristicsLocked(const std::string &id,
+    status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
             CameraMetadata* characteristics) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
@@ -682,7 +713,9 @@
     status_t convertToHALStreamCombinationAndCameraIdsLocked(
               const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
                       &cameraIdsAndSessionConfigs,
-              hardware::hidl_vec<hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+              const std::set<std::string>& perfClassPrimaryCameraIds,
+              int targetSdkVersion,
+              hardware::hidl_vec<hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
                       *halCameraIdsAndStreamCombinations,
               bool *earlyExit);
 };
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
deleted file mode 100644
index 62ef681..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ /dev/null
@@ -1,818 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "CameraHardwareInterface"
-//#define LOG_NDEBUG 0
-
-#include <inttypes.h>
-#include <media/hardware/HardwareAPI.h> // For VideoNativeHandleMetadata
-#include "CameraHardwareInterface.h"
-
-namespace android {
-
-using namespace hardware::camera::device::V1_0;
-using namespace hardware::camera::common::V1_0;
-using hardware::hidl_handle;
-
-CameraHardwareInterface::~CameraHardwareInterface()
-{
-    ALOGI("Destroying camera %s", mName.string());
-    if (mHidlDevice != nullptr) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-        cleanupCirculatingBuffers();
-    }
-}
-
-status_t CameraHardwareInterface::initialize(sp<CameraProviderManager> manager) {
-    ALOGI("Opening camera %s", mName.string());
-
-    status_t ret = manager->openSession(mName.string(), this, &mHidlDevice);
-    if (ret != OK) {
-        ALOGE("%s: openSession failed! %s (%d)", __FUNCTION__, strerror(-ret), ret);
-    }
-    return ret;
-}
-
-status_t CameraHardwareInterface::setPreviewScalingMode(int scalingMode)
-{
-    int rc = OK;
-    mPreviewScalingMode = scalingMode;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_scaling_mode(mPreviewWindow.get(),
-                scalingMode);
-    }
-    return rc;
-}
-
-status_t CameraHardwareInterface::setPreviewTransform(int transform) {
-    int rc = OK;
-    mPreviewTransform = transform;
-    if (mPreviewWindow != nullptr) {
-        rc = native_window_set_buffers_transform(mPreviewWindow.get(),
-                mPreviewTransform);
-    }
-    return rc;
-}
-
-/**
- * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
- */
-hardware::Return<void> CameraHardwareInterface::notifyCallback(
-        NotifyCallbackMsg msgType, int32_t ext1, int32_t ext2) {
-    sNotifyCb((int32_t) msgType, ext1, ext2, (void*) this);
-    return hardware::Void();
-}
-
-hardware::Return<uint32_t> CameraHardwareInterface::registerMemory(
-        const hardware::hidl_handle& descriptor,
-        uint32_t bufferSize, uint32_t bufferCount) {
-    if (descriptor->numFds != 1) {
-        ALOGE("%s: camera memory descriptor has numFds %d (expect 1)",
-                __FUNCTION__, descriptor->numFds);
-        return 0;
-    }
-    if (descriptor->data[0] < 0) {
-        ALOGE("%s: camera memory descriptor has FD %d (expect >= 0)",
-                __FUNCTION__, descriptor->data[0]);
-        return 0;
-    }
-
-    camera_memory_t* mem = sGetMemory(descriptor->data[0], bufferSize, bufferCount, this);
-    sp<CameraHeapMemory> camMem(static_cast<CameraHeapMemory *>(mem->handle));
-    int memPoolId = camMem->mHeap->getHeapID();
-    if (memPoolId < 0) {
-        ALOGE("%s: CameraHeapMemory has FD %d (expect >= 0)", __FUNCTION__, memPoolId);
-        return 0;
-    }
-    std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-    mHidlMemPoolMap.insert(std::make_pair(memPoolId, mem));
-    return memPoolId;
-}
-
-hardware::Return<void> CameraHardwareInterface::unregisterMemory(uint32_t memId) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(memId) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, memId);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(memId);
-        mHidlMemPoolMap.erase(memId);
-    }
-    sPutMemory(mem);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallback(
-        DataCallbackMsg msgType, uint32_t data, uint32_t bufferIndex,
-        const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    camera_frame_metadata_t md;
-    md.number_of_faces = metadata.faces.size();
-    md.faces = (camera_face_t*) metadata.faces.data();
-    sDataCb((int32_t) msgType, mem, bufferIndex, &md, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::dataCallbackTimestamp(
-        DataCallbackMsg msgType, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestamp(
-        DataCallbackMsg msgType, const hidl_handle& frameData, uint32_t data,
-        uint32_t bufferIndex, int64_t timestamp) {
-    camera_memory_t* mem = nullptr;
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        if (mHidlMemPoolMap.count(data) == 0) {
-            ALOGE("%s: memory pool ID %d not found", __FUNCTION__, data);
-            return hardware::Void();
-        }
-        mem = mHidlMemPoolMap.at(data);
-    }
-    sp<CameraHeapMemory> heapMem(static_cast<CameraHeapMemory *>(mem->handle));
-    // TODO: Using unsecurePointer() has some associated security pitfalls
-    //       (see declaration for details).
-    //       Either document why it is safe in this case or address the
-    //       issue (e.g. by copying).
-    VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-            heapMem->mBuffers[bufferIndex]->unsecurePointer();
-    md->pHandle = const_cast<native_handle_t*>(frameData.getNativeHandle());
-    sDataCbTimestamp(timestamp, (int32_t) msgType, mem, bufferIndex, this);
-    return hardware::Void();
-}
-
-hardware::Return<void> CameraHardwareInterface::handleCallbackTimestampBatch(
-        DataCallbackMsg msgType,
-        const hardware::hidl_vec<hardware::camera::device::V1_0::HandleTimestampMessage>& messages) {
-    std::vector<android::HandleTimestampMessage> msgs;
-    msgs.reserve(messages.size());
-    {
-        std::lock_guard<std::mutex> lock(mHidlMemPoolMapLock);
-        for (const auto& hidl_msg : messages) {
-            if (mHidlMemPoolMap.count(hidl_msg.data) == 0) {
-                ALOGE("%s: memory pool ID %d not found", __FUNCTION__, hidl_msg.data);
-                return hardware::Void();
-            }
-            sp<CameraHeapMemory> mem(
-                    static_cast<CameraHeapMemory *>(mHidlMemPoolMap.at(hidl_msg.data)->handle));
-
-            if (hidl_msg.bufferIndex >= mem->mNumBufs) {
-                ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-                     hidl_msg.bufferIndex, mem->mNumBufs);
-                return hardware::Void();
-            }
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
-                    mem->mBuffers[hidl_msg.bufferIndex]->unsecurePointer();
-            md->pHandle = const_cast<native_handle_t*>(hidl_msg.frameData.getNativeHandle());
-
-            msgs.push_back({hidl_msg.timestamp, mem->mBuffers[hidl_msg.bufferIndex]});
-        }
-    }
-    mDataCbTimestampBatch((int32_t) msgType, msgs, mCbUser);
-    return hardware::Void();
-}
-
-std::pair<bool, uint64_t> CameraHardwareInterface::getBufferId(
-        ANativeWindowBuffer* anb) {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-
-    buffer_handle_t& buf = anb->handle;
-    auto it = mBufferIdMap.find(buf);
-    if (it == mBufferIdMap.end()) {
-        uint64_t bufId = mNextBufferId++;
-        mBufferIdMap[buf] = bufId;
-        mReversedBufMap[bufId] = anb;
-        return std::make_pair(true, bufId);
-    } else {
-        return std::make_pair(false, it->second);
-    }
-}
-
-void CameraHardwareInterface::cleanupCirculatingBuffers() {
-    std::lock_guard<std::mutex> lock(mBufferIdMapLock);
-    mBufferIdMap.clear();
-    mReversedBufMap.clear();
-}
-
-hardware::Return<void>
-CameraHardwareInterface::dequeueBuffer(dequeueBuffer_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    ANativeWindowBuffer* anb;
-    int rc = native_window_dequeue_buffer_and_wait(a, &anb);
-    Status s = Status::INTERNAL_ERROR;
-    uint64_t bufferId = 0;
-    uint32_t stride = 0;
-    hidl_handle buf = nullptr;
-    if (rc == OK) {
-        s = Status::OK;
-        auto pair = getBufferId(anb);
-        buf = (pair.first) ? anb->handle : nullptr;
-        bufferId = pair.second;
-        stride = anb->stride;
-    }
-
-    _hidl_cb(s, bufferId, buf, stride);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::enqueueBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->queueBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::cancelBuffer(uint64_t bufferId) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return Status::INTERNAL_ERROR;
-    }
-    if (mReversedBufMap.count(bufferId) == 0) {
-        ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId);
-        return Status::ILLEGAL_ARGUMENT;
-    }
-    int rc = a->cancelBuffer(a, mReversedBufMap.at(bufferId), -1);
-    if (rc == 0) {
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBufferCount(uint32_t count) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a != nullptr) {
-        // Workaround for b/27039775
-        // Previously, setting the buffer count would reset the buffer
-        // queue's flag that allows for all buffers to be dequeued on the
-        // producer side, instead of just the producer's declared max count,
-        // if no filled buffers have yet been queued by the producer.  This
-        // reset no longer happens, but some HALs depend on this behavior,
-        // so it needs to be maintained for HAL backwards compatibility.
-        // Simulate the prior behavior by disconnecting/reconnecting to the
-        // window and setting the values again.  This has the drawback of
-        // actually causing memory reallocation, which may not have happened
-        // in the past.
-        native_window_api_disconnect(a, NATIVE_WINDOW_API_CAMERA);
-        native_window_api_connect(a, NATIVE_WINDOW_API_CAMERA);
-        if (mPreviewScalingMode != NOT_SET) {
-            native_window_set_scaling_mode(a, mPreviewScalingMode);
-        }
-        if (mPreviewTransform != NOT_SET) {
-            native_window_set_buffers_transform(a, mPreviewTransform);
-        }
-        if (mPreviewWidth != NOT_SET) {
-            native_window_set_buffers_dimensions(a,
-                    mPreviewWidth, mPreviewHeight);
-            native_window_set_buffers_format(a, mPreviewFormat);
-        }
-        if (mPreviewUsage != 0) {
-            native_window_set_usage(a, mPreviewUsage);
-        }
-        if (mPreviewSwapInterval != NOT_SET) {
-            a->setSwapInterval(a, mPreviewSwapInterval);
-        }
-        if (mPreviewCrop.left != NOT_SET) {
-            native_window_set_crop(a, &(mPreviewCrop));
-        }
-    }
-    int rc = native_window_set_buffer_count(a, count);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        return Status::OK;
-    }
-    return Status::INTERNAL_ERROR;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setBuffersGeometry(
-        uint32_t w, uint32_t h, hardware::graphics::common::V1_0::PixelFormat format) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewWidth = w;
-    mPreviewHeight = h;
-    mPreviewFormat = (int) format;
-    int rc = native_window_set_buffers_dimensions(a, w, h);
-    if (rc == OK) {
-        rc = native_window_set_buffers_format(a, mPreviewFormat);
-    }
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewCrop.left = left;
-    mPreviewCrop.top = top;
-    mPreviewCrop.right = right;
-    mPreviewCrop.bottom = bottom;
-    int rc = native_window_set_crop(a, &mPreviewCrop);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setUsage(hardware::graphics::common::V1_0::BufferUsage usage) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewUsage = static_cast<uint64_t> (usage);
-    int rc = native_window_set_usage(a, mPreviewUsage);
-    if (rc == OK) {
-        cleanupCirculatingBuffers();
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setSwapInterval(int32_t interval) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    mPreviewSwapInterval = interval;
-    int rc = a->setSwapInterval(a, interval);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-hardware::Return<void>
-CameraHardwareInterface::getMinUndequeuedBufferCount(getMinUndequeuedBufferCount_cb _hidl_cb) {
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return hardware::Void();
-    }
-    int count = 0;
-    int rc = a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &count);
-    Status s = Status::INTERNAL_ERROR;
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    _hidl_cb(s, count);
-    return hardware::Void();
-}
-
-hardware::Return<Status>
-CameraHardwareInterface::setTimestamp(int64_t timestamp) {
-    Status s = Status::INTERNAL_ERROR;
-    ANativeWindow *a = mPreviewWindow.get();
-    if (a == nullptr) {
-        ALOGE("%s: preview window is null", __FUNCTION__);
-        return s;
-    }
-    int rc = native_window_set_buffers_timestamp(a, timestamp);
-    if (rc == OK) {
-        s = Status::OK;
-    }
-    return s;
-}
-
-status_t CameraHardwareInterface::setPreviewWindow(const sp<ANativeWindow>& buf)
-{
-    ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mPreviewWindow = buf;
-        if (buf != nullptr) {
-            if (mPreviewScalingMode != NOT_SET) {
-                setPreviewScalingMode(mPreviewScalingMode);
-            }
-            if (mPreviewTransform != NOT_SET) {
-                setPreviewTransform(mPreviewTransform);
-            }
-        }
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setPreviewWindow(buf.get() ? this : nullptr));
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::setCallbacks(notify_callback notify_cb,
-        data_callback data_cb,
-        data_callback_timestamp data_cb_timestamp,
-        data_callback_timestamp_batch data_cb_timestamp_batch,
-        void* user)
-{
-    mNotifyCb = notify_cb;
-    mDataCb = data_cb;
-    mDataCbTimestamp = data_cb_timestamp;
-    mDataCbTimestampBatch = data_cb_timestamp_batch;
-    mCbUser = user;
-
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-}
-
-void CameraHardwareInterface::enableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->enableMsgType(msgType);
-    }
-}
-
-void CameraHardwareInterface::disableMsgType(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->disableMsgType(msgType);
-    }
-}
-
-int CameraHardwareInterface::msgTypeEnabled(int32_t msgType)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->msgTypeEnabled(msgType);
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::startPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startPreview());
-    }
-    return INVALID_OPERATION;
-}
-
-void CameraHardwareInterface::stopPreview()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopPreview();
-    }
-}
-
-int CameraHardwareInterface::previewEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->previewEnabled();
-    }
-    return false;
-}
-
-status_t CameraHardwareInterface::storeMetaDataInBuffers(int enable)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->storeMetaDataInBuffers(enable));
-    }
-    return enable ? INVALID_OPERATION: OK;
-}
-
-status_t CameraHardwareInterface::startRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->startRecording());
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Stop a previously started recording.
- */
-void CameraHardwareInterface::stopRecording()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->stopRecording();
-    }
-}
-
-/**
- * Returns true if recording is enabled.
- */
-int CameraHardwareInterface::recordingEnabled()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return mHidlDevice->recordingEnabled();
-    }
-    return false;
-}
-
-void CameraHardwareInterface::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-    int heapId = heap->getHeapID();
-    int bufferIndex = offset / size;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        if (size == sizeof(VideoNativeHandleMetadata)) {
-            // TODO: Using unsecurePointer() has some associated security pitfalls
-            //       (see declaration for details).
-            //       Either document why it is safe in this case or address the
-            //       issue (e.g. by copying).
-            VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-            // Caching the handle here because md->pHandle will be subject to HAL's edit
-            native_handle_t* nh = md->pHandle;
-            hidl_handle frame = nh;
-            mHidlDevice->releaseRecordingFrameHandle(heapId, bufferIndex, frame);
-            native_handle_close(nh);
-            native_handle_delete(nh);
-        } else {
-            mHidlDevice->releaseRecordingFrame(heapId, bufferIndex);
-        }
-    }
-}
-
-void CameraHardwareInterface::releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    size_t n = frames.size();
-    std::vector<VideoFrameMessage> msgs;
-    msgs.reserve(n);
-    for (auto& mem : frames) {
-        if (CC_LIKELY(mHidlDevice != nullptr)) {
-            ssize_t offset;
-            size_t size;
-            sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-            if (size == sizeof(VideoNativeHandleMetadata)) {
-                uint32_t heapId = heap->getHeapID();
-                uint32_t bufferIndex = offset / size;
-                // TODO: Using unsecurePointer() has some associated security pitfalls
-                //       (see declaration for details).
-                //       Either document why it is safe in this case or address the
-                //       issue (e.g. by copying).
-                VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->unsecurePointer();
-                // Caching the handle here because md->pHandle will be subject to HAL's edit
-                native_handle_t* nh = md->pHandle;
-                VideoFrameMessage msg;
-                msgs.push_back({nh, heapId, bufferIndex});
-            } else {
-                ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
-                return;
-            }
-        }
-    }
-
-    mHidlDevice->releaseRecordingFrameHandleBatch(msgs);
-
-    for (auto& msg : msgs) {
-        native_handle_t* nh = const_cast<native_handle_t*>(msg.frameData.getNativeHandle());
-        native_handle_close(nh);
-        native_handle_delete(nh);
-    }
-}
-
-status_t CameraHardwareInterface::autoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->autoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelAutoFocus()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelAutoFocus());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::takePicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->takePicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::cancelPicture()
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->cancelPicture());
-    }
-    return INVALID_OPERATION;
-}
-
-status_t CameraHardwareInterface::setParameters(const CameraParameters &params)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->setParameters(params.flatten().string()));
-    }
-    return INVALID_OPERATION;
-}
-
-CameraParameters CameraHardwareInterface::getParameters() const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    CameraParameters parms;
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        hardware::hidl_string outParam;
-        mHidlDevice->getParameters(
-                [&outParam](const auto& outStr) {
-                    outParam = outStr;
-                });
-        String8 tmp(outParam.c_str());
-        parms.unflatten(tmp);
-    }
-    return parms;
-}
-
-status_t CameraHardwareInterface::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        return CameraProviderManager::mapToStatusT(
-                mHidlDevice->sendCommand((CommandType) cmd, arg1, arg2));
-    }
-    return INVALID_OPERATION;
-}
-
-/**
- * Release the hardware resources owned by this object.  Note that this is
- * *not* done in the destructor.
- */
-void CameraHardwareInterface::release() {
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        mHidlDevice->close();
-        mHidlDevice.clear();
-    }
-}
-
-/**
- * Dump state of the camera hardware
- */
-status_t CameraHardwareInterface::dump(int fd, const Vector<String16>& /*args*/) const
-{
-    ALOGV("%s(%s)", __FUNCTION__, mName.string());
-    if (CC_LIKELY(mHidlDevice != nullptr)) {
-        native_handle_t* handle = native_handle_create(1,0);
-        handle->data[0] = fd;
-        Status s = mHidlDevice->dumpState(handle);
-        native_handle_delete(handle);
-        return CameraProviderManager::mapToStatusT(s);
-    }
-    return OK; // It's fine if the HAL doesn't implement dump()
-}
-
-void CameraHardwareInterface::sNotifyCb(int32_t msg_type, int32_t ext1,
-                        int32_t ext2, void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    object->mNotifyCb(msg_type, ext1, ext2, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCb(int32_t msg_type,
-                      const camera_memory_t *data, unsigned int index,
-                      camera_frame_metadata_t *metadata,
-                      void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCb(msg_type, mem->mBuffers[index], metadata, object->mCbUser);
-}
-
-void CameraHardwareInterface::sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                         const camera_memory_t *data, unsigned index,
-                         void *user)
-{
-    ALOGV("%s", __FUNCTION__);
-    CameraHardwareInterface *object =
-            static_cast<CameraHardwareInterface *>(user);
-    // Start refcounting the heap object from here on.  When the clients
-    // drop all references, it will be destroyed (as well as the enclosed
-    // MemoryHeapBase.
-    sp<CameraHeapMemory> mem(static_cast<CameraHeapMemory *>(data->handle));
-    if (index >= mem->mNumBufs) {
-        ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
-             index, mem->mNumBufs);
-        return;
-    }
-    object->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], object->mCbUser);
-}
-
-camera_memory_t* CameraHardwareInterface::sGetMemory(
-        int fd, size_t buf_size, uint_t num_bufs,
-        void *user __attribute__((unused)))
-{
-    CameraHeapMemory *mem;
-    if (fd < 0) {
-        mem = new CameraHeapMemory(buf_size, num_bufs);
-    } else {
-        mem = new CameraHeapMemory(fd, buf_size, num_bufs);
-    }
-    mem->incStrong(mem);
-    return &mem->handle;
-}
-
-void CameraHardwareInterface::sPutMemory(camera_memory_t *data)
-{
-    if (!data) {
-        return;
-    }
-
-    CameraHeapMemory *mem = static_cast<CameraHeapMemory *>(data->handle);
-    mem->decStrong(mem);
-}
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
deleted file mode 100644
index e519b04..0000000
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
-
-#include <unordered_map>
-#include <binder/IMemory.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-#include <utils/RefBase.h>
-#include <ui/GraphicBuffer.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-#include <system/window.h>
-#include <hardware/camera.h>
-
-#include <common/CameraProviderManager.h>
-
-namespace android {
-
-typedef void (*notify_callback)(int32_t msgType,
-                            int32_t ext1,
-                            int32_t ext2,
-                            void* user);
-
-typedef void (*data_callback)(int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            camera_frame_metadata_t *metadata,
-                            void* user);
-
-typedef void (*data_callback_timestamp)(nsecs_t timestamp,
-                            int32_t msgType,
-                            const sp<IMemory> &dataPtr,
-                            void *user);
-
-struct HandleTimestampMessage {
-    nsecs_t timestamp;
-    const sp<IMemory> dataPtr;
-};
-
-typedef void (*data_callback_timestamp_batch)(
-        int32_t msgType,
-        const std::vector<HandleTimestampMessage>&, void* user);
-
-/**
- * CameraHardwareInterface.h defines the interface to the
- * camera hardware abstraction layer, used for setting and getting
- * parameters, live previewing, and taking pictures. It is used for
- * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only.
- *
- * It is a reference-counted interface with RefBase as its base class.
- * CameraService calls openCameraHardware() to retrieve a strong pointer to an
- * instance of this interface; openCameraHardware() may be called multiple times. The
- * following steps describe a typical sequence:
- *
- *   -# After CameraService calls openCameraHardware(), getParameters() and
- *      setParameters() are used to initialize the camera instance.
- *   -# startPreview() is called.
- *
- * Prior to taking a picture, CameraService often calls autoFocus(). When auto
- * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification,
- * which informs the application whether focusing was successful. The camera instance
- * only sends this message once and it is up to the application to call autoFocus()
- * again if refocusing is desired.
- *
- * CameraService calls takePicture() to request the camera instance take a
- * picture. At this point, if a shutter, postview, raw, and/or compressed
- * callback is desired, the corresponding message must be enabled. Any memory
- * provided in a data callback must be copied if it's needed after returning.
- */
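The sequence described above can be illustrated with a short sketch. This is not part of the original header (and is for reference only, since this interface is being removed by this change); it uses the constructor and initialize() declared below in place of the openCameraHardware() factory mentioned in the comment, assumes `manager` is an already-initialized CameraProviderManager, and reduces error handling to early returns:

// Illustrative sketch of the documented CameraService-side call sequence.
void sketchCamera1Session(const sp<CameraProviderManager>& manager) {
    sp<CameraHardwareInterface> hw = new CameraHardwareInterface("0");
    if (hw->initialize(manager) != OK) return;

    CameraParameters params = hw->getParameters();  // read current/default parameters
    hw->setParameters(params);                      // apply (possibly modified) parameters

    if (hw->startPreview() != OK) return;           // begin preview streaming

    hw->autoFocus();     // completion is reported once via a CAMERA_MSG_FOCUS notification
    hw->takePicture();   // shutter/raw/jpeg callbacks fire if their messages are enabled

    hw->stopPreview();
    hw->release();       // free HAL resources; this is not done in the destructor
}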
-
-class CameraHardwareInterface :
-        public virtual RefBase,
-        public virtual hardware::camera::device::V1_0::ICameraDeviceCallback,
-        public virtual hardware::camera::device::V1_0::ICameraDevicePreviewCallback {
-
-public:
-    explicit CameraHardwareInterface(const char *name):
-            mHidlDevice(nullptr),
-            mName(name),
-            mPreviewScalingMode(NOT_SET),
-            mPreviewTransform(NOT_SET),
-            mPreviewWidth(NOT_SET),
-            mPreviewHeight(NOT_SET),
-            mPreviewFormat(NOT_SET),
-            mPreviewUsage(0),
-            mPreviewSwapInterval(NOT_SET),
-            mPreviewCrop{NOT_SET,NOT_SET,NOT_SET,NOT_SET}
-    {
-    }
-
-    ~CameraHardwareInterface();
-
-    status_t initialize(sp<CameraProviderManager> manager);
-
-    /** Set the ANativeWindow to which preview frames are sent */
-    status_t setPreviewWindow(const sp<ANativeWindow>& buf);
-
-    status_t setPreviewScalingMode(int scalingMode);
-
-    status_t setPreviewTransform(int transform);
-
-    /** Set the notification and data callbacks */
-    void setCallbacks(notify_callback notify_cb,
-                      data_callback data_cb,
-                      data_callback_timestamp data_cb_timestamp,
-                      data_callback_timestamp_batch data_cb_timestamp_batch,
-                      void* user);
-
-    /**
-     * The following three functions all take a msgType,
-     * which is a bitmask of the messages defined in
-     * include/ui/Camera.h
-     */
-
-    /**
-     * Enable a message, or set of messages.
-     */
-    void enableMsgType(int32_t msgType);
-
-    /**
-     * Disable a message, or a set of messages.
-     *
-     * Once it receives a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera hal
-     * should not rely on its client to call releaseRecordingFrame() to release
-     * video recording frames sent out by the camera hal before and after the
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not
-     * modify/access any video recording frame after calling
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
-     */
-    void disableMsgType(int32_t msgType);
-
-    /**
-     * Query whether a message, or a set of messages, is enabled.
-     * Note that this operates as an AND: if any of the messages
-     * queried are off, this will return false.
-     */
-    int msgTypeEnabled(int32_t msgType);
-
-    /**
-     * Start preview mode.
-     */
-    status_t startPreview();
-
-    /**
-     * Stop a previously started preview.
-     */
-    void stopPreview();
-
-    /**
-     * Returns true if preview is enabled.
-     */
-    int previewEnabled();
-
-    /**
-     * Request the camera hal to store meta data or real YUV data in
-     * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
-     * recording session. If it is not called, the default camera
-     * hal behavior is to store real YUV data in the video buffers.
-     *
-     * This method should be called before startRecording() in order
-     * to be effective.
-     *
-     * If meta data is stored in the video buffers, it is up to the
-     * receiver of the video buffers to interpret the contents and
-     * to find the actual frame data with the help of the meta data
-     * in the buffer. How this is done is outside of the scope of
-     * this method.
-     *
-     * Some camera hals may not support storing meta data in the video
-     * buffers, but all camera hals should support storing real YUV data
-     * in the video buffers. If the camera hal does not support storing
-     * the meta data in the video buffers when it is requested to do
-     * so, INVALID_OPERATION must be returned. It is very useful for
-     * the camera hal to pass meta data rather than the actual frame
-     * data directly to the video encoder, since the amount of the
-     * uncompressed frame data can be very large if video size is large.
-     *
-     * @param enable true to instruct the camera hal to store
-     *      meta data in the video buffers; false to instruct
-     *      the camera hal to store real YUV data in the video
-     *      buffers.
-     *
-     * @return OK on success.
-     */
-
-    status_t storeMetaDataInBuffers(int enable);
-
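As a hedged illustration of this contract (not part of the original header), a client could probe metadata mode and fall back to the mandatory YUV mode before starting a recording; `hw` is assumed to be an initialized CameraHardwareInterface:

// Sketch only: prefer metadata in video buffers, fall back to real YUV data.
status_t startRecordingPreferMetadata(const sp<CameraHardwareInterface>& hw) {
    status_t res = hw->storeMetaDataInBuffers(/*enable*/ 1);
    if (res == INVALID_OPERATION) {
        // This HAL cannot store metadata; storing real YUV data is always supported.
        res = hw->storeMetaDataInBuffers(/*enable*/ 0);
    }
    if (res != OK) {
        return res;
    }
    return hw->startRecording();  // frames then arrive via CAMERA_MSG_VIDEO_FRAME
}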
-    /**
-     * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
-     * message is sent with the corresponding frame. Every record frame must be released
-     * by a camera hal client via releaseRecordingFrame() before the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
-     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
-     * to manage the life-cycle of the video recording frames, and the client must
-     * not modify/access any video recording frames.
-     */
-    status_t startRecording();
-
-    /**
-     * Stop a previously started recording.
-     */
-    void stopRecording();
-
-    /**
-     * Returns true if recording is enabled.
-     */
-    int recordingEnabled();
-
-    /**
-     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility to manage the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrame(const sp<IMemory>& mem);
-
-    /**
-     * Release a batch of recording frames previously returned by
-     * CAMERA_MSG_VIDEO_FRAME. This method only supports frames that are
-     * stored as VideoNativeHandleMetadata.
-     *
-     * It is camera hal client's responsibility to release video recording
-     * frames sent out by the camera hal before the camera hal receives
-     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
-     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
-     * responsibility to manage the life-cycle of the video recording
-     * frames.
-     */
-    void releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames);
-
-    /**
-     * Start auto focus; the notification callback routine is called
-     * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
-     * will be called again if another auto focus is needed.
-     */
-    status_t autoFocus();
-
-    /**
-     * Cancels auto-focus function. If the auto-focus is still in progress,
-     * this function will cancel it. Whether the auto-focus is in progress
-     * or not, this function will return the focus position to the default.
-     * If the camera does not support auto-focus, this is a no-op.
-     */
-    status_t cancelAutoFocus();
-
-    /**
-     * Take a picture.
-     */
-    status_t takePicture();
-
-    /**
-     * Cancel a picture that was started with takePicture.  Calling this
-     * method when no picture is being taken is a no-op.
-     */
-    status_t cancelPicture();
-
-    /**
-     * Set the camera parameters. This returns BAD_VALUE if any parameter is
-     * invalid or not supported. */
-    status_t setParameters(const CameraParameters &params);
-
-    /** Return the camera parameters. */
-    CameraParameters getParameters() const;
-
-    /**
-     * Send command to camera driver.
-     */
-    status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-
-    /**
-     * Release the hardware resources owned by this object.  Note that this is
-     * *not* done in the destructor.
-     */
-    void release();
-
-    /**
-     * Dump state of the camera hardware
-     */
-    status_t dump(int fd, const Vector<String16>& /*args*/) const;
-
-private:
-    sp<hardware::camera::device::V1_0::ICameraDevice> mHidlDevice;
-    String8 mName;
-
-    static void sNotifyCb(int32_t msg_type, int32_t ext1,
-                            int32_t ext2, void *user);
-
-    static void sDataCb(int32_t msg_type,
-                          const camera_memory_t *data, unsigned int index,
-                          camera_frame_metadata_t *metadata,
-                          void *user);
-
-    static void sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type,
-                             const camera_memory_t *data, unsigned index,
-                             void *user);
-
-    // This is a utility class that combines a MemoryHeapBase and a MemoryBase
-    // in one.  Since we tend to use them in a one-to-one relationship, this is
-    // handy.
-    class CameraHeapMemory : public RefBase {
-    public:
-        CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) :
-                         mBufSize(buf_size),
-                         mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(fd, buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        explicit CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) :
-                                  mBufSize(buf_size),
-                                  mNumBufs(num_buffers)
-        {
-            mHeap = new MemoryHeapBase(buf_size * num_buffers);
-            commonInitialization();
-        }
-
-        void commonInitialization()
-        {
-            handle.data = mHeap->base();
-            handle.size = mBufSize * mNumBufs;
-            handle.handle = this;
-
-            mBuffers = new sp<MemoryBase>[mNumBufs];
-            for (uint_t i = 0; i < mNumBufs; i++)
-                mBuffers[i] = new MemoryBase(mHeap,
-                                             i * mBufSize,
-                                             mBufSize);
-
-            handle.release = sPutMemory;
-        }
-
-        virtual ~CameraHeapMemory()
-        {
-            delete [] mBuffers;
-        }
-
-        size_t mBufSize;
-        uint_t mNumBufs;
-        sp<MemoryHeapBase> mHeap;
-        sp<MemoryBase> *mBuffers;
-
-        camera_memory_t handle;
-    };
-
-    static camera_memory_t* sGetMemory(int fd, size_t buf_size, uint_t num_bufs,
-                                         void *user __attribute__((unused)));
-
-    static void sPutMemory(camera_memory_t *data);
-
-    std::pair<bool, uint64_t> getBufferId(ANativeWindowBuffer* anb);
-    void cleanupCirculatingBuffers();
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDeviceCallback
-     */
-    hardware::Return<void> notifyCallback(
-            hardware::camera::device::V1_0::NotifyCallbackMsg msgType,
-            int32_t ext1, int32_t ext2) override;
-    hardware::Return<uint32_t> registerMemory(
-            const hardware::hidl_handle& descriptor,
-            uint32_t bufferSize, uint32_t bufferCount) override;
-    hardware::Return<void> unregisterMemory(uint32_t memId) override;
-    hardware::Return<void> dataCallback(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex,
-            const hardware::camera::device::V1_0::CameraFrameMetadata& metadata) override;
-    hardware::Return<void> dataCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            uint32_t data, uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestamp(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_handle& frameData, uint32_t data,
-            uint32_t bufferIndex, int64_t timestamp) override;
-    hardware::Return<void> handleCallbackTimestampBatch(
-            hardware::camera::device::V1_0::DataCallbackMsg msgType,
-            const hardware::hidl_vec<
-                    hardware::camera::device::V1_0::HandleTimestampMessage>&) override;
-
-    /**
-     * Implementation of android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback
-     */
-    hardware::Return<void> dequeueBuffer(dequeueBuffer_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            enqueueBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            cancelBuffer(uint64_t bufferId) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBufferCount(uint32_t count) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setBuffersGeometry(uint32_t w, uint32_t h,
-                    hardware::graphics::common::V1_0::PixelFormat format) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setUsage(hardware::graphics::common::V1_0::BufferUsage usage) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setSwapInterval(int32_t interval) override;
-    hardware::Return<void> getMinUndequeuedBufferCount(
-        getMinUndequeuedBufferCount_cb _hidl_cb) override;
-    hardware::Return<hardware::camera::common::V1_0::Status>
-            setTimestamp(int64_t timestamp) override;
-
-    sp<ANativeWindow>        mPreviewWindow;
-
-    notify_callback               mNotifyCb;
-    data_callback                 mDataCb;
-    data_callback_timestamp       mDataCbTimestamp;
-    data_callback_timestamp_batch mDataCbTimestampBatch;
-    void *mCbUser;
-
-    // Cached values for preview stream parameters
-    static const int NOT_SET = -1;
-    int mPreviewScalingMode;
-    int mPreviewTransform;
-    int mPreviewWidth;
-    int mPreviewHeight;
-    int mPreviewFormat;
-    uint64_t mPreviewUsage;
-    int mPreviewSwapInterval;
-    android_native_rect_t mPreviewCrop;
-
-    struct BufferHasher {
-        size_t operator()(const buffer_handle_t& buf) const {
-            if (buf == nullptr)
-                return 0;
-
-            size_t result = 1;
-            result = 31 * result + buf->numFds;
-            result = 31 * result + buf->numInts;
-            int length = buf->numFds + buf->numInts;
-            for (int i = 0; i < length; i++) {
-                result = 31 * result + buf->data[i];
-            }
-            return result;
-        }
-    };
-
-    struct BufferComparator {
-        bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
-            if (buf1->numFds == buf2->numFds && buf1->numInts == buf2->numInts) {
-                int length = buf1->numFds + buf1->numInts;
-                for (int i = 0; i < length; i++) {
-                    if (buf1->data[i] != buf2->data[i]) {
-                        return false;
-                    }
-                }
-                return true;
-            }
-            return false;
-        }
-    };
-
-    std::mutex mBufferIdMapLock; // protecting mBufferIdMap and mNextBufferId
-    typedef std::unordered_map<const buffer_handle_t, uint64_t,
-            BufferHasher, BufferComparator> BufferIdMap;
-    // stream ID -> per stream buffer ID map
-    BufferIdMap mBufferIdMap;
-    std::unordered_map<uint64_t, ANativeWindowBuffer*> mReversedBufMap;
-    uint64_t mNextBufferId = 1;
-    static const uint64_t BUFFER_ID_NO_BUFFER = 0;
-
-    std::mutex mHidlMemPoolMapLock; // protecting mHidlMemPoolMap
-    std::unordered_map<int, camera_memory_t*> mHidlMemPoolMap;
-};
-
-};  // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/BufferUtils.cpp b/services/camera/libcameraservice/device3/BufferUtils.cpp
index cc29390..f3adf20 100644
--- a/services/camera/libcameraservice/device3/BufferUtils.cpp
+++ b/services/camera/libcameraservice/device3/BufferUtils.cpp
@@ -28,14 +28,14 @@
 namespace android {
 namespace camera3 {
 
-camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
+camera_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
     using hardware::camera::device::V3_2::BufferStatus;
 
     switch (status) {
-        case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
-        case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
+        case BufferStatus::OK: return CAMERA_BUFFER_STATUS_OK;
+        case BufferStatus::ERROR: return CAMERA_BUFFER_STATUS_ERROR;
     }
-    return CAMERA3_BUFFER_STATUS_ERROR;
+    return CAMERA_BUFFER_STATUS_ERROR;
 }
 
 void BufferRecords::takeInflightBufferMap(BufferRecords& other) {
diff --git a/services/camera/libcameraservice/device3/BufferUtils.h b/services/camera/libcameraservice/device3/BufferUtils.h
index 452a908..1e1cd60 100644
--- a/services/camera/libcameraservice/device3/BufferUtils.h
+++ b/services/camera/libcameraservice/device3/BufferUtils.h
@@ -25,9 +25,6 @@
 
 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
 
-// TODO: remove legacy camera3.h references
-#include "hardware/camera3.h"
-
 #include <device3/Camera3OutputInterface.h>
 
 namespace android {
@@ -158,7 +155,7 @@
 
     static const uint64_t BUFFER_ID_NO_BUFFER = 0;
 
-    camera3_buffer_status_t mapHidlBufferStatus(
+    camera_buffer_status_t mapHidlBufferStatus(
             hardware::camera::device::V3_2::BufferStatus status);
 } // namespace camera3
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index d6bf83e..a556200 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -40,16 +40,17 @@
     ATRACE_CALL();
 
     int streamId = streamInfo.streamId;
-    int streamSetId = streamInfo.streamSetId;
+    StreamSetKey streamSetKey = {streamInfo.streamSetId, streamInfo.isMultiRes};
 
-    if (streamId == CAMERA3_STREAM_ID_INVALID || streamSetId == CAMERA3_STREAM_SET_ID_INVALID) {
+    if (streamId == CAMERA3_STREAM_ID_INVALID ||
+            streamSetKey.id == CAMERA3_STREAM_SET_ID_INVALID) {
         ALOGE("%s: Stream id (%d) or stream set id (%d) is invalid",
-                __FUNCTION__, streamId, streamSetId);
+                __FUNCTION__, streamId, streamSetKey.id);
         return BAD_VALUE;
     }
     if (streamInfo.totalBufferCount > kMaxBufferCount || streamInfo.totalBufferCount == 0) {
         ALOGE("%s: Stream id (%d) with stream set id (%d) total buffer count %zu is invalid",
-                __FUNCTION__, streamId, streamSetId, streamInfo.totalBufferCount);
+                __FUNCTION__, streamId, streamSetKey.id, streamInfo.totalBufferCount);
         return BAD_VALUE;
     }
     if (!streamInfo.isConfigured) {
@@ -75,7 +76,8 @@
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
         ssize_t streamIdx = mStreamSetMap[i].streamInfoMap.indexOfKey(streamId);
         if (streamIdx != NAME_NOT_FOUND &&
-            mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId) {
+            mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId &&
+            mStreamSetMap[i].streamInfoMap[streamIdx].isMultiRes != streamInfo.isMultiRes) {
             ALOGE("%s: It is illegal to register the same stream id with different stream set",
                     __FUNCTION__);
             return BAD_VALUE;
@@ -83,20 +85,20 @@
     }
     // Check if there is an existing stream set registered; if not, create one; otherwise, add this
     // stream info to the existing stream set entry.
-    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
     if (setIdx == NAME_NOT_FOUND) {
-        ALOGV("%s: stream set %d is not registered to stream set map yet, create it.",
-                __FUNCTION__, streamSetId);
+        ALOGV("%s: stream set %d(%d) is not registered to stream set map yet, create it.",
+                __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
         // Create stream info map, then add to mStreamsetMap.
         StreamSet newStreamSet;
-        setIdx = mStreamSetMap.add(streamSetId, newStreamSet);
+        setIdx = mStreamSetMap.add(streamSetKey, newStreamSet);
     }
     // Update stream set map and water mark.
     StreamSet& currentStreamSet = mStreamSetMap.editValueAt(setIdx);
     ssize_t streamIdx = currentStreamSet.streamInfoMap.indexOfKey(streamId);
     if (streamIdx != NAME_NOT_FOUND) {
-        ALOGW("%s: stream %d was already registered with stream set %d",
-                __FUNCTION__, streamId, streamSetId);
+        ALOGW("%s: stream %d was already registered with stream set %d(%d)",
+                __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
     currentStreamSet.streamInfoMap.add(streamId, streamInfo);
@@ -113,21 +115,22 @@
     return OK;
 }
 
-status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId) {
+status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId, bool isMultiRes) {
     ATRACE_CALL();
 
     Mutex::Autolock l(mLock);
-    ALOGV("%s: unregister stream %d with stream set %d", __FUNCTION__,
-            streamId, streamSetId);
+    ALOGV("%s: unregister stream %d with stream set %d(%d)", __FUNCTION__,
+            streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
-        ALOGE("%s: stream %d with set id %d wasn't properly registered to this buffer manager!",
-                __FUNCTION__, streamId, streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
+        ALOGE("%s: stream %d with set %d(%d) wasn't properly registered to this"
+                " buffer manager!", __FUNCTION__, streamId, streamSetId, isMultiRes);
         return BAD_VALUE;
     }
 
     // De-list all the buffers associated with this stream first.
-    StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetKey);
     BufferCountMap& handOutBufferCounts = currentSet.handoutBufferCountMap;
     BufferCountMap& attachedBufferCounts = currentSet.attachedBufferCountMap;
     InfoMap& infoMap = currentSet.streamInfoMap;
@@ -150,26 +153,28 @@
 
     // Remove this stream set if all its streams have been removed.
     if (handOutBufferCounts.size() == 0 && infoMap.size() == 0) {
-        mStreamSetMap.removeItem(streamSetId);
+        mStreamSetMap.removeItem(streamSetKey);
     }
 
     return OK;
 }
 
-void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId) {
+void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes) {
     Mutex::Autolock l(mLock);
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     size_t& attachedBufferCount =
             streamSet.attachedBufferCountMap.editValueFor(streamId);
     attachedBufferCount--;
 }
 
 status_t Camera3BufferManager::checkAndFreeBufferOnOtherStreamsLocked(
-        int streamId, int streamSetId) {
+        int streamId, StreamSetKey streamSetKey) {
     StreamId firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     if (streamSet.streamInfoMap.size() == 1) {
-        ALOGV("StreamSet %d has no other stream available to free", streamSetId);
+        ALOGV("StreamSet %d(%d) has no other stream available to free",
+                streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
 
@@ -190,7 +195,8 @@
         firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
     }
     if (firstOtherStreamId == CAMERA3_STREAM_ID_INVALID || !freeBufferIsAttached) {
-        ALOGV("StreamSet %d has no buffer available to free", streamSetId);
+        ALOGV("StreamSet %d(%d) has no buffer available to free",
+                streamSetKey.id, streamSetKey.isMultiRes);
         return OK;
     }
 
@@ -237,20 +243,21 @@
 }
 
 status_t Camera3BufferManager::getBufferForStream(int streamId, int streamSetId,
-        sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
+        bool isMultiRes, sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
     ATRACE_CALL();
 
     Mutex::Autolock l(mLock);
-    ALOGV("%s: get buffer for stream %d with stream set %d", __FUNCTION__,
-            streamId, streamSetId);
+    ALOGV("%s: get buffer for stream %d with stream set %d(%d)", __FUNCTION__,
+            streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)) {
-        ALOGE("%s: stream %d is not registered with stream set %d yet!!!",
-                __FUNCTION__, streamId, streamSetId);
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)) {
+        ALOGE("%s: stream %d is not registered with stream set %d(%d) yet!!!",
+                __FUNCTION__, streamId, streamSetId, isMultiRes);
         return BAD_VALUE;
     }
 
-    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+    StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
     BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
     size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
     BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -272,7 +279,8 @@
         bufferCount++;
         return ALREADY_EXISTS;
     }
-    ALOGV("Stream %d set %d: Get buffer for stream: Allocate new", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Get buffer for stream: Allocate new",
+            streamId, streamSetId, isMultiRes);
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
         const StreamInfo& info = streamSet.streamInfoMap.valueFor(streamId);
@@ -313,13 +321,13 @@
         // in returnBufferForStream() if we want to free buffer more quickly.
         // TODO: probably should find out all the inactive stream IDs, and free the firstly found
         // buffers for them.
-        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
         if (res != OK) {
             return res;
         }
         // Since we just allocated one new buffer above, try free one more buffer from other streams
         // to prevent total buffer count from growing
-        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+        res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
         if (res != OK) {
             return res;
         }
@@ -332,7 +340,7 @@
 }
 
 status_t Camera3BufferManager::onBufferReleased(
-        int streamId, int streamSetId, bool* shouldFreeBuffer) {
+        int streamId, int streamSetId, bool isMultiRes, bool* shouldFreeBuffer) {
     ATRACE_CALL();
 
     if (shouldFreeBuffer == nullptr) {
@@ -341,22 +349,24 @@
     }
 
     Mutex::Autolock l(mLock);
-    ALOGV("Stream %d set %d: Buffer released", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Buffer released", streamId, streamSetId, isMultiRes);
     *shouldFreeBuffer = false;
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
         ALOGV("%s: signaling buffer release for an already unregistered stream "
-                "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+                "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId,
+                isMultiRes);
         return OK;
     }
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
-        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
         BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
         size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
         bufferCount--;
-        ALOGV("%s: Stream %d set %d: Buffer count now %zu", __FUNCTION__, streamId, streamSetId,
-                bufferCount);
+        ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu", __FUNCTION__, streamId,
+                streamSetId, isMultiRes, bufferCount);
 
         size_t totalAllocatedBufferCount = 0;
         size_t totalHandOutBufferCount = 0;
@@ -371,8 +381,9 @@
             // BufferManager got more than enough buffers, so decrease watermark
             // to trigger more buffers free operation.
             streamSet.allocatedBufferWaterMark = newWaterMark;
-            ALOGV("%s: Stream %d set %d: watermark--; now %zu",
-                    __FUNCTION__, streamId, streamSetId, streamSet.allocatedBufferWaterMark);
+            ALOGV("%s: Stream %d set %d(%d): watermark--; now %zu",
+                    __FUNCTION__, streamId, streamSetId, isMultiRes,
+                    streamSet.allocatedBufferWaterMark);
         }
 
         size_t attachedBufferCount = streamSet.attachedBufferCountMap.valueFor(streamId);
@@ -395,20 +406,22 @@
     return OK;
 }
 
-status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId, size_t count) {
+status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId,
+        bool isMultiRes, size_t count) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
-    ALOGV("Stream %d set %d: Buffer removed", streamId, streamSetId);
+    ALOGV("Stream %d set %d(%d): Buffer removed", streamId, streamSetId, isMultiRes);
 
-    if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+    StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+    if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
         ALOGV("%s: signaling buffer removal for an already unregistered stream "
-                "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+                "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId, isMultiRes);
         return OK;
     }
 
     if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
-        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+        StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
         BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
         size_t& totalHandoutCount = handOutBufferCounts.editValueFor(streamId);
         BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -427,8 +440,9 @@
 
         totalHandoutCount -= count;
         totalAttachedCount -= count;
-        ALOGV("%s: Stream %d set %d: Buffer count now %zu, attached buffer count now %zu",
-                __FUNCTION__, streamId, streamSetId, totalHandoutCount, totalAttachedCount);
+        ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu, attached buffer count now %zu",
+                __FUNCTION__, streamId, streamSetId, isMultiRes, totalHandoutCount,
+                totalAttachedCount);
     } else {
         // TODO: implement gralloc V1 support
         return BAD_VALUE;
@@ -444,7 +458,8 @@
     String8 lines;
     lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
-        lines.appendFormat("        Stream set %d has below streams:\n", mStreamSetMap.keyAt(i));
+        lines.appendFormat("        Stream set %d(%d) has below streams:\n",
+                mStreamSetMap.keyAt(i).id, mStreamSetMap.keyAt(i).isMultiRes);
         for (size_t j = 0; j < mStreamSetMap[i].streamInfoMap.size(); j++) {
             lines.appendFormat("          Stream %d\n", mStreamSetMap[i].streamInfoMap[j].streamId);
         }
@@ -470,11 +485,12 @@
     write(fd, lines.string(), lines.size());
 }
 
-bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId, int streamSetId) const {
-    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId,
+        StreamSetKey streamSetKey) const {
+    ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
     if (setIdx == NAME_NOT_FOUND) {
-        ALOGV("%s: stream set %d is not registered to stream set map yet!",
-                __FUNCTION__, streamSetId);
+        ALOGV("%s: stream set %d(%d) is not registered to stream set map yet!",
+                __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
         return false;
     }
 
@@ -486,9 +502,10 @@
 
     size_t bufferWaterMark = mStreamSetMap[setIdx].maxAllowedBufferCount;
     if (bufferWaterMark == 0 || bufferWaterMark > kMaxBufferCount) {
-        ALOGW("%s: stream %d with stream set %d is not registered correctly to stream set map,"
+        ALOGW("%s: stream %d with stream set %d(%d) is not registered correctly to stream set map,"
                 " as the water mark (%zu) is wrong!",
-                __FUNCTION__, streamId, streamSetId, bufferWaterMark);
+                __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes,
+                bufferWaterMark);
         return false;
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index f0de1c1..64aaa23 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -99,7 +99,7 @@
      *             combination doesn't match what was registered, or this stream wasn't registered
      *             to this buffer manager before.
      */
-    status_t unregisterStream(int streamId, int streamSetId);
+    status_t unregisterStream(int streamId, int streamSetId, bool isMultiRes);
 
     /**
      * This method obtains a buffer for a stream from this buffer manager.
@@ -127,8 +127,8 @@
      *  NO_MEMORY: Unable to allocate a buffer for this stream at this time.
      */
     status_t getBufferForStream(
-            int streamId, int streamSetId, sp<GraphicBuffer>* gb, int* fenceFd,
-            bool noFreeBufferAtConsumer = false);
+            int streamId, int streamSetId, bool isMultiRes, sp<GraphicBuffer>* gb,
+            int* fenceFd, bool noFreeBufferAtConsumer = false);
 
     /**
      * This method notifies the manager that a buffer has been released by the consumer.
@@ -153,7 +153,8 @@
      *             combination doesn't match what was registered, or this stream wasn't registered
      *             to this buffer manager before, or shouldFreeBuffer is null.
      */
-    status_t onBufferReleased(int streamId, int streamSetId, /*out*/bool* shouldFreeBuffer);
+    status_t onBufferReleased(int streamId, int streamSetId, bool isMultiRes,
+                              /*out*/bool* shouldFreeBuffer);
 
     /**
      * This method notifies the manager that certain buffers have been removed from the
@@ -171,13 +172,13 @@
      *             to this buffer manager before, or the removed buffer count is larger than
      *             current total handoutCount or attachedCount.
      */
-    status_t onBuffersRemoved(int streamId, int streamSetId, size_t count);
+    status_t onBuffersRemoved(int streamId, int streamSetId, bool isMultiRes, size_t count);
 
     /**
      * This method notifies the manager that a buffer is freed from the buffer queue, usually
      * because onBufferReleased signals the caller to free a buffer via the shouldFreeBuffer flag.
      */
-    void notifyBufferRemoved(int streamId, int streamSetId);
+    void notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes);
 
     /**
      * Dump the buffer manager statistics.
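To make the widened signatures above concrete, here is a hedged sketch (not from this patch) of the acquire/release flow for a stream that is assumed to be already registered with the manager as stream 1 in stream set 0, non-multi-resolution:

// Sketch only: pull a buffer, hand it out, and process the release notification.
void exerciseBufferManager(Camera3BufferManager& manager) {
    sp<GraphicBuffer> gb;
    int fenceFd = -1;
    status_t res = manager.getBufferForStream(
            /*streamId*/ 1, /*streamSetId*/ 0, /*isMultiRes*/ false, &gb, &fenceFd);
    if (res != OK) {
        return;  // e.g. BAD_VALUE or NO_MEMORY; ALREADY_EXISTS means an attached buffer can be reused
    }

    // ... attach gb to the output stream's buffer queue and use it ...

    bool shouldFreeBuffer = false;
    manager.onBufferReleased(/*streamId*/ 1, /*streamSetId*/ 0, /*isMultiRes*/ false,
            &shouldFreeBuffer);
    if (shouldFreeBuffer) {
        // The caller frees one buffer from its queue, then reports the removal.
        manager.notifyBufferRemoved(/*streamId*/ 1, /*streamSetId*/ 0, /*isMultiRes*/ false);
    }

    manager.unregisterStream(/*streamId*/ 1, /*streamSetId*/ 0, /*isMultiRes*/ false);
}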
@@ -292,8 +293,20 @@
     /**
      * Stream set map managed by this buffer manager.
      */
-    typedef int StreamSetId;
-    KeyedVector<StreamSetId, StreamSet> mStreamSetMap;
+    struct StreamSetKey {
+        // The stream set ID
+        int id;
+        // Whether this stream set is for multi-resolution output streams. It's
+        // valid for 2 stream sets to have the same stream set ID if one is for
+        // multi-resolution output streams and the other one is not.
+        bool isMultiRes;
+
+        inline bool operator<(const StreamSetKey& other) const {
+            return (isMultiRes < other.isMultiRes) ||
+                    ((isMultiRes == other.isMultiRes) && (id < other.id));
+        }
+    };
+    KeyedVector<StreamSetKey, StreamSet> mStreamSetMap;
     KeyedVector<StreamId, wp<Camera3OutputStream>> mStreamMap;
 
     // TODO: There is no easy way to query the Gralloc version in this code yet, we have different
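To show why the comparator above lets a multi-resolution and a regular stream set share an ID, here is a small standalone sketch; the names are hypothetical and std::map stands in for KeyedVector, but it reuses the same strict weak ordering on (isMultiRes, id):

// Sketch only: same ID, different isMultiRes -> two distinct stream set entries.
#include <cassert>
#include <map>

struct StreamSetKeyExample {
    int id;
    bool isMultiRes;
    bool operator<(const StreamSetKeyExample& other) const {
        return (isMultiRes < other.isMultiRes) ||
                ((isMultiRes == other.isMultiRes) && (id < other.id));
    }
};

int main() {
    std::map<StreamSetKeyExample, const char*> sets;
    sets[{5, false}] = "regular output streams";
    sets[{5, true}]  = "multi-resolution output streams";
    assert(sets.size() == 2);  // the keys compare as distinct, so both sets coexist
    return 0;
}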
@@ -304,13 +317,13 @@
      * Check if this stream was successfully registered already. This method needs to be called with
      * mLock held.
      */
-    bool checkIfStreamRegisteredLocked(int streamId, int streamSetId) const;
+    bool checkIfStreamRegisteredLocked(int streamId, StreamSetKey streamSetKey) const;
 
     /**
     * Check if other streams in the stream set have an extra buffer available to be freed, and
      * free one if so.
      */
-    status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, int streamSetId);
+    status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, StreamSetKey streamSetKey);
 };
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 4a509aa..0101c58 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -49,6 +49,7 @@
 #include <utils/Timers.h>
 #include <cutils/properties.h>
 
+#include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 
 #include "utils/CameraTraces.h"
@@ -56,11 +57,13 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
-#include "device3/Camera3DummyStream.h"
+#include "device3/Camera3FakeStream.h"
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 #include "utils/CameraThreadState.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #include <algorithm>
 #include <tuple>
@@ -68,10 +71,11 @@
 using namespace android::camera3;
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id):
+Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass):
         mId(id),
         mOperatingMode(NO_MODE),
         mIsConstrainedHighSpeedConfiguration(false),
@@ -89,7 +93,8 @@
         mListener(NULL),
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
         mLastTemplateId(-1),
-        mNeedFixupMonochromeTags(false)
+        mNeedFixupMonochromeTags(false),
+        mOverrideForPerfClass(overrideForPerfClass)
 {
     ATRACE_CALL();
     ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -128,7 +133,7 @@
         return res;
     }
 
-    res = manager->getCameraCharacteristics(mId.string(), &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -140,8 +145,9 @@
     bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
     if (isLogical) {
         for (auto& physicalId : physicalCameraIds) {
+            // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -270,7 +276,7 @@
     }
 
     /** Register in-flight map to the status tracker */
-    mInFlightStatusId = mStatusTracker->addComponent();
+    mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
 
     if (mUseHalBufManager) {
         res = mRequestBufferSM.initialize(mStatusTracker);
@@ -309,9 +315,10 @@
 
     internalUpdateStatusLocked(STATUS_UNCONFIGURED);
     mNextStreamId = 0;
-    mDummyStreamId = NO_STREAM;
+    mFakeStreamId = NO_STREAM;
     mNeedConfig = true;
     mPauseStateNotify = false;
+    mIsInputStreamMultiResolution = false;
 
     // Measure the clock domain offset between camera and video/hw_composer
     camera_metadata_entry timestampSource =
@@ -329,17 +336,6 @@
         mUsePartialResult = (mNumPartialResults > 1);
     }
 
-    camera_metadata_entry configs =
-            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    for (uint32_t i = 0; i < configs.count; i += 4) {
-        if (configs.data.i32[i] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
-                configs.data.i32[i + 3] ==
-                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) {
-            mSupportedOpaqueInputSizes.add(Size(configs.data.i32[i + 1],
-                    configs.data.i32[i + 2]));
-        }
-    }
-
     bool usePrecorrectArray = DistortionMapper::isDistortionSupported(mDeviceInfo);
     if (usePrecorrectArray) {
         res = mDistortionMappers[mId.c_str()].setupStaticInfo(mDeviceInfo);
@@ -356,6 +352,23 @@
         mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
     }
 
+    camera_metadata_entry_t availableTestPatternModes = mDeviceInfo.find(
+            ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES);
+    for (size_t i = 0; i < availableTestPatternModes.count; i++) {
+        if (availableTestPatternModes.data.i32[i] ==
+                ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
+            mSupportCameraMute = true;
+            mSupportTestPatternSolidColor = true;
+            break;
+        } else if (availableTestPatternModes.data.i32[i] ==
+                ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
+            mSupportCameraMute = true;
+            mSupportTestPatternSolidColor = false;
+        }
+    }
+
+    mInjectionMethods = new Camera3DeviceInjectionMethods(this);
+
     return OK;
 }
 
@@ -429,6 +442,10 @@
             mStatusTracker->join();
         }
 
+        if (mInjectionMethods->isInjecting()) {
+            mInjectionMethods->stopInjection();
+        }
+
         HalInterface* interface;
         {
             Mutex::Autolock l(mLock);
@@ -482,37 +499,6 @@
     return gotLock;
 }
 
-Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
-    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
-    const int STREAM_CONFIGURATION_SIZE = 4;
-    const int STREAM_FORMAT_OFFSET = 0;
-    const int STREAM_WIDTH_OFFSET = 1;
-    const int STREAM_HEIGHT_OFFSET = 2;
-    const int STREAM_IS_INPUT_OFFSET = 3;
-    camera_metadata_ro_entry_t availableStreamConfigs =
-            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-    if (availableStreamConfigs.count == 0 ||
-            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
-        return Size(0, 0);
-    }
-
-    // Get max jpeg size (area-wise).
-    for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
-        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
-        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
-        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
-        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
-        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
-                && format == HAL_PIXEL_FORMAT_BLOB &&
-                (width * height > maxJpegWidth * maxJpegHeight)) {
-            maxJpegWidth = width;
-            maxJpegHeight = height;
-        }
-    }
-
-    return Size(maxJpegWidth, maxJpegHeight);
-}
-
 nsecs_t Camera3Device::getMonoToBoottimeOffset() {
     // try three times to get the clock offset, choose the one
     // with the minimum gap in measurements.
@@ -546,15 +532,15 @@
     return usage;
 }
 
-StreamRotation Camera3Device::mapToStreamRotation(camera3_stream_rotation_t rotation) {
+StreamRotation Camera3Device::mapToStreamRotation(camera_stream_rotation_t rotation) {
     switch (rotation) {
-        case CAMERA3_STREAM_ROTATION_0:
+        case CAMERA_STREAM_ROTATION_0:
             return StreamRotation::ROTATION_0;
-        case CAMERA3_STREAM_ROTATION_90:
+        case CAMERA_STREAM_ROTATION_90:
             return StreamRotation::ROTATION_90;
-        case CAMERA3_STREAM_ROTATION_180:
+        case CAMERA_STREAM_ROTATION_180:
             return StreamRotation::ROTATION_180;
-        case CAMERA3_STREAM_ROTATION_270:
+        case CAMERA_STREAM_ROTATION_270:
             return StreamRotation::ROTATION_270;
     }
     ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
@@ -562,14 +548,14 @@
 }
 
 status_t Camera3Device::mapToStreamConfigurationMode(
-        camera3_stream_configuration_mode_t operationMode, StreamConfigurationMode *mode) {
+        camera_stream_configuration_mode_t operationMode, StreamConfigurationMode *mode) {
     if (mode == nullptr) return BAD_VALUE;
-    if (operationMode < CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START) {
+    if (operationMode < CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START) {
         switch(operationMode) {
-            case CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE:
+            case CAMERA_STREAM_CONFIGURATION_NORMAL_MODE:
                 *mode = StreamConfigurationMode::NORMAL_MODE;
                 break;
-            case CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
+            case CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
                 *mode = StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
                 break;
             default:
@@ -603,13 +589,26 @@
 }
 
 ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
-    // Get max jpeg size (area-wise).
-    Size maxJpegResolution = getMaxJpegResolution();
-    if (maxJpegResolution.width == 0) {
+    // Get max jpeg size (area-wise) for default sensor pixel mode
+    camera3::Size maxDefaultJpegResolution =
+            SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+                    /*isUltraHighResolutionSensor*/false);
+    // Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
+    // not ultra high res sensor
+    camera3::Size uhrMaxJpegResolution =
+            SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+                    /*isUltraHighResolution*/true);
+    if (maxDefaultJpegResolution.width == 0) {
         ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
                 __FUNCTION__, mId.string());
         return BAD_VALUE;
     }
+    bool useMaxSensorPixelModeThreshold = false;
+    if (uhrMaxJpegResolution.width != 0 &&
+            width * height > maxDefaultJpegResolution.width * maxDefaultJpegResolution.height) {
+        // Use the ultra high res max jpeg size and max jpeg buffer size
+        useMaxSensorPixelModeThreshold = true;
+    }
 
     // Get max jpeg buffer size
     ssize_t maxJpegBufferSize = 0;
@@ -620,17 +619,26 @@
         return BAD_VALUE;
     }
     maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
+
+    camera3::Size chosenMaxJpegResolution = maxDefaultJpegResolution;
+    if (useMaxSensorPixelModeThreshold) {
+        maxJpegBufferSize =
+                SessionConfigurationUtils::getUHRMaxJpegBufferSize(uhrMaxJpegResolution,
+                        maxDefaultJpegResolution, maxJpegBufferSize);
+        chosenMaxJpegResolution = uhrMaxJpegResolution;
+    }
     assert(kMinJpegBufferSize < maxJpegBufferSize);
 
     // Calculate final jpeg buffer size for the given resolution.
     float scaleFactor = ((float) (width * height)) /
-            (maxJpegResolution.width * maxJpegResolution.height);
+            (chosenMaxJpegResolution.width * chosenMaxJpegResolution.height);
     ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
             kMinJpegBufferSize;
     if (jpegBufferSize > maxJpegBufferSize) {
+        ALOGI("%s: jpeg buffer size calculated is > maxJpeg bufferSize(%zd), clamping",
+                  __FUNCTION__, maxJpegBufferSize);
         jpegBufferSize = maxJpegBufferSize;
     }
-
     return jpegBufferSize;
 }
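The interpolation above can be summarized with a small standalone sketch; the constants are hypothetical stand-ins for kMinJpegBufferSize and the static ANDROID_JPEG_MAX_SIZE value, and the function mirrors only the scaling and clamping steps:

#include <cstdint>
#include <sys/types.h>

constexpr ssize_t kMinJpegBufferSizeExample = 256 * 1024;        // assumed floor
constexpr ssize_t kMaxJpegBufferSizeExample = 12 * 1024 * 1024;  // assumed ANDROID_JPEG_MAX_SIZE

// Linearly interpolate between the floor and the max by the area ratio, then clamp.
ssize_t interpolateJpegBufferSize(uint32_t width, uint32_t height,
                                  uint32_t maxWidth, uint32_t maxHeight) {
    float scaleFactor = ((float)(width * height)) / (maxWidth * maxHeight);
    ssize_t size = scaleFactor * (kMaxJpegBufferSizeExample - kMinJpegBufferSizeExample) +
            kMinJpegBufferSizeExample;
    return size > kMaxJpegBufferSizeExample ? kMaxJpegBufferSizeExample : size;
}

// For example, a 1920x1080 request against a 4032x3024 maximum resolution gives
// roughly 0.17 * (12 MiB - 256 KiB) + 256 KiB, about 2.25 MiB.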
 
@@ -647,13 +655,17 @@
     return maxBytesForPointCloud;
 }
 
-ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height) const {
+ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height,
+        bool maxResolution) const {
     const int PER_CONFIGURATION_SIZE = 3;
     const int WIDTH_OFFSET = 0;
     const int HEIGHT_OFFSET = 1;
     const int SIZE_OFFSET = 2;
     camera_metadata_ro_entry rawOpaqueSizes =
-        mDeviceInfo.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
+        mDeviceInfo.find(
+            camera3::SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_OPAQUE_RAW_SIZE,
+                    maxResolution));
     size_t count = rawOpaqueSizes.count;
     if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
         ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
@@ -751,16 +763,21 @@
     }
 
     lines = String8("    In-flight requests:\n");
-    if (mInFlightMap.size() == 0) {
-        lines.append("      None\n");
-    } else {
-        for (size_t i = 0; i < mInFlightMap.size(); i++) {
-            InFlightRequest r = mInFlightMap.valueAt(i);
-            lines.appendFormat("      Frame %d |  Timestamp: %" PRId64 ", metadata"
-                    " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
-                    r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
-                    r.numBuffersLeft);
+    if (mInFlightLock.try_lock()) {
+        if (mInFlightMap.size() == 0) {
+            lines.append("      None\n");
+        } else {
+            for (size_t i = 0; i < mInFlightMap.size(); i++) {
+                InFlightRequest r = mInFlightMap.valueAt(i);
+                lines.appendFormat("      Frame %d |  Timestamp: %" PRId64 ", metadata"
+                        " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
+                        r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
+                        r.numBuffersLeft);
+            }
         }
+        mInFlightLock.unlock();
+    } else {
+        lines.append("      Failed to acquire In-flight lock!\n");
     }
     write(fd, lines.string(), lines.size());
 
@@ -778,7 +795,7 @@
     }
 
     if (dumpTemplates) {
-        const char *templateNames[CAMERA3_TEMPLATE_COUNT] = {
+        const char *templateNames[CAMERA_TEMPLATE_COUNT] = {
             "TEMPLATE_PREVIEW",
             "TEMPLATE_STILL_CAPTURE",
             "TEMPLATE_VIDEO_RECORD",
@@ -787,10 +804,10 @@
             "TEMPLATE_MANUAL",
         };
 
-        for (int i = 1; i < CAMERA3_TEMPLATE_COUNT; i++) {
+        for (int i = 1; i < CAMERA_TEMPLATE_COUNT; i++) {
             camera_metadata_t *templateRequest = nullptr;
             mInterface->constructDefaultRequestSettings(
-                    (camera3_request_template_t) i, &templateRequest);
+                    (camera_request_template_t) i, &templateRequest);
             lines = String8::format("    HAL Request %s:\n", templateNames[i-1]);
             if (templateRequest == nullptr) {
                 lines.append("       Not supported\n");
@@ -867,7 +884,7 @@
 status_t Camera3Device::convertMetadataListToRequestListLocked(
         const List<const PhysicalCameraSettingsList> &metadataList,
         const std::list<const SurfaceMap> &surfaceMaps,
-        bool repeating,
+        bool repeating, nsecs_t requestTimeNs,
         RequestList *requestList) {
     if (requestList == NULL) {
         CLOGE("requestList cannot be NULL.");
@@ -886,6 +903,7 @@
         }
 
         newRequest->mRepeating = repeating;
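+        // Record when the request was submitted; the timestamp travels with the request into
+        // the in-flight map.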
+        newRequest->mRequestTimeNs = requestTimeNs;
 
         // Setup burst Id and request Id
         newRequest->mResultExtras.burstId = burstId++;
@@ -911,6 +929,7 @@
         for (auto& outputStream : (*firstRequest)->mOutputStreams) {
             if (outputStream->isVideoStream()) {
                 (*firstRequest)->mBatchSize = requestList->size();
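+                // Also tell the video stream the batch size so it can batch its buffer
+                // operations for the burst.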
+                outputStream->setBatchSize(requestList->size());
                 break;
             }
         }
@@ -953,6 +972,8 @@
         /*out*/
         int64_t *lastFrameNumber) {
     ATRACE_CALL();
+    nsecs_t requestTimeNs = systemTime();
+
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -965,7 +986,7 @@
     RequestList requestList;
 
     res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
-            repeating, /*out*/&requestList);
+            repeating, requestTimeNs, /*out*/&requestList);
     if (res != OK) {
         // error logged by previous call
         return res;
@@ -997,7 +1018,7 @@
         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
         requestStreamBuffers_cb _hidl_cb) {
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
         *this, *mInterface, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -1006,7 +1027,7 @@
 hardware::Return<void> Camera3Device::returnStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, *mInterface};
+        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
@@ -1054,7 +1075,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface
     };
 
     for (const auto& result : results) {
@@ -1112,7 +1134,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface
     };
 
     for (const auto& result : results) {
@@ -1152,7 +1175,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -1196,7 +1220,7 @@
         // This point should only be reached via API1 (API2 must explicitly call configureStreams)
         // so unilaterally select normal operating mode.
         res = filterParamsAndConfigureLocked(request.begin()->metadata,
-                CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
+                CAMERA_STREAM_CONFIGURATION_NORMAL_MODE);
         // Stream configuration failed. Client might try other configurations.
         if (res != OK) {
             CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
@@ -1246,7 +1270,7 @@
 }
 
 status_t Camera3Device::createInputStream(
-        uint32_t width, uint32_t height, int format, int *id) {
+        uint32_t width, uint32_t height, int format, bool isMultiResolution, int *id) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
@@ -1293,6 +1317,7 @@
     newStream->setStatusTracker(mStatusTracker);
 
     mInputStream = newStream;
+    mIsInputStreamMultiResolution = isMultiResolution;
 
     *id = mNextStreamId++;
 
@@ -1315,9 +1340,11 @@
 
 status_t Camera3Device::createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
-            std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+            uint64_t consumerUsage) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1329,24 +1356,37 @@
     consumers.push_back(consumer);
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
-            format, dataSpace, rotation, id, physicalCameraId, surfaceIds, streamSetId,
-            isShared, consumerUsage);
+            format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            streamSetId, isShared, isMultiResolution, consumerUsage);
+}
+
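+// Helper to check whether a stream format is one of the RAW pixel formats.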
+static bool isRawFormat(int format) {
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+            return true;
+        default:
+            return false;
+    }
 }
 
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-        android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
-        const String8& physicalCameraId,
-        std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+        android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
+        const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+        uint64_t consumerUsage) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
-            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s", mId.string(),
-            mNextStreamId, width, height, format, dataSpace, rotation, consumerUsage, isShared,
-            physicalCameraId.string());
+            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
+            mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
+            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
 
     status_t res;
     bool wasActive = false;
@@ -1389,6 +1429,12 @@
         return BAD_VALUE;
     }
 
+    if (isRawFormat(format) && sensorPixelModesUsed.size() > 1) {
+        // A stream with a RAW format can't be used in both sensor pixel modes, since it will
+        // only be advertised in one of them.
+        ALOGE("%s: RAW stream cannot be used with > 1 sensor pixel modes", __FUNCTION__);
+        return BAD_VALUE;
+    }
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ssize_t blobBufferSize;
         if (dataSpace == HAL_DATASPACE_DEPTH) {
@@ -1408,29 +1454,36 @@
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, blobBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
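+        // The opaque RAW buffer size differs between sensor pixel modes, so query the size for
+        // the mode this stream will be used in.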
-        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height);
+        bool maxResolution =
+                sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+                        sensorPixelModesUsed.end();
+        ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height, maxResolution);
         if (rawOpaqueBufferSize <= 0) {
             SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
             return BAD_VALUE;
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId,
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                 mUseHalBufManager);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, streamSetId);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
+                isMultiResolution);
     }
 
     size_t consumerCount = consumers.size();
@@ -1449,12 +1502,16 @@
 
     newStream->setBufferManager(mBufferManager);
 
+    newStream->setImageDumpMask(mImageDumpMask);
+
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
         SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
         return res;
     }
 
+    mSessionStatsBuilder.addStream(mNextStreamId);
+
     *id = mNextStreamId++;
     mNeedConfig = true;
 
@@ -1578,6 +1635,7 @@
             CLOGE("Stream %d does not exist", id);
             return BAD_VALUE;
         }
+        mSessionStatsBuilder.removeStream(id);
     }
 
     // Delete output stream or the output part of a bi-directional stream.
@@ -1609,7 +1667,7 @@
     // speculative configuration using the values from the last cached
     // default request.
     if (sessionParams.isEmpty() &&
-            ((mLastTemplateId > 0) && (mLastTemplateId < CAMERA3_TEMPLATE_COUNT)) &&
+            ((mLastTemplateId > 0) && (mLastTemplateId < CAMERA_TEMPLATE_COUNT)) &&
             (!mRequestTemplateCache[mLastTemplateId].isEmpty())) {
         ALOGV("%s: Speculative session param configuration with template id: %d", __func__,
                 mLastTemplateId);
@@ -1659,12 +1717,12 @@
     return mInputStream->getInputBufferProducer(producer);
 }
 
-status_t Camera3Device::createDefaultRequest(int templateId,
+status_t Camera3Device::createDefaultRequest(camera_request_template_t templateId,
         CameraMetadata *request) {
     ATRACE_CALL();
     ALOGV("%s: for template %d", __FUNCTION__, templateId);
 
-    if (templateId <= 0 || templateId >= CAMERA3_TEMPLATE_COUNT) {
+    if (templateId <= 0 || templateId >= CAMERA_TEMPLATE_COUNT) {
         android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
                 CameraThreadState::getCallingUid(), nullptr, 0);
         return BAD_VALUE;
@@ -1700,7 +1758,7 @@
 
     camera_metadata_t *rawRequest;
     status_t res = mInterface->constructDefaultRequestSettings(
-            (camera3_request_template_t) templateId, &rawRequest);
+            (camera_request_template_t) templateId, &rawRequest);
 
     {
         Mutex::Autolock l(mLock);
@@ -1768,13 +1826,13 @@
             maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
     if (res != OK) {
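+        // Log which status tracker components are still active to help diagnose the timeout.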
+        mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
                 res);
     }
     return res;
 }
 
-
 void Camera3Device::internalUpdateStatusLocked(Status status) {
     mStatus = status;
     mRecentStatusUpdates.add(mStatus);
@@ -1833,10 +1891,12 @@
 
     mStatusWaiters++;
 
+    bool signalPipelineDrain = false;
     if (!active && mUseHalBufManager) {
         auto streamIds = mOutputStreams.getStreamIds();
         if (mStatus == STATUS_ACTIVE) {
             mRequestThread->signalPipelineDrain(streamIds);
+            signalPipelineDrain = true;
         }
         mRequestBufferSM.onWaitUntilIdle();
     }
@@ -1866,6 +1926,10 @@
         }
     } while (!stateSeen);
 
+    if (signalPipelineDrain) {
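+        // Clear the drain signal sent to the request thread now that the wait has finished.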
+        mRequestThread->resetPipelineDrain();
+    }
+
     mStatusWaiters--;
 
     return res;
@@ -2003,6 +2067,9 @@
         }
 
         mRequestThread->clear(/*out*/frameNumber);
+
+        // Stop session and stream counter
+        mSessionStatsBuilder.stopCounter();
     }
 
     return mRequestThread->flush();
@@ -2080,6 +2147,9 @@
 
 void Camera3Device::notifyStatus(bool idle) {
     ATRACE_CALL();
+    std::vector<int> streamIds;
+    std::vector<hardware::CameraStreamStats> streamStats;
+
     {
         // Need mLock to safely update state and synchronize to current
         // state of methods in flight.
@@ -2097,6 +2167,24 @@
         // Skip notifying listener if we're doing some user-transparent
         // state changes
         if (mPauseStateNotify) return;
+
+        // Populate stream statistics when transitioning to idle
+        if (idle) {
+            for (size_t i = 0; i < mOutputStreams.size(); i++) {
+                auto stream = mOutputStreams[i];
+                if (stream.get() == nullptr) continue;
+                streamIds.push_back(stream->getId());
+                Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
+                int64_t usage = 0LL;
+                if (camera3Stream != nullptr) {
+                    usage = camera3Stream->getUsage();
+                }
+                streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
+                    stream->getFormat(), stream->getDataSpace(), usage,
+                    stream->getMaxHalBuffers(),
+                    stream->getMaxTotalBuffers() - stream->getMaxHalBuffers());
+            }
+        }
     }
 
     sp<NotificationListener> listener;
@@ -2104,8 +2192,40 @@
         std::lock_guard<std::mutex> l(mOutputLock);
         listener = mListener.promote();
     }
-    if (idle && listener != NULL) {
-        listener->notifyIdle();
+    status_t res = OK;
+    if (listener != nullptr) {
+        if (idle) {
+            // Get session stats from the builder, and notify the listener.
+            int64_t requestCount, resultErrorCount;
+            bool deviceError;
+            std::map<int, StreamStats> streamStatsMap;
+            mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
+                    &deviceError, &streamStatsMap);
+            for (size_t i = 0; i < streamIds.size(); i++) {
+                int streamId = streamIds[i];
+                auto stats = streamStatsMap.find(streamId);
+                if (stats != streamStatsMap.end()) {
+                    streamStats[i].mRequestCount = stats->second.mRequestedFrameCount;
+                    streamStats[i].mErrorCount = stats->second.mDroppedFrameCount;
+                    streamStats[i].mStartLatencyMs = stats->second.mStartLatencyMs;
+                    streamStats[i].mHistogramType =
+                            hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+                    streamStats[i].mHistogramBins.assign(
+                            stats->second.mCaptureLatencyBins.begin(),
+                            stats->second.mCaptureLatencyBins.end());
+                    streamStats[i].mHistogramCounts.assign(
+                           stats->second.mCaptureLatencyHistogram.begin(),
+                           stats->second.mCaptureLatencyHistogram.end());
+                }
+            }
+            listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+        } else {
+            res = listener->notifyActive();
+        }
+    }
+    if (res != OK) {
+        SET_ERR("Camera access permission lost mid-operation: %s (%d)",
+                strerror(-res), res);
     }
 }
 
@@ -2215,6 +2335,12 @@
         ALOGE("%s: Stream %d is not found.", __FUNCTION__, streamId);
         return BAD_VALUE;
     }
+
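+    // Pause this stream's statistics counter while its buffers are being dropped, and resume
+    // it once buffers flow again.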
+    if (dropping) {
+        mSessionStatsBuilder.stopCounter(streamId);
+    } else {
+        mSessionStatsBuilder.startCounter(streamId);
+    }
     return stream->dropBuffers(dropping);
 }
 
@@ -2306,6 +2432,35 @@
         newRequest->mRotateAndCropAuto = false;
     }
 
+    auto zoomRatioEntry =
+            newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
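+    // Remember whether the request asks for exactly 1.0x zoom ratio.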
+    if (zoomRatioEntry.count > 0 &&
+            zoomRatioEntry.data.f[0] == 1.0f) {
+        newRequest->mZoomRatioIs1x = true;
+    } else {
+        newRequest->mZoomRatioIs1x = false;
+    }
+
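+    // When camera muting is supported, remember the request's original test pattern mode and
+    // data so the mute override can fall back to them later.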
+    if (mSupportCameraMute) {
+        auto testPatternModeEntry =
+                newRequest->mSettingsList.begin()->metadata.find(ANDROID_SENSOR_TEST_PATTERN_MODE);
+        newRequest->mOriginalTestPatternMode = testPatternModeEntry.count > 0 ?
+                testPatternModeEntry.data.i32[0] :
+                ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+
+        auto testPatternDataEntry =
+                newRequest->mSettingsList.begin()->metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
+        if (testPatternDataEntry.count > 0) {
+            memcpy(newRequest->mOriginalTestPatternData, testPatternDataEntry.data.i32,
+                    sizeof(newRequest->mOriginalTestPatternData));
+        } else {
+            newRequest->mOriginalTestPatternData[0] = 0;
+            newRequest->mOriginalTestPatternData[1] = 0;
+            newRequest->mOriginalTestPatternData[2] = 0;
+            newRequest->mOriginalTestPatternData[3] = 0;
+        }
+    }
+
     return newRequest;
 }
 
@@ -2360,6 +2515,8 @@
     ATRACE_CALL();
     bool ret = false;
 
+    nsecs_t startTime = systemTime();
+
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
 
@@ -2407,6 +2564,9 @@
         ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
     }
 
+    CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
+        ns2ms(systemTime() - startTime));
+
     if (markClientActive) {
         mStatusTracker->markComponentActive(clientStatusId);
     }
@@ -2443,15 +2603,16 @@
     // any pending requests.
     if (mInputStream != NULL && notifyRequestThread) {
         while (true) {
-            camera3_stream_buffer_t inputBuffer;
+            camera_stream_buffer_t inputBuffer;
+            camera3::Size inputBufferSize;
             status_t res = mInputStream->getInputBuffer(&inputBuffer,
-                    /*respectHalLimit*/ false);
+                    &inputBufferSize, /*respectHalLimit*/ false);
             if (res != OK) {
                 // Exhausted acquiring all input buffers.
                 break;
             }
 
-            inputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+            inputBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
             res = mInputStream->returnInputBuffer(inputBuffer);
             if (res != OK) {
                 ALOGE("%s: %d: couldn't return input buffer while clearing input queue: "
@@ -2466,12 +2627,12 @@
     }
 
     // Workaround for device HALv3.2 or older spec bug - zero streams requires
-    // adding a dummy stream instead.
+    // adding a fake stream instead.
     // TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround.
     if (mOutputStreams.size() == 0) {
-        addDummyStreamLocked();
+        addFakeStreamLocked();
     } else {
-        tryRemoveDummyStreamLocked();
+        tryRemoveFakeStreamLocked();
     }
 
     // Start configuring the streams
@@ -2479,17 +2640,18 @@
 
     mPreparerThread->pause();
 
-    camera3_stream_configuration config;
+    camera_stream_configuration config;
     config.operation_mode = mOperatingMode;
     config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
+    config.input_is_multi_resolution = false;
 
-    Vector<camera3_stream_t*> streams;
+    Vector<camera3::camera_stream_t*> streams;
     streams.setCapacity(config.num_streams);
     std::vector<uint32_t> bufferSizes(config.num_streams, 0);
 
 
     if (mInputStream != NULL) {
-        camera3_stream_t *inputStream;
+        camera3::camera_stream_t *inputStream;
         inputStream = mInputStream->startConfiguration();
         if (inputStream == NULL) {
             CLOGE("Can't start input stream configuration");
@@ -2497,8 +2659,11 @@
             return INVALID_OPERATION;
         }
         streams.add(inputStream);
+
+        config.input_is_multi_resolution = mIsInputStreamMultiResolution;
     }
 
+    mGroupIdPhysicalCameraMap.clear();
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2509,7 +2674,7 @@
             continue;
         }
 
-        camera3_stream_t *outputStream;
+        camera3::camera_stream_t *outputStream;
         outputStream = mOutputStreams[i]->startConfiguration();
         if (outputStream == NULL) {
             CLOGE("Can't start output stream configuration");
@@ -2532,6 +2697,12 @@
                         __FUNCTION__, outputStream->data_space);
             }
         }
+
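+        // Record which physical camera IDs belong to each multi-resolution stream group; the
+        // map is handed to the request thread once configuration completes.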
+        if (mOutputStreams[i]->isMultiResolution()) {
+            int32_t streamGroupId = mOutputStreams[i]->getHalStreamGroupId();
+            const String8& physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
+            mGroupIdPhysicalCameraMap[streamGroupId].insert(physicalCameraId);
+        }
     }
 
     config.streams = streams.editArray();
@@ -2602,7 +2773,8 @@
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
     if (notifyRequestThread) {
-        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration, sessionParams);
+        mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration,
+                sessionParams, mGroupIdPhysicalCameraMap);
     }
 
     char value[PROPERTY_VALUE_MAX];
@@ -2633,7 +2805,7 @@
 
     mNeedConfig = false;
 
-    internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
+    internalUpdateStatusLocked((mFakeStreamId == NO_STREAM) ?
             STATUS_CONFIGURED : STATUS_UNCONFIGURED);
 
     ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
@@ -2647,69 +2819,82 @@
         return rc;
     }
 
-    if (mDummyStreamId == NO_STREAM) {
+    if (mFakeStreamId == NO_STREAM) {
         mRequestBufferSM.onStreamsConfigured();
     }
 
+    // Since the injection camera's stream configuration is derived from the internal camera, wait
+    // until the internal camera has configured its streams before calling injectCamera() to
+    // configure the injection streams.
+    if (mInjectionMethods->isInjecting()) {
+        ALOGV("%s: Injection camera %s: Start to configure streams.",
+              __FUNCTION__, mInjectionMethods->getInjectedCamId().string());
+        res = mInjectionMethods->injectCamera(config, bufferSizes);
+        if (res != OK) {
+            ALOGE("Can't finish inject camera process!");
+            return res;
+        }
+    }
+
     return OK;
 }
 
-status_t Camera3Device::addDummyStreamLocked() {
+status_t Camera3Device::addFakeStreamLocked() {
     ATRACE_CALL();
     status_t res;
 
-    if (mDummyStreamId != NO_STREAM) {
-        // Should never be adding a second dummy stream when one is already
+    if (mFakeStreamId != NO_STREAM) {
+        // Should never be adding a second fake stream when one is already
         // active
-        SET_ERR_L("%s: Camera %s: A dummy stream already exists!",
+        SET_ERR_L("%s: Camera %s: A fake stream already exists!",
                 __FUNCTION__, mId.string());
         return INVALID_OPERATION;
     }
 
-    ALOGV("%s: Camera %s: Adding a dummy stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Adding a fake stream", __FUNCTION__, mId.string());
 
-    sp<Camera3OutputStreamInterface> dummyStream =
-            new Camera3DummyStream(mNextStreamId);
+    sp<Camera3OutputStreamInterface> fakeStream =
+            new Camera3FakeStream(mNextStreamId);
 
-    res = mOutputStreams.add(mNextStreamId, dummyStream);
+    res = mOutputStreams.add(mNextStreamId, fakeStream);
     if (res < 0) {
-        SET_ERR_L("Can't add dummy stream to set: %s (%d)", strerror(-res), res);
+        SET_ERR_L("Can't add fake stream to set: %s (%d)", strerror(-res), res);
         return res;
     }
 
-    mDummyStreamId = mNextStreamId;
+    mFakeStreamId = mNextStreamId;
     mNextStreamId++;
 
     return OK;
 }
 
-status_t Camera3Device::tryRemoveDummyStreamLocked() {
+status_t Camera3Device::tryRemoveFakeStreamLocked() {
     ATRACE_CALL();
     status_t res;
 
-    if (mDummyStreamId == NO_STREAM) return OK;
+    if (mFakeStreamId == NO_STREAM) return OK;
     if (mOutputStreams.size() == 1) return OK;
 
-    ALOGV("%s: Camera %s: Removing the dummy stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Removing the fake stream", __FUNCTION__, mId.string());
 
-    // Ok, have a dummy stream and there's at least one other output stream,
-    // so remove the dummy
+    // Ok, have a fake stream and there's at least one other output stream,
+    // so remove the fake
 
-    sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mDummyStreamId);
+    sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mFakeStreamId);
     if (deletedStream == nullptr) {
-        SET_ERR_L("Dummy stream %d does not appear to exist", mDummyStreamId);
+        SET_ERR_L("Fake stream %d does not appear to exist", mFakeStreamId);
         return INVALID_OPERATION;
     }
-    mOutputStreams.remove(mDummyStreamId);
+    mOutputStreams.remove(mFakeStreamId);
 
     // Free up the stream endpoint so that it can be used by some other stream
     res = deletedStream->disconnect();
     if (res != OK) {
-        SET_ERR_L("Can't disconnect deleted dummy stream %d", mDummyStreamId);
+        SET_ERR_L("Can't disconnect deleted fake stream %d", mFakeStreamId);
         // fall through since we want to still list the stream as deleted.
     }
     mDeletedStreams.add(deletedStream);
-    mDummyStreamId = NO_STREAM;
+    mFakeStreamId = NO_STREAM;
 
     return res;
 }
@@ -2760,6 +2945,7 @@
     if (listener != NULL) {
         listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
                 CaptureResultExtras());
+        mSessionStatsBuilder.onDeviceError();
     }
 
     // Save stack trace. View by dumping it later.
@@ -2774,16 +2960,17 @@
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t maxExpectedDuration,
-        std::set<String8>& physicalCameraIds, bool isStillCapture,
-        bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& cameraIdsWithZoom,
-        const SurfaceMap& outputSurfaces) {
+        const std::set<std::set<String8>>& physicalCameraIds,
+        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+        const std::set<std::string>& cameraIdsWithZoom,
+        const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
             hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
-            rotateAndCropAuto, cameraIdsWithZoom, outputSurfaces));
+            rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs, outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -2814,7 +3001,7 @@
 }
 
 void Camera3Device::checkInflightMapLengthLocked() {
-    // Sanity check - if we have too many in-flight frames with long total inflight duration,
+    // Validation check - if we have too many in-flight frames with long total inflight duration,
     // something has likely gone wrong. This might still be legitimate only if the application
     // sends in a long burst of long-exposure requests.
     if (mExpectedInflightDuration > kMinWarnInflightDuration) {
@@ -2853,7 +3040,7 @@
 
     FlushInflightReqStates states {
         mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
-        listener, *this, *mInterface, *this};
+        listener, *this, *mInterface, *this, mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 }
@@ -2893,6 +3080,10 @@
         mSupportOfflineProcessing(supportOfflineProcessing) {
     // Check with hardware service manager if we can downcast these interfaces
     // Somewhat expensive, so cache the results at startup
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        mHidlSession_3_7 = castResult_3_7;
+    }
     auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
     if (castResult_3_6.isOk()) {
         mHidlSession_3_6 = castResult_3_6;
@@ -2926,6 +3117,7 @@
 }
 
 void Camera3Device::HalInterface::clear() {
+    mHidlSession_3_7.clear();
     mHidlSession_3_6.clear();
     mHidlSession_3_5.clear();
     mHidlSession_3_4.clear();
@@ -2934,7 +3126,7 @@
 }
 
 status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
-        camera3_request_template_t templateId,
+        camera_request_template_t templateId,
         /*out*/ camera_metadata_t **requestTemplate) {
     ATRACE_NAME("CameraHal::constructDefaultRequestSettings");
     if (!valid()) return INVALID_OPERATION;
@@ -2966,22 +3158,22 @@
     hardware::Return<void> err;
     RequestTemplate id;
     switch (templateId) {
-        case CAMERA3_TEMPLATE_PREVIEW:
+        case CAMERA_TEMPLATE_PREVIEW:
             id = RequestTemplate::PREVIEW;
             break;
-        case CAMERA3_TEMPLATE_STILL_CAPTURE:
+        case CAMERA_TEMPLATE_STILL_CAPTURE:
             id = RequestTemplate::STILL_CAPTURE;
             break;
-        case CAMERA3_TEMPLATE_VIDEO_RECORD:
+        case CAMERA_TEMPLATE_VIDEO_RECORD:
             id = RequestTemplate::VIDEO_RECORD;
             break;
-        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+        case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
             id = RequestTemplate::VIDEO_SNAPSHOT;
             break;
-        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+        case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
             id = RequestTemplate::ZERO_SHUTTER_LAG;
             break;
-        case CAMERA3_TEMPLATE_MANUAL:
+        case CAMERA_TEMPLATE_MANUAL:
             id = RequestTemplate::MANUAL;
             break;
         default:
@@ -3047,31 +3239,39 @@
 }
 
 status_t Camera3Device::HalInterface::configureStreams(const camera_metadata_t *sessionParams,
-        camera3_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
     ATRACE_NAME("CameraHal::configureStreams");
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
+    if (config->input_is_multi_resolution && mHidlSession_3_7 == nullptr) {
+        ALOGE("%s: Camera device doesn't support multi-resolution input stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
     // Convert stream config to HIDL
     std::set<int> activeStreams;
     device::V3_2::StreamConfiguration requestedConfiguration3_2;
     device::V3_4::StreamConfiguration requestedConfiguration3_4;
+    device::V3_7::StreamConfiguration requestedConfiguration3_7;
     requestedConfiguration3_2.streams.resize(config->num_streams);
     requestedConfiguration3_4.streams.resize(config->num_streams);
+    requestedConfiguration3_7.streams.resize(config->num_streams);
     for (size_t i = 0; i < config->num_streams; i++) {
         device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
         device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
-        camera3_stream_t *src = config->streams[i];
+        device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
+        camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
         cam3stream->setBufferFreedListener(this);
         int streamId = cam3stream->getId();
         StreamType streamType;
         switch (src->stream_type) {
-            case CAMERA3_STREAM_OUTPUT:
+            case CAMERA_STREAM_OUTPUT:
                 streamType = StreamType::OUTPUT;
                 break;
-            case CAMERA3_STREAM_INPUT:
+            case CAMERA_STREAM_INPUT:
                 streamType = StreamType::INPUT;
                 break;
             default:
@@ -3084,9 +3284,9 @@
         dst3_2.width = src->width;
         dst3_2.height = src->height;
         dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
-        dst3_2.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+        dst3_2.rotation = mapToStreamRotation((camera_stream_rotation_t) src->rotation);
         // For HidlSession version 3.5 or newer, the format and dataSpace sent
-        // to HAL are original, not the overriden ones.
+        // to HAL are original, not the overridden ones.
         if (mHidlSession_3_5 != nullptr) {
             dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden() ?
                     cam3stream->getOriginalFormat() : src->format);
@@ -3101,7 +3301,14 @@
         if (src->physical_camera_id != nullptr) {
             dst3_4.physicalCameraId = src->physical_camera_id;
         }
-
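+        // Fill in the v3.7-only stream fields: the multi-resolution group id and the sensor
+        // pixel modes this stream may be used in.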
+        dst3_7.v3_4 = dst3_4;
+        dst3_7.groupId = cam3stream->getHalStreamGroupId();
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
         activeStreams.insert(streamId);
         // Create Buffer ID map if necessary
         mBufferRecords.tryCreateBufferCache(streamId);
@@ -3111,16 +3318,22 @@
 
     StreamConfigurationMode operationMode;
     res = mapToStreamConfigurationMode(
-            (camera3_stream_configuration_mode_t) config->operation_mode,
+            (camera_stream_configuration_mode_t) config->operation_mode,
             /*out*/ &operationMode);
     if (res != OK) {
         return res;
     }
     requestedConfiguration3_2.operationMode = operationMode;
     requestedConfiguration3_4.operationMode = operationMode;
+    requestedConfiguration3_7.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
     requestedConfiguration3_4.sessionParams.setToExternal(
             reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            get_camera_metadata_size(sessionParams));
+            sessionParamSize);
+    requestedConfiguration3_7.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
 
     // Invoke configureStreams
     device::V3_3::HalStreamConfiguration finalConfiguration;
@@ -3167,7 +3380,17 @@
             };
 
     // See which version of HAL we have
-    if (mHidlSession_3_6 != nullptr) {
+    if (mHidlSession_3_7 != nullptr) {
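+        // configureStreams_3_7 still returns a v3.6 HalStreamConfiguration, so the v3.6
+        // callback and post-processing are reused here.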
+        ALOGV("%s: v3.7 device found", __FUNCTION__);
+        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
+        auto err = mHidlSession_3_7->configureStreams_3_7(
+                requestedConfiguration3_7, configStream36Cb);
+        res = postprocConfigStream36(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_6 != nullptr) {
         ALOGV("%s: v3.6 device found", __FUNCTION__);
         device::V3_5::StreamConfiguration requestedConfiguration3_5;
         requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -3240,7 +3463,7 @@
     // And convert output stream configuration from HIDL
 
     for (size_t i = 0; i < config->num_streams; i++) {
-        camera3_stream_t *dst = config->streams[i];
+        camera3::camera_stream_t *dst = config->streams[i];
         int streamId = Camera3Stream::cast(dst)->getId();
 
         // Start scan at i, with the assumption that the stream order matches
@@ -3293,7 +3516,7 @@
             dst->data_space = overrideDataSpace;
         }
 
-        if (dst->stream_type == CAMERA3_STREAM_INPUT) {
+        if (dst->stream_type == CAMERA_STREAM_INPUT) {
             if (src.v3_2.producerUsage != 0) {
                 ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
                         __FUNCTION__, streamId);
@@ -3317,7 +3540,147 @@
     return res;
 }
 
-status_t Camera3Device::HalInterface::wrapAsHidlRequest(camera3_capture_request_t* request,
+status_t Camera3Device::HalInterface::configureInjectedStreams(
+        const camera_metadata_t* sessionParams, camera_stream_configuration* config,
+        const std::vector<uint32_t>& bufferSizes,
+        const CameraMetadata& cameraCharacteristics) {
+    ATRACE_NAME("InjectionCameraHal::configureStreams");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    if (config->input_is_multi_resolution) {
+        ALOGE("%s: Injection camera device doesn't support multi-resolution input "
+                "stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Convert stream config to HIDL
+    std::set<int> activeStreams;
+    device::V3_2::StreamConfiguration requestedConfiguration3_2;
+    device::V3_4::StreamConfiguration requestedConfiguration3_4;
+    device::V3_7::StreamConfiguration requestedConfiguration3_7;
+    requestedConfiguration3_2.streams.resize(config->num_streams);
+    requestedConfiguration3_4.streams.resize(config->num_streams);
+    requestedConfiguration3_7.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        device::V3_2::Stream& dst3_2 = requestedConfiguration3_2.streams[i];
+        device::V3_4::Stream& dst3_4 = requestedConfiguration3_4.streams[i];
+        device::V3_7::Stream& dst3_7 = requestedConfiguration3_7.streams[i];
+        camera3::camera_stream_t* src = config->streams[i];
+
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d", __FUNCTION__,
+                        streamId, config->streams[i]->stream_type);
+                return BAD_VALUE;
+        }
+        dst3_2.id = streamId;
+        dst3_2.streamType = streamType;
+        dst3_2.width = src->width;
+        dst3_2.height = src->height;
+        dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
+        dst3_2.rotation =
+                mapToStreamRotation((camera_stream_rotation_t)src->rotation);
+        // For HidlSession version 3.5 or newer, the format and dataSpace sent
+        // to HAL are original, not the overridden ones.
+        if (mHidlSession_3_5 != nullptr) {
+            dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden()
+                                            ? cam3stream->getOriginalFormat()
+                                            : src->format);
+            dst3_2.dataSpace =
+                    mapToHidlDataspace(cam3stream->isDataSpaceOverridden()
+                                    ? cam3stream->getOriginalDataSpace()
+                                    : src->data_space);
+        } else {
+            dst3_2.format = mapToPixelFormat(src->format);
+            dst3_2.dataSpace = mapToHidlDataspace(src->data_space);
+        }
+        dst3_4.v3_2 = dst3_2;
+        dst3_4.bufferSize = bufferSizes[i];
+        if (src->physical_camera_id != nullptr) {
+            dst3_4.physicalCameraId = src->physical_camera_id;
+        }
+        dst3_7.v3_4 = dst3_4;
+        dst3_7.groupId = cam3stream->getHalStreamGroupId();
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        mBufferRecords.tryCreateBufferCache(streamId);
+    }
+    // remove BufferIdMap for deleted streams
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
+
+    StreamConfigurationMode operationMode;
+    res = mapToStreamConfigurationMode(
+            (camera_stream_configuration_mode_t)config->operation_mode,
+            /*out*/ &operationMode);
+    if (res != OK) {
+        return res;
+    }
+    requestedConfiguration3_7.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
+    requestedConfiguration3_7.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
+
+    // See which version of HAL we have
+    if (mHidlSession_3_7 != nullptr) {
+        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration3_7.multiResolutionInputImage =
+                config->input_is_multi_resolution;
+
+        const camera_metadata_t* rawMetadata = cameraCharacteristics.getAndLock();
+        ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {};
+        hidlChars.setToExternal(
+                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(rawMetadata)),
+                get_camera_metadata_size(rawMetadata));
+        cameraCharacteristics.unlock(rawMetadata);
+
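+        // Injection stream configuration is only exposed through ICameraInjectionSession
+        // v3.7, so cast the session and bail out if that fails.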
+        sp<hardware::camera::device::V3_7::ICameraInjectionSession>
+                hidlInjectionSession_3_7;
+        auto castInjectionResult_3_7 =
+                device::V3_7::ICameraInjectionSession::castFrom(mHidlSession_3_7);
+        if (castInjectionResult_3_7.isOk()) {
+            hidlInjectionSession_3_7 = castInjectionResult_3_7;
+        } else {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__,
+                    castInjectionResult_3_7.description().c_str());
+            return DEAD_OBJECT;
+        }
+
+        auto err = hidlInjectionSession_3_7->configureInjectionStreams(
+                requestedConfiguration3_7, hidlChars);
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__,
+                    err.description().c_str());
+            return DEAD_OBJECT;
+        }
+    } else {
+        ALOGE("%s: mHidlSession_3_7 does not exist; the minimum supported injection "
+                "session version is 3.7", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    return res;
+}
+
+status_t Camera3Device::HalInterface::wrapAsHidlRequest(camera_capture_request_t* request,
         /*out*/device::V3_2::CaptureRequest* captureRequest,
         /*out*/std::vector<native_handle_t*>* handlesCreated,
         /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
@@ -3362,7 +3725,7 @@
 
         captureRequest->outputBuffers.resize(request->num_output_buffers);
         for (size_t i = 0; i < request->num_output_buffers; i++) {
-            const camera3_stream_buffer_t *src = request->output_buffers + i;
+            const camera_stream_buffer_t *src = request->output_buffers + i;
             StreamBuffer &dst = captureRequest->outputBuffers[i];
             int32_t streamId = Camera3Stream::cast(src->stream)->getId();
             if (src->buffer != nullptr) {
@@ -3420,11 +3783,16 @@
 }
 
 status_t Camera3Device::HalInterface::processBatchCaptureRequests(
-        std::vector<camera3_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
+        std::vector<camera_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
     ATRACE_NAME("CameraHal::processBatchCaptureRequests");
     if (!valid()) return INVALID_OPERATION;
 
     sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
+    sp<device::V3_7::ICameraDeviceSession> hidlSession_3_7;
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        hidlSession_3_7 = castResult_3_7;
+    }
     auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
     if (castResult_3_4.isOk()) {
         hidlSession_3_4 = castResult_3_4;
@@ -3432,8 +3800,11 @@
 
     hardware::hidl_vec<device::V3_2::CaptureRequest> captureRequests;
     hardware::hidl_vec<device::V3_4::CaptureRequest> captureRequests_3_4;
+    hardware::hidl_vec<device::V3_7::CaptureRequest> captureRequests_3_7;
     size_t batchSize = requests.size();
-    if (hidlSession_3_4 != nullptr) {
+    if (hidlSession_3_7 != nullptr) {
+        captureRequests_3_7.resize(batchSize);
+    } else if (hidlSession_3_4 != nullptr) {
         captureRequests_3_4.resize(batchSize);
     } else {
         captureRequests.resize(batchSize);
@@ -3443,7 +3814,10 @@
 
     status_t res = OK;
     for (size_t i = 0; i < batchSize; i++) {
-        if (hidlSession_3_4 != nullptr) {
+        if (hidlSession_3_7 != nullptr) {
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_7[i].v3_4.v3_2,
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
+        } else if (hidlSession_3_4 != nullptr) {
             res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
                     /*out*/&handlesCreated, /*out*/&inflightBuffers);
         } else {
@@ -3474,9 +3848,11 @@
 
     // Write metadata to FMQ.
     for (size_t i = 0; i < batchSize; i++) {
-        camera3_capture_request_t* request = requests[i];
+        camera_capture_request_t* request = requests[i];
         device::V3_2::CaptureRequest* captureRequest;
-        if (hidlSession_3_4 != nullptr) {
+        if (hidlSession_3_7 != nullptr) {
+            captureRequest = &captureRequests_3_7[i].v3_4.v3_2;
+        } else if (hidlSession_3_4 != nullptr) {
             captureRequest = &captureRequests_3_4[i].v3_2;
         } else {
             captureRequest = &captureRequests[i];
@@ -3503,33 +3879,42 @@
             captureRequest->fmqSettingsSize = 0u;
         }
 
-        if (hidlSession_3_4 != nullptr) {
-            captureRequests_3_4[i].physicalCameraSettings.resize(request->num_physcam_settings);
+        // hidl session 3.7 specific handling.
+        if (hidlSession_3_7 != nullptr) {
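+            // v3.7 capture requests carry the input buffer dimensions explicitly, which is
+            // needed when the input stream is multi-resolution.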
+            captureRequests_3_7[i].inputWidth = request->input_width;
+            captureRequests_3_7[i].inputHeight = request->input_height;
+        }
+
+        // hidl session 3.7 and 3.4 specific handling.
+        if (hidlSession_3_7 != nullptr || hidlSession_3_4 != nullptr) {
+            hardware::hidl_vec<device::V3_4::PhysicalCameraSetting>& physicalCameraSettings =
+                    (hidlSession_3_7 != nullptr) ?
+                    captureRequests_3_7[i].v3_4.physicalCameraSettings :
+                    captureRequests_3_4[i].physicalCameraSettings;
+            physicalCameraSettings.resize(request->num_physcam_settings);
             for (size_t j = 0; j < request->num_physcam_settings; j++) {
                 if (request->physcam_settings != nullptr) {
                     size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
                     if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
                                 reinterpret_cast<const uint8_t*>(request->physcam_settings[j]),
                                 settingsSize)) {
-                        captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
-                        captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize =
-                            settingsSize;
+                        physicalCameraSettings[j].settings.resize(0);
+                        physicalCameraSettings[j].fmqSettingsSize = settingsSize;
                     } else {
                         if (mRequestMetadataQueue != nullptr) {
                             ALOGW("%s: couldn't utilize fmq, fallback to hwbinder", __FUNCTION__);
                         }
-                        captureRequests_3_4[i].physicalCameraSettings[j].settings.setToExternal(
+                        physicalCameraSettings[j].settings.setToExternal(
                                 reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
                                         request->physcam_settings[j])),
                                 get_camera_metadata_size(request->physcam_settings[j]));
-                        captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
+                        physicalCameraSettings[j].fmqSettingsSize = 0u;
                     }
                 } else {
-                    captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
-                    captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
+                    physicalCameraSettings[j].fmqSettingsSize = 0u;
+                    physicalCameraSettings[j].settings.resize(0);
                 }
-                captureRequests_3_4[i].physicalCameraSettings[j].physicalCameraId =
-                    request->physcam_id[j];
+                physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
             }
         }
     }
@@ -3540,7 +3925,10 @@
                 status = s;
                 *numRequestProcessed = n;
         };
-    if (hidlSession_3_4 != nullptr) {
+    if (hidlSession_3_7 != nullptr) {
+        err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
+                                                         resultCallback);
+    } else if (hidlSession_3_4 != nullptr) {
         err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
                                                          resultCallback);
     } else {
@@ -3762,6 +4150,7 @@
         mInterface(interface),
         mListener(nullptr),
         mId(getId(parent)),
+        mFirstRepeating(false),
         mReconfigured(false),
         mDoPause(false),
         mPaused(true),
@@ -3771,6 +4160,8 @@
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
+        mCameraMuteChanged(false),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
         mPrepareVideoStream(false),
@@ -3779,7 +4170,7 @@
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager) {
-    mStatusId = statusTracker->addComponent();
+    mStatusId = statusTracker->addComponent("RequestThread");
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -3792,11 +4183,13 @@
 }
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed,
-        const CameraMetadata& sessionParams) {
+        const CameraMetadata& sessionParams,
+        const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap) {
     ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
     mLatestSessionParams = sessionParams;
+    mGroupIdPhysicalCameraMap = groupIdPhysicalCameraMap;
     // Prepare video stream for high speed recording.
     mPrepareVideoStream = isConstrainedHighSpeed;
     mConstrainedMode = isConstrainedHighSpeed;
@@ -3892,6 +4285,7 @@
         *lastFrameNumber = mRepeatingLastFrameNumber;
     }
     mRepeatingRequests.clear();
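+    // Mark that the next repeating request issued to the HAL starts a new repeating sequence.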
+    mFirstRepeating = true;
     mRepeatingRequests.insert(mRepeatingRequests.begin(),
             requests.begin(), requests.end());
 
@@ -3944,14 +4338,15 @@
                  it != mRequestQueue.end(); ++it) {
             // Abort the input buffers for reprocess requests.
             if ((*it)->mInputStream != NULL) {
-                camera3_stream_buffer_t inputBuffer;
+                camera_stream_buffer_t inputBuffer;
+                camera3::Size inputBufferSize;
                 status_t res = (*it)->mInputStream->getInputBuffer(&inputBuffer,
-                        /*respectHalLimit*/ false);
+                        &inputBufferSize, /*respectHalLimit*/ false);
                 if (res != OK) {
                     ALOGW("%s: %d: couldn't get input buffer while clearing the request "
                             "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
                 } else {
-                    inputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+                    inputBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
                     res = (*it)->mInputStream->returnInputBuffer(inputBuffer);
                     if (res != OK) {
                         ALOGE("%s: %d: couldn't return input buffer while clearing the request "
@@ -4055,7 +4450,7 @@
     ATRACE_CALL();
     status_t res;
     size_t batchSize = mNextRequests.size();
-    std::vector<camera3_capture_request_t*> requests(batchSize);
+    std::vector<camera_capture_request_t*> requests(batchSize);
     uint32_t numRequestProcessed = 0;
     for (size_t i = 0; i < batchSize; i++) {
         requests[i] = &mNextRequests.editItemAt(i).halRequest;
@@ -4152,33 +4547,34 @@
 
 void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
     // Update the latest request sent to HAL
-    if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+    camera_capture_request_t& halRequest = nextRequest.halRequest;
+    if (halRequest.settings != NULL) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+        camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
         mLatestRequest.acquire(cloned);
 
         mLatestPhysicalRequest.clear();
-        for (uint32_t i = 0; i < nextRequest.halRequest.num_physcam_settings; i++) {
-            cloned = clone_camera_metadata(nextRequest.halRequest.physcam_settings[i]);
-            mLatestPhysicalRequest.emplace(nextRequest.halRequest.physcam_id[i],
+        for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
+            cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
+            mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
                     CameraMetadata(cloned));
         }
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent != NULL) {
             parent->monitorMetadata(TagMonitor::REQUEST,
-                    nextRequest.halRequest.frame_number,
+                    halRequest.frame_number,
                     0, mLatestRequest, mLatestPhysicalRequest);
         }
     }
 
-    if (nextRequest.halRequest.settings != NULL) {
+    if (halRequest.settings != NULL) {
         nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
-                nextRequest.halRequest.settings);
+                halRequest.settings);
     }
 
-    cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
+    cleanupPhysicalSettings(nextRequest.captureRequest, &halRequest);
 }
 
 bool Camera3Device::RequestThread::updateSessionParameters(const CameraMetadata& settings) {
@@ -4374,8 +4770,8 @@
     for (size_t i = 0; i < mNextRequests.size(); i++) {
         auto& nextRequest = mNextRequests.editItemAt(i);
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
-        camera3_capture_request_t* halRequest = &nextRequest.halRequest;
-        Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
+        camera_capture_request_t* halRequest = &nextRequest.halRequest;
+        Vector<camera_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
 
         // Prepare a request to HAL
         halRequest->frame_number = captureRequest->mResultExtras.frameNumber;
@@ -4394,10 +4790,13 @@
         mPrevTriggers = triggerCount;
 
         bool rotateAndCropChanged = overrideAutoRotateAndCrop(captureRequest);
+        bool testPatternChanged = overrideTestPattern(captureRequest);
 
-        // If the request is the same as last, or we had triggers last time
+        // This counts as a new request if it differs from the last one, we had
+        // triggers now or last time, or an override changed this time
         bool newRequest =
-                (mPrevRequest != captureRequest || triggersMixedIn || rotateAndCropChanged) &&
+                (mPrevRequest != captureRequest || triggersMixedIn ||
+                        rotateAndCropChanged || testPatternChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -4405,11 +4804,11 @@
             std::set<std::string> cameraIdsWithZoom;
             /**
              * HAL workaround:
-             * Insert a dummy trigger ID if a trigger is set but no trigger ID is
+             * Insert a fake trigger ID if a trigger is set but no trigger ID is present
              */
-            res = addDummyTriggerIds(captureRequest);
+            res = addFakeTriggerIds(captureRequest);
             if (res != OK) {
-                SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+                SET_ERR("RequestThread: Unable to insert fake trigger IDs "
                         "(capture request %d, HAL device: %s (%d)",
                         halRequest->frame_number, strerror(-res), res);
                 return INVALID_OPERATION;
@@ -4426,13 +4825,17 @@
                                 parent->mDistortionMappers.end()) {
                             continue;
                         }
-                        res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
-                            &(it->metadata));
-                        if (res != OK) {
-                            SET_ERR("RequestThread: Unable to correct capture requests "
-                                    "for lens distortion for request %d: %s (%d)",
-                                    halRequest->frame_number, strerror(-res), res);
-                            return INVALID_OPERATION;
+
+                        if (!captureRequest->mDistortionCorrectionUpdated) {
+                            res = parent->mDistortionMappers[it->cameraId].correctCaptureRequest(
+                                    &(it->metadata));
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Unable to correct capture requests "
+                                        "for lens distortion for request %d: %s (%d)",
+                                        halRequest->frame_number, strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                            captureRequest->mDistortionCorrectionUpdated = true;
                         }
                     }
 
@@ -4443,21 +4846,24 @@
                             continue;
                         }
 
-                        camera_metadata_entry_t e = it->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
-                        if (e.count > 0 && e.data.f[0] != 1.0f) {
+                        if (!captureRequest->mZoomRatioIs1x) {
                             cameraIdsWithZoom.insert(it->cameraId);
                         }
 
-                        res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
-                            &(it->metadata));
-                        if (res != OK) {
-                            SET_ERR("RequestThread: Unable to correct capture requests "
-                                    "for zoom ratio for request %d: %s (%d)",
-                                    halRequest->frame_number, strerror(-res), res);
-                            return INVALID_OPERATION;
+                        if (!captureRequest->mZoomRatioUpdated) {
+                            res = parent->mZoomRatioMappers[it->cameraId].updateCaptureRequest(
+                                    &(it->metadata));
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Unable to correct capture requests "
+                                        "for zoom ratio for request %d: %s (%d)",
+                                        halRequest->frame_number, strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                            captureRequest->mZoomRatioUpdated = true;
                         }
                     }
-                    if (captureRequest->mRotateAndCropAuto) {
+                    if (captureRequest->mRotateAndCropAuto &&
+                            !captureRequest->mRotationAndCropUpdated) {
                         for (it = captureRequest->mSettingsList.begin();
                                 it != captureRequest->mSettingsList.end(); it++) {
                             auto mapper = parent->mRotateAndCropMappers.find(it->cameraId);
@@ -4471,6 +4877,7 @@
                                 }
                             }
                         }
+                        captureRequest->mRotationAndCropUpdated = true;
                     }
                 }
             }
@@ -4530,15 +4937,18 @@
         // Fill in buffers
         if (captureRequest->mInputStream != NULL) {
             halRequest->input_buffer = &captureRequest->mInputBuffer;
+
+            halRequest->input_width = captureRequest->mInputBufferSize.width;
+            halRequest->input_height = captureRequest->mInputBufferSize.height;
             totalNumBuffers += 1;
         } else {
             halRequest->input_buffer = NULL;
         }
 
-        outputBuffers->insertAt(camera3_stream_buffer_t(), 0,
+        outputBuffers->insertAt(camera_stream_buffer_t(), 0,
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
-        std::set<String8> requestedPhysicalCameras;
+        std::set<std::set<String8>> requestedPhysicalCameras;
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent == NULL) {
@@ -4591,12 +5001,16 @@
                     return TIMED_OUT;
                 }
                 // HAL will request buffer through requestStreamBuffer API
-                camera3_stream_buffer_t& buffer = outputBuffers->editItemAt(j);
+                camera_stream_buffer_t& buffer = outputBuffers->editItemAt(j);
                 buffer.stream = outputStream->asHalStream();
                 buffer.buffer = nullptr;
-                buffer.status = CAMERA3_BUFFER_STATUS_OK;
+                buffer.status = CAMERA_BUFFER_STATUS_OK;
                 buffer.acquire_fence = -1;
                 buffer.release_fence = -1;
+                // Mark the output stream as unpreparable to block clients from calling
+                // 'prepare' after this request reaches CameraHal and before the respective
+                // buffers are requested.
+                outputStream->markUnpreparable();
             } else {
                 res = outputStream->getBuffer(&outputBuffers->editItemAt(j),
                         waitDuration,
@@ -4629,14 +5043,11 @@
             }
 
             String8 physicalCameraId = outputStream->getPhysicalCameraId();
-
-            if (!physicalCameraId.isEmpty()) {
-                // Physical stream isn't supported for input request.
-                if (halRequest->input_buffer) {
-                    CLOGE("Physical stream is not supported for input request");
-                    return INVALID_OPERATION;
-                }
-                requestedPhysicalCameras.insert(physicalCameraId);
+            int32_t streamGroupId = outputStream->getHalStreamGroupId();
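+            // Streams that belong to a stream group are attributed to the full set
+            // of physical cameras backing that group; a standalone physical stream
+            // maps to a single-element set.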
+            if (streamGroupId != -1 && mGroupIdPhysicalCameraMap.count(streamGroupId) == 1) {
+                requestedPhysicalCameras.insert(mGroupIdPhysicalCameraMap[streamGroupId]);
+            } else if (!physicalCameraId.isEmpty()) {
+                requestedPhysicalCameras.insert(std::set<String8>({physicalCameraId}));
             }
             halRequest->num_output_buffers++;
         }
@@ -4652,15 +5063,22 @@
         }
         bool isStillCapture = false;
         bool isZslCapture = false;
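+        // halRequest->settings is left null when the settings are unchanged from
+        // the previous submission; in that case, briefly lock the request's own
+        // metadata so the capture intent, ZSL flag, and expected duration can
+        // still be derived below, and unlock it after the in-flight registration.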
+        const camera_metadata_t* settings = halRequest->settings;
+        bool shouldUnlockSettings = false;
+        if (settings == nullptr) {
+            shouldUnlockSettings = true;
+            settings = captureRequest->mSettingsList.begin()->metadata.getAndLock();
+        }
         if (!mNextRequests[0].captureRequest->mSettingsList.begin()->metadata.isEmpty()) {
             camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
-            find_camera_metadata_ro_entry(halRequest->settings, ANDROID_CONTROL_CAPTURE_INTENT, &e);
+            find_camera_metadata_ro_entry(settings, ANDROID_CONTROL_CAPTURE_INTENT, &e);
             if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE)) {
                 isStillCapture = true;
                 ATRACE_ASYNC_BEGIN("still capture", mNextRequests[i].halRequest.frame_number);
             }
 
-            find_camera_metadata_ro_entry(halRequest->settings, ANDROID_CONTROL_ENABLE_ZSL, &e);
+            e = camera_metadata_ro_entry_t();
+            find_camera_metadata_ro_entry(settings, ANDROID_CONTROL_ENABLE_ZSL, &e);
             if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE)) {
                 isZslCapture = true;
             }
@@ -4669,16 +5087,21 @@
                 totalNumBuffers, captureRequest->mResultExtras,
                 /*hasInput*/halRequest->input_buffer != NULL,
                 hasCallback,
-                calculateMaxExpectedDuration(halRequest->settings),
+                calculateMaxExpectedDuration(settings),
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
-                                      SurfaceMap{});
+                                      SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
                ", burstId = %" PRId32 ".",
                 __FUNCTION__,
                 captureRequest->mResultExtras.requestId, captureRequest->mResultExtras.frameNumber,
                 captureRequest->mResultExtras.burstId);
+
+        if (shouldUnlockSettings) {
+            captureRequest->mSettingsList.begin()->metadata.unlock(settings);
+        }
+
         if (res != OK) {
             SET_ERR("RequestThread: Unable to register new in-flight request:"
                     " %s (%d)", strerror(-res), res);
@@ -4785,6 +5208,12 @@
     mStreamIdsToBeDrained = streamIds;
 }
 
+void Camera3Device::RequestThread::resetPipelineDrain() {
+    Mutex::Autolock pl(mPauseLock);
+    mNotifyPipelineDrain = false;
+    mStreamIdsToBeDrained.clear();
+}
+
 void Camera3Device::RequestThread::clearPreviousRequest() {
     Mutex::Autolock l(mRequestLock);
     mPrevRequest.clear();
@@ -4833,6 +5262,16 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::setCameraMute(int32_t muteMode) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
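+    // The stored mode is consumed by overrideTestPattern(), which runs under this
+    // same mutex and rewrites each outgoing request's TEST_PATTERN_MODE/DATA.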
+    if (muteMode != mCameraMute) {
+        mCameraMute = muteMode;
+        mCameraMuteChanged = true;
+    }
+    return OK;
+}
+
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4841,7 +5280,7 @@
 }
 
 void Camera3Device::RequestThread::cleanupPhysicalSettings(sp<CaptureRequest> request,
-        camera3_capture_request_t *halRequest) {
+        camera_capture_request_t *halRequest) {
     if ((request == nullptr) || (halRequest == nullptr)) {
         ALOGE("%s: Invalid request!", __FUNCTION__);
         return;
@@ -4876,8 +5315,8 @@
         }
 
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
-        camera3_capture_request_t* halRequest = &nextRequest.halRequest;
-        Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
+        camera_capture_request_t* halRequest = &nextRequest.halRequest;
+        Vector<camera_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
 
         if (halRequest->settings != NULL) {
             captureRequest->mSettingsList.begin()->metadata.unlock(halRequest->settings);
@@ -4886,7 +5325,7 @@
         cleanupPhysicalSettings(captureRequest, halRequest);
 
         if (captureRequest->mInputStream != NULL) {
-            captureRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+            captureRequest->mInputBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
             captureRequest->mInputStream->returnInputBuffer(captureRequest->mInputBuffer);
         }
 
@@ -4900,7 +5339,7 @@
                     close(acquireFence);
                     outputBuffers->editItemAt(i).acquire_fence = -1;
                 }
-                outputBuffers->editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
+                outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
                 captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0,
                         /*timestampIncreasing*/true, std::vector<size_t> (),
                         captureRequest->mResultExtras.frameNumber);
@@ -4950,7 +5389,7 @@
         return;
     }
 
-    nextRequest.halRequest = camera3_capture_request_t();
+    nextRequest.halRequest = camera_capture_request_t();
     nextRequest.submitted = false;
     mNextRequests.add(nextRequest);
 
@@ -4964,7 +5403,7 @@
             break;
         }
 
-        additionalRequest.halRequest = camera3_capture_request_t();
+        additionalRequest.halRequest = camera_capture_request_t();
         additionalRequest.submitted = false;
         mNextRequests.add(additionalRequest);
     }
@@ -4989,6 +5428,17 @@
             // list. Guarantees a complete in-sequence set of captures to
             // application.
             const RequestList &requests = mRepeatingRequests;
+            if (mFirstRepeating) {
+                mFirstRepeating = false;
+            } else {
+                for (auto& request : requests) {
+                    // For repeating requests, override the request timestamp with
+                    // the time the request is inserted into the request queue,
+                    // because the original repeating request carries an old,
+                    // fixed timestamp.
+                    request->mRequestTimeNs = systemTime();
+                }
+            }
             RequestList::const_iterator firstRequest =
                     requests.begin();
             nextRequest = *firstRequest;
@@ -5073,7 +5523,8 @@
         // Since RequestThread::clear() removes buffers from the input stream,
         // get the right buffer here before unlocking mRequestLock
         if (nextRequest->mInputStream != NULL) {
-            res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer);
+            res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer,
+                    &nextRequest->mInputBufferSize);
             if (res != OK) {
                 // Can't get input buffer from gralloc queue - this could be due to
                 // disconnected queue or other producer misbehavior, so not a fatal
@@ -5313,26 +5764,26 @@
     return OK;
 }
 
-status_t Camera3Device::RequestThread::addDummyTriggerIds(
+status_t Camera3Device::RequestThread::addFakeTriggerIds(
         const sp<CaptureRequest> &request) {
     // Trigger ID 0 had special meaning in the HAL2 spec, so avoid it here
-    static const int32_t dummyTriggerId = 1;
+    static const int32_t fakeTriggerId = 1;
     status_t res;
 
     CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
 
-    // If AF trigger is active, insert a dummy AF trigger ID if none already
+    // If AF trigger is active, insert a fake AF trigger ID if none already
     // exists
     camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER);
     camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID);
     if (afTrigger.count > 0 &&
             afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE &&
             afId.count == 0) {
-        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1);
+        res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &fakeTriggerId, 1);
         if (res != OK) return res;
     }
 
-    // If AE precapture trigger is active, insert a dummy precapture trigger ID
+    // If AE precapture trigger is active, insert a fake precapture trigger ID
     // if none already exists
     camera_metadata_entry pcTrigger =
             metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
@@ -5341,7 +5792,7 @@
             pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE &&
             pcId.count == 0) {
         res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
-                &dummyTriggerId, 1);
+                &fakeTriggerId, 1);
         if (res != OK) return res;
     }
 
@@ -5374,6 +5825,73 @@
     return false;
 }
 
+bool Camera3Device::RequestThread::overrideTestPattern(
+        const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+
+    Mutex::Autolock l(mTriggerMutex);
+
+    bool changed = false;
+
+    int32_t testPatternMode = request->mOriginalTestPatternMode;
+    int32_t testPatternData[4] = {
+        request->mOriginalTestPatternData[0],
+        request->mOriginalTestPatternData[1],
+        request->mOriginalTestPatternData[2],
+        request->mOriginalTestPatternData[3]
+    };
+
+    if (mCameraMute != ANDROID_SENSOR_TEST_PATTERN_MODE_OFF) {
+        testPatternMode = mCameraMute;
+        testPatternData[0] = 0;
+        testPatternData[1] = 0;
+        testPatternData[2] = 0;
+        testPatternData[3] = 0;
+    }
+
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+
+    auto testPatternEntry = metadata.find(ANDROID_SENSOR_TEST_PATTERN_MODE);
+    if (testPatternEntry.count > 0) {
+        if (testPatternEntry.data.i32[0] != testPatternMode) {
+            testPatternEntry.data.i32[0] = testPatternMode;
+            changed = true;
+        }
+    } else {
+        metadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
+                &testPatternMode, 1);
+        changed = true;
+    }
+
+    auto testPatternColor = metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
+    if (testPatternColor.count > 0) {
+        for (size_t i = 0; i < 4; i++) {
+            if (testPatternColor.data.i32[i] != (int32_t)testPatternData[i]) {
+                testPatternColor.data.i32[i] = testPatternData[i];
+                changed = true;
+            }
+        }
+    } else {
+        metadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA,
+                (int32_t*)testPatternData, 4);
+        changed = true;
+    }
+
+    return changed;
+}
+
+status_t Camera3Device::RequestThread::setHalInterface(
+        sp<HalInterface> newHalInterface) {
+    if (newHalInterface.get() == nullptr) {
+        ALOGE("%s: The newHalInterface does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    mInterface = newHalInterface;
+
+    return OK;
+}
+
 /**
  * PreparerThread inner class methods
  */
@@ -5607,7 +6125,7 @@
 
     std::lock_guard<std::mutex> lock(mLock);
     mStatusTracker = statusTracker;
-    mRequestBufferStatusId = statusTracker->addComponent();
+    mRequestBufferStatusId = statusTracker->addComponent("BufferRequestSM");
     return OK;
 }
 
@@ -5999,4 +6517,80 @@
     return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
 }
 
+bool Camera3Device::supportsCameraMute() {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    return mSupportCameraMute;
+}
+
+status_t Camera3Device::setCameraMute(bool enabled) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (mRequestThread == nullptr || !mSupportCameraMute) {
+        return INVALID_OPERATION;
+    }
+    int32_t muteMode =
+            !enabled                      ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF :
+            mSupportTestPatternSolidColor ? ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR :
+                                            ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK;
+    return mRequestThread->setCameraMute(muteMode);
+}
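// A minimal usage sketch for the mute path above (illustrative only; assumes an
// already-opened device held in `sp<Camera3Device> device`, a name not taken
// from this code):
//
//     if (device->supportsCameraMute()) {
//         device->setCameraMute(true);   // sensor outputs SOLID_COLOR or BLACK frames
//         // ... capture continues with blacked-out frames ...
//         device->setCameraMute(false);  // original TEST_PATTERN_MODE/DATA are restored
//     }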
+
+status_t Camera3Device::injectCamera(const String8& injectedCamId,
+                                     sp<CameraProviderManager> manager) {
+    ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+
+    status_t res = NO_ERROR;
+    if (mInjectionMethods->isInjecting()) {
+        if (injectedCamId == mInjectionMethods->getInjectedCamId()) {
+            return OK;
+        } else {
+            res = mInjectionMethods->stopInjection();
+            if (res != OK) {
+                ALOGE("%s: Failed to stop the injection camera! ret != NO_ERROR: %d",
+                        __FUNCTION__, res);
+                return res;
+            }
+        }
+    }
+
+    res = mInjectionMethods->injectionInitialize(injectedCamId, manager, this);
+    if (res != OK) {
+        ALOGE("%s: Failed to initialize the injection camera! ret != NO_ERROR: %d",
+                __FUNCTION__, res);
+        return res;
+    }
+
+    camera3::camera_stream_configuration injectionConfig;
+    std::vector<uint32_t> injectionBufferSizes;
+    mInjectionMethods->getInjectionConfig(&injectionConfig, &injectionBufferSizes);
+    // When a secondary Android display is cast to a remote device and the opened
+    // camera is also cast to that display, the camera has already configured its
+    // streams, so injectCamera() can be called directly to replace the internal
+    // camera with the injection camera.
+    if (mOperatingMode >= 0 && injectionConfig.num_streams > 0
+                && injectionBufferSizes.size() > 0) {
+        ALOGV("%s: The opened camera is directly cast to the remote device.", __FUNCTION__);
+        res = mInjectionMethods->injectCamera(
+                injectionConfig, injectionBufferSizes);
+        if (res != OK) {
+            ALOGE("Can't finish inject camera process!");
+            return res;
+        }
+    }
+
+    return OK;
+}
+
+status_t Camera3Device::stopInjection() {
+    ALOGI("%s: Injection camera: stopInjection", __FUNCTION__);
+    Mutex::Autolock il(mInterfaceLock);
+    return mInjectionMethods->stopInjection();
+}
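// A minimal call-sequence sketch for the injection API above (illustrative only;
// `device`, `manager`, and the camera id "7" are placeholder names, not taken
// from this code):
//
//     String8 injectedId("7");
//     if (device->injectCamera(injectedId, manager) == OK) {
//         // ... frames are now produced by the injection camera ...
//         device->stopInjection();  // restore the internal camera session
//     }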
+
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 408f1f9..53a696f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -35,6 +35,7 @@
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.6/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
@@ -52,10 +53,18 @@
 #include "device3/InFlightRequest.h"
 #include "device3/Camera3OutputInterface.h"
 #include "device3/Camera3OfflineSession.h"
+#include "device3/Camera3StreamInterface.h"
 #include "utils/TagMonitor.h"
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
 
+using android::camera3::camera_capture_request_t;
+using android::camera3::camera_jpeg_blob_t;
+using android::camera3::camera_request_template;
+using android::camera3::camera_stream_buffer_t;
+using android::camera3::camera_stream_configuration_t;
+using android::camera3::camera_stream_configuration_mode_t;
+using android::camera3::CAMERA_TEMPLATE_COUNT;
 using android::camera3::OutputStreamInfo;
 
 namespace android {
@@ -64,7 +73,6 @@
 
 class Camera3Stream;
 class Camera3ZslStream;
-class Camera3OutputStreamInterface;
 class Camera3StreamInterface;
 
 } // namespace camera3
@@ -81,7 +89,7 @@
             public camera3::FlushBufferInterface {
   public:
 
-    explicit Camera3Device(const String8& id);
+    explicit Camera3Device(const String8& id, bool overrideForPerfClass);
 
     virtual ~Camera3Device();
 
@@ -122,21 +130,26 @@
     // and finish the stream configuration before starting output streaming.
     status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) override;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) override;
+
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
-            bool isShared = false, uint64_t consumerUsage = 0) override;
+            bool isShared = false, bool isMultiResolution = false,
+            uint64_t consumerUsage = 0) override;
 
     status_t createInputStream(
-            uint32_t width, uint32_t height, int format,
+            uint32_t width, uint32_t height, int format, bool isMultiResolution,
             int *id) override;
 
     status_t getStreamInfo(int id, StreamInfo *streamInfo) override;
@@ -146,14 +159,14 @@
 
     status_t configureStreams(const CameraMetadata& sessionParams,
             int operatingMode =
-            static_cast<int>(hardware::camera::device::V3_2::StreamConfigurationMode::NORMAL_MODE))
-            override;
+            camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
     status_t getInputBufferProducer(
             sp<IGraphicBufferProducer> *producer) override;
 
     void getOfflineStreamIds(std::vector<int> *offlineStreamIds) override;
 
-    status_t createDefaultRequest(int templateId, CameraMetadata *request) override;
+    status_t createDefaultRequest(camera_request_template_t templateId,
+            CameraMetadata *request) override;
 
     // Transitions to the idle state on success
     status_t waitUntilDrained() override;
@@ -180,7 +193,7 @@
 
     ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
     ssize_t getPointCloudBufferSize() const;
-    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
+    ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height, bool maxResolution) const;
 
     // Methods called by subclasses
     void             notifyStatus(bool idle); // updates from StatusTracker
@@ -231,10 +244,37 @@
     status_t setRotateAndCropAutoBehavior(
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
 
+    /**
+     * Whether camera muting (producing black-only output) is supported.
+     *
+     * Calling setCameraMute(true) when this returns false will return an
+     * INVALID_OPERATION error.
+     */
+    bool supportsCameraMute();
+
+    /**
+     * Mute the camera.
+     *
+     * When muted, black image data is output on all output streams.
+     */
+    status_t setCameraMute(bool enabled);
+
     // Get the status tracker for the camera device
     wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
 
     /**
+     * Replace the internal camera session with an injection camera
+     * session.
+     */
+    status_t injectCamera(const String8& injectedCamId,
+                          sp<CameraProviderManager> manager);
+
+    /**
+     * Stop the injection camera and restore the internal camera session.
+     */
+    status_t stopInjection();
+
+    /**
      * Helper functions to map between framework and HIDL values
      */
     static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
@@ -242,9 +282,9 @@
             android_dataspace dataSpace);
     static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
     static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
-            camera3_stream_rotation_t rotation);
+            camera_stream_rotation_t rotation);
     // Returns a negative error code if the passed-in operation mode is not valid.
-    static status_t mapToStreamConfigurationMode(camera3_stream_configuration_mode_t operationMode,
+    static status_t mapToStreamConfigurationMode(camera_stream_configuration_mode_t operationMode,
             /*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
     static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
     static android_dataspace mapToFrameworkDataspace(
@@ -255,7 +295,6 @@
             hardware::camera::device::V3_2::BufferUsageFlags usage);
 
   private:
-
     status_t disconnectImpl();
 
     // internal typedefs
@@ -275,7 +314,7 @@
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
-    static const ssize_t       kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
+    static const ssize_t       kMinJpegBufferSize = 256 * 1024 + sizeof(camera_jpeg_blob_t);
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
@@ -330,18 +369,25 @@
         // Calls into the HAL interface
 
         // Caller takes ownership of requestTemplate
-        status_t constructDefaultRequestSettings(camera3_request_template_t templateId,
+        status_t constructDefaultRequestSettings(camera_request_template templateId,
                 /*out*/ camera_metadata_t **requestTemplate);
         status_t configureStreams(const camera_metadata_t *sessionParams,
-                /*inout*/ camera3_stream_configuration *config,
+                /*inout*/ camera_stream_configuration_t *config,
                 const std::vector<uint32_t>& bufferSizes);
 
+        // Configure the injection camera's streams with the HAL.
+        status_t configureInjectedStreams(
+                const camera_metadata_t* sessionParams,
+                /*inout*/ camera_stream_configuration_t* config,
+                const std::vector<uint32_t>& bufferSizes,
+                const CameraMetadata& cameraCharacteristics);
+
         // When the call succeeds, the ownership of acquire fences in requests is transferred to
         // HalInterface. More specifically, the current implementation will send the fence to
         // HAL process and close the FD in cameraserver process. When the call fails, the ownership
         // of the acquire fence still belongs to the caller.
         status_t processBatchCaptureRequests(
-                std::vector<camera3_capture_request_t*>& requests,
+                std::vector<camera_capture_request_t*>& requests,
                 /*out*/uint32_t* numRequestProcessed);
         status_t flush();
         status_t dump(int fd);
@@ -397,12 +443,14 @@
         sp<hardware::camera::device::V3_5::ICameraDeviceSession> mHidlSession_3_5;
         // Valid if ICameraDeviceSession is @3.6 or newer
         sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
+        // Valid if ICameraDeviceSession is @3.7 or newer
+        sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
 
         std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
 
-        // The output HIDL request still depends on input camera3_capture_request_t
-        // Do not free input camera3_capture_request_t before output HIDL request
-        status_t wrapAsHidlRequest(camera3_capture_request_t* in,
+        // The output HIDL request still depends on input camera_capture_request_t
+        // Do not free input camera_capture_request_t before output HIDL request
+        status_t wrapAsHidlRequest(camera_capture_request_t* in,
                 /*out*/hardware::camera::device::V3_2::CaptureRequest* out,
                 /*out*/std::vector<native_handle_t*>* handlesCreated,
                 /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
@@ -442,15 +490,13 @@
     bool                       mSupportNativeZoomRatio;
     std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
 
-    CameraMetadata             mRequestTemplateCache[CAMERA3_TEMPLATE_COUNT];
+    CameraMetadata             mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
 
     struct Size {
         uint32_t width;
         uint32_t height;
         explicit Size(uint32_t w = 0, uint32_t h = 0) : width(w), height(h){}
     };
-    // Map from format to size.
-    Vector<Size>               mSupportedOpaqueInputSizes;
 
     enum Status {
         STATUS_ERROR,
@@ -471,10 +517,15 @@
 
     camera3::StreamSet         mOutputStreams;
     sp<camera3::Camera3Stream> mInputStream;
+    bool                       mIsInputStreamMultiResolution;
+    SessionStatsBuilder        mSessionStatsBuilder;
+    // Map from stream group ID to physical cameras backing the stream group
+    std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+
     int                        mNextStreamId;
     bool                       mNeedConfig;
 
-    int                        mDummyStreamId;
+    int                        mFakeStreamId;
 
     // Whether to send state updates upstream
     // Pause when doing transparent reconfiguration
@@ -501,7 +552,8 @@
       public:
         PhysicalCameraSettingsList          mSettingsList;
         sp<camera3::Camera3Stream>          mInputStream;
-        camera3_stream_buffer_t             mInputBuffer;
+        camera_stream_buffer_t              mInputBuffer;
+        camera3::Size                       mInputBufferSize;
         Vector<sp<camera3::Camera3OutputStreamInterface> >
                                             mOutputStreams;
         SurfaceMap                          mOutputSurfaces;
@@ -517,6 +569,26 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Original values of TEST_PATTERN_MODE and TEST_PATTERN_DATA so that they
+        // can be restored when sensor muting is turned off
+        int32_t                             mOriginalTestPatternMode;
+        int32_t                             mOriginalTestPatternData[4];
+
+        // Whether this capture request has its zoom ratio set to 1.0x before
+        // the framework overrides it for camera HAL consumption.
+        bool                                mZoomRatioIs1x;
+        // The systemTime timestamp when the request is created.
+        nsecs_t                             mRequestTimeNs;
+
+        // Whether this capture request's distortion correction update has
+        // been done.
+        bool                                mDistortionCorrectionUpdated = false;
+        // Whether this capture request's rotation and crop update has been
+        // done.
+        bool                                mRotationAndCropUpdated = false;
+        // Whether this capture request's zoom ratio update has been done.
+        bool                                mZoomRatioUpdated = false;
     };
     typedef List<sp<CaptureRequest> > RequestList;
 
@@ -525,7 +597,7 @@
     status_t convertMetadataListToRequestListLocked(
             const List<const PhysicalCameraSettingsList> &metadataList,
             const std::list<const SurfaceMap> &surfaceMaps,
-            bool repeating,
+            bool repeating, nsecs_t requestTimeNs,
             /*out*/
             RequestList *requestList);
 
@@ -668,15 +740,15 @@
     void               cancelStreamsConfigurationLocked();
 
     /**
-     * Add a dummy stream to the current stream set as a workaround for
+     * Add a fake stream to the current stream set as a workaround for
      * not allowing 0 streams in the camera HAL spec.
      */
-    status_t           addDummyStreamLocked();
+    status_t           addFakeStreamLocked();
 
     /**
-     * Remove a dummy stream if the current config includes real streams.
+     * Remove a fake stream if the current config includes real streams.
      */
-    status_t           tryRemoveDummyStreamLocked();
+    status_t           tryRemoveFakeStreamLocked();
 
     /**
      * Set device into an error state due to some fatal failure, and set an
@@ -704,12 +776,6 @@
     bool               tryLockSpinRightRound(Mutex& lock);
 
     /**
-     * Helper function to get the largest Jpeg resolution (in area)
-     * Return Size(0, 0) if static metatdata is invalid
-     */
-    Size getMaxJpegResolution() const;
-
-    /**
      * Helper function to get the offset between MONOTONIC and BOOTTIME
      * timestamp.
      */
@@ -752,7 +818,8 @@
          * Call after stream (re)-configuration is completed.
          */
         void     configurationComplete(bool isConstrainedHighSpeed,
-                const CameraMetadata& sessionParams);
+                const CameraMetadata& sessionParams,
+                const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -832,6 +899,7 @@
         }
 
         void signalPipelineDrain(const std::vector<int>& streamIds);
+        void resetPipelineDrain();
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -844,6 +912,10 @@
         status_t setRotateAndCropAutoBehavior(
                 camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
 
+        status_t setCameraMute(int32_t muteMode);
+
+        status_t setHalInterface(sp<HalInterface> newHalInterface);
+
       protected:
 
         virtual bool threadLoop();
@@ -860,11 +932,15 @@
 
         // HAL workaround: Make sure a trigger ID always exists if
         // a trigger does
-        status_t           addDummyTriggerIds(const sp<CaptureRequest> &request);
+        status_t           addFakeTriggerIds(const sp<CaptureRequest> &request);
 
         // Override rotate_and_crop control if needed; returns true if the current value was changed
         bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
 
+        // Override test_pattern control if needed for camera mute; returns true
+        // if the current value was changed
+        bool               overrideTestPattern(const sp<CaptureRequest> &request);
+
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
@@ -873,8 +949,8 @@
         // Used to prepare a batch of requests.
         struct NextRequest {
             sp<CaptureRequest>              captureRequest;
-            camera3_capture_request_t       halRequest;
-            Vector<camera3_stream_buffer_t> outputBuffers;
+            camera_capture_request_t        halRequest;
+            Vector<camera_stream_buffer_t>  outputBuffers;
             bool                            submitted;
         };
 
@@ -900,7 +976,7 @@
 
         // Release physical camera settings and camera id resources.
         void cleanupPhysicalSettings(sp<CaptureRequest> request,
-                /*out*/camera3_capture_request_t *halRequest);
+                /*out*/camera_capture_request_t *halRequest);
 
         // Pause handling
         bool               waitIfPaused();
@@ -947,6 +1023,7 @@
         Condition          mRequestSubmittedSignal;
         RequestList        mRequestQueue;
         RequestList        mRepeatingRequests;
+        bool               mFirstRepeating;
         // The next batch of requests being prepped for submission to the HAL, no longer
         // on the request queue. Read-only even with mRequestLock held, outside
         // of threadLoop
@@ -986,6 +1063,8 @@
         uint32_t           mCurrentAfTriggerId;
         uint32_t           mCurrentPreCaptureTriggerId;
         camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+        int32_t            mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
+        bool               mCameraMuteChanged;
 
         int64_t            mRepeatingLastFrameNumber;
 
@@ -1000,6 +1079,8 @@
         Vector<int32_t>    mSessionParamKeys;
         CameraMetadata     mLatestSessionParams;
 
+        std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+
         const bool         mUseHalBufManager;
     };
     sp<RequestThread> mRequestThread;
@@ -1019,9 +1100,11 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            bool callback, nsecs_t maxExpectedDuration, std::set<String8>& physicalCameraIds,
+            bool callback, nsecs_t maxExpectedDuration,
+            const std::set<std::set<String8>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
-            const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces);
+            const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
+            nsecs_t requestTimeNs);
 
     /**
      * Tracking for idle detection
@@ -1250,6 +1333,85 @@
 
     // Whether HAL supports offline processing capability.
     bool mSupportOfflineProcessing = false;
+
+    // Whether the HAL supports camera muting via test pattern
+    bool mSupportCameraMute = false;
+    // Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
+    bool mSupportTestPatternSolidColor = false;
+
+    // Whether the camera framework overrides the device characteristics for
+    // performance class.
+    bool mOverrideForPerfClass;
+
+    // Injection camera related methods.
+    class Camera3DeviceInjectionMethods : public virtual RefBase {
+      public:
+        Camera3DeviceInjectionMethods(wp<Camera3Device> parent);
+
+        ~Camera3DeviceInjectionMethods();
+
+        // Initialize the injection camera and generate a HAL interface.
+        status_t injectionInitialize(
+                const String8& injectedCamId, sp<CameraProviderManager> manager,
+                const sp<
+                    android::hardware::camera::device::V3_2 ::ICameraDeviceCallback>&
+                    callback);
+
+        // The injection camera will replace the internal camera and configure
+        // streams when the device is IDLE and the request thread is paused.
+        status_t injectCamera(
+                camera3::camera_stream_configuration& injectionConfig,
+                std::vector<uint32_t>& injectionBufferSizes);
+
+        // Stop the injection camera and switch back to the backup HAL interface.
+        status_t stopInjection();
+
+        bool isInjecting();
+
+        const String8& getInjectedCamId() const;
+
+        void getInjectionConfig(/*out*/ camera3::camera_stream_configuration* injectionConfig,
+                /*out*/ std::vector<uint32_t>* injectionBufferSizes);
+
+      private:
+        // Configure the injection camera's streams; this must wait until the
+        // output streams have been created and configured for the original camera
+        // before proceeding.
+        status_t injectionConfigureStreams(
+                camera3::camera_stream_configuration& injectionConfig,
+                std::vector<uint32_t>& injectionBufferSizes);
+
+        // Disconnect the injection camera and delete the hal interface.
+        void injectionDisconnectImpl();
+
+        // Use the injection camera's HAL interface to replace, and back up, the
+        // original camera HAL interface.
+        status_t replaceHalInterface(sp<HalInterface> newHalInterface,
+                bool keepBackup);
+
+        wp<Camera3Device> mParent;
+
+        // Backup of the original camera hal interface.
+        sp<HalInterface> mBackupHalInterface;
+
+        // Generated injection camera hal interface.
+        sp<HalInterface> mInjectedCamHalInterface;
+
+        // Copy of the internal camera's stream configuration.
+        camera3::camera_stream_configuration mInjectionConfig;
+
+        // Copy of the buffer sizes of the internal camera's output streams.
+        std::vector<uint32_t> mInjectionBufferSizes;
+
+        // Synchronizes access to injection camera between initialize and
+        // disconnect.
+        Mutex mInjectionLock;
+
+        // The injection camera ID.
+        String8 mInjectedCamId;
+    };
+    sp<Camera3DeviceInjectionMethods> mInjectionMethods;
+
 }; // class Camera3Device
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
new file mode 100644
index 0000000..f145dac
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -0,0 +1,393 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3DeviceInjectionMethods"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "common/CameraProviderManager.h"
+#include "device3/Camera3Device.h"
+
+namespace android {
+
+using hardware::camera::device::V3_2::ICameraDeviceSession;
+
+Camera3Device::Camera3DeviceInjectionMethods::Camera3DeviceInjectionMethods(
+        wp<Camera3Device> parent)
+        : mParent(parent) {
+    ALOGV("%s: Created injection camera methods", __FUNCTION__);
+}
+
+Camera3Device::Camera3DeviceInjectionMethods::~Camera3DeviceInjectionMethods() {
+    ALOGV("%s: Removed injection camera methods", __FUNCTION__);
+    injectionDisconnectImpl();
+}
+
+status_t Camera3Device::Camera3DeviceInjectionMethods::injectionInitialize(
+        const String8& injectedCamId, sp<CameraProviderManager> manager,
+        const sp<android::hardware::camera::device::V3_2::ICameraDeviceCallback>&
+                callback) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mInjectionLock);
+
+    if (manager == nullptr) {
+        ALOGE("%s: manager does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    mInjectedCamId = injectedCamId;
+    sp<ICameraDeviceSession> session;
+    ATRACE_BEGIN("Injection CameraHal::openSession");
+    status_t res = manager->openSession(injectedCamId.string(), callback,
+                                          /*out*/ &session);
+    ATRACE_END();
+    if (res != OK) {
+        ALOGE("Injection camera could not open camera session: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+
+    std::shared_ptr<RequestMetadataQueue> queue;
+    auto requestQueueRet =
+        session->getCaptureRequestMetadataQueue([&queue](const auto& descriptor) {
+            queue = std::make_shared<RequestMetadataQueue>(descriptor);
+            if (!queue->isValid() || queue->availableToWrite() <= 0) {
+                ALOGE("Injection camera HAL returns empty request metadata fmq, not "
+                        "use it");
+                queue = nullptr;
+                // don't use the queue onwards.
+            }
+        });
+    if (!requestQueueRet.isOk()) {
+        ALOGE("Injection camera transaction error when getting request metadata fmq: "
+                "%s, not use it", requestQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    std::unique_ptr<ResultMetadataQueue>& resQueue = parent->mResultMetadataQueue;
+    auto resultQueueRet = session->getCaptureResultMetadataQueue(
+        [&resQueue](const auto& descriptor) {
+            resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+            if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+                ALOGE("Injection camera HAL returns empty result metadata fmq, not use "
+                        "it");
+                resQueue = nullptr;
+                // Don't use the resQueue onwards.
+            }
+        });
+    if (!resultQueueRet.isOk()) {
+        ALOGE("Injection camera transaction error when getting result metadata queue "
+                "from camera session: %s", resultQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+    IF_ALOGV() {
+        session->interfaceChain(
+                [](::android::hardware::hidl_vec<::android::hardware::hidl_string>
+                        interfaceChain) {
+                        ALOGV("Injection camera session interface chain:");
+                        for (const auto& iface : interfaceChain) {
+                            ALOGV("  %s", iface.c_str());
+                        }
+                });
+    }
+
+    ALOGV("%s: Injection camera interface = new HalInterface()", __FUNCTION__);
+    mInjectedCamHalInterface =
+            new HalInterface(session, queue, parent->mUseHalBufManager,
+                       parent->mSupportOfflineProcessing);
+    if (mInjectedCamHalInterface == nullptr) {
+        ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+status_t Camera3Device::Camera3DeviceInjectionMethods::injectCamera(
+        camera3::camera_stream_configuration& injectionConfig,
+        std::vector<uint32_t>& injectionBufferSizes) {
+    status_t res = NO_ERROR;
+    mInjectionConfig = injectionConfig;
+    mInjectionBufferSizes = injectionBufferSizes;
+
+    if (mInjectedCamHalInterface == nullptr) {
+        ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    nsecs_t maxExpectedDuration = parent->getExpectedInFlightDuration();
+    bool wasActive = false;
+    if (parent->mStatus == STATUS_ACTIVE) {
+        ALOGV("%s: Let the device be IDLE and the request thread is paused",
+                __FUNCTION__);
+        parent->mPauseStateNotify = true;
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        if (res != OK) {
+            ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
+            return res;
+        }
+        wasActive = true;
+    }
+
+    ALOGV("%s: Injection camera: replaceHalInterface", __FUNCTION__);
+    res = replaceHalInterface(mInjectedCamHalInterface, true);
+    if (res != OK) {
+        ALOGE("%s: Failed to replace the new HalInterface!", __FUNCTION__);
+        injectionDisconnectImpl();
+        return res;
+    }
+
+    res = parent->mRequestThread->setHalInterface(mInjectedCamHalInterface);
+    if (res != OK) {
+        ALOGE("%s: Failed to set new HalInterface in RequestThread!", __FUNCTION__);
+        replaceHalInterface(mBackupHalInterface, false);
+        injectionDisconnectImpl();
+        return res;
+    }
+
+    parent->mNeedConfig = true;
+    res = injectionConfigureStreams(injectionConfig, injectionBufferSizes);
+    parent->mNeedConfig = false;
+    if (res != OK) {
+        ALOGE("Can't injectionConfigureStreams device for streams:  %d: %s "
+                "(%d)", parent->mNextStreamId, strerror(-res), res);
+        replaceHalInterface(mBackupHalInterface, false);
+        injectionDisconnectImpl();
+        return res;
+    }
+
+    if (wasActive) {
+        ALOGV("%s: Restarting activity to inject camera", __FUNCTION__);
+        // Reuse current operating mode and session parameters for new stream
+        // config.
+        parent->internalUpdateStatusLocked(STATUS_ACTIVE);
+    }
+
+    return OK;
+}
+
+status_t Camera3Device::Camera3DeviceInjectionMethods::stopInjection() {
+    status_t res = NO_ERROR;
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    nsecs_t maxExpectedDuration = parent->getExpectedInFlightDuration();
+    bool wasActive = false;
+    if (parent->mStatus == STATUS_ACTIVE) {
+        ALOGV("%s: Let the device be IDLE and the request thread is paused",
+                __FUNCTION__);
+        parent->mPauseStateNotify = true;
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        if (res != OK) {
+            ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
+            return res;
+        }
+        wasActive = true;
+    }
+
+    res = replaceHalInterface(mBackupHalInterface, false);
+    if (res != OK) {
+        ALOGE("%s: Failed to restore the backup HalInterface!", __FUNCTION__);
+        injectionDisconnectImpl();
+        return res;
+    }
+    injectionDisconnectImpl();
+
+    if (wasActive) {
+        ALOGV("%s: Restarting activity to stop injection", __FUNCTION__);
+        // Reuse current operating mode and session parameters for new stream
+        // config.
+        parent->internalUpdateStatusLocked(STATUS_ACTIVE);
+    }
+
+    return OK;
+}
+
+bool Camera3Device::Camera3DeviceInjectionMethods::isInjecting() {
+    return mInjectedCamHalInterface != nullptr;
+}
+
+const String8& Camera3Device::Camera3DeviceInjectionMethods::getInjectedCamId()
+        const {
+    return mInjectedCamId;
+}
+
+void Camera3Device::Camera3DeviceInjectionMethods::getInjectionConfig(
+        /*out*/ camera3::camera_stream_configuration* injectionConfig,
+        /*out*/ std::vector<uint32_t>* injectionBufferSizes) {
+    if (injectionConfig == nullptr || injectionBufferSizes == nullptr) {
+        ALOGE("%s: Injection configuration arguments must not be null!", __FUNCTION__);
+        return;
+    }
+
+    *injectionConfig = mInjectionConfig;
+    *injectionBufferSizes = mInjectionBufferSizes;
+}
+
+
+status_t Camera3Device::Camera3DeviceInjectionMethods::injectionConfigureStreams(
+        camera3::camera_stream_configuration& injectionConfig,
+        std::vector<uint32_t>& injectionBufferSizes) {
+    ATRACE_CALL();
+    status_t res = NO_ERROR;
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (parent->mOperatingMode < 0) {
+        ALOGE("Invalid operating mode: %d", parent->mOperatingMode);
+        return BAD_VALUE;
+    }
+
+    // Start configuring the streams
+    ALOGV("%s: Injection camera %s: Starting stream configuration", __FUNCTION__,
+            mInjectedCamId.string());
+
+    parent->mPreparerThread->pause();
+
+    // Do the HAL configuration; will potentially touch stream
+    // max_buffers, usage, and priv fields, as well as data_space and format
+    // fields for IMPLEMENTATION_DEFINED formats.
+
+    const camera_metadata_t* sessionBuffer = parent->mSessionParams.getAndLock();
+    res = mInjectedCamHalInterface->configureInjectedStreams(
+            sessionBuffer, &injectionConfig, injectionBufferSizes,
+            parent->mDeviceInfo);
+    parent->mSessionParams.unlock(sessionBuffer);
+
+    if (res == BAD_VALUE) {
+        // HAL rejected this set of streams as unsupported, clean up config
+        // attempt and return to unconfigured state
+        ALOGE("Set of requested outputs not supported by HAL");
+        parent->cancelStreamsConfigurationLocked();
+        return BAD_VALUE;
+    } else if (res != OK) {
+        // Some other kind of error from configure_streams - this is not
+        // expected
+        ALOGE("Unable to configure streams with HAL: %s (%d)", strerror(-res),
+                  res);
+        return res;
+    }
+
+    for (size_t i = 0; i < parent->mOutputStreams.size(); i++) {
+        sp<camera3::Camera3OutputStreamInterface> outputStream =
+                parent->mOutputStreams[i];
+        mInjectedCamHalInterface->onStreamReConfigured(outputStream->getId());
+    }
+
+    // Request thread needs to know to avoid using repeat-last-settings protocol
+    // across configure_streams() calls
+    parent->mRequestThread->configurationComplete(
+            parent->mIsConstrainedHighSpeedConfiguration, parent->mSessionParams,
+            parent->mGroupIdPhysicalCameraMap);
+
+    parent->internalUpdateStatusLocked(STATUS_CONFIGURED);
+
+    ALOGV("%s: Injection camera %s: Stream configuration complete", __FUNCTION__,
+            mInjectedCamId.string());
+
+    auto rc = parent->mPreparerThread->resume();
+
+    if (rc != OK) {
+        ALOGE("%s: Injection camera %s: Preparer thread failed to resume!",
+                 __FUNCTION__, mInjectedCamId.string());
+        return rc;
+    }
+
+    return OK;
+}
+
+void Camera3Device::Camera3DeviceInjectionMethods::injectionDisconnectImpl() {
+    ATRACE_CALL();
+    ALOGI("%s: Injection camera disconnect", __FUNCTION__);
+
+    mBackupHalInterface = nullptr;
+    HalInterface* interface = nullptr;
+    {
+        Mutex::Autolock lock(mInjectionLock);
+        if (mInjectedCamHalInterface != nullptr) {
+            interface = mInjectedCamHalInterface.get();
+            // Call close without the internal mutex held, as the HAL close
+            // may need to wait on assorted callbacks, etc., to complete
+            // before it can return.
+        }
+    }
+
+    if (interface != nullptr) {
+        interface->close();
+    }
+
+    {
+        Mutex::Autolock lock(mInjectionLock);
+        if (mInjectedCamHalInterface != nullptr) {
+            mInjectedCamHalInterface->clear();
+            mInjectedCamHalInterface = nullptr;
+        }
+    }
+}
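injectionDisconnectImpl() snapshots the interface pointer while holding mInjectionLock and then calls close() with the lock released, since close() may block on HAL callbacks. A minimal standalone sketch of that snapshot-then-call pattern, with std::mutex and std::shared_ptr standing in for the Android primitives and assumed names throughout:

```cpp
// Sketch of "snapshot under the lock, make the blocking call outside the
// lock", as injectionDisconnectImpl() does around HalInterface::close().
// std::mutex/std::shared_ptr stand in for Android's Mutex and sp<>; the names
// are assumptions for the example.
#include <memory>
#include <mutex>

struct BlockingResource {
    void close() { /* may block on callbacks; must not hold gOwnerLock here */ }
};

std::mutex gOwnerLock;
std::shared_ptr<BlockingResource> gOwnedResource;

void disconnectResource() {
    std::shared_ptr<BlockingResource> snapshot;
    {
        std::lock_guard<std::mutex> lock(gOwnerLock);
        snapshot = gOwnedResource;   // take a reference while locked
    }
    if (snapshot) {
        snapshot->close();           // blocking call with the lock released
    }
    std::lock_guard<std::mutex> lock(gOwnerLock);
    gOwnedResource.reset();          // drop ownership under the lock again
}
```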
+
+status_t Camera3Device::Camera3DeviceInjectionMethods::replaceHalInterface(
+        sp<HalInterface> newHalInterface, bool keepBackup) {
+    Mutex::Autolock lock(mInjectionLock);
+    if (newHalInterface.get() == nullptr) {
+        ALOGE("%s: The newHalInterface does not exist, to stop replacing.",
+                __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (keepBackup && mBackupHalInterface == nullptr) {
+        mBackupHalInterface = parent->mInterface;
+    } else if (!keepBackup) {
+        mBackupHalInterface = nullptr;
+    }
+    parent->mInterface = newHalInterface;
+
+    return OK;
+}
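replaceHalInterface() installs a new interface on the parent and either records the displaced one as a backup (keepBackup == true, remembered only once) or drops the backup. A compact standalone model of that keep-or-clear behavior, with invented types:

```cpp
// Generic model of replaceHalInterface(): install a new interface, keeping
// the first displaced one as a backup only when asked to. Types and names are
// invented for the example.
#include <memory>

struct Interface {};

struct InterfaceSlot {
    std::shared_ptr<Interface> current;
    std::shared_ptr<Interface> backup;

    bool replace(std::shared_ptr<Interface> next, bool keepBackup) {
        if (next == nullptr) {
            return false;            // refuse to install a null interface
        }
        if (keepBackup && backup == nullptr) {
            backup = current;        // remember the original exactly once
        } else if (!keepBackup) {
            backup.reset();          // backup no longer needed
        }
        current = std::move(next);
        return true;
    }
};
```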
+
+};  // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
deleted file mode 100644
index b637160..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera3-DummyStream"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-#include "Camera3DummyStream.h"
-
-namespace android {
-
-namespace camera3 {
-
-const String8 Camera3DummyStream::DUMMY_ID;
-
-Camera3DummyStream::Camera3DummyStream(int id) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, DUMMY_WIDTH, DUMMY_HEIGHT,
-                /*maxSize*/0, DUMMY_FORMAT, DUMMY_DATASPACE, DUMMY_ROTATION,
-                DUMMY_ID) {
-
-}
-
-Camera3DummyStream::~Camera3DummyStream() {
-
-}
-
-status_t Camera3DummyStream::getBufferLocked(camera3_stream_buffer *,
-        const std::vector<size_t>&) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot produce buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferLocked(
-        const camera3_stream_buffer &,
-        nsecs_t, const std::vector<size_t>&) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::returnBufferCheckedLocked(
-            const camera3_stream_buffer &,
-            nsecs_t,
-            bool,
-            const std::vector<size_t>&,
-            /*out*/
-            sp<Fence>*) {
-    ATRACE_CALL();
-    ALOGE("%s: Stream %d: Dummy stream cannot return buffers!", __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-void Camera3DummyStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
-    String8 lines;
-    lines.appendFormat("    Stream[%d]: Dummy\n", mId);
-    write(fd, lines.string(), lines.size());
-
-    Camera3IOStreamBase::dump(fd, args);
-}
-
-status_t Camera3DummyStream::setTransform(int) {
-    ATRACE_CALL();
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
-    (void) buffer;
-    (void) fenceFd;
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::configureQueueLocked() {
-    // Do nothing
-    return OK;
-}
-
-status_t Camera3DummyStream::disconnectLocked() {
-    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
-                                           : STATE_CONSTRUCTED;
-    return OK;
-}
-
-status_t Camera3DummyStream::getEndpointUsage(uint64_t *usage) const {
-    *usage = DUMMY_USAGE;
-    return OK;
-}
-
-bool Camera3DummyStream::isVideoStream() const {
-    return false;
-}
-
-bool Camera3DummyStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
-    return false;
-}
-
-status_t Camera3DummyStream::dropBuffers(bool /*dropping*/) {
-    return OK;
-}
-
-const String8& Camera3DummyStream::getPhysicalCameraId() const {
-    return DUMMY_ID;
-}
-
-status_t Camera3DummyStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
-    ALOGE("%s: Stream %d: Dummy stream doesn't support set consumer surface!",
-            __FUNCTION__, mId);
-    return INVALID_OPERATION;
-}
-
-status_t Camera3DummyStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
-            const std::vector<OutputStreamInfo> &/*outputInfo*/,
-            const std::vector<size_t> &/*removedSurfaceIds*/,
-            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
-    ALOGE("%s: this method is not supported!", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
-}; // namespace camera3
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
deleted file mode 100644
index 4b67ea5..0000000
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright (C) 2014-2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-#define ANDROID_SERVERS_CAMERA3_DUMMY_STREAM_H
-
-#include <utils/RefBase.h>
-#include <gui/Surface.h>
-
-#include "Camera3Stream.h"
-#include "Camera3IOStreamBase.h"
-#include "Camera3OutputStreamInterface.h"
-
-namespace android {
-namespace camera3 {
-
-/**
- * A dummy output stream class, to be used as a placeholder when no valid
- * streams are configured by the client.
- * This is necessary because camera HAL v3.2 or older disallow configuring
- * 0 output streams, while the public camera2 API allows for it.
- */
-class Camera3DummyStream :
-        public Camera3IOStreamBase,
-        public Camera3OutputStreamInterface {
-
-  public:
-    /**
-     * Set up a dummy stream; doesn't actually connect to anything, and uses
-     * a default dummy format and size.
-     */
-    explicit Camera3DummyStream(int id);
-
-    virtual ~Camera3DummyStream();
-
-    /**
-     * Camera3Stream interface
-     */
-
-    virtual void     dump(int fd, const Vector<String16> &args) const;
-
-    status_t         setTransform(int transform);
-
-    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
-
-    /**
-     * Drop buffers for stream of streamId if dropping is true. If dropping is false, do not
-     * drop buffers for stream of streamId.
-     */
-    virtual status_t dropBuffers(bool /*dropping*/) override;
-
-    /**
-     * Query the physical camera id for the output stream.
-     */
-    virtual const String8& getPhysicalCameraId() const override;
-
-    /**
-     * Return if this output stream is for video encoding.
-     */
-    bool isVideoStream() const;
-
-    /**
-     * Return if the consumer configuration of this stream is deferred.
-     */
-    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
-
-    /**
-     * Set the consumer surfaces to the output stream.
-     */
-    virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
-
-    /**
-     * Query the output surface id.
-     */
-    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
-
-    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
-            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
-
-    /**
-     * Update the stream output surfaces.
-     */
-    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
-            const std::vector<OutputStreamInfo> &outputInfo,
-            const std::vector<size_t> &removedSurfaceIds,
-            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
-
-  protected:
-
-    /**
-     * Note that we release the lock briefly in this function
-     */
-    virtual status_t returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
-            nsecs_t timestamp,
-            bool output,
-            const std::vector<size_t>& surface_ids,
-            /*out*/
-            sp<Fence> *releaseFenceOut);
-
-    virtual status_t disconnectLocked();
-
-  private:
-
-    // Default dummy parameters; 320x240 is a required size for all devices,
-    // otherwise act like a SurfaceView would.
-    static const int DUMMY_WIDTH = 320;
-    static const int DUMMY_HEIGHT = 240;
-    static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN;
-    static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0;
-    static const uint64_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER;
-    static const String8 DUMMY_ID;
-
-    /**
-     * Internal Camera3Stream interface
-     */
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
-            const std::vector<size_t>& surface_ids = std::vector<size_t>());
-    virtual status_t returnBufferLocked(
-            const camera3_stream_buffer &buffer,
-            nsecs_t timestamp, const std::vector<size_t>& surface_ids);
-
-    virtual status_t configureQueueLocked();
-
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
-
-}; // class Camera3DummyStream
-
-} // namespace camera3
-
-} // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
new file mode 100644
index 0000000..8cc6833
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-FakeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include "Camera3FakeStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+const String8 Camera3FakeStream::FAKE_ID;
+
+Camera3FakeStream::Camera3FakeStream(int id) :
+        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
+                /*maxSize*/0, FAKE_FORMAT, FAKE_DATASPACE, FAKE_ROTATION,
+                FAKE_ID, std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
+
+}
+
+Camera3FakeStream::~Camera3FakeStream() {
+
+}
+
+status_t Camera3FakeStream::getBufferLocked(camera_stream_buffer *,
+        const std::vector<size_t>&) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot produce buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferLocked(
+        const camera_stream_buffer &,
+        nsecs_t, const std::vector<size_t>&) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::returnBufferCheckedLocked(
+            const camera_stream_buffer &,
+            nsecs_t,
+            bool,
+            const std::vector<size_t>&,
+            /*out*/
+            sp<Fence>*) {
+    ATRACE_CALL();
+    ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
+    (void) args;
+    String8 lines;
+    lines.appendFormat("    Stream[%d]: Fake\n", mId);
+    write(fd, lines.string(), lines.size());
+
+    Camera3IOStreamBase::dump(fd, args);
+}
+
+status_t Camera3FakeStream::setTransform(int) {
+    ATRACE_CALL();
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
+    (void) buffer;
+    (void) fenceFd;
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::configureQueueLocked() {
+    // Do nothing
+    return OK;
+}
+
+status_t Camera3FakeStream::disconnectLocked() {
+    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
+                                           : STATE_CONSTRUCTED;
+    return OK;
+}
+
+status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) const {
+    *usage = FAKE_USAGE;
+    return OK;
+}
+
+bool Camera3FakeStream::isVideoStream() const {
+    return false;
+}
+
+bool Camera3FakeStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
+    return false;
+}
+
+status_t Camera3FakeStream::dropBuffers(bool /*dropping*/) {
+    return OK;
+}
+
+const String8& Camera3FakeStream::getPhysicalCameraId() const {
+    return FAKE_ID;
+}
+
+status_t Camera3FakeStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
+    ALOGE("%s: Stream %d: Fake stream doesn't support set consumer surface!",
+            __FUNCTION__, mId);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+            const std::vector<OutputStreamInfo> &/*outputInfo*/,
+            const std::vector<size_t> &/*removedSurfaceIds*/,
+            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
+    ALOGE("%s: this method is not supported!", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3FakeStream::setBatchSize(size_t /*batchSize*/) {
+    ALOGE("%s: this method is not supported!", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+}; // namespace camera3
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
new file mode 100644
index 0000000..914ccbf
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2014-2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+#define ANDROID_SERVERS_CAMERA3_FAKE_STREAM_H
+
+#include <utils/RefBase.h>
+#include <gui/Surface.h>
+
+#include "Camera3Stream.h"
+#include "Camera3IOStreamBase.h"
+#include "Camera3OutputStreamInterface.h"
+
+namespace android {
+namespace camera3 {
+
+/**
+ * A fake output stream class, to be used as a placeholder when no valid
+ * streams are configured by the client.
+ * This is necessary because camera HALs v3.2 or older disallow configuring
+ * 0 output streams, while the public camera2 API allows for it.
+ */
+class Camera3FakeStream :
+        public Camera3IOStreamBase,
+        public Camera3OutputStreamInterface {
+
+  public:
+    /**
+     * Set up a fake stream; doesn't actually connect to anything, and uses
+     * a default fake format and size.
+     */
+    explicit Camera3FakeStream(int id);
+
+    virtual ~Camera3FakeStream();
+
+    /**
+     * Camera3Stream interface
+     */
+
+    virtual void     dump(int fd, const Vector<String16> &args) const;
+
+    status_t         setTransform(int transform);
+
+    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+
+    /**
+     * If dropping is true, drop buffers for this stream; if dropping is false,
+     * stop dropping buffers for it.
+     */
+    virtual status_t dropBuffers(bool /*dropping*/) override;
+
+    /**
+     * Query the physical camera id for the output stream.
+     */
+    virtual const String8& getPhysicalCameraId() const override;
+
+    /**
+     * Return if this output stream is for video encoding.
+     */
+    bool isVideoStream() const;
+
+    /**
+     * Return if the consumer configuration of this stream is deferred.
+     */
+    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
+
+    /**
+     * Set the consumer surfaces to the output stream.
+     */
+    virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+
+    /**
+     * Query the output surface id.
+     */
+    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }
+
+    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
+            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };
+
+    /**
+     * Update the stream output surfaces.
+     */
+    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+            const std::vector<OutputStreamInfo> &outputInfo,
+            const std::vector<size_t> &removedSurfaceIds,
+            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
+
+    virtual status_t setBatchSize(size_t batchSize) override;
+
+  protected:
+
+    /**
+     * Note that we release the lock briefly in this function
+     */
+    virtual status_t returnBufferCheckedLocked(
+            const camera_stream_buffer &buffer,
+            nsecs_t timestamp,
+            bool output,
+            const std::vector<size_t>& surface_ids,
+            /*out*/
+            sp<Fence> *releaseFenceOut);
+
+    virtual status_t disconnectLocked();
+
+  private:
+
+    // Default fake parameters; 320x240 is a required size for all devices,
+    // otherwise act like a SurfaceView would.
+    static const int FAKE_WIDTH = 320;
+    static const int FAKE_HEIGHT = 240;
+    static const int FAKE_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    static const android_dataspace FAKE_DATASPACE = HAL_DATASPACE_UNKNOWN;
+    static const camera_stream_rotation_t FAKE_ROTATION = CAMERA_STREAM_ROTATION_0;
+    static const uint64_t FAKE_USAGE = GRALLOC_USAGE_HW_COMPOSER;
+    static const String8 FAKE_ID;
+
+    /**
+     * Internal Camera3Stream interface
+     */
+    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
+            const std::vector<size_t>& surface_ids = std::vector<size_t>());
+    virtual status_t returnBufferLocked(
+            const camera_stream_buffer &buffer,
+            nsecs_t timestamp, const std::vector<size_t>& surface_ids);
+
+    virtual status_t configureQueueLocked();
+
+    virtual status_t getEndpointUsage(uint64_t *usage) const;
+
+}; // class Camera3FakeStream
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
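Camera3FakeStream is essentially a null-object placeholder: it satisfies the output-stream interface but fails every buffer operation, and exists only so older HALs always see at least one configured stream. A tiny standalone illustration of that idea follows; the types below are invented for the example and are not the AOSP classes.

```cpp
// Tiny illustration of the placeholder (null-object) idea behind
// Camera3FakeStream: an object that satisfies the stream interface but
// refuses to produce buffers. The types here are invented for the example.
#include <cstdio>

struct OutputStream {
    virtual ~OutputStream() = default;
    virtual int getBuffer() = 0;      // 0 on success, negative error otherwise
};

struct FakeOutputStream : OutputStream {
    int getBuffer() override {
        std::fprintf(stderr, "Fake stream cannot produce buffers\n");
        return -1;                    // analogous to returning INVALID_OPERATION
    }
};

int main() {
    FakeOutputStream placeholder;     // stands in when no real streams exist
    return placeholder.getBuffer() < 0 ? 0 : 1;
}
```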
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index bda2961..0204d49 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -29,13 +29,15 @@
 
 namespace camera3 {
 
-Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type,
+Camera3IOStreamBase::Camera3IOStreamBase(int id, camera_stream_type_t type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
-        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId) :
+        android_dataspace dataSpace, camera_stream_rotation_t rotation,
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
-                physicalCameraId, setId),
+                physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTotalBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
@@ -77,13 +79,13 @@
 
     lines.appendFormat("      State: %d\n", mState);
     lines.appendFormat("      Dims: %d x %d, format 0x%x, dataspace 0x%x\n",
-            camera3_stream::width, camera3_stream::height,
-            camera3_stream::format, camera3_stream::data_space);
+            camera_stream::width, camera_stream::height,
+            camera_stream::format, camera_stream::data_space);
     lines.appendFormat("      Max size: %zu\n", mMaxSize);
     lines.appendFormat("      Combined usage: %" PRIu64 ", max HAL buffers: %d\n",
-            mUsage | consumerUsage, camera3_stream::max_buffers);
-    if (strlen(camera3_stream::physical_camera_id) > 0) {
-        lines.appendFormat("      Physical camera id: %s\n", camera3_stream::physical_camera_id);
+            mUsage | consumerUsage, camera_stream::max_buffers);
+    if (strlen(camera_stream::physical_camera_id) > 0) {
+        lines.appendFormat("      Physical camera id: %s\n", camera_stream::physical_camera_id);
     }
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
@@ -150,11 +152,11 @@
    return OK;
 }
 
-void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer,
+void Camera3IOStreamBase::handoutBufferLocked(camera_stream_buffer &buffer,
                                               buffer_handle_t *handle,
                                               int acquireFence,
                                               int releaseFence,
-                                              camera3_buffer_status_t status,
+                                              camera_buffer_status_t status,
                                               bool output) {
     /**
      * Note that all fences are now owned by HAL.
@@ -220,7 +222,7 @@
 }
 
 status_t Camera3IOStreamBase::returnAnyBufferLocked(
-        const camera3_stream_buffer &buffer,
+        const camera_stream_buffer &buffer,
         nsecs_t timestamp,
         bool output,
         const std::vector<size_t>& surface_ids) {
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 448379c..90c8a7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -32,11 +32,12 @@
 class Camera3IOStreamBase :
         public Camera3Stream {
   protected:
-    Camera3IOStreamBase(int id, camera3_stream_type_t type,
+    Camera3IOStreamBase(int id, camera_stream_type_t type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
   public:
 
@@ -48,6 +49,7 @@
 
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
+    int              getMaxTotalBuffers() const { return mTotalBufferCount; }
   protected:
     size_t            mTotalBufferCount;
     // sum of input and output buffers that are currently acquired by HAL
@@ -63,13 +65,13 @@
     sp<Fence>         mCombinedFence;
 
     status_t         returnAnyBufferLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     virtual status_t returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>& surface_ids,
@@ -99,11 +101,11 @@
 
     // Hand out the buffer to a native location,
     //   incrementing the internal refcount and dequeued buffer count
-    void handoutBufferLocked(camera3_stream_buffer &buffer,
+    void handoutBufferLocked(camera_stream_buffer &buffer,
                              buffer_handle_t *handle,
                              int acquire_fence,
                              int release_fence,
-                             camera3_buffer_status_t status,
+                             camera_buffer_status_t status,
                              bool output);
 
 }; // class Camera3IOStreamBase
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index cb59a76..6d8317b 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -27,13 +27,14 @@
 
 namespace camera3 {
 
-const String8 Camera3InputStream::DUMMY_ID;
+const String8 Camera3InputStream::FAKE_ID;
 
 Camera3InputStream::Camera3InputStream(int id,
         uint32_t width, uint32_t height, int format) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, /*maxSize*/0,
-                            format, HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0,
-                            DUMMY_ID) {
+        Camera3IOStreamBase(id, CAMERA_STREAM_INPUT, width, height, /*maxSize*/0,
+                            format, HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0,
+                            FAKE_ID,
+                            std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT}) {
 
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
@@ -46,10 +47,14 @@
 }
 
 status_t Camera3InputStream::getInputBufferLocked(
-        camera3_stream_buffer *buffer) {
+        camera_stream_buffer *buffer, Size *size) {
     ATRACE_CALL();
     status_t res;
 
+    if (size == nullptr) {
+        ALOGE("%s: size must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
     // FIXME: will not work in (re-)registration
     if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
         ALOGE("%s: Stream %d: Buffer registration for input streams"
@@ -77,16 +82,18 @@
         return res;
     }
 
+    size->width  = bufferItem.mGraphicBuffer->getWidth();
+    size->height = bufferItem.mGraphicBuffer->getHeight();
+
     anb = bufferItem.mGraphicBuffer->getNativeBuffer();
     assert(anb != NULL);
     fenceFd = bufferItem.mFence->dup();
-
     /**
      * FenceFD now owned by HAL except in case of error,
      * in which case we reassign it to acquire_fence
      */
     handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
-                        /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/false);
+                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/false);
     mBuffersInFlight.push_back(bufferItem);
 
     mFrameCount++;
@@ -96,7 +103,7 @@
 }
 
 status_t Camera3InputStream::returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>&,
@@ -134,7 +141,7 @@
         return INVALID_OPERATION;
     }
 
-    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
         if (buffer.release_fence != -1) {
             ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
                   "there is an error", __FUNCTION__, mId, buffer.release_fence);
@@ -144,7 +151,7 @@
         /**
          * Reassign release fence as the acquire fence in case of error
          */
-        const_cast<camera3_stream_buffer*>(&buffer)->release_fence =
+        const_cast<camera_stream_buffer*>(&buffer)->release_fence =
                 buffer.acquire_fence;
     }
 
@@ -165,7 +172,7 @@
 }
 
 status_t Camera3InputStream::returnInputBufferLocked(
-        const camera3_stream_buffer &buffer) {
+        const camera_stream_buffer &buffer) {
     ATRACE_CALL();
 
     return returnAnyBufferLocked(buffer, /*timestamp*/0, /*output*/false);
@@ -224,7 +231,7 @@
     }
 
     assert(mMaxSize == 0);
-    assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB);
+    assert(camera_stream::format != HAL_PIXEL_FORMAT_BLOB);
 
     mHandoutTotalBufferCount = 0;
     mFrameCount = 0;
@@ -244,14 +251,14 @@
         }
         size_t minBufs = static_cast<size_t>(minUndequeuedBuffers);
 
-        if (camera3_stream::max_buffers == 0) {
+        if (camera_stream::max_buffers == 0) {
             ALOGE("%s: %d: HAL sets max_buffer to 0. Must be at least 1.",
                     __FUNCTION__, __LINE__);
             return INVALID_OPERATION;
         }
 
         /*
-         * We promise never to 'acquire' more than camera3_stream::max_buffers
+         * We promise never to 'acquire' more than camera_stream::max_buffers
          * at any one time.
          *
          * Boost the number up to meet the minimum required buffer count.
@@ -259,8 +266,8 @@
          * (Note that this sets consumer-side buffer count only,
          * and not the sum of producer+consumer side as in other camera streams).
          */
-        mTotalBufferCount = camera3_stream::max_buffers > minBufs ?
-            camera3_stream::max_buffers : minBufs;
+        mTotalBufferCount = camera_stream::max_buffers > minBufs ?
+            camera_stream::max_buffers : minBufs;
         // TODO: somehow set the total buffer count when producer connects?
 
         mConsumer = new BufferItemConsumer(consumer, mUsage,
@@ -272,17 +279,17 @@
         mConsumer->setBufferFreedListener(this);
     }
 
-    res = mConsumer->setDefaultBufferSize(camera3_stream::width,
-                                          camera3_stream::height);
+    res = mConsumer->setDefaultBufferSize(camera_stream::width,
+                                          camera_stream::height);
     if (res != OK) {
         ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d",
-              __FUNCTION__, mId, camera3_stream::width, camera3_stream::height);
+              __FUNCTION__, mId, camera_stream::width, camera_stream::height);
         return res;
     }
-    res = mConsumer->setDefaultBufferFormat(camera3_stream::format);
+    res = mConsumer->setDefaultBufferFormat(camera_stream::format);
     if (res != OK) {
         ALOGE("%s: Stream %d: Could not set buffer format %d",
-              __FUNCTION__, mId, camera3_stream::format);
+              __FUNCTION__, mId, camera_stream::format);
         return res;
     }
 
@@ -298,8 +305,8 @@
 void Camera3InputStream::onBufferFreed(const wp<GraphicBuffer>& gb) {
     const sp<GraphicBuffer> buffer = gb.promote();
     if (buffer != nullptr) {
-        camera3_stream_buffer streamBuffer =
-                {nullptr, &buffer->handle, 0, -1, -1};
+        camera_stream_buffer streamBuffer =
+                {nullptr, &buffer->handle, CAMERA_BUFFER_STATUS_OK, -1, -1};
         // Check if this buffer is outstanding.
         if (isOutstandingBuffer(streamBuffer)) {
             ALOGV("%s: Stream %d: Trying to free a buffer that is still being "
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 97a627a..46221d1 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -53,13 +53,13 @@
     sp<IGraphicBufferProducer> mProducer;
     Vector<BufferItem> mBuffersInFlight;
 
-    static const String8 DUMMY_ID;
+    static const String8 FAKE_ID;
 
     /**
      * Camera3IOStreamBase
      */
     virtual status_t returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>& surface_ids,
@@ -70,9 +70,9 @@
      * Camera3Stream interface
      */
 
-    virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size *size);
     virtual status_t returnInputBufferLocked(
-            const camera3_stream_buffer &buffer);
+            const camera_stream_buffer &buffer);
     virtual status_t getInputBufferProducerLocked(
             sp<IGraphicBufferProducer> *producer);
     virtual status_t disconnectLocked();
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 95f9633..a7e64ce 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -176,7 +176,7 @@
 
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
-        listener, *this, mBufferRecords, *this};
+        listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 
@@ -260,7 +260,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -299,7 +300,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -333,7 +335,8 @@
         mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, mBufferRecords
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -353,7 +356,7 @@
     }
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
         *this, mBufferRecords, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -370,7 +373,7 @@
     }
 
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mBufferRecords};
+        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index c4c7a85..5581964 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -36,7 +36,6 @@
 #include "device3/RotateAndCropMapper.h"
 #include "device3/ZoomRatioMapper.h"
 #include "utils/TagMonitor.h"
-#include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
 
 namespace android {
@@ -209,6 +208,7 @@
     sp<camera3::Camera3Stream> mInputStream;
     camera3::StreamSet mOutputStreams;
     camera3::BufferRecords mBufferRecords;
+    SessionStatsBuilder mSessionStatsBuilder;
 
     std::mutex mOfflineReqsLock;
     camera3::InFlightRequestMap mOfflineReqs;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 01ca006..03b77fc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -18,8 +18,15 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <ctime>
+#include <fstream>
+
+#include <android-base/unique_fd.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+
+#include "api1/client2/JpegProcessor.h"
 #include "Camera3OutputStream.h"
 #include "utils/TraceHFR.h"
 
@@ -35,12 +42,13 @@
 Camera3OutputStream::Camera3OutputStream(int id,
         sp<Surface> consumer,
         uint32_t width, uint32_t height, int format,
-        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+        android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
-        int setId) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
+        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -62,10 +70,13 @@
 Camera3OutputStream::Camera3OutputStream(int id,
         sp<Surface> consumer,
         uint32_t width, uint32_t height, size_t maxSize, int format,
-        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
-                            format, dataSpace, rotation, physicalCameraId, setId),
+        android_dataspace dataSpace, camera_stream_rotation_t rotation,
+        nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
+        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
+                            setId, isMultiResolution),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -94,11 +105,13 @@
 Camera3OutputStream::Camera3OutputStream(int id,
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
-        camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
-        const String8& physicalCameraId, int setId) :
-        Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
+        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
+        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -114,7 +127,7 @@
         mState = STATE_ERROR;
     }
 
-    // Sanity check for the consumer usage flag.
+    // Validation check for the consumer usage flag.
     if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
             (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
         ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
@@ -127,18 +140,19 @@
     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
 
-Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
+Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
                                          uint32_t width, uint32_t height,
                                          int format,
                                          android_dataspace dataSpace,
-                                         camera3_stream_rotation_t rotation,
+                                         camera_stream_rotation_t rotation,
                                          const String8& physicalCameraId,
+                                        const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
-                                         int setId) :
+                                         int setId, bool isMultiResolution) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
-                            physicalCameraId, setId),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseMonoTimestamp(false),
@@ -159,7 +173,7 @@
     disconnectLocked();
 }
 
-status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
+status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
         const std::vector<size_t>&) {
     ATRACE_HFR_CALL();
 
@@ -177,11 +191,70 @@
      * in which case we reassign it to acquire_fence
      */
     handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
-                        /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
+                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
 
     return OK;
 }
 
+status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
+    status_t res;
+
+    if ((res = getBufferPreconditionCheckLocked()) != OK) {
+        return res;
+    }
+
+    if (mUseBufferManager) {
+        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
+                __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+
+    sp<Surface> consumer = mConsumer;
+    /**
+     * Release the lock briefly to avoid deadlock in the following scenario:
+     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
+     * This thread acquires the StreamingProcessor lock and tries to lock the Camera3Stream lock.
+     * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
+     * This thread acquires the Camera3Stream lock and the bufferQueue lock, and tries to lock
+     * the StreamingProcessor lock.
+     * Thread 3: Camera3Stream::getBuffer(). This thread acquires the Camera3Stream lock
+     * and tries to lock the bufferQueue lock.
+     * This creates a circular locking dependency.
+     */
+    mLock.unlock();
+
+    size_t numBuffersRequested = outBuffers->size();
+    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
+
+    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
+    res = consumer->dequeueBuffers(&buffers);
+    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
+    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
+
+    mLock.lock();
+
+    if (res != OK) {
+        if (shouldLogError(res, mState)) {
+            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
+                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
+        }
+        checkRetAndSetAbandonedLocked(res);
+        return res;
+    }
+    checkRemovedBuffersLocked();
+
+    /**
+     * FenceFD now owned by HAL except in case of error,
+     * in which case we reassign it to acquire_fence
+     */
+    for (size_t i = 0; i < numBuffersRequested; i++) {
+        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
+                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
+                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
+    }
+    return OK;
+}
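getBuffersLocked() pulls a whole batch of buffers in a single Surface::dequeueBuffers() call, and the per-buffer path later in this file refills a cached batch and pops one entry per request. A minimal standalone sketch of that refill-then-pop caching, with assumed names in place of the real members:

```cpp
// Minimal sketch of the batched-dequeue dispatch in this file: refill a cached
// batch with one bulk call, then hand out one buffer per request. Buffer,
// gBatched and bulkDequeue are assumptions, not Camera3OutputStream members.
#include <algorithm>
#include <cstddef>
#include <vector>

struct Buffer { int fenceFd = -1; };

static std::vector<Buffer> gBatched;            // already-dequeued buffers

// Stand-in for one bulk dequeue call that fills every requested slot.
static bool bulkDequeue(std::vector<Buffer>* batch) {
    for (auto& b : *batch) b.fenceFd = -1;
    return true;
}

bool getOneBuffer(Buffer* out, size_t batchSize, size_t remaining) {
    if (gBatched.empty()) {
        if (remaining == 0) return false;       // everything is handed out
        std::vector<Buffer> batch(std::min(batchSize, remaining));
        if (!bulkDequeue(&batch)) return false; // one round trip refills the cache
        gBatched = std::move(batch);
    }
    *out = gBatched.back();                     // dispatch one cached buffer
    gBatched.pop_back();
    return true;
}
```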
+
 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
             ANativeWindowBuffer* buffer, int anwReleaseFence,
             const std::vector<size_t>&) {
@@ -189,10 +262,14 @@
 }
 
 status_t Camera3OutputStream::returnBufferLocked(
-        const camera3_stream_buffer &buffer,
+        const camera_stream_buffer &buffer,
         nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
     ATRACE_HFR_CALL();
 
+    if (mHandoutTotalBufferCount == 1) {
+        returnPrefetchedBuffersLocked();
+    }
+
     status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);
 
     if (res != OK) {
@@ -206,7 +283,7 @@
 }
 
 status_t Camera3OutputStream::returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>& surface_ids,
@@ -236,11 +313,11 @@
     /**
      * Return buffer back to ANativeWindow
      */
-    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
+    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
         // Cancel buffer
         if (mDropBuffers) {
             ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
-        } else if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
             ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
         } else {
             ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
@@ -260,7 +337,7 @@
             mBufferProducerListener->onBufferReleased();
         }
     } else {
-        if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
+        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
             {
                 char traceLog[48];
                 snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
@@ -279,6 +356,12 @@
                   __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
+        // If this is a JPEG output, and image dump mask is set, save image to
+        // disk.
+        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
+                mImageDumpMask) {
+            dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
+        }
 
         res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
         if (shouldLogError(res, state)) {
@@ -290,7 +373,7 @@
 
     // Once a valid buffer has been returned to the queue, can no longer
     // dequeue all buffers for preallocation.
-    if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
+    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
         mStreamUnpreparable = true;
     }
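The new branch in returnBufferCheckedLocked() calls dumpImageToDisk() for BLOB/JFIF (JPEG) buffers when the image dump mask is set; that helper is defined outside this section. A standalone sketch of what a timestamped dump of an opaque JPEG buffer can look like, with an assumed output path and function name:

```cpp
// Standalone sketch of dumping an opaque JPEG buffer to a timestamped file.
// It only illustrates the idea; it is not the dumpImageToDisk() used above,
// whose body is not shown here. The output path is an assumption.
#include <cstdint>
#include <cstdio>
#include <ctime>
#include <fstream>
#include <string>
#include <vector>

void dumpJpegForDebug(const std::vector<uint8_t>& jpegData, int streamId) {
    std::time_t now = std::time(nullptr);
    char stamp[32];
    std::strftime(stamp, sizeof(stamp), "%Y%m%d_%H%M%S", std::localtime(&now));

    // Assumed debug location; a real implementation would use a writable
    // directory on the device and the capture timestamp instead.
    std::string path = "/data/misc/cameraserver/stream" +
            std::to_string(streamId) + "_" + stamp + ".jpg";

    std::ofstream out(path, std::ios::binary);
    if (!out) {
        std::fprintf(stderr, "Failed to open %s for writing\n", path.c_str());
        return;
    }
    out.write(reinterpret_cast<const char*>(jpegData.data()),
              static_cast<std::streamsize>(jpegData.size()));
}
```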
 
@@ -411,7 +494,7 @@
     if (mMaxSize == 0) {
         // For buffers of known size
         res = native_window_set_buffers_dimensions(mConsumer.get(),
-                camera3_stream::width, camera3_stream::height);
+                camera_stream::width, camera_stream::height);
     } else {
         // For buffers with bounded size
         res = native_window_set_buffers_dimensions(mConsumer.get(),
@@ -420,23 +503,23 @@
     if (res != OK) {
         ALOGE("%s: Unable to configure stream buffer dimensions"
                 " %d x %d (maxSize %zu) for stream %d",
-                __FUNCTION__, camera3_stream::width, camera3_stream::height,
+                __FUNCTION__, camera_stream::width, camera_stream::height,
                 mMaxSize, mId);
         return res;
     }
     res = native_window_set_buffers_format(mConsumer.get(),
-            camera3_stream::format);
+            camera_stream::format);
     if (res != OK) {
         ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
-                __FUNCTION__, camera3_stream::format, mId);
+                __FUNCTION__, camera_stream::format, mId);
         return res;
     }
 
     res = native_window_set_buffers_data_space(mConsumer.get(),
-            camera3_stream::data_space);
+            camera_stream::data_space);
     if (res != OK) {
         ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
-                __FUNCTION__, camera3_stream::data_space, mId);
+                __FUNCTION__, camera_stream::data_space, mId);
         return res;
     }
 
@@ -451,14 +534,14 @@
     }
 
     ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
-            maxConsumerBuffers, camera3_stream::max_buffers);
-    if (camera3_stream::max_buffers == 0) {
+            maxConsumerBuffers, camera_stream::max_buffers);
+    if (camera_stream::max_buffers == 0) {
         ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
-                __FUNCTION__, camera3_stream::max_buffers);
+                __FUNCTION__, camera_stream::max_buffers);
         return INVALID_OPERATION;
     }
 
-    mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
+    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
     mHandoutTotalBufferCount = 0;
     mFrameCount = 0;
     mLastTimestamp = 0;
@@ -494,10 +577,12 @@
             !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
         uint64_t consumerUsage = 0;
         getEndpointUsage(&consumerUsage);
+        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
+        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
         StreamInfo streamInfo(
-                getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
+                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                 mUsage | consumerUsage, mTotalBufferCount,
-                /*isConfigured*/true);
+                /*isConfigured*/true, isMultiResolution());
         wp<Camera3OutputStream> weakThis(this);
         res = mBufferManager->registerStream(weakThis,
                 streamInfo);
@@ -528,7 +613,8 @@
 
     if (mUseBufferManager) {
         sp<GraphicBuffer> gb;
-        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
+        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
+                isMultiResolution(), &gb, fenceFd);
         if (res == OK) {
             // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
             // successful return.
@@ -567,11 +653,50 @@
          * and try to lock bufferQueue lock.
          * Then there is circular locking dependency.
          */
-        sp<ANativeWindow> currentConsumer = mConsumer;
+        sp<Surface> consumer = mConsumer;
+        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
+                                   camera_stream::max_buffers) - mHandoutTotalBufferCount;
         mLock.unlock();
 
         nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
-        res = currentConsumer->dequeueBuffer(currentConsumer.get(), anb, fenceFd);
+
+        size_t batchSize = mBatchSize.load();
+        if (batchSize == 1) {
+            sp<ANativeWindow> anw = consumer;
+            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
+        } else {
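+            // Batched path: when the local cache is empty, refill it with one
+            // dequeueBuffers() call (clamped to the buffers still available for
+            // handout), then hand out one cached buffer per getBuffer call.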
+            std::unique_lock<std::mutex> batchLock(mBatchLock);
+            res = OK;
+            if (mBatchedBuffers.size() == 0) {
+                if (remainingBuffers == 0) {
+                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
+                    return INVALID_OPERATION;
+                }
+                if (batchSize > remainingBuffers) {
+                    batchSize = remainingBuffers;
+                }
+                batchLock.unlock();
+                // Refill batched buffers
+                std::vector<Surface::BatchBuffer> batchedBuffers;
+                batchedBuffers.resize(batchSize);
+                res = consumer->dequeueBuffers(&batchedBuffers);
+                batchLock.lock();
+                if (res != OK) {
+                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                } else {
+                    mBatchedBuffers = std::move(batchedBuffers);
+                }
+            }
+
+            if (res == OK) {
+                // Dispatch batch buffers
+                *anb = mBatchedBuffers.back().buffer;
+                *fenceFd = mBatchedBuffers.back().fenceFd;
+                mBatchedBuffers.pop_back();
+            }
+        }
+
         nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
         mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
 
@@ -582,7 +707,8 @@
 
             sp<GraphicBuffer> gb;
             res = mBufferManager->getBufferForStream(
-                    getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);
+                    getId(), getStreamSetId(), isMultiResolution(),
+                    &gb, fenceFd, /*noFreeBuffer*/true);
 
             if (res == OK) {
                 // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
@@ -629,7 +755,8 @@
         onBuffersRemovedLocked(removedBuffers);
 
         if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
-            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
+            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
+                    removedBuffers.size());
         }
     }
 }
@@ -665,6 +792,8 @@
         return OK;
     }
 
+    returnPrefetchedBuffersLocked();
+
     ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
 
     res = native_window_api_disconnect(mConsumer.get(),
@@ -689,7 +818,7 @@
     // Since device is already idle, there is no getBuffer call to buffer manager, unregister the
     // stream at this point should be safe.
     if (mUseBufferManager) {
-        res = mBufferManager->unregisterStream(getId(), getStreamSetId());
+        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
         if (res != OK) {
             ALOGE("%s: Unable to unregister stream %d from buffer manager "
                     "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
@@ -750,7 +879,7 @@
     uint64_t u = 0;
 
     res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
-    applyZSLUsageQuirk(camera3_stream::format, &u);
+    applyZSLUsageQuirk(camera_stream::format, &u);
     *usage = u;
     return res;
 }
@@ -801,7 +930,8 @@
     ALOGV("Stream %d: Buffer released", stream->getId());
     bool shouldFreeBuffer = false;
     status_t res = stream->mBufferManager->onBufferReleased(
-        stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
+        stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
+        &shouldFreeBuffer);
     if (res != OK) {
         ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
                 strerror(-res), res);
@@ -814,7 +944,7 @@
         stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
         if (buffer.get() != nullptr) {
             stream->mBufferManager->notifyBufferRemoved(
-                    stream->getId(), stream->getStreamSetId());
+                    stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
         }
     }
 }
@@ -832,7 +962,7 @@
         stream->onBuffersRemovedLocked(buffers);
         if (stream->mUseBufferManager) {
             stream->mBufferManager->onBuffersRemoved(stream->getId(),
-                    stream->getStreamSetId(), buffers.size());
+                    stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
         }
         ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
     }
@@ -957,6 +1087,99 @@
     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
 }
 
+void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
+        ANativeWindowBuffer* anwBuffer, int fence) {
+    // Derive the output file name
+    std::string fileExtension = "jpg";
+    char imageFileName[64];
+    time_t now = time(0);
+    tm *localTime = localtime(&now);
+    snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
+            1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
+            localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
+            timestamp, fileExtension.c_str());
+
+    // Lock the image for CPU read
+    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
+    void* mapped = nullptr;
+    base::unique_fd fenceFd(dup(fence));
+    status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
+            fenceFd.get());
+    if (res != OK) {
+        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
+        return;
+    }
+
+    // Figure out actual file size
+    auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
+    if (actualJpegSize == 0) {
+        actualJpegSize = mMaxSize;
+    }
+
+    // Output image data to file
+    std::string filePath = "/data/misc/cameraserver/";
+    filePath += imageFileName;
+    std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
+    if (!imageFile.is_open()) {
+        ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
+        graphicBuffer->unlock();
+        return;
+    }
+    imageFile.write((const char*)mapped, actualJpegSize);
+
+    graphicBuffer->unlock();
+}
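+
+// A minimal sketch of how the dump path above is enabled (the stream pointer
+// and mask value below are assumptions for the example): once a non-zero mask
+// has been installed through setImageDumpMask(), the buffer-return path writes
+// each BLOB/JFIF buffer to /data/misc/cameraserver/ before queueing it to the
+// consumer.
+//
+//     outputStream->setImageDumpMask(1);  // any non-zero value enables dumping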
+
+status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
+    Mutex::Autolock l(mLock);
+    if (batchSize == 0) {
+        ALOGE("%s: invalid batch size 0", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (mUseBufferManager) {
+        ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (!isVideoStream()) {
+        ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (camera_stream::max_buffers < batchSize) {
+        ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
+                camera_stream::max_buffers);
+        batchSize = camera_stream::max_buffers;
+    }
+
+    size_t defaultBatchSize = 1;
+    if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
+        ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
+                __FUNCTION__, defaultBatchSize, batchSize);
+        return INVALID_OPERATION;
+    }
+
+    return OK;
+}
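+
+// Illustrative usage sketch (the stream pointer and batch value below are
+// assumptions for the example): a caller setting up a high frame rate video
+// stream could enable batched dequeues like this, falling back to the default
+// per-buffer dequeue when the call is rejected.
+//
+//     sp<Camera3OutputStream> videoStream = /* a configured video stream */;
+//     if (videoStream->setBatchSize(4) != OK) {
+//         // Batching rejected (buffer manager in use, non-video stream, or the
+//         // batch size was already changed); the stream keeps its current
+//         // batch size (1 by default).
+//     }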
+
+void Camera3OutputStream::returnPrefetchedBuffersLocked() {
+    std::vector<Surface::BatchBuffer> batchedBuffers;
+
+    {
+        std::lock_guard<std::mutex> batchLock(mBatchLock);
+        if (mBatchedBuffers.size() != 0) {
+            ALOGW("%s: %zu extra prefetched buffers detected. Returning",
+                   __FUNCTION__, mBatchedBuffers.size());
+            batchedBuffers = std::move(mBatchedBuffers);
+        }
+    }
+
+    if (batchedBuffers.size() > 0) {
+        mConsumer->cancelBuffers(batchedBuffers);
+    }
+}
+
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index b4e49f9..ad03b53 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
 #define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
 
+#include <mutex>
 #include <utils/RefBase.h>
 #include <gui/IProducerListener.h>
 #include <gui/Surface.h>
@@ -47,6 +48,7 @@
     uint64_t combinedUsage;
     size_t totalBufferCount;
     bool isConfigured;
+    bool isMultiRes;
     explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             uint32_t w = 0,
@@ -55,7 +57,8 @@
             android_dataspace ds = HAL_DATASPACE_UNKNOWN,
             uint64_t usage = 0,
             size_t bufferCount = 0,
-            bool configured = false) :
+            bool configured = false,
+            bool multiRes = false) :
                 streamId(id),
                 streamSetId(setId),
                 width(w),
@@ -64,7 +67,8 @@
                 dataSpace(ds),
                 combinedUsage(usage),
                 totalBufferCount(bufferCount),
-                isConfigured(configured){}
+                isConfigured(configured),
+                isMultiRes(multiRes) {}
 };
 
 /**
@@ -81,10 +85,10 @@
      */
     Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
-
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -93,10 +97,10 @@
      */
     Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, size_t maxSize, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
-
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -104,9 +108,10 @@
      */
     Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
             uint64_t consumerUsage, android_dataspace dataSpace,
-            camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
+            camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
     virtual ~Camera3OutputStream();
 
@@ -206,23 +211,39 @@
             KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
 
     /**
+     * Set the batch size for buffer operations. The output stream will request
+     * buffers from the buffer queue on a batch basis. Currently only video
+     * streams are allowed to set a batch size. Batching is not supported if the
+     * stream is managed by the buffer manager (Surface group in the Java API),
+     * and the batch size cannot be changed on the fly while batched buffers are
+     * still cached in the stream. If the batch size is larger than the max
+     * dequeue count set by the camera HAL, the batch size will be capped at the
+     * max dequeue count.
+     */
+    virtual status_t setBatchSize(size_t batchSize = 1) override;
+
+    /**
      * Apply ZSL related consumer usage quirk.
      */
     static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
 
+    void setImageDumpMask(int mask) { mImageDumpMask = mask; }
+
   protected:
-    Camera3OutputStream(int id, camera3_stream_type_t type,
+    Camera3OutputStream(int id, camera_stream_type_t type,
             uint32_t width, uint32_t height, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+            android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID);
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
 
     /**
      * Note that we release the lock briefly in this function
      */
     virtual status_t returnBufferCheckedLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             bool output,
             const std::vector<size_t>& surface_ids,
@@ -290,14 +311,27 @@
     // Whether to drop valid buffers.
     bool mDropBuffers;
 
+    // The batch size for buffer operations (1 == no batching)
+    std::atomic_size_t mBatchSize = 1;
+
+    // Protects the batch state below; must be acquired after mLock
+    std::mutex mBatchLock;
+    // Prefetched buffers, ready to be handed out to the client
+    std::vector<Surface::BatchBuffer> mBatchedBuffers;
+    // ---- End of mBatchLock protected scope ----
+
     /**
      * Internal Camera3Stream interface
      */
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
+    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids);
 
+    virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;
+
     virtual status_t returnBufferLocked(
-            const camera3_stream_buffer &buffer,
+            const camera_stream_buffer &buffer,
             nsecs_t timestamp, const std::vector<size_t>& surface_ids);
 
     virtual status_t queueBufferToConsumer(sp<ANativeWindow>& consumer,
@@ -325,9 +359,16 @@
     // STATE_ABANDONED
     static bool shouldLogError(status_t res, StreamState state);
 
+    // Dump images to disk before returning to consumer
+    void dumpImageToDisk(nsecs_t timestamp, ANativeWindowBuffer* anwBuffer, int fence);
+
+    void returnPrefetchedBuffersLocked();
+
     static const int32_t kDequeueLatencyBinSize = 5; // in ms
     CameraLatencyHistogram mDequeueBufferLatency;
 
+    int mImageDumpMask = 0;
+
 }; // class Camera3OutputStream
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 7f5c87a..49f9f62 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -95,6 +95,19 @@
      * Query the physical camera id for the output stream.
      */
     virtual const String8& getPhysicalCameraId() const = 0;
+
+    /**
+     * Set the batch size for buffer operations. The output stream will request
+     * buffers from the buffer queue on a batch basis. Currently only video
+     * streams are allowed to set a batch size. Batching is not supported if the
+     * stream is managed by the buffer manager (Surface group in the Java API),
+     * and the batch size cannot be changed on the fly while batched buffers are
+     * still cached in the stream. If the batch size is larger than the max
+     * dequeue count set by the camera HAL, the batch size will be capped at the
+     * max dequeue count.
+     */
+    virtual status_t setBatchSize(size_t batchSize = 1) = 0;
 };
 
 // Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index eea5ef1..9f225d0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -182,7 +182,33 @@
         return;
     }
 
-    insertResultLocked(states, &captureResult, frameNumber);
+    // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
+    // and RotationAndCrop mappers.
+    std::set<uint32_t> keysToRemove;
+
+    auto iter = states.distortionMappers.find(states.cameraId.c_str());
+    if (iter != states.distortionMappers.end()) {
+        const auto& remappedKeys = iter->second.getRemappedKeys();
+        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+    }
+
+    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
+    keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+
+    auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
+    if (mapper != states.rotateAndCropMappers.end()) {
+        const auto& remappedKeys = iter->second.getRemappedKeys();
+        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
+    }
+
+    for (uint32_t key : keysToRemove) {
+        captureResult.mMetadata.erase(key);
+    }
+
+    // Send the partial result only if any metadata entries remain
+    if (captureResult.mMetadata.entryCount() > 0) {
+        insertResultLocked(states, &captureResult, frameNumber);
+    }
 }
 
 void sendCaptureResult(
@@ -397,6 +423,7 @@
     InFlightRequestMap& inflightMap = states.inflightMap;
     const InFlightRequest &request = inflightMap.valueAt(idx);
     const uint32_t frameNumber = inflightMap.keyAt(idx);
+    SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;
 
     nsecs_t sensorTimestamp = request.sensorTimestamp;
     nsecs_t shutterTimestamp = request.shutterTimestamp;
@@ -416,7 +443,7 @@
 
         ATRACE_ASYNC_END("frame capture", frameNumber);
 
-        // Sanity check - if sensor timestamp matches shutter timestamp in the
+        // Validation check - if sensor timestamp matches shutter timestamp in the
         // case of request having callback.
         if (request.hasCallback && request.requestStatus == OK &&
                 sensorTimestamp != shutterTimestamp) {
@@ -433,7 +460,9 @@
         returnOutputBuffers(
             states.useHalBufManager, states.listener,
             request.pendingOutputBuffers.array(),
-            request.pendingOutputBuffers.size(), 0, /*timestampIncreasing*/true,
+            request.pendingOutputBuffers.size(), 0,
+            /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+            /*timestampIncreasing*/true,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy);
 
@@ -446,6 +475,8 @@
             states.lastCompletedRegularFrameNumber = frameNumber;
         }
 
+        sessionStatsBuilder.incResultCounter(request.skipResultMetadata);
+
         removeInFlightMapEntryLocked(states, idx);
         ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
     }
@@ -453,7 +484,21 @@
     states.inflightIntf.checkInflightMapLengthLocked();
 }
 
-void processCaptureResult(CaptureOutputStates& states, const camera3_capture_result *result) {
+// Erase the subset of physicalCameraIds that contains id
+bool erasePhysicalCameraIdSet(
+        std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
+    bool found = false;
+    for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
+        if (iter->count(id) == 1) {
+            physicalCameraIds.erase(iter);
+            found = true;
+            break;
+        }
+    }
+    return found;
+}
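+
+// Worked example of the helper above (the id values are assumptions for the
+// example only):
+//
+//     std::set<std::set<String8>> ids = {{String8("2"), String8("3")},
+//                                        {String8("4")}};
+//     erasePhysicalCameraIdSet(ids, String8("3"));  // erases the whole
+//                                                   // {"2", "3"} subset, true
+//     erasePhysicalCameraIdSet(ids, String8("2"));  // no remaining subset
+//                                                   // contains "2", false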
+
+void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
     ATRACE_CALL();
 
     status_t res;
@@ -552,12 +597,10 @@
             }
             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                 String8 physicalId(result->physcam_ids[i]);
-                std::set<String8>::iterator cameraIdIter =
-                        request.physicalCameraIds.find(physicalId);
-                if (cameraIdIter != request.physicalCameraIds.end()) {
-                    request.physicalCameraIds.erase(cameraIdIter);
-                } else {
-                    SET_ERR("Total result for frame %d has already returned for camera %s",
+                bool validPhysicalCameraMetadata =
+                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
+                if (!validPhysicalCameraMetadata) {
+                    SET_ERR("Unexpected total result for frame %d camera %s",
                             frameNumber, physicalId.c_str());
                     return;
                 }
@@ -602,7 +645,7 @@
         if (shutterTimestamp != 0) {
             returnAndRemovePendingOutputBuffers(
                 states.useHalBufManager, states.listener,
-                request);
+                request, states.sessionStatsBuilder);
         }
 
         if (result->result != NULL && !isPartialResult) {
@@ -657,7 +700,7 @@
     using hardware::camera::device::V3_2::BufferStatus;
     std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
     BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
-    camera3_capture_result r;
+    camera_capture_result r;
     status_t res;
     r.frame_number = result.frameNumber;
 
@@ -696,7 +739,7 @@
     r.physcam_ids = physCamIds.data();
     r.physcam_metadata = phyCamMetadatas.data();
 
-    std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+    std::vector<camera_stream_buffer_t> outputBuffers(result.outputBuffers.size());
     std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
     for (size_t i = 0; i < result.outputBuffers.size(); i++) {
         auto& bDst = outputBuffers[i];
@@ -757,7 +800,7 @@
     r.num_output_buffers = outputBuffers.size();
     r.output_buffers = outputBuffers.data();
 
-    camera3_stream_buffer_t inputBuffer;
+    camera_stream_buffer_t inputBuffer;
     if (result.inputBuffer.streamId == -1) {
         r.input_buffer = nullptr;
     } else {
@@ -798,8 +841,9 @@
 void returnOutputBuffers(
         bool useHalBufManager,
         sp<NotificationListener> listener,
-        const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
-        nsecs_t timestamp, bool timestampIncreasing,
+        const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
+        nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
+        SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing,
         const SurfaceMap& outputSurfaces,
         const CaptureResultExtras &inResultExtras,
         ERROR_BUF_STRATEGY errorBufStrategy) {
@@ -810,7 +854,7 @@
         int streamId = stream->getId();
 
         // Call notify(ERROR_BUFFER) if necessary.
-        if (outputBuffers[i].status == CAMERA3_BUFFER_STATUS_ERROR &&
+        if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                 errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
             if (listener != nullptr) {
                 CaptureResultExtras extras = inResultExtras;
@@ -827,6 +871,10 @@
                 // has not got a output buffer handle filled yet. This is though illegal if HAL
                 // buffer management API is not being used.
                 ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
+            } else {
+                if (requested) {
+                    sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
+                }
             }
             continue;
         }
@@ -836,7 +884,7 @@
 
         // Do not return the buffer if the buffer status is error, and the error
         // buffer strategy is CACHE.
-        if (outputBuffers[i].status != CAMERA3_BUFFER_STATUS_ERROR ||
+        if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                 errorBufStrategy != ERROR_BUF_CACHE) {
             if (it != outputSurfaces.end()) {
                 res = stream->returnBuffer(
@@ -850,20 +898,33 @@
         }
         // Note: stream may be deallocated at this point, if this buffer was
         // the last reference to it.
+        bool dropped = false;
         if (res == NO_INIT || res == DEAD_OBJECT) {
             ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+            sessionStatsBuilder.stopCounter(streamId);
         } else if (res != OK) {
             ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
+            dropped = true;
+        } else {
+            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
+                dropped = true;
+            }
+        }
+        if (requested) {
+            nsecs_t bufferTimeNs = systemTime();
+            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+            sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
         }
 
         // Long processing consumers can cause returnBuffer timeout for shared stream
         // If that happens, cancel the buffer and send a buffer error to client
         if (it != outputSurfaces.end() && res == TIMED_OUT &&
-                outputBuffers[i].status == CAMERA3_BUFFER_STATUS_OK) {
+                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
             // cancel the buffer
-            camera3_stream_buffer_t sb = outputBuffers[i];
-            sb.status = CAMERA3_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
+            camera_stream_buffer_t sb = outputBuffers[i];
+            sb.status = CAMERA_BUFFER_STATUS_ERROR;
+            stream->returnBuffer(sb, /*timestamp*/0,
+                    timestampIncreasing, std::vector<size_t> (),
                     inResultExtras.frameNumber);
 
             if (listener != nullptr) {
@@ -878,12 +939,14 @@
 }
 
 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
-        sp<NotificationListener> listener, InFlightRequest& request) {
+        sp<NotificationListener> listener, InFlightRequest& request,
+        SessionStatsBuilder& sessionStatsBuilder) {
     bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
     returnOutputBuffers(useHalBufManager, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
-            request.shutterTimestamp, timestampIncreasing,
+            request.shutterTimestamp, /*requested*/true,
+            request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy);
 
@@ -891,7 +954,7 @@
     for (auto iter = request.pendingOutputBuffers.begin();
             iter != request.pendingOutputBuffers.end(); ) {
         if (request.errorBufStrategy != ERROR_BUF_CACHE ||
-                iter->status != CAMERA3_BUFFER_STATUS_ERROR) {
+                iter->status != CAMERA_BUFFER_STATUS_ERROR) {
             iter = request.pendingOutputBuffers.erase(iter);
         } else {
             iter++;
@@ -899,7 +962,7 @@
     }
 }
 
-void notifyShutter(CaptureOutputStates& states, const camera3_shutter_msg_t &msg) {
+void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
     ATRACE_CALL();
     ssize_t idx;
 
@@ -966,7 +1029,7 @@
                     r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
             }
             returnAndRemovePendingOutputBuffers(
-                    states.useHalBufManager, states.listener, r);
+                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
 
             removeInFlightRequestIfReadyLocked(states, idx);
         }
@@ -977,26 +1040,26 @@
     }
 }
 
-void notifyError(CaptureOutputStates& states, const camera3_error_msg_t &msg) {
+void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
     ATRACE_CALL();
     // Map camera HAL error codes to ICameraDeviceCallback error codes
     // Index into this with the HAL error code
-    static const int32_t halErrorMap[CAMERA3_MSG_NUM_ERRORS] = {
+    static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
         // 0 = Unused error code
         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
-        // 1 = CAMERA3_MSG_ERROR_DEVICE
+        // 1 = CAMERA_MSG_ERROR_DEVICE
         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
-        // 2 = CAMERA3_MSG_ERROR_REQUEST
+        // 2 = CAMERA_MSG_ERROR_REQUEST
         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
-        // 3 = CAMERA3_MSG_ERROR_RESULT
+        // 3 = CAMERA_MSG_ERROR_RESULT
         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
-        // 4 = CAMERA3_MSG_ERROR_BUFFER
+        // 4 = CAMERA_MSG_ERROR_BUFFER
         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
     };
 
     int32_t errorCode =
             ((msg.error_code >= 0) &&
-                    (msg.error_code < CAMERA3_MSG_NUM_ERRORS)) ?
+                    (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
             halErrorMap[msg.error_code] :
             hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
 
@@ -1032,14 +1095,14 @@
                             errorCode) {
                         if (physicalCameraId.size() > 0) {
                             String8 cameraId(physicalCameraId);
-                            auto iter = r.physicalCameraIds.find(cameraId);
-                            if (iter == r.physicalCameraIds.end()) {
+                            bool validPhysicalCameraId =
+                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
+                            if (!validPhysicalCameraId) {
                                 ALOGE("%s: Reported result failure for physical camera device: %s "
                                         " which is not part of the respective request!",
                                         __FUNCTION__, cameraId.string());
                                 break;
                             }
-                            r.physicalCameraIds.erase(iter);
                             resultExtras.errorPhysicalCameraId = physicalCameraId;
                             physicalDeviceResultError = true;
                         }
@@ -1085,13 +1148,13 @@
     }
 }
 
-void notify(CaptureOutputStates& states, const camera3_notify_msg *msg) {
+void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
     switch (msg->type) {
-        case CAMERA3_MSG_ERROR: {
+        case CAMERA_MSG_ERROR: {
             notifyError(states, msg->message.error);
             break;
         }
-        case CAMERA3_MSG_SHUTTER: {
+        case CAMERA_MSG_SHUTTER: {
             notifyShutter(states, msg->message.shutter);
             break;
         }
@@ -1107,10 +1170,10 @@
     using android::hardware::camera::device::V3_2::ErrorCode;
 
     ATRACE_CALL();
-    camera3_notify_msg m;
+    camera_notify_msg m;
     switch (msg.type) {
         case MsgType::ERROR:
-            m.type = CAMERA3_MSG_ERROR;
+            m.type = CAMERA_MSG_ERROR;
             m.message.error.frame_number = msg.msg.error.frameNumber;
             if (msg.msg.error.errorStreamId >= 0) {
                 sp<Camera3StreamInterface> stream =
@@ -1126,21 +1189,21 @@
             }
             switch (msg.msg.error.errorCode) {
                 case ErrorCode::ERROR_DEVICE:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+                    m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
                     break;
                 case ErrorCode::ERROR_REQUEST:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+                    m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
                     break;
                 case ErrorCode::ERROR_RESULT:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
+                    m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
                     break;
                 case ErrorCode::ERROR_BUFFER:
-                    m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+                    m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
                     break;
             }
             break;
         case MsgType::SHUTTER:
-            m.type = CAMERA3_MSG_SHUTTER;
+            m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
             m.message.shutter.timestamp = msg.msg.shutter.timestamp;
             break;
@@ -1218,13 +1281,13 @@
             return;
         }
 
+        bufRet.streamId = streamId;
         if (outputStream->isAbandoned()) {
             bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
             allReqsSucceeds = false;
             continue;
         }
 
-        bufRet.streamId = streamId;
         size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
         uint32_t numBuffersRequested = bufReq.numBuffersRequested;
         size_t totalHandout = handOutBufferCount + numBuffersRequested;
@@ -1241,11 +1304,11 @@
 
         hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
         bool currentReqSucceeds = true;
-        std::vector<camera3_stream_buffer_t> streamBuffers(numBuffersRequested);
+        std::vector<camera_stream_buffer_t> streamBuffers(numBuffersRequested);
         size_t numAllocatedBuffers = 0;
         size_t numPushedInflightBuffers = 0;
         for (size_t b = 0; b < numBuffersRequested; b++) {
-            camera3_stream_buffer_t& sb = streamBuffers[b];
+            camera_stream_buffer_t& sb = streamBuffers[b];
             // Since this method can run concurrently with request thread
             // We need to update the wait duration everytime we call getbuffer
             nsecs_t waitDuration =  states.reqBufferIntf.getWaitDuration();
@@ -1255,6 +1318,7 @@
                     ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
                             __FUNCTION__, streamId, strerror(-res), res);
                     bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
+                    states.sessionStatsBuilder.stopCounter(streamId);
                 } else {
                     ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
                             __FUNCTION__, streamId, strerror(-res), res);
@@ -1315,12 +1379,13 @@
                 }
             }
             for (size_t b = 0; b < numAllocatedBuffers; b++) {
-                camera3_stream_buffer_t& sb = streamBuffers[b];
+                camera_stream_buffer_t& sb = streamBuffers[b];
                 sb.acquire_fence = -1;
-                sb.status = CAMERA3_BUFFER_STATUS_ERROR;
+                sb.status = CAMERA_BUFFER_STATUS_ERROR;
             }
             returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                    streamBuffers.data(), numAllocatedBuffers, 0);
+                    streamBuffers.data(), numAllocatedBuffers, 0, /*requested*/false,
+                    /*requestTimeNs*/0, states.sessionStatsBuilder);
         }
     }
 
@@ -1354,9 +1419,9 @@
             continue;
         }
 
-        camera3_stream_buffer_t streamBuffer;
+        camera_stream_buffer_t streamBuffer;
         streamBuffer.buffer = buffer;
-        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
         streamBuffer.acquire_fence = -1;
         streamBuffer.release_fence = -1;
 
@@ -1377,22 +1442,23 @@
         }
         streamBuffer.stream = stream->asHalStream();
         returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                &streamBuffer, /*size*/1, /*timestamp*/ 0);
+                &streamBuffer, /*size*/1, /*timestamp*/ 0, /*requested*/false,
+                /*requestTimeNs*/0, states.sessionStatsBuilder);
     }
 }
 
 void flushInflightRequests(FlushInflightReqStates& states) {
     ATRACE_CALL();
-    { // First return buffers cached in mInFlightMap
+    { // First return buffers cached in inFlightMap
         std::lock_guard<std::mutex> l(states.inflightLock);
         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
             const InFlightRequest &request = states.inflightMap.valueAt(idx);
             returnOutputBuffers(
                 states.useHalBufManager, states.listener,
                 request.pendingOutputBuffers.array(),
-                request.pendingOutputBuffers.size(), 0,
-                /*timestampIncreasing*/true, request.outputSurfaces,
-                request.resultExtras, request.errorBufStrategy);
+                request.pendingOutputBuffers.size(), 0, /*requested*/true,
+                request.requestTimeNs, states.sessionStatsBuilder, /*timestampIncreasing*/true,
+                request.outputSurfaces, request.resultExtras, request.errorBufStrategy);
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1450,28 +1516,29 @@
         int32_t frameNumber = std::get<1>(tuple);
         buffer_handle_t* buffer = std::get<2>(tuple);
 
-        camera3_stream_buffer_t streamBuffer;
+        camera_stream_buffer_t streamBuffer;
         streamBuffer.buffer = buffer;
-        streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
         streamBuffer.acquire_fence = -1;
         streamBuffer.release_fence = -1;
 
         for (auto& stream : streams) {
             if (streamId == stream->getId()) {
                 // Return buffer to deleted stream
-                camera3_stream* halStream = stream->asHalStream();
+                camera_stream* halStream = stream->asHalStream();
                 streamBuffer.stream = halStream;
                 switch (halStream->stream_type) {
-                    case CAMERA3_STREAM_OUTPUT:
+                    case CAMERA_STREAM_OUTPUT:
                         res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
-                                /*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
+                                /*timestampIncreasing*/true,
+                                std::vector<size_t> (), frameNumber);
                         if (res != OK) {
                             ALOGE("%s: Can't return output buffer for frame %d to"
                                   " stream %d: %s (%d)",  __FUNCTION__,
                                   frameNumber, streamId, strerror(-res), res);
                         }
                         break;
-                    case CAMERA3_STREAM_INPUT:
+                    case CAMERA_STREAM_INPUT:
                         res = stream->returnInputBuffer(streamBuffer);
                         if (res != OK) {
                             ALOGE("%s: Can't return input buffer for frame %d to"
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 9946312..142889a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -33,6 +33,7 @@
 #include "device3/InFlightRequest.h"
 #include "device3/Camera3Stream.h"
 #include "device3/Camera3OutputStreamInterface.h"
+#include "utils/SessionStatsBuilder.h"
 #include "utils/TagMonitor.h"
 
 namespace android {
@@ -41,6 +42,66 @@
 
 namespace camera3 {
 
+    typedef struct camera_stream_configuration {
+        uint32_t num_streams;
+        camera_stream_t **streams;
+        uint32_t operation_mode;
+        bool input_is_multi_resolution;
+    } camera_stream_configuration_t;
+
+    typedef struct camera_capture_request {
+        uint32_t frame_number;
+        const camera_metadata_t *settings;
+        camera_stream_buffer_t *input_buffer;
+        uint32_t num_output_buffers;
+        const camera_stream_buffer_t *output_buffers;
+        uint32_t num_physcam_settings;
+        const char **physcam_id;
+        const camera_metadata_t **physcam_settings;
+        int32_t input_width;
+        int32_t input_height;
+    } camera_capture_request_t;
+
+    typedef struct camera_capture_result {
+        uint32_t frame_number;
+        const camera_metadata_t *result;
+        uint32_t num_output_buffers;
+        const camera_stream_buffer_t *output_buffers;
+        const camera_stream_buffer_t *input_buffer;
+        uint32_t partial_result;
+        uint32_t num_physcam_metadata;
+        const char **physcam_ids;
+        const camera_metadata_t **physcam_metadata;
+    } camera_capture_result_t;
+
+    typedef struct camera_shutter_msg {
+        uint32_t frame_number;
+        uint64_t timestamp;
+    } camera_shutter_msg_t;
+
+    typedef struct camera_error_msg {
+        uint32_t frame_number;
+        camera_stream_t *error_stream;
+        int error_code;
+    } camera_error_msg_t;
+
+    typedef enum camera_error_msg_code {
+        CAMERA_MSG_ERROR_DEVICE = 1,
+        CAMERA_MSG_ERROR_REQUEST = 2,
+        CAMERA_MSG_ERROR_RESULT = 3,
+        CAMERA_MSG_ERROR_BUFFER = 4,
+        CAMERA_MSG_NUM_ERRORS
+    } camera_error_msg_code_t;
+
+    typedef struct camera_notify_msg {
+        int type;
+
+        union {
+            camera_error_msg_t error;
+            camera_shutter_msg_t shutter;
+        } message;
+    } camera_notify_msg_t;
+
     /**
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
@@ -50,8 +111,9 @@
     void returnOutputBuffers(
             bool useHalBufManager,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
-            const camera3_stream_buffer_t *outputBuffers,
-            size_t numBuffers, nsecs_t timestamp, bool timestampIncreasing = true,
+            const camera_stream_buffer_t *outputBuffers,
+            size_t numBuffers, nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
+            SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
             // Used to send buffer error callback when failing to return buffer
@@ -64,7 +126,7 @@
     void returnAndRemovePendingOutputBuffers(
             bool useHalBufManager,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
-            InFlightRequest& request);
+            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
 
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
@@ -72,8 +134,8 @@
         const String8& cameraId;
         std::mutex& inflightLock;
         int64_t& lastCompletedRegularFrameNumber;
-        int64_t& lastCompletedZslFrameNumber;
         int64_t& lastCompletedReprocessFrameNumber;
+        int64_t& lastCompletedZslFrameNumber;
         InFlightRequestMap& inflightMap; // end of inflightLock scope
         std::mutex& outputLock;
         std::list<CaptureResult>& resultQueue;
@@ -98,6 +160,7 @@
         TagMonitor& tagMonitor;
         sp<Camera3Stream> inputStream;
         StreamSet& outputStreams;
+        SessionStatsBuilder& sessionStatsBuilder;
         sp<NotificationListener> listener;
         SetErrorInterface& setErrIntf;
         InflightRequestUpdateInterface& inflightIntf;
@@ -121,6 +184,7 @@
         std::mutex& reqBufferLock; // lock to serialize request buffer calls
         const bool useHalBufManager;
         StreamSet& outputStreams;
+        SessionStatsBuilder& sessionStatsBuilder;
         SetErrorInterface& setErrIntf;
         BufferRecordsInterface& bufferRecordsIntf;
         RequestBufferInterface& reqBufferIntf;
@@ -134,6 +198,7 @@
         const String8& cameraId;
         const bool useHalBufManager;
         StreamSet& outputStreams;
+        SessionStatsBuilder& sessionStatsBuilder;
         BufferRecordsInterface& bufferRecordsIntf;
     };
 
@@ -149,6 +214,7 @@
         InflightRequestUpdateInterface& inflightIntf;
         BufferRecordsInterface& bufferRecordsIntf;
         FlushBufferInterface& flushBufferIntf;
+        SessionStatsBuilder& sessionStatsBuilder;
     };
 
     void flushInflightRequests(FlushInflightReqStates& states);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 86b45cb..15cf7f4 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -30,11 +30,12 @@
         const std::vector<sp<Surface>>& surfaces,
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
-        camera3_stream_rotation_t rotation,
+        camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool useHalBufManager) :
-        Camera3OutputStream(id, CAMERA3_STREAM_OUTPUT, width, height,
-                            format, dataSpace, rotation, physicalCameraId,
+        Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
+                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             consumerUsage, timestampOffset, setId),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
@@ -65,7 +66,7 @@
         }
     }
 
-    res = mStreamSplitter->connect(initialSurfaces, usage, mUsage, camera3_stream::max_buffers,
+    res = mStreamSplitter->connect(initialSurfaces, usage, mUsage, camera_stream::max_buffers,
             getWidth(), getHeight(), getFormat(), &mConsumer);
     if (res != OK) {
         ALOGE("%s: Failed to connect to stream splitter: %s(%d)",
@@ -157,7 +158,7 @@
     return ret;
 }
 
-status_t Camera3SharedOutputStream::getBufferLocked(camera3_stream_buffer *buffer,
+status_t Camera3SharedOutputStream::getBufferLocked(camera_stream_buffer *buffer,
         const std::vector<size_t>& surfaceIds) {
     ANativeWindowBuffer* anb;
     int fenceFd = -1;
@@ -180,7 +181,7 @@
      * in which case we reassign it to acquire_fence
      */
     handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
-                        /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
+                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
 
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index e645e05..4b6341b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -36,8 +36,9 @@
     Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
             uint32_t width, uint32_t height, int format,
             uint64_t consumerUsage, android_dataspace dataSpace,
-            camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
+            camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false);
 
@@ -116,7 +117,7 @@
     /**
      * Internal Camera3Stream interface
      */
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
+    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids);
 
     virtual status_t queueBufferToConsumer(sp<ANativeWindow>& consumer,
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 20f6168..02b6585 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -37,20 +37,22 @@
     }
 }
 
-Camera3Stream* Camera3Stream::cast(camera3_stream *stream) {
+Camera3Stream* Camera3Stream::cast(camera_stream *stream) {
     return static_cast<Camera3Stream*>(stream);
 }
 
-const Camera3Stream* Camera3Stream::cast(const camera3_stream *stream) {
+const Camera3Stream* Camera3Stream::cast(const camera_stream *stream) {
     return static_cast<const Camera3Stream*>(stream);
 }
 
 Camera3Stream::Camera3Stream(int id,
-        camera3_stream_type type,
+        camera_stream_type type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
-        android_dataspace dataSpace, camera3_stream_rotation_t rotation,
-        const String8& physicalCameraId, int setId) :
-    camera3_stream(),
+        android_dataspace dataSpace, camera_stream_rotation_t rotation,
+        const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        int setId, bool isMultiResolution) :
+    camera_stream(),
     mId(id),
     mSetId(setId),
     mName(String8::format("Camera3Stream[%d]", id)),
@@ -73,17 +75,18 @@
     mDataSpaceOverridden(false),
     mOriginalDataSpace(dataSpace),
     mPhysicalCameraId(physicalCameraId),
-    mLastTimestamp(0) {
+    mLastTimestamp(0),
+    mIsMultiResolution(isMultiResolution) {
 
-    camera3_stream::stream_type = type;
-    camera3_stream::width = width;
-    camera3_stream::height = height;
-    camera3_stream::format = format;
-    camera3_stream::data_space = dataSpace;
-    camera3_stream::rotation = rotation;
-    camera3_stream::max_buffers = 0;
-    camera3_stream::priv = NULL;
-    camera3_stream::physical_camera_id = mPhysicalCameraId.string();
+    camera_stream::stream_type = type;
+    camera_stream::width = width;
+    camera_stream::height = height;
+    camera_stream::format = format;
+    camera_stream::data_space = dataSpace;
+    camera_stream::rotation = rotation;
+    camera_stream::max_buffers = 0;
+    camera_stream::physical_camera_id = mPhysicalCameraId.string();
+    camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -100,20 +103,28 @@
     return mSetId;
 }
 
+int Camera3Stream::getHalStreamGroupId() const {
+    return mIsMultiResolution ? mSetId : -1;
+}
+
+bool Camera3Stream::isMultiResolution() const {
+    return mIsMultiResolution;
+}
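+
+// Note on the two accessors above: a stream created with isMultiResolution ==
+// true reports its stream set id as its HAL stream group id; otherwise
+// getHalStreamGroupId() returns -1 (no group).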
+
 uint32_t Camera3Stream::getWidth() const {
-    return camera3_stream::width;
+    return camera_stream::width;
 }
 
 uint32_t Camera3Stream::getHeight() const {
-    return camera3_stream::height;
+    return camera_stream::height;
 }
 
 int Camera3Stream::getFormat() const {
-    return camera3_stream::format;
+    return camera_stream::format;
 }
 
 android_dataspace Camera3Stream::getDataSpace() const {
-    return camera3_stream::data_space;
+    return camera_stream::data_space;
 }
 
 uint64_t Camera3Stream::getUsage() const {
@@ -152,6 +163,10 @@
     return mPhysicalCameraId;
 }
 
+int Camera3Stream::getMaxHalBuffers() const {
+    return camera_stream::max_buffers;
+}
+
 void Camera3Stream::setOfflineProcessingSupport(bool support) {
     mSupportOfflineProcessing = support;
 }
@@ -229,7 +244,7 @@
     return res;
 }
 
-camera3_stream* Camera3Stream::startConfiguration() {
+camera_stream* Camera3Stream::startConfiguration() {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
     status_t res;
@@ -260,9 +275,9 @@
     }
 
     mOldUsage = mUsage;
-    mOldMaxBuffers = camera3_stream::max_buffers;
-    mOldFormat = camera3_stream::format;
-    mOldDataSpace = camera3_stream::data_space;
+    mOldMaxBuffers = camera_stream::max_buffers;
+    mOldFormat = camera_stream::format;
+    mOldDataSpace = camera_stream::data_space;
 
     res = getEndpointUsage(&mUsage);
     if (res != OK) {
@@ -330,16 +345,17 @@
     // Register for idle tracking
     sp<StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != 0 && mStatusId == StatusTracker::NO_STATUS_ID) {
-        mStatusId = statusTracker->addComponent();
+        std::string name = std::string("Stream ") + std::to_string(mId);
+        mStatusId = statusTracker->addComponent(name.c_str());
     }
 
     // Check if the stream configuration is unchanged, and skip reallocation if
-    // so. As documented in hardware/camera3.h:configure_streams().
+    // so.
     if (mState == STATE_IN_RECONFIG &&
             mOldUsage == mUsage &&
-            mOldMaxBuffers == camera3_stream::max_buffers &&
-            mOldDataSpace == camera3_stream::data_space &&
-            mOldFormat == camera3_stream::format) {
+            mOldMaxBuffers == camera_stream::max_buffers &&
+            mOldDataSpace == camera_stream::data_space &&
+            mOldFormat == camera_stream::format) {
         mState = STATE_CONFIGURED;
         return OK;
     }
@@ -398,7 +414,7 @@
     }
 
     mUsage = mOldUsage;
-    camera3_stream::max_buffers = mOldMaxBuffers;
+    camera_stream::max_buffers = mOldMaxBuffers;
 
     mState = ((mState == STATE_IN_RECONFIG) || (mState == STATE_IN_IDLE)) ? STATE_CONFIGURED :
             STATE_CONSTRUCTED;
@@ -413,6 +429,13 @@
     return mStreamUnpreparable;
 }
 
+void Camera3Stream::markUnpreparable() {
+    ATRACE_CALL();
+
+    Mutex::Autolock l(mLock);
+    mStreamUnpreparable = true;
+}
+
 status_t Camera3Stream::startPrepare(int maxCount, bool blockRequest) {
     ATRACE_CALL();
 
@@ -458,7 +481,7 @@
 
     mLastMaxCount = bufferCount;
 
-    mPreparedBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
+    mPreparedBuffers.insertAt(camera_stream_buffer_t(), /*index*/0, bufferCount);
     mPreparedBufferIdx = 0;
 
     mState = STATE_PREPARING;
@@ -533,7 +556,7 @@
     // they weren't filled.
     for (size_t i = 0; i < mPreparedBufferIdx; i++) {
         mPreparedBuffers.editItemAt(i).release_fence = -1;
-        mPreparedBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
+        mPreparedBuffers.editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
         returnBufferLocked(mPreparedBuffers[i], 0);
     }
     mPreparedBuffers.clear();
@@ -599,7 +622,7 @@
     return OK;
 }
 
-status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer,
+status_t Camera3Stream::getBuffer(camera_stream_buffer *buffer,
         nsecs_t waitBufferTimeout,
         const std::vector<size_t>& surface_ids) {
     ATRACE_HFR_CALL();
@@ -618,9 +641,10 @@
     }
 
     // Wait for new buffer returned back if we are running into the limit.
-    if (getHandoutOutputBufferCountLocked() == camera3_stream::max_buffers) {
+    size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
+    if (numOutstandingBuffers == camera_stream::max_buffers) {
         ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
-                        __FUNCTION__, camera3_stream::max_buffers);
+                        __FUNCTION__, camera_stream::max_buffers);
         nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
         if (waitBufferTimeout < kWaitForBufferDuration) {
             waitBufferTimeout = kWaitForBufferDuration;
@@ -632,10 +656,18 @@
             if (res == TIMED_OUT) {
                 ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
                         __FUNCTION__, waitBufferTimeout / 1000000LL,
-                        camera3_stream::max_buffers);
+                        camera_stream::max_buffers);
             }
             return res;
         }
+
+        size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
+        if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
+            ALOGE("%s: outstanding buffer count went from %zu to %zu; "
+                    "getBuffer(s) calls must not run in parallel!", __FUNCTION__,
+                    numOutstandingBuffers, updatedNumOutstandingBuffers);
+            return INVALID_OPERATION;
+        }
     }
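// --- Editorial note (not part of the patch) ---------------------------------
// Rationale for the post-wait check added above: mOutputBufferReturnedSignal
// only fires when a buffer is returned, so the outstanding count observed
// after the wait should be strictly lower than before it. If it is not,
// another getBuffer()/getBuffers() call raced this one and consumed the freed
// slot, which the code reports as INVALID_OPERATION instead of retrying.
// -----------------------------------------------------------------------------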
 
     res = getBufferLocked(buffer, surface_ids);
@@ -650,7 +682,7 @@
     return res;
 }
 
-bool Camera3Stream::isOutstandingBuffer(const camera3_stream_buffer &buffer) const{
+bool Camera3Stream::isOutstandingBuffer(const camera_stream_buffer &buffer) const{
     if (buffer.buffer == nullptr) {
         return false;
     }
@@ -665,7 +697,7 @@
     return false;
 }
 
-void Camera3Stream::removeOutstandingBuffer(const camera3_stream_buffer &buffer) {
+void Camera3Stream::removeOutstandingBuffer(const camera_stream_buffer &buffer) {
     if (buffer.buffer == nullptr) {
         return;
     }
@@ -680,7 +712,7 @@
     }
 }
 
-status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
+status_t Camera3Stream::returnBuffer(const camera_stream_buffer &buffer,
         nsecs_t timestamp, bool timestampIncreasing,
          const std::vector<size_t>& surface_ids, uint64_t frameNumber) {
     ATRACE_HFR_CALL();
@@ -695,11 +727,11 @@
     removeOutstandingBuffer(buffer);
 
     // Buffer status may be changed, so make a copy of the stream_buffer struct.
-    camera3_stream_buffer b = buffer;
+    camera_stream_buffer b = buffer;
     if (timestampIncreasing && timestamp != 0 && timestamp <= mLastTimestamp) {
         ALOGE("%s: Stream %d: timestamp %" PRId64 " is not increasing. Prev timestamp %" PRId64,
                 __FUNCTION__, mId, timestamp, mLastTimestamp);
-        b.status = CAMERA3_BUFFER_STATUS_ERROR;
+        b.status = CAMERA_BUFFER_STATUS_ERROR;
     }
     mLastTimestamp = timestamp;
 
@@ -723,11 +755,16 @@
     return res;
 }
 
-status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer, bool respectHalLimit) {
+status_t Camera3Stream::getInputBuffer(camera_stream_buffer *buffer,
+        Size* size, bool respectHalLimit) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
     status_t res = OK;
 
+    if (size == nullptr) {
+        ALOGE("%s: size must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
     // This function should be only called when the stream is configured already.
     if (mState != STATE_CONFIGURED) {
         ALOGE("%s: Stream %d: Can't get input buffers if stream is not in CONFIGURED state %d",
@@ -736,9 +773,9 @@
     }
 
     // Wait for new buffer returned back if we are running into the limit.
-    if (getHandoutInputBufferCountLocked() == camera3_stream::max_buffers && respectHalLimit) {
+    if (getHandoutInputBufferCountLocked() == camera_stream::max_buffers && respectHalLimit) {
         ALOGV("%s: Already dequeued max input buffers (%d), wait for next returned one.",
-                __FUNCTION__, camera3_stream::max_buffers);
+                __FUNCTION__, camera_stream::max_buffers);
         res = mInputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
         if (res != OK) {
             if (res == TIMED_OUT) {
@@ -749,7 +786,7 @@
         }
     }
 
-    res = getInputBufferLocked(buffer);
+    res = getInputBufferLocked(buffer, size);
     if (res == OK) {
         fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
         if (buffer->buffer) {
@@ -761,7 +798,7 @@
     return res;
 }
 
-status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) {
+status_t Camera3Stream::returnInputBuffer(const camera_stream_buffer &buffer) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
@@ -803,7 +840,7 @@
 }
 
 void Camera3Stream::fireBufferListenersLocked(
-        const camera3_stream_buffer& buffer, bool acquired, bool output, nsecs_t timestamp,
+        const camera_stream_buffer& buffer, bool acquired, bool output, nsecs_t timestamp,
         uint64_t frameNumber) {
     List<wp<Camera3StreamBufferListener> >::iterator it, end;
 
@@ -812,7 +849,7 @@
     Camera3StreamBufferListener::BufferInfo info =
         Camera3StreamBufferListener::BufferInfo();
     info.mOutput = output;
-    info.mError = (buffer.status == CAMERA3_BUFFER_STATUS_ERROR);
+    info.mError = (buffer.status == CAMERA_BUFFER_STATUS_ERROR);
     info.mFrameNumber = frameNumber;
     info.mTimestamp = timestamp;
     info.mStreamId = getId();
@@ -882,22 +919,28 @@
             "      Latency histogram for wait on max_buffers");
 }
 
-status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *,
+status_t Camera3Stream::getBufferLocked(camera_stream_buffer *,
         const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
     return INVALID_OPERATION;
 }
-status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &,
+
+status_t Camera3Stream::getBuffersLocked(std::vector<OutstandingBuffer>*) {
+    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
                                            nsecs_t, const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
     return INVALID_OPERATION;
 }
-status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) {
+status_t Camera3Stream::getInputBufferLocked(camera_stream_buffer *, Size *) {
     ALOGE("%s: This type of stream does not support input", __FUNCTION__);
     return INVALID_OPERATION;
 }
 status_t Camera3Stream::returnInputBufferLocked(
-        const camera3_stream_buffer &) {
+        const camera_stream_buffer &) {
     ALOGE("%s: This type of stream does not support input", __FUNCTION__);
     return INVALID_OPERATION;
 }
@@ -960,6 +1003,80 @@
     mBufferFreedListener = listener;
 }
 
+status_t Camera3Stream::getBuffers(std::vector<OutstandingBuffer>* buffers,
+        nsecs_t waitBufferTimeout) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+    status_t res = OK;
+
+    if (buffers == nullptr) {
+        ALOGE("%s: buffers must not be null!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    size_t numBuffersRequested = buffers->size();
+    if (numBuffersRequested == 0) {
+        ALOGE("%s: 0 buffers are requested!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // This function should only be called when the stream is already configured.
+    if (mState != STATE_CONFIGURED) {
+        ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
+                __FUNCTION__, mId, mState);
+        if (mState == STATE_ABANDONED) {
+            return DEAD_OBJECT;
+        } else {
+            return INVALID_OPERATION;
+        }
+    }
+
+    size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
+    // Wait for a new buffer to be returned if we are running into the limit.
+    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers) {
+        ALOGV("%s: Already dequeued %zu output buffers and requesting %zu (max is %d), waiting.",
+                __FUNCTION__, numOutstandingBuffers, numBuffersRequested,
+                camera_stream::max_buffers);
+        nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
+        if (waitBufferTimeout < kWaitForBufferDuration) {
+            waitBufferTimeout = kWaitForBufferDuration;
+        }
+        res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
+        nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
+        mBufferLimitLatency.add(waitStart, waitEnd);
+        if (res != OK) {
+            if (res == TIMED_OUT) {
+                ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
+                        __FUNCTION__, waitBufferTimeout / 1000000LL,
+                        camera_stream::max_buffers);
+            }
+            return res;
+        }
+        size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
+        if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
+            ALOGE("%s: outstanding buffer count went from %zu to %zu; "
+                    "getBuffer(s) calls must not run in parallel!", __FUNCTION__,
+                    numOutstandingBuffers, updatedNumOutstandingBuffers);
+            return INVALID_OPERATION;
+        }
+        numOutstandingBuffers = updatedNumOutstandingBuffers;
+    }
+
+    res = getBuffersLocked(buffers);
+    if (res == OK) {
+        for (auto& outstandingBuffer : *buffers) {
+            camera_stream_buffer* buffer = outstandingBuffer.outBuffer;
+            fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
+            if (buffer->buffer) {
+                Mutex::Autolock l(mOutstandingBuffersLock);
+                mOutstandingBuffers.push_back(*buffer->buffer);
+            }
+        }
+    }
+
+    return res;
+}
+
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d768d3d..5a364ab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -23,8 +23,6 @@
 #include <utils/String16.h>
 #include <utils/List.h>
 
-#include "hardware/camera3.h"
-
 #include "utils/LatencyHistogram.h"
 #include "Camera3StreamBufferListener.h"
 #include "Camera3StreamInterface.h"
@@ -50,7 +48,7 @@
  *    with the HAL.
  *
  *  STATE_IN_CONFIG: Configuration has started, but not yet concluded. During this
- *    time, the usage, max_buffers, and priv fields of camera3_stream returned by
+ *    time, the usage, max_buffers, and priv fields of camera_stream returned by
  *    startConfiguration() may be modified.
  *
  *  STATE_IN_RE_CONFIG: Configuration has started, and the stream has been
@@ -130,15 +128,15 @@
  *
  */
 class Camera3Stream :
-        protected camera3_stream,
+        protected camera_stream,
         public virtual Camera3StreamInterface,
         public virtual RefBase {
   public:
 
     virtual ~Camera3Stream();
 
-    static Camera3Stream*       cast(camera3_stream *stream);
-    static const Camera3Stream* cast(const camera3_stream *stream);
+    static Camera3Stream*       cast(camera_stream *stream);
+    static const Camera3Stream* cast(const camera_stream *stream);
 
     /**
      * Get the stream's ID
@@ -149,6 +147,14 @@
      * Get the output stream set id.
      */
     int              getStreamSetId() const;
+    /**
+     * Is this stream part of a multi-resolution stream set?
+     */
+    bool             isMultiResolution() const;
+    /**
+     * Get the HAL stream group id for a multi-resolution stream set
+     */
+    int              getHalStreamGroupId() const;
 
     /**
      * Get the stream's dimensions and format
@@ -165,12 +171,13 @@
     void              setDataSpaceOverride(bool dataSpaceOverriden);
     bool              isDataSpaceOverridden() const;
     android_dataspace getOriginalDataSpace() const;
+    int               getMaxHalBuffers() const;
     const String8&    physicalCameraId() const;
 
     void              setOfflineProcessingSupport(bool) override;
     bool              getOfflineProcessingSupport() const override;
 
-    camera3_stream*   asHalStream() override {
+    camera_stream*   asHalStream() override {
         return this;
     }
 
@@ -179,14 +186,12 @@
      * information to be passed into the HAL device's configure_streams call.
      *
      * Until finishConfiguration() is called, no other methods on the stream may be
-     * called. The usage and max_buffers fields of camera3_stream may be modified
+     * called. The usage and max_buffers fields of camera_stream may be modified
      * between start/finishConfiguration, but may not be changed after that.
-     * The priv field of camera3_stream may be modified at any time after
-     * startConfiguration.
      *
      * Returns NULL in case of error starting configuration.
      */
-    camera3_stream*  startConfiguration();
+    camera_stream*  startConfiguration();
 
     /**
      * Check if the stream is mid-configuration (start has been called, but not
@@ -226,6 +231,11 @@
     bool             isUnpreparable();
 
     /**
+     * Mark the stream as unpreparable.
+     */
+    void             markUnpreparable() override;
+
+    /**
      * Start stream preparation. May only be called in the CONFIGURED state,
      * when no valid buffers have yet been returned to this stream. Prepares
      * up to maxCount buffers, or the maximum number of buffers needed by the
@@ -307,7 +317,7 @@
     status_t       tearDown();
 
     /**
-     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * Fill in the camera_stream_buffer with the next valid buffer for this
      * stream, to hand over to the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -320,11 +330,17 @@
      * buffers.
      *
      */
-    status_t         getBuffer(camera3_stream_buffer *buffer,
+    status_t         getBuffer(camera_stream_buffer *buffer,
             nsecs_t waitBufferTimeout,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     /**
+     * Similar to getBuffer() except this method fills multiple buffers.
+     */
+    status_t         getBuffers(std::vector<OutstandingBuffer>* buffers,
+            nsecs_t waitBufferTimeout);
+
+    /**
      * Return a buffer to the stream after use by the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -335,23 +351,26 @@
      * This method may only be called for buffers provided by getBuffer().
      * For bidirectional streams, this method applies to the output-side buffers
      */
-    status_t         returnBuffer(const camera3_stream_buffer &buffer,
+    status_t         returnBuffer(const camera_stream_buffer &buffer,
             nsecs_t timestamp, bool timestampIncreasing,
             const std::vector<size_t>& surface_ids = std::vector<size_t>(),
             uint64_t frameNumber = 0);
 
     /**
-     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * Fill in the camera_stream_buffer with the next valid buffer for this
      * stream, to hand over to the HAL.
      *
      * This method may only be called once finishConfiguration has been called.
      * For bidirectional streams, this method applies to the input-side
      * buffers.
      *
+     * This method also returns the size of the returned input buffer.
+     *
      * Normally this call will block until the handed out buffer count is less than the stream
      * max buffer count; if respectHalLimit is set to false, this is ignored.
      */
-    status_t         getInputBuffer(camera3_stream_buffer *buffer, bool respectHalLimit = true);
+    status_t         getInputBuffer(camera_stream_buffer *buffer,
+                             Size* size, bool respectHalLimit = true);
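// --- Illustrative sketch (not part of the patch) -----------------------------
// A minimal example of the new out-parameter, as it might appear in a caller's
// .cpp: the caller now receives the dimensions of the acquired input buffer
// alongside the buffer itself. The helper name is made up, and the usual
// Android logging macros are assumed to be available.
static status_t acquireInputExample(const sp<Camera3Stream>& stream,
        camera_stream_buffer* buf) {
    Size inputSize;
    status_t res = stream->getInputBuffer(buf, &inputSize, /*respectHalLimit*/ true);
    if (res == OK) {
        ALOGV("Acquired %ux%u input buffer", inputSize.width, inputSize.height);
    }
    return res;
}
// ------------------------------------------------------------------------------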
 
     /**
      * Return a buffer to the stream after use by the HAL.
@@ -359,7 +378,7 @@
      * This method may only be called for buffers provided by getBuffer().
      * For bidirectional streams, this method applies to the input-side buffers
      */
-    status_t         returnInputBuffer(const camera3_stream_buffer &buffer);
+    status_t         returnInputBuffer(const camera_stream_buffer &buffer);
 
     // get the buffer producer of the input buffer queue.
     // only apply to input streams.
@@ -476,10 +495,12 @@
 
     mutable Mutex mLock;
 
-    Camera3Stream(int id, camera3_stream_type type,
+    Camera3Stream(int id, camera_stream_type type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
-            android_dataspace dataSpace, camera3_stream_rotation_t rotation,
-            const String8& physicalCameraId, int setId);
+            android_dataspace dataSpace, camera_stream_rotation_t rotation,
+            const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            int setId, bool isMultiResolution);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
@@ -489,18 +510,22 @@
 
     // getBuffer / returnBuffer implementations
 
-    // Since camera3_stream_buffer includes a raw pointer to the stream,
-    // cast to camera3_stream*, implementations must increment the
+    // Since camera_stream_buffer includes a raw pointer to the stream,
+    // cast to camera_stream*, implementations must increment the
     // refcount of the stream manually in getBufferLocked, and decrement it in
     // returnBufferLocked.
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer,
+    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
-    virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
+    virtual status_t returnBufferLocked(const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
-    virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+
+    virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
+
+    virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
+
     virtual status_t returnInputBufferLocked(
-            const camera3_stream_buffer &buffer);
+            const camera_stream_buffer &buffer);
     virtual bool     hasOutstandingBuffersLocked() const = 0;
     // Get the buffer producer of the input buffer queue. Only apply to input streams.
     virtual status_t getInputBufferProducerLocked(sp<IGraphicBufferProducer> *producer);
@@ -529,7 +554,7 @@
     virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
 
     // Return whether the buffer is in the list of outstanding buffers.
-    bool isOutstandingBuffer(const camera3_stream_buffer& buffer) const;
+    bool isOutstandingBuffer(const camera_stream_buffer& buffer) const;
 
     // Tracking for idle state
     wp<StatusTracker> mStatusTracker;
@@ -553,14 +578,14 @@
     Condition mInputBufferReturnedSignal;
     static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
 
-    void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
+    void fireBufferListenersLocked(const camera_stream_buffer& buffer,
             bool acquired, bool output, nsecs_t timestamp = 0, uint64_t frameNumber = 0);
     List<wp<Camera3StreamBufferListener> > mBufferListenerList;
 
     status_t        cancelPrepareLocked();
 
     // Remove the buffer from the list of outstanding buffers.
-    void removeOutstandingBuffer(const camera3_stream_buffer& buffer);
+    void removeOutstandingBuffer(const camera_stream_buffer& buffer);
 
     // Tracking for PREPARING state
 
@@ -570,7 +595,7 @@
     bool mPrepared;
     bool mPrepareBlockRequest;
 
-    Vector<camera3_stream_buffer_t> mPreparedBuffers;
+    Vector<camera_stream_buffer_t> mPreparedBuffers;
     size_t mPreparedBufferIdx;
 
     // Number of buffers allocated on last prepare call.
@@ -596,6 +621,7 @@
     String8 mPhysicalCameraId;
     nsecs_t mLastTimestamp;
 
+    bool mIsMultiResolution = false;
     bool mSupportOfflineProcessing = false;
 }; // class Camera3Stream
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 667e3bb..2d3397c 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -23,13 +23,63 @@
 #include "Camera3StreamBufferListener.h"
 #include "Camera3StreamBufferFreedListener.h"
 
-struct camera3_stream;
-struct camera3_stream_buffer;
-
 namespace android {
 
 namespace camera3 {
 
+typedef enum camera_buffer_status {
+    CAMERA_BUFFER_STATUS_OK = 0,
+    CAMERA_BUFFER_STATUS_ERROR = 1
+} camera_buffer_status_t;
+
+typedef enum camera_stream_type {
+    CAMERA_STREAM_OUTPUT = 0,
+    CAMERA_STREAM_INPUT = 1,
+    CAMERA_NUM_STREAM_TYPES
+} camera_stream_type_t;
+
+typedef enum camera_stream_rotation {
+    /* No rotation */
+    CAMERA_STREAM_ROTATION_0 = 0,
+
+    /* Rotate by 90 degrees counterclockwise */
+    CAMERA_STREAM_ROTATION_90 = 1,
+
+    /* Rotate by 180 degrees counterclockwise */
+    CAMERA_STREAM_ROTATION_180 = 2,
+
+    /* Rotate by 270 degrees counterclockwise */
+    CAMERA_STREAM_ROTATION_270 = 3
+} camera_stream_rotation_t;
+
+typedef struct camera_stream {
+    camera_stream_type_t stream_type;
+    uint32_t width;
+    uint32_t height;
+    int format;
+    uint32_t usage;
+    uint32_t max_buffers;
+    android_dataspace_t data_space;
+    camera_stream_rotation_t rotation;
+    const char* physical_camera_id;
+
+    std::unordered_set<int32_t> sensor_pixel_modes_used;
+} camera_stream_t;
+
+typedef struct camera_stream_buffer {
+    camera_stream_t *stream;
+    buffer_handle_t *buffer;
+    camera_buffer_status_t status;
+    int acquire_fence;
+    int release_fence;
+} camera_stream_buffer_t;
+
+struct Size {
+    uint32_t width;
+    uint32_t height;
+    explicit Size(uint32_t w = 0, uint32_t h = 0) : width(w), height(h) {}
+};
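// --- Illustrative sketch (not part of the patch) -----------------------------
// How the service-side types above fit together: a camera_stream_t describes
// the stream, and each camera_stream_buffer_t handed to the HAL points back at
// it. All concrete values below (resolution, format constant, buffer counts)
// are placeholders for illustration; buffer_handle_t comes from
// cutils/native_handle.h in the Android tree.
static camera_stream_buffer_t makeExampleBuffer(buffer_handle_t* handle) {
    static camera_stream_t stream = {};
    stream.stream_type = CAMERA_STREAM_OUTPUT;
    stream.width = 1920;
    stream.height = 1080;
    stream.format = 0x22;               // HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
    stream.usage = 0;                   // filled in during stream configuration
    stream.max_buffers = 4;             // set by the HAL in configure_streams
    stream.data_space = HAL_DATASPACE_UNKNOWN;
    stream.rotation = CAMERA_STREAM_ROTATION_0;
    stream.physical_camera_id = "";     // empty for a logical camera stream
    stream.sensor_pixel_modes_used = {0 /* ANDROID_SENSOR_PIXEL_MODE_DEFAULT */};

    camera_stream_buffer_t buffer = {};
    buffer.stream = &stream;
    buffer.buffer = handle;             // gralloc handle owned by the caller
    buffer.status = CAMERA_BUFFER_STATUS_OK;
    buffer.acquire_fence = -1;          // -1 means "no fence"
    buffer.release_fence = -1;
    return buffer;
}
// ------------------------------------------------------------------------------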
+
 enum {
     /**
      * This stream set ID indicates that the set ID is invalid, and this stream doesn't intend to
@@ -56,13 +106,15 @@
         uint64_t consumerUsage;
         bool finalized = false;
         bool supportsOffline = false;
+        std::unordered_set<int32_t> sensorPixelModesUsed;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                uint64_t _consumerUsage) :
+                uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed) :
             width(_width), height(_height), format(_format),
-            dataSpace(_dataSpace), consumerUsage(_consumerUsage) {}
+            dataSpace(_dataSpace), consumerUsage(_consumerUsage),
+            sensorPixelModesUsed(_sensorPixelModesUsed) {}
 };
 
 /**
@@ -87,6 +139,16 @@
     virtual int      getStreamSetId() const = 0;
 
     /**
+     * Is this stream part of a multi-resolution stream set?
+     */
+    virtual bool     isMultiResolution() const = 0;
+
+    /**
+     * Get the HAL stream group id for a multi-resolution stream set
+     */
+    virtual int      getHalStreamGroupId() const = 0;
+
+    /**
      * Get the stream's dimensions and format
      */
     virtual uint32_t getWidth() const = 0;
@@ -99,6 +161,8 @@
     virtual void setDataSpaceOverride(bool dataSpaceOverriden) = 0;
     virtual bool isDataSpaceOverridden() const = 0;
     virtual android_dataspace getOriginalDataSpace() const = 0;
+    virtual int getMaxHalBuffers() const = 0;
+    virtual int getMaxTotalBuffers() const = 0;
 
     /**
      * Offline processing
@@ -107,23 +171,23 @@
     virtual bool getOfflineProcessingSupport() const = 0;
 
     /**
-     * Get a HAL3 handle for the stream, without starting stream configuration.
+     * Get a handle for the stream, without starting stream configuration.
      */
-    virtual camera3_stream* asHalStream() = 0;
+    virtual camera_stream* asHalStream() = 0;
 
     /**
      * Start the stream configuration process. Returns a handle to the stream's
-     * information to be passed into the HAL device's configure_streams call.
+     * information to be passed into the device's configure_streams call.
      *
      * Until finishConfiguration() is called, no other methods on the stream may
-     * be called. The usage and max_buffers fields of camera3_stream may be
+     * be called. The usage and max_buffers fields of camera_stream may be
      * modified between start/finishConfiguration, but may not be changed after
-     * that. The priv field of camera3_stream may be modified at any time after
+     * that. The priv field of camera_stream may be modified at any time after
      * startConfiguration.
      *
      * Returns NULL in case of error starting configuration.
      */
-    virtual camera3_stream* startConfiguration() = 0;
+    virtual camera_stream* startConfiguration() = 0;
 
     /**
      * Check if the stream is mid-configuration (start has been called, but not
@@ -163,6 +227,11 @@
     virtual bool     isUnpreparable() = 0;
 
     /**
+     * Mark the stream as unpreparable.
+     */
+    virtual void     markUnpreparable() = 0;
+
+    /**
      * Start stream preparation. May only be called in the CONFIGURED state,
      * when no valid buffers have yet been returned to this stream. Prepares
      * up to maxCount buffers, or the maximum number of buffers needed by the
@@ -239,7 +308,7 @@
     virtual status_t tearDown() = 0;
 
     /**
-     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * Fill in the camera_stream_buffer with the next valid buffer for this
      * stream, to hand over to the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -253,10 +322,28 @@
      * buffers.
      *
      */
-    virtual status_t getBuffer(camera3_stream_buffer *buffer,
+    virtual status_t getBuffer(camera_stream_buffer *buffer,
             nsecs_t waitBufferTimeout,
             const std::vector<size_t>& surface_ids = std::vector<size_t>()) = 0;
 
+    struct OutstandingBuffer {
+        camera_stream_buffer* outBuffer;
+
+        /**
+         * Multiple surfaces could share the same HAL stream, but a request may
+         * be only for a subset of surfaces. In this case, the
+         * Camera3StreamInterface object needs the surface ID information to acquire
+         * buffers for those surfaces. For the case of single surface for a HAL
+         * stream, surface_ids parameter has no effect.
+         */
+        std::vector<size_t> surface_ids;
+    };
+    /**
+     * Similar to getBuffer() except this method fills multiple buffers.
+     */
+    virtual status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
+            nsecs_t waitBufferTimeout) = 0;
+
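// --- Illustrative sketch (not part of the patch) -----------------------------
// A possible caller of the batched API above, as it might appear in a caller's
// .cpp: one OutstandingBuffer entry per requested buffer, each pointing at
// caller-owned camera_stream_buffer storage. The helper name, buffer count and
// 3-second timeout are assumptions for the example, not values mandated by
// this interface.
static status_t acquireTwoBuffersExample(const sp<Camera3StreamInterface>& stream,
        std::vector<camera_stream_buffer>* halBuffers /* size() >= 2 */) {
    std::vector<Camera3StreamInterface::OutstandingBuffer> outstanding(2);
    outstanding[0].outBuffer = &(*halBuffers)[0];
    outstanding[1].outBuffer = &(*halBuffers)[1];
    // surface_ids left empty: route to the stream's single consumer surface.
    return stream->getBuffers(&outstanding, /*waitBufferTimeout*/ 3000000000LL); // 3 s in ns
}
// ------------------------------------------------------------------------------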
     /**
      * Return a buffer to the stream after use by the HAL.
      *
@@ -269,13 +356,13 @@
      * This method may only be called for buffers provided by getBuffer().
      * For bidirectional streams, this method applies to the output-side buffers
      */
-    virtual status_t returnBuffer(const camera3_stream_buffer &buffer,
+    virtual status_t returnBuffer(const camera_stream_buffer &buffer,
             nsecs_t timestamp, bool timestampIncreasing = true,
             const std::vector<size_t>& surface_ids = std::vector<size_t>(),
             uint64_t frameNumber = 0) = 0;
 
     /**
-     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * Fill in the camera_stream_buffer with the next valid buffer for this
      * stream, to hand over to the HAL.
      *
      * This method may only be called once finishConfiguration has been called.
@@ -285,7 +372,8 @@
      * Normally this call will block until the handed out buffer count is less than the stream
      * max buffer count; if respectHalLimit is set to false, this is ignored.
      */
-    virtual status_t getInputBuffer(camera3_stream_buffer *buffer, bool respectHalLimit = true) = 0;
+    virtual status_t getInputBuffer(camera_stream_buffer *buffer,
+            Size *size, bool respectHalLimit = true) = 0;
 
     /**
      * Return a buffer to the stream after use by the HAL.
@@ -293,7 +381,7 @@
      * This method may only be called for buffers provided by getBuffer().
      * For bidirectional streams, this method applies to the input-side buffers
      */
-    virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0;
+    virtual status_t returnInputBuffer(const camera_stream_buffer &buffer) = 0;
 
     /**
      * Get the buffer producer of the input buffer queue.
diff --git a/services/camera/libcameraservice/device3/CoordinateMapper.h b/services/camera/libcameraservice/device3/CoordinateMapper.h
index 5164856..558f4c0 100644
--- a/services/camera/libcameraservice/device3/CoordinateMapper.h
+++ b/services/camera/libcameraservice/device3/CoordinateMapper.h
@@ -18,16 +18,23 @@
 #define ANDROID_SERVERS_COORDINATEMAPPER_H
 
 #include <array>
+#include <set>
 
 namespace android {
 
 namespace camera3 {
 
 class CoordinateMapper {
-    // Right now only stores metadata tags containing 2D coordinates
-    // to be corrected.
+public:
+    // The result metadata tags that are to be re-mapped
+    const std::set<uint32_t>& getRemappedKeys() const {
+        return mRemappedKeys;
+    }
+
+    virtual ~CoordinateMapper() = default;
+
 protected:
-    // Metadata key lists to correct
+    // Metadata tags containing 2D coordinates to be corrected.
 
     // Both capture request and result
     static const std::array<uint32_t, 3> kMeteringRegionsToCorrect;
@@ -37,6 +44,10 @@
 
     // Only for capture results; don't clamp
     static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
+
+    virtual void initRemappedKeys() = 0;
+    std::set<uint32_t> mRemappedKeys;
+
 }; // class CoordinateMapper
 
 } // namespace camera3
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 8132225..89dd115 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -22,13 +22,28 @@
 #include <cmath>
 
 #include "device3/DistortionMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
 namespace camera3 {
 
 
-DistortionMapper::DistortionMapper() : mValidMapping(false), mValidGrids(false) {
+DistortionMapper::DistortionMapper() {
+    initRemappedKeys();
+}
+
+void DistortionMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kRectsToCorrect.begin(),
+            kRectsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+    mRemappedKeys.insert(ANDROID_DISTORTION_CORRECTION_MODE);
 }
 
 bool DistortionMapper::isDistortionSupported(const CameraMetadata &deviceInfo) {
@@ -47,41 +62,81 @@
 
 status_t DistortionMapper::setupStaticInfo(const CameraMetadata &deviceInfo) {
     std::lock_guard<std::mutex> lock(mMutex);
+    status_t res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/false);
+    if (res != OK) {
+        return res;
+    }
+
+    // Record on the member (not a shadowing local) so calibrationValid() can see it.
+    mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+    if (mMaxResolution) {
+        res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
+    }
+    return res;
+}
+
+status_t DistortionMapper::setupStaticInfoLocked(const CameraMetadata &deviceInfo,
+        bool maxResolution) {
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
     camera_metadata_ro_entry_t array;
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+        SessionConfigurationUtils::getAppropriateModeTag(
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float arrayX = static_cast<float>(array.data.i32[0]);
     float arrayY = static_cast<float>(array.data.i32[1]);
-    mArrayWidth = static_cast<float>(array.data.i32[2]);
-    mArrayHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mArrayWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mArrayHeight = static_cast<float>(array.data.i32[3]);
 
-    array = deviceInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+    array = deviceInfo.find(
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, maxResolution));
     if (array.count != 4) return BAD_VALUE;
 
     float activeX = static_cast<float>(array.data.i32[0]);
     float activeY = static_cast<float>(array.data.i32[1]);
-    mActiveWidth = static_cast<float>(array.data.i32[2]);
-    mActiveHeight = static_cast<float>(array.data.i32[3]);
+    mapperInfo->mActiveWidth = static_cast<float>(array.data.i32[2]);
+    mapperInfo->mActiveHeight = static_cast<float>(array.data.i32[3]);
 
-    mArrayDiffX = activeX - arrayX;
-    mArrayDiffY = activeY - arrayY;
+    mapperInfo->mArrayDiffX = activeX - arrayX;
+    mapperInfo->mArrayDiffY = activeY - arrayY;
 
-    return updateCalibration(deviceInfo);
+    return updateCalibration(deviceInfo, /*isStatic*/ true, maxResolution);
+}
+
+static bool doesSettingsHaveMaxResolution(const CameraMetadata *settings) {
+    if (settings == nullptr) {
+        return false;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        return (sensorPixelModeEntry.data.u8[0] == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+    }
+    return false;
 }
 
 bool DistortionMapper::calibrationValid() const {
     std::lock_guard<std::mutex> lock(mMutex);
-
-    return mValidMapping;
+    bool isValid = mDistortionMapperInfo.mValidMapping;
+    if (mMaxResolution) {
+        isValid = isValid && mDistortionMapperInfoMaximumResolution.mValidMapping;
+    }
+    return isValid;
 }
 
 status_t DistortionMapper::correctCaptureRequest(CameraMetadata *request) {
     std::lock_guard<std::mutex> lock(mMutex);
     status_t res;
 
-    if (!mValidMapping) return OK;
+    bool maxResolution = doesSettingsHaveMaxResolution(request);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
+
+    if (!mapperInfo->mValidMapping) return OK;
 
     camera_metadata_entry_t e;
     e = request->find(ANDROID_DISTORTION_CORRECTION_MODE);
@@ -93,27 +148,30 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapCorrectedToRaw(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapCorrectedToRaw(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = request->find(rect);
-            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
     }
-
     return OK;
 }
 
 status_t DistortionMapper::correctCaptureResult(CameraMetadata *result) {
     std::lock_guard<std::mutex> lock(mMutex);
+
+    bool maxResolution = doesSettingsHaveMaxResolution(result);
+    DistortionMapperInfo *mapperInfo = maxResolution ? &mDistortionMapperInfoMaximumResolution :
+            &mDistortionMapperInfo;
     status_t res;
 
-    if (!mValidMapping) return OK;
+    if (!mapperInfo->mValidMapping) return OK;
 
-    res = updateCalibration(*result);
+    res = updateCalibration(*result, /*isStatic*/ false, maxResolution);
     if (res != OK) {
         ALOGE("Failure to update lens calibration information");
         return INVALID_OPERATION;
@@ -129,18 +187,18 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapRawToCorrected(e.data.i32 + j, 2, /*clamp*/true);
+                res = mapRawToCorrected(e.data.i32 + j, 2, mapperInfo, /*clamp*/true);
                 if (res != OK) return res;
             }
         }
         for (auto rect : kRectsToCorrect) {
             e = result->find(rect);
-            res = mapRawRectToCorrected(e.data.i32, e.count / 4, /*clamp*/true);
+            res = mapRawRectToCorrected(e.data.i32, e.count / 4, mapperInfo, /*clamp*/true);
             if (res != OK) return res;
         }
         for (auto pts : kResultPointsToCorrectNoClamp) {
             e = result->find(pts);
-            res = mapRawToCorrected(e.data.i32, e.count / 2, /*clamp*/false);
+            res = mapRawToCorrected(e.data.i32, e.count / 2, mapperInfo, /*clamp*/false);
             if (res != OK) return res;
         }
     }
@@ -150,25 +208,37 @@
 
 // Utility methods; not guarded by mutex
 
-status_t DistortionMapper::updateCalibration(const CameraMetadata &result) {
+status_t DistortionMapper::updateCalibration(const CameraMetadata &result, bool isStatic,
+        bool maxResolution) {
     camera_metadata_ro_entry_t calib, distortion;
+    DistortionMapperInfo *mapperInfo =
+            maxResolution ? &mDistortionMapperInfoMaximumResolution : &mDistortionMapperInfo;
+    // We only need the maximum-resolution variants of LENS_INTRINSIC_CALIBRATION and
+    // LENS_DISTORTION when reading static metadata; capture results use the same keys
+    // regardless of sensor pixel mode.
+    int calibrationKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_INTRINSIC_CALIBRATION,
+                maxResolution && isStatic);
+    int distortionKey =
+        SessionConfigurationUtils::getAppropriateModeTag(ANDROID_LENS_DISTORTION,
+                maxResolution && isStatic);
 
-    calib = result.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
-    distortion = result.find(ANDROID_LENS_DISTORTION);
+    calib = result.find(calibrationKey);
+    distortion = result.find(distortionKey);
 
     if (calib.count != 5) return BAD_VALUE;
     if (distortion.count != 5) return BAD_VALUE;
 
     // Skip redoing work if no change to calibration fields
-    if (mValidMapping &&
-            mFx == calib.data.f[0] &&
-            mFy == calib.data.f[1] &&
-            mCx == calib.data.f[2] &&
-            mCy == calib.data.f[3] &&
-            mS == calib.data.f[4]) {
+    if (mapperInfo->mValidMapping &&
+            mapperInfo->mFx == calib.data.f[0] &&
+            mapperInfo->mFy == calib.data.f[1] &&
+            mapperInfo->mCx == calib.data.f[2] &&
+            mapperInfo->mCy == calib.data.f[3] &&
+            mapperInfo->mS == calib.data.f[4]) {
         bool noChange = true;
         for (size_t i = 0; i < distortion.count; i++) {
-            if (mK[i] != distortion.data.f[i]) {
+            if (mapperInfo->mK[i] != distortion.data.f[i]) {
                 noChange = false;
                 break;
             }
@@ -176,39 +246,39 @@
         if (noChange) return OK;
     }
 
-    mFx = calib.data.f[0];
-    mFy = calib.data.f[1];
-    mCx = calib.data.f[2];
-    mCy = calib.data.f[3];
-    mS = calib.data.f[4];
+    mapperInfo->mFx = calib.data.f[0];
+    mapperInfo->mFy = calib.data.f[1];
+    mapperInfo->mCx = calib.data.f[2];
+    mapperInfo->mCy = calib.data.f[3];
+    mapperInfo->mS = calib.data.f[4];
 
-    mInvFx = 1 / mFx;
-    mInvFy = 1 / mFy;
+    mapperInfo->mInvFx = 1 / mapperInfo->mFx;
+    mapperInfo->mInvFy = 1 / mapperInfo->mFy;
 
     for (size_t i = 0; i < distortion.count; i++) {
-        mK[i] = distortion.data.f[i];
+        mapperInfo->mK[i] = distortion.data.f[i];
     }
 
-    mValidMapping = true;
+    mapperInfo->mValidMapping = true;
     // Need to recalculate grid
-    mValidGrids = false;
+    mapperInfo->mValidGrids = false;
 
     return OK;
 }
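// --- Illustrative sketch (not part of the patch) -----------------------------
// Feeding updateCalibration() directly, e.g. from a unit test (it is public
// "for testing" in the header). The five intrinsics are [f_x, f_y, c_x, c_y, s]
// and the five distortion coefficients land in mK[0..2] (radial) and mK[3..4]
// (tangential), matching the loop above. The numeric values are made up, and
// the CameraMetadata::update(tag, float*, count) overload is assumed to be the
// one from camera/CameraMetadata.h.
static status_t loadExampleCalibration(DistortionMapper* mapper) {
    CameraMetadata meta;
    const float intrinsics[5] = {1000.f, 1000.f, 2000.f, 1500.f, 0.f};
    const float distortion[5] = {0.1f, -0.05f, 0.01f, 0.f, 0.f};
    meta.update(ANDROID_LENS_INTRINSIC_CALIBRATION, intrinsics, 5);
    meta.update(ANDROID_LENS_DISTORTION, distortion, 5);
    // Defaults: per-result update, default (non-maximum-resolution) pixel mode.
    return mapper->updateCalibration(meta);
}
// ------------------------------------------------------------------------------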
 
 status_t DistortionMapper::mapRawToCorrected(int32_t *coordPairs, int coordCount,
-        bool clamp, bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+        DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    if (!mValidGrids) {
-        status_t res = buildGrids();
+    if (!mapperInfo->mValidGrids) {
+        status_t res = buildGrids(mapperInfo);
         if (res != OK) return res;
     }
 
     for (int i = 0; i < coordCount * 2; i += 2) {
-        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mDistortedGrid);
+        const GridQuad *quad = findEnclosingQuad(coordPairs + i, mapperInfo->mDistortedGrid);
         if (quad == nullptr) {
             ALOGE("Raw to corrected mapping failure: No quad found for (%d, %d)",
                     *(coordPairs + i), *(coordPairs + i + 1));
@@ -244,8 +314,8 @@
 
         // Clamp to within active array
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
 
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
@@ -256,19 +326,19 @@
 }
 
 status_t DistortionMapper::mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mActiveWidth / mArrayWidth;
-    float scaleY = mActiveHeight / mArrayHeight;
+    float scaleX = mapperInfo->mActiveWidth / mapperInfo->mArrayWidth;
+    float scaleY = mapperInfo->mActiveHeight / mapperInfo->mArrayHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float corrX = x * scaleX;
         float corrY = y * scaleY;
         if (clamp) {
-            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
-            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+            corrX = std::min(mapperInfo->mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mapperInfo->mActiveHeight - 1, std::max(0.f, corrY));
         }
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
         coordPairs[i + 1] = static_cast<int32_t>(std::round(corrY));
@@ -277,9 +347,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-        bool simple) {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount,
+       DistortionMapperInfo *mapperInfo, bool clamp, bool simple) {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
         int32_t coords[4] = {
@@ -289,7 +359,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapRawToCorrected(coords, 2, clamp, simple);
+        mapRawToCorrected(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -301,60 +371,60 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    return mapCorrectedToRawImpl(coordPairs, coordCount, clamp, simple);
+status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    return mapCorrectedToRawImpl(coordPairs, coordCount, mapperInfo, clamp, simple);
 }
 
 template<typename T>
-status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, clamp);
+    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, mapperInfo, clamp);
 
-    float activeCx = mCx - mArrayDiffX;
-    float activeCy = mCy - mArrayDiffY;
+    float activeCx = mapperInfo->mCx - mapperInfo->mArrayDiffX;
+    float activeCy = mapperInfo->mCy - mapperInfo->mArrayDiffY;
     for (int i = 0; i < coordCount * 2; i += 2) {
         // Move to normalized space from active array space
-        float ywi = (coordPairs[i + 1] - activeCy) * mInvFy;
-        float xwi = (coordPairs[i] - activeCx - mS * ywi) * mInvFx;
+        float ywi = (coordPairs[i + 1] - activeCy) * mapperInfo->mInvFy;
+        float xwi = (coordPairs[i] - activeCx - mapperInfo->mS * ywi) * mapperInfo->mInvFx;
         // Apply distortion model to calculate raw image coordinates
+        const std::array<float, 5> &kK = mapperInfo->mK;
         float rSq = xwi * xwi + ywi * ywi;
-        float Fr = 1.f + (mK[0] * rSq) + (mK[1] * rSq * rSq) + (mK[2] * rSq * rSq * rSq);
-        float xc = xwi * Fr + (mK[3] * 2 * xwi * ywi) + mK[4] * (rSq + 2 * xwi * xwi);
-        float yc = ywi * Fr + (mK[4] * 2 * xwi * ywi) + mK[3] * (rSq + 2 * ywi * ywi);
+        float Fr = 1.f + (kK[0] * rSq) + (kK[1] * rSq * rSq) + (kK[2] * rSq * rSq * rSq);
+        float xc = xwi * Fr + (kK[3] * 2 * xwi * ywi) + kK[4] * (rSq + 2 * xwi * xwi);
+        float yc = ywi * Fr + (kK[4] * 2 * xwi * ywi) + kK[3] * (rSq + 2 * ywi * ywi);
         // Move back to image space
-        float xr = mFx * xc + mS * yc + mCx;
-        float yr = mFy * yc + mCy;
+        float xr = mapperInfo->mFx * xc + mapperInfo->mS * yc + mapperInfo->mCx;
+        float yr = mapperInfo->mFy * yc + mapperInfo->mCy;
         // Clamp to within pre-correction active array
         if (clamp) {
-            xr = std::min(mArrayWidth - 1, std::max(0.f, xr));
-            yr = std::min(mArrayHeight - 1, std::max(0.f, yr));
+            xr = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, xr));
+            yr = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, yr));
         }
 
         coordPairs[i] = static_cast<T>(std::round(xr));
         coordPairs[i + 1] = static_cast<T>(std::round(yr));
     }
-
     return OK;
 }
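// --- Illustrative sketch (not part of the patch) -----------------------------
// The loop above applies a radial-plus-tangential distortion model in
// normalized sensor coordinates before mapping back to pixels. The same math
// for a single point, written standalone: k[0..2] are the radial terms and
// k[3..4] the tangential terms, mirroring mapperInfo->mK.
static void distortNormalizedPoint(const std::array<float, 5>& k,
        float xwi, float ywi, float* xc, float* yc) {
    float rSq = xwi * xwi + ywi * ywi;
    float Fr = 1.f + (k[0] * rSq) + (k[1] * rSq * rSq) + (k[2] * rSq * rSq * rSq);
    *xc = xwi * Fr + (k[3] * 2 * xwi * ywi) + k[4] * (rSq + 2 * xwi * xwi);
    *yc = ywi * Fr + (k[4] * 2 * xwi * ywi) + k[3] * (rSq + 2 * ywi * ywi);
}
// ------------------------------------------------------------------------------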
 
 template<typename T>
 status_t DistortionMapper::mapCorrectedToRawImplSimple(T *coordPairs, int coordCount,
-        bool clamp) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+       const DistortionMapperInfo *mapperInfo, bool clamp) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
-    float scaleX = mArrayWidth / mActiveWidth;
-    float scaleY = mArrayHeight / mActiveHeight;
+    float scaleX = mapperInfo->mArrayWidth / mapperInfo->mActiveWidth;
+    float scaleY = mapperInfo->mArrayHeight / mapperInfo->mActiveHeight;
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
         float rawX = x * scaleX;
         float rawY = y * scaleY;
         if (clamp) {
-            rawX = std::min(mArrayWidth - 1, std::max(0.f, rawX));
-            rawY = std::min(mArrayHeight - 1, std::max(0.f, rawY));
+            rawX = std::min(mapperInfo->mArrayWidth - 1, std::max(0.f, rawX));
+            rawY = std::min(mapperInfo->mArrayHeight - 1, std::max(0.f, rawY));
         }
         coordPairs[i] = static_cast<T>(std::round(rawX));
         coordPairs[i + 1] = static_cast<T>(std::round(rawY));
@@ -363,9 +433,9 @@
     return OK;
 }
 
-status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-        bool simple) const {
-    if (!mValidMapping) return INVALID_OPERATION;
+status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+       const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const {
+    if (!mapperInfo->mValidMapping) return INVALID_OPERATION;
 
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
@@ -376,7 +446,7 @@
             rects[i + 1] + rects[i + 3] - 1
         };
 
-        mapCorrectedToRaw(coords, 2, clamp, simple);
+        mapCorrectedToRaw(coords, 2, mapperInfo, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -388,37 +458,37 @@
     return OK;
 }
 
-status_t DistortionMapper::buildGrids() {
-    if (mCorrectedGrid.size() != kGridSize * kGridSize) {
-        mCorrectedGrid.resize(kGridSize * kGridSize);
-        mDistortedGrid.resize(kGridSize * kGridSize);
+status_t DistortionMapper::buildGrids(DistortionMapperInfo *mapperInfo) {
+    if (mapperInfo->mCorrectedGrid.size() != kGridSize * kGridSize) {
+        mapperInfo->mCorrectedGrid.resize(kGridSize * kGridSize);
+        mapperInfo->mDistortedGrid.resize(kGridSize * kGridSize);
     }
 
-    float gridMargin = mArrayWidth * kGridMargin;
-    float gridSpacingX = (mArrayWidth + 2 * gridMargin) / kGridSize;
-    float gridSpacingY = (mArrayHeight + 2 * gridMargin) / kGridSize;
+    float gridMargin = mapperInfo->mArrayWidth * kGridMargin;
+    float gridSpacingX = (mapperInfo->mArrayWidth + 2 * gridMargin) / kGridSize;
+    float gridSpacingY = (mapperInfo->mArrayHeight + 2 * gridMargin) / kGridSize;
 
     size_t index = 0;
     float x = -gridMargin;
     for (size_t i = 0; i < kGridSize; i++, x += gridSpacingX) {
         float y = -gridMargin;
         for (size_t j = 0; j < kGridSize; j++, y += gridSpacingY, index++) {
-            mCorrectedGrid[index].src = nullptr;
-            mCorrectedGrid[index].coords = {
+            mapperInfo->mCorrectedGrid[index].src = nullptr;
+            mapperInfo->mCorrectedGrid[index].coords = {
                 x, y,
                 x + gridSpacingX, y,
                 x + gridSpacingX, y + gridSpacingY,
                 x, y + gridSpacingY
             };
-            mDistortedGrid[index].src = &mCorrectedGrid[index];
-            mDistortedGrid[index].coords = mCorrectedGrid[index].coords;
-            status_t res = mapCorrectedToRawImpl(mDistortedGrid[index].coords.data(), 4,
-                    /*clamp*/false, /*simple*/false);
+            mapperInfo->mDistortedGrid[index].src = &(mapperInfo->mCorrectedGrid[index]);
+            mapperInfo->mDistortedGrid[index].coords = mapperInfo->mCorrectedGrid[index].coords;
+            status_t res = mapCorrectedToRawImpl(mapperInfo->mDistortedGrid[index].coords.data(), 4,
+                    mapperInfo, /*clamp*/false, /*simple*/false);
             if (res != OK) return res;
         }
     }
 
-    mValidGrids = true;
+    mapperInfo->mValidGrids = true;
     return OK;
 }
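// --- Editorial note (not part of the patch) -----------------------------------
// buildGrids() precomputes the reverse mapping used by mapRawToCorrected():
// mCorrectedGrid tiles the (margin-padded) pre-correction array with
// kGridSize x kGridSize quads, and each quad's corners are pushed through the
// forward model (mapCorrectedToRawImpl) to produce mDistortedGrid. A raw point
// is then located in mDistortedGrid and interpolated back, via the quad's src
// pointer, into the matching corrected quad.
// -------------------------------------------------------------------------------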
 
@@ -485,7 +555,7 @@
 
     float det = b * b - 4 * a * c;
     if (det < 0) {
-        // Sanity check - should not happen if pt is within the quad
+        // Validation check - should not happen if pt is within the quad
         ALOGE("Bad determinant! a: %f, b: %f, c: %f, det: %f", a,b,c,det);
         return -1;
     }
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 7dcb67b..96f4fda 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -32,18 +32,16 @@
  * Utilities to transform between raw (distorted) and warped (corrected) coordinate systems
  * for cameras that support geometric distortion
  */
-class DistortionMapper : private CoordinateMapper {
+class DistortionMapper : public CoordinateMapper {
   public:
     DistortionMapper();
 
     DistortionMapper(const DistortionMapper& other) :
-            mValidMapping(other.mValidMapping), mValidGrids(other.mValidGrids),
-            mFx(other.mFx), mFy(other.mFy), mCx(other.mCx), mCy(other.mCy), mS(other.mS),
-            mInvFx(other.mInvFx), mInvFy(other.mInvFy), mK(other.mK),
-            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
-            mActiveWidth(other.mActiveWidth), mActiveHeight(other.mActiveHeight),
-            mArrayDiffX(other.mArrayDiffX), mArrayDiffY(other.mArrayDiffY),
-            mCorrectedGrid(other.mCorrectedGrid), mDistortedGrid(other.mDistortedGrid) {}
+            mDistortionMapperInfo(other.mDistortionMapperInfo),
+            mDistortionMapperInfoMaximumResolution(other.mDistortionMapperInfoMaximumResolution) {
+            initRemappedKeys(); }
+
+    void initRemappedKeys() override;
 
     /**
      * Check whether distortion correction is supported by the camera HAL
@@ -72,10 +70,14 @@
 
 
   public: // Visible for testing. Not guarded by mutex; do not use concurrently
+
+    struct DistortionMapperInfo;
+
     /**
      * Update lens calibration from capture results or equivalent
      */
-    status_t updateCalibration(const CameraMetadata &result);
+    status_t updateCalibration(const CameraMetadata &result, bool isStatic = false,
+            bool maxResolution = false);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -86,8 +88,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount, bool clamp,
-            bool simple = true);
+    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount,
+            DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -98,8 +100,8 @@
      *   clamp: Whether to clamp the result to the bounds of the active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true);
+    status_t mapRawRectToCorrected(int32_t *rects, int rectCount,
+          DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true);
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -110,8 +112,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -122,8 +124,8 @@
      *   clamp: Whether to clamp the result to the bounds of the precorrection active array
      *   simple: Whether to do complex correction or just a simple linear map
      */
-    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
-            bool simple = true) const;
+    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount,
+           const DistortionMapperInfo *mapperInfo, bool clamp, bool simple = true) const;
 
     struct GridQuad {
         // Source grid quad, or null
@@ -133,6 +135,28 @@
         std::array<float, 8> coords;
     };
 
+    struct DistortionMapperInfo {
+        bool mValidMapping = false;
+        bool mValidGrids = false;
+
+        // intrinsic parameters, in pixels
+        float mFx, mFy, mCx, mCy, mS;
+        // pre-calculated inverses for speed
+        float mInvFx, mInvFy;
+        // radial/tangential distortion parameters
+        std::array<float, 5> mK;
+
+        // pre-correction active array dimensions
+        float mArrayWidth, mArrayHeight;
+        // active array dimensions
+        float mActiveWidth, mActiveHeight;
+        // corner offsets between pre-correction and active arrays
+        float mArrayDiffX, mArrayDiffY;
+
+        std::vector<GridQuad> mCorrectedGrid;
+        std::vector<GridQuad> mDistortedGrid;
+    };
+
     // Find which grid quad encloses the point; returns null if none do
     static const GridQuad* findEnclosingQuad(
             const int32_t pt[2], const std::vector<GridQuad>& grid);
@@ -150,6 +174,11 @@
     // if it is false, then an interpolation coordinate for edges E14 and E23 is found.
     static float calculateUorV(const int32_t pt[2], const GridQuad& quad, bool calculateU);
 
+    DistortionMapperInfo *getMapperInfo(bool maxResolution = false) {
+          return maxResolution ? &mDistortionMapperInfoMaximumResolution :
+                  &mDistortionMapperInfo;
+    };
+
   private:
     mutable std::mutex mMutex;
 
@@ -160,39 +189,28 @@
     // Fuzziness for float inequality tests
     constexpr static float kFloatFuzz = 1e-4;
 
+    bool mMaxResolution = false;
+
+    status_t setupStaticInfoLocked(const CameraMetadata &deviceInfo, bool maxResolution);
+
     // Single implementation for various mapCorrectedToRaw methods
     template<typename T>
-    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount, bool clamp, bool simple) const;
+    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp, bool simple) const;
 
     // Simple linear interpolation option
     template<typename T>
-    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount, bool clamp) const;
+    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
-    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount, bool clamp) const;
+    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
+            const DistortionMapperInfo *mapperInfo, bool clamp) const;
 
     // Utility to create reverse mapping grids
-    status_t buildGrids();
+    status_t buildGrids(DistortionMapperInfo *mapperInfo);
 
-
-    bool mValidMapping;
-    bool mValidGrids;
-
-    // intrisic parameters, in pixels
-    float mFx, mFy, mCx, mCy, mS;
-    // pre-calculated inverses for speed
-    float mInvFx, mInvFy;
-    // radial/tangential distortion parameters
-    std::array<float, 5> mK;
-
-    // pre-correction active array dimensions
-    float mArrayWidth, mArrayHeight;
-    // active array dimensions
-    float mActiveWidth, mActiveHeight;
-    // corner offsets between pre-correction and active arrays
-    float mArrayDiffX, mArrayDiffY;
-
-    std::vector<GridQuad> mCorrectedGrid;
-    std::vector<GridQuad> mDistortedGrid;
+    DistortionMapperInfo mDistortionMapperInfo;
+    DistortionMapperInfo mDistortionMapperInfoMaximumResolution;
 
 }; // class DistortionMapper
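As a minimal sketch of how the new per-resolution calibration state might be consumed (assuming a `DistortionMapper` whose static info has already been set up for both sensor pixel modes; the helper function here is hypothetical):

```cpp
#include <cstdint>

#include "device3/DistortionMapper.h"

using android::camera3::DistortionMapper;

// Map corrected (warped) coordinate pairs back to the raw sensor frame for the
// maximum-resolution sensor pixel mode, clamping to the pre-correction array.
void correctedToRawMaxResolution(DistortionMapper& mapper,
        int32_t* coordPairs, int coordCount) {
    DistortionMapper::DistortionMapperInfo* info =
            mapper.getMapperInfo(/*maxResolution*/ true);
    mapper.mapCorrectedToRaw(coordPairs, coordCount, info,
            /*clamp*/ true, /*simple*/ true);
}
```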
 
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index da4f228..523a2c7 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -24,8 +24,6 @@
 #include <utils/String8.h>
 #include <utils/Timers.h>
 
-#include "hardware/camera3.h"
-
 #include "common/CameraDeviceBase.h"
 
 namespace android {
@@ -75,7 +73,7 @@
     // return from HAL but framework has not yet received the shutter
     // event. They will be returned to the streams when framework receives
     // the shutter event.
-    Vector<camera3_stream_buffer_t> pendingOutputBuffers;
+    Vector<camera_stream_buffer_t> pendingOutputBuffers;
 
     // Whether this inflight request's shutter and result callback are to be
     // called. The policy is that if the request is the last one in the constrained
@@ -98,7 +96,10 @@
     ERROR_BUF_STRATEGY errorBufStrategy;
 
     // The physical camera ids being requested.
-    std::set<String8> physicalCameraIds;
+    // For a request on a physical camera stream, the inner set contains one ID.
+    // For a request on a stream group containing physical camera streams, the
+    // inner set contains all stream IDs in the group.
+    std::set<std::set<String8>> physicalCameraIds;
 
     // Map of physicalCameraId <-> Metadata
     std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
@@ -115,6 +116,9 @@
     // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
     std::set<std::string> cameraIdsWithZoom;
 
+    // Time of capture request (from systemTime()), in ns
+    nsecs_t requestTimeNs;
+
     // What shared surfaces an output should go to
     SurfaceMap outputSurfaces;
 
@@ -135,14 +139,15 @@
             errorBufStrategy(ERROR_BUF_CACHE),
             stillCapture(false),
             zslCapture(false),
-            rotateAndCropAuto(false) {
+            rotateAndCropAuto(false),
+            requestTimeNs(0) {
     }
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t maxDuration,
-            const std::set<String8>& physicalCameraIdSet, bool isStillCapture,
+            const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
-            const SurfaceMap& outSurfaces = SurfaceMap{}) :
+            nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
             sensorTimestamp(0),
             requestStatus(OK),
@@ -159,6 +164,7 @@
             zslCapture(isZslCapture),
             rotateAndCropAuto(rotateAndCropAuto),
             cameraIdsWithZoom(idsWithZoom),
+            requestTimeNs(requestNs),
             outputSurfaces(outSurfaces) {
     }
 };
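As a small sketch of how the nested `physicalCameraIds` set described above might be populated (the camera IDs are made up for illustration):

```cpp
#include <set>

#include <utils/String8.h>

using android::String8;

std::set<std::set<String8>> buildPhysicalCameraIdSets() {
    std::set<std::set<String8>> physicalCameraIds;
    // Request on a single physical camera stream: the inner set holds one ID.
    physicalCameraIds.insert({String8("2")});
    // Request on a stream group spanning physical streams: the inner set holds
    // the IDs of all streams in that group.
    physicalCameraIds.insert({String8("3"), String8("4")});
    return physicalCameraIds;
}
```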
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index 3718f54..a02e5f6 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -27,6 +27,18 @@
 
 namespace camera3 {
 
+void RotateAndCropMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+
+    mRemappedKeys.insert(ANDROID_SCALER_ROTATE_AND_CROP);
+    mRemappedKeys.insert(ANDROID_SCALER_CROP_REGION);
+}
+
 bool RotateAndCropMapper::isNeeded(const CameraMetadata* deviceInfo) {
     auto entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
     for (size_t i = 0; i < entry.count; i++) {
@@ -36,6 +48,8 @@
 }
 
 RotateAndCropMapper::RotateAndCropMapper(const CameraMetadata* deviceInfo) {
+    initRemappedKeys();
+
     auto entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     if (entry.count != 4) return;
 
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.h b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
index 459e27f..f9e2263 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.h
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.h
@@ -32,12 +32,14 @@
  * Utilities to transform between unrotated and rotated-and-cropped coordinate systems
  * for cameras that support SCALER_ROTATE_AND_CROP controls in AUTO mode.
  */
-class RotateAndCropMapper : private CoordinateMapper {
+class RotateAndCropMapper : public CoordinateMapper {
   public:
     static bool isNeeded(const CameraMetadata* deviceInfo);
 
     RotateAndCropMapper(const CameraMetadata* deviceInfo);
 
+    void initRemappedKeys() override;
+
     /**
      * Adjust capture request assuming rotate and crop AUTO is enabled
      */
diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp
index 723b5c2..ea1f2c1 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.cpp
+++ b/services/camera/libcameraservice/device3/StatusTracker.cpp
@@ -40,7 +40,7 @@
 StatusTracker::~StatusTracker() {
 }
 
-int StatusTracker::addComponent() {
+int StatusTracker::addComponent(std::string componentName) {
     int id;
     ssize_t err;
     {
@@ -49,8 +49,12 @@
         ALOGV("%s: Adding new component %d", __FUNCTION__, id);
 
         err = mStates.add(id, IDLE);
-        ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%zd)",
-                __FUNCTION__, id, strerror(-err), err);
+        if (componentName.empty()) {
+            componentName = std::to_string(id);
+        }
+        mComponentNames.add(id, componentName);
+        ALOGE_IF(err < 0, "%s: Can't add new component %d (%s): %s (%zd)",
+                __FUNCTION__, id, componentName.c_str(), strerror(-err), err);
     }
 
     if (err >= 0) {
@@ -68,6 +72,7 @@
         Mutex::Autolock l(mLock);
         ALOGV("%s: Removing component %d", __FUNCTION__, id);
         idx = mStates.removeItem(id);
+        mComponentNames.removeItem(id);
     }
 
     if (idx >= 0) {
@@ -80,6 +85,20 @@
 }
 
 
+void StatusTracker::dumpActiveComponents() {
+    Mutex::Autolock l(mLock);
+    if (mDeviceState == IDLE) {
+        ALOGI("%s: all components are IDLE", __FUNCTION__);
+        return;
+    }
+    for (size_t i = 0; i < mStates.size(); i++) {
+        if (mStates.valueAt(i) == ACTIVE) {
+            ALOGI("%s: component %d (%s) is active", __FUNCTION__, mStates.keyAt(i),
+                    mComponentNames.valueAt(i).c_str());
+        }
+    }
+}
+
 void StatusTracker::markComponentIdle(int id, const sp<Fence>& componentFence) {
     markComponent(id, IDLE, componentFence);
 }
diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h
index 3a1d85c..069bff6 100644
--- a/services/camera/libcameraservice/device3/StatusTracker.h
+++ b/services/camera/libcameraservice/device3/StatusTracker.h
@@ -17,13 +17,13 @@
 #ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
 #define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H
 
+#include <string>
 #include <utils/Condition.h>
 #include <utils/Errors.h>
 #include <utils/List.h>
 #include <utils/Mutex.h>
 #include <utils/Thread.h>
 #include <utils/KeyedVector.h>
-#include <hardware/camera3.h>
 
 #include "common/CameraDeviceBase.h"
 
@@ -54,7 +54,7 @@
     // Add a component to track; returns non-negative unique ID for the new
     // component on success, negative error code on failure.
     // New components start in the idle state.
-    int addComponent();
+    int addComponent(std::string componentName);
 
     // Remove existing component from idle tracking. Ignores unknown IDs
     void removeComponent(int id);
@@ -68,6 +68,8 @@
     // Set the state of a tracked component to be active. Ignores unknown IDs.
     void markComponentActive(int id);
 
+    void dumpActiveComponents();
+
     virtual void requestExit();
   protected:
 
@@ -105,6 +107,7 @@
 
     // Current component states
     KeyedVector<int, ComponentState> mStates;
+    KeyedVector<int, std::string> mComponentNames;
     // Merged fence for all processed state changes
     sp<Fence> mIdleFence;
     // Current overall device state
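As a minimal usage sketch for the named-component API declared above (assuming an existing `StatusTracker` instance owned by the device; the component name is arbitrary):

```cpp
#include <string>

#include <ui/Fence.h>
#include <utils/StrongPointer.h>

#include "device3/StatusTracker.h"

using android::Fence;
using android::sp;
using android::camera3::StatusTracker;

void trackJpegProcessor(StatusTracker& tracker) {
    // Register under a human-readable name so the active-component dump is legible.
    int id = tracker.addComponent(std::string("JpegProcessor"));
    tracker.markComponentActive(id);
    // ... processing work happens here ...
    tracker.markComponentIdle(id, sp<Fence>());  // no fence to wait on
    tracker.dumpActiveComponents();              // logs any still-active components
    tracker.removeComponent(id);
}
```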
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index a87de77..1a39510 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -20,11 +20,25 @@
 #include <algorithm>
 
 #include "device3/ZoomRatioMapper.h"
+#include "utils/SessionConfigurationUtils.h"
 
 namespace android {
 
 namespace camera3 {
 
+void ZoomRatioMapper::initRemappedKeys() {
+    mRemappedKeys.insert(
+            kMeteringRegionsToCorrect.begin(),
+            kMeteringRegionsToCorrect.end());
+    mRemappedKeys.insert(
+            kRectsToCorrect.begin(),
+            kRectsToCorrect.end());
+    mRemappedKeys.insert(
+            kResultPointsToCorrectNoClamp.begin(),
+            kResultPointsToCorrectNoClamp.end());
+
+    mRemappedKeys.insert(ANDROID_CONTROL_ZOOM_RATIO);
+}
 
 status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
     camera_metadata_entry_t entry;
@@ -115,50 +129,142 @@
     return OK;
 }
 
+static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
+        int32_t arrayTag, int32_t *width, int32_t *height) {
+    if (width == nullptr || height == nullptr) {
+        ALOGE("%s: width / height nullptr", __FUNCTION__);
+        return false;
+    }
+    camera_metadata_ro_entry_t entry;
+    entry = deviceInfo->find(arrayTag);
+    if (entry.count != 4) return false;
+    *width = entry.data.i32[2];
+    *height = entry.data.i32[3];
+    return true;
+}
+
 ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
         bool supportNativeZoomRatio, bool usePrecorrectArray) {
-    camera_metadata_ro_entry_t entry;
+    initRemappedKeys();
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t arrayW = entry.data.i32[2];
-    int32_t arrayH = entry.data.i32[3];
+    int32_t arrayW = 0;
+    int32_t arrayH = 0;
+    int32_t arrayMaximumResolutionW = 0;
+    int32_t arrayMaximumResolutionH = 0;
+    int32_t activeW = 0;
+    int32_t activeH = 0;
+    int32_t activeMaximumResolutionW = 0;
+    int32_t activeMaximumResolutionH = 0;
 
-    entry = deviceInfo->find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-    if (entry.count != 4) return;
-    int32_t activeW = entry.data.i32[2];
-    int32_t activeH = entry.data.i32[3];
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            &arrayW, &arrayH)) {
+        ALOGE("%s: Couldn't get pre correction active array size", __FUNCTION__);
+        return;
+    }
+    if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            &activeW, &activeH)) {
+        ALOGE("%s: Couldn't get active array size", __FUNCTION__);
+        return;
+    }
+
+    bool isUltraHighResolutionSensor =
+            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
+    if (isUltraHighResolutionSensor) {
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
+                    __FUNCTION__);
+            return;
+        }
+        if (!getArrayWidthAndHeight(deviceInfo,
+                ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                &activeMaximumResolutionW, &activeMaximumResolutionH)) {
+            ALOGE("%s: Couldn't get maximum resolution active array size",
+                    __FUNCTION__);
+            return;
+        }
+    }
 
     if (usePrecorrectArray) {
         mArrayWidth = arrayW;
         mArrayHeight = arrayH;
+        mArrayWidthMaximumResolution = arrayMaximumResolutionW;
+        mArrayHeightMaximumResolution = arrayMaximumResolutionH;
     } else {
         mArrayWidth = activeW;
         mArrayHeight = activeH;
+        mArrayWidthMaximumResolution = activeMaximumResolutionW;
+        mArrayHeightMaximumResolution = activeMaximumResolutionH;
     }
     mHalSupportsZoomRatio = supportNativeZoomRatio;
 
-    ALOGV("%s: array size: %d x %d, mHalSupportsZoomRatio %d",
-            __FUNCTION__, mArrayWidth, mArrayHeight, mHalSupportsZoomRatio);
+    ALOGV("%s: array size: %d x %d, full res array size: %d x %d,  mHalSupportsZoomRatio %d",
+            __FUNCTION__, mArrayWidth, mArrayHeight, mArrayWidthMaximumResolution,
+            mArrayHeightMaximumResolution, mHalSupportsZoomRatio);
     mIsValid = true;
 }
 
+status_t ZoomRatioMapper::getArrayDimensionsToBeUsed(const CameraMetadata *settings,
+        int32_t *arrayWidth, int32_t *arrayHeight) {
+    if (settings == nullptr || arrayWidth == nullptr || arrayHeight == nullptr) {
+        return BAD_VALUE;
+    }
+    // First we get the sensorPixelMode from the settings metadata.
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = settings->find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Request sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return BAD_VALUE;
+        }
+    }
+    if (sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_DEFAULT) {
+        *arrayWidth = mArrayWidth;
+        *arrayHeight = mArrayHeight;
+    } else {
+        *arrayWidth = mArrayWidthMaximumResolution;
+        *arrayHeight = mArrayHeightMaximumResolution;
+    }
+    return OK;
+}
+
 status_t ZoomRatioMapper::updateCaptureRequest(CameraMetadata* request) {
     if (!mIsValid) return INVALID_OPERATION;
 
     status_t res = OK;
     bool zoomRatioIs1 = true;
     camera_metadata_entry_t entry;
-
+    int arrayHeight = 0, arrayWidth = 0;
+    res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
     if (entry.count == 1 && entry.data.f[0] != 1.0f) {
         zoomRatioIs1 = false;
+
+        // If cropRegion is windowboxing, override it with activeArray
+        camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
+        if (cropRegionEntry.count == 4) {
+            int cropWidth = cropRegionEntry.data.i32[2];
+            int cropHeight = cropRegionEntry.data.i32[3];
+            if (cropWidth < arrayWidth && cropHeight < arrayHeight) {
+                cropRegionEntry.data.i32[0] = 0;
+                cropRegionEntry.data.i32[1] = 0;
+                cropRegionEntry.data.i32[2] = arrayWidth;
+                cropRegionEntry.data.i32[3] = arrayHeight;
+            }
+        }
     }
 
     if (mHalSupportsZoomRatio && zoomRatioIs1) {
-        res = separateZoomFromCropLocked(request, false/*isResult*/);
+        res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
-        res = combineZoomAndCropLocked(request, false/*isResult*/);
+        res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
     }
 
     // If CONTROL_ZOOM_RATIO is in request, but HAL doesn't support
@@ -175,10 +281,15 @@
 
     status_t res = OK;
 
+    int arrayHeight = 0, arrayWidth = 0;
+    res = getArrayDimensionsToBeUsed(result, &arrayWidth, &arrayHeight);
+    if (res != OK) {
+        return res;
+    }
     if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
-        res = combineZoomAndCropLocked(result, true/*isResult*/);
+        res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
-        res = separateZoomFromCropLocked(result, true/*isResult*/);
+        res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
     } else {
         camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
         if (entry.count == 0) {
@@ -190,16 +301,22 @@
     return res;
 }
 
-float ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata) {
+status_t ZoomRatioMapper::deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatioRet,
+        int arrayWidth, int arrayHeight) {
+    if (metadata == nullptr || zoomRatioRet == nullptr) {
+        return BAD_VALUE;
+    }
     float zoomRatio = 1.0;
 
     camera_metadata_ro_entry_t entry;
     entry = metadata->find(ANDROID_SCALER_CROP_REGION);
-    if (entry.count != 4) return zoomRatio;
-
+    if (entry.count != 4) {
+        *zoomRatioRet = 1;
+        return OK;
+    }
     // Center of the preCorrection/active size
-    float arrayCenterX = mArrayWidth / 2.0;
-    float arrayCenterY = mArrayHeight / 2.0;
+    float arrayCenterX = arrayWidth / 2.0;
+    float arrayCenterY = arrayHeight / 2.0;
 
     // Re-map crop region to coordinate system centered to (arrayCenterX,
     // arrayCenterY).
@@ -209,22 +326,30 @@
     float cropRegionBottom = entry.data.i32[1] + entry.data.i32[3] - arrayCenterY;
 
     // Calculate the scaling factor for left, top, bottom, right
-    float zoomRatioLeft = std::max(mArrayWidth / (2 * cropRegionLeft), 1.0f);
-    float zoomRatioTop = std::max(mArrayHeight / (2 * cropRegionTop), 1.0f);
-    float zoomRatioRight = std::max(mArrayWidth / (2 * cropRegionRight), 1.0f);
-    float zoomRatioBottom = std::max(mArrayHeight / (2 * cropRegionBottom), 1.0f);
+    float zoomRatioLeft = std::max(arrayWidth / (2 * cropRegionLeft), 1.0f);
+    float zoomRatioTop = std::max(arrayHeight / (2 * cropRegionTop), 1.0f);
+    float zoomRatioRight = std::max(arrayWidth / (2 * cropRegionRight), 1.0f);
+    float zoomRatioBottom = std::max(arrayHeight / (2 * cropRegionBottom), 1.0f);
 
     // Use minimum scaling factor to handle letterboxing or pillarboxing
     zoomRatio = std::min(std::min(zoomRatioLeft, zoomRatioRight),
             std::min(zoomRatioTop, zoomRatioBottom));
 
     ALOGV("%s: derived zoomRatio is %f", __FUNCTION__, zoomRatio);
-    return zoomRatio;
+    *zoomRatioRet = zoomRatio;
+    return OK;
 }
 
-status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult) {
-    status_t res;
-    float zoomRatio = deriveZoomRatio(metadata);
+status_t ZoomRatioMapper::separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
+    float zoomRatio = 1.0;
+    status_t res = deriveZoomRatio(metadata, &zoomRatio, arrayWidth, arrayHeight);
+
+    if (res != OK) {
+        ALOGE("%s: Failed to derive zoom ratio: %s(%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     // Update zoomRatio metadata tag
     res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
@@ -244,12 +369,14 @@
                 continue;
             }
             // Top left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
@@ -257,20 +384,22 @@
 
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, zoomRatio, arrayWidth, arrayHeight);
     }
 
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     return OK;
 }
 
-status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult) {
+status_t ZoomRatioMapper::combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult,
+        int arrayWidth, int arrayHeight) {
     float zoomRatio = 1.0f;
     camera_metadata_entry_t entry;
     entry = metadata->find(ANDROID_CONTROL_ZOOM_RATIO);
@@ -279,7 +408,6 @@
     }
 
     // Unscale regions with zoomRatio
-    status_t res;
     for (auto region : kMeteringRegionsToCorrect) {
         entry = metadata->find(region);
         for (size_t j = 0; j < entry.count; j += 5) {
@@ -288,29 +416,32 @@
                 continue;
             }
             // Top-left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             // Bottom-right (exclusive): Use adjacent inclusive pixel to
             // calculate.
             entry.data.i32[j+2] -= 1;
             entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/);
+            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+                    arrayHeight);
             entry.data.i32[j+2] += 1;
             entry.data.i32[j+3] += 1;
         }
     }
     for (auto rect : kRectsToCorrect) {
         entry = metadata->find(rect);
-        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio);
+        scaleRects(entry.data.i32, entry.count / 4, 1.0 / zoomRatio, arrayWidth, arrayHeight);
     }
     if (isResult) {
         for (auto pts : kResultPointsToCorrectNoClamp) {
             entry = metadata->find(pts);
-            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/);
+            scaleCoordinates(entry.data.i32, entry.count / 2, 1.0 / zoomRatio, false /*clamp*/,
+                    arrayWidth, arrayHeight);
         }
     }
 
     zoomRatio = 1.0;
-    res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
+    status_t res = metadata->update(ANDROID_CONTROL_ZOOM_RATIO, &zoomRatio, 1);
     if (res != OK) {
         return res;
     }
@@ -319,7 +450,7 @@
 }
 
 void ZoomRatioMapper::scaleCoordinates(int32_t* coordPairs, int coordCount,
-        float scaleRatio, bool clamp) {
+        float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight) {
     // A pixel's coordinate is represented by the position of its top-left corner.
     // To avoid the rounding error, we use the coordinate for the center of the
     // pixel instead:
@@ -332,18 +463,18 @@
     for (int i = 0; i < coordCount * 2; i += 2) {
         float x = coordPairs[i];
         float y = coordPairs[i + 1];
-        float xCentered = x - (mArrayWidth - 2) / 2;
-        float yCentered = y - (mArrayHeight - 2) / 2;
+        float xCentered = x - (arrayWidth - 2) / 2;
+        float yCentered = y - (arrayHeight - 2) / 2;
         float scaledX = xCentered * scaleRatio;
         float scaledY = yCentered * scaleRatio;
-        scaledX += (mArrayWidth - 2) / 2;
-        scaledY += (mArrayHeight - 2) / 2;
+        scaledX += (arrayWidth - 2) / 2;
+        scaledY += (arrayHeight - 2) / 2;
         coordPairs[i] = static_cast<int32_t>(std::round(scaledX));
         coordPairs[i+1] = static_cast<int32_t>(std::round(scaledY));
         // Clamp to within activeArray/preCorrectionActiveArray
         if (clamp) {
-            int32_t right = mArrayWidth - 1;
-            int32_t bottom = mArrayHeight - 1;
+            int32_t right = arrayWidth - 1;
+            int32_t bottom = arrayHeight - 1;
             coordPairs[i] =
                     std::min(right, std::max(0, coordPairs[i]));
             coordPairs[i+1] =
@@ -354,7 +485,7 @@
 }
 
 void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
-        float scaleRatio) {
+        float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, l+width-1, t+height-1),
         // where both top-left and bottom-right are inclusive.
@@ -366,9 +497,9 @@
         };
 
         // top-left
-        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
         // bottom-right
-        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/);
+        scaleCoordinates(coords+2, 1, scaleRatio, true /*clamp*/, arrayWidth, arrayHeight);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
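The per-call array dimensions feed the same center/scale/clamp arithmetic as before; a self-contained sketch of that math (the array sizes in the example comment are made up):

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>

// Scale one (x, y) point about the array center the way scaleCoordinates does:
// shift so the origin sits at the pixel center ((w - 2) / 2, (h - 2) / 2),
// scale, shift back, then clamp to [0, w - 1] x [0, h - 1].
void scaleOnePoint(int32_t* x, int32_t* y, float scaleRatio,
        int32_t arrayWidth, int32_t arrayHeight) {
    float xCentered = *x - (arrayWidth - 2) / 2;
    float yCentered = *y - (arrayHeight - 2) / 2;
    float scaledX = xCentered * scaleRatio + (arrayWidth - 2) / 2;
    float scaledY = yCentered * scaleRatio + (arrayHeight - 2) / 2;
    *x = std::min(arrayWidth - 1,
            std::max(0, static_cast<int32_t>(std::round(scaledX))));
    *y = std::min(arrayHeight - 1,
            std::max(0, static_cast<int32_t>(std::round(scaledY))));
}

// Example: with arrayWidth = 4000, arrayHeight = 3000 and scaleRatio = 2.0,
// the point (3000, 2250) scales to (4001, 3001) and is clamped to (3999, 2999).
```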
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 698f87f..b7a9e41 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -33,7 +33,7 @@
  * - HAL supports zoomRatio and the application uses cropRegion, or
  * - HAL doesn't support zoomRatio, but the application uses zoomRatio
  */
-class ZoomRatioMapper : private CoordinateMapper {
+class ZoomRatioMapper : public CoordinateMapper {
   public:
     ZoomRatioMapper() = default;
     ZoomRatioMapper(const CameraMetadata *deviceInfo,
@@ -41,7 +41,9 @@
     ZoomRatioMapper(const ZoomRatioMapper& other) :
             mHalSupportsZoomRatio(other.mHalSupportsZoomRatio),
             mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
-            mIsValid(other.mIsValid) {}
+            mIsValid(other.mIsValid) { initRemappedKeys(); }
+
+    void initRemappedKeys() override;
 
     /**
      * Initialize request template with valid zoomRatio if necessary.
@@ -66,22 +68,31 @@
 
   public: // Visible for testing. Do not use concurently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
-            float scaleRatio, bool clamp);
+            float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
 
     bool isValid() { return mIsValid; }
   private:
     // const after construction
     bool mHalSupportsZoomRatio;
-    // active array / pre-correction array dimension
+
+    // active array / pre-correction array dimension for default and maximum
+    // resolution modes.
     int32_t mArrayWidth, mArrayHeight;
+    int32_t mArrayWidthMaximumResolution, mArrayHeightMaximumResolution;
 
     bool mIsValid = false;
 
-    float deriveZoomRatio(const CameraMetadata* metadata);
-    void scaleRects(int32_t* rects, int rectCount, float scaleRatio);
+    status_t deriveZoomRatio(const CameraMetadata* metadata, float *zoomRatio, int arrayWidth,
+            int arrayHeight);
+    void scaleRects(int32_t* rects, int rectCount, float scaleRatio, int32_t arrayWidth,
+            int32_t arrayHeight);
 
-    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult);
-    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult);
+    status_t separateZoomFromCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t combineZoomAndCropLocked(CameraMetadata* metadata, bool isResult, int arrayWidth,
+            int arrayHeight);
+    status_t getArrayDimensionsToBeUsed(const CameraMetadata *settings, int32_t *arrayWidth,
+            int32_t *arrayHeight);
 };
 
 } // namespace camera3
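As a brief sketch of how a request could be tagged so that `getArrayDimensionsToBeUsed` selects the maximum-resolution dimensions (the `request` object is assumed to be assembled elsewhere):

```cpp
#include <camera/CameraMetadata.h>

// Tag the request so the zoom-ratio mapper uses the maximum-resolution
// pre-correction/active array dimensions instead of the default-mode ones.
void requestMaximumResolutionPixelMode(android::CameraMetadata* request) {
    uint8_t pixelMode = ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
    request->update(ANDROID_SENSOR_PIXEL_MODE, &pixelMode, 1);
}
```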
diff --git a/services/camera/libcameraservice/fuzzer/Android.bp b/services/camera/libcameraservice/fuzzer/Android.bp
new file mode 100644
index 0000000..9a9a03b
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/Android.bp
@@ -0,0 +1,55 @@
+// Copyright 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_camera_libcameraservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_camera_libcameraservice_license",
+    ],
+}
+
+cc_defaults {
+    name: "libcameraservice_fuzz_defaults",
+    fuzz_config: {
+        componentid: 41727
+    },
+}
+
+cc_fuzz {
+    name: "libcameraservice_distortion_mapper_fuzzer",
+    defaults: ["libcameraservice_fuzz_defaults"],
+    srcs: [
+        "DistortionMapperFuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcameraservice",
+        "libcamera_client",
+    ],
+}
+
+cc_fuzz {
+    name: "libcameraservice_depth_processor_fuzzer",
+    defaults: ["libcameraservice_fuzz_defaults"],
+    srcs: [
+        "DepthProcessorFuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcameraservice",
+    ],
+    corpus: ["corpus/*.jpg"],
+}
diff --git a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
new file mode 100644
index 0000000..650ca91
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "common/DepthPhotoProcessor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+
+void generateDepth16Buffer(const uint8_t* data, size_t size, std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+    FuzzedDataProvider dataProvider(data, size);
+    for (size_t i = 0; i < depth16Buffer->size(); i++) {
+        (*depth16Buffer)[i] = dataProvider.ConsumeIntegral<uint16_t>();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    DepthPhotoInputFrame inputFrame;
+    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (data);
+    inputFrame.mMainJpegSize = size;
+    // Worst case both depth and confidence maps have the same size as the main color image.
+    inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+
+    std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+    size_t actualDepthPhotoSize = 0;
+
+    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+    generateDepth16Buffer(data, size, &depth16Buffer);
+
+    inputFrame.mDepthMapBuffer = depth16Buffer.data();
+    inputFrame.mDepthMapStride = kTestBufferWidth;
+    inputFrame.mDepthMapWidth = kTestBufferWidth;
+    inputFrame.mDepthMapHeight = kTestBufferHeight;
+    processDepthPhotoFrame(
+        inputFrame,
+        depthPhotoBuffer.size(),
+        depthPhotoBuffer.data(),
+        &actualDepthPhotoSize);
+
+    return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
new file mode 100644
index 0000000..88ec85c
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/DistortionMapperFuzzer.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "device3/DistortionMapper.h"
+#include <camera/CameraMetadata.h>
+
+using namespace android;
+using namespace android::camera3;
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
+
+int32_t testActiveArray[] = {100, 100, 1000, 750};
+float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
+float identityDistortion[] = { 0.f, 0.f, 0.f, 0.f, 0.f};
+
+void setupTestMapper(DistortionMapper *m,
+        float distortion[5], float intrinsics[5],
+        int32_t activeArray[4], int32_t preCorrectionActiveArray[4]) {
+    CameraMetadata deviceInfo;
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            preCorrectionActiveArray, 4);
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            activeArray, 4);
+
+    deviceInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+            intrinsics, 5);
+
+    deviceInfo.update(ANDROID_LENS_DISTORTION,
+            distortion, 5);
+
+    m->setupStaticInfo(deviceInfo);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    DistortionMapper m;
+    setupTestMapper(&m, identityDistortion, testICal,
+        /*activeArray*/ testActiveArray,
+        /*preCorrectionActiveArray*/ testActiveArray);
+
+    bool clamp = fdp.ConsumeBool();
+    bool simple = fdp.ConsumeBool();
+    std::vector<int32_t> input;
+    while (fdp.remaining_bytes() > 0) {
+        input.push_back(fdp.ConsumeIntegral<int32_t>());
+    }
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    // The size argument counts how many coordinate pairs there are, so
+    // it is expected to be 1/2 the size of the input.
+    m.mapCorrectedToRaw(input.data(), input.size()/2, mapperInfo, clamp, simple);
+
+    return 0;
+}
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..1eb37d0
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Canon_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
new file mode 100644
index 0000000..75e0371
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Fuji_MakerNote_variant_type_1.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..461d613
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..42498e2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..233ff78
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
new file mode 100644
index 0000000..f083f75
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Olympus_MakerNote_variant_type_5.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
new file mode 100644
index 0000000..0ef0ef2
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_2.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
new file mode 100644
index 0000000..d93b86f
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_3.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
new file mode 100644
index 0000000..297ea1c
--- /dev/null
+++ b/services/camera/libcameraservice/fuzzer/corpus/Pentax_MakerNote_variant_type_4.jpg
Binary files differ
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index bf89ca5..2509e6c 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -28,7 +28,7 @@
 namespace frameworks {
 namespace cameraservice {
 namespace device {
-namespace V2_0 {
+namespace V2_1 {
 namespace implementation {
 
 using hardware::cameraservice::utils::conversion::convertToHidl;
@@ -115,7 +115,7 @@
         // is guaranteed to be called serially by the client if it decides to
         // use fmq.
         if (e.settings.getDiscriminator() ==
-            FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
+            V2_0::FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
             /**
              * Get settings from the fmq.
              */
@@ -196,6 +196,12 @@
 
 Return<HStatus> HidlCameraDeviceUser::endConfigure(StreamConfigurationMode operatingMode,
                                                    const hidl_vec<uint8_t>& sessionParams) {
+    return endConfigure_2_1(operatingMode, sessionParams, systemTime());
+}
+
+Return<HStatus> HidlCameraDeviceUser::endConfigure_2_1(StreamConfigurationMode operatingMode,
+                                                   const hidl_vec<uint8_t>& sessionParams,
+                                                   nsecs_t startTimeNs) {
     android::CameraMetadata cameraMetadata;
     if (!convertFromHidl(sessionParams, &cameraMetadata)) {
         return HStatus::ILLEGAL_ARGUMENT;
@@ -203,7 +209,8 @@
 
     std::vector<int> offlineStreamIds;
     binder::Status ret = mDeviceRemote->endConfigure(convertFromHidl(operatingMode),
-                                                     cameraMetadata, &offlineStreamIds);
+                                                     cameraMetadata, ns2ms(startTimeNs),
+                                                     &offlineStreamIds);
     return B2HStatus(ret);
 }
 
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index c3a80fe..0e2ab3d 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -24,6 +24,7 @@
 #include <android/frameworks/cameraservice/common/2.0/types.h>
 #include <android/frameworks/cameraservice/service/2.0/types.h>
 #include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
+#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
 #include <android/frameworks/cameraservice/device/2.0/types.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <fmq/MessageQueue.h>
@@ -36,7 +37,7 @@
 namespace frameworks {
 namespace cameraservice {
 namespace device {
-namespace V2_0 {
+namespace V2_1 {
 namespace implementation {
 
 using frameworks::cameraservice::device::V2_0::StreamConfigurationMode;
@@ -50,7 +51,7 @@
 using CaptureRequestMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
 using TemplateId = frameworks::cameraservice::device::V2_0::TemplateId;
 
-using HCameraDeviceUser = device::V2_0::ICameraDeviceUser;
+using HCameraDeviceUser = device::V2_1::ICameraDeviceUser;
 using HCameraMetadata = cameraservice::service::V2_0::CameraMetadata;
 using HCaptureRequest = device::V2_0::CaptureRequest;
 using HSessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
@@ -83,6 +84,10 @@
     virtual Return<HStatus> endConfigure(StreamConfigurationMode operatingMode,
                                          const hidl_vec<uint8_t>& sessionParams);
 
+    virtual Return<HStatus> endConfigure_2_1(StreamConfigurationMode operatingMode,
+                                         const hidl_vec<uint8_t>& sessionParams,
+                                         nsecs_t startTimeNs);
+
     virtual Return<HStatus> deleteStream(int32_t streamId) override;
 
     virtual Return<void> createStream(const HOutputConfiguration& outputConfiguration,
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index a46133e..7d1b3cf 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -38,7 +38,7 @@
 using hardware::Void;
 
 using device::V2_0::implementation::H2BCameraDeviceCallbacks;
-using device::V2_0::implementation::HidlCameraDeviceUser;
+using device::V2_1::implementation::HidlCameraDeviceUser;
 using service::V2_0::implementation::H2BCameraServiceListener;
 using HCameraMetadataType = frameworks::cameraservice::common::V2_0::CameraMetadataType;
 using HVendorTag = frameworks::cameraservice::common::V2_0::VendorTag;
@@ -59,7 +59,8 @@
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
-        mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()), &cameraMetadata);
+        mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -103,8 +104,9 @@
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, String16(cameraId.c_str()), String16(""), std::unique_ptr<String16>(),
-            hardware::ICameraService::USE_CALLING_UID, /*out*/&deviceRemote);
+            callbacks, String16(cameraId.c_str()), String16(""), {},
+            hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.h b/services/camera/libcameraservice/hidl/HidlCameraService.h
index 097f4c5..86a7cec 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.h
@@ -21,7 +21,7 @@
 #include <thread>
 
 #include <android/frameworks/cameraservice/common/2.0/types.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
+#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
 #include <android/frameworks/cameraservice/service/2.0/types.h>
 #include <android/frameworks/cameraservice/device/2.0/types.h>
 
@@ -42,7 +42,7 @@
 
 using HCameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
 using HCameraMetadata = frameworks::cameraservice::service::V2_0::CameraMetadata;
-using HCameraService = frameworks::cameraservice::service::V2_1::ICameraService;
+using HCameraService = frameworks::cameraservice::service::V2_2::ICameraService;
 using HCameraServiceListener = frameworks::cameraservice::service::V2_0::ICameraServiceListener;
 using HCameraServiceListener2_1 = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
 using HStatus = frameworks::cameraservice::common::V2_0::Status;
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
new file mode 100644
index 0000000..3d74f0b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -0,0 +1,69 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_camera_libcameraservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_camera_libcameraservice_license",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_service_fuzzer",
+    srcs: [
+        "camera_service_fuzzer.cpp",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcameraservice",
+        "libcamera_client",
+        "liblog",
+        "libui",
+        "libgui",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/README.md b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
new file mode 100644
index 0000000..c703845
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
@@ -0,0 +1,59 @@
+# Fuzzer for libcameraservice
+
+## Plugin Design Considerations
+The fuzzer plugin is designed based on an understanding of the
+library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libcameraservice supports the following parameters:
+1. Camera Type (parameter name: `cameraType`)
+2. Camera API Version (parameter name: `cameraAPIVersion`)
+3. Event ID (parameter name: `eventId`)
+4. Camera Sound Kind (parameter name: `soundKind`)
+5. Shell Command (parameter name: `shellCommand`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `cameraType` | 0. `CAMERA_TYPE_BACKWARD_COMPATIBLE` 1. `CAMERA_TYPE_ALL` | Value obtained from FuzzedDataProvider |
+| `cameraAPIVersion` |  0. `API_VERSION_1` 1. `API_VERSION_2` | Value obtained from FuzzedDataProvider |
+| `eventId` |  0. `EVENT_USER_SWITCHED` 1. `EVENT_NONE` | Value obtained from FuzzedDataProvider |
+| `soundKind` |  0. `SOUND_SHUTTER` 1. `SOUND_RECORDING_START` 2. `SOUND_RECORDING_STOP`| Value obtained from FuzzedDataProvider |
+| `shellCommand` |  0. `set-uid-state` 1. `reset-uid-state` 2. `get-uid-state` 3. `set-rotate-and-crop` 4. `get-rotate-and-crop` 5. `help`| Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin tolerates any kind of input (empty, huge, malformed, etc.) and
+doesn't `exit()` on any input, thereby increasing the chance of identifying
+vulnerabilities.
+
+## Build
+
+This describes the steps to build the camera_service_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) camera_service_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR
+```
+  $ adb shell mkdir CORPUS_DIR
+```
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/camera_service_fuzzer/camera_service_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
new file mode 100644
index 0000000..e46bf74
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -0,0 +1,600 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#define LOG_TAG "CameraServiceFuzzer"
+//#define LOG_NDEBUG 0
+
+#include <CameraService.h>
+#include <device3/Camera3StreamInterface.h>
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/OutputConfiguration.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <private/android_filesystem_config.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace android;
+using namespace hardware;
+using namespace std;
+
+const int32_t kPreviewThreshold = 8;
+const int32_t kNumRequestsTested = 8;
+const nsecs_t kPreviewTimeout = 5000000000;  // 5 [s.]
+const nsecs_t kEventTimeout = 10000000000;   // 10 [s.]
+const size_t kMaxNumLines = USHRT_MAX;
+const size_t kMinArgs = 1;
+const size_t kMaxArgs = 5;
+const int32_t kCamType[] = {hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
+                            hardware::ICameraService::CAMERA_TYPE_ALL};
+const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
+                                 android::CameraService::API_VERSION_2};
+const uint8_t kSensorPixelModes[] = {ANDROID_SENSOR_PIXEL_MODE_DEFAULT,
+        ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION};
+const int32_t kRequestTemplates[] = {
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_PREVIEW,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_RECORD,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_MANUAL,
+    hardware::camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG
+};
+
+const int32_t kRotations[] = {
+    camera3::CAMERA_STREAM_ROTATION_0,
+    camera3::CAMERA_STREAM_ROTATION_90,
+    camera3::CAMERA_STREAM_ROTATION_270
+};
+
+const int kLayerMetadata[] = {
+    0x00100000 /*GRALLOC_USAGE_RENDERSCRIPT*/, 0x00000003 /*GRALLOC_USAGE_SW_READ_OFTEN*/,
+    0x00000100 /*GRALLOC_USAGE_HW_TEXTURE*/,   0x00000800 /*GRALLOC_USAGE_HW_COMPOSER*/,
+    0x00000200 /*GRALLOC_USAGE_HW_RENDER*/,    0x00010000 /*GRALLOC_USAGE_HW_VIDEO_ENCODER*/};
+const int kCameraMsg[] = {0x001 /*CAMERA_MSG_ERROR*/,
+                          0x002 /*CAMERA_MSG_SHUTTER*/,
+                          0x004 /*CAMERA_MSG_FOCUS*/,
+                          0x008 /*CAMERA_MSG_ZOOM*/,
+                          0x010 /*CAMERA_MSG_PREVIEW_FRAME*/,
+                          0x020 /*CAMERA_MSG_VIDEO_FRAME */,
+                          0x040 /*CAMERA_MSG_POSTVIEW_FRAME*/,
+                          0x080 /*CAMERA_MSG_RAW_IMAGE */,
+                          0x100 /*CAMERA_MSG_COMPRESSED_IMAGE*/,
+                          0x200 /*CAMERA_MSG_RAW_IMAGE_NOTIFY*/,
+                          0x400 /*CAMERA_MSG_PREVIEW_METADATA*/,
+                          0x800 /*CAMERA_MSG_FOCUS_MOVE*/};
+const int32_t kEventId[] = {ICameraService::EVENT_USER_SWITCHED, ICameraService::EVENT_NONE};
+const android::CameraService::sound_kind kSoundKind[] = {
+    android::CameraService::SOUND_SHUTTER, android::CameraService::SOUND_RECORDING_START,
+    android::CameraService::SOUND_RECORDING_STOP};
+const String16 kShellCmd[] = {String16("set-uid-state"),       String16("reset-uid-state"),
+                              String16("get-uid-state"),       String16("set-rotate-and-crop"),
+                              String16("get-rotate-and-crop"), String16("help")};
+const size_t kNumLayerMetaData = size(kLayerMetadata);
+const size_t kNumCameraMsg = size(kCameraMsg);
+const size_t kNumSoundKind = size(kSoundKind);
+const size_t kNumShellCmd = size(kShellCmd);
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+   public:
+    CameraFuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) {};
+    ~CameraFuzzer() { deInit(); }
+    void process();
+    void deInit();
+
+   private:
+    sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
+    sp<SurfaceComposerClient> mComposerClient = nullptr;
+    int32_t mNumCameras = 0;
+    size_t mPreviewBufferCount = 0;
+    bool mAutoFocusMessage = false;
+    bool mSnapshotNotification = false;
+    mutable Mutex mPreviewLock;
+    mutable Condition mPreviewCondition;
+    mutable Mutex mAutoFocusLock;
+    mutable Condition mAutoFocusCondition;
+    mutable Mutex mSnapshotLock;
+    mutable Condition mSnapshotCondition;
+
+    void getNumCameras();
+    void getCameraInformation(int32_t cameraId);
+    void invokeCameraAPIs();
+    void invokeCameraSound();
+    void invokeDump();
+    void invokeShellCommand();
+    void invokeNotifyCalls();
+
+    // CameraClient interface
+    void notifyCallback(int32_t msgType, int32_t, int32_t) override;
+    void dataCallback(int32_t msgType, const sp<IMemory> &, camera_frame_metadata_t *) override;
+    void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory> &) override{};
+    void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t *) override{};
+    void recordingFrameHandleCallbackTimestampBatch(
+        const std::vector<nsecs_t> &, const std::vector<native_handle_t *> &) override{};
+    status_t waitForPreviewStart();
+    status_t waitForEvent(Mutex &mutex, Condition &condition, bool &flag);
+};
+
+void CameraFuzzer::notifyCallback(int32_t msgType, int32_t, int32_t) {
+    if (CAMERA_MSG_FOCUS == msgType) {
+        Mutex::Autolock l(mAutoFocusLock);
+        mAutoFocusMessage = true;
+        mAutoFocusCondition.broadcast();
+    }
+};
+
+void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
+                                camera_frame_metadata_t *) {
+    switch (msgType) {
+        case CAMERA_MSG_PREVIEW_FRAME: {
+            Mutex::Autolock l(mPreviewLock);
+            ++mPreviewBufferCount;
+            mPreviewCondition.broadcast();
+            break;
+        }
+        case CAMERA_MSG_COMPRESSED_IMAGE: {
+            Mutex::Autolock l(mSnapshotLock);
+            mSnapshotNotification = true;
+            mSnapshotCondition.broadcast();
+            break;
+        }
+        default:
+            break;
+    }
+};
+
+status_t CameraFuzzer::waitForPreviewStart() {
+    status_t rc = NO_ERROR;
+    Mutex::Autolock l(mPreviewLock);
+    mPreviewBufferCount = 0;
+
+    while (mPreviewBufferCount < kPreviewThreshold) {
+        rc = mPreviewCondition.waitRelative(mPreviewLock, kPreviewTimeout);
+        if (NO_ERROR != rc) {
+            break;
+        }
+    }
+
+    return rc;
+}
+
+status_t CameraFuzzer::waitForEvent(Mutex &mutex, Condition &condition, bool &flag) {
+    status_t rc = NO_ERROR;
+    Mutex::Autolock l(mutex);
+    flag = false;
+
+    while (!flag) {
+        rc = condition.waitRelative(mutex, kEventTimeout);
+        if (NO_ERROR != rc) {
+            break;
+        }
+    }
+
+    return rc;
+}
+
+void CameraFuzzer::deInit() {
+    if (mComposerClient) {
+        mComposerClient->dispose();
+    }
+}
+
+void CameraFuzzer::getNumCameras() {
+    bool shouldPassInvalidCamType = mFuzzedDataProvider->ConsumeBool();
+    int32_t camType;
+    if (shouldPassInvalidCamType) {
+        camType = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    } else {
+        camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
+    }
+    mCameraService->getNumberOfCameras(camType, &mNumCameras);
+}
+
+void CameraFuzzer::getCameraInformation(int32_t cameraId) {
+    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    bool isSupported = false;
+    mCameraService->supportsCameraApi(
+        cameraIdStr, kCameraApiVersion[mFuzzedDataProvider->ConsumeBool()], &isSupported);
+    mCameraService->isHiddenPhysicalCamera(cameraIdStr, &isSupported);
+
+    String16 parameters;
+    mCameraService->getLegacyParameters(cameraId, &parameters);
+
+    std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination> concurrentCameraIds;
+    mCameraService->getConcurrentCameraIds(&concurrentCameraIds);
+
+    hardware::camera2::params::VendorTagDescriptorCache cache;
+    mCameraService->getCameraVendorTagCache(&cache);
+
+    CameraInfo cameraInfo;
+    mCameraService->getCameraInfo(cameraId, &cameraInfo);
+
+    CameraMetadata metadata;
+    mCameraService->getCameraCharacteristics(cameraIdStr,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+}
+
+void CameraFuzzer::invokeCameraSound() {
+    mCameraService->increaseSoundRef();
+    mCameraService->decreaseSoundRef();
+    bool shouldPassInvalidPlaySound = mFuzzedDataProvider->ConsumeBool();
+    bool shouldPassInvalidLockSound = mFuzzedDataProvider->ConsumeBool();
+    android::CameraService::sound_kind playSound, lockSound;
+    if (shouldPassInvalidPlaySound) {
+        playSound = static_cast<android::CameraService::sound_kind>(
+            mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        playSound =
+            kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+    }
+
+    if (shouldPassInvalidLockSound) {
+        lockSound = static_cast<android::CameraService::sound_kind>(
+            mFuzzedDataProvider->ConsumeIntegral<size_t>());
+    } else {
+        lockSound =
+            kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+    }
+    mCameraService->playSound(playSound);
+    mCameraService->loadSoundLocked(lockSound);
+}
+
+void CameraFuzzer::invokeDump() {
+    Vector<String16> args;
+    size_t numberOfLines = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+    for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+        args.add(static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+    }
+    const char *fileName = "logDumpFile";
+    int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+    mCameraService->dump(fd, args);
+    close(fd);
+}
+
+void CameraFuzzer::invokeShellCommand() {
+    int in = mFuzzedDataProvider->ConsumeIntegral<int>();
+    int out = mFuzzedDataProvider->ConsumeIntegral<int>();
+    int err = mFuzzedDataProvider->ConsumeIntegral<int>();
+    Vector<String16> args;
+    size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+    for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+        bool shouldPassInvalidCommand = mFuzzedDataProvider->ConsumeBool();
+        if (shouldPassInvalidCommand) {
+            args.add(
+                static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+        } else {
+            args.add(kShellCmd[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                0, kNumShellCmd - 1)]);
+        }
+    }
+    mCameraService->shellCommand(in, out, err, args);
+}
+
+void CameraFuzzer::invokeNotifyCalls() {
+    mCameraService->notifyMonitoredUids();
+    int64_t newState = mFuzzedDataProvider->ConsumeIntegral<int64_t>();
+    mCameraService->notifyDeviceStateChange(newState);
+    std::vector<int32_t> args;
+    size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+    for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+        args.push_back(mFuzzedDataProvider->ConsumeIntegral<int32_t>());
+    }
+    bool shouldPassInvalidEvent = mFuzzedDataProvider->ConsumeBool();
+    int32_t eventId;
+    if (shouldPassInvalidEvent) {
+        eventId = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    } else {
+        eventId = kEventId[mFuzzedDataProvider->ConsumeBool()];
+    }
+    mCameraService->notifySystemEvent(eventId, args);
+}
+
+void CameraFuzzer::invokeCameraAPIs() {
+    for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
+        getCameraInformation(cameraId);
+
+        ::android::binder::Status rc;
+        sp<ICamera> cameraDevice;
+
+        rc = mCameraService->connect(this, cameraId, String16(),
+                android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+        if (!rc.isOk()) {
+            // camera not connected
+            return;
+        }
+        if (cameraDevice) {
+            sp<Surface> previewSurface;
+            sp<SurfaceControl> surfaceControl;
+            CameraParameters params(cameraDevice->getParameters());
+            String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+            bool isAFSupported = false;
+            const char *focusMode = nullptr;
+
+            if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+                isAFSupported = true;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+                isAFSupported = true;
+                focusMode = CameraParameters::FOCUS_MODE_MACRO;
+            }
+            if (nullptr != focusMode) {
+                params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+                cameraDevice->setParameters(params.flatten());
+            }
+            int previewWidth, previewHeight;
+            params.getPreviewSize(&previewWidth, &previewHeight);
+
+            mComposerClient = new SurfaceComposerClient;
+            mComposerClient->initCheck();
+
+            bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+            int layerMetaData;
+            if (shouldPassInvalidLayerMetaData) {
+                layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+            } else {
+                layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, kNumLayerMetaData - 1)];
+            }
+            surfaceControl = mComposerClient->createSurface(
+                String8("Test Surface"), previewWidth, previewHeight,
+                CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
+
+            if (surfaceControl.get() != nullptr) {
+                SurfaceComposerClient::Transaction{}
+                    .setLayer(surfaceControl, 0x7fffffff)
+                    .show(surfaceControl)
+                    .apply();
+
+                previewSurface = surfaceControl->getSurface();
+                cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
+            }
+            cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+
+            Vector<Size> pictureSizes;
+            params.getSupportedPictureSizes(pictureSizes);
+
+            for (size_t i = 0; i < pictureSizes.size(); ++i) {
+                params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
+                cameraDevice->setParameters(params.flatten());
+                cameraDevice->startPreview();
+                waitForPreviewStart();
+                cameraDevice->autoFocus();
+                waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+                bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+                int msgType;
+                if (shouldPassInvalidCameraMsg) {
+                    msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+                } else {
+                    msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                        0, kNumCameraMsg - 1)];
+                }
+                cameraDevice->takePicture(msgType);
+
+                waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+            }
+
+            Vector<Size> videoSizes;
+            params.getSupportedVideoSizes(videoSizes);
+
+            for (size_t i = 0; i < videoSizes.size(); ++i) {
+                params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+
+                cameraDevice->setParameters(params.flatten());
+                cameraDevice->startPreview();
+                waitForPreviewStart();
+                cameraDevice->setVideoBufferMode(
+                    android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+                cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
+                cameraDevice->startRecording();
+                cameraDevice->stopRecording();
+            }
+            cameraDevice->stopPreview();
+            cameraDevice->disconnect();
+        }
+    }
+}
+
+void CameraFuzzer::process() {
+    getNumCameras();
+    invokeCameraSound();
+    if (mNumCameras > 0) {
+        invokeCameraAPIs();
+    }
+    invokeDump();
+    invokeShellCommand();
+    invokeNotifyCalls();
+}
+
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t , const String16&) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+class Camera2Fuzzer {
+   public:
+    Camera2Fuzzer(sp<CameraService> cs, std::shared_ptr<FuzzedDataProvider> fp) :
+          mCameraService(cs), mFuzzedDataProvider(fp) { };
+    ~Camera2Fuzzer() {}
+    void process();
+   private:
+    sp<CameraService> mCameraService = nullptr;
+    std::shared_ptr<FuzzedDataProvider> mFuzzedDataProvider = nullptr;
+};
+
+void Camera2Fuzzer::process() {
+    sp<TestCameraServiceListener> listener = new TestCameraServiceListener();
+    std::vector<hardware::CameraStatus> statuses;
+    mCameraService->addListenerTest(listener, &statuses);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+        if (device == nullptr) {
+            continue;
+        }
+        device->beginConfigure();
+        sp<IGraphicBufferProducer> gbProducer;
+        sp<IGraphicBufferConsumer> gbConsumer;
+        BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(gbConsumer,
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/8, /*controlledByApp*/true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Default to VGA dimensions, as those are guaranteed to be present
+        gbConsumer->setDefaultBufferSize(640, 480);
+        gbConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+
+        String16 noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        OutputConfiguration output(gbProducer,
+                kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+        int streamId;
+        device->createStream(output, &streamId);
+        CameraMetadata sessionParams;
+        std::vector<int> offlineStreamIds;
+        device->endConfigure(/*isConstrainedHighSpeed*/ mFuzzedDataProvider->ConsumeBool(),
+                sessionParams, ns2ms(systemTime()), &offlineStreamIds);
+
+        CameraMetadata requestTemplate;
+        size_t requestTemplatesSize = sizeof(kRequestTemplates) / sizeof(int32_t) - 1;
+        device->createDefaultRequest(kRequestTemplates[
+                mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, requestTemplatesSize)],
+                /*out*/&requestTemplate);
+        hardware::camera2::CaptureRequest request;
+        request.mSurfaceList.add(surface);
+        request.mIsReprocess = false;
+        hardware::camera2::utils::SubmitInfo info;
+        for (int i = 0; i < kNumRequestsTested; i++) {
+            uint8_t sensorPixelMode =
+                    kSensorPixelModes[mFuzzedDataProvider->ConsumeBool() ? 1 : 0];
+            requestTemplate.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1);
+            request.mPhysicalCameraSettings.clear();
+            request.mPhysicalCameraSettings.push_back({s.cameraId.string(), requestTemplate});
+            device->submitRequest(request, /*streaming*/false, /*out*/&info);
+            ALOGV("%s : camera id %s submit request id %d",__FUNCTION__, s.cameraId.string(),
+                    info.mRequestId);
+        }
+        device->disconnect();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    setuid(AID_CAMERASERVER);
+    std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
+    sp<CameraService> cs = new CameraService();
+    cs->clearCachedVariables();
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(cs, fp);
+    if (!camerafuzzer) {
+        return 0;
+    }
+    camerafuzzer->process();
+    Camera2Fuzzer camera2fuzzer(cs, fp);
+    camera2fuzzer.process();
+    return 0;
+}
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index 3ead715..0b5ad79 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -33,9 +33,11 @@
     android.hardware.camera.provider@2.4 \
     android.hardware.camera.provider@2.5 \
     android.hardware.camera.provider@2.6 \
+    android.hardware.camera.provider@2.7 \
     android.hardware.camera.device@1.0 \
     android.hardware.camera.device@3.2 \
     android.hardware.camera.device@3.4 \
+    android.hardware.camera.device@3.7 \
     android.hidl.token@1.0-utils
 
 LOCAL_STATIC_LIBRARIES := \
@@ -51,6 +53,9 @@
 LOCAL_SANITIZE := address
 
 LOCAL_MODULE:= cameraservice_test
+LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS:= notice
+LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/../NOTICE
 LOCAL_COMPATIBILITY_SUITE := device-tests
 LOCAL_MODULE_TAGS := tests
 
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 855b5ab..a74fd9d 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -23,7 +23,9 @@
 #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
 #include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
 #include <camera_metadata_hidden.h>
+#include <hidl/HidlBinderSupport.h>
 #include <gtest/gtest.h>
+#include <utility>
 
 using namespace android;
 using namespace android::hardware::camera;
@@ -173,6 +175,25 @@
         return hardware::Void();
     }
 
+    virtual ::android::hardware::Return<bool> linkToDeath(
+            const ::android::sp<::android::hardware::hidl_death_recipient>& recipient,
+            uint64_t cookie) {
+        if (mInitialDeathRecipient.get() == nullptr) {
+            mInitialDeathRecipient =
+                std::make_unique<::android::hardware::hidl_binder_death_recipient>(recipient,
+                        cookie, this);
+        }
+        return true;
+    }
+
+    void signalInitialBinderDeathRecipient() {
+        if (mInitialDeathRecipient.get() != nullptr) {
+            mInitialDeathRecipient->binderDied(nullptr /*who*/);
+        }
+    }
+
+    std::unique_ptr<::android::hardware::hidl_binder_death_recipient> mInitialDeathRecipient;
+
     enum MethodNames {
         SET_CALLBACK,
         GET_VENDOR_TAGS,
@@ -567,3 +588,47 @@
     ASSERT_EQ(serviceProxy.mLastRequestedServiceNames.back(), testProviderInstanceName) <<
             "Incorrect instance requested from service manager";
 }
+
+// Test that CameraProviderManager can handle races between provider death notifications and
+// provider registration callbacks
+TEST(CameraProviderManagerTest, BinderDeathRegistrationRaceTest) {
+
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    deviceNames.push_back("device@3.2/test/1");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+    status_t res;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+    sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
+            vendorSection);
+
+    // Not setting up the provider in the service proxy yet, to test cases where a
+    // HAL isn't starting up correctly
+    res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    // Now set up provider and trigger a registration
+    serviceProxy.setProvider(provider);
+
+    hardware::hidl_string testProviderFqInterfaceName =
+            "android.hardware.camera.provider@2.4::ICameraProvider";
+    hardware::hidl_string testProviderInstanceName = "test/0";
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    // Simulate an artificially delayed registration callback that arrives before the
+    // death notification
+    serviceProxy.mManagerNotificationInterface->onRegistration(
+            testProviderFqInterfaceName,
+            testProviderInstanceName, false);
+
+    provider->signalInitialBinderDeathRecipient();
+
+    auto deviceCount = static_cast<unsigned>(providerManager->getCameraCount().second);
+    ASSERT_EQ(deviceCount, deviceNames.size()) <<
+            "Unexpected number of camera devices";
+}
diff --git a/services/camera/libcameraservice/tests/ClientManagerTest.cpp b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
index 6a38427..c79ef45 100644
--- a/services/camera/libcameraservice/tests/ClientManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/ClientManagerTest.cpp
@@ -17,10 +17,13 @@
 #define LOG_NDEBUG 0
 #define LOG_TAG "ClientManagerTest"
 
+#include <binder/ActivityManager.h>
+
 #include "../utils/ClientManager.h"
 #include <gtest/gtest.h>
 
 using namespace android::resource_policy;
+using namespace android;
 
 struct TestClient {
     TestClient(int id, int32_t cost, const std::set<int>& conflictingKeys, int32_t ownerId,
@@ -41,7 +44,7 @@
 
 TestDescriptorPtr makeDescFromTestClient(const TestClient& tc) {
     return std::make_shared<TestClientDescriptor>(/*ID*/tc.mId, tc, tc.mCost, tc.mConflictingKeys,
-            tc.mScore, tc.mOwnerId, tc.mState, tc.mIsVendorClient);
+            tc.mScore, tc.mOwnerId, tc.mState, tc.mIsVendorClient, /*oomScoreOffset*/0);
 }
 
 class TestClientManager : public ClientManager<int, TestClient> {
@@ -59,13 +62,15 @@
 
     TestClientManager cm;
     TestClient cam0Client(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0Desc = makeDescFromTestClient(cam0Client);
     auto evicted = cm.addAndEvict(cam0Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam1Client(/*ID*/1, /*cost*/100, /*conflicts*/{0},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam1Desc = makeDescFromTestClient(cam1Client);
 
     // 1. Check with conflicting devices, new client would be evicted
@@ -76,13 +81,15 @@
     cm.removeAll();
 
     TestClient cam2Client(/*ID*/2, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam2Desc = makeDescFromTestClient(cam2Client);
     evicted = cm.addAndEvict(cam2Desc);
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam3Client(/*ID*/3, /*cost*/100, /*conflicts*/{},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam3Desc = makeDescFromTestClient(cam3Client);
 
     // 2. Check without conflicting devices, the pre-existing client won't be evicted
@@ -97,12 +104,42 @@
     ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
 
     TestClient cam0ClientNew(/*ID*/0, /*cost*/100, /*conflicts*/{1},
-            /*ownerId*/ 1000, /*score*/50, /*state*/ 1, /*isVendorClient*/ false);
+            /*ownerId*/ 1000, PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
     auto cam0DescNew = makeDescFromTestClient(cam0ClientNew);
     wouldBeEvicted = cm.wouldEvict(cam0DescNew);
 
     // 3. Check opening the same camera twice will evict the older client
     ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
     ASSERT_EQ(wouldBeEvicted[0], cam0Desc) << "cam0 (old) must be evicted";
-}
 
+    // 4. Check that an invalid client (dead process) will be evicted
+
+    cm.removeAll();
+
+    TestClient camDeadClient(/*ID*/ 0, /*cost*/100, /*conflicts*/{},
+            /*ownerId*/ 1000, INVALID_ADJ,
+            ActivityManager::PROCESS_STATE_NONEXISTENT, /*isVendorClient*/ false);
+    auto camDeadDesc = makeDescFromTestClient(camDeadClient);
+    evicted = cm.addAndEvict(camDeadDesc);
+    wouldBeEvicted = cm.wouldEvict(cam0Desc);
+
+    ASSERT_EQ(evicted.size(), 0u) << "Evicted list must be empty";
+    ASSERT_EQ(wouldBeEvicted.size(), 1u) << "Evicted list length must be 1";
+    ASSERT_EQ(wouldBeEvicted[0], camDeadDesc) << "dead cam must be evicted";
+
+    // 5. Check that a more important client will win
+
+    TestClient cam0ForegroundClient(/*ID*/0, /*cost*/100, /*conflicts*/{1},
+            /*ownerId*/ 1000, FOREGROUND_APP_ADJ,
+            ActivityManager::PROCESS_STATE_PERSISTENT_UI, /*isVendorClient*/ false);
+    auto cam0FgDesc = makeDescFromTestClient(cam0ForegroundClient);
+
+    cm.removeAll();
+    evicted = cm.addAndEvict(cam0Desc);
+    wouldBeEvicted = cm.wouldEvict(cam0FgDesc);
+
+    ASSERT_EQ(evicted.size(), 0u);
+    ASSERT_EQ(wouldBeEvicted.size(), 1u);
+    ASSERT_EQ(wouldBeEvicted[0],cam0Desc) << "less important cam0 must be evicted";
+}
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 54935c9..8331136 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -27,7 +27,7 @@
 
 using namespace android;
 using namespace android::camera3;
-
+using DistortionMapperInfo = android::camera3::DistortionMapper::DistortionMapperInfo;
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
 int32_t testPreCorrActiveArray[] = {90, 90, 1020, 770};
@@ -132,14 +132,15 @@
             /*preCorrectionActiveArray*/ testActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], basicCoords[i]);
     }
 
-    res = m.mapRawToCorrected(coords.data(), 5, /*clamp*/true);
+    res = m.mapRawToCorrected(coords.data(), 5, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
@@ -152,14 +153,14 @@
     };
 
     auto rectsOrig = rects;
-    res = m.mapCorrectedRectToRaw(rects.data(), 2, /*clamp*/true);
+    res = m.mapCorrectedRectToRaw(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
         EXPECT_EQ(rects[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(rects.data(), 2, /*clamp*/true);
+    res = m.mapRawRectToCorrected(rects.data(), 2, mapperInfo, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
@@ -176,14 +177,17 @@
             /*preCorrectionActiveArray*/ activeArray.data());
 
     auto rectsOrig = activeArray;
-    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedRectToRaw(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
         EXPECT_EQ(activeArray[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(activeArray.data(), 1, /*clamp*/true, /*simple*/ true);
+    res = m.mapRawRectToCorrected(activeArray.data(), 1, mapperInfo, /*clamp*/true,
+            /*simple*/ true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < activeArray.size(); i++) {
@@ -200,7 +204,8 @@
             /*preCorrectionActiveArray*/ testPreCorrActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true, /*simple*/true);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(coords.data(), 5, mapperInfo, /*clamp*/true, /*simple*/true);
     ASSERT_EQ(res, OK);
 
     ASSERT_EQ(coords[0], 0); ASSERT_EQ(coords[1], 0);
@@ -237,12 +242,13 @@
     auto origCoords = randCoords;
 
     base::Timer correctedToRawTimer;
-    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto correctedToRawDurationMs = correctedToRawTimer.duration();
     EXPECT_EQ(res, OK);
 
     base::Timer rawToCorrectedTimer;
-    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, clamp, simple);
+    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, mapperInfo, clamp, simple);
     auto rawToCorrectedDurationMs = rawToCorrectedTimer.duration();
     EXPECT_EQ(res, OK);
 
@@ -363,7 +369,8 @@
 
     using namespace openCvData;
 
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, /*clamp*/false,
+    DistortionMapperInfo *mapperInfo = m.getMapperInfo();
+    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index 4e94991..ff7aafd 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -182,7 +182,7 @@
 
     // Verify 1.0x zoom doesn't change the coordinates
     auto coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], originalCoords[i]);
     }
@@ -199,7 +199,7 @@
             (width - 1) * 5.0f / 4.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoords[i]), kMaxAllowedPixelError);
     }
@@ -216,7 +216,7 @@
             width - 1.0f,  (height - 1) / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedInc[i]), kMaxAllowedPixelError);
     }
@@ -233,7 +233,7 @@
             width - 1.0f,  height / 2.0f, // middle-right after 1.33x zoom
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 2.0f, true /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expected2xCoordsClampedExc[i]), kMaxAllowedPixelError);
     }
@@ -250,7 +250,7 @@
             (width - 1) * 5 / 8.0f, (height - 1) / 2.0f, // middle-right after 1.33x zoom-in
     };
     coords = originalCoords;
-    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/);
+    mapper.scaleCoordinates(coords.data(), coords.size()/2, 1.0f/3, false /*clamp*/, width, height);
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
     }
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
new file mode 100644
index 0000000..76927c0
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -0,0 +1,259 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraServiceProxyWrapper"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <inttypes.h>
+#include <utils/Log.h>
+#include <binder/IServiceManager.h>
+
+#include "CameraServiceProxyWrapper.h"
+
+namespace android {
+
+using hardware::ICameraServiceProxy;
+using hardware::CameraSessionStats;
+
+Mutex CameraServiceProxyWrapper::sProxyMutex;
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
+
+Mutex CameraServiceProxyWrapper::mLock;
+std::map<String8, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
+        CameraServiceProxyWrapper::mSessionStatsMap;
+
+/**
+ * CameraSessionStatsWrapper functions
+ */
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
+    Mutex::Autolock l(mLock);
+
+    updateProxyDeviceState(mSessionStats);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+    Mutex::Autolock l(mLock);
+
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
+    mSessionStats.mLatencyMs = latencyMs;
+    updateProxyDeviceState(mSessionStats);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
+        int operatingMode, bool internalReconfig, int32_t latencyMs) {
+    Mutex::Autolock l(mLock);
+
+    if (internalReconfig) {
+        mSessionStats.mInternalReconfigure++;
+    } else {
+        mSessionStats.mLatencyMs = latencyMs;
+        mSessionStats.mSessionType = operatingMode;
+    }
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive() {
+    Mutex::Autolock l(mLock);
+
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
+    updateProxyDeviceState(mSessionStats);
+
+    // Reset mLatencyMs to -1 to distinguish the 1st session after a
+    // configuration from all subsequent sessions after that configuration.
+    mSessionStats.mLatencyMs = -1;
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
+    Mutex::Autolock l(mLock);
+
+    mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
+    mSessionStats.mRequestCount = requestCount;
+    mSessionStats.mResultErrorCount = resultErrorCount;
+    mSessionStats.mDeviceError = deviceError;
+    mSessionStats.mStreamStats = streamStats;
+    updateProxyDeviceState(mSessionStats);
+
+    mSessionStats.mInternalReconfigure = 0;
+    mSessionStats.mStreamStats.clear();
+}
+
+/**
+ * CameraServiceProxyWrapper functions
+ */
+
+sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
+#ifndef __BRILLO__
+    Mutex::Autolock al(sProxyMutex);
+    if (sCameraServiceProxy == nullptr) {
+        sp<IServiceManager> sm = defaultServiceManager();
+        // Use checkService because cameraserver normally starts before the
+        // system server and the proxy service. So the long timeout that getService
+        // has before giving up is inappropriate.
+        sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+        if (binder != nullptr) {
+            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
+        }
+    }
+#endif
+    return sCameraServiceProxy;
+}
+
+void CameraServiceProxyWrapper::pingCameraServiceProxy() {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return;
+    proxyBinder->pingForUserUpdate();
+}
+
+bool CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
+        String16 packageName, int sensorOrientation, int lensFacing) {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return true;
+    bool ret = true;
+    auto status = proxyBinder->isRotateAndCropOverrideNeeded(packageName, sensorOrientation,
+            lensFacing, &ret);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed during top activity orientation query: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+
+    return ret;
+}
+
+void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return;
+    proxyBinder->notifyCameraState(sessionStats);
+}
+
+void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
+        int operatingMode, bool internalConfig, int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        sessionStats = mSessionStatsMap[id];
+        if (sessionStats == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+    }
+
+    ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
+            __FUNCTION__, id.c_str(), operatingMode, internalConfig, latencyMs);
+    sessionStats->onStreamConfigured(operatingMode, internalConfig, latencyMs);
+}
+
+void CameraServiceProxyWrapper::logActive(const String8& id) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        sessionStats = mSessionStatsMap[id];
+        if (sessionStats == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+    }
+
+    ALOGV("%s: id %s", __FUNCTION__, id.c_str());
+    sessionStats->onActive();
+}
+
+void CameraServiceProxyWrapper::logIdle(const String8& id,
+        int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        sessionStats = mSessionStatsMap[id];
+    }
+
+    if (sessionStats == nullptr) {
+        ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+                __FUNCTION__, id.c_str());
+        return;
+    }
+
+    ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d",
+            __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError);
+    for (size_t i = 0; i < streamStats.size(); i++) {
+        ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
+                PRId64 ", startTimeMs %d" ,
+                __FUNCTION__, i, streamStats[i].mWidth, streamStats[i].mHeight,
+                streamStats[i].mRequestCount, streamStats[i].mErrorCount,
+                streamStats[i].mStartLatencyMs);
+    }
+
+    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, streamStats);
+}
+
+void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
+            const String16& clientPackageName, int effectiveApiLevel, bool isNdk,
+            int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        if (mSessionStatsMap.count(id) > 0) {
+            ALOGE("%s: SessionStatsMap shouldn't contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+
+        int apiLevel = CameraSessionStats::CAMERA_API_LEVEL_1;
+        if (effectiveApiLevel == 2) {
+            apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
+        }
+
+        sessionStats = std::make_shared<CameraSessionStatsWrapper>(String16(id), facing,
+                CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+                apiLevel, isNdk, latencyMs);
+        mSessionStatsMap.emplace(id, sessionStats);
+        ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
+    }
+
+    ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
+            __FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
+    sessionStats->onOpen();
+}
+
+void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs) {
+    std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
+    {
+        Mutex::Autolock l(mLock);
+        if (mSessionStatsMap.count(id) == 0) {
+            ALOGE("%s: SessionStatsMap should contain camera %s before it's closed",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+
+        sessionStats = mSessionStatsMap[id];
+        if (sessionStats == nullptr) {
+            ALOGE("%s: SessionStatsMap should contain camera %s",
+                    __FUNCTION__, id.c_str());
+            return;
+        }
+        mSessionStatsMap.erase(id);
+        ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+    }
+
+    ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
+    sessionStats->onClose(latencyMs);
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
new file mode 100644
index 0000000..ad9db68
--- /dev/null
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
+#define ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
+
+#include <android/hardware/ICameraServiceProxy.h>
+
+#include <utils/Mutex.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/StrongPointer.h>
+#include <utils/Timers.h>
+
+#include <camera/CameraSessionStats.h>
+
+namespace android {
+
+class CameraServiceProxyWrapper {
+private:
+    // Guard sCameraServiceProxy
+    static Mutex sProxyMutex;
+    // Cached interface to the camera service proxy in system service
+    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+
+    struct CameraSessionStatsWrapper {
+        hardware::CameraSessionStats mSessionStats;
+        Mutex mLock; // lock for per camera session stats
+
+        CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
+                const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
+            mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
+            {}
+
+        void onOpen();
+        void onClose(int32_t latencyMs);
+        void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
+        void onActive();
+        void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                const std::vector<hardware::CameraStreamStats>& streamStats);
+    };
+
+    // Lock for camera session stats map
+    static Mutex mLock;
+    // Map from camera id to the camera's session statistics
+    static std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+
+    /**
+     * Update the session stats of a given camera device (open/close/active/idle) with
+     * the camera proxy service in the system service
+     */
+    static void updateProxyDeviceState(
+            const hardware::CameraSessionStats& sessionStats);
+
+    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+public:
+    // Open
+    static void logOpen(const String8& id, int facing,
+            const String16& clientPackageName, int apiLevel, bool isNdk,
+            int32_t latencyMs);
+
+    // Close
+    static void logClose(const String8& id, int32_t latencyMs);
+
+    // Stream configuration
+    static void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+            int32_t latencyMs);
+
+    // Session state becomes active
+    static void logActive(const String8& id);
+
+    // Session state becomes idle
+    static void logIdle(const String8& id,
+            int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            const std::vector<hardware::CameraStreamStats>& streamStats);
+
+    // Ping camera service proxy for user update
+    static void pingCameraServiceProxy();
+
+    // Check whether the current top activity needs a rotate and crop override.
+    static bool isRotateAndCropOverrideNeeded(String16 packageName, int sensorOrientation,
+            int lensFacing);
+};
+
+} // android
+
+#endif // ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 64be6c5..d164885 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -31,6 +31,31 @@
 namespace android {
 namespace resource_policy {
 
+// Values from frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+const int32_t INVALID_ADJ = -10000;
+const int32_t UNKNOWN_ADJ = 1001;
+const int32_t CACHED_APP_MAX_ADJ = 999;
+const int32_t CACHED_APP_MIN_ADJ = 900;
+const int32_t CACHED_APP_LMK_FIRST_ADJ = 950;
+const int32_t CACHED_APP_IMPORTANCE_LEVELS = 5;
+const int32_t SERVICE_B_ADJ = 800;
+const int32_t PREVIOUS_APP_ADJ = 700;
+const int32_t HOME_APP_ADJ = 600;
+const int32_t SERVICE_ADJ = 500;
+const int32_t HEAVY_WEIGHT_APP_ADJ = 400;
+const int32_t BACKUP_APP_ADJ = 300;
+const int32_t PERCEPTIBLE_LOW_APP_ADJ = 250;
+const int32_t PERCEPTIBLE_MEDIUM_APP_ADJ = 225;
+const int32_t PERCEPTIBLE_APP_ADJ = 200;
+const int32_t VISIBLE_APP_ADJ = 100;
+const int32_t VISIBLE_APP_LAYER_MAX = PERCEPTIBLE_APP_ADJ - VISIBLE_APP_ADJ - 1;
+const int32_t PERCEPTIBLE_RECENT_FOREGROUND_APP_ADJ = 50;
+const int32_t FOREGROUND_APP_ADJ = 0;
+const int32_t PERSISTENT_SERVICE_ADJ = -700;
+const int32_t PERSISTENT_PROC_ADJ = -800;
+const int32_t SYSTEM_ADJ = -900;
+const int32_t NATIVE_ADJ = -1000;
+
 class ClientPriority {
 public:
     /**
@@ -39,18 +64,26 @@
      * case where the construction is offloaded to another thread which isn't a
      * hwbinder thread.
      */
-    ClientPriority(int32_t score, int32_t state, bool isVendorClient) :
-            mScore(score), mState(state), mIsVendorClient(isVendorClient) { }
+    ClientPriority(int32_t score, int32_t state, bool isVendorClient, int32_t scoreOffset = 0) :
+            mIsVendorClient(isVendorClient), mScoreOffset(scoreOffset) {
+        setScore(score);
+        setState(state);
+    }
 
     int32_t getScore() const { return mScore; }
     int32_t getState() const { return mState; }
+    bool isVendorClient() const { return mIsVendorClient; }
 
     void setScore(int32_t score) {
         // For vendor clients, the score is set once and for all during
         // construction. Otherwise, it can get reset each time cameraserver
         // queries ActivityManagerService for oom_adj scores / states .
-        if (!mIsVendorClient) {
-            mScore = score;
+        // For clients where the score offset is set by the app, add it to the
+        // score provided by ActivityManagerService.
+        if (score == INVALID_ADJ) {
+            mScore = UNKNOWN_ADJ;
+        } else {
+            mScore = mScoreOffset + score;
         }
     }
 
@@ -60,9 +93,7 @@
       // queries ActivityManagerService for oom_adj scores / states
       // (ActivityManagerService returns a vendor process' state as
       // PROCESS_STATE_NONEXISTENT.
-      if (!mIsVendorClient) {
-          mState = state;
-      }
+      mState = state;
     }
 
     bool operator==(const ClientPriority& rhs) const {
@@ -93,6 +124,7 @@
         int32_t mScore;
         int32_t mState;
         bool mIsVendorClient = false;
+        int32_t mScoreOffset = 0;
 };
 
 // --------------------------------------------------------------------------------
@@ -110,9 +142,10 @@
 public:
     ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
             const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-            bool isVendorClient);
+            bool isVendorClient, int32_t oomScoreOffset);
     ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, std::set<KEY>&& conflictingKeys,
-            int32_t score, int32_t ownerId, int32_t state, bool isVendorClient);
+            int32_t score, int32_t ownerId, int32_t state, bool isVendorClient,
+            int32_t oomScoreOffset);
 
     ~ClientDescriptor();
 
@@ -177,18 +210,18 @@
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
         const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-        bool isVendorClient) :
+        bool isVendorClient, int32_t scoreOffset) :
         mKey{key}, mValue{value}, mCost{cost}, mConflicting{conflictingKeys},
-        mPriority(score, state, isVendorClient),
+        mPriority(score, state, isVendorClient, scoreOffset),
         mOwnerId{ownerId} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost,
         std::set<KEY>&& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
-        bool isVendorClient) :
+        bool isVendorClient, int32_t scoreOffset) :
         mKey{std::forward<KEY>(key)}, mValue{std::forward<VALUE>(value)}, mCost{cost},
         mConflicting{std::forward<std::set<KEY>>(conflictingKeys)},
-        mPriority(score, state, isVendorClient), mOwnerId{ownerId} {}
+        mPriority(score, state, isVendorClient, scoreOffset), mOwnerId{ownerId} {}
 
 template<class KEY, class VALUE>
 ClientDescriptor<KEY, VALUE>::~ClientDescriptor() {}
@@ -239,6 +272,9 @@
     // off in the incoming priority argument since an AIDL thread might have
     // called getCurrentServingCall() == BinderCallType::HWBINDER after refreshing
     // priorities for old clients through ProcessInfoService::getProcessStatesScoresFromPids().
+    if (mPriority.isVendorClient()) {
+        return;
+    }
     mPriority.setScore(priority.getScore());
     mPriority.setState(priority.getState());
 }
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index 8a0303a..485705c 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -916,11 +916,25 @@
         ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__);
     }
 
+    int32_t sensorPixelMode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
+    camera_metadata_ro_entry sensorPixelModeEntry = metadata.find(ANDROID_SENSOR_PIXEL_MODE);
+    if (sensorPixelModeEntry.count != 0) {
+        sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+        if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_DEFAULT &&
+            sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+            ALOGE("%s: Requested sensor pixel mode is not one of the valid values %d",
+                      __FUNCTION__, sensorPixelMode);
+            return false;
+        }
+    }
+    int32_t activeArrayTag = sensorPixelMode == ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION ?
+            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION :
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
     if (metadata.exists(ANDROID_SCALER_CROP_REGION) &&
-            staticInfo.exists(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE)) {
+            staticInfo.exists(activeArrayTag)) {
         entry = metadata.find(ANDROID_SCALER_CROP_REGION);
         camera_metadata_ro_entry activeArrayEntry =
-                staticInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+                staticInfo.find(activeArrayTag);
 
         if (!setDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
                 activeArrayEntry.data.i32[2], activeArrayEntry.data.i32[3])) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 888671c..454c05f 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -13,21 +13,819 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <cutils/properties.h>
+
 #include "SessionConfigurationUtils.h"
-#include "../api2/CameraDeviceClient.h"
+#include "../api2/DepthCompositeStream.h"
+#include "../api2/HeicCompositeStream.h"
+#include "common/CameraDeviceBase.h"
+#include "../CameraService.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3OutputStream.h"
+
+using android::camera3::OutputStreamInfo;
+using android::hardware::camera2::ICameraDeviceUser;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
+namespace camera3 {
+
+int32_t SessionConfigurationUtils::PERF_CLASS_LEVEL =
+        property_get_int32("ro.odm.build.media_performance_class", 0);
+
+bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);
+
+camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
+        bool ultraHighResolution) {
+    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
+    const int STREAM_CONFIGURATION_SIZE = 4;
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+
+    int32_t scalerSizesTag = ultraHighResolution ?
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+    camera_metadata_ro_entry_t availableStreamConfigs =
+            metadata.find(scalerSizesTag);
+    if (availableStreamConfigs.count == 0 ||
+            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+        return camera3::Size(0, 0);
+    }
+
+    // Get max jpeg size (area-wise).
+    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+                && format == HAL_PIXEL_FORMAT_BLOB &&
+                (width * height > maxJpegWidth * maxJpegHeight)) {
+            maxJpegWidth = width;
+            maxJpegHeight = height;
+        }
+    }
+
+    return camera3::Size(maxJpegWidth, maxJpegHeight);
+}
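+// Note: the scan above walks the (format, width, height, isInput) quadruples of the selected
+// stream configuration tag and keeps the largest-area BLOB output, i.e. the largest JPEG size
+// advertised for the chosen sensor pixel mode.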
+
+size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
+    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+            (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
+}
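+// Worked example for the scaling above (hypothetical sizes): with a UHR max JPEG size of
+// 8000x6000, a default max JPEG size of 4000x3000 and a default buffer size of 10 MB, the
+// result is (8000*6000) / (4000*3000) * 10 MB = 4 * 10 MB = 40 MB. The ratio is computed with
+// integer division before the multiplication.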
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, int configuration,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    if (scm == nullptr) {
+        ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
+        return;
+    }
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+
+    camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
+    for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        StreamConfiguration sc = {format, width, height, isInput};
+        (*scm)[format].push_back(sc);
+    }
+}
+
+void StreamConfiguration::getStreamConfigurations(
+        const CameraMetadata &staticInfo, bool maxRes,
+        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
+    int32_t scalerKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t depthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t dynamicDepthKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
+
+    int32_t heicKey =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
+
+    getStreamConfigurations(staticInfo, scalerKey, scm);
+    getStreamConfigurations(staticInfo, depthKey, scm);
+    getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
+    getStreamConfigurations(staticInfo, heicKey, scm);
+}
+
+int32_t SessionConfigurationUtils::getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
+    if (!maxResolution) {
+        return defaultTag;
+    }
+    switch (defaultTag) {
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
+            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_INTRINSIC_CALIBRATION:
+            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_DISTORTION:
+            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        default:
+            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
+                    defaultTag);
+            return -1;
+    }
+    return -1;
+}
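+// Usage sketch for getAppropriateModeTag (assumed caller pattern):
+//   int32_t tag = SessionConfigurationUtils::getAppropriateModeTag(
+//           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, /*maxResolution*/ true);
+// yields ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, while
+// maxResolution == false simply returns the tag that was passed in.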
+
+
+StreamConfigurationPair
+SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
+    camera3::StreamConfigurationPair streamConfigurationPair;
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
+            &streamConfigurationPair.mDefaultStreamConfigurationMap);
+    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
+            &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+    return streamConfigurationPair;
+}
+
+int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+    int64_t d0 = x0 - x1;
+    int64_t d1 = y0 - y1;
+    return d0 * d0 + d1 * d1;
+}
+
+bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
+        int32_t format, android_dataspace dataSpace,
+        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
+        /*out*/int32_t* outHeight) {
+    const int32_t depthSizesTag =
+            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                    maxResolution);
+    const int32_t scalerSizesTag =
+            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t heicSizesTag =
+            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+
+    camera_metadata_ro_entry streamConfigs =
+            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
+            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
+            info.find(heicSizesTag) :
+            info.find(scalerSizesTag);
+
+    int32_t bestWidth = -1;
+    int32_t bestHeight = -1;
+
+    // Iterate through listed stream configurations and find the one with the smallest euclidean
+    // distance from the given dimensions for the given format.
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        int32_t fmt = streamConfigs.data.i32[i];
+        int32_t w = streamConfigs.data.i32[i + 1];
+        int32_t h = streamConfigs.data.i32[i + 2];
+
+        // Ignore input/output type for now
+        if (fmt == format) {
+            if (w == width && h == height) {
+                bestWidth = width;
+                bestHeight = height;
+                break;
+            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
+                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
+                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
+                            height))) {
+                bestWidth = w;
+                bestHeight = h;
+            }
+        }
+    }
+
+    if (bestWidth == -1) {
+        // Return false if no configurations for this format were listed
+        return false;
+    }
+
+    // Set the outputs to the closest width/height
+    if (outWidth != NULL) {
+        *outWidth = bestWidth;
+    }
+    if (outHeight != NULL) {
+        *outHeight = bestHeight;
+    }
+
+    // Return true if at least one configuration for this format was listed
+    return true;
+}
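+// Illustrative behaviour of the rounding above (hypothetical dimensions): a 1917x1077 BLOB
+// request that is not listed verbatim is mapped to the listed configuration with the smallest
+// squared Euclidean distance whose width is <= ROUNDING_WIDTH_CAP (1920), e.g. 1920x1080; if
+// no configuration exists for the format, the function returns false.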
+
+bool SessionConfigurationUtils::isPublicFormat(int32_t format)
+{
+    switch(format) {
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_RGBX_8888:
+        case HAL_PIXEL_FORMAT_RGB_888:
+        case HAL_PIXEL_FORMAT_RGB_565:
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_Y8:
+        case HAL_PIXEL_FORMAT_Y16:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+        case HAL_PIXEL_FORMAT_RAW12:
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_BLOB:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+        case HAL_PIXEL_FORMAT_YCbCr_422_I:
+            return true;
+        default:
+            return false;
+    }
+}
+
+binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
+        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed){
+    // bufferProducer must be non-null
+    if (gbp == nullptr) {
+        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
+        ALOGW("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    // HACK b/10949105
+    // Query consumer usage bits to set async operation mode for
+    // GLConsumer using controlledByApp parameter.
+    bool useAsync = false;
+    uint64_t consumerUsage = 0;
+    status_t err;
+    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
+                logicalCameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
+                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
+        useAsync = true;
+    }
+
+    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+                              GRALLOC_USAGE_RENDERSCRIPT;
+    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+                           GraphicBuffer::USAGE_HW_TEXTURE |
+                           GraphicBuffer::USAGE_HW_COMPOSER;
+    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+            (consumerUsage & allowedFlags) != 0;
+
+    surface = new Surface(gbp, useAsync);
+    ANativeWindow *anw = surface.get();
+
+    int width, height, format;
+    android_dataspace dataSpace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
+                 logicalCameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
+                logicalCameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
+                logicalCameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
+            reinterpret_cast<int*>(&dataSpace))) != OK) {
+        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
+                logicalCameraId.string(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+    }
+
+    // FIXME: remove this override since the default format should be
+    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
+    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
+            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
+             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
+        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
+                __FUNCTION__, logicalCameraId.string(), format);
+        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    }
+    std::unordered_set<int32_t> overriddenSensorPixelModes;
+    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
+            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
+                "format %#x are not valid",logicalCameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    bool foundInMaxRes = false;
+    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            overriddenSensorPixelModes.end()) {
+        // The stream dimensions should be checked against the maximum resolution
+        // stream configuration map.
+        foundInMaxRes = true;
+    }
+    // Round dimensions to the nearest dimensions available for this format
+    if (flexibleConsumer && isPublicFormat(format) &&
+            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
+            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
+            /*out*/&height)) {
+        String8 msg = String8::format("Camera %s: No supported stream configurations with "
+                "format %#x defined, failed to create output stream",
+                logicalCameraId.string(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+
+    if (!isStreamInfoValid) {
+        streamInfo.width = width;
+        streamInfo.height = height;
+        streamInfo.format = format;
+        streamInfo.dataSpace = dataSpace;
+        streamInfo.consumerUsage = consumerUsage;
+        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
+        return binder::Status::ok();
+    }
+    if (width != streamInfo.width) {
+        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
+                logicalCameraId.string(), width, streamInfo.width);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (height != streamInfo.height) {
+        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
+                 logicalCameraId.string(), height, streamInfo.height);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != streamInfo.format) {
+        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
+                 logicalCameraId.string(), format, streamInfo.format);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        if (dataSpace != streamInfo.dataSpace) {
+            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
+                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+        // On the native side, there isn't a way to check whether two surfaces come from the
+        // same Surface class type. Use the usage flags to approximate the comparison.
+        if (consumerUsage != streamInfo.consumerUsage) {
+            String8 msg = String8::format(
+                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
+                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+    }
+    return binder::Status::ok();
+}
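+// Summary of the checks above: once stream info has been established by a previous surface of
+// the same stream, a new Surface's width, height and format must match, and for formats other
+// than IMPLEMENTATION_DEFINED the dataspace and consumer usage must match as well; any
+// mismatch is reported as ERROR_ILLEGAL_ARGUMENT.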
+
+void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
+            camera3::camera_stream_rotation_t rotation, String8 physicalId,
+            int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
+    if (stream == nullptr) {
+        return;
+    }
+
+    stream->v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
+    stream->v3_4.v3_2.width = streamInfo.width;
+    stream->v3_4.v3_2.height = streamInfo.height;
+    stream->v3_4.v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+    auto u = streamInfo.consumerUsage;
+    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
+    stream->v3_4.v3_2.usage = Camera3Device::mapToConsumerUsage(u);
+    stream->v3_4.v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
+    stream->v3_4.v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
+    stream->v3_4.v3_2.id = -1; // Invalid stream id
+    stream->v3_4.physicalCameraId = std::string(physicalId.string());
+    stream->v3_4.bufferSize = 0;
+    stream->groupId = groupId;
+    stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
+    size_t idx = 0;
+    for (auto mode : streamInfo.sensorPixelModesUsed) {
+        stream->sensorPixelModesUsed[idx++] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+    }
+}
+
+binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId) {
+    if (physicalCameraId.size() == 0) {
+        return binder::Status::ok();
+    }
+    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
+        physicalCameraId.string()) == physicalCameraIds.end()) {
+        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
+                logicalCameraId.string(), physicalCameraId.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType)  {
+    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
+        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
+                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
+    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
+    }
+
+    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+
+    if (deferredConsumer && !validSurfaceType) {
+        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    }
+
+    return binder::Status::ok();
+}
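+// The checks above reject configurations with more than MAX_SURFACES_PER_STREAM (4) producers,
+// with zero producers unless the consumer is deferred, and deferred consumers whose surface
+// type is neither SURFACE_VIEW nor SURFACE_TEXTURE.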
+
+binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId) {
+    if (operatingMode < 0) {
+        String8 msg = String8::format(
+            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
+    if (isConstrainedHighSpeed) {
+        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        bool isConstrainedHighSpeedSupported = false;
+        for(size_t i = 0; i < entry.count; ++i) {
+            uint8_t capability = entry.data.u8[i];
+            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
+                isConstrainedHighSpeedSupported = true;
+                break;
+            }
+        }
+        if (!isConstrainedHighSpeedSupported) {
+            String8 msg = String8::format(
+                "Camera %s: Try to create a constrained high speed configuration on a device"
+                " that doesn't support it.", cameraId.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    msg.string());
+        }
+    }
+
+    return binder::Status::ok();
+}
 
 binder::Status
 SessionConfigurationUtils::convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {
-    // TODO: http://b/148329298 Move the other dependencies from
-    // CameraDeviceClient into SessionConfigurationUtils.
-    return CameraDeviceClient::convertToHALStreamCombination(sessionConfiguration, logicalCameraId,
-            deviceInfo, getMetadata, physicalCameraIds, streamConfiguration, earlyExit);
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
+        bool overrideForPerfClass, bool *earlyExit) {
+
+    auto operatingMode = sessionConfiguration.getOperatingMode();
+    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
+    if (!res.isOk()) {
+        return res;
+    }
+
+    if (earlyExit == nullptr) {
+        String8 msg("earlyExit nullptr");
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    *earlyExit = false;
+    auto ret = Camera3Device::mapToStreamConfigurationMode(
+            static_cast<camera_stream_configuration_mode_t> (operatingMode),
+            /*out*/ &streamConfiguration.operationMode);
+    if (ret != OK) {
+        String8 msg = String8::format(
+            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
+            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
+            (sessionConfiguration.getInputHeight() > 0) &&
+            (sessionConfiguration.getInputFormat() > 0);
+    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
+    size_t streamCount = outputConfigs.size();
+    streamCount = isInputValid ? streamCount + 1 : streamCount;
+    streamConfiguration.streams.resize(streamCount);
+    size_t streamIdx = 0;
+    if (isInputValid) {
+        hardware::hidl_vec<CameraMetadataEnumAndroidSensorPixelMode> defaultSensorPixelModes;
+        defaultSensorPixelModes.resize(1);
+        defaultSensorPixelModes[0] =
+                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(
+                        ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
+                hardware::camera::device::V3_2::StreamType::INPUT,
+                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
+                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
+                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
+                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
+                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
+                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1, defaultSensorPixelModes};
+        streamConfiguration.multiResolutionInputImage =
+                sessionConfiguration.inputIsMultiResolution();
+    }
+
+    for (const auto &it : outputConfigs) {
+        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
+            it.getGraphicBufferProducers();
+        bool deferredConsumer = it.isDeferred();
+        String8 physicalCameraId = String8(it.getPhysicalCameraId());
+
+        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
+        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
+                overrideForPerfClass);
+        const CameraMetadata &metadataChosen =
+                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;
+
+        size_t numBufferProducers = bufferProducers.size();
+        bool isStreamInfoValid = false;
+        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
+        OutputStreamInfo streamInfo;
+
+        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+        if (!res.isOk()) {
+            return res;
+        }
+        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
+                logicalCameraId);
+        if (!res.isOk()) {
+            return res;
+        }
+
+        if (deferredConsumer) {
+            streamInfo.width = it.getWidth();
+            streamInfo.height = it.getHeight();
+            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+            auto surfaceType = it.getSurfaceType();
+            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
+                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+            }
+            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
+                    streamInfo.format, streamInfo.width,
+                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    &streamInfo.sensorPixelModesUsed) != OK) {
+                        ALOGE("%s: Deferred surface sensor pixel modes not valid",
+                                __FUNCTION__);
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                                "Deferred surface sensor pixel modes not valid");
+            }
+            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
+                    &streamConfiguration.streams[streamIdx++]);
+            isStreamInfoValid = true;
+
+            if (numBufferProducers == 0) {
+                continue;
+            }
+        }
+
+        for (auto& bufferProducer : bufferProducers) {
+            sp<Surface> surface;
+            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed);
+
+            if (!res.isOk())
+                return res;
+
+            if (!isStreamInfoValid) {
+                bool isDepthCompositeStream =
+                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
+                bool isHeicCompositeStream =
+                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
+                if (isDepthCompositeStream || isHeicCompositeStream) {
+                    // We need to take into account that composite streams can have
+                    // additional internal camera streams.
+                    std::vector<OutputStreamInfo> compositeStreams;
+                    if (isDepthCompositeStream) {
+                        // TODO: Take care of composite streams.
+                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                                deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                            deviceInfo, &compositeStreams);
+                    }
+                    if (ret != OK) {
+                        String8 msg = String8::format(
+                                "Camera %s: Failed adding composite streams: %s (%d)",
+                                logicalCameraId.string(), strerror(-ret), ret);
+                        ALOGE("%s: %s", __FUNCTION__, msg.string());
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                    }
+
+                    if (compositeStreams.size() == 0) {
+                        // No internal streams means composite stream not
+                        // supported.
+                        *earlyExit = true;
+                        return binder::Status::ok();
+                    } else if (compositeStreams.size() > 1) {
+                        streamCount += compositeStreams.size() - 1;
+                        streamConfiguration.streams.resize(streamCount);
+                    }
+
+                    for (const auto& compositeStream : compositeStreams) {
+                        mapStreamInfo(compositeStream,
+                                static_cast<camera_stream_rotation_t> (it.getRotation()),
+                                physicalCameraId, groupId,
+                                &streamConfiguration.streams[streamIdx++]);
+                    }
+                } else {
+                    mapStreamInfo(streamInfo,
+                            static_cast<camera_stream_rotation_t> (it.getRotation()),
+                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                }
+                isStreamInfoValid = true;
+            }
+        }
+    }
+    return binder::Status::ok();
 }
 
-}// namespace android
+static bool inStreamConfigurationMap(int format, int width, int height,
+        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
+    auto scs = sm.find(format);
+    if (scs == sm.end()) {
+        return false;
+    }
+    for (auto &sc : scs->second) {
+        if (sc.width == width && sc.height == height && sc.isInput == 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
+static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
+    return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
+}
+
+status_t SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
+
+    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
+            convertToSet(sensorPixelModesUsed);
+    if (!isUltraHighResolutionSensor(staticInfo)) {
+        if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+                sensorPixelModesUsedSet.end()) {
+            // invalid value for non ultra high res sensors
+            return BAD_VALUE;
+        }
+        overriddenSensorPixelModesUsed->clear();
+        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        return OK;
+    }
+
+    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
+
+    bool isInDefaultStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mDefaultStreamConfigurationMap);
+
+    bool isInMaximumResolutionStreamConfigurationMap =
+            inStreamConfigurationMap(format, width, height,
+                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
+
+    // Case 1: The client has not changed the sensor pixel mode defaults. In this case, check
+    // whether the size + format of the OutputConfiguration is found exclusively in one of the
+    // two stream configuration maps.
+    // If yes, add that sensor pixel mode to overriddenSensorPixelModesUsed.
+    // If no, add 'DEFAULT'. This maintains backwards compatibility.
+    if (sensorPixelModesUsedSet.size() == 0) {
+        // Ambiguous case, default to only 'DEFAULT' mode.
+        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        // We don't allow flexible consumer for max resolution mode.
+        if (isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+            return OK;
+        }
+        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+            return OK;
+        }
+        return BAD_VALUE;
+    }
+
+    // Case 2: The app has set sensorPixelModesUsed; verify that the requested modes are
+    // valid and return an error otherwise.
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
+            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+
+    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
+            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        return BAD_VALUE;
+    }
+    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
+    return OK;
+}
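+// Example of the override logic above (hypothetical stream): with an empty sensorPixelModesUsed
+// list, a size/format advertised only in the maximum-resolution map is overridden to
+// {ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}, while one advertised in both maps defaults to
+// {ANDROID_SENSOR_PIXEL_MODE_DEFAULT}.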
+
+bool SessionConfigurationUtils::isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t entryCap;
+    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    // Go through the capabilities and check if it has
+    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+    for (size_t i = 0; i < entryCap.count; ++i) {
+        uint8_t capability = entryCap.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
+    if (streamConfigV37.multiResolutionInputImage) {
+        // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
+        return false;
+    }
+
+    streamConfigV34.streams.resize(streamConfigV37.streams.size());
+    for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
+        if (streamConfigV37.streams[i].groupId != -1) {
+            // ICameraDevice older than 3.7 doesn't support multi-resolution output
+            // image
+            return false;
+        }
+        streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
+    }
+    streamConfigV34.operationMode = streamConfigV37.operationMode;
+    streamConfigV34.sessionParams = streamConfigV37.sessionParams;
+
+    return true;
+}
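+// Usage sketch (assumed caller pattern) for the conversion above, when talking to a HAL older
+// than 3.7:
+//   hardware::camera::device::V3_4::StreamConfiguration configV34;
+//   if (!SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+//           configV34, configV37)) {
+//       // The combination relies on 3.7-only features (multi-resolution streams) and cannot
+//       // be sent to the older HAL.
+//   }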
+
+bool SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+        const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
+        int targetSdkVersion) {
+    bool isPerfClassPrimaryCamera =
+            perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
+    return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
+}
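+// Example for the check above: a client targeting SDK 31 (S) that opens a camera id listed in
+// perfClassPrimaryCameraIds is treated as a performance-class primary camera session; clients
+// targeting older SDKs, or opening other camera ids, are not.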
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index cfb9f17..1fbaa69 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,16 +21,97 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <android/hardware/camera/device/3.7/types.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 
+#include <device3/Camera3StreamInterface.h>
+
+#include <set>
 #include <stdint.h>
 
-namespace android {
+// Convenience methods for constructing binder::Status objects for error returns
 
-typedef std::function<CameraMetadata (const String8 &)> metadataGetter;
+#define STATUS_ERROR(errorCode, errorString) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+
+#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
+    binder::Status::fromServiceSpecificError(errorCode, \
+            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__))
+
+namespace android {
+namespace camera3 {
+
+typedef std::function<CameraMetadata (const String8 &, int targetSdkVersion)> metadataGetter;
+
+class StreamConfiguration {
+public:
+    int32_t format;
+    int32_t width;
+    int32_t height;
+    int32_t isInput;
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, bool maxRes,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+    static void getStreamConfigurations(
+            const CameraMetadata &static_info, int configuration,
+            std::unordered_map<int, std::vector<StreamConfiguration>> *scm);
+};
+
+// Holds the default StreamConfigurationMap and Maximum resolution
+// StreamConfigurationMap for a camera device.
+struct StreamConfigurationPair {
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mDefaultStreamConfigurationMap;
+    std::unordered_map<int, std::vector<camera3::StreamConfiguration>>
+            mMaximumResolutionStreamConfigurationMap;
+};
 
 class SessionConfigurationUtils {
 public:
+    static camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
+            bool ultraHighResolution);
+
+    static size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+            camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize);
+
+    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+
+    // Find the closest dimensions for a given format in available stream configurations with
+    // a width <= ROUNDING_WIDTH_CAP
+    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+            android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
+            /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+
+    // Check whether the format is a publicly defined (non-custom) format
+    static bool isPublicFormat(int32_t format);
+
+    // Create a Surface from an IGraphicBufferProducer. Returns an error if the
+    // IGraphicBufferProducer's properties don't match the given streamInfo.
+    static binder::Status createSurfaceFromGbp(
+        camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::vector<int32_t> &sensorPixelModesUsed);
+
+    static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
+            camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
+            hardware::camera::device::V3_7::Stream *stream /*out*/);
+
+    // Check that the physicalCameraId passed in is supported by the camera
+    // device.
+    static binder::Status checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId);
+
+    static binder::Status checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType);
+
+    static binder::Status checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId);
+
     // utility function to convert AIDL SessionConfiguration to HIDL
     // streamConfiguration. Also checks for validity of SessionConfiguration and
     // returns a non-ok binder::Status if the passed in session configuration
@@ -39,9 +120,42 @@
     convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
             const String8 &cameraId, const CameraMetadata &deviceInfo,
             metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-            hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
-            bool *earlyExit);
+            hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
+            bool overrideForPerfClass, bool *earlyExit);
+
+    // Utility function to convert a V3_7::StreamConfiguration to
+    // V3_4::StreamConfiguration. Returns false if the original V3_7 configuration cannot
+    // be used by an older-version HAL.
+    static bool convertHALStreamCombinationFromV37ToV34(
+            hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+            const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
+
+    static StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
+
+    static status_t checkAndOverrideSensorPixelModesUsed(
+            const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+            const CameraMetadata &staticInfo, bool flexibleConsumer,
+            std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
+
+    static bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+
+    static int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
+
+    static bool targetPerfClassPrimaryCamera(
+            const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
+            int32_t targetSdkVersion);
+
+    static const int32_t MAX_SURFACES_PER_STREAM = 4;
+
+    static const int32_t ROUNDING_WIDTH_CAP = 1920;
+
+    static const int32_t SDK_VERSION_S = 31;
+    static int32_t PERF_CLASS_LEVEL;
+    static bool IS_PERF_CLASS;
+    static const int32_t PERF_CLASS_JPEG_THRESH_W = 1920;
+    static const int32_t PERF_CLASS_JPEG_THRESH_H = 1080;
 };
 
+} // camera3
 } // android
 #endif
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
new file mode 100644
index 0000000..7a7707c
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraSessionStatsBuilder"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <numeric>
+
+#include <inttypes.h>
+#include <utils/Log.h>
+
+#include "SessionStatsBuilder.h"
+
+namespace android {
+
+// Bins for capture latency: [0, 100), [100, 200), [200, 300), ...,
+// [1300, 2100), [2100, inf).
+// Capture latency is in units of milliseconds.
+const std::array<int32_t, StreamStats::LATENCY_BIN_COUNT-1> StreamStats::mCaptureLatencyBins {
+        { 100, 200, 300, 400, 500, 700, 900, 1300, 2100 } };
+
+status_t SessionStatsBuilder::addStream(int id) {
+    std::lock_guard<std::mutex> l(mLock);
+    StreamStats stats;
+    mStatsMap.emplace(id, stats);
+    return OK;
+}
+
+status_t SessionStatsBuilder::removeStream(int id) {
+    std::lock_guard<std::mutex> l(mLock);
+    mStatsMap.erase(id);
+    return OK;
+}
+
+void SessionStatsBuilder::buildAndReset(int64_t* requestCount,
+        int64_t* errorResultCount, bool* deviceError,
+        std::map<int, StreamStats> *statsMap) {
+    std::lock_guard<std::mutex> l(mLock);
+    *requestCount = mRequestCount;
+    *errorResultCount = mErrorResultCount;
+    *deviceError = mDeviceError;
+    *statsMap = mStatsMap;
+
+    // Reset internal states
+    mRequestCount = 0;
+    mErrorResultCount = 0;
+    mCounterStopped = false;
+    mDeviceError = false;
+    for (auto& streamStats : mStatsMap) {
+        StreamStats& streamStat = streamStats.second;
+        streamStat.mRequestedFrameCount = 0;
+        streamStat.mDroppedFrameCount = 0;
+        streamStat.mCounterStopped = false;
+        streamStat.mStartLatencyMs = 0;
+
+        std::fill(streamStat.mCaptureLatencyHistogram.begin(),
+                streamStat.mCaptureLatencyHistogram.end(), 0);
+    }
+}
+
+void SessionStatsBuilder::startCounter(int id) {
+    std::lock_guard<std::mutex> l(mLock);
+    mStatsMap[id].mCounterStopped = false;
+}
+
+void SessionStatsBuilder::stopCounter(int id) {
+    std::lock_guard<std::mutex> l(mLock);
+    StreamStats& streamStat = mStatsMap[id];
+    streamStat.mCounterStopped = true;
+}
+
+void SessionStatsBuilder::incCounter(int id, bool dropped, int32_t captureLatencyMs) {
+    std::lock_guard<std::mutex> l(mLock);
+
+    auto it = mStatsMap.find(id);
+    if (it == mStatsMap.end()) return;
+
+    StreamStats& streamStat = it->second;
+    if (streamStat.mCounterStopped) return;
+
+    streamStat.mRequestedFrameCount++;
+    if (dropped) {
+        streamStat.mDroppedFrameCount++;
+    } else if (streamStat.mRequestedFrameCount - streamStat.mDroppedFrameCount == 1) {
+        // The capture latency for the first request.
+        streamStat.mStartLatencyMs = captureLatencyMs;
+    }
+
+    streamStat.updateLatencyHistogram(captureLatencyMs);
+}
+
+void SessionStatsBuilder::stopCounter() {
+    std::lock_guard<std::mutex> l(mLock);
+    mCounterStopped = true;
+    for (auto& streamStats : mStatsMap) {
+        streamStats.second.mCounterStopped = true;
+    }
+}
+
+void SessionStatsBuilder::incResultCounter(bool dropped) {
+    std::lock_guard<std::mutex> l(mLock);
+    if (mCounterStopped) return;
+
+    mRequestCount++;
+    if (dropped) mErrorResultCount++;
+}
+
+void SessionStatsBuilder::onDeviceError() {
+    std::lock_guard<std::mutex> l(mLock);
+    mDeviceError = true;
+}
+
+void StreamStats::updateLatencyHistogram(int32_t latencyMs) {
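+    // Count the latency in the first bin whose upper boundary exceeds latencyMs;
+    // if no boundary does, the latency falls into the overflow bin handled below.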
+    size_t i;
+    for (i = 0; i < mCaptureLatencyBins.size(); i++) {
+        if (latencyMs < mCaptureLatencyBins[i]) {
+            mCaptureLatencyHistogram[i] ++;
+            break;
+        }
+    }
+
+    if (i == mCaptureLatencyBins.size()) {
+        mCaptureLatencyHistogram[i]++;
+    }
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
new file mode 100644
index 0000000..c23abb6
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
+#define ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
+
+#include <utils/Errors.h>
+
+#include <array>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+// Helper class to build stream stats
+struct StreamStats {
+    // Fields for buffer drop
+    int64_t mRequestedFrameCount;
+    int64_t mDroppedFrameCount;
+    bool mCounterStopped;
+
+    // Fields for stream startup latency
+    int32_t mStartLatencyMs;
+
+    // Fields for capture latency measurement
+    const static int LATENCY_BIN_COUNT = 10;
+    // Boundary values separating adjacent bins, excluding 0 and
+    // infinity.
+    const static std::array<int32_t, LATENCY_BIN_COUNT-1> mCaptureLatencyBins;
+    // Counter values for all histogram bins. One more entry than mCaptureLatencyBins.
+    std::array<int64_t, LATENCY_BIN_COUNT> mCaptureLatencyHistogram;
+
+    StreamStats() : mRequestedFrameCount(0),
+                     mDroppedFrameCount(0),
+                     mCounterStopped(false),
+                     mStartLatencyMs(0),
+                     mCaptureLatencyHistogram{}
+                  {}
+
+    void updateLatencyHistogram(int32_t latencyMs);
+};
+
+// Helper class to build session stats
+class SessionStatsBuilder {
+public:
+
+    status_t addStream(int streamId);
+    status_t removeStream(int streamId);
+
+    // Return the session statistics and reset the internal states.
+    void buildAndReset(/*out*/int64_t* requestCount,
+            /*out*/int64_t* errorResultCount,
+            /*out*/bool* deviceError,
+            /*out*/std::map<int, StreamStats> *statsMap);
+
+    // Stream specific counter
+    void startCounter(int streamId);
+    void stopCounter(int streamId);
+    void incCounter(int streamId, bool dropped, int32_t captureLatencyMs);
+
+    // Session specific counter
+    void stopCounter();
+    void incResultCounter(bool dropped);
+    void onDeviceError();
+
+    SessionStatsBuilder() : mRequestCount(0), mErrorResultCount(0),
+             mCounterStopped(false), mDeviceError(false) {}
+private:
+    std::mutex mLock;
+    int64_t mRequestCount;
+    int64_t mErrorResultCount;
+    bool mCounterStopped;
+    bool mDeviceError;
+    // Map from stream id to stream statistics
+    std::map<int, StreamStats> mStatsMap;
+};
+
+}; // namespace android
+
+#endif // ANDROID_SERVICE_UTILS_SESSION_STATS_BUILDER_H
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index c4efbaa..bf4d524 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -1,3 +1,20 @@
+package {
+    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_mediacodec_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_binary {
     name: "mediaswcodec",
     vendor_available: true,
@@ -15,24 +32,10 @@
         "libmedia_codecserviceregistrant",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
     header_libs: [
         "libmedia_headers",
     ],
 
-    init_rc: ["mediaswcodec.rc"],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/services/mediacodec/mediaswcodec.rc b/services/mediacodec/mediaswcodec.rc
deleted file mode 100644
index 3549666..0000000
--- a/services/mediacodec/mediaswcodec.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service media.swcodec /system/bin/mediaswcodec
-    class main
-    user mediacodec
-    group camera drmrpc mediadrm
-    updatable
-    ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 0441cfa..696b967 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
 cc_library_shared {
     name: "libmedia_codecserviceregistrant",
     vendor_available: true,
@@ -55,4 +64,3 @@
         "libcodec2_soft_gsmdec",
     ],
 }
-
diff --git a/services/mediacodec/registrant/CodecServiceRegistrant.cpp b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
index 184251a..b479433 100644
--- a/services/mediacodec/registrant/CodecServiceRegistrant.cpp
+++ b/services/mediacodec/registrant/CodecServiceRegistrant.cpp
@@ -25,8 +25,9 @@
 #include <C2PlatformSupport.h>
 #include <codec2/hidl/1.0/ComponentStore.h>
 #include <codec2/hidl/1.1/ComponentStore.h>
-#include <codec2/hidl/1.1/Configurable.h>
-#include <codec2/hidl/1.1/types.h>
+#include <codec2/hidl/1.2/ComponentStore.h>
+#include <codec2/hidl/1.2/Configurable.h>
+#include <codec2/hidl/1.2/types.h>
 #include <hidl/HidlSupport.h>
 #include <media/CodecServiceRegistrant.h>
 
@@ -37,8 +38,8 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::sp;
-using namespace ::android::hardware::media::c2::V1_1;
-using namespace ::android::hardware::media::c2::V1_1::utils;
+using namespace ::android::hardware::media::c2::V1_2;
+using namespace ::android::hardware::media::c2::V1_2::utils;
 
 constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
 
@@ -420,11 +421,20 @@
-    // STOPSHIP: Remove code name checking once platform version bumps up to 30.
+    // STOPSHIP: Remove code name checking once platform version bumps up to 31.
     std::string codeName =
         android::base::GetProperty("ro.build.version.codename", "");
-    if (codeName == "R") {
-        platformVersion = 30;
+    if (codeName == "S") {
+        platformVersion = 31;
     }
 
     switch (platformVersion) {
+        case 31: {
+            android::sp<V1_2::IComponentStore> storeV1_2 =
+                new V1_2::utils::ComponentStore(store);
+            if (storeV1_2->registerAsService("software") != android::OK) {
+                LOG(ERROR) << "Cannot register software Codec2 v1.2 service.";
+                return;
+            }
+            break;
+        }
         case 30: {
             android::sp<V1_1::IComponentStore> storeV1_1 =
                 new V1_1::utils::ComponentStore(store);
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86.policy b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
index a9d32d6..4bcc077 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-x86.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
@@ -27,6 +27,7 @@
 mmap: 1
 fstat64: 1
 fstat: 1
+stat: 1
 stat64: 1
 statfs64: 1
 madvise: 1
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86_64.policy b/services/mediacodec/seccomp_policy/mediacodec-x86_64.policy
index a9d32d6..4bcc077 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-x86_64.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86_64.policy
@@ -27,6 +27,7 @@
 mmap: 1
 fstat64: 1
 fstat: 1
+stat: 1
 stat64: 1
 statfs64: 1
 madvise: 1
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-x86.policy b/services/mediacodec/seccomp_policy/mediaswcodec-x86.policy
index eb71e28..9bafe7b 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-x86.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-x86.policy
@@ -27,6 +27,7 @@
 mmap: 1
 fstat64: 1
 fstat: 1
+stat: 1
 stat64: 1
 statfs64: 1
 madvise: 1
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy
index e72d4db..b0ed040 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-x86_64.policy
@@ -27,6 +27,7 @@
 mmap: 1
 fstat64: 1
 fstat: 1
+stat: 1
 stat64: 1
 statfs64: 1
 madvise: 1
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 0b25d62..85ce110 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -1,4 +1,23 @@
 // service library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_mediaextractor_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_mediaextractor_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
 cc_library_shared {
     name: "libmediaextractorservice",
     cflags: [
@@ -35,20 +54,6 @@
         "liblog",
         "libavservices_minijail",
     ],
-    header_libs: [
-        "bionic_libc_platform_headers",
-    ],
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
     init_rc: ["mediaextractor.rc"],
 
     cflags: [
@@ -81,4 +86,3 @@
         "code_coverage.policy",
     ],
 }
-
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index 9992d1c..71a5bff 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -39,23 +39,23 @@
 
 ::android::binder::Status MediaExtractorService::makeExtractor(
         const ::android::sp<::android::IDataSource>& remoteSource,
-        const ::std::unique_ptr< ::std::string> &mime,
+        const ::std::optional< ::std::string> &mime,
         ::android::sp<::android::IMediaExtractor>* _aidl_return) {
-    ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime.get()->c_str());
+    ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime ? mime->c_str() : nullptr);
 
     sp<DataSource> localSource = CreateDataSourceFromIDataSource(remoteSource);
 
     MediaBuffer::useSharedMemory();
     sp<IMediaExtractor> extractor = MediaExtractorFactory::CreateFromService(
             localSource,
-            mime.get() ? mime.get()->c_str() : nullptr);
+            mime ? mime->c_str() : nullptr);
 
     ALOGV("extractor service created %p (%s)",
             extractor.get(),
             extractor == nullptr ? "" : extractor->name());
 
     if (extractor != nullptr) {
-        registerMediaExtractor(extractor, localSource, mime.get() ? mime.get()->c_str() : nullptr);
+        registerMediaExtractor(extractor, localSource, mime ? mime->c_str() : nullptr);
     }
     *_aidl_return = extractor;
     return binder::Status::ok();
diff --git a/services/mediaextractor/MediaExtractorService.h b/services/mediaextractor/MediaExtractorService.h
index 1b40bf9..0081e7e 100644
--- a/services/mediaextractor/MediaExtractorService.h
+++ b/services/mediaextractor/MediaExtractorService.h
@@ -33,7 +33,7 @@
 
     virtual ::android::binder::Status makeExtractor(
             const ::android::sp<::android::IDataSource>& source,
-            const ::std::unique_ptr< ::std::string> &mime,
+            const ::std::optional< ::std::string> &mime,
             ::android::sp<::android::IMediaExtractor>* _aidl_return);
 
     virtual ::android::binder::Status makeIDataSource(
diff --git a/services/mediaextractor/main_extractorservice.cpp b/services/mediaextractor/main_extractorservice.cpp
index f21574f..10b8135 100644
--- a/services/mediaextractor/main_extractorservice.cpp
+++ b/services/mediaextractor/main_extractorservice.cpp
@@ -15,6 +15,9 @@
 ** limitations under the License.
 */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "main_extractorservice"
+
 #include <fcntl.h>
 #include <sys/prctl.h>
 #include <sys/wait.h>
@@ -26,10 +29,9 @@
 
 #include <android-base/logging.h>
 #include <android-base/properties.h>
+#include <utils/Log.h>
 #include <utils/misc.h>
 
-#include <bionic/reserved_signals.h>
-
 // from LOCAL_C_INCLUDES
 #include "MediaExtractorService.h"
 #include "minijail.h"
@@ -43,17 +45,19 @@
 
 int main(int argc __unused, char** argv)
 {
+
+#if __has_feature(hwaddress_sanitizer)
+    ALOGI("disable media.extractor memory limits (hwasan enabled)");
+#else
+    ALOGI("enable media.extractor memory limits");
     limitProcessMemory(
         "ro.media.maxmem", /* property that defines limit */
         SIZE_MAX, /* upper limit in bytes */
         20 /* upper limit as percentage of physical RAM */);
+#endif
 
     signal(SIGPIPE, SIG_IGN);
 
-    // Do not assist platform profilers (relevant only on debug builds).
-    // Otherwise, the signal handler can violate the seccomp policy.
-    signal(BIONIC_SIGNAL_PROFILER, SIG_IGN);
-
-    //b/62255959: this forces libutis.so to dlopen vendor version of libutils.so
+    //b/62255959: this forces libutils.so to dlopen vendor version of libutils.so
     //before minijail is on. This is dirty but required since some syscalls such
     //as pread64 are used by linker but aren't allowed in the minijail. By
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 74b63d5..cfc4c40 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -1,4 +1,13 @@
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
     name: "libmedialogservice",
 
     srcs: [
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
new file mode 100644
index 0000000..9ff0ce4
--- /dev/null
+++ b/services/medialog/fuzzer/Android.bp
@@ -0,0 +1,42 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "media_log_fuzzer",
+    static_libs: [
+        "libmedialogservice",
+    ],
+    srcs: [
+        "media_log_fuzzer.cpp",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbinder",
+        "liblog",
+        "libmediautils",
+        "libnblog",
+        "libutils",
+    ],
+    include_dirs: [
+        "frameworks/av/services/medialog",
+    ],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/medialog/fuzzer/README.md b/services/medialog/fuzzer/README.md
new file mode 100644
index 0000000..b79e5c8
--- /dev/null
+++ b/services/medialog/fuzzer/README.md
@@ -0,0 +1,50 @@
+# Fuzzer for libmedialogservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedialogservice is designed based on an understanding of the
+service and aims to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+medialogservice supports the following parameters:
+1. Writer name (parameter name: `writerNameIdx`)
+2. Log size (parameter name: `logSize`)
+3. Whether to dump before the unregister API is called (parameter name: `shouldDumpBeforeUnregister`)
+4. Number of argument strings passed to the log dump (parameter name: `numberOfLines`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `writerNameIdx` | `0` or `1` | Value obtained from FuzzedDataProvider |
+| `logSize` | In the range `256` to `65536` | Value obtained from FuzzedDataProvider |
+| `shouldDumpBeforeUnregister` | `0` or `1` | Value obtained from FuzzedDataProvider |
+| `numberOfLines` | In the range `0` to `65535` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
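+
+The sketch below is a simplified illustration (not the exact fuzzer source; the
+`fdp` name is shortened) of how these parameters are derived from the incoming
+data using `FuzzedDataProvider`:
+
+```
+FuzzedDataProvider fdp(data, size);
+size_t writerNameIdx = fdp.ConsumeIntegralInRange<size_t>(0, 1);
+bool shouldDumpBeforeUnregister = fdp.ConsumeBool();
+size_t logSize = fdp.ConsumeIntegralInRange<size_t>(256, 65536);
+size_t numberOfLines = fdp.ConsumeIntegralInRange<size_t>(0, 65535);
+```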
+
+## Build
+
+This describes the steps to build the media_log_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) media_log_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR, copy some files into it, and push the
+directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/media_log_fuzzer/media_log_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/medialog/fuzzer/media_log_fuzzer.cpp b/services/medialog/fuzzer/media_log_fuzzer.cpp
new file mode 100644
index 0000000..bd50d0f
--- /dev/null
+++ b/services/medialog/fuzzer/media_log_fuzzer.cpp
@@ -0,0 +1,76 @@
+/**
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <private/android_filesystem_config.h>
+#include "MediaLogService.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr const char* kWriterNames[2] = {"FastMixer", "FastCapture"};
+constexpr size_t kMinSize = 0x100;
+constexpr size_t kMaxSize = 0x10000;
+constexpr size_t kLogMemorySize = 400 * 1024;
+constexpr size_t kMaxNumLines = USHRT_MAX;
+
+using namespace android;
+
+class MediaLogFuzzer {
+   public:
+    void init();
+    void process(const uint8_t* data, size_t size);
+
+   private:
+    sp<MemoryDealer> mMemoryDealer = nullptr;
+    sp<MediaLogService> mService = nullptr;
+};
+
+void MediaLogFuzzer::init() {
+    setuid(AID_MEDIA);
+    mService = new MediaLogService();
+    mMemoryDealer = new MemoryDealer(kLogMemorySize, "MediaLogFuzzer", MemoryHeapBase::READ_ONLY);
+}
+
+void MediaLogFuzzer::process(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fuzzedDataProvider(data, size);
+    size_t writerNameIdx =
+        fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, std::size(kWriterNames) - 1);
+    bool shouldDumpBeforeUnregister = fuzzedDataProvider.ConsumeBool();
+    size_t logSize = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+    sp<IMemory> logBuffer = mMemoryDealer->allocate(NBLog::Timeline::sharedSize(logSize));
+    Vector<String16> args;
+    size_t numberOfLines = fuzzedDataProvider.ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+    for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+        args.add(static_cast<String16>(fuzzedDataProvider.ConsumeRandomLengthString().c_str()));
+    }
+    const char* fileName = "logDumpFile";
+    int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+    fuzzedDataProvider.ConsumeData(logBuffer->unsecurePointer(), logBuffer->size());
+    mService->registerWriter(logBuffer, logSize, kWriterNames[writerNameIdx]);
+    if (shouldDumpBeforeUnregister) {
+        mService->dump(fd, args);
+        mService->unregisterWriter(logBuffer);
+    } else {
+        mService->unregisterWriter(logBuffer);
+        mService->dump(fd, args);
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MediaLogFuzzer mediaLogFuzzer = MediaLogFuzzer();
+    mediaLogFuzzer.init();
+    mediaLogFuzzer.process(data, size);
+    return 0;
+}
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 91590e1..5989181 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -1,6 +1,15 @@
 // Media Statistics service
 //
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 tidy_errors = [
     // https://clang.llvm.org/extra/clang-tidy/checks/list.html
     // For many categories, the checks are too many to specify individually.
@@ -21,7 +30,7 @@
     "modernize-loop-convert",
     "modernize-make-shared",
     "modernize-make-unique",
-    "modernize-pass-by-value",
+    // "modernize-pass-by-value", // found in TimeMachine.h
     "modernize-raw-string-literal",
     "modernize-redundant-void-arg",
     "modernize-replace-auto-ptr",
@@ -29,13 +38,13 @@
     "modernize-return-braced-init-list",
     "modernize-shrink-to-fit",
     "modernize-unary-static-assert",
-    "modernize-use-auto",  // debatable - auto can obscure type
+    // "modernize-use-auto",  // found in MediaMetricsService.h, debatable - auto can obscure type
     "modernize-use-bool-literals",
     "modernize-use-default-member-init",
     "modernize-use-emplace",
     "modernize-use-equals-default",
     "modernize-use-equals-delete",
-    "modernize-use-nodiscard",
+    // "modernize-use-nodiscard", // found in TimeMachine.h
     "modernize-use-noexcept",
     "modernize-use-nullptr",
     "modernize-use-override",
@@ -48,6 +57,11 @@
     // Remove some pedantic stylistic requirements.
     "-google-readability-casting", // C++ casts not always necessary and may be verbose
     "-google-readability-todo",    // do not require TODO(info)
+
+    "-bugprone-unhandled-self-assignment", // found in TimeMachine.h
+    "-bugprone-suspicious-string-compare", // found in TimeMachine.h
+    "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
 ]
 
 cc_defaults {
@@ -79,8 +93,7 @@
     tidy_checks: tidy_errors,
     tidy_checks_as_errors: tidy_errors,
     tidy_flags: [
-      "-format-style='file'",
-      "--header-filter='frameworks/av/services/mediametrics/'",
+      "-format-style=file",
     ],
 }
 
@@ -100,6 +113,7 @@
         "libmediametricsservice",
         "libmediautils",
         "libutils",
+        "mediametricsservice-aidl-cpp",
     ],
     header_libs: [
         "libaudioutils_headers",
@@ -111,7 +125,7 @@
     ],
 }
 
-cc_library_shared {
+cc_library {
     name: "libmediametricsservice",
     defaults: [
         "mediametrics_flags_defaults",
@@ -142,6 +156,7 @@
     },
 
     shared_libs: [
+        "mediametricsservice-aidl-cpp",
         "libbase", // android logging
         "libbinder",
         "libcutils",
@@ -151,10 +166,18 @@
         "libmediautils",
         "libmemunreachable",
         "libprotobuf-cpp-lite",
+        "libstagefright_foundation",
         "libstatslog",
+        "libstatspull",
+        "libstatssocket",
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libstatspull",
+        "libstatssocket",
+    ],
+
     static_libs: [
         "libplatformprotos",
     ],
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index d78d1e3..11ec993 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -87,6 +87,7 @@
     "selected_device_id",
     "caller",
     "source",
+    "log_session_id",
 };
 
 static constexpr const char * const AudioThreadDeviceUsageFields[] = {
@@ -124,6 +125,7 @@
     "content_type",
     "caller",
     "traits",
+    "log_session_id",
 };
 
 static constexpr const char * const AudioDeviceConnectionFields[] = {
@@ -136,6 +138,56 @@
     "connection_count",
 };
 
+static constexpr const char * const AAudioStreamFields[] {
+    "mediametrics_aaudiostream_reported",
+    "path",
+    "direction",
+    "frames_per_burst",
+    "buffer_size",
+    "buffer_capacity",
+    "channel_count",
+    "total_frames_transferred",
+    "perf_mode_requested",
+    "perf_mode_actual",
+    "sharing",
+    "xrun_count",
+    "device_type",
+    "format_app",
+    "format_device",
+    "log_session_id",
+    "sample_rate",
+    "content_type",
+    "sharing_requested",
+};
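+// The field order above must match the argument order used in the sendToStatsd()
+// and printFields() calls for MEDIAMETRICS_AAUDIOSTREAM_REPORTED in
+// endAAudioStream() below.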
+
+/**
+ * printFields is a helper method that prints the fields and corresponding values
+ * in a human readable style.
+ */
+template <size_t N, typename ...Types>
+std::string printFields(const char * const (& fields)[N], Types ... args)
+{
+    std::stringstream ss;
+    ss << " { ";
+    stringutils::fieldPrint(ss, fields, args...);
+    ss << "}";
+    return ss.str();
+}
+
+/**
+ * sendToStatsd is a helper method that sends the arguments to statsd
+ */
+template <typename ...Types>
+int sendToStatsd(Types ... args)
+{
+    int result = 0;
+
+#ifdef STATSD_ENABLE
+    result = android::util::stats_write(args...);
+#endif
+    return result;
+}
+
 /**
  * sendToStatsd is a helper method that sends the arguments to statsd
  * and returns a pair { result, summary_string }.
@@ -156,8 +208,10 @@
     return { result, ss.str() };
 }
 
-AudioAnalytics::AudioAnalytics()
+AudioAnalytics::AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog)
     : mDeliverStatistics(property_get_bool(PROP_AUDIO_ANALYTICS_CLOUD_ENABLED, true))
+    , mStatsdLog(statsdLog)
+    , mAudioPowerUsage(this, statsdLog)
 {
     SetMinimumLogSeverity(android::base::DEBUG); // for LOG().
     ALOGD("%s", __func__);
@@ -192,6 +246,33 @@
                 });
             }));
 
+    // Handle legacy aaudio playback stream statistics
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
+            }));
+
+    // Handle legacy aaudio capture stream statistics
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_LEGACY);
+            }));
+
+    // Handle mmap aaudio stream statistics
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mAAudioStreamInfo.endAAudioStream(item, AAudioStreamInfo::CALLER_PATH_MMAP);
+            }));
+
     // Handle device use record statistics
     mActions.addAction(
         AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*." AMEDIAMETRICS_PROP_EVENT,
@@ -338,20 +419,6 @@
         ll -= l;
     }
 
-    if (ll > 0) {
-        // Print the statsd atoms we sent out.
-        const std::string statsd = mStatsdLog.dumpToString("  " /* prefix */, ll - 1);
-        const size_t n = std::count(statsd.begin(), statsd.end(), '\n') + 1; // we control this.
-        if ((size_t)ll >= n) {
-            if (n == 1) {
-                ss << "Statsd atoms: empty or truncated\n";
-            } else {
-                ss << "Statsd atoms:\n" << statsd;
-            }
-            ll -= n;
-        }
-    }
-
     if (ll > 0 && prefix == nullptr) {
         auto [s, l] = mAudioPowerUsage.dump(ll);
         ss << s;
@@ -484,12 +551,18 @@
         std::string source;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_SOURCE, &source);
+        // Android S
+        std::string logSessionId;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
         const auto callerNameForStats =
                 types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
         const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
         const auto flagsForStats = types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags);
         const auto sourceForStats = types::lookup<types::SOURCE_TYPE, short_enum_type_t>(source);
+        // Android S
+        const auto logSessionIdForStats = stringutils::sanitizeLogSessionId(logSessionId);
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
@@ -504,7 +577,9 @@
               << ") packageName:" << packageName
               << " selectedDeviceId:" << selectedDeviceId
               << " callerName:" << callerName << "(" << callerNameForStats
-              << ") source:" << source << "(" << sourceForStats << ")";
+              << ") source:" << source << "(" << sourceForStats
+              << ") logSessionId:" << logSessionId << "(" << logSessionIdForStats
+              << ")";
         if (clientCalled  // only log if client app called AudioRecord.
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
@@ -522,9 +597,11 @@
                     , selectedDeviceId
                     , ENUM_EXTRACT(callerNameForStats)
                     , ENUM_EXTRACT(sourceForStats)
+                    , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -572,7 +649,8 @@
                 , ENUM_EXTRACT(typeForStats)
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
@@ -622,6 +700,10 @@
         std::string usage;
         mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_USAGE, &usage);
+        // Android S
+        std::string logSessionId;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
 
         const auto callerNameForStats =
                 types::lookup<types::CALLER_NAME, short_enum_type_t>(callerName);
@@ -634,6 +716,8 @@
         const auto traitsForStats =
                  types::lookup<types::TRACK_TRAITS, short_enum_type_t>(traits);
         const auto usageForStats = types::lookup<types::USAGE, short_enum_type_t>(usage);
+        // Android S
+        const auto logSessionIdForStats = stringutils::sanitizeLogSessionId(logSessionId);
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
@@ -658,6 +742,7 @@
               << " streamType:" << streamType << "(" << streamTypeForStats
               << ") traits:" << traits << "(" << traitsForStats
               << ") usage:" << usage << "(" << usageForStats
+              << ") logSessionId:" << logSessionId << "(" << logSessionIdForStats
               << ")";
         if (clientCalled // only log if client app called AudioTracks
                 && mAudioAnalytics.mDeliverStatistics) {
@@ -682,9 +767,11 @@
                     , ENUM_EXTRACT(contentTypeForStats)
                     , ENUM_EXTRACT(callerNameForStats)
                     , ENUM_EXTRACT(traitsForStats)
+                    , logSessionIdForStats.c_str()
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
@@ -737,7 +824,7 @@
         mA2dpConnectionServiceNs = 0;
         ++mA2dpConnectionSuccesses;
 
-        const auto connectionTimeMs = float(timeDiffNs * 1e-6);
+        const auto connectionTimeMs = float((double)timeDiffNs * 1e-6);
 
         const auto outputDeviceBits = types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(
                 "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP");
@@ -760,7 +847,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -813,7 +901,8 @@
                     , /* connection_count */ 1
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
-            mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+            mAudioAnalytics.mStatsdLog->log(
+                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -839,7 +928,166 @@
                 , /* connection_count */ 1
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+    }
+}
+
+void AudioAnalytics::AAudioStreamInfo::endAAudioStream(
+        const std::shared_ptr<const android::mediametrics::Item> &item, CallerPath path) const {
+    const std::string& key = item->getKey();
+
+    std::string directionStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DIRECTION, &directionStr);
+    const auto direction = types::lookup<types::AAUDIO_DIRECTION, int32_t>(directionStr);
+
+    int32_t framesPerBurst = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_BURSTFRAMES, &framesPerBurst);
+
+    int32_t bufferSizeInFrames = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, &bufferSizeInFrames);
+
+    int32_t bufferCapacityInFrames = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, &bufferCapacityInFrames);
+
+    int32_t channelCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CHANNELCOUNT, &channelCount);
+
+    int64_t totalFramesTransferred = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_FRAMESTRANSFERRED, &totalFramesTransferred);
+
+    std::string perfModeRequestedStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_PERFORMANCEMODE, &perfModeRequestedStr);
+    const auto perfModeRequested =
+            types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeRequestedStr);
+
+    std::string perfModeActualStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL, &perfModeActualStr);
+    const auto perfModeActual =
+            types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeActualStr);
+
+    std::string sharingModeActualStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SHARINGMODEACTUAL, &sharingModeActualStr);
+    const auto sharingModeActual =
+            types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeActualStr);
+
+    int32_t xrunCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_UNDERRUN, &xrunCount);
+
+    std::string serializedDeviceTypes;
+    // TODO: only the routed device id is logged, not the device type
+
+    std::string formatAppStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ENCODINGCLIENT, &formatAppStr);
+    const auto formatApp = types::lookup<types::ENCODING, int32_t>(formatAppStr);
+
+    std::string formatDeviceStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ENCODING, &formatDeviceStr);
+    const auto formatDevice = types::lookup<types::ENCODING, int32_t>(formatDeviceStr);
+
+    std::string logSessionId;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_LOGSESSIONID, &logSessionId);
+
+    int32_t sampleRate = 0;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate);
+
+    std::string contentTypeStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CONTENTTYPE, &contentTypeStr);
+    const auto contentType = types::lookup<types::CONTENT_TYPE, int32_t>(contentTypeStr);
+
+    std::string sharingModeRequestedStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SHARINGMODE, &sharingModeRequestedStr);
+    const auto sharingModeRequested =
+            types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeRequestedStr);
+
+    LOG(LOG_LEVEL) << "key:" << key
+            << " path:" << path
+            << " direction:" << direction << "(" << directionStr << ")"
+            << " frames_per_burst:" << framesPerBurst
+            << " buffer_size:" << bufferSizeInFrames
+            << " buffer_capacity:" << bufferCapacityInFrames
+            << " channel_count:" << channelCount
+            << " total_frames_transferred:" << totalFramesTransferred
+            << " perf_mode_requested:" << perfModeRequested << "(" << perfModeRequestedStr << ")"
+            << " perf_mode_actual:" << perfModeActual << "(" << perfModeActualStr << ")"
+            << " sharing:" << sharingModeActual << "(" << sharingModeActualStr << ")"
+            << " xrun_count:" << xrunCount
+            << " device_type:" << serializedDeviceTypes
+            << " format_app:" << formatApp << "(" << formatAppStr << ")"
+            << " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
+            << " log_session_id: " << logSessionId
+            << " sample_rate: " << sampleRate
+            << " content_type: " << contentType << "(" << contentTypeStr << ")"
+            << " sharing_requested:" << sharingModeRequested
+                    << "(" << sharingModeRequestedStr << ")";
+
+    if (mAudioAnalytics.mDeliverStatistics) {
+        android::util::BytesField bf_serialized(
+            serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
+        const auto result = sendToStatsd(
+                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                , path
+                , direction
+                , framesPerBurst
+                , bufferSizeInFrames
+                , bufferCapacityInFrames
+                , channelCount
+                , totalFramesTransferred
+                , perfModeRequested
+                , perfModeActual
+                , sharingModeActual
+                , xrunCount
+                , bf_serialized
+                , formatApp
+                , formatDevice
+                , logSessionId.c_str()
+                , sampleRate
+                , contentType
+                , sharingModeRequested
+                );
+        std::stringstream ss;
+        ss << "result:" << result;
+        const auto fieldsStr = printFields(AAudioStreamFields,
+                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                , path
+                , direction
+                , framesPerBurst
+                , bufferSizeInFrames
+                , bufferCapacityInFrames
+                , channelCount
+                , totalFramesTransferred
+                , perfModeRequested
+                , perfModeActual
+                , sharingModeActual
+                , xrunCount
+                , serializedDeviceTypes.c_str()
+                , formatApp
+                , formatDevice
+                , logSessionId.c_str()
+                , sampleRate
+                , contentType
+                , sharingModeRequested
+                );
+        ss << " " << fieldsStr;
+        std::string str = ss.str();
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
     }
 }
 
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index df097b1..2b41a95 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -17,10 +17,10 @@
 #pragma once
 
 #include <android-base/thread_annotations.h>
-#include <audio_utils/SimpleLog.h>
 #include "AnalyticsActions.h"
 #include "AnalyticsState.h"
 #include "AudioPowerUsage.h"
+#include "StatsdLog.h"
 #include "TimedAction.h"
 #include "Wrap.h"
 
@@ -32,7 +32,7 @@
     friend AudioPowerUsage;
 
 public:
-    AudioAnalytics();
+    explicit AudioAnalytics(const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioAnalytics();
 
     /**
@@ -122,8 +122,7 @@
     SharedPtrWrap<AnalyticsState> mPreviousAnalyticsState;
 
     TimedAction mTimedAction; // locked internally
-
-    SimpleLog mStatsdLog{16 /* log lines */}; // locked internally
+    const std::shared_ptr<StatsdLog> mStatsdLog; // locked internally, ok for multiple threads.
 
     // DeviceUse is a nested class which handles audio device usage accounting.
     // We define this class at the end to ensure prior variables all properly constructed.
@@ -189,7 +188,30 @@
         int32_t mA2dpConnectionUnknowns GUARDED_BY(mLock) = 0;
     } mDeviceConnection{*this};
 
-    AudioPowerUsage mAudioPowerUsage{this};
+    // AAudioStreamInfo is a nested class which collect aaudio stream info from both client and
+    // server side.
+    class AAudioStreamInfo {
+    public:
+        // All the enum here must be kept the same as the ones defined in atoms.proto
+        enum CallerPath {
+            CALLER_PATH_UNKNOWN = 0,
+            CALLER_PATH_LEGACY = 1,
+            CALLER_PATH_MMAP = 2,
+        };
+
+        explicit AAudioStreamInfo(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        void endAAudioStream(
+                const std::shared_ptr<const android::mediametrics::Item> &item,
+                CallerPath path) const;
+
+    private:
+
+        AudioAnalytics &mAudioAnalytics;
+    } mAAudioStreamInfo{*this};
+
+    AudioPowerUsage mAudioPowerUsage;
 };
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index cca6b41..ab74c8e 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -28,7 +28,7 @@
 #include <cutils/properties.h>
 #include <statslog.h>
 #include <sys/timerfd.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
 
 // property to disable audio power use metrics feature, default is enabled
 #define PROP_AUDIO_METRICS_DISABLED "persist.media.audio_metrics.power_usage_disabled"
@@ -127,14 +127,13 @@
     return deviceMask;
 }
 
-/* static */
-void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item)
+void AudioPowerUsage::sendItem(const std::shared_ptr<const mediametrics::Item>& item) const
 {
     int32_t type;
     if (!item->getInt32(AUDIO_POWER_USAGE_PROP_TYPE, &type)) return;
 
-    int32_t device;
-    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &device)) return;
+    int32_t audio_device;
+    if (!item->getInt32(AUDIO_POWER_USAGE_PROP_DEVICE, &audio_device)) return;
 
     int64_t duration_ns;
     if (!item->getInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, &duration_ns)) return;
@@ -142,11 +141,24 @@
     double volume;
     if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
 
-    (void)android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
-                                         device,
-                                         (int32_t)(duration_ns / NANOS_PER_SECOND),
-                                         (float)volume,
+    const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
+    const float average_volume = (float)volume;
+    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+                                         audio_device,
+                                         duration_secs,
+                                         average_volume,
                                          type);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audio_power_usage_data_reported:"
+            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << " audio_device:" << audio_device
+            << " duration_secs:" << duration_secs
+            << " average_volume:" << average_volume
+            << " type:" << type
+            << " }";
+    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 bool AudioPowerUsage::saveAsItem_l(
@@ -174,8 +186,8 @@
         if (item_device == device && item_type == type) {
             int64_t final_duration_ns = item_duration_ns + duration_ns;
             double final_volume = (device & INPUT_DEVICE_BIT) ? 1.0:
-                            ((item_volume * item_duration_ns +
-                            average_vol * duration_ns) / final_duration_ns);
+                            ((item_volume * (double)item_duration_ns +
+                            average_vol * (double)duration_ns) / (double)final_duration_ns);
 
             item->setInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, final_duration_ns);
             item->setDouble(AUDIO_POWER_USAGE_PROP_VOLUME, final_volume);
@@ -200,6 +212,34 @@
     return true;
 }
 
+bool AudioPowerUsage::saveAsItems_l(
+        int32_t device, int64_t duration_ns, int32_t type, double average_vol)
+{
+    ALOGV("%s: (%#x, %d, %lld, %f)", __func__, device, type,
+                                   (long long)duration_ns, average_vol );
+    if (duration_ns == 0) {
+        return true; // skip duration 0 usage
+    }
+    if (device == 0) {
+        return true; //ignore unknown device
+    }
+
+    bool ret = false;
+    const int32_t input_bit = device & INPUT_DEVICE_BIT;
+    int32_t device_bits = device ^ input_bit;
+
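+    // Example: a device value of 0x6 (two device bits set, no input bit) results
+    // in one saveAsItem_l() call per bit: first for 0x2, then for 0x4.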
+    while (device_bits != 0) {
+        int32_t tmp_device = device_bits & -device_bits; // get lowest bit
+        device_bits ^= tmp_device;  // clear lowest bit
+        tmp_device |= input_bit;    // restore input bit
+        ret = saveAsItem_l(tmp_device, duration_ns, type, average_vol);
+
+        ALOGV("%s: device %#x recorded, remaining device_bits = %#x", __func__,
+            tmp_device, device_bits);
+    }
+    return ret;
+}
+
 void AudioPowerUsage::checkTrackRecord(
         const std::shared_ptr<const mediametrics::Item>& item, bool isTrack)
 {
@@ -245,7 +285,7 @@
         ALOGV("device = %s => %d", device_strings.c_str(), device);
     }
     std::lock_guard l(mLock);
-    saveAsItem_l(device, deviceTimeNs, type, deviceVolume);
+    saveAsItems_l(device, deviceTimeNs, type, deviceVolume);
 }
 
 void AudioPowerUsage::checkMode(const std::shared_ptr<const mediametrics::Item>& item)
@@ -261,8 +301,8 @@
         const int64_t durationNs = endCallNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / durationNs;
-            saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / (double)durationNs;
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
     } else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
         mStartCallNs = item->getTimestamp(); // advisory only
@@ -289,7 +329,7 @@
         const int64_t durationNs = timeNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / durationNs;
+                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / (double)durationNs;
             mVolumeTimeNs = timeNs;
         }
     }
@@ -320,8 +360,8 @@
         const int64_t durationNs = endDeviceNs - mDeviceTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / durationNs;
-            saveAsItem_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / (double)durationNs;
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
         }
         // reset statistics
         mDeviceVolume = 0;
@@ -332,8 +372,10 @@
     mPrimaryDevice = device;
 }
 
-AudioPowerUsage::AudioPowerUsage(AudioAnalytics *audioAnalytics)
+AudioPowerUsage::AudioPowerUsage(
+        AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog)
     : mAudioAnalytics(audioAnalytics)
+    , mStatsdLog(statsdLog)
     , mDisabled(property_get_bool(PROP_AUDIO_METRICS_DISABLED, AUDIO_METRICS_DISABLED_DEFAULT))
     , mIntervalHours(property_get_int32(PROP_AUDIO_METRICS_INTERVAL_HR, INTERVAL_HR_DEFAULT))
 {
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index 446ff4f..7021902 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -22,13 +22,15 @@
 #include <mutex>
 #include <thread>
 
+#include "StatsdLog.h"
+
 namespace android::mediametrics {
 
 class AudioAnalytics;
 
 class AudioPowerUsage {
 public:
-    explicit AudioPowerUsage(AudioAnalytics *audioAnalytics);
+    AudioPowerUsage(AudioAnalytics *audioAnalytics, const std::shared_ptr<StatsdLog>& statsdLog);
     ~AudioPowerUsage();
 
     void checkTrackRecord(const std::shared_ptr<const mediametrics::Item>& item, bool isTrack);
@@ -83,10 +85,13 @@
 private:
     bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol)
          REQUIRES(mLock);
-    static void sendItem(const std::shared_ptr<const mediametrics::Item>& item);
+    void sendItem(const std::shared_ptr<const mediametrics::Item>& item) const;
     void collect();
+    bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
+         REQUIRES(mLock);
 
     AudioAnalytics * const mAudioAnalytics;
+    const std::shared_ptr<StatsdLog> mStatsdLog;  // mStatsdLog is internally locked
     const bool mDisabled;
     const int32_t mIntervalHours;
 
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index aa44447..838cdd5 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -76,6 +76,8 @@
         {"AUDIO_DEVICE_IN_ECHO_REFERENCE",         1LL << 27},
         {"AUDIO_DEVICE_IN_DEFAULT",                1LL << 28},
         // R values above.
+        {"AUDIO_DEVICE_IN_BLE_HEADSET",            1LL << 29},
+        {"AUDIO_DEVICE_IN_HDMI_EARC",              1LL << 30},
     };
     return map;
 }
@@ -121,6 +123,9 @@
         {"AUDIO_DEVICE_OUT_ECHO_CANCELLER",            1LL << 29},
         {"AUDIO_DEVICE_OUT_DEFAULT",                   1LL << 30},
         // R values above.
+        {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
+        {"AUDIO_DEVICE_OUT_BLE_SPEAKER",               1LL << 32},
+        {"AUDIO_DEVICE_OUT_HDMI_EARC",                 1LL << 33},
     };
     return map;
 }
@@ -151,6 +156,40 @@
     return map;
 }
 
+const std::unordered_map<std::string, int32_t>& getAAudioDirection() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
+    static std::unordered_map<std::string, int32_t> map {
+        // UNKNOWN is 0
+        {"AAUDIO_DIRECTION_OUTPUT",    1 /* AAUDIO_DIRECTION_OUTPUT + 1 */},
+        {"AAUDIO_DIRECTION_INPUT",     2 /* AAUDIO_DIRECTION_INPUT + 1*/},
+    };
+    return map;
+}
+
+const std::unordered_map<std::string, int32_t>& getAAudioPerformanceMode() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
+    static std::unordered_map<std::string, int32_t> map {
+        // UNKNOWN is 0
+        {"AAUDIO_PERFORMANCE_MODE_NONE",            10},
+        {"AAUDIO_PERFORMANCE_MODE_POWER_SAVING",    11},
+        {"AAUDIO_PERFORMANCE_MODE_LOW_LATENCY",     12},
+    };
+    return map;
+}
+
+const std::unordered_map<std::string, int32_t>& getAAudioSharingMode() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // This may be found in frameworks/av/media/libaaudio/include/aaudio/AAudio.h
+    static std::unordered_map<std::string, int32_t> map {
+        // UNKNOWN is 0
+        {"AAUDIO_SHARING_MODE_EXCLUSIVE",    1 /* AAUDIO_SHARING_MODE_EXCLUSIVE + 1 */},
+        {"AAUDIO_SHARING_MODE_SHARED",       2 /* AAUDIO_SHARING_MODE_SHARED + 1 */},
+    };
+    return map;
+}
+
 // Helper: Create the corresponding int32 from string flags split with '|'.
 template <typename Traits>
 int32_t int32FromFlags(const std::string &flags)
@@ -430,4 +469,70 @@
     return flagsFromMap(traits, getAudioTrackTraitsMap());
 }
 
+template <>
+std::string lookup<AAUDIO_DIRECTION>(const std::string &direction)
+{
+    auto& map = getAAudioDirection();
+    auto it = map.find(direction);
+    if (it == map.end()) {
+        return "";
+    }
+    return direction;
+}
+
+template <>
+int32_t lookup<AAUDIO_DIRECTION>(const std::string &direction)
+{
+    auto& map = getAAudioDirection();
+    auto it = map.find(direction);
+    if (it == map.end()) {
+        return 0; // return unknown
+    }
+    return it->second;
+}
+
+template <>
+std::string lookup<AAUDIO_PERFORMANCE_MODE>(const std::string &performanceMode)
+{
+    auto& map = getAAudioPerformanceMode();
+    auto it = map.find(performanceMode);
+    if (it == map.end()) {
+        return "";
+    }
+    return performanceMode;
+}
+
+template <>
+int32_t lookup<AAUDIO_PERFORMANCE_MODE>(const std::string &performanceMode)
+{
+    auto& map = getAAudioPerformanceMode();
+    auto it = map.find(performanceMode);
+    if (it == map.end()) {
+        return 0; // return unknown
+    }
+    return it->second;
+}
+
+template <>
+std::string lookup<AAUDIO_SHARING_MODE>(const std::string &sharingMode)
+{
+    auto& map = getAAudioSharingMode();
+    auto it = map.find(sharingMode);
+    if (it == map.end()) {
+        return "";
+    }
+    return sharingMode;
+}
+
+template <>
+int32_t lookup<AAUDIO_SHARING_MODE>(const std::string &sharingMode)
+{
+    auto& map = getAAudioSharingMode();
+    auto it = map.find(sharingMode);
+    if (it == map.end()) {
+        return 0; // return unknown
+    }
+    return it->second;
+}
+
 } // namespace android::mediametrics::types
diff --git a/services/mediametrics/AudioTypes.h b/services/mediametrics/AudioTypes.h
index e1deeb1..4394d79 100644
--- a/services/mediametrics/AudioTypes.h
+++ b/services/mediametrics/AudioTypes.h
@@ -40,6 +40,9 @@
 
 // Enumeration for all the string translations to integers (generally int32_t) unless noted.
 enum AudioEnumCategory {
+    AAUDIO_DIRECTION,
+    AAUDIO_PERFORMANCE_MODE,
+    AAUDIO_SHARING_MODE,
     CALLER_NAME,
     CONTENT_TYPE,
     ENCODING,
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index bf6e428..1d64878 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -19,19 +19,26 @@
 #include <utils/Log.h>
 
 #include "MediaMetricsService.h"
+#include "iface_statsd.h"
 
 #include <pwd.h> //getpwuid
 
+#include <android-base/stringprintf.h>
 #include <android/content/pm/IPackageManagerNative.h>  // package info
 #include <audio_utils/clock.h>                 // clock conversions
 #include <binder/IPCThreadState.h>             // get calling uid
+#include <binder/IServiceManager.h>            // checkCallingPermission
 #include <cutils/properties.h>                 // for property_get
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <memunreachable/memunreachable.h>
 #include <private/android_filesystem_config.h> // UID
+#include <statslog.h>
+
+#include <set>
 
 namespace android {
 
+using base::StringPrintf;
 using mediametrics::Item;
 using mediametrics::startsWith;
 
@@ -107,7 +114,7 @@
 {
     ALOGD("%s", __func__);
     // the class destructor clears anyhow, but we enforce clearing items first.
-    mItemsDiscarded += mItems.size();
+    mItemsDiscarded += (int64_t)mItems.size();
     mItems.clear();
 }
 
@@ -199,22 +206,19 @@
 
     (void)mAudioAnalytics.submit(sitem, isTrusted);
 
-    extern bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item);
-    (void)dump2Statsd(sitem);  // failure should be logged in function.
+    (void)dump2Statsd(sitem, mStatsdLog);  // failure should be logged in function.
     saveItem(sitem);
     return NO_ERROR;
 }
 
 status_t MediaMetricsService::dump(int fd, const Vector<String16>& args)
 {
-    String8 result;
-
     if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
-        result.appendFormat("Permission Denial: "
+        const std::string result = StringPrintf("Permission Denial: "
                 "can't dump MediaMetricsService from pid=%d, uid=%d\n",
                 IPCThreadState::self()->getCallingPid(),
                 IPCThreadState::self()->getCallingUid());
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         return NO_ERROR;
     }
 
@@ -246,17 +250,18 @@
             // dumpsys media.metrics audiotrack,codec
             // or dumpsys media.metrics audiotrack codec
 
-            result.append("Recognized parameters:\n");
-            result.append("--all         show all records\n");
-            result.append("--clear       clear out saved records\n");
-            result.append("--heap        show heap usage (top 100)\n");
-            result.append("--help        display help\n");
-            result.append("--prefix X    process records for component X\n");
-            result.append("--since X     X < 0: records from -X seconds in the past\n");
-            result.append("              X = 0: ignore\n");
-            result.append("              X > 0: records from X seconds since Unix epoch\n");
-            result.append("--unreachable show unreachable memory (leaks)\n");
-            write(fd, result.string(), result.size());
+            static constexpr char result[] =
+                    "Recognized parameters:\n"
+                    "--all         show all records\n"
+                    "--clear       clear out saved records\n"
+                    "--heap        show heap usage (top 100)\n"
+                    "--help        display help\n"
+                    "--prefix X    process records for component X\n"
+                    "--since X     X < 0: records from -X seconds in the past\n"
+                    "              X = 0: ignore\n"
+                    "              X > 0: records from X seconds since Unix epoch\n"
+                    "--unreachable show unreachable memory (leaks)\n";
+            write(fd, result, std::size(result));
             return NO_ERROR;
         } else if (args[i] == prefixOption) {
             ++i;
@@ -282,30 +287,36 @@
             unreachable = true;
         }
     }
-
+    std::stringstream result;
     {
         std::lock_guard _l(mLock);
 
         if (clear) {
-            mItemsDiscarded += mItems.size();
+            mItemsDiscarded += (int64_t)mItems.size();
             mItems.clear();
             mAudioAnalytics.clear();
         } else {
-            result.appendFormat("Dump of the %s process:\n", kServiceName);
+            result << StringPrintf("Dump of the %s process:\n", kServiceName);
             const char *prefixptr = prefix.size() > 0 ? prefix.c_str() : nullptr;
-            dumpHeaders(result, sinceNs, prefixptr);
-            dumpQueue(result, sinceNs, prefixptr);
+            result << dumpHeaders(sinceNs, prefixptr);
+            result << dumpQueue(sinceNs, prefixptr);
 
             // TODO: maybe consider a better way of dumping audio analytics info.
             const int32_t linesToDump = all ? INT32_MAX : 1000;
             auto [ dumpString, lines ] = mAudioAnalytics.dump(linesToDump, sinceNs, prefixptr);
-            result.append(dumpString.c_str());
+            result << dumpString;
             if (lines == linesToDump) {
-                result.append("-- some lines may be truncated --\n");
+                result << "-- some lines may be truncated --\n";
             }
+
+            // Dump the statsd atoms we sent out.
+            result << "Statsd atoms:\n"
+                   << mStatsdLog->dumpToString("  " /* prefix */,
+                           all ? STATSD_LOG_LINES_MAX : STATSD_LOG_LINES_DUMP);
         }
     }
-    write(fd, result.string(), result.size());
+    const std::string str = result.str();
+    write(fd, str.c_str(), str.size());
 
     // Check heap and unreachable memory outside of lock.
     if (heap) {
@@ -323,38 +334,37 @@
 }
 
 // dump headers
-void MediaMetricsService::dumpHeaders(String8 &result, int64_t sinceNs, const char* prefix)
+std::string MediaMetricsService::dumpHeaders(int64_t sinceNs, const char* prefix)
 {
+    std::stringstream result;
     if (mediametrics::Item::isEnabled()) {
-        result.append("Metrics gathering: enabled\n");
+        result << "Metrics gathering: enabled\n";
     } else {
-        result.append("Metrics gathering: DISABLED via property\n");
+        result << "Metrics gathering: DISABLED via property\n";
     }
-    result.appendFormat(
+    result << StringPrintf(
             "Since Boot: Submissions: %lld Accepted: %lld\n",
             (long long)mItemsSubmitted.load(), (long long)mItemsFinalized);
-    result.appendFormat(
+    result << StringPrintf(
             "Records Discarded: %lld (by Count: %lld by Expiration: %lld)\n",
             (long long)mItemsDiscarded, (long long)mItemsDiscardedCount,
             (long long)mItemsDiscardedExpire);
     if (prefix != nullptr) {
-        result.appendFormat("Restricting to prefix %s", prefix);
+        result << "Restricting to prefix " << prefix << "\n";
     }
     if (sinceNs != 0) {
-        result.appendFormat(
-            "Emitting Queue entries more recent than: %lld\n",
-            (long long)sinceNs);
+        result << "Emitting Queue entries more recent than: " << sinceNs << "\n";
     }
+    return result.str();
 }
 
 // TODO: should prefix be a set<string>?
-void MediaMetricsService::dumpQueue(String8 &result, int64_t sinceNs, const char* prefix)
+std::string MediaMetricsService::dumpQueue(int64_t sinceNs, const char* prefix)
 {
     if (mItems.empty()) {
-        result.append("empty\n");
-        return;
+        return "empty\n";
     }
-
+    std::stringstream result;
     int slot = 0;
     for (const auto &item : mItems) {         // TODO: consider std::lower_bound() on mItems
         if (item->getTimestamp() < sinceNs) { // sinceNs == 0 means all items shown
@@ -365,9 +375,10 @@
                     __func__, item->getKey().c_str(), prefix);
             continue;
         }
-        result.appendFormat("%5d: %s\n", slot, item->toString().c_str());
+        result << StringPrintf("%5d: %s\n", slot, item->toString().c_str());
         slot++;
     }
+    return result.str();
 }
 
 //
@@ -416,10 +427,10 @@
 
     if (const size_t toErase = overlimit + expired;
             toErase > 0) {
-        mItemsDiscardedCount += overlimit;
-        mItemsDiscardedExpire += expired;
-        mItemsDiscarded += toErase;
-        mItems.erase(mItems.begin(), mItems.begin() + toErase); // erase from front
+        mItemsDiscardedCount += (int64_t)overlimit;
+        mItemsDiscardedExpire += (int64_t)expired;
+        mItemsDiscarded += (int64_t)toErase;
+        mItems.erase(mItems.begin(), mItems.begin() + (ptrdiff_t)toErase); // erase from front
     }
     return more;
 }
@@ -439,6 +450,10 @@
     std::lock_guard _l(mLock);
     // we assume the items are roughly in time order.
     mItems.emplace_back(item);
+    if (isPullable(item->getKey())) {
+        registerStatsdCallbacksIfNeeded();
+        mPullableItems[item->getKey()].emplace_back(item);
+    }
     ++mItemsFinalized;
     if (expirations(item)
             && (!mExpireFuture.valid()
@@ -485,4 +500,58 @@
     return false;
 }
 
+void MediaMetricsService::registerStatsdCallbacksIfNeeded()
+{
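+    // test_and_set() returns the previous value, so only the first caller
+    // actually registers the pull callback with statsd.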
+    if (mStatsdRegistered.test_and_set()) {
+        return;
+    }
+    auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+    auto cb = MediaMetricsService::pullAtomCallback;
+    AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
+}
+
+/* static */
+bool MediaMetricsService::isPullable(const std::string &key)
+{
+    static const std::set<std::string> pullableKeys{
+        "mediadrm",
+    };
+    return pullableKeys.count(key);
+}
+
+/* static */
+std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
+{
+    switch (atomTag) {
+    case android::util::MEDIA_DRM_ACTIVITY_INFO:
+        return "mediadrm";
+    }
+    return {};
+}
+
+/* static */
+AStatsManager_PullAtomCallbackReturn MediaMetricsService::pullAtomCallback(
+        int32_t atomTag, AStatsEventList* data, void* cookie)
+{
+    MediaMetricsService* svc = reinterpret_cast<MediaMetricsService*>(cookie);
+    return svc->pullItems(atomTag, data);
+}
+
+AStatsManager_PullAtomCallbackReturn MediaMetricsService::pullItems(
+        int32_t atomTag, AStatsEventList* data)
+{
+    const std::string key(atomTagToKey(atomTag));
+    if (key.empty()) {
+        return AStatsManager_PULL_SKIP;
+    }
+    std::lock_guard _l(mLock);
+    bool dumped = false;
+    for (auto &item : mPullableItems[key]) {
+        if (const auto sitem = item.lock()) {
+            dumped |= dump2Statsd(sitem, data, mStatsdLog);
+        }
+    }
+    mPullableItems[key].clear();
+    return dumped ? AStatsManager_PULL_SUCCESS : AStatsManager_PULL_SKIP;
+}
 } // namespace android
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/MediaMetricsService.h
index 792b7f0..8d0b1cf 100644
--- a/services/mediametrics/MediaMetricsService.h
+++ b/services/mediametrics/MediaMetricsService.h
@@ -24,20 +24,27 @@
 
 // IMediaMetricsService must include Vector, String16, Errors
 #include <android-base/thread_annotations.h>
-#include <media/IMediaMetricsService.h>
+#include <android/media/BnMediaMetricsService.h>
 #include <mediautils/ServiceUtilities.h>
+#include <stats_pull_atom_callback.h>
 #include <utils/String8.h>
 
 #include "AudioAnalytics.h"
 
 namespace android {
 
-class MediaMetricsService : public BnMediaMetricsService
+class MediaMetricsService : public media::BnMediaMetricsService
 {
 public:
     MediaMetricsService();
     ~MediaMetricsService() override;
 
+    // AIDL interface
+    binder::Status submitBuffer(const std::vector<uint8_t>& buffer) override {
+        status_t status = submitBuffer((char *)buffer.data(), buffer.size());
+        return binder::Status::fromStatusT(status);
+    }
+
     /**
      * Submits the indicated record to the mediaanalytics service.
      *
@@ -45,11 +52,11 @@
      * \return status failure, which is negative on binder transaction failure.
      *         As the transaction is one-way, remote failures will not be reported.
      */
-    status_t submit(mediametrics::Item *item) override {
+    status_t submit(mediametrics::Item *item) {
         return submitInternal(item, false /* release */);
     }
 
-    status_t submitBuffer(const char *buffer, size_t length) override {
+    status_t submitBuffer(const char *buffer, size_t length) {
         mediametrics::Item *item = new mediametrics::Item();
         return item->readFromByteString(buffer, length)
                 ?: submitInternal(item, true /* release */);
@@ -81,7 +88,7 @@
 
     // Internal call where release is true if ownership of item is transferred
     // to the service (that is, the service will eventually delete the item).
-    status_t submitInternal(mediametrics::Item *item, bool release) override;
+    status_t submitInternal(mediametrics::Item *item, bool release);
 
 private:
     void processExpirations();
@@ -93,8 +100,17 @@
     bool expirations(const std::shared_ptr<const mediametrics::Item>& item) REQUIRES(mLock);
 
     // support for generating output
-    void dumpQueue(String8 &result, int64_t sinceNs, const char* prefix) REQUIRES(mLock);
-    void dumpHeaders(String8 &result, int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+    std::string dumpQueue(int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+    std::string dumpHeaders(int64_t sinceNs, const char* prefix) REQUIRES(mLock);
+
+    // support statsd pushed atoms
+    static bool isPullable(const std::string &key);
+    static std::string atomTagToKey(int32_t atomTag);
+    static AStatsManager_PullAtomCallbackReturn pullAtomCallback(
+            int32_t atomTag, AStatsEventList* data, void* cookie);
+    AStatsManager_PullAtomCallbackReturn pullItems(int32_t atomTag, AStatsEventList* data);
+    void registerStatsdCallbacksIfNeeded();
+    std::atomic_flag mStatsdRegistered = ATOMIC_FLAG_INIT;
 
     // The following variables accessed without mLock
 
@@ -108,7 +124,14 @@
 
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
-    mediametrics::AudioAnalytics mAudioAnalytics; // mAudioAnalytics is locked internally.
+    // mStatsdLog is locked internally (thread-safe) and shows the last atoms logged
+    static constexpr size_t STATSD_LOG_LINES_MAX = 30; // recent log lines to keep
+    static constexpr size_t STATSD_LOG_LINES_DUMP = 4; // normal amount of lines to dump
+    const std::shared_ptr<mediametrics::StatsdLog> mStatsdLog{
+            std::make_shared<mediametrics::StatsdLog>(STATSD_LOG_LINES_MAX)};
+
+    // mAudioAnalytics is locked internally.
+    mediametrics::AudioAnalytics mAudioAnalytics{mStatsdLog};
 
     std::mutex mLock;
     // statistics about our analytics
@@ -124,6 +147,12 @@
     // TODO: Make separate class, use segmented queue, write lock only end.
     // Note: Another analytics module might have ownership of an item longer than the log.
     std::deque<std::shared_ptr<const mediametrics::Item>> mItems GUARDED_BY(mLock);
+
+    // Queues per item key, pending to be pulled by statsd.
+    // Use weak_ptr such that a pullable item can still expire.
+    using ItemKey = std::string;
+    using WeakItemQueue = std::deque<std::weak_ptr<const mediametrics::Item>>;
+    std::unordered_map<ItemKey, WeakItemQueue> mPullableItems GUARDED_BY(mLock);
 };
 
 } // namespace android
diff --git a/services/mediametrics/StatsdLog.h b/services/mediametrics/StatsdLog.h
new file mode 100644
index 0000000..e207bac
--- /dev/null
+++ b/services/mediametrics/StatsdLog.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/SimpleLog.h>
+#include <map>
+#include <mutex>
+#include <sstream>
+
+namespace android::mediametrics {
+
+class StatsdLog {
+public:
+    explicit StatsdLog(size_t lines) : mSimpleLog(lines) {}
+
+    void log(int atom, const std::string& string) {
+        {
+            std::lock_guard lock(mLock);
+            ++mCountMap[atom];
+        }
+        mSimpleLog.log("%s", string.c_str());
+    }
+
+    std::string dumpToString(const char *prefix = "", size_t logLines = 0) const {
+        std::stringstream ss;
+
+        {   // first print out the atom counts
+            std::lock_guard lock(mLock);
+
+            size_t col = 0;
+            for (const auto& count : mCountMap) {
+                if (col == 8) {
+                    col = 0;
+                    ss << "\n" << prefix;
+                } else {
+                    ss << " ";
+                }
+                ss << "[ " << count.first << " : " << count.second << " ]";
+                ++col;
+            }
+            ss << "\n";
+        }
+
+        // then print out the log lines
+        ss << mSimpleLog.dumpToString(prefix, logLines);
+        return ss.str();
+    }
+
+private:
+    SimpleLog mSimpleLog; // internally locked
+    std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
+    mutable std::mutex mLock;
+};
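+
+// Example usage (illustrative only; mirrors how MediaMetricsService wires this up):
+//   auto statsdLog = std::make_shared<StatsdLog>(30 /* max lines retained */);
+//   statsdLog->log(atomId, "result:0 { ... }");  // count the atom and keep the line
+//   std::string dump = statsdLog->dumpToString("  " /* prefix */);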
+
+} // namespace android::mediametrics
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/StringUtils.h
index 7a8bbee..01034d9 100644
--- a/services/mediametrics/StringUtils.h
+++ b/services/mediametrics/StringUtils.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <iomanip>
+#include <sstream>
 #include <string>
 #include <vector>
 
@@ -68,4 +70,101 @@
  */
 size_t replace(std::string &str, const char *targetChars, const char replaceChar);
 
+// RFC 1421, 2045, 2152, 4648(4), 4880
+inline constexpr char Base64Table[] =
+    // 0000000000111111111122222222223333333333444444444455555555556666
+    // 0123456789012345678901234567890123456789012345678901234567890123
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+
+// RFC 4648(5) URL-safe Base64 encoding
+inline constexpr char Base64UrlTable[] =
+    // 0000000000111111111122222222223333333333444444444455555555556666
+    // 0123456789012345678901234567890123456789012345678901234567890123
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
+
+// A constexpr struct that transposes/inverts a string conversion table.
+struct Transpose {
+    // constexpr bug, returning char still means -1 == 0xff, so we use unsigned char.
+    using base_char_t = unsigned char;
+    static inline constexpr base_char_t INVALID_CHAR = 0xff;
+
+    template <size_t N>
+    explicit constexpr Transpose(const char(&string)[N]) {
+        for (auto& e : mMap) {
+            e = INVALID_CHAR;
+        }
+        for (size_t i = 0; string[i] != 0; ++i) {
+            mMap[static_cast<size_t>(string[i]) & 0xff] = i;
+        }
+    }
+
+    constexpr base_char_t operator[] (size_t n) const {
+        return n < sizeof(mMap) ? mMap[n] : INVALID_CHAR;
+    }
+
+    constexpr const auto& get() const {
+        return mMap;
+    }
+
+private:
+    base_char_t mMap[256];  // construct an inverse character mapping.
+};
+
+// This table is used to convert an input char to a 6 bit (0 - 63) value.
+// If the input char is not in the Base64Url charset, Transpose::INVALID_CHAR is returned.
+inline constexpr Transpose InverseBase64UrlTable(Base64UrlTable);
+
+// Returns true if s consists of only valid Base64Url characters (no padding chars allowed).
+inline constexpr bool isBase64Url(const char *s) {
+    for (; *s != 0; ++s) {
+        if (InverseBase64UrlTable[(unsigned char)*s] == Transpose::INVALID_CHAR) return false;
+    }
+    return true;
+}
+
+// Returns true if s is a valid log session id: exactly 16 Base64Url characters.
+//
+// A logSessionId is a web-safe Base64Url RFC 4648(5) encoded string of 16 characters
+// (representing 16 * 6 = 96 unique bits).
+//
+// The string version is considered the reference representation.  However, for ease of
+// manipulation and comparison, it may be converted to an int128.
+//
+// For int128 conversion, two common interpretations exist:
+// (1) the 16 Base64 chars can be decoded 6 bits per char into a 96 bit value
+// (with the most significant 32 bits zero), since they carry only 12 unique bytes of data;
+// or (2) the 16 chars can be treated as 16 raw bytes and copied directly into the
+// 128 bit layout of the int128 variable.
+// Endianness of the data may follow whatever is convenient, as long as it is applied
+// identically to every such string to int128 conversion.
+//
+inline constexpr bool isLogSessionId(const char *s) {
+    return std::char_traits<std::decay_t<decltype(*s)>>::length(s) == 16 && isBase64Url(s);
+}
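+
+// Illustrative sketch only (not part of this header): interpretation (1) above could
+// decode a validated logSessionId into the low 96 bits of an unsigned __int128, e.g.
+//   unsigned __int128 value = 0;
+//   for (size_t i = 0; i < 16; ++i) {
+//       value = (value << 6) | InverseBase64UrlTable[(unsigned char)s[i]];
+//   }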
+
+// Returns either the original string or an empty string if isLogSessionId check fails.
+inline std::string sanitizeLogSessionId(const std::string& string) {
+    if (isLogSessionId(string.c_str())) return string;
+    return {}; // if not a logSessionId, return an empty string.
+}
+
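+// Renders a byte vector as hex for logging, e.g. bytesToString({0xde, 0xad}) yields
+// "{ de ad }"; if maxSize truncates the output, the string ends with " ... }".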
+inline std::string bytesToString(const std::vector<uint8_t>& bytes, size_t maxSize = SIZE_MAX) {
+    if (bytes.size() == 0) {
+        return "{}";
+    }
+    std::stringstream ss;
+    ss << "{";
+    ss << std::hex << std::setfill('0');
+    maxSize = std::min(maxSize, bytes.size());
+    for (size_t i = 0; i < maxSize; ++i) {
+        ss << " " << std::setw(2) << (int)bytes[i];
+    }
+    if (maxSize != bytes.size()) {
+        ss << " ... }";
+    } else {
+        ss << " }";
+    }
+    return ss.str();
+}
+
 } // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/benchmarks/Android.bp b/services/mediametrics/benchmarks/Android.bp
index b61f44f..68d4145 100644
--- a/services/mediametrics/benchmarks/Android.bp
+++ b/services/mediametrics/benchmarks/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "mediametrics_benchmarks",
     srcs: ["mediametrics_benchmarks.cpp"],
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
new file mode 100644
index 0000000..b03e518
--- /dev/null
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -0,0 +1,72 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "mediametrics_service_fuzzer",
+
+    srcs: [
+        "mediametrics_service_fuzzer.cpp",
+    ],
+
+    static_libs: [
+        "libmediametrics",
+        "libmediametricsservice",
+        "libplatformprotos",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmedia_helper",
+        "libmediautils",
+        "libmemunreachable",
+        "libprotobuf-cpp-lite",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstatslog",
+        "libstatspull",
+        "libstatssocket",
+        "libutils",
+        "mediametricsservice-aidl-cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/mediametrics",
+        "system/media/audio_utils/include",
+    ],
+
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/mediametrics/fuzzer/README.md b/services/mediametrics/fuzzer/README.md
new file mode 100644
index 0000000..a13830e
--- /dev/null
+++ b/services/mediametrics/fuzzer/README.md
@@ -0,0 +1,54 @@
+# Fuzzer for libmediametricsservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libmediametricsservice is designed based on an
+understanding of the service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+Media Metrics Service contains the following modules:
+1. Media Metrics Item Manipulation (module name: `Item`)
+2. Media Metrics Time Machine Storage (module name: `TimeMachineStorage`)
+3. Media Metrics Transaction Log (module name: `TransactionLog`)
+4. Media Metrics Analytics Action (module name: `AnalyticsAction`)
+5. Media Metrics Audio Analytics (module name: `AudioAnalytics`)
+6. Media Metrics Timed Action (module name: `TimedAction`)
+
+| Module| Valid Input Values| Configured Value|
+|------------- |-------------| ----- |
+| `Item` | Key: `std::string`. Values: `INT32_MIN` to `INT32_MAX`, `INT64_MIN` to `INT64_MAX`, `std::string`, `double`, `pair<INT32_MIN to INT32_MAX, INT32_MIN to INT32_MAX>` | Value obtained from FuzzedDataProvider |
+| `TimeMachineStorage`   | Key: `std::string`. Values: `INT32_MIN` to `INT32_MAX`, `INT64_MIN` to `INT64_MAX`, `std::string`, `double`, `pair<INT32_MIN to INT32_MAX, INT32_MIN to INT32_MAX>` | Value obtained from FuzzedDataProvider |
+| `TransactionLog`   | `mediametrics::Item` | `mediametrics::Item` created by obtaining values from FuzzedDataProvider|
+| `AnalyticsAction`   | URL: `std::string` ending with .event, Value: `std::string`, action: A function | URL and Values obtained from FuzzedDataProvider, a placeholder function was passed as action|
+| `AudioAnalytics`   | `mediametrics::Item` | `mediametrics::Item` created by obtaining values from FuzzedDataProvider|
+| `TimedAction`   | time: `std::chrono::seconds`, function: `std::function` | `std::chrono::seconds` : value obtained from FuzzedDataProvider, `std::function`: a placeholder function was used. |
+
+This also ensures that the plugin is always deterministic for any given input.
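+
+As a rough sketch of the pattern used throughout the plugin (not the exact
+fuzzer code), each module is driven with values drawn from `FuzzedDataProvider`:
+```
+  FuzzedDataProvider fdp(data, size);
+  mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
+  item.setInt32(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>());
+```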
+
+## Build
+
+This describes the steps to build the mediametrics_service_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mediametrics_service_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR and copy some files into it.
+Push this directory to the device.
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mediametrics_service_fuzzer/mediametrics_service_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
new file mode 100644
index 0000000..8b0b479
--- /dev/null
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -0,0 +1,374 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaMetricsItem.h>
+#include <stdio.h>
+#include <string.h>
+#include <utils/Log.h>
+#include <algorithm>
+
+#include "AudioTypes.h"
+#include "MediaMetricsService.h"
+#include "StringUtils.h"
+
+using namespace android;
+
+// low water mark
+constexpr size_t kLogItemsLowWater = 1;
+// high water mark
+constexpr size_t kLogItemsHighWater = 2;
+
+class MediaMetricsServiceFuzzer {
+   public:
+    void invokeStartsWith(const uint8_t *data, size_t size);
+    void invokeInstantiate(const uint8_t *data, size_t size);
+    void invokePackageInstallerCheck(const uint8_t *data, size_t size);
+    void invokeItemManipulation(const uint8_t *data, size_t size);
+    void invokeItemExpansion(const uint8_t *data, size_t size);
+    void invokeTimeMachineStorage(const uint8_t *data, size_t size);
+    void invokeTransactionLog(const uint8_t *data, size_t size);
+    void invokeAnalyticsAction(const uint8_t *data, size_t size);
+    void invokeAudioAnalytics(const uint8_t *data, size_t size);
+    void invokeTimedAction(const uint8_t *data, size_t size);
+    void process(const uint8_t *data, size_t size);
+};
+
+void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    while (fdp.remaining_bytes()) {
+        android::mediametrics::startsWith(fdp.ConsumeRandomLengthString(),
+                                          fdp.ConsumeRandomLengthString());
+    }
+}
+
+void MediaMetricsServiceFuzzer::invokeInstantiate(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    sp mediaMetricsService = new MediaMetricsService();
+
+    while (fdp.remaining_bytes()) {
+        std::unique_ptr<mediametrics::Item> random_key(
+            mediametrics::Item::create(fdp.ConsumeRandomLengthString()));
+        mediaMetricsService->submit(random_key.get());
+        random_key->setInt32(fdp.ConsumeRandomLengthString().c_str(),
+                             fdp.ConsumeIntegral<int32_t>());
+        mediaMetricsService->submit(random_key.get());
+
+        std::unique_ptr<mediametrics::Item> audiotrack_key(
+            mediametrics::Item::create("audiotrack"));
+        mediaMetricsService->submit(audiotrack_key.get());
+        audiotrack_key->addInt32(fdp.ConsumeRandomLengthString().c_str(),
+                                 fdp.ConsumeIntegral<int32_t>());
+        mediaMetricsService->submit(audiotrack_key.get());
+    }
+}
+
+void MediaMetricsServiceFuzzer::invokePackageInstallerCheck(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    while (fdp.remaining_bytes()) {
+        MediaMetricsService::useUidForPackage(fdp.ConsumeRandomLengthString().c_str(),
+                                              fdp.ConsumeRandomLengthString().c_str());
+    }
+}
+
+void MediaMetricsServiceFuzzer::invokeItemManipulation(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
+    while (fdp.remaining_bytes()) {
+        const uint8_t action = fdp.ConsumeIntegralInRange<uint8_t>(0, 16);
+        const std::string key = fdp.ConsumeRandomLengthString();
+        if (fdp.remaining_bytes() < 1 || key.length() < 1) {
+            break;
+        }
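+        // Dispatch to one of 17 item operations (set/add/get for each value type,
+        // rate handling, filtering, and toString), selected by the fuzzed byte.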
+        switch (action) {
+            case 0: {
+                item.setInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
+                break;
+            }
+            case 1: {
+                item.addInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
+                break;
+            }
+            case 2: {
+                int32_t i32 = 0;
+                item.getInt32(key.c_str(), &i32);
+                break;
+            }
+            case 3: {
+                item.setInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
+                break;
+            }
+            case 4: {
+                item.addInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
+                break;
+            }
+            case 5: {
+                int64_t i64 = 0;
+                item.getInt64(key.c_str(), &i64);
+                break;
+            }
+            case 6: {
+                item.setDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
+                break;
+            }
+            case 7: {
+                item.addDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
+                break;
+            }
+            case 8: {
+                double d = 0;
+                item.getDouble(key.c_str(), &d);
+                break;
+            }
+            case 9: {
+                item.setCString(key.c_str(), fdp.ConsumeRandomLengthString().c_str());
+                break;
+            }
+            case 10: {
+                char *s = nullptr;
+                item.getCString(key.c_str(), &s);
+                if (s) free(s);
+                break;
+            }
+            case 11: {
+                std::string s;
+                item.getString(key.c_str(), &s);
+                break;
+            }
+            case 12: {
+                item.setRate(key.c_str(), fdp.ConsumeIntegral<int64_t>(),
+                             fdp.ConsumeIntegral<int64_t>());
+                break;
+            }
+            case 13: {
+                int64_t b = 0, h = 0;
+                double d = 0;
+                item.getRate(key.c_str(), &b, &h, &d);
+                break;
+            }
+            case 14: {
+                (void)item.filter(key.c_str());
+                break;
+            }
+            case 15: {
+                const char *arr[1] = {key.c_str()};
+                (void)item.filterNot(1, arr);
+                break;
+            }
+            case 16: {
+                (void)item.toString().c_str();
+                break;
+            }
+        }
+    }
+
+    Parcel p;
+    mediametrics::Item item2;
+
+    (void)item.writeToParcel(&p);
+    p.setDataPosition(0);  // rewind for reading
+    (void)item2.readFromParcel(p);
+
+    char *byteData = nullptr;
+    size_t length = 0;
+    (void)item.writeToByteString(&byteData, &length);
+    (void)item2.readFromByteString(byteData, length);
+    if (byteData) {
+        free(byteData);
+    }
+
+    sp mediaMetricsService = new MediaMetricsService();
+    mediaMetricsService->submit(&item2);
+}
+
+void MediaMetricsServiceFuzzer::invokeItemExpansion(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    mediametrics::LogItem<1> item("FuzzItem");
+    item.setPid(fdp.ConsumeIntegral<int16_t>()).setUid(fdp.ConsumeIntegral<int16_t>());
+
+    while (fdp.remaining_bytes()) {
+        int32_t i = fdp.ConsumeIntegral<int32_t>();
+        item.set(std::to_string(i).c_str(), (int32_t)i);
+    }
+    item.updateHeader();
+
+    mediametrics::Item item2;
+    (void)item2.readFromByteString(item.getBuffer(), item.getLength());
+
+    sp mediaMetricsService = new MediaMetricsService();
+    mediaMetricsService->submit(&item2);
+}
+
+void MediaMetricsServiceFuzzer::invokeTimeMachineStorage(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    auto item = std::make_shared<mediametrics::Item>("FuzzKey");
+    int32_t i32 = fdp.ConsumeIntegral<int32_t>();
+    int64_t i64 = fdp.ConsumeIntegral<int64_t>();
+    double d = fdp.ConsumeFloatingPoint<double>();
+    std::string str = fdp.ConsumeRandomLengthString();
+    std::pair<int64_t, int64_t> pair(fdp.ConsumeIntegral<int64_t>(),
+                                     fdp.ConsumeIntegral<int64_t>());
+    (*item).set("i32", i32).set("i64", i64).set("double", d).set("string", str).set("rate", pair);
+
+    android::mediametrics::TimeMachine timeMachine;
+    timeMachine.put(item, true);
+
+    timeMachine.get("Key", "i32", &i32, -1);
+
+    timeMachine.get("Key", "i64", &i64, -1);
+
+    timeMachine.get("Key", "double", &d, -1);
+
+    timeMachine.get("Key", "string", &str, -1);
+
+    timeMachine.get("Key.i32", &i32, -1);
+
+    timeMachine.get("Key.i64", &i64, -1);
+
+    timeMachine.get("Key.double", &d, -1);
+
+    str.clear();
+    timeMachine.get("Key.string", &str, -1);
+}
+
+void MediaMetricsServiceFuzzer::invokeTransactionLog(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    auto item = std::make_shared<mediametrics::Item>("Key1");
+    (*item)
+        .set("one", fdp.ConsumeIntegral<int32_t>())
+        .set("two", fdp.ConsumeIntegral<int32_t>())
+        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+    android::mediametrics::TransactionLog transactionLog(
+        kLogItemsLowWater, kLogItemsHighWater);  // keep at most 2 items
+    transactionLog.size();
+
+    transactionLog.put(item);
+    transactionLog.size();
+
+    auto item2 = std::make_shared<mediametrics::Item>("Key2");
+    (*item2)
+        .set("three", fdp.ConsumeIntegral<int32_t>())
+        .set("[Key1]three", fdp.ConsumeIntegral<int32_t>())
+        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+    transactionLog.put(item2);
+    transactionLog.size();
+
+    auto item3 = std::make_shared<mediametrics::Item>("Key3");
+    (*item3)
+        .set("six", fdp.ConsumeIntegral<int32_t>())
+        .set("[Key1]four", fdp.ConsumeIntegral<int32_t>())  // affects Key1
+        .set("[Key1]five", fdp.ConsumeIntegral<int32_t>())  // affects key1
+        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+
+    transactionLog.put(item3);
+    transactionLog.size();
+}
+
+void MediaMetricsServiceFuzzer::invokeAnalyticsAction(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    mediametrics::AnalyticsActions analyticsActions;
+    bool action = false;
+
+    while (fdp.remaining_bytes()) {
+        analyticsActions.addAction(
+            (fdp.ConsumeRandomLengthString() + std::string(".event")).c_str(),
+            fdp.ConsumeRandomLengthString(),
+            std::make_shared<mediametrics::AnalyticsActions::Function>(
+                [&](const std::shared_ptr<const android::mediametrics::Item> &) {
+                    action = true;
+                }));
+    }
+
+    FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
+
+    while (fdp2.remaining_bytes()) {
+        // make a test item
+        auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
+        (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
+
+        // get the actions and execute them
+        auto actions = analyticsActions.getActionsForItem(item);
+        for (const auto &action : actions) {
+            action->operator()(item);
+        }
+    }
+}
+
+void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
+            std::make_shared<android::mediametrics::StatsdLog>(10);
+    android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
+
+    while (fdp.remaining_bytes()) {
+        auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
+        int32_t transactionUid = fdp.ConsumeIntegral<int32_t>();  // arbitrary
+        (*item)
+            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
+            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
+            .set(AMEDIAMETRICS_PROP_ALLOWUID, transactionUid)
+            .setUid(transactionUid)
+            .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+        audioAnalytics.submit(item, fdp.ConsumeBool());
+    }
+
+    audioAnalytics.dump(1000);
+}
+
+void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    android::mediametrics::TimedAction timedAction;
+    std::atomic_int value = 0;
+
+    while (fdp.remaining_bytes()) {
+        timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
+                           [&value] { ++value; });
+        timedAction.size();
+    }
+}
+
+void MediaMetricsServiceFuzzer::process(const uint8_t *data, size_t size) {
+    invokeStartsWith(data, size);
+    invokeInstantiate(data, size);
+    invokePackageInstallerCheck(data, size);
+    invokeItemManipulation(data, size);
+    invokeItemExpansion(data, size);
+    invokeTimeMachineStorage(data, size);
+    invokeTransactionLog(data, size);
+    invokeAnalyticsAction(data, size);
+    invokeAudioAnalytics(data, size);
+    invokeTimedAction(data, size);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer;
+    mediaMetricsServiceFuzzer.process(data, size);
+    return 0;
+}
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 16204de..776f878 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -27,7 +27,10 @@
 #include <pthread.h>
 #include <unistd.h>
 
+#include <map>
 #include <memory>
+#include <string>
+#include <vector>
 #include <string.h>
 #include <pwd.h>
 
@@ -45,33 +48,12 @@
 // has its own routine to handle this.
 //
 
-bool enabled_statsd = true;
+static bool enabled_statsd = true;
 
-struct statsd_hooks {
-    const char *key;
-    bool (*handler)(const mediametrics::Item *);
-};
-
-// keep this sorted, so we can do binary searches
-static constexpr struct statsd_hooks statsd_handlers[] =
-{
-    { "audiopolicy", statsd_audiopolicy },
-    { "audiorecord", statsd_audiorecord },
-    { "audiothread", statsd_audiothread },
-    { "audiotrack", statsd_audiotrack },
-    { "codec", statsd_codec},
-    { "drm.vendor.Google.WidevineCDM", statsd_widevineCDM },
-    { "drmmanager", statsd_drmmanager },
-    { "extractor", statsd_extractor },
-    { "mediadrm", statsd_mediadrm },
-    { "mediaparser", statsd_mediaparser },
-    { "nuplayer", statsd_nuplayer },
-    { "nuplayer2", statsd_nuplayer },
-    { "recorder", statsd_recorder },
-};
-
-// give me a record, i'll look at the type and upload appropriately
-bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item) {
+namespace {
+template<typename Handler, typename... Args>
+bool dump2StatsdInternal(const std::map<std::string, Handler>& handlers,
+        const std::shared_ptr<const mediametrics::Item>& item, Args... args) {
     if (item == nullptr) return false;
 
     // get the key
@@ -82,12 +64,42 @@
         return false;
     }
 
-    for (const auto &statsd_handler : statsd_handlers) {
-        if (key == statsd_handler.key) {
-            return statsd_handler.handler(item.get());
-        }
+    if (handlers.count(key)) {
+        return (handlers.at(key))(item, args...);
     }
     return false;
 }
+} // namespace
+
+// give me a record, I'll look at the type and upload appropriately
+bool dump2Statsd(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_pusher*> statsd_pushers =
+    {
+        { "audiopolicy", statsd_audiopolicy },
+        { "audiorecord", statsd_audiorecord },
+        { "audiothread", statsd_audiothread },
+        { "audiotrack", statsd_audiotrack },
+        { "codec", statsd_codec},
+        { "drmmanager", statsd_drmmanager },
+        { "extractor", statsd_extractor },
+        { "mediadrm", statsd_mediadrm },
+        { "mediaparser", statsd_mediaparser },
+        { "nuplayer", statsd_nuplayer },
+        { "nuplayer2", statsd_nuplayer },
+        { "recorder", statsd_recorder },
+    };
+    return dump2StatsdInternal(statsd_pushers, item, statsdLog);
+}
+
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
+    static const std::map<std::string, statsd_puller*> statsd_pullers =
+    {
+        { "mediadrm", statsd_mediadrm_puller },
+    };
+    return dump2StatsdInternal(statsd_pullers, item, out, statsdLog);
+}
 
 } // namespace android
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 9b49556..c2a8b3c 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -14,23 +14,37 @@
  * limitations under the License.
  */
 
+#include <memory>
+#include <stats_event.h>
+
 namespace android {
+namespace mediametrics {
+class Item;
+}
 
-extern bool enabled_statsd;
-
+using statsd_pusher = bool (const std::shared_ptr<const mediametrics::Item>& item,
+         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
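+// A statsd_pusher pushes its atom immediately (e.g. via android::util::stats_write);
+// a statsd_puller (below) instead fills the AStatsEventList supplied when statsd
+// pulls the atom.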
 // component specific dumpers
-extern bool statsd_audiopolicy(const mediametrics::Item *);
-extern bool statsd_audiorecord(const mediametrics::Item *);
-extern bool statsd_audiothread(const mediametrics::Item *);
-extern bool statsd_audiotrack(const mediametrics::Item *);
-extern bool statsd_codec(const mediametrics::Item *);
-extern bool statsd_extractor(const mediametrics::Item *);
-extern bool statsd_mediaparser(const mediametrics::Item *);
-extern bool statsd_nuplayer(const mediametrics::Item *);
-extern bool statsd_recorder(const mediametrics::Item *);
+extern statsd_pusher statsd_audiopolicy;
+extern statsd_pusher statsd_audiorecord;
+extern statsd_pusher statsd_audiothread;
+extern statsd_pusher statsd_audiotrack;
+extern statsd_pusher statsd_codec;
+extern statsd_pusher statsd_extractor;
+extern statsd_pusher statsd_mediaparser;
 
-extern bool statsd_mediadrm(const mediametrics::Item *);
-extern bool statsd_widevineCDM(const mediametrics::Item *);
-extern bool statsd_drmmanager(const mediametrics::Item *);
+extern statsd_pusher statsd_nuplayer;
+extern statsd_pusher statsd_recorder;
+extern statsd_pusher statsd_mediadrm;
+extern statsd_pusher statsd_drmmanager;
 
+using statsd_puller = bool (const std::shared_ptr<const mediametrics::Item>& item,
+        AStatsEventList *, const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+// component specific pullers
+extern statsd_puller statsd_mediadrm_puller;
+
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
+bool dump2Statsd(const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
 } // namespace android
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 6ef2f2c..3d9376e 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -32,25 +32,25 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiopolicy(const mediametrics::Item *item)
+bool statsd_audiopolicy(const std::shared_ptr<const mediametrics::Item>& item,
+       const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::AudioPolicyData metrics_proto;
+    ::android::stats::mediametrics_message::AudioPolicyData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
@@ -60,35 +60,35 @@
         metrics_proto.set_status(status);
     }
     //string char kAudioPolicyRqstSrc[] = "android.media.audiopolicy.rqst.src";
-    std::string rqst_src;
-    if (item->getString("android.media.audiopolicy.rqst.src", &rqst_src)) {
-        metrics_proto.set_request_source(std::move(rqst_src));
+    std::string request_source;
+    if (item->getString("android.media.audiopolicy.rqst.src", &request_source)) {
+        metrics_proto.set_request_source(request_source);
     }
     //string char kAudioPolicyRqstPkg[] = "android.media.audiopolicy.rqst.pkg";
-    std::string rqst_pkg;
-    if (item->getString("android.media.audiopolicy.rqst.pkg", &rqst_pkg)) {
-        metrics_proto.set_request_package(std::move(rqst_pkg));
+    std::string request_package;
+    if (item->getString("android.media.audiopolicy.rqst.pkg", &request_package)) {
+        metrics_proto.set_request_package(request_package);
     }
     //int32 char kAudioPolicyRqstSession[] = "android.media.audiopolicy.rqst.session";
-    int32_t rqst_session = -1;
-    if (item->getInt32("android.media.audiopolicy.rqst.session", &rqst_session)) {
-        metrics_proto.set_request_session(rqst_session);
+    int32_t request_session = -1;
+    if (item->getInt32("android.media.audiopolicy.rqst.session", &request_session)) {
+        metrics_proto.set_request_session(request_session);
     }
     //string char kAudioPolicyRqstDevice[] = "android.media.audiopolicy.rqst.device";
-    std::string rqst_device;
-    if (item->getString("android.media.audiopolicy.rqst.device", &rqst_device)) {
-        metrics_proto.set_request_device(std::move(rqst_device));
+    std::string request_device;
+    if (item->getString("android.media.audiopolicy.rqst.device", &request_device)) {
+        metrics_proto.set_request_device(request_device);
     }
 
     //string char kAudioPolicyActiveSrc[] = "android.media.audiopolicy.active.src";
-    std::string active_src;
-    if (item->getString("android.media.audiopolicy.active.src", &active_src)) {
-        metrics_proto.set_active_source(std::move(active_src));
+    std::string active_source;
+    if (item->getString("android.media.audiopolicy.active.src", &active_source)) {
+        metrics_proto.set_active_source(active_source);
     }
     //string char kAudioPolicyActivePkg[] = "android.media.audiopolicy.active.pkg";
-    std::string active_pkg;
-    if (item->getString("android.media.audiopolicy.active.pkg", &active_pkg)) {
-        metrics_proto.set_active_package(std::move(active_pkg));
+    std::string active_package;
+    if (item->getString("android.media.audiopolicy.active.pkg", &active_package)) {
+        metrics_proto.set_active_package(active_package);
     }
     //int32 char kAudioPolicyActiveSession[] = "android.media.audiopolicy.active.session";
     int32_t active_session = -1;
@@ -98,27 +98,40 @@
     //string char kAudioPolicyActiveDevice[] = "android.media.audiopolicy.active.device";
     std::string active_device;
     if (item->getString("android.media.audiopolicy.active.device", &active_device)) {
-        metrics_proto.set_active_device(std::move(active_device));
+        metrics_proto.set_active_device(active_device);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize audipolicy metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiopolicy_reported:"
+            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
-
+            << " status:" << status
+            << " request_source:" << request_source
+            << " request_package:" << request_package
+            << " request_session:" << request_session
+            << " request_device:" << request_device
+            << " active_source:" << active_source
+            << " active_package:" << active_package
+            << " active_session:" << active_session
+            << " active_device:" << active_device
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 76f4b59..41efcaa 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -32,36 +32,36 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "StringUtils.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiorecord(const mediametrics::Item *item)
-{
+bool statsd_audiorecord(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog) {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::AudioRecordData metrics_proto;
+    ::android::stats::mediametrics_message::AudioRecordData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
     std::string encoding;
     if (item->getString("android.media.audiorecord.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
 
     std::string source;
     if (item->getString("android.media.audiorecord.source", &source)) {
-        metrics_proto.set_source(std::move(source));
+        metrics_proto.set_source(source);
     }
 
     int32_t latency = -1;
@@ -79,14 +79,14 @@
         metrics_proto.set_channels(channels);
     }
 
-    int64_t createdMs = -1;
-    if (item->getInt64("android.media.audiorecord.createdMs", &createdMs)) {
-        metrics_proto.set_created_millis(createdMs);
+    int64_t created_millis = -1;
+    if (item->getInt64("android.media.audiorecord.createdMs", &created_millis)) {
+        metrics_proto.set_created_millis(created_millis);
     }
 
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.audiorecord.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.audiorecord.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
     int32_t count = -1;
@@ -94,46 +94,44 @@
         metrics_proto.set_count(count);
     }
 
-    int32_t errcode = -1;
-    if (item->getInt32("android.media.audiorecord.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
-    } else if (item->getInt32("android.media.audiorecord.lastError.code", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+    int32_t error_code = -1;
+    if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
+    } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
 
-    std::string errfunc;
-    if (item->getString("android.media.audiorecord.errfunc", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
-    } else if (item->getString("android.media.audiorecord.lastError.at", &errfunc)) {
-        metrics_proto.set_error_function(std::move(errfunc));
+    std::string error_function;
+    if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
+        metrics_proto.set_error_function(error_function);
+    } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+        metrics_proto.set_error_function(error_function);
     }
 
-    // portId (int32)
     int32_t port_id = -1;
     if (item->getInt32("android.media.audiorecord.portId", &port_id)) {
         metrics_proto.set_port_id(port_id);
     }
-    // frameCount (int32)
-    int32_t frameCount = -1;
-    if (item->getInt32("android.media.audiorecord.frameCount", &frameCount)) {
-        metrics_proto.set_frame_count(frameCount);
-    }
-    // attributes (string)
-    std::string attributes;
-    if (item->getString("android.media.audiorecord.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
-    }
-    // channelMask (int64)
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiorecord.channelMask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
-    }
-    // startcount (int64)
-    int64_t startcount = -1;
-    if (item->getInt64("android.media.audiorecord.startcount", &startcount)) {
-        metrics_proto.set_start_count(startcount);
+
+    int32_t frame_count = -1;
+    if (item->getInt32("android.media.audiorecord.frameCount", &frame_count)) {
+        metrics_proto.set_frame_count(frame_count);
     }
 
+    std::string attributes;
+    if (item->getString("android.media.audiorecord.attributes", &attributes)) {
+        metrics_proto.set_attributes(attributes);
+    }
+
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiorecord.channelMask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
+    }
+
+    int64_t start_count = -1;
+    if (item->getInt64("android.media.audiorecord.startcount", &start_count)) {
+        metrics_proto.set_start_count(start_count);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -141,17 +139,48 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    // Android S
+    // log_session_id (string)
+    std::string logSessionId;
+    (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
+    const auto log_session_id =
+            mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
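+    // sanitizeLogSessionId() is assumed to return either a well-formed session id
+    // or an empty string, so the result can be attached to the atom unconditionally.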
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized,
+        log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiorecord_reported:"
+            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " encoding:" << encoding
+            << " source:" << source
+            << " latency:" << latency
+            << " samplerate:" << samplerate
+            << " channels:" << channels
+            << " created_millis:" << created_millis
+            << " duration_millis:" << duration_millis
+            << " count:" << count
+            << " error_code:" << error_code
+            << " error_function:" << error_function
+
+            << " port_id:" << port_id
+            << " frame_count:" << frame_count
+            << " attributes:" << attributes
+            << " channel_mask:" << channel_mask
+            << " start_count:" << start_count
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index 2ad2562..e9b6dd6 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -32,25 +32,25 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiothread(const mediametrics::Item *item)
+bool statsd_audiothread(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::AudioThreadData metrics_proto;
+    ::android::stats::mediametrics_message::AudioThreadData metrics_proto;
 
 #define	MM_PREFIX "android.media.audiothread."
 
@@ -68,17 +68,17 @@
     if (item->getInt32(MM_PREFIX "samplerate", &samplerate)) {
         metrics_proto.set_samplerate(samplerate);
     }
-    std::string workhist;
-    if (item->getString(MM_PREFIX "workMs.hist", &workhist)) {
-        metrics_proto.set_work_millis_hist(std::move(workhist));
+    std::string work_millis_hist;
+    if (item->getString(MM_PREFIX "workMs.hist", &work_millis_hist)) {
+        metrics_proto.set_work_millis_hist(work_millis_hist);
     }
-    std::string latencyhist;
-    if (item->getString(MM_PREFIX "latencyMs.hist", &latencyhist)) {
-        metrics_proto.set_latency_millis_hist(std::move(latencyhist));
+    std::string latency_millis_hist;
+    if (item->getString(MM_PREFIX "latencyMs.hist", &latency_millis_hist)) {
+        metrics_proto.set_latency_millis_hist(latency_millis_hist);
     }
-    std::string warmuphist;
-    if (item->getString(MM_PREFIX "warmupMs.hist", &warmuphist)) {
-        metrics_proto.set_warmup_millis_hist(std::move(warmuphist));
+    std::string warmup_millis_hist;
+    if (item->getString(MM_PREFIX "warmupMs.hist", &warmup_millis_hist)) {
+        metrics_proto.set_warmup_millis_hist(warmup_millis_hist);
     }
     int64_t underruns = -1;
     if (item->getInt64(MM_PREFIX "underruns", &underruns)) {
@@ -88,101 +88,99 @@
     if (item->getInt64(MM_PREFIX "overruns", &overruns)) {
         metrics_proto.set_overruns(overruns);
     }
-    int64_t activeMs = -1;
-    if (item->getInt64(MM_PREFIX "activeMs", &activeMs)) {
-        metrics_proto.set_active_millis(activeMs);
+    int64_t active_millis = -1;
+    if (item->getInt64(MM_PREFIX "activeMs", &active_millis)) {
+        metrics_proto.set_active_millis(active_millis);
     }
-    int64_t durationMs = -1;
-    if (item->getInt64(MM_PREFIX "durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64(MM_PREFIX "durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
 
-    // item->setInt32(MM_PREFIX "id", (int32_t)mId); // IO handle
     int32_t id = -1;
     if (item->getInt32(MM_PREFIX "id", &id)) {
         metrics_proto.set_id(id);
     }
-    // item->setInt32(MM_PREFIX "portId", (int32_t)mPortId);
+
     int32_t port_id = -1;
-    if (item->getInt32(MM_PREFIX "portId", &id)) {
+    if (item->getInt32(MM_PREFIX "portId", &port_id)) {
         metrics_proto.set_port_id(port_id);
     }
     // item->setCString(MM_PREFIX "type", threadTypeToString(mType));
     std::string type;
     if (item->getString(MM_PREFIX "type", &type)) {
-        metrics_proto.set_type(std::move(type));
+        metrics_proto.set_type(type);
     }
-    // item->setInt32(MM_PREFIX "sampleRate", (int32_t)mSampleRate);
+
     int32_t sample_rate = -1;
     if (item->getInt32(MM_PREFIX "sampleRate", &sample_rate)) {
         metrics_proto.set_sample_rate(sample_rate);
     }
-    // item->setInt64(MM_PREFIX "channelMask", (int64_t)mChannelMask);
+
     int32_t channel_mask = -1;
     if (item->getInt32(MM_PREFIX "channelMask", &channel_mask)) {
         metrics_proto.set_channel_mask(channel_mask);
     }
-    // item->setCString(MM_PREFIX "encoding", toString(mFormat).c_str());
+
     std::string encoding;
     if (item->getString(MM_PREFIX "encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
-    // item->setInt32(MM_PREFIX "frameCount", (int32_t)mFrameCount);
+
     int32_t frame_count = -1;
     if (item->getInt32(MM_PREFIX "frameCount", &frame_count)) {
         metrics_proto.set_frame_count(frame_count);
     }
-    // item->setCString(MM_PREFIX "outDevice", toString(mOutDevice).c_str());
-    std::string outDevice;
-    if (item->getString(MM_PREFIX "outDevice", &outDevice)) {
-        metrics_proto.set_output_device(std::move(outDevice));
-    }
-    // item->setCString(MM_PREFIX "inDevice", toString(mInDevice).c_str());
-    std::string inDevice;
-    if (item->getString(MM_PREFIX "inDevice", &inDevice)) {
-        metrics_proto.set_input_device(std::move(inDevice));
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.mean", mIoJitterMs.getMean());
-    double iojitters_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &iojitters_ms_mean)) {
-        metrics_proto.set_io_jitter_mean_millis(iojitters_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "ioJitterMs.std", mIoJitterMs.getStdDev());
-    double iojitters_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &iojitters_ms_std)) {
-        metrics_proto.set_io_jitter_stddev_millis(iojitters_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.mean", mProcessTimeMs.getMean());
-    double process_time_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_ms_mean)) {
-        metrics_proto.set_process_time_mean_millis(process_time_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "processTimeMs.std", mProcessTimeMs.getStdDev());
-    double process_time_ms_std = -1;
-    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_ms_std)) {
-        metrics_proto.set_process_time_stddev_millis(process_time_ms_std);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.mean", tsjitter.getMean());
-    double timestamp_jitter_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_ms_mean)) {
-        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "timestampJitterMs.std", tsjitter.getStdDev());
-    double timestamp_jitter_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_ms_stddev)) {
-        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_ms_stddev);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
-    double latency_ms_mean = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_ms_mean)) {
-        metrics_proto.set_latency_mean_millis(latency_ms_mean);
-    }
-    // item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
-    double latency_ms_stddev = -1;
-    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_ms_stddev)) {
-        metrics_proto.set_latency_stddev_millis(latency_ms_stddev);
+
+    std::string output_device;
+    if (item->getString(MM_PREFIX "outDevice", &output_device)) {
+        metrics_proto.set_output_device(output_device);
     }
 
+    std::string input_device;
+    if (item->getString(MM_PREFIX "inDevice", &input_device)) {
+        metrics_proto.set_input_device(input_device);
+    }
+
+    double io_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.mean", &io_jitter_mean_millis)) {
+        metrics_proto.set_io_jitter_mean_millis(io_jitter_mean_millis);
+    }
+
+    double io_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "ioJitterMs.std", &io_jitter_stddev_millis)) {
+        metrics_proto.set_io_jitter_stddev_millis(io_jitter_stddev_millis);
+    }
+
+    double process_time_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.mean", &process_time_mean_millis)) {
+        metrics_proto.set_process_time_mean_millis(process_time_mean_millis);
+    }
+
+    double process_time_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "processTimeMs.std", &process_time_stddev_millis)) {
+        metrics_proto.set_process_time_stddev_millis(process_time_stddev_millis);
+    }
+
+    double timestamp_jitter_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.mean", &timestamp_jitter_mean_millis)) {
+        metrics_proto.set_timestamp_jitter_mean_millis(timestamp_jitter_mean_millis);
+    }
+
+    double timestamp_jitter_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "timestampJitterMs.std", &timestamp_jitter_stddev_millis)) {
+        metrics_proto.set_timestamp_jitter_stddev_millis(timestamp_jitter_stddev_millis);
+    }
+
+    double latency_mean_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.mean", &latency_mean_millis)) {
+        metrics_proto.set_latency_mean_millis(latency_mean_millis);
+    }
+
+    double latency_stddev_millis = -1;
+    if (item->getDouble(MM_PREFIX "latencyMs.std", &latency_stddev_millis)) {
+        metrics_proto.set_latency_stddev_millis(latency_stddev_millis);
+    }
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
@@ -190,17 +188,50 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
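+    // Note: framecount and samplerate in the summary below are the thread-level
+    // fields parsed earlier in this function, distinct from the frame_count and
+    // sample_rate fields above.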
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiothread_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " type:" << type
+            << " framecount:" << framecount
+            << " samplerate:" << samplerate
+            << " work_millis_hist:" << work_millis_hist
+            << " latency_millis_hist:" << latency_millis_hist
+            << " warmup_millis_hist:" << warmup_millis_hist
+            << " underruns:" << underruns
+            << " overruns:" << overruns
+            << " active_millis:" << active_millis
+            << " duration_millis:" << duration_millis
 
+            << " id:" << id
+            << " port_id:" << port_id
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+            << " output_device:" << output_device
+            << " input_device:" << input_device
+            << " io_jitter_mean_millis:" << io_jitter_mean_millis
+            << " io_jitter_stddev_millis:" << io_jitter_stddev_millis
+
+            << " process_time_mean_millis:" << process_time_mean_millis
+            << " process_time_stddev_millis:" << process_time_stddev_millis
+            << " timestamp_jitter_mean_millis:" << timestamp_jitter_mean_millis
+            << " timestamp_jitter_stddev_millis:" << timestamp_jitter_stddev_millis
+            << " latency_mean_millis:" << latency_mean_millis
+            << " latency_stddev_millis:" << latency_stddev_millis
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 6b08a78..59627ae 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -32,77 +32,78 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "StringUtils.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_audiotrack(const mediametrics::Item *item)
+bool statsd_audiotrack(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::AudioTrackData metrics_proto;
+    ::android::stats::mediametrics_message::AudioTrackData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
 
     // static constexpr char kAudioTrackStreamType[] = "android.media.audiotrack.streamtype";
     // optional string streamType;
-    std::string streamtype;
-    if (item->getString("android.media.audiotrack.streamtype", &streamtype)) {
-        metrics_proto.set_stream_type(std::move(streamtype));
+    std::string stream_type;
+    if (item->getString("android.media.audiotrack.streamtype", &stream_type)) {
+        metrics_proto.set_stream_type(stream_type);
     }
 
     // static constexpr char kAudioTrackContentType[] = "android.media.audiotrack.type";
     // optional string contentType;
-    std::string contenttype;
-    if (item->getString("android.media.audiotrack.type", &contenttype)) {
-        metrics_proto.set_content_type(std::move(contenttype));
+    std::string content_type;
+    if (item->getString("android.media.audiotrack.type", &content_type)) {
+        metrics_proto.set_content_type(content_type);
     }
 
     // static constexpr char kAudioTrackUsage[] = "android.media.audiotrack.usage";
     // optional string trackUsage;
-    std::string trackusage;
-    if (item->getString("android.media.audiotrack.usage", &trackusage)) {
-        metrics_proto.set_track_usage(std::move(trackusage));
+    std::string track_usage;
+    if (item->getString("android.media.audiotrack.usage", &track_usage)) {
+        metrics_proto.set_track_usage(track_usage);
     }
 
     // static constexpr char kAudioTrackSampleRate[] = "android.media.audiotrack.samplerate";
     // optional int32 samplerate;
-    int32_t samplerate = -1;
-    if (item->getInt32("android.media.audiotrack.samplerate", &samplerate)) {
-        metrics_proto.set_sample_rate(samplerate);
+    int32_t sample_rate = -1;
+    if (item->getInt32("android.media.audiotrack.samplerate", &sample_rate)) {
+        metrics_proto.set_sample_rate(sample_rate);
     }
 
     // static constexpr char kAudioTrackChannelMask[] = "android.media.audiotrack.channelmask";
     // optional int64 channelMask;
-    int64_t channelMask = -1;
-    if (item->getInt64("android.media.audiotrack.channelmask", &channelMask)) {
-        metrics_proto.set_channel_mask(channelMask);
+    int64_t channel_mask = -1;
+    if (item->getInt64("android.media.audiotrack.channelmask", &channel_mask)) {
+        metrics_proto.set_channel_mask(channel_mask);
     }
 
     // NB: These are not yet exposed as public Java API constants.
     // static constexpr char kAudioTrackUnderrunFrames[] = "android.media.audiotrack.underrunframes";
     // optional int32 underrunframes;
-    int32_t underrunframes = -1;
-    if (item->getInt32("android.media.audiotrack.underrunframes", &underrunframes)) {
-        metrics_proto.set_underrun_frames(underrunframes);
+    int32_t underrun_frames = -1;
+    if (item->getInt32("android.media.audiotrack.underrunframes", &underrun_frames)) {
+        metrics_proto.set_underrun_frames(underrun_frames);
     }
 
     // static constexpr char kAudioTrackStartupGlitch[] = "android.media.audiotrack.glitch.startup";
     // optional int32 startupglitch;
-    int32_t startupglitch = -1;
-    if (item->getInt32("android.media.audiotrack.glitch.startup", &startupglitch)) {
-        metrics_proto.set_startup_glitch(startupglitch);
+    int32_t startup_glitch = -1;
+    if (item->getInt32("android.media.audiotrack.glitch.startup", &startup_glitch)) {
+        metrics_proto.set_startup_glitch(startup_glitch);
     }
 
     // portId (int32)
@@ -113,7 +114,7 @@
     // encoding (string)
     std::string encoding;
     if (item->getString("android.media.audiotrack.encoding", &encoding)) {
-        metrics_proto.set_encoding(std::move(encoding));
+        metrics_proto.set_encoding(encoding);
     }
     // frameCount (int32)
     int32_t frame_count = -1;
@@ -123,7 +124,7 @@
     // attributes (string)
     std::string attributes;
     if (item->getString("android.media.audiotrack.attributes", &attributes)) {
-        metrics_proto.set_attributes(std::move(attributes));
+        metrics_proto.set_attributes(attributes);
     }
 
     std::string serialized;
@@ -132,17 +133,44 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    // Android S
+    // log_session_id (string)
+    std::string logSessionId;
+    (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
+    const auto log_session_id =
+            mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized,
+                               log_session_id.c_str());
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_audiotrack_reported:"
+            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " stream_type:" << stream_type
+            << " content_type:" << content_type
+            << " track_usage:" << track_usage
+            << " sample_rate:" << sample_rate
+            << " channel_mask:" << channel_mask
+            << " underrun_frames:" << underrun_frames
+            << " startup_glitch:" << startup_glitch
+            << " port_id:" << port_id
+            << " encoding:" << encoding
+            << " frame_count:" << frame_count
+
+            << " attributes:" << attributes
+
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index d502b30..46cbdc8 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -30,178 +30,449 @@
 #include <unistd.h>
 
 #include <statslog.h>
+#include <stats_event.h>
 
 #include "cleaner.h"
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "StringUtils.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_codec(const mediametrics::Item *item)
+bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
-    // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    AStatsEvent* event = AStatsEvent_obtain();
+    AStatsEvent_setAtomId(event, android::util::MEDIA_CODEC_REPORTED);
 
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    AStatsEvent_writeInt64(event, timestamp_nanos);
+
+    std::string package_name = item->getPkgName();
+    AStatsEvent_writeString(event, package_name.c_str());
+
+    int64_t package_version_code = item->getPkgVersionCode();
+    AStatsEvent_writeInt64(event, package_version_code);
+
+    int64_t media_apex_version = 0;
+    AStatsEvent_writeInt64(event, media_apex_version);
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::CodecData metrics_proto;
+    ::android::stats::mediametrics_message::CodecData metrics_proto;
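+    // From here on, each parsed value is reported twice: appended positionally to
+    // the flattened MEDIA_CODEC_REPORTED event (so the write order must match the
+    // atom definition) and set by name on the CodecData proto serialized below.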
 
     // flesh out the protobuf we'll hand off with our data
     //
-    // android.media.mediacodec.codec   string
     std::string codec;
     if (item->getString("android.media.mediacodec.codec", &codec)) {
-        metrics_proto.set_codec(std::move(codec));
+        metrics_proto.set_codec(codec);
     }
-    // android.media.mediacodec.mime    string
+    AStatsEvent_writeString(event, codec.c_str());
+
     std::string mime;
     if (item->getString("android.media.mediacodec.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
+        metrics_proto.set_mime(mime);
     }
-    // android.media.mediacodec.mode    string
+    AStatsEvent_writeString(event, mime.c_str());
+
     std::string mode;
-    if ( item->getString("android.media.mediacodec.mode", &mode)) {
-        metrics_proto.set_mode(std::move(mode));
+    if (item->getString("android.media.mediacodec.mode", &mode)) {
+        metrics_proto.set_mode(mode);
     }
-    // android.media.mediacodec.encoder int32
+    AStatsEvent_writeString(event, mode.c_str());
+
     int32_t encoder = -1;
-    if ( item->getInt32("android.media.mediacodec.encoder", &encoder)) {
+    if (item->getInt32("android.media.mediacodec.encoder", &encoder)) {
         metrics_proto.set_encoder(encoder);
     }
-    // android.media.mediacodec.secure  int32
+    AStatsEvent_writeInt32(event, encoder);
+
     int32_t secure = -1;
-    if ( item->getInt32("android.media.mediacodec.secure", &secure)) {
+    if (item->getInt32("android.media.mediacodec.secure", &secure)) {
         metrics_proto.set_secure(secure);
     }
-    // android.media.mediacodec.width   int32
+    AStatsEvent_writeInt32(event, secure);
+
     int32_t width = -1;
-    if ( item->getInt32("android.media.mediacodec.width", &width)) {
+    if (item->getInt32("android.media.mediacodec.width", &width)) {
         metrics_proto.set_width(width);
     }
-    // android.media.mediacodec.height  int32
+    AStatsEvent_writeInt32(event, width);
+
     int32_t height = -1;
-    if ( item->getInt32("android.media.mediacodec.height", &height)) {
+    if (item->getInt32("android.media.mediacodec.height", &height)) {
         metrics_proto.set_height(height);
     }
-    // android.media.mediacodec.rotation-degrees        int32
+    AStatsEvent_writeInt32(event, height);
+
     int32_t rotation = -1;
-    if ( item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
+    if (item->getInt32("android.media.mediacodec.rotation-degrees", &rotation)) {
         metrics_proto.set_rotation(rotation);
     }
-    // android.media.mediacodec.crypto  int32 (although missing if not needed
+    AStatsEvent_writeInt32(event, rotation);
+
     int32_t crypto = -1;
-    if ( item->getInt32("android.media.mediacodec.crypto", &crypto)) {
+    if (item->getInt32("android.media.mediacodec.crypto", &crypto)) {
         metrics_proto.set_crypto(crypto);
     }
-    // android.media.mediacodec.profile int32
+    AStatsEvent_writeInt32(event, crypto);
+
     int32_t profile = -1;
-    if ( item->getInt32("android.media.mediacodec.profile", &profile)) {
+    if (item->getInt32("android.media.mediacodec.profile", &profile)) {
         metrics_proto.set_profile(profile);
     }
-    // android.media.mediacodec.level   int32
+    AStatsEvent_writeInt32(event, profile);
+
     int32_t level = -1;
-    if ( item->getInt32("android.media.mediacodec.level", &level)) {
+    if (item->getInt32("android.media.mediacodec.level", &level)) {
         metrics_proto.set_level(level);
     }
-    // android.media.mediacodec.maxwidth        int32
-    int32_t maxwidth = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxwidth)) {
-        metrics_proto.set_max_width(maxwidth);
+    AStatsEvent_writeInt32(event, level);
+
+    int32_t max_width = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
+        metrics_proto.set_max_width(max_width);
     }
-    // android.media.mediacodec.maxheight       int32
-    int32_t maxheight = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &maxheight)) {
-        metrics_proto.set_max_height(maxheight);
+    AStatsEvent_writeInt32(event, max_width);
+
+    int32_t max_height = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
+        metrics_proto.set_max_height(max_height);
     }
-    // android.media.mediacodec.errcode         int32
-    int32_t errcode = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &errcode)) {
-        metrics_proto.set_error_code(errcode);
+    AStatsEvent_writeInt32(event, max_height);
+
+    int32_t error_code = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
+        metrics_proto.set_error_code(error_code);
     }
-    // android.media.mediacodec.errstate        string
-    std::string errstate;
-    if ( item->getString("android.media.mediacodec.errstate", &errstate)) {
-        metrics_proto.set_error_state(std::move(errstate));
+    AStatsEvent_writeInt32(event, error_code);
+
+    std::string error_state;
+    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
+        metrics_proto.set_error_state(error_state);
     }
-    // android.media.mediacodec.latency.max  int64
+    AStatsEvent_writeString(event, error_state.c_str());
+
     int64_t latency_max = -1;
-    if ( item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
+    if (item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
         metrics_proto.set_latency_max(latency_max);
     }
-    // android.media.mediacodec.latency.min  int64
+    AStatsEvent_writeInt64(event, latency_max);
+
     int64_t latency_min = -1;
-    if ( item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
+    if (item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
         metrics_proto.set_latency_min(latency_min);
     }
-    // android.media.mediacodec.latency.avg  int64
+    AStatsEvent_writeInt64(event, latency_min);
+
     int64_t latency_avg = -1;
-    if ( item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
+    if (item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
         metrics_proto.set_latency_avg(latency_avg);
     }
-    // android.media.mediacodec.latency.n    int64
+    AStatsEvent_writeInt64(event, latency_avg);
+
     int64_t latency_count = -1;
-    if ( item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
+    if (item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
         metrics_proto.set_latency_count(latency_count);
     }
-    // android.media.mediacodec.latency.unknown    int64
+    AStatsEvent_writeInt64(event, latency_count);
+
     int64_t latency_unknown = -1;
-    if ( item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
+    if (item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
         metrics_proto.set_latency_unknown(latency_unknown);
     }
-    // android.media.mediacodec.queueSecureInputBufferError  int32
-    if (int32_t queueSecureInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-                &queueSecureInputBufferError)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
-    }
-    // android.media.mediacodec.queueInputBufferError  int32
-    if (int32_t queueInputBufferError = -1;
-        item->getInt32("android.media.mediacodec.queueInputBufferError",
-                &queueInputBufferError)) {
-        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
-    }
-    // android.media.mediacodec.latency.hist    NOT EMITTED
+    AStatsEvent_writeInt64(event, latency_unknown);
 
-    // android.media.mediacodec.bitrate_mode string
+    int32_t queue_secure_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
+            &queue_secure_input_buffer_error)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
+    }
+    AStatsEvent_writeInt32(event, queue_secure_input_buffer_error);
+
+    int32_t queue_input_buffer_error = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
+            &queue_input_buffer_error)) {
+        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
+    }
+    AStatsEvent_writeInt32(event, queue_input_buffer_error);
+
     std::string bitrate_mode;
     if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(std::move(bitrate_mode));
+        metrics_proto.set_bitrate_mode(bitrate_mode);
     }
-    // android.media.mediacodec.bitrate int32
+    AStatsEvent_writeString(event, bitrate_mode.c_str());
+
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
         metrics_proto.set_bitrate(bitrate);
     }
-    // android.media.mediacodec.lifetimeMs int64
-    int64_t lifetimeMs = -1;
-    if ( item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMs)) {
-        lifetimeMs = mediametrics::bucket_time_minutes(lifetimeMs);
-        metrics_proto.set_lifetime_millis(lifetimeMs);
+    AStatsEvent_writeInt32(event, bitrate);
+
+    int64_t lifetime_millis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
+        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
+        metrics_proto.set_lifetime_millis(lifetime_millis);
     }
+    AStatsEvent_writeInt64(event, lifetime_millis);
+
+    int64_t playback_duration_sec = -1;
+    item->getInt64("android.media.mediacodec.playback-duration-sec", &playback_duration_sec);
+    // Do NOT record playback-duration in the metrics_proto - it should only
+    // exist in the flattened atom.
+    AStatsEvent_writeInt64(event, playback_duration_sec);
+
+    std::string sessionId;
+    if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
+        sessionId = mediametrics::stringutils::sanitizeLogSessionId(sessionId);
+        metrics_proto.set_log_session_id(sessionId);
+    }
+    AStatsEvent_writeString(event, sessionId.c_str());
+
+    int32_t channelCount = -1;
+    if (item->getInt32("android.media.mediacodec.channelCount", &channelCount)) {
+        metrics_proto.set_channel_count(channelCount);
+    }
+    AStatsEvent_writeInt32(event, channelCount);
+
+    int32_t sampleRate = -1;
+    if (item->getInt32("android.media.mediacodec.sampleRate", &sampleRate)) {
+        metrics_proto.set_sample_rate(sampleRate);
+    }
+    AStatsEvent_writeInt32(event, sampleRate);
+
+    // TODO PWG may want these fuzzed up a bit to obscure some precision
+    int64_t bytes = -1;
+    if (item->getInt64("android.media.mediacodec.vencode.bytes", &bytes)) {
+        metrics_proto.set_video_encode_bytes(bytes);
+    }
+    AStatsEvent_writeInt64(event, bytes);
+
+    int64_t frames = -1;
+    if (item->getInt64("android.media.mediacodec.vencode.frames", &frames)) {
+        metrics_proto.set_video_encode_frames(frames);
+    }
+    AStatsEvent_writeInt64(event, frames);
+
+    int64_t inputBytes = -1;
+    if (item->getInt64("android.media.mediacodec.video.input.bytes", &inputBytes)) {
+        metrics_proto.set_video_input_bytes(inputBytes);
+    }
+    AStatsEvent_writeInt64(event, inputBytes);
+
+    int64_t inputFrames = -1;
+    if (item->getInt64("android.media.mediacodec.video.input.frames", &inputFrames)) {
+        metrics_proto.set_video_input_frames(inputFrames);
+    }
+    AStatsEvent_writeInt64(event, inputFrames);
+
+    int64_t durationUs = -1;
+    if (item->getInt64("android.media.mediacodec.vencode.durationUs", &durationUs)) {
+        metrics_proto.set_video_encode_duration_us(durationUs);
+    }
+    AStatsEvent_writeInt64(event, durationUs);
+
+    int32_t colorFormat = -1;
+    if (item->getInt32("android.media.mediacodec.color-format", &colorFormat)) {
+        metrics_proto.set_color_format(colorFormat);
+    }
+    AStatsEvent_writeInt32(event, colorFormat);
+
+    double frameRate = -1.0;
+    if (item->getDouble("android.media.mediacodec.frame-rate", &frameRate)) {
+        metrics_proto.set_frame_rate(frameRate);
+    }
+    AStatsEvent_writeFloat(event, (float) frameRate);
+
+    double captureRate = -1.0;
+    if (item->getDouble("android.media.mediacodec.capture-rate", &captureRate)) {
+        metrics_proto.set_capture_rate(captureRate);
+    }
+    AStatsEvent_writeFloat(event, (float) captureRate);
+
+    double operatingRate = -1.0;
+    if (item->getDouble("android.media.mediacodec.operating-rate", &operatingRate)) {
+        metrics_proto.set_operating_rate(operatingRate);
+    }
+    AStatsEvent_writeFloat(event, (float) operatingRate);
+
+    int32_t priority = -1;
+    if (item->getInt32("android.media.mediacodec.priority", &priority)) {
+        metrics_proto.set_priority(priority);
+    }
+    AStatsEvent_writeInt32(event, priority);
+
+    int32_t qpIMin = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-i-min", &qpIMin)) {
+        metrics_proto.set_video_qp_i_min(qpIMin);
+    }
+    AStatsEvent_writeInt32(event, qpIMin);
+
+    int32_t qpIMax = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-i-max", &qpIMax)) {
+        metrics_proto.set_video_qp_i_max(qpIMax);
+    }
+    AStatsEvent_writeInt32(event, qpIMax);
+
+    int32_t qpPMin = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-p-min", &qpPMin)) {
+        metrics_proto.set_video_qp_p_min(qpPMin);
+    }
+    AStatsEvent_writeInt32(event, qpPMin);
+
+    int32_t qpPMax = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-p-max", &qpPMax)) {
+        metrics_proto.set_video_qp_p_max(qpPMax);
+    }
+    AStatsEvent_writeInt32(event, qpPMax);
+
+    int32_t qpBMin = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-b-min", &qpBMin)) {
+        metrics_proto.set_video_qp_b_min(qpBMin);
+    }
+    AStatsEvent_writeInt32(event, qpBMin);
+
+    int32_t qpBMax = -1;
+    if (item->getInt32("android.media.mediacodec.video-qp-b-max", &qpBMax)) {
+        metrics_proto.set_video_qp_b_max(qpBMax);
+    }
+    AStatsEvent_writeInt32(event, qpBMax);
+
+    int32_t originalBitrate = -1;
+    if (item->getInt32("android.media.mediacodec.original.bitrate", &originalBitrate)) {
+        metrics_proto.set_original_bitrate(originalBitrate);
+    }
+    AStatsEvent_writeInt32(event, originalBitrate);
+
+    int32_t shapingEnhanced = -1;
+    if ( item->getInt32("android.media.mediacodec.shaped", &shapingEnhanced)) {
+        metrics_proto.set_shaping_enhanced(shapingEnhanced);
+    }
+    AStatsEvent_writeInt32(event, shapingEnhanced);
+
+    int32_t qpIMinOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-i-min", &qpIMinOri)) {
+        metrics_proto.set_original_video_qp_i_min(qpIMinOri);
+    }
+    AStatsEvent_writeInt32(event, qpIMinOri);
+
+    int32_t qpIMaxOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-i-max", &qpIMaxOri)) {
+        metrics_proto.set_original_video_qp_i_max(qpIMaxOri);
+    }
+    AStatsEvent_writeInt32(event, qpIMaxOri);
+
+    int32_t qpPMinOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-p-min", &qpPMinOri)) {
+        metrics_proto.set_original_video_qp_p_min(qpPMinOri);
+    }
+    AStatsEvent_writeInt32(event, qpPMinOri);
+
+    int32_t qpPMaxOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-p-max", &qpPMaxOri)) {
+        metrics_proto.set_original_video_qp_p_max(qpPMaxOri);
+    }
+    AStatsEvent_writeInt32(event, qpPMaxOri);
+
+    int32_t qpBMinOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-b-min", &qpBMinOri)) {
+        metrics_proto.set_original_video_qp_b_min(qpBMinOri);
+    }
+    AStatsEvent_writeInt32(event, qpBMinOri);
+
+    int32_t qpBMaxOri = -1;
+    if ( item->getInt32("android.media.mediacodec.original-video-qp-b-max", &qpBMaxOri)) {
+        metrics_proto.set_original_video_qp_b_max(qpBMaxOri);
+    }
+    AStatsEvent_writeInt32(event, qpBMaxOri);
+
+    int err = AStatsEvent_write(event);
+    if (err < 0) {
+      ALOGE("Failed to write codec metrics to statsd (%d)", err);
+    }
+    AStatsEvent_release(event);
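+    // The proto-carrying MEDIAMETRICS_CODEC_REPORTED atom is still written below,
+    // in addition to the flattened MEDIA_CODEC_REPORTED event released above.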
 
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
+                               bf_serialized);
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_codec_reported:"
+            << android::util::MEDIAMETRICS_CODEC_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " codec:" << codec
+            << " mime:" << mime
+            << " mode:" << mode
+            << " encoder:" << encoder
+            << " secure:" << secure
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " crypto:" << crypto
+            << " profile:" << profile
+
+            << " level:" << level
+            << " max_width:" << max_width
+            << " max_height:" << max_height
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " latency_max:" << latency_max
+            << " latency_min:" << latency_min
+            << " latency_avg:" << latency_avg
+            << " latency_count:" << latency_count
+            << " latency_unknown:" << latency_unknown
+
+            << " queue_input_buffer_error:" << queue_input_buffer_error
+            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
+            << " bitrate_mode:" << bitrate_mode
+            << " bitrate:" << bitrate
+            << " original_bitrate:" << originalBitrate
+            << " lifetime_millis:" << lifetime_millis
+            << " playback_duration_seconds:" << playback_duration_sec
+            << " log_session_id:" << sessionId
+            << " channel_count:" << channelCount
+            << " sample_rate:" << sampleRate
+            << " encode_bytes:" << bytes
+            << " encode_frames:" << frames
+            << " encode_duration_us:" << durationUs
+            << " color_format:" << colorFormat
+            << " frame_rate:" << frameRate
+            << " capture_rate:" << captureRate
+            << " operating_rate:" << operatingRate
+            << " priority:" << priority
+            << " shaping_enhanced:" << shapingEnhanced
+
+            << " qp_i_min:" << qpIMin
+            << " qp_i_max:" << qpIMax
+            << " qp_p_min:" << qpPMin
+            << " qp_p_max:" << qpPMax
+            << " qp_b_min:" << qpBMin
+            << " qp_b_max:" << qpBMax
+            << " original_qp_i_min:" << qpIMinOri
+            << " original_qp_i_max:" << qpIMaxOri
+            << " original_qp_p_min:" << qpPMinOri
+            << " original_qp_p_max:" << qpPMaxOri
+            << " original_qp_b_min:" << qpBMinOri
+            << " original_qp_b_max:" << qpBMaxOri
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
 
     return true;
 }
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index ac58929..287fb8d 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "statsd_drm"
 #include <utils/Log.h>
+#include <media/stagefright/foundation/base64.h>
 
 #include <stdint.h>
 #include <inttypes.h>
@@ -31,90 +32,79 @@
 #include <pwd.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "iface_statsd.h"
 
 #include <statslog.h>
 
 #include <array>
 #include <string>
+#include <vector>
 
 namespace android {
 
 // mediadrm
-bool statsd_mediadrm(const mediametrics::Item *item)
+bool statsd_mediadrm(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string vendor;
     (void) item->getString("vendor", &vendor);
     std::string description;
     (void) item->getString("description", &description);
-    std::string serialized_metrics;
-    (void) item->getString("serialized_metrics", &serialized_metrics);
-
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized(serialized_metrics.c_str(),
-                                                serialized_metrics.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   vendor.c_str(),
-                                   description.c_str(),
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: mediadrm private data (len=%zu)", serialized_metrics.size());
-    }
-
-    return true;
-}
-
-// widevineCDM
-bool statsd_widevineCDM(const mediametrics::Item *item)
-{
-    if (item == nullptr) return false;
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
 
     std::string serialized_metrics;
     (void) item->getString("serialized_metrics", &serialized_metrics);
-
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized(serialized_metrics.c_str(),
-                                                serialized_metrics.size());
-        android::util::stats_write(android::util::MEDIAMETRICS_DRM_WIDEVINE_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
-    } else {
-        ALOGV("NOT sending: widevine private data (len=%zu)", serialized_metrics.size());
+    if (serialized_metrics.empty()) {
+        ALOGD("statsd_mediadrm skipping empty entry");
+        return false;
     }
 
+    // The serialized-metrics field is kept only for backward compatibility of the
+    // atom layout; it is no longer populated, so an empty payload is sent instead.
+    const std::string kUnusedField("");
+    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        vendor.c_str(),
+        description.c_str(),
+        bf_serialized);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_mediadrm_reported:"
+            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " vendor:" << vendor
+            << " description:" << description
+            // omitting serialized
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
 // drmmanager
-bool statsd_drmmanager(const mediametrics::Item *item)
+bool statsd_drmmanager(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     using namespace std::string_literals;
     if (item == nullptr) return false;
 
-    if (!enabled_statsd) {
-        ALOGV("NOT sending: drmmanager data");
-        return true;
-    }
-
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     std::string plugin_id;
     (void) item->getString("plugin_id", &plugin_id);
@@ -132,8 +122,9 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
-                               timestamp, pkgName.c_str(), pkgVersionCode, mediaApexVersion,
+    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+                               timestamp_nanos, package_name.c_str(), package_version_code,
+                               media_apex_version,
                                plugin_id.c_str(), description.c_str(),
                                method_id, mime_types.c_str(),
                                methodCounts[0], methodCounts[1], methodCounts[2],
@@ -142,6 +133,101 @@
                                methodCounts[9], methodCounts[10], methodCounts[11],
                                methodCounts[12]);
 
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_drmmanager_reported:"
+            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
+
+            << " plugin_id:" << plugin_id
+            << " description:" << description
+            << " method_id:" << method_id
+            << " mime_types:" << mime_types;
+
+    for (size_t i = 0; i < methodCounts.size(); ++i) {
+        log << " method_" << i << ":" << methodCounts[i];
+    }
+    log << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+    return true;
+}
+
+namespace {
+std::vector<uint8_t> base64DecodeNoPad(std::string& str) {
+    if (str.empty()) {
+        return {};
+    }
+
+    switch (str.length() % 4) {
+    case 3: str += "="; break;
+    case 2: str += "=="; break;
+    case 1: str += "==="; break;
+    case 0: /* unchanged */ break;
+    }
+
+    std::vector<uint8_t> buf(str.length() / 4 * 3, 0);
+    size_t size = buf.size();
+    if (decodeBase64(buf.data(), &size, str.c_str()) && size <= buf.size()) {
+        buf.erase(buf.begin() + size, buf.end());
+        return buf;
+    }
+    return {};
+}
+} // namespace
+
+// |out| and its contents are memory-managed by statsd.
+bool statsd_mediadrm_puller(
+        const std::shared_ptr<const mediametrics::Item>& item, AStatsEventList* out,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    if (item == nullptr) {
+        return false;
+    }
+
+    std::string serialized_metrics;
+    (void) item->getString("serialized_metrics", &serialized_metrics);
+    const auto framework_raw(base64DecodeNoPad(serialized_metrics));
+
+    std::string plugin_metrics;
+    (void) item->getString("plugin_metrics", &plugin_metrics);
+    const auto plugin_raw(base64DecodeNoPad(plugin_metrics));
+
+    if (serialized_metrics.size() == 0 && plugin_metrics.size() == 0) {
+        ALOGD("statsd_mediadrm_puller skipping empty entry");
+        return false;
+    }
+
+    std::string vendor;
+    (void) item->getString("vendor", &vendor);
+    std::string description;
+    (void) item->getString("description", &description);
+
+    // Memory for |event| is internally managed by statsd.
+    AStatsEvent* event = AStatsEventList_addStatsEvent(out);
+    AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+    AStatsEvent_writeString(event, item->getPkgName().c_str());
+    AStatsEvent_writeInt64(event, item->getPkgVersionCode());
+    AStatsEvent_writeString(event, vendor.c_str());
+    AStatsEvent_writeString(event, description.c_str());
+    AStatsEvent_writeByteArray(event, framework_raw.data(), framework_raw.size());
+    AStatsEvent_writeByteArray(event, plugin_raw.data(), plugin_raw.size());
+    AStatsEvent_build(event);
+
+    std::stringstream log;
+    log << "pulled:" << " {"
+            << " media_drm_activity_info:"
+            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << " package_name:" << item->getPkgName()
+            << " package_version_code:" << item->getPkgVersionCode()
+            << " vendor:" << vendor
+            << " description:" << description
+            << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
+            << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
+            << " }";
+    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
     return true;
 }
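A minimal, standalone sketch of the padding step performed by base64DecodeNoPad() above, for readers unfamiliar with unpadded base64 (illustrative only; the patch itself relies on the decodeBase64() helper visible in the hunk):

// Sketch: restore '=' padding on a base64 string stored without it, and
// compute an upper bound on the decoded size. Not part of the patch.
#include <cstddef>
#include <string>

static std::string restorePadding(std::string s) {
    const size_t rem = s.length() % 4;
    if (rem != 0) {
        // Mirrors the switch above: append (4 - rem) '=' characters. A valid
        // unpadded length is never 1 (mod 4); such input fails to decode later.
        s.append(4 - rem, '=');
    }
    return s;
}

static size_t maxDecodedSize(const std::string& padded) {
    return padded.length() / 4 * 3;  // every 4 base64 chars yield at most 3 bytes
}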
 
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 16814d9..bcf2e0a 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -32,43 +32,62 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "StringUtils.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_extractor(const mediametrics::Item *item)
+bool statsd_extractor(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::ExtractorData metrics_proto;
+    ::android::stats::mediametrics_message::ExtractorData metrics_proto;
 
-    // flesh out the protobuf we'll hand off with our data
-    //
-
-    // android.media.mediaextractor.fmt         string
-    std::string fmt;
-    if (item->getString("android.media.mediaextractor.fmt", &fmt)) {
-        metrics_proto.set_format(std::move(fmt));
+    std::string format;
+    if (item->getString("android.media.mediaextractor.fmt", &format)) {
+        metrics_proto.set_format(format);
     }
-    // android.media.mediaextractor.mime        string
+
     std::string mime;
     if (item->getString("android.media.mediaextractor.mime", &mime)) {
-        metrics_proto.set_mime(std::move(mime));
+        metrics_proto.set_mime(mime);
     }
-    // android.media.mediaextractor.ntrk        int32
-    int32_t ntrk = -1;
-    if (item->getInt32("android.media.mediaextractor.ntrk", &ntrk)) {
-        metrics_proto.set_tracks(ntrk);
+
+    int32_t tracks = -1;
+    if (item->getInt32("android.media.mediaextractor.ntrk", &tracks)) {
+        metrics_proto.set_tracks(tracks);
+    }
+
+    std::string entry_point_string;
+    stats::mediametrics_message::ExtractorData::EntryPoint entry_point =
+            stats::mediametrics_message::ExtractorData_EntryPoint_OTHER;
+    if (item->getString("android.media.mediaextractor.entry", &entry_point_string)) {
+      if (entry_point_string == "sdk") {
+        entry_point = stats::mediametrics_message::ExtractorData_EntryPoint_SDK;
+      } else if (entry_point_string == "ndk-with-jvm") {
+        entry_point = stats::mediametrics_message::ExtractorData_EntryPoint_NDK_WITH_JVM;
+      } else if (entry_point_string == "ndk-no-jvm") {
+        entry_point = stats::mediametrics_message::ExtractorData_EntryPoint_NDK_NO_JVM;
+      } else {
+        entry_point = stats::mediametrics_message::ExtractorData_EntryPoint_OTHER;
+      }
+      metrics_proto.set_entry_point(entry_point);
+    }
+
+    std::string log_session_id;
+    if (item->getString("android.media.mediaextractor.logSessionId", &log_session_id)) {
+        log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
+        metrics_proto.set_log_session_id(log_session_id);
     }
 
     std::string serialized;
@@ -77,17 +96,27 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_extractor_reported:"
+            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
-
+            << " format:" << format
+            << " mime:" << mime
+            << " tracks:" << tracks
+            << " entry_point:" << entry_point_string << "(" << entry_point << ")"
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
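The entry-point mapping above walks an if/else chain over the strings reported by the extractor. An equivalent table-driven sketch, with a local enum standing in for the generated ExtractorData::EntryPoint constants (standalone illustration, not the patch's code):

// Sketch: map the reported entry-point strings to an enum via a lookup table.
#include <string>
#include <unordered_map>

enum class EntryPoint { SDK, NDK_WITH_JVM, NDK_NO_JVM, OTHER };

static EntryPoint parseEntryPoint(const std::string& s) {
    static const std::unordered_map<std::string, EntryPoint> kMap = {
        {"sdk", EntryPoint::SDK},
        {"ndk-with-jvm", EntryPoint::NDK_WITH_JVM},
        {"ndk-no-jvm", EntryPoint::NDK_NO_JVM},
    };
    const auto it = kMap.find(s);
    return it == kMap.end() ? EntryPoint::OTHER : it->second;
}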
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 262b2ae..921b320 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -31,21 +31,20 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
+#include "StringUtils.h"
 #include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_mediaparser(const mediametrics::Item *item)
+bool statsd_mediaparser(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
-    if (item == nullptr) {
-        return false;
-    }
+    if (item == nullptr) return false;
 
-    // statsd wrapper data.
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
 
     std::string parserName;
     item->getString("android.media.mediaparser.parserName", &parserName);
@@ -80,26 +79,48 @@
     int32_t videoHeight = -1;
     item->getInt32("android.media.mediaparser.videoHeight", &videoHeight);
 
-    if (enabled_statsd) {
-        (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
-                                   timestamp,
-                                   pkgName.c_str(),
-                                   pkgVersionCode,
-                                   parserName.c_str(),
-                                   createdByName,
-                                   parserPool.c_str(),
-                                   lastException.c_str(),
-                                   resourceByteCount,
-                                   durationMillis,
-                                   trackMimeTypes.c_str(),
-                                   trackCodecs.c_str(),
-                                   alteredParameters.c_str(),
-                                   videoWidth,
-                                   videoHeight);
-    } else {
-        ALOGV("NOT sending MediaParser media metrics.");
-    }
+    std::string logSessionId;
+    item->getString("android.media.mediaparser.logSessionId", &logSessionId);
+    logSessionId = mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
 
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+                               timestamp_nanos,
+                               package_name.c_str(),
+                               package_version_code,
+                               parserName.c_str(),
+                               createdByName,
+                               parserPool.c_str(),
+                               lastException.c_str(),
+                               resourceByteCount,
+                               durationMillis,
+                               trackMimeTypes.c_str(),
+                               trackCodecs.c_str(),
+                               alteredParameters.c_str(),
+                               videoWidth,
+                               videoHeight,
+                               logSessionId.c_str());
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_mediaparser_reported:"
+            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " parser_name:" << parserName
+            << " created_by_name:" << createdByName
+            << " parser_pool:" << parserPool
+            << " last_exception:" << lastException
+            << " resource_byte_count:" << resourceByteCount
+            << " duration_millis:" << durationMillis
+            << " track_mime_types:" << trackMimeTypes
+            << " track_codecs:" << trackCodecs
+            << " altered_parameters:" << alteredParameters
+            << " video_width:" << videoWidth
+            << " video_height:" << videoHeight
+            << " log_session_id:" << logSessionId
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index a8d0f55..bdee1f2 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -32,7 +32,7 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
@@ -41,34 +41,35 @@
  *  handles nuplayer AND nuplayer2
  *  checks for the union of what the two players generate
  */
-bool statsd_nuplayer(const mediametrics::Item *item)
+bool statsd_nuplayer(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::NuPlayerData metrics_proto;
+    ::android::stats::mediametrics_message::NuPlayerData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
 
     // differentiate between nuplayer and nuplayer2
-    metrics_proto.set_whichplayer(item->getKey().c_str());
+    std::string whichPlayer = item->getKey();
+    metrics_proto.set_whichplayer(whichPlayer.c_str());
 
     std::string video_mime;
     if (item->getString("android.media.mediaplayer.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     std::string video_codec;
     if (item->getString("android.media.mediaplayer.video.codec", &video_codec)) {
-        metrics_proto.set_video_codec(std::move(video_codec));
+        metrics_proto.set_video_codec(video_codec);
     }
 
     int32_t width = -1;
@@ -92,32 +93,32 @@
     if (item->getInt64("android.media.mediaplayer.startupdropped", &frames_dropped_startup)) {
         metrics_proto.set_frames_dropped_startup(frames_dropped_startup);
     }
-    double fps = -1.0;
-    if (item->getDouble("android.media.mediaplayer.fps", &fps)) {
-        metrics_proto.set_framerate(fps);
+    double framerate = -1.0;
+    if (item->getDouble("android.media.mediaplayer.fps", &framerate)) {
+        metrics_proto.set_framerate(framerate);
     }
 
     std::string audio_mime;
     if (item->getString("android.media.mediaplayer.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     std::string audio_codec;
     if (item->getString("android.media.mediaplayer.audio.codec", &audio_codec)) {
-        metrics_proto.set_audio_codec(std::move(audio_codec));
+        metrics_proto.set_audio_codec(audio_codec);
     }
 
-    int64_t duration_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_ms)) {
-        metrics_proto.set_duration_millis(duration_ms);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
-    int64_t playing_ms = -1;
-    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_ms)) {
-        metrics_proto.set_playing_millis(playing_ms);
+    int64_t playing_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.playingMs", &playing_millis)) {
+        metrics_proto.set_playing_millis(playing_millis);
     }
 
-    int32_t err = -1;
-    if (item->getInt32("android.media.mediaplayer.err", &err)) {
-        metrics_proto.set_error(err);
+    int32_t error = -1;
+    if (item->getInt32("android.media.mediaplayer.err", &error)) {
+        metrics_proto.set_error(error);
     }
     int32_t error_code = -1;
     if (item->getInt32("android.media.mediaplayer.errcode", &error_code)) {
@@ -125,45 +126,74 @@
     }
     std::string error_state;
     if (item->getString("android.media.mediaplayer.errstate", &error_state)) {
-        metrics_proto.set_error_state(std::move(error_state));
+        metrics_proto.set_error_state(error_state);
     }
 
     std::string data_source_type;
     if (item->getString("android.media.mediaplayer.dataSource", &data_source_type)) {
-        metrics_proto.set_data_source_type(std::move(data_source_type));
+        metrics_proto.set_data_source_type(data_source_type);
     }
 
-    int64_t rebufferingMs = -1;
-    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebufferingMs)) {
-        metrics_proto.set_rebuffering_millis(rebufferingMs);
+    int64_t rebuffering_millis = -1;
+    if (item->getInt64("android.media.mediaplayer.rebufferingMs", &rebuffering_millis)) {
+        metrics_proto.set_rebuffering_millis(rebuffering_millis);
     }
     int32_t rebuffers = -1;
     if (item->getInt32("android.media.mediaplayer.rebuffers", &rebuffers)) {
         metrics_proto.set_rebuffers(rebuffers);
     }
-    int32_t rebufferExit = -1;
-    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebufferExit)) {
-        metrics_proto.set_rebuffer_at_exit(rebufferExit);
+    int32_t rebuffer_at_exit = -1;
+    if (item->getInt32("android.media.mediaplayer.rebufferExit", &rebuffer_at_exit)) {
+        metrics_proto.set_rebuffer_at_exit(rebuffer_at_exit);
     }
 
-
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize nuplayer metrics");
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_nuplayer_reported:"
+            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
+            << " whichPlayer:" << whichPlayer
+            << " video_mime:" << video_mime
+            << " video_codec:" << video_codec
+            << " width:" << width
+            << " height:" << height
+            << " frames:" << frames
+            << " frames_dropped:" << frames_dropped
+            << " framerate:" << framerate
+            << " audio_mime:" << audio_mime
+            << " audio_codec:" << audio_codec
+
+            << " duration_millis:" << duration_millis
+            << " playing_millis:" << playing_millis
+            << " error:" << error
+            << " error_code:" << error_code
+            << " error_state:" << error_state
+            << " data_source_type:" << data_source_type
+            << " rebuffering_millis:" << rebuffering_millis
+            << " rebuffers:" << rebuffers
+            << " rebuffer_at_exit:" << rebuffer_at_exit
+            << " frames_dropped_startup:" << frames_dropped_startup
+
+            // TODO NuPlayer - add log_session_id
+            // << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 2e5ada4..b29ad73 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -32,48 +32,55 @@
 #include <statslog.h>
 
 #include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "StringUtils.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
 #include "iface_statsd.h"
 
 namespace android {
 
-bool statsd_recorder(const mediametrics::Item *item)
+bool statsd_recorder(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     // these go into the statsd wrapper
-    const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
-    std::string pkgName = item->getPkgName();
-    int64_t pkgVersionCode = item->getPkgVersionCode();
-    int64_t mediaApexVersion = 0;
-
+    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
+    const std::string package_name = item->getPkgName();
+    const int64_t package_version_code = item->getPkgVersionCode();
+    const int64_t media_apex_version = 0;
 
     // the rest into our own proto
     //
-    ::android::stats::mediametrics::RecorderData metrics_proto;
+    ::android::stats::mediametrics_message::RecorderData metrics_proto;
 
     // flesh out the protobuf we'll hand off with our data
     //
 
+    // string kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
+    std::string log_session_id;
+    if (item->getString("android.media.mediarecorder.log-session-id", &log_session_id)) {
+        log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
+        metrics_proto.set_log_session_id(log_session_id);
+    }
     // string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
     std::string audio_mime;
     if (item->getString("android.media.mediarecorder.audio.mime", &audio_mime)) {
-        metrics_proto.set_audio_mime(std::move(audio_mime));
+        metrics_proto.set_audio_mime(audio_mime);
     }
     // string kRecorderVideoMime = "android.media.mediarecorder.video.mime";
     std::string video_mime;
     if (item->getString("android.media.mediarecorder.video.mime", &video_mime)) {
-        metrics_proto.set_video_mime(std::move(video_mime));
+        metrics_proto.set_video_mime(video_mime);
     }
     // int32 kRecorderVideoProfile = "android.media.mediarecorder.video-encoder-profile";
-    int32_t videoProfile = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &videoProfile)) {
-        metrics_proto.set_video_profile(videoProfile);
+    int32_t video_profile = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-profile", &video_profile)) {
+        metrics_proto.set_video_profile(video_profile);
     }
     // int32 kRecorderVideoLevel = "android.media.mediarecorder.video-encoder-level";
-    int32_t videoLevel = -1;
-    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &videoLevel)) {
-        metrics_proto.set_video_level(videoLevel);
+    int32_t video_level = -1;
+    if (item->getInt32("android.media.mediarecorder.video-encoder-level", &video_level)) {
+        metrics_proto.set_video_level(video_level);
     }
     // int32 kRecorderWidth = "android.media.mediarecorder.width";
     int32_t width = -1;
@@ -97,73 +104,73 @@
     }
 
     // int32 kRecorderCaptureFps = "android.media.mediarecorder.capture-fps";
-    int32_t captureFps = -1;
-    if (item->getInt32("android.media.mediarecorder.capture-fps", &captureFps)) {
-        metrics_proto.set_capture_fps(captureFps);
+    int32_t capture_fps = -1;
+    if (item->getInt32("android.media.mediarecorder.capture-fps", &capture_fps)) {
+        metrics_proto.set_capture_fps(capture_fps);
     }
     // double kRecorderCaptureFpsEnable = "android.media.mediarecorder.capture-fpsenable";
-    double captureFpsEnable = -1;
-    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &captureFpsEnable)) {
-        metrics_proto.set_capture_fps_enable(captureFpsEnable);
+    double capture_fps_enable = -1;
+    if (item->getDouble("android.media.mediarecorder.capture-fpsenable", &capture_fps_enable)) {
+        metrics_proto.set_capture_fps_enable(capture_fps_enable);
     }
 
     // int64 kRecorderDurationMs = "android.media.mediarecorder.durationMs";
-    int64_t durationMs = -1;
-    if (item->getInt64("android.media.mediarecorder.durationMs", &durationMs)) {
-        metrics_proto.set_duration_millis(durationMs);
+    int64_t duration_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.durationMs", &duration_millis)) {
+        metrics_proto.set_duration_millis(duration_millis);
     }
     // int64 kRecorderPaused = "android.media.mediarecorder.pausedMs";
-    int64_t pausedMs = -1;
-    if (item->getInt64("android.media.mediarecorder.pausedMs", &pausedMs)) {
-        metrics_proto.set_paused_millis(pausedMs);
+    int64_t paused_millis = -1;
+    if (item->getInt64("android.media.mediarecorder.pausedMs", &paused_millis)) {
+        metrics_proto.set_paused_millis(paused_millis);
     }
     // int32 kRecorderNumPauses = "android.media.mediarecorder.NPauses";
-    int32_t pausedCount = -1;
-    if (item->getInt32("android.media.mediarecorder.NPauses", &pausedCount)) {
-        metrics_proto.set_paused_count(pausedCount);
+    int32_t paused_count = -1;
+    if (item->getInt32("android.media.mediarecorder.NPauses", &paused_count)) {
+        metrics_proto.set_paused_count(paused_count);
     }
 
     // int32 kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
-    int32_t audioBitrate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audioBitrate)) {
-        metrics_proto.set_audio_bitrate(audioBitrate);
+    int32_t audio_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-bitrate", &audio_bitrate)) {
+        metrics_proto.set_audio_bitrate(audio_bitrate);
     }
     // int32 kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
-    int32_t audioChannels = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-channels", &audioChannels)) {
-        metrics_proto.set_audio_channels(audioChannels);
+    int32_t audio_channels = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-channels", &audio_channels)) {
+        metrics_proto.set_audio_channels(audio_channels);
     }
     // int32 kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
-    int32_t audioSampleRate = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audioSampleRate)) {
-        metrics_proto.set_audio_samplerate(audioSampleRate);
+    int32_t audio_samplerate = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-samplerate", &audio_samplerate)) {
+        metrics_proto.set_audio_samplerate(audio_samplerate);
     }
 
     // int32 kRecorderMovieTimescale = "android.media.mediarecorder.movie-timescale";
-    int32_t movieTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movieTimescale)) {
-        metrics_proto.set_movie_timescale(movieTimescale);
+    int32_t movie_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.movie-timescale", &movie_timescale)) {
+        metrics_proto.set_movie_timescale(movie_timescale);
     }
     // int32 kRecorderAudioTimescale = "android.media.mediarecorder.audio-timescale";
-    int32_t audioTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audioTimescale)) {
-        metrics_proto.set_audio_timescale(audioTimescale);
+    int32_t audio_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.audio-timescale", &audio_timescale)) {
+        metrics_proto.set_audio_timescale(audio_timescale);
     }
     // int32 kRecorderVideoTimescale = "android.media.mediarecorder.video-timescale";
-    int32_t videoTimescale = -1;
-    if (item->getInt32("android.media.mediarecorder.video-timescale", &videoTimescale)) {
-        metrics_proto.set_video_timescale(videoTimescale);
+    int32_t video_timescale = -1;
+    if (item->getInt32("android.media.mediarecorder.video-timescale", &video_timescale)) {
+        metrics_proto.set_video_timescale(video_timescale);
     }
 
     // int32 kRecorderVideoBitrate = "android.media.mediarecorder.video-bitrate";
-    int32_t videoBitRate = -1;
-    if (item->getInt32("android.media.mediarecorder.video-bitrate", &videoBitRate)) {
-        metrics_proto.set_video_bitrate(videoBitRate);
+    int32_t video_bitrate = -1;
+    if (item->getInt32("android.media.mediarecorder.video-bitrate", &video_bitrate)) {
+        metrics_proto.set_video_bitrate(video_bitrate);
     }
     // int32 kRecorderVideoIframeInterval = "android.media.mediarecorder.video-iframe-interval";
-    int32_t iFrameInterval = -1;
-    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iFrameInterval)) {
-        metrics_proto.set_iframe_interval(iFrameInterval);
+    int32_t iframe_interval = -1;
+    if (item->getInt32("android.media.mediarecorder.video-iframe-interval", &iframe_interval)) {
+        metrics_proto.set_iframe_interval(iframe_interval);
     }
 
     std::string serialized;
@@ -172,17 +179,46 @@
         return false;
     }
 
-    if (enabled_statsd) {
-        android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-        (void)android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
-                                   timestamp, pkgName.c_str(), pkgVersionCode,
-                                   mediaApexVersion,
-                                   bf_serialized);
+    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+        timestamp_nanos, package_name.c_str(), package_version_code,
+        media_apex_version,
+        bf_serialized);
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " mediametrics_recorder_reported:"
+            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << " timestamp_nanos:" << timestamp_nanos
+            << " package_name:" << package_name
+            << " package_version_code:" << package_version_code
+            << " media_apex_version:" << media_apex_version
 
-    } else {
-        ALOGV("NOT sending: private data (len=%zu)", strlen(serialized.c_str()));
-    }
+            << " audio_mime:" << audio_mime
+            << " video_mime:" << video_mime
+            << " video_profile:" << video_profile
+            << " video_level:" << video_level
+            << " width:" << width
+            << " height:" << height
+            << " rotation:" << rotation
+            << " framerate:" << framerate
+            << " capture_fps:" << capture_fps
+            << " capture_fps_enable:" << capture_fps_enable
 
+            << " duration_millis:" << duration_millis
+            << " paused_millis:" << paused_millis
+            << " paused_count:" << paused_count
+            << " audio_bitrate:" << audio_bitrate
+            << " audio_channels:" << audio_channels
+            << " audio_samplerate:" << audio_samplerate
+            << " movie_timescale:" << movie_timescale
+            << " audio_timescale:" << audio_timescale
+            << " video_timescale:" << video_timescale
+            << " video_bitrate:" << video_bitrate
+
+            << " iframe_interval:" << iframe_interval
+            << " log_session_id:" << log_session_id
+            << " }";
+    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
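Each of these converters now receives a mediametrics::StatsdLog and records one formatted line per atom via log(). A rough stand-in for that interface, only to show how the dump strings built above might be retained; the bounded per-atom history is an assumption based on the tests constructing the log with a size of 10, and the real class lives elsewhere in the service:

// Sketch of a StatsdLog-like helper: keeps the last N formatted entries per
// atom id for later dumping. Illustrative stand-in, not mediametrics::StatsdLog.
#include <cstddef>
#include <deque>
#include <map>
#include <mutex>
#include <string>

class StatsdLogSketch {
public:
    explicit StatsdLogSketch(size_t lines) : mLines(lines) {}

    void log(int atomId, const std::string& message) {
        std::lock_guard<std::mutex> guard(mLock);
        auto& entries = mEntries[atomId];
        entries.push_back(message);
        if (entries.size() > mLines) entries.pop_front();
    }

private:
    const size_t mLines;
    std::mutex mLock;
    std::map<int, std::deque<std::string>> mEntries;
};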
 
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index c2e0759..3baf739 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -1,7 +1,20 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "mediametrics_tests",
     test_suites: ["device-tests"],
 
+    // not all shared libraries are populated in the 2nd architecture in
+    // particular, libmediametricsservice we use to have a tame copy of the service
+    compile_multilib: "first",
+
     cflags: [
         "-Wall",
         "-Werror",
@@ -19,6 +32,7 @@
         "libmediametricsservice",
         "libmediautils",
         "libutils",
+        "mediametricsservice-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 478355b..2336d6f 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -809,7 +809,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -817,7 +819,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -845,7 +847,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   // untrusted entities cannot create a new key.
   ASSERT_EQ(PERMISSION_DENIED, audioAnalytics.submit(item, false /* isTrusted */));
@@ -853,7 +857,7 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(11, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -877,7 +881,9 @@
   (*item3).set("four", (int32_t)4)
           .setTimestamp(12);
 
-  android::mediametrics::AudioAnalytics audioAnalytics;
+  std::shared_ptr<mediametrics::StatsdLog> statsdLog =
+          std::make_shared<mediametrics::StatsdLog>(10);
+  android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
@@ -1082,3 +1088,42 @@
   //mediaMetrics->dump(fileno(stdout), {} /* args */);
 }
 #endif
+
+// Base64Url and isLogSessionId string utilities can be tested by static asserts.
+static_assert(mediametrics::stringutils::isBase64Url("abc"));
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['A'] == 0);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['a'] == 26);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['!'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['@'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(mediametrics::stringutils::InverseBase64UrlTable['#'] ==
+        mediametrics::stringutils::Transpose::INVALID_CHAR);
+static_assert(!mediametrics::stringutils::isBase64Url("!@#"));
+
+static_assert(mediametrics::stringutils::isLogSessionId("0123456789abcdef"));
+static_assert(!mediametrics::stringutils::isLogSessionId("abc"));
+static_assert(!mediametrics::stringutils::isLogSessionId("!@#"));
+static_assert(!mediametrics::stringutils::isLogSessionId("0123456789abcde!"));
+
+TEST(mediametrics_tests, sanitizeLogSessionId) {
+   // invalid id returns empty string.
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId("abc"));
+
+   // valid id passes through.
+   std::string validId = "fedcba9876543210";
+   ASSERT_EQ(validId, mediametrics::stringutils::sanitizeLogSessionId(validId));
+
+   // one more char makes the id invalid
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId + "A"));
+
+   std::string validId2 = "ZYXWVUT123456789";
+   ASSERT_EQ(validId2, mediametrics::stringutils::sanitizeLogSessionId(validId2));
+
+   // one fewer char makes the id invalid
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId.c_str() + 1));
+
+   // replacing one character with an invalid character makes an invalid id.
+   validId2[3] = '!';
+   ASSERT_EQ("", mediametrics::stringutils::sanitizeLogSessionId(validId2));
+}
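Taken together, the assertions above imply that a log session id is accepted only when it is exactly 16 base64url characters; anything else sanitizes to the empty string. A standalone sketch of that rule (an illustration of the expected behavior, not the StringUtils implementation):

// Sketch: accept only 16-character base64url ids; otherwise return "".
#include <cctype>
#include <string>

static bool isBase64UrlChar(char c) {
    // base64url alphabet: A-Z, a-z, 0-9, '-', '_'
    return std::isalnum(static_cast<unsigned char>(c)) != 0 || c == '-' || c == '_';
}

static std::string sanitizeLogSessionIdSketch(const std::string& id) {
    if (id.size() != 16) return "";
    for (char c : id) {
        if (!isBase64UrlChar(c)) return "";
    }
    return id;
}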
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index a3519d5..f31202b 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -1,11 +1,83 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
 
+filegroup {
+    name: "resourcemanager_aidl",
+    srcs: [
+        "aidl/android/media/IResourceManagerClient.aidl",
+        "aidl/android/media/IResourceManagerService.aidl",
+        "aidl/android/media/MediaResourceType.aidl",
+        "aidl/android/media/MediaResourceSubType.aidl",
+        "aidl/android/media/MediaResourceParcel.aidl",
+        "aidl/android/media/MediaResourcePolicyParcel.aidl",
+    ],
+    path: "aidl",
+}
 
-cc_library_shared {
+filegroup {
+    name: "resourceobserver_aidl",
+    srcs: [
+        "aidl/android/media/IResourceObserver.aidl",
+        "aidl/android/media/IResourceObserverService.aidl",
+        "aidl/android/media/MediaObservableEvent.aidl",
+        "aidl/android/media/MediaObservableFilter.aidl",
+        "aidl/android/media/MediaObservableType.aidl",
+        "aidl/android/media/MediaObservableParcel.aidl",
+    ],
+    path: "aidl",
+}
+
+aidl_interface {
+    name: "resourcemanager_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":resourcemanager_aidl",
+    ],
+}
+
+aidl_interface {
+    name: "resourceobserver_aidl_interface",
+    local_include_dir: "aidl",
+    srcs: [
+        ":resourceobserver_aidl",
+    ],
+    backend: {
+        java: {
+            enabled: false,
+        },
+        cpp: {
+            enabled: false,
+        },
+        ndk: {
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+                "test_com.android.media",
+            ],
+            min_sdk_version: "29",
+            enabled: true,
+        },
+    },
+    versions: ["1"],
+}
+
+cc_library {
     name: "libresourcemanagerservice",
 
     srcs: [
         "ResourceManagerService.cpp",
+        "ResourceObserverService.cpp",
         "ServiceLog.cpp",
+
+        // TODO: convert to AIDL?
+        "IMediaResourceMonitor.cpp",
     ],
 
     shared_libs: [
@@ -17,6 +89,10 @@
         "liblog",
     ],
 
+    static_libs: [
+        "resourceobserver_aidl_interface-V1-ndk_platform",
+    ],
+
     include_dirs: ["frameworks/av/include"],
 
     cflags: [
@@ -25,5 +101,4 @@
     ],
 
     export_include_dirs: ["."],
-
 }
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.cpp b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
new file mode 100644
index 0000000..42d7feb
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IMediaResourceMonitor.h"
+#include <binder/Parcel.h>
+#include <utils/Errors.h>
+#include <sys/types.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class BpMediaResourceMonitor : public BpInterface<IMediaResourceMonitor> {
+public:
+    explicit BpMediaResourceMonitor(const sp<IBinder>& impl)
+        : BpInterface<IMediaResourceMonitor>(impl) {}
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaResourceMonitor::getInterfaceDescriptor());
+        data.writeInt32(pid);
+        data.writeInt32(type);
+        remote()->transact(NOTIFY_RESOURCE_GRANTED, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(MediaResourceMonitor, "android.media.IMediaResourceMonitor")
+
+// ----------------------------------------------------------------------
+
+// NOLINTNEXTLINE(google-default-arguments)
+status_t BnMediaResourceMonitor::onTransact( uint32_t code, const Parcel& data, Parcel* reply,
+        uint32_t flags) {
+    switch(code) {
+        case NOTIFY_RESOURCE_GRANTED: {
+            CHECK_INTERFACE(IMediaResourceMonitor, data, reply);
+            int32_t pid = data.readInt32();
+            const int32_t type = data.readInt32();
+            notifyResourceGranted(/*in*/ pid, /*in*/ type);
+            return NO_ERROR;
+        } break;
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+// ----------------------------------------------------------------------
+
+} // namespace android
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
new file mode 100644
index 0000000..f92d557
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#ifndef __ANDROID_VNDK__
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class IMediaResourceMonitor : public IInterface {
+public:
+    DECLARE_META_INTERFACE(MediaResourceMonitor)
+
+    // Values should be in sync with Intent.EXTRA_MEDIA_RESOURCE_TYPE_XXX.
+    enum {
+        TYPE_VIDEO_CODEC = 0,
+        TYPE_AUDIO_CODEC = 1,
+    };
+
+    virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
+
+    enum {
+        NOTIFY_RESOURCE_GRANTED = IBinder::FIRST_CALL_TRANSACTION,
+    };
+};
+
+// ----------------------------------------------------------------------
+
+class BnMediaResourceMonitor : public BnInterface<IMediaResourceMonitor> {
+public:
+    // NOLINTNEXTLINE(google-default-arguments)
+    virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+            uint32_t flags = 0);
+};
+
+// ----------------------------------------------------------------------
+
+} // namespace android
+
+#else // __ANDROID_VNDK__
+#error "This header is not visible to vendors"
+#endif // __ANDROID_VNDK__
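A sketch of how a native process could receive notifyResourceGranted() callbacks by subclassing BnMediaResourceMonitor from this new header. The consumer class and the service name used for registration are hypothetical; the real publisher/consumer wiring is outside this patch:

// Hypothetical consumer of IMediaResourceMonitor; not part of the patch.
#define LOG_TAG "SampleResourceMonitor"

#include <binder/IServiceManager.h>
#include <utils/Log.h>
#include <utils/String16.h>

#include "IMediaResourceMonitor.h"

namespace android {

class SampleResourceMonitor : public BnMediaResourceMonitor {
public:
    // Logs every grant notification delivered over binder.
    void notifyResourceGranted(int32_t pid, const int32_t type) override {
        ALOGI("resource type %d granted to pid %d", type, pid);
    }
};

// Registers the monitor under a made-up service name (assumption); a real
// consumer would call this from its main/thread-pool setup.
static void publishSampleMonitor() {
    sp<SampleResourceMonitor> monitor = new SampleResourceMonitor();
    defaultServiceManager()->addService(
            String16("sample.media.resource_monitor"), monitor);
}

} // namespace android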
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3d36f8e..953686b 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,7 +21,7 @@
 
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
-#include <binder/IMediaResourceMonitor.h>
+#include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <cutils/sched_policy.h>
 #include <dirent.h>
@@ -35,19 +35,56 @@
 #include <sys/time.h>
 #include <unistd.h>
 
+#include "IMediaResourceMonitor.h"
 #include "ResourceManagerService.h"
+#include "ResourceObserverService.h"
 #include "ServiceLog.h"
 
 namespace android {
 
+//static
+std::mutex ResourceManagerService::sCookieLock;
+//static
+uintptr_t ResourceManagerService::sCookieCounter = 0;
+//static
+std::map<uintptr_t, sp<DeathNotifier> > ResourceManagerService::sCookieToDeathNotifierMap;
+
+class DeathNotifier : public RefBase {
+public:
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+            int pid, int64_t clientId);
+
+    virtual ~DeathNotifier() {}
+
+    // Implement death recipient
+    static void BinderDiedCallback(void* cookie);
+    virtual void binderDied();
+
+protected:
+    std::weak_ptr<ResourceManagerService> mService;
+    int mPid;
+    int64_t mClientId;
+};
+
 DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
         int pid, int64_t clientId)
     : mService(service), mPid(pid), mClientId(clientId) {}
 
 //static
 void DeathNotifier::BinderDiedCallback(void* cookie) {
-    auto thiz = static_cast<DeathNotifier*>(cookie);
-    thiz->binderDied();
+    sp<DeathNotifier> notifier;
+    {
+        std::scoped_lock lock{ResourceManagerService::sCookieLock};
+        auto it = ResourceManagerService::sCookieToDeathNotifierMap.find(
+                reinterpret_cast<uintptr_t>(cookie));
+        if (it == ResourceManagerService::sCookieToDeathNotifierMap.end()) {
+            return;
+        }
+        notifier = it->second;
+    }
+    if (notifier.get() != nullptr) {
+        notifier->binderDied();
+    }
 }
 
 void DeathNotifier::binderDied() {
@@ -60,8 +97,28 @@
 
     service->overridePid(mPid, -1);
     // thiz is freed in the call below, so it must be last call referring thiz
-    service->removeResource(mPid, mClientId, false);
+    service->removeResource(mPid, mClientId, false /*checkValid*/);
+}
 
+class OverrideProcessInfoDeathNotifier : public DeathNotifier {
+public:
+    OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+            int pid) : DeathNotifier(service, pid, 0) {}
+
+    virtual ~OverrideProcessInfoDeathNotifier() {}
+
+    virtual void binderDied();
+};
+
+void OverrideProcessInfoDeathNotifier::binderDied() {
+    // Don't check for pid validity since we know it's already dead.
+    std::shared_ptr<ResourceManagerService> service = mService.lock();
+    if (service == nullptr) {
+        ALOGW("ResourceManagerService is dead as well.");
+        return;
+    }
+
+    service->removeProcessInfoOverride(mPid);
 }
 
 template <typename T>
@@ -116,6 +173,7 @@
         info.uid = uid;
         info.clientId = clientId;
         info.client = client;
+        info.cookie = 0;
         info.pendingRemoval = false;
 
         index = infos.add(clientId, info);
@@ -267,6 +325,13 @@
     if (status != STATUS_OK) {
         return;
     }
+
+    std::shared_ptr<ResourceObserverService> observerService =
+            ResourceObserverService::instantiate();
+
+    if (observerService != nullptr) {
+        service->setObserverService(observerService);
+    }
     // TODO: mediaserver main() is already starting the thread pool,
     // move this to mediaserver main() when other services in mediaserver
     // are converted to ndk-platform aidl.
@@ -275,6 +340,11 @@
 
 ResourceManagerService::~ResourceManagerService() {}
 
+void ResourceManagerService::setObserverService(
+        const std::shared_ptr<ResourceObserverService>& observerService) {
+    mObserverService = observerService;
+}
+
 Status ResourceManagerService::config(const std::vector<MediaResourcePolicyParcel>& policies) {
     String8 log = String8::format("config(%s)", getString(policies).string());
     mServiceLog->add(log);
@@ -353,11 +423,16 @@
 
     Mutex::Autolock lock(mLock);
     if (!mProcessInfo->isValidPid(pid)) {
-        ALOGE("Rejected addResource call with invalid pid.");
-        return Status::fromServiceSpecificError(BAD_VALUE);
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d, uid %d", __FUNCTION__,
+                pid, callingPid, callingUid);
+        pid = callingPid;
+        uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
     ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
+    ResourceList resourceAdded;
 
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
@@ -379,11 +454,20 @@
         } else {
             mergeResources(info.resources[resType], res);
         }
+        // Add it to the list of added resources for observers.
+        auto it = resourceAdded.find(resType);
+        if (it == resourceAdded.end()) {
+            resourceAdded[resType] = res;
+        } else {
+            mergeResources(it->second, res);
+        }
     }
-    if (info.deathNotifier == nullptr && client != nullptr) {
-        info.deathNotifier = new DeathNotifier(ref<ResourceManagerService>(), pid, clientId);
-        AIBinder_linkToDeath(client->asBinder().get(),
-                mDeathRecipient.get(), info.deathNotifier.get());
+    if (info.cookie == 0 && client != nullptr) {
+        info.cookie = addCookieAndLink_l(client->asBinder(),
+                new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+    }
+    if (mObserverService != nullptr && !resourceAdded.empty()) {
+        mObserverService->onResourceAdded(uid, pid, resourceAdded);
     }
     notifyResourceGranted(pid, resources);
     return Status::ok();
@@ -398,8 +482,10 @@
 
     Mutex::Autolock lock(mLock);
     if (!mProcessInfo->isValidPid(pid)) {
-        ALOGE("Rejected removeResource call with invalid pid.");
-        return Status::fromServiceSpecificError(BAD_VALUE);
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+                pid, callingPid);
+        pid = callingPid;
     }
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -415,7 +501,7 @@
     }
 
     ResourceInfo &info = infos.editValueAt(index);
-
+    ResourceList resourceRemoved;
     for (size_t i = 0; i < resources.size(); ++i) {
         const auto &res = resources[i];
         const auto resType = std::tuple(res.type, res.subType, res.id);
@@ -427,19 +513,32 @@
         // ignore if we don't have it
         if (info.resources.find(resType) != info.resources.end()) {
             MediaResourceParcel &resource = info.resources[resType];
+            MediaResourceParcel actualRemoved = res;
             if (resource.value > res.value) {
                 resource.value -= res.value;
             } else {
                 onLastRemoved(res, info);
+                actualRemoved.value = resource.value;
                 info.resources.erase(resType);
             }
+
+            // Add it to the list of removed resources for observers.
+            auto it = resourceRemoved.find(resType);
+            if (it == resourceRemoved.end()) {
+                resourceRemoved[resType] = actualRemoved;
+            } else {
+                mergeResources(it->second, actualRemoved);
+            }
         }
     }
+    if (mObserverService != nullptr && !resourceRemoved.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, resourceRemoved);
+    }
     return Status::ok();
 }
 
 Status ResourceManagerService::removeClient(int32_t pid, int64_t clientId) {
-    removeResource(pid, clientId, true);
+    removeResource(pid, clientId, true /*checkValid*/);
     return Status::ok();
 }
 
@@ -451,8 +550,10 @@
 
     Mutex::Autolock lock(mLock);
     if (checkValid && !mProcessInfo->isValidPid(pid)) {
-        ALOGE("Rejected removeResource call with invalid pid.");
-        return Status::fromServiceSpecificError(BAD_VALUE);
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+                pid, callingPid);
+        pid = callingPid;
     }
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -472,8 +573,11 @@
         onLastRemoved(it->second, info);
     }
 
-    AIBinder_unlinkToDeath(info.client->asBinder().get(),
-            mDeathRecipient.get(), info.deathNotifier.get());
+    removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+
+    if (mObserverService != nullptr && !info.resources.empty()) {
+        mObserverService->onResourceRemoved(info.uid, pid, info.resources);
+    }
 
     infos.removeItemsAt(index);
     return Status::ok();
@@ -504,8 +608,10 @@
     {
         Mutex::Autolock lock(mLock);
         if (!mProcessInfo->isValidPid(callingPid)) {
-            ALOGE("Rejected reclaimResource call with invalid callingPid.");
-            return Status::fromServiceSpecificError(BAD_VALUE);
+            pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
+                    callingPid, actualCallingPid);
+            callingPid = actualCallingPid;
         }
         const MediaResourceParcel *secureCodec = NULL;
         const MediaResourceParcel *nonSecureCodec = NULL;
@@ -575,13 +681,19 @@
         }
     }
 
+    *_aidl_return = reclaimInternal(clients);
+    return Status::ok();
+}
+
+bool ResourceManagerService::reclaimInternal(
+        const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
-        return Status::ok();
+        return false;
     }
 
     std::shared_ptr<IResourceManagerClient> failedClient;
     for (size_t i = 0; i < clients.size(); ++i) {
-        log = String8::format("reclaimResource from client %p", clients[i].get());
+        String8 log = String8::format("reclaimResource from client %p", clients[i].get());
         mServiceLog->add(log);
         bool success;
         Status status = clients[i]->reclaimResource(&success);
@@ -592,8 +704,7 @@
     }
 
     if (failedClient == NULL) {
-        *_aidl_return = true;
-        return Status::ok();
+        return true;
     }
 
     {
@@ -618,7 +729,7 @@
         }
     }
 
-    return Status::ok();
+    return false;
 }
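
The new reclaimInternal() helper above factors the retry loop out of reclaimResource() so that reclaimResourcesFromClientsPendingRemoval() can reuse it. A rough standalone sketch of that pattern, with a hypothetical simplified client type and no binder involved:

#include <memory>
#include <vector>

// Hypothetical stand-in for IResourceManagerClient: reclaimResource() reports
// whether the client actually released its resources.
struct Client {
    virtual ~Client() = default;
    virtual bool reclaimResource() = 0;
};

// Mirrors the shape of reclaimInternal(): ask each candidate to release its
// resources and report success only if none of them failed.
bool reclaimAll(const std::vector<std::shared_ptr<Client>>& clients) {
    if (clients.empty()) {
        return false;  // nothing to reclaim from
    }
    for (const auto& client : clients) {
        if (!client->reclaimResource()) {
            // The real service additionally follows up on the client that failed.
            return false;
        }
    }
    return true;
}
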
 
 Status ResourceManagerService::overridePid(
@@ -651,6 +762,83 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::overrideProcessInfo(
+        const std::shared_ptr<IResourceManagerClient>& client,
+        int pid,
+        int procState,
+        int oomScore) {
+    String8 log = String8::format("overrideProcessInfo(pid %d, procState %d, oomScore %d)",
+            pid, procState, oomScore);
+    mServiceLog->add(log);
+
+    // Only allow the override if the caller already can access process state and oom scores.
+    int callingPid = AIBinder_getCallingPid();
+    if (callingPid != getpid() && (callingPid != pid || !checkCallingPermission(String16(
+            "android.permission.GET_PROCESS_STATE_AND_OOM_SCORE")))) {
+        ALOGE("Permission Denial: overrideProcessInfo method from pid=%d", callingPid);
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    if (client == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    Mutex::Autolock lock(mLock);
+    removeProcessInfoOverride_l(pid);
+
+    if (!mProcessInfo->overrideProcessInfo(pid, procState, oomScore)) {
+        // Override value is rejected by ProcessInfo.
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+
+    mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
+
+    return Status::ok();
+}
+
+uintptr_t ResourceManagerService::addCookieAndLink_l(
+        ::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier) {
+    std::scoped_lock lock{sCookieLock};
+
+    uintptr_t cookie;
+    // Skip cookie 0 (in case the counter wraps around); ResourceInfo initializes its
+    // cookie to 0 to indicate that the death notifier has not been created yet.
+    while ((cookie = ++sCookieCounter) == 0);
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    sCookieToDeathNotifierMap.emplace(cookie, notifier);
+
+    return cookie;
+}
+
+void ResourceManagerService::removeCookieAndUnlink_l(
+        ::ndk::SpAIBinder binder, uintptr_t cookie) {
+    std::scoped_lock lock{sCookieLock};
+    AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    sCookieToDeathNotifierMap.erase(cookie);
+}
+
+void ResourceManagerService::removeProcessInfoOverride(int pid) {
+    Mutex::Autolock lock(mLock);
+
+    removeProcessInfoOverride_l(pid);
+}
+
+void ResourceManagerService::removeProcessInfoOverride_l(int pid) {
+    auto it = mProcessInfoOverrideMap.find(pid);
+    if (it == mProcessInfoOverrideMap.end()) {
+        return;
+    }
+
+    mProcessInfo->removeProcessInfoOverride(pid);
+
+    removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+
+    mProcessInfoOverrideMap.erase(pid);
+}
+
 Status ResourceManagerService::markClientForPendingRemoval(int32_t pid, int64_t clientId) {
     String8 log = String8::format(
             "markClientForPendingRemoval(pid %d, clientId %lld)",
@@ -659,8 +847,10 @@
 
     Mutex::Autolock lock(mLock);
     if (!mProcessInfo->isValidPid(pid)) {
-        ALOGE("Rejected markClientForPendingRemoval call with invalid pid.");
-        return Status::fromServiceSpecificError(BAD_VALUE);
+        pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+                pid, callingPid);
+        pid = callingPid;
     }
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -681,6 +871,38 @@
     return Status::ok();
 }
 
+Status ResourceManagerService::reclaimResourcesFromClientsPendingRemoval(int32_t pid) {
+    String8 log = String8::format("reclaimResourcesFromClientsPendingRemoval(pid %d)", pid);
+    mServiceLog->add(log);
+
+    Vector<std::shared_ptr<IResourceManagerClient>> clients;
+    {
+        Mutex::Autolock lock(mLock);
+        if (!mProcessInfo->isValidPid(pid)) {
+            pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
+                    pid, callingPid);
+            pid = callingPid;
+        }
+
+        for (MediaResource::Type type : {MediaResource::Type::kSecureCodec,
+                                         MediaResource::Type::kNonSecureCodec,
+                                         MediaResource::Type::kGraphicMemory,
+                                         MediaResource::Type::kDrmSession}) {
+            std::shared_ptr<IResourceManagerClient> client;
+            if (getBiggestClient_l(pid, type, &client, true /* pendingRemovalOnly */)) {
+                clients.add(client);
+                break;
+            }
+        }
+    }
+
+    if (!clients.empty()) {
+        reclaimInternal(clients);
+    }
+    return Status::ok();
+}
+
 bool ResourceManagerService::getPriority_l(int pid, int* priority) {
     int newPid = pid;
 
@@ -804,7 +1026,8 @@
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
-        ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
+        ALOGE_IF(!pendingRemovalOnly,
+                 "getBiggestClient_l: can't find resource info for pid %d", pid);
         return false;
     }
 
@@ -828,7 +1051,9 @@
     }
 
     if (clientTemp == NULL) {
-        ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", asString(type), pid);
+        ALOGE_IF(!pendingRemovalOnly,
+                 "getBiggestClient_l: can't find resource type %s for pid %d",
+                 asString(type), pid);
         return false;
     }
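
Above, addCookieAndLink_l()/removeCookieAndUnlink_l() replace the per-ResourceInfo DeathNotifier with a process-wide, lock-protected map keyed by an integer cookie, so the void* cookie handed to AIBinder_linkToDeath() never points at freed memory. A minimal standalone sketch of that bookkeeping, with the binder link/unlink calls reduced to comments and sp<> replaced by std::shared_ptr:

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct DeathNotifier {};  // simplified stand-in for the service's notifier class

static std::mutex sCookieLock;
static uintptr_t sCookieCounter = 0;  // GUARDED_BY(sCookieLock)
static std::map<uintptr_t, std::shared_ptr<DeathNotifier>> sCookieToDeathNotifierMap;

// Mirrors addCookieAndLink_l(): allocate a non-zero cookie and remember the notifier.
uintptr_t addCookieAndLink(const std::shared_ptr<DeathNotifier>& notifier) {
    std::scoped_lock lock{sCookieLock};
    uintptr_t cookie;
    // Skip 0 on wrap-around; cookie 0 means "no notifier yet" in ResourceInfo.
    while ((cookie = ++sCookieCounter) == 0) {}
    // The real service calls AIBinder_linkToDeath(binder, recipient, (void*)cookie) here.
    sCookieToDeathNotifierMap.emplace(cookie, notifier);
    return cookie;
}

// Mirrors removeCookieAndUnlink_l().
void removeCookieAndUnlink(uintptr_t cookie) {
    std::scoped_lock lock{sCookieLock};
    // The real service calls AIBinder_unlinkToDeath(binder, recipient, (void*)cookie) here.
    sCookieToDeathNotifierMap.erase(cookie);
}

// Mirrors the death callback: resolve the cookie to a notifier under the lock,
// then act on the notifier after the lookup.
std::shared_ptr<DeathNotifier> findNotifier(uintptr_t cookie) {
    std::scoped_lock lock{sCookieLock};
    auto it = sCookieToDeathNotifierMap.find(cookie);
    return it == sCookieToDeathNotifierMap.end() ? nullptr : it->second;
}
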
 
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index ee982b7..9c2636e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -19,6 +19,7 @@
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICE_H
 
 #include <map>
+#include <mutex>
 
 #include <aidl/android/media/BnResourceManagerService.h>
 #include <arpa/inet.h>
@@ -33,6 +34,7 @@
 
 class DeathNotifier;
 class ResourceManagerService;
+class ResourceObserverService;
 class ServiceLog;
 struct ProcessInfoInterface;
 
@@ -43,14 +45,14 @@
 using ::aidl::android::media::MediaResourcePolicyParcel;
 
 typedef std::map<std::tuple<
-        MediaResource::Type, MediaResource::SubType, std::vector<int8_t>>,
+        MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
         MediaResourceParcel> ResourceList;
 
 struct ResourceInfo {
     int64_t clientId;
     uid_t uid;
     std::shared_ptr<IResourceManagerClient> client;
-    sp<DeathNotifier> deathNotifier;
+    uintptr_t cookie{0};
     ResourceList resources;
     bool pendingRemoval{false};
 };
@@ -59,22 +61,6 @@
 typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
 typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
 
-class DeathNotifier : public RefBase {
-public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid, int64_t clientId);
-
-    ~DeathNotifier() {}
-
-    // Implement death recipient
-    static void BinderDiedCallback(void* cookie);
-    void binderDied();
-
-private:
-    std::weak_ptr<ResourceManagerService> mService;
-    int mPid;
-    int64_t mClientId;
-};
 class ResourceManagerService : public BnResourceManagerService {
 public:
     struct SystemCallbackInterface : public RefBase {
@@ -95,6 +81,8 @@
             const sp<ProcessInfoInterface> &processInfo,
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
+    void setObserverService(
+            const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
@@ -125,12 +113,27 @@
             int originalPid,
             int newPid) override;
 
+    Status overrideProcessInfo(
+            const std::shared_ptr<IResourceManagerClient>& client,
+            int pid,
+            int procState,
+            int oomScore) override;
+
     Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
 
+    Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
+
     Status removeResource(int pid, int64_t clientId, bool checkValid);
 
 private:
     friend class ResourceManagerServiceTest;
+    friend class DeathNotifier;
+    friend class OverrideProcessInfoDeathNotifier;
+
+    // Reclaims resources from |clients|. Returns true if reclaim succeeded
+    // for all clients.
+    bool reclaimInternal(
+            const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
 
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
@@ -170,6 +173,12 @@
     // Get priority from process's pid
     bool getPriority_l(int pid, int* priority);
 
+    void removeProcessInfoOverride(int pid);
+
+    void removeProcessInfoOverride_l(int pid);
+    uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
+    void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
@@ -179,7 +188,17 @@
     bool mSupportsSecureWithNonSecureCodec;
     int32_t mCpuBoostCount;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+    struct ProcessInfoOverride {
+        uintptr_t cookie;
+        std::shared_ptr<IResourceManagerClient> client;
+    };
     std::map<int, int> mOverridePidMap;
+    std::map<pid_t, ProcessInfoOverride> mProcessInfoOverrideMap;
+    static std::mutex sCookieLock;
+    static uintptr_t sCookieCounter GUARDED_BY(sCookieLock);
+    static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
+            GUARDED_BY(sCookieLock);
+    std::shared_ptr<ResourceObserverService> mObserverService;
 };
 
 // ----------------------------------------------------------------------------
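
The ResourceList typedef above keys each resource on the (type, subType, id) tuple, and addResource()/removeResource() fold repeated entries for the same key together via the mergeResources() helper, which is defined elsewhere in ResourceManagerService.cpp and not shown in this hunk. A standalone sketch of that accumulation, under the assumption that merging simply sums the values:

#include <cstdint>
#include <map>
#include <tuple>
#include <vector>

// Simplified stand-ins for the AIDL enums/parcel used by the service.
enum class Type : int32_t { kSecureCodec, kNonSecureCodec, kGraphicMemory, kDrmSession };
enum class SubType : int32_t { kUnspecifiedSubType, kVideoCodec };

struct MediaResourceParcel {
    Type type{};
    SubType subType{};
    std::vector<uint8_t> id;  // matches the int8_t -> uint8_t key change above
    int64_t value{0};
};

// Same shape as the ResourceList typedef in ResourceManagerService.h.
using ResourceKey  = std::tuple<Type, SubType, std::vector<uint8_t>>;
using ResourceList = std::map<ResourceKey, MediaResourceParcel>;

// Stand-in for the mergeResources() helper (defined outside this hunk); assumed
// here to simply accumulate the value for an already-known key.
void mergeResources(MediaResourceParcel& dst, const MediaResourceParcel& src) {
    dst.value += src.value;
}

// Mirrors the accumulation loop in addResource()/removeResource(): new keys are
// inserted, repeated keys are merged, and the resulting list can be handed to
// ResourceObserverService::onResourceAdded()/onResourceRemoved().
void accumulate(ResourceList& list, const MediaResourceParcel& res) {
    const ResourceKey key{res.type, res.subType, res.id};
    auto it = list.find(key);
    if (it == list.end()) {
        list[key] = res;
    } else {
        mergeResources(it->second, res);
    }
}
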
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
new file mode 100644
index 0000000..4e97406
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -0,0 +1,328 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService"
+#include <utils/Log.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/IServiceManager.h>
+#include <utils/String16.h>
+#include <aidl/android/media/MediaResourceParcel.h>
+
+#include "ResourceObserverService.h"
+
+namespace android {
+
+using ::aidl::android::media::MediaResourceParcel;
+using ::aidl::android::media::MediaObservableEvent;
+
+// MediaObservableEvent will be used as uint64_t flags.
+static_assert(sizeof(MediaObservableEvent) == sizeof(uint64_t));
+
+static std::vector<MediaObservableEvent> sEvents = {
+        MediaObservableEvent::kBusy,
+        MediaObservableEvent::kIdle,
+};
+
+static MediaObservableType getObservableType(const MediaResourceParcel& res) {
+    if (res.subType == MediaResourceSubType::kVideoCodec) {
+        if (res.type == MediaResourceType::kNonSecureCodec) {
+            return MediaObservableType::kVideoNonSecureCodec;
+        }
+        if (res.type == MediaResourceType::kSecureCodec) {
+            return MediaObservableType::kVideoSecureCodec;
+        }
+    }
+    return MediaObservableType::kInvalid;
+}
+
+//static
+std::mutex ResourceObserverService::sDeathRecipientLock;
+//static
+std::map<uintptr_t, std::shared_ptr<ResourceObserverService::DeathRecipient> >
+ResourceObserverService::sDeathRecipientMap;
+
+struct ResourceObserverService::DeathRecipient {
+    DeathRecipient(ResourceObserverService* _service,
+            const std::shared_ptr<IResourceObserver>& _observer)
+        : service(_service), observer(_observer) {}
+    ~DeathRecipient() {}
+
+    void binderDied() {
+        if (service != nullptr) {
+            service->unregisterObserver(observer);
+        }
+    }
+
+    ResourceObserverService* service;
+    std::shared_ptr<IResourceObserver> observer;
+};
+
+// static
+void ResourceObserverService::BinderDiedCallback(void* cookie) {
+    uintptr_t id = reinterpret_cast<uintptr_t>(cookie);
+
+    ALOGW("Observer %lld is dead", (long long)id);
+
+    std::shared_ptr<DeathRecipient> recipient;
+
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+
+        auto it = sDeathRecipientMap.find(id);
+        if (it != sDeathRecipientMap.end()) {
+            recipient = it->second;
+        }
+    }
+
+    if (recipient != nullptr) {
+        recipient->binderDied();
+    }
+}
+
+//static
+std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
+    std::shared_ptr<ResourceObserverService> observerService =
+            ::ndk::SharedRefBase::make<ResourceObserverService>();
+    binder_status_t status = AServiceManager_addService(observerService->asBinder().get(),
+            ResourceObserverService::getServiceName());
+    if (status != STATUS_OK) {
+        return nullptr;
+    }
+    return observerService;
+}
+
+ResourceObserverService::ResourceObserverService()
+    : mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {}
+
+binder_status_t ResourceObserverService::dump(
+        int fd, const char** /*args*/, uint32_t /*numArgs*/) {
+    String8 result;
+
+    if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+        result.format("Permission Denial: "
+                "can't dump ResourceManagerService from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        write(fd, result.string(), result.size());
+        return PERMISSION_DENIED;
+    }
+
+    result.appendFormat("ResourceObserverService: %p\n", this);
+    result.appendFormat("  Registered Observers: %zu\n", mObserverInfoMap.size());
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        for (auto &observer : mObserverInfoMap) {
+            result.appendFormat("    Observer %p:\n", observer.second.binder.get());
+            for (auto &observable : observer.second.filters) {
+                String8 enabledEventsStr;
+                for (auto &event : sEvents) {
+                    if (((uint64_t)observable.eventFilter & (uint64_t)event) != 0) {
+                        if (!enabledEventsStr.isEmpty()) {
+                            enabledEventsStr.append("|");
+                        }
+                        enabledEventsStr.append(toString(event).c_str());
+                    }
+                }
+                result.appendFormat("      %s: %s\n",
+                        toString(observable.type).c_str(), enabledEventsStr.c_str());
+            }
+        }
+    }
+
+    write(fd, result.string(), result.size());
+    return OK;
+}
+
+Status ResourceObserverService::registerObserver(
+        const std::shared_ptr<IResourceObserver>& in_observer,
+        const std::vector<MediaObservableFilter>& in_filters) {
+    if ((getpid() != AIBinder_getCallingPid()) &&
+            checkCallingPermission(
+            String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+        ALOGE("Permission Denial: "
+                "can't registerObserver from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        if (mObserverInfoMap.find((uintptr_t)binder.get()) != mObserverInfoMap.end()) {
+            return Status::fromServiceSpecificError(ALREADY_EXISTS);
+        }
+
+        if (in_filters.empty()) {
+            return Status::fromServiceSpecificError(BAD_VALUE);
+        }
+
+        // Add observer info.
+        mObserverInfoMap.emplace((uintptr_t)binder.get(),
+                ObserverInfo{binder, in_observer, in_filters});
+
+        // Add observer to observable->subscribers map.
+        for (auto &filter : in_filters) {
+            for (auto &event : sEvents) {
+                if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+                    continue;
+                }
+                MediaObservableFilter key{filter.type, event};
+                mObservableToSubscribersMap[key].emplace((uintptr_t)binder.get(), in_observer);
+            }
+        }
+    }
+
+    // Add death binder and link.
+    uintptr_t cookie = (uintptr_t)binder.get();
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+        sDeathRecipientMap.emplace(
+                cookie, std::make_shared<DeathRecipient>(this, in_observer));
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(),
+                         reinterpret_cast<void*>(cookie));
+
+    return Status::ok();
+}
+
+Status ResourceObserverService::unregisterObserver(
+        const std::shared_ptr<IResourceObserver>& in_observer) {
+    if ((getpid() != AIBinder_getCallingPid()) &&
+            checkCallingPermission(
+            String16("android.permission.REGISTER_MEDIA_RESOURCE_OBSERVER")) == false) {
+        ALOGE("Permission Denial: "
+                "can't unregisterObserver from pid=%d, uid=%d\n",
+                AIBinder_getCallingPid(),
+                AIBinder_getCallingUid());
+        return Status::fromServiceSpecificError(PERMISSION_DENIED);
+    }
+
+    if (in_observer == nullptr) {
+        return Status::fromServiceSpecificError(BAD_VALUE);
+    }
+
+    ::ndk::SpAIBinder binder = in_observer->asBinder();
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        auto it = mObserverInfoMap.find((uintptr_t)binder.get());
+        if (it == mObserverInfoMap.end()) {
+            return Status::fromServiceSpecificError(NAME_NOT_FOUND);
+        }
+
+        // Remove observer from observable->subscribers map.
+        for (auto &filter : it->second.filters) {
+            for (auto &event : sEvents) {
+                if (!((uint64_t)filter.eventFilter & (uint64_t)event)) {
+                    continue;
+                }
+                MediaObservableFilter key{filter.type, event};
+                mObservableToSubscribersMap[key].erase((uintptr_t)binder.get());
+
+                // Remove the entry if there are no more subscribers.
+                if (mObservableToSubscribersMap[key].empty()) {
+                    mObservableToSubscribersMap.erase(key);
+                }
+            }
+        }
+
+        // Remove observer info.
+        mObserverInfoMap.erase(it);
+    }
+
+    // Unlink and remove death binder.
+    uintptr_t cookie = (uintptr_t)binder.get();
+    AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(),
+            reinterpret_cast<void*>(cookie));
+
+    {
+        std::scoped_lock lock{sDeathRecipientLock};
+        sDeathRecipientMap.erase(cookie);
+    }
+
+    return Status::ok();
+}
+
+void ResourceObserverService::notifyObservers(
+        MediaObservableEvent event, int uid, int pid, const ResourceList &resources) {
+    struct CalleeInfo {
+        std::shared_ptr<IResourceObserver> observer;
+        std::vector<MediaObservableParcel> monitors;
+    };
+    // Build a consolidated list of observers to call with their respective observables.
+    std::map<uintptr_t, CalleeInfo> calleeList;
+
+    {
+        std::scoped_lock lock{mObserverLock};
+
+        for (auto &res : resources) {
+            // Skip if this resource doesn't map to any observable type.
+            MediaObservableType observableType = getObservableType(res.second);
+            if (observableType == MediaObservableType::kInvalid) {
+                continue;
+            }
+            MediaObservableFilter key{observableType, event};
+            // Skip if no one subscribed to this observable.
+            auto observableIt = mObservableToSubscribersMap.find(key);
+            if (observableIt == mObservableToSubscribersMap.end()) {
+                continue;
+            }
+            // Loop through all subscribers.
+            for (auto &subscriber : observableIt->second) {
+                auto calleeIt = calleeList.find(subscriber.first);
+                if (calleeIt == calleeList.end()) {
+                    calleeList.emplace(subscriber.first, CalleeInfo{
+                        subscriber.second, {{observableType, res.second.value}}});
+                } else {
+                    calleeIt->second.monitors.push_back({observableType, res.second.value});
+                }
+            }
+        }
+    }
+
+    // Finally call the observers about the status change.
+    for (auto &calleeInfo : calleeList) {
+        calleeInfo.second.observer->onStatusChanged(
+                event, uid, pid, calleeInfo.second.monitors);
+    }
+}
+
+void ResourceObserverService::onResourceAdded(
+        int uid, int pid, const ResourceList &resources) {
+    notifyObservers(MediaObservableEvent::kBusy, uid, pid, resources);
+}
+
+void ResourceObserverService::onResourceRemoved(
+        int uid, int pid, const ResourceList &resources) {
+    notifyObservers(MediaObservableEvent::kIdle, uid, pid, resources);
+}
+
+} // namespace android
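
notifyObservers() above first groups all changed observables by subscriber under mObserverLock, so that each registered observer receives exactly one onStatusChanged() call per event, and only calls out once the lock scope has ended. A condensed standalone sketch of that grouping, with the AIDL types and binder plumbing replaced by simple stand-ins:

#include <cstdint>
#include <functional>
#include <map>
#include <tuple>
#include <vector>

// Simplified stand-ins for the AIDL types used by ResourceObserverService.
enum class ObservableType : int32_t { kVideoSecureCodec = 1000, kVideoNonSecureCodec = 1001 };
enum class ObservableEvent : uint64_t { kBusy = 1, kIdle = 2 };

struct ObservableParcel { ObservableType type; int64_t value; };

// Key for the observable->subscribers map: a <type, event> pair.
struct ObservableKey {
    ObservableType type;
    ObservableEvent event;
    bool operator<(const ObservableKey& o) const {
        return std::tie(type, event) < std::tie(o.type, o.event);
    }
};

using Callback    = std::function<void(const std::vector<ObservableParcel>&)>;
using Subscribers = std::map<uintptr_t, Callback>;  // keyed on an observer id

void notifyObservers(const std::map<ObservableKey, Subscribers>& subscribersByObservable,
                     ObservableEvent event,
                     const std::vector<ObservableParcel>& changed) {
    struct CalleeInfo {
        Callback observer;
        std::vector<ObservableParcel> monitors;
    };
    // Consolidate: one entry per observer, carrying all observables it subscribed to.
    std::map<uintptr_t, CalleeInfo> calleeList;
    for (const auto& res : changed) {
        auto it = subscribersByObservable.find(ObservableKey{res.type, event});
        if (it == subscribersByObservable.end()) {
            continue;  // nobody subscribed to this <type, event> pair
        }
        for (const auto& subscriber : it->second) {
            auto calleeIt = calleeList.find(subscriber.first);
            if (calleeIt == calleeList.end()) {
                calleeList.emplace(subscriber.first, CalleeInfo{subscriber.second, {res}});
            } else {
                calleeIt->second.monitors.push_back(res);
            }
        }
    }
    // The real service has released mObserverLock by this point; each observer then
    // receives a single consolidated notification.
    for (const auto& callee : calleeList) {
        callee.second.observer(callee.second.monitors);
    }
}
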
diff --git a/services/mediaresourcemanager/ResourceObserverService.h b/services/mediaresourcemanager/ResourceObserverService.h
new file mode 100644
index 0000000..46bc5fb
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceObserverService.h
@@ -0,0 +1,95 @@
+/**
+ *
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+#define ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
+
+#include <map>
+
+#include <aidl/android/media/BnResourceObserverService.h>
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceObserverService;
+using ::aidl::android::media::IResourceObserver;
+using ::aidl::android::media::MediaObservableFilter;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+using ::aidl::android::media::MediaObservableEvent;
+
+class ResourceObserverService : public BnResourceObserverService {
+public:
+
+    static char const *getServiceName() { return "media.resource_observer"; }
+    static std::shared_ptr<ResourceObserverService> instantiate();
+
+    virtual inline binder_status_t dump(
+            int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
+
+    ResourceObserverService();
+    virtual ~ResourceObserverService() {}
+
+    // IResourceObserverService interface
+    Status registerObserver(const std::shared_ptr<IResourceObserver>& in_observer,
+            const std::vector<MediaObservableFilter>& in_filters) override;
+
+    Status unregisterObserver(const std::shared_ptr<IResourceObserver>& in_observer) override;
+    // ~IResourceObserverService interface
+
+    // Called by ResourceManagerService when resources are added.
+    void onResourceAdded(int uid, int pid, const ResourceList &resources);
+
+    // Called by ResourceManagerService when resources are removed.
+    void onResourceRemoved(int uid, int pid, const ResourceList &resources);
+
+private:
+    struct ObserverInfo {
+        ::ndk::SpAIBinder binder;
+        std::shared_ptr<IResourceObserver> observer;
+        std::vector<MediaObservableFilter> filters;
+    };
+    struct DeathRecipient;
+
+    // The maps below are all keyed on the observer's binder ptr value.
+    using ObserverInfoMap = std::map<uintptr_t, ObserverInfo>;
+    using SubscriberMap = std::map<uintptr_t, std::shared_ptr<IResourceObserver>>;
+
+    std::mutex mObserverLock;
+    // Binder->ObserverInfo
+    ObserverInfoMap mObserverInfoMap GUARDED_BY(mObserverLock);
+    // Observable(<type,event>)->Subscribers
+    std::map<MediaObservableFilter, SubscriberMap> mObservableToSubscribersMap
+            GUARDED_BY(mObserverLock);
+
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    // Binder death handling.
+    static std::mutex sDeathRecipientLock;
+    static std::map<uintptr_t, std::shared_ptr<DeathRecipient>> sDeathRecipientMap
+            GUARDED_BY(sDeathRecipientLock);
+    static void BinderDiedCallback(void* cookie);
+
+    void notifyObservers(MediaObservableEvent event,
+            int uid, int pid, const ResourceList &resources);
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCE_OBSERVER_SERVICE_H
diff --git a/services/mediaresourcemanager/TEST_MAPPING b/services/mediaresourcemanager/TEST_MAPPING
index 418b159..52ad441 100644
--- a/services/mediaresourcemanager/TEST_MAPPING
+++ b/services/mediaresourcemanager/TEST_MAPPING
@@ -5,6 +5,9 @@
     },
     {
        "name": "ServiceLog_test"
+    },
+    {
+       "name": "ResourceObserverService_test"
     }
   ]
 }
diff --git a/media/libmedia/aidl/android/media/IResourceManagerClient.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/IResourceManagerClient.aidl
rename to services/mediaresourcemanager/aidl/android/media/IResourceManagerClient.aidl
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
new file mode 100644
index 0000000..7a0a50f
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceManagerClient;
+import android.media.MediaResourceParcel;
+import android.media.MediaResourcePolicyParcel;
+
+/**
+ * ResourceManagerService interface that keeps track of media resource
+ * owned by clients, and reclaims resources based on configured policies
+ * when necessary.
+ *
+ * {@hide}
+ */
+interface IResourceManagerService {
+    const @utf8InCpp String kPolicySupportsMultipleSecureCodecs
+            = "supports-multiple-secure-codecs";
+    const @utf8InCpp String kPolicySupportsSecureWithNonSecureCodec
+            = "supports-secure-with-non-secure-codec";
+
+    /**
+     * Configure the ResourceManagerService to adopt particular policies when
+     * managing the resources.
+     *
+     * @param policies an array of policies to be adopted.
+     */
+    void config(in MediaResourcePolicyParcel[] policies);
+
+    /**
+     * Add a client to a process with a list of resources.
+     *
+     * @param pid pid of the client.
+     * @param uid uid of the client.
+     * @param clientId an identifier that uniquely identifies the client within the pid.
+     * @param client interface for the ResourceManagerService to call the client.
+     * @param resources an array of resources to be added.
+     */
+    void addResource(
+            int pid,
+            int uid,
+            long clientId,
+            IResourceManagerClient client,
+            in MediaResourceParcel[] resources);
+
+    /**
+     * Remove the listed resources from a client.
+     *
+     * @param pid pid from which the list of resources will be removed.
+     * @param clientId clientId within the pid from which the list of resources will be removed.
+     * @param resources an array of resources to be removed from the client.
+     */
+    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
+
+    /**
+     * Remove all resources from a client.
+     *
+     * @param pid pid from which the client's resources will be removed.
+     * @param clientId clientId within the pid that will be removed.
+     */
+    void removeClient(int pid, long clientId);
+
+    /**
+     * Tries to reclaim resources from processes with lower priority than the
+     * calling process, according to the requested resources.
+     *
+     * @param callingPid pid of the calling process.
+     * @param resources an array of resources to be reclaimed.
+     *
+     * @return true if the reclaim was successful and false otherwise.
+     */
+    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
+
+    /**
+     * Override the pid of the original calling process with the pid of the process
+     * that actually uses the requested resources.
+     *
+     * @param originalPid pid of the original calling process.
+     * @param newPid pid of the actual process that uses the resources.
+     *        Any existing override on originalPid is removed if newPid is -1.
+     */
+    void overridePid(int originalPid, int newPid);
+
+    /**
+     * Override the process state and OOM score of the calling process with
+     * the specified values. This is used by native service processes to specify
+     * these values for ResourceManagerService to use. ResourceManagerService usually
+     * gets these values from ActivityManagerService; however, ActivityManagerService
+     * doesn't track native service processes.
+     *
+     * @param client a token for the ResourceManagerService to link to the caller and
+     *              receive notification if it goes away. This is needed for clearing
+     *              the overrides.
+     * @param pid pid of the calling process.
+     * @param procState the process state value that ResourceManagerService should
+     *                  use for this pid.
+     * @param oomScore the oom score value that ResourceManagerService should
+     *                  use for this pid.
+     */
+    void overrideProcessInfo(
+            IResourceManagerClient client,
+            int pid,
+            int procState,
+            int oomScore);
+
+    /**
+     * Mark a client for pending removal.
+     *
+     * @param pid pid from which the client's resources will be removed.
+     * @param clientId clientId within the pid that will be removed.
+     */
+    void markClientForPendingRemoval(int pid, long clientId);
+
+    /**
+     * Reclaim resources from clients pending removal, if any.
+     *
+     * @param pid pid from which resources will be reclaimed.
+     */
+    void reclaimResourcesFromClientsPendingRemoval(int pid);
+}
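
For orientation, a hedged sketch of how a native process might call reclaimResource() through the NDK backend of this interface. The generated header path and the fromBinder()/method signatures shown here are assumed to follow the usual NDK AIDL conventions, and the "media.resource_manager" instance name is an assumption not stated in this file:

// Hedged sketch only: assumes NDK-backend bindings generated from this file, and
// assumes the service is registered as "media.resource_manager" (not stated here).
#include <memory>
#include <vector>

#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_manager.h>

using ::aidl::android::media::IResourceManagerService;
using ::aidl::android::media::MediaResourceParcel;

bool tryReclaim(int32_t callingPid, const std::vector<MediaResourceParcel>& resources) {
    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
    std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
    if (service == nullptr) {
        return false;
    }
    bool reclaimed = false;
    // The generated method maps the AIDL boolean return to a trailing bool* parameter.
    ::ndk::ScopedAStatus status = service->reclaimResource(callingPid, resources, &reclaimed);
    return status.isOk() && reclaimed;
}
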
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
new file mode 100644
index 0000000..462009a
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserver.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableEvent;
+import android.media.MediaObservableParcel;
+
+/**
+ * IResourceObserver interface for receiving observable resource updates
+ * from IResourceObserverService.
+ *
+ * {@hide}
+ */
+interface IResourceObserver {
+    /**
+     * Called when the status of an observed resource changes, e.g. when it is
+     * granted to or released by a client.
+     *
+     * @param event the status change that happened to the resource.
+     * @param uid uid to which the resource is associated.
+     * @param pid pid to which the resource is associated.
+     * @param observables the resources whose status has changed.
+     */
+    oneway void onStatusChanged(MediaObservableEvent event,
+        int uid, int pid, in MediaObservableParcel[] observables);
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
new file mode 100644
index 0000000..08f4ca0
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceObserverService.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.IResourceObserver;
+import android.media.MediaObservableFilter;
+
+/**
+ * IResourceObserverService interface for registering an IResourceObserver
+ * callback to receive status updates about observable media resources.
+ *
+ * {@hide}
+ */
+interface IResourceObserverService {
+
+    /**
+     * Register an observer on the IResourceObserverService to receive
+     * status updates for observable resources.
+     *
+     * @param observer the observer to register.
+     * @param filters an array of filters for resources and events to receive
+     *                updates for.
+     */
+    void registerObserver(
+            IResourceObserver observer,
+            in MediaObservableFilter[] filters);
+
+    /**
+     * Unregister an observer from the IResourceObserverService.
+     * The observer will stop receiving the status updates.
+     *
+     * @param observer the observer to unregister.
+     */
+    void unregisterObserver(IResourceObserver observer);
+}
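
As a rough illustration (not part of this change), a client could register an observer via the NDK backend of this interface roughly as below. The generated names and signatures (BnResourceObserver, fromBinder(), the parcelable fields) are assumed to follow standard NDK AIDL conventions; the "media.resource_observer" instance name matches ResourceObserverService::getServiceName() above:

// Hedged sketch only; generated class names and signatures are assumptions.
#include <memory>
#include <vector>

#include <aidl/android/media/BnResourceObserver.h>
#include <aidl/android/media/IResourceObserverService.h>
#include <android/binder_manager.h>

using namespace ::aidl::android::media;

// Minimal observer implementation receiving consolidated status updates.
struct MyObserver : public BnResourceObserver {
    ::ndk::ScopedAStatus onStatusChanged(MediaObservableEvent event, int32_t uid, int32_t pid,
            const std::vector<MediaObservableParcel>& observables) override {
        (void)event; (void)uid; (void)pid; (void)observables;  // react to the update here
        return ::ndk::ScopedAStatus::ok();
    }
};

void registerForSecureCodecUpdates() {
    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_observer"));
    std::shared_ptr<IResourceObserverService> service = IResourceObserverService::fromBinder(binder);
    if (service == nullptr) {
        return;
    }
    // Subscribe to both busy and idle events for secure video codecs.
    MediaObservableFilter filter;
    filter.type = MediaObservableType::kVideoSecureCodec;
    filter.eventFilter = static_cast<MediaObservableEvent>(
            static_cast<uint64_t>(MediaObservableEvent::kBusy) |
            static_cast<uint64_t>(MediaObservableEvent::kIdle));
    std::shared_ptr<MyObserver> observer = ::ndk::SharedRefBase::make<MyObserver>();
    service->registerObserver(observer, {filter});
}
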
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
new file mode 100644
index 0000000..56ab24d
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableEvent.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Enums for media observable events.
+ *
+ * These values are used as bitmasks in the MediaObservableFilter objects
+ * passed to IResourceObserverService::registerObserver() to indicate the
+ * events that the observer is interested in.
+ *
+ * {@hide}
+ */
+@Backing(type="long")
+enum MediaObservableEvent {
+    /**
+     * A media resource is granted to a client and becomes busy.
+     */
+    kBusy = 1,
+
+    /**
+     * A media resource is released by a client and becomes idle.
+     */
+    kIdle = 2,
+
+    /**
+     * A bitmask that covers all observable events defined.
+     */
+    kAll = ~0,
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
new file mode 100644
index 0000000..38f7e39
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableFilter.aidl
@@ -0,0 +1,43 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+import android.media.MediaObservableEvent;
+
+/**
+ * Description of an observable resource and its associated events that the
+ * observer is interested in.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableFilter {
+    /**
+     * Type of the observable media resource.
+     */
+    MediaObservableType type;
+
+    /**
+     * Events that the observer is interested in.
+     *
+     * This field is a bitwise-OR of the events in MediaObservableEvent. If a
+     * particular event's bit is set, it means that updates should be sent for
+     * that event. For example, if the observer is only interested in receiving
+     * updates when a resource becomes available, it should only set 'kIdle'.
+     */
+    MediaObservableEvent eventFilter;
+}
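
Concretely, the service tests each known event bit against eventFilter before routing an update (see the (uint64_t) casts in ResourceObserverService.cpp). A tiny standalone sketch of that check, using a local simplified re-declaration of the long-backed enum:

#include <cstdint>

// Local simplified re-declaration of the long-backed MediaObservableEvent enum.
enum class MediaObservableEvent : uint64_t { kBusy = 1, kIdle = 2, kAll = ~0ULL };

// True if 'event' is enabled in 'eventFilter': the same bit test that
// ResourceObserverService applies when fanning updates out to observers.
inline bool wantsEvent(MediaObservableEvent eventFilter, MediaObservableEvent event) {
    return (static_cast<uint64_t>(eventFilter) & static_cast<uint64_t>(event)) != 0;
}

// Example: a filter of kBusy|kIdle (value 3) matches both events; kAll matches everything.
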
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
new file mode 100644
index 0000000..c4233e1
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableParcel.aidl
@@ -0,0 +1,37 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.MediaObservableType;
+
+/**
+ * Description of an observable resource whose status has changed.
+ *
+ * {@hide}
+ */
+parcelable MediaObservableParcel {
+    /**
+     * Type of the observable media resource.
+     */
+    MediaObservableType type; // = MediaObservableType::kInvalid;
+
+    /**
+     * Number of units of the observable resource (number of codecs, bytes of
+     * graphic memory, etc.).
+     */
+    long value = 0;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
new file mode 100644
index 0000000..ed202da
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/MediaObservableType.aidl
@@ -0,0 +1,35 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Type enums of observable media resources.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+enum MediaObservableType {
+    kInvalid = 0,
+
+    //kVideoStart = 1000,
+    kVideoSecureCodec = 1000,
+    kVideoNonSecureCodec = 1001,
+
+    //kAudioStart = 2000,
+
+    //kGraphicMemory = 3000,
+}
diff --git a/media/libmedia/aidl/android/media/MediaResourceParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourcePolicyParcel.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourcePolicyParcel.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceSubType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
diff --git a/media/libmedia/aidl/android/media/MediaResourceType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
similarity index 100%
rename from media/libmedia/aidl/android/media/MediaResourceType.aidl
rename to services/mediaresourcemanager/aidl/android/media/MediaResourceType.aidl
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/.hash b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/.hash
new file mode 100644
index 0000000..df90ffb
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/.hash
@@ -0,0 +1 @@
+7e198281434e9c679b02265e95c51ea25ed180d3
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserver.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserver.aidl
new file mode 100644
index 0000000..73e0e0b
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserver.aidl
@@ -0,0 +1,38 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+interface IResourceObserver {
+  oneway void onStatusChanged(android.media.MediaObservableEvent event, int uid, int pid, in android.media.MediaObservableParcel[] observables);
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserverService.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserverService.aidl
new file mode 100644
index 0000000..c2d60af
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/IResourceObserverService.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+interface IResourceObserverService {
+  void registerObserver(android.media.IResourceObserver observer, in android.media.MediaObservableFilter[] filters);
+  void unregisterObserver(android.media.IResourceObserver observer);
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableEvent.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableEvent.aidl
new file mode 100644
index 0000000..a45f0ba
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableEvent.aidl
@@ -0,0 +1,41 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+@Backing(type="long")
+enum MediaObservableEvent {
+  kBusy = 1,
+  kIdle = 2,
+  kAll = -1,
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableFilter.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableFilter.aidl
new file mode 100644
index 0000000..22ae284
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableFilter.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+parcelable MediaObservableFilter {
+  android.media.MediaObservableType type;
+  android.media.MediaObservableEvent eventFilter;
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableParcel.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableParcel.aidl
new file mode 100644
index 0000000..2270d87
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableParcel.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+parcelable MediaObservableParcel {
+  android.media.MediaObservableType type;
+  long value = 0;
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableType.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableType.aidl
new file mode 100644
index 0000000..eab3f3f
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/1/android/media/MediaObservableType.aidl
@@ -0,0 +1,41 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+@Backing(type="int")
+enum MediaObservableType {
+  kInvalid = 0,
+  kVideoSecureCodec = 1000,
+  kVideoNonSecureCodec = 1001,
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserver.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserver.aidl
new file mode 100644
index 0000000..402704b
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserver.aidl
@@ -0,0 +1,22 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+interface IResourceObserver {
+  oneway void onStatusChanged(android.media.MediaObservableEvent event, int uid, int pid, in android.media.MediaObservableParcel[] observables);
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserverService.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserverService.aidl
new file mode 100644
index 0000000..a0ef761
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/IResourceObserverService.aidl
@@ -0,0 +1,23 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+interface IResourceObserverService {
+  void registerObserver(android.media.IResourceObserver observer, in android.media.MediaObservableFilter[] filters);
+  void unregisterObserver(android.media.IResourceObserver observer);
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableEvent.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableEvent.aidl
new file mode 100644
index 0000000..fd60bea
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableEvent.aidl
@@ -0,0 +1,25 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+@Backing(type="long")
+enum MediaObservableEvent {
+  kBusy = 1,
+  kIdle = 2,
+  kAll = -1,
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableFilter.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableFilter.aidl
new file mode 100644
index 0000000..4045ae0
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableFilter.aidl
@@ -0,0 +1,23 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+parcelable MediaObservableFilter {
+  android.media.MediaObservableType type;
+  android.media.MediaObservableEvent eventFilter;
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableParcel.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableParcel.aidl
new file mode 100644
index 0000000..ff5a8e8
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableParcel.aidl
@@ -0,0 +1,23 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+parcelable MediaObservableParcel {
+  android.media.MediaObservableType type;
+  long value = 0;
+}
diff --git a/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableType.aidl b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableType.aidl
new file mode 100644
index 0000000..ebb8d8e
--- /dev/null
+++ b/services/mediaresourcemanager/aidl_api/resourceobserver_aidl_interface/current/android/media/MediaObservableType.aidl
@@ -0,0 +1,25 @@
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL interface (or parcelable). Do not try to
+// edit this file. It looks like you are doing that because you have modified
+// an AIDL interface in a backward-incompatible way, e.g., deleting a function
+// from an interface or a field from a parcelable and it broke the build. That
+// breakage is intended.
+//
+// You must not make a backward incompatible changes to the AIDL files built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media;
+/* @hide */
+@Backing(type="int")
+enum MediaObservableType {
+  kInvalid = 0,
+  kVideoSecureCodec = 1000,
+  kVideoNonSecureCodec = 1001,
+}
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
new file mode 100644
index 0000000..81c85e5
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -0,0 +1,51 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "mediaresourcemanager_fuzzer",
+    srcs: [
+        "mediaresourcemanager_fuzzer.cpp",
+    ],
+    static_libs: [
+        "liblog",
+        "libresourcemanagerservice",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libbinder_ndk",
+        "libmedia",
+        "libutils",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/mediaresourcemanager/fuzzer/README.md b/services/mediaresourcemanager/fuzzer/README.md
new file mode 100644
index 0000000..c600be4
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/README.md
@@ -0,0 +1,46 @@
+# Fuzzer for libresourcemanagerservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libresourcemanagerservice is designed based on an
+understanding of the service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+Media Resource Manager supports the following parameters:
+1. Media Resource Type (parameter name: `mediaResourceType`)
+2. Media Resource SubType (parameter name: `mediaResourceSubType`)
+
+| Parameter | Valid Values | Configured Value |
+|------------- |-------------| ----- |
+| `mediaResourceType` | 0.`MediaResource::kSecureCodec` 1.`MediaResource::kNonSecureCodec` 2.`MediaResource::kGraphicMemory` 3.`MediaResource::kCpuBoost` 4.`MediaResource::kBattery` 5.`MediaResource::kDrmSession` | Value obtained from FuzzedDataProvider |
+| `mediaResourceSubType` | 0.`MediaResource::kAudioCodec` 1.`MediaResource::kVideoCodec` 2.`MediaResource::kUnspecifiedSubType` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
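+
+A rough, hypothetical sketch of how these values might be drawn from the input
+(the actual harness is `mediaresourcemanager_fuzzer.cpp`, included in this
+change; `FuzzedDataProvider` is the libFuzzer helper the harness already uses):
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    // Both parameters come from the input bytes rather than being hardcoded,
+    // so a given corpus file always exercises the same configuration.
+    const int32_t mediaResourceType = fdp.ConsumeIntegralInRange<int32_t>(0, 5);
+    const int32_t mediaResourceSubType = fdp.ConsumeIntegralInRange<int32_t>(0, 2);
+    (void)mediaResourceType;     // the real harness feeds these into MediaResource()
+    (void)mediaResourceSubType;
+    return 0;
+}
+```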
+
+## Build
+
+This describes the steps to build the mediaresourcemanager_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) mediaresourcemanager_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR, copy some files into it, and push the
+directory to the device.
+
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mediaresourcemanager_fuzzer/mediaresourcemanager_fuzzer CORPUS_DIR
+```
+
+## References
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
new file mode 100644
index 0000000..6690b16
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -0,0 +1,299 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <ServiceLog.h>
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include "ResourceManagerService.h"
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace std;
+using namespace android;
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceManagerClient;
+using ::aidl::android::media::IResourceManagerClient;
+using ::aidl::android::media::IResourceManagerService;
+using MedResType = aidl::android::media::MediaResourceType;
+using MedResSubType = aidl::android::media::MediaResourceSubType;
+
+const size_t kMaxStringLength = 100;
+const int32_t kMaxServiceLog = 100;
+const int32_t kMinServiceLog = 1;
+const int32_t kMinResourceType = 0;
+const int32_t kMaxResourceType = 10;
+const int32_t kMinThreadPairs = 1;
+const int32_t kMaxThreadPairs = 3;
+
+const string kPolicyType[] = {IResourceManagerService::kPolicySupportsMultipleSecureCodecs,
+                              IResourceManagerService::kPolicySupportsSecureWithNonSecureCodec};
+
+struct resourceThreadArgs {
+    int32_t pid;
+    int32_t uid;
+    int64_t testClientId;
+    shared_ptr<ResourceManagerService> service;
+    shared_ptr<IResourceManagerClient> testClient;
+    vector<MediaResourceParcel> mediaResource;
+};
+
+static int64_t getId(const shared_ptr<IResourceManagerClient>& client) {
+    return (int64_t)client.get();
+}
+
+struct TestProcessInfo : public ProcessInfoInterface {
+    TestProcessInfo() {}
+    virtual ~TestProcessInfo() {}
+
+    virtual bool getPriority(int pid, int* priority) {
+        // For testing, use pid as priority.
+        // Lower the value higher the priority.
+        *priority = pid;
+        return true;
+    }
+
+    virtual bool isValidPid(int /* pid */) { return true; }
+    virtual bool overrideProcessInfo(int /* pid */, int /*procState*/, int /*oomScore*/) {
+        return true;
+    }
+    virtual void removeProcessInfoOverride(int /* pid */) { return; }
+
+   private:
+    DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
+};
+
+struct TestSystemCallback : public ResourceManagerService::SystemCallbackInterface {
+    TestSystemCallback() : mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
+
+    enum EventType {
+        INVALID = -1,
+        VIDEO_ON = 0,
+        VIDEO_OFF = 1,
+        VIDEO_RESET = 2,
+        CPUSET_ENABLE = 3,
+        CPUSET_DISABLE = 4,
+    };
+
+    struct EventEntry {
+        EventType type;
+        int arg;
+    };
+
+    virtual void noteStartVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_ON, uid};
+        ++mEventCount;
+    }
+
+    virtual void noteStopVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_OFF, uid};
+        ++mEventCount;
+    }
+
+    virtual void noteResetVideo() override {
+        mLastEvent = {EventType::VIDEO_RESET, 0};
+        ++mEventCount;
+    }
+
+    virtual bool requestCpusetBoost(bool enable) override {
+        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
+        ++mEventCount;
+        return true;
+    }
+
+    size_t eventCount() { return mEventCount; }
+    EventType lastEventType() { return mLastEvent.type; }
+    EventEntry lastEvent() { return mLastEvent; }
+
+   protected:
+    virtual ~TestSystemCallback() {}
+
+   private:
+    EventEntry mLastEvent;
+    size_t mEventCount;
+
+    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
+};
+
+struct TestClient : public BnResourceManagerClient {
+    TestClient(int pid, const shared_ptr<ResourceManagerService>& service)
+        : mReclaimed(false), mPid(pid), mService(service) {}
+
+    Status reclaimResource(bool* aidlReturn) override {
+        mService->removeClient(mPid, getId(ref<TestClient>()));
+        mReclaimed = true;
+        *aidlReturn = true;
+        return Status::ok();
+    }
+
+    Status getName(string* aidlReturn) override {
+        *aidlReturn = "test_client";
+        return Status::ok();
+    }
+
+    virtual ~TestClient() {}
+
+   private:
+    bool mReclaimed;
+    int mPid;
+    shared_ptr<ResourceManagerService> mService;
+    DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+class ResourceManagerServiceFuzzer {
+   public:
+    ResourceManagerServiceFuzzer() = default;
+    ~ResourceManagerServiceFuzzer() {
+        mService = nullptr;
+        delete mFuzzedDataProvider;
+    }
+    void process(const uint8_t* data, size_t size);
+
+   private:
+    void setConfig();
+    void setResources();
+    void setServiceLog();
+
+    static void* addResource(void* arg) {
+        resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
+        if (tArgs) {
+            (tArgs->service)
+                ->addResource(tArgs->pid, tArgs->uid, tArgs->testClientId, tArgs->testClient,
+                              tArgs->mediaResource);
+        }
+        return nullptr;
+    }
+
+    static void* removeResource(void* arg) {
+        resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
+        if (tArgs) {
+            bool result;
+            (tArgs->service)->markClientForPendingRemoval(tArgs->pid, tArgs->testClientId);
+            (tArgs->service)->removeResource(tArgs->pid, tArgs->testClientId, tArgs->mediaResource);
+            (tArgs->service)->reclaimResource(tArgs->pid, tArgs->mediaResource, &result);
+            (tArgs->service)->removeClient(tArgs->pid, tArgs->testClientId);
+            (tArgs->service)->overridePid(tArgs->pid, tArgs->pid - 1);
+        }
+        return nullptr;
+    }
+
+    shared_ptr<ResourceManagerService> mService =
+        ::ndk::SharedRefBase::make<ResourceManagerService>(new TestProcessInfo(),
+                                                           new TestSystemCallback());
+    FuzzedDataProvider* mFuzzedDataProvider = nullptr;
+};
+
+void ResourceManagerServiceFuzzer::process(const uint8_t* data, size_t size) {
+    mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+    setConfig();
+    setResources();
+    setServiceLog();
+}
+
+void ResourceManagerServiceFuzzer::setConfig() {
+    bool policyTypeIndex = mFuzzedDataProvider->ConsumeBool();
+    string policyValue = mFuzzedDataProvider->ConsumeRandomLengthString(kMaxStringLength);
+    if (mService) {
+        vector<MediaResourcePolicyParcel> policies;
+        policies.push_back(MediaResourcePolicy(kPolicyType[policyTypeIndex], policyValue));
+        mService->config(policies);
+    }
+}
+
+void ResourceManagerServiceFuzzer::setResources() {
+    if (!mService) {
+        return;
+    }
+    size_t numThreadPairs =
+        mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinThreadPairs, kMaxThreadPairs);
+    // Make even number of threads
+    size_t numThreads = numThreadPairs * 2;
+    resourceThreadArgs threadArgs;
+    vector<MediaResourceParcel> mediaResource;
+    pthread_t pt[numThreads];
+    int i;
+    for (i = 0; i < numThreads - 1; i += 2) {
+        threadArgs.pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+        threadArgs.uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+        int32_t mediaResourceType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
+            kMinResourceType, kMaxResourceType);
+        int32_t mediaResourceSubType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
+            kMinResourceType, kMaxResourceType);
+        uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
+        threadArgs.service = mService;
+        shared_ptr<IResourceManagerClient> testClient =
+            ::ndk::SharedRefBase::make<TestClient>(threadArgs.pid, mService);
+        threadArgs.testClient = testClient;
+        threadArgs.testClientId = getId(testClient);
+        mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                              static_cast<MedResSubType>(mediaResourceSubType),
+                                              mediaResourceValue));
+        threadArgs.mediaResource = mediaResource;
+        pthread_create(&pt[i], nullptr, addResource, &threadArgs);
+        pthread_create(&pt[i + 1], nullptr, removeResource, &threadArgs);
+        mediaResource.clear();
+    }
+
+    for (i = 0; i < numThreads; ++i) {
+        pthread_join(pt[i], nullptr);
+    }
+
+    // No resource was added with pid = 0
+    int32_t pidZero = 0;
+    shared_ptr<IResourceManagerClient> testClient =
+        ::ndk::SharedRefBase::make<TestClient>(pidZero, mService);
+    int32_t mediaResourceType =
+        mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
+    int32_t mediaResourceSubType =
+        mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
+    uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
+    mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                          static_cast<MedResSubType>(mediaResourceSubType),
+                                          mediaResourceValue));
+    bool result;
+    mService->reclaimResource(pidZero, mediaResource, &result);
+    mService->removeResource(pidZero, getId(testClient), mediaResource);
+    mService->removeClient(pidZero, getId(testClient));
+    mediaResource.clear();
+}
+
+void ResourceManagerServiceFuzzer::setServiceLog() {
+    size_t maxNum =
+        mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinServiceLog, kMaxServiceLog);
+    sp<ServiceLog> serviceLog = new ServiceLog(maxNum);
+    if (serviceLog) {
+        serviceLog->add(String8("log"));
+        serviceLog->toString();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    ResourceManagerServiceFuzzer* rmFuzzer = new ResourceManagerServiceFuzzer();
+    if (!rmFuzzer) {
+        return 0;
+    }
+    rmFuzzer->process(data, size);
+    delete rmFuzzer;
+    return 0;
+}
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index b6c548c..ec4ba58 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -1,14 +1,23 @@
 // Build the unit tests.
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 cc_test {
     name: "ResourceManagerService_test",
     srcs: ["ResourceManagerService_test.cpp"],
     test_suites: ["device-tests"],
+    static_libs: ["libresourcemanagerservice"],
     shared_libs: [
         "libbinder",
         "libbinder_ndk",
         "liblog",
         "libmedia",
-        "libresourcemanagerservice",
         "libutils",
     ],
     include_dirs: [
@@ -19,17 +28,16 @@
         "-Werror",
         "-Wall",
     ],
-    compile_multilib: "32",
 }
 
 cc_test {
     name: "ServiceLog_test",
     srcs: ["ServiceLog_test.cpp"],
     test_suites: ["device-tests"],
+    static_libs: ["libresourcemanagerservice"],
     shared_libs: [
         "liblog",
         "libmedia",
-        "libresourcemanagerservice",
         "libutils",
     ],
     include_dirs: [
@@ -40,5 +48,29 @@
         "-Werror",
         "-Wall",
     ],
-    compile_multilib: "32",
+}
+
+cc_test {
+    name: "ResourceObserverService_test",
+    srcs: ["ResourceObserverService_test.cpp"],
+    test_suites: ["device-tests"],
+    static_libs: [
+        "libresourcemanagerservice",
+        "resourceobserver_aidl_interface-V1-ndk_platform",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libbinder_ndk",
+        "liblog",
+        "libmedia",
+        "libutils",
+    ],
+    include_dirs: [
+        "frameworks/av/include",
+        "frameworks/av/services/mediaresourcemanager",
+    ],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
 }
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
new file mode 100644
index 0000000..8e29312
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "ResourceManagerService.h"
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+
+namespace android {
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::BnResourceManagerClient;
+using ::aidl::android::media::IResourceManagerService;
+using ::aidl::android::media::IResourceManagerClient;
+using ::aidl::android::media::MediaResourceParcel;
+
+static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
+    return (int64_t) client.get();
+}
+
+struct TestProcessInfo : public ProcessInfoInterface {
+    TestProcessInfo() {}
+    virtual ~TestProcessInfo() {}
+
+    virtual bool getPriority(int pid, int *priority) {
+        // For testing, use pid as priority.
+        // Lower the value higher the priority.
+        *priority = pid;
+        return true;
+    }
+
+    virtual bool isValidPid(int /* pid */) {
+        return true;
+    }
+
+    virtual bool overrideProcessInfo(
+            int /* pid */, int /* procState */, int /* oomScore */) {
+        return true;
+    }
+
+    virtual void removeProcessInfoOverride(int /* pid */) {
+    }
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
+};
+
+struct TestSystemCallback :
+        public ResourceManagerService::SystemCallbackInterface {
+    TestSystemCallback() :
+        mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
+
+    enum EventType {
+        INVALID          = -1,
+        VIDEO_ON         = 0,
+        VIDEO_OFF        = 1,
+        VIDEO_RESET      = 2,
+        CPUSET_ENABLE    = 3,
+        CPUSET_DISABLE   = 4,
+    };
+
+    struct EventEntry {
+        EventType type;
+        int arg;
+    };
+
+    virtual void noteStartVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_ON, uid};
+        mEventCount++;
+    }
+
+    virtual void noteStopVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_OFF, uid};
+        mEventCount++;
+    }
+
+    virtual void noteResetVideo() override {
+        mLastEvent = {EventType::VIDEO_RESET, 0};
+        mEventCount++;
+    }
+
+    virtual bool requestCpusetBoost(bool enable) override {
+        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
+        mEventCount++;
+        return true;
+    }
+
+    size_t eventCount() { return mEventCount; }
+    EventType lastEventType() { return mLastEvent.type; }
+    EventEntry lastEvent() { return mLastEvent; }
+
+protected:
+    virtual ~TestSystemCallback() {}
+
+private:
+    EventEntry mLastEvent;
+    size_t mEventCount;
+
+    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
+};
+
+
+struct TestClient : public BnResourceManagerClient {
+    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
+        : mReclaimed(false), mPid(pid), mService(service) {}
+
+    Status reclaimResource(bool* _aidl_return) override {
+        mService->removeClient(mPid, getId(ref<TestClient>()));
+        mReclaimed = true;
+        *_aidl_return = true;
+        return Status::ok();
+    }
+
+    Status getName(::std::string* _aidl_return) override {
+        *_aidl_return = "test_client";
+        return Status::ok();
+    }
+
+    bool reclaimed() const {
+        return mReclaimed;
+    }
+
+    void reset() {
+        mReclaimed = false;
+    }
+
+    virtual ~TestClient() {}
+
+private:
+    bool mReclaimed;
+    int mPid;
+    std::shared_ptr<ResourceManagerService> mService;
+    DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+static const int kTestPid1 = 30;
+static const int kTestUid1 = 1010;
+
+static const int kTestPid2 = 20;
+static const int kTestUid2 = 1011;
+
+static const int kLowPriorityPid = 40;
+static const int kMidPriorityPid = 25;
+static const int kHighPriorityPid = 10;
+
+using EventType = TestSystemCallback::EventType;
+using EventEntry = TestSystemCallback::EventEntry;
+bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
+    return lhs.type == rhs.type && lhs.arg == rhs.arg;
+}
+
+#define CHECK_STATUS_TRUE(condition) \
+    EXPECT_TRUE((condition).isOk() && (result))
+
+#define CHECK_STATUS_FALSE(condition) \
+    EXPECT_TRUE((condition).isOk() && !(result))
+
+class ResourceManagerServiceTestBase : public ::testing::Test {
+public:
+    ResourceManagerServiceTestBase()
+        : mSystemCB(new TestSystemCallback()),
+          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
+                  new TestProcessInfo, mSystemCB)),
+          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
+          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
+          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
+    }
+
+    sp<TestSystemCallback> mSystemCB;
+    std::shared_ptr<ResourceManagerService> mService;
+    std::shared_ptr<IResourceManagerClient> mTestClient1;
+    std::shared_ptr<IResourceManagerClient> mTestClient2;
+    std::shared_ptr<IResourceManagerClient> mTestClient3;
+
+protected:
+    static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
+            const ResourceList &resources2) {
+        // convert resource1 to ResourceList
+        ResourceList r1;
+        for (size_t i = 0; i < resources1.size(); ++i) {
+            const auto &res = resources1[i];
+            const auto resType = std::tuple(res.type, res.subType, res.id);
+            r1[resType] = res;
+        }
+        return r1 == resources2;
+    }
+
+    static void expectEqResourceInfo(const ResourceInfo &info,
+            int uid,
+            std::shared_ptr<IResourceManagerClient> client,
+            const std::vector<MediaResourceParcel> &resources) {
+        EXPECT_EQ(uid, info.uid);
+        EXPECT_EQ(client, info.client);
+        EXPECT_TRUE(isEqualResources(resources, info.resources));
+    }
+};
+
+} // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 702935d..a029d45 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -16,197 +16,17 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ResourceManagerService_test"
+
 #include <utils/Log.h>
 
-#include <gtest/gtest.h>
-
+#include "ResourceManagerServiceTestUtils.h"
 #include "ResourceManagerService.h"
-#include <aidl/android/media/BnResourceManagerClient.h>
-#include <media/MediaResource.h>
-#include <media/MediaResourcePolicy.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
-
-namespace aidl {
-namespace android {
-namespace media {
-bool operator== (const MediaResourceParcel& lhs, const MediaResourceParcel& rhs) {
-    return lhs.type == rhs.type && lhs.subType == rhs.subType &&
-            lhs.id == rhs.id && lhs.value == rhs.value;
-}}}}
 
 namespace android {
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::BnResourceManagerClient;
-using ::aidl::android::media::IResourceManagerService;
-using ::aidl::android::media::IResourceManagerClient;
-
-static int64_t getId(const std::shared_ptr<IResourceManagerClient>& client) {
-    return (int64_t) client.get();
-}
-
-struct TestProcessInfo : public ProcessInfoInterface {
-    TestProcessInfo() {}
-    virtual ~TestProcessInfo() {}
-
-    virtual bool getPriority(int pid, int *priority) {
-        // For testing, use pid as priority.
-        // Lower the value higher the priority.
-        *priority = pid;
-        return true;
-    }
-
-    virtual bool isValidPid(int /* pid */) {
-        return true;
-    }
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
-};
-
-struct TestSystemCallback :
-        public ResourceManagerService::SystemCallbackInterface {
-    TestSystemCallback() :
-        mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
-
-    enum EventType {
-        INVALID          = -1,
-        VIDEO_ON         = 0,
-        VIDEO_OFF        = 1,
-        VIDEO_RESET      = 2,
-        CPUSET_ENABLE    = 3,
-        CPUSET_DISABLE   = 4,
-    };
-
-    struct EventEntry {
-        EventType type;
-        int arg;
-    };
-
-    virtual void noteStartVideo(int uid) override {
-        mLastEvent = {EventType::VIDEO_ON, uid};
-        mEventCount++;
-    }
-
-    virtual void noteStopVideo(int uid) override {
-        mLastEvent = {EventType::VIDEO_OFF, uid};
-        mEventCount++;
-    }
-
-    virtual void noteResetVideo() override {
-        mLastEvent = {EventType::VIDEO_RESET, 0};
-        mEventCount++;
-    }
-
-    virtual bool requestCpusetBoost(bool enable) override {
-        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
-        mEventCount++;
-        return true;
-    }
-
-    size_t eventCount() { return mEventCount; }
-    EventType lastEventType() { return mLastEvent.type; }
-    EventEntry lastEvent() { return mLastEvent; }
-
-protected:
-    virtual ~TestSystemCallback() {}
-
-private:
-    EventEntry mLastEvent;
-    size_t mEventCount;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
-};
-
-
-struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
-
-    Status reclaimResource(bool* _aidl_return) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
-        mReclaimed = true;
-        *_aidl_return = true;
-        return Status::ok();
-    }
-
-    Status getName(::std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
-        return Status::ok();
-    }
-
-    bool reclaimed() const {
-        return mReclaimed;
-    }
-
-    void reset() {
-        mReclaimed = false;
-    }
-
-    virtual ~TestClient() {}
-
-private:
-    bool mReclaimed;
-    int mPid;
-    std::shared_ptr<ResourceManagerService> mService;
-    DISALLOW_EVIL_CONSTRUCTORS(TestClient);
-};
-
-static const int kTestPid1 = 30;
-static const int kTestUid1 = 1010;
-
-static const int kTestPid2 = 20;
-static const int kTestUid2 = 1011;
-
-static const int kLowPriorityPid = 40;
-static const int kMidPriorityPid = 25;
-static const int kHighPriorityPid = 10;
-
-using EventType = TestSystemCallback::EventType;
-using EventEntry = TestSystemCallback::EventEntry;
-bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
-    return lhs.type == rhs.type && lhs.arg == rhs.arg;
-}
-
-#define CHECK_STATUS_TRUE(condition) \
-    EXPECT_TRUE((condition).isOk() && (result))
-
-#define CHECK_STATUS_FALSE(condition) \
-    EXPECT_TRUE((condition).isOk() && !(result))
-
-class ResourceManagerServiceTest : public ::testing::Test {
+class ResourceManagerServiceTest : public ResourceManagerServiceTestBase {
 public:
-    ResourceManagerServiceTest()
-        : mSystemCB(new TestSystemCallback()),
-          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
-                  new TestProcessInfo, mSystemCB)),
-          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
-          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
-          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
-    }
-
-protected:
-    static bool isEqualResources(const std::vector<MediaResourceParcel> &resources1,
-            const ResourceList &resources2) {
-        // convert resource1 to ResourceList
-        ResourceList r1;
-        for (size_t i = 0; i < resources1.size(); ++i) {
-            const auto &res = resources1[i];
-            const auto resType = std::tuple(res.type, res.subType, res.id);
-            r1[resType] = res;
-        }
-        return r1 == resources2;
-    }
-
-    static void expectEqResourceInfo(const ResourceInfo &info,
-            int uid,
-            std::shared_ptr<IResourceManagerClient> client,
-            const std::vector<MediaResourceParcel> &resources) {
-        EXPECT_EQ(uid, info.uid);
-        EXPECT_EQ(client, info.client);
-        EXPECT_TRUE(isEqualResources(resources, info.resources));
-    }
+    ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
 
     void verifyClients(bool c1, bool c2, bool c3) {
         TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
@@ -520,6 +340,30 @@
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
         }
+
+        {
+            addResource();
+            mService->mSupportsSecureWithNonSecureCodec = true;
+
+            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+
+            // client marked for pending removal got reclaimed
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+
+            // No more clients marked for removal
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+
+            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
+
+            // client marked for pending removal got reclaimed
+            EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
+            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+
+            // clean up client 1 which still left
+            mService->removeClient(kTestPid1, getId(mTestClient1));
+        }
     }
 
     void testRemoveClient() {
@@ -881,12 +725,6 @@
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
-
-    sp<TestSystemCallback> mSystemCB;
-    std::shared_ptr<ResourceManagerService> mService;
-    std::shared_ptr<IResourceManagerClient> mTestClient1;
-    std::shared_ptr<IResourceManagerClient> mTestClient2;
-    std::shared_ptr<IResourceManagerClient> mTestClient3;
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
new file mode 100644
index 0000000..acd9df1
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -0,0 +1,460 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceObserverService_test"
+
+#include <iostream>
+#include <list>
+
+#include <aidl/android/media/BnResourceObserver.h>
+#include <utils/Log.h>
+#include "ResourceObserverService.h"
+#include "ResourceManagerServiceTestUtils.h"
+
+namespace android {
+
+using ::aidl::android::media::BnResourceObserver;
+using ::aidl::android::media::MediaObservableParcel;
+using ::aidl::android::media::MediaObservableType;
+
+#define BUSY ::aidl::android::media::MediaObservableEvent::kBusy
+#define IDLE ::aidl::android::media::MediaObservableEvent::kIdle
+#define ALL ::aidl::android::media::MediaObservableEvent::kAll
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Busy, Idle } type = NoEvent;
+        int uid = 0;
+        int pid = 0;
+        std::vector<MediaObservableParcel> observables;
+    };
+
+    static const Event NoEvent;
+
+    static std::string toString(const MediaObservableParcel& observable) {
+        return "{" + ::aidl::android::media::toString(observable.type)
+        + ", " + std::to_string(observable.value) + "}";
+    }
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Busy:
+            eventStr = "Busy";
+            break;
+        case Event::Idle:
+            eventStr = "Idle";
+            break;
+        default:
+            return "NoEvent";
+        }
+        std::string observableStr;
+        for (auto &observable : event.observables) {
+            if (!observableStr.empty()) {
+                observableStr += ", ";
+            }
+            observableStr += toString(observable);
+        }
+        return "{" + eventStr + ", " + std::to_string(event.uid) + ", "
+                + std::to_string(event.pid) + ", {" + observableStr + "}}";
+    }
+
+    static Event Busy(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+        return { Event::Busy, uid, pid, observables };
+    }
+    static Event Idle(int uid, int pid, const std::vector<MediaObservableParcel>& observables) {
+        return { Event::Idle, uid, pid, observables };
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        mCondition.notify_one();
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+};
+
+const EventTracker::Event EventTracker::NoEvent;
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
+            lhs.observables == rhs.observables;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+struct TestObserver : public BnResourceObserver, public EventTracker {
+    TestObserver(const char *name) : mName(name) {}
+    ~TestObserver() = default;
+    Status onStatusChanged(MediaObservableEvent event, int32_t uid, int32_t pid,
+            const std::vector<MediaObservableParcel>& observables) override {
+        ALOGD("%s: %s", mName.c_str(), __FUNCTION__);
+        if (event == MediaObservableEvent::kBusy) {
+            append(Busy(uid, pid, observables));
+        } else {
+            append(Idle(uid, pid, observables));
+        }
+
+        return Status::ok();
+    }
+    std::string mName;
+};
+
+class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
+public:
+    ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
+        mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
+        mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
+        mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
+        mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+        mService->setObserverService(mObserverService);
+    }
+
+    void registerObservers(MediaObservableEvent filter = ALL) {
+        std::vector<MediaObservableFilter> filters1, filters2, filters3;
+        filters1 = {{MediaObservableType::kVideoSecureCodec, filter}};
+        filters2 = {{MediaObservableType::kVideoNonSecureCodec, filter}};
+        filters3 = {{MediaObservableType::kVideoSecureCodec, filter},
+                   {MediaObservableType::kVideoNonSecureCodec, filter}};
+
+        // mTestObserver1 monitors secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+        // mTestObserver2 monitors non-secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+        // mTestObserver3 monitors both secure & non-secure video codecs.
+        EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+    }
+
+protected:
+    std::shared_ptr<ResourceObserverService> mObserverService;
+    std::shared_ptr<TestObserver> mTestObserver1;
+    std::shared_ptr<TestObserver> mTestObserver2;
+    std::shared_ptr<TestObserver> mTestObserver3;
+};
+
+TEST_F(ResourceObserverServiceTest, testRegisterObserver) {
+    std::vector<MediaObservableFilter> filters1;
+    Status status;
+
+    // Register with null observer should fail.
+    status = mObserverService->registerObserver(nullptr, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
+    // Register with empty observables should fail.
+    status = mObserverService->registerObserver(mTestObserver1, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), BAD_VALUE);
+
+    // mTestObserver1 monitors secure video codecs.
+    filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+    // Register duplicates should fail.
+    status = mObserverService->registerObserver(mTestObserver1, filters1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), ALREADY_EXISTS);
+}
+
+TEST_F(ResourceObserverServiceTest, testUnregisterObserver) {
+    std::vector<MediaObservableFilter> filters1;
+    Status status;
+
+    // Unregister without registering first should fail.
+    status = mObserverService->unregisterObserver(mTestObserver1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+
+    // mTestObserver1 monitors secure video codecs.
+    filters1 = {{MediaObservableType::kVideoSecureCodec, ALL}};
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+    EXPECT_TRUE(mObserverService->unregisterObserver(mTestObserver1).isOk());
+
+    // Unregister again should fail.
+    status = mObserverService->unregisterObserver(mTestObserver1);
+    EXPECT_FALSE(status.isOk());
+    EXPECT_EQ(status.getServiceSpecificError(), NAME_NOT_FOUND);
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceBasic) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                   {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+    // Add secure video codec.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+
+    // Add non-secure video codec.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+    // Add secure & non-secure video codecs.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+
+    // Add additional audio codecs, should be ignored.
+    resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
+    resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testAddResourceMultiple) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add multiple secure & non-secure video codecs.
+    // Multiple entries of the same type should be merged, and the count propagated correctly.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
+                    {MediaObservableType::kVideoNonSecureCodec, 3}};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceBasic) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+    // Add secure video codec to client1.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
+    // Remove secure video codec. observer 1&3 should receive updates.
+    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
+    // Remove secure video codec again, should have no event.
+    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove client1, should have no event.
+    mService->removeClient(kTestPid1, getId(mTestClient1));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+    // Add non-secure video codec to client2.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    // Remove client2, observer 2&3 should receive updates.
+    mService->removeClient(kTestPid2, getId(mTestClient2));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    // Remove non-secure codec after client2 removed, should have no event.
+    mService->removeResource(kTestPid2, getId(mTestClient2), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove client2 again, should have no event.
+    mService->removeClient(kTestPid2, getId(mTestClient2));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+
+    // Add secure & non-secure video codecs, plus audio codecs (which are ignored).
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+    // Remove one audio codec, should have no event.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
+    // Remove the other audio codec and the secure video codec; only the secure video
+    // codec removal should be reported.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    // Remove client3 entirely. Non-secure video codec removal should be reported.
+    mService->removeClient(kTestPid2, getId(mTestClient3));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testRemoveResourceMultiple) {
+    registerObservers();
+
+    std::vector<MediaObservableParcel> observables1, observables2, observables3;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add multiple secure & non-secure video codecs, plus audio codecs (which are ignored).
+    // (ResourceManager will merge these internally.)
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
+                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 4}};
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
+    // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
+    // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
+    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
+    observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
+                    {MediaObservableType::kVideoNonSecureCodec, 2}};
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables3));
+    // Remove client3 entirely. 2 non-secure video codecs removal should be reported.
+    mService->removeClient(kTestPid2, getId(mTestClient3));
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+}
+
+TEST_F(ResourceObserverServiceTest, testEventFilters) {
+    // Register observers with different event filters.
+    std::vector<MediaObservableFilter> filters1, filters2, filters3;
+    filters1 = {{MediaObservableType::kVideoSecureCodec, BUSY}};
+    filters2 = {{MediaObservableType::kVideoNonSecureCodec, IDLE}};
+    filters3 = {{MediaObservableType::kVideoSecureCodec, IDLE},
+                {MediaObservableType::kVideoNonSecureCodec, BUSY}};
+
+    // mTestObserver1 monitors secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters1).isOk());
+
+    // mTestObserver2 monitors non-secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver2, filters2).isOk());
+
+    // mTestObserver3 monitors both secure & non-secure video codecs.
+    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver3, filters3).isOk());
+
+    std::vector<MediaObservableParcel> observables1, observables2;
+    observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
+    observables2 = {{MediaObservableType::kVideoNonSecureCodec, 1}};
+
+    std::vector<MediaResourceParcel> resources;
+
+    // Add secure & non-secure video codecs.
+    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
+                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
+
+    // Remove secure & non-secure video codecs.
+    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
+    EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
+    EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
+}
+
+} // namespace android
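The tests above pin down the ResourceObserverService contract: observers register with
per-type event filters and receive Busy/Idle notifications only for the observable types
and event kinds they subscribed to, with counts merged per type. A minimal sketch of the
registration call in isolation, reusing only the fixture members and the ALL/BUSY/IDLE
shorthands defined earlier in this test file:

    // Sketch only: subscribe to secure-video "busy" transitions and to all
    // non-secure-video transitions. mObserverService and mTestObserver1 come from
    // the ResourceObserverServiceTest fixture above.
    std::vector<MediaObservableFilter> filters = {
            {MediaObservableType::kVideoSecureCodec, BUSY},
            {MediaObservableType::kVideoNonSecureCodec, ALL}};
    EXPECT_TRUE(mObserverService->registerObserver(mTestObserver1, filters).isOk());
    // Registering the same observer twice fails with ALREADY_EXISTS; unregistering an
    // observer that was never registered fails with NAME_NOT_FOUND.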
diff --git a/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..1c4ae98
--- /dev/null
+++ b/services/mediaresourcemanager/test/build_and_run_all_unit_tests.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ "$SYNC_FINISHED" != true ]; then
+  if [ -z "$ANDROID_BUILD_TOP" ]; then
+      echo "Android build environment not set"
+      exit -1
+  fi
+
+  # ensure we have mm
+  . $ANDROID_BUILD_TOP/build/envsetup.sh
+
+  mm
+
+  echo "waiting for device"
+
+  adb root && adb wait-for-device remount && adb sync
+fi
+
+echo "========================================"
+
+echo "testing ResourceManagerService"
+#adb shell /data/nativetest64/ResourceManagerService_test/ResourceManagerService_test
+adb shell /data/nativetest/ResourceManagerService_test/ResourceManagerService_test
+
+echo "testing ServiceLog"
+#adb shell /data/nativetest64/ServiceLog_test/ServiceLog_test
+adb shell /data/nativetest/ServiceLog_test/ServiceLog_test
+
+echo "testing ResourceObserverService"
+#adb shell /data/nativetest64/ResourceObserverService_test/ResourceObserverService_test
+adb shell /data/nativetest/ResourceObserverService_test/ResourceObserverService_test
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index 17347a9..a9fd14f 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -1,17 +1,51 @@
 // service library
-cc_library_shared {
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_mediatranscoding_license",
+    ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_services_mediatranscoding_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
     name: "libmediatranscodingservice",
 
-    srcs: ["MediaTranscodingService.cpp"],
+    srcs: [
+        "MediaTranscodingService.cpp",
+        "SimulatedTranscoder.cpp",
+    ],
+
+    min_sdk_version: "29",
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
 
     shared_libs: [
+        "libandroid#31",
         "libbase",
         "libbinder_ndk",
+        "libcutils",
         "liblog",
         "libmediatranscoding",
         "libutils",
     ],
 
+    export_shared_lib_headers: [
+        "libmediatranscoding",
+    ],
+
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
@@ -31,8 +65,7 @@
 
     shared_libs: [
         "libbase",
-        // TODO(hkuang): Use libbinder_ndk
-        "libbinder",
+        "libbinder_ndk",
         "libutils",
         "liblog",
         "libbase",
@@ -40,24 +73,16 @@
         "libmediatranscodingservice",
     ],
 
+    min_sdk_version: "29",
+    apex_available: [
+        "com.android.media",
+        "test_com.android.media",
+    ],
+
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
     ],
 
-    target: {
-        android: {
-            product_variables: {
-                malloc_not_svelte: {
-                    // Scudo increases memory footprint, so only enable on
-                    // non-svelte devices.
-                    shared_libs: ["libc_scudo"],
-                },
-            },
-        },
-    },
-
-    init_rc: ["mediatranscoding.rc"],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index 82d4161..2a20981 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -16,13 +16,25 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaTranscodingService"
-#include <MediaTranscodingService.h>
+#include "MediaTranscodingService.h"
+
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
-#include <private/android_filesystem_config.h>
+#include <android/permission_manager.h>
+#include <cutils/properties.h>
+#include <media/TranscoderWrapper.h>
+#include <media/TranscodingClientManager.h>
+#include <media/TranscodingDefs.h>
+#include <media/TranscodingLogger.h>
+#include <media/TranscodingResourcePolicy.h>
+#include <media/TranscodingSessionController.h>
+#include <media/TranscodingThermalPolicy.h>
+#include <media/TranscodingUidPolicy.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
 
+#include "SimulatedTranscoder.h"
+
 namespace android {
 
 // Convenience methods for constructing binder::Status objects for error returns
@@ -31,23 +43,53 @@
             errorCode,                                \
             String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
 
-// Can MediaTranscoding service trust the caller based on the calling UID?
-// TODO(hkuang): Add MediaProvider's UID.
-static bool isTrustedCallingUid(uid_t uid) {
-    switch (uid) {
-    case AID_ROOT:  // root user
-    case AID_SYSTEM:
-    case AID_SHELL:
-    case AID_MEDIA:  // mediaserver
-        return true;
-    default:
-        return false;
-    }
-}
+static constexpr int64_t kTranscoderHeartBeatIntervalUs = 1000000LL;
 
 MediaTranscodingService::MediaTranscodingService()
-      : mTranscodingClientManager(TranscodingClientManager::getInstance()) {
+      : mUidPolicy(new TranscodingUidPolicy()),
+        mResourcePolicy(new TranscodingResourcePolicy()),
+        mThermalPolicy(new TranscodingThermalPolicy()),
+        mLogger(new TranscodingLogger()) {
     ALOGV("MediaTranscodingService is created");
+    bool simulated = property_get_bool("debug.transcoding.simulated_transcoder", false);
+    if (simulated) {
+        // Override default config params with shorter values for testing.
+        TranscodingSessionController::ControllerConfig config = {
+                .pacerBurstThresholdMs = 500,
+                .pacerBurstCountQuota = 10,
+                .pacerBurstTimeQuotaSeconds = 3,
+        };
+        mSessionController.reset(new TranscodingSessionController(
+                [](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+                        -> std::shared_ptr<TranscoderInterface> {
+                    return std::make_shared<SimulatedTranscoder>(cb);
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+    } else {
+        int32_t overrideBurstCountQuota =
+                property_get_int32("persist.transcoding.burst_count_quota", -1);
+        int32_t pacerBurstTimeQuotaSeconds =
+                property_get_int32("persist.transcoding.burst_time_quota_seconds", -1);
+        // Override default config params with properties if present.
+        TranscodingSessionController::ControllerConfig config;
+        if (overrideBurstCountQuota > 0) {
+            config.pacerBurstCountQuota = overrideBurstCountQuota;
+        }
+        if (pacerBurstTimeQuotaSeconds > 0) {
+            config.pacerBurstTimeQuotaSeconds = pacerBurstTimeQuotaSeconds;
+        }
+        mSessionController.reset(new TranscodingSessionController(
+                [logger = mLogger](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+                        -> std::shared_ptr<TranscoderInterface> {
+                    return std::make_shared<TranscoderWrapper>(cb, logger,
+                                                               kTranscoderHeartBeatIntervalUs);
+                },
+                mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+    }
+    mClientManager.reset(new TranscodingClientManager(mSessionController));
+    mUidPolicy->setCallback(mSessionController);
+    mResourcePolicy->setCallback(mSessionController);
+    mThermalPolicy->setCallback(mSessionController);
 }
 
 MediaTranscodingService::~MediaTranscodingService() {
@@ -56,6 +98,23 @@
 
 binder_status_t MediaTranscodingService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
     String8 result;
+
+    uid_t callingUid = AIBinder_getCallingUid();
+    pid_t callingPid = AIBinder_getCallingPid();
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        int32_t permissionResult;
+        if (APermissionManager_checkPermission("android.permission.DUMP", callingPid, callingUid,
+                                               &permissionResult) != PERMISSION_MANAGER_STATUS_OK ||
+            permissionResult != PERMISSION_MANAGER_PERMISSION_GRANTED) {
+            result.format(
+                    "Permission Denial: "
+                    "can't dump MediaTranscodingService from pid=%d, uid=%d\n",
+                    AIBinder_getCallingPid(), AIBinder_getCallingUid());
+            write(fd, result.string(), result.size());
+            return PERMISSION_DENIED;
+        }
+    }
+
     const size_t SIZE = 256;
     char buffer[SIZE];
 
@@ -64,7 +123,8 @@
     write(fd, result.string(), result.size());
 
     Vector<String16> args;
-    mTranscodingClientManager.dumpAllClients(fd, args);
+    mClientManager->dumpAllClients(fd, args);
+    mSessionController->dumpAllSessions(fd, args);
     return OK;
 }
 
@@ -72,126 +132,39 @@
 void MediaTranscodingService::instantiate() {
     std::shared_ptr<MediaTranscodingService> service =
             ::ndk::SharedRefBase::make<MediaTranscodingService>();
-    binder_status_t status =
-            AServiceManager_addService(service->asBinder().get(), getServiceName());
-    if (status != STATUS_OK) {
-        return;
+    if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+        // Once the service is started, we want it to stay even if the client side perishes.
+        AServiceManager_forceLazyServicesPersist(true /*persist*/);
+        (void)AServiceManager_registerLazyService(service->asBinder().get(), getServiceName());
     }
 }
 
 Status MediaTranscodingService::registerClient(
-        const std::shared_ptr<ITranscodingServiceClient>& in_client,
-        const std::string& in_opPackageName, int32_t in_clientUid, int32_t in_clientPid,
-        int32_t* _aidl_return) {
-    if (in_client == nullptr) {
-        ALOGE("Client can not be null");
-        *_aidl_return = kInvalidJobId;
-        return Status::fromServiceSpecificError(ERROR_ILLEGAL_ARGUMENT);
-    }
-
-    int32_t callingPid = AIBinder_getCallingPid();
-    int32_t callingUid = AIBinder_getCallingUid();
-
-    // Check if we can trust clientUid. Only privilege caller could forward the uid on app client's behalf.
-    if (in_clientUid == USE_CALLING_UID) {
-        in_clientUid = callingUid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("MediaTranscodingService::registerClient failed (calling PID %d, calling UID %d) "
-              "rejected "
-              "(don't trust clientUid %d)",
-              in_clientPid, in_clientUid, in_clientUid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                "Untrusted caller (calling PID %d, UID %d) trying to "
-                                "register client",
-                                in_clientPid, in_clientUid);
-    }
-
-    // Check if we can trust clientPid. Only privilege caller could forward the pid on app client's behalf.
-    if (in_clientPid == USE_CALLING_PID) {
-        in_clientPid = callingPid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("MediaTranscodingService::registerClient client failed (calling PID %d, calling UID "
-              "%d) rejected "
-              "(don't trust clientPid %d)",
-              in_clientPid, in_clientUid, in_clientPid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                "Untrusted caller (calling PID %d, UID %d) trying to "
-                                "register client",
-                                in_clientPid, in_clientUid);
-    }
-
-    // We know the clientId must be equal to its pid as we assigned client's pid as its clientId.
-    int32_t clientId = in_clientPid;
-
-    // Checks if the client already registers.
-    if (mTranscodingClientManager.isClientIdRegistered(clientId)) {
-        return Status::fromServiceSpecificError(ERROR_ALREADY_EXISTS);
+        const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+        const std::string& in_clientName, const std::string& in_opPackageName,
+        std::shared_ptr<ITranscodingClient>* _aidl_return) {
+    if (in_callback == nullptr) {
+        *_aidl_return = nullptr;
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Client callback cannot be null!");
     }
 
     // Creates the client and uses its process id as client id.
-    std::unique_ptr<TranscodingClientManager::ClientInfo> newClient =
-            std::make_unique<TranscodingClientManager::ClientInfo>(
-                    in_client, clientId, in_clientPid, in_clientUid, in_opPackageName);
-    status_t err = mTranscodingClientManager.addClient(std::move(newClient));
+    std::shared_ptr<ITranscodingClient> newClient;
+
+    status_t err =
+            mClientManager->addClient(in_callback, in_clientName, in_opPackageName, &newClient);
     if (err != OK) {
-        *_aidl_return = kInvalidClientId;
+        *_aidl_return = nullptr;
         return STATUS_ERROR_FMT(err, "Failed to add client to TranscodingClientManager");
     }
 
-    ALOGD("Assign client: %s pid: %d, uid: %d with id: %d", in_opPackageName.c_str(), in_clientPid,
-          in_clientUid, clientId);
-
-    *_aidl_return = clientId;
-    return Status::ok();
-}
-
-Status MediaTranscodingService::unregisterClient(int32_t clientId, bool* _aidl_return) {
-    ALOGD("unregisterClient id: %d", clientId);
-    int32_t callingUid = AIBinder_getCallingUid();
-    int32_t callingPid = AIBinder_getCallingPid();
-
-    // Only the client with clientId or the trusted caller could unregister the client.
-    if (callingPid != clientId) {
-        if (!isTrustedCallingUid(callingUid)) {
-            ALOGE("Untrusted caller (calling PID %d, UID %d) trying to "
-                  "unregister client with id: %d",
-                  callingUid, callingPid, clientId);
-            *_aidl_return = true;
-            return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                                    "Untrusted caller (calling PID %d, UID %d) trying to "
-                                    "unregister client with id: %d",
-                                    callingUid, callingPid, clientId);
-        }
-    }
-
-    *_aidl_return = (mTranscodingClientManager.removeClient(clientId) == OK);
+    *_aidl_return = newClient;
     return Status::ok();
 }
 
 Status MediaTranscodingService::getNumOfClients(int32_t* _aidl_return) {
     ALOGD("MediaTranscodingService::getNumOfClients");
-    *_aidl_return = mTranscodingClientManager.getNumOfClients();
-    return Status::ok();
-}
-
-Status MediaTranscodingService::submitRequest(int32_t /*clientId*/,
-                                              const TranscodingRequestParcel& /*request*/,
-                                              TranscodingJobParcel* /*job*/,
-                                              int32_t* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::cancelJob(int32_t /*in_clientId*/, int32_t /*in_jobId*/,
-                                          bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
-    return Status::ok();
-}
-
-Status MediaTranscodingService::getJobWithId(int32_t /*in_jobId*/,
-                                             TranscodingJobParcel* /*out_job*/,
-                                             bool* /*_aidl_return*/) {
-    // TODO(hkuang): Add implementation.
+    *_aidl_return = mClientManager->getNumOfClients();
     return Status::ok();
 }
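The constructor hunk above threads two pacer tuning knobs in through system properties and
keeps the controller defaults whenever a property is unset or non-positive. A compact
standalone sketch of that pattern; the struct and its default values are illustrative only,
not the real TranscodingSessionController::ControllerConfig:

    #include <cstdint>
    #include <cutils/properties.h>

    // Illustrative config holder; the real defaults live in ControllerConfig.
    struct PacerConfig {
        int32_t burstCountQuota = 10;
        int32_t burstTimeQuotaSeconds = 3;
    };

    static PacerConfig loadPacerConfig() {
        PacerConfig config;
        // property_get_int32() returns the fallback (-1) when the key is unset, so the
        // default survives unless a positive override is present.
        int32_t countQuota = property_get_int32("persist.transcoding.burst_count_quota", -1);
        if (countQuota > 0) {
            config.burstCountQuota = countQuota;
        }
        int32_t timeQuota =
                property_get_int32("persist.transcoding.burst_time_quota_seconds", -1);
        if (timeQuota > 0) {
            config.burstTimeQuotaSeconds = timeQuota;
        }
        return config;
    }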
 
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index cc69727..12be131 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -19,19 +19,25 @@
 
 #include <aidl/android/media/BnMediaTranscodingService.h>
 #include <binder/IServiceManager.h>
-#include <media/TranscodingClientManager.h>
 
 namespace android {
 
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::BnMediaTranscodingService;
-using ::aidl::android::media::ITranscodingServiceClient;
-using ::aidl::android::media::TranscodingJobParcel;
+using ::aidl::android::media::ITranscodingClient;
+using ::aidl::android::media::ITranscodingClientCallback;
 using ::aidl::android::media::TranscodingRequestParcel;
+using ::aidl::android::media::TranscodingSessionParcel;
+class TranscodingClientManager;
+class TranscodingLogger;
+class TranscodingSessionController;
+class UidPolicyInterface;
+class ResourcePolicyInterface;
+class ThermalPolicyInterface;
 
 class MediaTranscodingService : public BnMediaTranscodingService {
 public:
-    static constexpr int32_t kInvalidJobId = -1;
+    static constexpr int32_t kInvalidSessionId = -1;
     static constexpr int32_t kInvalidClientId = -1;
 
     MediaTranscodingService();
@@ -41,22 +47,12 @@
 
     static const char* getServiceName() { return "media.transcoding"; }
 
-    Status registerClient(const std::shared_ptr<ITranscodingServiceClient>& in_client,
-                          const std::string& in_opPackageName, int32_t in_clientUid,
-                          int32_t in_clientPid, int32_t* _aidl_return) override;
-
-    Status unregisterClient(int32_t clientId, bool* _aidl_return) override;
+    Status registerClient(const std::shared_ptr<ITranscodingClientCallback>& in_callback,
+                          const std::string& in_clientName, const std::string& in_opPackageName,
+                          std::shared_ptr<ITranscodingClient>* _aidl_return) override;
 
     Status getNumOfClients(int32_t* _aidl_return) override;
 
-    Status submitRequest(int32_t in_clientId, const TranscodingRequestParcel& in_request,
-                         TranscodingJobParcel* out_job, int32_t* _aidl_return) override;
-
-    Status cancelJob(int32_t in_clientId, int32_t in_jobId, bool* _aidl_return) override;
-
-    Status getJobWithId(int32_t in_jobId, TranscodingJobParcel* out_job,
-                        bool* _aidl_return) override;
-
     virtual inline binder_status_t dump(int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
 private:
@@ -64,7 +60,12 @@
 
     mutable std::mutex mServiceLock;
 
-    TranscodingClientManager& mTranscodingClientManager;
+    std::shared_ptr<UidPolicyInterface> mUidPolicy;
+    std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
+    std::shared_ptr<ThermalPolicyInterface> mThermalPolicy;
+    std::shared_ptr<TranscodingLogger> mLogger;
+    std::shared_ptr<TranscodingSessionController> mSessionController;
+    std::shared_ptr<TranscodingClientManager> mClientManager;
 };
 
 }  // namespace android
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
new file mode 100644
index 0000000..e80dbc5
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimulatedTranscoder"
+#include "SimulatedTranscoder.h"
+
+#include <utils/Log.h>
+
+#include <thread>
+
+namespace android {
+
+//static
+const char* SimulatedTranscoder::toString(Event::Type type) {
+    switch (type) {
+    case Event::Start:
+        return "Start";
+    case Event::Pause:
+        return "Pause";
+    case Event::Resume:
+        return "Resume";
+    case Event::Stop:
+        return "Stop";
+    case Event::Finished:
+        return "Finished";
+    case Event::Failed:
+        return "Failed";
+    case Event::Abandon:
+        return "Abandon";
+    default:
+        break;
+    }
+    return "(unknown)";
+}
+
+SimulatedTranscoder::SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb)
+      : mCallback(cb), mLooperReady(false) {
+    ALOGV("SimulatedTranscoder CTOR: %p", this);
+}
+
+SimulatedTranscoder::~SimulatedTranscoder() {
+    ALOGV("SimulatedTranscoder DTOR: %p", this);
+}
+
+void SimulatedTranscoder::start(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+        uid_t /*callingUid*/,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    {
+        auto lock = std::scoped_lock(mLock);
+        int64_t processingTimeUs = kSessionDurationUs;
+        if (request.testConfig.has_value() && request.testConfig->processingTotalTimeMs > 0) {
+            processingTimeUs = request.testConfig->processingTotalTimeMs * 1000;
+        }
+        ALOGI("%s: session {%lld, %d}: processingTimeUs: %lld", __FUNCTION__, (long long)clientId,
+              sessionId, (long long)processingTimeUs);
+        SessionKeyType key = std::make_pair(clientId, sessionId);
+        mRemainingTimeMap.emplace(key, processingTimeUs);
+    }
+
+    queueEvent(Event::Start, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onStarted(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::pause(ClientIdType clientId, SessionIdType sessionId) {
+    queueEvent(Event::Pause, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onPaused(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::resume(
+        ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& /*request*/,
+        uid_t /*callingUid*/,
+        const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
+    queueEvent(Event::Resume, clientId, sessionId, [=] {
+        auto callback = mCallback.lock();
+        if (callback != nullptr) {
+            callback->onResumed(clientId, sessionId);
+        }
+    });
+}
+
+void SimulatedTranscoder::stop(ClientIdType clientId, SessionIdType sessionId, bool abandon) {
+    queueEvent(Event::Stop, clientId, sessionId, nullptr);
+
+    if (abandon) {
+        queueEvent(Event::Abandon, 0, 0, nullptr);
+    }
+}
+
+void SimulatedTranscoder::queueEvent(Event::Type type, ClientIdType clientId,
+                                     SessionIdType sessionId, std::function<void()> runnable) {
+    ALOGV("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)clientId, sessionId,
+          toString(type));
+
+    auto lock = std::scoped_lock(mLock);
+
+    if (!mLooperReady) {
+        // A shared_ptr to ourselves is captured on the thread's stack, so that the
+        // SimulatedTranscoder object doesn't go away until the thread exits. When a watchdog
+        // timeout happens, this allows the session controller to release its reference to the
+        // transcoder object without blocking on the thread's exit.
+        std::thread([owner = shared_from_this()]() { owner->threadLoop(); }).detach();
+        mLooperReady = true;
+    }
+
+    mQueue.push_back({type, clientId, sessionId, runnable});
+    mCondition.notify_one();
+}
+
+void SimulatedTranscoder::threadLoop() {
+    bool running = false;
+    std::chrono::steady_clock::time_point lastRunningTime;
+    Event lastRunningEvent;
+
+    std::unique_lock<std::mutex> lock(mLock);
+    // SimulatedTranscoder currently lives in the transcoding service process and lasts
+    // as long as MediaTranscodingService itself.
+    while (true) {
+        // Wait for the next event.
+        while (mQueue.empty()) {
+            if (!running) {
+                mCondition.wait(lock);
+                continue;
+            }
+            // If running, wait for the remaining life of this session. Report finish if timed out.
+            SessionKeyType key =
+                    std::make_pair(lastRunningEvent.clientId, lastRunningEvent.sessionId);
+            std::cv_status status = mCondition.wait_for(lock, mRemainingTimeMap[key]);
+            if (status == std::cv_status::timeout) {
+                running = false;
+
+                auto callback = mCallback.lock();
+                if (callback != nullptr) {
+                    mRemainingTimeMap.erase(key);
+
+                    lock.unlock();
+                    callback->onFinish(lastRunningEvent.clientId, lastRunningEvent.sessionId);
+                    lock.lock();
+                }
+            } else {
+                // Advance the last running time and remaining time. This guards against
+                // bad events (which will be ignored) and spurious wakeups; in either case
+                // we don't want to wait for the same duration again.
+                auto now = std::chrono::steady_clock::now();
+                mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+                        now - lastRunningTime);
+                lastRunningTime = now;
+            }
+        }
+
+        // Handle the events, adjust state and send updates to client accordingly.
+        Event event = *mQueue.begin();
+        mQueue.pop_front();
+
+        ALOGD("%s: session {%lld, %d}: %s", __FUNCTION__, (long long)event.clientId,
+              event.sessionId, toString(event.type));
+
+        if (event.type == Event::Abandon) {
+            break;
+        }
+
+        SessionKeyType key = std::make_pair(event.clientId, event.sessionId);
+        if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
+            running = true;
+            lastRunningTime = std::chrono::steady_clock::now();
+            lastRunningEvent = event;
+            ALOGV("%s: session {%lld, %d}: remaining time: %lld", __FUNCTION__,
+                  (long long)event.clientId, event.sessionId,
+                  (long long)mRemainingTimeMap[key].count());
+
+        } else if (running && (event.type == Event::Pause || event.type == Event::Stop)) {
+            running = false;
+            if (event.type == Event::Stop) {
+                mRemainingTimeMap.erase(key);
+            } else {
+                mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+                        std::chrono::steady_clock::now() - lastRunningTime);
+            }
+        } else {
+            ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
+                  (long long)event.clientId, event.sessionId, toString(event.type));
+            continue;
+        }
+
+        if (event.runnable != nullptr) {
+            lock.unlock();
+            event.runnable();
+            lock.lock();
+        }
+    }
+}
+
+}  // namespace android
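queueEvent() above lazily starts a detached looper thread and hands it a shared_ptr to the
transcoder, so the object cannot be destroyed while the loop is still draining events. A
standalone sketch of that keep-alive pattern; the class and method names are illustrative:

    #include <memory>
    #include <thread>

    class Worker : public std::enable_shared_from_this<Worker> {
    public:
        void start() {
            // The lambda's copy of the shared_ptr keeps *this alive until loop()
            // returns, even if every external reference is dropped in the meantime.
            std::thread([self = shared_from_this()] { self->loop(); }).detach();
        }

    private:
        void loop() {
            // Drain events until an Abandon-style event arrives, then return; the
            // captured shared_ptr is released and the object may be destroyed.
        }
    };

Note that start() must be called on an instance already owned by a std::shared_ptr;
otherwise shared_from_this() throws std::bad_weak_ptr.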
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
new file mode 100644
index 0000000..58e2e30
--- /dev/null
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+#define ANDROID_MEDIA_SIMULATED_TRANSCODER_H
+
+#include <android-base/thread_annotations.h>
+#include <media/TranscoderInterface.h>
+
+#include <list>
+#include <map>
+#include <mutex>
+
+namespace android {
+
+/**
+ * SimulatedTranscoder is currently used to instantiate MediaTranscodingService
+ * on the service side for testing, so that we can actually exercise the IPC calls of
+ * MediaTranscodingService and expose issues that are observable only over IPC.
+ * SimulatedTranscoder is used when useSimulatedTranscoder in TranscodingTestConfig
+ * is set to true.
+ *
+ * SimulatedTranscoder simulates session execution by reporting finish after kSessionDurationUs.
+ * Session lifecycle events are reported via progress updates with special progress
+ * numbers (equal to the Event's type).
+ */
+class SimulatedTranscoder : public TranscoderInterface,
+                            public std::enable_shared_from_this<SimulatedTranscoder> {
+public:
+    struct Event {
+        enum Type { NoEvent, Start, Pause, Resume, Stop, Finished, Failed, Abandon } type;
+        ClientIdType clientId;
+        SessionIdType sessionId;
+        std::function<void()> runnable;
+    };
+
+    static constexpr int64_t kSessionDurationUs = 1000000;
+
+    SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb);
+    ~SimulatedTranscoder();
+
+    // TranscoderInterface
+    void start(ClientIdType clientId, SessionIdType sessionId,
+               const TranscodingRequestParcel& request, uid_t callingUid,
+               const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void pause(ClientIdType clientId, SessionIdType sessionId) override;
+    void resume(ClientIdType clientId, SessionIdType sessionId,
+                const TranscodingRequestParcel& request, uid_t callingUid,
+                const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
+    void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
+    // ~TranscoderInterface
+
+private:
+    std::weak_ptr<TranscoderCallbackInterface> mCallback;
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    std::list<Event> mQueue GUARDED_BY(mLock);
+    bool mLooperReady;
+
+    using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
+    // map of session's remaining time in microsec.
+    std::map<SessionKeyType, std::chrono::microseconds> mRemainingTimeMap;
+
+    static const char* toString(Event::Type type);
+    void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
+                    std::function<void()> runnable);
+    void threadLoop();
+};
+
+}  // namespace android
+
+#endif  // ANDROID_MEDIA_SIMULATED_TRANSCODER_H
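SimulatedTranscoder::start() above honors request.testConfig->processingTotalTimeMs when it
is present and positive, and otherwise falls back to kSessionDurationUs (one second). A
hedged sketch of how a test-side request could shorten the simulated session; the header
and field names are inferred from their usage elsewhere in this patch and should be treated
as assumptions:

    #include <aidl/android/media/TranscodingRequestParcel.h>

    using ::aidl::android::media::TranscodingRequestParcel;
    using ::aidl::android::media::TranscodingTestConfig;

    TranscodingRequestParcel request;
    TranscodingTestConfig testConfig;
    testConfig.processingTotalTimeMs = 300;  // finish is reported after roughly 300 ms
    request.testConfig = testConfig;         // left unset, kSessionDurationUs (1 s) applies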
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/services/mediatranscoding/main_mediatranscodingservice.cpp
index 7d862e6..14c568e 100644
--- a/services/mediatranscoding/main_mediatranscodingservice.cpp
+++ b/services/mediatranscoding/main_mediatranscodingservice.cpp
@@ -15,8 +15,7 @@
  */
 
 #include <android-base/logging.h>
-#include <binder/IPCThreadState.h>
-#include <binder/ProcessState.h>
+#include <android/binder_process.h>
 
 #include "MediaTranscodingService.h"
 
@@ -25,12 +24,9 @@
 int main(int argc __unused, char** argv) {
     LOG(INFO) << "media transcoding service starting";
 
-    // TODO(hkuang): Start the service with libbinder_ndk.
     strcpy(argv[0], "media.transcoding");
-    sp<ProcessState> proc(ProcessState::self());
-    sp<IServiceManager> sm = defaultServiceManager();
     android::MediaTranscodingService::instantiate();
 
-    ProcessState::self()->startThreadPool();
-    IPCThreadState::self()->joinThreadPool();
+    ABinderProcess_startThreadPool();
+    ABinderProcess_joinThreadPool();
 }
diff --git a/services/mediatranscoding/mediatranscoding.rc b/services/mediatranscoding/mediatranscoding.rc
deleted file mode 100644
index 5bfef59..0000000
--- a/services/mediatranscoding/mediatranscoding.rc
+++ /dev/null
@@ -1,6 +0,0 @@
-service media.transcoding /system/bin/mediatranscoding
-    class main
-    user media
-    group media
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index e0e040c..cb180ec 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -1,5 +1,16 @@
 // Build the unit tests for MediaTranscodingService
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediatranscoding_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_mediatranscoding_license",
+    ],
+}
+
 cc_defaults {
     name: "mediatranscodingservice_test_defaults",
 
@@ -14,22 +25,47 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbinder",
         "libbinder_ndk",
         "liblog",
         "libutils",
-        "libmediatranscodingservice",
+        "libcutils",
     ],
 
     static_libs: [
         "mediatranscoding_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk_platform",
+        "libmediatranscodingservice",
+    ],
+
+    required: [
+        "TranscodingUidPolicy_TestAppA",
+        "TranscodingUidPolicy_TestAppB",
+        "TranscodingUidPolicy_TestAppC",
     ],
 }
 
-// MediaTranscodingService unit test
+// MediaTranscodingService unit test using simulated transcoder
 cc_test {
-    name: "mediatranscodingservice_tests",
+    name: "mediatranscodingservice_simulated_tests",
     defaults: ["mediatranscodingservice_test_defaults"],
 
-    srcs: ["mediatranscodingservice_tests.cpp"],
-}
\ No newline at end of file
+    srcs: ["mediatranscodingservice_simulated_tests.cpp"],
+}
+
+// MediaTranscodingService unit test using real transcoder
+cc_test {
+    name: "mediatranscodingservice_real_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_real_tests.cpp"],
+}
+
+// MediaTranscodingService unit test related to resource management
+cc_test {
+    name: "mediatranscodingservice_resource_tests",
+    defaults: ["mediatranscodingservice_test_defaults"],
+
+    srcs: ["mediatranscodingservice_resource_tests.cpp"],
+}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
new file mode 100644
index 0000000..0cb2fad
--- /dev/null
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -0,0 +1,556 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <fcntl.h>
+#include <gtest/gtest.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnTranscodingClientCallback;
+using aidl::android::media::IMediaTranscodingService;
+using aidl::android::media::ITranscodingClient;
+using aidl::android::media::ITranscodingClientCallback;
+using aidl::android::media::TranscodingRequestParcel;
+using aidl::android::media::TranscodingSessionParcel;
+using aidl::android::media::TranscodingSessionPriority;
+using aidl::android::media::TranscodingTestConfig;
+using aidl::android::media::TranscodingVideoTrackFormat;
+
+constexpr int32_t kClientUseCallingPid = IMediaTranscodingService::USE_CALLING_PID;
+
+constexpr uid_t kClientUid = 5000;
+#define UID(n) (kClientUid + (n))
+
+constexpr pid_t kClientPid = 10000;
+#define PID(n) (kClientPid + (n))
+
+constexpr int32_t kClientId = 0;
+#define CLIENT(n) (kClientId + (n))
+
+constexpr const char* kClientName = "TestClient";
+constexpr const char* kClientPackageA = "com.android.tests.transcoding.testapp.A";
+constexpr const char* kClientPackageB = "com.android.tests.transcoding.testapp.B";
+constexpr const char* kClientPackageC = "com.android.tests.transcoding.testapp.C";
+
+constexpr const char* kTestActivityName = "/com.android.tests.transcoding.MainActivity";
+
+static status_t getUidForPackage(String16 packageName, userid_t userId, /*inout*/ uid_t& uid) {
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+struct ShellHelper {
+    static bool RunCmd(const std::string& cmdStr) {
+        int ret = system(cmdStr.c_str());
+        if (ret != 0) {
+            ALOGE("Failed to run cmd: %s, exitcode %d", cmdStr.c_str(), ret);
+            return false;
+        }
+        return true;
+    }
+
+    static bool Start(const char* packageName, const char* activityName) {
+        return RunCmd("am start -W " + std::string(packageName) + std::string(activityName) +
+                      " &> /dev/null");
+    }
+
+    static bool Stop(const char* packageName) {
+        return RunCmd("am force-stop " + std::string(packageName));
+    }
+};
+
+struct EventTracker {
+    struct Event {
+        enum { NoEvent, Start, Pause, Resume, Finished, Failed } type;
+        int64_t clientId;
+        int32_t sessionId;
+    };
+
+#define DECLARE_EVENT(action)                                  \
+    static Event action(int32_t clientId, int32_t sessionId) { \
+        return {Event::action, clientId, sessionId};           \
+    }
+
+    DECLARE_EVENT(Start);
+    DECLARE_EVENT(Pause);
+    DECLARE_EVENT(Resume);
+    DECLARE_EVENT(Finished);
+    DECLARE_EVENT(Failed);
+
+    static constexpr Event NoEvent = {Event::NoEvent, 0, 0};
+
+    static std::string toString(const Event& event) {
+        std::string eventStr;
+        switch (event.type) {
+        case Event::Start:
+            eventStr = "Start";
+            break;
+        case Event::Pause:
+            eventStr = "Pause";
+            break;
+        case Event::Resume:
+            eventStr = "Resume";
+            break;
+        case Event::Finished:
+            eventStr = "Finished";
+            break;
+        case Event::Failed:
+            eventStr = "Failed";
+            break;
+        default:
+            return "NoEvent";
+        }
+        return "session {" + std::to_string(event.clientId) + ", " +
+               std::to_string(event.sessionId) + "}: " + eventStr;
+    }
+
+    // Pop 1 event from front, wait for up to timeoutUs if empty.
+    const Event& pop(int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        if (mEventQueue.empty() && timeoutUs > 0) {
+            mCondition.wait_for(lock, std::chrono::microseconds(timeoutUs));
+        }
+
+        if (mEventQueue.empty()) {
+            mPoppedEvent = NoEvent;
+        } else {
+            mPoppedEvent = *mEventQueue.begin();
+            mEventQueue.pop_front();
+        }
+
+        return mPoppedEvent;
+    }
+
+    bool waitForSpecificEventAndPop(const Event& target, std::list<Event>* outEvents,
+                                    int64_t timeoutUs = 0) {
+        std::unique_lock lock(mLock);
+
+        auto startTime = std::chrono::system_clock::now();
+        int64_t remainingUs = timeoutUs;
+
+        std::list<Event>::iterator it;
+        while (((it = std::find(mEventQueue.begin(), mEventQueue.end(), target)) ==
+                mEventQueue.end()) &&
+               remainingUs > 0) {
+            std::cv_status status =
+                    mCondition.wait_for(lock, std::chrono::microseconds(remainingUs));
+            if (status == std::cv_status::timeout) {
+                break;
+            }
+            std::chrono::microseconds elapsedTime = std::chrono::system_clock::now() - startTime;
+            remainingUs = timeoutUs - elapsedTime.count();
+        }
+
+        if (it == mEventQueue.end()) {
+            return false;
+        }
+        *outEvents = std::list<Event>(mEventQueue.begin(), std::next(it));
+        mEventQueue.erase(mEventQueue.begin(), std::next(it));
+        return true;
+    }
+
+    // Push 1 event to back.
+    void append(const Event& event,
+                const TranscodingErrorCode err = TranscodingErrorCode::kNoError) {
+        ALOGD("%s", toString(event).c_str());
+
+        std::unique_lock lock(mLock);
+
+        mEventQueue.push_back(event);
+        if (err != TranscodingErrorCode::kNoError) {
+            mLastErrQueue.push_back(err);
+        }
+        mCondition.notify_one();
+    }
+
+    void updateProgress(int progress) {
+        std::unique_lock lock(mLock);
+        mLastProgress = progress;
+        mUpdateCount++;
+    }
+
+    int getUpdateCount(int* lastProgress) {
+        std::unique_lock lock(mLock);
+        *lastProgress = mLastProgress;
+        return mUpdateCount;
+    }
+
+    TranscodingErrorCode getLastError() {
+        std::unique_lock lock(mLock);
+        if (mLastErrQueue.empty()) {
+            return TranscodingErrorCode::kNoError;
+        }
+        TranscodingErrorCode err = mLastErrQueue.front();
+        mLastErrQueue.pop_front();
+        return err;
+    }
+
+private:
+    std::mutex mLock;
+    std::condition_variable mCondition;
+    Event mPoppedEvent;
+    std::list<Event> mEventQueue;
+    std::list<TranscodingErrorCode> mLastErrQueue;
+    int mUpdateCount = 0;
+    int mLastProgress = -1;
+};
+
+// Operators for GTest macros.
+bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
+    return lhs.type == rhs.type && lhs.clientId == rhs.clientId && lhs.sessionId == rhs.sessionId;
+}
+
+std::ostream& operator<<(std::ostream& str, const EventTracker::Event& v) {
+    str << EventTracker::toString(v);
+    return str;
+}
+
+static constexpr bool success = true;
+static constexpr bool fail = false;
+
+struct TestClientCallback : public BnTranscodingClientCallback,
+                            public EventTracker,
+                            public std::enable_shared_from_this<TestClientCallback> {
+    TestClientCallback(const char* packageName, int32_t id)
+          : mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
+        ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
+
+        // Use package uid if that's available.
+        uid_t packageUid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, packageUid) == NO_ERROR) {
+            mClientUid = packageUid;
+        }
+    }
+
+    virtual ~TestClientCallback() { ALOGI("TestClientCallback %d destroyed", mClientId); }
+
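+    // Open in_fileUri on behalf of the service, with flags derived from in_mode ("w",
+    // "rw", or read-only otherwise), and return the resulting fd. A failed open is
+    // reported back as an invalid (negative) fd.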
+    Status openFileDescriptor(const std::string& in_fileUri, const std::string& in_mode,
+                              ::ndk::ScopedFileDescriptor* _aidl_return) override {
+        ALOGD("@@@ openFileDescriptor: %s", in_fileUri.c_str());
+        int fd;
+        if (in_mode == "w" || in_mode == "rw") {
+            int kOpenFlags;
+            if (in_mode == "w") {
+                // Write-only, create file if non-existent, truncate existing file.
+                kOpenFlags = O_WRONLY | O_CREAT | O_TRUNC;
+            } else {
+                // Read-Write, create if non-existent, no truncate (service will truncate if needed)
+                kOpenFlags = O_RDWR | O_CREAT;
+            }
+            // User R+W permission.
+            constexpr int kFileMode = S_IRUSR | S_IWUSR;
+            fd = open(in_fileUri.c_str(), kOpenFlags, kFileMode);
+        } else {
+            fd = open(in_fileUri.c_str(), O_RDONLY);
+        }
+        _aidl_return->set(fd);
+        return Status::ok();
+    }
+
+    Status onTranscodingStarted(int32_t in_sessionId) override {
+        append(EventTracker::Start(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingPaused(int32_t in_sessionId) override {
+        append(EventTracker::Pause(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingResumed(int32_t in_sessionId) override {
+        append(EventTracker::Resume(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFinished(
+            int32_t in_sessionId,
+            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
+        append(Finished(mClientId, in_sessionId));
+        return Status::ok();
+    }
+
+    Status onTranscodingFailed(int32_t in_sessionId,
+                               ::aidl::android::media::TranscodingErrorCode in_errorCode) override {
+        append(Failed(mClientId, in_sessionId), in_errorCode);
+        return Status::ok();
+    }
+
+    Status onAwaitNumberOfSessionsChanged(int32_t /* in_sessionId */,
+                                          int32_t /* in_oldAwaitNumber */,
+                                          int32_t /* in_newAwaitNumber */) override {
+        return Status::ok();
+    }
+
+    Status onProgressUpdate(int32_t /* in_sessionId */, int32_t in_progress) override {
+        updateProgress(in_progress);
+        return Status::ok();
+    }
+
+    Status registerClient(const char* packageName,
+                          const std::shared_ptr<IMediaTranscodingService>& service) {
+        // Override the default uid if the package uid is found.
+        uid_t uid;
+        if (getUidForPackage(String16(packageName), 0 /*userId*/, uid) == NO_ERROR) {
+            mClientUid = uid;
+        }
+
+        ALOGD("registering %s with uid %d", packageName, mClientUid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status =
+                service->registerClient(shared_from_this(), kClientName, packageName, &client);
+
+        mClient = status.isOk() ? client : nullptr;
+        return status;
+    }
+
+    Status unregisterClient() {
+        Status status;
+        if (mClient != nullptr) {
+            status = mClient->unregister();
+            mClient = nullptr;
+        }
+        return status;
+    }
+
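+    // The `expectation` template parameter of the helpers below selects whether the call
+    // is expected to succeed (`success`, the default) or to be rejected (`fail`).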
+    template <bool expectation = success>
+    bool submit(int32_t sessionId, const char* sourceFilePath, const char* destinationFilePath,
+                TranscodingSessionPriority priority = TranscodingSessionPriority::kNormal,
+                int bitrateBps = -1, int overridePid = -1, int overrideUid = -1,
+                int sessionDurationMs = -1) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingRequestParcel request;
+        TranscodingSessionParcel session;
+
+        request.sourceFilePath = sourceFilePath;
+        request.destinationFilePath = destinationFilePath;
+        request.priority = priority;
+        request.clientPid = (overridePid == -1) ? mClientPid : overridePid;
+        request.clientUid = (overrideUid == -1) ? mClientUid : overrideUid;
+        request.clientPackageName = (overrideUid == -1) ? mPackageName : "";
+        if (bitrateBps > 0) {
+            request.requestedVideoTrackFormat.emplace(TranscodingVideoTrackFormat());
+            request.requestedVideoTrackFormat->bitrateBps = bitrateBps;
+        }
+        if (sessionDurationMs > 0) {
+            request.isForTesting = true;
+            request.testConfig.emplace(TranscodingTestConfig());
+            request.testConfig->processingTotalTimeMs = sessionDurationMs;
+        }
+        Status status = mClient->submitRequest(request, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || session.sessionId == sessionId);
+    }
+
+    template <bool expectation = success>
+    bool cancel(int32_t sessionId) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = mClient->cancelSession(sessionId, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getSession(int32_t sessionId, const char* sourceFilePath,
+                    const char* destinationFilePath) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        TranscodingSessionParcel session;
+        Status status = mClient->getSessionWithId(sessionId, &session, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+        if (shouldSucceed) {
+            EXPECT_EQ(session.sessionId, sessionId);
+            EXPECT_EQ(session.request.sourceFilePath, sourceFilePath);
+        }
+
+        return status.isOk() && (result == shouldSucceed) &&
+               (!shouldSucceed || (session.sessionId == sessionId &&
+                                   session.request.sourceFilePath == sourceFilePath &&
+                                   session.request.destinationFilePath == destinationFilePath));
+    }
+
+    template <bool expectation = success>
+    bool addClientUid(int32_t sessionId, uid_t clientUid) {
+        constexpr bool shouldSucceed = (expectation == success);
+        bool result;
+        Status status = mClient->addClientUid(sessionId, clientUid, &result);
+
+        EXPECT_TRUE(status.isOk());
+        EXPECT_EQ(result, shouldSucceed);
+
+        return status.isOk() && (result == shouldSucceed);
+    }
+
+    template <bool expectation = success>
+    bool getClientUids(int32_t sessionId, std::vector<int32_t>* clientUids) {
+        constexpr bool shouldSucceed = (expectation == success);
+        std::optional<std::vector<int32_t>> aidl_return;
+        Status status = mClient->getClientUids(sessionId, &aidl_return);
+
+        EXPECT_TRUE(status.isOk());
+        // Named hasResult to avoid shadowing the file-scope `success` constant.
+        bool hasResult = (aidl_return != std::nullopt);
+        if (hasResult) {
+            *clientUids = *aidl_return;
+        }
+        EXPECT_EQ(hasResult, shouldSucceed);
+
+        return status.isOk() && (hasResult == shouldSucceed);
+    }
+
+    int32_t mClientId;
+    pid_t mClientPid;
+    uid_t mClientUid;
+    std::string mPackageName;
+    std::shared_ptr<ITranscodingClient> mClient;
+};
+
+class MediaTranscodingServiceTestBase : public ::testing::Test {
+public:
+    MediaTranscodingServiceTestBase() { ALOGI("MediaTranscodingServiceTestBase created"); }
+
+    virtual ~MediaTranscodingServiceTestBase() {
+        ALOGI("MediaTranscodingServiceTestBase destroyed");
+    }
+
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.transcoding"));
+        mService = IMediaTranscodingService::fromBinder(binder);
+        if (mService == nullptr) {
+            ALOGE("Failed to connect to the media.trascoding service.");
+            return;
+        }
+
+        mClient1 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageA, 1);
+        mClient2 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageB, 2);
+        mClient3 = ::ndk::SharedRefBase::make<TestClientCallback>(kClientPackageC, 3);
+    }
+
+    Status registerOneClient(const std::shared_ptr<TestClientCallback>& callback) {
+        ALOGD("registering %s with uid %d", callback->mPackageName.c_str(), callback->mClientUid);
+
+        std::shared_ptr<ITranscodingClient> client;
+        Status status =
+                mService->registerClient(callback, kClientName, callback->mPackageName, &client);
+
+        if (status.isOk()) {
+            callback->mClient = client;
+        } else {
+            callback->mClient = nullptr;
+        }
+        return status;
+    }
+
+    void registerMultipleClients() {
+        // Register 3 clients.
+        EXPECT_TRUE(registerOneClient(mClient1).isOk());
+        EXPECT_TRUE(registerOneClient(mClient2).isOk());
+        EXPECT_TRUE(registerOneClient(mClient3).isOk());
+
+        // Check the number of clients.
+        int32_t numOfClients;
+        Status status = mService->getNumOfClients(&numOfClients);
+        EXPECT_TRUE(status.isOk());
+        EXPECT_GE(numOfClients, 3);
+    }
+
+    void unregisterMultipleClients() {
+        // Unregister the clients.
+        EXPECT_TRUE(mClient1->unregisterClient().isOk());
+        EXPECT_TRUE(mClient2->unregisterClient().isOk());
+        EXPECT_TRUE(mClient3->unregisterClient().isOk());
+    }
+
+    const char* prepareOutputFile(const char* path) {
+        deleteFile(path);
+        return path;
+    }
+
+    void deleteFile(const char* path) { unlink(path); }
+
+    void dismissKeyguard() {
+        EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+        EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    }
+
+    void stopAppPackages() {
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+        EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+    }
+
+    std::shared_ptr<IMediaTranscodingService> mService;
+    std::shared_ptr<TestClientCallback> mClient1;
+    std::shared_ptr<TestClientCallback> mClient2;
+    std::shared_ptr<TestClientCallback> mClient3;
+};
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/README.txt b/services/mediatranscoding/tests/README.txt
new file mode 100644
index 0000000..cde465e
--- /dev/null
+++ b/services/mediatranscoding/tests/README.txt
@@ -0,0 +1,8 @@
+mediatranscodingservice_simulated_tests:
+	Tests media transcoding service with simulated transcoder.
+
+mediatranscodingservice_real_tests:
+	Tests media transcoding service with real transcoder. Uses the same test assets
+	as the MediaTranscoder unit tests. Before running the test, please make sure
+	to push the test assets to /data/local/tmp:
+	adb push $TOP/frameworks/av/media/libmediatranscoding/tests/assets /data/local/tmp/TranscodingTestAssets
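+
+mediatranscodingservice_resource_tests:
+	Tests media transcoding service's handling of video codec resource reclaim,
+	driven by the ResourcePolicyTestActivity in the TranscodingUidPolicy test apps.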
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
new file mode 100644
index 0000000..df00aa1
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
@@ -0,0 +1,34 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediatranscoding_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_services_mediatranscoding_license",
+    ],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppA",
+    manifest: "TestAppA.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppB",
+    manifest: "TestAppB.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
+
+android_test_helper_app {
+    name: "TranscodingUidPolicy_TestAppC",
+    manifest: "TestAppC.xml",
+    static_libs: ["androidx.test.rules"],
+    sdk_version: "test_current",
+    srcs: ["src/**/*.java"],
+}
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
new file mode 100644
index 0000000..0dff171
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.A"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppA">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+        <activity android:name="com.android.tests.transcoding.ResourcePolicyTestActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
new file mode 100644
index 0000000..4baa35a
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.B"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppB">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
+
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
new file mode 100644
index 0000000..3dde3af
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.android.tests.transcoding.testapp.C"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <application android:label="TestAppC">
+        <activity android:name="com.android.tests.transcoding.MainActivity"
+            android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT"/>
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
new file mode 100644
index 0000000..b79164d
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+
+/**
+ * This is an empty activity for testing the UID policy of media transcoding service.
+ */
+public class MainActivity extends Activity {
+    private static final String TAG = "MainActivity";
+
+    // Called at the start of the full lifetime.
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        // Initialize Activity and inflate the UI.
+    }
+
+    // Called after onCreate has finished, use to restore UI state
+    @Override
+    public void onRestoreInstanceState(Bundle savedInstanceState) {
+        super.onRestoreInstanceState(savedInstanceState);
+        // Restore UI state from the savedInstanceState.
+        // This bundle has also been passed to onCreate.
+        // Will only be called if the Activity has been
+        // killed by the system since it was last visible.
+    }
+
+    // Called before subsequent visible lifetimes
+    // for an activity process.
+    @Override
+    public void onRestart() {
+        super.onRestart();
+        // Load changes knowing that the Activity has already
+        // been visible within this process.
+    }
+
+    // Called at the start of the visible lifetime.
+    @Override
+    public void onStart() {
+        super.onStart();
+        // Apply any required UI change now that the Activity is visible.
+    }
+
+    // Called at the start of the active lifetime.
+    @Override
+    public void onResume() {
+        super.onResume();
+        // Resume any paused UI updates, threads, or processes required
+        // by the Activity but suspended when it was inactive.
+    }
+
+    // Called to save UI state changes at the
+    // end of the active lifecycle.
+    @Override
+    public void onSaveInstanceState(Bundle savedInstanceState) {
+        // Save UI state changes to the savedInstanceState.
+        // This bundle will be passed to onCreate and
+        // onRestoreInstanceState if the process is
+        // killed and restarted by the run time.
+        super.onSaveInstanceState(savedInstanceState);
+    }
+
+    // Called at the end of the active lifetime.
+    @Override
+    public void onPause() {
+        // Suspend UI updates, threads, or CPU intensive processes
+        // that don't need to be updated when the Activity isn't
+        // the active foreground Activity.
+        super.onPause();
+    }
+
+    // Called at the end of the visible lifetime.
+    @Override
+    public void onStop() {
+        // Suspend remaining UI updates, threads, or processing
+        // that aren't required when the Activity isn't visible.
+        // Persist all edits or state changes
+        // as after this call the process is likely to be killed.
+        super.onStop();
+    }
+
+    // Sometimes called at the end of the full lifetime.
+    @Override
+    public void onDestroy() {
+        // Clean up any resources including ending threads,
+        // closing database connections etc.
+        super.onDestroy();
+    }
+}
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
new file mode 100644
index 0000000..c9e2ddb
--- /dev/null
+++ b/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.tests.transcoding;
+
+import android.app.Activity;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.VideoCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Log;
+import java.io.IOException;
+import java.util.Vector;
+
+public class ResourcePolicyTestActivity extends Activity {
+    public static final int TYPE_NONSECURE = 0;
+    public static final int TYPE_SECURE = 1;
+    public static final int TYPE_MIX = 2;
+
+    protected String TAG;
+    private static final int FRAME_RATE = 10;
+    private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+    private static final String MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
+    private static final int TIMEOUT_MS = 5000;
+
+    private Vector<MediaCodec> mCodecs = new Vector<MediaCodec>();
+
+    private class TestCodecCallback extends MediaCodec.Callback {
+        @Override
+        public void onInputBufferAvailable(MediaCodec codec, int index) {
+            Log.d(TAG, "onInputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onOutputBufferAvailable(
+                MediaCodec codec, int index, MediaCodec.BufferInfo info) {
+            Log.d(TAG, "onOutputBufferAvailable " + codec.toString());
+        }
+
+        @Override
+        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
+            Log.d(TAG, "onError " + codec.toString() + " errorCode " + e.getErrorCode());
+        }
+
+        @Override
+        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
+            Log.d(TAG, "onOutputFormatChanged " + codec.toString());
+        }
+    }
+
+    private MediaCodec.Callback mCallback = new TestCodecCallback();
+
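+    // Build a video format at the lowest width/height/bitrate advertised by the codec,
+    // with secure playback enabled or disabled as requested.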
+    private MediaFormat getTestFormat(CodecCapabilities caps, boolean securePlayback) {
+        VideoCapabilities vcaps = caps.getVideoCapabilities();
+        int width = vcaps.getSupportedWidths().getLower();
+        int height = vcaps.getSupportedHeightsFor(width).getLower();
+        int bitrate = vcaps.getBitrateRange().getLower();
+
+        MediaFormat format = MediaFormat.createVideoFormat(MIME, width, height);
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, caps.colorFormats[0]);
+        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+        format.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, securePlayback);
+        return format;
+    }
+
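+    // Pick an AVC decoder matching the requested secure/non-secure playback, or return
+    // null if none qualifies.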
+    private MediaCodecInfo getTestCodecInfo(boolean securePlayback) {
+        // Use avc decoder for testing.
+        boolean isEncoder = false;
+
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        for (MediaCodecInfo info : mcl.getCodecInfos()) {
+            if (info.isEncoder() != isEncoder) {
+                continue;
+            }
+            CodecCapabilities caps;
+            try {
+                caps = info.getCapabilitiesForType(MIME);
+                boolean securePlaybackSupported =
+                        caps.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
+                boolean securePlaybackRequired =
+                        caps.isFeatureRequired(CodecCapabilities.FEATURE_SecurePlayback);
+                if ((securePlayback && securePlaybackSupported)
+                        || (!securePlayback && !securePlaybackRequired)) {
+                    Log.d(TAG, "securePlayback " + securePlayback + " will use " + info.getName());
+                } else {
+                    Log.d(TAG, "securePlayback " + securePlayback + " skip " + info.getName());
+                    continue;
+                }
+            } catch (IllegalArgumentException e) {
+                // mime is not supported
+                continue;
+            }
+            return info;
+        }
+
+        return null;
+    }
+
+    protected int allocateCodecs(int max) {
+        Bundle extras = getIntent().getExtras();
+        int type = TYPE_NONSECURE;
+        if (extras != null) {
+            type = extras.getInt("test-type", type);
+            Log.d(TAG, "type is: " + type);
+        }
+
+        boolean shouldSkip = false;
+        boolean securePlayback;
+        if (type == TYPE_NONSECURE || type == TYPE_MIX) {
+            securePlayback = false;
+            MediaCodecInfo info = getTestCodecInfo(securePlayback);
+            if (info != null) {
+                allocateCodecs(max, info, securePlayback);
+            } else {
+                shouldSkip = true;
+            }
+        }
+
+        if (!shouldSkip) {
+            if (type == TYPE_SECURE || type == TYPE_MIX) {
+                securePlayback = true;
+                MediaCodecInfo info = getTestCodecInfo(securePlayback);
+                if (info != null) {
+                    allocateCodecs(max, info, securePlayback);
+                } else {
+                    shouldSkip = true;
+                }
+            }
+        }
+
+        if (shouldSkip) {
+            Log.d(TAG, "test skipped as there's no supported codec.");
+            finishWithResult(RESULT_OK);
+        }
+
+        Log.d(TAG, "allocateCodecs returned " + mCodecs.size());
+        return mCodecs.size();
+    }
+
+    protected void allocateCodecs(int max, MediaCodecInfo info, boolean securePlayback) {
+        String name = info.getName();
+        CodecCapabilities caps = info.getCapabilitiesForType(MIME);
+        MediaFormat format = getTestFormat(caps, securePlayback);
+        MediaCodec codec = null;
+        for (int i = mCodecs.size(); i < max; ++i) {
+            try {
+                Log.d(TAG, "Create codec " + name + " #" + i);
+                codec = MediaCodec.createByCodecName(name);
+                codec.setCallback(mCallback);
+                Log.d(TAG, "Configure codec " + format);
+                codec.configure(format, null, null, 0);
+                Log.d(TAG, "Start codec " + format);
+                codec.start();
+                mCodecs.add(codec);
+                codec = null;
+            } catch (IllegalArgumentException e) {
+                Log.d(TAG, "IllegalArgumentException " + e.getMessage());
+                break;
+            } catch (IOException e) {
+                Log.d(TAG, "IOException " + e.getMessage());
+                break;
+            } catch (MediaCodec.CodecException e) {
+                Log.d(TAG, "CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+                break;
+            } finally {
+                if (codec != null) {
+                    Log.d(TAG, "release codec");
+                    codec.release();
+                    codec = null;
+                }
+            }
+        }
+    }
+
+    protected void finishWithResult(int result) {
+        for (int i = 0; i < mCodecs.size(); ++i) {
+            Log.d(TAG, "release codec #" + i);
+            mCodecs.get(i).release();
+        }
+        mCodecs.clear();
+        setResult(result);
+        finish();
+        Log.d(TAG, "activity finished");
+    }
+
+    private void doUseCodecs() {
+        int current = 0;
+        try {
+            for (current = 0; current < mCodecs.size(); ++current) {
+                mCodecs.get(current).getName();
+            }
+        } catch (MediaCodec.CodecException e) {
+            Log.d(TAG, "useCodecs got CodecException 0x" + Integer.toHexString(e.getErrorCode()));
+            if (e.getErrorCode() == MediaCodec.CodecException.ERROR_RECLAIMED) {
+                Log.d(TAG, "Remove codec " + current + " from the list");
+                mCodecs.get(current).release();
+                mCodecs.remove(current);
+                mGotReclaimedException = true;
+                mUseCodecs = false;
+            }
+            return;
+        }
+    }
+
+    private Thread mWorkerThread;
+    private volatile boolean mUseCodecs = true;
+    private volatile boolean mGotReclaimedException = false;
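+    // Poll the allocated codecs on a worker thread until one is reclaimed or TIMEOUT_MS
+    // elapses, then finish the activity with RESULT_OK.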
+    protected void useCodecs() {
+        mWorkerThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                long start = System.currentTimeMillis();
+                long timeSinceStartedMs = 0;
+                while (mUseCodecs && (timeSinceStartedMs < TIMEOUT_MS)) {
+                    doUseCodecs();
+                    try {
+                        Thread.sleep(50 /* millis */);
+                    } catch (InterruptedException e) {
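+                        // Ignore; the loop re-checks mUseCodecs and the elapsed time.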
+                    }
+                    timeSinceStartedMs = System.currentTimeMillis() - start;
+                }
+                if (mGotReclaimedException) {
+                    Log.d(TAG, "Got expected reclaim exception.");
+                }
+                finishWithResult(RESULT_OK);
+            }
+        });
+        mWorkerThread.start();
+    }
+
+    private static final int MAX_INSTANCES = 32;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        TAG = "ResourcePolicyTestActivity";
+
+        Log.d(TAG, "onCreate called.");
+        super.onCreate(savedInstanceState);
+
+        if (allocateCodecs(MAX_INSTANCES) == MAX_INSTANCES) {
+            // Haven't reached the codec limit even with MAX_INSTANCES instances, so no
+            // reclaim exception is expected.
+            Log.d(TAG, "Didn't hit resource limitation");
+        }
+
+        useCodecs();
+    }
+
+    @Override
+    protected void onDestroy() {
+        Log.d(TAG, "onDestroy called.");
+        super.onDestroy();
+    }
+}
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
old mode 100644
new mode 100755
index bcdc7f7..edf6778
--- a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -13,11 +13,35 @@
 
 mm
 
-echo "waiting for device"
+# Push the files onto the device.
+. $ANDROID_BUILD_TOP/frameworks/av/media/libmediatranscoding/tests/push_assets.sh
 
-adb root && adb wait-for-device remount && adb sync
+echo "[==========] installing test apps"
+adb root
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppA/arm64/TranscodingUidPolicy_TestAppA.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppB/arm64/TranscodingUidPolicy_TestAppB.apk
+adb install -t -r -g -d $ANDROID_TARGET_OUT_TESTCASES/TranscodingUidPolicy_TestAppC/arm64/TranscodingUidPolicy_TestAppC.apk
 
-echo "========================================"
+echo "[==========] waiting for device and sync"
+adb wait-for-device remount && adb sync
 
-echo "testing mediatranscodingservice"
-adb shell /data/nativetest64/mediatranscodingservice_tests/mediatranscodingservice_tests
+echo "[==========] running simulated tests"
+adb shell setprop debug.transcoding.simulated_transcoder true
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+adb shell /data/nativetest/mediatranscodingservice_simulated_tests/mediatranscodingservice_simulated_tests
+
+echo "[==========] running real tests"
+adb shell setprop debug.transcoding.simulated_transcoder false
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+adb shell /data/nativetest/mediatranscodingservice_real_tests/mediatranscodingservice_real_tests
+
+echo "[==========] running resource tests"
+adb shell kill -9 `pid media.transcoding`
+#adb shell /data/nativetest64/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+adb shell /data/nativetest/mediatranscodingservice_resource_tests/mediatranscodingservice_resource_tests
+
+echo "[==========] removing debug properties"
+adb shell setprop debug.transcoding.simulated_transcoder \"\"
+adb shell kill -9 `pid media.transcoding`
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
new file mode 100644
index 0000000..e9eebe2
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
@@ -0,0 +1,455 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceRealTest"
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /sdcard:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kSessionWithPaddingUs = 10000000 + kPaddingUs;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+constexpr const char* kShortSrcPath =
+        "/data/local/tmp/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4";
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+class MediaTranscodingServiceRealTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceRealTest() { ALOGI("MediaTranscodingServiceRealTest created"); }
+
+    virtual ~MediaTranscodingServiceRealTest() {
+        ALOGI("MediaTranscodingServiceResourceTest destroyed");
+    }
+};
+
+TEST_F(MediaTranscodingServiceRealTest, TestInvalidSource) {
+    registerMultipleClients();
+
+    const char* srcPath = "bad_file_uri";
+    const char* dstPath = prepareOutputFile(OUTPATH(TestInvalidSource));
+
+    // Submit one session.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath, dstPath, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Check expected error.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kErrorIO);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPassthru) {
+    registerMultipleClients();
+
+    const char* dstPath = prepareOutputFile(OUTPATH(TestPassthru));
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideo) {
+    registerMultipleClients();
+
+    const char* dstPath = prepareOutputFile(OUTPATH(TestTranscodeVideo));
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kShortSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestTranscodeVideoProgress) {
+    registerMultipleClients();
+
+    const char* dstPath = prepareOutputFile(OUTPATH(TestTranscodeVideoProgress));
+
+    // Submit one session.
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+
+    // Wait for session to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    // Check the progress update messages are received. For this clip (around ~15 second long),
+    // expect at least 10 updates, and the last update should be 100.
+    int lastProgress;
+    EXPECT_GE(mClient1->getUpdateCount(&lastProgress), 10);
+    EXPECT_EQ(lastProgress, 100);
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel immediately after start.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelImmediately) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestCancelImmediately_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestCancelImmediately_Session1));
+
+    // Submit one session, should start immediately.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Test cancel session immediately, getSession should fail after cancel.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    // Submit new session, new session should start immediately and finish.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Test cancel in the middle of transcoding.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestCancelWhileRunning) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestCancelWhileRunning_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestCancelWhileRunning_Session1));
+
+    // Submit two sessions, session 0 should start immediately, session 1 should be queued.
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+    EXPECT_TRUE(mClient1->getSession(1, srcPath1, dstPath1));
+
+    // Session 0 (longtest) shouldn't finish within 1 second.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Now cancel session 0. Session 1 should start immediately and finish.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeSingleClient) {
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestPauseResumeSingleClient_Session0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestPauseResumeSingleClient_Session1));
+
+    // Submit one offline session, should start immediately.
+    EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kUnspecified,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    // Test get session after starts.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Submit one realtime session.
+    EXPECT_TRUE(
+            mClient1->submit(1, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Offline session should pause.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Realtime session should start immediately, and run to finish.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(1, "", ""));
+
+    // Then offline session should resume.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    // Test get session after resume.
+    EXPECT_TRUE(mClient1->getSession(0, srcPath0, dstPath0));
+
+    // Offline session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    unregisterMultipleClients();
+}
+
+/*
+ * Basic test for pause/resume with two clients, with one session each.
+ * Top app's session should preempt the other app's session.
+ */
+TEST_F(MediaTranscodingServiceRealTest, TestPauseResumeMultiClients) {
+    ALOGD("TestPauseResumeMultiClients starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* srcPath1 = kShortSrcPath;
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestPauseResumeMultiClients_Client0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestPauseResumeMultiClients_Client1));
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit session to Client1.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(
+            mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Client1's session should continue to run, since Client2 (app B) doesn't have any session.
+    EXPECT_EQ(mClient1->pop(1000000), EventTracker::NoEvent);
+
+    // Submit session to Client2.
+    ALOGD("Submitting session to client2 (app B) ...");
+    EXPECT_TRUE(
+            mClient2->submit(0, srcPath1, dstPath1, TranscodingSessionPriority::kNormal, kBitRate));
+
+    // Client1's session should pause, client2's session should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    // Client2's session should finish, then Client1's session should resume.
+    EXPECT_EQ(mClient2->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // Client1's session should finish.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestPauseResumeMultiClients finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneForeground) {
+    ALOGD("TestUidGoneForeground starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundSession1));
+
+    // Test kill foreground app, using only 1 uid.
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit sessions to Client1 (app A).
+    ALOGD("Submitting sessions to client1 (app A) ...");
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_TRUE(mClient1->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::NoEvent);
+
+    // Kill app A, expect both of A's sessions to be cancelled with error code
+    // kUidGoneCancelled (reported as Failed events).
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Failed(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneForeground finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneForegroundMultiUids) {
+    ALOGD("TestUidGoneForegroundMultiUids starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneForegroundMultiUidsSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneForegroundMultiUidsSession1));
+
+    // Test kill foreground app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+    // Make app A also requesting session 1.
+    EXPECT_TRUE(mClient2->addClientUid(1, mClient1->mClientUid));
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 1));
+
+    // Kill app A, CLIENT(2)'s session 1 should continue because it's also requested by app B.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+
+    // Kill app B, sessions should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 1));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneForegroundMultiUids finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneBackground) {
+    ALOGD("TestUidGoneBackground starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneBackgroundSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneBackgroundSession1));
+
+    // Test kill background app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_TRUE(mClient1->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Kill app B, all its sessions should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 1));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneBackground finished.");
+}
+
+TEST_F(MediaTranscodingServiceRealTest, TestUidGoneBackgroundMultiUids) {
+    ALOGD("TestUidGoneBackgroundMultiUids starting...");
+
+    dismissKeyguard();
+    stopAppPackages();
+
+    registerMultipleClients();
+
+    const char* dstPath0 = prepareOutputFile(OUTPATH(TestUidGoneBackgroundMultiUidsSession0));
+    const char* dstPath1 = prepareOutputFile(OUTPATH(TestUidGoneBackgroundMultiUidsSession1));
+
+    // Test kill background app, using two uids.
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+    EXPECT_TRUE(mClient2->submit(0, kLongSrcPath, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+    EXPECT_TRUE(mClient2->submit(1, kLongSrcPath, dstPath1, TranscodingSessionPriority::kNormal,
+                                 kBitRate));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::NoEvent);
+    // Make app A also requesting session 1.
+    EXPECT_TRUE(mClient2->addClientUid(1, mClient1->mClientUid));
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 1));
+
+    // Kill app B, CLIENT(2)'s session 1 should continue to run, session 0 on
+    // the other hand should be cancelled.
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Failed(CLIENT(2), 0));
+    EXPECT_EQ(mClient2->getLastError(), TranscodingErrorCode::kUidGoneCancelled);
+
+    unregisterMultipleClients();
+
+    stopAppPackages();
+
+    ALOGD("TestUidGoneBackgroundMultiUids finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
new file mode 100644
index 0000000..790e80b
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceResourceTest"
+
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <aidl/android/media/IResourceManagerService.h>
+#include <binder/ActivityManager.h>
+
+#include "MediaTranscodingServiceTestHelper.h"
+
+/*
+ * Tests media transcoding service with real transcoder.
+ *
+ * Uses the same test assets as the MediaTranscoder unit tests. Before running the test,
+ * please make sure to push the test assets to /data/local/tmp:
+ *
+ * adb push $TOP/frameworks/av/media/libmediatranscoding/transcoder/tests/assets /data/local/tmp/TranscodingTestAssets
+ */
+namespace android {
+
+namespace media {
+
+constexpr int64_t kPaddingUs = 400000;
+constexpr int32_t kBitRate = 8 * 1000 * 1000;  // 8 Mbps
+
+constexpr const char* kLongSrcPath = "/data/local/tmp/TranscodingTestAssets/longtest_15s.mp4";
+
+constexpr const char* kResourcePolicyTestActivity =
+        "/com.android.tests.transcoding.ResourcePolicyTestActivity";
+
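+// OUTPATH(name) expands to a string literal; for example, OUTPATH(TestFoo) becomes
+// "/data/local/tmp/MediaTranscodingService_TestFoo.MP4".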
+#define OUTPATH(name) "/data/local/tmp/MediaTranscodingService_" #name ".MP4"
+
+/*
+ * The OOM score we're going to ask ResourceManager to use for our native transcoding
+ * service. ResourceManager issues reclaims based on these scores. It gets the scores
+ * from ActivityManagerService, which doesn't track native services. The values of the
+ * OOM scores are defined in:
+ * frameworks/base/services/core/java/com/android/server/am/ProcessList.java
+ * We use SERVICE_ADJ, which is lower priority than an app that is possibly visible to the
+ * user, but higher priority than a cached app (which could be killed without disruption
+ * to the user).
+ */
+constexpr static int32_t SERVICE_ADJ = 500;
+
+using Status = ::ndk::ScopedAStatus;
+using aidl::android::media::BnResourceManagerClient;
+using aidl::android::media::IResourceManagerService;
+
+/*
+ * Placeholder ResourceManagerClient for registering process info override
+ * with the IResourceManagerService. This is only used as a token by the service
+ * to get notifications about binder death, not used for reclaiming resources.
+ */
+struct ResourceManagerClient : public BnResourceManagerClient {
+    explicit ResourceManagerClient() = default;
+
+    Status reclaimResource(bool* _aidl_return) override {
+        *_aidl_return = false;
+        return Status::ok();
+    }
+
+    Status getName(::std::string* _aidl_return) override {
+        _aidl_return->clear();
+        return Status::ok();
+    }
+
+    virtual ~ResourceManagerClient() = default;
+};
+
+static std::shared_ptr<ResourceManagerClient> gResourceManagerClient =
+        ::ndk::SharedRefBase::make<ResourceManagerClient>();
+
+void TranscodingHelper_setProcessInfoOverride(int32_t procState, int32_t oomScore) {
+    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+    std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
+    if (service == nullptr) {
+        ALOGE("Failed to get IResourceManagerService");
+        return;
+    }
+    Status status =
+            service->overrideProcessInfo(gResourceManagerClient, getpid(), procState, oomScore);
+    if (!status.isOk()) {
+        ALOGW("Failed to setProcessInfoOverride.");
+    }
+}
+
+class MediaTranscodingServiceResourceTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceResourceTest() { ALOGI("MediaTranscodingServiceResourceTest created"); }
+
+    virtual ~MediaTranscodingServiceResourceTest() {
+        ALOGI("MediaTranscodingServiceResourceTest destroyed");
+    }
+};
+
+/**
+ * Basic testing for handling resource lost.
+ *
+ * This test starts a transcoding session (which is somewhat long and takes several seconds),
+ * then launches an activity that allocates video codec instances until it hits an
+ * insufficient resource error. Because the activity is running in the foreground,
+ * ResourceManager should reclaim codecs from the transcoding service, which should
+ * cause the session to be paused. The activity will hold the codecs for a few seconds
+ * before releasing them, and the transcoding service should be able to resume
+ * and complete the session.
+ *
+ * Note that this test must run as root. We need to simulate submitting a request for a
+ * client {uid,pid} running at lower priority. As a command line test, it's not easy to get
+ * the pid of a running app, so we submit using our own {uid,pid}. However, since we're a
+ * native process, RM doesn't have our proc info and the reclaim would fail. So we need to
+ * use RM's setProcessInfoOverride to override our proc info, which requires permission
+ * (unless running as root).
+ */
+TEST_F(MediaTranscodingServiceResourceTest, TestResourceLost) {
+    ALOGD("TestResourceLost starting..., pid %d", ::getpid());
+
+    // We're going to submit the request using our own {uid,pid}. Since we're a native
+    // process, RM doesn't have our proc info and the reclaim will fail. So we need to use
+    // RM's setProcessInfoOverride to override our proc info.
+    TranscodingHelper_setProcessInfoOverride(ActivityManager::PROCESS_STATE_SERVICE, SERVICE_ADJ);
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+
+    registerMultipleClients();
+
+    const char* srcPath0 = kLongSrcPath;
+    const char* dstPath0 = OUTPATH(TestPauseResumeMultiClients_Client0);
+    deleteFile(dstPath0);
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit session to Client1.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(mClient1->submit(0, srcPath0, dstPath0, TranscodingSessionPriority::kNormal,
+                                 kBitRate, ::getpid(), ::getuid()));
+
+    // Client1's session should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Launch ResourcePolicyTestActivity, which will try to allocate up to 32 codec
+    // instances; this should trigger an insufficient resource error on most devices.
+    // (Note that it's possible the device supports a very high number of codec
+    // instances, in which case we simply require that the session completes.)
+    ALOGD("Launch ResourcePolicyTestActivity...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kResourcePolicyTestActivity));
+
+    // The basic requirement is that the session should complete. Wait for the finish
+    // event and pop all events received.
+    std::list<EventTracker::Event> events;
+    EXPECT_TRUE(mClient1->waitForSpecificEventAndPop(EventTracker::Finished(CLIENT(1), 0), &events,
+                                                     15000000));
+
+    // If there is only 1 event, it must be the finish event (otherwise
+    // waitForSpecificEventAndPop wouldn't have popped anything), and we're ok.
+    //
+    // TODO: If there is only 1 event (finish) and no pause/resume happened, we need
+    // to verify that ResourcePolicyTestActivity was actually able to allocate
+    // all 32 instances without hitting insufficient resources. Otherwise, it could
+    // be that ResourceManager was not able to reclaim codecs from the transcoding
+    // service at all, which would mean the resource management is broken.
+    if (events.size() > 1) {
+        EXPECT_TRUE(events.size() >= 3);
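+        // Expected sequence: Pause first, Resume second-to-last, Finished last; any events
+        // in between must be Pause or Resume events.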
+        size_t i = 0;
+        for (auto& event : events) {
+            if (i == 0) {
+                EXPECT_EQ(event, EventTracker::Pause(CLIENT(1), 0));
+            } else if (i == events.size() - 2) {
+                EXPECT_EQ(event, EventTracker::Resume(CLIENT(1), 0));
+            } else if (i == events.size() - 1) {
+                EXPECT_EQ(event, EventTracker::Finished(CLIENT(1), 0));
+            } else {
+                EXPECT_TRUE(event == EventTracker::Pause(CLIENT(1), 0) ||
+                            event == EventTracker::Resume(CLIENT(1), 0));
+            }
+            i++;
+        }
+    }
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
new file mode 100644
index 0000000..cb354f4
--- /dev/null
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -0,0 +1,568 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for MediaTranscodingService.
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodingServiceSimulatedTest"
+
+#include <aidl/android/media/BnTranscodingClientCallback.h>
+#include <aidl/android/media/IMediaTranscodingService.h>
+#include <aidl/android/media/ITranscodingClient.h>
+#include <aidl/android/media/ITranscodingClientCallback.h>
+#include <aidl/android/media/TranscodingRequestParcel.h>
+#include <aidl/android/media/TranscodingSessionParcel.h>
+#include <aidl/android/media/TranscodingSessionPriority.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <binder/PermissionController.h>
+#include <cutils/multiuser.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <iostream>
+#include <list>
+#include <unordered_set>
+
+#include "MediaTranscodingServiceTestHelper.h"
+#include "SimulatedTranscoder.h"
+
+namespace android {
+
+namespace media {
+
+// Note that -1 is valid and means using the calling pid/uid for the service, but only a
+// privileged caller may use them. This test is not a privileged caller.
+constexpr int32_t kInvalidClientPid = -5;
+constexpr int32_t kInvalidClientUid = -10;
+constexpr const char* kInvalidClientName = "";
+constexpr const char* kInvalidClientOpPackageName = "";
+
+constexpr int64_t kPaddingUs = 1000000;
+constexpr int64_t kSessionWithPaddingUs = SimulatedTranscoder::kSessionDurationUs + kPaddingUs;
+constexpr int64_t kWatchdogTimeoutUs = 3000000;
+// Pacer settings used for simulated tests. Listed here for reference.
+constexpr int32_t kSimulatedPacerBurstThresholdMs = 500;
+//constexpr int32_t kSimulatedPacerBurstCountQuota = 10;
+//constexpr int32_t kSimulatedPacerBurstTimeQuotaSec = 3;
+
+constexpr const char* kClientOpPackageName = "TestClientPackage";
+
+class MediaTranscodingServiceSimulatedTest : public MediaTranscodingServiceTestBase {
+public:
+    MediaTranscodingServiceSimulatedTest() { ALOGI("MediaTranscodingServiceSimulatedTest created"); }
+
+    virtual ~MediaTranscodingServiceSimulatedTest() {
+        ALOGI("MediaTranscodingServiceResourceTest destroyed");
+    }
+
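+    // Submits |numSubmits| sessions of |sessionDurationMs| each; the first |expectedSuccess|
+    // sessions are expected to run to completion, and the rest should be dropped by the
+    // pacer with kDroppedByService.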
+    void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+        // Idle to clear out burst history.
+        usleep(kSimulatedPacerBurstThresholdMs * 2 * 1000);
+        for (int i = 0; i < numSubmits; i++) {
+            EXPECT_TRUE(mClient3->submit(i, "test_source_file_0", "test_destination_file_0",
+                                         TranscodingSessionPriority::kNormal, -1 /*bitrateBps*/,
+                                         -1 /*overridePid*/, -1 /*overrideUid*/,
+                                         sessionDurationMs));
+        }
+        for (int i = 0; i < expectedSuccess; i++) {
+            EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Start(CLIENT(3), i));
+            EXPECT_EQ(mClient3->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(3), i));
+        }
+        for (int i = expectedSuccess; i < numSubmits; i++) {
+            EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Failed(CLIENT(3), i));
+            EXPECT_EQ(mClient3->getLastError(), TranscodingErrorCode::kDroppedByService);
+        }
+    }
+};
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with null callback.
+    Status status = mService->registerClient(nullptr, kClientName, kClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status = mService->registerClient(mClient1, kInvalidClientName,
+                                             kInvalidClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientWithInvalidClientPackageName) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register the client with the service.
+    Status status =
+            mService->registerClient(mClient1, kClientName, kInvalidClientOpPackageName, &client);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterOneClient) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Check the number of Clients.
+    int32_t numOfClients;
+    status = mService->getNumOfClients(&numOfClients);
+    EXPECT_TRUE(status.isOk());
+    EXPECT_GE(numOfClients, 1);
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterClientTwice) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    // Validate the client.
+    EXPECT_TRUE(client != nullptr);
+
+    // Register the client again and expects failure.
+    std::shared_ptr<ITranscodingClient> client1;
+    status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client1);
+    EXPECT_FALSE(status.isOk());
+
+    // Unregister the client.
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterMultipleClients) {
+    registerMultipleClients();
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSessionIdIndependence) {
+    registerMultipleClients();
+
+    // Submit 2 requests on client1 first.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file", "test_destination_file"));
+
+    // Submit 2 requests on client2; sessionIds should be independent for each client.
+    EXPECT_TRUE(mClient2->submit(0, "test_source_file", "test_destination_file"));
+    EXPECT_TRUE(mClient2->submit(1, "test_source_file", "test_destination_file"));
+
+    // Cancel all sessions.
+    EXPECT_TRUE(mClient1->cancel(0));
+    EXPECT_TRUE(mClient1->cancel(1));
+    EXPECT_TRUE(mClient2->cancel(0));
+    EXPECT_TRUE(mClient2->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelSessions) {
+    registerMultipleClients();
+
+    // Test sessionId assignment.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file"));
+
+    // Test submit bad request (no valid sourceFilePath) fails.
+    EXPECT_TRUE(mClient1->submit<fail>(0, "", ""));
+
+    // Test submit bad request (invalid client pid/uid) fails.
+    EXPECT_TRUE(mClient1->submit<fail>(0, "src", "dst", TranscodingSessionPriority::kNormal,
+                                       1000000, kInvalidClientPid, kInvalidClientUid));
+
+    // Test cancel non-existent session fails.
+    EXPECT_TRUE(mClient1->cancel<fail>(100));
+
+    // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test cancel valid sessionId in random order.
+    // Test cancel finished session fails.
+    EXPECT_TRUE(mClient1->cancel(2));
+    EXPECT_TRUE(mClient1->cancel<fail>(0));
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    // Test cancel session again fails.
+    EXPECT_TRUE(mClient1->cancel<fail>(1));
+
+    // Test that no more events arrive after cancel.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::NoEvent);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestGetSessions) {
+    registerMultipleClients();
+
+    // Submit 3 requests.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // Test get sessions by id.
+    EXPECT_TRUE(mClient1->getSession(2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->getSession(0, "test_source_file_0", "test_destination_file_0"));
+
+    // Test get session by invalid id fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(100, "", ""));
+    EXPECT_TRUE(mClient1->getSession<fail>(-1, "", ""));
+
+    // Test get session after cancel fails.
+    EXPECT_TRUE(mClient1->cancel(2));
+    EXPECT_TRUE(mClient1->getSession<fail>(2, "", ""));
+
+    // Session 0 should start immediately and finish in 2 seconds, followed by Session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Test get session after finish fails.
+    EXPECT_TRUE(mClient1->getSession<fail>(0, "", ""));
+
+    // Test get the remaining session 1.
+    EXPECT_TRUE(mClient1->getSession(1, "test_source_file_1", "test_destination_file_1"));
+
+    // Cancel remaining session 1.
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestAddGetClientUids) {
+    registerMultipleClients();
+
+    std::vector<int32_t> clientUids;
+    TranscodingRequestParcel request;
+    TranscodingSessionParcel session;
+    uid_t ownUid = ::getuid();
+
+    // Submit one real-time session.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file"));
+
+    // Should have mClientUid in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(0, &clientUids));
+    EXPECT_EQ(clientUids.size(), 1u);
+    EXPECT_EQ(clientUids[0], (int32_t)mClient1->mClientUid);
+
+    // Adding invalid client uid should fail.
+    EXPECT_TRUE(mClient1->addClientUid<fail>(0, kInvalidClientUid));
+
+    // Adding mClientUid again should fail.
+    EXPECT_TRUE(mClient1->addClientUid<fail>(0, mClient1->mClientUid));
+
+    // Submit one offline session.
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1",
+                                 TranscodingSessionPriority::kUnspecified));
+
+    // Should not have any uids in client uid list.
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
+    EXPECT_EQ(clientUids.size(), 0u);
+
+    // Add own uid (with IMediaTranscodingService::USE_CALLING_UID), should succeed.
+    EXPECT_TRUE(mClient1->addClientUid(1, IMediaTranscodingService::USE_CALLING_UID));
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
+    EXPECT_EQ(clientUids.size(), 1u);
+    EXPECT_EQ(clientUids[0], (int32_t)ownUid);
+
+    // Adding mClientUid should succeed.
+    EXPECT_TRUE(mClient1->addClientUid(1, mClient1->mClientUid));
+    EXPECT_TRUE(mClient1->getClientUids(1, &clientUids));
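+    // Collect the uids into a set since the order they are returned in is not guaranteed.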
+    std::unordered_set<uid_t> uidSet;
+    uidSet.insert(clientUids.begin(), clientUids.end());
+    EXPECT_EQ(uidSet.size(), 2u);
+    EXPECT_EQ(uidSet.count(ownUid), 1u);
+    EXPECT_EQ(uidSet.count(mClient1->mClientUid), 1u);
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestSubmitCancelWithOfflineSessions) {
+    registerMultipleClients();
+
+    // Submit some offline sessions first.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0",
+                                 TranscodingSessionPriority::kUnspecified));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1",
+                                 TranscodingSessionPriority::kUnspecified));
+
+    // Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Submit more real-time sessions.
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+    EXPECT_TRUE(mClient1->submit(3, "test_source_file_3", "test_destination_file_3"));
+
+    // Session 0 should pause immediately and session 2 should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // Session 2 should finish in 2 seconds and session 3 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 3));
+
+    // Cancel session 3 now
+    EXPECT_TRUE(mClient1->cancel(3));
+
+    // Session 0 should resume and finish in 2 seconds, followed by session 1 start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    // Cancel remaining session 1.
+    EXPECT_TRUE(mClient1->cancel(1));
+
+    unregisterMultipleClients();
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestClientUseAfterUnregister) {
+    std::shared_ptr<ITranscodingClient> client;
+
+    // Register a client, then unregister.
+    Status status = mService->registerClient(mClient1, kClientName, kClientOpPackageName, &client);
+    EXPECT_TRUE(status.isOk());
+
+    status = client->unregister();
+    EXPECT_TRUE(status.isOk());
+
+    // Test various operations on the client, should fail with ERROR_DISCONNECTED.
+    TranscodingSessionParcel session;
+    bool result;
+    status = client->getSessionWithId(0, &session, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    status = client->cancelSession(0, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+
+    TranscodingRequestParcel request;
+    status = client->submitRequest(request, &session, &result);
+    EXPECT_EQ(status.getServiceSpecificError(), IMediaTranscodingService::ERROR_DISCONNECTED);
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicy) {
+    ALOGD("TestTranscodingUidPolicy starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting session to client1 (app A) ...");
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // Session 0 should continue and finish in 2 seconds, then session 1 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Submitting session to client2 (app B) ...");
+    EXPECT_TRUE(mClient2->submit(0, "test_source_file_0", "test_destination_file_0"));
+
+    // Client1's session should pause, client2's session should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 1));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Start(CLIENT(2), 0));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Client2's session should pause, client1's session 1 should resume.
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Pause(CLIENT(2), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 1));
+
+    // Client1's session 1 should finish in 2 seconds, then its session 2 should start.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 2));
+
+    // After client1's sessions finish, client2's session should resume.
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 2));
+    EXPECT_EQ(mClient2->pop(kPaddingUs), EventTracker::Resume(CLIENT(2), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicy finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingUidPolicyWithMultipleClientUids) {
+    ALOGD("TestTranscodingUidPolicyWithMultipleClientUids starting...");
+
+    EXPECT_TRUE(ShellHelper::RunCmd("input keyevent KEYCODE_WAKEUP"));
+    EXPECT_TRUE(ShellHelper::RunCmd("wm dismiss-keyguard"));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    registerMultipleClients();
+
+    ALOGD("Moving app A to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
+
+    // Submit 3 requests.
+    ALOGD("Submitting session to client1 (app A)...");
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_TRUE(mClient1->submit(2, "test_source_file_2", "test_destination_file_2"));
+
+    // mClient1's Session 0 should start immediately.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Add client2 (app B)'s uid to mClient1's session 1.
+    EXPECT_TRUE(mClient1->addClientUid(1, mClient2->mClientUid));
+
+    ALOGD("Moving app B to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageB, kTestActivityName));
+
+    // mClient1's session 0 should pause, session 1 should start.
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+
+    ALOGD("Moving app A back to top...");
+    EXPECT_TRUE(ShellHelper::Start(kClientPackageA, kTestActivityName));
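+    // Session 1 is associated with both app A and app B, so it keeps running; once it
+    // finishes, session 0 (app A only) should resume.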
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageA));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageB));
+    EXPECT_TRUE(ShellHelper::Stop(kClientPackageC));
+
+    ALOGD("TestTranscodingUidPolicyWithMultipleClientUids finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingThermalPolicy) {
+    ALOGD("TestTranscodingThermalPolicy starting...");
+
+    registerMultipleClients();
+
+    // Submit request, should start immediately.
+    EXPECT_TRUE(mClient1->submit(0, "test_source_file_0", "test_destination_file_0"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 0));
+
+    // Now, simulate a thermal status change via adb command. The status codes are as defined
+    // in frameworks/native/include/android/thermal.h.
+    // ATHERMAL_STATUS_SEVERE(3): should start throttling.
+    EXPECT_TRUE(ShellHelper::RunCmd("cmd thermalservice override-status 3"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Pause(CLIENT(1), 0));
+
+    // ATHERMAL_STATUS_CRITICAL(4): shouldn't start throttling again (already started).
+    EXPECT_TRUE(ShellHelper::RunCmd("cmd thermalservice override-status 4"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::NoEvent);
+
+    // ATHERMAL_STATUS_MODERATE(2): should stop throttling.
+    EXPECT_TRUE(ShellHelper::RunCmd("cmd thermalservice override-status 2"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Resume(CLIENT(1), 0));
+
+    // ATHERMAL_STATUS_LIGHT(1): shouldn't stop throttling again (already stopped).
+    EXPECT_TRUE(ShellHelper::RunCmd("cmd thermalservice override-status 1"));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 0));
+
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingThermalPolicy finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingWatchdog) {
+    ALOGD("TestTranscodingWatchdog starting...");
+
+    registerMultipleClients();
+
+    // SimulatedTranscoder itself does not send heartbeats. Its sessions last 1 second
+    // by default, so the watchdog timeout will not normally fire.
+    // Here we run a session of 4000ms via TranscodingTestConfig, which will trigger
+    // a watchdog timeout on the server side. We use it to check that the error code is correct.
+    EXPECT_TRUE(mClient1->submit(
+            0, "test_source_file_0", "test_destination_file_0", TranscodingSessionPriority::kNormal,
+            -1 /*bitrateBps*/, -1 /*overridePid*/, -1 /*overrideUid*/, 4000 /*sessionDurationMs*/));
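+    // Expect the session to start within 100ms, produce no events until just before the
+    // watchdog timeout, then fail with kWatchdogTimeout shortly after the timeout elapses.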
+    EXPECT_EQ(mClient1->pop(100000), EventTracker::Start(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->pop(kWatchdogTimeoutUs - 100000), EventTracker::NoEvent);
+    EXPECT_EQ(mClient1->pop(200000), EventTracker::Failed(CLIENT(1), 0));
+    EXPECT_EQ(mClient1->getLastError(), TranscodingErrorCode::kWatchdogTimeout);
+
+    // After the timeout, submit another request and check it's finished.
+    EXPECT_TRUE(mClient1->submit(1, "test_source_file_1", "test_destination_file_1"));
+    EXPECT_EQ(mClient1->pop(kPaddingUs), EventTracker::Start(CLIENT(1), 1));
+    EXPECT_EQ(mClient1->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(1), 1));
+
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingWatchdog finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverCountQuotaOnly) {
+    ALOGD("TestTranscodingPacerOverCountQuotaOnly starting...");
+
+    registerMultipleClients();
+    testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingPacerOverCountQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverTimeQuotaOnly) {
+    ALOGD("TestTranscodingPacerOverTimeQuotaOnly starting...");
+
+    registerMultipleClients();
+    testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    ALOGD("TestTranscodingPacerOverTimeQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverQuota) {
+    ALOGD("TestTranscodingPacerOverQuota starting...");
+
+    registerMultipleClients();
+    testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+    unregisterMultipleClients();
+
+    // Idle to clear out burst history. Since we expect it to actually fail, wait for cooldown.
+    ALOGD("TestTranscodingPacerOverQuota finished.");
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
deleted file mode 100644
index 5a791fe..0000000
--- a/services/mediatranscoding/tests/mediatranscodingservice_tests.cpp
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Unit Test for MediaTranscoding Service.
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaTranscodingServiceTest"
-
-#include <aidl/android/media/BnTranscodingServiceClient.h>
-#include <aidl/android/media/IMediaTranscodingService.h>
-#include <aidl/android/media/ITranscodingServiceClient.h>
-#include <android-base/logging.h>
-#include <android-base/unique_fd.h>
-#include <android/binder_ibinder_jni.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-#include <cutils/ashmem.h>
-#include <gtest/gtest.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <utils/Log.h>
-
-namespace android {
-
-namespace media {
-
-using Status = ::ndk::ScopedAStatus;
-using aidl::android::media::BnTranscodingServiceClient;
-using aidl::android::media::IMediaTranscodingService;
-using aidl::android::media::ITranscodingServiceClient;
-
-constexpr int32_t kInvalidClientId = -5;
-
-// Note that -1 is valid and means using calling pid/uid for the service. But only privilege caller could
-// use them. This test is not a privilege caller.
-constexpr int32_t kInvalidClientPid = -5;
-constexpr int32_t kInvalidClientUid = -5;
-constexpr const char* kInvalidClientOpPackageName = "";
-
-constexpr int32_t kClientUseCallingPid = -1;
-constexpr int32_t kClientUseCallingUid = -1;
-constexpr const char* kClientOpPackageName = "TestClient";
-
-class MediaTranscodingServiceTest : public ::testing::Test {
-public:
-    MediaTranscodingServiceTest() { ALOGD("MediaTranscodingServiceTest created"); }
-
-    void SetUp() override {
-        ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
-        mService = IMediaTranscodingService::fromBinder(binder);
-        if (mService == nullptr) {
-            ALOGE("Failed to connect to the media.trascoding service.");
-            return;
-        }
-    }
-
-    ~MediaTranscodingServiceTest() { ALOGD("MediaTranscodingingServiceTest destroyed"); }
-
-    std::shared_ptr<IMediaTranscodingService> mService = nullptr;
-};
-
-struct TestClient : public BnTranscodingServiceClient {
-    TestClient(const std::shared_ptr<IMediaTranscodingService>& service) : mService(service) {
-        ALOGD("TestClient Created");
-    }
-
-    Status getName(std::string* _aidl_return) override {
-        *_aidl_return = "test_client";
-        return Status::ok();
-    }
-
-    Status onTranscodingFinished(
-            int32_t /* in_jobId */,
-            const ::aidl::android::media::TranscodingResultParcel& /* in_result */) override {
-        return Status::ok();
-    }
-
-    Status onTranscodingFailed(
-            int32_t /* in_jobId */,
-            ::aidl::android::media::TranscodingErrorCode /*in_errorCode */) override {
-        return Status::ok();
-    }
-
-    Status onAwaitNumberOfJobsChanged(int32_t /* in_jobId */, int32_t /* in_oldAwaitNumber */,
-                                      int32_t /* in_newAwaitNumber */) override {
-        return Status::ok();
-    }
-
-    Status onProgressUpdate(int32_t /* in_jobId */, int32_t /* in_progress */) override {
-        return Status::ok();
-    }
-
-    virtual ~TestClient() { ALOGI("TestClient destroyed"); };
-
-private:
-    std::shared_ptr<IMediaTranscodingService> mService;
-};
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterNullClient) {
-    std::shared_ptr<ITranscodingServiceClient> client = nullptr;
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kInvalidClientPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientUid) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kInvalidClientUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientWithInvalidClientPackageName) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kInvalidClientOpPackageName,
-                                             kClientUseCallingUid, kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterOneClient) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingPid,
-                                             kClientUseCallingUid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client.
-    bool res;
-    status = mService->unregisterClient(clientId, &res);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_TRUE(res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestUnRegisterClientWithInvalidClientId) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    ALOGD("client id is %d", clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Check the number of Clients.
-    int32_t numOfClients;
-    status = mService->getNumOfClients(&numOfClients);
-    EXPECT_TRUE(status.isOk());
-    EXPECT_EQ(1, numOfClients);
-
-    // Unregister the client with invalid ID
-    bool res;
-    mService->unregisterClient(kInvalidClientId, &res);
-    EXPECT_FALSE(res);
-
-    // Unregister the valid client.
-    mService->unregisterClient(clientId, &res);
-}
-
-TEST_F(MediaTranscodingServiceTest, TestRegisterClientTwice) {
-    std::shared_ptr<ITranscodingServiceClient> client =
-            ::ndk::SharedRefBase::make<TestClient>(mService);
-    EXPECT_TRUE(client != nullptr);
-
-    // Register the client with the service.
-    int32_t clientId = 0;
-    Status status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                             kClientUseCallingPid, &clientId);
-    EXPECT_TRUE(status.isOk());
-
-    // Validate the clientId.
-    EXPECT_TRUE(clientId > 0);
-
-    // Register the client again and expects failure.
-    status = mService->registerClient(client, kClientOpPackageName, kClientUseCallingUid,
-                                      kClientUseCallingPid, &clientId);
-    EXPECT_FALSE(status.isOk());
-
-    // Unregister the valid client.
-    bool res;
-    mService->unregisterClient(clientId, &res);
-}
-
-}  // namespace media
-}  // namespace android
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index b057968..3a89e12 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 minijail_common_cflags = [
     "-Wall",
     "-Werror",
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 9d9ca63..054a896 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -41,7 +41,7 @@
         : Singleton<AAudioClientTracker>() {
 }
 
-std::string AAudioClientTracker::dump() const {
+std::string AAudioClientTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mLock.try_lock(); } /* f */,
@@ -198,7 +198,7 @@
         for (const auto& serviceStream : streamsToClose) {
             aaudio_handle_t handle = serviceStream->getHandle();
             ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
-            aaudioService->closeStream(handle);
+            aaudioService->asAAudioServiceInterface().closeStream(handle);
         }
         // mStreams should be empty now
     }
@@ -207,7 +207,7 @@
 }
 
 
-std::string AAudioClientTracker::NotificationClient::dump() const {
+std::string AAudioClientTracker::NotificationClient::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 943b809..2b38621 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -21,10 +21,11 @@
 #include <mutex>
 #include <set>
 
+#include <android-base/thread_annotations.h>
 #include <utils/Singleton.h>
 
 #include <aaudio/AAudio.h>
-#include "binding/IAAudioClient.h"
+#include <aaudio/IAAudioClient.h>
 #include "AAudioService.h"
 
 namespace aaudio {
@@ -46,7 +47,7 @@
      */
     std::string dump() const;
 
-    aaudio_result_t registerClient(pid_t pid, const android::sp<android::IAAudioClient>& client);
+    aaudio_result_t registerClient(pid_t pid, const android::sp<IAAudioClient>& client);
 
     void unregisterClient(pid_t pid);
 
@@ -114,10 +115,12 @@
     };
 
     // This must be called under mLock
-    android::sp<NotificationClient> getNotificationClient_l(pid_t pid);
+    android::sp<NotificationClient> getNotificationClient_l(pid_t pid)
+            REQUIRES(mLock);
 
     mutable std::mutex                               mLock;
-    std::map<pid_t, android::sp<NotificationClient>> mNotificationClients;
+    std::map<pid_t, android::sp<NotificationClient>> mNotificationClients
+            GUARDED_BY(mLock);
     android::AAudioService                          *mAAudioService = nullptr;
 };
 
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 9f34153..20e4cc5 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -24,6 +24,7 @@
 #include <mutex>
 #include <sstream>
 #include <utility/AAudioUtilities.h>
+#include <media/AidlConversion.h>
 
 #include "AAudioClientTracker.h"
 #include "AAudioEndpointManager.h"
@@ -43,7 +44,7 @@
         , mExclusiveStreams() {
 }
 
-std::string AAudioEndpointManager::dump() const {
+std::string AAudioEndpointManager::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     int index = 0;
 
@@ -182,7 +183,9 @@
             // and START calls. This will help preserve app compatibility.
             // An app can avoid having this happen by closing their streams when
             // the app is paused.
-            AAudioClientTracker::getInstance().setExclusiveEnabled(request.getProcessId(), false);
+            pid_t pid = VALUE_OR_FATAL(
+                aidl2legacy_int32_t_pid_t(request.getAttributionSource().pid));
+            AAudioClientTracker::getInstance().setExclusiveEnabled(pid, false);
             endpointToSteal = endpoint; // return it to caller
         }
         return nullptr;
@@ -306,6 +309,7 @@
                 mSharedStreams.end());
 
         serviceEndpoint->close();
+
         mSharedCloseCount++;
         ALOGV("%s(%p) closed for device %d",
               __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index ae776b1..b07bcef 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -20,6 +20,8 @@
 #include <map>
 #include <mutex>
 #include <sys/types.h>
+
+#include <android-base/thread_annotations.h>
 #include <utils/Singleton.h>
 
 #include "binding/AAudioServiceMessage.h"
@@ -70,10 +72,12 @@
                                               const aaudio::AAudioStreamRequest &request);
 
     android::sp<AAudioServiceEndpoint> findExclusiveEndpoint_l(
-            const AAudioStreamConfiguration& configuration);
+            const AAudioStreamConfiguration& configuration)
+            REQUIRES(mExclusiveLock);
 
     android::sp<AAudioServiceEndpointShared> findSharedEndpoint_l(
-            const AAudioStreamConfiguration& configuration);
+            const AAudioStreamConfiguration& configuration)
+            REQUIRES(mSharedLock);
 
     void closeExclusiveEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
     void closeSharedEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
@@ -83,23 +87,25 @@
     // Lock mSharedLock before mExclusiveLock.
     // it is OK to only lock mExclusiveLock.
     mutable std::mutex                                     mSharedLock;
-    std::vector<android::sp<AAudioServiceEndpointShared>>  mSharedStreams;
+    std::vector<android::sp<AAudioServiceEndpointShared>>  mSharedStreams
+            GUARDED_BY(mSharedLock);
 
     mutable std::mutex                                     mExclusiveLock;
-    std::vector<android::sp<AAudioServiceEndpointMMAP>>    mExclusiveStreams;
+    std::vector<android::sp<AAudioServiceEndpointMMAP>>    mExclusiveStreams
+            GUARDED_BY(mExclusiveLock);
 
-    // Modified under a lock.
-    int32_t mExclusiveSearchCount = 0; // number of times we SEARCHED for an exclusive endpoint
-    int32_t mExclusiveFoundCount  = 0; // number of times we FOUND an exclusive endpoint
-    int32_t mExclusiveOpenCount   = 0; // number of times we OPENED an exclusive endpoint
-    int32_t mExclusiveCloseCount  = 0; // number of times we CLOSED an exclusive endpoint
-    int32_t mExclusiveStolenCount = 0; // number of times we STOLE an exclusive endpoint
+    // Counts related to an exclusive endpoint.
+    int32_t mExclusiveSearchCount GUARDED_BY(mExclusiveLock) = 0; // # SEARCHED
+    int32_t mExclusiveFoundCount  GUARDED_BY(mExclusiveLock) = 0; // # FOUND
+    int32_t mExclusiveOpenCount   GUARDED_BY(mExclusiveLock) = 0; // # OPENED
+    int32_t mExclusiveCloseCount  GUARDED_BY(mExclusiveLock) = 0; // # CLOSED
+    int32_t mExclusiveStolenCount GUARDED_BY(mExclusiveLock) = 0; // # STOLEN
 
     // Same as above but for SHARED endpoints.
-    int32_t mSharedSearchCount    = 0;
-    int32_t mSharedFoundCount     = 0;
-    int32_t mSharedOpenCount      = 0;
-    int32_t mSharedCloseCount     = 0;
+    int32_t mSharedSearchCount    GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedFoundCount     GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedOpenCount      GUARDED_BY(mSharedLock) = 0;
+    int32_t mSharedCloseCount     GUARDED_BY(mSharedLock) = 0;
 
     // For easily disabling the stealing of exclusive streams.
     static constexpr bool kStealingEnabled = true;
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index 1c03b7f..ad4b830 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -33,25 +33,21 @@
 using android::FifoBuffer;
 using android::fifo_frames_t;
 
-AAudioMixer::~AAudioMixer() {
-    delete[] mOutputBuffer;
-}
-
 void AAudioMixer::allocate(int32_t samplesPerFrame, int32_t framesPerBurst) {
     mSamplesPerFrame = samplesPerFrame;
     mFramesPerBurst = framesPerBurst;
     int32_t samplesPerBuffer = samplesPerFrame * framesPerBurst;
-    mOutputBuffer = new float[samplesPerBuffer];
+    mOutputBuffer = std::make_unique<float[]>(samplesPerBuffer);
     mBufferSizeInBytes = samplesPerBuffer * sizeof(float);
 }
 
 void AAudioMixer::clear() {
-    memset(mOutputBuffer, 0, mBufferSizeInBytes);
+    memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
 }
 
-int32_t AAudioMixer::mix(int streamIndex, FifoBuffer *fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(int streamIndex, std::shared_ptr<FifoBuffer> fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
-    float *destination = mOutputBuffer;
+    float *destination = mOutputBuffer.get();
 
 #if AAUDIO_MIXER_ATRACE_ENABLED
     ATRACE_BEGIN("aaMix");
@@ -117,5 +113,5 @@
 }
 
 float *AAudioMixer::getOutputBuffer() {
-    return mOutputBuffer;
+    return mOutputBuffer.get();
 }
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index d5abc5b..1a120f2 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -25,7 +25,6 @@
 class AAudioMixer {
 public:
     AAudioMixer() {}
-    ~AAudioMixer();
 
     void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
 
@@ -38,7 +37,7 @@
      * @param allowUnderflow if true then allow mixer to advance read index past the write index
      * @return frames read from this stream
      */
-    int32_t mix(int streamIndex, android::FifoBuffer *fifo, bool allowUnderflow);
+    int32_t mix(int streamIndex, std::shared_ptr<android::FifoBuffer> fifo, bool allowUnderflow);
 
     float *getOutputBuffer();
 
@@ -47,7 +46,7 @@
 private:
     void mixPart(float *destination, float *source, int32_t numFrames);
 
-    float   *mOutputBuffer = nullptr;
+    std::unique_ptr<float[]> mOutputBuffer;
     int32_t  mSamplesPerFrame = 0;
     int32_t  mFramesPerBurst = 0;
     int32_t  mBufferSizeInBytes = 0;
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 22cdb35..40a664e 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -22,7 +22,9 @@
 #include <iostream>
 #include <sstream>
 
+#include <android/content/AttributionSourceState.h>
 #include <aaudio/AAudio.h>
+#include <media/AidlConversion.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/String16.h>
 
@@ -32,26 +34,34 @@
 #include "AAudioService.h"
 #include "AAudioServiceStreamMMAP.h"
 #include "AAudioServiceStreamShared.h"
-#include "binding/IAAudioService.h"
 
 using namespace android;
 using namespace aaudio;
 
 #define MAX_STREAMS_PER_PROCESS   8
+#define AIDL_RETURN(x) { *_aidl_return = (x); return Status::ok(); }
+
+#define VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(x) \
+    ({ auto _tmp = (x); \
+       if (!_tmp.ok()) AIDL_RETURN(AAUDIO_ERROR_ILLEGAL_ARGUMENT); \
+       std::move(_tmp.value()); })
 
 using android::AAudioService;
+using android::content::AttributionSourceState;
+using binder::Status;
 
 android::AAudioService::AAudioService()
-    : BnAAudioService() {
-    mAudioClient.clientUid = getuid();   // TODO consider using geteuid()
-    mAudioClient.clientPid = getpid();
-    mAudioClient.packageName = String16("");
+    : BnAAudioService(),
+      mAdapter(this) {
+    // TODO consider using geteuid()
+    // TODO b/182392769: use attribution source util
+    mAudioClient.attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    mAudioClient.attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    mAudioClient.attributionSource.packageName = std::nullopt;
+    mAudioClient.attributionSource.attributionTag = std::nullopt;
     AAudioClientTracker::getInstance().setAAudioService(this);
 }
 
-AAudioService::~AAudioService() {
-}
-
 status_t AAudioService::dump(int fd, const Vector<String16>& args) {
     std::string result;
 
@@ -72,18 +82,21 @@
     return NO_ERROR;
 }
 
-void AAudioService::registerClient(const sp<IAAudioClient>& client) {
+Status AAudioService::registerClient(const sp<IAAudioClient> &client) {
     pid_t pid = IPCThreadState::self()->getCallingPid();
     AAudioClientTracker::getInstance().registerClient(pid, client);
+    return Status::ok();
 }
 
-bool AAudioService::isCallerInService() {
-    return mAudioClient.clientPid == IPCThreadState::self()->getCallingPid() &&
-        mAudioClient.clientUid == IPCThreadState::self()->getCallingUid();
-}
+Status
+AAudioService::openStream(const StreamRequest &_request, StreamParameters* _paramsOut,
+                          int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-aaudio_handle_t AAudioService::openStream(const aaudio::AAudioStreamRequest &request,
-                                          aaudio::AAudioStreamConfiguration &configurationOutput) {
+    // Create wrapper objects for simple usage of the parcelables.
+    const AAudioStreamRequest request(_request);
+    AAudioStreamConfiguration paramsOut;
+
     // A lock in is used to order the opening of endpoints when an
     // EXCLUSIVE endpoint is stolen. We want the order to be:
     // 1) Thread A opens exclusive MMAP endpoint
@@ -102,23 +115,29 @@
     aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
 
     // Enforce limit on client processes.
-    pid_t pid = request.getProcessId();
-    if (pid != mAudioClient.clientPid) {
+    AttributionSourceState attributionSource = request.getAttributionSource();
+    pid_t pid = IPCThreadState::self()->getCallingPid();
+    attributionSource.pid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
+        legacy2aidl_pid_t_int32_t(pid));
+    attributionSource.uid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    attributionSource.token = sp<BBinder>::make();
+    if (attributionSource.pid != mAudioClient.attributionSource.pid) {
         int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
         if (count >= MAX_STREAMS_PER_PROCESS) {
             ALOGE("openStream(): exceeded max streams per process %d >= %d",
                   count,  MAX_STREAMS_PER_PROCESS);
-            return AAUDIO_ERROR_UNAVAILABLE;
+            AIDL_RETURN(AAUDIO_ERROR_UNAVAILABLE);
         }
     }
 
     if (sharingMode != AAUDIO_SHARING_MODE_EXCLUSIVE && sharingMode != AAUDIO_SHARING_MODE_SHARED) {
         ALOGE("openStream(): unrecognized sharing mode = %d", sharingMode);
-        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+        AIDL_RETURN(AAUDIO_ERROR_ILLEGAL_ARGUMENT);
     }
 
     if (sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE
-        && AAudioClientTracker::getInstance().isExclusiveEnabled(request.getProcessId())) {
+        && AAudioClientTracker::getInstance().isExclusiveEnabled(pid)) {
         // only trust audioserver for in service indication
         bool inService = false;
         if (isCallerInService()) {
@@ -147,29 +166,125 @@
 
     if (result != AAUDIO_OK) {
         serviceStream.clear();
-        return result;
+        AIDL_RETURN(result);
     } else {
         aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
         serviceStream->setHandle(handle);
-        pid_t pid = request.getProcessId();
         AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
-        configurationOutput.copyFrom(*serviceStream);
+        paramsOut.copyFrom(*serviceStream);
+        *_paramsOut = std::move(paramsOut).parcelable();
         // Log open in MediaMetrics after we have the handle because we need the handle to
         // create the metrics ID.
         serviceStream->logOpen(handle);
         ALOGV("%s(): return handle = 0x%08X", __func__, handle);
-        return handle;
+        AIDL_RETURN(handle);
     }
 }
 
-aaudio_result_t AAudioService::closeStream(aaudio_handle_t streamHandle) {
+Status AAudioService::closeStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
     // Check permission and ownership first.
     sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
     }
-    return closeStream(serviceStream);
+    AIDL_RETURN(closeStream(serviceStream));
+}
+
+Status AAudioService::getStreamDescription(int32_t streamHandle, Endpoint* endpoint,
+                                           int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AudioEndpointParcelable endpointParcelable;
+    aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
+    if (result == AAUDIO_OK) {
+        *endpoint = std::move(endpointParcelable).parcelable();
+    }
+    AIDL_RETURN(result);
+}
+
+Status AAudioService::startStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->start());
+}
+
+Status AAudioService::pauseStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->pause());
+}
+
+Status AAudioService::stopStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->stop());
+}
+
+Status AAudioService::flushStream(int32_t streamHandle, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->flush());
+}
+
+Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+                                          int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    int32_t priority = isCallerInService()
+        ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
+    AIDL_RETURN(serviceStream->registerAudioThread(clientThreadId, priority));
+}
+
+Status AAudioService::unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+                                            int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AIDL_RETURN(serviceStream->unregisterAudioThread(clientThreadId));
+}
+
+bool AAudioService::isCallerInService() {
+    pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
+    uid_t clientUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
+    return clientPid == IPCThreadState::self()->getCallingPid() &&
+        clientUid == IPCThreadState::self()->getCallingUid();
 }
 
 aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
@@ -192,9 +307,11 @@
         // Only allow owner or the aaudio service to access the stream.
         const uid_t callingUserId = IPCThreadState::self()->getCallingUid();
         const uid_t ownerUserId = serviceStream->getOwnerUserId();
+        const uid_t clientUid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
         bool callerOwnsIt = callingUserId == ownerUserId;
-        bool serverCalling = callingUserId == mAudioClient.clientUid;
-        bool serverOwnsIt = ownerUserId == mAudioClient.clientUid;
+        bool serverCalling = callingUserId == clientUid;
+        bool serverOwnsIt = ownerUserId == clientUid;
         bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
         if (!allowed) {
             ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
@@ -205,76 +322,6 @@
     return serviceStream;
 }
 
-aaudio_result_t AAudioService::getStreamDescription(
-                aaudio_handle_t streamHandle,
-                aaudio::AudioEndpointParcelable &parcelable) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->getDescription(parcelable);
-}
-
-aaudio_result_t AAudioService::startStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->start();
-}
-
-aaudio_result_t AAudioService::pauseStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->pause();
-}
-
-aaudio_result_t AAudioService::stopStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->stop();
-}
-
-aaudio_result_t AAudioService::flushStream(aaudio_handle_t streamHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->flush();
-}
-
-aaudio_result_t AAudioService::registerAudioThread(aaudio_handle_t streamHandle,
-                                                   pid_t clientThreadId,
-                                                   int64_t /* periodNanoseconds */) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    int32_t priority = isCallerInService()
-        ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
-    return serviceStream->registerAudioThread(clientThreadId, priority);
-}
-
-aaudio_result_t AAudioService::unregisterAudioThread(aaudio_handle_t streamHandle,
-                                                     pid_t clientThreadId) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
-    if (serviceStream.get() == nullptr) {
-        ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
-        return AAUDIO_ERROR_INVALID_HANDLE;
-    }
-    return serviceStream->unregisterAudioThread(clientThreadId);
-}
-
 aaudio_result_t AAudioService::startClient(aaudio_handle_t streamHandle,
                                            const android::AudioClient& client,
                                            const audio_attributes_t *attr,
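
The service methods above now return binder::Status and report the legacy aaudio_result_t through an out-parameter, with AIDL_RETURN and VALUE_OR_RETURN_ILLEGAL_ARG_STATUS hiding the plumbing. Below is a minimal standalone sketch of that convention, illustrative only and not part of this patch; Status, ConversionResult and the error value are stand-ins, not the real Android classes.

    // Illustrative sketch only: mirrors the AIDL_RETURN / VALUE_OR_RETURN_ILLEGAL_ARG_STATUS
    // pattern. The Binder Status stays ok(); the service result travels in *_aidl_return.
    // Uses a GNU statement expression, as the real macro does.
    #include <cstdint>
    #include <optional>
    #include <utility>

    struct Status {                                  // stand-in for binder::Status
        static Status ok() { return {}; }
    };

    using result_t = int32_t;                        // stand-in for aaudio_result_t
    constexpr result_t kErrorIllegalArgument = -3;   // assumed error value for the sketch

    template <typename T>
    struct ConversionResult {                        // stand-in for the AIDL conversion result
        std::optional<T> val;
        bool ok() const { return val.has_value(); }
        T value() && { return std::move(*val); }
    };

    #define AIDL_RETURN(x) { *_aidl_return = (x); return Status::ok(); }

    #define VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(x) \
        ({ auto _tmp = (x); \
           if (!_tmp.ok()) AIDL_RETURN(kErrorIllegalArgument); \
           std::move(_tmp).value(); })

    Status startStream(int32_t streamHandle, result_t* _aidl_return) {
        // A failed conversion short-circuits: *_aidl_return holds the error, Status is still ok().
        int32_t handle = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
                ConversionResult<int32_t>{streamHandle});
        AIDL_RETURN(handle >= 0 ? 0 /* OK */ : kErrorIllegalArgument);
    }
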
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index caf48a5..7c1b796 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -24,69 +24,71 @@
 #include <media/AudioClient.h>
 
 #include <aaudio/AAudio.h>
+#include <aaudio/BnAAudioService.h>
 
 #include "binding/AAudioCommon.h"
+#include "binding/AAudioBinderAdapter.h"
 #include "binding/AAudioServiceInterface.h"
-#include "binding/IAAudioService.h"
 
 #include "AAudioServiceStreamBase.h"
 #include "AAudioStreamTracker.h"
 
 namespace android {
 
+#define AAUDIO_SERVICE_NAME  "media.aaudio"
+
 class AAudioService :
     public BinderService<AAudioService>,
-    public BnAAudioService,
-    public aaudio::AAudioServiceInterface
+    public aaudio::BnAAudioService
 {
     friend class BinderService<AAudioService>;
 
 public:
     AAudioService();
-    virtual ~AAudioService();
+    virtual ~AAudioService() = default;
+
+    aaudio::AAudioServiceInterface& asAAudioServiceInterface() {
+        return mAdapter;
+    }
 
     static const char* getServiceName() { return AAUDIO_SERVICE_NAME; }
 
     virtual status_t        dump(int fd, const Vector<String16>& args) override;
 
-    virtual void            registerClient(const sp<IAAudioClient>& client);
+    binder::Status registerClient(const ::android::sp<::aaudio::IAAudioClient>& client) override;
 
-    aaudio::aaudio_handle_t openStream(const aaudio::AAudioStreamRequest &request,
-                                       aaudio::AAudioStreamConfiguration &configurationOutput)
-                                       override;
+    binder::Status openStream(const ::aaudio::StreamRequest& request,
+                              ::aaudio::StreamParameters* paramsOut,
+                              int32_t* _aidl_return) override;
 
-    /*
-     * This is called from Binder. It checks for permissions
-     * and converts the handle passed through Binder to a stream pointer.
-     */
-    aaudio_result_t closeStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status closeStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t getStreamDescription(
-                aaudio::aaudio_handle_t streamHandle,
-                aaudio::AudioEndpointParcelable &parcelable) override;
+    binder::Status
+    getStreamDescription(int32_t streamHandle, ::aaudio::Endpoint* endpoint,
+                         int32_t* _aidl_return) override;
 
-    aaudio_result_t startStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status startStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t pauseStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status pauseStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t stopStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status stopStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t flushStream(aaudio::aaudio_handle_t streamHandle) override;
+    binder::Status flushStream(int32_t streamHandle, int32_t* _aidl_return) override;
 
-    aaudio_result_t registerAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                pid_t tid,
-                                                int64_t periodNanoseconds) override;
+    binder::Status
+    registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
+                        int32_t* _aidl_return) override;
 
-    aaudio_result_t unregisterAudioThread(aaudio::aaudio_handle_t streamHandle,
-                                                  pid_t tid) override;
+    binder::Status unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
+                                         int32_t* _aidl_return) override;
 
     aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
                                 const android::AudioClient& client,
                                 const audio_attributes_t *attr,
-                                audio_port_handle_t *clientHandle) override;
+                                audio_port_handle_t *clientHandle);
 
     aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
-                                       audio_port_handle_t clientHandle) override;
+                                       audio_port_handle_t clientHandle);
 
  // ===============================================================================
  // The following public methods are only called from the service and NOT by Binder.
@@ -101,6 +103,29 @@
     aaudio_result_t closeStream(sp<aaudio::AAudioServiceStreamBase> serviceStream);
 
 private:
+    class Adapter : public aaudio::AAudioBinderAdapter {
+    public:
+        explicit Adapter(AAudioService *service)
+                : aaudio::AAudioBinderAdapter(service),
+                  mService(service) {}
+
+        aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+                                    const android::AudioClient &client,
+                                    const audio_attributes_t *attr,
+                                    audio_port_handle_t *clientHandle) override {
+            return mService->startClient(streamHandle, client, attr, clientHandle);
+        }
+
+        aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+                                   audio_port_handle_t clientHandle) override {
+            return mService->stopClient(streamHandle, clientHandle);
+        }
+
+    private:
+        AAudioService* const mService;
+    };
+
+    Adapter mAdapter;
 
     /** @return true if the client is the audioserver
      */
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index ceefe93..13dd3d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -38,7 +38,11 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
-std::string AAudioServiceEndpoint::dump() const {
+AAudioServiceEndpoint::~AAudioServiceEndpoint() {
+    ALOGD("%s() called", __func__);
+}
+
+std::string AAudioServiceEndpoint::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
     const bool isLocked = AAudio_tryUntilTrue(
@@ -182,11 +186,12 @@
             : AUDIO_SOURCE_DEFAULT;
     audio_flags_mask_t flags;
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        flags = AUDIO_FLAG_LOW_LATENCY
-            | AAudioConvert_allowCapturePolicyToAudioFlagsMask(params->getAllowedCapturePolicy());
+        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+                | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+                        params->getAllowedCapturePolicy()));
     } else {
-        flags = AUDIO_FLAG_LOW_LATENCY
-            | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive());
+        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
+                | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
     }
     return {
             .content_type = contentType,
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index a171cb0..a7f63d3 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -22,6 +22,8 @@
 #include <mutex>
 #include <vector>
 
+#include <android-base/thread_annotations.h>
+
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalPlay.h"
 #include "core/AAudioStreamParameters.h"
@@ -41,13 +43,17 @@
         , public AAudioStreamParameters {
 public:
 
-    virtual ~AAudioServiceEndpoint() = default;
+    virtual ~AAudioServiceEndpoint();
 
     virtual std::string dump() const;
 
     virtual aaudio_result_t open(const aaudio::AAudioStreamRequest &request) = 0;
 
-    virtual aaudio_result_t close() = 0;
+    /*
+     * Perform any cleanup necessary before deleting the endpoint.
+     * This might include releasing and closing internal streams.
+     */
+    virtual void close() = 0;
 
     aaudio_result_t registerStream(android::sp<AAudioServiceStreamBase> stream);
 
@@ -137,7 +143,8 @@
     std::vector<android::sp<AAudioServiceStreamBase>> disconnectRegisteredStreams();
 
     mutable std::mutex       mLockStreams;
-    std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams;
+    std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams
+            GUARDED_BY(mLockStreams);
 
     SimpleDoubleBuffer<Timestamp>  mAtomicEndpointTimestamp;
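
mRegisteredStreams is now annotated with GUARDED_BY, and later hunks add REQUIRES and NO_THREAD_SAFETY_ANALYSIS. A small illustrative sketch of what these clang thread-safety annotations buy; the macros are spelled out inline here, whereas the patch pulls them from android-base/thread_annotations.h.

    // Illustrative sketch only. With clang's -Wthread-safety (and an annotated mutex
    // type such as libc++'s std::mutex), touching an annotated member without the
    // lock, or calling a REQUIRES method unlocked, becomes a compile-time warning.
    #include <mutex>
    #include <vector>

    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #define REQUIRES(x)   __attribute__((requires_capability(x)))

    class StreamRegistry {
    public:
        void registerStream(int streamHandle) {
            std::lock_guard<std::mutex> lock(mLockStreams);
            registerStream_l(streamHandle);               // lock held, analysis is satisfied
        }
    private:
        void registerStream_l(int streamHandle) REQUIRES(mLockStreams) {
            mRegisteredStreams.push_back(streamHandle);
        }
        mutable std::mutex mLockStreams;
        std::vector<int> mRegisteredStreams GUARDED_BY(mLockStreams);
    };
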
 
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 37d105b..bc769f0 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -35,22 +35,17 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
-AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService &audioService)
-        : mStreamInternalCapture(audioService, true) {
-    mStreamInternal = &mStreamInternalCapture;
-}
-
-AAudioServiceEndpointCapture::~AAudioServiceEndpointCapture() {
-    delete mDistributionBuffer;
+AAudioServiceEndpointCapture::AAudioServiceEndpointCapture(AAudioService& audioService)
+        : AAudioServiceEndpointShared(
+                new AudioStreamInternalCapture(audioService.asAAudioServiceInterface(), true)) {
 }
 
 aaudio_result_t AAudioServiceEndpointCapture::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAudioServiceEndpointShared::open(request);
     if (result == AAUDIO_OK) {
-        delete mDistributionBuffer;
         int distributionBufferSizeBytes = getStreamInternal()->getFramesPerBurst()
                                           * getStreamInternal()->getBytesPerFrame();
-        mDistributionBuffer = new uint8_t[distributionBufferSizeBytes];
+        mDistributionBuffer = std::make_unique<uint8_t[]>(distributionBufferSizeBytes);
     }
     return result;
 }
@@ -67,9 +62,12 @@
         int64_t mmapFramesRead = getStreamInternal()->getFramesRead();
 
         // Read audio data from stream using a blocking read.
-        result = getStreamInternal()->read(mDistributionBuffer, getFramesPerBurst(), timeoutNanos);
+        result = getStreamInternal()->read(mDistributionBuffer.get(),
+                getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
-            disconnectRegisteredStreams();
+            ALOGD("%s() read() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+            // We do not need the returned vector.
+            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() read %d / %d",
@@ -79,48 +77,14 @@
 
         // Distribute data to each active stream.
         { // brackets are for lock_guard
-
             std::lock_guard <std::mutex> lock(mLockStreams);
             for (const auto& clientStream : mRegisteredStreams) {
                 if (clientStream->isRunning() && !clientStream->isSuspended()) {
-                    int64_t clientFramesWritten = 0;
-
                     sp<AAudioServiceStreamShared> streamShared =
                             static_cast<AAudioServiceStreamShared *>(clientStream.get());
-
-                    {
-                        // Lock the AudioFifo to protect against close.
-                        std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
-                        FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
-                        if (fifo != nullptr) {
-
-                            // Determine offset between framePosition in client's stream
-                            // vs the underlying MMAP stream.
-                            clientFramesWritten = fifo->getWriteCounter();
-                            // There are two indices that refer to the same frame.
-                            int64_t positionOffset = mmapFramesRead - clientFramesWritten;
-                            streamShared->setTimestampPositionOffset(positionOffset);
-
-                            // Is the buffer too full to write a burst?
-                            if (fifo->getEmptyFramesAvailable() <
-                                    getFramesPerBurst()) {
-                                streamShared->incrementXRunCount();
-                            } else {
-                                fifo->write(mDistributionBuffer, getFramesPerBurst());
-                            }
-                            clientFramesWritten = fifo->getWriteCounter();
-                        }
-                    }
-
-                    if (clientFramesWritten > 0) {
-                        // This timestamp represents the completion of data being written into the
-                        // client buffer. It is sent to the client and used in the timing model
-                        // to decide when data will be available to read.
-                        Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
-                        streamShared->markTransferTime(timestamp);
-                    }
-
+                    streamShared->writeDataIfRoom(mmapFramesRead,
+                                                  mDistributionBuffer.get(),
+                                                  getFramesPerBurst());
                 }
             }
         }
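
The per-client FIFO bookkeeping that used to live inline in this callback now sits behind the writeDataIfRoom() call on the shared stream. The sketch below is a simplified, frame-count-only illustration of that logic, not the real helper; the field names and capacity are assumptions.

    // Illustrative sketch only: distribute one capture burst to a client stream,
    // recording the position offset and counting an xrun when the FIFO has no room.
    #include <cstdint>

    struct ClientStreamSketch {
        int64_t capacityFrames = 48 * 8;     // assumed FIFO capacity
        int64_t framesInFifo = 0;            // frames currently queued for the client
        int64_t framesWritten = 0;           // client-side write counter
        int64_t timestampPositionOffset = 0; // client frames vs. MMAP frames
        int32_t xRunCount = 0;

        void writeDataIfRoom(int64_t mmapFramesRead, int64_t burstFrames) {
            // The two counters refer to the same audio frame; keep the offset so
            // timestamps can be translated into the client's frame of reference.
            timestampPositionOffset = mmapFramesRead - framesWritten;
            if (capacityFrames - framesInFifo < burstFrames) {
                ++xRunCount;                 // not enough room for a whole burst
            } else {
                framesInFifo += burstFrames; // in the real code: write into the client FIFO
                framesWritten += burstFrames;
            }
        }
    };
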
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 971da9a..2ca43cf 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -17,6 +17,8 @@
 #ifndef AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 #define AAUDIO_SERVICE_ENDPOINT_CAPTURE_H
 
+#include <memory>
+
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalCapture.h"
 
@@ -28,16 +30,14 @@
 class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
 public:
     explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
-    virtual ~AAudioServiceEndpointCapture();
+    virtual ~AAudioServiceEndpointCapture() = default;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-
     void *callbackLoop() override;
 
 private:
-    AudioStreamInternalCapture  mStreamInternalCapture;
-    uint8_t                    *mDistributionBuffer = nullptr;
+    std::unique_ptr<uint8_t[]>  mDistributionBuffer;
 };
 
 } /* namespace aaudio */
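
With the raw new[]/delete[] pair gone, reallocating the distribution buffer on a re-open is just a unique_ptr reassignment. A tiny illustrative sketch of that RAII shape, not the real class:

    // Illustrative sketch only: the ownership shape of mDistributionBuffer after this change.
    #include <cstdint>
    #include <memory>

    class DistributionBufferSketch {
    public:
        void resize(int32_t framesPerBurst, int32_t bytesPerFrame) {
            // Any previous allocation is freed automatically when the pointer is reassigned.
            mBuffer = std::make_unique<uint8_t[]>(
                    static_cast<size_t>(framesPerBurst) * bytesPerFrame);
        }
        uint8_t* data() { return mBuffer.get(); }   // same role as mDistributionBuffer.get()
    private:
        std::unique_ptr<uint8_t[]> mBuffer;         // no explicit destructor needed
    };
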
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 0843e0b..a08098c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -51,8 +51,6 @@
         : mMmapStream(nullptr)
         , mAAudioService(audioService) {}
 
-AAudioServiceEndpointMMAP::~AAudioServiceEndpointMMAP() {}
-
 std::string AAudioServiceEndpointMMAP::dump() const {
     std::stringstream result;
 
@@ -72,24 +70,54 @@
 
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAUDIO_OK;
+    copyFrom(request.getConstantConfiguration());
+    mMmapClient.attributionSource = request.getAttributionSource();
+    // TODO b/182392769: use attribution source util
+    mMmapClient.attributionSource.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    mMmapClient.attributionSource.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
+
+    audio_format_t audioFormat = getFormat();
+
+    // FLOAT is not directly supported by the HAL, so ask for a 32-bit integer format instead.
+    if (audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
+        // TODO remove these logs when finished debugging.
+        ALOGD("%s() change format from %d to 32_BIT", __func__, audioFormat);
+        audioFormat = AUDIO_FORMAT_PCM_32_BIT;
+    }
+
+    result = openWithFormat(audioFormat);
+    if (result == AAUDIO_OK) return result;
+
+    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_32_BIT) {
+        ALOGD("%s() 32_BIT failed, perhaps due to format. Try again with 24_BIT_PACKED", __func__);
+        audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+        result = openWithFormat(audioFormat);
+    }
+    if (result == AAUDIO_OK) return result;
+
+    // TODO The HAL and AudioFlinger should be recommending a format if the open fails.
+    //      But that recommendation is not propagating back from the HAL.
+    //      So for now just try something very likely to work.
+    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_24_BIT_PACKED) {
+        ALOGD("%s() 24_BIT failed, perhaps due to format. Try again with 16_BIT", __func__);
+        audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+        result = openWithFormat(audioFormat);
+    }
+    return result;
+}
+
+aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(audio_format_t audioFormat) {
+    aaudio_result_t result = AAUDIO_OK;
     audio_config_base_t config;
     audio_port_handle_t deviceId;
 
-    copyFrom(request.getConstantConfiguration());
-
     const audio_attributes_t attributes = getAudioAttributesFrom(this);
 
-    mMmapClient.clientUid = request.getUserId();
-    mMmapClient.clientPid = request.getProcessId();
-    mMmapClient.packageName.setTo(String16(""));
-
     mRequestedDeviceId = deviceId = getDeviceId();
 
     // Fill in config
-    audio_format_t audioFormat = getFormat();
-    if (audioFormat == AUDIO_FORMAT_DEFAULT || audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        audioFormat = AUDIO_FORMAT_PCM_16_BIT;
-    }
     config.format = audioFormat;
 
     int32_t aaudioSampleRate = getSampleRate();
@@ -137,8 +165,8 @@
                                                           this, // callback
                                                           mMmapStream,
                                                           &mPortHandle);
-    ALOGD("%s() mMapClient.uid = %d, pid = %d => portHandle = %d\n",
-          __func__, mMmapClient.clientUid,  mMmapClient.clientPid, mPortHandle);
+    ALOGD("%s() mMapClient.attributionSource = %s => portHandle = %d\n",
+          __func__, mMmapClient.attributionSource.toString().c_str(), mPortHandle);
     if (status != OK) {
         // This can happen if the resource is busy or the config does
         // not match the hardware.
@@ -186,8 +214,9 @@
     setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
     if (!isBufferShareable) {
         // Exclusive mode can only be used by the service because the FD cannot be shared.
-        uid_t audioServiceUid = getuid();
-        if ((mMmapClient.clientUid != audioServiceUid) &&
+        int32_t audioServiceUid =
+            VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+        if ((mMmapClient.attributionSource.uid != audioServiceUid) &&
             getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
             ALOGW("%s() - exclusive FD cannot be used by client", __func__);
             result = AAUDIO_ERROR_UNAVAILABLE;
@@ -208,6 +237,12 @@
         result = AAUDIO_ERROR_INTERNAL;
         goto error;
     }
+    // Call into the HAL to make sure the transport FD could be closed by Binder.
+    // This is a tricky workaround for a problem in Binder.
+    // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    struct audio_mmap_position position;
+    mMmapStream->getMmapPosition(&position);
+
     mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
     setFormat(config.format);
     setSampleRate(config.sample_rate);
@@ -226,7 +261,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::close() {
+void AAudioServiceEndpointMMAP::close() {
     if (mMmapStream != nullptr) {
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
@@ -235,8 +270,6 @@
         // FIXME Make closing synchronous.
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
-
-    return AAUDIO_OK;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
@@ -333,7 +366,10 @@
 // This is called by AudioFlinger when it wants to destroy a stream.
 void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
     ALOGD("%s(portHandle = %d) called", __func__, portHandle);
-    std::thread asyncTask(&AAudioServiceEndpointMMAP::handleTearDownAsync, this, portHandle);
+    android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+    std::thread asyncTask([holdEndpoint, portHandle]() {
+        holdEndpoint->handleTearDownAsync(portHandle);
+    });
     asyncTask.detach();
 }
 
@@ -354,9 +390,11 @@
     ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
     if (getDeviceId() != deviceId) {
         if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-            std::thread asyncTask([this, deviceId]() {
-                disconnectRegisteredStreams();
-                setDeviceId(deviceId);
+            android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+            std::thread asyncTask([holdEndpoint, deviceId]() {
+                ALOGD("onRoutingChanged() asyncTask launched");
+                holdEndpoint->disconnectRegisteredStreams();
+                holdEndpoint->setDeviceId(deviceId);
             });
             asyncTask.detach();
         } else {
@@ -380,3 +418,18 @@
     parcelable.mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
     return AAUDIO_OK;
 }
+
+aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
+                                                               int64_t *timeNanos)
+{
+    if (!mExternalPositionSupported) {
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    status_t status = mMmapStream->getExternalPosition(positionFrames, timeNanos);
+    if (status == INVALID_OPERATION) {
+        // getExternalPosition is not supported. Set mExternalPositionSupported to false
+        // so that the call will not go to the HAL next time.
+        mExternalPositionSupported = false;
+    }
+    return AAudioConvert_androidToAAudioResult(status);
+}
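
open() above walks a fixed fallback chain when the HAL rejects a format. The same chain written as a loop, as an illustrative sketch; the enum values and the pretend HAL below are assumptions, not the AAudio or HAL APIs.

    // Illustrative sketch only: try 32-bit in place of FLOAT, then 24_BIT_PACKED,
    // then 16_BIT, retrying only while the open attempt reports "unavailable".
    #include <array>

    enum class Format { Float, Pcm32, Pcm24Packed, Pcm16 };
    enum class Result { Ok, Unavailable, OtherError };

    // Stand-in for openWithFormat(); a real HAL decides which formats it accepts.
    static Result openWithFormat(Format f) {
        return f == Format::Pcm16 ? Result::Ok : Result::Unavailable;
    }

    static Result openWithFallback(Format requested) {
        // FLOAT is never opened directly; start from 32-bit integer instead.
        const std::array<Format, 3> candidates = {
                requested == Format::Float ? Format::Pcm32 : requested,
                Format::Pcm24Packed,
                Format::Pcm16,
        };
        Result result = Result::Unavailable;
        for (Format f : candidates) {
            result = openWithFormat(f);
            if (result != Result::Unavailable) break;  // success or a non-retryable error
        }
        return result;
    }
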
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3d10861..5a53885 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -44,13 +44,13 @@
 public:
     explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
 
-    virtual ~AAudioServiceEndpointMMAP();
+    virtual ~AAudioServiceEndpointMMAP() = default;
 
     std::string dump() const override;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close() override;
+    void close() override;
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -85,7 +85,12 @@
         return mHardwareTimeOffsetNanos;
     }
 
+    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+
 private:
+
+    aaudio_result_t openWithFormat(audio_format_t audioFormat);
+
     MonotonicCounter                          mFramesTransferred;
 
     // Interface to the AudioFlinger MMAP support.
@@ -101,6 +106,8 @@
 
     int64_t                                   mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
+    bool                                      mExternalPositionSupported = true;
+
 };
 
 } /* namespace aaudio */
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index bda4b90..4e46033 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -41,10 +41,9 @@
 
 #define BURSTS_PER_BUFFER_DEFAULT   2
 
-AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService &audioService)
-        : mStreamInternalPlay(audioService, true) {
-    mStreamInternal = &mStreamInternalPlay;
-}
+AAudioServiceEndpointPlay::AAudioServiceEndpointPlay(AAudioService& audioService)
+        : AAudioServiceEndpointShared(
+                new AudioStreamInternalPlay(audioService.asAAudioServiceInterface(), true)) {}
 
 aaudio_result_t AAudioServiceEndpointPlay::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAudioServiceEndpointShared::open(request);
@@ -99,10 +98,11 @@
 
                 {
                     // Lock the AudioFifo to protect against close.
-                    std::lock_guard <std::mutex> lock(streamShared->getAudioDataQueueLock());
-
-                    FifoBuffer *fifo = streamShared->getAudioDataFifoBuffer_l();
-                    if (fifo != nullptr) {
+                    std::lock_guard <std::mutex> lock(streamShared->audioDataQueueLock);
+                    std::shared_ptr<SharedRingBuffer> audioDataQueue
+                            = streamShared->getAudioDataQueue_l();
+                    std::shared_ptr<FifoBuffer> fifo;
+                    if (audioDataQueue && (fifo = audioDataQueue->getFifoBuffer())) {
 
                         // Determine offset between framePosition in client's stream
                         // vs the underlying MMAP stream.
@@ -145,7 +145,9 @@
         result = getStreamInternal()->write(mMixer.getOutputBuffer(),
                                             getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
-            AAudioServiceEndpointShared::disconnectRegisteredStreams();
+            ALOGD("%s() write() returned AAUDIO_ERROR_DISCONNECTED", __func__);
+            // We do not need the returned vector.
+            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() wrote %d / %d",
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.h b/services/oboeservice/AAudioServiceEndpointPlay.h
index 981e430..160a1de 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.h
+++ b/services/oboeservice/AAudioServiceEndpointPlay.h
@@ -45,7 +45,6 @@
     void *callbackLoop() override;
 
 private:
-    AudioStreamInternalPlay  mStreamInternalPlay; // for playing output of mixer
     bool                     mLatencyTuningEnabled = false; // TODO implement tuning
     AAudioMixer              mMixer;    //
 };
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index dc21886..5fbcadb 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -40,6 +40,9 @@
 // This is the maximum size in frames. The effective size can be tuned smaller at runtime.
 #define DEFAULT_BUFFER_CAPACITY   (48 * 8)
 
+AAudioServiceEndpointShared::AAudioServiceEndpointShared(AudioStreamInternal *streamInternal)
+    : mStreamInternal(streamInternal) {}
+
 std::string AAudioServiceEndpointShared::dump() const {
     std::stringstream result;
 
@@ -84,24 +87,31 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointShared::close() {
-    return getStreamInternal()->releaseCloseFinal();
+void AAudioServiceEndpointShared::close() {
+    stopSharingThread();
+    getStreamInternal()->safeReleaseClose();
 }
 
 // Glue between C and C++ callbacks.
 static void *aaudio_endpoint_thread_proc(void *arg) {
     assert(arg != nullptr);
+    ALOGD("%s() called", __func__);
 
-    // The caller passed in a smart pointer to prevent the endpoint from getting deleted
-    // while the thread was launching.
-    sp<AAudioServiceEndpointShared> *endpointForThread =
-            static_cast<sp<AAudioServiceEndpointShared> *>(arg);
-    sp<AAudioServiceEndpointShared> endpoint = *endpointForThread;
-    delete endpointForThread; // Just use scoped smart pointer. Don't need this anymore.
+    // Prevent the stream from being deleted while being used.
+    // This is just for extra safety. It is probably not needed because
+    // this callback should be joined before the stream is closed.
+    AAudioServiceEndpointShared *endpointPtr =
+        static_cast<AAudioServiceEndpointShared *>(arg);
+    android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
+    // Balance the incStrong() in startSharingThread_l().
+    endpoint->decStrong(nullptr);
+
     void *result = endpoint->callbackLoop();
     // Close now so that the HW resource is freed and we can open a new device.
     if (!endpoint->isConnected()) {
-        endpoint->close();
+        ALOGD("%s() call safeReleaseCloseFromCallback()", __func__);
+        // Release and close under a lock with no check for callback collisions.
+        endpoint->getStreamInternal()->safeReleaseCloseInternal();
     }
 
     return result;
@@ -113,38 +123,39 @@
                           * AAUDIO_NANOS_PER_SECOND
                           / getSampleRate();
     mCallbackEnabled.store(true);
-    // Pass a smart pointer so the thread can hold a reference.
-    sp<AAudioServiceEndpointShared> *endpointForThread = new sp<AAudioServiceEndpointShared>(this);
+    // Prevent this object from getting deleted before the thread has a chance to create
+    // its strong pointer. Assume the thread will call decStrong().
+    this->incStrong(nullptr);
     aaudio_result_t result = getStreamInternal()->createThread(periodNanos,
                                                                aaudio_endpoint_thread_proc,
-                                                               endpointForThread);
+                                                               this);
     if (result != AAUDIO_OK) {
-        // The thread can't delete it so we have to do it here.
-        delete endpointForThread;
+        this->decStrong(nullptr); // Because the thread won't do it.
     }
     return result;
 }
 
 aaudio_result_t aaudio::AAudioServiceEndpointShared::stopSharingThread() {
     mCallbackEnabled.store(false);
-    aaudio_result_t result = getStreamInternal()->joinThread(NULL);
-    return result;
+    return getStreamInternal()->joinThread(NULL);
 }
 
-aaudio_result_t AAudioServiceEndpointShared::startStream(sp<AAudioServiceStreamBase> sharedStream,
-                                                         audio_port_handle_t *clientHandle) {
+aaudio_result_t AAudioServiceEndpointShared::startStream(
+        sp<AAudioServiceStreamBase> sharedStream,
+        audio_port_handle_t *clientHandle)
+        NO_THREAD_SAFETY_ANALYSIS {
     aaudio_result_t result = AAUDIO_OK;
 
     {
         std::lock_guard<std::mutex> lock(mLockStreams);
         if (++mRunningStreamCount == 1) { // atomic
-            result = getStreamInternal()->requestStart();
+            result = getStreamInternal()->systemStart();
             if (result != AAUDIO_OK) {
                 --mRunningStreamCount;
             } else {
                 result = startSharingThread_l();
                 if (result != AAUDIO_OK) {
-                    getStreamInternal()->requestStop();
+                    getStreamInternal()->systemStopFromApp();
                     --mRunningStreamCount;
                 }
             }
@@ -158,7 +169,7 @@
         if (result != AAUDIO_OK) {
             if (--mRunningStreamCount == 0) { // atomic
                 stopSharingThread();
-                getStreamInternal()->requestStop();
+                getStreamInternal()->systemStopFromApp();
             }
         }
     }
@@ -173,7 +184,7 @@
 
     if (--mRunningStreamCount == 0) { // atomic
         stopSharingThread(); // the sharing thread locks mLockStreams
-        getStreamInternal()->requestStop();
+        getStreamInternal()->systemStopFromApp();
     }
     return AAUDIO_OK;
 }
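
The sharing thread now keeps the endpoint alive through a manual incStrong()/decStrong() handoff instead of a heap-allocated sp<>. Below is a standard-library analogue of that handoff, illustrative only; std::shared_ptr stands in for the RefBase strong count and none of these names are the real classes.

    // Illustrative sketch only: the launcher takes an extra strong reference before
    // the thread exists (like incStrong(nullptr)); the thread adopts it, so the
    // endpoint cannot disappear between thread creation and the first line of the proc.
    #include <chrono>
    #include <memory>
    #include <thread>

    class EndpointSketch : public std::enable_shared_from_this<EndpointSketch> {
    public:
        void startSharingThread() {
            // Requires that this object is already owned by a shared_ptr.
            std::shared_ptr<EndpointSketch> self = shared_from_this();
            std::thread([self]() { self->callbackLoop(); }).detach();
            // If thread creation throws, 'self' is destroyed during unwinding,
            // which plays the role of the decStrong() on the error path.
        }
        void callbackLoop() { /* move bursts until asked to stop */ }
    };

    int main() {
        auto endpoint = std::make_shared<EndpointSketch>();
        endpoint->startSharingThread();
        std::this_thread::sleep_for(std::chrono::milliseconds(10));  // demo only
    }
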
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index bfc1744..8357567 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -20,6 +20,8 @@
 #include <atomic>
 #include <mutex>
 
+#include <android-base/thread_annotations.h>
+
 #include "AAudioServiceEndpoint.h"
 #include "client/AudioStreamInternal.h"
 #include "client/AudioStreamInternalPlay.h"
@@ -35,12 +37,15 @@
 class AAudioServiceEndpointShared : public AAudioServiceEndpoint {
 
 public:
+    explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
+
+    virtual ~AAudioServiceEndpointShared() = default;
 
     std::string dump() const override;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close() override;
+    void close() override;
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -54,18 +59,18 @@
 
     virtual void   *callbackLoop() = 0;
 
-protected:
-
     AudioStreamInternal *getStreamInternal() const {
-        return mStreamInternal;
+        return mStreamInternal.get();
     };
 
-    aaudio_result_t          startSharingThread_l();
+protected:
+
+    aaudio_result_t          startSharingThread_l() REQUIRES(mLockStreams);
 
     aaudio_result_t          stopSharingThread();
 
-    // pointer to object statically allocated in subclasses
-    AudioStreamInternal     *mStreamInternal = nullptr;
+    // An MMAP stream that is shared by multiple clients.
+    android::sp<AudioStreamInternal> mStreamInternal;
 
     std::atomic<bool>        mCallbackEnabled{false};
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 663dae2..34ddd4d 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -26,7 +26,6 @@
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
 
-#include "binding/IAAudioService.h"
 #include "binding/AAudioServiceMessage.h"
 #include "core/AudioGlobal.h"
 #include "utility/AudioClock.h"
@@ -40,22 +39,23 @@
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
+using content::AttributionSourceState;
+
 /**
  * Base class for streams in the service.
  * @return
  */
 
 AAudioServiceStreamBase::AAudioServiceStreamBase(AAudioService &audioService)
-        : mUpMessageQueue(nullptr)
-        , mTimestampThread("AATime")
+        : mTimestampThread("AATime")
         , mAtomicStreamTimestamp()
         , mAudioService(audioService) {
-    mMmapClient.clientUid = -1;
-    mMmapClient.clientPid = -1;
-    mMmapClient.packageName = String16("");
+    mMmapClient.attributionSource = AttributionSourceState();
 }
 
 AAudioServiceStreamBase::~AAudioServiceStreamBase() {
+    ALOGD("%s() called", __func__);
+
     // May not be set if open failed.
     if (mMetricsId.size() > 0) {
         mediametrics::LogItem(mMetricsId)
@@ -67,8 +67,7 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                        || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
+                        || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
                         "service stream %p still open, state = %d",
                         this, getState());
 }
@@ -82,7 +81,7 @@
 
     result << "    0x" << std::setfill('0') << std::setw(8) << std::hex << mHandle
            << std::dec << std::setfill(' ') ;
-    result << std::setw(6) << mMmapClient.clientUid;
+    result << std::setw(6) << mMmapClient.attributionSource.uid;
     result << std::setw(7) << mClientHandle;
     result << std::setw(4) << (isRunning() ? "yes" : " no");
     result << std::setw(6) << getState();
@@ -128,9 +127,12 @@
     AAudioEndpointManager &mEndpointManager = AAudioEndpointManager::getInstance();
     aaudio_result_t result = AAUDIO_OK;
 
-    mMmapClient.clientUid = request.getUserId();
-    mMmapClient.clientPid = request.getProcessId();
-    mMmapClient.packageName.setTo(String16("")); // TODO What should we do here?
+    mMmapClient.attributionSource = request.getAttributionSource();
+    // TODO b/182392769: use attribution source util
+    mMmapClient.attributionSource.uid = VALUE_OR_FATAL(
+        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
+    mMmapClient.attributionSource.pid = VALUE_OR_FATAL(
+        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     // Limit scope of lock to avoid recursive lock in close().
     {
@@ -140,7 +142,7 @@
             return AAUDIO_ERROR_INVALID_STATE;
         }
 
-        mUpMessageQueue = new SharedRingBuffer();
+        mUpMessageQueue = std::make_shared<SharedRingBuffer>();
         result = mUpMessageQueue->allocate(sizeof(AAudioServiceMessage),
                                            QUEUE_UP_CAPACITY_COMMANDS);
         if (result != AAUDIO_OK) {
@@ -179,6 +181,8 @@
         return AAUDIO_OK;
     }
 
+    // This will call stopTimestampThread() and also stop the stream,
+    // just in case it was not already stopped.
     stop_l();
 
     aaudio_result_t result = AAUDIO_OK;
@@ -194,13 +198,6 @@
         mServiceEndpoint.clear(); // endpoint will hold the pointer after this method returns.
     }
 
-    {
-        std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
-        stopTimestampThread();
-        delete mUpMessageQueue;
-        mUpMessageQueue = nullptr;
-    }
-
     setState(AAUDIO_STREAM_STATE_CLOSED);
 
     mediametrics::LogItem(mMetricsId)
@@ -231,7 +228,7 @@
     aaudio_result_t result = AAUDIO_OK;
 
     if (auto state = getState();
-        state == AAUDIO_STREAM_STATE_CLOSED || state == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        state == AAUDIO_STREAM_STATE_CLOSED || isDisconnected_l()) {
         ALOGW("%s() already CLOSED, returns INVALID_STATE, handle = %d",
                 __func__, getHandle());
         return AAUDIO_ERROR_INVALID_STATE;
@@ -263,8 +260,14 @@
     sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
     setState(AAUDIO_STREAM_STATE_STARTED);
     mThreadEnabled.store(true);
+    // Make sure this object does not get deleted before the run() method
+    // can protect it by making a strong pointer.
+    incStrong(nullptr); // See run() method.
     result = mTimestampThread.start(this);
-    if (result != AAUDIO_OK) goto error;
+    if (result != AAUDIO_OK) {
+        decStrong(nullptr); // run() can't do it so we have to do it here.
+        goto error;
+    }
 
     return result;
 
@@ -293,10 +296,6 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp();
-
     result = stopTimestampThread();
     if (result != AAUDIO_OK) {
         disconnect_l();
@@ -342,10 +341,12 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
-    // Send it now because the timestamp gets rounded up when stopStream() is called below.
-    // Also we don't need the timestamps while we are shutting down.
-    sendCurrentTimestamp(); // warning - this calls a virtual function
+    // Temporarily unlock because we are joining the timestamp thread and it may try
+    // to acquire mLock.
+    mLock.unlock();
     result = stopTimestampThread();
+    mLock.lock();
+
     if (result != AAUDIO_OK) {
         disconnect_l();
         return result;
@@ -405,15 +406,21 @@
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> TIMESTAMPS", __func__, getTypeText());
+    // Hold onto the ref counted stream until the end.
+    android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
+    // Balance the incStrong from when the thread was launched.
+    holdStream->decStrong(nullptr);
+
     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
     timestampScheduler.start(AudioClock::getNanoseconds());
     int64_t nextTime = timestampScheduler.nextAbsoluteTime();
     int32_t loopCount = 0;
+    aaudio_result_t result = AAUDIO_OK;
     while(mThreadEnabled.load()) {
         loopCount++;
         if (AudioClock::getNanoseconds() >= nextTime) {
-            aaudio_result_t result = sendCurrentTimestamp();
+            result = sendCurrentTimestamp();
             if (result != AAUDIO_OK) {
                 ALOGE("%s() timestamp thread got result = %d", __func__, result);
                 break;
@@ -425,6 +432,11 @@
             AudioClock::sleepUntilNanoTime(nextTime);
         }
     }
+    // This was moved from the calls in stop_l() and pause_l(), which could cause a deadlock
+    // if it resulted in a call to disconnect.
+    if (result == AAUDIO_OK) {
+        (void) sendCurrentTimestamp();
+    }
     ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
           __func__, getTypeText(), loopCount);
 }
@@ -435,8 +447,7 @@
 }
 
 void AAudioServiceStreamBase::disconnect_l() {
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
-        && getState() != AAUDIO_STREAM_STATE_CLOSED) {
+    if (!isDisconnected_l() && getState() != AAUDIO_STREAM_STATE_CLOSED) {
 
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
@@ -444,7 +455,7 @@
             .record();
 
         sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected_l(true);
     }
 }
 
@@ -514,12 +525,8 @@
         ALOGE("%s(): mUpMessageQueue null! - stream not open", __func__);
         return true;
     }
-    int32_t framesAvailable = mUpMessageQueue->getFifoBuffer()
-        ->getFullFramesAvailable();
-    int32_t capacity = mUpMessageQueue->getFifoBuffer()
-        ->getBufferCapacityInFrames();
     // Is it half full or more
-    return framesAvailable >= (capacity / 2);
+    return mUpMessageQueue->getFractionalFullness() >= 0.5;
 }
 
 aaudio_result_t AAudioServiceStreamBase::writeUpMessageQueue(AAudioServiceMessage *command) {
@@ -604,14 +611,3 @@
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
-
-int32_t AAudioServiceStreamBase::incrementServiceReferenceCount_l() {
-    return ++mCallingCount;
-}
-
-int32_t AAudioServiceStreamBase::decrementServiceReferenceCount_l() {
-    int32_t count = --mCallingCount;
-    // Each call to increment should be balanced with one call to decrement.
-    assert(count >= 0);
-    return count;
-}
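
stop_l() above now drops mLock around stopTimestampThread() because the timestamp thread may itself try to take mLock before it exits. A minimal illustrative sketch of that deadlock and the fix, using plain std::mutex and std::thread rather than the AAudio classes:

    // Illustrative sketch only: joining a worker that still needs the caller's mutex
    // deadlocks unless the caller releases the mutex for the duration of the join.
    #include <chrono>
    #include <mutex>
    #include <thread>

    class StreamSketch {
    public:
        void start() {
            std::lock_guard<std::mutex> guard(mLock);
            mEnabled = true;
            mWorker = std::thread([this]() { workerLoop(); });
        }
        void stop() {
            std::unique_lock<std::mutex> lock(mLock);
            mEnabled = false;
            // The worker must take mLock to observe mEnabled and return, so holding
            // the lock across join() would deadlock. Release it just for the join.
            lock.unlock();
            if (mWorker.joinable()) mWorker.join();
            lock.lock();
            // ... continue with work that still requires mLock ...
        }
    private:
        void workerLoop() {
            while (true) {
                {
                    std::lock_guard<std::mutex> guard(mLock);  // same lock the stopper uses
                    if (!mEnabled) return;
                }
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            }
        }
        std::mutex mLock;
        bool mEnabled = false;
        std::thread mWorker;
    };
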
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 94cc980..976996d 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -20,13 +20,16 @@
 #include <assert.h>
 #include <mutex>
 
+#include <android-base/thread_annotations.h>
+#include <media/AidlConversion.h>
 #include <media/AudioClient.h>
 #include <utils/RefBase.h>
 
 #include "fifo/FifoBuffer.h"
-#include "binding/IAAudioService.h"
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceMessage.h"
+#include "binding/AAudioStreamRequest.h"
+#include "core/AAudioStreamParameters.h"
 #include "utility/AAudioUtilities.h"
 #include "utility/AudioClock.h"
 
@@ -77,7 +80,7 @@
     // because we had to wait until we generated the handle.
     void logOpen(aaudio_handle_t streamHandle);
 
-    aaudio_result_t close();
+    aaudio_result_t close() EXCLUDES(mLock);
 
     /**
      * Start the flow of audio data.
@@ -85,7 +88,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
      */
-    aaudio_result_t start();
+    aaudio_result_t start() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data so that start() can resume without loss of data.
@@ -93,7 +96,7 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause();
+    aaudio_result_t pause() EXCLUDES(mLock);
 
     /**
      * Stop the flow of data after the currently queued data has finished playing.
@@ -102,14 +105,14 @@
      * An AAUDIO_SERVICE_EVENT_STOPPED will be sent to the client when complete.
      *
      */
-    aaudio_result_t stop();
+    aaudio_result_t stop() EXCLUDES(mLock);
 
     /**
      * Discard any data held by the underlying HAL or Service.
      *
      * An AAUDIO_SERVICE_EVENT_FLUSHED will be sent to the client when complete.
      */
-    aaudio_result_t flush();
+    aaudio_result_t flush() EXCLUDES(mLock);
 
     virtual aaudio_result_t startClient(const android::AudioClient& client,
                                         const audio_attributes_t *attr __unused,
@@ -123,9 +126,9 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority);
+    aaudio_result_t registerAudioThread(pid_t clientThreadId, int priority) EXCLUDES(mLock);
 
-    aaudio_result_t unregisterAudioThread(pid_t clientThreadId);
+    aaudio_result_t unregisterAudioThread(pid_t clientThreadId) EXCLUDES(mLock);
 
     bool isRunning() const {
         return mState == AAUDIO_STREAM_STATE_STARTED;
@@ -134,7 +137,7 @@
     /**
      * Fill in a parcelable description of stream.
      */
-    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable);
+    aaudio_result_t getDescription(AudioEndpointParcelable &parcelable) EXCLUDES(mLock);
 
     void setRegisteredThread(pid_t pid) {
         mRegisteredClientThread = pid;
@@ -150,18 +153,20 @@
 
     void run() override; // to implement Runnable
 
-    void disconnect();
+    void disconnect() EXCLUDES(mLock);
 
     const android::AudioClient &getAudioClient() {
         return mMmapClient;
     }
 
     uid_t getOwnerUserId() const {
-        return mMmapClient.clientUid;
+        return VALUE_OR_FATAL(android::aidl2legacy_int32_t_uid_t(
+                mMmapClient.attributionSource.uid));
     }
 
     pid_t getOwnerProcessId() const {
-        return mMmapClient.clientPid;
+        return VALUE_OR_FATAL(android::aidl2legacy_int32_t_pid_t(
+                mMmapClient.attributionSource.pid));
     }
 
     aaudio_handle_t getHandle() const {
@@ -208,25 +213,6 @@
         return mSuspended;
     }
 
-    /**
-     * Atomically increment the number of active references to the stream by AAudioService.
-     *
-     * This is called under a global lock in AAudioStreamTracker.
-     *
-     * @return value after the increment
-     */
-    int32_t incrementServiceReferenceCount_l();
-
-    /**
-     * Atomically decrement the number of active references to the stream by AAudioService.
-     * This should only be called after incrementServiceReferenceCount_l().
-     *
-     * This is called under a global lock in AAudioStreamTracker.
-     *
-     * @return value after the decrement
-     */
-    int32_t decrementServiceReferenceCount_l();
-
     bool isCloseNeeded() const {
         return mCloseNeeded.load();
     }
@@ -249,11 +235,10 @@
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
                          aaudio_sharing_mode_t sharingMode);
 
-    // These must be called under mLock
-    virtual aaudio_result_t close_l();
-    virtual aaudio_result_t pause_l();
-    virtual aaudio_result_t stop_l();
-    void disconnect_l();
+    virtual aaudio_result_t close_l() REQUIRES(mLock);
+    virtual aaudio_result_t pause_l() REQUIRES(mLock);
+    virtual aaudio_result_t stop_l() REQUIRES(mLock);
+    void disconnect_l() REQUIRES(mLock);
 
     void setState(aaudio_stream_state_t state);
 
@@ -265,7 +250,7 @@
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
 
-    aaudio_result_t sendCurrentTimestamp();
+    aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
@@ -282,10 +267,17 @@
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
 
+    bool isDisconnected_l() const REQUIRES(mLock) {
+        return mDisconnected;
+    }
+    void setDisconnected_l(bool flag) REQUIRES(mLock) {
+        mDisconnected = flag;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
-    SharedRingBuffer*       mUpMessageQueue;
     std::mutex              mUpMessageQueueLock;
+    std::shared_ptr<SharedRingBuffer> mUpMessageQueue;
 
     AAudioThread            mTimestampThread;
     // This is used by one thread to tell another thread to exit. So it must be atomic.
@@ -331,18 +323,19 @@
     aaudio_handle_t         mHandle = -1;
     bool                    mFlowing = false;
 
-    // This is modified under a global lock in AAudioStreamTracker.
-    int32_t                 mCallingCount = 0;
-
-    // This indicates that a stream that is being referenced by a binder call needs to closed.
-    std::atomic<bool>       mCloseNeeded{false};
+    // This indicates that a stream that is being referenced by a binder call
+    // needs to be closed.
+    std::atomic<bool>       mCloseNeeded{false}; // TODO remove
 
     // This indicate that a running stream should not be processed because of an error,
     // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
     std::atomic<bool>       mSuspended{false};
 
+    bool                    mDisconnected GUARDED_BY(mLock) {false};
+
+protected:
     // Locking order is important.
-    // Always acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
+    // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
     std::mutex              mLock; // Prevent start/stop/close etcetera from colliding
 };
 
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 54d7d06..57dc1ab 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 
 #include <atomic>
+#include <inttypes.h>
 #include <iomanip>
 #include <iostream>
 #include <stdint.h>
@@ -162,7 +163,8 @@
     return result;
 }
 
-// Get timestamp that was written by getFreeRunningPosition()
+// Get timestamp from presentation position.
+// If it fails, get timestamp that was written by getFreeRunningPosition()
 aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
 
@@ -174,7 +176,17 @@
     sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
             static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
 
-    // TODO Get presentation timestamp from the HAL
+    // Disable this code temporarily because the HAL is not returning
+    // a useful result.
+#if 0
+    uint64_t position;
+    if (serviceEndpointMMAP->getExternalPosition(&position, timeNanos) == AAUDIO_OK) {
+        ALOGD("%s() getExternalPosition() says pos = %" PRIi64 ", time = %" PRIi64,
+                __func__, position, *timeNanos);
+        *positionFrames = (int64_t) position;
+        return AAUDIO_OK;
+    } else
+#endif
     if (mAtomicStreamTimestamp.isValid()) {
         Timestamp timestamp = mAtomicStreamTimestamp.read();
         *positionFrames = timestamp.getPosition();
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 5902613..667465a 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -19,6 +19,7 @@
 
 #include <atomic>
 
+#include <android-base/thread_annotations.h>
 #include <android-base/unique_fd.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <media/MmapStreamCallback.h>
@@ -34,10 +35,8 @@
 #include "TimestampScheduler.h"
 #include "utility/MonotonicCounter.h"
 
-
 namespace aaudio {
 
-
 /**
  * These corresponds to an EXCLUSIVE mode MMAP client stream.
  * It has exclusive use of one AAudioServiceEndpointMMAP to communicate with the underlying
@@ -68,13 +67,14 @@
      * This is not guaranteed to be synchronous but it currently is.
      * An AAUDIO_SERVICE_EVENT_PAUSED will be sent to the client when complete.
     */
-    aaudio_result_t pause_l() override;
+    aaudio_result_t pause_l() REQUIRES(mLock) override;
 
-    aaudio_result_t stop_l() override;
+    aaudio_result_t stop_l() REQUIRES(mLock) override;
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
+            int64_t *timeNanos) EXCLUDES(mLock) override;
 
     aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 01b1c2e..c665cda 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -24,8 +24,6 @@
 
 #include <aaudio/AAudio.h>
 
-#include "binding/IAAudioService.h"
-
 #include "binding/AAudioServiceMessage.h"
 #include "AAudioServiceStreamBase.h"
 #include "AAudioServiceStreamShared.h"
@@ -54,19 +52,26 @@
     return result.str();
 }
 
-std::string AAudioServiceStreamShared::dump() const {
+std::string AAudioServiceStreamShared::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
 
+    const bool isLocked = AAudio_tryUntilTrue(
+            [this]()->bool { return audioDataQueueLock.try_lock(); } /* f */,
+            50 /* times */,
+            20 /* sleepMs */);
+    if (!isLocked) {
+        result << "AAudioServiceStreamShared may be deadlocked\n";
+    }
+
     result << AAudioServiceStreamBase::dump();
 
-    auto fifo = mAudioDataQueue->getFifoBuffer();
-    int32_t readCounter = fifo->getReadCounter();
-    int32_t writeCounter = fifo->getWriteCounter();
-    result << std::setw(10) << writeCounter;
-    result << std::setw(10) << readCounter;
-    result << std::setw(8) << (writeCounter - readCounter);
+    result << mAudioDataQueue->dump();
     result << std::setw(8) << getXRunCount();
 
+    if (isLocked) {
+        audioDataQueueLock.unlock();
+    }
+
     return result.str();
 }
 
@@ -105,7 +110,7 @@
     }
     int32_t capacityInFrames = numBursts * framesPerBurst;
 
-    // Final sanity check.
+    // Final range check.
     if (capacityInFrames > MAX_FRAMES_PER_BUFFER) {
         ALOGE("calculateBufferCapacity() calc capacity %d > max %d",
               capacityInFrames, MAX_FRAMES_PER_BUFFER);
@@ -178,9 +183,9 @@
     }
 
     {
-        std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+        std::lock_guard<std::mutex> lock(audioDataQueueLock);
         // Create audio data shared memory buffer for client.
-        mAudioDataQueue = new SharedRingBuffer();
+        mAudioDataQueue = std::make_shared<SharedRingBuffer>();
         result = mAudioDataQueue->allocate(calculateBytesPerFrame(), getBufferCapacity());
         if (result != AAUDIO_OK) {
             ALOGE("%s() could not allocate FIFO with %d frames",
@@ -203,25 +208,13 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamShared::close_l()  {
-    aaudio_result_t result = AAudioServiceStreamBase::close_l();
-
-    {
-        std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
-        delete mAudioDataQueue;
-        mAudioDataQueue = nullptr;
-    }
-
-    return result;
-}
-
 /**
  * Get an immutable description of the data queue created by this service.
  */
 aaudio_result_t AAudioServiceStreamShared::getAudioDataDescription(
         AudioEndpointParcelable &parcelable)
 {
-    std::lock_guard<std::mutex> lock(mAudioDataQueueLock);
+    std::lock_guard<std::mutex> lock(audioDataQueueLock);
     if (mAudioDataQueue == nullptr) {
         ALOGW("%s(): mUpMessageQueue null! - stream not open", __func__);
         return AAUDIO_ERROR_NULL;
@@ -273,3 +266,37 @@
     *positionFrames = position;
     return result;
 }
+
+void AAudioServiceStreamShared::writeDataIfRoom(int64_t mmapFramesRead,
+                                                const void *buffer, int32_t numFrames) {
+    int64_t clientFramesWritten = 0;
+
+    // Lock the AudioFifo to protect against close.
+    std::lock_guard <std::mutex> lock(audioDataQueueLock);
+
+    if (mAudioDataQueue != nullptr) {
+        std::shared_ptr<FifoBuffer> fifo = mAudioDataQueue->getFifoBuffer();
+        // Determine offset between framePosition in client's stream
+        // vs the underlying MMAP stream.
+        clientFramesWritten = fifo->getWriteCounter();
+        // There are two indices that refer to the same frame.
+        int64_t positionOffset = mmapFramesRead - clientFramesWritten;
+        setTimestampPositionOffset(positionOffset);
+
+        // Is the buffer too full to write a burst?
+        if (fifo->getEmptyFramesAvailable() < getFramesPerBurst()) {
+            incrementXRunCount();
+        } else {
+            fifo->write(buffer, numFrames);
+        }
+        clientFramesWritten = fifo->getWriteCounter();
+    }
+
+    if (clientFramesWritten > 0) {
+        // This timestamp represents the completion of data being written into the
+        // client buffer. It is sent to the client and used in the timing model
+        // to decide when data will be available to read.
+        Timestamp timestamp(clientFramesWritten, AudioClock::getNanoseconds());
+        markTransferTime(timestamp);
+    }
+}
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index abcb782..4fae5b4 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -52,23 +52,16 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    aaudio_result_t close_l() override;
+    void writeDataIfRoom(int64_t mmapFramesRead, const void *buffer, int32_t numFrames);
 
     /**
-     * This must be locked when calling getAudioDataFifoBuffer_l() and while
-     * using the FifoBuffer it returns.
-     */
-    std::mutex &getAudioDataQueueLock() {
-        return mAudioDataQueueLock;
-    }
-
-    /**
-     * This must only be call under getAudioDataQueueLock().
+     * This must only be called while holding audioDataQueueLock.
      * @return
      */
-    android::FifoBuffer *getAudioDataFifoBuffer_l() { return (mAudioDataQueue == nullptr)
-                                                      ? nullptr
-                                                      : mAudioDataQueue->getFifoBuffer(); }
+    std::shared_ptr<SharedRingBuffer> getAudioDataQueue_l()
+            REQUIRES(audioDataQueueLock) {
+        return mAudioDataQueue;
+    }
 
     /* Keep a record of when a buffer transfer completed.
      * This allows for a more accurate timing model.
@@ -89,6 +82,10 @@
 
     const char *getTypeText() const override { return "Shared"; }
 
+    // This is public so that the thread safety annotation, GUARDED_BY(),
+    // can work when another object takes the lock.
+    mutable std::mutex   audioDataQueueLock;
+
 protected:
 
     aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
@@ -106,8 +103,8 @@
                                             int32_t framesPerBurst);
 
 private:
-    SharedRingBuffer        *mAudioDataQueue = nullptr; // protected by mAudioDataQueueLock
-    std::mutex               mAudioDataQueueLock;
+
+    std::shared_ptr<SharedRingBuffer> mAudioDataQueue GUARDED_BY(audioDataQueueLock);
 
     std::atomic<int64_t>     mTimestampPositionOffset;
     std::atomic<int32_t>     mXRunCount;
diff --git a/services/oboeservice/AAudioStreamTracker.cpp b/services/oboeservice/AAudioStreamTracker.cpp
index 8e66b94..9bbbc73 100644
--- a/services/oboeservice/AAudioStreamTracker.cpp
+++ b/services/oboeservice/AAudioStreamTracker.cpp
@@ -96,7 +96,7 @@
     return handle;
 }
 
-std::string AAudioStreamTracker::dump() const {
+std::string AAudioStreamTracker::dump() const NO_THREAD_SAFETY_ANALYSIS {
     std::stringstream result;
     const bool isLocked = AAudio_tryUntilTrue(
             [this]()->bool { return mHandleLock.try_lock(); } /* f */,
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index d1301a2..43870fc 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -17,13 +17,13 @@
 #ifndef AAUDIO_AAUDIO_STREAM_TRACKER_H
 #define AAUDIO_AAUDIO_STREAM_TRACKER_H
 
+#include <mutex>
 #include <time.h>
-#include <pthread.h>
 
+#include <android-base/thread_annotations.h>
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioCommon.h"
-
 #include "AAudioServiceStreamBase.h"
 
 namespace aaudio {
@@ -75,11 +75,10 @@
     static aaudio_handle_t bumpHandle(aaudio_handle_t handle);
 
     // Track stream using a unique handle that wraps. Only use positive half.
-    mutable std::mutex                mHandleLock;
-    // protected by mHandleLock
-    aaudio_handle_t                   mPreviousHandle = 0;
-    // protected by mHandleLock
-    std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>> mStreamsByHandle;
+    mutable std::mutex            mHandleLock;
+    aaudio_handle_t               mPreviousHandle GUARDED_BY(mHandleLock) = 0;
+    std::map<aaudio_handle_t, android::sp<aaudio::AAudioServiceStreamBase>>
+            mStreamsByHandle GUARDED_BY(mHandleLock);
 };
 
 
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ed7895b..68496ac 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -37,10 +37,13 @@
     setup("AAudio");
 }
 
-void AAudioThread::setup(const char *prefix) {
-    // mThread is a pthread_t of unknown size so we need memset().
-    memset(&mThread, 0, sizeof(mThread));
+AAudioThread::~AAudioThread() {
+    ALOGE_IF(pthread_equal(pthread_self(), mThread),
+            "%s() destructor running in thread", __func__);
+    ALOGE_IF(mHasThread, "%s() thread never joined", __func__);
+}
 
+void AAudioThread::setup(const char *prefix) {
     // Name the thread with an increasing index, "prefix_#", for debugging.
     uint32_t index = mNextThreadIndex++;
     // Wrap the index so that we do not hit the 16 char limit
@@ -57,7 +60,7 @@
     }
 }
 
-// This is the entry point for the new thread created by createThread().
+// This is the entry point for the new thread created by createThread_l().
 // It converts the 'C' function call to a C++ method call.
 static void * AAudioThread_internalThreadProc(void *arg) {
     AAudioThread *aaudioThread = (AAudioThread *) arg;
@@ -90,13 +93,18 @@
         ALOGE("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    // Check to see if the thread is trying to stop itself.
+    if (pthread_equal(pthread_self(), mThread)) {
+        ALOGE("%s() attempt to pthread_join() from launched thread!", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+
     int err = pthread_join(mThread, nullptr);
-    mHasThread = false;
     if (err != 0) {
         ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
         return AAudioConvert_androidToAAudioResult(-err);
     } else {
+        mHasThread = false;
         return AAUDIO_OK;
     }
 }
-
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index dcce68a..08a8a98 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -46,7 +46,7 @@
 
     explicit AAudioThread(const char *prefix);
 
-    virtual ~AAudioThread() = default;
+    virtual ~AAudioThread();
 
     /**
      * Start the thread running.
@@ -73,7 +73,7 @@
 
     Runnable    *mRunnable = nullptr;
     bool         mHasThread = false;
-    pthread_t    mThread; // initialized in constructor
+    pthread_t    mThread = {};
 
     static std::atomic<uint32_t> mNextThreadIndex;
     char         mName[16]; // max length for a pthread_name
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 8b1e2c0..4c58040 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -12,7 +12,16 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-cc_library_shared {
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
 
     name: "libaaudioservice",
 
@@ -37,6 +46,7 @@
     ],
 
     cflags: [
+        "-Wthread-safety",
         "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
@@ -55,6 +65,14 @@
         "libcutils",
         "liblog",
         "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+
+    export_shared_lib_headers: [
+        "libaaudio_internal",
+        "framework-permission-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/oboeservice/SharedMemoryProxy.cpp b/services/oboeservice/SharedMemoryProxy.cpp
index c43ed22..78d4884 100644
--- a/services/oboeservice/SharedMemoryProxy.cpp
+++ b/services/oboeservice/SharedMemoryProxy.cpp
@@ -20,6 +20,7 @@
 
 #include <errno.h>
 #include <string.h>
+#include <unistd.h>
 
 #include <aaudio/AAudio.h>
 #include "SharedMemoryProxy.h"
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index 2454446..c1d4e16 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -18,6 +18,8 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <iomanip>
+#include <iostream>
 #include <sys/mman.h>
 
 #include "binding/RingBufferParcelable.h"
@@ -30,8 +32,8 @@
 
 SharedRingBuffer::~SharedRingBuffer()
 {
+    mFifoBuffer.reset(); // uses mSharedMemory
     if (mSharedMemory != nullptr) {
-        delete mFifoBuffer;
         munmap(mSharedMemory, mSharedMemorySizeInBytes);
         mSharedMemory = nullptr;
     }
@@ -58,16 +60,18 @@
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
 
-    // Map the fd to memory addresses.
-    mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+    // Map the fd to memory addresses. Use a temporary pointer to hold the mmap result and
+    // assign it to `mSharedMemory` only if mmap succeeds.
+    uint8_t* tmpPtr = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
                          PROT_READ|PROT_WRITE,
                          MAP_SHARED,
                          mFileDescriptor.get(), 0);
-    if (mSharedMemory == MAP_FAILED) {
+    if (tmpPtr == MAP_FAILED) {
         ALOGE("allocate() mmap() failed %d", errno);
         mFileDescriptor.reset();
         return AAUDIO_ERROR_INTERNAL; // TODO convert errno to a better AAUDIO_ERROR;
     }
+    mSharedMemory = tmpPtr;
 
     // Get addresses for our counters and data from the shared memory.
     fifo_counter_t *readCounterAddress =
@@ -76,7 +80,7 @@
             (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
     uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
 
-    mFifoBuffer = new FifoBuffer(bytesPerFrame, capacityInFrames,
+    mFifoBuffer = std::make_shared<FifoBufferIndirect>(bytesPerFrame, capacityInFrames,
                                  readCounterAddress, writeCounterAddress, dataAddress);
     return AAUDIO_OK;
 }
@@ -94,3 +98,19 @@
     ringBufferParcelable.setFramesPerBurst(1);
     ringBufferParcelable.setCapacityInFrames(mCapacityInFrames);
 }
+
+double SharedRingBuffer::getFractionalFullness() const {
+  int32_t framesAvailable = mFifoBuffer->getFullFramesAvailable();
+  int32_t capacity = mFifoBuffer->getBufferCapacityInFrames();
+  return framesAvailable / (double) capacity;
+}
+
+std::string SharedRingBuffer::dump() const {
+    std::stringstream result;
+    int32_t readCounter = mFifoBuffer->getReadCounter();
+    int32_t writeCounter = mFifoBuffer->getWriteCounter();
+    result << std::setw(10) << writeCounter;
+    result << std::setw(10) << readCounter;
+    result << std::setw(8) << (writeCounter - readCounter);
+    return result.str();
+}
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index 79169bc..c3a9bb7 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -18,8 +18,9 @@
 #define AAUDIO_SHARED_RINGBUFFER_H
 
 #include <android-base/unique_fd.h>
-#include <stdint.h>
 #include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
 #include <sys/mman.h>
 
 #include "fifo/FifoBuffer.h"
@@ -47,15 +48,25 @@
     void fillParcelable(AudioEndpointParcelable &endpointParcelable,
                         RingBufferParcelable &ringBufferParcelable);
 
-    android::FifoBuffer * getFifoBuffer() {
+    /**
+     * Return available frames as a fraction of the capacity.
+     * @return fullness between 0.0 and 1.0
+     */
+    double getFractionalFullness() const;
+
+    // dump: write# read# available
+    std::string dump() const;
+
+    std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
         return mFifoBuffer;
     }
 
 private:
     android::base::unique_fd  mFileDescriptor;
-    android::FifoBuffer      *mFifoBuffer = nullptr;
-    uint8_t                  *mSharedMemory = nullptr;
+    std::shared_ptr<android::FifoBufferIndirect>  mFifoBuffer;
+    uint8_t                  *mSharedMemory = nullptr; // mmap
     int32_t                   mSharedMemorySizeInBytes = 0;
+    // size of memory used for data vs counters
     int32_t                   mDataMemorySizeInBytes = 0;
     android::fifo_frames_t    mCapacityInFrames = 0;
 };
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
new file mode 100644
index 0000000..605ac01
--- /dev/null
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -0,0 +1,72 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "oboeservice_fuzzer",
+    srcs: [
+        "oboeservice_fuzzer.cpp",
+    ],
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioflinger",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+    ],
+    static_libs: [
+        "libaaudioservice",
+    ],
+    include_dirs: [
+        "frameworks/av/services/oboeservice",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wno-unused-parameter",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
new file mode 100644
index 0000000..00b85df
--- /dev/null
+++ b/services/oboeservice/fuzzer/README.md
@@ -0,0 +1,65 @@
+# Fuzzer for libaaudioservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libaaudioservice is designed with an understanding of the
+service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+An AAudio service request contains the following parameters:
+1. AAudioFormat
+2. UserId
+3. ProcessId
+4. InService
+5. DeviceId
+6. SampleRate
+7. SamplesPerFrame
+8. Direction
+9. SharingMode
+10. Usage
+11. ContentType
+12. InputPreset
+13. BufferCapacity
+
+| Parameter| Valid Input Values| Configured Value|
+|------------- |-------------| ----- |
+| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService`   | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
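+
+The selection pattern can be illustrated with a small standalone sketch. The
+names below (`kFormats`, `pickFormat`) are illustrative only; the real lookup
+tables and request setters live in `oboeservice_fuzzer.cpp`.
+
+```cpp
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include <cstdint>
+#include <iterator>
+
+// Mirrors the spirit of the kAAudioFormats table (values here are placeholders).
+static const int32_t kFormats[] = {0 /* UNSPECIFIED */, 1 /* PCM_I16 */, 2 /* PCM_FLOAT */};
+
+// Either fuzz an arbitrary (possibly invalid) value, or pick a valid entry by index.
+int32_t pickFormat(FuzzedDataProvider &fdp) {
+    return fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kFormats[fdp.ConsumeIntegralInRange<size_t>(0, std::size(kFormats) - 1)];
+}
+```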
+
+## Build
+
+This describes the steps to build the oboeservice_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer:
+```
+  $ mm -j$(nproc) oboeservice_fuzzer
+```
+
+#### Steps to run
+To run on a device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/oboeservice_fuzzer/oboeservice_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
new file mode 100644
index 0000000..4bc661c
--- /dev/null
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -0,0 +1,370 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <stdio.h>
+
+#include <AAudioService.h>
+#include <aaudio/AAudio.h>
+#include "aaudio/BnAAudioClient.h"
+#include <android/content/AttributionSourceState.h>
+
+#define UNUSED_PARAM __attribute__((unused))
+
+using namespace android;
+using namespace aaudio;
+
+aaudio_format_t kAAudioFormats[] = {
+    AAUDIO_FORMAT_UNSPECIFIED,
+    AAUDIO_FORMAT_PCM_I16,
+    AAUDIO_FORMAT_PCM_FLOAT,
+};
+
+aaudio_usage_t kAAudioUsages[] = {
+    AAUDIO_USAGE_MEDIA,
+    AAUDIO_USAGE_VOICE_COMMUNICATION,
+    AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    AAUDIO_USAGE_ALARM,
+    AAUDIO_USAGE_NOTIFICATION,
+    AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+    AAUDIO_USAGE_NOTIFICATION_EVENT,
+    AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+    AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+    AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    AAUDIO_USAGE_GAME,
+    AAUDIO_USAGE_ASSISTANT,
+    AAUDIO_SYSTEM_USAGE_EMERGENCY,
+    AAUDIO_SYSTEM_USAGE_SAFETY,
+    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT,
+};
+
+aaudio_content_type_t kAAudioContentTypes[] = {
+    AAUDIO_CONTENT_TYPE_SPEECH,
+    AAUDIO_CONTENT_TYPE_MUSIC,
+    AAUDIO_CONTENT_TYPE_MOVIE,
+    AAUDIO_CONTENT_TYPE_SONIFICATION,
+};
+
+aaudio_input_preset_t kAAudioInputPresets[] = {
+    AAUDIO_INPUT_PRESET_GENERIC,           AAUDIO_INPUT_PRESET_CAMCORDER,
+    AAUDIO_INPUT_PRESET_VOICE_RECOGNITION, AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+    AAUDIO_INPUT_PRESET_UNPROCESSED,       AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE,
+};
+
+const size_t kNumAAudioFormats = std::size(kAAudioFormats);
+const size_t kNumAAudioUsages = std::size(kAAudioUsages);
+const size_t kNumAAudioContentTypes = std::size(kAAudioContentTypes);
+const size_t kNumAAudioInputPresets = std::size(kAAudioInputPresets);
+
+class FuzzAAudioClient : public virtual RefBase, public AAudioServiceInterface {
+   public:
+    FuzzAAudioClient(sp<AAudioService> service);
+
+    virtual ~FuzzAAudioClient();
+
+    AAudioServiceInterface *getAAudioService();
+
+    void dropAAudioService();
+
+    void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
+
+    aaudio_handle_t openStream(const AAudioStreamRequest &request,
+                               AAudioStreamConfiguration &configurationOutput) override;
+
+    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+                                         AudioEndpointParcelable &parcelable) override;
+
+    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+                                        int64_t periodNanoseconds) override;
+
+    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+                                          pid_t clientThreadId) override;
+
+    aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+                                const AudioClient &client UNUSED_PARAM,
+                                const audio_attributes_t *attr UNUSED_PARAM,
+                                audio_port_handle_t *clientHandle UNUSED_PARAM) override {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+                               audio_port_handle_t clientHandle UNUSED_PARAM) override {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {}
+
+    int getDeathCount() { return mDeathCount; }
+
+    void incDeathCount() { ++mDeathCount; }
+
+    class AAudioClient : public IBinder::DeathRecipient, public BnAAudioClient {
+       public:
+        AAudioClient(wp<FuzzAAudioClient> fuzzAAudioClient) : mBinderClient(fuzzAAudioClient) {}
+
+        virtual void binderDied(const wp<IBinder> &who UNUSED_PARAM) {
+            sp<FuzzAAudioClient> client = mBinderClient.promote();
+            if (client.get()) {
+                client->dropAAudioService();
+                client->incDeathCount();
+            }
+        }
+
+        android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+            static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
+            android::sp<FuzzAAudioClient> client = mBinderClient.promote();
+            if (client.get() != nullptr) {
+                client->onStreamChange(handle, opcode, value);
+            }
+            return android::binder::Status::ok();
+        }
+
+       private:
+        wp<FuzzAAudioClient> mBinderClient;
+    };
+
+   private:
+    sp<AAudioService> mAAudioService;
+    sp<AAudioClient> mAAudioClient;
+    AAudioServiceInterface *mAAudioServiceInterface;
+    int mDeathCount;
+};
+
+FuzzAAudioClient::FuzzAAudioClient(sp<AAudioService> service) : AAudioServiceInterface() {
+    mAAudioService = service;
+    mAAudioServiceInterface = &service->asAAudioServiceInterface();
+    mAAudioClient = new AAudioClient(this);
+    mDeathCount = 0;
+    if (mAAudioClient.get() && mAAudioService.get()) {
+        mAAudioService->linkToDeath(mAAudioClient);
+        mAAudioService->registerClient(mAAudioClient);
+    }
+}
+
+FuzzAAudioClient::~FuzzAAudioClient() { dropAAudioService(); }
+
+AAudioServiceInterface *FuzzAAudioClient::getAAudioService() {
+    if (!mAAudioServiceInterface && mAAudioService.get()) {
+        mAAudioServiceInterface = &mAAudioService->asAAudioServiceInterface();
+    }
+    return mAAudioServiceInterface;
+}
+
+void FuzzAAudioClient::dropAAudioService() {
+    mAAudioService.clear();
+}
+
+aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+                                             AAudioStreamConfiguration &configurationOutput) {
+    aaudio_handle_t stream;
+    for (int i = 0; i < 2; ++i) {
+        AAudioServiceInterface *service = getAAudioService();
+        if (!service) {
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+
+        stream = service->openStream(request, configurationOutput);
+
+        if (stream == AAUDIO_ERROR_NO_SERVICE) {
+            dropAAudioService();
+        } else {
+            break;
+        }
+    }
+    return stream;
+}
+
+aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->closeStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+                                                       AudioEndpointParcelable &parcelable) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->getStreamDescription(streamHandle, parcelable);
+}
+
+aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->startStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->pauseStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->stopStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->flushStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+                                                      pid_t clientThreadId,
+                                                      int64_t periodNanoseconds) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+}
+
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+                                                        pid_t clientThreadId) {
+    AAudioServiceInterface *service = getAAudioService();
+    if (!service) {
+        return AAUDIO_ERROR_NO_SERVICE;
+    }
+    return service->unregisterAudioThread(streamHandle, clientThreadId);
+}
+
+class OboeserviceFuzzer {
+   public:
+    OboeserviceFuzzer();
+    ~OboeserviceFuzzer() = default;
+    void process(const uint8_t *data, size_t size);
+
+   private:
+    sp<FuzzAAudioClient> mClient;
+};
+
+OboeserviceFuzzer::OboeserviceFuzzer() {
+    sp<AAudioService> service = new AAudioService();
+    mClient = new FuzzAAudioClient(service);
+}
+
+void OboeserviceFuzzer::process(const uint8_t *data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    AAudioStreamRequest request;
+    AAudioStreamConfiguration configurationOutput;
+
+    // Initialize stream request
+    request.getConfiguration().setFormat((audio_format_t)(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
+    // TODO b/182392769: use attribution source util
+    android::content::AttributionSourceState attributionSource;
+    attributionSource.uid = getuid();
+    attributionSource.pid = getpid();
+    attributionSource.token = sp<BBinder>::make();
+    request.setAttributionSource(attributionSource);
+    request.setInService(fdp.ConsumeBool());
+
+    request.getConfiguration().setDeviceId(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setSampleRate(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setDirection(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : (fdp.ConsumeBool() ? AAUDIO_DIRECTION_OUTPUT : AAUDIO_DIRECTION_INPUT));
+    request.getConfiguration().setSharingMode(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : (fdp.ConsumeBool() ? AAUDIO_SHARING_MODE_EXCLUSIVE : AAUDIO_SHARING_MODE_SHARED));
+
+    request.getConfiguration().setUsage(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioUsages[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioUsages - 1)]);
+    request.getConfiguration().setContentType(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : kAAudioContentTypes[fdp.ConsumeIntegralInRange<int32_t>(
+                                0, kNumAAudioContentTypes - 1)]);
+    request.getConfiguration().setInputPreset(
+        fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+                          : kAAudioInputPresets[fdp.ConsumeIntegralInRange<int32_t>(
+                                0, kNumAAudioInputPresets - 1)]);
+    request.getConfiguration().setPrivacySensitive(fdp.ConsumeBool());
+
+    request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
+
+    aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
+    if (stream < 0) {
+        // invalid request, stream not opened.
+        return;
+    }
+    while (fdp.remaining_bytes()) {
+        AudioEndpointParcelable audioEndpointParcelable;
+        int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+        switch (action) {
+            case 0:
+                mClient->getStreamDescription(stream, audioEndpointParcelable);
+                break;
+            case 1:
+                mClient->startStream(stream);
+                break;
+            case 2:
+                mClient->pauseStream(stream);
+                break;
+            case 3:
+                mClient->stopStream(stream);
+                break;
+            case 4:
+                mClient->flushStream(stream);
+                break;
+        }
+    }
+    mClient->closeStream(stream);
+    assert(mClient->getDeathCount() == 0);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    if (size < 1) {
+        return 0;
+    }
+    OboeserviceFuzzer oboeserviceFuzzer;
+    oboeserviceFuzzer.process(data, size);
+    return 0;
+}
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
new file mode 100644
index 0000000..cd11c88
--- /dev/null
+++ b/services/tuner/Android.bp
@@ -0,0 +1,145 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "tv_tuner_aidl",
+    srcs: [
+        "aidl/android/media/tv/tuner/*.aidl",
+    ],
+    path: "aidl",
+}
+
+filegroup {
+    name: "tv_tuner_frontend_info",
+    srcs: [
+        "aidl/android/media/tv/tuner/TunerFrontendInfo.aidl",
+        "aidl/android/media/tv/tuner/TunerFrontend*Capabilities.aidl",
+    ],
+    path: "aidl",
+}
+
+aidl_interface {
+    name: "tv_tuner_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":tv_tuner_aidl",
+    ],
+    imports: [
+        "android.hardware.common-V2",
+        "android.hardware.common.fmq-V1",
+    ],
+
+    backend: {
+        java: {
+            enabled: false,
+        },
+        cpp: {
+            enabled: false,
+        },
+        ndk: {
+            enabled: true,
+        },
+    },
+}
+
+aidl_interface {
+    name: "tv_tuner_frontend_info_aidl_interface",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        ":tv_tuner_frontend_info",
+    ],
+
+    backend: {
+        java: {
+            enabled: true,
+        },
+        cpp: {
+            enabled: true,
+        },
+        ndk: {
+            enabled: true,
+        },
+    },
+}
+
+cc_library {
+    name: "libtunerservice",
+
+    srcs: [
+        "Tuner*.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.tv.tuner@1.0",
+        "android.hardware.tv.tuner@1.1",
+        "libbase",
+        "libbinder",
+        "libbinder_ndk",
+        "libcutils",
+        "libfmq",
+        "libhidlbase",
+        "liblog",
+        "libmedia",
+        "libutils",
+        "tv_tuner_aidl_interface-ndk_platform",
+        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+        "tv_tuner_resource_manager_aidl_interface-cpp",
+    ],
+
+    static_libs: [
+        "android.hardware.common.fmq-V1-ndk_platform",
+        "libaidlcommonsupport",
+    ],
+
+    include_dirs: [
+        "frameworks/av/include"
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    export_include_dirs: ["."],
+}
+
+
+cc_binary {
+    name: "mediatuner",
+
+    srcs: [
+        "main_tunerservice.cpp",
+    ],
+
+    shared_libs: [
+        "android.hardware.tv.tuner@1.0",
+        "android.hardware.tv.tuner@1.1",
+        "libbase",
+        "libbinder",
+        "libfmq",
+        "liblog",
+        "libtunerservice",
+        "libutils",
+        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+        "tv_tuner_resource_manager_aidl_interface-cpp",
+    ],
+
+    static_libs: [
+        "tv_tuner_aidl_interface-ndk_platform",
+    ],
+
+    init_rc: ["mediatuner.rc"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/services/tuner/OWNERS b/services/tuner/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
new file mode 100644
index 0000000..1122368
--- /dev/null
+++ b/services/tuner/TunerDemux.cpp
@@ -0,0 +1,232 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerDemux"
+
+#include "TunerDvr.h"
+#include "TunerDemux.h"
+#include "TunerTimeFilter.h"
+
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::DvrType;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+TunerDemux::TunerDemux(sp<IDemux> demux, int id) {
+    mDemux = demux;
+    mDemuxId = id;
+}
+
+TunerDemux::~TunerDemux() {
+    mDemux = nullptr;
+}
+
+Status TunerDemux::setFrontendDataSource(const std::shared_ptr<ITunerFrontend>& frontend) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    int frontendId;
+    frontend->getFrontendId(&frontendId);
+    Result res = mDemux->setFrontendDataSource(frontendId);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDemux::openFilter(
+        int type, int subType, int bufferSize, const std::shared_ptr<ITunerFilterCallback>& cb,
+        std::shared_ptr<ITunerFilter>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    DemuxFilterMainType mainType = static_cast<DemuxFilterMainType>(type);
+    DemuxFilterType filterType {
+        .mainType = mainType,
+    };
+
+    switch(mainType) {
+        case DemuxFilterMainType::TS:
+            filterType.subType.tsFilterType(static_cast<DemuxTsFilterType>(subType));
+            break;
+        case DemuxFilterMainType::MMTP:
+            filterType.subType.mmtpFilterType(static_cast<DemuxMmtpFilterType>(subType));
+            break;
+        case DemuxFilterMainType::IP:
+            filterType.subType.ipFilterType(static_cast<DemuxIpFilterType>(subType));
+            break;
+        case DemuxFilterMainType::TLV:
+            filterType.subType.tlvFilterType(static_cast<DemuxTlvFilterType>(subType));
+            break;
+        case DemuxFilterMainType::ALP:
+            filterType.subType.alpFilterType(static_cast<DemuxAlpFilterType>(subType));
+            break;
+    }
+    Result status;
+    sp<IFilter> filterSp;
+    sp<IFilterCallback> cbSp = new TunerFilter::FilterCallback(cb);
+    mDemux->openFilter(filterType, bufferSize, cbSp,
+            [&](Result r, const sp<IFilter>& filter) {
+                filterSp = filter;
+                status = r;
+            });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filterSp, type, subType);
+    return Status::ok();
+}
+
+Status TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    sp<ITimeFilter> filterSp;
+    mDemux->openTimeFilter([&](Result r, const sp<ITimeFilter>& filter) {
+        filterSp = filter;
+        status = r;
+    });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerTimeFilter>(filterSp);
+    return Status::ok();
+}
+
+Status TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter, int* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    uint32_t avSyncHwId;
+    Result res;
+    sp<IFilter> halFilter = static_cast<TunerFilter*>(tunerFilter.get())->getHalFilter();
+    mDemux->getAvSyncHwId(halFilter,
+            [&](Result r, uint32_t id) {
+                res = r;
+                avSyncHwId = id;
+            });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = (int)avSyncHwId;
+    return Status::ok();
+}
+
+Status TunerDemux::getAvSyncTime(int avSyncHwId, int64_t* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    uint64_t time;
+    Result res;
+    mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId),
+            [&](Result r, uint64_t ts) {
+                res = r;
+                time = ts;
+            });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = (int64_t)time;
+    return Status::ok();
+}
+
+Status TunerDemux::openDvr(int dvrType, int bufferSize, const shared_ptr<ITunerDvrCallback>& cb,
+        shared_ptr<ITunerDvr>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res;
+    sp<IDvrCallback> callback = new TunerDvr::DvrCallback(cb);
+    sp<IDvr> hidlDvr;
+    mDemux->openDvr(static_cast<DvrType>(dvrType), bufferSize, callback,
+            [&](Result r, const sp<IDvr>& dvr) {
+                hidlDvr = dvr;
+                res = r;
+            });
+    if (res != Result::SUCCESS) {
+        *_aidl_return = NULL;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerDvr>(hidlDvr, dvrType);
+    return Status::ok();
+}
+
+Status TunerDemux::connectCiCam(int ciCamId) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDemux::disconnectCiCam() {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDemux->disconnectCiCam();
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDemux::close() {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDemux->close();
+    mDemux = NULL;
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+}  // namespace android
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
new file mode 100644
index 0000000..2a9836b
--- /dev/null
+++ b/services/tuner/TunerDemux.h
@@ -0,0 +1,68 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERDEMUX_H
+#define ANDROID_MEDIA_TUNERDEMUX_H
+
+#include <aidl/android/media/tv/tuner/BnTunerDemux.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerDemux;
+using ::aidl::android::media::tv::tuner::ITunerDvr;
+using ::aidl::android::media::tv::tuner::ITunerDvrCallback;
+using ::aidl::android::media::tv::tuner::ITunerFilter;
+using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
+using ::aidl::android::media::tv::tuner::ITunerFrontend;
+using ::aidl::android::media::tv::tuner::ITunerTimeFilter;
+using ::android::hardware::tv::tuner::V1_0::IDemux;
+using ::android::hardware::tv::tuner::V1_0::IDvr;
+using ::android::hardware::tv::tuner::V1_0::IDvrCallback;
+using ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+
+using namespace std;
+
+namespace android {
+
+class TunerDemux : public BnTunerDemux {
+
+public:
+    TunerDemux(sp<IDemux> demux, int demuxId);
+    virtual ~TunerDemux();
+    Status setFrontendDataSource(const shared_ptr<ITunerFrontend>& frontend) override;
+    Status openFilter(
+        int mainType, int subType, int bufferSize, const shared_ptr<ITunerFilterCallback>& cb,
+        shared_ptr<ITunerFilter>* _aidl_return) override;
+    Status openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) override;
+    Status getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter, int* _aidl_return) override;
+    Status getAvSyncTime(int avSyncHwId, int64_t* _aidl_return) override;
+    Status openDvr(
+        int dvrType, int bufferSize, const shared_ptr<ITunerDvrCallback>& cb,
+        shared_ptr<ITunerDvr>* _aidl_return) override;
+    Status connectCiCam(int ciCamId) override;
+    Status disconnectCiCam() override;
+    Status close() override;
+
+    int getId() { return mDemuxId; }
+
+private:
+    sp<IDemux> mDemux;
+    int mDemuxId;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERDEMUX_H
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
new file mode 100644
index 0000000..b7ae167
--- /dev/null
+++ b/services/tuner/TunerDescrambler.cpp
@@ -0,0 +1,124 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerDescrambler"
+
+#include "TunerFilter.h"
+#include "TunerDemux.h"
+#include "TunerDescrambler.h"
+
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+using namespace std;
+
+namespace android {
+
+TunerDescrambler::TunerDescrambler(sp<IDescrambler> descrambler) {
+    mDescrambler = descrambler;
+}
+
+TunerDescrambler::~TunerDescrambler() {
+    mDescrambler = nullptr;
+}
+
+Status TunerDescrambler::setDemuxSource(const std::shared_ptr<ITunerDemux>& demux) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDescrambler->setDemuxSource(static_cast<TunerDemux*>(demux.get())->getId());
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDescrambler::setKeyToken(const vector<uint8_t>& keyToken) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDescrambler->setKeyToken(keyToken);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDescrambler::addPid(const TunerDemuxPid& pid,
+        const shared_ptr<ITunerFilter>& optionalSourceFilter) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
+            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->addPid(getHidlDemuxPid(pid), halFilter);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDescrambler::removePid(const TunerDemuxPid& pid,
+        const shared_ptr<ITunerFilter>& optionalSourceFilter) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
+            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
+    Result res = mDescrambler->removePid(getHidlDemuxPid(pid), halFilter);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDescrambler::close() {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDescrambler->close();
+    mDescrambler = NULL;
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
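+// Converts the AIDL TunerDemuxPid union into its HIDL DemuxPid counterpart;
+// only the tPid and mmtpPid tags are mapped.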
+DemuxPid TunerDescrambler::getHidlDemuxPid(const TunerDemuxPid& pid) {
+    DemuxPid hidlPid;
+    switch (pid.getTag()) {
+        case TunerDemuxPid::tPid: {
+            hidlPid.tPid((uint16_t)pid.get<TunerDemuxPid::tPid>());
+            break;
+        }
+        case TunerDemuxPid::mmtpPid: {
+            hidlPid.mmtpPid((uint16_t)pid.get<TunerDemuxPid::mmtpPid>());
+            break;
+        }
+    }
+    return hidlPid;
+}
+}  // namespace android
diff --git a/services/tuner/TunerDescrambler.h b/services/tuner/TunerDescrambler.h
new file mode 100644
index 0000000..1970fb7
--- /dev/null
+++ b/services/tuner/TunerDescrambler.h
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERDESCRAMBLER_H
+#define ANDROID_MEDIA_TUNERDESCRAMBLER_H
+
+#include <aidl/android/media/tv/tuner/BnTunerDescrambler.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerDescrambler;
+using ::aidl::android::media::tv::tuner::ITunerDemux;
+using ::aidl::android::media::tv::tuner::ITunerFilter;
+using ::aidl::android::media::tv::tuner::TunerDemuxPid;
+using ::android::hardware::tv::tuner::V1_0::DemuxPid;
+using ::android::hardware::tv::tuner::V1_0::IDescrambler;
+
+namespace android {
+
+class TunerDescrambler : public BnTunerDescrambler {
+
+public:
+    TunerDescrambler(sp<IDescrambler> descrambler);
+    virtual ~TunerDescrambler();
+    Status setDemuxSource(const shared_ptr<ITunerDemux>& demux) override;
+    Status setKeyToken(const vector<uint8_t>& keyToken) override;
+    Status addPid(const TunerDemuxPid& pid,
+            const shared_ptr<ITunerFilter>& optionalSourceFilter) override;
+    Status removePid(const TunerDemuxPid& pid,
+            const shared_ptr<ITunerFilter>& optionalSourceFilter) override;
+    Status close() override;
+
+private:
+    DemuxPid getHidlDemuxPid(const TunerDemuxPid& pid);
+
+    sp<IDescrambler> mDescrambler;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERDESCRAMBLER_H
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
new file mode 100644
index 0000000..db4e07b
--- /dev/null
+++ b/services/tuner/TunerDvr.cpp
@@ -0,0 +1,209 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerDvr"
+
+#include <fmq/ConvertMQDescriptors.h>
+#include "TunerDvr.h"
+#include "TunerFilter.h"
+
+using ::android::hardware::tv::tuner::V1_0::DataFormat;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+TunerDvr::TunerDvr(sp<IDvr> dvr, int type) {
+    mDvr = dvr;
+    mType = static_cast<DvrType>(type);
+}
+
+TunerDvr::~TunerDvr() {
+    mDvr = NULL;
+}
+
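+// Returns the DVR's FMQ descriptor, converted from the HIDL synchronized
+// descriptor to its AIDL representation.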
+Status TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    MQDesc dvrMQDesc;
+    Result res;
+    mDvr->getQueueDesc([&](Result r, const MQDesc& desc) {
+        dvrMQDesc = desc;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    AidlMQDesc aidlMQDesc;
+    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
+                dvrMQDesc,  &aidlMQDesc);
+    *_aidl_return = move(aidlMQDesc);
+    return Status::ok();
+}
+
+Status TunerDvr::configure(const TunerDvrSettings& settings) {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDvr->configure(getHidlDvrSettingsFromAidl(settings));
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& filter) {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    ITunerFilter* tunerFilter = filter.get();
+    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
+    if (hidlFilter == NULL) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    Result res = mDvr->attachFilter(hidlFilter);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& filter) {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    ITunerFilter* tunerFilter = filter.get();
+    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
+    if (hidlFilter == NULL) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    Result res = mDvr->detachFilter(hidlFilter);
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::start() {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDvr->start();
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::stop() {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDvr->stop();
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::flush() {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDvr->flush();
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerDvr::close() {
+    if (mDvr == NULL) {
+        ALOGE("IDvr is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mDvr->close();
+    mDvr = NULL;
+
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
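+// Maps the flat AIDL DVR settings onto the HIDL DvrSettings union; PLAYBACK
+// and RECORD use the same field set, and any other type returns the settings
+// with neither arm populated.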
+DvrSettings TunerDvr::getHidlDvrSettingsFromAidl(TunerDvrSettings settings) {
+    DvrSettings s;
+    switch (mType) {
+        case DvrType::PLAYBACK: {
+            s.playback({
+                .statusMask = static_cast<uint8_t>(settings.statusMask),
+                .lowThreshold = static_cast<uint32_t>(settings.lowThreshold),
+                .highThreshold = static_cast<uint32_t>(settings.highThreshold),
+                .dataFormat = static_cast<DataFormat>(settings.dataFormat),
+                .packetSize = static_cast<uint8_t>(settings.packetSize),
+            });
+            return s;
+        }
+        case DvrType::RECORD: {
+            s.record({
+                .statusMask = static_cast<uint8_t>(settings.statusMask),
+                .lowThreshold = static_cast<uint32_t>(settings.lowThreshold),
+                .highThreshold = static_cast<uint32_t>(settings.highThreshold),
+                .dataFormat = static_cast<DataFormat>(settings.dataFormat),
+                .packetSize = static_cast<uint8_t>(settings.packetSize),
+            });
+            return s;
+        }
+        default:
+            break;
+    }
+    return s;
+}
+
+/////////////// IDvrCallback ///////////////////////
+
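+// Forwards HIDL DVR callbacks to the registered AIDL callback, passing the
+// status enums through as plain ints.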
+Return<void> TunerDvr::DvrCallback::onRecordStatus(const RecordStatus status) {
+    if (mTunerDvrCallback != NULL) {
+        mTunerDvrCallback->onRecordStatus(static_cast<int>(status));
+    }
+    return Void();
+}
+
+Return<void> TunerDvr::DvrCallback::onPlaybackStatus(const PlaybackStatus status) {
+    if (mTunerDvrCallback != NULL) {
+        mTunerDvrCallback->onPlaybackStatus(static_cast<int>(status));
+    }
+    return Void();
+}
+}  // namespace android
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
new file mode 100644
index 0000000..a508e99
--- /dev/null
+++ b/services/tuner/TunerDvr.h
@@ -0,0 +1,97 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERDVR_H
+#define ANDROID_MEDIA_TUNERDVR_H
+
+#include <aidl/android/media/tv/tuner/BnTunerDvr.h>
+#include <aidl/android/media/tv/tuner/ITunerDvrCallback.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <fmq/MessageQueue.h>
+
+#include <TunerFilter.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::hardware::common::fmq::GrantorDescriptor;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::media::tv::tuner::BnTunerDvr;
+using ::aidl::android::media::tv::tuner::ITunerDvrCallback;
+using ::aidl::android::media::tv::tuner::ITunerFilter;
+using ::aidl::android::media::tv::tuner::TunerDvrSettings;
+
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+using ::android::hardware::tv::tuner::V1_0::DvrSettings;
+using ::android::hardware::tv::tuner::V1_0::DvrType;
+using ::android::hardware::tv::tuner::V1_0::IDvr;
+using ::android::hardware::tv::tuner::V1_0::IDvrCallback;
+using ::android::hardware::tv::tuner::V1_0::PlaybackStatus;
+using ::android::hardware::tv::tuner::V1_0::RecordStatus;
+
+using namespace std;
+
+namespace android {
+
+using MQDesc = MQDescriptorSync<uint8_t>;
+using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
+
+class TunerDvr : public BnTunerDvr {
+
+public:
+    TunerDvr(sp<IDvr> dvr, int type);
+    ~TunerDvr();
+
+    Status getQueueDesc(AidlMQDesc* _aidl_return) override;
+
+    Status configure(const TunerDvrSettings& settings) override;
+
+    Status attachFilter(const shared_ptr<ITunerFilter>& filter) override;
+
+    Status detachFilter(const shared_ptr<ITunerFilter>& filter) override;
+
+    Status start() override;
+
+    Status stop() override;
+
+    Status flush() override;
+
+    Status close() override;
+
+    struct DvrCallback : public IDvrCallback {
+        DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
+                : mTunerDvrCallback(tunerDvrCallback) {};
+
+        virtual Return<void> onRecordStatus(const RecordStatus status);
+        virtual Return<void> onPlaybackStatus(const PlaybackStatus status);
+
+        private:
+            shared_ptr<ITunerDvrCallback> mTunerDvrCallback;
+    };
+
+private:
+    DvrSettings getHidlDvrSettingsFromAidl(TunerDvrSettings settings);
+
+    sp<IDvr> mDvr;
+    DvrType mType;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERDVR_H
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
new file mode 100644
index 0000000..039fd31
--- /dev/null
+++ b/services/tuner/TunerFilter.cpp
@@ -0,0 +1,910 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerFilter"
+
+#include "TunerFilter.h"
+
+using ::aidl::android::media::tv::tuner::TunerFilterSectionCondition;
+
+using ::android::hardware::hidl_handle;
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpLengthType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpAddress;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpPid;
+using ::android::hardware::tv::tuner::V1_0::DemuxRecordScIndexType;
+using ::android::hardware::tv::tuner::V1_0::DemuxStreamId;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::Result;
+using ::android::hardware::tv::tuner::V1_1::AudioStreamType;
+using ::android::hardware::tv::tuner::V1_1::Constant;
+using ::android::hardware::tv::tuner::V1_1::VideoStreamType;
+
+namespace android {
+
+using namespace std;
+
+TunerFilter::TunerFilter(
+        sp<IFilter> filter, int mainType, int subType) {
+    mFilter = filter;
+    mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
+    mMainType = mainType;
+    mSubType = subType;
+}
+
+TunerFilter::~TunerFilter() {
+    mFilter = nullptr;
+    mFilter_1_1 = nullptr;
+}
+
+Status TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
+    if (mFilter == NULL) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    MQDesc filterMQDesc;
+    Result res;
+    mFilter->getQueueDesc([&](Result r, const MQDesc& desc) {
+        filterMQDesc = desc;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    AidlMQDesc aidlMQDesc;
+    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
+                filterMQDesc,  &aidlMQDesc);
+    *_aidl_return = move(aidlMQDesc);
+    return Status::ok();
+}
+
+Status TunerFilter::getId(int32_t* _aidl_return) {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res;
+    mFilter->getId([&](Result r, uint32_t filterId) {
+        res = r;
+        mId = filterId;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    *_aidl_return = mId;
+    return Status::ok();
+}
+
+Status TunerFilter::getId64Bit(int64_t* _aidl_return) {
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res;
+    mFilter_1_1->getId64Bit([&](Result r, uint64_t filterId) {
+        res = r;
+        mId64Bit = filterId;
+    });
+    if (res != Result::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    *_aidl_return = mId64Bit;
+    return Status::ok();
+}
+
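+// Builds the HIDL DemuxFilterSettings union that matches the AIDL
+// configuration tag; the per-protocol helpers below fill in each variant.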
+Status TunerFilter::configure(const TunerFilterConfiguration& config) {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    DemuxFilterSettings settings;
+    switch (config.getTag()) {
+        case TunerFilterConfiguration::ts: {
+            getHidlTsSettings(config, settings);
+            break;
+        }
+        case TunerFilterConfiguration::mmtp: {
+            getHidlMmtpSettings(config, settings);
+            break;
+        }
+        case TunerFilterConfiguration::ip: {
+            getHidlIpSettings(config, settings);
+            break;
+        }
+        case TunerFilterConfiguration::tlv: {
+            getHidlTlvSettings(config, settings);
+            break;
+        }
+        case TunerFilterConfiguration::alp: {
+            getHidlAlpSettings(config, settings);
+            break;
+        }
+    }
+
+    Result res = mFilter->configure(settings);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::configureMonitorEvent(int monitorEventType) {
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mFilter_1_1->configureMonitorEvent(monitorEventType);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::configureIpFilterContextId(int cid) {
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mFilter_1_1->configureIpCid(cid);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::configureAvStreamType(int avStreamType) {
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    AvStreamType type;
+    if (!getHidlAvStreamType(avStreamType, type)) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    Result res = mFilter_1_1->configureAvStreamType(type);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    ITunerFilter* tunerFilter = filter.get();
+    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
+    Result res = mFilter->setDataSource(hidlFilter);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
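+// TS filter conversion: copies the tpid and selects the av/section/pesData/
+// record sub-settings; any other tag leaves the union as noinit.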
+void TunerFilter::getHidlTsSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
+    auto tsConf = config.get<TunerFilterConfiguration::ts>();
+    DemuxTsFilterSettings ts{
+        .tpid = static_cast<uint16_t>(tsConf.tpid),
+    };
+
+    TunerFilterSettings tunerSettings = tsConf.filterSettings;
+    switch (tunerSettings.getTag()) {
+        case TunerFilterSettings::av: {
+            ts.filterSettings.av(getAvSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::section: {
+            ts.filterSettings.section(getSectionSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::pesData: {
+            ts.filterSettings.pesData(getPesDataSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::record: {
+            ts.filterSettings.record(getRecordSettings(tunerSettings));
+            break;
+        }
+        default: {
+            ts.filterSettings.noinit();
+            break;
+        }
+    }
+    settings.ts(ts);
+}
+
+void TunerFilter::getHidlMmtpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
+    auto mmtpConf = config.get<TunerFilterConfiguration::mmtp>();
+    DemuxMmtpFilterSettings mmtp{
+        .mmtpPid = static_cast<DemuxMmtpPid>(mmtpConf.mmtpPid),
+    };
+
+    TunerFilterSettings tunerSettings = mmtpConf.filterSettings;
+    switch (tunerSettings.getTag()) {
+        case TunerFilterSettings::av: {
+            mmtp.filterSettings.av(getAvSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::section: {
+            mmtp.filterSettings.section(getSectionSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::pesData: {
+            mmtp.filterSettings.pesData(getPesDataSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::record: {
+            mmtp.filterSettings.record(getRecordSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::download: {
+            mmtp.filterSettings.download(getDownloadSettings(tunerSettings));
+            break;
+        }
+        default: {
+            mmtp.filterSettings.noinit();
+            break;
+        }
+    }
+    settings.mmtp(mmtp);
+}
+
+void TunerFilter::getHidlIpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
+    auto ipConf = config.get<TunerFilterConfiguration::ip>();
+    DemuxIpAddress ipAddr{
+        .srcPort = static_cast<uint16_t>(ipConf.ipAddr.srcPort),
+        .dstPort = static_cast<uint16_t>(ipConf.ipAddr.dstPort),
+    };
+
+    ipConf.ipAddr.srcIpAddress.isIpV6
+            ? ipAddr.srcIpAddress.v6(getIpV6Address(ipConf.ipAddr.srcIpAddress))
+            : ipAddr.srcIpAddress.v4(getIpV4Address(ipConf.ipAddr.srcIpAddress));
+    ipConf.ipAddr.dstIpAddress.isIpV6
+            ? ipAddr.dstIpAddress.v6(getIpV6Address(ipConf.ipAddr.dstIpAddress))
+            : ipAddr.dstIpAddress.v4(getIpV4Address(ipConf.ipAddr.dstIpAddress));
+    DemuxIpFilterSettings ip{
+        .ipAddr = ipAddr,
+    };
+
+    TunerFilterSettings tunerSettings = ipConf.filterSettings;
+    switch (tunerSettings.getTag()) {
+        case TunerFilterSettings::section: {
+            ip.filterSettings.section(getSectionSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::isPassthrough: {
+            ip.filterSettings.bPassthrough(tunerSettings.isPassthrough);
+            break;
+        }
+        default: {
+            ip.filterSettings.noinit();
+            break;
+        }
+    }
+    settings.ip(ip);
+}
+
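+// Address helpers copy the raw bytes into the fixed-size hidl_array; a
+// length mismatch returns the array unmodified rather than reporting an error.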
+hidl_array<uint8_t, IP_V6_LENGTH> TunerFilter::getIpV6Address(TunerDemuxIpAddress addr) {
+    hidl_array<uint8_t, IP_V6_LENGTH> ip;
+    if (addr.addr.size() != IP_V6_LENGTH) {
+        return ip;
+    }
+    copy(addr.addr.begin(), addr.addr.end(), ip.data());
+    return ip;
+}
+
+hidl_array<uint8_t, IP_V4_LENGTH> TunerFilter::getIpV4Address(TunerDemuxIpAddress addr) {
+    hidl_array<uint8_t, IP_V4_LENGTH> ip;
+    if (addr.addr.size() != IP_V4_LENGTH) {
+        return ip;
+    }
+    copy(addr.addr.begin(), addr.addr.end(), ip.data());
+    return ip;
+}
+
+void TunerFilter::getHidlTlvSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
+    auto tlvConf = config.get<TunerFilterConfiguration::tlv>();
+    DemuxTlvFilterSettings tlv{
+        .packetType = static_cast<uint8_t>(tlvConf.packetType),
+        .isCompressedIpPacket = tlvConf.isCompressedIpPacket,
+    };
+
+    TunerFilterSettings tunerSettings = tlvConf.filterSettings;
+    switch (tunerSettings.getTag()) {
+        case TunerFilterSettings::section: {
+            tlv.filterSettings.section(getSectionSettings(tunerSettings));
+            break;
+        }
+        case TunerFilterSettings::isPassthrough: {
+            tlv.filterSettings.bPassthrough(tunerSettings.isPassthrough);
+            break;
+        }
+        default: {
+            tlv.filterSettings.noinit();
+            break;
+        }
+    }
+    settings.tlv(tlv);
+}
+
+void TunerFilter::getHidlAlpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
+    auto alpConf = config.get<TunerFilterConfiguration::alp>();
+    DemuxAlpFilterSettings alp{
+        .packetType = static_cast<uint8_t>(alpConf.packetType),
+        .lengthType = static_cast<DemuxAlpLengthType>(alpConf.lengthType),
+    };
+
+    TunerFilterSettings tunerSettings = alpConf.filterSettings;
+    switch (tunerSettings.getTag()) {
+        case TunerFilterSettings::section: {
+            alp.filterSettings.section(getSectionSettings(tunerSettings));
+            break;
+        }
+        default: {
+            alp.filterSettings.noinit();
+            break;
+        }
+    }
+    settings.alp(alp);
+}
+
+DemuxFilterAvSettings TunerFilter::getAvSettings(const TunerFilterSettings& settings) {
+    DemuxFilterAvSettings av {
+        .isPassthrough = settings.get<TunerFilterSettings::av>().isPassthrough,
+    };
+    return av;
+}
+
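+// Section filter conversion: the condition union is either sectionBits
+// (filter/mask/mode byte vectors) or tableInfo (tableId and version).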
+DemuxFilterSectionSettings TunerFilter::getSectionSettings(const TunerFilterSettings& settings) {
+    auto s = settings.get<TunerFilterSettings::section>();
+    DemuxFilterSectionSettings section{
+        .isCheckCrc = s.isCheckCrc,
+        .isRepeat = s.isRepeat,
+        .isRaw = s.isRaw,
+    };
+
+    switch (s.condition.getTag()) {
+        case TunerFilterSectionCondition::sectionBits: {
+            auto sectionBits = s.condition.get<TunerFilterSectionCondition::sectionBits>();
+            vector<uint8_t> filter(sectionBits.filter.begin(), sectionBits.filter.end());
+            vector<uint8_t> mask(sectionBits.mask.begin(), sectionBits.mask.end());
+            vector<uint8_t> mode(sectionBits.mode.begin(), sectionBits.mode.end());
+            section.condition.sectionBits({
+                .filter = filter,
+                .mask = mask,
+                .mode = mode,
+            });
+            break;
+        }
+        case TunerFilterSectionCondition::tableInfo: {
+            auto tableInfo = s.condition.get<TunerFilterSectionCondition::tableInfo>();
+            section.condition.tableInfo({
+                .tableId = static_cast<uint16_t>(tableInfo.tableId),
+                .version = static_cast<uint16_t>(tableInfo.version),
+            });
+            break;
+        }
+        default: {
+            break;
+        }
+    }
+    return section;
+}
+
+DemuxFilterPesDataSettings TunerFilter::getPesDataSettings(const TunerFilterSettings& settings) {
+    DemuxFilterPesDataSettings pes{
+        .streamId = static_cast<DemuxStreamId>(
+                settings.get<TunerFilterSettings::pesData>().streamId),
+        .isRaw = settings.get<TunerFilterSettings::pesData>().isRaw,
+    };
+    return pes;
+}
+
+DemuxFilterRecordSettings TunerFilter::getRecordSettings(const TunerFilterSettings& settings) {
+    auto r = settings.get<TunerFilterSettings::record>();
+    DemuxFilterRecordSettings record{
+        .tsIndexMask = static_cast<uint32_t>(r.tsIndexMask),
+        .scIndexType = static_cast<DemuxRecordScIndexType>(r.scIndexType),
+    };
+
+    switch (r.scIndexMask.getTag()) {
+        case TunerFilterScIndexMask::sc: {
+            record.scIndexMask.sc(static_cast<uint32_t>(
+                    r.scIndexMask.get<TunerFilterScIndexMask::sc>()));
+            break;
+        }
+        case TunerFilterScIndexMask::scHevc: {
+            record.scIndexMask.scHevc(static_cast<uint32_t>(
+                    r.scIndexMask.get<TunerFilterScIndexMask::scHevc>()));
+            break;
+        }
+    }
+    return record;
+}
+
+DemuxFilterDownloadSettings TunerFilter::getDownloadSettings(const TunerFilterSettings& settings) {
+    DemuxFilterDownloadSettings download {
+        .downloadId = static_cast<uint32_t>(
+                settings.get<TunerFilterSettings::download>().downloadId),
+    };
+    return download;
+}
+
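+// Duplicates the filter's shared AV memory handle into an AIDL NativeHandle;
+// the out parameter is only populated on SUCCESS.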
+Status TunerFilter::getAvSharedHandleInfo(TunerFilterSharedHandleInfo* _aidl_return) {
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res;
+    mFilter_1_1->getAvSharedHandle([&](Result r, hidl_handle avMemory, uint64_t avMemSize) {
+        res = r;
+        if (res == Result::SUCCESS) {
+            TunerFilterSharedHandleInfo info{
+                .handle = dupToAidl(avMemory),
+                .size = static_cast<int64_t>(avMemSize),
+            };
+            *_aidl_return = move(info);
+        } else {
+            // Clear the caller's out parameter when no shared handle is available.
+            *_aidl_return = {};
+        }
+    });
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::releaseAvHandle(
+        const ::aidl::android::hardware::common::NativeHandle& handle, int64_t avDataId) {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mFilter->releaseAvHandle(hidl_handle(makeFromAidl(handle)), avDataId);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::start() {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+    Result res = mFilter->start();
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::stop() {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+    Result res = mFilter->stop();
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::flush() {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+    Result res = mFilter->flush();
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+Status TunerFilter::close() {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+    Result res = mFilter->close();
+    mFilter = NULL;
+    mFilter_1_1 = NULL;
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+sp<IFilter> TunerFilter::getHalFilter() {
+    return mFilter;
+}
+
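+// AV stream types only apply to audio/video filters on TS or MMTP demuxes;
+// getHidlAvStreamType picks the audio or video arm of the union accordingly.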
+bool TunerFilter::isAudioFilter() {
+    return (mMainType == (int)DemuxFilterMainType::TS
+                    && mSubType == (int)DemuxTsFilterType::AUDIO)
+            || (mMainType == (int)DemuxFilterMainType::MMTP
+                    && mSubType == (int)DemuxMmtpFilterType::AUDIO);
+}
+
+bool TunerFilter::isVideoFilter() {
+    return (mMainType == (int)DemuxFilterMainType::TS
+                    && mSubType == (int)DemuxTsFilterType::VIDEO)
+            || (mMainType == (int)DemuxFilterMainType::MMTP
+                    && mSubType == (int)DemuxMmtpFilterType::VIDEO);
+}
+
+bool TunerFilter::getHidlAvStreamType(int avStreamType, AvStreamType& type) {
+    if (isAudioFilter()) {
+        type.audio(static_cast<AudioStreamType>(avStreamType));
+        return true;
+    }
+
+    if (isVideoFilter()) {
+        type.video(static_cast<VideoStreamType>(avStreamType));
+        return true;
+    }
+
+    return false;
+}
+
+/////////////// FilterCallback ///////////////////////
+
+Return<void> TunerFilter::FilterCallback::onFilterStatus(DemuxFilterStatus status) {
+    if (mTunerFilterCallback != NULL) {
+        mTunerFilterCallback->onFilterStatus((int)status);
+    }
+    return Void();
+}
+
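+// 1.0 events are routed through the 1.1 path with an empty extended-event
+// list so both HAL versions share a single conversion routine.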
+Return<void> TunerFilter::FilterCallback::onFilterEvent(const DemuxFilterEvent& filterEvent) {
+    vector<DemuxFilterEventExt::Event> emptyEventsExt;
+    DemuxFilterEventExt emptyFilterEventExt {
+            .events = emptyEventsExt,
+    };
+    onFilterEvent_1_1(filterEvent, emptyFilterEventExt);
+    return Void();
+}
+
+Return<void> TunerFilter::FilterCallback::onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
+        const DemuxFilterEventExt& filterEventExt) {
+    if (mTunerFilterCallback != NULL) {
+        vector<DemuxFilterEvent::Event> events = filterEvent.events;
+        vector<DemuxFilterEventExt::Event> eventsExt = filterEventExt.events;
+        vector<TunerFilterEvent> tunerEvent;
+
+        getAidlFilterEvent(events, eventsExt, tunerEvent);
+        mTunerFilterCallback->onFilterEvent(tunerEvent);
+    }
+    return Void();
+}
+
+/////////////// FilterCallback Helper Methods ///////////////////////
+
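+// Event dispatch: when only extended events arrive, just monitorEvent and
+// startId are meaningful; otherwise the first regular event's discriminator
+// selects the converter used for the whole batch.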
+void TunerFilter::FilterCallback::getAidlFilterEvent(vector<DemuxFilterEvent::Event>& events,
+        vector<DemuxFilterEventExt::Event>& eventsExt,
+        vector<TunerFilterEvent>& tunerEvent) {
+    if (events.empty() && !eventsExt.empty()) {
+        auto eventExt = eventsExt[0];
+        switch (eventExt.getDiscriminator()) {
+            case DemuxFilterEventExt::Event::hidl_discriminator::monitorEvent: {
+                getMonitorEvent(eventsExt, tunerEvent);
+                return;
+            }
+            case DemuxFilterEventExt::Event::hidl_discriminator::startId: {
+                getRestartEvent(eventsExt, tunerEvent);
+                return;
+            }
+            default: {
+                break;
+            }
+        }
+        return;
+    }
+
+    if (!events.empty()) {
+        auto event = events[0];
+        switch (event.getDiscriminator()) {
+            case DemuxFilterEvent::Event::hidl_discriminator::media: {
+                getMediaEvent(events, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::section: {
+                getSectionEvent(events, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::pes: {
+                getPesEvent(events, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::tsRecord: {
+                getTsRecordEvent(events, eventsExt, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::mmtpRecord: {
+                getMmtpRecordEvent(events, eventsExt, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::download: {
+                getDownloadEvent(events, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::ipPayload: {
+                getIpPayloadEvent(events, tunerEvent);
+                break;
+            }
+            case DemuxFilterEvent::Event::hidl_discriminator::temi: {
+                getTemiEvent(events, tunerEvent);
+                break;
+            }
+            default: {
+                break;
+            }
+        }
+    }
+}
+
+void TunerFilter::FilterCallback::getMediaEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterMediaEvent mediaEvent = e.media();
+        TunerFilterMediaEvent tunerMedia;
+
+        tunerMedia.streamId = static_cast<char16_t>(mediaEvent.streamId);
+        tunerMedia.isPtsPresent = mediaEvent.isPtsPresent;
+        tunerMedia.pts = static_cast<long>(mediaEvent.pts);
+        tunerMedia.dataLength = static_cast<int>(mediaEvent.dataLength);
+        tunerMedia.offset = static_cast<int>(mediaEvent.offset);
+        tunerMedia.isSecureMemory = mediaEvent.isSecureMemory;
+        tunerMedia.avDataId = static_cast<long>(mediaEvent.avDataId);
+        tunerMedia.mpuSequenceNumber = static_cast<int>(mediaEvent.mpuSequenceNumber);
+        tunerMedia.isPesPrivateData = mediaEvent.isPesPrivateData;
+
+        if (mediaEvent.extraMetaData.getDiscriminator() ==
+                DemuxFilterMediaEvent::ExtraMetaData::hidl_discriminator::audio) {
+            tunerMedia.isAudioExtraMetaData = true;
+            tunerMedia.audio = {
+                .adFade = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().adFade),
+                .adPan = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().adPan),
+                .versionTextTag = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().versionTextTag),
+                .adGainCenter = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().adGainCenter),
+                .adGainFront = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().adGainFront),
+                .adGainSurround = static_cast<int8_t>(
+                        mediaEvent.extraMetaData.audio().adGainSurround),
+            };
+        } else {
+            tunerMedia.isAudioExtraMetaData = false;
+        }
+
+        if (mediaEvent.avMemory.getNativeHandle() != nullptr) {
+            tunerMedia.avMemory = dupToAidl(mediaEvent.avMemory.getNativeHandle());
+        }
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::media>(move(tunerMedia));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getSectionEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterSectionEvent sectionEvent = e.section();
+        TunerFilterSectionEvent tunerSection;
+
+        tunerSection.tableId = static_cast<char16_t>(sectionEvent.tableId);
+        tunerSection.version = static_cast<char16_t>(sectionEvent.version);
+        tunerSection.sectionNum = static_cast<char16_t>(sectionEvent.sectionNum);
+        tunerSection.dataLength = static_cast<char16_t>(sectionEvent.dataLength);
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::section>(move(tunerSection));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getPesEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterPesEvent pesEvent = e.pes();
+        TunerFilterPesEvent tunerPes;
+
+        tunerPes.streamId = static_cast<char16_t>(pesEvent.streamId);
+        tunerPes.dataLength = static_cast<char16_t>(pesEvent.dataLength);
+        tunerPes.mpuSequenceNumber = static_cast<int>(pesEvent.mpuSequenceNumber);
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::pes>(move(tunerPes));
+        res.push_back(move(tunerEvent));
+    }
+}
+
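+// Record events pair events[i] with eventsExt[i]; when a matching 1.1
+// extended event exists, its extra fields are copied and isExtended is set.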
+void TunerFilter::FilterCallback::getTsRecordEvent(vector<DemuxFilterEvent::Event>& events,
+        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        TunerFilterTsRecordEvent tunerTsRecord;
+        DemuxFilterTsRecordEvent tsRecordEvent = events[i].tsRecord();
+
+        TunerFilterScIndexMask scIndexMask;
+        if (tsRecordEvent.scIndexMask.getDiscriminator()
+                == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::sc) {
+            scIndexMask.set<TunerFilterScIndexMask::sc>(
+                    static_cast<int>(tsRecordEvent.scIndexMask.sc()));
+        } else if (tsRecordEvent.scIndexMask.getDiscriminator()
+                == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::scHevc) {
+            scIndexMask.set<TunerFilterScIndexMask::scHevc>(
+                    static_cast<int>(tsRecordEvent.scIndexMask.scHevc()));
+        }
+
+        if (tsRecordEvent.pid.getDiscriminator() == DemuxPid::hidl_discriminator::tPid) {
+            tunerTsRecord.pid = static_cast<char16_t>(tsRecordEvent.pid.tPid());
+        } else {
+            tunerTsRecord.pid = static_cast<char16_t>(Constant::INVALID_TS_PID);
+        }
+
+        tunerTsRecord.scIndexMask = scIndexMask;
+        tunerTsRecord.tsIndexMask = static_cast<int>(tsRecordEvent.tsIndexMask);
+        tunerTsRecord.byteNumber = static_cast<long>(tsRecordEvent.byteNumber);
+
+        if (eventsExt.size() > i && eventsExt[i].getDiscriminator() ==
+                    DemuxFilterEventExt::Event::hidl_discriminator::tsRecord) {
+            tunerTsRecord.isExtended = true;
+            tunerTsRecord.pts = static_cast<long>(eventsExt[i].tsRecord().pts);
+            tunerTsRecord.firstMbInSlice = static_cast<int>(eventsExt[i].tsRecord().firstMbInSlice);
+        } else {
+            tunerTsRecord.isExtended = false;
+        }
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::tsRecord>(move(tunerTsRecord));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getMmtpRecordEvent(vector<DemuxFilterEvent::Event>& events,
+        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        TunerFilterMmtpRecordEvent tunerMmtpRecord;
+        DemuxFilterMmtpRecordEvent mmtpRecordEvent = events[i].mmtpRecord();
+
+        tunerMmtpRecord.scHevcIndexMask = static_cast<int>(mmtpRecordEvent.scHevcIndexMask);
+        tunerMmtpRecord.byteNumber = static_cast<long>(mmtpRecordEvent.byteNumber);
+
+        if (eventsExt.size() > i && eventsExt[i].getDiscriminator() ==
+                    DemuxFilterEventExt::Event::hidl_discriminator::mmtpRecord) {
+            tunerMmtpRecord.isExtended = true;
+            tunerMmtpRecord.pts = static_cast<long>(eventsExt[i].mmtpRecord().pts);
+            tunerMmtpRecord.mpuSequenceNumber =
+                    static_cast<int>(eventsExt[i].mmtpRecord().mpuSequenceNumber);
+            tunerMmtpRecord.firstMbInSlice =
+                    static_cast<int>(eventsExt[i].mmtpRecord().firstMbInSlice);
+            tunerMmtpRecord.tsIndexMask = static_cast<int>(eventsExt[i].mmtpRecord().tsIndexMask);
+        } else {
+            tunerMmtpRecord.isExtended = false;
+        }
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::mmtpRecord>(move(tunerMmtpRecord));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getDownloadEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterDownloadEvent downloadEvent = e.download();
+        TunerFilterDownloadEvent tunerDownload;
+
+        tunerDownload.itemId = static_cast<int>(downloadEvent.itemId);
+        tunerDownload.itemFragmentIndex = static_cast<int>(downloadEvent.itemFragmentIndex);
+        tunerDownload.mpuSequenceNumber = static_cast<int>(downloadEvent.mpuSequenceNumber);
+        tunerDownload.lastItemFragmentIndex = static_cast<int>(downloadEvent.lastItemFragmentIndex);
+        tunerDownload.dataLength = static_cast<char16_t>(downloadEvent.dataLength);
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::download>(move(tunerDownload));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getIpPayloadEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterIpPayloadEvent ipPayloadEvent = e.ipPayload();
+        TunerFilterIpPayloadEvent tunerIpPayload;
+
+        tunerIpPayload.dataLength = static_cast<char16_t>(ipPayloadEvent.dataLength);
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::ipPayload>(move(tunerIpPayload));
+        res.push_back(move(tunerEvent));
+    }
+}
+
+void TunerFilter::FilterCallback::getTemiEvent(
+        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
+    for (DemuxFilterEvent::Event e : events) {
+        DemuxFilterTemiEvent temiEvent = e.temi();
+        TunerFilterTemiEvent tunerTemi;
+
+        tunerTemi.pts = static_cast<long>(temiEvent.pts);
+        tunerTemi.descrTag = static_cast<int8_t>(temiEvent.descrTag);
+        vector<uint8_t> descrData = temiEvent.descrData;
+        tunerTemi.descrData.resize(descrData.size());
+        copy(descrData.begin(), descrData.end(), tunerTemi.descrData.begin());
+
+        TunerFilterEvent tunerEvent;
+        tunerEvent.set<TunerFilterEvent::temi>(move(tunerTemi));
+        res.push_back(move(tunerEvent));
+    }
+}
+
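+// Monitor and restart notifications carry a single payload, so only
+// eventsExt[0] is converted.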
+void TunerFilter::FilterCallback::getMonitorEvent(
+        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
+    DemuxFilterMonitorEvent monitorEvent = eventsExt[0].monitorEvent();
+    TunerFilterMonitorEvent tunerMonitor;
+
+    switch (monitorEvent.getDiscriminator()) {
+        case DemuxFilterMonitorEvent::hidl_discriminator::scramblingStatus: {
+            tunerMonitor.set<TunerFilterMonitorEvent::scramblingStatus>(
+                    static_cast<int>(monitorEvent.scramblingStatus()));
+            break;
+        }
+        case DemuxFilterMonitorEvent::hidl_discriminator::cid: {
+            tunerMonitor.set<TunerFilterMonitorEvent::cid>(static_cast<int>(monitorEvent.cid()));
+            break;
+        }
+    }
+
+    TunerFilterEvent tunerEvent;
+    tunerEvent.set<TunerFilterEvent::monitor>(move(tunerMonitor));
+    res.push_back(move(tunerEvent));
+}
+
+void TunerFilter::FilterCallback::getRestartEvent(
+        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
+    TunerFilterEvent tunerEvent;
+    tunerEvent.set<TunerFilterEvent::startId>(static_cast<int>(eventsExt[0].startId()));
+    res.push_back(move(tunerEvent));
+}
+}  // namespace android
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
new file mode 100644
index 0000000..ff4728c
--- /dev/null
+++ b/services/tuner/TunerFilter.h
@@ -0,0 +1,192 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERFILTER_H
+#define ANDROID_MEDIA_TUNERFILTER_H
+
+#include <aidl/android/media/tv/tuner/BnTunerFilter.h>
+#include <aidl/android/media/tv/tuner/ITunerFilterCallback.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <android/hardware/tv/tuner/1.1/IFilter.h>
+#include <android/hardware/tv/tuner/1.1/IFilterCallback.h>
+#include <android/hardware/tv/tuner/1.1/types.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <fmq/ConvertMQDescriptors.h>
+#include <fmq/MessageQueue.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::media::tv::tuner::BnTunerFilter;
+using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
+using ::aidl::android::media::tv::tuner::TunerDemuxIpAddress;
+using ::aidl::android::media::tv::tuner::TunerFilterConfiguration;
+using ::aidl::android::media::tv::tuner::TunerFilterDownloadEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterIpPayloadEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterMediaEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterMmtpRecordEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterMonitorEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterPesEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterScIndexMask;
+using ::aidl::android::media::tv::tuner::TunerFilterSectionEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterSharedHandleInfo;
+using ::aidl::android::media::tv::tuner::TunerFilterSettings;
+using ::aidl::android::media::tv::tuner::TunerFilterTemiEvent;
+using ::aidl::android::media::tv::tuner::TunerFilterTsRecordEvent;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_array;
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterIpPayloadEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMediaEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMmtpRecordEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesDataSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterRecordSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterTemiEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterTsRecordEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxPid;
+using ::android::hardware::tv::tuner::V1_0::IFilter;
+using ::android::hardware::tv::tuner::V1_1::AvStreamType;
+using ::android::hardware::tv::tuner::V1_1::DemuxFilterEventExt;
+using ::android::hardware::tv::tuner::V1_1::DemuxFilterMonitorEvent;
+using ::android::hardware::tv::tuner::V1_1::DemuxFilterTsRecordEventExt;
+using ::android::hardware::tv::tuner::V1_1::IFilterCallback;
+
+namespace android {
+
+using MQDesc = MQDescriptorSync<uint8_t>;
+using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
+
+const static int IP_V4_LENGTH = 4;
+const static int IP_V6_LENGTH = 16;
+
+class TunerFilter : public BnTunerFilter {
+
+public:
+    TunerFilter(sp<IFilter> filter, int mainType, int subType);
+    virtual ~TunerFilter();
+    Status getId(int32_t* _aidl_return) override;
+    Status getId64Bit(int64_t* _aidl_return) override;
+    Status getQueueDesc(AidlMQDesc* _aidl_return) override;
+    Status configure(const TunerFilterConfiguration& config) override;
+    Status configureMonitorEvent(int monitorEventType) override;
+    Status configureIpFilterContextId(int cid) override;
+    Status configureAvStreamType(int avStreamType) override;
+    Status getAvSharedHandleInfo(TunerFilterSharedHandleInfo* _aidl_return) override;
+    Status releaseAvHandle(const ::aidl::android::hardware::common::NativeHandle& handle,
+            int64_t avDataId) override;
+    Status setDataSource(const std::shared_ptr<ITunerFilter>& filter) override;
+    Status start() override;
+    Status stop() override;
+    Status flush() override;
+    Status close() override;
+    sp<IFilter> getHalFilter();
+
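+    // Bridges the HIDL IFilterCallback into the AIDL layer: incoming demux filter events
+    // are converted into TunerFilterEvent values and forwarded to mTunerFilterCallback.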
+    struct FilterCallback : public IFilterCallback {
+        FilterCallback(const std::shared_ptr<ITunerFilterCallback>& tunerFilterCallback)
+                : mTunerFilterCallback(tunerFilterCallback) {}
+
+        virtual Return<void> onFilterEvent(const DemuxFilterEvent& filterEvent);
+        virtual Return<void> onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
+                const DemuxFilterEventExt& filterEventExt);
+        virtual Return<void> onFilterStatus(DemuxFilterStatus status);
+
+        void getAidlFilterEvent(std::vector<DemuxFilterEvent::Event>& events,
+                std::vector<DemuxFilterEventExt::Event>& eventsExt,
+                std::vector<TunerFilterEvent>& tunerEvent);
+
+        void getMediaEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getSectionEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getPesEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getTsRecordEvent(
+                std::vector<DemuxFilterEvent::Event>& events,
+                std::vector<DemuxFilterEventExt::Event>& eventsExt,
+                std::vector<TunerFilterEvent>& res);
+        void getMmtpRecordEvent(
+                std::vector<DemuxFilterEvent::Event>& events,
+                std::vector<DemuxFilterEventExt::Event>& eventsExt,
+                std::vector<TunerFilterEvent>& res);
+        void getDownloadEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getIpPayloadEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getTemiEvent(
+                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
+        void getMonitorEvent(
+                std::vector<DemuxFilterEventExt::Event>& eventsExt,
+                std::vector<TunerFilterEvent>& res);
+        void getRestartEvent(
+                std::vector<DemuxFilterEventExt::Event>& eventsExt,
+                std::vector<TunerFilterEvent>& res);
+
+        std::shared_ptr<ITunerFilterCallback> mTunerFilterCallback;
+    };
+
+private:
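+    // Helpers that map the AIDL filter settings/configuration onto the HIDL demux types
+    // expected by the HAL.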
+    DemuxFilterAvSettings getAvSettings(const TunerFilterSettings& settings);
+    DemuxFilterSectionSettings getSectionSettings(const TunerFilterSettings& settings);
+    DemuxFilterPesDataSettings getPesDataSettings(const TunerFilterSettings& settings);
+    DemuxFilterRecordSettings getRecordSettings(const TunerFilterSettings& settings);
+    DemuxFilterDownloadSettings getDownloadSettings(const TunerFilterSettings& settings);
+
+    bool isAudioFilter();
+    bool isVideoFilter();
+    bool getHidlAvStreamType(int avStreamType, AvStreamType& type);
+
+    void getHidlTsSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
+    void getHidlMmtpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
+    void getHidlIpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
+    void getHidlTlvSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
+    void getHidlAlpSettings(
+        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
+
+    hidl_array<uint8_t, IP_V4_LENGTH> getIpV4Address(TunerDemuxIpAddress addr);
+    hidl_array<uint8_t, IP_V6_LENGTH> getIpV6Address(TunerDemuxIpAddress addr);
+
+    sp<IFilter> mFilter;
+    sp<::android::hardware::tv::tuner::V1_1::IFilter> mFilter_1_1;
+    int32_t mId;
+    int64_t mId64Bit;
+    int mMainType;
+    int mSubType;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERFILTER_H
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
new file mode 100644
index 0000000..74b5519
--- /dev/null
+++ b/services/tuner/TunerFrontend.cpp
@@ -0,0 +1,1094 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerFrontend"
+
+#include "TunerFrontend.h"
+#include "TunerLnb.h"
+
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3PlpSettings;
+using ::aidl::android::media::tv::tuner::TunerFrontendScanAtsc3PlpInfo;
+using ::aidl::android::media::tv::tuner::TunerFrontendStatusAtsc3PlpInfo;
+using ::aidl::android::media::tv::tuner::TunerFrontendUnionSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogSifStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogType;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtscModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Bandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3CodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3DemodOutputFormat;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Fec;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3TimeInterleaveMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcAnnex;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcOuterFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcSpectralInversion;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsPilot;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsVcmMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtConstellation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtHierarchy;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtPlpMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtTransmissionMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendInnerFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Coderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Rolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Settings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsStreamIdType;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendModulationStatus;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanAtsc3PlpInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanType;
+using ::android::hardware::tv::tuner::V1_0::FrontendStatusType;
+using ::android::hardware::tv::tuner::V1_0::Result;
+using ::android::hardware::tv::tuner::V1_1::FrontendAnalogAftFlag;
+using ::android::hardware::tv::tuner::V1_1::FrontendBandwidth;
+using ::android::hardware::tv::tuner::V1_1::FrontendCableTimeInterleaveMode;
+using ::android::hardware::tv::tuner::V1_1::FrontendDvbcBandwidth;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbBandwidth;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbCodeRate;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbGuardInterval;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbModulation;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbTimeInterleaveMode;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbTransmissionMode;
+using ::android::hardware::tv::tuner::V1_1::FrontendDvbsScanType;
+using ::android::hardware::tv::tuner::V1_1::FrontendGuardInterval;
+using ::android::hardware::tv::tuner::V1_1::FrontendInterleaveMode;
+using ::android::hardware::tv::tuner::V1_1::FrontendModulation;
+using ::android::hardware::tv::tuner::V1_1::FrontendRollOff;
+using ::android::hardware::tv::tuner::V1_1::FrontendTransmissionMode;
+using ::android::hardware::tv::tuner::V1_1::FrontendSpectralInversion;
+using ::android::hardware::tv::tuner::V1_1::FrontendStatusTypeExt1_1;
+
+namespace android {
+
+TunerFrontend::TunerFrontend(sp<IFrontend> frontend, int id) {
+    mFrontend = frontend;
+    mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
+    mId = id;
+}
+
+TunerFrontend::~TunerFrontend() {
+    mFrontend = NULL;
+    mFrontend_1_1 = NULL;
+    mId = -1;
+}
+
+Status TunerFrontend::setCallback(
+        const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
+    if (mFrontend == NULL) {
+        ALOGE("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (tunerFrontendCallback == NULL) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<IFrontendCallback> frontendCallback = new FrontendCallback(tunerFrontendCallback);
+    Result status = mFrontend->setCallback(frontendCallback);
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::tune(const TunerFrontendSettings& settings) {
+    if (mFrontend == NULL) {
+        ALOGE("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    FrontendSettings frontendSettings = getHidlFrontendSettings(settings);
+    if (settings.isExtended) {
+        if (mFrontend_1_1 == NULL) {
+            ALOGE("IFrontend_1_1 is not initialized");
+            return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        }
+        FrontendSettingsExt1_1 frontendSettingsExt = getHidlFrontendSettingsExt(settings);
+        status = mFrontend_1_1->tune_1_1(frontendSettings, frontendSettingsExt);
+    } else {
+        status = mFrontend->tune(frontendSettings);
+    }
+
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::stopTune() {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend->stopTune();
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::scan(const TunerFrontendSettings& settings, int frontendScanType) {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    FrontendSettings frontendSettings = getHidlFrontendSettings(settings);
+    if (settings.isExtended) {
+        if (mFrontend_1_1 == NULL) {
+            ALOGE("IFrontend_1_1 is not initialized");
+            return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        }
+        FrontendSettingsExt1_1 frontendSettingsExt = getHidlFrontendSettingsExt(settings);
+        status = mFrontend_1_1->scan_1_1(frontendSettings,
+                static_cast<FrontendScanType>(frontendScanType), frontendSettingsExt);
+    } else {
+        status = mFrontend->scan(
+                frontendSettings, static_cast<FrontendScanType>(frontendScanType));
+    }
+
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::stopScan() {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend->stopScan();
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend->setLnb(static_cast<TunerLnb*>(lnb.get())->getId());
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::setLna(bool bEnable) {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend->setLna(bEnable);
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::linkCiCamToFrontend(int ciCamId, int32_t* _aidl_return) {
+    if (mFrontend_1_1 == NULL) {
+        ALOGD("IFrontend_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
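+    // linkCiCam completes through a synchronous HIDL callback that returns both the result
+    // code and the LTS id assigned to the linked CICAM.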
+    int ltsId = -1;
+    Result status = Result::UNKNOWN_ERROR;
+    mFrontend_1_1->linkCiCam(static_cast<uint32_t>(ciCamId),
+            [&](Result r, uint32_t id) {
+                status = r;
+                ltsId = id;
+            });
+
+    if (status == Result::SUCCESS) {
+        *_aidl_return = ltsId;
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::unlinkCiCamToFrontend(int ciCamId) {
+    if (mFrontend_1_1 == NULL) {
+        ALOGD("IFrontend_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend_1_1->unlinkCiCam(ciCamId);
+    if (status == Result::SUCCESS) {
+        return Status::ok();
+    }
+
+    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+Status TunerFrontend::close() {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mFrontend->close();
+    mFrontend = NULL;
+    mFrontend_1_1 = NULL;
+
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerFrontend::getStatus(const vector<int32_t>& statusTypes,
+        vector<TunerFrontendStatus>* _aidl_return) {
+    if (mFrontend == NULL) {
+        ALOGD("IFrontend is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = Result::UNKNOWN_ERROR;
+    vector<FrontendStatus> status;
+    vector<FrontendStatusType> types;
+    for (auto s : statusTypes) {
+        types.push_back(static_cast<FrontendStatusType>(s));
+    }
+
+    mFrontend->getStatus(types, [&](Result r, const hidl_vec<FrontendStatus>& s) {
+        res = r;
+        status = s;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    getAidlFrontendStatus(status, *_aidl_return);
+    return Status::ok();
+}
+
+Status TunerFrontend::getStatusExtended_1_1(const vector<int32_t>& statusTypes,
+        vector<TunerFrontendStatus>* _aidl_return) {
+    if (mFrontend_1_1 == NULL) {
+        ALOGD("IFrontend_1_1 is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = Result::UNKNOWN_ERROR;
+    vector<FrontendStatusExt1_1> status;
+    vector<FrontendStatusTypeExt1_1> types;
+    for (auto s : statusTypes) {
+        types.push_back(static_cast<FrontendStatusTypeExt1_1>(s));
+    }
+
+    mFrontend_1_1->getStatusExt1_1(types, [&](Result r, const hidl_vec<FrontendStatusExt1_1>& s) {
+        res = r;
+        status = s;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    getAidlFrontendStatusExt(status, *_aidl_return);
+    return Status::ok();
+}
+
+Status TunerFrontend::getFrontendId(int* _aidl_return) {
+    *_aidl_return = mId;
+    return Status::ok();
+}
+
+/////////////// FrontendCallback ///////////////////////
+
+Return<void> TunerFrontend::FrontendCallback::onEvent(FrontendEventType frontendEventType) {
+    ALOGD("FrontendCallback::onEvent, type=%d", frontendEventType);
+    mTunerFrontendCallback->onEvent((int)frontendEventType);
+    return Void();
+}
+
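+// Repackages each HIDL scan message into the matching TunerFrontendScanMessage union member
+// before forwarding it to the AIDL callback.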
+Return<void> TunerFrontend::FrontendCallback::onScanMessage(
+        FrontendScanMessageType type, const FrontendScanMessage& message) {
+    ALOGD("FrontendCallback::onScanMessage, type=%d", type);
+    TunerFrontendScanMessage scanMessage;
+    switch(type) {
+        case FrontendScanMessageType::LOCKED: {
+            scanMessage.set<TunerFrontendScanMessage::isLocked>(message.isLocked());
+            break;
+        }
+        case FrontendScanMessageType::END: {
+            scanMessage.set<TunerFrontendScanMessage::isEnd>(message.isEnd());
+            break;
+        }
+        case FrontendScanMessageType::PROGRESS_PERCENT: {
+            scanMessage.set<TunerFrontendScanMessage::progressPercent>(message.progressPercent());
+            break;
+        }
+        case FrontendScanMessageType::FREQUENCY: {
+            auto f = message.frequencies();
+            vector<int> frequencies(begin(f), end(f));
+            scanMessage.set<TunerFrontendScanMessage::frequencies>(frequencies);
+            break;
+        }
+        case FrontendScanMessageType::SYMBOL_RATE: {
+            auto s = message.symbolRates();
+            vector<int> symbolRates(begin(s), end(s));
+            scanMessage.set<TunerFrontendScanMessage::symbolRates>(symbolRates);
+            break;
+        }
+        case FrontendScanMessageType::HIERARCHY: {
+            scanMessage.set<TunerFrontendScanMessage::hierarchy>((int)message.hierarchy());
+            break;
+        }
+        case FrontendScanMessageType::ANALOG_TYPE: {
+            scanMessage.set<TunerFrontendScanMessage::analogType>((int)message.analogType());
+            break;
+        }
+        case FrontendScanMessageType::PLP_IDS: {
+            auto p = message.plpIds();
+            vector<uint8_t> plpIds(begin(p), end(p));
+            scanMessage.set<TunerFrontendScanMessage::plpIds>(plpIds);
+            break;
+        }
+        case FrontendScanMessageType::GROUP_IDS: {
+            auto g = message.groupIds();
+            vector<uint8_t> groupIds(begin(g), end(g));
+            scanMessage.set<TunerFrontendScanMessage::groupIds>(groupIds);
+            break;
+        }
+        case FrontendScanMessageType::INPUT_STREAM_IDS: {
+            auto i = message.inputStreamIds();
+            vector<char16_t> streamIds(begin(i), end(i));
+            scanMessage.set<TunerFrontendScanMessage::inputStreamIds>(streamIds);
+            break;
+        }
+        case FrontendScanMessageType::STANDARD: {
+            FrontendScanMessage::Standard std = message.std();
+            int standard;
+            if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::sStd) {
+                standard = (int) std.sStd();
+            } else if (std.getDiscriminator() ==
+                    FrontendScanMessage::Standard::hidl_discriminator::tStd) {
+                standard = (int) std.tStd();
+            } else if (std.getDiscriminator() ==
+                    FrontendScanMessage::Standard::hidl_discriminator::sifStd) {
+                standard = (int) std.sifStd();
+            }
+            scanMessage.set<TunerFrontendScanMessage::std>(standard);
+            break;
+        }
+        case FrontendScanMessageType::ATSC3_PLP_INFO: {
+            vector<FrontendScanAtsc3PlpInfo> plpInfos = message.atsc3PlpInfos();
+            vector<TunerFrontendScanAtsc3PlpInfo> tunerPlpInfos;
+            for (int i = 0; i < plpInfos.size(); i++) {
+                auto info = plpInfos[i];
+                int8_t plpId = (int8_t) info.plpId;
+                bool lls = (bool) info.bLlsFlag;
+                TunerFrontendScanAtsc3PlpInfo plpInfo{
+                    .plpId = plpId,
+                    .llsFlag = lls,
+                };
+                tunerPlpInfos.push_back(plpInfo);
+            }
+            scanMessage.set<TunerFrontendScanMessage::atsc3PlpInfos>(tunerPlpInfos);
+            break;
+        }
+        default:
+            break;
+    }
+    mTunerFrontendCallback->onScanMessage((int)type, scanMessage);
+    return Void();
+}
+
+Return<void> TunerFrontend::FrontendCallback::onScanMessageExt1_1(
+        FrontendScanMessageTypeExt1_1 type, const FrontendScanMessageExt1_1& message) {
+    ALOGD("onScanMessageExt1_1::onScanMessage, type=%d", type);
+    TunerFrontendScanMessage scanMessage;
+    switch(type) {
+        case FrontendScanMessageTypeExt1_1::MODULATION: {
+            FrontendModulation m = message.modulation();
+            int modulation;
+            switch (m.getDiscriminator()) {
+                case FrontendModulation::hidl_discriminator::dvbc:
+                    modulation = (int) m.dvbc();
+                    break;
+                case FrontendModulation::hidl_discriminator::dvbt:
+                    modulation = (int) m.dvbt();
+                    break;
+                case FrontendModulation::hidl_discriminator::dvbs:
+                    modulation = (int) m.dvbs();
+                    break;
+                case FrontendModulation::hidl_discriminator::isdbs:
+                    modulation = (int) m.isdbs();
+                    break;
+                case FrontendModulation::hidl_discriminator::isdbs3:
+                    modulation = (int) m.isdbs3();
+                    break;
+                case FrontendModulation::hidl_discriminator::isdbt:
+                    modulation = (int) m.isdbt();
+                    break;
+                case FrontendModulation::hidl_discriminator::atsc:
+                    modulation = (int) m.atsc();
+                    break;
+                case FrontendModulation::hidl_discriminator::atsc3:
+                    modulation = (int) m.atsc3();
+                    break;
+                case FrontendModulation::hidl_discriminator::dtmb:
+                    modulation = (int) m.dtmb();
+                    break;
+            }
+            scanMessage.set<TunerFrontendScanMessage::modulation>(modulation);
+            break;
+        }
+        case FrontendScanMessageTypeExt1_1::DVBC_ANNEX: {
+            scanMessage.set<TunerFrontendScanMessage::annex>((int)message.annex());
+            break;
+        }
+        case FrontendScanMessageTypeExt1_1::HIGH_PRIORITY: {
+            scanMessage.set<TunerFrontendScanMessage::isHighPriority>(message.isHighPriority());
+            break;
+        }
+        default:
+            break;
+    }
+    mTunerFrontendCallback->onScanMessage((int)type, scanMessage);
+    return Void();
+}
+
+/////////////// TunerFrontend Helper Methods ///////////////////////
+
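+// Converts each HIDL FrontendStatus union member into the corresponding TunerFrontendStatus
+// entry and appends it to aidlStatus.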
+void TunerFrontend::getAidlFrontendStatus(
+        vector<FrontendStatus>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus) {
+    for (FrontendStatus s : hidlStatus) {
+        TunerFrontendStatus status;
+        switch (s.getDiscriminator()) {
+            case FrontendStatus::hidl_discriminator::isDemodLocked: {
+                status.set<TunerFrontendStatus::isDemodLocked>(s.isDemodLocked());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::snr: {
+                status.set<TunerFrontendStatus::snr>((int)s.snr());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::ber: {
+                status.set<TunerFrontendStatus::ber>((int)s.ber());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::per: {
+                status.set<TunerFrontendStatus::per>((int)s.per());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::preBer: {
+                status.set<TunerFrontendStatus::preBer>((int)s.preBer());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::signalQuality: {
+                status.set<TunerFrontendStatus::signalQuality>((int)s.signalQuality());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::signalStrength: {
+                status.set<TunerFrontendStatus::signalStrength>((int)s.signalStrength());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::symbolRate: {
+                status.set<TunerFrontendStatus::symbolRate>((int)s.symbolRate());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::innerFec: {
+                status.set<TunerFrontendStatus::innerFec>((long)s.innerFec());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::modulation: {
+                switch (s.modulation().getDiscriminator()) {
+                    case FrontendModulationStatus::hidl_discriminator::dvbc:
+                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().dvbc());
+                        aidlStatus.push_back(status);
+                        break;
+                    case FrontendModulationStatus::hidl_discriminator::dvbs:
+                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().dvbs());
+                        aidlStatus.push_back(status);
+                        break;
+                    case FrontendModulationStatus::hidl_discriminator::isdbs:
+                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbs());
+                        aidlStatus.push_back(status);
+                        break;
+                    case FrontendModulationStatus::hidl_discriminator::isdbs3:
+                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbs3());
+                        aidlStatus.push_back(status);
+                        break;
+                    case FrontendModulationStatus::hidl_discriminator::isdbt:
+                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbt());
+                        aidlStatus.push_back(status);
+                        break;
+                }
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::inversion: {
+                status.set<TunerFrontendStatus::inversion>((int)s.inversion());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::lnbVoltage: {
+                status.set<TunerFrontendStatus::lnbVoltage>((int)s.lnbVoltage());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::plpId: {
+                status.set<TunerFrontendStatus::plpId>((int8_t)s.plpId());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::isEWBS: {
+                status.set<TunerFrontendStatus::isEWBS>(s.isEWBS());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::agc: {
+                status.set<TunerFrontendStatus::agc>((int8_t)s.agc());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::isLnaOn: {
+                status.set<TunerFrontendStatus::isLnaOn>(s.isLnaOn());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::isLayerError: {
+                vector<bool> e(s.isLayerError().begin(), s.isLayerError().end());
+                status.set<TunerFrontendStatus::isLayerError>(e);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::mer: {
+                status.set<TunerFrontendStatus::mer>((int)s.mer());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::freqOffset: {
+                status.set<TunerFrontendStatus::freqOffset>((int)s.freqOffset());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::hierarchy: {
+                status.set<TunerFrontendStatus::hierarchy>((int)s.hierarchy());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::isRfLocked: {
+                status.set<TunerFrontendStatus::isRfLocked>(s.isRfLocked());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatus::hidl_discriminator::plpInfo: {
+                vector<TunerFrontendStatusAtsc3PlpInfo> info;
+                for (auto i : s.plpInfo()) {
+                    info.push_back({
+                        .plpId = (int8_t)i.plpId,
+                        .isLocked = i.isLocked,
+                        .uec = (int)i.uec,
+                    });
+                }
+                status.set<TunerFrontendStatus::plpInfo>(info);
+                aidlStatus.push_back(status);
+                break;
+            }
+        }
+    }
+}
+
+void TunerFrontend::getAidlFrontendStatusExt(
+        vector<FrontendStatusExt1_1>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus) {
+    for (FrontendStatusExt1_1 s : hidlStatus) {
+        TunerFrontendStatus status;
+        switch (s.getDiscriminator()) {
+            case FrontendStatusExt1_1::hidl_discriminator::modulations: {
+                vector<int> aidlMod;
+                for (auto m : s.modulations()) {
+                    switch (m.getDiscriminator()) {
+                        case FrontendModulation::hidl_discriminator::dvbc:
+                            aidlMod.push_back((int)m.dvbc());
+                            break;
+                        case FrontendModulation::hidl_discriminator::dvbs:
+                            aidlMod.push_back((int)m.dvbs());
+                            break;
+                        case FrontendModulation::hidl_discriminator::dvbt:
+                            aidlMod.push_back((int)m.dvbt());
+                            break;
+                        case FrontendModulation::hidl_discriminator::isdbs:
+                            aidlMod.push_back((int)m.isdbs());
+                            break;
+                        case FrontendModulation::hidl_discriminator::isdbs3:
+                            aidlMod.push_back((int)m.isdbs3());
+                            break;
+                        case FrontendModulation::hidl_discriminator::isdbt:
+                            aidlMod.push_back((int)m.isdbt());
+                            break;
+                        case FrontendModulation::hidl_discriminator::atsc:
+                            aidlMod.push_back((int)m.atsc());
+                            break;
+                        case FrontendModulation::hidl_discriminator::atsc3:
+                            aidlMod.push_back((int)m.atsc3());
+                            break;
+                        case FrontendModulation::hidl_discriminator::dtmb:
+                            aidlMod.push_back((int)m.dtmb());
+                            break;
+                    }
+                }
+                status.set<TunerFrontendStatus::modulations>(aidlMod);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::bers: {
+                vector<int> b(s.bers().begin(), s.bers().end());
+                status.set<TunerFrontendStatus::bers>(b);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::codeRates: {
+                vector<int64_t> codeRates;
+                for (auto c : s.codeRates()) {
+                    codeRates.push_back((long)c);
+                }
+                status.set<TunerFrontendStatus::codeRates>(codeRates);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::bandwidth: {
+                switch (s.bandwidth().getDiscriminator()) {
+                    case FrontendBandwidth::hidl_discriminator::atsc3:
+                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().atsc3());
+                        break;
+                    case FrontendBandwidth::hidl_discriminator::dvbc:
+                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dvbc());
+                        break;
+                    case FrontendBandwidth::hidl_discriminator::dvbt:
+                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dvbt());
+                        break;
+                    case FrontendBandwidth::hidl_discriminator::isdbt:
+                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().isdbt());
+                        break;
+                    case FrontendBandwidth::hidl_discriminator::dtmb:
+                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dtmb());
+                        break;
+                }
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::interval: {
+                switch (s.interval().getDiscriminator()) {
+                    case FrontendGuardInterval::hidl_discriminator::dvbt:
+                        status.set<TunerFrontendStatus::interval>((int)s.interval().dvbt());
+                        break;
+                    case FrontendGuardInterval::hidl_discriminator::isdbt:
+                        status.set<TunerFrontendStatus::interval>((int)s.interval().isdbt());
+                        break;
+                    case FrontendGuardInterval::hidl_discriminator::dtmb:
+                        status.set<TunerFrontendStatus::interval>((int)s.interval().dtmb());
+                        break;
+                }
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::transmissionMode: {
+                switch (s.transmissionMode().getDiscriminator()) {
+                    case FrontendTransmissionMode::hidl_discriminator::dvbt:
+                        status.set<TunerFrontendStatus::transmissionMode>(
+                                (int)s.transmissionMode().dvbt());
+                        break;
+                    case FrontendTransmissionMode::hidl_discriminator::isdbt:
+                        status.set<TunerFrontendStatus::transmissionMode>(
+                                (int)s.transmissionMode().isdbt());
+                        break;
+                    case FrontendTransmissionMode::hidl_discriminator::dtmb:
+                        status.set<TunerFrontendStatus::transmissionMode>(
+                                (int)s.transmissionMode().dtmb());
+                        break;
+                }
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::uec: {
+                status.set<TunerFrontendStatus::uec>((int)s.uec());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::systemId: {
+                status.set<TunerFrontendStatus::systemId>((char16_t)s.systemId());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::interleaving: {
+                vector<int> aidlInter;
+                for (auto i : s.interleaving()) {
+                    switch (i.getDiscriminator()) {
+                        case FrontendInterleaveMode::hidl_discriminator::atsc3:
+                            aidlInter.push_back((int)i.atsc3());
+                            break;
+                        case FrontendInterleaveMode::hidl_discriminator::dvbc:
+                            aidlInter.push_back((int)i.dvbc());
+                            break;
+                        case FrontendInterleaveMode::hidl_discriminator::dtmb:
+                            aidlInter.push_back((int)i.dtmb());
+                            break;
+                    }
+                }
+                status.set<TunerFrontendStatus::interleaving>(aidlInter);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::isdbtSegment: {
+                auto seg = s.isdbtSegment();
+                vector<uint8_t> i(seg.begin(), seg.end());
+                status.set<TunerFrontendStatus::isdbtSegment>(i);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::tsDataRate: {
+                vector<int> ts(s.tsDataRate().begin(), s.tsDataRate().end());
+                status.set<TunerFrontendStatus::tsDataRate>(ts);
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::rollOff: {
+                switch (s.rollOff().getDiscriminator()) {
+                    case FrontendRollOff::hidl_discriminator::dvbs:
+                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().dvbs());
+                        break;
+                    case FrontendRollOff::hidl_discriminator::isdbs:
+                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().isdbs());
+                        break;
+                    case FrontendRollOff::hidl_discriminator::isdbs3:
+                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().isdbs3());
+                        break;
+                }
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::isMiso: {
+                status.set<TunerFrontendStatus::isMiso>(s.isMiso());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::isLinear: {
+                status.set<TunerFrontendStatus::isLinear>(s.isLinear());
+                aidlStatus.push_back(status);
+                break;
+            }
+            case FrontendStatusExt1_1::hidl_discriminator::isShortFrames: {
+                status.set<TunerFrontendStatus::isShortFrames>(s.isShortFrames());
+                aidlStatus.push_back(status);
+                break;
+            }
+        }
+    }
+}
+
+hidl_vec<FrontendAtsc3PlpSettings> TunerFrontend::getAtsc3PlpSettings(
+        const TunerFrontendAtsc3Settings& settings) {
+    int len = settings.plpSettings.size();
+    hidl_vec<FrontendAtsc3PlpSettings> plps = hidl_vec<FrontendAtsc3PlpSettings>(len);
+    // parse PLP settings
+    for (int i = 0; i < len; i++) {
+        uint8_t plpId = static_cast<uint8_t>(settings.plpSettings[i].plpId);
+        FrontendAtsc3Modulation modulation =
+                static_cast<FrontendAtsc3Modulation>(settings.plpSettings[i].modulation);
+        FrontendAtsc3TimeInterleaveMode interleaveMode =
+                static_cast<FrontendAtsc3TimeInterleaveMode>(
+                        settings.plpSettings[i].interleaveMode);
+        FrontendAtsc3CodeRate codeRate =
+                static_cast<FrontendAtsc3CodeRate>(settings.plpSettings[i].codeRate);
+        FrontendAtsc3Fec fec =
+                static_cast<FrontendAtsc3Fec>(settings.plpSettings[i].fec);
+        FrontendAtsc3PlpSettings frontendAtsc3PlpSettings {
+                .plpId = plpId,
+                .modulation = modulation,
+                .interleaveMode = interleaveMode,
+                .codeRate = codeRate,
+                .fec = fec,
+        };
+        plps[i] = frontendAtsc3PlpSettings;
+    }
+    return plps;
+}
+
+FrontendDvbsCodeRate TunerFrontend::getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate) {
+    FrontendInnerFec innerFec = static_cast<FrontendInnerFec>(codeRate.fec);
+    bool isLinear = codeRate.isLinear;
+    bool isShortFrames = codeRate.isShortFrames;
+    uint32_t bitsPer1000Symbol = static_cast<uint32_t>(codeRate.bitsPer1000Symbol);
+    FrontendDvbsCodeRate coderate {
+            .fec = innerFec,
+            .isLinear = isLinear,
+            .isShortFrames = isShortFrames,
+            .bitsPer1000Symbol = bitsPer1000Symbol,
+    };
+    return coderate;
+}
+
+FrontendSettings TunerFrontend::getHidlFrontendSettings(const TunerFrontendSettings& aidlSettings) {
+    auto settings = aidlSettings.settings;
+    FrontendSettings frontendSettings;
+
+    switch (settings.getTag()) {
+        case TunerFrontendUnionSettings::analog: {
+            auto analog = settings.get<TunerFrontendUnionSettings::analog>();
+            frontendSettings.analog({
+                .frequency = static_cast<uint32_t>(analog.frequency),
+                .type = static_cast<FrontendAnalogType>(analog.signalType),
+                .sifStandard = static_cast<FrontendAnalogSifStandard>(analog.sifStandard),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::atsc: {
+            auto atsc = settings.get<TunerFrontendUnionSettings::atsc>();
+            frontendSettings.atsc({
+                .frequency = static_cast<uint32_t>(atsc.frequency),
+                .modulation = static_cast<FrontendAtscModulation>(atsc.modulation),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::atsc3: {
+            auto atsc3 = settings.get<TunerFrontendUnionSettings::atsc3>();
+            frontendSettings.atsc3({
+                .frequency = static_cast<uint32_t>(atsc3.frequency),
+                .bandwidth = static_cast<FrontendAtsc3Bandwidth>(atsc3.bandwidth),
+                .demodOutputFormat = static_cast<FrontendAtsc3DemodOutputFormat>(
+                        atsc3.demodOutputFormat),
+                .plpSettings = getAtsc3PlpSettings(atsc3),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::cable: {
+            auto dvbc = settings.get<TunerFrontendUnionSettings::cable>();
+            frontendSettings.dvbc({
+                .frequency = static_cast<uint32_t>(dvbc.frequency),
+                .modulation = static_cast<FrontendDvbcModulation>(dvbc.modulation),
+                .fec = static_cast<FrontendInnerFec>(dvbc.innerFec),
+                .symbolRate = static_cast<uint32_t>(dvbc.symbolRate),
+                .outerFec = static_cast<FrontendDvbcOuterFec>(dvbc.outerFec),
+                .annex = static_cast<FrontendDvbcAnnex>(dvbc.annex),
+                .spectralInversion = static_cast<FrontendDvbcSpectralInversion>(
+                        dvbc.spectralInversion),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::dvbs: {
+            auto dvbs = settings.get<TunerFrontendUnionSettings::dvbs>();
+            frontendSettings.dvbs({
+                .frequency = static_cast<uint32_t>(dvbs.frequency),
+                .modulation = static_cast<FrontendDvbsModulation>(dvbs.modulation),
+                .coderate = getDvbsCodeRate(dvbs.codeRate),
+                .symbolRate = static_cast<uint32_t>(dvbs.symbolRate),
+                .rolloff = static_cast<FrontendDvbsRolloff>(dvbs.rolloff),
+                .pilot = static_cast<FrontendDvbsPilot>(dvbs.pilot),
+                .inputStreamId = static_cast<uint32_t>(dvbs.inputStreamId),
+                .standard = static_cast<FrontendDvbsStandard>(dvbs.standard),
+                .vcmMode = static_cast<FrontendDvbsVcmMode>(dvbs.vcm),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::dvbt: {
+            auto dvbt = settings.get<TunerFrontendUnionSettings::dvbt>();
+            frontendSettings.dvbt({
+                .frequency = static_cast<uint32_t>(dvbt.frequency),
+                .transmissionMode = static_cast<FrontendDvbtTransmissionMode>(
+                        dvbt.transmissionMode),
+                .bandwidth = static_cast<FrontendDvbtBandwidth>(dvbt.bandwidth),
+                .constellation = static_cast<FrontendDvbtConstellation>(dvbt.constellation),
+                .hierarchy = static_cast<FrontendDvbtHierarchy>(dvbt.hierarchy),
+                .hpCoderate = static_cast<FrontendDvbtCoderate>(dvbt.hpCodeRate),
+                .lpCoderate = static_cast<FrontendDvbtCoderate>(dvbt.lpCodeRate),
+                .guardInterval = static_cast<FrontendDvbtGuardInterval>(dvbt.guardInterval),
+                .isHighPriority = dvbt.isHighPriority,
+                .standard = static_cast<FrontendDvbtStandard>(dvbt.standard),
+                .isMiso = dvbt.isMiso,
+                .plpMode = static_cast<FrontendDvbtPlpMode>(dvbt.plpMode),
+                .plpId = static_cast<uint8_t>(dvbt.plpId),
+                .plpGroupId = static_cast<uint8_t>(dvbt.plpGroupId),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::isdbs: {
+            auto isdbs = settings.get<TunerFrontendUnionSettings::isdbs>();
+            frontendSettings.isdbs({
+                .frequency = static_cast<uint32_t>(isdbs.frequency),
+                .streamId = static_cast<uint16_t>(isdbs.streamId),
+                .streamIdType = static_cast<FrontendIsdbsStreamIdType>(isdbs.streamIdType),
+                .modulation = static_cast<FrontendIsdbsModulation>(isdbs.modulation),
+                .coderate = static_cast<FrontendIsdbsCoderate>(isdbs.codeRate),
+                .symbolRate = static_cast<uint32_t>(isdbs.symbolRate),
+                .rolloff = static_cast<FrontendIsdbsRolloff>(isdbs.rolloff),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::isdbs3: {
+            auto isdbs3 = settings.get<TunerFrontendUnionSettings::isdbs3>();
+            frontendSettings.isdbs3({
+                .frequency = static_cast<uint32_t>(isdbs3.frequency),
+                .streamId = static_cast<uint16_t>(isdbs3.streamId),
+                .streamIdType = static_cast<FrontendIsdbsStreamIdType>(isdbs3.streamIdType),
+                .modulation = static_cast<FrontendIsdbs3Modulation>(isdbs3.modulation),
+                .coderate = static_cast<FrontendIsdbs3Coderate>(isdbs3.codeRate),
+                .symbolRate = static_cast<uint32_t>(isdbs3.symbolRate),
+                .rolloff = static_cast<FrontendIsdbs3Rolloff>(isdbs3.rolloff),
+            });
+            break;
+        }
+        case TunerFrontendUnionSettings::isdbt: {
+            auto isdbt = settings.get<TunerFrontendUnionSettings::isdbt>();
+            frontendSettings.isdbt({
+                .frequency = static_cast<uint32_t>(isdbt.frequency),
+                .modulation = static_cast<FrontendIsdbtModulation>(isdbt.modulation),
+                .bandwidth = static_cast<FrontendIsdbtBandwidth>(isdbt.bandwidth),
+                .mode = static_cast<FrontendIsdbtMode>(isdbt.mode),
+                .coderate = static_cast<FrontendIsdbtCoderate>(isdbt.codeRate),
+                .guardInterval = static_cast<FrontendIsdbtGuardInterval>(isdbt.guardInterval),
+                .serviceAreaId = static_cast<uint32_t>(isdbt.serviceAreaId),
+            });
+            break;
+        }
+        default:
+            break;
+    }
+
+    return frontendSettings;
+}
+
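+// Builds the 1.1 settings extension; settings that are not flagged as extended (or have no
+// extended variant) fall back to the noinit() union member.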
+FrontendSettingsExt1_1 TunerFrontend::getHidlFrontendSettingsExt(
+        const TunerFrontendSettings& aidlSettings) {
+    FrontendSettingsExt1_1 frontendSettingsExt{
+        .endFrequency = static_cast<uint32_t>(aidlSettings.endFrequency),
+        .inversion = static_cast<FrontendSpectralInversion>(aidlSettings.inversion),
+    };
+
+    auto settings = aidlSettings.settings;
+    switch (settings.getTag()) {
+        case TunerFrontendUnionSettings::analog: {
+            auto analog = settings.get<TunerFrontendUnionSettings::analog>();
+            if (analog.isExtended) {
+                frontendSettingsExt.settingExt.analog({
+                    .aftFlag = static_cast<FrontendAnalogAftFlag>(analog.aftFlag),
+                });
+            } else {
+                frontendSettingsExt.settingExt.noinit();
+            }
+            break;
+        }
+        case TunerFrontendUnionSettings::cable: {
+            auto dvbc = settings.get<TunerFrontendUnionSettings::cable>();
+            if (dvbc.isExtended) {
+                frontendSettingsExt.settingExt.dvbc({
+                    .interleaveMode = static_cast<FrontendCableTimeInterleaveMode>(
+                            dvbc.interleaveMode),
+                    .bandwidth = static_cast<FrontendDvbcBandwidth>(
+                            dvbc.bandwidth),
+                });
+            } else {
+                frontendSettingsExt.settingExt.noinit();
+            }
+            break;
+        }
+        case TunerFrontendUnionSettings::dvbs: {
+            auto dvbs = settings.get<TunerFrontendUnionSettings::dvbs>();
+            if (dvbs.isExtended) {
+                frontendSettingsExt.settingExt.dvbs({
+                    .scanType = static_cast<FrontendDvbsScanType>(dvbs.scanType),
+                    .isDiseqcRxMessage = dvbs.isDiseqcRxMessage,
+                });
+            } else {
+                frontendSettingsExt.settingExt.noinit();
+            }
+            break;
+        }
+        case TunerFrontendUnionSettings::dvbt: {
+            auto dvbt = settings.get<TunerFrontendUnionSettings::dvbt>();
+            if (dvbt.isExtended) {
+                frontendSettingsExt.settingExt.dvbt({
+                    .constellation =
+                            static_cast<hardware::tv::tuner::V1_1::FrontendDvbtConstellation>(
+                                    dvbt.constellation),
+                    .transmissionMode =
+                            static_cast<hardware::tv::tuner::V1_1::FrontendDvbtTransmissionMode>(
+                                    dvbt.transmissionMode),
+                });
+            } else {
+                frontendSettingsExt.settingExt.noinit();
+            }
+            break;
+        }
+        case TunerFrontendUnionSettings::dtmb: {
+            auto dtmb = settings.get<TunerFrontendUnionSettings::dtmb>();
+            frontendSettingsExt.settingExt.dtmb({
+                .frequency = static_cast<uint32_t>(dtmb.frequency),
+                .transmissionMode = static_cast<FrontendDtmbTransmissionMode>(
+                        dtmb.transmissionMode),
+                .bandwidth = static_cast<FrontendDtmbBandwidth>(dtmb.bandwidth),
+                .modulation = static_cast<FrontendDtmbModulation>(dtmb.modulation),
+                .codeRate = static_cast<FrontendDtmbCodeRate>(dtmb.codeRate),
+                .guardInterval = static_cast<FrontendDtmbGuardInterval>(dtmb.guardInterval),
+                .interleaveMode = static_cast<FrontendDtmbTimeInterleaveMode>(dtmb.interleaveMode),
+            });
+            break;
+        }
+        default:
+            frontendSettingsExt.settingExt.noinit();
+            break;
+    }
+
+    return frontendSettingsExt;
+}
+}  // namespace android
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
new file mode 100644
index 0000000..22fd509
--- /dev/null
+++ b/services/tuner/TunerFrontend.h
@@ -0,0 +1,111 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERFRONTEND_H
+#define ANDROID_MEDIA_TUNERFRONTEND_H
+
+#include <aidl/android/media/tv/tuner/BnTunerFrontend.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <android/hardware/tv/tuner/1.1/IFrontend.h>
+#include <android/hardware/tv/tuner/1.1/IFrontendCallback.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerFrontend;
+using ::aidl::android::media::tv::tuner::ITunerFrontendCallback;
+using ::aidl::android::media::tv::tuner::ITunerLnb;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Settings;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCodeRate;
+using ::aidl::android::media::tv::tuner::TunerFrontendScanMessage;
+using ::aidl::android::media::tv::tuner::TunerFrontendSettings;
+using ::aidl::android::media::tv::tuner::TunerFrontendStatus;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3PlpSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsCodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendEventType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessage;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
+using ::android::hardware::tv::tuner::V1_0::FrontendSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendStatus;
+using ::android::hardware::tv::tuner::V1_0::IFrontend;
+using ::android::hardware::tv::tuner::V1_1::IFrontendCallback;
+using ::android::hardware::tv::tuner::V1_1::FrontendScanMessageExt1_1;
+using ::android::hardware::tv::tuner::V1_1::FrontendScanMessageTypeExt1_1;
+using ::android::hardware::tv::tuner::V1_1::FrontendSettingsExt1_1;
+using ::android::hardware::tv::tuner::V1_1::FrontendStatusExt1_1;
+
+using namespace std;
+
+namespace android {
+
+class TunerFrontend : public BnTunerFrontend {
+
+public:
+    TunerFrontend(sp<IFrontend> frontend, int id);
+    virtual ~TunerFrontend();
+    Status setCallback(
+            const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) override;
+    Status tune(const TunerFrontendSettings& settings) override;
+    Status stopTune() override;
+    Status scan(const TunerFrontendSettings& settings, int frontendScanType) override;
+    Status stopScan() override;
+    Status setLnb(const shared_ptr<ITunerLnb>& lnb) override;
+    Status setLna(bool bEnable) override;
+    Status linkCiCamToFrontend(int ciCamId, int32_t* _aidl_return) override;
+    Status unlinkCiCamToFrontend(int ciCamId) override;
+    Status close() override;
+    Status getStatus(const vector<int32_t>& statusTypes,
+            vector<TunerFrontendStatus>* _aidl_return) override;
+    Status getStatusExtended_1_1(const vector<int32_t>& statusTypes,
+            vector<TunerFrontendStatus>* _aidl_return) override;
+    Status getFrontendId(int* _aidl_return) override;
+
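+    // Forwards HIDL frontend events and scan messages from the Tuner HAL to
+    // the client's AIDL ITunerFrontendCallback.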
+    struct FrontendCallback : public IFrontendCallback {
+        FrontendCallback(const shared_ptr<ITunerFrontendCallback> tunerFrontendCallback)
+                : mTunerFrontendCallback(tunerFrontendCallback) {};
+
+        virtual Return<void> onEvent(FrontendEventType frontendEventType);
+        virtual Return<void> onScanMessage(
+                FrontendScanMessageType type, const FrontendScanMessage& message);
+        virtual Return<void> onScanMessageExt1_1(
+                FrontendScanMessageTypeExt1_1 type, const FrontendScanMessageExt1_1& message);
+
+        shared_ptr<ITunerFrontendCallback> mTunerFrontendCallback;
+    };
+
+private:
+    hidl_vec<FrontendAtsc3PlpSettings> getAtsc3PlpSettings(
+            const TunerFrontendAtsc3Settings& settings);
+    FrontendDvbsCodeRate getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate);
+    FrontendSettings getHidlFrontendSettings(const TunerFrontendSettings& aidlSettings);
+    FrontendSettingsExt1_1 getHidlFrontendSettingsExt(const TunerFrontendSettings& aidlSettings);
+    void getAidlFrontendStatus(
+            vector<FrontendStatus>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus);
+    void getAidlFrontendStatusExt(
+            vector<FrontendStatusExt1_1>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus);
+
+    int mId;
+    sp<IFrontend> mFrontend;
+    sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERFRONTEND_H
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
new file mode 100644
index 0000000..77248d4
--- /dev/null
+++ b/services/tuner/TunerLnb.cpp
@@ -0,0 +1,140 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerLnb"
+
+#include "TunerLnb.h"
+
+using ::android::hardware::tv::tuner::V1_0::LnbPosition;
+using ::android::hardware::tv::tuner::V1_0::LnbTone;
+using ::android::hardware::tv::tuner::V1_0::LnbVoltage;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+namespace android {
+
+TunerLnb::TunerLnb(sp<ILnb> lnb, int id) {
+    mLnb = lnb;
+    mId = id;
+}
+
+TunerLnb::~TunerLnb() {
+    mLnb = NULL;
+    mId = -1;
+}
+
+Status TunerLnb::setCallback(
+        const shared_ptr<ITunerLnbCallback>& tunerLnbCallback) {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (tunerLnbCallback == NULL) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<ILnbCallback> lnbCallback = new LnbCallback(tunerLnbCallback);
+    Result status = mLnb->setCallback(lnbCallback);
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerLnb::setVoltage(int voltage) {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mLnb->setVoltage(static_cast<LnbVoltage>(voltage));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerLnb::setTone(int tone) {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mLnb->setTone(static_cast<LnbTone>(tone));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerLnb::setSatellitePosition(int position) {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mLnb->setSatellitePosition(static_cast<LnbPosition>(position));
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerLnb::sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mLnb->sendDiseqcMessage(diseqcMessage);
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerLnb::close() {
+    if (mLnb == NULL) {
+        ALOGE("ILnb is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mLnb->close();
+    mLnb = NULL;
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+
+/////////////// ILnbCallback ///////////////////////
+
+Return<void> TunerLnb::LnbCallback::onEvent(const LnbEventType lnbEventType) {
+    if (mTunerLnbCallback != NULL) {
+        mTunerLnbCallback->onEvent((int)lnbEventType);
+    }
+    return Void();
+}
+
+Return<void> TunerLnb::LnbCallback::onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage) {
+    if (mTunerLnbCallback != NULL && diseqcMessage.size() > 0) {
+        vector<uint8_t> msg(begin(diseqcMessage), end(diseqcMessage));
+        mTunerLnbCallback->onDiseqcMessage(msg);
+    }
+    return Void();
+}
+}  // namespace android
diff --git a/services/tuner/TunerLnb.h b/services/tuner/TunerLnb.h
new file mode 100644
index 0000000..500d072
--- /dev/null
+++ b/services/tuner/TunerLnb.h
@@ -0,0 +1,71 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERLNB_H
+#define ANDROID_MEDIA_TUNERLNB_H
+
+#include <aidl/android/media/tv/tuner/BnTunerLnb.h>
+#include <android/hardware/tv/tuner/1.0/ILnb.h>
+#include <android/hardware/tv/tuner/1.0/ILnbCallback.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerLnb;
+using ::aidl::android::media::tv::tuner::ITunerLnbCallback;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::ILnb;
+using ::android::hardware::tv::tuner::V1_0::ILnbCallback;
+using ::android::hardware::tv::tuner::V1_0::LnbEventType;
+
+using namespace std;
+
+namespace android {
+
+class TunerLnb : public BnTunerLnb {
+
+public:
+    TunerLnb(sp<ILnb> lnb, int id);
+    virtual ~TunerLnb();
+    Status setCallback(const shared_ptr<ITunerLnbCallback>& tunerLnbCallback) override;
+    Status setVoltage(int voltage) override;
+    Status setTone(int tone) override;
+    Status setSatellitePosition(int position) override;
+    Status sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) override;
+    Status close() override;
+
+    int getId() { return mId; }
+
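+    // Forwards HIDL LNB events and DiSEqC messages from the Tuner HAL to the
+    // client's AIDL ITunerLnbCallback.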
+    struct LnbCallback : public ILnbCallback {
+        LnbCallback(const shared_ptr<ITunerLnbCallback> tunerLnbCallback)
+                : mTunerLnbCallback(tunerLnbCallback) {};
+
+        virtual Return<void> onEvent(const LnbEventType lnbEventType);
+        virtual Return<void> onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage);
+
+        shared_ptr<ITunerLnbCallback> mTunerLnbCallback;
+    };
+
+private:
+    int mId;
+    sp<ILnb> mLnb;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERLNB_H
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
new file mode 100644
index 0000000..5b4129a
--- /dev/null
+++ b/services/tuner/TunerService.cpp
@@ -0,0 +1,569 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerService"
+
+#include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+#include "TunerService.h"
+#include "TunerFrontend.h"
+#include "TunerLnb.h"
+#include "TunerDemux.h"
+#include "TunerDescrambler.h"
+
+using ::aidl::android::media::tv::tuner::TunerFrontendAnalogCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendAtscCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCableCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDvbtCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbs3Capabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbsCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendIsdbtCapabilities;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendType;
+using ::android::hardware::tv::tuner::V1_0::IFrontend;
+using ::android::hardware::tv::tuner::V1_0::ILnb;
+using ::android::hardware::tv::tuner::V1_0::LnbId;
+using ::android::hardware::tv::tuner::V1_0::Result;
+using ::android::hardware::tv::tuner::V1_1::FrontendDtmbCapabilities;
+
+namespace android {
+
+TunerService::TunerService() {
+    sp<IServiceManager> serviceMgr = defaultServiceManager();
+    sp<content::pm::IPackageManagerNative> packageMgr;
+    if (serviceMgr.get() == nullptr) {
+        ALOGE("%s: Cannot find service manager", __func__);
+        return;
+    } else {
+        sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+        packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+    }
+
+    bool hasFeature = false;
+    if (packageMgr != nullptr) {
+        binder::Status status = packageMgr->hasSystemFeature(FEATURE_TUNER, 0, &hasFeature);
+        if (!status.isOk()) {
+            ALOGE("%s: hasSystemFeature failed: %s",
+                    __func__, status.exceptionMessage().c_str());
+            return;
+        }
+        if (!hasFeature) {
+            ALOGD("Current device does not support tuner feaure.");
+            return;
+        }
+    } else {
+        ALOGD("%s: Cannot find package manager.", __func__);
+        return;
+    }
+
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
+    mTunerResourceManager = ITunerResourceManager::fromBinder(binder);
+    updateTunerResources();
+}
+
+TunerService::~TunerService() {}
+
+binder_status_t TunerService::instantiate() {
+    shared_ptr<TunerService> service =
+            ::ndk::SharedRefBase::make<TunerService>();
+    return AServiceManager_addService(service->asBinder().get(), getServiceName());
+}
+
+bool TunerService::hasITuner() {
+    ALOGD("hasITuner");
+    if (mTuner != nullptr) {
+        return true;
+    }
+    mTuner = ITuner::getService();
+    if (mTuner == nullptr) {
+        ALOGE("Failed to get ITuner service");
+        return false;
+    }
+    mTunerVersion = TUNER_HAL_VERSION_1_0;
+    mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
+    if (mTuner_1_1 != nullptr) {
+        mTunerVersion = TUNER_HAL_VERSION_1_1;
+    } else {
+        ALOGE("Failed to get ITuner_1_1 service");
+    }
+    return true;
+}
+
+bool TunerService::hasITuner_1_1() {
+    ALOGD("hasITuner_1_1");
+    hasITuner();
+    return (mTunerVersion == TUNER_HAL_VERSION_1_1);
+}
+
+Status TunerService::openDemux(
+        int /* demuxHandle */, std::shared_ptr<ITunerDemux>* _aidl_return) {
+    ALOGD("openDemux");
+    if (!hasITuner()) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::NOT_INITIALIZED));
+    }
+    Result res;
+    uint32_t id;
+    sp<IDemux> demuxSp = nullptr;
+    shared_ptr<ITunerDemux> tunerDemux = nullptr;
+    mTuner->openDemux([&](Result r, uint32_t demuxId, const sp<IDemux>& demux) {
+        demuxSp = demux;
+        id = demuxId;
+        res = r;
+        ALOGD("open demux, id = %d", demuxId);
+    });
+    if (res == Result::SUCCESS) {
+        tunerDemux = ::ndk::SharedRefBase::make<TunerDemux>(demuxSp, id);
+        *_aidl_return = tunerDemux->ref<ITunerDemux>();
+        return Status::ok();
+    }
+
+    ALOGW("open demux failed, res = %d", res);
+    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
+Status TunerService::getDemuxCaps(TunerDemuxCapabilities* _aidl_return) {
+    ALOGD("getDemuxCaps");
+    if (!hasITuner()) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::NOT_INITIALIZED));
+    }
+    Result res;
+    DemuxCapabilities caps;
+    mTuner->getDemuxCaps([&](Result r, const DemuxCapabilities& demuxCaps) {
+        caps = demuxCaps;
+        res = r;
+    });
+    if (res == Result::SUCCESS) {
+        *_aidl_return = getAidlDemuxCaps(caps);
+        return Status::ok();
+    }
+
+    ALOGW("Get demux caps failed, res = %d", res);
+    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
+Status TunerService::getFrontendIds(vector<int32_t>* ids) {
+    if (!hasITuner()) {
+        return Status::fromServiceSpecificError(
+                static_cast<int32_t>(Result::NOT_INITIALIZED));
+    }
+    hidl_vec<FrontendId> feIds;
+    Result res = getHidlFrontendIds(feIds);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    ids->resize(feIds.size());
+    copy(feIds.begin(), feIds.end(), ids->begin());
+
+    return Status::ok();
+}
+
+Status TunerService::getFrontendInfo(int32_t id, TunerFrontendInfo* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("ITuner service is not init.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    FrontendInfo info;
+    Result res = getHidlFrontendInfo(id, info);
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    TunerFrontendInfo tunerInfo = convertToAidlFrontendInfo(info);
+    *_aidl_return = tunerInfo;
+    return Status::ok();
+}
+
+Status TunerService::getFrontendDtmbCapabilities(
+        int32_t id, TunerFrontendDtmbCapabilities* _aidl_return) {
+    if (!hasITuner_1_1()) {
+        ALOGE("ITuner_1_1 service is not init.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res;
+    FrontendDtmbCapabilities dtmbCaps;
+    mTuner_1_1->getFrontendDtmbCapabilities(id,
+            [&](Result r, const FrontendDtmbCapabilities& caps) {
+        dtmbCaps = caps;
+        res = r;
+    });
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    TunerFrontendDtmbCapabilities aidlDtmbCaps{
+        .transmissionModeCap = (int)dtmbCaps.transmissionModeCap,
+        .bandwidthCap = (int)dtmbCaps.bandwidthCap,
+        .modulationCap = (int)dtmbCaps.modulationCap,
+        .codeRateCap = (int)dtmbCaps.codeRateCap,
+        .guardIntervalCap = (int)dtmbCaps.guardIntervalCap,
+        .interleaveModeCap = (int)dtmbCaps.interleaveModeCap,
+    };
+
+    *_aidl_return = aidlDtmbCaps;
+    return Status::ok();
+}
+
+Status TunerService::openFrontend(
+        int32_t frontendHandle, shared_ptr<ITunerFrontend>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("ITuner service is not init.");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    sp<IFrontend> frontend;
+    int id = getResourceIdFromHandle(frontendHandle, FRONTEND);
+    mTuner->openFrontendById(id, [&](Result result, const sp<IFrontend>& fe) {
+        frontend = fe;
+        status = result;
+    });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerFrontend>(frontend, id);
+    return Status::ok();
+}
+
+Status TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    sp<ILnb> lnb;
+    int id = getResourceIdFromHandle(lnbHandle, LNB);
+    mTuner->openLnbById(id, [&](Result result, const sp<ILnb>& lnbSp){
+        lnb = lnbSp;
+        status = result;
+    });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, id);
+    return Status::ok();
+}
+
+Status TunerService::openLnbByName(const string& lnbName, shared_ptr<ITunerLnb>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("get ITuner failed");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    int lnbId;
+    Result status;
+    sp<ILnb> lnb;
+    mTuner->openLnbByName(lnbName, [&](Result r, LnbId id, const sp<ILnb>& lnbSp) {
+        status = r;
+        lnb = lnbSp;
+        lnbId = (int)id;
+    });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, lnbId);
+    return Status::ok();
+}
+
+Status TunerService::openDescrambler(int32_t /*descramblerHandle*/,
+            std::shared_ptr<ITunerDescrambler>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    sp<IDescrambler> descrambler;
+    //int id = getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
+    mTuner->openDescrambler([&](Result r, const sp<IDescrambler>& descramblerSp) {
+        status = r;
+        descrambler = descramblerSp;
+    });
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerDescrambler>(descrambler);
+    return Status::ok();
+}
+
+void TunerService::updateTunerResources() {
+    if (!hasITuner() || mTunerResourceManager == NULL) {
+        ALOGE("Failed to updateTunerResources");
+        return;
+    }
+
+    updateFrontendResources();
+    updateLnbResources();
+    // TODO: update Demux, Descrambler.
+}
+
+Status TunerService::getTunerHalVersion(int* _aidl_return) {
+    hasITuner();
+    *_aidl_return = mTunerVersion;
+    return Status::ok();
+}
+
+void TunerService::updateFrontendResources() {
+    hidl_vec<FrontendId> ids;
+    Result res = getHidlFrontendIds(ids);
+    if (res != Result::SUCCESS) {
+        return;
+    }
+    vector<TunerFrontendInfo> infos;
+    for (int i = 0; i < ids.size(); i++) {
+        FrontendInfo frontendInfo;
+        Result res = getHidlFrontendInfo((int)ids[i], frontendInfo);
+        if (res != Result::SUCCESS) {
+            continue;
+        }
+        TunerFrontendInfo tunerFrontendInfo{
+            .handle = getResourceHandleFromId((int)ids[i], FRONTEND),
+            .type = static_cast<int>(frontendInfo.type),
+            .exclusiveGroupId = static_cast<int>(frontendInfo.exclusiveGroupId),
+        };
+        infos.push_back(tunerFrontendInfo);
+    }
+    mTunerResourceManager->setFrontendInfoList(infos);
+}
+
+void TunerService::updateLnbResources() {
+    vector<int> handles = getLnbHandles();
+    if (handles.size() == 0) {
+        return;
+    }
+    mTunerResourceManager->setLnbInfoList(handles);
+}
+
+vector<int> TunerService::getLnbHandles() {
+    vector<int> lnbHandles;
+    if (mTuner != NULL) {
+        Result res;
+        vector<LnbId> lnbIds;
+        mTuner->getLnbIds([&](Result r, const hardware::hidl_vec<LnbId>& ids) {
+            lnbIds = ids;
+            res = r;
+        });
+        if (res == Result::SUCCESS && lnbIds.size() > 0) {
+            for (int i = 0; i < lnbIds.size(); i++) {
+                lnbHandles.push_back(getResourceHandleFromId((int)lnbIds[i], LNB));
+            }
+        }
+    }
+
+    return lnbHandles;
+}
+
+Result TunerService::getHidlFrontendIds(hidl_vec<FrontendId>& ids) {
+    if (mTuner == NULL) {
+        return Result::NOT_INITIALIZED;
+    }
+    Result res;
+    mTuner->getFrontendIds([&](Result r, const hidl_vec<FrontendId>& frontendIds) {
+        ids = frontendIds;
+        res = r;
+    });
+    return res;
+}
+
+Result TunerService::getHidlFrontendInfo(int id, FrontendInfo& info) {
+    if (mTuner == NULL) {
+        return Result::NOT_INITIALIZED;
+    }
+    Result res;
+    mTuner->getFrontendInfo(id, [&](Result r, const FrontendInfo& feInfo) {
+        info = feInfo;
+        res = r;
+    });
+    return res;
+}
+
+TunerDemuxCapabilities TunerService::getAidlDemuxCaps(DemuxCapabilities caps) {
+    TunerDemuxCapabilities aidlCaps{
+        .numDemux = (int)caps.numDemux,
+        .numRecord = (int)caps.numRecord,
+        .numPlayback = (int)caps.numPlayback,
+        .numTsFilter = (int)caps.numTsFilter,
+        .numSectionFilter = (int)caps.numSectionFilter,
+        .numAudioFilter = (int)caps.numAudioFilter,
+        .numVideoFilter = (int)caps.numVideoFilter,
+        .numPesFilter = (int)caps.numPesFilter,
+        .numPcrFilter = (int)caps.numPcrFilter,
+        .numBytesInSectionFilter = (int)caps.numBytesInSectionFilter,
+        .filterCaps = (int)caps.filterCaps,
+        .bTimeFilter = caps.bTimeFilter,
+    };
+    aidlCaps.linkCaps.resize(caps.linkCaps.size());
+    copy(caps.linkCaps.begin(), caps.linkCaps.end(), aidlCaps.linkCaps.begin());
+    return aidlCaps;
+}
+
+TunerFrontendInfo TunerService::convertToAidlFrontendInfo(FrontendInfo halInfo) {
+    TunerFrontendInfo info{
+        .type = (int)halInfo.type,
+        .minFrequency = (int)halInfo.minFrequency,
+        .maxFrequency = (int)halInfo.maxFrequency,
+        .minSymbolRate = (int)halInfo.minSymbolRate,
+        .maxSymbolRate = (int)halInfo.maxSymbolRate,
+        .acquireRange = (int)halInfo.acquireRange,
+        .exclusiveGroupId = (int)halInfo.exclusiveGroupId,
+    };
+    for (int i = 0; i < halInfo.statusCaps.size(); i++) {
+        info.statusCaps.push_back((int)halInfo.statusCaps[i]);
+    }
+
+    TunerFrontendCapabilities caps;
+    switch (halInfo.type) {
+        case FrontendType::ANALOG: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::analogCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendAnalogCapabilities analogCaps{
+                    .typeCap = (int)halInfo.frontendCaps.analogCaps().typeCap,
+                    .sifStandardCap = (int)halInfo.frontendCaps.analogCaps().sifStandardCap,
+                };
+                caps.set<TunerFrontendCapabilities::analogCaps>(analogCaps);
+            }
+            break;
+        }
+        case FrontendType::ATSC: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atscCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendAtscCapabilities atscCaps{
+                    .modulationCap = (int)halInfo.frontendCaps.atscCaps().modulationCap,
+                };
+                caps.set<TunerFrontendCapabilities::atscCaps>(atscCaps);
+            }
+            break;
+        }
+        case FrontendType::ATSC3: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atsc3Caps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendAtsc3Capabilities atsc3Caps{
+                    .bandwidthCap = (int)halInfo.frontendCaps.atsc3Caps().bandwidthCap,
+                    .modulationCap = (int)halInfo.frontendCaps.atsc3Caps().modulationCap,
+                    .timeInterleaveModeCap =
+                            (int)halInfo.frontendCaps.atsc3Caps().timeInterleaveModeCap,
+                    .codeRateCap = (int)halInfo.frontendCaps.atsc3Caps().codeRateCap,
+                    .demodOutputFormatCap
+                        = (int)halInfo.frontendCaps.atsc3Caps().demodOutputFormatCap,
+                    .fecCap = (int)halInfo.frontendCaps.atsc3Caps().fecCap,
+                };
+                caps.set<TunerFrontendCapabilities::atsc3Caps>(atsc3Caps);
+            }
+            break;
+        }
+        case FrontendType::DVBC: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbcCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendCableCapabilities cableCaps{
+                    .modulationCap = (int)halInfo.frontendCaps.dvbcCaps().modulationCap,
+                    .codeRateCap = (int64_t)halInfo.frontendCaps.dvbcCaps().fecCap,
+                    .annexCap = (int)halInfo.frontendCaps.dvbcCaps().annexCap,
+                };
+                caps.set<TunerFrontendCapabilities::cableCaps>(cableCaps);
+            }
+            break;
+        }
+        case FrontendType::DVBS: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbsCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendDvbsCapabilities dvbsCaps{
+                    .modulationCap = (int)halInfo.frontendCaps.dvbsCaps().modulationCap,
+                    .codeRateCap = (int64_t)halInfo.frontendCaps.dvbsCaps().innerfecCap,
+                    .standard = (int)halInfo.frontendCaps.dvbsCaps().standard,
+                };
+                caps.set<TunerFrontendCapabilities::dvbsCaps>(dvbsCaps);
+            }
+            break;
+        }
+        case FrontendType::DVBT: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbtCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendDvbtCapabilities dvbtCaps{
+                    .transmissionModeCap = (int)halInfo.frontendCaps.dvbtCaps().transmissionModeCap,
+                    .bandwidthCap = (int)halInfo.frontendCaps.dvbtCaps().bandwidthCap,
+                    .constellationCap = (int)halInfo.frontendCaps.dvbtCaps().constellationCap,
+                    .codeRateCap = (int)halInfo.frontendCaps.dvbtCaps().coderateCap,
+                    .hierarchyCap = (int)halInfo.frontendCaps.dvbtCaps().hierarchyCap,
+                    .guardIntervalCap = (int)halInfo.frontendCaps.dvbtCaps().guardIntervalCap,
+                    .isT2Supported = (bool)halInfo.frontendCaps.dvbtCaps().isT2Supported,
+                    .isMisoSupported = (bool)halInfo.frontendCaps.dvbtCaps().isMisoSupported,
+                };
+                caps.set<TunerFrontendCapabilities::dvbtCaps>(dvbtCaps);
+            }
+            break;
+        }
+        case FrontendType::ISDBS: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbsCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendIsdbsCapabilities isdbsCaps{
+                    .modulationCap = (int)halInfo.frontendCaps.isdbsCaps().modulationCap,
+                    .codeRateCap = (int)halInfo.frontendCaps.isdbsCaps().coderateCap,
+                };
+                caps.set<TunerFrontendCapabilities::isdbsCaps>(isdbsCaps);
+            }
+            break;
+        }
+        case FrontendType::ISDBS3: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbs3Caps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendIsdbs3Capabilities isdbs3Caps{
+                    .modulationCap = (int)halInfo.frontendCaps.isdbs3Caps().modulationCap,
+                    .codeRateCap = (int)halInfo.frontendCaps.isdbs3Caps().coderateCap,
+                };
+                caps.set<TunerFrontendCapabilities::isdbs3Caps>(isdbs3Caps);
+            }
+            break;
+        }
+        case FrontendType::ISDBT: {
+            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbtCaps
+                    == halInfo.frontendCaps.getDiscriminator()) {
+                TunerFrontendIsdbtCapabilities isdbtCaps{
+                    .modeCap = (int)halInfo.frontendCaps.isdbtCaps().modeCap,
+                    .bandwidthCap = (int)halInfo.frontendCaps.isdbtCaps().bandwidthCap,
+                    .modulationCap = (int)halInfo.frontendCaps.isdbtCaps().modulationCap,
+                    .codeRateCap = (int)halInfo.frontendCaps.isdbtCaps().coderateCap,
+                    .guardIntervalCap = (int)halInfo.frontendCaps.isdbtCaps().guardIntervalCap,
+                };
+                caps.set<TunerFrontendCapabilities::isdbtCaps>(isdbtCaps);
+            }
+            break;
+        }
+        default:
+            break;
+    }
+
+    info.caps = caps;
+    return info;
+}
+} // namespace android
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
new file mode 100644
index 0000000..f8e2ee6
--- /dev/null
+++ b/services/tuner/TunerService.h
@@ -0,0 +1,157 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERSERVICE_H
+#define ANDROID_MEDIA_TUNERSERVICE_H
+
+#include <aidl/android/media/tv/tunerresourcemanager/ITunerResourceManager.h>
+#include <aidl/android/media/tv/tuner/BnTunerService.h>
+#include <android/hardware/tv/tuner/1.1/ITuner.h>
+#include <fmq/AidlMessageQueue.h>
+#include <fmq/EventFlag.h>
+#include <fmq/MessageQueue.h>
+
+using ::aidl::android::hardware::common::fmq::GrantorDescriptor;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::media::tv::tuner::BnTunerService;
+using ::aidl::android::media::tv::tuner::ITunerDemux;
+using ::aidl::android::media::tv::tuner::ITunerDescrambler;
+using ::aidl::android::media::tv::tuner::ITunerFrontend;
+using ::aidl::android::media::tv::tuner::ITunerLnb;
+using ::aidl::android::media::tv::tuner::TunerDemuxCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendDtmbCapabilities;
+using ::aidl::android::media::tv::tuner::TunerFrontendInfo;
+using ::aidl::android::media::tv::tunerresourcemanager::ITunerResourceManager;
+
+using ::android::hardware::details::logError;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::kSynchronizedReadWrite;
+using ::android::hardware::EventFlag;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::tv::tuner::V1_0::DemuxCapabilities;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendInfo;
+using ::android::hardware::tv::tuner::V1_0::IDemux;
+using ::android::hardware::tv::tuner::V1_0::IDescrambler;
+using ::android::hardware::tv::tuner::V1_0::IFilter;
+using ::android::hardware::tv::tuner::V1_0::IFilterCallback;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+using Status = ::ndk::ScopedAStatus;
+
+using namespace std;
+
+namespace android {
+
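+// The Tuner HAL version is encoded as (major << 16) | minor.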
+const static int TUNER_HAL_VERSION_UNKNOWN = 0;
+const static int TUNER_HAL_VERSION_1_0 = 1 << 16;
+const static int TUNER_HAL_VERSION_1_1 = (1 << 16) | 1;
+// System Feature defined in PackageManager
+static const ::android::String16 FEATURE_TUNER(::android::String16("android.hardware.tv.tuner"));
+
+typedef enum {
+    FRONTEND,
+    LNB,
+    DEMUX,
+    DESCRAMBLER,
+} TunerResourceType;
+
+struct FilterCallback : public IFilterCallback {
+    ~FilterCallback() {}
+    Return<void> onFilterEvent(const DemuxFilterEvent&) {
+        return Void();
+    }
+    Return<void> onFilterStatus(const DemuxFilterStatus) {
+        return Void();
+    }
+};
+
+class TunerService : public BnTunerService {
+    typedef AidlMessageQueue<int8_t, SynchronizedReadWrite> AidlMessageQueue;
+    typedef MessageQueue<uint8_t, kSynchronizedReadWrite> HidlMessageQueue;
+    typedef MQDescriptor<int8_t, SynchronizedReadWrite> AidlMQDesc;
+
+public:
+    static char const *getServiceName() { return "media.tuner"; }
+    static binder_status_t instantiate();
+    TunerService();
+    virtual ~TunerService();
+
+    Status getFrontendIds(vector<int32_t>* ids) override;
+    Status getFrontendInfo(int32_t id, TunerFrontendInfo* _aidl_return) override;
+    Status getFrontendDtmbCapabilities(
+            int32_t id, TunerFrontendDtmbCapabilities* _aidl_return) override;
+    Status openFrontend(
+            int32_t frontendHandle, shared_ptr<ITunerFrontend>* _aidl_return) override;
+    Status openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) override;
+    Status openLnbByName(const string& lnbName, shared_ptr<ITunerLnb>* _aidl_return) override;
+    Status openDemux(int32_t demuxHandle, std::shared_ptr<ITunerDemux>* _aidl_return) override;
+    Status getDemuxCaps(TunerDemuxCapabilities* _aidl_return) override;
+    Status openDescrambler(int32_t descramblerHandle,
+            std::shared_ptr<ITunerDescrambler>* _aidl_return) override;
+    Status getTunerHalVersion(int* _aidl_return) override;
+
+    // TODO: create a map between resource id and handles.
+    static int getResourceIdFromHandle(int resourceHandle, int /*type*/) {
+        return (resourceHandle & 0x00ff0000) >> 16;
+    }
+
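+    // Resource handle layout: bits 31-24 hold the resource type, bits 23-16
+    // the resource id, and bits 15-0 a running request count.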
+    int getResourceHandleFromId(int id, int resourceType) {
+        // TODO: build up randomly generated id to handle mapping
+        return (resourceType & 0x000000ff) << 24
+                | (id << 16)
+                | (mResourceRequestCount++ & 0xffff);
+    }
+
+private:
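+    // Lazily connects to the Tuner HAL and caches the detected HAL version.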
+    bool hasITuner();
+    bool hasITuner_1_1();
+    void updateTunerResources();
+
+    void updateFrontendResources();
+    void updateLnbResources();
+    Result getHidlFrontendIds(hidl_vec<FrontendId>& ids);
+    Result getHidlFrontendInfo(int id, FrontendInfo& info);
+    vector<int> getLnbHandles();
+
+    TunerDemuxCapabilities getAidlDemuxCaps(DemuxCapabilities caps);
+    TunerFrontendInfo convertToAidlFrontendInfo(FrontendInfo halInfo);
+
+    sp<ITuner> mTuner;
+    sp<::android::hardware::tv::tuner::V1_1::ITuner> mTuner_1_1;
+
+    shared_ptr<ITunerResourceManager> mTunerResourceManager;
+    int mResourceRequestCount = 0;
+
+    int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERSERVICE_H
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
new file mode 100644
index 0000000..ea9da30
--- /dev/null
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -0,0 +1,114 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerTimeFilter"
+
+#include "TunerTimeFilter.h"
+
+using ::android::hardware::tv::tuner::V1_0::Result;
+using ::android::hardware::tv::tuner::V1_1::Constant64Bit;
+
+namespace android {
+
+TunerTimeFilter::TunerTimeFilter(sp<ITimeFilter> timeFilter) {
+    mTimeFilter = timeFilter;
+}
+
+TunerTimeFilter::~TunerTimeFilter() {
+    mTimeFilter = NULL;
+}
+
+Status TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
+    if (mTimeFilter == NULL) {
+        ALOGE("ITimeFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mTimeFilter->setTimeStamp(timeStamp);
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerTimeFilter::clearTimeStamp() {
+    if (mTimeFilter == NULL) {
+        ALOGE("ITimeFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status = mTimeFilter->clearTimeStamp();
+    if (status != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
+    if (mTimeFilter == NULL) {
+        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        ALOGE("ITimeFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    mTimeFilter->getSourceTime(
+            [&](Result r, uint64_t t) {
+                status = r;
+                *_aidl_return = t;
+            });
+    if (status != Result::SUCCESS) {
+        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
+    if (mTimeFilter == NULL) {
+        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        ALOGE("ITimeFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result status;
+    mTimeFilter->getTimeStamp(
+            [&](Result r, uint64_t t) {
+                status = r;
+                *_aidl_return = t;
+            });
+    if (status != Result::SUCCESS) {
+        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
+        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return Status::ok();
+}
+
+Status TunerTimeFilter::close() {
+    if (mTimeFilter == NULL) {
+        ALOGE("ITimeFilter is not initialized");
+        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Result res = mTimeFilter->close();
+    mTimeFilter = NULL;
+
+    if (res != Result::SUCCESS) {
+        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return Status::ok();
+}
+}  // namespace android
diff --git a/services/tuner/TunerTimeFilter.h b/services/tuner/TunerTimeFilter.h
new file mode 100644
index 0000000..d675319
--- /dev/null
+++ b/services/tuner/TunerTimeFilter.h
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERTIMEFILTER_H
+#define ANDROID_MEDIA_TUNERTIMEFILTER_H
+
+#include <aidl/android/media/tv/tuner/BnTunerTimeFilter.h>
+#include <android/hardware/tv/tuner/1.0/ITimeFilter.h>
+#include <android/hardware/tv/tuner/1.1/types.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tuner::BnTunerTimeFilter;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+
+using namespace std;
+
+namespace android {
+
+class TunerTimeFilter : public BnTunerTimeFilter {
+
+public:
+    TunerTimeFilter(sp<ITimeFilter> timeFilter);
+    virtual ~TunerTimeFilter();
+    Status setTimeStamp(int64_t timeStamp) override;
+    Status clearTimeStamp() override;
+    Status getSourceTime(int64_t* _aidl_return) override;
+    Status getTimeStamp(int64_t* _aidl_return) override;
+    Status close() override;
+
+private:
+    sp<ITimeFilter> mTimeFilter;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_TUNERTIMEFILTER_H
diff --git a/services/tuner/aidl/android/media/tv/OWNERS b/services/tuner/aidl/android/media/tv/OWNERS
new file mode 100644
index 0000000..0ceb8e8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/OWNERS
@@ -0,0 +1,2 @@
+nchalko@google.com
+quxiangfang@google.com
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl
new file mode 100644
index 0000000..73b00ae
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.ITunerDvr;
+import android.media.tv.tuner.ITunerDvrCallback;
+import android.media.tv.tuner.ITunerFilter;
+import android.media.tv.tuner.ITunerFilterCallback;
+import android.media.tv.tuner.ITunerFrontend;
+import android.media.tv.tuner.ITunerTimeFilter;
+
+/**
+ * Tuner Demux interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+interface ITunerDemux {
+
+    /**
+     * Set a frontend resource as the data input of the demux.
+     */
+    void setFrontendDataSource(in ITunerFrontend frontend);
+
+    /**
+     * Open a new filter in the demux.
+     */
+    ITunerFilter openFilter(
+        in int mainType, in int subtype, in int bufferSize, in ITunerFilterCallback cb);
+
+    /**
+     * Open time filter of the demux.
+     */
+    ITunerTimeFilter openTimeFilter();
+
+    /**
+     * Get hardware sync ID for audio and video.
+     */
+    int getAvSyncHwId(in ITunerFilter tunerFilter);
+
+    /**
+     * Get current time stamp to use for A/V sync.
+     */
+    long getAvSyncTime(in int avSyncHwId);
+
+    /**
+     * Open a DVR (Digital Video Record) instance in the demux.
+     */
+    ITunerDvr openDvr(in int dvrType, in int bufferSize, in ITunerDvrCallback cb);
+
+    /**
+     * Connect Conditional Access Modules (CAM) through Common Interface (CI).
+     */
+    void connectCiCam(in int ciCamId);
+
+    /**
+     * Disconnect Conditional Access Modules (CAM).
+     */
+    void disconnectCiCam();
+
+    /**
+     * Releases the ITunerDemux instance.
+     */
+    void close();
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl
new file mode 100644
index 0000000..7370eee
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl
@@ -0,0 +1,53 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.ITunerDemux;
+import android.media.tv.tuner.ITunerFilter;
+import android.media.tv.tuner.TunerDemuxPid;
+
+/**
+ * Tuner Descrambler interface handles descrambler related operations.
+ *
+ * {@hide}
+ */
+interface ITunerDescrambler {
+    /**
+     * Set a demux as source of the descrambler.
+     */
+    void setDemuxSource(in ITunerDemux tunerDemux);
+
+    /**
+     * Set a key token to link descrambler to a key slot.
+     */
+    void setKeyToken(in byte[] keyToken);
+
+    /**
+     * Add packets' PID to the descrambler for descrambling.
+     */
+    void addPid(in TunerDemuxPid pid, in ITunerFilter optionalSourceFilter);
+
+    /**
+     * Remove packets' PID from the descrambler.
+     */
+    void removePid(in TunerDemuxPid pid, in ITunerFilter optionalSourceFilter);
+
+    /**
+     * Close the ITunerDescrambler interface.
+     */
+    void close();
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
new file mode 100644
index 0000000..8f1601b
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -0,0 +1,69 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.fmq.MQDescriptor;
+import android.hardware.common.fmq.SynchronizedReadWrite;
+import android.media.tv.tuner.ITunerFilter;
+import android.media.tv.tuner.TunerDvrSettings;
+
+/**
+ * Tuner Dvr interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+interface ITunerDvr {
+    /**
+     * Get the descriptor of the DVR's FMQ.
+     */
+    MQDescriptor<byte, SynchronizedReadWrite> getQueueDesc();
+
+    /**
+     * Configure the DVR.
+     */
+    void configure(in TunerDvrSettings settings);
+
+    /**
+     * Attach one filter to the DVR interface for recording.
+     */
+    void attachFilter(in ITunerFilter filter);
+
+    /**
+     * Detach one filter from the DVR's recording.
+     */
+    void detachFilter(in ITunerFilter filter);
+
+    /**
+     * Start DVR.
+     */
+    void start();
+
+    /**
+     * Stop DVR.
+     */
+    void stop();
+
+    /**
+     * Flush DVR data.
+     */
+    void flush();
+
+    /**
+     * Close the DVR instance to release its resources.
+     */
+    void close();
+}
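
A minimal recording sketch against ITunerDvr, assuming the attached filter is already configured; the TunerDvrSettings values are placeholders, not recommended defaults:

    import android.media.tv.tuner.ITunerDvr;
    import android.media.tv.tuner.ITunerFilter;
    import android.media.tv.tuner.TunerDvrSettings;
    import android.os.RemoteException;

    final class DvrRecordExample {
        static void record(ITunerDvr dvr, ITunerFilter recordFilter) throws RemoteException {
            TunerDvrSettings settings = new TunerDvrSettings();
            settings.statusMask = 0xf;           // placeholder: which status events to report
            settings.lowThreshold = 100 * 1024;  // placeholder buffer thresholds, in bytes
            settings.highThreshold = 900 * 1024;
            settings.dataFormat = 0;             // placeholder: data format constant from the HAL
            settings.packetSize = 188;           // standard TS packet size

            dvr.configure(settings);
            dvr.attachFilter(recordFilter);      // record what this filter outputs
            dvr.start();
            // ... read recorded data from the FMQ described by dvr.getQueueDesc() ...
            dvr.stop();
            dvr.detachFilter(recordFilter);
            dvr.close();
        }
    }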
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl
new file mode 100644
index 0000000..e234fe5
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * TunerDvrCallback interface handles tuner dvr related callbacks.
+ *
+ * {@hide}
+ */
+interface ITunerDvrCallback {
+    /**
+     * Notify the client of a new record status of the demux.
+     */
+    void onRecordStatus(in int status);
+
+    /**
+     * Notify the client of a new playback status of the demux.
+     */
+    void onPlaybackStatus(in int status);
+}
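
A sketch of how a client might receive these DVR callbacks, assuming the standard AIDL-generated Java Stub base class:

    import android.media.tv.tuner.ITunerDvrCallback;
    import android.util.Log;

    final class DvrCallback extends ITunerDvrCallback.Stub {
        private static final String TAG = "DvrCallback";

        @Override
        public void onRecordStatus(int status) {
            Log.d(TAG, "record status=" + status);    // status values come from the Tuner HAL
        }

        @Override
        public void onPlaybackStatus(int status) {
            Log.d(TAG, "playback status=" + status);
        }
    }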
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl
new file mode 100644
index 0000000..10d4c3b
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl
@@ -0,0 +1,100 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.fmq.MQDescriptor;
+import android.hardware.common.fmq.SynchronizedReadWrite;
+import android.hardware.common.NativeHandle;
+import android.media.tv.tuner.TunerFilterConfiguration;
+import android.media.tv.tuner.TunerFilterSharedHandleInfo;
+
+/**
+ * Tuner Filter interface handles filter related operations.
+ *
+ * {@hide}
+ */
+interface ITunerFilter {
+    /**
+     * Get the filter Id.
+     */
+    int getId();
+
+    /**
+     * Get the 64-bit filter Id.
+     */
+    long getId64Bit();
+
+    /**
+     * Get the descriptor of the Filter's FMQ.
+     */
+    MQDescriptor<byte, SynchronizedReadWrite> getQueueDesc();
+
+    /**
+     * Configure the filter.
+     */
+    void configure(in TunerFilterConfiguration config);
+
+    /**
+     * Configure the monitor event of the Filter.
+     */
+    void configureMonitorEvent(in int monitorEventType);
+
+    /**
+     * Configure the context id of the IP Filter.
+     */
+    void configureIpFilterContextId(in int cid);
+
+    /**
+     * Configure the stream type of the media Filter.
+     */
+    void configureAvStreamType(in int avStreamType);
+
+    /**
+     * Get the A/V shared memory handle.
+     */
+    TunerFilterSharedHandleInfo getAvSharedHandleInfo();
+
+    /**
+     * Release the handle reported by the HAL for AV memory.
+     */
+    void releaseAvHandle(in NativeHandle handle, in long avDataId);
+
+    /**
+     * Set the filter's data source.
+     */
+    void setDataSource(ITunerFilter filter);
+
+    /**
+     * Start the filter.
+     */
+    void start();
+
+    /**
+     * Stop the filter.
+     */
+    void stop();
+
+    /**
+     * Flush the filter.
+     */
+    void flush();
+
+    /**
+     * Close the filter.
+     */
+    void close();
+}
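
A hedged life-cycle sketch for an ITunerFilter; the TunerFilterConfiguration is assumed to be built elsewhere (see the configuration parcelables later in this change):

    import android.media.tv.tuner.ITunerFilter;
    import android.media.tv.tuner.TunerFilterConfiguration;
    import android.os.RemoteException;

    final class FilterLifecycleExample {
        static void run(ITunerFilter filter, TunerFilterConfiguration config)
                throws RemoteException {
            int id = filter.getId();     // useful for PID-to-filter bookkeeping
            filter.configure(config);
            filter.start();
            // ... events arrive via ITunerFilterCallback; data via the filter's FMQ ...
            filter.flush();
            filter.stop();
            filter.close();
        }
    }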
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl
new file mode 100644
index 0000000..e7a52a7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterEvent;
+
+/**
+ * TunerFilterCallback interface handles tuner filter related callbacks.
+ *
+ * {@hide}
+ */
+interface ITunerFilterCallback {
+    /**
+     * Notify the client of a new filter status.
+     */
+    void onFilterStatus(int status);
+
+    /**
+     * Notify the client that a new filter event happened.
+     */
+    void onFilterEvent(in TunerFilterEvent[] filterEvent);
+}
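
As with the DVR callback, a minimal client-side sketch using the generated Stub base class; the status constants come from the Tuner HAL and are not spelled out here:

    import android.media.tv.tuner.ITunerFilterCallback;
    import android.media.tv.tuner.TunerFilterEvent;

    final class FilterCallback extends ITunerFilterCallback.Stub {
        @Override
        public void onFilterStatus(int status) {
            // React to data-ready / overflow style status changes (values defined by the HAL).
        }

        @Override
        public void onFilterEvent(TunerFilterEvent[] filterEvent) {
            // Each element is a union; dispatch on its tag (media, section, pes, ...).
        }
    }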
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
new file mode 100644
index 0000000..ef0255a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
@@ -0,0 +1,107 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.ITunerFrontendCallback;
+import android.media.tv.tuner.ITunerLnb;
+import android.media.tv.tuner.TunerFrontendSettings;
+import android.media.tv.tuner.TunerFrontendStatus;
+
+/**
+ * Tuner Frontend interface handles frontend related operations.
+ *
+ * {@hide}
+ */
+interface ITunerFrontend {
+    /**
+     * Set the frontend callback.
+     *
+     * @param tunerFrontendCallback the callback to receive frontend related info.
+     */
+    void setCallback(in ITunerFrontendCallback tunerFrontendCallback);
+
+    /**
+     * Tune the frontend using the given settings.
+     *
+     * @param settings the settings to tune with.
+     */
+    void tune(in TunerFrontendSettings settings);
+
+    /**
+     * Stop the previous tuning.
+     */
+    void stopTune();
+
+    /**
+     * Scan the frontend using the given settings.
+     *
+     * @param settings the settings to scan with.
+     * @param frontendScanType the type of scan to perform.
+     */
+    void scan(in TunerFrontendSettings settings, in int frontendScanType);
+
+    /**
+     * Stop the previous scanning.
+     */
+    void stopScan();
+
+    /**
+     * Set the Low-Noise Block downconverter (LNB) for a satellite frontend.
+     *
+     * @param lnb the LNB interface to use.
+     */
+    void setLnb(in ITunerLnb lnb);
+
+    /**
+     * Enable or disable the Low Noise Amplifier (LNA).
+     *
+     * @param bEnable true to enable the LNA; false to disable it.
+     */
+    void setLna(in boolean bEnable);
+
+    /**
+     * Link the frontend to the CICAM with the given id.
+     *
+     * @return the LTS id.
+     */
+    int linkCiCamToFrontend(in int ciCamId);
+
+    /**
+     * Unlink the frontend from the CICAM with the given id.
+     */
+    void unlinkCiCamToFrontend(in int ciCamId);
+
+    /**
+     * Releases the ITunerFrontend instance.
+     */
+    void close();
+
+    /**
+     * Gets the statuses of the frontend.
+     */
+    TunerFrontendStatus[] getStatus(in int[] statusTypes);
+
+    /**
+     * Gets the 1.1 extended statuses of the frontend.
+     */
+    TunerFrontendStatus[] getStatusExtended_1_1(in int[] statusTypes);
+
+    /**
+     * Gets the id of the frontend.
+     */
+    int getFrontendId();
+}
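
A hedged tuning sketch for ITunerFrontend; the TunerFrontendSettings and the status type constants are assumed to be prepared by the caller:

    import android.media.tv.tuner.ITunerFrontend;
    import android.media.tv.tuner.ITunerFrontendCallback;
    import android.media.tv.tuner.TunerFrontendSettings;
    import android.media.tv.tuner.TunerFrontendStatus;
    import android.os.RemoteException;

    final class FrontendTuneExample {
        static void tuneOnce(ITunerFrontend frontend, ITunerFrontendCallback cb,
                TunerFrontendSettings settings, int[] statusTypes) throws RemoteException {
            frontend.setCallback(cb);      // lock/scan events arrive here
            frontend.tune(settings);
            // ... wait for the frontend event that signals a lock ...
            TunerFrontendStatus[] statuses = frontend.getStatus(statusTypes);
            // ... inspect signal strength, SNR, etc. from the returned statuses ...
            frontend.stopTune();
            frontend.close();
        }
    }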
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
new file mode 100644
index 0000000..c92f5ee
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendScanMessage;
+
+/**
+ * TunerFrontendCallback interface handles tuner frontend related callbacks.
+ *
+ * {@hide}
+ */
+interface ITunerFrontendCallback {
+    /**
+     * Notify the client that a new event happened on the frontend.
+     */
+    void onEvent(in int frontendEventType);
+
+    /**
+     * Notify the client of scan messages.
+     */
+    void onScanMessage(in int messageType, in TunerFrontendScanMessage message);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl
new file mode 100644
index 0000000..d62145e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.ITunerLnbCallback;
+
+/**
+ * Tuner Lnb interface handles Lnb related operations.
+ *
+ * {@hide}
+ */
+interface ITunerLnb {
+    /**
+     * Set the lnb callback.
+     */
+    void setCallback(in ITunerLnbCallback tunerLnbCallback);
+
+    /**
+     * Set the lnb's power voltage.
+     */
+    void setVoltage(in int voltage);
+
+    /**
+     * Set the lnb's tone mode.
+     */
+    void setTone(in int tone);
+
+    /**
+     * Select the lnb's position.
+     */
+    void setSatellitePosition(in int position);
+
+    /**
+     * Sends DiSEqC (Digital Satellite Equipment Control) message.
+     */
+    void sendDiseqcMessage(in byte[] diseqcMessage);
+
+    /**
+     * Releases the LNB instance.
+     */
+    void close();
+}
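
A minimal sketch of driving an ITunerLnb; the voltage, tone, and position values are placeholders standing in for the HAL constants:

    import android.media.tv.tuner.ITunerLnb;
    import android.media.tv.tuner.ITunerLnbCallback;
    import android.os.RemoteException;

    final class LnbExample {
        static void driveLnb(ITunerLnb lnb, ITunerLnbCallback cb, byte[] diseqcMessage)
                throws RemoteException {
            lnb.setCallback(cb);           // DiSEqC replies and LNB events arrive here
            lnb.setVoltage(0);             // placeholder: voltage constant from the HAL enums
            lnb.setTone(0);                // placeholder: tone mode constant
            lnb.setSatellitePosition(0);   // placeholder: position constant
            lnb.sendDiseqcMessage(diseqcMessage);
            lnb.close();
        }
    }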
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl
new file mode 100644
index 0000000..117352f
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * TunerLnbCallback interface handles tuner LNB related callbacks.
+ *
+ * {@hide}
+ */
+interface ITunerLnbCallback {
+    /**
+     * Notify the client that a new event happened on the Lnb.
+     */
+    void onEvent(in int lnbEventType);
+
+    /**
+     * Notify the client of a new DiSEqC message.
+     */
+    void onDiseqcMessage(in byte[] diseqcMessage);
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
new file mode 100644
index 0000000..755b152
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -0,0 +1,105 @@
+/**
+ * Copyright (c) 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.fmq.MQDescriptor;
+import android.hardware.common.fmq.SynchronizedReadWrite;
+import android.hardware.common.fmq.UnsynchronizedWrite;
+import android.media.tv.tuner.ITunerDemux;
+import android.media.tv.tuner.ITunerDescrambler;
+import android.media.tv.tuner.ITunerFrontend;
+import android.media.tv.tuner.ITunerLnb;
+import android.media.tv.tuner.TunerDemuxCapabilities;
+import android.media.tv.tuner.TunerFrontendDtmbCapabilities;
+import android.media.tv.tuner.TunerFrontendInfo;
+
+/**
+ * TunerService interface handles tuner related operations.
+ *
+ * {@hide}
+ */
+//@VintfStability
+interface ITunerService {
+
+    /**
+     * Gets frontend IDs.
+     */
+    void getFrontendIds(out int[] ids);
+
+    /**
+     * Retrieve the frontend's information.
+     *
+     * @param frontendHandle the handle of the frontend granted by TRM.
+     * @return the information of the frontend.
+     */
+    TunerFrontendInfo getFrontendInfo(in int frontendHandle);
+
+    /**
+     * Get the DTMB frontend capabilities.
+     */
+    TunerFrontendDtmbCapabilities getFrontendDtmbCapabilities(in int id);
+
+    /**
+     * Open a Tuner Frontend interface.
+     *
+     * @param frontendHandle the handle of the frontend granted by TRM.
+     * @return the aidl interface of the frontend.
+     */
+    ITunerFrontend openFrontend(in int frontendHandle);
+
+    /**
+     * Open a new interface of ITunerLnb given a lnbHandle.
+     *
+     * @param lnbHandle the handle of the LNB granted by TRM.
+     * @return a newly created ITunerLnb interface.
+     */
+    ITunerLnb openLnb(in int lnbHandle);
+
+    /**
+     * Open a new interface of ITunerLnb given a LNB name.
+     *
+     * @param lnbName the name for an external LNB to be opened.
+     * @return a newly created ITunerLnb interface.
+     */
+    ITunerLnb openLnbByName(in String lnbName);
+
+    /**
+     * Create a new instance of Demux.
+     */
+    ITunerDemux openDemux(in int demuxHandle);
+
+    /**
+     * Retrieve the Tuner Demux capabilities.
+     *
+     * @return the demux’s capabilities.
+     */
+    TunerDemuxCapabilities getDemuxCaps();
+
+    /**
+     * Open a new interface of ITunerDescrambler given a descramblerHandle.
+     *
+     * @param descramblerHandle the handle of the descrambler granted by TRM.
+     * @return a newly created ITunerDescrambler interface.
+     */
+    ITunerDescrambler openDescrambler(in int descramblerHandle);
+
+    /**
+     * Get an integer that carries the Tuner HIDL version. The high 16 bits are the
+     * major version number, while the low 16 bits are the minor version number. The
+     * default value 0 indicates an unknown version.
+     */
+    int getTunerHalVersion();
+}
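
A hedged sketch of bootstrapping a pipeline through ITunerService; the service name "media.tuner" and the TRM-granted handles are assumptions of this sketch:

    import android.media.tv.tuner.ITunerDemux;
    import android.media.tv.tuner.ITunerFrontend;
    import android.media.tv.tuner.ITunerService;
    import android.media.tv.tuner.TunerFrontendInfo;
    import android.os.RemoteException;
    import android.os.ServiceManager;

    final class TunerServiceExample {
        static void openPipeline(int frontendHandle, int demuxHandle) throws RemoteException {
            // "media.tuner" is an assumed service name for this sketch.
            ITunerService tuner = ITunerService.Stub.asInterface(
                    ServiceManager.getService("media.tuner"));

            TunerFrontendInfo info = tuner.getFrontendInfo(frontendHandle);
            ITunerFrontend frontend = tuner.openFrontend(frontendHandle);
            ITunerDemux demux = tuner.openDemux(demuxHandle);
            // ... tune the frontend, then open filters on the demux ...
        }
    }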
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerTimeFilter.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerTimeFilter.aidl
new file mode 100644
index 0000000..f84b9bf
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerTimeFilter.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Tuner Time Filter interface handles time filter related operations.
+ *
+ * {@hide}
+ */
+interface ITunerTimeFilter {
+    /**
+     * Set time stamp for time based filter.
+     */
+    void setTimeStamp(in long timeStamp);
+
+    /**
+     * Clear the time stamp in the time filter.
+     */
+    void clearTimeStamp();
+
+    /**
+     * Get the time from the beginning of the current data source.
+     */
+    long getSourceTime();
+
+    /**
+     * Get the current time in the time filter.
+     */
+    long getTimeStamp();
+
+    /**
+     * Close the Time Filter instance.
+     */
+    void close();
+}
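
A small sketch of the time filter calls above; the arithmetic at the end is illustrative only:

    import android.media.tv.tuner.ITunerTimeFilter;
    import android.os.RemoteException;

    final class TimeFilterExample {
        static long elapsedSinceTimestamp(ITunerTimeFilter timeFilter, long startPts)
                throws RemoteException {
            timeFilter.setTimeStamp(startPts);         // anchor the time filter
            long now = timeFilter.getTimeStamp();      // current time in the time filter
            long source = timeFilter.getSourceTime();  // time from the start of the data source
            timeFilter.clearTimeStamp();
            timeFilter.close();
            return now - startPts;                     // illustrative arithmetic only
        }
    }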
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl
new file mode 100644
index 0000000..df3374a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Extra Meta Data from AD (Audio Descriptor) according to ETSI TS 101 154 V2.1.1.
+ *
+ * {@hide}
+ */
+parcelable TunerAudioExtraMetaData {
+    byte adFade;
+
+    byte adPan;
+
+    byte versionTextTag;
+
+    byte adGainCenter;
+
+    byte adGainFront;
+
+    byte adGainSurround;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl
new file mode 100644
index 0000000..71ab151
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl
@@ -0,0 +1,50 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Tuner Demux capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerDemuxCapabilities {
+    int numDemux;
+
+    int numRecord;
+
+    int numPlayback;
+
+    int numTsFilter;
+
+    int numSectionFilter;
+
+    int numAudioFilter;
+
+    int numVideoFilter;
+
+    int numPesFilter;
+
+    int numPcrFilter;
+
+    int numBytesInSectionFilter;
+
+    int filterCaps;
+
+    int[] linkCaps;
+
+    boolean bTimeFilter;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl
new file mode 100644
index 0000000..b65f404
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Demux IP address configuration.
+ *
+ * {@hide}
+ */
+parcelable TunerDemuxIpAddress {
+    boolean isIpV6;
+
+    byte[] addr;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl
new file mode 100644
index 0000000..b244388
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerDemuxIpAddress;
+
+/**
+ * Filter Settings for an IP filter.
+ *
+ * {@hide}
+ */
+parcelable TunerDemuxIpAddressSettings {
+    TunerDemuxIpAddress srcIpAddress;
+
+    TunerDemuxIpAddress dstIpAddress;
+
+    char srcPort;
+
+    char dstPort;
+}
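
A minimal sketch of filling in these two parcelables for an IPv4 flow; the port numbers are placeholders:

    import android.media.tv.tuner.TunerDemuxIpAddress;
    import android.media.tv.tuner.TunerDemuxIpAddressSettings;

    final class IpAddressSettingsExample {
        static TunerDemuxIpAddressSettings ipV4Flow(byte[] srcAddr, byte[] dstAddr) {
            TunerDemuxIpAddress src = new TunerDemuxIpAddress();
            src.isIpV6 = false;
            src.addr = srcAddr;              // 4 bytes for IPv4, 16 bytes for IPv6

            TunerDemuxIpAddress dst = new TunerDemuxIpAddress();
            dst.isIpV6 = false;
            dst.addr = dstAddr;

            TunerDemuxIpAddressSettings settings = new TunerDemuxIpAddressSettings();
            settings.srcIpAddress = src;
            settings.dstIpAddress = dst;
            settings.srcPort = (char) 5000;  // AIDL char carries the unsigned 16-bit port
            settings.dstPort = (char) 5001;
            return settings;
        }
    }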
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
new file mode 100644
index 0000000..8b238b6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Demux PID interface.
+ *
+ * {@hide}
+ */
+union TunerDemuxPid {
+    char tPid;
+
+    char mmtpPid;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl
new file mode 100644
index 0000000..4ec4d75
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Dvr Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerDvrSettings {
+    int statusMask;
+
+    int lowThreshold;
+
+    int highThreshold;
+
+    int dataFormat;
+
+    int packetSize;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl
new file mode 100644
index 0000000..4c9e3af
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSettings;
+
+/**
+ * Filter Settings for an ALP filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterAlpConfiguration {
+    byte packetType;
+
+    byte lengthType;
+
+    TunerFilterSettings filterSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl
new file mode 100644
index 0000000..6bf88f0
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Settings for an Audio or Video filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterAvSettings {
+    /**
+     * True if the filter output goes directly to the decoder in pass-through mode.
+     */
+    boolean isPassthrough;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl
new file mode 100644
index 0000000..808cfd1
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterAlpConfiguration;
+import android.media.tv.tuner.TunerFilterIpConfiguration;
+import android.media.tv.tuner.TunerFilterMmtpConfiguration;
+import android.media.tv.tuner.TunerFilterTlvConfiguration;
+import android.media.tv.tuner.TunerFilterTsConfiguration;
+
+/**
+ * Filter configuration.
+ *
+ * {@hide}
+ */
+union TunerFilterConfiguration {
+    TunerFilterTsConfiguration ts;
+
+    TunerFilterMmtpConfiguration mmtp;
+
+    TunerFilterIpConfiguration ip;
+
+    TunerFilterTlvConfiguration tlv;
+
+    TunerFilterAlpConfiguration alp;
+}
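
A hedged sketch of assembling a TunerFilterConfiguration from the nested types in this change; the field-named static factories for the unions assume the standard AIDL Java union code generation:

    import android.media.tv.tuner.TunerFilterAvSettings;
    import android.media.tv.tuner.TunerFilterConfiguration;
    import android.media.tv.tuner.TunerFilterMmtpConfiguration;
    import android.media.tv.tuner.TunerFilterSettings;

    final class FilterConfigExample {
        // Builds a pass-through A/V filter configuration for an MMTP stream.
        static TunerFilterConfiguration passthroughAv(char mmtpPid) {
            TunerFilterAvSettings av = new TunerFilterAvSettings();
            av.isPassthrough = true;

            // Union factories named after the fields (assumed AIDL Java union codegen).
            TunerFilterSettings settings = TunerFilterSettings.av(av);

            TunerFilterMmtpConfiguration mmtp = new TunerFilterMmtpConfiguration();
            mmtp.mmtpPid = mmtpPid;
            mmtp.filterSettings = settings;

            return TunerFilterConfiguration.mmtp(mmtp);
        }
    }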
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl
new file mode 100644
index 0000000..b971dd3
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for Download data.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterDownloadEvent {
+    int itemId;
+
+    /**
+     * MPU sequence number of filtered data (only for MMTP)
+     */
+    int mpuSequenceNumber;
+
+    int itemFragmentIndex;
+
+    int lastItemFragmentIndex;
+
+    /**
+     * Data size in bytes of filtered data
+     */
+    char dataLength;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl
new file mode 100644
index 0000000..417a5fe
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Settings for downloading.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterDownloadSettings {
+    int downloadId;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl
new file mode 100644
index 0000000..1305510
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterDownloadEvent;
+import android.media.tv.tuner.TunerFilterIpPayloadEvent;
+import android.media.tv.tuner.TunerFilterMediaEvent;
+import android.media.tv.tuner.TunerFilterMmtpRecordEvent;
+import android.media.tv.tuner.TunerFilterMonitorEvent;
+import android.media.tv.tuner.TunerFilterPesEvent;
+import android.media.tv.tuner.TunerFilterSectionEvent;
+import android.media.tv.tuner.TunerFilterTemiEvent;
+import android.media.tv.tuner.TunerFilterTsRecordEvent;
+
+/**
+ * Filter events.
+ *
+ * {@hide}
+ */
+union TunerFilterEvent {
+    TunerFilterMediaEvent media;
+
+    TunerFilterSectionEvent section;
+
+    TunerFilterPesEvent pes;
+
+    TunerFilterTsRecordEvent tsRecord;
+
+    TunerFilterMmtpRecordEvent mmtpRecord;
+
+    TunerFilterDownloadEvent download;
+
+    TunerFilterIpPayloadEvent ipPayload;
+
+    TunerFilterTemiEvent temi;
+
+    TunerFilterMonitorEvent monitor;
+
+    int startId;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl
new file mode 100644
index 0000000..8b4d889
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerDemuxIpAddressSettings;
+import android.media.tv.tuner.TunerFilterSettings;
+
+/**
+ * Filter Settings for an IP filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterIpConfiguration {
+    TunerDemuxIpAddressSettings ipAddr;
+
+    TunerFilterSettings filterSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl
new file mode 100644
index 0000000..d5bda93
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for IP payload data.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterIpPayloadEvent {
+    /**
+     * Data size in bytes of IP data
+     */
+    char dataLength;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
new file mode 100644
index 0000000..c3dbce9
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
@@ -0,0 +1,84 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.NativeHandle;
+import android.media.tv.tuner.TunerAudioExtraMetaData;
+
+/**
+ * Filter Event for Audio or Video Filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterMediaEvent {
+    char streamId;
+
+    /**
+     * true if PTS is present in PES header.
+     */
+    boolean isPtsPresent;
+
+    /**
+     * Presentation Time Stamp for the audio or video frame. It is based on a 90KHz
+     * clock and has the same format as the PTS (Presentation Time Stamp).
+     */
+    long pts;
+
+    /**
+     * Data size in bytes of audio or video frame
+     */
+    int dataLength;
+
+    /**
+     *  The offset in the memory block which is shared among multiple
+     *  MediaEvents.
+     */
+    int offset;
+
+    /**
+     * A handle associated to the memory where audio or video data stays.
+     */
+    NativeHandle avMemory;
+
+    /**
+     * True if the avMemory is in a secure area and isn't mappable.
+     */
+    boolean isSecureMemory;
+
+    /**
+     * An ID used by the HAL to provide additional information for AV data.
+     * For secure audio, it's the audio handle used by AudioTrack.
+     */
+    long avDataId;
+
+    /**
+     * MPU sequence number of filtered data (only for MMTP)
+     */
+    int mpuSequenceNumber;
+
+    boolean isPesPrivateData;
+
+    /**
+     * Whether the TunerAudioExtraMetaData field is valid.
+     */
+    boolean isAudioExtraMetaData;
+
+    /**
+     * Only valid when isAudioExtraMetaData is true
+     */
+    TunerAudioExtraMetaData audio;
+}
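
A hedged sketch of consuming a TunerFilterMediaEvent and returning the shared AV memory to the HAL via ITunerFilter.releaseAvHandle:

    import android.media.tv.tuner.ITunerFilter;
    import android.media.tv.tuner.TunerFilterMediaEvent;
    import android.os.RemoteException;

    final class MediaEventExample {
        // Consumes one media event, then hands the shared AV memory back to the HAL.
        static void handle(ITunerFilter filter, TunerFilterMediaEvent event)
                throws RemoteException {
            if (!event.isSecureMemory) {
                // The frame occupies [offset, offset + dataLength) in the shared memory
                // referenced by avMemory; a real client would map and decode it here.
            }
            filter.releaseAvHandle(event.avMemory, event.avDataId);
        }
    }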
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl
new file mode 100644
index 0000000..162ca8e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSettings;
+
+/**
+ * Filter Settings for an MMTP filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterMmtpConfiguration {
+    char mmtpPid;
+
+    TunerFilterSettings filterSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl
new file mode 100644
index 0000000..b8871cf
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl
@@ -0,0 +1,57 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for an MMTP Record Filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterMmtpRecordEvent {
+    int scHevcIndexMask;
+
+    /**
+     * Byte number from beginning of the filter's output
+     */
+    long byteNumber;
+
+    /**
+     * Whether the current event contains extended information.
+     */
+    boolean isExtended;
+
+    /**
+     * The Presentation Time Stamp (PTS) for the audio or video frame. It is based on 90KHz
+     * and has the same format as the PTS in ISO/IEC 13818-1.
+     */
+    long pts;
+
+    /**
+     * MPU sequence number of the filtered data. This is only used for MMTP.
+     */
+    int mpuSequenceNumber;
+
+    /**
+     * Specifies the address of the first macroblock in the slice defined in ITU-T Rec. H.264.
+     */
+    int firstMbInSlice;
+
+    /**
+     * TS index mask.
+     */
+    int tsIndexMask;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl
new file mode 100644
index 0000000..31ab5e6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter monitor events.
+ *
+ * {@hide}
+ */
+union TunerFilterMonitorEvent {
+    /**
+     * New scrambling status.
+     */
+    int scramblingStatus;
+
+    /**
+     * New cid for the IP filter.
+     */
+    int cid;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl
new file mode 100644
index 0000000..312f314
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Settings for PES data.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterPesDataSettings {
+    char streamId;
+
+    boolean isRaw;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
new file mode 100644
index 0000000..dc1ecc6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for PES Filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterPesEvent {
+    char streamId;
+
+    /**
+     * Data size in bytes of PES data
+     */
+    char dataLength;
+
+    /**
+     * MPU sequence number of filtered data
+     */
+    int mpuSequenceNumber;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl
new file mode 100644
index 0000000..29be624
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterScIndexMask;
+
+/**
+ * Filter Settings for recording.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterRecordSettings {
+    int tsIndexMask;
+
+    int scIndexType;
+
+    TunerFilterScIndexMask scIndexMask;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl
new file mode 100644
index 0000000..ed37fce
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter SC Index Mask
+ *
+ * {@hide}
+ */
+union TunerFilterScIndexMask {
+    int sc;
+
+    int scHevc;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl
new file mode 100644
index 0000000..dd4f842
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Bit settings of a section filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterSectionBits {
+    byte[] filter;
+
+    byte[] mask;
+
+    byte[] mode;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl
new file mode 100644
index 0000000..00aabe4
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSectionBits;
+import android.media.tv.tuner.TunerFilterSectionTableInfo;
+
+/**
+ * Section filter condition settings.
+ *
+ * {@hide}
+ */
+union TunerFilterSectionCondition {
+    TunerFilterSectionBits sectionBits;
+
+    TunerFilterSectionTableInfo tableInfo;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl
new file mode 100644
index 0000000..5f20926
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for Section Filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterSectionEvent {
+    /**
+     * Table ID of filtered data
+     */
+    char tableId;
+
+    /**
+     * Version number of filtered data
+     */
+    char version;
+
+    /**
+     * Section number of filtered data
+     */
+    char sectionNum;
+
+    /**
+     * Data size in bytes of filtered data
+     */
+    char dataLength;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl
new file mode 100644
index 0000000..22129b6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSectionCondition;
+
+/**
+ * Filter Settings for a section filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterSectionSettings {
+    TunerFilterSectionCondition condition;
+
+    boolean isCheckCrc;
+
+    boolean isRepeat;
+
+    boolean isRaw;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl
new file mode 100644
index 0000000..cc78c9d
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Table info settings of a section filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterSectionTableInfo {
+    char tableId;
+
+    char version;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl
new file mode 100644
index 0000000..eb7eaa5
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterAvSettings;
+import android.media.tv.tuner.TunerFilterDownloadSettings;
+import android.media.tv.tuner.TunerFilterPesDataSettings;
+import android.media.tv.tuner.TunerFilterRecordSettings;
+import android.media.tv.tuner.TunerFilterSectionSettings;
+
+/**
+ * Filter Settings.
+ *
+ * {@hide}
+ */
+union TunerFilterSettings {
+    boolean nothing;
+
+    TunerFilterAvSettings av;
+
+    TunerFilterSectionSettings section;
+
+    TunerFilterPesDataSettings pesData;
+
+    TunerFilterRecordSettings record;
+
+    TunerFilterDownloadSettings download;
+
+    boolean isPassthrough;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl
new file mode 100644
index 0000000..122dfc3
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.hardware.common.NativeHandle;
+
+/**
+ * Filter Shared Handle Information.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterSharedHandleInfo {
+    NativeHandle handle;
+    long size;
+}
\ No newline at end of file
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl
new file mode 100644
index 0000000..4c4e993
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Filter Event for Timed External Media Information (TEMI) data.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterTemiEvent {
+    /**
+     * Presentation Time Stamp (PTS) for the audio or video frame. It is based on a
+     * 90 kHz clock and has the same format as the PTS in ISO/IEC 13818-1.
+     */
+    long pts;
+
+    /**
+     * TEMI Descriptor Tag
+     */
+    byte descrTag;
+
+    /**
+     * TEMI Descriptor
+     */
+    byte[] descrData;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl
new file mode 100644
index 0000000..0b237b4
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSettings;
+
+/**
+ * Filter Settings for a TLV filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterTlvConfiguration {
+    byte packetType;
+
+    boolean isCompressedIpPacket;
+
+    TunerFilterSettings filterSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl
new file mode 100644
index 0000000..2e386e6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterSettings;
+
+/**
+ * Filter Settings for a TS filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterTsConfiguration {
+    char tpid;
+
+    TunerFilterSettings filterSettings;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl
new file mode 100644
index 0000000..c52a749
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFilterScIndexMask;
+
+/**
+ * Filter Event for TS Record Filter.
+ *
+ * {@hide}
+ */
+parcelable TunerFilterTsRecordEvent {
+    char pid;
+
+    int tsIndexMask;
+
+    /**
+     * Indexes of record output
+     */
+    TunerFilterScIndexMask scIndexMask;
+
+    /**
+     * Number of bytes from the beginning of the filter's output
+     */
+    long byteNumber;
+
+    /**
+     * Whether the current event contains extended information
+     */
+    boolean isExtended;
+
+    /**
+     * The Presentation Time Stamp (PTS) for the audio or video frame. It is based on a
+     * 90 kHz clock and has the same format as the PTS in ISO/IEC 13818-1.
+     */
+    long pts;
+
+    /**
+     * Specifies the address of the first macroblock in the slice defined in ITU-T Rec. H.264.
+     */
+    int firstMbInSlice;
+}
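
A small sketch of how a client might interpret the fields above; the 90 kHz conversion follows the field comment, and the include path is assumed from the usual cpp-backend layout.

#include <cstdint>

#include <android/media/tv/tuner/TunerFilterTsRecordEvent.h>

using ::android::media::tv::tuner::TunerFilterTsRecordEvent;

// Returns the PTS in milliseconds, or -1 when the event carries no extended fields.
int64_t extendedPtsMs(const TunerFilterTsRecordEvent& event) {
    // pts and firstMbInSlice are only meaningful when isExtended is true.
    if (!event.isExtended) {
        return -1;
    }
    // The PTS is expressed on a 90 kHz clock, so dividing by 90 yields milliseconds.
    return event.pts / 90;
}
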
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
new file mode 100644
index 0000000..74bf04e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Analog Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAnalogCapabilities {
+    /**
+     * Signal Type capability
+     */
+    int typeCap;
+
+    /**
+     * Standard Interchange Format (SIF) capability
+     */
+    int sifStandardCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
new file mode 100644
index 0000000..40cd8c9
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Analog Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAnalogSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int signalType;
+
+    /**
+     * Standard Interchange Format (SIF) setting
+     */
+    int sifStandard;
+
+    /**
+     * Fields after isExtended are only valid when isExtended is true
+     */
+    boolean isExtended;
+
+    int aftFlag;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
new file mode 100644
index 0000000..6c9be77
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3Capabilities {
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * TimeInterleaveMode capability
+     */
+    int timeInterleaveModeCap;
+
+    /**
+     * CodeRate capability
+     */
+    int codeRateCap;
+
+    /**
+     * FEC capability
+     */
+    int fecCap;
+
+    /**
+     * Demodulator Output Format capability
+     */
+    int demodOutputFormatCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
new file mode 100644
index 0000000..b29e1f7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc3 Frontend Physical Layer Pipe Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3PlpSettings {
+    int plpId;
+
+    int modulation;
+
+    int interleaveMode;
+
+    int codeRate;
+
+    /**
+     * Forward Error Correction Type.
+     */
+    int fec;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
new file mode 100644
index 0000000..32fb8c7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAtsc3PlpSettings;
+
+/**
+ * Atsc3 Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtsc3Settings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    /**
+     * Bandwidth of tuning band.
+     */
+    int bandwidth;
+
+    int demodOutputFormat;
+
+    TunerFrontendAtsc3PlpSettings[] plpSettings;
+}
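
A minimal construction sketch for the settings above; the placeholder zeros stand in for the HAL constants a real client would use, and the include paths are assumed.

#include <cstdint>
#include <vector>

#include <android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.h>
#include <android/media/tv/tuner/TunerFrontendAtsc3Settings.h>

using ::android::media::tv::tuner::TunerFrontendAtsc3PlpSettings;
using ::android::media::tv::tuner::TunerFrontendAtsc3Settings;

TunerFrontendAtsc3Settings makeAtsc3Settings(int32_t frequencyHz, int32_t bandwidth) {
    TunerFrontendAtsc3PlpSettings plp;
    plp.plpId = 0;            // placeholder: real code would use HAL constants
    plp.modulation = 0;
    plp.interleaveMode = 0;
    plp.codeRate = 0;
    plp.fec = 0;

    TunerFrontendAtsc3Settings settings;
    settings.frequency = frequencyHz;   // in Hertz, per the field comment above
    settings.bandwidth = bandwidth;
    settings.demodOutputFormat = 0;
    settings.plpSettings = {plp};       // one entry per physical layer pipe to tune
    return settings;
}
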
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
new file mode 100644
index 0000000..2b6c2fc
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
@@ -0,0 +1,29 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ATSC Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtscCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+}
\ No newline at end of file
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
new file mode 100644
index 0000000..c7a8c07
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendAtscSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int modulation;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
new file mode 100644
index 0000000..b880c60
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Cable (DVB-C) Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendCableCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    long codeRateCap; // inner FEC will converge to codeRate
+
+    /**
+     * Annex capability
+     */
+    int annexCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
new file mode 100644
index 0000000..b9bcf29
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
@@ -0,0 +1,63 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Cable Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendCableSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int modulation;
+
+    /**
+     * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
+     * and ETSI EN 302 307-2 V1.1.1.
+     */
+    long innerFec;
+
+    /**
+     * Symbols per second
+     */
+    int symbolRate;
+
+    /**
+     * Outer Forward Error Correction (FEC) Type.
+     */
+    int outerFec;
+
+    int annex;
+
+    /**
+     * Spectral Inversion Type.
+     */
+    int spectralInversion;
+
+    /**
+     * Fields after isExtended are only valid when isExtended is true
+     */
+    boolean isExtended;
+
+    int interleaveMode;
+
+    int bandwidth;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
new file mode 100644
index 0000000..19f31f1
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
@@ -0,0 +1,85 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAnalogCapabilities;
+import android.media.tv.tuner.TunerFrontendAtscCapabilities;
+import android.media.tv.tuner.TunerFrontendAtsc3Capabilities;
+import android.media.tv.tuner.TunerFrontendCableCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbsCapabilities;
+import android.media.tv.tuner.TunerFrontendDvbtCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbsCapabilities;
+import android.media.tv.tuner.TunerFrontendIsdbs3Capabilities;
+import android.media.tv.tuner.TunerFrontendIsdbtCapabilities;
+
+/**
+ * Frontend Capabilities interface.
+ *
+ * Clients may use FrontendInfo.type as the discriminator to check which capability
+ * alternative of this union carries a valid value.
+ *
+ * {@hide}
+ */
+union TunerFrontendCapabilities {
+    /**
+     * Analog Frontend Capabilities
+     */
+    TunerFrontendAnalogCapabilities analogCaps;
+
+    /**
+     * ATSC Frontend Capabilities
+     */
+    TunerFrontendAtscCapabilities atscCaps;
+
+    /**
+     * ATSC3 Frontend Capabilities
+     */
+    TunerFrontendAtsc3Capabilities atsc3Caps;
+
+    /**
+     * Cable Frontend Capabilities
+     */
+    TunerFrontendCableCapabilities cableCaps;
+
+    /**
+     * DVBS Frontend Capabilities
+     */
+    TunerFrontendDvbsCapabilities dvbsCaps;
+
+    /**
+     * DVBT Frontend Capabilities
+     */
+    TunerFrontendDvbtCapabilities dvbtCaps;
+
+    /**
+     * ISDB-S Frontend Capabilities
+     */
+    TunerFrontendIsdbsCapabilities isdbsCaps;
+
+    /**
+     * ISDB-S3 Frontend Capabilities
+     */
+    TunerFrontendIsdbs3Capabilities isdbs3Caps;
+
+    /**
+     * ISDB-T Frontend Capabilities
+     */
+    TunerFrontendIsdbtCapabilities isdbtCaps;
+}
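
A short sketch of reading one alternative of the capabilities union, assuming the getTag()/get<Tag::...>() accessors that the AIDL compiler generates for unions; the include path is likewise an assumption.

#include <cstdint>

#include <android/media/tv/tuner/TunerFrontendCapabilities.h>

using ::android::media::tv::tuner::TunerFrontendCapabilities;

// Returns the ATSC modulation capability mask, or 0 for non-ATSC frontends.
int32_t atscModulationCapOrZero(const TunerFrontendCapabilities& caps) {
    if (caps.getTag() != TunerFrontendCapabilities::Tag::atscCaps) {
        return 0;  // the union holds a different frontend type's capabilities
    }
    return caps.get<TunerFrontendCapabilities::Tag::atscCaps>().modulationCap;
}
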
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl
new file mode 100644
index 0000000..e8e4933
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DTMB Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDtmbCapabilities {
+    int transmissionModeCap;
+
+    int bandwidthCap;
+
+    int modulationCap;
+
+    int codeRateCap;
+
+    int guardIntervalCap;
+
+    int interleaveModeCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl
new file mode 100644
index 0000000..45e7ff9
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl
@@ -0,0 +1,38 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DTMB Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDtmbSettings {
+    int frequency;
+
+    int transmissionMode;
+
+    int bandwidth;
+
+    int modulation;
+
+    int codeRate;
+
+    int guardInterval;
+
+    int interleaveMode;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
new file mode 100644
index 0000000..5e4322c
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBS Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    long codeRateCap;  // inner FEC will converge to codeRate
+
+    /**
+     * Sub standards capability
+     */
+    int standard;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
new file mode 100644
index 0000000..59b7de3
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
@@ -0,0 +1,42 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Dvbs Frontend CodeRate interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsCodeRate {
+    /**
+     * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
+     * and ETSI EN 302 307-2 V1.1.1.
+     */
+    long fec;
+
+    boolean isLinear;
+
+    /**
+     * True if short frames are enabled
+     */
+    boolean isShortFrames;
+
+    /**
+     * Number of bits per 1000 symbols; 0 to use the default.
+     */
+    int bitsPer1000Symbol;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
new file mode 100644
index 0000000..ec3e4b9
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
@@ -0,0 +1,65 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendDvbsCodeRate;
+
+/**
+ * Dvbs Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbsSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int modulation;
+
+    TunerFrontendDvbsCodeRate codeRate;
+
+    int symbolRate;
+
+    /**
+     * Roll off type.
+     */
+    int rolloff;
+
+    /**
+     * Pilot mode.
+     */
+    int pilot;
+
+    int inputStreamId;
+
+    int standard;
+
+    /**
+     * Vcm mode.
+     */
+    int vcm;
+
+    /**
+     * Fields after isExtended are only valid when isExtended is true
+     */
+    boolean isExtended;
+
+    int scanType;
+
+    boolean isDiseqcRxMessage;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
new file mode 100644
index 0000000..73f16dd
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * DVBT Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbtCapabilities {
+    /**
+     * Transmission Mode capability
+     */
+    int transmissionModeCap;
+
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Constellation capability
+     */
+    int constellationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+
+    /**
+     * Hierarchy Type capability
+     */
+    int hierarchyCap;
+
+    /**
+     * Guard Interval capability
+     */
+    int guardIntervalCap;
+
+    /**
+     * T2 Support capability
+     */
+    boolean isT2Supported;
+
+    /**
+     * MISO Support capability
+     */
+    boolean isMisoSupported;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
new file mode 100644
index 0000000..14c942a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
@@ -0,0 +1,75 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Dvbt Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendDvbtSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int transmissionMode;
+
+    int bandwidth;
+
+    int constellation;
+
+    int hierarchy;
+
+    /**
+     * Code Rate for High Priority level
+     */
+    int hpCodeRate;
+
+    /**
+     * Code Rate for Low Priority level
+     */
+    int lpCodeRate;
+
+    int guardInterval;
+
+    boolean isHighPriority;
+
+    int standard;
+
+    boolean isMiso;
+
+    /**
+     * Physical Layer Pipe (PLP) mode
+     */
+    int plpMode;
+
+    /**
+     * Physical Layer Pipe (PLP) Id
+     */
+    int plpId;
+
+    /**
+     * Physical Layer Pipe (PLP) Group Id
+     */
+    int plpGroupId;
+
+    /**
+     * Fields after isExtended are only valid when isExtended is true
+     */
+    boolean isExtended;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl
new file mode 100644
index 0000000..4bccd56
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl
@@ -0,0 +1,80 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendCapabilities;
+
+/**
+ * FrontendInfo interface that carries tuner frontend information.
+ *
+ * <p>This is used to update the TunerResourceManager and pass Frontend
+ * information from HAL to the client side.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendInfo {
+    /**
+     * Frontend Handle
+     */
+    int handle;
+
+    /**
+     * Frontend Type
+     */
+    int type;
+
+    /**
+     * Minimum Frequency in Hertz
+     */
+    int minFrequency;
+
+    /**
+     * Maximum Frequency in Hertz
+     */
+    int maxFrequency;
+
+    /**
+     * Minimum symbols per second
+     */
+    int minSymbolRate;
+
+    /**
+     * Maximum symbols per second
+     */
+    int maxSymbolRate;
+
+    /**
+     * Frequency acquisition range in Hertz
+     */
+    int acquireRange;
+
+    /**
+     * Frontends are assigned the same exclusiveGroupId if they cannot function at the
+     * same time, for instance, because they share the same hardware module.
+     */
+    int exclusiveGroupId;
+
+    /**
+     * A list of supported status types that the client can query
+     */
+    int[] statusCaps;
+
+    /**
+     * Frontend Capabilities
+     */
+    TunerFrontendCapabilities caps;
+}
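
A tiny sketch of how a client might consume the frequency range documented above; the helper name is illustrative and the include path is assumed.

#include <cstdint>

#include <android/media/tv/tuner/TunerFrontendInfo.h>

using ::android::media::tv::tuner::TunerFrontendInfo;

// True when the requested frequency falls inside the frontend's supported range.
bool supportsFrequencyHz(const TunerFrontendInfo& info, int32_t frequencyHz) {
    // minFrequency and maxFrequency are both expressed in Hertz.
    return frequencyHz >= info.minFrequency && frequencyHz <= info.maxFrequency;
}
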
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
new file mode 100644
index 0000000..84dd67a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S3 Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbs3Capabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
new file mode 100644
index 0000000..9a11fd5
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbs3 Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbs3Settings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    char streamId;
+
+    int streamIdType;
+
+    int modulation;
+
+    int codeRate;
+
+    /**
+     * Symbols per second
+     */
+    int symbolRate;
+
+    int rolloff;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
new file mode 100644
index 0000000..15dfdf7
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-S Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbsCapabilities {
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
new file mode 100644
index 0000000..dff9f4a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbs Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbsSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    char streamId;
+
+    int streamIdType;
+
+    int modulation;
+
+    int codeRate;
+
+    /**
+     * Symbols per second
+     */
+    int symbolRate;
+
+    int rolloff;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
new file mode 100644
index 0000000..c9295d8
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * ISDB-T Frontend Capabilities interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbtCapabilities {
+    /**
+     * ISDB-T Mode capability
+     */
+    int modeCap;
+
+    /**
+     * Bandwidth capability
+     */
+    int bandwidthCap;
+
+    /**
+     * Modulation capability
+     */
+    int modulationCap;
+
+    /**
+     * Code Rate capability
+     */
+    int codeRateCap;
+
+    /**
+     * Guard Interval capability
+     */
+    int guardIntervalCap;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
new file mode 100644
index 0000000..191f3a6
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Isdbt Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendIsdbtSettings {
+    /**
+     * Signal frequency in Hertz
+     */
+    int frequency;
+
+    int modulation;
+
+    int bandwidth;
+
+    int mode;
+
+    int codeRate;
+
+    int guardInterval;
+
+    int serviceAreaId;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl
new file mode 100644
index 0000000..1b8fcbb
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc3 Frontend Physical Layer Pipe Info.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendScanAtsc3PlpInfo {
+    byte plpId;
+
+    boolean llsFlag;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl
new file mode 100644
index 0000000..9921ca1
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendScanAtsc3PlpInfo;
+
+/**
+ * Tuner Frontend Scan Message interface.
+ *
+ * {@hide}
+ */
+union TunerFrontendScanMessage {
+    boolean isLocked;
+
+    boolean isEnd;
+
+    byte progressPercent;
+
+    int[] frequencies;
+
+    int[] symbolRates;
+
+    int hierarchy;
+
+    int analogType;
+
+    byte[] plpIds;
+
+    byte[] groupIds;
+
+    char[] inputStreamIds;
+
+    int std;
+
+    TunerFrontendScanAtsc3PlpInfo[] atsc3PlpInfos;
+
+    int modulation;
+
+    int annex;
+
+    boolean isHighPriority;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
new file mode 100644
index 0000000..70a5f3e
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
@@ -0,0 +1,34 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendUnionSettings;
+
+/**
+ * Frontend Settings interface.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendSettings {
+    TunerFrontendUnionSettings settings;
+
+    boolean isExtended;
+
+    int endFrequency;
+
+    int inversion;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
new file mode 100644
index 0000000..2b3c01b
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
@@ -0,0 +1,187 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendStatusAtsc3PlpInfo;
+
+/**
+ * Tuner Frontend Status interface.
+ *
+ * {@hide}
+ */
+union TunerFrontendStatus {
+    /**
+     * Demodulator lock status (true/false).
+     */
+    boolean isDemodLocked;
+
+    /**
+     * SNR value measured by 0.001 dB.
+     */
+    int snr;
+
+    /**
+     * The number of error bits per 1 billion bits.
+     */
+    int ber;
+
+    /**
+     * The number of error packets per 1 billion packets.
+     */
+    int per;
+
+    /**
+     * The number of error bits per 1 billion bits before FEC.
+     */
+    int preBer;
+
+    /**
+     * Signal Quality in percent.
+     */
+    int signalQuality;
+
+    /**
+     * Signal Strength measured by 0.001 dBm.
+     */
+    int signalStrength;
+
+    /**
+     * Symbols per second
+     */
+    int symbolRate;
+
+    long innerFec;
+
+    /**
+     * Check frontend type to decide the hidl type value
+     */
+    int modulation;
+
+    int inversion;
+
+    int lnbVoltage;
+
+    byte plpId;
+
+    boolean isEWBS;
+
+    /**
+     * AGC value is normalized from 0 to 255.
+     */
+    byte agc;
+
+    boolean isLnaOn;
+
+    boolean[] isLayerError;
+
+    /**
+     * MER value measured by 0.001 dB
+     */
+    int mer;
+
+    /**
+     * Frequency difference in Hertz.
+     */
+    int freqOffset;
+
+    int hierarchy;
+
+    boolean isRfLocked;
+
+    /**
+     * A list of PLP statuses for the tuned PLPs of an ATSC3 frontend.
+     */
+    TunerFrontendStatusAtsc3PlpInfo[] plpInfo;
+
+    // 1.1 Extension Starting
+
+    /**
+     * Extended modulation status. Check frontend type to decide the hidl type value.
+     */
+    int[] modulations;
+
+    /**
+     * Extended bit error ratio status.
+     */
+    int[] bers;
+
+    /**
+     * Extended code rate status.
+     */
+    long[] codeRates;
+
+    /**
+     * Extended bandwidth status. Check frontend type to decide the hidl type value.
+     */
+    int bandwidth;
+
+    /**
+     * Extended guard interval status. Check frontend type to decide the hidl type value.
+     */
+    int interval;
+
+    /**
+     * Extended transmission mode status. Check frontend type to decide the hidl type value.
+     */
+    int transmissionMode;
+
+    /**
+     * Uncorrectable Error Counts of the frontend's Physical Layer Pipe (PLP)
+     * since the last tune operation.
+     */
+    int uec;
+
+    /**
+     * The current DVB-T2 system id status.
+     */
+    char systemId;
+
+    /**
+     * Frontend Interleaving Modes. Check frontend type to decide the hidl type value.
+     */
+    int[] interleaving;
+
+    /**
+     * Segments of all the channels, as defined in the ISDB-T specification.
+     */
+    byte[] isdbtSegment;
+
+    /**
+     * Transport Stream Data Rate in BPS of the current channel.
+     */
+    int[] tsDataRate;
+
+    /**
+     * Roll Off Type status of the frontend. Check frontend type to decide the hidl type value.
+     */
+    int rollOff;
+
+    /**
+     * Whether the frontend currently supports MISO.
+     */
+    boolean isMiso;
+
+    /**
+     * Whether the frontend code rate is linear.
+     */
+    boolean isLinear;
+
+    /**
+     * Whether short frames are enabled.
+     */
+    boolean isShortFrames;
+}
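
A short sketch that converts two of the scaled status values documented above into conventional units, assuming the generated union accessors and include path.

#include <android/media/tv/tuner/TunerFrontendStatus.h>

using ::android::media::tv::tuner::TunerFrontendStatus;

// snr is reported in 0.001 dB steps and signalStrength in 0.001 dBm steps.
double snrInDb(const TunerFrontendStatus& status) {
    if (status.getTag() != TunerFrontendStatus::Tag::snr) {
        return 0.0;  // this status element does not carry an SNR reading
    }
    return status.get<TunerFrontendStatus::Tag::snr>() / 1000.0;
}

double signalStrengthInDbm(const TunerFrontendStatus& status) {
    if (status.getTag() != TunerFrontendStatus::Tag::signalStrength) {
        return 0.0;  // this status element does not carry a signal strength reading
    }
    return status.get<TunerFrontendStatus::Tag::signalStrength>() / 1000.0;
}
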
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl
new file mode 100644
index 0000000..4116c34
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+/**
+ * Atsc3 Frontend Physical Layer Pipe Info in Frontend status.
+ *
+ * {@hide}
+ */
+parcelable TunerFrontendStatusAtsc3PlpInfo {
+    /**
+     * PLP Id value.
+     */
+    byte plpId;
+
+    /**
+     * Demod Lock/Unlock status of this particular PLP.
+     */
+    boolean isLocked;
+
+    /**
+     * Uncorrectable Error Counts (UEC) of this particular PLP since the last tune operation.
+     */
+    int uec;
+}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl
new file mode 100644
index 0000000..c362c2a
--- /dev/null
+++ b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl
@@ -0,0 +1,55 @@
+/**
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv.tuner;
+
+import android.media.tv.tuner.TunerFrontendAnalogSettings;
+import android.media.tv.tuner.TunerFrontendAtscSettings;
+import android.media.tv.tuner.TunerFrontendAtsc3Settings;
+import android.media.tv.tuner.TunerFrontendCableSettings;
+import android.media.tv.tuner.TunerFrontendDtmbSettings;
+import android.media.tv.tuner.TunerFrontendDvbsSettings;
+import android.media.tv.tuner.TunerFrontendDvbtSettings;
+import android.media.tv.tuner.TunerFrontendIsdbsSettings;
+import android.media.tv.tuner.TunerFrontendIsdbs3Settings;
+import android.media.tv.tuner.TunerFrontendIsdbtSettings;
+
+/**
+ * Frontend Settings Union interface.
+ *
+ * {@hide}
+ */
+union TunerFrontendUnionSettings {
+    TunerFrontendAnalogSettings analog;
+
+    TunerFrontendAtscSettings atsc;
+
+    TunerFrontendAtsc3Settings atsc3;
+
+    TunerFrontendCableSettings cable;
+
+    TunerFrontendDvbsSettings dvbs;
+
+    TunerFrontendDvbtSettings dvbt;
+
+    TunerFrontendIsdbsSettings isdbs;
+
+    TunerFrontendIsdbs3Settings isdbs3;
+
+    TunerFrontendIsdbtSettings isdbt;
+
+    TunerFrontendDtmbSettings dtmb;
+}
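
A minimal sketch of wrapping one of the per-standard settings above into TunerFrontendSettings; the make<Tag::...> helper and include paths are assumptions based on the standard AIDL codegen for unions and parcelables.

#include <cstdint>

#include <android/media/tv/tuner/TunerFrontendAtscSettings.h>
#include <android/media/tv/tuner/TunerFrontendSettings.h>
#include <android/media/tv/tuner/TunerFrontendUnionSettings.h>

using ::android::media::tv::tuner::TunerFrontendAtscSettings;
using ::android::media::tv::tuner::TunerFrontendSettings;
using ::android::media::tv::tuner::TunerFrontendUnionSettings;

TunerFrontendSettings makeAtscTuneRequest(int32_t frequencyHz, int32_t modulation) {
    TunerFrontendAtscSettings atsc;
    atsc.frequency = frequencyHz;
    atsc.modulation = modulation;

    TunerFrontendSettings settings;
    settings.settings =
            TunerFrontendUnionSettings::make<TunerFrontendUnionSettings::Tag::atsc>(atsc);
    settings.isExtended = false;   // endFrequency and inversion only apply when extended
    settings.endFrequency = 0;
    settings.inversion = 0;
    return settings;
}
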
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
new file mode 100644
index 0000000..586a0e2
--- /dev/null
+++ b/services/tuner/main_tunerservice.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <hidl/HidlTransportSupport.h>
+
+#include "TunerService.h"
+
+using namespace android;
+
+int main(int argc __unused, char** argv) {
+    ALOGD("Tuner service starting");
+
+    strcpy(argv[0], "media.tuner");
+    sp<ProcessState> proc(ProcessState::self());
+    sp<IServiceManager> sm = defaultServiceManager();
+    ALOGD("ServiceManager: %p", sm.get());
+
+    binder_status_t status = TunerService::instantiate();
+    if (status != STATUS_OK) {
+        ALOGD("Failed to add tuner service as AIDL interface");
+        return -1;
+    }
+
+    ProcessState::self()->startThreadPool();
+    IPCThreadState::self()->joinThreadPool();
+}
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
new file mode 100644
index 0000000..fd30618
--- /dev/null
+++ b/services/tuner/mediatuner.rc
@@ -0,0 +1,5 @@
+service media.tuner /system/bin/mediatuner
+    class main
+    group media
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/tools/mainline_hook.sh b/tools/mainline_hook.sh
deleted file mode 100755
index 58afb49..0000000
--- a/tools/mainline_hook.sh
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-LOCAL_DIR="$( dirname "${BASH_SOURCE}" )"
-
-MAINLINE_FRAMEWORKS_AV_PATHS=(
-    media/extractors/
-    media/codec2/components/
-    media/libstagefright/codecs/amrnb
-    media/libstagefright/codecs/amrwb
-    media/libstagefright/codecs/amrwbenc
-    media/libstagefright/codecs/common
-    media/libstagefright/codecs/mp3dec
-    media/libstagefright/codecs/m4v_h263
-    media/libstagefright/flac/dec
-    media/libstagefright/mpeg2ts
-)
-
-MAINLINE_EXTERNAL_PROJECTS=(
-    external/aac
-    external/flac
-    external/libaac
-    external/libaom
-    external/libavc
-    external/libgav1
-    external/libgsm
-    external/libhevc
-    external/libmpeg2
-    external/libopus
-    external/libvpx
-    external/libxaac
-    external/sonivox
-    external/tremolo
-)
-
-DEV_BRANCH=qt-aml-media-dev
-RED=$(tput setaf 1)
-NORMAL=$(tput sgr0)
-WARNING_FULL="${RED}Please upload this change in ${DEV_BRANCH} unless it is restricted
-from mainline release until next dessert release. Low/moderate security bugs
-are restricted this way.${NORMAL}"
-WARNING_PARTIAL="${RED}It looks like your change has mainline and non-mainline changes;
-Consider separating them into two separate CLs -- one for mainline files,
-one for non-mainline files.${NORMAL}"
-PWD=`pwd`
-
-if git branch -vv | grep -q -P "^\*[^\[]+\[goog/qt-aml-media-dev"; then
-    # Change appears to be in mainline dev branch
-    exit 0
-fi
-
-for path in "${MAINLINE_EXTERNAL_PROJECTS[@]}"; do
-    if [[ $PWD =~ $path ]]; then
-        echo -e "${RED}The source of truth for '$path' is in ${DEV_BRANCH}.${NORMAL}"
-        echo -e ${WARNING_FULL}
-        exit 1
-    fi
-done
-
-if [[ ! $PWD =~ frameworks/av ]]; then
-    exit 0
-fi
-
-mainline_count=0
-total_count=0
-echo
-while read -r file ; do
-    (( total_count++ ))
-    for path in "${MAINLINE_FRAMEWORKS_AV_PATHS[@]}"; do
-        if [[ $file =~ ^$path ]]; then
-            echo -e "${RED}The source of truth for '$file' is in ${DEV_BRANCH}.${NORMAL}"
-            (( mainline_count++ ))
-            break
-        fi
-    done
-done < <(git show --name-only --pretty=format: $1 | grep -- "$2")
-
-if (( mainline_count != 0 )); then
-    if (( mainline_count == total_count )); then
-        echo -e ${WARNING_FULL}
-    else
-        echo -e ${WARNING_PARTIAL}
-    fi
-    exit 1
-fi
-exit 0
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index 3dc6163..bd82315 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index 8d35470..cb5fc44 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -16,7 +16,8 @@
 
 
 # tunables
-DEV_BRANCH=rvc-dev
+DEV_BRANCH=master
+MAINLINE_BRANCH=sc-mainline-prod
 
 ###
 RED=$(tput setaf 1)
@@ -25,41 +26,27 @@
 ## check the active branch:
 ## * b131183694 d198c6a [goog/master] Fix to handle missing checks on error returned
 ##
-current=`git branch -vv | grep -P "^\*[^\[]+\[goog/"|sed -e 's/^.*\[//' | sed -e 's/:.*$//'| sed -e 's/^goog\///'`
+current=`git branch -vv | grep -P "^\*[^\[]+\[goog/"|sed -e 's/^.*\[//' | sed -e 's/\].*$//'|sed -e 's/:.*$//'| sed -e 's/^goog\///'`
 if [ "${current}" = "" ] ; then
         current=unknown
 fi
 
-if [ "${current}" = "${DEV_BRANCH}" ] ; then
-    # Change appears to be in mainline dev branch
-    exit 0
+# remind the uploader that the change should also land in the mainline branch
+#
+if [ "${current}" != "${MAINLINE_BRANCH}" ] ; then
+        # not on the mainline branch: print the reminder (this does not block the upload)
+        cat - <<EOF
+You are uploading repo  ${RED}${REPO_PATH}${NORMAL} to branch ${RED}${current}${NORMAL}.
+The mainline branch for ${RED}${REPO_PATH}${NORMAL} is branch ${RED}${MAINLINE_BRANCH}${NORMAL}.
+
+Ensure an appropriate cherry pick or equivalent lands in branch ${RED}${MAINLINE_BRANCH}${NORMAL}.
+Security bulletin timing or unreleased functionality may determine when that can be landed.
+
+EOF
 fi
 
-## warn the user that about not being on the typical/desired branch.
-
-cat - <<EOF
-
-You are uploading repo  ${RED}${REPO_PATH}${NORMAL} to branch ${RED}${current}${NORMAL}. 
-The source of truth for ${RED}${REPO_PATH}${NORMAL} is branch ${RED}${DEV_BRANCH}${NORMAL}. 
-
-Please upload this change to branch ${RED}${DEV_BRANCH}${NORMAL} unless one or more of
-the following apply:
-- this is a security bug prohibited from disclosure before the next dessert release.
-  (moderate security bugs fall into this category).
-- this is new functionality prohibitied from disclosure before the next dessert release.
-EOF
-
-
-##
-## TODO: prompt the user y/n to continue right now instead of re-invoking with no-verify
-## this has to get around how repo buffers stdout from this script such that the output
-## is not flushed before we try to read the input.
-## 
-
-cat - <<EOF
-If you are sure you want to proceed uploading to branch ${RED}${current}${NORMAL},
-re-run your repo upload command with the '--no-verify' option
-
-EOF
-exit 1
+# exit 0 is "all good, no output passed along to user"
+# exit 77 is "all ok, but output is passed along to the user"
+#
+exit 77